From 09dcfaf99e2afed69070e571f09296847f1b99b2 Mon Sep 17 00:00:00 2001 From: Charles Martin Date: Tue, 30 Apr 2024 15:55:55 +1000 Subject: [PATCH] updated data to integrate 2023 music proceedings --- _data/nime_installations.yaml | 434 +- _data/nime_music.yaml | 21369 ++-- _data/nime_papers.yaml | 160683 ++++++++++++++++--------------- 3 files changed, 91285 insertions(+), 91201 deletions(-) diff --git a/_data/nime_installations.yaml b/_data/nime_installations.yaml index 695dec5d..46e4f9e6 100644 --- a/_data/nime_installations.yaml +++ b/_data/nime_installations.yaml @@ -1,86 +1,3 @@ -- ENTRYTYPE: inproceedings - ID: nime2011-installations-Norderval2011 - address: 'Oslo, Norway' - author: Kristin Norderval - bibtex: "@inproceedings{nime2011-installations-Norderval2011,\n address = {Oslo,\ - \ Norway},\n author = {Kristin Norderval},\n booktitle = {Installation Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ - \ = {Norwegian Academy of Music},\n title = {TURN ME! I need 12 Volts!},\n url\ - \ = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n year = {2011}\n\ - }\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: TURN ME! I need 12 Volts! 
- url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf - year: 2011 - - -- ENTRYTYPE: inproceedings - ID: nime2011-installations-2011 - address: 'Oslo, Norway' - author: Nicolas d'Alessandro and Roberto Calderon - bibtex: "@inproceedings{nime2011-installations-2011,\n address = {Oslo, Norway},\n\ - \ author = {Nicolas d'Alessandro and Roberto Calderon},\n booktitle = {Installation\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ - \ = {Norwegian Academy of Music},\n title = {ROOM\\#81},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ - \ year = {2011}\n}\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: "ROOM\\#81" - url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf - year: 2011 - - -- ENTRYTYPE: inproceedings - ID: nime2011-installations-Peschta2011 - address: 'Oslo, Norway' - author: Leo Peschta - bibtex: "@inproceedings{nime2011-installations-Peschta2011,\n address = {Oslo, Norway},\n\ - \ author = {Leo Peschta},\n booktitle = {Installation Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Kjell Tore\ - \ Innervik and Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy\ - \ of Music},\n title = {BM 0.1},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ - \ year = {2011}\n}\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: BM 0.1 - url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf - year: 2011 - - -- ENTRYTYPE: inproceedings - 
ID: nime2011-installations-Andersson2011 - address: 'Oslo, Norway' - author: 'MusicalFieldsForever – Anders-Petter Andersson, Birgitta Cappelen, Fredrik - Olofsson' - bibtex: "@inproceedings{nime2011-installations-Andersson2011,\n address = {Oslo,\ - \ Norway},\n author = {MusicalFieldsForever – Anders-Petter Andersson, Birgitta\ - \ Cappelen, Fredrik Olofsson},\n booktitle = {Installation Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ - \ {Norwegian Academy of Music},\n title = {ORFI},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ - \ year = {2011}\n}\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: ORFI - url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf - year: 2011 - - - ENTRYTYPE: incollection ID: nime2012-installations-Kotin2012 abstract: "Program notes:\n\nWhisker Organ is an interactive sound instrument linking\ @@ -767,6 +684,223 @@ year: 2012 +- ENTRYTYPE: inproceedings + ID: nime2011-installations-Norderval2011 + address: 'Oslo, Norway' + author: Kristin Norderval + bibtex: "@inproceedings{nime2011-installations-Norderval2011,\n address = {Oslo,\ + \ Norway},\n author = {Kristin Norderval},\n booktitle = {Installation Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ + \ = {Norwegian Academy of Music},\n title = {TURN ME! 
I need 12 Volts!},\n url\ + \ = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n year = {2011}\n\ + }\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Kjell Tore Innervik and Ivar Frounberg + month: June + publisher: Norwegian Academy of Music + title: TURN ME! I need 12 Volts! + url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf + year: 2011 + + +- ENTRYTYPE: inproceedings + ID: nime2011-installations-2011 + address: 'Oslo, Norway' + author: Nicolas d'Alessandro and Roberto Calderon + bibtex: "@inproceedings{nime2011-installations-2011,\n address = {Oslo, Norway},\n\ + \ author = {Nicolas d'Alessandro and Roberto Calderon},\n booktitle = {Installation\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ + \ = {Norwegian Academy of Music},\n title = {ROOM\\#81},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ + \ year = {2011}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Kjell Tore Innervik and Ivar Frounberg + month: June + publisher: Norwegian Academy of Music + title: "ROOM\\#81" + url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf + year: 2011 + + +- ENTRYTYPE: inproceedings + ID: nime2011-installations-Peschta2011 + address: 'Oslo, Norway' + author: Leo Peschta + bibtex: "@inproceedings{nime2011-installations-Peschta2011,\n address = {Oslo, Norway},\n\ + \ author = {Leo Peschta},\n booktitle = {Installation Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Kjell Tore\ + \ Innervik and Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy\ + \ of Music},\n title = {BM 0.1},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ 
+ \ year = {2011}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Kjell Tore Innervik and Ivar Frounberg + month: June + publisher: Norwegian Academy of Music + title: BM 0.1 + url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf + year: 2011 + + +- ENTRYTYPE: inproceedings + ID: nime2011-installations-Andersson2011 + address: 'Oslo, Norway' + author: 'MusicalFieldsForever – Anders-Petter Andersson, Birgitta Cappelen, Fredrik + Olofsson' + bibtex: "@inproceedings{nime2011-installations-Andersson2011,\n address = {Oslo,\ + \ Norway},\n author = {MusicalFieldsForever – Anders-Petter Andersson, Birgitta\ + \ Cappelen, Fredrik Olofsson},\n booktitle = {Installation Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ + \ {Norwegian Academy of Music},\n title = {ORFI},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ + \ year = {2011}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Kjell Tore Innervik and Ivar Frounberg + month: June + publisher: Norwegian Academy of Music + title: ORFI + url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf + year: 2011 + + +- ENTRYTYPE: inproceedings + ID: nime2008-installations-Farshi2008 + address: 'Genova, Italy' + author: Olly Farshi + bibtex: "@inproceedings{nime2008-installations-Farshi2008,\n address = {Genova,\ + \ Italy},\n author = {Olly Farshi},\n booktitle = {Installation Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Corrado Canepa},\n month = {June},\n publisher = {Casa Paganini},\n title\ + \ = {Habitat},\n year = {2008}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical 
Expression + editor: Corrado Canepa + month: June + publisher: Casa Paganini + title: Habitat + year: 2008 + + +- ENTRYTYPE: inproceedings + ID: nime2008-installations-Talman2008 + address: 'Genova, Italy' + author: Jeff Talman + bibtex: "@inproceedings{nime2008-installations-Talman2008,\n address = {Genova,\ + \ Italy},\n author = {Jeff Talman},\n booktitle = {Installation Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Corrado Canepa},\n month = {June},\n publisher = {Casa Paganini},\n title\ + \ = {MIRROR OF THE MOON},\n url = {www.jefftalman.com},\n year = {2008}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Corrado Canepa + month: June + publisher: Casa Paganini + title: MIRROR OF THE MOON + url: www.jefftalman.com + year: 2008 + + +- ENTRYTYPE: inproceedings + ID: nime2008-installations-Paek2008 + address: 'Genova, Italy' + author: JooYoun Paek + bibtex: "@inproceedings{nime2008-installations-Paek2008,\n address = {Genova, Italy},\n\ + \ author = {JooYoun Paek},\n booktitle = {Installation Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Corrado Canepa},\n\ + \ month = {June},\n publisher = {Casa Paganini},\n title = {Fold Loud},\n url\ + \ = {https://shakethatbutton.com/fold-loud/},\n year = {2008}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Corrado Canepa + month: June + publisher: Casa Paganini + title: Fold Loud + url: https://shakethatbutton.com/fold-loud/ + year: 2008 + + +- ENTRYTYPE: inproceedings + ID: nime2008-installations-Newby2008 + address: 'Genova, Italy' + author: 'Kenneth Newby, Aleksandra Dulic and Martin Gotfrit' + bibtex: "@inproceedings{nime2008-installations-Newby2008,\n address = {Genova, Italy},\n\ + \ author = {Kenneth Newby, Aleksandra Dulic and Martin 
Gotfrit},\n booktitle =\ + \ {Installation Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Corrado Canepa},\n month = {June},\n publisher\ + \ = {Casa Paganini},\n title = {in a thousand drops... refracted glances},\n url\ + \ = {http://aleksandradulic.net/Projects/1000/1000.html},\n year = {2008}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Corrado Canepa + month: June + publisher: Casa Paganini + title: in a thousand drops... refracted glances + url: http://aleksandradulic.net/Projects/1000/1000.html + year: 2008 + + +- ENTRYTYPE: inproceedings + ID: nime2008-installations-Lamenzo2008 + address: 'Genova, Italy' + author: 'Jared Lamenzo, Mohit Santram, Kuan Huan and Maia Marinelli' + bibtex: "@inproceedings{nime2008-installations-Lamenzo2008,\n address = {Genova,\ + \ Italy},\n author = {Jared Lamenzo, Mohit Santram, Kuan Huan and Maia Marinelli},\n\ + \ booktitle = {Installation Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Corrado Canepa},\n month = {June},\n\ + \ publisher = {Casa Paganini},\n title = {Soundscaper},\n url = {http://mediatedspaces/lib/mov/soundscaperlow.mov.},\n\ + \ year = {2008}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Corrado Canepa + month: June + publisher: Casa Paganini + title: Soundscaper + url: http://mediatedspaces/lib/mov/soundscaperlow.mov. 
+ year: 2008 + + +- ENTRYTYPE: inproceedings + ID: nime2008-installations-Napolitano2008 + address: 'Genova, Italy' + author: 'Pasquale Napolitano, Stefano Perna and Pier Giuseppe Mariconda' + bibtex: "@inproceedings{nime2008-installations-Napolitano2008,\n address = {Genova,\ + \ Italy},\n author = {Pasquale Napolitano, Stefano Perna and Pier Giuseppe Mariconda},\n\ + \ booktitle = {Installation Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Corrado Canepa},\n month = {June},\n\ + \ publisher = {Casa Paganini},\n title = {SoundBarrier_},\n year = {2008}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Corrado Canepa + month: June + publisher: Casa Paganini + title: SoundBarrier_ + year: 2008 + + +- ENTRYTYPE: inproceedings + ID: nime2008-installations-Majoe2008 + address: 'Genova, Italy' + author: Art Clay & Dennis Majoe + bibtex: "@inproceedings{nime2008-installations-Majoe2008,\n address = {Genova, Italy},\n\ + \ author = {Art Clay & Dennis Majoe},\n booktitle = {Installation Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Corrado Canepa},\n month = {June},\n publisher = {Casa Paganini},\n\ + \ title = {China Gates},\n url = {http://mypage.bluewin.ch/artclay},\n year =\ + \ {2008}\n}\n" + booktitle: Installation Proceedings of the International Conference on New Interfaces + for Musical Expression + editor: Corrado Canepa + month: June + publisher: Casa Paganini + title: China Gates + url: http://mypage.bluewin.ch/artclay + year: 2008 + + - ENTRYTYPE: inproceedings ID: Stearns2009 abstract: 'Artificial Analog Neural Network (AANN) is an interactive, handmade electronic @@ -1025,137 +1159,3 @@ publisher: Carnegie Mellon University title: Sound Lanterns year: 2009 - - -- ENTRYTYPE: inproceedings - ID: nime2008-installations-Farshi2008 - address: 'Genova, Italy' - 
author: Olly Farshi - bibtex: "@inproceedings{nime2008-installations-Farshi2008,\n address = {Genova,\ - \ Italy},\n author = {Olly Farshi},\n booktitle = {Installation Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Corrado Canepa},\n month = {June},\n publisher = {Casa Paganini},\n title\ - \ = {Habitat},\n year = {2008}\n}\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Corrado Canepa - month: June - publisher: Casa Paganini - title: Habitat - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: nime2008-installations-Talman2008 - address: 'Genova, Italy' - author: Jeff Talman - bibtex: "@inproceedings{nime2008-installations-Talman2008,\n address = {Genova,\ - \ Italy},\n author = {Jeff Talman},\n booktitle = {Installation Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Corrado Canepa},\n month = {June},\n publisher = {Casa Paganini},\n title\ - \ = {MIRROR OF THE MOON},\n url = {www.jefftalman.com},\n year = {2008}\n}\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Corrado Canepa - month: June - publisher: Casa Paganini - title: MIRROR OF THE MOON - url: www.jefftalman.com - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: nime2008-installations-Paek2008 - address: 'Genova, Italy' - author: JooYoun Paek - bibtex: "@inproceedings{nime2008-installations-Paek2008,\n address = {Genova, Italy},\n\ - \ author = {JooYoun Paek},\n booktitle = {Installation Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Corrado Canepa},\n\ - \ month = {June},\n publisher = {Casa Paganini},\n title = {Fold Loud},\n url\ - \ = {https://shakethatbutton.com/fold-loud/},\n year = {2008}\n}\n" - booktitle: Installation Proceedings of the International Conference on 
New Interfaces - for Musical Expression - editor: Corrado Canepa - month: June - publisher: Casa Paganini - title: Fold Loud - url: https://shakethatbutton.com/fold-loud/ - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: nime2008-installations-Newby2008 - address: 'Genova, Italy' - author: 'Kenneth Newby, Aleksandra Dulic and Martin Gotfrit' - bibtex: "@inproceedings{nime2008-installations-Newby2008,\n address = {Genova, Italy},\n\ - \ author = {Kenneth Newby, Aleksandra Dulic and Martin Gotfrit},\n booktitle =\ - \ {Installation Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Corrado Canepa},\n month = {June},\n publisher\ - \ = {Casa Paganini},\n title = {in a thousand drops... refracted glances},\n url\ - \ = {http://aleksandradulic.net/Projects/1000/1000.html},\n year = {2008}\n}\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Corrado Canepa - month: June - publisher: Casa Paganini - title: in a thousand drops... 
refracted glances - url: http://aleksandradulic.net/Projects/1000/1000.html - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: nime2008-installations-Lamenzo2008 - address: 'Genova, Italy' - author: 'Jared Lamenzo, Mohit Santram, Kuan Huan and Maia Marinelli' - bibtex: "@inproceedings{nime2008-installations-Lamenzo2008,\n address = {Genova,\ - \ Italy},\n author = {Jared Lamenzo, Mohit Santram, Kuan Huan and Maia Marinelli},\n\ - \ booktitle = {Installation Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Corrado Canepa},\n month = {June},\n\ - \ publisher = {Casa Paganini},\n title = {Soundscaper},\n url = {http://mediatedspaces/lib/mov/soundscaperlow.mov.},\n\ - \ year = {2008}\n}\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Corrado Canepa - month: June - publisher: Casa Paganini - title: Soundscaper - url: http://mediatedspaces/lib/mov/soundscaperlow.mov. 
- year: 2008 - - -- ENTRYTYPE: inproceedings - ID: nime2008-installations-Napolitano2008 - address: 'Genova, Italy' - author: 'Pasquale Napolitano, Stefano Perna and Pier Giuseppe Mariconda' - bibtex: "@inproceedings{nime2008-installations-Napolitano2008,\n address = {Genova,\ - \ Italy},\n author = {Pasquale Napolitano, Stefano Perna and Pier Giuseppe Mariconda},\n\ - \ booktitle = {Installation Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Corrado Canepa},\n month = {June},\n\ - \ publisher = {Casa Paganini},\n title = {SoundBarrier_},\n year = {2008}\n}\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Corrado Canepa - month: June - publisher: Casa Paganini - title: SoundBarrier_ - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: nime2008-installations-Majoe2008 - address: 'Genova, Italy' - author: Art Clay & Dennis Majoe - bibtex: "@inproceedings{nime2008-installations-Majoe2008,\n address = {Genova, Italy},\n\ - \ author = {Art Clay & Dennis Majoe},\n booktitle = {Installation Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Corrado Canepa},\n month = {June},\n publisher = {Casa Paganini},\n\ - \ title = {China Gates},\n url = {http://mypage.bluewin.ch/artclay},\n year =\ - \ {2008}\n}\n" - booktitle: Installation Proceedings of the International Conference on New Interfaces - for Musical Expression - editor: Corrado Canepa - month: June - publisher: Casa Paganini - title: China Gates - url: http://mypage.bluewin.ch/artclay - year: 2008 diff --git a/_data/nime_music.yaml b/_data/nime_music.yaml index b3785aeb..b19f77ef 100644 --- a/_data/nime_music.yaml +++ b/_data/nime_music.yaml @@ -1,1426 +1,1849 @@ -- ENTRYTYPE: inproceedings - ID: nime2016-music-Burke2016 - abstract: 'Program notes: Coral Bells explores the diverse overtone, microtone sounds - and origins of the 
Federation Hand Bells and Bass clarinet into the visual with - discrete sounds of the ecosystems of coral from Fitzroy Island Northern Australia. - This creation brings a new life to the Federation Hand Bells providing deepening - connections with the Australian landscape. It is the conversation of between the - audio and dead coral from that accentuates the audio-visual reflecting both the - translucent Federation Bell sounds, Bass clarinet, glass and dead coral. The acoustic - resonators vibrates with the coral and are recreated into visuals of moving glass - objects. These sounds transform into acousmatic sounds. The colors and texture - within the visuals are layered white/grey, sepia, hints of pastel colours, burnt - reds, yellows and gold images that are layered to create a thick timbral texture - to form the video voice. The sounds of subtle high pitched Bells and gritty sand - sounds with the Bass clarinet periodically joining the drones with discordant - multiphonics and flourishes of notes dominate throughout. Subsequent acoustic - and visual motifs capture and emerge sonically/visually creating timbre layers - of the interpreted coral and glass reflections.' - address: 'Brisbane, Australia' - author: Brigid Burke - bibtex: "@inproceedings{nime2016-music-Burke2016,\n abstract = {Program notes: Coral\ - \ Bells explores the diverse overtone, microtone sounds and origins of the Federation\ - \ Hand Bells and Bass clarinet into the visual with discrete sounds of the ecosystems\ - \ of coral from Fitzroy Island Northern Australia. This creation brings a new\ - \ life to the Federation Hand Bells providing deepening connections with the Australian\ - \ landscape. It is the conversation of between the audio and dead coral from that\ - \ accentuates the audio-visual reflecting both the translucent Federation Bell\ - \ sounds, Bass clarinet, glass and dead coral. 
The acoustic resonators vibrates\ - \ with the coral and are recreated into visuals of moving glass objects. These\ - \ sounds transform into acousmatic sounds. The colors and texture within the visuals\ - \ are layered white/grey, sepia, hints of pastel colours, burnt reds, yellows\ - \ and gold images that are layered to create a thick timbral texture to form the\ - \ video voice. The sounds of subtle high pitched Bells and gritty sand sounds\ - \ with the Bass clarinet periodically joining the drones with discordant multiphonics\ - \ and flourishes of notes dominate throughout. Subsequent acoustic and visual\ - \ motifs capture and emerge sonically/visually creating timbre layers of the interpreted\ - \ coral and glass reflections.},\n address = {Brisbane, Australia},\n author =\ - \ {Brigid Burke},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Andrew Brown and Toby\ - \ Gifford},\n month = {June},\n publisher = {Griffith University},\n title = {Coral\ - \ Bells Movt.2},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-12 + abstract: 'SPLT/SCRN is a game-piece where two improvisers play against each-other + using their instruments as game controllers. The piece consists of multiple randomized + mini-challenges where the performers need to improvise in order to understand + what musical gestures are required from them through positive feedback from the + screen. The mini-games cover a range of musical affordances, giving the advantage + to both instrumentalists at different times. The instrument signal is analysed + in real-time using machine learning techniques through Max/MSP, and used as control + data for both the progress within the game, as well as the control of the live + electronics. These parameters are then sent through OSC to the game engine Unity + and control the game. 
In addition, the hybrid system makes use of DMX-controlled + lights, which are also mapped to control data and game levels. On-screen events + are accentuated through lights within the physical space, merging the physical + and the digital.' + articleno: 12 + author: Christos Michalakos + bibtex: "@article{nime23-music-12,\n abstract = {SPLT/SCRN is a game-piece where\ + \ two improvisers play against each-other using their instruments as game controllers.\ + \ The piece consists of multiple randomized mini-challenges where the performers\ + \ need to improvise in order to understand what musical gestures are required\ + \ from them through positive feedback from the screen. The mini-games cover a\ + \ range of musical affordances, giving the advantage to both instrumentalists\ + \ at different times. The instrument signal is analysed in real-time using machine\ + \ learning techniques through Max/MSP, and used as control data for both the progress\ + \ within the game, as well as the control of the live electronics. These parameters\ + \ are then sent through OSC to the game engine Unity and control the game. In\ + \ addition, the hybrid system makes use of DMX-controlled lights, which are also\ + \ mapped to control data and game levels. 
On-screen events are accentuated through\ + \ lights within the physical space, merging the physical and the digital.},\n\ + \ articleno = {12},\n author = {Christos Michalakos},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 1, Wednesday\ + \ May 31, Biblioteca Vasconcelos},\n title = {SPLT/SCRN: A Game-Piece for Dueling\ + \ Improvisers},\n url = {https://www.nime.org/proceedings/2023/nime23_music_12.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_1.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Coral Bells Movt.2 - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos' + title: 'SPLT/SCRN: A Game-Piece for Dueling Improvisers' + url: https://www.nime.org/proceedings/2023/nime23_music_12.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_1.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Mulder2016 - abstract: 'Program notes: The performance is part of the ongoing research project - into Karlheinz Stockhausen’s historic work Solo (Solo, für Melodie-Instrument - mit Rückkopplung 1965-6). Together with my colleague Dr. Juan Parra Cancino from - ORCIM Ghent we are teasing out the consequences of the (now common) software replacement - of the elaborate tape delay system that was used in the time of the work’s inception.' - address: 'Brisbane, Australia' - author: Johannes Mulder - bibtex: "@inproceedings{nime2016-music-Mulder2016,\n abstract = {Program notes:\ - \ The performance is part of the ongoing research project into Karlheinz Stockhausen’s\ - \ historic work Solo (Solo, für Melodie-Instrument mit Rückkopplung 1965-6). 
Together\ - \ with my colleague Dr. Juan Parra Cancino from ORCIM Ghent we are teasing out\ - \ the consequences of the (now common) software replacement of the elaborate tape\ - \ delay system that was used in the time of the work’s inception.},\n address\ - \ = {Brisbane, Australia},\n author = {Johannes Mulder},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {On Solo},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-13 + abstract: 'Our idea starts from the necessity to investigate space, explore its + features, find the potential in acoustic properties, and use them as a starting + point for our research. How is it possible to create a three-dimensional and analog + sound system? How are we able to work with instruments that can move sound in + space? Taking advantage of the use of customized industrial items, we will have + the possibility to create three-dimensional audio images controlled and designed + in real-time by the performers. The concept that interests us is the single percussive + impulse as a music creator. We can change the surface, and speed of the execution + but the impulse is at the core of every percussive action. Solenoids are our artistic + medium and the interesting aspect is the relationship between us as human performers + and the possibilities that arise through our interaction with a complex mechanical + instrument. Thus we see in this instrument an extension of our percussive possibilities.' + articleno: 13 + author: Anderson Maq + bibtex: "@article{nime23-music-13,\n abstract = {Our idea starts from the necessity\ + \ to investigate space, explore its features, find the potential in acoustic properties,\ + \ and use them as a starting point for our research. How is it possible to create\ + \ a three-dimensional and analog sound system? 
How are we able to work with instruments\ + \ that can move sound in space? Taking advantage of the use of customized industrial\ + \ items, we will have the possibility to create three-dimensional audio images\ + \ controlled and designed in real-time by the performers. The concept that interests\ + \ us is the single percussive impulse as a music creator. We can change the surface,\ + \ and speed of the execution but the impulse is at the core of every percussive\ + \ action. Solenoids are our artistic medium and the interesting aspect is the\ + \ relationship between us as human performers and the possibilities that arise\ + \ through our interaction with a complex mechanical instrument. Thus we see in\ + \ this instrument an extension of our percussive possibilities.},\n articleno\ + \ = {13},\n author = {Anderson Maq},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Online Presentation},\n title = {(ex)tension by Fabrizio\ + \ di Salvo in collaboration with reConvert},\n url = {https://www.nime.org/proceedings/2023/nime23_music_13.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: On Solo - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: (ex)tension by Fabrizio di Salvo in collaboration with reConvert + url: https://www.nime.org/proceedings/2023/nime23_music_13.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Gillies2016 - abstract: "Program notes: Working almost exclusively at a very soft volume, Shelter\ - \ inverts the relationships between the source sound 
material and it’s experience\ - \ in the real world, placing very large sounds (sourced from field recordings)\ - \ at the threshold of audibility while audio artifacts are brought to the forefront\ - \ of our focus to act as recognisable musical material. By utilising a soft dynamic,\ - \ all audience members are able to hear each channel more equally, regardless\ - \ of their position in the performance space. This new version for bass clarinet,\ - \ electric guitar, and electronics expands the original electronic composition\ - \ into something more lively and environmentally focused. The compositional intentions\ - \ of the original Shelter remain at play here - this version still seeks to address\ - \ the assumptions of multichannel listening, while affecting an environment of\ - \ sound in preference to an experience of sound. However, this electroacoustic\ - \ version adds a little bit of much needed chaos, allowing performers to interact\ - \ and manipulate this sonic environment.\n\nAbout the performers:\n\nCat Hope\ - \ - Bass Flute\nLindsay Vickery - Bass Clarinet\nAaron Wyatt - Viola" - address: 'Brisbane, Australia' - author: Sam Gillies - bibtex: "@inproceedings{nime2016-music-Gillies2016,\n abstract = {Program notes:\ - \ Working almost exclusively at a very soft volume, Shelter inverts the relationships\ - \ between the source sound material and it’s experience in the real world, placing\ - \ very large sounds (sourced from field recordings) at the threshold of audibility\ - \ while audio artifacts are brought to the forefront of our focus to act as recognisable\ - \ musical material. By utilising a soft dynamic, all audience members are able\ - \ to hear each channel more equally, regardless of their position in the performance\ - \ space. This new version for bass clarinet, electric guitar, and electronics\ - \ expands the original electronic composition into something more lively and environmentally\ - \ focused. 
The compositional intentions of the original Shelter remain at play\ - \ here - this version still seeks to address the assumptions of multichannel listening,\ - \ while affecting an environment of sound in preference to an experience of sound.\ - \ However, this electroacoustic version adds a little bit of much needed chaos,\ - \ allowing performers to interact and manipulate this sonic environment.\n\nAbout\ - \ the performers:\n\nCat Hope - Bass Flute\nLindsay Vickery - Bass Clarinet\n\ - Aaron Wyatt - Viola},\n address = {Brisbane, Australia},\n author = {Sam Gillies},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Andrew Brown and Toby Gifford},\n month\ - \ = {June},\n publisher = {Griffith University},\n title = {Shelter},\n year =\ - \ {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-19 + abstract: 'Elegy (Ready, Set, Rapture) is the second work composed for Coretet, + a virtual reality musical instrument modeled after traditional bowed stringed + instruments including the violin, viola, cello and double bass. Elegy (Ready, + Set, Rapture) is a solo multi-channel performance for the Coretet double bass + that combines a pre-composed musical chord structure displayed on the neck of + the instrument in real-time with improvisation. Coretet is built using the Unreal + Engine and is performed using the Oculus Rift or Quest 2 head-mounted displays + and Oculus Touch controllers. All audio in Coretet is procedurally generated, + using physical models of a bowed string from the Synthesis Toolkit (STK) and a + waveguide plucked string, all running within Pure Data.' + articleno: 19 + author: Rob Hamilton + bibtex: "@article{nime23-music-19,\n abstract = {Elegy (Ready, Set, Rapture) is\ + \ the second work composed for Coretet, a virtual reality musical instrument modeled\ + \ after traditional bowed stringed instruments including the violin, viola, cello\ + \ and double bass. 
Elegy (Ready, Set, Rapture) is a solo multi-channel performance\ + \ for the Coretet double bass that combines a pre-composed musical chord structure\ + \ displayed on the neck of the instrument in real-time with improvisation. Coretet\ + \ is built using the Unreal Engine and is performed using the Oculus Rift or Quest\ + \ 2 head-mounted displays and Oculus Touch controllers. All audio in Coretet is\ + \ procedurally generated, using physical models of a bowed string from the Synthesis\ + \ Toolkit (STK) and a waveguide plucked string, all running within Pure Data.},\n\ + \ articleno = {19},\n author = {Rob Hamilton},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 5, Friday June\ + \ 2, Centro de Cultura Digital},\n title = {Elegy (Ready, Set, Rapture)},\n url\ + \ = {https://www.nime.org/proceedings/2023/nime23_music_19.pdf},\n urlsuppl1 =\ + \ {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n year = {2023}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Shelter - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' + title: 'Elegy (Ready, Set, Rapture)' + url: https://www.nime.org/proceedings/2023/nime23_music_19.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Barclay2016 - abstract: 'Program notes: Ground Interference draws on short recordings from each - location I visited in spring 2014 with a particular focus on Joshua Tree National - Park, Jornada Biosphere Reserve, Mojave Desert, and Death Valley National Park. 
- These fragile desert environments are inhabited by thousands of species all part - of a delicate ecosystem that is in a state of flux induced by changing climates. - The transfixing acoustic ecologies of the southwest deserts demand a stillness - that encourages a deeper environmental awareness and engagement. In many instances - during our field trip we struggled to find locations without human interference. - The distant hum of highway traffic and relentless airplanes under the flight path - from LAX were expected, yet we also encountered unexpected sounds interfering - with the acoustic ecologies of the land. These range from an obscure reverberating - vending machine in Death Valley National Park to rattling power lines in the Jornada - Biosphere Reserve that were so loud I could feel the vibrations through my feet.' - address: 'Brisbane, Australia' - author: Leah Barclay - bibtex: "@inproceedings{nime2016-music-Barclay2016,\n abstract = {Program notes:\ - \ Ground Interference draws on short recordings from each location I visited in\ - \ spring 2014 with a particular focus on Joshua Tree National Park, Jornada Biosphere\ - \ Reserve, Mojave Desert, and Death Valley National Park. These fragile desert\ - \ environments are inhabited by thousands of species all part of a delicate ecosystem\ - \ that is in a state of flux induced by changing climates. The transfixing acoustic\ - \ ecologies of the southwest deserts demand a stillness that encourages a deeper\ - \ environmental awareness and engagement. In many instances during our field trip\ - \ we struggled to find locations without human interference. The distant hum of\ - \ highway traffic and relentless airplanes under the flight path from LAX were\ - \ expected, yet we also encountered unexpected sounds interfering with the acoustic\ - \ ecologies of the land. 
These range from an obscure reverberating vending machine\ - \ in Death Valley National Park to rattling power lines in the Jornada Biosphere\ - \ Reserve that were so loud I could feel the vibrations through my feet.},\n address\ - \ = {Brisbane, Australia},\n author = {Leah Barclay},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Ground Interference - The Listen(n) Project},\n year\ - \ = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-26 + abstract: 'Born from the will to offer a unique live experience, ALEA(s) delivers + boiling, improvised performances mixing live drawing, video animation and electronic + music. Surrounded by their audience, the three members are busy creating their + show, without any safety net. While the complex, loaded electronic music fills + the room, the illustrator’s physical implication in his drawings and the hypnotic + animations projected onto the big screen unite to finish this well-rounded show. ALEA(s) + performances are often described as immersive, intense and crafted.' + articleno: 26 + author: Boris Wilmot + bibtex: "@article{nime23-music-26,\n abstract = {Born from the will to offer a unique\ + \ live experience, ALEA(s) delivers boiling, improvised performances mixing live\ + \ drawing, video animation and electronic music. Surrounded by their audience,\ + \ the three members are busy creating their show, without any safety net. While\ + \ the complex, loaded electronic music fills the room, the illustrator’s physical\ + \ implication in his drawings and the hypnotic animations projected onto the big\ + \ screen unite to finish this well-rounded show. 
ALEA(s) performances are often\ + \ described as immersive, intense and crafted.},\n articleno = {26},\n author\ + \ = {Boris Wilmot},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ + \ = {May},\n note = {Online Presentation},\n title = {ALEA(s)},\n url = {https://www.nime.org/proceedings/2023/nime23_music_26.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Ground Interference - The Listen(n) Project - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: ALEA(s) + url: https://www.nime.org/proceedings/2023/nime23_music_26.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Paine2016 - abstract: 'Program notes: Becoming Desert draws on the experience of sitting or - lying down silent in the desert for several hours at a time to make sound recordings. - The field recordings I made in four deserts of the American Southwest are the - basis of this work. When listening to the desert sounds through headphones at - the time of recording, one is aware of a kind of hyper-real sonic environment. - The amplified soundfield in the headphones is surreal in its presence and accuracy - and multiplies my direct experience of listening many times.' - address: 'Brisbane, Australia' - author: Garth Paine - bibtex: "@inproceedings{nime2016-music-Paine2016,\n abstract = {Program notes: Becoming\ - \ Desert draws on the experience of sitting or lying down silent in the desert\ - \ for several hours at a time to make sound recordings. 
The field recordings I\ - \ made in four deserts of the American Southwest are the basis of this work. When\ - \ listening to the desert sounds through headphones at the time of recording,\ - \ one is aware of a kind of hyper-real sonic environment. The amplified soundfield\ - \ in the headphones is surreal in its presence and accuracy and multiplies my\ - \ direct experience of listening many times.},\n address = {Brisbane, Australia},\n\ - \ author = {Garth Paine},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Becoming Desert - The Listen(n) Project},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-28 + abstract: 'The Center of the Universe was inspired by my impression of New York + City after several trips to this world center. When I stood at the top of the + Empire State Building, I felt that it absorbed the energy of the entire universe. + People with different backgrounds travel to New York from all over the world, + creating a colorful and spectacular city. The primary material in this work is + the text “The Center of the Universe.” This text is stated and manipulated in + various languages, including English, Spanish, French, German, Italian, Russian, + Chinese, Japanese, Korean, and Thai. All the human voices come from the sampled + AI voices of the MacOS system. Two Bluetooth Nintendo Wiimote Controllers provide + the capability to stand untethered at center stage and play this composition.' + articleno: 28 + author: Sunhuimei Xia + bibtex: "@article{nime23-music-28,\n abstract = {The Center of the Universe was\ + \ inspired by my impression of New York City after several trips to this world\ + \ center. When I stood at the top of the Empire State Building, I felt that it\ + \ absorbed the energy of the entire universe. 
People with different backgrounds\ + \ travel to New York from all over the world, creating a colorful and spectacular\ + \ city. The primary material in this work is the text “The Center of the Universe.”\ + \ This text is stated and manipulated in various languages, including English,\ + \ Spanish, French, German, Italian, Russian, Chinese, Japanese, Korean, and Thai.\ + \ All the human voices come from the sampled AI voices of the MacOS system. Two\ + \ Bluetooth Nintendo Wiimote Controllers provide the capability to stand untethered\ + \ at center stage and play this composition.},\n articleno = {28},\n author =\ + \ {Sunhuimei Xia},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ + \ = {May},\n note = {Online Presentation},\n title = {The Center of the Universe},\n\ + \ url = {https://www.nime.org/proceedings/2023/nime23_music_28.pdf},\n urlsuppl1\ + \ = {https://www.nime2023.org/program/online-in-person-concerts},\n year = {2023}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Becoming Desert - The Listen(n) Project - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: The Center of the Universe + url: https://www.nime.org/proceedings/2023/nime23_music_28.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Vickery2016 - abstract: "Program notes: Nature Forms II is an eco-structuralist work, maintaining\ - \ what Opie and Brown term the “primary rules” of “environmentally-based musical\ - \ composition”: that “structures must be derived from natural sound sources” and\ - \ that “structural data must remain in series”. 
Nature Forms II explores the possibility\ - \ of recursive re-interrogation of a field recording through visualization and\ - \ resonification/resynthesis via machine and performative means. The source field\ - \ recording is contrasted with artificially generated versions created with additive,\ - \ subtractive and ring modulation resynthesis. Interaction between the live performers\ - \ and the electronic components are explores through “spectral freezing” of components\ - \ of the field recording to create spectrally derived chords from features of\ - \ the recording bird sounds and a rusty gate which are then transcribed into notation\ - \ for the instrumentalists and temporal manipulation of the recording to allow\ - \ complex bird calls to be emulated in a human time-scale.\n\nCat Hope - Bass\ - \ Flute\nLindsay Vickery - Clarinet\nAaron Wyatt - Viola\nVanessa Tomlinson -\ - \ Percussion" - address: 'Brisbane, Australia' - author: Lindsay Vickery - bibtex: "@inproceedings{nime2016-music-Vickery2016,\n abstract = {Program notes:\ - \ Nature Forms II is an eco-structuralist work, maintaining what Opie and Brown\ - \ term the “primary rules” of “environmentally-based musical composition”: that\ - \ “structures must be derived from natural sound sources” and that “structural\ - \ data must remain in series”. Nature Forms II explores the possibility of recursive\ - \ re-interrogation of a field recording through visualization and resonification/resynthesis\ - \ via machine and performative means. The source field recording is contrasted\ - \ with artificially generated versions created with additive, subtractive and\ - \ ring modulation resynthesis. 
Interaction between the live performers and the\ - \ electronic components are explores through “spectral freezing” of components\ - \ of the field recording to create spectrally derived chords from features of\ - \ the recording bird sounds and a rusty gate which are then transcribed into notation\ - \ for the instrumentalists and temporal manipulation of the recording to allow\ - \ complex bird calls to be emulated in a human time-scale.\n\nCat Hope - Bass\ - \ Flute\nLindsay Vickery - Clarinet\nAaron Wyatt - Viola\nVanessa Tomlinson -\ - \ Percussion},\n address = {Brisbane, Australia},\n author = {Lindsay Vickery},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Andrew Brown and Toby Gifford},\n month\ - \ = {June},\n publisher = {Griffith University},\n title = {Nature Forms II for\ - \ Flute, Clarinet, Viola, Percussion, Hybrid Field Recording and Electronics},\n\ - \ year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-29 + abstract: '“Chomsky Hash” is a piece for improvisation, electric guitar, and live + electronics. The piece utilizes traditional guitar effects processing with a variety + of unconventional effects for the instrument, along with a surround panner setup + for quadraphonic sound. The laptop and electronic elements also act as improvising + agent, with a variety of chance operations that allow the computer to make decisions + for itself in performance. The title is a reference to the famous debate between + Noam Chomsky and Michel Foucault. Famously, Foucault asked to be paid in a large + amount of hash for his participation in the debate. Friends would say that on + special occasions Foucault would break out “that Chomsky Hash”. The relevance + of this debate to the piece is the elements I’m working with and transforming. 
+ The electric guitar itself has a long history in American popular music and has + a lot of specific cultural connotations that could seem traditional even though + at times it’s been a counter cultural symbol. With the use of DAW’s such as Ableton + Live or Max/MSP, the electric guitar can be further altered and expanded upon. + Noam Chomsky is considered a radical and countercultural figure in American politics, + but within the debate with Michel Foucault comes off as traditional and conservative + compared to Foucault’s Dionysian and hedonistic character traits. The debate itself + is an interesting synthesis of the two thinkers'' ideas. The main driving factors + of the piece are improvisation, timbral transformation, live electronics processing, + and spatialization. Since 2019, I’ve been working on bringing together my instrumental + background as a guitarist and improviser with my interest in electronic music. + This piece is a part of a series of pieces for electric guitar & live electronics.' + articleno: 29 + author: Seth A Davis + bibtex: "@article{nime23-music-29,\n abstract = {“Chomsky Hash” is a piece for improvisation,\ + \ electric guitar, and live electronics. The piece utilizes traditional guitar\ + \ effects processing with a variety of unconventional effects for the instrument,\ + \ along with a surround panner setup for quadraphonic sound. The laptop and electronic\ + \ elements also act as improvising agent, with a variety of chance operations\ + \ that allow the computer to make decisions for itself in performance. The title\ + \ is a reference to the famous debate between Noam Chomsky and Michel Foucault.\ + \ Famously, Foucault asked to be paid in a large amount of hash for his participation\ + \ in the debate. Friends would say that on special occasions Foucault would break\ + \ out “that Chomsky Hash”. The relevance of this debate to the piece is the elements\ + \ I’m working with and transforming. 
The electric guitar itself has a long history\ + \ in American popular music and has a lot of specific cultural connotations that\ + \ could seem traditional even though at times it’s been a counter cultural symbol.\ + \ With the use of DAW’s such as Ableton Live or Max/MSP, the electric guitar can\ + \ be further altered and expanded upon. Noam Chomsky is considered a radical and\ + \ countercultural figure in American politics, but within the debate with Michel\ + \ Foucault comes off as traditional and conservative compared to Foucault’s Dionysian\ + \ and hedonistic character traits. The debate itself is an interesting synthesis\ + \ of the two thinkers' ideas. The main driving factors of the piece are improvisation,\ + \ timbral transformation, live electronics processing, and spatialization. Since\ + \ 2019, I’ve been working on bringing together my instrumental background as a\ + \ guitarist and improviser with my interest in electronic music. This piece is\ + \ a part of a series of pieces for electric guitar \\& live electronics.},\n articleno\ + \ = {29},\n author = {Seth A Davis},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Online Presentation},\n title = {“Chomsky Hash” for\ + \ improvisation, electric guitar, and live electronics},\n url = {https://www.nime.org/proceedings/2023/nime23_music_29.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: 'Nature Forms II for Flute, Clarinet, Viola, Percussion, Hybrid Field Recording - and Electronics' - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: '“Chomsky Hash” for improvisation, electric 
guitar, and live electronics' + url: https://www.nime.org/proceedings/2023/nime23_music_29.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Moore2016 - abstract: 'Program notes: Basaur is a structured improvisation for software, microphones, - and objects, performed through a multichannel sound system. Using simple, readymade - household devices as the primary sound source, Basaur unfolds as a guided exploration - of the small mechanical drones and noises that occupy the edges of our quotidian - sonic awareness. Using both pre-recorded and live-performed sound sources, textures - are layered and connected, building to a richly detailed environment of active - sounds -- background becomes foreground, and the everyday annoyances of modern - convenience take on a full-throated presence that is by turns lyrical and menacing.' - address: 'Brisbane, Australia' - author: Stephan Moore - bibtex: "@inproceedings{nime2016-music-Moore2016,\n abstract = {Program notes: Basaur\ - \ is a structured improvisation for software, microphones, and objects, performed\ - \ through a multichannel sound system. 
Using simple, readymade household devices\ - \ as the primary sound source, Basaur unfolds as a guided exploration of the small\ - \ mechanical drones and noises that occupy the edges of our quotidian sonic awareness.\ - \ Using both pre-recorded and live-performed sound sources, textures are layered\ - \ and connected, building to a richly detailed environment of active sounds --\ - \ background becomes foreground, and the everyday annoyances of modern convenience\ - \ take on a full-throated presence that is by turns lyrical and menacing.},\n\ - \ address = {Brisbane, Australia},\n author = {Stephan Moore},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Basaur},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-36 + abstract: 'Galactic Madness is a structured improvisational network piece inspired + by a set of pictures of the galaxy taken by NASA''s James Webb Space Telescope(released + in June 2022). After closely observing the pictures for hours, I wanted to create + a mesmerizing system that resembles the infinite and enigmatic nature of the galaxy.' + articleno: 36 + author: Qiujiang Lu + bibtex: "@article{nime23-music-36,\n abstract = {Galactic Madness is a structured\ + \ improvisational network piece inspired by a set of pictures of the galaxy taken\ + \ by NASA's James Webb Space Telescope(released in June 2022). 
After closely observing\ + \ the pictures for hours, I wanted to create a mesmerizing system that resembles\ + \ the infinite and enigmatic nature of the galaxy.},\n articleno = {36},\n author\ + \ = {Qiujiang Lu},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ + \ = {May},\n note = {Online Presentation},\n title = {Galactic Madness},\n url\ + \ = {https://www.nime.org/proceedings/2023/nime23_music_36.pdf},\n urlsuppl1 =\ + \ {https://www.nime2023.org/program/online-in-person-concerts},\n year = {2023}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Basaur - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Bennett2016 - abstract: "Program notes: Three short pieces for flute and micro-bats (world premiere).\n\ - \nThis work uses data collected by Australian environmental scientist, Dr. Lindy\ - \ Lumsden, in her research of native Australian micro bats. It uses data from\ - \ bat-detecting devices: ultrasonic recording devices that recognize bat calls\ - \ and transpose them down to the human hearing range. The data is analysed in\ - \ the form of a spectrogram, and each species of bat is discerned by the shape\ - \ and range of the calls. This piece uses the pitch and rhythm of bat calls as\ - \ source material for the structure of each movement, and also uses the transposed\ - \ calls throughout. The recordings are triggered at certain frequencies and dynamics\ - \ of the flute via Max MSP, setting bats flying across the room (in 4 channels).\ - \ The flute mimics different types of bat calls, triggering and reacting to the\ - \ recordings and using its inherent flexibility to create a different voice in\ - \ each register.\n\nI. Victoria Circa 5.' 
There are 21 species of native bats\ - \ in Victoria, all with unique calls above human hearing range. Like birds, these\ - \ calls occur in different frequency levels so that different species of bat may\ - \ co-exist without disturbing each other. A bat’s call bounces off the objects\ - \ around it allowing it to ‘see’ at night, creating a beautiful cacophony that\ - \ no one ever notices.\n\nII. Melbourne Circa 5.' Did you think that bats only\ - \ live in the bush? 17 of the 21 species of bats in Victoria can be found in metropolitan\ - \ Melbourne, roosting in the hollows of our 100+-year-old trees. These fascinating\ - \ creatures go largely unnoticed by all except the odd cat due to their size (most\ - \ adult micro bats fit into a matchbox), speed, and auditory range (only a few\ - \ species can be heard by humans, including the White-striped Freetail Bat). These\ - \ bats are insectivorous and without them we’d be inundated with mosquitos and\ - \ bugs.\n\nIII. Southern Bent-Wing Bat Circa 6.' Very little is known about this\ - \ curious endangered species other than its secretive breeding place in a cave\ - \ somewhere in South-West Victoria. These bats can be found all over Victoria,\ - \ but unlike any other species of bat, they travel hundreds of miles to breed\ - \ in one place. No one knows how the young bats know where to go, without flying\ - \ in flocks like birds there’s no way for them to follow each other, so how do\ - \ they know where to go? This is one of the questions that Dr. Lindy Lumsden hopes\ - \ to answer in her research." - address: 'Brisbane, Australia' - author: Alice Bennett - bibtex: "@inproceedings{nime2016-music-Bennett2016,\n abstract = {Program notes:\ - \ Three short pieces for flute and micro-bats (world premiere).\n\nThis work uses\ - \ data collected by Australian environmental scientist, Dr. Lindy Lumsden, in\ - \ her research of native Australian micro bats. 
It uses data from bat-detecting\ - \ devices: ultrasonic recording devices that recognize bat calls and transpose\ - \ them down to the human hearing range. The data is analysed in the form of a\ - \ spectrogram, and each species of bat is discerned by the shape and range of\ - \ the calls. This piece uses the pitch and rhythm of bat calls as source material\ - \ for the structure of each movement, and also uses the transposed calls throughout.\ - \ The recordings are triggered at certain frequencies and dynamics of the flute\ - \ via Max MSP, setting bats flying across the room (in 4 channels). The flute\ - \ mimics different types of bat calls, triggering and reacting to the recordings\ - \ and using its inherent flexibility to create a different voice in each register.\n\ - \nI. Victoria Circa 5.' There are 21 species of native bats in Victoria, all with\ - \ unique calls above human hearing range. Like birds, these calls occur in different\ - \ frequency levels so that different species of bat may co-exist without disturbing\ - \ each other. A bat’s call bounces off the objects around it allowing it to ‘see’\ - \ at night, creating a beautiful cacophony that no one ever notices.\n\nII. Melbourne\ - \ Circa 5.' Did you think that bats only live in the bush? 17 of the 21 species\ - \ of bats in Victoria can be found in metropolitan Melbourne, roosting in the\ - \ hollows of our 100+-year-old trees. These fascinating creatures go largely unnoticed\ - \ by all except the odd cat due to their size (most adult micro bats fit into\ - \ a matchbox), speed, and auditory range (only a few species can be heard by humans,\ - \ including the White-striped Freetail Bat). These bats are insectivorous and\ - \ without them we’d be inundated with mosquitos and bugs.\n\nIII. Southern Bent-Wing\ - \ Bat Circa 6.' 
Very little is known about this curious endangered species other\ - \ than its secretive breeding place in a cave somewhere in South-West Victoria.\ - \ These bats can be found all over Victoria, but unlike any other species of bat,\ - \ they travel hundreds of miles to breed in one place. No one knows how the young\ - \ bats know where to go, without flying in flocks like birds there’s no way for\ - \ them to follow each other, so how do they know where to go? This is one of the\ - \ questions that Dr. Lindy Lumsden hopes to answer in her research.},\n address\ - \ = {Brisbane, Australia},\n author = {Alice Bennett},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Echolocation Suite},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Echolocation Suite - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: Galactic Madness + url: https://www.nime.org/proceedings/2023/nime23_music_36.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-OBrien2016 - abstract: 'Program notes: "along the eaves" is part of a series that focuses on - my interest in translational procedures and machine listening. It takes its name - from the following line in Franz Kafka’s “A Crossbreed [A Sport]” (1931, trans. - 1933): “On the moonlight nights its favourite promenade is along the eaves.” To - compose the work, I developed custom software written in the programming languages - of C and SuperCollider. 
I used these programs in different ways to process and - sequence my source materials, which, in this case, included audio recordings of - water, babies, and string instruments. Like other works in the series, I am interested - in fabricating sonic regions of coincidence, where my coordinated mix of carefully - selected sounds suggests relationships between the sounds and the illusions they - foster.' - address: 'Brisbane, Australia' - author: Benjamin O'Brien - bibtex: "@inproceedings{nime2016-music-OBrien2016,\n abstract = {Program notes:\ - \ \"along the eaves\" is part of a series that focuses on my interest in translational\ - \ procedures and machine listening. It takes its name from the following line\ - \ in Franz Kafka’s “A Crossbreed [A Sport]” (1931, trans. 1933): “On the moonlight\ - \ nights its favourite promenade is along the eaves.” To compose the work, I developed\ - \ custom software written in the programming languages of C and SuperCollider.\ - \ I used these programs in different ways to process and sequence my source materials,\ - \ which, in this case, included audio recordings of water, babies, and string\ - \ instruments. Like other works in the series, I am interested in fabricating\ - \ sonic regions of coincidence, where my coordinated mix of carefully selected\ - \ sounds suggests relationships between the sounds and the illusions they foster.},\n\ - \ address = {Brisbane, Australia},\n author = {Benjamin O'Brien},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Andrew Brown and Toby Gifford},\n month = {June},\n\ - \ publisher = {Griffith University},\n title = {Along the Eaves},\n year = {2016}\n\ - }\n" +- ENTRYTYPE: article + ID: nime23-music-52 + abstract: '“Refraction Interlude” features a solo performer surrounded by a battery + of gongs and cymbals that are activated by surfaces transducers. 
The metal percussion
+    responds to the performer’s improvisation, seeming to sound autonomously. The
+    work can be performed by any instrument. Each new performer records a set of samples,
+    short improvisations centered around a specified set of techniques. These recordings
+    are then analyzed and used as a foundation for forms of mixed synthesis, generating
+    sounds that are tailored to the specific acoustical properties of the metal percussion.
+    This iteration of the work is a new realization for piano.'
+  articleno: 52
+  author: Matthew Goodheart
+  bibtex: "@article{nime23-music-52,\n abstract = {“Refraction Interlude” features\
+    \ a solo performer surrounded by a battery of gongs and cymbals that are activated\
+    \ by surfaces transducers. The metal percussion responds to the performer’s improvisation,\
+    \ seeming to sound autonomously. The work can be performed by any instrument.\
+    \ Each new performer records a set of samples, short improvisations centered around\
+    \ a specified set of techniques. These recordings are then analyzed and used\
+    \ as a foundation for forms of mixed synthesis, generating sounds that are tailored\
+    \ to the specific acoustical properties of the metal percussion. 
This iteration\ + \ of the work is a new realization for piano.},\n articleno = {52},\n author =\ + \ {Matthew Goodheart},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ + \ = {May},\n note = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n\ + \ title = {Refraction Interlude: piano},\n url = {https://www.nime.org/proceedings/2023/nime23_music_52.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Along the Eaves - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' + title: 'Refraction Interlude: piano' + url: https://www.nime.org/proceedings/2023/nime23_music_52.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Burraston2016 - abstract: 'Program notes: Rainwire encompasses the investigation of rainfall & its - application as a medium for artistic, cultural & scientific exchange. The Rainwire - project includes development of a prototype Acoustic Rain Gauge using suspended - cables (long wire instruments), and subsequently expanded through various collaborations - in a range of creative & environmental contexts. Rainwire is an experimental approach - at technological appropriation of agricultural based objects for art and science, - with particular emphasis on climate change issues and agriculture. This performance - will present a live laptop mix of environmental sonification recordings from the - newly built Rainwire prototype. 
Previous work on Rainwire has been conducted on - shared instruments, this performance will be an opportunity to present the newly - built dedicated Rainwire prototype in public for the first time in Australia. - Long-wire instruments are made from spans of fencing wire across the open landscape. - Rainwire developed from using contact mic recordings of rainfall ‘playing’ the - long wire instruments for my music compositions. This enabled a proof of concept - study to the extent that the audio recordings demonstrate a wide variety of temporal - & spatial rain event complexity. This suggests that environmental sonification - has great potential to measure rainfall accurately, & address recognized shortcomings - of existing equipment & approaches in meteorology. Rain induced sounds with long - wire instruments have a wide range of unique, audibly recognisable features. All - of these sonic features exhibit dynamic volume & tonal characteristics, depending - on the rain type & environmental conditions. Aside from the vast array of creative - possibilities, the high spatial, temporal, volume & tonal resolution could provide - significant advancement to knowledge of rainfall event profiles, intensity & microstructure. - The challenge lies in identifying distinctive sound patterns & relating them to - particular types of rainfall events. Rainwire is beyond simple sonification of - data, it embeds technology & data collection within cultural contexts. With rainfall - as catalyst to draw inspiration from, artists, scientists & cultural groups are - key to informing science & incite new creative modalities.' - address: 'Brisbane, Australia' - author: David Burraston - bibtex: "@inproceedings{nime2016-music-Burraston2016,\n abstract = {Program notes:\ - \ Rainwire encompasses the investigation of rainfall & its application as a medium\ - \ for artistic, cultural & scientific exchange. 
The Rainwire project includes\ - \ development of a prototype Acoustic Rain Gauge using suspended cables (long\ - \ wire instruments), and subsequently expanded through various collaborations\ - \ in a range of creative & environmental contexts. Rainwire is an experimental\ - \ approach at technological appropriation of agricultural based objects for art\ - \ and science, with particular emphasis on climate change issues and agriculture.\ - \ This performance will present a live laptop mix of environmental sonification\ - \ recordings from the newly built Rainwire prototype. Previous work on Rainwire\ - \ has been conducted on shared instruments, this performance will be an opportunity\ - \ to present the newly built dedicated Rainwire prototype in public for the first\ - \ time in Australia. Long-wire instruments are made from spans of fencing wire\ - \ across the open landscape. Rainwire developed from using contact mic recordings\ - \ of rainfall ‘playing’ the long wire instruments for my music compositions. This\ - \ enabled a proof of concept study to the extent that the audio recordings demonstrate\ - \ a wide variety of temporal & spatial rain event complexity. This suggests that\ - \ environmental sonification has great potential to measure rainfall accurately,\ - \ & address recognized shortcomings of existing equipment & approaches in meteorology.\ - \ Rain induced sounds with long wire instruments have a wide range of unique,\ - \ audibly recognisable features. All of these sonic features exhibit dynamic volume\ - \ & tonal characteristics, depending on the rain type & environmental conditions.\ - \ Aside from the vast array of creative possibilities, the high spatial, temporal,\ - \ volume & tonal resolution could provide significant advancement to knowledge\ - \ of rainfall event profiles, intensity & microstructure. The challenge lies in\ - \ identifying distinctive sound patterns & relating them to particular types of\ - \ rainfall events. 
Rainwire is beyond simple sonification of data, it embeds technology\ - \ & data collection within cultural contexts. With rainfall as catalyst to draw\ - \ inspiration from, artists, scientists & cultural groups are key to informing\ - \ science & incite new creative modalities.},\n address = {Brisbane, Australia},\n\ - \ author = {David Burraston},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Rainwire},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-66 + abstract: 'Our duo -ence improvises live remixes of augmented 7” vinyl records combined + with performance on, and sequenced sampling of, custom-made elecroacoustic instruments. + Our collaboration draws on O’Rawe’s experience in art installation contexts and + with electronic dance music group Not Squares, and Stapleton’s work as an instrument + inventor, sound designer and improviser in groups such as Ens Ekt and 3BP. Our + performance for NIME 2023 begins by asking, what kind of strange rhythmic futures + will continue to be built at the intersection of Mexican and Irish cultures? To + aid this endeavour, we invoke the mythology of Batallón de San Patricio, a group + of disenfranchised European (largely Irish) immigrants and African slaves who + defected from the United States Army to fight on the side of the Mexican Army + during the Mexican-American War of 1846-48. The battalion has been memorialised + by a broad range of musicians, novelists and filmmakers. These accounts provide + stories of cultural resonances in the lives of diverse peoples, unlikely collectives + who formed allegiances through their shared oppression at the hands of dominant + imperialist powers. Our storytelling here is similar, but also different. 
While + we are interested in resonances, allegiances, and points of connection that form + moments of tense but productive co-existences between different communities, we + are likewise drawn towards the precarious, noisy and uncertain material processes + enacted in such meetings. Thus, we seek a kind of dissensual groove, an oscillation + between distance and relation, remixing fragments from Irish and Mexican music + traditions into fragile and ever-collapsing rhythmic architectures, creating spaces + in which to move.' + articleno: 66 + author: Paul Stapleton and Ricki O'Rawe + bibtex: "@article{nime23-music-66,\n abstract = {Our duo -ence improvises live remixes\ + \ of augmented 7” vinyl records combined with performance on, and sequenced sampling\ + \ of, custom-made elecroacoustic instruments. Our collaboration draws on O’Rawe’s\ + \ experience in art installation contexts and with electronic dance music group\ + \ Not Squares, and Stapleton’s work as an instrument inventor, sound designer\ + \ and improviser in groups such as Ens Ekt and 3BP. Our performance for NIME 2023\ + \ begins by asking, what kind of strange rhythmic futures will continue to be\ + \ built at the intersection of Mexican and Irish cultures? To aid this endeavour,\ + \ we invoke the mythology of Batallón de San Patricio, a group of disenfranchised\ + \ European (largely Irish) immigrants and African slaves who defected from the\ + \ United States Army to fight on the side of the Mexican Army during the Mexican-American\ + \ War of 1846-48. The battalion has been memorialised by a broad range of musicians,\ + \ novelists and filmmakers. These accounts provide stories of cultural resonances\ + \ in the lives of diverse peoples, unlikely collectives who formed allegiances\ + \ through their shared oppression at the hands of dominant imperialist powers.\ + \ Our storytelling here is similar, but also different. 
While we are interested\ + \ in resonances, allegiances, and points of connection that form moments of tense\ + \ but productive co-existences between different communities, we are likewise\ + \ drawn towards the precarious, noisy and uncertain material processes enacted\ + \ in such meetings. Thus, we seek a kind of dissensual groove, an oscillation\ + \ between distance and relation, remixing fragments from Irish and Mexican music\ + \ traditions into fragile and ever-collapsing rhythmic architectures, creating\ + \ spaces in which to move.},\n articleno = {66},\n author = {Paul Stapleton and\ + \ Ricki O'Rawe},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ + \ = {May},\n note = {Live Concert 4, Thursday June 1, Centro de Cultura Digital},\n\ + \ title = {Where is that Batallón de San Patricio Groove?},\n url = {https://www.nime.org/proceedings/2023/nime23_music_66.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Rainwire - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' + title: 'Where is that Batallón de San Patricio Groove?' + url: https://www.nime.org/proceedings/2023/nime23_music_66.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Hallett2016 - abstract: 'Program notes: The Elephant Listening Project from Cornell University - is the basis of Elephant Talk/Elephant Listening Project music performances. They - present not only logistical difficulties but musical difficulties. 
It was 2-3 - years of attempting to confirm the possibility of the project with Cornell University. - The researchers and contacts of course, were deep in Africa recording the sounds - for their research. Threats of poaching are a reality and in one instance, although - the researcher reached safety, the elephants weren''t so lucky. Cornell University - use a variety of technological platforms for their research both recording and - processing of these recordings. The music created also uses a variety of technological - and compositional methods to both utilise the sounds and to create something that - is inspiring, innovative and become a whole listening experience. Through using - different format types of sounds, for example: infrasonic sampled so that humans - can hear them as well as regular files, the aim is to create relationships between - the natural environment of the forest elephants, the other recorded acoustic occurrences - while incorporating various instruments to create a conversation between the sonic - environment, performer and listener.' - address: 'Brisbane, Australia' - author: Vicki Hallett - bibtex: "@inproceedings{nime2016-music-Hallett2016,\n abstract = {Program notes:\ - \ The Elephant Listening Project from Cornell University is the basis of Elephant\ - \ Talk/Elephant Listening Project music performances. They present not only logistical\ - \ difficulties but musical difficulties. It was 2-3 years of attempting to confirm\ - \ the possibility of the project with Cornell University. The researchers and\ - \ contacts of course, were deep in Africa recording the sounds for their research.\ - \ Threats of poaching are a reality and in one instance, although the researcher\ - \ reached safety, the elephants weren't so lucky. Cornell University use a variety\ - \ of technological platforms for their research both recording and processing\ - \ of these recordings. 
The music created also uses a variety of technological\ - \ and compositional methods to both utilise the sounds and to create something\ - \ that is inspiring, innovative and become a whole listening experience. Through\ - \ using different format types of sounds, for example: infrasonic sampled so that\ - \ humans can hear them as well as regular files, the aim is to create relationships\ - \ between the natural environment of the forest elephants, the other recorded\ - \ acoustic occurrences while incorporating various instruments to create a conversation\ - \ between the sonic environment, performer and listener.},\n address = {Brisbane,\ - \ Australia},\n author = {Vicki Hallett},\n booktitle = {Music Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Elephant Talk},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-73 + abstract: 'This piece is an mobile outdoor performance where a dancer wearing mobile + wireless IMU sensors controls sound generated by a laptop through their movements. + The performance is mobile, and can take place in any available public space. The + dancer''s movements acquired by the sensors drive sound generation algorithms + running on SuperCollider and output from a mobile speaker. Since all the hardware + is commercially available and relatively inexpensive, this system is easy to build. + Through this work, we are showing that a performance that is not bound by location + is possible through a relatively inexpensive and easy-to-construct performance + system. The title "Unboxing" refers to escaping from the economic, social, political, + and artistic constraints of conventional performances. 
It also alludes to “unboxing” + as an internet meme in online videos where one does not know what is contained + in the box before it is opened - as the performance data and the resulting sound + structures cannot be evaluated beforehand. This project aims to open up computer + music creativity to a wider audience through frugal technology and escape Western-centric + concepts of music and dances. As alternative, we propose the term “electronic + sound performance”.' + articleno: 73 + author: Takumi Ikeda and Hanako Atake and Iannis Zannos + bibtex: "@article{nime23-music-73,\n abstract = {This piece is an mobile outdoor\ + \ performance where a dancer wearing mobile wireless IMU sensors controls sound\ + \ generated by a laptop through their movements. The performance is mobile, and\ + \ can take place in any available public space. The dancer's movements acquired\ + \ by the sensors drive sound generation algorithms running on SuperCollider and\ + \ output from a mobile speaker. Since all the hardware is commercially available\ + \ and relatively inexpensive, this system is easy to build. Through this work,\ + \ we are showing that a performance that is not bound by location is possible\ + \ through a relatively inexpensive and easy-to-construct performance system. The\ + \ title \"Unboxing\" refers to escaping from the economic, social, political,\ + \ and artistic constraints of conventional performances. It also alludes to “unboxing”\ + \ as an internet meme in online videos where one does not know what is contained\ + \ in the box before it is opened - as the performance data and the resulting sound\ + \ structures cannot be evaluated beforehand. This project aims to open up computer\ + \ music creativity to a wider audience through frugal technology and escape Western-centric\ + \ concepts of music and dances. 
As alternative, we propose the term “electronic\ + \ sound performance”.},\n articleno = {73},\n author = {Takumi Ikeda and Hanako\ + \ Atake and Iannis Zannos},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Online Presentation},\n title = {Unboxing: Public-Space\ + \ Performance With Wearable-Sensors And SuperCollider},\n url = {https://www.nime.org/proceedings/2023/nime23_music_73.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Elephant Talk - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: 'Unboxing: Public-Space Performance With Wearable-Sensors And SuperCollider' + url: https://www.nime.org/proceedings/2023/nime23_music_73.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Tahiroglu2016 - abstract: 'Program notes: "NOISA Étude 2" is a second set of performance instructions - created to showcase compelling, evolving and complex soundscapes only possible - when operating the NOISA instruments, integrating the system’s autonomous responses - as part of a musical piece. The multi-layered sound interaction design is based - on radical transformations of acoustic instruments performing works from the classical - music repertoire. This second "étude" is based entirely on interaction with spectrum-complementary - Phase Vocoders. The system is fed with variations of a fixed musical motif, encouraging - the system to recognise elements of the motive and create its own set of different - versions emulating a human musical compositional process. 
Also, the Myo Armband - is used in a creative way as an independent element for dynamic control, using - raw data extracted from the muscles’ tension.' - address: 'Brisbane, Australia' - author: Juan Carlos Vasquez & Koray Tahiroğlu - bibtex: "@inproceedings{nime2016-music-Tahiroglu2016,\n abstract = {Program notes:\ - \ \"NOISA Étude 2\" is a second set of performance instructions created to showcase\ - \ compelling, evolving and complex soundscapes only possible when operating the\ - \ NOISA instruments, integrating the system’s autonomous responses as part of\ - \ a musical piece. The multi-layered sound interaction design is based on radical\ - \ transformations of acoustic instruments performing works from the classical\ - \ music repertoire. This second \"étude\" is based entirely on interaction with\ - \ spectrum-complementary Phase Vocoders. The system is fed with variations of\ - \ a fixed musical motif, encouraging the system to recognise elements of the motive\ - \ and create its own set of different versions emulating a human musical compositional\ - \ process. Also, the Myo Armband is used in a creative way as an independent element\ - \ for dynamic control, using raw data extracted from the muscles’ tension.},\n\ - \ address = {Brisbane, Australia},\n author = {Juan Carlos Vasquez & Koray Tahiroğlu},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Andrew Brown and Toby Gifford},\n month\ - \ = {June},\n publisher = {Griffith University},\n title = {NOISA Étude 2},\n\ - \ year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1099 + abstract: 'Music(re)ality is a collaborative musical performance between the virtual + and real world. Two musicians will present a musical improvisation, with one performing + with an iPad instrument and the others using a freehand augmented reality musical + instrument. 
While musicians are physically located in the space, the music jamming + will happen across a virtual and real environment. How will the collaboration + happen and what is a mixed reality musical performance? Through sonic feedback + or performers'' musical gestures? It will all be demonstrated in this performance.' + articleno: 1099 + author: Yichen Wang and Charles Patrick Martin + bibtex: "@article{nime23-music-1099,\n abstract = {Music(re)ality is a collaborative\ + \ musical performance between the virtual and real world. Two musicians will present\ + \ a musical improvisation, with one performing with an iPad instrument and the\ + \ others using a freehand augmented reality musical instrument. While musicians\ + \ are physically located in the space, the music jamming will happen across a\ + \ virtual and real environment. How will the collaboration happen and what is\ + \ a mixed reality musical performance? Through sonic feedback or performers' musical\ + \ gestures? It will all be demonstrated in this performance.},\n articleno = {1099},\n\ + \ author = {Yichen Wang and Charles Patrick Martin},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ + \ title = {Music(re)ality: A Collaborative Improvisation between Virtual and Real\ + \ World},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1099.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: NOISA Étude 2 - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: 'Music(re)ality: A Collaborative Improvisation between Virtual and Real World' + url: 
https://www.nime.org/proceedings/2023/nime23_music_1099.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Andean2016 - abstract: 'Program notes: Hyvat matkustajat (2014) (Finnish for ''Dear Travellers'', - but also for ''The Good Travellers'') began life as a "sonic postcard from Finland", - using soundscape field recordings from around the country. This turned out to - be only the first stop on its journey, however. The original material was later - further developed as material for sonic exploration and spectral transformations, - with the external spaces of the original version taking a sharp digital turn inwards, - to chart internal spectral landscapes, together with the soundmarks and soundscapes - of its first incarnation. Everything in Hyvat matkustajat is made from the original - field recordings which first gave birth to the piece.' - address: 'Brisbane, Australia' - author: James Andean - bibtex: "@inproceedings{nime2016-music-Andean2016,\n abstract = {Program notes:\ - \ Hyvat matkustajat (2014) (Finnish for 'Dear Travellers', but also for 'The Good\ - \ Travellers') began life as a \"sonic postcard from Finland\", using soundscape\ - \ field recordings from around the country. This turned out to be only the first\ - \ stop on its journey, however. The original material was later further developed\ - \ as material for sonic exploration and spectral transformations, with the external\ - \ spaces of the original version taking a sharp digital turn inwards, to chart\ - \ internal spectral landscapes, together with the soundmarks and soundscapes of\ - \ its first incarnation. 
Everything in Hyvat matkustajat is made from the original\ - \ field recordings which first gave birth to the piece.},\n address = {Brisbane,\ - \ Australia},\n author = {James Andean},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Hyvät matkustajat},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1101 + abstract: 'The performance of Sculpture DAXR is an offshoot of the Oscuterium project, + created by the group, RedSpills, a collaborative trio of new musical instrument + technologists, artists and performers: Michał Seta (Montreal, Quebec, Canada), + Dirk Stromberg (Republic of Singapore), and D. Andrew Stewart (Lethbridge, Alberta, + Canada). While Sculpture DAXR can be experienced as a live, in-person, multi-media + show involving the karlax digital musical instrument, live coding, video and sound + projection, this work is best experienced in its original form: a hybrid performance + and experience in which the participants (performer and audience) inhabit both + a live venue in real life (IRL) and a 3D virtual reality (VR) meeting point in + Mozilla''s real-time communications platform, Hubs. The innovative nature of this + work arises from the production of sound directly within the Hubs environment + using the Faust (Functional Audio Stream) programming language (i.e., browser-based + software synthesis engine). Both sound creation and 3D objects are transformed + by real-time data transmitted from a DMI over the internet.' 
+ articleno: 1101 + author: D Stewart + bibtex: "@article{nime23-music-1101,\n abstract = {The performance of Sculpture\ + \ DAXR is an offshoot of the Oscuterium project, created by the group, RedSpills,\ + \ a collaborative trio of new musical instrument technologists, artists and performers:\ + \ Michał Seta (Montreal, Quebec, Canada), Dirk Stromberg (Republic of Singapore),\ + \ and D. Andrew Stewart (Lethbridge, Alberta, Canada). While Sculpture DAXR can\ + \ be experienced as a live, in-person, multi-media show involving the karlax digital\ + \ musical instrument, live coding, video and sound projection, this work is best\ + \ experienced in its original form: a hybrid performance and experience in which\ + \ the participants (performer and audience) inhabit both a live venue in real\ + \ life (IRL) and a 3D virtual reality (VR) meeting point in Mozilla's real-time\ + \ communications platform, Hubs. The innovative nature of this work arises from\ + \ the production of sound directly within the Hubs environment using the Faust\ + \ (Functional Audio Stream) programming language (i.e., browser-based software\ + \ synthesis engine). 
Both sound creation and 3D objects are transformed by real-time\ + \ data transmitted from a DMI over the internet.},\n articleno = {1101},\n author\ + \ = {D Stewart},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ + \ = {May},\n note = {Live Concert 4, Thursday June 1, Centro de Cultura Digital},\n\ + \ title = {Sculpture DAXR},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1101.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Hyvät matkustajat - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' + title: Sculpture DAXR + url: https://www.nime.org/proceedings/2023/nime23_music_1101.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Essl2016 - address: 'Brisbane, Australia' - author: Karheinz Essl - bibtex: "@inproceedings{nime2016-music-Essl2016,\n address = {Brisbane, Australia},\n\ - \ author = {Karheinz Essl},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Lexicon Sonate},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1110 + abstract: 'Transcontinental Grapevine is a new crowdsourced telematic work by the + Virginia Tech Linux Laptop Orchestra (L2Ork) that was co-created and performed + with collaborators from UNTREF, Buenos Aires, Argentina. 
The work is inspired
+    by the introductory loop of the "Grapevine" song by Lane 8 and Elderbrook and
+    utilizes L2Ork Tweeter online collaborative musicking platform that allows for
+    perfect sync among performers regardless of the distance (in this case two groups
+    of performers, 11 in total, were over 5,000 miles apart). The work’s EDM aesthetics
+    intentionally seeks to test the limits of the newfound platform’s ability to sync
+    players, as well as to expand the telematic musical vocabulary.  The work was
+    co-created by the participants, each offering their own monophonic contributions.
+    It starts with Lane 8''s "Grapevine" intro, and then crossfades into a crowdsourced
+    theme and variations.'
+  articleno: 1110
+  author: Ivica Ico Bukvic
+  bibtex: "@article{nime23-music-1110,\n abstract = {Transcontinental Grapevine is\
+    \ a new crowdsourced telematic work by the Virginia Tech Linux Laptop Orchestra\
+    \ (L2Ork) that was co-created and performed with collaborators from UNTREF, Buenos\
+    \ Aires, Argentina. The work is inspired by the introductory loop of the \"Grapevine\"\
+    \ song by Lane 8 and Elderbrook and utilizes L2Ork Tweeter online collaborative\
+    \ musicking platform that allows for perfect sync among performers regardless\
+    \ of the distance (in this case two groups of performers, 11 in total, were over\
+    \ 5,000 miles apart). The work’s EDM aesthetics intentionally seeks to test the\
+    \ limits of the newfound platform’s ability to sync players, as well as to expand\
+    \ the telematic musical vocabulary.  The work was co-created by the participants,\
+    \ each offering their own monophonic contributions. 
It starts with Lane 8's \"\ + Grapevine\" intro, and then crossfades into a crowdsourced theme and variations.},\n\ + \ articleno = {1110},\n author = {Ivica Ico Bukvic},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 3, Thursday\ + \ June 1, Centro de Cultura Digital},\n title = {Transcontinental Grapevine},\n\ + \ url = {https://www.nime.org/proceedings/2023/nime23_music_1110.pdf},\n urlsuppl1\ + \ = {https://www.nime.org/proceedings/2023/nime23_concert_3.pdf},\n year = {2023}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Lexicon Sonate - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 3, Thursday June 1, Centro de Cultura Digital' + title: Transcontinental Grapevine + url: https://www.nime.org/proceedings/2023/nime23_music_1110.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_3.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Foran2016 - address: 'Brisbane, Australia' - author: Sean Foran - bibtex: "@inproceedings{nime2016-music-Foran2016,\n address = {Brisbane, Australia},\n\ - \ author = {Sean Foran},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Improvisations with the other},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1113 + abstract: 'T/ensor/~ (version 0.3) is a prototype of a dynamic performance system + developed in MAX that involves adaptive digital signal processing modules and + generative processes towards exploring the field and performance practice of human-machine + improvisation. 
The system is the result of a pilot, artistic research study entitled + ‘Improvisation Technologies and Creative Machines: The Performer-Instrument Relational + Milieu’. Our proposal for the NIME 2023 conference involves a c.10–12 minutes + improvised performance with the system (drum-kit performer and T/ensor/~ 0.3).' + articleno: 1113 + author: Dimitris Papageorgiou + bibtex: "@article{nime23-music-1113,\n abstract = {T/ensor/~ (version 0.3) is a\ + \ prototype of a dynamic performance system developed in MAX that involves adaptive\ + \ digital signal processing modules and generative processes towards exploring\ + \ the field and performance practice of human-machine improvisation. The system\ + \ is the result of a pilot, artistic research study entitled ‘Improvisation Technologies\ + \ and Creative Machines: The Performer-Instrument Relational Milieu’. Our proposal\ + \ for the NIME 2023 conference involves a c.10–12 minutes improvised performance\ + \ with the system (drum-kit performer and T/ensor/~ 0.3).},\n articleno = {1113},\n\ + \ author = {Dimitris Papageorgiou},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos},\n\ + \ title = {T/ensor/~ 0.3},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1113.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_2.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Improvisations with the other - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos' + title: T/ensor/~ 0.3 + url: https://www.nime.org/proceedings/2023/nime23_music_1113.pdf + urlsuppl1: 
https://www.nime.org/proceedings/2023/nime23_concert_2.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Eigenfeldt2016 - address: 'Brisbane, Australia' - author: Arne Eigenfeldt - bibtex: "@inproceedings{nime2016-music-Eigenfeldt2016,\n address = {Brisbane, Australia},\n\ - \ author = {Arne Eigenfeldt},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Machine Songs},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1115 + abstract: 'Neo Tokyo, ca. 2019, 31 years after World War III, Akira awakens. This + homage is an audiovisual, live-coded performance, remixing and re-envisioning + the 1988 classic film created in the year of its setting, 2019 and reimagined + now in 2022/2023 as the audiovisual work DEF FUNCTION(DYSTOPIAKIRA). The authors + use the code editor Jensaarai to collaboratively and simultaneously live-code + TidalCycles and Python, each supported by SuperCollider and Touch Designer on + the backend respectively. The authors often collaborate remotely due to their + respective locations which is facilitated by Jensaarai. This enables the client-side + rendering of both audio and visuals in order to retain high-quality representations + of both elements.' + articleno: 1115 + author: Ryan R Smith and Shawn Lawson + bibtex: "@article{nime23-music-1115,\n abstract = {Neo Tokyo, ca. 2019, 31 years\ + \ after World War III, Akira awakens. This homage is an audiovisual, live-coded\ + \ performance, remixing and re-envisioning the 1988 classic film created in the\ + \ year of its setting, 2019 and reimagined now in 2022/2023 as the audiovisual\ + \ work DEF FUNCTION(DYSTOPIAKIRA). 
The authors use the code editor Jensaarai\ + \ to collaboratively and simultaneously live-code TidalCycles and Python, each\ + \ supported by SuperCollider and Touch Designer on the backend respectively. The\ + \ authors often collaborate remotely due to their respective locations which is\ + \ facilitated by Jensaarai. This enables the client-side rendering of both audio\ + \ and visuals in order to retain high-quality representations of both elements.},\n\ + \ articleno = {1115},\n author = {Ryan R Smith and Shawn Lawson},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ + \ title = {DEF FUNCTION(DYSTOPIAKIRA)},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1115.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Machine Songs - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: DEF FUNCTION(DYSTOPIAKIRA) + url: https://www.nime.org/proceedings/2023/nime23_music_1115.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Sorensen2016 - address: 'Brisbane, Australia' - author: Andrew Sorensen - bibtex: "@inproceedings{nime2016-music-Sorensen2016,\n address = {Brisbane, Australia},\n\ - \ author = {Andrew Sorensen},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Barely a Piano},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1137 + 
abstract: 'In this performance, two guitar players improvise using electro-acoustic + guitars equipped with actuators that can hit each of the strings. By moving through + virtual shapes placed around them with their guitars and bodies, they can control + the actuators. By using minimal modifications of the instrument and subtly extending + existing playing techniques, the setup aims at preserving the technical and cultural + heritage of the acoustic instrument. During the performance, the two musicians + combine elements of traditional playing with rhythmical interventions that complements + the interaction with the shapes. In particular, the shapes allow them to generate + stable rhythmical overlapped sequences. The improvisation then develops according + to the musicians'' inspiration with the shapes integrated in their playing.' + articleno: 1137 + author: Sebastien Beaumont and Ivann Cruz and Arthur Paté and Florent Berthaut + bibtex: "@article{nime23-music-1137,\n abstract = {In this performance, two guitar\ + \ players improvise using electro-acoustic guitars equipped with actuators that\ + \ can hit each of the strings. By moving through virtual shapes placed around\ + \ them with their guitars and bodies, they can control the actuators. By using\ + \ minimal modifications of the instrument and subtly extending existing playing\ + \ techniques, the setup aims at preserving the technical and cultural heritage\ + \ of the acoustic instrument. During the performance, the two musicians combine\ + \ elements of traditional playing with rhythmical interventions that complements\ + \ the interaction with the shapes. In particular, the shapes allow them to generate\ + \ stable rhythmical overlapped sequences. 
The improvisation then develops according\ + \ to the musicians' inspiration with the shapes integrated in their playing.},\n\ + \ articleno = {1137},\n author = {Sebastien Beaumont and Ivann Cruz and Arthur\ + \ Paté and Florent Berthaut},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Online Presentation},\n title = {VS : Improvisation\ + \ with Automated Interactive Instruments},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1137.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Barely a Piano - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: 'VS : Improvisation with Automated Interactive Instruments' + url: https://www.nime.org/proceedings/2023/nime23_music_1137.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Berg2016 - address: 'Brisbane, Australia' - author: Henning Berg - bibtex: "@inproceedings{nime2016-music-Berg2016,\n address = {Brisbane, Australia},\n\ - \ author = {Henning Berg},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Improvising with Tango},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1141 + abstract: 'Codex Saqqara is a cycle of five semi-improvised musical pieces for live + coding and electric violin. Here, for duration reasons, we present a short excerpt. 
+ The interaction between the two performers takes place through a system that allows + the violinist to record and overdub up to five samples in real-time, which are + then processed and organized into structures by the live coder. In this way, the + two musicians interact with each other’s musical space, taking on different musical + roles during the performance, such as soloists, orchestrators or accompanists. + Given its extemporaneous nature, the piece is composed from-scratch, following + a series of macro-structures determined beforehand. This submission accompanies + a paper regarding the system used, along with some reflections that emerged during + the rehearsals for this performance.' + articleno: 1141 + author: Francesco Dal Rì and Francesca Zanghellini + bibtex: "@article{nime23-music-1141,\n abstract = {Codex Saqqara is a cycle of five\ + \ semi-improvised musical pieces for live coding and electric violin. Here, for\ + \ duration reasons, we present a short excerpt. The interaction between the two\ + \ performers takes place through a system that allows the violinist to record\ + \ and overdub up to five samples in real-time, which are then processed and organized\ + \ into structures by the live coder. In this way, the two musicians interact with\ + \ each other’s musical space, taking on different musical roles during the performance,\ + \ such as soloists, orchestrators or accompanists. Given its extemporaneous nature,\ + \ the piece is composed from-scratch, following a series of macro-structures determined\ + \ beforehand. 
This submission accompanies a paper regarding the system used, along\ + \ with some reflections that emerged during the rehearsals for this performance.},\n\ + \ articleno = {1141},\n author = {Francesco Dal Rì and Francesca Zanghellini},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Rob Hamilton},\n month = {May},\n note\ + \ = {Online Presentation},\n title = {Codex Saqqara},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1141.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Improvising with Tango - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: Codex Saqqara + url: https://www.nime.org/proceedings/2023/nime23_music_1141.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-James2016 - address: 'Brisbane, Australia' - author: Cat Hope & Stuart James - bibtex: "@inproceedings{nime2016-music-James2016,\n address = {Brisbane, Australia},\n\ - \ author = {Cat Hope & Stuart James},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Chunk},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1148 + abstract: 'This is a proposal for the premier of Flightless Path, a new work for + The Terpsichora Pressure-Sensitive Floors and Renaissance Violone. The Terpsichora + Pressure-Sensitive Floors (The Floors) are a new digital musical instrument which + uses whole-body motion to control electronic music. 
The instrument continues the + development of early models for pioneering dancer Philippa Cullen (1950-1975), + expanding its use as an expressive and versatile instrument for musicians to play. + The Floors use a large interactive surface for fine control of many sonic parameters + with a small number of sensors. The violone is the Renaissance precursor to the + double bass. It is a large instrument that has six gut strings, gut frets and + is played with a viol style underhand bow. This instrument also requires the whole + body to play and physically support the instrument in performance. This new work + brings these two instruments together and is an interplay between the definitions + of instruments and controller as they relate to contemporary practices based on + gesture. Working with the specific limitations of the body in relation to large + objects, the Floors and the violone both function as controllers for affecting + sound and as instruments for creating sound.' + articleno: 1148 + author: iran sanadzadeh and Chloë Sobek + bibtex: "@article{nime23-music-1148,\n abstract = {This is a proposal for the premier\ + \ of Flightless Path, a new work for The Terpsichora Pressure-Sensitive Floors\ + \ and Renaissance Violone. The Terpsichora Pressure-Sensitive Floors (The Floors)\ + \ are a new digital musical instrument which uses whole-body motion to control\ + \ electronic music. The instrument continues the development of early models for\ + \ pioneering dancer Philippa Cullen (1950-1975), expanding its use as an expressive\ + \ and versatile instrument for musicians to play. The Floors use a large interactive\ + \ surface for fine control of many sonic parameters with a small number of sensors.\ + \ The violone is the Renaissance precursor to the double bass. It is a large instrument\ + \ that has six gut strings, gut frets and is played with a viol style underhand\ + \ bow. 
This instrument also requires the whole body to play and physically support\ + \ the instrument in performance. This new work brings these two instruments together\ + \ and is an interplay between the definitions of instruments and controller as\ + \ they relate to contemporary practices based on gesture. Working with the specific\ + \ limitations of the body in relation to large objects, the Floors and the violone\ + \ both function as controllers for affecting sound and as instruments for creating\ + \ sound.},\n articleno = {1148},\n author = {iran sanadzadeh and Chloë Sobek},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Rob Hamilton},\n month = {May},\n note\ + \ = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n title = {Flightless\ + \ Path},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1148.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Chunk - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' + title: Flightless Path + url: https://www.nime.org/proceedings/2023/nime23_music_1148.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Beck2016 - address: 'Brisbane, Australia' - author: Stephen Beck - bibtex: "@inproceedings{nime2016-music-Beck2016,\n address = {Brisbane, Australia},\n\ - \ author = {Stephen Beck},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = 
{Quartet for Strings},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1158 + abstract: 'The Moirai Mask is an ornate mask that operates as a NIME. The mask has + an integrated MIDI controller that allows the performer to play music by touching + the brass and bamboo panels. In performance, the artist uses audio-montage to + collage sounds of the Australian wilderness with electronics and sampled fragments + of an acoustic string instrument. The mask is handmade from predominantly recycled + materials; hand cut brass panels and hand painted bamboo elements adorn the front + of the mask, which are sewn into the cotton paneling that covers the hand soldered + electrical components. The Moirai Mask is a sonic play on the Covid-19 PPE mask. + The PPE mask, like an exo-skeleton, provides an extra, augmented layer of protection + from our bodies, the ‘outside world’, the virus, the Other. The Covid-19 pandemic + forced us to accept our bodily limitations and embrace this prosaic form of human + augmentation, the PPE mask. Furthermore, as the Covid-19 virus enters our bodies + and is transmitted through our breath, we must acknowledge that we are not separate + from the non-human world that we inhabit but are in fact bodily constituted through + it [1]. As Deborah Lupton et al. point out ‘the COVID crisis [has] heightened + awareness of our collective vulnerability to each other’s more-than-human bodies’ + [ibid.]. Drawing on the concept of a NIME, here the PPE mask is appropriated as + a symbolic and subversive art object, paying sonic homage to the non-human world + while the artist’s voice is subtly silenced.' + articleno: 1158 + author: Chloë L A Sobek + bibtex: "@article{nime23-music-1158,\n abstract = {The Moirai Mask is an ornate\ + \ mask that operates as a NIME. 
The mask has an integrated MIDI controller that\ + \ allows the performer to play music by touching the brass and bamboo panels.\ + \ In performance, the artist uses audio-montage to collage sounds of the Australian\ + \ wilderness with electronics and sampled fragments of an acoustic string instrument.\ + \ The mask is handmade from predominantly recycled materials; hand cut brass panels\ + \ and hand painted bamboo elements adorn the front of the mask, which are sewn\ + \ into the cotton paneling that covers the hand soldered electrical components.\ + \ The Moirai Mask is a sonic play on the Covid-19 PPE mask. The PPE mask, like\ + \ an exo-skeleton, provides an extra, augmented layer of protection from our bodies,\ + \ the ‘outside world’, the virus, the Other. The Covid-19 pandemic forced us to\ + \ accept our bodily limitations and embrace this prosaic form of human augmentation,\ + \ the PPE mask. Furthermore, as the Covid-19 virus enters our bodies and is transmitted\ + \ through our breath, we must acknowledge that we are not separate from the non-human\ + \ world that we inhabit but are in fact bodily constituted through it [1]. As\ + \ Deborah Lupton et al. 
point out ‘the COVID crisis [has] heightened awareness\ + \ of our collective vulnerability to each other’s more-than-human bodies’ [ibid.].\ + \ Drawing on the concept of a NIME, here the PPE mask is appropriated as a symbolic\ + \ and subversive art object, paying sonic homage to the non-human world while\ + \ the artist’s voice is subtly silenced.},\n articleno = {1158},\n author = {Chloë\ + \ L A Sobek},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ + \ = {May},\n note = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n\ + \ title = {The Moirai Mask},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1158.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Quartet for Strings - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' + title: The Moirai Mask + url: https://www.nime.org/proceedings/2023/nime23_music_1158.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Nakanishi2016 - address: 'Brisbane, Australia' - author: Yoshihito Nakanishi - bibtex: "@inproceedings{nime2016-music-Nakanishi2016,\n address = {Brisbane, Australia},\n\ - \ author = {Yoshihito Nakanishi},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {TRI=NITRO},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1166 + abstract: 'BANDNAME is composed of three womxn NAME, NAME, 
and NAME and guests who + use the physical properties of the electromagnetic spectrum to create installations, + performances and recordings. Using electronic feedback, audio speakers, various + kinds of microphones/pickups, and resonant objects of all shapes and kinds, we + summon the feminine spirit of electromagnetism, aka the Goddess of the Electronic + Medium aka the ElecroMagnetic Goddess. We have a flexible membership inclusive + to all peoples who are willing to open themselves up to this spirit. In terms + of current trends in audio technology, we invoke a feminist response to the masculinization + of the music industry, audio engineering, and to the artistic spaces of sound + arts in general. Our latest project includes playing with painted score-objects + Bareëmins. They are painted with conductive carbon paint, and non-conductive paint. + When the area that is conductive is activated it produces sound, the non-conductive + area does not. Thereby, by alternating painted and not painted areas in an aesthetic + way, a score can be embedded into the very instrument itself. The paint can be + applied to paintings as well as the inside of paper and plastic sculptures the + results are many fold. There are folded paper crystal Bareëmins that look like + crystals suitable for an electromagnetic altar. You can use them to invoke the + Electromagnetic Goddess at home. The project is particularly aligned with this + year''s theme of Frugal Music Innovation as it uses all natural materials + paste + glue to create the painted score/instrument. The carbon paint is made by recycling + charcoal from a cooking fire, the colored paint is everyday school supplies and + paint made out of found earth pigment. The binder is paste glue. The brains are + an Arduino running simple theremin code with only 2 resistors and an 8ohm speaker + as peripherals. 
video here https://youtu.be/YAD-F68Ntl4' + articleno: 1166 + author: Sofya Yuditskaya and Jess Rowland and Margaret Schedel + bibtex: "@article{nime23-music-1166,\n abstract = {BANDNAME is composed of three\ + \ womxn NAME, NAME, and NAME and guests who use the physical properties of the\ + \ electromagnetic spectrum to create installations, performances and recordings.\ + \ Using electronic feedback, audio speakers, various kinds of microphones/pickups,\ + \ and resonant objects of all shapes and kinds, we summon the feminine spirit\ + \ of electromagnetism, aka the Goddess of the Electronic Medium aka the ElecroMagnetic\ + \ Goddess. We have a flexible membership inclusive to all peoples who are willing\ + \ to open themselves up to this spirit. In terms of current trends in audio technology,\ + \ we invoke a feminist response to the masculinization of the music industry,\ + \ audio engineering, and to the artistic spaces of sound arts in general. Our\ + \ latest project includes playing with painted score-objects Bareëmins. They are\ + \ painted with conductive carbon paint, and non-conductive paint. When the area\ + \ that is conductive is activated it produces sound, the non-conductive area does\ + \ not. Thereby, by alternating painted and not painted areas in an aesthetic way,\ + \ a score can be embedded into the very instrument itself. The paint can be applied\ + \ to paintings as well as the inside of paper and plastic sculptures the results\ + \ are many fold. There are folded paper crystal Bareëmins that look like crystals\ + \ suitable for an electromagnetic altar. You can use them to invoke the Electromagnetic\ + \ Goddess at home. The project is particularly aligned with this year's theme\ + \ of Frugal Music Innovation as it uses all natural materials + paste glue to\ + \ create the painted score/instrument. 
The carbon paint is made by recycling charcoal\ + \ from a cooking fire, the colored paint is everyday school supplies and paint\ + \ made out of found earth pigment. The binder is paste glue. The brains are an\ + \ Arduino running simple theremin code with only 2 resistors and an 8ohm speaker\ + \ as peripherals. video here https://youtu.be/YAD-F68Ntl4},\n articleno =\ + \ {1166},\n author = {Sofya Yuditskaya and Jess Rowland and Margaret Schedel},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Rob Hamilton},\n month = {May},\n note\ + \ = {Live Concert 4, Thursday June 1, Centro de Cultura Digital},\n title = {Carbon\ + \ Based EM Fields},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1166.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: TRI=NITRO - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' + title: Carbon Based EM Fields + url: https://www.nime.org/proceedings/2023/nime23_music_1166.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Allison2016 - address: 'Brisbane, Australia' - author: Jesse Allison - bibtex: "@inproceedings{nime2016-music-Allison2016,\n address = {Brisbane, Australia},\n\ - \ author = {Jesse Allison},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Causeway},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1168 + abstract: 'Sonic Swells 
is a multimedia music composition for fixed audio, filmed + footage of a surfer and live saxophone. This iterative sound art project explores + the use of sonification of ocean weather data, sonification of movement data from + a surfer riding waves, and live performance as tools for music composition. Weather + data is collected through a free API and converted to sound in Max/MSP, driving + the parameters of a very large additive and subtractive synthesizer that uses + pink noise as its fundamental sound source. The sonification includes swell direction + and wind speed that dictate the positions of audio in the stereo or surround speaker + field, and wave height and swell period driving an undulating filter effect. The + severity of the conditions dictates the complexity of the soundscape. Sampled + audio is blended into the sonification. The surfer''s movement data is collected + with a DIY kit including an iPhone for telemetry, an android or esp32 watch for + data logging, and a small Wi-Fi router with battery and a GoPro. This information + influences elements of the ocean weather sonification and affects the saxophone + live performance. The performer plays a combination of scored and improvised material. + The piece explores the relationship between sonification, motion and music.' + articleno: 1168 + author: Cayn Borthwick + bibtex: "@article{nime23-music-1168,\n abstract = {Sonic Swells is a multimedia\ + \ music composition for fixed audio, filmed footage of a surfer and live saxophone.\ + \ This iterative sound art project explores the use of sonification of ocean weather\ + \ data, sonification of movement data from a surfer riding waves, and live performance\ + \ as tools for music composition. 
Weather data is collected through a free API\ + \ and converted to sound in Max/MSP, driving the parameters of a very large additive\ + \ and subtractive synthesizer that uses pink noise as its fundamental sound source.\ + \ The sonification includes swell direction and wind speed that dictate the positions\ + \ of audio in the stereo or surround speaker field, and wave height and swell\ + \ period driving an undulating filter effect. The severity of the conditions dictates\ + \ the complexity of the soundscape. Sampled audio is blended into the sonification.\ + \ The surfer's movement data is collected with a DIY kit including an iPhone for\ + \ telemetry, an android or esp32 watch for data logging, and a small Wi-Fi router\ + \ with battery and a GoPro. This information influences elements of the ocean\ + \ weather sonification and affects the saxophone live performance. The performer\ + \ plays a combination of scored and improvised material. The piece explores the\ + \ relationship between sonification, motion and music.},\n articleno = {1168},\n\ + \ author = {Cayn Borthwick},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n\ + \ title = {Sonic Swells - Riding Swells},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1168.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Causeway - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' + title: Sonic Swells - Riding Swells + url: https://www.nime.org/proceedings/2023/nime23_music_1168.pdf + urlsuppl1: 
https://www.nime.org/proceedings/2023/nime23_concert_5.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Freeth2016 - address: 'Brisbane, Australia' - author: Ben Freeth - bibtex: "@inproceedings{nime2016-music-Freeth2016,\n address = {Brisbane, Australia},\n\ - \ author = {Ben Freeth},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Bio-vortex: Exploring Wet},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1174 + abstract: 'Pandora hears her own dreams, they talk to her in mysterious voices, + unknown languages. You find yourself standing alone, in the middle of her darkness. + You don’t know how you got there. Are you one of Pandora’s dreams? Talk to her, + maybe she will answer you. In this audiovisual dreamscape lies a re-imagining + of Pandora’s story, where the contents of her jar are bioluminescent swarming + spores that seek to fill the world with hope instead of evil, and life instead + of death. The spores want to get out, their evolutionary powers are hidden, and + the whole universe is waiting to be explored. Meanwhile, Pandora is dreaming, + condemned to keep the box closed. Life waits to be released.' + articleno: 1174 + author: Jack Armitage and Celeste Betancur + bibtex: "@article{nime23-music-1174,\n abstract = {Pandora hears her own dreams,\ + \ they talk to her in mysterious voices, unknown languages. You find yourself\ + \ standing alone, in the middle of her darkness. You don’t know how you got there.\ + \ Are you one of Pandora’s dreams? Talk to her, maybe she will answer you. In\ + \ this audiovisual dreamscape lies a re-imagining of Pandora’s story, where the\ + \ contents of her jar are bioluminescent swarming spores that seek to fill the\ + \ world with hope instead of evil, and life instead of death. 
The spores want\ + \ to get out, their evolutionary powers are hidden, and the whole universe is\ + \ waiting to be explored. Meanwhile, Pandora is dreaming, condemned to keep the\ + \ box closed. Life waits to be released.},\n articleno = {1174},\n author = {Jack\ + \ Armitage and Celeste Betancur},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n\ + \ title = {Pandora's Mycophony},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1174.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: 'Bio-vortex: Exploring Wet' - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' + title: Pandora's Mycophony + url: https://www.nime.org/proceedings/2023/nime23_music_1174.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Hajdu2016 - address: 'Brisbane, Australia' - author: Georg Hajdu - bibtex: "@inproceedings{nime2016-music-Hajdu2016,\n address = {Brisbane, Australia},\n\ - \ author = {Georg Hajdu},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Just Her - Jester - Gesture},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Just 
Her - Jester - Gesture
- year: 2016
-
-
-- ENTRYTYPE: inproceedings
- ID: nime2016-music-Bussigel2016
- address: 'Brisbane, Australia'
- author: Travis Thatcher & Peter Bussigel
- bibtex: "@inproceedings{nime2016-music-Bussigel2016,\n address = {Brisbane, Australia},\n\
- \ author = {Travis Thatcher & Peter Bussigel},\n booktitle = {Music Proceedings\
- \ of the International Conference on New Interfaces for Musical Expression},\n\
- \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\
- \ University},\n title = {Danger Music No. 85},\n year = {2016}\n}\n"
- booktitle: Music Proceedings of the International Conference on New Interfaces for
- Musical Expression
- editor: Andrew Brown and Toby Gifford
- month: June
- publisher: Griffith University
- title: Danger Music No. 85
- year: 2016
+- ENTRYTYPE: article
+ ID: nime23-music-1212
+ abstract: 'The Sabotaging Piano is an electronic prepared piano that challenges
+ performers through the remapping of keys to unexpected pitches. For every new
+ performance, a new remapping pattern is given, so performers face a continuously
+ surprising new element. The performer is provided with an expression pedal (a
+ ``sabotaging pedal'''') to modulate the amount of keys that will be remapped,
+ going from none to all of them.'
+ articleno: 1212
+ author: Teodoro Dannemann
+ bibtex: "@article{nime23-music-1212,\n abstract = {The Sabotaging Piano is an electronic\
+ \ prepared piano that challenges performers through the remapping of keys to unexpected\
+ \ pitches. For every new performance, a new remapping pattern is given, so performers\
+ \ face a continuously surprising new element. 
The performer is provided with an\
+ \ expression pedal (a ``sabotaging pedal'') to modulate the amount of keys that\
+ \ will be remapped, going from none to all of them.},\n articleno = {1212},\n\
+ \ author = {Teodoro Dannemann},\n booktitle = {Music Proceedings of the International\
+ \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\
+ \ month = {May},\n note = {Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos},\n\
+ \ title = {Sabotaging Piano Concert},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1212.pdf},\n\
+ \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_1.pdf},\n\
+ \ year = {2023}\n}\n"
 booktitle: Music Proceedings of the International Conference on New Interfaces for
 Musical Expression
+ editor: Rob Hamilton
+ month: May
+ note: 'Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos'
+ title: Sabotaging Piano Concert
+ url: https://www.nime.org/proceedings/2023/nime23_music_1212.pdf
+ urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_1.pdf
+ year: 2023
 
-- ENTRYTYPE: inproceedings
- ID: nime2016-music-Waerstad2016
- address: 'Brisbane, Australia'
- author: Bernt Isak Wærstad
- bibtex: "@inproceedings{nime2016-music-Waerstad2016,\n address = {Brisbane, Australia},\n\
- \ author = {Bernt Isak Wærstad},\n booktitle = {Music Proceedings of the International\
- \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\
- \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\
- \ title = {Cosmo Collective},\n year = {2016}\n}\n"
- booktitle: Music Proceedings of the International Conference on New Interfaces for
- Musical Expression
- editor: Andrew Brown and Toby Gifford
- month: June
- publisher: Griffith University
- title: Cosmo Collective
- year: 2016
+- ENTRYTYPE: article
+ ID: nime23-music-1214
+ abstract: 'Returns and Simulacra combines sound and projections of video onto a
+ screen with the performer’s body on stage. 
It uses mini bee accelerometers and + touch-sensor attachments as an instrument called Piano Hands. Through this instrument, + the pianist controls a max/MSP patch interface and some elements in the projected + video of the piece. The piece addresses the performer’s multiple identities on + stage, playing the line between the real and virtual performance while incorporating + different footage from filmed videos of the pianist and archived cabaret performances + of the British queer performers of the past. The digital score relies on the pianist''s + embodied gestural behaviour and his reaction to audio and video material.' + articleno: 1214 + author: Solomiya Moroz and Zubin Kanga + bibtex: "@article{nime23-music-1214,\n abstract = {Returns and Simulacra combines\ + \ sound and projections of video onto a screen with the performer’s body on stage.\ + \ It uses mini bee accelerometers and touch-sensor attachments as an instrument\ + \ called Piano Hands. Through this instrument, the pianist controls a max/MSP\ + \ patch interface and some elements in the projected video of the piece. The piece\ + \ addresses the performer’s multiple identities on stage, playing the line between\ + \ the real and virtual performance while incorporating different footage from\ + \ filmed videos of the pianist and archived cabaret performances of the British\ + \ queer performers of the past. 
The digital score relies on the pianist's embodied\ + \ gestural behaviour and his reaction to audio and video material.},\n articleno\ + \ = {1214},\n author = {Solomiya Moroz and Zubin Kanga},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ + \ title = {Returns \\& Simulacra},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1214.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Cosmo Collective - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: Returns & Simulacra + url: https://www.nime.org/proceedings/2023/nime23_music_1214.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Smallwood2016 - address: 'Brisbane, Australia' - author: Stephan Moore & Scott Smallwood - bibtex: "@inproceedings{nime2016-music-Smallwood2016,\n address = {Brisbane, Australia},\n\ - \ author = {Stephan Moore & Scott Smallwood},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Losperus},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1245 + abstract: 'Absence is a performance for audio–visual concatenative synthesis elaborated + during Diemo Schwarz’s art–science residency at the IMéRA Institute for Advanced + Study in 2022. 
It explores several notions of absence: of light, of love, of humanity, + where societies and all human artefacts will be destructured to gradually disappear + within the materials and textures of the natural world. Audio–visual concatenative + synthesis extends the principle of corpus-based sound synthesis to the visual + domain, where, in addition to the sound corpus (i.e. a collection of segments + of recorded sound with a perceptual description of their sound character), the + artist uses a corpus of still images with perceptual description (colour, texture, + brightness, entropy, and other content-based image descriptors). The artist then + creates an audio–visual musical performance by navigating through one of these + descriptor spaces, e.g. through the collection of sound grains in a space of perceptual + audio descriptors, and at the same time through the other descriptor space, i.e. + select images from the visual corpus for rendering, and thus navigate in parallel + through both corpora interactively with gestural control via movement sensors. + This will evoke an aesthetic of acoustic and visual collage or cut-up, generating + an audio–visual sequence of similar sounds/images from the two corpora when navigation + is local, and opposing contrasting sounds/images when the navigation jumps to + different parts of the linked sound/image descriptor space. The artistic–technological + question that is explored here is how to control at the same time the navigation + through the audio and the image descriptor spaces with gesture sensors, i.e. how + to link the gesture sensing to both the image descriptors and the sound descriptors + in order to create a multi-modal audio–visual performance.' 
+ articleno: 1245 + author: Diemo Schwarz + bibtex: "@article{nime23-music-1245,\n abstract = {Absence is a performance for\ + \ audio–visual concatenative synthesis elaborated during Diemo Schwarz’s art–science\ + \ residency at the IMéRA Institute for Advanced Study in 2022. It explores several\ + \ notions of absence: of light, of love, of humanity, where societies and all\ + \ human artefacts will be destructured to gradually disappear within the materials\ + \ and textures of the natural world. Audio–visual concatenative synthesis extends\ + \ the principle of corpus-based sound synthesis to the visual domain, where, in\ + \ addition to the sound corpus (i.e. a collection of segments of recorded sound\ + \ with a perceptual description of their sound character), the artist uses a corpus\ + \ of still images with perceptual description (colour, texture, brightness, entropy,\ + \ and other content-based image descriptors). The artist then creates an audio–visual\ + \ musical performance by navigating through one of these descriptor spaces, e.g.\ + \ through the collection of sound grains in a space of perceptual audio descriptors,\ + \ and at the same time through the other descriptor space, i.e. select images\ + \ from the visual corpus for rendering, and thus navigate in parallel through\ + \ both corpora interactively with gestural control via movement sensors. This\ + \ will evoke an aesthetic of acoustic and visual collage or cut-up, generating\ + \ an audio–visual sequence of similar sounds/images from the two corpora when\ + \ navigation is local, and opposing contrasting sounds/images when the navigation\ + \ jumps to different parts of the linked sound/image descriptor space. 
The artistic–technological\ + \ question that is explored here is how to control at the same time the navigation\ + \ through the audio and the image descriptor spaces with gesture sensors, i.e.\ + \ how to link the gesture sensing to both the image descriptors and the sound\ + \ descriptors in order to create a multi-modal audio–visual performance.},\n articleno\ + \ = {1245},\n author = {Diemo Schwarz},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Rob Hamilton},\n month = {May},\n note = {Live Concert 5, Friday June 2,\ + \ Centro de Cultura Digital},\n title = {Absence},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1245.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Losperus - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' + title: Absence + url: https://www.nime.org/proceedings/2023/nime23_music_1245.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Michalakos2016 - address: 'Brisbane, Australia' - author: Christos Michalakos - bibtex: "@inproceedings{nime2016-music-Michalakos2016,\n address = {Brisbane, Australia},\n\ - \ author = {Christos Michalakos},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Augmented Drum-Kit: Path Finder},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1249 + abstract: 'Dream Structures is a live coding performance that uses 
computational + audio analysis and machine learning to navigate and resample a half-terabyte archive + of 90s/00s trance music, creating a live musical collage that organises fragments + of audio from thousands of tracks by traversing a multidimensional feature space.' + articleno: 1249 + author: Daniel Jones + bibtex: "@article{nime23-music-1249,\n abstract = {Dream Structures is a live coding\ + \ performance that uses computational audio analysis and machine learning to navigate\ + \ and resample a half-terabyte archive of 90s/00s trance music, creating a live\ + \ musical collage that organises fragments of audio from thousands of tracks by\ + \ traversing a multidimensional feature space.},\n articleno = {1249},\n author\ + \ = {Daniel Jones},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ + \ = {May},\n note = {Live Concert 3, Thursday June 1, Centro de Cultura Digital},\n\ + \ title = {Dream Structures},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1249.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_3.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: 'Augmented Drum-Kit: Path Finder' - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 3, Thursday June 1, Centro de Cultura Digital' + title: Dream Structures + url: https://www.nime.org/proceedings/2023/nime23_music_1249.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_3.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Nakanishi2016 - address: 'Brisbane, Australia' - author: Yoshihito Nakanishi - bibtex: "@inproceedings{nime2016-music-Nakanishi2016,\n address = {Brisbane, Australia},\n\ - \ author = {Yoshihito Nakanishi},\n 
booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Powder Box},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1262 + abstract: Stir bugs is an exploration of live algorithmic control in corpus-based + performance. A community of computational agents confined to a two-dimensional + square prison cell is live-coded into collective madness. Agents are controlled + by simple code functions that define navigation in a terrain made of a collection + of electronic noise samples. Each agent is also associated with a sound playback/synthesis + function. The performance embraces the complexity emerging from quickly coding + a multiplicity of behaviours in a shared sonic space. + articleno: 1262 + author: Gerard Roma + bibtex: "@article{nime23-music-1262,\n abstract = {Stir bugs is an exploration of\ + \ live algorithmic control in corpus-based performance. A community of computational\ + \ agents confined to a two-dimensional square prison cell is live-coded into collective\ + \ madness. Agents are controlled by simple code functions that define navigation\ + \ in a terrain made of a collection of electronic noise samples. Each agent is\ + \ also associated with a sound playback/synthesis function. 
The performance embraces\ + \ the complexity emerging from quickly coding a multiplicity of behaviours in\ + \ a shared sonic space.},\n articleno = {1262},\n author = {Gerard Roma},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ + \ title = {Stir bugs},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1262.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Powder Box - year: 2016 - + editor: Rob Hamilton + month: May + note: Online Presentation + title: Stir bugs + url: https://www.nime.org/proceedings/2023/nime23_music_1262.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Tadokoro2016 - address: 'Brisbane, Australia' - author: Atsushi Tadokoro - bibtex: "@inproceedings{nime2016-music-Tadokoro2016,\n address = {Brisbane, Australia},\n\ - \ author = {Atsushi Tadokoro},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Membranes},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Membranes - year: 2016 - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Lee2016 - address: 'Brisbane, Australia' - author: Sang Won Lee - bibtex: "@inproceedings{nime2016-music-Lee2016,\n address = {Brisbane, Australia},\n\ - \ author = 
{Sang Won Lee},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Live Writing: Gloomy Streets},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1272 + abstract: 'Branch is a live coding étude centered around speech and form. The piece + uses the TidalCycles language alongside a tool we developed called SHARP, which + provides an interactive, tree-like structure embedded in the text editor to track + how blocks of code evolve over time. SHARP opens up new musical affordances centered + around quickly switching between previous program states. In addition, SHARP’s + version trees act as a kind of post-hoc score, leaving a visual trace of the piece’s + structure as it unfolds. With Branch, we attempt to go beyond a simple demonstration + of SHARP as a tool and instead create a piece which highlights the interplay between + musical form, its visual representation in SHARP, and the sonic material itself. + To that end, Branch makes use of machine-generated speech based mostly on snippets + from the text of Robert Frost’s poem “The Road Not Taken”. The text is largely + decontextualized, and its treatment is somewhat tongue-in-cheek: while the poem’s + premise centers around not being able to take both paths, we can easily explore + as many code paths as we wish. In addition to speech, Branch uses audio samples + from Freesound, including the sounds of twigs snapping, knocking on wood, and + a person stepping on leaves.' + articleno: 1272 + author: Daniel Manesh and Douglas A Bowman Jr and Sang Won Lee + bibtex: "@article{nime23-music-1272,\n abstract = {Branch is a live coding étude\ + \ centered around speech and form. 
The piece uses the TidalCycles language alongside\ + \ a tool we developed called SHARP, which provides an interactive, tree-like structure\ + \ embedded in the text editor to track how blocks of code evolve over time. SHARP\ + \ opens up new musical affordances centered around quickly switching between previous\ + \ program states. In addition, SHARP’s version trees act as a kind of post-hoc\ + \ score, leaving a visual trace of the piece’s structure as it unfolds. With Branch,\ + \ we attempt to go beyond a simple demonstration of SHARP as a tool and instead\ + \ create a piece which highlights the interplay between musical form, its visual\ + \ representation in SHARP, and the sonic material itself. To that end, Branch\ + \ makes use of machine-generated speech based mostly on snippets from the text\ + \ of Robert Frost’s poem “The Road Not Taken”. The text is largely decontextualized,\ + \ and its treatment is somewhat tongue-in-cheek: while the poem’s premise centers\ + \ around not being able to take both paths, we can easily explore as many code\ + \ paths as we wish. 
In addition to speech, Branch uses audio samples from Freesound,\ + \ including the sounds of twigs snapping, knocking on wood, and a person stepping\ + \ on leaves.},\n articleno = {1272},\n author = {Daniel Manesh and Douglas A Bowman\ + \ Jr and Sang Won Lee},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Live Concert 4, Thursday June 1, Centro de Cultura\ + \ Digital},\n title = {Branch},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1272.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: 'Live Writing: Gloomy Streets' - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' + title: Branch + url: https://www.nime.org/proceedings/2023/nime23_music_1272.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Sorensen2016 - address: 'Brisbane, Australia' - author: Andrew Sorensen - bibtex: "@inproceedings{nime2016-music-Sorensen2016,\n address = {Brisbane, Australia},\n\ - \ author = {Andrew Sorensen},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Splice},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1284 + abstract: 'Displacements is a music and video performance that thematizes the recording + of walks in public spaces (a relatively recent and popular genre of videos on + streaming platforms). 
In a place built to organize human displacements, a moving + observer registers passing bodies: their directions, flows and speeds superimposed + on the shades and forms of the environment are the visual information that feed + an algorithmic composition based on shifts of space, time and color. The music, + likewise algorithmic and mainly synthetic (but also including transformations + of the sound captured during the footage), modulates its visual counterpart by + providing an ethereal atmosphere uncorrelated with the expected soundscape. The + work alludes to principles of the live coding practice as its performance happens + in an improvised way through editing and running a pre-prepared computer code + that controls the processes for music and video generation. The code is displayed + as the top layer of the video, making available to the audience the performer’s + decisions, as well as the algorithmic structure of the work, and having an aesthetic + role as part of the visual composition of the work.' + articleno: 1284 + author: Adriano Claro Monteiro + bibtex: "@article{nime23-music-1284,\n abstract = {Displacements is a music and\ + \ video performance that thematizes the recording of walks in public spaces (a\ + \ relatively recent and popular genre of videos on streaming platforms). In a\ + \ place built to organize human displacements, a moving observer registers passing\ + \ bodies: their directions, flows and speeds superimposed on the shades and forms\ + \ of the environment are the visual information that feed an algorithmic composition\ + \ based on shifts of space, time and color. The music, likewise algorithmic and\ + \ mainly synthetic (but also including transformations of the sound captured during\ + \ the footage), modulates its visual counterpart by providing an ethereal atmosphere\ + \ uncorrelated with the expected soundscape. 
The work alludes to principles of\ + \ the live coding practice as its performance happens in an improvised way through\ + \ editing and running a pre-prepared computer code that controls the processes\ + \ for music and video generation. The code is displayed as the top layer of the\ + \ video, making available to the audience the performer’s decisions, as well as\ + \ the algorithmic structure of the work, and having an aesthetic role as part\ + \ of the visual composition of the work.},\n articleno = {1284},\n author = {Adriano\ + \ Claro Monteiro},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ + \ = {May},\n note = {Live Concert 4, Thursday June 1, Centro de Cultura Digital},\n\ + \ title = {Displacements},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1284.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Splice - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' + title: Displacements + url: https://www.nime.org/proceedings/2023/nime23_music_1284.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-VandemastBell2016 - address: 'Brisbane, Australia' - author: Paul Vandemast-Bell - bibtex: "@inproceedings{nime2016-music-VandemastBell2016,\n address = {Brisbane,\ - \ Australia},\n author = {Paul Vandemast-Bell},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = 
{Deformed Electronic Dance Music},\n year = {2016}\n\ - }\n" +- ENTRYTYPE: article + ID: nime23-music-1285 + abstract: 'If crystal bowls could speak, what would they say? Beyond Hexagons is + a performance using the shard-speakers, a musical instrument and playback system + created from the shards of broken crystal singing bowls with affixed transducers + and resonators. Tracing their lifespans from quartz mines to factories and from + scientific laboratories and sound studios, the bowls transmit their origin stories + of purpose, function, and pleasure through a unique and alien sonic language that + makes heavy use of improvisation, whimsy, and custom software instruments. The + result is a sonic exploration of the paradoxes contained in these materials — + strength and fragility, acuity and intuition, secrecy and frankness.' + articleno: 1285 + author: Anastasia Clarke + bibtex: "@article{nime23-music-1285,\n abstract = {If crystal bowls could speak,\ + \ what would they say? Beyond Hexagons is a performance using the shard-speakers,\ + \ a musical instrument and playback system created from the shards of broken crystal\ + \ singing bowls with affixed transducers and resonators. Tracing their lifespans\ + \ from quartz mines to factories and from scientific laboratories and sound studios,\ + \ the bowls transmit their origin stories of purpose, function, and pleasure through\ + \ a unique and alien sonic language that makes heavy use of improvisation, whimsy,\ + \ and custom software instruments. 
The result is a sonic exploration of the paradoxes\ + \ contained in these materials — strength and fragility, acuity and intuition,\ + \ secrecy and frankness.},\n articleno = {1285},\n author = {Anastasia Clarke},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Rob Hamilton},\n month = {May},\n note\ + \ = {Live Concert 3, Thursday June 1, Centro de Cultura Digital},\n title = {Shard\ + \ Speakers: Beyond Hexagons},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1285.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_3.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Deformed Electronic Dance Music - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 3, Thursday June 1, Centro de Cultura Digital' + title: 'Shard Speakers: Beyond Hexagons' + url: https://www.nime.org/proceedings/2023/nime23_music_1285.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_3.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Bussigel2016 - address: 'Brisbane, Australia' - author: Peter Bussigel - bibtex: "@inproceedings{nime2016-music-Bussigel2016,\n address = {Brisbane, Australia},\n\ - \ author = {Peter Bussigel},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Ndial Performance},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1287 + abstract: 'Fluid Flows, Transit, and Symbols is a coded composition and re-imagined + sonic poem. 
The primary influence for the design of the piece was pipe flow and + fluid mechanics; specifically the transition from laminar (smooth) flow to turbulent + flow within a pipe. The sounds and sequences are all designed in SuperCollider + and the organization of the composition is composed live and sounds different + with every performance. The reading of the poem is processed through granular + synthesis, creating new sentences amongst the soundscape at an unpredictable rate. + The performance and piece can be adapted to any space and only requires a microphone, + a laptop, and a soundsystem.' + articleno: 1287 + author: Costa K Colachis Glass + bibtex: "@article{nime23-music-1287,\n abstract = {Fluid Flows, Transit, and Symbols\ + \ is a coded composition and re-imagined sonic poem. The primary influence for\ + \ the design of the piece was pipe flow and fluid mechanics; specifically the\ + \ transition from laminar (smooth) flow to turbulent flow within a pipe. The sounds\ + \ and sequences are all designed in SuperCollider and the organization of the\ + \ composition is composed live and sounds different with every performance. The\ + \ reading of the poem is processed through granular synthesis, creating new sentences\ + \ amongst the soundscape at an unpredictable rate. 
The performance and piece can\ + \ be adapted to any space and only requires a microphone, a laptop, and a soundsystem.},\n\ + \ articleno = {1287},\n author = {Costa K Colachis Glass},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 2, Wednesday\ + \ May 31, Biblioteca Vasconcelos},\n title = {Fluid Flows, Transit, and Symbols},\n\ + \ url = {https://www.nime.org/proceedings/2023/nime23_music_1287.pdf},\n urlsuppl1\ + \ = {https://www.nime.org/proceedings/2023/nime23_concert_2.pdf},\n year = {2023}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Ndial Performance - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos' + title: 'Fluid Flows, Transit, and Symbols' + url: https://www.nime.org/proceedings/2023/nime23_music_1287.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_2.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Lawson2016 - address: 'Brisbane, Australia' - author: Shawn Lawson - bibtex: "@inproceedings{nime2016-music-Lawson2016,\n address = {Brisbane, Australia},\n\ - \ author = {Shawn Lawson},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Owego System Trade Routes},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1305 + abstract: 'Survival Kit is a live electroacoustic piece that explores the connection + between textual and musical meanings. It is a revised take on choral music in + the digital era. 
The author experiments with ways to interpret natural language + in computer music and suggests a novel approach to performing text/sound compositions. + The foundation of the piece is a poetic text that lists all the things that may + come to mind amidst a futile preparation for a global disaster. The piece is + performed by a single performer in the live coding manner. The author enters the + text in his original computer music software, which triggers sections of pre-recorded + music and corresponding processing algorithms. All vocals were performed by a + collaborator vocalist (tenor) using a recording score for individual lines, and + then edited and programmed into the software by the author.' + articleno: 1305 + author: Eugene Markin + bibtex: "@article{nime23-music-1305,\n abstract = {Survival Kit is a live electroacoustic\ + \ piece that explores the connection between textual and musical meanings. It\ + \ is a revised take on choral music in the digital era. The author experiments\ + \ with ways to interpret natural language in computer music and suggests a novel\ + \ approach to performing text/sound compositions. The foundation of the piece\ + \ is a poetic text that lists all the things that may come to mind amidst a futile\ + \ preparation for a global disaster. The piece is performed by a single performer\ + \ in the live coding manner. The author enters the text in his original computer\ + \ music software, which triggers sections of pre-recorded music and corresponding\ + \ processing algorithms. 
All vocals were performed by a collaborator vocalist\ + \ (tenor) using a recording score for individual lines, and then edited and programmed\ + \ into the software by the author.},\n articleno = {1305},\n author = {Eugene\ + \ Markin},\n booktitle = {Music Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month =\ + \ {May},\n note = {Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos},\n\ + \ title = {Survival Kit},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1305.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_1.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Owego System Trade Routes - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Tahiroglu2016 - address: 'Brisbane, Australia' - author: Koray Tahiroğlu - bibtex: "@inproceedings{nime2016-music-Tahiroglu2016,\n address = {Brisbane, Australia},\n\ - \ author = {Koray Tahiroğlu},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {KET Conversations},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: KET Conversations - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos' + title: Survival Kit + url: https://www.nime.org/proceedings/2023/nime23_music_1305.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_1.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: 
nime2016-music-NorahLorway2016 - address: 'Brisbane, Australia' - author: 'Norah Lorway, Kiran Bhumber & Nancy Lee' - bibtex: "@inproceedings{nime2016-music-NorahLorway2016,\n address = {Brisbane, Australia},\n\ - \ author = {Norah Lorway, Kiran Bhumber & Nancy Lee},\n booktitle = {Music Proceedings\ +- ENTRYTYPE: article + ID: nime23-music-1315 + abstract: 'Sound composition on the fly that consists of a descent into the training + of a deep learning audio neural network, which will explore with voice the implications + of artificial intelligence from a transhackfeminist ethical perspective in order + to critically look at these tools from the same and thus intervene them from within. + That is, as an algorithmic essay, the piece will explore with voice, the implications + of these technologies taking as text feminist and transfeminist research that + theorize on the subject. The voice will be synthesized and reconstructed as a + means to hack the same networks and the way we understand them.' + articleno: 1315 + author: Marianne Teixido and Emilio Ocelotl + bibtex: "@article{nime23-music-1315,\n abstract = {Sound composition on the fly\ + \ that consists of a descent into the training of a deep learning audio neural\ + \ network, which will explore with voice the implications of artificial intelligence\ + \ from a transhackfeminist ethical perspective in order to critically look at\ + \ these tools from the same and thus intervene them from within. That is, as an\ + \ algorithmic essay, the piece will explore with voice, the implications of these\ + \ technologies taking as text feminist and transfeminist research that theorize\ + \ on the subject. 
The voice will be synthesized and reconstructed as a means to\ + \ hack the same networks and the way we understand them.},\n articleno = {1315},\n\ + \ author = {Marianne Teixido and Emilio Ocelotl},\n booktitle = {Music Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Hollow Vertices},\n year = {2016}\n}\n" + \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 5, Friday June\ + \ 2, Centro de Cultura Digital},\n title = {deep structures},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1315.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Hollow Vertices - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' + title: deep structures + url: https://www.nime.org/proceedings/2023/nime23_music_1315.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Cyngler2016 - address: 'Brisbane, Australia' - author: Richard Cyngler - bibtex: "@inproceedings{nime2016-music-Cyngler2016,\n address = {Brisbane, Australia},\n\ - \ author = {Richard Cyngler},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Music for various groups of performers (after Lucier)},\n year = {2016}\n\ +- ENTRYTYPE: article + ID: nime23-music-1316 + abstract: 'Affordance describes the relationship between the environment and the + 
individual from the action provider’s perspective. Affordance can be false, can + be hidden, or can be perceptible. Within our complex environment, real or virtual, + material or intellectual, the affordances can be functional or delusional, can + be ephemeral or permanent, can be present or delayed – a choice for you to observe, + adapt, participate, and evolve.' + articleno: 1316 + author: Chi Wang + bibtex: "@article{nime23-music-1316,\n abstract = {Affordance describes the relationship\ + \ between the environment and the individual from the action provider’s perspective.\ + \ Affordance can be false, can be hidden, or can be perceptible. Within our complex\ + \ environment, real or virtual, material or intellectual, the affordances can\ + \ be functional or delusional, can be ephemeral or permanent, can be present or\ + \ delayed – a choice for you to observe, adapt, participate, and evolve.},\n articleno\ + \ = {1316},\n author = {Chi Wang},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Online Presentation},\n title = {Transparent Affordance},\n\ + \ url = {https://www.nime.org/proceedings/2023/nime23_music_1316.pdf},\n urlsuppl1\ + \ = {https://www.nime2023.org/program/online-in-person-concerts},\n year = {2023}\n\ }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Music for various groups of performers (after Lucier) - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: Transparent Affordance + url: https://www.nime.org/proceedings/2023/nime23_music_1316.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Miller2016 - address: 'Brisbane, Australia' - author: Amy 
Alexander & Curt Miller - bibtex: "@inproceedings{nime2016-music-Miller2016,\n address = {Brisbane, Australia},\n\ - \ author = {Amy Alexander & Curt Miller},\n booktitle = {Music Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Composition #1 for PIGS (Percussive Image Gestural System)},\n\ - \ year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1340 + abstract: 'Innermost Echoes is a performance work which utilizes performer physiological + data as a novel input mechanism to introduce a new form of hybrid improvisation + alongside a robotic koto which will sonify this data in a communicative feedback + loop and a Eurorack system which will serve as a bridge between the passive physiological + data and the active performance. By introducing this form of input, our improvisational + performance will challenge the traditional approach to live performance by creating + a closed loop between our emotions and the performance itself. In a sense, we + will be improvising with our own presence. We believe this new kind of performative + dialogue can challenge existing hierarchies within live music performances. This + novel performance paradigm seeks to examine new performative dialogues and ideas + on what it means to perform live. Current performance practices are often based + predominantly on the direct communication of the performers through their respective + instruments. When we introduce the performer’s physiology as a gestural language, + we hope to define a new methodology of presence-based improvisation. The performers + wear custom built sensing wristbands and elastic breathing bands around their + chest to gather physiological data consisting of EDA (electrodermal activity), + HRV (heart rate variability), and respiration rate. 
This data is then sent via + OSC to a laptop running Max/MSP which applies this live data to the robotic koto + and the Eurorack system. These data streams and occurrences of synchrony between + the performers’ data are then sonified and used as a structural indicator of the + current state of the performers, thereby forming a new unspoken dialogue between + the two.' + articleno: 1340 + author: Danny Hynds and Aoi Uyama and George Chernyshov and DingDing Zheng and Kozue + Matsumoto and Michael Pogorzhelskiy and Tatsuya Saito and Kai Kunze and Kouta + Minamizawa + bibtex: "@article{nime23-music-1340,\n abstract = {Innermost Echoes is a performance\ + \ work which utilizes performer physiological data as a novel input mechanism\ + \ to introduce a new form of hybrid improvisation alongside a robotic koto which\ + \ will sonify this data in a communicative feedback loop and a Eurorack system\ + \ which will serve as a bridge between the passive physiological data and the\ + \ active performance. By introducing this form of input, our improvisational performance\ + \ will challenge the traditional approach to live performance by creating a closed\ + \ loop between our emotions and the performance itself. In a sense, we will be\ + \ improvising with our own presence. We believe this new kind of performative\ + \ dialogue can challenge existing hierarchies within live music performances.\ + \ This novel performance paradigm seeks to examine new performative dialogues\ + \ and ideas on what it means to perform live. Current performance practices are\ + \ often based predominantly on the direct communication of the performers through\ + \ their respective instruments. 
When we introduce the performer’s physiology as\ + \ a gestural language, we hope to define a new methodology of presence-based improvisation.\ + \ The performers wear custom built sensing wristbands and elastic breathing bands\ + \ around their chest to gather physiological data consisting of EDA (electrodermal\ + \ activity), HRV (heart rate variability), and respiration rate. This data is\ + \ then sent via OSC to a laptop running Max/MSP which applies this live data to\ + \ the robotic koto and the Eurorack system. These data streams and occurrences\ + \ of synchrony between the performers’ data are then sonified and used as a structural\ + \ indicator of the current state of the performers, thereby forming a new unspoken\ + \ dialogue between the two.},\n articleno = {1340},\n author = {Danny Hynds and\ + \ Aoi Uyama and George Chernyshov and DingDing Zheng and Kozue Matsumoto and Michael\ + \ Pogorzhelskiy and Tatsuya Saito and Kai Kunze and Kouta Minamizawa},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ + \ title = {Innermost Echoes},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1340.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: "Composition #1 for PIGS (Percussive Image Gestural System)" - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: Innermost Echoes + url: https://www.nime.org/proceedings/2023/nime23_music_1340.pdf + urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Stewart2016 - address: 'Brisbane, Australia' - 
author: Andrew Stewart - bibtex: "@inproceedings{nime2016-music-Stewart2016,\n address = {Brisbane, Australia},\n\ - \ author = {Andrew Stewart},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Ritual for Karlax},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1389 + abstract: 'Our performance research quintet has been set up to explore multiple + instantiations of DIY electronic musical instruments (EMI) through improvisation. + Our group consists of five highly experienced music improvisers, visual artists + and instrument makers with a shared connection to the Sonic Arts Research Centre + (SARC) at Queen’s University Belfast. Performer-makers in this group have multiple + decades of experience producing work in academic and professional contexts in + Europe, the Americas and the Middle East [websites anonymised, but available upon + request]. We are particularly interested in exploiting irregularities in the + qualities of circuit components (e.g. imprecise tolerances/values), and how this + allows for the development of stylistic differences across multiple instrument-performer + configurations. We are also interested in how skill, style and performance techniques + are developed in different ways on similar devices over extended periods of time, + and how our existing musical practices are reconfigured through such collaborative + exchanges. For this musical performance each performer will use DIY EMI featuring + function generators and wide band noise. The instruments are ‘bent by design’ + (Hordijk 2009) and use ‘withered technologies’(Ott 2020) at their core. These + musical instruments have been selected to promote productive instability whilst + building a timbral playground. 
The DIY instrument ethos includes the publication + of the designs and ‘how to’ instructions to assist other makers in the creation + of their own EMI, especially those who have to adopt a frugal approach to resources. The + aesthetic of our performance is informed by noise and free improvised musics, + and is offered as continuation of ‘thinkering’ (Huhtamo 2011) practice as part + of the history of electronic music experimentation.' + articleno: 1389 + author: Miguel Ortiz and Barry Cullen and Paul Stapleton + bibtex: "@article{nime23-music-1389,\n abstract = {Our performance research quintet\ + \ has been set up to explore multiple instantiations of DIY electronic musical\ + \ instruments (EMI) through improvisation. Our group consists of five highly experienced\ + \ music improvisers, visual artists and instrument makers with a shared connection\ + \ to the Sonic Arts Research Centre (SARC) at Queen’s University Belfast. Performer-makers\ + \ in this group have multiple decades of experience producing work in academic\ + \ and professional contexts in Europe, the Americas and the Middle East [websites\ + \ anonymised, but available upon request]. We are particularly interested in\ + \ exploiting irregularities in the qualities of circuit components (e.g. imprecise\ + \ tolerances/values), and how this allows for the development of stylistic differences\ + \ across multiple instrument-performer configurations. We are also interested\ + \ in how skill, style and performance techniques are developed in different ways\ + \ on similar devices over extended periods of time, and how our existing musical\ + \ practices are reconfigured through such collaborative exchanges. For this\ + \ musical performance each performer will use DIY EMI featuring function generators\ + \ and wide band noise. The instruments are ‘bent by design’ (Hordijk 2009) and\ + \ use ‘withered technologies’(Ott 2020) at their core. 
These musical instruments\ + \ have been selected to promote productive instability whilst building a timbral\ + \ playground. The DIY instrument ethos includes the publication of the designs\ + \ and ‘how to’ instructions to assist other makers in the creation of their own\ + \ EMI, especially those who have to adopt a frugal approach to resources. The\ + \ aesthetic of our performance is informed by noise and free improvised musics,\ + \ and is offered as continuation of ‘thinkering’ (Huhtamo 2011) practice as part\ + \ of the history of electronic music experimentation.},\n articleno = {1389},\n\ + \ author = {Miguel Ortiz and Barry Cullen and Paul Stapleton},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 3, Thursday\ + \ June 1, Centro de Cultura Digital},\n title = {Pandemonium Quintet play Drone\ + \ \\& Drama Versions},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1389.pdf},\n\ + \ urlsuppl1 = {https://www.nime.org/proceedings/2023/nime23_concert_3.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Ritual for Karlax - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Kanga2016 - address: 'Brisbane, Australia' - author: Zubin Kanga - bibtex: "@inproceedings{nime2016-music-Kanga2016,\n address = {Brisbane, Australia},\n\ - \ author = {Zubin Kanga},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Morphosis for piano},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical 
Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Morphosis for piano - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Vickery2016 - address: 'Brisbane, Australia' - author: Lindsay Vickery - bibtex: "@inproceedings{nime2016-music-Vickery2016,\n address = {Brisbane, Australia},\n\ - \ author = {Lindsay Vickery},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Detritus (2015)},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Detritus (2015) - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Romain2016 - address: 'Brisbane, Australia' - author: Michon Romain - bibtex: "@inproceedings{nime2016-music-Romain2016,\n address = {Brisbane, Australia},\n\ - \ author = {Michon Romain},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {A Minor Chord for BladeAxe},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: A Minor Chord for BladeAxe - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 3, Thursday June 1, Centro de Cultura Digital' + title: Pandemonium Quintet play Drone & Drama Versions + url: https://www.nime.org/proceedings/2023/nime23_music_1389.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_3.pdf + year: 2023 -- ENTRYTYPE: 
inproceedings - ID: nime2016-music-Carroll2016 - address: 'Brisbane, Australia' - author: Nicole Carroll - bibtex: "@inproceedings{nime2016-music-Carroll2016,\n address = {Brisbane, Australia},\n\ - \ author = {Nicole Carroll},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Everything In Its Place},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1392 + abstract: 'Laser Phase Synthesis [XXI VII III I] is an audiovisual performance informed + by the historical Audio/Video/Laser system developed by Lowell Cross and Carson + Jeffries for use by David Tudor and Experiments in Arts and Technology (E.A.T.) + at the 1970 Japan World Exposition in Osaka, Japan. The current work employs digital + audio synthesis, modern laser display technology, and close collaboration between + sound and image composition to illustrate the harmonic progression of a musical + work.' + articleno: 1392 + author: Derek Holzer and Luka Aron + bibtex: "@article{nime23-music-1392,\n abstract = {Laser Phase Synthesis [XXI VII\ + \ III I] is an audiovisual performance informed by the historical Audio/Video/Laser\ + \ system developed by Lowell Cross and Carson Jeffries for use by David Tudor\ + \ and Experiments in Arts and Technology (E.A.T.) at the 1970 Japan World Exposition\ + \ in Osaka, Japan. 
The current work employs digital audio synthesis, modern laser\ + \ display technology, and close collaboration between sound and image composition\ + \ to illustrate the harmonic progression of a musical work.},\n articleno = {1392},\n\ + \ author = {Derek Holzer and Luka Aron},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Rob Hamilton},\n month = {May},\n note = {Live Concert 2, Wednesday May 31,\ + \ Biblioteca Vasconcelos},\n title = {Laser Phase Synthesis [XXI VII III I]},\n\ + \ url = {https://www.nime.org/proceedings/2023/nime23_music_1392.pdf},\n urlsuppl1\ + \ = {https://www.nime.org/proceedings/2023/nime23_concert_2.pdf},\n year = {2023}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Everything In Its Place - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos' + title: 'Laser Phase Synthesis [XXI VII III I]' + url: https://www.nime.org/proceedings/2023/nime23_music_1392.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_2.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Kim2016 - address: 'Brisbane, Australia' - author: Jonghyun Kim - bibtex: "@inproceedings{nime2016-music-Kim2016,\n address = {Brisbane, Australia},\n\ - \ author = {Jonghyun Kim},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Live Performance for Leappmotion},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1393 + abstract: '4 Disklavier Preludes is one of the main works in The Gedanken Room (2021). 
+ This is a work that explores the implications of Quantum Computing for Music composition, + both conceptually and practically. Its 4 parts explore the use of the Disklavier + both as an input and output interface for building Quantum Circuits and retrieving + its measurements, in a live interactive multimedia environment with which live + performers interact. The cinematographic narrative addresses utopian/dystopian + issues in human-machine interaction.' + articleno: 1393 + author: Omar C Hamido + bibtex: "@article{nime23-music-1393,\n abstract = {4 Disklavier Preludes is one\ + \ of the main works in The Gedanken Room (2021). This is a work that explores\ + \ the implications of Quantum Computing for Music composition, both conceptually\ + \ and practically. Its 4 parts explore the use of the Disklavier both as an input\ + \ and output interface for building Quantum Circuits and retrieving its measurements,\ + \ in a live interactive multimedia environment with which live performers interact.\ + \ The cinematographic narrative addresses utopian/dystopian issues in human-machine\ + \ interaction.},\n articleno = {1393},\n author = {Omar C Hamido},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ + \ title = {4 Disklavier Preludes},\n url = {https://www.nime.org/proceedings/2023/nime23_music_1393.pdf},\n\ + \ urlsuppl1 = {https://www.nime2023.org/program/online-in-person-concerts},\n\ + \ year = {2023}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Live Performance for Leappmotion - year: 2016 + editor: Rob Hamilton + month: May + note: Online Presentation + title: 4 Disklavier Preludes + url: https://www.nime.org/proceedings/2023/nime23_music_1393.pdf + 
urlsuppl1: https://www.nime2023.org/program/online-in-person-concerts + year: 2023 -- ENTRYTYPE: inproceedings - ID: nime2016-music-Beck2016 - address: 'Brisbane, Australia' - author: Stephen David Beck and Scott Smallwood - bibtex: "@inproceedings{nime2016-music-Beck2016,\n address = {Brisbane, Australia},\n\ - \ author = {Stephen David Beck and Scott Smallwood},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {From Uganda\" Mara Helmuth},\n year = {2016}\n}\n" +- ENTRYTYPE: article + ID: nime23-music-1394 + abstract: 'Finger Breath, for performer, live electronics, and zither, was originally + commissioned by the the Frontside International Chamber Music Festival, funded + by a grant from the Swedish Arts Council, and premiered in January 2023 as a headphone + performance in the belly of a small passenger ferry. The main concepts behind + the work are three: First, the intimate sounds from the musicians breathing, and + from his fingers on the strings of an ancient zither. Second, the idea that the + live breathing and the musician’s sounds played by finger movements are the only + sources of gestural control and expression in the piece. Breathing and finger + movements form the basis of many musical expressions throughout the world, as + they are our most intimate physiological and gestural bodily mechanisms. Third, + the combination of the first two into a situation of “entangled musicianship”, + where each action has triple consequences: as a sound source to be heard live, + as a sound source being fed to various buffers for later manipulation and playback, + but also as a source of gestural control, affecting a variety of playback mechanisms + for the buffered sounds. It is thus impossible to play something without also + altering the conditions for future playing. 
Hence the entanglement.' + articleno: 1394 + author: Palle Dahlstedt + bibtex: "@article{nime23-music-1394,\n abstract = {Finger Breath, for performer,\ + \ live electronics, and zither, was originally commissioned by the the Frontside\ + \ International Chamber Music Festival, funded by a grant from the Swedish Arts\ + \ Council, and premiered in January 2023 as a headphone performance in the belly\ + \ of a small passenger ferry. The main concepts behind the work are three: First,\ + \ the intimate sounds from the musicians breathing, and from his fingers on the\ + \ strings of an ancient zither. Second, the idea that the live breathing and the\ + \ musician’s sounds played by finger movements are the only sources of gestural\ + \ control and expression in the piece. Breathing and finger movements form the\ + \ basis of many musical expressions throughout the world, as they are our most\ + \ intimate physiological and gestural bodily mechanisms. Third, the combination\ + \ of the first two into a situation of “entangled musicianship”, where each action\ + \ has triple consequences: as a sound source to be heard live, as a sound source\ + \ being fed to various buffers for later manipulation and playback, but also as\ + \ a source of gestural control, affecting a variety of playback mechanisms for\ + \ the buffered sounds. It is thus impossible to play something without also altering\ + \ the conditions for future playing. 
Hence the entanglement.},\n articleno = {1394},\n\ + \ author = {Palle Dahlstedt},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ + \ month = {May},\n note = {Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos},\n\ + \ title = {Finger Breath – Material and control through intimate sounds},\n url\ + \ = {https://www.nime.org/proceedings/2023/nime23_music_1394.pdf},\n urlsuppl1\ + \ = {https://www.nime.org/proceedings/2023/nime23_concert_2.pdf},\n year = {2023}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: From Uganda" Mara Helmuth - year: 2016 + editor: Rob Hamilton + month: May + note: 'Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos' + title: Finger Breath – Material and control through intimate sounds + url: https://www.nime.org/proceedings/2023/nime23_music_1394.pdf + urlsuppl1: https://www.nime.org/proceedings/2023/nime23_concert_2.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: nime2016-music-Carey2016 - address: 'Brisbane, Australia' - author: Zubin Kanga & Benjiman Carey - bibtex: "@inproceedings{nime2016-music-Carey2016,\n address = {Brisbane, Australia},\n\ - \ author = {Zubin Kanga & Benjiman Carey},\n booktitle = {Music Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Taking the Auspices},\n year = {2016}\n}\n" + ID: nime20-music-Tonagel + abstract: 'The Cologne based ladies'' quartet 120 DEN, founded in 2019, plays with + modified mannequin legs, which become independent electronic instruments through + guitar strings, contact microphones and built-in synthesizer elements. 
The resulting + sounds range from subtle caresses to overflowing tapestries of sound, to knee-jerked + death metal passages and conceptual electronic textures. The experimental leg + sound is of course also supported orally.' + address: 'Birmingham, UK' + author: 'Tonagel, Tina and Crumbach, Conny and Grundmann Gesine and Britta Fehrmann' + bibtex: "@inproceedings{nime20-music-Tonagel,\n abstract = {The Cologne based ladies'\ + \ quartet 120 DEN, founded in 2019, plays with modified mannequin legs, which\ + \ become independent electronic instruments through guitar strings, contact microphones\ + \ and built-in synthesizer elements. The resulting sounds range from subtle caresses\ + \ to overflowing tapestries of sound, to knee-jerked death metal passages and\ + \ conceptual electronic textures. The experimental leg sound is of course also\ + \ supported orally.},\n address = {Birmingham, UK},\n author = {Tonagel, Tina\ + \ and Crumbach, Conny and Grundmann Gesine and Britta Fehrmann},\n booktitle =\ + \ {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.6350588},\n editor = {Wright, Joe and Feng,\ + \ Jian},\n month = {July},\n pages = {1-3},\n publisher = {Royal Birmingham Conservatoire},\n\ + \ title = {4 Women, 12 Legs. 
120 DEN!},\n url = {http://www.nime.org/proceedings/2020/nime2020_music01.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Taking the Auspices - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Drummond2016 - address: 'Brisbane, Australia' - author: Jon Drummond - bibtex: "@inproceedings{nime2016-music-Drummond2016,\n address = {Brisbane, Australia},\n\ - \ author = {Jon Drummond},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Light Traces},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Light Traces - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Sniff2016 - address: 'Brisbane, Australia' - author: Dj Sniff - bibtex: "@inproceedings{nime2016-music-Sniff2016,\n address = {Brisbane, Australia},\n\ - \ author = {Dj Sniff},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Andrew Brown and Toby\ - \ Gifford},\n month = {June},\n publisher = {Griffith University},\n title = {Live\ - \ performance},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Live performance - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Paine2016 - abstract: 'About the performer: Garth is particularly fascinated with sound as an - experiential medium, both in 
musical performance and as an exhibitable object. - This passion has led to several interactive responsive environments where the - inhabitant generates the sonic landscape through their presence and behaviour. - Garth has composed several music scores for dance generated through video tracking - of the choreography, and more recently using Bio-Sensing on the dancers body. - His immersive interactive environments have been exhibited in Australia, Europe, - Japan, USA, Canada, UK, Hong Kong and New Zealand. Garth Paine is internationally - regarded as an innovator in the field of interactivity in electronic music and - media arts (some papers here). He gained his PhD in interactive immersive environments - from the Royal Melbourne Institute of Technology, Australia in 2003, and completed - a Graduate Diploma in software engineering in the following year at Swinburne - University. All a long way from his Bachelor of classical Flute performance from - the conservatorium of Tasmania. Garth is Associate Professor in Digital Sound - and Interactive Media at the School of Arts Media + Engineering at Arizona State - University in the USA. His previous post was as Associate Professor of Sound Technologies - at the University of Western Sydney, where he established the Virtual, Interactive, - Performance Research environment (VIPRe) . He is often invited to run workshops - on interactivity for musical performance and commissioned to develop interactive - system for realtime musical composition for dance and theatre performances.' - address: 'Brisbane, Australia' - author: Garth Paine - bibtex: "@inproceedings{nime2016-music-Paine2016,\n abstract = {About the performer:\ - \ Garth is particularly fascinated with sound as an experiential medium, both\ - \ in musical performance and as an exhibitable object. This passion has led to\ - \ several interactive responsive environments where the inhabitant generates the\ - \ sonic landscape through their presence and behaviour. 
Garth has composed several\ - \ music scores for dance generated through video tracking of the choreography,\ - \ and more recently using Bio-Sensing on the dancers body. His immersive interactive\ - \ environments have been exhibited in Australia, Europe, Japan, USA, Canada, UK,\ - \ Hong Kong and New Zealand. Garth Paine is internationally regarded as an innovator\ - \ in the field of interactivity in electronic music and media arts (some papers\ - \ here). He gained his PhD in interactive immersive environments from the Royal\ - \ Melbourne Institute of Technology, Australia in 2003, and completed a Graduate\ - \ Diploma in software engineering in the following year at Swinburne University.\ - \ All a long way from his Bachelor of classical Flute performance from the conservatorium\ - \ of Tasmania. Garth is Associate Professor in Digital Sound and Interactive Media\ - \ at the School of Arts Media + Engineering at Arizona State University in the\ - \ USA. His previous post was as Associate Professor of Sound Technologies at the\ - \ University of Western Sydney, where he established the Virtual, Interactive,\ - \ Performance Research environment (VIPRe) . 
He is often invited to run workshops\ - \ on interactivity for musical performance and commissioned to develop interactive\ - \ system for realtime musical composition for dance and theatre performances.},\n\ - \ address = {Brisbane, Australia},\n author = {Garth Paine},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Bown2016 - address: 'Brisbane, Australia' - author: Oliver Bown - bibtex: "@inproceedings{nime2016-music-Bown2016,\n address = {Brisbane, Australia},\n\ - \ author = {Oliver Bown},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {DIADs - The Ford Transit…},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: DIADs - The Ford Transit… - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Cantrell2016 - address: 'Brisbane, Australia' - author: Joe Cantrell - bibtex: "@inproceedings{nime2016-music-Cantrell2016,\n address = {Brisbane, Australia},\n\ - \ author = {Joe Cantrell},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Blackbox Loops},\n year = {2016}\n}\n" - 
booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Blackbox Loops - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Pfalz2016 - address: 'Brisbane, Australia' - author: Andrew Pfalz - bibtex: "@inproceedings{nime2016-music-Pfalz2016,\n address = {Brisbane, Australia},\n\ - \ author = {Andrew Pfalz},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ - \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ - \ title = {Of Grating Imperma},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Of Grating Imperma - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime2016-music-Knowles2016 - address: 'Brisbane, Australia' - author: Donna Hewitt & Julian Knowles - bibtex: "@inproceedings{nime2016-music-Knowles2016,\n address = {Brisbane, Australia},\n\ - \ author = {Donna Hewitt & Julian Knowles},\n booktitle = {Music Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ - \ University},\n title = {Doppelgänger³ Macrophonics²},\n year = {2016}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Andrew Brown and Toby Gifford - month: June - publisher: Griffith University - title: Doppelgänger³ Macrophonics² - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: nime20-music-Tonagel - abstract: 'The Cologne based ladies'' quartet 120 DEN, founded in 2019, plays with - modified mannequin legs, which become independent electronic instruments 
through - guitar strings, contact microphones and built-in synthesizer elements. The resulting - sounds range from subtle caresses to overflowing tapestries of sound, to knee-jerked - death metal passages and conceptual electronic textures. The experimental leg - sound is of course also supported orally.' - address: 'Birmingham, UK' - author: 'Tonagel, Tina and Crumbach, Conny and Grundmann Gesine and Britta Fehrmann' - bibtex: "@inproceedings{nime20-music-Tonagel,\n abstract = {The Cologne based ladies'\ - \ quartet 120 DEN, founded in 2019, plays with modified mannequin legs, which\ - \ become independent electronic instruments through guitar strings, contact microphones\ - \ and built-in synthesizer elements. The resulting sounds range from subtle caresses\ - \ to overflowing tapestries of sound, to knee-jerked death metal passages and\ - \ conceptual electronic textures. The experimental leg sound is of course also\ - \ supported orally.},\n address = {Birmingham, UK},\n author = {Tonagel, Tina\ - \ and Crumbach, Conny and Grundmann Gesine and Britta Fehrmann},\n booktitle =\ - \ {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.6350588},\n editor = {Wright, Joe and Feng,\ - \ Jian},\n month = {July},\n pages = {1-3},\n publisher = {Royal Birmingham Conservatoire},\n\ - \ title = {4 Women, 12 Legs. 120 DEN!},\n url = {http://www.nime.org/proceedings/2020/nime2020_music01.pdf},\n\ - \ year = {2020}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - doi: 10.5281/zenodo.6350588 - editor: 'Wright, Joe and Feng, Jian' - month: July - pages: 1-3 - publisher: Royal Birmingham Conservatoire - title: '4 Women, 12 Legs. 120 DEN!' 
- url: http://www.nime.org/proceedings/2020/nime2020_music01.pdf - year: 2020 + doi: 10.5281/zenodo.6350588 + editor: 'Wright, Joe and Feng, Jian' + month: July + pages: 1-3 + publisher: Royal Birmingham Conservatoire + title: '4 Women, 12 Legs. 120 DEN!' + url: http://www.nime.org/proceedings/2020/nime2020_music01.pdf + year: 2020 - ENTRYTYPE: inproceedings @@ -2723,6391 +3146,5643 @@ - ENTRYTYPE: inproceedings - ID: nime2011-music-Troyer2011 - abstract: "Program notes:\n\nIn LOLC, the musicians in the laptop orchestra use\ - \ a textual performance interface, developed specifically for this piece, to create\ - \ and share rhythmic motives based on a collection of recorded sounds. The environment\ - \ encourages musicians to share their code with each other, developing an improvisational\ - \ conversation over time as material is looped, borrowed, and transformed. LOLC\ - \ was originally created by Akito van Troyer and Jason Freeman and is in active\ - \ development at the Georgia Tech Center for Music Technology by Jason Freeman,\ - \ Andrew Colella, Sang Won Lee and Shannon Yao. LOLC is supported by a grant from\ - \ the National Science Foundation as part of a larger research project on musical\ - \ improvisation in performance and education (NSF CreativeIT#0855758).\n\nAbout\ - \ the performers:\n\nAaron Albin, Andrew Colella, Sertan Sentürk and Sang Won\ - \ Lee are current degree candidates or alumni from the Georgia Tech Center for\ - \ Music Technology. All are focused on exploring new methods of musical interactivity\ - \ through projects that involve current technology such as the Kinect, swarm robots,\ - \ creative video games, and current MIR techniques." 
- address: 'Oslo, Norway' - author: Akito van Troyer and Jason Freeman and Avinash Sastry and Sang Won Lee and - Shannon Yao - bibtex: "@inproceedings{nime2011-music-Troyer2011,\n abstract = {Program notes:\n\ - \nIn LOLC, the musicians in the laptop orchestra use a textual performance interface,\ - \ developed specifically for this piece, to create and share rhythmic motives\ - \ based on a collection of recorded sounds. The environment encourages musicians\ - \ to share their code with each other, developing an improvisational conversation\ - \ over time as material is looped, borrowed, and transformed. LOLC was originally\ - \ created by Akito van Troyer and Jason Freeman and is in active development at\ - \ the Georgia Tech Center for Music Technology by Jason Freeman, Andrew Colella,\ - \ Sang Won Lee and Shannon Yao. LOLC is supported by a grant from the National\ - \ Science Foundation as part of a larger research project on musical improvisation\ - \ in performance and education (NSF CreativeIT#0855758).\n\nAbout the performers:\n\ - \nAaron Albin, Andrew Colella, Sertan Sentürk and Sang Won Lee are current degree\ - \ candidates or alumni from the Georgia Tech Center for Music Technology. 
All\ - \ are focused on exploring new methods of musical interactivity through projects\ - \ that involve current technology such as the Kinect, swarm robots, creative video\ - \ games, and current MIR techniques.},\n address = {Oslo, Norway},\n author =\ - \ {Akito van Troyer and Jason Freeman and Avinash Sastry and Sang Won Lee and\ - \ Shannon Yao},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ - \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ - \ title = {LOLC},\n url = {https://vimeo.com/26678685},\n year = {2011}\n}\n" + ID: nime19-music-Rust + abstract: 'Bad Mother / Good Mother is an audiovisual performance involving a projection, + a modified electronic breast pump as a sound generator, and a sound- reactive + LED pumping costume. The project has four songs that critically explore technologies + directed specifically at women like breast pumps and fertility extending treatments + such as egg-freezing (social freezing). Depending on the song, the breast pump + is either a solo instrument or part of an arrangement. The idea is to use workplace + lactation as a departure point to uncover a web of societal politics and pre-conceived + perceptions (pun intended) of ideal and non-ideal motherhood.' + address: 'Porto Alegre, Brazil' + author: Anna Rüst + bibtex: "@inproceedings{nime19-music-Rust,\n abstract = {Bad Mother / Good Mother\ + \ is an audiovisual performance involving a projection, a modified electronic\ + \ breast pump as a sound generator, and a sound- reactive LED pumping costume.\ + \ The project has four songs that critically explore technologies directed specifically\ + \ at women like breast pumps and fertility extending treatments such as egg-freezing\ + \ (social freezing). Depending on the song, the breast pump is either a solo instrument\ + \ or part of an arrangement. 
The idea is to use workplace lactation as a departure\ + \ point to uncover a web of societal politics and pre-conceived perceptions (pun\ + \ intended) of ideal and non-ideal motherhood.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Anna R{\\\"u}st},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n\ + \ month = {June},\n pages = {8--10},\n publisher = {UFRGS},\n title = {Bad Mother\ + \ / Good Mother - an audiovisual performance},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: LOLC - url: https://vimeo.com/26678685 - year: 2011 + pages: 8--10 + publisher: UFRGS + title: Bad Mother / Good Mother - an audiovisual performance + url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Brandtsegg2011 - abstract: "Program notes:\n\nThe duo Little Soldier Joe uses percussion and live\ - \ processing to explore thematic and textural ideas that arise in the improvised\ - \ interplay between these two performers. LSJ uses live sampling and manipulation\ - \ matter-of-factly as an established manner of music making. The audio manipulation\ - \ techniques used are based on recent developments in particle synthesis.\n\n\ - About the performers:\n\nØyvind Brandtsegg: Composer, musician and professor in\ - \ music technology at NTNU. His focus lies in Compositionally Enabled Instruments,\ - \ Particle Synthesis and sound installations. Øyvind has performed with the groups\ - \ Krøyt and Motorpsycho, written music for interactive dance, theatre and TV,\ - \ and worked as a programmer for other artists. 
His latest effort in music software\ - \ programming is the “Hadron Particle Synthesizer”, to be released as a device\ - \ for “Ableton Live”' and as a VST plug-in.\n\nCarl Haakon Waadeland: Musician,\ - \ composer and professor in music at NTNU. His main scientific interest lies within\ - \ empirical rhythm research and the construction of models that simulate rhythm\ - \ performance. Waadeland has performed and recorded amongst others with Gary Holton\ - \ & Casino Steel, Warne Marsh, Siris Svale Band, Mikis Theodorakis & Arja Saijonmaa,\ - \ Dadafon, and Rasmus og Verdens Beste Band. Waadeland published a book and CD\ - \ on the Norwegian folk drum tradition in 2008." - address: 'Oslo, Norway' - author: Øyvind Brandtsegg and Carl Haakon Waadeland - bibtex: "@inproceedings{nime2011-music-Brandtsegg2011,\n abstract = {Program notes:\n\ - \nThe duo Little Soldier Joe uses percussion and live processing to explore thematic\ - \ and textural ideas that arise in the improvised interplay between these two\ - \ performers. LSJ uses live sampling and manipulation matter-of-factly as an established\ - \ manner of music making. The audio manipulation techniques used are based on\ - \ recent developments in particle synthesis.\n\nAbout the performers:\n\nØyvind\ - \ Brandtsegg: Composer, musician and professor in music technology at NTNU. His\ - \ focus lies in Compositionally Enabled Instruments, Particle Synthesis and sound\ - \ installations. Øyvind has performed with the groups Krøyt and Motorpsycho, written\ - \ music for interactive dance, theatre and TV, and worked as a programmer for\ - \ other artists. His latest effort in music software programming is the “Hadron\ - \ Particle Synthesizer”, to be released as a device for “Ableton Live”' and as\ - \ a VST plug-in.\n\nCarl Haakon Waadeland: Musician, composer and professor in\ - \ music at NTNU. 
His main scientific interest lies within empirical rhythm research\ - \ and the construction of models that simulate rhythm performance. Waadeland has\ - \ performed and recorded amongst others with Gary Holton & Casino Steel, Warne\ - \ Marsh, Siris Svale Band, Mikis Theodorakis & Arja Saijonmaa, Dadafon, and Rasmus\ - \ og Verdens Beste Band. Waadeland published a book and CD on the Norwegian folk\ - \ drum tradition in 2008.},\n address = {Oslo, Norway},\n author = {Øyvind Brandtsegg\ - \ and Carl Haakon Waadeland},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Kjell Tore\ - \ Innervik and Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy\ - \ of Music},\n title = {Little Soldier Joe},\n url = {https://vimeo.com/26680018},\n\ - \ year = {2011}\n}\n" + ID: nime19-music-DAlessandro + abstract: 'Borrowed voices is a performance featuring performative voice synthesis, + with two types of instruments: C-Voks and T-Voks. The voices are played a cappella + in a double choir of natural and synthetic voices. Performative singing synthesis + is a new paradigm in the already long history of artificial voices. The singing + voice is played like an instrument, allowing singing with the borrowed voice of + another. The relationship of embodiment between the singer''s gestures and the + vocal sound produced is broken. A voice is singing, with realism, expressivity + and musicality, but it is not the musician''s own voice, and a vocal apparatus + does not control it. The project focuses on control gestures: the music explores + vocal sounds produced by the vocal apparatus (the basic sound material), and “played” + by the natural voice, by free-hand Theremin-controlled gestures, and by writing + gestures on a graphic tablet. The same (types of) sounds but different gestures + give different musical “instruments” and expressive possibilities. 
Another interesting + aspect is the distance between synthetic voices and the player, the voice being + at the same time embodied (by the player gestures playing the instrument with + her/his body) and externalized (because the instrument is not her/his own voice): + two different voices sung/played by the same person.' + address: 'Porto Alegre, Brazil' + author: Christophe D'Alessandro and Xiao Xiao and Grégoire Locqueville and Boris + Doval + bibtex: "@inproceedings{nime19-music-DAlessandro,\n abstract = {Borrowed voices\ + \ is a performance featuring performative voice synthesis, with two types of instruments:\ + \ C-Voks and T-Voks. The voices are played a cappella in a double choir of natural\ + \ and synthetic voices. Performative singing synthesis is a new paradigm in the\ + \ already long history of artificial voices. The singing voice is played like\ + \ an instrument, allowing singing with the borrowed voice of another. The relationship\ + \ of embodiment between the singer's gestures and the vocal sound produced is\ + \ broken. A voice is singing, with realism, expressivity and musicality, but it\ + \ is not the musician's own voice, and a vocal apparatus does not control it.\ + \ The project focuses on control gestures: the music explores vocal sounds produced\ + \ by the vocal apparatus (the basic sound material), and “played” by the natural\ + \ voice, by free-hand Theremin-controlled gestures, and by writing gestures on\ + \ a graphic tablet. The same (types of) sounds but different gestures give different\ + \ musical “instruments” and expressive possibilities. 
Another interesting aspect\ + \ is the distance between synthetic voices and the player, the voice being at\ + \ the same time embodied (by the player gestures playing the instrument with her/his\ + \ body) and externalized (because the instrument is not her/his own voice): two\ + \ different voices sung/played by the same person.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Christophe D'Alessandro and Xiao Xiao and Grégoire Locqueville\ + \ and Boris Doval},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n month\ + \ = {June},\n pages = {11--14},\n publisher = {UFRGS},\n title = {Borrowed Voices},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Little Soldier Joe - url: https://vimeo.com/26680018 - year: 2011 + pages: 11--14 + publisher: UFRGS + title: Borrowed Voices + url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Lopez2011 - abstract: "Program notes:\n\nThe Reactable was conceived in 2003 and was first presented\ - \ at the International Computer Music Conference (ICMC) 2005 in Barcelona. Since\ - \ then, the Reactable team has given more than 300 presentations and concerts\ - \ in 40 countries, turning it into one of the most worldwide acclaimed new musical\ - \ instruments of the 21st century. 
Since 2009, the Barcelona spin-off company\ - \ Reactable Systems has been producing several Reactable models, such as the Reactable\ - \ Live for traveling musicians and DJs, or its latest incarnation, Reactable mobile\ - \ for Apple's iPhones and iPads.\n\nAbout the performers:\n\nCarles López: Musician,\ - \ producer and DJ born in Barcelona. López has been playing with the Reactable\ - \ for the last three years. With this instrument he has performed in more than\ - \ 40 countries, at all kinds of events, clubs and festivals. López also works\ - \ as a composer for films and contemporary dance." - address: 'Oslo, Norway' - author: Carles López - bibtex: "@inproceedings{nime2011-music-Lopez2011,\n abstract = {Program notes:\n\ - \nThe Reactable was conceived in 2003 and was first presented at the International\ - \ Computer Music Conference (ICMC) 2005 in Barcelona. Since then, the Reactable\ - \ team has given more than 300 presentations and concerts in 40 countries, turning\ - \ it into one of the most worldwide acclaimed new musical instruments of the 21st\ - \ century. Since 2009, the Barcelona spin-off company Reactable Systems has been\ - \ producing several Reactable models, such as the Reactable Live for traveling\ - \ musicians and DJs, or its latest incarnation, Reactable mobile for Apple's iPhones\ - \ and iPads.\n\nAbout the performers:\n\nCarles López: Musician, producer and\ - \ DJ born in Barcelona. López has been playing with the Reactable for the last\ - \ three years. With this instrument he has performed in more than 40 countries,\ - \ at all kinds of events, clubs and festivals. 
López also works as a composer\ - \ for films and contemporary dance.},\n address = {Oslo, Norway},\n author = {Carles\ - \ López},\n booktitle = {Music Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ - \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ - \ title = {Reactable},\n url = {https://vimeo.com/26678704},\n year = {2011}\n\ - }\n" + ID: nime19-music-Dooley + abstract: 'colligation (to bring or tie together) is a physical performance work + for one performer that explores the idea of sculpting sound through gesture. Treating + sound as if it were a tangible object capable of being fashioned into new sonic + forms, "pieces" of sound are captured, shaped and sculpted by the performer''s + hand and arm gestures, appearing pliable as they are thrown around and transformed + into new sonic material. colligation uses two Thalmic Labs Myo armbands, one placed + on the left arm and the other on the right arm. The Myo Mapper [1] software is + used to transmit scaled data via OSC from the armbands to Pure Data. Positional + (yaw, pitch and roll) and electromyographic data (EMG) from the devices are mapped + to parameters controlling a hybrid synth created in Pure Data. The synth utilises + a combination of Phase Aligned Formant synthesis [2] and Frequency Modulation + synthesis [3] to allow a range of complex audio spectra to be explored. Pitch, + yaw and roll data from the left Myo are respectively mapped to the PAF synth''s + carrier frequency (ranging from 8.175-12543.9Hz), bandwidth and relative centre + frequency. 
Pitch, yaw and roll data from the right Myo are respectively mapped + to FM modulation frequency (relative to and ranging from 0.01-10 times the PAF + carrier frequency), modulation depth (relative to and ranging from 0.01-10 times + the PAF carrier frequency), and modulation wave shape (crossfading between sine, + triangle, square, rising sawtooth and impulse). Data from the left and right Myo''s + EMG sensors are mapped respectively to amplitude control of the left and right + audio channels, giving the performer control over the level and panning of the + audio within the stereo field. By employing both positional and bio data, an embodied + relationship between action and response is created; the gesture and the resulting + sonic transformation become inextricably entwined.' + address: 'Porto Alegre, Brazil' + author: James Dooley + bibtex: "@inproceedings{nime19-music-Dooley,\n abstract = {colligation (to bring\ + \ or tie together) is a physical performance work for one performer that explores\ + \ the idea of sculpting sound through gesture. Treating sound as if it were a\ + \ tangible object capable of being fashioned into new sonic forms, \"pieces\"\ + \ of sound are captured, shaped and sculpted by the performer's hand and arm gestures,\ + \ appearing pliable as they are thrown around and transformed into new sonic material.\ + \ colligation uses two Thalmic Labs Myo armbands, one placed on the left arm and\ + \ the other on the right arm. The Myo Mapper [1] software is used to transmit\ + \ scaled data via OSC from the armbands to Pure Data. Positional (yaw, pitch and\ + \ roll) and electromyographic data (EMG) from the devices are mapped to parameters\ + \ controlling a hybrid synth created in Pure Data. The synth utilises a combination\ + \ of Phase Aligned Formant synthesis [2] and Frequency Modulation synthesis [3]\ + \ to allow a range of complex audio spectra to be explored. 
Pitch, yaw and roll\ + \ data from the left Myo are respectively mapped to the PAF synth's carrier frequency\ + \ (ranging from 8.175-12543.9Hz), bandwidth and relative centre frequency. Pitch,\ + \ yaw and roll data from the right Myo are respectively mapped to FM modulation\ + \ frequency (relative to and ranging from 0.01-10 times the PAF carrier frequency),\ + \ modulation depth (relative to and ranging from 0.01-10 times the PAF carrier\ + \ frequency), and modulation wave shape (crossfading between sine, triangle, square,\ + \ rising sawtooth and impulse). Data from the left and right Myo's EMG sensors\ + \ are mapped respectively to amplitude control of the left and right audio channels,\ + \ giving the performer control over the level and panning of the audio within\ + \ the stereo field. By employing both positional and bio data, an embodied relationship\ + \ between action and response is created; the gesture and the resulting sonic\ + \ transformation become inextricably entwined.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {James Dooley},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n\ + \ month = {June},\n pages = {15-16},\n publisher = {UFRGS},\n title = {colligation},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_music003.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Reactable - url: https://vimeo.com/26678704 - year: 2011 + pages: 15-16 + publisher: UFRGS + title: colligation + url: http://www.nime.org/proceedings/2019/nime2019_music003.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Selle2011 - abstract: "Program notes:\n\nLicht & Hiebe (2010) is the first concert piece for\ - \ the new 
Instrument: The ``Hexenkessel'' (witch's cauldron) is a modified 22\"\ - \ timpani that uses LLP technology to turn the drumhead into an intuitive multitouch-interface\ - \ for the control of live-electronics & dmx-stage-lights. The multitouch technique\ - \ goes into symbiosis with a traditional instrument, keeping its acoustic qualities,\ - \ but opening it to the vast possibilities of interactive multimedia. Besides\ - \ the control of live-electronics the instrument features an interface to dmx-controlled\ - \ stage-lights to create a situation of intense intermedial fireworks entirely\ - \ controlled by the performer. The parts needed for this non-destructive timpani-hack\ - \ cost less than $500.\n\nAbout the performers:\n\nJacob Sello (1976, Hamburg/Germany)\ - \ studied Audio Engineering, Systematic Musicology and Multimedia Composition\ - \ in Hamburg. He is highly interested in the exciting possibilities that arise\ - \ from the conjunction of traditional acoustic instruments and state-of-the-art\ - \ technology. Pieces for clarinet controlled RC- helicopters or DJ-driven pneumatically\ - \ prepared disklavier pieces are the outcome.\n\nStefan Weinzierl (1985, Günzburg/Germany)\ - \ is constantly searching for fascinating challenges beyond genre-boundaries;\ - \ as a drummer in contemporary solo performances, classical ensembles and orchestras\ - \ as well as in Jazz- and Rock/Pop bands. He graduated in educational sciences\ - \ in Regensburg and completed the Percussion Master program at the HfMT Hamburg\ - \ in 2010." 
- address: 'Oslo, Norway' - author: Jacob Selle and Stefan Weinzierl - bibtex: "@inproceedings{nime2011-music-Selle2011,\n abstract = {Program notes:\n\ - \nLicht & Hiebe (2010) is the first concert piece for the new Instrument: The\ - \ ``Hexenkessel'' (witch's cauldron) is a modified 22\" timpani that uses LLP\ - \ technology to turn the drumhead into an intuitive multitouch-interface for the\ - \ control of live-electronics \\& dmx-stage-lights. The multitouch technique goes\ - \ into symbiosis with a traditional instrument, keeping its acoustic qualities,\ - \ but opening it to the vast possibilities of interactive multimedia. Besides\ - \ the control of live-electronics the instrument features an interface to dmx-controlled\ - \ stage-lights to create a situation of intense intermedial fireworks entirely\ - \ controlled by the performer. The parts needed for this non-destructive timpani-hack\ - \ cost less than \\$500.\n\nAbout the performers:\n\nJacob Sello (1976, Hamburg/Germany)\ - \ studied Audio Engineering, Systematic Musicology and Multimedia Composition\ - \ in Hamburg. He is highly interested in the exciting possibilities that arise\ - \ from the conjunction of traditional acoustic instruments and state-of-the-art\ - \ technology. Pieces for clarinet controlled RC- helicopters or DJ-driven pneumatically\ - \ prepared disklavier pieces are the outcome.\n\nStefan Weinzierl (1985, Günzburg/Germany)\ - \ is constantly searching for fascinating challenges beyond genre-boundaries;\ - \ as a drummer in contemporary solo performances, classical ensembles and orchestras\ - \ as well as in Jazz- and Rock/Pop bands. 
He graduated in educational sciences\ - \ in Regensburg and completed the Percussion Master program at the HfMT Hamburg\ - \ in 2010.},\n address = {Oslo, Norway},\n author = {Jacob Selle and Stefan Weinzierl},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ - \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {Licht\ - \ \\& Hiebe},\n url = {https://vimeo.com/27687788},\n year = {2011}\n}\n" + ID: nime19-music-Ahn + abstract: 'DIY Bionoise (2018) is an instrument in which the performer can generate + sound and noise, deriving from their own body. It contains a circuit that can + measure the bioelectricity from living beings to control the instrument by tactile + sense. This instrument has two functions – a modular synthesizer with an eight-step + sequencer and a bionoise control mode.' + address: 'Porto Alegre, Brazil' + author: Sabina Hyoju Ahn + bibtex: "@inproceedings{nime19-music-Ahn,\n abstract = {DIY Bionoise (2018) is an\ + \ instrument in which the performer can generate sound and noise, deriving from\ + \ their own body. It contains a circuit that can measure the bioelectricity from\ + \ living beings to control the instrument by tactile sense. 
This instrument has\ + \ two functions – a modular synthesizer with an eight-step sequencer and a bionoise\ + \ control mode.},\n address = {Porto Alegre, Brazil},\n author = {Sabina Hyoju\ + \ Ahn},\n booktitle = {Music Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Federico Visi},\n month = {June},\n\ + \ pages = {17--20},\n publisher = {UFRGS},\n title = {DIY Bionoise},\n url = {http://www.nime.org/proceedings/2019/nime2019_music004.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Licht & Hiebe - url: https://vimeo.com/27687788 - year: 2011 + pages: 17--20 + publisher: UFRGS + title: DIY Bionoise + url: http://www.nime.org/proceedings/2019/nime2019_music004.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Clayton2011 - abstract: "Program notes:\n\nRefraction of Your Gaze by Indeterminate Variables\ - \ (ROYGBIV) is an effort to interface sound and the visible spectrum with digital\ - \ and analog media. A collage of field recording, synth pad, and mechanical noise,\ - \ ROYGBIV unfolds as wavelengths of light are read with discrete color sensors.\ - \ Data is communicated through microcontrollers to custom audio software and a\ - \ slide projector reproduces images of the natural world. ROYGBIV is concerned\ - \ with fundamental properties of sensing, perception, and the technologies that\ - \ mediate such experience. Metaphysical dimensions of color and sound are implied\ - \ as the projected image and rainbow form a dialectic between reflection and refraction.\n\ - \nAbout the performers:\n\nJoshua Clayton: New York-based artist whose work occupies\ - \ a hybrid space of media art and language. 
His recent projects explore semiotics,\ - \ mysticism, architecture and the urban landscape, and research-based forms of\ - \ practice. Joshua has just completed a master's degree in Interactive Telecommunications\ - \ from New York University." - address: 'Oslo, Norway' - author: Joshua Clayton - bibtex: "@inproceedings{nime2011-music-Clayton2011,\n abstract = {Program notes:\n\ - \nRefraction of Your Gaze by Indeterminate Variables (ROYGBIV) is an effort to\ - \ interface sound and the visible spectrum with digital and analog media. A collage\ - \ of field recording, synth pad, and mechanical noise, ROYGBIV unfolds as wavelengths\ - \ of light are read with discrete color sensors. Data is communicated through\ - \ microcontrollers to custom audio software and a slide projector reproduces images\ - \ of the natural world. ROYGBIV is concerned with fundamental properties of sensing,\ - \ perception, and the technologies that mediate such experience. Metaphysical\ - \ dimensions of color and sound are implied as the projected image and rainbow\ - \ form a dialectic between reflection and refraction.\n\nAbout the performers:\n\ - \nJoshua Clayton: New York-based artist whose work occupies a hybrid space of\ - \ media art and language. His recent projects explore semiotics, mysticism, architecture\ - \ and the urban landscape, and research-based forms of practice. 
Joshua has just\ - \ completed a master's degree in Interactive Telecommunications from New York\ - \ University.},\n address = {Oslo, Norway},\n author = {Joshua Clayton},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ - \ {June},\n publisher = {Norwegian Academy of Music},\n title = {ROYGBIV},\n url\ - \ = {https://vimeo.com/27690118},\n year = {2011}\n}\n" + ID: nime19-music-Tom + abstract: 'FlexSynth is an interpretation of The Sponge, a DMI embedded with sensors + to detect squeeze, flexion and torsion along with buttons to form an interface + using which musical sounds are generated and the sound is sculpted. The key idea + of the sponge is to harness the properties of a retractable, flexible object that + gives the performer wide range of multi- parametric controls with high resolution + in a maximized gesture space, considering its high manoeuvrability.' + address: 'Porto Alegre, Brazil' + author: Ajin Tom + bibtex: "@inproceedings{nime19-music-Tom,\n abstract = {FlexSynth is an interpretation\ + \ of The Sponge, a DMI embedded with sensors to detect squeeze, flexion and torsion\ + \ along with buttons to form an interface using which musical sounds are generated\ + \ and the sound is sculpted. 
The key idea of the sponge is to harness the properties\ + \ of a retractable, flexible object that gives the performer wide range of multi-\ + \ parametric controls with high resolution in a maximized gesture space, considering\ + \ its high manoeuvrability.},\n address = {Porto Alegre, Brazil},\n author = {Ajin\ + \ Tom},\n booktitle = {Music Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Federico Visi},\n month = {June},\n\ + \ pages = {21--24},\n publisher = {UFRGS},\n title = {FlexSynth – Blending Multi-Dimensional\ + \ Sonic Scenes},\n url = {http://www.nime.org/proceedings/2019/nime2019_music005.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: ROYGBIV - url: https://vimeo.com/27690118 - year: 2011 + pages: 21--24 + publisher: UFRGS + title: FlexSynth – Blending Multi-Dimensional Sonic Scenes + url: http://www.nime.org/proceedings/2019/nime2019_music005.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Stewart2011 - abstract: "Program notes: The t-sticks grew out of a collaborative project by Joseph\ - \ Malloch and composer D. Andrew Stewart, at McGill University. The first prototype\ - \ was completed in 2006. The t-sticks form a family of tubular digital musical\ - \ instruments, ranging in length from 0.6 metres (soprano) to 1.2 metres (tenor).\ - \ They have been designed and constructed to allow a large variety of unique interaction\ - \ techniques. As a result, a significant emphasis is placed on the gestural vocabulary\ - \ required to manipulate and manoeuvre the instrument. The musical experience\ - \ for both the performer and audience is characterised by a unique engagement\ - \ between performer body and instrument.\n\nAbout the performers: D. 
Andrew Stewart\ - \ (Hexagram-MATRALAB, Concordia University, Montreal, Canada): composer, pianist,\ - \ clarinettist and digital musical instrumentalist. Stewart has been working in\ - \ the field of music composition since 1994. Since 2000, he has been pursuing\ - \ a career in live electronics -- gesture-controlled -- performance, after developing\ - \ his own sensor-suit." - address: 'Oslo, Norway' - author: Andrew Stewart - bibtex: "@inproceedings{nime2011-music-Stewart2011,\n abstract = {Program notes:\ - \ The t-sticks grew out of a collaborative project by Joseph Malloch and composer\ - \ D. Andrew Stewart, at McGill University. The first prototype was completed in\ - \ 2006. The t-sticks form a family of tubular digital musical instruments, ranging\ - \ in length from 0.6 metres (soprano) to 1.2 metres (tenor). They have been designed\ - \ and constructed to allow a large variety of unique interaction techniques. As\ - \ a result, a significant emphasis is placed on the gestural vocabulary required\ - \ to manipulate and manoeuvre the instrument. The musical experience for both\ - \ the performer and audience is characterised by a unique engagement between performer\ - \ body and instrument.\n\nAbout the performers: D. Andrew Stewart (Hexagram-MATRALAB,\ - \ Concordia University, Montreal, Canada): composer, pianist, clarinettist and\ - \ digital musical instrumentalist. Stewart has been working in the field of music\ - \ composition since 1994. 
Since 2000, he has been pursuing a career in live electronics\ - \ -- gesture-controlled -- performance, after developing his own sensor-suit.},\n\ - \ address = {Oslo, Norway},\n author = {Andrew Stewart},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ - \ = {Norwegian Academy of Music},\n title = {With Winds (for soprano t-stick)},\n\ - \ url = {https://vimeo.com/28226070},\n year = {2011}\n}\n" + ID: nime19-music-Tragtenberg + abstract: 'Gira is a music and dance performance with Giromin, a wearable wireless + digital instrument. With this Digital Dance and Music Instrument a gesture is + transformed into sound by motion sensors and an analog synthesizer. This transmutation + of languages allows dance to generate music, which stimulates a new dance in an + infinite feedback loop.' + address: 'Porto Alegre, Brazil' + author: 'João Tragtenberg, Filipe Calegario' + bibtex: "@inproceedings{nime19-music-Tragtenberg,\n abstract = {Gira is a music\ + \ and dance performance with Giromin, a wearable wireless digital instrument.\ + \ With this Digital Dance and Music Instrument a gesture is transformed into sound\ + \ by motion sensors and an analog synthesizer. 
This transmutation of languages\ + \ allows dance to generate music, which stimulates a new dance in an infinite\ + \ feedback loop.},\n address = {Porto Alegre, Brazil},\n author = {João Tragtenberg,\ + \ Filipe Calegario},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n month\ + \ = {June},\n pages = {25--28},\n publisher = {UFRGS},\n title = {Gira},\n url\ + \ = {http://www.nime.org/proceedings/2019/nime2019_music006.pdf},\n year = {2019}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: With Winds (for soprano t-stick) - url: https://vimeo.com/28226070 - year: 2011 + pages: 25--28 + publisher: UFRGS + title: Gira + url: http://www.nime.org/proceedings/2019/nime2019_music006.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Mays2011 - abstract: "Program notes: \"L'instant\" (2011) : Solo performance for Karlax instrument\ - \ and laptop. Composed and performed by Tom Mays. Originally an 8 channel tape\ - \ piece, it was completely re-constructed as a live solo for the composer performing\ - \ on a Karlax instrument – a gestural controller developed by Da Fact in France\ - \ (see http://www.dafact.com/). 
Musically, \"L'instant\" is a musical interpretation\ - \ of subatomic instantons, employing rotation and layering of parts who's rhythms\ - \ and timbres are built out of the combining and crossing of series of numbers...\ - \ The scenario is roughly “from the big bang to entropy”, and a “surround sound”\ - \ 5.1 diffusion space is critical to the sense of immersion within the rotating\ - \ sound objects and textures.\n\nAbout the performer: Tom Mays: composer, computer\ - \ musician and teacher, teaches at the National Superior Conservatory of Music\ - \ in Paris, and is currently working on PhD at the University of Paris 8 with\ - \ Horacio Vaggione. He is especially interested in gestural performance of real-time\ - \ computer systems for both written and improvised music, as well as in interaction\ - \ between music and video." - address: 'Oslo, Norway' - author: Tom Mays - bibtex: "@inproceedings{nime2011-music-Mays2011,\n abstract = {Program notes: \"\ - L'instant\" (2011) : Solo performance for Karlax instrument and laptop. Composed\ - \ and performed by Tom Mays. Originally an 8 channel tape piece, it was completely\ - \ re-constructed as a live solo for the composer performing on a Karlax instrument\ - \ – a gestural controller developed by Da Fact in France (see http://www.dafact.com/).\ - \ Musically, \"L'instant\" is a musical interpretation of subatomic instantons,\ - \ employing rotation and layering of parts who's rhythms and timbres are built\ - \ out of the combining and crossing of series of numbers... The scenario is roughly\ - \ “from the big bang to entropy”, and a “surround sound” 5.1 diffusion space is\ - \ critical to the sense of immersion within the rotating sound objects and textures.\n\ - \nAbout the performer: Tom Mays: composer, computer musician and teacher, teaches\ - \ at the National Superior Conservatory of Music in Paris, and is currently working\ - \ on PhD at the University of Paris 8 with Horacio Vaggione. 
He is especially\ - \ interested in gestural performance of real-time computer systems for both written\ - \ and improvised music, as well as in interaction between music and video.},\n\ - \ address = {Oslo, Norway},\n author = {Tom Mays},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ - \ = {Norwegian Academy of Music},\n title = {L'instant},\n url = {https://vimeo.com/28238543},\n\ - \ year = {2011}\n}\n" + ID: nime19-music-Cadiz + abstract: 'iCons is an interactive multi-channel music piece for live computer and + a gesture sensor system designed by the composer especially for this piece, called + AirTouch. Such system allows a much more musical approach to controlling sounds + than the computer keyboard or mouse. Using only movements of the hands in the + air it is possible to control most aspects of the music, such as sound shapes + in time, loops, space positioning, or create very rich spectral densities.' + address: 'Porto Alegre, Brazil' + author: Rodrigo F. Cádiz + bibtex: "@inproceedings{nime19-music-Cadiz,\n abstract = {iCons is an interactive\ + \ multi-channel music piece for live computer and a gesture sensor system designed\ + \ by the composer especially for this piece, called AirTouch. Such system allows\ + \ a much more musical approach to controlling sounds than the computer keyboard\ + \ or mouse. Using only movements of the hands in the air it is possible to control\ + \ most aspects of the music, such as sound shapes in time, loops, space positioning,\ + \ or create very rich spectral densities.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Rodrigo F. 
Cádiz},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n\ + \ month = {June},\n pages = {29--31},\n publisher = {UFRGS},\n title = {iCons},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_music007.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: L'instant - url: https://vimeo.com/28238543 - year: 2011 + pages: 29--31 + publisher: UFRGS + title: iCons + url: http://www.nime.org/proceedings/2019/nime2019_music007.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Dupuis2011 - abstract: "Program notes:\n\nAn interactive audiovisual feedback loop forms the\ - \ basis of All Hail the Dawn. The instrument contains two simple light-sensitive\ - \ oscillators. A crude spectral analysis in Max/MSP is used to filter the oscillators\ - \ as well as looped buffers recorded from the instrument. A matrix of the spectral\ - \ analysis, interactively altered in Jitter using audio data, is projected back\ - \ onto the instrument and performer as a series of shifting patterns. This setup\ - \ allows both the graphics and sound to drive each other, creating an evolving\ - \ audiovisual relationship sensitive to slight changes in position, sound or processing.\n\ - \nAbout the performers:\n\nAlexander Dupuis: composer, performer, and multimedia\ - \ artist. His work involves live electronics and guitar, real-time graphics and\ - \ 3D animation, feedback systems and audiovisual installations. He graduated from\ - \ Brown University in 2010, and is currently working towards his Masters Degree\ - \ in the Digital Musics program at Dartmouth College." 
- address: 'Oslo, Norway' - author: Alexander Dupuis - bibtex: "@inproceedings{nime2011-music-Dupuis2011,\n abstract = {Program notes:\n\ - \nAn interactive audiovisual feedback loop forms the basis of All Hail the Dawn.\ - \ The instrument contains two simple light-sensitive oscillators. A crude spectral\ - \ analysis in Max/MSP is used to filter the oscillators as well as looped buffers\ - \ recorded from the instrument. A matrix of the spectral analysis, interactively\ - \ altered in Jitter using audio data, is projected back onto the instrument and\ - \ performer as a series of shifting patterns. This setup allows both the graphics\ - \ and sound to drive each other, creating an evolving audiovisual relationship\ - \ sensitive to slight changes in position, sound or processing.\n\nAbout the performers:\n\ - \nAlexander Dupuis: composer, performer, and multimedia artist. His work involves\ - \ live electronics and guitar, real-time graphics and 3D animation, feedback systems\ - \ and audiovisual installations. He graduated from Brown University in 2010, and\ - \ is currently working towards his Masters Degree in the Digital Musics program\ - \ at Dartmouth College.},\n address = {Oslo, Norway},\n author = {Alexander Dupuis},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ - \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {All Hail\ - \ the Dawn},\n url = {https://vimeo.com/27691545},\n year = {2011}\n}\n" + ID: nime19-music-Galvao + abstract: 'MusiCursor is an interactive multimedia performance/interface that reimagines + consumer-facing technologies as sites for creative expression. The piece draws + inspiration from established UI/UX design paradigms and the role of the user in + relation to these technologies. 
The performer assumes the role of a user installing + a musically-driven navigation interface on their computer. After an installation + prompt, they are guided through a series of demos, in which a software assistant + instructs the performer to accomplish several tasks. Through their playing, the + performer controls the cursor''s navigation and clicking behavior. In lieu of + a traditional score, the performer relies on text instructions and visual indicators + from a software assistant. The software tracks the progress of the user throughout + the piece and moves onto the next section only once a task has been completed. + Each of the main tasks takes place on the web, where the user navigates across + YouTube, Wikipedia, and Google Maps.' + address: 'Porto Alegre, Brazil' + author: Martim Galvão + bibtex: "@inproceedings{nime19-music-Galvao,\n abstract = {MusiCursor is an interactive\ + \ multimedia performance/interface that reimagines consumer-facing technologies\ + \ as sites for creative expression. The piece draws inspiration from established\ + \ UI/UX design paradigms and the role of the user in relation to these technologies.\ + \ The performer assumes the role of a user installing a musically-driven navigation\ + \ interface on their computer. After an installation prompt, they are guided through\ + \ a series of demos, in which a software assistant instructs the performer to\ + \ accomplish several tasks. Through their playing, the performer controls the\ + \ cursor's navigation and clicking behavior. In lieu of a traditional score, the\ + \ performer relies on text instructions and visual indicators from a software\ + \ assistant. The software tracks the progress of the user throughout the piece\ + \ and moves onto the next section only once a task has been completed. 
Each of\ + \ the main tasks takes place on the web, where the user navigates across YouTube,\ + \ Wikipedia, and Google Maps.},\n address = {Porto Alegre, Brazil},\n author =\ + \ {Martim Galvão},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n month\ + \ = {June},\n pages = {32--34},\n publisher = {UFRGS},\n title = {MusiCursor},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_music008.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: All Hail the Dawn - url: https://vimeo.com/27691545 - year: 2011 + pages: 32--34 + publisher: UFRGS + title: MusiCursor + url: http://www.nime.org/proceedings/2019/nime2019_music008.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Nagashima2011 - abstract: "Program notes: Live computer music (multimedia) work, composed in 2010\ - \ and premiered in Russia. For this work, the composer developed a new interface\ - \ system for musical expression. The new interface has 8 channels of infrared-ray\ - \ distance sensors. This instrument is set up with two mic-stands on the stage.\ - \ The performer also wears the specially developed instrument called MiniBioMuse-III\ - \ which is 16 channels EMG sensor of the performance. The graphic part of this\ - \ work is real-time OpenGL 3D graphics, which is live-controlled by the performance.\ - \ This work is programmed in Max/MSP/jitter environment.\n\nAbout the performer:\ - \ Yoichi Nagashima: composer/researcher/PE, was born in 1958 in Japan. Since 1991\ - \ he has been the director of the Art & Science Laboratory in Hamamatsu, Japan.\ - \ He is a professor of Shizouka University of Art and Culture, Faculty of Design,\ - \ Department of Art and Science. 
He was the General Chair of NIME04." - address: 'Oslo, Norway' - author: Yoichi Nagashima - bibtex: "@inproceedings{nime2011-music-Nagashima2011,\n abstract = {Program notes:\ - \ Live computer music (multimedia) work, composed in 2010 and premiered in Russia.\ - \ For this work, the composer developed a new interface system for musical expression.\ - \ The new interface has 8 channels of infrared-ray distance sensors. This instrument\ - \ is set up with two mic-stands on the stage. The performer also wears the specially\ - \ developed instrument called MiniBioMuse-III which is 16 channels EMG sensor\ - \ of the performance. The graphic part of this work is real-time OpenGL 3D graphics,\ - \ which is live-controlled by the performance. This work is programmed in Max/MSP/jitter\ - \ environment.\n\nAbout the performer: Yoichi Nagashima: composer/researcher/PE,\ - \ was born in 1958 in Japan. Since 1991 he has been the director of the Art &\ - \ Science Laboratory in Hamamatsu, Japan. He is a professor of Shizouka University\ - \ of Art and Culture, Faculty of Design, Department of Art and Science. He was\ - \ the General Chair of NIME04.},\n address = {Oslo, Norway},\n author = {Yoichi\ - \ Nagashima},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ - \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ - \ title = {Ural Power},\n url = {https://vimeo.com/27731875},\n year = {2011}\n\ + ID: nime19-music-Cullen + abstract: 'Pandemonium Trio is Barry Cullen, Miguel Ortiz and Paul Stapleton. Our + performance research trio has been set up to explore multiple instantiations of + custom-made electronic instruments through improvisation. We are particularly + interested in exploiting irregularities in the qualities of circuit components + (e.g. 
imprecise tolerances/values), and how this allows for the development of + stylistic differences across multiple instrument-performer configurations. We + are also interested in how skill, style and performance techniques are developed + in different ways on similar devices over extended periods of time, and how our + existing musical practices are reconfigured through such collaborative exchanges.' + address: 'Porto Alegre, Brazil' + author: Barry Cullen and Miguel Ortiz and Paul Stapleton + bibtex: "@inproceedings{nime19-music-Cullen,\n abstract = {Pandemonium Trio is Barry\ + \ Cullen, Miguel Ortiz and Paul Stapleton. Our performance research trio has been\ + \ set up to explore multiple instantiations of custom-made electronic instruments\ + \ through improvisation. We are particularly interested in exploiting irregularities\ + \ in the qualities of circuit components (e.g. imprecise tolerances/values), and\ + \ how this allows for the development of stylistic differences across multiple\ + \ instrument-performer configurations. 
We are also interested in how skill, style\ + \ and performance techniques are developed in different ways on similar devices\ + \ over extended periods of time, and how our existing musical practices are reconfigured\ + \ through such collaborative exchanges.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Barry Cullen and Miguel Ortiz and Paul Stapleton},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Federico Visi},\n month = {June},\n pages = {35--38},\n publisher\ + \ = {UFRGS},\n title = {Pandemonium Trio perform Drone and Drama v2},\n url =\ + \ {http://www.nime.org/proceedings/2019/nime2019_music009.pdf},\n year = {2019}\n\ }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: Ural Power - url: https://vimeo.com/27731875 - year: 2011 - - -- ENTRYTYPE: inproceedings - ID: nime2011-music-EPtrio–ErikaDonald2011 - abstract: "Program notes: Television Sky is a three-movement work composed by Eliot\ - \ Britton. The movements (Channels 1, 2, 3) deal with various musical and physical\ - \ elements that figure prominently in the EP trio's research: Gesture, Texture,\ - \ and Rhythm. Each movement adopts a different approach to organizing sounds;\ - \ these provide unique arenas to explore communication, expression, and synchronization\ - \ issues arising in an electroacoustic chamber music ensemble.\n\nAbout the performer:\ - \ EP trio is a multi-faceted research group and performing ensemble. It is comprised\ - \ of cellist Erika Donald, percussionist Ben Duinker, and composer/ turntablist\ - \ Eliot Britton. They are based at McGill University in Montreal, Canada where\ - \ they enjoy support from the Centre for Interdisciplinary Research in Music Media\ - \ and Technology (CIRMMT)." 
- address: 'Oslo, Norway' - author: Erika Donald and Ben Duinker and Eliot Britton - bibtex: "@inproceedings{nime2011-music-EPtrio–ErikaDonald2011,\n abstract = {Program\ - \ notes: Television Sky is a three-movement work composed by Eliot Britton. The\ - \ movements (Channels 1, 2, 3) deal with various musical and physical elements\ - \ that figure prominently in the EP trio's research: Gesture, Texture, and Rhythm.\ - \ Each movement adopts a different approach to organizing sounds; these provide\ - \ unique arenas to explore communication, expression, and synchronization issues\ - \ arising in an electroacoustic chamber music ensemble.\n\nAbout the performer:\ - \ EP trio is a multi-faceted research group and performing ensemble. It is comprised\ - \ of cellist Erika Donald, percussionist Ben Duinker, and composer/ turntablist\ - \ Eliot Britton. They are based at McGill University in Montreal, Canada where\ - \ they enjoy support from the Centre for Interdisciplinary Research in Music Media\ - \ and Technology (CIRMMT).},\n address = {Oslo, Norway},\n author = {Erika Donald\ - \ and Ben Duinker and Eliot Britton},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ - \ {Norwegian Academy of Music},\n title = {Television Sky},\n url = {https://vimeo.com/28241338},\n\ - \ year = {2011}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Television Sky - url: https://vimeo.com/28241338 - year: 2011 + pages: 35--38 + publisher: UFRGS + title: Pandemonium Trio perform Drone and Drama v2 + url: http://www.nime.org/proceedings/2019/nime2019_music009.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-SarahTaylor2011 
- abstract: "About the performers:\n\nSarah Taylor: Dancer, Choreographer trained\ - \ at the Australian Ballet School (Degree in Dance), in Classical, Cunningham\ - \ and Graham, Scholarship student to Martha Graham school in New York. Currently\ - \ working with Cesc Gelabert, for the 2011 Grec Festival, Barcelona.\n\nMaurizio\ - \ Goina: Viola player and an audio-visual composer. Currently he is affiliated\ - \ with the School of Music and New Technologies of the Conservatory of Trieste\ - \ where he is developing, together with Pietro Polotti and with the collaboration\ - \ of Sarah Taylor, the EGGS system for gesture sonification.\n\nPietro Polotti:\ - \ Studied piano, composition and electronic music. He has a degree in physics\ - \ from the University of Trieste. In 2002, he obtained a Ph.D. in Communication\ - \ Systems from the EPFL, Switzerland. Presently, he teaches Electronic Music at\ - \ the Conservatory Tartini of Trieste, Italy. He has been part of the EGGS project\ - \ since 2008." - address: 'Oslo, Norway' - author: Sarah Taylor and Maurizio Goina and Pietro Polotti - bibtex: "@inproceedings{nime2011-music-SarahTaylor2011,\n abstract = {About the\ - \ performers:\n\nSarah Taylor: Dancer, Choreographer trained at the Australian\ - \ Ballet School (Degree in Dance), in Classical, Cunningham and Graham, Scholarship\ - \ student to Martha Graham school in New York. Currently working with Cesc Gelabert,\ - \ for the 2011 Grec Festival, Barcelona.\n\nMaurizio Goina: Viola player and an\ - \ audio-visual composer. Currently he is affiliated with the School of Music and\ - \ New Technologies of the Conservatory of Trieste where he is developing, together\ - \ with Pietro Polotti and with the collaboration of Sarah Taylor, the EGGS system\ - \ for gesture sonification.\n\nPietro Polotti: Studied piano, composition and\ - \ electronic music. He has a degree in physics from the University of Trieste.\ - \ In 2002, he obtained a Ph.D. 
in Communication Systems from the EPFL, Switzerland.\ - \ Presently, he teaches Electronic Music at the Conservatory Tartini of Trieste,\ - \ Italy. He has been part of the EGGS project since 2008.},\n address = {Oslo,\ - \ Norway},\n author = {Sarah Taylor and Maurizio Goina and Pietro Polotti},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ - \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {Body\ - \ Jockey},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ - \ year = {2011}\n}\n" + ID: nime19-music-DallAra-Majek + abstract: 'Pythagorean Domino is an improvisatory composition composed in 2019 for + an augmented Theremin and a gyro-based gestural controller. This work aims to + integrate music concrete techniques and an algorithmic compositional approach + in the context of composition for gestural controllers. While music concrete compositional + practice brings out the concept of “composite object”—a sound object made up of + several distinct and successive elements [1]—in the piece, our algorithmic compositional + approach delivers an interpolation technique which entails gradual transformations + of the composite objects over time. Our challenge is to perform a chain of short + fragmental elements in tandem in the way to form a single musical unit, while + the algorithms for transformation are autonomously changing synthetic and control + parameter settings. This approach derives closely interconnected triangular interactions + between two performers and a computer.' + address: 'Porto Alegre, Brazil' + author: Ana Dall'Ara-Majek and Takuto Fukuda + bibtex: "@inproceedings{nime19-music-DallAra-Majek,\n abstract = {Pythagorean Domino\ + \ is an improvisatory composition composed in 2019 for an augmented Theremin and\ + \ a gyro-based gestural controller. 
This work aims to integrate music concrete\ + \ techniques and an algorithmic compositional approach in the context of composition\ + \ for gestural controllers. While music concrete compositional practice brings\ + \ out the concept of “composite object”—a sound object made up of several distinct\ + \ and successive elements [1]—in the piece, our algorithmic compositional approach\ + \ delivers an interpolation technique which entails gradual transformations of\ + \ the composite objects over time. Our challenge is to perform a chain of short\ + \ fragmental elements in tandem in the way to form a single musical unit, while\ + \ the algorithms for transformation are autonomously changing synthetic and control\ + \ parameter settings. This approach derives closely interconnected triangular\ + \ interactions between two performers and a computer.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Ana Dall'Ara-Majek and Takuto Fukuda},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Federico Visi},\n month = {June},\n pages = {39--42},\n publisher\ + \ = {UFRGS},\n title = {Pythagorean Domino},\n url = {http://www.nime.org/proceedings/2019/nime2019_music010.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Body Jockey - url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf - year: 2011 + pages: 39--42 + publisher: UFRGS + title: Pythagorean Domino + url: http://www.nime.org/proceedings/2019/nime2019_music010.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Nicolls2011 - abstract: "Program notes:\nSN: I wanted to get at the closest relationship possible\ - \ between my hands and the resulting sound. 
Having worked with sampling and complex\ - \ processing and various sensors such as EMG, motion capture with live sound as\ - \ the source seemed a way to really get inside an improvisation system that was\ - \ really live and really intuitive. You can judge for yourselves!,\n\nNG: Sarah's\ - \ movements are sensed using a Kinect 3D motion capture device and the gestures\ - \ are recognised in real-time using the SEC, a machine learning toolbox that has\ - \ been specifically developed for musician-computer interaction.\n\nAbout the\ - \ performers:\n\nSarah Nicolls UK-based experimental pianist and inventor of `Inside-out\ - \ piano'; collaborative researcher with e.g. Atau Tanaka, PA Tremblay; concerts\ - \ e.g. world premieres of Larry Goves' Piano Concerto, Richard Barrett's Mesopotamia/London\ - \ Sinfonietta/BBC Radio; article in LMJ20; Senior Lecturer at Brunel University;\ - \ funding: Arts and Humanities Research Council (AHRC), Brunel Research Initiative\ - \ and Enterprise Fund (BRIEF), Arts Council England.\n\nNick Gillian Post-doctoral\ - \ researcher currently working on an E.U. project entitled SIEMPRE at the Sonic\ - \ Arts Research Centre, Belfast. Nick recently completed his PhD in Gesture Recognition\ - \ for Musician-Computer Interaction under the supervision of R. Benjamin Knapp\ - \ and Sile O'Modhrain. His interests are in machine learning and pattern recognition\ - \ and applying these techniques to enable real-time musician-computer interaction." - address: 'Oslo, Norway' - author: Sarah Nicolls and Nick Gillian - bibtex: "@inproceedings{nime2011-music-Nicolls2011,\n abstract = {Program notes:\n\ - SN: I wanted to get at the closest relationship possible between my hands and\ - \ the resulting sound. Having worked with sampling and complex processing and\ - \ various sensors such as EMG, motion capture with live sound as the source seemed\ - \ a way to really get inside an improvisation system that was really live and\ - \ really intuitive. 
You can judge for yourselves!,\n\nNG: Sarah's movements are\ - \ sensed using a Kinect 3D motion capture device and the gestures are recognised\ - \ in real-time using the SEC, a machine learning toolbox that has been specifically\ - \ developed for musician-computer interaction.\n\nAbout the performers:\n\nSarah\ - \ Nicolls UK-based experimental pianist and inventor of `Inside-out piano'; collaborative\ - \ researcher with e.g. Atau Tanaka, PA Tremblay; concerts e.g. world premieres\ - \ of Larry Goves' Piano Concerto, Richard Barrett's Mesopotamia/London Sinfonietta/BBC\ - \ Radio; article in LMJ20; Senior Lecturer at Brunel University; funding: Arts\ - \ and Humanities Research Council (AHRC), Brunel Research Initiative and Enterprise\ - \ Fund (BRIEF), Arts Council England.\n\nNick Gillian Post-doctoral researcher\ - \ currently working on an E.U. project entitled SIEMPRE at the Sonic Arts Research\ - \ Centre, Belfast. Nick recently completed his PhD in Gesture Recognition for\ - \ Musician-Computer Interaction under the supervision of R. Benjamin Knapp and\ - \ Sile O'Modhrain. His interests are in machine learning and pattern recognition\ - \ and applying these techniques to enable real-time musician-computer interaction.},\n\ - \ address = {Oslo, Norway},\n author = {Sarah Nicolls and Nick Gillian},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ - \ {June},\n publisher = {Norwegian Academy of Music},\n title = {Improvisation\ - \ for piano + motion capture system},\n url = {https://vimeo.com/26678719},\n\ - \ year = {2011}\n}\n" + ID: nime19-music-Nie + abstract: '“No one can step into the same river twice.” This instrument, named as + River, contains rules and randomness. What exactly is music and how does it connect + to and shape our form? 
Traditional musical instruments always have fixed physical + forms that require performers to adjust to them. How about making a musical instrument + that is more fluid and more expressive via deforming according to performers'' + movements? This was the question I attempted to explore when I started making + this project. For this project, I combine the movement of dancing with music to + present a fluid and dynamic shape of musical instrument. The fabric of this instrument + can be separated as an extension to wash. It''s portable, wireless, chargeable, + stable and beautiful. This musical instrument generates sound by detecting different + movements of the performer. It has four different modes selected by toggling the + switches on the instrument interface. Each mode has different movement detection + methods, generating various sound and music. Moreover, it can be played as a transmitting + Tambourine. As for the music in my performance, it''s all played by myself lively, + consisting of different sound triggered and changed by performers'' gestures and + melody composed myself. Like the name of this instrument River, the four toggles + and their detection methods and their corresponding generated sounds are intentionally + designed. From simple node, beat, loop, drum, to various node, melody, music, + the detection methods and their triggered sounds are becoming more and more complex + and various, developing like a journey of a river.' + address: 'Porto Alegre, Brazil' + author: Yiyao Nie + bibtex: "@inproceedings{nime19-music-Nie,\n abstract = {“No one can step into the\ + \ same river twice.” This instrument, named as River, contains rules and randomness.\ + \ What exactly is music and how does it connect to and shape our form? Traditional\ + \ musical instruments always have fixed physical forms that require performers\ + \ to adjust to them. 
How about making a musical instrument that is more fluid\ + \ and more expressive via deforming according to performers' movements? This was\ + \ the question I attempted to explore when I started making this project. For\ + \ this project, I combine the movement of dancing with music to present a fluid\ + \ and dynamic shape of musical instrument. The fabric of this instrument can be\ + \ separated as an extension to wash. It's portable, wireless, chargeable, stable\ + \ and beautiful. This musical instrument generates sound by detecting different\ + \ movements of the performer. It has four different modes selected by toggling\ + \ the switches on the instrument interface. Each mode has different movement detection\ + \ methods, generating various sound and music. Moreover, it can be played as a\ + \ transmitting Tambourine. As for the music in my performance, it's all played\ + \ by myself lively, consisting of different sound triggered and changed by performers'\ + \ gestures and melody composed myself. Like the name of this instrument River,\ + \ the four toggles and their detection methods and their corresponding generated\ + \ sounds are intentionally designed. 
From simple node, beat, loop, drum, to various\ + \ node, melody, music, the detection methods and their triggered sounds are becoming\ + \ more and more complex and various, developing like a journey of a river.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Yiyao Nie},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Federico Visi},\n month = {June},\n pages = {43--46},\n publisher\ + \ = {UFRGS},\n title = {River},\n url = {http://www.nime.org/proceedings/2019/nime2019_music011.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Improvisation for piano + motion capture system - url: https://vimeo.com/26678719 - year: 2011 + pages: 43--46 + publisher: UFRGS + title: River + url: http://www.nime.org/proceedings/2019/nime2019_music011.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Hayes2011 - abstract: "Performer notes:\nSocks and Ammo for piano, percussion and live electronics,\ - \ is a new work investigating novel methods of communication between laptop and\ - \ performer, as well as performer and performer, in an improvisational setting.\ - \ Enhancing traditional aural and visual cues, a network is established between\ - \ laptops, providing direction and suggestion to and between performers. Tactile\ - \ feedback is provided to performers in the form of tiny vibrations on the skin,\ - \ opening up a further, yet covert, channel of information to transmit signals\ - \ and cues, allowing for a more informed and focused performance.\n\nAbout the\ - \ performers:\n\nLauren Sarah Hayes: Composer and performer from Glasgow. 
Her\ - \ recent practice focuses on realizing compositions for piano and live electronics,\ - \ which unify extended technique, bespoke software and instrument augmentation.\ - \ Undertaken at the University of Edinburgh, her research investigates audio-haptic\ - \ relationships as performance strategies for performers of digital music.\n\n\ - Christos Michalakos: Composer and improviser from northern Greece. Working predominantly\ - \ with percussion and live electronics, his music explores relationships between\ - \ acoustic and electronic sound worlds, through an examination of methods for\ - \ developing and augmenting his drum kit, forming part of his PhD research at\ - \ the University of Edinburgh.\n\n=== Recorded at:\n\n11th International Conference\ - \ on New Interfaces for Musical Expression. 30 May - 1 June 2011, Oslo, Norway.\n\ - \nhttp://www.nime2011.org" - address: 'Oslo, Norway' - author: Lauren Sarah Hayes and Christos Michalakos - bibtex: "@inproceedings{nime2011-music-Hayes2011,\n abstract = {Performer notes:\n\ - Socks and Ammo for piano, percussion and live electronics, is a new work investigating\ - \ novel methods of communication between laptop and performer, as well as performer\ - \ and performer, in an improvisational setting. Enhancing traditional aural and\ - \ visual cues, a network is established between laptops, providing direction and\ - \ suggestion to and between performers. Tactile feedback is provided to performers\ - \ in the form of tiny vibrations on the skin, opening up a further, yet covert,\ - \ channel of information to transmit signals and cues, allowing for a more informed\ - \ and focused performance.\n\nAbout the performers:\n\nLauren Sarah Hayes: Composer\ - \ and performer from Glasgow. Her recent practice focuses on realizing compositions\ - \ for piano and live electronics, which unify extended technique, bespoke software\ - \ and instrument augmentation. 
Undertaken at the University of Edinburgh, her\ - \ research investigates audio-haptic relationships as performance strategies for\ - \ performers of digital music.\n\nChristos Michalakos: Composer and improviser\ - \ from northern Greece. Working predominantly with percussion and live electronics,\ - \ his music explores relationships between acoustic and electronic sound worlds,\ - \ through an examination of methods for developing and augmenting his drum kit,\ - \ forming part of his PhD research at the University of Edinburgh.\n\n=== Recorded\ - \ at:\n\n11th International Conference on New Interfaces for Musical Expression.\ - \ 30 May - 1 June 2011, Oslo, Norway.\n\nhttp://www.nime2011.org},\n address =\ - \ {Oslo, Norway},\n author = {Lauren Sarah Hayes and Christos Michalakos},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ - \ {June},\n publisher = {Norwegian Academy of Music},\n title = {Socks and Ammo},\n\ - \ url = {https://vimeo.com/26629807},\n year = {2011}\n}\n" + ID: nime19-music-Park + abstract: 'Self-Built Instrument project is focused on sound performance with an + experi- mental instrument which is composed of strings and metallic sound box, + pro- ducing overtones, harmonics and feed- back. It is capable to play with different + sound colours : Resonances by cooper, bowing on strings, overtones and feed- back. + All of factors triggers each other''s sound. It is not a point to play a specific + tone or to make a musical harmony, because the instrument is not able to per- + fectly control. Playing this Instrument is a challenge to your capacity, such + as gestures and sonic phenomenon following sense and space. The artist composed + a piece and use few repertoire partly, however, mostly it is interesting to find + what kind of sound comes to nest in mesh. 
The Artist tried to get over typical + aesthetics of classical music, such as using precise pitches, melodies, and read + scores. Instead of that, her approach towards to discover unusual sound elements + which are considered as mistake in tradi- tional way. And play with them, for + instance, strings without tuning, hitting a stuffs, unorganized pitch, also so-called + clicker which happens unskilled.' + address: 'Porto Alegre, Brazil' + author: Jiyun Park + bibtex: "@inproceedings{nime19-music-Park,\n abstract = {Self-Built Instrument project\ + \ is focused on sound performance with an experi- mental instrument which is composed\ + \ of strings and metallic sound box, pro- ducing overtones, harmonics and feed-\ + \ back. It is capable to play with different sound colours : Resonances by cooper,\ + \ bowing on strings, overtones and feed- back. All of factors triggers each other's\ + \ sound. It is not a point to play a specific tone or to make a musical harmony,\ + \ because the instrument is not able to per- fectly control. Playing this Instrument\ + \ is a challenge to your capacity, such as gestures and sonic phenomenon following\ + \ sense and space. The artist composed a piece and use few repertoire partly,\ + \ however, mostly it is interesting to find what kind of sound comes to nest in\ + \ mesh. The Artist tried to get over typical aesthetics of classical music, such\ + \ as using precise pitches, melodies, and read scores. Instead of that, her approach\ + \ towards to discover unusual sound elements which are considered as mistake in\ + \ tradi- tional way. 
And play with them, for instance, strings without tuning,\ + \ hitting a stuffs, unorganized pitch, also so-called clicker which happens unskilled.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Jiyun Park},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Federico Visi},\n month = {June},\n pages = {47--49},\n publisher\ + \ = {UFRGS},\n title = {Self-Built Instrument (sound performance)},\n url = {http://www.nime.org/proceedings/2019/nime2019_music012.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Socks and Ammo - url: https://vimeo.com/26629807 - year: 2011 + pages: 47--49 + publisher: UFRGS + title: Self-Built Instrument (sound performance) + url: http://www.nime.org/proceedings/2019/nime2019_music012.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-PaulStapleton2011 - abstract: "About the performers:\n\nE=MCH is a recently formed quartet featuring\ - \ Belfast-based improvisers Paul Stapleton (BoSS & Postcard Weevil), Caroline\ - \ Pugh (Voice & Analogue Cassette Decks, Zero-input Mixer), Adnan Marquez-Borbon\ - \ (Feedback Bass Clarinet, Recording Modules & Delay Lines) and Cavan Fyans (DIY\ - \ Electronics). Memories, distortions of time and place, echoes from analogue\ - \ delay lengths, solid state samplers, and modified vinyl all help shape the fabric\ - \ of the music in response to its larger ecology. 
``Okay so making instruments\ - \ and playing on them is not new, can't really see that there is any new thought\ - \ about how why and what here, but the sound sculpture looks nice.'' --- Cosmopolitan\n\ - \nPaul Stapleton: Sound artist, improviser and writer originally from Southern\ - \ California, currently lecturing at the Sonic Arts Research Centre in Belfast\ - \ (SARC). Paul designs and performs with a variety of custom made metallic sound\ - \ sculptures, electronics and found objects in locations ranging from impro clubs\ - \ in Cork to abandoned beaches on Vancouver Island.\n\nCaroline Pugh: Scottish\ - \ vocalist and performance artist. She deviously borrows analogue technologies\ - \ and oral histories to create performances that present imagined constructions\ - \ of traditional and popular culture. With a background in both folk music and\ - \ improvisation, she collaborates with people from any discipline and performs\ - \ in a wide variety of venues including folk clubs, arts venues and cinemas.\n\ - \nAdnan Marquez-Borbon: Saxophonist, improviser, computer musician, and composer,\ - \ currently a PhD student at SARC. His research emphasis is on the roles of learning\ - \ models and skill development in the design of digital musical instruments. As\ - \ a musician, his music focuses on improvisation and the electronic manipulation\ - \ of sounds in real-time.\n\nCavan Fyans: PhD research student, instrument builder,\ - \ noise maker & improviser. Currently located at SARC, Cavan's research examines\ - \ the spectator's cognition of interaction and performance in communicative interactions\ - \ with technology. Cavan also devotes time to developing new and innovative ways\ - \ of breaking cheap electronic toys (Circuit Bending) and (re)constructing circuitry\ - \ for sonic creation (Hardware Hacking)." 
- address: 'Oslo, Norway' - author: Paul Stapleton and Caroline Pugh and Adnan Marquez-Borbon and Cavan Fyans - bibtex: "@inproceedings{nime2011-music-PaulStapleton2011,\n abstract = {About the\ - \ performers:\n\nE=MCH is a recently formed quartet featuring Belfast-based improvisers\ - \ Paul Stapleton (BoSS \\& Postcard Weevil), Caroline Pugh (Voice \\& Analogue\ - \ Cassette Decks, Zero-input Mixer), Adnan Marquez-Borbon (Feedback Bass Clarinet,\ - \ Recording Modules \\& Delay Lines) and Cavan Fyans (DIY Electronics). Memories,\ - \ distortions of time and place, echoes from analogue delay lengths, solid state\ - \ samplers, and modified vinyl all help shape the fabric of the music in response\ - \ to its larger ecology. ``Okay so making instruments and playing on them is not\ - \ new, can't really see that there is any new thought about how why and what here,\ - \ but the sound sculpture looks nice.'' --- Cosmopolitan\n\nPaul Stapleton: Sound\ - \ artist, improviser and writer originally from Southern California, currently\ - \ lecturing at the Sonic Arts Research Centre in Belfast (SARC). Paul designs\ - \ and performs with a variety of custom made metallic sound sculptures, electronics\ - \ and found objects in locations ranging from impro clubs in Cork to abandoned\ - \ beaches on Vancouver Island.\n\nCaroline Pugh: Scottish vocalist and performance\ - \ artist. She deviously borrows analogue technologies and oral histories to create\ - \ performances that present imagined constructions of traditional and popular\ - \ culture. With a background in both folk music and improvisation, she collaborates\ - \ with people from any discipline and performs in a wide variety of venues including\ - \ folk clubs, arts venues and cinemas.\n\nAdnan Marquez-Borbon: Saxophonist, improviser,\ - \ computer musician, and composer, currently a PhD student at SARC. 
His research\ - \ emphasis is on the roles of learning models and skill development in the design\ - \ of digital musical instruments. As a musician, his music focuses on improvisation\ - \ and the electronic manipulation of sounds in real-time.\n\nCavan Fyans: PhD\ - \ research student, instrument builder, noise maker \\& improviser. Currently\ - \ located at SARC, Cavan's research examines the spectator's cognition of interaction\ - \ and performance in communicative interactions with technology. Cavan also devotes\ - \ time to developing new and innovative ways of breaking cheap electronic toys\ - \ (Circuit Bending) and (re)constructing circuitry for sonic creation (Hardware\ - \ Hacking).},\n address = {Oslo, Norway},\n author = {Paul Stapleton and Caroline\ - \ Pugh and Adnan Marquez-Borbon and Cavan Fyans},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ - \ = {Norwegian Academy of Music},\n title = {E=MCH},\n url = {https://vimeo.com/26620232},\n\ - \ year = {2011}\n}\n" + ID: nime19-music-Martins + abstract: '"Tanto Mar" seeks to recreate the properties present in history between + Portugal and Brazil, embracing the idea of an aqueous sound that dances and moves + as much by cadence as by voluminous waves. The Atlantic Ocean, which separates + and unites the two countries, serves as an inspiration for this quadraphonic performance, + involving musical instruments and live electronics, where the sounds move through + the four speakers. Each speaker symbolizes the paths that the sea travels uninterruptedly, + in a unique dance of latitudes and longitudes. The intersection of sounds occurs + through processes of reverberations, spatializations, echoes, modulations and + grains that slowly form the sound material, composing, decomposing and manipulating + the sound waves. 
Sound characters such as wind, oars, storms, calm, among others, + are metaphorically evidenced through the sound material, creating a kind of rhythmic + movement of a caravel at sea. The sounds of "Tanto Mar" move between entropy and + chaos, between stillness and tsunami, between starboard and port, culminating + in a textural dance where the objective is to take the listener away from electronic + processing, and propose a dive in an intensified, attentive, deep and involving + listening. New musical possibilities can happen through the experimentation of + new routes, unusual routes and horizons not yet covered. The sea and its imprecise + distances represent permanent challenges. "Tanto Mar" seeks to revive the feeling + of the Portuguese poet Fernando Pessoa, when he wrote: "to dream even if it is + impossible".' + address: 'Porto Alegre, Brazil' + author: André L. Martins and Paulo Assis Barbosa + bibtex: "@inproceedings{nime19-music-Martins,\n abstract = {\"Tanto Mar\" seeks\ + \ to recreate the properties present in history between Portugal and Brazil, embracing\ + \ the idea of an aqueous sound that dances and moves as much by cadence as by\ + \ voluminous waves. The Atlantic Ocean, which separates and unites the two countries,\ + \ serves as an inspiration for this quadraphonic performance, involving musical\ + \ instruments and live electronics, where the sounds move through the four speakers.\ + \ Each speaker symbolizes the paths that the sea travels uninterruptedly, in a\ + \ unique dance of latitudes and longitudes. The intersection of sounds occurs\ + \ through processes of reverberations, spatializations, echoes, modulations and\ + \ grains that slowly form the sound material, composing, decomposing and manipulating\ + \ the sound waves. Sound characters such as wind, oars, storms, calm, among others,\ + \ are metaphorically evidenced through the sound material, creating a kind of\ + \ rhythmic movement of a caravel at sea. 
The sounds of \"Tanto Mar\" move between\ + \ entropy and chaos, between stillness and tsunami, between starboard and port,\ + \ culminating in a textural dance where the objective is to take the listener\ + \ away from electronic processing, and propose a dive in an intensified, attentive,\ + \ deep and involving listening. New musical possibilities can happen through the\ + \ experimentation of new routes, unusual routes and horizons not yet covered.\ + \ The sea and its imprecise distances represent permanent challenges. \"Tanto\ + \ Mar\" seeks to revive the feeling of the Portuguese poet Fernando Pessoa, when\ + \ he wrote: \"to dream even if it is impossible\".},\n address = {Porto Alegre,\ + \ Brazil},\n author = {André L. Martins and Paulo Assis Barbosa},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Federico Visi},\n month = {June},\n pages = {50--51},\n\ + \ publisher = {UFRGS},\n title = {Tanto Mar},\n url = {http://www.nime.org/proceedings/2019/nime2019_music013.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: E=MCH - url: https://vimeo.com/26620232 - year: 2011 + pages: 50--51 + publisher: UFRGS + title: Tanto Mar + url: http://www.nime.org/proceedings/2019/nime2019_music013.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Alden2011 - abstract: "Program notes:\nREMI Sings is an electroacoustic performance for the\ - \ bio-inspired Rhizomatic Experimental Musical Interface (REMI) and accordion.\ - \ REMI is an interactive networked musical organism that receives sonic input\ - \ from its environment, processes it based on the ever changing structure of its\ - \ interior network, and generates a unique musical output. 
This rhizomatic network\ - \ is a software structure modelled after the functioning and growth patterns of\ - \ biological rhizomes, specifically the mycorrhizal association that form vital\ - \ nutrient pathways for the majority of the planet's land-plant ecosystems. The\ - \ performance REMI Sings highlights this interface's interactive nature, creating\ - \ a dialogue between human performer and non-human musical intelligence.\n\nAbout\ - \ the performer:\n\nChristopher Alden: Composer, programmer, and instrumentalist\ - \ currently studying at New York University's Interactive Telecommunications Program,\ - \ where his research focuses on interactive music systems for composition and\ - \ performance. Before ITP, he received his undergraduate degree in Music Theory\ - \ and Composition at NYU where he studied composition under Marc Antonio-Consoli" - address: 'Oslo, Norway' - author: Christopher Alden - bibtex: "@inproceedings{nime2011-music-Alden2011,\n abstract = {Program notes:\n\ - REMI Sings is an electroacoustic performance for the bio-inspired Rhizomatic Experimental\ - \ Musical Interface (REMI) and accordion. REMI is an interactive networked musical\ - \ organism that receives sonic input from its environment, processes it based\ - \ on the ever changing structure of its interior network, and generates a unique\ - \ musical output. This rhizomatic network is a software structure modelled after\ - \ the functioning and growth patterns of biological rhizomes, specifically the\ - \ mycorrhizal association that form vital nutrient pathways for the majority of\ - \ the planet's land-plant ecosystems. 
The performance REMI Sings highlights this\ - \ interface's interactive nature, creating a dialogue between human performer\ - \ and non-human musical intelligence.\n\nAbout the performer:\n\nChristopher Alden:\ - \ Composer, programmer, and instrumentalist currently studying at New York University's\ - \ Interactive Telecommunications Program, where his research focuses on interactive\ - \ music systems for composition and performance. Before ITP, he received his undergraduate\ - \ degree in Music Theory and Composition at NYU where he studied composition under\ - \ Marc Antonio-Consoli},\n address = {Oslo, Norway},\n author = {Christopher Alden},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ - \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {REMI\ - \ Sings},\n url = {https://vimeo.com/26619152},\n year = {2011}\n}\n" + ID: nime19-music-Carrascoza + abstract: '“Tempo Transversal – Flauta Expandida” aims to establish a computer- + controlled catalyzer, which simultaneously combines and extends the flutist body + actions, electronic sounds and the performative physical space. Some flute performance + fragments, captured in real time by video cameras, besides pre-recorded images, + built the visual projection. The flute player develops two pieces of experimental + music for flute and electronic. All these heterogeneous elements are interrelated + with each other in a network mediated by the computer. The result is a continuously + unfolded interactive performance, which intends to manipulate settings of space-time + perception. Brazilian contemporary repertoire for amplified bass flute and electronic + sounds establishes the proposal.' 
+ address: 'Porto Alegre, Brazil' + author: Cassia Carrascoza and Felipe Merker Castellani + bibtex: "@inproceedings{nime19-music-Carrascoza,\n abstract = {“Tempo Transversal\ + \ – Flauta Expandida” aims to establish a computer- controlled catalyzer, which\ + \ simultaneously combines and extends the flutist body actions, electronic sounds\ + \ and the performative physical space. Some flute performance fragments, captured\ + \ in real time by video cameras, besides pre-recorded images, built the visual\ + \ projection. The flute player develops two pieces of experimental music for flute\ + \ and electronic. All these heterogeneous elements are interrelated with each\ + \ other in a network mediated by the computer. The result is a continuously unfolded\ + \ interactive performance, which intends to manipulate settings of space-time\ + \ perception. Brazilian contemporary repertoire for amplified bass flute and electronic\ + \ sounds establishes the proposal.},\n address = {Porto Alegre, Brazil},\n author\ + \ = {Cassia Carrascoza and Felipe Merker Castellani},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Federico Visi},\n month = {June},\n pages = {52--55},\n publisher\ + \ = {UFRGS},\n title = {Tempo Transversal – Flauta Expandida},\n url = {http://www.nime.org/proceedings/2019/nime2019_music014.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: REMI Sings - url: https://vimeo.com/26619152 - year: 2011 + pages: 52--55 + publisher: UFRGS + title: Tempo Transversal – Flauta Expandida + url: http://www.nime.org/proceedings/2019/nime2019_music014.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Schwarz2011 - abstract: "Program notes:\n\nThe performance 
between electric violinist Victoria\ - \ Johnson and Diemo Schwarz playing his interactive corpus-based concatenative\ - \ synthesis software CataRT is an improvisation with two brains and four hands\ - \ controlling one shared symbolic instrument, the sound space, built-up from nothing\ - \ and nourished in unplanned ways by the sound of the instrument, explored and\ - \ consumed with whatever the live instant filled it with. It creates a symbiotic\ - \ relationship between the player of the instrument and that of the software.\ - \ Live corpus-based concatenative synthesis permits here a new approach to improvisation,\ - \ where sound from an instrument is recontextualised by interactive, gesture-controlled\ - \ software. Not knowing what can happen is an integral part of the performance.\n\ - \nAbout the performers:\n\nVictoria Johnson works with electric violin, live electronics,\ - \ improvisation and musical technological issues in her artistic work. Trained\ - \ as a classical violinist in Oslo, Vienna and London, she gave her debut recital\ - \ in Oslo in 1995. She has established herself internationally as a soloist, chamber\ - \ musician and improviser in contemporary, improvised and experimental, cross-disciplinary\ - \ music and art.\n\nDiemo Schwarz: Researcher and developer at Ircam, composer\ - \ of electronic music, and musician on drums and laptop with gestural controllers.\ - \ His compositions and live performances, in solo as Mean Time Between Failure,\ - \ or improvising with other musicians, explore the possibilities of corpus-based\ - \ concatenative synthesis to re-contextualise any sound source by rearranging\ - \ sound units into a new musical framework using interactive navigation through\ - \ a timbral space." 
- address: 'Oslo, Norway' - author: Diemo Schwarz and Victoria Johnson - bibtex: "@inproceedings{nime2011-music-Schwarz2011,\n abstract = {Program notes:\n\ - \nThe performance between electric violinist Victoria Johnson and Diemo Schwarz\ - \ playing his interactive corpus-based concatenative synthesis software CataRT\ - \ is an improvisation with two brains and four hands controlling one shared symbolic\ - \ instrument, the sound space, built-up from nothing and nourished in unplanned\ - \ ways by the sound of the instrument, explored and consumed with whatever the\ - \ live instant filled it with. It creates a symbiotic relationship between the\ - \ player of the instrument and that of the software. Live corpus-based concatenative\ - \ synthesis permits here a new approach to improvisation, where sound from an\ - \ instrument is recontextualised by interactive, gesture-controlled software.\ - \ Not knowing what can happen is an integral part of the performance.\n\nAbout\ - \ the performers:\n\nVictoria Johnson works with electric violin, live electronics,\ - \ improvisation and musical technological issues in her artistic work. Trained\ - \ as a classical violinist in Oslo, Vienna and London, she gave her debut recital\ - \ in Oslo in 1995. 
She has established herself internationally as a soloist, chamber\ - \ musician and improviser in contemporary, improvised and experimental, cross-disciplinary\ - \ music and art.\n\nDiemo Schwarz: Researcher and developer at Ircam, composer\ - \ of electronic music, and musician on drums and laptop with gestural controllers.\ - \ His compositions and live performances, in solo as Mean Time Between Failure,\ - \ or improvising with other musicians, explore the possibilities of corpus-based\ - \ concatenative synthesis to re-contextualise any sound source by rearranging\ - \ sound units into a new musical framework using interactive navigation through\ - \ a timbral space.},\n address = {Oslo, Norway},\n author = {Diemo Schwarz and\ - \ Victoria Johnson},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ - \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ - \ title = {Suspended Beginnings},\n url = {https://vimeo.com/26679877},\n year\ - \ = {2011}\n}\n" + ID: nime19-music-Hamilton + abstract: 'Trois Machins de la Grâce Aimante is a composition intended to explore + Twenty-First century technological and musical paradigms. At its heart Trois Machins + is a string quartet fundamentally descended from a tradition that spans back to + the 18th century. As such, the work primarily explores timbral material based + around the sound of a bowed string, in this case realized using a set of physically + modeled bowed strings controlled by Coretet, a virtual reality string instrument + and networked performance environment. The composition - for four performers, + preferably from an existing string quartet ensemble - takes the form of three + distinct movements, each exploring different capabilities of the instrument itself + and requiring different forms of communication and collaboration between the four + performers.' 
+ address: 'Porto Alegre, Brazil' + author: Rob Hamilton + bibtex: "@inproceedings{nime19-music-Hamilton,\n abstract = {Trois Machins de la\ + \ Grâce Aimante is a composition intended to explore Twenty-First century technological\ + \ and musical paradigms. At its heart Trois Machins is a string quartet fundamentally\ + \ descended from a tradition that spans back to the 18th century. As such, the\ + \ work primarily explores timbral material based around the sound of a bowed string,\ + \ in this case realized using a set of physically modeled bowed strings controlled\ + \ by Coretet, a virtual reality string instrument and networked performance environment.\ + \ The composition - for four performers, preferably from an existing string quartet\ + \ ensemble - takes the form of three distinct movements, each exploring different\ + \ capabilities of the instrument itself and requiring different forms of communication\ + \ and collaboration between the four performers.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Rob Hamilton},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Federico Visi},\n month = {June},\n pages = {56--59},\n publisher = {UFRGS},\n\ + \ title = {Trois Machins de la Grâce Aimante (Coretet no. 1)},\n url = {http://www.nime.org/proceedings/2019/nime2019_music015.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Suspended Beginnings - url: https://vimeo.com/26679877 - year: 2011 + pages: 56--59 + publisher: UFRGS + title: Trois Machins de la Grâce Aimante (Coretet no. 
1) + url: http://www.nime.org/proceedings/2019/nime2019_music015.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-JasonDixon2011 - abstract: "Program notes:\n\nThe Loop explores the possibilities of co-located performance,\ - \ decentralized composition, and the acoustics of network. This performance begins\ - \ with a brief improvisation presenting acoustic sources to excite the network.\ - \ This material is shared, transformed, and reintroduced into the composition.\ - \ This process continues through successive generations until a predetermined\ - \ time or a point at which the composition naturally concludes. The result is\ - \ an integrated meta-instrument and an emergent composition, with no one artist\ - \ being the sole performer or composer. Remote participants are represented locally\ - \ by a mono speaker enabling the audiences to hear the transformation of audio\ - \ through the networked instrument.\n\nAbout the performers:\n\nJason Dixon: Irish\ - \ composer currently based in Norwich where he is in the process of completing\ - \ his PhD in composition. His work explores issues of language, perception and\ - \ memory in music. More recently he has been focusing on the Irish storytelling\ - \ tradition and its place in contemporary Ireland.\n\nTom Davis: Digital artist\ - \ working mainly in the medium of sound installation. His practice and theory\ - \ based output involves the creation of technology led environments for interaction.\ - \ Davis is currently a lecturer at the University of Bournemouth and holds a PhD\ - \ from the Sonic Arts Research Centre, Belfast.\n\nJason Geistweidt: Sound artist\ - \ based at the University or Tromsø, Norway, researching mixed-reality stages\ - \ and performance systems. He is a former faculty member of Interactive Arts and\ - \ Media department at Columbia College Chicago. 
He holds PhD in electro-acoustic\ - \ composition from the Sonic Arts Research Centre, Queens University, Belfast.\n\ - \nAlain B. Renaud: Alain's research focuses on networked music performance systems\ - \ with an emphasis on the creation of strategies to interact over a network musically\ - \ and the notion of shared networked acoustic spaces. He is a lecturer in at Bournemouth\ - \ University, England and holds a PhD from the Sonic Arts Research Centre." - address: 'Oslo, Norway' - author: Jason Dixon and Tom Davis and Jason Geistweidt and Alain B. Renaud - bibtex: "@inproceedings{nime2011-music-JasonDixon2011,\n abstract = {Program notes:\n\ - \nThe Loop explores the possibilities of co-located performance, decentralized\ - \ composition, and the acoustics of network. This performance begins with a brief\ - \ improvisation presenting acoustic sources to excite the network. This material\ - \ is shared, transformed, and reintroduced into the composition. This process\ - \ continues through successive generations until a predetermined time or a point\ - \ at which the composition naturally concludes. The result is an integrated meta-instrument\ - \ and an emergent composition, with no one artist being the sole performer or\ - \ composer. Remote participants are represented locally by a mono speaker enabling\ - \ the audiences to hear the transformation of audio through the networked instrument.\n\ - \nAbout the performers:\n\nJason Dixon: Irish composer currently based in Norwich\ - \ where he is in the process of completing his PhD in composition. His work explores\ - \ issues of language, perception and memory in music. More recently he has been\ - \ focusing on the Irish storytelling tradition and its place in contemporary Ireland.\n\ - \nTom Davis: Digital artist working mainly in the medium of sound installation.\ - \ His practice and theory based output involves the creation of technology led\ - \ environments for interaction. 
Davis is currently a lecturer at the University\ - \ of Bournemouth and holds a PhD from the Sonic Arts Research Centre, Belfast.\n\ - \nJason Geistweidt: Sound artist based at the University or Tromsø, Norway, researching\ - \ mixed-reality stages and performance systems. He is a former faculty member\ - \ of Interactive Arts and Media department at Columbia College Chicago. He holds\ - \ PhD in electro-acoustic composition from the Sonic Arts Research Centre, Queens\ - \ University, Belfast.\n\nAlain B. Renaud: Alain's research focuses on networked\ - \ music performance systems with an emphasis on the creation of strategies to\ - \ interact over a network musically and the notion of shared networked acoustic\ - \ spaces. He is a lecturer in at Bournemouth University, England and holds a PhD\ - \ from the Sonic Arts Research Centre.},\n address = {Oslo, Norway},\n author\ - \ = {Jason Dixon and Tom Davis and Jason Geistweidt and Alain B. Renaud},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ - \ {June},\n publisher = {Norwegian Academy of Music},\n title = {The Loop},\n\ - \ url = {https://vimeo.com/26679893},\n year = {2011}\n}\n" + ID: nime19-music-Stapleton + abstract: 'This work is a continuation of my research into developing new performance + ecosystems for improvisation. For this project I developed a new volatile assemblage, + aka VOLA. My self-designed musical instruments are shaped by my history as a performer + working in acoustic, mechanical, electronic and digital musics, blending and exploring + the boundaries and breaking points of these different domains. My instruments + support many of my existing techniques originally developed on more conventional + instruments, while also affording the development of extended and novel techniques + and performance strategies. 
In much of my work I am particularly focused on the + exploration of musical timbre and texture; however, for this project my attention + is also directed towards time, flow, pulse, duration, friction, disruption – in + short, qualitative rhythms and defamiliarisation.' + address: 'Porto Alegre, Brazil' + author: Paul Stapleton + bibtex: "@inproceedings{nime19-music-Stapleton,\n abstract = {This work is a continuation\ + \ of my research into developing new performance ecosystems for improvisation.\ + \ For this project I developed a new volatile assemblage, aka VOLA. My self-designed\ + \ musical instruments are shaped by my history as a performer working in acoustic,\ + \ mechanical, electronic and digital musics, blending and exploring the boundaries\ + \ and breaking points of these different domains. My instruments support many\ + \ of my existing techniques originally developed on more conventional instruments,\ + \ while also affording the development of extended and novel techniques and performance\ + \ strategies. 
In much of my work I am particularly focused on the exploration\ + \ of musical timbre and texture; however, for this project my attention is also\ + \ directed towards time, flow, pulse, duration, friction, disruption – in short,\ + \ qualitative rhythms and defamiliarisation.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Paul Stapleton},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n\ + \ month = {June},\n pages = {60--62},\n publisher = {UFRGS},\n title = {uncertain\ + \ rhythms},\n url = {http://www.nime.org/proceedings/2019/nime2019_music016.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: The Loop - url: https://vimeo.com/26679893 - year: 2011 + pages: 60--62 + publisher: UFRGS + title: uncertain rhythms + url: http://www.nime.org/proceedings/2019/nime2019_music016.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Zappi2011 - abstract: "Program notes:\nDissonance is an audio/visual performance in which a\ - \ progressive soundtrack is created along with the exploration of an interactive\ - \ virtual environment. 
While real instrument--generated music animates the projected\ - \ worlds, the two performers are allowed to physically interact with virtual objects,\ - \ changing their position, shape and color to control music and create new sounds.\ - \ As the journey continues and the environment introduces new elements and new\ - \ metaphors, performers are driven to explore the sonic laws that rule each scenario.\ - \ Spectators wearing 3D glasses perceive the virtual environment as moving out\ - \ of the screen and embracing the artists, in choreographies where real and virtual\ - \ world literally overlap.\n\nAbout the performers:\n\nVictor Zappi: PhD student\ - \ and a new media artist. His research focuses on Virtual Reality and its applications\ - \ in art and live performances.\n\nDario Mazzanti: computer science engineer and\ - \ multi-instrumentalist composer. He enjoys writing, recording and playing music\ - \ combining his artistic streak with his interest for technology." - address: 'Oslo, Norway' - author: Victor Zappi and Dario Mazzanti - bibtex: "@inproceedings{nime2011-music-Zappi2011,\n abstract = {Program notes:\n\ - Dissonance is an audio/visual performance in which a progressive soundtrack is\ - \ created along with the exploration of an interactive virtual environment. While\ - \ real instrument--generated music animates the projected worlds, the two performers\ - \ are allowed to physically interact with virtual objects, changing their position,\ - \ shape and color to control music and create new sounds. As the journey continues\ - \ and the environment introduces new elements and new metaphors, performers are\ - \ driven to explore the sonic laws that rule each scenario. Spectators wearing\ - \ 3D glasses perceive the virtual environment as moving out of the screen and\ - \ embracing the artists, in choreographies where real and virtual world literally\ - \ overlap.\n\nAbout the performers:\n\nVictor Zappi: PhD student and a new media\ - \ artist. 
His research focuses on Virtual Reality and its applications in art\ - \ and live performances.\n\nDario Mazzanti: computer science engineer and multi-instrumentalist\ - \ composer. He enjoys writing, recording and playing music combining his artistic\ - \ streak with his interest for technology.},\n address = {Oslo, Norway},\n author\ - \ = {Victor Zappi and Dario Mazzanti},\n booktitle = {Music Proceedings of the\ + ID: nime19-music-Erdem + abstract: 'What if a musician could step outside the familiar instrumental paradigm + and adopt a new embodied language for moving through sound with a dancer in true + partnership? And what if a dancer''s body could coalesce with a musician''s skills + and intuitively render movements into instrumental actions for active sound- making? + Vrengt is a multi-user instrument, specifically developed for music-dance performance, + with a particular focus on exploring the boundaries between standstill vs motion, + and silence vs sound. We sought for creating a work for one, hybrid corporeality, + in which a dancer and a musician would co-creatively and co- dependently interact + with their bodies and a machine. The challenge, then, was how could two performers + with distinct embodied skills unite in a continuous entanglement of intentions, + senses and experiences to control the same sonic and musical parameters? This + was conceptually different than they had done before in the context of interactive + dance performances.' + address: 'Porto Alegre, Brazil' + author: Çağri Erdem and Katja Henriksen Schia and Alexander Refsum Jensenius + bibtex: "@inproceedings{nime19-music-Erdem,\n abstract = {What if a musician could\ + \ step outside the familiar instrumental paradigm and adopt a new embodied language\ + \ for moving through sound with a dancer in true partnership? And what if a dancer's\ + \ body could coalesce with a musician's skills and intuitively render movements\ + \ into instrumental actions for active sound- making? 
Vrengt is a multi-user instrument,\ + \ specifically developed for music-dance performance, with a particular focus\ + \ on exploring the boundaries between standstill vs motion, and silence vs sound.\ + \ We sought for creating a work for one, hybrid corporeality, in which a dancer\ + \ and a musician would co-creatively and co- dependently interact with their bodies\ + \ and a machine. The challenge, then, was how could two performers with distinct\ + \ embodied skills unite in a continuous entanglement of intentions, senses and\ + \ experiences to control the same sonic and musical parameters? This was conceptually\ + \ different than they had done before in the context of interactive dance performances.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Çağri Erdem and Katja Henriksen\ + \ Schia and Alexander Refsum Jensenius},\n booktitle = {Music Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ - \ {Norwegian Academy of Music},\n title = {Dissonance},\n url = {https://vimeo.com/26616186},\n\ - \ year = {2011}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: Dissonance - url: https://vimeo.com/26616186 - year: 2011 - - -- ENTRYTYPE: inproceedings - ID: nime2011-music-Nowitz2011 - abstract: "Program notes:\n\nSince 2008 I have been performing and composing music\ - \ for voice and live-electronics using two Wii-remotes as gestural controllers.\ - \ The live-electronics function in two ways: as an extension of my voice and as\ - \ an instrument as well. The music creation is mainly based on live-sampling the\ - \ voice. I also use pre-recorded sounds and my own compositions. 
In addition,\ - \ since the beginning of 2010 we have been developing a new instrument, which\ - \ goes beyond the technical possibilities of the Wii-controllers. I call this\ - \ instrument the Shells. Besides motion sensors there are three more continuous\ - \ controllers available: a pressure sensor, a joystick control and ultrasound\ - \ for distance measurement.\n\nAbout the performers:\n\nAlex Nowitz: Composer\ - \ of vocal, chamber and electronic music as well as music for dance, theatre and\ - \ opera. Furthermore, he is a voice artist, whistling and singing virtuoso who\ - \ is classically trained as tenor and countertenor and presents a wide array of\ - \ diverse and extended techniques. He has been artist in residence at STEIM, Amsterdam,\ - \ since 2010." - address: 'Oslo, Norway' - author: Alex Nowitz - bibtex: "@inproceedings{nime2011-music-Nowitz2011,\n abstract = {Program notes:\n\ - \nSince 2008 I have been performing and composing music for voice and live-electronics\ - \ using two Wii-remotes as gestural controllers. The live-electronics function\ - \ in two ways: as an extension of my voice and as an instrument as well. The music\ - \ creation is mainly based on live-sampling the voice. I also use pre-recorded\ - \ sounds and my own compositions. In addition, since the beginning of 2010 we\ - \ have been developing a new instrument, which goes beyond the technical possibilities\ - \ of the Wii-controllers. I call this instrument the Shells. Besides motion sensors\ - \ there are three more continuous controllers available: a pressure sensor, a\ - \ joystick control and ultrasound for distance measurement.\n\nAbout the performers:\n\ - \nAlex Nowitz: Composer of vocal, chamber and electronic music as well as music\ - \ for dance, theatre and opera. Furthermore, he is a voice artist, whistling and\ - \ singing virtuoso who is classically trained as tenor and countertenor and presents\ - \ a wide array of diverse and extended techniques. 
He has been artist in residence\ - \ at STEIM, Amsterdam, since 2010.},\n address = {Oslo, Norway},\n author = {Alex\ - \ Nowitz},\n booktitle = {Music Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ - \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ - \ title = {The Shells},\n url = {https://vimeo.com/26661484},\n year = {2011}\n\ - }\n" + \ = {Federico Visi},\n month = {June},\n pages = {63--65},\n publisher = {UFRGS},\n\ + \ title = {Vrengt: A Shared Body-Machine Instrument for Music-Dance Performance},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_music017.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: The Shells - url: https://vimeo.com/26661484 - year: 2011 + pages: 63--65 + publisher: UFRGS + title: 'Vrengt: A Shared Body-Machine Instrument for Music-Dance Performance' + url: http://www.nime.org/proceedings/2019/nime2019_music017.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Guillamat2011 - abstract: "Program notes:\nAn open playground for laptop improvisation and performance.\ - \ BiLE's performance will focus on semi-structured improvisation, with players\ - \ creating and manipulating sound using a variety of motion capture devices -\ - \ iPhones, Wiimotes, and Xbox Kinect. The data captured by each device, along\ - \ with analysed musical parameters, will be sent out over the shared network,\ - \ to be used by each performer as they see fit. The aim is to allow players to\ - \ latch onto other members of the group by mapping the shared data to their own\ - \ software parameters, creating moments of convergence between the ensemble. 
BiLE\ - \ takes an `instrumental' approach to performance, with each performer having\ - \ their own speaker, sonic identity and spatial location.\n\nAbout the performers:\n\ - \nBiLE (Birmingham Laptop Ensemble): A collaborative group of six composers, brought\ - \ together through their shared interest in live performance and improvisation.\ - \ BiLE has an open and inclusive attitude towards experimentation with sound,\ - \ and draws on the members' wide-ranging musical backgrounds." - address: 'Oslo, Norway' - author: Julien Guillamat and Charles Céleste Hutchins and Shelly Knotts and Norah - Lorway and Jorge Garcia Moncada and Chris Tarren - bibtex: "@inproceedings{nime2011-music-Guillamat2011,\n abstract = {Program notes:\n\ - An open playground for laptop improvisation and performance. BiLE's performance\ - \ will focus on semi-structured improvisation, with players creating and manipulating\ - \ sound using a variety of motion capture devices - iPhones, Wiimotes, and Xbox\ - \ Kinect. The data captured by each device, along with analysed musical parameters,\ - \ will be sent out over the shared network, to be used by each performer as they\ - \ see fit. The aim is to allow players to latch onto other members of the group\ - \ by mapping the shared data to their own software parameters, creating moments\ - \ of convergence between the ensemble. BiLE takes an `instrumental' approach to\ - \ performance, with each performer having their own speaker, sonic identity and\ - \ spatial location.\n\nAbout the performers:\n\nBiLE (Birmingham Laptop Ensemble):\ - \ A collaborative group of six composers, brought together through their shared\ - \ interest in live performance and improvisation. 
BiLE has an open and inclusive\ - \ attitude towards experimentation with sound, and draws on the members' wide-ranging\ - \ musical backgrounds.},\n address = {Oslo, Norway},\n author = {Julien Guillamat\ - \ and Charles Céleste Hutchins and Shelly Knotts and Norah Lorway and Jorge Garcia\ - \ Moncada and Chris Tarren},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Kjell Tore\ - \ Innervik and Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy\ - \ of Music},\n title = {BiLE (Birmingham Laptop Ensemble)},\n url = {https://vimeo.com/26619928},\n\ - \ year = {2011}\n}\n" + ID: nime19-music-Barbosa + abstract: 'The key for a collective process of free improvisation is the interaction, + dependence and surrender of its parts, so the resulting sound flux is more than + the sum of each individual layer. The We Bass performance is an exploration of + the symbiosis of two performers playing the same instrument: Their actions have + direct consequence on the resulting sound, challenging the other player with instability + and interference. From the experiments of the English scientist Thomas Young (1773-1829) + on the phenomena of diffraction and interference of light waves, we observe that + interferences generated by overlapping light waves can have a character of annihilation, + when they are out of phase (destructive interference), or a reinforcing character + when in phase (constructive interference). From this reflection we try to deepen + the discussion about the interferences of the performers inputs involved in a + free improvisation session. We seek a model of connection between the performers + that promotes processes of creation in the free improvisation, exploring the dialectics + between reinforcement actions (processes of interaction that reinforces a certain + sound moment) and movement actions (that destabilizes and transforms the flow). 
+ We Bass is a duo performance exploring the interactions between the musicians + playing one hybrid machine: an electric upright bass guitar with live electronics + processing. The instrument consists of an electric upright bass with movement + sensors and a live processing machine with a controller that interacts with the + sensors, changing some processing parameters and some controller mapping settings, + creating an instable ground for the musicians.' + address: 'Porto Alegre, Brazil' + author: Paulo Assis Barbosa and Miguel Antar + bibtex: "@inproceedings{nime19-music-Barbosa,\n abstract = {The key for a collective\ + \ process of free improvisation is the interaction, dependence and surrender of\ + \ its parts, so the resulting sound flux is more than the sum of each individual\ + \ layer. The We Bass performance is an exploration of the symbiosis of two performers\ + \ playing the same instrument: Their actions have direct consequence on the resulting\ + \ sound, challenging the other player with instability and interference. From\ + \ the experiments of the English scientist Thomas Young (1773-1829) on the phenomena\ + \ of diffraction and interference of light waves, we observe that interferences\ + \ generated by overlapping light waves can have a character of annihilation, when\ + \ they are out of phase (destructive interference), or a reinforcing character\ + \ when in phase (constructive interference). From this reflection we try to deepen\ + \ the discussion about the interferences of the performers inputs involved in\ + \ a free improvisation session. We seek a model of connection between the performers\ + \ that promotes processes of creation in the free improvisation, exploring the\ + \ dialectics between reinforcement actions (processes of interaction that reinforces\ + \ a certain sound moment) and movement actions (that destabilizes and transforms\ + \ the flow). 
We Bass is a duo performance exploring the interactions between the\ + \ musicians playing one hybrid machine: an electric upright bass guitar with live\ + \ electronics processing. The instrument consists of an electric upright bass\ + \ with movement sensors and a live processing machine with a controller that interacts\ + \ with the sensors, changing some processing parameters and some controller mapping\ + \ settings, creating an instable ground for the musicians.},\n address = {Porto\ + \ Alegre, Brazil},\n author = {Paulo Assis Barbosa and Miguel Antar},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Federico Visi},\n month = {June},\n pages = {66--67},\n\ + \ publisher = {UFRGS},\n title = {We Bass: inter(actions) on a hybrid instrument},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_music018.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: BiLE (Birmingham Laptop Ensemble) - url: https://vimeo.com/26619928 - year: 2011 + pages: 66--67 + publisher: UFRGS + title: 'We Bass: inter(actions) on a hybrid instrument' + url: http://www.nime.org/proceedings/2019/nime2019_music018.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Quay2011 - abstract: "Program notes:\nAs artists, we have learned that throughout the history\ - \ of mankind music and technology have co-evolved, shaping --- and being shaped\ - \ by --- human expression and creativity. The variety and intricacy of these recombination\ - \ processes contribute profoundly to the current diversity of performative structures\ - \ and aesthetics within the arts. Where art Thou? is a 15 minute theatrical performance\ - \ where sounds are controlled by sensors on the dancer's body. 
Blending a mixture\ - \ of electronic music and sound effects with dance and acting, this novel act\ - \ refocuses sensors from simplistic action-to-sound to contextualized aesthetic\ - \ and dramatic expression. The name reflects the itinerant quality of the stage\ - \ character as he travels through a world of sounds.\n\nAbout the performers:\n\ - \nYago de Quay: Interactive media artist, musician and researcher based in Porto.\ - \ His numerous installations and performances focus on user participation contributing\ - \ to modify the art piece itself. They always have a strong sonic component and\ - \ combine technologies to help create new modes of expression. Yago is currently\ - \ finishing his M.Sc. in Sound Design and Interactive Music at the Faculty of\ - \ Engineering, University of Porto.\n\nStåle Skogstad: PhD student in the fourMs\ - \ group at the University of Oslo. His research is focused on using real-time\ - \ full-body motion capture technology for musical interaction. This includes real-time\ - \ feature extraction from full body motion capture data and technical studies\ - \ of motion capture technologies. He is currently working with the Xsens MVN inertial\ - \ sensor suit." - address: 'Oslo, Norway' - author: Yago de Quay and Ståle Skogstad - bibtex: "@inproceedings{nime2011-music-Quay2011,\n abstract = {Program notes:\n\ - As artists, we have learned that throughout the history of mankind music and technology\ - \ have co-evolved, shaping --- and being shaped by --- human expression and creativity.\ - \ The variety and intricacy of these recombination processes contribute profoundly\ - \ to the current diversity of performative structures and aesthetics within the\ - \ arts. Where art Thou? is a 15 minute theatrical performance where sounds are\ - \ controlled by sensors on the dancer's body. 
Blending a mixture of electronic\ - \ music and sound effects with dance and acting, this novel act refocuses sensors\ - \ from simplistic action-to-sound to contextualized aesthetic and dramatic expression.\ - \ The name reflects the itinerant quality of the stage character as he travels\ - \ through a world of sounds.\n\nAbout the performers:\n\nYago de Quay: Interactive\ - \ media artist, musician and researcher based in Porto. His numerous installations\ - \ and performances focus on user participation contributing to modify the art\ - \ piece itself. They always have a strong sonic component and combine technologies\ - \ to help create new modes of expression. Yago is currently finishing his M.Sc.\ - \ in Sound Design and Interactive Music at the Faculty of Engineering, University\ - \ of Porto.\n\nStåle Skogstad: PhD student in the fourMs group at the University\ - \ of Oslo. His research is focused on using real-time full-body motion capture\ - \ technology for musical interaction. 
This includes real-time feature extraction\ - \ from full body motion capture data and technical studies of motion capture technologies.\ - \ He is currently working with the Xsens MVN inertial sensor suit.},\n address\ - \ = {Oslo, Norway},\n author = {Yago de Quay and Ståle Skogstad},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ - \ {June},\n publisher = {Norwegian Academy of Music},\n title = {Where Art Thou?:\ - \ Dance Jockey},\n url = {https://vimeo.com/26619980},\n year = {2011}\n}\n" + ID: nime19-music-introduction + address: 'Porto Alegre, Brazil' + author: Federico Visi and Rodrigo Schramm + bibtex: "@inproceedings{nime19-music-introduction,\n address = {Porto Alegre, Brazil},\n\ + \ author = {Federico Visi and Rodrigo Schramm},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Federico Visi},\n month = {June},\n pages = {4},\n publisher = {UFRGS},\n\ + \ title = {Introduction},\n url = {http://www.nime.org/proceedings/2019/nime2019_music00I.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: 'Where Art Thou?: Dance Jockey' - url: https://vimeo.com/26619980 - year: 2011 + pages: 4 + publisher: UFRGS + title: Introduction + url: http://www.nime.org/proceedings/2019/nime2019_music00I.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Sciajno2011 - abstract: "Program notes:\nIn this AV performance, images and sound interact: the\ - \ basic elements of the images (brightness, color, saturation, hue, dislocation\ - \ and relocation) are sensitive to the fundamental parameters of the sound being\ - \ generated at that 
moment. Sound waves (also controlled by light waves during\ - \ the performance) cross the physical world and alter the data stream that gives\ - \ life to digital video in the same way that molecules are transformed by the\ - \ sound contracting and expanding air particles in space.\n\nAbout the performers:\n\ - \nDomenico Sciajno: Double bass player and composer of acoustic and electronic\ - \ music. Thanks to his interest in improvisation and the influence of academic\ - \ education, his research currently focuses on the creative possibilities provided\ - \ by the interaction between acoustic instruments, indeterminacy factors and live\ - \ processing by electronic devices or computers." - address: 'Oslo, Norway' - author: Domenico Sciajno - bibtex: "@inproceedings{nime2011-music-Sciajno2011,\n abstract = {Program notes:\n\ - In this AV performance, images and sound interact: the basic elements of the images\ - \ (brightness, color, saturation, hue, dislocation and relocation) are sensitive\ - \ to the fundamental parameters of the sound being generated at that moment. Sound\ - \ waves (also controlled by light waves during the performance) cross the physical\ - \ world and alter the data stream that gives life to digital video in the same\ - \ way that molecules are transformed by the sound contracting and expanding air\ - \ particles in space.\n\nAbout the performers:\n\nDomenico Sciajno: Double bass\ - \ player and composer of acoustic and electronic music. 
Thanks to his interest\ - \ in improvisation and the influence of academic education, his research currently\ - \ focuses on the creative possibilities provided by the interaction between acoustic\ - \ instruments, indeterminacy factors and live processing by electronic devices\ - \ or computers.},\n address = {Oslo, Norway},\n author = {Domenico Sciajno},\n\ + ID: nime19-music-program + address: 'Porto Alegre, Brazil' + bibtex: "@inproceedings{nime19-music-program,\n address = {Porto Alegre, Brazil},\n\ \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ - \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {Sonolume},\n\ - \ url = {https://vimeo.com/26679879},\n year = {2011}\n}\n" + \ for Musical Expression},\n editor = {Federico Visi},\n month = {June},\n pages\ + \ = {5},\n publisher = {UFRGS},\n title = {NIME 2019 Concert Program},\n url =\ + \ {http://www.nime.org/proceedings/2019/nime2019_music0II.pdf},\n year = {2019}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Sonolume - url: https://vimeo.com/26679879 - year: 2011 + pages: 5 + publisher: UFRGS + title: NIME 2019 Concert Program + url: http://www.nime.org/proceedings/2019/nime2019_music0II.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Aase2011 - abstract: "Program notes:\n\nTrondheim Voices is in this performance exploring a\ - \ new tool in their work with voice sound and improvisation. The ensemble is working\ - \ with a tracking system for sound positioning to enable a given singer's position\ - \ on stage to directly influence the sound processing, both spatialisation and\ - \ effects. 
Through their improvisations and compositions they are exploring: a)\ - \ The effect of the sound “following”' the singers' movements on stage. b) The\ - \ flexible use of processed voice sound within the big vocal ensemble, through\ - \ the control each singer gets over the sound output by moving on stage. c) The\ - \ visualization of choices and changes regarding sound, both for the performer\ - \ and the audience, through the movements of each singer on stage.\n\nAbout the\ - \ performers:\n\nTrondheim Voices Professional ensemble, working with the endless\ - \ possibilities within the field of vocal improvisation, to find new expressions\ - \ and new music. Consisting of individual soloists, Trondheim Voices wishes to\ - \ develop what happens when the unique soloist quality of each singer is set to\ - \ interact with each other, and to find the collective sound and feeling. All\ - \ of the singers are educated at NTNU, Trondheim, Norway.\n\nSound: Asle Karstad.\ - \ Tracking system: John Torger Skjelstad" - address: 'Oslo, Norway' - author: Tone Åse and Siri Gjære and Live Maria Roggen and Heidi Skjerve and Ingrid - Lode and Kirsti Huke and Anita Kaasbøll and Silje R. Karlsen - bibtex: "@inproceedings{nime2011-music-Aase2011,\n abstract = {Program notes:\n\n\ - Trondheim Voices is in this performance exploring a new tool in their work with\ - \ voice sound and improvisation. The ensemble is working with a tracking system\ - \ for sound positioning to enable a given singer's position on stage to directly\ - \ influence the sound processing, both spatialisation and effects. Through their\ - \ improvisations and compositions they are exploring: a) The effect of the sound\ - \ “following”' the singers' movements on stage. b) The flexible use of processed\ - \ voice sound within the big vocal ensemble, through the control each singer gets\ - \ over the sound output by moving on stage. 
c) The visualization of choices and\ - \ changes regarding sound, both for the performer and the audience, through the\ - \ movements of each singer on stage.\n\nAbout the performers:\n\nTrondheim Voices\ - \ Professional ensemble, working with the endless possibilities within the field\ - \ of vocal improvisation, to find new expressions and new music. Consisting of\ - \ individual soloists, Trondheim Voices wishes to develop what happens when the\ - \ unique soloist quality of each singer is set to interact with each other, and\ - \ to find the collective sound and feeling. All of the singers are educated at\ - \ NTNU, Trondheim, Norway.\n\nSound: Asle Karstad. Tracking system: John Torger\ - \ Skjelstad},\n address = {Oslo, Norway},\n author = {Tone Åse and Siri Gjære\ - \ and Live Maria Roggen and Heidi Skjerve and Ingrid Lode and Kirsti Huke and\ - \ Anita Kaasbøll and Silje R. Karlsen},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ - \ {Norwegian Academy of Music},\n title = {Trondheim Voices},\n url = {https://vimeo.com/26680007},\n\ - \ year = {2011}\n}\n" + ID: nime19-music-PC-members + address: 'Porto Alegre, Brazil' + bibtex: "@inproceedings{nime19-music-PC-members,\n address = {Porto Alegre, Brazil},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Federico Visi},\n month = {June},\n pages\ + \ = {6},\n publisher = {UFRGS},\n title = {NIME 2019 Program Committee Members},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_musicIII.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Federico Visi month: June - publisher: Norwegian Academy of Music - title: Trondheim 
Voices - url: https://vimeo.com/26680007 - year: 2011 + pages: 6 + publisher: UFRGS + title: NIME 2019 Program Committee Members + url: http://www.nime.org/proceedings/2019/nime2019_musicIII.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: nime2011-music-Hsu2011 - abstract: "Program notes:\n\nInterstices AP is a structured audio-visual solo improvisation,\ - \ using the multitouch Airplane Controller to manipulate live electronic sound\ - \ and interactive animations. During the piece, Bill Hsu will be using the Airplane\ - \ Controller in combination with his PSHIVA particle system software, to synthesize\ - \ and interact with generative sound and animations. The visual component of Interstices\ - \ AP is a physics-based simulation of a particle system. Particles, images and\ - \ other components interact with physical gestures in a fluid like system; the\ - \ results resemble asymmetric, constantly evolving Rorschach blots that open up\ - \ a wide range of visual associations. For more details, see Bill Hsu's poster\ - \ in the conference proceedings.\n\nAbout the performers:\n\nBill Hsu: Associate\ - \ Professor of Computer Science at San Francisco State University. His work with\ - \ real-time audiovisual performance systems has been presented at (among others)\ - \ SMC 2009 (Porto), Harvestworks Festival 2009 (New York), Fete Quaqua 2008 (London),\ - \ MIX Festival 2007 and 2009 (New York), and Stimme+ 2006 (Karlsruhe).\n\nAlain\ - \ Crevoisier: Senior researcher at the Music Conservatory of Geneva, Switzerland.\ - \ He is the founder of Future-instruments.net, a collaborative research network\ - \ active in the field of new musical interfaces and interactive technologies.\ - \ The latest realization is the Airplane controller, a portable system that makes\ - \ possible to transform any flat surface, into a multitouch interface." 
- address: 'Oslo, Norway' - author: Bill Hsu and Alain Crevoisier - bibtex: "@inproceedings{nime2011-music-Hsu2011,\n abstract = {Program notes:\n\n\ - Interstices AP is a structured audio-visual solo improvisation, using the multitouch\ - \ Airplane Controller to manipulate live electronic sound and interactive animations.\ - \ During the piece, Bill Hsu will be using the Airplane Controller in combination\ - \ with his PSHIVA particle system software, to synthesize and interact with generative\ - \ sound and animations. The visual component of Interstices AP is a physics-based\ - \ simulation of a particle system. Particles, images and other components interact\ - \ with physical gestures in a fluid like system; the results resemble asymmetric,\ - \ constantly evolving Rorschach blots that open up a wide range of visual associations.\ - \ For more details, see Bill Hsu's poster in the conference proceedings.\n\nAbout\ - \ the performers:\n\nBill Hsu: Associate Professor of Computer Science at San\ - \ Francisco State University. His work with real-time audiovisual performance\ - \ systems has been presented at (among others) SMC 2009 (Porto), Harvestworks\ - \ Festival 2009 (New York), Fete Quaqua 2008 (London), MIX Festival 2007 and 2009\ - \ (New York), and Stimme+ 2006 (Karlsruhe).\n\nAlain Crevoisier: Senior researcher\ - \ at the Music Conservatory of Geneva, Switzerland. He is the founder of Future-instruments.net,\ - \ a collaborative research network active in the field of new musical interfaces\ - \ and interactive technologies. 
The latest realization is the Airplane controller,\ - \ a portable system that makes possible to transform any flat surface, into a\ - \ multitouch interface.},\n address = {Oslo, Norway},\n author = {Bill Hsu and\ - \ Alain Crevoisier},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ - \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ - \ title = {Interstices AP},\n url = {https://vimeo.com/26629820},\n year = {2011}\n\ - }\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: Interstices AP - url: https://vimeo.com/26629820 - year: 2011 - - -- ENTRYTYPE: inproceedings - ID: nime2011-music-Hsu2011a - abstract: "Performer notes:\n\nFlayed/Flock is a structured audio-visual improvisation\ - \ for three musicians, utilizing live acoustic and electronic sound and interactive\ - \ animations. The visual component of Flayed/Flock is an enhanced flocking simulation\ - \ that interacts with real-time audio from the performance of improvising musicians.\ - \ Abstract patterns develop out of the flocking behavior; the flocks are also\ - \ able to coalesce into well-defined symbols and forms such as crescents and stars,\ - \ all while moving in a natural-looking manner consistent with flocking behavior.\ - \ For more details, see Bill Hsu's poster in the conference proceedings.\n\nAbout\ - \ the performers:\n\nBill Hsu: Associate Professor of Computer Science at San\ - \ Francisco State University. 
His work with real-time audiovisual performance\ - \ systems has been presented at (among others) SMC 2009 (Porto), Harvestworks\ - \ Festival 2009 (New York), Fete Quaqua 2008 (London), MIX Festival 2007 and 2009\ - \ (New York), and Stimme+ 2006 (Karlsruhe).\n\nHåvard Skaset (guitar) and Guro\ - \ Skumsnes Moe (bass): The Oslo-based duo works intensively in the borders between\ - \ improv, noise and rock. Skaset and Moe play in bands including Bluefaced People,\ - \ Art Directors, Sult, Mirror Trio, SEKSTETT, Telling Stories About Trees and\ - \ MOE. They have been working with Christian Wolff, Pauline Oliveros, Fred Frith,\ - \ Ikue Mori, Okkyung Lee, Frode Gjerstad and many more." - address: 'Oslo, Norway' - author: Bill Hsu and Håvard Skaset and Guro Skumsnes Moe - bibtex: "@inproceedings{nime2011-music-Hsu2011a,\n abstract = {Performer notes:\n\ - \nFlayed/Flock is a structured audio-visual improvisation for three musicians,\ - \ utilizing live acoustic and electronic sound and interactive animations. The\ - \ visual component of Flayed/Flock is an enhanced flocking simulation that interacts\ - \ with real-time audio from the performance of improvising musicians. Abstract\ - \ patterns develop out of the flocking behavior; the flocks are also able to coalesce\ - \ into well-defined symbols and forms such as crescents and stars, all while moving\ - \ in a natural-looking manner consistent with flocking behavior. 
For more details,\ - \ see Bill Hsu's poster in the conference proceedings.\n\nAbout the performers:\n\ - \nBill Hsu: Associate Professor of Computer Science at San Francisco State University.\ - \ His work with real-time audiovisual performance systems has been presented at\ - \ (among others) SMC 2009 (Porto), Harvestworks Festival 2009 (New York), Fete\ - \ Quaqua 2008 (London), MIX Festival 2007 and 2009 (New York), and Stimme+ 2006\ - \ (Karlsruhe).\n\nHåvard Skaset (guitar) and Guro Skumsnes Moe (bass): The Oslo-based\ - \ duo works intensively in the borders between improv, noise and rock. Skaset\ - \ and Moe play in bands including Bluefaced People, Art Directors, Sult, Mirror\ - \ Trio, SEKSTETT, Telling Stories About Trees and MOE. They have been working\ - \ with Christian Wolff, Pauline Oliveros, Fred Frith, Ikue Mori, Okkyung Lee,\ - \ Frode Gjerstad and many more.},\n address = {Oslo, Norway},\n author = {Bill\ - \ Hsu and Håvard Skaset and Guro Skumsnes Moe},\n booktitle = {Music Proceedings\ + ID: nime2008-music-Girolin2008 + abstract: "Program notes:\nLo specchio confuso dall'ombra can be translated as “The\ + \ mirror confused by its shadow” and it is between a distributed installation\ + \ and a concert, in which opposing groups of performers in two remote places play\ + \ solo or interact.\nThe audience (two people at a time, one for each installation)\ + \ activates video and sound transformations, depending on the space they occupy\ + \ and their gesture. The two installation are in the Foyer and in the Auditorium,\ + \ respectively, so the two persons from the audience cannot see and talk each\ + \ other. Multimodal data and expressive gesture cues are extracted in real- time\ + \ by an EyesWeb patch, interacting and playing with the electronic performer.\ + \ The interaction occurs both between the electronic performer and the two places\ + \ where the audience has access, and between the two remote installations. 
There\ + \ are two different levels of intervention in the audio and video transformation:\ + \ autonomous, depending on the single person and conditioned, depending on the\ + \ behaviour and the actions occurring in the other, separate installation.\nFurther,\ + \ the entrance of the concert hall has microphones, which capture words, sentences,\ + \ coughs, laughs or other noise, which are transformed in real-time and thus entering\ + \ into the piece.\nLo specchio confuso dall'ombra can't bind the audience remain\ + \ seated or follow a specific pattern in his behaviour. His duration is indefinite:\ + \ it changes every time it is performed.\n\nAbout the performers:\nRoberto Girolin\ + \ (1975) was born in Pordenone, Italy, and after studying of the classical guitar\ + \ he began to study the piano and composition at the \"J. Tomadini\" Conservatory\ + \ in Udine. He studied the vocal and instrumental counterpoint, graduating in\ + \ choral music and conducting in the same Conservatory. He has conducted many\ + \ choirs and orchestras, exploring different kinds of repertories from Gregorian\ + \ music to contemporary music.\nHe has deepened the study of contemporary music\ + \ at the University of Udine with Dr.A.Orcalli and then with Dr.N.Venzina at \"\ + B.Maderna\" Archive in Bologna (Italy). 
He has followed several Masterclasses\ + \ and seminars: choral music, chamber music, composition (Salvatore Sciarrino,\ + \ Fabio Nieder, Mauro Bonifacio), electronic music (Lelio Camilleri, Agostino\ + \ Di Scipio), a Sound Design course with Trevor Wishart, an Audio Digital Signal\ + \ Processing for Musical Applications (Lab workshop, lessons and applications)\ + \ with Giuseppe Di Giugno and live electronics in Luigi Nono's works with Alvise\ + \ Vidolin and André Richard (Experimental Studio Freiburg für Akustische Kunst).\n\ + He graduated with full marks in Electronic Music and Multimedia at the Musical\ + \ Academy of Pescara (Italy) and in 2006 he also got his degree at the Conservatory\ + \ of Venice under the direction of Alvise Vidolin with full marks (cum Laude).\n\ + He is actively involved in performing and investigating the compositional and\ + \ performance potential offered by electronic&multimedia music systems. His music\ + \ is performed in Italy and abroad. He has recently won the “Call 2007”, (Italian\ + \ CEMAT Competition) and a Mention at the 34th \"Concours Internationaux de Musique\ + \ et d'Art Sonore Electroacoustiques de Bourges\", France.\n\nPaolo Coletta, Simone\ + \ Ghisio and Gualtiero Volpe - EyesWeb interactive systems design" + address: 'Genova, Italy' + author: Roberto Girolin + bibtex: "@inproceedings{nime2008-music-Girolin2008,\n abstract = {Program notes:\n\ + Lo specchio confuso dall'ombra can be translated as “The mirror confused by its\ + \ shadow” and it is between a distributed installation and a concert, in which\ + \ opposing groups of performers in two remote places play solo or interact.\n\ + The audience (two people at a time, one for each installation) activates video\ + \ and sound transformations, depending on the space they occupy and their gesture.\ + \ The two installation are in the Foyer and in the Auditorium, respectively, so\ + \ the two persons from the audience cannot see and talk each other. 
Multimodal\ + \ data and expressive gesture cues are extracted in real- time by an EyesWeb patch,\ + \ interacting and playing with the electronic performer. The interaction occurs\ + \ both between the electronic performer and the two places where the audience\ + \ has access, and between the two remote installations. There are two different\ + \ levels of intervention in the audio and video transformation: autonomous, depending\ + \ on the single person and conditioned, depending on the behaviour and the actions\ + \ occurring in the other, separate installation.\nFurther, the entrance of the\ + \ concert hall has microphones, which capture words, sentences, coughs, laughs\ + \ or other noise, which are transformed in real-time and thus entering into the\ + \ piece.\nLo specchio confuso dall'ombra can't bind the audience remain seated\ + \ or follow a specific pattern in his behaviour. His duration is indefinite: it\ + \ changes every time it is performed.\n\nAbout the performers:\nRoberto Girolin\ + \ (1975) was born in Pordenone, Italy, and after studying of the classical guitar\ + \ he began to study the piano and composition at the \"J. Tomadini\" Conservatory\ + \ in Udine. He studied the vocal and instrumental counterpoint, graduating in\ + \ choral music and conducting in the same Conservatory. He has conducted many\ + \ choirs and orchestras, exploring different kinds of repertories from Gregorian\ + \ music to contemporary music.\nHe has deepened the study of contemporary music\ + \ at the University of Udine with Dr.A.Orcalli and then with Dr.N.Venzina at \"\ + B.Maderna\" Archive in Bologna (Italy). 
He has followed several Masterclasses\ + \ and seminars: choral music, chamber music, composition (Salvatore Sciarrino,\ + \ Fabio Nieder, Mauro Bonifacio), electronic music (Lelio Camilleri, Agostino\ + \ Di Scipio), a Sound Design course with Trevor Wishart, an Audio Digital Signal\ + \ Processing for Musical Applications (Lab workshop, lessons and applications)\ + \ with Giuseppe Di Giugno and live electronics in Luigi Nono's works with Alvise\ + \ Vidolin and André Richard (Experimental Studio Freiburg für Akustische Kunst).\n\ + He graduated with full marks in Electronic Music and Multimedia at the Musical\ + \ Academy of Pescara (Italy) and in 2006 he also got his degree at the Conservatory\ + \ of Venice under the direction of Alvise Vidolin with full marks (cum Laude).\n\ + He is actively involved in performing and investigating the compositional and\ + \ performance potential offered by electronic&multimedia music systems. His music\ + \ is performed in Italy and abroad. He has recently won the “Call 2007”, (Italian\ + \ CEMAT Competition) and a Mention at the 34th \"Concours Internationaux de Musique\ + \ et d'Art Sonore Electroacoustiques de Bourges\", France.\n\nPaolo Coletta, Simone\ + \ Ghisio and Gualtiero Volpe - EyesWeb interactive systems design},\n address\ + \ = {Genova, Italy},\n author = {Roberto Girolin},\n booktitle = {Music Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ - \ = {Norwegian Academy of Music},\n title = {Flayed/Flock},\n url = {https://vimeo.com/26629835},\n\ - \ year = {2011}\n}\n" + \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ + \ title = {Lo specchio confuso dall'ombra},\n year = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Roberto 
Doati month: June - publisher: Norwegian Academy of Music - title: Flayed/Flock - url: https://vimeo.com/26629835 - year: 2011 + publisher: Casa Paganini + title: Lo specchio confuso dall'ombra + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2011-music-IvicaIcoBukvicDirector2011 - abstract: "Program notes:\n13 (Ivica Ico Bukvic): is a game of prime numbers and\ - \ primal instincts pitting timbre against rhythm. Driven by conductor's oversight\ - \ over an array of performer-specific and ensemble-wide parameters, a networked\ - \ ensemble acts as one large meta-tracker where each individual performer contributes\ - \ its own gesture-driven motives or tracks. The ensuing meta-tracker texture is\ - \ superimposed against improvised acoustic percussion in a search of a meaningful\ - \ discourse and ultimately musical synergy.\n\nSerene (Ivica Ico Bukvic): ...the\ - \ one moment in the day when the world melts away and we catch a glimpse of life\ - \ that just is... a celebration of this moment through juxtaposition of Taiji\ - \ (Tai Chi Chuan) choreography and music...\n\nCitadel for soprano and L2Ork (Ivica\ - \ Ico Bukvic) draws inspiration from a famous poem \"Himna Slobodi\" (Hymn to\ - \ Freedom) by the 17th century Croatian poet Ivan Gundulic. As the first piece\ - \ ever written for the newfound ensemble, it relies upon pervasive tonality, in\ - \ many ways posing as an electronic counterpart to a traditional string ensemble.\ - \ Using the infinite-bow metaphor to create lush tonal harmonies the piece forms\ - \ a compelling aural foundation for a lyrical showcase of soloist's vocal talent.\n\ - \n=== About the performers:\n\nL2Ork: Founded by Dr. Ivica Ico Bukvic in May 2009,\ - \ is part of the latest interdisciplinary initiative by the Virginia Tech Music\ - \ Department's Digital Interactive Sound & Intermedia Studio (DISIS). 
As an emerging\ - \ contemporary intermedia ensemble with a uniquely open design, L2Ork thrives\ - \ upon the quintessential form of collaboration found in the western classical\ - \ orchestra and its cross-pollination with increasingly accessible human-computer\ - \ interaction technologies for the purpose of exploring expressive power of gesture,\ - \ communal interaction, discipline-agnostic environment, and the multidimensionality\ - \ of arts.\n\nMembers: Ivica Ico Bukvic (Director), John Elder, Hillary Guilliams,\ - \ Bennett Layman, David Mudre, Steven Querry, Philip Seward, Andrew Street, Elizabeth\ - \ Ullrich and Adam Wirdzek\n\n=== Recorded at:\n\n11th International Conference\ - \ on New Interfaces for Musical Expression. 30 May - 1 June 2011, Oslo, Norway.\n\ - \nhttp://www.nime2011.org\nAbout the performers:\n\nL2Ork Founded by Dr. Ivica\ - \ Ico Bukvic in May 2009, is part of the latest interdisciplinary initiative by\ - \ the Virginia Tech Music Department's Digital Interactive Sound & Intermedia\ - \ Studio (DISIS). 
As an emerging contemporary intermedia ensemble with a uniquely\ - \ open design, L2Ork thrives upon the quintessential form of collaboration found\ - \ in the western classical orchestra and its cross-pollination with increasingly\ - \ accessible human-computer interaction technologies for the purpose of exploring\ - \ expressive power of gesture, communal interaction, discipline-agnostic environment,\ - \ and the multidimensionality of arts.\n\nMembers: Ivica Ico Bukvic (Director),\ - \ John Elder, Hillary Guilliams, Bennett Layman, David Mudre, Steven Querry, Philip\ - \ Seward, Andrew Street, Elizabeth Ullrich and Adam Wirdzek" - address: 'Oslo, Norway' - author: Ivica Ico Bukvic and John Elder and Hillary Guilliams and Bennett Layman - and David Mudre and Steven Querry and Philip Seward and Andrew Street and Elizabeth - Ullrich and Adam Wirdzek - bibtex: "@inproceedings{nime2011-music-IvicaIcoBukvicDirector2011,\n abstract =\ - \ {Program notes:\n13 (Ivica Ico Bukvic): is a game of prime numbers and primal\ - \ instincts pitting timbre against rhythm. Driven by conductor's oversight over\ - \ an array of performer-specific and ensemble-wide parameters, a networked ensemble\ - \ acts as one large meta-tracker where each individual performer contributes its\ - \ own gesture-driven motives or tracks. The ensuing meta-tracker texture is superimposed\ - \ against improvised acoustic percussion in a search of a meaningful discourse\ - \ and ultimately musical synergy.\n\nSerene (Ivica Ico Bukvic): ...the one moment\ - \ in the day when the world melts away and we catch a glimpse of life that just\ - \ is... a celebration of this moment through juxtaposition of Taiji (Tai Chi Chuan)\ - \ choreography and music...\n\nCitadel for soprano and L2Ork (Ivica Ico Bukvic)\ - \ draws inspiration from a famous poem \"Himna Slobodi\" (Hymn to Freedom) by\ - \ the 17th century Croatian poet Ivan Gundulic. 
As the first piece ever written\ - \ for the newfound ensemble, it relies upon pervasive tonality, in many ways posing\ - \ as an electronic counterpart to a traditional string ensemble. Using the infinite-bow\ - \ metaphor to create lush tonal harmonies the piece forms a compelling aural foundation\ - \ for a lyrical showcase of soloist's vocal talent.\n\n=== About the performers:\n\ - \nL2Ork: Founded by Dr. Ivica Ico Bukvic in May 2009, is part of the latest interdisciplinary\ - \ initiative by the Virginia Tech Music Department's Digital Interactive Sound\ - \ \\& Intermedia Studio (DISIS). As an emerging contemporary intermedia ensemble\ - \ with a uniquely open design, L2Ork thrives upon the quintessential form of collaboration\ - \ found in the western classical orchestra and its cross-pollination with increasingly\ - \ accessible human-computer interaction technologies for the purpose of exploring\ - \ expressive power of gesture, communal interaction, discipline-agnostic environment,\ - \ and the multidimensionality of arts.\n\nMembers: Ivica Ico Bukvic (Director),\ - \ John Elder, Hillary Guilliams, Bennett Layman, David Mudre, Steven Querry, Philip\ - \ Seward, Andrew Street, Elizabeth Ullrich and Adam Wirdzek\n\n=== Recorded at:\n\ - \n11th International Conference on New Interfaces for Musical Expression. 30 May\ - \ - 1 June 2011, Oslo, Norway.\n\nhttp://www.nime2011.org\nAbout the performers:\n\ - \nL2Ork Founded by Dr. Ivica Ico Bukvic in May 2009, is part of the latest interdisciplinary\ - \ initiative by the Virginia Tech Music Department's Digital Interactive Sound\ - \ & Intermedia Studio (DISIS). 
As an emerging contemporary intermedia ensemble\ - \ with a uniquely open design, L2Ork thrives upon the quintessential form of collaboration\ - \ found in the western classical orchestra and its cross-pollination with increasingly\ - \ accessible human-computer interaction technologies for the purpose of exploring\ - \ expressive power of gesture, communal interaction, discipline-agnostic environment,\ - \ and the multidimensionality of arts.\n\nMembers: Ivica Ico Bukvic (Director),\ - \ John Elder, Hillary Guilliams, Bennett Layman, David Mudre, Steven Querry, Philip\ - \ Seward, Andrew Street, Elizabeth Ullrich and Adam Wirdzek},\n address = {Oslo,\ - \ Norway},\n author = {Ivica Ico Bukvic and John Elder and Hillary Guilliams and\ - \ Bennett Layman and David Mudre and Steven Querry and Philip Seward and Andrew\ - \ Street and Elizabeth Ullrich and Adam Wirdzek},\n booktitle = {Music Proceedings\ + ID: nime2008-music-Ferrari2008 + abstract: "Program notes:\nBased on the installation \"Mappe per Affetti Erranti\"\ + , designed and developed by Antonio Camurri, Corrado Canepa, Nicola Ferrari, Gualtiero\ + \ Volpe texts from Edmund Spenser's The Faire Queen and William Shakespeare's\ + \ King Lear with support of EU ICT Project SAME.\nThe bow is a theatrical mise-en-scene\ + \ of the installation Mappe per Affetti Erranti. During the Science Festival 2007,\ + \ as a preparatory work for the EU ICT Project SAME on active listening (www.sameproject.org),\ + \ the audience was invited to explore and experience a song by John Dowland (see\ + \ the paper on these proceedings by Camurri et al). The audience could walk inside\ + \ the polyphonic texture, listen to the singles parts, change the expressive quality\ + \ of musical interpretation by their movement on the stage of Casa Paganini analysed\ + \ with EyesWeb XMI. Aesthetically, the most interesting result consists in the\ + \ game of hiding and revealing a known piece. 
The idea could be matched with the\ + \ classical theatrical topos of recognition. So, the musical potentiality of the\ + \ 'interactive performance' of a prerecorded music becomes a new dramaturgical\ + \ structure.\nRoberto Tiranti and his madrigalistic group recorded, under the\ + \ supervision of Marco Canepa, different anamorphic interpretations of a bachian\ + \ choral. Thanks to the interactive application developed with EyesWeb XMI, the\ + \ group of dancers conducted by the choreographer Giovanni Di Cicco, mix and mould\ + \ the recorded music material in real time. At the same time, the live sound of\ + \ the vocal group explores the whole space of Casa Paganini, as a global (both\ + \ real and imaginary) musical instrument. In a metamorphic game where, according\ + \ to Corrado Canepa's compositive lesson, electronic and acoustic technologies\ + \ merge and interchange their specificity, this interactive score of losing and\ + \ finding, multiplying and distillating the ancient bachian palimpsest tries to\ + \ tell the dramatic history of King Lear, the most tragic western figure of difficulty\ + \ to reach the affects you possess without being able to know or express.\n\n\ + About the performers:\nNicola Ferrari was born in 1973. He studied composition\ + \ with Adriano Guarnieri and took his degree at 'G. B. Martini' Conservatory in\ + \ Bologna. He took his Master Degree and PhD from the Faculty of Arts and Philosophy\ + \ at University of Genoa. Since 2005 he is a member of the staff of the InfoMus\ + \ Lab. For many years he directed the 'S.Anna' polyphonic choir. 
He wrote scores\ + \ for theatrical performances.\n\nVocalists - Roberto Tiranti (tenor and vocal\ + \ conductor), Valeria Bruzzone (alto),\nChiara Longobardi (soprano), Edoardo Valle\ + \ (bass)\nDancers - Giovanni Di Cicco (choreography), Luca Alberti, Filippo Bandiera,\ + \ Nicola Marrapodi\nRecording engineer and music consultant - Marco Canepa\nSound\ + \ Engineers - Corrado Canepa (director), Chiara Erra (assistant)\nEyesWeb interactive\ + \ systems design - Paolo Coletta, Barbara Mazzarino, Gualtiero Volpe" + address: 'Genova, Italy' + author: Nicola Ferrari + bibtex: "@inproceedings{nime2008-music-Ferrari2008,\n abstract = {Program notes:\n\ + Based on the installation \"Mappe per Affetti Erranti\", designed and developed\ + \ by Antonio Camurri, Corrado Canepa, Nicola Ferrari, Gualtiero Volpe texts from\ + \ Edmund Spenser's The Faire Queen and William Shakespeare's King Lear with support\ + \ of EU ICT Project SAME.\nThe bow is a theatrical mise-en-scene of the installation\ + \ Mappe per Affetti Erranti. During the Science Festival 2007, as a preparatory\ + \ work for the EU ICT Project SAME on active listening (www.sameproject.org),\ + \ the audience was invited to explore and experience a song by John Dowland (see\ + \ the paper on these proceedings by Camurri et al). The audience could walk inside\ + \ the polyphonic texture, listen to the singles parts, change the expressive quality\ + \ of musical interpretation by their movement on the stage of Casa Paganini analysed\ + \ with EyesWeb XMI. Aesthetically, the most interesting result consists in the\ + \ game of hiding and revealing a known piece. The idea could be matched with the\ + \ classical theatrical topos of recognition. 
So, the musical potentiality of the\ + \ 'interactive performance' of a prerecorded music becomes a new dramaturgical\ + \ structure.\nRoberto Tiranti and his madrigalistic group recorded, under the\ + \ supervision of Marco Canepa, different anamorphic interpretations of a bachian\ + \ choral. Thanks to the interactive application developed with EyesWeb XMI, the\ + \ group of dancers conducted by the choreographer Giovanni Di Cicco, mix and mould\ + \ the recorded music material in real time. At the same time, the live sound of\ + \ the vocal group explores the whole space of Casa Paganini, as a global (both\ + \ real and imaginary) musical instrument. In a metamorphic game where, according\ + \ to Corrado Canepa's compositive lesson, electronic and acoustic technologies\ + \ merge and interchange their specificity, this interactive score of losing and\ + \ finding, multiplying and distillating the ancient bachian palimpsest tries to\ + \ tell the dramatic history of King Lear, the most tragic western figure of difficulty\ + \ to reach the affects you possess without being able to know or express.\n\n\ + About the performers:\nNicola Ferrari was born in 1973. He studied composition\ + \ with Adriano Guarnieri and took his degree at 'G. B. Martini' Conservatory in\ + \ Bologna. He took his Master Degree and PhD from the Faculty of Arts and Philosophy\ + \ at University of Genoa. Since 2005 he is a member of the staff of the InfoMus\ + \ Lab. For many years he directed the 'S.Anna' polyphonic choir. 
He wrote scores\ + \ for theatrical performances.\n\nVocalists - Roberto Tiranti (tenor and vocal\ + \ conductor), Valeria Bruzzone (alto),\nChiara Longobardi (soprano), Edoardo Valle\ + \ (bass)\nDancers - Giovanni Di Cicco (choreography), Luca Alberti, Filippo Bandiera,\ + \ Nicola Marrapodi\nRecording engineer and music consultant - Marco Canepa\nSound\ + \ Engineers - Corrado Canepa (director), Chiara Erra (assistant)\nEyesWeb interactive\ + \ systems design - Paolo Coletta, Barbara Mazzarino, Gualtiero Volpe},\n address\ + \ = {Genova, Italy},\n author = {Nicola Ferrari},\n booktitle = {Music Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ - \ = {Norwegian Academy of Music},\n title = {L2Ork},\n url = {https://vimeo.com/26678669},\n\ - \ url2 = {https://vimeo.com/26678662},\n url3 = {https://vimeo.com/26643771},\n\ - \ year = {2011}\n}\n" + \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ + \ title = {The Bow is Bent and Drawn},\n year = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Roberto Doati month: June - publisher: Norwegian Academy of Music - title: L2Ork - url: https://vimeo.com/26678669 - url2: https://vimeo.com/26678662 - url3: https://vimeo.com/26643771 - year: 2011 + publisher: Casa Paganini + title: The Bow is Bent and Drawn + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2011-music-Bokowiec2011 - abstract: "Program notes: V'Oct(Ritual) places the audience inside a circular liminal\ - \ space of sonic evocation and features the Bodycoder System© the first generation\ - \ of which was developed by the artists in 1995. 
The Bodycoder interface is a\ - \ flexible sensor array worn on the body of a performer that sends data generated\ - \ by movement to an MSP environment via radio. All vocalisations, decision making,\ - \ navigation of the MSP environment and qualities of expressivity are selected,\ - \ initiated and manipulated by the performer, uniquely, this also includes access\ - \ to gestural control of live 8-channel spatialization. This piece is fully scored\ - \ with few moments of improvisation.\n\nAbout the performers: Julie Wilson-Bokowiec:\ - \ has created new works in opera/music theatre, contemporary dance and theatre\ - \ and has worked with Lindsey Kemp, Genesis P-Orridge, Psychic TV and Hermann\ - \ Nitsch. Julie is a Research Fellow at CeReNem (Centre for Research in New Music)\ - \ at the University of Huddersfield.\nMark Bokowiec: is the manager of the electroacoustic\ - \ music studios and the Spacialization and Interactive Research Lab at the University\ - \ of Huddersfield where he also lectures in interactive performance, interface\ - \ design and composition. Mark began creating work with interactive technologies\ - \ in 1995." - address: 'Oslo, Norway' - author: Mark Bokowiec and Julie Wilson-Bokowiec - bibtex: "@inproceedings{nime2011-music-Bokowiec2011,\n abstract = {Program notes:\ - \ V'Oct(Ritual) places the audience inside a circular liminal space of sonic evocation\ - \ and features the Bodycoder System© the first generation of which was developed\ - \ by the artists in 1995. The Bodycoder interface is a flexible sensor array\ - \ worn on the body of a performer that sends data generated by movement to an\ - \ MSP environment via radio. All vocalisations, decision making, navigation of\ - \ the MSP environment and qualities of expressivity are selected, initiated and\ - \ manipulated by the performer, uniquely, this also includes access to gestural\ - \ control of live 8-channel spatialization. 
This piece is fully scored with few\ - \ moments of improvisation.\n\nAbout the performers: Julie Wilson-Bokowiec: has\ - \ created new works in opera/music theatre, contemporary dance and theatre and\ - \ has worked with Lindsey Kemp, Genesis P-Orridge, Psychic TV and Hermann Nitsch.\ - \ Julie is a Research Fellow at CeReNem (Centre for Research in New Music) at\ - \ the University of Huddersfield.\nMark Bokowiec: is the manager of the electroacoustic\ - \ music studios and the Spacialization and Interactive Research Lab at the University\ - \ of Huddersfield where he also lectures in interactive performance, interface\ - \ design and composition. Mark began creating work with interactive technologies\ - \ in 1995.},\n address = {Oslo, Norway},\n author = {Mark Bokowiec and Julie Wilson-Bokowiec},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ - \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {V'Oct(Ritual)},\n\ - \ url = {https://vimeo.com/27694214},\n year = {2011}\n}\n" + ID: nime2008-music-Klauer2008 + abstract: "Program notes:\nPutting a distance sensor under the scroll of the instrument\ + \ and an inclination sensor on the wrist, the detection of the displacements of\ + \ the limbs of the interpreter becomes possible. These displacements, drawn onto\ + \ a cartesian plane, give the coordinates of a track in an ideal performing space,\ + \ whose third dimension is increased and formed by the passing of time. Actually,\ + \ the computer permits to assimilate to the aforesaid track the sounding path\ + \ proposed by the interpreter, hence to rehear it. 
Also in the latter case, the\ + \ coordinates to access it are given by current gestures, therefore the dimension\ + \ of time results bundled, somehow like considering a parchment palimpsest: the\ + \ sounding form returned by the computer results increasingly dense and inexplicable\ + \ and needs an electroacoustic exegesis that unleash it at least in shreds.\n\ + The procedures of musical production are here a metaphor for knowledge; alike\ + \ are the compositional methods at the root of the score, which providing the\ + \ prescriptions of the musical path, portrays in addition a mental track.\n\n\ + About the performer:\nGiorgio Klauer studied electronic music, instrumental composition,\ + \ flute and musicology in Trieste, where he was born in 1976, in Cremona and in\ + \ Liège. He is professor at the Conservatory of Como, school of music and sound\ + \ technologies." + address: 'Genova, Italy' + author: Giorgio Klauer + bibtex: "@inproceedings{nime2008-music-Klauer2008,\n abstract = {Program notes:\n\ + Putting a distance sensor under the scroll of the instrument and an inclination\ + \ sensor on the wrist, the detection of the displacements of the limbs of the\ + \ interpreter becomes possible. These displacements, drawn onto a cartesian plane,\ + \ give the coordinates of a track in an ideal performing space, whose third dimension\ + \ is increased and formed by the passing of time. Actually, the computer permits\ + \ to assimilate to the aforesaid track the sounding path proposed by the interpreter,\ + \ hence to rehear it. 
Also in the latter case, the coordinates to access it are\ + \ given by current gestures, therefore the dimension of time results bundled,\ + \ somehow like considering a parchment palimpsest: the sounding form returned\ + \ by the computer results increasingly dense and inexplicable and needs an electroacoustic\ + \ exegesis that unleash it at least in shreds.\nThe procedures of musical production\ + \ are here a metaphor for knowledge; alike are the compositional methods at the\ + \ root of the score, which providing the prescriptions of the musical path, portrays\ + \ in addition a mental track.\n\nAbout the performer:\nGiorgio Klauer studied\ + \ electronic music, instrumental composition, flute and musicology in Trieste,\ + \ where he was born in 1976, in Cremona and in Liège. He is professor at the Conservatory\ + \ of Como, school of music and sound technologies.},\n address = {Genova, Italy},\n\ + \ author = {Giorgio Klauer},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n\ + \ month = {June},\n publisher = {Casa Paganini},\n title = {Tre Aspetti del Tempo\ + \ per Iperviolino e Computer},\n year = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Roberto Doati month: June - publisher: Norwegian Academy of Music - title: V'Oct(Ritual) - url: https://vimeo.com/27694214 - year: 2011 + publisher: Casa Paganini + title: Tre Aspetti del Tempo per Iperviolino e Computer + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2011-music-Shiraishi2011 - abstract: "Program notes:\n\nmikro:strukt is a collaborative performance in which\ - \ the custom-built e-clambone provides an acoustic source for the ensuing audiovisual\ - \ environment. 
E-clambone is custom-built electronic instrument that consists\ - \ of an aerophone supplied with haptic sensors and digital signal processing algorithms.\ - \ The performance seeks to integrate elements of electro-acoustic improvisation,\ - \ timbre composition and artificial intelligence based approach to autonomous\ - \ audiovisual composition and explore micro level timbre composition in real time.\n\ - \nAbout the performers:\n\nSatoshi Shiraishi: Electro-acoustic instrument designer/performer\ - \ from Japan, currently living in The Hague, The Netherlands. He originally started\ - \ his music carrier as a rock guitarist. After the meeting with computer music,\ - \ he moved to The Netherlands to pursue his own way of playing computer generated\ - \ sound on a stage.\n\nAlo Allik: (Estonia) has a musically and geographically\ - \ restless lifestyle, which has taken him through diverse musical worlds including\ - \ DJ-ing and producing electronic dance music, live laptop jams, electroacoustic\ - \ composition, free improvisation, audiovisual installations and performances." - address: 'Oslo, Norway' - author: Satoshi Shiraishi and Alo Allik - bibtex: "@inproceedings{nime2011-music-Shiraishi2011,\n abstract = {Program notes:\n\ - \nmikro:strukt is a collaborative performance in which the custom-built e-clambone\ - \ provides an acoustic source for the ensuing audiovisual environment. E-clambone\ - \ is custom-built electronic instrument that consists of an aerophone supplied\ - \ with haptic sensors and digital signal processing algorithms. The performance\ - \ seeks to integrate elements of electro-acoustic improvisation, timbre composition\ - \ and artificial intelligence based approach to autonomous audiovisual composition\ - \ and explore micro level timbre composition in real time.\n\nAbout the performers:\n\ - \nSatoshi Shiraishi: Electro-acoustic instrument designer/performer from Japan,\ - \ currently living in The Hague, The Netherlands. 
He originally started his music\ - \ carrier as a rock guitarist. After the meeting with computer music, he moved\ - \ to The Netherlands to pursue his own way of playing computer generated sound\ - \ on a stage.\n\nAlo Allik: (Estonia) has a musically and geographically restless\ - \ lifestyle, which has taken him through diverse musical worlds including DJ-ing\ - \ and producing electronic dance music, live laptop jams, electroacoustic composition,\ - \ free improvisation, audiovisual installations and performances.},\n address\ - \ = {Oslo, Norway},\n author = {Satoshi Shiraishi and Alo Allik},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ - \ {June},\n publisher = {Norwegian Academy of Music},\n title = {mikro:strukt},\n\ - \ url = {https://vimeo.com/27694202},\n year = {2011}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + ID: nime2008-music-Sartini2008 + abstract: "Program notes:\nAurora Polare (Polar Dawn) is a short piece for cymbals,\ + \ tam-tam, vibraphone, live electronics and EyesWeb system. This piece was inspired\ + \ by the smooth movements of waves, the drawings created by polar dawns and the\ + \ cold weather in polar seas – that's the reason why only metallophones are used.\n\ + The first matter to fight with was making the percussionist elaborate the sound\ + \ they produce while playing their instruments and crafting a brand-new easy way\ + \ to specify every movement. That's why, under the traditional notation score,\ + \ two special lines follow the music specifying the direction to move to: up-down\ + \ and left-right/near-far. A line approaching the top or the bottom of the Y axis\ + \ tells the way to track. 
You can find an example here on the left.\nAll of those\ + \ movements fully interact with EyesWeb and MAX MSP thru two 30fps accelerometer\ + \ bracelets worn by the performers. Every vertical movement controls the volume\ + \ of the processed sound, while horizontal movements manage a different patch\ + \ in MAX MSP suited to every instrument: a tam-tam sample speed controller (this\ + \ make the instrument play without being touched), an harmonizer to make cymbals\ + \ sing just like a Theremin, but with their own processed sound, and\nthe rate\ + \ of a delay. In the control room a MIDI controller and a computer will be used\ + \ to manage live additional effects and parameters, like granular synthesis, reverb\ + \ and multi-slider filters.\nThanks to Martino Sarolli for helping me with MAX\ + \ MSP, to Matteo Rabolini and Matteo Bonanni for playing my composition.\n\nAbout\ + \ the performer:\nAlessandro Sartini: Born in Genoa in 1982, he studied piano\ + \ with Canzio Bucciarelli and attends the last year of Composition at the Conservatory\ + \ of Genoa with Riccardo Dapelo, who introduced him to “live electronic” treatments.\ + \ His first public exhibition was at the Auditorium Montale of the Carlo Felice\ + \ Theatre in Genoa, during the concert commemorating the 50th anniversary of Béla\ + \ Bartók's death in 1995. From that year on he established a great number of collaboration\ + \ with various solo musicians, who really appreciated his way to accompany; this\ + \ guided him to work in partnership with a good number of professional soloists.\ + \ In 1999 he joined the class of Composition at the Conservatory of Genoa with\ + \ Luigi Giachino, who introduced him to film music: this interest led him to win\ + \ the third prize at the Lavagnino International Film Music Festival in Gavi in\ + \ 2006 and the first prize at the “Concorso Internazionale di Composizione di\ + \ Alice Belcolle\" in 2007. 
With Valentina Abrami, he is the founder of the “Associazione\ + \ Musica in Movimento”, which operates at the “International School in Genoa”." + address: 'Genova, Italy' + author: Alessandro Sartini + bibtex: "@inproceedings{nime2008-music-Sartini2008,\n abstract = {Program notes:\n\ + Aurora Polare (Polar Dawn) is a short piece for cymbals, tam-tam, vibraphone,\ + \ live electronics and EyesWeb system. This piece was inspired by the smooth movements\ + \ of waves, the drawings created by polar dawns and the cold weather in polar\ + \ seas – that's the reason why only metallophones are used.\nThe first matter\ + \ to fight with was making the percussionist elaborate the sound they produce\ + \ while playing their instruments and crafting a brand-new easy way to specify\ + \ every movement. That's why, under the traditional notation score, two special\ + \ lines follow the music specifying the direction to move to: up-down and left-right/near-far.\ + \ A line approaching the top or the bottom of the Y axis tells the way to track.\ + \ You can find an example here on the left.\nAll of those movements fully interact\ + \ with EyesWeb and MAX MSP thru two 30fps accelerometer bracelets worn by the\ + \ performers. Every vertical movement controls the volume of the processed sound,\ + \ while horizontal movements manage a different patch in MAX MSP suited to every\ + \ instrument: a tam-tam sample speed controller (this make the instrument play\ + \ without being touched), an harmonizer to make cymbals sing just like a Theremin,\ + \ but with their own processed sound, and\nthe rate of a delay. 
In the control\ + \ room a MIDI controller and a computer will be used to manage live additional\ + \ effects and parameters, like granular synthesis, reverb and multi-slider filters.\n\ + Thanks to Martino Sarolli for helping me with MAX MSP, to Matteo Rabolini and\ + \ Matteo Bonanni for playing my composition.\n\nAbout the performer:\nAlessandro\ + \ Sartini: Born in Genoa in 1982, he studied piano with Canzio Bucciarelli and\ + \ attends the last year of Composition at the Conservatory of Genoa with Riccardo\ + \ Dapelo, who introduced him to “live electronic” treatments. His first public\ + \ exhibition was at the Auditorium Montale of the Carlo Felice Theatre in Genoa,\ + \ during the concert commemorating the 50th anniversary of Béla Bartók's death\ + \ in 1995. From that year on he established a great number of collaboration with\ + \ various solo musicians, who really appreciated his way to accompany; this guided\ + \ him to work in partnership with a good number of professional soloists. In 1999\ + \ he joined the class of Composition at the Conservatory of Genoa with Luigi Giachino,\ + \ who introduced him to film music: this interest led him to win the third prize\ + \ at the Lavagnino International Film Music Festival in Gavi in 2006 and the first\ + \ prize at the “Concorso Internazionale di Composizione di Alice Belcolle\" in\ + \ 2007. 
With Valentina Abrami, he is the founder of the “Associazione Musica in\ + \ Movimento”, which operates at the “International School in Genoa”.},\n address\ + \ = {Genova, Italy},\n author = {Alessandro Sartini},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ + \ title = {Aurora Polare},\n year = {2008}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Roberto Doati month: June - publisher: Norwegian Academy of Music - title: mikro:strukt - url: https://vimeo.com/27694202 - year: 2011 + publisher: Casa Paganini + title: Aurora Polare + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2011-music-Overholt2011 - abstract: "Program notes:\n\nThis generative / improvisatory work uses an iPod Touch\ - \ and a tactile sound transducer attached to the Overtone Fiddle's resonant body\ - \ as a mobile system to lay out a variety of animated and transformed sound sources\ - \ over time.\n\nAbout the performers:\n\nDan Overholt: Associate Professor in\ - \ the Department of Architecture, Design and Media Technology at Aalborg University,\ - \ Denmark. He received a PhD in Media Arts and Technology from the University\ - \ of California, Santa Barbara, a M.S. from the MIT Media Lab, and studied Music\ - \ and Electronics Engineering and at CSU, Chico. As a musician, he composes and\ - \ performs internationally with experimental human-computer interfaces and musical\ - \ signal processing algorithms.\n\nLars Graugaard: Free-lance composer, laptop\ - \ performer and researcher. He holds a PhD in Artistic and Technological Challenges\ - \ of Interactive Music from Oxford Brookes University and a MS in flute performance\ - \ from the Royal Danish Academy of Music. 
His main interest is the systematic\ - \ study of music's expressive capacity applied to score composing, realtime interactive\ - \ performance, generative and emergent music." - address: 'Oslo, Norway' - author: Dan Overholt and Lars Grausgaard - bibtex: "@inproceedings{nime2011-music-Overholt2011,\n abstract = {Program notes:\n\ - \nThis generative / improvisatory work uses an iPod Touch and a tactile sound\ - \ transducer attached to the Overtone Fiddle's resonant body as a mobile system\ - \ to lay out a variety of animated and transformed sound sources over time.\n\n\ - About the performers:\n\nDan Overholt: Associate Professor in the Department of\ - \ Architecture, Design and Media Technology at Aalborg University, Denmark. He\ - \ received a PhD in Media Arts and Technology from the University of California,\ - \ Santa Barbara, a M.S. from the MIT Media Lab, and studied Music and Electronics\ - \ Engineering and at CSU, Chico. As a musician, he composes and performs internationally\ - \ with experimental human-computer interfaces and musical signal processing algorithms.\n\ - \nLars Graugaard: Free-lance composer, laptop performer and researcher. He holds\ - \ a PhD in Artistic and Technological Challenges of Interactive Music from Oxford\ - \ Brookes University and a MS in flute performance from the Royal Danish Academy\ - \ of Music. His main interest is the systematic study of music's expressive capacity\ - \ applied to score composing, realtime interactive performance, generative and\ - \ emergent music.},\n address = {Oslo, Norway},\n author = {Dan Overholt and Lars\ - \ Grausgaard},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ - \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ - \ title = {Study No. 
1 for Overtone Fiddle},\n url = {https://vimeo.com/26661494},\n\ - \ year = {2011}\n}\n" + ID: nime2008-music-Baltazar2008 + abstract: "Program notes:\nThe composition of Pyrogenesis took inspiration from\ + \ several aspects of the blacksmithing, not in a literal way, but much as a set\ + \ of correspondences :\nFirst, the gesture, by which the blacksmith models the\ + \ matter continuously; striking, heating, twisting, soaking metals to gradually\ + \ print a form into them.\nThen, the tool: Just like the blacksmith manufactures\ + \ his own tools, I work on developing my own electro-acoustic instrument: an instrument\ + \ to write sound, in space and with a gestural input.\nLastly, the organic construction\ + \ of the form: Gilles Deleuze says \"Why is the blacksmith a musician? It is not\ + \ simply because the forging mill makes noise, it is because the music and the\ + \ metallurgy are haunted by the same problem: that the metallurgy puts the matter\ + \ in the state of continuous variation just as the music is haunted by putting\ + \ the sound in a state of continuous variation and to found in the sound world\ + \ a continuous development of the form and a continuous variation of the matter\"\ + .\nOn a more technical/scientific point of view, the interaction with the performer\ + \ uses two interfaces : a Wacom tablet, and a set of force- resistive-sensors\ + \ (through an analog-to-digital converter), which common point is that they both\ + \ allow control by the pressure of hands, and thus offer a very “physical” mode\ + \ of control.\nThe composition/performance environment consists of a set of generative\ + \ audio modules, fully addressable and presettable, including a mapping engine\ + \ allowing a quick yet powerful set of mapping strategies from controllers inputs\ + \ and volume envelopes to any parameter, including those of the mappers themselves,\ + \ allowing a very precise, flexible, and evolutive sound/gesture relationship\ + \ in time.\nThe 
composition has been realized through a constant dialogue between\ + \ improvisations in a pre-determined trajectory, and afterwards- listening of\ + \ the produced result. Thus, most of the details of the composition have been\ + \ generated by an improvisation/learning-through- repetition process, without\ + \ any visual support - thus allowing to emphasize expressivity while keeping a\ + \ very direct relationship to the musical gesture.\n\nAbout the performer:\nPascal\ + \ Baltazar is a composer and research coordinator at GMEA, National Center for\ + \ Musical Creation in Albi, France. His research focuses on spatial and temporal\ + \ perception of sound, and its relationship to the body and musical gesture. He\ + \ is coordinating the Virage research platform, on control and scripting novel\ + \ interfaces for artistic creation and entertainment industries, granted by the\ + \ French Research Agency, in the frame of its Audiovisual and Multimedia program,\ + \ for the 2008-2009 period. He is an active member of the Jamoma collective.\n\ + He has studied Aesthetics (Masters of Philosophy Thesis The sonic image : material\ + \ and sensation, 2001, Toulouse III, France) and electroacoustic composition at\ + \ the National Conservatoire of Toulouse. He has then been implied as a composer\ + \ or interactive designer in diverse artistic projects : concerts, performing\ + \ arts shows and interactive installations. He has been commissioned for musical\ + \ works by several institutions, as the French State, INA-GRM, GMEA, IMEB... and\ + \ participated in international festivals (Présences Électroniques, Paris / Radio\ + \ France Festival, Montpellier / Synthèse, Bourges / Videomedeja, Novi Sad / Space\ + \ + Place, Berlin...)." 
+ address: 'Genova, Italy' + author: Pascal Baltazar + bibtex: "@inproceedings{nime2008-music-Baltazar2008,\n abstract = {Program notes:\n\ + The composition of Pyrogenesis took inspiration from several aspects of the blacksmithing,\ + \ not in a literal way, but much as a set of correspondences :\nFirst, the gesture,\ + \ by which the blacksmith models the matter continuously; striking, heating, twisting,\ + \ soaking metals to gradually print a form into them.\nThen, the tool: Just like\ + \ the blacksmith manufactures his own tools, I work on developing my own electro-acoustic\ + \ instrument: an instrument to write sound, in space and with a gestural input.\n\ + Lastly, the organic construction of the form: Gilles Deleuze says \"Why is the\ + \ blacksmith a musician? It is not simply because the forging mill makes noise,\ + \ it is because the music and the metallurgy are haunted by the same problem:\ + \ that the metallurgy puts the matter in the state of continuous variation just\ + \ as the music is haunted by putting the sound in a state of continuous variation\ + \ and to found in the sound world a continuous development of the form and a continuous\ + \ variation of the matter\".\nOn a more technical/scientific point of view, the\ + \ interaction with the performer uses two interfaces : a Wacom tablet, and a set\ + \ of force- resistive-sensors (through an analog-to-digital converter), which\ + \ common point is that they both allow control by the pressure of hands, and thus\ + \ offer a very “physical” mode of control.\nThe composition/performance environment\ + \ consists of a set of generative audio modules, fully addressable and presettable,\ + \ including a mapping engine allowing a quick yet powerful set of mapping strategies\ + \ from controllers inputs and volume envelopes to any parameter, including those\ + \ of the mappers themselves, allowing a very precise, flexible, and evolutive\ + \ sound/gesture relationship in time.\nThe composition has 
been realized through\ + \ a constant dialogue between improvisations in a pre-determined trajectory, and\ + \ afterwards- listening of the produced result. Thus, most of the details of the\ + \ composition have been generated by an improvisation/learning-through- repetition\ + \ process, without any visual support - thus allowing to emphasize expressivity\ + \ while keeping a very direct relationship to the musical gesture.\n\nAbout the\ + \ performer:\nPascal Baltazar is a composer and research coordinator at GMEA,\ + \ National Center for Musical Creation in Albi, France. His research focuses on\ + \ spatial and temporal perception of sound, and its relationship to the body and\ + \ musical gesture. He is coordinating the Virage research platform, on control\ + \ and scripting novel interfaces for artistic creation and entertainment industries,\ + \ granted by the French Research Agency, in the frame of its Audiovisual and Multimedia\ + \ program, for the 2008-2009 period. He is an active member of the Jamoma collective.\n\ + He has studied Aesthetics (Masters of Philosophy Thesis The sonic image : material\ + \ and sensation, 2001, Toulouse III, France) and electroacoustic composition at\ + \ the National Conservatoire of Toulouse. He has then been implied as a composer\ + \ or interactive designer in diverse artistic projects : concerts, performing\ + \ arts shows and interactive installations. He has been commissioned for musical\ + \ works by several institutions, as the French State, INA-GRM, GMEA, IMEB... 
and\ + \ participated in international festivals (Présences Électroniques, Paris / Radio\ + \ France Festival, Montpellier / Synthèse, Bourges / Videomedeja, Novi Sad / Space\ + \ + Place, Berlin...).},\n address = {Genova, Italy},\n author = {Pascal Baltazar},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher\ + \ = {Casa Paganini},\n title = {Pyrogenesis},\n year = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg + editor: Roberto Doati month: June - publisher: Norwegian Academy of Music - title: Study No. 1 for Overtone Fiddle - url: https://vimeo.com/26661494 - year: 2011 + publisher: Casa Paganini + title: Pyrogenesis + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2011-music-DougVanNort2011 - abstract: "Program notes:\n\nThis piece is written in consideration of two distinct\ - \ paradigms: telematic music performance and human-machine improvisation. Specifically\ - \ this work is a structured improvisation for three humans and one intelligent\ - \ agent, being constrained by sections that determine which pairing (duos, trios)\ - \ of performers are active. Instrumentation also changes between sections in a\ - \ way that blurs the line of agency and intent between acoustic human performers,\ - \ laptop tablet-based human performer, and agent improviser, as the two remote\ - \ (NY, Stanford) acoustic performers (v-accordion, soprano saxophone) engage with\ - \ the on-stage laptop performer (GREIS system) and ambient presence of the agent\ - \ performer (spatialization, loops, textures).\n\nAbout the performers:\n\nDoug\ - \ Van Nort: Experimental musician and digital music researcher whose work includes\ - \ composition, improvisation, interactive system design and cross-disciplinary\ - \ collaboration. 
His writings can be found in Organised Sound and Leonardo Music\ - \ Journal among other publications, and his music is documented on Deep Listening,\ - \ Pogus and other labels.\n\nPauline Oliveros: (1932) is a composer and improviser,\ - \ teaches at RPI, plays a Roland V Accordion in solo and ensemble improvisations.\ - \ Her works are available through download, cassette, CD, DVD, and Vinyl releases.\ - \ Oliveros founded the Deep Listening Institute, Ltd. based in Kingston NY.\n\n\ - Jonas Braasch: Experimental soprano saxophonist and acoustician with interests\ - \ in Telematic Music and Intelligent Music Systems. He has performed with Curtis\ - \ Bahn, Chris Chafe, Michael Century, Mark Dresser, Pauline Oliveros, Doug van\ - \ Nort and Stuart Dempster -- among others. He currently directs the Communication\ - \ Acoustics and Aural Architecture Research Laboratory at RPI." - address: 'Oslo, Norway' - author: Doug Van Nort and Pauline Oliveros and Jonas Braasch - bibtex: "@inproceedings{nime2011-music-DougVanNort2011,\n abstract = {Program notes:\n\ - \nThis piece is written in consideration of two distinct paradigms: telematic\ - \ music performance and human-machine improvisation. Specifically this work is\ - \ a structured improvisation for three humans and one intelligent agent, being\ - \ constrained by sections that determine which pairing (duos, trios) of performers\ - \ are active. 
Instrumentation also changes between sections in a way that blurs\ - \ the line of agency and intent between acoustic human performers, laptop tablet-based\ - \ human performer, and agent improviser, as the two remote (NY, Stanford) acoustic\ - \ performers (v-accordion, soprano saxophone) engage with the on-stage laptop\ - \ performer (GREIS system) and ambient presence of the agent performer (spatialization,\ - \ loops, textures).\n\nAbout the performers:\n\nDoug Van Nort: Experimental musician\ - \ and digital music researcher whose work includes composition, improvisation,\ - \ interactive system design and cross-disciplinary collaboration. His writings\ - \ can be found in Organised Sound and Leonardo Music Journal among other publications,\ - \ and his music is documented on Deep Listening, Pogus and other labels.\n\nPauline\ - \ Oliveros: (1932) is a composer and improviser, teaches at RPI, plays a Roland\ - \ V Accordion in solo and ensemble improvisations. Her works are available through\ - \ download, cassette, CD, DVD, and Vinyl releases. Oliveros founded the Deep Listening\ - \ Institute, Ltd. based in Kingston NY.\n\nJonas Braasch: Experimental soprano\ - \ saxophonist and acoustician with interests in Telematic Music and Intelligent\ - \ Music Systems. 
He has performed with Curtis Bahn, Chris Chafe, Michael Century,\ - \ Mark Dresser, Pauline Oliveros, Doug van Nort and Stuart Dempster -- among others.\ - \ He currently directs the Communication Acoustics and Aural Architecture Research\ - \ Laboratory at RPI.},\n address = {Oslo, Norway},\n author = {Doug Van Nort and\ - \ Pauline Oliveros and Jonas Braasch},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ - \ {Norwegian Academy of Music},\n title = {Distributed Composition #1},\n url\ - \ = {https://vimeo.com/27691551},\n year = {2011}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: "Distributed Composition #1" - url: https://vimeo.com/27691551 - year: 2011 - - -- ENTRYTYPE: inproceedings - ID: nime2011-music-Schorno2011 - abstract: "Program notes: The formalistic identity of ``7-of-12'' consists of a\ - \ showcase format for ``penta digit instrumental inventions'' diffused in quadrophonic\ - \ audio and 3d interactive video projection. The dialectic intertwining of Karlsson's\ - \ abstract art and Schorno's sonetic world extends into a composition of 12''\ - \ duration. Eponymous instrument group ``EIG''' consist of two former classmates\ - \ of Sonology where they among other things studied the making of alternative\ - \ electronic instruments. The performance``7-of-12 dialectologies'' is an outcome\ - \ of collaborated teachings and methodology in dialogue with past performances.\n\ - \nAbout the performers: Daniel Schorno: composer, born in Zurich in 1963. Studied\ - \ composition in London with Melanie Daiken and electronic and computer music\ - \ in The Hague, with Joel Ryan and Clarence Barlow. 
Invited by Michel Waisvisz\ - \ he led STEIM - the re-nown Dutch Studio for Electro Instrumental Music, and\ - \ home of ``New Instruments'' - as Artistic Director until 2005. He is currently\ - \ STEIM's composer-in-research and creative project advisor.\nHaraldur Karlsson:\ - \ visual artist, born in Reykjavik 1967. Haraldur studied Multi-media in the art\ - \ academy in Iceland, Media-art in AKI in Enschede and Sonology in the Royal conservatories\ - \ The Hague. Haraldur is mainly focused on interactive audio/video/3D installations\ - \ and performances, and instrumental computer controllers. His fire instrument\ - \ ``TFI''' is part of the Little Solarsystem ``LSS'' navigation system that is\ - \ an audio/video/3D performance." - address: 'Oslo, Norway' - author: Daniel Schorno and Haraldur Karlsson - bibtex: "@inproceedings{nime2011-music-Schorno2011,\n abstract = {Program notes:\ - \ The formalistic identity of ``7-of-12'' consists of a showcase format for ``penta\ - \ digit instrumental inventions'' diffused in quadrophonic audio and 3d interactive\ - \ video projection. The dialectic intertwining of Karlsson's abstract art and\ - \ Schorno's sonetic world extends into a composition of 12'' duration. Eponymous\ - \ instrument group ``EIG''' consist of two former classmates of Sonology where\ - \ they among other things studied the making of alternative electronic instruments.\ - \ The performance``7-of-12 dialectologies'' is an outcome of collaborated teachings\ - \ and methodology in dialogue with past performances.\n\nAbout the performers:\ - \ Daniel Schorno: composer, born in Zurich in 1963. Studied composition in London\ - \ with Melanie Daiken and electronic and computer music in The Hague, with Joel\ - \ Ryan and Clarence Barlow. Invited by Michel Waisvisz he led STEIM - the re-nown\ - \ Dutch Studio for Electro Instrumental Music, and home of ``New Instruments''\ - \ - as Artistic Director until 2005. 
He is currently STEIM's composer-in-research\ - \ and creative project advisor.\nHaraldur Karlsson: visual artist, born in Reykjavik\ - \ 1967. Haraldur studied Multi-media in the art academy in Iceland, Media-art\ - \ in AKI in Enschede and Sonology in the Royal conservatories The Hague. Haraldur\ - \ is mainly focused on interactive audio/video/3D installations and performances,\ - \ and instrumental computer controllers. His fire instrument ``TFI''' is part\ - \ of the Little Solarsystem ``LSS'' navigation system that is an audio/video/3D\ - \ performance.},\n address = {Oslo, Norway},\n author = {Daniel Schorno and Haraldur\ - \ Karlsson},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ - \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ - \ title = {7-of-12 dialectologies},\n url = {https://vimeo.com/27694220},\n year\ - \ = {2011}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: 7-of-12 dialectologies - url: https://vimeo.com/27694220 - year: 2011 - - -- ENTRYTYPE: inproceedings - ID: nime2011-music-Dahl2011 - abstract: "Program notes: TweetDreams uses real-time Twitter data to generate music\ - \ and visuals. During the performance tweets containing specific search terms\ - \ are retrieved from Twitter. Each tweet is displayed and plays a short melody.\ - \ Tweets are grouped into trees of related tweets, which are given similar melodies.\ - \ We invite the audience to participate in TweetDreams by tweeting during performance\ - \ with the term #Nime2011. This term is used to identify tweets from the audience\ - \ and performers. 
Global search terms are used to bring the world into the performance.\ - \ Any tweet with these terms occurring anywhere in the world becomes part of the\ - \ piece.\n\nAbout the performers: Luke Dahl: Musician and engineer currently pursuing\ - \ a PhD at Stanford University's CCRMA. His research interests include new musical\ - \ instruments and performance ensembles, musical gesture, rhythm perception, and\ - \ MIR. He has composed works for the Stanford Laptop and Mobile Phone Orchestras\ - \ and also creates electronic dance music.\nCarr Wilkerson: System Administrator\ - \ at CCRMA specializing in Linux and Mac OS systems. He is a controller and software\ - \ system builder and sometime performer/impresario, instructor and researcher." - address: 'Oslo, Norway' - author: Luke Dahl and Carr Wilkerson - bibtex: "@inproceedings{nime2011-music-Dahl2011,\n abstract = {Program notes: TweetDreams\ - \ uses real-time Twitter data to generate music and visuals. During the performance\ - \ tweets containing specific search terms are retrieved from Twitter. Each tweet\ - \ is displayed and plays a short melody. Tweets are grouped into trees of related\ - \ tweets, which are given similar melodies. We invite the audience to participate\ - \ in TweetDreams by tweeting during performance with the term \\emph{\\#Nime2011}.\ - \ This term is used to identify tweets from the audience and performers. Global\ - \ search terms are used to bring the world into the performance. Any tweet with\ - \ these terms occurring anywhere in the world becomes part of the piece.\n\nAbout\ - \ the performers: Luke Dahl: Musician and engineer currently pursuing a PhD at\ - \ Stanford University's CCRMA. His research interests include new musical instruments\ - \ and performance ensembles, musical gesture, rhythm perception, and MIR. 
He has\ - \ composed works for the Stanford Laptop and Mobile Phone Orchestras and also\ - \ creates electronic dance music.\nCarr Wilkerson: System Administrator at CCRMA\ - \ specializing in Linux and Mac OS systems. He is a controller and software system\ - \ builder and sometime performer/impresario, instructor and researcher.},\n address\ - \ = {Oslo, Norway},\n author = {Luke Dahl and Carr Wilkerson},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ - \ = {Norwegian Academy of Music},\n title = {TweetDreams},\n url = {https://vimeo.com/27694232},\n\ - \ year = {2011}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Kjell Tore Innervik and Ivar Frounberg - month: June - publisher: Norwegian Academy of Music - title: TweetDreams - url: https://vimeo.com/27694232 - year: 2011 - - -- ENTRYTYPE: inproceedings - ID: nime2010-music-Miyama2010 - address: 'Sydney, Australia' + ID: nime2008-music-Miyama2008 + abstract: "Program notes:\n\"Keo\" is a performance for voice improvisation, Qgo\ + \ sensor instrument, and live electronics. The author attempts to realize three\ + \ concepts in the work. The first is \"dual-layered control,\" in which the performer\ + \ improvises phrases by singing and providing sound materials for a computer.\ + \ Simultaneously, he sends commands to the\ncomputer to process vocals using a\ + \ pair of sensor devices worn on both hands. between the visuality of the performance\ + \ and the musical\ngestures. In most parts of the performance, the movement of\ + \ the sensor instrument and the musical parameters are clearly connected. If the\ + \ performer moves his hand even slightly, particular aspects of the sound are\ + \ influenced in an obvious manner. 
The third is the strong connection between\ + \ music and theatricality. In several parts of this work, the body motions of\ + \ the performer not only control the sensor device, but also provide some theatrical\ + \ meanings.\n\nAbout the performer:\nChikashi Miyama received his BA(2002) and\ + \ MA(2004) from the Sonology Department, Kunitachi College of Music, Tokyo, Japan\ + \ and Nachdiplom(2007) from Elektronisches studio, Musik-Akademie der Stadt Basel,\ + \ Basel, Switzerland. He is currently attending the State University of New York\ + \ at Buffalo for his ph.D. He has studied under T.Rai, C.Lippe, E.Ona, and G.F.Haas.\ + \ His works, especially his interactive multimedia works, have been performed\ + \ at international festivals, such as June in Buffalo 2001 (New york, USA) , Mix\ + \ '02 (Arfus, Denmark), Musica Viva '03 (Coimbra, Portugal), Realtime/non-realtime\ + \ electronic music festival (Basel, Switzerland), Next generation'05 (Karlsruhe,\ + \ Germany), as well as various cities in Japan. His papers about his works and\ + \ realtime visual processing software \"DIPS\" have also been accepted by ICMC,\ + \ and presented at several SIGMUS conferences. Since 2005, he has been performing\ + \ as a laptop musician, employing his original sensor devices and involving himself\ + \ in several Media-art activities, such as Dorkbot, Shift-Festival, SPARK, and\ + \ SGMK workshops. His compositions have received honorable mention in the Residence\ + \ Prize section of the 30th International Electroacoustic Music Competition Bourges\ + \ and have been accepted by the International Computer Music Conference in 2004,\ + \ 2005, 2006 and 2007. Several works of him are published, including the Computer\ + \ Music Journal Vol.28 DVD by MIT press and the ICMC 2005 official CD." 
+ address: 'Genova, Italy' author: Chikashi Miyama - bibtex: "@inproceedings{nime2010-music-Miyama2010,\n address = {Sydney, Australia},\n\ - \ author = {Chikashi Miyama},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Black Vox},\n year = {2010}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' - month: June - publisher: University of Technology Sydney - title: Black Vox - year: 2010 - - -- ENTRYTYPE: inproceedings - ID: nime2010-music-Collins2010 - address: 'Sydney, Australia' - author: Nicolas Collins - bibtex: "@inproceedings{nime2010-music-Collins2010,\n address = {Sydney, Australia},\n\ - \ author = {Nicolas Collins},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Salvage},\n year = {2010}\n}\n" + bibtex: "@inproceedings{nime2008-music-Miyama2008,\n abstract = {Program notes:\n\ + \"Keo\" is a performance for voice improvisation, Qgo sensor instrument, and live\ + \ electronics. The author attempts to realize three concepts in the work. The\ + \ first is \"dual-layered control,\" in which the performer improvises phrases\ + \ by singing and providing sound materials for a computer. 
Simultaneously, he\ + \ sends commands to the\ncomputer to process vocals using a pair of sensor devices\ + \ worn on both hands. between the visuality of the performance and the musical\n\ + gestures. In most parts of the performance, the movement of the sensor instrument\ + \ and the musical parameters are clearly connected. If the performer moves his\ + \ hand even slightly, particular aspects of the sound are influenced in an obvious\ + \ manner. The third is the strong connection between music and theatricality.\ + \ In several parts of this work, the body motions of the performer not only control\ + \ the sensor device, but also provide some theatrical meanings.\n\nAbout the performer:\n\ + Chikashi Miyama received his BA(2002) and MA(2004) from the Sonology Department,\ + \ Kunitachi College of Music, Tokyo, Japan and Nachdiplom(2007) from Elektronisches\ + \ studio, Musik-Akademie der Stadt Basel, Basel, Switzerland. He is currently\ + \ attending the State University of New York at Buffalo for his ph.D. He has studied\ + \ under T.Rai, C.Lippe, E.Ona, and G.F.Haas. His works, especially his interactive\ + \ multimedia works, have been performed at international festivals, such as June\ + \ in Buffalo 2001 (New york, USA) , Mix '02 (Arfus, Denmark), Musica Viva '03\ + \ (Coimbra, Portugal), Realtime/non-realtime electronic music festival (Basel,\ + \ Switzerland), Next generation'05 (Karlsruhe, Germany), as well as various cities\ + \ in Japan. His papers about his works and realtime visual processing software\ + \ \"DIPS\" have also been accepted by ICMC, and presented at several SIGMUS conferences.\ + \ Since 2005, he has been performing as a laptop musician, employing his original\ + \ sensor devices and involving himself in several Media-art activities, such as\ + \ Dorkbot, Shift-Festival, SPARK, and SGMK workshops. 
His compositions have received\ + \ honorable mention in the Residence Prize section of the 30th International Electroacoustic\ + \ Music Competition Bourges and have been accepted by the International Computer\ + \ Music Conference in 2004, 2005, 2006 and 2007. Several works of him are published,\ + \ including the Computer Music Journal Vol.28 DVD by MIT press and the ICMC 2005\ + \ official CD.},\n address = {Genova, Italy},\n author = {Chikashi Miyama},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher\ + \ = {Casa Paganini},\n title = {Keo Improvisation for sensor instrument Qgo},\n\ + \ year = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Roberto Doati month: June - publisher: University of Technology Sydney - title: Salvage - year: 2010 + publisher: Casa Paganini + title: Keo Improvisation for sensor instrument Qgo + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Perrin2010 - address: 'Sydney, Australia' - author: Stéphane Perrin and Utako Shibatsuji - bibtex: "@inproceedings{nime2010-music-Perrin2010,\n address = {Sydney, Australia},\n\ - \ author = {Stéphane Perrin and Utako Shibatsuji},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ - \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {The Ningen Dogs Orchestra},\n\ - \ year = {2010}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: 
'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' - month: June - publisher: University of Technology Sydney - title: The Ningen Dogs Orchestra - year: 2010 - - -- ENTRYTYPE: inproceedings - ID: nime2010-music-Kanda2010 - address: 'Sydney, Australia' - author: Ryo Kanda - bibtex: "@inproceedings{nime2010-music-Kanda2010,\n address = {Sydney, Australia},\n\ - \ author = {Ryo Kanda},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Tennendai no 0m0s},\n year = {2010}\n}\n" + ID: nime2008-music-Hamel2008 + abstract: "Program notes:\nIntersecting Lines is a collaboration between clarinetist\ + \ François Houle, interactive video artist Aleksandra Dulic and computer music\ + \ composer Keith Hamel. The work grew out of Dulic's research in visual music\ + \ and involves mapping a live clarinet improvisation onto both the visual and\ + \ audio realms. In this work an intelligent system for visualization and signification\ + \ is used to develop and expand the musical material played by the clarinet. This\ + \ system monitors and interprets various nuances of the musical performance. The\ + \ clarinetist's improvisations, musical intentions, meanings and feelings are\ + \ enhanced and extended, both visually and aurally, by the computer system, so\ + \ that the various textures and gestured played by the performer have corresponding\ + \ visuals and computer-generated sounds. The melodic line, as played by the clarinet,\ + \ is used as the main compositional strategy for visualization. 
Since the control\ + \ input is based on a classical instrument, the strategy is based on calligraphic\ + \ line drawing using artistic rendering: the computer-generated line is drawn\ + \ in 3D space and rendered using expressive painterly and ink drawing styles.\ + \ The appearance of animated lines and textures portray a new artistic expression\ + \ that transforms a musical gesture onto a visual plane. Kenneth Newby made contributions\ + \ to the development of the animation software. This project was made possible\ + \ with generous support of Social Sciences and Humanities Research Council of\ + \ Canada.\n\nAbout the performers:\nFrançois Houle has established himself as\ + \ one of Canada's finest musicians. His performances and recordings transcend\ + \ the stylistic borders associated with his instrument in all of the diverse musical\ + \ spheres he embraces: classical, jazz, new music, improvised music, and world\ + \ music. As an improviser, he has developed a unique language, virtuosic and rich\ + \ with sonic embellishments and technical extensions. As a soloist and chamber\ + \ musician, he has actively expanded the clarinet's repertoire by commissioning\ + \ some of today's leading Canadian and international composers and premieringover\ + \ one hundred new works. An alumnus of McGill University and Yale University,\ + \ François has been an artist-in-residence at the Banff Centre for the Arts and\ + \ the Civitella Ranieri Foundation in Umbria, Italy. Now based in Vancouver, François\ + \ is a leader in the city's music community and is considered by many to be Canada's\ + \ leading exponent of the clarinet.\n\nKeith Hamel is a Professor in the School\ + \ of Music, an Associate Researcher at the Institute for Computing, Information\ + \ and Cognitive Systems (ICICS), a Researcher at the Media and Graphics Interdisciplinary\ + \ Centre (MAGIC) and Director of the Computer Music Studio at the University of\ + \ British Columbia. 
Keith Hamel has written both acoustic and electroacoustic\ + \ music and his works have been performed by many of the finest soloists and ensembles\ + \ both in Canada and abroad. Many of his recent compositions focus on interaction\ + \ between live performers and computer-controlled electronics.\n\nAleksandra Dulic\ + \ is media artist, theorist and experimental filmmaker working at the intersections\ + \ of multimedia and live performance with research foci in computational poetics,\ + \ interactive animation and cross-cultural media performance. She has received\ + \ a number of awards for her short animated films. She is active as a new media\ + \ artist, curator, a writer, an educator, teaching courses, presenting art projects\ + \ and publishing papers, across North America, Australia, Europe and Asia. She\ + \ received her Ph.D. from the School of Interactive Art and Technology, Simon\ + \ Fraser University in 2006. She is currently a Postdoctoral research fellow at\ + \ the Media and Graphics Interdisciplinary Centre, University of British Columbia\ + \ funded by Social Sciences and Humanities Research Council of Canada (SSHRC)." + address: 'Genova, Italy' + author: Keith Hamel and François Houle and Aleksandra Dulic + bibtex: "@inproceedings{nime2008-music-Hamel2008,\n abstract = {Program notes:\n\ + Intersecting Lines is a collaboration between clarinetist François Houle, interactive\ + \ video artist Aleksandra Dulic and computer music composer Keith Hamel. The work\ + \ grew out of Dulic's research in visual music and involves mapping a live clarinet\ + \ improvisation onto both the visual and audio realms. In this work an intelligent\ + \ system for visualization and signification is used to develop and expand the\ + \ musical material played by the clarinet. This system monitors and interprets\ + \ various nuances of the musical performance. 
The clarinetist's improvisations,\ + \ musical intentions, meanings and feelings are enhanced and extended, both visually\ + \ and aurally, by the computer system, so that the various textures and gestured\ + \ played by the performer have corresponding visuals and computer-generated sounds.\ + \ The melodic line, as played by the clarinet, is used as the main compositional\ + \ strategy for visualization. Since the control input is based on a classical\ + \ instrument, the strategy is based on calligraphic line drawing using artistic\ + \ rendering: the computer-generated line is drawn in 3D space and rendered using\ + \ expressive painterly and ink drawing styles. The appearance of animated lines\ + \ and textures portray a new artistic expression that transforms a musical gesture\ + \ onto a visual plane. Kenneth Newby made contributions to the development of\ + \ the animation software. This project was made possible with generous support\ + \ of Social Sciences and Humanities Research Council of Canada.\n\nAbout the performers:\n\ + François Houle has established himself as one of Canada's finest musicians. His\ + \ performances and recordings transcend the stylistic borders associated with\ + \ his instrument in all of the diverse musical spheres he embraces: classical,\ + \ jazz, new music, improvised music, and world music. As an improviser, he has\ + \ developed a unique language, virtuosic and rich with sonic embellishments and\ + \ technical extensions. As a soloist and chamber musician, he has actively expanded\ + \ the clarinet's repertoire by commissioning some of today's leading Canadian\ + \ and international composers and premieringover one hundred new works. An alumnus\ + \ of McGill University and Yale University, François has been an artist-in-residence\ + \ at the Banff Centre for the Arts and the Civitella Ranieri Foundation in Umbria,\ + \ Italy. 
Now based in Vancouver, François is a leader in the city's music community\ + \ and is considered by many to be Canada's leading exponent of the clarinet.\n\ + \nKeith Hamel is a Professor in the School of Music, an Associate Researcher at\ + \ the Institute for Computing, Information and Cognitive Systems (ICICS), a Researcher\ + \ at the Media and Graphics Interdisciplinary Centre (MAGIC) and Director of the\ + \ Computer Music Studio at the University of British Columbia. Keith Hamel has\ + \ written both acoustic and electroacoustic music and his works have been performed\ + \ by many of the finest soloists and ensembles both in Canada and abroad. Many\ + \ of his recent compositions focus on interaction between live performers and\ + \ computer-controlled electronics.\n\nAleksandra Dulic is media artist, theorist\ + \ and experimental filmmaker working at the intersections of multimedia and live\ + \ performance with research foci in computational poetics, interactive animation\ + \ and cross-cultural media performance. She has received a number of awards for\ + \ her short animated films. She is active as a new media artist, curator, a writer,\ + \ an educator, teaching courses, presenting art projects and publishing papers,\ + \ across North America, Australia, Europe and Asia. She received her Ph.D. 
from\ + \ the School of Interactive Art and Technology, Simon Fraser University in 2006.\ + \ She is currently a Postdoctoral research fellow at the Media and Graphics Interdisciplinary\ + \ Centre, University of British Columbia funded by Social Sciences and Humanities\ + \ Research Council of Canada (SSHRC).},\n address = {Genova, Italy},\n author\ + \ = {Keith Hamel and François Houle and Aleksandra Dulic},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ + \ title = {Intersecting Lines},\n year = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Roberto Doati month: June - publisher: University of Technology Sydney - title: Tennendai no 0m0s - year: 2010 + publisher: Casa Paganini + title: Intersecting Lines + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Tomczak2010 - address: 'Sydney, Australia' - author: Sebastian Tomczak and Poppi Doser - bibtex: "@inproceedings{nime2010-music-Tomczak2010,\n address = {Sydney, Australia},\n\ - \ author = {Sebastian Tomczak and Poppi Doser},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ - \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {Antia},\n year = {2010}\n\ + ID: nime2008-music-Romero2008 + abstract: "Program notes:\nVISTAS(2005) - Choreography with video, one musician\ + \ playng live electronics and two dancers with metainstruments interacting with\ + \ the music. 
Divided in three scenes the work is conceptually based in the “self-other”\ + \ cognitive phenomena inspired by Edgar Morin's idea of the evolution of society\ + \ through interdisciplinary interaction. The interdisciplinary feature of the\ + \ piece is carefully constructed using 2 metainstruments that link the formal\ + \ elements in a structural way. This metainstruments are two wireless microphones\ + \ plugged into two stethoscopes attached to the dancers hands. The movements of\ + \ the dancers make the microphones generate an amplitude that is transmitted to\ + \ the computer and mapped into different music elements. Some live voice participations\ + \ from the dancers add dramatic accents to the piece. Vistas is en integral piece\ + \ in wich the music supports the choreography as well as the choreography gets\ + \ influenced by the music. The video supports the scene creating an abstract space\ + \ that changes and evolves according to the performance. The musical aesthetic\ + \ has Noise elements and voice sample manipulation playing with texture and density\ + \ contrast in a very dynamic way. The language of the choreography comes from\ + \ an exploration of the planes in a 3rd dimension space by separate first and\ + \ united later. The language is also influenced by the need to achieve the best\ + \ usage as possible of the metainstrument.\n\nAbout the performers:\nLos Platelmintos\ + \ are a group of artists, living in Mexico City, that work under the premise of\ + \ interdiscipline and experimentation. Dance, music and electronic media are fundamental\ + \ elements in their work.\n\nErnesto Romero: music composition and electronic\ + \ media. Studies Composition, Mathematics and Choir conduction in México. Chief\ + \ of the Audio Department at the National Center for the Arts in México where\ + \ he researches and developes technology applied to the arts.\n\nEsthel Vogrig\ + \ : Coreographer and dancer. 
Studies contemporary dance and coreography in México,\ + \ V ienna and the United States. Director of Los PLatelmintos company. Recipient\ + \ of the \"Grant for Investigation and Production of Art Works and New Media”\ + \ from the National Council of the Arts and the Multimedia Center in Mexico. This\ + \ grant was used to produce the piece Vistas.\n\nKarina Sánchez: Dancer. Studies\ + \ contemporary dance and coreography in Chile, Spain and México." + address: 'Genova, Italy' + author: Ernesto Romero and Esthel Vogrig + bibtex: "@inproceedings{nime2008-music-Romero2008,\n abstract = {Program notes:\n\ + VISTAS(2005) - Choreography with video, one musician playng live electronics and\ + \ two dancers with metainstruments interacting with the music. Divided in three\ + \ scenes the work is conceptually based in the “self-other” cognitive phenomena\ + \ inspired by Edgar Morin's idea of the evolution of society through interdisciplinary\ + \ interaction. The interdisciplinary feature of the piece is carefully constructed\ + \ using 2 metainstruments that link the formal elements in a structural way. This\ + \ metainstruments are two wireless microphones plugged into two stethoscopes attached\ + \ to the dancers hands. The movements of the dancers make the microphones generate\ + \ an amplitude that is transmitted to the computer and mapped into different music\ + \ elements. Some live voice participations from the dancers add dramatic accents\ + \ to the piece. Vistas is en integral piece in wich the music supports the choreography\ + \ as well as the choreography gets influenced by the music. The video supports\ + \ the scene creating an abstract space that changes and evolves according to the\ + \ performance. The musical aesthetic has Noise elements and voice sample manipulation\ + \ playing with texture and density contrast in a very dynamic way. 
The language\ + \ of the choreography comes from an exploration of the planes in a 3rd dimension\ + \ space by separate first and united later. The language is also influenced by\ + \ the need to achieve the best usage as possible of the metainstrument.\n\nAbout\ + \ the performers:\nLos Platelmintos are a group of artists, living in Mexico City,\ + \ that work under the premise of interdiscipline and experimentation. Dance, music\ + \ and electronic media are fundamental elements in their work.\n\nErnesto Romero:\ + \ music composition and electronic media. Studies Composition, Mathematics and\ + \ Choir conduction in México. Chief of the Audio Department at the National Center\ + \ for the Arts in México where he researches and developes technology applied\ + \ to the arts.\n\nEsthel Vogrig : Coreographer and dancer. Studies contemporary\ + \ dance and coreography in México, V ienna and the United States. Director of\ + \ Los PLatelmintos company. Recipient of the \"Grant for Investigation and Production\ + \ of Art Works and New Media” from the National Council of the Arts and the Multimedia\ + \ Center in Mexico. This grant was used to produce the piece Vistas.\n\nKarina\ + \ Sánchez: Dancer. 
Studies contemporary dance and coreography in Chile, Spain\ + \ and México.},\n address = {Genova, Italy},\n author = {Ernesto Romero and Esthel\ + \ Vogrig},\n booktitle = {Music Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n month\ + \ = {June},\n publisher = {Casa Paganini},\n title = {Vistas},\n year = {2008}\n\ }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' - month: June - publisher: University of Technology Sydney - title: Antia - year: 2010 - - -- ENTRYTYPE: inproceedings - ID: nime2010-music-Ganburged2010 - abstract: 'Bukhchuluun Ganburged , Fiddle and Throat Singing , Martin Slawig , Roger - Mills' - address: 'Sydney, Australia' - author: Bukhchuluun Ganburged and Martin Slawig and Roger Mills - bibtex: "@inproceedings{nime2010-music-Ganburged2010,\n abstract = {Bukhchuluun\ - \ Ganburged , Fiddle and Throat Singing , Martin Slawig , Roger Mills},\n address\ - \ = {Sydney, Australia},\n author = {Bukhchuluun Ganburged and Martin Slawig and\ - \ Roger Mills},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Andrew Johnston, Sam\ - \ Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Ethernet Orchestra - Remote Networked Performance},\n\ - \ year = {2010}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Roberto Doati month: June - publisher: University 
of Technology Sydney - title: Ethernet Orchestra - Remote Networked Performance - year: 2010 + publisher: Casa Paganini + title: Vistas + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Pritchard2010 - address: 'Sydney, Australia' - author: Bob Pritchard and Marguerite Witvoet - bibtex: "@inproceedings{nime2010-music-Pritchard2010,\n address = {Sydney, Australia},\n\ - \ author = {Bob Pritchard and Marguerite Witvoet},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ - \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {What Does A Body Know?},\n\ - \ year = {2010}\n}\n" + ID: nime2008-music-Messier2008 + abstract: "Program notes:\nThe Pencil Project is a performance piece created by\ + \ sound artists Martin Messier and Jacques Poulin-Denis. Their intention was to\ + \ craft a live electronic music piece inspired by the physicality of writing and\ + \ the imagery it articulates. The performers translate scribbling, scratching,\ + \ dotting and drawing with pencil music. The computers are hidden and untouched\ + \ throughout the piece, allowing object manipulation and the creation of sound\ + \ to be the performers' main focus.\nThe Pencil Project is about musicianship.\ + \ Liberated from the computer screen and equipped with hands-on objects, the performers\ + \ explore a new form of expressivity. Through an authentic and stimulating performance,\ + \ the musicians bring computer music intimately close to playing an actual musical\ + \ instrument.\n\nAbout the performers:\nMartin Messier: Holding a diploma in drums\ + \ for jazz interpretation, Martin Messier has completed a bachelor's degree in\ + \ electroacoustic composition at the University of Montreal, and De Montfort University\ + \ in England. 
Recently, Martin has founded a solo project called « et si l'aurore\ + \ disait oui... », through which he develops live electroacoustic performance\ + \ borrowing stylistic elements from Intelligent Dance Music, acousmatic and folk.\ + \ Based on strong aptitudes for rhythm, Martin's esthetic can be defined as a\ + \ complex, left field and happily strange sound amalgam, constantly playing with\ + \ construction and deconstruction.\n\nJacques Poulin-Denis is active in projects\ + \ that intersect theater, dance and music. He has completed his undergraduate\ + \ studies in electroacoustic composition from the University of Montreal, and\ + \ De Montfort University in England. Most of his music was composed for theater\ + \ and dance. Jacques explores innovative ways of presenting electro-acoustic music.\ + \ Jacques' musical style is evocative and filled with imagery. Combining traditional\ + \ and electronic instruments with anecdotic sound sources of everyday life, he\ + \ creates vibrant music that is fierce and poetic." + address: 'Genova, Italy' + author: Martin Messier and Jacques Poulin-Denis + bibtex: "@inproceedings{nime2008-music-Messier2008,\n abstract = {Program notes:\n\ + The Pencil Project is a performance piece created by sound artists Martin Messier\ + \ and Jacques Poulin-Denis. Their intention was to craft a live electronic music\ + \ piece inspired by the physicality of writing and the imagery it articulates.\ + \ The performers translate scribbling, scratching, dotting and drawing with pencil\ + \ music. The computers are hidden and untouched throughout the piece, allowing\ + \ object manipulation and the creation of sound to be the performers' main focus.\n\ + The Pencil Project is about musicianship. 
Liberated from the computer screen and\ + \ equipped with hands-on objects, the performers explore a new form of expressivity.\ + \ Through an authentic and stimulating performance, the musicians bring computer\ + \ music intimately close to playing an actual musical instrument.\n\nAbout the\ + \ performers:\nMartin Messier: Holding a diploma in drums for jazz interpretation,\ + \ Martin Messier has completed a bachelor's degree in electroacoustic composition\ + \ at the University of Montreal, and De Montfort University in England. Recently,\ + \ Martin has founded a solo project called « et si l'aurore disait oui... », through\ + \ which he develops live electroacoustic performance borrowing stylistic elements\ + \ from Intelligent Dance Music, acousmatic and folk. Based on strong aptitudes\ + \ for rhythm, Martin's esthetic can be defined as a complex, left field and happily\ + \ strange sound amalgam, constantly playing with construction and deconstruction.\n\ + \nJacques Poulin-Denis is active in projects that intersect theater, dance and\ + \ music. He has completed his undergraduate studies in electroacoustic composition\ + \ from the University of Montreal, and De Montfort University in England. Most\ + \ of his music was composed for theater and dance. Jacques explores innovative\ + \ ways of presenting electro-acoustic music. Jacques' musical style is evocative\ + \ and filled with imagery. 
Combining traditional and electronic instruments with\ + \ anecdotic sound sources of everyday life, he creates vibrant music that is fierce\ + \ and poetic.},\n address = {Genova, Italy},\n author = {Martin Messier and Jacques\ + \ Poulin-Denis},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n month\ + \ = {June},\n publisher = {Casa Paganini},\n title = {The Pencil Project},\n year\ + \ = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Roberto Doati month: June - publisher: University of Technology Sydney - title: 'What Does A Body Know?' - year: 2010 + publisher: Casa Paganini + title: The Pencil Project + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Barrass2010 - abstract: "Program notes: The Cellist's brain signals and pulse figure against the\ - \ Baroque Basso Continuo that they are playing. The Cellist wears the state of\ - \ the art Enobio system which transmits EEG, ECG and EOG from brain activity,\ - \ eye movements, muscle contractions, and pulse to a laptop computer. These signals\ - \ are mapped into sound in realtime with specially designed sonication algorithms.\n\ - \nAbout the performers:\nStephen Barrass teaches and researches Digital Design\ - \ and Media Arts at the University of Canberra.\nDiane Whitmer is a Neuroscientist\ - \ at Starlab Pty. Ltd. in Barcelona.\nGeoffrey Gartner is a Cellist in the Ensemble\ - \ Offspring in Sydney" - address: 'Sydney, Australia' - author: Stephen Barrass and Diane Whitmer - bibtex: "@inproceedings{nime2010-music-Barrass2010,\n abstract = {Program notes:\ - \ The Cellist's brain signals and pulse figure against the Baroque Basso Continuo\ - \ that they are playing. 
The Cellist wears the state of the art Enobio system\ - \ which transmits EEG, ECG and EOG from brain activity, eye movements, muscle\ - \ contractions, and pulse to a laptop computer. These signals are mapped into\ - \ sound in realtime with specially designed sonication algorithms.\n\nAbout the\ - \ performers:\nStephen Barrass teaches and researches Digital Design and Media\ - \ Arts at the University of Canberra.\nDiane Whitmer is a Neuroscientist at Starlab\ - \ Pty. Ltd. in Barcelona.\nGeoffrey Gartner is a Cellist in the Ensemble Offspring\ - \ in Sydney},\n address = {Sydney, Australia},\n author = {Stephen Barrass and\ - \ Diane Whitmer},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Andrew Johnston, Sam\ - \ Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Baroque Basso Continuo for Cello, Heart (ECG)\ - \ and Mind (EEG)},\n year = {2010}\n}\n" + ID: nime2008-music-Favilla2008 + abstract: "Program notes:\nBent Leather Band introduces their new extended instrument\ + \ project, Heretics Brew. The aim of this project is to develop an extended line\ + \ up with the aim of building a larger ensemble. So far the project [quintet]\ + \ has developed a number of new extended saxophone controllers and is currently\ + \ working on trumpets and guitars. Their instruments are based on Gluion OSC,\ + \ interfaces; programmable frame gate array devices that have multiple configurable\ + \ inputs and outputs. 
For NIME08, the ensemble trio will demonstrate their instruments,\ + \ language and techniques through ensemble improvisation.\n\nAbout the performers:\n\ + Joanne Cannon, composer/improviser, is one of Australia's leading bassoonists.\ + \ Although she began her career as a professional orchestral musician, she now\ + \ works as a composer and improviser, exploring extended techniques. Stuart Favilla\ + \ has a background in composition and improvisation. Together they form the Bent\ + \ Leather Band, a duo that has been developing experimental electronic instruments\ + \ for over twenty years in Australia. Bent Leather Band blurs virtuosity and group\ + \ improvisation across a visual spectacle of stunning original instruments. These\ + \ were made in conjunction with Tasmanian leather artist, Garry Greenwood. The\ + \ instruments include fanciful dragon headed Light-Harps, leather Serpents and\ + \ Monsters that embody sensor interfaces, synthesis and signal processing technology.\ + \ Practicable and intuitive instruments, they have been built with multi-parameter\ + \ control in mind. Joint winners of the Karl Szucka Preis, their work of Bent\ + \ Leather has gained selection at Bourges and won the IAWM New Genre Prize.\n\ + Inspired by the legacy of Percy Grainger's Free music, i.e. “music beyond the\ + \ constraints of conventional pitch and rhythm” [Grainger, 1951], Bent Leather\ + \ Band has strived to develop a new musical language that exploits the potentials\ + \ of synthesis/signal processing, defining new expressive boundaries and dimensions\ + \ and yet also connecting with a heritage of Grainger's musical discourse. 
Grainger\ + \ conceived his music towards the end of the 19th Century, and spent in excess\ + \ of fifty years bringing his ideas to fruition through composition for theremin\ + \ ensemble, the development of 6th tone instruments [pianos and klaviers], the\ + \ development of polyphonic reed instruments for portamento control and a series\ + \ of paper roll, score driven electronic oscillator instruments.\n\nTony Hicks\ + \ enjoys a high profile reputation as Australia's most versatile woodwind artist.\ + \ Equally adept on saxophones, flutes and clarinets, his abilities span a broad\ + \ spectrum of music genres. A student of Dr. Peter Clinch, Tony also studied at\ + \ the Eastman School of Music. He has performed throughout Australia, and across\ + \ Europe, the United States, Japan and China with a number of leading Australian\ + \ ensembles including the Australian Art Orchestra, Elision, and the Peter Clinch\ + \ Saxophone Quartet. He has performed saxophone concertos with the Melbourne Symphony\ + \ Orchestra, and solo'd for Stevie Wonder and his band. As a jazz artist he has\ + \ performed and recorded with leading jazz figures Randy Brecker, Billy Cobham,\ + \ notable Australian artists, Paul Grabowsky, Joe Chindamo, David Jones, and also\ + \ lead a number of important groups in the local Australian scene. An explorer\ + \ of improvised music, he consistently collaborates with numerous artists both\ + \ in Australia and overseas." + address: 'Genova, Italy' + author: Stuart Favilla and Joanne Cannon and Tony Hicks + bibtex: "@inproceedings{nime2008-music-Favilla2008,\n abstract = {Program notes:\n\ + Bent Leather Band introduces their new extended instrument project, Heretics Brew.\ + \ The aim of this project is to develop an extended line up with the aim of building\ + \ a larger ensemble. 
So far the project [quintet] has developed a number of new\ + \ extended saxophone controllers and is currently working on trumpets and guitars.\ + \ Their instruments are based on Gluion OSC, interfaces; programmable frame gate\ + \ array devices that have multiple configurable inputs and outputs. For NIME08,\ + \ the ensemble trio will demonstrate their instruments, language and techniques\ + \ through ensemble improvisation.\n\nAbout the performers:\nJoanne Cannon, composer/improviser,\ + \ is one of Australia's leading bassoonists. Although she began her career as\ + \ a professional orchestral musician, she now works as a composer and improviser,\ + \ exploring extended techniques. Stuart Favilla has a background in composition\ + \ and improvisation. Together they form the Bent Leather Band, a duo that has\ + \ been developing experimental electronic instruments for over twenty years in\ + \ Australia. Bent Leather Band blurs virtuosity and group improvisation across\ + \ a visual spectacle of stunning original instruments. These were made in conjunction\ + \ with Tasmanian leather artist, Garry Greenwood. The instruments include fanciful\ + \ dragon headed Light-Harps, leather Serpents and Monsters that embody sensor\ + \ interfaces, synthesis and signal processing technology. Practicable and intuitive\ + \ instruments, they have been built with multi-parameter control in mind. Joint\ + \ winners of the Karl Szucka Preis, their work of Bent Leather has gained selection\ + \ at Bourges and won the IAWM New Genre Prize.\nInspired by the legacy of Percy\ + \ Grainger's Free music, i.e. “music beyond the constraints of conventional pitch\ + \ and rhythm” [Grainger, 1951], Bent Leather Band has strived to develop a new\ + \ musical language that exploits the potentials of synthesis/signal processing,\ + \ defining new expressive boundaries and dimensions and yet also connecting with\ + \ a heritage of Grainger's musical discourse. 
Grainger conceived his music towards\ + \ the end of the 19th Century, and spent in excess of fifty years bringing his\ + \ ideas to fruition through composition for theremin ensemble, the development\ + \ of 6th tone instruments [pianos and klaviers], the development of polyphonic\ + \ reed instruments for portamento control and a series of paper roll, score driven\ + \ electronic oscillator instruments.\n\nTony Hicks enjoys a high profile reputation\ + \ as Australia's most versatile woodwind artist. Equally adept on saxophones,\ + \ flutes and clarinets, his abilities span a broad spectrum of music genres. A\ + \ student of Dr. Peter Clinch, Tony also studied at the Eastman School of Music.\ + \ He has performed throughout Australia, and across Europe, the United States,\ + \ Japan and China with a number of leading Australian ensembles including the\ + \ Australian Art Orchestra, Elision, and the Peter Clinch Saxophone Quartet. He\ + \ has performed saxophone concertos with the Melbourne Symphony Orchestra, and\ + \ solo'd for Stevie Wonder and his band. As a jazz artist he has performed and\ + \ recorded with leading jazz figures Randy Brecker, Billy Cobham, notable Australian\ + \ artists, Paul Grabowsky, Joe Chindamo, David Jones, and also lead a number of\ + \ important groups in the local Australian scene. 
An explorer of improvised music,\ + \ he consistently collaborates with numerous artists both in Australia and overseas.},\n\ + \ address = {Genova, Italy},\n author = {Stuart Favilla and Joanne Cannon and\ + \ Tony Hicks},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n month\ + \ = {June},\n publisher = {Casa Paganini},\n title = {Heretic's Brew},\n year\ + \ = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Roberto Doati month: June - publisher: University of Technology Sydney - title: 'Baroque Basso Continuo for Cello, Heart (ECG) and Mind (EEG)' - year: 2010 + publisher: Casa Paganini + title: Heretic's Brew + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Drummond2010 - abstract: "Program notes: The interactive electroacoustics in Jet Stream are created\ - \ through the use of an underlying virtual model of a flute. This hybrid virtual\ - \ instrument is controlled through parameters such as bore length, blow intensity,\ - \ pressure, canal width, labium position. Lamorna's “real” flute sounds are analyzed\ - \ with respect to tone color, volume envelopes, frequency and spectral content.\ - \ These sonic gestures are then mapped to performance parameters for the computer's\ - \ virtual flute sonification. Of course the virtual flute doesn't have to conform\ - \ to the physical constraints of the “real-world”.\n\nAbout the performer: Jon\ - \ Drummond is a Sydney based composer, sound artist, programmer, academic and\ - \ researcher. His creative work spans the fields of instrumental music, electroacoustic,\ - \ interactive, sound and new media arts. 
Jon's electroacoustic and interactive\ - \ work has been presented widely including the International Computer Music Conferences\ - \ (Denmark 1994, Canada 1995, Greece 1997, China 1999, Singapore 2003), Electrofringe,\ - \ Totally Huge New Music Festival, Darwin International Guitar Festival and the\ - \ Adelaide Festival of Arts. Jon is currently employed as a researcher at MARCS\ - \ Auditory Laboratories, the University of Western Sydney." - address: 'Sydney, Australia' - author: Jon Drummond - bibtex: "@inproceedings{nime2010-music-Drummond2010,\n abstract = {Program notes:\ - \ The interactive electroacoustics in Jet Stream are created through the use of\ - \ an underlying virtual model of a flute. This hybrid virtual instrument is controlled\ - \ through parameters such as bore length, blow intensity, pressure, canal width,\ - \ labium position. Lamorna's “real” flute sounds are analyzed with respect to\ - \ tone color, volume envelopes, frequency and spectral content. These sonic gestures\ - \ are then mapped to performance parameters for the computer's virtual flute sonification.\ - \ Of course the virtual flute doesn't have to conform to the physical constraints\ - \ of the “real-world”.\n\nAbout the performer: Jon Drummond is a Sydney based\ - \ composer, sound artist, programmer, academic and researcher. His creative work\ - \ spans the fields of instrumental music, electroacoustic, interactive, sound\ - \ and new media arts. Jon's electroacoustic and interactive work has been presented\ - \ widely including the International Computer Music Conferences (Denmark 1994,\ - \ Canada 1995, Greece 1997, China 1999, Singapore 2003), Electrofringe, Totally\ - \ Huge New Music Festival, Darwin International Guitar Festival and the Adelaide\ - \ Festival of Arts. 
Jon is currently employed as a researcher at MARCS Auditory\ - \ Laboratories, the University of Western Sydney.},\n address = {Sydney, Australia},\n\ - \ author = {Jon Drummond},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Jet Stream},\n year = {2010}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + ID: nime2008-music-Bokowiec2008 + abstract: "Program notes:\nThe Suicided Voice is the second piece in the Vox Circuit\ + \ Trilogy, a series of interactive vocal works completed in 2007. In this piece\ + \ the acoustic voice of the performer is “suicided” and given up to digital processing\ + \ and physical re-embodiment. Dialogues are created between acoustic and digital\ + \ voices. Gender specific registers are willfully subverted and fractured. Extended\ + \ vocal techniques make available unusual acoustic resonances that generate rich\ + \ processing textures and spiral into new acoustic and physical trajectories that\ + \ traverse culturally specific boundaries crossing from the human into the virtual,\ + \ from the real into the mythical. The piece is fully scored, there are no pre-recorded\ + \ soundfiles used and no sound manipulation external to the performer's control.\n\ + In The Suicided Voice the sensor interface of the Bodycoder System is located\ + \ on the upper part of the torso. Movement data is mapped to live processing and\ + \ manipulation of sound and images. 
The Bodycoder also provides the performer\ + \ with real-time access to processing parameters and patches within the MSP environment.\ + \ All vocalisations, decisive navigation of the MSP environment and Kinaesonic\ + \ expressivity are selected, initiated and manipulated by the performer. The primary\ + \ expressive functionality of the Bodycoder System is Kinaesonic. The term Kinaesonic\ + \ is derived from the compound of two words: Kinaesthetic meaning the movement\ + \ principles of the body and Sonic meaning sound. In terms of interactive technology\ + \ the term Kinaesonic refers to the one-to-one, mapping of sonic effects to bodily\ + \ movements. In our practice this is usually executed in real-time. The Suicided\ + \ Voice was created in residency at the Banff Centre, Canada and completed in\ + \ the electro-acoustic music facilities of the University of Huddersfield.\n\n\ + About the performers:\n\nMark Bokowiec (Composer, Electronics & Software Designer):\ + \ Mark is the manager of the electro-acoustic music studios and the new Spacialization\ + \ and Interactive Research Lab at the University of Huddersfield. Mark lectures\ + \ in interactive performance, interface design and composition. Composition credits\ + \ include: Tricorder a work for two quarter tone recorders and live MSP, commissioned\ + \ by Ensemble QTR. Commissions for interactive instruments include: the LiteHarp\ + \ for London Science Museum and A Passage To India an interactive sound sculpture\ + \ commissioned by Wakefield City Art Gallery. CD releases include: Route (2001)\ + \ the complete soundtrack on MPS and Ghosts (2000) on Sonic Art from Aberdeen,\ + \ Glasgow, Huddersfield and Newcastle also on the MPS label. 
Mark is currently\ + \ working on an interactive hydro-acoustic installation.\n\nJulie Wilson-Bokowiec\ + \ (vocalist/performer, video and computer graphics): Julie has creating new works\ + \ in opera/music theatre, contemporary dance and theatre including: Salome (Hammersmith\ + \ Odeon – Harvey Goldsmith/Enid production) Suspended Sentences (ICA & touring)\ + \ Figure Three (ICA) for Julia Bardsley, Dorian Grey (LBT/Opera North), Alice\ + \ (LBT) and a variety of large-scale site-specific and Body Art works. As a performer\ + \ and collaborator Julie has worked with such luminaries as Lindsey Kemp, Genesis\ + \ P-Orridge and Psychic TV and the notorious Austrian artist Hermann Nitsch. Julie\ + \ and Mark began creating work with interactive technologies in 1995 developing\ + \ the first generation of the Bodycoder System in 1996." + address: 'Genova, Italy' + author: Mark A. Bokowiec and Julie Wilson-Bokowiec + bibtex: "@inproceedings{nime2008-music-Bokowiec2008,\n abstract = {Program notes:\n\ + The Suicided Voice is the second piece in the Vox Circuit Trilogy, a series of\ + \ interactive vocal works completed in 2007. In this piece the acoustic voice\ + \ of the performer is “suicided” and given up to digital processing and physical\ + \ re-embodiment. Dialogues are created between acoustic and digital voices. Gender\ + \ specific registers are willfully subverted and fractured. Extended vocal techniques\ + \ make available unusual acoustic resonances that generate rich processing textures\ + \ and spiral into new acoustic and physical trajectories that traverse culturally\ + \ specific boundaries crossing from the human into the virtual, from the real\ + \ into the mythical. The piece is fully scored, there are no pre-recorded soundfiles\ + \ used and no sound manipulation external to the performer's control.\nIn The\ + \ Suicided Voice the sensor interface of the Bodycoder System is located on the\ + \ upper part of the torso. 
Movement data is mapped to live processing and manipulation\ + \ of sound and images. The Bodycoder also provides the performer with real-time\ + \ access to processing parameters and patches within the MSP environment. All\ + \ vocalisations, decisive navigation of the MSP environment and Kinaesonic expressivity\ + \ are selected, initiated and manipulated by the performer. The primary expressive\ + \ functionality of the Bodycoder System is Kinaesonic. The term Kinaesonic is\ + \ derived from the compound of two words: Kinaesthetic meaning the movement principles\ + \ of the body and Sonic meaning sound. In terms of interactive technology the\ + \ term Kinaesonic refers to the one-to-one, mapping of sonic effects to bodily\ + \ movements. In our practice this is usually executed in real-time. The Suicided\ + \ Voice was created in residency at the Banff Centre, Canada and completed in\ + \ the electro-acoustic music facilities of the University of Huddersfield.\n\n\ + About the performers:\n\nMark Bokowiec (Composer, Electronics & Software Designer):\ + \ Mark is the manager of the electro-acoustic music studios and the new Spacialization\ + \ and Interactive Research Lab at the University of Huddersfield. Mark lectures\ + \ in interactive performance, interface design and composition. Composition credits\ + \ include: Tricorder a work for two quarter tone recorders and live MSP, commissioned\ + \ by Ensemble QTR. Commissions for interactive instruments include: the LiteHarp\ + \ for London Science Museum and A Passage To India an interactive sound sculpture\ + \ commissioned by Wakefield City Art Gallery. CD releases include: Route (2001)\ + \ the complete soundtrack on MPS and Ghosts (2000) on Sonic Art from Aberdeen,\ + \ Glasgow, Huddersfield and Newcastle also on the MPS label. 
Mark is currently\ + \ working on an interactive hydro-acoustic installation.\n\nJulie Wilson-Bokowiec\ + \ (vocalist/performer, video and computer graphics): Julie has creating new works\ + \ in opera/music theatre, contemporary dance and theatre including: Salome (Hammersmith\ + \ Odeon – Harvey Goldsmith/Enid production) Suspended Sentences (ICA & touring)\ + \ Figure Three (ICA) for Julia Bardsley, Dorian Grey (LBT/Opera North), Alice\ + \ (LBT) and a variety of large-scale site-specific and Body Art works. As a performer\ + \ and collaborator Julie has worked with such luminaries as Lindsey Kemp, Genesis\ + \ P-Orridge and Psychic TV and the notorious Austrian artist Hermann Nitsch. Julie\ + \ and Mark began creating work with interactive technologies in 1995 developing\ + \ the first generation of the Bodycoder System in 1996.},\n address = {Genova,\ + \ Italy},\n author = {Mark A. Bokowiec and Julie Wilson-Bokowiec},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher = {Casa\ + \ Paganini},\n title = {The Suicided Voice},\n year = {2008}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Roberto Doati month: June - publisher: University of Technology Sydney - title: Jet Stream - year: 2010 + publisher: Casa Paganini + title: The Suicided Voice + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Johnston2010 - abstract: "Program notes: This audiovisual work for acoustic instruments and interactive\ - \ software uses simple models of physical structures to mediate between acoustic\ - \ sounds and computer generated sound and visuals. 
Musicians use their acoustic\ - \ instruments to playfully interact with a physically modelled virtual sound sculpture\ - \ which is projected onto the screen.\nThe musicians use sounds produced on their\ - \ acoustic instruments to reach into the virtual world and grasp, push and hit\ - \ the sculpture. In response the structure glows, spins, bounces around and generates\ - \ its own sounds. The pitch and timbre of the live acoustic sounds are captured\ - \ and transformed by the virtual sculpture which sings back in its own way. Each\ - \ individual object (or mass) in the physical model is linked to a synthesis engine\ - \ which uses additive and subtractive synthesis techniques to produce a wide range\ - \ of sonic textures.\nThe frequency of oscillators of the synthesis engines are\ - \ set by the acoustic sounds played by the acoustic musicians and the volume of\ - \ sound produced is controlled by the movement of the masses. The effect is that\ - \ the sound sculpture produces evocative sounds clearly linked to the sonic gestures\ - \ of the performers and the movement of the onscreen sculpture.\nDuring performance\ - \ the physical structure and characteristics of the sculpture are altered. Links\ - \ between masses are cut, spring tension of the links\naltered and damping is\ - \ ramped up and down. Thus, while transparency of operation is maintained, the\ - \ complexity of the interaction between the acoustic and electronic performers\ - \ and the sound sculpture itself leads to rich conversational musical interactions.\n\ - \nAbout the performers: Andrew Johnston is a musician and software developer\ - \ living in Sydney, Australia. 
He completed a music performance degree at the\ - \ Victorian College of the Arts in 1995 and has performed with several Australian\ - \ symphony orchestras and a number of other ensembles.\nSubsequently he has completed\ - \ a Masters degree in Information Technology and in 2009 he completed a PhD investigating\ - \ the design and use of software to support an experimental, exploratory approach\ - \ to live music making. Andrew currently holds the position of Lecturer in the\ - \ Faculty of Engineering and IT at the University of Technology, Sydney.\n\nPhil\ - \ Slater - Trumpet\nJason Noble - Clarinet" - address: 'Sydney, Australia' - author: Andrew Johnston - bibtex: "@inproceedings{nime2010-music-Johnston2010,\n abstract = {Program notes:\ - \ This audiovisual work for acoustic instruments and interactive software uses\ - \ simple models of physical structures to mediate between acoustic sounds and\ - \ computer generated sound and visuals. Musicians use their acoustic instruments\ - \ to playfully interact with a physically modelled virtual sound sculpture which\ - \ is projected onto the screen.\nThe musicians use sounds produced on their acoustic\ - \ instruments to reach into the virtual world and grasp, push and hit the sculpture.\ - \ In response the structure glows, spins, bounces around and generates its own\ - \ sounds. The pitch and timbre of the live acoustic sounds are captured and transformed\ - \ by the virtual sculpture which sings back in its own way. Each individual object\ - \ (or mass) in the physical model is linked to a synthesis engine which uses additive\ - \ and subtractive synthesis techniques to produce a wide range of sonic textures.\n\ - The frequency of oscillators of the synthesis engines are set by the acoustic\ - \ sounds played by the acoustic musicians and the volume of sound produced is\ - \ controlled by the movement of the masses. 
The effect is that the sound sculpture\ - \ produces evocative sounds clearly linked to the sonic gestures of the performers\ - \ and the movement of the onscreen sculpture.\nDuring performance the physical\ - \ structure and characteristics of the sculpture are altered. Links between masses\ - \ are cut, spring tension of the links\naltered and damping is ramped up and down.\ - \ Thus, while transparency of operation is maintained, the complexity of the interaction\ - \ between the acoustic and electronic performers and the sound sculpture itself\ - \ leads to rich conversational musical interactions.\n\nAbout the performers:\ - \ Andrew Johnston is a musician and software developer living in Sydney, Australia.\ - \ He completed a music performance degree at the Victorian College of the Arts\ - \ in 1995 and has performed with several Australian symphony orchestras and a\ - \ number of other ensembles.\nSubsequently he has completed a Masters degree in\ - \ Information Technology and in 2009 he completed a PhD investigating the design\ - \ and use of software to support an experimental, exploratory approach to live\ - \ music making. 
Andrew currently holds the position of Lecturer in the Faculty\ - \ of Engineering and IT at the University of Technology, Sydney.\n\nPhil Slater\ - \ - Trumpet\nJason Noble - Clarinet},\n address = {Sydney, Australia},\n author\ - \ = {Andrew Johnston},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Andrew Johnston, Sam\ - \ Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Touching Dialogue},\n year = {2010}\n}\n" + ID: nime2008-music-Bokowiec2008 + abstract: "Program notes:\nEtch is the third work in the Vox Circuit Trilogy (2007).\ + \ In Etch extended vocal techniques, Yakut and Bell Canto singing, are coupled\ + \ with live interactive sound processing and manipulation. Etch calls forth fauna,\ + \ building soundscapes of glitch infestations, howler tones, clustering sonic-amphibians,\ + \ and swirling flocks of synthetic granular flyers. All sounds are derived from\ + \ the live acoustic voice of the performer. There are no pre-recorded soundfiles\ + \ used in this piece and no sound manipulation external to the performer's control.\ + \ The ability to initiate, embody and manipulate both the acoustic sound and multiple\ + \ layers of processed sound manipulated simultaneously on the limbs – requires\ + \ a unique kind of perceptual, physical and aural precision. This is particularly\ + \ evident at moments when the source vocal articulates of the performer, unheard\ + \ in the diffused soundscape, enter as seemingly phantom sound cells pitch-changed,\ + \ fractured and heavily processed. In such instances the sung score, and the diffused\ + \ and physically manipulated soundscape seem to separate and the performer is\ + \ seen working in counterpoint, articulating an unheard score. 
Etch is punctuated\ + \ by such separations and correlations, by choric expansions, intricate micro\ + \ constructions and moments when the acoustic voice of the performer soars over\ + \ and through the soundscape.\nAlthough the Bodycoder interface configuration\ + \ for Etch is similar to that of The Suicided Voice, located on the upper torso\ + \ - the functional protocols and qualities of physical expressivity are completely\ + \ different. Interface flexibility is a key feature of the Bodycoder System and\ + \ allows for the development of interactive works unrestrained by interface limitations\ + \ or fixed protocols. The flexibility of the interface does however present a\ + \ number of challenges for the performer who must be able to adapt to new protocols,\ + \ adjust and temper her physical expressivity to the requirements of each piece.\n\ + The visual content of both Etch and The Suicided Voice was created in a variety\ + \ of 2D and 3D packages using original photographic and video material. Images\ + \ are processed and manipulated using the same interactive protocols that govern\ + \ sound manipulation. Content and processing is mapped to the physical gestures\ + \ of the performer. As the performer conjures extraordinary voices out of the\ + \ digital realm, so she weaves a multi-layered visual environment combining sound,\ + \ gesture and image to form a powerful 'linguistic intent'.\nEtch was created\ + \ in residency at the Confederation Centre for the Arts on Prince Edward Island,\ + \ Nova Scotia in June 2007.\n\nAbout the performers:\nMark Bokowiec (Composer,\ + \ Electronics & Software Designer). Mark is the manager of the electro-acoustic\ + \ music studios and the new Spacialization and Interactive Research Lab at the\ + \ University of Huddersfield. Mark lectures in interactive performance, interface\ + \ design and composition. 
Composition credits include: Tricorder a work for two\ + \ quarter tone recorders and live MSP, commissioned by Ensemble QTR. Commissions\ + \ for interactive instruments include: the LiteHarp for London Science Museum\ + \ and A Passage To India an interactive sound sculpture commissioned by Wakefield\ + \ City Art Gallery. CD releases include: Route (2001) the complete soundtrack\ + \ on MPS and Ghosts (2000) on Sonic Art from Aberdeen, Glasgow, Huddersfield and\ + \ Newcastle also on the MPS label. Mark is currently working on an interactive\ + \ hydro-acoustic installation.\n\nJulie Wilson-Bokowiec (vocalist/performer, video\ + \ and computer graphics). Julie has creating new works in opera/music theatre,\ + \ contemporary dance and theatre including: Salome (Hammersmith Odeon – Harvey\ + \ Goldsmith/Enid production) Suspended Sentences (ICA & touring) Figure Three\ + \ (ICA) for Julia Bardsley, The Red Room (Canal Café Theatre) nominated for the\ + \ Whitbread London Fringe Theatre Award, Dorian Grey (LBT/Opera North), Alice\ + \ (LBT) and a variety of large- scale site-specific and Body Art works. As a performer\ + \ and collaborator Julie has worked with such luminaries as Lindsey Kemp, Genesis\ + \ P-Orridge and Psychic TV and the notorious Austrian artist Hermann Nitsch. She\ + \ guest lectures in digital performance at a number of University centres, and\ + \ together with Mark, regularly publishes articles on interactive performance\ + \ practice.\n\nJulie and Mark began creating work with interactive technologies\ + \ in 1995 developing the first generation of the Bodycoder System an on- the-body\ + \ sensor interface that uses radio to transmit data in 1996. They have created\ + \ and performed work with the Bodycoder System at various events and venues across\ + \ Europe the US and Canada and at artist gatherings including ISEA and ICMC. 
Major\ + \ works include Spiral Fiction (2002) commissioned by Digital Summer (cultural\ + \ programme of the Commonwealth Games, Manchester). Cyborg Dreaming (2000/1) commissioned\ + \ by the Science Museum, London. Zeitgeist at the KlangArt Festival and Lifting\ + \ Bodies (1999) at the Trafo, Budapest as featured artists at the Hungarian Computer\ + \ Music Foundation Festival NEW WAVES supported by the British Council." + address: 'Genova, Italy' + author: Mark A. Bokowiec and Julie Wilson-Bokowiec + bibtex: "@inproceedings{nime2008-music-Bokowiec2008,\n abstract = {Program notes:\n\ + Etch is the third work in the Vox Circuit Trilogy (2007). In Etch extended vocal\ + \ techniques, Yakut and Bell Canto singing, are coupled with live interactive\ + \ sound processing and manipulation. Etch calls forth fauna, building soundscapes\ + \ of glitch infestations, howler tones, clustering sonic-amphibians, and swirling\ + \ flocks of synthetic granular flyers. All sounds are derived from the live acoustic\ + \ voice of the performer. There are no pre-recorded soundfiles used in this piece\ + \ and no sound manipulation external to the performer's control. The ability to\ + \ initiate, embody and manipulate both the acoustic sound and multiple layers\ + \ of processed sound manipulated simultaneously on the limbs – requires a unique\ + \ kind of perceptual, physical and aural precision. This is particularly evident\ + \ at moments when the source vocal articulates of the performer, unheard in the\ + \ diffused soundscape, enter as seemingly phantom sound cells pitch-changed, fractured\ + \ and heavily processed. In such instances the sung score, and the diffused and\ + \ physically manipulated soundscape seem to separate and the performer is seen\ + \ working in counterpoint, articulating an unheard score. 
Etch is punctuated by\ + \ such separations and correlations, by choric expansions, intricate micro constructions\ + \ and moments when the acoustic voice of the performer soars over and through\ + \ the soundscape.\nAlthough the Bodycoder interface configuration for Etch is\ + \ similar to that of The Suicided Voice, located on the upper torso - the functional\ + \ protocols and qualities of physical expressivity are completely different. Interface\ + \ flexibility is a key feature of the Bodycoder System and allows for the development\ + \ of interactive works unrestrained by interface limitations or fixed protocols.\ + \ The flexibility of the interface does however present a number of challenges\ + \ for the performer who must be able to adapt to new protocols, adjust and temper\ + \ her physical expressivity to the requirements of each piece.\nThe visual content\ + \ of both Etch and The Suicided Voice was created in a variety of 2D and 3D packages\ + \ using original photographic and video material. Images are processed and manipulated\ + \ using the same interactive protocols that govern sound manipulation. Content\ + \ and processing is mapped to the physical gestures of the performer. As the performer\ + \ conjures extraordinary voices out of the digital realm, so she weaves a multi-layered\ + \ visual environment combining sound, gesture and image to form a powerful 'linguistic\ + \ intent'.\nEtch was created in residency at the Confederation Centre for the\ + \ Arts on Prince Edward Island, Nova Scotia in June 2007.\n\nAbout the performers:\n\ + Mark Bokowiec (Composer, Electronics & Software Designer). Mark is the manager\ + \ of the electro-acoustic music studios and the new Spacialization and Interactive\ + \ Research Lab at the University of Huddersfield. Mark lectures in interactive\ + \ performance, interface design and composition. 
Composition credits include:\ + \ Tricorder a work for two quarter tone recorders and live MSP, commissioned by\ + \ Ensemble QTR. Commissions for interactive instruments include: the LiteHarp\ + \ for London Science Museum and A Passage To India an interactive sound sculpture\ + \ commissioned by Wakefield City Art Gallery. CD releases include: Route (2001)\ + \ the complete soundtrack on MPS and Ghosts (2000) on Sonic Art from Aberdeen,\ + \ Glasgow, Huddersfield and Newcastle also on the MPS label. Mark is currently\ + \ working on an interactive hydro-acoustic installation.\n\nJulie Wilson-Bokowiec\ + \ (vocalist/performer, video and computer graphics). Julie has creating new works\ + \ in opera/music theatre, contemporary dance and theatre including: Salome (Hammersmith\ + \ Odeon – Harvey Goldsmith/Enid production) Suspended Sentences (ICA & touring)\ + \ Figure Three (ICA) for Julia Bardsley, The Red Room (Canal Café Theatre) nominated\ + \ for the Whitbread London Fringe Theatre Award, Dorian Grey (LBT/Opera North),\ + \ Alice (LBT) and a variety of large- scale site-specific and Body Art works.\ + \ As a performer and collaborator Julie has worked with such luminaries as Lindsey\ + \ Kemp, Genesis P-Orridge and Psychic TV and the notorious Austrian artist Hermann\ + \ Nitsch. She guest lectures in digital performance at a number of University\ + \ centres, and together with Mark, regularly publishes articles on interactive\ + \ performance practice.\n\nJulie and Mark began creating work with interactive\ + \ technologies in 1995 developing the first generation of the Bodycoder System\ + \ an on- the-body sensor interface that uses radio to transmit data in 1996. They\ + \ have created and performed work with the Bodycoder System at various events\ + \ and venues across Europe the US and Canada and at artist gatherings including\ + \ ISEA and ICMC. 
Major works include Spiral Fiction (2002) commissioned by Digital\ + \ Summer (cultural programme of the Commonwealth Games, Manchester). Cyborg Dreaming\ + \ (2000/1) commissioned by the Science Museum, London. Zeitgeist at the KlangArt\ + \ Festival and Lifting Bodies (1999) at the Trafo, Budapest as featured artists\ + \ at the Hungarian Computer Music Foundation Festival NEW WAVES supported by the\ + \ British Council.},\n address = {Genova, Italy},\n author = {Mark A. Bokowiec\ + \ and Julie Wilson-Bokowiec},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n\ + \ month = {June},\n publisher = {Casa Paganini},\n title = {Etch},\n year = {2008}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Roberto Doati month: June - publisher: University of Technology Sydney - title: Touching Dialogue - year: 2010 + publisher: Casa Paganini + title: Etch + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Collins2010 - abstract: "Program notes: Kinesics is a structure for improvisation; the computer\ - \ uses extensive machine listening technology to track the pianist and generates\ - \ feature-based effects. The computer also guides the pianist to explore actions\ - \ from a catalogue of gestures, some of which are heavy-handed. A feedback loop\ - \ is established of interpretation of sounding and physical gesture.\nThe Computer\ - \ was born in China in 2009, but eventually found its way to England to the ownership\ - \ of a grubby handed computer musician. Though ostensibly based somewhere near\ - \ Brighton, it went on to have many adventures around the world, and is grateful\ - \ to its owner at least for never putting it in hold luggage. 
Though suffering\ - \ an alarming logic board failure of cataclysmic proportions before even reaching\ - \ its first birthday, replacement surgery by qualified though over familiar service\ - \ personnel saved its life. Philosophical questions remain about the extent to\ - \ which its current personality is contiguous with the old, as evidenced in various\ - \ proprietary programs temporarily refusing to believe in their host brain anymore.\ - \ But it is just happy it can be here tonight to play for you.\nThere will also\ - \ be a dispensable human being on stage.\nAbout the performers:\nComputer - Electronics\n\ - Nick Collins - Piano" - address: 'Sydney, Australia' - author: Nick Collins - bibtex: "@inproceedings{nime2010-music-Collins2010,\n abstract = {Program notes:\ - \ Kinesics is a structure for improvisation; the computer uses extensive machine\ - \ listening technology to track the pianist and generates feature-based effects.\ - \ The computer also guides the pianist to explore actions from a catalogue of\ - \ gestures, some of which are heavy-handed. A feedback loop is established of\ - \ interpretation of sounding and physical gesture.\nThe Computer was born in China\ - \ in 2009, but eventually found its way to England to the ownership of a grubby\ - \ handed computer musician. Though ostensibly based somewhere near Brighton, it\ - \ went on to have many adventures around the world, and is grateful to its owner\ - \ at least for never putting it in hold luggage. Though suffering an alarming\ - \ logic board failure of cataclysmic proportions before even reaching its first\ - \ birthday, replacement surgery by qualified though over familiar service personnel\ - \ saved its life. Philosophical questions remain about the extent to which its\ - \ current personality is contiguous with the old, as evidenced in various proprietary\ - \ programs temporarily refusing to believe in their host brain anymore. 
But it\ - \ is just happy it can be here tonight to play for you.\nThere will also be a\ - \ dispensable human being on stage.\nAbout the performers:\nComputer - Electronics\n\ - Nick Collins - Piano},\n address = {Sydney, Australia},\n author = {Nick Collins},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Andrew Johnston, Sam Ferguson, Jos Mulder,\ - \ Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger\ - \ Mills},\n month = {June},\n publisher = {University of Technology Sydney},\n\ - \ title = {Kinesics},\n year = {2010}\n}\n" + ID: nime2008-music-Ciufo2008 + abstract: "Program notes:\nSilent Movies is an attempt to explore and confront some\ + \ of the possible relationships / interdependencies between visual and sonic perception.\ + \ In collaboration with a variety of moving image artists, this performance piece\ + \ complicates visual engagement through performed / improvised sound. In a sense,\ + \ Silent Movies plays with the live soundtrack idea, but from a somewhat different\ + \ vantage point. Or maybe it is an inversion; a visual accompaniment to an improvised\ + \ sonic landscape? For this performance, I will use a hybrid extended electric\ + \ guitar / computer performance system, which allows me to explore extended playing\ + \ techniques and sonic transformations provided by sensor controlled interactive\ + \ digital signal processing. 
For tonight's performance, the moving image composition\ + \ is by Mark Domino (fieldform.com).\nFor more information, please refer to online\ + \ documentation: Guitar performance system : http://ciufo.org/eighth_nerve_guitar.html\n\ + Performance documentation: http://ciufo.org/silent_movies.html\n\nAbout the performer:\n\ + Thomas Ciufo is an improviser, sound / media artist, and researcher working primarily\ + \ in the areas of electroacoustic improvisational performance and hybrid instrument\ + \ / interactive systems design, and is currently serving as artist-in- residence\ + \ in Arts and Technology at Smith College. Recent and ongoing sound works include,\ + \ three meditations, for prepared piano and computer, the series, sonic improvisations\ + \ #N, and eighth nerve, an improvisational piece for prepared electric guitar\ + \ and computer. Recent performances include off-ICMC in Barcelona, Visione Sonoras\ + \ in Mexico City, the SPARK festival in Minneapolis, the International Society\ + \ for Improvised Music conference in Ann Arbor, and the Enaction in Arts conference\ + \ in Grenoble." + address: 'Genova, Italy' + author: Thomas Ciufo + bibtex: "@inproceedings{nime2008-music-Ciufo2008,\n abstract = {Program notes:\n\ + Silent Movies is an attempt to explore and confront some of the possible relationships\ + \ / interdependencies between visual and sonic perception. In collaboration with\ + \ a variety of moving image artists, this performance piece complicates visual\ + \ engagement through performed / improvised sound. 
In a sense, Silent Movies plays\ + \ with the live soundtrack idea, but from a somewhat different vantage point.\ + \ Or maybe it is an inversion; a visual accompaniment to an improvised sonic landscape?\ + \ For this performance, I will use a hybrid extended electric guitar / computer\ + \ performance system, which allows me to explore extended playing techniques and\ + \ sonic transformations provided by sensor controlled interactive digital signal\ + \ processing. For tonight's performance, the moving image composition is by Mark\ + \ Domino (fieldform.com).\nFor more information, please refer to online documentation:\ + \ Guitar performance system : http://ciufo.org/eighth_nerve_guitar.html\nPerformance\ + \ documentation: http://ciufo.org/silent_movies.html\n\nAbout the performer:\n\ + Thomas Ciufo is an improviser, sound / media artist, and researcher working primarily\ + \ in the areas of electroacoustic improvisational performance and hybrid instrument\ + \ / interactive systems design, and is currently serving as artist-in- residence\ + \ in Arts and Technology at Smith College. Recent and ongoing sound works include,\ + \ three meditations, for prepared piano and computer, the series, sonic improvisations\ + \ #N, and eighth nerve, an improvisational piece for prepared electric guitar\ + \ and computer. 
Recent performances include off-ICMC in Barcelona, Visione Sonoras\ + \ in Mexico City, the SPARK festival in Minneapolis, the International Society\ + \ for Improvised Music conference in Ann Arbor, and the Enaction in Arts conference\ + \ in Grenoble.},\n address = {Genova, Italy},\n author = {Thomas Ciufo},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher = {Casa\ + \ Paganini},\n title = {Silent Movies: an Improvisational Sound / Image Performance},\n\ + \ year = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Roberto Doati month: June - publisher: University of Technology Sydney - title: Kinesics - year: 2010 + publisher: Casa Paganini + title: 'Silent Movies: an Improvisational Sound / Image Performance' + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Ratcliffe2010 - abstract: "Program notes: \"Mutations\" is an interactive work exploring notions\ - \ of the DJ set and the remix through the integration of various streams of piano\ - \ based material in live performance. Incorporating human and machine-generated\ - \ material, a “realization” of the piece involves the management of a pool of\ - \ audio files, MIDI files, and score fragments, which are drawn upon during performance.\ - \ In this way, the performer is required to control and shape the various streams\ - \ of material in the same way that a DJ would select and combine records during\ - \ the structuring of a live set (an alternative realization of `Mutations' may\ - \ involve the playback of mixed material, in which the trajectory of the narrative\ - \ has been determined in advance). 
The supply of audio files, MIDI files, and\ - \ score fragments used in the construction of the piece takes existing works from\ - \ the piano repertoire as source material, both transformed and quoted intact,\ - \ resulting in a spectrum of recognizability ranging from the easily identifiable,\ - \ to the ambiguous, to the non-referential. The integration of this borrowed material\ - \ within the three strands of the piece highlights various connections between\ - \ traditional forms of musical borrowing, transformative imitation, improvisation,\ - \ electroacoustic sound transformation and quotation, EDM sampling practices,\ - \ remix practices and DJ performance. This version of `Mutations' features a pre-recorded\ - \ electronic part, realized using a software application created by Jon Weinel.\n\ - \nAbout the performers: Robert Ratcliffe is currently completing a PhD in composition\ - \ (New Forms of Hybrid Musical Discourse) at Keele University (UK). He is the\ - \ first composer to develop a musical language based on the cross fertilization\ - \ of contemporary art music and electronic dance music (EDM). http://www.myspace.com/visionfugitive.\n\ - \nZubin Kanga - Piano\nRobert Ratcliffe - Electronics\nJon Weinel - Software Author" - address: 'Sydney, Australia' - author: Robert Ratcliffe and Jon Weinel - bibtex: "@inproceedings{nime2010-music-Ratcliffe2010,\n abstract = {Program notes:\ - \ \"Mutations\" is an interactive work exploring notions of the DJ set and the\ - \ remix through the integration of various streams of piano based material in\ - \ live performance. Incorporating human and machine-generated material, a “realization”\ - \ of the piece involves the management of a pool of audio files, MIDI files, and\ - \ score fragments, which are drawn upon during performance. 
In this way, the performer\ - \ is required to control and shape the various streams of material in the same\ - \ way that a DJ would select and combine records during the structuring of a live\ - \ set (an alternative realization of `Mutations' may involve the playback of mixed\ - \ material, in which the trajectory of the narrative has been determined in advance).\ - \ The supply of audio files, MIDI files, and score fragments used in the construction\ - \ of the piece takes existing works from the piano repertoire as source material,\ - \ both transformed and quoted intact, resulting in a spectrum of recognizability\ - \ ranging from the easily identifiable, to the ambiguous, to the non-referential.\ - \ The integration of this borrowed material within the three strands of the piece\ - \ highlights various connections between traditional forms of musical borrowing,\ - \ transformative imitation, improvisation, electroacoustic sound transformation\ - \ and quotation, EDM sampling practices, remix practices and DJ performance. This\ - \ version of `Mutations' features a pre-recorded electronic part, realized using\ - \ a software application created by Jon Weinel.\n\nAbout the performers: Robert\ - \ Ratcliffe is currently completing a PhD in composition (New Forms of Hybrid\ - \ Musical Discourse) at Keele University (UK). He is the first composer to develop\ - \ a musical language based on the cross fertilization of contemporary art music\ - \ and electronic dance music (EDM). http://www.myspace.com/visionfugitive.\n\n\ - Zubin Kanga - Piano\nRobert Ratcliffe - Electronics\nJon Weinel - Software Author},\n\ - \ address = {Sydney, Australia},\n author = {Robert Ratcliffe and Jon Weinel},\n\ + ID: nime2008-music-Schedel2008 + abstract: "Program notes:\nDeveloped in Amsterdam, at STEIM, The Color of Waiting\ + \ uses animation, movement and video to portray themes of expectation. 
This collaboration\
    \ (between animator Nick Fox-Gieg, choreographer/dancer Alison Rootberg, composer/programmer\
    \ Margaret Schedel, and set designer Abra Brayman) deals with the anticipation\
    \ of events by understanding the way time unfolds. The performers shift between\
    \ frustration and acceptance as they portray the emotions evoked when waiting\
    \ for something or someone. The Color of Waiting is an experience and a mood,\
    \ an abstraction depicting human interaction.\n\nAbout the performers:\nAlison\
    \ Rootberg and Margaret Schedel founded The Kinesthetech Sense in 2006 with the\
    \ intent to collaborate with visual artists, dancers, and musicians, creating\
    \ ferociously interactive experiences for audiences throughout the world. Rootberg,\
    \ the Vice President of Programming for the Dance Resource Center, focuses on\
    \ incorporating dance with video while Schedel, an assistant professor of music\
    \ at Stony Brook University, combines audio with interactive technologies. Oskar\
    \ Fischinger once said that, \"everything in the world has its own spirit which\
    \ can be released by setting it in motion.\" Together Rootberg and Schedel create\
    \ systems which are set in motion by artistic input, facilitating interplay between\
    \ computers and humans. Kinesthetech Sense has had their work presented throughout\
    \ the US, Canada, Denmark, Germany, Italy, and Mexico. For more info, please go\
    \ to: www.ksense.org"
  address: 'Genova, Italy'
  author: Alison Rootberg and Margaret Schedel
  bibtex: "@inproceedings{nime2008-music-Schedel2008,\n abstract = {Program notes:\n\
Developed in Amsterdam, at STEIM, The Color of Waiting uses animation, movement\
    \ and video to portray themes of expectation. This collaboration (between animator\
    \ Nick Fox-Gieg, choreographer/dancer Alison Rootberg, composer/programmer Margaret\
    \ Schedel, and set designer Abra Brayman) deals with the anticipation of events\
    \ by understanding the way time unfolds. The performers shift between frustration\
    \ and acceptance as they portray the emotions evoked when waiting for something\
    \ or someone. The Color of Waiting is an experience and a mood, an abstraction\
    \ depicting human interaction.\n\nAbout the performers:\nAlison Rootberg and Margaret\
    \ Schedel founded The Kinesthetech Sense in 2006 with the intent to collaborate\
    \ with visual artists, dancers, and musicians, creating ferociously interactive\
    \ experiences for audiences throughout the world. Rootberg, the Vice President\
    \ of Programming for the Dance Resource Center, focuses on incorporating dance\
    \ with video while Schedel, an assistant professor of music at Stony Brook University,\
    \ combines audio with interactive technologies. Oskar Fischinger once said that,\
    \ \"everything in the world has its own spirit which can be released by setting\
    \ it in motion.\" Together Rootberg and Schedel create systems which are set in\
    \ motion by artistic input, facilitating interplay between computers and humans.\
    \ Kinesthetech Sense has had their work presented throughout the US, Canada, Denmark,\
    \ Germany, Italy, and Mexico. 
For more info, please go to: www.ksense.org},\n\ + \ address = {Genova, Italy},\n author = {Alison Rootberg and Margaret Schedel},\n\ \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Andrew Johnston, Sam Ferguson, Jos Mulder,\ - \ Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger\ - \ Mills},\n month = {June},\n publisher = {University of Technology Sydney},\n\ - \ title = {Mutations},\n year = {2010}\n}\n" + \ for Musical Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher\ + \ = {Casa Paganini},\n title = {NIME Performance - The Color of Waiting},\n year\ + \ = {2008}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Roberto Doati + month: June + publisher: Casa Paganini + title: NIME Performance - The Color of Waiting + year: 2008 + + +- ENTRYTYPE: inproceedings + ID: nime2008-music-Drummond2008 + abstract: "Program notes:\nInspired by the swirls, vortices and lemniscate like\ + \ patterns created by moving water and other fluids, Sonic Construction uses the\ + \ movement of coloured dyes in a semi-viscous liquid to generate and control sound.\ + \ The work is performed by dropping different coloured dyes (red, green, yellow,\ + \ blue) into a clear glass vessel filled with water, made slightly viscous through\ + \ the addition of a sugar syrup (Figure 2). Through the use of video tracking,\ + \ the speed, colour and spatial location of the different coloured drops of dye\ + \ are analysed as they are dropped into the glass vessel and subsequently expand,\ + \ swirl, coil and entwine in the water. 
The control data derived from the video\ + \ tracking of the ink drops is used to define both the shape and the way in which\ + \ individual grains of sound are combined using FOF (Fonction d'Onde Formatique\ + \ translated as Formant Wave-Form or Formant Wave Function) synthesis [1] [2],\ + \ to create a rich and varied timbral sound environment. In developing Sonic Construction\ + \ I sought to create a system that would provide a sense of connection with the\ + \ interactive processes being employed and at the same time to create a system\ + \ over which I had only limited direct control; ideally being influenced by the\ + \ system's responses as much as I was influencing the system.\nTimbres produced\ + \ by the system include bass-rich pulse streams, vocal textures and a variety\ + \ of bell like sounds. The fluid movement of the coloured dye in the liquid is\ + \ further used to spatialise the outputs of the FOF synthesis. The video captured\ + \ of the dyes in the liquid, used for motion analysis and colour matching, is\ + \ also projected back into the performance space, slightly processed using contrast,\ + \ saturation and hue effects.\n\nAbout the performer:\nJon Drummond is a Sydney\ + \ based composer and performer. His creative work spans the fields of instrumental\ + \ music, electroacoustic, interactive, sound and new media arts. Jon's electroacoustic\ + \ and interactive work has been presented widely including the International Computer\ + \ Music Conferences (Denmark 1994, Canada 1995, Greece 1997, China 1999, Singapore\ + \ 2003), Electrofringe, Totally Huge New Music Festival, Darwin International\ + \ Guitar Festival and the Adelaide Festival of Arts. Many of his acoustic and\ + \ electronic compositions have been commissioned and performed by leading Australian\ + \ performers and ensembles including austraLYSIS, The Song Company, Ros Dunlop\ + \ and Kathleen Gallagher. 
Recently Jon has been exploring the use of environmental\ + \ signals from the natural world as generative devices for creating electroacoustic\ + \ sound - video tracking the fluid motions of water in \"Sonic Construction\"\ + \ and the motion of air through the use of kites in \"Sounding the Winds\"." + address: 'Genova, Italy' + author: Jon Drummond + bibtex: "@inproceedings{nime2008-music-Drummond2008,\n abstract = {Program notes:\n\ + Inspired by the swirls, vortices and lemniscate like patterns created by moving\ + \ water and other fluids, Sonic Construction uses the movement of coloured dyes\ + \ in a semi-viscous liquid to generate and control sound. The work is performed\ + \ by dropping different coloured dyes (red, green, yellow, blue) into a clear\ + \ glass vessel filled with water, made slightly viscous through the addition of\ + \ a sugar syrup (Figure 2). Through the use of video tracking, the speed, colour\ + \ and spatial location of the different coloured drops of dye are analysed as\ + \ they are dropped into the glass vessel and subsequently expand, swirl, coil\ + \ and entwine in the water. The control data derived from the video tracking of\ + \ the ink drops is used to define both the shape and the way in which individual\ + \ grains of sound are combined using FOF (Fonction d'Onde Formatique translated\ + \ as Formant Wave-Form or Formant Wave Function) synthesis [1] [2], to create\ + \ a rich and varied timbral sound environment. In developing Sonic Construction\ + \ I sought to create a system that would provide a sense of connection with the\ + \ interactive processes being employed and at the same time to create a system\ + \ over which I had only limited direct control; ideally being influenced by the\ + \ system's responses as much as I was influencing the system.\nTimbres produced\ + \ by the system include bass-rich pulse streams, vocal textures and a variety\ + \ of bell like sounds. 
The fluid movement of the coloured dye in the liquid is\ + \ further used to spatialise the outputs of the FOF synthesis. The video captured\ + \ of the dyes in the liquid, used for motion analysis and colour matching, is\ + \ also projected back into the performance space, slightly processed using contrast,\ + \ saturation and hue effects.\n\nAbout the performer:\nJon Drummond is a Sydney\ + \ based composer and performer. His creative work spans the fields of instrumental\ + \ music, electroacoustic, interactive, sound and new media arts. Jon's electroacoustic\ + \ and interactive work has been presented widely including the International Computer\ + \ Music Conferences (Denmark 1994, Canada 1995, Greece 1997, China 1999, Singapore\ + \ 2003), Electrofringe, Totally Huge New Music Festival, Darwin International\ + \ Guitar Festival and the Adelaide Festival of Arts. Many of his acoustic and\ + \ electronic compositions have been commissioned and performed by leading Australian\ + \ performers and ensembles including austraLYSIS, The Song Company, Ros Dunlop\ + \ and Kathleen Gallagher. 
Recently Jon has been exploring the use of environmental\ + \ signals from the natural world as generative devices for creating electroacoustic\ + \ sound - video tracking the fluid motions of water in \"Sonic Construction\"\ + \ and the motion of air through the use of kites in \"Sounding the Winds\".},\n\ + \ address = {Genova, Italy},\n author = {Jon Drummond},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ + \ title = {Sonic Construction},\n year = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Roberto Doati month: June - publisher: University of Technology Sydney - title: Mutations - year: 2010 + publisher: Casa Paganini + title: Sonic Construction + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Paine2010 - abstract: "Program notes: Grace Space is a new work for clarinet and realtime electronic\ - \ transformation. It plays with sonic space and non-space; the use of the grace\ - \ note to define relationships of transition from a forgotten or distant space\ - \ to a familiar space, a known pitch. The piece contemplates memory, the experience\ - \ of snapping out of a daydream, from distant imaginings or recollections to the\ - \ real space, the events right in front of you. The realtime electronic transformation\ - \ makes the space fluid and introduces a height ened depth of perspective. 
Surround\ - \ sound spatialization techniques are used also to bring the sound of the clarinet\ - \ off the stage and around the audience, subverting the audience as spectator\ - \ relationship to one where the audience is at the core of the work, the position\ - \ the performer usually occupies.\n\nAbout the performers: Garth Paine has exhibited\ - \ immersive interactive environments in Australia, Europe, Japan, USA, Hong Kong\ - \ and New Zealand. He is on the organizing and peer review panels for the International\ - \ Conference On New Interfaces for Musical Expression (NIME), the International\ - \ Computer Music Conference. He has twice been guest editor of Organized Sound\ - \ Journal (Cambridge University Press) for special editions on interactive systems\ - \ in music and sound installation. He is often invited to run workshops on interactivity\ - \ for musical performance and commissioned to develop interactive system for realtime\ - \ musical composition for dance and theatre performances. He was selected as one\ - \ of ten creative professionals internationally for exhibition in the 10th New\ - \ York Digital Salon; DesignX Critical Reflections, and as a millennium leader\ - \ of innovation by the German Keyboard Magazine in 2000. Dr Paine was awarded\ - \ the Australia Council for the Arts, New Media Arts Fellowship in 2000, and The\ - \ RMIT Innovation Research Award in 2002. He is a member of the advisory panel\ - \ for the Electronic Music Foundation and one of 17 advisors to the UNESCO funded\ - \ Symposium on the Future, which is developing a taxonomy / design space of electronic\ - \ musical instruments. Recently Dr Paine been invited to perform at the Agora\ - \ Festival, Centre Pompidou, Paris (2006) and the New York Electronic Arts Festival\ - \ (2007), and in 2009 will perform in Sydney, Melbourne, Perth, Lymerik Ireland,\ - \ New York City, Montreal and Quebec in Canada, and Phoenix Arizona. 
In 2008 Dr\ - \ Paine received the UWS Vice-Chancellor's Excellence Award for Postgraduate Research\ - \ Training and Supervision.\n\nJason Noble - Clarinet" - address: 'Sydney, Australia' - author: Garth Paine - bibtex: "@inproceedings{nime2010-music-Paine2010,\n abstract = {Program notes: Grace\ - \ Space is a new work for clarinet and realtime electronic transformation. It\ - \ plays with sonic space and non-space; the use of the grace note to define relationships\ - \ of transition from a forgotten or distant space to a familiar space, a known\ - \ pitch. The piece contemplates memory, the experience of snapping out of a daydream,\ - \ from distant imaginings or recollections to the real space, the events right\ - \ in front of you. The realtime electronic transformation makes the space fluid\ - \ and introduces a height ened depth of perspective. Surround sound spatialization\ - \ techniques are used also to bring the sound of the clarinet off the stage and\ - \ around the audience, subverting the audience as spectator relationship to one\ - \ where the audience is at the core of the work, the position the performer usually\ - \ occupies.\n\nAbout the performers: Garth Paine has exhibited immersive interactive\ - \ environments in Australia, Europe, Japan, USA, Hong Kong and New Zealand. He\ - \ is on the organizing and peer review panels for the International Conference\ - \ On New Interfaces for Musical Expression (NIME), the International Computer\ - \ Music Conference. He has twice been guest editor of Organized Sound Journal\ - \ (Cambridge University Press) for special editions on interactive systems in\ - \ music and sound installation. He is often invited to run workshops on interactivity\ - \ for musical performance and commissioned to develop interactive system for realtime\ - \ musical composition for dance and theatre performances. 
He was selected as one\ - \ of ten creative professionals internationally for exhibition in the 10th New\ - \ York Digital Salon; DesignX Critical Reflections, and as a millennium leader\ - \ of innovation by the German Keyboard Magazine in 2000. Dr Paine was awarded\ - \ the Australia Council for the Arts, New Media Arts Fellowship in 2000, and The\ - \ RMIT Innovation Research Award in 2002. He is a member of the advisory panel\ - \ for the Electronic Music Foundation and one of 17 advisors to the UNESCO funded\ - \ Symposium on the Future, which is developing a taxonomy / design space of electronic\ - \ musical instruments. Recently Dr Paine been invited to perform at the Agora\ - \ Festival, Centre Pompidou, Paris (2006) and the New York Electronic Arts Festival\ - \ (2007), and in 2009 will perform in Sydney, Melbourne, Perth, Lymerik Ireland,\ - \ New York City, Montreal and Quebec in Canada, and Phoenix Arizona. In 2008 Dr\ - \ Paine received the UWS Vice-Chancellor's Excellence Award for Postgraduate Research\ - \ Training and Supervision.\n\nJason Noble - Clarinet},\n address = {Sydney, Australia},\n\ - \ author = {Garth Paine},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Grace Space},\n year = {2010}\n}\n" + ID: nime2008-music-GeWang2008 + abstract: "Program notes:\nThe Mobile Phone Orchestra is a new repetoire-based ensemble\ + \ using mobile phones as the primary musical instrument. 
The MoPhO Suite contains\ + \ a selection of recent compositions that highlights different aspects of what\ + \ it means to compose for and perform with such an instrument in an ensemble setting.\ + \ Brief program note: The Mobile Phone Orchestra of CCRMA (MoPhO) presents an\ + \ ensemble suite featuring music performed on mobile phones. Far beyond ring-tones,\ + \ these interactive musical works take advantage of the unique technological capabilities\ + \ of today's hardware, transforming phone keypads, built-in accelerometers, and\ + \ built-in microphones into powerful and yet mobile chamber meta-instruments.\ + \ The suite consists of selection of representative pieces:\n***Drone In/Drone\ + \ Out (Ge Wang): human players, mobile phones, FM timbres, accelerometers.\n***TamaG\ + \ (Georg Essl): TamaG is a piece that explores the boundary of projecting the\ + \ humane onto mobile devices and at the same time display the fact that they are\ + \ deeply mechanical and artificial. It explores the question how much control\ + \ we have in the interaction with these devices or if the device itself at times\ + \ controls us. The piece work with the tension between these positions and crosses\ + \ the desirable and the alarming, the human voice with mechanical noise. The alarming\ + \ effect has a social quality and spreads between the performers. The sounding\ + \ algorithm is the non-linear circle map which is used in easier-to-control and\ + \ hard-to-control regimes to evoke the effects of control and desirability on\ + \ the one hand the the loss of control and mechanistic function on the other hand.\n\ + ***The Phones and Fury (Jeff Cooper and Henri Penttinen): how much damage can\ + \ a single player do with 10 mobile phones? Facilitating loops, controllable playback\ + \ speed, and solo instruments.\n***Chatter (Ge Wang): the audience is placed in\ + \ the middle of a web of conversations...\n\nAbout the performers:\nGe Wang received\ + \ his B.S. 
in Computer Science in 2000 from Duke University, PhD (soon) in Computer\ + \ Science (advisor Perry Cook) in 2008 from Princeton University, and is currently\ + \ an assistant professor at Stanford University in the Center for Computer Research\ + \ in Music and Acoustics (CCRMA). His research interests include interactive software\ + \ systems (of all sizes) for computer music, programming languages, sound synthesis\ + \ and analysis, music information retrieval, new performance ensembles (e.g.,\ + \ laptop orchestra) and paradigms (e.g., live coding), visualization, interfaces\ + \ for human-computer interaction, interactive audio over networks, and methodologies\ + \ for education at the intersection of computer science and music. Ge is the chief\ + \ architect of the ChucK audio programming language and the Audicle environment.\ + \ He was a founding developer and co-director of the Princeton Laptop Orchestra\ + \ (PLOrk), the founder and director of the Stanford Laptop Orchestra (SLOrk),\ + \ and a co-creator of the TAPESTREA sound design environment. Ge composes and\ + \ performs via various electro-acoustic and computer-mediated means, including\ + \ with PLOrk/SLOrk, with Perry as a live coding duo, and with Princeton graduate\ + \ student and comrade Rebecca Fiebrink in a duo exploring new performance paradigms,\ + \ cool audio software, and great food.\n\nGeorg Essl is currently Senior Research\ + \ Scientist at Deutsche Telekom Laboratories at TU-Berlin, Germany. He works on\ + \ mobile interaction, new interfaces for musical expression and sound synthesis\ + \ algorithms that are abstract mathematical or physical models. After he received\ + \ his Ph.D. in Computer Science at Princeton University under the supervision\ + \ of Perry Cook he served on the faculty of the University of Florida and worked\ + \ at the MIT Media Lab Europe in Dublin before joining T-Labs.\n\nHenri Penttinen\ + \ was born in Espoo, Finland, in 1975. He completed his M.Sc. 
and PhD (Dr. Tech.)\ + \ degrees in Electrical Engineering at the Helsinki University of Technology (TKK)\ + \ in 2002 and 2006, respectively. He conducted his studies and teaches about digital\ + \ signal processors and audio processing at the Department of Signal Processing\ + \ and Acoustics (until 2007 known as Laboratory of Acoustics and Signal Processing)\ + \ at TKK. Dr. Penttinen was a visiting scholar at Center for Computer Research\ + \ in Music and Acoustics (CCRMA), Stanford University, during 2007 and 2008. His\ + \ main research interests are sound synthesis, signal processing algorithms, musical\ + \ acoustics, real-time audio applications in mobile environments. He is one of\ + \ the co-founders and directors, with Georg Essl and Ge Wang, of the Mobile Phone\ + \ Orchestra of CCRMA (MoPhO). He is also the co-inventor, with Jaakko Prättälä,\ + \ of the electro-acoustic bottle (eBottle). His electro-acoustic pieces have been\ + \ performed around Finland, in the USA, and Cuba. Additional Composer Biography:\ + \ Jeffrey Cooper is a musician / producer from Bryan, Texas. Having worked as\ + \ a programmer and DJ for a number of years, he is currently finishing a Master\ + \ Degree in Music, Science, and Technology at Stanford University / CCRMA. Co-\ + \ composer of music for mobile phones with the honorable Henri Penttinen." + address: 'Genova, Italy' + author: 'Ge Wang, Georg Essl and Henri Penttinen' + bibtex: "@inproceedings{nime2008-music-GeWang2008,\n abstract = {Program notes:\n\ + The Mobile Phone Orchestra is a new repetoire-based ensemble using mobile phones\ + \ as the primary musical instrument. The MoPhO Suite contains a selection of recent\ + \ compositions that highlights different aspects of what it means to compose for\ + \ and perform with such an instrument in an ensemble setting. Brief program note:\ + \ The Mobile Phone Orchestra of CCRMA (MoPhO) presents an ensemble suite featuring\ + \ music performed on mobile phones. 
Far beyond ring-tones, these interactive musical\ + \ works take advantage of the unique technological capabilities of today's hardware,\ + \ transforming phone keypads, built-in accelerometers, and built-in microphones\ + \ into powerful and yet mobile chamber meta-instruments. The suite consists of\ + \ selection of representative pieces:\n***Drone In/Drone Out (Ge Wang): human\ + \ players, mobile phones, FM timbres, accelerometers.\n***TamaG (Georg Essl):\ + \ TamaG is a piece that explores the boundary of projecting the humane onto mobile\ + \ devices and at the same time display the fact that they are deeply mechanical\ + \ and artificial. It explores the question how much control we have in the interaction\ + \ with these devices or if the device itself at times controls us. The piece work\ + \ with the tension between these positions and crosses the desirable and the alarming,\ + \ the human voice with mechanical noise. The alarming effect has a social quality\ + \ and spreads between the performers. The sounding algorithm is the non-linear\ + \ circle map which is used in easier-to-control and hard-to-control regimes to\ + \ evoke the effects of control and desirability on the one hand the the loss of\ + \ control and mechanistic function on the other hand.\n***The Phones and Fury\ + \ (Jeff Cooper and Henri Penttinen): how much damage can a single player do with\ + \ 10 mobile phones? Facilitating loops, controllable playback speed, and solo\ + \ instruments.\n***Chatter (Ge Wang): the audience is placed in the middle of\ + \ a web of conversations...\n\nAbout the performers:\nGe Wang received his B.S.\ + \ in Computer Science in 2000 from Duke University, PhD (soon) in Computer Science\ + \ (advisor Perry Cook) in 2008 from Princeton University, and is currently an\ + \ assistant professor at Stanford University in the Center for Computer Research\ + \ in Music and Acoustics (CCRMA). 
His research interests include interactive software\ + \ systems (of all sizes) for computer music, programming languages, sound synthesis\ + \ and analysis, music information retrieval, new performance ensembles (e.g.,\ + \ laptop orchestra) and paradigms (e.g., live coding), visualization, interfaces\ + \ for human-computer interaction, interactive audio over networks, and methodologies\ + \ for education at the intersection of computer science and music. Ge is the chief\ + \ architect of the ChucK audio programming language and the Audicle environment.\ + \ He was a founding developer and co-director of the Princeton Laptop Orchestra\ + \ (PLOrk), the founder and director of the Stanford Laptop Orchestra (SLOrk),\ + \ and a co-creator of the TAPESTREA sound design environment. Ge composes and\ + \ performs via various electro-acoustic and computer-mediated means, including\ + \ with PLOrk/SLOrk, with Perry as a live coding duo, and with Princeton graduate\ + \ student and comrade Rebecca Fiebrink in a duo exploring new performance paradigms,\ + \ cool audio software, and great food.\n\nGeorg Essl is currently Senior Research\ + \ Scientist at Deutsche Telekom Laboratories at TU-Berlin, Germany. He works on\ + \ mobile interaction, new interfaces for musical expression and sound synthesis\ + \ algorithms that are abstract mathematical or physical models. After he received\ + \ his Ph.D. in Computer Science at Princeton University under the supervision\ + \ of Perry Cook he served on the faculty of the University of Florida and worked\ + \ at the MIT Media Lab Europe in Dublin before joining T-Labs.\n\nHenri Penttinen\ + \ was born in Espoo, Finland, in 1975. He completed his M.Sc. and PhD (Dr. Tech.)\ + \ degrees in Electrical Engineering at the Helsinki University of Technology (TKK)\ + \ in 2002 and 2006, respectively. 
He conducted his studies and teaches about digital\ + \ signal processors and audio processing at the Department of Signal Processing\ + \ and Acoustics (until 2007 known as Laboratory of Acoustics and Signal Processing)\ + \ at TKK. Dr. Penttinen was a visiting scholar at Center for Computer Research\ + \ in Music and Acoustics (CCRMA), Stanford University, during 2007 and 2008. His\ + \ main research interests are sound synthesis, signal processing algorithms, musical\ + \ acoustics, real-time audio applications in mobile environments. He is one of\ + \ the co-founders and directors, with Georg Essl and Ge Wang, of the Mobile Phone\ + \ Orchestra of CCRMA (MoPhO). He is also the co-inventor, with Jaakko Prättälä,\ + \ of the electro-acoustic bottle (eBottle). His electro-acoustic pieces have been\ + \ performed around Finland, in the USA, and Cuba. Additional Composer Biography:\ + \ Jeffrey Cooper is a musician / producer from Bryan, Texas. Having worked as\ + \ a programmer and DJ for a number of years, he is currently finishing a Master\ + \ Degree in Music, Science, and Technology at Stanford University / CCRMA. 
Co-\ + \ composer of music for mobile phones with the honorable Henri Penttinen.},\n\ + \ address = {Genova, Italy},\n author = {Ge Wang, Georg Essl and Henri Penttinen},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher\ + \ = {Casa Paganini},\n title = {MoPho – A Suite for a Mobile Phone Orchestra},\n\ + \ year = {2008}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Roberto Doati month: June - publisher: University of Technology Sydney - title: Grace Space - year: 2010 + publisher: Casa Paganini + title: MoPho – A Suite for a Mobile Phone Orchestra + year: 2008 - ENTRYTYPE: inproceedings - ID: nime2010-music-Mackay2010 - abstract: "Program notes: This piece was written for Duo Contour, and uses the following\ - \ poem `Under the Slates' by Martin Daws as its inspiration:\nWe are earth people\n\ - \nLong have we hidden\nIn the rock heavy heart\nAnd harboured our strengths\n\ - Among the agonies of stone\n\nOurs is the granite\nWind withered to pinnacles\n\ - And the whispered secret\nPassed behind its scream\n\nAnd the dark slate blasted\n\ - Into fragments of its nature\nShattered forgotten bodies\nPatterned random\nHeaped\ - \ on houses\n\nDropped on churches\nSilenced hymns\nOn buried villages lost to\ - \ light\n\nWe mourn our eagles\nCount our sheep\nLay our seed on crusted bed spring\n\ - Spines shrunk with the gravity\nDreams pulled out of star flight\nDriven back\ - \ to earth to bone\nTo wakeful vision raw with piling rock\nAgainst the sun\n\n\ - We are the subjects of a skyline\nHeld in hard embrace\nIts dark love a sanctuary\n\ - For our healing\n\nThe poem reflects Daws' response to the altered landscape formed\ - \ 
by slate quarrying in the village of Bethesda in North Wales.\nThe role of the\ - \ instrumentalists in this piece is to create a textural accompaniment to the\ - \ words. Through different sound transformation techniques, the sounds of the\ - \ instruments are altered in real-time to create word-painting effects.\nVideo\ - \ sequences of the poet himself are juxtaposed against images of the area in question.\ - \ These images are manipulated in real-time by the sound of the instruments themselves.\ - \ The video imagery, like the music, is intended to re flect the meaning of the\ - \ text. For this piece, I created my own software tools in Max/MSP/Jitter for\ - \ live audio/video interaction.\n\n\nAbout the performer: Rob Mackay is a composer,\ - \ sound artist and performer. He obtained a degree in Geology and Music at the\ - \ University of Keele, studying composition there with Mike Vaughan, before going\ - \ on to complete a Master's and PhD with Andrew Lewis at the University of Wales,\ - \ Bangor. Currently he is a lecturer in Creative Music Technology at the University\ - \ of Hull, Scarborough Campus, and is the course director.\nRecent projects have\ - \ moved towards a cross-disciplinary approach, including theatre, audio/visual\ - \ installation work, and human/computer interaction. Prizes and honours include:\ - \ IMEB Bourges (1997 and 2001); EAR99 from Hungarian Radio (1999); Confluencias\ - \ (2003); La Muse en Circuit (2004 and 2006). His work has received over 100 performances\ - \ in 16 countries (including several performances on BBC Radio 3). 
He has held\ - \ composer residencies at Slovak Radio (Bratislava), La Muse en Circuit (Paris),\ - \ and the Tyrone Guthre Arts Centre (Ireland).\nHe has played, written and produced\ - \ in a number of bands and ensembles, including the Welsh Hip-Hop collective \"\ - Tystion\" with whom he collaborated alongside John Cale on the film `A Beautiful\ - \ Mistake', as well as recording two John Peel sessions on BBC Radio 1 and supporting\ - \ PJ Harvey. More recently, he has done session work for Gowel Owen and Euros\ - \ Childs. 6 CDs including his compositions are available. More information and\ - \ pieces at:\n\nwww.myspace.com/robflute\nwww.digital-music-archives.com" - address: 'Sydney, Australia' - author: Robert Mackay - bibtex: "@inproceedings{nime2010-music-Mackay2010,\n abstract = {Program notes:\ - \ This piece was written for Duo Contour, and uses the following poem `Under the\ - \ Slates' by Martin Daws as its inspiration:\nWe are earth people\n\nLong have\ - \ we hidden\nIn the rock heavy heart\nAnd harboured our strengths\nAmong the agonies\ - \ of stone\n\nOurs is the granite\nWind withered to pinnacles\nAnd the whispered\ - \ secret\nPassed behind its scream\n\nAnd the dark slate blasted\nInto fragments\ - \ of its nature\nShattered forgotten bodies\nPatterned random\nHeaped on houses\n\ - \nDropped on churches\nSilenced hymns\nOn buried villages lost to light\n\nWe\ - \ mourn our eagles\nCount our sheep\nLay our seed on crusted bed spring\nSpines\ - \ shrunk with the gravity\nDreams pulled out of star flight\nDriven back to earth\ - \ to bone\nTo wakeful vision raw with piling rock\nAgainst the sun\n\nWe are the\ - \ subjects of a skyline\nHeld in hard embrace\nIts dark love a sanctuary\nFor\ - \ our healing\n\nThe poem reflects Daws' response to the altered landscape formed\ - \ by slate quarrying in the village of Bethesda in North Wales.\nThe role of the\ - \ instrumentalists in this piece is to create a textural accompaniment to the\ - \ words. 
Through different sound transformation techniques, the sounds of the\ - \ instruments are altered in real-time to create word-painting effects.\nVideo\ - \ sequences of the poet himself are juxtaposed against images of the area in question.\ - \ These images are manipulated in real-time by the sound of the instruments themselves.\ - \ The video imagery, like the music, is intended to re flect the meaning of the\ - \ text. For this piece, I created my own software tools in Max/MSP/Jitter for\ - \ live audio/video interaction.\n\n\nAbout the performer: Rob Mackay is a composer,\ - \ sound artist and performer. He obtained a degree in Geology and Music at the\ - \ University of Keele, studying composition there with Mike Vaughan, before going\ - \ on to complete a Master's and PhD with Andrew Lewis at the University of Wales,\ - \ Bangor. Currently he is a lecturer in Creative Music Technology at the University\ - \ of Hull, Scarborough Campus, and is the course director.\nRecent projects have\ - \ moved towards a cross-disciplinary approach, including theatre, audio/visual\ - \ installation work, and human/computer interaction. Prizes and honours include:\ - \ IMEB Bourges (1997 and 2001); EAR99 from Hungarian Radio (1999); Confluencias\ - \ (2003); La Muse en Circuit (2004 and 2006). His work has received over 100 performances\ - \ in 16 countries (including several performances on BBC Radio 3). He has held\ - \ composer residencies at Slovak Radio (Bratislava), La Muse en Circuit (Paris),\ - \ and the Tyrone Guthre Arts Centre (Ireland).\nHe has played, written and produced\ - \ in a number of bands and ensembles, including the Welsh Hip-Hop collective \"\ - Tystion\" with whom he collaborated alongside John Cale on the film `A Beautiful\ - \ Mistake', as well as recording two John Peel sessions on BBC Radio 1 and supporting\ - \ PJ Harvey. More recently, he has done session work for Gowel Owen and Euros\ - \ Childs. 6 CDs including his compositions are available. 
More information and\ - \ pieces at:\n\nwww.myspace.com/robflute\nwww.digital-music-archives.com},\n address\ - \ = {Sydney, Australia},\n author = {Robert Mackay},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ - \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {Altered Landscapes},\n\ - \ year = {2010}\n}\n" + ID: nime2011-music-Troyer2011 + abstract: "Program notes:\n\nIn LOLC, the musicians in the laptop orchestra use\ + \ a textual performance interface, developed specifically for this piece, to create\ + \ and share rhythmic motives based on a collection of recorded sounds. The environment\ + \ encourages musicians to share their code with each other, developing an improvisational\ + \ conversation over time as material is looped, borrowed, and transformed. LOLC\ + \ was originally created by Akito van Troyer and Jason Freeman and is in active\ + \ development at the Georgia Tech Center for Music Technology by Jason Freeman,\ + \ Andrew Colella, Sang Won Lee and Shannon Yao. LOLC is supported by a grant from\ + \ the National Science Foundation as part of a larger research project on musical\ + \ improvisation in performance and education (NSF CreativeIT#0855758).\n\nAbout\ + \ the performers:\n\nAaron Albin, Andrew Colella, Sertan Sentürk and Sang Won\ + \ Lee are current degree candidates or alumni from the Georgia Tech Center for\ + \ Music Technology. All are focused on exploring new methods of musical interactivity\ + \ through projects that involve current technology such as the Kinect, swarm robots,\ + \ creative video games, and current MIR techniques." 
+ address: 'Oslo, Norway' + author: Akito van Troyer and Jason Freeman and Avinash Sastry and Sang Won Lee and + Shannon Yao + bibtex: "@inproceedings{nime2011-music-Troyer2011,\n abstract = {Program notes:\n\ + \nIn LOLC, the musicians in the laptop orchestra use a textual performance interface,\ + \ developed specifically for this piece, to create and share rhythmic motives\ + \ based on a collection of recorded sounds. The environment encourages musicians\ + \ to share their code with each other, developing an improvisational conversation\ + \ over time as material is looped, borrowed, and transformed. LOLC was originally\ + \ created by Akito van Troyer and Jason Freeman and is in active development at\ + \ the Georgia Tech Center for Music Technology by Jason Freeman, Andrew Colella,\ + \ Sang Won Lee and Shannon Yao. LOLC is supported by a grant from the National\ + \ Science Foundation as part of a larger research project on musical improvisation\ + \ in performance and education (NSF CreativeIT#0855758).\n\nAbout the performers:\n\ + \nAaron Albin, Andrew Colella, Sertan Sentürk and Sang Won Lee are current degree\ + \ candidates or alumni from the Georgia Tech Center for Music Technology. 
All\ + \ are focused on exploring new methods of musical interactivity through projects\ + \ that involve current technology such as the Kinect, swarm robots, creative video\ + \ games, and current MIR techniques.},\n address = {Oslo, Norway},\n author =\ + \ {Akito van Troyer and Jason Freeman and Avinash Sastry and Sang Won Lee and\ + \ Shannon Yao},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ + \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ + \ title = {LOLC},\n url = {https://vimeo.com/26678685},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Altered Landscapes - year: 2010 + publisher: Norwegian Academy of Music + title: LOLC + url: https://vimeo.com/26678685 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Havryliv2010 - abstract: "Program notes: This performance draws on a natural feature of a particular\ - \ class of chaotic oscillators described by Julien Sprott, namely that they require\ - \ a driving force in order to perform as chaotic attractors. In the unmodified\ - \ equations driving forces are introduced mathematically, however, as we calculate\ - \ the chaotic systems in real-time we open the door to using a performers audio\ - \ signal as an input force.\nThis class of oscillator exhibits interesting behavior\ - \ in response to different frequency inputs; in particular, the systems are sensitive\ - \ to changes in low frequency tones. 
This encourages the use of Just Intonation\ - \ as a method of determining tuning systems with easily defined difference tones;\ - \ the scale developed by Kraig Grady features many difference tones in an excitable\ - \ range for the chaotic oscillators.\n\nAbout the performers:\nMark Havryliv is\ - \ a doctoral student developing a haptic musical instrument at the University\ - \ of Wollongong. Aside from that research, he is interested in the musical possibilities\ - \ of integrating real-time sonification with other disciplines like game design\ - \ and creative writing.\n\nKraig Grady, an Anaphorian now living in Australia,\ - \ composes almost exclusively for acoustic instruments of his own making or modification\ - \ tuned to just intonation. Often his work is combined with his Shadow Theatre\ - \ productions. His work has been presented at Ballhaus Naunyn Berlin (Germany),\ - \ the Chateau de la Napoule (France), the Norton Simon Museum of Art, the UCLA\ - \ Armand Hammer Museum, the Pacific Asia Museum, the Los Angeles Philharmonics\ - \ American Music Weekend and New Music America 1985. He was chosen by Buzz Magazine\ - \ as one of the \"100 coolest people in Los Angeles\".\n\nKraig Grady - Just Intonation\ - \ Tuned Marimba\nMark Havryliv - Saxophone" - address: 'Sydney, Australia' - author: Mark Havryliv - bibtex: "@inproceedings{nime2010-music-Havryliv2010,\n abstract = {Program notes:\ - \ This performance draws on a natural feature of a particular class of chaotic\ - \ oscillators described by Julien Sprott, namely that they require a driving force\ - \ in order to perform as chaotic attractors. 
In the unmodified equations driving\ - \ forces are introduced mathematically, however, as we calculate the chaotic systems\ - \ in real-time we open the door to using a performers audio signal as an input\ - \ force.\nThis class of oscillator exhibits interesting behavior in response to\ - \ different frequency inputs; in particular, the systems are sensitive to changes\ - \ in low frequency tones. This encourages the use of Just Intonation as a method\ - \ of determining tuning systems with easily defined difference tones; the scale\ - \ developed by Kraig Grady features many difference tones in an excitable range\ - \ for the chaotic oscillators.\n\nAbout the performers:\nMark Havryliv is a doctoral\ - \ student developing a haptic musical instrument at the University of Wollongong.\ - \ Aside from that research, he is interested in the musical possibilities of integrating\ - \ real-time sonification with other disciplines like game design and creative\ - \ writing.\n\nKraig Grady, an Anaphorian now living in Australia, composes almost\ - \ exclusively for acoustic instruments of his own making or modification tuned\ - \ to just intonation. Often his work is combined with his Shadow Theatre productions.\ - \ His work has been presented at Ballhaus Naunyn Berlin (Germany), the Chateau\ - \ de la Napoule (France), the Norton Simon Museum of Art, the UCLA Armand Hammer\ - \ Museum, the Pacific Asia Museum, the Los Angeles Philharmonics American Music\ - \ Weekend and New Music America 1985. 
He was chosen by Buzz Magazine as one of\ - \ the \"100 coolest people in Los Angeles\".\n\nKraig Grady - Just Intonation\ - \ Tuned Marimba\nMark Havryliv - Saxophone},\n address = {Sydney, Australia},\n\ - \ author = {Mark Havryliv},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Warming for Blackall},\n year = {2010}\n}\n" + ID: nime2011-music-Brandtsegg2011 + abstract: "Program notes:\n\nThe duo Little Soldier Joe uses percussion and live\ + \ processing to explore thematic and textural ideas that arise in the improvised\ + \ interplay between these two performers. LSJ uses live sampling and manipulation\ + \ matter-of-factly as an established manner of music making. The audio manipulation\ + \ techniques used are based on recent developments in particle synthesis.\n\n\ + About the performers:\n\nØyvind Brandtsegg: Composer, musician and professor in\ + \ music technology at NTNU. His focus lies in Compositionally Enabled Instruments,\ + \ Particle Synthesis and sound installations. Øyvind has performed with the groups\ + \ Krøyt and Motorpsycho, written music for interactive dance, theatre and TV,\ + \ and worked as a programmer for other artists. His latest effort in music software\ + \ programming is the “Hadron Particle Synthesizer”, to be released as a device\ + \ for “Ableton Live”' and as a VST plug-in.\n\nCarl Haakon Waadeland: Musician,\ + \ composer and professor in music at NTNU. His main scientific interest lies within\ + \ empirical rhythm research and the construction of models that simulate rhythm\ + \ performance. 
Waadeland has performed and recorded amongst others with Gary Holton\ + \ & Casino Steel, Warne Marsh, Siris Svale Band, Mikis Theodorakis & Arja Saijonmaa,\ + \ Dadafon, and Rasmus og Verdens Beste Band. Waadeland published a book and CD\ + \ on the Norwegian folk drum tradition in 2008." + address: 'Oslo, Norway' + author: Øyvind Brandtsegg and Carl Haakon Waadeland + bibtex: "@inproceedings{nime2011-music-Brandtsegg2011,\n abstract = {Program notes:\n\ + \nThe duo Little Soldier Joe uses percussion and live processing to explore thematic\ + \ and textural ideas that arise in the improvised interplay between these two\ + \ performers. LSJ uses live sampling and manipulation matter-of-factly as an established\ + \ manner of music making. The audio manipulation techniques used are based on\ + \ recent developments in particle synthesis.\n\nAbout the performers:\n\nØyvind\ + \ Brandtsegg: Composer, musician and professor in music technology at NTNU. His\ + \ focus lies in Compositionally Enabled Instruments, Particle Synthesis and sound\ + \ installations. Øyvind has performed with the groups Krøyt and Motorpsycho, written\ + \ music for interactive dance, theatre and TV, and worked as a programmer for\ + \ other artists. His latest effort in music software programming is the “Hadron\ + \ Particle Synthesizer”, to be released as a device for “Ableton Live”' and as\ + \ a VST plug-in.\n\nCarl Haakon Waadeland: Musician, composer and professor in\ + \ music at NTNU. His main scientific interest lies within empirical rhythm research\ + \ and the construction of models that simulate rhythm performance. Waadeland has\ + \ performed and recorded amongst others with Gary Holton & Casino Steel, Warne\ + \ Marsh, Siris Svale Band, Mikis Theodorakis & Arja Saijonmaa, Dadafon, and Rasmus\ + \ og Verdens Beste Band. 
Waadeland published a book and CD on the Norwegian folk\ + \ drum tradition in 2008.},\n address = {Oslo, Norway},\n author = {Øyvind Brandtsegg\ + \ and Carl Haakon Waadeland},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Kjell Tore\ + \ Innervik and Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy\ + \ of Music},\n title = {Little Soldier Joe},\n url = {https://vimeo.com/26680018},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Warming for Blackall - year: 2010 + publisher: Norwegian Academy of Music + title: Little Soldier Joe + url: https://vimeo.com/26680018 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Hopson2010 - abstract: "Program notes: Life on (Planet) is a work for two rocks and interactive\ - \ computer processing. The performer clicks and rubs the rocks together in front\ - \ of a stereo microphone. The computer responds to how the rocks are played, with\ - \ particular regard to changes in tempo, articulation, volume, and position of\ - \ the rocks relative to the left/right stereo field of the microphone. Complex\ - \ combinations of (somewhat) controllable sounds arise from the accretion of input\ - \ sound combined with feedback from the space.\n\nAbout the performer:\nHolland\ - \ Hopson is a composer, improviser, and electronic artist. As an instrumentalist\ - \ he performs on soprano saxophone, clawhammer banjo and electronics. 
He has held\ - \ residencies at STEIM, Amsterdam; Experimental Music Studios, Krakow and Katowice,\ - \ Poland; Sonic Arts Research Studio, Vancouver, Canada; LEMURPlex, Brooklyn;\ - \ and Harvestworks Digital Media Arts, New York where he developed a sound installation\ - \ based on Marcel Duchamp's sculpture, With Hidden Noise. An avid phonographer,\ - \ Holland has recorded sounds on four continents and in over a dozen countries.\ - \ Holland's latest recording is With Hidden Noises released on Grab Rare Arts\ - \ (www.grabrarearts.com)." - address: 'Sydney, Australia' - author: Holland Hopson - bibtex: "@inproceedings{nime2010-music-Hopson2010,\n abstract = {Program notes:\ - \ Life on (Planet) is a work for two rocks and interactive computer processing.\ - \ The performer clicks and rubs the rocks together in front of a stereo microphone.\ - \ The computer responds to how the rocks are played, with particular regard to\ - \ changes in tempo, articulation, volume, and position of the rocks relative to\ - \ the left/right stereo field of the microphone. Complex combinations of (somewhat)\ - \ controllable sounds arise from the accretion of input sound combined with feedback\ - \ from the space.\n\nAbout the performer:\nHolland Hopson is a composer, improviser,\ - \ and electronic artist. As an instrumentalist he performs on soprano saxophone,\ - \ clawhammer banjo and electronics. He has held residencies at STEIM, Amsterdam;\ - \ Experimental Music Studios, Krakow and Katowice, Poland; Sonic Arts Research\ - \ Studio, Vancouver, Canada; LEMURPlex, Brooklyn; and Harvestworks Digital Media\ - \ Arts, New York where he developed a sound installation based on Marcel Duchamp's\ - \ sculpture, With Hidden Noise. An avid phonographer, Holland has recorded sounds\ - \ on four continents and in over a dozen countries. 
Holland's latest recording\ - \ is With Hidden Noises released on Grab Rare Arts (www.grabrarearts.com).},\n\ - \ address = {Sydney, Australia},\n author = {Holland Hopson},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ - \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {Life on (Planet)},\n\ - \ year = {2010}\n}\n" + ID: nime2011-music-Lopez2011 + abstract: "Program notes:\n\nThe Reactable was conceived in 2003 and was first presented\ + \ at the International Computer Music Conference (ICMC) 2005 in Barcelona. Since\ + \ then, the Reactable team has given more than 300 presentations and concerts\ + \ in 40 countries, turning it into one of the most worldwide acclaimed new musical\ + \ instruments of the 21st century. Since 2009, the Barcelona spin-off company\ + \ Reactable Systems has been producing several Reactable models, such as the Reactable\ + \ Live for traveling musicians and DJs, or its latest incarnation, Reactable mobile\ + \ for Apple's iPhones and iPads.\n\nAbout the performers:\n\nCarles López: Musician,\ + \ producer and DJ born in Barcelona. López has been playing with the Reactable\ + \ for the last three years. With this instrument he has performed in more than\ + \ 40 countries, at all kinds of events, clubs and festivals. López also works\ + \ as a composer for films and contemporary dance." + address: 'Oslo, Norway' + author: Carles López + bibtex: "@inproceedings{nime2011-music-Lopez2011,\n abstract = {Program notes:\n\ + \nThe Reactable was conceived in 2003 and was first presented at the International\ + \ Computer Music Conference (ICMC) 2005 in Barcelona. 
Since then, the Reactable\ + \ team has given more than 300 presentations and concerts in 40 countries, turning\ + \ it into one of the most worldwide acclaimed new musical instruments of the 21st\ + \ century. Since 2009, the Barcelona spin-off company Reactable Systems has been\ + \ producing several Reactable models, such as the Reactable Live for traveling\ + \ musicians and DJs, or its latest incarnation, Reactable mobile for Apple's iPhones\ + \ and iPads.\n\nAbout the performers:\n\nCarles López: Musician, producer and\ + \ DJ born in Barcelona. López has been playing with the Reactable for the last\ + \ three years. With this instrument he has performed in more than 40 countries,\ + \ at all kinds of events, clubs and festivals. López also works as a composer\ + \ for films and contemporary dance.},\n address = {Oslo, Norway},\n author = {Carles\ + \ López},\n booktitle = {Music Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ + \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ + \ title = {Reactable},\n url = {https://vimeo.com/26678704},\n year = {2011}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Life on (Planet) - year: 2010 + publisher: Norwegian Academy of Music + title: Reactable + url: https://vimeo.com/26678704 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Sorensen2010 - address: 'Sydney, Australia' - author: Andrew Sorensen - bibtex: "@inproceedings{nime2010-music-Sorensen2010,\n address = {Sydney, Australia},\n\ - \ author = {Andrew Sorensen},\n booktitle = {Music Proceedings of the International\ - 
\ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Live Coding Improvisation},\n year = {2010}\n\ - }\n" + ID: nime2011-music-Selle2011 + abstract: "Program notes:\n\nLicht & Hiebe (2010) is the first concert piece for\ + \ the new Instrument: The ``Hexenkessel'' (witch's cauldron) is a modified 22\"\ + \ timpani that uses LLP technology to turn the drumhead into an intuitive multitouch-interface\ + \ for the control of live-electronics & dmx-stage-lights. The multitouch technique\ + \ goes into symbiosis with a traditional instrument, keeping its acoustic qualities,\ + \ but opening it to the vast possibilities of interactive multimedia. Besides\ + \ the control of live-electronics the instrument features an interface to dmx-controlled\ + \ stage-lights to create a situation of intense intermedial fireworks entirely\ + \ controlled by the performer. The parts needed for this non-destructive timpani-hack\ + \ cost less than $500.\n\nAbout the performers:\n\nJacob Sello (1976, Hamburg/Germany)\ + \ studied Audio Engineering, Systematic Musicology and Multimedia Composition\ + \ in Hamburg. He is highly interested in the exciting possibilities that arise\ + \ from the conjunction of traditional acoustic instruments and state-of-the-art\ + \ technology. Pieces for clarinet controlled RC- helicopters or DJ-driven pneumatically\ + \ prepared disklavier pieces are the outcome.\n\nStefan Weinzierl (1985, Günzburg/Germany)\ + \ is constantly searching for fascinating challenges beyond genre-boundaries;\ + \ as a drummer in contemporary solo performances, classical ensembles and orchestras\ + \ as well as in Jazz- and Rock/Pop bands. 
He graduated in educational sciences\ + \ in Regensburg and completed the Percussion Master program at the HfMT Hamburg\ + \ in 2010." + address: 'Oslo, Norway' + author: Jacob Selle and Stefan Weinzierl + bibtex: "@inproceedings{nime2011-music-Selle2011,\n abstract = {Program notes:\n\ + \nLicht & Hiebe (2010) is the first concert piece for the new Instrument: The\ + \ ``Hexenkessel'' (witch's cauldron) is a modified 22\" timpani that uses LLP\ + \ technology to turn the drumhead into an intuitive multitouch-interface for the\ + \ control of live-electronics \\& dmx-stage-lights. The multitouch technique goes\ + \ into symbiosis with a traditional instrument, keeping its acoustic qualities,\ + \ but opening it to the vast possibilities of interactive multimedia. Besides\ + \ the control of live-electronics the instrument features an interface to dmx-controlled\ + \ stage-lights to create a situation of intense intermedial fireworks entirely\ + \ controlled by the performer. The parts needed for this non-destructive timpani-hack\ + \ cost less than \\$500.\n\nAbout the performers:\n\nJacob Sello (1976, Hamburg/Germany)\ + \ studied Audio Engineering, Systematic Musicology and Multimedia Composition\ + \ in Hamburg. He is highly interested in the exciting possibilities that arise\ + \ from the conjunction of traditional acoustic instruments and state-of-the-art\ + \ technology. Pieces for clarinet controlled RC- helicopters or DJ-driven pneumatically\ + \ prepared disklavier pieces are the outcome.\n\nStefan Weinzierl (1985, Günzburg/Germany)\ + \ is constantly searching for fascinating challenges beyond genre-boundaries;\ + \ as a drummer in contemporary solo performances, classical ensembles and orchestras\ + \ as well as in Jazz- and Rock/Pop bands. 
He graduated in educational sciences\ + \ in Regensburg and completed the Percussion Master program at the HfMT Hamburg\ + \ in 2010.},\n address = {Oslo, Norway},\n author = {Jacob Selle and Stefan Weinzierl},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ + \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {Licht\ + \ \\& Hiebe},\n url = {https://vimeo.com/27687788},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Live Coding Improvisation - year: 2010 + publisher: Norwegian Academy of Music + title: Licht & Hiebe + url: https://vimeo.com/27687788 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Brown2010 - address: 'Sydney, Australia' - author: Andrew Brown - bibtex: "@inproceedings{nime2010-music-Brown2010,\n address = {Sydney, Australia},\n\ - \ author = {Andrew Brown},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {A Live Coding Performance},\n year = {2010}\n\ - }\n" + ID: nime2011-music-Clayton2011 + abstract: "Program notes:\n\nRefraction of Your Gaze by Indeterminate Variables\ + \ (ROYGBIV) is an effort to interface sound and the visible spectrum with digital\ + \ and analog media. 
A collage of field recording, synth pad, and mechanical noise,\ + \ ROYGBIV unfolds as wavelengths of light are read with discrete color sensors.\ + \ Data is communicated through microcontrollers to custom audio software and a\ + \ slide projector reproduces images of the natural world. ROYGBIV is concerned\ + \ with fundamental properties of sensing, perception, and the technologies that\ + \ mediate such experience. Metaphysical dimensions of color and sound are implied\ + \ as the projected image and rainbow form a dialectic between reflection and refraction.\n\ + \nAbout the performers:\n\nJoshua Clayton: New York-based artist whose work occupies\ + \ a hybrid space of media art and language. His recent projects explore semiotics,\ + \ mysticism, architecture and the urban landscape, and research-based forms of\ + \ practice. Joshua has just completed a master's degree in Interactive Telecommunications\ + \ from New York University." + address: 'Oslo, Norway' + author: Joshua Clayton + bibtex: "@inproceedings{nime2011-music-Clayton2011,\n abstract = {Program notes:\n\ + \nRefraction of Your Gaze by Indeterminate Variables (ROYGBIV) is an effort to\ + \ interface sound and the visible spectrum with digital and analog media. A collage\ + \ of field recording, synth pad, and mechanical noise, ROYGBIV unfolds as wavelengths\ + \ of light are read with discrete color sensors. Data is communicated through\ + \ microcontrollers to custom audio software and a slide projector reproduces images\ + \ of the natural world. ROYGBIV is concerned with fundamental properties of sensing,\ + \ perception, and the technologies that mediate such experience. Metaphysical\ + \ dimensions of color and sound are implied as the projected image and rainbow\ + \ form a dialectic between reflection and refraction.\n\nAbout the performers:\n\ + \nJoshua Clayton: New York-based artist whose work occupies a hybrid space of\ + \ media art and language. 
His recent projects explore semiotics, mysticism, architecture\ + \ and the urban landscape, and research-based forms of practice. Joshua has just\ + \ completed a master's degree in Interactive Telecommunications from New York\ + \ University.},\n address = {Oslo, Norway},\n author = {Joshua Clayton},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ + \ {June},\n publisher = {Norwegian Academy of Music},\n title = {ROYGBIV},\n url\ + \ = {https://vimeo.com/27690118},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: A Live Coding Performance - year: 2010 + publisher: Norwegian Academy of Music + title: ROYGBIV + url: https://vimeo.com/27690118 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Magnusson2010 - address: 'Sydney, Australia' - author: Thor Magnusson - bibtex: "@inproceedings{nime2010-music-Magnusson2010,\n address = {Sydney, Australia},\n\ - \ author = {Thor Magnusson},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Ixi Lang Performance},\n year = {2010}\n}\n" + ID: nime2011-music-Stewart2011 + abstract: "Program notes: The t-sticks grew out of a collaborative project by Joseph\ + \ Malloch and composer D. Andrew Stewart, at McGill University. 
The first prototype\ + \ was completed in 2006. The t-sticks form a family of tubular digital musical\ + \ instruments, ranging in length from 0.6 metres (soprano) to 1.2 metres (tenor).\ + \ They have been designed and constructed to allow a large variety of unique interaction\ + \ techniques. As a result, a significant emphasis is placed on the gestural vocabulary\ + \ required to manipulate and manoeuvre the instrument. The musical experience\ + \ for both the performer and audience is characterised by a unique engagement\ + \ between performer body and instrument.\n\nAbout the performers: D. Andrew Stewart\ + \ (Hexagram-MATRALAB, Concordia University, Montreal, Canada): composer, pianist,\ + \ clarinettist and digital musical instrumentalist. Stewart has been working in\ + \ the field of music composition since 1994. Since 2000, he has been pursuing\ + \ a career in live electronics -- gesture-controlled -- performance, after developing\ + \ his own sensor-suit." + address: 'Oslo, Norway' + author: Andrew Stewart + bibtex: "@inproceedings{nime2011-music-Stewart2011,\n abstract = {Program notes:\ + \ The t-sticks grew out of a collaborative project by Joseph Malloch and composer\ + \ D. Andrew Stewart, at McGill University. The first prototype was completed in\ + \ 2006. The t-sticks form a family of tubular digital musical instruments, ranging\ + \ in length from 0.6 metres (soprano) to 1.2 metres (tenor). They have been designed\ + \ and constructed to allow a large variety of unique interaction techniques. As\ + \ a result, a significant emphasis is placed on the gestural vocabulary required\ + \ to manipulate and manoeuvre the instrument. The musical experience for both\ + \ the performer and audience is characterised by a unique engagement between performer\ + \ body and instrument.\n\nAbout the performers: D. 
Andrew Stewart (Hexagram-MATRALAB,\ + \ Concordia University, Montreal, Canada): composer, pianist, clarinettist and\ + \ digital musical instrumentalist. Stewart has been working in the field of music\ + \ composition since 1994. Since 2000, he has been pursuing a career in live electronics\ + \ -- gesture-controlled -- performance, after developing his own sensor-suit.},\n\ + \ address = {Oslo, Norway},\n author = {Andrew Stewart},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ + \ = {Norwegian Academy of Music},\n title = {With Winds (for soprano t-stick)},\n\ + \ url = {https://vimeo.com/28226070},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Ixi Lang Performance - year: 2010 + publisher: Norwegian Academy of Music + title: With Winds (for soprano t-stick) + url: https://vimeo.com/28226070 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Dubrau2010 - address: 'Sydney, Australia' - author: Mei-Ling Dubrau and Mark Havryliv - bibtex: "@inproceedings{nime2010-music-Dubrau2010,\n address = {Sydney, Australia},\n\ - \ author = {Mei-Ling Dubrau and Mark Havryliv},\n booktitle = {Music Proceedings\ + ID: nime2011-music-Mays2011 + abstract: "Program notes: \"L'instant\" (2011) : Solo performance for Karlax instrument\ + \ and laptop. Composed and performed by Tom Mays. 
Originally an 8 channel tape\ + \ piece, it was completely re-constructed as a live solo for the composer performing\ + \ on a Karlax instrument – a gestural controller developed by Da Fact in France\ + \ (see http://www.dafact.com/). Musically, \"L'instant\" is a musical interpretation\ + \ of subatomic instantons, employing rotation and layering of parts who's rhythms\ + \ and timbres are built out of the combining and crossing of series of numbers...\ + \ The scenario is roughly “from the big bang to entropy”, and a “surround sound”\ + \ 5.1 diffusion space is critical to the sense of immersion within the rotating\ + \ sound objects and textures.\n\nAbout the performer: Tom Mays: composer, computer\ + \ musician and teacher, teaches at the National Superior Conservatory of Music\ + \ in Paris, and is currently working on PhD at the University of Paris 8 with\ + \ Horacio Vaggione. He is especially interested in gestural performance of real-time\ + \ computer systems for both written and improvised music, as well as in interaction\ + \ between music and video." + address: 'Oslo, Norway' + author: Tom Mays + bibtex: "@inproceedings{nime2011-music-Mays2011,\n abstract = {Program notes: \"\ + L'instant\" (2011) : Solo performance for Karlax instrument and laptop. Composed\ + \ and performed by Tom Mays. Originally an 8 channel tape piece, it was completely\ + \ re-constructed as a live solo for the composer performing on a Karlax instrument\ + \ – a gestural controller developed by Da Fact in France (see http://www.dafact.com/).\ + \ Musically, \"L'instant\" is a musical interpretation of subatomic instantons,\ + \ employing rotation and layering of parts who's rhythms and timbres are built\ + \ out of the combining and crossing of series of numbers... 
The scenario is roughly\ + \ “from the big bang to entropy”, and a “surround sound” 5.1 diffusion space is\ + \ critical to the sense of immersion within the rotating sound objects and textures.\n\ + \nAbout the performer: Tom Mays: composer, computer musician and teacher, teaches\ + \ at the National Superior Conservatory of Music in Paris, and is currently working\ + \ on PhD at the University of Paris 8 with Horacio Vaggione. He is especially\ + \ interested in gestural performance of real-time computer systems for both written\ + \ and improvised music, as well as in interaction between music and video.},\n\ + \ address = {Oslo, Norway},\n author = {Tom Mays},\n booktitle = {Music Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ - \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {P[r]o[pri]et[a]ry\ - \ in[ternet] [Ad]mo[ni]tion[s]},\n year = {2010}\n}\n" + \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ + \ = {Norwegian Academy of Music},\n title = {L'instant},\n url = {https://vimeo.com/28238543},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: 'P[r]o[pri]et[a]ry in[ternet] [Ad]mo[ni]tion[s]' - year: 2010 + publisher: Norwegian Academy of Music + title: L'instant + url: https://vimeo.com/28238543 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Ptak2010 - address: 'Sydney, Australia' - author: Anthony Ptak - bibtex: "@inproceedings{nime2010-music-Ptak2010,\n 
address = {Sydney, Australia},\n\ - \ author = {Anthony Ptak},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Live Bar-Coding},\n year = {2010}\n}\n" + ID: nime2011-music-Dupuis2011 + abstract: "Program notes:\n\nAn interactive audiovisual feedback loop forms the\ + \ basis of All Hail the Dawn. The instrument contains two simple light-sensitive\ + \ oscillators. A crude spectral analysis in Max/MSP is used to filter the oscillators\ + \ as well as looped buffers recorded from the instrument. A matrix of the spectral\ + \ analysis, interactively altered in Jitter using audio data, is projected back\ + \ onto the instrument and performer as a series of shifting patterns. This setup\ + \ allows both the graphics and sound to drive each other, creating an evolving\ + \ audiovisual relationship sensitive to slight changes in position, sound or processing.\n\ + \nAbout the performers:\n\nAlexander Dupuis: composer, performer, and multimedia\ + \ artist. His work involves live electronics and guitar, real-time graphics and\ + \ 3D animation, feedback systems and audiovisual installations. He graduated from\ + \ Brown University in 2010, and is currently working towards his Masters Degree\ + \ in the Digital Musics program at Dartmouth College." + address: 'Oslo, Norway' + author: Alexander Dupuis + bibtex: "@inproceedings{nime2011-music-Dupuis2011,\n abstract = {Program notes:\n\ + \nAn interactive audiovisual feedback loop forms the basis of All Hail the Dawn.\ + \ The instrument contains two simple light-sensitive oscillators. A crude spectral\ + \ analysis in Max/MSP is used to filter the oscillators as well as looped buffers\ + \ recorded from the instrument. 
A matrix of the spectral analysis, interactively\ + \ altered in Jitter using audio data, is projected back onto the instrument and\ + \ performer as a series of shifting patterns. This setup allows both the graphics\ + \ and sound to drive each other, creating an evolving audiovisual relationship\ + \ sensitive to slight changes in position, sound or processing.\n\nAbout the performers:\n\ + \nAlexander Dupuis: composer, performer, and multimedia artist. His work involves\ + \ live electronics and guitar, real-time graphics and 3D animation, feedback systems\ + \ and audiovisual installations. He graduated from Brown University in 2010, and\ + \ is currently working towards his Masters Degree in the Digital Musics program\ + \ at Dartmouth College.},\n address = {Oslo, Norway},\n author = {Alexander Dupuis},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ + \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {All Hail\ + \ the Dawn},\n url = {https://vimeo.com/27691545},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Live Bar-Coding - year: 2010 + publisher: Norwegian Academy of Music + title: All Hail the Dawn + url: https://vimeo.com/27691545 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Sazdov2010 - address: 'Sydney, Australia' - author: Robert Sazdov and Giuseppe Torre - bibtex: "@inproceedings{nime2010-music-Sazdov2010,\n address = {Sydney, Australia},\n\ - \ author = {Robert Sazdov and Giuseppe Torre},\n booktitle = {Music Proceedings\ - \ of the International Conference 
on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ - \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {MOLITVA},\n year =\ - \ {2010}\n}\n" + ID: nime2011-music-Nagashima2011 + abstract: "Program notes: Live computer music (multimedia) work, composed in 2010\ + \ and premiered in Russia. For this work, the composer developed a new interface\ + \ system for musical expression. The new interface has 8 channels of infrared-ray\ + \ distance sensors. This instrument is set up with two mic-stands on the stage.\ + \ The performer also wears the specially developed instrument called MiniBioMuse-III\ + \ which is 16 channels EMG sensor of the performance. The graphic part of this\ + \ work is real-time OpenGL 3D graphics, which is live-controlled by the performance.\ + \ This work is programmed in Max/MSP/jitter environment.\n\nAbout the performer:\ + \ Yoichi Nagashima: composer/researcher/PE, was born in 1958 in Japan. Since 1991\ + \ he has been the director of the Art & Science Laboratory in Hamamatsu, Japan.\ + \ He is a professor of Shizouka University of Art and Culture, Faculty of Design,\ + \ Department of Art and Science. He was the General Chair of NIME04." + address: 'Oslo, Norway' + author: Yoichi Nagashima + bibtex: "@inproceedings{nime2011-music-Nagashima2011,\n abstract = {Program notes:\ + \ Live computer music (multimedia) work, composed in 2010 and premiered in Russia.\ + \ For this work, the composer developed a new interface system for musical expression.\ + \ The new interface has 8 channels of infrared-ray distance sensors. This instrument\ + \ is set up with two mic-stands on the stage. The performer also wears the specially\ + \ developed instrument called MiniBioMuse-III which is 16 channels EMG sensor\ + \ of the performance. 
The graphic part of this work is real-time OpenGL 3D graphics,\ + \ which is live-controlled by the performance. This work is programmed in Max/MSP/jitter\ + \ environment.\n\nAbout the performer: Yoichi Nagashima: composer/researcher/PE,\ + \ was born in 1958 in Japan. Since 1991 he has been the director of the Art &\ + \ Science Laboratory in Hamamatsu, Japan. He is a professor of Shizouka University\ + \ of Art and Culture, Faculty of Design, Department of Art and Science. He was\ + \ the General Chair of NIME04.},\n address = {Oslo, Norway},\n author = {Yoichi\ + \ Nagashima},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ + \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ + \ title = {Ural Power},\n url = {https://vimeo.com/27731875},\n year = {2011}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: MOLITVA - year: 2010 + publisher: Norwegian Academy of Music + title: Ural Power + url: https://vimeo.com/27731875 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Hopson2010 - address: 'Sydney, Australia' - author: Holland Hopson - bibtex: "@inproceedings{nime2010-music-Hopson2010,\n address = {Sydney, Australia},\n\ - \ author = {Holland Hopson},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Banjo & 
Electronics},\n year = {2010}\n}\n" + ID: nime2011-music-EPtrio–ErikaDonald2011 + abstract: "Program notes: Television Sky is a three-movement work composed by Eliot\ + \ Britton. The movements (Channels 1, 2, 3) deal with various musical and physical\ + \ elements that figure prominently in the EP trio's research: Gesture, Texture,\ + \ and Rhythm. Each movement adopts a different approach to organizing sounds;\ + \ these provide unique arenas to explore communication, expression, and synchronization\ + \ issues arising in an electroacoustic chamber music ensemble.\n\nAbout the performer:\ + \ EP trio is a multi-faceted research group and performing ensemble. It is comprised\ + \ of cellist Erika Donald, percussionist Ben Duinker, and composer/ turntablist\ + \ Eliot Britton. They are based at McGill University in Montreal, Canada where\ + \ they enjoy support from the Centre for Interdisciplinary Research in Music Media\ + \ and Technology (CIRMMT)." + address: 'Oslo, Norway' + author: Erika Donald and Ben Duinker and Eliot Britton + bibtex: "@inproceedings{nime2011-music-EPtrio–ErikaDonald2011,\n abstract = {Program\ + \ notes: Television Sky is a three-movement work composed by Eliot Britton. The\ + \ movements (Channels 1, 2, 3) deal with various musical and physical elements\ + \ that figure prominently in the EP trio's research: Gesture, Texture, and Rhythm.\ + \ Each movement adopts a different approach to organizing sounds; these provide\ + \ unique arenas to explore communication, expression, and synchronization issues\ + \ arising in an electroacoustic chamber music ensemble.\n\nAbout the performer:\ + \ EP trio is a multi-faceted research group and performing ensemble. It is comprised\ + \ of cellist Erika Donald, percussionist Ben Duinker, and composer/ turntablist\ + \ Eliot Britton. 
They are based at McGill University in Montreal, Canada where\ + \ they enjoy support from the Centre for Interdisciplinary Research in Music Media\ + \ and Technology (CIRMMT).},\n address = {Oslo, Norway},\n author = {Erika Donald\ + \ and Ben Duinker and Eliot Britton},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ + \ {Norwegian Academy of Music},\n title = {Television Sky},\n url = {https://vimeo.com/28241338},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Banjo & Electronics - year: 2010 + publisher: Norwegian Academy of Music + title: Television Sky + url: https://vimeo.com/28241338 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Lyon2010 - address: 'Sydney, Australia' - author: Eric Lyon and Ben Knapp - bibtex: "@inproceedings{nime2010-music-Lyon2010,\n address = {Sydney, Australia},\n\ - \ author = {Eric Lyon and Ben Knapp},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon\ - \ Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {Stem Cells},\n year\ - \ = {2010}\n}\n" + ID: nime2011-music-SarahTaylor2011 + abstract: "About the performers:\n\nSarah Taylor: Dancer, Choreographer trained\ + \ at the Australian Ballet School (Degree in Dance), in Classical, Cunningham\ + \ and Graham, Scholarship student to Martha 
Graham school in New York. Currently\ + \ working with Cesc Gelabert, for the 2011 Grec Festival, Barcelona.\n\nMaurizio\ + \ Goina: Viola player and an audio-visual composer. Currently he is affiliated\ + \ with the School of Music and New Technologies of the Conservatory of Trieste\ + \ where he is developing, together with Pietro Polotti and with the collaboration\ + \ of Sarah Taylor, the EGGS system for gesture sonification.\n\nPietro Polotti:\ + \ Studied piano, composition and electronic music. He has a degree in physics\ + \ from the University of Trieste. In 2002, he obtained a Ph.D. in Communication\ + \ Systems from the EPFL, Switzerland. Presently, he teaches Electronic Music at\ + \ the Conservatory Tartini of Trieste, Italy. He has been part of the EGGS project\ + \ since 2008." + address: 'Oslo, Norway' + author: Sarah Taylor and Maurizio Goina and Pietro Polotti + bibtex: "@inproceedings{nime2011-music-SarahTaylor2011,\n abstract = {About the\ + \ performers:\n\nSarah Taylor: Dancer, Choreographer trained at the Australian\ + \ Ballet School (Degree in Dance), in Classical, Cunningham and Graham, Scholarship\ + \ student to Martha Graham school in New York. Currently working with Cesc Gelabert,\ + \ for the 2011 Grec Festival, Barcelona.\n\nMaurizio Goina: Viola player and an\ + \ audio-visual composer. Currently he is affiliated with the School of Music and\ + \ New Technologies of the Conservatory of Trieste where he is developing, together\ + \ with Pietro Polotti and with the collaboration of Sarah Taylor, the EGGS system\ + \ for gesture sonification.\n\nPietro Polotti: Studied piano, composition and\ + \ electronic music. He has a degree in physics from the University of Trieste.\ + \ In 2002, he obtained a Ph.D. in Communication Systems from the EPFL, Switzerland.\ + \ Presently, he teaches Electronic Music at the Conservatory Tartini of Trieste,\ + \ Italy. 
He has been part of the EGGS project since 2008.},\n address = {Oslo,\ + \ Norway},\n author = {Sarah Taylor and Maurizio Goina and Pietro Polotti},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ + \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {Body\ + \ Jockey},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Stem Cells - year: 2010 + publisher: Norwegian Academy of Music + title: Body Jockey + url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Ensemble2010 - address: 'Sydney, Australia' - author: Charisma Ensemble and Kirsty Beilharz - bibtex: "@inproceedings{nime2010-music-Ensemble2010,\n address = {Sydney, Australia},\n\ - \ author = {Charisma Ensemble and Kirsty Beilharz},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ - \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {Diamond Quills Hyper-Ensemble},\n\ - \ year = {2010}\n}\n" + ID: nime2011-music-Nicolls2011 + abstract: "Program notes:\nSN: I wanted to get at the closest relationship possible\ + \ between my hands and the resulting sound. 
Having worked with sampling and complex\ + \ processing and various sensors such as EMG, motion capture with live sound as\ + \ the source seemed a way to really get inside an improvisation system that was\ + \ really live and really intuitive. You can judge for yourselves!,\n\nNG: Sarah's\ + \ movements are sensed using a Kinect 3D motion capture device and the gestures\ + \ are recognised in real-time using the SEC, a machine learning toolbox that has\ + \ been specifically developed for musician-computer interaction.\n\nAbout the\ + \ performers:\n\nSarah Nicolls UK-based experimental pianist and inventor of `Inside-out\ + \ piano'; collaborative researcher with e.g. Atau Tanaka, PA Tremblay; concerts\ + \ e.g. world premieres of Larry Goves' Piano Concerto, Richard Barrett's Mesopotamia/London\ + \ Sinfonietta/BBC Radio; article in LMJ20; Senior Lecturer at Brunel University;\ + \ funding: Arts and Humanities Research Council (AHRC), Brunel Research Initiative\ + \ and Enterprise Fund (BRIEF), Arts Council England.\n\nNick Gillian Post-doctoral\ + \ researcher currently working on an E.U. project entitled SIEMPRE at the Sonic\ + \ Arts Research Centre, Belfast. Nick recently completed his PhD in Gesture Recognition\ + \ for Musician-Computer Interaction under the supervision of R. Benjamin Knapp\ + \ and Sile O'Modhrain. His interests are in machine learning and pattern recognition\ + \ and applying these techniques to enable real-time musician-computer interaction." + address: 'Oslo, Norway' + author: Sarah Nicolls and Nick Gillian + bibtex: "@inproceedings{nime2011-music-Nicolls2011,\n abstract = {Program notes:\n\ + SN: I wanted to get at the closest relationship possible between my hands and\ + \ the resulting sound. Having worked with sampling and complex processing and\ + \ various sensors such as EMG, motion capture with live sound as the source seemed\ + \ a way to really get inside an improvisation system that was really live and\ + \ really intuitive. 
You can judge for yourselves!,\n\nNG: Sarah's movements are\ + \ sensed using a Kinect 3D motion capture device and the gestures are recognised\ + \ in real-time using the SEC, a machine learning toolbox that has been specifically\ + \ developed for musician-computer interaction.\n\nAbout the performers:\n\nSarah\ + \ Nicolls UK-based experimental pianist and inventor of `Inside-out piano'; collaborative\ + \ researcher with e.g. Atau Tanaka, PA Tremblay; concerts e.g. world premieres\ + \ of Larry Goves' Piano Concerto, Richard Barrett's Mesopotamia/London Sinfonietta/BBC\ + \ Radio; article in LMJ20; Senior Lecturer at Brunel University; funding: Arts\ + \ and Humanities Research Council (AHRC), Brunel Research Initiative and Enterprise\ + \ Fund (BRIEF), Arts Council England.\n\nNick Gillian Post-doctoral researcher\ + \ currently working on an E.U. project entitled SIEMPRE at the Sonic Arts Research\ + \ Centre, Belfast. Nick recently completed his PhD in Gesture Recognition for\ + \ Musician-Computer Interaction under the supervision of R. Benjamin Knapp and\ + \ Sile O'Modhrain. 
His interests are in machine learning and pattern recognition\ + \ and applying these techniques to enable real-time musician-computer interaction.},\n\ + \ address = {Oslo, Norway},\n author = {Sarah Nicolls and Nick Gillian},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ + \ {June},\n publisher = {Norwegian Academy of Music},\n title = {Improvisation\ + \ for piano + motion capture system},\n url = {https://vimeo.com/26678719},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Diamond Quills Hyper-Ensemble - year: 2010 + publisher: Norwegian Academy of Music + title: Improvisation for piano + motion capture system + url: https://vimeo.com/26678719 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Hewitt2010 - address: 'Sydney, Australia' - author: Donna Hewitt and Avril Huddy - bibtex: "@inproceedings{nime2010-music-Hewitt2010,\n address = {Sydney, Australia},\n\ - \ author = {Donna Hewitt and Avril Huddy},\n booktitle = {Music Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon\ - \ Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ - \ publisher = {University of Technology Sydney},\n title = {Idol},\n year = {2010}\n\ - }\n" + ID: nime2011-music-Hayes2011 + abstract: "Performer notes:\nSocks and Ammo for piano, percussion and live electronics,\ + \ is a new work investigating novel methods of communication between laptop and\ + \ 
performer, as well as performer and performer, in an improvisational setting.\ + \ Enhancing traditional aural and visual cues, a network is established between\ + \ laptops, providing direction and suggestion to and between performers. Tactile\ + \ feedback is provided to performers in the form of tiny vibrations on the skin,\ + \ opening up a further, yet covert, channel of information to transmit signals\ + \ and cues, allowing for a more informed and focused performance.\n\nAbout the\ + \ performers:\n\nLauren Sarah Hayes: Composer and performer from Glasgow. Her\ + \ recent practice focuses on realizing compositions for piano and live electronics,\ + \ which unify extended technique, bespoke software and instrument augmentation.\ + \ Undertaken at the University of Edinburgh, her research investigates audio-haptic\ + \ relationships as performance strategies for performers of digital music.\n\n\ + Christos Michalakos: Composer and improviser from northern Greece. Working predominantly\ + \ with percussion and live electronics, his music explores relationships between\ + \ acoustic and electronic sound worlds, through an examination of methods for\ + \ developing and augmenting his drum kit, forming part of his PhD research at\ + \ the University of Edinburgh.\n\n=== Recorded at:\n\n11th International Conference\ + \ on New Interfaces for Musical Expression. 30 May - 1 June 2011, Oslo, Norway.\n\ + \nhttp://www.nime2011.org" + address: 'Oslo, Norway' + author: Lauren Sarah Hayes and Christos Michalakos + bibtex: "@inproceedings{nime2011-music-Hayes2011,\n abstract = {Performer notes:\n\ + Socks and Ammo for piano, percussion and live electronics, is a new work investigating\ + \ novel methods of communication between laptop and performer, as well as performer\ + \ and performer, in an improvisational setting. 
Enhancing traditional aural and\ + \ visual cues, a network is established between laptops, providing direction and\ + \ suggestion to and between performers. Tactile feedback is provided to performers\ + \ in the form of tiny vibrations on the skin, opening up a further, yet covert,\ + \ channel of information to transmit signals and cues, allowing for a more informed\ + \ and focused performance.\n\nAbout the performers:\n\nLauren Sarah Hayes: Composer\ + \ and performer from Glasgow. Her recent practice focuses on realizing compositions\ + \ for piano and live electronics, which unify extended technique, bespoke software\ + \ and instrument augmentation. Undertaken at the University of Edinburgh, her\ + \ research investigates audio-haptic relationships as performance strategies for\ + \ performers of digital music.\n\nChristos Michalakos: Composer and improviser\ + \ from northern Greece. Working predominantly with percussion and live electronics,\ + \ his music explores relationships between acoustic and electronic sound worlds,\ + \ through an examination of methods for developing and augmenting his drum kit,\ + \ forming part of his PhD research at the University of Edinburgh.\n\n=== Recorded\ + \ at:\n\n11th International Conference on New Interfaces for Musical Expression.\ + \ 30 May - 1 June 2011, Oslo, Norway.\n\nhttp://www.nime2011.org},\n address =\ + \ {Oslo, Norway},\n author = {Lauren Sarah Hayes and Christos Michalakos},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ + \ {June},\n publisher = {Norwegian Academy of Music},\n title = {Socks and Ammo},\n\ + \ url = {https://vimeo.com/26629807},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg 
Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Idol - year: 2010 + publisher: Norwegian Academy of Music + title: Socks and Ammo + url: https://vimeo.com/26629807 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Martinez2010 - address: 'Sydney, Australia' - author: Christopher Martinez - bibtex: "@inproceedings{nime2010-music-Martinez2010,\n address = {Sydney, Australia},\n\ - \ author = {Christopher Martinez},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Radio Healer},\n year = {2010}\n}\n" + ID: nime2011-music-PaulStapleton2011 + abstract: "About the performers:\n\nE=MCH is a recently formed quartet featuring\ + \ Belfast-based improvisers Paul Stapleton (BoSS & Postcard Weevil), Caroline\ + \ Pugh (Voice & Analogue Cassette Decks, Zero-input Mixer), Adnan Marquez-Borbon\ + \ (Feedback Bass Clarinet, Recording Modules & Delay Lines) and Cavan Fyans (DIY\ + \ Electronics). Memories, distortions of time and place, echoes from analogue\ + \ delay lengths, solid state samplers, and modified vinyl all help shape the fabric\ + \ of the music in response to its larger ecology. ``Okay so making instruments\ + \ and playing on them is not new, can't really see that there is any new thought\ + \ about how why and what here, but the sound sculpture looks nice.'' --- Cosmopolitan\n\ + \nPaul Stapleton: Sound artist, improviser and writer originally from Southern\ + \ California, currently lecturing at the Sonic Arts Research Centre in Belfast\ + \ (SARC). 
Paul designs and performs with a variety of custom made metallic sound\ + \ sculptures, electronics and found objects in locations ranging from impro clubs\ + \ in Cork to abandoned beaches on Vancouver Island.\n\nCaroline Pugh: Scottish\ + \ vocalist and performance artist. She deviously borrows analogue technologies\ + \ and oral histories to create performances that present imagined constructions\ + \ of traditional and popular culture. With a background in both folk music and\ + \ improvisation, she collaborates with people from any discipline and performs\ + \ in a wide variety of venues including folk clubs, arts venues and cinemas.\n\ + \nAdnan Marquez-Borbon: Saxophonist, improviser, computer musician, and composer,\ + \ currently a PhD student at SARC. His research emphasis is on the roles of learning\ + \ models and skill development in the design of digital musical instruments. As\ + \ a musician, his music focuses on improvisation and the electronic manipulation\ + \ of sounds in real-time.\n\nCavan Fyans: PhD research student, instrument builder,\ + \ noise maker & improviser. Currently located at SARC, Cavan's research examines\ + \ the spectator's cognition of interaction and performance in communicative interactions\ + \ with technology. Cavan also devotes time to developing new and innovative ways\ + \ of breaking cheap electronic toys (Circuit Bending) and (re)constructing circuitry\ + \ for sonic creation (Hardware Hacking)." + address: 'Oslo, Norway' + author: Paul Stapleton and Caroline Pugh and Adnan Marquez-Borbon and Cavan Fyans + bibtex: "@inproceedings{nime2011-music-PaulStapleton2011,\n abstract = {About the\ + \ performers:\n\nE=MCH is a recently formed quartet featuring Belfast-based improvisers\ + \ Paul Stapleton (BoSS \\& Postcard Weevil), Caroline Pugh (Voice \\& Analogue\ + \ Cassette Decks, Zero-input Mixer), Adnan Marquez-Borbon (Feedback Bass Clarinet,\ + \ Recording Modules \\& Delay Lines) and Cavan Fyans (DIY Electronics). 
Memories,\ + \ distortions of time and place, echoes from analogue delay lengths, solid state\ + \ samplers, and modified vinyl all help shape the fabric of the music in response\ + \ to its larger ecology. ``Okay so making instruments and playing on them is not\ + \ new, can't really see that there is any new thought about how why and what here,\ + \ but the sound sculpture looks nice.'' --- Cosmopolitan\n\nPaul Stapleton: Sound\ + \ artist, improviser and writer originally from Southern California, currently\ + \ lecturing at the Sonic Arts Research Centre in Belfast (SARC). Paul designs\ + \ and performs with a variety of custom made metallic sound sculptures, electronics\ + \ and found objects in locations ranging from impro clubs in Cork to abandoned\ + \ beaches on Vancouver Island.\n\nCaroline Pugh: Scottish vocalist and performance\ + \ artist. She deviously borrows analogue technologies and oral histories to create\ + \ performances that present imagined constructions of traditional and popular\ + \ culture. With a background in both folk music and improvisation, she collaborates\ + \ with people from any discipline and performs in a wide variety of venues including\ + \ folk clubs, arts venues and cinemas.\n\nAdnan Marquez-Borbon: Saxophonist, improviser,\ + \ computer musician, and composer, currently a PhD student at SARC. His research\ + \ emphasis is on the roles of learning models and skill development in the design\ + \ of digital musical instruments. As a musician, his music focuses on improvisation\ + \ and the electronic manipulation of sounds in real-time.\n\nCavan Fyans: PhD\ + \ research student, instrument builder, noise maker \\& improviser. Currently\ + \ located at SARC, Cavan's research examines the spectator's cognition of interaction\ + \ and performance in communicative interactions with technology. 
Cavan also devotes\ + \ time to developing new and innovative ways of breaking cheap electronic toys\ + \ (Circuit Bending) and (re)constructing circuitry for sonic creation (Hardware\ + \ Hacking).},\n address = {Oslo, Norway},\n author = {Paul Stapleton and Caroline\ + \ Pugh and Adnan Marquez-Borbon and Cavan Fyans},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ + \ = {Norwegian Academy of Music},\n title = {E=MCH},\n url = {https://vimeo.com/26620232},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Radio Healer - year: 2010 + publisher: Norwegian Academy of Music + title: E=MCH + url: https://vimeo.com/26620232 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Langley2010 - address: 'Sydney, Australia' - author: Somaya Langley - bibtex: "@inproceedings{nime2010-music-Langley2010,\n address = {Sydney, Australia},\n\ - \ author = {Somaya Langley},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {ID-i/o},\n year = {2010}\n}\n" + ID: nime2011-music-Alden2011 + abstract: "Program notes:\nREMI Sings is an electroacoustic performance for the\ + \ bio-inspired Rhizomatic Experimental Musical Interface (REMI) and accordion.\ + \ REMI is an interactive networked musical 
organism that receives sonic input\ + \ from its environment, processes it based on the ever changing structure of its\ + \ interior network, and generates a unique musical output. This rhizomatic network\ + \ is a software structure modelled after the functioning and growth patterns of\ + \ biological rhizomes, specifically the mycorrhizal association that form vital\ + \ nutrient pathways for the majority of the planet's land-plant ecosystems. The\ + \ performance REMI Sings highlights this interface's interactive nature, creating\ + \ a dialogue between human performer and non-human musical intelligence.\n\nAbout\ + \ the performer:\n\nChristopher Alden: Composer, programmer, and instrumentalist\ + \ currently studying at New York University's Interactive Telecommunications Program,\ + \ where his research focuses on interactive music systems for composition and\ + \ performance. Before ITP, he received his undergraduate degree in Music Theory\ + \ and Composition at NYU where he studied composition under Marc Antonio-Consoli" + address: 'Oslo, Norway' + author: Christopher Alden + bibtex: "@inproceedings{nime2011-music-Alden2011,\n abstract = {Program notes:\n\ + REMI Sings is an electroacoustic performance for the bio-inspired Rhizomatic Experimental\ + \ Musical Interface (REMI) and accordion. REMI is an interactive networked musical\ + \ organism that receives sonic input from its environment, processes it based\ + \ on the ever changing structure of its interior network, and generates a unique\ + \ musical output. This rhizomatic network is a software structure modelled after\ + \ the functioning and growth patterns of biological rhizomes, specifically the\ + \ mycorrhizal association that form vital nutrient pathways for the majority of\ + \ the planet's land-plant ecosystems. 
The performance REMI Sings highlights this\ + \ interface's interactive nature, creating a dialogue between human performer\ + \ and non-human musical intelligence.\n\nAbout the performer:\n\nChristopher Alden:\ + \ Composer, programmer, and instrumentalist currently studying at New York University's\ + \ Interactive Telecommunications Program, where his research focuses on interactive\ + \ music systems for composition and performance. Before ITP, he received his undergraduate\ + \ degree in Music Theory and Composition at NYU where he studied composition under\ + \ Marc Antonio-Consoli},\n address = {Oslo, Norway},\n author = {Christopher Alden},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ + \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {REMI\ + \ Sings},\n url = {https://vimeo.com/26619152},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: ID-i/o - year: 2010 + publisher: Norwegian Academy of Music + title: REMI Sings + url: https://vimeo.com/26619152 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Schubert2010 - address: 'Sydney, Australia' - author: Alexander Schubert - bibtex: "@inproceedings{nime2010-music-Schubert2010,\n address = {Sydney, Australia},\n\ - \ author = {Alexander Schubert},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = 
{June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Laplace Tiger},\n year = {2010}\n}\n" + ID: nime2011-music-Schwarz2011 + abstract: "Program notes:\n\nThe performance between electric violinist Victoria\ + \ Johnson and Diemo Schwarz playing his interactive corpus-based concatenative\ + \ synthesis software CataRT is an improvisation with two brains and four hands\ + \ controlling one shared symbolic instrument, the sound space, built-up from nothing\ + \ and nourished in unplanned ways by the sound of the instrument, explored and\ + \ consumed with whatever the live instant filled it with. It creates a symbiotic\ + \ relationship between the player of the instrument and that of the software.\ + \ Live corpus-based concatenative synthesis permits here a new approach to improvisation,\ + \ where sound from an instrument is recontextualised by interactive, gesture-controlled\ + \ software. Not knowing what can happen is an integral part of the performance.\n\ + \nAbout the performers:\n\nVictoria Johnson works with electric violin, live electronics,\ + \ improvisation and musical technological issues in her artistic work. Trained\ + \ as a classical violinist in Oslo, Vienna and London, she gave her debut recital\ + \ in Oslo in 1995. She has established herself internationally as a soloist, chamber\ + \ musician and improviser in contemporary, improvised and experimental, cross-disciplinary\ + \ music and art.\n\nDiemo Schwarz: Researcher and developer at Ircam, composer\ + \ of electronic music, and musician on drums and laptop with gestural controllers.\ + \ His compositions and live performances, in solo as Mean Time Between Failure,\ + \ or improvising with other musicians, explore the possibilities of corpus-based\ + \ concatenative synthesis to re-contextualise any sound source by rearranging\ + \ sound units into a new musical framework using interactive navigation through\ + \ a timbral space." 
+ address: 'Oslo, Norway' + author: Diemo Schwarz and Victoria Johnson + bibtex: "@inproceedings{nime2011-music-Schwarz2011,\n abstract = {Program notes:\n\ + \nThe performance between electric violinist Victoria Johnson and Diemo Schwarz\ + \ playing his interactive corpus-based concatenative synthesis software CataRT\ + \ is an improvisation with two brains and four hands controlling one shared symbolic\ + \ instrument, the sound space, built-up from nothing and nourished in unplanned\ + \ ways by the sound of the instrument, explored and consumed with whatever the\ + \ live instant filled it with. It creates a symbiotic relationship between the\ + \ player of the instrument and that of the software. Live corpus-based concatenative\ + \ synthesis permits here a new approach to improvisation, where sound from an\ + \ instrument is recontextualised by interactive, gesture-controlled software.\ + \ Not knowing what can happen is an integral part of the performance.\n\nAbout\ + \ the performers:\n\nVictoria Johnson works with electric violin, live electronics,\ + \ improvisation and musical technological issues in her artistic work. Trained\ + \ as a classical violinist in Oslo, Vienna and London, she gave her debut recital\ + \ in Oslo in 1995. 
She has established herself internationally as a soloist, chamber\ + \ musician and improviser in contemporary, improvised and experimental, cross-disciplinary\ + \ music and art.\n\nDiemo Schwarz: Researcher and developer at Ircam, composer\ + \ of electronic music, and musician on drums and laptop with gestural controllers.\ + \ His compositions and live performances, in solo as Mean Time Between Failure,\ + \ or improvising with other musicians, explore the possibilities of corpus-based\ + \ concatenative synthesis to re-contextualise any sound source by rearranging\ + \ sound units into a new musical framework using interactive navigation through\ + \ a timbral space.},\n address = {Oslo, Norway},\n author = {Diemo Schwarz and\ + \ Victoria Johnson},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ + \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ + \ title = {Suspended Beginnings},\n url = {https://vimeo.com/26679877},\n year\ + \ = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Laplace Tiger - year: 2010 + publisher: Norwegian Academy of Music + title: Suspended Beginnings + url: https://vimeo.com/26679877 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Lai2010 - abstract: "Interactivity between performer and technology is a crucial part of media\ - \ performance. This is made possible through creative integration of software\ - \ and hardware devices. 
The performance work, Strike On Stage, uses such an integration\ - \ to bridge three aspects of performance: performers' body movements, audio processing,\ - \ and projected video. \n\nFor the NIME 2010 concert performance, we are proposing\ - \ to present a media work composed for the Strike On Stage instrument demonstrating\ - \ a wide variety of interactions between the two performers, the instrument itself\ - \ and the video projection.\n\nThe instrument for Strike On Stage is a large performance\ - \ surface for multiple players to control computer based musical instruments and\ - \ visuals. This concept is a new perspective on Chi-Hsia Lai's MPhil research\ - \ project, Hands On Stage (video documentation can be found at ),\ - \ which was a solo, audiovisual performance work created during 2007 and 2008." - address: 'Sydney, Australia' - author: Chi-Hsia Lai and Charles Martin - bibtex: "@inproceedings{nime2010-music-Lai2010,\n abstract = {Interactivity between\ - \ performer and technology is a crucial part of media performance. This is made\ - \ possible through creative integration of software and hardware devices. The\ - \ performance work, Strike On Stage, uses such an integration to bridge three\ - \ aspects of performance: performers' body movements, audio processing, and projected\ - \ video. \n\nFor the NIME 2010 concert performance, we are proposing to present\ - \ a media work composed for the Strike On Stage instrument demonstrating a wide\ - \ variety of interactions between the two performers, the instrument itself and\ - \ the video projection.\n\nThe instrument for Strike On Stage is a large performance\ - \ surface for multiple players to control computer based musical instruments and\ - \ visuals. 
This concept is a new perspective on Chi-Hsia Lai's MPhil research\ - \ project, Hands On Stage (video documentation can be found at ),\ - \ which was a solo, audiovisual performance work created during 2007 and 2008.},\n\ - \ address = {Sydney, Australia},\n author = {Chi-Hsia Lai and Charles Martin},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Andrew Johnston, Sam Ferguson, Jos Mulder,\ - \ Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger\ - \ Mills},\n month = {June},\n publisher = {University of Technology Sydney},\n\ - \ title = {Strike On Stage},\n year = {2010}\n}\n" + ID: nime2011-music-JasonDixon2011 + abstract: "Program notes:\n\nThe Loop explores the possibilities of co-located performance,\ + \ decentralized composition, and the acoustics of network. This performance begins\ + \ with a brief improvisation presenting acoustic sources to excite the network.\ + \ This material is shared, transformed, and reintroduced into the composition.\ + \ This process continues through successive generations until a predetermined\ + \ time or a point at which the composition naturally concludes. The result is\ + \ an integrated meta-instrument and an emergent composition, with no one artist\ + \ being the sole performer or composer. Remote participants are represented locally\ + \ by a mono speaker enabling the audiences to hear the transformation of audio\ + \ through the networked instrument.\n\nAbout the performers:\n\nJason Dixon: Irish\ + \ composer currently based in Norwich where he is in the process of completing\ + \ his PhD in composition. His work explores issues of language, perception and\ + \ memory in music. More recently he has been focusing on the Irish storytelling\ + \ tradition and its place in contemporary Ireland.\n\nTom Davis: Digital artist\ + \ working mainly in the medium of sound installation. 
His practice and theory\ + \ based output involves the creation of technology led environments for interaction.\ + \ Davis is currently a lecturer at the University of Bournemouth and holds a PhD\ + \ from the Sonic Arts Research Centre, Belfast.\n\nJason Geistweidt: Sound artist\ + \ based at the University or Tromsø, Norway, researching mixed-reality stages\ + \ and performance systems. He is a former faculty member of Interactive Arts and\ + \ Media department at Columbia College Chicago. He holds PhD in electro-acoustic\ + \ composition from the Sonic Arts Research Centre, Queens University, Belfast.\n\ + \nAlain B. Renaud: Alain's research focuses on networked music performance systems\ + \ with an emphasis on the creation of strategies to interact over a network musically\ + \ and the notion of shared networked acoustic spaces. He is a lecturer in at Bournemouth\ + \ University, England and holds a PhD from the Sonic Arts Research Centre." + address: 'Oslo, Norway' + author: Jason Dixon and Tom Davis and Jason Geistweidt and Alain B. Renaud + bibtex: "@inproceedings{nime2011-music-JasonDixon2011,\n abstract = {Program notes:\n\ + \nThe Loop explores the possibilities of co-located performance, decentralized\ + \ composition, and the acoustics of network. This performance begins with a brief\ + \ improvisation presenting acoustic sources to excite the network. This material\ + \ is shared, transformed, and reintroduced into the composition. This process\ + \ continues through successive generations until a predetermined time or a point\ + \ at which the composition naturally concludes. The result is an integrated meta-instrument\ + \ and an emergent composition, with no one artist being the sole performer or\ + \ composer. 
Remote participants are represented locally by a mono speaker enabling\
+ \ the audiences to hear the transformation of audio through the networked instrument.\n\
+ \nAbout the performers:\n\nJason Dixon: Irish composer currently based in Norwich\
+ \ where he is in the process of completing his PhD in composition. His work explores\
+ \ issues of language, perception and memory in music. More recently he has been\
+ \ focusing on the Irish storytelling tradition and its place in contemporary Ireland.\n\
+ \nTom Davis: Digital artist working mainly in the medium of sound installation.\
+ \ His practice and theory based output involves the creation of technology led\
+ \ environments for interaction. Davis is currently a lecturer at the University\
+ \ of Bournemouth and holds a PhD from the Sonic Arts Research Centre, Belfast.\n\
+ \nJason Geistweidt: Sound artist based at the University of Tromsø, Norway, researching\
+ \ mixed-reality stages and performance systems. He is a former faculty member\
+ \ of Interactive Arts and Media department at Columbia College Chicago. He holds\
+ \ a PhD in electro-acoustic composition from the Sonic Arts Research Centre, Queens\
+ \ University, Belfast.\n\nAlain B. Renaud: Alain's research focuses on networked\
+ \ music performance systems with an emphasis on the creation of strategies to\
+ \ interact over a network musically and the notion of shared networked acoustic\
+ \ spaces. He is a lecturer at Bournemouth University, England and holds a PhD\
+ \ from the Sonic Arts Research Centre.},\n address = {Oslo, Norway},\n author\
+ \ = {Jason Dixon and Tom Davis and Jason Geistweidt and Alain B. 
Renaud},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ + \ {June},\n publisher = {Norwegian Academy of Music},\n title = {The Loop},\n\ + \ url = {https://vimeo.com/26679893},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Strike On Stage - year: 2010 + publisher: Norwegian Academy of Music + title: The Loop + url: https://vimeo.com/26679893 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Schunior2010 - address: 'Sydney, Australia' - author: Michael Schunior - bibtex: "@inproceedings{nime2010-music-Schunior2010,\n address = {Sydney, Australia},\n\ - \ author = {Michael Schunior},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {{HAITIAN HAARPS}},\n year = {2010}\n}\n" + ID: nime2011-music-Zappi2011 + abstract: "Program notes:\nDissonance is an audio/visual performance in which a\ + \ progressive soundtrack is created along with the exploration of an interactive\ + \ virtual environment. 
While real instrument--generated music animates the projected\ + \ worlds, the two performers are allowed to physically interact with virtual objects,\ + \ changing their position, shape and color to control music and create new sounds.\ + \ As the journey continues and the environment introduces new elements and new\ + \ metaphors, performers are driven to explore the sonic laws that rule each scenario.\ + \ Spectators wearing 3D glasses perceive the virtual environment as moving out\ + \ of the screen and embracing the artists, in choreographies where real and virtual\ + \ world literally overlap.\n\nAbout the performers:\n\nVictor Zappi: PhD student\ + \ and a new media artist. His research focuses on Virtual Reality and its applications\ + \ in art and live performances.\n\nDario Mazzanti: computer science engineer and\ + \ multi-instrumentalist composer. He enjoys writing, recording and playing music\ + \ combining his artistic streak with his interest for technology." + address: 'Oslo, Norway' + author: Victor Zappi and Dario Mazzanti + bibtex: "@inproceedings{nime2011-music-Zappi2011,\n abstract = {Program notes:\n\ + Dissonance is an audio/visual performance in which a progressive soundtrack is\ + \ created along with the exploration of an interactive virtual environment. While\ + \ real instrument--generated music animates the projected worlds, the two performers\ + \ are allowed to physically interact with virtual objects, changing their position,\ + \ shape and color to control music and create new sounds. As the journey continues\ + \ and the environment introduces new elements and new metaphors, performers are\ + \ driven to explore the sonic laws that rule each scenario. Spectators wearing\ + \ 3D glasses perceive the virtual environment as moving out of the screen and\ + \ embracing the artists, in choreographies where real and virtual world literally\ + \ overlap.\n\nAbout the performers:\n\nVictor Zappi: PhD student and a new media\ + \ artist. 
His research focuses on Virtual Reality and its applications in art\ + \ and live performances.\n\nDario Mazzanti: computer science engineer and multi-instrumentalist\ + \ composer. He enjoys writing, recording and playing music combining his artistic\ + \ streak with his interest for technology.},\n address = {Oslo, Norway},\n author\ + \ = {Victor Zappi and Dario Mazzanti},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ + \ {Norwegian Academy of Music},\n title = {Dissonance},\n url = {https://vimeo.com/26616186},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: '{HAITIAN HAARPS}' - year: 2010 + publisher: Norwegian Academy of Music + title: Dissonance + url: https://vimeo.com/26616186 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-GeWang2010 - address: 'Sydney, Australia' - author: 'Ge Wang, Jieun Oh, Jorge Herrera, Nicholas J. Bryan and Luke Dahl' - bibtex: "@inproceedings{nime2010-music-GeWang2010,\n address = {Sydney, Australia},\n\ - \ author = {Ge Wang, Jieun Oh, Jorge Herrera, Nicholas J. 
Bryan and Luke Dahl},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Andrew Johnston, Sam Ferguson, Jos Mulder,\ - \ Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger\ - \ Mills},\n month = {June},\n publisher = {University of Technology Sydney},\n\ - \ title = {Stanford Mobile Phone Orchestra (MoPhO)},\n year = {2010}\n}\n" + ID: nime2011-music-Nowitz2011 + abstract: "Program notes:\n\nSince 2008 I have been performing and composing music\ + \ for voice and live-electronics using two Wii-remotes as gestural controllers.\ + \ The live-electronics function in two ways: as an extension of my voice and as\ + \ an instrument as well. The music creation is mainly based on live-sampling the\ + \ voice. I also use pre-recorded sounds and my own compositions. In addition,\ + \ since the beginning of 2010 we have been developing a new instrument, which\ + \ goes beyond the technical possibilities of the Wii-controllers. I call this\ + \ instrument the Shells. Besides motion sensors there are three more continuous\ + \ controllers available: a pressure sensor, a joystick control and ultrasound\ + \ for distance measurement.\n\nAbout the performers:\n\nAlex Nowitz: Composer\ + \ of vocal, chamber and electronic music as well as music for dance, theatre and\ + \ opera. Furthermore, he is a voice artist, whistling and singing virtuoso who\ + \ is classically trained as tenor and countertenor and presents a wide array of\ + \ diverse and extended techniques. He has been artist in residence at STEIM, Amsterdam,\ + \ since 2010." + address: 'Oslo, Norway' + author: Alex Nowitz + bibtex: "@inproceedings{nime2011-music-Nowitz2011,\n abstract = {Program notes:\n\ + \nSince 2008 I have been performing and composing music for voice and live-electronics\ + \ using two Wii-remotes as gestural controllers. 
The live-electronics function\ + \ in two ways: as an extension of my voice and as an instrument as well. The music\ + \ creation is mainly based on live-sampling the voice. I also use pre-recorded\ + \ sounds and my own compositions. In addition, since the beginning of 2010 we\ + \ have been developing a new instrument, which goes beyond the technical possibilities\ + \ of the Wii-controllers. I call this instrument the Shells. Besides motion sensors\ + \ there are three more continuous controllers available: a pressure sensor, a\ + \ joystick control and ultrasound for distance measurement.\n\nAbout the performers:\n\ + \nAlex Nowitz: Composer of vocal, chamber and electronic music as well as music\ + \ for dance, theatre and opera. Furthermore, he is a voice artist, whistling and\ + \ singing virtuoso who is classically trained as tenor and countertenor and presents\ + \ a wide array of diverse and extended techniques. He has been artist in residence\ + \ at STEIM, Amsterdam, since 2010.},\n address = {Oslo, Norway},\n author = {Alex\ + \ Nowitz},\n booktitle = {Music Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ + \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ + \ title = {The Shells},\n url = {https://vimeo.com/26661484},\n year = {2011}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Stanford Mobile Phone Orchestra (MoPhO) - year: 2010 + publisher: Norwegian Academy of Music + title: The Shells + url: https://vimeo.com/26661484 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Essl2010 - address: 'Sydney, 
Australia' - author: Georg Essl - bibtex: "@inproceedings{nime2010-music-Essl2010,\n address = {Sydney, Australia},\n\ - \ author = {Georg Essl},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Mobile Phone Orchestras presents...},\n year\ - \ = {2010}\n}\n" + ID: nime2011-music-Guillamat2011 + abstract: "Program notes:\nAn open playground for laptop improvisation and performance.\ + \ BiLE's performance will focus on semi-structured improvisation, with players\ + \ creating and manipulating sound using a variety of motion capture devices -\ + \ iPhones, Wiimotes, and Xbox Kinect. The data captured by each device, along\ + \ with analysed musical parameters, will be sent out over the shared network,\ + \ to be used by each performer as they see fit. The aim is to allow players to\ + \ latch onto other members of the group by mapping the shared data to their own\ + \ software parameters, creating moments of convergence between the ensemble. BiLE\ + \ takes an `instrumental' approach to performance, with each performer having\ + \ their own speaker, sonic identity and spatial location.\n\nAbout the performers:\n\ + \nBiLE (Birmingham Laptop Ensemble): A collaborative group of six composers, brought\ + \ together through their shared interest in live performance and improvisation.\ + \ BiLE has an open and inclusive attitude towards experimentation with sound,\ + \ and draws on the members' wide-ranging musical backgrounds." 
+ address: 'Oslo, Norway' + author: Julien Guillamat and Charles Céleste Hutchins and Shelly Knotts and Norah + Lorway and Jorge Garcia Moncada and Chris Tarren + bibtex: "@inproceedings{nime2011-music-Guillamat2011,\n abstract = {Program notes:\n\ + An open playground for laptop improvisation and performance. BiLE's performance\ + \ will focus on semi-structured improvisation, with players creating and manipulating\ + \ sound using a variety of motion capture devices - iPhones, Wiimotes, and Xbox\ + \ Kinect. The data captured by each device, along with analysed musical parameters,\ + \ will be sent out over the shared network, to be used by each performer as they\ + \ see fit. The aim is to allow players to latch onto other members of the group\ + \ by mapping the shared data to their own software parameters, creating moments\ + \ of convergence between the ensemble. BiLE takes an `instrumental' approach to\ + \ performance, with each performer having their own speaker, sonic identity and\ + \ spatial location.\n\nAbout the performers:\n\nBiLE (Birmingham Laptop Ensemble):\ + \ A collaborative group of six composers, brought together through their shared\ + \ interest in live performance and improvisation. 
BiLE has an open and inclusive\ + \ attitude towards experimentation with sound, and draws on the members' wide-ranging\ + \ musical backgrounds.},\n address = {Oslo, Norway},\n author = {Julien Guillamat\ + \ and Charles Céleste Hutchins and Shelly Knotts and Norah Lorway and Jorge Garcia\ + \ Moncada and Chris Tarren},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Kjell Tore\ + \ Innervik and Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy\ + \ of Music},\n title = {BiLE (Birmingham Laptop Ensemble)},\n url = {https://vimeo.com/26619928},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Mobile Phone Orchestras presents... 
- year: 2010 + publisher: Norwegian Academy of Music + title: BiLE (Birmingham Laptop Ensemble) + url: https://vimeo.com/26619928 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Haines2010 - address: 'Sydney, Australia' - author: Christian Haines - bibtex: "@inproceedings{nime2010-music-Haines2010,\n address = {Sydney, Australia},\n\ - \ author = {Christian Haines},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {SOMETHING TO GO HEAR #4},\n year = {2010}\n}\n" + ID: nime2011-music-Quay2011 + abstract: "Program notes:\nAs artists, we have learned that throughout the history\ + \ of mankind music and technology have co-evolved, shaping --- and being shaped\ + \ by --- human expression and creativity. The variety and intricacy of these recombination\ + \ processes contribute profoundly to the current diversity of performative structures\ + \ and aesthetics within the arts. Where art Thou? is a 15 minute theatrical performance\ + \ where sounds are controlled by sensors on the dancer's body. Blending a mixture\ + \ of electronic music and sound effects with dance and acting, this novel act\ + \ refocuses sensors from simplistic action-to-sound to contextualized aesthetic\ + \ and dramatic expression. The name reflects the itinerant quality of the stage\ + \ character as he travels through a world of sounds.\n\nAbout the performers:\n\ + \nYago de Quay: Interactive media artist, musician and researcher based in Porto.\ + \ His numerous installations and performances focus on user participation contributing\ + \ to modify the art piece itself. They always have a strong sonic component and\ + \ combine technologies to help create new modes of expression. 
Yago is currently\ + \ finishing his M.Sc. in Sound Design and Interactive Music at the Faculty of\ + \ Engineering, University of Porto.\n\nStåle Skogstad: PhD student in the fourMs\ + \ group at the University of Oslo. His research is focused on using real-time\ + \ full-body motion capture technology for musical interaction. This includes real-time\ + \ feature extraction from full body motion capture data and technical studies\ + \ of motion capture technologies. He is currently working with the Xsens MVN inertial\ + \ sensor suit." + address: 'Oslo, Norway' + author: Yago de Quay and Ståle Skogstad + bibtex: "@inproceedings{nime2011-music-Quay2011,\n abstract = {Program notes:\n\ + As artists, we have learned that throughout the history of mankind music and technology\ + \ have co-evolved, shaping --- and being shaped by --- human expression and creativity.\ + \ The variety and intricacy of these recombination processes contribute profoundly\ + \ to the current diversity of performative structures and aesthetics within the\ + \ arts. Where art Thou? is a 15 minute theatrical performance where sounds are\ + \ controlled by sensors on the dancer's body. Blending a mixture of electronic\ + \ music and sound effects with dance and acting, this novel act refocuses sensors\ + \ from simplistic action-to-sound to contextualized aesthetic and dramatic expression.\ + \ The name reflects the itinerant quality of the stage character as he travels\ + \ through a world of sounds.\n\nAbout the performers:\n\nYago de Quay: Interactive\ + \ media artist, musician and researcher based in Porto. His numerous installations\ + \ and performances focus on user participation contributing to modify the art\ + \ piece itself. They always have a strong sonic component and combine technologies\ + \ to help create new modes of expression. 
Yago is currently finishing his M.Sc.\ + \ in Sound Design and Interactive Music at the Faculty of Engineering, University\ + \ of Porto.\n\nStåle Skogstad: PhD student in the fourMs group at the University\ + \ of Oslo. His research is focused on using real-time full-body motion capture\ + \ technology for musical interaction. This includes real-time feature extraction\ + \ from full body motion capture data and technical studies of motion capture technologies.\ + \ He is currently working with the Xsens MVN inertial sensor suit.},\n address\ + \ = {Oslo, Norway},\n author = {Yago de Quay and Ståle Skogstad},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ + \ {June},\n publisher = {Norwegian Academy of Music},\n title = {Where Art Thou?:\ + \ Dance Jockey},\n url = {https://vimeo.com/26619980},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: "SOMETHING TO GO HEAR #4" - year: 2010 + publisher: Norwegian Academy of Music + title: 'Where Art Thou?: Dance Jockey' + url: https://vimeo.com/26619980 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2010-music-Schiemer2010 - address: 'Sydney, Australia' - author: Greg Schiemer - bibtex: "@inproceedings{nime2010-music-Schiemer2010,\n address = {Sydney, Australia},\n\ - \ author = {Greg Schiemer},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ - \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ - \ Kirsty Beilharz, Roger Mills},\n 
month = {June},\n publisher = {University of\ - \ Technology Sydney},\n title = {Mandala 9},\n year = {2010}\n}\n" + ID: nime2011-music-Sciajno2011 + abstract: "Program notes:\nIn this AV performance, images and sound interact: the\ + \ basic elements of the images (brightness, color, saturation, hue, dislocation\ + \ and relocation) are sensitive to the fundamental parameters of the sound being\ + \ generated at that moment. Sound waves (also controlled by light waves during\ + \ the performance) cross the physical world and alter the data stream that gives\ + \ life to digital video in the same way that molecules are transformed by the\ + \ sound contracting and expanding air particles in space.\n\nAbout the performers:\n\ + \nDomenico Sciajno: Double bass player and composer of acoustic and electronic\ + \ music. Thanks to his interest in improvisation and the influence of academic\ + \ education, his research currently focuses on the creative possibilities provided\ + \ by the interaction between acoustic instruments, indeterminacy factors and live\ + \ processing by electronic devices or computers." + address: 'Oslo, Norway' + author: Domenico Sciajno + bibtex: "@inproceedings{nime2011-music-Sciajno2011,\n abstract = {Program notes:\n\ + In this AV performance, images and sound interact: the basic elements of the images\ + \ (brightness, color, saturation, hue, dislocation and relocation) are sensitive\ + \ to the fundamental parameters of the sound being generated at that moment. Sound\ + \ waves (also controlled by light waves during the performance) cross the physical\ + \ world and alter the data stream that gives life to digital video in the same\ + \ way that molecules are transformed by the sound contracting and expanding air\ + \ particles in space.\n\nAbout the performers:\n\nDomenico Sciajno: Double bass\ + \ player and composer of acoustic and electronic music. 
Thanks to his interest\ + \ in improvisation and the influence of academic education, his research currently\ + \ focuses on the creative possibilities provided by the interaction between acoustic\ + \ instruments, indeterminacy factors and live processing by electronic devices\ + \ or computers.},\n address = {Oslo, Norway},\n author = {Domenico Sciajno},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ + \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {Sonolume},\n\ + \ url = {https://vimeo.com/26679879},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, - Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: University of Technology Sydney - title: Mandala 9 - year: 2010 + publisher: Norwegian Academy of Music + title: Sonolume + url: https://vimeo.com/26679879 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2008-music-Girolin2008 - abstract: "Program notes:\nLo specchio confuso dall'ombra can be translated as “The\ - \ mirror confused by its shadow” and it is between a distributed installation\ - \ and a concert, in which opposing groups of performers in two remote places play\ - \ solo or interact.\nThe audience (two people at a time, one for each installation)\ - \ activates video and sound transformations, depending on the space they occupy\ - \ and their gesture. The two installation are in the Foyer and in the Auditorium,\ - \ respectively, so the two persons from the audience cannot see and talk each\ - \ other. 
Multimodal data and expressive gesture cues are extracted in real- time\ - \ by an EyesWeb patch, interacting and playing with the electronic performer.\ - \ The interaction occurs both between the electronic performer and the two places\ - \ where the audience has access, and between the two remote installations. There\ - \ are two different levels of intervention in the audio and video transformation:\ - \ autonomous, depending on the single person and conditioned, depending on the\ - \ behaviour and the actions occurring in the other, separate installation.\nFurther,\ - \ the entrance of the concert hall has microphones, which capture words, sentences,\ - \ coughs, laughs or other noise, which are transformed in real-time and thus entering\ - \ into the piece.\nLo specchio confuso dall'ombra can't bind the audience remain\ - \ seated or follow a specific pattern in his behaviour. His duration is indefinite:\ - \ it changes every time it is performed.\n\nAbout the performers:\nRoberto Girolin\ - \ (1975) was born in Pordenone, Italy, and after studying of the classical guitar\ - \ he began to study the piano and composition at the \"J. Tomadini\" Conservatory\ - \ in Udine. He studied the vocal and instrumental counterpoint, graduating in\ - \ choral music and conducting in the same Conservatory. He has conducted many\ - \ choirs and orchestras, exploring different kinds of repertories from Gregorian\ - \ music to contemporary music.\nHe has deepened the study of contemporary music\ - \ at the University of Udine with Dr.A.Orcalli and then with Dr.N.Venzina at \"\ - B.Maderna\" Archive in Bologna (Italy). 
He has followed several Masterclasses\ - \ and seminars: choral music, chamber music, composition (Salvatore Sciarrino,\ - \ Fabio Nieder, Mauro Bonifacio), electronic music (Lelio Camilleri, Agostino\ - \ Di Scipio), a Sound Design course with Trevor Wishart, an Audio Digital Signal\ - \ Processing for Musical Applications (Lab workshop, lessons and applications)\ - \ with Giuseppe Di Giugno and live electronics in Luigi Nono's works with Alvise\ - \ Vidolin and André Richard (Experimental Studio Freiburg für Akustische Kunst).\n\ - He graduated with full marks in Electronic Music and Multimedia at the Musical\ - \ Academy of Pescara (Italy) and in 2006 he also got his degree at the Conservatory\ - \ of Venice under the direction of Alvise Vidolin with full marks (cum Laude).\n\ - He is actively involved in performing and investigating the compositional and\ - \ performance potential offered by electronic&multimedia music systems. His music\ - \ is performed in Italy and abroad. He has recently won the “Call 2007”, (Italian\ - \ CEMAT Competition) and a Mention at the 34th \"Concours Internationaux de Musique\ - \ et d'Art Sonore Electroacoustiques de Bourges\", France.\n\nPaolo Coletta, Simone\ - \ Ghisio and Gualtiero Volpe - EyesWeb interactive systems design" - address: 'Genova, Italy' - author: Roberto Girolin - bibtex: "@inproceedings{nime2008-music-Girolin2008,\n abstract = {Program notes:\n\ - Lo specchio confuso dall'ombra can be translated as “The mirror confused by its\ - \ shadow” and it is between a distributed installation and a concert, in which\ - \ opposing groups of performers in two remote places play solo or interact.\n\ - The audience (two people at a time, one for each installation) activates video\ - \ and sound transformations, depending on the space they occupy and their gesture.\ - \ The two installation are in the Foyer and in the Auditorium, respectively, so\ - \ the two persons from the audience cannot see and talk each other. 
Multimodal\ - \ data and expressive gesture cues are extracted in real- time by an EyesWeb patch,\ - \ interacting and playing with the electronic performer. The interaction occurs\ - \ both between the electronic performer and the two places where the audience\ - \ has access, and between the two remote installations. There are two different\ - \ levels of intervention in the audio and video transformation: autonomous, depending\ - \ on the single person and conditioned, depending on the behaviour and the actions\ - \ occurring in the other, separate installation.\nFurther, the entrance of the\ - \ concert hall has microphones, which capture words, sentences, coughs, laughs\ - \ or other noise, which are transformed in real-time and thus entering into the\ - \ piece.\nLo specchio confuso dall'ombra can't bind the audience remain seated\ - \ or follow a specific pattern in his behaviour. His duration is indefinite: it\ - \ changes every time it is performed.\n\nAbout the performers:\nRoberto Girolin\ - \ (1975) was born in Pordenone, Italy, and after studying of the classical guitar\ - \ he began to study the piano and composition at the \"J. Tomadini\" Conservatory\ - \ in Udine. He studied the vocal and instrumental counterpoint, graduating in\ - \ choral music and conducting in the same Conservatory. He has conducted many\ - \ choirs and orchestras, exploring different kinds of repertories from Gregorian\ - \ music to contemporary music.\nHe has deepened the study of contemporary music\ - \ at the University of Udine with Dr.A.Orcalli and then with Dr.N.Venzina at \"\ - B.Maderna\" Archive in Bologna (Italy). 
He has followed several Masterclasses\ - \ and seminars: choral music, chamber music, composition (Salvatore Sciarrino,\ - \ Fabio Nieder, Mauro Bonifacio), electronic music (Lelio Camilleri, Agostino\ - \ Di Scipio), a Sound Design course with Trevor Wishart, an Audio Digital Signal\ - \ Processing for Musical Applications (Lab workshop, lessons and applications)\ - \ with Giuseppe Di Giugno and live electronics in Luigi Nono's works with Alvise\ - \ Vidolin and André Richard (Experimental Studio Freiburg für Akustische Kunst).\n\ - He graduated with full marks in Electronic Music and Multimedia at the Musical\ - \ Academy of Pescara (Italy) and in 2006 he also got his degree at the Conservatory\ - \ of Venice under the direction of Alvise Vidolin with full marks (cum Laude).\n\ - He is actively involved in performing and investigating the compositional and\ - \ performance potential offered by electronic&multimedia music systems. His music\ - \ is performed in Italy and abroad. He has recently won the “Call 2007”, (Italian\ - \ CEMAT Competition) and a Mention at the 34th \"Concours Internationaux de Musique\ - \ et d'Art Sonore Electroacoustiques de Bourges\", France.\n\nPaolo Coletta, Simone\ - \ Ghisio and Gualtiero Volpe - EyesWeb interactive systems design},\n address\ - \ = {Genova, Italy},\n author = {Roberto Girolin},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ - \ title = {Lo specchio confuso dall'ombra},\n year = {2008}\n}\n" + ID: nime2011-music-Aase2011 + abstract: "Program notes:\n\nTrondheim Voices is in this performance exploring a\ + \ new tool in their work with voice sound and improvisation. The ensemble is working\ + \ with a tracking system for sound positioning to enable a given singer's position\ + \ on stage to directly influence the sound processing, both spatialisation and\ + \ effects. 
Through their improvisations and compositions they are exploring: a)\
+ \ The effect of the sound “following” the singers' movements on stage. b) The\
+ \ flexible use of processed voice sound within the big vocal ensemble, through\
+ \ the control each singer gets over the sound output by moving on stage. c) The\
+ \ visualization of choices and changes regarding sound, both for the performer\
+ \ and the audience, through the movements of each singer on stage.\n\nAbout the\
+ \ performers:\n\nTrondheim Voices Professional ensemble, working with the endless\
+ \ possibilities within the field of vocal improvisation, to find new expressions\
+ \ and new music. Consisting of individual soloists, Trondheim Voices wishes to\
+ \ develop what happens when the unique soloist quality of each singer is set to\
+ \ interact with each other, and to find the collective sound and feeling. All\
+ \ of the singers are educated at NTNU, Trondheim, Norway.\n\nSound: Asle Karstad.\
+ \ Tracking system: John Torger Skjelstad"
+ address: 'Oslo, Norway'
+ author: Tone Åse and Siri Gjære and Live Maria Roggen and Heidi Skjerve and Ingrid
+ Lode and Kirsti Huke and Anita Kaasbøll and Silje R. Karlsen
+ bibtex: "@inproceedings{nime2011-music-Aase2011,\n abstract = {Program notes:\n\n\
+ Trondheim Voices is in this performance exploring a new tool in their work with\
+ \ voice sound and improvisation. The ensemble is working with a tracking system\
+ \ for sound positioning to enable a given singer's position on stage to directly\
+ \ influence the sound processing, both spatialisation and effects. Through their\
+ \ improvisations and compositions they are exploring: a) The effect of the sound\
+ \ “following” the singers' movements on stage. b) The flexible use of processed\
+ \ voice sound within the big vocal ensemble, through the control each singer gets\
+ \ over the sound output by moving on stage. 
c) The visualization of choices and\ + \ changes regarding sound, both for the performer and the audience, through the\ + \ movements of each singer on stage.\n\nAbout the performers:\n\nTrondheim Voices\ + \ Professional ensemble, working with the endless possibilities within the field\ + \ of vocal improvisation, to find new expressions and new music. Consisting of\ + \ individual soloists, Trondheim Voices wishes to develop what happens when the\ + \ unique soloist quality of each singer is set to interact with each other, and\ + \ to find the collective sound and feeling. All of the singers are educated at\ + \ NTNU, Trondheim, Norway.\n\nSound: Asle Karstad. Tracking system: John Torger\ + \ Skjelstad},\n address = {Oslo, Norway},\n author = {Tone Åse and Siri Gjære\ + \ and Live Maria Roggen and Heidi Skjerve and Ingrid Lode and Kirsti Huke and\ + \ Anita Kaasbøll and Silje R. Karlsen},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ + \ {Norwegian Academy of Music},\n title = {Trondheim Voices},\n url = {https://vimeo.com/26680007},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: Casa Paganini - title: Lo specchio confuso dall'ombra - year: 2008 + publisher: Norwegian Academy of Music + title: Trondheim Voices + url: https://vimeo.com/26680007 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2008-music-Ferrari2008 - abstract: "Program notes:\nBased on the installation \"Mappe per Affetti Erranti\"\ - , designed and developed by Antonio Camurri, Corrado Canepa, Nicola Ferrari, Gualtiero\ - \ Volpe texts from Edmund Spenser's The Faire Queen and William Shakespeare's\ - \ King Lear with support of EU ICT Project SAME.\nThe bow is 
a theatrical mise-en-scene\ - \ of the installation Mappe per Affetti Erranti. During the Science Festival 2007,\ - \ as a preparatory work for the EU ICT Project SAME on active listening (www.sameproject.org),\ - \ the audience was invited to explore and experience a song by John Dowland (see\ - \ the paper on these proceedings by Camurri et al). The audience could walk inside\ - \ the polyphonic texture, listen to the singles parts, change the expressive quality\ - \ of musical interpretation by their movement on the stage of Casa Paganini analysed\ - \ with EyesWeb XMI. Aesthetically, the most interesting result consists in the\ - \ game of hiding and revealing a known piece. The idea could be matched with the\ - \ classical theatrical topos of recognition. So, the musical potentiality of the\ - \ 'interactive performance' of a prerecorded music becomes a new dramaturgical\ - \ structure.\nRoberto Tiranti and his madrigalistic group recorded, under the\ - \ supervision of Marco Canepa, different anamorphic interpretations of a bachian\ - \ choral. Thanks to the interactive application developed with EyesWeb XMI, the\ - \ group of dancers conducted by the choreographer Giovanni Di Cicco, mix and mould\ - \ the recorded music material in real time. At the same time, the live sound of\ - \ the vocal group explores the whole space of Casa Paganini, as a global (both\ - \ real and imaginary) musical instrument. In a metamorphic game where, according\ - \ to Corrado Canepa's compositive lesson, electronic and acoustic technologies\ - \ merge and interchange their specificity, this interactive score of losing and\ - \ finding, multiplying and distillating the ancient bachian palimpsest tries to\ - \ tell the dramatic history of King Lear, the most tragic western figure of difficulty\ - \ to reach the affects you possess without being able to know or express.\n\n\ - About the performers:\nNicola Ferrari was born in 1973. 
He studied composition\ - \ with Adriano Guarnieri and took his degree at 'G. B. Martini' Conservatory in\ - \ Bologna. He took his Master Degree and PhD from the Faculty of Arts and Philosophy\ - \ at University of Genoa. Since 2005 he is a member of the staff of the InfoMus\ - \ Lab. For many years he directed the 'S.Anna' polyphonic choir. He wrote scores\ - \ for theatrical performances.\n\nVocalists - Roberto Tiranti (tenor and vocal\ - \ conductor), Valeria Bruzzone (alto),\nChiara Longobardi (soprano), Edoardo Valle\ - \ (bass)\nDancers - Giovanni Di Cicco (choreography), Luca Alberti, Filippo Bandiera,\ - \ Nicola Marrapodi\nRecording engineer and music consultant - Marco Canepa\nSound\ - \ Engineers - Corrado Canepa (director), Chiara Erra (assistant)\nEyesWeb interactive\ - \ systems design - Paolo Coletta, Barbara Mazzarino, Gualtiero Volpe" - address: 'Genova, Italy' - author: Nicola Ferrari - bibtex: "@inproceedings{nime2008-music-Ferrari2008,\n abstract = {Program notes:\n\ - Based on the installation \"Mappe per Affetti Erranti\", designed and developed\ - \ by Antonio Camurri, Corrado Canepa, Nicola Ferrari, Gualtiero Volpe texts from\ - \ Edmund Spenser's The Faire Queen and William Shakespeare's King Lear with support\ - \ of EU ICT Project SAME.\nThe bow is a theatrical mise-en-scene of the installation\ - \ Mappe per Affetti Erranti. During the Science Festival 2007, as a preparatory\ - \ work for the EU ICT Project SAME on active listening (www.sameproject.org),\ - \ the audience was invited to explore and experience a song by John Dowland (see\ - \ the paper on these proceedings by Camurri et al). The audience could walk inside\ - \ the polyphonic texture, listen to the singles parts, change the expressive quality\ - \ of musical interpretation by their movement on the stage of Casa Paganini analysed\ - \ with EyesWeb XMI. Aesthetically, the most interesting result consists in the\ - \ game of hiding and revealing a known piece. 
The idea could be matched with the\ - \ classical theatrical topos of recognition. So, the musical potentiality of the\ - \ 'interactive performance' of a prerecorded music becomes a new dramaturgical\ - \ structure.\nRoberto Tiranti and his madrigalistic group recorded, under the\ - \ supervision of Marco Canepa, different anamorphic interpretations of a bachian\ - \ choral. Thanks to the interactive application developed with EyesWeb XMI, the\ - \ group of dancers conducted by the choreographer Giovanni Di Cicco, mix and mould\ - \ the recorded music material in real time. At the same time, the live sound of\ - \ the vocal group explores the whole space of Casa Paganini, as a global (both\ - \ real and imaginary) musical instrument. In a metamorphic game where, according\ - \ to Corrado Canepa's compositive lesson, electronic and acoustic technologies\ - \ merge and interchange their specificity, this interactive score of losing and\ - \ finding, multiplying and distillating the ancient bachian palimpsest tries to\ - \ tell the dramatic history of King Lear, the most tragic western figure of difficulty\ - \ to reach the affects you possess without being able to know or express.\n\n\ - About the performers:\nNicola Ferrari was born in 1973. He studied composition\ - \ with Adriano Guarnieri and took his degree at 'G. B. Martini' Conservatory in\ - \ Bologna. He took his Master Degree and PhD from the Faculty of Arts and Philosophy\ - \ at University of Genoa. Since 2005 he is a member of the staff of the InfoMus\ - \ Lab. For many years he directed the 'S.Anna' polyphonic choir. 
He wrote scores\ - \ for theatrical performances.\n\nVocalists - Roberto Tiranti (tenor and vocal\ - \ conductor), Valeria Bruzzone (alto),\nChiara Longobardi (soprano), Edoardo Valle\ - \ (bass)\nDancers - Giovanni Di Cicco (choreography), Luca Alberti, Filippo Bandiera,\ - \ Nicola Marrapodi\nRecording engineer and music consultant - Marco Canepa\nSound\ - \ Engineers - Corrado Canepa (director), Chiara Erra (assistant)\nEyesWeb interactive\ - \ systems design - Paolo Coletta, Barbara Mazzarino, Gualtiero Volpe},\n address\ - \ = {Genova, Italy},\n author = {Nicola Ferrari},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ - \ title = {The Bow is Bent and Drawn},\n year = {2008}\n}\n" + ID: nime2011-music-Hsu2011 + abstract: "Program notes:\n\nInterstices AP is a structured audio-visual solo improvisation,\ + \ using the multitouch Airplane Controller to manipulate live electronic sound\ + \ and interactive animations. During the piece, Bill Hsu will be using the Airplane\ + \ Controller in combination with his PSHIVA particle system software, to synthesize\ + \ and interact with generative sound and animations. The visual component of Interstices\ + \ AP is a physics-based simulation of a particle system. Particles, images and\ + \ other components interact with physical gestures in a fluid like system; the\ + \ results resemble asymmetric, constantly evolving Rorschach blots that open up\ + \ a wide range of visual associations. For more details, see Bill Hsu's poster\ + \ in the conference proceedings.\n\nAbout the performers:\n\nBill Hsu: Associate\ + \ Professor of Computer Science at San Francisco State University. 
His work with\ + \ real-time audiovisual performance systems has been presented at (among others)\ + \ SMC 2009 (Porto), Harvestworks Festival 2009 (New York), Fete Quaqua 2008 (London),\ + \ MIX Festival 2007 and 2009 (New York), and Stimme+ 2006 (Karlsruhe).\n\nAlain\ + \ Crevoisier: Senior researcher at the Music Conservatory of Geneva, Switzerland.\ + \ He is the founder of Future-instruments.net, a collaborative research network\ + \ active in the field of new musical interfaces and interactive technologies.\ + \ The latest realization is the Airplane controller, a portable system that makes\ + \ possible to transform any flat surface, into a multitouch interface." + address: 'Oslo, Norway' + author: Bill Hsu and Alain Crevoisier + bibtex: "@inproceedings{nime2011-music-Hsu2011,\n abstract = {Program notes:\n\n\ + Interstices AP is a structured audio-visual solo improvisation, using the multitouch\ + \ Airplane Controller to manipulate live electronic sound and interactive animations.\ + \ During the piece, Bill Hsu will be using the Airplane Controller in combination\ + \ with his PSHIVA particle system software, to synthesize and interact with generative\ + \ sound and animations. The visual component of Interstices AP is a physics-based\ + \ simulation of a particle system. Particles, images and other components interact\ + \ with physical gestures in a fluid like system; the results resemble asymmetric,\ + \ constantly evolving Rorschach blots that open up a wide range of visual associations.\ + \ For more details, see Bill Hsu's poster in the conference proceedings.\n\nAbout\ + \ the performers:\n\nBill Hsu: Associate Professor of Computer Science at San\ + \ Francisco State University. 
His work with real-time audiovisual performance\ + \ systems has been presented at (among others) SMC 2009 (Porto), Harvestworks\ + \ Festival 2009 (New York), Fete Quaqua 2008 (London), MIX Festival 2007 and 2009\ + \ (New York), and Stimme+ 2006 (Karlsruhe).\n\nAlain Crevoisier: Senior researcher\ + \ at the Music Conservatory of Geneva, Switzerland. He is the founder of Future-instruments.net,\ + \ a collaborative research network active in the field of new musical interfaces\ + \ and interactive technologies. The latest realization is the Airplane controller,\ + \ a portable system that makes possible to transform any flat surface, into a\ + \ multitouch interface.},\n address = {Oslo, Norway},\n author = {Bill Hsu and\ + \ Alain Crevoisier},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ + \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ + \ title = {Interstices AP},\n url = {https://vimeo.com/26629820},\n year = {2011}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: Casa Paganini - title: The Bow is Bent and Drawn - year: 2008 + publisher: Norwegian Academy of Music + title: Interstices AP + url: https://vimeo.com/26629820 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2008-music-Klauer2008 - abstract: "Program notes:\nPutting a distance sensor under the scroll of the instrument\ - \ and an inclination sensor on the wrist, the detection of the displacements of\ - \ the limbs of the interpreter becomes possible. These displacements, drawn onto\ - \ a cartesian plane, give the coordinates of a track in an ideal performing space,\ - \ whose third dimension is increased and formed by the passing of time. 
Actually,\ - \ the computer permits to assimilate to the aforesaid track the sounding path\ - \ proposed by the interpreter, hence to rehear it. Also in the latter case, the\ - \ coordinates to access it are given by current gestures, therefore the dimension\ - \ of time results bundled, somehow like considering a parchment palimpsest: the\ - \ sounding form returned by the computer results increasingly dense and inexplicable\ - \ and needs an electroacoustic exegesis that unleash it at least in shreds.\n\ - The procedures of musical production are here a metaphor for knowledge; alike\ - \ are the compositional methods at the root of the score, which providing the\ - \ prescriptions of the musical path, portrays in addition a mental track.\n\n\ - About the performer:\nGiorgio Klauer studied electronic music, instrumental composition,\ - \ flute and musicology in Trieste, where he was born in 1976, in Cremona and in\ - \ Liège. He is professor at the Conservatory of Como, school of music and sound\ - \ technologies." - address: 'Genova, Italy' - author: Giorgio Klauer - bibtex: "@inproceedings{nime2008-music-Klauer2008,\n abstract = {Program notes:\n\ - Putting a distance sensor under the scroll of the instrument and an inclination\ - \ sensor on the wrist, the detection of the displacements of the limbs of the\ - \ interpreter becomes possible. These displacements, drawn onto a cartesian plane,\ - \ give the coordinates of a track in an ideal performing space, whose third dimension\ - \ is increased and formed by the passing of time. Actually, the computer permits\ - \ to assimilate to the aforesaid track the sounding path proposed by the interpreter,\ - \ hence to rehear it. 
Also in the latter case, the coordinates to access it are\ - \ given by current gestures, therefore the dimension of time results bundled,\ - \ somehow like considering a parchment palimpsest: the sounding form returned\ - \ by the computer results increasingly dense and inexplicable and needs an electroacoustic\ - \ exegesis that unleash it at least in shreds.\nThe procedures of musical production\ - \ are here a metaphor for knowledge; alike are the compositional methods at the\ - \ root of the score, which providing the prescriptions of the musical path, portrays\ - \ in addition a mental track.\n\nAbout the performer:\nGiorgio Klauer studied\ - \ electronic music, instrumental composition, flute and musicology in Trieste,\ - \ where he was born in 1976, in Cremona and in Liège. He is professor at the Conservatory\ - \ of Como, school of music and sound technologies.},\n address = {Genova, Italy},\n\ - \ author = {Giorgio Klauer},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n\ - \ month = {June},\n publisher = {Casa Paganini},\n title = {Tre Aspetti del Tempo\ - \ per Iperviolino e Computer},\n year = {2008}\n}\n" + ID: nime2011-music-Hsu2011a + abstract: "Performer notes:\n\nFlayed/Flock is a structured audio-visual improvisation\ + \ for three musicians, utilizing live acoustic and electronic sound and interactive\ + \ animations. 
The visual component of Flayed/Flock is an enhanced flocking simulation\ + \ that interacts with real-time audio from the performance of improvising musicians.\ + \ Abstract patterns develop out of the flocking behavior; the flocks are also\ + \ able to coalesce into well-defined symbols and forms such as crescents and stars,\ + \ all while moving in a natural-looking manner consistent with flocking behavior.\ + \ For more details, see Bill Hsu's poster in the conference proceedings.\n\nAbout\ + \ the performers:\n\nBill Hsu: Associate Professor of Computer Science at San\ + \ Francisco State University. His work with real-time audiovisual performance\ + \ systems has been presented at (among others) SMC 2009 (Porto), Harvestworks\ + \ Festival 2009 (New York), Fete Quaqua 2008 (London), MIX Festival 2007 and 2009\ + \ (New York), and Stimme+ 2006 (Karlsruhe).\n\nHåvard Skaset (guitar) and Guro\ + \ Skumsnes Moe (bass): The Oslo-based duo works intensively in the borders between\ + \ improv, noise and rock. Skaset and Moe play in bands including Bluefaced People,\ + \ Art Directors, Sult, Mirror Trio, SEKSTETT, Telling Stories About Trees and\ + \ MOE. They have been working with Christian Wolff, Pauline Oliveros, Fred Frith,\ + \ Ikue Mori, Okkyung Lee, Frode Gjerstad and many more." + address: 'Oslo, Norway' + author: Bill Hsu and Håvard Skaset and Guro Skumsnes Moe + bibtex: "@inproceedings{nime2011-music-Hsu2011a,\n abstract = {Performer notes:\n\ + \nFlayed/Flock is a structured audio-visual improvisation for three musicians,\ + \ utilizing live acoustic and electronic sound and interactive animations. The\ + \ visual component of Flayed/Flock is an enhanced flocking simulation that interacts\ + \ with real-time audio from the performance of improvising musicians. 
Abstract\ + \ patterns develop out of the flocking behavior; the flocks are also able to coalesce\ + \ into well-defined symbols and forms such as crescents and stars, all while moving\ + \ in a natural-looking manner consistent with flocking behavior. For more details,\ + \ see Bill Hsu's poster in the conference proceedings.\n\nAbout the performers:\n\ + \nBill Hsu: Associate Professor of Computer Science at San Francisco State University.\ + \ His work with real-time audiovisual performance systems has been presented at\ + \ (among others) SMC 2009 (Porto), Harvestworks Festival 2009 (New York), Fete\ + \ Quaqua 2008 (London), MIX Festival 2007 and 2009 (New York), and Stimme+ 2006\ + \ (Karlsruhe).\n\nHåvard Skaset (guitar) and Guro Skumsnes Moe (bass): The Oslo-based\ + \ duo works intensively in the borders between improv, noise and rock. Skaset\ + \ and Moe play in bands including Bluefaced People, Art Directors, Sult, Mirror\ + \ Trio, SEKSTETT, Telling Stories About Trees and MOE. 
They have been working\ + \ with Christian Wolff, Pauline Oliveros, Fred Frith, Ikue Mori, Okkyung Lee,\ + \ Frode Gjerstad and many more.},\n address = {Oslo, Norway},\n author = {Bill\ + \ Hsu and Håvard Skaset and Guro Skumsnes Moe},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ + \ = {Norwegian Academy of Music},\n title = {Flayed/Flock},\n url = {https://vimeo.com/26629835},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: Casa Paganini - title: Tre Aspetti del Tempo per Iperviolino e Computer - year: 2008 + publisher: Norwegian Academy of Music + title: Flayed/Flock + url: https://vimeo.com/26629835 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2008-music-Sartini2008 - abstract: "Program notes:\nAurora Polare (Polar Dawn) is a short piece for cymbals,\ - \ tam-tam, vibraphone, live electronics and EyesWeb system. This piece was inspired\ - \ by the smooth movements of waves, the drawings created by polar dawns and the\ - \ cold weather in polar seas – that's the reason why only metallophones are used.\n\ - The first matter to fight with was making the percussionist elaborate the sound\ - \ they produce while playing their instruments and crafting a brand-new easy way\ - \ to specify every movement. That's why, under the traditional notation score,\ - \ two special lines follow the music specifying the direction to move to: up-down\ - \ and left-right/near-far. A line approaching the top or the bottom of the Y axis\ - \ tells the way to track. You can find an example here on the left.\nAll of those\ - \ movements fully interact with EyesWeb and MAX MSP thru two 30fps accelerometer\ - \ bracelets worn by the performers. 
Every vertical movement controls the volume\ - \ of the processed sound, while horizontal movements manage a different patch\ - \ in MAX MSP suited to every instrument: a tam-tam sample speed controller (this\ - \ make the instrument play without being touched), an harmonizer to make cymbals\ - \ sing just like a Theremin, but with their own processed sound, and\nthe rate\ - \ of a delay. In the control room a MIDI controller and a computer will be used\ - \ to manage live additional effects and parameters, like granular synthesis, reverb\ - \ and multi-slider filters.\nThanks to Martino Sarolli for helping me with MAX\ - \ MSP, to Matteo Rabolini and Matteo Bonanni for playing my composition.\n\nAbout\ - \ the performer:\nAlessandro Sartini: Born in Genoa in 1982, he studied piano\ - \ with Canzio Bucciarelli and attends the last year of Composition at the Conservatory\ - \ of Genoa with Riccardo Dapelo, who introduced him to “live electronic” treatments.\ - \ His first public exhibition was at the Auditorium Montale of the Carlo Felice\ - \ Theatre in Genoa, during the concert commemorating the 50th anniversary of Béla\ - \ Bartók's death in 1995. From that year on he established a great number of collaboration\ - \ with various solo musicians, who really appreciated his way to accompany; this\ - \ guided him to work in partnership with a good number of professional soloists.\ - \ In 1999 he joined the class of Composition at the Conservatory of Genoa with\ - \ Luigi Giachino, who introduced him to film music: this interest led him to win\ - \ the third prize at the Lavagnino International Film Music Festival in Gavi in\ - \ 2006 and the first prize at the “Concorso Internazionale di Composizione di\ - \ Alice Belcolle\" in 2007. With Valentina Abrami, he is the founder of the “Associazione\ - \ Musica in Movimento”, which operates at the “International School in Genoa”." 
- address: 'Genova, Italy' - author: Alessandro Sartini - bibtex: "@inproceedings{nime2008-music-Sartini2008,\n abstract = {Program notes:\n\ - Aurora Polare (Polar Dawn) is a short piece for cymbals, tam-tam, vibraphone,\ - \ live electronics and EyesWeb system. This piece was inspired by the smooth movements\ - \ of waves, the drawings created by polar dawns and the cold weather in polar\ - \ seas – that's the reason why only metallophones are used.\nThe first matter\ - \ to fight with was making the percussionist elaborate the sound they produce\ - \ while playing their instruments and crafting a brand-new easy way to specify\ - \ every movement. That's why, under the traditional notation score, two special\ - \ lines follow the music specifying the direction to move to: up-down and left-right/near-far.\ - \ A line approaching the top or the bottom of the Y axis tells the way to track.\ - \ You can find an example here on the left.\nAll of those movements fully interact\ - \ with EyesWeb and MAX MSP thru two 30fps accelerometer bracelets worn by the\ - \ performers. Every vertical movement controls the volume of the processed sound,\ - \ while horizontal movements manage a different patch in MAX MSP suited to every\ - \ instrument: a tam-tam sample speed controller (this make the instrument play\ - \ without being touched), an harmonizer to make cymbals sing just like a Theremin,\ - \ but with their own processed sound, and\nthe rate of a delay. 
In the control\ - \ room a MIDI controller and a computer will be used to manage live additional\ - \ effects and parameters, like granular synthesis, reverb and multi-slider filters.\n\ - Thanks to Martino Sarolli for helping me with MAX MSP, to Matteo Rabolini and\ - \ Matteo Bonanni for playing my composition.\n\nAbout the performer:\nAlessandro\ - \ Sartini: Born in Genoa in 1982, he studied piano with Canzio Bucciarelli and\ - \ attends the last year of Composition at the Conservatory of Genoa with Riccardo\ - \ Dapelo, who introduced him to “live electronic” treatments. His first public\ - \ exhibition was at the Auditorium Montale of the Carlo Felice Theatre in Genoa,\ - \ during the concert commemorating the 50th anniversary of Béla Bartók's death\ - \ in 1995. From that year on he established a great number of collaboration with\ - \ various solo musicians, who really appreciated his way to accompany; this guided\ - \ him to work in partnership with a good number of professional soloists. In 1999\ - \ he joined the class of Composition at the Conservatory of Genoa with Luigi Giachino,\ - \ who introduced him to film music: this interest led him to win the third prize\ - \ at the Lavagnino International Film Music Festival in Gavi in 2006 and the first\ - \ prize at the “Concorso Internazionale di Composizione di Alice Belcolle\" in\ - \ 2007. With Valentina Abrami, he is the founder of the “Associazione Musica in\ - \ Movimento”, which operates at the “International School in Genoa”.},\n address\ - \ = {Genova, Italy},\n author = {Alessandro Sartini},\n booktitle = {Music Proceedings\ + ID: nime2011-music-IvicaIcoBukvicDirector2011 + abstract: "Program notes:\n13 (Ivica Ico Bukvic): is a game of prime numbers and\ + \ primal instincts pitting timbre against rhythm. 
Driven by conductor's oversight\ + \ over an array of performer-specific and ensemble-wide parameters, a networked\ + \ ensemble acts as one large meta-tracker where each individual performer contributes\ + \ its own gesture-driven motives or tracks. The ensuing meta-tracker texture is\ + \ superimposed against improvised acoustic percussion in a search of a meaningful\ + \ discourse and ultimately musical synergy.\n\nSerene (Ivica Ico Bukvic): ...the\ + \ one moment in the day when the world melts away and we catch a glimpse of life\ + \ that just is... a celebration of this moment through juxtaposition of Taiji\ + \ (Tai Chi Chuan) choreography and music...\n\nCitadel for soprano and L2Ork (Ivica\ + \ Ico Bukvic) draws inspiration from a famous poem \"Himna Slobodi\" (Hymn to\ + \ Freedom) by the 17th century Croatian poet Ivan Gundulic. As the first piece\ + \ ever written for the newfound ensemble, it relies upon pervasive tonality, in\ + \ many ways posing as an electronic counterpart to a traditional string ensemble.\ + \ Using the infinite-bow metaphor to create lush tonal harmonies the piece forms\ + \ a compelling aural foundation for a lyrical showcase of soloist's vocal talent.\n\ + \n=== About the performers:\n\nL2Ork: Founded by Dr. Ivica Ico Bukvic in May 2009,\ + \ is part of the latest interdisciplinary initiative by the Virginia Tech Music\ + \ Department's Digital Interactive Sound & Intermedia Studio (DISIS). 
As an emerging\ + \ contemporary intermedia ensemble with a uniquely open design, L2Ork thrives\ + \ upon the quintessential form of collaboration found in the western classical\ + \ orchestra and its cross-pollination with increasingly accessible human-computer\ + \ interaction technologies for the purpose of exploring expressive power of gesture,\ + \ communal interaction, discipline-agnostic environment, and the multidimensionality\ + \ of arts.\n\nMembers: Ivica Ico Bukvic (Director), John Elder, Hillary Guilliams,\ + \ Bennett Layman, David Mudre, Steven Querry, Philip Seward, Andrew Street, Elizabeth\ + \ Ullrich and Adam Wirdzek\n\n=== Recorded at:\n\n11th International Conference\ + \ on New Interfaces for Musical Expression. 30 May - 1 June 2011, Oslo, Norway.\n\ + \nhttp://www.nime2011.org\nAbout the performers:\n\nL2Ork Founded by Dr. Ivica\ + \ Ico Bukvic in May 2009, is part of the latest interdisciplinary initiative by\ + \ the Virginia Tech Music Department's Digital Interactive Sound & Intermedia\ + \ Studio (DISIS). 
As an emerging contemporary intermedia ensemble with a uniquely\ + \ open design, L2Ork thrives upon the quintessential form of collaboration found\ + \ in the western classical orchestra and its cross-pollination with increasingly\ + \ accessible human-computer interaction technologies for the purpose of exploring\ + \ expressive power of gesture, communal interaction, discipline-agnostic environment,\ + \ and the multidimensionality of arts.\n\nMembers: Ivica Ico Bukvic (Director),\ + \ John Elder, Hillary Guilliams, Bennett Layman, David Mudre, Steven Querry, Philip\ + \ Seward, Andrew Street, Elizabeth Ullrich and Adam Wirdzek" + address: 'Oslo, Norway' + author: Ivica Ico Bukvic and John Elder and Hillary Guilliams and Bennett Layman + and David Mudre and Steven Querry and Philip Seward and Andrew Street and Elizabeth + Ullrich and Adam Wirdzek + bibtex: "@inproceedings{nime2011-music-IvicaIcoBukvicDirector2011,\n abstract =\ + \ {Program notes:\n13 (Ivica Ico Bukvic): is a game of prime numbers and primal\ + \ instincts pitting timbre against rhythm. Driven by conductor's oversight over\ + \ an array of performer-specific and ensemble-wide parameters, a networked ensemble\ + \ acts as one large meta-tracker where each individual performer contributes its\ + \ own gesture-driven motives or tracks. The ensuing meta-tracker texture is superimposed\ + \ against improvised acoustic percussion in a search of a meaningful discourse\ + \ and ultimately musical synergy.\n\nSerene (Ivica Ico Bukvic): ...the one moment\ + \ in the day when the world melts away and we catch a glimpse of life that just\ + \ is... a celebration of this moment through juxtaposition of Taiji (Tai Chi Chuan)\ + \ choreography and music...\n\nCitadel for soprano and L2Ork (Ivica Ico Bukvic)\ + \ draws inspiration from a famous poem \"Himna Slobodi\" (Hymn to Freedom) by\ + \ the 17th century Croatian poet Ivan Gundulic. 
As the first piece ever written\ + \ for the newfound ensemble, it relies upon pervasive tonality, in many ways posing\ + \ as an electronic counterpart to a traditional string ensemble. Using the infinite-bow\ + \ metaphor to create lush tonal harmonies the piece forms a compelling aural foundation\ + \ for a lyrical showcase of soloist's vocal talent.\n\n=== About the performers:\n\ + \nL2Ork: Founded by Dr. Ivica Ico Bukvic in May 2009, is part of the latest interdisciplinary\ + \ initiative by the Virginia Tech Music Department's Digital Interactive Sound\ + \ \\& Intermedia Studio (DISIS). As an emerging contemporary intermedia ensemble\ + \ with a uniquely open design, L2Ork thrives upon the quintessential form of collaboration\ + \ found in the western classical orchestra and its cross-pollination with increasingly\ + \ accessible human-computer interaction technologies for the purpose of exploring\ + \ expressive power of gesture, communal interaction, discipline-agnostic environment,\ + \ and the multidimensionality of arts.\n\nMembers: Ivica Ico Bukvic (Director),\ + \ John Elder, Hillary Guilliams, Bennett Layman, David Mudre, Steven Querry, Philip\ + \ Seward, Andrew Street, Elizabeth Ullrich and Adam Wirdzek\n\n=== Recorded at:\n\ + \n11th International Conference on New Interfaces for Musical Expression. 30 May\ + \ - 1 June 2011, Oslo, Norway.\n\nhttp://www.nime2011.org\nAbout the performers:\n\ + \nL2Ork Founded by Dr. Ivica Ico Bukvic in May 2009, is part of the latest interdisciplinary\ + \ initiative by the Virginia Tech Music Department's Digital Interactive Sound\ + \ & Intermedia Studio (DISIS). 
As an emerging contemporary intermedia ensemble\ + \ with a uniquely open design, L2Ork thrives upon the quintessential form of collaboration\ + \ found in the western classical orchestra and its cross-pollination with increasingly\ + \ accessible human-computer interaction technologies for the purpose of exploring\ + \ expressive power of gesture, communal interaction, discipline-agnostic environment,\ + \ and the multidimensionality of arts.\n\nMembers: Ivica Ico Bukvic (Director),\ + \ John Elder, Hillary Guilliams, Bennett Layman, David Mudre, Steven Querry, Philip\ + \ Seward, Andrew Street, Elizabeth Ullrich and Adam Wirdzek},\n address = {Oslo,\ + \ Norway},\n author = {Ivica Ico Bukvic and John Elder and Hillary Guilliams and\ + \ Bennett Layman and David Mudre and Steven Querry and Philip Seward and Andrew\ + \ Street and Elizabeth Ullrich and Adam Wirdzek},\n booktitle = {Music Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ - \ title = {Aurora Polare},\n year = {2008}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Roberto Doati - month: June - publisher: Casa Paganini - title: Aurora Polare - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: nime2008-music-Baltazar2008 - abstract: "Program notes:\nThe composition of Pyrogenesis took inspiration from\ - \ several aspects of the blacksmithing, not in a literal way, but much as a set\ - \ of correspondences :\nFirst, the gesture, by which the blacksmith models the\ - \ matter continuously; striking, heating, twisting, soaking metals to gradually\ - \ print a form into them.\nThen, the tool: Just like the blacksmith manufactures\ - \ his own tools, I work on developing my own electro-acoustic instrument: an instrument\ - \ to write sound, in space and with a gestural input.\nLastly, the organic construction\ - 
\ of the form: Gilles Deleuze says \"Why is the blacksmith a musician? It is not\ - \ simply because the forging mill makes noise, it is because the music and the\ - \ metallurgy are haunted by the same problem: that the metallurgy puts the matter\ - \ in the state of continuous variation just as the music is haunted by putting\ - \ the sound in a state of continuous variation and to found in the sound world\ - \ a continuous development of the form and a continuous variation of the matter\"\ - .\nOn a more technical/scientific point of view, the interaction with the performer\ - \ uses two interfaces : a Wacom tablet, and a set of force- resistive-sensors\ - \ (through an analog-to-digital converter), which common point is that they both\ - \ allow control by the pressure of hands, and thus offer a very “physical” mode\ - \ of control.\nThe composition/performance environment consists of a set of generative\ - \ audio modules, fully addressable and presettable, including a mapping engine\ - \ allowing a quick yet powerful set of mapping strategies from controllers inputs\ - \ and volume envelopes to any parameter, including those of the mappers themselves,\ - \ allowing a very precise, flexible, and evolutive sound/gesture relationship\ - \ in time.\nThe composition has been realized through a constant dialogue between\ - \ improvisations in a pre-determined trajectory, and afterwards- listening of\ - \ the produced result. Thus, most of the details of the composition have been\ - \ generated by an improvisation/learning-through- repetition process, without\ - \ any visual support - thus allowing to emphasize expressivity while keeping a\ - \ very direct relationship to the musical gesture.\n\nAbout the performer:\nPascal\ - \ Baltazar is a composer and research coordinator at GMEA, National Center for\ - \ Musical Creation in Albi, France. His research focuses on spatial and temporal\ - \ perception of sound, and its relationship to the body and musical gesture. 
He\ - \ is coordinating the Virage research platform, on control and scripting novel\ - \ interfaces for artistic creation and entertainment industries, granted by the\ - \ French Research Agency, in the frame of its Audiovisual and Multimedia program,\ - \ for the 2008-2009 period. He is an active member of the Jamoma collective.\n\ - He has studied Aesthetics (Masters of Philosophy Thesis The sonic image : material\ - \ and sensation, 2001, Toulouse III, France) and electroacoustic composition at\ - \ the National Conservatoire of Toulouse. He has then been implied as a composer\ - \ or interactive designer in diverse artistic projects : concerts, performing\ - \ arts shows and interactive installations. He has been commissioned for musical\ - \ works by several institutions, as the French State, INA-GRM, GMEA, IMEB... and\ - \ participated in international festivals (Présences Électroniques, Paris / Radio\ - \ France Festival, Montpellier / Synthèse, Bourges / Videomedeja, Novi Sad / Space\ - \ + Place, Berlin...)." - address: 'Genova, Italy' - author: Pascal Baltazar - bibtex: "@inproceedings{nime2008-music-Baltazar2008,\n abstract = {Program notes:\n\ - The composition of Pyrogenesis took inspiration from several aspects of the blacksmithing,\ - \ not in a literal way, but much as a set of correspondences :\nFirst, the gesture,\ - \ by which the blacksmith models the matter continuously; striking, heating, twisting,\ - \ soaking metals to gradually print a form into them.\nThen, the tool: Just like\ - \ the blacksmith manufactures his own tools, I work on developing my own electro-acoustic\ - \ instrument: an instrument to write sound, in space and with a gestural input.\n\ - Lastly, the organic construction of the form: Gilles Deleuze says \"Why is the\ - \ blacksmith a musician? 
It is not simply because the forging mill makes noise,\ - \ it is because the music and the metallurgy are haunted by the same problem:\ - \ that the metallurgy puts the matter in the state of continuous variation just\ - \ as the music is haunted by putting the sound in a state of continuous variation\ - \ and to found in the sound world a continuous development of the form and a continuous\ - \ variation of the matter\".\nOn a more technical/scientific point of view, the\ - \ interaction with the performer uses two interfaces : a Wacom tablet, and a set\ - \ of force- resistive-sensors (through an analog-to-digital converter), which\ - \ common point is that they both allow control by the pressure of hands, and thus\ - \ offer a very “physical” mode of control.\nThe composition/performance environment\ - \ consists of a set of generative audio modules, fully addressable and presettable,\ - \ including a mapping engine allowing a quick yet powerful set of mapping strategies\ - \ from controllers inputs and volume envelopes to any parameter, including those\ - \ of the mappers themselves, allowing a very precise, flexible, and evolutive\ - \ sound/gesture relationship in time.\nThe composition has been realized through\ - \ a constant dialogue between improvisations in a pre-determined trajectory, and\ - \ afterwards- listening of the produced result. Thus, most of the details of the\ - \ composition have been generated by an improvisation/learning-through- repetition\ - \ process, without any visual support - thus allowing to emphasize expressivity\ - \ while keeping a very direct relationship to the musical gesture.\n\nAbout the\ - \ performer:\nPascal Baltazar is a composer and research coordinator at GMEA,\ - \ National Center for Musical Creation in Albi, France. His research focuses on\ - \ spatial and temporal perception of sound, and its relationship to the body and\ - \ musical gesture. 
He is coordinating the Virage research platform, on control\ - \ and scripting novel interfaces for artistic creation and entertainment industries,\ - \ granted by the French Research Agency, in the frame of its Audiovisual and Multimedia\ - \ program, for the 2008-2009 period. He is an active member of the Jamoma collective.\n\ - He has studied Aesthetics (Masters of Philosophy Thesis The sonic image : material\ - \ and sensation, 2001, Toulouse III, France) and electroacoustic composition at\ - \ the National Conservatoire of Toulouse. He has then been implied as a composer\ - \ or interactive designer in diverse artistic projects : concerts, performing\ - \ arts shows and interactive installations. He has been commissioned for musical\ - \ works by several institutions, as the French State, INA-GRM, GMEA, IMEB... and\ - \ participated in international festivals (Présences Électroniques, Paris / Radio\ - \ France Festival, Montpellier / Synthèse, Bourges / Videomedeja, Novi Sad / Space\ - \ + Place, Berlin...).},\n address = {Genova, Italy},\n author = {Pascal Baltazar},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher\ - \ = {Casa Paganini},\n title = {Pyrogenesis},\n year = {2008}\n}\n" + \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ + \ = {Norwegian Academy of Music},\n title = {L2Ork},\n url = {https://vimeo.com/26678669},\n\ + \ url2 = {https://vimeo.com/26678662},\n url3 = {https://vimeo.com/26643771},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: Casa Paganini - title: Pyrogenesis - year: 2008 + publisher: Norwegian Academy of Music + title: L2Ork + url: https://vimeo.com/26678669 + url2: https://vimeo.com/26678662 + 
url3: https://vimeo.com/26643771 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2008-music-Miyama2008 - abstract: "Program notes:\n\"Keo\" is a performance for voice improvisation, Qgo\ - \ sensor instrument, and live electronics. The author attempts to realize three\ - \ concepts in the work. The first is \"dual-layered control,\" in which the performer\ - \ improvises phrases by singing and providing sound materials for a computer.\ - \ Simultaneously, he sends commands to the\ncomputer to process vocals using a\ - \ pair of sensor devices worn on both hands. between the visuality of the performance\ - \ and the musical\ngestures. In most parts of the performance, the movement of\ - \ the sensor instrument and the musical parameters are clearly connected. If the\ - \ performer moves his hand even slightly, particular aspects of the sound are\ - \ influenced in an obvious manner. The third is the strong connection between\ - \ music and theatricality. In several parts of this work, the body motions of\ - \ the performer not only control the sensor device, but also provide some theatrical\ - \ meanings.\n\nAbout the performer:\nChikashi Miyama received his BA(2002) and\ - \ MA(2004) from the Sonology Department, Kunitachi College of Music, Tokyo, Japan\ - \ and Nachdiplom(2007) from Elektronisches studio, Musik-Akademie der Stadt Basel,\ - \ Basel, Switzerland. He is currently attending the State University of New York\ - \ at Buffalo for his ph.D. He has studied under T.Rai, C.Lippe, E.Ona, and G.F.Haas.\ - \ His works, especially his interactive multimedia works, have been performed\ - \ at international festivals, such as June in Buffalo 2001 (New york, USA) , Mix\ - \ '02 (Arfus, Denmark), Musica Viva '03 (Coimbra, Portugal), Realtime/non-realtime\ - \ electronic music festival (Basel, Switzerland), Next generation'05 (Karlsruhe,\ - \ Germany), as well as various cities in Japan. 
His papers about his works and\ - \ realtime visual processing software \"DIPS\" have also been accepted by ICMC,\ - \ and presented at several SIGMUS conferences. Since 2005, he has been performing\ - \ as a laptop musician, employing his original sensor devices and involving himself\ - \ in several Media-art activities, such as Dorkbot, Shift-Festival, SPARK, and\ - \ SGMK workshops. His compositions have received honorable mention in the Residence\ - \ Prize section of the 30th International Electroacoustic Music Competition Bourges\ - \ and have been accepted by the International Computer Music Conference in 2004,\ - \ 2005, 2006 and 2007. Several works of him are published, including the Computer\ - \ Music Journal Vol.28 DVD by MIT press and the ICMC 2005 official CD." - address: 'Genova, Italy' - author: Chikashi Miyama - bibtex: "@inproceedings{nime2008-music-Miyama2008,\n abstract = {Program notes:\n\ - \"Keo\" is a performance for voice improvisation, Qgo sensor instrument, and live\ - \ electronics. The author attempts to realize three concepts in the work. The\ - \ first is \"dual-layered control,\" in which the performer improvises phrases\ - \ by singing and providing sound materials for a computer. Simultaneously, he\ - \ sends commands to the\ncomputer to process vocals using a pair of sensor devices\ - \ worn on both hands. between the visuality of the performance and the musical\n\ - gestures. In most parts of the performance, the movement of the sensor instrument\ - \ and the musical parameters are clearly connected. If the performer moves his\ - \ hand even slightly, particular aspects of the sound are influenced in an obvious\ - \ manner. 
The third is the strong connection between music and theatricality.\ - \ In several parts of this work, the body motions of the performer not only control\ - \ the sensor device, but also provide some theatrical meanings.\n\nAbout the performer:\n\ - Chikashi Miyama received his BA(2002) and MA(2004) from the Sonology Department,\ - \ Kunitachi College of Music, Tokyo, Japan and Nachdiplom(2007) from Elektronisches\ - \ studio, Musik-Akademie der Stadt Basel, Basel, Switzerland. He is currently\ - \ attending the State University of New York at Buffalo for his ph.D. He has studied\ - \ under T.Rai, C.Lippe, E.Ona, and G.F.Haas. His works, especially his interactive\ - \ multimedia works, have been performed at international festivals, such as June\ - \ in Buffalo 2001 (New york, USA) , Mix '02 (Arfus, Denmark), Musica Viva '03\ - \ (Coimbra, Portugal), Realtime/non-realtime electronic music festival (Basel,\ - \ Switzerland), Next generation'05 (Karlsruhe, Germany), as well as various cities\ - \ in Japan. His papers about his works and realtime visual processing software\ - \ \"DIPS\" have also been accepted by ICMC, and presented at several SIGMUS conferences.\ - \ Since 2005, he has been performing as a laptop musician, employing his original\ - \ sensor devices and involving himself in several Media-art activities, such as\ - \ Dorkbot, Shift-Festival, SPARK, and SGMK workshops. His compositions have received\ - \ honorable mention in the Residence Prize section of the 30th International Electroacoustic\ - \ Music Competition Bourges and have been accepted by the International Computer\ - \ Music Conference in 2004, 2005, 2006 and 2007. 
Several works of him are published,\ - \ including the Computer Music Journal Vol.28 DVD by MIT press and the ICMC 2005\ - \ official CD.},\n address = {Genova, Italy},\n author = {Chikashi Miyama},\n\ + ID: nime2011-music-Bokowiec2011 + abstract: "Program notes: V'Oct(Ritual) places the audience inside a circular liminal\ + \ space of sonic evocation and features the Bodycoder System© the first generation\ + \ of which was developed by the artists in 1995. The Bodycoder interface is a\ + \ flexible sensor array worn on the body of a performer that sends data generated\ + \ by movement to an MSP environment via radio. All vocalisations, decision making,\ + \ navigation of the MSP environment and qualities of expressivity are selected,\ + \ initiated and manipulated by the performer, uniquely, this also includes access\ + \ to gestural control of live 8-channel spatialization. This piece is fully scored\ + \ with few moments of improvisation.\n\nAbout the performers: Julie Wilson-Bokowiec:\ + \ has created new works in opera/music theatre, contemporary dance and theatre\ + \ and has worked with Lindsey Kemp, Genesis P-Orridge, Psychic TV and Hermann\ + \ Nitsch. Julie is a Research Fellow at CeReNem (Centre for Research in New Music)\ + \ at the University of Huddersfield.\nMark Bokowiec: is the manager of the electroacoustic\ + \ music studios and the Spacialization and Interactive Research Lab at the University\ + \ of Huddersfield where he also lectures in interactive performance, interface\ + \ design and composition. Mark began creating work with interactive technologies\ + \ in 1995." + address: 'Oslo, Norway' + author: Mark Bokowiec and Julie Wilson-Bokowiec + bibtex: "@inproceedings{nime2011-music-Bokowiec2011,\n abstract = {Program notes:\ + \ V'Oct(Ritual) places the audience inside a circular liminal space of sonic evocation\ + \ and features the Bodycoder System© the first generation of which was developed\ + \ by the artists in 1995. 
The Bodycoder interface is a flexible sensor array\ + \ worn on the body of a performer that sends data generated by movement to an\ + \ MSP environment via radio. All vocalisations, decision making, navigation of\ + \ the MSP environment and qualities of expressivity are selected, initiated and\ + \ manipulated by the performer, uniquely, this also includes access to gestural\ + \ control of live 8-channel spatialization. This piece is fully scored with few\ + \ moments of improvisation.\n\nAbout the performers: Julie Wilson-Bokowiec: has\ + \ created new works in opera/music theatre, contemporary dance and theatre and\ + \ has worked with Lindsey Kemp, Genesis P-Orridge, Psychic TV and Hermann Nitsch.\ + \ Julie is a Research Fellow at CeReNem (Centre for Research in New Music) at\ + \ the University of Huddersfield.\nMark Bokowiec: is the manager of the electroacoustic\ + \ music studios and the Spacialization and Interactive Research Lab at the University\ + \ of Huddersfield where he also lectures in interactive performance, interface\ + \ design and composition. 
Mark began creating work with interactive technologies\ + \ in 1995.},\n address = {Oslo, Norway},\n author = {Mark Bokowiec and Julie Wilson-Bokowiec},\n\ \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher\ - \ = {Casa Paganini},\n title = {Keo Improvisation for sensor instrument Qgo},\n\ - \ year = {2008}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - editor: Roberto Doati - month: June - publisher: Casa Paganini - title: Keo Improvisation for sensor instrument Qgo - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: nime2008-music-Hamel2008 - abstract: "Program notes:\nIntersecting Lines is a collaboration between clarinetist\ - \ François Houle, interactive video artist Aleksandra Dulic and computer music\ - \ composer Keith Hamel. The work grew out of Dulic's research in visual music\ - \ and involves mapping a live clarinet improvisation onto both the visual and\ - \ audio realms. In this work an intelligent system for visualization and signification\ - \ is used to develop and expand the musical material played by the clarinet. This\ - \ system monitors and interprets various nuances of the musical performance. The\ - \ clarinetist's improvisations, musical intentions, meanings and feelings are\ - \ enhanced and extended, both visually and aurally, by the computer system, so\ - \ that the various textures and gestured played by the performer have corresponding\ - \ visuals and computer-generated sounds. The melodic line, as played by the clarinet,\ - \ is used as the main compositional strategy for visualization. 
Since the control\ - \ input is based on a classical instrument, the strategy is based on calligraphic\ - \ line drawing using artistic rendering: the computer-generated line is drawn\ - \ in 3D space and rendered using expressive painterly and ink drawing styles.\ - \ The appearance of animated lines and textures portray a new artistic expression\ - \ that transforms a musical gesture onto a visual plane. Kenneth Newby made contributions\ - \ to the development of the animation software. This project was made possible\ - \ with generous support of Social Sciences and Humanities Research Council of\ - \ Canada.\n\nAbout the performers:\nFrançois Houle has established himself as\ - \ one of Canada's finest musicians. His performances and recordings transcend\ - \ the stylistic borders associated with his instrument in all of the diverse musical\ - \ spheres he embraces: classical, jazz, new music, improvised music, and world\ - \ music. As an improviser, he has developed a unique language, virtuosic and rich\ - \ with sonic embellishments and technical extensions. As a soloist and chamber\ - \ musician, he has actively expanded the clarinet's repertoire by commissioning\ - \ some of today's leading Canadian and international composers and premieringover\ - \ one hundred new works. An alumnus of McGill University and Yale University,\ - \ François has been an artist-in-residence at the Banff Centre for the Arts and\ - \ the Civitella Ranieri Foundation in Umbria, Italy. Now based in Vancouver, François\ - \ is a leader in the city's music community and is considered by many to be Canada's\ - \ leading exponent of the clarinet.\n\nKeith Hamel is a Professor in the School\ - \ of Music, an Associate Researcher at the Institute for Computing, Information\ - \ and Cognitive Systems (ICICS), a Researcher at the Media and Graphics Interdisciplinary\ - \ Centre (MAGIC) and Director of the Computer Music Studio at the University of\ - \ British Columbia. 
Keith Hamel has written both acoustic and electroacoustic\ - \ music and his works have been performed by many of the finest soloists and ensembles\ - \ both in Canada and abroad. Many of his recent compositions focus on interaction\ - \ between live performers and computer-controlled electronics.\n\nAleksandra Dulic\ - \ is media artist, theorist and experimental filmmaker working at the intersections\ - \ of multimedia and live performance with research foci in computational poetics,\ - \ interactive animation and cross-cultural media performance. She has received\ - \ a number of awards for her short animated films. She is active as a new media\ - \ artist, curator, a writer, an educator, teaching courses, presenting art projects\ - \ and publishing papers, across North America, Australia, Europe and Asia. She\ - \ received her Ph.D. from the School of Interactive Art and Technology, Simon\ - \ Fraser University in 2006. She is currently a Postdoctoral research fellow at\ - \ the Media and Graphics Interdisciplinary Centre, University of British Columbia\ - \ funded by Social Sciences and Humanities Research Council of Canada (SSHRC)." - address: 'Genova, Italy' - author: Keith Hamel and François Houle and Aleksandra Dulic - bibtex: "@inproceedings{nime2008-music-Hamel2008,\n abstract = {Program notes:\n\ - Intersecting Lines is a collaboration between clarinetist François Houle, interactive\ - \ video artist Aleksandra Dulic and computer music composer Keith Hamel. The work\ - \ grew out of Dulic's research in visual music and involves mapping a live clarinet\ - \ improvisation onto both the visual and audio realms. In this work an intelligent\ - \ system for visualization and signification is used to develop and expand the\ - \ musical material played by the clarinet. This system monitors and interprets\ - \ various nuances of the musical performance. 
The clarinetist's improvisations,\ - \ musical intentions, meanings and feelings are enhanced and extended, both visually\ - \ and aurally, by the computer system, so that the various textures and gestured\ - \ played by the performer have corresponding visuals and computer-generated sounds.\ - \ The melodic line, as played by the clarinet, is used as the main compositional\ - \ strategy for visualization. Since the control input is based on a classical\ - \ instrument, the strategy is based on calligraphic line drawing using artistic\ - \ rendering: the computer-generated line is drawn in 3D space and rendered using\ - \ expressive painterly and ink drawing styles. The appearance of animated lines\ - \ and textures portray a new artistic expression that transforms a musical gesture\ - \ onto a visual plane. Kenneth Newby made contributions to the development of\ - \ the animation software. This project was made possible with generous support\ - \ of Social Sciences and Humanities Research Council of Canada.\n\nAbout the performers:\n\ - François Houle has established himself as one of Canada's finest musicians. His\ - \ performances and recordings transcend the stylistic borders associated with\ - \ his instrument in all of the diverse musical spheres he embraces: classical,\ - \ jazz, new music, improvised music, and world music. As an improviser, he has\ - \ developed a unique language, virtuosic and rich with sonic embellishments and\ - \ technical extensions. As a soloist and chamber musician, he has actively expanded\ - \ the clarinet's repertoire by commissioning some of today's leading Canadian\ - \ and international composers and premieringover one hundred new works. An alumnus\ - \ of McGill University and Yale University, François has been an artist-in-residence\ - \ at the Banff Centre for the Arts and the Civitella Ranieri Foundation in Umbria,\ - \ Italy. 
Now based in Vancouver, François is a leader in the city's music community\ - \ and is considered by many to be Canada's leading exponent of the clarinet.\n\ - \nKeith Hamel is a Professor in the School of Music, an Associate Researcher at\ - \ the Institute for Computing, Information and Cognitive Systems (ICICS), a Researcher\ - \ at the Media and Graphics Interdisciplinary Centre (MAGIC) and Director of the\ - \ Computer Music Studio at the University of British Columbia. Keith Hamel has\ - \ written both acoustic and electroacoustic music and his works have been performed\ - \ by many of the finest soloists and ensembles both in Canada and abroad. Many\ - \ of his recent compositions focus on interaction between live performers and\ - \ computer-controlled electronics.\n\nAleksandra Dulic is media artist, theorist\ - \ and experimental filmmaker working at the intersections of multimedia and live\ - \ performance with research foci in computational poetics, interactive animation\ - \ and cross-cultural media performance. She has received a number of awards for\ - \ her short animated films. She is active as a new media artist, curator, a writer,\ - \ an educator, teaching courses, presenting art projects and publishing papers,\ - \ across North America, Australia, Europe and Asia. She received her Ph.D. 
from\ - \ the School of Interactive Art and Technology, Simon Fraser University in 2006.\ - \ She is currently a Postdoctoral research fellow at the Media and Graphics Interdisciplinary\ - \ Centre, University of British Columbia funded by Social Sciences and Humanities\ - \ Research Council of Canada (SSHRC).},\n address = {Genova, Italy},\n author\ - \ = {Keith Hamel and François Houle and Aleksandra Dulic},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ - \ title = {Intersecting Lines},\n year = {2008}\n}\n" + \ for Musical Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n\ + \ month = {June},\n publisher = {Norwegian Academy of Music},\n title = {V'Oct(Ritual)},\n\ + \ url = {https://vimeo.com/27694214},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: Casa Paganini - title: Intersecting Lines - year: 2008 + publisher: Norwegian Academy of Music + title: V'Oct(Ritual) + url: https://vimeo.com/27694214 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2008-music-Romero2008 - abstract: "Program notes:\nVISTAS(2005) - Choreography with video, one musician\ - \ playng live electronics and two dancers with metainstruments interacting with\ - \ the music. Divided in three scenes the work is conceptually based in the “self-other”\ - \ cognitive phenomena inspired by Edgar Morin's idea of the evolution of society\ - \ through interdisciplinary interaction. The interdisciplinary feature of the\ - \ piece is carefully constructed using 2 metainstruments that link the formal\ - \ elements in a structural way. This metainstruments are two wireless microphones\ - \ plugged into two stethoscopes attached to the dancers hands. 
The movements of\ - \ the dancers make the microphones generate an amplitude that is transmitted to\ - \ the computer and mapped into different music elements. Some live voice participations\ - \ from the dancers add dramatic accents to the piece. Vistas is en integral piece\ - \ in wich the music supports the choreography as well as the choreography gets\ - \ influenced by the music. The video supports the scene creating an abstract space\ - \ that changes and evolves according to the performance. The musical aesthetic\ - \ has Noise elements and voice sample manipulation playing with texture and density\ - \ contrast in a very dynamic way. The language of the choreography comes from\ - \ an exploration of the planes in a 3rd dimension space by separate first and\ - \ united later. The language is also influenced by the need to achieve the best\ - \ usage as possible of the metainstrument.\n\nAbout the performers:\nLos Platelmintos\ - \ are a group of artists, living in Mexico City, that work under the premise of\ - \ interdiscipline and experimentation. Dance, music and electronic media are fundamental\ - \ elements in their work.\n\nErnesto Romero: music composition and electronic\ - \ media. Studies Composition, Mathematics and Choir conduction in México. Chief\ - \ of the Audio Department at the National Center for the Arts in México where\ - \ he researches and developes technology applied to the arts.\n\nEsthel Vogrig\ - \ : Coreographer and dancer. Studies contemporary dance and coreography in México,\ - \ V ienna and the United States. Director of Los PLatelmintos company. Recipient\ - \ of the \"Grant for Investigation and Production of Art Works and New Media”\ - \ from the National Council of the Arts and the Multimedia Center in Mexico. This\ - \ grant was used to produce the piece Vistas.\n\nKarina Sánchez: Dancer. Studies\ - \ contemporary dance and coreography in Chile, Spain and México." 
- address: 'Genova, Italy' - author: Ernesto Romero and Esthel Vogrig - bibtex: "@inproceedings{nime2008-music-Romero2008,\n abstract = {Program notes:\n\ - VISTAS(2005) - Choreography with video, one musician playng live electronics and\ - \ two dancers with metainstruments interacting with the music. Divided in three\ - \ scenes the work is conceptually based in the “self-other” cognitive phenomena\ - \ inspired by Edgar Morin's idea of the evolution of society through interdisciplinary\ - \ interaction. The interdisciplinary feature of the piece is carefully constructed\ - \ using 2 metainstruments that link the formal elements in a structural way. This\ - \ metainstruments are two wireless microphones plugged into two stethoscopes attached\ - \ to the dancers hands. The movements of the dancers make the microphones generate\ - \ an amplitude that is transmitted to the computer and mapped into different music\ - \ elements. Some live voice participations from the dancers add dramatic accents\ - \ to the piece. Vistas is en integral piece in wich the music supports the choreography\ - \ as well as the choreography gets influenced by the music. The video supports\ - \ the scene creating an abstract space that changes and evolves according to the\ - \ performance. The musical aesthetic has Noise elements and voice sample manipulation\ - \ playing with texture and density contrast in a very dynamic way. The language\ - \ of the choreography comes from an exploration of the planes in a 3rd dimension\ - \ space by separate first and united later. The language is also influenced by\ - \ the need to achieve the best usage as possible of the metainstrument.\n\nAbout\ - \ the performers:\nLos Platelmintos are a group of artists, living in Mexico City,\ - \ that work under the premise of interdiscipline and experimentation. Dance, music\ - \ and electronic media are fundamental elements in their work.\n\nErnesto Romero:\ - \ music composition and electronic media. 
Studies Composition, Mathematics and\ - \ Choir conduction in México. Chief of the Audio Department at the National Center\ - \ for the Arts in México where he researches and developes technology applied\ - \ to the arts.\n\nEsthel Vogrig : Coreographer and dancer. Studies contemporary\ - \ dance and coreography in México, V ienna and the United States. Director of\ - \ Los PLatelmintos company. Recipient of the \"Grant for Investigation and Production\ - \ of Art Works and New Media” from the National Council of the Arts and the Multimedia\ - \ Center in Mexico. This grant was used to produce the piece Vistas.\n\nKarina\ - \ Sánchez: Dancer. Studies contemporary dance and coreography in Chile, Spain\ - \ and México.},\n address = {Genova, Italy},\n author = {Ernesto Romero and Esthel\ - \ Vogrig},\n booktitle = {Music Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n month\ - \ = {June},\n publisher = {Casa Paganini},\n title = {Vistas},\n year = {2008}\n\ - }\n" + ID: nime2011-music-Shiraishi2011 + abstract: "Program notes:\n\nmikro:strukt is a collaborative performance in which\ + \ the custom-built e-clambone provides an acoustic source for the ensuing audiovisual\ + \ environment. E-clambone is custom-built electronic instrument that consists\ + \ of an aerophone supplied with haptic sensors and digital signal processing algorithms.\ + \ The performance seeks to integrate elements of electro-acoustic improvisation,\ + \ timbre composition and artificial intelligence based approach to autonomous\ + \ audiovisual composition and explore micro level timbre composition in real time.\n\ + \nAbout the performers:\n\nSatoshi Shiraishi: Electro-acoustic instrument designer/performer\ + \ from Japan, currently living in The Hague, The Netherlands. He originally started\ + \ his music carrier as a rock guitarist. 
After the meeting with computer music,\ + \ he moved to The Netherlands to pursue his own way of playing computer generated\ + \ sound on a stage.\n\nAlo Allik: (Estonia) has a musically and geographically\ + \ restless lifestyle, which has taken him through diverse musical worlds including\ + \ DJ-ing and producing electronic dance music, live laptop jams, electroacoustic\ + \ composition, free improvisation, audiovisual installations and performances." + address: 'Oslo, Norway' + author: Satoshi Shiraishi and Alo Allik + bibtex: "@inproceedings{nime2011-music-Shiraishi2011,\n abstract = {Program notes:\n\ + \nmikro:strukt is a collaborative performance in which the custom-built e-clambone\ + \ provides an acoustic source for the ensuing audiovisual environment. E-clambone\ + \ is custom-built electronic instrument that consists of an aerophone supplied\ + \ with haptic sensors and digital signal processing algorithms. The performance\ + \ seeks to integrate elements of electro-acoustic improvisation, timbre composition\ + \ and artificial intelligence based approach to autonomous audiovisual composition\ + \ and explore micro level timbre composition in real time.\n\nAbout the performers:\n\ + \nSatoshi Shiraishi: Electro-acoustic instrument designer/performer from Japan,\ + \ currently living in The Hague, The Netherlands. He originally started his music\ + \ carrier as a rock guitarist. 
After the meeting with computer music, he moved\ + \ to The Netherlands to pursue his own way of playing computer generated sound\ + \ on a stage.\n\nAlo Allik: (Estonia) has a musically and geographically restless\ + \ lifestyle, which has taken him through diverse musical worlds including DJ-ing\ + \ and producing electronic dance music, live laptop jams, electroacoustic composition,\ + \ free improvisation, audiovisual installations and performances.},\n address\ + \ = {Oslo, Norway},\n author = {Satoshi Shiraishi and Alo Allik},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Kjell Tore Innervik and Ivar Frounberg},\n month =\ + \ {June},\n publisher = {Norwegian Academy of Music},\n title = {mikro:strukt},\n\ + \ url = {https://vimeo.com/27694202},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: Casa Paganini - title: Vistas - year: 2008 + publisher: Norwegian Academy of Music + title: mikro:strukt + url: https://vimeo.com/27694202 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2008-music-Messier2008 - abstract: "Program notes:\nThe Pencil Project is a performance piece created by\ - \ sound artists Martin Messier and Jacques Poulin-Denis. Their intention was to\ - \ craft a live electronic music piece inspired by the physicality of writing and\ - \ the imagery it articulates. The performers translate scribbling, scratching,\ - \ dotting and drawing with pencil music. The computers are hidden and untouched\ - \ throughout the piece, allowing object manipulation and the creation of sound\ - \ to be the performers' main focus.\nThe Pencil Project is about musicianship.\ - \ Liberated from the computer screen and equipped with hands-on objects, the performers\ - \ explore a new form of expressivity. 
Through an authentic and stimulating performance,\ - \ the musicians bring computer music intimately close to playing an actual musical\ - \ instrument.\n\nAbout the performers:\nMartin Messier: Holding a diploma in drums\ - \ for jazz interpretation, Martin Messier has completed a bachelor's degree in\ - \ electroacoustic composition at the University of Montreal, and De Montfort University\ - \ in England. Recently, Martin has founded a solo project called « et si l'aurore\ - \ disait oui... », through which he develops live electroacoustic performance\ - \ borrowing stylistic elements from Intelligent Dance Music, acousmatic and folk.\ - \ Based on strong aptitudes for rhythm, Martin's esthetic can be defined as a\ - \ complex, left field and happily strange sound amalgam, constantly playing with\ - \ construction and deconstruction.\n\nJacques Poulin-Denis is active in projects\ - \ that intersect theater, dance and music. He has completed his undergraduate\ - \ studies in electroacoustic composition from the University of Montreal, and\ - \ De Montfort University in England. Most of his music was composed for theater\ - \ and dance. Jacques explores innovative ways of presenting electro-acoustic music.\ - \ Jacques' musical style is evocative and filled with imagery. Combining traditional\ - \ and electronic instruments with anecdotic sound sources of everyday life, he\ - \ creates vibrant music that is fierce and poetic." - address: 'Genova, Italy' - author: Martin Messier and Jacques Poulin-Denis - bibtex: "@inproceedings{nime2008-music-Messier2008,\n abstract = {Program notes:\n\ - The Pencil Project is a performance piece created by sound artists Martin Messier\ - \ and Jacques Poulin-Denis. Their intention was to craft a live electronic music\ - \ piece inspired by the physicality of writing and the imagery it articulates.\ - \ The performers translate scribbling, scratching, dotting and drawing with pencil\ - \ music. 
The computers are hidden and untouched throughout the piece, allowing\ - \ object manipulation and the creation of sound to be the performers' main focus.\n\ - The Pencil Project is about musicianship. Liberated from the computer screen and\ - \ equipped with hands-on objects, the performers explore a new form of expressivity.\ - \ Through an authentic and stimulating performance, the musicians bring computer\ - \ music intimately close to playing an actual musical instrument.\n\nAbout the\ - \ performers:\nMartin Messier: Holding a diploma in drums for jazz interpretation,\ - \ Martin Messier has completed a bachelor's degree in electroacoustic composition\ - \ at the University of Montreal, and De Montfort University in England. Recently,\ - \ Martin has founded a solo project called « et si l'aurore disait oui... », through\ - \ which he develops live electroacoustic performance borrowing stylistic elements\ - \ from Intelligent Dance Music, acousmatic and folk. Based on strong aptitudes\ - \ for rhythm, Martin's esthetic can be defined as a complex, left field and happily\ - \ strange sound amalgam, constantly playing with construction and deconstruction.\n\ - \nJacques Poulin-Denis is active in projects that intersect theater, dance and\ - \ music. He has completed his undergraduate studies in electroacoustic composition\ - \ from the University of Montreal, and De Montfort University in England. Most\ - \ of his music was composed for theater and dance. Jacques explores innovative\ - \ ways of presenting electro-acoustic music. Jacques' musical style is evocative\ - \ and filled with imagery. 
Combining traditional and electronic instruments with\ - \ anecdotic sound sources of everyday life, he creates vibrant music that is fierce\ - \ and poetic.},\n address = {Genova, Italy},\n author = {Martin Messier and Jacques\ - \ Poulin-Denis},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n month\ - \ = {June},\n publisher = {Casa Paganini},\n title = {The Pencil Project},\n year\ - \ = {2008}\n}\n" + ID: nime2011-music-Overholt2011 + abstract: "Program notes:\n\nThis generative / improvisatory work uses an iPod Touch\ + \ and a tactile sound transducer attached to the Overtone Fiddle's resonant body\ + \ as a mobile system to lay out a variety of animated and transformed sound sources\ + \ over time.\n\nAbout the performers:\n\nDan Overholt: Associate Professor in\ + \ the Department of Architecture, Design and Media Technology at Aalborg University,\ + \ Denmark. He received a PhD in Media Arts and Technology from the University\ + \ of California, Santa Barbara, a M.S. from the MIT Media Lab, and studied Music\ + \ and Electronics Engineering and at CSU, Chico. As a musician, he composes and\ + \ performs internationally with experimental human-computer interfaces and musical\ + \ signal processing algorithms.\n\nLars Graugaard: Free-lance composer, laptop\ + \ performer and researcher. He holds a PhD in Artistic and Technological Challenges\ + \ of Interactive Music from Oxford Brookes University and a MS in flute performance\ + \ from the Royal Danish Academy of Music. His main interest is the systematic\ + \ study of music's expressive capacity applied to score composing, realtime interactive\ + \ performance, generative and emergent music." 
+ address: 'Oslo, Norway'
+ author: Dan Overholt and Lars Graugaard
+ bibtex: "@inproceedings{nime2011-music-Overholt2011,\n abstract = {Program notes:\n\
+ \nThis generative / improvisatory work uses an iPod Touch and a tactile sound\
+ \ transducer attached to the Overtone Fiddle's resonant body as a mobile system\
+ \ to lay out a variety of animated and transformed sound sources over time.\n\n\
+ About the performers:\n\nDan Overholt: Associate Professor in the Department of\
+ \ Architecture, Design and Media Technology at Aalborg University, Denmark. He\
+ \ received a PhD in Media Arts and Technology from the University of California,\
+ \ Santa Barbara, a M.S. from the MIT Media Lab, and studied Music and Electronics\
+ \ Engineering and at CSU, Chico. As a musician, he composes and performs internationally\
+ \ with experimental human-computer interfaces and musical signal processing algorithms.\n\
+ \nLars Graugaard: Free-lance composer, laptop performer and researcher. He holds\
+ \ a PhD in Artistic and Technological Challenges of Interactive Music from Oxford\
+ \ Brookes University and a MS in flute performance from the Royal Danish Academy\
+ \ of Music. His main interest is the systematic study of music's expressive capacity\
+ \ applied to score composing, realtime interactive performance, generative and\
+ \ emergent music.},\n address = {Oslo, Norway},\n author = {Dan Overholt and Lars\
+ \ Graugaard},\n booktitle = {Music Proceedings of the International Conference\
+ \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\
+ \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\
+ \ title = {Study No. 
1 for Overtone Fiddle},\n url = {https://vimeo.com/26661494},\n\ + \ year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: Casa Paganini - title: The Pencil Project - year: 2008 + publisher: Norwegian Academy of Music + title: Study No. 1 for Overtone Fiddle + url: https://vimeo.com/26661494 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2008-music-Favilla2008 - abstract: "Program notes:\nBent Leather Band introduces their new extended instrument\ - \ project, Heretics Brew. The aim of this project is to develop an extended line\ - \ up with the aim of building a larger ensemble. So far the project [quintet]\ - \ has developed a number of new extended saxophone controllers and is currently\ - \ working on trumpets and guitars. Their instruments are based on Gluion OSC,\ - \ interfaces; programmable frame gate array devices that have multiple configurable\ - \ inputs and outputs. For NIME08, the ensemble trio will demonstrate their instruments,\ - \ language and techniques through ensemble improvisation.\n\nAbout the performers:\n\ - Joanne Cannon, composer/improviser, is one of Australia's leading bassoonists.\ - \ Although she began her career as a professional orchestral musician, she now\ - \ works as a composer and improviser, exploring extended techniques. Stuart Favilla\ - \ has a background in composition and improvisation. Together they form the Bent\ - \ Leather Band, a duo that has been developing experimental electronic instruments\ - \ for over twenty years in Australia. Bent Leather Band blurs virtuosity and group\ - \ improvisation across a visual spectacle of stunning original instruments. These\ - \ were made in conjunction with Tasmanian leather artist, Garry Greenwood. 
The\ - \ instruments include fanciful dragon headed Light-Harps, leather Serpents and\ - \ Monsters that embody sensor interfaces, synthesis and signal processing technology.\ - \ Practicable and intuitive instruments, they have been built with multi-parameter\ - \ control in mind. Joint winners of the Karl Szucka Preis, their work of Bent\ - \ Leather has gained selection at Bourges and won the IAWM New Genre Prize.\n\ - Inspired by the legacy of Percy Grainger's Free music, i.e. “music beyond the\ - \ constraints of conventional pitch and rhythm” [Grainger, 1951], Bent Leather\ - \ Band has strived to develop a new musical language that exploits the potentials\ - \ of synthesis/signal processing, defining new expressive boundaries and dimensions\ - \ and yet also connecting with a heritage of Grainger's musical discourse. Grainger\ - \ conceived his music towards the end of the 19th Century, and spent in excess\ - \ of fifty years bringing his ideas to fruition through composition for theremin\ - \ ensemble, the development of 6th tone instruments [pianos and klaviers], the\ - \ development of polyphonic reed instruments for portamento control and a series\ - \ of paper roll, score driven electronic oscillator instruments.\n\nTony Hicks\ - \ enjoys a high profile reputation as Australia's most versatile woodwind artist.\ - \ Equally adept on saxophones, flutes and clarinets, his abilities span a broad\ - \ spectrum of music genres. A student of Dr. Peter Clinch, Tony also studied at\ - \ the Eastman School of Music. He has performed throughout Australia, and across\ - \ Europe, the United States, Japan and China with a number of leading Australian\ - \ ensembles including the Australian Art Orchestra, Elision, and the Peter Clinch\ - \ Saxophone Quartet. He has performed saxophone concertos with the Melbourne Symphony\ - \ Orchestra, and solo'd for Stevie Wonder and his band. 
As a jazz artist he has\ - \ performed and recorded with leading jazz figures Randy Brecker, Billy Cobham,\ - \ notable Australian artists, Paul Grabowsky, Joe Chindamo, David Jones, and also\ - \ lead a number of important groups in the local Australian scene. An explorer\ - \ of improvised music, he consistently collaborates with numerous artists both\ - \ in Australia and overseas." - address: 'Genova, Italy' - author: Stuart Favilla and Joanne Cannon and Tony Hicks - bibtex: "@inproceedings{nime2008-music-Favilla2008,\n abstract = {Program notes:\n\ - Bent Leather Band introduces their new extended instrument project, Heretics Brew.\ - \ The aim of this project is to develop an extended line up with the aim of building\ - \ a larger ensemble. So far the project [quintet] has developed a number of new\ - \ extended saxophone controllers and is currently working on trumpets and guitars.\ - \ Their instruments are based on Gluion OSC, interfaces; programmable frame gate\ - \ array devices that have multiple configurable inputs and outputs. For NIME08,\ - \ the ensemble trio will demonstrate their instruments, language and techniques\ - \ through ensemble improvisation.\n\nAbout the performers:\nJoanne Cannon, composer/improviser,\ - \ is one of Australia's leading bassoonists. Although she began her career as\ - \ a professional orchestral musician, she now works as a composer and improviser,\ - \ exploring extended techniques. Stuart Favilla has a background in composition\ - \ and improvisation. Together they form the Bent Leather Band, a duo that has\ - \ been developing experimental electronic instruments for over twenty years in\ - \ Australia. Bent Leather Band blurs virtuosity and group improvisation across\ - \ a visual spectacle of stunning original instruments. These were made in conjunction\ - \ with Tasmanian leather artist, Garry Greenwood. 
The instruments include fanciful\ - \ dragon headed Light-Harps, leather Serpents and Monsters that embody sensor\ - \ interfaces, synthesis and signal processing technology. Practicable and intuitive\ - \ instruments, they have been built with multi-parameter control in mind. Joint\ - \ winners of the Karl Szucka Preis, their work of Bent Leather has gained selection\ - \ at Bourges and won the IAWM New Genre Prize.\nInspired by the legacy of Percy\ - \ Grainger's Free music, i.e. “music beyond the constraints of conventional pitch\ - \ and rhythm” [Grainger, 1951], Bent Leather Band has strived to develop a new\ - \ musical language that exploits the potentials of synthesis/signal processing,\ - \ defining new expressive boundaries and dimensions and yet also connecting with\ - \ a heritage of Grainger's musical discourse. Grainger conceived his music towards\ - \ the end of the 19th Century, and spent in excess of fifty years bringing his\ - \ ideas to fruition through composition for theremin ensemble, the development\ - \ of 6th tone instruments [pianos and klaviers], the development of polyphonic\ - \ reed instruments for portamento control and a series of paper roll, score driven\ - \ electronic oscillator instruments.\n\nTony Hicks enjoys a high profile reputation\ - \ as Australia's most versatile woodwind artist. Equally adept on saxophones,\ - \ flutes and clarinets, his abilities span a broad spectrum of music genres. A\ - \ student of Dr. Peter Clinch, Tony also studied at the Eastman School of Music.\ - \ He has performed throughout Australia, and across Europe, the United States,\ - \ Japan and China with a number of leading Australian ensembles including the\ - \ Australian Art Orchestra, Elision, and the Peter Clinch Saxophone Quartet. He\ - \ has performed saxophone concertos with the Melbourne Symphony Orchestra, and\ - \ solo'd for Stevie Wonder and his band. 
As a jazz artist he has performed and\ - \ recorded with leading jazz figures Randy Brecker, Billy Cobham, notable Australian\ - \ artists, Paul Grabowsky, Joe Chindamo, David Jones, and also lead a number of\ - \ important groups in the local Australian scene. An explorer of improvised music,\ - \ he consistently collaborates with numerous artists both in Australia and overseas.},\n\ - \ address = {Genova, Italy},\n author = {Stuart Favilla and Joanne Cannon and\ - \ Tony Hicks},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n month\ - \ = {June},\n publisher = {Casa Paganini},\n title = {Heretic's Brew},\n year\ - \ = {2008}\n}\n" + ID: nime2011-music-DougVanNort2011 + abstract: "Program notes:\n\nThis piece is written in consideration of two distinct\ + \ paradigms: telematic music performance and human-machine improvisation. Specifically\ + \ this work is a structured improvisation for three humans and one intelligent\ + \ agent, being constrained by sections that determine which pairing (duos, trios)\ + \ of performers are active. Instrumentation also changes between sections in a\ + \ way that blurs the line of agency and intent between acoustic human performers,\ + \ laptop tablet-based human performer, and agent improviser, as the two remote\ + \ (NY, Stanford) acoustic performers (v-accordion, soprano saxophone) engage with\ + \ the on-stage laptop performer (GREIS system) and ambient presence of the agent\ + \ performer (spatialization, loops, textures).\n\nAbout the performers:\n\nDoug\ + \ Van Nort: Experimental musician and digital music researcher whose work includes\ + \ composition, improvisation, interactive system design and cross-disciplinary\ + \ collaboration. 
His writings can be found in Organised Sound and Leonardo Music\ + \ Journal among other publications, and his music is documented on Deep Listening,\ + \ Pogus and other labels.\n\nPauline Oliveros: (1932) is a composer and improviser,\ + \ teaches at RPI, plays a Roland V Accordion in solo and ensemble improvisations.\ + \ Her works are available through download, cassette, CD, DVD, and Vinyl releases.\ + \ Oliveros founded the Deep Listening Institute, Ltd. based in Kingston NY.\n\n\ + Jonas Braasch: Experimental soprano saxophonist and acoustician with interests\ + \ in Telematic Music and Intelligent Music Systems. He has performed with Curtis\ + \ Bahn, Chris Chafe, Michael Century, Mark Dresser, Pauline Oliveros, Doug van\ + \ Nort and Stuart Dempster -- among others. He currently directs the Communication\ + \ Acoustics and Aural Architecture Research Laboratory at RPI." + address: 'Oslo, Norway' + author: Doug Van Nort and Pauline Oliveros and Jonas Braasch + bibtex: "@inproceedings{nime2011-music-DougVanNort2011,\n abstract = {Program notes:\n\ + \nThis piece is written in consideration of two distinct paradigms: telematic\ + \ music performance and human-machine improvisation. Specifically this work is\ + \ a structured improvisation for three humans and one intelligent agent, being\ + \ constrained by sections that determine which pairing (duos, trios) of performers\ + \ are active. 
Instrumentation also changes between sections in a way that blurs\ + \ the line of agency and intent between acoustic human performers, laptop tablet-based\ + \ human performer, and agent improviser, as the two remote (NY, Stanford) acoustic\ + \ performers (v-accordion, soprano saxophone) engage with the on-stage laptop\ + \ performer (GREIS system) and ambient presence of the agent performer (spatialization,\ + \ loops, textures).\n\nAbout the performers:\n\nDoug Van Nort: Experimental musician\ + \ and digital music researcher whose work includes composition, improvisation,\ + \ interactive system design and cross-disciplinary collaboration. His writings\ + \ can be found in Organised Sound and Leonardo Music Journal among other publications,\ + \ and his music is documented on Deep Listening, Pogus and other labels.\n\nPauline\ + \ Oliveros: (1932) is a composer and improviser, teaches at RPI, plays a Roland\ + \ V Accordion in solo and ensemble improvisations. Her works are available through\ + \ download, cassette, CD, DVD, and Vinyl releases. Oliveros founded the Deep Listening\ + \ Institute, Ltd. based in Kingston NY.\n\nJonas Braasch: Experimental soprano\ + \ saxophonist and acoustician with interests in Telematic Music and Intelligent\ + \ Music Systems. 
He has performed with Curtis Bahn, Chris Chafe, Michael Century,\ + \ Mark Dresser, Pauline Oliveros, Doug van Nort and Stuart Dempster -- among others.\ + \ He currently directs the Communication Acoustics and Aural Architecture Research\ + \ Laboratory at RPI.},\n address = {Oslo, Norway},\n author = {Doug Van Nort and\ + \ Pauline Oliveros and Jonas Braasch},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher =\ + \ {Norwegian Academy of Music},\n title = {Distributed Composition #1},\n url\ + \ = {https://vimeo.com/27691551},\n year = {2011}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Kjell Tore Innervik and Ivar Frounberg month: June - publisher: Casa Paganini - title: Heretic's Brew - year: 2008 + publisher: Norwegian Academy of Music + title: "Distributed Composition #1" + url: https://vimeo.com/27691551 + year: 2011 - ENTRYTYPE: inproceedings - ID: nime2008-music-Bokowiec2008 - abstract: "Program notes:\nThe Suicided Voice is the second piece in the Vox Circuit\ - \ Trilogy, a series of interactive vocal works completed in 2007. In this piece\ - \ the acoustic voice of the performer is “suicided” and given up to digital processing\ - \ and physical re-embodiment. Dialogues are created between acoustic and digital\ - \ voices. Gender specific registers are willfully subverted and fractured. Extended\ - \ vocal techniques make available unusual acoustic resonances that generate rich\ - \ processing textures and spiral into new acoustic and physical trajectories that\ - \ traverse culturally specific boundaries crossing from the human into the virtual,\ - \ from the real into the mythical. 
The piece is fully scored, there are no pre-recorded\ - \ soundfiles used and no sound manipulation external to the performer's control.\n\ - In The Suicided Voice the sensor interface of the Bodycoder System is located\ - \ on the upper part of the torso. Movement data is mapped to live processing and\ - \ manipulation of sound and images. The Bodycoder also provides the performer\ - \ with real-time access to processing parameters and patches within the MSP environment.\ - \ All vocalisations, decisive navigation of the MSP environment and Kinaesonic\ - \ expressivity are selected, initiated and manipulated by the performer. The primary\ - \ expressive functionality of the Bodycoder System is Kinaesonic. The term Kinaesonic\ - \ is derived from the compound of two words: Kinaesthetic meaning the movement\ - \ principles of the body and Sonic meaning sound. In terms of interactive technology\ - \ the term Kinaesonic refers to the one-to-one, mapping of sonic effects to bodily\ - \ movements. In our practice this is usually executed in real-time. The Suicided\ - \ Voice was created in residency at the Banff Centre, Canada and completed in\ - \ the electro-acoustic music facilities of the University of Huddersfield.\n\n\ - About the performers:\n\nMark Bokowiec (Composer, Electronics & Software Designer):\ - \ Mark is the manager of the electro-acoustic music studios and the new Spacialization\ - \ and Interactive Research Lab at the University of Huddersfield. Mark lectures\ - \ in interactive performance, interface design and composition. Composition credits\ - \ include: Tricorder a work for two quarter tone recorders and live MSP, commissioned\ - \ by Ensemble QTR. Commissions for interactive instruments include: the LiteHarp\ - \ for London Science Museum and A Passage To India an interactive sound sculpture\ - \ commissioned by Wakefield City Art Gallery. 
CD releases include: Route (2001)\ - \ the complete soundtrack on MPS and Ghosts (2000) on Sonic Art from Aberdeen,\ - \ Glasgow, Huddersfield and Newcastle also on the MPS label. Mark is currently\ - \ working on an interactive hydro-acoustic installation.\n\nJulie Wilson-Bokowiec\ - \ (vocalist/performer, video and computer graphics): Julie has creating new works\ - \ in opera/music theatre, contemporary dance and theatre including: Salome (Hammersmith\ - \ Odeon – Harvey Goldsmith/Enid production) Suspended Sentences (ICA & touring)\ - \ Figure Three (ICA) for Julia Bardsley, Dorian Grey (LBT/Opera North), Alice\ - \ (LBT) and a variety of large-scale site-specific and Body Art works. As a performer\ - \ and collaborator Julie has worked with such luminaries as Lindsey Kemp, Genesis\ - \ P-Orridge and Psychic TV and the notorious Austrian artist Hermann Nitsch. Julie\ - \ and Mark began creating work with interactive technologies in 1995 developing\ - \ the first generation of the Bodycoder System in 1996." - address: 'Genova, Italy' - author: Mark A. Bokowiec and Julie Wilson-Bokowiec - bibtex: "@inproceedings{nime2008-music-Bokowiec2008,\n abstract = {Program notes:\n\ - The Suicided Voice is the second piece in the Vox Circuit Trilogy, a series of\ - \ interactive vocal works completed in 2007. In this piece the acoustic voice\ - \ of the performer is “suicided” and given up to digital processing and physical\ - \ re-embodiment. Dialogues are created between acoustic and digital voices. Gender\ - \ specific registers are willfully subverted and fractured. Extended vocal techniques\ - \ make available unusual acoustic resonances that generate rich processing textures\ - \ and spiral into new acoustic and physical trajectories that traverse culturally\ - \ specific boundaries crossing from the human into the virtual, from the real\ - \ into the mythical. 
The piece is fully scored, there are no pre-recorded soundfiles\ - \ used and no sound manipulation external to the performer's control.\nIn The\ - \ Suicided Voice the sensor interface of the Bodycoder System is located on the\ - \ upper part of the torso. Movement data is mapped to live processing and manipulation\ - \ of sound and images. The Bodycoder also provides the performer with real-time\ - \ access to processing parameters and patches within the MSP environment. All\ - \ vocalisations, decisive navigation of the MSP environment and Kinaesonic expressivity\ - \ are selected, initiated and manipulated by the performer. The primary expressive\ - \ functionality of the Bodycoder System is Kinaesonic. The term Kinaesonic is\ - \ derived from the compound of two words: Kinaesthetic meaning the movement principles\ - \ of the body and Sonic meaning sound. In terms of interactive technology the\ - \ term Kinaesonic refers to the one-to-one, mapping of sonic effects to bodily\ - \ movements. In our practice this is usually executed in real-time. The Suicided\ - \ Voice was created in residency at the Banff Centre, Canada and completed in\ - \ the electro-acoustic music facilities of the University of Huddersfield.\n\n\ - About the performers:\n\nMark Bokowiec (Composer, Electronics & Software Designer):\ - \ Mark is the manager of the electro-acoustic music studios and the new Spacialization\ - \ and Interactive Research Lab at the University of Huddersfield. Mark lectures\ - \ in interactive performance, interface design and composition. Composition credits\ - \ include: Tricorder a work for two quarter tone recorders and live MSP, commissioned\ - \ by Ensemble QTR. Commissions for interactive instruments include: the LiteHarp\ - \ for London Science Museum and A Passage To India an interactive sound sculpture\ - \ commissioned by Wakefield City Art Gallery. 
CD releases include: Route (2001)\ - \ the complete soundtrack on MPS and Ghosts (2000) on Sonic Art from Aberdeen,\ - \ Glasgow, Huddersfield and Newcastle also on the MPS label. Mark is currently\ - \ working on an interactive hydro-acoustic installation.\n\nJulie Wilson-Bokowiec\ - \ (vocalist/performer, video and computer graphics): Julie has creating new works\ - \ in opera/music theatre, contemporary dance and theatre including: Salome (Hammersmith\ - \ Odeon – Harvey Goldsmith/Enid production) Suspended Sentences (ICA & touring)\ - \ Figure Three (ICA) for Julia Bardsley, Dorian Grey (LBT/Opera North), Alice\ - \ (LBT) and a variety of large-scale site-specific and Body Art works. As a performer\ - \ and collaborator Julie has worked with such luminaries as Lindsey Kemp, Genesis\ - \ P-Orridge and Psychic TV and the notorious Austrian artist Hermann Nitsch. Julie\ - \ and Mark began creating work with interactive technologies in 1995 developing\ - \ the first generation of the Bodycoder System in 1996.},\n address = {Genova,\ - \ Italy},\n author = {Mark A. Bokowiec and Julie Wilson-Bokowiec},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher = {Casa\ - \ Paganini},\n title = {The Suicided Voice},\n year = {2008}\n}\n" + ID: nime2011-music-Schorno2011 + abstract: "Program notes: The formalistic identity of ``7-of-12'' consists of a\ + \ showcase format for ``penta digit instrumental inventions'' diffused in quadrophonic\ + \ audio and 3d interactive video projection. The dialectic intertwining of Karlsson's\ + \ abstract art and Schorno's sonetic world extends into a composition of 12''\ + \ duration. Eponymous instrument group ``EIG''' consist of two former classmates\ + \ of Sonology where they among other things studied the making of alternative\ + \ electronic instruments. 
The performance``7-of-12 dialectologies'' is an outcome\ + \ of collaborated teachings and methodology in dialogue with past performances.\n\ + \nAbout the performers: Daniel Schorno: composer, born in Zurich in 1963. Studied\ + \ composition in London with Melanie Daiken and electronic and computer music\ + \ in The Hague, with Joel Ryan and Clarence Barlow. Invited by Michel Waisvisz\ + \ he led STEIM - the re-nown Dutch Studio for Electro Instrumental Music, and\ + \ home of ``New Instruments'' - as Artistic Director until 2005. He is currently\ + \ STEIM's composer-in-research and creative project advisor.\nHaraldur Karlsson:\ + \ visual artist, born in Reykjavik 1967. Haraldur studied Multi-media in the art\ + \ academy in Iceland, Media-art in AKI in Enschede and Sonology in the Royal conservatories\ + \ The Hague. Haraldur is mainly focused on interactive audio/video/3D installations\ + \ and performances, and instrumental computer controllers. His fire instrument\ + \ ``TFI''' is part of the Little Solarsystem ``LSS'' navigation system that is\ + \ an audio/video/3D performance." + address: 'Oslo, Norway' + author: Daniel Schorno and Haraldur Karlsson + bibtex: "@inproceedings{nime2011-music-Schorno2011,\n abstract = {Program notes:\ + \ The formalistic identity of ``7-of-12'' consists of a showcase format for ``penta\ + \ digit instrumental inventions'' diffused in quadrophonic audio and 3d interactive\ + \ video projection. The dialectic intertwining of Karlsson's abstract art and\ + \ Schorno's sonetic world extends into a composition of 12'' duration. Eponymous\ + \ instrument group ``EIG''' consist of two former classmates of Sonology where\ + \ they among other things studied the making of alternative electronic instruments.\ + \ The performance``7-of-12 dialectologies'' is an outcome of collaborated teachings\ + \ and methodology in dialogue with past performances.\n\nAbout the performers:\ + \ Daniel Schorno: composer, born in Zurich in 1963. 
Studied composition in London\ + \ with Melanie Daiken and electronic and computer music in The Hague, with Joel\ + \ Ryan and Clarence Barlow. Invited by Michel Waisvisz he led STEIM - the re-nown\ + \ Dutch Studio for Electro Instrumental Music, and home of ``New Instruments''\ + \ - as Artistic Director until 2005. He is currently STEIM's composer-in-research\ + \ and creative project advisor.\nHaraldur Karlsson: visual artist, born in Reykjavik\ + \ 1967. Haraldur studied Multi-media in the art academy in Iceland, Media-art\ + \ in AKI in Enschede and Sonology in the Royal conservatories The Hague. Haraldur\ + \ is mainly focused on interactive audio/video/3D installations and performances,\ + \ and instrumental computer controllers. His fire instrument ``TFI''' is part\ + \ of the Little Solarsystem ``LSS'' navigation system that is an audio/video/3D\ + \ performance.},\n address = {Oslo, Norway},\n author = {Daniel Schorno and Haraldur\ + \ Karlsson},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Kjell Tore Innervik and\ + \ Ivar Frounberg},\n month = {June},\n publisher = {Norwegian Academy of Music},\n\ + \ title = {7-of-12 dialectologies},\n url = {https://vimeo.com/27694220},\n year\ + \ = {2011}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Kjell Tore Innervik and Ivar Frounberg + month: June + publisher: Norwegian Academy of Music + title: 7-of-12 dialectologies + url: https://vimeo.com/27694220 + year: 2011 + + +- ENTRYTYPE: inproceedings + ID: nime2011-music-Dahl2011 + abstract: "Program notes: TweetDreams uses real-time Twitter data to generate music\ + \ and visuals. During the performance tweets containing specific search terms\ + \ are retrieved from Twitter. 
Each tweet is displayed and plays a short melody.\ + \ Tweets are grouped into trees of related tweets, which are given similar melodies.\ + \ We invite the audience to participate in TweetDreams by tweeting during performance\ + \ with the term #Nime2011. This term is used to identify tweets from the audience\ + \ and performers. Global search terms are used to bring the world into the performance.\ + \ Any tweet with these terms occurring anywhere in the world becomes part of the\ + \ piece.\n\nAbout the performers: Luke Dahl: Musician and engineer currently pursuing\ + \ a PhD at Stanford University's CCRMA. His research interests include new musical\ + \ instruments and performance ensembles, musical gesture, rhythm perception, and\ + \ MIR. He has composed works for the Stanford Laptop and Mobile Phone Orchestras\ + \ and also creates electronic dance music.\nCarr Wilkerson: System Administrator\ + \ at CCRMA specializing in Linux and Mac OS systems. He is a controller and software\ + \ system builder and sometime performer/impresario, instructor and researcher." + address: 'Oslo, Norway' + author: Luke Dahl and Carr Wilkerson + bibtex: "@inproceedings{nime2011-music-Dahl2011,\n abstract = {Program notes: TweetDreams\ + \ uses real-time Twitter data to generate music and visuals. During the performance\ + \ tweets containing specific search terms are retrieved from Twitter. Each tweet\ + \ is displayed and plays a short melody. Tweets are grouped into trees of related\ + \ tweets, which are given similar melodies. We invite the audience to participate\ + \ in TweetDreams by tweeting during performance with the term \\emph{\\#Nime2011}.\ + \ This term is used to identify tweets from the audience and performers. Global\ + \ search terms are used to bring the world into the performance. 
Any tweet with\ + \ these terms occurring anywhere in the world becomes part of the piece.\n\nAbout\ + \ the performers: Luke Dahl: Musician and engineer currently pursuing a PhD at\ + \ Stanford University's CCRMA. His research interests include new musical instruments\ + \ and performance ensembles, musical gesture, rhythm perception, and MIR. He has\ + \ composed works for the Stanford Laptop and Mobile Phone Orchestras and also\ + \ creates electronic dance music.\nCarr Wilkerson: System Administrator at CCRMA\ + \ specializing in Linux and Mac OS systems. He is a controller and software system\ + \ builder and sometime performer/impresario, instructor and researcher.},\n address\ + \ = {Oslo, Norway},\n author = {Luke Dahl and Carr Wilkerson},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Kjell Tore Innervik and Ivar Frounberg},\n month = {June},\n publisher\ + \ = {Norwegian Academy of Music},\n title = {TweetDreams},\n url = {https://vimeo.com/27694232},\n\ + \ year = {2011}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Kjell Tore Innervik and Ivar Frounberg + month: June + publisher: Norwegian Academy of Music + title: TweetDreams + url: https://vimeo.com/27694232 + year: 2011 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Burke2016 + abstract: 'Program notes: Coral Bells explores the diverse overtone, microtone sounds + and origins of the Federation Hand Bells and Bass clarinet into the visual with + discrete sounds of the ecosystems of coral from Fitzroy Island Northern Australia. + This creation brings a new life to the Federation Hand Bells providing deepening + connections with the Australian landscape. 
It is the conversation of between the + audio and dead coral from that accentuates the audio-visual reflecting both the + translucent Federation Bell sounds, Bass clarinet, glass and dead coral. The acoustic + resonators vibrates with the coral and are recreated into visuals of moving glass + objects. These sounds transform into acousmatic sounds. The colors and texture + within the visuals are layered white/grey, sepia, hints of pastel colours, burnt + reds, yellows and gold images that are layered to create a thick timbral texture + to form the video voice. The sounds of subtle high pitched Bells and gritty sand + sounds with the Bass clarinet periodically joining the drones with discordant + multiphonics and flourishes of notes dominate throughout. Subsequent acoustic + and visual motifs capture and emerge sonically/visually creating timbre layers + of the interpreted coral and glass reflections.' + address: 'Brisbane, Australia' + author: Brigid Burke + bibtex: "@inproceedings{nime2016-music-Burke2016,\n abstract = {Program notes: Coral\ + \ Bells explores the diverse overtone, microtone sounds and origins of the Federation\ + \ Hand Bells and Bass clarinet into the visual with discrete sounds of the ecosystems\ + \ of coral from Fitzroy Island Northern Australia. This creation brings a new\ + \ life to the Federation Hand Bells providing deepening connections with the Australian\ + \ landscape. It is the conversation of between the audio and dead coral from that\ + \ accentuates the audio-visual reflecting both the translucent Federation Bell\ + \ sounds, Bass clarinet, glass and dead coral. The acoustic resonators vibrates\ + \ with the coral and are recreated into visuals of moving glass objects. These\ + \ sounds transform into acousmatic sounds. 
The colors and texture within the visuals\ + \ are layered white/grey, sepia, hints of pastel colours, burnt reds, yellows\ + \ and gold images that are layered to create a thick timbral texture to form the\ + \ video voice. The sounds of subtle high pitched Bells and gritty sand sounds\ + \ with the Bass clarinet periodically joining the drones with discordant multiphonics\ + \ and flourishes of notes dominate throughout. Subsequent acoustic and visual\ + \ motifs capture and emerge sonically/visually creating timbre layers of the interpreted\ + \ coral and glass reflections.},\n address = {Brisbane, Australia},\n author =\ + \ {Brigid Burke},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Andrew Brown and Toby\ + \ Gifford},\n month = {June},\n publisher = {Griffith University},\n title = {Coral\ + \ Bells Movt.2},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Andrew Brown and Toby Gifford month: June - publisher: Casa Paganini - title: The Suicided Voice - year: 2008 + publisher: Griffith University + title: Coral Bells Movt.2 + year: 2016 - ENTRYTYPE: inproceedings - ID: nime2008-music-Bokowiec2008 - abstract: "Program notes:\nEtch is the third work in the Vox Circuit Trilogy (2007).\ - \ In Etch extended vocal techniques, Yakut and Bell Canto singing, are coupled\ - \ with live interactive sound processing and manipulation. Etch calls forth fauna,\ - \ building soundscapes of glitch infestations, howler tones, clustering sonic-amphibians,\ - \ and swirling flocks of synthetic granular flyers. All sounds are derived from\ - \ the live acoustic voice of the performer. 
There are no pre-recorded soundfiles\ - \ used in this piece and no sound manipulation external to the performer's control.\ - \ The ability to initiate, embody and manipulate both the acoustic sound and multiple\ - \ layers of processed sound manipulated simultaneously on the limbs – requires\ - \ a unique kind of perceptual, physical and aural precision. This is particularly\ - \ evident at moments when the source vocal articulates of the performer, unheard\ - \ in the diffused soundscape, enter as seemingly phantom sound cells pitch-changed,\ - \ fractured and heavily processed. In such instances the sung score, and the diffused\ - \ and physically manipulated soundscape seem to separate and the performer is\ - \ seen working in counterpoint, articulating an unheard score. Etch is punctuated\ - \ by such separations and correlations, by choric expansions, intricate micro\ - \ constructions and moments when the acoustic voice of the performer soars over\ - \ and through the soundscape.\nAlthough the Bodycoder interface configuration\ - \ for Etch is similar to that of The Suicided Voice, located on the upper torso\ - \ - the functional protocols and qualities of physical expressivity are completely\ - \ different. Interface flexibility is a key feature of the Bodycoder System and\ - \ allows for the development of interactive works unrestrained by interface limitations\ - \ or fixed protocols. The flexibility of the interface does however present a\ - \ number of challenges for the performer who must be able to adapt to new protocols,\ - \ adjust and temper her physical expressivity to the requirements of each piece.\n\ - The visual content of both Etch and The Suicided Voice was created in a variety\ - \ of 2D and 3D packages using original photographic and video material. Images\ - \ are processed and manipulated using the same interactive protocols that govern\ - \ sound manipulation. 
Content and processing is mapped to the physical gestures\ - \ of the performer. As the performer conjures extraordinary voices out of the\ - \ digital realm, so she weaves a multi-layered visual environment combining sound,\ - \ gesture and image to form a powerful 'linguistic intent'.\nEtch was created\ - \ in residency at the Confederation Centre for the Arts on Prince Edward Island,\ - \ Nova Scotia in June 2007.\n\nAbout the performers:\nMark Bokowiec (Composer,\ - \ Electronics & Software Designer). Mark is the manager of the electro-acoustic\ - \ music studios and the new Spacialization and Interactive Research Lab at the\ - \ University of Huddersfield. Mark lectures in interactive performance, interface\ - \ design and composition. Composition credits include: Tricorder a work for two\ - \ quarter tone recorders and live MSP, commissioned by Ensemble QTR. Commissions\ - \ for interactive instruments include: the LiteHarp for London Science Museum\ - \ and A Passage To India an interactive sound sculpture commissioned by Wakefield\ - \ City Art Gallery. CD releases include: Route (2001) the complete soundtrack\ - \ on MPS and Ghosts (2000) on Sonic Art from Aberdeen, Glasgow, Huddersfield and\ - \ Newcastle also on the MPS label. Mark is currently working on an interactive\ - \ hydro-acoustic installation.\n\nJulie Wilson-Bokowiec (vocalist/performer, video\ - \ and computer graphics). Julie has creating new works in opera/music theatre,\ - \ contemporary dance and theatre including: Salome (Hammersmith Odeon – Harvey\ - \ Goldsmith/Enid production) Suspended Sentences (ICA & touring) Figure Three\ - \ (ICA) for Julia Bardsley, The Red Room (Canal Café Theatre) nominated for the\ - \ Whitbread London Fringe Theatre Award, Dorian Grey (LBT/Opera North), Alice\ - \ (LBT) and a variety of large- scale site-specific and Body Art works. 
As a performer\ - \ and collaborator Julie has worked with such luminaries as Lindsey Kemp, Genesis\ - \ P-Orridge and Psychic TV and the notorious Austrian artist Hermann Nitsch. She\ - \ guest lectures in digital performance at a number of University centres, and\ - \ together with Mark, regularly publishes articles on interactive performance\ - \ practice.\n\nJulie and Mark began creating work with interactive technologies\ - \ in 1995 developing the first generation of the Bodycoder System an on- the-body\ - \ sensor interface that uses radio to transmit data in 1996. They have created\ - \ and performed work with the Bodycoder System at various events and venues across\ - \ Europe the US and Canada and at artist gatherings including ISEA and ICMC. Major\ - \ works include Spiral Fiction (2002) commissioned by Digital Summer (cultural\ - \ programme of the Commonwealth Games, Manchester). Cyborg Dreaming (2000/1) commissioned\ - \ by the Science Museum, London. Zeitgeist at the KlangArt Festival and Lifting\ - \ Bodies (1999) at the Trafo, Budapest as featured artists at the Hungarian Computer\ - \ Music Foundation Festival NEW WAVES supported by the British Council." - address: 'Genova, Italy' - author: Mark A. Bokowiec and Julie Wilson-Bokowiec - bibtex: "@inproceedings{nime2008-music-Bokowiec2008,\n abstract = {Program notes:\n\ - Etch is the third work in the Vox Circuit Trilogy (2007). In Etch extended vocal\ - \ techniques, Yakut and Bell Canto singing, are coupled with live interactive\ - \ sound processing and manipulation. Etch calls forth fauna, building soundscapes\ - \ of glitch infestations, howler tones, clustering sonic-amphibians, and swirling\ - \ flocks of synthetic granular flyers. All sounds are derived from the live acoustic\ - \ voice of the performer. There are no pre-recorded soundfiles used in this piece\ - \ and no sound manipulation external to the performer's control. 
The ability to\ - \ initiate, embody and manipulate both the acoustic sound and multiple layers\ - \ of processed sound manipulated simultaneously on the limbs – requires a unique\ - \ kind of perceptual, physical and aural precision. This is particularly evident\ - \ at moments when the source vocal articulates of the performer, unheard in the\ - \ diffused soundscape, enter as seemingly phantom sound cells pitch-changed, fractured\ - \ and heavily processed. In such instances the sung score, and the diffused and\ - \ physically manipulated soundscape seem to separate and the performer is seen\ - \ working in counterpoint, articulating an unheard score. Etch is punctuated by\ - \ such separations and correlations, by choric expansions, intricate micro constructions\ - \ and moments when the acoustic voice of the performer soars over and through\ - \ the soundscape.\nAlthough the Bodycoder interface configuration for Etch is\ - \ similar to that of The Suicided Voice, located on the upper torso - the functional\ - \ protocols and qualities of physical expressivity are completely different. Interface\ - \ flexibility is a key feature of the Bodycoder System and allows for the development\ - \ of interactive works unrestrained by interface limitations or fixed protocols.\ - \ The flexibility of the interface does however present a number of challenges\ - \ for the performer who must be able to adapt to new protocols, adjust and temper\ - \ her physical expressivity to the requirements of each piece.\nThe visual content\ - \ of both Etch and The Suicided Voice was created in a variety of 2D and 3D packages\ - \ using original photographic and video material. Images are processed and manipulated\ - \ using the same interactive protocols that govern sound manipulation. Content\ - \ and processing is mapped to the physical gestures of the performer. 
As the performer\ - \ conjures extraordinary voices out of the digital realm, so she weaves a multi-layered\ - \ visual environment combining sound, gesture and image to form a powerful 'linguistic\ - \ intent'.\nEtch was created in residency at the Confederation Centre for the\ - \ Arts on Prince Edward Island, Nova Scotia in June 2007.\n\nAbout the performers:\n\ - Mark Bokowiec (Composer, Electronics & Software Designer). Mark is the manager\ - \ of the electro-acoustic music studios and the new Spacialization and Interactive\ - \ Research Lab at the University of Huddersfield. Mark lectures in interactive\ - \ performance, interface design and composition. Composition credits include:\ - \ Tricorder a work for two quarter tone recorders and live MSP, commissioned by\ - \ Ensemble QTR. Commissions for interactive instruments include: the LiteHarp\ - \ for London Science Museum and A Passage To India an interactive sound sculpture\ - \ commissioned by Wakefield City Art Gallery. CD releases include: Route (2001)\ - \ the complete soundtrack on MPS and Ghosts (2000) on Sonic Art from Aberdeen,\ - \ Glasgow, Huddersfield and Newcastle also on the MPS label. Mark is currently\ - \ working on an interactive hydro-acoustic installation.\n\nJulie Wilson-Bokowiec\ - \ (vocalist/performer, video and computer graphics). Julie has creating new works\ - \ in opera/music theatre, contemporary dance and theatre including: Salome (Hammersmith\ - \ Odeon – Harvey Goldsmith/Enid production) Suspended Sentences (ICA & touring)\ - \ Figure Three (ICA) for Julia Bardsley, The Red Room (Canal Café Theatre) nominated\ - \ for the Whitbread London Fringe Theatre Award, Dorian Grey (LBT/Opera North),\ - \ Alice (LBT) and a variety of large- scale site-specific and Body Art works.\ - \ As a performer and collaborator Julie has worked with such luminaries as Lindsey\ - \ Kemp, Genesis P-Orridge and Psychic TV and the notorious Austrian artist Hermann\ - \ Nitsch. 
She guest lectures in digital performance at a number of University\ - \ centres, and together with Mark, regularly publishes articles on interactive\ - \ performance practice.\n\nJulie and Mark began creating work with interactive\ - \ technologies in 1995 developing the first generation of the Bodycoder System\ - \ an on- the-body sensor interface that uses radio to transmit data in 1996. They\ - \ have created and performed work with the Bodycoder System at various events\ - \ and venues across Europe the US and Canada and at artist gatherings including\ - \ ISEA and ICMC. Major works include Spiral Fiction (2002) commissioned by Digital\ - \ Summer (cultural programme of the Commonwealth Games, Manchester). Cyborg Dreaming\ - \ (2000/1) commissioned by the Science Museum, London. Zeitgeist at the KlangArt\ - \ Festival and Lifting Bodies (1999) at the Trafo, Budapest as featured artists\ - \ at the Hungarian Computer Music Foundation Festival NEW WAVES supported by the\ - \ British Council.},\n address = {Genova, Italy},\n author = {Mark A. Bokowiec\ - \ and Julie Wilson-Bokowiec},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Roberto Doati},\n\ - \ month = {June},\n publisher = {Casa Paganini},\n title = {Etch},\n year = {2008}\n\ - }\n" + ID: nime2016-music-Mulder2016 + abstract: 'Program notes: The performance is part of the ongoing research project + into Karlheinz Stockhausen’s historic work Solo (Solo, für Melodie-Instrument + mit Rückkopplung 1965-6). Together with my colleague Dr. Juan Parra Cancino from + ORCIM Ghent we are teasing out the consequences of the (now common) software replacement + of the elaborate tape delay system that was used in the time of the work’s inception.' 
+ address: 'Brisbane, Australia' + author: Johannes Mulder + bibtex: "@inproceedings{nime2016-music-Mulder2016,\n abstract = {Program notes:\ + \ The performance is part of the ongoing research project into Karlheinz Stockhausen’s\ + \ historic work Solo (Solo, für Melodie-Instrument mit Rückkopplung 1965-6). Together\ + \ with my colleague Dr. Juan Parra Cancino from ORCIM Ghent we are teasing out\ + \ the consequences of the (now common) software replacement of the elaborate tape\ + \ delay system that was used in the time of the work’s inception.},\n address\ + \ = {Brisbane, Australia},\n author = {Johannes Mulder},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {On Solo},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: On Solo + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Gillies2016 + abstract: "Program notes: Working almost exclusively at a very soft volume, Shelter\ + \ inverts the relationships between the source sound material and it’s experience\ + \ in the real world, placing very large sounds (sourced from field recordings)\ + \ at the threshold of audibility while audio artifacts are brought to the forefront\ + \ of our focus to act as recognisable musical material. By utilising a soft dynamic,\ + \ all audience members are able to hear each channel more equally, regardless\ + \ of their position in the performance space. This new version for bass clarinet,\ + \ electric guitar, and electronics expands the original electronic composition\ + \ into something more lively and environmentally focused. 
The compositional intentions\ + \ of the original Shelter remain at play here - this version still seeks to address\ + \ the assumptions of multichannel listening, while affecting an environment of\ + \ sound in preference to an experience of sound. However, this electroacoustic\ + \ version adds a little bit of much needed chaos, allowing performers to interact\ + \ and manipulate this sonic environment.\n\nAbout the performers:\n\nCat Hope\ + \ - Bass Flute\nLindsay Vickery - Bass Clarinet\nAaron Wyatt - Viola" + address: 'Brisbane, Australia' + author: Sam Gillies + bibtex: "@inproceedings{nime2016-music-Gillies2016,\n abstract = {Program notes:\ + \ Working almost exclusively at a very soft volume, Shelter inverts the relationships\ + \ between the source sound material and it’s experience in the real world, placing\ + \ very large sounds (sourced from field recordings) at the threshold of audibility\ + \ while audio artifacts are brought to the forefront of our focus to act as recognisable\ + \ musical material. By utilising a soft dynamic, all audience members are able\ + \ to hear each channel more equally, regardless of their position in the performance\ + \ space. This new version for bass clarinet, electric guitar, and electronics\ + \ expands the original electronic composition into something more lively and environmentally\ + \ focused. 
The compositional intentions of the original Shelter remain at play\ + \ here - this version still seeks to address the assumptions of multichannel listening,\ + \ while affecting an environment of sound in preference to an experience of sound.\ + \ However, this electroacoustic version adds a little bit of much needed chaos,\ + \ allowing performers to interact and manipulate this sonic environment.\n\nAbout\ + \ the performers:\n\nCat Hope - Bass Flute\nLindsay Vickery - Bass Clarinet\n\ + Aaron Wyatt - Viola},\n address = {Brisbane, Australia},\n author = {Sam Gillies},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Andrew Brown and Toby Gifford},\n month\ + \ = {June},\n publisher = {Griffith University},\n title = {Shelter},\n year =\ + \ {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Shelter + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Barclay2016 + abstract: 'Program notes: Ground Interference draws on short recordings from each + location I visited in spring 2014 with a particular focus on Joshua Tree National + Park, Jornada Biosphere Reserve, Mojave Desert, and Death Valley National Park. + These fragile desert environments are inhabited by thousands of species all part + of a delicate ecosystem that is in a state of flux induced by changing climates. + The transfixing acoustic ecologies of the southwest deserts demand a stillness + that encourages a deeper environmental awareness and engagement. In many instances + during our field trip we struggled to find locations without human interference. 
+ The distant hum of highway traffic and relentless airplanes under the flight path + from LAX were expected, yet we also encountered unexpected sounds interfering + with the acoustic ecologies of the land. These range from an obscure reverberating + vending machine in Death Valley National Park to rattling power lines in the Jornada + Biosphere Reserve that were so loud I could feel the vibrations through my feet.' + address: 'Brisbane, Australia' + author: Leah Barclay + bibtex: "@inproceedings{nime2016-music-Barclay2016,\n abstract = {Program notes:\ + \ Ground Interference draws on short recordings from each location I visited in\ + \ spring 2014 with a particular focus on Joshua Tree National Park, Jornada Biosphere\ + \ Reserve, Mojave Desert, and Death Valley National Park. These fragile desert\ + \ environments are inhabited by thousands of species all part of a delicate ecosystem\ + \ that is in a state of flux induced by changing climates. The transfixing acoustic\ + \ ecologies of the southwest deserts demand a stillness that encourages a deeper\ + \ environmental awareness and engagement. In many instances during our field trip\ + \ we struggled to find locations without human interference. The distant hum of\ + \ highway traffic and relentless airplanes under the flight path from LAX were\ + \ expected, yet we also encountered unexpected sounds interfering with the acoustic\ + \ ecologies of the land. 
These range from an obscure reverberating vending machine\ + \ in Death Valley National Park to rattling power lines in the Jornada Biosphere\ + \ Reserve that were so loud I could feel the vibrations through my feet.},\n address\ + \ = {Brisbane, Australia},\n author = {Leah Barclay},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Ground Interference - The Listen(n) Project},\n year\ + \ = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Andrew Brown and Toby Gifford month: June - publisher: Casa Paganini - title: Etch - year: 2008 + publisher: Griffith University + title: Ground Interference - The Listen(n) Project + year: 2016 - ENTRYTYPE: inproceedings - ID: nime2008-music-Ciufo2008 - abstract: "Program notes:\nSilent Movies is an attempt to explore and confront some\ - \ of the possible relationships / interdependencies between visual and sonic perception.\ - \ In collaboration with a variety of moving image artists, this performance piece\ - \ complicates visual engagement through performed / improvised sound. In a sense,\ - \ Silent Movies plays with the live soundtrack idea, but from a somewhat different\ - \ vantage point. Or maybe it is an inversion; a visual accompaniment to an improvised\ - \ sonic landscape? For this performance, I will use a hybrid extended electric\ - \ guitar / computer performance system, which allows me to explore extended playing\ - \ techniques and sonic transformations provided by sensor controlled interactive\ - \ digital signal processing. 
For tonight's performance, the moving image composition\ - \ is by Mark Domino (fieldform.com).\nFor more information, please refer to online\ - \ documentation: Guitar performance system : http://ciufo.org/eighth_nerve_guitar.html\n\ - Performance documentation: http://ciufo.org/silent_movies.html\n\nAbout the performer:\n\ - Thomas Ciufo is an improviser, sound / media artist, and researcher working primarily\ - \ in the areas of electroacoustic improvisational performance and hybrid instrument\ - \ / interactive systems design, and is currently serving as artist-in- residence\ - \ in Arts and Technology at Smith College. Recent and ongoing sound works include,\ - \ three meditations, for prepared piano and computer, the series, sonic improvisations\ - \ #N, and eighth nerve, an improvisational piece for prepared electric guitar\ - \ and computer. Recent performances include off-ICMC in Barcelona, Visione Sonoras\ - \ in Mexico City, the SPARK festival in Minneapolis, the International Society\ - \ for Improvised Music conference in Ann Arbor, and the Enaction in Arts conference\ - \ in Grenoble." - address: 'Genova, Italy' - author: Thomas Ciufo - bibtex: "@inproceedings{nime2008-music-Ciufo2008,\n abstract = {Program notes:\n\ - Silent Movies is an attempt to explore and confront some of the possible relationships\ - \ / interdependencies between visual and sonic perception. In collaboration with\ - \ a variety of moving image artists, this performance piece complicates visual\ - \ engagement through performed / improvised sound. 
In a sense, Silent Movies plays\ - \ with the live soundtrack idea, but from a somewhat different vantage point.\ - \ Or maybe it is an inversion; a visual accompaniment to an improvised sonic landscape?\ - \ For this performance, I will use a hybrid extended electric guitar / computer\ - \ performance system, which allows me to explore extended playing techniques and\ - \ sonic transformations provided by sensor controlled interactive digital signal\ - \ processing. For tonight's performance, the moving image composition is by Mark\ - \ Domino (fieldform.com).\nFor more information, please refer to online documentation:\ - \ Guitar performance system : http://ciufo.org/eighth_nerve_guitar.html\nPerformance\ - \ documentation: http://ciufo.org/silent_movies.html\n\nAbout the performer:\n\ - Thomas Ciufo is an improviser, sound / media artist, and researcher working primarily\ - \ in the areas of electroacoustic improvisational performance and hybrid instrument\ - \ / interactive systems design, and is currently serving as artist-in- residence\ - \ in Arts and Technology at Smith College. Recent and ongoing sound works include,\ - \ three meditations, for prepared piano and computer, the series, sonic improvisations\ - \ #N, and eighth nerve, an improvisational piece for prepared electric guitar\ - \ and computer. Recent performances include off-ICMC in Barcelona, Visione Sonoras\ - \ in Mexico City, the SPARK festival in Minneapolis, the International Society\ - \ for Improvised Music conference in Ann Arbor, and the Enaction in Arts conference\ - \ in Grenoble.},\n address = {Genova, Italy},\n author = {Thomas Ciufo},\n booktitle\ + ID: nime2016-music-Paine2016 + abstract: 'Program notes: Becoming Desert draws on the experience of sitting or + lying down silent in the desert for several hours at a time to make sound recordings. + The field recordings I made in four deserts of the American Southwest are the + basis of this work. 
When listening to the desert sounds through headphones at + the time of recording, one is aware of a kind of hyper-real sonic environment. + The amplified soundfield in the headphones is surreal in its presence and accuracy + and multiplies my direct experience of listening many times.' + address: 'Brisbane, Australia' + author: Garth Paine + bibtex: "@inproceedings{nime2016-music-Paine2016,\n abstract = {Program notes: Becoming\ + \ Desert draws on the experience of sitting or lying down silent in the desert\ + \ for several hours at a time to make sound recordings. The field recordings I\ + \ made in four deserts of the American Southwest are the basis of this work. When\ + \ listening to the desert sounds through headphones at the time of recording,\ + \ one is aware of a kind of hyper-real sonic environment. The amplified soundfield\ + \ in the headphones is surreal in its presence and accuracy and multiplies my\ + \ direct experience of listening many times.},\n address = {Brisbane, Australia},\n\ + \ author = {Garth Paine},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Becoming Desert - The Listen(n) Project},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Becoming Desert - The Listen(n) Project + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Vickery2016 + abstract: "Program notes: Nature Forms II is an eco-structuralist work, maintaining\ + \ what Opie and Brown term the “primary rules” of “environmentally-based musical\ + \ composition”: that “structures must be derived from natural sound sources” and\ + \ that “structural data must remain in series”. 
Nature Forms II explores the possibility\ + \ of recursive re-interrogation of a field recording through visualization and\ + \ resonification/resynthesis via machine and performative means. The source field\ + \ recording is contrasted with artificially generated versions created with additive,\ + \ subtractive and ring modulation resynthesis. Interaction between the live performers\ + \ and the electronic components are explored through “spectral freezing” of components\ + \ of the field recording to create spectrally derived chords from features of\ + \ the recording bird sounds and a rusty gate which are then transcribed into notation\ + \ for the instrumentalists and temporal manipulation of the recording to allow\ + \ complex bird calls to be emulated in a human time-scale.\n\nCat Hope - Bass\ + \ Flute\nLindsay Vickery - Clarinet\nAaron Wyatt - Viola\nVanessa Tomlinson -\ + \ Percussion" + address: 'Brisbane, Australia' + author: Lindsay Vickery + bibtex: "@inproceedings{nime2016-music-Vickery2016,\n abstract = {Program notes:\ + \ Nature Forms II is an eco-structuralist work, maintaining what Opie and Brown\ + \ term the “primary rules” of “environmentally-based musical composition”: that\ + \ “structures must be derived from natural sound sources” and that “structural\ + \ data must remain in series”. Nature Forms II explores the possibility of recursive\ + \ re-interrogation of a field recording through visualization and resonification/resynthesis\ + \ via machine and performative means. The source field recording is contrasted\ + \ with artificially generated versions created with additive, subtractive and\ + \ ring modulation resynthesis. 
Interaction between the live performers and the\ + \ electronic components are explored through “spectral freezing” of components\ + \ of the field recording to create spectrally derived chords from features of\ + \ the recording bird sounds and a rusty gate which are then transcribed into notation\ + \ for the instrumentalists and temporal manipulation of the recording to allow\ + \ complex bird calls to be emulated in a human time-scale.\n\nCat Hope - Bass\ + \ Flute\nLindsay Vickery - Clarinet\nAaron Wyatt - Viola\nVanessa Tomlinson -\ + \ Percussion},\n address = {Brisbane, Australia},\n author = {Lindsay Vickery},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Andrew Brown and Toby Gifford},\n month\ + \ = {June},\n publisher = {Griffith University},\n title = {Nature Forms II for\ + \ Flute, Clarinet, Viola, Percussion, Hybrid Field Recording and Electronics},\n\ + \ year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: 'Nature Forms II for Flute, Clarinet, Viola, Percussion, Hybrid Field Recording + and Electronics' + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Moore2016 + abstract: 'Program notes: Basaur is a structured improvisation for software, microphones, + and objects, performed through a multichannel sound system. Using simple, readymade + household devices as the primary sound source, Basaur unfolds as a guided exploration + of the small mechanical drones and noises that occupy the edges of our quotidian + sonic awareness. 
Using both pre-recorded and live-performed sound sources, textures + are layered and connected, building to a richly detailed environment of active + sounds -- background becomes foreground, and the everyday annoyances of modern + convenience take on a full-throated presence that is by turns lyrical and menacing.' + address: 'Brisbane, Australia' + author: Stephan Moore + bibtex: "@inproceedings{nime2016-music-Moore2016,\n abstract = {Program notes: Basaur\ + \ is a structured improvisation for software, microphones, and objects, performed\ + \ through a multichannel sound system. Using simple, readymade household devices\ + \ as the primary sound source, Basaur unfolds as a guided exploration of the small\ + \ mechanical drones and noises that occupy the edges of our quotidian sonic awareness.\ + \ Using both pre-recorded and live-performed sound sources, textures are layered\ + \ and connected, building to a richly detailed environment of active sounds --\ + \ background becomes foreground, and the everyday annoyances of modern convenience\ + \ take on a full-throated presence that is by turns lyrical and menacing.},\n\ + \ address = {Brisbane, Australia},\n author = {Stephan Moore},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Basaur},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Basaur + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Bennett2016 + abstract: "Program notes: Three short pieces for flute and micro-bats (world premiere).\n\ + \nThis work uses data collected by Australian environmental scientist, Dr. Lindy\ + \ Lumsden, in her research of native Australian micro bats. 
It uses data from\ + \ bat-detecting devices: ultrasonic recording devices that recognize bat calls\ + \ and transpose them down to the human hearing range. The data is analysed in\ + \ the form of a spectrogram, and each species of bat is discerned by the shape\ + \ and range of the calls. This piece uses the pitch and rhythm of bat calls as\ + \ source material for the structure of each movement, and also uses the transposed\ + \ calls throughout. The recordings are triggered at certain frequencies and dynamics\ + \ of the flute via Max MSP, setting bats flying across the room (in 4 channels).\ + \ The flute mimics different types of bat calls, triggering and reacting to the\ + \ recordings and using its inherent flexibility to create a different voice in\ + \ each register.\n\nI. Victoria Circa 5.' There are 21 species of native bats\ + \ in Victoria, all with unique calls above human hearing range. Like birds, these\ + \ calls occur in different frequency levels so that different species of bat may\ + \ co-exist without disturbing each other. A bat’s call bounces off the objects\ + \ around it allowing it to ‘see’ at night, creating a beautiful cacophony that\ + \ no one ever notices.\n\nII. Melbourne Circa 5.' Did you think that bats only\ + \ live in the bush? 17 of the 21 species of bats in Victoria can be found in metropolitan\ + \ Melbourne, roosting in the hollows of our 100+-year-old trees. These fascinating\ + \ creatures go largely unnoticed by all except the odd cat due to their size (most\ + \ adult micro bats fit into a matchbox), speed, and auditory range (only a few\ + \ species can be heard by humans, including the White-striped Freetail Bat). These\ + \ bats are insectivorous and without them we’d be inundated with mosquitos and\ + \ bugs.\n\nIII. Southern Bent-Wing Bat Circa 6.' Very little is known about this\ + \ curious endangered species other than its secretive breeding place in a cave\ + \ somewhere in South-West Victoria. 
These bats can be found all over Victoria,\ + \ but unlike any other species of bat, they travel hundreds of miles to breed\ + \ in one place. No one knows how the young bats know where to go, without flying\ + \ in flocks like birds there’s no way for them to follow each other, so how do\ + \ they know where to go? This is one of the questions that Dr. Lindy Lumsden hopes\ + \ to answer in her research." + address: 'Brisbane, Australia' + author: Alice Bennett + bibtex: "@inproceedings{nime2016-music-Bennett2016,\n abstract = {Program notes:\ + \ Three short pieces for flute and micro-bats (world premiere).\n\nThis work uses\ + \ data collected by Australian environmental scientist, Dr. Lindy Lumsden, in\ + \ her research of native Australian micro bats. It uses data from bat-detecting\ + \ devices: ultrasonic recording devices that recognize bat calls and transpose\ + \ them down to the human hearing range. The data is analysed in the form of a\ + \ spectrogram, and each species of bat is discerned by the shape and range of\ + \ the calls. This piece uses the pitch and rhythm of bat calls as source material\ + \ for the structure of each movement, and also uses the transposed calls throughout.\ + \ The recordings are triggered at certain frequencies and dynamics of the flute\ + \ via Max MSP, setting bats flying across the room (in 4 channels). The flute\ + \ mimics different types of bat calls, triggering and reacting to the recordings\ + \ and using its inherent flexibility to create a different voice in each register.\n\ + \nI. Victoria Circa 5.' There are 21 species of native bats in Victoria, all with\ + \ unique calls above human hearing range. Like birds, these calls occur in different\ + \ frequency levels so that different species of bat may co-exist without disturbing\ + \ each other. A bat’s call bounces off the objects around it allowing it to ‘see’\ + \ at night, creating a beautiful cacophony that no one ever notices.\n\nII. Melbourne\ + \ Circa 5.' 
Did you think that bats only live in the bush? 17 of the 21 species\ + \ of bats in Victoria can be found in metropolitan Melbourne, roosting in the\ + \ hollows of our 100+-year-old trees. These fascinating creatures go largely unnoticed\ + \ by all except the odd cat due to their size (most adult micro bats fit into\ + \ a matchbox), speed, and auditory range (only a few species can be heard by humans,\ + \ including the White-striped Freetail Bat). These bats are insectivorous and\ + \ without them we’d be inundated with mosquitos and bugs.\n\nIII. Southern Bent-Wing\ + \ Bat Circa 6.' Very little is known about this curious endangered species other\ + \ than its secretive breeding place in a cave somewhere in South-West Victoria.\ + \ These bats can be found all over Victoria, but unlike any other species of bat,\ + \ they travel hundreds of miles to breed in one place. No one knows how the young\ + \ bats know where to go, without flying in flocks like birds there’s no way for\ + \ them to follow each other, so how do they know where to go? This is one of the\ + \ questions that Dr. Lindy Lumsden hopes to answer in her research.},\n address\ + \ = {Brisbane, Australia},\n author = {Alice Bennett},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Echolocation Suite},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Echolocation Suite + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-OBrien2016 + abstract: 'Program notes: "along the eaves" is part of a series that focuses on + my interest in translational procedures and machine listening. 
It takes its name + from the following line in Franz Kafka’s “A Crossbreed [A Sport]” (1931, trans. + 1933): “On the moonlight nights its favourite promenade is along the eaves.” To + compose the work, I developed custom software written in the programming languages + of C and SuperCollider. I used these programs in different ways to process and + sequence my source materials, which, in this case, included audio recordings of + water, babies, and string instruments. Like other works in the series, I am interested + in fabricating sonic regions of coincidence, where my coordinated mix of carefully + selected sounds suggests relationships between the sounds and the illusions they + foster.' + address: 'Brisbane, Australia' + author: Benjamin O'Brien + bibtex: "@inproceedings{nime2016-music-OBrien2016,\n abstract = {Program notes:\ + \ \"along the eaves\" is part of a series that focuses on my interest in translational\ + \ procedures and machine listening. It takes its name from the following line\ + \ in Franz Kafka’s “A Crossbreed [A Sport]” (1931, trans. 1933): “On the moonlight\ + \ nights its favourite promenade is along the eaves.” To compose the work, I developed\ + \ custom software written in the programming languages of C and SuperCollider.\ + \ I used these programs in different ways to process and sequence my source materials,\ + \ which, in this case, included audio recordings of water, babies, and string\ + \ instruments. 
Like other works in the series, I am interested in fabricating\ + \ sonic regions of coincidence, where my coordinated mix of carefully selected\ + \ sounds suggests relationships between the sounds and the illusions they foster.},\n\ + \ address = {Brisbane, Australia},\n author = {Benjamin O'Brien},\n booktitle\ \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher = {Casa\ - \ Paganini},\n title = {Silent Movies: an Improvisational Sound / Image Performance},\n\ - \ year = {2008}\n}\n" + \ Expression},\n editor = {Andrew Brown and Toby Gifford},\n month = {June},\n\ + \ publisher = {Griffith University},\n title = {Along the Eaves},\n year = {2016}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Andrew Brown and Toby Gifford month: June - publisher: Casa Paganini - title: 'Silent Movies: an Improvisational Sound / Image Performance' - year: 2008 + publisher: Griffith University + title: Along the Eaves + year: 2016 - ENTRYTYPE: inproceedings - ID: nime2008-music-Schedel2008 - abstract: "Program notes:\nDeveloped in Amsterdam, at STEIM, The Color of Waiting\ - \ uses animation, movement and video to portray themes of expectation. This collaboration\ - \ (between animator Nick Fox-Gieg, chorographer/dancer Alison Rootberg, composer/programmer\ - \ Margaret Schedel, and set designer Abra Brayman) deals with the anticipation\ - \ of events by understanding the way time unfolds. The performers shift between\ - \ frustration and acceptance as they portray the emotions evoked when waiting\ - \ for something or someone. 
The Color of Waiting is an experience and a mood,\ - \ an abstraction depicting human interaction.\n\nAbout the performers:\nAlison\ - \ Rootberg and Margaret Schedel founded The Kinesthetech Sense in 2006 with the\ - \ intent to collaborate with visual artists, dancers, and musicians, creating\ - \ ferociously interactive experiences for audiences throughout the world. Rootberg,\ - \ the Vice President of Programming for the Dance Resource Center, focuses on\ - \ incorporating dance with video while Schedel, an assistant professor of music\ - \ at Stony Brook University, combines audio with interactive technologies. Oskar\ - \ Fischinger once said that, \"everything in the world has its own spirit which\ - \ can be released by setting it in motion.\" Together Rootberg and Schedel create\ - \ systems which are set in motion by artistic input, facilitating interplay between\ - \ computers and humans. Kinesthetech Sense has had their work presented throughout\ - \ the US, Canada, Denmark, Germany, Italy, and Mexico. For more info, please go\ - \ to: www.ksense.org" - address: 'Genova, Italy' - author: Alison Rootberg and Margaret Schedel - bibtex: "@inproceedings{nime2008-music-Schedel2008,\n abstract = {Program notes:\n\ - Developed in Amsterdam, at STEIM, The Color of Waiting uses animation, movement\ - \ and video to portray themes of expectation. This collaboration (between animator\ - \ Nick Fox-Gieg, chorographer/dancer Alison Rootberg, composer/programmer Margaret\ - \ Schedel, and set designer Abra Brayman) deals with the anticipation of events\ - \ by understanding the way time unfolds. The performers shift between frustration\ - \ and acceptance as they portray the emotions evoked when waiting for something\ - \ or someone. 
The Color of Waiting is an experience and a mood, an abstraction\ - \ depicting human interaction.\n\nAbout the performers:\nAlison Rootberg and Margaret\ - \ Schedel founded The Kinesthetech Sense in 2006 with the intent to collaborate\ - \ with visual artists, dancers, and musicians, creating ferociously interactive\ - \ experiences for audiences throughout the world. Rootberg, the Vice President\ - \ of Programming for the Dance Resource Center, focuses on incorporating dance\ - \ with video while Schedel, an assistant professor of music at Stony Brook University,\ - \ combines audio with interactive technologies. Oskar Fischinger once said that,\ - \ \"everything in the world has its own spirit which can be released by setting\ - \ it in motion.\" Together Rootberg and Schedel create systems which are set in\ - \ motion by artistic input, facilitating interplay between computers and humans.\ - \ Kinesthetech Sense has had their work presented throughout the US, Canada, Denmark,\ - \ Germany, Italy, and Mexico. For more info, please go to: www.ksense.org},\n\ - \ address = {Genova, Italy},\n author = {Alison Rootberg and Margaret Schedel},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher\ - \ = {Casa Paganini},\n title = {NIME Performance - The Color of Waiting},\n year\ - \ = {2008}\n}\n" + ID: nime2016-music-Burraston2016 + abstract: 'Program notes: Rainwire encompasses the investigation of rainfall & its + application as a medium for artistic, cultural & scientific exchange. The Rainwire + project includes development of a prototype Acoustic Rain Gauge using suspended + cables (long wire instruments), and subsequently expanded through various collaborations + in a range of creative & environmental contexts. 
Rainwire is an experimental approach + at technological appropriation of agricultural based objects for art and science, + with particular emphasis on climate change issues and agriculture. This performance + will present a live laptop mix of environmental sonification recordings from the + newly built Rainwire prototype. Previous work on Rainwire has been conducted on + shared instruments, this performance will be an opportunity to present the newly + built dedicated Rainwire prototype in public for the first time in Australia. + Long-wire instruments are made from spans of fencing wire across the open landscape. + Rainwire developed from using contact mic recordings of rainfall ‘playing’ the + long wire instruments for my music compositions. This enabled a proof of concept + study to the extent that the audio recordings demonstrate a wide variety of temporal + & spatial rain event complexity. This suggests that environmental sonification + has great potential to measure rainfall accurately, & address recognized shortcomings + of existing equipment & approaches in meteorology. Rain induced sounds with long + wire instruments have a wide range of unique, audibly recognisable features. All + of these sonic features exhibit dynamic volume & tonal characteristics, depending + on the rain type & environmental conditions. Aside from the vast array of creative + possibilities, the high spatial, temporal, volume & tonal resolution could provide + significant advancement to knowledge of rainfall event profiles, intensity & microstructure. + The challenge lies in identifying distinctive sound patterns & relating them to + particular types of rainfall events. Rainwire is beyond simple sonification of + data, it embeds technology & data collection within cultural contexts. With rainfall + as catalyst to draw inspiration from, artists, scientists & cultural groups are + key to informing science & incite new creative modalities.' 
+ address: 'Brisbane, Australia' + author: David Burraston + bibtex: "@inproceedings{nime2016-music-Burraston2016,\n abstract = {Program notes:\ + \ Rainwire encompasses the investigation of rainfall & its application as a medium\ + \ for artistic, cultural & scientific exchange. The Rainwire project includes\ + \ development of a prototype Acoustic Rain Gauge using suspended cables (long\ + \ wire instruments), and subsequently expanded through various collaborations\ + \ in a range of creative & environmental contexts. Rainwire is an experimental\ + \ approach at technological appropriation of agricultural based objects for art\ + \ and science, with particular emphasis on climate change issues and agriculture.\ + \ This performance will present a live laptop mix of environmental sonification\ + \ recordings from the newly built Rainwire prototype. Previous work on Rainwire\ + \ has been conducted on shared instruments, this performance will be an opportunity\ + \ to present the newly built dedicated Rainwire prototype in public for the first\ + \ time in Australia. Long-wire instruments are made from spans of fencing wire\ + \ across the open landscape. Rainwire developed from using contact mic recordings\ + \ of rainfall ‘playing’ the long wire instruments for my music compositions. This\ + \ enabled a proof of concept study to the extent that the audio recordings demonstrate\ + \ a wide variety of temporal & spatial rain event complexity. This suggests that\ + \ environmental sonification has great potential to measure rainfall accurately,\ + \ & address recognized shortcomings of existing equipment & approaches in meteorology.\ + \ Rain induced sounds with long wire instruments have a wide range of unique,\ + \ audibly recognisable features. 
All of these sonic features exhibit dynamic volume\ + \ & tonal characteristics, depending on the rain type & environmental conditions.\ + \ Aside from the vast array of creative possibilities, the high spatial, temporal,\ + \ volume & tonal resolution could provide significant advancement to knowledge\ + \ of rainfall event profiles, intensity & microstructure. The challenge lies in\ + \ identifying distinctive sound patterns & relating them to particular types of\ + \ rainfall events. Rainwire is beyond simple sonification of data, it embeds technology\ + \ & data collection within cultural contexts. With rainfall as catalyst to draw\ + \ inspiration from, artists, scientists & cultural groups are key to informing\ + \ science & incite new creative modalities.},\n address = {Brisbane, Australia},\n\ + \ author = {David Burraston},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Rainwire},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Andrew Brown and Toby Gifford month: June - publisher: Casa Paganini - title: NIME Performance - The Color of Waiting - year: 2008 + publisher: Griffith University + title: Rainwire + year: 2016 - ENTRYTYPE: inproceedings - ID: nime2008-music-Drummond2008 - abstract: "Program notes:\nInspired by the swirls, vortices and lemniscate like\ - \ patterns created by moving water and other fluids, Sonic Construction uses the\ - \ movement of coloured dyes in a semi-viscous liquid to generate and control sound.\ - \ The work is performed by dropping different coloured dyes (red, green, yellow,\ - \ blue) into a clear glass vessel filled with water, made slightly viscous through\ - \ the addition of a sugar syrup (Figure 2). 
Through the use of video tracking,\ - \ the speed, colour and spatial location of the different coloured drops of dye\ - \ are analysed as they are dropped into the glass vessel and subsequently expand,\ - \ swirl, coil and entwine in the water. The control data derived from the video\ - \ tracking of the ink drops is used to define both the shape and the way in which\ - \ individual grains of sound are combined using FOF (Fonction d'Onde Formatique\ - \ translated as Formant Wave-Form or Formant Wave Function) synthesis [1] [2],\ - \ to create a rich and varied timbral sound environment. In developing Sonic Construction\ - \ I sought to create a system that would provide a sense of connection with the\ - \ interactive processes being employed and at the same time to create a system\ - \ over which I had only limited direct control; ideally being influenced by the\ - \ system's responses as much as I was influencing the system.\nTimbres produced\ - \ by the system include bass-rich pulse streams, vocal textures and a variety\ - \ of bell like sounds. The fluid movement of the coloured dye in the liquid is\ - \ further used to spatialise the outputs of the FOF synthesis. The video captured\ - \ of the dyes in the liquid, used for motion analysis and colour matching, is\ - \ also projected back into the performance space, slightly processed using contrast,\ - \ saturation and hue effects.\n\nAbout the performer:\nJon Drummond is a Sydney\ - \ based composer and performer. His creative work spans the fields of instrumental\ - \ music, electroacoustic, interactive, sound and new media arts. Jon's electroacoustic\ - \ and interactive work has been presented widely including the International Computer\ - \ Music Conferences (Denmark 1994, Canada 1995, Greece 1997, China 1999, Singapore\ - \ 2003), Electrofringe, Totally Huge New Music Festival, Darwin International\ - \ Guitar Festival and the Adelaide Festival of Arts. 
Many of his acoustic and\ - \ electronic compositions have been commissioned and performed by leading Australian\ - \ performers and ensembles including austraLYSIS, The Song Company, Ros Dunlop\ - \ and Kathleen Gallagher. Recently Jon has been exploring the use of environmental\ - \ signals from the natural world as generative devices for creating electroacoustic\ - \ sound - video tracking the fluid motions of water in \"Sonic Construction\"\ - \ and the motion of air through the use of kites in \"Sounding the Winds\"." - address: 'Genova, Italy' - author: Jon Drummond - bibtex: "@inproceedings{nime2008-music-Drummond2008,\n abstract = {Program notes:\n\ - Inspired by the swirls, vortices and lemniscate like patterns created by moving\ - \ water and other fluids, Sonic Construction uses the movement of coloured dyes\ - \ in a semi-viscous liquid to generate and control sound. The work is performed\ - \ by dropping different coloured dyes (red, green, yellow, blue) into a clear\ - \ glass vessel filled with water, made slightly viscous through the addition of\ - \ a sugar syrup (Figure 2). Through the use of video tracking, the speed, colour\ - \ and spatial location of the different coloured drops of dye are analysed as\ - \ they are dropped into the glass vessel and subsequently expand, swirl, coil\ - \ and entwine in the water. The control data derived from the video tracking of\ - \ the ink drops is used to define both the shape and the way in which individual\ - \ grains of sound are combined using FOF (Fonction d'Onde Formatique translated\ - \ as Formant Wave-Form or Formant Wave Function) synthesis [1] [2], to create\ - \ a rich and varied timbral sound environment. 
In developing Sonic Construction\ - \ I sought to create a system that would provide a sense of connection with the\ - \ interactive processes being employed and at the same time to create a system\ - \ over which I had only limited direct control; ideally being influenced by the\ - \ system's responses as much as I was influencing the system.\nTimbres produced\ - \ by the system include bass-rich pulse streams, vocal textures and a variety\ - \ of bell like sounds. The fluid movement of the coloured dye in the liquid is\ - \ further used to spatialise the outputs of the FOF synthesis. The video captured\ - \ of the dyes in the liquid, used for motion analysis and colour matching, is\ - \ also projected back into the performance space, slightly processed using contrast,\ - \ saturation and hue effects.\n\nAbout the performer:\nJon Drummond is a Sydney\ - \ based composer and performer. His creative work spans the fields of instrumental\ - \ music, electroacoustic, interactive, sound and new media arts. Jon's electroacoustic\ - \ and interactive work has been presented widely including the International Computer\ - \ Music Conferences (Denmark 1994, Canada 1995, Greece 1997, China 1999, Singapore\ - \ 2003), Electrofringe, Totally Huge New Music Festival, Darwin International\ - \ Guitar Festival and the Adelaide Festival of Arts. Many of his acoustic and\ - \ electronic compositions have been commissioned and performed by leading Australian\ - \ performers and ensembles including austraLYSIS, The Song Company, Ros Dunlop\ - \ and Kathleen Gallagher. 
Recently Jon has been exploring the use of environmental\ - \ signals from the natural world as generative devices for creating electroacoustic\ - \ sound - video tracking the fluid motions of water in \"Sonic Construction\"\ - \ and the motion of air through the use of kites in \"Sounding the Winds\".},\n\ - \ address = {Genova, Italy},\n author = {Jon Drummond},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Roberto Doati},\n month = {June},\n publisher = {Casa Paganini},\n\ - \ title = {Sonic Construction},\n year = {2008}\n}\n" + ID: nime2016-music-Hallett2016 + abstract: 'Program notes: The Elephant Listening Project from Cornell University + is the basis of Elephant Talk/Elephant Listening Project music performances. They + present not only logistical difficulties but musical difficulties. It was 2-3 + years of attempting to confirm the possibility of the project with Cornell University. + The researchers and contacts of course, were deep in Africa recording the sounds + for their research. Threats of poaching are a reality and in one instance, although + the researcher reached safety, the elephants weren''t so lucky. Cornell University + use a variety of technological platforms for their research both recording and + processing of these recordings. The music created also uses a variety of technological + and compositional methods to both utilise the sounds and to create something that + is inspiring, innovative and become a whole listening experience. Through using + different format types of sounds, for example: infrasonic sampled so that humans + can hear them as well as regular files, the aim is to create relationships between + the natural environment of the forest elephants, the other recorded acoustic occurrences + while incorporating various instruments to create a conversation between the sonic + environment, performer and listener.' 
+ address: 'Brisbane, Australia' + author: Vicki Hallett + bibtex: "@inproceedings{nime2016-music-Hallett2016,\n abstract = {Program notes:\ + \ The Elephant Listening Project from Cornell University is the basis of Elephant\ + \ Talk/Elephant Listening Project music performances. They present not only logistical\ + \ difficulties but musical difficulties. It was 2-3 years of attempting to confirm\ + \ the possibility of the project with Cornell University. The researchers and\ + \ contacts of course, were deep in Africa recording the sounds for their research.\ + \ Threats of poaching are a reality and in one instance, although the researcher\ + \ reached safety, the elephants weren't so lucky. Cornell University use a variety\ + \ of technological platforms for their research both recording and processing\ + \ of these recordings. The music created also uses a variety of technological\ + \ and compositional methods to both utilise the sounds and to create something\ + \ that is inspiring, innovative and become a whole listening experience. 
Through\ + \ using different format types of sounds, for example: infrasonic sampled so that\ + \ humans can hear them as well as regular files, the aim is to create relationships\ + \ between the natural environment of the forest elephants, the other recorded\ + \ acoustic occurrences while incorporating various instruments to create a conversation\ + \ between the sonic environment, performer and listener.},\n address = {Brisbane,\ + \ Australia},\n author = {Vicki Hallett},\n booktitle = {Music Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Elephant Talk},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Andrew Brown and Toby Gifford month: June - publisher: Casa Paganini - title: Sonic Construction - year: 2008 + publisher: Griffith University + title: Elephant Talk + year: 2016 - ENTRYTYPE: inproceedings - ID: nime2008-music-GeWang2008 - abstract: "Program notes:\nThe Mobile Phone Orchestra is a new repetoire-based ensemble\ - \ using mobile phones as the primary musical instrument. The MoPhO Suite contains\ - \ a selection of recent compositions that highlights different aspects of what\ - \ it means to compose for and perform with such an instrument in an ensemble setting.\ - \ Brief program note: The Mobile Phone Orchestra of CCRMA (MoPhO) presents an\ - \ ensemble suite featuring music performed on mobile phones. 
Far beyond ring-tones,\ - \ these interactive musical works take advantage of the unique technological capabilities\ - \ of today's hardware, transforming phone keypads, built-in accelerometers, and\ - \ built-in microphones into powerful and yet mobile chamber meta-instruments.\ - \ The suite consists of selection of representative pieces:\n***Drone In/Drone\ - \ Out (Ge Wang): human players, mobile phones, FM timbres, accelerometers.\n***TamaG\ - \ (Georg Essl): TamaG is a piece that explores the boundary of projecting the\ - \ humane onto mobile devices and at the same time display the fact that they are\ - \ deeply mechanical and artificial. It explores the question how much control\ - \ we have in the interaction with these devices or if the device itself at times\ - \ controls us. The piece work with the tension between these positions and crosses\ - \ the desirable and the alarming, the human voice with mechanical noise. The alarming\ - \ effect has a social quality and spreads between the performers. The sounding\ - \ algorithm is the non-linear circle map which is used in easier-to-control and\ - \ hard-to-control regimes to evoke the effects of control and desirability on\ - \ the one hand the the loss of control and mechanistic function on the other hand.\n\ - ***The Phones and Fury (Jeff Cooper and Henri Penttinen): how much damage can\ - \ a single player do with 10 mobile phones? Facilitating loops, controllable playback\ - \ speed, and solo instruments.\n***Chatter (Ge Wang): the audience is placed in\ - \ the middle of a web of conversations...\n\nAbout the performers:\nGe Wang received\ - \ his B.S. in Computer Science in 2000 from Duke University, PhD (soon) in Computer\ - \ Science (advisor Perry Cook) in 2008 from Princeton University, and is currently\ - \ an assistant professor at Stanford University in the Center for Computer Research\ - \ in Music and Acoustics (CCRMA). 
His research interests include interactive software\ - \ systems (of all sizes) for computer music, programming languages, sound synthesis\ - \ and analysis, music information retrieval, new performance ensembles (e.g.,\ - \ laptop orchestra) and paradigms (e.g., live coding), visualization, interfaces\ - \ for human-computer interaction, interactive audio over networks, and methodologies\ - \ for education at the intersection of computer science and music. Ge is the chief\ - \ architect of the ChucK audio programming language and the Audicle environment.\ - \ He was a founding developer and co-director of the Princeton Laptop Orchestra\ - \ (PLOrk), the founder and director of the Stanford Laptop Orchestra (SLOrk),\ - \ and a co-creator of the TAPESTREA sound design environment. Ge composes and\ - \ performs via various electro-acoustic and computer-mediated means, including\ - \ with PLOrk/SLOrk, with Perry as a live coding duo, and with Princeton graduate\ - \ student and comrade Rebecca Fiebrink in a duo exploring new performance paradigms,\ - \ cool audio software, and great food.\n\nGeorg Essl is currently Senior Research\ - \ Scientist at Deutsche Telekom Laboratories at TU-Berlin, Germany. He works on\ - \ mobile interaction, new interfaces for musical expression and sound synthesis\ - \ algorithms that are abstract mathematical or physical models. After he received\ - \ his Ph.D. in Computer Science at Princeton University under the supervision\ - \ of Perry Cook he served on the faculty of the University of Florida and worked\ - \ at the MIT Media Lab Europe in Dublin before joining T-Labs.\n\nHenri Penttinen\ - \ was born in Espoo, Finland, in 1975. He completed his M.Sc. and PhD (Dr. Tech.)\ - \ degrees in Electrical Engineering at the Helsinki University of Technology (TKK)\ - \ in 2002 and 2006, respectively. 
He conducted his studies and teaches about digital\ - \ signal processors and audio processing at the Department of Signal Processing\ - \ and Acoustics (until 2007 known as Laboratory of Acoustics and Signal Processing)\ - \ at TKK. Dr. Penttinen was a visiting scholar at Center for Computer Research\ - \ in Music and Acoustics (CCRMA), Stanford University, during 2007 and 2008. His\ - \ main research interests are sound synthesis, signal processing algorithms, musical\ - \ acoustics, real-time audio applications in mobile environments. He is one of\ - \ the co-founders and directors, with Georg Essl and Ge Wang, of the Mobile Phone\ - \ Orchestra of CCRMA (MoPhO). He is also the co-inventor, with Jaakko Prättälä,\ - \ of the electro-acoustic bottle (eBottle). His electro-acoustic pieces have been\ - \ performed around Finland, in the USA, and Cuba. Additional Composer Biography:\ - \ Jeffrey Cooper is a musician / producer from Bryan, Texas. Having worked as\ - \ a programmer and DJ for a number of years, he is currently finishing a Master\ - \ Degree in Music, Science, and Technology at Stanford University / CCRMA. Co-\ - \ composer of music for mobile phones with the honorable Henri Penttinen." - address: 'Genova, Italy' - author: 'Ge Wang, Georg Essl and Henri Penttinen' - bibtex: "@inproceedings{nime2008-music-GeWang2008,\n abstract = {Program notes:\n\ - The Mobile Phone Orchestra is a new repetoire-based ensemble using mobile phones\ - \ as the primary musical instrument. The MoPhO Suite contains a selection of recent\ - \ compositions that highlights different aspects of what it means to compose for\ - \ and perform with such an instrument in an ensemble setting. Brief program note:\ - \ The Mobile Phone Orchestra of CCRMA (MoPhO) presents an ensemble suite featuring\ - \ music performed on mobile phones. 
Far beyond ring-tones, these interactive musical\ - \ works take advantage of the unique technological capabilities of today's hardware,\ - \ transforming phone keypads, built-in accelerometers, and built-in microphones\ - \ into powerful and yet mobile chamber meta-instruments. The suite consists of\ - \ selection of representative pieces:\n***Drone In/Drone Out (Ge Wang): human\ - \ players, mobile phones, FM timbres, accelerometers.\n***TamaG (Georg Essl):\ - \ TamaG is a piece that explores the boundary of projecting the humane onto mobile\ - \ devices and at the same time display the fact that they are deeply mechanical\ - \ and artificial. It explores the question how much control we have in the interaction\ - \ with these devices or if the device itself at times controls us. The piece work\ - \ with the tension between these positions and crosses the desirable and the alarming,\ - \ the human voice with mechanical noise. The alarming effect has a social quality\ - \ and spreads between the performers. The sounding algorithm is the non-linear\ - \ circle map which is used in easier-to-control and hard-to-control regimes to\ - \ evoke the effects of control and desirability on the one hand the the loss of\ - \ control and mechanistic function on the other hand.\n***The Phones and Fury\ - \ (Jeff Cooper and Henri Penttinen): how much damage can a single player do with\ - \ 10 mobile phones? Facilitating loops, controllable playback speed, and solo\ - \ instruments.\n***Chatter (Ge Wang): the audience is placed in the middle of\ - \ a web of conversations...\n\nAbout the performers:\nGe Wang received his B.S.\ - \ in Computer Science in 2000 from Duke University, PhD (soon) in Computer Science\ - \ (advisor Perry Cook) in 2008 from Princeton University, and is currently an\ - \ assistant professor at Stanford University in the Center for Computer Research\ - \ in Music and Acoustics (CCRMA). 
His research interests include interactive software\ - \ systems (of all sizes) for computer music, programming languages, sound synthesis\ - \ and analysis, music information retrieval, new performance ensembles (e.g.,\ - \ laptop orchestra) and paradigms (e.g., live coding), visualization, interfaces\ - \ for human-computer interaction, interactive audio over networks, and methodologies\ - \ for education at the intersection of computer science and music. Ge is the chief\ - \ architect of the ChucK audio programming language and the Audicle environment.\ - \ He was a founding developer and co-director of the Princeton Laptop Orchestra\ - \ (PLOrk), the founder and director of the Stanford Laptop Orchestra (SLOrk),\ - \ and a co-creator of the TAPESTREA sound design environment. Ge composes and\ - \ performs via various electro-acoustic and computer-mediated means, including\ - \ with PLOrk/SLOrk, with Perry as a live coding duo, and with Princeton graduate\ - \ student and comrade Rebecca Fiebrink in a duo exploring new performance paradigms,\ - \ cool audio software, and great food.\n\nGeorg Essl is currently Senior Research\ - \ Scientist at Deutsche Telekom Laboratories at TU-Berlin, Germany. He works on\ - \ mobile interaction, new interfaces for musical expression and sound synthesis\ - \ algorithms that are abstract mathematical or physical models. After he received\ - \ his Ph.D. in Computer Science at Princeton University under the supervision\ - \ of Perry Cook he served on the faculty of the University of Florida and worked\ - \ at the MIT Media Lab Europe in Dublin before joining T-Labs.\n\nHenri Penttinen\ - \ was born in Espoo, Finland, in 1975. He completed his M.Sc. and PhD (Dr. Tech.)\ - \ degrees in Electrical Engineering at the Helsinki University of Technology (TKK)\ - \ in 2002 and 2006, respectively. 
He conducted his studies and teaches about digital\ - \ signal processors and audio processing at the Department of Signal Processing\ - \ and Acoustics (until 2007 known as Laboratory of Acoustics and Signal Processing)\ - \ at TKK. Dr. Penttinen was a visiting scholar at Center for Computer Research\ - \ in Music and Acoustics (CCRMA), Stanford University, during 2007 and 2008. His\ - \ main research interests are sound synthesis, signal processing algorithms, musical\ - \ acoustics, real-time audio applications in mobile environments. He is one of\ - \ the co-founders and directors, with Georg Essl and Ge Wang, of the Mobile Phone\ - \ Orchestra of CCRMA (MoPhO). He is also the co-inventor, with Jaakko Prättälä,\ - \ of the electro-acoustic bottle (eBottle). His electro-acoustic pieces have been\ - \ performed around Finland, in the USA, and Cuba. Additional Composer Biography:\ - \ Jeffrey Cooper is a musician / producer from Bryan, Texas. Having worked as\ - \ a programmer and DJ for a number of years, he is currently finishing a Master\ - \ Degree in Music, Science, and Technology at Stanford University / CCRMA. Co-\ - \ composer of music for mobile phones with the honorable Henri Penttinen.},\n\ - \ address = {Genova, Italy},\n author = {Ge Wang, Georg Essl and Henri Penttinen},\n\ + ID: nime2016-music-Tahiroglu2016 + abstract: 'Program notes: "NOISA Étude 2" is a second set of performance instructions + created to showcase compelling, evolving and complex soundscapes only possible + when operating the NOISA instruments, integrating the system’s autonomous responses + as part of a musical piece. The multi-layered sound interaction design is based + on radical transformations of acoustic instruments performing works from the classical + music repertoire. This second "étude" is based entirely on interaction with spectrum-complementary + Phase Vocoders. 
The system is fed with variations of a fixed musical motif, encouraging + the system to recognise elements of the motive and create its own set of different + versions emulating a human musical compositional process. Also, the Myo Armband + is used in a creative way as an independent element for dynamic control, using + raw data extracted from the muscles’ tension.' + address: 'Brisbane, Australia' + author: Juan Carlos Vasquez & Koray Tahiroğlu + bibtex: "@inproceedings{nime2016-music-Tahiroglu2016,\n abstract = {Program notes:\ + \ \"NOISA Étude 2\" is a second set of performance instructions created to showcase\ + \ compelling, evolving and complex soundscapes only possible when operating the\ + \ NOISA instruments, integrating the system’s autonomous responses as part of\ + \ a musical piece. The multi-layered sound interaction design is based on radical\ + \ transformations of acoustic instruments performing works from the classical\ + \ music repertoire. This second \"étude\" is based entirely on interaction with\ + \ spectrum-complementary Phase Vocoders. The system is fed with variations of\ + \ a fixed musical motif, encouraging the system to recognise elements of the motive\ + \ and create its own set of different versions emulating a human musical compositional\ + \ process. 
Also, the Myo Armband is used in a creative way as an independent element\ + \ for dynamic control, using raw data extracted from the muscles’ tension.},\n\ + \ address = {Brisbane, Australia},\n author = {Juan Carlos Vasquez & Koray Tahiroğlu},\n\ \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Roberto Doati},\n month = {June},\n publisher\ - \ = {Casa Paganini},\n title = {MoPho – A Suite for a Mobile Phone Orchestra},\n\ - \ year = {2008}\n}\n" + \ for Musical Expression},\n editor = {Andrew Brown and Toby Gifford},\n month\ + \ = {June},\n publisher = {Griffith University},\n title = {NOISA Étude 2},\n\ + \ year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Roberto Doati + editor: Andrew Brown and Toby Gifford month: June - publisher: Casa Paganini - title: MoPho – A Suite for a Mobile Phone Orchestra - year: 2008 + publisher: Griffith University + title: NOISA Étude 2 + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Rust - abstract: 'Bad Mother / Good Mother is an audiovisual performance involving a projection, - a modified electronic breast pump as a sound generator, and a sound- reactive - LED pumping costume. The project has four songs that critically explore technologies - directed specifically at women like breast pumps and fertility extending treatments - such as egg-freezing (social freezing). Depending on the song, the breast pump - is either a solo instrument or part of an arrangement. The idea is to use workplace - lactation as a departure point to uncover a web of societal politics and pre-conceived - perceptions (pun intended) of ideal and non-ideal motherhood.' 
- address: 'Porto Alegre, Brazil' - author: Anna Rüst - bibtex: "@inproceedings{nime19-music-Rust,\n abstract = {Bad Mother / Good Mother\ - \ is an audiovisual performance involving a projection, a modified electronic\ - \ breast pump as a sound generator, and a sound- reactive LED pumping costume.\ - \ The project has four songs that critically explore technologies directed specifically\ - \ at women like breast pumps and fertility extending treatments such as egg-freezing\ - \ (social freezing). Depending on the song, the breast pump is either a solo instrument\ - \ or part of an arrangement. The idea is to use workplace lactation as a departure\ - \ point to uncover a web of societal politics and pre-conceived perceptions (pun\ - \ intended) of ideal and non-ideal motherhood.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Anna R{\\\"u}st},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n\ - \ month = {June},\n pages = {8--10},\n publisher = {UFRGS},\n title = {Bad Mother\ - \ / Good Mother - an audiovisual performance},\n url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n\ - \ year = {2019}\n}\n" + ID: nime2016-music-Andean2016 + abstract: 'Program notes: Hyvat matkustajat (2014) (Finnish for ''Dear Travellers'', + but also for ''The Good Travellers'') began life as a "sonic postcard from Finland", + using soundscape field recordings from around the country. This turned out to + be only the first stop on its journey, however. The original material was later + further developed as material for sonic exploration and spectral transformations, + with the external spaces of the original version taking a sharp digital turn inwards, + to chart internal spectral landscapes, together with the soundmarks and soundscapes + of its first incarnation. 
Everything in Hyvat matkustajat is made from the original + field recordings which first gave birth to the piece.' + address: 'Brisbane, Australia' + author: James Andean + bibtex: "@inproceedings{nime2016-music-Andean2016,\n abstract = {Program notes:\ + \ Hyvat matkustajat (2014) (Finnish for 'Dear Travellers', but also for 'The Good\ + \ Travellers') began life as a \"sonic postcard from Finland\", using soundscape\ + \ field recordings from around the country. This turned out to be only the first\ + \ stop on its journey, however. The original material was later further developed\ + \ as material for sonic exploration and spectral transformations, with the external\ + \ spaces of the original version taking a sharp digital turn inwards, to chart\ + \ internal spectral landscapes, together with the soundmarks and soundscapes of\ + \ its first incarnation. Everything in Hyvat matkustajat is made from the original\ + \ field recordings which first gave birth to the piece.},\n address = {Brisbane,\ + \ Australia},\n author = {James Andean},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Hyvät matkustajat},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 8--10 - publisher: UFRGS - title: Bad Mother / Good Mother - an audiovisual performance - url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf - year: 2019 + publisher: Griffith University + title: Hyvät matkustajat + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-DAlessandro - abstract: 'Borrowed voices is a performance featuring performative voice synthesis, - with two types of instruments: C-Voks and T-Voks. 
The voices are played a cappella - in a double choir of natural and synthetic voices. Performative singing synthesis - is a new paradigm in the already long history of artificial voices. The singing - voice is played like an instrument, allowing singing with the borrowed voice of - another. The relationship of embodiment between the singer''s gestures and the - vocal sound produced is broken. A voice is singing, with realism, expressivity - and musicality, but it is not the musician''s own voice, and a vocal apparatus - does not control it. The project focuses on control gestures: the music explores - vocal sounds produced by the vocal apparatus (the basic sound material), and “played” - by the natural voice, by free-hand Theremin-controlled gestures, and by writing - gestures on a graphic tablet. The same (types of) sounds but different gestures - give different musical “instruments” and expressive possibilities. Another interesting - aspect is the distance between synthetic voices and the player, the voice being - at the same time embodied (by the player gestures playing the instrument with - her/his body) and externalized (because the instrument is not her/his own voice): - two different voices sung/played by the same person.' - address: 'Porto Alegre, Brazil' - author: Christophe D'Alessandro and Xiao Xiao and Grégoire Locqueville and Boris - Doval - bibtex: "@inproceedings{nime19-music-DAlessandro,\n abstract = {Borrowed voices\ - \ is a performance featuring performative voice synthesis, with two types of instruments:\ - \ C-Voks and T-Voks. The voices are played a cappella in a double choir of natural\ - \ and synthetic voices. Performative singing synthesis is a new paradigm in the\ - \ already long history of artificial voices. The singing voice is played like\ - \ an instrument, allowing singing with the borrowed voice of another. The relationship\ - \ of embodiment between the singer's gestures and the vocal sound produced is\ - \ broken. 
A voice is singing, with realism, expressivity and musicality, but it\ - \ is not the musician's own voice, and a vocal apparatus does not control it.\ - \ The project focuses on control gestures: the music explores vocal sounds produced\ - \ by the vocal apparatus (the basic sound material), and “played” by the natural\ - \ voice, by free-hand Theremin-controlled gestures, and by writing gestures on\ - \ a graphic tablet. The same (types of) sounds but different gestures give different\ - \ musical “instruments” and expressive possibilities. Another interesting aspect\ - \ is the distance between synthetic voices and the player, the voice being at\ - \ the same time embodied (by the player gestures playing the instrument with her/his\ - \ body) and externalized (because the instrument is not her/his own voice): two\ - \ different voices sung/played by the same person.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Christophe D'Alessandro and Xiao Xiao and Grégoire Locqueville\ - \ and Boris Doval},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n month\ - \ = {June},\n pages = {11--14},\n publisher = {UFRGS},\n title = {Borrowed Voices},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_music001.pdf},\n year =\ - \ {2019}\n}\n" + ID: nime2016-music-Essl2016 + address: 'Brisbane, Australia' + author: Karheinz Essl + bibtex: "@inproceedings{nime2016-music-Essl2016,\n address = {Brisbane, Australia},\n\ + \ author = {Karheinz Essl},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Lexicon Sonate},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby 
Gifford month: June - pages: 11--14 - publisher: UFRGS - title: Borrowed Voices - url: http://www.nime.org/proceedings/2019/nime2019_music001.pdf - year: 2019 + publisher: Griffith University + title: Lexicon Sonate + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Dooley - abstract: 'colligation (to bring or tie together) is a physical performance work - for one performer that explores the idea of sculpting sound through gesture. Treating - sound as if it were a tangible object capable of being fashioned into new sonic - forms, "pieces" of sound are captured, shaped and sculpted by the performer''s - hand and arm gestures, appearing pliable as they are thrown around and transformed - into new sonic material. colligation uses two Thalmic Labs Myo armbands, one placed - on the left arm and the other on the right arm. The Myo Mapper [1] software is - used to transmit scaled data via OSC from the armbands to Pure Data. Positional - (yaw, pitch and roll) and electromyographic data (EMG) from the devices are mapped - to parameters controlling a hybrid synth created in Pure Data. The synth utilises - a combination of Phase Aligned Formant synthesis [2] and Frequency Modulation - synthesis [3] to allow a range of complex audio spectra to be explored. Pitch, - yaw and roll data from the left Myo are respectively mapped to the PAF synth''s - carrier frequency (ranging from 8.175-12543.9Hz), bandwidth and relative centre - frequency. Pitch, yaw and roll data from the right Myo are respectively mapped - to FM modulation frequency (relative to and ranging from 0.01-10 times the PAF - carrier frequency), modulation depth (relative to and ranging from 0.01-10 times - the PAF carrier frequency), and modulation wave shape (crossfading between sine, - triangle, square, rising sawtooth and impulse). 
Data from the left and right Myo''s - EMG sensors are mapped respectively to amplitude control of the left and right - audio channels, giving the performer control over the level and panning of the - audio within the stereo field. By employing both positional and bio data, an embodied - relationship between action and response is created; the gesture and the resulting - sonic transformation become inextricably entwined.' - address: 'Porto Alegre, Brazil' - author: James Dooley - bibtex: "@inproceedings{nime19-music-Dooley,\n abstract = {colligation (to bring\ - \ or tie together) is a physical performance work for one performer that explores\ - \ the idea of sculpting sound through gesture. Treating sound as if it were a\ - \ tangible object capable of being fashioned into new sonic forms, \"pieces\"\ - \ of sound are captured, shaped and sculpted by the performer's hand and arm gestures,\ - \ appearing pliable as they are thrown around and transformed into new sonic material.\ - \ colligation uses two Thalmic Labs Myo armbands, one placed on the left arm and\ - \ the other on the right arm. The Myo Mapper [1] software is used to transmit\ - \ scaled data via OSC from the armbands to Pure Data. Positional (yaw, pitch and\ - \ roll) and electromyographic data (EMG) from the devices are mapped to parameters\ - \ controlling a hybrid synth created in Pure Data. The synth utilises a combination\ - \ of Phase Aligned Formant synthesis [2] and Frequency Modulation synthesis [3]\ - \ to allow a range of complex audio spectra to be explored. Pitch, yaw and roll\ - \ data from the left Myo are respectively mapped to the PAF synth's carrier frequency\ - \ (ranging from 8.175-12543.9Hz), bandwidth and relative centre frequency. 
Pitch,\ - \ yaw and roll data from the right Myo are respectively mapped to FM modulation\ - \ frequency (relative to and ranging from 0.01-10 times the PAF carrier frequency),\ - \ modulation depth (relative to and ranging from 0.01-10 times the PAF carrier\ - \ frequency), and modulation wave shape (crossfading between sine, triangle, square,\ - \ rising sawtooth and impulse). Data from the left and right Myo's EMG sensors\ - \ are mapped respectively to amplitude control of the left and right audio channels,\ - \ giving the performer control over the level and panning of the audio within\ - \ the stereo field. By employing both positional and bio data, an embodied relationship\ - \ between action and response is created; the gesture and the resulting sonic\ - \ transformation become inextricably entwined.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {James Dooley},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n\ - \ month = {June},\n pages = {15-16},\n publisher = {UFRGS},\n title = {colligation},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_music003.pdf},\n year =\ - \ {2019}\n}\n" + ID: nime2016-music-Foran2016 + address: 'Brisbane, Australia' + author: Sean Foran + bibtex: "@inproceedings{nime2016-music-Foran2016,\n address = {Brisbane, Australia},\n\ + \ author = {Sean Foran},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Improvisations with the other},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 15-16 - publisher: UFRGS - title: colligation - url: 
http://www.nime.org/proceedings/2019/nime2019_music003.pdf - year: 2019 + publisher: Griffith University + title: Improvisations with the other + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Ahn - abstract: 'DIY Bionoise (2018) is an instrument in which the performer can generate - sound and noise, deriving from their own body. It contains a circuit that can - measure the bioelectricity from living beings to control the instrument by tactile - sense. This instrument has two functions – a modular synthesizer with an eight-step - sequencer and a bionoise control mode.' - address: 'Porto Alegre, Brazil' - author: Sabina Hyoju Ahn - bibtex: "@inproceedings{nime19-music-Ahn,\n abstract = {DIY Bionoise (2018) is an\ - \ instrument in which the performer can generate sound and noise, deriving from\ - \ their own body. It contains a circuit that can measure the bioelectricity from\ - \ living beings to control the instrument by tactile sense. This instrument has\ - \ two functions – a modular synthesizer with an eight-step sequencer and a bionoise\ - \ control mode.},\n address = {Porto Alegre, Brazil},\n author = {Sabina Hyoju\ - \ Ahn},\n booktitle = {Music Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Federico Visi},\n month = {June},\n\ - \ pages = {17--20},\n publisher = {UFRGS},\n title = {DIY Bionoise},\n url = {http://www.nime.org/proceedings/2019/nime2019_music004.pdf},\n\ - \ year = {2019}\n}\n" + ID: nime2016-music-Eigenfeldt2016 + address: 'Brisbane, Australia' + author: Arne Eigenfeldt + bibtex: "@inproceedings{nime2016-music-Eigenfeldt2016,\n address = {Brisbane, Australia},\n\ + \ author = {Arne Eigenfeldt},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Machine Songs},\n year = {2016}\n}\n" booktitle: 
Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 17--20 - publisher: UFRGS - title: DIY Bionoise - url: http://www.nime.org/proceedings/2019/nime2019_music004.pdf - year: 2019 + publisher: Griffith University + title: Machine Songs + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Tom - abstract: 'FlexSynth is an interpretation of The Sponge, a DMI embedded with sensors - to detect squeeze, flexion and torsion along with buttons to form an interface - using which musical sounds are generated and the sound is sculpted. The key idea - of the sponge is to harness the properties of a retractable, flexible object that - gives the performer wide range of multi- parametric controls with high resolution - in a maximized gesture space, considering its high manoeuvrability.' - address: 'Porto Alegre, Brazil' - author: Ajin Tom - bibtex: "@inproceedings{nime19-music-Tom,\n abstract = {FlexSynth is an interpretation\ - \ of The Sponge, a DMI embedded with sensors to detect squeeze, flexion and torsion\ - \ along with buttons to form an interface using which musical sounds are generated\ - \ and the sound is sculpted. 
The key idea of the sponge is to harness the properties\ - \ of a retractable, flexible object that gives the performer wide range of multi-\ - \ parametric controls with high resolution in a maximized gesture space, considering\ - \ its high manoeuvrability.},\n address = {Porto Alegre, Brazil},\n author = {Ajin\ - \ Tom},\n booktitle = {Music Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Federico Visi},\n month = {June},\n\ - \ pages = {21--24},\n publisher = {UFRGS},\n title = {FlexSynth – Blending Multi-Dimensional\ - \ Sonic Scenes},\n url = {http://www.nime.org/proceedings/2019/nime2019_music005.pdf},\n\ - \ year = {2019}\n}\n" + ID: nime2016-music-Sorensen2016 + address: 'Brisbane, Australia' + author: Andrew Sorensen + bibtex: "@inproceedings{nime2016-music-Sorensen2016,\n address = {Brisbane, Australia},\n\ + \ author = {Andrew Sorensen},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Barely a Piano},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 21--24 - publisher: UFRGS - title: FlexSynth – Blending Multi-Dimensional Sonic Scenes - url: http://www.nime.org/proceedings/2019/nime2019_music005.pdf - year: 2019 + publisher: Griffith University + title: Barely a Piano + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Tragtenberg - abstract: 'Gira is a music and dance performance with Giromin, a wearable wireless - digital instrument. With this Digital Dance and Music Instrument a gesture is - transformed into sound by motion sensors and an analog synthesizer. 
This transmutation - of languages allows dance to generate music, which stimulates a new dance in an - infinite feedback loop.' - address: 'Porto Alegre, Brazil' - author: 'João Tragtenberg, Filipe Calegario' - bibtex: "@inproceedings{nime19-music-Tragtenberg,\n abstract = {Gira is a music\ - \ and dance performance with Giromin, a wearable wireless digital instrument.\ - \ With this Digital Dance and Music Instrument a gesture is transformed into sound\ - \ by motion sensors and an analog synthesizer. This transmutation of languages\ - \ allows dance to generate music, which stimulates a new dance in an infinite\ - \ feedback loop.},\n address = {Porto Alegre, Brazil},\n author = {João Tragtenberg,\ - \ Filipe Calegario},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n month\ - \ = {June},\n pages = {25--28},\n publisher = {UFRGS},\n title = {Gira},\n url\ - \ = {http://www.nime.org/proceedings/2019/nime2019_music006.pdf},\n year = {2019}\n\ - }\n" + ID: nime2016-music-Berg2016 + address: 'Brisbane, Australia' + author: Henning Berg + bibtex: "@inproceedings{nime2016-music-Berg2016,\n address = {Brisbane, Australia},\n\ + \ author = {Henning Berg},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Improvising with Tango},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 25--28 - publisher: UFRGS - title: Gira - url: http://www.nime.org/proceedings/2019/nime2019_music006.pdf - year: 2019 + publisher: Griffith University + title: Improvising with Tango + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Cadiz - abstract: 'iCons 
is an interactive multi-channel music piece for live computer and - a gesture sensor system designed by the composer especially for this piece, called - AirTouch. Such system allows a much more musical approach to controlling sounds - than the computer keyboard or mouse. Using only movements of the hands in the - air it is possible to control most aspects of the music, such as sound shapes - in time, loops, space positioning, or create very rich spectral densities.' - address: 'Porto Alegre, Brazil' - author: Rodrigo F. Cádiz - bibtex: "@inproceedings{nime19-music-Cadiz,\n abstract = {iCons is an interactive\ - \ multi-channel music piece for live computer and a gesture sensor system designed\ - \ by the composer especially for this piece, called AirTouch. Such system allows\ - \ a much more musical approach to controlling sounds than the computer keyboard\ - \ or mouse. Using only movements of the hands in the air it is possible to control\ - \ most aspects of the music, such as sound shapes in time, loops, space positioning,\ - \ or create very rich spectral densities.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Rodrigo F. 
Cádiz},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n\ - \ month = {June},\n pages = {29--31},\n publisher = {UFRGS},\n title = {iCons},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_music007.pdf},\n year =\ - \ {2019}\n}\n" + ID: nime2016-music-James2016 + address: 'Brisbane, Australia' + author: Cat Hope & Stuart James + bibtex: "@inproceedings{nime2016-music-James2016,\n address = {Brisbane, Australia},\n\ + \ author = {Cat Hope & Stuart James},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Chunk},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 29--31 - publisher: UFRGS - title: iCons - url: http://www.nime.org/proceedings/2019/nime2019_music007.pdf - year: 2019 + publisher: Griffith University + title: Chunk + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Galvao - abstract: 'MusiCursor is an interactive multimedia performance/interface that reimagines - consumer-facing technologies as sites for creative expression. The piece draws - inspiration from established UI/UX design paradigms and the role of the user in - relation to these technologies. The performer assumes the role of a user installing - a musically-driven navigation interface on their computer. After an installation - prompt, they are guided through a series of demos, in which a software assistant - instructs the performer to accomplish several tasks. Through their playing, the - performer controls the cursor''s navigation and clicking behavior. 
In lieu of - a traditional score, the performer relies on text instructions and visual indicators - from a software assistant. The software tracks the progress of the user throughout - the piece and moves onto the next section only once a task has been completed. - Each of the main tasks takes place on the web, where the user navigates across - YouTube, Wikipedia, and Google Maps.' - address: 'Porto Alegre, Brazil' - author: Martim Galvão - bibtex: "@inproceedings{nime19-music-Galvao,\n abstract = {MusiCursor is an interactive\ - \ multimedia performance/interface that reimagines consumer-facing technologies\ - \ as sites for creative expression. The piece draws inspiration from established\ - \ UI/UX design paradigms and the role of the user in relation to these technologies.\ - \ The performer assumes the role of a user installing a musically-driven navigation\ - \ interface on their computer. After an installation prompt, they are guided through\ - \ a series of demos, in which a software assistant instructs the performer to\ - \ accomplish several tasks. Through their playing, the performer controls the\ - \ cursor's navigation and clicking behavior. In lieu of a traditional score, the\ - \ performer relies on text instructions and visual indicators from a software\ - \ assistant. The software tracks the progress of the user throughout the piece\ - \ and moves onto the next section only once a task has been completed. 
Each of\ - \ the main tasks takes place on the web, where the user navigates across YouTube,\ - \ Wikipedia, and Google Maps.},\n address = {Porto Alegre, Brazil},\n author =\ - \ {Martim Galvão},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n month\ - \ = {June},\n pages = {32--34},\n publisher = {UFRGS},\n title = {MusiCursor},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_music008.pdf},\n year =\ - \ {2019}\n}\n" + ID: nime2016-music-Beck2016 + address: 'Brisbane, Australia' + author: Stephen Beck + bibtex: "@inproceedings{nime2016-music-Beck2016,\n address = {Brisbane, Australia},\n\ + \ author = {Stephen Beck},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Quartet for Strings},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 32--34 - publisher: UFRGS - title: MusiCursor - url: http://www.nime.org/proceedings/2019/nime2019_music008.pdf - year: 2019 + publisher: Griffith University + title: Quartet for Strings + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Cullen - abstract: 'Pandemonium Trio is Barry Cullen, Miguel Ortiz and Paul Stapleton. Our - performance research trio has been set up to explore multiple instantiations of - custom-made electronic instruments through improvisation. We are particularly - interested in exploiting irregularities in the qualities of circuit components - (e.g. imprecise tolerances/values), and how this allows for the development of - stylistic differences across multiple instrument-performer configurations. 
We - are also interested in how skill, style and performance techniques are developed - in different ways on similar devices over extended periods of time, and how our - existing musical practices are reconfigured through such collaborative exchanges.' - address: 'Porto Alegre, Brazil' - author: Barry Cullen and Miguel Ortiz and Paul Stapleton - bibtex: "@inproceedings{nime19-music-Cullen,\n abstract = {Pandemonium Trio is Barry\ - \ Cullen, Miguel Ortiz and Paul Stapleton. Our performance research trio has been\ - \ set up to explore multiple instantiations of custom-made electronic instruments\ - \ through improvisation. We are particularly interested in exploiting irregularities\ - \ in the qualities of circuit components (e.g. imprecise tolerances/values), and\ - \ how this allows for the development of stylistic differences across multiple\ - \ instrument-performer configurations. We are also interested in how skill, style\ - \ and performance techniques are developed in different ways on similar devices\ - \ over extended periods of time, and how our existing musical practices are reconfigured\ - \ through such collaborative exchanges.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Barry Cullen and Miguel Ortiz and Paul Stapleton},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Federico Visi},\n month = {June},\n pages = {35--38},\n publisher\ - \ = {UFRGS},\n title = {Pandemonium Trio perform Drone and Drama v2},\n url =\ - \ {http://www.nime.org/proceedings/2019/nime2019_music009.pdf},\n year = {2019}\n\ - }\n" + ID: nime2016-music-Nakanishi2016 + address: 'Brisbane, Australia' + author: Yoshihito Nakanishi + bibtex: "@inproceedings{nime2016-music-Nakanishi2016,\n address = {Brisbane, Australia},\n\ + \ author = {Yoshihito Nakanishi},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew 
Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {TRI=NITRO},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 35--38 - publisher: UFRGS - title: Pandemonium Trio perform Drone and Drama v2 - url: http://www.nime.org/proceedings/2019/nime2019_music009.pdf - year: 2019 + publisher: Griffith University + title: TRI=NITRO + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-DallAra-Majek - abstract: 'Pythagorean Domino is an improvisatory composition composed in 2019 for - an augmented Theremin and a gyro-based gestural controller. This work aims to - integrate music concrete techniques and an algorithmic compositional approach - in the context of composition for gestural controllers. While music concrete compositional - practice brings out the concept of “composite object”—a sound object made up of - several distinct and successive elements [1]—in the piece, our algorithmic compositional - approach delivers an interpolation technique which entails gradual transformations - of the composite objects over time. Our challenge is to perform a chain of short - fragmental elements in tandem in the way to form a single musical unit, while - the algorithms for transformation are autonomously changing synthetic and control - parameter settings. This approach derives closely interconnected triangular interactions - between two performers and a computer.' - address: 'Porto Alegre, Brazil' - author: Ana Dall'Ara-Majek and Takuto Fukuda - bibtex: "@inproceedings{nime19-music-DallAra-Majek,\n abstract = {Pythagorean Domino\ - \ is an improvisatory composition composed in 2019 for an augmented Theremin and\ - \ a gyro-based gestural controller. 
This work aims to integrate music concrete\ - \ techniques and an algorithmic compositional approach in the context of composition\ - \ for gestural controllers. While music concrete compositional practice brings\ - \ out the concept of “composite object”—a sound object made up of several distinct\ - \ and successive elements [1]—in the piece, our algorithmic compositional approach\ - \ delivers an interpolation technique which entails gradual transformations of\ - \ the composite objects over time. Our challenge is to perform a chain of short\ - \ fragmental elements in tandem in the way to form a single musical unit, while\ - \ the algorithms for transformation are autonomously changing synthetic and control\ - \ parameter settings. This approach derives closely interconnected triangular\ - \ interactions between two performers and a computer.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Ana Dall'Ara-Majek and Takuto Fukuda},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Federico Visi},\n month = {June},\n pages = {39--42},\n publisher\ - \ = {UFRGS},\n title = {Pythagorean Domino},\n url = {http://www.nime.org/proceedings/2019/nime2019_music010.pdf},\n\ - \ year = {2019}\n}\n" + ID: nime2016-music-Allison2016 + address: 'Brisbane, Australia' + author: Jesse Allison + bibtex: "@inproceedings{nime2016-music-Allison2016,\n address = {Brisbane, Australia},\n\ + \ author = {Jesse Allison},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Causeway},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 39--42 - publisher: UFRGS - title: 
Pythagorean Domino - url: http://www.nime.org/proceedings/2019/nime2019_music010.pdf - year: 2019 + publisher: Griffith University + title: Causeway + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Nie - abstract: '“No one can step into the same river twice.” This instrument, named as - River, contains rules and randomness. What exactly is music and how does it connect - to and shape our form? Traditional musical instruments always have fixed physical - forms that require performers to adjust to them. How about making a musical instrument - that is more fluid and more expressive via deforming according to performers'' - movements? This was the question I attempted to explore when I started making - this project. For this project, I combine the movement of dancing with music to - present a fluid and dynamic shape of musical instrument. The fabric of this instrument - can be separated as an extension to wash. It''s portable, wireless, chargeable, - stable and beautiful. This musical instrument generates sound by detecting different - movements of the performer. It has four different modes selected by toggling the - switches on the instrument interface. Each mode has different movement detection - methods, generating various sound and music. Moreover, it can be played as a transmitting - Tambourine. As for the music in my performance, it''s all played by myself lively, - consisting of different sound triggered and changed by performers'' gestures and - melody composed myself. Like the name of this instrument River, the four toggles - and their detection methods and their corresponding generated sounds are intentionally - designed. From simple node, beat, loop, drum, to various node, melody, music, - the detection methods and their triggered sounds are becoming more and more complex - and various, developing like a journey of a river.' 
- address: 'Porto Alegre, Brazil' - author: Yiyao Nie - bibtex: "@inproceedings{nime19-music-Nie,\n abstract = {“No one can step into the\ - \ same river twice.” This instrument, named as River, contains rules and randomness.\ - \ What exactly is music and how does it connect to and shape our form? Traditional\ - \ musical instruments always have fixed physical forms that require performers\ - \ to adjust to them. How about making a musical instrument that is more fluid\ - \ and more expressive via deforming according to performers' movements? This was\ - \ the question I attempted to explore when I started making this project. For\ - \ this project, I combine the movement of dancing with music to present a fluid\ - \ and dynamic shape of musical instrument. The fabric of this instrument can be\ - \ separated as an extension to wash. It's portable, wireless, chargeable, stable\ - \ and beautiful. This musical instrument generates sound by detecting different\ - \ movements of the performer. It has four different modes selected by toggling\ - \ the switches on the instrument interface. Each mode has different movement detection\ - \ methods, generating various sound and music. Moreover, it can be played as a\ - \ transmitting Tambourine. As for the music in my performance, it's all played\ - \ by myself lively, consisting of different sound triggered and changed by performers'\ - \ gestures and melody composed myself. Like the name of this instrument River,\ - \ the four toggles and their detection methods and their corresponding generated\ - \ sounds are intentionally designed. 
From simple node, beat, loop, drum, to various\ - \ node, melody, music, the detection methods and their triggered sounds are becoming\ - \ more and more complex and various, developing like a journey of a river.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Yiyao Nie},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Federico Visi},\n month = {June},\n pages = {43--46},\n publisher\ - \ = {UFRGS},\n title = {River},\n url = {http://www.nime.org/proceedings/2019/nime2019_music011.pdf},\n\ - \ year = {2019}\n}\n" + ID: nime2016-music-Freeth2016 + address: 'Brisbane, Australia' + author: Ben Freeth + bibtex: "@inproceedings{nime2016-music-Freeth2016,\n address = {Brisbane, Australia},\n\ + \ author = {Ben Freeth},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Bio-vortex: Exploring Wet},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 43--46 - publisher: UFRGS - title: River - url: http://www.nime.org/proceedings/2019/nime2019_music011.pdf - year: 2019 + publisher: Griffith University + title: 'Bio-vortex: Exploring Wet' + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Park - abstract: 'Self-Built Instrument project is focused on sound performance with an - experi- mental instrument which is composed of strings and metallic sound box, - pro- ducing overtones, harmonics and feed- back. It is capable to play with different - sound colours : Resonances by cooper, bowing on strings, overtones and feed- back. - All of factors triggers each other''s sound. 
It is not a point to play a specific - tone or to make a musical harmony, because the instrument is not able to per- - fectly control. Playing this Instrument is a challenge to your capacity, such - as gestures and sonic phenomenon following sense and space. The artist composed - a piece and use few repertoire partly, however, mostly it is interesting to find - what kind of sound comes to nest in mesh. The Artist tried to get over typical - aesthetics of classical music, such as using precise pitches, melodies, and read - scores. Instead of that, her approach towards to discover unusual sound elements - which are considered as mistake in tradi- tional way. And play with them, for - instance, strings without tuning, hitting a stuffs, unorganized pitch, also so-called - clicker which happens unskilled.' - address: 'Porto Alegre, Brazil' - author: Jiyun Park - bibtex: "@inproceedings{nime19-music-Park,\n abstract = {Self-Built Instrument project\ - \ is focused on sound performance with an experi- mental instrument which is composed\ - \ of strings and metallic sound box, pro- ducing overtones, harmonics and feed-\ - \ back. It is capable to play with different sound colours : Resonances by cooper,\ - \ bowing on strings, overtones and feed- back. All of factors triggers each other's\ - \ sound. It is not a point to play a specific tone or to make a musical harmony,\ - \ because the instrument is not able to per- fectly control. Playing this Instrument\ - \ is a challenge to your capacity, such as gestures and sonic phenomenon following\ - \ sense and space. The artist composed a piece and use few repertoire partly,\ - \ however, mostly it is interesting to find what kind of sound comes to nest in\ - \ mesh. The Artist tried to get over typical aesthetics of classical music, such\ - \ as using precise pitches, melodies, and read scores. 
Instead of that, her approach\ - \ towards to discover unusual sound elements which are considered as mistake in\ - \ tradi- tional way. And play with them, for instance, strings without tuning,\ - \ hitting a stuffs, unorganized pitch, also so-called clicker which happens unskilled.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Jiyun Park},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Federico Visi},\n month = {June},\n pages = {47--49},\n publisher\ - \ = {UFRGS},\n title = {Self-Built Instrument (sound performance)},\n url = {http://www.nime.org/proceedings/2019/nime2019_music012.pdf},\n\ - \ year = {2019}\n}\n" + ID: nime2016-music-Hajdu2016 + address: 'Brisbane, Australia' + author: Georg Hajdu + bibtex: "@inproceedings{nime2016-music-Hajdu2016,\n address = {Brisbane, Australia},\n\ + \ author = {Georg Hajdu},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Just Her - Jester - Gesture},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 47--49 - publisher: UFRGS - title: Self-Built Instrument (sound performance) - url: http://www.nime.org/proceedings/2019/nime2019_music012.pdf - year: 2019 + publisher: Griffith University + title: Just Her - Jester - Gesture + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Martins - abstract: '"Tanto Mar" seeks to recreate the properties present in history between - Portugal and Brazil, embracing the idea of an aqueous sound that dances and moves - as much by cadence as by voluminous waves. 
The Atlantic Ocean, which separates - and unites the two countries, serves as an inspiration for this quadraphonic performance, - involving musical instruments and live electronics, where the sounds move through - the four speakers. Each speaker symbolizes the paths that the sea travels uninterruptedly, - in a unique dance of latitudes and longitudes. The intersection of sounds occurs - through processes of reverberations, spatializations, echoes, modulations and - grains that slowly form the sound material, composing, decomposing and manipulating - the sound waves. Sound characters such as wind, oars, storms, calm, among others, - are metaphorically evidenced through the sound material, creating a kind of rhythmic - movement of a caravel at sea. The sounds of "Tanto Mar" move between entropy and - chaos, between stillness and tsunami, between starboard and port, culminating - in a textural dance where the objective is to take the listener away from electronic - processing, and propose a dive in an intensified, attentive, deep and involving - listening. New musical possibilities can happen through the experimentation of - new routes, unusual routes and horizons not yet covered. The sea and its imprecise - distances represent permanent challenges. "Tanto Mar" seeks to revive the feeling - of the Portuguese poet Fernando Pessoa, when he wrote: "to dream even if it is - impossible".' - address: 'Porto Alegre, Brazil' - author: André L. Martins and Paulo Assis Barbosa - bibtex: "@inproceedings{nime19-music-Martins,\n abstract = {\"Tanto Mar\" seeks\ - \ to recreate the properties present in history between Portugal and Brazil, embracing\ - \ the idea of an aqueous sound that dances and moves as much by cadence as by\ - \ voluminous waves. 
The Atlantic Ocean, which separates and unites the two countries,\ - \ serves as an inspiration for this quadraphonic performance, involving musical\ - \ instruments and live electronics, where the sounds move through the four speakers.\ - \ Each speaker symbolizes the paths that the sea travels uninterruptedly, in a\ - \ unique dance of latitudes and longitudes. The intersection of sounds occurs\ - \ through processes of reverberations, spatializations, echoes, modulations and\ - \ grains that slowly form the sound material, composing, decomposing and manipulating\ - \ the sound waves. Sound characters such as wind, oars, storms, calm, among others,\ - \ are metaphorically evidenced through the sound material, creating a kind of\ - \ rhythmic movement of a caravel at sea. The sounds of \"Tanto Mar\" move between\ - \ entropy and chaos, between stillness and tsunami, between starboard and port,\ - \ culminating in a textural dance where the objective is to take the listener\ - \ away from electronic processing, and propose a dive in an intensified, attentive,\ - \ deep and involving listening. New musical possibilities can happen through the\ - \ experimentation of new routes, unusual routes and horizons not yet covered.\ - \ The sea and its imprecise distances represent permanent challenges. \"Tanto\ - \ Mar\" seeks to revive the feeling of the Portuguese poet Fernando Pessoa, when\ - \ he wrote: \"to dream even if it is impossible\".},\n address = {Porto Alegre,\ - \ Brazil},\n author = {André L. 
Martins and Paulo Assis Barbosa},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Federico Visi},\n month = {June},\n pages = {50--51},\n\ - \ publisher = {UFRGS},\n title = {Tanto Mar},\n url = {http://www.nime.org/proceedings/2019/nime2019_music013.pdf},\n\ - \ year = {2019}\n}\n" + ID: nime2016-music-Bussigel2016 + address: 'Brisbane, Australia' + author: Travis Thatcher & Peter Bussigel + bibtex: "@inproceedings{nime2016-music-Bussigel2016,\n address = {Brisbane, Australia},\n\ + \ author = {Travis Thatcher & Peter Bussigel},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Danger Music No. 85},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 50--51 - publisher: UFRGS - title: Tanto Mar - url: http://www.nime.org/proceedings/2019/nime2019_music013.pdf - year: 2019 + publisher: Griffith University + title: Danger Music No. 85 + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Carrascoza - abstract: '“Tempo Transversal – Flauta Expandida” aims to establish a computer- - controlled catalyzer, which simultaneously combines and extends the flutist body - actions, electronic sounds and the performative physical space. Some flute performance - fragments, captured in real time by video cameras, besides pre-recorded images, - built the visual projection. The flute player develops two pieces of experimental - music for flute and electronic. All these heterogeneous elements are interrelated - with each other in a network mediated by the computer. 
The result is a continuously - unfolded interactive performance, which intends to manipulate settings of space-time - perception. Brazilian contemporary repertoire for amplified bass flute and electronic - sounds establishes the proposal.' - address: 'Porto Alegre, Brazil' - author: Cassia Carrascoza and Felipe Merker Castellani - bibtex: "@inproceedings{nime19-music-Carrascoza,\n abstract = {“Tempo Transversal\ - \ – Flauta Expandida” aims to establish a computer- controlled catalyzer, which\ - \ simultaneously combines and extends the flutist body actions, electronic sounds\ - \ and the performative physical space. Some flute performance fragments, captured\ - \ in real time by video cameras, besides pre-recorded images, built the visual\ - \ projection. The flute player develops two pieces of experimental music for flute\ - \ and electronic. All these heterogeneous elements are interrelated with each\ - \ other in a network mediated by the computer. The result is a continuously unfolded\ - \ interactive performance, which intends to manipulate settings of space-time\ - \ perception. 
Brazilian contemporary repertoire for amplified bass flute and electronic\ - \ sounds establishes the proposal.},\n address = {Porto Alegre, Brazil},\n author\ - \ = {Cassia Carrascoza and Felipe Merker Castellani},\n booktitle = {Music Proceedings\ + ID: nime2016-music-Waerstad2016 + address: 'Brisbane, Australia' + author: Bernt Isak Wærstad + bibtex: "@inproceedings{nime2016-music-Waerstad2016,\n address = {Brisbane, Australia},\n\ + \ author = {Bernt Isak Wærstad},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Cosmo Collective},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Cosmo Collective + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Smallwood2016 + address: 'Brisbane, Australia' + author: Stephan Moore & Scott Smallwood + bibtex: "@inproceedings{nime2016-music-Smallwood2016,\n address = {Brisbane, Australia},\n\ + \ author = {Stephan Moore & Scott Smallwood},\n booktitle = {Music Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Federico Visi},\n month = {June},\n pages = {52--55},\n publisher\ - \ = {UFRGS},\n title = {Tempo Transversal – Flauta Expandida},\n url = {http://www.nime.org/proceedings/2019/nime2019_music014.pdf},\n\ - \ year = {2019}\n}\n" + \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Losperus},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 52--55 - publisher: UFRGS - title: Tempo 
Transversal – Flauta Expandida - url: http://www.nime.org/proceedings/2019/nime2019_music014.pdf - year: 2019 + publisher: Griffith University + title: Losperus + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Hamilton - abstract: 'Trois Machins de la Grâce Aimante is a composition intended to explore - Twenty-First century technological and musical paradigms. At its heart Trois Machins - is a string quartet fundamentally descended from a tradition that spans back to - the 18th century. As such, the work primarily explores timbral material based - around the sound of a bowed string, in this case realized using a set of physically - modeled bowed strings controlled by Coretet, a virtual reality string instrument - and networked performance environment. The composition - for four performers, - preferably from an existing string quartet ensemble - takes the form of three - distinct movements, each exploring different capabilities of the instrument itself - and requiring different forms of communication and collaboration between the four - performers.' - address: 'Porto Alegre, Brazil' - author: Rob Hamilton - bibtex: "@inproceedings{nime19-music-Hamilton,\n abstract = {Trois Machins de la\ - \ Grâce Aimante is a composition intended to explore Twenty-First century technological\ - \ and musical paradigms. At its heart Trois Machins is a string quartet fundamentally\ - \ descended from a tradition that spans back to the 18th century. 
As such, the\ - \ work primarily explores timbral material based around the sound of a bowed string,\ - \ in this case realized using a set of physically modeled bowed strings controlled\ - \ by Coretet, a virtual reality string instrument and networked performance environment.\ - \ The composition - for four performers, preferably from an existing string quartet\ - \ ensemble - takes the form of three distinct movements, each exploring different\ - \ capabilities of the instrument itself and requiring different forms of communication\ - \ and collaboration between the four performers.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Rob Hamilton},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Federico Visi},\n month = {June},\n pages = {56--59},\n publisher = {UFRGS},\n\ - \ title = {Trois Machins de la Grâce Aimante (Coretet no. 1)},\n url = {http://www.nime.org/proceedings/2019/nime2019_music015.pdf},\n\ - \ year = {2019}\n}\n" + ID: nime2016-music-Michalakos2016 + address: 'Brisbane, Australia' + author: Christos Michalakos + bibtex: "@inproceedings{nime2016-music-Michalakos2016,\n address = {Brisbane, Australia},\n\ + \ author = {Christos Michalakos},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Augmented Drum-Kit: Path Finder},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: 'Augmented Drum-Kit: Path Finder' + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Nakanishi2016 + address: 'Brisbane, Australia' + author: Yoshihito Nakanishi + bibtex: "@inproceedings{nime2016-music-Nakanishi2016,\n address 
= {Brisbane, Australia},\n\ + \ author = {Yoshihito Nakanishi},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Powder Box},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 56--59 - publisher: UFRGS - title: Trois Machins de la Grâce Aimante (Coretet no. 1) - url: http://www.nime.org/proceedings/2019/nime2019_music015.pdf - year: 2019 + publisher: Griffith University + title: Powder Box + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Stapleton - abstract: 'This work is a continuation of my research into developing new performance - ecosystems for improvisation. For this project I developed a new volatile assemblage, - aka VOLA. My self-designed musical instruments are shaped by my history as a performer - working in acoustic, mechanical, electronic and digital musics, blending and exploring - the boundaries and breaking points of these different domains. My instruments - support many of my existing techniques originally developed on more conventional - instruments, while also affording the development of extended and novel techniques - and performance strategies. In much of my work I am particularly focused on the - exploration of musical timbre and texture; however, for this project my attention - is also directed towards time, flow, pulse, duration, friction, disruption – in - short, qualitative rhythms and defamiliarisation.' 
- address: 'Porto Alegre, Brazil' - author: Paul Stapleton - bibtex: "@inproceedings{nime19-music-Stapleton,\n abstract = {This work is a continuation\ - \ of my research into developing new performance ecosystems for improvisation.\ - \ For this project I developed a new volatile assemblage, aka VOLA. My self-designed\ - \ musical instruments are shaped by my history as a performer working in acoustic,\ - \ mechanical, electronic and digital musics, blending and exploring the boundaries\ - \ and breaking points of these different domains. My instruments support many\ - \ of my existing techniques originally developed on more conventional instruments,\ - \ while also affording the development of extended and novel techniques and performance\ - \ strategies. In much of my work I am particularly focused on the exploration\ - \ of musical timbre and texture; however, for this project my attention is also\ - \ directed towards time, flow, pulse, duration, friction, disruption – in short,\ - \ qualitative rhythms and defamiliarisation.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Paul Stapleton},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Federico Visi},\n\ - \ month = {June},\n pages = {60--62},\n publisher = {UFRGS},\n title = {uncertain\ - \ rhythms},\n url = {http://www.nime.org/proceedings/2019/nime2019_music016.pdf},\n\ - \ year = {2019}\n}\n" + ID: nime2016-music-Tadokoro2016 + address: 'Brisbane, Australia' + author: Atsushi Tadokoro + bibtex: "@inproceedings{nime2016-music-Tadokoro2016,\n address = {Brisbane, Australia},\n\ + \ author = {Atsushi Tadokoro},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Membranes},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International 
Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 60--62 - publisher: UFRGS - title: uncertain rhythms - url: http://www.nime.org/proceedings/2019/nime2019_music016.pdf - year: 2019 + publisher: Griffith University + title: Membranes + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Erdem - abstract: 'What if a musician could step outside the familiar instrumental paradigm - and adopt a new embodied language for moving through sound with a dancer in true - partnership? And what if a dancer''s body could coalesce with a musician''s skills - and intuitively render movements into instrumental actions for active sound- making? - Vrengt is a multi-user instrument, specifically developed for music-dance performance, - with a particular focus on exploring the boundaries between standstill vs motion, - and silence vs sound. We sought for creating a work for one, hybrid corporeality, - in which a dancer and a musician would co-creatively and co- dependently interact - with their bodies and a machine. The challenge, then, was how could two performers - with distinct embodied skills unite in a continuous entanglement of intentions, - senses and experiences to control the same sonic and musical parameters? This - was conceptually different than they had done before in the context of interactive - dance performances.' - address: 'Porto Alegre, Brazil' - author: Çağri Erdem and Katja Henriksen Schia and Alexander Refsum Jensenius - bibtex: "@inproceedings{nime19-music-Erdem,\n abstract = {What if a musician could\ - \ step outside the familiar instrumental paradigm and adopt a new embodied language\ - \ for moving through sound with a dancer in true partnership? And what if a dancer's\ - \ body could coalesce with a musician's skills and intuitively render movements\ - \ into instrumental actions for active sound- making? 
Vrengt is a multi-user instrument,\ - \ specifically developed for music-dance performance, with a particular focus\ - \ on exploring the boundaries between standstill vs motion, and silence vs sound.\ - \ We sought for creating a work for one, hybrid corporeality, in which a dancer\ - \ and a musician would co-creatively and co- dependently interact with their bodies\ - \ and a machine. The challenge, then, was how could two performers with distinct\ - \ embodied skills unite in a continuous entanglement of intentions, senses and\ - \ experiences to control the same sonic and musical parameters? This was conceptually\ - \ different than they had done before in the context of interactive dance performances.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Çağri Erdem and Katja Henriksen\ - \ Schia and Alexander Refsum Jensenius},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Federico Visi},\n month = {June},\n pages = {63--65},\n publisher = {UFRGS},\n\ - \ title = {Vrengt: A Shared Body-Machine Instrument for Music-Dance Performance},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_music017.pdf},\n year =\ - \ {2019}\n}\n" + ID: nime2016-music-Lee2016 + address: 'Brisbane, Australia' + author: Sang Won Lee + bibtex: "@inproceedings{nime2016-music-Lee2016,\n address = {Brisbane, Australia},\n\ + \ author = {Sang Won Lee},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Live Writing: Gloomy Streets},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 63--65 - publisher: UFRGS - title: 'Vrengt: A Shared Body-Machine Instrument for 
Music-Dance Performance' - url: http://www.nime.org/proceedings/2019/nime2019_music017.pdf - year: 2019 + publisher: Griffith University + title: 'Live Writing: Gloomy Streets' + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-Barbosa - abstract: 'The key for a collective process of free improvisation is the interaction, - dependence and surrender of its parts, so the resulting sound flux is more than - the sum of each individual layer. The We Bass performance is an exploration of - the symbiosis of two performers playing the same instrument: Their actions have - direct consequence on the resulting sound, challenging the other player with instability - and interference. From the experiments of the English scientist Thomas Young (1773-1829) - on the phenomena of diffraction and interference of light waves, we observe that - interferences generated by overlapping light waves can have a character of annihilation, - when they are out of phase (destructive interference), or a reinforcing character - when in phase (constructive interference). From this reflection we try to deepen - the discussion about the interferences of the performers inputs involved in a - free improvisation session. We seek a model of connection between the performers - that promotes processes of creation in the free improvisation, exploring the dialectics - between reinforcement actions (processes of interaction that reinforces a certain - sound moment) and movement actions (that destabilizes and transforms the flow). - We Bass is a duo performance exploring the interactions between the musicians - playing one hybrid machine: an electric upright bass guitar with live electronics - processing. The instrument consists of an electric upright bass with movement - sensors and a live processing machine with a controller that interacts with the - sensors, changing some processing parameters and some controller mapping settings, - creating an instable ground for the musicians.' 
- address: 'Porto Alegre, Brazil' - author: Paulo Assis Barbosa and Miguel Antar - bibtex: "@inproceedings{nime19-music-Barbosa,\n abstract = {The key for a collective\ - \ process of free improvisation is the interaction, dependence and surrender of\ - \ its parts, so the resulting sound flux is more than the sum of each individual\ - \ layer. The We Bass performance is an exploration of the symbiosis of two performers\ - \ playing the same instrument: Their actions have direct consequence on the resulting\ - \ sound, challenging the other player with instability and interference. From\ - \ the experiments of the English scientist Thomas Young (1773-1829) on the phenomena\ - \ of diffraction and interference of light waves, we observe that interferences\ - \ generated by overlapping light waves can have a character of annihilation, when\ - \ they are out of phase (destructive interference), or a reinforcing character\ - \ when in phase (constructive interference). From this reflection we try to deepen\ - \ the discussion about the interferences of the performers inputs involved in\ - \ a free improvisation session. We seek a model of connection between the performers\ - \ that promotes processes of creation in the free improvisation, exploring the\ - \ dialectics between reinforcement actions (processes of interaction that reinforces\ - \ a certain sound moment) and movement actions (that destabilizes and transforms\ - \ the flow). We Bass is a duo performance exploring the interactions between the\ - \ musicians playing one hybrid machine: an electric upright bass guitar with live\ - \ electronics processing. 
The instrument consists of an electric upright bass\ - \ with movement sensors and a live processing machine with a controller that interacts\ - \ with the sensors, changing some processing parameters and some controller mapping\ - \ settings, creating an instable ground for the musicians.},\n address = {Porto\ - \ Alegre, Brazil},\n author = {Paulo Assis Barbosa and Miguel Antar},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Federico Visi},\n month = {June},\n pages = {66--67},\n\ - \ publisher = {UFRGS},\n title = {We Bass: inter(actions) on a hybrid instrument},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_music018.pdf},\n year =\ - \ {2019}\n}\n" + ID: nime2016-music-Sorensen2016 + address: 'Brisbane, Australia' + author: Andrew Sorensen + bibtex: "@inproceedings{nime2016-music-Sorensen2016,\n address = {Brisbane, Australia},\n\ + \ author = {Andrew Sorensen},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Splice},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 66--67 - publisher: UFRGS - title: 'We Bass: inter(actions) on a hybrid instrument' - url: http://www.nime.org/proceedings/2019/nime2019_music018.pdf - year: 2019 + publisher: Griffith University + title: Splice + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-introduction - address: 'Porto Alegre, Brazil' - author: Federico Visi and Rodrigo Schramm - bibtex: "@inproceedings{nime19-music-introduction,\n address = {Porto Alegre, Brazil},\n\ - \ author = {Federico Visi and Rodrigo Schramm},\n booktitle = {Music Proceedings\ + ID: 
nime2016-music-VandemastBell2016
  address: 'Brisbane, Australia'
  author: Paul Vandemast-Bell
  bibtex: "@inproceedings{nime2016-music-VandemastBell2016,\n address = {Brisbane,\
    \ Australia},\n author = {Paul Vandemast-Bell},\n booktitle = {Music Proceedings\
    \ of the International Conference on New Interfaces for Musical Expression},\n\
    \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\
    \ University},\n title = {Deformed Electronic Dance Music},\n year = {2016}\n\
  }\n"
  booktitle: Music Proceedings of the International Conference on New Interfaces for
    Musical Expression
  editor: Andrew Brown and Toby Gifford
  month: June
  publisher: Griffith University
  title: Deformed Electronic Dance Music
  year: 2016


- ENTRYTYPE: inproceedings
  ID: nime2016-music-Bussigel2016a
  address: 'Brisbane, Australia'
  author: Peter Bussigel
  bibtex: "@inproceedings{nime2016-music-Bussigel2016a,\n address = {Brisbane, Australia},\n\
    \ author = {Peter Bussigel},\n booktitle = {Music Proceedings of the International\
    \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew 
Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Ndial Performance},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Ndial Performance + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Lawson2016 + address: 'Brisbane, Australia' + author: Shawn Lawson + bibtex: "@inproceedings{nime2016-music-Lawson2016,\n address = {Brisbane, Australia},\n\ + \ author = {Shawn Lawson},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Owego System Trade Routes},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Owego System Trade Routes + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Tahiroglu2016 + address: 'Brisbane, Australia' + author: Koray Tahiroğlu + bibtex: "@inproceedings{nime2016-music-Tahiroglu2016,\n address = {Brisbane, Australia},\n\ + \ author = {Koray Tahiroğlu},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {KET Conversations},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 5 - publisher: UFRGS - title: NIME 2019 Concert Program - url: http://www.nime.org/proceedings/2019/nime2019_music0II.pdf - year: 2019 + 
publisher: Griffith University + title: KET Conversations + year: 2016 - ENTRYTYPE: inproceedings - ID: nime19-music-PC-members - address: 'Porto Alegre, Brazil' - bibtex: "@inproceedings{nime19-music-PC-members,\n address = {Porto Alegre, Brazil},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Federico Visi},\n month = {June},\n pages\ - \ = {6},\n publisher = {UFRGS},\n title = {NIME 2019 Program Committee Members},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_musicIII.pdf},\n year =\ - \ {2019}\n}\n" + ID: nime2016-music-NorahLorway2016 + address: 'Brisbane, Australia' + author: 'Norah Lorway, Kiran Bhumber & Nancy Lee' + bibtex: "@inproceedings{nime2016-music-NorahLorway2016,\n address = {Brisbane, Australia},\n\ + \ author = {Norah Lorway, Kiran Bhumber & Nancy Lee},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Hollow Vertices},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Federico Visi + editor: Andrew Brown and Toby Gifford month: June - pages: 6 - publisher: UFRGS - title: NIME 2019 Program Committee Members - url: http://www.nime.org/proceedings/2019/nime2019_musicIII.pdf - year: 2019 + publisher: Griffith University + title: Hollow Vertices + year: 2016 -- ENTRYTYPE: incollection - ID: nime2012-music-SchneiderbangerVierling2012 - abstract: "Program notes:\n\nThe piece Floating Points II is the result of the continued\ - \ work of the instrument makers and performers Michael Vierling and Matthias Schneiderbanger\ - \ within their self-developed system for collaborative performance including the\ - \ digital musical instruments Sensor-table and Chirotron. 
These instruments use\ - \ several sensors to transform the movements and gestures of their players into\ - \ data for sound generation, placement and movement of the sound in the room.\n\ - \nThe performances with Sensor-table and Chirotron emphasize the connection between\ - \ the performer and the digital musical instruments by using the basic noise of\ - \ the sensors as a notable characteristic in the sound synthesis to accentuate\ - \ the technical boundaries in an aesthetic way. The network is the core of the\ - \ common setup: It offers the ability to connect two physically separated instruments\ - \ into one common signal chain for sound processing and spatialisation.\n\nComposer(s)\ - \ Credits:\n\nInstrumentalist(s) Credits:\n\nMatthias Schneiderbanger (Chirotron),\ - \ Michael Vierling (Sensor-table)\n\nArtist(s) Biography:\n\nMatthias Schneiderbanger\ - \ (*1987) musician and sonic artist, studies since 2007 at the Karlsruhe University\ - \ of Music, Germany. Currently master student in music informatics with emphasis\ - \ in composition and sonic arts. Main foucs on development of digital musical\ - \ instruments, sound installations, contemporary music and live coding. Since\ - \ 2010, there is also an artistic collaboration with M. Vierling in the development\ - \ of digital musical instruments. Their instruments were presented 2011 at the\ - \ Music and Sonic Arts Symposium in Baden-Baden, performances include the Network\ - \ Music Festival in Birmingham and the ZKM in Karlsruhe. He is a member of the\ - \ laptop ensemble Beno\\^it and the Mandelbrots, performances along with numerous\ - \ other concerts at the BEAM Festival in Uxbridge, the SuperCollider Symposium\ - \ 2012 in London, the Laptops Meet Musicians Festival 2011 in Venice and the next-generation\ - \ 4.0 Festival at the ZKM in Karlsruhe. 
He is a member of Karlsruhe artist collective\ - \ nil.\n\nMichael Vierling studies music informatics master at the Karlsruhe University\ - \ of Music, Germany. He is drummer in several band projects and teaches a drumclass\ - \ at School for Music and Performing Arts in Bühl, Germany. His main interests\ - \ besides producing and performing music are sonic arts especially live- electronics,\ - \ creating digital music instruments and sound installations with use of sensor\ - \ technologies. Since 2010, there is an artistic collaboration with M. Schneiderbanger\ - \ in the development of digital musical instruments. Their instruments were presented\ - \ 2011 at the Music and Sonic Arts Symposium in Baden-Baden, performances include,\ - \ the NIME 2012 in Michigan and the Network Music Festival 2012 in Birmingham.\ - \ His works have been exhibited at various Festivals e.g. ton:art 2010/11, UND\ - \ 6/7, Sommerloch 2011, Beyond 3D-Festival in Karlsruhe and the Next Level Conference\ - \ in Cologne. He is a member of Karlsruhe artist collective nil.\n\nConcert Venue\ - \ and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' 
- author: Matthias Schneiderbanger and Michael Vierling - bibtex: "@incollection{nime2012-music-SchneiderbangerVierling2012,\n abstract =\ - \ {Program notes:\n\nThe piece \\emph{Floating Points II} is the result of the\ - \ continued work of the instrument makers and performers Michael Vierling and\ - \ Matthias Schneiderbanger within their self-developed system for collaborative\ - \ performance including the digital musical instruments Sensor-table and Chirotron.\ - \ These instruments use several sensors to transform the movements and gestures\ - \ of their players into data for sound generation, placement and movement of the\ - \ sound in the room.\n\nThe performances with \\emph{Sensor-table} and \\emph{Chirotron}\ - \ emphasize the connection between the performer and the digital musical instruments\ - \ by using the basic noise of the sensors as a notable characteristic in the sound\ - \ synthesis to accentuate the technical boundaries in an aesthetic way. The network\ - \ is the core of the common setup: It offers the ability to connect two physically\ - \ separated instruments into one common signal chain for sound processing and\ - \ spatialisation.\n\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nMatthias\ - \ Schneiderbanger (Chirotron), Michael Vierling (Sensor-table)\n\nArtist(s) Biography:\n\ - \nMatthias Schneiderbanger (*1987) musician and sonic artist, studies since 2007\ - \ at the Karlsruhe University of Music, Germany. Currently master student in music\ - \ informatics with emphasis in composition and sonic arts. Main foucs on development\ - \ of digital musical instruments, sound installations, contemporary music and\ - \ live coding. Since 2010, there is also an artistic collaboration with M. Vierling\ - \ in the development of digital musical instruments. 
Their instruments were presented\ - \ 2011 at the Music and Sonic Arts Symposium in Baden-Baden, performances include\ - \ the Network Music Festival in Birmingham and the ZKM in Karlsruhe. He is a member\ - \ of the laptop ensemble Beno\\^{i}t and the Mandelbrots, performances along with\ - \ numerous other concerts at the BEAM Festival in Uxbridge, the SuperCollider\ - \ Symposium 2012 in London, the Laptops Meet Musicians Festival 2011 in Venice\ - \ and the next-generation 4.0 Festival at the ZKM in Karlsruhe. He is a member\ - \ of Karlsruhe artist collective nil.\n\nMichael Vierling studies music informatics\ - \ master at the Karlsruhe University of Music, Germany. He is drummer in several\ - \ band projects and teaches a drumclass at School for Music and Performing Arts\ - \ in B\\\"{u}hl, Germany. His main interests besides producing and performing\ - \ music are sonic arts especially live- electronics, creating digital music instruments\ - \ and sound installations with use of sensor technologies. Since 2010, there is\ - \ an artistic collaboration with M. Schneiderbanger in the development of digital\ - \ musical instruments. Their instruments were presented 2011 at the Music and\ - \ Sonic Arts Symposium in Baden-Baden, performances include, the NIME 2012 in\ - \ Michigan and the Network Music Festival 2012 in Birmingham. His works have been\ - \ exhibited at various Festivals e.g. ton:art 2010/11, UND 6/7, Sommerloch 2011,\ - \ Beyond 3D-Festival in Karlsruhe and the Next Level Conference in Cologne. 
He\ - \ is a member of Karlsruhe artist collective nil.\n\nConcert Venue and Time: Lydia\ - \ Mendelssohn Theatre, Monday May 21, 9:00pm},\n address = {Ann Arbor, Michigan,\ - \ U.S.A.},\n author = {Matthias Schneiderbanger and Michael Vierling},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ - \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ - \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Floating Points II},\n year = {2012}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2016-music-Cyngler2016 + address: 'Brisbane, Australia' + author: Richard Cyngler + bibtex: "@inproceedings{nime2016-music-Cyngler2016,\n address = {Brisbane, Australia},\n\ + \ author = {Richard Cyngler},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Music for various groups of performers (after Lucier)},\n year = {2016}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - day: 21-23 - editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain - month: May - publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, - University of Michigan' - title: Floating Points II - year: 2012 + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Music for various groups of performers (after Lucier) + year: 2016 -- ENTRYTYPE: incollection - ID: nime2012-music-HelmuthDanard2012 - abstract: "Program notes:\n\nWater Birds is an interactive and collaborative composition\ - \ for clarinet, bass clarinet and computer. 
The sound of the clarinets is processed\ - \ live by spectral delays with MaxMSP and rtcmix~. Space structures the composition,\ - \ as the particular sound parameters initiated depend on the performer's location\ - \ on the stage. The development of the current version of the piece involved a\ - \ custom wireless infrared sensor network, which responds to the clarinetist's\ - \ movements. Currently the piece is performed without the sensor network, but\ - \ the strategy of that configuration still drives the composition. A score containing\ - \ five sound-generating ideas, consisting of musical fragments and a Zen poem,\ - \ allows the performer to improvise, creating his/her own sound pathway through\ - \ the piece. The pathway is reminiscent of the path of birds in the Zen poem,\ - \ Dogen's On the Nondependence of Mind, which reads: ``Water birds/going and coming/their\ - \ traces disappear/but they never/forget their path.''\n\nComposer(s) Credits:\n\ - \nMara Helmuth and Rebecca Danard\n\nInstrumentalist(s) Credits:\n\nRebecca Danard\ - \ (B♭ Clarinet, bass clarinet), Mara Helmuth (Computer)\n\nArtist(s) Biography:\n\ - \nMara Helmuth composes music often involving the computer, and creates multimedia\ - \ and software for composition and improvisation. Her recordings include Sounding\ - \ Out! (Everglade, forthcoming 2010), Sound Collaborations, (CDCM v.36, Centaur\ - \ CRC 2903), Implements of Actuation (Electronic Music Foundation EMF 023), and\ - \ Open Space CD 16, and her work has been performed internationally. She is on\ - \ the faculty of the College-Conservatory of Music, University of Cincinnati and\ - \ its Center for Computer Music's director. She holds a D.M.A. from Columbia University,\ - \ and earlier degrees from the University of Illinois, Urbana-Champaign. Her software\ - \ for composition and improvisation has involved granular synthesis, Internet2,\ - \ and RTcmix instruments. 
Her writings have appeared in Audible Traces, Analytical\ - \ Methods of Electroacoustic Music, the Journal of New Music Research and Perspectives\ - \ of New Music. Installations including Hidden Mountain (2007) were created for\ - \ the Sino-Nordic Arts Space in Beijing. She is a past president of the International\ - \ Computer Music Association.\n\nRebecca Danard: Performer, educator, scholar\ - \ and entrepreneur, Rebecca Danard holds a doctorate in clarinet performance at\ - \ the University of Cincinnati College-Conservatory of Music. Also an enthusiastic\ - \ teacher, Rebecca is Adjunct Faculty at Carleton University. She is currently\ - \ Artistic Director of the Ottawa New Music Creators: a collective of professional\ - \ composers and performers dedicated to bringing contemporary music to Canada's\ - \ capital. Rebecca's performance career centres on new and experimental music,\ - \ including interdisciplinary collaborations, working with new technology, organizing\ - \ events, and commissioning composers. She has worked with film makers, dancers,\ - \ choreographers, actors, poets, lighting designers and visuals artists as well\ - \ as performing musicians and composers. She has been invited to perform at festival\ - \ such as Music10 (Hindemith Centre, Switzerland), the Ottawa Chamber Music Festival,\ - \ the Ottawa Jazz Festival, the Bang on a Can Summer Festival, and Opera Theatre\ - \ and Music Festival of Lucca; at conferences such as Clarinetfest, CLIEC and\ - \ SEAMUS.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday May 21,\ - \ 9:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Mara Helmuth and Rebecca Danard - bibtex: "@incollection{nime2012-music-HelmuthDanard2012,\n abstract = {Program notes:\n\ - \nWater Birds is an interactive and collaborative composition for clarinet, bass\ - \ clarinet and computer. The sound of the clarinets is processed live by spectral\ - \ delays with MaxMSP and rtcmix~. 
Space structures the composition, as the particular\ - \ sound parameters initiated depend on the performer's location on the stage.\ - \ The development of the current version of the piece involved a custom wireless\ - \ infrared sensor network, which responds to the clarinetist's movements. Currently\ - \ the piece is performed without the sensor network, but the strategy of that\ - \ configuration still drives the composition. A score containing five sound-generating\ - \ ideas, consisting of musical fragments and a Zen poem, allows the performer\ - \ to improvise, creating his/her own sound pathway through the piece. The pathway\ - \ is reminiscent of the path of birds in the Zen poem, Dogen's \\emph{On the Nondependence\ - \ of Mind}, which reads: ``Water birds/going and coming/their traces disappear/but\ - \ they never/forget their path.''\n\nComposer(s) Credits:\n\nMara Helmuth and\ - \ Rebecca Danard\n\nInstrumentalist(s) Credits:\n\nRebecca Danard (B$\\flat$ Clarinet,\ - \ bass clarinet), Mara Helmuth (Computer)\n\nArtist(s) Biography:\n\nMara Helmuth\ - \ composes music often involving the computer, and creates multimedia and software\ - \ for composition and improvisation. Her recordings include Sounding Out! (Everglade,\ - \ forthcoming 2010), Sound Collaborations, (CDCM v.36, Centaur CRC 2903), Implements\ - \ of Actuation (Electronic Music Foundation EMF 023), and Open Space CD 16, and\ - \ her work has been performed internationally. She is on the faculty of the College-Conservatory\ - \ of Music, University of Cincinnati and its Center for Computer Music's director.\ - \ She holds a D.M.A. from Columbia University, and earlier degrees from the University\ - \ of Illinois, Urbana-Champaign. Her software for composition and improvisation\ - \ has involved granular synthesis, Internet2, and RTcmix instruments. 
Her writings\ - \ have appeared in \\emph{Audible Traces, Analytical Methods of Electroacoustic\ - \ Music, the Journal of New Music Research and Perspectives of New Music}. Installations\ - \ including \\emph{Hidden Mountain} (2007) were created for the Sino-Nordic Arts\ - \ Space in Beijing. She is a past president of the International Computer Music\ - \ Association.\n\nRebecca Danard: Performer, educator, scholar and entrepreneur,\ - \ \\textbf{Rebecca Danard} holds a doctorate in clarinet performance at the University\ - \ of Cincinnati College-Conservatory of Music. Also an enthusiastic teacher, Rebecca\ - \ is Adjunct Faculty at Carleton University. She is currently Artistic Director\ - \ of the Ottawa New Music Creators: a collective of professional composers and\ - \ performers dedicated to bringing contemporary music to Canada's capital. Rebecca's\ - \ performance career centres on new and experimental music, including interdisciplinary\ - \ collaborations, working with new technology, organizing events, and commissioning\ - \ composers. She has worked with film makers, dancers, choreographers, actors,\ - \ poets, lighting designers and visuals artists as well as performing musicians\ - \ and composers. 
She has been invited to perform at festival such as Music10 (Hindemith\ - \ Centre, Switzerland), the Ottawa Chamber Music Festival, the Ottawa Jazz Festival,\ - \ the Bang on a Can Summer Festival, and Opera Theatre and Music Festival of Lucca;\ - \ at conferences such as Clarinetfest, CLIEC and SEAMUS.\n\nConcert Venue and\ - \ Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm},\n address = {Ann Arbor,\ - \ Michigan, U.S.A.},\n author = {Mara Helmuth and Rebecca Danard},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ - \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ - \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Water Birds},\n year = {2012}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2016-music-Miller2016 + address: 'Brisbane, Australia' + author: Amy Alexander & Curt Miller + bibtex: "@inproceedings{nime2016-music-Miller2016,\n address = {Brisbane, Australia},\n\ + \ author = {Amy Alexander & Curt Miller},\n booktitle = {Music Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Composition #1 for PIGS (Percussive Image Gestural System)},\n\ + \ year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - day: 21-23 - editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain - month: May - publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, - University of Michigan' - title: Water Birds - year: 2012 + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: "Composition #1 for PIGS (Percussive Image Gestural 
System)" + year: 2016 -- ENTRYTYPE: incollection - ID: nime2012-music-TanakaParkinson2012 - abstract: "Program notes:\n\nAdam & Atau exploit a commonly available consumer electronics\ - \ device, a smartphone, as an expressive, gestural musical instrument. The device\ - \ is well known an iconic object of desire in our society of consumption, playing\ - \ music as a fixed commodity. The performers re-appropriate the mobile phone and\ - \ transform the consumer object into an instrument for concert performance. As\ - \ a duo, with one in each hand, they create a chamber music, 4-hands iPhone. The\ - \ accelerometers allow high precision capture of the performer's free space gestures.\ - \ This drives a granular synthesis patch in Pure Data (PD), where one patch becomes\ - \ the process by which a range of sounds from the natural world are stretched,\ - \ frozen, scattered, and restitched. The fact that all system components---sensor\ - \ input, signal processing and sound synthesis, and audio output, are embodied\ - \ in a single device make it a self-contained, expressive musical instrument.\n\ - \nComposer(s) Credits:\n\nAtau Tanaka and Adam Parkinson\n\nInstrumentalist(s)\ - \ Credits:\n\nArtist(s) Biography:\n\nAtau Tanaka's first inspirations came upon\ - \ meeting John Cage during his Norton Lectures at Harvard and would go to on re-create\ - \ Cage's Variations VII with Matt Wand and :zoviet*france:, performing it in Newcastle\ - \ upon Tyne, Berlin, and Paris. In the 90's he formed Sensorband with Zbigniew\ - \ Karkowski and Edwin van der Heide and then moved to Japan and came in contact\ - \ with the noise music scene, playing with Merzbow, Otomo, KK Null and others.\ - \ Atau has released solo, group, and compilation CD's on labels such as Sub Rosa,\ - \ Bip-hop, Caipirinha Music, Touch/Ash, Sonoris, Sirr-ecords. 
His work has been\ - \ presented at ICC in Japan, Ars Electronica, DEAF/V2, IRCAM, and Transmediale\ - \ in Europe, and Eyebeam, Wood Street Gallery, and SFMOMA in the U.S. He has been\ - \ artistic ambassador for Apple, researcher for Sony CSL, artistic co-director\ - \ of STEIM, and director of Culture Lab Newcastle. He is currently European Research\ - \ Council (ERC) fellow at Goldsmiths Digital Studios in London.\n\nAdam Parkinson\ - \ is an electronic musician based in Newcastle, England. He has recently completed\ - \ PhD, with much of his research looking at mobile music and performing with iPhones.He\ - \ has worked alongside various improvisers such as Rhodri Davies, Klaus Filip,\ - \ Robin Hayward and Dominic Lash, and has been involved in collaborations to create\ - \ sound installations with Kaffe Matthews and Caroline Bergvall. He also dabbles\ - \ in making dance music, and is trying to write a perfect pop song.\nAtau & Adam\ - \ have been performing as a duo since 2008: first as a laptop / biomuse duo then\ - \ in the current iPhone formation. 4-Hands iPhone has so far been performed across\ - \ Europe and North America including the FutureEverything Festival (Manchester),\ - \ Passos Manuel (Porto), Charm of Sound Festival (Helsinki), Electron Festival\ - \ (Geneva), Mois Multi (Quebec), Music With A View (New York).\n\nConcert Venue\ - \ and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Atau Tanaka and Adam Parkinson - bibtex: "@incollection{nime2012-music-TanakaParkinson2012,\n abstract = {Program\ - \ notes:\n\nAdam \\& Atau exploit a commonly available consumer electronics device,\ - \ a smartphone, as an expressive, gestural musical instrument. The device is well\ - \ known an iconic object of desire in our society of consumption, playing music\ - \ as a fixed commodity. 
The performers re-appropriate the mobile phone and transform\ - \ the consumer object into an instrument for concert performance. As a duo, with\ - \ one in each hand, they create a chamber music, 4-hands iPhone. The accelerometers\ - \ allow high precision capture of the performer's free space gestures. This drives\ - \ a granular synthesis patch in Pure Data (PD), where one patch becomes the process\ - \ by which a range of sounds from the natural world are stretched, frozen, scattered,\ - \ and restitched. The fact that all system components---sensor input, signal processing\ - \ and sound synthesis, and audio output, are embodied in a single device make\ - \ it a self-contained, expressive musical instrument.\n\nComposer(s) Credits:\n\ - \nAtau Tanaka and Adam Parkinson\n\nInstrumentalist(s) Credits:\n\nArtist(s) Biography:\n\ - \nAtau Tanaka's first inspirations came upon meeting John Cage during his Norton\ - \ Lectures at Harvard and would go to on re-create Cage's Variations VII with\ - \ Matt Wand and \\emph{:zoviet*france:}, performing it in Newcastle upon Tyne,\ - \ Berlin, and Paris. In the 90's he formed Sensorband with Zbigniew Karkowski\ - \ and Edwin van der Heide and then moved to Japan and came in contact with the\ - \ noise music scene, playing with Merzbow, Otomo, KK Null and others. Atau has\ - \ released solo, group, and compilation CD's on labels such as Sub Rosa, Bip-hop,\ - \ Caipirinha Music, Touch/Ash, Sonoris, Sirr-ecords. His work has been presented\ - \ at ICC in Japan, Ars Electronica, DEAF/V2, IRCAM, and Transmediale in Europe,\ - \ and Eyebeam, Wood Street Gallery, and SFMOMA in the U.S. He has been artistic\ - \ ambassador for Apple, researcher for Sony CSL, artistic co-director of STEIM,\ - \ and director of Culture Lab Newcastle. He is currently European Research Council\ - \ (ERC) fellow at Goldsmiths Digital Studios in London.\n\nAdam Parkinson is an\ - \ electronic musician based in Newcastle, England. 
He has recently completed PhD,\ - \ with much of his research looking at mobile music and performing with iPhones.He\ - \ has worked alongside various improvisers such as Rhodri Davies, Klaus Filip,\ - \ Robin Hayward and Dominic Lash, and has been involved in collaborations to create\ - \ sound installations with Kaffe Matthews and Caroline Bergvall. He also dabbles\ - \ in making dance music, and is trying to write a perfect pop song.\nAtau \\&\ - \ Adam have been performing as a duo since 2008: first as a laptop / biomuse duo\ - \ then in the current iPhone formation. 4-Hands iPhone has so far been performed\ - \ across Europe and North America including the FutureEverything Festival (Manchester),\ - \ Passos Manuel (Porto), Charm of Sound Festival (Helsinki), Electron Festival\ - \ (Geneva), Mois Multi (Quebec), Music With A View (New York).\n\nConcert Venue\ - \ and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm},\n address = {Ann\ - \ Arbor, Michigan, U.S.A.},\n author = {Atau Tanaka and Adam Parkinson},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ - \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ - \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {4 Hands iPhone},\n year = {2012}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2016-music-Stewart2016 + address: 'Brisbane, Australia' + author: Andrew Stewart + bibtex: "@inproceedings{nime2016-music-Stewart2016,\n address = {Brisbane, Australia},\n\ + \ author = {Andrew Stewart},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Ritual for Karlax},\n year = {2016}\n}\n" booktitle: Music Proceedings 
of the International Conference on New Interfaces for Musical Expression - day: 21-23 - editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain - month: May - publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, - University of Michigan' - title: 4 Hands iPhone - year: 2012 + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Ritual for Karlax + year: 2016 -- ENTRYTYPE: incollection - ID: nime2012-music-Applebaum2012 - abstract: "Program notes:\n\nAphasia (2010), for solo performer and two-channel\ - \ tape, was commissioned by the GRM, Paris and composed for virtuoso singer Nicholas\ - \ Isherwood. The tape, an idiosyncratic explosion of warped and mangled sounds,\ - \ is made up exclusively of vocal samples---all provided by Isherwood and subsequently\ - \ transformed digitally. Against the backdrop of this audio narrative, an elaborate\ - \ set of hand gestures are performed---an assiduously choreographed sign language\ - \ of sorts. Each gesture is fastidiously synchronized to the tape in tight rhythmic\ - \ coordination.\n\nIn the context of NIME, the piece is noteworthy for its deliberate---if\ - \ unintentionally political---contemporary technology abstinence. Ancillary questions\ - \ arise, such as ``What are the present limits of gesture control?''; ``Do these\ - \ limitations present unwelcome pressures on the boundaries of artistic imagination\ - \ and creative capacity?''; and ``How do we learn to recognize when it is artistically\ - \ prudent to eschew emerging tools?''\n\nComposer(s) Credits:\n\nMark Applebaum\n\ - \nInstrumentalist(s) Credits:\n\nMark Applebaum\n\nArtist(s) Biography:\n\nMark\ - \ Applebaum is Associate Professor of Composition at Stanford University where\ - \ he received the 2003 Walter J. Gores Award for excellence in teaching. He received\ - \ his Ph.D. 
in composition from the University of California at San Diego where\ - \ he studied principally with Brian Ferneyhough. His solo, chamber, choral, orchestral,\ - \ operatic, and electroacoustic work has been performed throughout the United\ - \ States, Europe, Africa, South America, and Asia. Many of his recent works are\ - \ characterized by challenges to the conventional boundaries of musical ontology.\n\ - \nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Mark Applebaum - bibtex: "@incollection{nime2012-music-Applebaum2012,\n abstract = {Program notes:\n\ - \n\\emph{Aphasia} (2010), for solo performer and two-channel tape, was commissioned\ - \ by the GRM, Paris and composed for virtuoso singer Nicholas Isherwood. The tape,\ - \ an idiosyncratic explosion of warped and mangled sounds, is made up exclusively\ - \ of vocal samples---all provided by Isherwood and subsequently transformed digitally.\ - \ Against the backdrop of this audio narrative, an elaborate set of hand gestures\ - \ are performed---an assiduously choreographed sign language of sorts. Each gesture\ - \ is fastidiously synchronized to the tape in tight rhythmic coordination.\n\n\ - In the context of NIME, the piece is noteworthy for its deliberate---if unintentionally\ - \ political---contemporary technology abstinence. Ancillary questions arise,\ - \ such as ``What are the present limits of gesture control?''; ``Do these limitations\ - \ present unwelcome pressures on the boundaries of artistic imagination and creative\ - \ capacity?''; and ``How do we learn to recognize when it is artistically prudent\ - \ to eschew emerging tools?''\n\nComposer(s) Credits:\n\nMark Applebaum\n\nInstrumentalist(s)\ - \ Credits:\n\nMark Applebaum\n\nArtist(s) Biography:\n\nMark Applebaum is Associate\ - \ Professor of Composition at Stanford University where he received the 2003 Walter\ - \ J. Gores Award for excellence in teaching. 
He received his Ph.D. in composition\ - \ from the University of California at San Diego where he studied principally\ - \ with Brian Ferneyhough. His solo, chamber, choral, orchestral, operatic, and\ - \ electroacoustic work has been performed throughout the United States, Europe,\ - \ Africa, South America, and Asia. Many of his recent works are characterized\ - \ by challenges to the conventional boundaries of musical ontology.\n\nConcert\ - \ Venue and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm},\n address\ - \ = {Ann Arbor, Michigan, U.S.A.},\n author = {Mark Applebaum},\n booktitle =\ - \ {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ - \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ - \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Aphasia},\n year = {2012}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2016-music-Kanga2016 + address: 'Brisbane, Australia' + author: Zubin Kanga + bibtex: "@inproceedings{nime2016-music-Kanga2016,\n address = {Brisbane, Australia},\n\ + \ author = {Zubin Kanga},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Morphosis for piano},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Morphosis for piano + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Vickery2016 + address: 'Brisbane, Australia' + author: Lindsay Vickery + bibtex: "@inproceedings{nime2016-music-Vickery2016,\n address = {Brisbane, Australia},\n\ + \ author = {Lindsay 
Vickery},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Detritus (2015)},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Detritus (2015) + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Romain2016 + address: 'Brisbane, Australia' + author: Michon Romain + bibtex: "@inproceedings{nime2016-music-Romain2016,\n address = {Brisbane, Australia},\n\ + \ author = {Michon Romain},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {A Minor Chord for BladeAxe},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: A Minor Chord for BladeAxe + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Carroll2016 + address: 'Brisbane, Australia' + author: Nicole Carroll + bibtex: "@inproceedings{nime2016-music-Carroll2016,\n address = {Brisbane, Australia},\n\ + \ author = {Nicole Carroll},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Everything In Its Place},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith 
University + title: Everything In Its Place + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Kim2016 + address: 'Brisbane, Australia' + author: Jonghyun Kim + bibtex: "@inproceedings{nime2016-music-Kim2016,\n address = {Brisbane, Australia},\n\ + \ author = {Jonghyun Kim},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Live Performance for Leappmotion},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - day: 21-23 - editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain - month: May - publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, - University of Michigan' - title: Aphasia - year: 2012 + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Live Performance for Leappmotion + year: 2016 -- ENTRYTYPE: incollection - ID: nime2012-music-LeeuwSchwarz2012 - abstract: "Program notes:\n\nTwo typical NIME related inventions meet in this performance.\ - \ IRCAM based Diemo Schwarz and HKU lecturer and Electrumpet player Hans Leeuw\ - \ met at STEIM in 2010. The extreme sound possibilities of the sensor driven Electrumpet\ - \ combine wonderfully with the corpus based techniques in CataRT. 
Both Diemo and\ - \ Hans play their self-invented instruments for a number of years in which they\ - \ have done several iterations / extensions and built a lot of performance experience.\ - \ This experience pays off in the expressive capabilities of both performers making\ - \ this a concert that goes far beyond an extended demonstration of new instruments.\ - \ In Violent Dreams, Hans's manipulated sounds are recorded in CataRT, from which\ - \ Diemo chooses specific sonic characters and evolutions via gestural controllers,\ - \ that are played back and transformed by CataRT, challenging Hans to come up\ - \ with more extreme sounds surpassing his own originals. Thus we get an interesting\ - \ and challenging improvisation battle between two players that both fully master\ - \ their instrument.\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nHans\ - \ Leeuw (Electrumpet), Diemo Schwarz (CataRT, gestural controllers)\n\nArtist(s)\ - \ Biography:\n\nHans Leeuw is recognized as one of Hollands top players composers\ - \ and bandleaders in the Jazz and improvised music scene even before he started\ - \ to use electronics and designed his own Electrumpet. He is most noted as the\ - \ bandleader of the Dutch formation Tetzepi, a 14 piece Big Band. Tetzepi exists\ - \ for 15 years and is structurally funded by Dutch government.\nNext to his activities\ - \ as a performer Hans teaches at the Utrecht school for the arts at the Music\ - \ Technology department and at the faculty Industrial Design of the Technical\ - \ University Eindhoven where he coaches projects on the design of new musical\ - \ instruments.\nIn 2008 he designed the Electrumpet, a hybrid electroacoustic\ - \ instrument that differs from similar design in that it takes the trumpet players\ - \ normal playing position and expression in account thus creating an instrument\ - \ that combines acoustic and electronic expression seamlessly. 
(see `the electrumpet,\ - \ additions and revisions')\n\nDiemo Schwarz is a researcher and developer at\ - \ Ircam, composer of electronic music, and musician on drums and laptop. He holds\ - \ a PhD in computer science applied to music for his research on corpus-based\ - \ concatenative musical sound synthesis.\nHis compositions and live performances,\ - \ under the name of his solo project Mean Time Between Failure, or improvising\ - \ with musicians such as Frédéric Blondy, Victoria Johnson, Pierre Alexandre Tremblay,\ - \ Etienne Brunet, Luka Juhart, George Lewis, Evan Parker, explore the possibilities\ - \ of corpus-based concatenative synthesis to re-contextualise any sound source\ - \ by rearranging sound units into a new musical framework using interactive navigation\ - \ through a sound space, controlled by gestural input devices.\nHis research work\ - \ includes improving interaction between musician and computer, and exploiting\ - \ large masses of sound for interactive real-time sound synthesis, collaborating\ - \ with composers such as Philippe Manoury, Dai Fujikura, Stefano Gervasoni, Aaron\ - \ Einbond, Sam Britton.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre,\ - \ Monday May 21, 9:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Hans Leeuw and Diemo Schwarz - bibtex: "@incollection{nime2012-music-LeeuwSchwarz2012,\n abstract = {Program notes:\n\ - \nTwo typical NIME related inventions meet in this performance. IRCAM based Diemo\ - \ Schwarz and HKU lecturer and Electrumpet player Hans Leeuw met at STEIM in 2010.\ - \ The extreme sound possibilities of the sensor driven Electrumpet combine wonderfully\ - \ with the corpus based techniques in CataRT. Both Diemo and Hans play their self-invented\ - \ instruments for a number of years in which they have done several iterations\ - \ / extensions and built a lot of performance experience. 
This experience pays\ - \ off in the expressive capabilities of both performers making this a concert\ - \ that goes far beyond an extended demonstration of new instruments. In \\emph{Violent\ - \ Dreams}, Hans's manipulated sounds are recorded in CataRT, from which Diemo\ - \ chooses specific sonic characters and evolutions via gestural controllers, that\ - \ are played back and transformed by CataRT, challenging Hans to come up with\ - \ more extreme sounds surpassing his own originals. Thus we get an interesting\ - \ and challenging improvisation battle between two players that both fully master\ - \ their instrument.\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nHans\ - \ Leeuw (Electrumpet), Diemo Schwarz (CataRT, gestural controllers)\n\nArtist(s)\ - \ Biography:\n\nHans Leeuw is recognized as one of Hollands top players composers\ - \ and bandleaders in the Jazz and improvised music scene even before he started\ - \ to use electronics and designed his own Electrumpet. He is most noted as the\ - \ bandleader of the Dutch formation Tetzepi, a 14 piece Big Band. Tetzepi exists\ - \ for 15 years and is structurally funded by Dutch government.\nNext to his activities\ - \ as a performer Hans teaches at the Utrecht school for the arts at the Music\ - \ Technology department and at the faculty Industrial Design of the Technical\ - \ University Eindhoven where he coaches projects on the design of new musical\ - \ instruments.\nIn 2008 he designed the Electrumpet, a hybrid electroacoustic\ - \ instrument that differs from similar design in that it takes the trumpet players\ - \ normal playing position and expression in account thus creating an instrument\ - \ that combines acoustic and electronic expression seamlessly. (see `the electrumpet,\ - \ additions and revisions')\n\nDiemo Schwarz is a researcher and developer at\ - \ Ircam, composer of electronic music, and musician on drums and laptop. 
He holds\ - \ a PhD in computer science applied to music for his research on corpus-based\ - \ concatenative musical sound synthesis.\nHis compositions and live performances,\ - \ under the name of his solo project Mean Time Between Failure, or improvising\ - \ with musicians such as Fr\\'{e}d\\'{e}ric Blondy, Victoria Johnson, Pierre Alexandre\ - \ Tremblay, Etienne Brunet, Luka Juhart, George Lewis, Evan Parker, explore the\ - \ possibilities of corpus-based concatenative synthesis to re-contextualise any\ - \ sound source by rearranging sound units into a new musical framework using interactive\ - \ navigation through a sound space, controlled by gestural input devices.\nHis\ - \ research work includes improving interaction between musician and computer,\ - \ and exploiting large masses of sound for interactive real-time sound synthesis,\ - \ collaborating with composers such as Philippe Manoury, Dai Fujikura, Stefano\ - \ Gervasoni, Aaron Einbond, Sam Britton.\n\nConcert Venue and Time: Lydia Mendelssohn\ - \ Theatre, Monday May 21, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n\ - \ author = {Hans Leeuw and Diemo Schwarz},\n booktitle = {Music Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n day\ - \ = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich and\ - \ Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering \\\ - & Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Violent Dreams},\n year = {2012}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2016-music-Beck2016 + address: 'Brisbane, Australia' + author: Stephen David Beck and Scott Smallwood + bibtex: "@inproceedings{nime2016-music-Beck2016,\n address = {Brisbane, Australia},\n\ + \ author = {Stephen David Beck and Scott Smallwood},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Brown 
and Toby Gifford},\n month = {June},\n publisher = {Griffith\
\ University},\n title = {\"From Uganda\" Mara Helmuth},\n year = {2016}\n}\n"
booktitle: Music Proceedings of the International Conference on New Interfaces for
Musical Expression
- day: 21-23
- editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain
- month: May
- publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology,
- University of Michigan'
- title: Violent Dreams
- year: 2012
+ editor: Andrew Brown and Toby Gifford
+ month: June
+ publisher: Griffith University
+ title: '"From Uganda" Mara Helmuth'
+ year: 2016


-- ENTRYTYPE: incollection
- ID: nime2012-music-PattonRovan2012
- abstract: "Program notes:\n\nthe ellipsis catalog features new instruments designed\
\ by Kevin Patton and Butch Rovan. Patton's instrument, the ``Fossil'', is a wireless\
\ sensor-based musical instrument that is played with the entire gestural range\
\ of arm movement as well as finger pressure. Four FSRs, a momentary button, and\
\ a two-dimensional accelerometer are used to control the parameters of a custom\
\ software environment built in Max/MSP/Jitter. It is part of a group of four\
\ hand-carved wood instruments called the Digital Poplar Consort.\n\nRovan's ``Banshee''\
\ is an analog electronic musical instrument. Modeled after a wind instrument,\
\ the design uses six finger pads to control the pitch of an array of interrelated\
\ oscillators, and a mouth sensor that allows the performer to control volume.\
\ The Banshee also features a tilt-sensor that allows motion to change the voicing\
\ circuitry and resulting timbre. 
Battery powered, the instrument can plug into\ - \ any amplifier or mixing console, much like an electric guitar.\nComposer(s)\ - \ Credits:\n\nInstrumentalist(s) Credits:\n\nKevin Patton (Fossil), Butch Rovan\ - \ (Banshee)\n\nArtist(s) Biography:\n\nKevin Patton is a musician, scholar, and\ - \ technologist active in the fields of experimental music and multimedia theatre\ - \ whose work explores the intersection of technology and performance. The design\ - \ of new musical instruments as well as interfaces and computer systems for analysis,\ - \ improvisation, installation and projection is at the center of his practice.\ - \ His work has been recognized for his collaboration with visual artist Maria\ - \ del Carmen Montoya with the 2009 Rhizome commission for the piece, I Sky You.\ - \ Patton is an assistant professor of music and performance technologies at Oregon\ - \ State University. He holds a Ph.D. and M.A. from Brown University in electronic\ - \ music and multimedia composition. He also holds a Master of Music degree in\ - \ jazz studies and composition from the University of North Texas. He was an Invited\ - \ Researcher at the Sorbonne, University of Paris IV, for the Spring of 2009.\n\ - \nButch Rovan is a media artist and performer at Brown University, where he co-directs\ - \ MEME (Multimedia & Electronic Music Experiments @ Brown). Rovan has received\ - \ prizes from the Bourges International Electroacoustic Music Competition, the\ - \ Berlin Transmediale International Media Arts Festival, and his work has appeared\ - \ throughout Europe and the U.S. Most recently his interactive installation Let\ - \ us imagine a straight line was featured in the 14th WRO International Media\ - \ Art Biennale, Poland.\nRovan's research includes new sensor hardware design\ - \ and wireless microcontroller systems. 
His research into gestural control and\ - \ interactivity has been featured in IRCAM's journal Resonance, Electronic Musician,\ - \ the Computer Music Journal, the Japanese magazine SoundArts, the CDROM Trends\ - \ in Gestural Control of Music (IRCAM 2000), and in the book Mapping Landscapes\ - \ for Performance as Research: Scholarly Acts and Creative Cartographies (Palgrave\ - \ Macmillan, 2009).\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday\ - \ May 21, 9:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Kevin Patton and Butch Rovan - bibtex: "@incollection{nime2012-music-PattonRovan2012,\n abstract = {Program notes:\n\ - \n\\emph{the ellipsis catalog} features new instruments designed by Kevin Patton\ - \ and Butch Rovan. Patton's instrument, the ``Fossil'', is a wireless sensor-based\ - \ musical instrument that is played with the entire gestural range of arm movement\ - \ as well as finger pressure. Four FSRs, a momentary button, and a two-dimensional\ - \ accelerometer are used to control the parameters of a custom software environment\ - \ built in Max/MSP/Jitter. It is part of a group of four hand-carved wood instruments\ - \ called the Digital Poplar Consort.\n\nRovan's ``Banshee'' is an analog electronic\ - \ musical instrument. Modeled after a wind instrument, the design uses six finger\ - \ pads to control the pitch of an array of interrelated oscillators, and a mouth\ - \ sensor that allows the performer to control volume. The Banshee also features\ - \ a tilt-sensor that allows motion to change the voicing circuitry and resulting\ - \ timbre. 
Battery powered, the instrument can plug into any amplifier or mixing\ - \ console, much like an electric guitar.\nComposer(s) Credits:\n\nInstrumentalist(s)\ - \ Credits:\n\nKevin Patton (Fossil), Butch Rovan (Banshee)\n\nArtist(s) Biography:\n\ - \nKevin Patton is a musician, scholar, and technologist active in the fields of\ - \ experimental music and multimedia theatre whose work explores the intersection\ - \ of technology and performance. The design of new musical instruments as well\ - \ as interfaces and computer systems for analysis, improvisation, installation\ - \ and projection is at the center of his practice. His work has been recognized\ - \ for his collaboration with visual artist Maria del Carmen Montoya with the 2009\ - \ Rhizome commission for the piece, \\emph{I Sky You}. Patton is an assistant\ - \ professor of music and performance technologies at Oregon State University.\ - \ He holds a Ph.D. and M.A. from Brown University in electronic music and multimedia\ - \ composition. He also holds a Master of Music degree in jazz studies and composition\ - \ from the University of North Texas. He was an Invited Researcher at the Sorbonne,\ - \ University of Paris IV, for the Spring of 2009.\n\nButch Rovan is a media artist\ - \ and performer at Brown University, where he co-directs MEME (Multimedia \\&\ - \ Electronic Music Experiments @ Brown). 
Rovan has received prizes from the Bourges\
\ International Electroacoustic Music Competition, the Berlin Transmediale International\
\ Media Arts Festival, and his work has appeared throughout Europe and the U.S.\
\ Most recently his interactive installation Let us imagine a straight line was\
\ featured in the 14th WRO International Media Art Biennale, Poland.\nRovan's\
\ research includes new sensor hardware design and wireless microcontroller systems.\
\ His research into gestural control and interactivity has been featured in IRCAM's\
\ journal Resonance, Electronic Musician, the \\emph{Computer Music Journal},\
\ the Japanese magazine \\emph{SoundArts}, the CDROM \\emph{Trends in Gestural\
\ Control of Music} (IRCAM 2000), and in the book \\emph{Mapping Landscapes for\
\ Performance as Research: Scholarly Acts and Creative Cartographies} (Palgrave\
\ Macmillan, 2009).\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday\
\ May 21, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Kevin\
\ Patton and Butch Rovan},\n booktitle = {Music Proceedings of the International\
\ Conference on New Interfaces for Musical Expression},\n day = {21-23},\n editor\
\ = {Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n\
\ month = {May},\n publisher = {Electrical Engineering \\& Computer Science and\
\ Performing Arts Technology, University of Michigan},\n title = {the ellipsis\
\ catalog},\n year = {2012}\n}\n"
+- ENTRYTYPE: inproceedings
+ ID: nime2016-music-Carey2016
+ address: 'Brisbane, Australia'
+ author: Zubin Kanga and Benjamin Carey
+ bibtex: "@inproceedings{nime2016-music-Carey2016,\n address = {Brisbane, Australia},\n\
+ \ author = {Zubin Kanga and Benjamin Carey},\n booktitle = {Music Proceedings of\
+ \ the International Conference on New Interfaces for Musical Expression},\n editor\
+ \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\
+ \ University},\n 
title = {Taking the Auspices},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - day: 21-23 - editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain - month: May - publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, - University of Michigan' - title: the ellipsis catalog - year: 2012 + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Taking the Auspices + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Drummond2016 + address: 'Brisbane, Australia' + author: Jon Drummond + bibtex: "@inproceedings{nime2016-music-Drummond2016,\n address = {Brisbane, Australia},\n\ + \ author = {Jon Drummond},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Light Traces},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Light Traces + year: 2016 -- ENTRYTYPE: incollection - ID: nime2012-music-Marier2012 - abstract: "Program notes:\n\nClarinet is the third piece in a series of monotimbral\ - \ works. Like its siblings Piano and Cymbal, it was inspired by the sound qualities\ - \ of an acoustic instrument. 
This minimalist and meditative piece is a structured\ - \ improvisation performed on the sponge, a musical interface designed by the composer.\ - \ The sponge is basically a cushion equipped with sensors (accelerometers, buttons\ - \ and force sensing resistors) which detect when it is squeezed, twisted or shaken.\ - \ Because the sponge evolves continuously, the piece exists in many versions.\ - \ Each new version drifts further away from the original compositional intentions\ - \ and the piece is slowly becoming less meditative. The latest version is subtitled\ - \ Albino Butterfly.\n\nComposer(s) Credits:\n\nMartin Marier\n\nInstrumentalist(s)\ - \ Credits:\n\nMartin Marier (Sponge)\n\nArtist(s) Biography:\n\nMartin Marier\ - \ is a composer and a performer who is mainly interested in live electronic music\ - \ using new interfaces. He is the inventor of the sponge, a cushion like musical\ - \ interface that he uses to perform his pieces. The main goal of this approach\ - \ is to establish a natural link between gesture and sound in electronic music.\ - \ He aims at improving the interaction with the audience and at making the process\ - \ of composing more playful. His research on the sponge is the topic of the doctorate\ - \ he is pursuing at the Universit de Montral under the supervision of Prof. Jean\ - \ Piché. He was also supervised by Dr. Garth Paine during an exchange at the\ - \ University of Western Sydney (Australia) in 2011.\nMartin has also composed\ - \ music for theatre, collaborating mostly with the Thé\\^atre I.N.K. company for\ - \ whom he wrote the music of three plays: \"L'effet du temps sur Matévina\" (2012),\ - \ \"Roche, papier... couteau\" (2007), \"La cadette\" (2006). He sometimes writes\ - \ music for films and collaborates with the film composer Benoit Charest. He is\ - \ one of the founders of Point d'écoute (PDE), a collective whose purpose is to\ - \ promote electroacoustic music. 
Along with his four colleagues of PDE, he produced\ - \ concerts in Montreal, New York and Sydney.\n\nConcert Venue and Time: Lydia\ - \ Mendelssohn Theatre, Monday May 21, 9:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Martin Marier - bibtex: "@incollection{nime2012-music-Marier2012,\n abstract = {Program notes:\n\ - \n\\emph{Clarinet} is the third piece in a series of monotimbral works. Like\ - \ its siblings \\emph{Piano} and \\emph{Cymbal}, it was inspired by the sound\ - \ qualities of an acoustic instrument. This minimalist and meditative piece is\ - \ a structured improvisation performed on the sponge, a musical interface designed\ - \ by the composer. The sponge is basically a cushion equipped with sensors (accelerometers,\ - \ buttons and force sensing resistors) which detect when it is squeezed, twisted\ - \ or shaken. Because the sponge evolves continuously, the piece exists in many\ - \ versions. Each new version drifts further away from the original compositional\ - \ intentions and the piece is slowly becoming less meditative. The latest version\ - \ is subtitled Albino Butterfly.\n\nComposer(s) Credits:\n\nMartin Marier\n\n\ - Instrumentalist(s) Credits:\n\nMartin Marier (Sponge)\n\nArtist(s) Biography:\n\ - \nMartin Marier is a composer and a performer who is mainly interested in live\ - \ electronic music using new interfaces. He is the inventor of the sponge, a\ - \ cushion like musical interface that he uses to perform his pieces. The main\ - \ goal of this approach is to establish a natural link between gesture and sound\ - \ in electronic music. He aims at improving the interaction with the audience\ - \ and at making the process of composing more playful. His research on the sponge\ - \ is the topic of the doctorate he is pursuing at the Universit de Montral under\ - \ the supervision of Prof. Jean Pich\\'{e}. He was also supervised by Dr. 
Garth\ - \ Paine during an exchange at the University of Western Sydney (Australia) in\ - \ 2011.\nMartin has also composed music for theatre, collaborating mostly with\ - \ the Th\\'{e}\\^{a}tre I.N.K. company for whom he wrote the music of three plays:\ - \ \"L'effet du temps sur Mat\\'{e}vina\" (2012), \"Roche, papier... couteau\"\ - \ (2007), \"La cadette\" (2006). He sometimes writes music for films and collaborates\ - \ with the film composer Benoit Charest. He is one of the founders of Point d'\\\ - '{e}coute (PDE), a collective whose purpose is to promote electroacoustic music.\ - \ Along with his four colleagues of PDE, he produced concerts in Montreal, New\ - \ York and Sydney.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday\ - \ May 21, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Martin\ - \ Marier},\n booktitle = {Music Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg Essl\ - \ and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n\ - \ publisher = {Electrical Engineering \\& Computer Science and Performing Arts\ - \ Technology, University of Michigan},\n title = {Clarinet (Albino Butterfly)},\n\ - \ year = {2012}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2016-music-Sniff2016 + address: 'Brisbane, Australia' + author: Dj Sniff + bibtex: "@inproceedings{nime2016-music-Sniff2016,\n address = {Brisbane, Australia},\n\ + \ author = {Dj Sniff},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Andrew Brown and Toby\ + \ Gifford},\n month = {June},\n publisher = {Griffith University},\n title = {Live\ + \ performance},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - day: 21-23 - editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain - month: May - 
publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, - University of Michigan' - title: Clarinet (Albino Butterfly) - year: 2012 + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Live performance + year: 2016 -- ENTRYTYPE: incollection - ID: nime2012-music-OuzounianKnappLyonDuBois2012 - abstract: "Program notes:\n\nMusic for Sleeping & Waking Minds (2011-2012) is an\ - \ overnight event in which four performers fall asleep while wearing custom designed\ - \ EEG sensors, which monitor their brainwave activity over the course of one night.\ - \ The data gathered from the EEG sensors is applied in real time to different\ - \ audio and image signal processing functions, resulting in a continuously evolving\ - \ multi-channel sound environment and visual projection. This material serves\ - \ as an audiovisual description of the individual and collective neurophysiological\ - \ state of the ensemble, with sounds and images evolving according to changes\ - \ in brainwave activity. Audiences, who are invited to bring anything that they\ - \ need to ensure comfortable sleep, can experience the work in different states\ - \ of attention: while alert and sleeping, resting and awakening.\n\nGascia Ouzounian\ - \ (composition & production), R. Benjamin Knapp (physiological interface & interaction\ - \ design), Eric Lyon (audio interface & interaction design), R. Luke DuBois (visual\ - \ interface & interaction design)Composer(s) Credits:\n\nGascia Ouzounian (composition\ - \ & production), R. Benjamin Knapp (physiological interface & interaction design),\ - \ Eric Lyon (audio interface & interaction design), R. Luke DuBois (visual interface\ - \ & interaction design)\n\nInstrumentalist(s) Credits:\n\nArtist(s) Biography:\n\ - \nGascia Ouzounian is a violinist, musicologist, and composer. 
She has performed\ - \ with such varied ensembles as Yo-Yo Ma and the Silk Road Ensemble at Carnegie\ - \ Hall, Bang On A Can All-Stars at the Mass MOCA, Sinfonia Toronto at the Toronto\ - \ Centre for the Arts, and the Theatre of Eternal Music Strings Ensemble at the\ - \ Dream House. Gascia's recent projects include two compositions that are intended\ - \ for overnight listening: EDEN EDEN EDEN with filmmaker Chloe Griffin, and Music\ - \ for Sleeping & Waking Minds with R. Benjamin Knapp, Eric Lyon and R. Luke DuBois.\ - \ In the latter, an ensemble of sleeping performers generates an audiovisual environment\ - \ through their neurophysiological activity over the course of one night. Gascia\ - \ teaches at Queen's University Belfast, where she leads the performance programme\ - \ in the School of Creative Arts. Her writings on experimental music and sound\ - \ art appear in numerous academic journals and the book Paul DeMarinis: Buried\ - \ in Noise.\n\nR. Benjamin Knapp is the founding director of the Institute for\ - \ Creativity, Arts, and Technology at Virginia Tech, where he is Professor of\ - \ Computer Science. Ben has led the Music, Sensors and Emotion (MuSE) group, whose\ - \ research focuses on the understanding and measurement of the physical gestures\ - \ and emotional states of musical performers and their audience. For over 20 years,\ - \ Ben has been researching and developing user-interfaces and software that enable\ - \ composers and performers to augment the physical control of a musical instrument\ - \ with more direct neural interaction. 
From the invention of the Biomuse with\ - \ Hugh Lusted in 1987 to the introduction of the concept of an Integral Music\ - \ Controller (a generic class of controllers that use the direct measurement of\ - \ motion and emotion to augment traditional methods of musical instrument control)\ - \ in 2005, Ben has focused on creating a user-aware interface based on the acquisition\ - \ and real-time analysis of biometric signals.\n\nEric Lyon is a composer and\ - \ computer music researcher. During the 1980s and 1990s, his fixed media computer\ - \ music focused on spectral and algorithmic processing of audio, with a tendency\ - \ toward extreme modifications of samples, variously sourced. From the early 1990s,\ - \ Lyon became involved with live computer music, performing solo, and in the Japanese\ - \ band Psychedelic Bumpo, with the Kyma system. Later in the 1990s, he gravitated\ - \ toward software-based live processing, starting to develop Max/MSP externals\ - \ in 1999. This work resulted in his LyonPotpourri collection of Max/MSP externals,\ - \ and the FFTease spectral package, developed in collaboration with Christopher\ - \ Penrose. In recent years, Lyon has focused on computer chamber music, which\ - \ integrates live, iterative DSP strategies into the creation of traditionally\ - \ notated instrumental scores. Other interests include spatial orchestration,\ - \ and articulated noise composition. Lyon teaches computer music in the School\ - \ of Music and Sonic Art at Queen's University Belfast.\n\nR. Luke DuBois is a\ - \ composer, artist, and performer who explores the temporal, verbal, and visual\ - \ structures of cultural and personal ephemera. 
He has collaborated on interactive\ - \ performance, installation, and music production work with many artists and organizations\ - \ including Toni Dove, Matthew Ritchie, Todd Reynolds, Jamie Jewett, Bora Yoon,\ - \ Michael Joaquin Grey, Elliott Sharp, Michael Gordon, Maya Lin, Bang on a Can,\ - \ Engine27, Harvestworks, and LEMUR, and was the director of the Princeton Laptop\ - \ Orchestra for its 2007 season. Stemming from his investigations of ``time-lapse\ - \ phonography,'' his recent work is a sonic and encyclopedic relative to time-lapse\ - \ photography. Just as a long camera exposure fuses motion into a single image,\ - \ his work reveals the average sonority, visual language, and vocabulary in music,\ - \ film, text, or cultural information. He teaches at the Brooklyn Experimental\ - \ Media Center at the Polytechnic Institute of NYU, and is on the Board of Directors\ - \ of Issue Project Room.\n\nConcert Venue and Time: North Quad Space 2435, Monday\ - \ May 21, 11:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Gascia Ouzounian and R.~Benjamin Knapp and Eric Lyon and R.~Luke DuBois - bibtex: "@incollection{nime2012-music-OuzounianKnappLyonDuBois2012,\n abstract =\ - \ {Program notes:\n\n\\emph{Music for Sleeping \\& Waking Minds} (2011-2012) is\ - \ an overnight event in which four performers fall asleep while wearing custom\ - \ designed EEG sensors, which monitor their brainwave activity over the course\ - \ of one night. The data gathered from the EEG sensors is applied in real time\ - \ to different audio and image signal processing functions, resulting in a continuously\ - \ evolving multi-channel sound environment and visual projection. This material\ - \ serves as an audiovisual description of the individual and collective neurophysiological\ - \ state of the ensemble, with sounds and images evolving according to changes\ - \ in brainwave activity. 
Audiences, who are invited to bring anything that they\ - \ need to ensure comfortable sleep, can experience the work in different states\ - \ of attention: while alert and sleeping, resting and awakening.\n\nGascia Ouzounian\ - \ (composition \\& production), R. Benjamin Knapp (physiological interface \\\ - & interaction design), Eric Lyon (audio interface \\& interaction design), R.\ - \ Luke DuBois (visual interface \\& interaction design)Composer(s) Credits:\n\n\ - Gascia Ouzounian (composition \\& production), R. Benjamin Knapp (physiological\ - \ interface \\& interaction design), Eric Lyon (audio interface \\& interaction\ - \ design), R. Luke DuBois (visual interface \\& interaction design)\n\nInstrumentalist(s)\ - \ Credits:\n\nArtist(s) Biography:\n\nGascia Ouzounian is a violinist, musicologist,\ - \ and composer. She has performed with such varied ensembles as Yo-Yo Ma and the\ - \ Silk Road Ensemble at Carnegie Hall, Bang On A Can All-Stars at the Mass MOCA,\ - \ Sinfonia Toronto at the Toronto Centre for the Arts, and the Theatre of Eternal\ - \ Music Strings Ensemble at the Dream House. Gascia's recent projects include\ - \ two compositions that are intended for overnight listening: EDEN EDEN EDEN with\ - \ filmmaker Chloe Griffin, and \\emph{Music for Sleeping \\& Waking Minds} with\ - \ R. Benjamin Knapp, Eric Lyon and R. Luke DuBois. In the latter, an ensemble\ - \ of sleeping performers generates an audiovisual environment through their neurophysiological\ - \ activity over the course of one night. Gascia teaches at Queen's University\ - \ Belfast, where she leads the performance programme in the School of Creative\ - \ Arts. Her writings on experimental music and sound art appear in numerous academic\ - \ journals and the book \\emph{Paul DeMarinis: Buried in Noise.}\n\nR. Benjamin\ - \ Knapp is the founding director of the Institute for Creativity, Arts, and Technology\ - \ at Virginia Tech, where he is Professor of Computer Science. 
Ben has led the\ - \ Music, Sensors and Emotion (MuSE) group, whose research focuses on the understanding\ - \ and measurement of the physical gestures and emotional states of musical performers\ - \ and their audience. For over 20 years, Ben has been researching and developing\ - \ user-interfaces and software that enable composers and performers to augment\ - \ the physical control of a musical instrument with more direct neural interaction.\ - \ From the invention of the Biomuse with Hugh Lusted in 1987 to the introduction\ - \ of the concept of an Integral Music Controller (a generic class of controllers\ - \ that use the direct measurement of motion and emotion to augment traditional\ - \ methods of musical instrument control) in 2005, Ben has focused on creating\ - \ a user-aware interface based on the acquisition and real-time analysis of biometric\ - \ signals.\n\nEric Lyon is a composer and computer music researcher. During the\ - \ 1980s and 1990s, his fixed media computer music focused on spectral and algorithmic\ - \ processing of audio, with a tendency toward extreme modifications of samples,\ - \ variously sourced. From the early 1990s, Lyon became involved with live computer\ - \ music, performing solo, and in the Japanese band Psychedelic Bumpo, with the\ - \ Kyma system. Later in the 1990s, he gravitated toward software-based live processing,\ - \ starting to develop Max/MSP externals in 1999. This work resulted in his LyonPotpourri\ - \ collection of Max/MSP externals, and the FFTease spectral package, developed\ - \ in collaboration with Christopher Penrose. In recent years, Lyon has focused\ - \ on computer chamber music, which integrates live, iterative DSP strategies into\ - \ the creation of traditionally notated instrumental scores. Other interests include\ - \ spatial orchestration, and articulated noise composition. Lyon teaches computer\ - \ music in the School of Music and Sonic Art at Queen's University Belfast.\n\n\ - R. 
Luke DuBois is a composer, artist, and performer who explores the temporal,\ - \ verbal, and visual structures of cultural and personal ephemera. He has collaborated\ - \ on interactive performance, installation, and music production work with many\ - \ artists and organizations including Toni Dove, Matthew Ritchie, Todd Reynolds,\ - \ Jamie Jewett, Bora Yoon, Michael Joaquin Grey, Elliott Sharp, Michael Gordon,\ - \ Maya Lin, Bang on a Can, Engine27, Harvestworks, and LEMUR, and was the director\ - \ of the Princeton Laptop Orchestra for its 2007 season. Stemming from his investigations\ - \ of ``time-lapse phonography,'' his recent work is a sonic and encyclopedic relative\ - \ to time-lapse photography. Just as a long camera exposure fuses motion into\ - \ a single image, his work reveals the average sonority, visual language, and\ - \ vocabulary in music, film, text, or cultural information. He teaches at the\ - \ Brooklyn Experimental Media Center at the Polytechnic Institute of NYU, and\ - \ is on the Board of Directors of Issue Project Room.\n\nConcert Venue and Time:\ - \ North Quad Space 2435, Monday May 21, 11:00pm},\n address = {Ann Arbor, Michigan,\ - \ U.S.A.},\n author = {Gascia Ouzounian and R.~Benjamin Knapp and Eric Lyon and\ - \ R.~Luke DuBois},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg\ - \ Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month\ - \ = {May},\n publisher = {Electrical Engineering \\& Computer Science and Performing\ - \ Arts Technology, University of Michigan},\n title = {Music for Sleeping \\&\ - \ Waking Minds},\n year = {2012}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2016-music-Paine2016 + abstract: 'About the performer: Garth is particularly fascinated with sound as an + experiential medium, both in musical performance and as an exhibitable object. 
+ This passion has led to several interactive responsive environments where the + inhabitant generates the sonic landscape through their presence and behaviour. + Garth has composed several music scores for dance generated through video tracking + of the choreography, and more recently using Bio-Sensing on the dancers body. + His immersive interactive environments have been exhibited in Australia, Europe, + Japan, USA, Canada, UK, Hong Kong and New Zealand. Garth Paine is internationally + regarded as an innovator in the field of interactivity in electronic music and + media arts (some papers here). He gained his PhD in interactive immersive environments + from the Royal Melbourne Institute of Technology, Australia in 2003, and completed + a Graduate Diploma in software engineering in the following year at Swinburne + University. All a long way from his Bachelor of classical Flute performance from + the conservatorium of Tasmania. Garth is Associate Professor in Digital Sound + and Interactive Media at the School of Arts Media + Engineering at Arizona State + University in the USA. His previous post was as Associate Professor of Sound Technologies + at the University of Western Sydney, where he established the Virtual, Interactive, + Performance Research environment (VIPRe) . He is often invited to run workshops + on interactivity for musical performance and commissioned to develop interactive + system for realtime musical composition for dance and theatre performances.' + address: 'Brisbane, Australia' + author: Garth Paine + bibtex: "@inproceedings{nime2016-music-Paine2016,\n abstract = {About the performer:\ + \ Garth is particularly fascinated with sound as an experiential medium, both\ + \ in musical performance and as an exhibitable object. This passion has led to\ + \ several interactive responsive environments where the inhabitant generates the\ + \ sonic landscape through their presence and behaviour. 
Garth has composed several\ + \ music scores for dance generated through video tracking of the choreography,\ + \ and more recently using Bio-Sensing on the dancers body. His immersive interactive\ + \ environments have been exhibited in Australia, Europe, Japan, USA, Canada, UK,\ + \ Hong Kong and New Zealand. Garth Paine is internationally regarded as an innovator\ + \ in the field of interactivity in electronic music and media arts (some papers\ + \ here). He gained his PhD in interactive immersive environments from the Royal\ + \ Melbourne Institute of Technology, Australia in 2003, and completed a Graduate\ + \ Diploma in software engineering in the following year at Swinburne University.\ + \ All a long way from his Bachelor of classical Flute performance from the conservatorium\ + \ of Tasmania. Garth is Associate Professor in Digital Sound and Interactive Media\ + \ at the School of Arts Media + Engineering at Arizona State University in the\ + \ USA. His previous post was as Associate Professor of Sound Technologies at the\ + \ University of Western Sydney, where he established the Virtual, Interactive,\ + \ Performance Research environment (VIPRe) . 
He is often invited to run workshops\ + \ on interactivity for musical performance and commissioned to develop interactive\ + \ system for realtime musical composition for dance and theatre performances.},\n\ + \ address = {Brisbane, Australia},\n author = {Garth Paine},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - day: 21-23 - editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain - month: May - publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, - University of Michigan' - title: Music for Sleeping & Waking Minds - year: 2012 + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + year: 2016 -- ENTRYTYPE: incollection - ID: nime2012-music-Bloland2012 - abstract: "Program notes:\n\nOf Dust and Sand uses the Electromagnetically-Prepared\ - \ Piano device, a rack of 12 electromagnets which is suspended over the strings\ - \ of a piano. Each electromagnet is sent an audio signal and in turn excites its\ - \ respective string, much like a stereo speaker made from piano strings. In this\ - \ piece a subset of the magnets remains active throughout, the performer physically\ - \ silencing the strings by pressing down with fingertips. Thus the instrument\ - \ becomes a kind of anti-piano---lifting a finger frees a string to vibrate, producing\ - \ sound. In addition, various items, such as paper and a plastic ruler, rest directly\ - \ on the strings further altering the timbre. 
Remember---everything you hear is\ - \ entirely acoustic.\n\nOf Dust and Sand is dedicated to The Kenners.\n\nComposer(s)\ - \ Credits:\n\nPer Bloland\n\nInstrumentalist(s) Credits:\n\nDaniel Graser (alto\ - \ saxophone), Veena Kulkarni (piano)\n\nArtist(s) Biography:\n\nPer Bloland is\ - \ a composer of acoustic and electroacoustic music whose works have been described\ - \ as having an ``incandescent effect'' with ``dangerous and luscious textures.''\ - \ His compositions range from short intimate solo pieces to works for large orchestra,\ - \ and incorporate video, dance, and custom built electronics. He has received\ - \ awards and recognition from organizations such as SEAMUS/ASCAP, Digital Art\ - \ Awards of Tokyo, ISCM, and SCI/ASCAP. He is currently a Visiting Assistant Professor\ - \ of Computer Music at the Oberlin College Conservatory, and serves as the founding\ - \ director of OINC, the Oberlin Improvisation and Newmusic Collective.\nFor more\ - \ information, please see: www.perbloland.com.\n\nDaniel Graser: Saxophonist Daniel\ - \ Graser is emerging as one of the most innovative performers and pedagogues of\ - \ his generation. A recent recipient of the Doctorate of Musical Arts from the\ - \ University of Michigan, Dan served as Teaching Assistant to legendary saxophone\ - \ pedagogue Donald Sinta for the past three years and joined the faculty of Oakland\ - \ University School of Music, Theater, and Dance in 2011. Previously, Dan earned\ - \ a Masters Degree from the University of Michigan in 2008 and Bachelors degrees\ - \ in music theory/history and saxophone performance as a student of Dr. Timothy\ - \ McAllister at the Crane School of Music in 2007. As an orchestral performer,\ - \ Dan has performed as principal saxophonist with the National Wind Ensemble in\ - \ Carnegie Hall under H. 
Robert Reynolds, the Detroit Symphony Orchestra under\ - \ Leonard Slatkin, The New World Symphony under Michael Tilson Thomas and John\ - \ Adams, the Ann Arbor Symphony under Arie Lipsky, the University of Michigan\ - \ Symphony Orchestra under Kenneth Kiesler, the Hot Springs Festival Orchestra\ - \ under Richard Rosenberg, and the Orchestra of Northern New York under Kenneth\ - \ Andrews. Dan was selected by the University of Michigan to be featured as a\ - \ recitalist at the Kennedy Center for the Performing Arts in Washington DC as\ - \ part of the Millenium Stage Series. Recent and forthcoming performances include\ - \ world premieres at the University of Michigan, orchestral performances with\ - \ the New World Symphony and the Detroit Symphony Orchestra as well as chamber\ - \ music performances at the Navy Band International Saxophone Symposium and the\ - \ 2012 North American Saxophone Association Biennial Conference\n\nVeena Kulkarni:\ - \ A regular performer in southeast Michigan, Veena Kulkarni teaches at the Faber\ - \ Piano Institute and Madonna University. Veena's performances have taken her\ - \ throughout the United States and beyond as both a soloist and collaborator.\ - \ In October, Veena won Best Liszt Interpretation in the 2011 Liszt-Garrison\ - \ International Piano Competition.\nVeena is the pianist for Eero Trio, whose\ - \ debut CD entitled Wolf Glen was released in 2010. Wolf Glen features the premiere\ - \ recording of Christopher Dietz's Fumeux fume, for clarinet, cello & piano. Veena\ - \ completed her doctorate in Piano Performance and Pedagogy under Logan Skelton\ - \ and John Ellis at the University of Michigan. Prior to that, she studied at\ - \ Indiana University with Emile Naoumoff and Professors Brancart, Auer, Gulli\ - \ and Tocco and at the Royal Academy of Music with Hamish Milne.\n\nConcert Venue\ - \ and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' 
- author: Per Bloland - bibtex: "@incollection{nime2012-music-Bloland2012,\n abstract = {Program notes:\n\ - \n\\emph{Of Dust and Sand} uses the Electromagnetically-Prepared Piano device,\ - \ a rack of 12 electromagnets which is suspended over the strings of a piano.\ - \ Each electromagnet is sent an audio signal and in turn excites its respective\ - \ string, much like a stereo speaker made from piano strings. In this piece a\ - \ subset of the magnets remains active throughout, the performer physically silencing\ - \ the strings by pressing down with fingertips. Thus the instrument becomes a\ - \ kind of anti-piano---lifting a finger frees a string to vibrate, producing sound.\ - \ In addition, various items, such as paper and a plastic ruler, rest directly\ - \ on the strings further altering the timbre. Remember---everything you hear is\ - \ entirely acoustic.\n\nOf Dust and Sand is dedicated to The Kenners.\n\nComposer(s)\ - \ Credits:\n\nPer Bloland\n\nInstrumentalist(s) Credits:\n\nDaniel Graser (alto\ - \ saxophone), Veena Kulkarni (piano)\n\nArtist(s) Biography:\n\nPer Bloland is\ - \ a composer of acoustic and electroacoustic music whose works have been described\ - \ as having an ``incandescent effect'' with ``dangerous and luscious textures.''\ - \ His compositions range from short intimate solo pieces to works for large orchestra,\ - \ and incorporate video, dance, and custom built electronics. He has received\ - \ awards and recognition from organizations such as SEAMUS/ASCAP, Digital Art\ - \ Awards of Tokyo, ISCM, and SCI/ASCAP. He is currently a Visiting Assistant Professor\ - \ of Computer Music at the Oberlin College Conservatory, and serves as the founding\ - \ director of OINC, the Oberlin Improvisation and Newmusic Collective.\nFor more\ - \ information, please see: www.perbloland.com.\n\nDaniel Graser: Saxophonist \\\ - textbf{Daniel Graser} is emerging as one of the most innovative performers and\ - \ pedagogues of his generation. 
A recent recipient of the Doctorate of Musical\ - \ Arts from the University of Michigan, Dan served as Teaching Assistant to legendary\ - \ saxophone pedagogue Donald Sinta for the past three years and joined the faculty\ - \ of Oakland University School of Music, Theater, and Dance in 2011. Previously,\ - \ Dan earned a Masters Degree from the University of Michigan in 2008 and Bachelors\ - \ degrees in music theory/history and saxophone performance as a student of Dr.\ - \ Timothy McAllister at the Crane School of Music in 2007. As an orchestral performer,\ - \ Dan has performed as principal saxophonist with the National Wind Ensemble in\ - \ Carnegie Hall under H. Robert Reynolds, the Detroit Symphony Orchestra under\ - \ Leonard Slatkin, The New World Symphony under Michael Tilson Thomas and John\ - \ Adams, the Ann Arbor Symphony under Arie Lipsky, the University of Michigan\ - \ Symphony Orchestra under Kenneth Kiesler, the Hot Springs Festival Orchestra\ - \ under Richard Rosenberg, and the Orchestra of Northern New York under Kenneth\ - \ Andrews. Dan was selected by the University of Michigan to be featured as a\ - \ recitalist at the Kennedy Center for the Performing Arts in Washington DC as\ - \ part of the Millenium Stage Series. Recent and forthcoming performances include\ - \ world premieres at the University of Michigan, orchestral performances with\ - \ the New World Symphony and the Detroit Symphony Orchestra as well as chamber\ - \ music performances at the Navy Band International Saxophone Symposium and the\ - \ 2012 North American Saxophone Association Biennial Conference\n\nVeena Kulkarni:\ - \ A regular performer in southeast Michigan, \\textbf{Veena Kulkarni} teaches\ - \ at the Faber Piano Institute and Madonna University. 
Veena's performances have\ - \ taken her throughout the United States and beyond as both a soloist and collaborator.\ - \ In October, Veena won Best Liszt Interpretation in the 2011 Liszt-Garrison\ - \ International Piano Competition.\nVeena is the pianist for Eero Trio, whose\ - \ debut CD entitled Wolf Glen was released in 2010. Wolf Glen features the premiere\ - \ recording of Christopher Dietz's Fumeux fume, for clarinet, cello \\& piano.\ - \ Veena completed her doctorate in Piano Performance and Pedagogy under Logan\ - \ Skelton and John Ellis at the University of Michigan. Prior to that, she studied\ - \ at Indiana University with Emile Naoumoff and Professors Brancart, Auer, Gulli\ - \ and Tocco and at the Royal Academy of Music with Hamish Milne.\n\nConcert Venue\ - \ and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm},\n address = {Ann\ - \ Arbor, Michigan, U.S.A.},\n author = {Per Bloland},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ - \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ - \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Of Dust and Sand},\n year = {2012}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2016-music-Bown2016 + address: 'Brisbane, Australia' + author: Oliver Bown + bibtex: "@inproceedings{nime2016-music-Bown2016,\n address = {Brisbane, Australia},\n\ + \ author = {Oliver Bown},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {DIADs - The Ford Transit…},\n year = {2016}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - day: 21-23 - editor: Georg Essl and 
Brent Gillespie and Michael Gurevich and Sile O'Modhrain - month: May - publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, - University of Michigan' - title: Of Dust and Sand - year: 2012 + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: DIADs - The Ford Transit… + year: 2016 -- ENTRYTYPE: incollection - ID: nime2012-music-Deal2012 - abstract: "Program notes:\n\nJack Walk explores notions of ecstatic energy, control\ - \ and release. The work begins with live and fixed percussion lines, re-processed\ - \ into a series of electronic representations of specified structure. This provides\ - \ a compositional framework that a percussionist interacts with, while in another\ - \ sonic layer, a laptop musician simultaneously samples and re-processes the live\ - \ percussion while channeling the audio back into the larger environment. A videographer\ - \ mixes imagery related to the original compositional notions of ecstatic control\ - \ and release. Layers of sonic material emanating from the drummer's kit blur\ - \ the virtual and real, while the music and imagery evoke imaginary lines tracing\ - \ physical and conceptual flows of energy. The trio of performers for the NIME\ - \ 2012 performance of Jack Walk (Deal, Drews, and Munson) comprise group known\ - \ as Big Robot, an Indianapolis-based computer-acoustic trio that creates live,\ - \ interactive, and media-enriched works.\n\nComposer(s) Credits:\n\nScott Deal\n\ - \nInstrumentalist(s) Credits:\n\nScott Deal (percussion), Michael Drews (audio\ - \ electronics), Jordan Munson (video)\n\nArtist(s) Biography:\n\nScott Deal has\ - \ premiered solo, chamber and mixed media works throughout North America Europe,\ - \ and Asia. 
An artist who ``displays phenomenal virtuosity'' (Artsfuse) and presents\ - \ a ``riveting performance'' (Sequenza 21), his recording of John Luther Adams's\ - \ Four Thousand Holes, for piano, percussion, and electronics was listed in New\ - \ Yorker Magazine's 2011 Top Ten Classical Recordings. In 2011, he and composer\ - \ Matthew Burtner were awarded the Internet2 IDEA Award for their co-creation\ - \ of Auksalaq, a telematic opera. Deal is Professor of Music and Director of\ - \ the Donald Louis Tavel Arts and Technology Research Center at Indiana University\ - \ Purdue University Indianapolis (IUPUI). He is the founder and director of the\ - \ Telematic Collective, a multi-disciplinary artistic research group comprised\ - \ of graduate students and professional collaborators. He also serves on the faculty\ - \ for the Summer Institute for Contemporary Performance Practice at the New England\ - \ Conservatory.\n\nMichael Drews is a composer, sound artist and computer musician.\ - \ His work explores unconventional narrative strategies created from transforming\ - \ contextual identity and the expressive power of cultural artifacts found in\ - \ particular sonic and visual materials. Present throughout Drews's work is an\ - \ interest in performance-based computer virtuosity and improvisational applications\ - \ of computer technology that expand traditional ideas of musical performance\ - \ and creativity. Drews is a member of computer-acoustic ensemble, Big Robot and\ - \ the experimental-electronica duo, Mana2. Performances of Drews's compositions\ - \ have been featured at SEAMUS, Cinesonika, Electronic Music Midwest, NYC Electronic\ - \ Music Festival, Studio 300, PASIC, Super Computing Global and IASPM-Canada.\ - \ Drews holds degrees from the University of Illinois at Urbana-Champaign (D.M.A.),\ - \ Cleveland State University (M.MUS.) and Kent State University (B.A.). 
He resides\ - \ with his family in Indianapolis and is Assistant Professor of Music at Indiana\ - \ University-Indianapolis (IUPUI). For more information: michaeldrews.org or Twitter.com/MICHAEL-DREWS\n\ - \nJordan Munson is a musician, composer, and multimedia artist. He is a Lecturer\ - \ in Music and Arts Technology, and an associate of the Donald Louis Tavel Arts\ - \ and Technology Research Center, both at Indiana University Purdue University\ - \ Indianapolis (IUPUI). His works for multimedia and music have been premiered\ - \ at institutions such as the University of Kentucky, the University of Alaska\ - \ at Fairbanks and the University of California San Diego. As a video artist,\ - \ he has shown work at New York City Electro-Acoustic Music Festival and SEAMUS.\ - \ Munson's experimental electronic efforts have resulted in performances alongside\ - \ artists such as Matmos, R. Luke DuBois and Bora Yoon. He is a member of the\ - \ computer-acoustic ensemble Big Robot, in which he work focuses on live experimental\ - \ percussion and electronics. Munson holds degrees from Indiana University in\ - \ Indianapolis (M.S.M.T.) 
and the University of Kentucky (B.M.).\n\nConcert Venue\ - \ and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm" +- ENTRYTYPE: inproceedings + ID: nime2016-music-Cantrell2016 + address: 'Brisbane, Australia' + author: Joe Cantrell + bibtex: "@inproceedings{nime2016-music-Cantrell2016,\n address = {Brisbane, Australia},\n\ + \ author = {Joe Cantrell},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Blackbox Loops},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Blackbox Loops + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Pfalz2016 + address: 'Brisbane, Australia' + author: Andrew Pfalz + bibtex: "@inproceedings{nime2016-music-Pfalz2016,\n address = {Brisbane, Australia},\n\ + \ author = {Andrew Pfalz},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Brown\ + \ and Toby Gifford},\n month = {June},\n publisher = {Griffith University},\n\ + \ title = {Of Grating Imperma},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Of Grating Imperma + year: 2016 + + +- ENTRYTYPE: inproceedings + ID: nime2016-music-Knowles2016 + address: 'Brisbane, Australia' + author: Donna Hewitt & Julian Knowles + bibtex: "@inproceedings{nime2016-music-Knowles2016,\n address = {Brisbane, Australia},\n\ + \ author = {Donna Hewitt & Julian Knowles},\n booktitle = {Music Proceedings of\ + \ the International Conference on New Interfaces for Musical 
Expression},\n editor\ + \ = {Andrew Brown and Toby Gifford},\n month = {June},\n publisher = {Griffith\ + \ University},\n title = {Doppelgänger³ Macrophonics²},\n year = {2016}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: Andrew Brown and Toby Gifford + month: June + publisher: Griffith University + title: Doppelgänger³ Macrophonics² + year: 2016 + + +- ENTRYTYPE: incollection + ID: nime2012-music-SchneiderbangerVierling2012 + abstract: "Program notes:\n\nThe piece Floating Points II is the result of the continued\ + \ work of the instrument makers and performers Michael Vierling and Matthias Schneiderbanger\ + \ within their self-developed system for collaborative performance including the\ + \ digital musical instruments Sensor-table and Chirotron. These instruments use\ + \ several sensors to transform the movements and gestures of their players into\ + \ data for sound generation, placement and movement of the sound in the room.\n\ + \nThe performances with Sensor-table and Chirotron emphasize the connection between\ + \ the performer and the digital musical instruments by using the basic noise of\ + \ the sensors as a notable characteristic in the sound synthesis to accentuate\ + \ the technical boundaries in an aesthetic way. The network is the core of the\ + \ common setup: It offers the ability to connect two physically separated instruments\ + \ into one common signal chain for sound processing and spatialisation.\n\nComposer(s)\ + \ Credits:\n\nInstrumentalist(s) Credits:\n\nMatthias Schneiderbanger (Chirotron),\ + \ Michael Vierling (Sensor-table)\n\nArtist(s) Biography:\n\nMatthias Schneiderbanger\ + \ (*1987) musician and sonic artist, studies since 2007 at the Karlsruhe University\ + \ of Music, Germany. Currently master student in music informatics with emphasis\ + \ in composition and sonic arts. 
Main focus on development of digital musical\
    \ instruments, sound installations, contemporary music and live coding. Since\
    \ 2010, there is also an artistic collaboration with M. Vierling in the development\
    \ of digital musical instruments. Their instruments were presented 2011 at the\
    \ Music and Sonic Arts Symposium in Baden-Baden, performances include the Network\
    \ Music Festival in Birmingham and the ZKM in Karlsruhe. He is a member of the\
    \ laptop ensemble Beno\\^it and the Mandelbrots, performances along with numerous\
    \ other concerts at the BEAM Festival in Uxbridge, the SuperCollider Symposium\
    \ 2012 in London, the Laptops Meet Musicians Festival 2011 in Venice and the next-generation\
    \ 4.0 Festival at the ZKM in Karlsruhe. He is a member of Karlsruhe artist collective\
    \ nil.\n\nMichael Vierling studies music informatics master at the Karlsruhe University\
    \ of Music, Germany. He is drummer in several band projects and teaches a drumclass\
    \ at School for Music and Performing Arts in Bühl, Germany. His main interests\
    \ besides producing and performing music are sonic arts especially live- electronics,\
    \ creating digital music instruments and sound installations with use of sensor\
    \ technologies. Since 2010, there is an artistic collaboration with M. Schneiderbanger\
    \ in the development of digital musical instruments. Their instruments were presented\
    \ 2011 at the Music and Sonic Arts Symposium in Baden-Baden, performances include,\
    \ the NIME 2012 in Michigan and the Network Music Festival 2012 in Birmingham.\
    \ His works have been exhibited at various Festivals e.g. ton:art 2010/11, UND\
    \ 6/7, Sommerloch 2011, Beyond 3D-Festival in Karlsruhe and the Next Level Conference\
    \ in Cologne. He is a member of Karlsruhe artist collective nil.\n\nConcert Venue\
    \ and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm"
  address: 'Ann Arbor, Michigan, U.S.A.'
- author: Scott Deal - bibtex: "@incollection{nime2012-music-Deal2012,\n abstract = {Program notes:\n\n\ - \\emph{Jack Walk} explores notions of ecstatic energy, control and release. The\ - \ work begins with live and fixed percussion lines, re-processed into a series\ - \ of electronic representations of specified structure. This provides a compositional\ - \ framework that a percussionist interacts with, while in another sonic layer,\ - \ a laptop musician simultaneously samples and re-processes the live percussion\ - \ while channeling the audio back into the larger environment. A videographer\ - \ mixes imagery related to the original compositional notions of ecstatic control\ - \ and release. Layers of sonic material emanating from the drummer's kit blur\ - \ the virtual and real, while the music and imagery evoke imaginary lines tracing\ - \ physical and conceptual flows of energy. The trio of performers for the NIME\ - \ 2012 performance of \\emph{Jack Walk} (Deal, Drews, and Munson) comprise group\ - \ known as Big Robot, an Indianapolis-based computer-acoustic trio that creates\ - \ live, interactive, and media-enriched works.\n\nComposer(s) Credits:\n\nScott\ - \ Deal\n\nInstrumentalist(s) Credits:\n\nScott Deal (percussion), Michael Drews\ - \ (audio electronics), Jordan Munson (video)\n\nArtist(s) Biography:\n\nScott\ - \ Deal has premiered solo, chamber and mixed media works throughout North America\ - \ Europe, and Asia. An artist who ``displays phenomenal virtuosity'' (Artsfuse)\ - \ and presents a ``riveting performance'' (Sequenza 21), his recording of John\ - \ Luther Adams's \\emph{Four Thousand Holes}, for piano, percussion, and electronics\ - \ was listed in New Yorker Magazine's 2011 Top Ten Classical Recordings. In 2011,\ - \ he and composer Matthew Burtner were awarded the Internet2 IDEA Award for their\ - \ co-creation of \\emph{Auksalaq}, a telematic opera. 
Deal is Professor of Music\ - \ and Director of the Donald Louis Tavel Arts and Technology Research Center at\ - \ Indiana University Purdue University Indianapolis (IUPUI). He is the founder\ - \ and director of the Telematic Collective, a multi-disciplinary artistic research\ - \ group comprised of graduate students and professional collaborators. He also\ - \ serves on the faculty for the Summer Institute for Contemporary Performance\ - \ Practice at the New England Conservatory.\n\nMichael Drews is a composer, sound\ - \ artist and computer musician. His work explores unconventional narrative strategies\ - \ created from transforming contextual identity and the expressive power of cultural\ - \ artifacts found in particular sonic and visual materials. Present throughout\ - \ Drews's work is an interest in performance-based computer virtuosity and improvisational\ - \ applications of computer technology that expand traditional ideas of musical\ - \ performance and creativity. Drews is a member of computer-acoustic ensemble,\ - \ Big Robot and the experimental-electronica duo, Mana2. Performances of Drews's\ - \ compositions have been featured at SEAMUS, Cinesonika, Electronic Music Midwest,\ - \ NYC Electronic Music Festival, Studio 300, PASIC, Super Computing Global and\ - \ IASPM-Canada. Drews holds degrees from the University of Illinois at Urbana-Champaign\ - \ (D.M.A.), Cleveland State University (M.MUS.) and Kent State University (B.A.).\ - \ He resides with his family in Indianapolis and is Assistant Professor of Music\ - \ at Indiana University-Indianapolis (IUPUI). For more information: michaeldrews.org\ - \ or Twitter.com/MICHAEL-DREWS\n\nJordan Munson is a musician, composer, and multimedia\ - \ artist. He is a Lecturer in Music and Arts Technology, and an associate of\ - \ the Donald Louis Tavel Arts and Technology Research Center, both at Indiana\ - \ University Purdue University Indianapolis (IUPUI). 
His works for multimedia\ - \ and music have been premiered at institutions such as the University of Kentucky,\ - \ the University of Alaska at Fairbanks and the University of California San Diego.\ - \ As a video artist, he has shown work at New York City Electro-Acoustic Music\ - \ Festival and SEAMUS. Munson's experimental electronic efforts have resulted\ - \ in performances alongside artists such as Matmos, R. Luke DuBois and Bora Yoon.\ - \ He is a member of the computer-acoustic ensemble Big Robot, in which he work\ - \ focuses on live experimental percussion and electronics. Munson holds degrees\ - \ from Indiana University in Indianapolis (M.S.M.T.) and the University of Kentucky\ - \ (B.M.).\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May 22,\ - \ 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Scott Deal},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie\ - \ and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + author: Matthias Schneiderbanger and Michael Vierling + bibtex: "@incollection{nime2012-music-SchneiderbangerVierling2012,\n abstract =\ + \ {Program notes:\n\nThe piece \\emph{Floating Points II} is the result of the\ + \ continued work of the instrument makers and performers Michael Vierling and\ + \ Matthias Schneiderbanger within their self-developed system for collaborative\ + \ performance including the digital musical instruments Sensor-table and Chirotron.\ + \ These instruments use several sensors to transform the movements and gestures\ + \ of their players into data for sound generation, placement and movement of the\ + \ sound in the room.\n\nThe performances with \\emph{Sensor-table} and \\emph{Chirotron}\ + \ emphasize the connection between the performer and the digital musical instruments\ + \ by using the basic noise of the sensors as 
a notable characteristic in the sound\ + \ synthesis to accentuate the technical boundaries in an aesthetic way. The network\ + \ is the core of the common setup: It offers the ability to connect two physically\ + \ separated instruments into one common signal chain for sound processing and\ + \ spatialisation.\n\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nMatthias\ + \ Schneiderbanger (Chirotron), Michael Vierling (Sensor-table)\n\nArtist(s) Biography:\n\ + \nMatthias Schneiderbanger (*1987) musician and sonic artist, studies since 2007\ + \ at the Karlsruhe University of Music, Germany. Currently master student in music\ + \ informatics with emphasis in composition and sonic arts. Main foucs on development\ + \ of digital musical instruments, sound installations, contemporary music and\ + \ live coding. Since 2010, there is also an artistic collaboration with M. Vierling\ + \ in the development of digital musical instruments. Their instruments were presented\ + \ 2011 at the Music and Sonic Arts Symposium in Baden-Baden, performances include\ + \ the Network Music Festival in Birmingham and the ZKM in Karlsruhe. He is a member\ + \ of the laptop ensemble Beno\\^{i}t and the Mandelbrots, performances along with\ + \ numerous other concerts at the BEAM Festival in Uxbridge, the SuperCollider\ + \ Symposium 2012 in London, the Laptops Meet Musicians Festival 2011 in Venice\ + \ and the next-generation 4.0 Festival at the ZKM in Karlsruhe. He is a member\ + \ of Karlsruhe artist collective nil.\n\nMichael Vierling studies music informatics\ + \ master at the Karlsruhe University of Music, Germany. He is drummer in several\ + \ band projects and teaches a drumclass at School for Music and Performing Arts\ + \ in B\\\"{u}hl, Germany. His main interests besides producing and performing\ + \ music are sonic arts especially live- electronics, creating digital music instruments\ + \ and sound installations with use of sensor technologies. 
Since 2010, there is\ + \ an artistic collaboration with M. Schneiderbanger in the development of digital\ + \ musical instruments. Their instruments were presented 2011 at the Music and\ + \ Sonic Arts Symposium in Baden-Baden, performances include, the NIME 2012 in\ + \ Michigan and the Network Music Festival 2012 in Birmingham. His works have been\ + \ exhibited at various Festivals e.g. ton:art 2010/11, UND 6/7, Sommerloch 2011,\ + \ Beyond 3D-Festival in Karlsruhe and the Next Level Conference in Cologne. He\ + \ is a member of Karlsruhe artist collective nil.\n\nConcert Venue and Time: Lydia\ + \ Mendelssohn Theatre, Monday May 21, 9:00pm},\n address = {Ann Arbor, Michigan,\ + \ U.S.A.},\n author = {Matthias Schneiderbanger and Michael Vierling},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ + \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Jack Walk},\n year = {2012}\n}\n" + \ of Michigan},\n title = {Floating Points II},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -9115,73 +8790,108 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Jack Walk + title: Floating Points II year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Morales-Manzanares2012 - abstract: "Program notes:\n\nDesamor I is inspired in a model of meditation where\ - \ primordial awareness or naturally occurring timeless awareness is seen as a\ - \ result of a conversation with my wife Alejandra. This work is for piano, computer\ - \ and two Wii controllers attached to my forearms. The output is 4 channels. 
The\ - \ gestures of the pianist (movement, timber and dynamics) are captured in real\ - \ time via 2 microphones and a set of 2 Wii controllers. The computer languages\ - \ involved in the development of the project were: Escamol, a prolog environment\ - \ for algorithmic composition designed by the composer, and SuperCollider. In\ - \ this piece I share my experience as a performer-composer within a multi-platform\ - \ programming environments involving signal processing and machine learning techniques.\n\ - \nComposer(s) Credits:\n\nRoberto Morales-Manzanares\n\nInstrumentalist(s) Credits:\n\ - \nRoberto Morales-Manzanares (piano, percussion and electronics)\n\nArtist(s)\ - \ Biography:\n\nRoberto Morales-Manzanares: Born in Mexico City, Roberto Morales-Manzanares\ - \ started his musical training in national folkloric music and learned how to\ - \ play harps and different kinds of guitars and flutes from several regions of\ - \ the country. His doctorate in music composition was completed at UC Berkeley\ - \ in 2006. As a composer, he has written music for theater, dance, movies, TV\ - \ and radio. As an interpreter Morales-Manzanares has participated on his own\ - \ and with other composers in forums of jazz, popular and new music, including\ - \ tours to Europe United States and Latin-America.\nAs a researcher, he has been\ - \ invited to different national and international conferences such as ICMC, International\ - \ Join Conference on Artificial Intelligence IJCAI and Symposium on Arts and Technology\ - \ and has several publications. Currently he is member of the ``Sistema Nacional\ - \ de Creadores''. 
His music can be found in ICMC recordings, Victo label www.victo.qc.ca\ - \ (Leyendas in collaboration with Mari Kimura) and Irradia/Pocoscocodrilos.\n\n\ - Concert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm" + ID: nime2012-music-HelmuthDanard2012 + abstract: "Program notes:\n\nWater Birds is an interactive and collaborative composition\ + \ for clarinet, bass clarinet and computer. The sound of the clarinets is processed\ + \ live by spectral delays with MaxMSP and rtcmix~. Space structures the composition,\ + \ as the particular sound parameters initiated depend on the performer's location\ + \ on the stage. The development of the current version of the piece involved a\ + \ custom wireless infrared sensor network, which responds to the clarinetist's\ + \ movements. Currently the piece is performed without the sensor network, but\ + \ the strategy of that configuration still drives the composition. A score containing\ + \ five sound-generating ideas, consisting of musical fragments and a Zen poem,\ + \ allows the performer to improvise, creating his/her own sound pathway through\ + \ the piece. The pathway is reminiscent of the path of birds in the Zen poem,\ + \ Dogen's On the Nondependence of Mind, which reads: ``Water birds/going and coming/their\ + \ traces disappear/but they never/forget their path.''\n\nComposer(s) Credits:\n\ + \nMara Helmuth and Rebecca Danard\n\nInstrumentalist(s) Credits:\n\nRebecca Danard\ + \ (B♭ Clarinet, bass clarinet), Mara Helmuth (Computer)\n\nArtist(s) Biography:\n\ + \nMara Helmuth composes music often involving the computer, and creates multimedia\ + \ and software for composition and improvisation. Her recordings include Sounding\ + \ Out! (Everglade, forthcoming 2010), Sound Collaborations, (CDCM v.36, Centaur\ + \ CRC 2903), Implements of Actuation (Electronic Music Foundation EMF 023), and\ + \ Open Space CD 16, and her work has been performed internationally. 
She is on\ + \ the faculty of the College-Conservatory of Music, University of Cincinnati and\ + \ its Center for Computer Music's director. She holds a D.M.A. from Columbia University,\ + \ and earlier degrees from the University of Illinois, Urbana-Champaign. Her software\ + \ for composition and improvisation has involved granular synthesis, Internet2,\ + \ and RTcmix instruments. Her writings have appeared in Audible Traces, Analytical\ + \ Methods of Electroacoustic Music, the Journal of New Music Research and Perspectives\ + \ of New Music. Installations including Hidden Mountain (2007) were created for\ + \ the Sino-Nordic Arts Space in Beijing. She is a past president of the International\ + \ Computer Music Association.\n\nRebecca Danard: Performer, educator, scholar\ + \ and entrepreneur, Rebecca Danard holds a doctorate in clarinet performance at\ + \ the University of Cincinnati College-Conservatory of Music. Also an enthusiastic\ + \ teacher, Rebecca is Adjunct Faculty at Carleton University. She is currently\ + \ Artistic Director of the Ottawa New Music Creators: a collective of professional\ + \ composers and performers dedicated to bringing contemporary music to Canada's\ + \ capital. Rebecca's performance career centres on new and experimental music,\ + \ including interdisciplinary collaborations, working with new technology, organizing\ + \ events, and commissioning composers. She has worked with film makers, dancers,\ + \ choreographers, actors, poets, lighting designers and visuals artists as well\ + \ as performing musicians and composers. 
She has been invited to perform at festival\ + \ such as Music10 (Hindemith Centre, Switzerland), the Ottawa Chamber Music Festival,\ + \ the Ottawa Jazz Festival, the Bang on a Can Summer Festival, and Opera Theatre\ + \ and Music Festival of Lucca; at conferences such as Clarinetfest, CLIEC and\ + \ SEAMUS.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday May 21,\ + \ 9:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Roberto Morales-Manzanares - bibtex: "@incollection{nime2012-music-Morales-Manzanares2012,\n abstract = {Program\ - \ notes:\n\n\\emph{Desamor I} is inspired in a model of meditation where primordial\ - \ awareness or naturally occurring timeless awareness is seen as a result of a\ - \ conversation with my wife Alejandra. This work is for piano, computer and two\ - \ Wii controllers attached to my forearms. The output is 4 channels. The gestures\ - \ of the pianist (movement, timber and dynamics) are captured in real time via\ - \ 2 microphones and a set of 2 Wii controllers. The computer languages involved\ - \ in the development of the project were: Escamol, a prolog environment for algorithmic\ - \ composition designed by the composer, and SuperCollider. In this piece I share\ - \ my experience as a performer-composer within a multi-platform programming environments\ - \ involving signal processing and machine learning techniques.\n\nComposer(s)\ - \ Credits:\n\nRoberto Morales-Manzanares\n\nInstrumentalist(s) Credits:\n\nRoberto\ - \ Morales-Manzanares (piano, percussion and electronics)\n\nArtist(s) Biography:\n\ - \nRoberto Morales-Manzanares: Born in Mexico City, \\textbf{Roberto Morales-Manzanares}\ - \ started his musical training in national folkloric music and learned how to\ - \ play harps and different kinds of guitars and flutes from several regions of\ - \ the country. His doctorate in music composition was completed at UC Berkeley\ - \ in 2006. 
As a composer, he has written music for theater, dance, movies, TV\ - \ and radio. As an interpreter Morales-Manzanares has participated on his own\ - \ and with other composers in forums of jazz, popular and new music, including\ - \ tours to Europe United States and Latin-America.\nAs a researcher, he has been\ - \ invited to different national and international conferences such as ICMC, International\ - \ Join Conference on Artificial Intelligence IJCAI and Symposium on Arts and Technology\ - \ and has several publications. Currently he is member of the ``Sistema Nacional\ - \ de Creadores''. His music can be found in ICMC recordings, Victo label www.victo.qc.ca\ - \ (Leyendas in collaboration with Mari Kimura) and Irradia/Pocoscocodrilos.\n\n\ - Concert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm},\n\ - \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Roberto Morales-Manzanares},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie\ - \ and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + author: Mara Helmuth and Rebecca Danard + bibtex: "@incollection{nime2012-music-HelmuthDanard2012,\n abstract = {Program notes:\n\ + \nWater Birds is an interactive and collaborative composition for clarinet, bass\ + \ clarinet and computer. The sound of the clarinets is processed live by spectral\ + \ delays with MaxMSP and rtcmix~. Space structures the composition, as the particular\ + \ sound parameters initiated depend on the performer's location on the stage.\ + \ The development of the current version of the piece involved a custom wireless\ + \ infrared sensor network, which responds to the clarinetist's movements. Currently\ + \ the piece is performed without the sensor network, but the strategy of that\ + \ configuration still drives the composition. 
A score containing five sound-generating\ + \ ideas, consisting of musical fragments and a Zen poem, allows the performer\ + \ to improvise, creating his/her own sound pathway through the piece. The pathway\ + \ is reminiscent of the path of birds in the Zen poem, Dogen's \\emph{On the Nondependence\ + \ of Mind}, which reads: ``Water birds/going and coming/their traces disappear/but\ + \ they never/forget their path.''\n\nComposer(s) Credits:\n\nMara Helmuth and\ + \ Rebecca Danard\n\nInstrumentalist(s) Credits:\n\nRebecca Danard (B$\\flat$ Clarinet,\ + \ bass clarinet), Mara Helmuth (Computer)\n\nArtist(s) Biography:\n\nMara Helmuth\ + \ composes music often involving the computer, and creates multimedia and software\ + \ for composition and improvisation. Her recordings include Sounding Out! (Everglade,\ + \ forthcoming 2010), Sound Collaborations, (CDCM v.36, Centaur CRC 2903), Implements\ + \ of Actuation (Electronic Music Foundation EMF 023), and Open Space CD 16, and\ + \ her work has been performed internationally. She is on the faculty of the College-Conservatory\ + \ of Music, University of Cincinnati and its Center for Computer Music's director.\ + \ She holds a D.M.A. from Columbia University, and earlier degrees from the University\ + \ of Illinois, Urbana-Champaign. Her software for composition and improvisation\ + \ has involved granular synthesis, Internet2, and RTcmix instruments. Her writings\ + \ have appeared in \\emph{Audible Traces, Analytical Methods of Electroacoustic\ + \ Music, the Journal of New Music Research and Perspectives of New Music}. Installations\ + \ including \\emph{Hidden Mountain} (2007) were created for the Sino-Nordic Arts\ + \ Space in Beijing. She is a past president of the International Computer Music\ + \ Association.\n\nRebecca Danard: Performer, educator, scholar and entrepreneur,\ + \ \\textbf{Rebecca Danard} holds a doctorate in clarinet performance at the University\ + \ of Cincinnati College-Conservatory of Music. 
Also an enthusiastic teacher, Rebecca\ + \ is Adjunct Faculty at Carleton University. She is currently Artistic Director\ + \ of the Ottawa New Music Creators: a collective of professional composers and\ + \ performers dedicated to bringing contemporary music to Canada's capital. Rebecca's\ + \ performance career centres on new and experimental music, including interdisciplinary\ + \ collaborations, working with new technology, organizing events, and commissioning\ + \ composers. She has worked with film makers, dancers, choreographers, actors,\ + \ poets, lighting designers and visuals artists as well as performing musicians\ + \ and composers. She has been invited to perform at festival such as Music10 (Hindemith\ + \ Centre, Switzerland), the Ottawa Chamber Music Festival, the Ottawa Jazz Festival,\ + \ the Bang on a Can Summer Festival, and Opera Theatre and Music Festival of Lucca;\ + \ at conferences such as Clarinetfest, CLIEC and SEAMUS.\n\nConcert Venue and\ + \ Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm},\n address = {Ann Arbor,\ + \ Michigan, U.S.A.},\n author = {Mara Helmuth and Rebecca Danard},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ + \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Desamor I},\n year = {2012}\n}\n" + \ of Michigan},\n title = {Water Birds},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -9189,90 +8899,163 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Desamor I + title: Water Birds year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Hsu2012 - abstract: 
"Program notes:\n\nFlue is a structured audio-visual improvisation for\ - \ three musicians, utilizing live acoustic and electronic sound and interactive\ - \ animations. A physics-based smoke simulation is influenced by the real-time\ - \ audio from the musicians' performance. The audio from the performance is analyzed;\ - \ high-level tempo, spectral and other features are extracted, and sent via Open\ - \ Sound Control to the animation environment. The smoke trails are also able to\ - \ coalesce into well- defined symbols and forms, all while moving in a natural-seeming\ - \ manner consistent with the underlying fluid simulation.\n\nComposer(s) Credits:\n\ - \nBill Hsu\n\nInstrumentalist(s) Credits:\n\nBill Hsu (electronics, interactive\ - \ animation), Matt Endahl (piano), Mike Khoury (violin)\n\nArtist(s) Biography:\n\ - \nBill Hsu is an Associate Professor of Computer Science at San Francisco State\ - \ University. He has performed in the US, Asia, and Europe, including NIME 2011\ - \ (Oslo), Festival art::archive:architectures (ZKM, Karlsruhe, 2011), SMC 2009\ - \ (Porto), Harvestworks Festival 2009 (New York), Fete Quaqua 2008 (London), MIX\ - \ Festival 2007 and 2009 (New York), NIME 2007 (New York), Stimme+ 2006 (ZKM,\ - \ Karlsruhe), and the First Hong Kong Improvised Performance Festival 2005. Website:\ - \ http://userwww.sfsu.edu/~whsu/art.html\n\nMatt Endahl (b. 1985) is an improvising\ - \ pianist based in Ann Arbor, MI. A student of Geri Allen and Ed Sarath at the\ - \ University of Michigan, Matt is an active performer and organizer, having performed\ - \ in a wide variety of settings, from Gershwin's \"Rhapsody in Blue\" to freeform\ - \ solo electronic sets. Matt has taught jazz piano at Hillsdale College since\ - \ 2008. http://www.myspace.com/mattendahl\n\nMike Khoury was born in Mt. Pleasant,\ - \ Michigan in 1969. As the son of visual artist Sari Khoury, he was exposed to\ - \ various forms of visual arts and creative musical forms. 
Khoury is Palestinian.\n\ - Khoury's collaborators often include Leyya Tawil (dance), Ben Hall (percussion),\ - \ Christopher Riggs (guitar), and Andrew Coltrane (sound manipulation). He has\ - \ performed and recorded with Faruq Z. Bey, Dennis Gonzalez, Luc Houtkamp, Maury\ - \ Coles, Jack Wright, Graveyards, John Butcher, Gino Robair, Gunda Gottschalk,\ - \ and Le Quan Ninh.\nKhoury runs the Entropy Stereo music label where he focuses\ - \ on issuing new and archival music by challenging artists. His studies include\ - \ those with John Lindberg, Gerald Cleaver, and composer/violinist David Litven.\ - \ Khoury is the author of a chapter on Egyptian-American composer Halim El-Dabh\ - \ in a forthcoming anthology on the Arab avant garde, published by Wesleyan University\ - \ Press. Website: http://www.myspace.com/michaelkhoury\n\nConcert Venue and Time:\ - \ Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm" + ID: nime2012-music-TanakaParkinson2012 + abstract: "Program notes:\n\nAdam & Atau exploit a commonly available consumer electronics\ + \ device, a smartphone, as an expressive, gestural musical instrument. The device\ + \ is well known an iconic object of desire in our society of consumption, playing\ + \ music as a fixed commodity. The performers re-appropriate the mobile phone and\ + \ transform the consumer object into an instrument for concert performance. As\ + \ a duo, with one in each hand, they create a chamber music, 4-hands iPhone. The\ + \ accelerometers allow high precision capture of the performer's free space gestures.\ + \ This drives a granular synthesis patch in Pure Data (PD), where one patch becomes\ + \ the process by which a range of sounds from the natural world are stretched,\ + \ frozen, scattered, and restitched. 
The fact that all system components---sensor\ + \ input, signal processing and sound synthesis, and audio output, are embodied\ + \ in a single device make it a self-contained, expressive musical instrument.\n\ + \nComposer(s) Credits:\n\nAtau Tanaka and Adam Parkinson\n\nInstrumentalist(s)\ + \ Credits:\n\nArtist(s) Biography:\n\nAtau Tanaka's first inspirations came upon\ + \ meeting John Cage during his Norton Lectures at Harvard and would go to on re-create\ + \ Cage's Variations VII with Matt Wand and :zoviet*france:, performing it in Newcastle\ + \ upon Tyne, Berlin, and Paris. In the 90's he formed Sensorband with Zbigniew\ + \ Karkowski and Edwin van der Heide and then moved to Japan and came in contact\ + \ with the noise music scene, playing with Merzbow, Otomo, KK Null and others.\ + \ Atau has released solo, group, and compilation CD's on labels such as Sub Rosa,\ + \ Bip-hop, Caipirinha Music, Touch/Ash, Sonoris, Sirr-ecords. His work has been\ + \ presented at ICC in Japan, Ars Electronica, DEAF/V2, IRCAM, and Transmediale\ + \ in Europe, and Eyebeam, Wood Street Gallery, and SFMOMA in the U.S. He has been\ + \ artistic ambassador for Apple, researcher for Sony CSL, artistic co-director\ + \ of STEIM, and director of Culture Lab Newcastle. He is currently European Research\ + \ Council (ERC) fellow at Goldsmiths Digital Studios in London.\n\nAdam Parkinson\ + \ is an electronic musician based in Newcastle, England. He has recently completed\ + \ PhD, with much of his research looking at mobile music and performing with iPhones.He\ + \ has worked alongside various improvisers such as Rhodri Davies, Klaus Filip,\ + \ Robin Hayward and Dominic Lash, and has been involved in collaborations to create\ + \ sound installations with Kaffe Matthews and Caroline Bergvall. 
He also dabbles\ + \ in making dance music, and is trying to write a perfect pop song.\nAtau & Adam\ + \ have been performing as a duo since 2008: first as a laptop / biomuse duo then\ + \ in the current iPhone formation. 4-Hands iPhone has so far been performed across\ + \ Europe and North America including the FutureEverything Festival (Manchester),\ + \ Passos Manuel (Porto), Charm of Sound Festival (Helsinki), Electron Festival\ + \ (Geneva), Mois Multi (Quebec), Music With A View (New York).\n\nConcert Venue\ + \ and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Bill Hsu - bibtex: "@incollection{nime2012-music-Hsu2012,\n abstract = {Program notes:\n\n\\\ - emph{Flue} is a structured audio-visual improvisation for three musicians, utilizing\ - \ live acoustic and electronic sound and interactive animations. A physics-based\ - \ smoke simulation is influenced by the real-time audio from the musicians' performance.\ - \ The audio from the performance is analyzed; high-level tempo, spectral and other\ - \ features are extracted, and sent via Open Sound Control to the animation environment.\ - \ The smoke trails are also able to coalesce into well- defined symbols and forms,\ - \ all while moving in a natural-seeming manner consistent with the underlying\ - \ fluid simulation.\n\nComposer(s) Credits:\n\nBill Hsu\n\nInstrumentalist(s)\ - \ Credits:\n\nBill Hsu (electronics, interactive animation), Matt Endahl (piano),\ - \ Mike Khoury (violin)\n\nArtist(s) Biography:\n\nBill Hsu is an Associate Professor\ - \ of Computer Science at San Francisco State University. 
He has performed in the\ - \ US, Asia, and Europe, including NIME 2011 (Oslo), Festival art::archive:architectures\ - \ (ZKM, Karlsruhe, 2011), SMC 2009 (Porto), Harvestworks Festival 2009 (New York),\ - \ Fete Quaqua 2008 (London), MIX Festival 2007 and 2009 (New York), NIME 2007\ - \ (New York), Stimme+ 2006 (ZKM, Karlsruhe), and the First Hong Kong Improvised\ - \ Performance Festival 2005. Website: http://userwww.sfsu.edu/~whsu/art.html\n\ - \nMatt Endahl (b. 1985) is an improvising pianist based in Ann Arbor, MI. A student\ - \ of Geri Allen and Ed Sarath at the University of Michigan, Matt is an active\ - \ performer and organizer, having performed in a wide variety of settings, from\ - \ Gershwin's \"Rhapsody in Blue\" to freeform solo electronic sets. Matt has taught\ - \ jazz piano at Hillsdale College since 2008. http://www.myspace.com/mattendahl\n\ - \nMike Khoury was born in Mt. Pleasant, Michigan in 1969. As the son of visual\ - \ artist Sari Khoury, he was exposed to various forms of visual arts and creative\ - \ musical forms. Khoury is Palestinian.\nKhoury's collaborators often include\ - \ Leyya Tawil (dance), Ben Hall (percussion), Christopher Riggs (guitar), and\ - \ Andrew Coltrane (sound manipulation). He has performed and recorded with Faruq\ - \ Z. Bey, Dennis Gonzalez, Luc Houtkamp, Maury Coles, Jack Wright, Graveyards,\ - \ John Butcher, Gino Robair, Gunda Gottschalk, and Le Quan Ninh.\nKhoury runs\ - \ the Entropy Stereo music label where he focuses on issuing new and archival\ - \ music by challenging artists. His studies include those with John Lindberg,\ - \ Gerald Cleaver, and composer/violinist David Litven. Khoury is the author of\ - \ a chapter on Egyptian-American composer Halim El-Dabh in a forthcoming anthology\ - \ on the Arab avant garde, published by Wesleyan University Press. 
Website: http://www.myspace.com/michaelkhoury\n\ - \nConcert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm},\n\ - \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Bill Hsu},\n booktitle\ + author: Atau Tanaka and Adam Parkinson + bibtex: "@incollection{nime2012-music-TanakaParkinson2012,\n abstract = {Program\ + \ notes:\n\nAdam \\& Atau exploit a commonly available consumer electronics device,\ + \ a smartphone, as an expressive, gestural musical instrument. The device is well\ + \ known an iconic object of desire in our society of consumption, playing music\ + \ as a fixed commodity. The performers re-appropriate the mobile phone and transform\ + \ the consumer object into an instrument for concert performance. As a duo, with\ + \ one in each hand, they create a chamber music, 4-hands iPhone. The accelerometers\ + \ allow high precision capture of the performer's free space gestures. This drives\ + \ a granular synthesis patch in Pure Data (PD), where one patch becomes the process\ + \ by which a range of sounds from the natural world are stretched, frozen, scattered,\ + \ and restitched. The fact that all system components---sensor input, signal processing\ + \ and sound synthesis, and audio output, are embodied in a single device make\ + \ it a self-contained, expressive musical instrument.\n\nComposer(s) Credits:\n\ + \nAtau Tanaka and Adam Parkinson\n\nInstrumentalist(s) Credits:\n\nArtist(s) Biography:\n\ + \nAtau Tanaka's first inspirations came upon meeting John Cage during his Norton\ + \ Lectures at Harvard and would go to on re-create Cage's Variations VII with\ + \ Matt Wand and \\emph{:zoviet*france:}, performing it in Newcastle upon Tyne,\ + \ Berlin, and Paris. In the 90's he formed Sensorband with Zbigniew Karkowski\ + \ and Edwin van der Heide and then moved to Japan and came in contact with the\ + \ noise music scene, playing with Merzbow, Otomo, KK Null and others. 
Atau has\ + \ released solo, group, and compilation CD's on labels such as Sub Rosa, Bip-hop,\ + \ Caipirinha Music, Touch/Ash, Sonoris, Sirr-ecords. His work has been presented\ + \ at ICC in Japan, Ars Electronica, DEAF/V2, IRCAM, and Transmediale in Europe,\ + \ and Eyebeam, Wood Street Gallery, and SFMOMA in the U.S. He has been artistic\ + \ ambassador for Apple, researcher for Sony CSL, artistic co-director of STEIM,\ + \ and director of Culture Lab Newcastle. He is currently European Research Council\ + \ (ERC) fellow at Goldsmiths Digital Studios in London.\n\nAdam Parkinson is an\ + \ electronic musician based in Newcastle, England. He has recently completed PhD,\ + \ with much of his research looking at mobile music and performing with iPhones.He\ + \ has worked alongside various improvisers such as Rhodri Davies, Klaus Filip,\ + \ Robin Hayward and Dominic Lash, and has been involved in collaborations to create\ + \ sound installations with Kaffe Matthews and Caroline Bergvall. He also dabbles\ + \ in making dance music, and is trying to write a perfect pop song.\nAtau \\&\ + \ Adam have been performing as a duo since 2008: first as a laptop / biomuse duo\ + \ then in the current iPhone formation. 
4-Hands iPhone has so far been performed\ + \ across Europe and North America including the FutureEverything Festival (Manchester),\ + \ Passos Manuel (Porto), Charm of Sound Festival (Helsinki), Electron Festival\ + \ (Geneva), Mois Multi (Quebec), Music With A View (New York).\n\nConcert Venue\ + \ and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm},\n address = {Ann\ + \ Arbor, Michigan, U.S.A.},\n author = {Atau Tanaka and Adam Parkinson},\n booktitle\ \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Flue},\n year = {2012}\n}\n" + \ of Michigan},\n title = {4 Hands iPhone},\n year = {2012}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain + month: May + publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, + University of Michigan' + title: 4 Hands iPhone + year: 2012 + + +- ENTRYTYPE: incollection + ID: nime2012-music-Applebaum2012 + abstract: "Program notes:\n\nAphasia (2010), for solo performer and two-channel\ + \ tape, was commissioned by the GRM, Paris and composed for virtuoso singer Nicholas\ + \ Isherwood. The tape, an idiosyncratic explosion of warped and mangled sounds,\ + \ is made up exclusively of vocal samples---all provided by Isherwood and subsequently\ + \ transformed digitally. Against the backdrop of this audio narrative, an elaborate\ + \ set of hand gestures are performed---an assiduously choreographed sign language\ + \ of sorts. 
Each gesture is fastidiously synchronized to the tape in tight rhythmic\ + \ coordination.\n\nIn the context of NIME, the piece is noteworthy for its deliberate---if\ + \ unintentionally political---contemporary technology abstinence. Ancillary questions\ + \ arise, such as ``What are the present limits of gesture control?''; ``Do these\ + \ limitations present unwelcome pressures on the boundaries of artistic imagination\ + \ and creative capacity?''; and ``How do we learn to recognize when it is artistically\ + \ prudent to eschew emerging tools?''\n\nComposer(s) Credits:\n\nMark Applebaum\n\ + \nInstrumentalist(s) Credits:\n\nMark Applebaum\n\nArtist(s) Biography:\n\nMark\ + \ Applebaum is Associate Professor of Composition at Stanford University where\ + \ he received the 2003 Walter J. Gores Award for excellence in teaching. He received\ + \ his Ph.D. in composition from the University of California at San Diego where\ + \ he studied principally with Brian Ferneyhough. His solo, chamber, choral, orchestral,\ + \ operatic, and electroacoustic work has been performed throughout the United\ + \ States, Europe, Africa, South America, and Asia. Many of his recent works are\ + \ characterized by challenges to the conventional boundaries of musical ontology.\n\ + \nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Mark Applebaum + bibtex: "@incollection{nime2012-music-Applebaum2012,\n abstract = {Program notes:\n\ + \n\\emph{Aphasia} (2010), for solo performer and two-channel tape, was commissioned\ + \ by the GRM, Paris and composed for virtuoso singer Nicholas Isherwood. 
The tape,\ + \ an idiosyncratic explosion of warped and mangled sounds, is made up exclusively\ + \ of vocal samples---all provided by Isherwood and subsequently transformed digitally.\ + \ Against the backdrop of this audio narrative, an elaborate set of hand gestures\ + \ are performed---an assiduously choreographed sign language of sorts. Each gesture\ + \ is fastidiously synchronized to the tape in tight rhythmic coordination.\n\n\ + In the context of NIME, the piece is noteworthy for its deliberate---if unintentionally\ + \ political---contemporary technology abstinence. Ancillary questions arise,\ + \ such as ``What are the present limits of gesture control?''; ``Do these limitations\ + \ present unwelcome pressures on the boundaries of artistic imagination and creative\ + \ capacity?''; and ``How do we learn to recognize when it is artistically prudent\ + \ to eschew emerging tools?''\n\nComposer(s) Credits:\n\nMark Applebaum\n\nInstrumentalist(s)\ + \ Credits:\n\nMark Applebaum\n\nArtist(s) Biography:\n\nMark Applebaum is Associate\ + \ Professor of Composition at Stanford University where he received the 2003 Walter\ + \ J. Gores Award for excellence in teaching. He received his Ph.D. in composition\ + \ from the University of California at San Diego where he studied principally\ + \ with Brian Ferneyhough. His solo, chamber, choral, orchestral, operatic, and\ + \ electroacoustic work has been performed throughout the United States, Europe,\ + \ Africa, South America, and Asia. 
Many of his recent works are characterized\ + \ by challenges to the conventional boundaries of musical ontology.\n\nConcert\ + \ Venue and Time: Lydia Mendelssohn Theatre, Monday May 21, 9:00pm},\n address\ + \ = {Ann Arbor, Michigan, U.S.A.},\n author = {Mark Applebaum},\n booktitle =\ + \ {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ + \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + \ Engineering \\& Computer Science and Performing Arts Technology, University\ + \ of Michigan},\n title = {Aphasia},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -9280,101 +9063,207 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Flue + title: Aphasia year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-GoloveMartensson2012 - abstract: "Program notes:\n\nThe most impressive uses of the theremin cello during\ - \ Theremin's time in New York are Leopold Stokowski's inclusion of one in the\ - \ Philadelphia Orchestra's low string section and Varese's composition of two\ - \ solo parts in Ecuatorial. Even more important, from my perspective, is the\ - \ fact that the instrument represents the first attempt to harness the human potential\ - \ to shape and manipulate electronic sound by means of the technical apparatus\ - \ of the modern player of bowed string instruments.\n\nRachmaninoff's Vocalise,\ - \ Op. 34 no. 14, for textless high voice, highlights the hauntingly vocal quality\ - \ of the theremin cello. 
Vocalise is the last of a set of 14 songs published in\ - \ 1912, less than a decade before Theremin's experiments with musical sounds began\ - \ to bear fruit.\n\nBrian Wilson and the Beach Boys, by virtue of their use of\ - \ Bob Whitsell's Electro-Theremin on several recordings, are irrevocably linked\ - \ to the history of the theremin.\n\nComposer(s) Credits:\n\nVocalise, Op.34 no.\ - \ 14 - Sergei Rachmaninoff\nMedley (Good Vibrations/God Only Knows) - Brian Wilson\n\ - \nInstrumentalist(s) Credits:\n\nJonathan Golove (Theremin cello), Magnus Martensson\ - \ (piano)\n\nArtist(s) Biography:\n\nJonathan Golove, Associate Professor of Music\ - \ at the University at Buffalo, has been featured as theremin cello soloist with\ - \ the Asko/Schoenberg Ensemble, London Sinfonietta, and International Contemporary\ - \ Ensemble; and as cello soloist with the Buffalo Philharmonic Orchestra, Slee\ - \ Sinfonietta, and New York Virtuoso Singers. He has recorded for the Albany,\ - \ Centaur, Albuzerque, and Nine Winds labels, and appeared at festivals including\ - \ the Holland Festival, Festival d'Automne, Lincoln Center Festival, June in Buffalo,\ - \ and the Festival del Centro Histórico (Mexico City). Golove gave the first performance\ - \ of Varese's Ecuatorial using Floyd Engel's recreation of the legendary early\ - \ 20th century instrument at the University at Buffalo in 2002. 
He is also active\ - \ as an electric cellist, particularly in the field of creative improvised music.\ - \ An accomplished composer, his works have been performed at venues including\ - \ the Kennedy Center, Venice Biennale, Festival of Aix-en-Provence, Lincoln Center\ - \ Chamber Music Society II, and the Kitchen.\n\nMagnus Martensson is Music Director\ - \ of The Scandinavian Chamber Orchestra of New York; between 1996 and 2007 he\ - \ was Visiting Assistant Professor at SUNY Buffalo and conductor of the Slee Sinfonietta.\ - \ In 1989, Martensson made his operatic debut in Malmö, Sweden, conducting a production\ - \ of Offenbach's Orpheus in the Underworld, and has subsequently conducted operas\ - \ by Mozart, Puccini, Golove, among others. He has conducted several world premiere\ - \ recordings, including orchestral music by Jeffrey Stadelman, Roger Reynolds,\ - \ and David Felder.\nIn the past few seasons Martensson has guest conducted with\ - \ the New York New Music Ensemble, the Trondheim Soloists, Musica Vitae, ICE,\ - \ and at the Monday Evening Concert Series (Los Angeles), The Manhattan School\ - \ of Music, and Teatro San Martin (Buenos Aires).\n\nConcert Venue and Time: Lydia\ - \ Mendelssohn Theatre, Tuesday May 22, 7:00pm" + ID: nime2012-music-LeeuwSchwarz2012 + abstract: "Program notes:\n\nTwo typical NIME related inventions meet in this performance.\ + \ IRCAM based Diemo Schwarz and HKU lecturer and Electrumpet player Hans Leeuw\ + \ met at STEIM in 2010. The extreme sound possibilities of the sensor driven Electrumpet\ + \ combine wonderfully with the corpus based techniques in CataRT. 
Both Diemo and\ + \ Hans play their self-invented instruments for a number of years in which they\ + \ have done several iterations / extensions and built a lot of performance experience.\ + \ This experience pays off in the expressive capabilities of both performers making\ + \ this a concert that goes far beyond an extended demonstration of new instruments.\ + \ In Violent Dreams, Hans's manipulated sounds are recorded in CataRT, from which\ + \ Diemo chooses specific sonic characters and evolutions via gestural controllers,\ + \ that are played back and transformed by CataRT, challenging Hans to come up\ + \ with more extreme sounds surpassing his own originals. Thus we get an interesting\ + \ and challenging improvisation battle between two players that both fully master\ + \ their instrument.\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nHans\ + \ Leeuw (Electrumpet), Diemo Schwarz (CataRT, gestural controllers)\n\nArtist(s)\ + \ Biography:\n\nHans Leeuw is recognized as one of Hollands top players composers\ + \ and bandleaders in the Jazz and improvised music scene even before he started\ + \ to use electronics and designed his own Electrumpet. He is most noted as the\ + \ bandleader of the Dutch formation Tetzepi, a 14 piece Big Band. Tetzepi exists\ + \ for 15 years and is structurally funded by Dutch government.\nNext to his activities\ + \ as a performer Hans teaches at the Utrecht school for the arts at the Music\ + \ Technology department and at the faculty Industrial Design of the Technical\ + \ University Eindhoven where he coaches projects on the design of new musical\ + \ instruments.\nIn 2008 he designed the Electrumpet, a hybrid electroacoustic\ + \ instrument that differs from similar design in that it takes the trumpet players\ + \ normal playing position and expression in account thus creating an instrument\ + \ that combines acoustic and electronic expression seamlessly. 
(see `the electrumpet,\ + \ additions and revisions')\n\nDiemo Schwarz is a researcher and developer at\ + \ Ircam, composer of electronic music, and musician on drums and laptop. He holds\ + \ a PhD in computer science applied to music for his research on corpus-based\ + \ concatenative musical sound synthesis.\nHis compositions and live performances,\ + \ under the name of his solo project Mean Time Between Failure, or improvising\ + \ with musicians such as Frédéric Blondy, Victoria Johnson, Pierre Alexandre Tremblay,\ + \ Etienne Brunet, Luka Juhart, George Lewis, Evan Parker, explore the possibilities\ + \ of corpus-based concatenative synthesis to re-contextualise any sound source\ + \ by rearranging sound units into a new musical framework using interactive navigation\ + \ through a sound space, controlled by gestural input devices.\nHis research work\ + \ includes improving interaction between musician and computer, and exploiting\ + \ large masses of sound for interactive real-time sound synthesis, collaborating\ + \ with composers such as Philippe Manoury, Dai Fujikura, Stefano Gervasoni, Aaron\ + \ Einbond, Sam Britton.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre,\ + \ Monday May 21, 9:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Jonathan Golove and Magnus Martensson - bibtex: "@incollection{nime2012-music-GoloveMartensson2012,\n abstract = {Program\ - \ notes:\n\nThe most impressive uses of the theremin cello during Theremin's time\ - \ in New York are Leopold Stokowski's inclusion of one in the Philadelphia Orchestra's\ - \ low string section and Varese's composition of two solo parts in Ecuatorial.\ - \ Even more important, from my perspective, is the fact that the instrument represents\ - \ the first attempt to harness the human potential to shape and manipulate electronic\ - \ sound by means of the technical apparatus of the modern player of bowed string\ - \ instruments.\n\nRachmaninoff's Vocalise, Op. 34 no. 
14, for textless high voice,\ - \ highlights the hauntingly vocal quality of the theremin cello. Vocalise is the\ - \ last of a set of 14 songs published in 1912, less than a decade before Theremin's\ - \ experiments with musical sounds began to bear fruit.\n\nBrian Wilson and the\ - \ Beach Boys, by virtue of their use of Bob Whitsell's Electro-Theremin on several\ - \ recordings, are irrevocably linked to the history of the theremin.\n\nComposer(s)\ - \ Credits:\n\nVocalise, Op.34 no. 14 - \\emph{Sergei Rachmaninoff}\nMedley (Good\ - \ Vibrations/God Only Knows) - \\emph{Brian Wilson}\n\nInstrumentalist(s) Credits:\n\ - \nJonathan Golove (Theremin cello), Magnus Martensson (piano)\n\nArtist(s) Biography:\n\ - \nJonathan Golove, Associate Professor of Music at the University at Buffalo,\ - \ has been featured as theremin cello soloist with the Asko/Schoenberg Ensemble,\ - \ London Sinfonietta, and International Contemporary Ensemble; and as cello soloist\ - \ with the Buffalo Philharmonic Orchestra, Slee Sinfonietta, and New York Virtuoso\ - \ Singers. He has recorded for the Albany, Centaur, Albuzerque, and Nine Winds\ - \ labels, and appeared at festivals including the Holland Festival, Festival d'Automne,\ - \ Lincoln Center Festival, June in Buffalo, and the Festival del Centro Hist\\\ - '{o}rico (Mexico City). Golove gave the first performance of Varese's \\emph{Ecuatorial}\ - \ using Floyd Engel's recreation of the legendary early 20th century instrument\ - \ at the University at Buffalo in 2002. He is also active as an electric cellist,\ - \ particularly in the field of creative improvised music. 
An accomplished composer,\ - \ his works have been performed at venues including the Kennedy Center, Venice\ - \ Biennale, Festival of Aix-en-Provence, Lincoln Center Chamber Music Society\ - \ II, and the Kitchen.\n\nMagnus Martensson is Music Director of The Scandinavian\ - \ Chamber Orchestra of New York; between 1996 and 2007 he was Visiting Assistant\ - \ Professor at SUNY Buffalo and conductor of the Slee Sinfonietta. In 1989, Martensson\ - \ made his operatic debut in Malm\\''{o}, Sweden, conducting a production of Offenbach's\ - \ Orpheus in the Underworld, and has subsequently conducted operas by Mozart,\ - \ Puccini, Golove, among others. He has conducted several world premiere recordings,\ - \ including orchestral music by Jeffrey Stadelman, Roger Reynolds, and David Felder.\n\ - In the past few seasons Martensson has guest conducted with the New York New Music\ - \ Ensemble, the Trondheim Soloists, Musica Vitae, ICE, and at the Monday Evening\ - \ Concert Series (Los Angeles), The Manhattan School of Music, and Teatro San\ - \ Martin (Buenos Aires).\n\nConcert Venue and Time: Lydia Mendelssohn Theatre,\ - \ Tuesday May 22, 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author\ - \ = {Jonathan Golove and Magnus Martensson},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ - \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ - \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Rachmaninoff-Wilson Medley},\n year = {2012}\n}\n" + author: Hans Leeuw and Diemo Schwarz + bibtex: "@incollection{nime2012-music-LeeuwSchwarz2012,\n abstract = {Program notes:\n\ + \nTwo typical NIME related inventions meet in this performance. 
IRCAM based Diemo\ + \ Schwarz and HKU lecturer and Electrumpet player Hans Leeuw met at STEIM in 2010.\ + \ The extreme sound possibilities of the sensor driven Electrumpet combine wonderfully\ + \ with the corpus based techniques in CataRT. Both Diemo and Hans play their self-invented\ + \ instruments for a number of years in which they have done several iterations\ + \ / extensions and built a lot of performance experience. This experience pays\ + \ off in the expressive capabilities of both performers making this a concert\ + \ that goes far beyond an extended demonstration of new instruments. In \\emph{Violent\ + \ Dreams}, Hans's manipulated sounds are recorded in CataRT, from which Diemo\ + \ chooses specific sonic characters and evolutions via gestural controllers, that\ + \ are played back and transformed by CataRT, challenging Hans to come up with\ + \ more extreme sounds surpassing his own originals. Thus we get an interesting\ + \ and challenging improvisation battle between two players that both fully master\ + \ their instrument.\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nHans\ + \ Leeuw (Electrumpet), Diemo Schwarz (CataRT, gestural controllers)\n\nArtist(s)\ + \ Biography:\n\nHans Leeuw is recognized as one of Hollands top players composers\ + \ and bandleaders in the Jazz and improvised music scene even before he started\ + \ to use electronics and designed his own Electrumpet. He is most noted as the\ + \ bandleader of the Dutch formation Tetzepi, a 14 piece Big Band. 
Tetzepi exists\ + \ for 15 years and is structurally funded by Dutch government.\nNext to his activities\ + \ as a performer Hans teaches at the Utrecht school for the arts at the Music\ + \ Technology department and at the faculty Industrial Design of the Technical\ + \ University Eindhoven where he coaches projects on the design of new musical\ + \ instruments.\nIn 2008 he designed the Electrumpet, a hybrid electroacoustic\ + \ instrument that differs from similar design in that it takes the trumpet players\ + \ normal playing position and expression in account thus creating an instrument\ + \ that combines acoustic and electronic expression seamlessly. (see `the electrumpet,\ + \ additions and revisions')\n\nDiemo Schwarz is a researcher and developer at\ + \ Ircam, composer of electronic music, and musician on drums and laptop. He holds\ + \ a PhD in computer science applied to music for his research on corpus-based\ + \ concatenative musical sound synthesis.\nHis compositions and live performances,\ + \ under the name of his solo project Mean Time Between Failure, or improvising\ + \ with musicians such as Fr\\'{e}d\\'{e}ric Blondy, Victoria Johnson, Pierre Alexandre\ + \ Tremblay, Etienne Brunet, Luka Juhart, George Lewis, Evan Parker, explore the\ + \ possibilities of corpus-based concatenative synthesis to re-contextualise any\ + \ sound source by rearranging sound units into a new musical framework using interactive\ + \ navigation through a sound space, controlled by gestural input devices.\nHis\ + \ research work includes improving interaction between musician and computer,\ + \ and exploiting large masses of sound for interactive real-time sound synthesis,\ + \ collaborating with composers such as Philippe Manoury, Dai Fujikura, Stefano\ + \ Gervasoni, Aaron Einbond, Sam Britton.\n\nConcert Venue and Time: Lydia Mendelssohn\ + \ Theatre, Monday May 21, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n\ + \ author = {Hans Leeuw and Diemo Schwarz},\n 
booktitle = {Music Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n day\ + \ = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich and\ + \ Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering \\\ + & Computer Science and Performing Arts Technology, University of Michigan},\n\ + \ title = {Violent Dreams},\n year = {2012}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain + month: May + publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, + University of Michigan' + title: Violent Dreams + year: 2012 + + +- ENTRYTYPE: incollection + ID: nime2012-music-PattonRovan2012 + abstract: "Program notes:\n\nthe ellipsis catalog features new instruments designed\ + \ by Kevin Patton and Butch Rovan. Patton's instrument, the ``Fossil'', is a wireless\ + \ sensor-based musical instrument that is played with the entire gestural range\ + \ of arm movement as well as finger pressure. Four FSRs, a momentary button, and\ + \ a two-dimensional accelerometer are used to control the parameters of a custom\ + \ software environment built in Max/MSP/Jitter. It is part of a group of four\ + \ hand-carved wood instruments called the Digital Poplar Consort.\n\nRovan's ``Banshee''\ + \ is an analog electronic musical instrument. Modeled after a wind instrument,\ + \ the design uses six finger pads to control the pitch of an array of interrelated\ + \ oscillators, and a mouth sensor that allows the performer to control volume.\ + \ The Banshee also features a tilt-sensor that allows motion to change the voicing\ + \ circuitry and resulting timbre. 
Battery powered, the instrument can plug into\ + \ any amplifier or mixing console, much like an electric guitar.\nComposer(s)\ + \ Credits:\n\nInstrumentalist(s) Credits:\n\nKevin Patton (Fossil), Butch Rovan\ + \ (Banshee)\n\nArtist(s) Biography:\n\nKevin Patton is a musician, scholar, and\ + \ technologist active in the fields of experimental music and multimedia theatre\ + \ whose work explores the intersection of technology and performance. The design\ + \ of new musical instruments as well as interfaces and computer systems for analysis,\ + \ improvisation, installation and projection is at the center of his practice.\ + \ His work has been recognized for his collaboration with visual artist Maria\ + \ del Carmen Montoya with the 2009 Rhizome commission for the piece, I Sky You.\ + \ Patton is an assistant professor of music and performance technologies at Oregon\ + \ State University. He holds a Ph.D. and M.A. from Brown University in electronic\ + \ music and multimedia composition. He also holds a Master of Music degree in\ + \ jazz studies and composition from the University of North Texas. He was an Invited\ + \ Researcher at the Sorbonne, University of Paris IV, for the Spring of 2009.\n\ + \nButch Rovan is a media artist and performer at Brown University, where he co-directs\ + \ MEME (Multimedia & Electronic Music Experiments @ Brown). Rovan has received\ + \ prizes from the Bourges International Electroacoustic Music Competition, the\ + \ Berlin Transmediale International Media Arts Festival, and his work has appeared\ + \ throughout Europe and the U.S. Most recently his interactive installation Let\ + \ us imagine a straight line was featured in the 14th WRO International Media\ + \ Art Biennale, Poland.\nRovan's research includes new sensor hardware design\ + \ and wireless microcontroller systems. 
His research into gestural control and\ + \ interactivity has been featured in IRCAM's journal Resonance, Electronic Musician,\ + \ the Computer Music Journal, the Japanese magazine SoundArts, the CDROM Trends\ + \ in Gestural Control of Music (IRCAM 2000), and in the book Mapping Landscapes\ + \ for Performance as Research: Scholarly Acts and Creative Cartographies (Palgrave\ + \ Macmillan, 2009).\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday\ + \ May 21, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Kevin Patton and Butch Rovan + bibtex: "@incollection{nime2012-music-PattonRovan2012,\n abstract = {Program notes:\n\ + \n\\emph{the ellipsis catalog} features new instruments designed by Kevin Patton\ + \ and Butch Rovan. Patton's instrument, the ``Fossil'', is a wireless sensor-based\ + \ musical instrument that is played with the entire gestural range of arm movement\ + \ as well as finger pressure. Four FSRs, a momentary button, and a two-dimensional\ + \ accelerometer are used to control the parameters of a custom software environment\ + \ built in Max/MSP/Jitter. It is part of a group of four hand-carved wood instruments\ + \ called the Digital Poplar Consort.\n\nRovan's ``Banshee'' is an analog electronic\ + \ musical instrument. Modeled after a wind instrument, the design uses six finger\ + \ pads to control the pitch of an array of interrelated oscillators, and a mouth\ + \ sensor that allows the performer to control volume. The Banshee also features\ + \ a tilt-sensor that allows motion to change the voicing circuitry and resulting\ + \ timbre. 
Battery powered, the instrument can plug into any amplifier or mixing\ + \ console, much like an electric guitar.\nComposer(s) Credits:\n\nInstrumentalist(s)\ + \ Credits:\n\nKevin Patton (Fossil), Butch Rovan (Banshee)\n\nArtist(s) Biography:\n\ + \nKevin Patton is a musician, scholar, and technologist active in the fields of\ + \ experimental music and multimedia theatre whose work explores the intersection\ + \ of technology and performance. The design of new musical instruments as well\ + \ as interfaces and computer systems for analysis, improvisation, installation\ + \ and projection is at the center of his practice. His work has been recognized\ + \ for his collaboration with visual artist Maria del Carmen Montoya with the 2009\ + \ Rhizome commission for the piece, \\emph{I Sky You}. Patton is an assistant\ + \ professor of music and performance technologies at Oregon State University.\ + \ He holds a Ph.D. and M.A. from Brown University in electronic music and multimedia\ + \ composition. He also holds a Master of Music degree in jazz studies and composition\ + \ from the University of North Texas. He was an Invited Researcher at the Sorbonne,\ + \ University of Paris IV, for the Spring of 2009.\n\nButch Rovan is a media artist\ + \ and performer at Brown University, where he co-directs MEME (Multimedia \\&\ + \ Electronic Music Experiments @ Brown). 
Rovan has received prizes from the Bourges\ + \ International Electroacoustic Music Competition, the Berlin Transmediale International\ + \ Media Arts Festival, and his work has appeared throughout Europe and the U.S.\ + \ Most recently his interactive installation Let us imagine a straight line was\ + \ featured in the 14th WRO International Media Art Biennale, Poland.\nRovan's\ + \ research includes new sensor hardware design and wireless microcontroller systems.\ + \ His research into gestural control and interactivity has been featured in IRCAM's\ + \ journal Resonance, Electronic Musician, the \\emph{Computer Music Journal},\ + \ the Japanese magazine \\emph{SoundArts}, the CDROM \\emph{Trends in Gestural\ + \ Control of Music} (IRCAM 2000), and in the book \\emph{Mapping Landscapes for\ + \ Performance as Research: Scholarly Acts and Creative Cartographies} (Palgrave\ + \ Macmillan, 2009).\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday\ + \ May 21, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Kevin\ + \ Patton and Butch Rovan},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n day = {21-23},\n editor\ + \ = {Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n\ + \ month = {May},\n publisher = {Electrical Engineering \\& Computer Science and\ + \ Performing Arts Technology, University of Michigan},\n title = {the ellipsis\ + \ catalog},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -9382,142 +9271,78 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Rachmaninoff-Wilson Medley + title: the ellipsis catalog year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Alexander2012 - abstract: "Program notes:\n\nThe MiND Ensemble (Music in Neural Dimensions) is a\ - \ 
new-media performance group that utilizes custom interfaces to explore the mind-machine-music\ - \ connection. The traditional realization of the creative process ahs been as\ - \ follows: there is an artist, a thought process, and a fixed medium which actualizes\ - \ those thoughts. Neurofeedback radically shifts this paradigm. Now there is an\ - \ artist and a dynamic medium that actively interfaces with the thought processes\ - \ of the artist himself, drastically reshaping the way we understand the creative\ - \ process. The MiND Ensemble promotes a rich awareness in which the mind is the\ - \ creative medium. All projection and audio processing in this piece are driven\ - \ in real time, with data gathered from the Emotiv EPOC headset.\n\nComposer(s)\ - \ Credits:\n\nRobert Alexander, David Biedenbender, Anton Pugh, Suby Raman, Amanda\ - \ Sari Perez, Sam L. Richards\n\nInstrumentalist(s) Credits:\n\nJeremy Crosmer\ - \ (violoncello), Robert Alexander (MiND Synth / Emotiv), Anton Pugh (MiND Synth\ - \ / Emotiv)\n\nArtist(s) Biography:\n\nRobert Alexander is a Sonification Specialist\ - \ with the Solar Heliospheric Research group at the University of Michigan, where\ - \ he is pursuing a PhD in Design Science. He was awarded a JPFP Fellowship from\ - \ NASA, an Outstanding Achievement award from ICAD, and is an Artist in Residence\ - \ with the Imagine Science Film Festival. He has collaborated with artists such\ - \ as DJ Spooky, and performed on several international stages. He founded the\ - \ MiND Ensemble in 2010.\n\nDavid Biedenbender is currently a doctoral student\ - \ in music composition at the University of Michigan. His first musical collaborations\ - \ were in rock and jazz bands as an electric bassist and in jazz and wind bands\ - \ as a bass trombonist and euphonium player. 
His present interests include working\ - \ with everyone from classically trained musicians to improvisers, fixed electronics\ - \ to brain data.\n\nAnton Pugh is a Masters student in Electrical Engineering:\ - \ Systems (Signal Processing concentration) at the University of Michigan. Presently\ - \ he is working on expanding his knowledge of the Processing and iOS platforms,\ - \ especially as they apply to the MiND Ensemble. His primary hobby is designing\ - \ and building custom electronic instruments and new musical interfaces. He is\ - \ also an active musician and plays viola in the Campus Symphony Orchestra.\n\n\ - Suby Raman is a composer, conductor, polyglot and linguist. His major artistic\ - \ passion is drawn from language itself: the basic aural and mental components\ - \ of language, how it determines, separates and unites cultures, and its influence\ - \ (or lack thereof) on our perception and expression of reality. He has conducted\ - \ research in brain-computer interface technology, which assist people afflicted\ - \ by ALS and spinal cord injuries.\n\nAmanda Sari Perez is a researcher with the\ - \ Neural Engineering lab at the University of Michigan. She is currently working\ - \ with microelectrode arrays to record brain activity from implanted sites. In\ - \ 2009 she co- founded the Ann Arbor HackerSpace, a DIY community engaged in hands-on\ - \ learning. For the past 3 years she has created artistic installations for the\ - \ Burning Man festival, including a performance that deconstructs participants'\ - \ notions of the self. Amanda is with the MiND Ensemble to work toward lowering\ - \ the barrier for creative expression.\n\nSam L. Richards is a composer, artist,\ - \ and researcher with a penchant for interdisciplinary collaboration and an appetite\ - \ for creative engagement of unwieldy conceptual problems. 
As a composer he has\ - \ worked with media artists, filmmakers, animators, and choreographers, as well\ - \ as making music for the concert hall. Although formally trained as a musician,\ - \ he also produces video installations, visual and aural media, creative writing,\ - \ and regularly steps off the beaten path in order to engage new things in new\ - \ ways.\n\nJeremy Crosmer is a gifted young professional cellist and composer.\ - \ After achieving a double-major in music and mathematics from Hendrix College,\ - \ he went on to receive multiple graduate degrees from the University of Michigan\ - \ by the age of 23. As a cellist, Crosmer has performed across the country, soloing\ - \ with orchestras in Arkansas and attending music festivals from Music Academy\ - \ of the West to Tanglewood Music Center. An avid promoted of new music, Crosmer\ - \ has both commissioned and premiered dozens of works by composers at Michigan\ - \ and elsewhere. His performance dissertation at the University of Michigan is\ - \ a study of the music of Paul Hindemith and cello sonatas by French composers\ - \ during World War I.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Tuesday\ - \ May 22, 7:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Robert Alexander and David Biedenbender and Anton Pugh and Suby Raman and - Amanda~Sari Perez and Sam~L. Richards - bibtex: "@incollection{nime2012-music-Alexander2012,\n abstract = {Program notes:\n\ - \nThe MiND Ensemble (Music in Neural Dimensions) is a new-media performance group\ - \ that utilizes custom interfaces to explore the mind-machine-music connection.\ - \ The traditional realization of the creative process ahs been as follows: there\ - \ is an artist, a thought process, and a fixed medium which actualizes those thoughts.\ - \ Neurofeedback radically shifts this paradigm. 
Now there is an artist and a dynamic\ - \ medium that actively interfaces with the thought processes of the artist himself,\ - \ drastically reshaping the way we understand the creative process. The MiND Ensemble\ - \ promotes a rich awareness in which the mind is the creative medium. All projection\ - \ and audio processing in this piece are driven in real time, with data gathered\ - \ from the Emotiv EPOC headset.\n\nComposer(s) Credits:\n\nRobert Alexander, David\ - \ Biedenbender, Anton Pugh, Suby Raman, Amanda Sari Perez, Sam L. Richards\n\ - \nInstrumentalist(s) Credits:\n\nJeremy Crosmer (violoncello), Robert Alexander\ - \ (MiND Synth / Emotiv), Anton Pugh (MiND Synth / Emotiv)\n\nArtist(s) Biography:\n\ - \nRobert Alexander is a Sonification Specialist with the Solar Heliospheric Research\ - \ group at the University of Michigan, where he is pursuing a PhD in Design Science.\ - \ He was awarded a JPFP Fellowship from NASA, an Outstanding Achievement award\ - \ from ICAD, and is an Artist in Residence with the Imagine Science Film Festival.\ - \ He has collaborated with artists such as DJ Spooky, and performed on several\ - \ international stages. He founded the MiND Ensemble in 2010.\n\nDavid Biedenbender\ - \ is currently a doctoral student in music composition at the University of Michigan.\ - \ His first musical collaborations were in rock and jazz bands as an electric\ - \ bassist and in jazz and wind bands as a bass trombonist and euphonium player.\ - \ His present interests include working with everyone from classically trained\ - \ musicians to improvisers, fixed electronics to brain data.\n\nAnton Pugh is\ - \ a Masters student in Electrical Engineering: Systems (Signal Processing concentration)\ - \ at the University of Michigan. 
Presently he is working on expanding his knowledge\ - \ of the Processing and iOS platforms, especially as they apply to the MiND Ensemble.\ - \ His primary hobby is designing and building custom electronic instruments and\ - \ new musical interfaces. He is also an active musician and plays viola in the\ - \ Campus Symphony Orchestra.\n\nSuby Raman is a composer, conductor, polyglot\ - \ and linguist. His major artistic passion is drawn from language itself: the\ - \ basic aural and mental components of language, how it determines, separates\ - \ and unites cultures, and its influence (or lack thereof) on our perception and\ - \ expression of reality. He has conducted research in brain-computer interface\ - \ technology, which assist people afflicted by ALS and spinal cord injuries.\n\ - \nAmanda Sari Perez is a researcher with the Neural Engineering lab at the University\ - \ of Michigan. She is currently working with microelectrode arrays to record brain\ - \ activity from implanted sites. In 2009 she co- founded the Ann Arbor HackerSpace,\ - \ a DIY community engaged in hands-on learning. For the past 3 years she has created\ - \ artistic installations for the Burning Man festival, including a performance\ - \ that deconstructs participants' notions of the self. Amanda is with the MiND\ - \ Ensemble to work toward lowering the barrier for creative expression.\n\nSam\ - \ L. Richards is a composer, artist, and researcher with a penchant for interdisciplinary\ - \ collaboration and an appetite for creative engagement of unwieldy conceptual\ - \ problems. As a composer he has worked with media artists, filmmakers, animators,\ - \ and choreographers, as well as making music for the concert hall. 
Although formally\ - \ trained as a musician, he also produces video installations, visual and aural\ - \ media, creative writing, and regularly steps off the beaten path in order to\ - \ engage new things in new ways.\n\nJeremy Crosmer is a gifted young professional\ - \ cellist and composer. After achieving a double-major in music and mathematics\ - \ from Hendrix College, he went on to receive multiple graduate degrees from the\ - \ University of Michigan by the age of 23. As a cellist, Crosmer has performed\ - \ across the country, soloing with orchestras in Arkansas and attending music\ - \ festivals from Music Academy of the West to Tanglewood Music Center. An avid\ - \ promoted of new music, Crosmer has both commissioned and premiered dozens of\ - \ works by composers at Michigan and elsewhere. His performance dissertation at\ - \ the University of Michigan is a study of the music of Paul Hindemith and cello\ - \ sonatas by French composers during World War I.\n\nConcert Venue and Time: Lydia\ - \ Mendelssohn Theatre, Tuesday May 22, 7:00pm},\n address = {Ann Arbor, Michigan,\ - \ U.S.A.},\n author = {Robert Alexander and David Biedenbender and Anton Pugh\ - \ and Suby Raman and Amanda~Sari Perez and Sam~L. Richards},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ - \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ - \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Thought.Projection},\n year = {2012}\n}\n" + ID: nime2012-music-Marier2012 + abstract: "Program notes:\n\nClarinet is the third piece in a series of monotimbral\ + \ works. Like its siblings Piano and Cymbal, it was inspired by the sound qualities\ + \ of an acoustic instrument. 
This minimalist and meditative piece is a structured\ + \ improvisation performed on the sponge, a musical interface designed by the composer.\ + \ The sponge is basically a cushion equipped with sensors (accelerometers, buttons\ + \ and force sensing resistors) which detect when it is squeezed, twisted or shaken.\ + \ Because the sponge evolves continuously, the piece exists in many versions.\ + \ Each new version drifts further away from the original compositional intentions\ + \ and the piece is slowly becoming less meditative. The latest version is subtitled\ + \ Albino Butterfly.\n\nComposer(s) Credits:\n\nMartin Marier\n\nInstrumentalist(s)\ + \ Credits:\n\nMartin Marier (Sponge)\n\nArtist(s) Biography:\n\nMartin Marier\ + \ is a composer and a performer who is mainly interested in live electronic music\ + \ using new interfaces. He is the inventor of the sponge, a cushion like musical\ + \ interface that he uses to perform his pieces. The main goal of this approach\ + \ is to establish a natural link between gesture and sound in electronic music.\ + \ He aims at improving the interaction with the audience and at making the process\ + \ of composing more playful. His research on the sponge is the topic of the doctorate\ + \ he is pursuing at the Université de Montréal under the supervision of Prof. Jean\ + \ Piché. He was also supervised by Dr. Garth Paine during an exchange at the\ + \ University of Western Sydney (Australia) in 2011.\nMartin has also composed\ + \ music for theatre, collaborating mostly with the Théâtre I.N.K. company for\ + \ whom he wrote the music of three plays: \"L'effet du temps sur Matévina\" (2012),\ + \ \"Roche, papier... couteau\" (2007), \"La cadette\" (2006). He sometimes writes\ + \ music for films and collaborates with the film composer Benoit Charest. He is\ + \ one of the founders of Point d'écoute (PDE), a collective whose purpose is to\ + \ promote electroacoustic music.
Along with his four colleagues of PDE, he produced\ + \ concerts in Montreal, New\ York and Sydney.\n\nConcert Venue and Time: Lydia\ + \ Mendelssohn Theatre, Monday May 21, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Martin Marier + bibtex: "@incollection{nime2012-music-Marier2012,\n abstract = {Program notes:\n\ + \n\\emph{Clarinet} is the third piece in a series of monotimbral works. Like\ + \ its siblings \\emph{Piano} and \\emph{Cymbal}, it was inspired by the sound\ + \ qualities of an acoustic instrument. This minimalist and meditative piece is\ + \ a structured improvisation performed on the sponge, a musical interface designed\ + \ by the composer. The sponge is basically a cushion equipped with sensors (accelerometers,\ + \ buttons and force sensing resistors) which detect when it is squeezed, twisted\ + \ or shaken. Because the sponge evolves continuously, the piece exists in many\ + \ versions. Each new version drifts further away from the original compositional\ + \ intentions and the piece is slowly becoming less meditative. The latest version\ + \ is subtitled Albino Butterfly.\n\nComposer(s) Credits:\n\nMartin Marier\n\n\ + Instrumentalist(s) Credits:\n\nMartin Marier (Sponge)\n\nArtist(s) Biography:\n\ + \nMartin Marier is a composer and a performer who is mainly interested in live\ + \ electronic music using new interfaces. He is the inventor of the sponge, a\ + \ cushion like musical interface that he uses to perform his pieces. The main\ + \ goal of this approach is to establish a natural link between gesture and sound\ + \ in electronic music. He aims at improving the interaction with the audience\ + \ and at making the process of composing more playful. His research on the sponge\ + \ is the topic of the doctorate he is pursuing at the Universit\\'{e} de Montr\\'{e}al under\ + \ the supervision of Prof. Jean Pich\\'{e}. He was also supervised by Dr.
Garth\ + \ Paine during an exchange at the University of Western Sydney (Australia) in\ + \ 2011.\nMartin has also composed music for theatre, collaborating mostly with\ + \ the Th\\'{e}\\^{a}tre I.N.K. company for whom he wrote the music of three plays:\ + \ \"L'effet du temps sur Mat\\'{e}vina\" (2012), \"Roche, papier... couteau\"\ + \ (2007), \"La cadette\" (2006). He sometimes writes music for films and collaborates\ + \ with the film composer Benoit Charest. He is one of the founders of Point d'\\\ + '{e}coute (PDE), a collective whose purpose is to promote electroacoustic music.\ + \ Along with his four colleagues of PDE, he produced concerts in Montreal, New\ + \ York and Sydney.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Monday\ + \ May 21, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Martin\ + \ Marier},\n booktitle = {Music Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg Essl\ + \ and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n\ + \ publisher = {Electrical Engineering \\& Computer Science and Performing Arts\ + \ Technology, University of Michigan},\n title = {Clarinet (Albino Butterfly)},\n\ + \ year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -9525,111 +9350,160 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Thought.Projection + title: Clarinet (Albino Butterfly) year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-KimuraKato2012 - abstract: "Program notes:\n\nEigenspace (2011) is a collaborative project with Japan's\ - \ leading visual artist in new media, Tomoyuki Kato (Movie Director), with Yoshito\ - \ Onishi (Image Programing), and Chisako Hasegawa (Producer). 
As Japanese, we\ - \ were deeply touched by the Fukushima nuclear meltdown, the worst manmade catastrophe\ - \ in the history of the human kind, which is not contained today contaminating\ - \ the globe. Eigenspace is about our love and prayer for the humankind and our\ - \ planet, and for the next generation. The name is also taken from ``eigenvalue,''\ - \ a mathematical function used in analyzing the bowing movement, which interacts\ - \ in real time with Mr. Kato's software. The musical expression is extracted\ - \ by IRCAM's ``Augmented Violin'' and their newest motion sensor ``mini-MO'',\ - \ custom-fit into a glove designed by Mark Salinas. Special thanks to the Real\ - \ Time Musical Interactive Team at IRCAM. Eigenspace was commissioned by Harvestworks,\ - \ and premiered at Roulette in Brooklyn, on October 9th, 2011.\n\nComposer(s)\ - \ Credits:\n\nTomoyuki Kato (Movie Director), with Yoshito Onishi (Image Programing),\ - \ and Chisako Hasegawa (Producer)\n\nInstrumentalist(s) Credits:\n\nMari Kimura\ - \ (violin), Tomoyuki Kato (Interactive graphics)\n\nArtist(s) Biography:\n\nMari\ - \ Kimura: Violinist/composer Mari Kimura is widely admired as the inventor of\ - \ ``Subharmonics'' and her works for interactive computer music. As a composer,\ - \ Mari's commissions include the International Computer Music Association, Harvestworks,\ - \ Music from Japan, and received grants including NYFA, Arts International, Meet\ - \ The Composer, Japan Foundation, Argosy Foundation, and NYSCA. In 2010 Mari won\ - \ the Guggenheim Fellowship in Composition, and invited as Composer-in-Residence\ - \ at IRCAM in Paris. In October 2011, the Cassatt String Quartet premiered Mari's\ - \ ``I-Quadrifoglo'', her string quartet with interactive computer at the Symphony\ - \ Space in NYC, commissioned through Fromm Commission Award. 
Feature articles\ - \ in the past year include: the New York Times (May 13th, written by Matthew Gurewitsch),\ - \ and Scientific American (May 31st, written by Larry Greenemeier). Mari's CD,\ - \ The World Below G and Beyond, features her Subharmonics works and interactive\ - \ computer music. Mari teaches a course in Interactive Computer Performance at\ - \ Juilliard. http://www.marikimura.com\n\n\nTomoyuki Kato is a renowned Japanese\ - \ visual artist/movie director who works in a wide range of high-tech projects\ - \ including advertisements, commercials, museums exhibitions and theme-parks.\ - \ Kato's work is known for the superb quality, high impact, originality and new\ - \ technical methods. Recently, Kato has been active in creating corporate future\ - \ vision, such as ``concept car,'' incorporating live action, computer graphics\ - \ and animation on project bases; his recent exhibition includes 2010 Shanghai\ - \ Expo. His highly acclaimed ``Grand Odyssey,'' created for 2005 Aichi Expo's\ - \ Toshiba/Mitsui pavilion, is now displayed at Nagasaki's Huistenbosch theme-park.\ - \ In 2010, Kato created ``Better Life from Japan,'' an exhibit for Otsuka Pharmaceutical\ - \ company at Shanghai Expo, using a 360-degree display. 
Kato has received and\ - \ nominated for numerous awards at international and national festivals, including\ - \ Japan Ministry of Culture Media Arts Festival, Los Angels International Short\ - \ Film Festival, Montreal International Film Festival and London International\ - \ Advertising Festival.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre,\ - \ Tuesday May 22, 7:00pm" + ID: nime2012-music-OuzounianKnappLyonDuBois2012 + abstract: "Program notes:\n\nMusic for Sleeping & Waking Minds (2011-2012) is an\ + \ overnight event in which four performers fall asleep while wearing custom designed\ + \ EEG sensors, which monitor their brainwave activity over the course of one night.\ + \ The data gathered from the EEG sensors is applied in real time to different\ + \ audio and image signal processing functions, resulting in a continuously evolving\ + \ multi-channel sound environment and visual projection. This material serves\ + \ as an audiovisual description of the individual and collective neurophysiological\ + \ state of the ensemble, with sounds and images evolving according to changes\ + \ in brainwave activity. Audiences, who are invited to bring anything that they\ + \ need to ensure comfortable sleep, can experience the work in different states\ + \ of attention: while alert and sleeping, resting and awakening.\n\nGascia Ouzounian\ + \ (composition & production), R. Benjamin Knapp (physiological interface & interaction\ + \ design), Eric Lyon (audio interface & interaction design), R. Luke DuBois (visual\ + \ interface & interaction design)Composer(s) Credits:\n\nGascia Ouzounian (composition\ + \ & production), R. Benjamin Knapp (physiological interface & interaction design),\ + \ Eric Lyon (audio interface & interaction design), R. Luke DuBois (visual interface\ + \ & interaction design)\n\nInstrumentalist(s) Credits:\n\nArtist(s) Biography:\n\ + \nGascia Ouzounian is a violinist, musicologist, and composer. 
She has performed\ + \ with such varied ensembles as Yo-Yo Ma and the Silk Road Ensemble at Carnegie\ + \ Hall, Bang On A Can All-Stars at the Mass MOCA, Sinfonia Toronto at the Toronto\ + \ Centre for the Arts, and the Theatre of Eternal Music Strings Ensemble at the\ + \ Dream House. Gascia's recent projects include two compositions that are intended\ + \ for overnight listening: EDEN EDEN EDEN with filmmaker Chloe Griffin, and Music\ + \ for Sleeping & Waking Minds with R. Benjamin Knapp, Eric Lyon and R. Luke DuBois.\ + \ In the latter, an ensemble of sleeping performers generates an audiovisual environment\ + \ through their neurophysiological activity over the course of one night. Gascia\ + \ teaches at Queen's University Belfast, where she leads the performance programme\ + \ in the School of Creative Arts. Her writings on experimental music and sound\ + \ art appear in numerous academic journals and the book Paul DeMarinis: Buried\ + \ in Noise.\n\nR. Benjamin Knapp is the founding director of the Institute for\ + \ Creativity, Arts, and Technology at Virginia Tech, where he is Professor of\ + \ Computer Science. Ben has led the Music, Sensors and Emotion (MuSE) group, whose\ + \ research focuses on the understanding and measurement of the physical gestures\ + \ and emotional states of musical performers and their audience. For over 20 years,\ + \ Ben has been researching and developing user-interfaces and software that enable\ + \ composers and performers to augment the physical control of a musical instrument\ + \ with more direct neural interaction. 
From the invention of the Biomuse with\ + \ Hugh Lusted in 1987 to the introduction of the concept of an Integral Music\ + \ Controller (a generic class of controllers that use the direct measurement of\ + \ motion and emotion to augment traditional methods of musical instrument control)\ + \ in 2005, Ben has focused on creating a user-aware interface based on the acquisition\ + \ and real-time analysis of biometric signals.\n\nEric Lyon is a composer and\ + \ computer music researcher. During the 1980s and 1990s, his fixed media computer\ + \ music focused on spectral and algorithmic processing of audio, with a tendency\ + \ toward extreme modifications of samples, variously sourced. From the early 1990s,\ + \ Lyon became involved with live computer music, performing solo, and in the Japanese\ + \ band Psychedelic Bumpo, with the Kyma system. Later in the 1990s, he gravitated\ + \ toward software-based live processing, starting to develop Max/MSP externals\ + \ in 1999. This work resulted in his LyonPotpourri collection of Max/MSP externals,\ + \ and the FFTease spectral package, developed in collaboration with Christopher\ + \ Penrose. In recent years, Lyon has focused on computer chamber music, which\ + \ integrates live, iterative DSP strategies into the creation of traditionally\ + \ notated instrumental scores. Other interests include spatial orchestration,\ + \ and articulated noise composition. Lyon teaches computer music in the School\ + \ of Music and Sonic Art at Queen's University Belfast.\n\nR. Luke DuBois is a\ + \ composer, artist, and performer who explores the temporal, verbal, and visual\ + \ structures of cultural and personal ephemera. 
He has collaborated on interactive\ + \ performance, installation, and music production work with many artists and organizations\ + \ including Toni Dove, Matthew Ritchie, Todd Reynolds, Jamie Jewett, Bora Yoon,\ + \ Michael Joaquin Grey, Elliott Sharp, Michael Gordon, Maya Lin, Bang on a Can,\ + \ Engine27, Harvestworks, and LEMUR, and was the director of the Princeton Laptop\ + \ Orchestra for its 2007 season. Stemming from his investigations of ``time-lapse\ + \ phonography,'' his recent work is a sonic and encyclopedic relative to time-lapse\ + \ photography. Just as a long camera exposure fuses motion into a single image,\ + \ his work reveals the average sonority, visual language, and vocabulary in music,\ + \ film, text, or cultural information. He teaches at the Brooklyn Experimental\ + \ Media Center at the Polytechnic Institute of NYU, and is on the Board of Directors\ + \ of Issue Project Room.\n\nConcert Venue and Time: North Quad Space 2435, Monday\ + \ May 21, 11:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Mari Kimura and Tomoyuki Kato - bibtex: "@incollection{nime2012-music-KimuraKato2012,\n abstract = {Program notes:\n\ - \n\\emph{Eigenspace} (2011) is a collaborative project with Japan's leading visual\ - \ artist in new media, Tomoyuki Kato (Movie Director), with Yoshito Onishi (Image\ - \ Programing), and Chisako Hasegawa (Producer). As Japanese, we were deeply touched\ - \ by the Fukushima nuclear meltdown, the worst manmade catastrophe in the history\ - \ of the human kind, which is not contained today contaminating the globe. Eigenspace\ - \ is about our love and prayer for the humankind and our planet, and for the next\ - \ generation. The name is also taken from ``eigenvalue,'' a mathematical function\ - \ used in analyzing the bowing movement, which interacts in real time with Mr.\ - \ Kato's software. 
The musical expression is extracted by IRCAM's ``Augmented\ - \ Violin'' and their newest motion sensor ``mini-MO'', custom-fit into a glove\ - \ designed by Mark Salinas. Special thanks to the Real Time Musical Interactive\ - \ Team at IRCAM. Eigenspace was commissioned by Harvestworks, and premiered at\ - \ Roulette in Brooklyn, on October 9th, 2011.\n\nComposer(s) Credits:\n\nTomoyuki\ - \ Kato (Movie Director), with Yoshito Onishi (Image Programing), and Chisako Hasegawa\ - \ (Producer)\n\nInstrumentalist(s) Credits:\n\nMari Kimura (violin), Tomoyuki\ - \ Kato (Interactive graphics)\n\nArtist(s) Biography:\n\nMari Kimura: Violinist/composer\ - \ \\textbf{Mari Kimura} is widely admired as the inventor of ``Subharmonics''\ - \ and her works for interactive computer music. As a composer, Mari's commissions\ - \ include the International Computer Music Association, Harvestworks, Music from\ - \ Japan, and received grants including NYFA, Arts International, Meet The Composer,\ - \ Japan Foundation, Argosy Foundation, and NYSCA. In 2010 Mari won the Guggenheim\ - \ Fellowship in Composition, and invited as Composer-in-Residence at IRCAM in\ - \ Paris. In October 2011, the Cassatt String Quartet premiered Mari's \\emph{``I-Quadrifoglo''},\ - \ her string quartet with interactive computer at the Symphony Space in NYC, commissioned\ - \ through Fromm Commission Award. Feature articles in the past year include:\ - \ the New York Times (May 13th, written by Matthew Gurewitsch), and Scientific\ - \ American (May 31st, written by Larry Greenemeier). Mari's CD, \\emph{The World\ - \ Below G and Beyond}, features her Subharmonics works and interactive computer\ - \ music. Mari teaches a course in Interactive Computer Performance at Juilliard.\ - \ http://www.marikimura.com\n\n\nTomoyuki Kato is a renowned Japanese visual\ - \ artist/movie director who works in a wide range of high-tech projects including\ - \ advertisements, commercials, museums exhibitions and theme-parks. 
Kato's work\ - \ is known for the superb quality, high impact, originality and new technical\ - \ methods. Recently, Kato has been active in creating corporate future vision,\ - \ such as ``concept car,'' incorporating live action, computer graphics and animation\ - \ on project bases; his recent exhibition includes 2010 Shanghai Expo. His highly\ - \ acclaimed ``Grand Odyssey,'' created for 2005 Aichi Expo's Toshiba/Mitsui pavilion,\ - \ is now displayed at Nagasaki's Huistenbosch theme-park. In 2010, Kato created\ - \ ``Better Life from Japan,'' an exhibit for Otsuka Pharmaceutical company at\ - \ Shanghai Expo, using a 360-degree display. Kato has received and nominated\ - \ for numerous awards at international and national festivals, including Japan\ - \ Ministry of Culture Media Arts Festival, Los Angels International Short Film\ - \ Festival, Montreal International Film Festival and London International Advertising\ - \ Festival.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May\ - \ 22, 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Mari Kimura\ - \ and Tomoyuki Kato},\n booktitle = {Music Proceedings of the International Conference\ + author: Gascia Ouzounian and R.~Benjamin Knapp and Eric Lyon and R.~Luke DuBois + bibtex: "@incollection{nime2012-music-OuzounianKnappLyonDuBois2012,\n abstract =\ + \ {Program notes:\n\n\\emph{Music for Sleeping \\& Waking Minds} (2011-2012) is\ + \ an overnight event in which four performers fall asleep while wearing custom\ + \ designed EEG sensors, which monitor their brainwave activity over the course\ + \ of one night. The data gathered from the EEG sensors is applied in real time\ + \ to different audio and image signal processing functions, resulting in a continuously\ + \ evolving multi-channel sound environment and visual projection. 
This material\ + \ serves as an audiovisual description of the individual and collective neurophysiological\ + \ state of the ensemble, with sounds and images evolving according to changes\ + \ in brainwave activity. Audiences, who are invited to bring anything that they\ + \ need to ensure comfortable sleep, can experience the work in different states\ + \ of attention: while alert and sleeping, resting and awakening.\n\nGascia Ouzounian\ + \ (composition \\& production), R. Benjamin Knapp (physiological interface \\\ + & interaction design), Eric Lyon (audio interface \\& interaction design), R.\ + \ Luke DuBois (visual interface \\& interaction design)Composer(s) Credits:\n\n\ + Gascia Ouzounian (composition \\& production), R. Benjamin Knapp (physiological\ + \ interface \\& interaction design), Eric Lyon (audio interface \\& interaction\ + \ design), R. Luke DuBois (visual interface \\& interaction design)\n\nInstrumentalist(s)\ + \ Credits:\n\nArtist(s) Biography:\n\nGascia Ouzounian is a violinist, musicologist,\ + \ and composer. She has performed with such varied ensembles as Yo-Yo Ma and the\ + \ Silk Road Ensemble at Carnegie Hall, Bang On A Can All-Stars at the Mass MOCA,\ + \ Sinfonia Toronto at the Toronto Centre for the Arts, and the Theatre of Eternal\ + \ Music Strings Ensemble at the Dream House. Gascia's recent projects include\ + \ two compositions that are intended for overnight listening: EDEN EDEN EDEN with\ + \ filmmaker Chloe Griffin, and \\emph{Music for Sleeping \\& Waking Minds} with\ + \ R. Benjamin Knapp, Eric Lyon and R. Luke DuBois. In the latter, an ensemble\ + \ of sleeping performers generates an audiovisual environment through their neurophysiological\ + \ activity over the course of one night. Gascia teaches at Queen's University\ + \ Belfast, where she leads the performance programme in the School of Creative\ + \ Arts. 
Her writings on experimental music and sound art appear in numerous academic\ + \ journals and the book \\emph{Paul DeMarinis: Buried in Noise.}\n\nR. Benjamin\ + \ Knapp is the founding director of the Institute for Creativity, Arts, and Technology\ + \ at Virginia Tech, where he is Professor of Computer Science. Ben has led the\ + \ Music, Sensors and Emotion (MuSE) group, whose research focuses on the understanding\ + \ and measurement of the physical gestures and emotional states of musical performers\ + \ and their audience. For over 20 years, Ben has been researching and developing\ + \ user-interfaces and software that enable composers and performers to augment\ + \ the physical control of a musical instrument with more direct neural interaction.\ + \ From the invention of the Biomuse with Hugh Lusted in 1987 to the introduction\ + \ of the concept of an Integral Music Controller (a generic class of controllers\ + \ that use the direct measurement of motion and emotion to augment traditional\ + \ methods of musical instrument control) in 2005, Ben has focused on creating\ + \ a user-aware interface based on the acquisition and real-time analysis of biometric\ + \ signals.\n\nEric Lyon is a composer and computer music researcher. During the\ + \ 1980s and 1990s, his fixed media computer music focused on spectral and algorithmic\ + \ processing of audio, with a tendency toward extreme modifications of samples,\ + \ variously sourced. From the early 1990s, Lyon became involved with live computer\ + \ music, performing solo, and in the Japanese band Psychedelic Bumpo, with the\ + \ Kyma system. Later in the 1990s, he gravitated toward software-based live processing,\ + \ starting to develop Max/MSP externals in 1999. This work resulted in his LyonPotpourri\ + \ collection of Max/MSP externals, and the FFTease spectral package, developed\ + \ in collaboration with Christopher Penrose. 
In recent years, Lyon has focused\ + \ on computer chamber music, which integrates live, iterative DSP strategies into\ + \ the creation of traditionally notated instrumental scores. Other interests include\ + \ spatial orchestration, and articulated noise composition. Lyon teaches computer\ + \ music in the School of Music and Sonic Art at Queen's University Belfast.\n\n\ + R. Luke DuBois is a composer, artist, and performer who explores the temporal,\ + \ verbal, and visual structures of cultural and personal ephemera. He has collaborated\ + \ on interactive performance, installation, and music production work with many\ + \ artists and organizations including Toni Dove, Matthew Ritchie, Todd Reynolds,\ + \ Jamie Jewett, Bora Yoon, Michael Joaquin Grey, Elliott Sharp, Michael Gordon,\ + \ Maya Lin, Bang on a Can, Engine27, Harvestworks, and LEMUR, and was the director\ + \ of the Princeton Laptop Orchestra for its 2007 season. Stemming from his investigations\ + \ of ``time-lapse phonography,'' his recent work is a sonic and encyclopedic relative\ + \ to time-lapse photography. Just as a long camera exposure fuses motion into\ + \ a single image, his work reveals the average sonority, visual language, and\ + \ vocabulary in music, film, text, or cultural information. 
He teaches at the\ + \ Brooklyn Experimental Media Center at the Polytechnic Institute of NYU, and\ + \ is on the Board of Directors of Issue Project Room.\n\nConcert Venue and Time:\ + \ North Quad Space 2435, Monday May 21, 11:00pm},\n address = {Ann Arbor, Michigan,\ + \ U.S.A.},\n author = {Gascia Ouzounian and R.~Benjamin Knapp and Eric Lyon and\ + \ R.~Luke DuBois},\n booktitle = {Music Proceedings of the International Conference\ \ on New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg\ \ Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month\ \ = {May},\n publisher = {Electrical Engineering \\& Computer Science and Performing\ - \ Arts Technology, University of Michigan},\n title = {Eigenspace},\n year = {2012}\n\ - }\n" + \ Arts Technology, University of Michigan},\n title = {Music for Sleeping \\&\ + \ Waking Minds},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -9637,238 +9511,129 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Eigenspace + title: Music for Sleeping & Waking Minds year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-KimYeo2012 - abstract: "Program notes:\n\nWhere Are You Standing? (2012) is a collaborative mobile\ - \ music piece using the digital compass on mobile phones as an intuitive, interactive\ - \ musical instrument. The piece features performers on stage making sound by aiming\ - \ at other performers: compass-measured orientation of each aiming gesture is\ - \ mapped to a specific musical note depending on which player is aimed at, and\ - \ is visualized on screen in real-time.\n\nThe piece begins with three performers\ - \ playing ``harmonic'' sounds by taking aim at each other. 
This consonance is\ - \ broken by the introduction of the fourth performer who represents conflict:\ - \ the notes played by this performer as well as the notes played by others when\ - \ they aim at this performer are dissonant to cause musical tension. Finally,\ - \ the last performer leaves the stage to resolve the tension, and the piece ends\ - \ with three performers back in congruity.\n\nComposer(s) Credits:\n\nBongjun\ - \ Kim, Woon Seung Yeo\n\nInstrumentalist(s) Credits:\n\nBongjun Kim (operator),\ - \ Woon Seung Yeo, Jeong-seob Lee, Seunghun Kim, Xuelian Yu (iPhones)\n\nArtist(s)\ - \ Biography:\n\nBongjun Kim (b. 1981, Seoul, Korea) is a Masters student at Korea\ - \ Advanced Institute of Science and Technology (KAIST) and a member of the Audio\ - \ and Interactive Media (AIM) Lab at the Graduate School of Culture Technology\ - \ (GSCT), KAIST. Kim has received his B.S. and M.S. degrees in Industrial and\ - \ Information Systems Engineering from Ajou University, and he has also worked\ - \ at Doosan Infracore as an R&D researcher. He is also a composer, performer,\ - \ and system developer of the KAIST Mobile Phone Orchestra (KAMPO), where he has\ - \ designed interactive mobile music performance system and composed the piece\ - \ ``Where Are You Standing?'' which features digital compass-based interaction.\ - \ Currently his research interests are algorithmic composition, music informatics,\ - \ machine improvisation, and mobile media as a new musical interface.\n\nWoon\ - \ Seung Yeo is a bassist, media artist, and computer music researcher/educator.\ - \ He is Assistant Professor at Korea Advanced Institute of Science and Technology\ - \ (KAIST) and leads the Audio and Interactive Media (AIM) Lab and the KAIST Mobile\ - \ Phone Orchestra (KAMPO). Yeo has received degrees from Seoul National University\ - \ (B.S. and M.S. in Electrical Engineering), University of California at Santa\ - \ Barbara (M.S. 
in Media Arts and Technology), and Stanford University (M.A. and\ - \ Ph.D. in Music). His research interests include digital audio signal processing,\ - \ musical acoustics, audiovisual art, cross-modal display, physical interaction\ - \ for music, musical interfaces, mobile media for music, and innovative performance\ - \ paradigm as well. Yeo has also curated/produced mobile music concerts, telematics\ - \ music concerts, and multimedia installations and exhibitions.\n\nJeong-seob\ - \ Lee is a Ph.D. student at the Graduate School of Culture Technology (GSCT),\ - \ KAIST, Korea, and a research member of Audio & Interactive Media Lab. He received\ - \ his M.S. degree from the same institute, and his undergraduate degree in mechanical\ - \ engineering from Seoul National University. As an amateur dancer and choreographer,\ - \ he is interested in various performances involving dance. His experiences on\ - \ stage and in engineering lead him to conduct research in interactive performance\ - \ paradigm and multimedia interface technology. He has produced a number of new\ - \ media performances through collaborations with dancers and musicians, and worked\ - \ as an audiovisual interaction designer. He is also interested in acoustic motion\ - \ detection with off-the-shelf audio devices.\n\nSeunghun Kim is a Ph.D. candidate\ - \ at KAIST and is a member of Audio and Interactive Media (AIM) Lab in the Graduate\ - \ School of Culture Technology (GSCT). He has received the B.S degree in electrical\ - \ and communications engineering from Information and Communications University\ - \ (ICU). He wrote his Master thesis on sound synthesis of the geomungo (a traditional\ - \ Korean stringed instrument) at KAIST. He has presented several papers on musical\ - \ interfaces at domestic/international conferences including the international\ - \ conference on new interfaces for musical expression (NIME) and the international\ - \ computer music conference (ICMC). 
In addition, he has participated in the development\ - \ of interactive installations, which were exhibited at Incheon International\ - \ Digital Art Festival (INDAF), KT&G SangSang Madang, Gwangju Design Biennale,\ - \ and Seoul Digital Media Content International Festival. He is also a member\ - \ of the KAIST Mobile Phone Orchestra (KAMPO).\n\nXuelian Yu was born and raised\ - \ in China and earned her B.S. in engineering at Jiangnan University's Digital\ - \ Media Technology program. She joined the Audio and Interactive Media (AIM) Lab\ - \ at the Graduate School of Culture Technology (GSCT), KAIST in the Fall of 2010\ - \ to combine her problem-solving skills and creative abilities to set up worlds\ - \ that people become characters in the environments and interact with their surroundings.\ - \ Xuelian is currently in Pittsburgh to discover more experience in projects that\ - \ produce artifacts that are intended to entertain, inspire or affect the participants,\ - \ at Entertainment Technology Center of Carnegie Mellon University and she focuses\ - \ on the research on the comparison of description on surround sound at the same\ - \ time. The passion for learning and expanding her experiences has strengthened\ - \ her goal to work in interactive design.\n\nConcert Venue and Time: Lydia Mendelssohn\ - \ Theatre, Tuesday May 22, 7:00pm" + ID: nime2012-music-Bloland2012 + abstract: "Program notes:\n\nOf Dust and Sand uses the Electromagnetically-Prepared\ + \ Piano device, a rack of 12 electromagnets which is suspended over the strings\ + \ of a piano. Each electromagnet is sent an audio signal and in turn excites its\ + \ respective string, much like a stereo speaker made from piano strings. In this\ + \ piece a subset of the magnets remains active throughout, the performer physically\ + \ silencing the strings by pressing down with fingertips. 
Thus the instrument\ + \ becomes a kind of anti-piano---lifting a finger frees a string to vibrate, producing\ + \ sound. In addition, various items, such as paper and a plastic ruler, rest directly\ + \ on the strings further altering the timbre. Remember---everything you hear is\ + \ entirely acoustic.\n\nOf Dust and Sand is dedicated to The Kenners.\n\nComposer(s)\ + \ Credits:\n\nPer Bloland\n\nInstrumentalist(s) Credits:\n\nDaniel Graser (alto\ + \ saxophone), Veena Kulkarni (piano)\n\nArtist(s) Biography:\n\nPer Bloland is\ + \ a composer of acoustic and electroacoustic music whose works have been described\ + \ as having an ``incandescent effect'' with ``dangerous and luscious textures.''\ + \ His compositions range from short intimate solo pieces to works for large orchestra,\ + \ and incorporate video, dance, and custom built electronics. He has received\ + \ awards and recognition from organizations such as SEAMUS/ASCAP, Digital Art\ + \ Awards of Tokyo, ISCM, and SCI/ASCAP. He is currently a Visiting Assistant Professor\ + \ of Computer Music at the Oberlin College Conservatory, and serves as the founding\ + \ director of OINC, the Oberlin Improvisation and Newmusic Collective.\nFor more\ + \ information, please see: www.perbloland.com.\n\nDaniel Graser: Saxophonist Daniel\ + \ Graser is emerging as one of the most innovative performers and pedagogues of\ + \ his generation. A recent recipient of the Doctorate of Musical Arts from the\ + \ University of Michigan, Dan served as Teaching Assistant to legendary saxophone\ + \ pedagogue Donald Sinta for the past three years and joined the faculty of Oakland\ + \ University School of Music, Theater, and Dance in 2011. Previously, Dan earned\ + \ a Masters Degree from the University of Michigan in 2008 and Bachelors degrees\ + \ in music theory/history and saxophone performance as a student of Dr. Timothy\ + \ McAllister at the Crane School of Music in 2007. 
As an orchestral performer,\ + \ Dan has performed as principal saxophonist with the National Wind Ensemble in\ + \ Carnegie Hall under H. Robert Reynolds, the Detroit Symphony Orchestra under\ + \ Leonard Slatkin, The New World Symphony under Michael Tilson Thomas and John\ + \ Adams, the Ann Arbor Symphony under Arie Lipsky, the University of Michigan\ + \ Symphony Orchestra under Kenneth Kiesler, the Hot Springs Festival Orchestra\ + \ under Richard Rosenberg, and the Orchestra of Northern New York under Kenneth\ + \ Andrews. Dan was selected by the University of Michigan to be featured as a\ + \ recitalist at the Kennedy Center for the Performing Arts in Washington DC as\ + \ part of the Millenium Stage Series. Recent and forthcoming performances include\ + \ world premieres at the University of Michigan, orchestral performances with\ + \ the New World Symphony and the Detroit Symphony Orchestra as well as chamber\ + \ music performances at the Navy Band International Saxophone Symposium and the\ + \ 2012 North American Saxophone Association Biennial Conference\n\nVeena Kulkarni:\ + \ A regular performer in southeast Michigan, Veena Kulkarni teaches at the Faber\ + \ Piano Institute and Madonna University. Veena's performances have taken her\ + \ throughout the United States and beyond as both a soloist and collaborator.\ + \ In October, Veena won Best Liszt Interpretation in the 2011 Liszt-Garrison\ + \ International Piano Competition.\nVeena is the pianist for Eero Trio, whose\ + \ debut CD entitled Wolf Glen was released in 2010. Wolf Glen features the premiere\ + \ recording of Christopher Dietz's Fumeux fume, for clarinet, cello & piano. Veena\ + \ completed her doctorate in Piano Performance and Pedagogy under Logan Skelton\ + \ and John Ellis at the University of Michigan. 
Prior to that, she studied at\ + \ Indiana University with Emile Naoumoff and Professors Brancart, Auer, Gulli\ + \ and Tocco and at the Royal Academy of Music with Hamish Milne.\n\nConcert Venue\ + \ and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Bongjun Kim and Woon Seung Yeo - bibtex: "@incollection{nime2012-music-KimYeo2012,\n abstract = {Program notes:\n\ - \n\\emph{Where Are You Standing?} (2012) is a collaborative mobile music piece\ - \ using the digital compass on mobile phones as an intuitive, interactive musical\ - \ instrument. The piece features performers on stage making sound by aiming at\ - \ other performers: compass-measured orientation of each aiming gesture is mapped\ - \ to a specific musical note depending on which player is aimed at, and is visualized\ - \ on screen in real-time.\n\nThe piece begins with three performers playing ``harmonic''\ - \ sounds by taking aim at each other. This consonance is broken by the introduction\ - \ of the fourth performer who represents conflict: the notes played by this performer\ - \ as well as the notes played by others when they aim at this performer are dissonant\ - \ to cause musical tension. Finally, the last performer leaves the stage to resolve\ - \ the tension, and the piece ends with three performers back in congruity.\n\n\ - Composer(s) Credits:\n\nBongjun Kim, Woon Seung Yeo\n\nInstrumentalist(s) Credits:\n\ - \nBongjun Kim (operator), Woon Seung Yeo, Jeong-seob Lee, Seunghun Kim, Xuelian\ - \ Yu (iPhones)\n\nArtist(s) Biography:\n\nBongjun Kim (b. 1981, Seoul, Korea)\ - \ is a Masters student at Korea Advanced Institute of Science and Technology (KAIST)\ - \ and a member of the Audio and Interactive Media (AIM) Lab at the Graduate School\ - \ of Culture Technology (GSCT), KAIST. Kim has received his B.S. and M.S. 
degrees\ - \ in Industrial and Information Systems Engineering from Ajou University, and\ - \ he has also worked at Doosan Infracore as an R\\&D researcher. He is also a\ - \ composer, performer, and system developer of the KAIST Mobile Phone Orchestra\ - \ (KAMPO), where he has designed interactive mobile music performance system and\ - \ composed the piece ``Where Are You Standing?'' which features digital compass-based\ - \ interaction. Currently his research interests are algorithmic composition, music\ - \ informatics, machine improvisation, and mobile media as a new musical interface.\n\ - \nWoon Seung Yeo is a bassist, media artist, and computer music researcher/educator.\ - \ He is Assistant Professor at Korea Advanced Institute of Science and Technology\ - \ (KAIST) and leads the Audio and Interactive Media (AIM) Lab and the KAIST Mobile\ - \ Phone Orchestra (KAMPO). Yeo has received degrees from Seoul National University\ - \ (B.S. and M.S. in Electrical Engineering), University of California at Santa\ - \ Barbara (M.S. in Media Arts and Technology), and Stanford University (M.A. and\ - \ Ph.D. in Music). His research interests include digital audio signal processing,\ - \ musical acoustics, audiovisual art, cross-modal display, physical interaction\ - \ for music, musical interfaces, mobile media for music, and innovative performance\ - \ paradigm as well. Yeo has also curated/produced mobile music concerts, telematics\ - \ music concerts, and multimedia installations and exhibitions.\n\nJeong-seob\ - \ Lee is a Ph.D. student at the Graduate School of Culture Technology (GSCT),\ - \ KAIST, Korea, and a research member of Audio \\& Interactive Media Lab. He received\ - \ his M.S. degree from the same institute, and his undergraduate degree in mechanical\ - \ engineering from Seoul National University. As an amateur dancer and choreographer,\ - \ he is interested in various performances involving dance. 
His experiences on\ - \ stage and in engineering lead him to conduct research in interactive performance\ - \ paradigm and multimedia interface technology. He has produced a number of new\ - \ media performances through collaborations with dancers and musicians, and worked\ - \ as an audiovisual interaction designer. He is also interested in acoustic motion\ - \ detection with off-the-shelf audio devices.\n\nSeunghun Kim is a Ph.D. candidate\ - \ at KAIST and is a member of Audio and Interactive Media (AIM) Lab in the Graduate\ - \ School of Culture Technology (GSCT). He has received the B.S degree in electrical\ - \ and communications engineering from Information and Communications University\ - \ (ICU). He wrote his Master thesis on sound synthesis of the geomungo (a traditional\ - \ Korean stringed instrument) at KAIST. He has presented several papers on musical\ - \ interfaces at domestic/international conferences including the international\ - \ conference on new interfaces for musical expression (NIME) and the international\ - \ computer music conference (ICMC). In addition, he has participated in the development\ - \ of interactive installations, which were exhibited at Incheon International\ - \ Digital Art Festival (INDAF), KT\\&G SangSang Madang, Gwangju Design Biennale,\ - \ and Seoul Digital Media Content International Festival. He is also a member\ - \ of the KAIST Mobile Phone Orchestra (KAMPO).\n\nXuelian Yu was born and raised\ - \ in China and earned her B.S. in engineering at Jiangnan University's Digital\ - \ Media Technology program. 
She joined the Audio and Interactive Media (AIM) Lab\ - \ at the Graduate School of Culture Technology (GSCT), KAIST in the Fall of 2010\ - \ to combine her problem-solving skills and creative abilities to set up worlds\ - \ that people become characters in the environments and interact with their surroundings.\ - \ Xuelian is currently in Pittsburgh to discover more experience in projects that\ - \ produce artifacts that are intended to entertain, inspire or affect the participants,\ - \ at Entertainment Technology Center of Carnegie Mellon University and she focuses\ - \ on the research on the comparison of description on surround sound at the same\ - \ time. The passion for learning and expanding her experiences has strengthened\ - \ her goal to work in interactive design.\n\nConcert Venue and Time: Lydia Mendelssohn\ - \ Theatre, Tuesday May 22, 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n\ - \ author = {Bongjun Kim and Woon Seung Yeo},\n booktitle = {Music Proceedings\ + author: Per Bloland + bibtex: "@incollection{nime2012-music-Bloland2012,\n abstract = {Program notes:\n\ + \n\\emph{Of Dust and Sand} uses the Electromagnetically-Prepared Piano device,\ + \ a rack of 12 electromagnets which is suspended over the strings of a piano.\ + \ Each electromagnet is sent an audio signal and in turn excites its respective\ + \ string, much like a stereo speaker made from piano strings. In this piece a\ + \ subset of the magnets remains active throughout, the performer physically silencing\ + \ the strings by pressing down with fingertips. Thus the instrument becomes a\ + \ kind of anti-piano---lifting a finger frees a string to vibrate, producing sound.\ + \ In addition, various items, such as paper and a plastic ruler, rest directly\ + \ on the strings further altering the timbre. 
Remember---everything you hear is\ + \ entirely acoustic.\n\nOf Dust and Sand is dedicated to The Kenners.\n\nComposer(s)\ + \ Credits:\n\nPer Bloland\n\nInstrumentalist(s) Credits:\n\nDaniel Graser (alto\ + \ saxophone), Veena Kulkarni (piano)\n\nArtist(s) Biography:\n\nPer Bloland is\ + \ a composer of acoustic and electroacoustic music whose works have been described\ + \ as having an ``incandescent effect'' with ``dangerous and luscious textures.''\ + \ His compositions range from short intimate solo pieces to works for large orchestra,\ + \ and incorporate video, dance, and custom built electronics. He has received\ + \ awards and recognition from organizations such as SEAMUS/ASCAP, Digital Art\ + \ Awards of Tokyo, ISCM, and SCI/ASCAP. He is currently a Visiting Assistant Professor\ + \ of Computer Music at the Oberlin College Conservatory, and serves as the founding\ + \ director of OINC, the Oberlin Improvisation and Newmusic Collective.\nFor more\ + \ information, please see: www.perbloland.com.\n\nDaniel Graser: Saxophonist \\\ + textbf{Daniel Graser} is emerging as one of the most innovative performers and\ + \ pedagogues of his generation. A recent recipient of the Doctorate of Musical\ + \ Arts from the University of Michigan, Dan served as Teaching Assistant to legendary\ + \ saxophone pedagogue Donald Sinta for the past three years and joined the faculty\ + \ of Oakland University School of Music, Theater, and Dance in 2011. Previously,\ + \ Dan earned a Masters Degree from the University of Michigan in 2008 and Bachelors\ + \ degrees in music theory/history and saxophone performance as a student of Dr.\ + \ Timothy McAllister at the Crane School of Music in 2007. As an orchestral performer,\ + \ Dan has performed as principal saxophonist with the National Wind Ensemble in\ + \ Carnegie Hall under H. 
Robert Reynolds, the Detroit Symphony Orchestra under\ + \ Leonard Slatkin, The New World Symphony under Michael Tilson Thomas and John\ + \ Adams, the Ann Arbor Symphony under Arie Lipsky, the University of Michigan\ + \ Symphony Orchestra under Kenneth Kiesler, the Hot Springs Festival Orchestra\ + \ under Richard Rosenberg, and the Orchestra of Northern New York under Kenneth\ + \ Andrews. Dan was selected by the University of Michigan to be featured as a\ + \ recitalist at the Kennedy Center for the Performing Arts in Washington DC as\ + \ part of the Millenium Stage Series. Recent and forthcoming performances include\ + \ world premieres at the University of Michigan, orchestral performances with\ + \ the New World Symphony and the Detroit Symphony Orchestra as well as chamber\ + \ music performances at the Navy Band International Saxophone Symposium and the\ + \ 2012 North American Saxophone Association Biennial Conference\n\nVeena Kulkarni:\ + \ A regular performer in southeast Michigan, \\textbf{Veena Kulkarni} teaches\ + \ at the Faber Piano Institute and Madonna University. Veena's performances have\ + \ taken her throughout the United States and beyond as both a soloist and collaborator.\ + \ In October, Veena won Best Liszt Interpretation in the 2011 Liszt-Garrison\ + \ International Piano Competition.\nVeena is the pianist for Eero Trio, whose\ + \ debut CD entitled Wolf Glen was released in 2010. Wolf Glen features the premiere\ + \ recording of Christopher Dietz's Fumeux fume, for clarinet, cello \\& piano.\ + \ Veena completed her doctorate in Piano Performance and Pedagogy under Logan\ + \ Skelton and John Ellis at the University of Michigan. 
Prior to that, she studied\ + \ at Indiana University with Emile Naoumoff and Professors Brancart, Auer, Gulli\ + \ and Tocco and at the Royal Academy of Music with Hamish Milne.\n\nConcert Venue\ + \ and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm},\n address = {Ann\ + \ Arbor, Michigan, U.S.A.},\n author = {Per Bloland},\n booktitle = {Music Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Where Are You Standing?},\n year = {2012}\n}\n" - booktitle: Music Proceedings of the International Conference on New Interfaces for - Musical Expression - day: 21-23 - editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain - month: May - publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, - University of Michigan' - title: 'Where Are You Standing?' - year: 2012 - - -- ENTRYTYPE: incollection - ID: nime2012-music-Dahlstedt2012 - abstract: "Program notes:\n\nAn improvised performance on a custom built instrument,\ - \ using a simple pencil drawing as a gestural interface for controlling complex\ - \ analog synthesis. The interface works by using the resistive properties of carbon\ - \ to create a voltage potential field in the graphite/pencil markings on the paper\ - \ using custom movable electrodes, made from coins. Then, control voltages are\ - \ extracted from other points on the paper, controlling various aspects of the\ - \ synthesized sound. The design was inspired by my previous research in complex\ - \ mappings for advanced digital instruments, and provides a similarly dynamic\ - \ playing environment for analogue synthesis. 
The interface is very lo-tech, easy\ - \ to build, and should be possible to use with any analogue modular synthesizer.\ - \ Here, I use it with a Bugbrand modular, built by Tom Bugs in Bristol, UK. The\ - \ interface is presented in more detail in a paper presentation at the NIME conference.\n\ - \nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nPalle Dahlstedt (pencil\ - \ fields interface & modular synthesizer)\n\nArtist(s) Biography:\n\nPalle Dahlstedt\ - \ (b.1971), composer, improviser, pianist and researcher from Stockholm, since\ - \ 1994 living in Göteborg, Sweden. With composition degrees from the Academies\ - \ of Malmö and Göteborg and a PhD from Chalmers University of Technology in evolutionary\ - \ algorithms for composition, he is currently the main lecturer in electronic\ - \ music composition at the Academy of Music and Drama, University of Gothenburg,\ - \ and artistic director the Lindblad Studios. Also, he is associate professor\ - \ in computer-aided creativity at the Department of Applied IT, performing extensive\ - \ research in novel technology-based performance and improvisation techniques\ - \ for electronic and acoustic musicians, and in computer models of artistic creative\ - \ processes. His music has been performed on six continents and received several\ - \ awards, e.g., in 2001 he was awarded the prestigeous Gaudeamus Prize, as the\ - \ first ever for an electronic work. He is also performing on piano with and without\ - \ electronics, and in the electronic free impro duo pantoMorf.\n\nConcert Venue\ - \ and Time: Necto, Tuesday May 22, 9:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Palle Dahlstedt - bibtex: "@incollection{nime2012-music-Dahlstedt2012,\n abstract = {Program notes:\n\ - \nAn improvised performance on a custom built instrument, using a simple pencil\ - \ drawing as a gestural interface for controlling complex analog synthesis. 
The\ - \ interface works by using the resistive properties of carbon to create a voltage\ - \ potential field in the graphite/pencil markings on the paper using custom movable\ - \ electrodes, made from coins. Then, control voltages are extracted from other\ - \ points on the paper, controlling various aspects of the synthesized sound. The\ - \ design was inspired by my previous research in complex mappings for advanced\ - \ digital instruments, and provides a similarly dynamic playing environment for\ - \ analogue synthesis. The interface is very lo-tech, easy to build, and should\ - \ be possible to use with any analogue modular synthesizer. Here, I use it with\ - \ a Bugbrand modular, built by Tom Bugs in Bristol, UK. The interface is presented\ - \ in more detail in a paper presentation at the NIME conference.\n\nComposer(s)\ - \ Credits:\n\nInstrumentalist(s) Credits:\n\nPalle Dahlstedt (pencil fields interface\ - \ \\& modular synthesizer)\n\nArtist(s) Biography:\n\nPalle Dahlstedt (b.1971),\ - \ composer, improviser, pianist and researcher from Stockholm, since 1994 living\ - \ in G\\\"{o}teborg, Sweden. With composition degrees from the Academies of Malm\\\ - \"{o} and G\\\"{o}teborg and a PhD from Chalmers University of Technology in evolutionary\ - \ algorithms for composition, he is currently the main lecturer in electronic\ - \ music composition at the Academy of Music and Drama, University of Gothenburg,\ - \ and artistic director the Lindblad Studios. Also, he is associate professor\ - \ in computer-aided creativity at the Department of Applied IT, performing extensive\ - \ research in novel technology-based performance and improvisation techniques\ - \ for electronic and acoustic musicians, and in computer models of artistic creative\ - \ processes. His music has been performed on six continents and received several\ - \ awards, e.g., in 2001 he was awarded the prestigeous Gaudeamus Prize, as the\ - \ first ever for an electronic work. 
He is also performing on piano with and without\ - \ electronics, and in the electronic free impro duo pantoMorf.\n\nConcert Venue\ - \ and Time: Necto, Tuesday May 22, 9:00pm},\n address = {Ann Arbor, Michigan,\ - \ U.S.A.},\n author = {Palle Dahlstedt},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n day =\ - \ {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich and\ - \ Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering \\\ - & Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Pencil Fields},\n year = {2012}\n}\n" + \ title = {Of Dust and Sand},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -9876,97 +9641,129 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Pencil Fields + title: Of Dust and Sand year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-BrophyLabadie2012 - abstract: "Program notes:\n\nMany of the discourses around technological development\ - \ in music are deeply concerned with aspects of control; i.e. how does one exert\ - \ their control, or ``mastery'' over the technology they use. However, we propose\ - \ that technological systems with a certain amount of unpredictability and randomness\ - \ may also be useful, especially for improvisation. As an improvisation duo, our\ - \ method often involves designing electronic instruments whose behaviors are somewhat\ - \ unpredictable. As a result, our entire aesthetic is largely based on ``riding''\ - \ the boundary of control. 
Working in this way creates a situation where we are\ - \ often forced to react to, and work with, the unexpected.\n\nOur improvisation\ - \ features a number of handmade and hacked electronic instruments, all of which\ - \ have been designed to behave somewhat unpredictably.\n\nComposer(s) Credits:\n\ - \nInstrumentalist(s) Credits:\n\nDaniel Brophy (electronics), Colin Labadie (electronics)\n\ - \nArtist(s) Biography:\n\nDaniel Brophy is a composer, performer and improviser\ - \ of various musical styles and instrumentations ranging from orchestral and chamber\ - \ music to extreme metal, sound installations, experimental improvisation and\ - \ noise. He is a recipient of a SSHRC research grant, the 2012 KW Chamber Orchestra\ - \ composition prize, the University of Alberta's President's Award of Distinction,\ - \ and a Queen Elizabeth II Graduate Scholarship. Daniel currently resides in Edmonton,\ - \ Alberta where he is pursuing a Doctor of Music degree in composition under the\ - \ supervision of Dr. Scott Smallwood. He is member of the noise duo MUGBAIT and\ - \ is proud to have worked with a number of other wonderful musicians, dancers\ - \ and visual artists such as The Enterprise Quartet, junctQin, Digital Prowess,\ - \ TorQ, Gerry Morita, Werner Friesen and many others. Daniel is currently developing\ - \ interactive clothing for dancers, utilizing a combination of high and low technology.\n\ - \nColin Labadie is a composer and performer currently based in Edmonton, Alberta.\ - \ His musical output ranges from solo, chamber, choral, orchestral, and electroacoustic\ - \ compositions, to sound installations, multimedia collaboration, experimental\ - \ improvisation, and noise music. 
His work is shaped by a broad range of musical\ - \ influences, at times dealing exclusively with repetition, patterns, and subtle\ - \ variation, while at others exploring chaos and unpredictability.\nColin holds\ - \ a BMus from Wilfrid Laurier University, where he studied with Linda Catlin Smith\ - \ and Peter Hatch, and an MMus from the University of Alberta where he studied\ - \ with Howard Bashaw, Mark Hannesson, Scott Smallwood, and Andriy Talpash. Currently,\ - \ he is pursuing a Doctoral degree in Composition from the University of Alberta\ - \ under the supervision of Scott Smallwood. He is the recipient of SSHRC's Joseph-Armand\ - \ Bombardier Master's and Doctoral Scholarships, the University of Alberta Master's\ - \ and Doctoral Recruitment Scholarships, and the President's Doctoral Prize of\ - \ Distinction.\n\nConcert Venue and Time: Necto, Tuesday May 22, 9:00pm" + ID: nime2012-music-Deal2012 + abstract: "Program notes:\n\nJack Walk explores notions of ecstatic energy, control\ + \ and release. The work begins with live and fixed percussion lines, re-processed\ + \ into a series of electronic representations of specified structure. This provides\ + \ a compositional framework that a percussionist interacts with, while in another\ + \ sonic layer, a laptop musician simultaneously samples and re-processes the live\ + \ percussion while channeling the audio back into the larger environment. A videographer\ + \ mixes imagery related to the original compositional notions of ecstatic control\ + \ and release. Layers of sonic material emanating from the drummer's kit blur\ + \ the virtual and real, while the music and imagery evoke imaginary lines tracing\ + \ physical and conceptual flows of energy. 
The trio of performers for the NIME\ + \ 2012 performance of Jack Walk (Deal, Drews, and Munson) comprise group known\ + \ as Big Robot, an Indianapolis-based computer-acoustic trio that creates live,\ + \ interactive, and media-enriched works.\n\nComposer(s) Credits:\n\nScott Deal\n\ + \nInstrumentalist(s) Credits:\n\nScott Deal (percussion), Michael Drews (audio\ + \ electronics), Jordan Munson (video)\n\nArtist(s) Biography:\n\nScott Deal has\ + \ premiered solo, chamber and mixed media works throughout North America Europe,\ + \ and Asia. An artist who ``displays phenomenal virtuosity'' (Artsfuse) and presents\ + \ a ``riveting performance'' (Sequenza 21), his recording of John Luther Adams's\ + \ Four Thousand Holes, for piano, percussion, and electronics was listed in New\ + \ Yorker Magazine's 2011 Top Ten Classical Recordings. In 2011, he and composer\ + \ Matthew Burtner were awarded the Internet2 IDEA Award for their co-creation\ + \ of Auksalaq, a telematic opera. Deal is Professor of Music and Director of\ + \ the Donald Louis Tavel Arts and Technology Research Center at Indiana University\ + \ Purdue University Indianapolis (IUPUI). He is the founder and director of the\ + \ Telematic Collective, a multi-disciplinary artistic research group comprised\ + \ of graduate students and professional collaborators. He also serves on the faculty\ + \ for the Summer Institute for Contemporary Performance Practice at the New England\ + \ Conservatory.\n\nMichael Drews is a composer, sound artist and computer musician.\ + \ His work explores unconventional narrative strategies created from transforming\ + \ contextual identity and the expressive power of cultural artifacts found in\ + \ particular sonic and visual materials. Present throughout Drews's work is an\ + \ interest in performance-based computer virtuosity and improvisational applications\ + \ of computer technology that expand traditional ideas of musical performance\ + \ and creativity. 
Drews is a member of computer-acoustic ensemble, Big Robot and\ + \ the experimental-electronica duo, Mana2. Performances of Drews's compositions\ + \ have been featured at SEAMUS, Cinesonika, Electronic Music Midwest, NYC Electronic\ + \ Music Festival, Studio 300, PASIC, Super Computing Global and IASPM-Canada.\ + \ Drews holds degrees from the University of Illinois at Urbana-Champaign (D.M.A.),\ + \ Cleveland State University (M.MUS.) and Kent State University (B.A.). He resides\ + \ with his family in Indianapolis and is Assistant Professor of Music at Indiana\ + \ University-Indianapolis (IUPUI). For more information: michaeldrews.org or Twitter.com/MICHAEL-DREWS\n\ + \nJordan Munson is a musician, composer, and multimedia artist. He is a Lecturer\ + \ in Music and Arts Technology, and an associate of the Donald Louis Tavel Arts\ + \ and Technology Research Center, both at Indiana University Purdue University\ + \ Indianapolis (IUPUI). His works for multimedia and music have been premiered\ + \ at institutions such as the University of Kentucky, the University of Alaska\ + \ at Fairbanks and the University of California San Diego. As a video artist,\ + \ he has shown work at New York City Electro-Acoustic Music Festival and SEAMUS.\ + \ Munson's experimental electronic efforts have resulted in performances alongside\ + \ artists such as Matmos, R. Luke DuBois and Bora Yoon. He is a member of the\ + \ computer-acoustic ensemble Big Robot, in which he work focuses on live experimental\ + \ percussion and electronics. Munson holds degrees from Indiana University in\ + \ Indianapolis (M.S.M.T.) and the University of Kentucky (B.M.).\n\nConcert Venue\ + \ and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' 
- author: Daniel Brophy and Colin Labadie - bibtex: "@incollection{nime2012-music-BrophyLabadie2012,\n abstract = {Program notes:\n\ - \nMany of the discourses around technological development in music are deeply\ - \ concerned with aspects of control; i.e. how does one exert their control, or\ - \ ``mastery'' over the technology they use. However, we propose that technological\ - \ systems with a certain amount of unpredictability and randomness may also be\ - \ useful, especially for improvisation. As an improvisation duo, our method often\ - \ involves designing electronic instruments whose behaviors are somewhat unpredictable.\ - \ As a result, our entire aesthetic is largely based on ``riding'' the boundary\ - \ of control. Working in this way creates a situation where we are often forced\ - \ to react to, and work with, the unexpected.\n\nOur improvisation features a\ - \ number of handmade and hacked electronic instruments, all of which have been\ - \ designed to behave somewhat unpredictably.\n\nComposer(s) Credits:\n\nInstrumentalist(s)\ - \ Credits:\n\nDaniel Brophy (electronics), Colin Labadie (electronics)\n\nArtist(s)\ - \ Biography:\n\nDaniel Brophy is a composer, performer and improviser of various\ - \ musical styles and instrumentations ranging from orchestral and chamber music\ - \ to extreme metal, sound installations, experimental improvisation and noise.\ - \ He is a recipient of a SSHRC research grant, the 2012 KW Chamber Orchestra composition\ - \ prize, the University of Alberta's President's Award of Distinction, and a Queen\ - \ Elizabeth II Graduate Scholarship. Daniel currently resides in Edmonton, Alberta\ - \ where he is pursuing a Doctor of Music degree in composition under the supervision\ - \ of Dr. Scott Smallwood. 
He is member of the noise duo MUGBAIT and is proud to\ - \ have worked with a number of other wonderful musicians, dancers and visual artists\ - \ such as The Enterprise Quartet, junctQin, Digital Prowess, TorQ, Gerry Morita,\ - \ Werner Friesen and many others. Daniel is currently developing interactive clothing\ - \ for dancers, utilizing a combination of high and low technology.\n\nColin Labadie\ - \ is a composer and performer currently based in Edmonton, Alberta. His musical\ - \ output ranges from solo, chamber, choral, orchestral, and electroacoustic compositions,\ - \ to sound installations, multimedia collaboration, experimental improvisation,\ - \ and noise music. His work is shaped by a broad range of musical influences,\ - \ at times dealing exclusively with repetition, patterns, and subtle variation,\ - \ while at others exploring chaos and unpredictability.\nColin holds a BMus from\ - \ Wilfrid Laurier University, where he studied with Linda Catlin Smith and Peter\ - \ Hatch, and an MMus from the University of Alberta where he studied with Howard\ - \ Bashaw, Mark Hannesson, Scott Smallwood, and Andriy Talpash. Currently, he is\ - \ pursuing a Doctoral degree in Composition from the University of Alberta under\ - \ the supervision of Scott Smallwood. He is the recipient of SSHRC's Joseph-Armand\ - \ Bombardier Master's and Doctoral Scholarships, the University of Alberta Master's\ - \ and Doctoral Recruitment Scholarships, and the President's Doctoral Prize of\ - \ Distinction.\n\nConcert Venue and Time: Necto, Tuesday May 22, 9:00pm},\n address\ - \ = {Ann Arbor, Michigan, U.S.A.},\n author = {Daniel Brophy and Colin Labadie},\n\ + author: Scott Deal + bibtex: "@incollection{nime2012-music-Deal2012,\n abstract = {Program notes:\n\n\ + \\emph{Jack Walk} explores notions of ecstatic energy, control and release. 
The\ + \ work begins with live and fixed percussion lines, re-processed into a series\ + \ of electronic representations of specified structure. This provides a compositional\ + \ framework that a percussionist interacts with, while in another sonic layer,\ + \ a laptop musician simultaneously samples and re-processes the live percussion\ + \ while channeling the audio back into the larger environment. A videographer\ + \ mixes imagery related to the original compositional notions of ecstatic control\ + \ and release. Layers of sonic material emanating from the drummer's kit blur\ + \ the virtual and real, while the music and imagery evoke imaginary lines tracing\ + \ physical and conceptual flows of energy. The trio of performers for the NIME\ + \ 2012 performance of \\emph{Jack Walk} (Deal, Drews, and Munson) comprise group\ + \ known as Big Robot, an Indianapolis-based computer-acoustic trio that creates\ + \ live, interactive, and media-enriched works.\n\nComposer(s) Credits:\n\nScott\ + \ Deal\n\nInstrumentalist(s) Credits:\n\nScott Deal (percussion), Michael Drews\ + \ (audio electronics), Jordan Munson (video)\n\nArtist(s) Biography:\n\nScott\ + \ Deal has premiered solo, chamber and mixed media works throughout North America\ + \ Europe, and Asia. An artist who ``displays phenomenal virtuosity'' (Artsfuse)\ + \ and presents a ``riveting performance'' (Sequenza 21), his recording of John\ + \ Luther Adams's \\emph{Four Thousand Holes}, for piano, percussion, and electronics\ + \ was listed in New Yorker Magazine's 2011 Top Ten Classical Recordings. In 2011,\ + \ he and composer Matthew Burtner were awarded the Internet2 IDEA Award for their\ + \ co-creation of \\emph{Auksalaq}, a telematic opera. Deal is Professor of Music\ + \ and Director of the Donald Louis Tavel Arts and Technology Research Center at\ + \ Indiana University Purdue University Indianapolis (IUPUI). 
He is the founder\ + \ and director of the Telematic Collective, a multi-disciplinary artistic research\ + \ group comprised of graduate students and professional collaborators. He also\ + \ serves on the faculty for the Summer Institute for Contemporary Performance\ + \ Practice at the New England Conservatory.\n\nMichael Drews is a composer, sound\ + \ artist and computer musician. His work explores unconventional narrative strategies\ + \ created from transforming contextual identity and the expressive power of cultural\ + \ artifacts found in particular sonic and visual materials. Present throughout\ + \ Drews's work is an interest in performance-based computer virtuosity and improvisational\ + \ applications of computer technology that expand traditional ideas of musical\ + \ performance and creativity. Drews is a member of computer-acoustic ensemble,\ + \ Big Robot and the experimental-electronica duo, Mana2. Performances of Drews's\ + \ compositions have been featured at SEAMUS, Cinesonika, Electronic Music Midwest,\ + \ NYC Electronic Music Festival, Studio 300, PASIC, Super Computing Global and\ + \ IASPM-Canada. Drews holds degrees from the University of Illinois at Urbana-Champaign\ + \ (D.M.A.), Cleveland State University (M.MUS.) and Kent State University (B.A.).\ + \ He resides with his family in Indianapolis and is Assistant Professor of Music\ + \ at Indiana University-Indianapolis (IUPUI). For more information: michaeldrews.org\ + \ or Twitter.com/MICHAEL-DREWS\n\nJordan Munson is a musician, composer, and multimedia\ + \ artist. He is a Lecturer in Music and Arts Technology, and an associate of\ + \ the Donald Louis Tavel Arts and Technology Research Center, both at Indiana\ + \ University Purdue University Indianapolis (IUPUI). 
His works for multimedia\ + \ and music have been premiered at institutions such as the University of Kentucky,\ + \ the University of Alaska at Fairbanks and the University of California San Diego.\ + \ As a video artist, he has shown work at New York City Electro-Acoustic Music\ + \ Festival and SEAMUS. Munson's experimental electronic efforts have resulted\ + \ in performances alongside artists such as Matmos, R. Luke DuBois and Bora Yoon.\ + \ He is a member of the computer-acoustic ensemble Big Robot, in which he work\ + \ focuses on live experimental percussion and electronics. Munson holds degrees\ + \ from Indiana University in Indianapolis (M.S.M.T.) and the University of Kentucky\ + \ (B.M.).\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May 22,\ + \ 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Scott Deal},\n\ \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ \ for Musical Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie\ \ and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Munich Eunuch},\n year = {2012}\n}\n" + \ of Michigan},\n title = {Jack Walk},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -9974,170 +9771,73 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Munich Eunuch + title: Jack Walk year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-FiggMcCormackCox2012 - abstract: "Program notes:\n\nThis work merges sound and light to illuminate complex\ - \ rhythmic motives, polyrhythms and metrical patterns in a visual display generated\ - \ by three drummers playing six ``light'' drums. 
These new instruments bring to\ - \ life the dreams of 20th century synesthetes, such as Wassily Kandinsky and Alexander\ - \ Scriabin and others who sought to create an imagined ``visual music,'' an ideal\ - \ synthesis of music and visual art.\n\nCommunity Light Beacons are percussion\ - \ instruments that leverage the potentials of music, analog technology, and human-generated\ - \ power to visualize sound. These instruments add the dimension of light to the\ - \ ancient tradition of drumming. The drums are user-powered, and when they are\ - \ played---banged, hit and tapped---the vibrations from the drumhead are converted\ - \ to electricity by the internal speaker transducer. The generated energy powers\ - \ ultra bright LEDs, which light up with every hit and beam out from the Fresnel\ - \ lens.\n\nComposer(s) Credits:\n\nJenn Figg, Matthew McCormack, Paul Cox\n\n\ - Instrumentalist(s) Credits:\n\nRyan Hilty, Samuel Haese, Eric Young (Kinetic Light\ - \ Drums)\n\nArtist(s) Biography:\n\nJenn Figg is an artist investigating the connections\ - \ between industry, science and art through the transformation of energy, performative\ - \ objects and constructed ecosystems. She graduated with a BFA in Textiles from\ - \ the Rhode Island School of Design and an MFA from the University of California\ - \ at Santa Barbara. She is pursuing her Ph.D. in Media, Art, and Text at Virginia\ - \ Commonwealth University. She lives in Baltimore and is an Assistant Professor\ - \ of Art at Towson University in Maryland. Exhibitions include The Print Center\ - \ in Philadelphia, Pennsylvania, The Art House at the Jones Center in Austin,\ - \ Texas, Virginia MOCA in Virginia Beach, Virginia, the Columbus Center of Science\ - \ and Industry in Columbus, Ohio, the Ingenuity Festival in Cleveland, Ohio. 
Other\ - \ awards and residencies include the MacDowell Colony, the Lower Manhattan Cultural\ - \ Council Residency, the Great Lakes College Association New Directions Initiative,\ - \ and the University of California Interdisciplinary Humanities Center, Visual,\ - \ Performing & Media Arts Award.\n\nMatthew McCormack explores energy transformation\ - \ and expression through technology, kinetic sculpture and blown glass. He graduated\ - \ with a BFA in Glass from The Ohio State University and is now living in Baltimore,\ - \ Maryland. He is pursuing an Interdisciplinary MFA at Towson University. His\ - \ research interests include modifying a speaker transducer for optimum energy\ - \ generation and developing a series of rapid prototyped Fresnel lens stamps for\ - \ quartz crystal light instruments. His work has been featured at the Virginia\ - \ Museum of Contemporary Art in Virginia Beach, Virginia, the Columbus Center\ - \ of Science and Industry in Columbus, Ohio, the Toledo Museum of Art in Toledo,\ - \ Ohio, the Rankin Art Gallery at Ferris State University in Big Rapids, Michigan,\ - \ the National Museum of Glass in Eskisehir, Turkey, the Franklin Park Conservatory\ - \ in Columbus, Ohio, the Ingenuity Festival in Cleveland, Ohio, and as part of\ - \ the Lower Manhattan Cultural Council's Governors Island Residency in New York\ - \ City.\n\nPaul Cox is a scholar, composer and percussionist in Cleveland, Ohio.\ - \ He currently teaches music history and percussion at Case Western Reserve University\ - \ (CWRU) and the Oberlin Conservatory of Music, where he is a Visiting Assistant\ - \ Professor. He earned a PhD in musicology from CWRU in 2011 after the completion\ - \ of his dissertation, Collaged Codes: John Cage's Credo in Us, a study of Cage\ - \ and Merce Cunningham's first dance collaboration in 1942. 
Current projects include\ - \ composing Just.Are.Same for string quartet, oboe and tape, which weaves together\ - \ an electronic soundscape of spoken words drawn from victims of genocide with\ - \ acoustic and electronic sounds; composing an evening-length work for the ensemble\ - \ NO EXIT, in collaboration with famed world percussionist Jamie Haddad and guitarist\ - \ Bobby Ferrazza; curating a Cage ``Musicircus'' for the opening of the new Museum\ - \ of Contemporary Art in Cleveland, and artistically advising the Sitka Fest in\ - \ Alaska, a three-month-long festival of arts and culture.\n\nRyan Hilty is a\ - \ percussionist earning a degree in Music Education from the Case Western Reserve\ - \ University School of Music in Cleveland, Ohio. He is currently in his second\ - \ undergraduate year, studying percussion with Matthew Larson. He has performed\ - \ as a percussionist in numerous ensembles, including the Crestwood Wind Ensemble,\ - \ Jazz Band, and the Cleveland Youth Wind Symphony. He is the recipient of the\ - \ 2010 John Phillip Sousa Award. After earning his degree in music education,\ - \ Ryan aspires to become a high school band director.\n\nSamuel Haese is a student\ - \ of music and physics at Case Western Reserve University (CWRU) in Cleveland,\ - \ OH. He has studied concert percussion with Matthew Bassett, Feza Zweifel, and\ - \ Matthew Larson, and currently collaborates with Paul Cox in exploring and performing\ - \ modern percussion music. In the meantime, Sam is receiving a BA in Music for\ - \ studying piano with Gerardo Teissonniere through the Cleveland Institute of\ - \ Music. 
Sam intends to also receive a degree in Engineering Physics from CWRU\ - \ which he hopes will allow him to explore and understand music technologies.\ - \ Originally from Berkeley, California, his current plans include moving to a\ - \ sunnier place than Cleveland after graduation within the next two years.\n\n\ - Eric Young is a student at Case Western Reserve University majoring in Computer\ - \ Science and Audio Engineering. He grew up in Kansas City, Missouri. He plans\ - \ on incorporating his interests into a career developing digital audio software.\ - \ Eric has been studying general percussion performance since 2003 and specializes\ - \ in jazz drums.\n\nConcert Venue and Time: Necto, Tuesday May 22, 9:00pm" + ID: nime2012-music-Morales-Manzanares2012 + abstract: "Program notes:\n\nDesamor I is inspired in a model of meditation where\ + \ primordial awareness or naturally occurring timeless awareness is seen as a\ + \ result of a conversation with my wife Alejandra. This work is for piano, computer\ + \ and two Wii controllers attached to my forearms. The output is 4 channels. The\ + \ gestures of the pianist (movement, timber and dynamics) are captured in real\ + \ time via 2 microphones and a set of 2 Wii controllers. The computer languages\ + \ involved in the development of the project were: Escamol, a prolog environment\ + \ for algorithmic composition designed by the composer, and SuperCollider. 
In\ + \ this piece I share my experience as a performer-composer within a multi-platform\ + \ programming environments involving signal processing and machine learning techniques.\n\ + \nComposer(s) Credits:\n\nRoberto Morales-Manzanares\n\nInstrumentalist(s) Credits:\n\ + \nRoberto Morales-Manzanares (piano, percussion and electronics)\n\nArtist(s)\ + \ Biography:\n\nRoberto Morales-Manzanares: Born in Mexico City, Roberto Morales-Manzanares\ + \ started his musical training in national folkloric music and learned how to\ + \ play harps and different kinds of guitars and flutes from several regions of\ + \ the country. His doctorate in music composition was completed at UC Berkeley\ + \ in 2006. As a composer, he has written music for theater, dance, movies, TV\ + \ and radio. As an interpreter Morales-Manzanares has participated on his own\ + \ and with other composers in forums of jazz, popular and new music, including\ + \ tours to Europe United States and Latin-America.\nAs a researcher, he has been\ + \ invited to different national and international conferences such as ICMC, International\ + \ Join Conference on Artificial Intelligence IJCAI and Symposium on Arts and Technology\ + \ and has several publications. Currently he is member of the ``Sistema Nacional\ + \ de Creadores''. His music can be found in ICMC recordings, Victo label www.victo.qc.ca\ + \ (Leyendas in collaboration with Mari Kimura) and Irradia/Pocoscocodrilos.\n\n\ + Concert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Jenn Figg and Matthew McCormack and Paul Cox - bibtex: "@incollection{nime2012-music-FiggMcCormackCox2012,\n abstract = {Program\ - \ notes:\n\nThis work merges sound and light to illuminate complex rhythmic motives,\ - \ polyrhythms and metrical patterns in a visual display generated by three drummers\ - \ playing six ``light'' drums. 
These new instruments bring to life the dreams\ - \ of 20th century synesthetes, such as Wassily Kandinsky and Alexander Scriabin\ - \ and others who sought to create an imagined ``visual music,'' an ideal synthesis\ - \ of music and visual art.\n\nCommunity Light Beacons are percussion instruments\ - \ that leverage the potentials of music, analog technology, and human-generated\ - \ power to visualize sound. These instruments add the dimension of light to the\ - \ ancient tradition of drumming. The drums are user-powered, and when they are\ - \ played---banged, hit and tapped---the vibrations from the drumhead are converted\ - \ to electricity by the internal speaker transducer. The generated energy powers\ - \ ultra bright LEDs, which light up with every hit and beam out from the Fresnel\ - \ lens.\n\nComposer(s) Credits:\n\nJenn Figg, Matthew McCormack, Paul Cox\n\n\ - Instrumentalist(s) Credits:\n\nRyan Hilty, Samuel Haese, Eric Young (Kinetic Light\ - \ Drums)\n\nArtist(s) Biography:\n\nJenn Figg is an artist investigating the connections\ - \ between industry, science and art through the transformation of energy, performative\ - \ objects and constructed ecosystems. She graduated with a BFA in Textiles from\ - \ the Rhode Island School of Design and an MFA from the University of California\ - \ at Santa Barbara. She is pursuing her Ph.D. in Media, Art, and Text at Virginia\ - \ Commonwealth University. She lives in Baltimore and is an Assistant Professor\ - \ of Art at Towson University in Maryland. Exhibitions include The Print Center\ - \ in Philadelphia, Pennsylvania, The Art House at the Jones Center in Austin,\ - \ Texas, Virginia MOCA in Virginia Beach, Virginia, the Columbus Center of Science\ - \ and Industry in Columbus, Ohio, the Ingenuity Festival in Cleveland, Ohio. 
Other\ - \ awards and residencies include the MacDowell Colony, the Lower Manhattan Cultural\ - \ Council Residency, the Great Lakes College Association New Directions Initiative,\ - \ and the University of California Interdisciplinary Humanities Center, Visual,\ - \ Performing \\& Media Arts Award.\n\nMatthew McCormack explores energy transformation\ - \ and expression through technology, kinetic sculpture and blown glass. He graduated\ - \ with a BFA in Glass from The Ohio State University and is now living in Baltimore,\ - \ Maryland. He is pursuing an Interdisciplinary MFA at Towson University. His\ - \ research interests include modifying a speaker transducer for optimum energy\ - \ generation and developing a series of rapid prototyped Fresnel lens stamps for\ - \ quartz crystal light instruments. His work has been featured at the Virginia\ - \ Museum of Contemporary Art in Virginia Beach, Virginia, the Columbus Center\ - \ of Science and Industry in Columbus, Ohio, the Toledo Museum of Art in Toledo,\ - \ Ohio, the Rankin Art Gallery at Ferris State University in Big Rapids, Michigan,\ - \ the National Museum of Glass in Eskisehir, Turkey, the Franklin Park Conservatory\ - \ in Columbus, Ohio, the Ingenuity Festival in Cleveland, Ohio, and as part of\ - \ the Lower Manhattan Cultural Council's Governors Island Residency in New York\ - \ City.\n\nPaul Cox is a scholar, composer and percussionist in Cleveland, Ohio.\ - \ He currently teaches music history and percussion at Case Western Reserve University\ - \ (CWRU) and the Oberlin Conservatory of Music, where he is a Visiting Assistant\ - \ Professor. He earned a PhD in musicology from CWRU in 2011 after the completion\ - \ of his dissertation, \\emph{Collaged Codes: John Cage's Credo in Us, a study\ - \ of Cage and Merce Cunningham's first dance collaboration in 1942}. 
Current projects\ - \ include composing \\emph{Just.Are.Same} for string quartet, oboe and tape, which\ - \ weaves together an electronic soundscape of spoken words drawn from victims\ - \ of genocide with acoustic and electronic sounds; composing an evening-length\ - \ work for the ensemble NO EXIT, in collaboration with famed world percussionist\ - \ Jamie Haddad and guitarist Bobby Ferrazza; curating a Cage ``Musicircus'' for\ - \ the opening of the new Museum of Contemporary Art in Cleveland, and artistically\ - \ advising the Sitka Fest in Alaska, a three-month-long festival of arts and culture.\n\ - \nRyan Hilty is a percussionist earning a degree in Music Education from the Case\ - \ Western Reserve University School of Music in Cleveland, Ohio. He is currently\ - \ in his second undergraduate year, studying percussion with Matthew Larson. He\ - \ has performed as a percussionist in numerous ensembles, including the Crestwood\ - \ Wind Ensemble, Jazz Band, and the Cleveland Youth Wind Symphony. He is the recipient\ - \ of the 2010 John Phillip Sousa Award. After earning his degree in music education,\ - \ Ryan aspires to become a high school band director.\n\nSamuel Haese is a student\ - \ of music and physics at Case Western Reserve University (CWRU) in Cleveland,\ - \ OH. He has studied concert percussion with Matthew Bassett, Feza Zweifel, and\ - \ Matthew Larson, and currently collaborates with Paul Cox in exploring and performing\ - \ modern percussion music. In the meantime, Sam is receiving a BA in Music for\ - \ studying piano with Gerardo Teissonniere through the Cleveland Institute of\ - \ Music. 
Sam intends to also receive a degree in Engineering Physics from CWRU\ - \ which he hopes will allow him to explore and understand music technologies.\ - \ Originally from Berkeley, California, his current plans include moving to a\ - \ sunnier place than Cleveland after graduation within the next two years.\n\n\ - Eric Young is a student at Case Western Reserve University majoring in Computer\ - \ Science and Audio Engineering. He grew up in Kansas City, Missouri. He plans\ - \ on incorporating his interests into a career developing digital audio software.\ - \ Eric has been studying general percussion performance since 2003 and specializes\ - \ in jazz drums.\n\nConcert Venue and Time: Necto, Tuesday May 22, 9:00pm},\n\ - \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Jenn Figg and Matthew McCormack\ - \ and Paul Cox},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg\ - \ Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month\ - \ = {May},\n publisher = {Electrical Engineering \\& Computer Science and Performing\ - \ Arts Technology, University of Michigan},\n title = {Thunderclap For Six Kinetic\ - \ Light Drums},\n year = {2012}\n}\n" + author: Roberto Morales-Manzanares + bibtex: "@incollection{nime2012-music-Morales-Manzanares2012,\n abstract = {Program\ + \ notes:\n\n\\emph{Desamor I} is inspired in a model of meditation where primordial\ + \ awareness or naturally occurring timeless awareness is seen as a result of a\ + \ conversation with my wife Alejandra. This work is for piano, computer and two\ + \ Wii controllers attached to my forearms. The output is 4 channels. The gestures\ + \ of the pianist (movement, timber and dynamics) are captured in real time via\ + \ 2 microphones and a set of 2 Wii controllers. 
The computer languages involved\ + \ in the development of the project were: Escamol, a prolog environment for algorithmic\ + \ composition designed by the composer, and SuperCollider. In this piece I share\ + \ my experience as a performer-composer within a multi-platform programming environments\ + \ involving signal processing and machine learning techniques.\n\nComposer(s)\ + \ Credits:\n\nRoberto Morales-Manzanares\n\nInstrumentalist(s) Credits:\n\nRoberto\ + \ Morales-Manzanares (piano, percussion and electronics)\n\nArtist(s) Biography:\n\ + \nRoberto Morales-Manzanares: Born in Mexico City, \\textbf{Roberto Morales-Manzanares}\ + \ started his musical training in national folkloric music and learned how to\ + \ play harps and different kinds of guitars and flutes from several regions of\ + \ the country. His doctorate in music composition was completed at UC Berkeley\ + \ in 2006. As a composer, he has written music for theater, dance, movies, TV\ + \ and radio. As an interpreter Morales-Manzanares has participated on his own\ + \ and with other composers in forums of jazz, popular and new music, including\ + \ tours to Europe United States and Latin-America.\nAs a researcher, he has been\ + \ invited to different national and international conferences such as ICMC, International\ + \ Join Conference on Artificial Intelligence IJCAI and Symposium on Arts and Technology\ + \ and has several publications. Currently he is member of the ``Sistema Nacional\ + \ de Creadores''. 
His music can be found in ICMC recordings, Victo label www.victo.qc.ca\ + \ (Leyendas in collaboration with Mari Kimura) and Irradia/Pocoscocodrilos.\n\n\ + Concert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm},\n\ + \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Roberto Morales-Manzanares},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie\ + \ and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + \ Engineering \\& Computer Science and Performing Arts Technology, University\ + \ of Michigan},\n title = {Desamor I},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -10145,77 +9845,192 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Thunderclap For Six Kinetic Light Drums + title: Desamor I year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Tahiroglu2012 - abstract: "Program notes:\n\nInHands, an audiovisual real-time improvisation for\ - \ mobile phones, explores alternative options for musical interactions with two\ - \ mobile instruments in live performances. In this improvisation piece, sound\ - \ output of each mobile phone instrument becomes a sound input for the other instrument;\ - \ to be processed further with an act of responding immediately and spontaneously.\ - \ Granular synthesis module captures audio in real-time and creates the grains\ - \ based on the texture of the sounds. Magnitude, roll and pitch values of the\ - \ acceleration are mapped to the control parameters. 
In the control layer of\ - \ Sub-synthesis module, the change in direction of a touch position is tracked\ - \ on the mobile surface and the distance of the same touch position to 4 certain\ - \ points on the touchscreen is used as a source for creating frequency values.\ - \ This mapping model generates 4 control parameters throughout 2 dimensional input\ - \ layers. Hannah Drayson created the abstract visual-layers of this piece.\n\n\ - Composer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nKoray Tahiroğlu (mobile\ - \ phones)\n\nArtist(s) Biography:\n\nKoray Tahiroğlu is a musician, postdoctoral\ - \ researcher and lecturer in the Department of Media, Aalto University. He practices\ - \ art as a researcher focusing on embodied approaches to sonic interaction in\ - \ participative music experience, as well as a performer of live electronic music.\ - \ He conducted an artistic research with a focus on studying and practicing human\ - \ musical interaction. Tahiroğlu has completed the degree of Doctor of Arts with\ - \ the dissertation entitled \"Interactive Performance Systems: Experimenting with\ - \ Human Musical Interaction\" after its public examination in 2008. He developed\ - \ interactive performance systems and experimental musical instruments, which\ - \ were used in his live performances. Since 2004, he has been also teaching workshops\ - \ and courses introducing artistic strategies and methodologies for creating computational\ - \ art works. Tahiroğlu has performed experimental music in collaboration as well\ - \ as in solo performances in Europe and North America.\n\nConcert Venue and Time:\ - \ Necto, Tuesday May 22, 9:00pm" + ID: nime2012-music-Hsu2012 + abstract: "Program notes:\n\nFlue is a structured audio-visual improvisation for\ + \ three musicians, utilizing live acoustic and electronic sound and interactive\ + \ animations. A physics-based smoke simulation is influenced by the real-time\ + \ audio from the musicians' performance. 
The audio from the performance is analyzed;\ + \ high-level tempo, spectral and other features are extracted, and sent via Open\ + \ Sound Control to the animation environment. The smoke trails are also able to\ + \ coalesce into well- defined symbols and forms, all while moving in a natural-seeming\ + \ manner consistent with the underlying fluid simulation.\n\nComposer(s) Credits:\n\ + \nBill Hsu\n\nInstrumentalist(s) Credits:\n\nBill Hsu (electronics, interactive\ + \ animation), Matt Endahl (piano), Mike Khoury (violin)\n\nArtist(s) Biography:\n\ + \nBill Hsu is an Associate Professor of Computer Science at San Francisco State\ + \ University. He has performed in the US, Asia, and Europe, including NIME 2011\ + \ (Oslo), Festival art::archive:architectures (ZKM, Karlsruhe, 2011), SMC 2009\ + \ (Porto), Harvestworks Festival 2009 (New York), Fete Quaqua 2008 (London), MIX\ + \ Festival 2007 and 2009 (New York), NIME 2007 (New York), Stimme+ 2006 (ZKM,\ + \ Karlsruhe), and the First Hong Kong Improvised Performance Festival 2005. Website:\ + \ http://userwww.sfsu.edu/~whsu/art.html\n\nMatt Endahl (b. 1985) is an improvising\ + \ pianist based in Ann Arbor, MI. A student of Geri Allen and Ed Sarath at the\ + \ University of Michigan, Matt is an active performer and organizer, having performed\ + \ in a wide variety of settings, from Gershwin's \"Rhapsody in Blue\" to freeform\ + \ solo electronic sets. Matt has taught jazz piano at Hillsdale College since\ + \ 2008. http://www.myspace.com/mattendahl\n\nMike Khoury was born in Mt. Pleasant,\ + \ Michigan in 1969. As the son of visual artist Sari Khoury, he was exposed to\ + \ various forms of visual arts and creative musical forms. Khoury is Palestinian.\n\ + Khoury's collaborators often include Leyya Tawil (dance), Ben Hall (percussion),\ + \ Christopher Riggs (guitar), and Andrew Coltrane (sound manipulation). He has\ + \ performed and recorded with Faruq Z. 
Bey, Dennis Gonzalez, Luc Houtkamp, Maury\ + \ Coles, Jack Wright, Graveyards, John Butcher, Gino Robair, Gunda Gottschalk,\ + \ and Le Quan Ninh.\nKhoury runs the Entropy Stereo music label where he focuses\ + \ on issuing new and archival music by challenging artists. His studies include\ + \ those with John Lindberg, Gerald Cleaver, and composer/violinist David Litven.\ + \ Khoury is the author of a chapter on Egyptian-American composer Halim El-Dabh\ + \ in a forthcoming anthology on the Arab avant garde, published by Wesleyan University\ + \ Press. Website: http://www.myspace.com/michaelkhoury\n\nConcert Venue and Time:\ + \ Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Koray Tahiroğlu - bibtex: "@incollection{nime2012-music-Tahiroglu2012,\n abstract = {Program notes:\n\ - \n\\emph{InHands}, an audiovisual real-time improvisation for mobile phones, explores\ - \ alternative options for musical interactions with two mobile instruments in\ - \ live performances. In this improvisation piece, sound output of each mobile\ - \ phone instrument becomes a sound input for the other instrument; to be processed\ - \ further with an act of responding immediately and spontaneously. Granular synthesis\ - \ module captures audio in real-time and creates the grains based on the texture\ - \ of the sounds. Magnitude, roll and pitch values of the acceleration are mapped\ - \ to the control parameters. In the control layer of Sub-synthesis module, the\ - \ change in direction of a touch position is tracked on the mobile surface and\ - \ the distance of the same touch position to 4 certain points on the touchscreen\ - \ is used as a source for creating frequency values. This mapping model generates\ - \ 4 control parameters throughout 2 dimensional input layers. 
Hannah Drayson created\ - \ the abstract visual-layers of this piece.\n\nComposer(s) Credits:\n\nInstrumentalist(s)\ - \ Credits:\n\nKoray Tahiro\\u{g}lu (mobile phones)\n\nArtist(s) Biography:\n\n\ - Koray Tahiro\\u{g}lu is a musician, postdoctoral researcher and lecturer in the\ - \ Department of Media, Aalto University. He practices art as a researcher focusing\ - \ on embodied approaches to sonic interaction in participative music experience,\ - \ as well as a performer of live electronic music. He conducted an artistic research\ - \ with a focus on studying and practicing human musical interaction. Tahiro\\\ - u{g}lu has completed the degree of Doctor of Arts with the dissertation entitled\ - \ \"Interactive Performance Systems: Experimenting with Human Musical Interaction\"\ - \ after its public examination in 2008. He developed interactive performance systems\ - \ and experimental musical instruments, which were used in his live performances.\ - \ Since 2004, he has been also teaching workshops and courses introducing artistic\ - \ strategies and methodologies for creating computational art works. 
Tahiro\\\ - u{g}lu has performed experimental music in collaboration as well as in solo performances\ - \ in Europe and North America.\n\nConcert Venue and Time: Necto, Tuesday May 22,\ - \ 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Koray Tahiro\\\ - u{g}lu},\n booktitle = {Music Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg Essl\ - \ and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n\ - \ publisher = {Electrical Engineering \\& Computer Science and Performing Arts\ - \ Technology, University of Michigan},\n title = {InHands: Improvisation for Mobile\ - \ Phones},\n year = {2012}\n}\n" + author: Bill Hsu + bibtex: "@incollection{nime2012-music-Hsu2012,\n abstract = {Program notes:\n\n\\\ + emph{Flue} is a structured audio-visual improvisation for three musicians, utilizing\ + \ live acoustic and electronic sound and interactive animations. A physics-based\ + \ smoke simulation is influenced by the real-time audio from the musicians' performance.\ + \ The audio from the performance is analyzed; high-level tempo, spectral and other\ + \ features are extracted, and sent via Open Sound Control to the animation environment.\ + \ The smoke trails are also able to coalesce into well- defined symbols and forms,\ + \ all while moving in a natural-seeming manner consistent with the underlying\ + \ fluid simulation.\n\nComposer(s) Credits:\n\nBill Hsu\n\nInstrumentalist(s)\ + \ Credits:\n\nBill Hsu (electronics, interactive animation), Matt Endahl (piano),\ + \ Mike Khoury (violin)\n\nArtist(s) Biography:\n\nBill Hsu is an Associate Professor\ + \ of Computer Science at San Francisco State University. 
He has performed in the\ + \ US, Asia, and Europe, including NIME 2011 (Oslo), Festival art::archive:architectures\ + \ (ZKM, Karlsruhe, 2011), SMC 2009 (Porto), Harvestworks Festival 2009 (New York),\ + \ Fete Quaqua 2008 (London), MIX Festival 2007 and 2009 (New York), NIME 2007\ + \ (New York), Stimme+ 2006 (ZKM, Karlsruhe), and the First Hong Kong Improvised\ + \ Performance Festival 2005. Website: http://userwww.sfsu.edu/~whsu/art.html\n\ + \nMatt Endahl (b. 1985) is an improvising pianist based in Ann Arbor, MI. A student\ + \ of Geri Allen and Ed Sarath at the University of Michigan, Matt is an active\ + \ performer and organizer, having performed in a wide variety of settings, from\ + \ Gershwin's \"Rhapsody in Blue\" to freeform solo electronic sets. Matt has taught\ + \ jazz piano at Hillsdale College since 2008. http://www.myspace.com/mattendahl\n\ + \nMike Khoury was born in Mt. Pleasant, Michigan in 1969. As the son of visual\ + \ artist Sari Khoury, he was exposed to various forms of visual arts and creative\ + \ musical forms. Khoury is Palestinian.\nKhoury's collaborators often include\ + \ Leyya Tawil (dance), Ben Hall (percussion), Christopher Riggs (guitar), and\ + \ Andrew Coltrane (sound manipulation). He has performed and recorded with Faruq\ + \ Z. Bey, Dennis Gonzalez, Luc Houtkamp, Maury Coles, Jack Wright, Graveyards,\ + \ John Butcher, Gino Robair, Gunda Gottschalk, and Le Quan Ninh.\nKhoury runs\ + \ the Entropy Stereo music label where he focuses on issuing new and archival\ + \ music by challenging artists. His studies include those with John Lindberg,\ + \ Gerald Cleaver, and composer/violinist David Litven. Khoury is the author of\ + \ a chapter on Egyptian-American composer Halim El-Dabh in a forthcoming anthology\ + \ on the Arab avant garde, published by Wesleyan University Press. 
Website: http://www.myspace.com/michaelkhoury\n\ + \nConcert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May 22, 7:00pm},\n\ + \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Bill Hsu},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ + \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + \ Engineering \\& Computer Science and Performing Arts Technology, University\ + \ of Michigan},\n title = {Flue},\n year = {2012}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain + month: May + publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, + University of Michigan' + title: Flue + year: 2012 + + +- ENTRYTYPE: incollection + ID: nime2012-music-GoloveMartensson2012 + abstract: "Program notes:\n\nThe most impressive uses of the theremin cello during\ + \ Theremin's time in New York are Leopold Stokowski's inclusion of one in the\ + \ Philadelphia Orchestra's low string section and Varese's composition of two\ + \ solo parts in Ecuatorial. Even more important, from my perspective, is the\ + \ fact that the instrument represents the first attempt to harness the human potential\ + \ to shape and manipulate electronic sound by means of the technical apparatus\ + \ of the modern player of bowed string instruments.\n\nRachmaninoff's Vocalise,\ + \ Op. 34 no. 14, for textless high voice, highlights the hauntingly vocal quality\ + \ of the theremin cello. 
Vocalise is the last of a set of 14 songs published in\ + \ 1912, less than a decade before Theremin's experiments with musical sounds began\ + \ to bear fruit.\n\nBrian Wilson and the Beach Boys, by virtue of their use of\ + \ Bob Whitsell's Electro-Theremin on several recordings, are irrevocably linked\ + \ to the history of the theremin.\n\nComposer(s) Credits:\n\nVocalise, Op.34 no.\ + \ 14 - Sergei Rachmaninoff\nMedley (Good Vibrations/God Only Knows) - Brian Wilson\n\ + \nInstrumentalist(s) Credits:\n\nJonathan Golove (Theremin cello), Magnus Martensson\ + \ (piano)\n\nArtist(s) Biography:\n\nJonathan Golove, Associate Professor of Music\ + \ at the University at Buffalo, has been featured as theremin cello soloist with\ + \ the Asko/Schoenberg Ensemble, London Sinfonietta, and International Contemporary\ + \ Ensemble; and as cello soloist with the Buffalo Philharmonic Orchestra, Slee\ + \ Sinfonietta, and New York Virtuoso Singers. He has recorded for the Albany,\ + \ Centaur, Albuzerque, and Nine Winds labels, and appeared at festivals including\ + \ the Holland Festival, Festival d'Automne, Lincoln Center Festival, June in Buffalo,\ + \ and the Festival del Centro Histórico (Mexico City). Golove gave the first performance\ + \ of Varese's Ecuatorial using Floyd Engel's recreation of the legendary early\ + \ 20th century instrument at the University at Buffalo in 2002. 
He is also active\ + \ as an electric cellist, particularly in the field of creative improvised music.\ + \ An accomplished composer, his works have been performed at venues including\ + \ the Kennedy Center, Venice Biennale, Festival of Aix-en-Provence, Lincoln Center\ + \ Chamber Music Society II, and the Kitchen.\n\nMagnus Martensson is Music Director\ + \ of The Scandinavian Chamber Orchestra of New York; between 1996 and 2007 he\ + \ was Visiting Assistant Professor at SUNY Buffalo and conductor of the Slee Sinfonietta.\ + \ In 1989, Martensson made his operatic debut in Malmö, Sweden, conducting a production\ + \ of Offenbach's Orpheus in the Underworld, and has subsequently conducted operas\ + \ by Mozart, Puccini, Golove, among others. He has conducted several world premiere\ + \ recordings, including orchestral music by Jeffrey Stadelman, Roger Reynolds,\ + \ and David Felder.\nIn the past few seasons Martensson has guest conducted with\ + \ the New York New Music Ensemble, the Trondheim Soloists, Musica Vitae, ICE,\ + \ and at the Monday Evening Concert Series (Los Angeles), The Manhattan School\ + \ of Music, and Teatro San Martin (Buenos Aires).\n\nConcert Venue and Time: Lydia\ + \ Mendelssohn Theatre, Tuesday May 22, 7:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' 
+ author: Jonathan Golove and Magnus Martensson + bibtex: "@incollection{nime2012-music-GoloveMartensson2012,\n abstract = {Program\ + \ notes:\n\nThe most impressive uses of the theremin cello during Theremin's time\ + \ in New York are Leopold Stokowski's inclusion of one in the Philadelphia Orchestra's\ + \ low string section and Varese's composition of two solo parts in Ecuatorial.\ + \ Even more important, from my perspective, is the fact that the instrument represents\ + \ the first attempt to harness the human potential to shape and manipulate electronic\ + \ sound by means of the technical apparatus of the modern player of bowed string\ + \ instruments.\n\nRachmaninoff's Vocalise, Op. 34 no. 14, for textless high voice,\ + \ highlights the hauntingly vocal quality of the theremin cello. Vocalise is the\ + \ last of a set of 14 songs published in 1912, less than a decade before Theremin's\ + \ experiments with musical sounds began to bear fruit.\n\nBrian Wilson and the\ + \ Beach Boys, by virtue of their use of Bob Whitsell's Electro-Theremin on several\ + \ recordings, are irrevocably linked to the history of the theremin.\n\nComposer(s)\ + \ Credits:\n\nVocalise, Op.34 no. 14 - \\emph{Sergei Rachmaninoff}\nMedley (Good\ + \ Vibrations/God Only Knows) - \\emph{Brian Wilson}\n\nInstrumentalist(s) Credits:\n\ + \nJonathan Golove (Theremin cello), Magnus Martensson (piano)\n\nArtist(s) Biography:\n\ + \nJonathan Golove, Associate Professor of Music at the University at Buffalo,\ + \ has been featured as theremin cello soloist with the Asko/Schoenberg Ensemble,\ + \ London Sinfonietta, and International Contemporary Ensemble; and as cello soloist\ + \ with the Buffalo Philharmonic Orchestra, Slee Sinfonietta, and New York Virtuoso\ + \ Singers. 
He has recorded for the Albany, Centaur, Albuzerque, and Nine Winds\ + \ labels, and appeared at festivals including the Holland Festival, Festival d'Automne,\ + \ Lincoln Center Festival, June in Buffalo, and the Festival del Centro Hist\\\ + '{o}rico (Mexico City). Golove gave the first performance of Varese's \\emph{Ecuatorial}\ + \ using Floyd Engel's recreation of the legendary early 20th century instrument\ + \ at the University at Buffalo in 2002. He is also active as an electric cellist,\ + \ particularly in the field of creative improvised music. An accomplished composer,\ + \ his works have been performed at venues including the Kennedy Center, Venice\ + \ Biennale, Festival of Aix-en-Provence, Lincoln Center Chamber Music Society\ + \ II, and the Kitchen.\n\nMagnus Martensson is Music Director of The Scandinavian\ + \ Chamber Orchestra of New York; between 1996 and 2007 he was Visiting Assistant\ + \ Professor at SUNY Buffalo and conductor of the Slee Sinfonietta. In 1989, Martensson\ + \ made his operatic debut in Malm\\''{o}, Sweden, conducting a production of Offenbach's\ + \ Orpheus in the Underworld, and has subsequently conducted operas by Mozart,\ + \ Puccini, Golove, among others. 
He has conducted several world premiere recordings,\ + \ including orchestral music by Jeffrey Stadelman, Roger Reynolds, and David Felder.\n\ + In the past few seasons Martensson has guest conducted with the New York New Music\ + \ Ensemble, the Trondheim Soloists, Musica Vitae, ICE, and at the Monday Evening\ + \ Concert Series (Los Angeles), The Manhattan School of Music, and Teatro San\ + \ Martin (Buenos Aires).\n\nConcert Venue and Time: Lydia Mendelssohn Theatre,\ + \ Tuesday May 22, 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author\ + \ = {Jonathan Golove and Magnus Martensson},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ + \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ + \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ + \ title = {Rachmaninoff-Wilson Medley},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -10223,73 +10038,142 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: 'InHands: Improvisation for Mobile Phones' + title: Rachmaninoff-Wilson Medley year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Lorenzo2012 - abstract: "Program notes:\n\nWhen designing my new electronic instruments, I always\ - \ keep in mind the relationship between the instrument and performer as a tool\ - \ and its master. The instrument should be a channel by which the performer can\ - \ access the dimensions of sound in order to attempt to make music. 
The music\ - \ should then originate from the musicians intention and not the instrument itself.\ - \ Thus, I design my instruments as intuitive, transparent, and non-idiosyncratic\ - \ mappings between physical gesture and sound.\n\nThis new electronic instrument\ - \ remaps a Logitech Attack 3 Joystick to be able to control sound. Through the\ - \ joystick, the performer can control volume, rhythm, repetition, and pitch of\ - \ custom, preprogrammed sounds. Additionally, the joystick can be used to record\ - \ and playback short audio loops. The product of this design allows for agile\ - \ and intentional electronic musical gestures where rhythm, volume, and pitch\ - \ are clear and deliberate. I have been able to reach a wide range of musical\ - \ expressions and I am learning and discovering more as I practice MODIFIED ATTACK.\n\ - \nComposer(s) Credits:\n\nLevy Lorenzo\n\nInstrumentalist(s) Credits:\n\nLevy\ - \ Lorenzo\n\nArtist(s) Biography:\n\nLevy Lorenzo is an electronics engineer and\ - \ percussionist living in New York. Specializing in microcontroller-based, he\ - \ performs experimental, live-electronic & acoustic music using new, custom electronic\ - \ musical instruments and percussion. His work has been featured at STEIM in Amsterdam\ - \ (NL), the Darmstadt School for New Music (DE) and the International Ensemble\ - \ Moderne Academy (AU). Currently, Levy is a Live Sound Engineer for the International\ - \ Contemporary Ensemble and Issue Project Room (Brooklyn, NY). Levy holds B.S.\ - \ and M.Eng. degrees in Electrical & Computer Engineering from Cornell University\ - \ as well as an M.M. degree in Percussion Performance from Stony Brook University,\ - \ where he is currently a D.M.A. candidate. 
[www.levylorenzo.com]\n\nConcert Venue\ - \ and Time: Necto, Tuesday May 22, 9:00pm" + ID: nime2012-music-Alexander2012 + abstract: "Program notes:\n\nThe MiND Ensemble (Music in Neural Dimensions) is a\ + \ new-media performance group that utilizes custom interfaces to explore the mind-machine-music\ + \ connection. The traditional realization of the creative process ahs been as\ + \ follows: there is an artist, a thought process, and a fixed medium which actualizes\ + \ those thoughts. Neurofeedback radically shifts this paradigm. Now there is an\ + \ artist and a dynamic medium that actively interfaces with the thought processes\ + \ of the artist himself, drastically reshaping the way we understand the creative\ + \ process. The MiND Ensemble promotes a rich awareness in which the mind is the\ + \ creative medium. All projection and audio processing in this piece are driven\ + \ in real time, with data gathered from the Emotiv EPOC headset.\n\nComposer(s)\ + \ Credits:\n\nRobert Alexander, David Biedenbender, Anton Pugh, Suby Raman, Amanda\ + \ Sari Perez, Sam L. Richards\n\nInstrumentalist(s) Credits:\n\nJeremy Crosmer\ + \ (violoncello), Robert Alexander (MiND Synth / Emotiv), Anton Pugh (MiND Synth\ + \ / Emotiv)\n\nArtist(s) Biography:\n\nRobert Alexander is a Sonification Specialist\ + \ with the Solar Heliospheric Research group at the University of Michigan, where\ + \ he is pursuing a PhD in Design Science. He was awarded a JPFP Fellowship from\ + \ NASA, an Outstanding Achievement award from ICAD, and is an Artist in Residence\ + \ with the Imagine Science Film Festival. He has collaborated with artists such\ + \ as DJ Spooky, and performed on several international stages. He founded the\ + \ MiND Ensemble in 2010.\n\nDavid Biedenbender is currently a doctoral student\ + \ in music composition at the University of Michigan. 
His first musical collaborations\ + \ were in rock and jazz bands as an electric bassist and in jazz and wind bands\ + \ as a bass trombonist and euphonium player. His present interests include working\ + \ with everyone from classically trained musicians to improvisers, fixed electronics\ + \ to brain data.\n\nAnton Pugh is a Masters student in Electrical Engineering:\ + \ Systems (Signal Processing concentration) at the University of Michigan. Presently\ + \ he is working on expanding his knowledge of the Processing and iOS platforms,\ + \ especially as they apply to the MiND Ensemble. His primary hobby is designing\ + \ and building custom electronic instruments and new musical interfaces. He is\ + \ also an active musician and plays viola in the Campus Symphony Orchestra.\n\n\ + Suby Raman is a composer, conductor, polyglot and linguist. His major artistic\ + \ passion is drawn from language itself: the basic aural and mental components\ + \ of language, how it determines, separates and unites cultures, and its influence\ + \ (or lack thereof) on our perception and expression of reality. He has conducted\ + \ research in brain-computer interface technology, which assist people afflicted\ + \ by ALS and spinal cord injuries.\n\nAmanda Sari Perez is a researcher with the\ + \ Neural Engineering lab at the University of Michigan. She is currently working\ + \ with microelectrode arrays to record brain activity from implanted sites. In\ + \ 2009 she co- founded the Ann Arbor HackerSpace, a DIY community engaged in hands-on\ + \ learning. For the past 3 years she has created artistic installations for the\ + \ Burning Man festival, including a performance that deconstructs participants'\ + \ notions of the self. Amanda is with the MiND Ensemble to work toward lowering\ + \ the barrier for creative expression.\n\nSam L. 
Richards is a composer, artist,\ + \ and researcher with a penchant for interdisciplinary collaboration and an appetite\ + \ for creative engagement of unwieldy conceptual problems. As a composer he has\ + \ worked with media artists, filmmakers, animators, and choreographers, as well\ + \ as making music for the concert hall. Although formally trained as a musician,\ + \ he also produces video installations, visual and aural media, creative writing,\ + \ and regularly steps off the beaten path in order to engage new things in new\ + \ ways.\n\nJeremy Crosmer is a gifted young professional cellist and composer.\ + \ After achieving a double-major in music and mathematics from Hendrix College,\ + \ he went on to receive multiple graduate degrees from the University of Michigan\ + \ by the age of 23. As a cellist, Crosmer has performed across the country, soloing\ + \ with orchestras in Arkansas and attending music festivals from Music Academy\ + \ of the West to Tanglewood Music Center. An avid promoted of new music, Crosmer\ + \ has both commissioned and premiered dozens of works by composers at Michigan\ + \ and elsewhere. His performance dissertation at the University of Michigan is\ + \ a study of the music of Paul Hindemith and cello sonatas by French composers\ + \ during World War I.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Tuesday\ + \ May 22, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Levy Lorenzo - bibtex: "@incollection{nime2012-music-Lorenzo2012,\n abstract = {Program notes:\n\ - \nWhen designing my new electronic instruments, I always keep in mind the relationship\ - \ between the instrument and performer as a tool and its master. The instrument\ - \ should be a channel by which the performer can access the dimensions of sound\ - \ in order to attempt to make music. The music should then originate from the\ - \ musicians intention and not the instrument itself. 
Thus, I design my instruments\ - \ as intuitive, transparent, and non-idiosyncratic mappings between physical gesture\ - \ and sound.\n\nThis new electronic instrument remaps a Logitech Attack 3 Joystick\ - \ to be able to control sound. Through the joystick, the performer can control\ - \ volume, rhythm, repetition, and pitch of custom, preprogrammed sounds. Additionally,\ - \ the joystick can be used to record and playback short audio loops. The product\ - \ of this design allows for agile and intentional electronic musical gestures\ - \ where rhythm, volume, and pitch are clear and deliberate. I have been able to\ - \ reach a wide range of musical expressions and I am learning and discovering\ - \ more as I practice MODIFIED ATTACK.\n\nComposer(s) Credits:\n\nLevy Lorenzo\n\ - \nInstrumentalist(s) Credits:\n\nLevy Lorenzo\n\nArtist(s) Biography:\n\nLevy\ - \ Lorenzo is an electronics engineer and percussionist living in New York. Specializing\ - \ in microcontroller-based, he performs experimental, live-electronic \\& acoustic\ - \ music using new, custom electronic musical instruments and percussion. His work\ - \ has been featured at STEIM in Amsterdam (NL), the Darmstadt School for New Music\ - \ (DE) and the International Ensemble Moderne Academy (AU). Currently, Levy is\ - \ a Live Sound Engineer for the International Contemporary Ensemble and Issue\ - \ Project Room (Brooklyn, NY). Levy holds B.S. and M.Eng. degrees in Electrical\ - \ \\& Computer Engineering from Cornell University as well as an M.M. degree in\ - \ Percussion Performance from Stony Brook University, where he is currently a\ - \ D.M.A. candidate. 
[www.levylorenzo.com]\n\nConcert Venue and Time: Necto, Tuesday\ - \ May 22, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Levy\ - \ Lorenzo},\n booktitle = {Music Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg Essl\ - \ and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n\ - \ publisher = {Electrical Engineering \\& Computer Science and Performing Arts\ - \ Technology, University of Michigan},\n title = {Modified Attack},\n year = {2012}\n\ - }\n" + author: Robert Alexander and David Biedenbender and Anton Pugh and Suby Raman and + Amanda~Sari Perez and Sam~L. Richards + bibtex: "@incollection{nime2012-music-Alexander2012,\n abstract = {Program notes:\n\ + \nThe MiND Ensemble (Music in Neural Dimensions) is a new-media performance group\ + \ that utilizes custom interfaces to explore the mind-machine-music connection.\ + \ The traditional realization of the creative process ahs been as follows: there\ + \ is an artist, a thought process, and a fixed medium which actualizes those thoughts.\ + \ Neurofeedback radically shifts this paradigm. Now there is an artist and a dynamic\ + \ medium that actively interfaces with the thought processes of the artist himself,\ + \ drastically reshaping the way we understand the creative process. The MiND Ensemble\ + \ promotes a rich awareness in which the mind is the creative medium. All projection\ + \ and audio processing in this piece are driven in real time, with data gathered\ + \ from the Emotiv EPOC headset.\n\nComposer(s) Credits:\n\nRobert Alexander, David\ + \ Biedenbender, Anton Pugh, Suby Raman, Amanda Sari Perez, Sam L. 
Richards\n\ + \nInstrumentalist(s) Credits:\n\nJeremy Crosmer (violoncello), Robert Alexander\ + \ (MiND Synth / Emotiv), Anton Pugh (MiND Synth / Emotiv)\n\nArtist(s) Biography:\n\ + \nRobert Alexander is a Sonification Specialist with the Solar Heliospheric Research\ + \ group at the University of Michigan, where he is pursuing a PhD in Design Science.\ + \ He was awarded a JPFP Fellowship from NASA, an Outstanding Achievement award\ + \ from ICAD, and is an Artist in Residence with the Imagine Science Film Festival.\ + \ He has collaborated with artists such as DJ Spooky, and performed on several\ + \ international stages. He founded the MiND Ensemble in 2010.\n\nDavid Biedenbender\ + \ is currently a doctoral student in music composition at the University of Michigan.\ + \ His first musical collaborations were in rock and jazz bands as an electric\ + \ bassist and in jazz and wind bands as a bass trombonist and euphonium player.\ + \ His present interests include working with everyone from classically trained\ + \ musicians to improvisers, fixed electronics to brain data.\n\nAnton Pugh is\ + \ a Masters student in Electrical Engineering: Systems (Signal Processing concentration)\ + \ at the University of Michigan. Presently he is working on expanding his knowledge\ + \ of the Processing and iOS platforms, especially as they apply to the MiND Ensemble.\ + \ His primary hobby is designing and building custom electronic instruments and\ + \ new musical interfaces. He is also an active musician and plays viola in the\ + \ Campus Symphony Orchestra.\n\nSuby Raman is a composer, conductor, polyglot\ + \ and linguist. His major artistic passion is drawn from language itself: the\ + \ basic aural and mental components of language, how it determines, separates\ + \ and unites cultures, and its influence (or lack thereof) on our perception and\ + \ expression of reality. 
He has conducted research in brain-computer interface\ + \ technology, which assist people afflicted by ALS and spinal cord injuries.\n\ + \nAmanda Sari Perez is a researcher with the Neural Engineering lab at the University\ + \ of Michigan. She is currently working with microelectrode arrays to record brain\ + \ activity from implanted sites. In 2009 she co- founded the Ann Arbor HackerSpace,\ + \ a DIY community engaged in hands-on learning. For the past 3 years she has created\ + \ artistic installations for the Burning Man festival, including a performance\ + \ that deconstructs participants' notions of the self. Amanda is with the MiND\ + \ Ensemble to work toward lowering the barrier for creative expression.\n\nSam\ + \ L. Richards is a composer, artist, and researcher with a penchant for interdisciplinary\ + \ collaboration and an appetite for creative engagement of unwieldy conceptual\ + \ problems. As a composer he has worked with media artists, filmmakers, animators,\ + \ and choreographers, as well as making music for the concert hall. Although formally\ + \ trained as a musician, he also produces video installations, visual and aural\ + \ media, creative writing, and regularly steps off the beaten path in order to\ + \ engage new things in new ways.\n\nJeremy Crosmer is a gifted young professional\ + \ cellist and composer. After achieving a double-major in music and mathematics\ + \ from Hendrix College, he went on to receive multiple graduate degrees from the\ + \ University of Michigan by the age of 23. As a cellist, Crosmer has performed\ + \ across the country, soloing with orchestras in Arkansas and attending music\ + \ festivals from Music Academy of the West to Tanglewood Music Center. An avid\ + \ promoted of new music, Crosmer has both commissioned and premiered dozens of\ + \ works by composers at Michigan and elsewhere. 
His performance dissertation at\ + \ the University of Michigan is a study of the music of Paul Hindemith and cello\ + \ sonatas by French composers during World War I.\n\nConcert Venue and Time: Lydia\ + \ Mendelssohn Theatre, Tuesday May 22, 7:00pm},\n address = {Ann Arbor, Michigan,\ + \ U.S.A.},\n author = {Robert Alexander and David Biedenbender and Anton Pugh\ + \ and Suby Raman and Amanda~Sari Perez and Sam~L. Richards},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ + \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ + \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ + \ title = {Thought.Projection},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -10297,52 +10181,111 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Modified Attack + title: Thought.Projection year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Donnarumma2012 - abstract: "Program notes:\n\nComposer(s) Credits:\n\nMarco Donnarumma\n\nInstrumentalist(s)\ - \ Credits:\n\nMarco Donnarumma (Xth Sense)\n\nArtist(s) Biography:\n\nMarco Donnarumma:\ - \ New media and sonic artist, performer and teacher, Marco Donnarumma was born\ - \ in Italy and is based in Edinburgh, UK. Weaving a thread around biomedia research,\ - \ musical and theatrical performance, participatory practices and subversive coding,\ - \ Marco looks at the collision of critical creativity with humanized technologies.\ - \ He has performed and spoken in 28 countries worldwide at leading art events,\ - \ specialized festivals and academic conferences. 
Has been artist in residence\ - \ at Inspace (UK) and the National School of Theatre and Contemporary Dance (DK).\ - \ His work has been funded by the European Commission, Creative Scotland and the\ - \ Danish Arts Council. In February 2012 Marco was awarded the first prize in\ - \ the Margaret Guthman Musical Instrument Competition (Georgia Tech Center for\ - \ Music Technology, US) for the Xth Sense, a novel, biophysical interactive system\ - \ named the ``world's most innovative new musical instrument''.\n\nConcert Venue\ - \ and Time: Necto, Tuesday May 22, 9:00pm" + ID: nime2012-music-KimuraKato2012 + abstract: "Program notes:\n\nEigenspace (2011) is a collaborative project with Japan's\ + \ leading visual artist in new media, Tomoyuki Kato (Movie Director), with Yoshito\ + \ Onishi (Image Programing), and Chisako Hasegawa (Producer). As Japanese, we\ + \ were deeply touched by the Fukushima nuclear meltdown, the worst manmade catastrophe\ + \ in the history of the human kind, which is not contained today contaminating\ + \ the globe. Eigenspace is about our love and prayer for the humankind and our\ + \ planet, and for the next generation. The name is also taken from ``eigenvalue,''\ + \ a mathematical function used in analyzing the bowing movement, which interacts\ + \ in real time with Mr. Kato's software. The musical expression is extracted\ + \ by IRCAM's ``Augmented Violin'' and their newest motion sensor ``mini-MO'',\ + \ custom-fit into a glove designed by Mark Salinas. Special thanks to the Real\ + \ Time Musical Interactive Team at IRCAM. 
Eigenspace was commissioned by Harvestworks,\ + \ and premiered at Roulette in Brooklyn, on October 9th, 2011.\n\nComposer(s)\ + \ Credits:\n\nTomoyuki Kato (Movie Director), with Yoshito Onishi (Image Programing),\ + \ and Chisako Hasegawa (Producer)\n\nInstrumentalist(s) Credits:\n\nMari Kimura\ + \ (violin), Tomoyuki Kato (Interactive graphics)\n\nArtist(s) Biography:\n\nMari\ + \ Kimura: Violinist/composer Mari Kimura is widely admired as the inventor of\ + \ ``Subharmonics'' and her works for interactive computer music. As a composer,\ + \ Mari's commissions include the International Computer Music Association, Harvestworks,\ + \ Music from Japan, and received grants including NYFA, Arts International, Meet\ + \ The Composer, Japan Foundation, Argosy Foundation, and NYSCA. In 2010 Mari won\ + \ the Guggenheim Fellowship in Composition, and invited as Composer-in-Residence\ + \ at IRCAM in Paris. In October 2011, the Cassatt String Quartet premiered Mari's\ + \ ``I-Quadrifoglo'', her string quartet with interactive computer at the Symphony\ + \ Space in NYC, commissioned through Fromm Commission Award. Feature articles\ + \ in the past year include: the New York Times (May 13th, written by Matthew Gurewitsch),\ + \ and Scientific American (May 31st, written by Larry Greenemeier). Mari's CD,\ + \ The World Below G and Beyond, features her Subharmonics works and interactive\ + \ computer music. Mari teaches a course in Interactive Computer Performance at\ + \ Juilliard. http://www.marikimura.com\n\n\nTomoyuki Kato is a renowned Japanese\ + \ visual artist/movie director who works in a wide range of high-tech projects\ + \ including advertisements, commercials, museums exhibitions and theme-parks.\ + \ Kato's work is known for the superb quality, high impact, originality and new\ + \ technical methods. 
Recently, Kato has been active in creating corporate future\ + \ vision, such as ``concept car,'' incorporating live action, computer graphics\ + \ and animation on project bases; his recent exhibition includes 2010 Shanghai\ + \ Expo. His highly acclaimed ``Grand Odyssey,'' created for 2005 Aichi Expo's\ + \ Toshiba/Mitsui pavilion, is now displayed at Nagasaki's Huistenbosch theme-park.\ + \ In 2010, Kato created ``Better Life from Japan,'' an exhibit for Otsuka Pharmaceutical\ + \ company at Shanghai Expo, using a 360-degree display. Kato has received and\ + \ nominated for numerous awards at international and national festivals, including\ + \ Japan Ministry of Culture Media Arts Festival, Los Angels International Short\ + \ Film Festival, Montreal International Film Festival and London International\ + \ Advertising Festival.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre,\ + \ Tuesday May 22, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Marco Donnarumma - bibtex: "@incollection{nime2012-music-Donnarumma2012,\n abstract = {Program notes:\n\ - \nComposer(s) Credits:\n\nMarco Donnarumma\n\nInstrumentalist(s) Credits:\n\n\ - Marco Donnarumma (Xth Sense)\n\nArtist(s) Biography:\n\nMarco Donnarumma: New\ - \ media and sonic artist, performer and teacher, \\textbf{Marco Donnarumma} was\ - \ born in Italy and is based in Edinburgh, UK. Weaving a thread around biomedia\ - \ research, musical and theatrical performance, participatory practices and subversive\ - \ coding, Marco looks at the collision of critical creativity with humanized technologies.\ - \ He has performed and spoken in 28 countries worldwide at leading art events,\ - \ specialized festivals and academic conferences. Has been artist in residence\ - \ at Inspace (UK) and the National School of Theatre and Contemporary Dance (DK).\ - \ His work has been funded by the European Commission, Creative Scotland and the\ - \ Danish Arts Council. 
In February 2012 Marco was awarded the first prize in\ - \ the Margaret Guthman Musical Instrument Competition (Georgia Tech Center for\ - \ Music Technology, US) for the Xth Sense, a novel, biophysical interactive system\ - \ named the ``world's most innovative new musical instrument''.\n\nConcert Venue\ - \ and Time: Necto, Tuesday May 22, 9:00pm},\n address = {Ann Arbor, Michigan,\ - \ U.S.A.},\n author = {Marco Donnarumma},\n booktitle = {Music Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n day\ - \ = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich and\ - \ Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering \\\ - & Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Music for Flesh II, interactive music for enhanced body},\n year =\ - \ {2012}\n}\n" + author: Mari Kimura and Tomoyuki Kato + bibtex: "@incollection{nime2012-music-KimuraKato2012,\n abstract = {Program notes:\n\ + \n\\emph{Eigenspace} (2011) is a collaborative project with Japan's leading visual\ + \ artist in new media, Tomoyuki Kato (Movie Director), with Yoshito Onishi (Image\ + \ Programing), and Chisako Hasegawa (Producer). As Japanese, we were deeply touched\ + \ by the Fukushima nuclear meltdown, the worst manmade catastrophe in the history\ + \ of the human kind, which is not contained today contaminating the globe. Eigenspace\ + \ is about our love and prayer for the humankind and our planet, and for the next\ + \ generation. The name is also taken from ``eigenvalue,'' a mathematical function\ + \ used in analyzing the bowing movement, which interacts in real time with Mr.\ + \ Kato's software. The musical expression is extracted by IRCAM's ``Augmented\ + \ Violin'' and their newest motion sensor ``mini-MO'', custom-fit into a glove\ + \ designed by Mark Salinas. Special thanks to the Real Time Musical Interactive\ + \ Team at IRCAM. 
Eigenspace was commissioned by Harvestworks, and premiered at\ + \ Roulette in Brooklyn, on October 9th, 2011.\n\nComposer(s) Credits:\n\nTomoyuki\ + \ Kato (Movie Director), with Yoshito Onishi (Image Programing), and Chisako Hasegawa\ + \ (Producer)\n\nInstrumentalist(s) Credits:\n\nMari Kimura (violin), Tomoyuki\ + \ Kato (Interactive graphics)\n\nArtist(s) Biography:\n\nMari Kimura: Violinist/composer\ + \ \\textbf{Mari Kimura} is widely admired as the inventor of ``Subharmonics''\ + \ and her works for interactive computer music. As a composer, Mari's commissions\ + \ include the International Computer Music Association, Harvestworks, Music from\ + \ Japan, and received grants including NYFA, Arts International, Meet The Composer,\ + \ Japan Foundation, Argosy Foundation, and NYSCA. In 2010 Mari won the Guggenheim\ + \ Fellowship in Composition, and invited as Composer-in-Residence at IRCAM in\ + \ Paris. In October 2011, the Cassatt String Quartet premiered Mari's \\emph{``I-Quadrifoglo''},\ + \ her string quartet with interactive computer at the Symphony Space in NYC, commissioned\ + \ through Fromm Commission Award. Feature articles in the past year include:\ + \ the New York Times (May 13th, written by Matthew Gurewitsch), and Scientific\ + \ American (May 31st, written by Larry Greenemeier). Mari's CD, \\emph{The World\ + \ Below G and Beyond}, features her Subharmonics works and interactive computer\ + \ music. Mari teaches a course in Interactive Computer Performance at Juilliard.\ + \ http://www.marikimura.com\n\n\nTomoyuki Kato is a renowned Japanese visual\ + \ artist/movie director who works in a wide range of high-tech projects including\ + \ advertisements, commercials, museums exhibitions and theme-parks. Kato's work\ + \ is known for the superb quality, high impact, originality and new technical\ + \ methods. 
Recently, Kato has been active in creating corporate future vision,\ + \ such as ``concept car,'' incorporating live action, computer graphics and animation\ + \ on project bases; his recent exhibition includes 2010 Shanghai Expo. His highly\ + \ acclaimed ``Grand Odyssey,'' created for 2005 Aichi Expo's Toshiba/Mitsui pavilion,\ + \ is now displayed at Nagasaki's Huistenbosch theme-park. In 2010, Kato created\ + \ ``Better Life from Japan,'' an exhibit for Otsuka Pharmaceutical company at\ + \ Shanghai Expo, using a 360-degree display. Kato has received and nominated\ + \ for numerous awards at international and national festivals, including Japan\ + \ Ministry of Culture Media Arts Festival, Los Angels International Short Film\ + \ Festival, Montreal International Film Festival and London International Advertising\ + \ Festival.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Tuesday May\ + \ 22, 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Mari Kimura\ + \ and Tomoyuki Kato},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg\ + \ Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month\ + \ = {May},\n publisher = {Electrical Engineering \\& Computer Science and Performing\ + \ Arts Technology, University of Michigan},\n title = {Eigenspace},\n year = {2012}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -10350,57 +10293,160 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: 'Music for Flesh II, interactive music for enhanced body' + title: Eigenspace year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Stine2012 - abstract: "Program notes:\n\nThis piece consists of a partially pre-composed acousmatic\ - \ composition actualized in real time by hand motion. 
The audio generated by the\ - \ hand motions is analyzed, colorized and projected beside the performer during\ - \ the performance. The motions and content of this piece are inspired by the late\ - \ Merce Cunningham and this performance is dedicated to him.\n\nComposer(s) Credits:\n\ - \nEli Stine\n\nInstrumentalist(s) Credits:\n\nEli Stine\n\nArtist(s) Biography:\n\ - \nEli Stine (born 1991 in Greenville, NC) is a composer, programmer, and sound\ - \ designer currently pursuing a Double Degree at Oberlin College, studying Technology\ - \ In Music And Related Arts and composition in the conservatory and Computer Science\ - \ in the college. Winner of the undergraduate award from the Society for Electro-Acoustic\ - \ Music in the United States (SEAMUS) in 2011, Eli has studied with Tom Lopez,\ - \ Lewis Nielson, and Per Bloland at Oberlin, focusing on electroacoustic and acoustic\ - \ music, as well as live performance with electronics. While at Oberlin Eli has\ - \ performed with Oberlin's Contemporary Music Ensemble, had works played in concert\ - \ by Oberlin's Society of Composers, inc. ensemble and student ensemble ACADEMY,\ - \ and collaborated with students and faculty across disciplines on collaborative\ - \ multimedia projects. More information about Eli's work can be found at www.oberlin.edu/student/estine/.\n\ - \nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" + ID: nime2012-music-KimYeo2012 + abstract: "Program notes:\n\nWhere Are You Standing? (2012) is a collaborative mobile\ + \ music piece using the digital compass on mobile phones as an intuitive, interactive\ + \ musical instrument. 
The piece features performers on stage making sound by aiming\ + \ at other performers: compass-measured orientation of each aiming gesture is\ + \ mapped to a specific musical note depending on which player is aimed at, and\ + \ is visualized on screen in real-time.\n\nThe piece begins with three performers\ + \ playing ``harmonic'' sounds by taking aim at each other. This consonance is\ + \ broken by the introduction of the fourth performer who represents conflict:\ + \ the notes played by this performer as well as the notes played by others when\ + \ they aim at this performer are dissonant to cause musical tension. Finally,\ + \ the last performer leaves the stage to resolve the tension, and the piece ends\ + \ with three performers back in congruity.\n\nComposer(s) Credits:\n\nBongjun\ + \ Kim, Woon Seung Yeo\n\nInstrumentalist(s) Credits:\n\nBongjun Kim (operator),\ + \ Woon Seung Yeo, Jeong-seob Lee, Seunghun Kim, Xuelian Yu (iPhones)\n\nArtist(s)\ + \ Biography:\n\nBongjun Kim (b. 1981, Seoul, Korea) is a Masters student at Korea\ + \ Advanced Institute of Science and Technology (KAIST) and a member of the Audio\ + \ and Interactive Media (AIM) Lab at the Graduate School of Culture Technology\ + \ (GSCT), KAIST. Kim has received his B.S. and M.S. degrees in Industrial and\ + \ Information Systems Engineering from Ajou University, and he has also worked\ + \ at Doosan Infracore as an R&D researcher. 
He is also a composer, performer,\ + \ and system developer of the KAIST Mobile Phone Orchestra (KAMPO), where he has\ + \ designed interactive mobile music performance system and composed the piece\ + \ ``Where Are You Standing?'' which features digital compass-based interaction.\ + \ Currently his research interests are algorithmic composition, music informatics,\ + \ machine improvisation, and mobile media as a new musical interface.\n\nWoon\ + \ Seung Yeo is a bassist, media artist, and computer music researcher/educator.\ + \ He is Assistant Professor at Korea Advanced Institute of Science and Technology\ + \ (KAIST) and leads the Audio and Interactive Media (AIM) Lab and the KAIST Mobile\ + \ Phone Orchestra (KAMPO). Yeo has received degrees from Seoul National University\ + \ (B.S. and M.S. in Electrical Engineering), University of California at Santa\ + \ Barbara (M.S. in Media Arts and Technology), and Stanford University (M.A. and\ + \ Ph.D. in Music). His research interests include digital audio signal processing,\ + \ musical acoustics, audiovisual art, cross-modal display, physical interaction\ + \ for music, musical interfaces, mobile media for music, and innovative performance\ + \ paradigm as well. Yeo has also curated/produced mobile music concerts, telematics\ + \ music concerts, and multimedia installations and exhibitions.\n\nJeong-seob\ + \ Lee is a Ph.D. student at the Graduate School of Culture Technology (GSCT),\ + \ KAIST, Korea, and a research member of Audio & Interactive Media Lab. He received\ + \ his M.S. degree from the same institute, and his undergraduate degree in mechanical\ + \ engineering from Seoul National University. As an amateur dancer and choreographer,\ + \ he is interested in various performances involving dance. His experiences on\ + \ stage and in engineering lead him to conduct research in interactive performance\ + \ paradigm and multimedia interface technology. 
He has produced a number of new\ + \ media performances through collaborations with dancers and musicians, and worked\ + \ as an audiovisual interaction designer. He is also interested in acoustic motion\ + \ detection with off-the-shelf audio devices.\n\nSeunghun Kim is a Ph.D. candidate\ + \ at KAIST and is a member of Audio and Interactive Media (AIM) Lab in the Graduate\ + \ School of Culture Technology (GSCT). He has received the B.S degree in electrical\ + \ and communications engineering from Information and Communications University\ + \ (ICU). He wrote his Master thesis on sound synthesis of the geomungo (a traditional\ + \ Korean stringed instrument) at KAIST. He has presented several papers on musical\ + \ interfaces at domestic/international conferences including the international\ + \ conference on new interfaces for musical expression (NIME) and the international\ + \ computer music conference (ICMC). In addition, he has participated in the development\ + \ of interactive installations, which were exhibited at Incheon International\ + \ Digital Art Festival (INDAF), KT&G SangSang Madang, Gwangju Design Biennale,\ + \ and Seoul Digital Media Content International Festival. He is also a member\ + \ of the KAIST Mobile Phone Orchestra (KAMPO).\n\nXuelian Yu was born and raised\ + \ in China and earned her B.S. in engineering at Jiangnan University's Digital\ + \ Media Technology program. 
She joined the Audio and Interactive Media (AIM) Lab\ + \ at the Graduate School of Culture Technology (GSCT), KAIST in the Fall of 2010\ + \ to combine her problem-solving skills and creative abilities to set up worlds\ + \ that people become characters in the environments and interact with their surroundings.\ + \ Xuelian is currently in Pittsburgh to discover more experience in projects that\ + \ produce artifacts that are intended to entertain, inspire or affect the participants,\ + \ at Entertainment Technology Center of Carnegie Mellon University and she focuses\ + \ on the research on the comparison of description on surround sound at the same\ + \ time. The passion for learning and expanding her experiences has strengthened\ + \ her goal to work in interactive design.\n\nConcert Venue and Time: Lydia Mendelssohn\ + \ Theatre, Tuesday May 22, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Eli Stine - bibtex: "@incollection{nime2012-music-Stine2012,\n abstract = {Program notes:\n\n\ - This piece consists of a partially pre-composed acousmatic composition actualized\ - \ in real time by hand motion. The audio generated by the hand motions is analyzed,\ - \ colorized and projected beside the performer during the performance. The motions\ - \ and content of this piece are inspired by the late Merce Cunningham and this\ - \ performance is dedicated to him.\n\nComposer(s) Credits:\n\nEli Stine\n\nInstrumentalist(s)\ - \ Credits:\n\nEli Stine\n\nArtist(s) Biography:\n\nEli Stine (born 1991 in Greenville,\ - \ NC) is a composer, programmer, and sound designer currently pursuing a Double\ - \ Degree at Oberlin College, studying Technology In Music And Related Arts and\ - \ composition in the conservatory and Computer Science in the college. 
Winner\ - \ of the undergraduate award from the Society for Electro-Acoustic Music in the\ - \ United States (SEAMUS) in 2011, Eli has studied with Tom Lopez, Lewis Nielson,\ - \ and Per Bloland at Oberlin, focusing on electroacoustic and acoustic music,\ - \ as well as live performance with electronics. While at Oberlin Eli has performed\ - \ with Oberlin's Contemporary Music Ensemble, had works played in concert by Oberlin's\ - \ Society of Composers, inc. ensemble and student ensemble ACADEMY, and collaborated\ - \ with students and faculty across disciplines on collaborative multimedia projects.\ - \ More information about Eli's work can be found at www.oberlin.edu/student/estine/.\n\ - \nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n\ - \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Eli Stine},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ - \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ - \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Motion-Influenced Composition},\n year = {2012}\n}\n" + author: Bongjun Kim and Woon Seung Yeo + bibtex: "@incollection{nime2012-music-KimYeo2012,\n abstract = {Program notes:\n\ + \n\\emph{Where Are You Standing?} (2012) is a collaborative mobile music piece\ + \ using the digital compass on mobile phones as an intuitive, interactive musical\ + \ instrument. The piece features performers on stage making sound by aiming at\ + \ other performers: compass-measured orientation of each aiming gesture is mapped\ + \ to a specific musical note depending on which player is aimed at, and is visualized\ + \ on screen in real-time.\n\nThe piece begins with three performers playing ``harmonic''\ + \ sounds by taking aim at each other. 
This consonance is broken by the introduction\ + \ of the fourth performer who represents conflict: the notes played by this performer\ + \ as well as the notes played by others when they aim at this performer are dissonant\ + \ to cause musical tension. Finally, the last performer leaves the stage to resolve\ + \ the tension, and the piece ends with three performers back in congruity.\n\n\ + Composer(s) Credits:\n\nBongjun Kim, Woon Seung Yeo\n\nInstrumentalist(s) Credits:\n\ + \nBongjun Kim (operator), Woon Seung Yeo, Jeong-seob Lee, Seunghun Kim, Xuelian\ + \ Yu (iPhones)\n\nArtist(s) Biography:\n\nBongjun Kim (b. 1981, Seoul, Korea)\ + \ is a Masters student at Korea Advanced Institute of Science and Technology (KAIST)\ + \ and a member of the Audio and Interactive Media (AIM) Lab at the Graduate School\ + \ of Culture Technology (GSCT), KAIST. Kim has received his B.S. and M.S. degrees\ + \ in Industrial and Information Systems Engineering from Ajou University, and\ + \ he has also worked at Doosan Infracore as an R\\&D researcher. He is also a\ + \ composer, performer, and system developer of the KAIST Mobile Phone Orchestra\ + \ (KAMPO), where he has designed interactive mobile music performance system and\ + \ composed the piece ``Where Are You Standing?'' which features digital compass-based\ + \ interaction. Currently his research interests are algorithmic composition, music\ + \ informatics, machine improvisation, and mobile media as a new musical interface.\n\ + \nWoon Seung Yeo is a bassist, media artist, and computer music researcher/educator.\ + \ He is Assistant Professor at Korea Advanced Institute of Science and Technology\ + \ (KAIST) and leads the Audio and Interactive Media (AIM) Lab and the KAIST Mobile\ + \ Phone Orchestra (KAMPO). Yeo has received degrees from Seoul National University\ + \ (B.S. and M.S. in Electrical Engineering), University of California at Santa\ + \ Barbara (M.S. 
in Media Arts and Technology), and Stanford University (M.A. and\ + \ Ph.D. in Music). His research interests include digital audio signal processing,\ + \ musical acoustics, audiovisual art, cross-modal display, physical interaction\ + \ for music, musical interfaces, mobile media for music, and innovative performance\ + \ paradigm as well. Yeo has also curated/produced mobile music concerts, telematics\ + \ music concerts, and multimedia installations and exhibitions.\n\nJeong-seob\ + \ Lee is a Ph.D. student at the Graduate School of Culture Technology (GSCT),\ + \ KAIST, Korea, and a research member of Audio \\& Interactive Media Lab. He received\ + \ his M.S. degree from the same institute, and his undergraduate degree in mechanical\ + \ engineering from Seoul National University. As an amateur dancer and choreographer,\ + \ he is interested in various performances involving dance. His experiences on\ + \ stage and in engineering lead him to conduct research in interactive performance\ + \ paradigm and multimedia interface technology. He has produced a number of new\ + \ media performances through collaborations with dancers and musicians, and worked\ + \ as an audiovisual interaction designer. He is also interested in acoustic motion\ + \ detection with off-the-shelf audio devices.\n\nSeunghun Kim is a Ph.D. candidate\ + \ at KAIST and is a member of Audio and Interactive Media (AIM) Lab in the Graduate\ + \ School of Culture Technology (GSCT). He has received the B.S degree in electrical\ + \ and communications engineering from Information and Communications University\ + \ (ICU). He wrote his Master thesis on sound synthesis of the geomungo (a traditional\ + \ Korean stringed instrument) at KAIST. He has presented several papers on musical\ + \ interfaces at domestic/international conferences including the international\ + \ conference on new interfaces for musical expression (NIME) and the international\ + \ computer music conference (ICMC). 
In addition, he has participated in the development\ + \ of interactive installations, which were exhibited at Incheon International\ + \ Digital Art Festival (INDAF), KT\\&G SangSang Madang, Gwangju Design Biennale,\ + \ and Seoul Digital Media Content International Festival. He is also a member\ + \ of the KAIST Mobile Phone Orchestra (KAMPO).\n\nXuelian Yu was born and raised\ + \ in China and earned her B.S. in engineering at Jiangnan University's Digital\ + \ Media Technology program. She joined the Audio and Interactive Media (AIM) Lab\ + \ at the Graduate School of Culture Technology (GSCT), KAIST in the Fall of 2010\ + \ to combine her problem-solving skills and creative abilities to set up worlds\ + \ that people become characters in the environments and interact with their surroundings.\ + \ Xuelian is currently in Pittsburgh to discover more experience in projects that\ + \ produce artifacts that are intended to entertain, inspire or affect the participants,\ + \ at Entertainment Technology Center of Carnegie Mellon University and she focuses\ + \ on the research on the comparison of description on surround sound at the same\ + \ time. 
The passion for learning and expanding her experiences has strengthened\ + \ her goal to work in interactive design.\n\nConcert Venue and Time: Lydia Mendelssohn\ + \ Theatre, Tuesday May 22, 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n\ + \ author = {Bongjun Kim and Woon Seung Yeo},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ + \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ + \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ + \ title = {Where Are You Standing?},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -10408,69 +10454,77 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Motion-Influenced Composition + title: 'Where Are You Standing?' year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Ciufo2012 - abstract: "Program notes:\n\nFragments is an improvisational performance piece that\ - \ utilizes physical treatments inside an acoustic piano, as well as digital treatments\ - \ provided by computer-based digital signal processing. In addition to using a\ - \ few simple physical controls (foot pedals and custom iPad interface) this piece\ - \ also uses the performed audio stream as a gestural control source. The preformed\ - \ audio stream is analyzed and important features are extracted. The current state\ - \ and trajectory of these audio features are used to influence the behavior of\ - \ the real-time signal processing environment. This creates a computer-mediated\ - \ performance system that combines the capabilities of computation and sound processing\ - \ with the tactile and expressive intimacy of the prepared acoustic piano. 
Fragments\ - \ invites the listener into a unique and complex sonic environment where expectation,\ - \ repetition, spontaneity, and discovery are intertwined.\n\nComposer(s) Credits:\n\ - \nThomas Ciufo\n\nInstrumentalist(s) Credits:\n\nThomas Ciufo\n\nArtist(s) Biography:\n\ - \nThomas Ciufo is a composer, improviser, sound artist, and researcher working\ - \ primarily in the areas of electroacoustic improvisational performance and hybrid\ - \ instrument / interactive systems design. He currently serves as Assistant Professor\ - \ of Recording Arts and Music Technology in the Department of Music at Towson\ - \ University. He has been active for many years in the areas of composition, performance,\ - \ interactive installation, video work, as well as music technology education.\ - \ Festival performances include the SPARK festival in Minneapolis, the Enaction\ - \ in Arts conference in Grenoble, the International Society for Improvised Music\ - \ conference, the NWEAMO festival, the Extensible Electric Guitar Festival, various\ - \ NIME conferences, and the ICMC / Ear to the Earth conference.\n\nConcert Venue\ - \ and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" + ID: nime2012-music-Dahlstedt2012 + abstract: "Program notes:\n\nAn improvised performance on a custom built instrument,\ + \ using a simple pencil drawing as a gestural interface for controlling complex\ + \ analog synthesis. The interface works by using the resistive properties of carbon\ + \ to create a voltage potential field in the graphite/pencil markings on the paper\ + \ using custom movable electrodes, made from coins. Then, control voltages are\ + \ extracted from other points on the paper, controlling various aspects of the\ + \ synthesized sound. The design was inspired by my previous research in complex\ + \ mappings for advanced digital instruments, and provides a similarly dynamic\ + \ playing environment for analogue synthesis. 
The interface is very lo-tech, easy\ + \ to build, and should be possible to use with any analogue modular synthesizer.\ + \ Here, I use it with a Bugbrand modular, built by Tom Bugs in Bristol, UK. The\ + \ interface is presented in more detail in a paper presentation at the NIME conference.\n\ + \nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nPalle Dahlstedt (pencil\ + \ fields interface & modular synthesizer)\n\nArtist(s) Biography:\n\nPalle Dahlstedt\ + \ (b.1971), composer, improviser, pianist and researcher from Stockholm, since\ + \ 1994 living in Göteborg, Sweden. With composition degrees from the Academies\ + \ of Malmö and Göteborg and a PhD from Chalmers University of Technology in evolutionary\ + \ algorithms for composition, he is currently the main lecturer in electronic\ + \ music composition at the Academy of Music and Drama, University of Gothenburg,\ + \ and artistic director the Lindblad Studios. Also, he is associate professor\ + \ in computer-aided creativity at the Department of Applied IT, performing extensive\ + \ research in novel technology-based performance and improvisation techniques\ + \ for electronic and acoustic musicians, and in computer models of artistic creative\ + \ processes. His music has been performed on six continents and received several\ + \ awards, e.g., in 2001 he was awarded the prestigeous Gaudeamus Prize, as the\ + \ first ever for an electronic work. He is also performing on piano with and without\ + \ electronics, and in the electronic free impro duo pantoMorf.\n\nConcert Venue\ + \ and Time: Necto, Tuesday May 22, 9:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Thomas Ciufo - bibtex: "@incollection{nime2012-music-Ciufo2012,\n abstract = {Program notes:\n\n\ - \\emph{Fragments} is an improvisational performance piece that utilizes physical\ - \ treatments inside an acoustic piano, as well as digital treatments provided\ - \ by computer-based digital signal processing. 
In addition to using a few simple\ - \ physical controls (foot pedals and custom iPad interface) this piece also uses\ - \ the performed audio stream as a gestural control source. The preformed audio\ - \ stream is analyzed and important features are extracted. The current state and\ - \ trajectory of these audio features are used to influence the behavior of the\ - \ real-time signal processing environment. This creates a computer-mediated performance\ - \ system that combines the capabilities of computation and sound processing with\ - \ the tactile and expressive intimacy of the prepared acoustic piano. \\emph{Fragments}\ - \ invites the listener into a unique and complex sonic environment where expectation,\ - \ repetition, spontaneity, and discovery are intertwined.\n\nComposer(s) Credits:\n\ - \nThomas Ciufo\n\nInstrumentalist(s) Credits:\n\nThomas Ciufo\n\nArtist(s) Biography:\n\ - \nThomas Ciufo is a composer, improviser, sound artist, and researcher working\ - \ primarily in the areas of electroacoustic improvisational performance and hybrid\ - \ instrument / interactive systems design. He currently serves as Assistant Professor\ - \ of Recording Arts and Music Technology in the Department of Music at Towson\ - \ University. 
He has been active for many years in the areas of composition, performance,\ - \ interactive installation, video work, as well as music technology education.\ - \ Festival performances include the SPARK festival in Minneapolis, the Enaction\ - \ in Arts conference in Grenoble, the International Society for Improvised Music\ - \ conference, the NWEAMO festival, the Extensible Electric Guitar Festival, various\ - \ NIME conferences, and the ICMC / Ear to the Earth conference.\n\nConcert Venue\ - \ and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n address =\ - \ {Ann Arbor, Michigan, U.S.A.},\n author = {Thomas Ciufo},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ - \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ - \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Fragments},\n year = {2012}\n}\n" + author: Palle Dahlstedt + bibtex: "@incollection{nime2012-music-Dahlstedt2012,\n abstract = {Program notes:\n\ + \nAn improvised performance on a custom built instrument, using a simple pencil\ + \ drawing as a gestural interface for controlling complex analog synthesis. The\ + \ interface works by using the resistive properties of carbon to create a voltage\ + \ potential field in the graphite/pencil markings on the paper using custom movable\ + \ electrodes, made from coins. Then, control voltages are extracted from other\ + \ points on the paper, controlling various aspects of the synthesized sound. The\ + \ design was inspired by my previous research in complex mappings for advanced\ + \ digital instruments, and provides a similarly dynamic playing environment for\ + \ analogue synthesis. The interface is very lo-tech, easy to build, and should\ + \ be possible to use with any analogue modular synthesizer. 
Here, I use it with\ + \ a Bugbrand modular, built by Tom Bugs in Bristol, UK. The interface is presented\ + \ in more detail in a paper presentation at the NIME conference.\n\nComposer(s)\ + \ Credits:\n\nInstrumentalist(s) Credits:\n\nPalle Dahlstedt (pencil fields interface\ + \ \\& modular synthesizer)\n\nArtist(s) Biography:\n\nPalle Dahlstedt (b.1971),\ + \ composer, improviser, pianist and researcher from Stockholm, since 1994 living\ + \ in G\\\"{o}teborg, Sweden. With composition degrees from the Academies of Malm\\\ + \"{o} and G\\\"{o}teborg and a PhD from Chalmers University of Technology in evolutionary\ + \ algorithms for composition, he is currently the main lecturer in electronic\ + \ music composition at the Academy of Music and Drama, University of Gothenburg,\ + \ and artistic director the Lindblad Studios. Also, he is associate professor\ + \ in computer-aided creativity at the Department of Applied IT, performing extensive\ + \ research in novel technology-based performance and improvisation techniques\ + \ for electronic and acoustic musicians, and in computer models of artistic creative\ + \ processes. His music has been performed on six continents and received several\ + \ awards, e.g., in 2001 he was awarded the prestigeous Gaudeamus Prize, as the\ + \ first ever for an electronic work. 
He is also performing on piano with and without\ + \ electronics, and in the electronic free impro duo pantoMorf.\n\nConcert Venue\ + \ and Time: Necto, Tuesday May 22, 9:00pm},\n address = {Ann Arbor, Michigan,\ + \ U.S.A.},\n author = {Palle Dahlstedt},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n day =\ + \ {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich and\ + \ Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering \\\ + & Computer Science and Performing Arts Technology, University of Michigan},\n\ + \ title = {Pencil Fields},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -10478,113 +10532,97 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Fragments + title: Pencil Fields year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Novello2012 - abstract: "Program notes:\n\nIn this piece we explore the personality of the ``post-modern\ - \ man''. Exposed to aggressive stimulation and overwhelming data streams, he must\ - \ make important choices to follow a rational ``mind path'' while his time quickly\ - \ runs out. The performer, impersonating the post-modern man, wears an electro-encephalographic\ - \ headset that detects his mind activity. The analysis of its output reveals the\ - \ power of the performer's three thoughts which are connected to forward movement,\ - \ turn left, and turn right in the virtual maze projected on a screen.\n\nDespite\ - \ the distracting external forces, embodied by the sound and flickering visuals,\ - \ the performer must remain paradoxically calm to generate the correct states\ - \ of mind that let him navigate his way out of the maze. 
Every time the performer\ - \ crosses a red boundary in the maze, he gets closer to the exit, and a new stochastic\ - \ musical scene is triggered. The time and structure of the composition is thus\ - \ entirely determined by the choices and concentration of the performer.\n\nComposer(s)\ - \ Credits:\n\nAlberto Novello\n\nInstrumentalist(s) Credits:\n\nAlberto Novello\ - \ (music, EEG analysis, top visuals), Emmanuel Elias Flores (frontal visuals),\ - \ Honza Svasek (Butoh, EEG control), E. McKinney (photography)\n\nArtist(s) Biography:\n\ - \nAlberto Novello a.k.a. JesterN studied piano and double bass at the Conservatory\ - \ of Udine, graduated in Physics at the University of Trieste, he completed in\ - \ 2004 the master ``Art, Science and Technologies'' at the Institut National Polytechnique\ - \ of Grenoble, France, under the guidance of J.C. Risset, and C. Cadoz. He was\ - \ teacher of electronic music composition at the Conservatory of Cuneo, Italy.\ - \ From 2004 to 2009 he worked at the Philips Research, Eindhoven, Netherlands,\ - \ in the field of Music Perception and Music Information Retrieval with several\ - \ publications in international conferences and journals. In 2009 he received\ - \ a PhD degree at the Technische Universiteit Eindhoven. He attended the Mater\ - \ of Sonology under the guidance of Paul Berg, Joel Ryan, and Richard Barret.\ - \ Since 2004 he produced several electronic audio visual pieces assisting among\ - \ others Alvin Lucier, Trevor Wishart, and Butch Morris. His pieces can be found\ - \ on his website: http://dindisalvadi.free.fr/.\n\nHonza Svasek was born in 1954\ - \ in the Netherlands. After his studies he\nmoved to Copenhagen were he became\ - \ a graphic designer. Then he worked as computer professional and became a UNIX/Linux\ - \ expert. In present he is a visual artist and performer. Honza started his research\ - \ of Butoh 5 years ago. 
He studied with Butoh performers such as Itto Morita,\ - \ Atsushi Takenouchi, Ken May, Yumiko Yoshioka,Yuko Ota, Imre Thormann. Currently\ - \ he is studying with Rhizome Lee at the Himalaya Subbody Butoh School. http://Honz.nl\n\ - \nEmmanuel Elias Flores is a media designer and software artist based in the Netherlands.\ - \ He studied music and cinema in Mexico and Sonology at the Royal Conservatory\ - \ in The Hague (NL). His work is centered around the idea of exploring different\ - \ types of cinematic experiences and the enhancement of new narrative forms which\ - \ bridge technology, art and perception. His work has been presented on a wide\ - \ range of formats: from audiovisual pieces for electronic music, opera, dance\ - \ and live cinema sets, to the design of public installations and interactive\ - \ applications for mobile devices. In parallel to his creative activities he has\ - \ worked as a developer and IT/video consultant for different commercial and art\ - \ enterprises and as a programmer for portable devices. www.emmanuelflores.net\n\ - \nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Alberto Novello - bibtex: "@incollection{nime2012-music-Novello2012,\n abstract = {Program notes:\n\ - \nIn this piece we explore the personality of the ``post-modern man''. Exposed\ - \ to aggressive stimulation and overwhelming data streams, he must make important\ - \ choices to follow a rational ``mind path'' while his time quickly runs out.\ - \ The performer, impersonating the post-modern man, wears an electro-encephalographic\ - \ headset that detects his mind activity. 
The analysis of its output reveals the\ - \ power of the performer's three thoughts which are connected to forward movement,\ - \ turn left, and turn right in the virtual maze projected on a screen.\n\nDespite\ - \ the distracting external forces, embodied by the sound and flickering visuals,\ - \ the performer must remain paradoxically calm to generate the correct states\ - \ of mind that let him navigate his way out of the maze. Every time the performer\ - \ crosses a red boundary in the maze, he gets closer to the exit, and a new stochastic\ - \ musical scene is triggered. The time and structure of the composition is thus\ - \ entirely determined by the choices and concentration of the performer.\n\nComposer(s)\ - \ Credits:\n\nAlberto Novello\n\nInstrumentalist(s) Credits:\n\nAlberto Novello\ - \ (music, EEG analysis, top visuals), Emmanuel Elias Flores (frontal visuals),\ - \ Honza Svasek (Butoh, EEG control), E. McKinney (photography)\n\nArtist(s) Biography:\n\ - \nAlberto Novello a.k.a. JesterN studied piano and double bass at the Conservatory\ - \ of Udine, graduated in Physics at the University of Trieste, he completed in\ - \ 2004 the master ``Art, Science and Technologies'' at the Institut National Polytechnique\ - \ of Grenoble, France, under the guidance of J.C. Risset, and C. Cadoz. He was\ - \ teacher of electronic music composition at the Conservatory of Cuneo, Italy.\ - \ From 2004 to 2009 he worked at the Philips Research, Eindhoven, Netherlands,\ - \ in the field of Music Perception and Music Information Retrieval with several\ - \ publications in international conferences and journals. In 2009 he received\ - \ a PhD degree at the Technische Universiteit Eindhoven. He attended the Mater\ - \ of Sonology under the guidance of Paul Berg, Joel Ryan, and Richard Barret.\ - \ Since 2004 he produced several electronic audio visual pieces assisting among\ - \ others Alvin Lucier, Trevor Wishart, and Butch Morris. 
His pieces can be found\ - \ on his website: http://dindisalvadi.free.fr/.\n\nHonza Svasek was born in 1954\ - \ in the Netherlands. After his studies he\nmoved to Copenhagen were he became\ - \ a graphic designer. Then he worked as computer professional and became a UNIX/Linux\ - \ expert. In present he is a visual artist and performer. Honza started his research\ - \ of Butoh 5 years ago. He studied with Butoh performers such as Itto Morita,\ - \ Atsushi Takenouchi, Ken May, Yumiko Yoshioka,Yuko Ota, Imre Thormann. Currently\ - \ he is studying with Rhizome Lee at the Himalaya Subbody Butoh School. http://Honz.nl\n\ - \nEmmanuel Elias Flores is a media designer and software artist based in the Netherlands.\ - \ He studied music and cinema in Mexico and Sonology at the Royal Conservatory\ - \ in The Hague (NL). His work is centered around the idea of exploring different\ - \ types of cinematic experiences and the enhancement of new narrative forms which\ - \ bridge technology, art and perception. His work has been presented on a wide\ - \ range of formats: from audiovisual pieces for electronic music, opera, dance\ - \ and live cinema sets, to the design of public installations and interactive\ - \ applications for mobile devices. In parallel to his creative activities he has\ - \ worked as a developer and IT/video consultant for different commercial and art\ - \ enterprises and as a programmer for portable devices. 
www.emmanuelflores.net\n\ - \nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n\ - \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Alberto Novello},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ - \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + ID: nime2012-music-BrophyLabadie2012 + abstract: "Program notes:\n\nMany of the discourses around technological development\ + \ in music are deeply concerned with aspects of control; i.e. how does one exert\ + \ their control, or ``mastery'' over the technology they use. However, we propose\ + \ that technological systems with a certain amount of unpredictability and randomness\ + \ may also be useful, especially for improvisation. As an improvisation duo, our\ + \ method often involves designing electronic instruments whose behaviors are somewhat\ + \ unpredictable. As a result, our entire aesthetic is largely based on ``riding''\ + \ the boundary of control. Working in this way creates a situation where we are\ + \ often forced to react to, and work with, the unexpected.\n\nOur improvisation\ + \ features a number of handmade and hacked electronic instruments, all of which\ + \ have been designed to behave somewhat unpredictably.\n\nComposer(s) Credits:\n\ + \nInstrumentalist(s) Credits:\n\nDaniel Brophy (electronics), Colin Labadie (electronics)\n\ + \nArtist(s) Biography:\n\nDaniel Brophy is a composer, performer and improviser\ + \ of various musical styles and instrumentations ranging from orchestral and chamber\ + \ music to extreme metal, sound installations, experimental improvisation and\ + \ noise. 
He is a recipient of a SSHRC research grant, the 2012 KW Chamber Orchestra\ + \ composition prize, the University of Alberta's President's Award of Distinction,\ + \ and a Queen Elizabeth II Graduate Scholarship. Daniel currently resides in Edmonton,\ + \ Alberta where he is pursuing a Doctor of Music degree in composition under the\ + \ supervision of Dr. Scott Smallwood. He is member of the noise duo MUGBAIT and\ + \ is proud to have worked with a number of other wonderful musicians, dancers\ + \ and visual artists such as The Enterprise Quartet, junctQin, Digital Prowess,\ + \ TorQ, Gerry Morita, Werner Friesen and many others. Daniel is currently developing\ + \ interactive clothing for dancers, utilizing a combination of high and low technology.\n\ + \nColin Labadie is a composer and performer currently based in Edmonton, Alberta.\ + \ His musical output ranges from solo, chamber, choral, orchestral, and electroacoustic\ + \ compositions, to sound installations, multimedia collaboration, experimental\ + \ improvisation, and noise music. His work is shaped by a broad range of musical\ + \ influences, at times dealing exclusively with repetition, patterns, and subtle\ + \ variation, while at others exploring chaos and unpredictability.\nColin holds\ + \ a BMus from Wilfrid Laurier University, where he studied with Linda Catlin Smith\ + \ and Peter Hatch, and an MMus from the University of Alberta where he studied\ + \ with Howard Bashaw, Mark Hannesson, Scott Smallwood, and Andriy Talpash. Currently,\ + \ he is pursuing a Doctoral degree in Composition from the University of Alberta\ + \ under the supervision of Scott Smallwood. He is the recipient of SSHRC's Joseph-Armand\ + \ Bombardier Master's and Doctoral Scholarships, the University of Alberta Master's\ + \ and Doctoral Recruitment Scholarships, and the President's Doctoral Prize of\ + \ Distinction.\n\nConcert Venue and Time: Necto, Tuesday May 22, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' 
+ author: Daniel Brophy and Colin Labadie + bibtex: "@incollection{nime2012-music-BrophyLabadie2012,\n abstract = {Program notes:\n\ + \nMany of the discourses around technological development in music are deeply\ + \ concerned with aspects of control; i.e. how does one exert their control, or\ + \ ``mastery'' over the technology they use. However, we propose that technological\ + \ systems with a certain amount of unpredictability and randomness may also be\ + \ useful, especially for improvisation. As an improvisation duo, our method often\ + \ involves designing electronic instruments whose behaviors are somewhat unpredictable.\ + \ As a result, our entire aesthetic is largely based on ``riding'' the boundary\ + \ of control. Working in this way creates a situation where we are often forced\ + \ to react to, and work with, the unexpected.\n\nOur improvisation features a\ + \ number of handmade and hacked electronic instruments, all of which have been\ + \ designed to behave somewhat unpredictably.\n\nComposer(s) Credits:\n\nInstrumentalist(s)\ + \ Credits:\n\nDaniel Brophy (electronics), Colin Labadie (electronics)\n\nArtist(s)\ + \ Biography:\n\nDaniel Brophy is a composer, performer and improviser of various\ + \ musical styles and instrumentations ranging from orchestral and chamber music\ + \ to extreme metal, sound installations, experimental improvisation and noise.\ + \ He is a recipient of a SSHRC research grant, the 2012 KW Chamber Orchestra composition\ + \ prize, the University of Alberta's President's Award of Distinction, and a Queen\ + \ Elizabeth II Graduate Scholarship. Daniel currently resides in Edmonton, Alberta\ + \ where he is pursuing a Doctor of Music degree in composition under the supervision\ + \ of Dr. Scott Smallwood. 
He is member of the noise duo MUGBAIT and is proud to\ + \ have worked with a number of other wonderful musicians, dancers and visual artists\ + \ such as The Enterprise Quartet, junctQin, Digital Prowess, TorQ, Gerry Morita,\ + \ Werner Friesen and many others. Daniel is currently developing interactive clothing\ + \ for dancers, utilizing a combination of high and low technology.\n\nColin Labadie\ + \ is a composer and performer currently based in Edmonton, Alberta. His musical\ + \ output ranges from solo, chamber, choral, orchestral, and electroacoustic compositions,\ + \ to sound installations, multimedia collaboration, experimental improvisation,\ + \ and noise music. His work is shaped by a broad range of musical influences,\ + \ at times dealing exclusively with repetition, patterns, and subtle variation,\ + \ while at others exploring chaos and unpredictability.\nColin holds a BMus from\ + \ Wilfrid Laurier University, where he studied with Linda Catlin Smith and Peter\ + \ Hatch, and an MMus from the University of Alberta where he studied with Howard\ + \ Bashaw, Mark Hannesson, Scott Smallwood, and Andriy Talpash. Currently, he is\ + \ pursuing a Doctoral degree in Composition from the University of Alberta under\ + \ the supervision of Scott Smallwood. 
He is the recipient of SSHRC's Joseph-Armand\ + \ Bombardier Master's and Doctoral Scholarships, the University of Alberta Master's\ + \ and Doctoral Recruitment Scholarships, and the President's Doctoral Prize of\ + \ Distinction.\n\nConcert Venue and Time: Necto, Tuesday May 22, 9:00pm},\n address\ + \ = {Ann Arbor, Michigan, U.S.A.},\n author = {Daniel Brophy and Colin Labadie},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie\ + \ and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Fragmentation},\n year = {2012}\n}\n" + \ of Michigan},\n title = {Munich Eunuch},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -10592,130 +10630,248 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Fragmentation + title: Munich Eunuch year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-TrailKellOdowichuk2012 - abstract: "Program notes:\n\nMå ne Havn (mounhoun) is an improvisational multi-media\ - \ performance system for extended vibraphone with accompanying custom LED sculptures\ - \ and projected visuals. The music draws specifically from NYC free jazz, the\ - \ funeral music of the Lobi people of northern Ghana, Dub, psych rock and minimalism.\ - \ Abstract interactive light sculptures actuated from the instrument's audio and\ - \ controller data will accompany the performance, creating a visually shifting\ - \ immersive space. The sculptures, named `Takete' and `Maluma', reference Gestalt\ - \ psychology and the known correlation between our perceptions of sound and light.\ - \ Mappings will reflect this phenomenon. 
The piece uses a pitched percussion tool\ - \ suite developed by the Music Intelligence & Sound Technology Collective at the\ - \ University of Victoria, including: Magic Eyes (3D gesture controller), Ghost\ - \ Hands (control data looper), MSTR DRMMR++ (rhythm template as control switches),\ - \ Fantom Faders (vibraphone bars as control faders) and Gyil Gourd (physical modeling\ - \ of the Lobi xylophone's gourd resonator).\n\nComposer(s) Credits:\n\nShawn Trail,\ - \ Thor Kell, Gabrielle Odowichuk (Artistic Director)\n\nInstrumentalist(s) Credits:\n\ - \nShawn Trail (xtended Vibraphone, Notomoton- robotic drum, suspended cymbal)\n\ - \nArtist(s) Biography:\n\nShawn Trail: Electro-acoustic percussionist, Shawn Trail,\ - \ designs and builds new performance technologies for acoustic pitched percussion\ - \ instruments integrating musical robotics, physical modeling synthesis, and HCI.\ - \ He was Control Interface and Robotics Technician for Pat Metheny's Orchestrion\ - \ World Tour (2010), Fulbright Scholar at Medialogy- Aalborg University, Copenhagen\ - \ researching DSP, synthesis, and HCI (2009), and composer-in-residence with League\ - \ of Electronic Musical Urban Robots (2008). In 2002 he conducted field research\ - \ in Ghana on the Gyil (traditional xylophone). He has a Master of Music in Studio\ - \ Composition from Purchase Conservatory of Music and a BA in percussion performance\ - \ and music technology. He is an Interdisciplinary PhD candidate in Computer Science,\ - \ Electrical Engineering, and Music with MISTIC at the University of Victoria.\ - \ Performing solo under the moniker TXTED, his multi- media performance works\ - \ singularly revolve around minimal, textural evolving polyrhythmic, melodic ostinati\ - \ propelled by a sense of urgency intrinsic to cultural music rituals informed\ - \ by specific traditions.\n\nThor Kell: As a composer, programmer, and DJ, Thor\ - \ Kell likes combining interesting things in unique ways. 
A recent graduate of\ - \ the University of Victoria's Music / Computer Science program, he will begin\ - \ his MA at McGill University in the fall, focusing on interactions between performer,\ - \ interface, and software. While at UVic, he received a Jamie Cassels Undergraduate\ - \ Research Award: his research involved prototyping and composing for a gestural\ - \ control mapping system for extending the marimba. His traditional compositions\ - \ are all clockwork riffs and hidden structures, based on mathematical constants\ - \ or time- stretched quotes from the English folk music canon: he has written\ - \ for everything from full orchestra to solo piano. He has programmed for The\ - \ Echo Nest and SoundCloud. In his secret life as a DJ and techno maven, he has\ - \ released chart-toppers on Kompakt, impossibly deep jams on Fade, and hour-long\ - \ remix / video symphonies on his own label, Tide Pool.\n\nGabrielle Odowichuk\ - \ is a graduate student in Electrical Engineering at the University of Victoria,\ - \ working in the MISTIC research lab. A specialist in DSP and MIR, her research\ - \ has focused on sound spatialization and gesture-based control of sound and music,\ - \ developing a variety of prototypes, including Fantom Faders and Magic Eyes,\ - \ the mallet tracking and gesture control applications used in this performance.\ - \ For Møane Havn (mounhoun), she draws on previous experience in art direction\ - \ and stage design to produce unique real-time gesture-controlled visualizations.\ - \ She designed, built, and developed the interactive LED sculptures, Takete and\ - \ Maluma, used in this piece, as well as the projections. 
Her work has been published\ - \ by ICMC, IEEE, and NIME.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre,\ - \ Wednesday May 23, 7:00pm" + ID: nime2012-music-FiggMcCormackCox2012 + abstract: "Program notes:\n\nThis work merges sound and light to illuminate complex\ + \ rhythmic motives, polyrhythms and metrical patterns in a visual display generated\ + \ by three drummers playing six ``light'' drums. These new instruments bring to\ + \ life the dreams of 20th century synesthetes, such as Wassily Kandinsky and Alexander\ + \ Scriabin and others who sought to create an imagined ``visual music,'' an ideal\ + \ synthesis of music and visual art.\n\nCommunity Light Beacons are percussion\ + \ instruments that leverage the potentials of music, analog technology, and human-generated\ + \ power to visualize sound. These instruments add the dimension of light to the\ + \ ancient tradition of drumming. The drums are user-powered, and when they are\ + \ played---banged, hit and tapped---the vibrations from the drumhead are converted\ + \ to electricity by the internal speaker transducer. The generated energy powers\ + \ ultra bright LEDs, which light up with every hit and beam out from the Fresnel\ + \ lens.\n\nComposer(s) Credits:\n\nJenn Figg, Matthew McCormack, Paul Cox\n\n\ + Instrumentalist(s) Credits:\n\nRyan Hilty, Samuel Haese, Eric Young (Kinetic Light\ + \ Drums)\n\nArtist(s) Biography:\n\nJenn Figg is an artist investigating the connections\ + \ between industry, science and art through the transformation of energy, performative\ + \ objects and constructed ecosystems. She graduated with a BFA in Textiles from\ + \ the Rhode Island School of Design and an MFA from the University of California\ + \ at Santa Barbara. She is pursuing her Ph.D. in Media, Art, and Text at Virginia\ + \ Commonwealth University. She lives in Baltimore and is an Assistant Professor\ + \ of Art at Towson University in Maryland. 
Exhibitions include The Print Center\ + \ in Philadelphia, Pennsylvania, The Art House at the Jones Center in Austin,\ + \ Texas, Virginia MOCA in Virginia Beach, Virginia, the Columbus Center of Science\ + \ and Industry in Columbus, Ohio, the Ingenuity Festival in Cleveland, Ohio. Other\ + \ awards and residencies include the MacDowell Colony, the Lower Manhattan Cultural\ + \ Council Residency, the Great Lakes College Association New Directions Initiative,\ + \ and the University of California Interdisciplinary Humanities Center, Visual,\ + \ Performing & Media Arts Award.\n\nMatthew McCormack explores energy transformation\ + \ and expression through technology, kinetic sculpture and blown glass. He graduated\ + \ with a BFA in Glass from The Ohio State University and is now living in Baltimore,\ + \ Maryland. He is pursuing an Interdisciplinary MFA at Towson University. His\ + \ research interests include modifying a speaker transducer for optimum energy\ + \ generation and developing a series of rapid prototyped Fresnel lens stamps for\ + \ quartz crystal light instruments. His work has been featured at the Virginia\ + \ Museum of Contemporary Art in Virginia Beach, Virginia, the Columbus Center\ + \ of Science and Industry in Columbus, Ohio, the Toledo Museum of Art in Toledo,\ + \ Ohio, the Rankin Art Gallery at Ferris State University in Big Rapids, Michigan,\ + \ the National Museum of Glass in Eskisehir, Turkey, the Franklin Park Conservatory\ + \ in Columbus, Ohio, the Ingenuity Festival in Cleveland, Ohio, and as part of\ + \ the Lower Manhattan Cultural Council's Governors Island Residency in New York\ + \ City.\n\nPaul Cox is a scholar, composer and percussionist in Cleveland, Ohio.\ + \ He currently teaches music history and percussion at Case Western Reserve University\ + \ (CWRU) and the Oberlin Conservatory of Music, where he is a Visiting Assistant\ + \ Professor. 
He earned a PhD in musicology from CWRU in 2011 after the completion\ + \ of his dissertation, Collaged Codes: John Cage's Credo in Us, a study of Cage\ + \ and Merce Cunningham's first dance collaboration in 1942. Current projects include\ + \ composing Just.Are.Same for string quartet, oboe and tape, which weaves together\ + \ an electronic soundscape of spoken words drawn from victims of genocide with\ + \ acoustic and electronic sounds; composing an evening-length work for the ensemble\ + \ NO EXIT, in collaboration with famed world percussionist Jamie Haddad and guitarist\ + \ Bobby Ferrazza; curating a Cage ``Musicircus'' for the opening of the new Museum\ + \ of Contemporary Art in Cleveland, and artistically advising the Sitka Fest in\ + \ Alaska, a three-month-long festival of arts and culture.\n\nRyan Hilty is a\ + \ percussionist earning a degree in Music Education from the Case Western Reserve\ + \ University School of Music in Cleveland, Ohio. He is currently in his second\ + \ undergraduate year, studying percussion with Matthew Larson. He has performed\ + \ as a percussionist in numerous ensembles, including the Crestwood Wind Ensemble,\ + \ Jazz Band, and the Cleveland Youth Wind Symphony. He is the recipient of the\ + \ 2010 John Phillip Sousa Award. After earning his degree in music education,\ + \ Ryan aspires to become a high school band director.\n\nSamuel Haese is a student\ + \ of music and physics at Case Western Reserve University (CWRU) in Cleveland,\ + \ OH. He has studied concert percussion with Matthew Bassett, Feza Zweifel, and\ + \ Matthew Larson, and currently collaborates with Paul Cox in exploring and performing\ + \ modern percussion music. In the meantime, Sam is receiving a BA in Music for\ + \ studying piano with Gerardo Teissonniere through the Cleveland Institute of\ + \ Music. 
Sam intends to also receive a degree in Engineering Physics from CWRU\ + \ which he hopes will allow him to explore and understand music technologies.\ + \ Originally from Berkeley, California, his current plans include moving to a\ + \ sunnier place than Cleveland after graduation within the next two years.\n\n\ + Eric Young is a student at Case Western Reserve University majoring in Computer\ + \ Science and Audio Engineering. He grew up in Kansas City, Missouri. He plans\ + \ on incorporating his interests into a career developing digital audio software.\ + \ Eric has been studying general percussion performance since 2003 and specializes\ + \ in jazz drums.\n\nConcert Venue and Time: Necto, Tuesday May 22, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Jenn Figg and Matthew McCormack and Paul Cox + bibtex: "@incollection{nime2012-music-FiggMcCormackCox2012,\n abstract = {Program\ + \ notes:\n\nThis work merges sound and light to illuminate complex rhythmic motives,\ + \ polyrhythms and metrical patterns in a visual display generated by three drummers\ + \ playing six ``light'' drums. These new instruments bring to life the dreams\ + \ of 20th century synesthetes, such as Wassily Kandinsky and Alexander Scriabin\ + \ and others who sought to create an imagined ``visual music,'' an ideal synthesis\ + \ of music and visual art.\n\nCommunity Light Beacons are percussion instruments\ + \ that leverage the potentials of music, analog technology, and human-generated\ + \ power to visualize sound. These instruments add the dimension of light to the\ + \ ancient tradition of drumming. The drums are user-powered, and when they are\ + \ played---banged, hit and tapped---the vibrations from the drumhead are converted\ + \ to electricity by the internal speaker transducer. 
The generated energy powers\ + \ ultra bright LEDs, which light up with every hit and beam out from the Fresnel\ + \ lens.\n\nComposer(s) Credits:\n\nJenn Figg, Matthew McCormack, Paul Cox\n\n\ + Instrumentalist(s) Credits:\n\nRyan Hilty, Samuel Haese, Eric Young (Kinetic Light\ + \ Drums)\n\nArtist(s) Biography:\n\nJenn Figg is an artist investigating the connections\ + \ between industry, science and art through the transformation of energy, performative\ + \ objects and constructed ecosystems. She graduated with a BFA in Textiles from\ + \ the Rhode Island School of Design and an MFA from the University of California\ + \ at Santa Barbara. She is pursuing her Ph.D. in Media, Art, and Text at Virginia\ + \ Commonwealth University. She lives in Baltimore and is an Assistant Professor\ + \ of Art at Towson University in Maryland. Exhibitions include The Print Center\ + \ in Philadelphia, Pennsylvania, The Art House at the Jones Center in Austin,\ + \ Texas, Virginia MOCA in Virginia Beach, Virginia, the Columbus Center of Science\ + \ and Industry in Columbus, Ohio, the Ingenuity Festival in Cleveland, Ohio. Other\ + \ awards and residencies include the MacDowell Colony, the Lower Manhattan Cultural\ + \ Council Residency, the Great Lakes College Association New Directions Initiative,\ + \ and the University of California Interdisciplinary Humanities Center, Visual,\ + \ Performing \\& Media Arts Award.\n\nMatthew McCormack explores energy transformation\ + \ and expression through technology, kinetic sculpture and blown glass. He graduated\ + \ with a BFA in Glass from The Ohio State University and is now living in Baltimore,\ + \ Maryland. He is pursuing an Interdisciplinary MFA at Towson University. His\ + \ research interests include modifying a speaker transducer for optimum energy\ + \ generation and developing a series of rapid prototyped Fresnel lens stamps for\ + \ quartz crystal light instruments. 
His work has been featured at the Virginia\ + \ Museum of Contemporary Art in Virginia Beach, Virginia, the Columbus Center\ + \ of Science and Industry in Columbus, Ohio, the Toledo Museum of Art in Toledo,\ + \ Ohio, the Rankin Art Gallery at Ferris State University in Big Rapids, Michigan,\ + \ the National Museum of Glass in Eskisehir, Turkey, the Franklin Park Conservatory\ + \ in Columbus, Ohio, the Ingenuity Festival in Cleveland, Ohio, and as part of\ + \ the Lower Manhattan Cultural Council's Governors Island Residency in New York\ + \ City.\n\nPaul Cox is a scholar, composer and percussionist in Cleveland, Ohio.\ + \ He currently teaches music history and percussion at Case Western Reserve University\ + \ (CWRU) and the Oberlin Conservatory of Music, where he is a Visiting Assistant\ + \ Professor. He earned a PhD in musicology from CWRU in 2011 after the completion\ + \ of his dissertation, \\emph{Collaged Codes: John Cage's Credo in Us, a study\ + \ of Cage and Merce Cunningham's first dance collaboration in 1942}. Current projects\ + \ include composing \\emph{Just.Are.Same} for string quartet, oboe and tape, which\ + \ weaves together an electronic soundscape of spoken words drawn from victims\ + \ of genocide with acoustic and electronic sounds; composing an evening-length\ + \ work for the ensemble NO EXIT, in collaboration with famed world percussionist\ + \ Jamie Haddad and guitarist Bobby Ferrazza; curating a Cage ``Musicircus'' for\ + \ the opening of the new Museum of Contemporary Art in Cleveland, and artistically\ + \ advising the Sitka Fest in Alaska, a three-month-long festival of arts and culture.\n\ + \nRyan Hilty is a percussionist earning a degree in Music Education from the Case\ + \ Western Reserve University School of Music in Cleveland, Ohio. He is currently\ + \ in his second undergraduate year, studying percussion with Matthew Larson. 
He\ + \ has performed as a percussionist in numerous ensembles, including the Crestwood\ + \ Wind Ensemble, Jazz Band, and the Cleveland Youth Wind Symphony. He is the recipient\ + \ of the 2010 John Phillip Sousa Award. After earning his degree in music education,\ + \ Ryan aspires to become a high school band director.\n\nSamuel Haese is a student\ + \ of music and physics at Case Western Reserve University (CWRU) in Cleveland,\ + \ OH. He has studied concert percussion with Matthew Bassett, Feza Zweifel, and\ + \ Matthew Larson, and currently collaborates with Paul Cox in exploring and performing\ + \ modern percussion music. In the meantime, Sam is receiving a BA in Music for\ + \ studying piano with Gerardo Teissonniere through the Cleveland Institute of\ + \ Music. Sam intends to also receive a degree in Engineering Physics from CWRU\ + \ which he hopes will allow him to explore and understand music technologies.\ + \ Originally from Berkeley, California, his current plans include moving to a\ + \ sunnier place than Cleveland after graduation within the next two years.\n\n\ + Eric Young is a student at Case Western Reserve University majoring in Computer\ + \ Science and Audio Engineering. He grew up in Kansas City, Missouri. 
He plans\ + \ on incorporating his interests into a career developing digital audio software.\ + \ Eric has been studying general percussion performance since 2003 and specializes\ + \ in jazz drums.\n\nConcert Venue and Time: Necto, Tuesday May 22, 9:00pm},\n\ + \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Jenn Figg and Matthew McCormack\ + \ and Paul Cox},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg\ + \ Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month\ + \ = {May},\n publisher = {Electrical Engineering \\& Computer Science and Performing\ + \ Arts Technology, University of Michigan},\n title = {Thunderclap For Six Kinetic\ + \ Light Drums},\n year = {2012}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain + month: May + publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, + University of Michigan' + title: Thunderclap For Six Kinetic Light Drums + year: 2012 + + +- ENTRYTYPE: incollection + ID: nime2012-music-Tahiroglu2012 + abstract: "Program notes:\n\nInHands, an audiovisual real-time improvisation for\ + \ mobile phones, explores alternative options for musical interactions with two\ + \ mobile instruments in live performances. In this improvisation piece, sound\ + \ output of each mobile phone instrument becomes a sound input for the other instrument;\ + \ to be processed further with an act of responding immediately and spontaneously.\ + \ Granular synthesis module captures audio in real-time and creates the grains\ + \ based on the texture of the sounds. Magnitude, roll and pitch values of the\ + \ acceleration are mapped to the control parameters. 
In the control layer of\ + \ Sub-synthesis module, the change in direction of a touch position is tracked\ + \ on the mobile surface and the distance of the same touch position to 4 certain\ + \ points on the touchscreen is used as a source for creating frequency values.\ + \ This mapping model generates 4 control parameters throughout 2 dimensional input\ + \ layers. Hannah Drayson created the abstract visual-layers of this piece.\n\n\ + Composer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nKoray Tahiroğlu (mobile\ + \ phones)\n\nArtist(s) Biography:\n\nKoray Tahiroğlu is a musician, postdoctoral\ + \ researcher and lecturer in the Department of Media, Aalto University. He practices\ + \ art as a researcher focusing on embodied approaches to sonic interaction in\ + \ participative music experience, as well as a performer of live electronic music.\ + \ He conducted an artistic research with a focus on studying and practicing human\ + \ musical interaction. Tahiroğlu has completed the degree of Doctor of Arts with\ + \ the dissertation entitled \"Interactive Performance Systems: Experimenting with\ + \ Human Musical Interaction\" after its public examination in 2008. He developed\ + \ interactive performance systems and experimental musical instruments, which\ + \ were used in his live performances. Since 2004, he has been also teaching workshops\ + \ and courses introducing artistic strategies and methodologies for creating computational\ + \ art works. Tahiroğlu has performed experimental music in collaboration as well\ + \ as in solo performances in Europe and North America.\n\nConcert Venue and Time:\ + \ Necto, Tuesday May 22, 9:00pm" address: 'Ann Arbor, Michigan, U.S.A.' 
- author: Shawn Trail and Thor Kell and Gabrielle Odowichuk - bibtex: "@incollection{nime2012-music-TrailKellOdowichuk2012,\n abstract = {Program\ - \ notes:\n\n\\emph{M\\aa ne Havn (mounhoun)} is an improvisational multi-media\ - \ performance system for extended vibraphone with accompanying custom LED sculptures\ - \ and projected visuals. The music draws specifically from NYC free jazz, the\ - \ funeral music of the Lobi people of northern Ghana, Dub, psych rock and minimalism.\ - \ Abstract interactive light sculptures actuated from the instrument's audio and\ - \ controller data will accompany the performance, creating a visually shifting\ - \ immersive space. The sculptures, named `Takete' and `Maluma', reference Gestalt\ - \ psychology and the known correlation between our perceptions of sound and light.\ - \ Mappings will reflect this phenomenon. The piece uses a pitched percussion tool\ - \ suite developed by the Music Intelligence \\& Sound Technology Collective at\ - \ the University of Victoria, including: Magic Eyes (3D gesture controller), Ghost\ - \ Hands (control data looper), MSTR DRMMR++ (rhythm template as control switches),\ - \ Fantom Faders (vibraphone bars as control faders) and Gyil Gourd (physical modeling\ - \ of the Lobi xylophone's gourd resonator).\n\nComposer(s) Credits:\n\nShawn Trail,\ - \ Thor Kell, Gabrielle Odowichuk (Artistic Director)\n\nInstrumentalist(s) Credits:\n\ - \nShawn Trail (xtended Vibraphone, Notomoton- robotic drum, suspended cymbal)\n\ - \nArtist(s) Biography:\n\nShawn Trail: Electro-acoustic percussionist, \\textbf{Shawn\ - \ Trail}, designs and builds new performance technologies for acoustic pitched\ - \ percussion instruments integrating musical robotics, physical modeling synthesis,\ - \ and HCI. 
He was Control Interface and Robotics Technician for Pat Metheny's\ - \ Orchestrion World Tour (2010), Fulbright Scholar at Medialogy- Aalborg University,\ - \ Copenhagen researching DSP, synthesis, and HCI (2009), and composer-in-residence\ - \ with League of Electronic Musical Urban Robots (2008). In 2002 he conducted\ - \ field research in Ghana on the Gyil (traditional xylophone). He has a Master\ - \ of Music in Studio Composition from Purchase Conservatory of Music and a BA\ - \ in percussion performance and music technology. He is an Interdisciplinary PhD\ - \ candidate in Computer Science, Electrical Engineering, and Music with MISTIC\ - \ at the University of Victoria. Performing solo under the moniker TXTED, his\ - \ multi- media performance works singularly revolve around minimal, textural evolving\ - \ polyrhythmic, melodic ostinati propelled by a sense of urgency intrinsic to\ - \ cultural music rituals informed by specific traditions.\n\nThor Kell: As a composer,\ - \ programmer, and DJ, \\textbf{Thor Kell} likes combining interesting things in\ - \ unique ways. A recent graduate of the University of Victoria's Music / Computer\ - \ Science program, he will begin his MA at McGill University in the fall, focusing\ - \ on interactions between performer, interface, and software. While at UVic, he\ - \ received a Jamie Cassels Undergraduate Research Award: his research involved\ - \ prototyping and composing for a gestural control mapping system for extending\ - \ the marimba. His traditional compositions are all clockwork riffs and hidden\ - \ structures, based on mathematical constants or time- stretched quotes from the\ - \ English folk music canon: he has written for everything from full orchestra\ - \ to solo piano. He has programmed for The Echo Nest and SoundCloud. 
In his secret\ - \ life as a DJ and techno maven, he has released chart-toppers on Kompakt, impossibly\ - \ deep jams on Fade, and hour-long remix / video symphonies on his own label,\ - \ Tide Pool.\n\nGabrielle Odowichuk is a graduate student in Electrical Engineering\ - \ at the University of Victoria, working in the MISTIC research lab. A specialist\ - \ in DSP and MIR, her research has focused on sound spatialization and gesture-based\ - \ control of sound and music, developing a variety of prototypes, including Fantom\ - \ Faders and Magic Eyes, the mallet tracking and gesture control applications\ - \ used in this performance. For M\\o{a}ne Havn (mounhoun), she draws on previous\ - \ experience in art direction and stage design to produce unique real-time gesture-controlled\ - \ visualizations. She designed, built, and developed the interactive LED sculptures,\ - \ Takete and Maluma, used in this piece, as well as the projections. Her work\ - \ has been published by ICMC, IEEE, and NIME.\n\nConcert Venue and Time: Lydia\ - \ Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n address = {Ann Arbor, Michigan,\ - \ U.S.A.},\n author = {Shawn Trail and Thor Kell and Gabrielle Odowichuk},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ - \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ - \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {M\\aa ne Havn (mounhoun): An Exploration of Gestural\ - \ Language for Pitched Percussion},\n year = {2012}\n}\n" + author: Koray Tahiroğlu + bibtex: "@incollection{nime2012-music-Tahiroglu2012,\n abstract = {Program notes:\n\ + \n\\emph{InHands}, an audiovisual real-time improvisation for mobile phones, explores\ + \ alternative options for musical interactions with two mobile instruments in\ + \ live 
performances. In this improvisation piece, sound output of each mobile\ + \ phone instrument becomes a sound input for the other instrument; to be processed\ + \ further with an act of responding immediately and spontaneously. Granular synthesis\ + \ module captures audio in real-time and creates the grains based on the texture\ + \ of the sounds. Magnitude, roll and pitch values of the acceleration are mapped\ + \ to the control parameters. In the control layer of Sub-synthesis module, the\ + \ change in direction of a touch position is tracked on the mobile surface and\ + \ the distance of the same touch position to 4 certain points on the touchscreen\ + \ is used as a source for creating frequency values. This mapping model generates\ + \ 4 control parameters throughout 2 dimensional input layers. Hannah Drayson created\ + \ the abstract visual-layers of this piece.\n\nComposer(s) Credits:\n\nInstrumentalist(s)\ + \ Credits:\n\nKoray Tahiro\\u{g}lu (mobile phones)\n\nArtist(s) Biography:\n\n\ + Koray Tahiro\\u{g}lu is a musician, postdoctoral researcher and lecturer in the\ + \ Department of Media, Aalto University. He practices art as a researcher focusing\ + \ on embodied approaches to sonic interaction in participative music experience,\ + \ as well as a performer of live electronic music. He conducted an artistic research\ + \ with a focus on studying and practicing human musical interaction. Tahiro\\\ + u{g}lu has completed the degree of Doctor of Arts with the dissertation entitled\ + \ \"Interactive Performance Systems: Experimenting with Human Musical Interaction\"\ + \ after its public examination in 2008. He developed interactive performance systems\ + \ and experimental musical instruments, which were used in his live performances.\ + \ Since 2004, he has been also teaching workshops and courses introducing artistic\ + \ strategies and methodologies for creating computational art works. 
Tahiro\\\ + u{g}lu has performed experimental music in collaboration as well as in solo performances\ + \ in Europe and North America.\n\nConcert Venue and Time: Necto, Tuesday May 22,\ + \ 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Koray Tahiro\\\ + u{g}lu},\n booktitle = {Music Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg Essl\ + \ and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n\ + \ publisher = {Electrical Engineering \\& Computer Science and Performing Arts\ + \ Technology, University of Michigan},\n title = {InHands: Improvisation for Mobile\ + \ Phones},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -10723,61 +10879,72 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: 'Må ne Havn (mounhoun): An Exploration of Gestural Language for Pitched Percussion' + title: 'InHands: Improvisation for Mobile Phones' year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Caldwell2012 - abstract: "Program notes:\n\nTexturologie 12: Gesture Studies (2011) is the most\ - \ recent of my series of pieces that explore the creation of intricate continuous-field\ - \ textures (and borrow the name of a series of paintings by Dubuffet). In this\ - \ piece, I return to my explorations of the potential of the Wii™ remote to control\ - \ computer music in performance. This time, I tried to treat the physical gesture\ - \ as the germ or motive for the music. Some of the gestures are abstract, but\ - \ some are suggestive of familiar activities like petting a cat, ringing a bell,\ - \ smoothing wallpaper , playing a guiro, scooping, tapping, or vigorous stirring.\ - \ (Check out the videos of my other Wiii™ pieces on YouTube. 
Search ``Caldwell\ - \ wii.'')\n\nComposer(s) Credits:\n\nJames Caldwell\n\nInstrumentalist(s) Credits:\n\ - \nJames Caldwell (Wii remotes)\n\nArtist(s) Biography:\n\nJames Caldwell (b. 1957)\ - \ is Professor of Music at Western Illinois University and co-director of the\ - \ New Music Festival. He was named Outstanding Teacher in the College of Fine\ - \ Arts and Communication (2005) and received the inaugural Provost's Award for\ - \ Excellence in Teaching. He was named the 2009 Distinguished Faculty Lecturer.\ - \ He holds degrees from Michigan State University and Northwestern University,\ - \ where he studied composition, theory, and electronic and computer music. Since\ - \ 2004 he has studied studio art---drawing, lithography, painting, and sculpture---at\ - \ WIU as a way to stretch creatively and again experience being a student.\n\n\ - Concert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" + ID: nime2012-music-Lorenzo2012 + abstract: "Program notes:\n\nWhen designing my new electronic instruments, I always\ + \ keep in mind the relationship between the instrument and performer as a tool\ + \ and its master. The instrument should be a channel by which the performer can\ + \ access the dimensions of sound in order to attempt to make music. The music\ + \ should then originate from the musicians intention and not the instrument itself.\ + \ Thus, I design my instruments as intuitive, transparent, and non-idiosyncratic\ + \ mappings between physical gesture and sound.\n\nThis new electronic instrument\ + \ remaps a Logitech Attack 3 Joystick to be able to control sound. Through the\ + \ joystick, the performer can control volume, rhythm, repetition, and pitch of\ + \ custom, preprogrammed sounds. Additionally, the joystick can be used to record\ + \ and playback short audio loops. The product of this design allows for agile\ + \ and intentional electronic musical gestures where rhythm, volume, and pitch\ + \ are clear and deliberate. 
I have been able to reach a wide range of musical\ + \ expressions and I am learning and discovering more as I practice MODIFIED ATTACK.\n\ + \nComposer(s) Credits:\n\nLevy Lorenzo\n\nInstrumentalist(s) Credits:\n\nLevy\ + \ Lorenzo\n\nArtist(s) Biography:\n\nLevy Lorenzo is an electronics engineer and\ + \ percussionist living in New York. Specializing in microcontroller-based, he\ + \ performs experimental, live-electronic & acoustic music using new, custom electronic\ + \ musical instruments and percussion. His work has been featured at STEIM in Amsterdam\ + \ (NL), the Darmstadt School for New Music (DE) and the International Ensemble\ + \ Moderne Academy (AU). Currently, Levy is a Live Sound Engineer for the International\ + \ Contemporary Ensemble and Issue Project Room (Brooklyn, NY). Levy holds B.S.\ + \ and M.Eng. degrees in Electrical & Computer Engineering from Cornell University\ + \ as well as an M.M. degree in Percussion Performance from Stony Brook University,\ + \ where he is currently a D.M.A. candidate. [www.levylorenzo.com]\n\nConcert Venue\ + \ and Time: Necto, Tuesday May 22, 9:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: James Caldwell - bibtex: "@incollection{nime2012-music-Caldwell2012,\n abstract = {Program notes:\n\ - \n\\emph{Texturologie 12: Gesture Studies} (2011) is the most recent of my series\ - \ of pieces that explore the creation of intricate continuous-field textures (and\ - \ borrow the name of a series of paintings by Dubuffet). In this piece, I return\ - \ to my explorations of the potential of the Wii\\texttrademark remote to control\ - \ computer music in performance. This time, I tried to treat the physical gesture\ - \ as the germ or motive for the music. 
Some of the gestures are abstract, but\ - \ some are suggestive of familiar activities like petting a cat, ringing a bell,\ - \ smoothing wallpaper , playing a guiro, scooping, tapping, or vigorous stirring.\ - \ (Check out the videos of my other Wiii\\texttrademark pieces on YouTube. Search\ - \ ``Caldwell wii.'')\n\nComposer(s) Credits:\n\nJames Caldwell\n\nInstrumentalist(s)\ - \ Credits:\n\nJames Caldwell (Wii remotes)\n\nArtist(s) Biography:\n\nJames Caldwell\ - \ (b. 1957) is Professor of Music at Western Illinois University and co-director\ - \ of the New Music Festival. He was named Outstanding Teacher in the College of\ - \ Fine Arts and Communication (2005) and received the inaugural Provost's Award\ - \ for Excellence in Teaching. He was named the 2009 Distinguished Faculty Lecturer.\ - \ He holds degrees from Michigan State University and Northwestern University,\ - \ where he studied composition, theory, and electronic and computer music. Since\ - \ 2004 he has studied studio art---drawing, lithography, painting, and sculpture---at\ - \ WIU as a way to stretch creatively and again experience being a student.\n\n\ - Concert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n\ - \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {James Caldwell},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ - \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ - \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Texturologie 12: Gesture Studies},\n year = {2012}\n\ + author: Levy Lorenzo + bibtex: "@incollection{nime2012-music-Lorenzo2012,\n abstract = {Program notes:\n\ + \nWhen designing my new electronic instruments, I always keep in mind the relationship\ + \ between the instrument and performer as a tool and its master. 
The instrument\ + \ should be a channel by which the performer can access the dimensions of sound\ + \ in order to attempt to make music. The music should then originate from the\ + \ musicians intention and not the instrument itself. Thus, I design my instruments\ + \ as intuitive, transparent, and non-idiosyncratic mappings between physical gesture\ + \ and sound.\n\nThis new electronic instrument remaps a Logitech Attack 3 Joystick\ + \ to be able to control sound. Through the joystick, the performer can control\ + \ volume, rhythm, repetition, and pitch of custom, preprogrammed sounds. Additionally,\ + \ the joystick can be used to record and playback short audio loops. The product\ + \ of this design allows for agile and intentional electronic musical gestures\ + \ where rhythm, volume, and pitch are clear and deliberate. I have been able to\ + \ reach a wide range of musical expressions and I am learning and discovering\ + \ more as I practice MODIFIED ATTACK.\n\nComposer(s) Credits:\n\nLevy Lorenzo\n\ + \nInstrumentalist(s) Credits:\n\nLevy Lorenzo\n\nArtist(s) Biography:\n\nLevy\ + \ Lorenzo is an electronics engineer and percussionist living in New York. Specializing\ + \ in microcontroller-based, he performs experimental, live-electronic \\& acoustic\ + \ music using new, custom electronic musical instruments and percussion. His work\ + \ has been featured at STEIM in Amsterdam (NL), the Darmstadt School for New Music\ + \ (DE) and the International Ensemble Moderne Academy (AU). Currently, Levy is\ + \ a Live Sound Engineer for the International Contemporary Ensemble and Issue\ + \ Project Room (Brooklyn, NY). Levy holds B.S. and M.Eng. degrees in Electrical\ + \ \\& Computer Engineering from Cornell University as well as an M.M. degree in\ + \ Percussion Performance from Stony Brook University, where he is currently a\ + \ D.M.A. candidate. 
[www.levylorenzo.com]\n\nConcert Venue and Time: Necto, Tuesday\ + \ May 22, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Levy\ + \ Lorenzo},\n booktitle = {Music Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg Essl\ + \ and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n\ + \ publisher = {Electrical Engineering \\& Computer Science and Performing Arts\ + \ Technology, University of Michigan},\n title = {Modified Attack},\n year = {2012}\n\ }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression @@ -10786,106 +10953,51 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: 'Texturologie 12: Gesture Studies' + title: Modified Attack year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-SchanklerFrancoisChew2012 - abstract: "Program notes:\n\nMimi, designed by Alexandre François with input from\ - \ Elaine Chew and Isaac Schankler, is a multi-modal interactive musical improvisation\ - \ system that explores the impact of visual feedback in performer-machine interaction.\ - \ The Mimi system enables the performer to experiment with a unique blend of improvisation-like\ - \ on-the-fly invention, composition-like planning and choreography, and expressive\ - \ performance. Mimi's improvisations are created through a factor oracle. The\ - \ visual interface gives the performer and the audience instantaneous and continuous\ - \ information on the state of the oracle, its recombination strategy, the music\ - \ to come, and that recently played. The performer controls when the system starts,\ - \ stops, and learns, the playback volume, and the recombination rate. 
Mimi is\ - \ not only an effective improvisation partner, it also provides a platform through\ - \ which to interrogate the mental models necessary for successful improvisation.\ - \ This performance also features custom synths and mechanisms for inter-oracle\ - \ interaction created for Mimi by Isaac Schankler.\n\nComposer(s) Credits:\n\n\ - Isaac Schankler, Alexandre François, Elaine Chew\n\nInstrumentalist(s) Credits:\n\ - \nIsaac Schankler (keyboard & electronics), Mimi (keyboard & electronics)\n\n\ - Artist(s) Biography:\n\nIsaac Schankler is a Los Angeles-based composer-improviser.\ - \ His recent honors include a grant from Meet the Composer for his opera Light\ - \ and Power, selection as finalist in the ASCAP/SEAMUS Composition Competition,\ - \ and the Damien Top Prize in the ASCAP/Lotte Lehmann Foundation Art Song Competition.\ - \ He is the Artist in Residence of the Music Computation and Cognition Laboratory\ - \ (MuCoaCo) at the USC Viterbi School of Engineering, and an Artistic Director\ - \ of the concert series People Inside Electronics. Isaac holds degrees in composition\ - \ from the USC Thornton School of Music (DMA) and the University of Michigan (MM,\ - \ BM).\n\nElaine Chew is Professor of Digital Media at Queen Mary, University\ - \ of London, and Director of Music Initiatives at the Centre for Digital Music.\ - \ An operations researcher and pianist by training, her research goal is to de-mystify\ - \ music and its performance through the use of formal scientific methods; as a\ - \ performer, she collaborates with composers to present eclectic post-tonal music.\ - \ She received PhD and SM degrees in Operations Research from MIT and a BAS in\ - \ Music and Mathematical & Computational Sciences from Stanford. She is the recipient\ - \ of NSF Career and PECASE awards, and a Radcliffe Institute for Advanced Studies\ - \ fellowship.\n\nAlexandre R.J. 
François's research focuses on the modeling and\ - \ design of interactive (software) systems, as an enabling step towards the understanding\ - \ of perception and cognition. He was a 2007-2008 Fellow of the Radcliffe Institute\ - \ for Advanced Study at Harvard University, where he co-led a music research cluster\ - \ on Analytical Listening Through Interactive Visualization. François received\ - \ the Dipl\\^ome d'Ingénieur from the Institut National Agronomique Paris-Grignon\ - \ in 1993, the Dipl\\^ome d'Etudes Approfondies (M.S.) from the University Paris\ - \ IX - Dauphine in 1994, and the M.S. and Ph.D. degrees in Computer Science from\ - \ the University of Southern California in 1997 and 2000 respectively.\n\nConcert\ - \ Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" + ID: nime2012-music-Donnarumma2012 + abstract: "Program notes:\n\nComposer(s) Credits:\n\nMarco Donnarumma\n\nInstrumentalist(s)\ + \ Credits:\n\nMarco Donnarumma (Xth Sense)\n\nArtist(s) Biography:\n\nMarco Donnarumma:\ + \ New media and sonic artist, performer and teacher, Marco Donnarumma was born\ + \ in Italy and is based in Edinburgh, UK. Weaving a thread around biomedia research,\ + \ musical and theatrical performance, participatory practices and subversive coding,\ + \ Marco looks at the collision of critical creativity with humanized technologies.\ + \ He has performed and spoken in 28 countries worldwide at leading art events,\ + \ specialized festivals and academic conferences. Has been artist in residence\ + \ at Inspace (UK) and the National School of Theatre and Contemporary Dance (DK).\ + \ His work has been funded by the European Commission, Creative Scotland and the\ + \ Danish Arts Council. 
In February 2012 Marco was awarded the first prize in\ + \ the Margaret Guthman Musical Instrument Competition (Georgia Tech Center for\ + \ Music Technology, US) for the Xth Sense, a novel, biophysical interactive system\ + \ named the ``world's most innovative new musical instrument''.\n\nConcert Venue\ + \ and Time: Necto, Tuesday May 22, 9:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Isaac Schankler and Alexandre François and Elaine Chew - bibtex: "@incollection{nime2012-music-SchanklerFrancoisChew2012,\n abstract = {Program\ - \ notes:\n\nMimi, designed by Alexandre Fran\\c{c}ois with input from Elaine Chew\ - \ and Isaac Schankler, is a multi-modal interactive musical improvisation system\ - \ that explores the impact of visual feedback in performer-machine interaction.\ - \ The Mimi system enables the performer to experiment with a unique blend of improvisation-like\ - \ on-the-fly invention, composition-like planning and choreography, and expressive\ - \ performance. Mimi's improvisations are created through a factor oracle. The\ - \ visual interface gives the performer and the audience instantaneous and continuous\ - \ information on the state of the oracle, its recombination strategy, the music\ - \ to come, and that recently played. The performer controls when the system starts,\ - \ stops, and learns, the playback volume, and the recombination rate. 
Mimi is\ - \ not only an effective improvisation partner, it also provides a platform through\ - \ which to interrogate the mental models necessary for successful improvisation.\ - \ This performance also features custom synths and mechanisms for inter-oracle\ - \ interaction created for Mimi by Isaac Schankler.\n\nComposer(s) Credits:\n\n\ - Isaac Schankler, Alexandre Fran\\c{c}ois, Elaine Chew\n\nInstrumentalist(s) Credits:\n\ - \nIsaac Schankler (keyboard \\& electronics), Mimi (keyboard \\& electronics)\n\ - \nArtist(s) Biography:\n\nIsaac Schankler is a Los Angeles-based composer-improviser.\ - \ His recent honors include a grant from Meet the Composer for his opera Light\ - \ and Power, selection as finalist in the ASCAP/SEAMUS Composition Competition,\ - \ and the Damien Top Prize in the ASCAP/Lotte Lehmann Foundation Art Song Competition.\ - \ He is the Artist in Residence of the Music Computation and Cognition Laboratory\ - \ (MuCoaCo) at the USC Viterbi School of Engineering, and an Artistic Director\ - \ of the concert series People Inside Electronics. Isaac holds degrees in composition\ - \ from the USC Thornton School of Music (DMA) and the University of Michigan (MM,\ - \ BM).\n\nElaine Chew is Professor of Digital Media at Queen Mary, University\ - \ of London, and Director of Music Initiatives at the Centre for Digital Music.\ - \ An operations researcher and pianist by training, her research goal is to de-mystify\ - \ music and its performance through the use of formal scientific methods; as a\ - \ performer, she collaborates with composers to present eclectic post-tonal music.\ - \ She received PhD and SM degrees in Operations Research from MIT and a BAS in\ - \ Music and Mathematical \\& Computational Sciences from Stanford. She is the\ - \ recipient of NSF Career and PECASE awards, and a Radcliffe Institute for Advanced\ - \ Studies fellowship.\n\nAlexandre R.J. 
Fran\\c{c}ois's research focuses on the\ - \ modeling and design of interactive (software) systems, as an enabling step towards\ - \ the understanding of perception and cognition. He was a 2007-2008 Fellow of\ - \ the Radcliffe Institute for Advanced Study at Harvard University, where he co-led\ - \ a music research cluster on Analytical Listening Through Interactive Visualization.\ - \ Fran\\c{c}ois received the Dipl\\^{o}me d'Ing\\'{e}nieur from the Institut National\ - \ Agronomique Paris-Grignon in 1993, the Dipl\\^{o}me d'Etudes Approfondies (M.S.)\ - \ from the University Paris IX - Dauphine in 1994, and the M.S. and Ph.D. degrees\ - \ in Computer Science from the University of Southern California in 1997 and 2000\ - \ respectively.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday\ - \ May 23, 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Isaac\ - \ Schankler and Alexandre Fran\\c{c}ois and Elaine Chew},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ - \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ - \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Mimi: Multi-modal Interaction for Musical Improvisation},\n year =\ + author: Marco Donnarumma + bibtex: "@incollection{nime2012-music-Donnarumma2012,\n abstract = {Program notes:\n\ + \nComposer(s) Credits:\n\nMarco Donnarumma\n\nInstrumentalist(s) Credits:\n\n\ + Marco Donnarumma (Xth Sense)\n\nArtist(s) Biography:\n\nMarco Donnarumma: New\ + \ media and sonic artist, performer and teacher, \\textbf{Marco Donnarumma} was\ + \ born in Italy and is based in Edinburgh, UK. 
Weaving a thread around biomedia\ + \ research, musical and theatrical performance, participatory practices and subversive\ + \ coding, Marco looks at the collision of critical creativity with humanized technologies.\ + \ He has performed and spoken in 28 countries worldwide at leading art events,\ + \ specialized festivals and academic conferences. Has been artist in residence\ + \ at Inspace (UK) and the National School of Theatre and Contemporary Dance (DK).\ + \ His work has been funded by the European Commission, Creative Scotland and the\ + \ Danish Arts Council. In February 2012 Marco was awarded the first prize in\ + \ the Margaret Guthman Musical Instrument Competition (Georgia Tech Center for\ + \ Music Technology, US) for the Xth Sense, a novel, biophysical interactive system\ + \ named the ``world's most innovative new musical instrument''.\n\nConcert Venue\ + \ and Time: Necto, Tuesday May 22, 9:00pm},\n address = {Ann Arbor, Michigan,\ + \ U.S.A.},\n author = {Marco Donnarumma},\n booktitle = {Music Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n day\ + \ = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich and\ + \ Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering \\\ + & Computer Science and Performing Arts Technology, University of Michigan},\n\ + \ title = {Music for Flesh II, interactive music for enhanced body},\n year =\ \ {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression @@ -10894,71 +11006,57 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: 'Mimi: Multi-modal Interaction for Musical Improvisation' + title: 'Music for Flesh II, interactive music for enhanced body' year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-StapletonDavis2012 - abstract: "Program notes:\n\nThis performance explores notions of 
presence and absence,\ - \ technologically mediated communication and audience perception through the staging\ - \ of intentionally ambiguous but repeatable sonic interactions taking place across\ - \ two geographically separate locations.\n\nThanks to SARC, CCRMA & Bournemouth\ - \ University for support during the development of this project.\n\nComposer(s)\ - \ Credits:\n\nPaul Stapleton and Tom Davis\n\nInstrumentalist(s) Credits:\n\n\ - Paul Stapleton (Networked Instrument), Tom Davis (Networked Instrument)\n\nArtist(s)\ - \ Biography:\n\nPaul Stapleton is a sound artist, improviser and writer originally\ - \ from Southern California, currently based in Belfast, Northern Ireland. Paul\ - \ designs and performs with a variety of modular metallic sound sculptures, custom\ - \ made electronics, found objects and electric guitars in locations ranging from\ - \ experimental music clubs in Berlin to remote beaches on Vancouver Island. He\ - \ is currently involved in a diverse range of artistic collaborations including:\ - \ performance duo ABODE with vocalist Caroline Pugh, interdisciplinary arts group\ - \ theybreakinpieces, improvisation duo with saxophonist Simon Rose, Eric Lyon's\ - \ Noise Quartet, and the DIY quartet E=MCHammer. Since 2007, Paul has been on\ - \ the faculty at the Sonic Arts Research Centre where he teaches and supervises\ - \ Master's and PhD research in performance technologies, interaction design and\ - \ site-specific art.\n\nTom Davis is a digital artist working mainly in the medium\ - \ of sound installation. His practice and theory based output involves the creation\ - \ of technology-led environments for interaction. He performs regularly as part\ - \ of JDTJDJ with Jason Dixon and as part of the Jackson4s. He has performed and\ - \ exhibited across Europe and in the US. 
Davis is currently a lecturer at the\ - \ University of Bournemouth and holds a PhD from the Sonic Arts Research Centre.\n\ + ID: nime2012-music-Stine2012 + abstract: "Program notes:\n\nThis piece consists of a partially pre-composed acousmatic\ + \ composition actualized in real time by hand motion. The audio generated by the\ + \ hand motions is analyzed, colorized and projected beside the performer during\ + \ the performance. The motions and content of this piece are inspired by the late\ + \ Merce Cunningham and this performance is dedicated to him.\n\nComposer(s) Credits:\n\ + \nEli Stine\n\nInstrumentalist(s) Credits:\n\nEli Stine\n\nArtist(s) Biography:\n\ + \nEli Stine (born 1991 in Greenville, NC) is a composer, programmer, and sound\ + \ designer currently pursuing a Double Degree at Oberlin College, studying Technology\ + \ In Music And Related Arts and composition in the conservatory and Computer Science\ + \ in the college. Winner of the undergraduate award from the Society for Electro-Acoustic\ + \ Music in the United States (SEAMUS) in 2011, Eli has studied with Tom Lopez,\ + \ Lewis Nielson, and Per Bloland at Oberlin, focusing on electroacoustic and acoustic\ + \ music, as well as live performance with electronics. While at Oberlin Eli has\ + \ performed with Oberlin's Contemporary Music Ensemble, had works played in concert\ + \ by Oberlin's Society of Composers, inc. ensemble and student ensemble ACADEMY,\ + \ and collaborated with students and faculty across disciplines on collaborative\ + \ multimedia projects. More information about Eli's work can be found at www.oberlin.edu/student/estine/.\n\ \nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' 
- author: Paul Stapleton and Tom Davis - bibtex: "@incollection{nime2012-music-StapletonDavis2012,\n abstract = {Program\ - \ notes:\n\nThis performance explores notions of presence and absence, technologically\ - \ mediated communication and audience perception through the staging of intentionally\ - \ ambiguous but repeatable sonic interactions taking place across two geographically\ - \ separate locations.\n\nThanks to SARC, CCRMA \\& Bournemouth University for\ - \ support during the development of this project.\n\nComposer(s) Credits:\n\n\ - Paul Stapleton and Tom Davis\n\nInstrumentalist(s) Credits:\n\nPaul Stapleton\ - \ (Networked Instrument), Tom Davis (Networked Instrument)\n\nArtist(s) Biography:\n\ - \nPaul Stapleton is a sound artist, improviser and writer originally from Southern\ - \ California, currently based in Belfast, Northern Ireland. Paul designs and performs\ - \ with a variety of modular metallic sound sculptures, custom made electronics,\ - \ found objects and electric guitars in locations ranging from experimental music\ - \ clubs in Berlin to remote beaches on Vancouver Island. He is currently involved\ - \ in a diverse range of artistic collaborations including: performance duo ABODE\ - \ with vocalist Caroline Pugh, interdisciplinary arts group theybreakinpieces,\ - \ improvisation duo with saxophonist Simon Rose, Eric Lyon's Noise Quartet, and\ - \ the DIY quartet E=MCHammer. Since 2007, Paul has been on the faculty at the\ - \ Sonic Arts Research Centre where he teaches and supervises Master's and PhD\ - \ research in performance technologies, interaction design and site-specific art.\n\ - \nTom Davis is a digital artist working mainly in the medium of sound installation.\ - \ His practice and theory based output involves the creation of technology-led\ - \ environments for interaction. He performs regularly as part of JDTJDJ with Jason\ - \ Dixon and as part of the Jackson4s. 
He has performed and exhibited across Europe\ - \ and in the US. Davis is currently a lecturer at the University of Bournemouth\ - \ and holds a PhD from the Sonic Arts Research Centre.\n\nConcert Venue and Time:\ - \ Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n address = {Ann Arbor,\ - \ Michigan, U.S.A.},\n author = {Paul Stapleton and Tom Davis},\n booktitle =\ - \ {Music Proceedings of the International Conference on New Interfaces for Musical\ + author: Eli Stine + bibtex: "@incollection{nime2012-music-Stine2012,\n abstract = {Program notes:\n\n\ + This piece consists of a partially pre-composed acousmatic composition actualized\ + \ in real time by hand motion. The audio generated by the hand motions is analyzed,\ + \ colorized and projected beside the performer during the performance. The motions\ + \ and content of this piece are inspired by the late Merce Cunningham and this\ + \ performance is dedicated to him.\n\nComposer(s) Credits:\n\nEli Stine\n\nInstrumentalist(s)\ + \ Credits:\n\nEli Stine\n\nArtist(s) Biography:\n\nEli Stine (born 1991 in Greenville,\ + \ NC) is a composer, programmer, and sound designer currently pursuing a Double\ + \ Degree at Oberlin College, studying Technology In Music And Related Arts and\ + \ composition in the conservatory and Computer Science in the college. Winner\ + \ of the undergraduate award from the Society for Electro-Acoustic Music in the\ + \ United States (SEAMUS) in 2011, Eli has studied with Tom Lopez, Lewis Nielson,\ + \ and Per Bloland at Oberlin, focusing on electroacoustic and acoustic music,\ + \ as well as live performance with electronics. While at Oberlin Eli has performed\ + \ with Oberlin's Contemporary Music Ensemble, had works played in concert by Oberlin's\ + \ Society of Composers, inc. 
ensemble and student ensemble ACADEMY, and collaborated\ + \ with students and faculty across disciplines on collaborative multimedia projects.\ + \ More information about Eli's work can be found at www.oberlin.edu/student/estine/.\n\ + \nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n\ + \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Eli Stine},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Ambiguous Devices},\n year = {2012}\n}\n" + \ of Michigan},\n title = {Motion-Influenced Composition},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -10966,123 +11064,69 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Ambiguous Devices + title: Motion-Influenced Composition year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Blasco2012 - abstract: "Program notes:\n\nThe Theremin Orchestra is a composition for three voices\ - \ and a modular system of four spheres with built-in Theremin Sensors. Two of\ - \ those spheres will control different effects on the voices and the rest will\ - \ be played as Theremin instruments. The performance is presented as a sound event\ - \ where initially the three voices appear raw and naked and as the composition\ - \ unfolds the voices will be increasingly distorted through different effects\ - \ applied with the Theremin controllers. 
In the climax of its progression the\ - \ other two Theremin balls will become audible merging their sound with the mesh\ - \ of vocal reshaped sources, not allowing to distinguish where the human ends\ - \ and the machine starts.\n\nComposer(s) Credits:\n\nMercedes Blasco\n\nInstrumentalist(s)\ - \ Credits:\n\nMercedes Blasco (voice, Theremin controllers, EMS synth), Thessia\ - \ Machado and Sonia Megías (voice, Theremin instrument)\n\nArtist(s) Biography:\n\ - \nMerche Blasco: Trained as a Telecommunications Engineer, Merche Blasco developed\ - \ in parallel to her studies a more creative path related with music, video, installation\ - \ and performance. She created her alter ego ``Burbuja'' as a vehicle for her\ - \ own musical exploration and since its conception she has participated & collaborated\ - \ with various artists, establishing a strong relationship between different mediums\ - \ of artistic expression & her own musical direction lsuch as Lucy Orta at the\ - \ Venice biennale, Chicks on Speed and Cristian Vogel.\nHer debut,``burbuja''\ - \ (station55 records) was presented in Sonar 2007 and has been touring in different\ - \ cities in Europe, USA and Canada in the past years: Mapping Festival (Geneve),\ - \ Sonic Art Circuits (Washington), Queens Museum of Art (New York). Thanks to\ - \ a Fulbright Grant she is currently a MPS Candidate in the Interactive Telecommunications\ - \ Program (NYU) where she is mainly researching about new tools for Electronic\ - \ Music Performance.\n\nThessia Machado, Brazil/NY, investigates the physicality\ - \ of sound and its effect on our perception of space. Many of her recent sculptures\ - \ and installations function also as unorthodox instruments---pieces that have\ - \ a real-time, live component. The expressive potential is active and changeable\ - \ as the viewer interacts and performs with it. 
Thessia's installations and video\ - \ pieces have been exhibited in New York, London, Philadelphia, Paris, Amsterdam,\ - \ Dublin, Berlin and Athens.\nShe has been awarded residencies at the MacDowell\ - \ Colony, Yaddo, the Atlantic Center for the Arts, the Irish Museum of Modern\ - \ Art and the Vermont Studio Center and she is a recipient of fellowships from\ - \ the New York Foundation for the Arts, The Experimental Television Center and\ - \ The Bronx Museum. Performing as link, Thessia Machado, a self-avowed noisician,\ - \ employs a changing line-up of handmade, found and modified instruments to build\ - \ driving, meditative soundscapes.\n\nSonia Megias was born on June 20th 1982\ - \ in Almansa, a village at the southeast of Spain. Since she was a kid, she has\ - \ been abducted by the arts, nature and spirituality. Even today, some years later,\ - \ she tries to interweave these beautiful disciplines, with the goal of transmit\ - \ to the world her perception of Beauty or True.\nThanks to the intensity of her\ - \ musical production, she finds herself living in New York since 2010, on the\ - \ Fulbright and a NYU Steinhardt grants. 
Here, she combines her studies at the\ - \ New York University with the compositions of her last commissioned pieces.\n\ - Her music has been performed in different music halls and festivals, underlining\ - \ the following: Auditorio 400 at the National Museum of Contemporary Art ``Queen\ - \ Sophia'' (2012, 2008); Cervantes Institute of New York (2012, 2011); Houston\ - \ University, at Opera Vista Festival (2011); Consulate of Argentina in New York,\ - \ at a Tribute to Alfonsina Storni (2009); Embassy of France in Spain (2009);\ - \ United Nations Headquarters (2008).\n\nConcert Venue and Time: Necto, Wednesday\ - \ May 23, 9:00pm" + ID: nime2012-music-Ciufo2012 + abstract: "Program notes:\n\nFragments is an improvisational performance piece that\ + \ utilizes physical treatments inside an acoustic piano, as well as digital treatments\ + \ provided by computer-based digital signal processing. In addition to using a\ + \ few simple physical controls (foot pedals and custom iPad interface) this piece\ + \ also uses the performed audio stream as a gestural control source. The preformed\ + \ audio stream is analyzed and important features are extracted. The current state\ + \ and trajectory of these audio features are used to influence the behavior of\ + \ the real-time signal processing environment. This creates a computer-mediated\ + \ performance system that combines the capabilities of computation and sound processing\ + \ with the tactile and expressive intimacy of the prepared acoustic piano. 
Fragments\ + \ invites the listener into a unique and complex sonic environment where expectation,\ + \ repetition, spontaneity, and discovery are intertwined.\n\nComposer(s) Credits:\n\ + \nThomas Ciufo\n\nInstrumentalist(s) Credits:\n\nThomas Ciufo\n\nArtist(s) Biography:\n\ + \nThomas Ciufo is a composer, improviser, sound artist, and researcher working\ + \ primarily in the areas of electroacoustic improvisational performance and hybrid\ + \ instrument / interactive systems design. He currently serves as Assistant Professor\ + \ of Recording Arts and Music Technology in the Department of Music at Towson\ + \ University. He has been active for many years in the areas of composition, performance,\ + \ interactive installation, video work, as well as music technology education.\ + \ Festival performances include the SPARK festival in Minneapolis, the Enaction\ + \ in Arts conference in Grenoble, the International Society for Improvised Music\ + \ conference, the NWEAMO festival, the Extensible Electric Guitar Festival, various\ + \ NIME conferences, and the ICMC / Ear to the Earth conference.\n\nConcert Venue\ + \ and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Mercedes Blasco - bibtex: "@incollection{nime2012-music-Blasco2012,\n abstract = {Program notes:\n\ - \n\\emph{The Theremin Orchestra} is a composition for three voices and a modular\ - \ system of four spheres with built-in Theremin Sensors. Two of those spheres\ - \ will control different effects on the voices and the rest will be played as\ - \ Theremin instruments. The performance is presented as a sound event where initially\ - \ the three voices appear raw and naked and as the composition unfolds the voices\ - \ will be increasingly distorted through different effects applied with the Theremin\ - \ controllers. 
In the climax of its progression the other two Theremin balls will\ - \ become audible merging their sound with the mesh of vocal reshaped sources,\ - \ not allowing to distinguish where the human ends and the machine starts.\n\n\ - Composer(s) Credits:\n\nMercedes Blasco\n\nInstrumentalist(s) Credits:\n\nMercedes\ - \ Blasco (voice, Theremin controllers, EMS synth), Thessia Machado and Sonia Meg\\\ - '{i}as (voice, Theremin instrument)\n\nArtist(s) Biography:\n\nMerche Blasco:\ - \ Trained as a Telecommunications Engineer, \\textbf{Merche Blasco} developed\ - \ in parallel to her studies a more creative path related with music, video, installation\ - \ and performance. She created her alter ego ``Burbuja'' as a vehicle for her\ - \ own musical exploration and since its conception she has participated \\& collaborated\ - \ with various artists, establishing a strong relationship between different mediums\ - \ of artistic expression \\& her own musical direction lsuch as Lucy Orta at\ - \ the Venice biennale, Chicks on Speed and Cristian Vogel.\nHer debut,``burbuja''\ - \ (station55 records) was presented in Sonar 2007 and has been touring in different\ - \ cities in Europe, USA and Canada in the past years: Mapping Festival (Geneve),\ - \ Sonic Art Circuits (Washington), Queens Museum of Art (New York). Thanks to\ - \ a Fulbright Grant she is currently a MPS Candidate in the Interactive Telecommunications\ - \ Program (NYU) where she is mainly researching about new tools for Electronic\ - \ Music Performance.\n\nThessia Machado, Brazil/NY, investigates the physicality\ - \ of sound and its effect on our perception of space. Many of her recent sculptures\ - \ and installations function also as unorthodox instruments---pieces that have\ - \ a real-time, live component. The expressive potential is active and changeable\ - \ as the viewer interacts and performs with it. 
Thessia's installations and video\ - \ pieces have been exhibited in New York, London, Philadelphia, Paris, Amsterdam,\ - \ Dublin, Berlin and Athens.\nShe has been awarded residencies at the MacDowell\ - \ Colony, Yaddo, the Atlantic Center for the Arts, the Irish Museum of Modern\ - \ Art and the Vermont Studio Center and she is a recipient of fellowships from\ - \ the New York Foundation for the Arts, The Experimental Television Center and\ - \ The Bronx Museum. Performing as link, Thessia Machado, a self-avowed noisician,\ - \ employs a changing line-up of handmade, found and modified instruments to build\ - \ driving, meditative soundscapes.\n\nSonia Megias was born on June 20th 1982\ - \ in Almansa, a village at the southeast of Spain. Since she was a kid, she has\ - \ been abducted by the arts, nature and spirituality. Even today, some years later,\ - \ she tries to interweave these beautiful disciplines, with the goal of transmit\ - \ to the world her perception of Beauty or True.\nThanks to the intensity of her\ - \ musical production, she finds herself living in New York since 2010, on the\ - \ Fulbright and a NYU Steinhardt grants. 
Here, she combines her studies at the\ - \ New York University with the compositions of her last commissioned pieces.\n\ - Her music has been performed in different music halls and festivals, underlining\ - \ the following: Auditorio 400 at the National Museum of Contemporary Art ``Queen\ - \ Sophia'' (2012, 2008); Cervantes Institute of New York (2012, 2011); Houston\ - \ University, at Opera Vista Festival (2011); Consulate of Argentina in New York,\ - \ at a Tribute to Alfonsina Storni (2009); Embassy of France in Spain (2009);\ - \ United Nations Headquarters (2008).\n\nConcert Venue and Time: Necto, Wednesday\ - \ May 23, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Mercedes\ - \ Blasco},\n booktitle = {Music Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg Essl\ - \ and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n\ - \ publisher = {Electrical Engineering \\& Computer Science and Performing Arts\ - \ Technology, University of Michigan},\n title = {The Theremin Orchestra},\n year\ - \ = {2012}\n}\n" + author: Thomas Ciufo + bibtex: "@incollection{nime2012-music-Ciufo2012,\n abstract = {Program notes:\n\n\ + \\emph{Fragments} is an improvisational performance piece that utilizes physical\ + \ treatments inside an acoustic piano, as well as digital treatments provided\ + \ by computer-based digital signal processing. In addition to using a few simple\ + \ physical controls (foot pedals and custom iPad interface) this piece also uses\ + \ the performed audio stream as a gestural control source. The preformed audio\ + \ stream is analyzed and important features are extracted. The current state and\ + \ trajectory of these audio features are used to influence the behavior of the\ + \ real-time signal processing environment. 
This creates a computer-mediated performance\ + \ system that combines the capabilities of computation and sound processing with\ + \ the tactile and expressive intimacy of the prepared acoustic piano. \\emph{Fragments}\ + \ invites the listener into a unique and complex sonic environment where expectation,\ + \ repetition, spontaneity, and discovery are intertwined.\n\nComposer(s) Credits:\n\ + \nThomas Ciufo\n\nInstrumentalist(s) Credits:\n\nThomas Ciufo\n\nArtist(s) Biography:\n\ + \nThomas Ciufo is a composer, improviser, sound artist, and researcher working\ + \ primarily in the areas of electroacoustic improvisational performance and hybrid\ + \ instrument / interactive systems design. He currently serves as Assistant Professor\ + \ of Recording Arts and Music Technology in the Department of Music at Towson\ + \ University. He has been active for many years in the areas of composition, performance,\ + \ interactive installation, video work, as well as music technology education.\ + \ Festival performances include the SPARK festival in Minneapolis, the Enaction\ + \ in Arts conference in Grenoble, the International Society for Improvised Music\ + \ conference, the NWEAMO festival, the Extensible Electric Guitar Festival, various\ + \ NIME conferences, and the ICMC / Ear to the Earth conference.\n\nConcert Venue\ + \ and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n address =\ + \ {Ann Arbor, Michigan, U.S.A.},\n author = {Thomas Ciufo},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ + \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ + \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ + \ title = {Fragments},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression 
day: 21-23 @@ -11090,62 +11134,113 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: The Theremin Orchestra + title: Fragments year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Dupuis2012 - abstract: "Program notes:\n\nStelaextraction uses the electronic extension capabilities\ - \ of the Yerbanaut to construct a musical composition through self-reference across\ - \ different timescales. The Yerbanaut is a custom electro-acoustic kalimba built\ - \ from a yerba mate gourd, with the tines placed in a circular pattern rather\ - \ than the usual horizontal arrangement. Its sensors are intended to make use\ - \ of this new arrangement, with force-sensitive buttons giving the otherwise inert\ - \ left hand expressive capabilities, and a distance sensor allowing the right\ - \ hand's motion to determine aspects of the processing. In Stelaextraction, all\ - \ acoustic and processed sounds are recorded to a single buffer, the contents\ - \ of which can be scrubbed through using the right hand's distance sensor. In\ - \ this way, past musical gestures can be explored and then re-explored, with the\ - \ recursive processing developing self-similar musical patterns over the course\ - \ of the piece.\n\nComposer(s) Credits:\n\nAlexander Dupuis\n\nInstrumentalist(s)\ - \ Credits:\n\nAlexander Dupuis (Yerbanaut)\n\nArtist(s) Biography:\n\nAlexander\ - \ Dupuis develops real-time audiovisual feedback systems mediated by performers,\ - \ sensors, musicians, matrices, bodies, scores, games, and environments. He also\ - \ composes, arranges and performs sounds for guitars, liturgies, chamber groups,\ - \ horse duos, microwave cookbooks, and celebrity voices. 
He graduated from Brown\ - \ University's MEME program as an undergraduate in 2010, and is now in his second\ - \ year of the Digital Musics masters program at Dartmouth College.\n\nConcert\ - \ Venue and Time: Necto, Wednesday May 23, 9:00pm" - address: 'Ann Arbor, Michigan, U.S.A.' - author: Alexander Dupuis - bibtex: "@incollection{nime2012-music-Dupuis2012,\n abstract = {Program notes:\n\ - \n\\emph{Stelaextraction} uses the electronic extension capabilities of the Yerbanaut\ - \ to construct a musical composition through self-reference across different timescales.\ - \ The Yerbanaut is a custom electro-acoustic kalimba built from a yerba mate gourd,\ - \ with the tines placed in a circular pattern rather than the usual horizontal\ - \ arrangement. Its sensors are intended to make use of this new arrangement, with\ - \ force-sensitive buttons giving the otherwise inert left hand expressive capabilities,\ - \ and a distance sensor allowing the right hand's motion to determine aspects\ - \ of the processing. In Stelaextraction, all acoustic and processed sounds are\ - \ recorded to a single buffer, the contents of which can be scrubbed through using\ - \ the right hand's distance sensor. In this way, past musical gestures can be\ - \ explored and then re-explored, with the recursive processing developing self-similar\ - \ musical patterns over the course of the piece.\n\nComposer(s) Credits:\n\nAlexander\ - \ Dupuis\n\nInstrumentalist(s) Credits:\n\nAlexander Dupuis (Yerbanaut)\n\nArtist(s)\ - \ Biography:\n\nAlexander Dupuis develops real-time audiovisual feedback systems\ - \ mediated by performers, sensors, musicians, matrices, bodies, scores, games,\ - \ and environments. 
He also composes, arranges and performs sounds for guitars,\ - \ liturgies, chamber groups, horse duos, microwave cookbooks, and celebrity voices.\ - \ He graduated from Brown University's MEME program as an undergraduate in 2010,\ - \ and is now in his second year of the Digital Musics masters program at Dartmouth\ - \ College.\n\nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm},\n address\ - \ = {Ann Arbor, Michigan, U.S.A.},\n author = {Alexander Dupuis},\n booktitle\ + ID: nime2012-music-Novello2012 + abstract: "Program notes:\n\nIn this piece we explore the personality of the ``post-modern\ + \ man''. Exposed to aggressive stimulation and overwhelming data streams, he must\ + \ make important choices to follow a rational ``mind path'' while his time quickly\ + \ runs out. The performer, impersonating the post-modern man, wears an electro-encephalographic\ + \ headset that detects his mind activity. The analysis of its output reveals the\ + \ power of the performer's three thoughts which are connected to forward movement,\ + \ turn left, and turn right in the virtual maze projected on a screen.\n\nDespite\ + \ the distracting external forces, embodied by the sound and flickering visuals,\ + \ the performer must remain paradoxically calm to generate the correct states\ + \ of mind that let him navigate his way out of the maze. Every time the performer\ + \ crosses a red boundary in the maze, he gets closer to the exit, and a new stochastic\ + \ musical scene is triggered. The time and structure of the composition is thus\ + \ entirely determined by the choices and concentration of the performer.\n\nComposer(s)\ + \ Credits:\n\nAlberto Novello\n\nInstrumentalist(s) Credits:\n\nAlberto Novello\ + \ (music, EEG analysis, top visuals), Emmanuel Elias Flores (frontal visuals),\ + \ Honza Svasek (Butoh, EEG control), E. McKinney (photography)\n\nArtist(s) Biography:\n\ + \nAlberto Novello a.k.a. 
JesterN studied piano and double bass at the Conservatory\
+ \ of Udine, graduated in Physics at the University of Trieste, he completed in\
+ \ 2004 the master ``Art, Science and Technologies'' at the Institut National Polytechnique\
+ \ of Grenoble, France, under the guidance of J.C. Risset, and C. Cadoz. He was\
+ \ teacher of electronic music composition at the Conservatory of Cuneo, Italy.\
+ \ From 2004 to 2009 he worked at the Philips Research, Eindhoven, Netherlands,\
+ \ in the field of Music Perception and Music Information Retrieval with several\
+ \ publications in international conferences and journals. In 2009 he received\
+ \ a PhD degree at the Technische Universiteit Eindhoven. He attended the Master\
+ \ of Sonology under the guidance of Paul Berg, Joel Ryan, and Richard Barret.\
+ \ Since 2004 he produced several electronic audio visual pieces assisting among\
+ \ others Alvin Lucier, Trevor Wishart, and Butch Morris. His pieces can be found\
+ \ on his website: http://dindisalvadi.free.fr/.\n\nHonza Svasek was born in 1954\
+ \ in the Netherlands. After his studies he\nmoved to Copenhagen where he became\
+ \ a graphic designer. Then he worked as computer professional and became a UNIX/Linux\
+ \ expert. At present he is a visual artist and performer. Honza started his research\
+ \ of Butoh 5 years ago. He studied with Butoh performers such as Itto Morita,\
+ \ Atsushi Takenouchi, Ken May, Yumiko Yoshioka, Yuko Ota, Imre Thormann. Currently\
+ \ he is studying with Rhizome Lee at the Himalaya Subbody Butoh School. http://Honz.nl\n\
+ \nEmmanuel Elias Flores is a media designer and software artist based in the Netherlands.\
+ \ He studied music and cinema in Mexico and Sonology at the Royal Conservatory\
+ \ in The Hague (NL). His work is centered around the idea of exploring different\
+ \ types of cinematic experiences and the enhancement of new narrative forms which\
+ \ bridge technology, art and perception. 
His work has been presented on a wide\ + \ range of formats: from audiovisual pieces for electronic music, opera, dance\ + \ and live cinema sets, to the design of public installations and interactive\ + \ applications for mobile devices. In parallel to his creative activities he has\ + \ worked as a developer and IT/video consultant for different commercial and art\ + \ enterprises and as a programmer for portable devices. www.emmanuelflores.net\n\ + \nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Alberto Novello + bibtex: "@incollection{nime2012-music-Novello2012,\n abstract = {Program notes:\n\ + \nIn this piece we explore the personality of the ``post-modern man''. Exposed\ + \ to aggressive stimulation and overwhelming data streams, he must make important\ + \ choices to follow a rational ``mind path'' while his time quickly runs out.\ + \ The performer, impersonating the post-modern man, wears an electro-encephalographic\ + \ headset that detects his mind activity. The analysis of its output reveals the\ + \ power of the performer's three thoughts which are connected to forward movement,\ + \ turn left, and turn right in the virtual maze projected on a screen.\n\nDespite\ + \ the distracting external forces, embodied by the sound and flickering visuals,\ + \ the performer must remain paradoxically calm to generate the correct states\ + \ of mind that let him navigate his way out of the maze. Every time the performer\ + \ crosses a red boundary in the maze, he gets closer to the exit, and a new stochastic\ + \ musical scene is triggered. The time and structure of the composition is thus\ + \ entirely determined by the choices and concentration of the performer.\n\nComposer(s)\ + \ Credits:\n\nAlberto Novello\n\nInstrumentalist(s) Credits:\n\nAlberto Novello\ + \ (music, EEG analysis, top visuals), Emmanuel Elias Flores (frontal visuals),\ + \ Honza Svasek (Butoh, EEG control), E. 
McKinney (photography)\n\nArtist(s) Biography:\n\
+ \nAlberto Novello a.k.a. JesterN studied piano and double bass at the Conservatory\
+ \ of Udine, graduated in Physics at the University of Trieste, he completed in\
+ \ 2004 the master ``Art, Science and Technologies'' at the Institut National Polytechnique\
+ \ of Grenoble, France, under the guidance of J.C. Risset, and C. Cadoz. He was\
+ \ teacher of electronic music composition at the Conservatory of Cuneo, Italy.\
+ \ From 2004 to 2009 he worked at the Philips Research, Eindhoven, Netherlands,\
+ \ in the field of Music Perception and Music Information Retrieval with several\
+ \ publications in international conferences and journals. In 2009 he received\
+ \ a PhD degree at the Technische Universiteit Eindhoven. He attended the Master\
+ \ of Sonology under the guidance of Paul Berg, Joel Ryan, and Richard Barret.\
+ \ Since 2004 he produced several electronic audio visual pieces assisting among\
+ \ others Alvin Lucier, Trevor Wishart, and Butch Morris. His pieces can be found\
+ \ on his website: http://dindisalvadi.free.fr/.\n\nHonza Svasek was born in 1954\
+ \ in the Netherlands. After his studies he\nmoved to Copenhagen where he became\
+ \ a graphic designer. Then he worked as computer professional and became a UNIX/Linux\
+ \ expert. At present he is a visual artist and performer. Honza started his research\
+ \ of Butoh 5 years ago. He studied with Butoh performers such as Itto Morita,\
+ \ Atsushi Takenouchi, Ken May, Yumiko Yoshioka, Yuko Ota, Imre Thormann. Currently\
+ \ he is studying with Rhizome Lee at the Himalaya Subbody Butoh School. http://Honz.nl\n\
+ \nEmmanuel Elias Flores is a media designer and software artist based in the Netherlands.\
+ \ He studied music and cinema in Mexico and Sonology at the Royal Conservatory\
+ \ in The Hague (NL). 
His work is centered around the idea of exploring different\ + \ types of cinematic experiences and the enhancement of new narrative forms which\ + \ bridge technology, art and perception. His work has been presented on a wide\ + \ range of formats: from audiovisual pieces for electronic music, opera, dance\ + \ and live cinema sets, to the design of public installations and interactive\ + \ applications for mobile devices. In parallel to his creative activities he has\ + \ worked as a developer and IT/video consultant for different commercial and art\ + \ enterprises and as a programmer for portable devices. www.emmanuelflores.net\n\ + \nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n\ + \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Alberto Novello},\n booktitle\ \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {Stelaextraction},\n year = {2012}\n}\n" + \ of Michigan},\n title = {Fragmentation},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -11153,103 +11248,130 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Stelaextraction + title: Fragmentation year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Burns2012 - abstract: "Program notes:\n\nFieldwork is a software environment for improvised\ - \ performance with electronic sound and animation. 
Two musicians' sounding performances\ - \ are fed into the system, and analyzed for pitch, rhythm, and timbral change.\ - \ When the software recognizes a sharp contrast in one performer's textures or\ - \ gestures, it reflects this change by transforming the sound of the other musician's\ - \ performance. Not only are the musicians responding to one another as in conventional\ - \ improvisation, but they are also able to directly modify their duo partner's\ - \ sound through the software. Fieldwork emphasizes rapid, glitchy, and polyrhythmic\ - \ distortions of the musician's performances, and establishes unpredictable feedback\ - \ processes that encourage unexpected improvisational relationships between the\ - \ performers and computer.\n\nComposer(s) Credits:\n\nChristopher Burns\n\nInstrumentalist(s)\ - \ Credits:\n\nChristopher Burns, Andrew Bishop\n\nArtist(s) Biography:\n\nChristopher\ - \ Burns is a composer, improviser, and multimedia artist. His instrumental chamber\ - \ works weave energetic gestures into densely layered surfaces. Polyphony and\ - \ multiplicity also feature in his electroacoustic music, embodied in gritty,\ - \ rough-hewn textures. As an improviser, Christopher combines an idiosyncratic\ - \ approach to the electric guitar with a wide variety of custom software instruments.\ - \ Recent projects emphasize multimedia and motion capture, integrating performance,\ - \ sound, and animation into a unified experience. Across these disciplines, his\ - \ work emphasizes trajectory and directionality, superimposing and intercutting\ - \ a variety of evolving processes to create form.\nChristopher is an avid archaeologist\ - \ of electroacoustic music, creating and performing new digital realizations of\ - \ classic music by composers including Cage, Ligeti, Lucier, Nancarrow, Nono,\ - \ and Stockhausen. A committed educator, he teaches music composition and technology\ - \ at the University of Wisconsin-Milwaukee. 
He has studied composition with Brian\ - \ Ferneyhough, Jonathan Harvey, Jonathan Berger, Michael Tenzer, and Jan Radzynski.\n\ - \nAndrew Bishop is a versatile multi-instrumentalist, composer, improviser, educator\ - \ and scholar comfortable in a wide variety of musical idioms. He maintains a\ - \ national and international career and serves as an Assistant Professor of Jazz\ - \ and Contemporary Improvisation at the University of Michigan in Ann Arbor. \ - \ Bishop's two recordings as a leader have received widespread acclaim from The\ - \ New York Times, Downbeat Magazine, Chicago Reader, All Music Guide, Cadence\ - \ Magazine, All About Jazz-New York, All About Jazz-Los Angeles, and the Detroit\ - \ Free Press, among others. As a composer and arranger he has received over 20\ - \ commissions, numerous residencies and awards and recognition from ASCAP, the\ - \ Chicago Symphony Orchestra, the Andrew W. Melon Foundation, the National Endowment\ - \ for the Arts, Chamber Music of America and a nomination from the American Academy\ - \ of Arts and Letters. He has performed with artist in virtually every musical\ - \ genre. He earned five degrees in music including a D.M.A. in music composition\ - \ from the University of Michigan.\n\nConcert Venue and Time: Necto, Wednesday\ - \ May 23, 9:00pm" + ID: nime2012-music-TrailKellOdowichuk2012 + abstract: "Program notes:\n\nMå ne Havn (mounhoun) is an improvisational multi-media\ + \ performance system for extended vibraphone with accompanying custom LED sculptures\ + \ and projected visuals. The music draws specifically from NYC free jazz, the\ + \ funeral music of the Lobi people of northern Ghana, Dub, psych rock and minimalism.\ + \ Abstract interactive light sculptures actuated from the instrument's audio and\ + \ controller data will accompany the performance, creating a visually shifting\ + \ immersive space. 
The sculptures, named `Takete' and `Maluma', reference Gestalt\ + \ psychology and the known correlation between our perceptions of sound and light.\ + \ Mappings will reflect this phenomenon. The piece uses a pitched percussion tool\ + \ suite developed by the Music Intelligence & Sound Technology Collective at the\ + \ University of Victoria, including: Magic Eyes (3D gesture controller), Ghost\ + \ Hands (control data looper), MSTR DRMMR++ (rhythm template as control switches),\ + \ Fantom Faders (vibraphone bars as control faders) and Gyil Gourd (physical modeling\ + \ of the Lobi xylophone's gourd resonator).\n\nComposer(s) Credits:\n\nShawn Trail,\ + \ Thor Kell, Gabrielle Odowichuk (Artistic Director)\n\nInstrumentalist(s) Credits:\n\ + \nShawn Trail (xtended Vibraphone, Notomoton- robotic drum, suspended cymbal)\n\ + \nArtist(s) Biography:\n\nShawn Trail: Electro-acoustic percussionist, Shawn Trail,\ + \ designs and builds new performance technologies for acoustic pitched percussion\ + \ instruments integrating musical robotics, physical modeling synthesis, and HCI.\ + \ He was Control Interface and Robotics Technician for Pat Metheny's Orchestrion\ + \ World Tour (2010), Fulbright Scholar at Medialogy- Aalborg University, Copenhagen\ + \ researching DSP, synthesis, and HCI (2009), and composer-in-residence with League\ + \ of Electronic Musical Urban Robots (2008). In 2002 he conducted field research\ + \ in Ghana on the Gyil (traditional xylophone). He has a Master of Music in Studio\ + \ Composition from Purchase Conservatory of Music and a BA in percussion performance\ + \ and music technology. 
He is an Interdisciplinary PhD candidate in Computer Science,\ + \ Electrical Engineering, and Music with MISTIC at the University of Victoria.\ + \ Performing solo under the moniker TXTED, his multi- media performance works\ + \ singularly revolve around minimal, textural evolving polyrhythmic, melodic ostinati\ + \ propelled by a sense of urgency intrinsic to cultural music rituals informed\ + \ by specific traditions.\n\nThor Kell: As a composer, programmer, and DJ, Thor\ + \ Kell likes combining interesting things in unique ways. A recent graduate of\ + \ the University of Victoria's Music / Computer Science program, he will begin\ + \ his MA at McGill University in the fall, focusing on interactions between performer,\ + \ interface, and software. While at UVic, he received a Jamie Cassels Undergraduate\ + \ Research Award: his research involved prototyping and composing for a gestural\ + \ control mapping system for extending the marimba. His traditional compositions\ + \ are all clockwork riffs and hidden structures, based on mathematical constants\ + \ or time- stretched quotes from the English folk music canon: he has written\ + \ for everything from full orchestra to solo piano. He has programmed for The\ + \ Echo Nest and SoundCloud. In his secret life as a DJ and techno maven, he has\ + \ released chart-toppers on Kompakt, impossibly deep jams on Fade, and hour-long\ + \ remix / video symphonies on his own label, Tide Pool.\n\nGabrielle Odowichuk\ + \ is a graduate student in Electrical Engineering at the University of Victoria,\ + \ working in the MISTIC research lab. 
A specialist in DSP and MIR, her research\ + \ has focused on sound spatialization and gesture-based control of sound and music,\ + \ developing a variety of prototypes, including Fantom Faders and Magic Eyes,\ + \ the mallet tracking and gesture control applications used in this performance.\ + \ For Møane Havn (mounhoun), she draws on previous experience in art direction\ + \ and stage design to produce unique real-time gesture-controlled visualizations.\ + \ She designed, built, and developed the interactive LED sculptures, Takete and\ + \ Maluma, used in this piece, as well as the projections. Her work has been published\ + \ by ICMC, IEEE, and NIME.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre,\ + \ Wednesday May 23, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Christopher Burns - bibtex: "@incollection{nime2012-music-Burns2012,\n abstract = {Program notes:\n\n\ - \\emph{Fieldwork} is a software environment for improvised performance with electronic\ - \ sound and animation. Two musicians' sounding performances are fed into the system,\ - \ and analyzed for pitch, rhythm, and timbral change. When the software recognizes\ - \ a sharp contrast in one performer's textures or gestures, it reflects this change\ - \ by transforming the sound of the other musician's performance. Not only are\ - \ the musicians responding to one another as in conventional improvisation, but\ - \ they are also able to directly modify their duo partner's sound through the\ - \ software. Fieldwork emphasizes rapid, glitchy, and polyrhythmic distortions\ - \ of the musician's performances, and establishes unpredictable feedback processes\ - \ that encourage unexpected improvisational relationships between the performers\ - \ and computer.\n\nComposer(s) Credits:\n\nChristopher Burns\n\nInstrumentalist(s)\ - \ Credits:\n\nChristopher Burns, Andrew Bishop\n\nArtist(s) Biography:\n\nChristopher\ - \ Burns is a composer, improviser, and multimedia artist. 
His instrumental chamber\ - \ works weave energetic gestures into densely layered surfaces. Polyphony and\ - \ multiplicity also feature in his electroacoustic music, embodied in gritty,\ - \ rough-hewn textures. As an improviser, Christopher combines an idiosyncratic\ - \ approach to the electric guitar with a wide variety of custom software instruments.\ - \ Recent projects emphasize multimedia and motion capture, integrating performance,\ - \ sound, and animation into a unified experience. Across these disciplines, his\ - \ work emphasizes trajectory and directionality, superimposing and intercutting\ - \ a variety of evolving processes to create form.\nChristopher is an avid archaeologist\ - \ of electroacoustic music, creating and performing new digital realizations of\ - \ classic music by composers including Cage, Ligeti, Lucier, Nancarrow, Nono,\ - \ and Stockhausen. A committed educator, he teaches music composition and technology\ - \ at the University of Wisconsin-Milwaukee. He has studied composition with Brian\ - \ Ferneyhough, Jonathan Harvey, Jonathan Berger, Michael Tenzer, and Jan Radzynski.\n\ - \nAndrew Bishop is a versatile multi-instrumentalist, composer, improviser, educator\ - \ and scholar comfortable in a wide variety of musical idioms. He maintains a\ - \ national and international career and serves as an Assistant Professor of Jazz\ - \ and Contemporary Improvisation at the University of Michigan in Ann Arbor. \ - \ Bishop's two recordings as a leader have received widespread acclaim from \\\ - emph{The New York Times, Downbeat Magazine, Chicago Reader, All Music Guide, Cadence\ - \ Magazine, All About Jazz-New York, All About Jazz-Los Angeles, and the Detroit\ - \ Free Press}, among others. As a composer and arranger he has received over\ - \ 20 commissions, numerous residencies and awards and recognition from ASCAP,\ - \ the Chicago Symphony Orchestra, the Andrew W. 
Melon Foundation, the National\ - \ Endowment for the Arts, Chamber Music of America and a nomination from the American\ - \ Academy of Arts and Letters. He has performed with artist in virtually every\ - \ musical genre. He earned five degrees in music including a D.M.A. in music\ - \ composition from the University of Michigan.\n\nConcert Venue and Time: Necto,\ - \ Wednesday May 23, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author\ - \ = {Christopher Burns},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n day = {21-23},\n editor\ - \ = {Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n\ - \ month = {May},\n publisher = {Electrical Engineering \\& Computer Science and\ - \ Performing Arts Technology, University of Michigan},\n title = {Fieldwork},\n\ - \ year = {2012}\n}\n" + author: Shawn Trail and Thor Kell and Gabrielle Odowichuk + bibtex: "@incollection{nime2012-music-TrailKellOdowichuk2012,\n abstract = {Program\ + \ notes:\n\n\\emph{M\\aa ne Havn (mounhoun)} is an improvisational multi-media\ + \ performance system for extended vibraphone with accompanying custom LED sculptures\ + \ and projected visuals. The music draws specifically from NYC free jazz, the\ + \ funeral music of the Lobi people of northern Ghana, Dub, psych rock and minimalism.\ + \ Abstract interactive light sculptures actuated from the instrument's audio and\ + \ controller data will accompany the performance, creating a visually shifting\ + \ immersive space. The sculptures, named `Takete' and `Maluma', reference Gestalt\ + \ psychology and the known correlation between our perceptions of sound and light.\ + \ Mappings will reflect this phenomenon. 
The piece uses a pitched percussion tool\ + \ suite developed by the Music Intelligence \\& Sound Technology Collective at\ + \ the University of Victoria, including: Magic Eyes (3D gesture controller), Ghost\ + \ Hands (control data looper), MSTR DRMMR++ (rhythm template as control switches),\ + \ Fantom Faders (vibraphone bars as control faders) and Gyil Gourd (physical modeling\ + \ of the Lobi xylophone's gourd resonator).\n\nComposer(s) Credits:\n\nShawn Trail,\ + \ Thor Kell, Gabrielle Odowichuk (Artistic Director)\n\nInstrumentalist(s) Credits:\n\ + \nShawn Trail (xtended Vibraphone, Notomoton- robotic drum, suspended cymbal)\n\ + \nArtist(s) Biography:\n\nShawn Trail: Electro-acoustic percussionist, \\textbf{Shawn\ + \ Trail}, designs and builds new performance technologies for acoustic pitched\ + \ percussion instruments integrating musical robotics, physical modeling synthesis,\ + \ and HCI. He was Control Interface and Robotics Technician for Pat Metheny's\ + \ Orchestrion World Tour (2010), Fulbright Scholar at Medialogy- Aalborg University,\ + \ Copenhagen researching DSP, synthesis, and HCI (2009), and composer-in-residence\ + \ with League of Electronic Musical Urban Robots (2008). In 2002 he conducted\ + \ field research in Ghana on the Gyil (traditional xylophone). He has a Master\ + \ of Music in Studio Composition from Purchase Conservatory of Music and a BA\ + \ in percussion performance and music technology. He is an Interdisciplinary PhD\ + \ candidate in Computer Science, Electrical Engineering, and Music with MISTIC\ + \ at the University of Victoria. 
Performing solo under the moniker TXTED, his\
+ \ multi- media performance works singularly revolve around minimal, textural evolving\
+ \ polyrhythmic, melodic ostinati propelled by a sense of urgency intrinsic to\
+ \ cultural music rituals informed by specific traditions.\n\nThor Kell: As a composer,\
+ \ programmer, and DJ, \\textbf{Thor Kell} likes combining interesting things in\
+ \ unique ways. A recent graduate of the University of Victoria's Music / Computer\
+ \ Science program, he will begin his MA at McGill University in the fall, focusing\
+ \ on interactions between performer, interface, and software. While at UVic, he\
+ \ received a Jamie Cassels Undergraduate Research Award: his research involved\
+ \ prototyping and composing for a gestural control mapping system for extending\
+ \ the marimba. His traditional compositions are all clockwork riffs and hidden\
+ \ structures, based on mathematical constants or time- stretched quotes from the\
+ \ English folk music canon: he has written for everything from full orchestra\
+ \ to solo piano. He has programmed for The Echo Nest and SoundCloud. In his secret\
+ \ life as a DJ and techno maven, he has released chart-toppers on Kompakt, impossibly\
+ \ deep jams on Fade, and hour-long remix / video symphonies on his own label,\
+ \ Tide Pool.\n\nGabrielle Odowichuk is a graduate student in Electrical Engineering\
+ \ at the University of Victoria, working in the MISTIC research lab. A specialist\
+ \ in DSP and MIR, her research has focused on sound spatialization and gesture-based\
+ \ control of sound and music, developing a variety of prototypes, including Fantom\
+ \ Faders and Magic Eyes, the mallet tracking and gesture control applications\
+ \ used in this performance. For M\\aa ne Havn (mounhoun), she draws on previous\
+ \ experience in art direction and stage design to produce unique real-time gesture-controlled\
+ \ visualizations. 
She designed, built, and developed the interactive LED sculptures,\ + \ Takete and Maluma, used in this piece, as well as the projections. Her work\ + \ has been published by ICMC, IEEE, and NIME.\n\nConcert Venue and Time: Lydia\ + \ Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n address = {Ann Arbor, Michigan,\ + \ U.S.A.},\n author = {Shawn Trail and Thor Kell and Gabrielle Odowichuk},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ + \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + \ Engineering \\& Computer Science and Performing Arts Technology, University\ + \ of Michigan},\n title = {M\\aa ne Havn (mounhoun): An Exploration of Gestural\ + \ Language for Pitched Percussion},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -11257,81 +11379,62 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: Fieldwork + title: 'Må ne Havn (mounhoun): An Exploration of Gestural Language for Pitched Percussion' year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Uozumi2012 - abstract: "Program notes:\n\nThis performance aims to approach the next style of\ - \ ``mashup'' and/or ``Cut-up'' via fusion of paradigms of artificial-life and\ - \ turntable. We developed a system named ``SoniCell'' to realize it. SoniCell\ - \ employs four robots called ``cell''. Each cell behaves as a metaphor of life\ - \ based on a simple interaction model with prey-predator relationship. Each cell\ - \ is assigned a music-track in the manner of turntable. Therefore, the system\ - \ reconstructs and mixes the music-tracks via cells' interactions and performers'\ - \ interventions. 
In this framework, the aspects of the system and performers interactions\ - \ and cells' internal-states create structures of sounds and music from different\ - \ tracks.\n\nComposer(s) Credits:\n\nYuta Uozumi, Keisuke Oyama, Jun Tomioka,\ - \ Hiromi Okamoto, Takayuki Kimura\n\nInstrumentalist(s) Credits:\n\nArtist(s)\ - \ Biography:\n\nYuta Uozumi is a sound artist and agent-base composer was born\ - \ in the suburbs of Osaka, Japan. He started computer music at the age of fifteen.\ - \ He received his Ph.D. from Keio University SFC Graduate School of Media and\ - \ Governance. He is researching and teaching at Tokyo University of Technology.\ - \ He is studying Multi-Agent based dynamic composition with computer or human\ - \ ensembles. In 2002 His CD \"meme?\" was released from Cubicmusic Japan (under\ - \ the name of SamuraiJazz). In 2003 agent-based musical interface \"Chase\" was\ - \ accepted by NIME. It is a collaborative project by system-designer, DSP engineer\ - \ and performer. In 2005 an application for agent-based composition ``Gismo''\ - \ and a piece created with the system ``Chain'' (early version) were accepted\ - \ by ICMC(International Computer Music Conference).\n\nKeisuke Oyama, was born\ - \ in Kumamoto, Japan on September 19, 1986. He plays various instruments freely\ - \ in childhood. When he was 18, moved to Tokyo to study jazz theory. After starting\ - \ his career as a jazz musician, he participated various sessions as a guitarist.\ - \ Furthermore, his interest covered electro acoustic in the career. He was enrolled\ - \ at Keio University Shonan Fujisawa Campus (SFC) to learn method and technique\ - \ of computer music and media art in 2009. 
He is exploring the new expression\ - \ of music.\n\nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm" + ID: nime2012-music-Caldwell2012 + abstract: "Program notes:\n\nTexturologie 12: Gesture Studies (2011) is the most\ + \ recent of my series of pieces that explore the creation of intricate continuous-field\ + \ textures (and borrow the name of a series of paintings by Dubuffet). In this\ + \ piece, I return to my explorations of the potential of the Wii™ remote to control\ + \ computer music in performance. This time, I tried to treat the physical gesture\ + \ as the germ or motive for the music. Some of the gestures are abstract, but\ + \ some are suggestive of familiar activities like petting a cat, ringing a bell,\ + \ smoothing wallpaper , playing a guiro, scooping, tapping, or vigorous stirring.\ + \ (Check out the videos of my other Wiii™ pieces on YouTube. Search ``Caldwell\ + \ wii.'')\n\nComposer(s) Credits:\n\nJames Caldwell\n\nInstrumentalist(s) Credits:\n\ + \nJames Caldwell (Wii remotes)\n\nArtist(s) Biography:\n\nJames Caldwell (b. 1957)\ + \ is Professor of Music at Western Illinois University and co-director of the\ + \ New Music Festival. He was named Outstanding Teacher in the College of Fine\ + \ Arts and Communication (2005) and received the inaugural Provost's Award for\ + \ Excellence in Teaching. He was named the 2009 Distinguished Faculty Lecturer.\ + \ He holds degrees from Michigan State University and Northwestern University,\ + \ where he studied composition, theory, and electronic and computer music. Since\ + \ 2004 he has studied studio art---drawing, lithography, painting, and sculpture---at\ + \ WIU as a way to stretch creatively and again experience being a student.\n\n\ + Concert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' 
- author: Yuta Uozumi and Keisuke Oyama and Jun Tomioka and Hiromi Okamoto and Takayuki - Kimura - bibtex: "@incollection{nime2012-music-Uozumi2012,\n abstract = {Program notes:\n\ - \nThis performance aims to approach the next style of ``mashup'' and/or ``Cut-up''\ - \ via fusion of paradigms of artificial-life and turntable. We developed a system\ - \ named ``SoniCell'' to realize it. SoniCell employs four robots called ``cell''.\ - \ Each cell behaves as a metaphor of life based on a simple interaction model\ - \ with prey-predator relationship. Each cell is assigned a music-track in the\ - \ manner of turntable. Therefore, the system reconstructs and mixes the music-tracks\ - \ via cells' interactions and performers' interventions. In this framework, the\ - \ aspects of the system and performers interactions and cells' internal-states\ - \ create structures of sounds and music from different tracks.\n\nComposer(s)\ - \ Credits:\n\nYuta Uozumi, Keisuke Oyama, Jun Tomioka, Hiromi Okamoto, Takayuki\ - \ Kimura\n\nInstrumentalist(s) Credits:\n\nArtist(s) Biography:\n\nYuta Uozumi\ - \ is a sound artist and agent-base composer was born in the suburbs of Osaka,\ - \ Japan. He started computer music at the age of fifteen. He received his Ph.D.\ - \ from Keio University SFC Graduate School of Media and Governance. He is researching\ - \ and teaching at Tokyo University of Technology. He is studying Multi-Agent based\ - \ dynamic composition with computer or human ensembles. In 2002 His CD \"meme?\"\ - \ was released from Cubicmusic Japan (under the name of SamuraiJazz). In 2003\ - \ agent-based musical interface \"Chase\" was accepted by NIME. It is a collaborative\ - \ project by system-designer, DSP engineer and performer. 
In 2005 an application\ - \ for agent-based composition ``Gismo'' and a piece created with the system ``Chain''\ - \ (early version) were accepted by ICMC(International Computer Music Conference).\n\ - \nKeisuke Oyama, was born in Kumamoto, Japan on September 19, 1986. He plays various\ - \ instruments freely in childhood. When he was 18, moved to Tokyo to study jazz\ - \ theory. After starting his career as a jazz musician, he participated various\ - \ sessions as a guitarist. Furthermore, his interest covered electro acoustic\ - \ in the career. He was enrolled at Keio University Shonan Fujisawa Campus (SFC)\ - \ to learn method and technique of computer music and media art in 2009. He is\ - \ exploring the new expression of music.\n\nConcert Venue and Time: Necto, Wednesday\ - \ May 23, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Yuta\ - \ Uozumi and Keisuke Oyama and Jun Tomioka and Hiromi Okamoto and Takayuki Kimura},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie\ - \ and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + author: James Caldwell + bibtex: "@incollection{nime2012-music-Caldwell2012,\n abstract = {Program notes:\n\ + \n\\emph{Texturologie 12: Gesture Studies} (2011) is the most recent of my series\ + \ of pieces that explore the creation of intricate continuous-field textures (and\ + \ borrow the name of a series of paintings by Dubuffet). In this piece, I return\ + \ to my explorations of the potential of the Wii\\texttrademark remote to control\ + \ computer music in performance. This time, I tried to treat the physical gesture\ + \ as the germ or motive for the music. 
Some of the gestures are abstract, but\ + \ some are suggestive of familiar activities like petting a cat, ringing a bell,\ + \ smoothing wallpaper , playing a guiro, scooping, tapping, or vigorous stirring.\ + \ (Check out the videos of my other Wiii\\texttrademark pieces on YouTube. Search\ + \ ``Caldwell wii.'')\n\nComposer(s) Credits:\n\nJames Caldwell\n\nInstrumentalist(s)\ + \ Credits:\n\nJames Caldwell (Wii remotes)\n\nArtist(s) Biography:\n\nJames Caldwell\ + \ (b. 1957) is Professor of Music at Western Illinois University and co-director\ + \ of the New Music Festival. He was named Outstanding Teacher in the College of\ + \ Fine Arts and Communication (2005) and received the inaugural Provost's Award\ + \ for Excellence in Teaching. He was named the 2009 Distinguished Faculty Lecturer.\ + \ He holds degrees from Michigan State University and Northwestern University,\ + \ where he studied composition, theory, and electronic and computer music. Since\ + \ 2004 he has studied studio art---drawing, lithography, painting, and sculpture---at\ + \ WIU as a way to stretch creatively and again experience being a student.\n\n\ + Concert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n\ + \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {James Caldwell},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ + \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ \ Engineering \\& Computer Science and Performing Arts Technology, University\ - \ of Michigan},\n title = {four fragments---A Performance for Swarming Robotics},\n\ - \ year = {2012}\n}\n" + \ of Michigan},\n title = {Texturologie 12: Gesture Studies},\n year = {2012}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -11339,52 +11442,107 @@ month: 
May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: four fragments---A Performance for Swarming Robotics + title: 'Texturologie 12: Gesture Studies' year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-Tremblay2012 - abstract: "Program notes:\n\nA bass guitar and a laptop.\n\nNo sequence, no set\ - \ list, no programme, no gizmo, no intention, no fireworks, no meaning, no feature,\ - \ no beat, no argument, no nothing.\n\nJust this very moment with my meta-instrument:\ - \ a third sandbox in which I play in public for the sixth time, here, whatever\ - \ happens.\n\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nPierre Alexandre\ - \ Tremblay\n\nArtist(s) Biography:\n\nPierre Alexandre Tremblay (Montréal, 1975)\ - \ is a composer and a performer on bass guitar and sound processing devices, in\ - \ solo and within the groups ars circa musicæ (Paris, France), de type inconnu\ - \ (Montréal, Québec), and Splice (London, UK). His music is mainly released by\ - \ Empreintes DIGITALes and Ora. He is Reader in Composition and Improvisation\ - \ at the University of Huddersfield (UK) where he also is Director of the Electronic\ - \ Music Studios. He previously worked in popular music as producer and bassist,\ - \ and is interested in videomusic and coding. He likes oolong tea, reading, and\ - \ walking. As a founding member of the no-tv collective, he does not own a working\ - \ television set. 
www.pierrealexandretremblay.com\n\nConcert Venue and Time: Necto,\ - \ Wednesday May 23, 9:00pm" + ID: nime2012-music-SchanklerFrancoisChew2012 + abstract: "Program notes:\n\nMimi, designed by Alexandre François with input from\ + \ Elaine Chew and Isaac Schankler, is a multi-modal interactive musical improvisation\ + \ system that explores the impact of visual feedback in performer-machine interaction.\ + \ The Mimi system enables the performer to experiment with a unique blend of improvisation-like\ + \ on-the-fly invention, composition-like planning and choreography, and expressive\ + \ performance. Mimi's improvisations are created through a factor oracle. The\ + \ visual interface gives the performer and the audience instantaneous and continuous\ + \ information on the state of the oracle, its recombination strategy, the music\ + \ to come, and that recently played. The performer controls when the system starts,\ + \ stops, and learns, the playback volume, and the recombination rate. 
Mimi is\ + \ not only an effective improvisation partner, it also provides a platform through\ + \ which to interrogate the mental models necessary for successful improvisation.\ + \ This performance also features custom synths and mechanisms for inter-oracle\ + \ interaction created for Mimi by Isaac Schankler.\n\nComposer(s) Credits:\n\n\ + Isaac Schankler, Alexandre François, Elaine Chew\n\nInstrumentalist(s) Credits:\n\ + \nIsaac Schankler (keyboard & electronics), Mimi (keyboard & electronics)\n\n\ + Artist(s) Biography:\n\nIsaac Schankler is a Los Angeles-based composer-improviser.\ + \ His recent honors include a grant from Meet the Composer for his opera Light\ + \ and Power, selection as finalist in the ASCAP/SEAMUS Composition Competition,\ + \ and the Damien Top Prize in the ASCAP/Lotte Lehmann Foundation Art Song Competition.\ + \ He is the Artist in Residence of the Music Computation and Cognition Laboratory\ + \ (MuCoaCo) at the USC Viterbi School of Engineering, and an Artistic Director\ + \ of the concert series People Inside Electronics. Isaac holds degrees in composition\ + \ from the USC Thornton School of Music (DMA) and the University of Michigan (MM,\ + \ BM).\n\nElaine Chew is Professor of Digital Media at Queen Mary, University\ + \ of London, and Director of Music Initiatives at the Centre for Digital Music.\ + \ An operations researcher and pianist by training, her research goal is to de-mystify\ + \ music and its performance through the use of formal scientific methods; as a\ + \ performer, she collaborates with composers to present eclectic post-tonal music.\ + \ She received PhD and SM degrees in Operations Research from MIT and a BAS in\ + \ Music and Mathematical & Computational Sciences from Stanford. She is the recipient\ + \ of NSF Career and PECASE awards, and a Radcliffe Institute for Advanced Studies\ + \ fellowship.\n\nAlexandre R.J. 
François's research focuses on the modeling and\ + \ design of interactive (software) systems, as an enabling step towards the understanding\ + \ of perception and cognition. He was a 2007-2008 Fellow of the Radcliffe Institute\ + \ for Advanced Study at Harvard University, where he co-led a music research cluster\ + \ on Analytical Listening Through Interactive Visualization. François received\ + \ the Dipl\\^ome d'Ingénieur from the Institut National Agronomique Paris-Grignon\ + \ in 1993, the Dipl\\^ome d'Etudes Approfondies (M.S.) from the University Paris\ + \ IX - Dauphine in 1994, and the M.S. and Ph.D. degrees in Computer Science from\ + \ the University of Southern California in 1997 and 2000 respectively.\n\nConcert\ + \ Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: Pierre~Alexandre Tremblay - bibtex: "@incollection{nime2012-music-Tremblay2012,\n abstract = {Program notes:\n\ - \nA bass guitar and a laptop.\n\nNo sequence, no set list, no programme, no gizmo,\ - \ no intention, no fireworks, no meaning, no feature, no beat, no argument, no\ - \ nothing.\n\nJust this very moment with my meta-instrument: a third sandbox in\ - \ which I play in public for the sixth time, here, whatever happens.\n\nComposer(s)\ - \ Credits:\n\nInstrumentalist(s) Credits:\n\nPierre Alexandre Tremblay\n\nArtist(s)\ - \ Biography:\n\nPierre Alexandre Tremblay (Montr\\'{e}al, 1975) is a composer\ - \ and a performer on bass guitar and sound processing devices, in solo and within\ - \ the groups ars circa music\\ae (Paris, France), de type inconnu (Montr\\'{e}al,\ - \ Qu\\'{e}bec), and Splice (London, UK). His music is mainly released by Empreintes\ - \ DIGITALes and Ora. 
He is Reader in Composition and Improvisation at the University\ - \ of Huddersfield (UK) where he also is Director of the Electronic Music Studios.\ - \ He previously worked in popular music as producer and bassist, and is interested\ - \ in videomusic and coding. He likes oolong tea, reading, and walking. As a founding\ - \ member of the no-tv collective, he does not own a working television set. www.pierrealexandretremblay.com\n\ - \nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm},\n address = {Ann Arbor,\ - \ Michigan, U.S.A.},\n author = {Pierre~Alexandre Tremblay},\n booktitle = {Music\ + author: Isaac Schankler and Alexandre François and Elaine Chew + bibtex: "@incollection{nime2012-music-SchanklerFrancoisChew2012,\n abstract = {Program\ + \ notes:\n\nMimi, designed by Alexandre Fran\\c{c}ois with input from Elaine Chew\ + \ and Isaac Schankler, is a multi-modal interactive musical improvisation system\ + \ that explores the impact of visual feedback in performer-machine interaction.\ + \ The Mimi system enables the performer to experiment with a unique blend of improvisation-like\ + \ on-the-fly invention, composition-like planning and choreography, and expressive\ + \ performance. Mimi's improvisations are created through a factor oracle. The\ + \ visual interface gives the performer and the audience instantaneous and continuous\ + \ information on the state of the oracle, its recombination strategy, the music\ + \ to come, and that recently played. The performer controls when the system starts,\ + \ stops, and learns, the playback volume, and the recombination rate. 
Mimi is\ + \ not only an effective improvisation partner, it also provides a platform through\ + \ which to interrogate the mental models necessary for successful improvisation.\ + \ This performance also features custom synths and mechanisms for inter-oracle\ + \ interaction created for Mimi by Isaac Schankler.\n\nComposer(s) Credits:\n\n\ + Isaac Schankler, Alexandre Fran\\c{c}ois, Elaine Chew\n\nInstrumentalist(s) Credits:\n\ + \nIsaac Schankler (keyboard \\& electronics), Mimi (keyboard \\& electronics)\n\ + \nArtist(s) Biography:\n\nIsaac Schankler is a Los Angeles-based composer-improviser.\ + \ His recent honors include a grant from Meet the Composer for his opera Light\ + \ and Power, selection as finalist in the ASCAP/SEAMUS Composition Competition,\ + \ and the Damien Top Prize in the ASCAP/Lotte Lehmann Foundation Art Song Competition.\ + \ He is the Artist in Residence of the Music Computation and Cognition Laboratory\ + \ (MuCoaCo) at the USC Viterbi School of Engineering, and an Artistic Director\ + \ of the concert series People Inside Electronics. Isaac holds degrees in composition\ + \ from the USC Thornton School of Music (DMA) and the University of Michigan (MM,\ + \ BM).\n\nElaine Chew is Professor of Digital Media at Queen Mary, University\ + \ of London, and Director of Music Initiatives at the Centre for Digital Music.\ + \ An operations researcher and pianist by training, her research goal is to de-mystify\ + \ music and its performance through the use of formal scientific methods; as a\ + \ performer, she collaborates with composers to present eclectic post-tonal music.\ + \ She received PhD and SM degrees in Operations Research from MIT and a BAS in\ + \ Music and Mathematical \\& Computational Sciences from Stanford. She is the\ + \ recipient of NSF Career and PECASE awards, and a Radcliffe Institute for Advanced\ + \ Studies fellowship.\n\nAlexandre R.J. 
Fran\\c{c}ois's research focuses on the\ + \ modeling and design of interactive (software) systems, as an enabling step towards\ + \ the understanding of perception and cognition. He was a 2007-2008 Fellow of\ + \ the Radcliffe Institute for Advanced Study at Harvard University, where he co-led\ + \ a music research cluster on Analytical Listening Through Interactive Visualization.\ + \ Fran\\c{c}ois received the Dipl\\^{o}me d'Ing\\'{e}nieur from the Institut National\ + \ Agronomique Paris-Grignon in 1993, the Dipl\\^{o}me d'Etudes Approfondies (M.S.)\ + \ from the University Paris IX - Dauphine in 1994, and the M.S. and Ph.D. degrees\ + \ in Computer Science from the University of Southern California in 1997 and 2000\ + \ respectively.\n\nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday\ + \ May 23, 7:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Isaac\ + \ Schankler and Alexandre Fran\\c{c}ois and Elaine Chew},\n booktitle = {Music\ \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ - \ title = {Sandbox\\#3.6},\n year = {2012}\n}\n" + \ title = {Mimi: Multi-modal Interaction for Musical Improvisation},\n year =\ + \ {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -11392,70 +11550,71 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: "Sandbox#3.6" + title: 'Mimi: Multi-modal Interaction for Musical Improvisation' year: 2012 - ENTRYTYPE: incollection - ID: nime2012-music-dAlessandroSchwarz2012 - abstract: "Program notes:\n\nDaisyLab is a duet performance for two new interfaces\ - \ for musical 
expression that have in common the ability to generate versatile\ - \ vocal material. Diemo Schwarz's instrument uses a variety of sensors on the\ - \ top of corpus-based concatenative synthesis, which has been fed with voice sounds\ - \ for this performance. Nicolas d'Alessandro plays the HandSketch interface over\ - \ the new MAGE speech synthesizer, bringing tangible inputs to an emerging speech\ - \ synthesis technique. Both systems have been submitted as long papers for this\ - \ 2012 edition of NIME. Together these two performers explore the boundaries between\ - \ vocal and non-vocal sonic spaces, aiming at deconstructing the humankind's most\ - \ ubiquitous communicative channel through a compositionally directed improvisation,\ - \ a ``comprovisation.''\n\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\ - \nNicolas d'Alessandro (HandSketch, iPad), Diemo Schwarz (CataRT, gestural controllers)\n\ - \nArtist(s) Biography:\n\nNicolas d'Alessandro obtained his PhD in Applied Sciences\ - \ from the University of Mons in 2009. From a lifelong interest in musical instruments\ - \ and his acquired taste in speech and singing processing, he will incrementally\ - \ shape a research topic that aims at using gestural control of sound in order\ - \ to gain insights in speech and singing production. He works with Prof. T. Dutoit\ - \ for a PhD at the University of Mons between 2004 and 2009. Late 2009, he moves\ - \ to Canada, to take a postdoc position with Prof. S. Fels at the MAGIC Lab, University\ - \ of British Columbia, where he will work on the DiVA project. There he will also\ - \ organize the first p3s workshop. Since December 2011, he is back in the University\ - \ of Mons and leads the MAGE project. 
Nicolas is also an active electroacoustic\ - \ performer in and around Belgium, playing guitar and invented instruments in\ - \ various performances.\n\nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm" + ID: nime2012-music-StapletonDavis2012 + abstract: "Program notes:\n\nThis performance explores notions of presence and absence,\ + \ technologically mediated communication and audience perception through the staging\ + \ of intentionally ambiguous but repeatable sonic interactions taking place across\ + \ two geographically separate locations.\n\nThanks to SARC, CCRMA & Bournemouth\ + \ University for support during the development of this project.\n\nComposer(s)\ + \ Credits:\n\nPaul Stapleton and Tom Davis\n\nInstrumentalist(s) Credits:\n\n\ + Paul Stapleton (Networked Instrument), Tom Davis (Networked Instrument)\n\nArtist(s)\ + \ Biography:\n\nPaul Stapleton is a sound artist, improviser and writer originally\ + \ from Southern California, currently based in Belfast, Northern Ireland. Paul\ + \ designs and performs with a variety of modular metallic sound sculptures, custom\ + \ made electronics, found objects and electric guitars in locations ranging from\ + \ experimental music clubs in Berlin to remote beaches on Vancouver Island. He\ + \ is currently involved in a diverse range of artistic collaborations including:\ + \ performance duo ABODE with vocalist Caroline Pugh, interdisciplinary arts group\ + \ theybreakinpieces, improvisation duo with saxophonist Simon Rose, Eric Lyon's\ + \ Noise Quartet, and the DIY quartet E=MCHammer. Since 2007, Paul has been on\ + \ the faculty at the Sonic Arts Research Centre where he teaches and supervises\ + \ Master's and PhD research in performance technologies, interaction design and\ + \ site-specific art.\n\nTom Davis is a digital artist working mainly in the medium\ + \ of sound installation. His practice and theory based output involves the creation\ + \ of technology-led environments for interaction. 
He performs regularly as part\ + \ of JDTJDJ with Jason Dixon and as part of the Jackson4s. He has performed and\ + \ exhibited across Europe and in the US. Davis is currently a lecturer at the\ + \ University of Bournemouth and holds a PhD from the Sonic Arts Research Centre.\n\ + \nConcert Venue and Time: Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm" address: 'Ann Arbor, Michigan, U.S.A.' - author: 'Nicolas d''Alessandro and, Diemo Schwarz' - bibtex: "@incollection{nime2012-music-dAlessandroSchwarz2012,\n abstract = {Program\ - \ notes:\n\n\\emph{DaisyLab} is a duet performance for two new interfaces for\ - \ musical expression that have in common the ability to generate versatile vocal\ - \ material. Diemo Schwarz's instrument uses a variety of sensors on the top of\ - \ corpus-based concatenative synthesis, which has been fed with voice sounds for\ - \ this performance. Nicolas d'Alessandro plays the HandSketch interface over the\ - \ new MAGE speech synthesizer, bringing tangible inputs to an emerging speech\ - \ synthesis technique. Both systems have been submitted as long papers for this\ - \ 2012 edition of NIME. Together these two performers explore the boundaries between\ - \ vocal and non-vocal sonic spaces, aiming at deconstructing the humankind's most\ - \ ubiquitous communicative channel through a compositionally directed improvisation,\ - \ a ``comprovisation.''\n\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\ - \nNicolas d'Alessandro (HandSketch, iPad), Diemo Schwarz (CataRT, gestural controllers)\n\ - \nArtist(s) Biography:\n\nNicolas d'Alessandro obtained his PhD in Applied Sciences\ - \ from the University of Mons in 2009. From a lifelong interest in musical instruments\ - \ and his acquired taste in speech and singing processing, he will incrementally\ - \ shape a research topic that aims at using gestural control of sound in order\ - \ to gain insights in speech and singing production. He works with Prof. T. 
Dutoit\ - \ for a PhD at the University of Mons between 2004 and 2009. Late 2009, he moves\ - \ to Canada, to take a postdoc position with Prof. S. Fels at the MAGIC Lab, University\ - \ of British Columbia, where he will work on the DiVA project. There he will also\ - \ organize the first p3s workshop. Since December 2011, he is back in the University\ - \ of Mons and leads the MAGE project. Nicolas is also an active electroacoustic\ - \ performer in and around Belgium, playing guitar and invented instruments in\ - \ various performances.\n\nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm},\n\ - \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Nicolas d'Alessandro and,\ - \ Diemo Schwarz},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg\ - \ Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month\ - \ = {May},\n publisher = {Electrical Engineering \\& Computer Science and Performing\ - \ Arts Technology, University of Michigan},\n title = {DaisyLab, a Phonetic Deconstruction\ - \ of Humankind},\n year = {2012}\n}\n" + author: Paul Stapleton and Tom Davis + bibtex: "@incollection{nime2012-music-StapletonDavis2012,\n abstract = {Program\ + \ notes:\n\nThis performance explores notions of presence and absence, technologically\ + \ mediated communication and audience perception through the staging of intentionally\ + \ ambiguous but repeatable sonic interactions taking place across two geographically\ + \ separate locations.\n\nThanks to SARC, CCRMA \\& Bournemouth University for\ + \ support during the development of this project.\n\nComposer(s) Credits:\n\n\ + Paul Stapleton and Tom Davis\n\nInstrumentalist(s) Credits:\n\nPaul Stapleton\ + \ (Networked Instrument), Tom Davis (Networked Instrument)\n\nArtist(s) Biography:\n\ + \nPaul Stapleton is a sound artist, improviser and writer originally from Southern\ + \ California, currently 
based in Belfast, Northern Ireland. Paul designs and performs\ + \ with a variety of modular metallic sound sculptures, custom made electronics,\ + \ found objects and electric guitars in locations ranging from experimental music\ + \ clubs in Berlin to remote beaches on Vancouver Island. He is currently involved\ + \ in a diverse range of artistic collaborations including: performance duo ABODE\ + \ with vocalist Caroline Pugh, interdisciplinary arts group theybreakinpieces,\ + \ improvisation duo with saxophonist Simon Rose, Eric Lyon's Noise Quartet, and\ + \ the DIY quartet E=MCHammer. Since 2007, Paul has been on the faculty at the\ + \ Sonic Arts Research Centre where he teaches and supervises Master's and PhD\ + \ research in performance technologies, interaction design and site-specific art.\n\ + \nTom Davis is a digital artist working mainly in the medium of sound installation.\ + \ His practice and theory based output involves the creation of technology-led\ + \ environments for interaction. He performs regularly as part of JDTJDJ with Jason\ + \ Dixon and as part of the Jackson4s. He has performed and exhibited across Europe\ + \ and in the US. 
Davis is currently a lecturer at the University of Bournemouth\ + \ and holds a PhD from the Sonic Arts Research Centre.\n\nConcert Venue and Time:\ + \ Lydia Mendelssohn Theatre, Wednesday May 23, 7:00pm},\n address = {Ann Arbor,\ + \ Michigan, U.S.A.},\n author = {Paul Stapleton and Tom Davis},\n booktitle =\ + \ {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ + \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + \ Engineering \\& Computer Science and Performing Arts Technology, University\ + \ of Michigan},\n title = {Ambiguous Devices},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression day: 21-23 @@ -11463,1739 +11622,1659 @@ month: May publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, University of Michigan' - title: 'DaisyLab, a Phonetic Deconstruction of Humankind' + title: Ambiguous Devices year: 2012 -- ENTRYTYPE: article - ID: nime23-music-12 - abstract: 'SPLT/SCRN is a game-piece where two improvisers play against each-other - using their instruments as game controllers. The piece consists of multiple randomized - mini-challenges where the performers need to improvise in order to understand - what musical gestures are required from them through positive feedback from the - screen. The mini-games cover a range of musical affordances, giving the advantage - to both instrumentalists at different times. The instrument signal is analysed - in real-time using machine learning techniques through Max/MSP, and used as control - data for both the progress within the game, as well as the control of the live - electronics. These parameters are then sent through OSC to the game engine Unity - and control the game. 
In addition, the hybrid system makes use of DMX-controlled - lights, which are also mapped to control data and game levels. On-screen events - are accentuated through lights within the physical space, merging the physical - and the digital.' - articleno: 12 - author: Christos Michalakos - bibtex: "@article{nime23-music-12,\n abstract = {SPLT/SCRN is a game-piece where\ - \ two improvisers play against each-other using their instruments as game controllers.\ - \ The piece consists of multiple randomized mini-challenges where the performers\ - \ need to improvise in order to understand what musical gestures are required\ - \ from them through positive feedback from the screen. The mini-games cover a\ - \ range of musical affordances, giving the advantage to both instrumentalists\ - \ at different times. The instrument signal is analysed in real-time using machine\ - \ learning techniques through Max/MSP, and used as control data for both the progress\ - \ within the game, as well as the control of the live electronics. These parameters\ - \ are then sent through OSC to the game engine Unity and control the game. In\ - \ addition, the hybrid system makes use of DMX-controlled lights, which are also\ - \ mapped to control data and game levels. 
On-screen events are accentuated through\ - \ lights within the physical space, merging the physical and the digital.},\n\ - \ articleno = {12},\n author = {Christos Michalakos},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 1, Wednesday\ - \ May 31, Biblioteca Vasconcelos},\n title = {SPLT/SCRN: A Game-Piece for Dueling\ - \ Improvisers},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_1.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: incollection + ID: nime2012-music-Blasco2012 + abstract: "Program notes:\n\nThe Theremin Orchestra is a composition for three voices\ + \ and a modular system of four spheres with built-in Theremin Sensors. Two of\ + \ those spheres will control different effects on the voices and the rest will\ + \ be played as Theremin instruments. The performance is presented as a sound event\ + \ where initially the three voices appear raw and naked and as the composition\ + \ unfolds the voices will be increasingly distorted through different effects\ + \ applied with the Theremin controllers. In the climax of its progression the\ + \ other two Theremin balls will become audible merging their sound with the mesh\ + \ of vocal reshaped sources, not allowing to distinguish where the human ends\ + \ and the machine starts.\n\nComposer(s) Credits:\n\nMercedes Blasco\n\nInstrumentalist(s)\ + \ Credits:\n\nMercedes Blasco (voice, Theremin controllers, EMS synth), Thessia\ + \ Machado and Sonia Megías (voice, Theremin instrument)\n\nArtist(s) Biography:\n\ + \nMerche Blasco: Trained as a Telecommunications Engineer, Merche Blasco developed\ + \ in parallel to her studies a more creative path related with music, video, installation\ + \ and performance. 
She created her alter ego ``Burbuja'' as a vehicle for her\ + \ own musical exploration and since its conception she has participated & collaborated\ + \ with various artists, establishing a strong relationship between different mediums\ + \ of artistic expression & her own musical direction lsuch as Lucy Orta at the\ + \ Venice biennale, Chicks on Speed and Cristian Vogel.\nHer debut,``burbuja''\ + \ (station55 records) was presented in Sonar 2007 and has been touring in different\ + \ cities in Europe, USA and Canada in the past years: Mapping Festival (Geneve),\ + \ Sonic Art Circuits (Washington), Queens Museum of Art (New York). Thanks to\ + \ a Fulbright Grant she is currently a MPS Candidate in the Interactive Telecommunications\ + \ Program (NYU) where she is mainly researching about new tools for Electronic\ + \ Music Performance.\n\nThessia Machado, Brazil/NY, investigates the physicality\ + \ of sound and its effect on our perception of space. Many of her recent sculptures\ + \ and installations function also as unorthodox instruments---pieces that have\ + \ a real-time, live component. The expressive potential is active and changeable\ + \ as the viewer interacts and performs with it. Thessia's installations and video\ + \ pieces have been exhibited in New York, London, Philadelphia, Paris, Amsterdam,\ + \ Dublin, Berlin and Athens.\nShe has been awarded residencies at the MacDowell\ + \ Colony, Yaddo, the Atlantic Center for the Arts, the Irish Museum of Modern\ + \ Art and the Vermont Studio Center and she is a recipient of fellowships from\ + \ the New York Foundation for the Arts, The Experimental Television Center and\ + \ The Bronx Museum. Performing as link, Thessia Machado, a self-avowed noisician,\ + \ employs a changing line-up of handmade, found and modified instruments to build\ + \ driving, meditative soundscapes.\n\nSonia Megias was born on June 20th 1982\ + \ in Almansa, a village at the southeast of Spain. 
Since she was a kid, she has\ + \ been abducted by the arts, nature and spirituality. Even today, some years later,\ + \ she tries to interweave these beautiful disciplines, with the goal of transmit\ + \ to the world her perception of Beauty or True.\nThanks to the intensity of her\ + \ musical production, she finds herself living in New York since 2010, on the\ + \ Fulbright and a NYU Steinhardt grants. Here, she combines her studies at the\ + \ New York University with the compositions of her last commissioned pieces.\n\ + Her music has been performed in different music halls and festivals, underlining\ + \ the following: Auditorio 400 at the National Museum of Contemporary Art ``Queen\ + \ Sophia'' (2012, 2008); Cervantes Institute of New York (2012, 2011); Houston\ + \ University, at Opera Vista Festival (2011); Consulate of Argentina in New York,\ + \ at a Tribute to Alfonsina Storni (2009); Embassy of France in Spain (2009);\ + \ United Nations Headquarters (2008).\n\nConcert Venue and Time: Necto, Wednesday\ + \ May 23, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Mercedes Blasco + bibtex: "@incollection{nime2012-music-Blasco2012,\n abstract = {Program notes:\n\ + \n\\emph{The Theremin Orchestra} is a composition for three voices and a modular\ + \ system of four spheres with built-in Theremin Sensors. Two of those spheres\ + \ will control different effects on the voices and the rest will be played as\ + \ Theremin instruments. The performance is presented as a sound event where initially\ + \ the three voices appear raw and naked and as the composition unfolds the voices\ + \ will be increasingly distorted through different effects applied with the Theremin\ + \ controllers. 
In the climax of its progression the other two Theremin balls will\ + \ become audible merging their sound with the mesh of vocal reshaped sources,\ + \ not allowing to distinguish where the human ends and the machine starts.\n\n\ + Composer(s) Credits:\n\nMercedes Blasco\n\nInstrumentalist(s) Credits:\n\nMercedes\ + \ Blasco (voice, Theremin controllers, EMS synth), Thessia Machado and Sonia Meg\\\ + '{i}as (voice, Theremin instrument)\n\nArtist(s) Biography:\n\nMerche Blasco:\ + \ Trained as a Telecommunications Engineer, \\textbf{Merche Blasco} developed\ + \ in parallel to her studies a more creative path related with music, video, installation\ + \ and performance. She created her alter ego ``Burbuja'' as a vehicle for her\ + \ own musical exploration and since its conception she has participated \\& collaborated\ + \ with various artists, establishing a strong relationship between different mediums\ + \ of artistic expression \\& her own musical direction lsuch as Lucy Orta at\ + \ the Venice biennale, Chicks on Speed and Cristian Vogel.\nHer debut,``burbuja''\ + \ (station55 records) was presented in Sonar 2007 and has been touring in different\ + \ cities in Europe, USA and Canada in the past years: Mapping Festival (Geneve),\ + \ Sonic Art Circuits (Washington), Queens Museum of Art (New York). Thanks to\ + \ a Fulbright Grant she is currently a MPS Candidate in the Interactive Telecommunications\ + \ Program (NYU) where she is mainly researching about new tools for Electronic\ + \ Music Performance.\n\nThessia Machado, Brazil/NY, investigates the physicality\ + \ of sound and its effect on our perception of space. Many of her recent sculptures\ + \ and installations function also as unorthodox instruments---pieces that have\ + \ a real-time, live component. The expressive potential is active and changeable\ + \ as the viewer interacts and performs with it. 
Thessia's installations and video\ + \ pieces have been exhibited in New York, London, Philadelphia, Paris, Amsterdam,\ + \ Dublin, Berlin and Athens.\nShe has been awarded residencies at the MacDowell\ + \ Colony, Yaddo, the Atlantic Center for the Arts, the Irish Museum of Modern\ + \ Art and the Vermont Studio Center and she is a recipient of fellowships from\ + \ the New York Foundation for the Arts, The Experimental Television Center and\ + \ The Bronx Museum. Performing as link, Thessia Machado, a self-avowed noisician,\ + \ employs a changing line-up of handmade, found and modified instruments to build\ + \ driving, meditative soundscapes.\n\nSonia Megias was born on June 20th 1982\ + \ in Almansa, a village at the southeast of Spain. Since she was a kid, she has\ + \ been abducted by the arts, nature and spirituality. Even today, some years later,\ + \ she tries to interweave these beautiful disciplines, with the goal of transmit\ + \ to the world her perception of Beauty or True.\nThanks to the intensity of her\ + \ musical production, she finds herself living in New York since 2010, on the\ + \ Fulbright and a NYU Steinhardt grants. 
Here, she combines her studies at the\ + \ New York University with the compositions of her last commissioned pieces.\n\ + Her music has been performed in different music halls and festivals, underlining\ + \ the following: Auditorio 400 at the National Museum of Contemporary Art ``Queen\ + \ Sophia'' (2012, 2008); Cervantes Institute of New York (2012, 2011); Houston\ + \ University, at Opera Vista Festival (2011); Consulate of Argentina in New York,\ + \ at a Tribute to Alfonsina Storni (2009); Embassy of France in Spain (2009);\ + \ United Nations Headquarters (2008).\n\nConcert Venue and Time: Necto, Wednesday\ + \ May 23, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Mercedes\ + \ Blasco},\n booktitle = {Music Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg Essl\ + \ and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n\ + \ publisher = {Electrical Engineering \\& Computer Science and Performing Arts\ + \ Technology, University of Michigan},\n title = {The Theremin Orchestra},\n year\ + \ = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain month: May - note: 'Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos' - title: 'SPLT/SCRN: A Game-Piece for Dueling Improvisers' - url: https://www.nime.org/proceedings/2023/nime23_concert_1.pdf - year: 2023 + publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, + University of Michigan' + title: The Theremin Orchestra + year: 2012 -- ENTRYTYPE: article - ID: nime23-music-13 - abstract: 'Our idea starts from the necessity to investigate space, explore its - features, find the potential in acoustic properties, and use them as a starting - point for our research. 
How is it possible to create a three-dimensional and analog - sound system? How are we able to work with instruments that can move sound in - space? Taking advantage of the use of customized industrial items, we will have - the possibility to create three-dimensional audio images controlled and designed - in real-time by the performers. The concept that interests us is the single percussive - impulse as a music creator. We can change the surface, and speed of the execution - but the impulse is at the core of every percussive action. Solenoids are our artistic - medium and the interesting aspect is the relationship between us as human performers - and the possibilities that arise through our interaction with a complex mechanical - instrument. Thus we see in this instrument an extension of our percussive possibilities.' - articleno: 13 - author: Anderson Maq - bibtex: "@article{nime23-music-13,\n abstract = {Our idea starts from the necessity\ - \ to investigate space, explore its features, find the potential in acoustic properties,\ - \ and use them as a starting point for our research. How is it possible to create\ - \ a three-dimensional and analog sound system? How are we able to work with instruments\ - \ that can move sound in space? Taking advantage of the use of customized industrial\ - \ items, we will have the possibility to create three-dimensional audio images\ - \ controlled and designed in real-time by the performers. The concept that interests\ - \ us is the single percussive impulse as a music creator. We can change the surface,\ - \ and speed of the execution but the impulse is at the core of every percussive\ - \ action. Solenoids are our artistic medium and the interesting aspect is the\ - \ relationship between us as human performers and the possibilities that arise\ - \ through our interaction with a complex mechanical instrument. 
Thus we see in\ - \ this instrument an extension of our percussive possibilities.},\n articleno\ - \ = {13},\n author = {Anderson Maq},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Online Presentation},\n title = {(ex)tension by Fabrizio\ - \ di Salvo in collaboration with reConvert},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: incollection + ID: nime2012-music-Dupuis2012 + abstract: "Program notes:\n\nStelaextraction uses the electronic extension capabilities\ + \ of the Yerbanaut to construct a musical composition through self-reference across\ + \ different timescales. The Yerbanaut is a custom electro-acoustic kalimba built\ + \ from a yerba mate gourd, with the tines placed in a circular pattern rather\ + \ than the usual horizontal arrangement. Its sensors are intended to make use\ + \ of this new arrangement, with force-sensitive buttons giving the otherwise inert\ + \ left hand expressive capabilities, and a distance sensor allowing the right\ + \ hand's motion to determine aspects of the processing. In Stelaextraction, all\ + \ acoustic and processed sounds are recorded to a single buffer, the contents\ + \ of which can be scrubbed through using the right hand's distance sensor. In\ + \ this way, past musical gestures can be explored and then re-explored, with the\ + \ recursive processing developing self-similar musical patterns over the course\ + \ of the piece.\n\nComposer(s) Credits:\n\nAlexander Dupuis\n\nInstrumentalist(s)\ + \ Credits:\n\nAlexander Dupuis (Yerbanaut)\n\nArtist(s) Biography:\n\nAlexander\ + \ Dupuis develops real-time audiovisual feedback systems mediated by performers,\ + \ sensors, musicians, matrices, bodies, scores, games, and environments. 
He also\ + \ composes, arranges and performs sounds for guitars, liturgies, chamber groups,\ + \ horse duos, microwave cookbooks, and celebrity voices. He graduated from Brown\ + \ University's MEME program as an undergraduate in 2010, and is now in his second\ + \ year of the Digital Musics masters program at Dartmouth College.\n\nConcert\ + \ Venue and Time: Necto, Wednesday May 23, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Alexander Dupuis + bibtex: "@incollection{nime2012-music-Dupuis2012,\n abstract = {Program notes:\n\ + \n\\emph{Stelaextraction} uses the electronic extension capabilities of the Yerbanaut\ + \ to construct a musical composition through self-reference across different timescales.\ + \ The Yerbanaut is a custom electro-acoustic kalimba built from a yerba mate gourd,\ + \ with the tines placed in a circular pattern rather than the usual horizontal\ + \ arrangement. Its sensors are intended to make use of this new arrangement, with\ + \ force-sensitive buttons giving the otherwise inert left hand expressive capabilities,\ + \ and a distance sensor allowing the right hand's motion to determine aspects\ + \ of the processing. In Stelaextraction, all acoustic and processed sounds are\ + \ recorded to a single buffer, the contents of which can be scrubbed through using\ + \ the right hand's distance sensor. In this way, past musical gestures can be\ + \ explored and then re-explored, with the recursive processing developing self-similar\ + \ musical patterns over the course of the piece.\n\nComposer(s) Credits:\n\nAlexander\ + \ Dupuis\n\nInstrumentalist(s) Credits:\n\nAlexander Dupuis (Yerbanaut)\n\nArtist(s)\ + \ Biography:\n\nAlexander Dupuis develops real-time audiovisual feedback systems\ + \ mediated by performers, sensors, musicians, matrices, bodies, scores, games,\ + \ and environments. 
He also composes, arranges and performs sounds for guitars,\ + \ liturgies, chamber groups, horse duos, microwave cookbooks, and celebrity voices.\ + \ He graduated from Brown University's MEME program as an undergraduate in 2010,\ + \ and is now in his second year of the Digital Musics masters program at Dartmouth\ + \ College.\n\nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm},\n address\ + \ = {Ann Arbor, Michigan, U.S.A.},\n author = {Alexander Dupuis},\n booktitle\ + \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie and\ + \ Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + \ Engineering \\& Computer Science and Performing Arts Technology, University\ + \ of Michigan},\n title = {Stelaextraction},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain month: May - note: Online Presentation - title: (ex)tension by Fabrizio di Salvo in collaboration with reConvert - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, + University of Michigan' + title: Stelaextraction + year: 2012 -- ENTRYTYPE: article - ID: nime23-music-19 - abstract: 'Elegy (Ready, Set, Rapture) is the second work composed for Coretet, - a virtual reality musical instrument modeled after traditional bowed stringed - instruments including the violin, viola, cello and double bass. Elegy (Ready, - Set, Rapture) is a solo multi-channel performance for the Coretet double bass - that combines a pre-composed musical chord structure displayed on the neck of - the instrument in real-time with improvisation. 
Coretet is built using the Unreal - Engine and is performed using the Oculus Rift or Quest 2 head-mounted displays - and Oculus Touch controllers. All audio in Coretet is procedurally generated, - using physical models of a bowed string from the Synthesis Toolkit (STK) and a - waveguide plucked string, all running within Pure Data.' - articleno: 19 - author: Rob Hamilton - bibtex: "@article{nime23-music-19,\n abstract = {Elegy (Ready, Set, Rapture) is\ - \ the second work composed for Coretet, a virtual reality musical instrument modeled\ - \ after traditional bowed stringed instruments including the violin, viola, cello\ - \ and double bass. Elegy (Ready, Set, Rapture) is a solo multi-channel performance\ - \ for the Coretet double bass that combines a pre-composed musical chord structure\ - \ displayed on the neck of the instrument in real-time with improvisation. Coretet\ - \ is built using the Unreal Engine and is performed using the Oculus Rift or Quest\ - \ 2 head-mounted displays and Oculus Touch controllers. All audio in Coretet is\ - \ procedurally generated, using physical models of a bowed string from the Synthesis\ - \ Toolkit (STK) and a waveguide plucked string, all running within Pure Data.},\n\ - \ articleno = {19},\n author = {Rob Hamilton},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 5, Friday June\ - \ 2, Centro de Cultura Digital},\n title = {Elegy (Ready, Set, Rapture)},\n url\ - \ = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: incollection + ID: nime2012-music-Burns2012 + abstract: "Program notes:\n\nFieldwork is a software environment for improvised\ + \ performance with electronic sound and animation. 
Two musicians' sounding performances\ + \ are fed into the system, and analyzed for pitch, rhythm, and timbral change.\ + \ When the software recognizes a sharp contrast in one performer's textures or\ + \ gestures, it reflects this change by transforming the sound of the other musician's\ + \ performance. Not only are the musicians responding to one another as in conventional\ + \ improvisation, but they are also able to directly modify their duo partner's\ + \ sound through the software. Fieldwork emphasizes rapid, glitchy, and polyrhythmic\ + \ distortions of the musician's performances, and establishes unpredictable feedback\ + \ processes that encourage unexpected improvisational relationships between the\ + \ performers and computer.\n\nComposer(s) Credits:\n\nChristopher Burns\n\nInstrumentalist(s)\ + \ Credits:\n\nChristopher Burns, Andrew Bishop\n\nArtist(s) Biography:\n\nChristopher\ + \ Burns is a composer, improviser, and multimedia artist. His instrumental chamber\ + \ works weave energetic gestures into densely layered surfaces. Polyphony and\ + \ multiplicity also feature in his electroacoustic music, embodied in gritty,\ + \ rough-hewn textures. As an improviser, Christopher combines an idiosyncratic\ + \ approach to the electric guitar with a wide variety of custom software instruments.\ + \ Recent projects emphasize multimedia and motion capture, integrating performance,\ + \ sound, and animation into a unified experience. Across these disciplines, his\ + \ work emphasizes trajectory and directionality, superimposing and intercutting\ + \ a variety of evolving processes to create form.\nChristopher is an avid archaeologist\ + \ of electroacoustic music, creating and performing new digital realizations of\ + \ classic music by composers including Cage, Ligeti, Lucier, Nancarrow, Nono,\ + \ and Stockhausen. A committed educator, he teaches music composition and technology\ + \ at the University of Wisconsin-Milwaukee. 
He has studied composition with Brian\ + \ Ferneyhough, Jonathan Harvey, Jonathan Berger, Michael Tenzer, and Jan Radzynski.\n\ + \nAndrew Bishop is a versatile multi-instrumentalist, composer, improviser, educator\ + \ and scholar comfortable in a wide variety of musical idioms. He maintains a\ + \ national and international career and serves as an Assistant Professor of Jazz\ + \ and Contemporary Improvisation at the University of Michigan in Ann Arbor. \ + \ Bishop's two recordings as a leader have received widespread acclaim from The\ + \ New York Times, Downbeat Magazine, Chicago Reader, All Music Guide, Cadence\ + \ Magazine, All About Jazz-New York, All About Jazz-Los Angeles, and the Detroit\ + \ Free Press, among others. As a composer and arranger he has received over 20\ + \ commissions, numerous residencies and awards and recognition from ASCAP, the\ + \ Chicago Symphony Orchestra, the Andrew W. Melon Foundation, the National Endowment\ + \ for the Arts, Chamber Music of America and a nomination from the American Academy\ + \ of Arts and Letters. He has performed with artist in virtually every musical\ + \ genre. He earned five degrees in music including a D.M.A. in music composition\ + \ from the University of Michigan.\n\nConcert Venue and Time: Necto, Wednesday\ + \ May 23, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Christopher Burns + bibtex: "@incollection{nime2012-music-Burns2012,\n abstract = {Program notes:\n\n\ + \\emph{Fieldwork} is a software environment for improvised performance with electronic\ + \ sound and animation. Two musicians' sounding performances are fed into the system,\ + \ and analyzed for pitch, rhythm, and timbral change. When the software recognizes\ + \ a sharp contrast in one performer's textures or gestures, it reflects this change\ + \ by transforming the sound of the other musician's performance. 
Not only are\ + \ the musicians responding to one another as in conventional improvisation, but\ + \ they are also able to directly modify their duo partner's sound through the\ + \ software. Fieldwork emphasizes rapid, glitchy, and polyrhythmic distortions\ + \ of the musician's performances, and establishes unpredictable feedback processes\ + \ that encourage unexpected improvisational relationships between the performers\ + \ and computer.\n\nComposer(s) Credits:\n\nChristopher Burns\n\nInstrumentalist(s)\ + \ Credits:\n\nChristopher Burns, Andrew Bishop\n\nArtist(s) Biography:\n\nChristopher\ + \ Burns is a composer, improviser, and multimedia artist. His instrumental chamber\ + \ works weave energetic gestures into densely layered surfaces. Polyphony and\ + \ multiplicity also feature in his electroacoustic music, embodied in gritty,\ + \ rough-hewn textures. As an improviser, Christopher combines an idiosyncratic\ + \ approach to the electric guitar with a wide variety of custom software instruments.\ + \ Recent projects emphasize multimedia and motion capture, integrating performance,\ + \ sound, and animation into a unified experience. Across these disciplines, his\ + \ work emphasizes trajectory and directionality, superimposing and intercutting\ + \ a variety of evolving processes to create form.\nChristopher is an avid archaeologist\ + \ of electroacoustic music, creating and performing new digital realizations of\ + \ classic music by composers including Cage, Ligeti, Lucier, Nancarrow, Nono,\ + \ and Stockhausen. A committed educator, he teaches music composition and technology\ + \ at the University of Wisconsin-Milwaukee. He has studied composition with Brian\ + \ Ferneyhough, Jonathan Harvey, Jonathan Berger, Michael Tenzer, and Jan Radzynski.\n\ + \nAndrew Bishop is a versatile multi-instrumentalist, composer, improviser, educator\ + \ and scholar comfortable in a wide variety of musical idioms. 
He maintains a\ + \ national and international career and serves as an Assistant Professor of Jazz\ + \ and Contemporary Improvisation at the University of Michigan in Ann Arbor. \ + \ Bishop's two recordings as a leader have received widespread acclaim from \\\ + emph{The New York Times, Downbeat Magazine, Chicago Reader, All Music Guide, Cadence\ + \ Magazine, All About Jazz-New York, All About Jazz-Los Angeles, and the Detroit\ + \ Free Press}, among others. As a composer and arranger he has received over\ + \ 20 commissions, numerous residencies and awards and recognition from ASCAP,\ + \ the Chicago Symphony Orchestra, the Andrew W. Melon Foundation, the National\ + \ Endowment for the Arts, Chamber Music of America and a nomination from the American\ + \ Academy of Arts and Letters. He has performed with artist in virtually every\ + \ musical genre. He earned five degrees in music including a D.M.A. in music\ + \ composition from the University of Michigan.\n\nConcert Venue and Time: Necto,\ + \ Wednesday May 23, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author\ + \ = {Christopher Burns},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n day = {21-23},\n editor\ + \ = {Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n\ + \ month = {May},\n publisher = {Electrical Engineering \\& Computer Science and\ + \ Performing Arts Technology, University of Michigan},\n title = {Fieldwork},\n\ + \ year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain month: May - note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' - title: 'Elegy (Ready, Set, Rapture)' - url: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf - year: 2023 + publisher: 'Electrical Engineering \& 
Computer Science and Performing Arts Technology, + University of Michigan' + title: Fieldwork + year: 2012 -- ENTRYTYPE: article - ID: nime23-music-26 - abstract: 'Born from the will to offer a unique live experience, ALEA(s) delivers - boiling, improvised performances mixing live drawing, video animation and electronic - music. Surrounded by their audience, the three members are busy creating their - show, without any safety net. While the complex, loaded electronic music fills - the room, the illustrator’s physical implication in his drawings and the hypnotic - animations projected onto the big screen unite to finish this well-rounded show. ALEA(s) - performances are often described as immersive, intense and crafted.' - articleno: 26 - author: Boris Wilmot - bibtex: "@article{nime23-music-26,\n abstract = {Born from the will to offer a unique\ - \ live experience, ALEA(s) delivers boiling, improvised performances mixing live\ - \ drawing, video animation and electronic music. Surrounded by their audience,\ - \ the three members are busy creating their show, without any safety net. While\ - \ the complex, loaded electronic music fills the room, the illustrator’s physical\ - \ implication in his drawings and the hypnotic animations projected onto the big\ - \ screen unite to finish this well-rounded show. 
ALEA(s) performances are often\ - \ described as immersive, intense and crafted.},\n articleno = {26},\n author\ - \ = {Boris Wilmot},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ - \ = {May},\n note = {Online Presentation},\n title = {ALEA(s)},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: incollection + ID: nime2012-music-Uozumi2012 + abstract: "Program notes:\n\nThis performance aims to approach the next style of\ + \ ``mashup'' and/or ``Cut-up'' via fusion of paradigms of artificial-life and\ + \ turntable. We developed a system named ``SoniCell'' to realize it. SoniCell\ + \ employs four robots called ``cell''. Each cell behaves as a metaphor of life\ + \ based on a simple interaction model with prey-predator relationship. Each cell\ + \ is assigned a music-track in the manner of turntable. Therefore, the system\ + \ reconstructs and mixes the music-tracks via cells' interactions and performers'\ + \ interventions. In this framework, the aspects of the system and performers interactions\ + \ and cells' internal-states create structures of sounds and music from different\ + \ tracks.\n\nComposer(s) Credits:\n\nYuta Uozumi, Keisuke Oyama, Jun Tomioka,\ + \ Hiromi Okamoto, Takayuki Kimura\n\nInstrumentalist(s) Credits:\n\nArtist(s)\ + \ Biography:\n\nYuta Uozumi is a sound artist and agent-base composer was born\ + \ in the suburbs of Osaka, Japan. He started computer music at the age of fifteen.\ + \ He received his Ph.D. from Keio University SFC Graduate School of Media and\ + \ Governance. He is researching and teaching at Tokyo University of Technology.\ + \ He is studying Multi-Agent based dynamic composition with computer or human\ + \ ensembles. In 2002 His CD \"meme?\" was released from Cubicmusic Japan (under\ + \ the name of SamuraiJazz). 
In 2003 agent-based musical interface \"Chase\" was\ + \ accepted by NIME. It is a collaborative project by system-designer, DSP engineer\ + \ and performer. In 2005 an application for agent-based composition ``Gismo''\ + \ and a piece created with the system ``Chain'' (early version) were accepted\ + \ by ICMC(International Computer Music Conference).\n\nKeisuke Oyama, was born\ + \ in Kumamoto, Japan on September 19, 1986. He plays various instruments freely\ + \ in childhood. When he was 18, moved to Tokyo to study jazz theory. After starting\ + \ his career as a jazz musician, he participated various sessions as a guitarist.\ + \ Furthermore, his interest covered electro acoustic in the career. He was enrolled\ + \ at Keio University Shonan Fujisawa Campus (SFC) to learn method and technique\ + \ of computer music and media art in 2009. He is exploring the new expression\ + \ of music.\n\nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Yuta Uozumi and Keisuke Oyama and Jun Tomioka and Hiromi Okamoto and Takayuki + Kimura + bibtex: "@incollection{nime2012-music-Uozumi2012,\n abstract = {Program notes:\n\ + \nThis performance aims to approach the next style of ``mashup'' and/or ``Cut-up''\ + \ via fusion of paradigms of artificial-life and turntable. We developed a system\ + \ named ``SoniCell'' to realize it. SoniCell employs four robots called ``cell''.\ + \ Each cell behaves as a metaphor of life based on a simple interaction model\ + \ with prey-predator relationship. Each cell is assigned a music-track in the\ + \ manner of turntable. Therefore, the system reconstructs and mixes the music-tracks\ + \ via cells' interactions and performers' interventions. 
In this framework, the\ + \ aspects of the system and performers interactions and cells' internal-states\ + \ create structures of sounds and music from different tracks.\n\nComposer(s)\ + \ Credits:\n\nYuta Uozumi, Keisuke Oyama, Jun Tomioka, Hiromi Okamoto, Takayuki\ + \ Kimura\n\nInstrumentalist(s) Credits:\n\nArtist(s) Biography:\n\nYuta Uozumi\ + \ is a sound artist and agent-base composer was born in the suburbs of Osaka,\ + \ Japan. He started computer music at the age of fifteen. He received his Ph.D.\ + \ from Keio University SFC Graduate School of Media and Governance. He is researching\ + \ and teaching at Tokyo University of Technology. He is studying Multi-Agent based\ + \ dynamic composition with computer or human ensembles. In 2002 His CD \"meme?\"\ + \ was released from Cubicmusic Japan (under the name of SamuraiJazz). In 2003\ + \ agent-based musical interface \"Chase\" was accepted by NIME. It is a collaborative\ + \ project by system-designer, DSP engineer and performer. In 2005 an application\ + \ for agent-based composition ``Gismo'' and a piece created with the system ``Chain''\ + \ (early version) were accepted by ICMC(International Computer Music Conference).\n\ + \nKeisuke Oyama, was born in Kumamoto, Japan on September 19, 1986. He plays various\ + \ instruments freely in childhood. When he was 18, moved to Tokyo to study jazz\ + \ theory. After starting his career as a jazz musician, he participated various\ + \ sessions as a guitarist. Furthermore, his interest covered electro acoustic\ + \ in the career. He was enrolled at Keio University Shonan Fujisawa Campus (SFC)\ + \ to learn method and technique of computer music and media art in 2009. 
He is\ + \ exploring the new expression of music.\n\nConcert Venue and Time: Necto, Wednesday\ + \ May 23, 9:00pm},\n address = {Ann Arbor, Michigan, U.S.A.},\n author = {Yuta\ + \ Uozumi and Keisuke Oyama and Jun Tomioka and Hiromi Okamoto and Takayuki Kimura},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n day = {21-23},\n editor = {Georg Essl and Brent Gillespie\ + \ and Michael Gurevich and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical\ + \ Engineering \\& Computer Science and Performing Arts Technology, University\ + \ of Michigan},\n title = {four fragments---A Performance for Swarming Robotics},\n\ + \ year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain month: May - note: Online Presentation - title: ALEA(s) - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, + University of Michigan' + title: four fragments---A Performance for Swarming Robotics + year: 2012 -- ENTRYTYPE: article - ID: nime23-music-28 - abstract: 'The Center of the Universe was inspired by my impression of New York - City after several trips to this world center. When I stood at the top of the - Empire State Building, I felt that it absorbed the energy of the entire universe. - People with different backgrounds travel to New York from all over the world, - creating a colorful and spectacular city. The primary material in this work is - the text “The Center of the Universe.” This text is stated and manipulated in - various languages, including English, Spanish, French, German, Italian, Russian, - Chinese, Japanese, Korean, and Thai. All the human voices come from the sampled - AI voices of the MacOS system. 
Two Bluetooth Nintendo Wiimote Controllers provide - the capability to stand untethered at center stage and play this composition.' - articleno: 28 - author: Sunhuimei Xia - bibtex: "@article{nime23-music-28,\n abstract = {The Center of the Universe was\ - \ inspired by my impression of New York City after several trips to this world\ - \ center. When I stood at the top of the Empire State Building, I felt that it\ - \ absorbed the energy of the entire universe. People with different backgrounds\ - \ travel to New York from all over the world, creating a colorful and spectacular\ - \ city. The primary material in this work is the text “The Center of the Universe.”\ - \ This text is stated and manipulated in various languages, including English,\ - \ Spanish, French, German, Italian, Russian, Chinese, Japanese, Korean, and Thai.\ - \ All the human voices come from the sampled AI voices of the MacOS system. Two\ - \ Bluetooth Nintendo Wiimote Controllers provide the capability to stand untethered\ - \ at center stage and play this composition.},\n articleno = {28},\n author =\ - \ {Sunhuimei Xia},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ - \ = {May},\n note = {Online Presentation},\n title = {The Center of the Universe},\n\ - \ url = {https://www.nime2023.org/program/online-in-person-concerts},\n year =\ - \ {2023}\n}\n" +- ENTRYTYPE: incollection + ID: nime2012-music-Tremblay2012 + abstract: "Program notes:\n\nA bass guitar and a laptop.\n\nNo sequence, no set\ + \ list, no programme, no gizmo, no intention, no fireworks, no meaning, no feature,\ + \ no beat, no argument, no nothing.\n\nJust this very moment with my meta-instrument:\ + \ a third sandbox in which I play in public for the sixth time, here, whatever\ + \ happens.\n\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\nPierre Alexandre\ + \ Tremblay\n\nArtist(s) Biography:\n\nPierre Alexandre 
Tremblay (Montréal, 1975)\ + \ is a composer and a performer on bass guitar and sound processing devices, in\ + \ solo and within the groups ars circa musicæ (Paris, France), de type inconnu\ + \ (Montréal, Québec), and Splice (London, UK). His music is mainly released by\ + \ Empreintes DIGITALes and Ora. He is Reader in Composition and Improvisation\ + \ at the University of Huddersfield (UK) where he also is Director of the Electronic\ + \ Music Studios. He previously worked in popular music as producer and bassist,\ + \ and is interested in videomusic and coding. He likes oolong tea, reading, and\ + \ walking. As a founding member of the no-tv collective, he does not own a working\ + \ television set. www.pierrealexandretremblay.com\n\nConcert Venue and Time: Necto,\ + \ Wednesday May 23, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: Pierre~Alexandre Tremblay + bibtex: "@incollection{nime2012-music-Tremblay2012,\n abstract = {Program notes:\n\ + \nA bass guitar and a laptop.\n\nNo sequence, no set list, no programme, no gizmo,\ + \ no intention, no fireworks, no meaning, no feature, no beat, no argument, no\ + \ nothing.\n\nJust this very moment with my meta-instrument: a third sandbox in\ + \ which I play in public for the sixth time, here, whatever happens.\n\nComposer(s)\ + \ Credits:\n\nInstrumentalist(s) Credits:\n\nPierre Alexandre Tremblay\n\nArtist(s)\ + \ Biography:\n\nPierre Alexandre Tremblay (Montr\\'{e}al, 1975) is a composer\ + \ and a performer on bass guitar and sound processing devices, in solo and within\ + \ the groups ars circa music\\ae (Paris, France), de type inconnu (Montr\\'{e}al,\ + \ Qu\\'{e}bec), and Splice (London, UK). His music is mainly released by Empreintes\ + \ DIGITALes and Ora. 
He is Reader in Composition and Improvisation at the University\ + \ of Huddersfield (UK) where he also is Director of the Electronic Music Studios.\ + \ He previously worked in popular music as producer and bassist, and is interested\ + \ in videomusic and coding. He likes oolong tea, reading, and walking. As a founding\ + \ member of the no-tv collective, he does not own a working television set. www.pierrealexandretremblay.com\n\ + \nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm},\n address = {Ann Arbor,\ + \ Michigan, U.S.A.},\n author = {Pierre~Alexandre Tremblay},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ day = {21-23},\n editor = {Georg Essl and Brent Gillespie and Michael Gurevich\ + \ and Sile O'Modhrain},\n month = {May},\n publisher = {Electrical Engineering\ + \ \\& Computer Science and Performing Arts Technology, University of Michigan},\n\ + \ title = {Sandbox\\#3.6},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain month: May - note: Online Presentation - title: The Center of the Universe - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, + University of Michigan' + title: "Sandbox#3.6" + year: 2012 -- ENTRYTYPE: article - ID: nime23-music-29 - abstract: '“Chomsky Hash” is a piece for improvisation, electric guitar, and live - electronics. The piece utilizes traditional guitar effects processing with a variety - of unconventional effects for the instrument, along with a surround panner setup - for quadraphonic sound. 
The laptop and electronic elements also act as improvising - agent, with a variety of chance operations that allow the computer to make decisions - for itself in performance. The title is a reference to the famous debate between - Noam Chomsky and Michel Foucault. Famously, Foucault asked to be paid in a large - amount of hash for his participation in the debate. Friends would say that on - special occasions Foucault would break out “that Chomsky Hash”. The relevance - of this debate to the piece is the elements I’m working with and transforming. - The electric guitar itself has a long history in American popular music and has - a lot of specific cultural connotations that could seem traditional even though - at times it’s been a counter cultural symbol. With the use of DAW’s such as Ableton - Live or Max/MSP, the electric guitar can be further altered and expanded upon. - Noam Chomsky is considered a radical and countercultural figure in American politics, - but within the debate with Michel Foucault comes off as traditional and conservative - compared to Foucault’s Dionysian and hedonistic character traits. The debate itself - is an interesting synthesis of the two thinkers'' ideas. The main driving factors - of the piece are improvisation, timbral transformation, live electronics processing, - and spatialization. Since 2019, I’ve been working on bringing together my instrumental - background as a guitarist and improviser with my interest in electronic music. - This piece is a part of a series of pieces for electric guitar & live electronics.' - articleno: 29 - author: Seth A Davis - bibtex: "@article{nime23-music-29,\n abstract = {“Chomsky Hash” is a piece for improvisation,\ - \ electric guitar, and live electronics. The piece utilizes traditional guitar\ - \ effects processing with a variety of unconventional effects for the instrument,\ - \ along with a surround panner setup for quadraphonic sound. 
The laptop and electronic\ - \ elements also act as improvising agent, with a variety of chance operations\ - \ that allow the computer to make decisions for itself in performance. The title\ - \ is a reference to the famous debate between Noam Chomsky and Michel Foucault.\ - \ Famously, Foucault asked to be paid in a large amount of hash for his participation\ - \ in the debate. Friends would say that on special occasions Foucault would break\ - \ out “that Chomsky Hash”. The relevance of this debate to the piece is the elements\ - \ I’m working with and transforming. The electric guitar itself has a long history\ - \ in American popular music and has a lot of specific cultural connotations that\ - \ could seem traditional even though at times it’s been a counter cultural symbol.\ - \ With the use of DAW’s such as Ableton Live or Max/MSP, the electric guitar can\ - \ be further altered and expanded upon. Noam Chomsky is considered a radical and\ - \ countercultural figure in American politics, but within the debate with Michel\ - \ Foucault comes off as traditional and conservative compared to Foucault’s Dionysian\ - \ and hedonistic character traits. The debate itself is an interesting synthesis\ - \ of the two thinkers' ideas. The main driving factors of the piece are improvisation,\ - \ timbral transformation, live electronics processing, and spatialization. Since\ - \ 2019, I’ve been working on bringing together my instrumental background as a\ - \ guitarist and improviser with my interest in electronic music. 
This piece is\ - \ a part of a series of pieces for electric guitar \\& live electronics.},\n articleno\ - \ = {29},\n author = {Seth A Davis},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Online Presentation},\n title = {“Chomsky Hash” for\ - \ improvisation, electric guitar, and live electronics},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: incollection + ID: nime2012-music-dAlessandroSchwarz2012 + abstract: "Program notes:\n\nDaisyLab is a duet performance for two new interfaces\ + \ for musical expression that have in common the ability to generate versatile\ + \ vocal material. Diemo Schwarz's instrument uses a variety of sensors on the\ + \ top of corpus-based concatenative synthesis, which has been fed with voice sounds\ + \ for this performance. Nicolas d'Alessandro plays the HandSketch interface over\ + \ the new MAGE speech synthesizer, bringing tangible inputs to an emerging speech\ + \ synthesis technique. Both systems have been submitted as long papers for this\ + \ 2012 edition of NIME. Together these two performers explore the boundaries between\ + \ vocal and non-vocal sonic spaces, aiming at deconstructing the humankind's most\ + \ ubiquitous communicative channel through a compositionally directed improvisation,\ + \ a ``comprovisation.''\n\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\ + \nNicolas d'Alessandro (HandSketch, iPad), Diemo Schwarz (CataRT, gestural controllers)\n\ + \nArtist(s) Biography:\n\nNicolas d'Alessandro obtained his PhD in Applied Sciences\ + \ from the University of Mons in 2009. 
From a lifelong interest in musical instruments\ + \ and his acquired taste in speech and singing processing, he will incrementally\ + \ shape a research topic that aims at using gestural control of sound in order\ + \ to gain insights in speech and singing production. He works with Prof. T. Dutoit\ + \ for a PhD at the University of Mons between 2004 and 2009. Late 2009, he moves\ + \ to Canada, to take a postdoc position with Prof. S. Fels at the MAGIC Lab, University\ + \ of British Columbia, where he will work on the DiVA project. There he will also\ + \ organize the first p3s workshop. Since December 2011, he is back in the University\ + \ of Mons and leads the MAGE project. Nicolas is also an active electroacoustic\ + \ performer in and around Belgium, playing guitar and invented instruments in\ + \ various performances.\n\nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm" + address: 'Ann Arbor, Michigan, U.S.A.' + author: 'Nicolas d''Alessandro and, Diemo Schwarz' + bibtex: "@incollection{nime2012-music-dAlessandroSchwarz2012,\n abstract = {Program\ + \ notes:\n\n\\emph{DaisyLab} is a duet performance for two new interfaces for\ + \ musical expression that have in common the ability to generate versatile vocal\ + \ material. Diemo Schwarz's instrument uses a variety of sensors on the top of\ + \ corpus-based concatenative synthesis, which has been fed with voice sounds for\ + \ this performance. Nicolas d'Alessandro plays the HandSketch interface over the\ + \ new MAGE speech synthesizer, bringing tangible inputs to an emerging speech\ + \ synthesis technique. Both systems have been submitted as long papers for this\ + \ 2012 edition of NIME. 
Together these two performers explore the boundaries between\ + \ vocal and non-vocal sonic spaces, aiming at deconstructing the humankind's most\ + \ ubiquitous communicative channel through a compositionally directed improvisation,\ + \ a ``comprovisation.''\n\nComposer(s) Credits:\n\nInstrumentalist(s) Credits:\n\ + \nNicolas d'Alessandro (HandSketch, iPad), Diemo Schwarz (CataRT, gestural controllers)\n\ + \nArtist(s) Biography:\n\nNicolas d'Alessandro obtained his PhD in Applied Sciences\ + \ from the University of Mons in 2009. From a lifelong interest in musical instruments\ + \ and his acquired taste in speech and singing processing, he will incrementally\ + \ shape a research topic that aims at using gestural control of sound in order\ + \ to gain insights in speech and singing production. He works with Prof. T. Dutoit\ + \ for a PhD at the University of Mons between 2004 and 2009. Late 2009, he moves\ + \ to Canada, to take a postdoc position with Prof. S. Fels at the MAGIC Lab, University\ + \ of British Columbia, where he will work on the DiVA project. There he will also\ + \ organize the first p3s workshop. Since December 2011, he is back in the University\ + \ of Mons and leads the MAGE project. 
Nicolas is also an active electroacoustic\ + \ performer in and around Belgium, playing guitar and invented instruments in\ + \ various performances.\n\nConcert Venue and Time: Necto, Wednesday May 23, 9:00pm},\n\ + \ address = {Ann Arbor, Michigan, U.S.A.},\n author = {Nicolas d'Alessandro and,\ + \ Diemo Schwarz},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n day = {21-23},\n editor = {Georg\ + \ Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain},\n month\ + \ = {May},\n publisher = {Electrical Engineering \\& Computer Science and Performing\ + \ Arts Technology, University of Michigan},\n title = {DaisyLab, a Phonetic Deconstruction\ + \ of Humankind},\n year = {2012}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton + day: 21-23 + editor: Georg Essl and Brent Gillespie and Michael Gurevich and Sile O'Modhrain month: May - note: Online Presentation - title: '“Chomsky Hash” for improvisation, electric guitar, and live electronics' - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + publisher: 'Electrical Engineering \& Computer Science and Performing Arts Technology, + University of Michigan' + title: 'DaisyLab, a Phonetic Deconstruction of Humankind' + year: 2012 -- ENTRYTYPE: article - ID: nime23-music-36 - abstract: 'Galactic Madness is a structured improvisational network piece inspired - by a set of pictures of the galaxy taken by NASA''s James Webb Space Telescope(released - in June 2022). After closely observing the pictures for hours, I wanted to create - a mesmerizing system that resembles the infinite and enigmatic nature of the galaxy.' 
- articleno: 36 - author: Qiujiang Lu - bibtex: "@article{nime23-music-36,\n abstract = {Galactic Madness is a structured\ - \ improvisational network piece inspired by a set of pictures of the galaxy taken\ - \ by NASA's James Webb Space Telescope(released in June 2022). After closely observing\ - \ the pictures for hours, I wanted to create a mesmerizing system that resembles\ - \ the infinite and enigmatic nature of the galaxy.},\n articleno = {36},\n author\ - \ = {Qiujiang Lu},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ - \ = {May},\n note = {Online Presentation},\n title = {Galactic Madness},\n url\ - \ = {https://www.nime2023.org/program/online-in-person-concerts},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Miyama2010 + address: 'Sydney, Australia' + author: Chikashi Miyama + bibtex: "@inproceedings{nime2010-music-Miyama2010,\n address = {Sydney, Australia},\n\ + \ author = {Chikashi Miyama},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Black Vox},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: Galactic Madness - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Black Vox + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-52 - abstract: 
'“Refraction Interlude” features a solo performer surrounded by a battery - of gongs and cymbals that are activated by surfaces transducers. The metal percussion - responds to the performer’s improvisation, seeming to sound autonomously. The - work can be performed by any instrument. Each new performer records a set of samples, - short improvisations centered around a specified set of techniques. These recordings - are then analyzed and used to as a foundation for forms of mixed synthesis, generating - sounds that are tailored to the specific acoustical properties of the metal percussion. - This iteration of the work is a new realization for piano.' - articleno: 52 - author: Matthew Goodheart - bibtex: "@article{nime23-music-52,\n abstract = {“Refraction Interlude” features\ - \ a solo performer surrounded by a battery of gongs and cymbals that are activated\ - \ by surfaces transducers. The metal percussion responds to the performer’s improvisation,\ - \ seeming to sound autonomously. The work can be performed by any instrument.\ - \ Each new performer records a set of samples, short improvisations centered around\ - \ a specified set of techniques. These recordings are then analyzed and used to\ - \ as a foundation for forms of mixed synthesis, generating sounds that are tailored\ - \ to the specific acoustical properties of the metal percussion. 
This iteration\ - \ of the work is a new realization for piano.},\n articleno = {52},\n author =\ - \ {Matthew Goodheart},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ - \ = {May},\n note = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n\ - \ title = {Refraction Interlude: piano},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Collins2010 + address: 'Sydney, Australia' + author: Nicolas Collins + bibtex: "@inproceedings{nime2010-music-Collins2010,\n address = {Sydney, Australia},\n\ + \ author = {Nicolas Collins},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Salvage},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' - title: 'Refraction Interlude: piano' - url: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Salvage + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-66 - abstract: 'Our duo -ence improvises live remixes of augmented 7” vinyl records combined - with performance on, and sequenced sampling of, custom-made elecroacoustic instruments. 
- Our collaboration draws on Anon2’s experience in art installation contexts and - with electronic dance music group Anon, and Anon1’s work as an instrument inventor, - sound designer and improviser in groups such as Anon and Anon. Our performance - for NIME 2023 begins by asking, what kind of strange rhythmic futures will continue - to be built at the intersection of Mexican and Irish cultures? To aid this endeavour, - we invoke the mythology of Batallón de San Patricio, a group of disenfranchised - European (largely Irish) immigrants and African slaves who defected from the United - States Army to fight on the side of the Mexican Army during the Mexican–American - War of 1846–48. The battalion has been memorialised by a broad range of musicians, - novelists and filmmakers. These accounts provide stories of cultural resonances - in the lives of diverse peoples, unlikely collectives who formed allegiances through - their shared oppression at the hands of dominant imperialist powers. Our storytelling - here is similar, but also different. While we are interested in resonances, allegiances, - and points of connection that form moments of tense but productive co-existences - between different communities, we are likewise drawn towards the precarious, noisy - and uncertain material processes enacted in such meetings. Thus, we seek a kind - of dissensual groove, an oscillation between distance and relation, remixing fragments - from Irish and Mexican music traditions into fragile and ever-collapsing rhythmic - architectures, creating spaces in which to move.' - articleno: 66 - author: Paul Stapleton and Ricki O'Rawe - bibtex: "@article{nime23-music-66,\n abstract = {Our duo -ence improvises live remixes\ - \ of augmented 7” vinyl records combined with performance on, and sequenced sampling\ - \ of, custom-made elecroacoustic instruments. 
Our collaboration draws on Anon2’s\ - \ experience in art installation contexts and with electronic dance music group\ - \ Anon, and Anon1’s work as an instrument inventor, sound designer and improviser\ - \ in groups such as Anon and Anon. Our performance for NIME 2023 begins by asking,\ - \ what kind of strange rhythmic futures will continue to be built at the intersection\ - \ of Mexican and Irish cultures? To aid this endeavour, we invoke the mythology\ - \ of Batallón de San Patricio, a group of disenfranchised European (largely Irish)\ - \ immigrants and African slaves who defected from the United States Army to fight\ - \ on the side of the Mexican Army during the Mexican–American War of 1846–48.\ - \ The battalion has been memorialised by a broad range of musicians, novelists\ - \ and filmmakers. These accounts provide stories of cultural resonances in the\ - \ lives of diverse peoples, unlikely collectives who formed allegiances through\ - \ their shared oppression at the hands of dominant imperialist powers. Our storytelling\ - \ here is similar, but also different. 
While we are interested in resonances,\ - \ allegiances, and points of connection that form moments of tense but productive\ - \ co-existences between different communities, we are likewise drawn towards the\ - \ precarious, noisy and uncertain material processes enacted in such meetings.\ - \ Thus, we seek a kind of dissensual groove, an oscillation between distance and\ - \ relation, remixing fragments from Irish and Mexican music traditions into fragile\ - \ and ever-collapsing rhythmic architectures, creating spaces in which to move.},\n\ - \ articleno = {66},\n author = {Paul Stapleton and Ricki O'Rawe},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert\ - \ 4, Thursday June 1, Centro de Cultura Digital},\n title = {Where is that Batallón\ - \ de San Patricio Groove?},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Perrin2010 + address: 'Sydney, Australia' + author: Stéphane Perrin and Utako Shibatsuji + bibtex: "@inproceedings{nime2010-music-Perrin2010,\n address = {Sydney, Australia},\n\ + \ author = {Stéphane Perrin and Utako Shibatsuji},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ + \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {The Ningen Dogs Orchestra},\n\ + \ year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' - title: 'Where is that Batallón de San Patricio Groove?' 
- url: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: The Ningen Dogs Orchestra + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-73 - abstract: 'This piece is an mobile outdoor performance where a dancer wearing mobile - wireless IMU sensors controls sound generated by a laptop through their movements. - The performance is mobile, and can take place in any available public space. The - dancer''s movements acquired by the sensors drive sound generation algorithms - running on SuperCollider and output from a mobile speaker. Since all the hardware - is commercially available and relatively inexpensive, this system is easy to build. - Through this work, we are showing that a performance that is not bound by location - is possible through a relatively inexpensive and easy-to-construct performance - system. The title "Unboxing" refers to escaping from the economic, social, political, - and artistic constraints of conventional performances. It also alludes to “unboxing” - as an internet meme in online videos where one does not know what is contained - in the box before it is opened - as the performance data and the resulting sound - structures cannot be evaluated beforehand. This project aims to open up computer - music creativity to a wider audience through frugal technology and escape Western-centric - concepts of music and dances. As alternative, we propose the term “electronic - sound performance”.' - articleno: 73 - author: Takumi Ikeda and Hanako Atake and Iannis Zannos - bibtex: "@article{nime23-music-73,\n abstract = {This piece is an mobile outdoor\ - \ performance where a dancer wearing mobile wireless IMU sensors controls sound\ - \ generated by a laptop through their movements. 
The performance is mobile, and\ - \ can take place in any available public space. The dancer's movements acquired\ - \ by the sensors drive sound generation algorithms running on SuperCollider and\ - \ output from a mobile speaker. Since all the hardware is commercially available\ - \ and relatively inexpensive, this system is easy to build. Through this work,\ - \ we are showing that a performance that is not bound by location is possible\ - \ through a relatively inexpensive and easy-to-construct performance system. The\ - \ title \"Unboxing\" refers to escaping from the economic, social, political,\ - \ and artistic constraints of conventional performances. It also alludes to “unboxing”\ - \ as an internet meme in online videos where one does not know what is contained\ - \ in the box before it is opened - as the performance data and the resulting sound\ - \ structures cannot be evaluated beforehand. This project aims to open up computer\ - \ music creativity to a wider audience through frugal technology and escape Western-centric\ - \ concepts of music and dances. 
As alternative, we propose the term “electronic\ - \ sound performance”.},\n articleno = {73},\n author = {Takumi Ikeda and Hanako\ - \ Atake and Iannis Zannos},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Online Presentation},\n title = {Unboxing: Public-Space\ - \ Performance With Wearable-Sensors And SuperCollider},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Kanda2010 + address: 'Sydney, Australia' + author: Ryo Kanda + bibtex: "@inproceedings{nime2010-music-Kanda2010,\n address = {Sydney, Australia},\n\ + \ author = {Ryo Kanda},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Tennendai no 0m0s},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: 'Unboxing: Public-Space Performance With Wearable-Sensors And SuperCollider' - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 - - -- ENTRYTYPE: article - ID: nime23-music-1099 - abstract: 'Music(re)ality is a collaborative musical performance between the virtual - and real world. Two musicians will present a musical improvisation, with one performing - with an iPad instrument and the others using a freehand augmented reality musical - instrument. While musicians are physically located in the space, the music jamming - will happen across a virtual and real environment. 
How will the collaboration - happen and what is a mixed reality musical performance? Through sonic feedback - or performers'' musical gestures? It will all be demonstrated in this performance.' - articleno: 1099 - author: Yichen Wang and Charles Patrick Martin - bibtex: "@article{nime23-music-1099,\n abstract = {Music(re)ality is a collaborative\ - \ musical performance between the virtual and real world. Two musicians will present\ - \ a musical improvisation, with one performing with an iPad instrument and the\ - \ others using a freehand augmented reality musical instrument. While musicians\ - \ are physically located in the space, the music jamming will happen across a\ - \ virtual and real environment. How will the collaboration happen and what is\ - \ a mixed reality musical performance? Through sonic feedback or performers' musical\ - \ gestures? It will all be demonstrated in this performance.},\n articleno = {1099},\n\ - \ author = {Yichen Wang and Charles Patrick Martin},\n booktitle = {Music Proceedings\ + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Tennendai no 0m0s + year: 2010 + + +- ENTRYTYPE: inproceedings + ID: nime2010-music-Tomczak2010 + address: 'Sydney, Australia' + author: Sebastian Tomczak and Poppi Doser + bibtex: "@inproceedings{nime2010-music-Tomczak2010,\n address = {Sydney, Australia},\n\ + \ author = {Sebastian Tomczak and Poppi Doser},\n booktitle = {Music Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ - \ title = {Music(re)ality: A Collaborative Improvisation between Virtual and Real\ - \ World},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" + \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, 
Somaya Langley, Garth Paine,\ + \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {Antia},\n year = {2010}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: 'Music(re)ality: A Collaborative Improvisation between Virtual and Real World' - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Antia + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1101 - abstract: 'The performance of Sculpture DAXR is an offshoot of the Oscuterium project, - created by the group, RedSpills, a collaborative trio of new musical instrument - technologists, artists and performers: Michał Seta (Montreal, Quebec, Canada), - Dirk Stromberg (Republic of Singapore), and D. Andrew Stewart (Lethbridge, Alberta, - Canada). While Sculpture DAXR can be experienced as a live, in-person, multi-media - show involving the karlax digital musical instrument, live coding, video and sound - projection, this work is best experienced in its original form: a hybrid performance - and experience in which the participants (performer and audience) inhabit both - a live venue in real life (IRL) and a 3D virtual reality (VR) meeting point in - Mozilla''s real-time communications platform, Hubs. The innovative nature of this - work arises from the production of sound directly within the Hubs environment - using the Faust (Functional Audio Stream) programming language (i.e., browser-based - software synthesis engine). Both sound creation and 3D objects are transformed - by real-time data transmitted from a DMI over the internet.' 
- articleno: 1101 - author: D Stewart - bibtex: "@article{nime23-music-1101,\n abstract = {The performance of Sculpture\ - \ DAXR is an offshoot of the Oscuterium project, created by the group, RedSpills,\ - \ a collaborative trio of new musical instrument technologists, artists and performers:\ - \ Michał Seta (Montreal, Quebec, Canada), Dirk Stromberg (Republic of Singapore),\ - \ and D. Andrew Stewart (Lethbridge, Alberta, Canada). While Sculpture DAXR can\ - \ be experienced as a live, in-person, multi-media show involving the karlax digital\ - \ musical instrument, live coding, video and sound projection, this work is best\ - \ experienced in its original form: a hybrid performance and experience in which\ - \ the participants (performer and audience) inhabit both a live venue in real\ - \ life (IRL) and a 3D virtual reality (VR) meeting point in Mozilla's real-time\ - \ communications platform, Hubs. The innovative nature of this work arises from\ - \ the production of sound directly within the Hubs environment using the Faust\ - \ (Functional Audio Stream) programming language (i.e., browser-based software\ - \ synthesis engine). 
Both sound creation and 3D objects are transformed by real-time\ - \ data transmitted from a DMI over the internet.},\n articleno = {1101},\n author\ - \ = {D Stewart},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ - \ = {May},\n note = {Live Concert 4, Thursday June 1, Centro de Cultura Digital},\n\ - \ title = {Sculpture DAXR},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Ganburged2010 + abstract: 'Bukhchuluun Ganburged , Fiddle and Throat Singing , Martin Slawig , Roger + Mills' + address: 'Sydney, Australia' + author: Bukhchuluun Ganburged and Martin Slawig and Roger Mills + bibtex: "@inproceedings{nime2010-music-Ganburged2010,\n abstract = {Bukhchuluun\ + \ Ganburged , Fiddle and Throat Singing , Martin Slawig , Roger Mills},\n address\ + \ = {Sydney, Australia},\n author = {Bukhchuluun Ganburged and Martin Slawig and\ + \ Roger Mills},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Andrew Johnston, Sam\ + \ Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Ethernet Orchestra - Remote Networked Performance},\n\ + \ year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' - title: Sculpture DAXR - url: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney 
+ title: Ethernet Orchestra - Remote Networked Performance + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1110 - abstract: 'Transcontinental Grapevine is a new crowdsourced telematic work by the - Virginia Tech Linux Laptop Orchestra (L2Ork) that was co-created and performed - with collaborators from UNTREF, Buenos Aires, Argentina. The work is inspired - by the introductory loop of the "Grapevine" song by Lane 8 and Elderbrook and - utilizes L2Ork Tweeter online collaborative musicking platform that allows for - perfect sync among performers regardless the distance (in this case two groups - of performers, 11 in total, were over 5,000 miles apart). The work’s EDM aesthetics - intentionally seeks to test the limits of the newfound platform’s ability to sync - players, as well as to expand the telematic musical vocabulary. The work was - co-created by the participants, each offering their own monophonic contributions. - It starts with Lane 8''s "Grapevine" intro, and then crossfades into a crowdsourced - theme and variations.' - articleno: 1110 - author: Ivica Ico Bukvic - bibtex: "@article{nime23-music-1110,\n abstract = {Transcontinental Grapevine is\ - \ a new crowdsourced telematic work by the Virginia Tech Linux Laptop Orchestra\ - \ (L2Ork) that was co-created and performed with collaborators from UNTREF, Buenos\ - \ Aires, Argentina. The work is inspired by the introductory loop of the \"Grapevine\"\ - \ song by Lane 8 and Elderbrook and utilizes L2Ork Tweeter online collaborative\ - \ musicking platform that allows for perfect sync among performers regardless\ - \ the distance (in this case two groups of performers, 11 in total, were over\ - \ 5,000 miles apart). The work’s EDM aesthetics intentionally seeks to test the\ - \ limits of the newfound platform’s ability to sync players, as well as to expand\ - \ the telematic musical vocabulary. The work was co-created by the participants,\ - \ each offering their own monophonic contributions. 
It starts with Lane 8's \"\ - Grapevine\" intro, and then crossfades into a crowdsourced theme and variations.},\n\ - \ articleno = {1110},\n author = {Ivica Ico Bukvic},\n booktitle = {Music Proceedings\ +- ENTRYTYPE: inproceedings + ID: nime2010-music-Pritchard2010 + address: 'Sydney, Australia' + author: Bob Pritchard and Marguerite Witvoet + bibtex: "@inproceedings{nime2010-music-Pritchard2010,\n address = {Sydney, Australia},\n\ + \ author = {Bob Pritchard and Marguerite Witvoet},\n booktitle = {Music Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 3, Thursday\ - \ June 1, Centro de Cultura Digital},\n title = {Transcontinental Grapevine},\n\ - \ url = {https://www.nime.org/proceedings/2023/nime23_concert_3.pdf},\n year =\ - \ {2023}\n}\n" + \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ + \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {What Does A Body Know?},\n\ + \ year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 3, Thursday June 1, Centro de Cultura Digital' - title: Transcontinental Grapevine - url: https://www.nime.org/proceedings/2023/nime23_concert_3.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: 'What Does A Body Know?' 
+ year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1113 - abstract: 'T/ensor/~ (version 0.3) is a prototype of a dynamic performance system - developed in MAX that involves adaptive digital signal processing modules and - generative processes towards exploring the field and performance practice of human-machine - improvisation. The system is the result of a pilot, artistic research study entitled - ‘Improvisation Technologies and Creative Machines: The Performer-Instrument Relational - Milieu’. Our proposal for the NIME 2023 conference involves a c.10–12 minutes - improvised performance with the system (drum-kit performer and T/ensor/~ 0.3).' - articleno: 1113 - author: Dimitris Papageorgiou - bibtex: "@article{nime23-music-1113,\n abstract = {T/ensor/~ (version 0.3) is a\ - \ prototype of a dynamic performance system developed in MAX that involves adaptive\ - \ digital signal processing modules and generative processes towards exploring\ - \ the field and performance practice of human-machine improvisation. The system\ - \ is the result of a pilot, artistic research study entitled ‘Improvisation Technologies\ - \ and Creative Machines: The Performer-Instrument Relational Milieu’. Our proposal\ - \ for the NIME 2023 conference involves a c.10–12 minutes improvised performance\ - \ with the system (drum-kit performer and T/ensor/~ 0.3).},\n articleno = {1113},\n\ - \ author = {Dimitris Papageorgiou},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos},\n\ - \ title = {T/ensor/~ 0.3},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_2.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Barrass2010 + abstract: "Program notes: The Cellist's brain signals and pulse figure against the\ + \ Baroque Basso Continuo that they are playing. 
The Cellist wears the state of\ + \ the art Enobio system which transmits EEG, ECG and EOG from brain activity,\ + \ eye movements, muscle contractions, and pulse to a laptop computer. These signals\ + \ are mapped into sound in realtime with specially designed sonication algorithms.\n\ + \nAbout the performers:\nStephen Barrass teaches and researches Digital Design\ + \ and Media Arts at the University of Canberra.\nDiane Whitmer is a Neuroscientist\ + \ at Starlab Pty. Ltd. in Barcelona.\nGeoffrey Gartner is a Cellist in the Ensemble\ + \ Offspring in Sydney" + address: 'Sydney, Australia' + author: Stephen Barrass and Diane Whitmer + bibtex: "@inproceedings{nime2010-music-Barrass2010,\n abstract = {Program notes:\ + \ The Cellist's brain signals and pulse figure against the Baroque Basso Continuo\ + \ that they are playing. The Cellist wears the state of the art Enobio system\ + \ which transmits EEG, ECG and EOG from brain activity, eye movements, muscle\ + \ contractions, and pulse to a laptop computer. These signals are mapped into\ + \ sound in realtime with specially designed sonication algorithms.\n\nAbout the\ + \ performers:\nStephen Barrass teaches and researches Digital Design and Media\ + \ Arts at the University of Canberra.\nDiane Whitmer is a Neuroscientist at Starlab\ + \ Pty. Ltd. 
in Barcelona.\nGeoffrey Gartner is a Cellist in the Ensemble Offspring\ + \ in Sydney},\n address = {Sydney, Australia},\n author = {Stephen Barrass and\ + \ Diane Whitmer},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Andrew Johnston, Sam\ + \ Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Baroque Basso Continuo for Cello, Heart (ECG)\ + \ and Mind (EEG)},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos' - title: T/ensor/~ 0.3 - url: https://www.nime.org/proceedings/2023/nime23_concert_2.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: 'Baroque Basso Continuo for Cello, Heart (ECG) and Mind (EEG)' + year: 2010 + + +- ENTRYTYPE: inproceedings + ID: nime2010-music-Drummond2010 + abstract: "Program notes: The interactive electroacoustics in Jet Stream are created\ + \ through the use of an underlying virtual model of a flute. This hybrid virtual\ + \ instrument is controlled through parameters such as bore length, blow intensity,\ + \ pressure, canal width, labium position. Lamorna's “real” flute sounds are analyzed\ + \ with respect to tone color, volume envelopes, frequency and spectral content.\ + \ These sonic gestures are then mapped to performance parameters for the computer's\ + \ virtual flute sonification. 
Of course the virtual flute doesn't have to conform\ + \ to the physical constraints of the “real-world”.\n\nAbout the performer: Jon\ + \ Drummond is a Sydney based composer, sound artist, programmer, academic and\ + \ researcher. His creative work spans the fields of instrumental music, electroacoustic,\ + \ interactive, sound and new media arts. Jon's electroacoustic and interactive\ + \ work has been presented widely including the International Computer Music Conferences\ + \ (Denmark 1994, Canada 1995, Greece 1997, China 1999, Singapore 2003), Electrofringe,\ + \ Totally Huge New Music Festival, Darwin International Guitar Festival and the\ + \ Adelaide Festival of Arts. Jon is currently employed as a researcher at MARCS\ + \ Auditory Laboratories, the University of Western Sydney." + address: 'Sydney, Australia' + author: Jon Drummond + bibtex: "@inproceedings{nime2010-music-Drummond2010,\n abstract = {Program notes:\ + \ The interactive electroacoustics in Jet Stream are created through the use of\ + \ an underlying virtual model of a flute. This hybrid virtual instrument is controlled\ + \ through parameters such as bore length, blow intensity, pressure, canal width,\ + \ labium position. Lamorna's “real” flute sounds are analyzed with respect to\ + \ tone color, volume envelopes, frequency and spectral content. These sonic gestures\ + \ are then mapped to performance parameters for the computer's virtual flute sonification.\ + \ Of course the virtual flute doesn't have to conform to the physical constraints\ + \ of the “real-world”.\n\nAbout the performer: Jon Drummond is a Sydney based\ + \ composer, sound artist, programmer, academic and researcher. His creative work\ + \ spans the fields of instrumental music, electroacoustic, interactive, sound\ + \ and new media arts. 
Jon's electroacoustic and interactive work has been presented\ + \ widely including the International Computer Music Conferences (Denmark 1994,\ + \ Canada 1995, Greece 1997, China 1999, Singapore 2003), Electrofringe, Totally\ + \ Huge New Music Festival, Darwin International Guitar Festival and the Adelaide\ + \ Festival of Arts. Jon is currently employed as a researcher at MARCS Auditory\ + \ Laboratories, the University of Western Sydney.},\n address = {Sydney, Australia},\n\ + \ author = {Jon Drummond},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Jet Stream},\n year = {2010}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Jet Stream + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1115 - abstract: 'Neo Tokyo, ca. 2019, 31 years after World War III, Akira awakens. This - homage is an audiovisual, live-coded performance, remixing and re-envisioning - the 1988 classic film created in the year of its setting, 2019 and reimagined - now in 2022/2023 as the audiovisual work DEF FUNCTION(DYSTOPIAKIRA). The authors - use the code editor Jensaarai to collaboratively and simultaneously live-code - TidalCycles and Python, each supported by SuperCollider and Touch Designer on - the backend respectively. The authors often collaborate remotely due to their - respective locations which is facilitated by Jensaarai. 
This enables the client-side - rendering of both audio and visuals in order to retain high-quality representations - of both elements.' - articleno: 1115 - author: Ryan R Smith and Shawn Lawson - bibtex: "@article{nime23-music-1115,\n abstract = {Neo Tokyo, ca. 2019, 31 years\ - \ after World War III, Akira awakens. This homage is an audiovisual, live-coded\ - \ performance, remixing and re-envisioning the 1988 classic film created in the\ - \ year of its setting, 2019 and reimagined now in 2022/2023 as the audiovisual\ - \ work DEF FUNCTION(DYSTOPIAKIRA). The authors use the code editor Jensaarai\ - \ to collaboratively and simultaneously live-code TidalCycles and Python, each\ - \ supported by SuperCollider and Touch Designer on the backend respectively. The\ - \ authors often collaborate remotely due to their respective locations which is\ - \ facilitated by Jensaarai. This enables the client-side rendering of both audio\ - \ and visuals in order to retain high-quality representations of both elements.},\n\ - \ articleno = {1115},\n author = {Ryan R Smith and Shawn Lawson},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ - \ title = {DEF FUNCTION(DYSTOPIAKIRA)},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Johnston2010 + abstract: "Program notes: This audiovisual work for acoustic instruments and interactive\ + \ software uses simple models of physical structures to mediate between acoustic\ + \ sounds and computer generated sound and visuals. 
Musicians use their acoustic\ + \ instruments to playfully interact with a physically modelled virtual sound sculpture\ + \ which is projected onto the screen.\nThe musicians use sounds produced on their\ + \ acoustic instruments to reach into the virtual world and grasp, push and hit\ + \ the sculpture. In response the structure glows, spins, bounces around and generates\ + \ its own sounds. The pitch and timbre of the live acoustic sounds are captured\ + \ and transformed by the virtual sculpture which sings back in its own way. Each\ + \ individual object (or mass) in the physical model is linked to a synthesis engine\ + \ which uses additive and subtractive synthesis techniques to produce a wide range\ + \ of sonic textures.\nThe frequency of oscillators of the synthesis engines are\ + \ set by the acoustic sounds played by the acoustic musicians and the volume of\ + \ sound produced is controlled by the movement of the masses. The effect is that\ + \ the sound sculpture produces evocative sounds clearly linked to the sonic gestures\ + \ of the performers and the movement of the onscreen sculpture.\nDuring performance\ + \ the physical structure and characteristics of the sculpture are altered. Links\ + \ between masses are cut, spring tension of the links\naltered and damping is\ + \ ramped up and down. Thus, while transparency of operation is maintained, the\ + \ complexity of the interaction between the acoustic and electronic performers\ + \ and the sound sculpture itself leads to rich conversational musical interactions.\n\ + \nAbout the performers: Andrew Johnston is a musician and software developer\ + \ living in Sydney, Australia. 
He completed a music performance degree at the\ + \ Victorian College of the Arts in 1995 and has performed with several Australian\ + \ symphony orchestras and a number of other ensembles.\nSubsequently he has completed\ + \ a Masters degree in Information Technology and in 2009 he completed a PhD investigating\ + \ the design and use of software to support an experimental, exploratory approach\ + \ to live music making. Andrew currently holds the position of Lecturer in the\ + \ Faculty of Engineering and IT at the University of Technology, Sydney.\n\nPhil\ + \ Slater - Trumpet\nJason Noble - Clarinet" + address: 'Sydney, Australia' + author: Andrew Johnston + bibtex: "@inproceedings{nime2010-music-Johnston2010,\n abstract = {Program notes:\ + \ This audiovisual work for acoustic instruments and interactive software uses\ + \ simple models of physical structures to mediate between acoustic sounds and\ + \ computer generated sound and visuals. Musicians use their acoustic instruments\ + \ to playfully interact with a physically modelled virtual sound sculpture which\ + \ is projected onto the screen.\nThe musicians use sounds produced on their acoustic\ + \ instruments to reach into the virtual world and grasp, push and hit the sculpture.\ + \ In response the structure glows, spins, bounces around and generates its own\ + \ sounds. The pitch and timbre of the live acoustic sounds are captured and transformed\ + \ by the virtual sculpture which sings back in its own way. Each individual object\ + \ (or mass) in the physical model is linked to a synthesis engine which uses additive\ + \ and subtractive synthesis techniques to produce a wide range of sonic textures.\n\ + The frequency of oscillators of the synthesis engines are set by the acoustic\ + \ sounds played by the acoustic musicians and the volume of sound produced is\ + \ controlled by the movement of the masses. 
The effect is that the sound sculpture\ + \ produces evocative sounds clearly linked to the sonic gestures of the performers\ + \ and the movement of the onscreen sculpture.\nDuring performance the physical\ + \ structure and characteristics of the sculpture are altered. Links between masses\ + \ are cut, spring tension of the links\naltered and damping is ramped up and down.\ + \ Thus, while transparency of operation is maintained, the complexity of the interaction\ + \ between the acoustic and electronic performers and the sound sculpture itself\ + \ leads to rich conversational musical interactions.\n\nAbout the performers:\ + \ Andrew Johnston is a musician and software developer living in Sydney, Australia.\ + \ He completed a music performance degree at the Victorian College of the Arts\ + \ in 1995 and has performed with several Australian symphony orchestras and a\ + \ number of other ensembles.\nSubsequently he has completed a Masters degree in\ + \ Information Technology and in 2009 he completed a PhD investigating the design\ + \ and use of software to support an experimental, exploratory approach to live\ + \ music making. 
Andrew currently holds the position of Lecturer in the Faculty\ + \ of Engineering and IT at the University of Technology, Sydney.\n\nPhil Slater\ + \ - Trumpet\nJason Noble - Clarinet},\n address = {Sydney, Australia},\n author\ + \ = {Andrew Johnston},\n booktitle = {Music Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Andrew Johnston, Sam\ + \ Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Touching Dialogue},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: DEF FUNCTION(DYSTOPIAKIRA) - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Touching Dialogue + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1137 - abstract: 'In this performance, two guitar players improvise using electro-acoustic - guitars equipped with actuators that can hit each of the strings. By moving through - virtual shapes placed around them with their guitars and bodies, they can control - the actuators. By using minimal modifications of the instrument and subtly extending - existing playing techniques, the setup aims at preserving the technical and cultural - heritage of the acoustic instrument. During the performance, the two musicians - combine elements of traditional playing with rhythmical interventions that complements - the interaction with the shapes. In particular, the shapes allow them to generate - stable rhythmical overlapped sequences. 
The improvisation then develops according - to the musicians'' inspiration with the shapes integrated in their playing.' - articleno: 1137 - author: Sebastien Beaumont and Ivann Cruz and Arthur Paté and Florent Berthaut - bibtex: "@article{nime23-music-1137,\n abstract = {In this performance, two guitar\ - \ players improvise using electro-acoustic guitars equipped with actuators that\ - \ can hit each of the strings. By moving through virtual shapes placed around\ - \ them with their guitars and bodies, they can control the actuators. By using\ - \ minimal modifications of the instrument and subtly extending existing playing\ - \ techniques, the setup aims at preserving the technical and cultural heritage\ - \ of the acoustic instrument. During the performance, the two musicians combine\ - \ elements of traditional playing with rhythmical interventions that complements\ - \ the interaction with the shapes. In particular, the shapes allow them to generate\ - \ stable rhythmical overlapped sequences. The improvisation then develops according\ - \ to the musicians' inspiration with the shapes integrated in their playing.},\n\ - \ articleno = {1137},\n author = {Sebastien Beaumont and Ivann Cruz and Arthur\ - \ Paté and Florent Berthaut},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Online Presentation},\n title = {VS : Improvisation\ - \ with Automated Interactive Instruments},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Collins2010 + abstract: "Program notes: Kinesics is a structure for improvisation; the computer\ + \ uses extensive machine listening technology to track the pianist and generates\ + \ feature-based effects. 
The computer also guides the pianist to explore actions\ + \ from a catalogue of gestures, some of which are heavy-handed. A feedback loop\ + \ is established of interpretation of sounding and physical gesture.\nThe Computer\ + \ was born in China in 2009, but eventually found its way to England to the ownership\ + \ of a grubby handed computer musician. Though ostensibly based somewhere near\ + \ Brighton, it went on to have many adventures around the world, and is grateful\ + \ to its owner at least for never putting it in hold luggage. Though suffering\ + \ an alarming logic board failure of cataclysmic proportions before even reaching\ + \ its first birthday, replacement surgery by qualified though over familiar service\ + \ personnel saved its life. Philosophical questions remain about the extent to\ + \ which its current personality is contiguous with the old, as evidenced in various\ + \ proprietary programs temporarily refusing to believe in their host brain anymore.\ + \ But it is just happy it can be here tonight to play for you.\nThere will also\ + \ be a dispensable human being on stage.\nAbout the performers:\nComputer - Electronics\n\ + Nick Collins - Piano" + address: 'Sydney, Australia' + author: Nick Collins + bibtex: "@inproceedings{nime2010-music-Collins2010,\n abstract = {Program notes:\ + \ Kinesics is a structure for improvisation; the computer uses extensive machine\ + \ listening technology to track the pianist and generates feature-based effects.\ + \ The computer also guides the pianist to explore actions from a catalogue of\ + \ gestures, some of which are heavy-handed. A feedback loop is established of\ + \ interpretation of sounding and physical gesture.\nThe Computer was born in China\ + \ in 2009, but eventually found its way to England to the ownership of a grubby\ + \ handed computer musician. 
Though ostensibly based somewhere near Brighton, it\ + \ went on to have many adventures around the world, and is grateful to its owner\ + \ at least for never putting it in hold luggage. Though suffering an alarming\ + \ logic board failure of cataclysmic proportions before even reaching its first\ + \ birthday, replacement surgery by qualified though over familiar service personnel\ + \ saved its life. Philosophical questions remain about the extent to which its\ + \ current personality is contiguous with the old, as evidenced in various proprietary\ + \ programs temporarily refusing to believe in their host brain anymore. But it\ + \ is just happy it can be here tonight to play for you.\nThere will also be a\ + \ dispensable human being on stage.\nAbout the performers:\nComputer - Electronics\n\ + Nick Collins - Piano},\n address = {Sydney, Australia},\n author = {Nick Collins},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Andrew Johnston, Sam Ferguson, Jos Mulder,\ + \ Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger\ + \ Mills},\n month = {June},\n publisher = {University of Technology Sydney},\n\ + \ title = {Kinesics},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: 'VS : Improvisation with Automated Interactive Instruments' - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Kinesics + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1141 - abstract: 'Codex Saqqara is a cycle of five semi-improvised musical pieces for live - coding and electric violin. 
Here, for duration reasons, we present a short excerpt. - The interaction between the two performers takes place through a system that allows - the violinist to record and overdub up to five samples in real-time, which are - then processed and organized into structures by the live coder. In this way, the - two musicians interact with each other’s musical space, taking on different musical - roles during the performance, such as soloists, orchestrators or accompanists. - Given its extemporaneous nature, the piece is composed from-scratch, following - a series of macro-structures determined beforehand. This submission accompanies - a paper regarding the system used, along with some reflections that emerged during - the rehearsals for this performance.' - articleno: 1141 - author: Francesco Dal Rì and Francesca Zanghellini - bibtex: "@article{nime23-music-1141,\n abstract = {Codex Saqqara is a cycle of five\ - \ semi-improvised musical pieces for live coding and electric violin. Here, for\ - \ duration reasons, we present a short excerpt. The interaction between the two\ - \ performers takes place through a system that allows the violinist to record\ - \ and overdub up to five samples in real-time, which are then processed and organized\ - \ into structures by the live coder. In this way, the two musicians interact with\ - \ each other’s musical space, taking on different musical roles during the performance,\ - \ such as soloists, orchestrators or accompanists. Given its extemporaneous nature,\ - \ the piece is composed from-scratch, following a series of macro-structures determined\ - \ beforehand. 
This submission accompanies a paper regarding the system used, along\ - \ with some reflections that emerged during the rehearsals for this performance.},\n\ - \ articleno = {1141},\n author = {Francesco Dal Rì and Francesca Zanghellini},\n\ +- ENTRYTYPE: inproceedings + ID: nime2010-music-Ratcliffe2010 + abstract: "Program notes: \"Mutations\" is an interactive work exploring notions\ + \ of the DJ set and the remix through the integration of various streams of piano\ + \ based material in live performance. Incorporating human and machine-generated\ + \ material, a “realization” of the piece involves the management of a pool of\ + \ audio files, MIDI files, and score fragments, which are drawn upon during performance.\ + \ In this way, the performer is required to control and shape the various streams\ + \ of material in the same way that a DJ would select and combine records during\ + \ the structuring of a live set (an alternative realization of `Mutations' may\ + \ involve the playback of mixed material, in which the trajectory of the narrative\ + \ has been determined in advance). The supply of audio files, MIDI files, and\ + \ score fragments used in the construction of the piece takes existing works from\ + \ the piano repertoire as source material, both transformed and quoted intact,\ + \ resulting in a spectrum of recognizability ranging from the easily identifiable,\ + \ to the ambiguous, to the non-referential. The integration of this borrowed material\ + \ within the three strands of the piece highlights various connections between\ + \ traditional forms of musical borrowing, transformative imitation, improvisation,\ + \ electroacoustic sound transformation and quotation, EDM sampling practices,\ + \ remix practices and DJ performance. 
This version of `Mutations' features a pre-recorded\ + \ electronic part, realized using a software application created by Jon Weinel.\n\ + \nAbout the performers: Robert Ratcliffe is currently completing a PhD in composition\ + \ (New Forms of Hybrid Musical Discourse) at Keele University (UK). He is the\ + \ first composer to develop a musical language based on the cross fertilization\ + \ of contemporary art music and electronic dance music (EDM). http://www.myspace.com/visionfugitive.\n\ + \nZubin Kanga - Piano\nRobert Ratcliffe - Electronics\nJon Weinel - Software Author" + address: 'Sydney, Australia' + author: Robert Ratcliffe and Jon Weinel + bibtex: "@inproceedings{nime2010-music-Ratcliffe2010,\n abstract = {Program notes:\ + \ \"Mutations\" is an interactive work exploring notions of the DJ set and the\ + \ remix through the integration of various streams of piano based material in\ + \ live performance. Incorporating human and machine-generated material, a “realization”\ + \ of the piece involves the management of a pool of audio files, MIDI files, and\ + \ score fragments, which are drawn upon during performance. 
In this way, the performer\ + \ is required to control and shape the various streams of material in the same\ + \ way that a DJ would select and combine records during the structuring of a live\ + \ set (an alternative realization of `Mutations' may involve the playback of mixed\ + \ material, in which the trajectory of the narrative has been determined in advance).\ + \ The supply of audio files, MIDI files, and score fragments used in the construction\ + \ of the piece takes existing works from the piano repertoire as source material,\ + \ both transformed and quoted intact, resulting in a spectrum of recognizability\ + \ ranging from the easily identifiable, to the ambiguous, to the non-referential.\ + \ The integration of this borrowed material within the three strands of the piece\ + \ highlights various connections between traditional forms of musical borrowing,\ + \ transformative imitation, improvisation, electroacoustic sound transformation\ + \ and quotation, EDM sampling practices, remix practices and DJ performance. This\ + \ version of `Mutations' features a pre-recorded electronic part, realized using\ + \ a software application created by Jon Weinel.\n\nAbout the performers: Robert\ + \ Ratcliffe is currently completing a PhD in composition (New Forms of Hybrid\ + \ Musical Discourse) at Keele University (UK). He is the first composer to develop\ + \ a musical language based on the cross fertilization of contemporary art music\ + \ and electronic dance music (EDM). 
http://www.myspace.com/visionfugitive.\n\n\ + Zubin Kanga - Piano\nRobert Ratcliffe - Electronics\nJon Weinel - Software Author},\n\ + \ address = {Sydney, Australia},\n author = {Robert Ratcliffe and Jon Weinel},\n\ \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Rob Hamilton},\n month = {May},\n note\ - \ = {Online Presentation},\n title = {Codex Saqqara},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" + \ for Musical Expression},\n editor = {Andrew Johnston, Sam Ferguson, Jos Mulder,\ + \ Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger\ + \ Mills},\n month = {June},\n publisher = {University of Technology Sydney},\n\ + \ title = {Mutations},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: Codex Saqqara - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Mutations + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1148 - abstract: 'This is a proposal for the premier of Flightless Path, a new work for - The Terpsichora Pressure-Sensitive Floors and Renaissance Violone. The Terpsichora - Pressure-Sensitive Floors (The Floors) are a new digital musical instrument which - uses whole-body motion to control electronic music. The instrument continues the - development of early models for pioneering dancer Philippa Cullen (1950-1975), - expanding its use as an expressive and versatile instrument for musicians to play. 
- The Floors use a large interactive surface for fine control of many sonic parameters - with a small number of sensors. The violone is the Renaissance precursor to the - double bass. It is a large instrument that has six gut strings, gut frets and - is played with a viol style underhand bow. This instrument also requires the whole - body to play and physically support the instrument in performance. This new work - brings these two instruments together and is an interplay between the definitions - of instruments and controller as they relate to contemporary practices based on - gesture. Working with the specific limitations of the body in relation to large - objects, the Floors and the violone both function as controllers for affecting - sound and as instruments for creating sound.' - articleno: 1148 - author: iran sanadzadeh and Chloë Sobek - bibtex: "@article{nime23-music-1148,\n abstract = {This is a proposal for the premier\ - \ of Flightless Path, a new work for The Terpsichora Pressure-Sensitive Floors\ - \ and Renaissance Violone. The Terpsichora Pressure-Sensitive Floors (The Floors)\ - \ are a new digital musical instrument which uses whole-body motion to control\ - \ electronic music. The instrument continues the development of early models for\ - \ pioneering dancer Philippa Cullen (1950-1975), expanding its use as an expressive\ - \ and versatile instrument for musicians to play. The Floors use a large interactive\ - \ surface for fine control of many sonic parameters with a small number of sensors.\ - \ The violone is the Renaissance precursor to the double bass. It is a large instrument\ - \ that has six gut strings, gut frets and is played with a viol style underhand\ - \ bow. This instrument also requires the whole body to play and physically support\ - \ the instrument in performance. 
This new work brings these two instruments together\ - \ and is an interplay between the definitions of instruments and controller as\ - \ they relate to contemporary practices based on gesture. Working with the specific\ - \ limitations of the body in relation to large objects, the Floors and the violone\ - \ both function as controllers for affecting sound and as instruments for creating\ - \ sound.},\n articleno = {1148},\n author = {iran sanadzadeh and Chloë Sobek},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Rob Hamilton},\n month = {May},\n note\ - \ = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n title = {Flightless\ - \ Path},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Paine2010 + abstract: "Program notes: Grace Space is a new work for clarinet and realtime electronic\ + \ transformation. It plays with sonic space and non-space; the use of the grace\ + \ note to define relationships of transition from a forgotten or distant space\ + \ to a familiar space, a known pitch. The piece contemplates memory, the experience\ + \ of snapping out of a daydream, from distant imaginings or recollections to the\ + \ real space, the events right in front of you. The realtime electronic transformation\ + \ makes the space fluid and introduces a height ened depth of perspective. Surround\ + \ sound spatialization techniques are used also to bring the sound of the clarinet\ + \ off the stage and around the audience, subverting the audience as spectator\ + \ relationship to one where the audience is at the core of the work, the position\ + \ the performer usually occupies.\n\nAbout the performers: Garth Paine has exhibited\ + \ immersive interactive environments in Australia, Europe, Japan, USA, Hong Kong\ + \ and New Zealand. 
He is on the organizing and peer review panels for the International\ + \ Conference On New Interfaces for Musical Expression (NIME), the International\ + \ Computer Music Conference. He has twice been guest editor of Organized Sound\ + \ Journal (Cambridge University Press) for special editions on interactive systems\ + \ in music and sound installation. He is often invited to run workshops on interactivity\ + \ for musical performance and commissioned to develop interactive system for realtime\ + \ musical composition for dance and theatre performances. He was selected as one\ + \ of ten creative professionals internationally for exhibition in the 10th New\ + \ York Digital Salon; DesignX Critical Reflections, and as a millennium leader\ + \ of innovation by the German Keyboard Magazine in 2000. Dr Paine was awarded\ + \ the Australia Council for the Arts, New Media Arts Fellowship in 2000, and The\ + \ RMIT Innovation Research Award in 2002. He is a member of the advisory panel\ + \ for the Electronic Music Foundation and one of 17 advisors to the UNESCO funded\ + \ Symposium on the Future, which is developing a taxonomy / design space of electronic\ + \ musical instruments. Recently Dr Paine been invited to perform at the Agora\ + \ Festival, Centre Pompidou, Paris (2006) and the New York Electronic Arts Festival\ + \ (2007), and in 2009 will perform in Sydney, Melbourne, Perth, Lymerik Ireland,\ + \ New York City, Montreal and Quebec in Canada, and Phoenix Arizona. In 2008 Dr\ + \ Paine received the UWS Vice-Chancellor's Excellence Award for Postgraduate Research\ + \ Training and Supervision.\n\nJason Noble - Clarinet" + address: 'Sydney, Australia' + author: Garth Paine + bibtex: "@inproceedings{nime2010-music-Paine2010,\n abstract = {Program notes: Grace\ + \ Space is a new work for clarinet and realtime electronic transformation. 
It\ + \ plays with sonic space and non-space; the use of the grace note to define relationships\ + \ of transition from a forgotten or distant space to a familiar space, a known\ + \ pitch. The piece contemplates memory, the experience of snapping out of a daydream,\ + \ from distant imaginings or recollections to the real space, the events right\ + \ in front of you. The realtime electronic transformation makes the space fluid\ + \ and introduces a height ened depth of perspective. Surround sound spatialization\ + \ techniques are used also to bring the sound of the clarinet off the stage and\ + \ around the audience, subverting the audience as spectator relationship to one\ + \ where the audience is at the core of the work, the position the performer usually\ + \ occupies.\n\nAbout the performers: Garth Paine has exhibited immersive interactive\ + \ environments in Australia, Europe, Japan, USA, Hong Kong and New Zealand. He\ + \ is on the organizing and peer review panels for the International Conference\ + \ On New Interfaces for Musical Expression (NIME), the International Computer\ + \ Music Conference. He has twice been guest editor of Organized Sound Journal\ + \ (Cambridge University Press) for special editions on interactive systems in\ + \ music and sound installation. He is often invited to run workshops on interactivity\ + \ for musical performance and commissioned to develop interactive system for realtime\ + \ musical composition for dance and theatre performances. He was selected as one\ + \ of ten creative professionals internationally for exhibition in the 10th New\ + \ York Digital Salon; DesignX Critical Reflections, and as a millennium leader\ + \ of innovation by the German Keyboard Magazine in 2000. Dr Paine was awarded\ + \ the Australia Council for the Arts, New Media Arts Fellowship in 2000, and The\ + \ RMIT Innovation Research Award in 2002. 
He is a member of the advisory panel\ + \ for the Electronic Music Foundation and one of 17 advisors to the UNESCO funded\ + \ Symposium on the Future, which is developing a taxonomy / design space of electronic\ + \ musical instruments. Recently Dr Paine been invited to perform at the Agora\ + \ Festival, Centre Pompidou, Paris (2006) and the New York Electronic Arts Festival\ + \ (2007), and in 2009 will perform in Sydney, Melbourne, Perth, Lymerik Ireland,\ + \ New York City, Montreal and Quebec in Canada, and Phoenix Arizona. In 2008 Dr\ + \ Paine received the UWS Vice-Chancellor's Excellence Award for Postgraduate Research\ + \ Training and Supervision.\n\nJason Noble - Clarinet},\n address = {Sydney, Australia},\n\ + \ author = {Garth Paine},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Grace Space},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' - title: Flightless Path - url: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Grace Space + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1158 - abstract: 'PROGRAM NOTES The Moirai Mask is an ornate mask that operates as a NIME. - The mask has an integrated MIDI controller that allows the performer to play music - by touching the brass and bamboo panels. 
In performance, the artist uses audio-montage - to collage sounds of the Australian wilderness with electronics and sampled fragments - of an acoustic string instrument. The mask is handmade from predominantly recycled - materials; hand cut brass panels and hand painted bamboo elements adorn the front - of the mask, which are sewn into the cotton paneling that covers the hand soldered - electrical components. The Moirai Mask is a sonic play on the Covid-19 PPE mask. - The PPE mask, like an exo-skeleton, provides an extra, augmented layer of protection - from our bodies, the ‘outside world’, the virus, the Other. The Covid-19 pandemic - forced us to accept our bodily limitations and embrace this prosaic form of human - augmentation, the PPE mask. Furthermore, as the Covid-19 virus enters our bodies - and is transmitted through our breath, we must acknowledge that we are not separate - from the non-human world that we inhabit but are in fact bodily constituted through - it [1]. As Deborah Lupton et al. point out ‘the COVID crisis [has] heightened - awareness of our collective vulnerability to each other’s more-than-human bodies’ - [ibid.]. Drawing on the concept of a NIME, here the PPE mask is appropriated as - a symbolic and subversive art object, paying sonic homage to the non-human world - while the artist’s voice is subtly silenced.' - articleno: 1158 - author: Chloë L A Sobek - bibtex: "@article{nime23-music-1158,\n abstract = {PROGRAM NOTES The Moirai Mask\ - \ is an ornate mask that operates as a NIME. 
The mask has an integrated MIDI controller\ - \ that allows the performer to play music by touching the brass and bamboo panels.\ - \ In performance, the artist uses audio-montage to collage sounds of the Australian\ - \ wilderness with electronics and sampled fragments of an acoustic string instrument.\ - \ The mask is handmade from predominantly recycled materials; hand cut brass panels\ - \ and hand painted bamboo elements adorn the front of the mask, which are sewn\ - \ into the cotton paneling that covers the hand soldered electrical components.\ - \ The Moirai Mask is a sonic play on the Covid-19 PPE mask. The PPE mask, like\ - \ an exo-skeleton, provides an extra, augmented layer of protection from our bodies,\ - \ the ‘outside world’, the virus, the Other. The Covid-19 pandemic forced us to\ - \ accept our bodily limitations and embrace this prosaic form of human augmentation,\ - \ the PPE mask. Furthermore, as the Covid-19 virus enters our bodies and is transmitted\ - \ through our breath, we must acknowledge that we are not separate from the non-human\ - \ world that we inhabit but are in fact bodily constituted through it [1]. As\ - \ Deborah Lupton et al. 
point out ‘the COVID crisis [has] heightened awareness\ - \ of our collective vulnerability to each other’s more-than-human bodies’ [ibid.].\ - \ Drawing on the concept of a NIME, here the PPE mask is appropriated as a symbolic\ - \ and subversive art object, paying sonic homage to the non-human world while\ - \ the artist’s voice is subtly silenced.},\n articleno = {1158},\n author = {Chloë\ - \ L A Sobek},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ - \ = {May},\n note = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n\ - \ title = {The Moirai Mask},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Mackay2010 + abstract: "Program notes: This piece was written for Duo Contour, and uses the following\ + \ poem `Under the Slates' by Martin Daws as its inspiration:\nWe are earth people\n\ + \nLong have we hidden\nIn the rock heavy heart\nAnd harboured our strengths\n\ + Among the agonies of stone\n\nOurs is the granite\nWind withered to pinnacles\n\ + And the whispered secret\nPassed behind its scream\n\nAnd the dark slate blasted\n\ + Into fragments of its nature\nShattered forgotten bodies\nPatterned random\nHeaped\ + \ on houses\n\nDropped on churches\nSilenced hymns\nOn buried villages lost to\ + \ light\n\nWe mourn our eagles\nCount our sheep\nLay our seed on crusted bed spring\n\ + Spines shrunk with the gravity\nDreams pulled out of star flight\nDriven back\ + \ to earth to bone\nTo wakeful vision raw with piling rock\nAgainst the sun\n\n\ + We are the subjects of a skyline\nHeld in hard embrace\nIts dark love a sanctuary\n\ + For our healing\n\nThe poem reflects Daws' response to the altered landscape formed\ + \ by slate quarrying in the village of Bethesda in North Wales.\nThe role of the\ + \ instrumentalists in this piece is to create a 
textural accompaniment to the\ + \ words. Through different sound transformation techniques, the sounds of the\ + \ instruments are altered in real-time to create word-painting effects.\nVideo\ + \ sequences of the poet himself are juxtaposed against images of the area in question.\ + \ These images are manipulated in real-time by the sound of the instruments themselves.\ + \ The video imagery, like the music, is intended to re flect the meaning of the\ + \ text. For this piece, I created my own software tools in Max/MSP/Jitter for\ + \ live audio/video interaction.\n\n\nAbout the performer: Rob Mackay is a composer,\ + \ sound artist and performer. He obtained a degree in Geology and Music at the\ + \ University of Keele, studying composition there with Mike Vaughan, before going\ + \ on to complete a Master's and PhD with Andrew Lewis at the University of Wales,\ + \ Bangor. Currently he is a lecturer in Creative Music Technology at the University\ + \ of Hull, Scarborough Campus, and is the course director.\nRecent projects have\ + \ moved towards a cross-disciplinary approach, including theatre, audio/visual\ + \ installation work, and human/computer interaction. Prizes and honours include:\ + \ IMEB Bourges (1997 and 2001); EAR99 from Hungarian Radio (1999); Confluencias\ + \ (2003); La Muse en Circuit (2004 and 2006). His work has received over 100 performances\ + \ in 16 countries (including several performances on BBC Radio 3). He has held\ + \ composer residencies at Slovak Radio (Bratislava), La Muse en Circuit (Paris),\ + \ and the Tyrone Guthre Arts Centre (Ireland).\nHe has played, written and produced\ + \ in a number of bands and ensembles, including the Welsh Hip-Hop collective \"\ + Tystion\" with whom he collaborated alongside John Cale on the film `A Beautiful\ + \ Mistake', as well as recording two John Peel sessions on BBC Radio 1 and supporting\ + \ PJ Harvey. More recently, he has done session work for Gowel Owen and Euros\ + \ Childs. 
6 CDs including his compositions are available. More information and\ + \ pieces at:\n\nwww.myspace.com/robflute\nwww.digital-music-archives.com" + address: 'Sydney, Australia' + author: Robert Mackay + bibtex: "@inproceedings{nime2010-music-Mackay2010,\n abstract = {Program notes:\ + \ This piece was written for Duo Contour, and uses the following poem `Under the\ + \ Slates' by Martin Daws as its inspiration:\nWe are earth people\n\nLong have\ + \ we hidden\nIn the rock heavy heart\nAnd harboured our strengths\nAmong the agonies\ + \ of stone\n\nOurs is the granite\nWind withered to pinnacles\nAnd the whispered\ + \ secret\nPassed behind its scream\n\nAnd the dark slate blasted\nInto fragments\ + \ of its nature\nShattered forgotten bodies\nPatterned random\nHeaped on houses\n\ + \nDropped on churches\nSilenced hymns\nOn buried villages lost to light\n\nWe\ + \ mourn our eagles\nCount our sheep\nLay our seed on crusted bed spring\nSpines\ + \ shrunk with the gravity\nDreams pulled out of star flight\nDriven back to earth\ + \ to bone\nTo wakeful vision raw with piling rock\nAgainst the sun\n\nWe are the\ + \ subjects of a skyline\nHeld in hard embrace\nIts dark love a sanctuary\nFor\ + \ our healing\n\nThe poem reflects Daws' response to the altered landscape formed\ + \ by slate quarrying in the village of Bethesda in North Wales.\nThe role of the\ + \ instrumentalists in this piece is to create a textural accompaniment to the\ + \ words. Through different sound transformation techniques, the sounds of the\ + \ instruments are altered in real-time to create word-painting effects.\nVideo\ + \ sequences of the poet himself are juxtaposed against images of the area in question.\ + \ These images are manipulated in real-time by the sound of the instruments themselves.\ + \ The video imagery, like the music, is intended to re flect the meaning of the\ + \ text. 
For this piece, I created my own software tools in Max/MSP/Jitter for\ + \ live audio/video interaction.\n\n\nAbout the performer: Rob Mackay is a composer,\ + \ sound artist and performer. He obtained a degree in Geology and Music at the\ + \ University of Keele, studying composition there with Mike Vaughan, before going\ + \ on to complete a Master's and PhD with Andrew Lewis at the University of Wales,\ + \ Bangor. Currently he is a lecturer in Creative Music Technology at the University\ + \ of Hull, Scarborough Campus, and is the course director.\nRecent projects have\ + \ moved towards a cross-disciplinary approach, including theatre, audio/visual\ + \ installation work, and human/computer interaction. Prizes and honours include:\ + \ IMEB Bourges (1997 and 2001); EAR99 from Hungarian Radio (1999); Confluencias\ + \ (2003); La Muse en Circuit (2004 and 2006). His work has received over 100 performances\ + \ in 16 countries (including several performances on BBC Radio 3). He has held\ + \ composer residencies at Slovak Radio (Bratislava), La Muse en Circuit (Paris),\ + \ and the Tyrone Guthre Arts Centre (Ireland).\nHe has played, written and produced\ + \ in a number of bands and ensembles, including the Welsh Hip-Hop collective \"\ + Tystion\" with whom he collaborated alongside John Cale on the film `A Beautiful\ + \ Mistake', as well as recording two John Peel sessions on BBC Radio 1 and supporting\ + \ PJ Harvey. More recently, he has done session work for Gowel Owen and Euros\ + \ Childs. 6 CDs including his compositions are available. 
More information and\ + \ pieces at:\n\nwww.myspace.com/robflute\nwww.digital-music-archives.com},\n address\ + \ = {Sydney, Australia},\n author = {Robert Mackay},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ + \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {Altered Landscapes},\n\ + \ year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' - title: The Moirai Mask - url: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Altered Landscapes + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1166 - abstract: 'BANDNAME is composed of three womxn NAME, NAME, and NAME and guests who - use the physical properties of the electromagnetic spectrum to create installations, - performances and recordings. Using electronic feedback, audio speakers, various - kinds of microphones/pickups, and resonant objects of all shapes and kinds, we - summon the feminine spirit of electromagnetism, aka the Goddess of the Electronic - Medium aka the ElecroMagnetic Goddess. We have a flexible membership inclusive - to all peoples who are willing to open themselves up to this spirit. In terms - of current trends in audio technology, we invoke a feminist response to the masculinization - of the music industry, audio engineering, and to the artistic spaces of sound - arts in general. 
Our latest project includes playing with painted score-objects - Bareëmins. They are painted with conductive carbon paint, and non-conductive paint. - When the area that is conductive is activated it produces sound, the non-conductive - area does not. Thereby, by alternating painted and not painted areas in an aesthetic - way, a score can be embedded into the very instrument itself. The paint can be - applied to paintings as well as the inside of paper and plastic sculptures the - results are many fold. There are folded paper crystal Bareëmins that look like - crystals suitable for an electromagnetic altar. You can use them to invoke the - Electromagnetic Goddess at home. The project is particularly aligned with this - year''s theme of Frugal Music Innovation as it uses all natural materials + paste - glue to create the painted score/instrument. The carbon paint is made by recycling - charcoal from a cooking fire, the colored paint is everyday school supplies and - paint made out of found earth pigment. The binder is paste glue. The brains are - an Arduino running simple theremin code with only 2 resistors and an 8ohm speaker - as peripherals. video here https://youtu.be/YAD-F68Ntl4' - articleno: 1166 - author: Sofya Yuditskaya and Jess Rowland and Margaret Schedel - bibtex: "@article{nime23-music-1166,\n abstract = {BANDNAME is composed of three\ - \ womxn NAME, NAME, and NAME and guests who use the physical properties of the\ - \ electromagnetic spectrum to create installations, performances and recordings.\ - \ Using electronic feedback, audio speakers, various kinds of microphones/pickups,\ - \ and resonant objects of all shapes and kinds, we summon the feminine spirit\ - \ of electromagnetism, aka the Goddess of the Electronic Medium aka the ElecroMagnetic\ - \ Goddess. We have a flexible membership inclusive to all peoples who are willing\ - \ to open themselves up to this spirit. 
In terms of current trends in audio technology,\ - \ we invoke a feminist response to the masculinization of the music industry,\ - \ audio engineering, and to the artistic spaces of sound arts in general. Our\ - \ latest project includes playing with painted score-objects Bareëmins. They are\ - \ painted with conductive carbon paint, and non-conductive paint. When the area\ - \ that is conductive is activated it produces sound, the non-conductive area does\ - \ not. Thereby, by alternating painted and not painted areas in an aesthetic way,\ - \ a score can be embedded into the very instrument itself. The paint can be applied\ - \ to paintings as well as the inside of paper and plastic sculptures the results\ - \ are many fold. There are folded paper crystal Bareëmins that look like crystals\ - \ suitable for an electromagnetic altar. You can use them to invoke the Electromagnetic\ - \ Goddess at home. The project is particularly aligned with this year's theme\ - \ of Frugal Music Innovation as it uses all natural materials + paste glue to\ - \ create the painted score/instrument. The carbon paint is made by recycling charcoal\ - \ from a cooking fire, the colored paint is everyday school supplies and paint\ - \ made out of found earth pigment. The binder is paste glue. The brains are an\ - \ Arduino running simple theremin code with only 2 resistors and an 8ohm speaker\ - \ as peripherals. 
video here https://youtu.be/YAD-F68Ntl4},\n articleno =\ - \ {1166},\n author = {Sofya Yuditskaya and Jess Rowland and Margaret Schedel},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Rob Hamilton},\n month = {May},\n note\ - \ = {Live Concert 4, Thursday June 1, Centro de Cultura Digital},\n title = {Carbon\ - \ Based EM Fields},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Havryliv2010 + abstract: "Program notes: This performance draws on a natural feature of a particular\ + \ class of chaotic oscillators described by Julien Sprott, namely that they require\ + \ a driving force in order to perform as chaotic attractors. In the unmodified\ + \ equations driving forces are introduced mathematically, however, as we calculate\ + \ the chaotic systems in real-time we open the door to using a performers audio\ + \ signal as an input force.\nThis class of oscillator exhibits interesting behavior\ + \ in response to different frequency inputs; in particular, the systems are sensitive\ + \ to changes in low frequency tones. This encourages the use of Just Intonation\ + \ as a method of determining tuning systems with easily defined difference tones;\ + \ the scale developed by Kraig Grady features many difference tones in an excitable\ + \ range for the chaotic oscillators.\n\nAbout the performers:\nMark Havryliv is\ + \ a doctoral student developing a haptic musical instrument at the University\ + \ of Wollongong. Aside from that research, he is interested in the musical possibilities\ + \ of integrating real-time sonification with other disciplines like game design\ + \ and creative writing.\n\nKraig Grady, an Anaphorian now living in Australia,\ + \ composes almost exclusively for acoustic instruments of his own making or modification\ + \ tuned to just intonation. 
Often his work is combined with his Shadow Theatre\ + \ productions. His work has been presented at Ballhaus Naunyn Berlin (Germany),\ + \ the Chateau de la Napoule (France), the Norton Simon Museum of Art, the UCLA\ + \ Armand Hammer Museum, the Pacific Asia Museum, the Los Angeles Philharmonics\ + \ American Music Weekend and New Music America 1985. He was chosen by Buzz Magazine\ + \ as one of the \"100 coolest people in Los Angeles\".\n\nKraig Grady - Just Intonation\ + \ Tuned Marimba\nMark Havryliv - Saxophone" + address: 'Sydney, Australia' + author: Mark Havryliv + bibtex: "@inproceedings{nime2010-music-Havryliv2010,\n abstract = {Program notes:\ + \ This performance draws on a natural feature of a particular class of chaotic\ + \ oscillators described by Julien Sprott, namely that they require a driving force\ + \ in order to perform as chaotic attractors. In the unmodified equations driving\ + \ forces are introduced mathematically, however, as we calculate the chaotic systems\ + \ in real-time we open the door to using a performers audio signal as an input\ + \ force.\nThis class of oscillator exhibits interesting behavior in response to\ + \ different frequency inputs; in particular, the systems are sensitive to changes\ + \ in low frequency tones. 
This encourages the use of Just Intonation as a method\ + \ of determining tuning systems with easily defined difference tones; the scale\ + \ developed by Kraig Grady features many difference tones in an excitable range\ + \ for the chaotic oscillators.\n\nAbout the performers:\nMark Havryliv is a doctoral\ + \ student developing a haptic musical instrument at the University of Wollongong.\ + \ Aside from that research, he is interested in the musical possibilities of integrating\ + \ real-time sonification with other disciplines like game design and creative\ + \ writing.\n\nKraig Grady, an Anaphorian now living in Australia, composes almost\ + \ exclusively for acoustic instruments of his own making or modification tuned\ + \ to just intonation. Often his work is combined with his Shadow Theatre productions.\ + \ His work has been presented at Ballhaus Naunyn Berlin (Germany), the Chateau\ + \ de la Napoule (France), the Norton Simon Museum of Art, the UCLA Armand Hammer\ + \ Museum, the Pacific Asia Museum, the Los Angeles Philharmonics American Music\ + \ Weekend and New Music America 1985. 
He was chosen by Buzz Magazine as one of\ + \ the \"100 coolest people in Los Angeles\".\n\nKraig Grady - Just Intonation\ + \ Tuned Marimba\nMark Havryliv - Saxophone},\n address = {Sydney, Australia},\n\ + \ author = {Mark Havryliv},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Warming for Blackall},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' - title: Carbon Based EM Fields - url: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Warming for Blackall + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1168 - abstract: 'Sonic Swells is a multimedia music composition for fixed audio, filmed - footage of a surfer and live saxophone. This iterative sound art project explores - the use of sonification of ocean weather data, sonification of movement data from - a surfer riding waves, and live performance as tools for music composition. Weather - data is collected through a free API and converted to sound in Max/MSP, driving - the parameters of a very large additive and subtractive synthesizer that uses - pink noise as its fundamental sound source. The sonification includes swell direction - and wind speed that dictate the positions of audio in the stereo or surround speaker - field, and wave height and swell period driving an undulating filter effect. 
The - severity of the conditions dictates the complexity of the soundscape. Sampled - audio is blended into the sonification. The surfer''s movement data is collected - with a DIY kit including an iPhone for telemetry, an android or esp32 watch for - data logging, and a small Wi-Fi router with battery and a GoPro. This information - influences elements of the ocean weather sonification and affects the saxophone - live performance. The performer plays a combination of scored and improvised material. - The piece explores the relationship between sonification, motion and music.' - articleno: 1168 - author: Cayn Borthwick - bibtex: "@article{nime23-music-1168,\n abstract = {Sonic Swells is a multimedia\ - \ music composition for fixed audio, filmed footage of a surfer and live saxophone.\ - \ This iterative sound art project explores the use of sonification of ocean weather\ - \ data, sonification of movement data from a surfer riding waves, and live performance\ - \ as tools for music composition. Weather data is collected through a free API\ - \ and converted to sound in Max/MSP, driving the parameters of a very large additive\ - \ and subtractive synthesizer that uses pink noise as its fundamental sound source.\ - \ The sonification includes swell direction and wind speed that dictate the positions\ - \ of audio in the stereo or surround speaker field, and wave height and swell\ - \ period driving an undulating filter effect. The severity of the conditions dictates\ - \ the complexity of the soundscape. Sampled audio is blended into the sonification.\ - \ The surfer's movement data is collected with a DIY kit including an iPhone for\ - \ telemetry, an android or esp32 watch for data logging, and a small Wi-Fi router\ - \ with battery and a GoPro. This information influences elements of the ocean\ - \ weather sonification and affects the saxophone live performance. The performer\ - \ plays a combination of scored and improvised material. 
The piece explores the\ - \ relationship between sonification, motion and music.},\n articleno = {1168},\n\ - \ author = {Cayn Borthwick},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n\ - \ title = {Sonic Swells - Riding Swells},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Hopson2010 + abstract: "Program notes: Life on (Planet) is a work for two rocks and interactive\ + \ computer processing. The performer clicks and rubs the rocks together in front\ + \ of a stereo microphone. The computer responds to how the rocks are played, with\ + \ particular regard to changes in tempo, articulation, volume, and position of\ + \ the rocks relative to the left/right stereo field of the microphone. Complex\ + \ combinations of (somewhat) controllable sounds arise from the accretion of input\ + \ sound combined with feedback from the space.\n\nAbout the performer:\nHolland\ + \ Hopson is a composer, improviser, and electronic artist. As an instrumentalist\ + \ he performs on soprano saxophone, clawhammer banjo and electronics. He has held\ + \ residencies at STEIM, Amsterdam; Experimental Music Studios, Krakow and Katowice,\ + \ Poland; Sonic Arts Research Studio, Vancouver, Canada; LEMURPlex, Brooklyn;\ + \ and Harvestworks Digital Media Arts, New York where he developed a sound installation\ + \ based on Marcel Duchamp's sculpture, With Hidden Noise. An avid phonographer,\ + \ Holland has recorded sounds on four continents and in over a dozen countries.\ + \ Holland's latest recording is With Hidden Noises released on Grab Rare Arts\ + \ (www.grabrarearts.com)." 
+ address: 'Sydney, Australia' + author: Holland Hopson + bibtex: "@inproceedings{nime2010-music-Hopson2010,\n abstract = {Program notes:\ + \ Life on (Planet) is a work for two rocks and interactive computer processing.\ + \ The performer clicks and rubs the rocks together in front of a stereo microphone.\ + \ The computer responds to how the rocks are played, with particular regard to\ + \ changes in tempo, articulation, volume, and position of the rocks relative to\ + \ the left/right stereo field of the microphone. Complex combinations of (somewhat)\ + \ controllable sounds arise from the accretion of input sound combined with feedback\ + \ from the space.\n\nAbout the performer:\nHolland Hopson is a composer, improviser,\ + \ and electronic artist. As an instrumentalist he performs on soprano saxophone,\ + \ clawhammer banjo and electronics. He has held residencies at STEIM, Amsterdam;\ + \ Experimental Music Studios, Krakow and Katowice, Poland; Sonic Arts Research\ + \ Studio, Vancouver, Canada; LEMURPlex, Brooklyn; and Harvestworks Digital Media\ + \ Arts, New York where he developed a sound installation based on Marcel Duchamp's\ + \ sculpture, With Hidden Noise. An avid phonographer, Holland has recorded sounds\ + \ on four continents and in over a dozen countries. 
Holland's latest recording\ + \ is With Hidden Noises released on Grab Rare Arts (www.grabrarearts.com).},\n\ + \ address = {Sydney, Australia},\n author = {Holland Hopson},\n booktitle = {Music\ + \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ + \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {Life on (Planet)},\n\ + \ year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' - title: Sonic Swells - Riding Swells - url: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Life on (Planet) + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1174 - abstract: 'Pandora hears her own dreams, they talk to her in mysterious voices, - unknown languages. You find yourself standing alone, in the middle of her darkness. - You don’t know how you got there. Are you one of Pandora’s dreams? Talk to her, - maybe she will answer you. In this audiovisual dreamscape lies a re-imagining - of Pandora’s story, where the contents of her jar are bioluminescent swarming - spores that seek to fill the world with hope instead of evil, and life instead - of death. The spores want to get out, their evolutionary powers are hidden, and - the whole universe is waiting to be explored. Meanwhile, Pandora is dreaming, - condemned to keep the box closed. Life waits to be released.' 
- articleno: 1174 - author: Jack Armitage and Celeste Betancur - bibtex: "@article{nime23-music-1174,\n abstract = {Pandora hears her own dreams,\ - \ they talk to her in mysterious voices, unknown languages. You find yourself\ - \ standing alone, in the middle of her darkness. You don’t know how you got there.\ - \ Are you one of Pandora’s dreams? Talk to her, maybe she will answer you. In\ - \ this audiovisual dreamscape lies a re-imagining of Pandora’s story, where the\ - \ contents of her jar are bioluminescent swarming spores that seek to fill the\ - \ world with hope instead of evil, and life instead of death. The spores want\ - \ to get out, their evolutionary powers are hidden, and the whole universe is\ - \ waiting to be explored. Meanwhile, Pandora is dreaming, condemned to keep the\ - \ box closed. Life waits to be released.},\n articleno = {1174},\n author = {Jack\ - \ Armitage and Celeste Betancur},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Live Concert 5, Friday June 2, Centro de Cultura Digital},\n\ - \ title = {Pandora's Mycophony},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Sorensen2010 + address: 'Sydney, Australia' + author: Andrew Sorensen + bibtex: "@inproceedings{nime2010-music-Sorensen2010,\n address = {Sydney, Australia},\n\ + \ author = {Andrew Sorensen},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Live Coding Improvisation},\n year = {2010}\n\ + }\n" booktitle: Music Proceedings of the International Conference 
on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' - title: Pandora's Mycophony - url: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Live Coding Improvisation + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1212 - abstract: 'The Sabotaging Piano is an electronic prepared piano that challenges - performers through the remapping of keys to unexpected pitches. For every new - performance, a new remapping pattern is given, so performers face a continuously - surprising new element. The performer is provided with an expression pedal (a - ``sabotaging pedal'''') to modulate the amount of keys that will we remapped, - going from none to all of them.' - articleno: 1212 - author: Teodoro Dannemann - bibtex: "@article{nime23-music-1212,\n abstract = {The Sabotaging Piano is an electronic\ - \ prepared piano that challenges performers through the remapping of keys to unexpected\ - \ pitches. For every new performance, a new remapping pattern is given, so performers\ - \ face a continuously surprising new element. 
The performer is provided with an\ - \ expression pedal (a ``sabotaging pedal'') to modulate the amount of keys that\ - \ will we remapped, going from none to all of them.},\n articleno = {1212},\n\ - \ author = {Teodoro Dannemann},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos},\n\ - \ title = {Sabotaging Piano Concert},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Brown2010 + address: 'Sydney, Australia' + author: Andrew Brown + bibtex: "@inproceedings{nime2010-music-Brown2010,\n address = {Sydney, Australia},\n\ + \ author = {Andrew Brown},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {A Live Coding Performance},\n year = {2010}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos' - title: Sabotaging Piano Concert - url: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: A Live Coding Performance + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1214 - abstract: 'Returns and Simulacra combines sound and projections of video onto a - screen with the performer’s body on stage. 
It uses mini bee accelerometers and - touch-sensor attachments as an instrument called Piano Hands. Through this instrument, - the pianist controls a max/MSP patch interface and some elements in the projected - video of the piece. The piece addresses the performer’s multiple identities on - stage, playing the line between the real and virtual performance while incorporating - different footage from filmed videos of the pianist and archived cabaret performances - of the British queer performers of the past. The digital score relies on the pianist''s - embodied gestural behaviour and his reaction to audio and video material.' - articleno: 1214 - author: Solomiya Moroz and Zubin Kanga - bibtex: "@article{nime23-music-1214,\n abstract = {Returns and Simulacra combines\ - \ sound and projections of video onto a screen with the performer’s body on stage.\ - \ It uses mini bee accelerometers and touch-sensor attachments as an instrument\ - \ called Piano Hands. Through this instrument, the pianist controls a max/MSP\ - \ patch interface and some elements in the projected video of the piece. The piece\ - \ addresses the performer’s multiple identities on stage, playing the line between\ - \ the real and virtual performance while incorporating different footage from\ - \ filmed videos of the pianist and archived cabaret performances of the British\ - \ queer performers of the past. 
The digital score relies on the pianist's embodied\ - \ gestural behaviour and his reaction to audio and video material.},\n articleno\ - \ = {1214},\n author = {Solomiya Moroz and Zubin Kanga},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ - \ title = {Returns \\& Simulacra},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Magnusson2010 + address: 'Sydney, Australia' + author: Thor Magnusson + bibtex: "@inproceedings{nime2010-music-Magnusson2010,\n address = {Sydney, Australia},\n\ + \ author = {Thor Magnusson},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Ixi Lang Performance},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: Returns & Simulacra - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Ixi Lang Performance + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1245 - abstract: 'Absence is a performance for audio–visual concatenative synthesis elaborated - during Diemo Schwarz’s art–science residency at the IMéRA Institute for Advanced - Study in 2022. 
It explores several notions of absence: of light, of love, of humanity, - where societies and all human artefacts will be destructured to gradually disappear - within the materials and textures of the natural world. Audio–visual concatenative - synthesis extends the principle of corpus-based sound synthesis to the visual - domain, where, in addition to the sound corpus (i.e. a collection of segments - of recorded sound with a perceptual description of their sound character), the - artist uses a corpus of still images with perceptual description (colour, texture, - brightness, entropy, and other content-based image descriptors). The artist then - creates an audio–visual musical performance by navigating through one of these - descriptor spaces, e.g. through the collection of sound grains in a space of perceptual - audio descriptors, and at the same time through the other descriptor space, i.e. - select images from the visual corpus for rendering, and thus navigate in parallel - through both corpora interactively with gestural control via movement sensors. - This will evoke an aesthetic of acoustic and visual collage or cut-up, generating - an audio–visual sequence of similar sounds/images from the two corpora when navigation - is local, and opposing contrasting sounds/images when the navigation jumps to - different parts of the linked sound/image descriptor space. The artistic–technological - question that is explored here is how to control at the same time the navigation - through the audio and the image descriptor spaces with gesture sensors, i.e. how - to link the gesture sensing to both the image descriptors and the sound descriptors - in order to create a multi-modal audio–visual performance.' 
- articleno: 1245 - author: Diemo Schwarz - bibtex: "@article{nime23-music-1245,\n abstract = {Absence is a performance for\ - \ audio–visual concatenative synthesis elaborated during Diemo Schwarz’s art–science\ - \ residency at the IMéRA Institute for Advanced Study in 2022. It explores several\ - \ notions of absence: of light, of love, of humanity, where societies and all\ - \ human artefacts will be destructured to gradually disappear within the materials\ - \ and textures of the natural world. Audio–visual concatenative synthesis extends\ - \ the principle of corpus-based sound synthesis to the visual domain, where, in\ - \ addition to the sound corpus (i.e. a collection of segments of recorded sound\ - \ with a perceptual description of their sound character), the artist uses a corpus\ - \ of still images with perceptual description (colour, texture, brightness, entropy,\ - \ and other content-based image descriptors). The artist then creates an audio–visual\ - \ musical performance by navigating through one of these descriptor spaces, e.g.\ - \ through the collection of sound grains in a space of perceptual audio descriptors,\ - \ and at the same time through the other descriptor space, i.e. select images\ - \ from the visual corpus for rendering, and thus navigate in parallel through\ - \ both corpora interactively with gestural control via movement sensors. This\ - \ will evoke an aesthetic of acoustic and visual collage or cut-up, generating\ - \ an audio–visual sequence of similar sounds/images from the two corpora when\ - \ navigation is local, and opposing contrasting sounds/images when the navigation\ - \ jumps to different parts of the linked sound/image descriptor space. 
The artistic–technological\ - \ question that is explored here is how to control at the same time the navigation\ - \ through the audio and the image descriptor spaces with gesture sensors, i.e.\ - \ how to link the gesture sensing to both the image descriptors and the sound\ - \ descriptors in order to create a multi-modal audio–visual performance.},\n articleno\ - \ = {1245},\n author = {Diemo Schwarz},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Rob Hamilton},\n month = {May},\n note = {Live Concert 5, Friday June 2,\ - \ Centro de Cultura Digital},\n title = {Absence},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Dubrau2010 + address: 'Sydney, Australia' + author: Mei-Ling Dubrau and Mark Havryliv + bibtex: "@inproceedings{nime2010-music-Dubrau2010,\n address = {Sydney, Australia},\n\ + \ author = {Mei-Ling Dubrau and Mark Havryliv},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ + \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {P[r]o[pri]et[a]ry\ + \ in[ternet] [Ad]mo[ni]tion[s]},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' - title: Absence - url: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: 'P[r]o[pri]et[a]ry 
in[ternet] [Ad]mo[ni]tion[s]' + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1249 - abstract: 'Dream Structures is a live coding performance that uses computational - audio analysis and machine learning to navigate and resample a half-terabyte archive - of 90s/00s trance music, creating a live musical collage that organises fragments - of audio from thousands of tracks by traversing a multidimensional feature space.' - articleno: 1249 - author: Daniel Jones - bibtex: "@article{nime23-music-1249,\n abstract = {Dream Structures is a live coding\ - \ performance that uses computational audio analysis and machine learning to navigate\ - \ and resample a half-terabyte archive of 90s/00s trance music, creating a live\ - \ musical collage that organises fragments of audio from thousands of tracks by\ - \ traversing a multidimensional feature space.},\n articleno = {1249},\n author\ - \ = {Daniel Jones},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ - \ = {May},\n note = {Live Concert 3, Thursday June 1, Centro de Cultura Digital},\n\ - \ title = {Dream Structures},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_3.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Ptak2010 + address: 'Sydney, Australia' + author: Anthony Ptak + bibtex: "@inproceedings{nime2010-music-Ptak2010,\n address = {Sydney, Australia},\n\ + \ author = {Anthony Ptak},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Live Bar-Coding},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical 
Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 3, Thursday June 1, Centro de Cultura Digital' - title: Dream Structures - url: https://www.nime.org/proceedings/2023/nime23_concert_3.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Live Bar-Coding + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1262 - abstract: Stir bugs is an exploration of live algorithmic control in corpus-based - performance. A community of computational agents confined to a two-dimensional - square prison cell is live-coded into collective madness. Agents are controlled - by simple code functions that define navigation in a terrain made of a collection - of electronic noise samples. Each agent is also associated with a sound playback/synthesis - function. The performance embraces the complexity emerging from quickly coding - a multiplicity of behaviours in a shared sonic space. - articleno: 1262 - author: Gerard Roma - bibtex: "@article{nime23-music-1262,\n abstract = {Stir bugs is an exploration of\ - \ live algorithmic control in corpus-based performance. A community of computational\ - \ agents confined to a two-dimensional square prison cell is live-coded into collective\ - \ madness. Agents are controlled by simple code functions that define navigation\ - \ in a terrain made of a collection of electronic noise samples. Each agent is\ - \ also associated with a sound playback/synthesis function. 
The performance embraces\ - \ the complexity emerging from quickly coding a multiplicity of behaviours in\ - \ a shared sonic space.},\n articleno = {1262},\n author = {Gerard Roma},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ - \ title = {Stir bugs},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Sazdov2010 + address: 'Sydney, Australia' + author: Robert Sazdov and Giuseppe Torre + bibtex: "@inproceedings{nime2010-music-Sazdov2010,\n address = {Sydney, Australia},\n\ + \ author = {Robert Sazdov and Giuseppe Torre},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ + \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {MOLITVA},\n year =\ + \ {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: Stir bugs - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: MOLITVA + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1272 - abstract: 'Branch is a live coding étude centered around speech and form. The piece - uses the TidalCycles language alongside a tool we developed called SHARP, which - provides an interactive, tree-like structure embedded in the text editor to track - how blocks of code evolve over time. 
SHARP opens up new musical affordances centered - around quickly switching between previous program states. In addition, SHARP’s - version trees act as a kind of post-hoc score, leaving a visual trace of the piece’s - structure as it unfolds. With Branch, we attempt to go beyond a simple demonstration - of SHARP as a tool and instead create a piece which highlights the interplay between - musical form, its visual representation in SHARP, and the sonic material itself. - To that end, Branch makes use of machine-generated speech based mostly on snippets - from the text of Robert Frost’s poem “The Road Not Taken”. The text is largely - decontextualized, and its treatment is somewhat tongue-in-cheek: while the poem’s - premise centers around not being able to take both paths, we can easily explore - as many code paths as we wish. In addition to speech, Branch uses audio samples - from Freesound, including the sounds of twigs snapping, knocking on wood, and - a person stepping on leaves.' - articleno: 1272 - author: Daniel Manesh and Douglas A Bowman Jr and Sang Won Lee - bibtex: "@article{nime23-music-1272,\n abstract = {Branch is a live coding étude\ - \ centered around speech and form. The piece uses the TidalCycles language alongside\ - \ a tool we developed called SHARP, which provides an interactive, tree-like structure\ - \ embedded in the text editor to track how blocks of code evolve over time. SHARP\ - \ opens up new musical affordances centered around quickly switching between previous\ - \ program states. In addition, SHARP’s version trees act as a kind of post-hoc\ - \ score, leaving a visual trace of the piece’s structure as it unfolds. With Branch,\ - \ we attempt to go beyond a simple demonstration of SHARP as a tool and instead\ - \ create a piece which highlights the interplay between musical form, its visual\ - \ representation in SHARP, and the sonic material itself. 
To that end, Branch\ - \ makes use of machine-generated speech based mostly on snippets from the text\ - \ of Robert Frost’s poem “The Road Not Taken”. The text is largely decontextualized,\ - \ and its treatment is somewhat tongue-in-cheek: while the poem’s premise centers\ - \ around not being able to take both paths, we can easily explore as many code\ - \ paths as we wish. In addition to speech, Branch uses audio samples from Freesound,\ - \ including the sounds of twigs snapping, knocking on wood, and a person stepping\ - \ on leaves.},\n articleno = {1272},\n author = {Daniel Manesh and Douglas A Bowman\ - \ Jr and Sang Won Lee},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Live Concert 4, Thursday June 1, Centro de Cultura\ - \ Digital},\n title = {Branch},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Hopson2010 + address: 'Sydney, Australia' + author: Holland Hopson + bibtex: "@inproceedings{nime2010-music-Hopson2010,\n address = {Sydney, Australia},\n\ + \ author = {Holland Hopson},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Banjo & Electronics},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' - title: Branch - url: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya 
Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Banjo & Electronics + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1284 - abstract: 'Displacements is a music and video performance that thematizes the recording - of walks in public spaces (a relatively recent and popular genre of videos on - streaming platforms). In a place built to organize human displacements, a moving - observer registers passing bodies: their directions, flows and speeds superimposed - on the shades and forms of the environment are the visual information that feed - an algorithmic composition based on shifts of space, time and color. The music, - likewise algorithmic and mainly synthetic (but also including transformations - of the sound captured during the footage), modulates its visual counterpart by - providing an ethereal atmosphere uncorrelated with the expected soundscape. The - work alludes to principles of the live coding practice as its performance happens - in an improvised way through editing and running a pre-prepared computer code - that controls the processes for music and video generation. The code is displayed - as the top layer of the video, making available to the audience the performer’s - decisions, as well as the algorithmic structure of the work, and having an aesthetic - role as part of the visual composition of the work.' - articleno: 1284 - author: Adriano Claro Monteiro - bibtex: "@article{nime23-music-1284,\n abstract = {Displacements is a music and\ - \ video performance that thematizes the recording of walks in public spaces (a\ - \ relatively recent and popular genre of videos on streaming platforms). 
In a\ - \ place built to organize human displacements, a moving observer registers passing\ - \ bodies: their directions, flows and speeds superimposed on the shades and forms\ - \ of the environment are the visual information that feed an algorithmic composition\ - \ based on shifts of space, time and color. The music, likewise algorithmic and\ - \ mainly synthetic (but also including transformations of the sound captured during\ - \ the footage), modulates its visual counterpart by providing an ethereal atmosphere\ - \ uncorrelated with the expected soundscape. The work alludes to principles of\ - \ the live coding practice as its performance happens in an improvised way through\ - \ editing and running a pre-prepared computer code that controls the processes\ - \ for music and video generation. The code is displayed as the top layer of the\ - \ video, making available to the audience the performer’s decisions, as well as\ - \ the algorithmic structure of the work, and having an aesthetic role as part\ - \ of the visual composition of the work.},\n articleno = {1284},\n author = {Adriano\ - \ Claro Monteiro},\n booktitle = {Music Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month\ - \ = {May},\n note = {Live Concert 4, Thursday June 1, Centro de Cultura Digital},\n\ - \ title = {Displacements},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_4.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Lyon2010 + address: 'Sydney, Australia' + author: Eric Lyon and Ben Knapp + bibtex: "@inproceedings{nime2010-music-Lyon2010,\n address = {Sydney, Australia},\n\ + \ author = {Eric Lyon and Ben Knapp},\n booktitle = {Music Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon\ + \ Drummond, Greg Schiemer, Kirsty Beilharz, Roger 
Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {Stem Cells},\n year\ + \ = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 4, Thursday June 1, Centro de Cultura Digital' - title: Displacements - url: https://www.nime.org/proceedings/2023/nime23_concert_4.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Stem Cells + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1285 - abstract: 'If crystal bowls could speak, what would they say? Beyond Hexagons is - a performance using the shard-speakers, a musical instrument and playback system - created from the shards of broken crystal singing bowls with affixed transducers - and resonators. Tracing their lifespans from quartz mines to factories and from - scientific laboratories and sound studios, the bowls transmit their origin stories - of purpose, function, and pleasure through a unique and alien sonic language that - makes heavy use of improvisation, whimsy, and custom software instruments. The - result is a sonic exploration of the paradoxes contained in these materials — - strength and fragility, acuity and intuition, secrecy and frankness.' - articleno: 1285 - author: Anastasia Clarke - bibtex: "@article{nime23-music-1285,\n abstract = {If crystal bowls could speak,\ - \ what would they say? Beyond Hexagons is a performance using the shard-speakers,\ - \ a musical instrument and playback system created from the shards of broken crystal\ - \ singing bowls with affixed transducers and resonators. 
Tracing their lifespans\ - \ from quartz mines to factories and from scientific laboratories and sound studios,\ - \ the bowls transmit their origin stories of purpose, function, and pleasure through\ - \ a unique and alien sonic language that makes heavy use of improvisation, whimsy,\ - \ and custom software instruments. The result is a sonic exploration of the paradoxes\ - \ contained in these materials — strength and fragility, acuity and intuition,\ - \ secrecy and frankness.},\n articleno = {1285},\n author = {Anastasia Clarke},\n\ - \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Rob Hamilton},\n month = {May},\n note\ - \ = {Live Concert 3, Thursday June 1, Centro de Cultura Digital},\n title = {Shard\ - \ Speakers: Beyond Hexagons},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_3.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Ensemble2010 + address: 'Sydney, Australia' + author: Charisma Ensemble and Kirsty Beilharz + bibtex: "@inproceedings{nime2010-music-Ensemble2010,\n address = {Sydney, Australia},\n\ + \ author = {Charisma Ensemble and Kirsty Beilharz},\n booktitle = {Music Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine,\ + \ Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {Diamond Quills Hyper-Ensemble},\n\ + \ year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 3, Thursday June 1, Centro de Cultura Digital' - title: 'Shard Speakers: Beyond Hexagons' - url: https://www.nime.org/proceedings/2023/nime23_concert_3.pdf - year: 2023 - + editor: 'Andrew Johnston, Sam Ferguson, Jos 
Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Diamond Quills Hyper-Ensemble + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1287 - abstract: 'Fluid Flows, Transit, and Symbols is a coded composition and re-imagined - sonic poem. The primary influence for the design of the piece was pipe flow and - fluid mechanics; specifically the transition from laminar (smooth) flow to turbulent - flow within a pipe. The sounds and sequences are all designed in SuperCollider - and the organization of the composition is composed live and sounds different - with every performance. The reading of the poem is processed through granular - synthesis, creating new sentences amongst the soundscape at an unpredictable rate. - The performance and piece can be adapted to any space and only requires a microphone, - a laptop, and a soundsystem.' - articleno: 1287 - author: Costa K Colachis Glass - bibtex: "@article{nime23-music-1287,\n abstract = {Fluid Flows, Transit, and Symbols\ - \ is a coded composition and re-imagined sonic poem. The primary influence for\ - \ the design of the piece was pipe flow and fluid mechanics; specifically the\ - \ transition from laminar (smooth) flow to turbulent flow within a pipe. The sounds\ - \ and sequences are all designed in SuperCollider and the organization of the\ - \ composition is composed live and sounds different with every performance. The\ - \ reading of the poem is processed through granular synthesis, creating new sentences\ - \ amongst the soundscape at an unpredictable rate. 
The performance and piece can\ - \ be adapted to any space and only requires a microphone, a laptop, and a soundsystem.},\n\ - \ articleno = {1287},\n author = {Costa K Colachis Glass},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 2, Wednesday\ - \ May 31, Biblioteca Vasconcelos},\n title = {Fluid Flows, Transit, and Symbols},\n\ - \ url = {https://www.nime.org/proceedings/2023/nime23_concert_2.pdf},\n year =\ - \ {2023}\n}\n" + +- ENTRYTYPE: inproceedings + ID: nime2010-music-Hewitt2010 + address: 'Sydney, Australia' + author: Donna Hewitt and Avril Huddy + bibtex: "@inproceedings{nime2010-music-Hewitt2010,\n address = {Sydney, Australia},\n\ + \ author = {Donna Hewitt and Avril Huddy},\n booktitle = {Music Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon\ + \ Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills},\n month = {June},\n\ + \ publisher = {University of Technology Sydney},\n title = {Idol},\n year = {2010}\n\ + }\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos' - title: 'Fluid Flows, Transit, and Symbols' - url: https://www.nime.org/proceedings/2023/nime23_concert_2.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Idol + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1305 - abstract: 'Survival Kit is a live electroacoustic piece that explores the connection - between textual and musical meanings. 
It is a revised take on choral music in - the digital era. The author experiments with ways to interpret natural language - in computer music and suggests a novel approach to performing text/sound compositions. - The foundation of the piece is a poetic text that lists all the things that may - come to mind amidst a futile preparation for a global disaster. The piece is - performed by a single performer in the live coding manner. The author enters the - text in his original computer music software, which triggers sections of pre-recorded - music and corresponding processing algorithms. All vocals were performed by a - collaborator vocalist (tenor) using a recording score for individual lines, and - then edited and programmed into the software by the author.' - articleno: 1305 - author: Eugene Markin - bibtex: "@article{nime23-music-1305,\n abstract = {Survival Kit is a live electroacoustic\ - \ piece that explores the connection between textual and musical meanings. It\ - \ is a revised take on choral music in the digital era. The author experiments\ - \ with ways to interpret natural language in computer music and suggests a novel\ - \ approach to performing text/sound compositions. The foundation of the piece\ - \ is a poetic text that lists all the things that may come to mind amidst a futile\ - \ preparation for a global disaster. The piece is performed by a single performer\ - \ in the live coding manner. The author enters the text in his original computer\ - \ music software, which triggers sections of pre-recorded music and corresponding\ - \ processing algorithms. 
All vocals were performed by a collaborator vocalist\ - \ (tenor) using a recording score for individual lines, and then edited and programmed\ - \ into the software by the author.},\n articleno = {1305},\n author = {Eugene\ - \ Markin},\n booktitle = {Music Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n month =\ - \ {May},\n note = {Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos},\n\ - \ title = {Survival Kit},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_1.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Martinez2010 + address: 'Sydney, Australia' + author: Christopher Martinez + bibtex: "@inproceedings{nime2010-music-Martinez2010,\n address = {Sydney, Australia},\n\ + \ author = {Christopher Martinez},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Radio Healer},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 1, Wednesday May 31, Biblioteca Vasconcelos' - title: Survival Kit - url: https://www.nime.org/proceedings/2023/nime23_concert_1.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Radio Healer + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1315 - abstract: 'Sound composition on the fly that consists of a descent into the training - of a deep learning audio neural network, which will explore with voice the 
implications - of artificial intelligence from a transhackfeminist ethical perspective in order - to critically look at these tools from the same and thus intervene them from within. - That is, as an algorithmic essay, the piece will explore with voice, the implications - of these technologies taking as text feminist and transfeminist research that - theorize on the subject. The voice will be synthesized and reconstructed as a - means to hack the same networks and the way we understand them.' - articleno: 1315 - author: Marianne Teixido and Emilio Ocelotl - bibtex: "@article{nime23-music-1315,\n abstract = {Sound composition on the fly\ - \ that consists of a descent into the training of a deep learning audio neural\ - \ network, which will explore with voice the implications of artificial intelligence\ - \ from a transhackfeminist ethical perspective in order to critically look at\ - \ these tools from the same and thus intervene them from within. That is, as an\ - \ algorithmic essay, the piece will explore with voice, the implications of these\ - \ technologies taking as text feminist and transfeminist research that theorize\ - \ on the subject. 
The voice will be synthesized and reconstructed as a means to\ - \ hack the same networks and the way we understand them.},\n articleno = {1315},\n\ - \ author = {Marianne Teixido and Emilio Ocelotl},\n booktitle = {Music Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 5, Friday June\ - \ 2, Centro de Cultura Digital},\n title = {deep structures},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_5.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Langley2010 + address: 'Sydney, Australia' + author: Somaya Langley + bibtex: "@inproceedings{nime2010-music-Langley2010,\n address = {Sydney, Australia},\n\ + \ author = {Somaya Langley},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {ID-i/o},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 5, Friday June 2, Centro de Cultura Digital' - title: deep structures - url: https://www.nime.org/proceedings/2023/nime23_concert_5.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: ID-i/o + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1316 - abstract: 'Affordance describes the relationship between the environment and the - individual from the action provider’s perspective. Affordance can be false, can - be hidden, or can be perceptible. 
Within our complex environment, real or virtual, - material or intellectual, the affordances can be functional or delusional, can - be ephemeral or permanent, can be present or delayed – a choice for you to observe, - adapt, participate, and evolve.' - articleno: 1316 - author: Chi Wang - bibtex: "@article{nime23-music-1316,\n abstract = {Affordance describes the relationship\ - \ between the environment and the individual from the action provider’s perspective.\ - \ Affordance can be false, can be hidden, or can be perceptible. Within our complex\ - \ environment, real or virtual, material or intellectual, the affordances can\ - \ be functional or delusional, can be ephemeral or permanent, can be present or\ - \ delayed – a choice for you to observe, adapt, participate, and evolve.},\n articleno\ - \ = {1316},\n author = {Chi Wang},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Online Presentation},\n title = {Transparent Affordance},\n\ - \ url = {https://www.nime2023.org/program/online-in-person-concerts},\n year =\ - \ {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Schubert2010 + address: 'Sydney, Australia' + author: Alexander Schubert + bibtex: "@inproceedings{nime2010-music-Schubert2010,\n address = {Sydney, Australia},\n\ + \ author = {Alexander Schubert},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Laplace Tiger},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: 
Transparent Affordance - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Laplace Tiger + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1340 - abstract: 'Innermost Echoes is a performance work which utilizes performer physiological - data as a novel input mechanism to introduce a new form of hybrid improvisation - alongside a robotic koto which will sonify this data in a communicative feedback - loop and a Eurorack system which will serve as a bridge between the passive physiological - data and the active performance. By introducing this form of input, our improvisational - performance will challenge the traditional approach to live performance by creating - a closed loop between our emotions and the performance itself. In a sense, we - will be improvising with our own presence. We believe this new kind of performative - dialogue can challenge existing hierarchies within live music performances. This - novel performance paradigm seeks to examine new performative dialogues and ideas - on what it means to perform live. Current performance practices are often based - predominantly on the direct communication of the performers through their respective - instruments. When we introduce the performer’s physiology as a gestural language, - we hope to define a new methodology of presence-based improvisation. The performers - wear custom built sensing wristbands and elastic breathing bands around their - chest to gather physiological data consisting of EDA (electrodermal activity), - HRV (heart rate variability), and respiration rate. This data is then sent via - OSC to a laptop running Max/MSP which applies this live data to the robotic koto - and the Eurorack system. 
These data streams and occurrences of synchrony between - the performers’ data are then sonified and used as a structural indicator of the - current state of the performers, thereby forming a new unspoken dialogue between - the two.' - articleno: 1340 - author: Danny Hynds and Aoi Uyama and George Chernyshov and DingDing Zheng and Kozue - Matsumoto and Michael Pogorzhelskiy and Tatsuya Saito and Kai Kunze and Kouta - Minamizawa - bibtex: "@article{nime23-music-1340,\n abstract = {Innermost Echoes is a performance\ - \ work which utilizes performer physiological data as a novel input mechanism\ - \ to introduce a new form of hybrid improvisation alongside a robotic koto which\ - \ will sonify this data in a communicative feedback loop and a Eurorack system\ - \ which will serve as a bridge between the passive physiological data and the\ - \ active performance. By introducing this form of input, our improvisational performance\ - \ will challenge the traditional approach to live performance by creating a closed\ - \ loop between our emotions and the performance itself. In a sense, we will be\ - \ improvising with our own presence. We believe this new kind of performative\ - \ dialogue can challenge existing hierarchies within live music performances.\ - \ This novel performance paradigm seeks to examine new performative dialogues\ - \ and ideas on what it means to perform live. Current performance practices are\ - \ often based predominantly on the direct communication of the performers through\ - \ their respective instruments. When we introduce the performer’s physiology as\ - \ a gestural language, we hope to define a new methodology of presence-based improvisation.\ - \ The performers wear custom built sensing wristbands and elastic breathing bands\ - \ around their chest to gather physiological data consisting of EDA (electrodermal\ - \ activity), HRV (heart rate variability), and respiration rate. 
This data is\ - \ then sent via OSC to a laptop running Max/MSP which applies this live data to\ - \ the robotic koto and the Eurorack system. These data streams and occurrences\ - \ of synchrony between the performers’ data are then sonified and used as a structural\ - \ indicator of the current state of the performers, thereby forming a new unspoken\ - \ dialogue between the two.},\n articleno = {1340},\n author = {Danny Hynds and\ - \ Aoi Uyama and George Chernyshov and DingDing Zheng and Kozue Matsumoto and Michael\ - \ Pogorzhelskiy and Tatsuya Saito and Kai Kunze and Kouta Minamizawa},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ - \ title = {Innermost Echoes},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Lai2010 + abstract: "Interactivity between performer and technology is a crucial part of media\ + \ performance. This is made possible through creative integration of software\ + \ and hardware devices. The performance work, Strike On Stage, uses such an integration\ + \ to bridge three aspects of performance: performers' body movements, audio processing,\ + \ and projected video. \n\nFor the NIME 2010 concert performance, we are proposing\ + \ to present a media work composed for the Strike On Stage instrument demonstrating\ + \ a wide variety of interactions between the two performers, the instrument itself\ + \ and the video projection.\n\nThe instrument for Strike On Stage is a large performance\ + \ surface for multiple players to control computer based musical instruments and\ + \ visuals. This concept is a new perspective on Chi-Hsia Lai's MPhil research\ + \ project, Hands On Stage (video documentation can be found at ),\ + \ which was a solo, audiovisual performance work created during 2007 and 2008." 
+ address: 'Sydney, Australia' + author: Chi-Hsia Lai and Charles Martin + bibtex: "@inproceedings{nime2010-music-Lai2010,\n abstract = {Interactivity between\ + \ performer and technology is a crucial part of media performance. This is made\ + \ possible through creative integration of software and hardware devices. The\ + \ performance work, Strike On Stage, uses such an integration to bridge three\ + \ aspects of performance: performers' body movements, audio processing, and projected\ + \ video. \n\nFor the NIME 2010 concert performance, we are proposing to present\ + \ a media work composed for the Strike On Stage instrument demonstrating a wide\ + \ variety of interactions between the two performers, the instrument itself and\ + \ the video projection.\n\nThe instrument for Strike On Stage is a large performance\ + \ surface for multiple players to control computer based musical instruments and\ + \ visuals. This concept is a new perspective on Chi-Hsia Lai's MPhil research\ + \ project, Hands On Stage (video documentation can be found at ),\ + \ which was a solo, audiovisual performance work created during 2007 and 2008.},\n\ + \ address = {Sydney, Australia},\n author = {Chi-Hsia Lai and Charles Martin},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Andrew Johnston, Sam Ferguson, Jos Mulder,\ + \ Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger\ + \ Mills},\n month = {June},\n publisher = {University of Technology Sydney},\n\ + \ title = {Strike On Stage},\n year = {2010}\n}\n" + booktitle: Music Proceedings of the International Conference on New Interfaces for + Musical Expression + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Strike On Stage + year: 2010 + + +- ENTRYTYPE: 
inproceedings + ID: nime2010-music-Schunior2010 + address: 'Sydney, Australia' + author: Michael Schunior + bibtex: "@inproceedings{nime2010-music-Schunior2010,\n address = {Sydney, Australia},\n\ + \ author = {Michael Schunior},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {{HAITIAN HAARPS}},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: Innermost Echoes - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: '{HAITIAN HAARPS}' + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1389 - abstract: 'Our performance research quintet has been set up to explore multiple - instantiations of DIY electronic musical instruments (EMI) through improvisation. - Our group consists of five highly experienced music improvisers, visual artists - and instrument makers with a shared connection to the Sonic Arts Research Centre - (SARC) at Queen’s University Belfast. Performer-makers in this group have multiple - decades of experience producing work in academic and professional contexts in - Europe, the Americas and the Middle East [websites anonymised, but available upon - request]. We are particularly interested in exploiting irregularities in the - qualities of circuit components (e.g. 
imprecise tolerances/values), and how this - allows for the development of stylistic differences across multiple instrument-performer - configurations. We are also interested in how skill, style and performance techniques - are developed in different ways on similar devices over extended periods of time, - and how our existing musical practices are reconfigured through such collaborative - exchanges. For this musical performance each performer will use DIY EMI featuring - function generators and wide band noise. The instruments are ‘bent by design’ - (Hordijk 2009) and use ‘withered technologies’(Ott 2020) at their core. These - musical instruments have been selected to promote productive instability whilst - building a timbral playground. The DIY instrument ethos includes the publication - of the designs and ‘how to’ instructions to assist other makers in the creation - of their own EMI, especially those who have to adopt a frugal approach to resources. The - aesthetic of our performance is informed by noise and free improvised musics, - and is offered as continuation of ‘thinkering’ (Huhtamo 2011) practice as part - of the history of electronic music experimentation.' - articleno: 1389 - author: Miguel Ortiz and Barry Cullen and Paul Stapleton - bibtex: "@article{nime23-music-1389,\n abstract = {Our performance research quintet\ - \ has been set up to explore multiple instantiations of DIY electronic musical\ - \ instruments (EMI) through improvisation. Our group consists of five highly experienced\ - \ music improvisers, visual artists and instrument makers with a shared connection\ - \ to the Sonic Arts Research Centre (SARC) at Queen’s University Belfast. Performer-makers\ - \ in this group have multiple decades of experience producing work in academic\ - \ and professional contexts in Europe, the Americas and the Middle East [websites\ - \ anonymised, but available upon request]. 
We are particularly interested in\ - \ exploiting irregularities in the qualities of circuit components (e.g. imprecise\ - \ tolerances/values), and how this allows for the development of stylistic differences\ - \ across multiple instrument-performer configurations. We are also interested\ - \ in how skill, style and performance techniques are developed in different ways\ - \ on similar devices over extended periods of time, and how our existing musical\ - \ practices are reconfigured through such collaborative exchanges. For this\ - \ musical performance each performer will use DIY EMI featuring function generators\ - \ and wide band noise. The instruments are ‘bent by design’ (Hordijk 2009) and\ - \ use ‘withered technologies’(Ott 2020) at their core. These musical instruments\ - \ have been selected to promote productive instability whilst building a timbral\ - \ playground. The DIY instrument ethos includes the publication of the designs\ - \ and ‘how to’ instructions to assist other makers in the creation of their own\ - \ EMI, especially those who have to adopt a frugal approach to resources. The\ - \ aesthetic of our performance is informed by noise and free improvised musics,\ - \ and is offered as continuation of ‘thinkering’ (Huhtamo 2011) practice as part\ - \ of the history of electronic music experimentation.},\n articleno = {1389},\n\ - \ author = {Miguel Ortiz and Barry Cullen and Paul Stapleton},\n booktitle = {Music\ - \ Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Rob Hamilton},\n month = {May},\n note = {Live Concert 3, Thursday\ - \ June 1, Centro de Cultura Digital},\n title = {Pandemonium Quintet play Drone\ - \ \\& Drama Versions},\n url = {https://www.nime.org/proceedings/2023/nime23_concert_3.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-GeWang2010 + address: 'Sydney, Australia' + author: 'Ge Wang, Jieun Oh, Jorge Herrera, Nicholas J. 
Bryan and Luke Dahl' + bibtex: "@inproceedings{nime2010-music-GeWang2010,\n address = {Sydney, Australia},\n\ + \ author = {Ge Wang, Jieun Oh, Jorge Herrera, Nicholas J. Bryan and Luke Dahl},\n\ + \ booktitle = {Music Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Andrew Johnston, Sam Ferguson, Jos Mulder,\ + \ Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger\ + \ Mills},\n month = {June},\n publisher = {University of Technology Sydney},\n\ + \ title = {Stanford Mobile Phone Orchestra (MoPhO)},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 3, Thursday June 1, Centro de Cultura Digital' - title: Pandemonium Quintet play Drone & Drama Versions - url: https://www.nime.org/proceedings/2023/nime23_concert_3.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Stanford Mobile Phone Orchestra (MoPhO) + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1392 - abstract: 'Laser Phase Synthesis [XXI VII III I] is an audiovisual performance informed - by the historical Audio/Video/Laser system developed by Lowell Cross and Carson - Jeffries for use by David Tudor and Experiments in Arts and Technology (E.A.T.) - at the 1970 Japan World Exposition in Osaka, Japan. The current work employs digital - audio synthesis, modern laser display technology, and close collaboration between - sound and image composition to illustrate the harmonic progression of a musical - work.' 
- articleno: 1392 - author: Derek Holzer and Luka Aron - bibtex: "@article{nime23-music-1392,\n abstract = {Laser Phase Synthesis [XXI VII\ - \ III I] is an audiovisual performance informed by the historical Audio/Video/Laser\ - \ system developed by Lowell Cross and Carson Jeffries for use by David Tudor\ - \ and Experiments in Arts and Technology (E.A.T.) at the 1970 Japan World Exposition\ - \ in Osaka, Japan. The current work employs digital audio synthesis, modern laser\ - \ display technology, and close collaboration between sound and image composition\ - \ to illustrate the harmonic progression of a musical work.},\n articleno = {1392},\n\ - \ author = {Derek Holzer and Luka Aron},\n booktitle = {Music Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Rob Hamilton},\n month = {May},\n note = {Live Concert 2, Wednesday May 31,\ - \ Biblioteca Vasconcelos},\n title = {Laser Phase Synthesis [XXI VII III I]},\n\ - \ url = {https://www.nime.org/proceedings/2023/nime23_concert_2.pdf},\n year =\ - \ {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Essl2010 + address: 'Sydney, Australia' + author: Georg Essl + bibtex: "@inproceedings{nime2010-music-Essl2010,\n address = {Sydney, Australia},\n\ + \ author = {Georg Essl},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Mobile Phone Orchestras presents...},\n year\ + \ = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos' - title: 'Laser Phase Synthesis [XXI VII III I]' - url: 
https://www.nime.org/proceedings/2023/nime23_concert_2.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Mobile Phone Orchestras presents... + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1393 - abstract: '4 Disklavier Preludes is one of the main works in The Gedanken Room (2021). - This is a work that explores the implications of Quantum Computing for Music composition, - both conceptually and practically. Its 4 parts explore the use of the Disklavier - both as an input and output interface for building Quantum Circuits and retrieving - its measurements, in a live interactive multimedia environment with which live - performers interact. The cinematographic narrative addresses utopian/dystopian - issues in human-machine interaction.' - articleno: 1393 - author: Omar C Hamido - bibtex: "@article{nime23-music-1393,\n abstract = {4 Disklavier Preludes is one\ - \ of the main works in The Gedanken Room (2021). This is a work that explores\ - \ the implications of Quantum Computing for Music composition, both conceptually\ - \ and practically. 
Its 4 parts explore the use of the Disklavier both as an input\ - \ and output interface for building Quantum Circuits and retrieving its measurements,\ - \ in a live interactive multimedia environment with which live performers interact.\ - \ The cinematographic narrative addresses utopian/dystopian issues in human-machine\ - \ interaction.},\n articleno = {1393},\n author = {Omar C Hamido},\n booktitle\ - \ = {Music Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Rob Hamilton},\n month = {May},\n note = {Online Presentation},\n\ - \ title = {4 Disklavier Preludes},\n url = {https://www.nime2023.org/program/online-in-person-concerts},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Haines2010 + address: 'Sydney, Australia' + author: Christian Haines + bibtex: "@inproceedings{nime2010-music-Haines2010,\n address = {Sydney, Australia},\n\ + \ author = {Christian Haines},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {SOMETHING TO GO HEAR #4},\n year = {2010}\n}\n" booktitle: Music Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: Online Presentation - title: 4 Disklavier Preludes - url: https://www.nime2023.org/program/online-in-person-concerts - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: "SOMETHING TO GO HEAR #4" + year: 2010 -- ENTRYTYPE: article - ID: nime23-music-1394 - abstract: 'Finger Breath, for performer, live electronics, 
and zither, was originally - commissioned by the the Frontside International Chamber Music Festival, funded - by a grant from the Swedish Arts Council, and premiered in January 2023 as a headphone - performance in the belly of a small passenger ferry. The main concepts behind - the work are three: First, the intimate sounds from the musicians breathing, and - from his fingers on the strings of an ancient zither. Second, the idea that the - live breathing and the musician’s sounds played by finger movements are the only - sources of gestural control and expression in the piece. Breathing and finger - movements form the basis of many musical expressions throughout the world, as - they are our most intimate physiological and gestural bodily mechanisms. Third, - the combination of the first two into a situation of “entangled musicianship”, - where each action has triple consequences: as a sound source to be heard live, - as a sound source being fed to various buffers for later manipulation and playback, - but also as a source of gestural control, affecting a variety of playback mechanisms - for the buffered sounds. It is thus impossible to play something without also - altering the conditions for future playing. Hence the entanglement.' - articleno: 1394 - author: Palle Dahlstedt - bibtex: "@article{nime23-music-1394,\n abstract = {Finger Breath, for performer,\ - \ live electronics, and zither, was originally commissioned by the the Frontside\ - \ International Chamber Music Festival, funded by a grant from the Swedish Arts\ - \ Council, and premiered in January 2023 as a headphone performance in the belly\ - \ of a small passenger ferry. The main concepts behind the work are three: First,\ - \ the intimate sounds from the musicians breathing, and from his fingers on the\ - \ strings of an ancient zither. 
Second, the idea that the live breathing and the\ - \ musician’s sounds played by finger movements are the only sources of gestural\ - \ control and expression in the piece. Breathing and finger movements form the\ - \ basis of many musical expressions throughout the world, as they are our most\ - \ intimate physiological and gestural bodily mechanisms. Third, the combination\ - \ of the first two into a situation of “entangled musicianship”, where each action\ - \ has triple consequences: as a sound source to be heard live, as a sound source\ - \ being fed to various buffers for later manipulation and playback, but also as\ - \ a source of gestural control, affecting a variety of playback mechanisms for\ - \ the buffered sounds. It is thus impossible to play something without also altering\ - \ the conditions for future playing. Hence the entanglement.},\n articleno = {1394},\n\ - \ author = {Palle Dahlstedt},\n booktitle = {Music Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Rob Hamilton},\n\ - \ month = {May},\n note = {Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos},\n\ - \ title = {Finger Breath – Material and control through intimate sounds},\n url\ - \ = {https://www.nime.org/proceedings/2023/nime23_concert_2.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: nime2010-music-Schiemer2010 + address: 'Sydney, Australia' + author: Greg Schiemer + bibtex: "@inproceedings{nime2010-music-Schiemer2010,\n address = {Sydney, Australia},\n\ + \ author = {Greg Schiemer},\n booktitle = {Music Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Andrew Johnston,\ + \ Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, Jon Drummond, Greg Schiemer,\ + \ Kirsty Beilharz, Roger Mills},\n month = {June},\n publisher = {University of\ + \ Technology Sydney},\n title = {Mandala 9},\n year = {2010}\n}\n" booktitle: Music Proceedings of the 
International Conference on New Interfaces for Musical Expression - editor: Rob Hamilton - month: May - note: 'Live Concert 2, Wednesday May 31, Biblioteca Vasconcelos' - title: Finger Breath – Material and control through intimate sounds - url: https://www.nime.org/proceedings/2023/nime23_concert_2.pdf - year: 2023 + editor: 'Andrew Johnston, Sam Ferguson, Jos Mulder, Somaya Langley, Garth Paine, + Jon Drummond, Greg Schiemer, Kirsty Beilharz, Roger Mills' + month: June + publisher: University of Technology Sydney + title: Mandala 9 + year: 2010 diff --git a/_data/nime_papers.yaml b/_data/nime_papers.yaml index d03764bc..5b9253dc 100644 --- a/_data/nime_papers.yaml +++ b/_data/nime_papers.yaml @@ -1,93560 +1,93565 @@ - ENTRYTYPE: inproceedings - ID: Jones2007 - abstract: 'Physical modeling has proven to be a successful method ofsynthesizing - highly expressive sounds. However, providingdeep methods of real time musical - control remains a majorchallenge. In this paper we describe our work towards aninstrument - for percussion synthesis, in which a waveguidemesh is both excited and damped - by a 2D matrix of forcesfrom a sensor. By emulating a drum skin both as controllerand - sound generator, our instrument has reproduced someof the expressive qualities - of hand drumming. Details of ourimplementation are discussed, as well as qualitative - resultsand experience gleaned from live performances.' - address: 'New York City, NY, United States' - author: 'Jones, Randy and Schloss, Andrew' - bibtex: "@inproceedings{Jones2007,\n abstract = {Physical modeling has proven to\ - \ be a successful method ofsynthesizing highly expressive sounds. However, providingdeep\ - \ methods of real time musical control remains a majorchallenge. 
In this paper\ - \ we describe our work towards aninstrument for percussion synthesis, in which\ - \ a waveguidemesh is both excited and damped by a 2D matrix of forcesfrom a sensor.\ - \ By emulating a drum skin both as controllerand sound generator, our instrument\ - \ has reproduced someof the expressive qualities of hand drumming. Details of\ - \ ourimplementation are discussed, as well as qualitative resultsand experience\ - \ gleaned from live performances.},\n address = {New York City, NY, United States},\n\ - \ author = {Jones, Randy and Schloss, Andrew},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177131},\n issn = {2220-4806},\n keywords = {Physical modeling,\ - \ instrument design, expressive control, multi-touch, performance },\n pages =\ - \ {27--30},\n title = {Controlling a Physical Model with a {2D} Force Matrix},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_027.pdf},\n year = {2007}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1177131 - issn: 2220-4806 - keywords: 'Physical modeling, instrument design, expressive control, multi-touch, - performance ' - pages: 27--30 - title: Controlling a Physical Model with a 2D Force Matrix - url: http://www.nime.org/proceedings/2007/nime2007_027.pdf - year: 2007 - - -- ENTRYTYPE: inproceedings - ID: B2007 - abstract: 'In this paper we describe the design and implementation of the PHYSMISM: - an interface for exploring the possibilities for improving the creative use of - physical modelling sound synthesis. The PHYSMISM is implemented in a software - and hardware version. Moreover, four different physical modelling techniques are - implemented, to explore the implications of using and combining different techniques. 
- In order to evaluate the creative use of physical models, a test was performed - using 11 experienced musicians as test subjects. Results show that the capability - of combining the physical models and the use of a physical interface engaged the - musicians in creative exploration of physical models.' - address: 'New York City, NY, United States' - author: 'Bottcher, Niels and Gelineck, Steven and Serafin, Stefania' - bibtex: "@inproceedings{B2007,\n abstract = {In this paper we describe the design\ - \ and implementation of the PHYSMISM: an interface for exploring the possibilities\ - \ for improving the creative use of physical modelling sound synthesis. The PHYSMISM\ - \ is implemented in a software and hardware version. Moreover, four different\ - \ physical modelling techniques are implemented, to explore the implications of\ - \ using and combining different techniques. In order to evaluate the creative\ - \ use of physical models, a test was performed using 11 experienced musicians\ - \ as test subjects. Results show that the capability of combining the physical\ - \ models and the use of a physical interface engaged the musicians in creative\ - \ exploration of physical models.},\n address = {New York City, NY, United States},\n\ - \ author = {Bottcher, Niels and Gelineck, Steven and Serafin, Stefania},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177051},\n issn = {2220-4806},\n keywords\ - \ = {Physical models, hybrid instruments, excitation, resonator. },\n pages =\ - \ {31--36},\n title = {{PHY}SMISM : A Control Interface for Creative Exploration\ - \ of Physical Models},\n url = {http://www.nime.org/proceedings/2007/nime2007_031.pdf},\n\ - \ year = {2007}\n}\n" + ID: Overholt2011 + abstract: 'The Overtone Fiddle is a new violin-family instrument that incorporates + electronic sensors, integrated DSP, and physical actuation of the acoustic body. 
+ An embedded tactile sound transducer creates extra vibrations in the body of the + Overtone Fiddle, allowing performer control and sensation via both traditional + violin techniques, as well as extended playing techniques that incorporate shared + man/machine control of the resulting sound. A magnetic pickup system is mounted + to the end of the fiddle''s fingerboard in order to detect the signals from the + vibrating strings, deliberately not capturing vibrations from the full body of + the instrument. This focused sensing approach allows less restrained use of DSP-generated + feedback signals, as there is very little direct leakage from the actuator embedded + in the body of the instrument back to the pickup. ' + address: 'Oslo, Norway' + author: 'Overholt, Dan' + bibtex: "@inproceedings{Overholt2011,\n abstract = {The Overtone Fiddle is a new\ + \ violin-family instrument that incorporates electronic sensors, integrated DSP,\ + \ and physical actuation of the acoustic body. An embedded tactile sound transducer\ + \ creates extra vibrations in the body of the Overtone Fiddle, allowing performer\ + \ control and sensation via both traditional violin techniques, as well as extended\ + \ playing techniques that incorporate shared man/machine control of the resulting\ + \ sound. A magnetic pickup system is mounted to the end of the fiddle's fingerboard\ + \ in order to detect the signals from the vibrating strings, deliberately not\ + \ capturing vibrations from the full body of the instrument. This focused sensing\ + \ approach allows less restrained use of DSP-generated feedback signals, as there\ + \ is very little direct leakage from the actuator embedded in the body of the\ + \ instrument back to the pickup. 
},\n address = {Oslo, Norway},\n author = {Overholt,\ + \ Dan},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178127},\n issn = {2220-4806},\n\ + \ keywords = {Actuated Musical Instruments, Hybrid Instruments, Active Acoustics,\ + \ Electronic Violin },\n pages = {30--33},\n presentation-video = {https://vimeo.com/26795157/},\n\ + \ title = {The Overtone Fiddle: an Actuated Acoustic Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_004.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177051 + doi: 10.5281/zenodo.1178127 issn: 2220-4806 - keywords: 'Physical models, hybrid instruments, excitation, resonator. ' - pages: 31--36 - title: 'PHYSMISM : A Control Interface for Creative Exploration of Physical Models' - url: http://www.nime.org/proceedings/2007/nime2007_031.pdf - year: 2007 + keywords: 'Actuated Musical Instruments, Hybrid Instruments, Active Acoustics, Electronic + Violin ' + pages: 30--33 + presentation-video: https://vimeo.com/26795157/ + title: 'The Overtone Fiddle: an Actuated Acoustic Instrument' + url: http://www.nime.org/proceedings/2011/nime2011_004.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Chuchacz2007 - abstract: 'A novel electronic percussion synthesizer prototype is presented. Our - ambition is to design an instrument that will produce a high quality, realistic - sound based on a physical modelling sound synthesis algorithm. This is achieved - using a real-time Field Programmable Gate Array (FPGA) implementation of the model - coupled to an interface that aims to make efficient use of all the subtle nuanced - gestures of the instrumentalist. It is based on a complex physical model of the - vibrating plate --- the source of sound in the majority of percussion instruments. 
- A Xilinx Virtex II pro FPGA core handles the sound synthesis computations with - an 8 billion operations per second performance and has been designed in such a - way to allow a high level of control and flexibility. Strategies are also presented - to that allow the parametric space of the model to be mapped to the playing gestures - of the percussionist.' - address: 'New York City, NY, United States' - author: 'Chuchacz, Katarzyna and O''Modhrain, Sile and Woods, Roger' - bibtex: "@inproceedings{Chuchacz2007,\n abstract = {A novel electronic percussion\ - \ synthesizer prototype is presented. Our ambition is to design an instrument\ - \ that will produce a high quality, realistic sound based on a physical modelling\ - \ sound synthesis algorithm. This is achieved using a real-time Field Programmable\ - \ Gate Array (FPGA) implementation of the model coupled to an interface that aims\ - \ to make efficient use of all the subtle nuanced gestures of the instrumentalist.\ - \ It is based on a complex physical model of the vibrating plate --- the source\ - \ of sound in the majority of percussion instruments. A Xilinx Virtex II pro FPGA\ - \ core handles the sound synthesis computations with an 8 billion operations per\ - \ second performance and has been designed in such a way to allow a high level\ - \ of control and flexibility. Strategies are also presented to that allow the\ - \ parametric space of the model to be mapped to the playing gestures of the percussionist.},\n\ - \ address = {New York City, NY, United States},\n author = {Chuchacz, Katarzyna\ - \ and O'Modhrain, Sile and Woods, Roger},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177071},\n\ - \ issn = {2220-4806},\n keywords = {Physical Model, Electronic Percussion Instrument,\ - \ FPGA. 
},\n pages = {37--40},\n title = {Physical Models and Musical Controllers\ - \ -- Designing a Novel Electronic Percussion Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_037.pdf},\n\ - \ year = {2007}\n}\n" + ID: Montag2011 + abstract: 'During the past decade, multi-touch surfaces have emerged as valuable + tools for collaboration, display, interaction, and musical expression. Unfortunately, + they tend to be costly and often suffer from two drawbacks for music performance:(1) + relatively high latency owing to their sensing mechanism, and (2) lack of haptic + feedback. We analyze the latency present in several current multi-touch platforms, + and we describe a new custom system that reduces latency to an average of 30 ms + while providing programmable haptic feed-back to the user. The paper concludes + with a description of ongoing and future work.' + address: 'Oslo, Norway' + author: 'Montag, Matthew and Sullivan, Stefan and Dickey, Scott and Leider, Colby' + bibtex: "@inproceedings{Montag2011,\n abstract = {During the past decade, multi-touch\ + \ surfaces have emerged as valuable tools for collaboration, display, interaction,\ + \ and musical expression. Unfortunately, they tend to be costly and often suffer\ + \ from two drawbacks for music performance:(1) relatively high latency owing to\ + \ their sensing mechanism, and (2) lack of haptic feedback. We analyze the latency\ + \ present in several current multi-touch platforms, and we describe a new custom\ + \ system that reduces latency to an average of 30 ms while providing programmable\ + \ haptic feed-back to the user. 
The paper concludes with a description of ongoing\ + \ and future work.},\n address = {Oslo, Norway},\n author = {Montag, Matthew and\ + \ Sullivan, Stefan and Dickey, Scott and Leider, Colby},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178115},\n issn = {2220-4806},\n keywords = {multi-touch,\ + \ haptics, frustrated total internal reflection, music performance, music composition,\ + \ latency, DIY },\n pages = {8--13},\n presentation-video = {https://vimeo.com/26799018/},\n\ + \ title = {A Low-Cost, Low-Latency Multi-Touch Table with Haptic Feedback for\ + \ Musical Applications},\n url = {http://www.nime.org/proceedings/2011/nime2011_008.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177071 + doi: 10.5281/zenodo.1178115 issn: 2220-4806 - keywords: 'Physical Model, Electronic Percussion Instrument, FPGA. ' - pages: 37--40 - title: Physical Models and Musical Controllers -- Designing a Novel Electronic Percussion - Instrument - url: http://www.nime.org/proceedings/2007/nime2007_037.pdf - year: 2007 + keywords: 'multi-touch, haptics, frustrated total internal reflection, music performance, + music composition, latency, DIY ' + pages: 8--13 + presentation-video: https://vimeo.com/26799018/ + title: 'A Low-Cost, Low-Latency Multi-Touch Table with Haptic Feedback for Musical + Applications' + url: http://www.nime.org/proceedings/2011/nime2011_008.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Wessel2007 - abstract: 'We describe the design, implementation, and evaluation with musical applications - of force sensitive multi-touch arrays of touchpads. Each of the touchpads supports - a three dimensional representation of musical material: two spatial dimensions - plus a force measurement we typically use to control dynamics. 
We have developed - two pad systems, one with 24 pads and a second with 2 arrays of 16 pads each. - We emphasize the treatment of gestures as sub-sampled audio signals. This tight - coupling of gesture with audio provides for a high degree of control intimacy. - Our experiments with the pad arrays demonstrate that we can efficiently deal with - large numbers of audio encoded gesture channels – 72 for the 24 pad array and - 96 for the two 16 pad arrays.' - address: 'New York City, NY, United States' - author: 'Wessel, David and Avizienis, Rimas and Freed, Adrian and Wright, Matthew' - bibtex: "@inproceedings{Wessel2007,\n abstract = {We describe the design, implementation,\ - \ and evaluation with musical applications of force sensitive multi-touch arrays\ - \ of touchpads. Each of the touchpads supports a three dimensional representation\ - \ of musical material: two spatial dimensions plus a force measurement we typically\ - \ use to control dynamics. We have developed two pad systems, one with 24 pads\ - \ and a second with 2 arrays of 16 pads each. We emphasize the treatment of gestures\ - \ as sub-sampled audio signals. This tight coupling of gesture with audio provides\ - \ for a high degree of control intimacy. 
Our experiments with the pad arrays demonstrate\ - \ that we can efficiently deal with large numbers of audio encoded gesture channels\ - \ – 72 for the 24 pad array and 96 for the two 16 pad arrays.},\n address = {New\ - \ York City, NY, United States},\n author = {Wessel, David and Avizienis, Rimas\ - \ and Freed, Adrian and Wright, Matthew},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179479},\n\ - \ issn = {2220-4806},\n keywords = {Pressure and force sensing, High-resolution\ - \ gestural signals, Touchpad, VersaPad.},\n pages = {41--45},\n title = {A Force\ - \ Sensitive Multi-Touch Array Supporting Multiple {2-D} Musical Control Structures},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_041.pdf},\n year = {2007}\n\ - }\n" + ID: Shear2011 + abstract: 'The Electromagnetically Sustained Rhodes Piano is an augmentation of + the original instrument with additional control over the amplitude envelope of + individual notes. Thisincludes slow attacks and infinite sustain while preservingthe + familiar spectral qualities of this classic electromechanical piano. These additional + parameters are controlled withaftertouch on the existing keyboard, extending standardpiano + technique. Two sustain methods were investigated,driving the actuator first with + a pure sine wave, and secondwith the output signal of the sensor. A special isolationmethod + effectively decouples the sensors from the actuatorsand tames unruly feedback + in the high-gain signal path.' + address: 'Oslo, Norway' + author: 'Shear, Greg and Wright, Matthew' + bibtex: "@inproceedings{Shear2011,\n abstract = {The Electromagnetically Sustained\ + \ Rhodes Piano is an augmentation of the original instrument with additional control\ + \ over the amplitude envelope of individual notes. 
Thisincludes slow attacks and\ + \ infinite sustain while preservingthe familiar spectral qualities of this classic\ + \ electromechanical piano. These additional parameters are controlled withaftertouch\ + \ on the existing keyboard, extending standardpiano technique. Two sustain methods\ + \ were investigated,driving the actuator first with a pure sine wave, and secondwith\ + \ the output signal of the sensor. A special isolationmethod effectively decouples\ + \ the sensors from the actuatorsand tames unruly feedback in the high-gain signal\ + \ path.},\n address = {Oslo, Norway},\n author = {Shear, Greg and Wright, Matthew},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178161},\n issn = {2220-4806},\n\ + \ keywords = {Rhodes, keyboard, electromagnetic, sustain, augmented instrument,\ + \ feedback, aftertouch },\n pages = {14--17},\n presentation-video = {https://vimeo.com/26802504/},\n\ + \ title = {The Electromagnetically Sustained Rhodes Piano},\n url = {http://www.nime.org/proceedings/2011/nime2011_014.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179479 + doi: 10.5281/zenodo.1178161 issn: 2220-4806 - keywords: 'Pressure and force sensing, High-resolution gestural signals, Touchpad, - VersaPad.' 
- pages: 41--45 - title: A Force Sensitive Multi-Touch Array Supporting Multiple 2-D Musical Control - Structures - url: http://www.nime.org/proceedings/2007/nime2007_041.pdf - year: 2007 + keywords: 'Rhodes, keyboard, electromagnetic, sustain, augmented instrument, feedback, + aftertouch ' + pages: 14--17 + presentation-video: https://vimeo.com/26802504/ + title: The Electromagnetically Sustained Rhodes Piano + url: http://www.nime.org/proceedings/2011/nime2011_014.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Chang2007 - abstract: 'FigureWe present Zstretch, a textile music controller that supports expressive - haptic interactions. The musical controller takes advantage of the fabric''s topological - constraints to enable proportional control of musical parameters. This novel interface - explores ways in which one might treat music as a sheet of cloth. This paper proposes - an approach to engage simple technologies for supporting ordinary hand interactions. - We show that this combination of basic technology with general tactile movements - can result in an expressive musical interface. a' - address: 'New York City, NY, United States' - author: 'Chang, Angela and Ishii, Hiroshi' - bibtex: "@inproceedings{Chang2007,\n abstract = {FigureWe present Zstretch, a textile\ - \ music controller that supports expressive haptic interactions. The musical controller\ - \ takes advantage of the fabric's topological constraints to enable proportional\ - \ control of musical parameters. This novel interface explores ways in which one\ - \ might treat music as a sheet of cloth. This paper proposes an approach to engage\ - \ simple technologies for supporting ordinary hand interactions. We show that\ - \ this combination of basic technology with general tactile movements can result\ - \ in an expressive musical interface. 
a},\n address = {New York City, NY, United\ - \ States},\n author = {Chang, Angela and Ishii, Hiroshi},\n booktitle = {Proceedings\ + ID: Pardue2011 + abstract: 'This paper describes the motivation and construction of Gamelan Elektrika, + a new electronic gamelan modeled after a Balinese Gong Kebyar. The first of its + kind, Elektrika consists of seven instruments acting as MIDI controllers accompanied + by traditional percussion and played by 11 or more performers following Balinese + performance practice. Three main percussive instrument designs were executed using + a combination of force sensitive resistors, piezos, and capacitive sensing. While + the instrument interfaces are designedto play interchangeably with the original, + the sound andt ravel possiblilities they enable are tremendous. MIDI enables a + massive new sound palette with new scales beyond the quirky traditional tuning + and non-traditional sounds. It also allows simplified transcription for an aurally + taught tradition. Significantly, it reduces the transportation challenges of a + previously large and heavy ensemble, creating opportunities for wider audiences + to experience Gong Kebyar''s enchanting sound. True to the spirit of oneness in + Balinese music, as one of the first large all-MIDI ensembles, ElekTrika challenges + performers to trust silent instruments and develop an understanding of highly + intricate and interlocking music not through the sound of the individual, but + through the sound of the whole.' + address: 'Oslo, Norway' + author: 'Pardue, Laurel S. and Boch, Andrew and Boch, Matt and Southworth, Christine + and Rigopulos, Alex' + bibtex: "@inproceedings{Pardue2011,\n abstract = {This paper describes the motivation\ + \ and construction of Gamelan Elektrika, a new electronic gamelan modeled after\ + \ a Balinese Gong Kebyar. 
The first of its kind, Elektrika consists of seven instruments\ + \ acting as MIDI controllers accompanied by traditional percussion and played\ + \ by 11 or more performers following Balinese performance practice. Three main\ + \ percussive instrument designs were executed using a combination of force sensitive\ + \ resistors, piezos, and capacitive sensing. While the instrument interfaces are\ + \ designedto play interchangeably with the original, the sound andt ravel possiblilities\ + \ they enable are tremendous. MIDI enables a massive new sound palette with new\ + \ scales beyond the quirky traditional tuning and non-traditional sounds. It also\ + \ allows simplified transcription for an aurally taught tradition. Significantly,\ + \ it reduces the transportation challenges of a previously large and heavy ensemble,\ + \ creating opportunities for wider audiences to experience Gong Kebyar's enchanting\ + \ sound. True to the spirit of oneness in Balinese music, as one of the first\ + \ large all-MIDI ensembles, ElekTrika challenges performers to trust silent instruments\ + \ and develop an understanding of highly intricate and interlocking music not\ + \ through the sound of the individual, but through the sound of the whole.},\n\ + \ address = {Oslo, Norway},\n author = {Pardue, Laurel S. 
and Boch, Andrew and\ + \ Boch, Matt and Southworth, Christine and Rigopulos, Alex},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177067},\n issn = {2220-4806},\n keywords = {Tangible\ - \ interfaces, textiles, tactile design, musical expressivity },\n pages = {46--49},\n\ - \ title = {Zstretch : A Stretchy Fabric Music Controller},\n url = {http://www.nime.org/proceedings/2007/nime2007_046.pdf},\n\ - \ year = {2007}\n}\n" + \ doi = {10.5281/zenodo.1178131},\n issn = {2220-4806},\n keywords = {bali, gamelan,\ + \ musical instrument design, MIDI ensemble },\n pages = {18--23},\n presentation-video\ + \ = {https://vimeo.com/26803278/},\n title = {Gamelan Elektrika: An Electronic\ + \ Balinese Gamelan},\n url = {http://www.nime.org/proceedings/2011/nime2011_018.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177067 + doi: 10.5281/zenodo.1178131 issn: 2220-4806 - keywords: 'Tangible interfaces, textiles, tactile design, musical expressivity ' - pages: 46--49 - title: 'Zstretch : A Stretchy Fabric Music Controller' - url: http://www.nime.org/proceedings/2007/nime2007_046.pdf - year: 2007 + keywords: 'bali, gamelan, musical instrument design, MIDI ensemble ' + pages: 18--23 + presentation-video: https://vimeo.com/26803278/ + title: 'Gamelan Elektrika: An Electronic Balinese Gamelan' + url: http://www.nime.org/proceedings/2011/nime2011_018.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Kim2007 - abstract: 'In this paper, we describe the musical development of a new system for - performing electronic music where a video-based eye movement recording system, - known as Oculog, is used to control sound. 
Its development is discussed against - a background that includes a brief history of biologically based interfaces for - performing music, together with a survey of various recording systems currently - in use for monitoring eye movement in clinical applications. Oculog is discussed - with specific reference to its implementation as a performance interface for electronic - music. A new work features algorithms driven by eye movement response and allows - the user to interact with audio synthesis and introduces new possibilities for - microtonal performance. Discussion reflects an earlier technological paradigm - and concludes by reviewing possibilities for future development.' - address: 'New York City, NY, United States' - author: 'Kim, Juno and Schiemer, Greg and Narushima, Terumi' - bibtex: "@inproceedings{Kim2007,\n abstract = {In this paper, we describe the musical\ - \ development of a new system for performing electronic music where a video-based\ - \ eye movement recording system, known as Oculog, is used to control sound. Its\ - \ development is discussed against a background that includes a brief history\ - \ of biologically based interfaces for performing music, together with a survey\ - \ of various recording systems currently in use for monitoring eye movement in\ - \ clinical applications. Oculog is discussed with specific reference to its implementation\ - \ as a performance interface for electronic music. A new work features algorithms\ - \ driven by eye movement response and allows the user to interact with audio synthesis\ - \ and introduces new possibilities for microtonal performance. 
Discussion reflects\ - \ an earlier technological paradigm and concludes by reviewing possibilities for\ - \ future development.},\n address = {New York City, NY, United States},\n author\ - \ = {Kim, Juno and Schiemer, Greg and Narushima, Terumi},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177145},\n issn = {2220-4806},\n keywords = {1,algorithmic\ - \ composition,expressive control interfaces,eye movement recording,microtonal\ - \ tuning,midi,nime07,pure data,video},\n pages = {50--55},\n title = {Oculog :\ - \ Playing with Eye Movements},\n url = {http://www.nime.org/proceedings/2007/nime2007_050.pdf},\n\ - \ year = {2007}\n}\n" + ID: Lee2011 + abstract: 'This paper introduces Sonicstrument, a sound-based interface that traces + the user''s hand motions. Sonicstrument utilizes stereotypical acoustic transducers + (i.e., a pair of earphones and a microphone) for transmission and reception of + acoustic signals whose frequencies are within the highest area of human hearing + range that can rarely be perceived by most people. Being simpler in structure + and easier to implement than typical ultrasonic motion detectors with special + transducers, this system is robust and offers precise results without introducing + any undesired sonic disturbance to users. We describe the design and implementation + of Sonicstrument, evaluate its performance, and present two practical applications + of the system in music and interactive performance.' + address: 'Oslo, Norway' + author: 'Lee, Jeong-seob and Yeo, Woon Seung' + bibtex: "@inproceedings{Lee2011,\n abstract = {This paper introduces Sonicstrument,\ + \ a sound-based interface that traces the user's hand motions. 
Sonicstrument utilizes\ + \ stereotypical acoustic transducers (i.e., a pair of earphones and a microphone)\ + \ for transmission and reception of acoustic signals whose frequencies are within\ + \ the highest area of human hearing range that can rarely be perceived by most\ + \ people. Being simpler in structure and easier to implement than typical ultrasonic\ + \ motion detectors with special transducers, this system is robust and offers\ + \ precise results without introducing any undesired sonic disturbance to users.\ + \ We describe the design and implementation of Sonicstrument, evaluate its performance,\ + \ and present two practical applications of the system in music and interactive\ + \ performance.},\n address = {Oslo, Norway},\n author = {Lee, Jeong-seob and Yeo,\ + \ Woon Seung},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180259},\n issn\ + \ = {2220-4806},\n keywords = {Stereotypical transducers, audible sound, Doppler\ + \ effect, handfree interface, musical instrument, interactive performance },\n\ + \ pages = {24--27},\n presentation-video = {https://vimeo.com/26804455/},\n title\ + \ = {Sonicstrument : A Musical Interface with Stereotypical Acoustic Transducers},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_024.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177145 + doi: 10.5281/zenodo.1180259 issn: 2220-4806 - keywords: '1,algorithmic composition,expressive control interfaces,eye movement - recording,microtonal tuning,midi,nime07,pure data,video' - pages: 50--55 - title: 'Oculog : Playing with Eye Movements' - url: http://www.nime.org/proceedings/2007/nime2007_050.pdf - year: 2007 + keywords: 'Stereotypical transducers, audible sound, Doppler effect, handfree interface, + musical instrument, interactive performance ' + pages: 24--27 + 
presentation-video: https://vimeo.com/26804455/ + title: 'Sonicstrument : A Musical Interface with Stereotypical Acoustic Transducers' + url: http://www.nime.org/proceedings/2011/nime2011_024.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Camurri2007 - abstract: 'In this paper, we present a new system, the Orchestra Explorer, enabling - a novel paradigm for active fruition of sound and music content. The Orchestra - Explorer allows users to physically navigate inside a virtual orchestra, to actively - explore the music piece the orchestra is playing, to modify and mold the sound - and music content in real-time through their expressive full-body movement and - gesture. An implementation of the Orchestra Explorer was developed and presented - in the framework of the science exhibition Cimenti di Invenzione e Armonia, held - at Casa Paganini, Genova, from October 2006 to January 2007. ' - address: 'New York City, NY, United States' - author: 'Camurri, Antonio and Canepa, Corrado and Volpe, Gualtiero' - bibtex: "@inproceedings{Camurri2007,\n abstract = {In this paper, we present a new\ - \ system, the Orchestra Explorer, enabling a novel paradigm for active fruition\ - \ of sound and music content. The Orchestra Explorer allows users to physically\ - \ navigate inside a virtual orchestra, to actively explore the music piece the\ - \ orchestra is playing, to modify and mold the sound and music content in real-time\ - \ through their expressive full-body movement and gesture. An implementation of\ - \ the Orchestra Explorer was developed and presented in the framework of the science\ - \ exhibition {Cimenti di Invenzione e Armonia}, held at Casa Paganini, Genova,\ - \ from October 2006 to January 2007. 
},\n address = {New York City, NY, United\ - \ States},\n author = {Camurri, Antonio and Canepa, Corrado and Volpe, Gualtiero},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177059},\n issn = {2220-4806},\n\ - \ keywords = {Active listening of music, expressive interfaces, full-body motion\ - \ analysis and expressive gesture processing, multimodal interactive systems for\ - \ music and performing arts applications. },\n pages = {56--61},\n title = {Active\ - \ Listening to a Virtual Orchestra Through an Expressive Gestural Interface :\ - \ The Orchestra Explorer},\n url = {http://www.nime.org/proceedings/2007/nime2007_056.pdf},\n\ - \ year = {2007}\n}\n" + ID: Smallwood2011 + abstract: "This paper describes recent developments in the creation of sound-making\ + \ instruments and devices powered by photovoltaic (PV) technologies. With the\ + \ rise of more efficient PV products in diverse packages, the possibilities for\ + \ creating solar-powered musical instruments, sound installations, and loudspeakers\ + \ are becoming increasingly realizable. This paper surveys past and recent developments\ + \ in this area, including several projects by the ,\n,\nauthor, and demonstrates\ + \ how the use of PV technologies can influence the creative process in unique\ + \ ways. In addition, this paper discusses how solar sound arts can enhance the\ + \ aesthetic direction taken by recent work in soundscape studies and acoustic\ + \ ecology. Finally, this paper will point towards future directions and possibilities\ + \ as PV technologies continue to evolve and improve in terms of performance, and\ + \ become more affordable. " + address: 'Oslo, Norway' + author: 'Smallwood, Scott' + bibtex: "@inproceedings{Smallwood2011,\n abstract = {This paper describes recent\ + \ developments in the creation of sound-making instruments and devices powered\ + \ by photovoltaic (PV) technologies. 
With the rise of more efficient PV products\ + \ in diverse packages, the possibilities for creating solar-powered musical instruments,\ + \ sound installations, and loudspeakers are becoming increasingly realizable.\ + \ This paper surveys past and recent developments in this area, including several\ + \ projects by the ,\n,\nauthor, and demonstrates how the use of PV technologies\ + \ can influence the creative process in unique ways. In addition, this paper discusses\ + \ how solar sound arts can enhance the aesthetic direction taken by recent work\ + \ in soundscape studies and acoustic ecology. Finally, this paper will point towards\ + \ future directions and possibilities as PV technologies continue to evolve and\ + \ improve in terms of performance, and become more affordable. },\n address =\ + \ {Oslo, Norway},\n author = {Smallwood, Scott},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178167},\n issn = {2220-4806},\n keywords = {Solar Sound\ + \ Arts, Circuit Bending, Hardware Hacking, Human-Computer Interface Design, Acoustic\ + \ Ecology, Sound Art, Electroacoustics, Laptop Orchestra, PV Technology },\n pages\ + \ = {28--31},\n title = {Solar Sound Arts: Creating Instruments and Devices Powered\ + \ by Photovoltaic Technologies},\n url = {http://www.nime.org/proceedings/2011/nime2011_028.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177059 + doi: 10.5281/zenodo.1178167 issn: 2220-4806 - keywords: 'Active listening of music, expressive interfaces, full-body motion analysis - and expressive gesture processing, multimodal interactive systems for music and - performing arts applications. 
' - pages: 56--61 - title: 'Active Listening to a Virtual Orchestra Through an Expressive Gestural Interface - : The Orchestra Explorer' - url: http://www.nime.org/proceedings/2007/nime2007_056.pdf - year: 2007 + keywords: 'Solar Sound Arts, Circuit Bending, Hardware Hacking, Human-Computer Interface + Design, Acoustic Ecology, Sound Art, Electroacoustics, Laptop Orchestra, PV Technology ' + pages: 28--31 + title: 'Solar Sound Arts: Creating Instruments and Devices Powered by Photovoltaic + Technologies' + url: http://www.nime.org/proceedings/2011/nime2011_028.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Bell2007 - abstract: 'We present the Multimodal Music Stand (MMMS) for the untethered sensing - of performance gestures and the interactive control of music. Using e-field sensing, - audio analysis, and computer vision, the MMMS captures a performer''s continuous - expressive gestures and robustly identifies discrete cues in a musical performance. - Continuous and discrete gestures are sent to an interactive music system featuring - custom designed software that performs real-time spectral transformation of audio. ' - address: 'New York City, NY, United States' - author: 'Bell, Bo and Kleban, Jim and Overholt, Dan and Putnam, Lance and Thompson, - John and Morin-Kuchera, JoAnn' - bibtex: "@inproceedings{Bell2007,\n abstract = {We present the Multimodal Music\ - \ Stand (MMMS) for the untethered sensing of performance gestures and the interactive\ - \ control of music. Using e-field sensing, audio analysis, and computer vision,\ - \ the MMMS captures a performer's continuous expressive gestures and robustly\ - \ identifies discrete cues in a musical performance. Continuous and discrete gestures\ - \ are sent to an interactive music system featuring custom designed software that\ - \ performs real-time spectral transformation of audio. 
},\n address = {New York\ - \ City, NY, United States},\n author = {Bell, Bo and Kleban, Jim and Overholt,\ - \ Dan and Putnam, Lance and Thompson, John and Morin-Kuchera, JoAnn},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177039},\n issn = {2220-4806},\n keywords\ - \ = {Multimodal, interactivity, computer vision, e-field sensing, untethered control.\ - \ },\n pages = {62--65},\n title = {The Multimodal Music Stand},\n url = {http://www.nime.org/proceedings/2007/nime2007_062.pdf},\n\ - \ year = {2007}\n}\n" + ID: Klugel2011 + abstract: 'This paper provides a discussion of how the electronic, solely ITbased + composition and performance of electronic music can besupported in realtime with + a collaborative application on a tabletopinterface, mediating between single-user + style music compositiontools and co-located collaborative music improvisation. + After having elaborated on the theoretical backgrounds of prerequisites ofco-located + collaborative tabletop applications as well as the common paradigms in music composition/notation, + we will review related work on novel IT approaches to music composition and improvisation. + Subsequently, we will present our prototypical implementation and the results.' + address: 'Oslo, Norway' + author: 'Klügel, Niklas and Frieß, Marc R. and Groh, Georg and Echtler, Florian' + bibtex: "@inproceedings{Klugel2011,\n abstract = {This paper provides a discussion\ + \ of how the electronic, solely ITbased composition and performance of electronic\ + \ music can besupported in realtime with a collaborative application on a tabletopinterface,\ + \ mediating between single-user style music compositiontools and co-located collaborative\ + \ music improvisation. 
After having elaborated on the theoretical backgrounds\ + \ of prerequisites ofco-located collaborative tabletop applications as well as\ + \ the common paradigms in music composition/notation, we will review related work\ + \ on novel IT approaches to music composition and improvisation. Subsequently,\ + \ we will present our prototypical implementation and the results.},\n address\ + \ = {Oslo, Norway},\n author = {Kl\\''{u}gel, Niklas and Frie\\ss, Marc R. and\ + \ Groh, Georg and Echtler, Florian},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178071},\n\ + \ issn = {2220-4806},\n keywords = {Tabletop Interface, Collaborative Music Composition,\ + \ Creativity Support },\n pages = {32--35},\n title = {An Approach to Collaborative\ + \ Music Composition},\n url = {http://www.nime.org/proceedings/2011/nime2011_032.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177039 + doi: 10.5281/zenodo.1178071 issn: 2220-4806 - keywords: 'Multimodal, interactivity, computer vision, e-field sensing, untethered - control. ' - pages: 62--65 - title: The Multimodal Music Stand - url: http://www.nime.org/proceedings/2007/nime2007_062.pdf - year: 2007 + keywords: 'Tabletop Interface, Collaborative Music Composition, Creativity Support ' + pages: 32--35 + title: An Approach to Collaborative Music Composition + url: http://www.nime.org/proceedings/2011/nime2011_032.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Malloch2007 - abstract: 'This paper describes the T-Stick, a new family of digitalmusical instruments. 
- It presents the motivation behind theproject, hardware and software design, and - presents insightsgained through collaboration with performers who have collectively - practised and performed with the T-Stick for hundreds of hours, and with composers - who have written piecesfor the instrument in the context of McGill University''s - Digital Orchestra project. Each of the T-Sticks is based on thesame general structure - and sensing platform, but each alsodiffers from its siblings in size, weight, - timbre and range.' - address: 'New York City, NY, United States' - author: 'Malloch, Joseph and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Malloch2007,\n abstract = {This paper describes the T-Stick,\ - \ a new family of digitalmusical instruments. It presents the motivation behind\ - \ theproject, hardware and software design, and presents insightsgained through\ - \ collaboration with performers who have collectively practised and performed\ - \ with the T-Stick for hundreds of hours, and with composers who have written\ - \ piecesfor the instrument in the context of McGill University's Digital Orchestra\ - \ project. 
Each of the T-Sticks is based on thesame general structure and sensing\ - \ platform, but each alsodiffers from its siblings in size, weight, timbre and\ - \ range.},\n address = {New York City, NY, United States},\n author = {Malloch,\ - \ Joseph and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177175},\n\ - \ issn = {2220-4806},\n keywords = {gestural controller, digital musical instrument,\ - \ families of instruments },\n pages = {66--69},\n title = {The T-Stick : From\ - \ Musical Interface to Musical Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_066.pdf},\n\ - \ year = {2007}\n}\n" + ID: Gold2011 + abstract: 'Popular music (characterized by improvised instrumental parts, beat and + measure-level organization, and steady tempo) poses challenges for human-computer + music performance (HCMP). Pieces of music are typically rearrangeable on-the-fly + and involve a high degree of variation from ensemble to ensemble, and even between + rehearsal and performance. Computer systems aiming to participate in such ensembles + must therefore cope with a dynamic high-level structure in addition to the more + traditional problems of beat-tracking, score-following, and machine improvisation. + There are many approaches to integrating the components required to implement + dynamic human-computer music performance systems. This paper presents a reference + architecture designed to allow the typical sub-components (e.g. beat-tracking, + tempo prediction, improvisation) to be integrated in a consistent way, allowing + them to be combined and/or compared systematically. In addition, the paper presents + a dynamic score representation particularly suited to the demands of popular music + performance by computer. ' + address: 'Oslo, Norway' + author: 'Gold, Nicolas E. and Dannenberg, Roger B.' 
+ bibtex: "@inproceedings{Gold2011,\n abstract = {Popular music (characterized by\ + \ improvised instrumental parts, beat and measure-level organization, and steady\ + \ tempo) poses challenges for human-computer music performance (HCMP). Pieces\ + \ of music are typically rearrangeable on-the-fly and involve a high degree of\ + \ variation from ensemble to ensemble, and even between rehearsal and performance.\ + \ Computer systems aiming to participate in such ensembles must therefore cope\ + \ with a dynamic high-level structure in addition to the more traditional problems\ + \ of beat-tracking, score-following, and machine improvisation. There are many\ + \ approaches to integrating the components required to implement dynamic human-computer\ + \ music performance systems. This paper presents a reference architecture designed\ + \ to allow the typical sub-components (e.g. beat-tracking, tempo prediction, improvisation)\ + \ to be integrated in a consistent way, allowing them to be combined and/or compared\ + \ systematically. In addition, the paper presents a dynamic score representation\ + \ particularly suited to the demands of popular music performance by computer.\ + \ },\n address = {Oslo, Norway},\n author = {Gold, Nicolas E. 
and Dannenberg,\ + \ Roger B.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178033},\n issn\ + \ = {2220-4806},\n keywords = {live performance,popular music,software design},\n\ + \ pages = {36--39},\n title = {A Reference Architecture and Score Representation\ + \ for Popular Music Human-Computer Music Performance Systems},\n url = {http://www.nime.org/proceedings/2011/nime2011_036.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177175 + doi: 10.5281/zenodo.1178033 issn: 2220-4806 - keywords: 'gestural controller, digital musical instrument, families of instruments ' - pages: 66--69 - title: 'The T-Stick : From Musical Interface to Musical Instrument' - url: http://www.nime.org/proceedings/2007/nime2007_066.pdf - year: 2007 + keywords: 'live performance,popular music,software design' + pages: 36--39 + title: A Reference Architecture and Score Representation for Popular Music Human-Computer + Music Performance Systems + url: http://www.nime.org/proceedings/2011/nime2011_036.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Paine2007 - abstract: 'This paper presents the Thummer Mapping Project (ThuMP), an industry - partnership project between ThumMotion P/L and The University of Western Sydney - (UWS). ThuMP sought to developing mapping strategies for new interfaces for musical - expression (NIME), specifically the ThummerTM, which provides thirteen simultaneous - degrees of freedom. This research presents a new approach to the mapping problem - resulting from a primary design research phase and a prototype testing and evaluation - phase. 
In order to establish an underlying design approach for the ThummerTM mapping - strategies, a number of interviews were carried out with high-level acoustic instrumental - performers, the majority of whom play with the Sydney Symphony Orchestra, Sydney, - Australia. Mapping strategies were developed from analysis of these interviews - and then evaluated in trial usability testing.' - address: 'New York City, NY, United States' - author: 'Paine, Garth and Stevenson, Ian and Pearce, Angela' - bibtex: "@inproceedings{Paine2007,\n abstract = {This paper presents the Thummer\ - \ Mapping Project (ThuMP), an industry partnership project between ThumMotion\ - \ P/L and The University of Western Sydney (UWS). ThuMP sought to developing mapping\ - \ strategies for new interfaces for musical expression (NIME), specifically the\ - \ ThummerTM, which provides thirteen simultaneous degrees of freedom. This research\ - \ presents a new approach to the mapping problem resulting from a primary design\ - \ research phase and a prototype testing and evaluation phase. 
In order to establish\ - \ an underlying design approach for the ThummerTM mapping strategies, a number\ - \ of interviews were carried out with high-level acoustic instrumental performers,\ - \ the majority of whom play with the Sydney Symphony Orchestra, Sydney, Australia.\ - \ Mapping strategies were developed from analysis of these interviews and then\ - \ evaluated in trial usability testing.},\n address = {New York City, NY, United\ - \ States},\n author = {Paine, Garth and Stevenson, Ian and Pearce, Angela},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177217},\n issn = {2220-4806},\n\ - \ keywords = {Musical Instrument Design, Mapping, Musicianship, evaluation, testing.\ - \ },\n pages = {70--77},\n title = {The Thummer Mapping Project (ThuMP)},\n url\ - \ = {http://www.nime.org/proceedings/2007/nime2007_070.pdf},\n year = {2007}\n\ - }\n" + ID: Bokowiec2011 + abstract: 'V''OCT(Ritual) is a work for solo vocalist/performer and Bodycoder System, + composed in residency at Dartington College of Arts (UK) Easter 2010. This paper + looks at the technical and compositional methodologies used in the realization + of the work, in particular, the choices made with regard to the mapping of sensor + elements to various spatialization functions. Kinaesonics will be discussed in + relation to the coding of real-time one-to-one mapping of sound to gesture and + its expression in terms of hardware and software design. Four forms of expressivity + arising out of interactive work with the Bodycoder system will be identified. + How sonic (electro-acoustic), programmed, gestural (kinaesonic) and in terms of + the V''Oct(Ritual) vocal expressivities are constructed as pragmatic and tangible + elements within the compositional practice will be discussed and the subsequent + importance of collaboration with a performer will be exposed. 
' + address: 'Oslo, Norway' + author: 'Bokowiec, Mark A.' + bibtex: "@inproceedings{Bokowiec2011,\n abstract = {V'OCT(Ritual) is a work for\ + \ solo vocalist/performer and Bodycoder System, composed in residency at Dartington\ + \ College of Arts (UK) Easter 2010. This paper looks at the technical and compositional\ + \ methodologies used in the realization of the work, in particular, the choices\ + \ made with regard to the mapping of sensor elements to various spatialization\ + \ functions. Kinaesonics will be discussed in relation to the coding of real-time\ + \ one-to-one mapping of sound to gesture and its expression in terms of hardware\ + \ and software design. Four forms of expressivity arising out of interactive work\ + \ with the Bodycoder system will be identified. How sonic (electro-acoustic),\ + \ programmed, gestural (kinaesonic) and in terms of the V'Oct(Ritual) vocal expressivities\ + \ are constructed as pragmatic and tangible elements within the compositional\ + \ practice will be discussed and the subsequent importance of collaboration with\ + \ a performer will be exposed. },\n address = {Oslo, Norway},\n author = {Bokowiec,\ + \ Mark A.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177967},\n issn\ + \ = {2220-4806},\n keywords = {Bodycoder, Kinaesonics, Expressivity, Gestural\ + \ Control, Interactive Performance Mechanisms, Collaboration. },\n pages = {40--43},\n\ + \ title = {V'OCT (Ritual): An Interactive Vocal Work for Bodycoder System and\ + \ 8~{C}hannel Spatialization},\n url = {http://www.nime.org/proceedings/2011/nime2011_040.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177217 + doi: 10.5281/zenodo.1177967 issn: 2220-4806 - keywords: 'Musical Instrument Design, Mapping, Musicianship, evaluation, testing. 
' - pages: 70--77 - title: The Thummer Mapping Project (ThuMP) - url: http://www.nime.org/proceedings/2007/nime2007_070.pdf - year: 2007 + keywords: 'Bodycoder, Kinaesonics, Expressivity, Gestural Control, Interactive Performance + Mechanisms, Collaboration. ' + pages: 40--43 + title: 'V''OCT (Ritual): An Interactive Vocal Work for Bodycoder System and 8~Channel + Spatialization' + url: http://www.nime.org/proceedings/2011/nime2011_040.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: dAlessandro2007 - abstract: 'In this paper, we present a new bi-manual gestural controller, called - HandSketch, composed of purchasable devices : pen tablet and pressure-sensing - surfaces. It aims at achieving real-time manipulation of several continuous and - articulated aspects of pitched sounds synthesis, with a focus on expressive voice. - Both prefered and non-prefered hand issues are discussed. Concrete playing diagrams - and mapping strategies are described. These results are integrated and a compact - controller is proposed.' - address: 'New York City, NY, United States' - author: 'd''Alessandro, Nicolas and Dutoit, Thierry' - bibtex: "@inproceedings{dAlessandro2007,\n abstract = {In this paper, we present\ - \ a new bi-manual gestural controller, called HandSketch, composed of purchasable\ - \ devices : pen tablet and pressure-sensing surfaces. It aims at achieving real-time\ - \ manipulation of several continuous and articulated aspects of pitched sounds\ - \ synthesis, with a focus on expressive voice. Both prefered and non-prefered\ - \ hand issues are discussed. Concrete playing diagrams and mapping strategies\ - \ are described. 
These results are integrated and a compact controller is proposed.},\n\ - \ address = {New York City, NY, United States},\n author = {d'Alessandro, Nicolas\ - \ and Dutoit, Thierry},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177027},\n\ - \ issn = {2220-4806},\n keywords = {Pen tablet, FSR, bi-manual gestural control.\ - \ },\n pages = {78--81},\n title = {HandSketch Bi-Manual Controller Investigation\ - \ on Expressive Control Issues of an Augmented Tablet},\n url = {http://www.nime.org/proceedings/2007/nime2007_078.pdf},\n\ - \ year = {2007}\n}\n" + ID: Berthaut2011 + abstract: 'First Person Shooters are among the most played computer videogames. + They combine navigation, interaction and collaboration in3D virtual environments + using simple input devices, i.e. mouseand keyboard. In this paper, we study the + possibilities broughtby these games for musical interaction. We present the Couacs, + acollaborative multiprocess instrument which relies on interactiontechniques used + in FPS together with new techniques adding theexpressiveness required for musical + interaction. In particular, theFaders For All game mode allows musicians to perform + patternbased electronic compositions.' + address: 'Oslo, Norway' + author: 'Berthaut, Florent and Katayose, Haruhiro and Wakama, Hironori and Totani, + Naoyuki and Sato, Yuichi' + bibtex: "@inproceedings{Berthaut2011,\n abstract = {First Person Shooters are among\ + \ the most played computer videogames. They combine navigation, interaction and\ + \ collaboration in3D virtual environments using simple input devices, i.e. mouseand\ + \ keyboard. In this paper, we study the possibilities broughtby these games for\ + \ musical interaction. 
We present the Couacs, acollaborative multiprocess instrument\ + \ which relies on interactiontechniques used in FPS together with new techniques\ + \ adding theexpressiveness required for musical interaction. In particular, theFaders\ + \ For All game mode allows musicians to perform patternbased electronic compositions.},\n\ + \ address = {Oslo, Norway},\n author = {Berthaut, Florent and Katayose, Haruhiro\ + \ and Wakama, Hironori and Totani, Naoyuki and Sato, Yuichi},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177961},\n issn = {2220-4806},\n keywords = {the couacs,\ + \ fps, first person shooters, collaborative, 3D interaction, multiprocess instrument\ + \ },\n pages = {44--47},\n title = {First Person Shooters as Collaborative Multiprocess\ + \ Instruments},\n url = {http://www.nime.org/proceedings/2011/nime2011_044.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177027 + doi: 10.5281/zenodo.1177961 issn: 2220-4806 - keywords: 'Pen tablet, FSR, bi-manual gestural control. ' - pages: 78--81 - title: HandSketch Bi-Manual Controller Investigation on Expressive Control Issues - of an Augmented Tablet - url: http://www.nime.org/proceedings/2007/nime2007_078.pdf - year: 2007 + keywords: 'the couacs, fps, first person shooters, collaborative, 3D interaction, + multiprocess instrument ' + pages: 44--47 + title: First Person Shooters as Collaborative Multiprocess Instruments + url: http://www.nime.org/proceedings/2011/nime2011_044.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Takegawa2007 - abstract: 'Musical performers need to show off their virtuosity for selfexpression - and communicate with other people. Therefore, they are prepared to perform at - any time and anywhere. However, a musical keyboard of 88 keys is too large and - too heavy to carry around. 
When a portable keyboard that is suitable for carrying - around is played over a wide range, the notes being played frequently cause the - diapason of the keyboard to protrude. It is common to use Key Transpose in conventional - portable keyboards, which shifts the diapason of the keyboard. However, this function - creates several problems such as the feeling of discomfort from the misalignment - between the keying positions and their output sounds. Therefore, the goal of our - study is to construct Mobile Clavier, which enables the diapason to be changed - smoothly. Mobile Clavier resolves the problems with Key Transpose by having black - keys inserted between any two side-by-side white keys. This paper also discusses - how effective Mobile Clavier was in an experiment conducted using professional - pianists. We can play music at any time and anywhere with Mobile Clavier.' - address: 'New York City, NY, United States' - author: 'Takegawa, Yoshinari and Terada, Tsutomu' - bibtex: "@inproceedings{Takegawa2007,\n abstract = {Musical performers need to show\ - \ off their virtuosity for selfexpression and communicate with other people. Therefore,\ - \ they are prepared to perform at any time and anywhere. However, a musical keyboard\ - \ of 88 keys is too large and too heavy to carry around. When a portable keyboard\ - \ that is suitable for carrying around is played over a wide range, the notes\ - \ being played frequently cause the diapason of the keyboard to protrude. It is\ - \ common to use Key Transpose in conventional portable keyboards, which shifts\ - \ the diapason of the keyboard. However, this function creates several problems\ - \ such as the feeling of discomfort from the misalignment between the keying positions\ - \ and their output sounds. Therefore, the goal of our study is to construct Mobile\ - \ Clavier, which enables the diapason to be changed smoothly. 
Mobile Clavier resolves\ - \ the problems with Key Transpose by having black keys inserted between any two\ - \ side-by-side white keys. This paper also discusses how effective Mobile Clavier\ - \ was in an experiment conducted using professional pianists. We can play music\ - \ at any time and anywhere with Mobile Clavier.},\n address = {New York City,\ - \ NY, United States},\n author = {Takegawa, Yoshinari and Terada, Tsutomu},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177255},\n issn = {2220-4806},\n\ - \ keywords = {Portable keyboard, Additional black keys, Diapason change },\n pages\ - \ = {82--87},\n title = {Mobile Clavier : New Music Keyboard for Flexible Key\ - \ Transpose},\n url = {http://www.nime.org/proceedings/2007/nime2007_082.pdf},\n\ - \ year = {2007}\n}\n" + ID: Hahnel2011 + address: 'Oslo, Norway' + author: 'H\''''{a}hnel, Tilo and Berndt, Axel' + bibtex: "@inproceedings{Hahnel2011,\n address = {Oslo, Norway},\n author = {H\\\ + ''{a}hnel, Tilo and Berndt, Axel},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178037},\n\ + \ issn = {2220-4806},\n keywords = {articula-,duration,dynamics,egales,loudness,notes\ + \ in,synthetic performance,timing,tion},\n pages = {48--51},\n title = {Studying\ + \ Interdependencies in Music Performance : An Interactive Tool},\n url = {http://www.nime.org/proceedings/2011/nime2011_048.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177255 + doi: 10.5281/zenodo.1178037 issn: 2220-4806 - keywords: 'Portable keyboard, Additional black keys, Diapason change ' - pages: 82--87 - title: 'Mobile Clavier : New Music Keyboard for Flexible Key Transpose' - url: http://www.nime.org/proceedings/2007/nime2007_082.pdf - year: 2007 + keywords: 
'articula-,duration,dynamics,egales,loudness,notes in,synthetic performance,timing,tion' + pages: 48--51 + title: 'Studying Interdependencies in Music Performance : An Interactive Tool' + url: http://www.nime.org/proceedings/2011/nime2011_048.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Ojanen2007 - abstract: This paper presents a line of historic electronic musical instruments - designed by Erkki Kurenniemi in the 1960's and1970's. Kurenniemi's instruments - were influenced by digitallogic and an experimental attitude towards user interfacedesign. - The paper presents an overview of Kurenniemi'sinstruments and a detailed description - of selected devices.Emphasis is put on user interface issues such as unconventional - interactive real-time control and programming methods. - address: 'New York City, NY, United States' - author: 'Ojanen, Mikko and Suominen, Jari and Kallio, Titti and Lassfolk, Kai' - bibtex: "@inproceedings{Ojanen2007,\n abstract = {This paper presents a line of\ - \ historic electronic musical instruments designed by Erkki Kurenniemi in the\ - \ 1960's and1970's. Kurenniemi's instruments were influenced by digitallogic and\ - \ an experimental attitude towards user interfacedesign. 
The paper presents an\ - \ overview of Kurenniemi'sinstruments and a detailed description of selected devices.Emphasis\ - \ is put on user interface issues such as unconventional interactive real-time\ - \ control and programming methods.},\n address = {New York City, NY, United States},\n\ - \ author = {Ojanen, Mikko and Suominen, Jari and Kallio, Titti and Lassfolk, Kai},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177211},\n issn = {2220-4806},\n\ - \ keywords = {Erkki Kurenniemi, Dimi, Synthesizer, Digital electronics, User interface\ - \ design },\n pages = {88--93},\n title = {Design Principles and User Interfaces\ - \ of Erkki Kurenniemi's Electronic Musical Instruments of the 1960's and 1970's},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_088.pdf},\n year = {2007}\n\ + ID: Bokesoy2011 + address: 'Oslo, Norway' + author: 'B\''''{o}kesoy, Sinan and Adler, Patrick' + bibtex: "@inproceedings{Bokesoy2011,\n address = {Oslo, Norway},\n author = {B\\\ + ''{o}kesoy, Sinan and Adler, Patrick},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177945},\n\ + \ issn = {2220-4806},\n keywords = {Sound installation, robotic music, interactive\ + \ systems },\n pages = {52--55},\n title = {1city1001vibrations : Development\ + \ of a Interactive Sound Installation with Robotic Instrument Performance},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_052.pdf},\n year = {2011}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177211 + doi: 10.5281/zenodo.1177945 issn: 2220-4806 - keywords: 'Erkki Kurenniemi, Dimi, Synthesizer, Digital electronics, User interface - design ' - pages: 88--93 - title: Design Principles and User Interfaces of Erkki Kurenniemi's Electronic Musical - Instruments of the 
1960's and 1970's - url: http://www.nime.org/proceedings/2007/nime2007_088.pdf - year: 2007 + keywords: 'Sound installation, robotic music, interactive systems ' + pages: 52--55 + title: '1city1001vibrations : Development of a Interactive Sound Installation with + Robotic Instrument Performance' + url: http://www.nime.org/proceedings/2011/nime2011_052.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Magnusson2007 - abstract: 'This paper reports on a survey conducted in the autumn of 2006 with the - objective to understand people''s relationship to their musical tools. The survey - focused on the question of embodiment and its different modalities in the fields - of acoustic and digital instruments. The questions of control, instrumental entropy, - limitations and creativity were addressed in relation to people''s activities - of playing, creating or modifying their instruments. The approach used in the - survey was phenomenological, i.e. we were concerned with the experience of playing, - composing for and designing digital or acoustic instruments. At the time of analysis, - we had 209 replies from musicians, composers, engineers, designers, artists and - others interested in this topic. The survey was mainly aimed at instrumentalists - and people who create their own instruments or compositions in flexible audio - programming environments such as SuperCollider, Pure Data, ChucK, Max/MSP, CSound, - etc. ' - address: 'New York City, NY, United States' - author: 'Magnusson, Thor and Mendieta, Enrike H.' - bibtex: "@inproceedings{Magnusson2007,\n abstract = {This paper reports on a survey\ - \ conducted in the autumn of 2006 with the objective to understand people's relationship\ - \ to their musical tools. 
The survey focused on the question of embodiment and\ - \ its different modalities in the fields of acoustic and digital instruments.\ - \ The questions of control, instrumental entropy, limitations and creativity were\ - \ addressed in relation to people's activities of playing, creating or modifying\ - \ their instruments. The approach used in the survey was phenomenological, i.e.\ - \ we were concerned with the experience of playing, composing for and designing\ - \ digital or acoustic instruments. At the time of analysis, we had 209 replies\ - \ from musicians, composers, engineers, designers, artists and others interested\ - \ in this topic. The survey was mainly aimed at instrumentalists and people who\ - \ create their own instruments or compositions in flexible audio programming environments\ - \ such as SuperCollider, Pure Data, ChucK, Max/MSP, CSound, etc. },\n address\ - \ = {New York City, NY, United States},\n author = {Magnusson, Thor and Mendieta,\ - \ Enrike H.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177171},\n issn\ - \ = {2220-4806},\n keywords = {Survey, musical instruments, usability, ergonomics,\ - \ embodiment, mapping, affordances, constraints, instrumental entropy, audio programming.\ - \ },\n pages = {94--99},\n title = {The Acoustic, the Digital and the Body : A\ - \ Survey on Musical Instruments},\n url = {http://www.nime.org/proceedings/2007/nime2007_094.pdf},\n\ - \ year = {2007}\n}\n" + ID: MurrayBrowne2011 + abstract: "Many performers of novel musical instruments find it difficult to engage\ + \ audiences beyond those in the field. Previousresearch points to a failure to\ + \ balance complexity with usability, and a loss of transparency due to the detachmentof\ + \ the controller and sound generator. 
The issue is oftenexacerbated by an audience's\ + \ lack of prior exposure to theinstrument and its workings.However, we argue that\ + \ there is a conflict underlyingmany novel musical instruments in that they are\ + \ intendedto be both a tool for creative expression and a creative workof art\ + \ in themselves, resulting in incompatible requirements.By considering the instrument,\ + \ the composition and theperformance together as a whole with careful considerationof\ + \ the rate of learning demanded of the audience, we propose that a lack of transparency\ + \ can become an asset ratherthan a hindrance. Our approach calls for not only\ + \ controllerand sound generator to be designed in sympathy with eachother, but\ + \ composition, performance and physical form too.Identifying three design principles,\ + \ we illustrate this approach with the Serendiptichord, a wearable instrument\ + \ fordancers created by the ,\n,\nauthors." + address: 'Oslo, Norway' + author: 'Murray-Browne, Tim and Mainstone, Di and Bryan-Kinns, Nick and Plumbley, + Mark D.' + bibtex: "@inproceedings{MurrayBrowne2011,\n abstract = {Many performers of novel\ + \ musical instruments find it difficult to engage audiences beyond those in the\ + \ field. Previousresearch points to a failure to balance complexity with usability,\ + \ and a loss of transparency due to the detachmentof the controller and sound\ + \ generator. 
The issue is oftenexacerbated by an audience's lack of prior exposure\ + \ to theinstrument and its workings.However, we argue that there is a conflict\ + \ underlyingmany novel musical instruments in that they are intendedto be both\ + \ a tool for creative expression and a creative workof art in themselves, resulting\ + \ in incompatible requirements.By considering the instrument, the composition\ + \ and theperformance together as a whole with careful considerationof the rate\ + \ of learning demanded of the audience, we propose that a lack of transparency\ + \ can become an asset ratherthan a hindrance. Our approach calls for not only\ + \ controllerand sound generator to be designed in sympathy with eachother, but\ + \ composition, performance and physical form too.Identifying three design principles,\ + \ we illustrate this approach with the Serendiptichord, a wearable instrument\ + \ fordancers created by the ,\n,\nauthors.},\n address = {Oslo, Norway},\n author\ + \ = {Murray-Browne, Tim and Mainstone, Di and Bryan-Kinns, Nick and Plumbley,\ + \ Mark D.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178119},\n issn\ + \ = {2220-4806},\n keywords = {Performance, composed instrument, transparency,\ + \ constraint. },\n pages = {56--59},\n title = {The Medium is the Message: Composing\ + \ Instruments and Performing Mappings},\n url = {http://www.nime.org/proceedings/2011/nime2011_056.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177171 + doi: 10.5281/zenodo.1178119 issn: 2220-4806 - keywords: 'Survey, musical instruments, usability, ergonomics, embodiment, mapping, - affordances, constraints, instrumental entropy, audio programming. 
' - pages: 94--99 - title: 'The Acoustic, the Digital and the Body : A Survey on Musical Instruments' - url: http://www.nime.org/proceedings/2007/nime2007_094.pdf - year: 2007 + keywords: 'Performance, composed instrument, transparency, constraint. ' + pages: 56--59 + title: 'The Medium is the Message: Composing Instruments and Performing Mappings' + url: http://www.nime.org/proceedings/2011/nime2011_056.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Zbyszynski2007 - abstract: 'We summarize a decade of musical projects and research employing Wacom - digitizing tablets as musical controllers, discussing general implementation schemes - using Max/MSP and OpenSoundControl, and specific implementations in musical improvisation, - interactive sound installation, interactive multimedia performance, and as a compositional - assistant. We examine two-handed sensing strategies and schemes for gestural mapping. ' - address: 'New York City, NY, United States' - author: 'Zbyszynski, Michael and Wright, Matthew and Momeni, Ali and Cullen, Daniel' - bibtex: "@inproceedings{Zbyszynski2007,\n abstract = {We summarize a decade of musical\ - \ projects and research employing Wacom digitizing tablets as musical controllers,\ - \ discussing general implementation schemes using Max/MSP and OpenSoundControl,\ - \ and specific implementations in musical improvisation, interactive sound installation,\ - \ interactive multimedia performance, and as a compositional assistant. We examine\ - \ two-handed sensing strategies and schemes for gestural mapping. 
},\n address\ - \ = {New York City, NY, United States},\n author = {Zbyszynski, Michael and Wright,\ - \ Matthew and Momeni, Ali and Cullen, Daniel},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179483},\n issn = {2220-4806},\n keywords = {1,algorithmic\ - \ composition,digitizing tablet,expressivity,gesture,mapping,nime07,position sensing,wacom\ - \ tablet,why the wacom tablet},\n pages = {100--105},\n title = {Ten Years of\ - \ Tablet Musical Interfaces at CNMAT},\n url = {http://www.nime.org/proceedings/2007/nime2007_100.pdf},\n\ - \ year = {2007}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1179483 + ID: Kim2011 + abstract: 'In this paper, we discuss the use of the clothesline as ametaphor for + designing a musical interface called Airer Choir. This interactive installation + is based on the function ofan ordinary object that is not a traditional instrument, + andhanging articles of clothing is literally the gesture to use theinterface. + Based on this metaphor, a musical interface withhigh transparency was designed. + Using the metaphor, weexplored the possibilities for recognizing of input gesturesand + creating sonic events by mapping data to sound. Thus,four different types of Airer + Choir were developed. By classifying the interfaces, we concluded that various + musicalexpressions are possible by using the same metaphor.' + address: 'Oslo, Norway' + author: 'Kim, Seunghun and Kim, Luke K. and Jeong, Songhee and Yeo, Woon Seung' + bibtex: "@inproceedings{Kim2011,\n abstract = {In this paper, we discuss the use\ + \ of the clothesline as ametaphor for designing a musical interface called Airer\ + \ Choir. 
This interactive installation is based on the function ofan ordinary\ + \ object that is not a traditional instrument, andhanging articles of clothing\ + \ is literally the gesture to use theinterface. Based on this metaphor, a musical\ + \ interface withhigh transparency was designed. Using the metaphor, weexplored\ + \ the possibilities for recognizing of input gesturesand creating sonic events\ + \ by mapping data to sound. Thus,four different types of Airer Choir were developed.\ + \ By classifying the interfaces, we concluded that various musicalexpressions\ + \ are possible by using the same metaphor.},\n address = {Oslo, Norway},\n author\ + \ = {Kim, Seunghun and Kim, Luke K. and Jeong, Songhee and Yeo, Woon Seung},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178065},\n issn = {2220-4806},\n\ + \ keywords = {musical interface, metaphor, clothesline installation },\n pages\ + \ = {60--63},\n title = {Clothesline as a Metaphor for a Musical Interface},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_060.pdf},\n year = {2011}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178065 issn: 2220-4806 - keywords: '1,algorithmic composition,digitizing tablet,expressivity,gesture,mapping,nime07,position - sensing,wacom tablet,why the wacom tablet' - pages: 100--105 - title: Ten Years of Tablet Musical Interfaces at CNMAT - url: http://www.nime.org/proceedings/2007/nime2007_100.pdf - year: 2007 + keywords: 'musical interface, metaphor, clothesline installation ' + pages: 60--63 + title: Clothesline as a Metaphor for a Musical Interface + url: http://www.nime.org/proceedings/2011/nime2011_060.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Gurevich2007 - abstract: 'We describe the prevailing model of musical expression, which assumes - a binary formulation of "the text" and "the 
act", along with its implied roles - of composer and performer. We argue that this model not only excludes some contemporary - aesthetic values but also limits the communicative ability of new music interfaces. - As an alternative, an ecology of musical creation accounts for both a diversity - of aesthetic goals and the complex interrelation of human and non-human agents. - An ecological perspective on several approaches to musical creation with interactive - technologies reveals an expanded, more inclusive view of artistic interaction - that facilitates novel, compelling ways to use technology for music. This paper - is fundamentally a call to consider the role of aesthetic values in the analysis - of artistic processes and technologies. ' - address: 'New York City, NY, United States' - author: 'Gurevich, Michael and Treviño, Jeffrey' - bibtex: "@inproceedings{Gurevich2007,\n abstract = {We describe the prevailing model\ - \ of musical expression, which assumes a binary formulation of \"the text\" and\ - \ \"the act\", along with its implied roles of composer and performer. We argue\ - \ that this model not only excludes some contemporary aesthetic values but also\ - \ limits the communicative ability of new music interfaces. As an alternative,\ - \ an ecology of musical creation accounts for both a diversity of aesthetic goals\ - \ and the complex interrelation of human and non-human agents. An ecological perspective\ - \ on several approaches to musical creation with interactive technologies reveals\ - \ an expanded, more inclusive view of artistic interaction that facilitates novel,\ - \ compelling ways to use technology for music. This paper is fundamentally a call\ - \ to consider the role of aesthetic values in the analysis of artistic processes\ - \ and technologies. 
},\n address = {New York City, NY, United States},\n author\ - \ = {Gurevich, Michael and Trevi\\~{n}o, Jeffrey},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177107},\n issn = {2220-4806},\n keywords = {Expression,\ - \ expressivity, non-expressive, emotion, discipline, model, construct, discourse,\ - \ aesthetic goal, experience, transparency, evaluation, communication },\n pages\ - \ = {106--111},\n title = {Expression and Its Discontents : Toward an Ecology\ - \ of Musical Creation},\n url = {http://www.nime.org/proceedings/2007/nime2007_106.pdf},\n\ - \ year = {2007}\n}\n" + ID: Polotti2011 + abstract: 'In this paper, we discuss the results obtained by means of the EGGS (Elementary + Gestalts for Gesture Sonification) system in terms of artistic realizations. EGGS + was introduced in a previous edition of this conference. The works presented include + interactive installations in the form of public art and interactive onstage performances. + In all of the works, the EGGS principles of simplicity based on the correspondence + between elementary sonic and movement units, and of organicity between sound and + gesture are applied. Indeed, we study both sound as a means for gesture representation + and gesture as embodiment of sound. These principles constitute our guidelines + for the investigation of the bidirectional relationship between sound and body + expression with various strategies involving both educated and non-educated executors. ' + address: 'Oslo, Norway' + author: 'Polotti, Pietro and Goina, Maurizio' + bibtex: "@inproceedings{Polotti2011,\n abstract = {In this paper, we discuss the\ + \ results obtained by means of the EGGS (Elementary Gestalts for Gesture Sonification)\ + \ system in terms of artistic realizations. EGGS was introduced in a previous\ + \ edition of this conference. 
The works presented include interactive installations\ + \ in the form of public art and interactive onstage performances. In all of the\ + \ works, the EGGS principles of simplicity based on the correspondence between\ + \ elementary sonic and movement units, and of organicity between sound and gesture\ + \ are applied. Indeed, we study both sound as a means for gesture representation\ + \ and gesture as embodiment of sound. These principles constitute our guidelines\ + \ for the investigation of the bidirectional relationship between sound and body\ + \ expression with various strategies involving both educated and non-educated\ + \ executors. },\n address = {Oslo, Norway},\n author = {Polotti, Pietro and Goina,\ + \ Maurizio},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178137},\n issn\ + \ = {2220-4806},\n keywords = {Gesture sonification, Interactive performance,\ + \ Public art. },\n pages = {64--67},\n title = {EGGS in Action},\n url = {http://www.nime.org/proceedings/2011/nime2011_064.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177107 + doi: 10.5281/zenodo.1178137 issn: 2220-4806 - keywords: 'Expression, expressivity, non-expressive, emotion, discipline, model, - construct, discourse, aesthetic goal, experience, transparency, evaluation, communication ' - pages: 106--111 - title: 'Expression and Its Discontents : Toward an Ecology of Musical Creation' - url: http://www.nime.org/proceedings/2007/nime2007_106.pdf - year: 2007 + keywords: 'Gesture sonification, Interactive performance, Public art. 
' + pages: 64--67 + title: EGGS in Action + url: http://www.nime.org/proceedings/2011/nime2011_064.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Nilson2007 - abstract: 'Live coding is almost the antithesis of immediate physical musicianship, - and yet, has attracted the attentions of a number of computer-literate musicians, - as well as the music-savvy programmers that might be more expected. It is within - the context of live coding that I seek to explore the question of practising a - contemporary digital musical instrument, which is often raised as an aside but - more rarely carried out in research (though see [12]). At what stage of expertise - are the members of the live coding movement, and what practice regimes might help - them to find their true potential?' - address: 'New York City, NY, United States' - author: 'Nilson, Click' - bibtex: "@inproceedings{Nilson2007,\n abstract = {Live coding is almost the antithesis\ - \ of immediate physical musicianship, and yet, has attracted the attentions of\ - \ a number of computer-literate musicians, as well as the music-savvy programmers\ - \ that might be more expected. It is within the context of live coding that I\ - \ seek to explore the question of practising a contemporary digital musical instrument,\ - \ which is often raised as an aside but more rarely carried out in research (though\ - \ see [12]). 
At what stage of expertise are the members of the live coding movement,\ - \ and what practice regimes might help them to find their true potential?},\n\ - \ address = {New York City, NY, United States},\n author = {Nilson, Click},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177209},\n issn = {2220-4806},\n\ - \ keywords = {Practice, practising, live coding },\n pages = {112--117},\n title\ - \ = {Live Coding Practice},\n url = {http://www.nime.org/proceedings/2007/nime2007_112.pdf},\n\ - \ year = {2007}\n}\n" + ID: Janssen2011 + abstract: The present article describes a reverberation instrumentwhich is based + on cognitive categorization of reverberating spaces. Different techniques for + artificial reverberationwill be covered. A multidimensional scaling experimentwas + conducted on impulse responses in order to determinehow humans acoustically perceive + spatiality. This researchseems to indicate that the perceptual dimensions are + related to early energy decay and timbral qualities. Theseresults are applied + to a reverberation instrument based ondelay lines. It can be contended that such + an instrumentcan be controlled more intuitively than other delay line reverberation + tools which often provide a confusing range ofparameters which have a physical + rather than perceptualmeaning. + address: 'Oslo, Norway' + author: 'Janssen, Berit' + bibtex: "@inproceedings{Janssen2011,\n abstract = {The present article describes\ + \ a reverberation instrumentwhich is based on cognitive categorization of reverberating\ + \ spaces. Different techniques for artificial reverberationwill be covered. A\ + \ multidimensional scaling experimentwas conducted on impulse responses in order\ + \ to determinehow humans acoustically perceive spatiality. This researchseems\ + \ to indicate that the perceptual dimensions are related to early energy decay\ + \ and timbral qualities. 
Theseresults are applied to a reverberation instrument\ + \ based ondelay lines. It can be contended that such an instrumentcan be controlled\ + \ more intuitively than other delay line reverberation tools which often provide\ + \ a confusing range ofparameters which have a physical rather than perceptualmeaning.},\n\ + \ address = {Oslo, Norway},\n author = {Janssen, Berit},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178049},\n issn = {2220-4806},\n keywords = {Reverberation,\ + \ perception, multidimensional scaling, mapping },\n pages = {68--71},\n title\ + \ = {A Reverberation Instrument Based on Perceptual Mapping},\n url = {http://www.nime.org/proceedings/2011/nime2011_068.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177209 + doi: 10.5281/zenodo.1178049 issn: 2220-4806 - keywords: 'Practice, practising, live coding ' - pages: 112--117 - title: Live Coding Practice - url: http://www.nime.org/proceedings/2007/nime2007_112.pdf - year: 2007 + keywords: 'Reverberation, perception, multidimensional scaling, mapping ' + pages: 68--71 + title: A Reverberation Instrument Based on Perceptual Mapping + url: http://www.nime.org/proceedings/2011/nime2011_068.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Mann2007 - abstract: 'This paper presents two main ideas: (1) Various newly invented liquid-based - or underwater musical instruments are proposed that function like woodwind instruments - but use water instead of air. These “woodwater” instruments expand the space of - known instruments to include all three states of matter: solid (strings, percussion); - liquid (the proposed instruments); and gas (brass and woodwinds). Instruments - that use the fourth state of matter (plasma) are also proposed. 
(2) Although the - current trend in musical interfaces has been to expand versatililty and generality - by separating the interface from the sound-producing medium, this paper identifies - an opposite trend in musical interface design inspired by instruments such as - the harp, the acoustic or electric guitar, the tin whistle, and the Neanderthal - flute, that have a directness of user-interface, where the fingers of the musician - are in direct physical contact with the sound-producing medium. The newly invented - instruments are thus designed to have this sensually tempting intimacy not be - lost behind layers of abstraction, while also allowing for the high degree of - virtuosity. Examples presented include the poseidophone, an instrument made from - an array of ripple tanks, each tuned for a particular note, and the hydraulophone, - an instrument in which sound is produced by pressurized hydraulic fluid that is - in direct physical contact with the fingers of the player. Instruments based on - these primordial media tend to fall outside existing classifications and taxonomies - of known musical instruments which only consider instruments that make sound with - solid or gaseous states of matter. To better understand and contextualize some - of the new primordial user interfaces, a broader concept of musical instrument - classification is proposed that considers the states of matter of both the user-interface - and the sound production medium.' 
- address: 'New York City, NY, United States' - author: 'Mann, Steve' - bibtex: "@inproceedings{Mann2007,\n abstract = {This paper presents two main ideas:\ - \ (1) Various newly invented liquid-based or underwater musical instruments are\ - \ proposed that function like woodwind instruments but use water instead of air.\ - \ These “woodwater” instruments expand the space of known instruments to include\ - \ all three states of matter: solid (strings, percussion); liquid (the proposed\ - \ instruments); and gas (brass and woodwinds). Instruments that use the fourth\ - \ state of matter (plasma) are also proposed. (2) Although the current trend in\ - \ musical interfaces has been to expand versatililty and generality by separating\ - \ the interface from the sound-producing medium, this paper identifies an opposite\ - \ trend in musical interface design inspired by instruments such as the harp,\ - \ the acoustic or electric guitar, the tin whistle, and the Neanderthal flute,\ - \ that have a directness of user-interface, where the fingers of the musician\ - \ are in direct physical contact with the sound-producing medium. The newly invented\ - \ instruments are thus designed to have this sensually tempting intimacy not be\ - \ lost behind layers of abstraction, while also allowing for the high degree of\ - \ virtuosity. Examples presented include the poseidophone, an instrument made\ - \ from an array of ripple tanks, each tuned for a particular note, and the hydraulophone,\ - \ an instrument in which sound is produced by pressurized hydraulic fluid that\ - \ is in direct physical contact with the fingers of the player. Instruments based\ - \ on these primordial media tend to fall outside existing classifications and\ - \ taxonomies of known musical instruments which only consider instruments that\ - \ make sound with solid or gaseous states of matter. 
To better understand and\ - \ contextualize some of the new primordial user interfaces, a broader concept\ - \ of musical instrument classification is proposed that considers the states of\ - \ matter of both the user-interface and the sound production medium.},\n address\ - \ = {New York City, NY, United States},\n author = {Mann, Steve},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177181},\n issn = {2220-4806},\n keywords\ - \ = {all or part of,ethnomusicology,hydraulophone,is granted without fee,nime07,or\ - \ hard copies of,permission to make digital,personal or classroom use,provided\ - \ that copies are,tangible user interface,this work for},\n pages = {118--123},\n\ - \ title = {Natural Interfaces for Musical Expression : Physiphones and a Physics-Based\ - \ Organology},\n url = {http://www.nime.org/proceedings/2007/nime2007_118.pdf},\n\ - \ year = {2007}\n}\n" + ID: Hayes2011 + address: 'Oslo, Norway' + author: 'Hayes, Lauren' + bibtex: "@inproceedings{Hayes2011,\n address = {Oslo, Norway},\n author = {Hayes,\ + \ Lauren},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178043},\n issn = {2220-4806},\n\ + \ keywords = {Vibrotactile feedback, human-computer interfaces, digital composition,\ + \ real-time performance, augmented instruments. 
},\n pages = {72--75},\n title\ + \ = {Vibrotactile Feedback-Assisted Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_072.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177181 + doi: 10.5281/zenodo.1178043 issn: 2220-4806 - keywords: 'all or part of,ethnomusicology,hydraulophone,is granted without fee,nime07,or - hard copies of,permission to make digital,personal or classroom use,provided that - copies are,tangible user interface,this work for' - pages: 118--123 - title: 'Natural Interfaces for Musical Expression : Physiphones and a Physics-Based - Organology' - url: http://www.nime.org/proceedings/2007/nime2007_118.pdf - year: 2007 + keywords: 'Vibrotactile feedback, human-computer interfaces, digital composition, + real-time performance, augmented instruments. ' + pages: 72--75 + title: Vibrotactile Feedback-Assisted Performance + url: http://www.nime.org/proceedings/2011/nime2011_072.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Bevilacqua2007 - abstract: 'We present in this paper a complete gestural interface built to support - music pedagogy. The development of this prototype concerned both hardware and - software components: a small wireless sensor interface including accelerometers - and gyroscopes, and an analysis system enabling gesture following and recognition. - A first set of experiments was conducted with teenagers in a music theory class. - The preliminary results were encouraging concerning the suitability of these developments - in music education. ' - address: 'New York City, NY, United States' - author: 'Bevilacqua, Frédéric and Guédy, Fabrice and Schnell, Norbert and Fléty, - Emmanuel and Leroy, Nicolas' - bibtex: "@inproceedings{Bevilacqua2007,\n abstract = {We present in this paper a\ - \ complete gestural interface built to support music pedagogy. 
The development\ - \ of this prototype concerned both hardware and software components: a small wireless\ - \ sensor interface including accelerometers and gyroscopes, and an analysis system\ - \ enabling gesture following and recognition. A first set of experiments was conducted\ - \ with teenagers in a music theory class. The preliminary results were encouraging\ - \ concerning the suitability of these developments in music education. },\n address\ - \ = {New York City, NY, United States},\n author = {Bevilacqua, Fr\\'{e}d\\'{e}ric\ - \ and Gu\\'{e}dy, Fabrice and Schnell, Norbert and Fl\\'{e}ty, Emmanuel and Leroy,\ - \ Nicolas},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177045},\n issn\ - \ = {2220-4806},\n keywords = {Technology-enhanced learning, music pedagogy, wireless\ - \ interface, gesture-follower, gesture recognition },\n pages = {124--129},\n\ - \ title = {Wireless Sensor Interface and Gesture-Follower for Music Pedagogy},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_124.pdf},\n year = {2007}\n\ + ID: Ando2011 + abstract: 'The use of Interactive Evolutionary Computation (IEC) is suitable to + the development of art-creation aid system for beginners. This is because of important + features of IEC, like the ability of optimizing with ambiguous evaluation measures, + and not requiring special knowledge about art-creation. With the popularity of + Consumer Generated Media, many beginners in term of art-creation are interested + in creating their own original art works. Thus developing of useful IEC system + for musical creation is an urgent task. However, user-assist functions for IEC + proposed in pastworks decrease the possibility of getting good unexpected results, + which is an important feature of art-creation with IEC. In this paper, The author + proposes a new IEC evaluation process named "Shopping Basket" procedure IEC. 
In + the procedure, an user-assist function called Similarity-Based Reasoning allows + for natural evaluation by the user. The function reduces user''s burden without + reducing the possibility of unexpected results. The author performs an experiment + where subjects use the new interface to validate it. As a result of the experiment, + the author concludes that the new interface is better to motivate users to compose + with IEC system than the old interface.' + address: 'Oslo, Norway' + author: 'Ando, Daichi' + bibtex: "@inproceedings{Ando2011,\n abstract = {The use of Interactive Evolutionary\ + \ Computation (IEC) is suitable to the development of art-creation aid system\ + \ for beginners. This is because of important features of IEC, like the ability\ + \ of optimizing with ambiguous evaluation measures, and not requiring special\ + \ knowledge about art-creation. With the popularity of Consumer Generated Media,\ + \ many beginners in term of art-creation are interested in creating their own\ + \ original art works. Thus developing of useful IEC system for musical creation\ + \ is an urgent task. However, user-assist functions for IEC proposed in pastworks\ + \ decrease the possibility of getting good unexpected results, which is an important\ + \ feature of art-creation with IEC. In this paper, The author proposes a new IEC\ + \ evaluation process named \"Shopping Basket\" procedure IEC. In the procedure,\ + \ an user-assist function called Similarity-Based Reasoning allows for natural\ + \ evaluation by the user. The function reduces user's burden without reducing\ + \ the possibility of unexpected results. The author performs an experiment where\ + \ subjects use the new interface to validate it. 
As a result of the experiment,\ + \ the author concludes that the new interface is better to motivate users to compose\ + \ with IEC system than the old interface.},\n address = {Oslo, Norway},\n author\ + \ = {Ando, Daichi},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177941},\n\ + \ issn = {2220-4806},\n keywords = {Interactive Evolutionary Computation, User-Interface,\ + \ Composition Aid },\n pages = {76--79},\n title = {Improving User-Interface of\ + \ Interactive EC for Composition-Aid by means of Shopping Basket Procedure},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_076.pdf},\n year = {2011}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177045 + doi: 10.5281/zenodo.1177941 issn: 2220-4806 - keywords: 'Technology-enhanced learning, music pedagogy, wireless interface, gesture-follower, - gesture recognition ' - pages: 124--129 - title: Wireless Sensor Interface and Gesture-Follower for Music Pedagogy - url: http://www.nime.org/proceedings/2007/nime2007_124.pdf - year: 2007 + keywords: 'Interactive Evolutionary Computation, User-Interface, Composition Aid ' + pages: 76--79 + title: Improving User-Interface of Interactive EC for Composition-Aid by means of + Shopping Basket Procedure + url: http://www.nime.org/proceedings/2011/nime2011_076.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Dannenberg2007 - abstract: 'Augmenting performances of live popular music with computer systems poses - many new challenges. Here, "popular music" is taken to mean music with a mostly - steady tempo, some improvisational elements, and largely predetermined melodies, - harmonies, and other parts. The overall problem is studied by developing a framework - consisting of constraints and subproblems that any solution should address. 
These - problems include beat acquisition, beat phase, score location, sound synthesis, - data preparation, and adaptation. A prototype system is described that offers - a set of solutions to the problems posed by the framework, and future work is - suggested. ' - address: 'New York City, NY, United States' - author: 'Dannenberg, Roger B.' - bibtex: "@inproceedings{Dannenberg2007,\n abstract = {Augmenting performances of\ - \ live popular music with computer systems poses many new challenges. Here, \"\ - popular music\" is taken to mean music with a mostly steady tempo, some improvisational\ - \ elements, and largely predetermined melodies, harmonies, and other parts. The\ - \ overall problem is studied by developing a framework consisting of constraints\ - \ and subproblems that any solution should address. These problems include beat\ - \ acquisition, beat phase, score location, sound synthesis, data preparation,\ - \ and adaptation. A prototype system is described that offers a set of solutions\ - \ to the problems posed by the framework, and future work is suggested. },\n address\ - \ = {New York City, NY, United States},\n author = {Dannenberg, Roger B.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177081},\n issn = {2220-4806},\n keywords\ - \ = {accompaniment,beat,conducting,intelligent,music synchronization,nime07,synthetic\ - \ performer,tracking,virtual orchestra},\n pages = {130--135},\n title = {New\ - \ Interfaces for Popular Music Performance},\n url = {http://www.nime.org/proceedings/2007/nime2007_130.pdf},\n\ - \ year = {2007}\n}\n" + ID: Mcgee2011 + abstract: BioRhythm is an interactive bio-feedback installation controlled by the + cardiovascular system. Data from a photoplethysmograph (PPG) sensor controls sonification + and visualization parameters in real-time. 
Biological signals areobtained using + the techniques of Resonance Theory in Hemodynamics and mapped to audiovisual cues + via the Five Element Philosophy. The result is a new media interface utilizing + sound synthesis and spatialization with advanced graphics rendering. BioRhythm + serves as an artistic explorationof the harmonic spectra of pulse waves. + address: 'Oslo, Norway' + author: 'Mcgee, Ryan and Fan, Yuan-Yi and Ali, Reza' + bibtex: "@inproceedings{Mcgee2011,\n abstract = {BioRhythm is an interactive bio-feedback\ + \ installation controlled by the cardiovascular system. Data from a photoplethysmograph\ + \ (PPG) sensor controls sonification and visualization parameters in real-time.\ + \ Biological signals areobtained using the techniques of Resonance Theory in Hemodynamics\ + \ and mapped to audiovisual cues via the Five Element Philosophy. The result is\ + \ a new media interface utilizing sound synthesis and spatialization with advanced\ + \ graphics rendering. BioRhythm serves as an artistic explorationof the harmonic\ + \ spectra of pulse waves.},\n address = {Oslo, Norway},\n author = {Mcgee, Ryan\ + \ and Fan, Yuan-Yi and Ali, Reza},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178105},\n\ + \ issn = {2220-4806},\n keywords = {bio-feedback,bio-sensing,fm synthesis,open\ + \ sound control,parallel computing,sonification,spa-,spatial audio,tialization,tion,visualiza-},\n\ + \ pages = {80--83},\n title = {BioRhythm : a Biologically-inspired Audio-Visual\ + \ Installation},\n url = {http://www.nime.org/proceedings/2011/nime2011_080.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177081 + doi: 10.5281/zenodo.1178105 issn: 2220-4806 - keywords: 'accompaniment,beat,conducting,intelligent,music synchronization,nime07,synthetic - performer,tracking,virtual orchestra' 
- pages: 130--135 - title: New Interfaces for Popular Music Performance - url: http://www.nime.org/proceedings/2007/nime2007_130.pdf - year: 2007 + keywords: 'bio-feedback,bio-sensing,fm synthesis,open sound control,parallel computing,sonification,spa-,spatial + audio,tialization,tion,visualiza-' + pages: 80--83 + title: 'BioRhythm : a Biologically-inspired Audio-Visual Installation' + url: http://www.nime.org/proceedings/2011/nime2011_080.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Lee2007 - abstract: 'We present a system for rhythmic analysis of human motion inreal-time. - Using a combination of both spectral (Fourier) andspatial analysis of onsets, - we are able to extract repeating rhythmic patterns from data collected using accelerometers. - These extracted rhythmic patterns show the relative magnitudes of accentuated - movements and their spacing in time. Inspired by previouswork in automatic beat - detection of audio recordings, we designedour algorithms to be robust to changes - in timing using multipleanalysis techniques and methods for sensor fusion, filtering - andclustering. We tested our system using a limited set of movements,as well as - dance movements collected from a professional, bothwith promising results.' - address: 'New York City, NY, United States' - author: 'Lee, Eric and Enke, Urs and Borchers, Jan and de Jong, Leo' - bibtex: "@inproceedings{Lee2007,\n abstract = {We present a system for rhythmic\ - \ analysis of human motion inreal-time. Using a combination of both spectral (Fourier)\ - \ andspatial analysis of onsets, we are able to extract repeating rhythmic patterns\ - \ from data collected using accelerometers. 
These extracted rhythmic patterns\ - \ show the relative magnitudes of accentuated movements and their spacing in time.\ - \ Inspired by previouswork in automatic beat detection of audio recordings, we\ - \ designedour algorithms to be robust to changes in timing using multipleanalysis\ - \ techniques and methods for sensor fusion, filtering andclustering. We tested\ - \ our system using a limited set of movements,as well as dance movements collected\ - \ from a professional, bothwith promising results.},\n address = {New York City,\ - \ NY, United States},\n author = {Lee, Eric and Enke, Urs and Borchers, Jan and\ - \ de Jong, Leo},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177159},\n\ - \ issn = {2220-4806},\n keywords = {rhythm analysis, dance movement analysis,\ - \ onset analysis },\n pages = {136--141},\n title = {Towards Rhythmic Analysis\ - \ of Human Motion Using Acceleration-Onset Times},\n url = {http://www.nime.org/proceedings/2007/nime2007_136.pdf},\n\ - \ year = {2007}\n}\n" + ID: Pigott2011 + address: 'Oslo, Norway' + author: 'Pigott, Jon' + bibtex: "@inproceedings{Pigott2011,\n address = {Oslo, Norway},\n author = {Pigott,\ + \ Jon},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178133},\n issn = {2220-4806},\n\ + \ keywords = {Electromechanical sonic art, kinetic sound art, prepared speakers,\ + \ Infinite Spring. 
},\n pages = {84--87},\n title = {Vibration , Volts and Sonic\ + \ Art: A Practice and Theory of Electromechanical Sound},\n url = {http://www.nime.org/proceedings/2011/nime2011_084.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177159 + doi: 10.5281/zenodo.1178133 issn: 2220-4806 - keywords: 'rhythm analysis, dance movement analysis, onset analysis ' - pages: 136--141 - title: Towards Rhythmic Analysis of Human Motion Using Acceleration-Onset Times - url: http://www.nime.org/proceedings/2007/nime2007_136.pdf - year: 2007 + keywords: 'Electromechanical sonic art, kinetic sound art, prepared speakers, Infinite + Spring. ' + pages: 84--87 + title: 'Vibration , Volts and Sonic Art: A Practice and Theory of Electromechanical + Sound' + url: http://www.nime.org/proceedings/2011/nime2011_084.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Bouillot2007 - abstract: 'Remote real-time musical interaction is a domain where endto-end latency - is a well known problem. Today, the mainexplored approach aims to keep it below - the musicians perception threshold. In this paper, we explore another approach, - where end-to-end delays rise to several seconds, butcomputed in a controlled (and - synchronized) way dependingon the structure of the musical pieces. Thanks to our - fullydistributed prototype called nJam, we perform user experiments to show how - this new kind of interactivity breaks theactual end-to-end latency bounds.' - address: 'New York City, NY, United States' - author: 'Bouillot, Nicolas' - bibtex: "@inproceedings{Bouillot2007,\n abstract = {Remote real-time musical interaction\ - \ is a domain where endto-end latency is a well known problem. Today, the mainexplored\ - \ approach aims to keep it below the musicians perception threshold. 
In this paper,\ - \ we explore another approach, where end-to-end delays rise to several seconds,\ - \ butcomputed in a controlled (and synchronized) way dependingon the structure\ - \ of the musical pieces. Thanks to our fullydistributed prototype called nJam,\ - \ we perform user experiments to show how this new kind of interactivity breaks\ - \ theactual end-to-end latency bounds.},\n address = {New York City, NY, United\ - \ States},\n author = {Bouillot, Nicolas},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177055},\n\ - \ issn = {2220-4806},\n keywords = {Remote real-time musical interaction, end-to-end\ - \ delays, syn- chronization, user experiments, distributed metronome, NMP. },\n\ - \ pages = {142--147},\n title = {nJam User Experiments : Enabling Remote Musical\ - \ Interaction from Milliseconds to Seconds},\n url = {http://www.nime.org/proceedings/2007/nime2007_142.pdf},\n\ - \ year = {2007}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1177055 - issn: 2220-4806 - keywords: 'Remote real-time musical interaction, end-to-end delays, syn- chronization, - user experiments, distributed metronome, NMP. ' - pages: 142--147 - title: 'nJam User Experiments : Enabling Remote Musical Interaction from Milliseconds - to Seconds' - url: http://www.nime.org/proceedings/2007/nime2007_142.pdf - year: 2007 - - -- ENTRYTYPE: inproceedings - ID: Moody2007 - abstract: 'This paper describes the Ashitaka audiovisual instrumentand the process - used to develop it. The main idea guidingthe design of the instrument is that - motion can be used toconnect audio and visuals, and the first part of the paperconsists - of an exploration of this idea. 
The issue of mappings is raised, discussing both - audio-visual mappings andthe mappings between the interface and synthesis methods.The - paper concludes with a detailed look at the instrumentitself, including the interface, - synthesis methods, and mappings used.' - address: 'New York City, NY, United States' - author: 'Moody, Niall and Fells, Nick and Bailey, Nicholas' - bibtex: "@inproceedings{Moody2007,\n abstract = {This paper describes the Ashitaka\ - \ audiovisual instrumentand the process used to develop it. The main idea guidingthe\ - \ design of the instrument is that motion can be used toconnect audio and visuals,\ - \ and the first part of the paperconsists of an exploration of this idea. The\ - \ issue of mappings is raised, discussing both audio-visual mappings andthe mappings\ - \ between the interface and synthesis methods.The paper concludes with a detailed\ - \ look at the instrumentitself, including the interface, synthesis methods, and\ - \ mappings used.},\n address = {New York City, NY, United States},\n author =\ - \ {Moody, Niall and Fells, Nick and Bailey, Nicholas},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177199},\n issn = {2220-4806},\n keywords = {audiovisual,instrument,mappings,nime07,synchresis,x3d},\n\ - \ pages = {148--153},\n title = {Ashitaka : An Audiovisual Instrument},\n url\ - \ = {http://www.nime.org/proceedings/2007/nime2007_148.pdf},\n year = {2007}\n\ - }\n" + ID: Sioros2011 + abstract: 'We introduce a novel algorithm for automatically generating rhythms in + real time in a certain meter. The generated rhythms are "generic" in the sense + that they are characteristic of each time signature without belonging to a specific + musical style. The algorithm is based on a stochastic model in which various aspects + and qualities of the generated rhythm can be controlled intuitively and in real + time. 
Such qualities are the density of the generated events per bar, the amount + of variation in generation, the amount of syncopation, the metrical strength, + and of course the meter itself. The kin.rhythmicator software application was + developed to implement this algorithm. During a performance with the kin.rhythmicator + the user can control all aspects of the performance through descriptive and intuitive + graphic controls. ' + address: 'Oslo, Norway' + author: 'Sioros, George and Guedes, Carlos' + bibtex: "@inproceedings{Sioros2011,\n abstract = {We introduce a novel algorithm\ + \ for automatically generating rhythms in real time in a certain meter. The generated\ + \ rhythms are \"generic\" in the sense that they are characteristic of each time\ + \ signature without belonging to a specific musical style. The algorithm is based\ + \ on a stochastic model in which various aspects and qualities of the generated\ + \ rhythm can be controlled intuitively and in real time. Such qualities are the\ + \ density of the generated events per bar, the amount of variation in generation,\ + \ the amount of syncopation, the metrical strength, and of course the meter itself.\ + \ The kin.rhythmicator software application was developed to implement this algorithm.\ + \ During a performance with the kin.rhythmicator the user can control all aspects\ + \ of the performance through descriptive and intuitive graphic controls. 
},\n\ + \ address = {Oslo, Norway},\n author = {Sioros, George and Guedes, Carlos},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178163},\n issn = {2220-4806},\n\ + \ keywords = {automatic music generation, generative, stochastic, metric indispensability,\ + \ syncopation, Max/MSP, Max4Live },\n pages = {88--91},\n title = {Automatic Rhythmic\ + \ Performance in Max/MSP: the kin.rhythmicator},\n url = {http://www.nime.org/proceedings/2011/nime2011_088.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177199 + doi: 10.5281/zenodo.1178163 issn: 2220-4806 - keywords: audiovisual,instrument,mappings,nime07,synchresis,x3d - pages: 148--153 - title: 'Ashitaka : An Audiovisual Instrument' - url: http://www.nime.org/proceedings/2007/nime2007_148.pdf - year: 2007 + keywords: 'automatic music generation, generative, stochastic, metric indispensability, + syncopation, Max/MSP, Max4Live ' + pages: 88--91 + title: 'Automatic Rhythmic Performance in Max/MSP: the kin.rhythmicator' + url: http://www.nime.org/proceedings/2011/nime2011_088.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Aimi2007 - abstract: 'This paper describes several example hybrid acoustic / electronic percussion - instruments using realtime convolution toaugment and modify the apparent acoustics - of damped physical objects. Examples of cymbal, frame drum, practice pad,brush, - and bass drum controllers are described.' - address: 'New York City, NY, United States' - author: 'Aimi, Roberto' - bibtex: "@inproceedings{Aimi2007,\n abstract = {This paper describes several example\ - \ hybrid acoustic / electronic percussion instruments using realtime convolution\ - \ toaugment and modify the apparent acoustics of damped physical objects. 
Examples\ - \ of cymbal, frame drum, practice pad,brush, and bass drum controllers are described.},\n\ - \ address = {New York City, NY, United States},\n author = {Aimi, Roberto},\n\ + ID: Goncalves2011 + abstract: The importance of embedded devices as new devices to thefield of Voltage-Controlled + Synthesizers is realized. Emphasis is directed towards understanding the importance + of suchdevices in Voltage-Controlled Synthesizers. Introducing theVoltage-Controlled + Computer as a new paradigm. Specifications for hardware interfacing and programming + techniquesare described based on real prototypes. Implementationsand successful + results are reported. + address: 'Oslo, Norway' + author: 'Goncalves, André' + bibtex: "@inproceedings{Goncalves2011,\n abstract = {The importance of embedded\ + \ devices as new devices to thefield of Voltage-Controlled Synthesizers is realized.\ + \ Emphasis is directed towards understanding the importance of suchdevices in\ + \ Voltage-Controlled Synthesizers. Introducing theVoltage-Controlled Computer\ + \ as a new paradigm. Specifications for hardware interfacing and programming techniquesare\ + \ described based on real prototypes. 
Implementationsand successful results are\ + \ reported.},\n address = {Oslo, Norway},\n author = {Goncalves, Andr{\\'{e}}},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177033},\n issn = {2220-4806},\n\ - \ keywords = {Musical controllers, extended acoustic instruments },\n pages =\ - \ {154--159},\n title = {Percussion Instruments Using Realtime Convolution : Physical\ - \ Controllers},\n url = {http://www.nime.org/proceedings/2007/nime2007_154.pdf},\n\ - \ year = {2007}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1178035},\n issn = {2220-4806},\n\ + \ keywords = {Voltage-controlled synthesizer, embedded systems, voltage-controlled\ + \ computer, computer driven control voltage generation },\n pages = {92--95},\n\ + \ title = {Towards a Voltage-Controlled Computer Control and Interaction Beyond\ + \ an Embedded System},\n url = {http://www.nime.org/proceedings/2011/nime2011_092.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177033 + doi: 10.5281/zenodo.1178035 issn: 2220-4806 - keywords: 'Musical controllers, extended acoustic instruments ' - pages: 154--159 - title: 'Percussion Instruments Using Realtime Convolution : Physical Controllers' - url: http://www.nime.org/proceedings/2007/nime2007_154.pdf - year: 2007 + keywords: 'Voltage-controlled synthesizer, embedded systems, voltage-controlled + computer, computer driven control voltage generation ' + pages: 92--95 + title: Towards a Voltage-Controlled Computer Control and Interaction Beyond an Embedded + System + url: http://www.nime.org/proceedings/2011/nime2011_092.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Rohs2007 - abstract: 'CaMus2 allows collaborative performance with mobile camera phones. 
The - original CaMus project was extended tosupport multiple phones performing in the - same space andgenerating MIDI signals to control sound generation andmanipulation - software or hardware. Through an opticalflow technology the system can be used - without a referencemarker grid. When using a marker grid, the use of dynamicdigital - zoom extends the range of performance. Semanticinformation display helps guide - the performer visually.' - address: 'New York City, NY, United States' - author: 'Rohs, Michael and Essl, Georg' - bibtex: "@inproceedings{Rohs2007,\n abstract = {CaMus2 allows collaborative performance\ - \ with mobile camera phones. The original CaMus project was extended tosupport\ - \ multiple phones performing in the same space andgenerating MIDI signals to control\ - \ sound generation andmanipulation software or hardware. Through an opticalflow\ - \ technology the system can be used without a referencemarker grid. When using\ - \ a marker grid, the use of dynamicdigital zoom extends the range of performance.\ - \ Semanticinformation display helps guide the performer visually.},\n address\ - \ = {New York City, NY, United States},\n author = {Rohs, Michael and Essl, Georg},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177233},\n issn = {2220-4806},\n\ - \ keywords = {Camera phone, mobile phone, music performance, mobile sound generation,\ - \ sensing-based interaction, collaboration },\n pages = {160--163},\n title =\ - \ {CaMus 2 -- Optical Flow and Collaboration in Camera Phone Music Performance},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_160.pdf},\n year = {2007}\n\ + ID: Kim2011b + abstract: 'We developed an automatic piano performance system calledPolyhymnia that + is able to generate expressive polyphonicpiano performances with music scores + so that it can be usedas a computer-based tool for an expressive performance.The + system 
automatically renders expressive piano musicby means of automatic musical + symbol interpretation andstatistical models of structure-expression relations + regarding polyphonic features of piano performance. Experimental results indicate + that the generated performances of various piano pieces with diverse trained models + had polyphonicexpression and sounded expressively. In addition, the models trained + with different performance styles reflected thestyles observed in the training + performances, and they werewell distinguishable by human listeners. Polyhymnia + wonthe first prize in the autonomous section of the PerformanceRendering Contest + for Computer Systems (Rencon) 2010.' + address: 'Oslo, Norway' + author: 'Kim, Tae Hun and Fukayama, Satoru and Nishimoto, Takuya and Sagayama, Shigeki' + bibtex: "@inproceedings{Kim2011b,\n abstract = {We developed an automatic piano\ + \ performance system calledPolyhymnia that is able to generate expressive polyphonicpiano\ + \ performances with music scores so that it can be usedas a computer-based tool\ + \ for an expressive performance.The system automatically renders expressive piano\ + \ musicby means of automatic musical symbol interpretation andstatistical models\ + \ of structure-expression relations regarding polyphonic features of piano performance.\ + \ Experimental results indicate that the generated performances of various piano\ + \ pieces with diverse trained models had polyphonicexpression and sounded expressively.\ + \ In addition, the models trained with different performance styles reflected\ + \ thestyles observed in the training performances, and they werewell distinguishable\ + \ by human listeners. 
Polyhymnia wonthe first prize in the autonomous section\ + \ of the PerformanceRendering Contest for Computer Systems (Rencon) 2010.},\n\ + \ address = {Oslo, Norway},\n author = {Kim, Tae Hun and Fukayama, Satoru and\ + \ Nishimoto, Takuya and Sagayama, Shigeki},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178069},\n issn = {2220-4806},\n keywords = {performance rendering,\ + \ polyphonic expression, statistical modeling, conditional random fields },\n\ + \ pages = {96--99},\n title = {Polyhymnia : An Automatic Piano Performance System\ + \ with Statistical Modeling of Polyphonic Expression and Musical Symbol Interpretation},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_096.pdf},\n year = {2011}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177233 + doi: 10.5281/zenodo.1178069 issn: 2220-4806 - keywords: 'Camera phone, mobile phone, music performance, mobile sound generation, - sensing-based interaction, collaboration ' - pages: 160--163 - title: CaMus 2 -- Optical Flow and Collaboration in Camera Phone Music Performance - url: http://www.nime.org/proceedings/2007/nime2007_160.pdf - year: 2007 + keywords: 'performance rendering, polyphonic expression, statistical modeling, conditional + random fields ' + pages: 96--99 + title: 'Polyhymnia : An Automatic Piano Performance System with Statistical Modeling + of Polyphonic Expression and Musical Symbol Interpretation' + url: http://www.nime.org/proceedings/2011/nime2011_096.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Fiebrink2007 - abstract: 'We draw on our experiences with the Princeton Laptop Orchestra to discuss - novel uses of the laptop’s native physical inputs for flexible and expressive - control. 
We argue that instruments designed using these built-in inputs offer - benefits over custom standalone controllers, particularly in certain group performance - settings; creatively thinking about native capabilities can lead to interesting - and unique new interfaces. We discuss a variety of example instruments that use - the laptop’s native capabilities and suggest avenues for future work. We also - describe a new toolkit for rapidly experimenting with these capabilities.' - address: 'New York City, NY, United States' - author: 'Fiebrink, Rebecca and Wang, Ge and Cook, Perry R.' - bibtex: "@inproceedings{Fiebrink2007,\n abstract = {We draw on our experiences with\ - \ the Princeton Laptop Orchestra to discuss novel uses of the laptop’s native\ - \ physical inputs for flexible and expressive control. We argue that instruments\ - \ designed using these built-in inputs offer benefits over custom standalone controllers,\ - \ particularly in certain group performance settings; creatively thinking about\ - \ native capabilities can lead to interesting and unique new interfaces. We discuss\ - \ a variety of example instruments that use the laptop’s native capabilities and\ - \ suggest avenues for future work. We also describe a new toolkit for rapidly\ - \ experimenting with these capabilities.},\n address = {New York City, NY, United\ - \ States},\n author = {Fiebrink, Rebecca and Wang, Ge and Cook, Perry R.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177087},\n issn = {2220-4806},\n keywords\ - \ = {Mapping strategies. Laptop-based physical interfaces. 
Collaborative laptop\ - \ performance.},\n pages = {164--167},\n title = {Don't Forget the Laptop : Using\ - \ Native Input Capabilities for Expressive Musical Control},\n url = {http://www.nime.org/proceedings/2007/nime2007_164.pdf},\n\ - \ year = {2007}\n}\n" + ID: Carrascal2011 + abstract: 'Audio mixing is the adjustment of relative volumes, panning and other + parameters corresponding to different soundsources, in order to create a technically + and aesthetically adequate sound sum. To do this, audio engineers employ "panpots" + and faders, the standard controls in audio mixers. The design of such devices + has remained practically unchanged for decades since their introduction. At the + time,no usability studies seem to have been conducted on suchdevices, so one could + question if they are really optimizedfor the task they are meant for.This paper + proposes a new set of controls that might beused to simplify and/or improve the + performance of audiomixing tasks, taking into account the spatial characteristicsof + modern mixing technologies such as surround and 3Daudio and making use of multitouch + interface technologies.A preliminary usability test has shown promising results.' + address: 'Oslo, Norway' + author: 'Carrascal, Juan P. and Jordà, Sergi' + bibtex: "@inproceedings{Carrascal2011,\n abstract = {Audio mixing is the adjustment\ + \ of relative volumes, panning and other parameters corresponding to different\ + \ soundsources, in order to create a technically and aesthetically adequate sound\ + \ sum. To do this, audio engineers employ \"panpots\" and faders, the standard\ + \ controls in audio mixers. The design of such devices has remained practically\ + \ unchanged for decades since their introduction. 
At the time,no usability studies\ + \ seem to have been conducted on suchdevices, so one could question if they are\ + \ really optimizedfor the task they are meant for.This paper proposes a new set\ + \ of controls that might beused to simplify and/or improve the performance of\ + \ audiomixing tasks, taking into account the spatial characteristicsof modern\ + \ mixing technologies such as surround and 3Daudio and making use of multitouch\ + \ interface technologies.A preliminary usability test has shown promising results.},\n\ + \ address = {Oslo, Norway},\n author = {Carrascal, Juan P. and Jord\\`{a}, Sergi},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177983},\n issn = {2220-4806},\n\ + \ keywords = {audio mixing,control surface,multitouch,touchscreen},\n pages =\ + \ {100--103},\n title = {Multitouch Interface for Audio Mixing},\n url = {http://www.nime.org/proceedings/2011/nime2011_100.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177087 + doi: 10.5281/zenodo.1177983 issn: 2220-4806 - keywords: Mapping strategies. Laptop-based physical interfaces. Collaborative laptop - performance. - pages: 164--167 - title: 'Don''t Forget the Laptop : Using Native Input Capabilities for Expressive - Musical Control' - url: http://www.nime.org/proceedings/2007/nime2007_164.pdf - year: 2007 + keywords: 'audio mixing,control surface,multitouch,touchscreen' + pages: 100--103 + title: Multitouch Interface for Audio Mixing + url: http://www.nime.org/proceedings/2011/nime2011_100.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Moriwaki2007 - abstract: 'In this paper the authors present the MIDI Scrapyard Challenge (MSC) - workshop, a one-day hands-on experience which asks participants to create musical - controllers out of cast-off electronics, found materials and junk. 
The workshop - experience, principles, and considerations are detailed, along with sample projects - which have been created in various MSC workshops. Observations and implications - as well as future developments for the workshop are discussed.' - address: 'New York City, NY, United States' - author: 'Moriwaki, Katherine and Brucken-Cohen, Jonah' - bibtex: "@inproceedings{Moriwaki2007,\n abstract = {In this paper the authors present\ - \ the MIDI Scrapyard Challenge (MSC) workshop, a one-day hands-on experience which\ - \ asks participants to create musical controllers out of cast-off electronics,\ - \ found materials and junk. The workshop experience, principles, and considerations\ - \ are detailed, along with sample projects which have been created in various\ - \ MSC workshops. Observations and implications as well as future developments\ - \ for the workshop are discussed.},\n address = {New York City, NY, United States},\n\ - \ author = {Moriwaki, Katherine and Brucken-Cohen, Jonah},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177201},\n issn = {2220-4806},\n keywords = {Workshop,\ - \ MIDI, Interaction Design, Creativity, Performance},\n pages = {168--172},\n\ - \ title = {MIDI Scrapyard Challenge Workshops},\n url = {http://www.nime.org/proceedings/2007/nime2007_168.pdf},\n\ - \ year = {2007}\n}\n" + ID: Derbinsky2011 + abstract: 'This paper explores how a general cognitive architecture canpragmatically + facilitate the development and exploration ofinteractive music interfaces on a + mobile platform. To thisend we integrated the Soar cognitive architecture into + themobile music meta-environment urMus. We develop anddemonstrate four artificial + agents which use diverse learningmechanisms within two mobile music interfaces. 
+ We alsoinclude details of the computational performance of theseagents, evincing + that the architecture can support real-timeinteractivity on modern commodity hardware.' + address: 'Oslo, Norway' + author: 'Derbinsky, Nate and Essl, Georg' + bibtex: "@inproceedings{Derbinsky2011,\n abstract = {This paper explores how a general\ + \ cognitive architecture canpragmatically facilitate the development and exploration\ + \ ofinteractive music interfaces on a mobile platform. To thisend we integrated\ + \ the Soar cognitive architecture into themobile music meta-environment urMus.\ + \ We develop anddemonstrate four artificial agents which use diverse learningmechanisms\ + \ within two mobile music interfaces. We alsoinclude details of the computational\ + \ performance of theseagents, evincing that the architecture can support real-timeinteractivity\ + \ on modern commodity hardware.},\n address = {Oslo, Norway},\n author = {Derbinsky,\ + \ Nate and Essl, Georg},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177993},\n\ + \ issn = {2220-4806},\n keywords = {cognitive architecture,machine learning,mobile\ + \ music},\n pages = {104--107},\n title = {Cognitive Architecture in Mobile Music\ + \ Interactions},\n url = {http://www.nime.org/proceedings/2011/nime2011_104.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177201 + doi: 10.5281/zenodo.1177993 issn: 2220-4806 - keywords: 'Workshop, MIDI, Interaction Design, Creativity, Performance' - pages: 168--172 - title: MIDI Scrapyard Challenge Workshops - url: http://www.nime.org/proceedings/2007/nime2007_168.pdf - year: 2007 + keywords: 'cognitive architecture,machine learning,mobile music' + pages: 104--107 + title: Cognitive Architecture in Mobile Music Interactions + url: http://www.nime.org/proceedings/2011/nime2011_104.pdf + year: 
2011 - ENTRYTYPE: inproceedings - ID: Lee2007a - abstract: 'We present REXband, an interactive music exhibit for collaborative improvisation - to medieval music. This audio-only system consists of three digitally augmented - medieval instrument replicas: thehurdy gurdy, harp, and frame drum. The instruments - communicate with software that provides users with both musical support and feedback - on their performance using a "virtual audience" set in a medieval tavern. REXband - builds upon previous work in interactive music exhibits by incorporating aspects - of e-learning to educate, in addition to interaction design patterns to entertain; - care was also taken to ensure historic authenticity. Feedback from user testing - in both controlled (laboratory) and public (museum) environments has been extremely - positive. REXband is part of the Regensburg Experience, an exhibition scheduled - to open in July 2007 to showcase the rich history of Regensburg, Germany.' - address: 'New York City, NY, United States' - author: 'Lee, Eric and Wolf, Marius and Jansen, Yvonne and Borchers, Jan' - bibtex: "@inproceedings{Lee2007a,\n abstract = {We present REXband, an interactive\ - \ music exhibit for collaborative improvisation to medieval music. This audio-only\ - \ system consists of three digitally augmented medieval instrument replicas: thehurdy\ - \ gurdy, harp, and frame drum. The instruments communicate with software that\ - \ provides users with both musical support and feedback on their performance using\ - \ a \"virtual audience\" set in a medieval tavern. REXband builds upon previous\ - \ work in interactive music exhibits by incorporating aspects of e-learning to\ - \ educate, in addition to interaction design patterns to entertain; care was also\ - \ taken to ensure historic authenticity. Feedback from user testing in both controlled\ - \ (laboratory) and public (museum) environments has been extremely positive. 
REXband\ - \ is part of the Regensburg Experience, an exhibition scheduled to open in July\ - \ 2007 to showcase the rich history of Regensburg, Germany.},\n address = {New\ - \ York City, NY, United States},\n author = {Lee, Eric and Wolf, Marius and Jansen,\ - \ Yvonne and Borchers, Jan},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177163},\n\ - \ issn = {2220-4806},\n keywords = {interactive music exhibits, medieval music,\ - \ augmented instruments, e-learning, education },\n pages = {172--177},\n title\ - \ = {REXband : A Multi-User Interactive Exhibit for Exploring Medieval Music},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_172.pdf},\n year = {2007}\n\ + ID: Smith2011 + abstract: 'Supervised machine learning enables complex many-to-manymappings and + control schemes needed in interactive performance systems. One of the persistent + problems in theseapplications is generating, identifying and choosing inputoutput + pairings for training. This poses problems of scope(limiting the realm of potential + control inputs), effort (requiring significant pre-performance training time), + and cognitive load (forcing the performer to learn and remember thecontrol areas). + We discuss the creation and implementationof an automatic "supervisor", using + unsupervised machinelearning algorithms to train a supervised neural networkon + the fly. This hierarchical arrangement enables networktraining in real time based + on the musical or gestural control inputs employed in a performance, aiming at + freeing theperformer to operate in a creative, intuitive realm, makingthe machine + control transparent and automatic. Three implementations of this self supervised + model driven by iPod,iPad, and acoustic violin are described.' + address: 'Oslo, Norway' + author: 'Smith, Benjamin D. and Garnett, Guy E.' 
+ bibtex: "@inproceedings{Smith2011,\n abstract = {Supervised machine learning enables\ + \ complex many-to-manymappings and control schemes needed in interactive performance\ + \ systems. One of the persistent problems in theseapplications is generating,\ + \ identifying and choosing inputoutput pairings for training. This poses problems\ + \ of scope(limiting the realm of potential control inputs), effort (requiring\ + \ significant pre-performance training time), and cognitive load (forcing the\ + \ performer to learn and remember thecontrol areas). We discuss the creation and\ + \ implementationof an automatic \"supervisor\", using unsupervised machinelearning\ + \ algorithms to train a supervised neural networkon the fly. This hierarchical\ + \ arrangement enables networktraining in real time based on the musical or gestural\ + \ control inputs employed in a performance, aiming at freeing theperformer to\ + \ operate in a creative, intuitive realm, makingthe machine control transparent\ + \ and automatic. Three implementations of this self supervised model driven by\ + \ iPod,iPad, and acoustic violin are described.},\n address = {Oslo, Norway},\n\ + \ author = {Smith, Benjamin D. 
and Garnett, Guy E.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178169},\n issn = {2220-4806},\n keywords = {NIME, machine\ + \ learning, interactive computer music, machine listening, improvisation, adaptive\ + \ resonance theory },\n pages = {108--111},\n title = {The Self-Supervising Machine},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_108.pdf},\n year = {2011}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177163 + doi: 10.5281/zenodo.1178169 issn: 2220-4806 - keywords: 'interactive music exhibits, medieval music, augmented instruments, e-learning, - education ' - pages: 172--177 - title: 'REXband : A Multi-User Interactive Exhibit for Exploring Medieval Music' - url: http://www.nime.org/proceedings/2007/nime2007_172.pdf - year: 2007 + keywords: 'NIME, machine learning, interactive computer music, machine listening, + improvisation, adaptive resonance theory ' + pages: 108--111 + title: The Self-Supervising Machine + url: http://www.nime.org/proceedings/2011/nime2011_108.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Baalman2007 - abstract: 'This paper describes work on a newly created large-scale interactive - theater performance entitled Schwelle (Thresholds). The authors discuss an innovative - approach towards the conception, development and implementation of dynamic and - responsive audio scenography: a constantly evolving, multi-layered sound design - generated by continuous input from a series of distributed wireless sensors deployed - both on the body of a performer and placed within the physical stage environment. - The paper is divided into conceptual and technological parts. We first describe - the project’s dramaturgical and conceptual context in order to situate the artistic - framework that has guided the technological system design. 
Specifically, this - framework discusses the team’s approach in combining techniques from situated - computing, theatrical sound design practice and dynamical systems in order to - create a new kind of adaptive audio scenographic environment augmented by wireless, - distributed sensing for use in live theatrical performance. The goal of this adaptive - sound design is to move beyond both existing playback models used in theatre sound - as well as the purely humancentered, controller-instrument approach used in much - current interactive performance practice.' - address: 'New York City, NY, United States' - author: 'Baalman, Marije A. and Moody-Grigsby, Daniel and Salter, Christopher L.' - bibtex: "@inproceedings{Baalman2007,\n abstract = {This paper describes work on\ - \ a newly created large-scale interactive theater performance entitled Schwelle\ - \ (Thresholds). The authors discuss an innovative approach towards the conception,\ - \ development and implementation of dynamic and responsive audio scenography:\ - \ a constantly evolving, multi-layered sound design generated by continuous input\ - \ from a series of distributed wireless sensors deployed both on the body of a\ - \ performer and placed within the physical stage environment. The paper is divided\ - \ into conceptual and technological parts. We first describe the project’s dramaturgical\ - \ and conceptual context in order to situate the artistic framework that has guided\ - \ the technological system design. Specifically, this framework discusses the\ - \ team’s approach in combining techniques from situated computing, theatrical\ - \ sound design practice and dynamical systems in order to create a new kind of\ - \ adaptive audio scenographic environment augmented by wireless, distributed sensing\ - \ for use in live theatrical performance. 
The goal of this adaptive sound design\ - \ is to move beyond both existing playback models used in theatre sound as well\ - \ as the purely humancentered, controller-instrument approach used in much current\ - \ interactive performance practice.},\n address = {New York City, NY, United States},\n\ - \ author = {Baalman, Marije A. and Moody-Grigsby, Daniel and Salter, Christopher\ - \ L.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177035},\n issn = {2220-4806},\n\ - \ keywords = {Interactive performance, dynamical systems, wireless sens- ing,\ - \ adaptive audio scenography, audio dramaturgy, situated computing, sound design\ - \ },\n pages = {178--184},\n title = {Schwelle : Sensor Augmented, Adaptive Sound\ - \ Design for Live Theatrical Performance},\n url = {http://www.nime.org/proceedings/2007/nime2007_178.pdf},\n\ - \ year = {2007}\n}\n" + ID: Albin2011 + abstract: 'A mixed media tool was created that promotes ensemblevirtuosity through + tight coordination and interdepence inmusical performance. Two different types + of performers interact with a virtual space using Wii remote and tangibleinterfaces + using the reacTIVision toolkit [11]. One group ofperformers uses a tangible tabletop + interface to place andmove sound objects in a virtual environment. The soundobjects + are represented by visual avatars and have audiosamples associated with them. + A second set of performersmake use of Wii remotes to create triggering waves thatcan + collide with those sound objects. Sound is only produced upon collision of the + waves with the sound objects.What results is a performance in which users must + negotiate through a physical and virtual space and are positionedto work together + to create musical pieces.' 
+ address: 'Oslo, Norway' + author: 'Albin, Aaron and Şentürk, Sertan and Van Troyer, Akito and Blosser, Brian + and Jan, Oliver and Weinberg, Gil' + bibtex: "@inproceedings{Albin2011,\n abstract = {A mixed media tool was created\ + \ that promotes ensemblevirtuosity through tight coordination and interdepence\ + \ inmusical performance. Two different types of performers interact with a virtual\ + \ space using Wii remote and tangibleinterfaces using the reacTIVision toolkit\ + \ [11]. One group ofperformers uses a tangible tabletop interface to place andmove\ + \ sound objects in a virtual environment. The soundobjects are represented by\ + \ visual avatars and have audiosamples associated with them. A second set of performersmake\ + \ use of Wii remotes to create triggering waves thatcan collide with those sound\ + \ objects. Sound is only produced upon collision of the waves with the sound objects.What\ + \ results is a performance in which users must negotiate through a physical and\ + \ virtual space and are positionedto work together to create musical pieces.},\n\ + \ address = {Oslo, Norway},\n author = {Albin, Aaron and \\c{S}ent\\''{u}rk, Sertan\ + \ and Van Troyer, Akito and Blosser, Brian and Jan, Oliver and Weinberg, Gil},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177939},\n issn = {2220-4806},\n\ + \ keywords = {reacTIVision, processing, ensemble, mixed media, virtualization,\ + \ tangible, sample },\n pages = {112--115},\n title = {Beatscape , a Mixed Virtual-Physical\ + \ Environment for Musical Ensembles},\n url = {http://www.nime.org/proceedings/2011/nime2011_112.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177035 + doi: 10.5281/zenodo.1177939 issn: 2220-4806 - keywords: 'Interactive performance, dynamical systems, wireless sens- ing, adaptive - 
audio scenography, audio dramaturgy, situated computing, sound design ' - pages: 178--184 - title: 'Schwelle : Sensor Augmented, Adaptive Sound Design for Live Theatrical Performance' - url: http://www.nime.org/proceedings/2007/nime2007_178.pdf - year: 2007 + keywords: 'reacTIVision, processing, ensemble, mixed media, virtualization, tangible, + sample ' + pages: 112--115 + title: 'Beatscape , a Mixed Virtual-Physical Environment for Musical Ensembles' + url: http://www.nime.org/proceedings/2011/nime2011_112.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Jakovich2007 - abstract: 'Architectural space is a key contributor to the perceptual world we experience - daily. We present ‘ParticleTecture’, a soundspace installation system that extends - spatial perception of ordinary architectural space through gestural interaction - with sound in space. ParticleTecture employs a particle metaphor to produce granular - synthesis soundspaces in response to video-tracking of human movement. It incorporates - an adaptive mechanism that utilizes a measure of engagement to inform ongoing - audio patterns in response to human activity. By identifying engaging features - in its response, the system is able to predict, pre-empt and shape its evolving - responses in accordance with the most engaging, compelling, interesting attributes - of the active environment. An implementation of ParticleTecture for gallery installation - is presented and discussed as one form of architectural space.' - address: 'New York City, NY, United States' - author: 'Jakovich, Joanne and Beilharz, Kirsty' - bibtex: "@inproceedings{Jakovich2007,\n abstract = {Architectural space is a key\ - \ contributor to the perceptual world we experience daily. We present ‘ParticleTecture’,\ - \ a soundspace installation system that extends spatial perception of ordinary\ - \ architectural space through gestural interaction with sound in space. 
ParticleTecture\ - \ employs a particle metaphor to produce granular synthesis soundspaces in response\ - \ to video-tracking of human movement. It incorporates an adaptive mechanism that\ - \ utilizes a measure of engagement to inform ongoing audio patterns in response\ - \ to human activity. By identifying engaging features in its response, the system\ - \ is able to predict, pre-empt and shape its evolving responses in accordance\ - \ with the most engaging, compelling, interesting attributes of the active environment.\ - \ An implementation of ParticleTecture for gallery installation is presented and\ - \ discussed as one form of architectural space.},\n address = {New York City,\ - \ NY, United States},\n author = {Jakovich, Joanne and Beilharz, Kirsty},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177127},\n issn = {2220-4806},\n keywords\ - \ = {Architecture, installation, interaction, granular synthesis, adaptation,\ - \ engagement. },\n pages = {185--190},\n title = {ParticleTecture : Interactive\ - \ Granular Soundspaces for Architectural Design},\n url = {http://www.nime.org/proceedings/2007/nime2007_185.pdf},\n\ - \ year = {2007}\n}\n" + ID: Fabiani2011 + abstract: 'This paper presents MoodifierLive, a mobile phone application for interactive + control of rule-based automatic musicperformance. Five different interaction modes + are available,of which one allows for collaborative performances with upto four + participants, and two let the user control the expressive performance using expressive + hand gestures. Evaluations indicate that the application is interesting, fun touse, + and that the gesture modes, especially the one based ondata from free expressive + gestures, allow for performanceswhose emotional content matches that of the gesture + thatproduced them.' 
+ address: 'Oslo, Norway' + author: 'Fabiani, Marco and Dubus, Gaël and Bresin, Roberto' + bibtex: "@inproceedings{Fabiani2011,\n abstract = {This paper presents MoodifierLive,\ + \ a mobile phone application for interactive control of rule-based automatic musicperformance.\ + \ Five different interaction modes are available,of which one allows for collaborative\ + \ performances with upto four participants, and two let the user control the expressive\ + \ performance using expressive hand gestures. Evaluations indicate that the application\ + \ is interesting, fun touse, and that the gesture modes, especially the one based\ + \ ondata from free expressive gestures, allow for performanceswhose emotional\ + \ content matches that of the gesture thatproduced them.},\n address = {Oslo,\ + \ Norway},\n author = {Fabiani, Marco and Dubus, Ga\\''{e}l and Bresin, Roberto},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178005},\n issn = {2220-4806},\n\ + \ keywords = {Expressive performance, gesture, collaborative performance, mobile\ + \ phone },\n pages = {116--119},\n title = {MoodifierLive : Interactive and Collaborative\ + \ Expressive Music Performance on Mobile Devices},\n url = {http://www.nime.org/proceedings/2011/nime2011_116.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177127 + doi: 10.5281/zenodo.1178005 issn: 2220-4806 - keywords: 'Architecture, installation, interaction, granular synthesis, adaptation, - engagement. 
' - pages: 185--190 - title: 'ParticleTecture : Interactive Granular Soundspaces for Architectural Design' - url: http://www.nime.org/proceedings/2007/nime2007_185.pdf - year: 2007 + keywords: 'Expressive performance, gesture, collaborative performance, mobile phone ' + pages: 116--119 + title: 'MoodifierLive : Interactive and Collaborative Expressive Music Performance + on Mobile Devices' + url: http://www.nime.org/proceedings/2011/nime2011_116.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Franinovic2007 - abstract: 'The distinctive features of interactive sound installations in public - space are considered, with special attention to the rich, if undoubtedly difficult, - environments in which they exist. It is argued that such environments, and the - social contexts that they imply, are among the most valuable features of these - works for the approach that we have adopted to creation as research practice. - The discussion is articulated through case studies drawn from two of our installations, - Recycled Soundscapes (2004) and Skyhooks (2006). Implications for the broader - design of new musical instruments are presented.' - address: 'New York City, NY, United States' - author: 'Franinovic, Karmen and Visell, Yon' - bibtex: "@inproceedings{Franinovic2007,\n abstract = {The distinctive features of\ - \ interactive sound installations in public space are considered, with special\ - \ attention to the rich, if undoubtedly difficult, environments in which they\ - \ exist. It is argued that such environments, and the social contexts that they\ - \ imply, are among the most valuable features of these works for the approach\ - \ that we have adopted to creation as research practice. The discussion is articulated\ - \ through case studies drawn from two of our installations, Recycled Soundscapes\ - \ (2004) and Skyhooks (2006). 
Implications for the broader design of new musical\ - \ instruments are presented.},\n address = {New York City, NY, United States},\n\ - \ author = {Franinovic, Karmen and Visell, Yon},\n booktitle = {Proceedings of\ + ID: Schroeder2011 + address: 'Oslo, Norway' + author: 'Schroeder, Benjamin and Ainger, Marc and Parent, Richard' + bibtex: "@inproceedings{Schroeder2011,\n address = {Oslo, Norway},\n author = {Schroeder,\ + \ Benjamin and Ainger, Marc and Parent, Richard},\n booktitle = {Proceedings of\ \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177093},\n issn = {2220-4806},\n keywords = {architecture,interaction,music,nime07,sound\ - \ in-,urban design},\n pages = {191--196},\n title = {New Musical Interfaces in\ - \ Context : Sonic Interaction Design in the Urban Setting},\n url = {http://www.nime.org/proceedings/2007/nime2007_191.pdf},\n\ - \ year = {2007}\n}\n" + \ = {10.5281/zenodo.1178157},\n issn = {2220-4806},\n keywords = {a human performer,agents,agents\ + \ smoothly changing the,behavioral animation,figure 1,length of,physically based\ + \ sound,pro-,strings being played by},\n pages = {120--123},\n title = {A Physically\ + \ Based Sound Space for Procedural Agents},\n url = {http://www.nime.org/proceedings/2011/nime2011_120.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177093 + doi: 10.5281/zenodo.1178157 issn: 2220-4806 - keywords: 'architecture,interaction,music,nime07,sound in-,urban design' - pages: 191--196 - title: 'New Musical Interfaces in Context : Sonic Interaction Design in the Urban - Setting' - url: http://www.nime.org/proceedings/2007/nime2007_191.pdf - year: 2007 + keywords: 'a human performer,agents,agents smoothly changing the,behavioral animation,figure + 1,length of,physically based sound,pro-,strings being played by' + pages: 120--123 + title: A Physically Based Sound 
Space for Procedural Agents + url: http://www.nime.org/proceedings/2011/nime2011_120.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Gimenes2007 - abstract: 'In this paper we introduce a System conceived to serve as the "musical - brain" of autonomous musical robots or agent-based software simulations of robotic - systems. Our research goal is to provide robots with the ability to integrate - with the musical culture of their surroundings. In a multi-agent configuration, - the System can simulate an environment in which autonomous agents interact with - each other as well as with external agents (e.g., robots, human beings or other - systems). The main outcome of these interactions is the transformation and development - of their musical styles as well as the musical style of the environment in which - they live. ' - address: 'New York City, NY, United States' - author: 'Gimenes, Marcelo and Miranda, Eduardo and Johnson, Chris' - bibtex: "@inproceedings{Gimenes2007,\n abstract = {In this paper we introduce a\ - \ System conceived to serve as the \"musical brain\" of autonomous musical robots\ - \ or agent-based software simulations of robotic systems. Our research goal is\ - \ to provide robots with the ability to integrate with the musical culture of\ - \ their surroundings. In a multi-agent configuration, the System can simulate\ - \ an environment in which autonomous agents interact with each other as well as\ - \ with external agents (e.g., robots, human beings or other systems). The main\ - \ outcome of these interactions is the transformation and development of their\ - \ musical styles as well as the musical style of the environment in which they\ - \ live. 
},\n address = {New York City, NY, United States},\n author = {Gimenes,\ - \ Marcelo and Miranda, Eduardo and Johnson, Chris},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177099},\n issn = {2220-4806},\n keywords = {artificial\ - \ life,musical style,musicianship,nime07},\n pages = {197--202},\n title = {Musicianship\ - \ for Robots with Style},\n url = {http://www.nime.org/proceedings/2007/nime2007_197.pdf},\n\ - \ year = {2007}\n}\n" + ID: Garcia2011 + abstract: 'This paper presents a study of blowing pressure profilesacquired from + recorder playing. Blowing pressure signalsare captured from real performance by + means of a a lowintrusiveness acquisition system constructed around commercial + pressure sensors based on piezoelectric transducers.An alto recorder was mechanically + modified by a luthierto allow the measurement and connection of sensors whilerespecting + playability and intrusiveness. A multi-modaldatabase including aligned blowing + pressure and sound signals is constructed from real practice, covering the performance + space by considering different fundamental frequencies, dynamics, articulations + and note durations. Once signals were pre-processed and segmented, a set of temporalenvelope + features were defined as a basis for studying andconstructing a simplified model + of blowing pressure profilesin different performance contexts.' + address: 'Oslo, Norway' + author: 'García, Francisco and Vinceslas, Leny and Tubau, Josep and Maestre, Esteban' + bibtex: "@inproceedings{Garcia2011,\n abstract = {This paper presents a study of\ + \ blowing pressure profilesacquired from recorder playing. 
Blowing pressure signalsare\ + \ captured from real performance by means of a a lowintrusiveness acquisition\ + \ system constructed around commercial pressure sensors based on piezoelectric\ + \ transducers.An alto recorder was mechanically modified by a luthierto allow\ + \ the measurement and connection of sensors whilerespecting playability and intrusiveness.\ + \ A multi-modaldatabase including aligned blowing pressure and sound signals is\ + \ constructed from real practice, covering the performance space by considering\ + \ different fundamental frequencies, dynamics, articulations and note durations.\ + \ Once signals were pre-processed and segmented, a set of temporalenvelope features\ + \ were defined as a basis for studying andconstructing a simplified model of blowing\ + \ pressure profilesin different performance contexts.},\n address = {Oslo, Norway},\n\ + \ author = {Garc\\'{\\i}a, Francisco and Vinceslas, Leny and Tubau, Josep and\ + \ Maestre, Esteban},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178025},\n\ + \ issn = {2220-4806},\n keywords = {blowing,instrumental gesture,multi-modal data,pressure,recorder,wind\ + \ instrument},\n pages = {124--127},\n title = {Acquisition and Study of Blowing\ + \ Pressure Profiles in Recorder Playing},\n url = {http://www.nime.org/proceedings/2011/nime2011_124.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177099 + doi: 10.5281/zenodo.1178025 issn: 2220-4806 - keywords: 'artificial life,musical style,musicianship,nime07' - pages: 197--202 - title: Musicianship for Robots with Style - url: http://www.nime.org/proceedings/2007/nime2007_197.pdf - year: 2007 + keywords: 'blowing,instrumental gesture,multi-modal data,pressure,recorder,wind + instrument' + pages: 124--127 + title: Acquisition and Study of Blowing Pressure 
Profiles in Recorder Playing + url: http://www.nime.org/proceedings/2011/nime2011_124.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Topper2007 - abstract: ' WISEAR (Wireless Sensor Array)8, provides a robust andscalable platform - for virtually limitless types of data input tosoftware synthesis engines. It is - essentially a Linux based SBC(Single Board Computer) with 802.11a/b/g wireless - capability.The device, with batteries, only weighs a few pounds and can beworn - by a dancer or other live performer. Past work has focusedon connecting "conventional" - sensors (eg., bend sensors,accelerometers, FSRs, etc...) to the board and using - it as a datarelay, sending the data as real time control messages to synthesisengines - like Max/MSP and RTcmix1. Current research hasextended the abilities of the device - to take real-time audio andvideo data from USB cameras and audio devices, as well - asrunning synthesis engines on board the device itself. Given itsgeneric network - ability (eg., being an 802.11a/b/g device) there istheoretically no limit to the - number of WISEAR boxes that canbe used simultaneously in a performance, facilitating - multiperformer compositions. This paper will present the basic design philosophy - behindWISEAR, explain some of the basic concepts and methods, aswell as provide - a live demonstration of the running device, wornby the author.' - address: 'New York City, NY, United States' - author: 'Topper, David' - bibtex: "@inproceedings{Topper2007,\n abstract = { WISEAR (Wireless Sensor Array)8,\ - \ provides a robust andscalable platform for virtually limitless types of data\ - \ input tosoftware synthesis engines. It is essentially a Linux based SBC(Single\ - \ Board Computer) with 802.11a/b/g wireless capability.The device, with batteries,\ - \ only weighs a few pounds and can beworn by a dancer or other live performer.\ - \ Past work has focusedon connecting \"conventional\" sensors (eg., bend sensors,accelerometers,\ - \ FSRs, etc...) 
to the board and using it as a datarelay, sending the data as\ - \ real time control messages to synthesisengines like Max/MSP and RTcmix1. Current\ - \ research hasextended the abilities of the device to take real-time audio andvideo\ - \ data from USB cameras and audio devices, as well asrunning synthesis engines\ - \ on board the device itself. Given itsgeneric network ability (eg., being an\ - \ 802.11a/b/g device) there istheoretically no limit to the number of WISEAR boxes\ - \ that canbe used simultaneously in a performance, facilitating multiperformer\ - \ compositions. This paper will present the basic design philosophy behindWISEAR,\ - \ explain some of the basic concepts and methods, aswell as provide a live demonstration\ - \ of the running device, wornby the author.},\n address = {New York City, NY,\ - \ United States},\n author = {Topper, David},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177261},\n issn = {2220-4806},\n keywords = {Wireless, sensors,\ - \ embedded devices, linux, real-time audio, real- time video },\n pages = {203--204},\n\ - \ title = {Extended Applications of the Wireless Sensor Array (WISEAR)},\n url\ - \ = {http://www.nime.org/proceedings/2007/nime2007_203.pdf},\n year = {2007}\n\ - }\n" + ID: Friberg2011 + abstract: 'This is an overview of the three installations Hoppsa Universum, CLOSE + and Flying Carpet. They were all designed as choreographed sound and music installations + controlled by the visitors movements. The perspective is from an artistic goal/vision + intention in combination with the technical challenges and possibilities. All + three installations were realized with video cameras in the ceiling registering + the users'' position or movement. The video analysis was then controlling different + types of interactive software audio players. 
Different aspects like narrativity, + user control, and technical limitations are discussed. ' + address: 'Oslo, Norway' + author: 'Friberg, Anders and Källblad, Anna' + bibtex: "@inproceedings{Friberg2011,\n abstract = {This is an overview of the three\ + \ installations Hoppsa Universum, CLOSE and Flying Carpet. They were all designed\ + \ as choreographed sound and music installations controlled by the visitors movements.\ + \ The perspective is from an artistic goal/vision intention in combination with\ + \ the technical challenges and possibilities. All three installations were realized\ + \ with video cameras in the ceiling registering the users' position or movement.\ + \ The video analysis was then controlling different types of interactive software\ + \ audio players. Different aspects like narrativity, user control, and technical\ + \ limitations are discussed. },\n address = {Oslo, Norway},\n author = {Friberg,\ + \ Anders and K\\''{a}llblad, Anna},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178017},\n\ + \ issn = {2220-4806},\n keywords = {Gestures, dance, choreography, music installation,\ + \ interactive music. },\n pages = {128--131},\n title = {Experiences from Video-Controlled\ + \ Sound Installations},\n url = {http://www.nime.org/proceedings/2011/nime2011_128.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177261 + doi: 10.5281/zenodo.1178017 issn: 2220-4806 - keywords: 'Wireless, sensors, embedded devices, linux, real-time audio, real- time - video ' - pages: 203--204 - title: Extended Applications of the Wireless Sensor Array (WISEAR) - url: http://www.nime.org/proceedings/2007/nime2007_203.pdf - year: 2007 + keywords: 'Gestures, dance, choreography, music installation, interactive music. 
' + pages: 128--131 + title: Experiences from Video-Controlled Sound Installations + url: http://www.nime.org/proceedings/2011/nime2011_128.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Fernstrom2007 - abstract: 'In this paper, we describe a new wearable wireless sensor system for - solo or group dance performances. The system consists of a number of 25mm Wireless - Inertial Measurement Unit (WIMU) nodes designed at the Tyndall National Institute. - Each sensor node has two dual-axis accelerometers, three single axis gyroscopes - and two dual axis magnetometers, providing 6 Degrees of Freedom (DOF) movement - tracking. All sensors transmit data wirelessly to a basestation at a frequency - band and power that does not require licensing. The interface process has been - developed at the Interaction Design Center of the University of Limerick (Ireland). - The data are acquired and manipulated in well-know real-time software like pd - and Max/MSP. This paper presents the new system, describes the interface design - and outlines the main achievements of this collaborative research, which has been - named ‘Celeritas’.' - address: 'New York City, NY, United States' - author: 'Torre, Giuseppe and Fernström, Mikael and O''Flynn, Brendan and Angove, - Philip' - bibtex: "@inproceedings{Fernstrom2007,\n abstract = {In this paper, we describe\ - \ a new wearable wireless sensor system for solo or group dance performances.\ - \ The system consists of a number of 25mm Wireless Inertial Measurement Unit (WIMU)\ - \ nodes designed at the Tyndall National Institute. Each sensor node has two dual-axis\ - \ accelerometers, three single axis gyroscopes and two dual axis magnetometers,\ - \ providing 6 Degrees of Freedom (DOF) movement tracking. All sensors transmit\ - \ data wirelessly to a basestation at a frequency band and power that does not\ - \ require licensing. 
The interface process has been developed at the Interaction\ - \ Design Center of the University of Limerick (Ireland). The data are acquired\ - \ and manipulated in well-know real-time software like pd and Max/MSP. This paper\ - \ presents the new system, describes the interface design and outlines the main\ - \ achievements of this collaborative research, which has been named ‘Celeritas’.},\n\ - \ address = {New York City, NY, United States},\n author = {Torre, Giuseppe and\ - \ Fernstr\\''{o}m, Mikael and O'Flynn, Brendan and Angove, Philip},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179463},\n issn = {2220-4806},\n keywords\ - \ = {Inertial Measurement Unit, IMU, Position Tracking, Interactive Dance Performance,\ - \ Graphical Object, Mapping. },\n pages = {205--208},\n title = {Celeritas : Wearable\ - \ Wireless System},\n url = {http://www.nime.org/proceedings/2007/nime2007_205.pdf},\n\ - \ year = {2007}\n}\n" + ID: dAlessandro2011 + address: 'Oslo, Norway' + author: 'd''Alessandro, Nicolas and Calderon, Roberto and M\''''{u}ller, Stefanie' + bibtex: "@inproceedings{dAlessandro2011,\n address = {Oslo, Norway},\n author =\ + \ {d'Alessandro, Nicolas and Calderon, Roberto and M\\''{u}ller, Stefanie},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177933},\n issn = {2220-4806},\n\ + \ keywords = {agent,architecture,collaboration,figure 1,installation,instrument,interactive\ + \ fabric,light,mo-,movements in the installation,space and,tion,voice synthesis},\n\ + \ pages = {132--135},\n title = {ROOM #81---Agent-Based Instrument for Experiencing\ + \ Architectural and Vocal Cues},\n url = {http://www.nime.org/proceedings/2011/nime2011_132.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 
10.5281/zenodo.1179463 + doi: 10.5281/zenodo.1177933 issn: 2220-4806 - keywords: 'Inertial Measurement Unit, IMU, Position Tracking, Interactive Dance - Performance, Graphical Object, Mapping. ' - pages: 205--208 - title: 'Celeritas : Wearable Wireless System' - url: http://www.nime.org/proceedings/2007/nime2007_205.pdf - year: 2007 + keywords: 'agent,architecture,collaboration,figure 1,installation,instrument,interactive + fabric,light,mo-,movements in the installation,space and,tion,voice synthesis' + pages: 132--135 + title: "ROOM #81---Agent-Based Instrument for Experiencing Architectural and Vocal\ + \ Cues" + url: http://www.nime.org/proceedings/2011/nime2011_132.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Sinclair2007 - abstract: 'This paper presents an approach to audio-haptic integration that utilizes - Open Sound Control, an increasingly wellsupported standard for audio communication, - to initializeand communicate with dynamic virtual environments thatwork with off-the-shelf - force-feedback devices.' - address: 'New York City, NY, United States' - author: 'Sinclair, Stephen and Wanderley, Marcelo M.' 
- bibtex: "@inproceedings{Sinclair2007,\n abstract = {This paper presents an approach\ - \ to audio-haptic integration that utilizes Open Sound Control, an increasingly\ - \ wellsupported standard for audio communication, to initializeand communicate\ - \ with dynamic virtual environments thatwork with off-the-shelf force-feedback\ - \ devices.},\n address = {New York City, NY, United States},\n author = {Sinclair,\ - \ Stephen and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177245},\n\ - \ issn = {2220-4806},\n keywords = {Haptics, control, multi-modal, audio, force-feedback\ - \ },\n pages = {209--212},\n title = {Defining a Control Standard for Easily Integrating\ - \ Haptic Virtual Environments with Existing Audio / Visual Systems},\n url = {http://www.nime.org/proceedings/2007/nime2007_209.pdf},\n\ - \ year = {2007}\n}\n" + ID: Kuhara2011 + abstract: 'We developed a kinetic particles synthesizer for mobile devices having + a multi-touch screen such as a tablet PC and a smart phone. This synthesizer generates + music based on the kinetics of particles under a two-dimensional physics engine. + The particles move in the screen to synthesize sounds according to their own physical + properties, which are shape, size, mass, linear and angular velocity, friction, + restitution, etc. If a particle collides with others, a percussive sound is generated. + A player can play music by the simple operation of touching or dragging on the + screen of the device. Using a three-axis acceleration sensor, a player can perform + music by shuffling or tilting the device. Each particle sounds just a simple tone. + However, a large amount of various particles play attractive music by aggregating + their sounds. This concept has been inspired by natural sounds made from an assembly + of simple components, for example, rustling leaves or falling rain. 
For a novice + who has no experience of playing a musical instrument, it is easy to learn how + to play instantly and enjoy performing music with intuitive operation. Our system + is used for musical instruments for interactive music entertainment. ' + address: 'Oslo, Norway' + author: 'Kuhara, Yasuo and Kobayashi, Daiki' + bibtex: "@inproceedings{Kuhara2011,\n abstract = {We developed a kinetic particles\ + \ synthesizer for mobile devices having a multi-touch screen such as a tablet\ + \ PC and a smart phone. This synthesizer generates music based on the kinetics\ + \ of particles under a two-dimensional physics engine. The particles move in the\ + \ screen to synthesize sounds according to their own physical properties, which\ + \ are shape, size, mass, linear and angular velocity, friction, restitution, etc.\ + \ If a particle collides with others, a percussive sound is generated. A player\ + \ can play music by the simple operation of touching or dragging on the screen\ + \ of the device. Using a three-axis acceleration sensor, a player can perform\ + \ music by shuffling or tilting the device. Each particle sounds just a simple\ + \ tone. However, a large amount of various particles play attractive music by\ + \ aggregating their sounds. This concept has been inspired by natural sounds made\ + \ from an assembly of simple components, for example, rustling leaves or falling\ + \ rain. For a novice who has no experience of playing a musical instrument, it\ + \ is easy to learn how to play instantly and enjoy performing music with intuitive\ + \ operation. Our system is used for musical instruments for interactive music\ + \ entertainment. 
},\n address = {Oslo, Norway},\n author = {Kuhara, Yasuo and\ + \ Kobayashi, Daiki},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178079},\n\ + \ issn = {2220-4806},\n keywords = {Particle, Tablet PC, iPhone, iPod touch, iPad,\ + \ Smart phone, Kinetics, Touch screen, Physics engine. },\n pages = {136--137},\n\ + \ title = {Kinetic Particles Synthesizer Using Multi-Touch Screen Interface of\ + \ Mobile Devices},\n url = {http://www.nime.org/proceedings/2011/nime2011_136.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177245 + doi: 10.5281/zenodo.1178079 issn: 2220-4806 - keywords: 'Haptics, control, multi-modal, audio, force-feedback ' - pages: 209--212 - title: Defining a Control Standard for Easily Integrating Haptic Virtual Environments - with Existing Audio / Visual Systems - url: http://www.nime.org/proceedings/2007/nime2007_209.pdf - year: 2007 + keywords: 'Particle, Tablet PC, iPhone, iPod touch, iPad, Smart phone, Kinetics, + Touch screen, Physics engine. ' + pages: 136--137 + title: Kinetic Particles Synthesizer Using Multi-Touch Screen Interface of Mobile + Devices + url: http://www.nime.org/proceedings/2011/nime2011_136.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Donaldson2007 - abstract: 'Chroma based representations of acoustic phenomenon are representations - of sound as pitched acoustic energy. A framewise chroma distribution over an entire - musical piece is a useful and straightforward representation of its musical pitch - over time. This paper examines a method of condensing the block-wise chroma information - of a musical piece into a two dimensional embedding. Such an embedding is a representation - or map of the different pitched energies in a song, and how these energies relate - to each other in the context of the song. 
The paper presents an interactive version - of this representation as an exploratory analytical tool or instrument for granular - synthesis. Pointing and clicking on the interactive map recreates the acoustical - energy present in the chroma blocks at that location, providing an effective way - of both exploring the relationships between sounds in the original piece, and - recreating a synthesized approximation of these sounds in an instrumental fashion. ' - address: 'New York City, NY, United States' - author: 'Donaldson, Justin and Knopke, Ian and Raphael, Chris' - bibtex: "@inproceedings{Donaldson2007,\n abstract = {Chroma based representations\ - \ of acoustic phenomenon are representations of sound as pitched acoustic energy.\ - \ A framewise chroma distribution over an entire musical piece is a useful and\ - \ straightforward representation of its musical pitch over time. This paper examines\ - \ a method of condensing the block-wise chroma information of a musical piece\ - \ into a two dimensional embedding. Such an embedding is a representation or map\ - \ of the different pitched energies in a song, and how these energies relate to\ - \ each other in the context of the song. The paper presents an interactive version\ - \ of this representation as an exploratory analytical tool or instrument for granular\ - \ synthesis. Pointing and clicking on the interactive map recreates the acoustical\ - \ energy present in the chroma blocks at that location, providing an effective\ - \ way of both exploring the relationships between sounds in the original piece,\ - \ and recreating a synthesized approximation of these sounds in an instrumental\ - \ fashion. 
},\n address = {New York City, NY, United States},\n author = {Donaldson,\ - \ Justin and Knopke, Ian and Raphael, Chris},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177085},\n issn = {2220-4806},\n keywords = {Chroma, granular\ - \ synthesis, dimensionality reduction },\n pages = {213--219},\n title = {Chroma\ - \ Palette : Chromatic Maps of Sound As Granular Synthesis Interface},\n url =\ - \ {http://www.nime.org/proceedings/2007/nime2007_213.pdf},\n year = {2007}\n}\n" + ID: Carlson2011 + address: 'Oslo, Norway' + author: 'Carlson, Chris and Marschner, Eli and Mccurry, Hunter' + bibtex: "@inproceedings{Carlson2011,\n address = {Oslo, Norway},\n author = {Carlson,\ + \ Chris and Marschner, Eli and Mccurry, Hunter},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177981},\n issn = {2220-4806},\n keywords = {arduino,beagleboard,ccrma,force\ + \ feedback,haptics,jack,linux audio,multi-channel audio,nime,pd,pure data,satellite\ + \ ccrma,sound spatialization},\n pages = {138--139},\n title = {The Sound Flinger\ + \ : A Haptic Spatializer},\n url = {http://www.nime.org/proceedings/2011/nime2011_138.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177085 + doi: 10.5281/zenodo.1177981 issn: 2220-4806 - keywords: 'Chroma, granular synthesis, dimensionality reduction ' - pages: 213--219 - title: 'Chroma Palette : Chromatic Maps of Sound As Granular Synthesis Interface' - url: http://www.nime.org/proceedings/2007/nime2007_213.pdf - year: 2007 + keywords: 'arduino,beagleboard,ccrma,force feedback,haptics,jack,linux audio,multi-channel + audio,nime,pd,pure data,satellite ccrma,sound spatialization' + pages: 138--139 + title: 'The Sound Flinger : A Haptic Spatializer' + url: 
http://www.nime.org/proceedings/2011/nime2011_138.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Collins2007 - address: 'New York City, NY, United States' - author: 'Collins, Nick' - bibtex: "@inproceedings{Collins2007,\n address = {New York City, NY, United States},\n\ - \ author = {Collins, Nick},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177075},\n\ - \ issn = {2220-4806},\n keywords = {accompaniment,concatenative sound syn-,feature\ - \ matching,inner parts,interactive mu-,melodic similarity,nime07,thesis},\n pages\ - \ = {220--223},\n title = {Matching Parts : Inner Voice Led Control for Symbolic\ - \ and Audio Accompaniment},\n url = {http://www.nime.org/proceedings/2007/nime2007_220.pdf},\n\ - \ year = {2007}\n}\n" + ID: Kondapalli2011 + abstract: 'Daft Datum is an autonomous new media artefact that takes input from + movement of the feet (i.e. tapping/stomping/stamping) on a wooden surface, underneath + which is a sensor sheet. The sensors in the sheet are mapped to various sound + samples and synthesized sounds. Attributes of the synthesized sound, such as pitch + and octave, can be controlled using the Nintendo Wii Remote. It also facilitates + switching between modes of sound and recording/playing back a segment of audio. + The result is music generated by dancing on the device that is further modulated + by a hand-held controller. ' + address: 'Oslo, Norway' + author: 'Kondapalli, Ravi and Sung, Ben-Zhen' + bibtex: "@inproceedings{Kondapalli2011,\n abstract = {Daft Datum is an autonomous\ + \ new media artefact that takes input from movement of the feet (i.e. tapping/stomping/stamping)\ + \ on a wooden surface, underneath which is a sensor sheet. The sensors in the\ + \ sheet are mapped to various sound samples and synthesized sounds. Attributes\ + \ of the synthesized sound, such as pitch and octave, can be controlled using\ + \ the Nintendo Wii Remote. 
It also facilitates switching between modes of sound\ + \ and recording/playing back a segment of audio. The result is music generated\ + \ by dancing on the device that is further modulated by a hand-held controller.\ + \ },\n address = {Oslo, Norway},\n author = {Kondapalli, Ravi and Sung, Ben-Zhen},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178075},\n issn = {2220-4806},\n\ + \ keywords = {Daft Datum, Wii, Dance Pad, Feet, Controller, Bluetooth, Musical\ + \ Interface, Dance, Sensor Sheet },\n pages = {140--141},\n title = {Daft Datum\ + \ -- An Interface for Producing Music Through Foot-based Interaction},\n url =\ + \ {http://www.nime.org/proceedings/2011/nime2011_140.pdf},\n year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177075 + doi: 10.5281/zenodo.1178075 issn: 2220-4806 - keywords: 'accompaniment,concatenative sound syn-,feature matching,inner parts,interactive - mu-,melodic similarity,nime07,thesis' - pages: 220--223 - title: 'Matching Parts : Inner Voice Led Control for Symbolic and Audio Accompaniment' - url: http://www.nime.org/proceedings/2007/nime2007_220.pdf - year: 2007 - - -- ENTRYTYPE: inproceedings - ID: Cartwright2007 - abstract: "This report presents the design and construct ion of Rage in Conjunction\ - \ with the Machine, a simple but novel pairing of musical interface and sound\ - \ sculpture. The ,\n,\nauthors discuss the design and creation of this instrument\ - \ , focusing on the unique aspects of it, including the use of physical systems,\ - \ large gestural input, scale, and the electronic coupling of a physical input\ - \ to a physical output." 
- address: 'New York City, NY, United States' - author: 'Cartwright, Mark and Jones, Matt and Terasawa, Hiroko' - bibtex: "@inproceedings{Cartwright2007,\n abstract = {This report presents the design\ - \ and construct ion of Rage in Conjunction with the Machine, a simple but novel\ - \ pairing of musical interface and sound sculpture. The ,\n,\nauthors discuss\ - \ the design and creation of this instrument , focusing on the unique aspects\ - \ of it, including the use of physical systems, large gestural input, scale, and\ - \ the electronic coupling of a physical input to a physical output.},\n address\ - \ = {New York City, NY, United States},\n author = {Cartwright, Mark and Jones,\ - \ Matt and Terasawa, Hiroko},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177063},\n\ - \ issn = {2220-4806},\n keywords = {audience participation,inflatable,instrume\ - \ nt design,instrume nt size,mapping,musical,new musical instrument,nime07,physical\ - \ systems,sound scultpure},\n pages = {224--227},\n title = {Rage in Conjunction\ - \ with the Machine},\n url = {http://www.nime.org/proceedings/2007/nime2007_224.pdf},\n\ - \ year = {2007}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1177063 - issn: 2220-4806 - keywords: 'audience participation,inflatable,instrume nt design,instrume nt size,mapping,musical,new - musical instrument,nime07,physical systems,sound scultpure' - pages: 224--227 - title: Rage in Conjunction with the Machine - url: http://www.nime.org/proceedings/2007/nime2007_224.pdf - year: 2007 + keywords: 'Daft Datum, Wii, Dance Pad, Feet, Controller, Bluetooth, Musical Interface, + Dance, Sensor Sheet ' + pages: 140--141 + title: Daft Datum -- An Interface for Producing Music Through Foot-based Interaction + url: http://www.nime.org/proceedings/2011/nime2011_140.pdf + year: 2011 - ENTRYTYPE: 
inproceedings - ID: Weinberg2007 - abstract: 'The paper presents the theoretical background and the design scheme for - a perceptual and improvisational robotic marimba player that interacts with human - musicians in a visual and acoustic manner. Informed by an evaluation of a previously - developed robotic percussionist, we present the extension of our work to melodic - and harmonic realms with the design of a robotic player that listens to, analyzes - and improvises pitch-based musical materials. After a presentation of the motivation - for the project, theoretical background and related work, we present a set of - research questions followed by a description of hardware and software approaches - that address these questions. The paper concludes with a description of our plans - to implement and embed these approaches in a robotic marimba player that will - be used in workshops and concerts.' - address: 'New York City, NY, United States' - author: 'Weinberg, Gil and Driscoll, Scott' - bibtex: "@inproceedings{Weinberg2007,\n abstract = {The paper presents the theoretical\ - \ background and the design scheme for a perceptual and improvisational robotic\ - \ marimba player that interacts with human musicians in a visual and acoustic\ - \ manner. Informed by an evaluation of a previously developed robotic percussionist,\ - \ we present the extension of our work to melodic and harmonic realms with the\ - \ design of a robotic player that listens to, analyzes and improvises pitch-based\ - \ musical materials. 
After a presentation of the motivation for the project, theoretical\ - \ background and related work, we present a set of research questions followed\ - \ by a description of hardware and software approaches that address these questions.\ - \ The paper concludes with a description of our plans to implement and embed these\ - \ approaches in a robotic marimba player that will be used in workshops and concerts.},\n\ - \ address = {New York City, NY, United States},\n author = {Weinberg, Gil and\ - \ Driscoll, Scott},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179477},\n\ - \ issn = {2220-4806},\n keywords = {human-machine interaction,improvisation,nime07,perceptual\ - \ modeling,robotic musicianship},\n pages = {228--233},\n title = {The Design\ - \ of a Robotic Marimba Player -- Introducing Pitch into Robotic Musicianship},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_228.pdf},\n year = {2007}\n\ + ID: Martin2011 + abstract: 'This paper describes Strike on Stage, an interface and corresponding audio-visual performance work developed and + performed in 2010 by percussionists and media artists Chi-Hsia Lai and Charles Martin. The concept of + Strike on Stage is to integrate computer visuals and sound into animprovised + percussion performance. A large projection surface is positioned directly behind the performers, while acomputer + vision system tracks their movements. The setup allows computer visualisation + and sonification to be directly responsive and unified with the performers'' gestures.' + address: 'Oslo, Norway' + author: 'Martin, Charles and Lai, Chi-Hsia' + bibtex: "@inproceedings{Martin2011,\n abstract = {This paper describes Strike on\ + \ Stage, an interface and corresponding audio-visual performance work developed\ + \ and performed in 2010 by percussionists and media artists Chi-Hsia Lai and\ + \ Charles Martin. 
The concept of Strike on Stage is to integrate computer\ + \ visuals and sound into animprovised percussion performance. A large projection\ + \ surface is positioned directly behind the performers, while acomputer\ + \ vision system tracks their movements. The setup allows computer visualisation\ + \ and sonification to be directly responsive and unified with the performers'\ + \ gestures.},\n address = {Oslo, Norway},\n author = {Martin, Charles and Lai,\ + \ Chi-Hsia},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178103},\n issn\ + \ = {2220-4806},\n keywords = {computer vision, media performance, percussion},\n\ + \ pages = {142--143},\n title = {Strike on Stage: a Percussion and Media Performance},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_142.pdf},\n year = {2011}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179477 + doi: 10.5281/zenodo.1178103 issn: 2220-4806 - keywords: 'human-machine interaction,improvisation,nime07,perceptual modeling,robotic - musicianship' - pages: 228--233 - title: The Design of a Robotic Marimba Player -- Introducing Pitch into Robotic - Musicianship - url: http://www.nime.org/proceedings/2007/nime2007_228.pdf - year: 2007 + keywords: 'computer vision, media performance, percussion' + pages: 142--143 + title: 'Strike on Stage: a Percussion and Media Performance' + url: http://www.nime.org/proceedings/2011/nime2011_142.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Robertson2007 - abstract: 'This paper describes the development of B-Keeper, a reatime beat tracking - system implemented in Java and Max/MSP,which is capable of maintaining synchronisation - between anelectronic sequencer and a drummer. This enables musicians to interact - with electronic parts which are triggeredautomatically by the computer from performance - information. 
We describe an implementation which functions withthe sequencer Ableton - Live.' - address: 'New York City, NY, United States' - author: 'Robertson, Andrew and Plumbley, Mark D.' - bibtex: "@inproceedings{Robertson2007,\n abstract = {This paper describes the development\ - \ of B-Keeper, a reatime beat tracking system implemented in Java and Max/MSP,which\ - \ is capable of maintaining synchronisation between anelectronic sequencer and\ - \ a drummer. This enables musicians to interact with electronic parts which are\ - \ triggeredautomatically by the computer from performance information. We describe\ - \ an implementation which functions withthe sequencer Ableton Live.},\n address\ - \ = {New York City, NY, United States},\n author = {Robertson, Andrew and Plumbley,\ - \ Mark D.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177231},\n issn\ - \ = {2220-4806},\n keywords = {Human-Computer Interaction, Automatic Accompaniment,\ - \ Performance },\n pages = {234--237},\n title = {B-Keeper : A Beat-Tracker for\ - \ Live Performance},\n url = {http://www.nime.org/proceedings/2007/nime2007_234.pdf},\n\ - \ year = {2007}\n}\n" + ID: Caramiaux2011a + abstract: 'In this paper we present an experimental study concerninggestural embodiment + of environmental sounds in a listeningcontext. The presented work is part of a + project aiming atmodeling movement-sound relationships, with the end goalof proposing + novel approaches for designing musical instruments and sounding objects. The experiment + is based onsound stimuli corresponding to "causal" and "non-causal" sounds. It + is divided into a performance phase and an interview. The experiment is designed + to investigate possiblecorrelation between the perception of the "causality" of + environmental sounds and different gesture strategies for thesound embodiment. 
+ In analogy with the perception of thesounds'' causality, we propose to distinguish + gestures that "mimic" a sound''s cause and gestures that "trace" a sound''smorphology + following temporal sound characteristics. Results from the interviews show that, + first, our causal soundsdatabase lead to consistent descriptions of the action + at theorigin of the sound and participants mimic this action. Second, non-causal + sounds lead to inconsistent metaphoric descriptions of the sound and participants + make gestures following sound "contours". Quantitatively, the results showthat + gesture variability is higher for causal sounds that noncausal sounds.' + address: 'Oslo, Norway' + author: 'Caramiaux, Baptiste and Susini, Patrick and Bianco, Tommaso and Bevilacqua, + Frédéric and Houix, Olivier and Schnell, Norbert and Misdariis, Nicolas' + bibtex: "@inproceedings{Caramiaux2011a,\n abstract = {In this paper we present an\ + \ experimental study concerninggestural embodiment of environmental sounds in\ + \ a listeningcontext. The presented work is part of a project aiming atmodeling\ + \ movement-sound relationships, with the end goalof proposing novel approaches\ + \ for designing musical instruments and sounding objects. The experiment is based\ + \ onsound stimuli corresponding to \"causal\" and \"non-causal\" sounds. It is\ + \ divided into a performance phase and an interview. The experiment is designed\ + \ to investigate possiblecorrelation between the perception of the \"causality\"\ + \ of environmental sounds and different gesture strategies for thesound embodiment.\ + \ In analogy with the perception of thesounds' causality, we propose to distinguish\ + \ gestures that \"mimic\" a sound's cause and gestures that \"trace\" a sound'smorphology\ + \ following temporal sound characteristics. 
Results from the interviews show that,\ + \ first, our causal soundsdatabase lead to consistent descriptions of the action\ + \ at theorigin of the sound and participants mimic this action. Second, non-causal\ + \ sounds lead to inconsistent metaphoric descriptions of the sound and participants\ + \ make gestures following sound \"contours\". Quantitatively, the results showthat\ + \ gesture variability is higher for causal sounds that noncausal sounds.},\n address\ + \ = {Oslo, Norway},\n author = {Caramiaux, Baptiste and Susini, Patrick and Bianco,\ + \ Tommaso and Bevilacqua, Fr\\'{e}d\\'{e}ric and Houix, Olivier and Schnell, Norbert\ + \ and Misdariis, Nicolas},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177979},\n\ + \ issn = {2220-4806},\n keywords = {Embodiment, Environmental Sound Perception,\ + \ Listening, Gesture Sound Interaction },\n pages = {144--148},\n presentation-video\ + \ = {https://vimeo.com/26805553/},\n title = {Gestural Embodiment of Environmental\ + \ Sounds: an Experimental Study},\n url = {http://www.nime.org/proceedings/2011/nime2011_144.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177231 + doi: 10.5281/zenodo.1177979 issn: 2220-4806 - keywords: 'Human-Computer Interaction, Automatic Accompaniment, Performance ' - pages: 234--237 - title: 'B-Keeper : A Beat-Tracker for Live Performance' - url: http://www.nime.org/proceedings/2007/nime2007_234.pdf - year: 2007 + keywords: 'Embodiment, Environmental Sound Perception, Listening, Gesture Sound + Interaction ' + pages: 144--148 + presentation-video: https://vimeo.com/26805553/ + title: 'Gestural Embodiment of Environmental Sounds: an Experimental Study' + url: http://www.nime.org/proceedings/2011/nime2011_144.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Kapur2007 - abstract: 'This paper 
describes a system enabling a human to perform music with - a robot in real-time, in the context of North Indian classical music. We modify - a traditional acoustic sitar into a hyperinstrument in order to capture performance - gestures for musical analysis. A custom built four-armed robotic Indian drummer - was built using a microchip, solenoids, aluminum and folk frame drums. Algorithms - written towards "intelligent" machine musicianship are described. The final goal - of this research is to have a robotic drummer accompany a professional human sitar - player live in performance. ' - address: 'New York City, NY, United States' - author: 'Kapur, Ajay and Singer, Eric and Benning, Manjinder S. and Tzanetakis, - George and Trimpin, Trimpin' - bibtex: "@inproceedings{Kapur2007,\n abstract = {This paper describes a system enabling\ - \ a human to perform music with a robot in real-time, in the context of North\ - \ Indian classical music. We modify a traditional acoustic sitar into a hyperinstrument\ - \ in order to capture performance gestures for musical analysis. A custom built\ - \ four-armed robotic Indian drummer was built using a microchip, solenoids, aluminum\ - \ and folk frame drums. Algorithms written towards \"intelligent\" machine musicianship\ - \ are described. The final goal of this research is to have a robotic drummer\ - \ accompany a professional human sitar player live in performance. },\n address\ - \ = {New York City, NY, United States},\n author = {Kapur, Ajay and Singer, Eric\ - \ and Benning, Manjinder S. and Tzanetakis, George and Trimpin, Trimpin},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177137},\n issn = {2220-4806},\n keywords\ - \ = {Musical Robotics, Electronic Sitar, Hyperinstruments, Music Information Retrieval\ - \ (MIR). 
},\n pages = {238--241},\n title = {Integrating HyperInstruments , Musical\ - \ Robots \\& Machine Musicianship for North {India}n Classical Music},\n url =\ - \ {http://www.nime.org/proceedings/2007/nime2007_238.pdf},\n year = {2007}\n}\n" + ID: Mealla2011 + abstract: 'The use of physiological signals in Human Computer Interaction (HCI) + is becoming popular and widespread, mostly due to sensors miniaturization and + advances in real-time processing. However, most of the studies that use physiology + based interaction focus on single-user paradigms, and its usage in collaborative + scenarios is still in its beginning. In this paper we explore how interactive + sonification of brain and heart signals, and its representation through physical + objects (physiopucks) in a tabletop interface may enhance motivational and controlling + aspects of music collaboration. A multimodal system is presented, based on an + electrophysiology sensor system and the Reactable, a musical tabletop interface. + Performance and motivation variables were assessed in an experiment involving + a test "Physio" group(N=22) and a control "Placebo" group (N=10). Pairs of participants + used two methods for sound creation: implicit interaction through physiological + signals, and explicit interaction by means of gestural manipulation. The results + showed that pairs in the Physio Group declared less difficulty, higher confidence + and more symmetric control than the Placebo Group, where no real-time sonification + was provided as subjects were using pre-recorded physiological signal being unaware + of it. These results support the feasibility of introducing physiology-based interaction + in multimodal interfaces for collaborative music generation.' 
+ address: 'Oslo, Norway' + author: 'Mealla, Sebastián and Väaljamäae, Aleksander and Bosi, Mathieu and Jordà, + Sergi' + bibtex: "@inproceedings{Mealla2011,\n abstract = {The use of physiological signals\ + \ in Human Computer Interaction (HCI) is becoming popular and widespread, mostly\ + \ due to sensors miniaturization and advances in real-time processing. However,\ + \ most of the studies that use physiology based interaction focus on single-user\ + \ paradigms, and its usage in collaborative scenarios is still in its beginning.\ + \ In this paper we explore how interactive sonification of brain and heart signals,\ + \ and its representation through physical objects (physiopucks) in a tabletop\ + \ interface may enhance motivational and controlling aspects of music collaboration.\ + \ A multimodal system is presented, based on an electrophysiology sensor system\ + \ and the Reactable, a musical tabletop interface. Performance and motivation\ + \ variables were assessed in an experiment involving a test \"Physio\" group(N=22)\ + \ and a control \"Placebo\" group (N=10). Pairs of participants used two methods\ + \ for sound creation: implicit interaction through physiological signals, and\ + \ explicit interaction by means of gestural manipulation. The results showed that\ + \ pairs in the Physio Group declared less difficulty, higher confidence and more\ + \ symmetric control than the Placebo Group, where no real-time sonification was\ + \ provided as subjects were using pre-recorded physiological signal being unaware\ + \ of it. 
These results support the feasibility of introducing physiology-based\ + \ interaction in multimodal interfaces for collaborative music generation.},\n\ + \ address = {Oslo, Norway},\n author = {Mealla, Sebasti\\'{a}n and V\\''{a}aljam\\\ + ''{a}ae, Aleksander and Bosi, Mathieu and Jord\\`{a}, Sergi},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178107},\n issn = {2220-4806},\n keywords = {bci, collaboration,\ + \ cscw, hci, multimodal interfaces, music, physiological computing, physiopucks,\ + \ tabletops, universitat pompeu fabra},\n pages = {149--154},\n presentation-video\ + \ = {https://vimeo.com/26806576/},\n title = {Listening to Your Brain: Implicit\ + \ Interaction in Collaborative Music Performances},\n url = {http://www.nime.org/proceedings/2011/nime2011_149.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177137 + doi: 10.5281/zenodo.1178107 issn: 2220-4806 - keywords: 'Musical Robotics, Electronic Sitar, Hyperinstruments, Music Information - Retrieval (MIR). 
' - pages: 238--241 - title: 'Integrating HyperInstruments , Musical Robots & Machine Musicianship for - North Indian Classical Music' - url: http://www.nime.org/proceedings/2007/nime2007_238.pdf - year: 2007 + keywords: 'bci, collaboration, cscw, hci, multimodal interfaces, music, physiological + computing, physiopucks, tabletops, universitat pompeu fabra' + pages: 149--154 + presentation-video: https://vimeo.com/26806576/ + title: 'Listening to Your Brain: Implicit Interaction in Collaborative Music Performances' + url: http://www.nime.org/proceedings/2011/nime2011_149.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Clay2007 - abstract: 'The starting point for this project is the want to produce a music controller - that could be employed in such a manner that even lay public could enjoy the possibilities - of mobile art. All of the works that are discussed here are in relation to a new - GPS-based controller, the Wrist-Conductor. The works are technically based around - the synchronizing possibilities using the GPS Time Mark and are aesthetically - rooted in works that function in an open public space such as a city or a forest. - One of the works intended for the controller, China Gates, is discussed here in - detail in order to describe how the GPS Wrist-Controller is actually used in a - public art context. The other works, CitySonics, The Enchanted Forest and Get - a Pot & a Spoon are described briefly in order to demonstrate that even a simple - controller can be used to create a body of works. This paper also addresses the - breaking of the media bubble via the concept of the “open audience”, or how mobile - art can engage pedestrians as viewers or listeners within public space and not - remain an isolated experience for performers only.' 
- address: 'New York City, NY, United States' - author: 'Clay, Arthur and Majoe, Dennis' - bibtex: "@inproceedings{Clay2007,\n abstract = {The starting point for this project\ - \ is the want to produce a music controller that could be employed in such a manner\ - \ that even lay public could enjoy the possibilities of mobile art. All of the\ - \ works that are discussed here are in relation to a new GPS-based controller,\ - \ the Wrist-Conductor. The works are technically based around the synchronizing\ - \ possibilities using the GPS Time Mark and are aesthetically rooted in works\ - \ that function in an open public space such as a city or a forest. One of the\ - \ works intended for the controller, China Gates, is discussed here in detail\ - \ in order to describe how the GPS Wrist-Controller is actually used in a public\ - \ art context. The other works, CitySonics, The Enchanted Forest and Get a Pot\ - \ \\& a Spoon are described briefly in order to demonstrate that even a simple\ - \ controller can be used to create a body of works. This paper also addresses\ - \ the breaking of the media bubble via the concept of the “open audience”, or\ - \ how mobile art can engage pedestrians as viewers or listeners within public\ - \ space and not remain an isolated experience for performers only.},\n address\ - \ = {New York City, NY, United States},\n author = {Clay, Arthur and Majoe, Dennis},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177073},\n issn = {2220-4806},\n\ - \ keywords = {Mobile Music, GPS, Controller, Collaborative Performance },\n pages\ - \ = {242--245},\n title = {The Wrist-Conductor},\n url = {http://www.nime.org/proceedings/2007/nime2007_242.pdf},\n\ - \ year = {2007}\n}\n" + ID: Newton2011 + abstract: 'This paper examines the creation of augmented musicalinstruments by a + number of musicians. 
Equipped with asystem called the Augmentalist, 10 musicians + created newaugmented instruments based on their traditional acousticor electric + instruments. This paper discusses the ways inwhich the musicians augmented their + instruments, examines the similarities and differences between the resultinginstruments + and presents a number of interesting findingsresulting from this process.' + address: 'Oslo, Norway' + author: 'Newton, Dan and Marshall, Mark T.' + bibtex: "@inproceedings{Newton2011,\n abstract = {This paper examines the creation\ + \ of augmented musicalinstruments by a number of musicians. Equipped with asystem\ + \ called the Augmentalist, 10 musicians created newaugmented instruments based\ + \ on their traditional acousticor electric instruments. This paper discusses the\ + \ ways inwhich the musicians augmented their instruments, examines the similarities\ + \ and differences between the resultinginstruments and presents a number of interesting\ + \ findingsresulting from this process.},\n address = {Oslo, Norway},\n author\ + \ = {Newton, Dan and Marshall, Mark T.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178121},\n\ + \ issn = {2220-4806},\n keywords = {Augmented Instruments, Instrument Design,\ + \ Digital Musical Instruments, Performance },\n pages = {155--160},\n presentation-video\ + \ = {https://vimeo.com/26807158/},\n title = {Examining How Musicians Create Augmented\ + \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2011/nime2011_155.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177073 + doi: 10.5281/zenodo.1178121 issn: 2220-4806 - keywords: 'Mobile Music, GPS, Controller, Collaborative Performance ' - pages: 242--245 - title: The Wrist-Conductor - url: http://www.nime.org/proceedings/2007/nime2007_242.pdf - year: 2007 + 
keywords: 'Augmented Instruments, Instrument Design, Digital Musical Instruments, + Performance ' + pages: 155--160 + presentation-video: https://vimeo.com/26807158/ + title: Examining How Musicians Create Augmented Musical Instruments + url: http://www.nime.org/proceedings/2011/nime2011_155.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Hollinger2007 - abstract: 'This paper presents an electronic piano keyboard and computer mouse designed - for use in a magnetic resonance imaging scanner. The interface allows neuroscientists - studying motor learning of musical tasks to perform functional scans of a subject''s - brain while synchronizing the scanner, auditory and visual stimuli, and auditory - feedback with the onset, offset, and velocity of the piano keys. The design of - the initial prototype and environment-specific issues are described, as well as - prior work in the field. Preliminary results are positive and were unable to show - the existence of image artifacts caused by the interface. Recommendations to improve - the optical assembly are provided in order to increase the robustness of the design. ' - address: 'New York City, NY, United States' - author: 'Hollinger, Avrum and Steele, Christopher and Penhune, Virginia and Zatorre, - Robert and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Hollinger2007,\n abstract = {This paper presents an electronic\ - \ piano keyboard and computer mouse designed for use in a magnetic resonance imaging\ - \ scanner. The interface allows neuroscientists studying motor learning of musical\ - \ tasks to perform functional scans of a subject's brain while synchronizing the\ - \ scanner, auditory and visual stimuli, and auditory feedback with the onset,\ - \ offset, and velocity of the piano keys. The design of the initial prototype\ - \ and environment-specific issues are described, as well as prior work in the\ - \ field. 
Preliminary results are positive and were unable to show the existence\ - \ of image artifacts caused by the interface. Recommendations to improve the optical\ - \ assembly are provided in order to increase the robustness of the design. },\n\ - \ address = {New York City, NY, United States},\n author = {Hollinger, Avrum and\ - \ Steele, Christopher and Penhune, Virginia and Zatorre, Robert and Wanderley,\ - \ Marcelo M.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177119},\n issn\ - \ = {2220-4806},\n keywords = {Input device, MRI-compatible, fMRI, motor learning,\ - \ optical sensing. },\n pages = {246--249},\n title = {fMRI-Compatible Electronic\ - \ Controllers},\n url = {http://www.nime.org/proceedings/2007/nime2007_246.pdf},\n\ - \ year = {2007}\n}\n" + ID: Seldess2011 + abstract: 'We present Tahakum, an open source, extensible collection of software + tools designed to enhance workflow on multichannel audio systems within complex + multi-functional research and development environments. Tahakum aims to provide + critical functionality required across a broad spectrum of audio systems usage + scenarios, while at the same time remaining sufficiently open as to easily support + modifications and extensions via 3rd party hardware and software. Features provided + in the framework include software for custom mixing/routing and audio system preset + automation, software for network message routing/redirection and protocol conversion, + and software for dynamic audio asset management and control. ' + address: 'Oslo, Norway' + author: 'Seldess, Zachary and Yamada, Toshiro' + bibtex: "@inproceedings{Seldess2011,\n abstract = {We present {Tahakum}, an open\ + \ source, extensible collection of software tools designed to enhance workflow\ + \ on multichannel audio systems within complex multi-functional research and development\ + \ environments. 
Tahakum aims to provide critical functionality required across\ + \ a broad spectrum of audio systems usage scenarios, while at the same time remaining\ + \ sufficiently open as to easily support modifications and extensions via 3rd\ + \ party hardware and software. Features provided in the framework include software\ + \ for custom mixing/routing and audio system preset automation, software for network\ + \ message routing/redirection and protocol conversion, and software for dynamic\ + \ audio asset management and control. },\n address = {Oslo, Norway},\n author\ + \ = {Seldess, Zachary and Yamada, Toshiro},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178159},\n issn = {2220-4806},\n keywords = {Audio Control\ + \ Systems, Audio for VR, Max/MSP, Spatial Audio },\n pages = {161--166},\n presentation-video\ + \ = {https://vimeo.com/26809966/},\n title = {Tahakum: A Multi-Purpose Audio Control\ + \ Framework},\n url = {http://www.nime.org/proceedings/2011/nime2011_161.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177119 + doi: 10.5281/zenodo.1178159 issn: 2220-4806 - keywords: 'Input device, MRI-compatible, fMRI, motor learning, optical sensing. ' - pages: 246--249 - title: fMRI-Compatible Electronic Controllers - url: http://www.nime.org/proceedings/2007/nime2007_246.pdf - year: 2007 + keywords: 'Audio Control Systems, Audio for VR, Max/MSP, Spatial Audio ' + pages: 161--166 + presentation-video: https://vimeo.com/26809966/ + title: 'Tahakum: A Multi-Purpose Audio Control Framework' + url: http://www.nime.org/proceedings/2011/nime2011_161.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Nagashima2007 - abstract: 'This is a report of research project about developing novel musical instruments - for interactive computer music. 
The project''s name - "GHI project" means that - "It might be good that musical instrument shines, isn''t it?" in Japanese. I examined - the essences of musical instruments again on proverb "Taking a lesson from the - past". At the first step, my project targeted and chose "Kendang" - the traditional - musical instrument of Indonesia.' - address: 'New York City, NY, United States' - author: 'Nagashima, Yoichi' - bibtex: "@inproceedings{Nagashima2007,\n abstract = {This is a report of research\ - \ project about developing novel musical instruments for interactive computer\ - \ music. The project's name - \"GHI project\" means that \"It might be good that\ - \ musical instrument shines, isn't it?\" in Japanese. I examined the essences\ - \ of musical instruments again on proverb \"Taking a lesson from the past\". At\ - \ the first step, my project targeted and chose \"Kendang\" - the traditional\ - \ musical instrument of Indonesia.},\n address = {New York City, NY, United States},\n\ - \ author = {Nagashima, Yoichi},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177205},\n\ - \ issn = {2220-4806},\n keywords = {kendang, media arts, new instruments, sound\ - \ and light},\n pages = {250--253},\n title = {GHI project and \"Cyber Kendang\"\ - },\n url = {http://www.nime.org/proceedings/2007/nime2007_250.pdf},\n year = {2007}\n\ - }\n" + ID: Liang2011 + abstract: 'Computer music systems that coordinate or interact with human musicians + exist in many forms. Often, coordination is at the level of gestures and phrases + without synchronization at the beat level (or perhaps the notion of "beat" does + not even exist). In music with beats, fine-grain synchronization can be achieved + by having humans adapt to the computer (e.g. following a click track), or by computer + accompaniment in which the computer follows a predetermined score. 
We consider + an alternative scenario in which improvisation prevents traditional score following, + but where synchronization is achieved at the level of beats, measures, and cues. + To explore this new type of human-computer interaction, we have created new software + abstractions for synchronization and coordination of music and interfaces in different + modalities. We describe these new software structures, present examples, and introduce + the idea of music notation as an interactive musical interface rather than a static + document. ' + address: 'Oslo, Norway' + author: 'Liang, Dawen and Xia, Guangyu and Dannenberg, Roger B.' + bibtex: "@inproceedings{Liang2011,\n abstract = {Computer music systems that coordinate\ + \ or interact with human musicians exist in many forms. Often, coordination is\ + \ at the level of gestures and phrases without synchronization at the beat level\ + \ (or perhaps the notion of \"beat\" does not even exist). In music with beats,\ + \ fine-grain synchronization can be achieved by having humans adapt to the computer\ + \ (e.g. following a click track), or by computer accompaniment in which the computer\ + \ follows a predetermined score. We consider an alternative scenario in which\ + \ improvisation prevents traditional score following, but where synchronization\ + \ is achieved at the level of beats, measures, and cues. To explore this new type\ + \ of human-computer interaction, we have created new software abstractions for\ + \ synchronization and coordination of music and interfaces in different modalities.\ + \ We describe these new software structures, present examples, and introduce the\ + \ idea of music notation as an interactive musical interface rather than a static\ + \ document. 
},\n address = {Oslo, Norway},\n author = {Liang, Dawen and Xia, Guangyu\ + \ and Dannenberg, Roger B.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178091},\n\ + \ issn = {2220-4806},\n keywords = {automatic accompaniment,interactive,music\ + \ display,popular music,real-time,synchronization},\n pages = {167--172},\n presentation-video\ + \ = {https://vimeo.com/26832515/},\n title = {A Framework for Coordination and\ + \ Synchronization of Media},\n url = {http://www.nime.org/proceedings/2011/nime2011_167.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177205 + doi: 10.5281/zenodo.1178091 issn: 2220-4806 - keywords: 'kendang, media arts, new instruments, sound and light' - pages: 250--253 - title: GHI project and "Cyber Kendang" - url: http://www.nime.org/proceedings/2007/nime2007_250.pdf - year: 2007 + keywords: 'automatic accompaniment,interactive,music display,popular music,real-time,synchronization' + pages: 167--172 + presentation-video: https://vimeo.com/26832515/ + title: A Framework for Coordination and Synchronization of Media + url: http://www.nime.org/proceedings/2011/nime2011_167.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Toyoda2007 - abstract: 'This study proposes new possibilities for interaction design pertaining - to music piece creation. Specifically, the study created an environment wherein - a wide range of users are able to easily experience new musical expressions via - a combination of newly developed software and the Nintendo Wii Remote controller. ' - address: 'New York City, NY, United States' - author: 'Toyoda, Shinichiro' - bibtex: "@inproceedings{Toyoda2007,\n abstract = {This study proposes new possibilities\ - \ for interaction design pertaining to music piece creation. 
Specifically, the\ - \ study created an environment wherein a wide range of users are able to easily\ - \ experience new musical expressions via a combination of newly developed software\ - \ and the Nintendo Wii Remote controller. },\n address = {New York City, NY, United\ - \ States},\n author = {Toyoda, Shinichiro},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179465},\n issn = {2220-4806},\n keywords = {Interactive systems,\ - \ improvisation, gesture, composition INTRODUCTION Though music related research\ - \ focusing on the interaction between people and computers is currently experiencing\ - \ wide range development, the history of approaches wherein the creation of new\ - \ musical expression is made possible via the active },\n pages = {254--255},\n\ - \ title = {Sensillum : An Improvisational Approach to Composition},\n url = {http://www.nime.org/proceedings/2007/nime2007_254.pdf},\n\ - \ year = {2007}\n}\n" + ID: Berdahl2011a + abstract: 'This paper describes a new Beagle Board-based platform forteaching and + practicing interaction design for musical applications. The migration from desktop + and laptop computerbased sound synthesis to a compact and integrated control, + computation and sound generation platform has enormous potential to widen the + range of computer music instruments and installations that can be designed, and + improvesthe portability, autonomy, extensibility and longevity of designed systems. + We describe the technical features of theSatellite CCRMA platform and contrast + it with personalcomputer-based systems used in the past as well as emergingsmart + phone-based platforms. The advantages and tradeoffs of the new platform are considered, + and some projectwork is described.' 
+ address: 'Oslo, Norway' + author: 'Berdahl, Edgar and Ju, Wendy' + bibtex: "@inproceedings{Berdahl2011a,\n abstract = {This paper describes a new Beagle\ + \ Board-based platform forteaching and practicing interaction design for musical\ + \ applications. The migration from desktop and laptop computerbased sound synthesis\ + \ to a compact and integrated control, computation and sound generation platform\ + \ has enormous potential to widen the range of computer music instruments and\ + \ installations that can be designed, and improvesthe portability, autonomy, extensibility\ + \ and longevity of designed systems. We describe the technical features of theSatellite\ + \ CCRMA platform and contrast it with personalcomputer-based systems used in the\ + \ past as well as emergingsmart phone-based platforms. The advantages and tradeoffs\ + \ of the new platform are considered, and some projectwork is described.},\n address\ + \ = {Oslo, Norway},\n author = {Berdahl, Edgar and Ju, Wendy},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177957},\n issn = {2220-4806},\n keywords = {arduino,beagle\ + \ board,instruments omap,linux,microcontrollers,music controllers,nime,pd,pedagogy,texas},\n\ + \ pages = {173--178},\n presentation-video = {https://vimeo.com/26833829/},\n\ + \ title = {Satellite CCRMA: A Musical Interaction and Sound Synthesis Platform},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_173.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179465 + doi: 10.5281/zenodo.1177957 issn: 2220-4806 - keywords: 'Interactive systems, improvisation, gesture, composition INTRODUCTION - Though music related research focusing on the interaction between people and computers - is currently experiencing wide range development, the history of approaches wherein - the creation 
of new musical expression is made possible via the active ' - pages: 254--255 - title: 'Sensillum : An Improvisational Approach to Composition' - url: http://www.nime.org/proceedings/2007/nime2007_254.pdf - year: 2007 + keywords: 'arduino,beagle board,instruments omap,linux,microcontrollers,music controllers,nime,pd,pedagogy,texas' + pages: 173--178 + presentation-video: https://vimeo.com/26833829/ + title: 'Satellite CCRMA: A Musical Interaction and Sound Synthesis Platform' + url: http://www.nime.org/proceedings/2011/nime2011_173.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Gruenbaum2007 - abstract: 'Almost all traditional musical instruments have a one-to-one correspondence - between a given fingering and the pitch that sounds for that fingering. The Samchillian - Tip Tip Tip Cheeepeeeee does not --- it is a keyboard MIDI controller that is - based on intervals rather than fixed pitches. That is, a given keypress will sound - a pitch a number of steps away from the last note sounded (within the key signature - and scale selected) according to the ''delta'' value assigned to that key. The - advantages of such a system are convenience, speed, and the ability to play difficult, - unusual and/or unintended passages extemporaneously. ' - address: 'New York City, NY, United States' - author: 'Gruenbaum, Leon' - bibtex: "@inproceedings{Gruenbaum2007,\n abstract = {Almost all traditional musical\ - \ instruments have a one-to-one correspondence between a given fingering and the\ - \ pitch that sounds for that fingering. The Samchillian Tip Tip Tip Cheeepeeeee\ - \ does not --- it is a keyboard MIDI controller that is based on intervals rather\ - \ than fixed pitches. That is, a given keypress will sound a pitch a number of\ - \ steps away from the last note sounded (within the key signature and scale selected)\ - \ according to the 'delta' value assigned to that key. 
The advantages of such\ - \ a system are convenience, speed, and the ability to play difficult, unusual\ - \ and/or unintended passages extemporaneously. },\n address = {New York City,\ - \ NY, United States},\n author = {Gruenbaum, Leon},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177103},\n issn = {2220-4806},\n keywords = {samchillian,\ - \ keyboard, MIDI controller, relative, interval, microtonal, computer keyboard,\ - \ pitch, musical instrument },\n pages = {256--259},\n title = {The Samchillian\ - \ Tip Tip Tip Cheeepeeeee : A Relativistic Keyboard Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_256.pdf},\n\ - \ year = {2007}\n}\n" + ID: Bryan2011 + abstract: 'A novel method of digital scratching is presented as an alternative to + currently available digital hardware interfaces and time-coded vinyl (TCV). Similar + to TCV, the proposed method leverages existing analog turntables as a physical + interface to manipulate the playback of digital audio. To doso, however, an accelerometer/gyroscope–equipped + smartphone is firmly attached to a modified record, placed on a turntable, and + used to sense a performers movement, resulting in a wireless sensing-based scratching + method. The accelerometer and gyroscope data is wirelessly transmitted to a computer + to manipulate the digital audio playback in real-time. The method provides the + benefit of digital audio and storage, requires minimal additional hardware, accommodates + familiar proprioceptive feedback, and allows a single interface to control both + digital and analog audio. In addition, the proposed method provides numerous additional + benefits including real-time graphical display,multi-touch interaction, and untethered + performance (e.g“air-scratching”). 
Such a method turns a vinyl record into an + interactive surface and enhances traditional scratching performance by affording + new and creative musical interactions. Informal testing show this approach to + be viable,responsive, and robust.' + address: 'Oslo, Norway' + author: 'Bryan, Nicholas J. and Wang, Ge' + bibtex: "@inproceedings{Bryan2011,\n abstract = {A novel method of digital scratching\ + \ is presented as an alternative to currently available digital hardware interfaces\ + \ and time-coded vinyl (TCV). Similar to TCV, the proposed method leverages existing\ + \ analog turntables as a physical interface to manipulate the playback of digital\ + \ audio. To doso, however, an accelerometer/gyroscope–equipped smartphone is firmly\ + \ attached to a modified record, placed on a turntable, and used to sense a performers\ + \ movement, resulting in a wireless sensing-based scratching method. The accelerometer\ + \ and gyroscope data is wirelessly transmitted to a computer to manipulate the\ + \ digital audio playback in real-time. The method provides the benefit of digital\ + \ audio and storage, requires minimal additional hardware, accommodates familiar\ + \ proprioceptive feedback, and allows a single interface to control both digital\ + \ and analog audio. In addition, the proposed method provides numerous additional\ + \ benefits including real-time graphical display,multi-touch interaction, and\ + \ untethered performance (e.g“air-scratching”). Such a method turns a vinyl record\ + \ into an interactive surface and enhances traditional scratching performance\ + \ by affording new and creative musical interactions. Informal testing show this\ + \ approach to be viable,responsive, and robust.},\n address = {Oslo, Norway},\n\ + \ author = {Bryan, Nicholas J. 
and Wang, Ge},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177971},\n issn = {2220-4806},\n keywords = {Digital scratching,\ + \ mobile music, digital DJ, smartphone, turntable, turntablism, record player,\ + \ accelerometer, gyroscope, vinyl emulation software },\n pages = {179--184},\n\ + \ presentation-video = {https://vimeo.com/26835277/},\n title = {Two Turntables\ + \ and a Mobile Phone},\n url = {http://www.nime.org/proceedings/2011/nime2011_179.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177103 + doi: 10.5281/zenodo.1177971 issn: 2220-4806 - keywords: 'samchillian, keyboard, MIDI controller, relative, interval, microtonal, - computer keyboard, pitch, musical instrument ' - pages: 256--259 - title: 'The Samchillian Tip Tip Tip Cheeepeeeee : A Relativistic Keyboard Instrument' - url: http://www.nime.org/proceedings/2007/nime2007_256.pdf - year: 2007 + keywords: 'Digital scratching, mobile music, digital DJ, smartphone, turntable, + turntablism, record player, accelerometer, gyroscope, vinyl emulation software ' + pages: 179--184 + presentation-video: https://vimeo.com/26835277/ + title: Two Turntables and a Mobile Phone + url: http://www.nime.org/proceedings/2011/nime2011_179.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Freeman2007 - abstract: 'Graph Theory links the creative music-making activities of web site visitors - to the dynamic generation of an instrumental score for solo violin. Participants - use a web-based interface to navigate among short, looping musical fragments to - create their own unique path through the open-form composition. Before each concert - performance, the violinist prints out a new copy of the score that orders the - fragments based on the decisions made by web visitors. 
' - address: 'New York City, NY, United States' - author: 'Freeman, Jason' - bibtex: "@inproceedings{Freeman2007,\n abstract = {Graph Theory links the creative\ - \ music-making activities of web site visitors to the dynamic generation of an\ - \ instrumental score for solo violin. Participants use a web-based interface to\ - \ navigate among short, looping musical fragments to create their own unique path\ - \ through the open-form composition. Before each concert performance, the violinist\ - \ prints out a new copy of the score that orders the fragments based on the decisions\ - \ made by web visitors. },\n address = {New York City, NY, United States},\n author\ - \ = {Freeman, Jason},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177095},\n\ - \ issn = {2220-4806},\n keywords = {Music, Composition, Residency, Audience Interaction,\ - \ Collaboration, Violin, Graph, Flash, Internet, Traveling Salesman. },\n pages\ - \ = {260--263},\n title = {Graph Theory : Interfacing Audiences Into the Compositional\ - \ Process},\n url = {http://www.nime.org/proceedings/2007/nime2007_260.pdf},\n\ - \ year = {2007}\n}\n" + ID: Kruge2011 + abstract: 'MadPad is a networked audiovisual sample station for mobile devices. + Twelve short video clips are loaded onto thescreen in a grid and playback is triggered + by tapping anywhere on the clip. This is similar to tapping the pads of anaudio + sample station, but extends that interaction to addvisual sampling. Clips can + be shot on-the-fly with a cameraenabled mobile device and loaded into the player + instantly,giving the performer an ability to quickly transform his orher surroundings + into a sample-based, audiovisual instrument. Samples can also be sourced from + an online community in which users can post or download content. 
The recent ubiquity + of multitouch mobile devices and advances inpervasive computing have made this + system possible, providing for a vast amount of content only limited by theimagination + of the performer and the community. This paper presents the core features of MadPad + and the designexplorations that inspired them.' + address: 'Oslo, Norway' + author: 'Kruge, Nick and Wang, Ge' + bibtex: "@inproceedings{Kruge2011,\n abstract = {MadPad is a networked audiovisual\ + \ sample station for mobile devices. Twelve short video clips are loaded onto\ + \ thescreen in a grid and playback is triggered by tapping anywhere on the clip.\ + \ This is similar to tapping the pads of anaudio sample station, but extends that\ + \ interaction to addvisual sampling. Clips can be shot on-the-fly with a cameraenabled\ + \ mobile device and loaded into the player instantly,giving the performer an ability\ + \ to quickly transform his orher surroundings into a sample-based, audiovisual\ + \ instrument. Samples can also be sourced from an online community in which users\ + \ can post or download content. The recent ubiquity of multitouch mobile devices\ + \ and advances inpervasive computing have made this system possible, providing\ + \ for a vast amount of content only limited by theimagination of the performer\ + \ and the community. 
This paper presents the core features of MadPad and the designexplorations\ + \ that inspired them.},\n address = {Oslo, Norway},\n author = {Kruge, Nick and\ + \ Wang, Ge},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178077},\n issn\ + \ = {2220-4806},\n keywords = {mobile music, networked music, social music, audiovisual,\ + \ sampling, user-generated content, crowdsourcing, sample station, iPad, iPhone\ + \ },\n pages = {185--190},\n presentation-video = {https://vimeo.com/26855684/},\n\ + \ title = {MadPad: A Crowdsourcing System for Audiovisual Sampling},\n url = {http://www.nime.org/proceedings/2011/nime2011_185.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177095 + doi: 10.5281/zenodo.1178077 issn: 2220-4806 - keywords: 'Music, Composition, Residency, Audience Interaction, Collaboration, Violin, - Graph, Flash, Internet, Traveling Salesman. ' - pages: 260--263 - title: 'Graph Theory : Interfacing Audiences Into the Compositional Process' - url: http://www.nime.org/proceedings/2007/nime2007_260.pdf - year: 2007 + keywords: 'mobile music, networked music, social music, audiovisual, sampling, user-generated + content, crowdsourcing, sample station, iPad, iPhone ' + pages: 185--190 + presentation-video: https://vimeo.com/26855684/ + title: 'MadPad: A Crowdsourcing System for Audiovisual Sampling' + url: http://www.nime.org/proceedings/2011/nime2011_185.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Villar2007 - abstract: 'This paper describes the design and implementation of a new interface - prototype for live music mixing. The ColorDex system employs a completely new - operational metaphor which allows the mix DJ to prepare up to six tracks at once, - and perform mixes between up to three of those at a time. 
The basic premises of - the design are: 1) Build a performance tool that multiplies the possible choices - a DJ has in respect in how and when tracks are prepared and mixed; 2) Design the - system in such a way that the tool does not overload the performer with unnecessary - complexity, and 3) Make use of novel technology to make the performance of live - music mixing more engaging for both the performer and the audience. The core components - of the system are: A software program to load, visualize and playback digitally - encoded tracks; the HDDJ device (built chiefly out of a repurposed hard disk drive), - which provides tactile manipulation of the playback speed and position of tracks; - and the Cubic Crossfader, a wireless sensor cube that controls of the volume of - individual tracks, and allows the DJ to mix these in interesting ways. ' - address: 'New York City, NY, United States' - author: 'Villar, Nicolas and Gellersen, Hans and Jervis, Matt and Lang, Alexander' - bibtex: "@inproceedings{Villar2007,\n abstract = {This paper describes the design\ - \ and implementation of a new interface prototype for live music mixing. The ColorDex\ - \ system employs a completely new operational metaphor which allows the mix DJ\ - \ to prepare up to six tracks at once, and perform mixes between up to three of\ - \ those at a time. The basic premises of the design are: 1) Build a performance\ - \ tool that multiplies the possible choices a DJ has in respect in how and when\ - \ tracks are prepared and mixed; 2) Design the system in such a way that the tool\ - \ does not overload the performer with unnecessary complexity, and 3) Make use\ - \ of novel technology to make the performance of live music mixing more engaging\ - \ for both the performer and the audience. 
The core components of the system are:\ - \ A software program to load, visualize and playback digitally encoded tracks;\ - \ the HDDJ device (built chiefly out of a repurposed hard disk drive), which provides\ - \ tactile manipulation of the playback speed and position of tracks; and the Cubic\ - \ Crossfader, a wireless sensor cube that controls of the volume of individual\ - \ tracks, and allows the DJ to mix these in interesting ways. },\n address = {New\ - \ York City, NY, United States},\n author = {Villar, Nicolas and Gellersen, Hans\ - \ and Jervis, Matt and Lang, Alexander},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179475},\n\ - \ issn = {2220-4806},\n keywords = {Novel interfaces, live music-mixing, cube-based\ - \ interfaces, crossfading, repurposing HDDs, accelerometer-based cubic control\ - \ },\n pages = {264--269},\n title = {The ColorDex DJ System : A New Interface\ - \ for Live Music Mixing},\n url = {http://www.nime.org/proceedings/2007/nime2007_264.pdf},\n\ - \ year = {2007}\n}\n" + ID: Keefe2011 + abstract: 'Visual information integration in mobile music performanceis an area + that has not been thoroughly explored and currentapplications are often individually + designed. From camerainput to flexible output rendering, we discuss visual performance + support in the context of urMus, a meta-environmentfor mobile interaction and + performance development. Theuse of cameras, a set of image primitives, interactive + visualcontent, projectors, and camera flashes can lead to visuallyintriguing performance + possibilities.' + address: 'Oslo, Norway' + author: 'Keefe, Patrick O. and Essl, Georg' + bibtex: "@inproceedings{Keefe2011,\n abstract = {Visual information integration\ + \ in mobile music performanceis an area that has not been thoroughly explored\ + \ and currentapplications are often individually designed. 
From camerainput to\ + \ flexible output rendering, we discuss visual performance support in the context\ + \ of urMus, a meta-environmentfor mobile interaction and performance development.\ + \ Theuse of cameras, a set of image primitives, interactive visualcontent, projectors,\ + \ and camera flashes can lead to visuallyintriguing performance possibilities.},\n\ + \ address = {Oslo, Norway},\n author = {Keefe, Patrick O. and Essl, Georg},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178061},\n issn = {2220-4806},\n\ + \ keywords = {Mobile performance, visual interaction, camera phone, mobile collaboration\ + \ },\n pages = {191--196},\n presentation-video = {https://vimeo.com/26836592/},\n\ + \ title = {The Visual in Mobile Music Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_191.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179475 + doi: 10.5281/zenodo.1178061 issn: 2220-4806 - keywords: 'Novel interfaces, live music-mixing, cube-based interfaces, crossfading, - repurposing HDDs, accelerometer-based cubic control ' - pages: 264--269 - title: 'The ColorDex DJ System : A New Interface for Live Music Mixing' - url: http://www.nime.org/proceedings/2007/nime2007_264.pdf - year: 2007 + keywords: 'Mobile performance, visual interaction, camera phone, mobile collaboration ' + pages: 191--196 + presentation-video: https://vimeo.com/26836592/ + title: The Visual in Mobile Music Performance + url: http://www.nime.org/proceedings/2011/nime2011_191.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Dahl2007 - abstract: 'In this paper, we describe a musical controller – the WaveSaw – for directly - manipulating a wavetable. The WaveSaw consists of a long, flexible metal strip - with handles on either end, somewhat analogous to a saw. 
The user plays the WaveSaw - by holding the handles and bending the metal strip. We use sensors to measure - the strip’s curvature and reconstruct its shape as a wavetable stored in a computer. - This provides a direct gestural mapping from the shape of the WaveSaw to the timbral - characteristics of the computer-generated sound. Additional sensors provide control - of pitch, amplitude, and other musical parameters.' - address: 'New York City, NY, United States' - author: 'Dahl, Luke and Whetsell, Nathan and Van Stoecker, John' - bibtex: "@inproceedings{Dahl2007,\n abstract = {In this paper, we describe a musical\ - \ controller – the WaveSaw – for directly manipulating a wavetable. The WaveSaw\ - \ consists of a long, flexible metal strip with handles on either end, somewhat\ - \ analogous to a saw. The user plays the WaveSaw by holding the handles and bending\ - \ the metal strip. We use sensors to measure the strip’s curvature and reconstruct\ - \ its shape as a wavetable stored in a computer. This provides a direct gestural\ - \ mapping from the shape of the WaveSaw to the timbral characteristics of the\ - \ computer-generated sound. Additional sensors provide control of pitch, amplitude,\ - \ and other musical parameters.},\n address = {New York City, NY, United States},\n\ - \ author = {Dahl, Luke and Whetsell, Nathan and Van Stoecker, John},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177079},\n issn = {2220-4806},\n keywords\ - \ = {Musical controller, Puredata, scanned synthesis, flex sensors. },\n pages\ - \ = {270--272},\n title = {The WaveSaw : A Flexible Instrument for Direct Timbral\ - \ Manipulation},\n url = {http://www.nime.org/proceedings/2007/nime2007_270.pdf},\n\ - \ year = {2007}\n}\n" + ID: Wang2011 + abstract: 'This paper describes the origin, design, and implementation of Smule''s + Magic Fiddle, an expressive musical instrument for the iPad. 
Magic Fiddle takes + advantage of the physical aspects of the device to integrate game-like and pedagogical + elements. We describe the origin of Magic Fiddle, chronicle its design process, + discuss its integrated music education system, and evaluate the overall experience. ' + address: 'Oslo, Norway' + author: 'Wang, Ge and Oh, Jieun and Lieber, Tom' + bibtex: "@inproceedings{Wang2011,\n abstract = {This paper describes the origin,\ + \ design, and implementation of Smule's Magic Fiddle, an expressive musical instrument\ + \ for the iPad. Magic Fiddle takes advantage of the physical aspects of the device\ + \ to integrate game-like and pedagogical elements. We describe the origin of Magic\ + \ Fiddle, chronicle its design process, discuss its integrated music education\ + \ system, and evaluate the overall experience. },\n address = {Oslo, Norway},\n\ + \ author = {Wang, Ge and Oh, Jieun and Lieber, Tom},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178187},\n issn = {2220-4806},\n keywords = {Magic Fiddle,\ + \ iPad, physical interaction design, experiential design, music education. },\n\ + \ pages = {197--202},\n presentation-video = {https://vimeo.com/26857032/},\n\ + \ title = {Designing for the iPad: Magic Fiddle},\n url = {http://www.nime.org/proceedings/2011/nime2011_197.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177079 + doi: 10.5281/zenodo.1178187 issn: 2220-4806 - keywords: 'Musical controller, Puredata, scanned synthesis, flex sensors. ' - pages: 270--272 - title: 'The WaveSaw : A Flexible Instrument for Direct Timbral Manipulation' - url: http://www.nime.org/proceedings/2007/nime2007_270.pdf - year: 2007 + keywords: 'Magic Fiddle, iPad, physical interaction design, experiential design, + music education. 
' + pages: 197--202 + presentation-video: https://vimeo.com/26857032/ + title: 'Designing for the iPad: Magic Fiddle' + url: http://www.nime.org/proceedings/2011/nime2011_197.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Bennett2007 - abstract: 'This paper proposes that the physicality of an instrument be considered - an important aspect in the design of new interfaces for musical expression. The - use of Laban''s theory of effort in the design of new effortful interfaces, in - particular looking at effortspace modulation, is investigated, and a platform - for effortful interface development (named the DAMPER) is described. Finally, - future work is described and further areas of research are highlighted. ' - address: 'New York City, NY, United States' - author: 'Bennett, Peter and Ward, Nicholas and O''Modhrain, Sile and Rebelo, Pedro' - bibtex: "@inproceedings{Bennett2007,\n abstract = {This paper proposes that the\ - \ physicality of an instrument be considered an important aspect in the design\ - \ of new interfaces for musical expression. The use of Laban's theory of effort\ - \ in the design of new effortful interfaces, in particular looking at effortspace\ - \ modulation, is investigated, and a platform for effortful interface development\ - \ (named the DAMPER) is described. Finally, future work is described and further\ - \ areas of research are highlighted. },\n address = {New York City, NY, United\ - \ States},\n author = {Bennett, Peter and Ward, Nicholas and O'Modhrain, Sile\ - \ and Rebelo, Pedro},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177041},\n\ - \ issn = {2220-4806},\n keywords = {Effortful Interaction. Haptics. Laban Analysis.\ - \ Physicality. HCI. 
},\n pages = {273--276},\n title = {DAMPER : A Platform for\ - \ Effortful Interface Development},\n url = {http://www.nime.org/proceedings/2007/nime2007_273.pdf},\n\ - \ year = {2007}\n}\n" + ID: Knapp2011 + abstract: 'This paper describes a new interface for mobile music creation, the MobileMuse, + that introduces the capability of using physiological indicators of emotion + as a new mode of interaction. Combining both kinematic and physiological measurement + in a mobile environment creates the possibility of integral music control—the + use of both gesture and emotion to control sound creation—where it has never been + possible before. This paper will review the concept of integral music control + and describe the motivation for creating the MobileMuse, its design and future + possibilities.' + address: 'Oslo, Norway' + author: 'Knapp, Benjamin and Bortz, Brennon' + bibtex: "@inproceedings{Knapp2011,\n abstract = {This paper describes a new interface\ + \ for mobile music creation, the MobileMuse, that introduces the capability of\ + \ using physiological indicators of emotion as a new mode of interaction. Combining\ + \ both kinematic and physiological measurement in a mobile environment creates\ + \ the possibility of integral music control—the use of both gesture and emotion\ + \ to control sound creation—where it has never been possible before. 
This paper\ + \ will review the concept of integral music control and describe the motivation\ + \ for creating the MobileMuse, its design and future possibilities.},\n address\ + \ = {Oslo, Norway},\n author = {Knapp, Benjamin and Bortz, Brennon},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178073},\n issn = {2220-4806},\n keywords\ + \ = {affective computing,bile music performance,mo-,physiological signal measurement},\n\ + \ pages = {203--206},\n presentation-video = {https://vimeo.com/26858339/},\n\ + \ title = {MobileMuse: Integral Music Control Goes Mobile},\n url = {http://www.nime.org/proceedings/2011/nime2011_203.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177041 + doi: 10.5281/zenodo.1178073 issn: 2220-4806 - keywords: 'Effortful Interaction. Haptics. Laban Analysis. Physicality. HCI. ' - pages: 273--276 - title: 'DAMPER : A Platform for Effortful Interface Development' - url: http://www.nime.org/proceedings/2007/nime2007_273.pdf - year: 2007 + keywords: 'affective computing,bile music performance,mo-,physiological signal measurement' + pages: 203--206 + presentation-video: https://vimeo.com/26858339/ + title: 'MobileMuse: Integral Music Control Goes Mobile' + url: http://www.nime.org/proceedings/2011/nime2011_203.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Francois2007 - abstract: 'This paper describes the design of Mimi, a multi-modal interactive musical - improvisation system that explores the potential and powerful impact of visual - feedback in performermachine interaction. Mimi is a performer-centric tool designed - for use in performance and teaching. Its key andnovel component is its visual - interface, designed to providethe performer with instantaneous and continuous - information on the state of the system. 
For human improvisation,in which context - and planning are paramount, the relevantstate of the system extends to the near - future and recentpast. Mimi''s visual interface allows for a peculiar blendof - raw reflex typically associated with improvisation, andpreparation and timing - more closely affiliated with scorebased reading. Mimi is not only an effective - improvisationpartner, it has also proven itself to be an invaluable platformthrough - which to interrogate the mental models necessaryfor successful improvisation.' - address: 'New York City, NY, United States' - author: 'François, Alexandre R. and Chew, Elaine and Thurmond, Dennis' - bibtex: "@inproceedings{Francois2007,\n abstract = {This paper describes the design\ - \ of Mimi, a multi-modal interactive musical improvisation system that explores\ - \ the potential and powerful impact of visual feedback in performermachine interaction.\ - \ Mimi is a performer-centric tool designed for use in performance and teaching.\ - \ Its key andnovel component is its visual interface, designed to providethe performer\ - \ with instantaneous and continuous information on the state of the system. For\ - \ human improvisation,in which context and planning are paramount, the relevantstate\ - \ of the system extends to the near future and recentpast. Mimi's visual interface\ - \ allows for a peculiar blendof raw reflex typically associated with improvisation,\ - \ andpreparation and timing more closely affiliated with scorebased reading. Mimi\ - \ is not only an effective improvisationpartner, it has also proven itself to\ - \ be an invaluable platformthrough which to interrogate the mental models necessaryfor\ - \ successful improvisation.},\n address = {New York City, NY, United States},\n\ - \ author = {Fran\\c{c}ois, Alexandre R. 
and Chew, Elaine and Thurmond, Dennis},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177091},\n issn = {2220-4806},\n\ - \ keywords = {Performer-machine interaction, visualization design, machine improvisation\ - \ },\n pages = {277--280},\n title = {Visual Feedback in Performer-Machine Interaction\ - \ for Musical Improvisation},\n url = {http://www.nime.org/proceedings/2007/nime2007_277.pdf},\n\ - \ year = {2007}\n}\n" + ID: Beck2011 + abstract: 'Laptop Orchestras (LOs) have recently become a very popular mode of musical + expression. They engage groups ofperformers to use ordinary laptop computers as + instrumentsand sound sources in the performance of specially createdmusic software. + Perhaps the biggest challenge for LOs isthe distribution, management and control + of software acrossheterogeneous collections of networked computers. Software must + be stored and distributed from a central repository, but launched on individual + laptops immediately beforeperformance. The GRENDL project leverages proven gridcomputing + frameworks and approaches the Laptop Orchestra as a distributed computing platform + for interactive computer music. This allows us to readily distribute softwareto + each laptop in the orchestra depending on the laptop''sinternal configuration, + its role in the composition, and theplayer assigned to that computer. Using the + SAGA framework, GRENDL is able to distribute software and managesystem and application + environments for each composition.Our latest version includes tangible control + of the GRENDLenvironment for a more natural and familiar user experience.' + address: 'Oslo, Norway' + author: 'Beck, Stephen D. and Branton, Chris and Maddineni, Sharath' + bibtex: "@inproceedings{Beck2011,\n abstract = {Laptop Orchestras (LOs) have recently\ + \ become a very popular mode of musical expression. 
They engage groups ofperformers\ + \ to use ordinary laptop computers as instrumentsand sound sources in the performance\ + \ of specially createdmusic software. Perhaps the biggest challenge for LOs isthe\ + \ distribution, management and control of software acrossheterogeneous collections\ + \ of networked computers. Software must be stored and distributed from a central\ + \ repository, but launched on individual laptops immediately beforeperformance.\ + \ The GRENDL project leverages proven gridcomputing frameworks and approaches\ + \ the Laptop Orchestra as a distributed computing platform for interactive computer\ + \ music. This allows us to readily distribute softwareto each laptop in the orchestra\ + \ depending on the laptop'sinternal configuration, its role in the composition,\ + \ and theplayer assigned to that computer. Using the SAGA framework, GRENDL is\ + \ able to distribute software and managesystem and application environments for\ + \ each composition.Our latest version includes tangible control of the GRENDLenvironment\ + \ for a more natural and familiar user experience.},\n address = {Oslo, Norway},\n\ + \ author = {Beck, Stephen D. 
and Branton, Chris and Maddineni, Sharath},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177951},\n issn = {2220-4806},\n keywords\ + \ = {laptop orchestra, tangible interaction, grid computing },\n pages = {207--210},\n\ + \ presentation-video = {https://vimeo.com/26860960/},\n title = {Tangible Performance\ + \ Management of Grid-based Laptop Orchestras},\n url = {http://www.nime.org/proceedings/2011/nime2011_207.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177091 + doi: 10.5281/zenodo.1177951 issn: 2220-4806 - keywords: 'Performer-machine interaction, visualization design, machine improvisation ' - pages: 277--280 - title: Visual Feedback in Performer-Machine Interaction for Musical Improvisation - url: http://www.nime.org/proceedings/2007/nime2007_277.pdf - year: 2007 + keywords: 'laptop orchestra, tangible interaction, grid computing ' + pages: 207--210 + presentation-video: https://vimeo.com/26860960/ + title: Tangible Performance Management of Grid-based Laptop Orchestras + url: http://www.nime.org/proceedings/2011/nime2011_207.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Poepel2007 - abstract: 'Many fascinating new developments in the area bowed stringed instruments - have been developed in recent years. However, the majority of these new applications - are either not well known, used orconsidered in a broader context by their target - users. The necessaryexchange between the world of developers and the players is - ratherlimited. A group of performers, researchers, instrument developersand composers - was founded in order to share expertise and experiences and to give each other - feedback on the work done to developnew instruments. 
Instruments incorporating - new interfaces, synthesis methods, sensor technology, new materials like carbon - fiber andwood composites as well as composite materials and research outcome are - presented and discussed in the group. This paper gives anintroduction to the group - and reports about activities and outcomesin the last two years.' - address: 'New York City, NY, United States' - author: 'Poepel, Cornelius and Marx, Günter' - bibtex: "@inproceedings{Poepel2007,\n abstract = {Many fascinating new developments\ - \ in the area bowed stringed instruments have been developed in recent years.\ - \ However, the majority of these new applications are either not well known, used\ - \ orconsidered in a broader context by their target users. The necessaryexchange\ - \ between the world of developers and the players is ratherlimited. A group of\ - \ performers, researchers, instrument developersand composers was founded in order\ - \ to share expertise and experiences and to give each other feedback on the work\ - \ done to developnew instruments. Instruments incorporating new interfaces, synthesis\ - \ methods, sensor technology, new materials like carbon fiber andwood composites\ - \ as well as composite materials and research outcome are presented and discussed\ - \ in the group. 
This paper gives anintroduction to the group and reports about\ - \ activities and outcomesin the last two years.},\n address = {New York City,\ - \ NY, United States},\n author = {Poepel, Cornelius and Marx, G\\''{u}nter},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177221},\n issn = {2220-4806},\n\ - \ keywords = {Interdisciplinary user group, electronic bowed string instrument,\ - \ evaluation of computer based musical instruments },\n pages = {281--284},\n\ - \ title = {>hot\\_strings SIG},\n url = {http://www.nime.org/proceedings/2007/nime2007_281.pdf},\n\ - \ year = {2007}\n}\n" + ID: Dimitrov2011 + abstract: 'A contemporary PC user, typically expects a sound cardto be a piece of + hardware, that: can be manipulated by''audio'' software (most typically exemplified + by ''media players''); and allows interfacing of the PC to audio reproduction + and/or recording equipment. As such, a ''sound card''can be considered to be a + system, that encompasses designdecisions on both hardware and software levels + -- that also demand a certain understanding of the architecture of thetarget PC + operating system.This project outlines how an Arduino Duemillanoveboard (containing + a USB interface chip, manufactured byFuture Technology Devices International Ltd + [FTDI]company) can be demonstrated to behave as a full-duplex,mono, 8-bit 44.1 + kHz soundcard, through an implementation of: a PC audio driver for ALSA (Advanced + LinuxSound Architecture); a matching program for theArduino''sATmega microcontroller + -- and nothing more than headphones (and a couple of capacitors). The main contributionof + this paper is to bring a holistic aspect to the discussionon the topic of implementation + of soundcards -- also by referring to open-source driver, microcontroller code + and testmethods; and outline a complete implementation of an open -- yet functional + -- soundcard system.' 
+ address: 'Oslo, Norway' + author: 'Dimitrov, Smilen and Serafin, Stefania' + bibtex: "@inproceedings{Dimitrov2011,\n abstract = {A contemporary PC user, typically\ + \ expects a sound cardto be a piece of hardware, that: can be manipulated by'audio'\ + \ software (most typically exemplified by 'media players'); and allows interfacing\ + \ of the PC to audio reproduction and/or recording equipment. As such, a 'sound\ + \ card'can be considered to be a system, that encompasses designdecisions on both\ + \ hardware and software levels -- that also demand a certain understanding of\ + \ the architecture of thetarget PC operating system.This project outlines how\ + \ an Arduino Duemillanoveboard (containing a USB interface chip, manufactured\ + \ byFuture Technology Devices International Ltd [FTDI]company) can be demonstrated\ + \ to behave as a full-duplex,mono, 8-bit 44.1 kHz soundcard, through an implementation\ + \ of: a PC audio driver for ALSA (Advanced LinuxSound Architecture); a matching\ + \ program for theArduino'sATmega microcontroller -- and nothing more than headphones\ + \ (and a couple of capacitors). 
The main contributionof this paper is to bring\ + \ a holistic aspect to the discussionon the topic of implementation of soundcards\ + \ -- also by referring to open-source driver, microcontroller code and testmethods;\ + \ and outline a complete implementation of an open -- yet functional -- soundcard\ + \ system.},\n address = {Oslo, Norway},\n author = {Dimitrov, Smilen and Serafin,\ + \ Stefania},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177997},\n issn\ + \ = {2220-4806},\n keywords = {alsa,arduino,audio,driver,linux,sound card},\n\ + \ pages = {211--216},\n title = {Audio Arduino -- an ALSA (Advanced Linux Sound\ + \ Architecture) Audio Driver for FTDI-based Arduinos},\n url = {http://www.nime.org/proceedings/2011/nime2011_211.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177221 + doi: 10.5281/zenodo.1177997 issn: 2220-4806 - keywords: 'Interdisciplinary user group, electronic bowed string instrument, evaluation - of computer based musical instruments ' - pages: 281--284 - title: '>hot_strings SIG' - url: http://www.nime.org/proceedings/2007/nime2007_281.pdf - year: 2007 + keywords: 'alsa,arduino,audio,driver,linux,sound card' + pages: 211--216 + title: Audio Arduino -- an ALSA (Advanced Linux Sound Architecture) Audio Driver + for FTDI-based Arduinos + url: http://www.nime.org/proceedings/2011/nime2011_211.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Cook2007 - abstract: 'Description of a project, inspired by the theory of affordance, exploring - the issues of visceral expression and audience engagement in the realm of computer - performance. Describes interaction design research techniques in novel application, - used to engage and gain insight into the culture and mindset of the improvising - musician. 
This research leads to the design and implementation of a prototype - system that allows musicians to play an object of their choice as a musical instrument.' - address: 'New York City, NY, United States' - author: 'Cook, Andrew A. and Pullin, Graham' - bibtex: "@inproceedings{Cook2007,\n abstract = {Description of a project, inspired\ - \ by the theory of affordance, exploring the issues of visceral expression and\ - \ audience engagement in the realm of computer performance. Describes interaction\ - \ design research techniques in novel application, used to engage and gain insight\ - \ into the culture and mindset of the improvising musician. This research leads\ - \ to the design and implementation of a prototype system that allows musicians\ - \ to play an object of their choice as a musical instrument.},\n address = {New\ - \ York City, NY, United States},\n author = {Cook, Andrew A. and Pullin, Graham},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177077},\n issn = {2220-4806},\n\ - \ keywords = {1,affordance,background and problem space,cultural probes,design\ - \ research,improvisation,interaction design,nime07,performance},\n pages = {285--288},\n\ - \ title = {Tactophonics : Your Favourite Thing Wants to Sing},\n url = {http://www.nime.org/proceedings/2007/nime2007_285.pdf},\n\ - \ year = {2007}\n}\n" + ID: Kim2011a + abstract: 'In this paper, we introduce a pipe interface that recognizestouch on + tone holes by the resonances in the pipe instead ofa touch sensor. This work was + based on the acoustic principles of woodwind instruments without complex sensors + andelectronic circuits to develop a simple and durable interface.The measured + signals were analyzed to show that differentfingerings generate various sounds. 
+ The audible resonancesignal in the pipe interface can be used as a sonic event + formusical expression by itself and also as an input parameterfor mapping different + sounds.' + address: 'Oslo, Norway' + author: 'Kim, Seunghun and Yeo, Woon Seung' + bibtex: "@inproceedings{Kim2011a,\n abstract = {In this paper, we introduce a pipe\ + \ interface that recognizestouch on tone holes by the resonances in the pipe instead\ + \ ofa touch sensor. This work was based on the acoustic principles of woodwind\ + \ instruments without complex sensors andelectronic circuits to develop a simple\ + \ and durable interface.The measured signals were analyzed to show that differentfingerings\ + \ generate various sounds. The audible resonancesignal in the pipe interface can\ + \ be used as a sonic event formusical expression by itself and also as an input\ + \ parameterfor mapping different sounds.},\n address = {Oslo, Norway},\n author\ + \ = {Kim, Seunghun and Yeo, Woon Seung},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178067},\n\ + \ issn = {2220-4806},\n keywords = {resonance, mapping, pipe },\n pages = {217--219},\n\ + \ title = {Musical Control of a Pipe Based on Acoustic Resonance},\n url = {http://www.nime.org/proceedings/2011/nime2011_217.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177077 + doi: 10.5281/zenodo.1178067 issn: 2220-4806 - keywords: '1,affordance,background and problem space,cultural probes,design research,improvisation,interaction - design,nime07,performance' - pages: 285--288 - title: 'Tactophonics : Your Favourite Thing Wants to Sing' - url: http://www.nime.org/proceedings/2007/nime2007_285.pdf - year: 2007 + keywords: 'resonance, mapping, pipe ' + pages: 217--219 + title: Musical Control of a Pipe Based on Acoustic Resonance + url: 
http://www.nime.org/proceedings/2011/nime2011_217.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Perez2007 - abstract: 'In this paper, we describe the composition of a piece for choir and Integral - Music Controller. We focus more on the aesthetic, conceptual, and practical aspects - of the interface and less on the technological details. We especially stress the - influence that the designed interface poses on the compositional process and how - we approach the expressive organisation of musical materials during the composition - of the piece, as well as the addition of nuances (personal real-time expression) - by the musicians at performance time. ' - address: 'New York City, NY, United States' - author: 'Pérez, Miguel A. and Knapp, Benjamin and Alcorn, Michael' - bibtex: "@inproceedings{Perez2007,\n abstract = {In this paper, we describe the\ - \ composition of a piece for choir and Integral Music Controller. We focus more\ - \ on the aesthetic, conceptual, and practical aspects of the interface and less\ - \ on the technological details. We especially stress the influence that the designed\ - \ interface poses on the compositional process and how we approach the expressive\ - \ organisation of musical materials during the composition of the piece, as well\ - \ as the addition of nuances (personal real-time expression) by the musicians\ - \ at performance time. },\n address = {New York City, NY, United States},\n author\ - \ = {P\\'{e}rez, Miguel A. and Knapp, Benjamin and Alcorn, Michael},\n booktitle\ + ID: Hansen2011 + abstract: 'In this paper a collaborative music game for two pen tablets is studied + in order to see how two people with no professional music background negotiated + musical improvisation. 
In an initial study of what it is that constitutes play + fluency in improvisation, a music game has been designed and evaluated through + video analysis: A qualitative view of mutual action describes the social context + of music improvisation: how two people with speech, laughter, gestures, postures + and pauses negotiate individual and joint action. The objective behind the design + of the game application was to support players in some aspects of their mutual + play. Results show that even though players activated additional sound feedback + as a result of their mutual play, players also engaged in forms of mutual play + that the game engine did not account for. These ways of mutual play are descibed + further along with some suggestions for how to direct future designs of collaborative + music improvisation games towards ways of mutual play. ' + address: 'Oslo, Norway' + author: 'Hansen, Anne-Marie S. and Anderson, Hans J. and Raudaskoski, Pirkko' + bibtex: "@inproceedings{Hansen2011,\n abstract = {In this paper a collaborative\ + \ music game for two pen tablets is studied in order to see how two people with\ + \ no professional music background negotiated musical improvisation. In an initial\ + \ study of what it is that constitutes play fluency in improvisation, a music\ + \ game has been designed and evaluated through video analysis: A qualitative view\ + \ of mutual action describes the social context of music improvisation: how two\ + \ people with speech, laughter, gestures, postures and pauses negotiate individual\ + \ and joint action. The objective behind the design of the game application was\ + \ to support players in some aspects of their mutual play. Results show that even\ + \ though players activated additional sound feedback as a result of their mutual\ + \ play, players also engaged in forms of mutual play that the game engine did\ + \ not account for. 
These ways of mutual play are descibed further along with some\ + \ suggestions for how to direct future designs of collaborative music improvisation\ + \ games towards ways of mutual play. },\n address = {Oslo, Norway},\n author =\ + \ {Hansen, Anne-Marie S. and Anderson, Hans J. and Raudaskoski, Pirkko},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177215},\n issn = {2220-4806},\n keywords\ - \ = {Composition, Integral Music Controller, Emotion measurement, Physiological\ - \ Measurement, Spatialisation. },\n pages = {289--292},\n title = {D\\'{\\i}amair\ - \ : Composing for Choir and Integral Music Controller},\n url = {http://www.nime.org/proceedings/2007/nime2007_289.pdf},\n\ - \ year = {2007}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1178039},\n issn = {2220-4806},\n keywords\ + \ = {Collaborative interfaces, improvisation, interactive music games, social\ + \ interaction, play, novice. },\n pages = {220--223},\n title = {Play Fluency\ + \ in Music Improvisation Games for Novices},\n url = {http://www.nime.org/proceedings/2011/nime2011_220.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177215 + doi: 10.5281/zenodo.1178039 issn: 2220-4806 - keywords: 'Composition, Integral Music Controller, Emotion measurement, Physiological - Measurement, Spatialisation. ' - pages: 289--292 - title: 'Díamair : Composing for Choir and Integral Music Controller' - url: http://www.nime.org/proceedings/2007/nime2007_289.pdf - year: 2007 + keywords: 'Collaborative interfaces, improvisation, interactive music games, social + interaction, play, novice. 
' + pages: 220--223 + title: Play Fluency in Music Improvisation Games for Novices + url: http://www.nime.org/proceedings/2011/nime2011_220.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Fornari2007 - abstract: 'We present an interactive sound spatialization and synthesis system based - on Interaural Time Difference (ITD) model and Evolutionary Computation. We define - a Sonic Localization Field using sound attenuation and ITD azimuth angle parameters - and, in order to control an adaptive algorithm, we used pairs of these parameters - as Spatial Sound Genotypes (SSG). They are extracted from waveforms which are - considered individuals of a Population Set. A user-interface receives input from - a generic gesture interface (such as a NIME device) and interprets them as ITD - cues. Trajectories provided by these signals are used as Target Sets of an evolutionary - algorithm. A Fitness procedure optimizes locally the distance between the Target - Set and the SSG pairs. Through a parametric score the user controls dynamic changes - in the sound output. ' - address: 'New York City, NY, United States' - author: 'Fornari, Jose and Maia, Adolfo Jr. and Manzolli, Jonatas' - bibtex: "@inproceedings{Fornari2007,\n abstract = {We present an interactive sound\ - \ spatialization and synthesis system based on Interaural Time Difference (ITD)\ - \ model and Evolutionary Computation. We define a Sonic Localization Field using\ - \ sound attenuation and ITD azimuth angle parameters and, in order to control\ - \ an adaptive algorithm, we used pairs of these parameters as Spatial Sound Genotypes\ - \ (SSG). They are extracted from waveforms which are considered individuals of\ - \ a Population Set. A user-interface receives input from a generic gesture interface\ - \ (such as a NIME device) and interprets them as ITD cues. Trajectories provided\ - \ by these signals are used as Target Sets of an evolutionary algorithm. 
A Fitness\ - \ procedure optimizes locally the distance between the Target Set and the SSG\ - \ pairs. Through a parametric score the user controls dynamic changes in the sound\ - \ output. },\n address = {New York City, NY, United States},\n author = {Fornari,\ - \ Jose and Maia, Adolfo Jr. and Manzolli, Jonatas},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177089},\n issn = {2220-4806},\n keywords = {interactive,\ - \ sound, spatialization, evolutionary, adaptation. },\n pages = {293--298},\n\ - \ title = {Interactive Spatialization and Sound Design using an Evolutionary System},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_293.pdf},\n year = {2007}\n\ - }\n" + ID: Ramkissoon2011 + abstract: 'The Bass Sleeve uses an Arduino board with a combination of buttons, + switches, flex sensors, force sensing resistors, and an accelerometer to map the + ancillary movements of a performer to sampling, real-time audio and video processing + including pitch shifting, delay, low pass filtering, and onscreen video movement. + The device was created to augment the existing functions of the electric bass + and explore the use of ancillary gestures to control the laptop in a live performance. + In this research it was found that incorporating ancillary gestures into a live + performance could be useful when controlling the parameters of audio processing, + sound synthesis and video manipulation. These ancillary motions can be a practical + solution to gestural multitasking allowing independent control of computer music + parameters while performing with the electric bass. The process of performing + with the Bass Sleeve resulted in a greater amount of laptop control, an increase + in the amount of expressiveness using the electric bass in combination with the + laptop, and an improvement in the interactivity on both the electric bass and + laptop during a live performance. 
The design uses various gesture-to-sound mapping + strategies to accomplish a compositional task during an electro acoustic multimedia + musical performance piece. ' + address: 'Oslo, Norway' + author: 'Ramkissoon, Izzi' + bibtex: "@inproceedings{Ramkissoon2011,\n abstract = {The Bass Sleeve uses an Arduino\ + \ board with a combination of buttons, switches, flex sensors, force sensing resistors,\ + \ and an accelerometer to map the ancillary movements of a performer to sampling,\ + \ real-time audio and video processing including pitch shifting, delay, low pass\ + \ filtering, and onscreen video movement. The device was created to augment the\ + \ existing functions of the electric bass and explore the use of ancillary gestures\ + \ to control the laptop in a live performance. In this research it was found that\ + \ incorporating ancillary gestures into a live performance could be useful when\ + \ controlling the parameters of audio processing, sound synthesis and video manipulation.\ + \ These ancillary motions can be a practical solution to gestural multitasking\ + \ allowing independent control of computer music parameters while performing with\ + \ the electric bass. The process of performing with the Bass Sleeve resulted in\ + \ a greater amount of laptop control, an increase in the amount of expressiveness\ + \ using the electric bass in combination with the laptop, and an improvement in\ + \ the interactivity on both the electric bass and laptop during a live performance.\ + \ The design uses various gesture-to-sound mapping strategies to accomplish a\ + \ compositional task during an electro acoustic multimedia musical performance\ + \ piece. 
},\n address = {Oslo, Norway},\n author = {Ramkissoon, Izzi},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178141},\n issn = {2220-4806},\n keywords\ + \ = {Interactive Music, Interactive Performance Systems, Gesture Controllers,\ + \ Augmented Instruments, Electric Bass, Video Tracking },\n pages = {224--227},\n\ + \ title = {The Bass Sleeve: A Real-time Multimedia Gestural Controller for Augmented\ + \ Electric Bass Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_224.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177089 + doi: 10.5281/zenodo.1178141 issn: 2220-4806 - keywords: 'interactive, sound, spatialization, evolutionary, adaptation. ' - pages: 293--298 - title: Interactive Spatialization and Sound Design using an Evolutionary System - url: http://www.nime.org/proceedings/2007/nime2007_293.pdf - year: 2007 + keywords: 'Interactive Music, Interactive Performance Systems, Gesture Controllers, + Augmented Instruments, Electric Bass, Video Tracking ' + pages: 224--227 + title: 'The Bass Sleeve: A Real-time Multimedia Gestural Controller for Augmented + Electric Bass Performance' + url: http://www.nime.org/proceedings/2011/nime2011_224.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Hornof2007 - abstract: 'In this project, eye tracking researchers and computer music composers - collaborate to create musical compositions that are played with the eyes. A commercial - eye tracker (LC Technologies Eyegaze) is connected to a music and multimedia authoring - environment (Max/MSP/Jitter). The project addresses issues of both noise and control: - How will the performance benefit from the noise inherent in eye trackers and eye - movements, and to what extent should the composition encourage the performer to - try to control a specific musical outcome? 
Providing one set of answers to these - two questions, the authors create an eye-controlled composition, EyeMusic v1.0, - which was selected by juries for live performance at computer music conferences.' - address: 'New York City, NY, United States' - author: 'Hornof, Anthony J. and Rogers, Troy and Halverson, Tim' - bibtex: "@inproceedings{Hornof2007,\n abstract = {In this project, eye tracking\ - \ researchers and computer music composers collaborate to create musical compositions\ - \ that are played with the eyes. A commercial eye tracker (LC Technologies Eyegaze)\ - \ is connected to a music and multimedia authoring environment (Max/MSP/Jitter).\ - \ The project addresses issues of both noise and control: How will the performance\ - \ benefit from the noise inherent in eye trackers and eye movements, and to what\ - \ extent should the composition encourage the performer to try to control a specific\ - \ musical outcome? Providing one set of answers to these two questions, the authors\ - \ create an eye-controlled composition, EyeMusic v1.0, which was selected by juries\ - \ for live performance at computer music conferences.},\n address = {New York\ - \ City, NY, United States},\n author = {Hornof, Anthony J. and Rogers, Troy and\ - \ Halverson, Tim},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177121},\n\ - \ issn = {2220-4806},\n keywords = {H.5.2 [Information Interfaces and Presentation]\ - \ User Interfaces --- input devices and strategies, interaction styles. J.5 [Arts\ - \ and Humanities] Fine arts, performing arts. },\n pages = {299--300},\n title\ - \ = {EyeMusic : Performing Live Music and Multimedia Compositions with Eye Movements},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_299.pdf},\n year = {2007}\n\ - }\n" + ID: Kapur2011 + abstract: "This paper describes the KarmetiK NotomotoN, a new musical robotic system\ + \ for performance and education. 
A long time goal of the ,\n,\nauthors has been\ + \ to provide users with plug-andplay, highly expressive musical robot system with\ + \ a high degree of portability. This paper describes the technical details of\ + \ the NotomotoN, and discusses its use in performance and educational scenarios.\ + \ Detailed tests performed to optimize technical aspects of the NotomotoN are\ + \ described to highlight usability and performance specifications for electronic\ + \ musicians and educators. " + address: 'Oslo, Norway' + author: 'Kapur, Ajay and Darling, Michael and Murphy, Jim and Hochenbaum, Jordan + and Diakopoulos, Dimitri and Trimpin, Trimpin' + bibtex: "@inproceedings{Kapur2011,\n abstract = {This paper describes the KarmetiK\ + \ NotomotoN, a new musical robotic system for performance and education. A long\ + \ time goal of the ,\n,\nauthors has been to provide users with plug-andplay,\ + \ highly expressive musical robot system with a high degree of portability. This\ + \ paper describes the technical details of the NotomotoN, and discusses its use\ + \ in performance and educational scenarios. Detailed tests performed to optimize\ + \ technical aspects of the NotomotoN are described to highlight usability and\ + \ performance specifications for electronic musicians and educators. 
},\n address\ + \ = {Oslo, Norway},\n author = {Kapur, Ajay and Darling, Michael and Murphy, Jim\ + \ and Hochenbaum, Jordan and Diakopoulos, Dimitri and Trimpin, Trimpin},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178059},\n issn = {2220-4806},\n keywords\ + \ = {music technology,musical robotics,robotic performance},\n pages = {228--231},\n\ + \ title = {The KarmetiK NotomotoN : A New Breed of Musical Robot for Teaching\ + \ and Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_228.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177121 + doi: 10.5281/zenodo.1178059 issn: 2220-4806 - keywords: 'H.5.2 [Information Interfaces and Presentation] User Interfaces --- input - devices and strategies, interaction styles. J.5 [Arts and Humanities] Fine arts, - performing arts. ' - pages: 299--300 - title: 'EyeMusic : Performing Live Music and Multimedia Compositions with Eye Movements' - url: http://www.nime.org/proceedings/2007/nime2007_299.pdf - year: 2007 + keywords: 'music technology,musical robotics,robotic performance' + pages: 228--231 + title: 'The KarmetiK NotomotoN : A New Breed of Musical Robot for Teaching and Performance' + url: http://www.nime.org/proceedings/2011/nime2011_228.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Kirk2007 - abstract: 'The FrankenPipe project is an attempt to convert a traditionalHighland - Bagpipe into a controller capable of driving both realtime synthesis on a laptop - as well as a radio-controlled (RC) car.Doing so engages musical creativity while - enabling novel, oftenhumorous, performance art. The chanter is outfitted withphotoresistors - (CdS photoconductive cells) underneath each hole,allowing a full range of MIDI - values to be produced with eachfinger and giving the player a natural feel. 
An - air-pressure sensoris also deployed in the bag to provide another element of controlwhile - capturing a fundamental element of bagpipe performance.The final product navigates - the realm of both musical instrumentand toy, allowing the performer to create - a novel yet richperformance experience for the audience.' - address: 'New York City, NY, United States' - author: 'Kirk, Turner and Leider, Colby' - bibtex: "@inproceedings{Kirk2007,\n abstract = {The FrankenPipe project is an attempt\ - \ to convert a traditionalHighland Bagpipe into a controller capable of driving\ - \ both realtime synthesis on a laptop as well as a radio-controlled (RC) car.Doing\ - \ so engages musical creativity while enabling novel, oftenhumorous, performance\ - \ art. The chanter is outfitted withphotoresistors (CdS photoconductive cells)\ - \ underneath each hole,allowing a full range of MIDI values to be produced with\ - \ eachfinger and giving the player a natural feel. An air-pressure sensoris also\ - \ deployed in the bag to provide another element of controlwhile capturing a fundamental\ - \ element of bagpipe performance.The final product navigates the realm of both\ - \ musical instrumentand toy, allowing the performer to create a novel yet richperformance\ - \ experience for the audience.},\n address = {New York City, NY, United States},\n\ - \ author = {Kirk, Turner and Leider, Colby},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177151},\n issn = {2220-4806},\n keywords = {FrankenPipe, alternate\ - \ controller, MIDI, bagpipe, photoresistor, chanter. },\n pages = {301--304},\n\ - \ title = {The FrankenPipe : A Novel Bagpipe Controller},\n url = {http://www.nime.org/proceedings/2007/nime2007_301.pdf},\n\ - \ year = {2007}\n}\n" + ID: Barenca2011 + abstract: 'The Manipuller is a novel Gestural Controller based on strings manipulation + and multi-dimensional force sensing technology. 
This paper describes its motivation, + design and operational principles along with some of its musical applications. + Finally the results of a preliminary usability test are presented and discussed. ' + address: 'Oslo, Norway' + author: 'Barenca, Adrián and Torre, Giuseppe' + bibtex: "@inproceedings{Barenca2011,\n abstract = {The Manipuller is a novel Gestural\ + \ Controller based on strings manipulation and multi-dimensional force sensing\ + \ technology. This paper describes its motivation, design and operational principles\ + \ along with some of its musical applications. Finally the results of a preliminary\ + \ usability test are presented and discussed. },\n address = {Oslo, Norway},\n\ + \ author = {Barenca, Adri\\'{a}n and Torre, Giuseppe},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177949},\n issn = {2220-4806},\n keywords = {1,and force\ + \ sensors within,force sensing,gestural,gestural controller,manipulation,strings,strings\ + \ and force sensing,the integration of strings},\n pages = {232--235},\n title\ + \ = {The Manipuller : Strings Manipulation and Multi-Dimensional Force Sensing},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_232.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177151 + doi: 10.5281/zenodo.1177949 issn: 2220-4806 - keywords: 'FrankenPipe, alternate controller, MIDI, bagpipe, photoresistor, chanter. 
' - pages: 301--304 - title: 'The FrankenPipe : A Novel Bagpipe Controller' - url: http://www.nime.org/proceedings/2007/nime2007_301.pdf - year: 2007 + keywords: '1,and force sensors within,force sensing,gestural,gestural controller,manipulation,strings,strings + and force sensing,the integration of strings' + pages: 232--235 + title: 'The Manipuller : Strings Manipulation and Multi-Dimensional Force Sensing' + url: http://www.nime.org/proceedings/2011/nime2011_232.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Camurri2007a - abstract: 'EyesWeb XMI (for eXtended Multimodal Interaction) is the new version - of the well-known EyesWeb platform. It has a main focus on multimodality and the - main design target of this new release has been to improve the ability to process - and correlate several streams of data. It has been used extensively to build a - set of interactive systems for performing arts applications for Festival della - Scienza 2006, Genoa, Italy. The purpose of this paper is to describe the developed - installations as well as the new EyesWeb features that helped in their development.' - address: 'New York City, NY, United States' - author: 'Camurri, Antonio and Coletta, Paolo and Varni, Giovanna and Ghisio, Simone' - bibtex: "@inproceedings{Camurri2007a,\n abstract = {EyesWeb XMI (for eXtended Multimodal\ - \ Interaction) is the new version of the well-known EyesWeb platform. It has a\ - \ main focus on multimodality and the main design target of this new release has\ - \ been to improve the ability to process and correlate several streams of data.\ - \ It has been used extensively to build a set of interactive systems for performing\ - \ arts applications for Festival della Scienza 2006, Genoa, Italy. 
The purpose\ - \ of this paper is to describe the developed installations as well as the new\ - \ EyesWeb features that helped in their development.},\n address = {New York City,\ - \ NY, United States},\n author = {Camurri, Antonio and Coletta, Paolo and Varni,\ - \ Giovanna and Ghisio, Simone},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177061},\n\ - \ issn = {2220-4806},\n keywords = {EyesWeb, multimodal interactive systems, performing\ - \ arts. },\n pages = {305--308},\n title = {Developing Multimodal Interactive\ - \ Systems with EyesWeb XMI},\n url = {http://www.nime.org/proceedings/2007/nime2007_305.pdf},\n\ - \ year = {2007}\n}\n" + ID: Crevoisier2011 + abstract: 'The Surface Editor is a software tool for creating control interfaces + and mapping input actions to OSC or MIDI actions very easily and intuitively. + Originally conceived to be used with a tactile interface, the Surface Editor has + been extended to support the creation of graspable interfaces as well. This paper + presents a new framework for the generic mapping of user actions with graspable + objects on a surface. We also present a system for detecting touch on thin objects, + allowing for extended interactive possibilities. The Surface Editor is not limited + to a particular tracking system though, and the generic mapping approach for objects + can have a broader use with various input interfaces supporting touch and/or objects. ' + address: 'Oslo, Norway' + author: 'Crevoisier, Alain and Picard-Limpens, Cécile' + bibtex: "@inproceedings{Crevoisier2011,\n abstract = {The Surface Editor is a software\ + \ tool for creating control interfaces and mapping input actions to OSC or MIDI\ + \ actions very easily and intuitively. Originally conceived to be used with a\ + \ tactile interface, the Surface Editor has been extended to support the creation\ + \ of graspable interfaces as well. 
This paper presents a new framework for the\ + \ generic mapping of user actions with graspable objects on a surface. We also\ + \ present a system for detecting touch on thin objects, allowing for extended\ + \ interactive possibilities. The Surface Editor is not limited to a particular\ + \ tracking system though, and the generic mapping approach for objects can have\ + \ a broader use with various input interfaces supporting touch and/or objects.\ + \ },\n address = {Oslo, Norway},\n author = {Crevoisier, Alain and Picard-Limpens,\ + \ C\\'{e}cile},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177989},\n\ + \ issn = {2220-4806},\n keywords = {NIME, mapping, interaction, user-defined interfaces,\ + \ tangibles, graspable interfaces. },\n pages = {236--239},\n title = {Mapping\ + \ Objects with the Surface Editor},\n url = {http://www.nime.org/proceedings/2011/nime2011_236.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177061 + doi: 10.5281/zenodo.1177989 issn: 2220-4806 - keywords: 'EyesWeb, multimodal interactive systems, performing arts. ' - pages: 305--308 - title: Developing Multimodal Interactive Systems with EyesWeb XMI - url: http://www.nime.org/proceedings/2007/nime2007_305.pdf - year: 2007 - - + keywords: 'NIME, mapping, interaction, user-defined interfaces, tangibles, graspable + interfaces. ' + pages: 236--239 + title: Mapping Objects with the Surface Editor + url: http://www.nime.org/proceedings/2011/nime2011_236.pdf + year: 2011 + + - ENTRYTYPE: inproceedings - ID: Hoffman2007 - abstract: 'A crucial set of decisions in digital musical instrument design deals - with choosing mappings between parameters controlled by the performer and the - synthesis algorithms that actually generate sound. 
Feature-based synthesis offers - a way to parameterize audio synthesis in terms of the quantifiable perceptual - characteristics, or features, the performer wishes the sound to take on. Techniques - for accomplishing such mappings and enabling feature-based synthesis to be performed - in real time are discussed. An example is given of how a real-time performance - system might be designed to take advantage of feature-based synthesis''s ability - to provide perceptually meaningful control over a large number of synthesis parameters. ' - address: 'New York City, NY, United States' - author: 'Hoffman, Matt and Cook, Perry R.' - bibtex: "@inproceedings{Hoffman2007,\n abstract = {A crucial set of decisions in\ - \ digital musical instrument design deals with choosing mappings between parameters\ - \ controlled by the performer and the synthesis algorithms that actually generate\ - \ sound. Feature-based synthesis offers a way to parameterize audio synthesis\ - \ in terms of the quantifiable perceptual characteristics, or features, the performer\ - \ wishes the sound to take on. Techniques for accomplishing such mappings and\ - \ enabling feature-based synthesis to be performed in real time are discussed.\ - \ An example is given of how a real-time performance system might be designed\ - \ to take advantage of feature-based synthesis's ability to provide perceptually\ - \ meaningful control over a large number of synthesis parameters. },\n address\ - \ = {New York City, NY, United States},\n author = {Hoffman, Matt and Cook, Perry\ - \ R.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177117},\n issn = {2220-4806},\n\ - \ keywords = {Feature, Synthesis, Analysis, Mapping, Real-time. 
},\n pages = {309--312},\n\ - \ title = {Real-Time Feature-Based Synthesis for Live Musical Performance},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_309.pdf},\n year = {2007}\n\ - }\n" + ID: Hochenbaum2011 + abstract: 'This paper presents the SmartFiducial, a wireless tangible object that + facilitates additional modes of expressivity for vision-based tabletop surfaces. + Using infrared proximity sensing and resistive based force-sensors, the SmartFiducial + affords users unique, and highly gestural inputs. Furthermore, the SmartFiducial + incorporates additional customizable pushbutton switches. Using XBee radio frequency + (RF) wireless transmission, the SmartFiducial establishes bipolar communication + with a host computer. This paper describes the design and implementation of the + SmartFiducial, as well as an exploratory use in a musical context. ' + address: 'Oslo, Norway' + author: 'Hochenbaum, Jordan and Kapur, Ajay' + bibtex: "@inproceedings{Hochenbaum2011,\n abstract = {This paper presents the SmartFiducial,\ + \ a wireless tangible object that facilitates additional modes of expressivity\ + \ for vision-based tabletop surfaces. Using infrared proximity sensing and resistive\ + \ based force-sensors, the SmartFiducial affords users unique, and highly gestural\ + \ inputs. Furthermore, the SmartFiducial incorporates additional customizable\ + \ pushbutton switches. Using XBee radio frequency (RF) wireless transmission,\ + \ the SmartFiducial establishes bipolar communication with a host computer. This\ + \ paper describes the design and implementation of the SmartFiducial, as well\ + \ as an exploratory use in a musical context. 
},\n address = {Oslo, Norway},\n\ + \ author = {Hochenbaum, Jordan and Kapur, Ajay},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178045},\n issn = {2220-4806},\n keywords = {Fiducial, Tangible\ + \ Interface, Multi-touch, Sensors, Gesture, Haptics, Bricktable, Proximity Sensing\ + \ },\n pages = {240--243},\n title = {Adding Z-Depth and Pressure Expressivity\ + \ to Tangible Tabletop Surfaces},\n url = {http://www.nime.org/proceedings/2011/nime2011_240.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177117 + doi: 10.5281/zenodo.1178045 issn: 2220-4806 - keywords: 'Feature, Synthesis, Analysis, Mapping, Real-time. ' - pages: 309--312 - title: Real-Time Feature-Based Synthesis for Live Musical Performance - url: http://www.nime.org/proceedings/2007/nime2007_309.pdf - year: 2007 + keywords: 'Fiducial, Tangible Interface, Multi-touch, Sensors, Gesture, Haptics, + Bricktable, Proximity Sensing ' + pages: 240--243 + title: Adding Z-Depth and Pressure Expressivity to Tangible Tabletop Surfaces + url: http://www.nime.org/proceedings/2011/nime2011_240.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Hashida2007 - abstract: 'This paper introduces jPop-E (java-based PolyPhrase Ensemble), an assistant - system for the Pop-E performancerendering system. Using this assistant system, - MIDI dataincluding expressive tempo changes or velocity control canbe created - based on the user''s musical intention. Pop-E(PolyPhrase Ensemble) is one of the - few machine systemsdevoted to creating expressive musical performances thatcan - deal with the structure of polyphonic music and theuser''s interpretation of the - music. A well-designed graphical user interface is required to make full use of - the potential ability of Pop-E. 
In this paper, we discuss the necessaryelements - of the user interface for Pop-E, and describe theimplemented system, jPop-E.' - address: 'New York City, NY, United States' - author: 'Hashida, Mitsuyo and Nagata, Noriko and Katayose, Haruhiro' - bibtex: "@inproceedings{Hashida2007,\n abstract = {This paper introduces jPop-E\ - \ (java-based PolyPhrase Ensemble), an assistant system for the Pop-E performancerendering\ - \ system. Using this assistant system, MIDI dataincluding expressive tempo changes\ - \ or velocity control canbe created based on the user's musical intention. Pop-E(PolyPhrase\ - \ Ensemble) is one of the few machine systemsdevoted to creating expressive musical\ - \ performances thatcan deal with the structure of polyphonic music and theuser's\ - \ interpretation of the music. A well-designed graphical user interface is required\ - \ to make full use of the potential ability of Pop-E. In this paper, we discuss\ - \ the necessaryelements of the user interface for Pop-E, and describe theimplemented\ - \ system, jPop-E.},\n address = {New York City, NY, United States},\n author =\ - \ {Hashida, Mitsuyo and Nagata, Noriko and Katayose, Haruhiro},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177111},\n issn = {2220-4806},\n keywords = {Performance\ - \ Rendering, User Interface, Ensemble Music Ex- pression },\n pages = {313--316},\n\ - \ title = {jPop-E : An Assistant System for Performance Rendering of Ensemble\ - \ Music},\n url = {http://www.nime.org/proceedings/2007/nime2007_313.pdf},\n year\ - \ = {2007}\n}\n" + ID: Milne2011 + address: 'Oslo, Norway' + author: 'Milne, Andrew J. and Xamb\''{o}, Anna and Laney, Robin and Sharp, David + B. and Prechtl, Anthony and Holland, Simon' + bibtex: "@inproceedings{Milne2011,\n address = {Oslo, Norway},\n author = {Milne,\ + \ Andrew J. and Xamb\\'{o}, Anna and Laney, Robin and Sharp, David B. 
and Prechtl,\ + \ Anthony and Holland, Simon},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178109},\n\ + \ issn = {2220-4806},\n keywords = {generalized keyboard, isomorphic layout, multi-touch\ + \ surface, tablet, musical interface design, iPad, microtonality },\n pages =\ + \ {244--247},\n title = {Hex Player --- A Virtual Musical Controller},\n url =\ + \ {http://www.nime.org/proceedings/2011/nime2011_244.pdf},\n year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177111 + doi: 10.5281/zenodo.1178109 issn: 2220-4806 - keywords: 'Performance Rendering, User Interface, Ensemble Music Ex- pression ' - pages: 313--316 - title: 'jPop-E : An Assistant System for Performance Rendering of Ensemble Music' - url: http://www.nime.org/proceedings/2007/nime2007_313.pdf - year: 2007 + keywords: 'generalized keyboard, isomorphic layout, multi-touch surface, tablet, + musical interface design, iPad, microtonality ' + pages: 244--247 + title: Hex Player --- A Virtual Musical Controller + url: http://www.nime.org/proceedings/2011/nime2011_244.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Sarkar2007 - abstract: 'Playing music over the Internet, whether for real-time jamming, network - performance or distance education, is constrained by the speed of light which - introduces, over long distances, time delays unsuitable for musical applications. - Current musical collaboration systems generally transmit compressed audio streams - over low-latency and high-bandwidthnetworks to optimize musician synchronization. - This paperproposes an alternative approach based on pattern recognition and music - prediction. 
Trained for a particular typeof music, here the Indian tabla drum, - the system calledTablaNet identifies rhythmic patterns by recognizing individual - strokes played by a musician and mapping them dynamically to known musical constructs. - Symbols representing these musical structures are sent over the network toa corresponding - computer system. The computer at thereceiving end anticipates incoming events - by analyzing previous phrases and synthesizes an estimated audio output.Although - such a system may introduce variants due to prediction approximations, resulting - in a slightly different musical experience at both ends, we find that it demonstratesa - high level of playability with an immediacy not present inother systems, and functions - well as an educational tool.' - address: 'New York City, NY, United States' - author: 'Sarkar, Mihir and Vercoe, Barry' - bibtex: "@inproceedings{Sarkar2007,\n abstract = {Playing music over the Internet,\ - \ whether for real-time jamming, network performance or distance education, is\ - \ constrained by the speed of light which introduces, over long distances, time\ - \ delays unsuitable for musical applications. Current musical collaboration systems\ - \ generally transmit compressed audio streams over low-latency and high-bandwidthnetworks\ - \ to optimize musician synchronization. This paperproposes an alternative approach\ - \ based on pattern recognition and music prediction. Trained for a particular\ - \ typeof music, here the Indian tabla drum, the system calledTablaNet identifies\ - \ rhythmic patterns by recognizing individual strokes played by a musician and\ - \ mapping them dynamically to known musical constructs. 
Symbols representing these\ - \ musical structures are sent over the network toa corresponding computer system.\ - \ The computer at thereceiving end anticipates incoming events by analyzing previous\ - \ phrases and synthesizes an estimated audio output.Although such a system may\ - \ introduce variants due to prediction approximations, resulting in a slightly\ - \ different musical experience at both ends, we find that it demonstratesa high\ - \ level of playability with an immediacy not present inother systems, and functions\ - \ well as an educational tool.},\n address = {New York City, NY, United States},\n\ - \ author = {Sarkar, Mihir and Vercoe, Barry},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177239},\n issn = {2220-4806},\n keywords = {network music\ - \ performance, real-time online musical collab- oration, Indian percussions, tabla\ - \ bols, strokes recognition, music prediction },\n pages = {317--320},\n title\ - \ = {Recognition and Prediction in a Network Music Performance System for {India}n\ - \ Percussion},\n url = {http://www.nime.org/proceedings/2007/nime2007_317.pdf},\n\ - \ year = {2007}\n}\n" + ID: Waadeland2011 + address: 'Oslo, Norway' + author: 'Waadeland, Carl H.' 
+ bibtex: "@inproceedings{Waadeland2011,\n address = {Oslo, Norway},\n author = {Waadeland,\ + \ Carl H.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178185},\n issn\ + \ = {2220-4806},\n keywords = {gesture,movement,rhythm performance,spectral analysis},\n\ + \ pages = {248--251},\n title = {Rhythm Performance from a Spectral Point of View},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_248.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177239 + doi: 10.5281/zenodo.1178185 issn: 2220-4806 - keywords: 'network music performance, real-time online musical collab- oration, - Indian percussions, tabla bols, strokes recognition, music prediction ' - pages: 317--320 - title: Recognition and Prediction in a Network Music Performance System for Indian - Percussion - url: http://www.nime.org/proceedings/2007/nime2007_317.pdf - year: 2007 + keywords: 'gesture,movement,rhythm performance,spectral analysis' + pages: 248--251 + title: Rhythm Performance from a Spectral Point of View + url: http://www.nime.org/proceedings/2011/nime2011_248.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Vigoda2007 - abstract: 'JamiOki-PureJoy is a novel electronically mediated musical performance - system. PureJoy is a musical instrument; A highly flexible looper, sampler, effects - processor and sound manipulation interface based on Pure Data, with input from - a joystick controller and headset microphone. PureJoy allows the player to essentially - sculpt their voice with their hands. JamiOki is an engine for running group-player - musical game pieces. JamiOki helps each player by ‘whispering instructions’ in - their ear. Players track and control their progress through the game using a graphical - display and a touch-sensitive footpad. 
JamiOki is an architecture for bringing - groups of players together to express themselves musically in a way that is both - spontaneous and formally satisfying. The flexibility of the PureJoy instrument - offers to JamiOki the ability for any player to play any requested role in the - music at any time. The musical structure provided by JamiOki helps PureJoy players - create more complex pieces of music on the fly with spontaneous sounds, silences, - themes, recapitulation, tight transitions, structural hierarchy, interesting interactions, - and even friendly competition. As a combined system JamiOki-PureJoy is exciting - and fun to play.' - address: 'New York City, NY, United States' - author: 'Vigoda, Benjamin and Merrill, David' - bibtex: "@inproceedings{Vigoda2007,\n abstract = {JamiOki-PureJoy is a novel electronically\ - \ mediated musical performance system. PureJoy is a musical instrument; A highly\ - \ flexible looper, sampler, effects processor and sound manipulation interface\ - \ based on Pure Data, with input from a joystick controller and headset microphone.\ - \ PureJoy allows the player to essentially sculpt their voice with their hands.\ - \ JamiOki is an engine for running group-player musical game pieces. JamiOki helps\ - \ each player by ‘whispering instructions’ in their ear. Players track and control\ - \ their progress through the game using a graphical display and a touch-sensitive\ - \ footpad. JamiOki is an architecture for bringing groups of players together\ - \ to express themselves musically in a way that is both spontaneous and formally\ - \ satisfying. 
The flexibility of the PureJoy instrument offers to JamiOki the\ - \ ability for any player to play any requested role in the music at any time.\ - \ The musical structure provided by JamiOki helps PureJoy players create more\ - \ complex pieces of music on the fly with spontaneous sounds, silences, themes,\ - \ recapitulation, tight transitions, structural hierarchy, interesting interactions,\ - \ and even friendly competition. As a combined system JamiOki-PureJoy is exciting\ - \ and fun to play.},\n address = {New York City, NY, United States},\n author\ - \ = {Vigoda, Benjamin and Merrill, David},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179473},\n\ - \ issn = {2220-4806},\n keywords = {JamiOki, PureJoy, collaborative performance,\ - \ structured im- provisation, electronically-mediated performance, found sound\ - \ },\n pages = {321--326},\n title = {JamiOki-PureJoy : A Game Engine and Instrument\ - \ for Electronically-Mediated Musical Improvisation},\n url = {http://www.nime.org/proceedings/2007/nime2007_321.pdf},\n\ - \ year = {2007}\n}\n" + ID: Comajuncosas2011 + abstract: 'This research presents a 3D gestural interface for collaborative concatenative + sound synthesis and audio mosaicing.Our goal is to improve the communication between + the audience and performers by means of an enhanced correlationbetween gestures + and musical outcome. Nuvolet consists ofa 3D motion controller coupled to a concatenative + synthesis engine. The interface detects and tracks the performers hands in four + dimensions (x,y,z,t) and allows them toconcurrently explore two or three-dimensional + sound cloudrepresentations of the units from the sound corpus, as wellas to perform + collaborative target-based audio mosaicing.Nuvolet is included in the Esmuc Laptop + Orchestra catalogfor forthcoming performances.' + address: 'Oslo, Norway' + author: 'Comajuncosas, Josep M. 
and Barrachina, Alex and O''Connell, John and Guaus, + Enric' + bibtex: "@inproceedings{Comajuncosas2011,\n abstract = {This research presents a\ + \ 3D gestural interface for collaborative concatenative sound synthesis and audio\ + \ mosaicing.Our goal is to improve the communication between the audience and\ + \ performers by means of an enhanced correlationbetween gestures and musical outcome.\ + \ Nuvolet consists ofa 3D motion controller coupled to a concatenative synthesis\ + \ engine. The interface detects and tracks the performers hands in four dimensions\ + \ (x,y,z,t) and allows them toconcurrently explore two or three-dimensional sound\ + \ cloudrepresentations of the units from the sound corpus, as wellas to perform\ + \ collaborative target-based audio mosaicing.Nuvolet is included in the Esmuc\ + \ Laptop Orchestra catalogfor forthcoming performances.},\n address = {Oslo, Norway},\n\ + \ author = {Comajuncosas, Josep M. and Barrachina, Alex and O'Connell, John and\ + \ Guaus, Enric},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177987},\n\ + \ issn = {2220-4806},\n keywords = {concatenative synthesis, audio mosaicing,\ + \ open-air interface, gestural controller, musical instrument, 3D },\n pages =\ + \ {252--255},\n title = {Nuvolet: {3D} Gesture-driven Collaborative Audio Mosaicing},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_252.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179473 + doi: 10.5281/zenodo.1177987 issn: 2220-4806 - keywords: 'JamiOki, PureJoy, collaborative performance, structured im- provisation, - electronically-mediated performance, found sound ' - pages: 321--326 - title: 'JamiOki-PureJoy : A Game Engine and Instrument for Electronically-Mediated - Musical Improvisation' - url: 
http://www.nime.org/proceedings/2007/nime2007_321.pdf - year: 2007 + keywords: 'concatenative synthesis, audio mosaicing, open-air interface, gestural + controller, musical instrument, 3D ' + pages: 252--255 + title: 'Nuvolet: 3D Gesture-driven Collaborative Audio Mosaicing' + url: http://www.nime.org/proceedings/2011/nime2011_252.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Gomez2007 - abstract: In this article we want to show how graphical languages can be used successfully - for monitoring and controlling a digital musical instrument. An overview of the - design and development stages of this instrument shows how we can create models - which will simplify the control and use of different kinds of musical algorithms - for synthesis and sequencing. - address: 'New York City, NY, United States' - author: 'Gómez, Daniel and Donner, Tjebbe and Posada, Andrés' - bibtex: "@inproceedings{Gomez2007,\n abstract = {In this article we want to show\ - \ how graphical languages can be used successfully for monitoring and controlling\ - \ a digital musical instrument. An overview of the design and development stages\ - \ of this instrument shows how we can create models which will simplify the control\ - \ and use of different kinds of musical algorithms for synthesis and sequencing.},\n\ - \ address = {New York City, NY, United States},\n author = {G\\'{o}mez, Daniel\ - \ and Donner, Tjebbe and Posada, Andr\\'{e}s},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177097},\n issn = {2220-4806},\n keywords = {nime07},\n pages\ - \ = {327--329},\n title = {A Look at the Design and Creation of a Graphically\ - \ Controlled Digital Musical Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_327.pdf},\n\ - \ year = {2007}\n}\n" + ID: Schoonderwaldt2011 + abstract: 'We report on a performance study of a French-Canadian fiddler. 
The fiddling + tradition forms an interesting contrast toclassical violin performance in several + ways. Distinguishingfeatures include special elements in the bowing techniqueand + the presence of an accompanying foot clogging pattern.These two characteristics + are described, visualized and analyzed using video and motion capture recordings + as sourcematerial.' + address: 'Oslo, Norway' + author: 'Schoonderwaldt, Erwin and Jensenius, Alexander Refsum' + bibtex: "@inproceedings{Schoonderwaldt2011,\n abstract = {We report on a performance\ + \ study of a French-Canadian fiddler. The fiddling tradition forms an interesting\ + \ contrast toclassical violin performance in several ways. Distinguishingfeatures\ + \ include special elements in the bowing techniqueand the presence of an accompanying\ + \ foot clogging pattern.These two characteristics are described, visualized and\ + \ analyzed using video and motion capture recordings as sourcematerial.},\n address\ + \ = {Oslo, Norway},\n author = {Schoonderwaldt, Erwin and Jensenius, Alexander\ + \ Refsum},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178155},\n issn = {2220-4806},\n\ + \ keywords = {fiddler, violin, French-Canadian, bowing, feet, clogging, motion\ + \ capture, video, motiongram, kinematics, sonification },\n pages = {256--259},\n\ + \ title = {Effective and Expressive Movements in a French-Canadian fiddler's Performance},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_256.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177097 + doi: 10.5281/zenodo.1178155 issn: 2220-4806 - keywords: nime07 - pages: 327--329 - title: A Look at the Design and Creation of a Graphically Controlled Digital Musical - Instrument - url: http://www.nime.org/proceedings/2007/nime2007_327.pdf - year: 2007 + keywords: 'fiddler, 
violin, French-Canadian, bowing, feet, clogging, motion capture, + video, motiongram, kinematics, sonification ' + pages: 256--259 + title: Effective and Expressive Movements in a French-Canadian fiddler's Performance + url: http://www.nime.org/proceedings/2011/nime2011_256.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Vanegas2007 - abstract: 'The guitar pick has traditionally been used to strike or rakethe strings - of a guitar or bass, and in rarer instances, ashamisen, lute, or other stringed - instrument. The pressure exerted on it, however, has until now been ignored.The - MIDI Pick, an enhanced guitar pick, embraces this dimension, acting as a trigger - for serial data, audio samples,MIDI messages 1, Max/MSP patches, and on/off messages.This - added scope expands greatly the stringed instrumentplayer''s musical dynamic in - the studio or on stage.' - address: 'New York City, NY, United States' - author: 'Vanegas, Roy' - bibtex: "@inproceedings{Vanegas2007,\n abstract = {The guitar pick has traditionally\ - \ been used to strike or rakethe strings of a guitar or bass, and in rarer instances,\ - \ ashamisen, lute, or other stringed instrument. 
The pressure exerted on it, however,\ - \ has until now been ignored.The MIDI Pick, an enhanced guitar pick, embraces\ - \ this dimension, acting as a trigger for serial data, audio samples,MIDI messages\ - \ 1, Max/MSP patches, and on/off messages.This added scope expands greatly the\ - \ stringed instrumentplayer's musical dynamic in the studio or on stage.},\n address\ - \ = {New York City, NY, United States},\n author = {Vanegas, Roy},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179471},\n issn = {2220-4806},\n keywords\ - \ = {guitar, MIDI, pick, plectrum, wireless, bluetooth, ZigBee, Arduino, NIME,\ - \ ITP },\n pages = {330--333},\n title = {The {MIDI} Pick : Trigger Serial Data\ - \ , Samples, and {MIDI} from a Guitar Pick},\n url = {http://www.nime.org/proceedings/2007/nime2007_330.pdf},\n\ - \ year = {2007}\n}\n" + ID: Bisig2011 + abstract: 'In this paper an audio-visual installation is discussed, which combines + interactive, immersive and generative elements. After introducing some of the + challenges in the field of Generative Art and placing the work within its research + context, conceptual reflections are made about the spatial, behavioural, perceptual + and social issues that are raised within the entire installation. A discussion + about the artistic content follows, focussing on the scenography and on working + with flocking algorithms in general, before addressing three specific pieces realised + for the exhibition. Next the technical implementation for both hardand software + are detailed before the idea of a hybrid ecosystem gets discussed and further + developments outlined.' + address: 'Oslo, Norway' + author: 'Bisig, Daniel and Schacher, Jan C. 
and Neukom, Martin' + bibtex: "@inproceedings{Bisig2011,\n abstract = {In this paper an audio-visual installation\ + \ is discussed, which combines interactive, immersive and generative elements.\ + \ After introducing some of the challenges in the field of Generative Art and\ + \ placing the work within its research context, conceptual reflections are made\ + \ about the spatial, behavioural, perceptual and social issues that are raised\ + \ within the entire installation. A discussion about the artistic content follows,\ + \ focussing on the scenography and on working with flocking algorithms in general,\ + \ before addressing three specific pieces realised for the exhibition. Next the\ + \ technical implementation for both hardand software are detailed before the idea\ + \ of a hybrid ecosystem gets discussed and further developments outlined.},\n\ + \ address = {Oslo, Norway},\n author = {Bisig, Daniel and Schacher, Jan C. and\ + \ Neukom, Martin},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177965},\n\ + \ issn = {2220-4806},\n keywords = {Generative Art, Interactive Environment, Immersive\ + \ Installation, Swarm Simulation, Hybrid Ecosystem },\n pages = {260--263},\n\ + \ title = {Flowspace -- A Hybrid Ecosystem},\n url = {http://www.nime.org/proceedings/2011/nime2011_260.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179471 + doi: 10.5281/zenodo.1177965 issn: 2220-4806 - keywords: 'guitar, MIDI, pick, plectrum, wireless, bluetooth, ZigBee, Arduino, NIME, - ITP ' - pages: 330--333 - title: 'The MIDI Pick : Trigger Serial Data , Samples, and MIDI from a Guitar Pick' - url: http://www.nime.org/proceedings/2007/nime2007_330.pdf - year: 2007 + keywords: 'Generative Art, Interactive Environment, Immersive Installation, Swarm + Simulation, Hybrid Ecosystem ' + pages: 
260--263 + title: Flowspace -- A Hybrid Ecosystem + url: http://www.nime.org/proceedings/2011/nime2011_260.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Benning2007 - abstract: 'This paper describes the design and experimentation of a Kalman Filter - used to improve position tracking of a 3-D gesture-based musical controller known - as the Radiodrum. The Singer dynamic model for target tracking is used to describe - the evolution of a Radiodrum''s stick position in time. The autocorrelation time - constant of a gesture''s acceleration and the variance of the gesture acceleration - are used to tune the model to various performance modes. Multiple Kalman Filters - tuned to each gesture type are run in parallel and an Interacting Multiple Model - (IMM) is implemented to decide on the best combination of filter outputs to track - the current gesture. Our goal is to accurately track Radiodrum gestures through - noisy measurement signals. ' - address: 'New York City, NY, United States' - author: 'Benning, Manjinder S. and McGuire, Michael and Driessen, Peter' - bibtex: "@inproceedings{Benning2007,\n abstract = {This paper describes the design\ - \ and experimentation of a Kalman Filter used to improve position tracking of\ - \ a 3-D gesture-based musical controller known as the Radiodrum. The Singer dynamic\ - \ model for target tracking is used to describe the evolution of a Radiodrum's\ - \ stick position in time. The autocorrelation time constant of a gesture's acceleration\ - \ and the variance of the gesture acceleration are used to tune the model to various\ - \ performance modes. Multiple Kalman Filters tuned to each gesture type are run\ - \ in parallel and an Interacting Multiple Model (IMM) is implemented to decide\ - \ on the best combination of filter outputs to track the current gesture. 
Our\ - \ goal is to accurately track Radiodrum gestures through noisy measurement signals.\ - \ },\n address = {New York City, NY, United States},\n author = {Benning, Manjinder\ - \ S. and McGuire, Michael and Driessen, Peter},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177043},\n issn = {2220-4806},\n keywords = {Kalman Filtering,\ - \ Radiodrum, Gesture Tracking, Interacting Multiple Model INTRODUCTION Intention\ - \ is a key aspect of traditional music performance. The ability for an artist\ - \ to reliably reproduce sound, pitch, rhythms, and emotion is paramount to the\ - \ design of any instrument. With the },\n pages = {334--337},\n title = {Improved\ - \ Position Tracking of a {3-D} Gesture-Based Musical Controller Using a {Kalman}\ - \ Filter},\n url = {http://www.nime.org/proceedings/2007/nime2007_334.pdf},\n\ - \ year = {2007}\n}\n" + ID: Sosnick2011 + abstract: 'In this paper, we describe an implementation of a real-time sound synthesizer + using Finite Difference-based simulation of a two-dimensional membrane. Finite + Difference (FD) methods can be the basis for physics-based music instrument models + that generate realistic audio output. However, such methods are compute-intensive; + large simulations cannot run in real time on current CPUs. Many current systems + now include powerful Graphics Processing Units (GPUs), which are a good fit for + FD methods. We demonstrate that it is possible to use this method to create a + usable real-time audio synthesizer. ' + address: 'Oslo, Norway' + author: 'Sosnick, Marc and Hsu, William' + bibtex: "@inproceedings{Sosnick2011,\n abstract = {In this paper, we describe an\ + \ implementation of a real-time sound synthesizer using Finite Difference-based\ + \ simulation of a two-dimensional membrane. 
Finite Difference (FD) methods can\ + \ be the basis for physics-based music instrument models that generate realistic\ + \ audio output. However, such methods are compute-intensive; large simulations\ + \ cannot run in real time on current CPUs. Many current systems now include powerful\ + \ Graphics Processing Units (GPUs), which are a good fit for FD methods. We demonstrate\ + \ that it is possible to use this method to create a usable real-time audio synthesizer.\ + \ },\n address = {Oslo, Norway},\n author = {Sosnick, Marc and Hsu, William},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178173},\n issn = {2220-4806},\n\ + \ keywords = {Finite Difference, GPU, CUDA, Synthesis },\n pages = {264--267},\n\ + \ title = {Implementing a Finite Difference-Based Real-time Sound Synthesizer\ + \ using {GPU}s},\n url = {http://www.nime.org/proceedings/2011/nime2011_264.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177043 + doi: 10.5281/zenodo.1178173 issn: 2220-4806 - keywords: 'Kalman Filtering, Radiodrum, Gesture Tracking, Interacting Multiple Model - INTRODUCTION Intention is a key aspect of traditional music performance. The ability - for an artist to reliably reproduce sound, pitch, rhythms, and emotion is paramount - to the design of any instrument. 
With the ' - pages: 334--337 - title: Improved Position Tracking of a 3-D Gesture-Based Musical Controller Using - a Kalman Filter - url: http://www.nime.org/proceedings/2007/nime2007_334.pdf - year: 2007 + keywords: 'Finite Difference, GPU, CUDA, Synthesis ' + pages: 264--267 + title: Implementing a Finite Difference-Based Real-time Sound Synthesizer using + GPUs + url: http://www.nime.org/proceedings/2011/nime2011_264.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Keating2007 - abstract: 'In this paper, design scenarios made possible by the use of an interactive - illuminated floor as the basis of an audiovisual environment are presented. By - interfacing a network of pressure sensitive, light-emitting tiles with a 7.1 channel - speaker system and requisite audio software, many avenues for collaborative expression - emerge, as do heretofore unexplored modes of multiplayer music and dance gaming. - By giving users light and sound cues that both guide and respond to their movement, - a rich environment is created that playfully integrates the auditory, the visual, - and the kinesthetic into a unified interactive experience.' - address: 'New York City, NY, United States' - author: 'Keating, Noah H.' - bibtex: "@inproceedings{Keating2007,\n abstract = {In this paper, design scenarios\ - \ made possible by the use of an interactive illuminated floor as the basis of\ - \ an audiovisual environment are presented. By interfacing a network of pressure\ - \ sensitive, light-emitting tiles with a 7.1 channel speaker system and requisite\ - \ audio software, many avenues for collaborative expression emerge, as do heretofore\ - \ unexplored modes of multiplayer music and dance gaming. 
By giving users light\ - \ and sound cues that both guide and respond to their movement, a rich environment\ - \ is created that playfully integrates the auditory, the visual, and the kinesthetic\ - \ into a unified interactive experience.},\n address = {New York City, NY, United\ - \ States},\n author = {Keating, Noah H.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177141},\n\ - \ issn = {2220-4806},\n keywords = {Responsive Environments, Audiovisual Play,\ - \ Kinetic Games, Movement Rich Game Play, Immersive Dance, Smart Floor },\n pages\ - \ = {338--343},\n title = {The Lambent Reactive : An Audiovisual Environment for\ - \ Kinesthetic Playforms},\n url = {http://www.nime.org/proceedings/2007/nime2007_338.pdf},\n\ - \ year = {2007}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1177141 - issn: 2220-4806 - keywords: 'Responsive Environments, Audiovisual Play, Kinetic Games, Movement Rich - Game Play, Immersive Dance, Smart Floor ' - pages: 338--343 - title: 'The Lambent Reactive : An Audiovisual Environment for Kinesthetic Playforms' - url: http://www.nime.org/proceedings/2007/nime2007_338.pdf - year: 2007 - - -- ENTRYTYPE: inproceedings - ID: Stark2007 - abstract: 'We present a new group of audio effects that use beat tracking, the detection - of beats in an audio signal, to relate effectparameters to the beats in an input - signal. Conventional audio effects are augmented so that their operation is related - tothe output of a beat tracking system. We present a temposynchronous delay effect - and a set of beat synchronous lowfrequency oscillator effects including tremolo, - vibrato andauto-wah. All effects are implemented in real-time as VSTplug-ins to - allow for their use in live performance.' - address: 'New York City, NY, United States' - author: 'Stark, Adam M. and Plumbley, Mark D. 
and Davies, Matthew E.' - bibtex: "@inproceedings{Stark2007,\n abstract = {We present a new group of audio\ - \ effects that use beat tracking, the detection of beats in an audio signal, to\ - \ relate effectparameters to the beats in an input signal. Conventional audio\ - \ effects are augmented so that their operation is related tothe output of a beat\ - \ tracking system. We present a temposynchronous delay effect and a set of beat\ - \ synchronous lowfrequency oscillator effects including tremolo, vibrato andauto-wah.\ - \ All effects are implemented in real-time as VSTplug-ins to allow for their use\ - \ in live performance.},\n address = {New York City, NY, United States},\n author\ - \ = {Stark, Adam M. and Plumbley, Mark D. and Davies, Matthew E.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177249},\n issn = {2220-4806},\n keywords\ - \ = {a beat-synchronous tremolo effect,audio effects,beat tracking,figure 1,im-,nime07,plemented\ - \ as a vst,plug-in,real-time,the rate is controlled,vst plug-in},\n pages = {344--345},\n\ - \ title = {Real-Time Beat-Synchronous Audio Effects},\n url = {http://www.nime.org/proceedings/2007/nime2007_344.pdf},\n\ - \ year = {2007}\n}\n" + ID: Tidemann2011 + abstract: 'Interacting with musical avatars have been increasingly popular over + the years, with the introduction of games likeGuitar Hero and Rock Band. These + games provide MIDIequipped controllers that look like their real-world counterparts + (e.g. MIDI guitar, MIDI drumkit) that the users playto control their designated + avatar in the game. The performance of the user is measured against a score that + needs tobe followed. However, the avatar does not move in responseto how the user + plays, it follows some predefined movementpattern. If the user plays badly, the + game ends with theavatar ending the performance (i.e. throwing the guitar onthe + floor). 
The gaming experience would increase if theavatar would move in accordance + with user input. This paper presents an architecture that couples musical input + withbody movement. Using imitation learning, a simulated human robot learns to + play the drums like human drummersdo, both visually and auditory. Learning data + is recordedusing MIDI and motion tracking. The system uses an artificial intelligence + approach to implement imitation learning,employing artificial neural networks.' + address: 'Oslo, Norway' + author: 'Tidemann, Axel' + bibtex: "@inproceedings{Tidemann2011,\n abstract = {Interacting with musical avatars\ + \ have been increasingly popular over the years, with the introduction of games\ + \ likeGuitar Hero and Rock Band. These games provide MIDIequipped controllers\ + \ that look like their real-world counterparts (e.g. MIDI guitar, MIDI drumkit)\ + \ that the users playto control their designated avatar in the game. The performance\ + \ of the user is measured against a score that needs tobe followed. However, the\ + \ avatar does not move in responseto how the user plays, it follows some predefined\ + \ movementpattern. If the user plays badly, the game ends with theavatar ending\ + \ the performance (i.e. throwing the guitar onthe floor). The gaming experience\ + \ would increase if theavatar would move in accordance with user input. This paper\ + \ presents an architecture that couples musical input withbody movement. Using\ + \ imitation learning, a simulated human robot learns to play the drums like human\ + \ drummersdo, both visually and auditory. Learning data is recordedusing MIDI\ + \ and motion tracking. 
The system uses an artificial intelligence approach to\ + \ implement imitation learning,employing artificial neural networks.},\n address\ + \ = {Oslo, Norway},\n author = {Tidemann, Axel},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178175},\n issn = {2220-4806},\n keywords = {artificial intelli-,drumming,modeling\ + \ human behaviour},\n pages = {268--271},\n title = {An Artificial Intelligence\ + \ Architecture for Musical Expressiveness that Learns by Imitation},\n url = {http://www.nime.org/proceedings/2011/nime2011_268.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177249 + doi: 10.5281/zenodo.1178175 issn: 2220-4806 - keywords: 'a beat-synchronous tremolo effect,audio effects,beat tracking,figure - 1,im-,nime07,plemented as a vst,plug-in,real-time,the rate is controlled,vst plug-in' - pages: 344--345 - title: Real-Time Beat-Synchronous Audio Effects - url: http://www.nime.org/proceedings/2007/nime2007_344.pdf - year: 2007 + keywords: 'artificial intelli-,drumming,modeling human behaviour' + pages: 268--271 + title: An Artificial Intelligence Architecture for Musical Expressiveness that Learns + by Imitation + url: http://www.nime.org/proceedings/2011/nime2011_268.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Wulfson2007 - abstract: 'This article presents various custom software tools called Automatic - Notation Generators (ANG''s) developed by the authors to aid in the creation of - algorithmic instrumental compositions. The unique possibilities afforded by ANG - software are described, along with relevant examples of their compositional output. 
- These avenues of exploration include: mappings of spectral data directly into - notated music, the creation of software transcribers that enable users to generate - multiple realizations of algorithmic compositions, and new types of spontaneous - performance with live generated screen-based music notation. The authors present - their existing software tools along with suggestions for future research and artistic - inquiry. ' - address: 'New York City, NY, United States' - author: 'Wulfson, Harris and Barrett, G. Douglas and Winter, Michael' - bibtex: "@inproceedings{Wulfson2007,\n abstract = {This article presents various\ - \ custom software tools called Automatic Notation Generators (ANG's) developed\ - \ by the authors to aid in the creation of algorithmic instrumental compositions.\ - \ The unique possibilities afforded by ANG software are described, along with\ - \ relevant examples of their compositional output. These avenues of exploration\ - \ include: mappings of spectral data directly into notated music, the creation\ - \ of software transcribers that enable users to generate multiple realizations\ - \ of algorithmic compositions, and new types of spontaneous performance with live\ - \ generated screen-based music notation. The authors present their existing software\ - \ tools along with suggestions for future research and artistic inquiry. },\n\ - \ address = {New York City, NY, United States},\n author = {Wulfson, Harris and\ - \ Barrett, G. 
Douglas and Winter, Michael},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176861},\n issn = {2220-4806},\n keywords = {nime07},\n pages\ - \ = {346--351},\n title = {Automatic Notation Generators},\n url = {http://www.nime.org/proceedings/2007/nime2007_346.pdf},\n\ - \ year = {2007}\n}\n" + ID: Dahl2011 + abstract: 'TweetDreams is an instrument and musical compositionwhich creates real-time + sonification and visualization oftweets. Tweet data containing specified search + terms is retrieved from Twitter and used to build networks of associated tweets. + These networks govern the creation of melodiesassociated with each tweet and are + displayed graphically.Audience members participate in the piece by tweeting,and + their tweets are given special musical and visual prominence.' + address: 'Oslo, Norway' + author: 'Dahl, Luke and Herrera, Jorge and Wilkerson, Carr' + bibtex: "@inproceedings{Dahl2011,\n abstract = {TweetDreams is an instrument and\ + \ musical compositionwhich creates real-time sonification and visualization oftweets.\ + \ Tweet data containing specified search terms is retrieved from Twitter and used\ + \ to build networks of associated tweets. These networks govern the creation of\ + \ melodiesassociated with each tweet and are displayed graphically.Audience members\ + \ participate in the piece by tweeting,and their tweets are given special musical\ + \ and visual prominence.},\n address = {Oslo, Norway},\n author = {Dahl, Luke\ + \ and Herrera, Jorge and Wilkerson, Carr},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177991},\n\ + \ issn = {2220-4806},\n keywords = {Twitter, audience participation, sonification,\ + \ data visualization, text processing, interaction, multi-user instrument. 
},\n\ + \ pages = {272--275},\n title = {TweetDreams : Making Music with the Audience\ + \ and the World using Real-time Twitter Data},\n url = {http://www.nime.org/proceedings/2011/nime2011_272.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176861 + doi: 10.5281/zenodo.1177991 issn: 2220-4806 - keywords: nime07 - pages: 346--351 - title: Automatic Notation Generators - url: http://www.nime.org/proceedings/2007/nime2007_346.pdf - year: 2007 + keywords: 'Twitter, audience participation, sonification, data visualization, text + processing, interaction, multi-user instrument. ' + pages: 272--275 + title: 'TweetDreams : Making Music with the Audience and the World using Real-time + Twitter Data' + url: http://www.nime.org/proceedings/2011/nime2011_272.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Young2007 - abstract: 'This paper presents a newly created database containing calibrated gesture - and audio data corresponding to various violin bowstrokes, as well as video and - motion capture data in some cases. The database is web-accessible and searchable - by keywords and subject. It also has several important features designed to improve - accessibility to the data and to foster collaboration between researchers in fields - related to bowed string synthesis, acoustics, and gesture.' - address: 'New York City, NY, United States' - author: 'Young, Diana and Deshmane, Anagha' - bibtex: "@inproceedings{Young2007,\n abstract = {This paper presents a newly created\ - \ database containing calibrated gesture and audio data corresponding to various\ - \ violin bowstrokes, as well as video and motion capture data in some cases. The\ - \ database is web-accessible and searchable by keywords and subject. 
It also has\ - \ several important features designed to improve accessibility to the data and\ - \ to foster collaboration between researchers in fields related to bowed string\ - \ synthesis, acoustics, and gesture.},\n address = {New York City, NY, United\ - \ States},\n author = {Young, Diana and Deshmane, Anagha},\n booktitle = {Proceedings\ + ID: Fyfe2011 + abstract: 'JunctionBox is a new software toolkit for creating multitouch interfaces + for controlling sound and music. Morespecifically, the toolkit has special features + which make iteasy to create TUIO-based touch interfaces for controllingsound engines + via Open Sound Control. Programmers using the toolkit have a great deal of freedom + to create highlycustomized interfaces that work on a variety of hardware.' + address: 'Oslo, Norway' + author: 'Fyfe, Lawrence and Tindale, Adam and Carpendale, Sheelagh' + bibtex: "@inproceedings{Fyfe2011,\n abstract = {JunctionBox is a new software toolkit\ + \ for creating multitouch interfaces for controlling sound and music. Morespecifically,\ + \ the toolkit has special features which make iteasy to create TUIO-based touch\ + \ interfaces for controllingsound engines via Open Sound Control. 
Programmers\ + \ using the toolkit have a great deal of freedom to create highlycustomized interfaces\ + \ that work on a variety of hardware.},\n address = {Oslo, Norway},\n author =\ + \ {Fyfe, Lawrence and Tindale, Adam and Carpendale, Sheelagh},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179481},\n issn = {2220-4806},\n keywords = {violin,\ - \ bowed string, bowstroke, bowing, bowing parameters, technique, gesture, audio\ - \ },\n pages = {352--357},\n title = {Bowstroke Database : A Web-Accessible Archive\ - \ of Violin Bowing Data},\n url = {http://www.nime.org/proceedings/2007/nime2007_352.pdf},\n\ - \ year = {2007}\n}\n" + \ doi = {10.5281/zenodo.1178021},\n issn = {2220-4806},\n keywords = {Multi-touch,\ + \ Open Sound Control, Toolkit, TUIO },\n pages = {276--279},\n title = {JunctionBox\ + \ : A Toolkit for Creating Multi-touch Sound Control Interfaces},\n url = {http://www.nime.org/proceedings/2011/nime2011_276.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179481 + doi: 10.5281/zenodo.1178021 issn: 2220-4806 - keywords: 'violin, bowed string, bowstroke, bowing, bowing parameters, technique, - gesture, audio ' - pages: 352--357 - title: 'Bowstroke Database : A Web-Accessible Archive of Violin Bowing Data' - url: http://www.nime.org/proceedings/2007/nime2007_352.pdf - year: 2007 + keywords: 'Multi-touch, Open Sound Control, Toolkit, TUIO ' + pages: 276--279 + title: 'JunctionBox : A Toolkit for Creating Multi-touch Sound Control Interfaces' + url: http://www.nime.org/proceedings/2011/nime2011_276.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Schacher2007 - abstract: 'This paper presents a methodology and a set of tools for gesture control - of sources in 3D surround sound. 
The techniques for rendering acoustic events - on multi-speaker or headphone-based surround systems have evolved considerably, - making it possible to use them in real-time performances on light equipment. Controlling - the placement of sound sources is usually done in idiosyncratic ways and has not - yet been fully explored and formalized. This issue is addressed here with the - proposition of a methodical approach. The mapping of gestures to source motion - is implemented by giving the sources physical object properties and manipulating - these characteristics with standard geometrical transforms through hierarchical - or emergent relationships. ' - address: 'New York City, NY, United States' - author: 'Schacher, Jan C.' - bibtex: "@inproceedings{Schacher2007,\n abstract = {This paper presents a methodology\ - \ and a set of tools for gesture control of sources in 3D surround sound. The\ - \ techniques for rendering acoustic events on multi-speaker or headphone-based\ - \ surround systems have evolved considerably, making it possible to use them in\ - \ real-time performances on light equipment. Controlling the placement of sound\ - \ sources is usually done in idiosyncratic ways and has not yet been fully explored\ - \ and formalized. This issue is addressed here with the proposition of a methodical\ - \ approach. The mapping of gestures to source motion is implemented by giving\ - \ the sources physical object properties and manipulating these characteristics\ - \ with standard geometrical transforms through hierarchical or emergent relationships.\ - \ },\n address = {New York City, NY, United States},\n author = {Schacher, Jan\ - \ C.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177241},\n issn = {2220-4806},\n\ - \ keywords = {Gesture, Surround Sound, Mapping, Trajectory, Transform Matrix,\ - \ Tree Hierarchy, Emergent Structures. 
},\n pages = {358--362},\n title = {Gesture\ - \ Control of Sounds in {3D} Space},\n url = {http://www.nime.org/proceedings/2007/nime2007_358.pdf},\n\ - \ year = {2007}\n}\n" + ID: Johnston2011 + abstract: 'This paper presents an approach to practice-based researchin new musical + instrument design. At a high level, the process involves drawing on relevant theories + and aesthetic approaches to design new instruments, attempting to identify relevant + applied design criteria, and then examiningthe experiences of performers who use + the instruments withparticular reference to these criteria. Outcomes of this process + include new instruments, theories relating to musicianinstrument interaction and + a set of design criteria informedby practice and research.' + address: 'Oslo, Norway' + author: 'Johnston, Andrew' + bibtex: "@inproceedings{Johnston2011,\n abstract = {This paper presents an approach\ + \ to practice-based researchin new musical instrument design. At a high level,\ + \ the process involves drawing on relevant theories and aesthetic approaches to\ + \ design new instruments, attempting to identify relevant applied design criteria,\ + \ and then examiningthe experiences of performers who use the instruments withparticular\ + \ reference to these criteria. 
Outcomes of this process include new instruments,\ + \ theories relating to musicianinstrument interaction and a set of design criteria\ + \ informedby practice and research.},\n address = {Oslo, Norway},\n author = {Johnston,\ + \ Andrew},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178053},\n issn = {2220-4806},\n\ + \ keywords = {practice-based research, evaluation, Human-Computer Interaction,\ + \ research methods, user studies },\n pages = {280--283},\n title = {Beyond Evaluation\ + \ : Linking Practice and Theory in New Musical Interface Design},\n url = {http://www.nime.org/proceedings/2011/nime2011_280.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177241 + doi: 10.5281/zenodo.1178053 issn: 2220-4806 - keywords: 'Gesture, Surround Sound, Mapping, Trajectory, Transform Matrix, Tree - Hierarchy, Emergent Structures. ' - pages: 358--362 - title: Gesture Control of Sounds in 3D Space - url: http://www.nime.org/proceedings/2007/nime2007_358.pdf - year: 2007 + keywords: 'practice-based research, evaluation, Human-Computer Interaction, research + methods, user studies ' + pages: 280--283 + title: 'Beyond Evaluation : Linking Practice and Theory in New Musical Interface + Design' + url: http://www.nime.org/proceedings/2011/nime2011_280.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Porres2007 - abstract: 'This work presents an interactive device to control an adaptive tuning - and synthesis system. The gestural controller is based on the theremin concept - in which only an antenna is used as a proximity sensor. This interactive process - is guided by sensorial consonance curves and adaptive tuning related to psychoacoustical - studies. We used an algorithm to calculate the dissonance values according to - amplitudes and frequencies of a given sound spectrum. 
The theoretical background - is presented followed by interactive composition strategies and sound results. ' - address: 'New York City, NY, United States' - author: 'Porres, Alexandre T. and Manzolli, Jonatas' - bibtex: "@inproceedings{Porres2007,\n abstract = {This work presents an interactive\ - \ device to control an adaptive tuning and synthesis system. The gestural controller\ - \ is based on the theremin concept in which only an antenna is used as a proximity\ - \ sensor. This interactive process is guided by sensorial consonance curves and\ - \ adaptive tuning related to psychoacoustical studies. We used an algorithm to\ - \ calculate the dissonance values according to amplitudes and frequencies of a\ - \ given sound spectrum. The theoretical background is presented followed by interactive\ - \ composition strategies and sound results. },\n address = {New York City, NY,\ - \ United States},\n author = {Porres, Alexandre T. and Manzolli, Jonatas},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177223},\n issn = {2220-4806},\n keywords\ - \ = {Interaction, adaptive tuning, theremin, sensorial dissonance, synthesis.\ - \ },\n pages = {363--366},\n title = {Adaptive Tuning Using Theremin as Gestural\ - \ Controller},\n url = {http://www.nime.org/proceedings/2007/nime2007_363.pdf},\n\ - \ year = {2007}\n}\n" + ID: Popp2011 + address: 'Oslo, Norway' + author: 'Popp, Phillip and Wright, Matthew' + bibtex: "@inproceedings{Popp2011,\n address = {Oslo, Norway},\n author = {Popp,\ + \ Phillip and Wright, Matthew},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178139},\n\ + \ issn = {2220-4806},\n keywords = {Spectral Model Synthesis, Gesture Recognition,\ + \ Synthesis Control, Wacom Tablet, Machine Learning },\n pages = {284--287},\n\ + \ title = {Intuitive Real-Time Control of Spectral Model 
Synthesis},\n url = {http://www.nime.org/proceedings/2011/nime2011_284.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177223 + doi: 10.5281/zenodo.1178139 issn: 2220-4806 - keywords: 'Interaction, adaptive tuning, theremin, sensorial dissonance, synthesis. ' - pages: 363--366 - title: Adaptive Tuning Using Theremin as Gestural Controller - url: http://www.nime.org/proceedings/2007/nime2007_363.pdf - year: 2007 + keywords: 'Spectral Model Synthesis, Gesture Recognition, Synthesis Control, Wacom + Tablet, Machine Learning ' + pages: 284--287 + title: Intuitive Real-Time Control of Spectral Model Synthesis + url: http://www.nime.org/proceedings/2011/nime2011_284.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Hsu2007 - abstract: 'In previous publications (see for example [2] and [3]), we described - an interactive music system, designed to improvise with saxophonist John Butcher; - our system analyzes timbral and gestural features in real-time, and uses this - information to guide response generation. This paper overviews our recent work - with the system''s interaction management component (IMC). We explore several - options for characterizing improvisation at a higher level, and managing decisions - for interactive performance in a rich timbral environment. We developed a simple, - efficient framework using a small number of features suggested by recent work - in mood modeling in music. We describe and evaluate the first version of the IMC, - which was used in performance at the Live Algorithms for Music (LAM) conference - in December 2006. We touch on developments on the system since LAM, and discuss - future plans to address perceived shortcomings in responsiveness, and the ability - of the system to make long-term adaptations. 
' - address: 'New York City, NY, United States' - author: 'Hsu, William' - bibtex: "@inproceedings{Hsu2007,\n abstract = {In previous publications (see for\ - \ example [2] and [3]), we described an interactive music system, designed to\ - \ improvise with saxophonist John Butcher; our system analyzes timbral and gestural\ - \ features in real-time, and uses this information to guide response generation.\ - \ This paper overviews our recent work with the system's interaction management\ - \ component (IMC). We explore several options for characterizing improvisation\ - \ at a higher level, and managing decisions for interactive performance in a rich\ - \ timbral environment. We developed a simple, efficient framework using a small\ - \ number of features suggested by recent work in mood modeling in music. We describe\ - \ and evaluate the first version of the IMC, which was used in performance at\ - \ the Live Algorithms for Music (LAM) conference in December 2006. We touch on\ - \ developments on the system since LAM, and discuss future plans to address perceived\ - \ shortcomings in responsiveness, and the ability of the system to make long-term\ - \ adaptations. },\n address = {New York City, NY, United States},\n author = {Hsu,\ - \ William},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177123},\n issn\ - \ = {2220-4806},\n keywords = {Interactive music systems, timbral analysis, free\ - \ improvisation. },\n pages = {367--370},\n title = {Design Issues in Interaction\ - \ Modeling for Free Improvisation},\n url = {http://www.nime.org/proceedings/2007/nime2007_367.pdf},\n\ - \ year = {2007}\n}\n" + ID: Molina2011 + abstract: 'We present BeatJockey, a prototype interface which makesuse of Audio + Mosaicing (AM), beat-tracking and machinelearning techniques, for supporting Diskjockeys + (DJs) byproposing them new ways of interaction with the songs onthe DJ''s playlist. 
+ This prototype introduces a new paradigmto DJing in which the user has the capability + to mix songsinteracting with beat-units that accompany the DJ''s mix.For this + type of interaction, the system suggests song slicestaken from songs selected + from a playlist, which could gowell with the beats of whatever master song is + being played.In addition the system allows the synchronization of multiple songs, + thus permitting flexible, coherent and rapid progressions in the DJ''s mix. BeatJockey + uses the Reactable,a musical tangible user interface (TUI), and it has beendesigned + to be used by all DJs regardless of their level ofexpertise, as the system helps + the novice while bringing newcreative opportunities to the expert.' + address: 'Oslo, Norway' + author: 'Molina, Pablo and Haro, Martín and Jordà, Sergi' + bibtex: "@inproceedings{Molina2011,\n abstract = {We present BeatJockey, a prototype\ + \ interface which makesuse of Audio Mosaicing (AM), beat-tracking and machinelearning\ + \ techniques, for supporting Diskjockeys (DJs) byproposing them new ways of interaction\ + \ with the songs onthe DJ's playlist. This prototype introduces a new paradigmto\ + \ DJing in which the user has the capability to mix songsinteracting with beat-units\ + \ that accompany the DJ's mix.For this type of interaction, the system suggests\ + \ song slicestaken from songs selected from a playlist, which could gowell with\ + \ the beats of whatever master song is being played.In addition the system allows\ + \ the synchronization of multiple songs, thus permitting flexible, coherent and\ + \ rapid progressions in the DJ's mix. 
BeatJockey uses the Reactable,a musical\ + \ tangible user interface (TUI), and it has beendesigned to be used by all DJs\ + \ regardless of their level ofexpertise, as the system helps the novice while\ + \ bringing newcreative opportunities to the expert.},\n address = {Oslo, Norway},\n\ + \ author = {Molina, Pablo and Haro, Mart\\'{\\i}n and Jord\\`{a}, Sergi},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178113},\n issn = {2220-4806},\n keywords\ + \ = {DJ, music information retrieval, audio mosaicing, percussion, turntable,\ + \ beat-mash, interactive music interfaces, realtime, tabletop interaction, reactable.\ + \ },\n pages = {288--291},\n title = {BeatJockey : A New Tool for Enhancing DJ\ + \ Skills},\n url = {http://www.nime.org/proceedings/2011/nime2011_288.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177123 + doi: 10.5281/zenodo.1178113 issn: 2220-4806 - keywords: 'Interactive music systems, timbral analysis, free improvisation. ' - pages: 367--370 - title: Design Issues in Interaction Modeling for Free Improvisation - url: http://www.nime.org/proceedings/2007/nime2007_367.pdf - year: 2007 + keywords: 'DJ, music information retrieval, audio mosaicing, percussion, turntable, + beat-mash, interactive music interfaces, realtime, tabletop interaction, reactable. ' + pages: 288--291 + title: 'BeatJockey : A New Tool for Enhancing DJ Skills' + url: http://www.nime.org/proceedings/2011/nime2011_288.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Groux2007 - abstract: 'Until recently, the sonification of Virtual Environments had often been - reduced to its simplest expression. Too often soundscapes and background music - are predetermined, repetitive and somewhat predictable. 
Yet, there is room for - more complex and interesting sonification schemes that can improve the sensation - of presence in a Virtual Environment. In this paper we propose a system that automatically - generates original background music in real-time called VR-RoBoser. As a test - case we present the application of VR-RoBoser to a dynamic avatar that explores - its environment. We show that the musical events are directly and continuously - generated and influenced by the behavior of the avatar in three-dimensional virtual - space, generating a context dependent sonification. ' - address: 'New York City, NY, United States' - author: 'le Groux, Sylvain and Manzolli, Jonatas and Verschure, Paul F.' - bibtex: "@inproceedings{Groux2007,\n abstract = {Until recently, the sonification\ - \ of Virtual Environments had often been reduced to its simplest expression. Too\ - \ often soundscapes and background music are predetermined, repetitive and somewhat\ - \ predictable. Yet, there is room for more complex and interesting sonification\ - \ schemes that can improve the sensation of presence in a Virtual Environment.\ - \ In this paper we propose a system that automatically generates original background\ - \ music in real-time called VR-RoBoser. As a test case we present the application\ - \ of VR-RoBoser to a dynamic avatar that explores its environment. We show that\ - \ the musical events are directly and continuously generated and influenced by\ - \ the behavior of the avatar in three-dimensional virtual space, generating a\ - \ context dependent sonification. 
},\n address = {New York City, NY, United States},\n\ - \ author = {le Groux, Sylvain and Manzolli, Jonatas and Verschure, Paul F.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177101},\n issn = {2220-4806},\n\ - \ keywords = {Real-time Composition, Interactive Sonification, Real-time Neural\ - \ Processing, Multimedia, Virtual Environment, Avatar. },\n pages = {371--374},\n\ - \ title = {VR-RoBoser : Real-Time Adaptive Sonification of Virtual Environments\ - \ Based on Avatar Behavior},\n url = {http://www.nime.org/proceedings/2007/nime2007_371.pdf},\n\ - \ year = {2007}\n}\n" + ID: Schacher2011 + abstract: 'In this paper the relationship between body, motion and sound is addressed. + The comparison with traditional instruments and dance is shown with regards to + basic types of motion. The difference between gesture and movement is outlined + and some of the models used in dance for structuring motion sequences are described. + In order to identify expressive aspects of motion sequences a test scenario is + devised. After the description of the methods and tools used in a series of measurements, + two types of data-display are shown and the applied in the interpretation. One + salient feature is recognized and put into perspective with regards to movement + and gestalt perception. Finally the merits of the technical means that were applied + are compared and a model-based approach to motion-sound mapping is proposed. ' + address: 'Oslo, Norway' + author: 'Schacher, Jan C. and Stoecklin, Angela' + bibtex: "@inproceedings{Schacher2011,\n abstract = {In this paper the relationship\ + \ between body, motion and sound is addressed. The comparison with traditional\ + \ instruments and dance is shown with regards to basic types of motion. 
The difference\ + \ between gesture and movement is outlined and some of the models used in dance\ + \ for structuring motion sequences are described. In order to identify expressive\ + \ aspects of motion sequences a test scenario is devised. After the description\ + \ of the methods and tools used in a series of measurements, two types of data-display\ + \ are shown and the applied in the interpretation. One salient feature is recognized\ + \ and put into perspective with regards to movement and gestalt perception. Finally\ + \ the merits of the technical means that were applied are compared and a model-based\ + \ approach to motion-sound mapping is proposed. },\n address = {Oslo, Norway},\n\ + \ author = {Schacher, Jan C. and Stoecklin, Angela},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178149},\n issn = {2220-4806},\n keywords = {Interactive\ + \ Dance, Motion and Gesture, Sonification, Motion Perception, Mapping },\n pages\ + \ = {292--295},\n title = {Traces -- Body, Motion and Sound},\n url = {http://www.nime.org/proceedings/2011/nime2011_292.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177101 + doi: 10.5281/zenodo.1178149 issn: 2220-4806 - keywords: 'Real-time Composition, Interactive Sonification, Real-time Neural Processing, - Multimedia, Virtual Environment, Avatar. 
' - pages: 371--374 - title: 'VR-RoBoser : Real-Time Adaptive Sonification of Virtual Environments Based - on Avatar Behavior' - url: http://www.nime.org/proceedings/2007/nime2007_371.pdf - year: 2007 + keywords: 'Interactive Dance, Motion and Gesture, Sonification, Motion Perception, + Mapping ' + pages: 292--295 + title: 'Traces -- Body, Motion and Sound' + url: http://www.nime.org/proceedings/2011/nime2011_292.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Steiner2007 - abstract: 'In this paper we discuss our progress on the HID toolkit, a collection - of software modules for the Pure Data and Max/MSP programming environments that - provide unified, user-friendly and cross-platform access to human interface devices - (HIDs) such as joysticks, digitizer tablets, and stomp-pads. These HIDs are ubiquitous, - inexpensive and capable of sensing a wide range of human gesture, making them - appealing interfaces for interactive media control. However, it is difficult to - utilize many of these devices for custom-made applications, particularly for novices. - The modules we discuss in this paper are [hidio], which handles incoming and outgoing - data between a patch and a HID, and [input noticer], which monitors HID plug/unplug - events. The goal in creating these modules is to preserve maximal flexibility - in accessing the input and output capabilities of HIDs, in a manner that is ap- - proachable for both sophisticated and beginning designers. This paper documents - our design notes and implementa- tion considerations, current progress, and ideas - for future extensions to the HID toolkit.' 
- address: 'New York City, NY, United States' - author: 'Steiner, Hans-Christoph and Merrill, David and Matthes, Olaf' - bibtex: "@inproceedings{Steiner2007,\n abstract = {In this paper we discuss our\ - \ progress on the HID toolkit, a collection of software modules for the Pure Data\ - \ and Max/MSP programming environments that provide unified, user-friendly and\ - \ cross-platform access to human interface devices (HIDs) such as joysticks, digitizer\ - \ tablets, and stomp-pads. These HIDs are ubiquitous, inexpensive and capable\ - \ of sensing a wide range of human gesture, making them appealing interfaces for\ - \ interactive media control. However, it is difficult to utilize many of these\ - \ devices for custom-made applications, particularly for novices. The modules\ - \ we discuss in this paper are [hidio], which handles incoming and outgoing data\ - \ between a patch and a HID, and [input noticer], which monitors HID plug/unplug\ - \ events. The goal in creating these modules is to preserve maximal flexibility\ - \ in accessing the input and output capabilities of HIDs, in a manner that is\ - \ ap- proachable for both sophisticated and beginning designers. 
This paper documents\ - \ our design notes and implementa- tion considerations, current progress, and\ - \ ideas for future extensions to the HID toolkit.},\n address = {New York City,\ - \ NY, United States},\n author = {Steiner, Hans-Christoph and Merrill, David and\ - \ Matthes, Olaf},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177251},\n\ - \ issn = {2220-4806},\n keywords = {nime07},\n pages = {375--378},\n title = {A\ - \ Unified Toolkit for Accessing Human Interface Devices in Pure Data and Max /\ - \ MSP},\n url = {http://www.nime.org/proceedings/2007/nime2007_375.pdf},\n year\ - \ = {2007}\n}\n" + ID: Leslie2011 + abstract: MoodMixer is an interactive installation in which participants collaboratively + navigate a two-dimensional music spaceby manipulating their cognitive state and + conveying thisstate via wearable Electroencephalography (EEG) technology. The + participants can choose to actively manipulateor passively convey their cognitive + state depending on theirdesired approach and experience level. A four-channel + electronic music mixture continuously conveys the participants'expressed cognitive + states while a colored visualization oftheir locations on a two-dimensional projection + of cognitive state attributes aids their navigation through the space.MoodMixer + is a collaborative experience that incorporatesaspects of both passive and active + EEG sonification andperformance art. We discuss the technical design of the installation + and place its collaborative sonification aestheticdesign within the context of + existing EEG-based music andart. 
+ address: 'Oslo, Norway' + author: 'Leslie, Grace and Mullen, Tim' + bibtex: "@inproceedings{Leslie2011,\n abstract = {MoodMixer is an interactive installation\ + \ in which participants collaboratively navigate a two-dimensional music spaceby\ + \ manipulating their cognitive state and conveying thisstate via wearable Electroencephalography\ + \ (EEG) technology. The participants can choose to actively manipulateor passively\ + \ convey their cognitive state depending on theirdesired approach and experience\ + \ level. A four-channel electronic music mixture continuously conveys the participants'expressed\ + \ cognitive states while a colored visualization oftheir locations on a two-dimensional\ + \ projection of cognitive state attributes aids their navigation through the space.MoodMixer\ + \ is a collaborative experience that incorporatesaspects of both passive and active\ + \ EEG sonification andperformance art. We discuss the technical design of the\ + \ installation and place its collaborative sonification aestheticdesign within\ + \ the context of existing EEG-based music andart.},\n address = {Oslo, Norway},\n\ + \ author = {Leslie, Grace and Mullen, Tim},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178089},\n issn = {2220-4806},\n keywords = {EEG, BCMI, collaboration,\ + \ sonification, visualization },\n pages = {296--299},\n title = {MoodMixer :\ + \ {EEG}-based Collaborative Sonification},\n url = {http://www.nime.org/proceedings/2011/nime2011_296.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177251 + doi: 10.5281/zenodo.1178089 issn: 2220-4806 - keywords: nime07 - pages: 375--378 - title: A Unified Toolkit for Accessing Human Interface Devices in Pure Data and - Max / MSP - url: http://www.nime.org/proceedings/2007/nime2007_375.pdf - year: 2007 + keywords: 
'EEG, BCMI, collaboration, sonification, visualization ' + pages: 296--299 + title: 'MoodMixer : EEG-based Collaborative Sonification' + url: http://www.nime.org/proceedings/2011/nime2011_296.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Nort2007 - abstract: 'This paper describes musical experiments aimed at designing control structures - for navigating complex and continuous sonic spaces. The focus is on sound processing - techniques which contain a high number of control parameters,and which exhibit - subtle and interesting micro-variationsand textural qualities when controlled - properly. The examples all use a simple low-dimensional controller --- a standard - graphics tablet --- and the task of initimate and subtle textural manipulations - is left to the design of proper mappings,created using a custom toolbox of mapping - functions. Thiswork further acts to contextualize past theoretical results bythe - given musical presentations, and arrives at some conclusions about the interplay - between musical intention, controlstrategies and the process of their design.' - address: 'New York City, NY, United States' - author: 'Van Nort, Doug and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Nort2007,\n abstract = {This paper describes musical experiments\ - \ aimed at designing control structures for navigating complex and continuous\ - \ sonic spaces. The focus is on sound processing techniques which contain a high\ - \ number of control parameters,and which exhibit subtle and interesting micro-variationsand\ - \ textural qualities when controlled properly. The examples all use a simple low-dimensional\ - \ controller --- a standard graphics tablet --- and the task of initimate and\ - \ subtle textural manipulations is left to the design of proper mappings,created\ - \ using a custom toolbox of mapping functions. 
Thiswork further acts to contextualize\ - \ past theoretical results bythe given musical presentations, and arrives at some\ - \ conclusions about the interplay between musical intention, controlstrategies\ - \ and the process of their design.},\n address = {New York City, NY, United States},\n\ - \ author = {Van Nort, Doug and Wanderley, Marcelo M.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179469},\n issn = {2220-4806},\n keywords = {Mapping,\ - \ Control, Sound Texture, Musical Gestures },\n pages = {379--383},\n title =\ - \ {Control Strategies for Navigation of Complex Sonic Spaces Transformation of\ - \ Resonant Models},\n url = {http://www.nime.org/proceedings/2007/nime2007_379.pdf},\n\ - \ year = {2007}\n}\n" + ID: Skogstad2011 + abstract: 'The paper presents research about implementing a full body inertial motion + capture system, the Xsens MVN suit, for musical interaction. Three different approaches + for stream-ing real time and prerecorded motion capture data with Open Sound Control + have been implemented. Furthermore, we present technical performance details and + our experience with the motion capture system in realistic practice.' + address: 'Oslo, Norway' + author: 'Skogstad, Ståle A. and de Quay, Yago and Jensenius, Alexander Refsum' + bibtex: "@inproceedings{Skogstad2011,\n abstract = {The paper presents research\ + \ about implementing a full body inertial motion capture system, the Xsens MVN\ + \ suit, for musical interaction. Three different approaches for stream-ing real\ + \ time and prerecorded motion capture data with Open Sound Control have been implemented.\ + \ Furthermore, we present technical performance details and our experience with\ + \ the motion capture system in realistic practice.},\n address = {Oslo, Norway},\n\ + \ author = {Skogstad, Ståle A. 
and de Quay, Yago and Jensenius, Alexander Refsum},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178165},\n issn = {2220-4806},\n\ + \ pages = {300--303},\n title = {OSC Implementation and Evaluation of the Xsens\ + \ MVN Suit},\n url = {http://www.nime.org/proceedings/2011/nime2011_300.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179469 + doi: 10.5281/zenodo.1178165 issn: 2220-4806 - keywords: 'Mapping, Control, Sound Texture, Musical Gestures ' - pages: 379--383 - title: Control Strategies for Navigation of Complex Sonic Spaces Transformation - of Resonant Models - url: http://www.nime.org/proceedings/2007/nime2007_379.pdf - year: 2007 + pages: 300--303 + title: OSC Implementation and Evaluation of the Xsens MVN Suit + url: http://www.nime.org/proceedings/2011/nime2011_300.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Knorig2007 - abstract: 'In this paper we present the concept and prototype of a new musical interface - that utilizes the close relationship between gestural expression in the act of - painting and that of playing a musical instrument in order to provide non-musicians - the opportunity to create musical expression. A physical brush on a canvas acts - as the instrument. The characteristics of its stroke are intuitively mapped to - a conductor program, defining expressive parameters of the tone in real-time. - Two different interaction modes highlight the importance of bodily expression - in making music as well as the value of a metaphorical visual representation.' 
- address: 'New York City, NY, United States' - author: 'Knörig, André and Müller, Boris and Wettach, Reto' - bibtex: "@inproceedings{Knorig2007,\n abstract = {In this paper we present the concept\ - \ and prototype of a new musical interface that utilizes the close relationship\ - \ between gestural expression in the act of painting and that of playing a musical\ - \ instrument in order to provide non-musicians the opportunity to create musical\ - \ expression. A physical brush on a canvas acts as the instrument. The characteristics\ - \ of its stroke are intuitively mapped to a conductor program, defining expressive\ - \ parameters of the tone in real-time. Two different interaction modes highlight\ - \ the importance of bodily expression in making music as well as the value of\ - \ a metaphorical visual representation.},\n address = {New York City, NY, United\ - \ States},\n author = {Kn\\''{o}rig, Andr\\'{e} and M\\''{u}ller, Boris and Wettach,\ - \ Reto},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177155},\n issn = {2220-4806},\n\ - \ keywords = {musical interface, musical expression, expressive gesture, musical\ - \ education, natural interface },\n pages = {384--385},\n title = {Articulated\ - \ Paint : Musical Expression for Non-Musicians},\n url = {http://www.nime.org/proceedings/2007/nime2007_384.pdf},\n\ - \ year = {2007}\n}\n" + ID: Wyse2011 + abstract: 'The goal of our research is to find ways of supporting and encouraging + musical behavior by non-musicians in shared public performance environments. Previous + studies indicated simultaneous music listening and performance is difficult for + non-musicians, and that visual support for the task might be helpful. This paper + presents results from a preliminary user study conducted to evaluate the effect + of visual feedback on a musical tracking task. 
Participants generated a musical + signal by manipulating a hand-held device with two dimensions of control over + two parameters, pitch and density of note events, and were given the task of following + a target pattern as closely as possible. The target pattern was a machine-generated + musical signal comprising of variation over the same two parameters. Visual feedback + provided participants with information about the control parameters of the musical + signal generated by the machine. We measured the task performance under different + visual feedback strategies. Results show that single parameter visualizations + tend to improve the tracking performance with respect to the visualized parameter, + but not the non-visualized parameter. Visualizing two independent parameters simultaneously + decreases performance in both dimensions. ' + address: 'Oslo, Norway' + author: 'Wyse, Lonce and Mitani, Norikazu and Nanayakkara, Suranga' + bibtex: "@inproceedings{Wyse2011,\n abstract = {The goal of our research is to find\ + \ ways of supporting and encouraging musical behavior by non-musicians in shared\ + \ public performance environments. Previous studies indicated simultaneous music\ + \ listening and performance is difficult for non-musicians, and that visual support\ + \ for the task might be helpful. This paper presents results from a preliminary\ + \ user study conducted to evaluate the effect of visual feedback on a musical\ + \ tracking task. Participants generated a musical signal by manipulating a hand-held\ + \ device with two dimensions of control over two parameters, pitch and density\ + \ of note events, and were given the task of following a target pattern as closely\ + \ as possible. The target pattern was a machine-generated musical signal comprising\ + \ of variation over the same two parameters. Visual feedback provided participants\ + \ with information about the control parameters of the musical signal generated\ + \ by the machine. 
We measured the task performance under different visual feedback\ + \ strategies. Results show that single parameter visualizations tend to improve\ + \ the tracking performance with respect to the visualized parameter, but not the\ + \ non-visualized parameter. Visualizing two independent parameters simultaneously\ + \ decreases performance in both dimensions. },\n address = {Oslo, Norway},\n author\ + \ = {Wyse, Lonce and Mitani, Norikazu and Nanayakkara, Suranga},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178191},\n issn = {2220-4806},\n keywords = {Mobile phone,\ + \ Interactive music performance, Listening, Group music play, Visual support },\n\ + \ pages = {304--307},\n title = {The Effect of Visualizing Audio Targets in a\ + \ Musical Listening and Performance Task},\n url = {http://www.nime.org/proceedings/2011/nime2011_304.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177155 + doi: 10.5281/zenodo.1178191 issn: 2220-4806 - keywords: 'musical interface, musical expression, expressive gesture, musical education, - natural interface ' - pages: 384--385 - title: 'Articulated Paint : Musical Expression for Non-Musicians' - url: http://www.nime.org/proceedings/2007/nime2007_384.pdf - year: 2007 + keywords: 'Mobile phone, Interactive music performance, Listening, Group music play, + Visual support ' + pages: 304--307 + title: The Effect of Visualizing Audio Targets in a Musical Listening and Performance + Task + url: http://www.nime.org/proceedings/2011/nime2011_304.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Baba2007 - abstract: 'Freqtric Drums is a new musical, corporal electronic instrument that - allows us not only to recover face-to-face communication, but also makes possible - body-to-body communication so that a self image based on the sense of being a - 
separate body can be signicant altered through an openness toand even a sense - of becoming part of another body. FreqtricDrums is a device that turns audiences - surrounding a performer into drums so that the performer, as a drummer, cancommunicate - with audience members as if they were a setof drums. We describe our concept and - the implementationand process of evolution of Freqtric Drums.' - address: 'New York City, NY, United States' - author: 'Baba, Tetsuaki and Ushiama, Taketoshi and Tomimatsu, Kiyoshi' - bibtex: "@inproceedings{Baba2007,\n abstract = {Freqtric Drums is a new musical,\ - \ corporal electronic instrument that allows us not only to recover face-to-face\ - \ communication, but also makes possible body-to-body communication so that a\ - \ self image based on the sense of being a separate body can be signicant altered\ - \ through an openness toand even a sense of becoming part of another body. FreqtricDrums\ - \ is a device that turns audiences surrounding a performer into drums so that\ - \ the performer, as a drummer, cancommunicate with audience members as if they\ - \ were a setof drums. 
We describe our concept and the implementationand process\ - \ of evolution of Freqtric Drums.},\n address = {New York City, NY, United States},\n\ - \ author = {Baba, Tetsuaki and Ushiama, Taketoshi and Tomimatsu, Kiyoshi},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177037},\n issn = {2220-4806},\n keywords\ - \ = {interpersonal communication, musical instrument, interaction design, skin\ - \ contact, touch },\n pages = {386--387},\n title = {Freqtric Drums : A Musical\ - \ Instrument that Uses Skin Contact as an Interface},\n url = {http://www.nime.org/proceedings/2007/nime2007_386.pdf},\n\ - \ year = {2007}\n}\n" + ID: Freed2011 + abstract: 'An effective programming style for gesture signal processing is described + using a new library that brings efficient run-time polymorphism, functional and + instance-based object-oriented programming to Max/MSP. By introducing better support + for generic programming and composability Max/MSP becomes a more productive environment + for managing the growing scale and complexity of gesture sensing systems for musical + instruments and interactive installations. ' + address: 'Oslo, Norway' + author: 'Freed, Adrian and MacCallum, John and Schmeder, Andrew' + bibtex: "@inproceedings{Freed2011,\n abstract = {An effective programming style\ + \ for gesture signal processing is described using a new library that brings efficient\ + \ run-time polymorphism, functional and instance-based object-oriented programming\ + \ to Max/MSP. By introducing better support for generic programming and composability\ + \ Max/MSP becomes a more productive environment for managing the growing scale\ + \ and complexity of gesture sensing systems for musical instruments and interactive\ + \ installations. 
},\n address = {Oslo, Norway},\n author = {Freed, Adrian and\ + \ MacCallum, John and Schmeder, Andrew},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178015},\n\ + \ issn = {2220-4806},\n keywords = {composability,delegation,functional programming,gesture\ + \ signal,max,msp,object,object-,open sound control,oriented programming,processing},\n\ + \ pages = {308--311},\n title = {Composability for Musical Gesture Signal Processing\ + \ using new OSC-based Object and Functional Programming Extensions to Max/MSP},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_308.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177037 + doi: 10.5281/zenodo.1178015 issn: 2220-4806 - keywords: 'interpersonal communication, musical instrument, interaction design, - skin contact, touch ' - pages: 386--387 - title: 'Freqtric Drums : A Musical Instrument that Uses Skin Contact as an Interface' - url: http://www.nime.org/proceedings/2007/nime2007_386.pdf - year: 2007 + keywords: 'composability,delegation,functional programming,gesture signal,max,msp,object,object-,open + sound control,oriented programming,processing' + pages: 308--311 + title: Composability for Musical Gesture Signal Processing using new OSC-based Object + and Functional Programming Extensions to Max/MSP + url: http://www.nime.org/proceedings/2011/nime2011_308.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Han2007 - abstract: 'Project Scriabin is an interactive implementation of Alexander Scriabin’s - experimentation with “opposite mapping direction”, that is, mapping from hue (colour) - to pitch (sound). Main colour to sound coding was implemented by Scriabin’s colour - scale.' 
- address: 'New York City, NY, United States' - author: 'Han, Chang Min' - bibtex: "@inproceedings{Han2007,\n abstract = {Project Scriabin is an interactive\ - \ implementation of Alexander Scriabin’s experimentation with “opposite mapping\ - \ direction”, that is, mapping from hue (colour) to pitch (sound). Main colour\ - \ to sound coding was implemented by Scriabin’s colour scale.},\n address = {New\ - \ York City, NY, United States},\n author = {Han, Chang Min},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177109},\n issn = {2220-4806},\n keywords = {Synaesthesia,\ - \ Sonification, Touch Screen},\n pages = {388--389},\n title = {Project Scriabin\ - \ v.3},\n url = {http://www.nime.org/proceedings/2007/nime2007_388.pdf},\n year\ - \ = {2007}\n}\n" + ID: Nymoen2011 + abstract: 'The paper presents the SoundSaber-a musical instrument based on motion + capture technology. We present technical details of the instrument and discuss + the design development process. The SoundSaber may be used as an example of how + high-fidelity motion capture equipment can be used for prototyping musical instruments, + and we illustrate this with an example of a low-cost implementation of our motion + capture instrument.' + address: 'Oslo, Norway' + author: 'Nymoen, Kristian and Skogstad, Ståle A. and Jensenius, Alexander Refsum' + bibtex: "@inproceedings{Nymoen2011,\n abstract = {The paper presents the SoundSaber-a\ + \ musical instrument based on motion capture technology. We present technical\ + \ details of the instrument and discuss the design development process. 
The SoundSaber\ + \ may be used as an example of how high-fidelity motion capture equipment can\ + \ be used for prototyping musical instruments, and we illustrate this with an\ + \ example of a low-cost implementation of our motion capture instrument.},\n address\ + \ = {Oslo, Norway},\n author = {Nymoen, Kristian and Skogstad, Ståle A. and Jensenius,\ + \ Alexander Refsum},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178125},\n\ + \ issn = {2220-4806},\n pages = {312--315},\n title = {SoundSaber -- A Motion\ + \ Capture Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_312.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177109 + doi: 10.5281/zenodo.1178125 issn: 2220-4806 - keywords: 'Synaesthesia, Sonification, Touch Screen' - pages: 388--389 - title: Project Scriabin v.3 - url: http://www.nime.org/proceedings/2007/nime2007_388.pdf - year: 2007 + pages: 312--315 + title: SoundSaber -- A Motion Capture Instrument + url: http://www.nime.org/proceedings/2011/nime2011_312.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Castellano2007 - abstract: 'In this paper we describe a system which allows users to use their full-body - for controlling in real-time the generation of an expressive audio-visual feedback. - The system extracts expressive motion features from the user''s full-body movements - and gestures. The values of these motion features are mapped both onto acoustic - parameters for the real-time expressive rendering of a piece of music, and onto - real-time generated visual feedback projected on a screen in front of the user. 
' - address: 'New York City, NY, United States' - author: 'Castellano, Ginevra and Bresin, Roberto and Camurri, Antonio and Volpe, - Gualtiero' - bibtex: "@inproceedings{Castellano2007,\n abstract = {In this paper we describe\ - \ a system which allows users to use their full-body for controlling in real-time\ - \ the generation of an expressive audio-visual feedback. The system extracts expressive\ - \ motion features from the user's full-body movements and gestures. The values\ - \ of these motion features are mapped both onto acoustic parameters for the real-time\ - \ expressive rendering of a piece of music, and onto real-time generated visual\ - \ feedback projected on a screen in front of the user. },\n address = {New York\ - \ City, NY, United States},\n author = {Castellano, Ginevra and Bresin, Roberto\ - \ and Camurri, Antonio and Volpe, Gualtiero},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177065},\n issn = {2220-4806},\n keywords = {Expressive interaction;\ - \ multimodal environments; interactive music systems },\n pages = {390--391},\n\ - \ title = {Expressive Control of Music and Visual Media by Full-Body Movement},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_390.pdf},\n year = {2007}\n\ - }\n" + ID: Brandtsegg2011 + abstract: 'The article describes a flexible mapping technique realized as a many-to-many + dynamic mapping matrix. Digital sound generation is typically controlled by a + large number of parameters and efficient and flexible mapping is necessary to + provide expressive control over the instrument. The proposed modulation matrix + technique may be seen as a generic and selfmodifying mapping mechanism integrated + in a dynamic interpolation scheme. It is implemented efficiently by taking advantage + of its inherent sparse matrix structure. 
The modulation matrix is used within + the Hadron Particle Synthesizer, a complex granular module with 200 synthesis + parameters and a simplified performance control structure with 4 expression parameters. ' + address: 'Oslo, Norway' + author: 'Brandtsegg, Öyvind and Saue, Sigurd and Johansen, Thom' + bibtex: "@inproceedings{Brandtsegg2011,\n abstract = {The article describes a flexible\ + \ mapping technique realized as a many-to-many dynamic mapping matrix. Digital\ + \ sound generation is typically controlled by a large number of parameters and\ + \ efficient and flexible mapping is necessary to provide expressive control over\ + \ the instrument. The proposed modulation matrix technique may be seen as a generic\ + \ and selfmodifying mapping mechanism integrated in a dynamic interpolation scheme.\ + \ It is implemented efficiently by taking advantage of its inherent sparse matrix\ + \ structure. The modulation matrix is used within the Hadron Particle Synthesizer,\ + \ a complex granular module with 200 synthesis parameters and a simplified performance\ + \ control structure with 4 expression parameters. 
},\n address = {Oslo, Norway},\n\ + \ author = {Brandtsegg, \\''{O}yvind and Saue, Sigurd and Johansen, Thom},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177969},\n issn = {2220-4806},\n keywords\ + \ = {Mapping, granular synthesis, modulation, live performance },\n pages = {316--319},\n\ + \ title = {A Modulation Matrix for Complex Parameter Sets},\n url = {http://www.nime.org/proceedings/2011/nime2011_316.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177065 + doi: 10.5281/zenodo.1177969 issn: 2220-4806 - keywords: 'Expressive interaction; multimodal environments; interactive music systems ' - pages: 390--391 - title: Expressive Control of Music and Visual Media by Full-Body Movement - url: http://www.nime.org/proceedings/2007/nime2007_390.pdf - year: 2007 + keywords: 'Mapping, granular synthesis, modulation, live performance ' + pages: 316--319 + title: A Modulation Matrix for Complex Parameter Sets + url: http://www.nime.org/proceedings/2011/nime2011_316.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Tindale2007 - abstract: This paper describes a hybrid method to allow drummers to expressively - utilize electronics. Commercial electronic drum hardware is made more expressive - by replacing the sample playback “drum brain” with a physical modeling algorithm - implemented in Max/MSP. Timbre recognition techniques identify striking implement - and location as symbolic data that can be used to modify the parameters of the - physical model. - address: 'New York City, NY, United States' - author: 'Tindale, Adam R.' - bibtex: "@inproceedings{Tindale2007,\n abstract = {This paper describes a hybrid\ - \ method to allow drummers to expressively utilize electronics. 
Commercial electronic\ - \ drum hardware is made more expressive by replacing the sample playback “drum\ - \ brain” with a physical modeling algorithm implemented in Max/MSP. Timbre recognition\ - \ techniques identify striking implement and location as symbolic data that can\ - \ be used to modify the parameters of the physical model.},\n address = {New York\ - \ City, NY, United States},\n author = {Tindale, Adam R.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177259},\n issn = {2220-4806},\n keywords = {electronic\ - \ percussion,nime07,physical modeling,timbre recognition},\n pages = {392--393},\n\ - \ title = {A Hybrid Method for Extended Percussive Gesture},\n url = {http://www.nime.org/proceedings/2007/nime2007_392.pdf},\n\ - \ year = {2007}\n}\n" + ID: Tseng2011 + address: 'Oslo, Norway' + author: 'Tseng, Yu-Chung and Liu, Che-Wei and Chi, Tzu-Heng and Wang, Hui-Yu' + bibtex: "@inproceedings{Tseng2011,\n address = {Oslo, Norway},\n author = {Tseng,\ + \ Yu-Chung and Liu, Che-Wei and Chi, Tzu-Heng and Wang, Hui-Yu},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178179},\n issn = {2220-4806},\n pages = {320--321},\n\ + \ title = {Sound Low Fun},\n url = {http://www.nime.org/proceedings/2011/nime2011_320.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177259 + doi: 10.5281/zenodo.1178179 issn: 2220-4806 - keywords: 'electronic percussion,nime07,physical modeling,timbre recognition' - pages: 392--393 - title: A Hybrid Method for Extended Percussive Gesture - url: http://www.nime.org/proceedings/2007/nime2007_392.pdf - year: 2007 + pages: 320--321 + title: Sound Low Fun + url: http://www.nime.org/proceedings/2011/nime2011_320.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: 
Bottoni2007 - abstract: This paper reports our experiments on using a dual-coreDSP processor in - the construction of a user-programmablemusical instrument and controller called - the TouchBox. - address: 'New York City, NY, United States' - author: 'Bottoni, Paolo and Caporali, Riccardo and Capuano, Daniele and Faralli, - Stefano and Labella, Anna and Pierro, Mario' - bibtex: "@inproceedings{Bottoni2007,\n abstract = {This paper reports our experiments\ - \ on using a dual-coreDSP processor in the construction of a user-programmablemusical\ - \ instrument and controller called the TouchBox.},\n address = {New York City,\ - \ NY, United States},\n author = {Bottoni, Paolo and Caporali, Riccardo and Capuano,\ - \ Daniele and Faralli, Stefano and Labella, Anna and Pierro, Mario},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177053},\n issn = {2220-4806},\n keywords\ - \ = {dual-core, DSP, touch-screen, synthesizer, controller },\n pages = {394--395},\n\ - \ title = {Use of a Dual-Core {DSP} in a Low-Cost, Touch-Screen Based Musical\ - \ Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_394.pdf},\n\ - \ year = {2007}\n}\n" + ID: Berdahl2011 + abstract: 'The purpose of this brief paper is to revisit the question oflongevity + in present experimental practice and coin the termautonomous new media artefacts + (AutoNMA), which arecomplete and independent of external computer systems,so they + can be operable for a longer period of time andcan be demonstrated at a moment''s + notice. We argue thatplatforms for prototyping should promote the creation ofAutoNMA + to make extant the devices which will be a partof the future history of new media.' 
+ address: 'Oslo, Norway' + author: 'Berdahl, Edgar and Chafe, Chris' + bibtex: "@inproceedings{Berdahl2011,\n abstract = {The purpose of this brief paper\ + \ is to revisit the question oflongevity in present experimental practice and\ + \ coin the termautonomous new media artefacts (AutoNMA), which arecomplete and\ + \ independent of external computer systems,so they can be operable for a longer\ + \ period of time andcan be demonstrated at a moment's notice. We argue thatplatforms\ + \ for prototyping should promote the creation ofAutoNMA to make extant the devices\ + \ which will be a partof the future history of new media.},\n address = {Oslo,\ + \ Norway},\n author = {Berdahl, Edgar and Chafe, Chris},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177953},\n issn = {2220-4806},\n keywords = {autonomous,\ + \ standalone, Satellite CCRMA, Arduino },\n pages = {322--323},\n title = {Autonomous\ + \ New Media Artefacts ( AutoNMA )},\n url = {http://www.nime.org/proceedings/2011/nime2011_322.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177053 + doi: 10.5281/zenodo.1177953 issn: 2220-4806 - keywords: 'dual-core, DSP, touch-screen, synthesizer, controller ' - pages: 394--395 - title: 'Use of a Dual-Core DSP in a Low-Cost, Touch-Screen Based Musical Instrument' - url: http://www.nime.org/proceedings/2007/nime2007_394.pdf - year: 2007 + keywords: 'autonomous, standalone, Satellite CCRMA, Arduino ' + pages: 322--323 + title: Autonomous New Media Artefacts ( AutoNMA ) + url: http://www.nime.org/proceedings/2011/nime2011_322.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Kanebako2007 - abstract: 'This instrument is a part of the “Gangu Project” at IAMAS, which aim - to develop digital toys for improving children’s social behavior in the future. 
- It was further developed as part of the IAMAS-Interface Cultures exchange program. - “Mountain Guitar” is a new musical instrument that enables musical expression - through a custom-made sensor technology, which captures and transforms the height - at which the instrument is held to the musical outcome during the playing session. - One of the goals of “Mountain Guitar” is to let untrained users easily and intuitively - play guitar through their body movements. In addition to capturing the users’ - body movements, “Mountain Guitar” also simulates standard guitar playing techniques - such as vibrato, choking, and mute. “Mountain Guitar’s” goal is to provide playing - pleasure for guitar training sessions. This poster describes the “Mountain Guitar’s” - fundamental principles and its mode of operation.' - address: 'New York City, NY, United States' - author: 'Kanebako, Junichi and Gibson, James and Mignonneau, Laurent' - bibtex: "@inproceedings{Kanebako2007,\n abstract = {This instrument is a part of\ - \ the “Gangu Project” at IAMAS, which aim to develop digital toys for improving\ - \ children’s social behavior in the future. It was further developed as part of\ - \ the IAMAS-Interface Cultures exchange program. “Mountain Guitar” is a new musical\ - \ instrument that enables musical expression through a custom-made sensor technology,\ - \ which captures and transforms the height at which the instrument is held to\ - \ the musical outcome during the playing session. One of the goals of “Mountain\ - \ Guitar” is to let untrained users easily and intuitively play guitar through\ - \ their body movements. In addition to capturing the users’ body movements, “Mountain\ - \ Guitar” also simulates standard guitar playing techniques such as vibrato, choking,\ - \ and mute. “Mountain Guitar’s” goal is to provide playing pleasure for guitar\ - \ training sessions. 
This poster describes the “Mountain Guitar’s” fundamental\ - \ principles and its mode of operation.},\n address = {New York City, NY, United\ - \ States},\n author = {Kanebako, Junichi and Gibson, James and Mignonneau, Laurent},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177133},\n issn = {2220-4806},\n\ - \ keywords = {Musical Expression, Guitar Instrument, MIDI to sensor mapping, Physical\ - \ Computing, Intuitive Interaction},\n pages = {396--398},\n title = {Mountain\ - \ Guitar : a Musical Instrument for Everyone},\n url = {http://www.nime.org/proceedings/2007/nime2007_396.pdf},\n\ - \ year = {2007}\n}\n" + ID: Yoo2011 + abstract: 'Recently, Microsoft introduced a game interface called Kinect for the + Xbox 360 video game platform. This interface enables users to control and interact + with the game console without the need to touch a controller. It largely increases + the users'' degree of freedom to express their emotion. In this paper, we first + describe the system we developed to use this interface for sound generation and + controlling musical expression. The skeleton data are extracted from users'' motions + and the data are translated to pre-defined MIDI data. We then use the MIDI data + to control several applications. To allow the translation between the data, we + implemented a simple Kinect-to-MIDI data convertor, which is introduced in this + paper. We describe two applications to make music with Kinect: we first generate + sound with Max/MSP, and then control the adlib with our own adlib generating system + by the body movements of the users. ' + address: 'Oslo, Norway' + author: 'Yoo, Min-Joon and Beak, Jin-Wook and Lee, In-Kwon' + bibtex: "@inproceedings{Yoo2011,\n abstract = {Recently, Microsoft introduced a\ + \ game interface called Kinect for the Xbox 360 video game platform. 
This interface\ + \ enables users to control and interact with the game console without the need\ + \ to touch a controller. It largely increases the users' degree of freedom to\ + \ express their emotion. In this paper, we first describe the system we developed\ + \ to use this interface for sound generation and controlling musical expression.\ + \ The skeleton data are extracted from users' motions and the data are translated\ + \ to pre-defined MIDI data. We then use the MIDI data to control several applications.\ + \ To allow the translation between the data, we implemented a simple Kinect-to-MIDI\ + \ data convertor, which is introduced in this paper. We describe two applications\ + \ to make music with Kinect: we first generate sound with Max/MSP, and then control\ + \ the adlib with our own adlib generating system by the body movements of the\ + \ users. },\n address = {Oslo, Norway},\n author = {Yoo, Min-Joon and Beak, Jin-Wook\ + \ and Lee, In-Kwon},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178193},\n\ + \ issn = {2220-4806},\n keywords = {Kinect, gaming interface, sound generation,\ + \ adlib generation },\n pages = {324--325},\n title = {Creating Musical Expression\ + \ using Kinect},\n url = {http://www.nime.org/proceedings/2011/nime2011_324.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177133 + doi: 10.5281/zenodo.1178193 issn: 2220-4806 - keywords: 'Musical Expression, Guitar Instrument, MIDI to sensor mapping, Physical - Computing, Intuitive Interaction' - pages: 396--398 - title: 'Mountain Guitar : a Musical Instrument for Everyone' - url: http://www.nime.org/proceedings/2007/nime2007_396.pdf - year: 2007 + keywords: 'Kinect, gaming interface, sound generation, adlib generation ' + pages: 324--325 + title: Creating Musical Expression using Kinect + url: 
http://www.nime.org/proceedings/2011/nime2011_324.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Sirguy2007 - abstract: 'Eowave and Ircam have been deeply involved into gestureanalysis and sensing - for a few years by now, as severalartistic projects demonstrate (1). In 2004, - Eowave has beenworking with Ircam on the development of the Eobodysensor system, - and since that, Eowave''s range of sensors hasbeen increased with new sensors - sometimes developed innarrow collaboration with artists for custom sensor systemsfor - installations and performances. This demo-paperdescribes the recent design of - a new USB/MIDI-to-sensorinterface called Eobody2.' - address: 'New York City, NY, United States' - author: 'Sirguy, Marc and Gallin, Emmanuelle' - bibtex: "@inproceedings{Sirguy2007,\n abstract = {Eowave and Ircam have been deeply\ - \ involved into gestureanalysis and sensing for a few years by now, as severalartistic\ - \ projects demonstrate (1). In 2004, Eowave has beenworking with Ircam on the\ - \ development of the Eobodysensor system, and since that, Eowave's range of sensors\ - \ hasbeen increased with new sensors sometimes developed innarrow collaboration\ - \ with artists for custom sensor systemsfor installations and performances. This\ - \ demo-paperdescribes the recent design of a new USB/MIDI-to-sensorinterface called\ - \ Eobody2.},\n address = {New York City, NY, United States},\n author = {Sirguy,\ - \ Marc and Gallin, Emmanuelle},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177247},\n\ - \ issn = {2220-4806},\n keywords = {Gestural controller, Sensor, MIDI, USB, Computer\ - \ music, Relays, Motors, Robots, Wireless. 
},\n pages = {401--402},\n title =\ - \ {Eobody2 : A Follow-up to Eobody's Technology},\n url = {http://www.nime.org/proceedings/2007/nime2007_401.pdf},\n\ - \ year = {2007}\n}\n" + ID: DeJong2011 + abstract: 'This paper proposes a new research direction for the large family of + instrumental musical interfaces where sound is generated using digital granular + synthesis, and where interaction and control involve the (fine) operation of stiff, + flat contact surfaces. First, within a historical context, a general absence of, + and clear need for, tangible output that is dynamically instantiated by the grain-generating + process itself is identified. Second, to fill this gap, a concrete general approach + is proposed based on the careful construction of non-vibratory and vibratory force + pulses, in a one-to-one relationship with sonic grains.An informal pilot psychophysics + experiment initiating the approach was conducted, which took into account the + two main cases for applying forces to the human skin: perpendicular, and lateral. + Initial results indicate that the force pulse approach can enable perceivably + multidimensional, tangible display of the ongoing grain-generating process. Moreover, + it was found that this can be made to meaningfully happen (in real time) in the + same timescale of basic sonic grain generation. This is not a trivial property, + and provides an important and positive fundament for further developing this type + of enhanced display. It also leads to the exciting prospect of making arbitrary + sonic grains actual physical manipulanda. ' + address: 'Oslo, Norway' + author: 'de Jong, Staas' + bibtex: "@inproceedings{DeJong2011,\n abstract = {This paper proposes a new research\ + \ direction for the large family of instrumental musical interfaces where sound\ + \ is generated using digital granular synthesis, and where interaction and control\ + \ involve the (fine) operation of stiff, flat contact surfaces. 
First, within\ + \ a historical context, a general absence of, and clear need for, tangible output\ + \ that is dynamically instantiated by the grain-generating process itself is identified.\ + \ Second, to fill this gap, a concrete general approach is proposed based on the\ + \ careful construction of non-vibratory and vibratory force pulses, in a one-to-one\ + \ relationship with sonic grains.An informal pilot psychophysics experiment initiating\ + \ the approach was conducted, which took into account the two main cases for applying\ + \ forces to the human skin: perpendicular, and lateral. Initial results indicate\ + \ that the force pulse approach can enable perceivably multidimensional, tangible\ + \ display of the ongoing grain-generating process. Moreover, it was found that\ + \ this can be made to meaningfully happen (in real time) in the same timescale\ + \ of basic sonic grain generation. This is not a trivial property, and provides\ + \ an important and positive fundament for further developing this type of enhanced\ + \ display. It also leads to the exciting prospect of making arbitrary sonic grains\ + \ actual physical manipulanda. 
},\n address = {Oslo, Norway},\n author = {de Jong,\ + \ Staas},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178055},\n issn = {2220-4806},\n\ + \ keywords = {and others,and today granular,barry truax,curtis roads,granular\ + \ sound synthesis,instrumental control,tangible display,tangible manipulation},\n\ + \ pages = {326--328},\n title = {Making Grains Tangible: Microtouch for Microsound},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_326.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177247 + doi: 10.5281/zenodo.1178055 issn: 2220-4806 - keywords: 'Gestural controller, Sensor, MIDI, USB, Computer music, Relays, Motors, - Robots, Wireless. ' - pages: 401--402 - title: 'Eobody2 : A Follow-up to Eobody''s Technology' - url: http://www.nime.org/proceedings/2007/nime2007_401.pdf - year: 2007 + keywords: 'and others,and today granular,barry truax,curtis roads,granular sound + synthesis,instrumental control,tangible display,tangible manipulation' + pages: 326--328 + title: 'Making Grains Tangible: Microtouch for Microsound' + url: http://www.nime.org/proceedings/2011/nime2011_326.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Till2007 - abstract: 'The WISP is a novel wireless sensor that uses 3 axis magnetometers, accelerometers, - and rate gyroscopes to provide a real-time measurement of its own orientation - in space. Orientation data are transmitted via the Open Sound Control protocol - (OSC) to a synthesis engine for interactive live dance performance. ' - address: 'New York City, NY, United States' - author: 'Till, Bernie C. and Benning, Manjinder S. 
and Livingston, Nigel' - bibtex: "@inproceedings{Till2007,\n abstract = {The WISP is a novel wireless sensor\ - \ that uses 3 axis magnetometers, accelerometers, and rate gyroscopes to provide\ - \ a real-time measurement of its own orientation in space. Orientation data are\ - \ transmitted via the Open Sound Control protocol (OSC) to a synthesis engine\ - \ for interactive live dance performance. },\n address = {New York City, NY, United\ - \ States},\n author = {Till, Bernie C. and Benning, Manjinder S. and Livingston,\ - \ Nigel},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177257},\n issn = {2220-4806},\n\ - \ keywords = {Music Controller, Human-Computer Interaction, Wireless Sensing,\ - \ Inertial Sensing. },\n pages = {403--404},\n title = {Wireless Inertial Sensor\ - \ Package (WISP)},\n url = {http://www.nime.org/proceedings/2007/nime2007_403.pdf},\n\ - \ year = {2007}\n}\n" + ID: Caramiaux2011 + abstract: 'This paper presents a prototypical tool for sound selection driven by + users'' gestures. Sound selection by gesturesis a particular case of "query by + content" in multimedia databases. Gesture-to-Sound matching is based on computing + the similarity between both gesture and sound parameters'' temporal evolution. + The tool presents three algorithms for matching gesture query to sound target. + Thesystem leads to several applications in sound design, virtualinstrument design + and interactive installation.' + address: 'Oslo, Norway' + author: 'Caramiaux, Baptiste and Bevilacqua, Frédéric and Schnell, Norbert' + bibtex: "@inproceedings{Caramiaux2011,\n abstract = {This paper presents a prototypical\ + \ tool for sound selection driven by users' gestures. Sound selection by gesturesis\ + \ a particular case of \"query by content\" in multimedia databases. 
Gesture-to-Sound\ + \ matching is based on computing the similarity between both gesture and sound\ + \ parameters' temporal evolution. The tool presents three algorithms for matching\ + \ gesture query to sound target. Thesystem leads to several applications in sound\ + \ design, virtualinstrument design and interactive installation.},\n address =\ + \ {Oslo, Norway},\n author = {Caramiaux, Baptiste and Bevilacqua, Fr\\'{e}d\\\ + '{e}ric and Schnell, Norbert},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177977},\n\ + \ issn = {2220-4806},\n keywords = {Query by Gesture, Time Series Analysis, Sonic\ + \ Interaction },\n pages = {329--330},\n title = {Sound Selection by Gestures},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_329.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177257 + doi: 10.5281/zenodo.1177977 issn: 2220-4806 - keywords: 'Music Controller, Human-Computer Interaction, Wireless Sensing, Inertial - Sensing. ' - pages: 403--404 - title: Wireless Inertial Sensor Package (WISP) - url: http://www.nime.org/proceedings/2007/nime2007_403.pdf - year: 2007 + keywords: 'Query by Gesture, Time Series Analysis, Sonic Interaction ' + pages: 329--330 + title: Sound Selection by Gestures + url: http://www.nime.org/proceedings/2011/nime2011_329.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Loewenstein2007 - abstract: 'The ”Acoustic Map“ is an interactive soundinstallation developed for - the “Hallakustika” Festival in Hall (Tyrolia, Austria) using the motion tracking - software Eyes-Web and Max-MSP. For the NIME 07 a simulation of the motion tracking - part of the original work will be shown. 
Its aim was to create an interactive - city portrait of the city of Hall and to offer the possibility to enhance six - sites of the city on an acoustical basis with what I called an “acoustic zoom”.' - address: 'New York City, NY, United States' - author: 'Loewenstein, Stefan' - bibtex: "@inproceedings{Loewenstein2007,\n abstract = {The ”Acoustic Map“ is an\ - \ interactive soundinstallation developed for the “Hallakustika” Festival in Hall\ - \ (Tyrolia, Austria) using the motion tracking software Eyes-Web and Max-MSP.\ - \ For the NIME 07 a simulation of the motion tracking part of the original work\ - \ will be shown. Its aim was to create an interactive city portrait of the city\ - \ of Hall and to offer the possibility to enhance six sites of the city on an\ - \ acoustical basis with what I called an “acoustic zoom”.},\n address = {New York\ - \ City, NY, United States},\n author = {Loewenstein, Stefan},\n booktitle = {Proceedings\ + ID: Kerllenevich2011 + abstract: 'We propose and discuss an open source real-time interface that focuses + in the vast potential for interactive soundart creation emerging from biological + neural networks, asparadigmatic complex systems for musical exploration. Inparticular, + we focus on networks that are responsible for thegeneration of rhythmic patterns.The + interface relies uponthe idea of relating metaphorically neural behaviors to electronic + and acoustic instruments notes, by means of flexiblemapping strategies. The user + can intuitively design network configurations by dynamically creating neurons + andconfiguring their inter-connectivity. The core of the systemis based in events + emerging from his network design, whichfunctions in a similar way to what happens + in real smallneural networks. 
Having multiple signal and data inputsand outputs, + as well as standard communications protocolssuch as MIDI, OSC and TCP/IP, it becomes + and uniquetool for composers and performers, suitable for different performance + scenarios, like live electronics, sound installationsand telematic concerts.' + address: 'Oslo, Norway' + author: 'Kerlleñevich, Hernán and Eguía, Manuel C. and Riera, Pablo E.' + bibtex: "@inproceedings{Kerllenevich2011,\n abstract = {We propose and discuss an\ + \ open source real-time interface that focuses in the vast potential for interactive\ + \ soundart creation emerging from biological neural networks, asparadigmatic complex\ + \ systems for musical exploration. Inparticular, we focus on networks that are\ + \ responsible for thegeneration of rhythmic patterns.The interface relies uponthe\ + \ idea of relating metaphorically neural behaviors to electronic and acoustic\ + \ instruments notes, by means of flexiblemapping strategies. The user can intuitively\ + \ design network configurations by dynamically creating neurons andconfiguring\ + \ their inter-connectivity. The core of the systemis based in events emerging\ + \ from his network design, whichfunctions in a similar way to what happens in\ + \ real smallneural networks. Having multiple signal and data inputsand outputs,\ + \ as well as standard communications protocolssuch as MIDI, OSC and TCP/IP, it\ + \ becomes and uniquetool for composers and performers, suitable for different\ + \ performance scenarios, like live electronics, sound installationsand telematic\ + \ concerts.},\n address = {Oslo, Norway},\n author = {Kerlle\\~{n}evich, Hern\\\ + '{a}n and Egu\\'{\\i}a, Manuel C. 
and Riera, Pablo E.},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177167},\n issn = {2220-4806},\n keywords = {nime07},\n\ - \ pages = {405--406},\n title = {\"Acoustic Map\" -- An Interactive Cityportrait},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_405.pdf},\n year = {2007}\n\ - }\n" + \ doi = {10.5281/zenodo.1178063},\n issn = {2220-4806},\n keywords = {rhythm generation,\ + \ biological neural networks, complex patterns, musical interface, network performance\ + \ },\n pages = {331--336},\n presentation-video = {https://vimeo.com/26874396/},\n\ + \ title = {An Open Source Interface based on Biological Neural Networks for Interactive\ + \ Music Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_331.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177167 + doi: 10.5281/zenodo.1178063 issn: 2220-4806 - keywords: nime07 - pages: 405--406 - title: '"Acoustic Map" -- An Interactive Cityportrait' - url: http://www.nime.org/proceedings/2007/nime2007_405.pdf - year: 2007 + keywords: 'rhythm generation, biological neural networks, complex patterns, musical + interface, network performance ' + pages: 331--336 + presentation-video: https://vimeo.com/26874396/ + title: An Open Source Interface based on Biological Neural Networks for Interactive + Music Performance + url: http://www.nime.org/proceedings/2011/nime2011_331.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Hashida2007a - abstract: 'This paper introduces a system for improvisational musical expression - that enables all users, novice and experienced, to perform intuitively and expressively. - Users can generate musically consistent results through intuitive action, inputting - rhythm in a decent tempo. 
We demonstrate novel mapping ways that reflect user’s - input information more interactively and effectively in generating the music. - We also present various input devices that allow users more creative liberty.' - address: 'New York City, NY, United States' - author: 'Hashida, Tomoko and Naemura, Takeshi and Sato, Takao' - bibtex: "@inproceedings{Hashida2007a,\n abstract = {This paper introduces a system\ - \ for improvisational musical expression that enables all users, novice and experienced,\ - \ to perform intuitively and expressively. Users can generate musically consistent\ - \ results through intuitive action, inputting rhythm in a decent tempo. We demonstrate\ - \ novel mapping ways that reflect user’s input information more interactively\ - \ and effectively in generating the music. We also present various input devices\ - \ that allow users more creative liberty.},\n address = {New York City, NY, United\ - \ States},\n author = {Hashida, Tomoko and Naemura, Takeshi and Sato, Takao},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177113},\n issn = {2220-4806},\n\ - \ keywords = {Improvisation, interactive music, a sense of tempo },\n pages =\ - \ {407--408},\n title = {A System for Improvisational Musical Expression Based\ - \ on Player's Sense of Tempo},\n url = {http://www.nime.org/proceedings/2007/nime2007_407.pdf},\n\ - \ year = {2007}\n}\n" + ID: Gillian2011 + abstract: 'This paper presents a novel algorithm that has been specifically designed + for the recognition of multivariate temporal musical gestures. 
The algorithm is + based on DynamicTime Warping and has been extended to classify any N dimensional + signal, automatically compute a classificationthreshold to reject any data that + is not a valid gesture andbe quickly trained with a low number of training examples.The + algorithm is evaluated using a database of 10 temporalgestures performed by 10 + participants achieving an averagecross-validation result of 99%.' + address: 'Oslo, Norway' + author: 'Gillian, Nicholas and Knapp, Benjamin and O''Modhrain, Sile' + bibtex: "@inproceedings{Gillian2011,\n abstract = {This paper presents a novel algorithm\ + \ that has been specifically designed for the recognition of multivariate temporal\ + \ musical gestures. The algorithm is based on DynamicTime Warping and has been\ + \ extended to classify any N dimensional signal, automatically compute a classificationthreshold\ + \ to reject any data that is not a valid gesture andbe quickly trained with a\ + \ low number of training examples.The algorithm is evaluated using a database\ + \ of 10 temporalgestures performed by 10 participants achieving an averagecross-validation\ + \ result of 99%.},\n address = {Oslo, Norway},\n author = {Gillian, Nicholas and\ + \ Knapp, Benjamin and O'Modhrain, Sile},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178029},\n\ + \ issn = {2220-4806},\n keywords = {Dynamic Time Warping, Gesture Recognition,\ + \ Musician-Computer Interaction, Multivariate Temporal Gestures },\n pages = {337--342},\n\ + \ presentation-video = {https://vimeo.com/26874428/},\n title = {Recognition Of\ + \ Multivariate Temporal Musical Gestures Using N-Dimensional Dynamic Time Warping},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_337.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177113 + doi: 
10.5281/zenodo.1178029 issn: 2220-4806 - keywords: 'Improvisation, interactive music, a sense of tempo ' - pages: 407--408 - title: A System for Improvisational Musical Expression Based on Player's Sense of - Tempo - url: http://www.nime.org/proceedings/2007/nime2007_407.pdf - year: 2007 + keywords: 'Dynamic Time Warping, Gesture Recognition, Musician-Computer Interaction, + Multivariate Temporal Gestures ' + pages: 337--342 + presentation-video: https://vimeo.com/26874428/ + title: Recognition Of Multivariate Temporal Musical Gestures Using N-Dimensional + Dynamic Time Warping + url: http://www.nime.org/proceedings/2011/nime2011_337.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Nakamoto2007 - abstract: 'We proposed a circle canon system for enjoying a musical ensemble supported - by a computer and network. Using the song Frog round, which is a popular circle - canon chorus originated from a German folk song, we produced a singing ensemble - opportunity where everyone plays the music together at the same time. The aim - of our system is that anyone can experience the joyful feeling of actually playing - the music as well as sharing it with others. ' - address: 'New York City, NY, United States' - author: 'Nakamoto, Misako and Kuhara, Yasuo' - bibtex: "@inproceedings{Nakamoto2007,\n abstract = {We proposed a circle canon system\ - \ for enjoying a musical ensemble supported by a computer and network. Using the\ - \ song {Frog round}, which is a popular circle canon chorus originated from a\ - \ German folk song, we produced a singing ensemble opportunity where everyone\ - \ plays the music together at the same time. The aim of our system is that anyone\ - \ can experience the joyful feeling of actually playing the music as well as sharing\ - \ it with others. 
},\n address = {New York City, NY, United States},\n author\ - \ = {Nakamoto, Misako and Kuhara, Yasuo},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177207},\n\ - \ issn = {2220-4806},\n keywords = {Circle canon, Chorus, Song, Frog round, Ensemble,\ - \ Internet, Max/MSP, MySQL database. },\n pages = {409--410},\n title = {Circle\ - \ Canon Chorus System Used To Enjoy A Musical Ensemble Singing \"Frog Round\"\ - },\n url = {http://www.nime.org/proceedings/2007/nime2007_409.pdf},\n year = {2007}\n\ - }\n" + ID: Gillian2011a + abstract: 'This paper presents the SARC EyesWeb Catalog, (SEC),a machine learning + toolbox that has been specifically developed for musician-computer interaction. + The SEC features a large number of machine learning algorithms that can be used + in real-time to recognise static postures, perform regression and classify multivariate + temporal gestures. The algorithms within the toolbox have been designed to work + with any N -dimensional signal and can be quickly trained with a small number + of training examples. We also provide the motivation for the algorithms used for + the recognition of musical gestures to achieve a low intra-personal generalisation + error, as opposed to the inter-personal generalisation error that is more common + in other areas of human-computer interaction.' + address: 'Oslo, Norway' + author: 'Gillian, Nicholas and Knapp, Benjamin and O''Modhrain, Sile' + bibtex: "@inproceedings{Gillian2011a,\n abstract = {This paper presents the SARC\ + \ EyesWeb Catalog, (SEC),a machine learning toolbox that has been specifically\ + \ developed for musician-computer interaction. The SEC features a large number\ + \ of machine learning algorithms that can be used in real-time to recognise static\ + \ postures, perform regression and classify multivariate temporal gestures. 
The\ + \ algorithms within the toolbox have been designed to work with any N -dimensional\ + \ signal and can be quickly trained with a small number of training examples.\ + \ We also provide the motivation for the algorithms used for the recognition of\ + \ musical gestures to achieve a low intra-personal generalisation error, as opposed\ + \ to the inter-personal generalisation error that is more common in other areas\ + \ of human-computer interaction.},\n address = {Oslo, Norway},\n author = {Gillian,\ + \ Nicholas and Knapp, Benjamin and O'Modhrain, Sile},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178031},\n issn = {2220-4806},\n keywords = {Machine\ + \ learning, gesture recognition, musician-computer interaction, SEC },\n pages\ + \ = {343--348},\n presentation-video = {https://vimeo.com/26872843/},\n title\ + \ = {A Machine Learning Toolbox For Musician Computer Interaction},\n url = {http://www.nime.org/proceedings/2011/nime2011_343.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177207 + doi: 10.5281/zenodo.1178031 issn: 2220-4806 - keywords: 'Circle canon, Chorus, Song, Frog round, Ensemble, Internet, Max/MSP, - MySQL database. 
' - pages: 409--410 - title: Circle Canon Chorus System Used To Enjoy A Musical Ensemble Singing "Frog - Round" - url: http://www.nime.org/proceedings/2007/nime2007_409.pdf - year: 2007 + keywords: 'Machine learning, gesture recognition, musician-computer interaction, + SEC ' + pages: 343--348 + presentation-video: https://vimeo.com/26872843/ + title: A Machine Learning Toolbox For Musician Computer Interaction + url: http://www.nime.org/proceedings/2011/nime2011_343.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Estrada2007 - abstract: 'Loop-R is a real-time video performance tool, based in the exploration - of low-tech, used technology and human engineering research. With this tool its - author is giving a shout to industry, using existing and mistreated technology - in innovative ways, combining concepts and interfaces: blending segregated interfaces - (GUI and Physical) into one. After graspable interfaces and the “end” of WIMP - interfaces, hardware and software blend themselves in a new genre providing free - control of video-loops in an expressive hybrid tool.' - address: 'New York City, NY, United States' - author: 'Pereira, Rui' - bibtex: "@inproceedings{Estrada2007,\n abstract = {Loop-R is a real-time video performance\ - \ tool, based in the exploration of low-tech, used technology and human engineering\ - \ research. With this tool its author is giving a shout to industry, using existing\ - \ and mistreated technology in innovative ways, combining concepts and interfaces:\ - \ blending segregated interfaces (GUI and Physical) into one. 
After graspable\ - \ interfaces and the “end” of WIMP interfaces, hardware and software blend themselves\ - \ in a new genre providing free control of video-loops in an expressive hybrid\ - \ tool.},\n address = {New York City, NY, United States},\n author = {Pereira,\ - \ Rui},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177219},\n issn = {2220-4806},\n\ - \ keywords = {Real-time; video; interface; live-visuals; loop; },\n pages = {411--414},\n\ - \ title = {Loop-R : Real-Time Video Interface},\n url = {http://www.nime.org/proceedings/2007/nime2007_411.pdf},\n\ - \ year = {2007}\n}\n" + ID: Jessop2011 + abstract: "In composer Tod Machover's new opera Death and the Powers, the main character\ + \ uploads his consciousness into anelaborate computer system to preserve his essence\ + \ and agencyafter his corporeal death. Consequently, for much of theopera, the\ + \ stage and the environment itself come alive asthe main character. This creative\ + \ need brings with it a hostof technical challenges and opportunities. In order\ + \ to satisfythe needs of this storyline, Machover's Opera of the Futuregroup at\ + \ the MIT Media Lab has developed a suite of newperformance technologies, including\ + \ robot characters, interactive performance capture systems, mapping systems for,\n\ + ,\nauthoring interactive multimedia performances, new musical instruments, unique\ + \ spatialized sound controls, anda unified control system for all these technological\ + \ components. While developed for a particular theatrical production, many of\ + \ the concepts and design procedures remain relevant to broader contexts including\ + \ performance,robotics, and interaction design." + address: 'Oslo, Norway' + author: 'Jessop, Elena and Torpey, Peter A. 
and Bloomberg, Benjamin' + bibtex: "@inproceedings{Jessop2011,\n abstract = {In composer Tod Machover's new\ + \ opera Death and the Powers, the main character uploads his consciousness into\ + \ anelaborate computer system to preserve his essence and agencyafter his corporeal\ + \ death. Consequently, for much of theopera, the stage and the environment itself\ + \ come alive asthe main character. This creative need brings with it a hostof\ + \ technical challenges and opportunities. In order to satisfythe needs of this\ + \ storyline, Machover's Opera of the Futuregroup at the MIT Media Lab has developed\ + \ a suite of newperformance technologies, including robot characters, interactive\ + \ performance capture systems, mapping systems for,\n,\nauthoring interactive\ + \ multimedia performances, new musical instruments, unique spatialized sound controls,\ + \ anda unified control system for all these technological components. While developed\ + \ for a particular theatrical production, many of the concepts and design procedures\ + \ remain relevant to broader contexts including performance,robotics, and interaction\ + \ design.},\n address = {Oslo, Norway},\n author = {Jessop, Elena and Torpey,\ + \ Peter A. 
and Bloomberg, Benjamin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178051},\n\ + \ issn = {2220-4806},\n keywords = {opera, Death and the Powers, Tod Machover,\ + \ gestural interfaces, Disembodied Performance, ambisonics },\n pages = {349--354},\n\ + \ presentation-video = {https://vimeo.com/26878423/},\n title = {Music and Technology\ + \ in Death and the Powers},\n url = {http://www.nime.org/proceedings/2011/nime2011_349.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177219 + doi: 10.5281/zenodo.1178051 issn: 2220-4806 - keywords: 'Real-time; video; interface; live-visuals; loop; ' - pages: 411--414 - title: 'Loop-R : Real-Time Video Interface' - url: http://www.nime.org/proceedings/2007/nime2007_411.pdf - year: 2007 + keywords: 'opera, Death and the Powers, Tod Machover, gestural interfaces, Disembodied + Performance, ambisonics ' + pages: 349--354 + presentation-video: https://vimeo.com/26878423/ + title: Music and Technology in Death and the Powers + url: http://www.nime.org/proceedings/2011/nime2011_349.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Rigler2007 - abstract: 'The Music Cre8tor is an interactive music composition system controlled - by motion sensors specifically designed for children with disabilities although - not exclusively for this population. The player(s) of the Music Cre8tor can either - hold or attach accelerometer sensors to trigger a variety of computer-generated - sounds, MIDI instruments and/or pre-recorded sound files. The sensitivity of the - sensors can be modified for each unique individual so that even the smallest movement - can control a sound. The flexibility of the system is such that either four people - can play simultaneously and/or one or more players can use up to four sensors. 
- The original goal of this program was to empower students with disabilities to - create music and encourage them to perform with other musicians, however this - same goal has expanded to include other populations.' - address: 'New York City, NY, United States' - author: 'Rigler, Jane and Seldess, Zachary' - bibtex: "@inproceedings{Rigler2007,\n abstract = {The Music Cre8tor is an interactive\ - \ music composition system controlled by motion sensors specifically designed\ - \ for children with disabilities although not exclusively for this population.\ - \ The player(s) of the Music Cre8tor can either hold or attach accelerometer sensors\ - \ to trigger a variety of computer-generated sounds, MIDI instruments and/or pre-recorded\ - \ sound files. The sensitivity of the sensors can be modified for each unique\ - \ individual so that even the smallest movement can control a sound. The flexibility\ - \ of the system is such that either four people can play simultaneously and/or\ - \ one or more players can use up to four sensors. The original goal of this program\ - \ was to empower students with disabilities to create music and encourage them\ - \ to perform with other musicians, however this same goal has expanded to include\ - \ other populations.},\n address = {New York City, NY, United States},\n author\ - \ = {Rigler, Jane and Seldess, Zachary},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177227},\n\ - \ issn = {2220-4806},\n keywords = {Music Education, disabilities, special education,\ - \ motion sensors, music composition, interactive performance. 
},\n pages = {415--416},\n\ - \ title = {The Music Cre8tor : an Interactive System for Musical Exploration and\ - \ Education},\n url = {http://www.nime.org/proceedings/2007/nime2007_415.pdf},\n\ - \ year = {2007}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1177227 - issn: 2220-4806 - keywords: 'Music Education, disabilities, special education, motion sensors, music - composition, interactive performance. ' - pages: 415--416 - title: 'The Music Cre8tor : an Interactive System for Musical Exploration and Education' - url: http://www.nime.org/proceedings/2007/nime2007_415.pdf - year: 2007 - - -- ENTRYTYPE: inproceedings - ID: Guedes2007 - abstract: 'In this demonstration, I exemplify how a musical channel ofcommunication - can be established in computer-mediatedinteraction between musicians and dancers - in real time. Thischannel of communication uses a software libraryimplemented - as a library of external objects for Max/MSP[1],that processes data from an object - or library that performsframe-differencing analysis of a video stream in real - time inthis programming environment.' - address: 'New York City, NY, United States' - author: 'Guedes, Carlos' - bibtex: "@inproceedings{Guedes2007,\n abstract = {In this demonstration, I exemplify\ - \ how a musical channel ofcommunication can be established in computer-mediatedinteraction\ - \ between musicians and dancers in real time. 
Thischannel of communication uses\ - \ a software libraryimplemented as a library of external objects for Max/MSP[1],that\ - \ processes data from an object or library that performsframe-differencing analysis\ - \ of a video stream in real time inthis programming environment.},\n address =\ - \ {New York City, NY, United States},\n author = {Guedes, Carlos},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177105},\n issn = {2220-4806},\n keywords\ - \ = {dance,in dance,interaction between music and,interactive,interactive dance,interactive\ - \ performance,musical rhythm and rhythm,nime07,performance systems},\n pages =\ - \ {417--419},\n title = {Establishing a Musical Channel of Communication between\ - \ Dancers and Musicians in Computer-Mediated Collaborations in Dance Performance},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_417.pdf},\n year = {2007}\n\ - }\n" + ID: Zappi2011 + abstract: 'In this paper we introduce a multimodal platform for Hybrid Reality live + performances: by means of non-invasiveVirtual Reality technology, we developed + a system to presentartists and interactive virtual objects in audio/visual choreographies + on the same real stage. These choreographiescould include spectators too, providing + them with the possibility to directly modify the scene and its audio/visual features. + We also introduce the first interactive performancestaged with this technology, + in which an electronic musician played live five tracks manipulating the 3D projectedvisuals. 
+ As questionnaires have been distributed after theshow, in the last part of this + work we discuss the analysisof collected data, underlining positive and negative + aspectsof the proposed experience.This paper belongs together with a performance + proposalcalled Dissonance, in which two performers exploit the platform to create + a progressive soundtrack along with the exploration of an interactive virtual + environment.' + address: 'Oslo, Norway' + author: 'Zappi, Victor and Mazzanti, Dario and Brogni, Andrea and Caldwell, Darwin' + bibtex: "@inproceedings{Zappi2011,\n abstract = {In this paper we introduce a multimodal\ + \ platform for Hybrid Reality live performances: by means of non-invasiveVirtual\ + \ Reality technology, we developed a system to presentartists and interactive\ + \ virtual objects in audio/visual choreographies on the same real stage. These\ + \ choreographiescould include spectators too, providing them with the possibility\ + \ to directly modify the scene and its audio/visual features. We also introduce\ + \ the first interactive performancestaged with this technology, in which an electronic\ + \ musician played live five tracks manipulating the 3D projectedvisuals. 
As questionnaires\ + \ have been distributed after theshow, in the last part of this work we discuss\ + \ the analysisof collected data, underlining positive and negative aspectsof the\ + \ proposed experience.This paper belongs together with a performance proposalcalled\ + \ Dissonance, in which two performers exploit the platform to create a progressive\ + \ soundtrack along with the exploration of an interactive virtual environment.},\n\ + \ address = {Oslo, Norway},\n author = {Zappi, Victor and Mazzanti, Dario and\ + \ Brogni, Andrea and Caldwell, Darwin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178197},\n\ + \ issn = {2220-4806},\n keywords = {Interactive Performance, Hybrid Choreographies,\ + \ Virtual Reality, Music Control },\n pages = {355--360},\n presentation-video\ + \ = {https://vimeo.com/26880256/},\n title = {Design and Evaluation of a Hybrid\ + \ Reality Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_355.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177105 + doi: 10.5281/zenodo.1178197 issn: 2220-4806 - keywords: 'dance,in dance,interaction between music and,interactive,interactive - dance,interactive performance,musical rhythm and rhythm,nime07,performance systems' - pages: 417--419 - title: Establishing a Musical Channel of Communication between Dancers and Musicians - in Computer-Mediated Collaborations in Dance Performance - url: http://www.nime.org/proceedings/2007/nime2007_417.pdf - year: 2007 + keywords: 'Interactive Performance, Hybrid Choreographies, Virtual Reality, Music + Control ' + pages: 355--360 + presentation-video: https://vimeo.com/26880256/ + title: Design and Evaluation of a Hybrid Reality Performance + url: http://www.nime.org/proceedings/2011/nime2011_355.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: 
Bull2007 - address: 'New York City, NY, United States' - author: 'Bull, Steve and Gresham-Lancaster, Scot and Mintchev, Kalin and Svoboda, - Terese' - bibtex: "@inproceedings{Bull2007,\n address = {New York City, NY, United States},\n\ - \ author = {Bull, Steve and Gresham-Lancaster, Scot and Mintchev, Kalin and Svoboda,\ - \ Terese},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177057},\n issn = {2220-4806},\n\ - \ keywords = {nime07},\n pages = {420--420},\n title = {Cellphonia : WET},\n url\ - \ = {http://www.nime.org/proceedings/2007/nime2007_420.pdf},\n year = {2007}\n\ - }\n" + ID: Garcia2011a + abstract: 'We conducted three studies with contemporary music composers at IRCAM. + We found that even highly computer-literate composers use an iterative process + that begins with expressing musical ideas on paper, followed by active parallel + exploration on paper and in software, prior to final execution of their ideas + as an original score. We conducted a participatory design study that focused on + the creative exploration phase, to design tools that help composers better integrate + their paper-based and electronic activities. We then developed InkSplorer as a + technology probe that connects users'' hand-written gestures on paper to Max/MSP + and OpenMusic. Composers appropriated InkSplorer according to their preferred + composition styles, emphasizing its ability to help them quickly explore musical + ideas on paper as they interact with the computer. We conclude with recommendations + for designing interactive paper tools that support the creative process, letting + users explore musical ideas both on paper and electronically. ' + address: 'Oslo, Norway' + author: 'Garcia, Jérémie and Tsandilas, Theophanis and Agon, Carlos and Mackay, + Wendy E.' 
+ bibtex: "@inproceedings{Garcia2011a,\n abstract = {We conducted three studies with\ + \ contemporary music composers at IRCAM. We found that even highly computer-literate\ + \ composers use an iterative process that begins with expressing musical ideas\ + \ on paper, followed by active parallel exploration on paper and in software,\ + \ prior to final execution of their ideas as an original score. We conducted a\ + \ participatory design study that focused on the creative exploration phase, to\ + \ design tools that help composers better integrate their paper-based and electronic\ + \ activities. We then developed InkSplorer as a technology probe that connects\ + \ users' hand-written gestures on paper to Max/MSP and OpenMusic. Composers appropriated\ + \ InkSplorer according to their preferred composition styles, emphasizing its\ + \ ability to help them quickly explore musical ideas on paper as they interact\ + \ with the computer. We conclude with recommendations for designing interactive\ + \ paper tools that support the creative process, letting users explore musical\ + \ ideas both on paper and electronically. },\n address = {Oslo, Norway},\n author\ + \ = {Garcia, J\\'{e}r\\'{e}mie and Tsandilas, Theophanis and Agon, Carlos and\ + \ Mackay, Wendy E.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178027},\n\ + \ issn = {2220-4806},\n keywords = {Composer, Creativity, Design Exploration,\ + \ InkSplorer, Interactive Paper, OpenMusic, Technology Probes. 
},\n pages = {361--366},\n\ + \ presentation-video = {https://vimeo.com/26881368/},\n title = {InkSplorer :\ + \ Exploring Musical Ideas on Paper and Computer},\n url = {http://www.nime.org/proceedings/2011/nime2011_361.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177057 + doi: 10.5281/zenodo.1178027 issn: 2220-4806 - keywords: nime07 - pages: 420--420 - title: 'Cellphonia : WET' - url: http://www.nime.org/proceedings/2007/nime2007_420.pdf - year: 2007 + keywords: 'Composer, Creativity, Design Exploration, InkSplorer, Interactive Paper, + OpenMusic, Technology Probes. ' + pages: 361--366 + presentation-video: https://vimeo.com/26881368/ + title: 'InkSplorer : Exploring Musical Ideas on Paper and Computer' + url: http://www.nime.org/proceedings/2011/nime2011_361.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Court2007 - address: 'New York City, NY, United States' - author: Collective Dearraindrop - bibtex: "@inproceedings{Court2007,\n address = {New York City, NY, United States},\n\ - \ author = {Collective Dearraindrop},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177083},\n\ - \ issn = {2220-4806},\n keywords = {nime07},\n pages = {421--421},\n title = {Miller},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_421.pdf},\n year = {2007}\n\ - }\n" + ID: Lopez2011 + abstract: 'The DJ culture uses a gesture lexicon strongly rooted in thetraditional + setup of turntables and a mixer. As novel toolsare introduced in the DJ community, + this lexicon is adaptedto the features they provide. 
In particular, multitouch + technologies can offer a new syntax while still supporting the oldlexicon, which + is desired by DJs.We present a classification of DJ tools, from an interaction + point of view, that divides the previous work into Traditional, Virtual and Hybrid + setups. Moreover, we presenta multitouch tabletop application, developed with + a groupof DJ consultants to ensure an adequate implementation ofthe traditional + gesture lexicon.To conclude, we conduct an expert evaluation, with tenDJ users + in which we compare the three DJ setups with ourprototype. The study revealed + that our proposal suits expectations of Club/Radio-DJs, but fails against the + mentalmodel of Scratch-DJs, due to the lack of haptic feedback torepresent the + record''s physical rotation. Furthermore, testsshow that our multitouch DJ setup, + reduces task durationwhen compared with Virtual setups.' + address: 'Oslo, Norway' + author: 'Lopez, Pedro and Ferreira, Alfredo and Pereira, J. A. Madeiras' + bibtex: "@inproceedings{Lopez2011,\n abstract = {The DJ culture uses a gesture lexicon\ + \ strongly rooted in thetraditional setup of turntables and a mixer. As novel\ + \ toolsare introduced in the DJ community, this lexicon is adaptedto the features\ + \ they provide. In particular, multitouch technologies can offer a new syntax\ + \ while still supporting the oldlexicon, which is desired by DJs.We present a\ + \ classification of DJ tools, from an interaction point of view, that divides\ + \ the previous work into Traditional, Virtual and Hybrid setups. Moreover, we\ + \ presenta multitouch tabletop application, developed with a groupof DJ consultants\ + \ to ensure an adequate implementation ofthe traditional gesture lexicon.To conclude,\ + \ we conduct an expert evaluation, with tenDJ users in which we compare the three\ + \ DJ setups with ourprototype. 
The study revealed that our proposal suits expectations\ + \ of Club/Radio-DJs, but fails against the mentalmodel of Scratch-DJs, due to\ + \ the lack of haptic feedback torepresent the record's physical rotation. Furthermore,\ + \ testsshow that our multitouch DJ setup, reduces task durationwhen compared with\ + \ Virtual setups.},\n address = {Oslo, Norway},\n author = {Lopez, Pedro and Ferreira,\ + \ Alfredo and Pereira, J. A. Madeiras},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178093},\n\ + \ issn = {2220-4806},\n keywords = {DJing, Multitouch Interaction, Expert User\ + \ evaluation, HCI },\n pages = {367--372},\n presentation-video = {https://vimeo.com/26881380/},\n\ + \ title = {Battle of the DJs: an HCI Perspective of Traditional, Virtual, Hybrid\ + \ and Multitouch DJing},\n url = {http://www.nime.org/proceedings/2011/nime2011_367.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177083 + doi: 10.5281/zenodo.1178093 issn: 2220-4806 - keywords: nime07 - pages: 421--421 - title: Miller - url: http://www.nime.org/proceedings/2007/nime2007_421.pdf - year: 2007 + keywords: 'DJing, Multitouch Interaction, Expert User evaluation, HCI ' + pages: 367--372 + presentation-video: https://vimeo.com/26881380/ + title: 'Battle of the DJs: an HCI Perspective of Traditional, Virtual, Hybrid and + Multitouch DJing' + url: http://www.nime.org/proceedings/2011/nime2011_367.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Hauert2007 - address: 'New York City, NY, United States' - author: 'Hauert, Sibylle and Reichmuth, Daniel and B\"{o}hm, Volker' - bibtex: "@inproceedings{Hauert2007,\n address = {New York City, NY, United States},\n\ - \ author = {Hauert, Sibylle and Reichmuth, Daniel and B\\\"{o}hm, Volker},\n booktitle\ + ID: MarquezBorbon2011 + abstract: 'As NIME''s focus has 
expanded beyond the design reportswhich were pervasive + in the early days to include studies andexperiments involving music control devices, + we report on aparticular area of activity that has been overlooked: designsof + music devices in experimental contexts. We demonstratethis is distinct from designing + for artistic performances, witha unique set of novel challenges. A survey of methodologicalapproaches + to experiments in NIME reveals a tendency torely on existing instruments or evaluations + of new devicesdesigned for broader creative application. We present twoexamples + from our own studies that reveal the merits ofdesigning purpose-built devices + for experimental contexts.' + address: 'Oslo, Norway' + author: 'Marquez-Borbon, Adnan and Gurevich, Michael and Fyans, A. Cavan and Stapleton, + Paul' + bibtex: "@inproceedings{MarquezBorbon2011,\n abstract = {As NIME's focus has expanded\ + \ beyond the design reportswhich were pervasive in the early days to include studies\ + \ andexperiments involving music control devices, we report on aparticular area\ + \ of activity that has been overlooked: designsof music devices in experimental\ + \ contexts. We demonstratethis is distinct from designing for artistic performances,\ + \ witha unique set of novel challenges. A survey of methodologicalapproaches to\ + \ experiments in NIME reveals a tendency torely on existing instruments or evaluations\ + \ of new devicesdesigned for broader creative application. We present twoexamples\ + \ from our own studies that reveal the merits ofdesigning purpose-built devices\ + \ for experimental contexts.},\n address = {Oslo, Norway},\n author = {Marquez-Borbon,\ + \ Adnan and Gurevich, Michael and Fyans, A. 
Cavan and Stapleton, Paul},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177115},\n issn = {2220-4806},\n keywords\ - \ = {nime07},\n pages = {422--422},\n title = {Instant City, a Music Building\ - \ Game Table},\n url = {http://www.nime.org/proceedings/2007/nime2007_422.pdf},\n\ - \ year = {2007}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1178099},\n issn = {2220-4806},\n keywords\ + \ = {Experiment, Methodology, Instrument Design, DMIs },\n pages = {373--376},\n\ + \ presentation-video = {https://vimeo.com/26882375/},\n title = {Designing Digital\ + \ Musical Interactions in Experimental Contexts},\n url = {http://www.nime.org/proceedings/2011/nime2011_373.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177115 + doi: 10.5281/zenodo.1178099 issn: 2220-4806 - keywords: nime07 - pages: 422--422 - title: 'Instant City, a Music Building Game Table' - url: http://www.nime.org/proceedings/2007/nime2007_422.pdf - year: 2007 + keywords: 'Experiment, Methodology, Instrument Design, DMIs ' + pages: 373--376 + presentation-video: https://vimeo.com/26882375/ + title: Designing Digital Musical Interactions in Experimental Contexts + url: http://www.nime.org/proceedings/2011/nime2011_373.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Milmoe2007 - address: 'New York City, NY, United States' - author: 'Milmoe, Andrew' - bibtex: "@inproceedings{Milmoe2007,\n address = {New York City, NY, United States},\n\ - \ author = {Milmoe, Andrew},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177197},\n\ - \ issn = {2220-4806},\n keywords = {nime07},\n pages = {423--423},\n title = {NIME\ - \ Performance \\& Installation : Sonic Pong V3.0},\n url = {http://www.nime.org/proceedings/2007/nime2007_423.pdf},\n\ - 
\ year = {2007}\n}\n" + ID: Reus2011 + abstract: 'This paper describes the design of Crackle, a interactivesound and touch + experience inspired by the CrackleBox.We begin by describing a ruleset for Crackle''s + interactionderived from the salient interactive qualities of the CrackleBox. An + implementation strategy is then described forrealizing the ruleset as an application + for the iPhone. Thepaper goes on to consider the potential of using Crackleas + an encapsulated interaction paradigm for exploring arbitrary sound spaces, and + concludes with lessons learned ondesigning for multitouch surfaces as expressive + input sensors.' + address: 'Oslo, Norway' + author: 'Reus, Jonathan' + bibtex: "@inproceedings{Reus2011,\n abstract = {This paper describes the design\ + \ of Crackle, a interactivesound and touch experience inspired by the CrackleBox.We\ + \ begin by describing a ruleset for Crackle's interactionderived from the salient\ + \ interactive qualities of the CrackleBox. An implementation strategy is then\ + \ described forrealizing the ruleset as an application for the iPhone. 
Thepaper\ + \ goes on to consider the potential of using Crackleas an encapsulated interaction\ + \ paradigm for exploring arbitrary sound spaces, and concludes with lessons learned\ + \ ondesigning for multitouch surfaces as expressive input sensors.},\n address\ + \ = {Oslo, Norway},\n author = {Reus, Jonathan},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178143},\n issn = {2220-4806},\n keywords = {touchscreen,\ + \ interface topology, mobile music, interaction paradigm, dynamic mapping, CrackleBox,\ + \ iPhone },\n pages = {377--380},\n presentation-video = {https://vimeo.com/26882621/},\n\ + \ title = {Crackle: A Dynamic Mobile Multitouch Topology for Exploratory Sound\ + \ Interaction},\n url = {http://www.nime.org/proceedings/2011/nime2011_377.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177197 + doi: 10.5281/zenodo.1178143 issn: 2220-4806 - keywords: nime07 - pages: 423--423 - title: 'NIME Performance \& Installation : Sonic Pong V3.0' - url: http://www.nime.org/proceedings/2007/nime2007_423.pdf - year: 2007 + keywords: 'touchscreen, interface topology, mobile music, interaction paradigm, + dynamic mapping, CrackleBox, iPhone ' + pages: 377--380 + presentation-video: https://vimeo.com/26882621/ + title: 'Crackle: A Dynamic Mobile Multitouch Topology for Exploratory Sound Interaction' + url: http://www.nime.org/proceedings/2011/nime2011_377.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Biggs2007 - address: 'New York City, NY, United States' - author: 'Biggs, Betsey' - bibtex: "@inproceedings{Biggs2007,\n address = {New York City, NY, United States},\n\ - \ author = {Biggs, Betsey},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177047},\n\ - \ issn = {2220-4806},\n keywords 
= {nime07},\n pages = {424--424},\n title = {The\ - \ Tipping Point},\n url = {http://www.nime.org/proceedings/2007/nime2007_424.pdf},\n\ - \ year = {2007}\n}\n" + ID: Aaron2011 + abstract: 'This paper introduces Improcess, a novel cross-disciplinarycollaborative + project focussed on the design and development of tools to structure the communication + between performer and musical process. We describe a 3-tiered architecture centering + around the notion of a Common MusicRuntime, a shared platform on top of which + inter-operatingclient interfaces may be combined to form new musical instruments. + This approach allows hardware devices such asthe monome to act as an extended + hardware interface withthe same power to initiate and control musical processesas + a bespoke programming language. Finally, we reflect onthe structure of the collaborative + project itself, which offers an opportunity to discuss general research strategy + forconducting highly sophisticated technical research within aperforming arts + environment such as the development of apersonal regime of preparation for performance.' + address: 'Oslo, Norway' + author: 'Aaron, Samuel and Blackwell, Alan and Hoadley, Richard and Regan, Tim' + bibtex: "@inproceedings{Aaron2011,\n abstract = {This paper introduces Improcess,\ + \ a novel cross-disciplinarycollaborative project focussed on the design and development\ + \ of tools to structure the communication between performer and musical process.\ + \ We describe a 3-tiered architecture centering around the notion of a Common\ + \ MusicRuntime, a shared platform on top of which inter-operatingclient interfaces\ + \ may be combined to form new musical instruments. 
This approach allows hardware\ + \ devices such asthe monome to act as an extended hardware interface withthe same\ + \ power to initiate and control musical processesas a bespoke programming language.\ + \ Finally, we reflect onthe structure of the collaborative project itself, which\ + \ offers an opportunity to discuss general research strategy forconducting highly\ + \ sophisticated technical research within aperforming arts environment such as\ + \ the development of apersonal regime of preparation for performance.},\n address\ + \ = {Oslo, Norway},\n author = {Aaron, Samuel and Blackwell, Alan and Hoadley,\ + \ Richard and Regan, Tim},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177935},\n\ + \ issn = {2220-4806},\n keywords = {Improvisation, live coding, controllers, monome,\ + \ collaboration, concurrency, abstractions },\n pages = {381--386},\n presentation-video\ + \ = {https://vimeo.com/26905683/},\n title = {A Principled Approach to Developing\ + \ New Languages for Live Coding},\n url = {http://www.nime.org/proceedings/2011/nime2011_381.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177047 + doi: 10.5281/zenodo.1177935 issn: 2220-4806 - keywords: nime07 - pages: 424--424 - title: The Tipping Point - url: http://www.nime.org/proceedings/2007/nime2007_424.pdf - year: 2007 + keywords: 'Improvisation, live coding, controllers, monome, collaboration, concurrency, + abstractions ' + pages: 381--386 + presentation-video: https://vimeo.com/26905683/ + title: A Principled Approach to Developing New Languages for Live Coding + url: http://www.nime.org/proceedings/2011/nime2011_381.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Morris2007 - address: 'New York City, NY, United States' - author: 'Morris, Simon' - bibtex: "@inproceedings{Morris2007,\n address = {New York 
City, NY, United States},\n\ - \ author = {Morris, Simon},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177203},\n\ - \ issn = {2220-4806},\n keywords = {nime07},\n pages = {425--425},\n title = {Musique\ - \ Concrete : Transforming Space , Sound and the City Through Skateboarding},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_425.pdf},\n year = {2007}\n\ + ID: Bullock2011 + address: 'Oslo, Norway' + author: 'Bullock, Jamie and Beattie, Daniel and Turner, Jerome' + bibtex: "@inproceedings{Bullock2011,\n address = {Oslo, Norway},\n author = {Bullock,\ + \ Jamie and Beattie, Daniel and Turner, Jerome},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177973},\n issn = {2220-4806},\n keywords = {live electronics,software,usability,user\ + \ experience},\n pages = {387--392},\n presentation-video = {https://vimeo.com/26906574/},\n\ + \ title = {Integra Live : a New Graphical User Interface for Live Electronic Music},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_387.pdf},\n year = {2011}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177203 + doi: 10.5281/zenodo.1177973 issn: 2220-4806 - keywords: nime07 - pages: 425--425 - title: 'Musique Concrete : Transforming Space , Sound and the City Through Skateboarding' - url: http://www.nime.org/proceedings/2007/nime2007_425.pdf - year: 2007 + keywords: 'live electronics,software,usability,user experience' + pages: 387--392 + presentation-video: https://vimeo.com/26906574/ + title: 'Integra Live : a New Graphical User Interface for Live Electronic Music' + url: http://www.nime.org/proceedings/2011/nime2011_387.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Uozumi2007 - address: 'New York City, NY, United States' - author: 'Uozumi, Yuta and 
Takahashi, Masato and Kobayashi, Ryoho' - bibtex: "@inproceedings{Uozumi2007,\n address = {New York City, NY, United States},\n\ - \ author = {Uozumi, Yuta and Takahashi, Masato and Kobayashi, Ryoho},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179467},\n issn = {2220-4806},\n keywords\ - \ = {nime07},\n pages = {426--426},\n title = {Bd : A Sound Installation with\ - \ Swarming Robots},\n url = {http://www.nime.org/proceedings/2007/nime2007_426.pdf},\n\ - \ year = {2007}\n}\n" + ID: Roh2011 + abstract: 'The design space of fabric multitouch surface interaction is explored + with emphasis on novel materials and construction techniques aimed towards reliable, + repairable pressure sensing surfaces for musical applications. ' + address: 'Oslo, Norway' + author: 'Roh, Jung-Sim and Mann, Yotam and Freed, Adrian and Wessel, David' + bibtex: "@inproceedings{Roh2011,\n abstract = {The design space of fabric multitouch\ + \ surface interaction is explored with emphasis on novel materials and construction\ + \ techniques aimed towards reliable, repairable pressure sensing surfaces for\ + \ musical applications. 
},\n address = {Oslo, Norway},\n author = {Roh, Jung-Sim\ + \ and Mann, Yotam and Freed, Adrian and Wessel, David},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178145},\n issn = {2220-4806},\n keywords = {Multitouch,\ + \ surface interaction, piezoresistive, fabric sensor, e-textiles, tangible computing,\ + \ drum controller },\n pages = {393--398},\n presentation-video = {https://vimeo.com/26906580/},\n\ + \ title = {Robust and Reliable Fabric, Piezoresistive Multitouch Sensing Surfaces\ + \ for Musical Controllers},\n url = {http://www.nime.org/proceedings/2011/nime2011_393.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179467 + doi: 10.5281/zenodo.1178145 issn: 2220-4806 - keywords: nime07 - pages: 426--426 - title: 'Bd : A Sound Installation with Swarming Robots' - url: http://www.nime.org/proceedings/2007/nime2007_426.pdf - year: 2007 + keywords: 'Multitouch, surface interaction, piezoresistive, fabric sensor, e-textiles, + tangible computing, drum controller ' + pages: 393--398 + presentation-video: https://vimeo.com/26906580/ + title: 'Robust and Reliable Fabric, Piezoresistive Multitouch Sensing Surfaces for + Musical Controllers' + url: http://www.nime.org/proceedings/2011/nime2011_393.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Stanza2007 - address: 'New York City, NY, United States' - author: ', Stanza' - bibtex: "@inproceedings{Stanza2007,\n address = {New York City, NY, United States},\n\ - \ author = {, Stanza},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177029},\n\ - \ issn = {2220-4806},\n keywords = {nime07},\n pages = {427--427},\n title = {Sensity},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_427.pdf},\n year = {2007}\n\ - }\n" + ID: 
Marshall2011 + abstract: 'This paper deals with the effects of integrated vibrotactile feedback + on the "feel" of a digital musical instrument(DMI). Building on previous work + developing a DMI withintegrated vibrotactile feedback actuators, we discuss howto + produce instrument-like vibrations, compare these simulated vibrations with those + produced by an acoustic instrument and examine how the integration of this feedbackeffects + performer ratings of the instrument. We found thatintegrated vibrotactile feedback + resulted in an increase inperformer engagement with the instrument, but resulted + ina reduction in the perceived control of the instrument. Wediscuss these results + and their implications for the design ofnew digital musical instruments.' + address: 'Oslo, Norway' + author: 'Marshall, Mark T. and Wanderley, Marcelo M.' + bibtex: "@inproceedings{Marshall2011,\n abstract = {This paper deals with the effects\ + \ of integrated vibrotactile feedback on the \"feel\" of a digital musical instrument(DMI).\ + \ Building on previous work developing a DMI withintegrated vibrotactile feedback\ + \ actuators, we discuss howto produce instrument-like vibrations, compare these\ + \ simulated vibrations with those produced by an acoustic instrument and examine\ + \ how the integration of this feedbackeffects performer ratings of the instrument.\ + \ We found thatintegrated vibrotactile feedback resulted in an increase inperformer\ + \ engagement with the instrument, but resulted ina reduction in the perceived\ + \ control of the instrument. Wediscuss these results and their implications for\ + \ the design ofnew digital musical instruments.},\n address = {Oslo, Norway},\n\ + \ author = {Marshall, Mark T. 
and Wanderley, Marcelo M.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178101},\n issn = {2220-4806},\n keywords = {Vibrotactile\ + \ Feedback, Digital Musical Instruments, Feel, Loudspeakers },\n pages = {399--404},\n\ + \ title = {Examining the Effects of Embedded Vibrotactile Feedback on the Feel\ + \ of a Digital Musical Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_399.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177029 + doi: 10.5281/zenodo.1178101 issn: 2220-4806 - keywords: nime07 - pages: 427--427 - title: Sensity - url: http://www.nime.org/proceedings/2007/nime2007_427.pdf - year: 2007 + keywords: 'Vibrotactile Feedback, Digital Musical Instruments, Feel, Loudspeakers ' + pages: 399--404 + title: Examining the Effects of Embedded Vibrotactile Feedback on the Feel of a + Digital Musical Instrument + url: http://www.nime.org/proceedings/2011/nime2011_399.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Sa2007 - address: 'New York City, NY, United States' - author: 'Sa, Adriana' - bibtex: "@inproceedings{Sa2007,\n address = {New York City, NY, United States},\n\ - \ author = {Sa, Adriana},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177235},\n\ - \ issn = {2220-4806},\n keywords = {nime07},\n pages = {428--428},\n title = {Thresholds},\n\ - \ url = {http://www.nime.org/proceedings/2007/nime2007_428.pdf},\n year = {2007}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1177235 + ID: Diakopoulos2011 + abstract: 'This paper presents a series of open-source firmwares for the latest + iteration of the popular Arduino microcontroller platform. 
A portmanteau of Human + Interface Device and Arduino, the HIDUINO project tackles a major problem in designing + NIMEs: easily and reliably communicating with a host computer using standard MIDI + over USB. HIDUINO was developed in conjunction with a class at the California + Institute of the Arts intended to teach introductory-level human-computer and + human-robot interaction within the context of musical controllers. We describe + our frustration with existing microcontroller platforms and our experiences using + the new firmware to facilitate the development and prototyping of new music controllers. ' + address: 'Oslo, Norway' + author: 'Diakopoulos, Dimitri and Kapur, Ajay' + bibtex: "@inproceedings{Diakopoulos2011,\n abstract = {This paper presents a series\ + \ of open-source firmwares for the latest iteration of the popular Arduino microcontroller\ + \ platform. A portmanteau of Human Interface Device and Arduino, the HIDUINO project\ + \ tackles a major problem in designing NIMEs: easily and reliably communicating\ + \ with a host computer using standard MIDI over USB. HIDUINO was developed in\ + \ conjunction with a class at the California Institute of the Arts intended to\ + \ teach introductory-level human-computer and human-robot interaction within the\ + \ context of musical controllers. We describe our frustration with existing microcontroller\ + \ platforms and our experiences using the new firmware to facilitate the development\ + \ and prototyping of new music controllers. 
},\n address = {Oslo, Norway},\n author\ + \ = {Diakopoulos, Dimitri and Kapur, Ajay},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177995},\n issn = {2220-4806},\n keywords = {Arduino, USB,\ + \ HID, MIDI, HCI, controllers, microcontrollers },\n pages = {405--408},\n presentation-video\ + \ = {https://vimeo.com/26908264/},\n title = {HIDUINO : A firmware for building\ + \ driverless {USB}-MIDI devices using the Arduino microcontroller},\n url = {http://www.nime.org/proceedings/2011/nime2011_405.pdf},\n\ + \ year = {2011}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177995 issn: 2220-4806 - keywords: nime07 - pages: 428--428 - title: Thresholds - url: http://www.nime.org/proceedings/2007/nime2007_428.pdf - year: 2007 + keywords: 'Arduino, USB, HID, MIDI, HCI, controllers, microcontrollers ' + pages: 405--408 + presentation-video: https://vimeo.com/26908264/ + title: 'HIDUINO : A firmware for building driverless USB-MIDI devices using the + Arduino microcontroller' + url: http://www.nime.org/proceedings/2011/nime2011_405.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Takahashi2007 - address: 'New York City, NY, United States' - author: 'Takahashi, Masato and Tanaka, Hiroya' - bibtex: "@inproceedings{Takahashi2007,\n address = {New York City, NY, United States},\n\ - \ author = {Takahashi, Masato and Tanaka, Hiroya},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177253},\n issn = {2220-4806},\n keywords = {nime07},\n\ - \ pages = {429--429},\n title = {bog : Instrumental Aliens},\n url = {http://www.nime.org/proceedings/2007/nime2007_429.pdf},\n\ - \ year = {2007}\n}\n" + ID: Flety2011 + abstract: 'We present a strategy for the improvement of wireless sensor data transmission + latency, implemented in 
two current projects involving gesture/control sound interaction. + Our platform was designed to be capable of accepting accessories using a digital + bus. The receiver features a IEEE 802.15.4 microcontroller associated to a TCP/IP + stack integrated circuit that transmits the received wireless data to a host computer + using the Open Sound Control protocol. This paper details how we improved the + latency and sample rate of the said technology while keeping the device small + and scalable. ' + address: 'Oslo, Norway' + author: 'Fléty, Emmanuel and Maestracci, Côme' + bibtex: "@inproceedings{Flety2011,\n abstract = {We present a strategy for the improvement\ + \ of wireless sensor data transmission latency, implemented in two current projects\ + \ involving gesture/control sound interaction. Our platform was designed to be\ + \ capable of accepting accessories using a digital bus. The receiver features\ + \ a IEEE 802.15.4 microcontroller associated to a TCP/IP stack integrated circuit\ + \ that transmits the received wireless data to a host computer using the Open\ + \ Sound Control protocol. This paper details how we improved the latency and sample\ + \ rate of the said technology while keeping the device small and scalable. },\n\ + \ address = {Oslo, Norway},\n author = {Fl\\'{e}ty, Emmanuel and Maestracci, C{\\\ + ^o}me},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178009},\n issn = {2220-4806},\n\ + \ keywords = {Embedded sensors, gesture recognition, wireless, sound and music\ + \ computing, interaction, 802.15.4, Zigbee. 
},\n pages = {409--412},\n presentation-video\ + \ = {https://vimeo.com/26908266/},\n title = {Latency Improvement in Sensor Wireless\ + \ Transmission Using {IEEE} 802.15.4},\n url = {http://www.nime.org/proceedings/2011/nime2011_409.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177253 + doi: 10.5281/zenodo.1178009 issn: 2220-4806 - keywords: nime07 - pages: 429--429 - title: 'bog : Instrumental Aliens' - url: http://www.nime.org/proceedings/2007/nime2007_429.pdf - year: 2007 + keywords: 'Embedded sensors, gesture recognition, wireless, sound and music computing, + interaction, 802.15.4, Zigbee. ' + pages: 409--412 + presentation-video: https://vimeo.com/26908266/ + title: Latency Improvement in Sensor Wireless Transmission Using IEEE 802.15.4 + url: http://www.nime.org/proceedings/2011/nime2011_409.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Oliver2007 - address: 'New York City, NY, United States' - author: 'Oliver, Julian and Pickles, Steven' - bibtex: "@inproceedings{Oliver2007,\n address = {New York City, NY, United States},\n\ - \ author = {Oliver, Julian and Pickles, Steven},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177213},\n issn = {2220-4806},\n keywords = {nime07},\n pages\ - \ = {430--430},\n title = {Fijuu2 : A Game-Based Audio-Visual Performance and\ - \ Composition Engine},\n url = {http://www.nime.org/proceedings/2007/nime2007_430.pdf},\n\ - \ year = {2007}\n}\n" + ID: Snyder2011 + abstract: 'The Snyderphonics Manta controller is a USB touch controller for music + and video. It features 48 capacitive touch sensors, arranged in a hexagonal grid, + with bi-color LEDs that are programmable from the computer. 
The sensors send continuous + data proportional to surface area touched, and a velocitydetection algorithm has + been implemented to estimate attack velocity based on this touch data. In addition + to these hexagonal sensors, the Manta has two high-dimension touch sliders (giving + 12-bit values), and four assignable function buttons. In this paper, I outline + the features of the controller, the available methods for communicating between + the device and a computer, and some current uses for the controller. ' + address: 'Oslo, Norway' + author: 'Snyder, Jeff' + bibtex: "@inproceedings{Snyder2011,\n abstract = {The Snyderphonics Manta controller\ + \ is a USB touch controller for music and video. It features 48 capacitive touch\ + \ sensors, arranged in a hexagonal grid, with bi-color LEDs that are programmable\ + \ from the computer. The sensors send continuous data proportional to surface\ + \ area touched, and a velocitydetection algorithm has been implemented to estimate\ + \ attack velocity based on this touch data. In addition to these hexagonal sensors,\ + \ the Manta has two high-dimension touch sliders (giving 12-bit values), and four\ + \ assignable function buttons. In this paper, I outline the features of the controller,\ + \ the available methods for communicating between the device and a computer, and\ + \ some current uses for the controller. 
},\n address = {Oslo, Norway},\n author\ + \ = {Snyder, Jeff},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178171},\n\ + \ issn = {2220-4806},\n keywords = {Snyderphonics, Manta, controller, USB, capacitive,\ + \ touch, sensor, decoupled LED, hexagon, grid, touch slider, HID, portable, wood,\ + \ live music, live video },\n pages = {413--416},\n presentation-video = {https://vimeo.com/26908273/},\n\ + \ title = {Snyderphonics Manta Controller, a Novel {USB} Touch-Controller},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_413.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177213 + doi: 10.5281/zenodo.1178171 issn: 2220-4806 - keywords: nime07 - pages: 430--430 - title: 'Fijuu2 : A Game-Based Audio-Visual Performance and Composition Engine' - url: http://www.nime.org/proceedings/2007/nime2007_430.pdf - year: 2007 + keywords: 'Snyderphonics, Manta, controller, USB, capacitive, touch, sensor, decoupled + LED, hexagon, grid, touch slider, HID, portable, wood, live music, live video ' + pages: 413--416 + presentation-video: https://vimeo.com/26908273/ + title: 'Snyderphonics Manta Controller, a Novel USB Touch-Controller' + url: http://www.nime.org/proceedings/2011/nime2011_413.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Corness2007 - address: 'New York City, NY, United States' - author: 'Seo, Jinsil and Corness, Greg' - bibtex: "@inproceedings{Corness2007,\n address = {New York City, NY, United States},\n\ - \ author = {Seo, Jinsil and Corness, Greg},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177243},\n issn = {2220-4806},\n keywords = {nime07},\n pages\ - \ = {431--431},\n title = {nite\\_aura : An Audio-Visual Interactive Immersive\ - \ Installation},\n url = 
{http://www.nime.org/proceedings/2007/nime2007_431.pdf},\n\ - \ year = {2007}\n}\n" + ID: Hsu2011 + address: 'Oslo, Norway' + author: 'Hsu, William' + bibtex: "@inproceedings{Hsu2011,\n address = {Oslo, Norway},\n author = {Hsu, William},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178047},\n issn = {2220-4806},\n\ + \ keywords = {animation,audio-visual,generative,improvisation,interactive},\n\ + \ pages = {417--420},\n title = {On Movement , Structure and Abstraction in Generative\ + \ Audiovisual Improvisation},\n url = {http://www.nime.org/proceedings/2011/nime2011_417.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177243 + doi: 10.5281/zenodo.1178047 issn: 2220-4806 - keywords: nime07 - pages: 431--431 - title: 'nite\_aura : An Audio-Visual Interactive Immersive Installation' - url: http://www.nime.org/proceedings/2007/nime2007_431.pdf - year: 2007 + keywords: animation,audio-visual,generative,improvisation,interactive + pages: 417--420 + title: 'On Movement , Structure and Abstraction in Generative Audiovisual Improvisation' + url: http://www.nime.org/proceedings/2011/nime2011_417.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Historia2007 - address: 'New York City, NY, United States' - author: '\''{A}lvarez-Fern\''{a}ndez, Miguel and Kersten, Stefan and Piascik, Asia' - bibtex: "@inproceedings{Historia2007,\n address = {New York City, NY, United States},\n\ - \ author = {\\'{A}lvarez-Fern\\'{a}ndez, Miguel and Kersten, Stefan and Piascik,\ - \ Asia},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177031},\n issn = {2220-4806},\n\ - \ keywords = {nime07},\n pages = {432--432},\n title = {Soundanism},\n url = {http://www.nime.org/proceedings/2007/nime2007_432.pdf},\n\ - \ year = 
{2007}\n}\n" + ID: Angel2011 + abstract: "This paper deals with the usage of bio-data from performers to create\ + \ interactive multimedia performances or installations. It presents this type\ + \ of research in some art works produced in the last fifty years (such as Lucier's\ + \ Music for a Solo Performance, from 1965), including two interactive performances\ + \ of my ,\n,\nauthorship, which use two different types of bio-interfaces: on\ + \ the one hand, an EMG (Electromyography) and on the other hand, an EEG (electroencephalography).\ + \ The paper explores the interaction between the human body and real-time media\ + \ (audio and visual) by the usage of bio-interfaces. This research is based on\ + \ biofeedback investigations pursued by the psychologist Neal E. Miller in the\ + \ 1960s, mainly based on finding new methods to reduce stress. However, this article\ + \ explains and shows examples in which biofeedback research is used for artistic\ + \ purposes only. " + address: 'Oslo, Norway' + author: 'Angel, Claudia R.' + bibtex: "@inproceedings{Angel2011,\n abstract = {This paper deals with the usage\ + \ of bio-data from performers to create interactive multimedia performances or\ + \ installations. It presents this type of research in some art works produced\ + \ in the last fifty years (such as Lucier's Music for a Solo Performance, from\ + \ 1965), including two interactive performances of my ,\n,\nauthorship, which\ + \ use two different types of bio-interfaces: on the one hand, an EMG (Electromyography)\ + \ and on the other hand, an EEG (electroencephalography). The paper explores the\ + \ interaction between the human body and real-time media (audio and visual) by\ + \ the usage of bio-interfaces. This research is based on biofeedback investigations\ + \ pursued by the psychologist Neal E. Miller in the 1960s, mainly based on finding\ + \ new methods to reduce stress. 
However, this article explains and shows examples\ + \ in which biofeedback research is used for artistic purposes only. },\n address\ + \ = {Oslo, Norway},\n author = {Angel, Claudia R.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177943},\n issn = {2220-4806},\n keywords = {Live electronics,\ + \ Butoh, performance, biofeedback, interactive sound and video. },\n pages = {421--424},\n\ + \ title = {Creating Interactive Multimedia Works with Bio-data},\n url = {http://www.nime.org/proceedings/2011/nime2011_421.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177031 + doi: 10.5281/zenodo.1177943 issn: 2220-4806 - keywords: nime07 - pages: 432--432 - title: Soundanism - url: http://www.nime.org/proceedings/2007/nime2007_432.pdf - year: 2007 + keywords: 'Live electronics, Butoh, performance, biofeedback, interactive sound + and video. ' + pages: 421--424 + title: Creating Interactive Multimedia Works with Bio-data + url: http://www.nime.org/proceedings/2011/nime2011_421.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Quessy2007 - address: 'New York City, NY, United States' - author: 'Quessy, Alexandre' - bibtex: "@inproceedings{Quessy2007,\n address = {New York City, NY, United States},\n\ - \ author = {Quessy, Alexandre},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177225},\n\ - \ issn = {2220-4806},\n keywords = {nime07},\n pages = {433--433},\n title = {Human\ - \ Sequencer},\n url = {http://www.nime.org/proceedings/2007/nime2007_433.pdf},\n\ - \ year = {2007}\n}\n" + ID: Ustarroz2011 + abstract: 'TresnaNet explores the potential of Telematics as a generator ofmusical + expressions. 
I pretend to sound the silent flow ofinformation from the network.This + is realized through the fabrication of a prototypefollowing the intention of giving + substance to the intangibleparameters of our communication. The result may haveeducational, + commercial and artistic applications because it is aphysical and perceptible representation + of the transfer ofinformation over the network. This paper describes the design,implementation + and conclusions about TresnaNet.' + address: 'Oslo, Norway' + author: 'Ustarroz, Paula' + bibtex: "@inproceedings{Ustarroz2011,\n abstract = {TresnaNet explores the potential\ + \ of Telematics as a generator ofmusical expressions. I pretend to sound the silent\ + \ flow ofinformation from the network.This is realized through the fabrication\ + \ of a prototypefollowing the intention of giving substance to the intangibleparameters\ + \ of our communication. The result may haveeducational, commercial and artistic\ + \ applications because it is aphysical and perceptible representation of the transfer\ + \ ofinformation over the network. This paper describes the design,implementation\ + \ and conclusions about TresnaNet.},\n address = {Oslo, Norway},\n author = {Ustarroz,\ + \ Paula},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178181},\n issn = {2220-4806},\n\ + \ keywords = {Interface, musical generation, telematics, network, musical instrument,\ + \ network sniffer. 
},\n pages = {425--428},\n title = {TresnaNet Musical Generation\ + \ based on Network Protocols},\n url = {http://www.nime.org/proceedings/2011/nime2011_425.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177225 + doi: 10.5281/zenodo.1178181 issn: 2220-4806 - keywords: nime07 - pages: 433--433 - title: Human Sequencer - url: http://www.nime.org/proceedings/2007/nime2007_433.pdf - year: 2007 + keywords: 'Interface, musical generation, telematics, network, musical instrument, + network sniffer. ' + pages: 425--428 + title: TresnaNet Musical Generation based on Network Protocols + url: http://www.nime.org/proceedings/2011/nime2011_425.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Tomas2019 - abstract: 'This paper reports on a workshop where participants produced physical - mock-ups of musical interfaces directly after miming control of short electroacoustic - music pieces. Our goal was understanding how people envision and materialize their - own sound-producing gestures from spontaneous cognitive mappings. During the workshop, - 50 participants from different creative backgrounds modeled more than 180 physical - artifacts. Participants were filmed and interviewed for the later analysis of - their different multimodal associations about music. Our initial hypothesis was - that most of the physical mock-ups would be similar to the sound-producing objects - that participants would identify in the musical pieces. Although the majority - of artifacts clearly showed correlated design trajectories, our results indicate - that a relevant number of participants intuitively decided to engineer alternative - solutions emphasizing their personal design preferences. Therefore, in this paper - we present and discuss the workshop format, its results and the possible applications - for designing new musical interfaces.' 
- address: 'Porto Alegre, Brazil' - author: Enrique Tomas and Thomas Gorbach and Hilda Tellioglu and Martin Kaltenbrunner - bibtex: "@inproceedings{Tomas2019,\n abstract = {This paper reports on a workshop\ - \ where participants produced physical mock-ups of musical interfaces directly\ - \ after miming control of short electroacoustic music pieces. Our goal was understanding\ - \ how people envision and materialize their own sound-producing gestures from\ - \ spontaneous cognitive mappings. During the workshop, 50 participants from different\ - \ creative backgrounds modeled more than 180 physical artifacts. Participants\ - \ were filmed and interviewed for the later analysis of their different multimodal\ - \ associations about music. Our initial hypothesis was that most of the physical\ - \ mock-ups would be similar to the sound-producing objects that participants would\ - \ identify in the musical pieces. Although the majority of artifacts clearly showed\ - \ correlated design trajectories, our results indicate that a relevant number\ - \ of participants intuitively decided to engineer alternative solutions emphasizing\ - \ their personal design preferences. 
Therefore, in this paper we present and discuss\ - \ the workshop format, its results and the possible applications for designing\ - \ new musical interfaces.},\n address = {Porto Alegre, Brazil},\n author = {Enrique\ - \ Tomas and Thomas Gorbach and Hilda Tellioglu and Martin Kaltenbrunner},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.3672842},\n editor = {Marcelo Queiroz and\ - \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {1--6},\n\ - \ publisher = {UFRGS},\n title = {Material embodiments of electroacoustic music:\ - \ an experimental workshop study},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper001.pdf},\n\ - \ year = {2019}\n}\n" + ID: Luhtala2011 + address: 'Oslo, Norway' + author: 'Luhtala, Matti and Kym\''''{a}l\''''{a}inen, Tiina and Plomp, Johan' + bibtex: "@inproceedings{Luhtala2011,\n address = {Oslo, Norway},\n author = {Luhtala,\ + \ Matti and Kym\\''{a}l\\''{a}inen, Tiina and Plomp, Johan},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178095},\n issn = {2220-4806},\n keywords = {Music interfaces,\ + \ music therapy, modifiable interfaces, design tools, Human-Technology Interaction\ + \ (HTI), User-Centred Design (UCD), design for all (DfA), prototyping, performance.\ + \ },\n pages = {429--432},\n title = {Designing a Music Performance Space for\ + \ Persons with Intellectual Learning Disabilities},\n url = {http://www.nime.org/proceedings/2011/nime2011_429.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672842 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178095 issn: 2220-4806 - month: June - pages: 1--6 - publisher: UFRGS - title: 'Material embodiments of electroacoustic music: an experimental workshop - 
study' - url: http://www.nime.org/proceedings/2019/nime2019_paper001.pdf - year: 2019 + keywords: 'Music interfaces, music therapy, modifiable interfaces, design tools, + Human-Technology Interaction (HTI), User-Centred Design (UCD), design for all + (DfA), prototyping, performance. ' + pages: 429--432 + title: Designing a Music Performance Space for Persons with Intellectual Learning + Disabilities + url: http://www.nime.org/proceedings/2011/nime2011_429.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Lu2019 - abstract: 'In this paper, collaborative performance is defined as the performance - of the piano by the performer and accompanied by an automatic harp. The automatic - harp can play music based on the electronic score and change its speed according - to the speed of the performer. We built a 32-channel automatic harp and designed - a layered modular framework integrating both hardware and software, for experimental - real-time control protocols. Considering that MIDI keyboard lacking information - of force (acceleration) and fingering detection, both of which are important for - expression, we designed force-sensor glove and achieved basic image recognition. - They are used to accurately detect speed, force (corresponding to velocity in - MIDI) and pitch when a performer plays the piano.' - address: 'Porto Alegre, Brazil' - author: Yupu Lu and Yijie Wu and Shijie Zhu - bibtex: "@inproceedings{Lu2019,\n abstract = {In this paper, collaborative performance\ - \ is defined as the performance of the piano by the performer and accompanied\ - \ by an automatic harp. The automatic harp can play music based on the electronic\ - \ score and change its speed according to the speed of the performer. We built\ - \ a 32-channel automatic harp and designed a layered modular framework integrating\ - \ both hardware and software, for experimental real-time control protocols. 
Considering\ - \ that MIDI keyboard lacking information of force (acceleration) and fingering\ - \ detection, both of which are important for expression, we designed force-sensor\ - \ glove and achieved basic image recognition. They are used to accurately detect\ - \ speed, force (corresponding to velocity in MIDI) and pitch when a performer\ - \ plays the piano.},\n address = {Porto Alegre, Brazil},\n author = {Yupu Lu and\ - \ Yijie Wu and Shijie Zhu},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672846},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {7--8},\n publisher = {UFRGS},\n title = {Collaborative\ - \ Musical Performances with Automatic Harp Based on Image Recognition and Force\ - \ Sensing Resistors},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper002.pdf},\n\ - \ year = {2019}\n}\n" + ID: Ahola2011 + abstract: 'Motion-based interactive systems have long been utilizedin contemporary + dance performances. These performancesbring new insight to sound-action experiences + in multidisciplinary art forms. This paper discusses the related technology within + the framework of the dance piece, Raja. The performance set up of Raja gives a + possibility to use two complementary tracking systems and two alternative choices + formotion sensors in real-time audio-visual synthesis.' + address: 'Oslo, Norway' + author: 'Ahola, Tom and Tahiroglu, Koray and Ahmaniemi, Teemu and Belloni, Fabio + and Ranki, Ville' + bibtex: "@inproceedings{Ahola2011,\n abstract = {Motion-based interactive systems\ + \ have long been utilizedin contemporary dance performances. These performancesbring\ + \ new insight to sound-action experiences in multidisciplinary art forms. This\ + \ paper discusses the related technology within the framework of the dance piece,\ + \ Raja. 
The performance set up of Raja gives a possibility to use two complementary\ + \ tracking systems and two alternative choices formotion sensors in real-time\ + \ audio-visual synthesis.},\n address = {Oslo, Norway},\n author = {Ahola, Tom\ + \ and Tahiroglu, Koray and Ahmaniemi, Teemu and Belloni, Fabio and Ranki, Ville},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177937},\n issn = {2220-4806},\n\ + \ keywords = {raja, performance, dance, motion sensor, accelerometer, gyro, positioning,\ + \ sonification, pure data, visualization, Qt},\n pages = {433--436},\n title =\ + \ {Raja -- A Multidisciplinary Artistic Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_433.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672846 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1177937 issn: 2220-4806 - month: June - pages: 7--8 - publisher: UFRGS - title: Collaborative Musical Performances with Automatic Harp Based on Image Recognition - and Force Sensing Resistors - url: http://www.nime.org/proceedings/2019/nime2019_paper002.pdf - year: 2019 + keywords: 'raja, performance, dance, motion sensor, accelerometer, gyro, positioning, + sonification, pure data, visualization, Qt' + pages: 433--436 + title: Raja -- A Multidisciplinary Artistic Performance + url: http://www.nime.org/proceedings/2011/nime2011_433.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Arbel2019 - abstract: 'The Symbaline is an active instrument comprised of several partly-filled - wine glasses excited by electromagnetic coils. This work describes an electromechanical - system for incorporating frequency and amplitude modulation to the Symbaline''s - sound. A pendulum having a magnetic bob is suspended inside the liquid in the - wine glass. 
The pendulum is put into oscillation by driving infra-sound signals - through the coil. The pendulum''s movement causes the liquid in the glass to slosh - back and forth. Simultaneously, wine glass sounds are produced by driving audio-range - signals through the coil, inducing vibrations in a small magnet attached to the - glass surface and exciting glass vibrations. As the glass vibrates, the sloshing - liquid periodically changes the glass''s resonance frequencies and dampens the - glass, thus modulating both wine glass pitch and sound intensity.' - address: 'Porto Alegre, Brazil' - author: Lior Arbel and Yoav Y. Schechner and Noam Amir - bibtex: "@inproceedings{Arbel2019,\n abstract = {The Symbaline is an active instrument\ - \ comprised of several partly-filled wine glasses excited by electromagnetic coils.\ - \ This work describes an electromechanical system for incorporating frequency\ - \ and amplitude modulation to the Symbaline's sound. A pendulum having a magnetic\ - \ bob is suspended inside the liquid in the wine glass. The pendulum is put into\ - \ oscillation by driving infra-sound signals through the coil. The pendulum's\ - \ movement causes the liquid in the glass to slosh back and forth. Simultaneously,\ - \ wine glass sounds are produced by driving audio-range signals through the coil,\ - \ inducing vibrations in a small magnet attached to the glass surface and exciting\ - \ glass vibrations. As the glass vibrates, the sloshing liquid periodically changes\ - \ the glass's resonance frequencies and dampens the glass, thus modulating both\ - \ wine glass pitch and sound intensity.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Lior Arbel and Yoav Y. 
Schechner and Noam Amir},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672848},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {9--14},\n publisher = {UFRGS},\n\ - \ title = {The Symbaline --- An Active Wine Glass Instrument with a Liquid Sloshing\ - \ Vibrato Mechanism},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper003.pdf},\n\ - \ year = {2019}\n}\n" + ID: Gallin2011 + address: 'Oslo, Norway' + author: 'Gallin, Emmanuelle and Sirguy, Marc' + bibtex: "@inproceedings{Gallin2011,\n address = {Oslo, Norway},\n author = {Gallin,\ + \ Emmanuelle and Sirguy, Marc},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178023},\n\ + \ issn = {2220-4806},\n keywords = {Controller, Sensor, MIDI, USB, Computer Music,\ + \ USB, OSC, CV, MIDI, DMX, A/D Converter, Interface. },\n pages = {437--440},\n\ + \ title = {Eobody3: a Ready-to-use Pre-mapped \\& Multi-protocol Sensor Interface},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_437.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672848 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178023 issn: 2220-4806 - month: June - pages: 9--14 - publisher: UFRGS - title: The Symbaline --- An Active Wine Glass Instrument with a Liquid Sloshing - Vibrato Mechanism - url: http://www.nime.org/proceedings/2019/nime2019_paper003.pdf - year: 2019 + keywords: 'Controller, Sensor, MIDI, USB, Computer Music, USB, OSC, CV, MIDI, DMX, + A/D Converter, Interface. 
' + pages: 437--440 + title: 'Eobody3: a Ready-to-use Pre-mapped \& Multi-protocol Sensor Interface' + url: http://www.nime.org/proceedings/2011/nime2011_437.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: deSouzaNunes2019 - abstract: "This paper presents the SIBILIM, a low-cost musical interface composed\ - \ of a resonance box made of cardboard containing customised push buttons that\ - \ interact with a smartphone through its video camera. Each button can be mapped\ - \ to a set of MIDI notes or control parameters. The sound is generated through\ - \ synthesis or sample playback and can be amplified with the help of a transducer,\ - \ which excites the resonance box. An essential contribution of this interface\ - \ is the possibility of reconfiguration of the buttons layout without the need\ - \ to hard rewire the system since it uses only the smartphone built-in camera.\ - \ This features allow for quick instrument customisation for different use cases,\n\ - such as low cost projects for schools or instrument building workshops. Our case\ - \ study used the Sibilim for music education, where it was designed to develop\ - \ the conscious of music perception and to stimulate creativity through exercises\ - \ of short tonal musical compositions. We conducted a study with a group of twelve\ - \ participants in an experimental workshop to verify its validity." - address: 'Porto Alegre, Brazil' - author: Helena de Souza Nunes and Federico Visi and Lydia Helena Wohl Coelho and - Rodrigo Schramm - bibtex: "@inproceedings{deSouzaNunes2019,\n abstract = {This paper presents the\ - \ SIBILIM, a low-cost musical interface composed of a resonance box made of cardboard\ - \ containing customised push buttons that interact with a smartphone through its\ - \ video camera. 
Each button can be mapped to a set of MIDI notes or control parameters.\ - \ The sound is generated through synthesis or sample playback and can be amplified\ - \ with the help of a transducer, which excites the resonance box. An essential\ - \ contribution of this interface is the possibility of reconfiguration of the\ - \ buttons layout without the need to hard rewire the system since it uses only\ - \ the smartphone built-in camera. This features allow for quick instrument customisation\ - \ for different use cases,\nsuch as low cost projects for schools or instrument\ - \ building workshops. Our case study used the Sibilim for music education, where\ - \ it was designed to develop the conscious of music perception and to stimulate\ - \ creativity through exercises of short tonal musical compositions. We conducted\ - \ a study with a group of twelve participants in an experimental workshop to verify\ - \ its validity.},\n address = {Porto Alegre, Brazil},\n author = {Helena de Souza\ - \ Nunes and Federico Visi and Lydia Helena Wohl Coelho and Rodrigo Schramm},\n\ + ID: Baath2011 + abstract: 'The aim of this study was to investigate how well subjectsbeat out a + rhythm using eye movements and to establishthe most accurate method of doing this. + Eighteen subjectsparticipated in an experiment were five different methodswere + evaluated. A fixation based method was found to bethe most accurate. All subjects + were able to synchronizetheir eye movements with a given beat but the accuracywas + much lower than usually found in finger tapping studies. Many parts of the body + are used to make music but sofar, with a few exceptions, the eyes have been silent. + The research presented here provides guidelines for implementingeye controlled + musical interfaces. Such interfaces would enable performers and artists to use + eye movement for musicalexpression and would open up new, exiting possibilities.' 
+ address: 'Oslo, Norway' + author: 'Bååth, Rasmus and Strandberg, Thomas and Balkenius, Christian' + bibtex: "@inproceedings{Baath2011,\n abstract = {The aim of this study was to investigate\ + \ how well subjectsbeat out a rhythm using eye movements and to establishthe most\ + \ accurate method of doing this. Eighteen subjectsparticipated in an experiment\ + \ were five different methodswere evaluated. A fixation based method was found\ + \ to bethe most accurate. All subjects were able to synchronizetheir eye movements\ + \ with a given beat but the accuracywas much lower than usually found in finger\ + \ tapping studies. Many parts of the body are used to make music but sofar, with\ + \ a few exceptions, the eyes have been silent. The research presented here provides\ + \ guidelines for implementingeye controlled musical interfaces. Such interfaces\ + \ would enable performers and artists to use eye movement for musicalexpression\ + \ and would open up new, exiting possibilities.},\n address = {Oslo, Norway},\n\ + \ author = {B\\aa\\aath, Rasmus and Strandberg, Thomas and Balkenius, Christian},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672850},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {15--20},\n\ - \ publisher = {UFRGS},\n title = {SIBILIM: A low-cost customizable wireless musical\ - \ interface},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper004.pdf},\n\ - \ year = {2019}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1177947},\n issn = {2220-4806},\n\ + \ keywords = {Rhythm, Eye tracking, Sensorimotor synchronization, Eye tapping\ + \ },\n pages = {441--444},\n title = {Eye Tapping : How to Beat Out an Accurate\ + \ Rhythm using Eye Movements},\n url = {http://www.nime.org/proceedings/2011/nime2011_441.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International 
Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672850 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1177947 issn: 2220-4806 - month: June - pages: 15--20 - publisher: UFRGS - title: 'SIBILIM: A low-cost customizable wireless musical interface' - url: http://www.nime.org/proceedings/2019/nime2019_paper004.pdf - year: 2019 + keywords: 'Rhythm, Eye tracking, Sensorimotor synchronization, Eye tapping ' + pages: 441--444 + title: 'Eye Tapping : How to Beat Out an Accurate Rhythm using Eye Movements' + url: http://www.nime.org/proceedings/2011/nime2011_441.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Bell2019 - abstract: 'The combination of graphic/animated scores, acoustic signals (audio-scores) - and Head-Mounted Display (HMD) technology offers promising potentials in the context - of distributed notation, for live performances and concerts involving voices, - instruments and electronics. After an explanation of what SmartVox is technically, - and how it is used by composers and performers, this paper explains why this form - of technology-aided performance might help musicians for synchronization to an - electronic tape and (spectral) tuning. Then, from an exploration of the concepts - of distributed notation and networked music performances, it proposes solutions - (in conjunction with INScore, BabelScores and the Decibel Score Player) seeking - for the expansion of distributed notation practice to a wider community. It finally - presents findings relative to the use of SmartVox with HMDs.' - address: 'Porto Alegre, Brazil' - author: Jonathan Bell - bibtex: "@inproceedings{Bell2019,\n abstract = {The combination of graphic/animated\ - \ scores, acoustic signals (audio-scores) and Head-Mounted Display (HMD) technology\ - \ offers promising potentials in the context of distributed notation, for live\ - \ performances and concerts involving voices, instruments and electronics. 
After\ - \ an explanation of what SmartVox is technically, and how it is used by composers\ - \ and performers, this paper explains why this form of technology-aided performance\ - \ might help musicians for synchronization to an electronic tape and (spectral)\ - \ tuning. Then, from an exploration of the concepts of distributed notation and\ - \ networked music performances, it proposes solutions (in conjunction with INScore,\ - \ BabelScores and the Decibel Score Player) seeking for the expansion of distributed\ - \ notation practice to a wider community. It finally presents findings relative\ - \ to the use of SmartVox with HMDs.},\n address = {Porto Alegre, Brazil},\n author\ - \ = {Jonathan Bell},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672852},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {21--24},\n publisher = {UFRGS},\n title = {The Risset Cycle,\ - \ Recent Use Cases With SmartVox},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper005.pdf},\n\ - \ year = {2019}\n}\n" + ID: Rosenbaum2011 + abstract: 'I present MelodyMorph, a reconfigurable musical instrument designed with + a focus on melodic improvisation. It is designed for a touch-screen interface, + and allows the user to create "bells" which can be tapped to play a note, and + dragged around on a pannable and zoomable canvas. Colors, textures and shapes + of the bells represent pitch and timbre properties. "Recorder bells" can store + and play back performances. Users can construct instruments that are modifiable + as they play, and build up complex melodies hierarchically from simple parts. ' + address: 'Oslo, Norway' + author: 'Rosenbaum, Eric' + bibtex: "@inproceedings{Rosenbaum2011,\n abstract = {I present MelodyMorph, a reconfigurable\ + \ musical instrument designed with a focus on melodic improvisation. 
It is designed\ + \ for a touch-screen interface, and allows the user to create \"bells\" which\ + \ can be tapped to play a note, and dragged around on a pannable and zoomable\ + \ canvas. Colors, textures and shapes of the bells represent pitch and timbre\ + \ properties. \"Recorder bells\" can store and play back performances. Users can\ + \ construct instruments that are modifiable as they play, and build up complex\ + \ melodies hierarchically from simple parts. },\n address = {Oslo, Norway},\n\ + \ author = {Rosenbaum, Eric},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178147},\n\ + \ issn = {2220-4806},\n keywords = {Melody, improvisation, representation, multi-touch,\ + \ iPad },\n pages = {445--447},\n title = {MelodyMorph: A Reconfigurable Musical\ + \ Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_445.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672852 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178147 issn: 2220-4806 - month: June - pages: 21--24 - publisher: UFRGS - title: 'The Risset Cycle, Recent Use Cases With SmartVox' - url: http://www.nime.org/proceedings/2019/nime2019_paper005.pdf - year: 2019 + keywords: 'Melody, improvisation, representation, multi-touch, iPad ' + pages: 445--447 + title: 'MelodyMorph: A Reconfigurable Musical Instrument' + url: http://www.nime.org/proceedings/2011/nime2011_445.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Wang2019 - abstract: 'This paper documents the key issues of performance and compatibility - working with Musical Instrument Digital Interface (MIDI) via Bluetooth Low Energy - (BLE) as a wireless interface for sensor or controller data and inter-module communication - in the context of building interactive digital systems. 
An overview of BLE MIDI - is presented along with a comparison of the protocol from the perspective of theoretical - limits and interoperability, showing its widespread compatibility across platforms - compared with other alternatives. Then we perform three complementary tests on - BLE MIDI and alternative interfaces using prototype and commercial devices, showing - that BLE MIDI has comparable performance with the tested WiFi implementations, - with end-to-end (sensor input to audio output) latencies of under 10ms under certain - conditions. Overall, BLE MIDI is an ideal choice for controllers and sensor interfaces - that are designed to work on a wide variety of platforms.' - address: 'Porto Alegre, Brazil' - author: Johnty Wang and Axel Mulder and Marcelo Wanderley - bibtex: "@inproceedings{Wang2019,\n abstract = {This paper documents the key issues\ - \ of performance and compatibility working with Musical Instrument Digital Interface\ - \ (MIDI) via Bluetooth Low Energy (BLE) as a wireless interface for sensor or\ - \ controller data and inter-module communication in the context of building interactive\ - \ digital systems. An overview of BLE MIDI is presented along with a comparison\ - \ of the protocol from the perspective of theoretical limits and interoperability,\ - \ showing its widespread compatibility across platforms compared with other alternatives.\ - \ Then we perform three complementary tests on BLE MIDI and alternative interfaces\ - \ using prototype and commercial devices, showing that BLE MIDI has comparable\ - \ performance with the tested WiFi implementations, with end-to-end (sensor input\ - \ to audio output) latencies of under 10ms under certain conditions. 
Overall,\ - \ BLE MIDI is an ideal choice for controllers and sensor interfaces that are designed\ - \ to work on a wide variety of platforms.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Johnty Wang and Axel Mulder and Marcelo Wanderley},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672854},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {25--30},\n publisher = {UFRGS},\n\ - \ title = {Practical Considerations for {MIDI} over Bluetooth Low Energy as a\ - \ Wireless Interface},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper006.pdf},\n\ - \ year = {2019}\n}\n" + ID: Franinovic2011 + address: 'Oslo, Norway' + author: 'Franinovic, Karmen' + bibtex: "@inproceedings{Franinovic2011,\n address = {Oslo, Norway},\n author = {Franinovic,\ + \ Karmen},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178013},\n issn = {2220-4806},\n\ + \ keywords = {exploration,gesture,habit,sonic interaction design},\n pages = {448--452},\n\ + \ title = {The Flo)(ps : Negotiating Between Habitual and Explorative Gestures},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_448.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672854 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178013 issn: 2220-4806 - month: June - pages: 25--30 - publisher: UFRGS - title: Practical Considerations for MIDI over Bluetooth Low Energy as a Wireless - Interface - url: http://www.nime.org/proceedings/2019/nime2019_paper006.pdf - year: 2019 + keywords: 'exploration,gesture,habit,sonic interaction design' + pages: 448--452 + title: 'The Flo)(ps : Negotiating Between Habitual and Explorative Gestures' + url: 
http://www.nime.org/proceedings/2011/nime2011_448.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Ramchurn2019 - abstract: 'We report on the design and deployment of systems for the performance - of live score accompaniment to an interactive movie by a Networked Musical Ensemble. - In this case, the audio-visual content of the movie is selected in real time based - on user input to a Brain-Computer Interface (BCI). Our system supports musical - improvisation between human performers and automated systems responding to the - BCI. We explore the performers'' roles during two performances when these socio-technical - systems were implemented, in terms of coordination, problem-solving, managing - uncertainty and musical responses to system constraints. This allows us to consider - how features of these systems and practices might be incorporated into a general - tool, aimed at any musician, which could scale for use in different performance - settings involving interactive media. ' - address: 'Porto Alegre, Brazil' - author: Richard Ramchurn and Juan Pablo Martinez-Avila and Sarah Martindale and - Alan Chamberlain and Max L Wilson and Steve Benford - bibtex: "@inproceedings{Ramchurn2019,\n abstract = {We report on the design and\ - \ deployment of systems for the performance of live score accompaniment to an\ - \ interactive movie by a Networked Musical Ensemble. In this case, the audio-visual\ - \ content of the movie is selected in real time based on user input to a Brain-Computer\ - \ Interface (BCI). Our system supports musical improvisation between human performers\ - \ and automated systems responding to the BCI. We explore the performers' roles\ - \ during two performances when these socio-technical systems were implemented,\ - \ in terms of coordination, problem-solving, managing uncertainty and musical\ - \ responses to system constraints. 
This allows us to consider how features of\ - \ these systems and practices might be incorporated into a general tool, aimed\ - \ at any musician, which could scale for use in different performance settings\ - \ involving interactive media. },\n address = {Porto Alegre, Brazil},\n author\ - \ = {Richard Ramchurn and Juan Pablo Martinez-Avila and Sarah Martindale and Alan\ - \ Chamberlain and Max L Wilson and Steve Benford},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672856},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {31--36},\n publisher = {UFRGS},\n\ - \ title = {Improvising a Live Score to an Interactive Brain-Controlled Film},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_paper007.pdf},\n year =\ - \ {2019}\n}\n" + ID: Schedel2011 + abstract: 'In this paper we discuss how the band 000000Swan uses machine learning + to parse complex sensor data and create intricate artistic systems for live performance. + Using the Wekinator software for interactive machine learning, we have created + discrete and continuous models for controlling audio and visual environments using + human gestures sensed by a commercially-available sensor bow and the Microsoft + Kinect. In particular, we have employed machine learning to quickly and easily + prototype complex relationships between performer gesture and performative outcome. ' + address: 'Oslo, Norway' + author: 'Schedel, Margaret and Perry, Phoenix and Fiebrink, Rebecca' + bibtex: "@inproceedings{Schedel2011,\n abstract = {In this paper we discuss how\ + \ the band 000000Swan uses machine learning to parse complex sensor data and create\ + \ intricate artistic systems for live performance. 
Using the Wekinator software\ + \ for interactive machine learning, we have created discrete and continuous models\ + \ for controlling audio and visual environments using human gestures sensed by\ + \ a commercially-available sensor bow and the Microsoft Kinect. In particular,\ + \ we have employed machine learning to quickly and easily prototype complex relationships\ + \ between performer gesture and performative outcome. },\n address = {Oslo, Norway},\n\ + \ author = {Schedel, Margaret and Perry, Phoenix and Fiebrink, Rebecca},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178151},\n issn = {2220-4806},\n keywords\ + \ = {Wekinator, K-Bow, Machine Learning, Interactive, Multimedia, Kinect, Motion-Tracking,\ + \ Bow Articulation, Animation },\n pages = {453--456},\n title = {Wekinating 000000{S}wan\ + \ : Using Machine Learning to Create and Control Complex Artistic Systems},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_453.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672856 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178151 issn: 2220-4806 - month: June - pages: 31--36 - publisher: UFRGS - title: Improvising a Live Score to an Interactive Brain-Controlled Film - url: http://www.nime.org/proceedings/2019/nime2019_paper007.pdf - year: 2019 + keywords: 'Wekinator, K-Bow, Machine Learning, Interactive, Multimedia, Kinect, + Motion-Tracking, Bow Articulation, Animation ' + pages: 453--456 + title: 'Wekinating 000000Swan : Using Machine Learning to Create and Control Complex + Artistic Systems' + url: http://www.nime.org/proceedings/2011/nime2011_453.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Tom2019 - abstract: 'Although several Digital Musical Instruments (DMIs) have been presented - at NIME, very few of them remain 
accessible to the community. Rebuilding a DMI - is often a necessary step to allow for performance with NIMEs. Rebuilding a DMI - exactly similar to its original, however, might not be possible due to technology - obsolescence, lack of documentation or other reasons. It might then be interesting - to re-interpret a DMI and build an instrument inspired by the original one, creating - novel performance opportunities. This paper presents the challenges and approaches - involved in rebuilding and re-interpreting an existing DMI, The Sponge by Martin - Marier. The rebuilt versions make use of newer/improved technology and customized - design aspects like addition of vibrotactile feedback and implementation of different - mapping strategies. It also discusses the implications of embedding sound synthesis - within the DMI, by using the Prynth framework and further presents a comparison - between this approach and the more traditional ground-up approach. As a result - of the evaluation and comparison of the two rebuilt DMIs, we present a third version - which combines the benefits and discuss performance issues with these devices.' - address: 'Porto Alegre, Brazil' - author: Ajin Jiji Tom and Harish Jayanth Venkatesan and Ivan Franco and Marcelo - Wanderley - bibtex: "@inproceedings{Tom2019,\n abstract = {Although several Digital Musical\ - \ Instruments (DMIs) have been presented at NIME, very few of them remain accessible\ - \ to the community. Rebuilding a DMI is often a necessary step to allow for performance\ - \ with NIMEs. Rebuilding a DMI exactly similar to its original, however, might\ - \ not be possible due to technology obsolescence, lack of documentation or other\ - \ reasons. It might then be interesting to re-interpret a DMI and build an instrument\ - \ inspired by the original one, creating novel performance opportunities. 
This\ - \ paper presents the challenges and approaches involved in rebuilding and re-interpreting\ - \ an existing DMI, The Sponge by Martin Marier. The rebuilt versions make use\ - \ of newer/improved technology and customized design aspects like addition of\ - \ vibrotactile feedback and implementation of different mapping strategies. It\ - \ also discusses the implications of embedding sound synthesis within the DMI,\ - \ by using the Prynth framework and further presents a comparison between this\ - \ approach and the more traditional ground-up approach. As a result of the evaluation\ - \ and comparison of the two rebuilt DMIs, we present a third version which combines\ - \ the benefits and discuss performance issues with these devices.},\n address\ - \ = {Porto Alegre, Brazil},\n author = {Ajin Jiji Tom and Harish Jayanth Venkatesan\ - \ and Ivan Franco and Marcelo Wanderley},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672858},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {37--42},\n publisher = {UFRGS},\n title = {Rebuilding and\ - \ Reinterpreting a Digital Musical Instrument --- The Sponge},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper008.pdf},\n\ - \ year = {2019}\n}\n" + ID: Julia2011 + abstract: 'In the past decade we have seen a growing presence of tabletop systems + applied to music, lately with even some products becoming commercially available + and being used byprofessional musicians in concerts. 
The development of thistype + of applications requires several demanding technicalexpertises such as input processing, + graphical design, realtime sound generation or interaction design, and because + ofthis complexity they are usually developed by a multidisciplinary group.In this + paper we present the Musical Tabletop CodingFramework (MTCF) a framework for designing + and codingmusical tabletop applications by using the graphical programming language + for digital sound processing Pure Data(Pd). With this framework we try to simplify + the creationprocess of such type of interfaces, by removing the need ofany programming + skills other than those of Pd.' + address: 'Oslo, Norway' + author: 'Julià, Carles F. and Gallardo, Daniel and Jordà, Sergi' + bibtex: "@inproceedings{Julia2011,\n abstract = {In the past decade we have seen\ + \ a growing presence of tabletop systems applied to music, lately with even some\ + \ products becoming commercially available and being used byprofessional musicians\ + \ in concerts. The development of thistype of applications requires several demanding\ + \ technicalexpertises such as input processing, graphical design, realtime sound\ + \ generation or interaction design, and because ofthis complexity they are usually\ + \ developed by a multidisciplinary group.In this paper we present the Musical\ + \ Tabletop CodingFramework (MTCF) a framework for designing and codingmusical\ + \ tabletop applications by using the graphical programming language for digital\ + \ sound processing Pure Data(Pd). With this framework we try to simplify the creationprocess\ + \ of such type of interfaces, by removing the need ofany programming skills other\ + \ than those of Pd.},\n address = {Oslo, Norway},\n author = {Juli\\`{a}, Carles\ + \ F. 
and Gallardo, Daniel and Jord\\`{a}, Sergi},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178057},\n issn = {2220-4806},\n keywords = {Pure Data, tabletop,\ + \ tangible, framework },\n pages = {457--460},\n title = {MTCF : A Framework for\ + \ Designing and Coding Musical Tabletop Applications Directly in Pure Data},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_457.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672858 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178057 issn: 2220-4806 - month: June - pages: 37--42 - publisher: UFRGS - title: Rebuilding and Reinterpreting a Digital Musical Instrument --- The Sponge - url: http://www.nime.org/proceedings/2019/nime2019_paper008.pdf - year: 2019 + keywords: 'Pure Data, tabletop, tangible, framework ' + pages: 457--460 + title: 'MTCF : A Framework for Designing and Coding Musical Tabletop Applications + Directly in Pure Data' + url: http://www.nime.org/proceedings/2011/nime2011_457.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Nishida2019 - abstract: 'Recent technological advances, such as increased CPU/GPU processing speed, - along with the miniaturization of devices and sensors, have created new possibilities - for integrating immersive technologies in music and performance art. Virtual and - Augmented Reality (VR/AR) have become increasingly interesting as mobile device - platforms, such as up-to-date smartphones, with necessary CPU resources entered - the consumer market. In combination with recent web technologies, any mobile device - can simply connect with a browser to a local server to access the latest technology. - The web platform also eases the integration of collaborative situated media in - participatory artwork. 
In this paper, we present the interactive music improvisation - piece ‘Border,'' premiered in 2018 at the Beyond Festival at the Center for Art - and Media Karlsruhe (ZKM). This piece explores the interaction between a performer - and the audience using web-based applications – including AR, real-time 3D audio/video - streaming, advanced web audio, and gesture-controlled virtual instruments – on - smart mobile devices.' - address: 'Porto Alegre, Brazil' - author: Kiyu Nishida and Akishige Yuguchi and kazuhiro jo and Paul Modler and Markus - Noisternig - bibtex: "@inproceedings{Nishida2019,\n abstract = {Recent technological advances,\ - \ such as increased CPU/GPU processing speed, along with the miniaturization of\ - \ devices and sensors, have created new possibilities for integrating immersive\ - \ technologies in music and performance art. Virtual and Augmented Reality (VR/AR)\ - \ have become increasingly interesting as mobile device platforms, such as up-to-date\ - \ smartphones, with necessary CPU resources entered the consumer market. In combination\ - \ with recent web technologies, any mobile device can simply connect with a browser\ - \ to a local server to access the latest technology. The web platform also eases\ - \ the integration of collaborative situated media in participatory artwork. 
In\ - \ this paper, we present the interactive music improvisation piece ‘Border,' premiered\ - \ in 2018 at the Beyond Festival at the Center for Art and Media Karlsruhe (ZKM).\ - \ This piece explores the interaction between a performer and the audience using\ - \ web-based applications – including AR, real-time 3D audio/video streaming, advanced\ - \ web audio, and gesture-controlled virtual instruments – on smart mobile devices.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Kiyu Nishida and Akishige Yuguchi\ - \ and kazuhiro jo and Paul Modler and Markus Noisternig},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672860},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {43--46},\n publisher = {UFRGS},\n\ - \ title = {Border: A Live Performance Based on Web {AR} and a Gesture-Controlled\ - \ Virtual Instrument},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper009.pdf},\n\ - \ year = {2019}\n}\n" + ID: Pirro2011 + address: 'Oslo, Norway' + author: 'Pirr\`{o}, David and Eckel, Gerhard' + bibtex: "@inproceedings{Pirro2011,\n address = {Oslo, Norway},\n author = {Pirr\\\ + `{o}, David and Eckel, Gerhard},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178135},\n\ + \ issn = {2220-4806},\n keywords = {embod-,enactive interfaces,has been ap-,iment,interaction,motion\ + \ tracking,of sound and music,physical modelling,to movement and gesture},\n pages\ + \ = {461--464},\n title = {Physical Modelling Enabling Enaction: an Example},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_461.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672860 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 
10.5281/zenodo.1178135 issn: 2220-4806 - month: June - pages: 43--46 - publisher: UFRGS - title: 'Border: A Live Performance Based on Web AR and a Gesture-Controlled Virtual - Instrument' - url: http://www.nime.org/proceedings/2019/nime2019_paper009.pdf - year: 2019 + keywords: 'embod-,enactive interfaces,has been ap-,iment,interaction,motion tracking,of + sound and music,physical modelling,to movement and gesture' + pages: 461--464 + title: 'Physical Modelling Enabling Enaction: an Example' + url: http://www.nime.org/proceedings/2011/nime2011_461.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Dahlstedt2019 - abstract: 'Libration Perturbed is a performance and an improvisation instrument, - originally composed and designed for a multi-speaker dome. The performer controls - a bank of 64 virtual inter-connected resonating strings, with individual and direct - control of tuning and resonance characteristics through a multitouch-enhanced - klavier interface (TouchKeys). It is a hybrid acoustic-electronic instrument, - as all string vibrations originate from physical vibrations in the klavier and - its casing, captured through contact microphones. In addition, there are gestural - strings, called ropes, excited by performed musical gestures. All strings and - ropes are connected, and inter-resonate together as a ”super-harp”, internally - and through the performance space. With strong resonance, strings may go into - chaotic motion or emergent quasi-periodic patterns, but custom adaptive leveling - mechanisms keep loudness under the musician''s control at all times. The hybrid - digital/acoustic approach and the enhanced keyboard provide for an expressive - and very physical interaction, and a strong multi-channel immersive experience. 
- The paper describes the aesthetic choices behind the design of the system, as - well as the technical implementation, and – primarily – the interaction design, - as it emerges from mapping, sound design, physical modeling and integration of - the acoustic, the gestural, and the virtual. The work is evaluated based on the - experiences from a series of performances.' - address: 'Porto Alegre, Brazil' - author: Palle Dahlstedt - bibtex: "@inproceedings{Dahlstedt2019,\n abstract = {Libration Perturbed is a performance\ - \ and an improvisation instrument, originally composed and designed for a multi-speaker\ - \ dome. The performer controls a bank of 64 virtual inter-connected resonating\ - \ strings, with individual and direct control of tuning and resonance characteristics\ - \ through a multitouch-enhanced klavier interface (TouchKeys). It is a hybrid\ - \ acoustic-electronic instrument, as all string vibrations originate from physical\ - \ vibrations in the klavier and its casing, captured through contact microphones.\ - \ In addition, there are gestural strings, called ropes, excited by performed\ - \ musical gestures. All strings and ropes are connected, and inter-resonate together\ - \ as a ”super-harp”, internally and through the performance space. With strong\ - \ resonance, strings may go into chaotic motion or emergent quasi-periodic patterns,\ - \ but custom adaptive leveling mechanisms keep loudness under the musician's control\ - \ at all times. The hybrid digital/acoustic approach and the enhanced keyboard\ - \ provide for an expressive and very physical interaction, and a strong multi-channel\ - \ immersive experience. The paper describes the aesthetic choices behind the design\ - \ of the system, as well as the technical implementation, and – primarily – the\ - \ interaction design, as it emerges from mapping, sound design, physical modeling\ - \ and integration of the acoustic, the gestural, and the virtual. 
The work is\ - \ evaluated based on the experiences from a series of performances.},\n address\ - \ = {Porto Alegre, Brazil},\n author = {Palle Dahlstedt},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672862},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {47--52},\n publisher = {UFRGS},\n\ - \ title = {Taming and Tickling the Beast --- Multi-Touch Keyboard as Interface\ - \ for a Physically Modelled Interconnected Resonating Super-Harp},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper010.pdf},\n\ - \ year = {2019}\n}\n" + ID: Mitchell2011 + abstract: 'This paper documents the first developmental phase of aninterface that + enables the performance of live music usinggestures and body movements. The work + included focuseson the first step of this project: the composition and performance + of live music using hand gestures captured using asingle data glove. The paper + provides a background to thefield, the aim of the project and a technical description + ofthe work completed so far. This includes the developmentof a robust posture + vocabulary, an artificial neural networkbased posture identification process and + a state-based system to map identified postures onto a set of performanceprocesses. + The paper is closed with qualitative usage observations and a projection of future + plans.' + address: 'Oslo, Norway' + author: 'Mitchell, Thomas and Heap, Imogen' + bibtex: "@inproceedings{Mitchell2011,\n abstract = {This paper documents the first\ + \ developmental phase of aninterface that enables the performance of live music\ + \ usinggestures and body movements. The work included focuseson the first step\ + \ of this project: the composition and performance of live music using hand gestures\ + \ captured using asingle data glove. 
The paper provides a background to thefield,\ + \ the aim of the project and a technical description ofthe work completed so far.\ + \ This includes the developmentof a robust posture vocabulary, an artificial neural\ + \ networkbased posture identification process and a state-based system to map\ + \ identified postures onto a set of performanceprocesses. The paper is closed\ + \ with qualitative usage observations and a projection of future plans.},\n address\ + \ = {Oslo, Norway},\n author = {Mitchell, Thomas and Heap, Imogen},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178111},\n issn = {2220-4806},\n keywords\ + \ = {Music Controller, Gestural Music, Data Glove, Neural Network, Live Music\ + \ Composition, Looping, Imogen Heap },\n pages = {465--468},\n title = {SoundGrasp\ + \ : A Gestural Interface for the Performance of Live Music},\n url = {http://www.nime.org/proceedings/2011/nime2011_465.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672862 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178111 issn: 2220-4806 - month: June - pages: 47--52 - publisher: UFRGS - title: Taming and Tickling the Beast --- Multi-Touch Keyboard as Interface for a - Physically Modelled Interconnected Resonating Super-Harp - url: http://www.nime.org/proceedings/2019/nime2019_paper010.pdf - year: 2019 + keywords: 'Music Controller, Gestural Music, Data Glove, Neural Network, Live Music + Composition, Looping, Imogen Heap ' + pages: 465--468 + title: 'SoundGrasp : A Gestural Interface for the Performance of Live Music' + url: http://www.nime.org/proceedings/2011/nime2011_465.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Cavdir2019 - abstract: 'This research represents an evolution and evaluation of the embodied - physical laptop instruments. 
Specifically, these are instruments that are physical - in that they use bodily interaction, take advantage of the physical affordances - of the laptop. They are embodied in the sense that instruments are played in such - ways where the sound is embedded to be close to the instrument. Three distinct - laptop instruments, Taptop, Armtop, and Blowtop, are introduced in this paper. - We discuss the integrity of the design process with composing for laptop instruments - and performing with them. In this process, our aim is to blur the boundaries of - the composer and designer/engineer roles. How the physicality is achieved by leveraging - musical gestures gained through traditional instrument practice is studied, as - well as those inspired by body gestures. We aim to explore how using such interaction - methods affects the communication between the ensemble and the audience. An aesthetic-first - qualitative evaluation of these interfaces is discussed, through works and performances - crafted specifically for these instruments and presented in the concert setting - of the laptop orchestra. In so doing, we reflect on how such physical, embodied - instrument design practices can inform a different kind of expressive and performance - mindset.' - address: 'Porto Alegre, Brazil' - author: Doga Cavdir and Juan Sierra and Ge Wang - bibtex: "@inproceedings{Cavdir2019,\n abstract = {This research represents an evolution\ - \ and evaluation of the embodied physical laptop instruments. Specifically, these\ - \ are instruments that are physical in that they use bodily interaction, take\ - \ advantage of the physical affordances of the laptop. They are embodied in the\ - \ sense that instruments are played in such ways where the sound is embedded to\ - \ be close to the instrument. Three distinct laptop instruments, Taptop, Armtop,\ - \ and Blowtop, are introduced in this paper. 
We discuss the integrity of the design\ - \ process with composing for laptop instruments and performing with them. In this\ - \ process, our aim is to blur the boundaries of the composer and designer/engineer\ - \ roles. How the physicality is achieved by leveraging musical gestures gained\ - \ through traditional instrument practice is studied, as well as those inspired\ - \ by body gestures. We aim to explore how using such interaction methods affects\ - \ the communication between the ensemble and the audience. An aesthetic-first\ - \ qualitative evaluation of these interfaces is discussed, through works and performances\ - \ crafted specifically for these instruments and presented in the concert setting\ - \ of the laptop orchestra. In so doing, we reflect on how such physical, embodied\ - \ instrument design practices can inform a different kind of expressive and performance\ - \ mindset.},\n address = {Porto Alegre, Brazil},\n author = {Doga Cavdir and Juan\ - \ Sierra and Ge Wang},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672864},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {53--58},\n publisher = {UFRGS},\n title = {Taptop, Armtop,\ - \ Blowtop: Evolving the Physical Laptop Instrument},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper011.pdf},\n\ - \ year = {2019}\n}\n" + ID: Mullen2011 + abstract: 'The use of non-invasive electroencephalography (EEG) in the experimental + arts is not a novel concept. Since 1965, EEG has been used in a large number of, + sometimes highly sophisticated, systems for musical and artistic expression. However, + since the advent of the synthesizer, most such systems have utilized digital and/or + synthesized media in sonifying the EEG signals. 
There have been relatively few + attempts to create interfaces for musical expression that allow one to mechanically + manipulate acoustic instruments by modulating one''s mental state. Secondly, few + such systems afford a distributed performance medium, with data transfer and audience + participation occurring over the Internet. The use of acoustic instruments and + Internet-enabled communication expands the realm of possibilities for musical + expression in Brain-Computer Music Interfaces (BCMI), while also introducing additional + challenges. In this paper we report and examine a first demonstration (Music for + Online Performer) of a novel system for Internet-enabled manipulation of robotic + acoustic instruments, with feedback, using a non-invasive EEG-based BCI and low-cost, + commercially available robotics hardware. ' + address: 'Oslo, Norway' + author: 'Mullen, Tim and Warp, Richard and Jansch, Adam' + bibtex: "@inproceedings{Mullen2011,\n abstract = {The use of non-invasive electroencephalography\ + \ (EEG) in the experimental arts is not a novel concept. Since 1965, EEG has been\ + \ used in a large number of, sometimes highly sophisticated, systems for musical\ + \ and artistic expression. However, since the advent of the synthesizer, most\ + \ such systems have utilized digital and/or synthesized media in sonifying the\ + \ EEG signals. There have been relatively few attempts to create interfaces for\ + \ musical expression that allow one to mechanically manipulate acoustic instruments\ + \ by modulating one's mental state. Secondly, few such systems afford a distributed\ + \ performance medium, with data transfer and audience participation occurring\ + \ over the Internet. The use of acoustic instruments and Internet-enabled communication\ + \ expands the realm of possibilities for musical expression in Brain-Computer\ + \ Music Interfaces (BCMI), while also introducing additional challenges. 
In this\ + \ paper we report and examine a first demonstration (Music for Online Performer)\ + \ of a novel system for Internet-enabled manipulation of robotic acoustic instruments,\ + \ with feedback, using a non-invasive EEG-based BCI and low-cost, commercially\ + \ available robotics hardware. },\n address = {Oslo, Norway},\n author = {Mullen,\ + \ Tim and Warp, Richard and Jansch, Adam},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178117},\n\ + \ issn = {2220-4806},\n keywords = {EEG, Brain-Computer Music Interface, Internet,\ + \ Arduino. },\n pages = {469--472},\n title = {Minding the (Transatlantic) Gap:\ + \ An Internet-Enabled Acoustic Brain-Computer Music Interface},\n url = {http://www.nime.org/proceedings/2011/nime2011_469.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672864 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178117 issn: 2220-4806 - month: June - pages: 53--58 - publisher: UFRGS - title: 'Taptop, Armtop, Blowtop: Evolving the Physical Laptop Instrument' - url: http://www.nime.org/proceedings/2019/nime2019_paper011.pdf - year: 2019 + keywords: 'EEG, Brain-Computer Music Interface, Internet, Arduino. ' + pages: 469--472 + title: 'Minding the (Transatlantic) Gap: An Internet-Enabled Acoustic Brain-Computer + Music Interface' + url: http://www.nime.org/proceedings/2011/nime2011_469.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: GomezJauregui2019 - abstract: 'This work aims to explore the use of a new gesture-based interaction - built on automatic recognition of Soundpainting structured gestural language. - In the proposed approach, a composer (called Soundpainter) performs Soundpainting - gestures facing a Kinect sensor. Then, a gesture recognition system captures gestures - that are sent to a sound generator software. 
The proposed method was used to stage - an artistic show in which a Soundpainter had to improvise with 6 different gestures - to generate a musical composition from different sounds in real time. The accuracy - of the gesture recognition system was evaluated as well as Soundpainter''s user - experience. In addition, a user evaluation study for using our proposed system - in a learning context was also conducted. Current results open up perspectives - for the design of new artistic expressions based on the use of automatic gestural - recognition supported by Soundpainting language.' - address: 'Porto Alegre, Brazil' - author: David Antonio Gómez Jáuregui and Irvin Dongo and Nadine Couture - bibtex: "@inproceedings{GomezJauregui2019,\n abstract = {This work aims to explore\ - \ the use of a new gesture-based interaction built on automatic recognition of\ - \ Soundpainting structured gestural language. In the proposed approach, a composer\ - \ (called Soundpainter) performs Soundpainting gestures facing a Kinect sensor.\ - \ Then, a gesture recognition system captures gestures that are sent to a sound\ - \ generator software. The proposed method was used to stage an artistic show in\ - \ which a Soundpainter had to improvise with 6 different gestures to generate\ - \ a musical composition from different sounds in real time. The accuracy of the\ - \ gesture recognition system was evaluated as well as Soundpainter's user experience.\ - \ In addition, a user evaluation study for using our proposed system in a learning\ - \ context was also conducted. 
Current results open up perspectives for the design\ - \ of new artistic expressions based on the use of automatic gestural recognition\ - \ supported by Soundpainting language.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {David Antonio Gómez Jáuregui and Irvin Dongo and Nadine Couture},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672866},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {59--64},\n\ - \ publisher = {UFRGS},\n title = {Automatic Recognition of Soundpainting for the\ - \ Generation of Electronic Music Sounds},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper012.pdf},\n\ - \ year = {2019}\n}\n" + ID: Papetti2011 + abstract: 'A shoe-based interface is presented, which enables users toplay percussive + virtual instruments by tapping their feet.The wearable interface consists of a + pair of sandals equippedwith four force sensors and four actuators affording audiotactile + feedback. The sensors provide data via wireless transmission to a host computer, + where they are processed andmapped to a physics-based sound synthesis engine. + Sincethe system provides OSC and MIDI compatibility, alternative electronic instruments + can be used as well. The audiosignals are then sent back wirelessly to audio-tactile + excitersembedded in the sandals'' sole, and optionally to headphonesand external + loudspeakers. The round-trip wireless communication only introduces very small + latency, thus guaranteeing coherence and unity in the multimodal percept andallowing + tight timing while playing.' 
+ address: 'Oslo, Norway' + author: 'Papetti, Stefano and Civolani, Marco and Fontana, Federico' + bibtex: "@inproceedings{Papetti2011,\n abstract = {A shoe-based interface is presented,\ + \ which enables users toplay percussive virtual instruments by tapping their feet.The\ + \ wearable interface consists of a pair of sandals equippedwith four force sensors\ + \ and four actuators affording audiotactile feedback. The sensors provide data\ + \ via wireless transmission to a host computer, where they are processed andmapped\ + \ to a physics-based sound synthesis engine. Sincethe system provides OSC and\ + \ MIDI compatibility, alternative electronic instruments can be used as well.\ + \ The audiosignals are then sent back wirelessly to audio-tactile excitersembedded\ + \ in the sandals' sole, and optionally to headphonesand external loudspeakers.\ + \ The round-trip wireless communication only introduces very small latency, thus\ + \ guaranteeing coherence and unity in the multimodal percept andallowing tight\ + \ timing while playing.},\n address = {Oslo, Norway},\n author = {Papetti, Stefano\ + \ and Civolani, Marco and Fontana, Federico},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178129},\n issn = {2220-4806},\n keywords = {interface, audio,\ + \ tactile, foot tapping, embodiment, footwear, wireless, wearable, mobile },\n\ + \ pages = {473--476},\n title = {Rhythm'n'Shoes: a Wearable Foot Tapping Interface\ + \ with Audio-Tactile Feedback},\n url = {http://www.nime.org/proceedings/2011/nime2011_473.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672866 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178129 issn: 2220-4806 - month: June - pages: 59--64 - publisher: UFRGS - title: Automatic Recognition of Soundpainting for the Generation of 
Electronic Music - Sounds - url: http://www.nime.org/proceedings/2019/nime2019_paper012.pdf - year: 2019 + keywords: 'interface, audio, tactile, foot tapping, embodiment, footwear, wireless, + wearable, mobile ' + pages: 473--476 + title: 'Rhythm''n''Shoes: a Wearable Foot Tapping Interface with Audio-Tactile Feedback' + url: http://www.nime.org/proceedings/2011/nime2011_473.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Morreale2019 - abstract: 'This paper introduces the Magpick, an augmented pick for electric guitar - that uses electromagnetic induction to sense the motion of the pick with respect - to the permanent magnets in the guitar pickup. The Magpick provides the guitarist - with nuanced control of the sound which coexists with traditional plucking-hand - technique. The paper presents three ways that the signal from the pick can modulate - the guitar sound, followed by a case study of its use in which 11 guitarists tested - the Magpick for five days and composed a piece with it. Reflecting on their comments - and experiences, we outline the innovative features of this technology from the - point of view of performance practice. In particular, compared to other augmentations, - the high temporal resolution, low latency, and large dynamic range of the Magpick - support a highly nuanced control over the sound. Our discussion highlights the - utility of having the locus of augmentation coincide with the locus of interaction.' - address: 'Porto Alegre, Brazil' - author: Fabio Morreale and Andrea Guidi and Andrew P. McPherson - bibtex: "@inproceedings{Morreale2019,\n abstract = {This paper introduces the Magpick,\ - \ an augmented pick for electric guitar that uses electromagnetic induction to\ - \ sense the motion of the pick with respect to the permanent magnets in the guitar\ - \ pickup. The Magpick provides the guitarist with nuanced control of the sound\ - \ which coexists with traditional plucking-hand technique. 
The paper presents\ - \ three ways that the signal from the pick can modulate the guitar sound, followed\ - \ by a case study of its use in which 11 guitarists tested the Magpick for five\ - \ days and composed a piece with it. Reflecting on their comments and experiences,\ - \ we outline the innovative features of this technology from the point of view\ - \ of performance practice. In particular, compared to other augmentations, the\ - \ high temporal resolution, low latency, and large dynamic range of the Magpick\ - \ support a highly nuanced control over the sound. Our discussion highlights the\ - \ utility of having the locus of augmentation coincide with the locus of interaction.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Fabio Morreale and Andrea Guidi\ - \ and Andrew P. McPherson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672868},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {65--70},\n publisher = {UFRGS},\n title = {Magpick: an\ - \ Augmented Guitar Pick for Nuanced Control},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper013.pdf},\n\ - \ year = {2019}\n}\n" + ID: Erkut2011 + abstract: 'We present a generic, structured model for design and evaluation of musical + interfaces. This model is developmentoriented, and it is based on the fundamental + function of themusical interfaces, i.e., to coordinate the human action andperception + for musical expression, subject to human capabilities and skills. To illustrate + the particulars of this modeland present it in operation, we consider the previous + designand evaluation phase of iPalmas, our testbed for exploringrhythmic interaction. 
+ Our findings inform the current design phase of iPalmas visual and auditory displays, + wherewe build on what has resonated with the test users, and explore further possibilities + based on the evaluation results.' + address: 'Oslo, Norway' + author: 'Erkut, Cumhur and Jylhä, Antti and Discioglu, Reha' + bibtex: "@inproceedings{Erkut2011,\n abstract = {We present a generic, structured\ + \ model for design and evaluation of musical interfaces. This model is developmentoriented,\ + \ and it is based on the fundamental function of themusical interfaces, i.e.,\ + \ to coordinate the human action andperception for musical expression, subject\ + \ to human capabilities and skills. To illustrate the particulars of this modeland\ + \ present it in operation, we consider the previous designand evaluation phase\ + \ of iPalmas, our testbed for exploringrhythmic interaction. Our findings inform\ + \ the current design phase of iPalmas visual and auditory displays, wherewe build\ + \ on what has resonated with the test users, and explore further possibilities\ + \ based on the evaluation results.},\n address = {Oslo, Norway},\n author = {Erkut,\ + \ Cumhur and Jylh\\''{a}, Antti and Discioglu, Reha},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178003},\n issn = {2220-4806},\n keywords = {multimodal\ + \ displays,rhythmic interaction,sonification,uml},\n pages = {477--480},\n title\ + \ = {A Structured Design and Evaluation Model with Application to Rhythmic Interaction\ + \ Displays},\n url = {http://www.nime.org/proceedings/2011/nime2011_477.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672868 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178003 issn: 2220-4806 - month: June - pages: 65--70 - publisher: UFRGS - title: 'Magpick: an Augmented Guitar Pick 
for Nuanced Control' - url: http://www.nime.org/proceedings/2019/nime2019_paper013.pdf - year: 2019 + keywords: 'multimodal displays,rhythmic interaction,sonification,uml' + pages: 477--480 + title: A Structured Design and Evaluation Model with Application to Rhythmic Interaction + Displays + url: http://www.nime.org/proceedings/2011/nime2011_477.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Petit2019 - abstract: 'Skini is a platform for composing and producing live performances with - audience participating using connected devices (smartphones, tablets, PC, etc.). - The music composer creates beforehand musical elements such as melodic patterns, - sound patterns, instruments, group of instruments, and a dynamic score that governs - the way the basic elements will behave according to events produced by the audience. - During the concert or the performance, the audience, by interacting with the system, - gives birth to an original music composition. Skini music scores are expressed - in terms of constraints that establish relationships between instruments. A constraint - maybe instantaneous, for instance one may disable violins while trumpets are playing. - A constraint may also be temporal, for instance, the piano cannot play more than - 30 consecutive seconds. The Skini platform is implemented in Hop.js and HipHop.js. - HipHop.js, a synchronous reactive DLS, is used for implementing the music scores - as its elementary constructs consisting of high level operators such as parallel - executions, sequences, awaits, synchronization points, etc, form an ideal core - language for implementing Skini constraints. This paper presents the Skini platform. - It reports about live performances and an educational project. It briefly overviews - the use of HipHop.js for representing score.' 
- address: 'Porto Alegre, Brazil' - author: Bertrand Petit and manuel serrano - bibtex: "@inproceedings{Petit2019,\n abstract = {Skini is a platform for composing\ - \ and producing live performances with audience participating using connected\ - \ devices (smartphones, tablets, PC, etc.). The music composer creates beforehand\ - \ musical elements such as melodic patterns, sound patterns, instruments, group\ - \ of instruments, and a dynamic score that governs the way the basic elements\ - \ will behave according to events produced by the audience. During the concert\ - \ or the performance, the audience, by interacting with the system, gives birth\ - \ to an original music composition. Skini music scores are expressed in terms\ - \ of constraints that establish relationships between instruments. A constraint\ - \ maybe instantaneous, for instance one may disable violins while trumpets are\ - \ playing. A constraint may also be temporal, for instance, the piano cannot play\ - \ more than 30 consecutive seconds. The Skini platform is implemented in Hop.js\ - \ and HipHop.js. HipHop.js, a synchronous reactive DLS, is used for implementing\ - \ the music scores as its elementary constructs consisting of high level operators\ - \ such as parallel executions, sequences, awaits, synchronization points, etc,\ - \ form an ideal core language for implementing Skini constraints. This paper presents\ - \ the Skini platform. 
It reports about live performances and an educational project.\ - \ It briefly overviews the use of HipHop.js for representing score.},\n address\ - \ = {Porto Alegre, Brazil},\n author = {Bertrand Petit and manuel serrano},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672870},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {71--76},\n\ - \ publisher = {UFRGS},\n title = {Composing and executing Interactive music using\ - \ the HipHop.js language},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper014.pdf},\n\ - \ year = {2019}\n}\n" + ID: Marchini2011 + abstract: 'This paper introduces and evaluates a novel methodologyfor the estimation + of bow pressing force in violin performance, aiming at a reduced intrusiveness + while maintaininghigh accuracy. The technique is based on using a simplifiedphysical + model of the hair ribbon deflection, and feeding thismodel solely with position + and orientation measurements ofthe bow and violin spatial coordinates. The physical + modelis both calibrated and evaluated using real force data acquired by means + of a load cell.' + address: 'Oslo, Norway' + author: 'Marchini, Marco and Papiotis, Panos and Pérez, Alfonso and Maestre, Esteban' + bibtex: "@inproceedings{Marchini2011,\n abstract = {This paper introduces and evaluates\ + \ a novel methodologyfor the estimation of bow pressing force in violin performance,\ + \ aiming at a reduced intrusiveness while maintaininghigh accuracy. The technique\ + \ is based on using a simplifiedphysical model of the hair ribbon deflection,\ + \ and feeding thismodel solely with position and orientation measurements ofthe\ + \ bow and violin spatial coordinates. 
The physical modelis both calibrated and\ + \ evaluated using real force data acquired by means of a load cell.},\n address\ + \ = {Oslo, Norway},\n author = {Marchini, Marco and Papiotis, Panos and P\\'{e}rez,\ + \ Alfonso and Maestre, Esteban},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178097},\n\ + \ issn = {2220-4806},\n keywords = {bow pressing force, bow force, pressing force,\ + \ force, violin playing, bow simplified physical model, 6DOF, hair ribbon ends,\ + \ string ends },\n pages = {481--486},\n title = {A Hair Ribbon Deflection Model\ + \ for Low-intrusiveness Measurement of Bow Force in Violin Performance},\n url\ + \ = {http://www.nime.org/proceedings/2011/nime2011_481.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672870 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178097 issn: 2220-4806 - month: June - pages: 71--76 - publisher: UFRGS - title: Composing and executing Interactive music using the HipHop.js language - url: http://www.nime.org/proceedings/2019/nime2019_paper014.pdf - year: 2019 + keywords: 'bow pressing force, bow force, pressing force, force, violin playing, + bow simplified physical model, 6DOF, hair ribbon ends, string ends ' + pages: 481--486 + title: A Hair Ribbon Deflection Model for Low-intrusiveness Measurement of Bow Force + in Violin Performance + url: http://www.nime.org/proceedings/2011/nime2011_481.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Rocha2019 - abstract: 'Due to video game controls great presence in popular culture and its - ease of access, even people who are not in the habit of playing electronic games - possibly interacted with this kind of interface once in a lifetime. 
Thus, gestures - like pressing a sequence of buttons, pressing them simultaneously or sliding your - fingers through the control can be mapped for musical creation. This work aims - the elaboration of a strategy in which several gestures performed in a joystick - control can influence one or several parameters of the sound synthesis, making - a mapping denominated many to many. Buttons combinations used to perform game - actions that are common in fighting games, like Street Fighter, were mapped to - the synthesizer to create a music. Experiments show that this mapping is capable - of influencing the musical expression of a DMI making it closer to an acoustic - instrument.' - address: 'Porto Alegre, Brazil' - author: Gabriel Lopes Rocha and João Teixera Araújo and Flávio Luiz Schiavoni - bibtex: "@inproceedings{Rocha2019,\n abstract = {Due to video game controls great\ - \ presence in popular culture and its ease of access, even people who are not\ - \ in the habit of playing electronic games possibly interacted with this kind\ - \ of interface once in a lifetime. Thus, gestures like pressing a sequence of\ - \ buttons, pressing them simultaneously or sliding your fingers through the control\ - \ can be mapped for musical creation. This work aims the elaboration of a strategy\ - \ in which several gestures performed in a joystick control can influence one\ - \ or several parameters of the sound synthesis, making a mapping denominated many\ - \ to many. Buttons combinations used to perform game actions that are common in\ - \ fighting games, like Street Fighter, were mapped to the synthesizer to create\ - \ a music. 
Experiments show that this mapping is capable of influencing the musical\ - \ expression of a DMI making it closer to an acoustic instrument.},\n address\ - \ = {Porto Alegre, Brazil},\n author = {Gabriel Lopes Rocha and João Teixera Araújo\ - \ and Flávio Luiz Schiavoni},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672872},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {77--78},\n publisher = {UFRGS},\n title = {Ha Dou Ken Music:\ - \ Different mappings to play music with joysticks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper015.pdf},\n\ - \ year = {2019}\n}\n" + ID: Forsyth2011 + abstract: 'Remixing audio samples is a common technique for the creation of electronic + music, and there are a wide variety oftools available to edit, process, and recombine + pre-recordedaudio into new compositions. However, all of these toolsconceive of + the timeline of the pre-recorded audio and theplayback timeline as identical. + In this paper, we introducea dual time axis representation in which these two + timelines are described explicitly. We also discuss the randomaccess remix application + for the iPad, an audio sample editor based on this representation. We describe + an initialuser study with 15 high school students that indicates thatthe random + access remix application has the potential todevelop into a useful and interesting + tool for composers andperformers of electronic music.' + address: 'Oslo, Norway' + author: 'Forsyth, Jon and Glennon, Aron and Bello, Juan P.' + bibtex: "@inproceedings{Forsyth2011,\n abstract = {Remixing audio samples is a common\ + \ technique for the creation of electronic music, and there are a wide variety\ + \ oftools available to edit, process, and recombine pre-recordedaudio into new\ + \ compositions. 
However, all of these toolsconceive of the timeline of the pre-recorded\ + \ audio and theplayback timeline as identical. In this paper, we introducea dual\ + \ time axis representation in which these two timelines are described explicitly.\ + \ We also discuss the randomaccess remix application for the iPad, an audio sample\ + \ editor based on this representation. We describe an initialuser study with 15\ + \ high school students that indicates thatthe random access remix application\ + \ has the potential todevelop into a useful and interesting tool for composers\ + \ andperformers of electronic music.},\n address = {Oslo, Norway},\n author =\ + \ {Forsyth, Jon and Glennon, Aron and Bello, Juan P.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178011},\n issn = {2220-4806},\n keywords = {interactive\ + \ systems, sample editor, remix, iPad, multi-touch },\n pages = {487--490},\n\ + \ title = {Random Access Remixing on the iPad},\n url = {http://www.nime.org/proceedings/2011/nime2011_487.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672872 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178011 issn: 2220-4806 - month: June - pages: 77--78 - publisher: UFRGS - title: 'Ha Dou Ken Music: Different mappings to play music with joysticks' - url: http://www.nime.org/proceedings/2019/nime2019_paper015.pdf - year: 2019 + keywords: 'interactive systems, sample editor, remix, iPad, multi-touch ' + pages: 487--490 + title: Random Access Remixing on the iPad + url: http://www.nime.org/proceedings/2011/nime2011_487.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Næss2019 - abstract: 'This paper describes a new intelligent interactive instrument, based - on an embedded computing platform, where deep neural networks are applied to interactive - music generation. 
Even though using neural networks for music composition is not - uncommon, a lot of these models tend to not support any form of user interaction. - We introduce a self-contained intelligent instrument using generative models, - with support for real-time interaction where the user can adjust high-level parameters - to modify the music generated by the instrument. We describe the technical details - of our generative model and discuss the experience of using the system as part - of musical performance.' - address: 'Porto Alegre, Brazil' - author: Torgrim Rudland Næss and Charles Patrick Martin - bibtex: "@inproceedings{Næss2019,\n abstract = {This paper describes a new intelligent\ - \ interactive instrument, based on an embedded computing platform, where deep\ - \ neural networks are applied to interactive music generation. Even though using\ - \ neural networks for music composition is not uncommon, a lot of these models\ - \ tend to not support any form of user interaction. We introduce a self-contained\ - \ intelligent instrument using generative models, with support for real-time interaction\ - \ where the user can adjust high-level parameters to modify the music generated\ - \ by the instrument. 
We describe the technical details of our generative model\ - \ and discuss the experience of using the system as part of musical performance.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Torgrim Rudland Næss and Charles\ - \ Patrick Martin},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672874},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {79--82},\n publisher = {UFRGS},\n title = {A Physical Intelligent\ - \ Instrument using Recurrent Neural Networks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper016.pdf},\n\ - \ year = {2019}\n}\n" + ID: Donald2011 + abstract: 'This paper outlines the formation of the Expanded Performance (EP) trio, + a chamber ensemble comprised of electriccello with sensor bow, augmented digital + percussion, anddigital turntable with mixer. Decisions relating to physical set-ups + and control capabilities, sonic identities, andmappings of each instrument, as + well as their roles withinthe ensemble, are explored. The contributions of these + factors to the design of a coherent, expressive ensemble andits emerging performance + practice are considered. The trioproposes solutions to creation, rehearsal and + performanceissues in ensemble live electronics.' + address: 'Oslo, Norway' + author: 'Donald, Erika and Duinker, Ben and Britton, Eliot' + bibtex: "@inproceedings{Donald2011,\n abstract = {This paper outlines the formation\ + \ of the Expanded Performance (EP) trio, a chamber ensemble comprised of electriccello\ + \ with sensor bow, augmented digital percussion, anddigital turntable with mixer.\ + \ Decisions relating to physical set-ups and control capabilities, sonic identities,\ + \ andmappings of each instrument, as well as their roles withinthe ensemble, are\ + \ explored. 
The contributions of these factors to the design of a coherent, expressive\ + \ ensemble andits emerging performance practice are considered. The trioproposes\ + \ solutions to creation, rehearsal and performanceissues in ensemble live electronics.},\n\ + \ address = {Oslo, Norway},\n author = {Donald, Erika and Duinker, Ben and Britton,\ + \ Eliot},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177999},\n issn = {2220-4806},\n\ + \ keywords = {Live electronics, digital performance, mapping, chamber music, ensemble,\ + \ instrument identity },\n pages = {491--494},\n title = {Designing the EP Trio:\ + \ Instrument Identities, Control and Performance Practice in an Electronic Chamber\ + \ Music Ensemble},\n url = {http://www.nime.org/proceedings/2011/nime2011_491.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672874 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1177999 issn: 2220-4806 - month: June - pages: 79--82 - publisher: UFRGS - title: A Physical Intelligent Instrument using Recurrent Neural Networks - url: http://www.nime.org/proceedings/2019/nime2019_paper016.pdf - year: 2019 + keywords: 'Live electronics, digital performance, mapping, chamber music, ensemble, + instrument identity ' + pages: 491--494 + title: 'Designing the EP Trio: Instrument Identities, Control and Performance Practice + in an Electronic Chamber Music Ensemble' + url: http://www.nime.org/proceedings/2011/nime2011_491.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Fraietta2019 - abstract: 'This paper details the mapping strategy of the work Order and Progress: - a sonic segue across A Auriverde, a composition based upon the skyscape represented - on the Brazilian flag. 
This work uses the Stellarium planetarium software as a - performance interface, blending the political symbology, scientific data and musical - mapping of each star represented on the flag as a multimedia performance. The - work is interfaced through the Stellar Command module, a Java based program that - converts the visible field of view from the Stellarium planetarium interface to - astronomical data through the VizieR database of astronomical catalogues. This - scientific data is then mapped to musical parameters through a Java based programming - environment. I will discuss the strategies employed to create a work that was - not only artistically novel, but also visually engaging and scientifically accurate.' - address: 'Porto Alegre, Brazil' - author: Angelo Fraietta - bibtex: "@inproceedings{Fraietta2019,\n abstract = {This paper details the mapping\ - \ strategy of the work Order and Progress: a sonic segue across A Auriverde, a\ - \ composition based upon the skyscape represented on the Brazilian flag. This\ - \ work uses the Stellarium planetarium software as a performance interface, blending\ - \ the political symbology, scientific data and musical mapping of each star represented\ - \ on the flag as a multimedia performance. The work is interfaced through the\ - \ Stellar Command module, a Java based program that converts the visible field\ - \ of view from the Stellarium planetarium interface to astronomical data through\ - \ the VizieR database of astronomical catalogues. This scientific data is then\ - \ mapped to musical parameters through a Java based programming environment. 
I\ - \ will discuss the strategies employed to create a work that was not only artistically\ - \ novel, but also visually engaging and scientifically accurate.},\n address =\ - \ {Porto Alegre, Brazil},\n author = {Angelo Fraietta},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672876},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {83--88},\n publisher = {UFRGS},\n\ - \ title = {Creating Order and Progress},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper017.pdf},\n\ - \ year = {2019}\n}\n" + ID: Fyans2011 + abstract: 'We present observations from two separate studies of spectators'' perceptions + of musical performances, one involvingtwo acoustic instruments, the other two + electronic instruments. Both studies followed the same qualitative method,using + structured interviews to ascertain and compare spectators'' experiences. In this + paper, we focus on outcomespertaining to perceptions of the performers'' skill, + relatingto concepts of embodiment and communities of practice.' + address: 'Oslo, Norway' + author: 'Fyans, A. Cavan and Gurevich, Michael' + bibtex: "@inproceedings{Fyans2011,\n abstract = {We present observations from two\ + \ separate studies of spectators' perceptions of musical performances, one involvingtwo\ + \ acoustic instruments, the other two electronic instruments. Both studies followed\ + \ the same qualitative method,using structured interviews to ascertain and compare\ + \ spectators' experiences. In this paper, we focus on outcomespertaining to perceptions\ + \ of the performers' skill, relatingto concepts of embodiment and communities\ + \ of practice.},\n address = {Oslo, Norway},\n author = {Fyans, A. 
Cavan and Gurevich,\ + \ Michael},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178019},\n issn\ + \ = {2220-4806},\n keywords = {skill, embodiment, perception, effort, control,\ + \ spectator },\n pages = {495--498},\n title = {Perceptions of Skill in Performances\ + \ with Acoustic and Electronic Instruments},\n url = {http://www.nime.org/proceedings/2011/nime2011_495.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672876 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178019 issn: 2220-4806 - month: June - pages: 83--88 - publisher: UFRGS - title: Creating Order and Progress - url: http://www.nime.org/proceedings/2019/nime2019_paper017.pdf - year: 2019 + keywords: 'skill, embodiment, perception, effort, control, spectator ' + pages: 495--498 + title: Perceptions of Skill in Performances with Acoustic and Electronic Instruments + url: http://www.nime.org/proceedings/2011/nime2011_495.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Tragtenberg2019 - abstract: 'This paper discusses the creation of instruments in which music is intentionally - generated by dance. We introduce the conceptual framework of Digital Dance and - Music Instruments (DDMI). Several DDMI have already been created, but they have - been developed isolatedly, and there is still a lack of a common process of ideation - and development. Knowledge about Digital Musical Instruments (DMIs) and Interactive - Dance Systems (IDSs) can contribute to the design of DDMI, but the former brings - few contributions to the body''s expressiveness, and the latter brings few references - to an instrumental relationship with music. Because of those different premises, - the integration between both paradigms can be an arduous task for the designer - of DDMI. 
The conceptual framework of DDMI can also serve as a bridge between DMIs - and IDSs, serving as a lingua franca between both communities and facilitating - the exchange of knowledge. The conceptual framework has shown to be a promising - analytical tool for the design, development, and evaluation of new digital dance - and music instrument.' - address: 'Porto Alegre, Brazil' - author: João Nogueira Tragtenberg and Filipe Calegario and Giordano Cabral and Geber - L. Ramalho - bibtex: "@inproceedings{Tragtenberg2019,\n abstract = {This paper discusses the\ - \ creation of instruments in which music is intentionally generated by dance.\ - \ We introduce the conceptual framework of Digital Dance and Music Instruments\ - \ (DDMI). Several DDMI have already been created, but they have been developed\ - \ isolatedly, and there is still a lack of a common process of ideation and development.\ - \ Knowledge about Digital Musical Instruments (DMIs) and Interactive Dance Systems\ - \ (IDSs) can contribute to the design of DDMI, but the former brings few contributions\ - \ to the body's expressiveness, and the latter brings few references to an instrumental\ - \ relationship with music. Because of those different premises, the integration\ - \ between both paradigms can be an arduous task for the designer of DDMI. The\ - \ conceptual framework of DDMI can also serve as a bridge between DMIs and IDSs,\ - \ serving as a lingua franca between both communities and facilitating the exchange\ - \ of knowledge. The conceptual framework has shown to be a promising analytical\ - \ tool for the design, development, and evaluation of new digital dance and music\ - \ instrument.},\n address = {Porto Alegre, Brazil},\n author = {João Nogueira\ - \ Tragtenberg and Filipe Calegario and Giordano Cabral and Geber L. 
Ramalho},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672878},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {89--94},\n\ - \ publisher = {UFRGS},\n title = {Towards the Concept of Digital Dance and Music\ - \ Instruments},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper018.pdf},\n\ - \ year = {2019}\n}\n" + ID: Nishino2011 + address: 'Oslo, Norway' + author: 'Nishino, Hiroki' + bibtex: "@inproceedings{Nishino2011,\n address = {Oslo, Norway},\n author = {Nishino,\ + \ Hiroki},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178123},\n issn = {2220-4806},\n\ + \ keywords = {Computer music, programming language, the psychology of programming,\ + \ usability },\n pages = {499--502},\n title = {Cognitive Issues in Computer Music\ + \ Programming},\n url = {http://www.nime.org/proceedings/2011/nime2011_499.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672878 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178123 issn: 2220-4806 - month: June - pages: 89--94 - publisher: UFRGS - title: Towards the Concept of Digital Dance and Music Instruments - url: http://www.nime.org/proceedings/2019/nime2019_paper018.pdf - year: 2019 + keywords: 'Computer music, programming language, the psychology of programming, + usability ' + pages: 499--502 + title: Cognitive Issues in Computer Music Programming + url: http://www.nime.org/proceedings/2011/nime2011_499.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Bomba2019 - abstract: 'Visitors interact with a blindfolded artist''s body, the motions of which - are tracked and translated into synthesized four-channel sound, surrounding the - participants. 
Through social-physical and aural interactions, they play his instrument-body, - in a mutual dance. Crucial for this work has been the motion-to-sound mapping - design, and the investigations of bodily interaction with normal lay-people and - with professional contact-improvisation dancers. The extra layer of social-physical - interaction both constrains and inspires the participant-artist relation and the - sonic exploration, and through this, his body is transformed into an instrument, - and physical space is transformed into a sound-space. The project aims to explore - the experience of interaction between human and technology and its impact on one''s - bodily perception and embodiment, as well as the relation between body and space, - departing from a set of existing theories on embodiment. In the paper, its underlying - aesthetics are described and discussed, as well as the sensitive motion research - process behind it, and the technical implementation of the work. It is evaluated - based on participant behavior and experiences and analysis of its premiere exhibition - in 2018.' - address: 'Porto Alegre, Brazil' - author: Maros Suran Bomba and Palle Dahlstedt - bibtex: "@inproceedings{Bomba2019,\n abstract = {Visitors interact with a blindfolded\ - \ artist's body, the motions of which are tracked and translated into synthesized\ - \ four-channel sound, surrounding the participants. Through social-physical and\ - \ aural interactions, they play his instrument-body, in a mutual dance. Crucial\ - \ for this work has been the motion-to-sound mapping design, and the investigations\ - \ of bodily interaction with normal lay-people and with professional contact-improvisation\ - \ dancers. The extra layer of social-physical interaction both constrains and\ - \ inspires the participant-artist relation and the sonic exploration, and through\ - \ this, his body is transformed into an instrument, and physical space is transformed\ - \ into a sound-space. 
The project aims to explore the experience of interaction\ - \ between human and technology and its impact on one's bodily perception and embodiment,\ - \ as well as the relation between body and space, departing from a set of existing\ - \ theories on embodiment. In the paper, its underlying aesthetics are described\ - \ and discussed, as well as the sensitive motion research process behind it, and\ - \ the technical implementation of the work. It is evaluated based on participant\ - \ behavior and experiences and analysis of its premiere exhibition in 2018.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Maros Suran Bomba and Palle Dahlstedt},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672880},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {95--100},\n\ - \ publisher = {UFRGS},\n title = {Somacoustics: Interactive Body-as-Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_paper019.pdf},\n year =\ - \ {2019}\n}\n" + ID: Lamb2011 + abstract: 'This paper introduces the Seaboard, a new tangible musicalinstrument + which aims to provide musicians with significantcapability to manipulate sound + in real-time in a musicallyintuitive way. It introduces the core design features + whichmake the Seaboard unique, and describes the motivationand rationale behind + the design. The fundamental approachto dealing with problems associated with discrete + and continuous inputs is summarized.' + address: 'Oslo, Norway' + author: 'Lamb, Roland and Robertson, Andrew' + bibtex: "@inproceedings{Lamb2011,\n abstract = {This paper introduces the Seaboard,\ + \ a new tangible musicalinstrument which aims to provide musicians with significantcapability\ + \ to manipulate sound in real-time in a musicallyintuitive way. 
It introduces\ + \ the core design features whichmake the Seaboard unique, and describes the motivationand\ + \ rationale behind the design. The fundamental approachto dealing with problems\ + \ associated with discrete and continuous inputs is summarized.},\n address =\ + \ {Oslo, Norway},\n author = {Lamb, Roland and Robertson, Andrew},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178081},\n issn = {2220-4806},\n keywords\ + \ = {Piano keyboard-related interface, continuous and discrete control, haptic\ + \ feedback, Human-Computer Interaction (HCI) },\n pages = {503--506},\n title\ + \ = {Seaboard : a New Piano Keyboard-related Interface Combining Discrete and\ + \ Continuous Control},\n url = {http://www.nime.org/proceedings/2011/nime2011_503.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672880 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178081 issn: 2220-4806 - month: June - pages: 95--100 - publisher: UFRGS - title: 'Somacoustics: Interactive Body-as-Instrument' - url: http://www.nime.org/proceedings/2019/nime2019_paper019.pdf - year: 2019 + keywords: 'Piano keyboard-related interface, continuous and discrete control, haptic + feedback, Human-Computer Interaction (HCI) ' + pages: 503--506 + title: 'Seaboard : a New Piano Keyboard-related Interface Combining Discrete and + Continuous Control' + url: http://www.nime.org/proceedings/2011/nime2011_503.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Turczan2019 - abstract: 'The Scale Navigator is a graphical interface implementation of Dmitri - Tymoczko''s scale network designed to help generate algorithmic harmony and harmonically - synchronize performers in a laptop or electro-acoustic orchestra. 
The user manipulates - the Scale Navigator to direct harmony on a chord-to-chord level and on a scale-to-scale - level. In a live performance setting, the interface broadcasts control data, MIDI, - and real-time notation to an ensemble of live electronic performers, sight-reading - improvisers, and musical generative algorithms. ' - address: 'Porto Alegre, Brazil' - author: Nathan Turczan and Ajay Kapur - bibtex: "@inproceedings{Turczan2019,\n abstract = {The Scale Navigator is a graphical\ - \ interface implementation of Dmitri Tymoczko's scale network designed to help\ - \ generate algorithmic harmony and harmonically synchronize performers in a laptop\ - \ or electro-acoustic orchestra. The user manipulates the Scale Navigator to direct\ - \ harmony on a chord-to-chord level and on a scale-to-scale level. In a live performance\ - \ setting, the interface broadcasts control data, MIDI, and real-time notation\ - \ to an ensemble of live electronic performers, sight-reading improvisers, and\ - \ musical generative algorithms. 
},\n address = {Porto Alegre, Brazil},\n author\ - \ = {Nathan Turczan and Ajay Kapur},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672882},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {101--104},\n publisher = {UFRGS},\n title = {The Scale\ - \ Navigator: A System for Networked Algorithmic Harmony},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper020.pdf},\n\ - \ year = {2019}\n}\n" + ID: Beyer2011 + address: 'Oslo, Norway' + author: 'Beyer, Gilbert and Meier, Max' + bibtex: "@inproceedings{Beyer2011,\n address = {Oslo, Norway},\n author = {Beyer,\ + \ Gilbert and Meier, Max},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177963},\n\ + \ issn = {2220-4806},\n keywords = {Interactive music, public displays, user experience,\ + \ out-of-home media, algorithmic composition, soft constraints },\n pages = {507--510},\n\ + \ title = {Music Interfaces for Novice Users : Composing Music on a Public Display\ + \ with Hand Gestures},\n url = {http://www.nime.org/proceedings/2011/nime2011_507.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672882 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1177963 issn: 2220-4806 - month: June - pages: 101--104 - publisher: UFRGS - title: 'The Scale Navigator: A System for Networked Algorithmic Harmony' - url: http://www.nime.org/proceedings/2019/nime2019_paper020.pdf - year: 2019 + keywords: 'Interactive music, public displays, user experience, out-of-home media, + algorithmic composition, soft constraints ' + pages: 507--510 + title: 'Music Interfaces for Novice Users : Composing Music on a Public Display + with Hand Gestures' + url: 
http://www.nime.org/proceedings/2011/nime2011_507.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Lucas2019 - abstract: 'In this paper, the authors describe the evaluation of a collection of - bespoke knob cap designs intended to improve the ease in which a specific musician - with dyskinetic cerebral palsy can operate rotary controls in a musical context. - The authors highlight the importance of the performers perspective when using - design as a means for overcoming access barriers to music. Also, while the authors - were not able to find an ideal solution for the musician within the confines of - this study, several useful observations on the process of evaluating bespoke assistive - music technology are described; observations which may prove useful to digital - musical instrument designers working within the field of inclusive music.' - address: 'Porto Alegre, Brazil' - author: Alex Michael Lucas and Miguel Ortiz and Dr. Franziska Schroeder - bibtex: "@inproceedings{Lucas2019,\n abstract = {In this paper, the authors describe\ - \ the evaluation of a collection of bespoke knob cap designs intended to improve\ - \ the ease in which a specific musician with dyskinetic cerebral palsy can operate\ - \ rotary controls in a musical context. The authors highlight the importance of\ - \ the performers perspective when using design as a means for overcoming access\ - \ barriers to music. Also, while the authors were not able to find an ideal solution\ - \ for the musician within the confines of this study, several useful observations\ - \ on the process of evaluating bespoke assistive music technology are described;\ - \ observations which may prove useful to digital musical instrument designers\ - \ working within the field of inclusive music.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Alex Michael Lucas and Miguel Ortiz and Dr. 
Franziska Schroeder},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672884},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {105--109},\n\ - \ publisher = {UFRGS},\n title = {Bespoke Design for Inclusive Music: The Challenges\ - \ of Evaluation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper021.pdf},\n\ - \ year = {2019}\n}\n" + ID: Cappelen2011 + abstract: 'The traditional role of the musical instrument is to be the working tool + of the professional musician. On the instrument the musician performs music for + the audience to listen to. In this paper we present an interactive installation, + where we expand the role of the instrument to motivate musicking and cocreation + between diverse users. We have made an open installation, where users can perform + a variety of actions in several situations. By using the abilities of the computer, + we have made an installation, which can be interpreted to have many roles. It + can both be an instrument, a co-musician, a communication partner, a toy, a meeting + place and an ambient musical landscape. The users can dynamically shift between + roles, based on their abilities, knowledge and motivation. ' + address: 'Oslo, Norway' + author: 'Cappelen, Birgitta and Anderson, Anders-Petter' + bibtex: "@inproceedings{Cappelen2011,\n abstract = {The traditional role of the\ + \ musical instrument is to be the working tool of the professional musician. On\ + \ the instrument the musician performs music for the audience to listen to. In\ + \ this paper we present an interactive installation, where we expand the role\ + \ of the instrument to motivate musicking and cocreation between diverse users.\ + \ We have made an open installation, where users can perform a variety of actions\ + \ in several situations. 
By using the abilities of the computer, we have made\ + \ an installation, which can be interpreted to have many roles. It can both be\ + \ an instrument, a co-musician, a communication partner, a toy, a meeting place\ + \ and an ambient musical landscape. The users can dynamically shift between roles,\ + \ based on their abilities, knowledge and motivation. },\n address = {Oslo, Norway},\n\ + \ author = {Cappelen, Birgitta and Anderson, Anders-Petter},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177975},\n issn = {2220-4806},\n keywords = {design,genre,interaction,interactive\ + \ installation,music instrument,musicking,narrative,open,role,sound art},\n pages\ + \ = {511--514},\n title = {Expanding the Role of the Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_511.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672884 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1177975 issn: 2220-4806 - month: June - pages: 105--109 - publisher: UFRGS - title: 'Bespoke Design for Inclusive Music: The Challenges of Evaluation' - url: http://www.nime.org/proceedings/2019/nime2019_paper021.pdf - year: 2019 + keywords: 'design,genre,interaction,interactive installation,music instrument,musicking,narrative,open,role,sound + art' + pages: 511--514 + title: Expanding the Role of the Instrument + url: http://www.nime.org/proceedings/2011/nime2011_511.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Xiao2019 - abstract: 'T-Voks is an augmented theremin that controls Voks, a performative singing - synthesizer. Originally developed for control with a graphic tablet interface, - Voks allows for real-time pitch and time scaling, vocal effort modification and - syllable sequencing for pre-recorded voice utterances. 
For T-Voks the theremin''s - frequency antenna modifies the output pitch of the target utterance while the - amplitude antenna controls not only volume as usual but also voice quality and - vocal effort. Syllabic sequencing is handled by an additional pressure sensor - attached to the player''s volume-control hand. This paper presents the system - architecture of T-Voks, the preparation procedure for a song, playing gestures, - and practice techniques, along with musical and poetic examples across four different - languages and styles.' - address: 'Porto Alegre, Brazil' - author: Xiao Xiao and Grégoire Locqueville and Christophe d'Alessandro and Boris - Doval - bibtex: "@inproceedings{Xiao2019,\n abstract = {T-Voks is an augmented theremin\ - \ that controls Voks, a performative singing synthesizer. Originally developed\ - \ for control with a graphic tablet interface, Voks allows for real-time pitch\ - \ and time scaling, vocal effort modification and syllable sequencing for pre-recorded\ - \ voice utterances. For T-Voks the theremin's frequency antenna modifies the output\ - \ pitch of the target utterance while the amplitude antenna controls not only\ - \ volume as usual but also voice quality and vocal effort. Syllabic sequencing\ - \ is handled by an additional pressure sensor attached to the player's volume-control\ - \ hand. 
This paper presents the system architecture of T-Voks, the preparation\ - \ procedure for a song, playing gestures, and practice techniques, along with\ - \ musical and poetic examples across four different languages and styles.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Xiao Xiao and Grégoire Locqueville\ - \ and Christophe d'Alessandro and Boris Doval},\n booktitle = {Proceedings of\ + ID: Todoroff2011 + abstract: 'We developed very small and light sensors, each equippedwith 3-axes accelerometers, + magnetometers and gyroscopes.Those MARG (Magnetic, Angular Rate, and Gravity) + sensors allow for a drift-free attitude computation which in turnleads to the + possibility of recovering the skeleton of bodyparts that are of interest for the + performance, improvingthe results of gesture recognition and allowing to get relative + position between the extremities of the limbs and thetorso of the performer. This + opens new possibilities in termsof mapping. We kept our previous approach developed + atARTeM [2]: wireless from the body to the host computer,but wired through a 4-wire + digital bus on the body. Byrelieving the need for a transmitter on each sensing + node,we could built very light and flat sensor nodes that can bemade invisible + under the clothes. Smaller sensors, coupledwith flexible wires on the body, give + more freedom of movement to dancers despite the need for cables on the body.And + as the weight of each sensor node, box included, isonly 5 grams (Figure 1), they + can also be put on the upper and lower arm and hand of a violin or viola player, + toretrieve the skeleton from the torso to the hand, withoutadding any weight that + would disturb the performer. Weused those sensors in several performances with + a dancingviola player and in one where she was simultaneously controlling gas + flames interactively. We are currently applyingthem to other types of musical + performances.' 
+ address: 'Oslo, Norway' + author: 'Todoroff, Todor' + bibtex: "@inproceedings{Todoroff2011,\n abstract = {We developed very small and\ + \ light sensors, each equippedwith 3-axes accelerometers, magnetometers and gyroscopes.Those\ + \ MARG (Magnetic, Angular Rate, and Gravity) sensors allow for a drift-free attitude\ + \ computation which in turnleads to the possibility of recovering the skeleton\ + \ of bodyparts that are of interest for the performance, improvingthe results\ + \ of gesture recognition and allowing to get relative position between the extremities\ + \ of the limbs and thetorso of the performer. This opens new possibilities in\ + \ termsof mapping. We kept our previous approach developed atARTeM [2]: wireless\ + \ from the body to the host computer,but wired through a 4-wire digital bus on\ + \ the body. Byrelieving the need for a transmitter on each sensing node,we could\ + \ built very light and flat sensor nodes that can bemade invisible under the clothes.\ + \ Smaller sensors, coupledwith flexible wires on the body, give more freedom of\ + \ movement to dancers despite the need for cables on the body.And as the weight\ + \ of each sensor node, box included, isonly 5 grams (Figure 1), they can also\ + \ be put on the upper and lower arm and hand of a violin or viola player, toretrieve\ + \ the skeleton from the torso to the hand, withoutadding any weight that would\ + \ disturb the performer. 
Weused those sensors in several performances with a dancingviola\ + \ player and in one where she was simultaneously controlling gas flames interactively.\ + \ We are currently applyingthem to other types of musical performances.},\n address\ + \ = {Oslo, Norway},\n author = {Todoroff, Todor},\n booktitle = {Proceedings of\ \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.3672886},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {110--115},\n publisher = {UFRGS},\n\ - \ title = {T-Voks: the Singing and Speaking Theremin},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper022.pdf},\n\ - \ year = {2019}\n}\n" + \ = {10.5281/zenodo.1178177},\n issn = {2220-4806},\n keywords = {wireless MARG\ + \ sensors },\n pages = {515--518},\n title = {Wireless Digital/Analog Sensors\ + \ for Music and Dance Performances},\n url = {http://www.nime.org/proceedings/2011/nime2011_515.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672886 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178177 issn: 2220-4806 - month: June - pages: 110--115 - publisher: UFRGS - title: 'T-Voks: the Singing and Speaking Theremin' - url: http://www.nime.org/proceedings/2019/nime2019_paper022.pdf - year: 2019 + keywords: 'wireless MARG sensors ' + pages: 515--518 + title: Wireless Digital/Analog Sensors for Music and Dance Performances + url: http://www.nime.org/proceedings/2011/nime2011_515.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Brown2019 - abstract: 'Recent developments in music technology have enabled novel timbres to - be acoustically synthesized using various actuation and excitation methods. Utilizing - recent work in nonlinear acoustic synthesis, we propose a transducer based augmented - percussion implement entitled DRMMR. 
This design enables the user to sustain computer - sequencer-like drum rolls at faster speeds while also enabling the user to achieve - nonlinear acoustic synthesis effects. Our acoustic evaluation shows drum rolls - executed by DRMMR easily exhibit greater levels of regularity, speed, and precision - than comparable transducer and electromagnetic-based actuation methods. DRMMR''s - nonlinear acoustic synthesis functionality also presents possibilities for new - kinds of sonic interactions on the surface of drum membranes.' - address: 'Porto Alegre, Brazil' - author: Hunter Brown and spencer topel - bibtex: "@inproceedings{Brown2019,\n abstract = {Recent developments in music technology\ - \ have enabled novel timbres to be acoustically synthesized using various actuation\ - \ and excitation methods. Utilizing recent work in nonlinear acoustic synthesis,\ - \ we propose a transducer based augmented percussion implement entitled DRMMR.\ - \ This design enables the user to sustain computer sequencer-like drum rolls at\ - \ faster speeds while also enabling the user to achieve nonlinear acoustic synthesis\ - \ effects. Our acoustic evaluation shows drum rolls executed by DRMMR easily exhibit\ - \ greater levels of regularity, speed, and precision than comparable transducer\ - \ and electromagnetic-based actuation methods. 
DRMMR's nonlinear acoustic synthesis\ - \ functionality also presents possibilities for new kinds of sonic interactions\ - \ on the surface of drum membranes.},\n address = {Porto Alegre, Brazil},\n author\ - \ = {Hunter Brown and spencer topel},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672888},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {116--121},\n publisher = {UFRGS},\n title = {{DRMMR}: An\ - \ Augmented Percussion Implement},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper023.pdf},\n\ - \ year = {2019}\n}\n" + ID: Engum2011 + abstract: 'This paper covers and also describes an ongoing research project focusing + on new artistic possibilities by exchanging music technological methods and techniques + between two distinct musical genres. Through my background as a guitarist and + composer in an experimental metal band I have experienced a vast development in + music technology during the last 20 years. This development has made a great impact + in changing the procedures for composing and producing music within my genre without + necessarily changing the strategies of how the technology is used. The transition + from analogue to digital sound technology not only opened up new ways of manipulating + and manoeuvring sound, it also opened up challenges in how to integrate and control + the digital sound technology as a seamless part of my musical genre. By using + techniques and methods known from electro-acoustic/computer music, and adapting + them for use within my tradition, this research aims to find new strategies for + composing and producing music within my genre. 
' + address: 'Oslo, Norway' + author: 'Engum, Trond' + bibtex: "@inproceedings{Engum2011,\n abstract = {This paper covers and also describes\ + \ an ongoing research project focusing on new artistic possibilities by exchanging\ + \ music technological methods and techniques between two distinct musical genres.\ + \ Through my background as a guitarist and composer in an experimental metal band\ + \ I have experienced a vast development in music technology during the last 20\ + \ years. This development has made a great impact in changing the procedures for\ + \ composing and producing music within my genre without necessarily changing the\ + \ strategies of how the technology is used. The transition from analogue to digital\ + \ sound technology not only opened up new ways of manipulating and manoeuvring\ + \ sound, it also opened up challenges in how to integrate and control the digital\ + \ sound technology as a seamless part of my musical genre. By using techniques\ + \ and methods known from electro-acoustic/computer music, and adapting them for\ + \ use within my tradition, this research aims to find new strategies for composing\ + \ and producing music within my genre. 
},\n address = {Oslo, Norway},\n author\ + \ = {Engum, Trond},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178001},\n\ + \ issn = {2220-4806},\n keywords = {Artistic research, strategies for composition\ + \ and production, convolution, environmental sounds, real time control },\n pages\ + \ = {519--522},\n title = {Real-time Control and Creative Convolution},\n url\ + \ = {http://www.nime.org/proceedings/2011/nime2011_519.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672888 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178001 issn: 2220-4806 - month: June - pages: 116--121 - publisher: UFRGS - title: 'DRMMR: An Augmented Percussion Implement' - url: http://www.nime.org/proceedings/2019/nime2019_paper023.pdf - year: 2019 + keywords: 'Artistic research, strategies for composition and production, convolution, + environmental sounds, real time control ' + pages: 519--522 + title: Real-time Control and Creative Convolution + url: http://www.nime.org/proceedings/2011/nime2011_519.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Lepri2019 - abstract: 'The emergence of a new technology can be considered as the result of - social, cultural and technical process. Instrument designs are particularly influenced - by cultural and aesthetic values linked to the specific contexts and communities - that produced them. In previous work, we ran a design fiction workshop in which - musicians created non-functional instrument mockups. In the current paper, we - report on an online survey in which music technologists were asked to speculate - on the background of the musicians who designed particular instruments. 
Our results - showed several cues for the interpretation of the artefacts'' origins, including - physical features, body-instrument interactions, use of language and references - to established music practices and tools. Tacit musical and cultural values were - also identified based on intuitive and holistic judgments. Our discussion highlights - the importance of cultural awareness and context-dependent values on the design - and use of interactive musical systems.' - address: 'Porto Alegre, Brazil' - author: Giacomo Lepri and Andrew P. McPherson - bibtex: "@inproceedings{Lepri2019,\n abstract = {The emergence of a new technology\ - \ can be considered as the result of social, cultural and technical process. Instrument\ - \ designs are particularly influenced by cultural and aesthetic values linked\ - \ to the specific contexts and communities that produced them. In previous work,\ - \ we ran a design fiction workshop in which musicians created non-functional instrument\ - \ mockups. In the current paper, we report on an online survey in which music\ - \ technologists were asked to speculate on the background of the musicians who\ - \ designed particular instruments. Our results showed several cues for the interpretation\ - \ of the artefacts' origins, including physical features, body-instrument interactions,\ - \ use of language and references to established music practices and tools. Tacit\ - \ musical and cultural values were also identified based on intuitive and holistic\ - \ judgments. Our discussion highlights the importance of cultural awareness and\ - \ context-dependent values on the design and use of interactive musical systems.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Giacomo Lepri and Andrew P. 
McPherson},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672890},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {122--127},\n\ - \ publisher = {UFRGS},\n title = {Fictional instruments, real values: discovering\ - \ musical backgrounds with non-functional prototypes},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper024.pdf},\n\ - \ year = {2019}\n}\n" + ID: Bergsland2011 + address: 'Oslo, Norway' + author: 'Bergsland, Andreas' + bibtex: "@inproceedings{Bergsland2011,\n address = {Oslo, Norway},\n author = {Bergsland,\ + \ Andreas},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177959},\n issn\ + \ = {2220-4806},\n keywords = {LPC, software instrument, analysis, modeling, csound\ + \ },\n pages = {523--526},\n title = {Phrases from {P}aul {L}ansky's {S}ix {F}antasies},\n\ + \ url = {http://www.nime.org/proceedings/2011/nime2011_523.pdf},\n year = {2011}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672890 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1177959 issn: 2220-4806 - month: June - pages: 122--127 - publisher: UFRGS - title: 'Fictional instruments, real values: discovering musical backgrounds with - non-functional prototypes' - url: http://www.nime.org/proceedings/2019/nime2019_paper024.pdf - year: 2019 + keywords: 'LPC, software instrument, analysis, modeling, csound ' + pages: 523--526 + title: 'Phrases from {P}aul {L}ansky''s {S}ix {F}antasies' + url: http://www.nime.org/proceedings/2011/nime2011_523.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Dewey2019 - abstract: 'This paper presents the first stage in the design and evaluation of a - novel container metaphor interface for equalisation control. 
The prototype system - harnesses the Pepper''s Ghost illusion to project mid-air a holographic data visualisation - of an audio track''s long-term average and real-time frequency content as a deformable - shape manipulated directly via hand gestures. The system uses HTML 5, JavaScript - and the Web Audio API in conjunction with a Leap Motion controller and bespoke - low budget projection system. During subjective evaluation users commented that - the novel system was simpler and more intuitive to use than commercially established - equalisation interface paradigms and most suited to creative, expressive and explorative - equalisation tasks.' - address: 'Porto Alegre, Brazil' - author: Christopher Dewey and Jonathan P. Wakefield - bibtex: "@inproceedings{Dewey2019,\n abstract = {This paper presents the first stage\ - \ in the design and evaluation of a novel container metaphor interface for equalisation\ - \ control. The prototype system harnesses the Pepper's Ghost illusion to project\ - \ mid-air a holographic data visualisation of an audio track's long-term average\ - \ and real-time frequency content as a deformable shape manipulated directly via\ - \ hand gestures. The system uses HTML 5, JavaScript and the Web Audio API in conjunction\ - \ with a Leap Motion controller and bespoke low budget projection system. During\ - \ subjective evaluation users commented that the novel system was simpler and\ - \ more intuitive to use than commercially established equalisation interface paradigms\ - \ and most suited to creative, expressive and explorative equalisation tasks.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Christopher Dewey and Jonathan\ - \ P. 
Wakefield},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672892},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {128--129},\n publisher = {UFRGS},\n title = {Exploring\ - \ the Container Metaphor for Equalisation Manipulation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper025.pdf},\n\ - \ year = {2019}\n}\n" + ID: VonFalkenstein2011 + abstract: 'Gliss is an application for iOS that lets the user sequence five separate + instruments and play them back in various ways. Sequences can be created by drawing + onto the screen while the sequencer is running. The playhead of the sequencer + can be set to randomly deviate from the drawings or can be controlled via the + accelerometer of the device. This makes Gliss a hybrid of a sequencer, an instrument + and a generative music system. ' + address: 'Oslo, Norway' + author: 'von Falkenstein, Jan T.' + bibtex: "@inproceedings{VonFalkenstein2011,\n abstract = {Gliss is an application\ + \ for iOS that lets the user sequence five separate instruments and play them\ + \ back in various ways. Sequences can be created by drawing onto the screen while\ + \ the sequencer is running. The playhead of the sequencer can be set to randomly\ + \ deviate from the drawings or can be controlled via the accelerometer of the\ + \ device. This makes Gliss a hybrid of a sequencer, an instrument and a generative\ + \ music system. 
},\n address = {Oslo, Norway},\n author = {von Falkenstein, Jan\ + \ T.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178007},\n issn = {2220-4806},\n\ + \ keywords = {Gliss, iOS, iPhone, iPad, interface, UPIC, music, sequencer, accelerometer,\ + \ drawing },\n pages = {527--528},\n title = {Gliss : An Intuitive Sequencer for\ + \ the iPhone and iPad},\n url = {http://www.nime.org/proceedings/2011/nime2011_527.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672892 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178007 issn: 2220-4806 - month: June - pages: 128--129 - publisher: UFRGS - title: Exploring the Container Metaphor for Equalisation Manipulation - url: http://www.nime.org/proceedings/2019/nime2019_paper025.pdf - year: 2019 + keywords: 'Gliss, iOS, iPhone, iPad, interface, UPIC, music, sequencer, accelerometer, + drawing ' + pages: 527--528 + title: 'Gliss : An Intuitive Sequencer for the iPhone and iPad' + url: http://www.nime.org/proceedings/2011/nime2011_527.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Hofmann2019 - abstract: 'Physics-based sound synthesis allows to shape the sound by modifying - parameters that reference to real world properties of acoustic instruments. This - paper presents a hybrid physical modeling single reed instrument, where a virtual - tube is coupled to a real mouthpiece with a sensor-equipped clarinet reed. The - tube model is provided as an opcode for Csound which is running on the low-latency - embedded audio-platform Bela. An actuator is connected to the audio output and - the sensor-reed signal is fed back into the input of Bela. 
The performer can control - the coupling between reed and actuator, and is also provided with a 3D-printed - slider/knob interface to change parameters of the tube model in real-time.' - address: 'Porto Alegre, Brazil' - author: Alex Hofmann and Vasileios Chatziioannou and Sebastian Schmutzhard and Gökberk - Erdogan and Alexander Mayer - bibtex: "@inproceedings{Hofmann2019,\n abstract = {Physics-based sound synthesis\ - \ allows to shape the sound by modifying parameters that reference to real world\ - \ properties of acoustic instruments. This paper presents a hybrid physical modeling\ - \ single reed instrument, where a virtual tube is coupled to a real mouthpiece\ - \ with a sensor-equipped clarinet reed. The tube model is provided as an opcode\ - \ for Csound which is running on the low-latency embedded audio-platform Bela.\ - \ An actuator is connected to the audio output and the sensor-reed signal is fed\ - \ back into the input of Bela. The performer can control the coupling between\ - \ reed and actuator, and is also provided with a 3D-printed slider/knob interface\ - \ to change parameters of the tube model in real-time.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Alex Hofmann and Vasileios Chatziioannou and Sebastian\ - \ Schmutzhard and Gökberk Erdogan and Alexander Mayer},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672896},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {130--133},\n publisher = {UFRGS},\n\ - \ title = {The Half-Physler: An oscillating real-time interface to a tube resonator\ - \ model},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper026.pdf},\n\ - \ year = {2019}\n}\n" + ID: Harriman2011 + abstract: 'This paper describes a new musical instrument inspired by the pedal-steel + guitar, along with its motivations and other considerations. 
Creating a multi-dimensional, + expressive instrument was the primary driving force. For these criteria the pedal + steel guitar proved an apt model as it allows control over several instrument + parameters simultaneously and continuously. The parameters we wanted control over + were volume, timbre, release time and pitch.The Quadrofeelia is played with two + hands on a horizontal surface. Single notes and melodies are easily played as + well as chordal accompaniment with a variety of timbres and release times enabling + a range of legato and staccato notes in an intuitive manner with a new yet familiar + interface.' + address: 'Oslo, Norway' + author: 'Harriman, Jiffer and Casey, Locky and Melvin, Linden' + bibtex: "@inproceedings{Harriman2011,\n abstract = {This paper describes a new musical\ + \ instrument inspired by the pedal-steel guitar, along with its motivations and\ + \ other considerations. Creating a multi-dimensional, expressive instrument was\ + \ the primary driving force. For these criteria the pedal steel guitar proved\ + \ an apt model as it allows control over several instrument parameters simultaneously\ + \ and continuously. The parameters we wanted control over were volume, timbre,\ + \ release time and pitch.The Quadrofeelia is played with two hands on a horizontal\ + \ surface. 
Single notes and melodies are easily played as well as chordal accompaniment\ + \ with a variety of timbres and release times enabling a range of legato and staccato\ + \ notes in an intuitive manner with a new yet familiar interface.},\n address\ + \ = {Oslo, Norway},\n author = {Harriman, Jiffer and Casey, Locky and Melvin,\ + \ Linden},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178041},\n issn = {2220-4806},\n\ + \ keywords = {NIME, pedal-steel, electronic, slide, demonstration, membrane, continuous,\ + \ ribbon, instrument, polyphony, lead },\n pages = {529--530},\n title = {Quadrofeelia\ + \ -- A New Instrument for Sliding into Notes},\n url = {http://www.nime.org/proceedings/2011/nime2011_529.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672896 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178041 issn: 2220-4806 - month: June - pages: 130--133 - publisher: UFRGS - title: 'The Half-Physler: An oscillating real-time interface to a tube resonator - model' - url: http://www.nime.org/proceedings/2019/nime2019_paper026.pdf - year: 2019 + keywords: 'NIME, pedal-steel, electronic, slide, demonstration, membrane, continuous, + ribbon, instrument, polyphony, lead ' + pages: 529--530 + title: Quadrofeelia -- A New Instrument for Sliding into Notes + url: http://www.nime.org/proceedings/2011/nime2011_529.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Bussigel2019 - abstract: 'There is rich history of using found or “readymade” objects in music - performances and sound installations. John Cage''s Water Walk, Carolee Schneeman''s - Noise Bodies, and David Tudor''s Rainforest all lean on both the sonic and cultural - affordances of found objects. 
Today, composers and sound artists continue to look - at the everyday, combining readymades with microcontrollers and homemade electronics - and repurposing known interfaces for their latent sonic potential. This paper - gives a historical overview of work at the intersection of music and the readymade - and then describes three recent sound installations/performances by the authors - that further explore this space. The emphasis is on processes involved in working - with found objects--the complex, practical, and playful explorations into sound - and material culture.' - address: 'Porto Alegre, Brazil' - author: Peter Bussigel and Stephan Moore and Scott Smallwood - bibtex: "@inproceedings{Bussigel2019,\n abstract = {There is rich history of using\ - \ found or “readymade” objects in music performances and sound installations.\ - \ John Cage's Water Walk, Carolee Schneeman's Noise Bodies, and David Tudor's\ - \ Rainforest all lean on both the sonic and cultural affordances of found objects.\ - \ Today, composers and sound artists continue to look at the everyday, combining\ - \ readymades with microcontrollers and homemade electronics and repurposing known\ - \ interfaces for their latent sonic potential. This paper gives a historical overview\ - \ of work at the intersection of music and the readymade and then describes three\ - \ recent sound installations/performances by the authors that further explore\ - \ this space. 
The emphasis is on processes involved in working with found objects--the\ - \ complex, practical, and playful explorations into sound and material culture.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Peter Bussigel and Stephan Moore\ - \ and Scott Smallwood},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672898},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {134--139},\n publisher = {UFRGS},\n title = {Reanimating\ - \ the Readymade},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper027.pdf},\n\ - \ year = {2019}\n}\n" + ID: Wang2011a + address: 'Oslo, Norway' + author: 'Wang, Johnty and d''Alessandro, Nicolas and Fels, Sidney S. and Pritchard, + Bob' + bibtex: "@inproceedings{Wang2011a,\n address = {Oslo, Norway},\n author = {Wang,\ + \ Johnty and d'Alessandro, Nicolas and Fels, Sidney S. and Pritchard, Bob},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178189},\n issn = {2220-4806},\n\ + \ pages = {531--532},\n title = {SQUEEZY : Extending a Multi-touch Screen with\ + \ Force Sensing Objects for Controlling Articulatory Synthesis},\n url = {http://www.nime.org/proceedings/2011/nime2011_531.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672898 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178189 issn: 2220-4806 - month: June - pages: 134--139 - publisher: UFRGS - title: Reanimating the Readymade - url: http://www.nime.org/proceedings/2019/nime2019_paper027.pdf - year: 2019 + pages: 531--532 + title: 'SQUEEZY : Extending a Multi-touch Screen with Force Sensing Objects for + Controlling Articulatory Synthesis' + url: http://www.nime.org/proceedings/2011/nime2011_531.pdf + year: 
2011 - ENTRYTYPE: inproceedings - ID: Zhang2019 - abstract: 'Haptic interfaces have untapped the sense of touch to assist multimodal - music learning. We have recently seen various improvements of interface design - on tactile feedback and force guidance aiming to make instrument learning more - effective. However, most interfaces are still quite static; they cannot yet sense - the learning progress and adjust the tutoring strategy accordingly. To solve this - problem, we contribute an adaptive haptic interface based on the latest design - of haptic flute. We first adopted a clutch mechanism to enable the interface to - turn on and off the haptic control flexibly in real time. The interactive tutor - is then able to follow human performances and apply the “teacher force” only when - the software instructs so. Finally, we incorporated the adaptive interface with - a step-by-step dynamic learning strategy. Experimental results showed that dynamic - learning dramatically outperforms static learning, which boosts the learning rate - by 45.3% and shrinks the forgetting chance by 86%.' - address: 'Porto Alegre, Brazil' - author: Yian Zhang and Yinmiao Li and Daniel Chin and Gus Xia - bibtex: "@inproceedings{Zhang2019,\n abstract = {Haptic interfaces have untapped\ - \ the sense of touch to assist multimodal music learning. We have recently seen\ - \ various improvements of interface design on tactile feedback and force guidance\ - \ aiming to make instrument learning more effective. However, most interfaces\ - \ are still quite static; they cannot yet sense the learning progress and adjust\ - \ the tutoring strategy accordingly. To solve this problem, we contribute an adaptive\ - \ haptic interface based on the latest design of haptic flute. We first adopted\ - \ a clutch mechanism to enable the interface to turn on and off the haptic control\ - \ flexibly in real time. 
The interactive tutor is then able to follow human performances\ - \ and apply the “teacher force” only when the software instructs so. Finally,\ - \ we incorporated the adaptive interface with a step-by-step dynamic learning\ - \ strategy. Experimental results showed that dynamic learning dramatically outperforms\ - \ static learning, which boosts the learning rate by 45.3% and shrinks the forgetting\ - \ chance by 86%.},\n address = {Porto Alegre, Brazil},\n author = {Yian Zhang\ - \ and Yinmiao Li and Daniel Chin and Gus Xia},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.3672900},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {140--145},\n publisher = {UFRGS},\n\ - \ title = {Adaptive Multimodal Music Learning via Interactive Haptic Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_paper028.pdf},\n year =\ - \ {2019}\n}\n" + ID: Choe2011 + abstract: 'In this paper, we suggest a conceptual model of a Web application framework + for the composition and documentation of soundscape and introduce corresponding + prototype projects, SeoulSoundMap and SoundScape Composer. We also survey the + current Web-based sound projects in terms of soundscape documentation. ' + address: 'Oslo, Norway' + author: 'Choe, Souhwan and Lee, Kyogu' + bibtex: "@inproceedings{Choe2011,\n abstract = {In this paper, we suggest a conceptual\ + \ model of a Web application framework for the composition and documentation of\ + \ soundscape and introduce corresponding prototype projects, SeoulSoundMap and\ + \ SoundScape Composer. We also survey the current Web-based sound projects in\ + \ terms of soundscape documentation. 
},\n address = {Oslo, Norway},\n author =\ + \ {Choe, Souhwan and Lee, Kyogu},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177985},\n\ + \ issn = {2220-4806},\n keywords = {soundscape, web application framework, sound\ + \ archive, sound map, soundscape composition, soundscape documentation. },\n pages\ + \ = {533--534},\n title = {{SW}AF: Towards a Web Application Framework for Composition\ + \ and Documentation of Soundscape},\n url = {http://www.nime.org/proceedings/2011/nime2011_533.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672900 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1177985 issn: 2220-4806 - month: June - pages: 140--145 - publisher: UFRGS - title: Adaptive Multimodal Music Learning via Interactive Haptic Instrument - url: http://www.nime.org/proceedings/2019/nime2019_paper028.pdf - year: 2019 + keywords: 'soundscape, web application framework, sound archive, sound map, soundscape + composition, soundscape documentation. ' + pages: 533--534 + title: 'SWAF: Towards a Web Application Framework for Composition and Documentation + of Soundscape' + url: http://www.nime.org/proceedings/2011/nime2011_533.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Sguiglia2019 - abstract: 'We present El mapa no es el territorio (MNT), a set of open source tools - that facilitate the design of visual and musical mappings for interactive installations - and performance pieces. MNT is being developed by a multidisciplinary group that - explores gestural control of audio-visual environments and virtual instruments. 
- Along with these tools, this paper will present two projects in which they were - used -interactive installation Memorias Migrantes and stage performance Recorte - de Jorge Cárdenas Cayendo-, showing how MNT allows us to develop collaborative - artworks that articulate body movement and generative audiovisual systems, and - how its current version was influenced by these successive implementations.' - address: 'Porto Alegre, Brazil' - author: Fabián Sguiglia and Pauli Coton and Fernando Toth - bibtex: "@inproceedings{Sguiglia2019,\n abstract = {We present El mapa no es el\ - \ territorio (MNT), a set of open source tools that facilitate the design of visual\ - \ and musical mappings for interactive installations and performance pieces. MNT\ - \ is being developed by a multidisciplinary group that explores gestural control\ - \ of audio-visual environments and virtual instruments. Along with these tools,\ - \ this paper will present two projects in which they were used -interactive installation\ - \ Memorias Migrantes and stage performance Recorte de Jorge Cárdenas Cayendo-,\ - \ showing how MNT allows us to develop collaborative artworks that articulate\ - \ body movement and generative audiovisual systems, and how its current version\ - \ was influenced by these successive implementations.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Fabián Sguiglia and Pauli Coton and Fernando Toth},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.3672902},\n editor = {Marcelo Queiroz and\ - \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {146--149},\n\ - \ publisher = {UFRGS},\n title = {El mapa no es el territorio: Sensor mapping\ - \ for audiovisual performances},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper029.pdf},\n\ - \ year = {2019}\n}\n" + ID: Schnell2011 + abstract: 'We are presenting a set of applications that have been realized with 
+ the MO modular wireless motion capture deviceand a set of software components + integrated into Max/MSP.These applications, created in the context of artistic + projects,music pedagogy, and research, allow for the gestural reembodiment of + recorded sound and music. They demonstrate a large variety of different "playing + techniques" inmusical performance using wireless motion sensor modulesin conjunction + with gesture analysis and real-time audioprocessing components.' + address: 'Oslo, Norway' + author: 'Schnell, Norbert and Bevilacqua, Frédéric and Rasamimanana, Nicolas and + Blois, Julien and Guédy, Fabrice and Fléty, Emmanuel' + bibtex: "@inproceedings{Schnell2011,\n abstract = {We are presenting a set of applications\ + \ that have been realized with the MO modular wireless motion capture deviceand\ + \ a set of software components integrated into Max/MSP.These applications, created\ + \ in the context of artistic projects,music pedagogy, and research, allow for\ + \ the gestural reembodiment of recorded sound and music. 
They demonstrate a large\ + \ variety of different \"playing techniques\" inmusical performance using wireless\ + \ motion sensor modulesin conjunction with gesture analysis and real-time audioprocessing\ + \ components.},\n address = {Oslo, Norway},\n author = {Schnell, Norbert and Bevilacqua,\ + \ Fr\\'{e}d\\'{e}ric and Rasamimanana, Nicolas and Blois, Julien and Gu\\'{e}dy,\ + \ Fabrice and Fl\\'{e}ty, Emmanuel},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178153},\n\ + \ issn = {2220-4806},\n keywords = {Music, Gesture, Interface, Wireless Sensors,\ + \ Gesture Recognition, Audio Processing, Design, Interaction },\n pages = {535--536},\n\ + \ title = {Playing the \"MO\" -- Gestural Control and Re-Embodiment of Recorded\ + \ Sound and Music},\n url = {http://www.nime.org/proceedings/2011/nime2011_535.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672902 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178153 issn: 2220-4806 - month: June - pages: 146--149 - publisher: UFRGS - title: 'El mapa no es el territorio: Sensor mapping for audiovisual performances' - url: http://www.nime.org/proceedings/2019/nime2019_paper029.pdf - year: 2019 + keywords: 'Music, Gesture, Interface, Wireless Sensors, Gesture Recognition, Audio + Processing, Design, Interaction ' + pages: 535--536 + title: Playing the "MO" -- Gestural Control and Re-Embodiment of Recorded Sound + and Music + url: http://www.nime.org/proceedings/2011/nime2011_535.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Yaremchuk2019 - abstract: 'The Rulers is a Digital Musical Instrument with 7 metal beams, each of - which is fixed at one end. It uses infrared sensors, Hall sensors, and strain - gauges to estimate deflection. 
These sensors each perform better or worse depending - on the class of gesture the user is making, motivating sensor fusion practices. - Residuals between Kalman filters and sensor output are calculated and used as - input to a recurrent neural network which outputs a classification that determines - which processing parameters and sensor measurements are employed. Multiple instances - (30) of layer recurrent neural networks with a single hidden layer varying in - size from 1 to 10 processing units were trained, and tested on previously unseen - data. The best performing neural network has only 3 hidden units and has a sufficiently - low error rate to be good candidate for gesture classification. This paper demonstrates - that: dynamic networks out-perform feedforward networks for this type of gesture - classification, a small network can handle a problem of this level of complexity, - recurrent networks of this size are fast enough for real-time applications of - this type, and the importance of training multiple instances of each network architecture - and selecting the best performing one from within that set.' - address: 'Porto Alegre, Brazil' - author: Vanessa Yaremchuk and Carolina Brum Medeiros and Marcelo Wanderley - bibtex: "@inproceedings{Yaremchuk2019,\n abstract = {The Rulers is a Digital Musical\ - \ Instrument with 7 metal beams, each of which is fixed at one end. It uses infrared\ - \ sensors, Hall sensors, and strain gauges to estimate deflection. These sensors\ - \ each perform better or worse depending on the class of gesture the user is making,\ - \ motivating sensor fusion practices. Residuals between Kalman filters and sensor\ - \ output are calculated and used as input to a recurrent neural network which\ - \ outputs a classification that determines which processing parameters and sensor\ - \ measurements are employed. 
Multiple instances (30) of layer recurrent neural\ - \ networks with a single hidden layer varying in size from 1 to 10 processing\ - \ units were trained, and tested on previously unseen data. The best performing\ - \ neural network has only 3 hidden units and has a sufficiently low error rate\ - \ to be good candidate for gesture classification. This paper demonstrates that:\ - \ dynamic networks out-perform feedforward networks for this type of gesture classification,\ - \ a small network can handle a problem of this level of complexity, recurrent\ - \ networks of this size are fast enough for real-time applications of this type,\ - \ and the importance of training multiple instances of each network architecture\ - \ and selecting the best performing one from within that set.},\n address = {Porto\ - \ Alegre, Brazil},\n author = {Vanessa Yaremchuk and Carolina Brum Medeiros and\ - \ Marcelo Wanderley},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672904},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {150--155},\n publisher = {UFRGS},\n title = {Small Dynamic\ - \ Neural Networks for Gesture Classification with The Rulers (a Digital Musical\ - \ Instrument)},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper030.pdf},\n\ - \ year = {2019}\n}\n" + ID: Zamborlin2011 + abstract: '(land)moves is an interactive installation: the user''s gestures control + the multimedia processing with a total synergybetween audio and video synthesis + and treatment.' 
+ address: 'Oslo, Norway' + author: 'Zamborlin, Bruno and Partesana, Giorgio and Liuni, Marco' + bibtex: "@inproceedings{Zamborlin2011,\n abstract = {(land)moves is an interactive\ + \ installation: the user's gestures control the multimedia processing with a total\ + \ synergybetween audio and video synthesis and treatment.},\n address = {Oslo,\ + \ Norway},\n author = {Zamborlin, Bruno and Partesana, Giorgio and Liuni, Marco},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178195},\n issn = {2220-4806},\n\ + \ keywords = {mapping gesture-audio-video, gesture recognition, landscape, soundscape\ + \ },\n pages = {537--538},\n title = {({LAN}D)MOVES},\n url = {http://www.nime.org/proceedings/2011/nime2011_537.pdf},\n\ + \ year = {2011}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672904 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1178195 issn: 2220-4806 - month: June - pages: 150--155 - publisher: UFRGS - title: Small Dynamic Neural Networks for Gesture Classification with The Rulers - (a Digital Musical Instrument) - url: http://www.nime.org/proceedings/2019/nime2019_paper030.pdf - year: 2019 + keywords: 'mapping gesture-audio-video, gesture recognition, landscape, soundscape ' + pages: 537--538 + title: (LAND)MOVES + url: http://www.nime.org/proceedings/2011/nime2011_537.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Dahlstedtb2019 - abstract: 'We present a work where a space of realtime synthesized sounds is explored - through ear (Oto) and movement (Kinesis) by one or two dancers. Movement is tracked - and mapped through extensive pre-processing to a high-dimensional acoustic space, - using a many-to-many mapping, so that every small body movement matters. Designed - for improvised exploration, it works as both performance and installation. 
Through - this re-translation of bodily action, position, and posture into infinite-dimensional - sound texture and timbre, the performers are invited to re-think and re-learn - position and posture as sound, effort as gesture, and timbre as a bodily construction. - The sound space can be shared by two people, with added modes of presence, proximity - and interaction. The aesthetic background and technical implementation of the - system are described, and the system is evaluated based on a number of performances, - workshops and installation exhibits. Finally, the aesthetic and choreographic - motivations behind the performance narrative are explained, and discussed in the - light of the design of the sonification.' - address: 'Porto Alegre, Brazil' - author: Palle Dahlstedt and Ami Skånberg Dahlstedt - bibtex: "@inproceedings{Dahlstedtb2019,\n abstract = {We present a work where a\ - \ space of realtime synthesized sounds is explored through ear (Oto) and movement\ - \ (Kinesis) by one or two dancers. Movement is tracked and mapped through extensive\ - \ pre-processing to a high-dimensional acoustic space, using a many-to-many mapping,\ - \ so that every small body movement matters. Designed for improvised exploration,\ - \ it works as both performance and installation. Through this re-translation of\ - \ bodily action, position, and posture into infinite-dimensional sound texture\ - \ and timbre, the performers are invited to re-think and re-learn position and\ - \ posture as sound, effort as gesture, and timbre as a bodily construction. The\ - \ sound space can be shared by two people, with added modes of presence, proximity\ - \ and interaction. The aesthetic background and technical implementation of the\ - \ system are described, and the system is evaluated based on a number of performances,\ - \ workshops and installation exhibits. 
Finally, the aesthetic and choreographic\ - \ motivations behind the performance narrative are explained, and discussed in\ - \ the light of the design of the sonification.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Palle Dahlstedt and Ami Skånberg Dahlstedt},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672906},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {156--161},\n publisher = {UFRGS},\n\ - \ title = {OtoKin: Mapping for Sound Space Exploration through Dance Improvisation},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_paper031.pdf},\n year =\ - \ {2019}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.3672906 - editor: Marcelo Queiroz and Anna Xambó Sedó - issn: 2220-4806 - month: June - pages: 156--161 - publisher: UFRGS - title: 'OtoKin: Mapping for Sound Space Exploration through Dance Improvisation' - url: http://www.nime.org/proceedings/2019/nime2019_paper031.pdf - year: 2019 + ID: Verplank2011 + abstract: 'Haptic interfaces using active force-feedback have mostly been used for + emulating existing instruments and making conventional music. With the right speed, + force, precision and software they can also be used to make new sounds and perhaps + new music. The requirements are local microprocessors (for low-latency and high + update rates), strategic sensors (for force as well as position), and non-linear + dynamics (that make for rich overtones and chaotic music).' + address: 'Oslo, Norway' + author: 'Verplank, Bill and Georg, Francesco' + bibtex: "@inproceedings{Verplank2011,\n abstract = {Haptic interfaces using active\ + \ force-feedback have mostly been used for emulating existing instruments and\ + \ making conventional music. 
With the right speed, force, precision and software\ + \ they can also be used to make new sounds and perhaps new music. The requirements\ + \ are local microprocessors (for low-latency and high update rates), strategic\ + \ sensors (for force as well as position), and non-linear dynamics (that make\ + \ for rich overtones and chaotic music).},\n address = {Oslo, Norway},\n author\ + \ = {Verplank, Bill and Georg, Francesco},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178183},\n\ + \ issn = {2220-4806},\n keywords = {NIME, Haptics, Music Controllers, Microprocessors.\ + \ },\n pages = {539--540},\n title = {Can Haptics Make New Music ? -- Fader and\ + \ Plank Demos},\n url = {http://www.nime.org/proceedings/2011/nime2011_539.pdf},\n\ + \ year = {2011}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178183 + issn: 2220-4806 + keywords: 'NIME, Haptics, Music Controllers, Microprocessors. ' + pages: 539--540 + title: 'Can Haptics Make New Music ? -- Fader and Plank Demos' + url: http://www.nime.org/proceedings/2011/nime2011_539.pdf + year: 2011 - ENTRYTYPE: inproceedings - ID: Wright2019 - abstract: 'Taking inspiration from research into deliberately constrained musical - technologies and the emergence of neurodiverse, child-led musical groups such - as the Artism Ensemble, the interplay between design-constraints, inclusivity - and appro- priation is explored. A small scale review covers systems from two - prominent UK-based companies, and two itera- tions of a new prototype system that - were developed in collaboration with a small group of young people on the autistic - spectrum. Amongst these technologies, the aspects of musical experience that are - made accessible differ with re- spect to the extent and nature of each system''s - constraints. 
It is argued that the design-constraints of the new prototype system - facilitated the diverse playing styles and techniques observed during its development. - Based on these obser- vations, we propose that deliberately constrained musical - instruments may be one way of providing more opportuni- ties for the emergence - of personal practices and preferences in neurodiverse groups of children and young - people, and that this is a fitting subject for further research.' - address: 'Porto Alegre, Brazil' - author: Joe Wright and James Dooley - bibtex: "@inproceedings{Wright2019,\n abstract = {Taking inspiration from research\ - \ into deliberately constrained musical technologies and the emergence of neurodiverse,\ - \ child-led musical groups such as the Artism Ensemble, the interplay between\ - \ design-constraints, inclusivity and appro- priation is explored. A small scale\ - \ review covers systems from two prominent UK-based companies, and two itera-\ - \ tions of a new prototype system that were developed in collaboration with a\ - \ small group of young people on the autistic spectrum. Amongst these technologies,\ - \ the aspects of musical experience that are made accessible differ with re- spect\ - \ to the extent and nature of each system's constraints. It is argued that the\ - \ design-constraints of the new prototype system facilitated the diverse playing\ - \ styles and techniques observed during its development. 
Based on these obser-\ - \ vations, we propose that deliberately constrained musical instruments may be\ - \ one way of providing more opportuni- ties for the emergence of personal practices\ - \ and preferences in neurodiverse groups of children and young people, and that\ - \ this is a fitting subject for further research.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Joe Wright and James Dooley},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672908},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {162--167},\n publisher = {UFRGS},\n\ - \ title = {On the Inclusivity of Constraint: Creative Appropriation in Instruments\ - \ for Neurodiverse Children and Young People},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper032.pdf},\n\ - \ year = {2019}\n}\n" + ID: Cook2001 + abstract: 'This paper will present observations on the design, artistic, and human + factors of creating digital music controllers. Specific projects will be presented, + and a set of design principles will be supported from those examples. ' + address: 'Seattle, WA' + author: 'Cook, Perry R.' + bibtex: "@inproceedings{Cook2001,\n abstract = {This paper will present observations\ + \ on the design, artistic, and human factors of creating digital music controllers.\ + \ Specific projects will be presented, and a set of design principles will be\ + \ supported from those examples. 
},\n address = {Seattle, WA},\n author = {Cook,\ + \ Perry R.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176358},\n\ + \ issn = {2220-4806},\n keywords = {Musical control, artistic interfaces.},\n\ + \ pages = {3--6},\n title = {Principles for Designing Computer Music Controllers},\n\ + \ url = {http://www.nime.org/proceedings/2001/nime2001_003.pdf},\n year = {2001}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672908 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176358 issn: 2220-4806 - month: June - pages: 162--167 - publisher: UFRGS - title: 'On the Inclusivity of Constraint: Creative Appropriation in Instruments - for Neurodiverse Children and Young People' - url: http://www.nime.org/proceedings/2019/nime2019_paper032.pdf - year: 2019 + keywords: 'Musical control, artistic interfaces.' + pages: 3--6 + title: Principles for Designing Computer Music Controllers + url: http://www.nime.org/proceedings/2001/nime2001_003.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: Almeida2019 - abstract: 'We present AMIGO, a real-time computer music system that assists novice - users in the composition process through guided musical improvisation. The system - consists of 1) a computational analysis-generation algorithm, which not only formalizes - musical principles from examples, but also guides the user in selecting note sequences; - 2) a MIDI keyboard controller with an integrated LED stripe, which provides visual - feedback to the user; and 3) a real-time music notation, which displays the generated - output. Ultimately, AMIGO allows the intuitive creation of new musical structures - and the acquisition of Western music formalisms, such as musical notation.' 
- address: 'Porto Alegre, Brazil' - author: Isabela Corintha Almeida and Giordano Cabral and Professor Gilberto Bernardes - Almeida - bibtex: "@inproceedings{Almeida2019,\n abstract = {We present AMIGO, a real-time\ - \ computer music system that assists novice users in the composition process through\ - \ guided musical improvisation. The system consists of 1) a computational analysis-generation\ - \ algorithm, which not only formalizes musical principles from examples, but also\ - \ guides the user in selecting note sequences; 2) a MIDI keyboard controller with\ - \ an integrated LED stripe, which provides visual feedback to the user; and 3)\ - \ a real-time music notation, which displays the generated output. Ultimately,\ - \ AMIGO allows the intuitive creation of new musical structures and the acquisition\ - \ of Western music formalisms, such as musical notation.},\n address = {Porto\ - \ Alegre, Brazil},\n author = {Isabela Corintha Almeida and Giordano Cabral and\ - \ Professor Gilberto Bernardes Almeida},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672910},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {168--169},\n publisher = {UFRGS},\n title = {{AMIGO}: An\ - \ Assistive Musical Instrument to Engage, Create and Learn Music},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper033.pdf},\n\ - \ year = {2019}\n}\n" + ID: Verplank2001 + abstract: 'Over the last four years, we have developed a series of lectures, labs + and project assignments aimed at introducing enough technology so that students + from a mix of disciplines can design and build innovative interface devices.' 
+ address: 'Seattle, WA' + author: 'Verplank, Bill and Sapp, Craig and Mathews, Max' + bibtex: "@inproceedings{Verplank2001,\n abstract = {Over the last four years, we\ + \ have developed a series of lectures, labs and project assignments aimed at introducing\ + \ enough technology so that students from a mix of disciplines can design and\ + \ build innovative interface devices.},\n address = {Seattle, WA},\n author =\ + \ {Verplank, Bill and Sapp, Craig and Mathews, Max},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176380},\n issn = {2220-4806},\n\ + \ keywords = {Input devices, music controllers, CHI technology, courses.},\n pages\ + \ = {7--10},\n title = {A Course on Controllers},\n url = {http://www.nime.org/proceedings/2001/nime2001_007.pdf},\n\ + \ year = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672910 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176380 issn: 2220-4806 - month: June - pages: 168--169 - publisher: UFRGS - title: 'AMIGO: An Assistive Musical Instrument to Engage, Create and Learn Music' - url: http://www.nime.org/proceedings/2019/nime2019_paper033.pdf - year: 2019 + keywords: 'Input devices, music controllers, CHI technology, courses.' + pages: 7--10 + title: A Course on Controllers + url: http://www.nime.org/proceedings/2001/nime2001_007.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: Figueiró2019 - abstract: 'ESMERIL is an application developed for Android with a toolchain based - on Puredata and OpenFrameworks (with Ofelia library). The application enables - music creation in a specific expanded format: four separate mono tracks, each - one able to manipulate up to eight audio samples per channel. 
It works also as - a performance instrument that stimulates collaborative remixings from compositions - of scored interaction gestures called “scenes”. The interface also aims to be - a platform to exchange those sample packs as artistic releases, a format similar - to the popular idea of an “album”, but prepared to those four channel packs of - samples and scores of interaction. It uses an adaptive audio slicing mechanism - and it is based on interaction design for multi-touch screen features. A timing - sequencer enhances the interaction between pre-set sequences (the “scenes”) and - screen manipulation scratching, expanding and moving graphic sound waves. This - paper describes the graphical interface features, some development decisions up - to now and perspectives to its continuity.' - address: 'Porto Alegre, Brazil' - author: Cristiano Figueiró and Guilherme Soares and Bruno Rohde - bibtex: "@inproceedings{Figueiró2019,\n abstract = {ESMERIL is an application developed\ - \ for Android with a toolchain based on Puredata and OpenFrameworks (with Ofelia\ - \ library). The application enables music creation in a specific expanded format:\ - \ four separate mono tracks, each one able to manipulate up to eight audio samples\ - \ per channel. It works also as a performance instrument that stimulates collaborative\ - \ remixings from compositions of scored interaction gestures called “scenes”.\ - \ The interface also aims to be a platform to exchange those sample packs as artistic\ - \ releases, a format similar to the popular idea of an “album”, but prepared to\ - \ those four channel packs of samples and scores of interaction. It uses an adaptive\ - \ audio slicing mechanism and it is based on interaction design for multi-touch\ - \ screen features. A timing sequencer enhances the interaction between pre-set\ - \ sequences (the “scenes”) and screen manipulation scratching, expanding and moving\ - \ graphic sound waves. 
This paper describes the graphical interface features,\ - \ some development decisions up to now and perspectives to its continuity.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Cristiano Figueiró and Guilherme\ - \ Soares and Bruno Rohde},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672912},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {170--173},\n publisher = {UFRGS},\n title = {{ESMERIL}\ - \ --- An interactive audio player and composition system for collaborative experimental\ - \ music netlabels},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper034.pdf},\n\ - \ year = {2019}\n}\n" + ID: Wessel2001 + abstract: 'In this paper we describe our efforts towards the development of live + performance computer-based musical instrumentation. Our design criteria include + initial ease of use coupled with a long term potential for virtuosity,minimal + and low variance latency, and clear and simple strategies for programming the + relationship between gesture and musical result. We present custom controllers + and unique adaptations of standard gestural interfaces, a programmable connectivity + processor, a communications protocol called Open Sound Control(OSC), and a variety + of metaphors for musical control. We further describe applications of our technology + to a variety of real musical performances and directions for future research.' 
+ address: 'Seattle, WA' + author: 'Wessel, David and Wright, Matthew' + bibtex: "@inproceedings{Wessel2001,\n abstract = {In this paper we describe our\ + \ efforts towards the development of live performance computer-based musical instrumentation.\ + \ Our design criteria include initial ease of use coupled with a long term potential\ + \ for virtuosity,minimal and low variance latency, and clear and simple strategies\ + \ for programming the relationship between gesture and musical result. We present\ + \ custom controllers and unique adaptations of standard gestural interfaces, a\ + \ programmable connectivity processor, a communications protocol called Open Sound\ + \ Control(OSC), and a variety of metaphors for musical control. We further describe\ + \ applications of our technology to a variety of real musical performances and\ + \ directions for future research.},\n address = {Seattle, WA},\n author = {Wessel,\ + \ David and Wright, Matthew},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n\ + \ doi = {10.5281/zenodo.1176382},\n issn = {2220-4806},\n keywords = {communications\ + \ protocols,gestural controllers,latency,musical,reactive computing,signal processing},\n\ + \ pages = {11--14},\n title = {Problems and Prospects for Intimate Musical Control\ + \ of Computers},\n url = {http://www.nime.org/proceedings/2001/nime2001_011.pdf},\n\ + \ year = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672912 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176382 issn: 2220-4806 - month: June - pages: 170--173 - publisher: UFRGS - title: ESMERIL --- An interactive audio player and composition system for collaborative - experimental music netlabels - url: http://www.nime.org/proceedings/2019/nime2019_paper034.pdf - year: 2019 + keywords: 
'communications protocols,gestural controllers,latency,musical,reactive + computing,signal processing' + pages: 11--14 + title: Problems and Prospects for Intimate Musical Control of Computers + url: http://www.nime.org/proceedings/2001/nime2001_011.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: Weber2019 - abstract: 'We introduce a machine learning technique to autonomously generate novel - melodies that are variations of an arbitrary base melody. These are produced by - a neural network that ensures that (with high probability) the melodic and rhythmic - structure of the new melody is consistent with a given set of sample songs. We - train a Variational Autoencoder network to identify a low-dimensional set of variables - that allows for the compression and representation of sample songs. By perturbing - these variables with Perlin Noise---a temporally-consistent parameterized noise - function---it is possible to generate smoothly-changing novel melodies. We show - that (1) by regulating the amount of noise, one can specify how much of the base - song will be preserved; and (2) there is a direct correlation between the noise - signal and the differences between the statistical properties of novel melodies - and the original one. Users can interpret the controllable noise as a type of - "creativity knob": the higher it is, the more leeway the network has to generate - significantly different melodies. We present a physical prototype that allows - musicians to use a keyboard to provide base melodies and to adjust the network''s - "creativity knobs" to regulate in real-time the process that proposes new melody - ideas.' - address: 'Porto Alegre, Brazil' - author: Aline Weber and Lucas Nunes Alegre and Jim Torresen and Bruno C. da Silva - bibtex: "@inproceedings{Weber2019,\n abstract = {We introduce a machine learning\ - \ technique to autonomously generate novel melodies that are variations of an\ - \ arbitrary base melody. 
These are produced by a neural network that ensures that\ - \ (with high probability) the melodic and rhythmic structure of the new melody\ - \ is consistent with a given set of sample songs. We train a Variational Autoencoder\ - \ network to identify a low-dimensional set of variables that allows for the compression\ - \ and representation of sample songs. By perturbing these variables with Perlin\ - \ Noise---a temporally-consistent parameterized noise function---it is possible\ - \ to generate smoothly-changing novel melodies. We show that (1) by regulating\ - \ the amount of noise, one can specify how much of the base song will be preserved;\ - \ and (2) there is a direct correlation between the noise signal and the differences\ - \ between the statistical properties of novel melodies and the original one. Users\ - \ can interpret the controllable noise as a type of \"creativity knob\": the higher\ - \ it is, the more leeway the network has to generate significantly different melodies.\ - \ We present a physical prototype that allows musicians to use a keyboard to provide\ - \ base melodies and to adjust the network's \"creativity knobs\" to regulate in\ - \ real-time the process that proposes new melody ideas.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Aline Weber and Lucas Nunes Alegre and Jim Torresen and\ - \ Bruno C. 
da Silva},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672914},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {174--179},\n publisher = {UFRGS},\n title = {Parameterized\ - \ Melody Generation with Autoencoders and Temporally-Consistent Noise},\n url\ - \ = {http://www.nime.org/proceedings/2019/nime2019_paper035.pdf},\n year = {2019}\n\ - }\n" + ID: Orio2001 + abstract: 'This paper reviews the existing literature on input device evaluation + and design in human-computer interaction (HCI)and discusses possible applications + of this knowledge to the design and evaluation of new interfaces for musical expression. + Specifically, a set of musical tasks is suggested to allow the evaluation of different + existing controllers. ' + address: 'Seattle, WA' + author: 'Orio, Nicola and Schnell, Norbert and Wanderley, Marcelo M.' + bibtex: "@inproceedings{Orio2001,\n abstract = {This paper reviews the existing\ + \ literature on input device evaluation and design in human-computer interaction\ + \ (HCI)and discusses possible applications of this knowledge to the design and\ + \ evaluation of new interfaces for musical expression. 
Specifically, a set of\ + \ musical tasks is suggested to allow the evaluation of different existing controllers.\ + \ },\n address = {Seattle, WA},\n author = {Orio, Nicola and Schnell, Norbert\ + \ and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n\ + \ doi = {10.5281/zenodo.1176370},\n issn = {2220-4806},\n keywords = {Input device\ + \ design, gestural control, interactive systems},\n pages = {15--18},\n title\ + \ = {Input Devices for Musical Expression : Borrowing Tools from HCI},\n url =\ + \ {http://www.nime.org/proceedings/2001/nime2001_015.pdf},\n year = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672914 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176370 issn: 2220-4806 - month: June - pages: 174--179 - publisher: UFRGS - title: Parameterized Melody Generation with Autoencoders and Temporally-Consistent - Noise - url: http://www.nime.org/proceedings/2019/nime2019_paper035.pdf - year: 2019 + keywords: 'Input device design, gestural control, interactive systems' + pages: 15--18 + title: 'Input Devices for Musical Expression : Borrowing Tools from HCI' + url: http://www.nime.org/proceedings/2001/nime2001_015.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: Tanaka2019 - abstract: 'This paper presents a system that allows users to quickly try different - ways to train neural networks and temporal modeling techniques to associate arm - gestures with time varying sound. We created a software framework for this, and - designed three interactive sounds and presented them to participants in a workshop - based study. 
We build upon previous work in sound-tracing and mapping-by-demonstration - to ask the participants to design gestures with which to perform the given sounds - using a multimodal, inertial measurement (IMU) and muscle sensing (EMG) device. - We presented the user with four techniques for associating sensor input to synthesizer - parameter output. Two were classical techniques from the literature, and two proposed - different ways to capture dynamic gesture in a neural network. These four techniques - were: 1.) A Static Position regression training procedure, 2.) A Hidden Markov - based temporal modeler, 3.) Whole Gesture capture to a neural network, and 4.) - a Windowed method using the position-based procedure on the fly during the performance - of a dynamic gesture. Our results show trade-offs between accurate, predictable - reproduction of the source sounds and exploration of the gesture-sound space. - Several of the users were attracted to our new windowed method for capturing gesture - anchor points on the fly as training data for neural network based regression. - This paper will be of interest to musicians interested in going from sound design - to gesture design and offers a workflow for quickly trying different mapping-by-demonstration - techniques.' - address: 'Porto Alegre, Brazil' - author: 'Atau Tanaka and Di Donato, Balandino and Michael Zbyszynski and Geert Roks' - bibtex: "@inproceedings{Tanaka2019,\n abstract = {This paper presents a system that\ - \ allows users to quickly try different ways to train neural networks and temporal\ - \ modeling techniques to associate arm gestures with time varying sound. We created\ - \ a software framework for this, and designed three interactive sounds and presented\ - \ them to participants in a workshop based study. 
We build upon previous work\ - \ in sound-tracing and mapping-by-demonstration to ask the participants to design\ - \ gestures with which to perform the given sounds using a multimodal, inertial\ - \ measurement (IMU) and muscle sensing (EMG) device. We presented the user with\ - \ four techniques for associating sensor input to synthesizer parameter output.\ - \ Two were classical techniques from the literature, and two proposed different\ - \ ways to capture dynamic gesture in a neural network. These four techniques were:\ - \ 1.) A Static Position regression training procedure, 2.) A Hidden Markov based\ - \ temporal modeler, 3.) Whole Gesture capture to a neural network, and 4.) a Windowed\ - \ method using the position-based procedure on the fly during the performance\ - \ of a dynamic gesture. Our results show trade-offs between accurate, predictable\ - \ reproduction of the source sounds and exploration of the gesture-sound space.\ - \ Several of the users were attracted to our new windowed method for capturing\ - \ gesture anchor points on the fly as training data for neural network based regression.\ - \ This paper will be of interest to musicians interested in going from sound design\ - \ to gesture design and offers a workflow for quickly trying different mapping-by-demonstration\ - \ techniques.},\n address = {Porto Alegre, Brazil},\n author = {Atau Tanaka and\ - \ Di Donato, Balandino and Michael Zbyszynski and Geert Roks},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672916},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {180--185},\n publisher = {UFRGS},\n\ - \ title = {Designing Gestures for Continuous Sonic Interaction},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper036.pdf},\n\ - \ year = {2019}\n}\n" + ID: Bahn2001 + abstract: 'This paper presents the interface developments and music 
of the duo "interface," + formed by Curtis Bahn and Dan Trueman. We describe gestural instrument design, + interactive performance interfaces for improvisational music, spherical speakers + (multi-channel, outward-radiating geodesic speaker arrays) and Sensor-Speaker-Arrays + (SenSAs: combinations of various sensor devices with spherical speaker arrays). + We discuss the concept, design and construction of these systems, and, give examples + from several new published CDs of work by Bahn and Trueman.' + address: 'Seattle, WA' + author: 'Bahn, Curtis and Trueman, Dan' + bibtex: "@inproceedings{Bahn2001,\n abstract = {This paper presents the interface\ + \ developments and music of the duo \"interface,\" formed by Curtis Bahn and Dan\ + \ Trueman. We describe gestural instrument design, interactive performance interfaces\ + \ for improvisational music, spherical speakers (multi-channel, outward-radiating\ + \ geodesic speaker arrays) and Sensor-Speaker-Arrays (SenSAs: combinations of\ + \ various sensor devices with spherical speaker arrays). 
We discuss the concept,\ + \ design and construction of these systems, and, give examples from several new\ + \ published CDs of work by Bahn and Trueman.},\n address = {Seattle, WA},\n author\ + \ = {Bahn, Curtis and Trueman, Dan},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n\ + \ doi = {10.5281/zenodo.1176356},\n issn = {2220-4806},\n pages = {19--23},\n\ + \ title = {interface : Electronic Chamber Ensemble},\n url = {http://www.nime.org/proceedings/2001/nime2001_019.pdf},\n\ + \ year = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672916 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176356 issn: 2220-4806 - month: June - pages: 180--185 - publisher: UFRGS - title: Designing Gestures for Continuous Sonic Interaction - url: http://www.nime.org/proceedings/2019/nime2019_paper036.pdf - year: 2019 + pages: 19--23 + title: 'interface : Electronic Chamber Ensemble' + url: http://www.nime.org/proceedings/2001/nime2001_019.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: Erdem2019 - abstract: 'This paper describes the process of developing a shared instrument for - music--dance performance, with a particular focus on exploring the boundaries - between standstill vs motion, and silence vs sound. The piece Vrengt grew from - the idea of enabling a true partnership between a musician and a dancer, developing - an instrument that would allow for active co-performance. Using a participatory - design approach, we worked with sonification as a tool for systematically exploring - the dancer''s bodily expressions. The exploration used a "spatiotemporal matrix", - with a particular focus on sonic microinteraction. 
In the final performance, two - Myo armbands were used for capturing muscle activity of the arm and leg of the - dancer, together with a wireless headset microphone capturing the sound of breathing. - In the paper we reflect on multi-user instrument paradigms, discuss our approach - to creating a shared instrument using sonification as a tool for the sound design, - and reflect on the performers'' subjective evaluation of the instrument. ' - address: 'Porto Alegre, Brazil' - author: 'Cagri Erdem and Katja Henriksen Schia and Jensenius, Alexander Refsum' - bibtex: "@inproceedings{Erdem2019,\n abstract = {This paper describes the process\ - \ of developing a shared instrument for music--dance performance, with a particular\ - \ focus on exploring the boundaries between standstill vs motion, and silence\ - \ vs sound. The piece Vrengt grew from the idea of enabling a true partnership\ - \ between a musician and a dancer, developing an instrument that would allow for\ - \ active co-performance. Using a participatory design approach, we worked with\ - \ sonification as a tool for systematically exploring the dancer's bodily expressions.\ - \ The exploration used a \"spatiotemporal matrix\", with a particular focus on\ - \ sonic microinteraction. In the final performance, two Myo armbands were used\ - \ for capturing muscle activity of the arm and leg of the dancer, together with\ - \ a wireless headset microphone capturing the sound of breathing. In the paper\ - \ we reflect on multi-user instrument paradigms, discuss our approach to creating\ - \ a shared instrument using sonification as a tool for the sound design, and reflect\ - \ on the performers' subjective evaluation of the instrument. 
},\n address =\ - \ {Porto Alegre, Brazil},\n author = {Cagri Erdem and Katja Henriksen Schia and\ - \ Jensenius, Alexander Refsum},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672918},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {186--191},\n publisher = {UFRGS},\n title = {Vrengt: A\ - \ Shared Body-Machine Instrument for Music-Dance Performance},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper037.pdf},\n\ - \ year = {2019}\n}\n" + ID: Goudeseune2001 + abstract: 'We present an acoustic musical instrument played through a resonance + model of another sound. The resonance model is controlled in real time as part + of the composite instrument. Our implementation uses an electric violin, whose + spatial position modifies filter parameters of the resonance model. Simplicial + interpolation defines the mapping from spatial position to filter parameters. + With some effort, pitch tracking can also control the filter parameters. The individual + technologies -- motion tracking, pitch tracking, resonance models -- are easily + adapted to other instruments.' + address: 'Seattle, WA' + author: 'Goudeseune, Camille and Garnett, Guy and Johnson, Timothy' + bibtex: "@inproceedings{Goudeseune2001,\n abstract = {We present an acoustic musical\ + \ instrument played through a resonance model of another sound. The resonance\ + \ model is controlled in real time as part of the composite instrument. Our implementation\ + \ uses an electric violin, whose spatial position modifies filter parameters of\ + \ the resonance model. Simplicial interpolation defines the mapping from spatial\ + \ position to filter parameters. With some effort, pitch tracking can also control\ + \ the filter parameters. 
The individual technologies -- motion tracking, pitch\ + \ tracking, resonance models -- are easily adapted to other instruments.},\n address\ + \ = {Seattle, WA},\n author = {Goudeseune, Camille and Garnett, Guy and Johnson,\ + \ Timothy},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176362},\n\ + \ issn = {2220-4806},\n keywords = {multidimensionality, control, resonance, pitch\ + \ tracking},\n pages = {24--26},\n title = {Resonant Processing of Instrumental\ + \ Sound Controlled by Spatial Position},\n url = {http://www.nime.org/proceedings/2001/nime2001_024.pdf},\n\ + \ year = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672918 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176362 issn: 2220-4806 - month: June - pages: 186--191 - publisher: UFRGS - title: 'Vrengt: A Shared Body-Machine Instrument for Music-Dance Performance' - url: http://www.nime.org/proceedings/2019/nime2019_paper037.pdf - year: 2019 + keywords: 'multidimensionality, control, resonance, pitch tracking' + pages: 24--26 + title: Resonant Processing of Instrumental Sound Controlled by Spatial Position + url: http://www.nime.org/proceedings/2001/nime2001_024.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: ParkeWolfe2019 - abstract: 'We have built a new software toolkit that enables music therapists and - teachers to create custom digital musical interfaces for children with diverse - disabilities. It was designed in collaboration with music therapists, teachers, - and children. It uses interactive machine learning to create new sensor- and vision-based - musical interfaces using demonstrations of actions and sound, making interface - building fast and accessible to people without programming or engineering expertise. 
- Interviews with two music therapy and education professionals who have used the - software extensively illustrate how richly customised, sensor-based interfaces - can be used in music therapy contexts; they also reveal how properties of input - devices, music-making approaches, and mapping techniques can support a variety - of interaction styles and therapy goals.' - address: 'Porto Alegre, Brazil' - author: Samuel Thompson Parke-Wolfe and Hugo Scurto and Rebecca Fiebrink - bibtex: "@inproceedings{ParkeWolfe2019,\n abstract = {We have built a new software\ - \ toolkit that enables music therapists and teachers to create custom digital\ - \ musical interfaces for children with diverse disabilities. It was designed in\ - \ collaboration with music therapists, teachers, and children. It uses interactive\ - \ machine learning to create new sensor- and vision-based musical interfaces using\ - \ demonstrations of actions and sound, making interface building fast and accessible\ - \ to people without programming or engineering expertise. 
Interviews with two\ - \ music therapy and education professionals who have used the software extensively\ - \ illustrate how richly customised, sensor-based interfaces can be used in music\ - \ therapy contexts; they also reveal how properties of input devices, music-making\ - \ approaches, and mapping techniques can support a variety of interaction styles\ - \ and therapy goals.},\n address = {Porto Alegre, Brazil},\n author = {Samuel\ - \ Thompson Parke-Wolfe and Hugo Scurto and Rebecca Fiebrink},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672920},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {192--197},\n publisher = {UFRGS},\n\ - \ title = {Sound Control: Supporting Custom Musical Interface Design for Children\ - \ with Disabilities},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper038.pdf},\n\ - \ year = {2019}\n}\n" + ID: Gurevich2001 + abstract: 'The Accordiatron is a new MIDI controller for real-time performance based + on the paradigm of a conventional squeeze box or concertina. It translates the + gestures of a performer to the standard communication protocol ofMIDI, allowing + for flexible mappings of performance data to sonic parameters. When used in conjunction + with a realtime signal processing environment, the Accordiatron becomes an expressive, + versatile musical instrument. A combination of sensory outputs providing both + discrete and continuous data gives the subtle expressiveness and control necessary + for interactive music.' + address: 'Seattle, WA' + author: 'Gurevich, Michael and von Muehlen, Stephan' + bibtex: "@inproceedings{Gurevich2001,\n abstract = {The Accordiatron is a new MIDI\ + \ controller for real-time performance based on the paradigm of a conventional\ + \ squeeze box or concertina. 
It translates the gestures of a performer to the\ + \ standard communication protocol ofMIDI, allowing for flexible mappings of performance\ + \ data to sonic parameters. When used in conjunction with a realtime signal processing\ + \ environment, the Accordiatron becomes an expressive, versatile musical instrument.\ + \ A combination of sensory outputs providing both discrete and continuous data\ + \ gives the subtle expressiveness and control necessary for interactive music.},\n\ + \ address = {Seattle, WA},\n author = {Gurevich, Michael and von Muehlen, Stephan},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176364},\n\ + \ issn = {2220-4806},\n keywords = {MIDI controllers, computer music, interactive\ + \ music, electronic musical instruments, musical instrument design, human computer\ + \ interface},\n pages = {27--29},\n title = {The Accordiatron : A {MIDI} Controller\ + \ For Interactive Music},\n url = {http://www.nime.org/proceedings/2001/nime2001_027.pdf},\n\ + \ year = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672920 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176364 issn: 2220-4806 - month: June - pages: 192--197 - publisher: UFRGS - title: 'Sound Control: Supporting Custom Musical Interface Design for Children with - Disabilities' - url: http://www.nime.org/proceedings/2019/nime2019_paper038.pdf - year: 2019 + keywords: 'MIDI controllers, computer music, interactive music, electronic musical + instruments, musical instrument design, human computer interface' + pages: 27--29 + title: 'The Accordiatron : A MIDI Controller For Interactive Music' + url: http://www.nime.org/proceedings/2001/nime2001_027.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: Hödl2019 - abstract: 'With a new digital music 
instrument (DMI), the interface itself, the - sound generation, the composition, and the performance are often closely related - and even intrinsically linked with each other. Similarly, the instrument designer, - composer, and performer are often the same person. The Academic Festival Overture - is a new piece of music for the DMI Trombosonic and symphonic orchestra written - by a composer who had no prior experience with the instrument. The piece underwent - the phases of a composition competition, rehearsals, a music video production, - and a public live performance. This whole process was evaluated reflecting on - the experience of three involved key stakeholder: the composer, the conductor, - and the instrument designer as performer. `Blending dimensions'' of these stakeholder - and decoupling the composition from the instrument designer inspired the newly - involved composer to completely rethink the DMI''s interaction and sound concept. - Thus, to deliberately avoid an early collaboration between a DMI designer and - a composer bears the potential for new inspiration and at the same time the challenge - to seek such a collaboration in the need of clarifying possible misunderstandings - and improvement.' - address: 'Porto Alegre, Brazil' - author: Oliver Hödl - bibtex: "@inproceedings{Hödl2019,\n abstract = {With a new digital music instrument\ - \ (DMI), the interface itself, the sound generation, the composition, and the\ - \ performance are often closely related and even intrinsically linked with each\ - \ other. Similarly, the instrument designer, composer, and performer are often\ - \ the same person. The Academic Festival Overture is a new piece of music for\ - \ the DMI Trombosonic and symphonic orchestra written by a composer who had no\ - \ prior experience with the instrument. 
The piece underwent the phases of a composition\ - \ competition, rehearsals, a music video production, and a public live performance.\ - \ This whole process was evaluated reflecting on the experience of three involved\ - \ key stakeholder: the composer, the conductor, and the instrument designer as\ - \ performer. `Blending dimensions' of these stakeholder and decoupling the composition\ - \ from the instrument designer inspired the newly involved composer to completely\ - \ rethink the DMI's interaction and sound concept. Thus, to deliberately avoid\ - \ an early collaboration between a DMI designer and a composer bears the potential\ - \ for new inspiration and at the same time the challenge to seek such a collaboration\ - \ in the need of clarifying possible misunderstandings and improvement.},\n address\ - \ = {Porto Alegre, Brazil},\n author = {Oliver Hödl},\n booktitle = {Proceedings\ + ID: Paradiso2001 + abstract: 'The technologies behind passive resonant magnetically coupled tags are + introduced and their application as a musical controller is illustrated for solo + or group performances, interactive installations, and music toys. ' + address: 'Seattle, WA' + author: 'Paradiso, Joseph A. and Hsiao, Kai-yuh and Benbasat, Ari' + bibtex: "@inproceedings{Paradiso2001,\n abstract = {The technologies behind passive\ + \ resonant magnetically coupled tags are introduced and their application as a\ + \ musical controller is illustrated for solo or group performances, interactive\ + \ installations, and music toys. },\n address = {Seattle, WA},\n author = {Paradiso,\ + \ Joseph A. 
and Hsiao, Kai-yuh and Benbasat, Ari},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672922},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {198--203},\n publisher = {UFRGS},\n\ - \ title = {'Blending Dimensions' when Composing for {DMI} and Symphonic Orchestra},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_paper039.pdf},\n year =\ - \ {2019}\n}\n" + \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176374},\n issn = {2220-4806},\n\ + \ keywords = {RFID, resonant tags, EAS tags, musical controller, tangible interface},\n\ + \ pages = {30--33},\n title = {Tangible Music Interfaces Using Passive Magnetic\ + \ Tags},\n url = {http://www.nime.org/proceedings/2001/nime2001_030.pdf},\n year\ + \ = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672922 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176374 issn: 2220-4806 - month: June - pages: 198--203 - publisher: UFRGS - title: '''Blending Dimensions'' when Composing for DMI and Symphonic Orchestra' - url: http://www.nime.org/proceedings/2019/nime2019_paper039.pdf - year: 2019 + keywords: 'RFID, resonant tags, EAS tags, musical controller, tangible interface' + pages: 30--33 + title: Tangible Music Interfaces Using Passive Magnetic Tags + url: http://www.nime.org/proceedings/2001/nime2001_030.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: haki2019 - abstract: 'This paper presents a detailed explanation of a system generating basslines - that are stylistically and rhythmically interlocked with a provided audio drum - loop. The proposed system is based on a natural language processing technique: - word-based sequence-to-sequence learning using LSTM units. 
The novelty of the - proposed method lies in the fact that the system is not reliant on a voice-by-voice - transcription of drums; instead, in this method, a drum representation is used - as an input sequence from which a translated bassline is obtained at the output. - The drum representation consists of fixed size sequences of onsets detected from - a 2-bar audio drum loop in eight different frequency bands. The basslines generated - by this method consist of pitched notes with different duration. The proposed - system was trained on two distinct datasets compiled for this project by the authors. - Each dataset contains a variety of 2-bar drum loops with annotated basslines from - two different styles of dance music: House and Soca. A listening experiment designed - based on the system revealed that the proposed system is capable of generating - basslines that are interesting and are well rhythmically interlocked with the - drum loops from which they were generated.' - address: 'Porto Alegre, Brazil' - author: behzad haki and Sergi Jorda - bibtex: "@inproceedings{haki2019,\n abstract = {This paper presents a detailed explanation\ - \ of a system generating basslines that are stylistically and rhythmically interlocked\ - \ with a provided audio drum loop. The proposed system is based on a natural language\ - \ processing technique: word-based sequence-to-sequence learning using LSTM units.\ - \ The novelty of the proposed method lies in the fact that the system is not reliant\ - \ on a voice-by-voice transcription of drums; instead, in this method, a drum\ - \ representation is used as an input sequence from which a translated bassline\ - \ is obtained at the output. The drum representation consists of fixed size sequences\ - \ of onsets detected from a 2-bar audio drum loop in eight different frequency\ - \ bands. The basslines generated by this method consist of pitched notes with\ - \ different duration. 
The proposed system was trained on two distinct datasets\ - \ compiled for this project by the authors. Each dataset contains a variety of\ - \ 2-bar drum loops with annotated basslines from two different styles of dance\ - \ music: House and Soca. A listening experiment designed based on the system revealed\ - \ that the proposed system is capable of generating basslines that are interesting\ - \ and are well rhythmically interlocked with the drum loops from which they were\ - \ generated.},\n address = {Porto Alegre, Brazil},\n author = {behzad haki and\ - \ Sergi Jorda},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672928},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {204--209},\n publisher = {UFRGS},\n title = {A Bassline\ - \ Generation System Based on Sequence-to-Sequence Learning},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper040.pdf},\n\ - \ year = {2019}\n}\n" + ID: Mase2001 + abstract: 'In this paper, we introduce our research challenges for creating new + musical instruments using everyday-life media with intimate interfaces, such as + the self-body, clothes, water and stuffed toys. Various sensor technologies including + image processing and general touch sensitive devices are employed to exploit these + interaction media. The focus of our effort is to provide user-friendly and enjoyable + experiences for new music and sound performances. Multimodality of musical instruments + is explored in each attempt. The degree of controllability in the performance + and the richness of expressions are also discussed for each installation. 
' + address: 'Seattle, WA' + author: 'Mase, Kenji and Yonezawa, Tomoko' + bibtex: "@inproceedings{Mase2001,\n abstract = {In this paper, we introduce our\ + \ research challenges for creating new musical instruments using everyday-life\ + \ media with intimate interfaces, such as the self-body, clothes, water and stuffed\ + \ toys. Various sensor technologies including image processing and general touch\ + \ sensitive devices are employed to exploit these interaction media. The focus\ + \ of our effort is to provide user-friendly and enjoyable experiences for new\ + \ music and sound performances. Multimodality of musical instruments is explored\ + \ in each attempt. The degree of controllability in the performance and the richness\ + \ of expressions are also discussed for each installation. },\n address = {Seattle,\ + \ WA},\n author = {Mase, Kenji and Yonezawa, Tomoko},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176368},\n issn = {2220-4806},\n\ + \ keywords = {New interface, music controller, dance, image processing, water\ + \ interface, stuffed toy},\n pages = {34--37},\n title = {Body , Clothes , Water\ + \ and Toys : Media Towards Natural Music Expressions with Digital Sounds},\n url\ + \ = {http://www.nime.org/proceedings/2001/nime2001_034.pdf},\n year = {2001}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672928 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176368 issn: 2220-4806 - month: June - pages: 204--209 - publisher: UFRGS - title: A Bassline Generation System Based on Sequence-to-Sequence Learning - url: http://www.nime.org/proceedings/2019/nime2019_paper040.pdf - year: 2019 + keywords: 'New interface, music controller, dance, image processing, water interface, + stuffed toy' + pages: 34--37 + 
title: 'Body , Clothes , Water and Toys : Media Towards Natural Music Expressions + with Digital Sounds' + url: http://www.nime.org/proceedings/2001/nime2001_034.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: May2019 - abstract: 'This paper presents a novel physical fuzz pedal effect system named BLIKSEM. - Our approach applies previous work in nonlinear acoustic synthesis via a driven - cantilever soundboard configuration for the purpose of generating fuzz pedal-like - effects as well as a variety of novel audio effects. Following a presentation - of our pedal design, we compare the performance of our system with various various - classic and contemporary fuzz pedals using an electric guitar. Our results show - that BLIKSEM is capable of generating signals that approximate the timbre and - dynamic behaviors of conventional fuzz pedals, as well as offer new mechanisms - for expressive interactions and a range of new effects in different configurations.' - address: 'Porto Alegre, Brazil' - author: Lloyd May and spencer topel - bibtex: "@inproceedings{May2019,\n abstract = {This paper presents a novel physical\ - \ fuzz pedal effect system named BLIKSEM. Our approach applies previous work in\ - \ nonlinear acoustic synthesis via a driven cantilever soundboard configuration\ - \ for the purpose of generating fuzz pedal-like effects as well as a variety of\ - \ novel audio effects. Following a presentation of our pedal design, we compare\ - \ the performance of our system with various various classic and contemporary\ - \ fuzz pedals using an electric guitar. 
Our results show that BLIKSEM is capable\ - \ of generating signals that approximate the timbre and dynamic behaviors of conventional\ - \ fuzz pedals, as well as offer new mechanisms for expressive interactions and\ - \ a range of new effects in different configurations.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Lloyd May and spencer topel},\n booktitle = {Proceedings\ + ID: Overholt2001 + abstract: 'The MATRIX (Multipurpose Array of Tactile Rods for Interactive eXpression) + is a new musical interface for amateurs and professionals alike. It gives users + a 3dimensional tangible interface to control music using their hands, and can + be used in conjunction with a traditional musical instrument and a microphone, + or as a stand-alone gestural input device. The surface of the MATRIX acts as areal-time + interface that can manipulate the parameters of a synthesis engine or effect algorithm + in response to a performer''s expressive gestures. One example is to have the + rods of the MATRIX control the individual grains of a granular synthesizer, thereby + "sonically sculpting" the microstructure of a sound. In this way, the MATRIX provides + an intuitive method of manipulating sound with avery high level of real-time control.' + address: 'Seattle, WA' + author: 'Overholt, Dan' + bibtex: "@inproceedings{Overholt2001,\n abstract = {The MATRIX (Multipurpose Array\ + \ of Tactile Rods for Interactive eXpression) is a new musical interface for amateurs\ + \ and professionals alike. It gives users a 3dimensional tangible interface to\ + \ control music using their hands, and can be used in conjunction with a traditional\ + \ musical instrument and a microphone, or as a stand-alone gestural input device.\ + \ The surface of the MATRIX acts as areal-time interface that can manipulate the\ + \ parameters of a synthesis engine or effect algorithm in response to a performer's\ + \ expressive gestures. 
One example is to have the rods of the MATRIX control the\ + \ individual grains of a granular synthesizer, thereby \"sonically sculpting\"\ + \ the microstructure of a sound. In this way, the MATRIX provides an intuitive\ + \ method of manipulating sound with avery high level of real-time control.},\n\ + \ address = {Seattle, WA},\n author = {Overholt, Dan},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672930},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {210--215},\n publisher = {UFRGS},\n\ - \ title = {{BLIKSEM}: An Acoustic Synthesis Fuzz Pedal},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper041.pdf},\n\ - \ year = {2019}\n}\n" + \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176372},\n issn = {2220-4806},\n\ + \ keywords = {Musical controller, tangible interface, real-time expression, audio\ + \ synthesis, effects algorithms, signal processing, 3-D interface, sculptable\ + \ surface},\n pages = {38--41},\n title = {The MATRIX : A Novel Controller for\ + \ Musical Expression},\n url = {http://www.nime.org/proceedings/2001/nime2001_038.pdf},\n\ + \ year = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672930 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176372 issn: 2220-4806 - month: June - pages: 210--215 - publisher: UFRGS - title: 'BLIKSEM: An Acoustic Synthesis Fuzz Pedal' - url: http://www.nime.org/proceedings/2019/nime2019_paper041.pdf - year: 2019 + keywords: 'Musical controller, tangible interface, real-time expression, audio synthesis, + effects algorithms, signal processing, 3-D interface, sculptable surface' + pages: 38--41 + title: 'The MATRIX : A Novel Controller for Musical Expression' + url: http://www.nime.org/proceedings/2001/nime2001_038.pdf + 
year: 2001 - ENTRYTYPE: inproceedings - ID: Xambó2019 - abstract: 'In this paper, we present a workshop of physical computing applied to - NIME design based on science, technology, engineering, arts, and mathematics (STEAM) - education. The workshop is designed for master students with multidisciplinary - backgrounds. They are encouraged to work in teams from two university campuses - remotely connected through a portal space. The components of the workshop are - prototyping, music improvisation and reflective practice. We report the results - of this course, which show a positive impact on the students'' confidence in prototyping - and intention to continue in STEM fields. We also present the challenges and lessons - learned on how to improve the teaching of hybrid technologies and programming - skills in an interdisciplinary context across two locations, with the aim of satisfying - both beginners and experts. We conclude with a broader discussion on how these - new pedagogical perspectives can improve NIME-related courses.' - address: 'Porto Alegre, Brazil' - author: Anna Xambó and Sigurd Saue and Alexander Refsum Jensenius and Robin Støckert - and Oeyvind Brandtsegg - bibtex: "@inproceedings{Xambó2019,\n abstract = {In this paper, we present a workshop\ - \ of physical computing applied to NIME design based on science, technology, engineering,\ - \ arts, and mathematics (STEAM) education. The workshop is designed for master\ - \ students with multidisciplinary backgrounds. They are encouraged to work in\ - \ teams from two university campuses remotely connected through a portal space.\ - \ The components of the workshop are prototyping, music improvisation and reflective\ - \ practice. We report the results of this course, which show a positive impact\ - \ on the students' confidence in prototyping and intention to continue in STEM\ - \ fields. 
We also present the challenges and lessons learned on how to improve\ - \ the teaching of hybrid technologies and programming skills in an interdisciplinary\ - \ context across two locations, with the aim of satisfying both beginners and\ - \ experts. We conclude with a broader discussion on how these new pedagogical\ - \ perspectives can improve NIME-related courses.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Anna Xambó and Sigurd Saue and Alexander Refsum Jensenius\ - \ and Robin Støckert and Oeyvind Brandtsegg},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.3672932},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {216--221},\n publisher = {UFRGS},\n\ - \ title = {{NIME} Prototyping in Teams: A Participatory Approach to Teaching Physical\ - \ Computing},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper042.pdf},\n\ - \ year = {2019}\n}\n" + ID: DArcangelo2001 + abstract: 'This paper describes a series of projects that explore the possibilities + of musical expression through the combination of pre-composed, interlocking, modular + components. In particular, this paper presents a modular soundtrack recently + composed by the author for “Currentsof Creativity,” a permanent interactive video + wall installation at the Pope John Paul II Cultural Center which is slated to + open Easter 2001 in Washington, DC.' + address: 'Seattle, WA' + author: 'D''Arcangelo, Gideon' + bibtex: "@inproceedings{DArcangelo2001,\n abstract = {This paper describes a series\ + \ of projects that explore the possibilities of musical expression through the\ + \ combination of pre-composed, interlocking, modular components. 
In particular,\ + \ this paper presents a modular soundtrack recently composed by the author for\ + \ “Currentsof Creativity,” a permanent interactive video wall installation at\ + \ the Pope John Paul II Cultural Center which is slated to open Easter 2001 in\ + \ Washington, DC.},\n address = {Seattle, WA},\n author = {D'Arcangelo, Gideon},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176360},\n\ + \ issn = {2220-4806},\n pages = {42--45},\n title = {Creating Contexts of Creativity\ + \ : Musical Composition with Modular Components},\n url = {http://www.nime.org/proceedings/2001/nime2001_042.pdf},\n\ + \ year = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672932 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176360 issn: 2220-4806 - month: June - pages: 216--221 - publisher: UFRGS - title: 'NIME Prototyping in Teams: A Participatory Approach to Teaching Physical - Computing' - url: http://www.nime.org/proceedings/2019/nime2019_paper042.pdf - year: 2019 + pages: 42--45 + title: 'Creating Contexts of Creativity : Musical Composition with Modular Components' + url: http://www.nime.org/proceedings/2001/nime2001_042.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: Meneses2019 - abstract: 'The increasing availability of accessible sensor technologies, single - board computers, and prototyping platforms have resulted in a growing number of - frameworks explicitly geared towards the design and construction of Digital and - Augmented Musical Instruments. Developing such instruments can be facilitated - by choosing the most suitable framework for each project. 
In the process of selecting - a framework for implementing an augmented guitar instrument, we have tested three - Linux-based open-source platforms that have been designed for real-time sensor - interfacing, audio processing, and synthesis. Factors such as acquisition latency, - workload measurements, documentation, and software implementation are compared - and discussed to determine the suitability of each environment for our particular - project.' - address: 'Porto Alegre, Brazil' - author: Eduardo Meneses and Johnty Wang and Sergio Freire and Marcelo Wanderley - bibtex: "@inproceedings{Meneses2019,\n abstract = {The increasing availability of\ - \ accessible sensor technologies, single board computers, and prototyping platforms\ - \ have resulted in a growing number of frameworks explicitly geared towards the\ - \ design and construction of Digital and Augmented Musical Instruments. Developing\ - \ such instruments can be facilitated by choosing the most suitable framework\ - \ for each project. In the process of selecting a framework for implementing an\ - \ augmented guitar instrument, we have tested three Linux-based open-source platforms\ - \ that have been designed for real-time sensor interfacing, audio processing,\ - \ and synthesis. 
Factors such as acquisition latency, workload measurements, documentation,\ - \ and software implementation are compared and discussed to determine the suitability\ - \ of each environment for our particular project.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Eduardo Meneses and Johnty Wang and Sergio Freire and Marcelo\ - \ Wanderley},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672934},\n editor\ - \ = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {222--227},\n publisher = {UFRGS},\n title = {A Comparison of Open-Source\ - \ Linux Frameworks for an Augmented Musical Instrument Implementation},\n url\ - \ = {http://www.nime.org/proceedings/2019/nime2019_paper043.pdf},\n year = {2019}\n\ - }\n" + ID: Jorda2001 + abstract: 'The conception and design of new musical interfaces is a multidisciplinary + area that tightly relates technology and artistic creation. In this paper, the + author first exposes some of the questions he has posed himself during more than + a decade experience as a performer, composer, interface and software designer, + and educator. Finally, he illustrates these topics with some examples of his work.' + address: 'Seattle, WA' + author: 'Jordà, Sergi' + bibtex: "@inproceedings{Jorda2001,\n abstract = {The conception and design of new\ + \ musical interfaces is a multidisciplinary area that tightly relates technology\ + \ and artistic creation. In this paper, the author first exposes some of the questions\ + \ he has posed himself during more than a decade experience as a performer, composer,\ + \ interface and software designer, and educator. 
Finally, he illustrates these\ + \ topics with some examples of his work.},\n address = {Seattle, WA},\n author\ + \ = {Jord\\`{a}, Sergi},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n doi\ + \ = {10.5281/zenodo.1176366},\n issn = {2220-4806},\n pages = {46--50},\n title\ + \ = {New Musical Interfaces and New Music-making Paradigms},\n url = {http://www.nime.org/proceedings/2001/nime2001_046.pdf},\n\ + \ year = {2001}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672934 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176366 issn: 2220-4806 - month: June - pages: 222--227 - publisher: UFRGS - title: A Comparison of Open-Source Linux Frameworks for an Augmented Musical Instrument - Implementation - url: http://www.nime.org/proceedings/2019/nime2019_paper043.pdf - year: 2019 + pages: 46--50 + title: New Musical Interfaces and New Music-making Paradigms + url: http://www.nime.org/proceedings/2001/nime2001_046.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: MatusLerner2019 - abstract: 'During the twentieth century several Latin American nations (such as - Argentina, Brazil, Chile, Cuba and Mexico) have originated relevant antecedents - in the NIME field. Their innovative authors have interrelated musical composition, - lutherie, electronics and computing. This paper provides a panoramic view of their - original electronic instruments and experimental sound practices, as well as a - perspective of them regarding other inventions around the World.' - address: 'Porto Alegre, Brazil' - author: 'Lerner, Martin Matus' - bibtex: "@inproceedings{MatusLerner2019,\n abstract = {During the twentieth century\ - \ several Latin American nations (such as Argentina, Brazil, Chile, Cuba and Mexico)\ - \ have originated relevant antecedents in the NIME field. 
Their innovative authors\ - \ have interrelated musical composition, lutherie, electronics and computing.\ - \ This paper provides a panoramic view of their original electronic instruments\ - \ and experimental sound practices, as well as a perspective of them regarding\ - \ other inventions around the World.},\n address = {Porto Alegre, Brazil},\n author\ - \ = {Lerner, Martin Matus},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672936},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {228--233},\n publisher = {UFRGS},\n title = {Latin American\ - \ {NIME}s: Electronic Musical Instruments and Experimental Sound Devices in the\ - \ Twentieth Century},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper044.pdf},\n\ - \ year = {2019}\n}\n" + ID: Robson2001 + abstract: 'This paper reviews a number of projects that explore building electronic + musical things, interfaces and objects designed to be used and enjoyed by anybody + but in particular those who do not see themselves as naturally musical. On reflecting + on the strengths of these projects, interesting directions for similar work in + the future are considered.' + address: 'Seattle, WA' + author: 'Robson, Dominic' + bibtex: "@inproceedings{Robson2001,\n abstract = {This paper reviews a number of\ + \ projects that explore building electronic musical things, interfaces and objects\ + \ designed to be used and enjoyed by anybody but in particular those who do not\ + \ see themselves as naturally musical. 
On reflecting on the strengths of these\ + \ projects, interesting directions for similar work in the future are considered.},\n\ + \ address = {Seattle, WA},\n author = {Robson, Dominic},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176376},\n issn = {2220-4806},\n\ + \ pages = {51--53},\n title = {PLAY! : Sound Toys For the Non Musical},\n url\ + \ = {http://www.nime.org/proceedings/2001/nime2001_051.pdf},\n year = {2001}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672936 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176376 issn: 2220-4806 - month: June - pages: 228--233 - publisher: UFRGS - title: 'Latin American NIMEs: Electronic Musical Instruments and Experimental Sound - Devices in the Twentieth Century' - url: http://www.nime.org/proceedings/2019/nime2019_paper044.pdf - year: 2019 + pages: 51--53 + title: 'PLAY! : Sound Toys For the Non Musical' + url: http://www.nime.org/proceedings/2001/nime2001_051.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: Reid2019 - abstract: 'This paper presents four years of development in performance and compositional - practice on an electronically augmented trumpet called MIGSI. Discussion is focused - on conceptual and technical approaches to data mapping, sonic interaction, and - composition that are inspired by philosophical questions of time: what is now? - Is time linear or multi-directional? Can we operate in multiple modes of temporal - perception simultaneously? A number of mapping strategies are presented which - explore these ideas through the manipulation of temporal separation between user - input and sonic output. 
In addition to presenting technical progress, this paper - will introduce a body of original repertoire composed for MIGSI, in order to illustrate - how these tools and approaches have been utilized in live performance and how - they may find use in other creative applications.' - address: 'Porto Alegre, Brazil' - author: Sarah Reid and Ryan Gaston and Ajay Kapur - bibtex: "@inproceedings{Reid2019,\n abstract = {This paper presents four years of\ - \ development in performance and compositional practice on an electronically augmented\ - \ trumpet called MIGSI. Discussion is focused on conceptual and technical approaches\ - \ to data mapping, sonic interaction, and composition that are inspired by philosophical\ - \ questions of time: what is now? Is time linear or multi-directional? Can we\ - \ operate in multiple modes of temporal perception simultaneously? A number of\ - \ mapping strategies are presented which explore these ideas through the manipulation\ - \ of temporal separation between user input and sonic output. 
In addition to presenting\ - \ technical progress, this paper will introduce a body of original repertoire\ - \ composed for MIGSI, in order to illustrate how these tools and approaches have\ - \ been utilized in live performance and how they may find use in other creative\ - \ applications.},\n address = {Porto Alegre, Brazil},\n author = {Sarah Reid and\ - \ Ryan Gaston and Ajay Kapur},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672940},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {234--239},\n publisher = {UFRGS},\n title = {Perspectives\ - \ on Time: performance practice, mapping strategies, \\& composition with {MIGSI}},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_paper045.pdf},\n year =\ - \ {2019}\n}\n" + ID: Ulyate2001 + abstract: 'In 1998 we designed enabling technology and a venue concept that allowed + several participants to influence a shared musical and visual experience. Our + primary goal was to deliver musically coherent and visually satisfying results + from several participants'' input. The result, the Interactive Dance Club, ran + for four nights at the ACM SIGGRAPH 98 convention in Orlando, Florida.In this + paper we will briefly describe the Interactive Dance Club, our "10 Commandments + of Interactivity", and what we learned from it''s premiere at SIGGRAPH 98.' + address: 'Seattle, WA' + author: 'Ulyate, Ryan and Bianciardi, David' + bibtex: "@inproceedings{Ulyate2001,\n abstract = {In 1998 we designed enabling technology\ + \ and a venue concept that allowed several participants to influence a shared\ + \ musical and visual experience. Our primary goal was to deliver musically coherent\ + \ and visually satisfying results from several participants' input. 
The result,\ + \ the Interactive Dance Club, ran for four nights at the ACM SIGGRAPH 98 convention\ + \ in Orlando, Florida.In this paper we will briefly describe the Interactive Dance\ + \ Club, our \"10 Commandments of Interactivity\", and what we learned from it's\ + \ premiere at SIGGRAPH 98.},\n address = {Seattle, WA},\n author = {Ulyate, Ryan\ + \ and Bianciardi, David},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n doi\ + \ = {10.5281/zenodo.1176378},\n issn = {2220-4806},\n pages = {54--56},\n title\ + \ = {The Interactive Dance Club : Avoiding Chaos In A Multi Participant Environment},\n\ + \ url = {http://www.nime.org/proceedings/2001/nime2001_054.pdf},\n year = {2001}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672940 - editor: Marcelo Queiroz and Anna Xambó Sedó + date: '1-2 April, 2001' + doi: 10.5281/zenodo.1176378 issn: 2220-4806 - month: June - pages: 234--239 - publisher: UFRGS - title: 'Perspectives on Time: performance practice, mapping strategies, & composition - with MIGSI' - url: http://www.nime.org/proceedings/2019/nime2019_paper045.pdf - year: 2019 + pages: 54--56 + title: 'The Interactive Dance Club : Avoiding Chaos In A Multi Participant Environment' + url: http://www.nime.org/proceedings/2001/nime2001_054.pdf + year: 2001 - ENTRYTYPE: inproceedings - ID: Lamounier2019 - abstract: 'The present work explores the design of multimodal interfaces that capture - hand gestures and promote interactions between dance, music and wearable technologic - garment. We aim at studying the design strategies used to interface music to other - domains of the performance, in special, the application of wearable technologies - into music performances. 
The project describes the development of the music and - wearable interfaces, which comprise a hand interface and a mechanical actuator - attached to the dancer''s dress. The performance resulted from the study is inspired - in the butoh dances and attempts to add a technological poetic as music-dance-wearable - interactions to the traditional dialogue between dance and music. ' - address: 'Porto Alegre, Brazil' - author: Natacha Lamounier and Luiz Naveda and Adriana Bicalho - bibtex: "@inproceedings{Lamounier2019,\n abstract = {The present work explores the\ - \ design of multimodal interfaces that capture hand gestures and promote interactions\ - \ between dance, music and wearable technologic garment. We aim at studying the\ - \ design strategies used to interface music to other domains of the performance,\ - \ in special, the application of wearable technologies into music performances.\ - \ The project describes the development of the music and wearable interfaces,\ - \ which comprise a hand interface and a mechanical actuator attached to the dancer's\ - \ dress. The performance resulted from the study is inspired in the butoh dances\ - \ and attempts to add a technological poetic as music-dance-wearable interactions\ - \ to the traditional dialogue between dance and music. 
},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Natacha Lamounier and Luiz Naveda and Adriana Bicalho},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672942},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {240--245},\n\ - \ publisher = {UFRGS},\n title = {The design of technological interfaces for interactions\ - \ between music, dance and garment movements},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper046.pdf},\n\ - \ year = {2019}\n}\n" + ID: Brandtsegg2018 + abstract: 'This paper explores working methods and instrument design for musical + performance sessions (studio and live) where cross-adaptive techniques for audio + processing are utilized. Cross-adaptive processing uses feature extraction methods + and digital processing to allow the actions of one acoustic instrument to influence + the timbre of another. Even though the physical interface for the musician is + the familiar acoustic instrument, the musical dimensions controlled with the actions + on the instrument have been expanded radically. For this reason, and when used + in live performance, the cross-adaptive methods constitute new interfaces for + musical expression. Not only do the musician control his or her own instrumental + expression, but the instrumental actions directly influence the timbre of another + instrument in the ensemble, while their own instrument''s sound is modified by + the actions of other musicians. In the present paper we illustrate and discuss + some design issues relating to the configuration and composition of such tools + for different musical situations. Such configurations include among other things + the mapping of modulators, the choice of applied effects and processing methods.' 
+ address: 'Blacksburg, Virginia, USA' + author: Oeyvind Brandtsegg and Trond Engum and Bernt Isak Wærstad + bibtex: "@inproceedings{Brandtsegg2018,\n abstract = {This paper explores working\ + \ methods and instrument design for musical performance sessions (studio and live)\ + \ where cross-adaptive techniques for audio processing are utilized. Cross-adaptive\ + \ processing uses feature extraction methods and digital processing to allow the\ + \ actions of one acoustic instrument to influence the timbre of another. Even\ + \ though the physical interface for the musician is the familiar acoustic instrument,\ + \ the musical dimensions controlled with the actions on the instrument have been\ + \ expanded radically. For this reason, and when used in live performance, the\ + \ cross-adaptive methods constitute new interfaces for musical expression. Not\ + \ only do the musician control his or her own instrumental expression, but the\ + \ instrumental actions directly influence the timbre of another instrument in\ + \ the ensemble, while their own instrument's sound is modified by the actions\ + \ of other musicians. In the present paper we illustrate and discuss some design\ + \ issues relating to the configuration and composition of such tools for different\ + \ musical situations. 
Such configurations include among other things the mapping\ + \ of modulators, the choice of applied effects and processing methods.},\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {Oeyvind Brandtsegg and Trond Engum\ + \ and Bernt Isak Wærstad},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302649},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {1--6},\n publisher = {Virginia\ + \ Tech},\n title = {Working methods and instrument design for cross-adaptive sessions},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0001.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672942 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302649 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 240--245 - publisher: UFRGS - title: 'The design of technological interfaces for interactions between music, dance - and garment movements' - url: http://www.nime.org/proceedings/2019/nime2019_paper046.pdf - year: 2019 + pages: 1--6 + publisher: Virginia Tech + title: Working methods and instrument design for cross-adaptive sessions + url: http://www.nime.org/proceedings/2018/nime2018_paper0001.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: AlarconDiaz2019 - abstract: 'INTIMAL is a physical virtual embodied system for relational listening - that integrates body movement, oral archives, and voice expression through telematic - improvisatory performance in migratory contexts. It has been informed by nine - Colombian migrant women who express their migratory journeys through free body - movement, voice and spoken word improvisation. 
These improvisations have been - recorded using Motion Capture, in order to develop interfaces for co-located and - telematic interactions for the sharing of narratives of migration. In this paper, - using data from the Motion Capture experiments, we are exploring two specific - movements from improvisers: displacements on space (walking, rotating), and breathing - data. Here we envision how co-relations between walking and breathing, might be - further studied to implement interfaces that help the making of connections between - place, and the feeling of presence for people in-between distant locations.' - address: 'Porto Alegre, Brazil' - author: Ximena Alarcon Diaz and Victor Evaristo Gonzalez Sanchez and Cagri Erdem - bibtex: "@inproceedings{AlarconDiaz2019,\n abstract = {INTIMAL is a physical virtual\ - \ embodied system for relational listening that integrates body movement, oral\ - \ archives, and voice expression through telematic improvisatory performance in\ - \ migratory contexts. It has been informed by nine Colombian migrant women who\ - \ express their migratory journeys through free body movement, voice and spoken\ - \ word improvisation. These improvisations have been recorded using Motion Capture,\ - \ in order to develop interfaces for co-located and telematic interactions for\ - \ the sharing of narratives of migration. In this paper, using data from the Motion\ - \ Capture experiments, we are exploring two specific movements from improvisers:\ - \ displacements on space (walking, rotating), and breathing data. 
Here we envision\ - \ how co-relations between walking and breathing, might be further studied to\ - \ implement interfaces that help the making of connections between place, and\ - \ the feeling of presence for people in-between distant locations.},\n address\ - \ = {Porto Alegre, Brazil},\n author = {Ximena Alarcon Diaz and Victor Evaristo\ - \ Gonzalez Sanchez and Cagri Erdem},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672944},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {246--249},\n publisher = {UFRGS},\n title = {{INTIMAL}:\ - \ Walking to Find Place, Breathing to Feel Presence},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper047.pdf},\n\ - \ year = {2019}\n}\n" + ID: Egozy2018 + abstract: "*12* is chamber music work composed with the goal of letting audience\ + \ members have an engaging, individualized, and influential role in live music\ + \ performance using their mobile phones as custom tailored musical instruments.\ + \ The goals of direct music making, meaningful communication, intuitive interfaces,\ + \ and technical transparency led to a design that purposefully limits the number\ + \ of participating audience members, balances the tradeoffs between interface\ + \ simplicity and control, and prioritizes the role of a graphics and animation\ + \ display system that is both functional and aesthetically integrated. Survey\ + \ results from the audience and stage musicians show a successful and engaging\ + \ experience, and also illuminate the path towards future improvements." 
+ address: 'Blacksburg, Virginia, USA' + author: Eran Egozy and Eun Young Lee + bibtex: "@inproceedings{Egozy2018,\n abstract = {*12* is chamber music work composed\ + \ with the goal of letting audience members have an engaging, individualized,\ + \ and influential role in live music performance using their mobile phones as\ + \ custom tailored musical instruments. The goals of direct music making, meaningful\ + \ communication, intuitive interfaces, and technical transparency led to a design\ + \ that purposefully limits the number of participating audience members, balances\ + \ the tradeoffs between interface simplicity and control, and prioritizes the\ + \ role of a graphics and animation display system that is both functional and\ + \ aesthetically integrated. Survey results from the audience and stage musicians\ + \ show a successful and engaging experience, and also illuminate the path towards\ + \ future improvements.},\n address = {Blacksburg, Virginia, USA},\n author = {Eran\ + \ Egozy and Eun Young Lee},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302655},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {7--12},\n publisher = {Virginia\ + \ Tech},\n title = {*12*: Mobile Phone-Based Audience Participation in a Chamber\ + \ Music Performance},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0002.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672944 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302655 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 246--249 - publisher: UFRGS - title: 'INTIMAL: Walking to Find Place, Breathing to Feel Presence' - url: 
http://www.nime.org/proceedings/2019/nime2019_paper047.pdf - year: 2019 + pages: 7--12 + publisher: Virginia Tech + title: "*12*: Mobile Phone-Based Audience Participation in a Chamber Music Performance" + url: http://www.nime.org/proceedings/2018/nime2018_paper0002.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Sardana2019 - abstract: "Locus is a NIME designed specifically for an interactive, immersive high\ - \ density loudspeaker array environment. The system is based on a pointing mechanism\ - \ to interact with a sound scene comprising 128 speakers. Users can point anywhere\ - \ to interact with the system, and the spatial interaction utilizes motion capture,\ - \ so it does not require a screen. Instead, it is completely controlled via hand\ - \ gestures using a glove that is populated with motion-tracking markers.\n\nThe\ - \ main purpose of this system is to offer intuitive physical interaction with\ - \ the perimeter-based spatial sound sources. Further, its goal is to minimize\ - \ user-worn technology and thereby enhance freedom of motion by utilizing environmental\ - \ sensing devices, such as motion capture cameras or infrared sensors. The ensuing\ - \ creativity enabling technology is applicable to a broad array of possible scenarios,\ - \ from researching limits of human spatial hearing perception to facilitating\ - \ learning and artistic performances, including dance. In this paper, we describe\ - \ our NIME design and implementation, its preliminary assessment, and offer a\ - \ Unity-based toolkit to facilitate its broader deployment and adoption." - address: 'Porto Alegre, Brazil' - author: Disha Sardana and Woohun Joo and Ivica Ico Bukvic and Greg Earle - bibtex: "@inproceedings{Sardana2019,\n abstract = {Locus is a NIME designed specifically\ - \ for an interactive, immersive high density loudspeaker array environment. The\ - \ system is based on a pointing mechanism to interact with a sound scene comprising\ - \ 128 speakers. 
Users can point anywhere to interact with the system, and the\ - \ spatial interaction utilizes motion capture, so it does not require a screen.\ - \ Instead, it is completely controlled via hand gestures using a glove that is\ - \ populated with motion-tracking markers.\n\nThe main purpose of this system is\ - \ to offer intuitive physical interaction with the perimeter-based spatial sound\ - \ sources. Further, its goal is to minimize user-worn technology and thereby enhance\ - \ freedom of motion by utilizing environmental sensing devices, such as motion\ - \ capture cameras or infrared sensors. The ensuing creativity enabling technology\ - \ is applicable to a broad array of possible scenarios, from researching limits\ - \ of human spatial hearing perception to facilitating learning and artistic performances,\ - \ including dance. In this paper, we describe our NIME design and implementation,\ - \ its preliminary assessment, and offer a Unity-based toolkit to facilitate its\ - \ broader deployment and adoption.},\n address = {Porto Alegre, Brazil},\n author\ - \ = {Disha Sardana and Woohun Joo and Ivica Ico Bukvic and Greg Earle},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.3672946},\n editor = {Marcelo Queiroz and\ - \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {250--255},\n\ - \ publisher = {UFRGS},\n title = {Introducing Locus: a {NIME} for Immersive Exocentric\ - \ Aural Environments},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper048.pdf},\n\ - \ year = {2019}\n}\n" + ID: Lind2018 + abstract: 'The Max Maestro – an animated music notation system was developed to + enable the exploration of artistic possibilities for composition and performance + practices within the field of contemporary art music, more specifically, to enable + a large crowd of non-professional performers regardless of their musical background + to perform a 
fixed music compositions written in multiple individual parts. Furthermore, + the Max Maestro was developed to facilitate concert hall performances where non-professional + performers could be synchronised with an electronic music part. This paper presents + the background, the content and the artistic ideas with the Max Maestro system + and gives two examples of live concert hall performances where the Max Maestro + was used. An artistic research approach with an auto ethnographic method was adopted + for the study. This paper contributes with new knowledge to the field of animated + music notation.' + address: 'Blacksburg, Virginia, USA' + author: Anders Lind + bibtex: "@inproceedings{Lind2018,\n abstract = {The Max Maestro – an animated music\ + \ notation system was developed to enable the exploration of artistic possibilities\ + \ for composition and performance practices within the field of contemporary art\ + \ music, more specifically, to enable a large crowd of non-professional performers\ + \ regardless of their musical background to perform a fixed music compositions\ + \ written in multiple individual parts. Furthermore, the Max Maestro was developed\ + \ to facilitate concert hall performances where non-professional performers could\ + \ be synchronised with an electronic music part. This paper presents the background,\ + \ the content and the artistic ideas with the Max Maestro system and gives two\ + \ examples of live concert hall performances where the Max Maestro was used. An\ + \ artistic research approach with an auto ethnographic method was adopted for\ + \ the study. 
This paper contributes with new knowledge to the field of animated\ + \ music notation.},\n address = {Blacksburg, Virginia, USA},\n author = {Anders\ + \ Lind},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1302657},\n editor = {Luke\ + \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ + \ {2220-4806},\n month = {June},\n pages = {13--18},\n publisher = {Virginia Tech},\n\ + \ title = {Animated Notation in Multiple Parts for Crowd of Non-professional Performers},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0003.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672946 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302657 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 250--255 - publisher: UFRGS - title: 'Introducing Locus: a NIME for Immersive Exocentric Aural Environments' - url: http://www.nime.org/proceedings/2019/nime2019_paper048.pdf - year: 2019 + pages: 13--18 + publisher: Virginia Tech + title: Animated Notation in Multiple Parts for Crowd of Non-professional Performers + url: http://www.nime.org/proceedings/2018/nime2018_paper0003.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Ho2019 - abstract: 'This paper presents an ongoing process of examining and reinventing the - Guqin, to forge a contemporary engagement with this unique traditional Chinese - string instrument. The SlowQin is both a hybrid resemblance of the Guqin and a - fully functioning wireless interface to interact with computer software. It has - been developed and performed during the last decade. 
Instead of aiming for virtuosic - perfection of playing the instrument, SlowQin emphasizes the openness for continuously - rethinking and reinventing the Guqin''s possibilities. Through a combination of - conceptual work and practical production, Echo Ho''s SlowQin project works as - an experimental twist on Historically Informed Performance, with the motivation - of conveying artistic gestures that tackle philosophical, ideological, and socio-political - subjects embedded in our living environment in globalised conditions. In particular, - this paper touches the history of the Guqin, gives an overview of the technical - design concepts of the instrument, and discusses the aesthetical approaches of - the SlowQin performances that have been realised so far.' - address: 'Porto Alegre, Brazil' - author: Echo Ho and Prof. Dr. Phil. Alberto de Campo and Hannes Hoelzl - bibtex: "@inproceedings{Ho2019,\n abstract = {This paper presents an ongoing process\ - \ of examining and reinventing the Guqin, to forge a contemporary engagement with\ - \ this unique traditional Chinese string instrument. The SlowQin is both a hybrid\ - \ resemblance of the Guqin and a fully functioning wireless interface to interact\ - \ with computer software. It has been developed and performed during the last\ - \ decade. Instead of aiming for virtuosic perfection of playing the instrument,\ - \ SlowQin emphasizes the openness for continuously rethinking and reinventing\ - \ the Guqin's possibilities. Through a combination of conceptual work and practical\ - \ production, Echo Ho's SlowQin project works as an experimental twist on Historically\ - \ Informed Performance, with the motivation of conveying artistic gestures that\ - \ tackle philosophical, ideological, and socio-political subjects embedded in\ - \ our living environment in globalised conditions. 
In particular, this paper touches\ - \ the history of the Guqin, gives an overview of the technical design concepts\ - \ of the instrument, and discusses the aesthetical approaches of the SlowQin performances\ - \ that have been realised so far.},\n address = {Porto Alegre, Brazil},\n author\ - \ = {Echo Ho and Prof. Dr. Phil. Alberto de Campo and Hannes Hoelzl},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.3672948},\n editor = {Marcelo Queiroz and\ - \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {256--259},\n\ - \ publisher = {UFRGS},\n title = {The SlowQin: An Interdisciplinary Approach to\ - \ reinventing the Guqin},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper049.pdf},\n\ - \ year = {2019}\n}\n" + ID: Brown2018 + abstract: 'Musebots are autonomous musical agents that interact with other musebots + to produce music. Inaugurated in 2015, musebots are now an established practice + in the field of musical metacreation, which aims to automate aspects of creative + practice. Originally musebot development focused on software-only ensembles of + musical agents, coded by a community of developers. More recent experiments have + explored humans interfacing with musebot ensembles in various ways: including + through electronic interfaces in which parametric control of high-level musebot + parameters are used; message-based interfaces which allow human users to communicate + with musebots in their own language; and interfaces through which musebots have + jammed with human musicians. Here we report on the recent developments of human + interaction with musebot ensembles and reflect on some of the implications of + these developments for the design of metacreative music systems.' + address: 'Blacksburg, Virginia, USA' + author: Andrew R. 
Brown and Matthew Horrigan and Arne Eigenfeldt and Toby Gifford + and Daniel Field and Jon McCormack + bibtex: "@inproceedings{Brown2018,\n abstract = {Musebots are autonomous musical\ + \ agents that interact with other musebots to produce music. Inaugurated in 2015,\ + \ musebots are now an established practice in the field of musical metacreation,\ + \ which aims to automate aspects of creative practice. Originally musebot development\ + \ focused on software-only ensembles of musical agents, coded by a community of\ + \ developers. More recent experiments have explored humans interfacing with musebot\ + \ ensembles in various ways: including through electronic interfaces in which\ + \ parametric control of high-level musebot parameters are used; message-based\ + \ interfaces which allow human users to communicate with musebots in their own\ + \ language; and interfaces through which musebots have jammed with human musicians.\ + \ Here we report on the recent developments of human interaction with musebot\ + \ ensembles and reflect on some of the implications of these developments for\ + \ the design of metacreative music systems.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Andrew R. 
Brown and Matthew Horrigan and Arne Eigenfeldt and\ + \ Toby Gifford and Daniel Field and Jon McCormack},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302659},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {19--24},\n publisher = {Virginia Tech},\n title = {Interacting with\ + \ Musebots},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0004.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672948 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302659 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 256--259 - publisher: UFRGS - title: 'The SlowQin: An Interdisciplinary Approach to reinventing the Guqin' - url: http://www.nime.org/proceedings/2019/nime2019_paper049.pdf - year: 2019 + pages: 19--24 + publisher: Virginia Tech + title: Interacting with Musebots + url: http://www.nime.org/proceedings/2018/nime2018_paper0004.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Martin2019 - abstract: 'This paper is about creating digital musical instruments where a predictive - neural network model is integrated into the interactive system. Rather than predicting - symbolic music (e.g., MIDI notes), we suggest that predicting future control data - from the user and precise temporal information can lead to new and interesting - interactive possibilities. We propose that a mixture density recurrent neural - network (MDRNN) is an appropriate model for this task. The predictions can be - used to fill-in control data when the user stops performing, or as a kind of filter - on the user''s own input. 
We present an interactive MDRNN prediction server that - allows rapid prototyping of new NIMEs featuring predictive musical interaction - by recording datasets, training MDRNN models, and experimenting with interaction - modes. We illustrate our system with several example NIMEs applying this idea. - Our evaluation shows that real-time predictive interaction is viable even on single-board - computers and that small models are appropriate for small datasets.' - address: 'Porto Alegre, Brazil' - author: Charles Patrick Martin and Jim Torresen - bibtex: "@inproceedings{Martin2019,\n abstract = {This paper is about creating digital\ - \ musical instruments where a predictive neural network model is integrated into\ - \ the interactive system. Rather than predicting symbolic music (e.g., MIDI notes),\ - \ we suggest that predicting future control data from the user and precise temporal\ - \ information can lead to new and interesting interactive possibilities. We propose\ - \ that a mixture density recurrent neural network (MDRNN) is an appropriate model\ - \ for this task. The predictions can be used to fill-in control data when the\ - \ user stops performing, or as a kind of filter on the user's own input. We present\ - \ an interactive MDRNN prediction server that allows rapid prototyping of new\ - \ NIMEs featuring predictive musical interaction by recording datasets, training\ - \ MDRNN models, and experimenting with interaction modes. We illustrate our system\ - \ with several example NIMEs applying this idea. 
Our evaluation shows that real-time\ - \ predictive interaction is viable even on single-board computers and that small\ - \ models are appropriate for small datasets.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Charles Patrick Martin and Jim Torresen},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672952},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {260--265},\n publisher = {UFRGS},\n\ - \ title = {An Interactive Musical Prediction System with Mixture Density Recurrent\ - \ Neural Networks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper050.pdf},\n\ - \ year = {2019}\n}\n" + ID: Kiefer2018 + abstract: 'We investigate how audio augmented reality can engender new collective + modes of musical expression in the context of a sound art installation, ''Listening + Mirrors'', exploring the creation of interactive sound environments for musicians + and non-musicians alike. ''Listening Mirrors'' is designed to incorporate physical + objects and computational systems for altering the acoustic environment, to enhance + collective listening and challenge traditional musician-instrument performance. + At a formative stage in exploring audio AR technology, we conducted an audience + experience study investigating questions around the potential of audio AR in creating + sound installation environments for collective musical expression. We collected + interview evidence about the participants'' experience and analysed the data with + using a grounded theory approach. The results demonstrated that the technology + has the potential to create immersive spaces where an audience can feel safe to + experiment musically, and showed how AR can intervene in sound perception to instrumentalise + an environment. 
The results also revealed caveats about the use of audio AR, + mainly centred on social inhibition and seamlessness of experience, and finding + a balance between mediated worlds so that there is space for interplay between + the two.' + address: 'Blacksburg, Virginia, USA' + author: Chris Kiefer and Cecile Chevalier + bibtex: "@inproceedings{Kiefer2018,\n abstract = {We investigate how audio augmented\ + \ reality can engender new collective modes of musical expression in the context\ + \ of a sound art installation, 'Listening Mirrors', exploring the creation of\ + \ interactive sound environments for musicians and non-musicians alike. 'Listening\ + \ Mirrors' is designed to incorporate physical objects and computational systems\ + \ for altering the acoustic environment, to enhance collective listening and challenge\ + \ traditional musician-instrument performance. At a formative stage in exploring\ + \ audio AR technology, we conducted an audience experience study investigating\ + \ questions around the potential of audio AR in creating sound installation environments\ + \ for collective musical expression. We collected interview evidence about the\ + \ participants' experience and analysed the data with using a grounded theory\ + \ approach. 
The results demonstrated that the technology has the potential to\ + \ create immersive spaces where an audience can feel safe to experiment musically,\ + \ and showed how AR can intervene in sound perception to instrumentalise an environment.\ + \ The results also revealed caveats about the use of audio AR, mainly centred\ + \ on social inhibition and seamlessness of experience, and finding a balance between\ + \ mediated worlds so that there is space for interplay between the two.},\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {Chris Kiefer and Cecile Chevalier},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1302661},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {25--28},\n publisher = {Virginia Tech},\n title =\ + \ {Towards New Modes of Collective Musical Expression through Audio Augmented\ + \ Reality},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0005.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672952 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302661 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 260--265 - publisher: UFRGS - title: An Interactive Musical Prediction System with Mixture Density Recurrent Neural - Networks - url: http://www.nime.org/proceedings/2019/nime2019_paper050.pdf - year: 2019 + pages: 25--28 + publisher: Virginia Tech + title: Towards New Modes of Collective Musical Expression through Audio Augmented + Reality + url: http://www.nime.org/proceedings/2018/nime2018_paper0005.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Bazoge2019 - abstract: 'The paper presents the electronic music performance project Vis Insita - 
implementing the design of experimental instrumental interfaces based on optical - motion capture technology with passive infrared markers (MoCap), and the analysis - of their use in a real scenic presentation context. Because of MoCap''s predisposition - to capture the movements of the body, a lot of research and musical applications - in the performing arts concern dance or the sonification of gesture. For our research, - we wanted to move away from the capture of the human body to analyse the possibilities - of a kinetic object handled by a performer, both in terms of musical expression, - but also in the broader context of a multimodal scenic interpretation.' - address: 'Porto Alegre, Brazil' - author: Nicolas Bazoge and Ronan Gaugne and Florian Nouviale and Valérie Gouranton - and Bruno Bossis - bibtex: "@inproceedings{Bazoge2019,\n abstract = {The paper presents the electronic\ - \ music performance project Vis Insita implementing the design of experimental\ - \ instrumental interfaces based on optical motion capture technology with passive\ - \ infrared markers (MoCap), and the analysis of their use in a real scenic presentation\ - \ context. Because of MoCap's predisposition to capture the movements of the body,\ - \ a lot of research and musical applications in the performing arts concern dance\ - \ or the sonification of gesture. 
For our research, we wanted to move away from\ - \ the capture of the human body to analyse the possibilities of a kinetic object\ - \ handled by a performer, both in terms of musical expression, but also in the\ - \ broader context of a multimodal scenic interpretation.},\n address = {Porto\ - \ Alegre, Brazil},\n author = {Nicolas Bazoge and Ronan Gaugne and Florian Nouviale\ - \ and Valérie Gouranton and Bruno Bossis},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672954},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {266--271},\n publisher = {UFRGS},\n title = {Expressive\ - \ potentials of motion capture in musical performance},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper051.pdf},\n\ - \ year = {2019}\n}\n" + ID: Matsuura2018 + abstract: 'Aphysical Unmodeling Instrument is the title of a sound installation + that re-physicalizes the Whirlwind meta-wind-instrument physical model. We re-implemented + the Whirlwind by using real-world physical objects to comprise a sound installation. + The sound propagation between a speaker and microphone was used as the delay, + and a paper cylinder was employed as the resonator. This paper explains the concept + and implementation of this work at the 2017 HANARART exhibition. We examine the + characteristics of the work, address its limitations, and discuss the possibility + of its interpretation by means of a “re-physicalization.”' + address: 'Blacksburg, Virginia, USA' + author: Tomoya Matsuura and kazuhiro jo + bibtex: "@inproceedings{Matsuura2018,\n abstract = {Aphysical Unmodeling Instrument\ + \ is the title of a sound installation that re-physicalizes the Whirlwind meta-wind-instrument\ + \ physical model. We re-implemented the Whirlwind by using real-world physical\ + \ objects to comprise a sound installation. 
The sound propagation between a speaker\ + \ and microphone was used as the delay, and a paper cylinder was employed as the\ + \ resonator. This paper explains the concept and implementation of this work at\ + \ the 2017 HANARART exhibition. We examine the characteristics of the work, address\ + \ its limitations, and discuss the possibility of its interpretation by means\ + \ of a “re-physicalization.”},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {Tomoya Matsuura and kazuhiro jo},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302663},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {29--30},\n publisher = {Virginia\ + \ Tech},\n title = {Aphysical Unmodeling Instrument: Sound Installation that Re-Physicalizes\ + \ a Meta-Wind-Instrument Physical Model, Whirlwind},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0006.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672954 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302663 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 266--271 - publisher: UFRGS - title: Expressive potentials of motion capture in musical performance - url: http://www.nime.org/proceedings/2019/nime2019_paper051.pdf - year: 2019 + pages: 29--30 + publisher: Virginia Tech + title: 'Aphysical Unmodeling Instrument: Sound Installation that Re-Physicalizes + a Meta-Wind-Instrument Physical Model, Whirlwind' + url: http://www.nime.org/proceedings/2018/nime2018_paper0006.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: VanTroyer2019 - abstract: 'This paper explores the potential of image-to-image translation techniques - in aiding the design of new 
hardware-based musical interfaces such as MIDI keyboard, - grid-based controller, drum machine, and analog modular synthesizers. We collected - an extensive image database of such interfaces and implemented image-to-image - translation techniques using variants of Generative Adversarial Networks. The - created models learn the mapping between input and output images using a training - set of either paired or unpaired images. We qualitatively assess the visual outcomes - based on three image-to-image translation models: reconstructing interfaces from - edge maps, and collection style transfers based on two image sets: visuals of - mosaic tile patterns and geometric abstract two-dimensional arts. This paper aims - to demonstrate that synthesizing interface layouts based on image-to-image translation - techniques can yield insights for researchers, musicians, music technology industrial - designers, and the broader NIME community.' - address: 'Porto Alegre, Brazil' - author: Akito Van Troyer and Rebecca Kleinberger - bibtex: "@inproceedings{VanTroyer2019,\n abstract = {This paper explores the potential\ - \ of image-to-image translation techniques in aiding the design of new hardware-based\ - \ musical interfaces such as MIDI keyboard, grid-based controller, drum machine,\ - \ and analog modular synthesizers. We collected an extensive image database of\ - \ such interfaces and implemented image-to-image translation techniques using\ - \ variants of Generative Adversarial Networks. The created models learn the mapping\ - \ between input and output images using a training set of either paired or unpaired\ - \ images. We qualitatively assess the visual outcomes based on three image-to-image\ - \ translation models: reconstructing interfaces from edge maps, and collection\ - \ style transfers based on two image sets: visuals of mosaic tile patterns and\ - \ geometric abstract two-dimensional arts. 
This paper aims to demonstrate that\ - \ synthesizing interface layouts based on image-to-image translation techniques\ - \ can yield insights for researchers, musicians, music technology industrial designers,\ - \ and the broader NIME community.},\n address = {Porto Alegre, Brazil},\n author\ - \ = {Akito Van Troyer and Rebecca Kleinberger},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.3672956},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {272--277},\n publisher = {UFRGS},\n\ - \ title = {From Mondrian to Modular Synth: Rendering {NIME} using Generative Adversarial\ - \ Networks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper052.pdf},\n\ - \ year = {2019}\n}\n" + ID: Holbrook2018 + abstract: Many common and popular sound spatialisation techniques and methods rely + on listeners being positioned in a "sweet-spot" for an optimal listening position + in a circle of speakers. This paper discusses a stochastic spatialisation method + and its first iteration as implemented for the exhibition Hot Pocket at The Museum + of Contemporary Art in Oslo in 2017. This method is implemented in Max and offers + a matrix-based amplitude panning methodology which can provide a flexible means + for the spatialialisation of sounds. + address: 'Blacksburg, Virginia, USA' + author: Ulf A. S. Holbrook + bibtex: "@inproceedings{Holbrook2018,\n abstract = {Many common and popular sound\ + \ spatialisation techniques and methods rely on listeners being positioned in\ + \ a \"sweet-spot\" for an optimal listening position in a circle of speakers.\ + \ This paper discusses a stochastic spatialisation method and its first iteration\ + \ as implemented for the exhibition Hot Pocket at The Museum of Contemporary Art\ + \ in Oslo in 2017. 
This method is implemented in Max and offers a matrix-based\ + \ amplitude panning methodology which can provide a flexible means for the spatialialisation\ + \ of sounds.},\n address = {Blacksburg, Virginia, USA},\n author = {Ulf A. S.\ + \ Holbrook},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302665},\n editor\ + \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {31--32},\n publisher = {Virginia\ + \ Tech},\n title = {An approach to stochastic spatialization --- A case of Hot\ + \ Pocket},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0007.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672956 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302665 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 272--277 - publisher: UFRGS - title: 'From Mondrian to Modular Synth: Rendering NIME using Generative Adversarial - Networks' - url: http://www.nime.org/proceedings/2019/nime2019_paper052.pdf - year: 2019 + pages: 31--32 + publisher: Virginia Tech + title: An approach to stochastic spatialization --- A case of Hot Pocket + url: http://www.nime.org/proceedings/2018/nime2018_paper0007.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Pardue2019 - abstract: 'When designing an augmented acoustic instrument, it is often of interest - to retain an instrument''s sound quality and nuanced response while leveraging - the richness of digital synthesis. Digital audio has traditionally been generated - through speakers, separating sound generation from the instrument itself, or by - adding an actuator within the instrument''s resonating body, imparting new sounds - along with the original. 
We offer a third option, isolating the playing interface - from the actuated resonating body, allowing us to rewrite the relationship between - performance action and sound result while retaining the general form and feel - of the acoustic instrument. We present a hybrid acoustic-electronic violin based - on a stick-body electric violin and an electrodynamic polyphonic pick-up capturing - individual string displacements. A conventional violin body acts as the resonator, - actuated using digitally altered audio of the string inputs. By attaching the - electric violin above the body with acoustic isolation, we retain the physical - playing experience of a normal violin along with some of the acoustic filtering - and radiation of a traditional build. We propose the use of the hybrid instrument - with digitally automated pitch and tone correction to make an easy violin for - use as a potential motivational tool for beginning violinists.' - address: 'Porto Alegre, Brazil' - author: Laurel Pardue and Kurijn Buys and Dan Overholt and Andrew P. McPherson and - Michael Edinger - bibtex: "@inproceedings{Pardue2019,\n abstract = {When designing an augmented acoustic\ - \ instrument, it is often of interest to retain an instrument's sound quality\ - \ and nuanced response while leveraging the richness of digital synthesis. Digital\ - \ audio has traditionally been generated through speakers, separating sound generation\ - \ from the instrument itself, or by adding an actuator within the instrument's\ - \ resonating body, imparting new sounds along with the original. We offer a third\ - \ option, isolating the playing interface from the actuated resonating body, allowing\ - \ us to rewrite the relationship between performance action and sound result while\ - \ retaining the general form and feel of the acoustic instrument. 
We present\ - \ a hybrid acoustic-electronic violin based on a stick-body electric violin and\ - \ an electrodynamic polyphonic pick-up capturing individual string displacements.\ - \ A conventional violin body acts as the resonator, actuated using digitally\ - \ altered audio of the string inputs. By attaching the electric violin above\ - \ the body with acoustic isolation, we retain the physical playing experience\ - \ of a normal violin along with some of the acoustic filtering and radiation of\ - \ a traditional build. We propose the use of the hybrid instrument with digitally\ - \ automated pitch and tone correction to make an easy violin for use as a potential\ - \ motivational tool for beginning violinists.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Laurel Pardue and Kurijn Buys and Dan Overholt and Andrew P. McPherson\ - \ and Michael Edinger},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672958},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {278--283},\n publisher = {UFRGS},\n title = {Separating\ - \ sound from source: sonic transformation of the violin through electrodynamic\ - \ pickups and acoustic actuation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper053.pdf},\n\ - \ year = {2019}\n}\n" + ID: Champion2018 + abstract: 'AM MODE is a custom-designed software interface for electronic augmentation + of the acoustic drum set. The software is used in the development a series of + recordings, similarly titled as AM MODE. Programmed in Max/MSP, the software uses + live audio input from individual instruments within the drum set as control parameters + for modulation synthesis. 
By using a combination of microphones and MIDI triggers, + audio signal features such as the velocity of the strike of the drum, or the frequency + at which the drum resonates, are tracked, interpolated, and scaled to user specifications. + The resulting series of recordings is comprised of the digitally generated output + of the modulation engine, in addition to both raw and modulated signals from the + acoustic drum set. In this way, this project explores drum set augmentation not + only at the input and from a performative angle, but also at the output, where + the acoustic and the synthesized elements are merged into each other, forming + a sonic hybrid. ' + address: 'Blacksburg, Virginia, USA' + author: Cory Champion and Mo H Zareei + bibtex: "@inproceedings{Champion2018,\n abstract = {AM MODE is a custom-designed\ + \ software interface for electronic augmentation of the acoustic drum set. The\ + \ software is used in the development a series of recordings, similarly titled\ + \ as AM MODE. Programmed in Max/MSP, the software uses live audio input from individual\ + \ instruments within the drum set as control parameters for modulation synthesis.\ + \ By using a combination of microphones and MIDI triggers, audio signal features\ + \ such as the velocity of the strike of the drum, or the frequency at which the\ + \ drum resonates, are tracked, interpolated, and scaled to user specifications.\ + \ The resulting series of recordings is comprised of the digitally generated output\ + \ of the modulation engine, in addition to both raw and modulated signals from\ + \ the acoustic drum set. In this way, this project explores drum set augmentation\ + \ not only at the input and from a performative angle, but also at the output,\ + \ where the acoustic and the synthesized elements are merged into each other,\ + \ forming a sonic hybrid. 
},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {Cory Champion and Mo H Zareei},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302667},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {33--34},\n publisher = {Virginia\ + \ Tech},\n title = {AM MODE: Using AM and FM Synthesis for Acoustic Drum Set Augmentation},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0008.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672958 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302667 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 278--283 - publisher: UFRGS - title: 'Separating sound from source: sonic transformation of the violin through - electrodynamic pickups and acoustic actuation' - url: http://www.nime.org/proceedings/2019/nime2019_paper053.pdf - year: 2019 + pages: 33--34 + publisher: Virginia Tech + title: 'AM MODE: Using AM and FM Synthesis for Acoustic Drum Set Augmentation' + url: http://www.nime.org/proceedings/2018/nime2018_paper0008.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Advincula2019 - abstract: 'This paper introduces the Grain Prism, a hybrid of a granular synthesizer - and sampler that, through a capacitive sensing interface presented in obscure - glyphs, invites users to create experimental sound textures with their own recorded - voice. The capacitive sensing system, activated through skin contact over single - glyphs or a combination of them, instigates the user to decipher the hidden sonic - messages. The mysterious interface open space to aleatoricism in the act of conjuring - sound, and therefore new discoveries. 
The users, when forced to abandon preconceived - ways of playing a synthesizer, look at themselves in a different light, as their - voice is the source material.' - address: 'Porto Alegre, Brazil' - author: Gabriela Bila Advincula and Don Derek Haddad and Kent Larson - bibtex: "@inproceedings{Advincula2019,\n abstract = {This paper introduces the Grain\ - \ Prism, a hybrid of a granular synthesizer and sampler that, through a capacitive\ - \ sensing interface presented in obscure glyphs, invites users to create experimental\ - \ sound textures with their own recorded voice. The capacitive sensing system,\ - \ activated through skin contact over single glyphs or a combination of them,\ - \ instigates the user to decipher the hidden sonic messages. The mysterious interface\ - \ open space to aleatoricism in the act of conjuring sound, and therefore new\ - \ discoveries. The users, when forced to abandon preconceived ways of playing\ - \ a synthesizer, look at themselves in a different light, as their voice is the\ - \ source material.},\n address = {Porto Alegre, Brazil},\n author = {Gabriela\ - \ Bila Advincula and Don Derek Haddad and Kent Larson},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672960},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {284--285},\n publisher = {UFRGS},\n\ - \ title = {Grain Prism: Hieroglyphic Interface for Granular Sampling},\n url =\ - \ {http://www.nime.org/proceedings/2019/nime2019_paper054.pdf},\n year = {2019}\n\ - }\n" + ID: Haddad2018 + abstract: 'This paper introduces the Kinesynth, a hybrid kinesthetic synthesizer + that uses the human body as both an analog mixer and as a modulator using a combination + of capacitive sensing in "transmit" mode and skin conductance. 
This is achieved + when the body, through the skin, relays signals from control & audio sources to + the inputs of the instrument. These signals can be harnessed from the environment, + from within the Kinesynth''s internal synthesizer, or from external instrument, + making the Kinesynth a mediator between the body and the environment.' + address: 'Blacksburg, Virginia, USA' + author: Don Derek Haddad and Joe Paradiso + bibtex: "@inproceedings{Haddad2018,\n abstract = {This paper introduces the Kinesynth,\ + \ a hybrid kinesthetic synthesizer that uses the human body as both an analog\ + \ mixer and as a modulator using a combination of capacitive sensing in \"transmit\"\ + \ mode and skin conductance. This is achieved when the body, through the skin,\ + \ relays signals from control & audio sources to the inputs of the instrument.\ + \ These signals can be harnessed from the environment, from within the Kinesynth's\ + \ internal synthesizer, or from external instrument, making the Kinesynth a mediator\ + \ between the body and the environment.},\n address = {Blacksburg, Virginia, USA},\n\ + \ author = {Don Derek Haddad and Joe Paradiso},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1302669},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {35--36},\n publisher = {Virginia Tech},\n title = {Kinesynth: Patching, Modulating,\ + \ and Mixing a Hybrid Kinesthetic Synthesizer.},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0009.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672960 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302669 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - 
pages: 284--285 - publisher: UFRGS - title: 'Grain Prism: Hieroglyphic Interface for Granular Sampling' - url: http://www.nime.org/proceedings/2019/nime2019_paper054.pdf - year: 2019 + pages: 35--36 + publisher: Virginia Tech + title: 'Kinesynth: Patching, Modulating, and Mixing a Hybrid Kinesthetic Synthesizer.' + url: http://www.nime.org/proceedings/2018/nime2018_paper0009.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Bown2019 - abstract: 'We present an audio-focused creative coding toolkit for deploying music - programs to remote networked devices. It is designed to support efficient creative - exploratory search in the context of the Internet of Things (IoT), where one or - more devices must be configured, programmed and interact over a network, with - applications in digital musical instruments, networked music performance and other - digital experiences. Users can easily monitor and hack what multiple devices are - doing on the fly, enhancing their ability to perform ``exploratory search'''' - in a creative workflow. We present two creative case studies using the system: - the creation of a dance performance and the creation of a distributed musical - installation. Analysing different activities within the production process, with - a particular focus on the trade-off between more creative exploratory tasks and - more standard configuring and problem-solving tasks, we show how the system supports - creative exploratory search for multiple networked devices. ' - address: 'Porto Alegre, Brazil' - author: Oliver Bown and Angelo Fraietta and Sam Ferguson and Lian Loke and Liam - Bray - bibtex: "@inproceedings{Bown2019,\n abstract = {We present an audio-focused creative\ - \ coding toolkit for deploying music programs to remote networked devices. 
It\ - \ is designed to support efficient creative exploratory search in the context\ - \ of the Internet of Things (IoT), where one or more devices must be configured,\ - \ programmed and interact over a network, with applications in digital musical\ - \ instruments, networked music performance and other digital experiences. Users\ - \ can easily monitor and hack what multiple devices are doing on the fly, enhancing\ - \ their ability to perform ``exploratory search'' in a creative workflow. We present\ - \ two creative case studies using the system: the creation of a dance performance\ - \ and the creation of a distributed musical installation. Analysing different\ - \ activities within the production process, with a particular focus on the trade-off\ - \ between more creative exploratory tasks and more standard configuring and problem-solving\ - \ tasks, we show how the system supports creative exploratory search for multiple\ - \ networked devices. },\n address = {Porto Alegre, Brazil},\n author = {Oliver\ - \ Bown and Angelo Fraietta and Sam Ferguson and Lian Loke and Liam Bray},\n booktitle\ + ID: Marogna2018 + abstract: 'CABOTO is an interactive system for live performance and composition. + A graphic score sketched on paper is read by a computer vision system. The graphic + elements are scanned following a symbolic-raw hybrid approach, that is, they are + recognised and classified according to their shapes but also scanned as waveforms + and optical signals. All this information is mapped into the synthesis engine, + which implements different kind of synthesis techniques for different shapes. + In CABOTO the score is viewed as a cartographic map explored by some navigators. + These navigators traverse the score in a semi-autonomous way, scanning the graphic + elements found along their paths. 
The system tries to challenge the boundaries + between the concepts of composition, score, performance, instrument, since the + musical result will depend both on the composed score and the way the navigators + will traverse it during the live performance. ' + address: 'Blacksburg, Virginia, USA' + author: Riccardo Marogna + bibtex: "@inproceedings{Marogna2018,\n abstract = {CABOTO is an interactive system\ + \ for live performance and composition. A graphic score sketched on paper is read\ + \ by a computer vision system. The graphic elements are scanned following a symbolic-raw\ + \ hybrid approach, that is, they are recognised and classified according to their\ + \ shapes but also scanned as waveforms and optical signals. All this information\ + \ is mapped into the synthesis engine, which implements different kind of synthesis\ + \ techniques for different shapes. In CABOTO the score is viewed as a cartographic\ + \ map explored by some navigators. These navigators traverse the score in a semi-autonomous\ + \ way, scanning the graphic elements found along their paths. The system tries\ + \ to challenge the boundaries between the concepts of composition, score, performance,\ + \ instrument, since the musical result will depend both on the composed score\ + \ and the way the navigators will traverse it during the live performance. 
},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Riccardo Marogna},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.3672962},\n editor = {Marcelo Queiroz and\ - \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {286--291},\n\ - \ publisher = {UFRGS},\n title = {Facilitating Creative Exploratory Search with\ - \ Multiple Networked Audio Devices Using HappyBrackets},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper055.pdf},\n\ - \ year = {2019}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1302671},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {37--42},\n publisher = {Virginia Tech},\n title =\ + \ {CABOTO: A Graphic-Based Interactive System for Composing and Performing Electronic\ + \ Music},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0010.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672962 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302671 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 286--291 - publisher: UFRGS - title: Facilitating Creative Exploratory Search with Multiple Networked Audio Devices - Using HappyBrackets - url: http://www.nime.org/proceedings/2019/nime2019_paper055.pdf - year: 2019 + pages: 37--42 + publisher: Virginia Tech + title: 'CABOTO: A Graphic-Based Interactive System for Composing and Performing + Electronic Music' + url: http://www.nime.org/proceedings/2018/nime2018_paper0010.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: FernandesSantos2019 - abstract: 'During the musical performance, expert musicians consciously manipulate - acoustical parameters expressing their interpretative choices. 
Also, players make - physical motions and, in many cases, these gestures are related to the musicians'' - artistic intentions. However, it''s not clear if the sound manipulation reflects - in physical motions. The understanding of the musical structure of the work being - performed in its many levels may impact the projection of artistic intentions, - and performers alter it in micro and macro-sections, such as in musical motifs, - phrases and sessions. Therefore, this paper investigates the timing manipulation - and how such variations may reflect in physical gestures. The study involved musicians - (flute, clarinet, and bassoon players) performing a unison excerpt by G. Rossini. - We analyzed the relationship between timing variation (the Inter Onsets Interval - deviations) and physical motion based on the traveled distance of the flute under - different conditions. The flutists were asked to play the musical excerpt in three - experimental conditions: (1) playing solo and playing in duets with previous recordings - by other instrumentalists, (2) clarinetist and (3) bassoonist. The finding suggests - that: 1) the movements, which seem to be related to the sense of pulse, are recurrent - and stable, 2) the timing variability in micro or macro sections reflects in gestures'' - amplitude performed by flutists.' - address: 'Porto Alegre, Brazil' - author: Thais Fernandes Santos - bibtex: "@inproceedings{FernandesSantos2019,\n abstract = {During the musical performance,\ - \ expert musicians consciously manipulate acoustical parameters expressing their\ - \ interpretative choices. Also, players make physical motions and, in many cases,\ - \ these gestures are related to the musicians' artistic intentions. However, it's\ - \ not clear if the sound manipulation reflects in physical motions. 
The understanding\ - \ of the musical structure of the work being performed in its many levels may\ - \ impact the projection of artistic intentions, and performers alter it in micro\ - \ and macro-sections, such as in musical motifs, phrases and sessions. Therefore,\ - \ this paper investigates the timing manipulation and how such variations may\ - \ reflect in physical gestures. The study involved musicians (flute, clarinet,\ - \ and bassoon players) performing a unison excerpt by G. Rossini. We analyzed\ - \ the relationship between timing variation (the Inter Onsets Interval deviations)\ - \ and physical motion based on the traveled distance of the flute under different\ - \ conditions. The flutists were asked to play the musical excerpt in three experimental\ - \ conditions: (1) playing solo and playing in duets with previous recordings by\ - \ other instrumentalists, (2) clarinetist and (3) bassoonist. The finding suggests\ - \ that: 1) the movements, which seem to be related to the sense of pulse, are\ - \ recurrent and stable, 2) the timing variability in micro or macro sections reflects\ - \ in gestures' amplitude performed by flutists.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Thais Fernandes Santos},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672966},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {292--297},\n publisher = {UFRGS},\n title = {The reciprocity\ - \ between ancillary gesture and music structure performed by expert musicians},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_paper056.pdf},\n year =\ - \ {2019}\n}\n" + ID: Oliveira2018 + abstract: 'This paper describes the concept, design, and realization of two iterations + of a new controller called the XT Synth. 
The development of the instrument came + from the desire to maintain the expressivity and familiarity of string instruments, + while adding the flexibility and power usually found in keyboard controllers. + There are different examples of instruments that bring the physicality and expressiveness + of acoustic instruments into electronic music, from “Do it yourself” (DIY) products + to commercially available ones. This paper discusses the process and the challenges + faced when creating a DIY musical instrument and then subsequently transforming + the instrument into a product suitable for commercialization.' + address: 'Blacksburg, Virginia, USA' + author: 'Oliveira da Silveira, Gustavo' + bibtex: "@inproceedings{Oliveira2018,\n abstract = {This paper describes the concept,\ + \ design, and realization of two iterations of a new controller called the XT\ + \ Synth. The development of the instrument came from the desire to maintain the\ + \ expressivity and familiarity of string instruments, while adding the flexibility\ + \ and power usually found in keyboard controllers. There are different examples\ + \ of instruments that bring the physicality and expressiveness of acoustic instruments\ + \ into electronic music, from “Do it yourself” (DIY) products to commercially\ + \ available ones. 
This paper discusses the process and the challenges faced when\ + \ creating a DIY musical instrument and then subsequently transforming the instrument\ + \ into a product suitable for commercialization.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Oliveira da Silveira, Gustavo},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302673},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {43--44},\n publisher = {Virginia Tech},\n title = {The XT Synth: A\ + \ New Controller for String Players},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0011.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672966 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302673 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 292--297 - publisher: UFRGS - title: The reciprocity between ancillary gesture and music structure performed by - expert musicians - url: http://www.nime.org/proceedings/2019/nime2019_paper056.pdf - year: 2019 + pages: 43--44 + publisher: Virginia Tech + title: 'The XT Synth: A New Controller for String Players' + url: http://www.nime.org/proceedings/2018/nime2018_paper0011.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Paisa2019 - abstract: 'This project describes a novel approach to hybrid electro-acoustical - instruments by augmenting the Sensel Morph, with real-time audio sensing capabilities. - The actual action-sounds are captured with a piezoelectric transducer and processed - in Max 8 to extend the sonic range existing in the acoustical domain alone. 
The - control parameters are captured by the Morph and mapped to audio algorithm proprieties - like filter cutoff frequency, frequency shift or overdrive. The instrument opens - up the possibility for a large selection of different interaction techniques that - have a direct impact on the output sound. The instrument is evaluated from a sound - designer''s perspective, encouraging exploration in the materials used as well - as techniques. The contribution are two-fold. First, the use of a piezo transducer - to augment the Sensel Morph affords an extra dimension of control on top of the - offerings. Second, the use of acoustic sounds from physical interactions as a - source for excitation and manipulation of an audio processing system offers a - large variety of new sounds to be discovered. The methodology involved an exploratory - process of iterative instrument making, interspersed with observations gathered - via improvisatory trials, focusing on the new interactions made possible through - the fusion of audio-rate inputs with the Morph''s default interaction methods.' - address: 'Porto Alegre, Brazil' - author: Razvan Paisa and Dan Overholt - bibtex: "@inproceedings{Paisa2019,\n abstract = {This project describes a novel\ - \ approach to hybrid electro-acoustical instruments by augmenting the Sensel Morph,\ - \ with real-time audio sensing capabilities. The actual action-sounds are captured\ - \ with a piezoelectric transducer and processed in Max 8 to extend the sonic range\ - \ existing in the acoustical domain alone. The control parameters are captured\ - \ by the Morph and mapped to audio algorithm proprieties like filter cutoff frequency,\ - \ frequency shift or overdrive. The instrument opens up the possibility for a\ - \ large selection of different interaction techniques that have a direct impact\ - \ on the output sound. 
The instrument is evaluated from a sound designer's perspective,\ - \ encouraging exploration in the materials used as well as techniques. The contribution\ - \ are two-fold. First, the use of a piezo transducer to augment the Sensel Morph\ - \ affords an extra dimension of control on top of the offerings. Second, the use\ - \ of acoustic sounds from physical interactions as a source for excitation and\ - \ manipulation of an audio processing system offers a large variety of new sounds\ - \ to be discovered. The methodology involved an exploratory process of iterative\ - \ instrument making, interspersed with observations gathered via improvisatory\ - \ trials, focusing on the new interactions made possible through the fusion of\ - \ audio-rate inputs with the Morph's default interaction methods.},\n address\ - \ = {Porto Alegre, Brazil},\n author = {Razvan Paisa and Dan Overholt},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.3672968},\n editor = {Marcelo Queiroz and\ - \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {298--302},\n\ - \ publisher = {UFRGS},\n title = {Enhancing the Expressivity of the Sensel Morph\ - \ via Audio-rate Sensing},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper057.pdf},\n\ - \ year = {2019}\n}\n" + ID: Bin2018 + abstract: 'This paper presents a study examining the effects of disfluent design + on audience perception of digital musical instrument (DMI) performance. Disfluency, + defined as a barrier to effortless cognitive processing, has been shown to generate + better results in some contexts as it engages higher levels of cognition. We were + motivated to determine if disfluent design in a DMI would result in a risk state + that audiences would be able to perceive, and if this would have any effect on + their evaluation of the performance. 
A DMI was produced that incorporated a disfluent + characteristic: It would turn itself off if not constantly moved. Six physically + identical instruments were produced, each in one of three versions: Control (no + disfluent characteristics), mild disfluency (turned itself off slowly), and heightened + disfluency (turned itself off more quickly). 6 percussionists each performed on + one instrument for a live audience (N=31), and data was collected in the form + of real-time feedback (via a mobile phone app), and post-hoc surveys. Though there + was little difference in ratings of enjoyment between the versions of the instrument, + the real-time and qualitative data suggest that disfluent behaviour in a DMI may + be a way for audiences to perceive and appreciate performer skill.' + address: 'Blacksburg, Virginia, USA' + author: S. M. Astrid Bin and Nick Bryan-Kinns and Andrew P. McPherson + bibtex: "@inproceedings{Bin2018,\n abstract = {This paper presents a study examining\ + \ the effects of disfluent design on audience perception of digital musical instrument\ + \ (DMI) performance. Disfluency, defined as a barrier to effortless cognitive\ + \ processing, has been shown to generate better results in some contexts as it\ + \ engages higher levels of cognition. We were motivated to determine if disfluent\ + \ design in a DMI would result in a risk state that audiences would be able to\ + \ perceive, and if this would have any effect on their evaluation of the performance.\ + \ A DMI was produced that incorporated a disfluent characteristic: It would turn\ + \ itself off if not constantly moved. Six physically identical instruments were\ + \ produced, each in one of three versions: Control (no disfluent characteristics),\ + \ mild disfluency (turned itself off slowly), and heightened disfluency (turned\ + \ itself off more quickly). 
6 percussionists each performed on one instrument\ + \ for a live audience (N=31), and data was collected in the form of real-time\ + \ feedback (via a mobile phone app), and post-hoc surveys. Though there was little\ + \ difference in ratings of enjoyment between the versions of the instrument, the\ + \ real-time and qualitative data suggest that disfluent behaviour in a DMI may\ + \ be a way for audiences to perceive and appreciate performer skill.},\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {S. M. Astrid Bin and Nick Bryan-Kinns\ + \ and Andrew P. McPherson},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302675},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {45--50},\n publisher = {Virginia\ + \ Tech},\n title = {Risky business: Disfluency as a design strategy},\n url =\ + \ {http://www.nime.org/proceedings/2018/nime2018_paper0012.pdf},\n year = {2018}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672968 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302675 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 298--302 - publisher: UFRGS - title: Enhancing the Expressivity of the Sensel Morph via Audio-rate Sensing - url: http://www.nime.org/proceedings/2019/nime2019_paper057.pdf - year: 2019 + pages: 45--50 + publisher: Virginia Tech + title: 'Risky business: Disfluency as a design strategy' + url: http://www.nime.org/proceedings/2018/nime2018_paper0012.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Ramos2019 - abstract: 'This paper presents a description of the design and usage of Eolos, a - wireless MIDI wind controller. 
The main goal of Eolos is to provide an interface - that facilitates the production of music for any individual, regardless of their - playing skills or previous musical knowledge. Its features are: open design, lower - cost than commercial alternatives, wireless MIDI operation, rechargeable battery - power, graphical user interface, tactile keys, sensitivity to air pressure, left-right - reversible design and two FSR sensors. There is also a mention about its participation - in the 1st Collaborative Concert over the Internet between Argentina and Cuba - "Tradición y Nuevas Sonoridades".' - address: 'Porto Alegre, Brazil' - author: Juan Mariano Ramos - bibtex: "@inproceedings{Ramos2019,\n abstract = {This paper presents a description\ - \ of the design and usage of Eolos, a wireless MIDI wind controller. The main\ - \ goal of Eolos is to provide an interface that facilitates the production of\ - \ music for any individual, regardless of their playing skills or previous musical\ - \ knowledge. Its features are: open design, lower cost than commercial alternatives,\ - \ wireless MIDI operation, rechargeable battery power, graphical user interface,\ - \ tactile keys, sensitivity to air pressure, left-right reversible design and\ - \ two FSR sensors. 
There is also a mention about its participation in the 1st\ - \ Collaborative Concert over the Internet between Argentina and Cuba \"Tradición\ - \ y Nuevas Sonoridades\".},\n address = {Porto Alegre, Brazil},\n author = {Juan\ - \ Mariano Ramos},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672972},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {303--306},\n publisher = {UFRGS},\n title = {Eolos: a wireless\ - \ {MIDI} wind controller},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper058.pdf},\n\ - \ year = {2019}\n}\n" + ID: Gibson2018 + abstract: 'The voice of the theremin is more than just a simple sine wave. Its unique + sound is made through two radio frequency oscillators that, when operating at + almost identical frequencies, gravitate towards each other. Ultimately, this pull + alters the sine wave, creating the signature sound of the theremin. The Theremin + Textural Expander (TTE) explores other textures the theremin can produce when + its sound is processed and manipulated through a Max/MSP patch and controlled + via a MIDI pedalboard. The TTE extends the theremin''s ability, enabling it to + produce five distinct new textures beyond the original. It also features a looping + system that the performer can use to layer textures created with the traditional + theremin sound. Ultimately, this interface introduces a new way to play and experience + the theremin; it extends its expressivity, affording a greater range of compositional + possibilities and greater flexibility in free improvisation contexts. ' + address: 'Blacksburg, Virginia, USA' + author: Rachel Gibson + bibtex: "@inproceedings{Gibson2018,\n abstract = {The voice of the theremin is more\ + \ than just a simple sine wave. 
Its unique sound is made through two radio frequency\ + \ oscillators that, when operating at almost identical frequencies, gravitate\ + \ towards each other. Ultimately, this pull alters the sine wave, creating the\ + \ signature sound of the theremin. The Theremin Textural Expander (TTE) explores\ + \ other textures the theremin can produce when its sound is processed and manipulated\ + \ through a Max/MSP patch and controlled via a MIDI pedalboard. The TTE extends\ + \ the theremin's ability, enabling it to produce five distinct new textures beyond\ + \ the original. It also features a looping system that the performer can use\ + \ to layer textures created with the traditional theremin sound. Ultimately, this\ + \ interface introduces a new way to play and experience the theremin; it extends\ + \ its expressivity, affording a greater range of compositional possibilities and\ + \ greater flexibility in free improvisation contexts. },\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Rachel Gibson},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1302527},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {51--52},\n publisher = {Virginia Tech},\n title = {The Theremin Textural\ + \ Expander},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0013.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672972 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302527 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 303--306 - publisher: UFRGS - title: 'Eolos: a wireless MIDI wind controller' - url: http://www.nime.org/proceedings/2019/nime2019_paper058.pdf - year: 2019 + pages: 51--52 + 
publisher: Virginia Tech + title: The Theremin Textural Expander + url: http://www.nime.org/proceedings/2018/nime2018_paper0013.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Yang2019 - abstract: 'Variational Autoencoder has already achieved great results on image generation and - recently made promising progress on music sequence generation. However, the model - is still quite difficult to control in the sense that the learned latent representations - lack meaningful music semantics. What users really need is to interact with certain music - features, such as rhythm and pitch contour, in the creation process so that they - can easily test different composition ideas. In this paper, we propose a disentanglement - by augmentation method to inspect the pitch and rhythm interpretations of the - latent representations. Based on the interpretable representations, an intuitive - graphical user interface demo is designed for users to better direct the music - creation process by manipulating the pitch contours and rhythmic complexity.' - address: 'Porto Alegre, Brazil' - author: Ruihan Yang and Tianyao Chen and Yiyi Zhang and gus xia - bibtex: "@inproceedings{Yang2019,\n abstract = {Variational Autoencoder has already\ - \ achieved great results on image generation and recently made promising progress\ - \ on music sequence generation. However, the model is still quite difficult to\ - \ control in the sense that the learned latent representations lack meaningful\ - \ music semantics. What users really need is to interact with certain music features,\ - \ such as rhythm and pitch contour, in the creation process so that they can easily\ - \ test different composition ideas. In this paper, we propose a disentanglement\ - \ by augmentation method to inspect the pitch and rhythm interpretations of the\ - \ latent representations. 
Based on the interpretable representations, an intuitive\ - \ graphical user interface demo is designed for users to better direct the music\ - \ creation process by manipulating the pitch contours and rhythmic complexity.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Ruihan Yang and Tianyao Chen and\ - \ Yiyi Zhang and gus xia},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672974},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {307--312},\n publisher = {UFRGS},\n title = {Inspecting\ - \ and Interacting with Meaningful Music Representations using {VAE}},\n url =\ - \ {http://www.nime.org/proceedings/2019/nime2019_paper059.pdf},\n year = {2019}\n\ - }\n" + ID: Toka2018 + abstract: 'This paper introduces Siren, a hybrid system for algorithmic composition + and live-coding performances. Its hierarchical structure allows small modifications + to propagate and aggregate on lower levels for dramatic changes in the musical + output. It uses functional programming language TidalCycles as the core pattern + creation environment due to its inherent ability to create complex pattern relations + with minimal syntax. Borrowing the best from TidalCycles, Siren augments the pattern + creation process by introducing various interface level features: a multi-channel + sequencer, local and global parameters, mathematical expressions, and pattern + history. It presents new opportunities for recording, refining, and reusing the + playback information with the pattern roll component. Subsequently, the paper + concludes with a preliminary evaluation of Siren in the context of user interface + design principles, which originates from the cognitive dimensions framework for + musical notation design.' 
+ address: 'Blacksburg, Virginia, USA' + author: Mert Toka and Can Ince and Mehmet Aydin Baytas + bibtex: "@inproceedings{Toka2018,\n abstract = {This paper introduces Siren, a hybrid\ + \ system for algorithmic composition and live-coding performances. Its hierarchical\ + \ structure allows small modifications to propagate and aggregate on lower levels\ + \ for dramatic changes in the musical output. It uses functional programming language\ + \ TidalCycles as the core pattern creation environment due to its inherent ability\ + \ to create complex pattern relations with minimal syntax. Borrowing the best\ + \ from TidalCycles, Siren augments the pattern creation process by introducing\ + \ various interface level features: a multi-channel sequencer, local and global\ + \ parameters, mathematical expressions, and pattern history. It presents new opportunities\ + \ for recording, refining, and reusing the playback information with the pattern\ + \ roll component. Subsequently, the paper concludes with a preliminary evaluation\ + \ of Siren in the context of user interface design principles, which originates\ + \ from the cognitive dimensions framework for musical notation design.},\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {Mert Toka and Can Ince and Mehmet\ + \ Aydin Baytas},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302677},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {53--58},\n publisher = {Virginia\ + \ Tech},\n title = {Siren: Interface for Pattern Languages},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0014.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672974 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302677 + 
editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 307--312 - publisher: UFRGS - title: Inspecting and Interacting with Meaningful Music Representations using VAE - url: http://www.nime.org/proceedings/2019/nime2019_paper059.pdf - year: 2019 + pages: 53--58 + publisher: Virginia Tech + title: 'Siren: Interface for Pattern Languages' + url: http://www.nime.org/proceedings/2018/nime2018_paper0014.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Roma2019 - abstract: 'Descriptor spaces have become an ubiquitous interaction paradigm for - music based on collections of audio samples. However, most systems rely on a small - predefined set of descriptors, which the user is often required to understand - and choose from. There is no guarantee that the chosen descriptors are relevant - for a given collection. In addition, this method does not scale to longer samples - that require higher-dimensional descriptions, which biases systems towards the - use of short samples. In this paper we propose novel framework for automatic creation - of interactive sound spaces from sound collections using feature learning and - dimensionality reduction. The framework is implemented as a software library using - the SuperCollider language. We compare several algorithms and describe some example - interfaces for interacting with the resulting spaces. Our experiments signal the - potential of unsupervised algorithms for creating data-driven musical interfaces.' - address: 'Porto Alegre, Brazil' - author: Gerard Roma and Owen Green and Pierre Alexandre Tremblay - bibtex: "@inproceedings{Roma2019,\n abstract = {Descriptor spaces have become an\ - \ ubiquitous interaction paradigm for music based on collections of audio samples.\ - \ However, most systems rely on a small predefined set of descriptors, which the\ - \ user is often required to understand and choose from. 
There is no guarantee\ - \ that the chosen descriptors are relevant for a given collection. In addition,\ - \ this method does not scale to longer samples that require higher-dimensional\ - \ descriptions, which biases systems towards the use of short samples. In this\ - \ paper we propose novel framework for automatic creation of interactive sound\ - \ spaces from sound collections using feature learning and dimensionality reduction.\ - \ The framework is implemented as a software library using the SuperCollider language.\ - \ We compare several algorithms and describe some example interfaces for interacting\ - \ with the resulting spaces. Our experiments signal the potential of unsupervised\ - \ algorithms for creating data-driven musical interfaces.},\n address = {Porto\ - \ Alegre, Brazil},\n author = {Gerard Roma and Owen Green and Pierre Alexandre\ - \ Tremblay},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672976},\n editor\ - \ = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {313--318},\n publisher = {UFRGS},\n title = {Adaptive Mapping of Sound\ - \ Collections for Data-driven Musical Interfaces},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper060.pdf},\n\ - \ year = {2019}\n}\n" + ID: Salazar2018 + abstract: 'This paper documents an extensive and varied series of performances by + the authors over the past year using mobile technology, primarily iPad tablets + running the Auraglyph musical sketchpad software. These include both solo and + group performances, the latter under the auspices of the Mobile Ensemble of CalArts + (MECA), a group created to perform music with mobile technology devices. 
As a + whole, this diverse mobile technology-based performance practice leverages Auraglyph''s + versatility to explore a number of topical issues in electronic music performance, + including the use of physical and acoustical space, audience participation, and + interaction design of musical instruments.' + address: 'Blacksburg, Virginia, USA' + author: Spencer Salazar and Andrew Piepenbrink and Sarah Reid + bibtex: "@inproceedings{Salazar2018,\n abstract = {This paper documents an extensive\ + \ and varied series of performances by the authors over the past year using mobile\ + \ technology, primarily iPad tablets running the Auraglyph musical sketchpad software.\ + \ These include both solo and group performances, the latter under the auspices\ + \ of the Mobile Ensemble of CalArts (MECA), a group created to perform music with\ + \ mobile technology devices. As a whole, this diverse mobile technology-based\ + \ performance practice leverages Auraglyph's versatility to explore a number of\ + \ topical issues in electronic music performance, including the use of physical\ + \ and acoustical space, audience participation, and interaction design of musical\ + \ instruments.},\n address = {Blacksburg, Virginia, USA},\n author = {Spencer\ + \ Salazar and Andrew Piepenbrink and Sarah Reid},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1302679},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {59--64},\n publisher = {Virginia Tech},\n title = {Developing a Performance\ + \ Practice for Mobile Music Technology},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0015.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672976 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 
10.5281/zenodo.1302679 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 313--318 - publisher: UFRGS - title: Adaptive Mapping of Sound Collections for Data-driven Musical Interfaces - url: http://www.nime.org/proceedings/2019/nime2019_paper060.pdf - year: 2019 + pages: 59--64 + publisher: Virginia Tech + title: Developing a Performance Practice for Mobile Music Technology + url: http://www.nime.org/proceedings/2018/nime2018_paper0015.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Norilo2019 - abstract: 'This paper presents Veneer, a visual, touch-ready programming interface - for the Kronos programming language. The challenges of representing high-level - data flow abstractions, including higher order functions, are described. The tension - between abstraction and spontaneity in programming is addressed, and gradual abstraction - in live programming is proposed as a potential solution. Several novel user interactions - for patching on a touch device are shown. In addition, the paper describes some - of the current issues of web audio music applications and offers strategies for - integrating a web-based presentation layer with a low-latency native processing - backend.' - address: 'Porto Alegre, Brazil' - author: Vesa Petri Norilo - bibtex: "@inproceedings{Norilo2019,\n abstract = {This paper presents Veneer, a\ - \ visual, touch-ready programming interface for the Kronos programming language.\ - \ The challenges of representing high-level data flow abstractions, including\ - \ higher order functions, are described. The tension between abstraction and spontaneity\ - \ in programming is addressed, and gradual abstraction in live programming is\ - \ proposed as a potential solution. Several novel user interactions for patching\ - \ on a touch device are shown. 
In addition, the paper describes some of the current\ - \ issues of web audio music applications and offers strategies for integrating\ - \ a web-based presentation layer with a low-latency native processing backend.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Vesa Petri Norilo},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.3672978},\n editor = {Marcelo Queiroz and\ - \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {319--324},\n\ - \ publisher = {UFRGS},\n title = {Veneer: Visual and Touch-based Programming for\ - \ Audio},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper061.pdf},\n\ - \ year = {2019}\n}\n" + ID: Momeni2018 + abstract: 'This paper provides an overview of the design, prototyping, deployment + and evaluation of a multi-agent interactive sound instrument named MOM (Mobile + Object for Music). MOM combines a real-time signal processing engine implemented + with Pure Data on an embedded Linux platform, with gestural interaction implemented + via a variety of analog and digital sensors. Power, sound-input and sound-diffusion + subsystems make the instrument autonomous and mobile. This instrument was designed + in coordination with the development of an evening-length dance/music performance + in which the performing musician is engaged in choreographed movements with the + mobile instruments. The design methodology relied on a participatory process + that engaged an interdisciplinary team made up of technologists, musicians, composers, + choreographers, and dancers. The prototyping process relied on a mix of in-house + and out-sourced digital fabrication processes intended to make the open source + hardware and software design of the system accessible and affordable for other + creators. 
' + address: 'Blacksburg, Virginia, USA' + author: Ali Momeni and Daniel McNamara and Jesse Stiles + bibtex: "@inproceedings{Momeni2018,\n abstract = {This paper provides an overview\ + \ of the design, prototyping, deployment and evaluation of a multi-agent interactive\ + \ sound instrument named MOM (Mobile Object for Music). MOM combines a real-time\ + \ signal processing engine implemented with Pure Data on an embedded Linux platform,\ + \ with gestural interaction implemented via a variety of analog and digital sensors.\ + \ Power, sound-input and sound-diffusion subsystems make the instrument autonomous\ + \ and mobile. This instrument was designed in coordination with the development\ + \ of an evening-length dance/music performance in which the performing musician\ + \ is engaged in choreographed movements with the mobile instruments. The design\ + \ methodology relied on a participatory process that engaged an interdisciplinary\ + \ team made up of technologists, musicians, composers, choreographers, and dancers.\ + \ The prototyping process relied on a mix of in-house and out-sourced digital\ + \ fabrication processes intended to make the open source hardware and software\ + \ design of the system accessible and affordable for other creators. 
},\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {Ali Momeni and Daniel McNamara and\ + \ Jesse Stiles},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302681},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {65--71},\n publisher = {Virginia\ + \ Tech},\n title = {MOM: an Extensible Platform for Rapid Prototyping and Design\ + \ of Electroacoustic Instruments},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0016.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672978 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302681 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 319--324 - publisher: UFRGS - title: 'Veneer: Visual and Touch-based Programming for Audio' - url: http://www.nime.org/proceedings/2019/nime2019_paper061.pdf - year: 2019 + pages: 65--71 + publisher: Virginia Tech + title: 'MOM: an Extensible Platform for Rapid Prototyping and Design of Electroacoustic + Instruments' + url: http://www.nime.org/proceedings/2018/nime2018_paper0016.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Faitas2019 - abstract: 'Generating convincing music via deep neural networks is a challenging - problem that shows promise for many applications including interactive musical - creation. One part of this challenge is the problem of generating convincing accompaniment - parts to a given melody, as could be used in an automatic accompaniment system. - Despite much progress in this area, systems that can automatically learn to generate - interesting sounding, as well as harmonically plausible, accompanying melodies - remain somewhat elusive. 
In this paper we explore the problem of sequence to sequence - music generation where a human user provides a sequence of notes, and a neural - network model responds with a harmonically suitable sequence of equal length. - We consider two sequence-to-sequence models; one featuring standard unidirectional - long short-term memory (LSTM) architecture, and the other featuring bidirectional - LSTM, both successfully trained to produce a sequence based on the given input. - Both of these are fairly dated models, as part of the investigation is to see - what can be achieved with such models. These are evaluated and compared via a - qualitative study that features 106 respondents listening to eight random samples - from our set of generated music, as well as two human samples. From the results - we see a preference for the sequences generated by the bidirectional model as - well as an indication that these sequences sound more human.' - address: 'Porto Alegre, Brazil' - author: Andrei Faitas and Synne Engdahl Baumann and Torgrim Rudland Næss and Jim - Torresen and Charles Patrick Martin - bibtex: "@inproceedings{Faitas2019,\n abstract = {Generating convincing music via\ - \ deep neural networks is a challenging problem that shows promise for many applications\ - \ including interactive musical creation. One part of this challenge is the problem\ - \ of generating convincing accompaniment parts to a given melody, as could be\ - \ used in an automatic accompaniment system. Despite much progress in this area,\ - \ systems that can automatically learn to generate interesting sounding, as well\ - \ as harmonically plausible, accompanying melodies remain somewhat elusive. In\ - \ this paper we explore the problem of sequence to sequence music generation where\ - \ a human user provides a sequence of notes, and a neural network model responds\ - \ with a harmonically suitable sequence of equal length. 
We consider two sequence-to-sequence\ - \ models; one featuring standard unidirectional long short-term memory (LSTM)\ - \ architecture, and the other featuring bidirectional LSTM, both successfully\ - \ trained to produce a sequence based on the given input. Both of these are fairly\ - \ dated models, as part of the investigation is to see what can be achieved with\ - \ such models. These are evaluated and compared via a qualitative study that features\ - \ 106 respondents listening to eight random samples from our set of generated\ - \ music, as well as two human samples. From the results we see a preference for\ - \ the sequences generated by the bidirectional model as well as an indication\ - \ that these sequences sound more human.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Andrei Faitas and Synne Engdahl Baumann and Torgrim Rudland Næss and\ - \ Jim Torresen and Charles Patrick Martin},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.3672980},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {325--330},\n publisher = {UFRGS},\n\ - \ title = {Generating Convincing Harmony Parts with Simple Long Short-Term Memory\ - \ Networks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper062.pdf},\n\ - \ year = {2019}\n}\n" + ID: Robertson2018 + abstract: 'The Harmonic Wand is a transducer-based instrument that combines physical + excitation, synthesis, and gestural control. Our objective was to design a device + that affords exploratory modes of interaction with the performer''s surroundings, + as well as precise control over microtonal pitch content and other concomitant + parameters. The instrument is comprised of a hand-held wand, containing two piezo-electric + transducers affixed to a pair of metal probes. 
The performer uses the wand to + physically excite surfaces in the environment and capture resultant signals. Input + materials are then processed using a novel application of Karplus-Strong synthesis, + in which these impulses are imbued with discrete resonances. We achieved gestural + control over synthesis parameters using a secondary tactile interface, consisting + of four force-sensitive resistors (FSR), a fader, and momentary switch. As a + unique feature of our instrument, we modeled pitch organization and associated + parametric controls according to theoretical principles outlined in Harry Partch''s + “monophonic fabric” of Just Intonation—specifically his conception of odentities, + udentities, and a variable numerary nexus. This system classifies pitch content + based upon intervallic structures found in both the overtone and undertone series. Our + paper details the procedural challenges in designing the Harmonic Wand.' + address: 'Blacksburg, Virginia, USA' + author: Ben Luca Robertson and Luke Dahl + bibtex: "@inproceedings{Robertson2018,\n abstract = {The Harmonic Wand is a transducer-based\ + \ instrument that combines physical excitation, synthesis, and gestural control.\ + \ Our objective was to design a device that affords exploratory modes of interaction\ + \ with the performer's surroundings, as well as precise control over microtonal\ + \ pitch content and other concomitant parameters. The instrument is comprised\ + \ of a hand-held wand, containing two piezo-electric transducers affixed to a\ + \ pair of metal probes. The performer uses the wand to physically excite surfaces\ + \ in the environment and capture resultant signals. Input materials are then\ + \ processed using a novel application of Karplus-Strong synthesis, in which these\ + \ impulses are imbued with discrete resonances. 
We achieved gestural control\ + \ over synthesis parameters using a secondary tactile interface, consisting of\ + \ four force-sensitive resistors (FSR), a fader, and momentary switch. As a unique\ + \ feature of our instrument, we modeled pitch organization and associated parametric\ + \ controls according to theoretical principles outlined in Harry Partch's “monophonic\ + \ fabric” of Just Intonation—specifically his conception of odentities, udentities,\ + \ and a variable numerary nexus. This system classifies pitch content based upon\ + \ intervallic structures found in both the overtone and undertone series. Our\ + \ paper details the procedural challenges in designing the Harmonic Wand.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Ben Luca Robertson and Luke\ + \ Dahl},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1302683},\n editor = {Luke\ + \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ + \ {2220-4806},\n month = {June},\n pages = {72--77},\n publisher = {Virginia Tech},\n\ + \ title = {Harmonic Wand: An Instrument for Microtonal Control and Gestural Excitation},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0017.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672980 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302683 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 325--330 - publisher: UFRGS - title: Generating Convincing Harmony Parts with Simple Long Short-Term Memory Networks - url: http://www.nime.org/proceedings/2019/nime2019_paper062.pdf - year: 2019 - - -- ENTRYTYPE: inproceedings - ID: Marasco2019 - abstract: 'Bendit_I/O is a system that allows for wireless, networked performance - of circuit-bent 
devices, giving artists a new outlet for performing with repurposed - technology. In a typical setup, a user pre-bends a device using the Bendit_I/O - board as an intermediary, replacing physical switches and potentiometers with - the board''s reed relays, motor driver, and digital potentiometer signals. Bendit_I/O - brings the networking techniques of distributed music performances to the hardware - hacking realm, opening the door for creative implementation of multiple circuit-bent - devices in audiovisual experiences. Consisting of a Wi-Fi- enabled I/O board and - a Node-based server, the system provides performers with a variety of interaction - and control possibilities between connected users and hacked devices. Moreover, - it is user-friendly, low-cost, and modular, making it a flexible toolset for artists - of diverse experience levels.' - address: 'Porto Alegre, Brazil' - author: Anthony T. Marasco and Edgar Berdahl and Jesse Allison - bibtex: "@inproceedings{Marasco2019,\n abstract = {Bendit\\_I/O is a system that\ - \ allows for wireless, networked performance of circuit-bent devices, giving artists\ - \ a new outlet for performing with repurposed technology. In a typical setup,\ - \ a user pre-bends a device using the Bendit\\_I/O board as an intermediary, replacing\ - \ physical switches and potentiometers with the board's reed relays, motor driver,\ - \ and digital potentiometer signals. Bendit\\_I/O brings the networking techniques\ - \ of distributed music performances to the hardware hacking realm, opening the\ - \ door for creative implementation of multiple circuit-bent devices in audiovisual\ - \ experiences. Consisting of a Wi-Fi- enabled I/O board and a Node-based server,\ - \ the system provides performers with a variety of interaction and control possibilities\ - \ between connected users and hacked devices. 
Moreover, it is user-friendly, low-cost,\ - \ and modular, making it a flexible toolset for artists of diverse experience\ - \ levels.},\n address = {Porto Alegre, Brazil},\n author = {Anthony T. Marasco\ - \ and Edgar Berdahl and Jesse Allison},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672982},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {331--334},\n publisher = {UFRGS},\n title = {{Bendit\\\ - _I/O}: A System for Networked Performance of Circuit-Bent Devices},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper063.pdf},\n\ - \ year = {2019}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.3672982 - editor: Marcelo Queiroz and Anna Xambó Sedó - issn: 2220-4806 - month: June - pages: 331--334 - publisher: UFRGS - title: 'Bendit_I/O: A System for Networked Performance of Circuit-Bent Devices' - url: http://www.nime.org/proceedings/2019/nime2019_paper063.pdf - year: 2019 + pages: 72--77 + publisher: Virginia Tech + title: 'Harmonic Wand: An Instrument for Microtonal Control and Gestural Excitation' + url: http://www.nime.org/proceedings/2018/nime2018_paper0017.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Macionis2019 - abstract: '''Where Is The Quiet?'' is a mixed-media installation that utilizes immersive - experience design, mechatronics, and machine learning in order to enhance wellness - and increase connectivity to the natural world. Individuals interact with the - installation by wearing a brainwave interface that measures the strength of the - alpha wave signal. The interface then transmits the data to a computer that uses - it in order to determine the individual''s overall state of relaxation. As the - individual achieves higher states of relaxation, mechatronic instruments respond - and provide feedback. 
This feedback not only encourages self-awareness but also - it motivates the individual to relax further. Visitors without the headset experience - the installation by watching a film and listening to an original musical score. - Through the novel arrangement of technologies and features, ''Where Is The Quiet?'' - demonstrates that mediated technological experiences are capable of evoking meditative - states of consciousness, facilitating individual and group connectivity, and deepening - awareness of the natural world. As such, this installation opens the door to future - research regarding the possibility of immersive experiences supporting humanitarian - needs.' - address: 'Porto Alegre, Brazil' + ID: Macionis2018 + abstract: 'Sansa is an extended sansula, a hyper-instrument that is similar in design + and functionality to a kalimba or thumb piano. At the heart of this interface + is a series of sensors that are used to augment the tone and expand the performance + capabilities of the instrument. The sensor data is further exploited using the + machine learning program Wekinator, which gives users the ability to interact + and perform with the instrument using several different modes of operation. In + this way, Sansa is capable of both solo acoustic performances as well as complex + productions that require interactions between multiple technological mediums. + Sansa expands the current community of hyper-instruments by demonstrating the + ways that hardware and software can extend an acoustic instrument''s functionality + and playability in a live performance or studio setting.' + address: 'Blacksburg, Virginia, USA' author: McLean J Macionis and Ajay Kapur - bibtex: "@inproceedings{Macionis2019,\n abstract = {'Where Is The Quiet?' is a mixed-media\ - \ installation that utilizes immersive experience design, mechatronics, and machine\ - \ learning in order to enhance wellness and increase connectivity to the natural\ - \ world. 
Individuals interact with the installation by wearing a brainwave interface\ - \ that measures the strength of the alpha wave signal. The interface then transmits\ - \ the data to a computer that uses it in order to determine the individual's overall\ - \ state of relaxation. As the individual achieves higher states of relaxation,\ - \ mechatronic instruments respond and provide feedback. This feedback not only\ - \ encourages self-awareness but also it motivates the individual to relax further.\ - \ Visitors without the headset experience the installation by watching a film\ - \ and listening to an original musical score. Through the novel arrangement of\ - \ technologies and features, 'Where Is The Quiet?' demonstrates that mediated\ - \ technological experiences are capable of evoking meditative states of consciousness,\ - \ facilitating individual and group connectivity, and deepening awareness of the\ - \ natural world. As such, this installation opens the door to future research\ - \ regarding the possibility of immersive experiences supporting humanitarian needs.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {McLean J Macionis and Ajay Kapur},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672984},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {335--338},\n\ - \ publisher = {UFRGS},\n title = {Where Is The Quiet: Immersive Experience Design\ - \ Using the Brain, Mechatronics, and Machine Learning},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper064.pdf},\n\ - \ year = {2019}\n}\n" + bibtex: "@inproceedings{Macionis2018,\n abstract = {Sansa is an extended sansula,\ + \ a hyper-instrument that is similar in design and functionality to a kalimba\ + \ or thumb piano. 
At the heart of this interface is a series of sensors that are\ + \ used to augment the tone and expand the performance capabilities of the instrument.\ + \ The sensor data is further exploited using the machine learning program Wekinator,\ + \ which gives users the ability to interact and perform with the instrument using\ + \ several different modes of operation. In this way, Sansa is capable of both\ + \ solo acoustic performances as well as complex productions that require interactions\ + \ between multiple technological mediums. Sansa expands the current community\ + \ of hyper-instruments by demonstrating the ways that hardware and software can\ + \ extend an acoustic instrument's functionality and playability in a live performance\ + \ or studio setting.},\n address = {Blacksburg, Virginia, USA},\n author = {McLean\ + \ J Macionis and Ajay Kapur},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302685},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {78--81},\n publisher = {Virginia\ + \ Tech},\n title = {Sansa: A Modified Sansula for Extended Compositional Techniques\ + \ Using Machine Learning},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0018.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672984 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302685 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 335--338 - publisher: UFRGS - title: 'Where Is The Quiet: Immersive Experience Design Using the Brain, Mechatronics, - and Machine Learning' - url: http://www.nime.org/proceedings/2019/nime2019_paper064.pdf - year: 2019 + pages: 78--81 + publisher: Virginia Tech + title: 'Sansa: 
A Modified Sansula for Extended Compositional Techniques Using Machine + Learning' + url: http://www.nime.org/proceedings/2018/nime2018_paper0018.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Carson2019 - abstract: 'Mesh Garden explores participatory music-making with smart- phones using - an audio sequencer game made up of a distributed smartphone speaker system. The - piece allows a group of people in a relaxed situation to create a piece of ambient - music using their smartphones networked through the internet. The players'' interactions - with the music are derived from the orientations of their phones. The work also - has a gameplay aspect; if two players'' phones match in orientation, one player - has the option to take the other player''s note, building up a bank of notes that - will be used to form a melody.' - address: 'Porto Alegre, Brazil' - author: Tate Carson - bibtex: "@inproceedings{Carson2019,\n abstract = {Mesh Garden explores participatory\ - \ music-making with smart- phones using an audio sequencer game made up of a distributed\ - \ smartphone speaker system. The piece allows a group of people in a relaxed situation\ - \ to create a piece of ambient music using their smartphones networked through\ - \ the internet. The players' interactions with the music are derived from the\ - \ orientations of their phones. 
The work also has a gameplay aspect; if two players'\ - \ phones match in orientation, one player has the option to take the other player's\ - \ note, building up a bank of notes that will be used to form a melody.},\n address\ - \ = {Porto Alegre, Brazil},\n author = {Tate Carson},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672986},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {339--342},\n publisher = {UFRGS},\n\ - \ title = {Mesh Garden: A creative-based musical game for participatory musical\ - \ performance },\n url = {http://www.nime.org/proceedings/2019/nime2019_paper065.pdf},\n\ - \ year = {2019}\n}\n" + ID: Turchet2018 + abstract: 'This demo will showcase technologically mediated interactions between + a performer playing a smart musical instrument (SMIs) and audience members using + Musical Haptic Wearables (MHWs). Smart Instruments are a family of musical instruments + characterized by embedded computational intelligence, wireless connectivity, an + embedded sound delivery system, and an onboard system for feedback to the player. + They offer direct point-to-point communication between each other and other portable + sensor-enabled devices connected to local networks and to the Internet. MHWs are + wearable devices for audience members, which encompass haptic stimulation, gesture + tracking, and wireless connectivity features. This demo will present an architecture + enabling the multidirectional creative communication between a performer playing + a Smart Mandolin and audience members using armband-based MHWs.' 
+ address: 'Blacksburg, Virginia, USA' + author: Luca Turchet and Mathieu Barthet + bibtex: "@inproceedings{Turchet2018,\n abstract = {This demo will showcase technologically\ + \ mediated interactions between a performer playing a smart musical instrument\ + \ (SMIs) and audience members using Musical Haptic Wearables (MHWs). Smart Instruments\ + \ are a family of musical instruments characterized by embedded computational\ + \ intelligence, wireless connectivity, an embedded sound delivery system, and\ + \ an onboard system for feedback to the player. They offer direct point-to-point\ + \ communication between each other and other portable sensor-enabled devices connected\ + \ to local networks and to the Internet. MHWs are wearable devices for audience\ + \ members, which encompass haptic stimulation, gesture tracking, and wireless\ + \ connectivity features. This demo will present an architecture enabling the multidirectional\ + \ creative communication between a performer playing a Smart Mandolin and audience\ + \ members using armband-based MHWs.},\n address = {Blacksburg, Virginia, USA},\n\ + \ author = {Luca Turchet and Mathieu Barthet},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1302687},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {82--83},\n publisher = {Virginia Tech},\n title = {Demo of interactions between\ + \ a performer playing a Smart Mandolin and audience members using Musical Haptic\ + \ Wearables},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0019.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672986 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302687 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 
978-1-949373-99-8 issn: 2220-4806 month: June - pages: 339--342 - publisher: UFRGS - title: 'Mesh Garden: A creative-based musical game for participatory musical performance ' - url: http://www.nime.org/proceedings/2019/nime2019_paper065.pdf - year: 2019 + pages: 82--83 + publisher: Virginia Tech + title: Demo of interactions between a performer playing a Smart Mandolin and audience + members using Musical Haptic Wearables + url: http://www.nime.org/proceedings/2018/nime2018_paper0019.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Rossmy2019 - abstract: 'In this paper we draw a picture that captures the increasing interest - in the format of modular synthesizers today. We therefore provide a historical - summary, which includes the origins, the fall and the rediscovery of that technology. - Further an empirical analysis is performed based on statements given by artists - and manufacturers taken from published interviews. These statements were aggregated, - objectified and later reviewed by an expert group consisting of modular synthesizer - vendors. Their responses provide the basis for the discussion on how emerging - trends in synthesizer interface design reveal challenges and opportunities for - the NIME community. ' - address: 'Porto Alegre, Brazil' - author: Beat Rossmy and Alexander Wiethoff - bibtex: "@inproceedings{Rossmy2019,\n abstract = {In this paper we draw a picture\ - \ that captures the increasing interest in the format of modular synthesizers\ - \ today. We therefore provide a historical summary, which includes the origins,\ - \ the fall and the rediscovery of that technology. Further an empirical analysis\ - \ is performed based on statements given by artists and manufacturers taken from\ - \ published interviews. These statements were aggregated, objectified and later\ - \ reviewed by an expert group consisting of modular synthesizer vendors. 
Their\ - \ responses provide the basis for the discussion on how emerging trends in synthesizer\ - \ interface design reveal challenges and opportunities for the NIME community.\ - \ },\n address = {Porto Alegre, Brazil},\n author = {Beat Rossmy and Alexander\ - \ Wiethoff},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672988},\n editor\ - \ = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {343--348},\n publisher = {UFRGS},\n title = {The Modular Backward Evolution\ - \ --- Why to Use Outdated Technologies},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper066.pdf},\n\ - \ year = {2019}\n}\n" + ID: Kemper2018 + abstract: 'Robotic instrument designers tend to focus on the number of sound control + parameters and their resolution when trying to develop expressivity in their instruments. + These parameters afford greater sonic nuance related to elements of music that + are traditionally associated with expressive human performances including articulation, + timbre, dynamics, and phrasing. Equating the capacity for sonic nuance and musical + expression stems from the “transitive” perspective that musical expression is + an act of emotional communication from performer to listener. However, this perspective + is problematic in the case of robotic instruments since we do not typically consider + machines to be capable of expressing emotion. Contemporary theories of musical + expression focus on an “intransitive” perspective, where musical meaning is generated + as an embodied experience. Understanding expressivity from this perspective allows + listeners to interpret performances by robotic instruments as possessing their + own expressive meaning, even though the performer is a machine. 
It also enables + musicians working with robotic instruments to develop their own unique vocabulary + of expressive gestures unique to mechanical instruments. This paper explores these + issues of musical expression, introducing the concept of mechatronic expression + as a compositional and design strategy that highlights the musical and performative + capabilities unique to robotic instruments.' + address: 'Blacksburg, Virginia, USA' + author: Steven Kemper and Scott Barton + bibtex: "@inproceedings{Kemper2018,\n abstract = {Robotic instrument designers tend\ + \ to focus on the number of sound control parameters and their resolution when\ + \ trying to develop expressivity in their instruments. These parameters afford\ + \ greater sonic nuance related to elements of music that are traditionally associated\ + \ with expressive human performances including articulation, timbre, dynamics,\ + \ and phrasing. Equating the capacity for sonic nuance and musical expression\ + \ stems from the “transitive” perspective that musical expression is an act of\ + \ emotional communication from performer to listener. However, this perspective\ + \ is problematic in the case of robotic instruments since we do not typically\ + \ consider machines to be capable of expressing emotion. Contemporary theories\ + \ of musical expression focus on an “intransitive” perspective, where musical\ + \ meaning is generated as an embodied experience. 
Understanding expressivity from\ + \ this perspective allows listeners to interpret performances by robotic instruments\ + \ as possessing their own expressive meaning, even though the performer is a machine.\ + \ It also enables musicians working with robotic instruments to develop their\ + \ own unique vocabulary of expressive gestures unique to mechanical instruments.\ + \ This paper explores these issues of musical expression, introducing the concept\ + \ of mechatronic expression as a compositional and design strategy that highlights\ + \ the musical and performative capabilities unique to robotic instruments.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Steven Kemper and Scott Barton},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1302689},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {84--87},\n publisher = {Virginia Tech},\n title =\ + \ {Mechatronic Expression: Reconsidering Expressivity in Music for Robotic Instruments\ + \ },\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0020.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672988 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302689 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 343--348 - publisher: UFRGS - title: The Modular Backward Evolution --- Why to Use Outdated Technologies - url: http://www.nime.org/proceedings/2019/nime2019_paper066.pdf - year: 2019 + pages: 84--87 + publisher: Virginia Tech + title: 'Mechatronic Expression: Reconsidering Expressivity in Music for Robotic + Instruments ' + url: http://www.nime.org/proceedings/2018/nime2018_paper0020.pdf + year: 2018 - ENTRYTYPE: 
inproceedings - ID: Goudard2019 - abstract: 'This article questions the notion of ephemerality of digital musical - instruments (DMI). Longevity is generally regarded as a valuable quality that - good design criteria should help to achieve. However, the nature of the tools, - of the performance conditions and of the music itself may lead to think of ephemerality - as an intrinsic modality of the existence of DMIs. In particular, the conditions - of contemporary musical production suggest that contextual adaptations of instrumental - devices beyond the monolithic unity of classical instruments should be considered. - The first two parts of this article analyse various reasons to reassess the issue - of longevity and ephemerality. The last two sections attempt to propose an articulation - of these two aspects to inform both the design of the DMI and their learning.' - address: 'Porto Alegre, Brazil' - author: Vincent Goudard - bibtex: "@inproceedings{Goudard2019,\n abstract = {This article questions the notion\ - \ of ephemerality of digital musical instruments (DMI). Longevity is generally\ - \ regarded as a valuable quality that good design criteria should help to achieve.\ - \ However, the nature of the tools, of the performance conditions and of the music\ - \ itself may lead to think of ephemerality as an intrinsic modality of the existence\ - \ of DMIs. In particular, the conditions of contemporary musical production suggest\ - \ that contextual adaptations of instrumental devices beyond the monolithic unity\ - \ of classical instruments should be considered. 
The first two parts of this article\ - \ analyse various reasons to reassess the issue of longevity and ephemerality.\ - \ The last two sections attempt to propose an articulation of these two aspects\ - \ to inform both the design of the DMI and their learning.},\n address = {Porto\ - \ Alegre, Brazil},\n author = {Vincent Goudard},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.3672990},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {349--354},\n publisher = {UFRGS},\n\ - \ title = {Ephemeral instruments},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper067.pdf},\n\ - \ year = {2019}\n}\n" + ID: Brownb2018 + abstract: 'Musical participation has brought individuals together in on-going communities + throughout human history, aiding in the kinds of social integration essential + for wellbeing. The design of Digital Musical Instruments (DMIs), however, has + generally been driven by idiosyncratic artistic concerns, Western art music and + dance traditions of expert performance, and short-lived interactive art installations + engaging a broader public of musical novices. These DMIs rarely engage with the + problems of on-going use in musical communities with existing performance idioms, + repertoire, and social codes with participants representing the full learning + curve of musical skill, such as social dance. Our project, Interactive Tango Milonga, + an interactive Argentine tango dance system for social dance addresses these challenges + in order to innovate connection, the feeling of intense relation between dance + partners, music, and the larger tango community. 
' + address: 'Blacksburg, Virginia, USA' + author: Courtney Brown + bibtex: "@inproceedings{Brownb2018,\n abstract = {Musical participation has brought\ + \ individuals together in on-going communities throughout human history, aiding\ + \ in the kinds of social integration essential for wellbeing. The design of Digital\ + \ Musical Instruments (DMIs), however, has generally been driven by idiosyncratic\ + \ artistic concerns, Western art music and dance traditions of expert performance,\ + \ and short-lived interactive art installations engaging a broader public of musical\ + \ novices. These DMIs rarely engage with the problems of on-going use in musical\ + \ communities with existing performance idioms, repertoire, and social codes with\ + \ participants representing the full learning curve of musical skill, such as\ + \ social dance. Our project, Interactive Tango Milonga, an interactive Argentine\ + \ tango dance system for social dance addresses these challenges in order to innovate\ + \ connection, the feeling of intense relation between dance partners, music, and\ + \ the larger tango community. 
},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {Courtney Brown},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302693},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {88--91},\n publisher = {Virginia\ + \ Tech},\n title = {Interactive Tango Milonga: Designing {DMI}s for the Social\ + \ Dance Context },\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0021.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672990 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302693 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 349--354 - publisher: UFRGS - title: Ephemeral instruments - url: http://www.nime.org/proceedings/2019/nime2019_paper067.pdf - year: 2019 + pages: 88--91 + publisher: Virginia Tech + title: 'Interactive Tango Milonga: Designing DMIs for the Social Dance Context ' + url: http://www.nime.org/proceedings/2018/nime2018_paper0021.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Jaramillo2019 - abstract: 'This paper reports the conception, design, implementation and evaluation - processes of PICO, a portable audio effect system created with Pure Data and the - Raspberry Pi, which augments traditional plucked string instruments such as the - Brazilian Cavaquinho, the Venezuelan Cuatro, the Colombian Tiple and the Peruvian/Bolivian - Charango. A fabric soft case fixed to the instrument`s body holds the PICO modules: - the touchscreen, the single board computer, the sound card, the speaker system - and the DC power bank. 
The device audio specifications arose from musicological - insights about the social role of performers in their musical contexts and the - instruments'' playing techniques. They were taken as design challenges in the - creation process of PICO`s first prototype, which was submitted to a short evaluation. - Along with the construction of PICO, we reflected over the design of an interactive - audio interface as a mode of research. Therefore, the paper will also discuss - methodological aspects of audio hardware design.' - address: 'Porto Alegre, Brazil' - author: Julian Jaramillo and Fernando Iazzetta - bibtex: "@inproceedings{Jaramillo2019,\n abstract = {This paper reports the conception,\ - \ design, implementation and evaluation processes of PICO, a portable audio effect\ - \ system created with Pure Data and the Raspberry Pi, which augments traditional\ - \ plucked string instruments such as the Brazilian Cavaquinho, the Venezuelan\ - \ Cuatro, the Colombian Tiple and the Peruvian/Bolivian Charango. A fabric soft\ - \ case fixed to the instrument`s body holds the PICO modules: the touchscreen,\ - \ the single board computer, the sound card, the speaker system and the DC power\ - \ bank. The device audio specifications arose from musicological insights about\ - \ the social role of performers in their musical contexts and the instruments'\ - \ playing techniques. They were taken as design challenges in the creation process\ - \ of PICO`s first prototype, which was submitted to a short evaluation. Along\ - \ with the construction of PICO, we reflected over the design of an interactive\ - \ audio interface as a mode of research. 
Therefore, the paper will also discuss\ - \ methodological aspects of audio hardware design.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {Julian Jaramillo and Fernando Iazzetta},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672992},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {355--360},\n publisher = {UFRGS},\n\ - \ title = {{PICO}: A portable audio effect box for traditional plucked-string\ - \ instruments},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper068.pdf},\n\ - \ year = {2019}\n}\n" + ID: Kleinberger2018 + abstract: 'This paper presents an experiment to investigate how new types of vocal + practices can affect psychophysiological activity. We know that health can influence + the voice, but can a certain use of the voice influence health through modification + of mental and physical state? This study took place in the setting of the Vocal + Vibrations installation. For the experiment, participants engage in a multi sensory + vocal exercise with a limited set of guidance to obtain a wide spectrum of vocal + performances across participants. We compare characteristics of those vocal practices + to the participant''s heart rate, breathing rate, electrodermal activity and mental + states. We obtained significant results suggesting that we can correlate psychophysiological + states with characteristics of the vocal practice if we also take into account + biographical information, and in particular mea- surement of how much people “like” + their own voice.' + address: 'Blacksburg, Virginia, USA' + author: Rebecca Kleinberger + bibtex: "@inproceedings{Kleinberger2018,\n abstract = {This paper presents an experiment\ + \ to investigate how new types of vocal practices can affect psychophysiological\ + \ activity. 
We know that health can influence the voice, but can a certain use\ + \ of the voice influence health through modification of mental and physical state?\ + \ This study took place in the setting of the Vocal Vibrations installation. For\ + \ the experiment, participants engage in a multi sensory vocal exercise with a\ + \ limited set of guidance to obtain a wide spectrum of vocal performances across\ + \ participants. We compare characteristics of those vocal practices to the participant's\ + \ heart rate, breathing rate, electrodermal activity and mental states. We obtained\ + \ significant results suggesting that we can correlate psychophysiological states\ + \ with characteristics of the vocal practice if we also take into account biographical\ + \ information, and in particular mea- surement of how much people “like” their\ + \ own voice.},\n address = {Blacksburg, Virginia, USA},\n author = {Rebecca Kleinberger},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1302693},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {92--95},\n publisher = {Virginia Tech},\n title =\ + \ {Vocal Musical Expression with a Tactile Resonating Device and its Psychophysiological\ + \ Effects},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0022.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672992 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302693 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 355--360 - publisher: UFRGS - title: 'PICO: A portable audio effect box for traditional plucked-string instruments' - url: http://www.nime.org/proceedings/2019/nime2019_paper068.pdf - year: 2019 + 
pages: 92--95 + publisher: Virginia Tech + title: Vocal Musical Expression with a Tactile Resonating Device and its Psychophysiological + Effects + url: http://www.nime.org/proceedings/2018/nime2018_paper0022.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Bertissolo2019 - abstract: ' This paper focuses on ongoing research in music composition based on the - study of cognitive research in musical meaning. As a method and result at the - same time, we propose the creation of experiments related to key issues in composition - and music cognition, such as music and movement, memory, expectation and metaphor - in creative process. The theoretical reference approached is linked to the embodied - cognition, with unfolding related to the cognitive semantics and the enactivist - current of cognitive sciences, among other domains of contemporary sciences of - mind and neuroscience. The experiments involve the relationship between music - and movement, based on prior research using as a reference context in which it - is not possible to establish a clear distinction between them: the Capoeira. Finally, - we proposes a discussion about the application of the theoretical approach in - two compositions: Boreal IV, for Steel Drums and real time electronics, and Converse, - collaborative multimedia piece for piano, real-time audio (Puredata) and video - processing (GEM and live video) and a dancer.' - address: 'Porto Alegre, Brazil' - author: Guilherme Bertissolo - bibtex: "@inproceedings{Bertissolo2019,\n abstract = { This paper focuses on ongoing\ - \ research in music composition based on the study of cognitive research in\ - \ musical meaning. 
As a method and result at the same time, we propose the creation\ - \ of experiments related to key issues in composition and music cognition, such\ - \ as music and movement, memory, expectation and metaphor in creative process.\ - \ The theoretical reference approached is linked to the embodied cognition, with\ - \ unfolding related to the cognitive semantics and the enactivist current of cognitive\ - \ sciences, among other domains of contemporary sciences of mind and neuroscience.\ - \ The experiments involve the relationship between music and movement, based on\ - \ prior research using as a reference context in which it is not possible to establish\ - \ a clear distinction between them: the Capoeira. Finally, we proposes a discussion\ - \ about the application of the theoretical approach in two compositions: Boreal\ - \ IV, for Steel Drums and real time electronics, and Converse, collaborative multimedia\ - \ piece for piano, real-time audio (Puredata) and video processing (GEM and live\ - \ video) and a dancer.},\n address = {Porto Alegre, Brazil},\n author = {Guilherme\ - \ Bertissolo},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672994},\n editor\ - \ = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {361--364},\n publisher = {UFRGS},\n title = {Composing Understandings:\ - \ music, motion, gesture and embodied cognition},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper069.pdf},\n\ - \ year = {2019}\n}\n" + ID: Palsbröker2018 + abstract: 'In order to facilitate access to playing music spontaneously, the prototype + of an instrument which allows a more natural learning approach was developed as + part of the research project Drum-Dance-Music-Machine. 
The result was a modular + system consisting of several VST plug-ins, which on the one hand provides a drum + interface to create sounds and tones and on the other hand generates or manipulates + music through dance movement, in order to simplify the understanding of more abstract + characteristics of music. This paper describes the development of a new software + concept for the prototype, which since then has been further developed and evaluated + several times. This will improve the maintainability and extensibility of the + system and eliminate design weaknesses. To do so, the existing system first will + be analyzed and requirements for a new framework, which is based on the concepts + of event driven architecture and dependency injection, will be defined. The components + are then transferred to the new system and their performance is assessed. The + approach chosen in this case study and the lessons learned are intended to provide + a viable solution for solving similar problems in the development of modular VST-based + NIMEs.' + address: 'Blacksburg, Virginia, USA' + author: Patrick Palsbröker and Christine Steinmeier and Dominic Becking + bibtex: "@inproceedings{Palsbröker2018,\n abstract = {In order to facilitate access\ + \ to playing music spontaneously, the prototype of an instrument which allows\ + \ a more natural learning approach was developed as part of the research project\ + \ Drum-Dance-Music-Machine. The result was a modular system consisting of several\ + \ VST plug-ins, which on the one hand provides a drum interface to create sounds\ + \ and tones and on the other hand generates or manipulates music through dance\ + \ movement, in order to simplify the understanding of more abstract characteristics\ + \ of music. This paper describes the development of a new software concept for\ + \ the prototype, which since then has been further developed and evaluated several\ + \ times. 
This will improve the maintainability and extensibility of the system\ + \ and eliminate design weaknesses. To do so, the existing system first will be\ + \ analyzed and requirements for a new framework, which is based on the concepts\ + \ of event driven architecture and dependency injection, will be defined. The\ + \ components are then transferred to the new system and their performance is assessed.\ + \ The approach chosen in this case study and the lessons learned are intended\ + \ to provide a viable solution for solving similar problems in the development\ + \ of modular VST-based NIMEs.},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {Patrick Palsbröker and Christine Steinmeier and Dominic Becking},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302653},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {96--101},\n publisher = {Virginia Tech},\n title\ + \ = {A Framework for Modular VST-based NIMEs Using EDA and Dependency Injection},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0023.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672994 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302653 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 361--364 - publisher: UFRGS - title: 'Composing Understandings: music, motion, gesture and embodied cognition' - url: http://www.nime.org/proceedings/2019/nime2019_paper069.pdf - year: 2019 + pages: 96--101 + publisher: Virginia Tech + title: A Framework for Modular VST-based NIMEs Using EDA and Dependency Injection + url: http://www.nime.org/proceedings/2018/nime2018_paper0023.pdf + year: 2018 - ENTRYTYPE: inproceedings 
- ID: RamosFlores2019 - abstract: 'New interfaces allow performers to access new possibilities of musical - expression. Even though interfaces are often designed to be adaptable to different - software, most of them rely on external speakers or similar transducers. This - often results on disembodiment and acoustic disengagement from the interface, - and in the case of augmented instruments, from the instruments themselves. This - paper describes a project in which a hybrid system allows an acoustic integration - between the sound of acoustic saxophone and electronics.' - address: 'Porto Alegre, Brazil' - author: Cristohper Ramos Flores and Jim Murphy and Michael Norris - bibtex: "@inproceedings{RamosFlores2019,\n abstract = {New interfaces allow performers\ - \ to access new possibilities of musical expression. Even though interfaces are\ - \ often designed to be adaptable to different software, most of them rely on external\ - \ speakers or similar transducers. This often results on disembodiment and acoustic\ - \ disengagement from the interface, and in the case of augmented instruments,\ - \ from the instruments themselves. This paper describes a project in which a hybrid\ - \ system allows an acoustic integration between the sound of acoustic saxophone\ - \ and electronics.},\n address = {Porto Alegre, Brazil},\n author = {Cristohper\ - \ Ramos Flores and Jim Murphy and Michael Norris},\n booktitle = {Proceedings\ + ID: Atherton2018 + abstract: 'Chunity is a programming environment for the design of interactive audiovisual + games, instruments, and experiences. It embodies an audio-driven, sound-first + approach that integrates audio programming and graphics programming in the same + workflow, taking advantage of strongly-timed audio programming features of the + ChucK programming language and the state-of-the-art real-time graphics engine + found in Unity. 
We describe both the system and its intended workflow for the + creation of expressive audiovisual works. Chunity was evaluated as the primary + software platform in a computer music and design course, where students created + a diverse assortment of interactive audiovisual software. We present results from + the evaluation and discuss Chunity''s usability, utility, and aesthetics as a + way of working. Through these, we argue for Chunity as a unique and useful way + to program sound, graphics, and interaction in tandem, giving users the flexibility + to use a game engine to do much more than "just" make games.' + address: 'Blacksburg, Virginia, USA' + author: Jack Atherton and Ge Wang + bibtex: "@inproceedings{Atherton2018,\n abstract = {Chunity is a programming environment\ + \ for the design of interactive audiovisual games, instruments, and experiences.\ + \ It embodies an audio-driven, sound-first approach that integrates audio programming\ + \ and graphics programming in the same workflow, taking advantage of strongly-timed\ + \ audio programming features of the ChucK programming language and the state-of-the-art\ + \ real-time graphics engine found in Unity. We describe both the system and its\ + \ intended workflow for the creation of expressive audiovisual works. Chunity\ + \ was evaluated as the primary software platform in a computer music and design\ + \ course, where students created a diverse assortment of interactive audiovisual\ + \ software. We present results from the evaluation and discuss Chunity's usability,\ + \ utility, and aesthetics as a way of working. 
Through these, we argue for Chunity\ + \ as a unique and useful way to program sound, graphics, and interaction in tandem,\ + \ giving users the flexibility to use a game engine to do much more than \"just\"\ + \ make games.},\n address = {Blacksburg, Virginia, USA},\n author = {Jack Atherton\ + \ and Ge Wang},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302695},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {102--107},\n publisher = {Virginia\ + \ Tech},\n title = {Chunity: Integrated Audiovisual Programming in Unity},\n url\ + \ = {http://www.nime.org/proceedings/2018/nime2018_paper0024.pdf},\n year = {2018}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1302695 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 + issn: 2220-4806 + month: June + pages: 102--107 + publisher: Virginia Tech + title: 'Chunity: Integrated Audiovisual Programming in Unity' + url: http://www.nime.org/proceedings/2018/nime2018_paper0024.pdf + year: 2018 + + +- ENTRYTYPE: inproceedings + ID: Ianigro2018 + abstract: 'In this paper we expand on prior research into the use of Continuous + Time Recurrent Neural Networks (CTRNNs) as evolvable generators of musical structures + such as audio waveforms. This type of neural network has a compact structure and + is capable of producing a large range of temporal dynamics. Due to these properties, + we believe that CTRNNs combined with evolutionary algorithms (EA) could offer + musicians many creative possibilities for the exploration of sound. In prior work, + we have explored the use of interactive and target-based EA designs to tap into + the creative possibilities of CTRNNs. Our results have shown promise for the use + of CTRNNs in the audio domain. 
However, we feel neither EA designs allow both + open-ended discovery and effective navigation of the CTRNN audio search space + by musicians. Within this paper, we explore the possibility of using novelty search + as an alternative algorithm that facilitates both open-ended and rapid discovery + of the CTRNN creative search space.' + address: 'Blacksburg, Virginia, USA' + author: Steffan Carlos Ianigro and Oliver Bown + bibtex: "@inproceedings{Ianigro2018,\n abstract = {In this paper we expand on prior\ + \ research into the use of Continuous Time Recurrent Neural Networks (CTRNNs)\ + \ as evolvable generators of musical structures such as audio waveforms. This\ + \ type of neural network has a compact structure and is capable of producing a\ + \ large range of temporal dynamics. Due to these properties, we believe that CTRNNs\ + \ combined with evolutionary algorithms (EA) could offer musicians many creative\ + \ possibilities for the exploration of sound. In prior work, we have explored\ + \ the use of interactive and target-based EA designs to tap into the creative\ + \ possibilities of CTRNNs. Our results have shown promise for the use of CTRNNs\ + \ in the audio domain. 
However, we feel neither EA designs allow both open-ended\ + \ discovery and effective navigation of the CTRNN audio search space by musicians.\ + \ Within this paper, we explore the possibility of using novelty search as an\ + \ alternative algorithm that facilitates both open-ended and rapid discovery of\ + \ the CTRNN creative search space.},\n address = {Blacksburg, Virginia, USA},\n\ + \ author = {Steffan Carlos Ianigro and Oliver Bown},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3672996},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {365--370},\n publisher = {UFRGS},\n\ - \ title = {HypeSax: Saxophone acoustic augmentation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper070.pdf},\n\ - \ year = {2019}\n}\n" + \ doi = {10.5281/zenodo.1302697},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {108--113},\n publisher = {Virginia Tech},\n title = {Exploring Continuous\ + \ Time Recurrent Neural Networks through Novelty Search},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0025.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672996 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302697 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 365--370 - publisher: UFRGS - title: 'HypeSax: Saxophone acoustic augmentation' - url: http://www.nime.org/proceedings/2019/nime2019_paper070.pdf - year: 2019 + pages: 108--113 + publisher: Virginia Tech + title: Exploring Continuous Time Recurrent Neural Networks through Novelty Search + url: http://www.nime.org/proceedings/2018/nime2018_paper0025.pdf + year: 2018 - 
ENTRYTYPE: inproceedings - ID: Chwalek2019 - abstract: 'We describe the design of an untethered digital synthesizer that can - be held and manipulated while broadcasting audio data to a receiving off-the-shelf - Bluetooth receiver. The synthesizer allows the user to freely rotate and reorient - the instrument while exploiting non-contact light sensing for a truly expressive - performance. The system consists of a suite of sensors that convert rotation, - orientation, touch, and user proximity into various audio filters and effects - operated on preset wave tables, while offering a persistence of vision display - for input visualization. This paper discusses the design of the system, including - the circuit, mechanics, and software layout, as well as how this device may be - incorporated into a performance. ' - address: 'Porto Alegre, Brazil' - author: Patrick Chwalek and Joe Paradiso - bibtex: "@inproceedings{Chwalek2019,\n abstract = {We describe the design of an\ - \ untethered digital synthesizer that can be held and manipulated while broadcasting\ - \ audio data to a receiving off-the-shelf Bluetooth receiver. The synthesizer\ - \ allows the user to freely rotate and reorient the instrument while exploiting\ - \ non-contact light sensing for a truly expressive performance. The system consists\ - \ of a suite of sensors that convert rotation, orientation, touch, and user proximity\ - \ into various audio filters and effects operated on preset wave tables, while\ - \ offering a persistence of vision display for input visualization. This paper\ - \ discusses the design of the system, including the circuit, mechanics, and software\ - \ layout, as well as how this device may be incorporated into a performance. 
},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Patrick Chwalek and Joe Paradiso},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672998},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {371--374},\n\ - \ publisher = {UFRGS},\n title = {CD-Synth: a Rotating, Untethered, Digital Synthesizer},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_paper071.pdf},\n year =\ - \ {2019}\n}\n" + ID: Bowers2018 + abstract: 'Research into machine listening has intensified in recent years creating + a variety of techniques for recognising musical features suitable, for example, + in musicological analysis or commercial application in song recognition. Within + NIME, several projects exist seeking to make these techniques useful in real-time + music making. However, we debate whether the functionally-oriented approaches + inherited from engineering domains that much machine listening research manifests + is fully suited to the exploratory, divergent, boundary-stretching, uncertainty-seeking, + playful and irreverent orientations of many artists. To explore this, we engaged + in a concerted collaborative design exercise in which many different listening + algorithms were implemented and presented with input which challenged their customary + range of application and the implicit norms of musicality which research can take + for granted. An immersive 3D spatialised multichannel environment was created + in which the algorithms could be explored in a hybrid installation/performance/lecture + form of research presentation. 
The paper closes with reflections on the creative + value of ‘hijacking'' formal approaches into deviant contexts, the typically undocumented + practical know-how required to make algorithms work, the productivity of a playfully + irreverent relationship between engineering and artistic approaches to NIME, and + a sketch of a sonocybernetic aesthetics for our work.' + address: 'Blacksburg, Virginia, USA' + author: John Bowers and Owen Green + bibtex: "@inproceedings{Bowers2018,\n abstract = {Research into machine listening\ + \ has intensified in recent years creating a variety of techniques for recognising\ + \ musical features suitable, for example, in musicological analysis or commercial\ + \ application in song recognition. Within NIME, several projects exist seeking\ + \ to make these techniques useful in real-time music making. However, we debate\ + \ whether the functionally-oriented approaches inherited from engineering domains\ + \ that much machine listening research manifests is fully suited to the exploratory,\ + \ divergent, boundary-stretching, uncertainty-seeking, playful and irreverent\ + \ orientations of many artists. To explore this, we engaged in a concerted collaborative\ + \ design exercise in which many different listening algorithms were implemented\ + \ and presented with input which challenged their customary range of application\ + \ and the implicit norms of musicality which research can take for granted. An\ + \ immersive 3D spatialised multichannel environment was created in which the algorithms\ + \ could be explored in a hybrid installation/performance/lecture form of research\ + \ presentation. 
The paper closes with reflections on the creative value of ‘hijacking'\ + \ formal approaches into deviant contexts, the typically undocumented practical\ + \ know-how required to make algorithms work, the productivity of a playfully irreverent\ + \ relationship between engineering and artistic approaches to NIME, and a sketch\ + \ of a sonocybernetic aesthetics for our work.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {John Bowers and Owen Green},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1302699},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {114--119},\n publisher = {Virginia Tech},\n title = {All the Noises: Hijacking\ + \ Listening Machines for Performative Research},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0026.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672998 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302699 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 371--374 - publisher: UFRGS - title: 'CD-Synth: a Rotating, Untethered, Digital Synthesizer' - url: http://www.nime.org/proceedings/2019/nime2019_paper071.pdf - year: 2019 + pages: 114--119 + publisher: Virginia Tech + title: 'All the Noises: Hijacking Listening Machines for Performative Research' + url: http://www.nime.org/proceedings/2018/nime2018_paper0026.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Granieri2019 - abstract: 'This paper presents Reach, a keyboard-based gesture recog- nition system - for live piano sound modulation. Reach is a system built using the Leap Motion - Orion SDK, Pure Data and a custom C++ OSC mapper1. 
It provides control over the - sound modulation of an acoustic piano using the pi- anist''s ancillary gestures. - The system was developed using an iterative design pro- cess, incorporating research - findings from two user studies and several case studies. The results that emerged - show the potential of recognising and utilising the pianist''s existing technique - when designing keyboard-based DMIs, reducing the requirement to learn additional - techniques.' - address: 'Porto Alegre, Brazil' - author: Niccolò Granieri and James Dooley - bibtex: "@inproceedings{Granieri2019,\n abstract = {This paper presents Reach, a\ - \ keyboard-based gesture recog- nition system for live piano sound modulation.\ - \ Reach is a system built using the Leap Motion Orion SDK, Pure Data and a custom\ - \ C++ OSC mapper1. It provides control over the sound modulation of an acoustic\ - \ piano using the pi- anist's ancillary gestures. The system was developed using\ - \ an iterative design pro- cess, incorporating research findings from two user\ - \ studies and several case studies. The results that emerged show the potential\ - \ of recognising and utilising the pianist's existing technique when designing\ - \ keyboard-based DMIs, reducing the requirement to learn additional techniques.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Niccolò Granieri and James Dooley},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3673000},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {375--376},\n\ - \ publisher = {UFRGS},\n title = {Reach: a keyboard-based gesture recognition\ - \ system for live piano sound modulation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper072.pdf},\n\ - \ year = {2019}\n}\n" + ID: Schramm2018 + abstract: 'This paper presents a system for easily augmenting polyphonic pitched + instruments. 
The entire system is designed to run on a low-cost embedded computer, + suitable for live performance and easy to customise for different use cases. The + core of the system implements real-time spectrum factorisation, decomposing polyphonic + audio input signals into music note activations. New instruments can be easily + added to the system with the help of custom spectral template dictionaries. Instrument + augmentation is achieved by replacing or mixing the instrument''s original sounds + with a large variety of synthetic or sampled sounds, which follow the polyphonic + pitch activations.' + address: 'Blacksburg, Virginia, USA' + author: Rodrigo Schramm and Federico Visi and André Brasil and Marcelo O Johann + bibtex: "@inproceedings{Schramm2018,\n abstract = {This paper presents a system\ + \ for easily augmenting polyphonic pitched instruments. The entire system is designed\ + \ to run on a low-cost embedded computer, suitable for live performance and easy\ + \ to customise for different use cases. The core of the system implements real-time\ + \ spectrum factorisation, decomposing polyphonic audio input signals into music\ + \ note activations. New instruments can be easily added to the system with the\ + \ help of custom spectral template dictionaries. 
Instrument augmentation is achieved\ + \ by replacing or mixing the instrument's original sounds with a large variety\ + \ of synthetic or sampled sounds, which follow the polyphonic pitch activations.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Rodrigo Schramm and Federico\ + \ Visi and André Brasil and Marcelo O Johann},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1302650},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {120--125},\n publisher = {Virginia Tech},\n title = {A polyphonic pitch tracking\ + \ embedded system for rapid instrument augmentation},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0027.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673000 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302650 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 375--376 - publisher: UFRGS - title: 'Reach: a keyboard-based gesture recognition system for live piano sound - modulation' - url: http://www.nime.org/proceedings/2019/nime2019_paper072.pdf - year: 2019 + pages: 120--125 + publisher: Virginia Tech + title: A polyphonic pitch tracking embedded system for rapid instrument augmentation + url: http://www.nime.org/proceedings/2018/nime2018_paper0027.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: schedel2019 - abstract: 'This paper describes the creation of a NIME created from an iron and - wooden ironing board. The ironing board acts as a resonator for the system which - includes sensors embedded in the iron such as pressure, and piezo microphones. 
- The iron has LEDs wired to the sides and at either end of the board are CCDs; - using machine learning we can identify what kind of fabric is being ironed, and - the position of the iron along the x and y-axes as well as its rotation and tilt. - This instrument is part of a larger project, Women''s Labor, that juxtaposes traditional - musical instruments such as spinets and virginals designated for “ladies” with - new interfaces for musical expression that repurpose older tools of women''s work. - Using embedded technologies, we reimagine domestic tools as musical interfaces, - creating expressive instruments from the appliances of women''s chores.' - address: 'Porto Alegre, Brazil' - author: margaret schedel and Jocelyn Ho and Matthew Blessing - bibtex: "@inproceedings{schedel2019,\n abstract = {This paper describes the creation\ - \ of a NIME created from an iron and wooden ironing board. The ironing board acts\ - \ as a resonator for the system which includes sensors embedded in the iron such\ - \ as pressure, and piezo microphones. The iron has LEDs wired to the sides and\ - \ at either end of the board are CCDs; using machine learning we can identify\ - \ what kind of fabric is being ironed, and the position of the iron along the\ - \ x and y-axes as well as its rotation and tilt. This instrument is part of a\ - \ larger project, Women's Labor, that juxtaposes traditional musical instruments\ - \ such as spinets and virginals designated for “ladies” with new interfaces for\ - \ musical expression that repurpose older tools of women's work. 
Using embedded\ - \ technologies, we reimagine domestic tools as musical interfaces, creating expressive\ - \ instruments from the appliances of women's chores.},\n address = {Porto Alegre,\ - \ Brazil},\n author = {margaret schedel and Jocelyn Ho and Matthew Blessing},\n\ + ID: Tahiroglu2018 + abstract: 'This paper introduces various ways that idiomatic gestures emerge in + performance practice with new musical instruments. It demonstrates that idiomatic + gestures can play an important role in the development of personalized performance + practices that can be the basis for the development of style and expression. Three + detailed examples -- biocontrollers, accordion-inspired instruments, and a networked + intelligent controller -- illustrate how a complex suite of factors throughout + the design, composition and performance processes can influence the development + of idiomatic gestures. We argue that the explicit consideration of idiomatic gestures + throughout the life cycle of new instruments can facilitate the emergence of style + and give rise to performances that can develop rich layers of meaning.' + address: 'Blacksburg, Virginia, USA' + author: Koray Tahiroglu and Michael Gurevich and R. Benjamin Knapp + bibtex: "@inproceedings{Tahiroglu2018,\n abstract = {This paper introduces various\ + \ ways that idiomatic gestures emerge in performance practice with new musical\ + \ instruments. It demonstrates that idiomatic gestures can play an important role\ + \ in the development of personalized performance practices that can be the basis\ + \ for the development of style and expression. Three detailed examples -- biocontrollers,\ + \ accordion-inspired instruments, and a networked intelligent controller -- illustrate\ + \ how a complex suite of factors throughout the design, composition and performance\ + \ processes can influence the development of idiomatic gestures. 
We argue that\ + \ the explicit consideration of idiomatic gestures throughout the life cycle of\ + \ new instruments can facilitate the emergence of style and give rise to performances\ + \ that can develop rich layers of meaning.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Koray Tahiroglu and Michael Gurevich and R. Benjamin Knapp},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3672729},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {377--380},\n\ - \ publisher = {UFRGS},\n title = {Women's Labor: Creating {NIME}s from Domestic\ - \ Tools },\n url = {http://www.nime.org/proceedings/2019/nime2019_paper073.pdf},\n\ - \ year = {2019}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1302701},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {126--131},\n publisher = {Virginia Tech},\n title\ + \ = {Contextualising Idiomatic Gestures in Musical Interactions with NIMEs},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0028.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3672729 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302701 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 377--380 - publisher: UFRGS - title: 'Women''s Labor: Creating NIMEs from Domestic Tools ' - url: http://www.nime.org/proceedings/2019/nime2019_paper073.pdf - year: 2019 + pages: 126--131 + publisher: Virginia Tech + title: Contextualising Idiomatic Gestures in Musical Interactions with NIMEs + url: http://www.nime.org/proceedings/2018/nime2018_paper0028.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: RauberDuBois2019 - 
abstract: 'This paper presents HMusic, a domain specific language based on music - patterns that can be used to write music and live coding. The main abstractions - provided by the language are patterns and tracks. Code written in HMusic looks - like patterns and multi-tracks available in music sequencers and drum machines. - HMusic provides primitives to design and compose patterns generating new patterns. - The basic abstractions provided by the language have an inductive definition and - HMusic is embedded in the Haskell functional programming language, programmers - can design functions to manipulate music on the fly.' - address: 'Porto Alegre, Brazil' - author: Andre Rauber Du Bois and Rodrigo Geraldo Ribeiro - bibtex: "@inproceedings{RauberDuBois2019,\n abstract = {This paper presents HMusic,\ - \ a domain specific language based on music patterns that can be used to write\ - \ music and live coding. The main abstractions provided by the language are patterns\ - \ and tracks. Code written in HMusic looks like patterns and multi-tracks available\ - \ in music sequencers and drum machines. HMusic provides primitives to design\ - \ and compose patterns generating new patterns. 
The basic abstractions provided\ - \ by the language have an inductive definition and HMusic is embedded in the Haskell\ - \ functional programming language, programmers can design functions to manipulate\ - \ music on the fly.},\n address = {Porto Alegre, Brazil},\n author = {Andre Rauber\ - \ Du Bois and Rodrigo Geraldo Ribeiro},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673003},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {381--386},\n publisher = {UFRGS},\n title = {HMusic: A\ - \ domain specific language for music programming and live coding},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper074.pdf},\n\ - \ year = {2019}\n}\n" + ID: Hantrakul2018 + abstract: 'Machine learning and deep learning has recently made a large impact in + the artistic community. In many of these applications however, the model is often + used to render the high dimensional output directly e.g. every individual pixel + in the final image. Humans arguably operate in much lower dimensional spaces during + the creative process e.g. the broad movements of a brush. In this paper, we design + a neural gesture system for music generation based around this concept. Instead + of directly generating audio, we train a Long Short Term Memory (LSTM) recurrent + neural network to generate instantaneous position and pressure on the Roli Lightpad + instrument. These generated coordinates in turn, give rise to the sonic output + defined in the synth engine. The system relies on learning these movements from + a musician who has already developed a palette of musical gestures idiomatic to + the Lightpad. 
Unlike many deep learning systems that render high dimensional output, + our low-dimensional system can be run in real-time, enabling the first real time + gestural duet of its kind between a player and a recurrent neural network on the + Lightpad instrument.' + address: 'Blacksburg, Virginia, USA' + author: Lamtharn Hantrakul + bibtex: "@inproceedings{Hantrakul2018,\n abstract = {Machine learning and deep learning\ + \ has recently made a large impact in the artistic community. In many of these\ + \ applications however, the model is often used to render the high dimensional\ + \ output directly e.g. every individual pixel in the final image. Humans arguably\ + \ operate in much lower dimensional spaces during the creative process e.g. the\ + \ broad movements of a brush. In this paper, we design a neural gesture system\ + \ for music generation based around this concept. Instead of directly generating\ + \ audio, we train a Long Short Term Memory (LSTM) recurrent neural network to\ + \ generate instantaneous position and pressure on the Roli Lightpad instrument.\ + \ These generated coordinates in turn, give rise to the sonic output defined in\ + \ the synth engine. 
The system relies on learning these movements from a musician\ + \ who has already developed a palette of musical gestures idiomatic to the Lightpad.\ + \ Unlike many deep learning systems that render high dimensional output, our low-dimensional\ + \ system can be run in real-time, enabling the first real time gestural duet of\ + \ its kind between a player and a recurrent neural network on the Lightpad instrument.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Lamtharn Hantrakul},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302703},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {132--137},\n publisher = {Virginia Tech},\n title\ + \ = {GestureRNN: A neural gesture system for the Roli Lightpad Block},\n url\ + \ = {http://www.nime.org/proceedings/2018/nime2018_paper0029.pdf},\n year = {2018}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673003 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302703 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 381--386 - publisher: UFRGS - title: 'HMusic: A domain specific language for music programming and live coding' - url: http://www.nime.org/proceedings/2019/nime2019_paper074.pdf - year: 2019 + pages: 132--137 + publisher: Virginia Tech + title: 'GestureRNN: A neural gesture system for the Roli Lightpad Block' + url: http://www.nime.org/proceedings/2018/nime2018_paper0029.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Fraiettab2019 - abstract: "This paper presents the use of Stellarium planetarium software coupled\ - \ with the VizieR database of astronomical catalogues as an interface mechanism\ - \ for creating astronomy based multimedia performances, 
and as a music composition\ - \ interface. The celestial display from Stellarium is used to query VizieR, which\ - \ then obtains scienti\nc astronomical data from the stars displayed--including\ - \ colour, celestial position, magnitude and distance--and sends it as input data\ - \ for music composition or performance. Stellarium and VizieR are controlled through\ - \ Stellar Command, a software library that couples the two systems and can be\ - \ used as both a standalone command line utility using Open Sound Control, and\ - \ as a software library." - address: 'Porto Alegre, Brazil' - author: Angelo Fraietta - bibtex: "@inproceedings{Fraiettab2019,\n abstract = {This paper presents the use\ - \ of Stellarium planetarium software coupled with the VizieR database of astronomical\ - \ catalogues as an interface mechanism for creating astronomy based multimedia\ - \ performances, and as a music composition interface. The celestial display from\ - \ Stellarium is used to query VizieR, which then obtains scienti\nc astronomical\ - \ data from the stars displayed--including colour, celestial position, magnitude\ - \ and distance--and sends it as input data for music composition or performance.\ - \ Stellarium and VizieR are controlled through Stellar Command, a software library\ - \ that couples the two systems and can be used as both a standalone command line\ - \ utility using Open Sound Control, and as a software library.},\n address = {Porto\ - \ Alegre, Brazil},\n author = {Angelo Fraietta},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.3673005},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {387--392},\n publisher = {UFRGS},\n\ - \ title = {Stellar Command: a planetarium software based cosmic performance interface},\n\ - \ url = {http://www.nime.org/proceedings/2019/nime2019_paper075.pdf},\n year =\ - \ {2019}\n}\n" 
+ ID: DiDonato2018 + abstract: 'Myo Mapper is a free and open source cross-platform application to map + data from the gestural device Myo armband into Open Sound Control (OSC) messages. + It represents a `quick and easy'' solution for exploring the Myo''s potential + for realising new interfaces for musical expression. Together with details of + the software, this paper reports some applications in which Myo Mapper has been + successfully used and a qualitative evaluation. We then proposed guidelines for + using Myo data in interactive artworks based on insight gained from the works + described and the evaluation. Findings show that Myo Mapper empowers artists and + non-skilled developers to easily take advantage of Myo data high-level features + for realising interactive artistic works. It also facilitates the recognition + of poses and gestures beyond those included with the product by using third-party + interactive machine learning software.' + address: 'Blacksburg, Virginia, USA' + author: 'Di Donato, Balandino and Jamie Bullock and Atau Tanaka' + bibtex: "@inproceedings{DiDonato2018,\n abstract = {Myo Mapper is a free and open\ + \ source cross-platform application to map data from the gestural device Myo armband\ + \ into Open Sound Control (OSC) messages. It represents a `quick and easy' solution\ + \ for exploring the Myo's potential for realising new interfaces for musical expression.\ + \ Together with details of the software, this paper reports some applications\ + \ in which Myo Mapper has been successfully used and a qualitative evaluation.\ + \ We then proposed guidelines for using Myo data in interactive artworks based\ + \ on insight gained from the works described and the evaluation. Findings show\ + \ that Myo Mapper empowers artists and non-skilled developers to easily take advantage\ + \ of Myo data high-level features for realising interactive artistic works. 
It\ + \ also facilitates the recognition of poses and gestures beyond those included\ + \ with the product by using third-party interactive machine learning software.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Di Donato, Balandino and\ + \ Jamie Bullock and Atau Tanaka},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302705},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {138--143},\n publisher = {Virginia\ + \ Tech},\n title = {Myo Mapper: a Myo armband to OSC mapper},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0030.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673005 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302705 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 387--392 - publisher: UFRGS - title: 'Stellar Command: a planetarium software based cosmic performance interface' - url: http://www.nime.org/proceedings/2019/nime2019_paper075.pdf - year: 2019 + pages: 138--143 + publisher: Virginia Tech + title: 'Myo Mapper: a Myo armband to OSC mapper' + url: http://www.nime.org/proceedings/2018/nime2018_paper0030.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Müller2019 - abstract: 'Telematic performances connect two or more locations so that participants - are able to interact in real time. Such practices blend a variety of dimensions, - insofar as the representation of remote performers on a local stage intrinsically - occurs on auditory, as well as visual and scenic, levels. 
Due to their multimodal - nature, the analysis or creation of such performances can quickly descend into - a house of mirrors wherein certain intensely interdependent dimensions come to - the fore, while others are multiplied, seem hidden or are made invisible. In order - to have a better understanding of such performances, Dimension Space Analysis, - with its capacity to review multifaceted components of a system, can be applied - to telematic performances, understood here as (a bundle of) NIMEs. In the second - part of the paper, some telematic works from the practices of the authors are - described with the toolset developed.' - address: 'Porto Alegre, Brazil' - author: Patrick Müller and Johannes Michael Schuett - bibtex: "@inproceedings{Müller2019,\n abstract = {Telematic performances connect\ - \ two or more locations so that participants are able to interact in real time.\ - \ Such practices blend a variety of dimensions, insofar as the representation\ - \ of remote performers on a local stage intrinsically occurs on auditory, as well\ - \ as visual and scenic, levels. Due to their multimodal nature, the analysis or\ - \ creation of such performances can quickly descend into a house of mirrors wherein\ - \ certain intensely interdependent dimensions come to the fore, while others are\ - \ multiplied, seem hidden or are made invisible. In order to have a better understanding\ - \ of such performances, Dimension Space Analysis, with its capacity to review\ - \ multifaceted components of a system, can be applied to telematic performances,\ - \ understood here as (a bundle of) NIMEs. 
In the second part of the paper, some\ - \ telematic works from the practices of the authors are described with the toolset\ - \ developed.},\n address = {Porto Alegre, Brazil},\n author = {Patrick Müller\ - \ and Johannes Michael Schuett},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673007},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {393--400},\n publisher = {UFRGS},\n title = {Towards a\ - \ Telematic Dimension Space},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper076.pdf},\n\ - \ year = {2019}\n}\n" + ID: Visi2018 + abstract: 'We present modosc, a set of Max abstractions designed for computing motion + descriptors from raw motion capture data in real time. The library contains methods + for extracting descriptors useful for expressive movement analysis and sonic interaction + design. modosc is designed to address the data handling and synchronization issues + that often arise when working with complex marker sets. This is achieved by adopting + a multiparadigm approach facilitated by odot and Open Sound Control to overcome + some of the limitations of conventional Max programming, and structure incoming + and outgoing data streams in a meaningful and easily accessible manner. After + describing the contents of the library and how data streams are structured and + processed, we report on a sonic interaction design use case involving motion feature + extraction and machine learning.' + address: 'Blacksburg, Virginia, USA' + author: Federico Visi and Luke Dahl + bibtex: "@inproceedings{Visi2018,\n abstract = {We present modosc, a set of Max\ + \ abstractions designed for computing motion descriptors from raw motion capture\ + \ data in real time. The library contains methods for extracting descriptors useful\ + \ for expressive movement analysis and sonic interaction design. 
modosc is designed\ + \ to address the data handling and synchronization issues that often arise when\ + \ working with complex marker sets. This is achieved by adopting a multiparadigm\ + \ approach facilitated by odot and Open Sound Control to overcome some of the\ + \ limitations of conventional Max programming, and structure incoming and outgoing\ + \ data streams in a meaningful and easily accessible manner. After describing\ + \ the contents of the library and how data streams are structured and processed,\ + \ we report on a sonic interaction design use case involving motion feature extraction\ + \ and machine learning.},\n address = {Blacksburg, Virginia, USA},\n author =\ + \ {Federico Visi and Luke Dahl},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302707},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {144--147},\n publisher = {Virginia\ + \ Tech},\n title = {Real-Time Motion Capture Analysis and Music Interaction with\ + \ the Modosc Descriptor Library},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0031.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673007 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302707 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 393--400 - publisher: UFRGS - title: Towards a Telematic Dimension Space - url: http://www.nime.org/proceedings/2019/nime2019_paper076.pdf - year: 2019 + pages: 144--147 + publisher: Virginia Tech + title: Real-Time Motion Capture Analysis and Music Interaction with the Modosc Descriptor + Library + url: http://www.nime.org/proceedings/2018/nime2018_paper0031.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: 
Lucasb2019 - abstract: 'Unity is one of the most used engines in the game industry and several - extensions have been implemented to increase its features in order to create multimedia - products in a more effective and efficient way. From the point of view of audio - development, Unity has included an Audio Mixer from version 5 which facilitates - the organization of sounds, effects, and the mixing process in general; however, - this module can be manipulated only through its graphical interface. This work - describes the design and implementation of an extension tool to map parameters - from the Audio Mixer to MIDI external devices, like controllers with sliders and - knobs, such way the developer can easily mix a game with the feeling of a physical - interface. ' - address: 'Porto Alegre, Brazil' - author: Pedro Pablo Lucas - bibtex: "@inproceedings{Lucasb2019,\n abstract = {Unity is one of the most used\ - \ engines in the game industry and several extensions have been implemented to\ - \ increase its features in order to create multimedia products in a more effective\ - \ and efficient way. From the point of view of audio development, Unity has included\ - \ an Audio Mixer from version 5 which facilitates the organization of sounds,\ - \ effects, and the mixing process in general; however, this module can be manipulated\ - \ only through its graphical interface. This work describes the design and implementation\ - \ of an extension tool to map parameters from the Audio Mixer to MIDI external\ - \ devices, like controllers with sliders and knobs, such way the developer can\ - \ easily mix a game with the feeling of a physical interface. },\n address = {Porto\ - \ Alegre, Brazil},\n author = {Pedro Pablo Lucas},\n booktitle = {Proceedings\ + ID: Arslan2018 + abstract: 'Mobile devices have been a promising platform for musical performance + thanks to the various sensors readily available on board. 
In particular, mobile + cameras can provide rich input as they can capture a wide variety of user gestures + or environment dynamics. However, this raw camera input only provides continuous + parameters and requires expensive computation. In this paper, we propose to combine + motion/gesture input with the touch input, in order to filter movement information + both temporally and spatially, thus increasing expressiveness while reducing computation + time. We present a design space which demonstrates the diversity of interactions + that our technique enables. We also report the results of a user study in which + we observe how musicians appropriate the interaction space with an example instrument.' + address: 'Blacksburg, Virginia, USA' + author: Cagan Arslan and Florent Berthaut and Jean Martinet and Ioan Marius Bilasco + and Laurent Grisoni + bibtex: "@inproceedings{Arslan2018,\n abstract = {Mobile devices have been a promising\ + \ platform for musical performance thanks to the various sensors readily available\ + \ on board. In particular, mobile cameras can provide rich input as they can capture\ + \ a wide variety of user gestures or environment dynamics. However, this raw camera\ + \ input only provides continuous parameters and requires expensive computation.\ + \ In this paper, we propose to combine motion/gesture input with the touch input,\ + \ in order to filter movement information both temporally and spatially, thus\ + \ increasing expressiveness while reducing computation time. 
We present a design\ + \ space which demonstrates the diversity of interactions that our technique enables.\ + \ We also report the results of a user study in which we observe how musicians\ + \ appropriate the interaction space with an example instrument.},\n address =\ + \ {Blacksburg, Virginia, USA},\n author = {Cagan Arslan and Florent Berthaut and\ + \ Jean Martinet and Ioan Marius Bilasco and Laurent Grisoni},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3673009},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {401--404},\n publisher = {UFRGS},\n\ - \ title = {A {MIDI} Controller Mapper for the Built-in Audio Mixer in the Unity\ - \ Game Engine},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper077.pdf},\n\ - \ year = {2019}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.3673009 - editor: Marcelo Queiroz and Anna Xambó Sedó - issn: 2220-4806 - month: June - pages: 401--404 - publisher: UFRGS - title: A MIDI Controller Mapper for the Built-in Audio Mixer in the Unity Game Engine - url: http://www.nime.org/proceedings/2019/nime2019_paper077.pdf - year: 2019 - - -- ENTRYTYPE: inproceedings - ID: Lucasc2019 - abstract: 'AuSynthAR is a digital instrument based on Augmented Reality (AR), which - allows sound synthesis modules to create simple sound networks. It only requires - a mobile device, a set of tokens, a sound output device and, optionally, a MIDI - controller, which makes it an affordable instrument. An application running on - the device generates the sounds and the graphical augmentations over the tokens.' 
- address: 'Porto Alegre, Brazil' - author: Pedro Pablo Lucas - bibtex: "@inproceedings{Lucasc2019,\n abstract = {AuSynthAR is a digital instrument\ - \ based on Augmented Reality (AR), which allows sound synthesis modules to create\ - \ simple sound networks. It only requires a mobile device, a set of tokens, a\ - \ sound output device and, optionally, a MIDI controller, which makes it an affordable\ - \ instrument. An application running on the device generates the sounds and the\ - \ graphical augmentations over the tokens.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Pedro Pablo Lucas},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673011},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {405--406},\n publisher = {UFRGS},\n title = {AuSynthAR:\ - \ A simple low-cost modular synthesizer based on Augmented Reality},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper078.pdf},\n\ - \ year = {2019}\n}\n" + \ doi = {10.5281/zenodo.1302709},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {148--151},\n publisher = {Virginia Tech},\n title = {The Phone with\ + \ the Flow: Combining Touch + Optical Flow in Mobile Instruments},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0032.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673011 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302709 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 405--406 - publisher: UFRGS - title: 'AuSynthAR: A simple low-cost modular synthesizer based on Augmented Reality' - url: 
http://www.nime.org/proceedings/2019/nime2019_paper078.pdf - year: 2019 + pages: 148--151 + publisher: Virginia Tech + title: 'The Phone with the Flow: Combining Touch + Optical Flow in Mobile Instruments' + url: http://www.nime.org/proceedings/2018/nime2018_paper0032.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Haddad2019 - abstract: 'This paper introduces a versatile module for Eurorack synthesizers that - allows multiple modular synthesizers to be patched together remotely through the - world wide web. The module is configured from a read-eval-print-loop environment - running in the web browser, that can be used to send signals to the modular synthesizer - from a live coding interface or from various data sources on the internet.' - address: 'Porto Alegre, Brazil' - author: Don Derek Haddad and Joe Paradiso - bibtex: "@inproceedings{Haddad2019,\n abstract = {This paper introduces a versatile\ - \ module for Eurorack synthesizers that allows multiple modular synthesizers to\ - \ be patched together remotely through the world wide web. The module is configured\ - \ from a read-eval-print-loop environment running in the web browser, that can\ - \ be used to send signals to the modular synthesizer from a live coding interface\ - \ or from various data sources on the internet.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Don Derek Haddad and Joe Paradiso},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.3673013},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {407--410},\n publisher = {UFRGS},\n\ - \ title = {The World Wide Web in an Analog Patchbay},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper079.pdf},\n\ - \ year = {2019}\n}\n" + ID: Engeln2018 + abstract: 'Many digital interfaces for audio effects still resemble racks and cases + of their hardware counterparts. 
For instance, DSP-algorithms are often adjusted + via direct value input, sliders, or knobs. While recent research has started to + experiment with the capabilities offered by modern interfaces, there are no examples + for productive applications such as audio-morphing. Audio-morphing as a special + field of DSP has a high complexity for the morph itself and for the parametrization + of the transition between two sources. We propose a multi-touch enhanced interface + for visual audiomorphing. This interface visualizes the internal processing and + allows direct manipulation of the morphing parameters in the visualization. Using + multi-touch gestures to manipulate audio-morphing in a visual way, sound design + and music production becomes more unrestricted and creative.' + address: 'Blacksburg, Virginia, USA' + author: Lars Engeln and Dietrich Kammer and Leon Brandt and Rainer Groh + bibtex: "@inproceedings{Engeln2018,\n abstract = {Many digital interfaces for audio\ + \ effects still resemble racks and cases of their hardware counterparts. For instance,\ + \ DSP-algorithms are often adjusted via direct value input, sliders, or knobs.\ + \ While recent research has started to experiment with the capabilities offered\ + \ by modern interfaces, there are no examples for productive applications such\ + \ as audio-morphing. Audio-morphing as a special field of DSP has a high complexity\ + \ for the morph itself and for the parametrization of the transition between two\ + \ sources. We propose a multi-touch enhanced interface for visual audiomorphing.\ + \ This interface visualizes the internal processing and allows direct manipulation\ + \ of the morphing parameters in the visualization. 
Using multi-touch gestures\ + \ to manipulate audio-morphing in a visual way, sound design and music production\ + \ becomes more unrestricted and creative.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Lars Engeln and Dietrich Kammer and Leon Brandt and Rainer\ + \ Groh},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1302711},\n editor = {Luke\ + \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ + \ {2220-4806},\n month = {June},\n pages = {152--155},\n publisher = {Virginia\ + \ Tech},\n title = {Multi-Touch Enhanced Visual Audio-Morphing},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0033.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673013 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302711 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 407--410 - publisher: UFRGS - title: The World Wide Web in an Analog Patchbay - url: http://www.nime.org/proceedings/2019/nime2019_paper079.pdf - year: 2019 + pages: 152--155 + publisher: Virginia Tech + title: Multi-Touch Enhanced Visual Audio-Morphing + url: http://www.nime.org/proceedings/2018/nime2018_paper0033.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Yoshimura2019 - abstract: 'In this paper, we propose a “voice” instrument based on vocal tract models - with a soft material for a 3D printer and an electrolarynx. In our practice, we - explore the incongruity of the voice instrument through the accompanying music - production and performance. 
With the instrument, we aim to return to the fact - that the “Machine speaks out.” With the production of a song “Vocalise (Incomplete),” - and performances, we reveal how the instrument could work with the audiences and - explore the uncultivated field of voices.' - address: 'Porto Alegre, Brazil' - author: Fou Yoshimura and kazuhiro jo - bibtex: "@inproceedings{Yoshimura2019,\n abstract = {In this paper, we propose a\ - \ “voice” instrument based on vocal tract models with a soft material for a 3D\ - \ printer and an electrolarynx. In our practice, we explore the incongruity of\ - \ the voice instrument through the accompanying music production and performance.\ - \ With the instrument, we aim to return to the fact that the “Machine speaks out.”\ - \ With the production of a song “Vocalise (Incomplete),” and performances, we\ - \ reveal how the instrument could work with the audiences and explore the uncultivated\ - \ field of voices.},\n address = {Porto Alegre, Brazil},\n author = {Fou Yoshimura\ - \ and kazuhiro jo},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673015},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {411--412},\n publisher = {UFRGS},\n title = {A \"voice\"\ - \ instrument based on vocal tract models by using soft material for a 3D printer\ - \ and an electrolarynx},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper080.pdf},\n\ - \ year = {2019}\n}\n" + ID: Çamcı2018 + abstract: 'We describe an innovative multi-touch performance tool for real-time + granular synthesis based on hand-drawn waveform paths. GrainTrain is a cross-platform + web application that can run on both desktop and mobile computers, including tablets + and phones. 
In this paper, we first offer an analysis of existing granular synthesis + tools from an interaction stand-point, and outline a taxonomy of common interaction + paradigms used in their designs. We then delineate the implementation of GrainTrain, + and its unique approach to controlling real-time granular synthesis. We describe + practical scenarios in which GrainTrain enables new performance possibilities. + Finally, we discuss the results of a user study, and provide reports from expert + users who evaluated GrainTrain.' + address: 'Blacksburg, Virginia, USA' + author: Anıl Çamcı + bibtex: "@inproceedings{Çamcı2018,\n abstract = {We describe an innovative multi-touch\ + \ performance tool for real-time granular synthesis based on hand-drawn waveform\ + \ paths. GrainTrain is a cross-platform web application that can run on both desktop\ + \ and mobile computers, including tablets and phones. In this paper, we first\ + \ offer an analysis of existing granular synthesis tools from an interaction stand-point,\ + \ and outline a taxonomy of common interaction paradigms used in their designs.\ + \ We then delineate the implementation of GrainTrain, and its unique approach\ + \ to controlling real-time granular synthesis. We describe practical scenarios\ + \ in which GrainTrain enables new performance possibilities. 
Finally, we discuss\ + \ the results of a user study, and provide reports from expert users who evaluated\ + \ GrainTrain.},\n address = {Blacksburg, Virginia, USA},\n author = {Anıl Çamcı},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1302529},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {156--161},\n publisher = {Virginia Tech},\n title\ + \ = {GrainTrain: A Hand-drawn Multi-touch Interface for Granular Synthesis},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0034.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673015 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302529 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 411--412 - publisher: UFRGS - title: A "voice" instrument based on vocal tract models by using soft material for - a 3D printer and an electrolarynx - url: http://www.nime.org/proceedings/2019/nime2019_paper080.pdf - year: 2019 + pages: 156--161 + publisher: Virginia Tech + title: 'GrainTrain: A Hand-drawn Multi-touch Interface for Granular Synthesis' + url: http://www.nime.org/proceedings/2018/nime2018_paper0034.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: YepezPlacencia2019 - abstract: 'Mechatronic chordophones have become increasingly common in mechatronic - music. As expressive instruments, they offer multiple techniques to create and - manipulate sounds using their actuation mechanisms. Chordophone designs have taken - multiple forms, from frames that play a guitar-like instrument, to machines that - integrate strings and actuators as part of their frame. 
However, few of these - instruments have taken advantage of dynamics, which have been largely unexplored. - This paper details the design and construction of a new picking mechanism prototype - which enables expressive techniques through fast and precise movement and actuation. - We have adopted iterative design and rapid prototyping strategies to develop and - refine a compact picker capable of creating dynamic variations reliably. Finally, - a quantitative evaluation process demonstrates that this system offers the speed - and consistency of previously existing picking mechanisms, while providing increased - control over musical dynamics and articulations.' - address: 'Porto Alegre, Brazil' - author: Juan Pablo Yepez Placencia and Jim Murphy and Dale Carnegie - bibtex: "@inproceedings{YepezPlacencia2019,\n abstract = {Mechatronic chordophones\ - \ have become increasingly common in mechatronic music. As expressive instruments,\ - \ they offer multiple techniques to create and manipulate sounds using their actuation\ - \ mechanisms. Chordophone designs have taken multiple forms, from frames that\ - \ play a guitar-like instrument, to machines that integrate strings and actuators\ - \ as part of their frame. However, few of these instruments have taken advantage\ - \ of dynamics, which have been largely unexplored. This paper details the design\ - \ and construction of a new picking mechanism prototype which enables expressive\ - \ techniques through fast and precise movement and actuation. We have adopted\ - \ iterative design and rapid prototyping strategies to develop and refine a compact\ - \ picker capable of creating dynamic variations reliably. 
Finally, a quantitative\ - \ evaluation process demonstrates that this system offers the speed and consistency\ - \ of previously existing picking mechanisms, while providing increased control\ - \ over musical dynamics and articulations.},\n address = {Porto Alegre, Brazil},\n\ - \ author = {Juan Pablo Yepez Placencia and Jim Murphy and Dale Carnegie},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.3673017},\n editor = {Marcelo Queiroz and\ - \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {413--418},\n\ - \ publisher = {UFRGS},\n title = {Exploring Dynamic Variations for Expressive\ - \ Mechatronic Chordophones},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper081.pdf},\n\ - \ year = {2019}\n}\n" + ID: xia2018 + abstract: 'Traditional instrument learning procedure is time-consuming; it begins + with learning music notations and necessitates layers of sophistication and abstraction. + Haptic interfaces open another door to the music world for the vast majority of + talentless beginners when traditional training methods are not effective. However, + the existing haptic interfaces can only be used to learn specially designed pieces + with great restrictions on duration and pitch range due to the fact that it is + only feasible to guide a part of performance motion haptically for most instruments. + Our study breaks such restrictions using a semi-haptic guidance method. For the + first time, the pitch range of the haptically learned pieces go beyond an octave + (with the fingering motion covers most of the possible choices) and the duration + of learned pieces cover a whole phrase. This significant change leads to a more + realistic instrument learning process. Experiments show that semi-haptic interface + is effective as long as learners are not “tone deaf”. 
Using our prototype device, + the learning rate is about 30% faster compared with learning from videos.' + address: 'Blacksburg, Virginia, USA' + author: gus xia and Roger B. Dannenberg + bibtex: "@inproceedings{xia2018,\n abstract = {Traditional instrument learning procedure\ + \ is time-consuming; it begins with learning music notations and necessitates\ + \ layers of sophistication and abstraction. Haptic interfaces open another door\ + \ to the music world for the vast majority of talentless beginners when traditional\ + \ training methods are not effective. However, the existing haptic interfaces\ + \ can only be used to learn specially designed pieces with great restrictions\ + \ on duration and pitch range due to the fact that it is only feasible to guide\ + \ a part of performance motion haptically for most instruments. Our study breaks\ + \ such restrictions using a semi-haptic guidance method. For the first time, the\ + \ pitch range of the haptically learned pieces go beyond an octave (with the fingering\ + \ motion covers most of the possible choices) and the duration of learned pieces\ + \ cover a whole phrase. This significant change leads to a more realistic instrument\ + \ learning process. Experiments show that semi-haptic interface is effective as\ + \ long as learners are not “tone deaf”. Using our prototype device, the learning\ + \ rate is about 30% faster compared with learning from videos.},\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {gus xia and Roger B. 
Dannenberg},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302531},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {162--167},\n publisher = {Virginia Tech},\n title = {ShIFT: A Semi-haptic\ + \ Interface for Flute Tutoring},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0035.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673017 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302531 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 413--418 - publisher: UFRGS - title: Exploring Dynamic Variations for Expressive Mechatronic Chordophones - url: http://www.nime.org/proceedings/2019/nime2019_paper081.pdf - year: 2019 + pages: 162--167 + publisher: Virginia Tech + title: 'ShIFT: A Semi-haptic Interface for Flute Tutoring' + url: http://www.nime.org/proceedings/2018/nime2018_paper0035.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Chauhan2019 - abstract: 'In this paper, we introduce and explore a novel Virtual Reality musical - interaction system (named REVOLVE) that utilises a user-guided evolutionary algorithm - to personalise musical instruments to users'' individual preferences. REVOLVE - is designed towards being an `endlessly entertaining'' experience through the - potentially infinite number of sounds that can be produced. Our hypothesis is - that using evolutionary algorithms with VR for musical interactions will lead - to increased user telepresence. In addition to this, REVOLVE was designed to inform - novel research into this unexplored area. 
Think aloud trials and thematic analysis - revealed 5 main themes: control, comparison to the real world, immersion, general - usability and limitations, in addition to practical improvements. Overall, it - was found that this combination of technologies did improve telepresence levels, - proving the original hypothesis correct.' - address: 'Porto Alegre, Brazil' - author: Dhruv Chauhan and Peter Bennett - bibtex: "@inproceedings{Chauhan2019,\n abstract = {In this paper, we introduce and\ - \ explore a novel Virtual Reality musical interaction system (named REVOLVE) that\ - \ utilises a user-guided evolutionary algorithm to personalise musical instruments\ - \ to users' individual preferences. REVOLVE is designed towards being an `endlessly\ - \ entertaining' experience through the potentially infinite number of sounds that\ - \ can be produced. Our hypothesis is that using evolutionary algorithms with VR\ - \ for musical interactions will lead to increased user telepresence. In addition\ - \ to this, REVOLVE was designed to inform novel research into this unexplored\ - \ area. Think aloud trials and thematic analysis revealed 5 main themes: control,\ - \ comparison to the real world, immersion, general usability and limitations,\ - \ in addition to practical improvements. 
Overall, it was found that this combination\ - \ of technologies did improve telepresence levels, proving the original hypothesis\ - \ correct.},\n address = {Porto Alegre, Brazil},\n author = {Dhruv Chauhan and\ - \ Peter Bennett},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673019},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {419--422},\n publisher = {UFRGS},\n title = {Searching\ - \ for the Perfect Instrument: Increased Telepresence through Interactive Evolutionary\ - \ Instrument Design},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper082.pdf},\n\ - \ year = {2019}\n}\n" + ID: Morreale2018 + abstract: 'The term `NIME'' --- New Interfaces for Musical Expression --- has come + to signify both technical and cultural characteristics. Not all new musical instruments + are NIMEs, and not all NIMEs are defined as such for the sole ephemeral condition + of being new. So, what are the typical characteristics of NIMEs and what are their + roles in performers'' practice? Is there a typical NIME repertoire? This paper + aims to address these questions with a bottom up approach. We reflect on the answers + of 78 NIME performers to an online questionnaire discussing their performance + experience with NIMEs. The results of our investigation explore the role of NIMEs + in the performers'' practice and identify the values that are common among performers. + We find that most NIMEs are viewed as exploratory tools created by and for performers, + and that they are constantly in development and almost in no occasions in a finite + state. The findings of our survey also reflect upon virtuosity with NIMEs, whose + peculiar performance practice results in learning trajectories that often do not + lead to the development of virtuosity as it is commonly understood in traditional + performance.' 
+ address: 'Blacksburg, Virginia, USA' + author: Fabio Morreale and Andrew P. McPherson and Marcelo Wanderley + bibtex: "@inproceedings{Morreale2018,\n abstract = {The term `NIME' --- New Interfaces\ + \ for Musical Expression --- has come to signify both technical and cultural characteristics.\ + \ Not all new musical instruments are NIMEs, and not all NIMEs are defined as\ + \ such for the sole ephemeral condition of being new. So, what are the typical\ + \ characteristics of NIMEs and what are their roles in performers' practice? Is\ + \ there a typical NIME repertoire? This paper aims to address these questions\ + \ with a bottom up approach. We reflect on the answers of 78 NIME performers to\ + \ an online questionnaire discussing their performance experience with NIMEs.\ + \ The results of our investigation explore the role of NIMEs in the performers'\ + \ practice and identify the values that are common among performers. We find that\ + \ most NIMEs are viewed as exploratory tools created by and for performers, and\ + \ that they are constantly in development and almost in no occasions in a finite\ + \ state. The findings of our survey also reflect upon virtuosity with NIMEs, whose\ + \ peculiar performance practice results in learning trajectories that often do\ + \ not lead to the development of virtuosity as it is commonly understood in traditional\ + \ performance.},\n address = {Blacksburg, Virginia, USA},\n author = {Fabio Morreale\ + \ and Andrew P. 
McPherson and Marcelo Wanderley},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1302533},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {168--173},\n publisher = {Virginia Tech},\n title = {NIME Identity from the\ + \ Performer's Perspective},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0036.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673019 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302533 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 419--422 - publisher: UFRGS - title: 'Searching for the Perfect Instrument: Increased Telepresence through Interactive - Evolutionary Instrument Design' - url: http://www.nime.org/proceedings/2019/nime2019_paper082.pdf - year: 2019 + pages: 168--173 + publisher: Virginia Tech + title: NIME Identity from the Performer's Perspective + url: http://www.nime.org/proceedings/2018/nime2018_paper0036.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Savery2019 - abstract: 'Harriet Padberg wrote Computer-Composed Canon and Free Fugue as part - of her 1964 dissertation in Mathematics and Music at Saint Louis University. This - program is one of the earliest examples of text-to-music software and algorithmic - composition, which are areas of great interest in the present-day field of music - technology. This paper aims to analyze the technological innovation, aesthetic - design process, and impact of Harriet Padberg''s original 1964 thesis as well - as the design of a modern recreation and utilization, in order to gain insight - to the nature of revisiting older works. 
Here, we present our open source recreation - of Padberg''s program with a modern interface and, through its use as an artistic - tool by three composers, show how historical works can be effectively used for - new creative purposes in contemporary contexts. Not Even One by Molly Jones draws - on the historical and social significance of Harriet Padberg through using her - program in a piece about the lack of representation of women judges in composition - competitions. Brevity by Anna Savery utilizes the original software design as - a composition tool, and The Padberg Piano by Anthony Caulkins uses the melodic - generation of the original to create a software instrument.' - address: 'Porto Alegre, Brazil' - author: Richard J Savery and Benjamin Genchel and Jason Brent Smith and Anthony - Caulkins and Molly E Jones and Anna Savery - bibtex: "@inproceedings{Savery2019,\n abstract = {Harriet Padberg wrote Computer-Composed\ - \ Canon and Free Fugue as part of her 1964 dissertation in Mathematics and Music\ - \ at Saint Louis University. This program is one of the earliest examples of text-to-music\ - \ software and algorithmic composition, which are areas of great interest in the\ - \ present-day field of music technology. This paper aims to analyze the technological\ - \ innovation, aesthetic design process, and impact of Harriet Padberg's original\ - \ 1964 thesis as well as the design of a modern recreation and utilization, in\ - \ order to gain insight to the nature of revisiting older works. 
Here, we present\ - \ our open source recreation of Padberg's program with a modern interface and,\ - \ through its use as an artistic tool by three composers, show how historical\ - \ works can be effectively used for new creative purposes in contemporary contexts.\ - \ Not Even One by Molly Jones draws on the historical and social significance\ - \ of Harriet Padberg through using her program in a piece about the lack of representation\ - \ of women judges in composition competitions. Brevity by Anna Savery utilizes\ - \ the original software design as a composition tool, and The Padberg Piano by\ - \ Anthony Caulkins uses the melodic generation of the original to create a software\ - \ instrument.},\n address = {Porto Alegre, Brazil},\n author = {Richard J Savery\ - \ and Benjamin Genchel and Jason Brent Smith and Anthony Caulkins and Molly E\ - \ Jones and Anna Savery},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673021},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {423--428},\n publisher = {UFRGS},\n title = {Learning from\ - \ History: Recreating and Repurposing Harriet Padberg's Computer Composed Canon\ - \ and Free Fugue},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper083.pdf},\n\ - \ year = {2019}\n}\n" + ID: Xambó2018 + abstract: 'In recent years, there has been an increase in awareness of the underrepresentation + of women in the sound and music computing fields. The New Interfaces for Musical + Expression (NIME) conference is not an exception, with a number of open questions + remaining around the issue. In the present paper, we study the presence and evolution + over time of women authors in NIME since the beginning of the conference in 2001 + until 2017. 
We discuss the results of such a gender imbalance and potential solutions + by summarizing the actions taken by a number of worldwide initiatives that have + put an effort into making women''s work visible in our field, with a particular + emphasis on Women in Music Tech (WiMT), a student-led organization that aims to + encourage more women to join music technology, as a case study. We conclude with + a hope for an improvement in the representation of women in NIME by presenting + WiNIME, a public online database that details who are the women authors in NIME.' + address: 'Blacksburg, Virginia, USA' + author: Anna Xambó + bibtex: "@inproceedings{Xambó2018,\n abstract = {In recent years, there has been\ + \ an increase in awareness of the underrepresentation of women in the sound and\ + \ music computing fields. The New Interfaces for Musical Expression (NIME) conference\ + \ is not an exception, with a number of open questions remaining around the issue.\ + \ In the present paper, we study the presence and evolution over time of women\ + \ authors in NIME since the beginning of the conference in 2001 until 2017. We\ + \ discuss the results of such a gender imbalance and potential solutions by summarizing\ + \ the actions taken by a number of worldwide initiatives that have put an effort\ + \ into making women's work visible in our field, with a particular emphasis on\ + \ Women in Music Tech (WiMT), a student-led organization that aims to encourage\ + \ more women to join music technology, as a case study. 
We conclude with a hope\ + \ for an improvement in the representation of women in NIME by presenting WiNIME,\ + \ a public online database that details who are the women authors in NIME.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Anna Xambó},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302535},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {174--177},\n publisher = {Virginia Tech},\n title\ + \ = {Who Are the Women Authors in NIME?–Improving Gender Balance in NIME Research},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0037.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673021 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302535 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 423--428 - publisher: UFRGS - title: 'Learning from History: Recreating and Repurposing Harriet Padberg''s Computer - Composed Canon and Free Fugue' - url: http://www.nime.org/proceedings/2019/nime2019_paper083.pdf - year: 2019 + pages: 174--177 + publisher: Virginia Tech + title: 'Who Are the Women Authors in NIME?–Improving Gender Balance in NIME Research' + url: http://www.nime.org/proceedings/2018/nime2018_paper0037.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Berdahl2019 - abstract: 'The design of a Spatially Distributed Vibrotactile Actuator Array (SDVAA) - for the fingertips is presented. It provides high-fidelity vibrotactile stimulation - at the audio sampling rate. Prior works are discussed, and the system is demonstrated - using two music compositions by the authors.' 
- address: 'Porto Alegre, Brazil' - author: Edgar Berdahl and Austin Franklin and Eric Sheffield - bibtex: "@inproceedings{Berdahl2019,\n abstract = {The design of a Spatially Distributed\ - \ Vibrotactile Actuator Array (SDVAA) for the fingertips is presented. It provides\ - \ high-fidelity vibrotactile stimulation at the audio sampling rate. Prior works\ - \ are discussed, and the system is demonstrated using two music compositions by\ - \ the authors.},\n address = {Porto Alegre, Brazil},\n author = {Edgar Berdahl\ - \ and Austin Franklin and Eric Sheffield},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673023},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {429--430},\n publisher = {UFRGS},\n title = {A Spatially\ - \ Distributed Vibrotactile Actuator Array for the Fingertips},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper084.pdf},\n\ - \ year = {2019}\n}\n" + ID: Reid2018 + abstract: 'This paper presents a collection of hardware-based technologies for live + performance developed by women over the last few decades. The field of music technology + and interface design has a significant gender imbalance, with men greatly outnumbering + women. The purpose of this paper is to promote the visibility and representation + of women in this field, and to encourage discussion on the importance of mentorship + and role models for young women and girls in music technology.' + address: 'Blacksburg, Virginia, USA' + author: Sarah Reid and Sara Sithi-Amnuai and Ajay Kapur + bibtex: "@inproceedings{Reid2018,\n abstract = {This paper presents a collection\ + \ of hardware-based technologies for live performance developed by women over\ + \ the last few decades. The field of music technology and interface design has\ + \ a significant gender imbalance, with men greatly outnumbering women. 
The purpose\ + \ of this paper is to promote the visibility and representation of women in this\ + \ field, and to encourage discussion on the importance of mentorship and role\ + \ models for young women and girls in music technology.},\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Sarah Reid and Sara Sithi-Amnuai and Ajay Kapur},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1302537},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {178--183},\n publisher = {Virginia Tech},\n title\ + \ = {Women Who Build Things: Gestural Controllers, Augmented Instruments, and\ + \ Musical Mechatronics},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0038.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673023 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302537 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 429--430 - publisher: UFRGS - title: A Spatially Distributed Vibrotactile Actuator Array for the Fingertips - url: http://www.nime.org/proceedings/2019/nime2019_paper084.pdf - year: 2019 + pages: 178--183 + publisher: Virginia Tech + title: 'Women Who Build Things: Gestural Controllers, Augmented Instruments, and + Musical Mechatronics' + url: http://www.nime.org/proceedings/2018/nime2018_paper0038.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Gregorio2019 - abstract: 'Feature-based synthesis applies machine learning and signal processing - methods to the development of alternative interfaces for controlling parametric - synthesis algorithms. 
One approach, geared toward real-time control, uses low - dimensional gestural controllers and learned mappings from control spaces to parameter - spaces, making use of an intermediate latent timbre distribution, such that the - control space affords a spatially-intuitive arrangement of sonic possibilities. - Whereas many existing systems present alternatives to the traditional parametric - interfaces, the proposed system explores ways in which feature-based synthesis - can augment one-to-one parameter control, made possible by fully invertible mappings - between control and parameter spaces.' - address: 'Porto Alegre, Brazil' - author: Jeff Gregorio and Youngmoo Kim - bibtex: "@inproceedings{Gregorio2019,\n abstract = {Feature-based synthesis applies\ - \ machine learning and signal processing methods to the development of alternative\ - \ interfaces for controlling parametric synthesis algorithms. One approach, geared\ - \ toward real-time control, uses low dimensional gestural controllers and learned\ - \ mappings from control spaces to parameter spaces, making use of an intermediate\ - \ latent timbre distribution, such that the control space affords a spatially-intuitive\ - \ arrangement of sonic possibilities. 
Whereas many existing systems present alternatives\ - \ to the traditional parametric interfaces, the proposed system explores ways\ - \ in which feature-based synthesis can augment one-to-one parameter control, made\ - \ possible by fully invertible mappings between control and parameter spaces.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Jeff Gregorio and Youngmoo Kim},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.3673025},\n editor = {Marcelo Queiroz\ - \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {431--436},\n\ - \ publisher = {UFRGS},\n title = {Augmenting Parametric Synthesis with Learned\ - \ Timbral Controllers},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper085.pdf},\n\ - \ year = {2019}\n}\n" + ID: Jack2018 + abstract: 'An oft-cited aspiration of digital musical instrument (DMI) design is + to create instruments, in the words of Wessel and Wright, with a ‘low entry fee + and no ceiling on virtuosity''. This is a difficult task to achieve: many new + instruments are aimed at either the expert or amateur musician, with few instruments + catering for both. There is often a balance between learning curve and the nuance + of musical control in DMIs. In this paper we present a study conducted with non-musicians + and guitarists playing guitar-derivative DMIs with variable levels of control + intimacy: how the richness and nuance of a performer''s movement translates into + the musical output of an instrument. Findings suggest a significant difference + in preference for levels of control intimacy between the guitarists and the non-musicians. + In particular, the guitarists unanimously preferred the richest of the two settings + whereas the non-musicians generally preferred the setting with lower richness. 
+ This difference is notable because it is often taken as a given that increasing + richness is a way to make instruments more enjoyable to play, however, this result + only seems to be true for expert players.' + address: 'Blacksburg, Virginia, USA' + author: Robert H Jack and Jacob Harrison and Fabio Morreale and Andrew P. McPherson + bibtex: "@inproceedings{Jack2018,\n abstract = {An oft-cited aspiration of digital\ + \ musical instrument (DMI) design is to create instruments, in the words of Wessel\ + \ and Wright, with a ‘low entry fee and no ceiling on virtuosity'. This is a difficult\ + \ task to achieve: many new instruments are aimed at either the expert or amateur\ + \ musician, with few instruments catering for both. There is often a balance between\ + \ learning curve and the nuance of musical control in DMIs. In this paper we present\ + \ a study conducted with non-musicians and guitarists playing guitar-derivative\ + \ DMIs with variable levels of control intimacy: how the richness and nuance of\ + \ a performer's movement translates into the musical output of an instrument.\ + \ Findings suggest a significant difference in preference for levels of control\ + \ intimacy between the guitarists and the non-musicians. In particular, the guitarists\ + \ unanimously preferred the richest of the two settings whereas the non-musicians\ + \ generally preferred the setting with lower richness. This difference is notable\ + \ because it is often taken as a given that increasing richness is a way to make\ + \ instruments more enjoyable to play, however, this result only seems to be true\ + \ for expert players.},\n address = {Blacksburg, Virginia, USA},\n author = {Robert\ + \ H Jack and Jacob Harrison and Fabio Morreale and Andrew P. 
McPherson},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302539},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {184--189},\n publisher = {Virginia Tech},\n title\ + \ = {Democratising {DMI}s: the relationship of expertise and control intimacy},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0039.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673025 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302539 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 431--436 - publisher: UFRGS - title: Augmenting Parametric Synthesis with Learned Timbral Controllers - url: http://www.nime.org/proceedings/2019/nime2019_paper085.pdf - year: 2019 + pages: 184--189 + publisher: Virginia Tech + title: 'Democratising DMIs: the relationship of expertise and control intimacy' + url: http://www.nime.org/proceedings/2018/nime2018_paper0039.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Leigh2019 - abstract: 'This paper introduces studies conducted with musicians that aim to understand - modes of human-robot interaction, situated between automation and human augmentation. - Our robotic guitar system used for the study consists of various sound generating - mechanisms, either driven by software or by a musician directly. The control mechanism - allows the musician to have a varying degree of agency over the overall musical - direction. We present interviews and discussions on open-ended experiments conducted - with music students and musicians. 
The outcome of this research includes new modes - of playing the guitar given the robotic capabilities, and an understanding of - how automation can be integrated into instrument-playing processes. The results - present insights into how a human-machine hybrid system can increase the efficacy - of training or exploration, without compromising human engagement with a task.' - address: 'Porto Alegre, Brazil' - author: Sang-won Leigh and Abhinandan Jain and Pattie Maes - bibtex: "@inproceedings{Leigh2019,\n abstract = {This paper introduces studies conducted\ - \ with musicians that aim to understand modes of human-robot interaction, situated\ - \ between automation and human augmentation. Our robotic guitar system used for\ - \ the study consists of various sound generating mechanisms, either driven by\ - \ software or by a musician directly. The control mechanism allows the musician\ - \ to have a varying degree of agency over the overall musical direction. We present\ - \ interviews and discussions on open-ended experiments conducted with music students\ - \ and musicians. The outcome of this research includes new modes of playing the\ - \ guitar given the robotic capabilities, and an understanding of how automation\ - \ can be integrated into instrument-playing processes. 
The results present insights\ - \ into how a human-machine hybrid system can increase the efficacy of training\ - \ or exploration, without compromising human engagement with a task.},\n address\ - \ = {Porto Alegre, Brazil},\n author = {Sang-won Leigh and Abhinandan Jain and\ - \ Pattie Maes},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673027},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {437--442},\n publisher = {UFRGS},\n title = {Exploring\ - \ Human-Machine Synergy and Interaction on a Robotic Instrument},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper086.pdf},\n\ - \ year = {2019}\n}\n" + ID: MarquezBorbon2018 + abstract: 'This paper addresses the prevailing longevity problem of digital musical + instruments (DMIs) in NIME research and design by proposing a holistic system + design approach. Despite recent efforts to examine the main contributing factors + of DMI falling into obsolescence, such attempts to remedy this issue largely place + focus on the artifacts establishing themselves, their design processes and technologies. + However, few existing studies have attempted to proactively build a community + around technological platforms for DMIs, whilst bearing in mind the social dynamics + and activities necessary for a budding community. We observe that such attempts + while important in their undertaking, are limited in their scope. In this paper + we will discuss that achieving some sort of longevity must be addressed beyond + the device itself and must tackle broader ecosystemic factors. We hypothesize, + that a longevous DMI design must not only take into account a target community + but it may also require a non-traditional pedagogical system that sustains artistic + practice.' 
+ address: 'Blacksburg, Virginia, USA' + author: Adnan Marquez-Borbon and Juan Pablo Martinez-Avila + bibtex: "@inproceedings{MarquezBorbon2018,\n abstract = {This paper addresses the\ + \ prevailing longevity problem of digital musical instruments (DMIs) in NIME research\ + \ and design by proposing a holistic system design approach. Despite recent efforts\ + \ to examine the main contributing factors of DMI falling into obsolescence, such\ + \ attempts to remedy this issue largely place focus on the artifacts establishing\ + \ themselves, their design processes and technologies. However, few existing studies\ + \ have attempted to proactively build a community around technological platforms\ + \ for DMIs, whilst bearing in mind the social dynamics and activities necessary\ + \ for a budding community. We observe that such attempts while important in their\ + \ undertaking, are limited in their scope. In this paper we will discuss that\ + \ achieving some sort of longevity must be addressed beyond the device itself\ + \ and must tackle broader ecosystemic factors. 
We hypothesize, that a longevous\ + \ DMI design must not only take into account a target community but it may also\ + \ require a non-traditional pedagogical system that sustains artistic practice.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Adnan Marquez-Borbon and\ + \ Juan Pablo Martinez-Avila},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302541},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {190--195},\n publisher = {Virginia\ + \ Tech},\n title = {The Problem of DMI Adoption and Longevity: Envisioning a NIME\ + \ Performance Pedagogy},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0040.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673027 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302541 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 437--442 - publisher: UFRGS - title: Exploring Human-Machine Synergy and Interaction on a Robotic Instrument - url: http://www.nime.org/proceedings/2019/nime2019_paper086.pdf - year: 2019 + pages: 190--195 + publisher: Virginia Tech + title: 'The Problem of DMI Adoption and Longevity: Envisioning a NIME Performance + Pedagogy' + url: http://www.nime.org/proceedings/2018/nime2018_paper0040.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Lee2019 - abstract: 'Modern computer music performances often involve a musical instrument - that is primarily digital; software runs on a computer, and the physical form - of the instrument is the computer. In such a practice, the performance interface - is rendered on a computer screen for the performer. 
There has been a concern in - using a laptop as a musical instrument from the audience''s perspective, in that - having ``a laptop performer sitting behind the screen'''' makes it difficult for - the audience to understand how the performer is creating music. Mirroring a computer - screen on a projection screen has been one way to address the concern and reveal - the performer''s instrument. This paper introduces and discusses the author''s - computer music practice, in which a performer actively considers screen mirroring - as an essential part of the performance, beyond visualization of music. In this - case, screen mirroring is not complementary, but inevitable from the inception - of the performance. The related works listed within explore various roles of screen - mirroring in computer music performance and helps us understand empirical and - logistical findings in such practices.' - address: 'Porto Alegre, Brazil' - author: Sang Won Lee - bibtex: "@inproceedings{Lee2019,\n abstract = {Modern computer music performances\ - \ often involve a musical instrument that is primarily digital; software runs\ - \ on a computer, and the physical form of the instrument is the computer. In such\ - \ a practice, the performance interface is rendered on a computer screen for the\ - \ performer. There has been a concern in using a laptop as a musical instrument\ - \ from the audience's perspective, in that having ``a laptop performer sitting\ - \ behind the screen'' makes it difficult for the audience to understand how the\ - \ performer is creating music. Mirroring a computer screen on a projection screen\ - \ has been one way to address the concern and reveal the performer's instrument.\ - \ This paper introduces and discusses the author's computer music practice, in\ - \ which a performer actively considers screen mirroring as an essential part of\ - \ the performance, beyond visualization of music. 
In this case, screen mirroring\ - \ is not complementary, but inevitable from the inception of the performance.\ - \ The related works listed within explore various roles of screen mirroring in\ - \ computer music performance and helps us understand empirical and logistical\ - \ findings in such practices.},\n address = {Porto Alegre, Brazil},\n author =\ - \ {Sang Won Lee},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673029},\n\ - \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {443--448},\n publisher = {UFRGS},\n title = {Show Them\ - \ My Screen: Mirroring a Laptop Screen as an Expressive and Communicative Means\ - \ in Computer Music},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper087.pdf},\n\ - \ year = {2019}\n}\n" + ID: Martin2018 + abstract: This paper describes the process of developing a standstill performance + work using the Myo gesture control armband and the Bela embedded computing platform. + The combination of Myo and Bela allows a portable and extensible version of the + standstill performance concept while introducing muscle tension as an additional + control parameter. We describe the technical details of our setup and introduce + Myo-to-Bela and Myo-to-OSC software bridges that assist with prototyping compositions + using the Myo controller. + address: 'Blacksburg, Virginia, USA' + author: 'Martin, Charles Patrick and Jensenius, Alexander Refsum and Jim Torresen' + bibtex: "@inproceedings{Martin2018,\n abstract = {This paper describes the process\ + \ of developing a standstill performance work using the Myo gesture control armband\ + \ and the Bela embedded computing platform. The combination of Myo and Bela allows\ + \ a portable and extensible version of the standstill performance concept while\ + \ introducing muscle tension as an additional control parameter. 
We describe\ + \ the technical details of our setup and introduce Myo-to-Bela and Myo-to-OSC\ + \ software bridges that assist with prototyping compositions using the Myo controller.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Martin, Charles Patrick \ + \ and Jensenius, Alexander Refsum and Jim Torresen},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302543},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {196--197},\n publisher = {Virginia Tech},\n title = {Composing an Ensemble\ + \ Standstill Work for Myo and Bela},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0041.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673029 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302543 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 443--448 - publisher: UFRGS - title: 'Show Them My Screen: Mirroring a Laptop Screen as an Expressive and Communicative - Means in Computer Music' - url: http://www.nime.org/proceedings/2019/nime2019_paper087.pdf - year: 2019 + pages: 196--197 + publisher: Virginia Tech + title: Composing an Ensemble Standstill Work for Myo and Bela + url: http://www.nime.org/proceedings/2018/nime2018_paper0041.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: Davis2019 - abstract: 'We present IllumiWear, a novel eTextile prototype that uses fiber optics - as interactive input and visual output. Fiber optic cables are separated into - bundles and then woven like a basket into a bendable glowing fabric. 
By equipping - light emitting diodes to one side of these bundles and photodiode light intensity - sensors to the other, loss of light intensity can be measured when the fabric - is bent. The sensing technique of IllumiWear is not only able to discriminate - between discreet touch, slight bends, and harsh bends, but also recover the location - of deformation. In this way, our computational fabric prototype uses its intrinsic - means of visual output (light) as a tool for interactive input. We provide design - and implementation details for our prototype as well as a technical evaluation - of its effectiveness and limitations as an interactive computational textile. - In addition, we examine the potential of this prototype''s interactive capabilities - by extending our eTextile to create a tangible user interface for audio and visual - manipulation.' - address: 'Porto Alegre, Brazil' - author: Josh Urban Davis - bibtex: "@inproceedings{Davis2019,\n abstract = {We present IllumiWear, a novel\ - \ eTextile prototype that uses fiber optics as interactive input and visual output.\ - \ Fiber optic cables are separated into bundles and then woven like a basket into\ - \ a bendable glowing fabric. By equipping light emitting diodes to one side of\ - \ these bundles and photodiode light intensity sensors to the other, loss of light\ - \ intensity can be measured when the fabric is bent. The sensing technique of\ - \ IllumiWear is not only able to discriminate between discreet touch, slight bends,\ - \ and harsh bends, but also recover the location of deformation. In this way,\ - \ our computational fabric prototype uses its intrinsic means of visual output\ - \ (light) as a tool for interactive input. We provide design and implementation\ - \ details for our prototype as well as a technical evaluation of its effectiveness\ - \ and limitations as an interactive computational textile. 
In addition, we examine\ - \ the potential of this prototype's interactive capabilities by extending our\ - \ eTextile to create a tangible user interface for audio and visual manipulation.},\n\ - \ address = {Porto Alegre, Brazil},\n author = {Josh Urban Davis},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.3673033},\n editor = {Marcelo Queiroz and\ - \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {449--454},\n\ - \ publisher = {UFRGS},\n title = {IllumiWear: A Fiber-Optic eTextile for MultiMedia\ - \ Interactions},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper088.pdf},\n\ - \ year = {2019}\n}\n" + ID: Nieva2018 + abstract: 'This paper presents the work to maintain several copies of the digital + musical instrument (DMI) called the T-Stick in the hopes of extending their useful + lifetime. The T-Sticks were originally conceived in 2006 and 20 copies have been + built over the last 12 years. While they all preserve the original design concept, + their evolution resulted in variations in choice of microcontrollers, and sensors. + We worked with eight copies of the second and fourth generation T-Sticks to + overcome issues related to the aging of components, changes in external software, + lack of documentation, and in general, the problem of technical maintenance.' + address: 'Blacksburg, Virginia, USA' + author: Alex Nieva and Johnty Wang and Joseph Malloch and Marcelo Wanderley + bibtex: "@inproceedings{Nieva2018,\n abstract = {This paper presents the work to\ + \ maintain several copies of the digital musical instrument (DMI) called the T-Stick\ + \ in the hopes of extending their useful lifetime. The T-Sticks were originally\ + \ conceived in 2006 and 20 copies have been built over the last 12 years. 
While\ + \ they all preserve the original design concept, their evolution resulted in variations\ + \ in choice of microcontrollers, and sensors. We worked with eight copies\ + \ of the second and fourth generation T-Sticks to overcome issues related to the\ + \ aging of components, changes in external software, lack of documentation, and\ + \ in general, the problem of technical maintenance.},\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Alex Nieva and Johnty Wang and Joseph Malloch and\ + \ Marcelo Wanderley},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302545},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {198--199},\n publisher = {Virginia\ + \ Tech},\n title = {The T-Stick: Maintaining a 12 year-old Digital Musical Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0042.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3673033 - editor: Marcelo Queiroz and Anna Xambó Sedó + doi: 10.5281/zenodo.1302545 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 449--454 - publisher: UFRGS - title: 'IllumiWear: A Fiber-Optic eTextile for MultiMedia Interactions' - url: http://www.nime.org/proceedings/2019/nime2019_paper088.pdf - year: 2019 + pages: 198--199 + publisher: Virginia Tech + title: 'The T-Stick: Maintaining a 12 year-old Digital Musical Instrument' + url: http://www.nime.org/proceedings/2018/nime2018_paper0042.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: lpereira2014 - abstract: 'The Well--Sequenced Synthesizer is a series of sequencers that create - music in dialog with the user. 
Through the sequencers'' physical interfaces, users - can control music theory-based generative algorithms. This series --a work-in-progress-is - composed by three sequencers at this time. The first one, called The Counterpointer, - takes a melody input from the user and responds by generating voices based on - the rules of eighteenth--century counterpoint. The second one is based on a recent - treatise on harmony and counterpoint by music theorist Dmitri Tymoczco: El Ordenador - lets users explore a set of features of tonality by constraining randomly generated - music according to one or more of them. El Ordenador gives the user less control - than The Counterpointer, but more than La Mecánica, the third sequencer in the - series. La Mecánica plays back the sequences generated by El Ordenador using a - punch-card reading music box mechanism. It makes the digital patterns visible - and tactile, and links them back to the physical world.' - address: 'London, United Kingdom' - author: Luisa Pereira Hors - bibtex: "@inproceedings{lpereira2014,\n abstract = {The Well--Sequenced Synthesizer\ - \ is a series of sequencers that create music in dialog with the user. Through\ - \ the sequencers' physical interfaces, users can control music theory-based generative\ - \ algorithms. This series --a work-in-progress-is composed by three sequencers\ - \ at this time. The first one, called The Counterpointer, takes a melody input\ - \ from the user and responds by generating voices based on the rules of eighteenth--century\ - \ counterpoint. The second one is based on a recent treatise on harmony and counterpoint\ - \ by music theorist Dmitri Tymoczco: El Ordenador lets users explore a set of\ - \ features of tonality by constraining randomly generated music according to one\ - \ or more of them. El Ordenador gives the user less control than The Counterpointer,\ - \ but more than La Mec{\\'a}nica, the third sequencer in the series. 
La Mec{\\\ - 'a}nica plays back the sequences generated by El Ordenador using a punch-card\ - \ reading music box mechanism. It makes the digital patterns visible and tactile,\ - \ and links them back to the physical world.},\n address = {London, United Kingdom},\n\ - \ author = {Luisa Pereira Hors},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178806},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {88--89},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {The Well-Sequenced Synthesizer},\n url = {http://www.nime.org/proceedings/2014/nime2014_2.pdf},\n\ - \ year = {2014}\n}\n" + ID: Dewey2018 + abstract: This paper explores the use of the ubiquitous MIDI keyboard to control + a DJ performance system. The prototype system uses a two octave keyboard with + each octave controlling one audio track. Each audio track has four two-bar loops + which play in synchronisation switchable by its respective octave's first four + black keys. The top key of the keyboard toggles between frequency filter mode + and time slicer mode. In frequency filter mode the white keys provide seven bands + of latched frequency filtering. In time slicer mode the white keys plus black + B flat key provide latched on/off control of eight time slices of the loop. The + system was informally evaluated by nine subjects. The frequency filter mode combined + with loop switching worked well with the MIDI keyboard interface. All subjects + agreed that all tools had creative performance potential that could be developed + by further practice. + address: 'Blacksburg, Virginia, USA' + author: Christopher Dewey and Jonathan P. Wakefield + bibtex: "@inproceedings{Dewey2018,\n abstract = {This paper explores the use of\ + \ the ubiquitous MIDI keyboard to control a DJ performance system. 
The prototype\ + \ system uses a two octave keyboard with each octave controlling one audio track.\ + \ Each audio track has four two-bar loops which play in synchronisation switchable\ + \ by its respective octave's first four black keys. The top key of the keyboard\ + \ toggles between frequency filter mode and time slicer mode. In frequency filter\ + \ mode the white keys provide seven bands of latched frequency filtering. In time\ + \ slicer mode the white keys plus black B flat key provide latched on/off control\ + \ of eight time slices of the loop. The system was informally evaluated by nine\ + \ subjects. The frequency filter mode combined with loop switching worked well\ + \ with the MIDI keyboard interface. All subjects agreed that all tools had creative\ + \ performance potential that could be developed by further practice.},\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {Christopher Dewey and Jonathan P.\ + \ Wakefield},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302547},\n editor\ + \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {200--201},\n publisher = {Virginia\ + \ Tech},\n title = {{MIDI} Keyboard Defined DJ Performance System},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0043.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178806 + doi: 10.5281/zenodo.1302547 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 88--89 - publisher: 'Goldsmiths, University of London' - title: The Well-Sequenced Synthesizer - url: http://www.nime.org/proceedings/2014/nime2014_2.pdf - year: 2014 + pages: 200--201 + publisher: Virginia Tech + title: MIDI Keyboard Defined DJ Performance System + url: 
http://www.nime.org/proceedings/2018/nime2018_paper0043.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ptimothy2014 - abstract: 'This paper describes the technologies, collaborative processes, and artistic - intents of the musical composition Engravings for Prepared Snare Drum, iPad, and - Computer, which was composed by Timothy Polashek for percussionist Brad Meyer - using a jointly created electroacoustic and interactive musical instrument. During - performance, the percussionist equally manipulates and expresses through two surfaces, - an iPad displaying an interactive touch screen and a snare drum augmented with - various foreign objects, including a contact microphone adhered to the drumhead''s - surface. A computer program created for this composition runs on a laptop computer - in front of the percussionist. The software captures sound from the contact microphone - and transforms this sound through audio signal processing controlled by the performer''s - gestures on the iPad. The computer screen displays an animated graphic score, - as well as the current states of iPad controls and audio signal processing, for - the performer. Many compositional and technological approaches used in this project - pay tribute to composer John Cage, since the premiere performance of Engravings - for Prepared Snare Drum, iPad, and Computer took place in 2012, the centennial - celebration of Cage''s birth year.' - address: 'London, United Kingdom' - author: Timothy Polashek and Brad Meyer - bibtex: "@inproceedings{ptimothy2014,\n abstract = {This paper describes the technologies,\ - \ collaborative processes, and artistic intents of the musical composition Engravings\ - \ for Prepared Snare Drum, iPad, and Computer, which was composed by Timothy Polashek\ - \ for percussionist Brad Meyer using a jointly created electroacoustic and interactive\ - \ musical instrument. 
During performance, the percussionist equally manipulates\ - \ and expresses through two surfaces, an iPad displaying an interactive touch\ - \ screen and a snare drum augmented with various foreign objects, including a\ - \ contact microphone adhered to the drumhead's surface. A computer program created\ - \ for this composition runs on a laptop computer in front of the percussionist.\ - \ The software captures sound from the contact microphone and transforms this\ - \ sound through audio signal processing controlled by the performer's gestures\ - \ on the iPad. The computer screen displays an animated graphic score, as well\ - \ as the current states of iPad controls and audio signal processing, for the\ - \ performer. Many compositional and technological approaches used in this project\ - \ pay tribute to composer John Cage, since the premiere performance of Engravings\ - \ for Prepared Snare Drum, iPad, and Computer took place in 2012, the centennial\ - \ celebration of Cage's birth year.},\n address = {London, United Kingdom},\n\ - \ author = {Timothy Polashek and Brad Meyer},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178907},\n issn = {2220-4806},\n month = {June},\n pages =\ - \ {82--83},\n publisher = {Goldsmiths, University of London},\n title = {Engravings\ - \ for Prepared Snare Drum, iPad, and Computer},\n url = {http://www.nime.org/proceedings/2014/nime2014_254.pdf},\n\ - \ year = {2014}\n}\n" + ID: Engum2018 + abstract: This paper describes an ongoing research project which address challenges + and opportunities when collaborating interactively in real time in a "virtual" + sound studio with several partners in different locations. "Virtual" in this context + referring to an interconnected and inter-domain studio environment consisting + of several local production systems connected to public and private networks. 
+ This paper reports experiences and challenges related to two different production + scenarios conducted in 2017. + address: 'Blacksburg, Virginia, USA' + author: Trond Engum and Otto Jonassen Wittner + bibtex: "@inproceedings{Engum2018,\n abstract = {This paper describes an ongoing\ + \ research project which address challenges and opportunities when collaborating\ + \ interactively in real time in a \"virtual\" sound studio with several partners\ + \ in different locations. \"Virtual\" in this context referring to an interconnected\ + \ and inter-domain studio environment consisting of several local production systems\ + \ connected to public and private networks. This paper reports experiences and\ + \ challenges related to two different production scenarios conducted in 2017.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Trond Engum and Otto Jonassen\ + \ Wittner},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302549},\n editor\ + \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {202--203},\n publisher = {Virginia\ + \ Tech},\n title = {Democratizing Interactive Music Production over the Internet},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0044.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178907 + doi: 10.5281/zenodo.1302549 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 82--83 - publisher: 'Goldsmiths, University of London' - title: 'Engravings for Prepared Snare Drum, iPad, and Computer' - url: http://www.nime.org/proceedings/2014/nime2014_254.pdf - year: 2014 + pages: 202--203 + publisher: Virginia Tech + title: Democratizing Interactive Music Production over the Internet + url: 
http://www.nime.org/proceedings/2018/nime2018_paper0044.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: mzareei2014 - abstract: 'Over the past few decades, there has been an increasing number of musical - instruments and works of sound art that incorporate robotics and mechatronics. - This paper proposes a new approach in classification of such works and focuses - on those whose ideological roots can be sought in Luigi Russolo''s noise-intoners - (intonarumori). It presents a discussion on works in which mechatronics is used - to investigate new and traditionally perceived as ``extra-musical'''' sonic territories, - and introduces Rasper: a new mechatronic noise-intoner that features an electromechanical - apparatus to create noise physically, while regulating it rhythmically and timbrally.' - address: 'London, United Kingdom' - author: Mo Zareei and Ajay Kapur and Dale A. Carnegie - bibtex: "@inproceedings{mzareei2014,\n abstract = {Over the past few decades, there\ - \ has been an increasing number of musical instruments and works of sound art\ - \ that incorporate robotics and mechatronics. This paper proposes a new approach\ - \ in classification of such works and focuses on those whose ideological roots\ - \ can be sought in Luigi Russolo's noise-intoners (intonarumori). 
It presents\ - \ a discussion on works in which mechatronics is used to investigate new and traditionally\ - \ perceived as ``extra-musical'' sonic territories, and introduces Rasper: a new\ - \ mechatronic noise-intoner that features an electromechanical apparatus to create\ - \ noise physically, while regulating it rhythmically and timbrally.},\n address\ - \ = {London, United Kingdom},\n author = {Mo Zareei and Ajay Kapur and Dale A.\ - \ Carnegie},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178995},\n issn\ - \ = {2220-4806},\n month = {June},\n pages = {473--478},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Rasper: a Mechatronic Noise-Intoner},\n url\ - \ = {http://www.nime.org/proceedings/2014/nime2014_268.pdf},\n year = {2014}\n\ - }\n" + ID: Charles2018 + abstract: 'This paper describes how the Axoloti platform is well suited to teach + a beginners'' course about new elecro-acoustic musical instruments and how it + fits the needs of artists who want to work with an embedded sound processing platform + and get creative at the crossroads of acoustics and electronics. First, we present + the criteria used to choose a platform for the course titled "Creating New Musical + Instruments" given at the University of Iowa in the Fall of 2017. Then, we explain + why we chose the Axoloti board and development environment.' + address: 'Blacksburg, Virginia, USA' + author: 'Jean-Francois Charles and Cotallo Solares, Carlos and Toro Tobon, Carlos + and Andrew Willette' + bibtex: "@inproceedings{Charles2018,\n abstract = {This paper describes how the\ + \ Axoloti platform is well suited to teach a beginners' course about new elecro-acoustic\ + \ musical instruments and how it fits the needs of artists who want to work with\ + \ an embedded sound processing platform and get creative at the crossroads of\ + \ acoustics and electronics. 
First, we present the criteria used to choose a platform\ + \ for the course titled \"Creating New Musical Instruments\" given at the University\ + \ of Iowa in the Fall of 2017. Then, we explain why we chose the Axoloti board\ + \ and development environment.},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {Jean-Francois Charles and Cotallo Solares, Carlos and Toro Tobon, Carlos\ + \ and Andrew Willette},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302551},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {204--205},\n publisher = {Virginia\ + \ Tech},\n title = {Using the Axoloti Embedded Sound Processing Platform to Foster\ + \ Experimentation and Creativity},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0045.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178995 + doi: 10.5281/zenodo.1302551 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 473--478 - publisher: 'Goldsmiths, University of London' - title: 'Rasper: a Mechatronic Noise-Intoner' - url: http://www.nime.org/proceedings/2014/nime2014_268.pdf - year: 2014 + pages: 204--205 + publisher: Virginia Tech + title: Using the Axoloti Embedded Sound Processing Platform to Foster Experimentation + and Creativity + url: http://www.nime.org/proceedings/2018/nime2018_paper0045.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: cudell2014 - abstract: 'In our current era, where smartphones are commonplace and buzzwords like - ``the internet of things,'''' ``wearable tech,'''' and ``augmented reality'''' - are ubiquitous, translating performance gestures into data and intuitively mapping - it to control musical/visual parameters in the realm of 
computing should be trivial; - but it isn''t. Technical barriers still persist that limit this activity to exclusive - groups capable of learning skillsets far removed from one''s musical craft. These - skills include programming, soldering, microprocessors, wireless protocols, and - circuit design. Those of us whose creative activity is centered in NIME have to - become polyglots of many disciplines to achieve our work. In the NIME community, - it''s unclear that we should even draw distinctions between ''artist'' and ''technician'', - because these skillsets have become integral to our creative practice. However, - what about the vast communities of musicians, composers, and artists who want - to leverage sensing to take their craft into new territory with no background - in circuits, soldering, embedded programming, and sensor function? eMersion, a - plug-and-play, modular, wireless alternative solution for creating NIMEs will - be presented. It enables one to bypass the technical hurdles listed above in favor - of immediate experimentation with musical practice and wireless sensing. A unique - software architecture will also be unveiled that enables one to quickly and intuitively - process and map unpredictable numbers and types of wireless data streams, the - Digital Data Workstation.' - address: 'London, United Kingdom' - author: Chet Udell and James Paul Sain - bibtex: "@inproceedings{cudell2014,\n abstract = {In our current era, where smartphones\ - \ are commonplace and buzzwords like ``the internet of things,'' ``wearable tech,''\ - \ and ``augmented reality'' are ubiquitous, translating performance gestures into\ - \ data and intuitively mapping it to control musical/visual parameters in the\ - \ realm of computing should be trivial; but it isn't. Technical barriers still\ - \ persist that limit this activity to exclusive groups capable of learning skillsets\ - \ far removed from one's musical craft. 
These skills include programming, soldering,\ - \ microprocessors, wireless protocols, and circuit design. Those of us whose creative\ - \ activity is centered in NIME have to become polyglots of many disciplines to\ - \ achieve our work. In the NIME community, it's unclear that we should even draw\ - \ distinctions between 'artist' and 'technician', because these skillsets have\ - \ become integral to our creative practice. However, what about the vast communities\ - \ of musicians, composers, and artists who want to leverage sensing to take their\ - \ craft into new territory with no background in circuits, soldering, embedded\ - \ programming, and sensor function? eMersion, a plug-and-play, modular, wireless\ - \ alternative solution for creating NIMEs will be presented. It enables one to\ - \ bypass the technical hurdles listed above in favor of immediate experimentation\ - \ with musical practice and wireless sensing. A unique software architecture will\ - \ also be unveiled that enables one to quickly and intuitively process and map\ - \ unpredictable numbers and types of wireless data streams, the Digital Data Workstation.},\n\ - \ address = {London, United Kingdom},\n author = {Chet Udell and James Paul Sain},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178971},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {130--133},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {eMersion | Sensor-controlled Electronic Music Modules\ - \ \\& Digital Data Workstation},\n url = {http://www.nime.org/proceedings/2014/nime2014_272.pdf},\n\ - \ year = {2014}\n}\n" + ID: Tsoukalas2018 + abstract: 'The following paper introduces a new mechatronic NIME kit that uses new + additions to the Pd-L2Ork visual programing environment and its K-12 learning + module. 
It is designed to facilitate the creation of simple mechatronics systems + for physical sound production in K-12 and production scenarios. The new set of + objects builds on the existing support for the Raspberry Pi platform to also include + the use of electric actuators via the microcomputer''s GPIO system. Moreover, + we discuss implications of the newly introduced kit in the creative and K-12 education + scenarios by sharing observations from a series of pilot workshops, with particular + focus on using mechatronic NIMEs as a catalyst for the development of programing + skills.' + address: 'Blacksburg, Virginia, USA' + author: Kyriakos Tsoukalas and Ivica Ico Bukvic + bibtex: "@inproceedings{Tsoukalas2018,\n abstract = {The following paper introduces\ + \ a new mechatronic NIME kit that uses new additions to the Pd-L2Ork visual programing\ + \ environment and its K-12 learning module. It is designed to facilitate the creation\ + \ of simple mechatronics systems for physical sound production in K-12 and production\ + \ scenarios. The new set of objects builds on the existing support for the Raspberry\ + \ Pi platform to also include the use of electric actuators via the microcomputer's\ + \ GPIO system. 
Moreover, we discuss implications of the newly introduced kit in\ + \ the creative and K-12 education scenarios by sharing observations from a series\ + \ of pilot workshops, with particular focus on using mechatronic NIMEs as a catalyst\ + \ for the development of programing skills.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Kyriakos Tsoukalas and Ivica Ico Bukvic},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302553},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {206--209},\n publisher = {Virginia Tech},\n title = {Introducing a\ + \ K-12 Mechatronic NIME Kit},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0046.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178971 + doi: 10.5281/zenodo.1302553 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 130--133 - publisher: 'Goldsmiths, University of London' - title: eMersion | Sensor-controlled Electronic Music Modules & Digital Data Workstation - url: http://www.nime.org/proceedings/2014/nime2014_272.pdf - year: 2014 + pages: 206--209 + publisher: Virginia Tech + title: Introducing a K-12 Mechatronic NIME Kit + url: http://www.nime.org/proceedings/2018/nime2018_paper0046.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: tmurraybrowne2014 - abstract: 'We introduce Harmonic Motion, a free open source toolkit for artists, - musicians and designers working with gestural data. 
Extracting musically useful - features from captured gesture data can be challenging, with projects often requiring - bespoke processing techniques developed through iterations of tweaking equations - involving a number of constant values -sometimes referred to as `magic numbers''. - Harmonic Motion provides a robust interface for rapid prototyping of patches to - process gestural data and a framework through which approaches may be encapsulated, - reused and shared with others. In addition, we describe our design process in - which both personal experience and a survey of potential users informed a set - of specific goals for the software.' - address: 'London, United Kingdom' - author: Tim Murray-Browne and Mark Plumbley - bibtex: "@inproceedings{tmurraybrowne2014,\n abstract = {We introduce Harmonic Motion,\ - \ a free open source toolkit for artists, musicians and designers working with\ - \ gestural data. Extracting musically useful features from captured gesture data\ - \ can be challenging, with projects often requiring bespoke processing techniques\ - \ developed through iterations of tweaking equations involving a number of constant\ - \ values -sometimes referred to as `magic numbers'. Harmonic Motion provides a\ - \ robust interface for rapid prototyping of patches to process gestural data and\ - \ a framework through which approaches may be encapsulated, reused and shared\ - \ with others. 
In addition, we describe our design process in which both personal\
\ experience and a survey of potential users informed a set of specific goals\
\ for the software.},\n address = {London, United Kingdom},\n author = {Tim Murray-Browne\
\ and Mark Plumbley},\n booktitle = {Proceedings of the International Conference\
\ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178887},\n\
\ issn = {2220-4806},\n month = {June},\n pages = {213--216},\n publisher = {Goldsmiths,\
\ University of London},\n title = {Harmonic Motion: A Toolkit for Processing\
\ Gestural Data for Interactive Sound},\n url = {http://www.nime.org/proceedings/2014/nime2014_273.pdf},\n\
\ year = {2014}\n}\n"
+ ID: Bennett2018
+ abstract: 'We describe the development of Neurythmic: an interactive system for
+ the creation and performance of fluid, expressive musical rhythms using Central
+ Pattern Generators (CPGs). CPGs are neural networks which generate adaptive rhythmic
+ signals. They simulate structures in animals which underlie behaviours such as
+ heartbeat, gut peristalsis and complex motor control. Neurythmic is the first
+ such system to use CPGs for interactive rhythm creation. We discuss how Neurythmic
+ uses the entrainment behaviour of these networks to support the creation of rhythms
+ while avoiding the rigidity of grid quantisation approaches. As well as discussing
+ the development, design and evaluation of Neurythmic, we discuss relevant properties
+ of the CPG networks used (Matsuoka''s Neural Oscillator), and describe methods
+ for their control. Evaluation with expert and professional musicians shows that
+ Neurythmic is a versatile tool, adapting well to a range of quite different musical
+ approaches.'
+ address: 'Blacksburg, Virginia, USA'
+ author: Daniel Bennett and Peter Bennett and Anne Roudaut
+ bibtex: "@inproceedings{Bennett2018,\n abstract = {We describe the development of\
+ \ Neurythmic: an interactive system for the creation and performance of fluid,\
+ \ expressive musical rhythms using Central Pattern Generators (CPGs). CPGs are\
+ \ neural networks which generate adaptive rhythmic signals. They simulate structures\
+ \ in animals which underlie behaviours such as heartbeat, gut peristalsis and complex\
+ \ motor control. Neurythmic is the first such system to use CPGs for interactive\
+ \ rhythm creation. We discuss how Neurythmic uses the entrainment behaviour of\
+ \ these networks to support the creation of rhythms while avoiding the rigidity\
+ \ of grid quantisation approaches. As well as discussing the development, design\
+ \ and evaluation of Neurythmic, we discuss relevant properties of the CPG networks\
+ \ used (Matsuoka's Neural Oscillator), and describe methods for their control.\
+ \ Evaluation with expert and professional musicians shows that Neurythmic is a\
+ \ versatile tool, adapting well to a range of quite different musical approaches.},\n\
+ \ address = {Blacksburg, Virginia, USA},\n author = {Daniel Bennett and Peter\
+ \ Bennett and Anne Roudaut},\n booktitle = {Proceedings of the International Conference\
+ \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302555},\n\
+ \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\
+ \ issn = {2220-4806},\n month = {June},\n pages = {210--215},\n publisher = {Virginia\
+ \ Tech},\n title = {Neurythmic: A Rhythm Creation Tool Based on Central Pattern\
+ \ Generators},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0047.pdf},\n\
+ \ year = {2018}\n}\n"
booktitle: Proceedings of the International Conference on New Interfaces for Musical
  Expression
- doi: 10.5281/zenodo.1178887
+ doi: 10.5281/zenodo.1302555
+ editor: 
'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 213--216 - publisher: 'Goldsmiths, University of London' - title: 'Harmonic Motion: A Toolkit for Processing Gestural Data for Interactive - Sound' - url: http://www.nime.org/proceedings/2014/nime2014_273.pdf - year: 2014 + pages: 210--215 + publisher: Virginia Tech + title: 'Neurythmic: A Rhythm Creation Tool Based on Central Pattern Generators' + url: http://www.nime.org/proceedings/2018/nime2018_paper0047.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: slui2014 - abstract: 'Pop music jamming on the keyboard requires massive music knowledge. Musician - needs to understand and memorize the behavior of each chord in different keys. - However, most simple pop music follows a common chord progression pattern. This - pattern applies to most simple pop music on all the 12 keys. We designed an app - that can reduce the difficulty of music jamming on the keyboard by using this - pattern. The app displays the current chord in the Roman numeral and suggests - the expected next chord in an easy to understand way on a smartphone. This work - investigates into the human computer interaction perspective of music performance. - We use a smartphone app as a bridge, which assists musician to react faster in - music jamming by transforming the complex music knowledge into a simple, unified - and easy to understand format. Experiment result shows that this app can help - the non-keyboardist musician to learn pop music jamming. It also shows that the - app is useful to assist keyboardist in making key transpose and playing music - in the key with many sharps and flats. We will use the same interface design to - guide user on playing other chord progressions such as the jazz chord progression.' - address: 'London, United Kingdom' - author: Simon Lui - bibtex: "@inproceedings{slui2014,\n abstract = {Pop music jamming on the keyboard\ - \ requires massive music knowledge. 
Musician needs to understand and memorize\ - \ the behavior of each chord in different keys. However, most simple pop music\ - \ follows a common chord progression pattern. This pattern applies to most simple\ - \ pop music on all the 12 keys. We designed an app that can reduce the difficulty\ - \ of music jamming on the keyboard by using this pattern. The app displays the\ - \ current chord in the Roman numeral and suggests the expected next chord in an\ - \ easy to understand way on a smartphone. This work investigates into the human\ - \ computer interaction perspective of music performance. We use a smartphone app\ - \ as a bridge, which assists musician to react faster in music jamming by transforming\ - \ the complex music knowledge into a simple, unified and easy to understand format.\ - \ Experiment result shows that this app can help the non-keyboardist musician\ - \ to learn pop music jamming. It also shows that the app is useful to assist keyboardist\ - \ in making key transpose and playing music in the key with many sharps and flats.\ - \ We will use the same interface design to guide user on playing other chord progressions\ - \ such as the jazz chord progression.},\n address = {London, United Kingdom},\n\ - \ author = {Simon Lui},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178855},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {98--101},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {A Real Time Common Chord Progression Guide\ - \ on the Smartphone for Jamming Pop Song on the Music Keyboard},\n url = {http://www.nime.org/proceedings/2014/nime2014_275.pdf},\n\ - \ year = {2014}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178855 - issn: 2220-4806 - month: June - pages: 98--101 - publisher: 'Goldsmiths, University of London' - title: A Real Time Common 
Chord Progression Guide on the Smartphone for Jamming - Pop Song on the Music Keyboard - url: http://www.nime.org/proceedings/2014/nime2014_275.pdf - year: 2014 - - -- ENTRYTYPE: inproceedings - ID: tmagnusson2014 - abstract: 'Live coding emphasises improvisation. It is an art practice that merges - the act of musical composition and performance into a public act of projected - writing. This paper introduces the Threnoscope system, which includes a live coding - micro-language for drone-based microtonal composition. The paper discusses the - aims and objectives of the system, elucidates the design decisions, and introduces - in particular the code score feature present in the Threnoscope. The code score - is a novel element in the design of live coding systems allowing for improvisation - through a graphic score, rendering a visual representation of past and future - events in a real-time performance. The paper demonstrates how the system''s methods - can be mapped ad hoc to GUIor hardware-based control.' - address: 'London, United Kingdom' - author: Thor Magnusson - bibtex: "@inproceedings{tmagnusson2014,\n abstract = {Live coding emphasises improvisation.\ - \ It is an art practice that merges the act of musical composition and performance\ - \ into a public act of projected writing. This paper introduces the Threnoscope\ - \ system, which includes a live coding micro-language for drone-based microtonal\ - \ composition. The paper discusses the aims and objectives of the system, elucidates\ - \ the design decisions, and introduces in particular the code score feature present\ - \ in the Threnoscope. 
The code score is a novel element in the design of live\ - \ coding systems allowing for improvisation through a graphic score, rendering\ - \ a visual representation of past and future events in a real-time performance.\ - \ The paper demonstrates how the system's methods can be mapped ad hoc to GUIor\ - \ hardware-based control.},\n address = {London, United Kingdom},\n author = {Thor\ - \ Magnusson},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178857},\n issn\ - \ = {2220-4806},\n month = {June},\n pages = {19--22},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Improvising with the Threnoscope: Integrating\ - \ Code, Hardware, GUI, Network, and Graphic Scores},\n url = {http://www.nime.org/proceedings/2014/nime2014_276.pdf},\n\ - \ year = {2014}\n}\n" + ID: Granger2018 + abstract: 'Composing music typically requires years of music theory experience and + knowledge that includes but is not limited to chord progression, melody composition + theory, and an understanding of whole-step/half-step passing tones among others. + For that reason, certain songwriters such as singers may find a necessity to hire + experienced pianists to help compose their music. In order to facilitate the process + for beginner and aspiring musicians, we have developed Lumanote, a music composition + tool that aids songwriters by presenting real-time suggestions on appropriate + melody notes and chord progression. While a preliminary evaluation yielded favorable + results for beginners, many commented on the difficulty of having to map the note + suggestions displayed on the on-screen interface to the physical keyboard they + were playing on. This paper presents the resulting solution: an LED-based feedback + system that is designed to be directly attached to any standard MIDI keyboard. 
+ This peripheral aims to help map note suggestions directly to the physical keys + of a musical keyboard. A study consisting of 22 individuals was conducted to compare + the effectiveness of the new LED-based system with the existing computer interface, + finding that the vast majority of users preferred the LED system. Three experienced + musicians also judged and ranked the compositions, noting significant improvement + in song quality when using either system, and citing comparable quality between + compositions that used either interface.' + address: 'Blacksburg, Virginia, USA' + author: James Granger and Mateo Aviles and Joshua Kirby and Austin Griffin and Johnny + Yoon and Raniero A. Lara-Garduno and Tracy Hammond + bibtex: "@inproceedings{Granger2018,\n abstract = {Composing music typically requires\ + \ years of music theory experience and knowledge that includes but is not limited\ + \ to chord progression, melody composition theory, and an understanding of whole-step/half-step\ + \ passing tones among others. For that reason, certain songwriters such as singers\ + \ may find a necessity to hire experienced pianists to help compose their music.\ + \ In order to facilitate the process for beginner and aspiring musicians, we have\ + \ developed Lumanote, a music composition tool that aids songwriters by presenting\ + \ real-time suggestions on appropriate melody notes and chord progression. While\ + \ a preliminary evaluation yielded favorable results for beginners, many commented\ + \ on the difficulty of having to map the note suggestions displayed on the on-screen\ + \ interface to the physical keyboard they were playing on. This paper presents\ + \ the resulting solution: an LED-based feedback system that is designed to be\ + \ directly attached to any standard MIDI keyboard. This peripheral aims to help\ + \ map note suggestions directly to the physical keys of a musical keyboard. 
A\ + \ study consisting of 22 individuals was conducted to compare the effectiveness\ + \ of the new LED-based system with the existing computer interface, finding that\ + \ the vast majority of users preferred the LED system. Three experienced musicians\ + \ also judged and ranked the compositions, noting significant improvement in song\ + \ quality when using either system, and citing comparable quality between compositions\ + \ that used either interface.},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {James Granger and Mateo Aviles and Joshua Kirby and Austin Griffin and Johnny\ + \ Yoon and Raniero A. Lara-Garduno and Tracy Hammond},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302557},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {216--221},\n publisher = {Virginia Tech},\n title = {Evaluating LED-based\ + \ interface for Lumanote composition creation tool},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0048.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178857 + doi: 10.5281/zenodo.1302557 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 19--22 - publisher: 'Goldsmiths, University of London' - title: 'Improvising with the Threnoscope: Integrating Code, Hardware, GUI, Network, - and Graphic Scores' - url: http://www.nime.org/proceedings/2014/nime2014_276.pdf - year: 2014 + pages: 216--221 + publisher: Virginia Tech + title: Evaluating LED-based interface for Lumanote composition creation tool + url: http://www.nime.org/proceedings/2018/nime2018_paper0048.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: strump2014 - abstract: 'This paper describes the concept and design of 
Orphion, a new digital - musical instrument based on the Apple iPad. We begin by outlining primary challenges - associated with DMI design, focussing on the specific problems Orphion seeks to - address such as requirements for haptic feedback from the device. Orphion achieves - this by incorporating an interaction model based on tonally tuned virtual ``pads'''' - in user-configurable layouts, where the pitch and timbre associated with each - pad depends on the initial point of touch, touch point size and size variation, - and position after the initial touch. These parameters control a physical model - for sound generation with visual feedback provided via the iPad display. We present - findings from the research and development process including design revisions - made in response to user testing. Finally, conclusions are made about the effectiveness - of the instrument based on large-scale user feedback.' - address: 'London, United Kingdom' - author: Sebastian Trump and Jamie Bullock - bibtex: "@inproceedings{strump2014,\n abstract = {This paper describes the concept\ - \ and design of Orphion, a new digital musical instrument based on the Apple iPad.\ - \ We begin by outlining primary challenges associated with DMI design, focussing\ - \ on the specific problems Orphion seeks to address such as requirements for haptic\ - \ feedback from the device. Orphion achieves this by incorporating an interaction\ - \ model based on tonally tuned virtual ``pads'' in user-configurable layouts,\ - \ where the pitch and timbre associated with each pad depends on the initial point\ - \ of touch, touch point size and size variation, and position after the initial\ - \ touch. These parameters control a physical model for sound generation with visual\ - \ feedback provided via the iPad display. We present findings from the research\ - \ and development process including design revisions made in response to user\ - \ testing. 
Finally, conclusions are made about the effectiveness of the instrument\ - \ based on large-scale user feedback.},\n address = {London, United Kingdom},\n\ - \ author = {Sebastian Trump and Jamie Bullock},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178963},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {159--162},\n publisher = {Goldsmiths, University of London},\n title = {Orphion:\ - \ A Gestural Multi-Touch Instrument for the iPad},\n url = {http://www.nime.org/proceedings/2014/nime2014_277.pdf},\n\ - \ year = {2014}\n}\n" + ID: Meneses2018 + abstract: 'This paper describes two augmented nylon-string guitar projects developed + in different institutions. GuitarAMI uses sensors to modify the classical guitars + constraints while GuiaRT uses digital signal processing to create virtual guitarists + that interact with the performer in real-time. After a bibliographic review of + Augmented Musical Instruments (AMIs) based on guitars, we present the details + of the two projects and compare them using an adapted dimensional space representation. + Highlighting the complementarity and cross-influences between the projects, we + propose avenues for future collaborative work.' + address: 'Blacksburg, Virginia, USA' + author: Eduardo Meneses and Sergio Freire and Marcelo Wanderley + bibtex: "@inproceedings{Meneses2018,\n abstract = {This paper describes two augmented\ + \ nylon-string guitar projects developed in different institutions. GuitarAMI\ + \ uses sensors to modify the classical guitars constraints while GuiaRT uses digital\ + \ signal processing to create virtual guitarists that interact with the performer\ + \ in real-time. After a bibliographic review of Augmented Musical Instruments\ + \ (AMIs) based on guitars, we present the details of the two projects and compare\ + \ them using an adapted dimensional space representation. 
Highlighting the complementarity\ + \ and cross-influences between the projects, we propose avenues for future collaborative\ + \ work.},\n address = {Blacksburg, Virginia, USA},\n author = {Eduardo Meneses\ + \ and Sergio Freire and Marcelo Wanderley},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1302559},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {222--227},\n publisher = {Virginia Tech},\n title = {GuitarAMI and GuiaRT:\ + \ two independent yet complementary projects on augmented nylon guitars},\n url\ + \ = {http://www.nime.org/proceedings/2018/nime2018_paper0049.pdf},\n year = {2018}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178963 + doi: 10.5281/zenodo.1302559 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 159--162 - publisher: 'Goldsmiths, University of London' - title: 'Orphion: A Gestural Multi-Touch Instrument for the iPad' - url: http://www.nime.org/proceedings/2014/nime2014_277.pdf - year: 2014 + pages: 222--227 + publisher: Virginia Tech + title: 'GuitarAMI and GuiaRT: two independent yet complementary projects on augmented + nylon guitars' + url: http://www.nime.org/proceedings/2018/nime2018_paper0049.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: mkrzyzaniak2014 - abstract: 'This paper describes the implementation of a digital audio / visual feedback - system for an extemporaneous dance performance. The system was designed to automatically - synchronize aesthetically with the dancers. The performance was premiered at the - Slingshot festival in Athens Georgia on March 9, 2013.' 
- address: 'London, United Kingdom' - author: Michael Krzyzaniak and Julie Akerly and Matthew Mosher and Muharrem Yildirim - and Garth Paine - bibtex: "@inproceedings{mkrzyzaniak2014,\n abstract = {This paper describes the\ - \ implementation of a digital audio / visual feedback system for an extemporaneous\ - \ dance performance. The system was designed to automatically synchronize aesthetically\ - \ with the dancers. The performance was premiered at the Slingshot festival in\ - \ Athens Georgia on March 9, 2013.},\n address = {London, United Kingdom},\n author\ - \ = {Michael Krzyzaniak and Julie Akerly and Matthew Mosher and Muharrem Yildirim\ - \ and Garth Paine},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178841},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {303--306},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Separation: Short Range Repulsion. Implementation\ - \ of an Automated Aesthetic Synchronization System for a Dance Performance.},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_279.pdf},\n year = {2014}\n\ - }\n" + ID: Stolfi2018 + abstract: 'Playsound.space is a web-based tool to search for and play Creative Commons + licensed-sounds which can be applied to free improvisation, experimental music + production and soundscape composition. It provides a fast access to about 400k + non-musical and musical sounds provided by Freesound, and allows users to play/loop + single or multiple sounds retrieved through text based search. Sound discovery + is facilitated by use of semantic searches and sound visual representations (spectrograms). + Guided by the motivation to create an intuitive tool to support music practice + that could suit both novice and trained musicians, we developed and improved the + system in a continuous process, gathering frequent feedback from a range of users + with various skills. 
We assessed the prototype with 18 non musician and musician + participants during free music improvisation sessions. Results indicate that the + system was found easy to use and supports creative collaboration and expressiveness + irrespective of musical ability. We identified further design challenges linked + to creative identification, control and content quality.' + address: 'Blacksburg, Virginia, USA' + author: Ariane de Souza Stolfi and Miguel Ceriani and Luca Turchet and Mathieu Barthet + bibtex: "@inproceedings{Stolfi2018,\n abstract = {Playsound.space is a web-based\ + \ tool to search for and play Creative Commons licensed-sounds which can be applied\ + \ to free improvisation, experimental music production and soundscape composition.\ + \ It provides a fast access to about 400k non-musical and musical sounds provided\ + \ by Freesound, and allows users to play/loop single or multiple sounds retrieved\ + \ through text based search. Sound discovery is facilitated by use of semantic\ + \ searches and sound visual representations (spectrograms). Guided by the motivation\ + \ to create an intuitive tool to support music practice that could suit both novice\ + \ and trained musicians, we developed and improved the system in a continuous\ + \ process, gathering frequent feedback from a range of users with various skills.\ + \ We assessed the prototype with 18 non musician and musician participants during\ + \ free music improvisation sessions. Results indicate that the system was found\ + \ easy to use and supports creative collaboration and expressiveness irrespective\ + \ of musical ability. 
We identified further design challenges linked to creative\ + \ identification, control and content quality.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Ariane de Souza Stolfi and Miguel Ceriani and Luca Turchet\ + \ and Mathieu Barthet},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302561},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {228--233},\n publisher = {Virginia\ + \ Tech},\n title = {Playsound.space: Inclusive Free Music Improvisations Using\ + \ Audio Commons},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0050.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178841 + doi: 10.5281/zenodo.1302561 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 303--306 - publisher: 'Goldsmiths, University of London' - title: 'Separation: Short Range Repulsion. Implementation of an Automated Aesthetic - Synchronization System for a Dance Performance.' - url: http://www.nime.org/proceedings/2014/nime2014_279.pdf - year: 2014 + pages: 228--233 + publisher: Virginia Tech + title: 'Playsound.space: Inclusive Free Music Improvisations Using Audio Commons' + url: http://www.nime.org/proceedings/2018/nime2018_paper0050.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ylim2014 - abstract: 'Smartphone-based music conducting is a convenient and effective approach - to conducting practice that aims to overcome the practical limitations of traditional - conducting practice and provide enhanced user experience compared to those of - previous virtual conducting examples. This work introduces the v-Maestro, a smartphone - application for music conducting. 
Powered by the Gyroscope of the device, the - v-Maestro analyzes conducting motions that allows the user to not only control - the tempo but also simulate ``cueing'''' for different instruments. Results from - user tests show that, in spite of certain ergonomic problems, new conducting practice - with the v-Maestro is more satisfactory than traditional methods and has a strong - potential as a conducting practice tool.' - address: 'London, United Kingdom' - author: Yang Kyu Lim and Woon Seung Yeo - bibtex: "@inproceedings{ylim2014,\n abstract = {Smartphone-based music conducting\ - \ is a convenient and effective approach to conducting practice that aims to overcome\ - \ the practical limitations of traditional conducting practice and provide enhanced\ - \ user experience compared to those of previous virtual conducting examples. This\ - \ work introduces the v-Maestro, a smartphone application for music conducting.\ - \ Powered by the Gyroscope of the device, the v-Maestro analyzes conducting motions\ - \ that allows the user to not only control the tempo but also simulate ``cueing''\ - \ for different instruments. 
Results from user tests show that, in spite of certain\ - \ ergonomic problems, new conducting practice with the v-Maestro is more satisfactory\ - \ than traditional methods and has a strong potential as a conducting practice\ - \ tool.},\n address = {London, United Kingdom},\n author = {Yang Kyu Lim and Woon\ - \ Seung Yeo},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178851},\n issn\ - \ = {2220-4806},\n month = {June},\n pages = {573--576},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Smartphone-based Music Conducting},\n url\ - \ = {http://www.nime.org/proceedings/2014/nime2014_281.pdf},\n year = {2014}\n\ - }\n" + ID: Harding2018 + abstract: 'This paper provides a new interface for the production and distribution + of high resolution analog control signals, particularly aimed toward the control + of analog modular synthesisers. Control Voltage/Gate interfaces generate Control + Voltage (CV) and Gate Voltage (Gate) as a means of controlling note pitch and + length respectively, and have been with us since 1986 [2]. The authors provide + a unique custom CV/Gate interface and dedicated communication protocol which leverages + standard USB Serial functionality and enables connectivity over a plethora of + computing devices, including embedded devices such as the Raspberry Pi and ARM + based devices including widely available ‘Android TV Boxes''. We provide a general + overview of the unique hardware and communication protocol developments followed + by usage case examples toward tuning and embedded platforms, leveraging softwares + ranging from Pure Data (Pd), Max, and Max for Live (M4L).' 
+ address: 'Blacksburg, Virginia, USA' + author: John Harding and Richard Graham and Edwin Park + bibtex: "@inproceedings{Harding2018,\n abstract = {This paper provides a new interface\ + \ for the production and distribution of high resolution analog control signals,\ + \ particularly aimed toward the control of analog modular synthesisers. Control\ + \ Voltage/Gate interfaces generate Control Voltage (CV) and Gate Voltage (Gate)\ + \ as a means of controlling note pitch and length respectively, and have been\ + \ with us since 1986 [2]. The authors provide a unique custom CV/Gate interface\ + \ and dedicated communication protocol which leverages standard USB Serial functionality\ + \ and enables connectivity over a plethora of computing devices, including embedded\ + \ devices such as the Raspberry Pi and ARM based devices including widely available\ + \ ‘Android TV Boxes'. We provide a general overview of the unique hardware and\ + \ communication protocol developments followed by usage case examples toward tuning\ + \ and embedded platforms, leveraging softwares ranging from Pure Data (Pd), Max,\ + \ and Max for Live (M4L).},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {John Harding and Richard Graham and Edwin Park},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302563},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {234--237},\n publisher = {Virginia Tech},\n title = {CTRL: A Flexible,\ + \ Precision Interface for Analog Synthesis},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0051.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178851 + doi: 10.5281/zenodo.1302563 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 
issn: 2220-4806 month: June - pages: 573--576 - publisher: 'Goldsmiths, University of London' - title: Smartphone-based Music Conducting - url: http://www.nime.org/proceedings/2014/nime2014_281.pdf - year: 2014 + pages: 234--237 + publisher: Virginia Tech + title: 'CTRL: A Flexible, Precision Interface for Analog Synthesis' + url: http://www.nime.org/proceedings/2018/nime2018_paper0051.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: jdeng2014 - abstract: 'Music jamming is an extremely difficult task for musical novices. Trying - to extend this meaningful activity, which can be highly enjoyable, to a larger - recipient group, we present WIJAM, a mobile application for an ad-hoc group of - musical novices to perform improvisation along with a music master. In this ``master-players'''' - paradigm, the master offers a music backing, orchestrates the musical flow, and - gives feedbacks to the players; the players improvise by tapping and sketching - on their smartphones. We believe that this paradigm can be a significant contribution - to the possibility of music playing by a group of novices with no instrumental - training leading to decent musical results.' - address: 'London, United Kingdom' - author: Jun-qi Deng and Francis Chi Moon Lau and Ho-Cheung Ng and Yu-Kwong Kwok - and Hung-Kwan Chen and Yu-heng Liu - bibtex: "@inproceedings{jdeng2014,\n abstract = {Music jamming is an extremely difficult\ - \ task for musical novices. Trying to extend this meaningful activity, which can\ - \ be highly enjoyable, to a larger recipient group, we present WIJAM, a mobile\ - \ application for an ad-hoc group of musical novices to perform improvisation\ - \ along with a music master. In this ``master-players'' paradigm, the master offers\ - \ a music backing, orchestrates the musical flow, and gives feedbacks to the players;\ - \ the players improvise by tapping and sketching on their smartphones. 
We believe\ - \ that this paradigm can be a significant contribution to the possibility of music\ - \ playing by a group of novices with no instrumental training leading to decent\ - \ musical results.},\n address = {London, United Kingdom},\n author = {Jun-qi\ - \ Deng and Francis Chi Moon Lau and Ho-Cheung Ng and Yu-Kwong Kwok and Hung-Kwan\ - \ Chen and Yu-heng Liu},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178746},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {407--410},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {WIJAM: A Mobile Collaborative Improvisation\ - \ Platform under Master-players Paradigm},\n url = {http://www.nime.org/proceedings/2014/nime2014_284.pdf},\n\ - \ year = {2014}\n}\n" + ID: Beyls2018 + abstract: 'This paper describes a machine learning approach in the context of non-idiomatic + human-machine improvisation. In an attempt to avoid explicit mapping of user actions + to machine responses, an experimental machine learning strategy is suggested where + rewards are derived from the implied motivation of the human interactor – two + motivations are at work: integration (aiming to connect with machine generated + material) and expression (independent activity). By tracking consecutive changes + in musical distance (i.e. melodic similarity) between human and machine, such + motivations can be inferred. A variation of Q-learning is used featuring a self-optimizing + variable length state-action-reward list. The system (called Pock) is tunable + into particular behavioral niches by means of a limited number of parameters. + Pock is designed as a recursive structure and behaves as a complex dynamical system. + When tracking systems variables over time, emergent non-trivial patterns reveal + experimental evidence of attractors demonstrating successful adaptation.' 
+ address: 'Blacksburg, Virginia, USA' + author: Peter Beyls + bibtex: "@inproceedings{Beyls2018,\n abstract = {This paper describes a machine\ + \ learning approach in the context of non-idiomatic human-machine improvisation.\ + \ In an attempt to avoid explicit mapping of user actions to machine responses,\ + \ an experimental machine learning strategy is suggested where rewards are derived\ + \ from the implied motivation of the human interactor – two motivations are at\ + \ work: integration (aiming to connect with machine generated material) and expression\ + \ (independent activity). By tracking consecutive changes in musical distance\ + \ (i.e. melodic similarity) between human and machine, such motivations can be\ + \ inferred. A variation of Q-learning is used featuring a self-optimizing variable\ + \ length state-action-reward list. The system (called Pock) is tunable into particular\ + \ behavioral niches by means of a limited number of parameters. Pock is designed\ + \ as a recursive structure and behaves as a complex dynamical system. 
When tracking\ + \ systems variables over time, emergent non-trivial patterns reveal experimental\ + \ evidence of attractors demonstrating successful adaptation.},\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Peter Beyls},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1302565},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {238--243},\n publisher = {Virginia Tech},\n title = {Motivated Learning in\ + \ Human-Machine Improvisation},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0052.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178746 + doi: 10.5281/zenodo.1302565 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 407--410 - publisher: 'Goldsmiths, University of London' - title: 'WIJAM: A Mobile Collaborative Improvisation Platform under Master-players - Paradigm' - url: http://www.nime.org/proceedings/2014/nime2014_284.pdf - year: 2014 + pages: 238--243 + publisher: Virginia Tech + title: Motivated Learning in Human-Machine Improvisation + url: http://www.nime.org/proceedings/2018/nime2018_paper0052.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: tmurraybrowne12014 - abstract: 'The Cave of Sounds is an interactive sound installation made up of new - musical instruments. Exploring what it means to create instruments together within - the context of NIME and the maker scene, each instrument was created by an individual - but with the aim of forming a part of this new ensemble over ten months, with - the final installation debuting at the Barbican in London in August 2013. 
In this - paper, we describe how ideas of prehistoric collective music making inspired and - guided this participatory musical work, both in terms of how it was created and - the audience experience of musical collaboration we aimed to create in the final - installation. Following a detailed description of the installation itself, we - reflect on the successes, lessons and future challenges of encouraging creative - musical collaboration among members of an audience.' - address: 'London, United Kingdom' - author: Tim Murray-Browne and Dom Aversano and Susanna Garcia and Wallace Hobbes - and Daniel Lopez and Tadeo Sendon and Panagiotis Tigas and Kacper Ziemianin and - Duncan Chapman - bibtex: "@inproceedings{tmurraybrowne12014,\n abstract = {The Cave of Sounds is\ - \ an interactive sound installation made up of new musical instruments. Exploring\ - \ what it means to create instruments together within the context of NIME and\ - \ the maker scene, each instrument was created by an individual but with the aim\ - \ of forming a part of this new ensemble over ten months, with the final installation\ - \ debuting at the Barbican in London in August 2013. In this paper, we describe\ - \ how ideas of prehistoric collective music making inspired and guided this participatory\ - \ musical work, both in terms of how it was created and the audience experience\ - \ of musical collaboration we aimed to create in the final installation. 
Following\ - \ a detailed description of the installation itself, we reflect on the successes,\ - \ lessons and future challenges of encouraging creative musical collaboration\ - \ among members of an audience.},\n address = {London, United Kingdom},\n author\ - \ = {Tim Murray-Browne and Dom Aversano and Susanna Garcia and Wallace Hobbes\ - \ and Daniel Lopez and Tadeo Sendon and Panagiotis Tigas and Kacper Ziemianin\ - \ and Duncan Chapman},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178885},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {307--310},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {The Cave of Sounds: An Interactive Installation\ - \ Exploring How We Create Music Together},\n url = {http://www.nime.org/proceedings/2014/nime2014_288.pdf},\n\ - \ year = {2014}\n}\n" + ID: Chandran2018 + abstract: 'InterFACE is an interactive system for musical creation, mediated primarily + through the user''s facial expressions and movements. It aims to take advantage + of the expressive capabilities of the human face to create music in a way that + is both expressive and whimsical. This paper introduces the designs of three virtual + instruments in the InterFACE system: namely, FACEdrum (a drum machine), GrannyFACE + (a granular synthesis sampler), and FACEorgan (a laptop mouth organ using both + face tracking and audio analysis). We present the design behind these instruments + and consider what it means to be able to create music with one''s face. Finally, + we discuss the usability and aesthetic criteria for evaluating such a system, + taking into account our initial design goals as well as the resulting experience + for the performer and audience.' 
+ address: 'Blacksburg, Virginia, USA' + author: Deepak Chandran and Ge Wang + bibtex: "@inproceedings{Chandran2018,\n abstract = {InterFACE is an interactive\ + \ system for musical creation, mediated primarily through the user's facial expressions\ + \ and movements. It aims to take advantage of the expressive capabilities of the\ + \ human face to create music in a way that is both expressive and whimsical. This\ + \ paper introduces the designs of three virtual instruments in the InterFACE system:\ + \ namely, FACEdrum (a drum machine), GrannyFACE (a granular synthesis sampler),\ + \ and FACEorgan (a laptop mouth organ using both face tracking and audio analysis).\ + \ We present the design behind these instruments and consider what it means to\ + \ be able to create music with one's face. Finally, we discuss the usability and\ + \ aesthetic criteria for evaluating such a system, taking into account our initial\ + \ design goals as well as the resulting experience for the performer and audience.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Deepak Chandran and Ge Wang},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1302569},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {244--248},\n publisher = {Virginia Tech},\n title\ + \ = {InterFACE: new faces for musical expression},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0053.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178885 + doi: 10.5281/zenodo.1302569 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 307--310 - publisher: 'Goldsmiths, University of London' - title: 'The Cave of Sounds: An Interactive Installation Exploring How 
We Create - Music Together' - url: http://www.nime.org/proceedings/2014/nime2014_288.pdf - year: 2014 + pages: 244--248 + publisher: Virginia Tech + title: 'InterFACE: new faces for musical expression' + url: http://www.nime.org/proceedings/2018/nime2018_paper0053.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: knymoen12014 - abstract: 'We present Funky Sole Music, a musical interface employing a sole embedded - with three force sensitive resistors in combination with a novel algorithm for - continuous movement classification. A heuristics-based music engine has been implemented, - allowing users to control high-level parameters of the musical output. This provides - a greater degree of control to users without musical expertise compared to what - they get with traditional media playes. By using the movement classification result - not as a direct control action in itself, but as a way to change mapping spaces - and musical sections, the control possibilities offered by the simple interface - are greatly increased.' - address: 'London, United Kingdom' - author: Kristian Nymoen and Sichao Song and Yngve Hafting and Jim Torresen - bibtex: "@inproceedings{knymoen12014,\n abstract = {We present Funky Sole Music,\ - \ a musical interface employing a sole embedded with three force sensitive resistors\ - \ in combination with a novel algorithm for continuous movement classification.\ - \ A heuristics-based music engine has been implemented, allowing users to control\ - \ high-level parameters of the musical output. This provides a greater degree\ - \ of control to users without musical expertise compared to what they get with\ - \ traditional media playes. 
By using the movement classification result not as\ - \ a direct control action in itself, but as a way to change mapping spaces and\ - \ musical sections, the control possibilities offered by the simple interface\ - \ are greatly increased.},\n address = {London, United Kingdom},\n author = {Kristian\ - \ Nymoen and Sichao Song and Yngve Hafting and Jim Torresen},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178895},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {299--302},\n publisher = {Goldsmiths, University of London},\n title = {Funky\ - \ Sole Music: Gait Recognition and Adaptive Mapping},\n url = {http://www.nime.org/proceedings/2014/nime2014_289.pdf},\n\ - \ year = {2014}\n}\n" + ID: Polfreman2018 + abstract: 'Hands are important anatomical structures for musical performance, and + recent developments in input device technology have allowed rather detailed capture + of hand gestures using consumer-level products. While in some musical contexts, + detailed hand and finger movements are required, in others it is sufficient to + communicate discrete hand postures to indicate selection or other state changes. + This research compared three approaches to capturing hand gestures where the shape + of the hand, i.e. the relative positions and angles of finger joints, are an important + part of the gesture. A number of sensor types can be used to capture information + about hand posture, each of which has various practical advantages and disadvantages + for music applications. This study compared three approaches, using optical, inertial + and muscular information, with three sets of 5 hand postures (i.e. static gestures) + and gesture recognition algorithms applied to the device data, aiming to determine + which methods are most effective.' 
+ address: 'Blacksburg, Virginia, USA' + author: Richard Polfreman + bibtex: "@inproceedings{Polfreman2018,\n abstract = {Hands are important anatomical\ + \ structures for musical performance, and recent developments in input device\ + \ technology have allowed rather detailed capture of hand gestures using consumer-level\ + \ products. While in some musical contexts, detailed hand and finger movements\ + \ are required, in others it is sufficient to communicate discrete hand postures\ + \ to indicate selection or other state changes. This research compared three approaches\ + \ to capturing hand gestures where the shape of the hand, i.e. the relative positions\ + \ and angles of finger joints, are an important part of the gesture. A number\ + \ of sensor types can be used to capture information about hand posture, each\ + \ of which has various practical advantages and disadvantages for music applications.\ + \ This study compared three approaches, using optical, inertial and muscular information,\ + \ with three sets of 5 hand postures (i.e. 
static gestures) and gesture recognition\ + \ algorithms applied to the device data, aiming to determine which methods are\ + \ most effective.},\n address = {Blacksburg, Virginia, USA},\n author = {Richard\ + \ Polfreman},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302571},\n editor\ + \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {249--254},\n publisher = {Virginia\ + \ Tech},\n title = {Hand Posture Recognition: IR, IMU and sEMG},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0054.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178895 + doi: 10.5281/zenodo.1302571 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 299--302 - publisher: 'Goldsmiths, University of London' - title: 'Funky Sole Music: Gait Recognition and Adaptive Mapping' - url: http://www.nime.org/proceedings/2014/nime2014_289.pdf - year: 2014 + pages: 249--254 + publisher: Virginia Tech + title: 'Hand Posture Recognition: IR, IMU and sEMG' + url: http://www.nime.org/proceedings/2018/nime2018_paper0054.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: fheller2014 - abstract: 'Although an analog technology, many DJs still value the turntable as - an irreplaceable performance tool. Digital vinyl systems combine the distinct - haptic nature of the analog turntable with the advantages of digital media. They - use special records containing a digital timecode which is then processed by a - computer and mapped to properties like playback speed and direction. These records, - however, are generic and, in contrast to traditional vinyl, do not provide visual - cues representing the structure of the track. 
We present a system that augments - the timecode record with a visualization of song information such as artist, title, - and track length, but also with a waveform that allows to visually navigate to - a certain beat. We conducted a survey examining the acceptance of such tools in - the DJ community and conducted a user study with professional DJs. The system - was widely accepted as a tool in the DJ community and received very positive feedback - during observational mixing sessions with four professional DJs.' - address: 'London, United Kingdom' - author: Florian Heller and Jan Borchers - bibtex: "@inproceedings{fheller2014,\n abstract = {Although an analog technology,\ - \ many DJs still value the turntable as an irreplaceable performance tool. Digital\ - \ vinyl systems combine the distinct haptic nature of the analog turntable with\ - \ the advantages of digital media. They use special records containing a digital\ - \ timecode which is then processed by a computer and mapped to properties like\ - \ playback speed and direction. These records, however, are generic and, in contrast\ - \ to traditional vinyl, do not provide visual cues representing the structure\ - \ of the track. We present a system that augments the timecode record with a visualization\ - \ of song information such as artist, title, and track length, but also with a\ - \ waveform that allows to visually navigate to a certain beat. We conducted a\ - \ survey examining the acceptance of such tools in the DJ community and conducted\ - \ a user study with professional DJs. 
The system was widely accepted as a tool\ - \ in the DJ community and received very positive feedback during observational\ - \ mixing sessions with four professional DJs.},\n address = {London, United Kingdom},\n\ - \ author = {Florian Heller and Jan Borchers},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178796},\n issn = {2220-4806},\n month = {June},\n pages =\ - \ {66--69},\n publisher = {Goldsmiths, University of London},\n title = {Visualizing\ - \ Song Structure on Timecode Vinyls},\n url = {http://www.nime.org/proceedings/2014/nime2014_290.pdf},\n\ - \ year = {2014}\n}\n" + ID: Malloch2018 + abstract: 'The Digital Orchestra Toolbox for Max is an open-source collection of + small modular software tools for aiding the development of Digital Musical Instruments. + Each tool takes the form of an "abstraction" for the visual programming environment + Max, meaning it can be opened and understood by users within the Max environment, + as well as copied, modified, and appropriated as desired. This paper describes + the origins of the Toolbox and our motivations for creating it, broadly outlines + the types of tools included, and follows the development of the project over the + last twelve years. We also present examples of several digital musical instruments + built using the Toolbox.' + address: 'Blacksburg, Virginia, USA' + author: Joseph Malloch and Marlon Mario Schumacher and Stephen Sinclair and Marcelo + Wanderley + bibtex: "@inproceedings{Malloch2018,\n abstract = {The Digital Orchestra Toolbox\ + \ for Max is an open-source collection of small modular software tools for aiding\ + \ the development of Digital Musical Instruments. Each tool takes the form of\ + \ an \"abstraction\" for the visual programming environment Max, meaning it can\ + \ be opened and understood by users within the Max environment, as well as copied,\ + \ modified, and appropriated as desired. 
This paper describes the origins of the\ + \ Toolbox and our motivations for creating it, broadly outlines the types of tools\ + \ included, and follows the development of the project over the last twelve years.\ + \ We also present examples of several digital musical instruments built using\ + \ the Toolbox.},\n address = {Blacksburg, Virginia, USA},\n author = {Joseph Malloch\ + \ and Marlon Mario Schumacher and Stephen Sinclair and Marcelo Wanderley},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302573},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {255--258},\n publisher = {Virginia Tech},\n title\ + \ = {The Digital Orchestra Toolbox for Max},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0055.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178796 + doi: 10.5281/zenodo.1302573 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 66--69 - publisher: 'Goldsmiths, University of London' - title: Visualizing Song Structure on Timecode Vinyls - url: http://www.nime.org/proceedings/2014/nime2014_290.pdf - year: 2014 + pages: 255--258 + publisher: Virginia Tech + title: The Digital Orchestra Toolbox for Max + url: http://www.nime.org/proceedings/2018/nime2018_paper0055.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: mmarier2014 - abstract: 'The development of the cushion-like musical interface called the sponge - started about seven years ago. Since then, it was extensively used to perform - in various settings. The sponge itself is described, but the main focus is on - the evolution of the mapping strategies that are used. 
The author reviews the - guidelines proposed by other researchers and explains how they were concretely - applied with the sponge. He concludes that no single strategy constitutes a solution - to the issue of mapping and that musical compositions are complex entities that - require the use of a multitude of mapping strategies in parallel. It is hoped - that the mappings described combined with new strategies will eventually lead - to the emergence of a musical language that is idiomatic to the sponge.' - address: 'London, United Kingdom' - author: Martin Marier - bibtex: "@inproceedings{mmarier2014,\n abstract = {The development of the cushion-like\ - \ musical interface called the sponge started about seven years ago. Since then,\ - \ it was extensively used to perform in various settings. The sponge itself is\ - \ described, but the main focus is on the evolution of the mapping strategies\ - \ that are used. The author reviews the guidelines proposed by other researchers\ - \ and explains how they were concretely applied with the sponge. He concludes\ - \ that no single strategy constitutes a solution to the issue of mapping and that\ - \ musical compositions are complex entities that require the use of a multitude\ - \ of mapping strategies in parallel. 
It is hoped that the mappings described combined\ - \ with new strategies will eventually lead to the emergence of a musical language\ - \ that is idiomatic to the sponge.},\n address = {London, United Kingdom},\n author\ - \ = {Martin Marier},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178863},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {525--528},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Designing Mappings for the Sponge: Towards\ - \ Spongistic Music},\n url = {http://www.nime.org/proceedings/2014/nime2014_292.pdf},\n\ - \ year = {2014}\n}\n" + ID: Manaris2018 + abstract: 'JythonMusic is a software environment for developing interactive musical + experiences and systems. It is based on jMusic, a software environment for computer-assisted + composition, which was extended within the last decade into a more comprehensive + framework providing composers and software developers with libraries for music + making, image manipulation, building graphical user interfaces, and interacting + with external devices via MIDI and OSC, among others. This environment is free + and open source. It is based on Python, therefore it provides more economical + syntax relative to Java- and C/C++-like languages. JythonMusic rests on top of + Java, so it provides access to the complete Java API and external Java-based libraries + as needed. Also, it works seamlessly with other software, such as PureData, Max/MSP, + and Processing. The paper provides an overview of important JythonMusic libraries + related to constructing interactive musical experiences. It demonstrates their + scope and utility by summarizing several projects developed using JythonMusic, + including interactive sound art installations, new interfaces for sound manipulation + and spatialization, as well as various explorations on mapping among motion, gesture + and music.' 
+ address: 'Blacksburg, Virginia, USA' + author: Bill Manaris and Pangur Brougham-Cook and Dana Hughes and Andrew R. Brown + bibtex: "@inproceedings{Manaris2018,\n abstract = {JythonMusic is a software environment\ + \ for developing interactive musical experiences and systems. It is based on\ + \ jMusic, a software environment for computer-assisted composition, which was\ + \ extended within the last decade into a more comprehensive framework providing\ + \ composers and software developers with libraries for music making, image manipulation,\ + \ building graphical user interfaces, and interacting with external devices via\ + \ MIDI and OSC, among others. This environment is free and open source. It is\ + \ based on Python, therefore it provides more economical syntax relative to Java-\ + \ and C/C++-like languages. JythonMusic rests on top of Java, so it provides\ + \ access to the complete Java API and external Java-based libraries as needed.\ + \ Also, it works seamlessly with other software, such as PureData, Max/MSP, and\ + \ Processing. The paper provides an overview of important JythonMusic libraries\ + \ related to constructing interactive musical experiences. It demonstrates their\ + \ scope and utility by summarizing several projects developed using JythonMusic,\ + \ including interactive sound art installations, new interfaces for sound manipulation\ + \ and spatialization, as well as various explorations on mapping among motion,\ + \ gesture and music.},\n address = {Blacksburg, Virginia, USA},\n author = {Bill\ + \ Manaris and Pangur Brougham-Cook and Dana Hughes and Andrew R. 
Brown},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302575},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {259--262},\n publisher = {Virginia Tech},\n title\ + \ = {JythonMusic: An Environment for Developing Interactive Music Systems},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0056.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178863 + doi: 10.5281/zenodo.1302575 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 525--528 - publisher: 'Goldsmiths, University of London' - title: 'Designing Mappings for the Sponge: Towards Spongistic Music' - url: http://www.nime.org/proceedings/2014/nime2014_292.pdf - year: 2014 + pages: 259--262 + publisher: Virginia Tech + title: 'JythonMusic: An Environment for Developing Interactive Music Systems' + url: http://www.nime.org/proceedings/2018/nime2018_paper0056.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: mneupert2014 - abstract: 'We present an instrument for audio-visual performance that allows to - recombine sounds from a collection of sampled media through concatenative synthesis. - A three-dimensional distribution derived from feature-analysis becomes accessible - through a theremin-inspired interface, allowing the player to shift from exploration - and intuitive navigation toward embodied performance on a granular level. In our - example we illustrate this concept by using the audiovisual recording of an instrumental - performance as a source. 
Our system provides an alternative interface to the musical - instrument''s audiovisual corpus: as the instrument''s sound and behavior is accessed - in ways that are not possible on the instrument itself, the resulting non-linear - playback of the grains generates an instant remix in a cut-up aesthetic. The presented - instrument is a human-computer interface that employs the structural outcome of - machine analysis accessing audiovisual corpora in the context of a musical performance.' - address: 'London, United Kingdom' - author: Joachim Goßmann and Max Neupert - bibtex: "@inproceedings{mneupert2014,\n abstract = {We present an instrument for\ - \ audio-visual performance that allows to recombine sounds from a collection of\ - \ sampled media through concatenative synthesis. A three-dimensional distribution\ - \ derived from feature-analysis becomes accessible through a theremin-inspired\ - \ interface, allowing the player to shift from exploration and intuitive navigation\ - \ toward embodied performance on a granular level. In our example we illustrate\ - \ this concept by using the audiovisual recording of an instrumental performance\ - \ as a source. Our system provides an alternative interface to the musical instrument's\ - \ audiovisual corpus: as the instrument's sound and behavior is accessed in ways\ - \ that are not possible on the instrument itself, the resulting non-linear playback\ - \ of the grains generates an instant remix in a cut-up aesthetic. 
The presented\ - \ instrument is a human-computer interface that employs the structural outcome\ - \ of machine analysis accessing audiovisual corpora in the context of a musical\ - \ performance.},\n address = {London, United Kingdom},\n author = {Joachim Go{\\\ - ss}mann and Max Neupert},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178772},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {151--154},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Musical Interface to Audiovisual Corpora of\ - \ Arbitrary Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_296.pdf},\n\ - \ year = {2014}\n}\n" + ID: Leib2018 + abstract: 'We introduce the Triplexer, a novel foot controller that gives the performer + 3 degrees of freedom over the control of various effects parameters. With the + Triplexer, we aim to expand the performer''s control space by augmenting the capabilities + of the common expression pedal that is found in most effects rigs. Using industrial-grade + weight-detection sensors and widely-adopted communication protocols, the Triplexer + offers a flexible platform that can be integrated into various performance setups + and situations. In this paper, we detail the design of the Triplexer by describing + its hardware, embedded signal processing, and mapping software implementations. + We also offer the results of a user study, which we conducted to evaluate the + usability of our controller.' + address: 'Blacksburg, Virginia, USA' + author: Steven Leib and Anıl Çamcı + bibtex: "@inproceedings{Leib2018,\n abstract = {We introduce the Triplexer, a novel\ + \ foot controller that gives the performer 3 degrees of freedom over the control\ + \ of various effects parameters. 
With the Triplexer, we aim to expand the performer's\ + \ control space by augmenting the capabilities of the common expression pedal\ + \ that is found in most effects rigs. Using industrial-grade weight-detection\ + \ sensors and widely-adopted communication protocols, the Triplexer offers a flexible\ + \ platform that can be integrated into various performance setups and situations.\ + \ In this paper, we detail the design of the Triplexer by describing its hardware,\ + \ embedded signal processing, and mapping software implementations. We also offer\ + \ the results of a user study, which we conducted to evaluate the usability of\ + \ our controller.},\n address = {Blacksburg, Virginia, USA},\n author = {Steven\ + \ Leib and Anıl Çamcı},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302577},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {263--268},\n publisher = {Virginia\ + \ Tech},\n title = {Triplexer: An Expression Pedal with New Degrees of Freedom},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0057.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178772 + doi: 10.5281/zenodo.1302577 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 151--154 - publisher: 'Goldsmiths, University of London' - title: Musical Interface to Audiovisual Corpora of Arbitrary Instruments - url: http://www.nime.org/proceedings/2014/nime2014_296.pdf - year: 2014 + pages: 263--268 + publisher: Virginia Tech + title: 'Triplexer: An Expression Pedal with New Degrees of Freedom' + url: http://www.nime.org/proceedings/2018/nime2018_paper0057.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ibergstrom2014 - abstract: 'We 
introduce two complementary OSC schemata for two contexts of use. - The first is for the complete description of an OSC namespace: detailing the full - set of messages each OSC-enabled system can receive or send, alongside choice - metadata we deem necessary to make full use of each system''s description. The - second context of use is a snapshot (partial or full) of the system''s state. - We also relate our proposed schemata to the current state of the art, and how - using these resolves issues that were left pending with previous research.' - address: 'London, United Kingdom' - author: Ilias Bergstrom and Joan Llobera - bibtex: "@inproceedings{ibergstrom2014,\n abstract = {We introduce two complementary\ - \ OSC schemata for two contexts of use. The first is for the complete description\ - \ of an OSC namespace: detailing the full set of messages each OSC-enabled system\ - \ can receive or send, alongside choice metadata we deem necessary to make full\ - \ use of each system's description. The second context of use is a snapshot (partial\ - \ or full) of the system's state. 
We also relate our proposed schemata to the\ - \ current state of the art, and how using these resolves issues that were left\ - \ pending with previous research.},\n address = {London, United Kingdom},\n author\ - \ = {Ilias Bergstrom and Joan Llobera},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178712},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {311--314},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {OSC-Namespace and OSC-State: Schemata for\ - \ Describing the Namespace and State of OSC-Enabled Systems},\n url = {http://www.nime.org/proceedings/2014/nime2014_300.pdf},\n\ - \ year = {2014}\n}\n" + ID: Úlfarsson2018 + abstract: "This paper reports upon the process of innovation of a new instrument.\ + \ The author has developed the halldorophone a new electroacoustic string instrument\ + \ which makes use of positive feedback as a key element in generating its sound.\n\ + An important objective of the project has been to encourage its use by practicing\ + \ musicians. After ten years of use, the halldorophone has a growing repertoire\ + \ of works by prominent composers and performers. During the development of the\ + \ instrument, the question has been asked: “why do musicians want to use this\ + \ instrument?” and answers have been found through on-going (informal) user studies\ + \ and feedback. As the project progresses, a picture emerges of what qualities\ + \ have led to a culture of acceptance and use around this new instrument.\nThis\ + \ paper describes the halldorophone and presents the rationale for its major design\ + \ features and ergonomic choices, as they relate to the overarching objective\ + \ of nurturing a culture of use and connects it to wider trends." 
+ address: 'Blacksburg, Virginia, USA' + author: Halldór Úlfarsson + bibtex: "@inproceedings{Úlfarsson2018,\n abstract = {This paper reports upon the\ + \ process of innovation of a new instrument. The author has developed the halldorophone\ + \ a new electroacoustic string instrument which makes use of positive feedback\ + \ as a key element in generating its sound.\nAn important objective of the project\ + \ has been to encourage its use by practicing musicians. After ten years of use,\ + \ the halldorophone has a growing repertoire of works by prominent composers and\ + \ performers. During the development of the instrument, the question has been\ + \ asked: “why do musicians want to use this instrument?” and answers have been\ + \ found through on-going (informal) user studies and feedback. As the project\ + \ progresses, a picture emerges of what qualities have led to a culture of acceptance\ + \ and use around this new instrument.\nThis paper describes the halldorophone\ + \ and presents the rationale for its major design features and ergonomic choices,\ + \ as they relate to the overarching objective of nurturing a culture of use and\ + \ connects it to wider trends.},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {Halldór Úlfarsson},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302579},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {269--274},\n publisher = {Virginia\ + \ Tech},\n title = {The halldorophone: The ongoing innovation of a cello-like\ + \ drone instrument},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0058.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178712 + doi: 10.5281/zenodo.1302579 + editor: 'Luke Dahl, Douglas Bowman, Thomas 
Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 311--314 - publisher: 'Goldsmiths, University of London' - title: 'OSC-Namespace and OSC-State: Schemata for Describing the Namespace and State - of OSC-Enabled Systems' - url: http://www.nime.org/proceedings/2014/nime2014_300.pdf - year: 2014 + pages: 269--274 + publisher: Virginia Tech + title: 'The halldorophone: The ongoing innovation of a cello-like drone instrument' + url: http://www.nime.org/proceedings/2018/nime2018_paper0058.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ebertelli2014 - abstract: 'Through examining the decisions and sequences of presenting a multi-media - instrument fabrication program to students, this paper seeks to uncover practical - elements of best practice and possible improvements in science and music education. - The Conductive Music program incorporates public engagement principles, open-source - hardware, DIY ethos, contemporary composition techniques, and educational activities - for creative and analytical thinking. These activities impart positive skills - through multi-media content delivery for all learning types. The program is designed - to test practices for engaging at-risk young people from urban areas in the construction - and performance of new electronic instruments. The goal is to open up the world - of electronic music performance to a new generation of young digital artists and - to replace negative social behaviours with creative outlets for expression through - technology and performance. This paper highlights the key elements designed to - deliver the program''s agenda and examines the ways in which these aims were realised - or tested in the classroom.' 
- address: 'London, United Kingdom' - author: Emily Robertson and Enrico Bertelli - bibtex: "@inproceedings{ebertelli2014,\n abstract = {Through examining the decisions\ - \ and sequences of presenting a multi-media instrument fabrication program to\ - \ students, this paper seeks to uncover practical elements of best practice and\ - \ possible improvements in science and music education. The Conductive Music program\ - \ incorporates public engagement principles, open-source hardware, DIY ethos,\ - \ contemporary composition techniques, and educational activities for creative\ - \ and analytical thinking. These activities impart positive skills through multi-media\ - \ content delivery for all learning types. The program is designed to test practices\ - \ for engaging at-risk young people from urban areas in the construction and performance\ - \ of new electronic instruments. The goal is to open up the world of electronic\ - \ music performance to a new generation of young digital artists and to replace\ - \ negative social behaviours with creative outlets for expression through technology\ - \ and performance. 
This paper highlights the key elements designed to deliver\ - \ the program's agenda and examines the ways in which these aims were realised\ - \ or tested in the classroom.},\n address = {London, United Kingdom},\n author\ - \ = {Emily Robertson and Enrico Bertelli},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178921},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {517--520},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Conductive Music: Teaching Innovative Interface\ - \ Design and Composition Techniques with Open-Source Hardware},\n url = {http://www.nime.org/proceedings/2014/nime2014_301.pdf},\n\ - \ year = {2014}\n}\n" + ID: Tsoukalasb2018 + abstract: 'Laptop orchestras create music, although digitally produced, in a collaborative + live performance not unlike a traditional orchestra. The recent increase in interest + and investment in this style of music creation has paved the way for novel methods + for musicians to create and interact with music. To this end, a number of nontraditional + instruments have been constructed that enable musicians to control sound production + beyond pitch and volume, integrating filtering, musical effects, etc. Wii Remotes + (WiiMotes) have seen heavy use in maker communities, including laptop orchestras, + for their robust sensor array and low cost. The placement of sensors and the form + factor of the device itself are suited for video games, not necessarily live music + creation. In this paper, the authors present a new controller design, based on + the WiiMote hardware platform, to address usability in gesture-centric music performance. + Based on the pilot-study data, the new controller offers unrestricted two-hand + gesture production, smaller footprint, and lower muscle strain.' 
+ address: 'Blacksburg, Virginia, USA' + author: Kyriakos Tsoukalas and Joseph Kubalak and Ivica Ico Bukvic + bibtex: "@inproceedings{Tsoukalasb2018,\n abstract = {Laptop orchestras create music,\ + \ although digitally produced, in a collaborative live performance not unlike\ + \ a traditional orchestra. The recent increase in interest and investment in this\ + \ style of music creation has paved the way for novel methods for musicians to\ + \ create and interact with music. To this end, a number of nontraditional instruments\ + \ have been constructed that enable musicians to control sound production beyond\ + \ pitch and volume, integrating filtering, musical effects, etc. Wii Remotes (WiiMotes)\ + \ have seen heavy use in maker communities, including laptop orchestras, for their\ + \ robust sensor array and low cost. The placement of sensors and the form factor\ + \ of the device itself are suited for video games, not necessarily live music\ + \ creation. In this paper, the authors present a new controller design, based\ + \ on the WiiMote hardware platform, to address usability in gesture-centric music\ + \ performance. 
Based on the pilot-study data, the new controller offers unrestricted\ + \ two-hand gesture production, smaller footprint, and lower muscle strain.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Kyriakos Tsoukalas and Joseph\ + \ Kubalak and Ivica Ico Bukvic},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302581},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {275--280},\n publisher = {Virginia\ + \ Tech},\n title = {L2OrkMote: Reimagining a Low-Cost Wearable Controller for\ + \ a Live Gesture-Centric Music Performance},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0059.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178921 + doi: 10.5281/zenodo.1302581 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 517--520 - publisher: 'Goldsmiths, University of London' - title: 'Conductive Music: Teaching Innovative Interface Design and Composition Techniques - with Open-Source Hardware' - url: http://www.nime.org/proceedings/2014/nime2014_301.pdf - year: 2014 + pages: 275--280 + publisher: Virginia Tech + title: 'L2OrkMote: Reimagining a Low-Cost Wearable Controller for a Live Gesture-Centric + Music Performance' + url: http://www.nime.org/proceedings/2018/nime2018_paper0059.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: tmudd2014 - abstract: 'This paper examines electronic instruments that are based on dynamical - systems, where the behaviour of the instrument depends not only upon the immediate - input to the instrument, but also on the past input. 
Five instruments are presented - as case studies: Michel Waisvisz'' Cracklebox, Dylan Menzies'' Spiro, no-input - mixing desk, the author''s Feedback Joypad, and microphone-loudspeaker feedback. - Links are suggested between the sonic affordances of each instrument and the dynamical - mechanisms embedded in them. This is discussed in the context of contemporary, - materialoriented approaches to composition and particularly to free improvisation - where elements such as unpredictability and instability are often of interest, - and the process of exploration and discovery is an important part of the practice. - Links are also made with the use of dynamical interactions in computer games to - produce situations in which slight variations in the timing and ordering of inputs - can lead to very different outcomes, encouraging similarly explorative approaches.' - address: 'London, United Kingdom' - author: Tom Mudd and Simon Holland and Paul Mulholland and Nick Dalton - bibtex: "@inproceedings{tmudd2014,\n abstract = {This paper examines electronic\ - \ instruments that are based on dynamical systems, where the behaviour of the\ - \ instrument depends not only upon the immediate input to the instrument, but\ - \ also on the past input. Five instruments are presented as case studies: Michel\ - \ Waisvisz' Cracklebox, Dylan Menzies' Spiro, no-input mixing desk, the author's\ - \ Feedback Joypad, and microphone-loudspeaker feedback. Links are suggested between\ - \ the sonic affordances of each instrument and the dynamical mechanisms embedded\ - \ in them. This is discussed in the context of contemporary, materialoriented\ - \ approaches to composition and particularly to free improvisation where elements\ - \ such as unpredictability and instability are often of interest, and the process\ - \ of exploration and discovery is an important part of the practice. 
Links are\ - \ also made with the use of dynamical interactions in computer games to produce\ - \ situations in which slight variations in the timing and ordering of inputs can\ - \ lead to very different outcomes, encouraging similarly explorative approaches.},\n\ - \ address = {London, United Kingdom},\n author = {Tom Mudd and Simon Holland and\ - \ Paul Mulholland and Nick Dalton},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178881},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {126--129},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Dynamical Interactions with Electronic Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_302.pdf},\n year = {2014}\n\ + ID: Armitage2018 + abstract: 'In digital musical instrument design, different tools and methods offer + a variety of approaches for constraining the exploration of musical gestures and + sounds. Toolkits made of modular components usefully constrain exploration towards + simple, quick and functional combinations, and methods such as sketching and model-making + alternatively allow imagination and narrative to guide exploration. In this work + we sought to investigate a context where these approaches to exploration were + combined. We designed a craft workshop for 20 musical instrument designers, where + groups were given the same partly-finished instrument to craft for one hour with + raw materials, and though the task was open ended, they were prompted to focus + on subtle details that might distinguish their instruments. Despite the prompt + the groups diverged dramatically in intent and style, and generated gestural language + rapidly and flexibly. By the end, each group had developed a distinctive approach + to constraint, exploratory style, collaboration and interpretation of the instrument + and workshop materials. 
We reflect on this outcome to discuss advantages and disadvantages + to integrating digital musical instrument design tools and methods, and how to + further investigate and extend this approach.' + address: 'Blacksburg, Virginia, USA' + author: Jack Armitage and Andrew P. McPherson + bibtex: "@inproceedings{Armitage2018,\n abstract = {In digital musical instrument\ + \ design, different tools and methods offer a variety of approaches for constraining\ + \ the exploration of musical gestures and sounds. Toolkits made of modular components\ + \ usefully constrain exploration towards simple, quick and functional combinations,\ + \ and methods such as sketching and model-making alternatively allow imagination\ + \ and narrative to guide exploration. In this work we sought to investigate a\ + \ context where these approaches to exploration were combined. We designed a craft\ + \ workshop for 20 musical instrument designers, where groups were given the same\ + \ partly-finished instrument to craft for one hour with raw materials, and though\ + \ the task was open ended, they were prompted to focus on subtle details that\ + \ might distinguish their instruments. Despite the prompt the groups diverged\ + \ dramatically in intent and style, and generated gestural language rapidly and\ + \ flexibly. By the end, each group had developed a distinctive approach to constraint,\ + \ exploratory style, collaboration and interpretation of the instrument and workshop\ + \ materials. We reflect on this outcome to discuss advantages and disadvantages\ + \ to integrating digital musical instrument design tools and methods, and how\ + \ to further investigate and extend this approach.},\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Jack Armitage and Andrew P. 
McPherson},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302583},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {281--286},\n publisher = {Virginia Tech},\n title\ + \ = {Crafting Digital Musical Instruments: An Exploratory Workshop Study},\n url\ + \ = {http://www.nime.org/proceedings/2018/nime2018_paper0060.pdf},\n year = {2018}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178881 + doi: 10.5281/zenodo.1302583 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 126--129 - publisher: 'Goldsmiths, University of London' - title: Dynamical Interactions with Electronic Instruments - url: http://www.nime.org/proceedings/2014/nime2014_302.pdf - year: 2014 + pages: 281--286 + publisher: Virginia Tech + title: 'Crafting Digital Musical Instruments: An Exploratory Workshop Study' + url: http://www.nime.org/proceedings/2018/nime2018_paper0060.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: mbretan2014 - abstract: 'As robots become more pervasive in the world we think about how this - might influence the way in which people experience music. We introduce the concept - of a "robotic musical companion" (RMC) in the form of Shimi, a smart-phone enabled - five degree-of-freedom (DoF) robotic platform. We discuss experiences individuals - tend to have with music as consumers and performers and explore how these experiences - can be modified, aided, or improved by the inherent synergies between a human - and robot. An overview of several applications developed for Shimi is provided. 
- These applications place Shimi in various roles and enable human-robotic interactions - (HRIs) that are highlighted by more personable social communications using natural - language and other forms of communication.' - address: 'London, United Kingdom' - author: Mason Bretan and Gil Weinberg - bibtex: "@inproceedings{mbretan2014,\n abstract = {As robots become more pervasive\ - \ in the world we think about how this might influence the way in which people\ - \ experience music. We introduce the concept of a \"robotic musical companion\"\ - \ (RMC) in the form of Shimi, a smart-phone enabled five degree-of-freedom (DoF)\ - \ robotic platform. We discuss experiences individuals tend to have with music\ - \ as consumers and performers and explore how these experiences can be modified,\ - \ aided, or improved by the inherent synergies between a human and robot. An overview\ - \ of several applications developed for Shimi is provided. These applications\ - \ place Shimi in various roles and enable human-robotic interactions (HRIs) that\ - \ are highlighted by more personable social communications using natural language\ - \ and other forms of communication.},\n address = {London, United Kingdom},\n\ - \ author = {Mason Bretan and Gil Weinberg},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178724},\n issn = {2220-4806},\n month = {June},\n pages =\ - \ {315--318},\n publisher = {Goldsmiths, University of London},\n title = {Chronicles\ - \ of a Robotic Musical Companion},\n url = {http://www.nime.org/proceedings/2014/nime2014_303.pdf},\n\ - \ year = {2014}\n}\n" + ID: Kalo2018 + abstract: 'Incremental robotic sheet forming is used to fabricate a novel cymbal + shape based on models of geometric chaos for stadium shaped boundaries. 
This provides + a proof-of-concept that this robotic fabrication technique might be a candidate + method for creating novel metallic ideophones that are based on sheet deformations. + Given that the technique does not require molding, it is well suited for both + rapid and iterative prototyping and the fabrication of individual pieces. With + advances in miniaturization, this approach may also be suitable for personal fabrication. + In this paper we discuss this technique as well as aspects of the geometry of + stadium cymbals and their impact on the resulting instrument.' + address: 'Blacksburg, Virginia, USA' + author: Ammar Kalo and Georg Essl + bibtex: "@inproceedings{Kalo2018,\n abstract = {Incremental robotic sheet forming\ + \ is used to fabricate a novel cymbal shape based on models of geometric chaos\ + \ for stadium shaped boundaries. This provides a proof-of-concept that this robotic\ + \ fabrication technique might be a candidate method for creating novel metallic\ + \ ideophones that are based on sheet deformations. Given that the technique does\ + \ not require molding, it is well suited for both rapid and iterative prototyping\ + \ and the fabrication of individual pieces. With advances in miniaturization,\ + \ this approach may also be suitable for personal fabrication. 
In this paper we\ + \ discuss this technique as well as aspects of the geometry of stadium cymbals\ + \ and their impact on the resulting instrument.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Ammar Kalo and Georg Essl},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1302585},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {287--292},\n publisher = {Virginia Tech},\n title = {Individual Fabrication\ + \ of Cymbals using Incremental Robotic Sheet Forming},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0061.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178724 + doi: 10.5281/zenodo.1302585 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 315--318 - publisher: 'Goldsmiths, University of London' - title: Chronicles of a Robotic Musical Companion - url: http://www.nime.org/proceedings/2014/nime2014_303.pdf - year: 2014 + pages: 287--292 + publisher: Virginia Tech + title: Individual Fabrication of Cymbals using Incremental Robotic Sheet Forming + url: http://www.nime.org/proceedings/2018/nime2018_paper0061.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: sserafin2014 - abstract: 'In this paper we propose an empirical method to develop mapping strategies - between a gestural based interface (the Gloves) and physically based sound synthesis - models. An experiment was performed in order to investigate which kind of gestures - listeners associate to synthesised sounds produced using physical models, corresponding - to three categories of sound: sustained, iterative and impulsive. 
The results - of the experiment show that listeners perform similar gestures when controlling - sounds from the different categories. We used such gestures in order to create - the mapping strategy between the Gloves and the physically based synthesis engine.' - address: 'London, United Kingdom' - author: Stefania Serafin and Stefano Trento and Francesco Grani and Hannah Perner-Wilson - and Seb Madgwick and Tom Mitchell - bibtex: "@inproceedings{sserafin2014,\n abstract = {In this paper we propose an\ - \ empirical method to develop mapping strategies between a gestural based interface\ - \ (the Gloves) and physically based sound synthesis models. An experiment was\ - \ performed in order to investigate which kind of gestures listeners associate\ - \ to synthesised sounds produced using physical models, corresponding to three\ - \ categories of sound: sustained, iterative and impulsive. The results of the\ - \ experiment show that listeners perform similar gestures when controlling sounds\ - \ from the different categories. 
We used such gestures in order to create the\ - \ mapping strategy between the Gloves and the physically based synthesis engine.},\n\ - \ address = {London, United Kingdom},\n author = {Stefania Serafin and Stefano\ - \ Trento and Francesco Grani and Hannah Perner-Wilson and Seb Madgwick and Tom\ - \ Mitchell},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178937},\n issn\ - \ = {2220-4806},\n month = {June},\n pages = {521--524},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Controlling Physically Based Virtual Musical\ - \ Instruments Using The Gloves},\n url = {http://www.nime.org/proceedings/2014/nime2014_307.pdf},\n\ - \ year = {2014}\n}\n" + ID: McDowell2018 + abstract: 'This paper reports the development of a ‘haptic-listening'' system which + presents the listener with a representation of the vibrotactile feedback perceived + by a classical guitarist during performance through the use of haptic feedback + technology. The paper describes the design of the haptic-listening system which + is in two prototypes: the “DIY Haptic Guitar” and a more robust haptic-listening + Trial prototype using a Reckhorn BS-200 shaker. Through two experiments, the perceptual + significance and overall musical contribution of the addition of haptic feedback + in a listening context was evaluated. Subjects preferred listening to the classical + guitar presentation with the addition of haptic feedback and the addition of haptic + feedback contributed to listeners'' engagement with a performance. The results + of the experiments and their implications are discussed in this paper.' 
+ address: 'Blacksburg, Virginia, USA' + author: John McDowell + bibtex: "@inproceedings{McDowell2018,\n abstract = {This paper reports the development\ + \ of a ‘haptic-listening' system which presents the listener with a representation\ + \ of the vibrotactile feedback perceived by a classical guitarist during performance\ + \ through the use of haptic feedback technology. The paper describes the design\ + \ of the haptic-listening system which is in two prototypes: the “DIY Haptic Guitar”\ + \ and a more robust haptic-listening Trial prototype using a Reckhorn BS-200 shaker.\ + \ Through two experiments, the perceptual significance and overall musical contribution\ + \ of the addition of haptic feedback in a listening context was evaluated. Subjects\ + \ preferred listening to the classical guitar presentation with the addition of\ + \ haptic feedback and the addition of haptic feedback contributed to listeners'\ + \ engagement with a performance. The results of the experiments and their implications\ + \ are discussed in this paper.},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {John McDowell},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302587},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {293--298},\n publisher = {Virginia\ + \ Tech},\n title = {Haptic-Listening and the Classical Guitar},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0062.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178937 + doi: 10.5281/zenodo.1302587 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 521--524 - publisher: 'Goldsmiths, University of London' - title: Controlling Physically Based Virtual Musical 
Instruments Using The Gloves - url: http://www.nime.org/proceedings/2014/nime2014_307.pdf - year: 2014 + pages: 293--298 + publisher: Virginia Tech + title: Haptic-Listening and the Classical Guitar + url: http://www.nime.org/proceedings/2018/nime2018_paper0062.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: cgnegy12014 - abstract: 'CollideFx is a real-time audio effects processor that integrates the - physics of real objects into the parameter space of the signal chain. Much like - a traditional signal chain, the user can choose a series of effects and offer - realtime control to their various parameters. In this work, we introduce a means - of creating tree-like signal graphs that dynamically change their routing in response - to changes in the location of the unit generators in a virtual space. Signals - are rerouted using a crossfading scheme that avoids the harsh clicks and pops - associated with amplitude discontinuities. The unit generators are easily controllable - using a click and drag interface that responds using familiar physics. CollideFx - brings the interactivity of a video game together with the purpose of creating - interesting and complex audio effects. With little difficulty, users can craft - custom effects, or alternatively, can fling a unit generator into a cluster of - several others to obtain more surprising results, letting the physics engine do - the decision making.' - address: 'London, United Kingdom' - author: Chet Gnegy - bibtex: "@inproceedings{cgnegy12014,\n abstract = {CollideFx is a real-time audio\ - \ effects processor that integrates the physics of real objects into the parameter\ - \ space of the signal chain. 
Much like a traditional signal chain, the user can\ - \ choose a series of effects and offer realtime control to their various parameters.\ - \ In this work, we introduce a means of creating tree-like signal graphs that\ - \ dynamically change their routing in response to changes in the location of the\ - \ unit generators in a virtual space. Signals are rerouted using a crossfading\ - \ scheme that avoids the harsh clicks and pops associated with amplitude discontinuities.\ - \ The unit generators are easily controllable using a click and drag interface\ - \ that responds using familiar physics. CollideFx brings the interactivity of\ - \ a video game together with the purpose of creating interesting and complex audio\ - \ effects. With little difficulty, users can craft custom effects, or alternatively,\ - \ can fling a unit generator into a cluster of several others to obtain more surprising\ - \ results, letting the physics engine do the decision making.},\n address = {London,\ - \ United Kingdom},\n author = {Chet Gnegy},\n booktitle = {Proceedings of the\ + ID: Harrison2018 + abstract: 'The design of traditional musical instruments is a process of incremental + refinement over many centuries of innovation. Conversely, digital musical instruments + (DMIs), being unconstrained by requirements of efficient acoustic sound production + and ergonomics, can take on forms which are more abstract in their relation to + the mechanism of control and sound production. In this paper we consider the case + of designing DMIs for use in existing musical cultures, and pose questions around + the social and technical acceptability of certain design choices relating to global + physical form and input modality (sensing strategy and the input gestures that + it affords). We designed four guitar-derivative DMIs designed to be suitable to + perform a strummed harmonic accompaniment to a folk tune. 
Each instrument possessed + varying degrees of `guitar-likeness'', based either on the form and aesthetics + of the guitar or the specific mode of interaction. We conducted a study where + both non-musicians and guitarists played two versions of the instruments and completed + musical tasks with each instrument. The results of this study highlight the complex + interaction between global form and input modality when designing for existing + musical cultures.' + address: 'Blacksburg, Virginia, USA' + author: Jacob Harrison and Robert H Jack and Fabio Morreale and Andrew P. McPherson + bibtex: "@inproceedings{Harrison2018,\n abstract = {The design of traditional musical\ + \ instruments is a process of incremental refinement over many centuries of innovation.\ + \ Conversely, digital musical instruments (DMIs), being unconstrained by requirements\ + \ of efficient acoustic sound production and ergonomics, can take on forms which\ + \ are more abstract in their relation to the mechanism of control and sound production.\ + \ In this paper we consider the case of designing DMIs for use in existing musical\ + \ cultures, and pose questions around the social and technical acceptability of\ + \ certain design choices relating to global physical form and input modality (sensing\ + \ strategy and the input gestures that it affords). We designed four guitar-derivative\ + \ DMIs designed to be suitable to perform a strummed harmonic accompaniment to\ + \ a folk tune. Each instrument possessed varying degrees of `guitar-likeness',\ + \ based either on the form and aesthetics of the guitar or the specific mode of\ + \ interaction. 
We conducted a study where both non-musicians and guitarists played\ + \ two versions of the instruments and completed musical tasks with each instrument.\ + \ The results of this study highlight the complex interaction between global form\ + \ and input modality when designing for existing musical cultures.},\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {Jacob Harrison and Robert H Jack\ + \ and Fabio Morreale and Andrew P. McPherson},\n booktitle = {Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178770},\n issn = {2220-4806},\n month = {June},\n pages =\ - \ {427--430},\n publisher = {Goldsmiths, University of London},\n title = {CollideFx:\ - \ A Physics-Based Audio Effects Processor},\n url = {http://www.nime.org/proceedings/2014/nime2014_308.pdf},\n\ - \ year = {2014}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178770 - issn: 2220-4806 - month: June - pages: 427--430 - publisher: 'Goldsmiths, University of London' - title: 'CollideFx: A Physics-Based Audio Effects Processor' - url: http://www.nime.org/proceedings/2014/nime2014_308.pdf - year: 2014 - - -- ENTRYTYPE: inproceedings - ID: tbarraclough2014 - abstract: 'This paper describes the Modulome System, a new hardware interface set - for group-based electronic music performance and installation. Taking influence - from a variety of established interfaces, the Modulome is a modular controller - with application dependant use cases.' - address: 'London, United Kingdom' - author: Timothy J Barraclough and Jim Murphy and Ajay Kapur - bibtex: "@inproceedings{tbarraclough2014,\n abstract = {This paper describes the\ - \ Modulome System, a new hardware interface set for group-based electronic music\ - \ performance and installation. 
Taking influence from a variety of established\ - \ interfaces, the Modulome is a modular controller with application dependant\ - \ use cases.},\n address = {London, United Kingdom},\n author = {Timothy J Barraclough\ - \ and Jim Murphy and Ajay Kapur},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178708},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {155--158},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {New Open-Source Interfaces for Group Based\ - \ Participatory Performance of Live Electronic Music},\n url = {http://www.nime.org/proceedings/2014/nime2014_309.pdf},\n\ - \ year = {2014}\n}\n" + \ {10.5281/zenodo.1302589},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {299--304},\n publisher = {Virginia Tech},\n title = {When is a Guitar not\ + \ a Guitar? Cultural Form, Input Modality and Expertise},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0063.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178708 + doi: 10.5281/zenodo.1302589 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 155--158 - publisher: 'Goldsmiths, University of London' - title: New Open-Source Interfaces for Group Based Participatory Performance of Live - Electronic Music - url: http://www.nime.org/proceedings/2014/nime2014_309.pdf - year: 2014 + pages: 299--304 + publisher: Virginia Tech + title: 'When is a Guitar not a Guitar? 
Cultural Form, Input Modality and Expertise' + url: http://www.nime.org/proceedings/2018/nime2018_paper0063.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: olahdeoja2014 - abstract: 'This paper provides a report of a research effort to transform architectural - and scenographic surfaces into sound sources and use them in artistic creation. - Structure-borne sound drivers are employed to induce sound into the solid surfaces, - making them vibrate and emit sound. The sound waves can be perceived both via - the aural (airborne diffusion) as well as the tactile (structure-borne diffusion) - senses. The paper describes the main challenges encountered in the use of structure-borne - sound technology, as well as the current results in overcoming them. Two completed - artistic projects are presented in order to illustrate the creative possibilities - enabled by the research.' - address: 'London, United Kingdom' - author: Otso Lähdeoja - bibtex: "@inproceedings{olahdeoja2014,\n abstract = {This paper provides a report\ - \ of a research effort to transform architectural and scenographic surfaces into\ - \ sound sources and use them in artistic creation. Structure-borne sound drivers\ - \ are employed to induce sound into the solid surfaces, making them vibrate and\ - \ emit sound. The sound waves can be perceived both via the aural (airborne diffusion)\ - \ as well as the tactile (structure-borne diffusion) senses. The paper describes\ - \ the main challenges encountered in the use of structure-borne sound technology,\ - \ as well as the current results in overcoming them. 
Two completed artistic projects\ - \ are presented in order to illustrate the creative possibilities enabled by the\ - \ research.},\n address = {London, United Kingdom},\n author = {Otso L\\''ahdeoja},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178843},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {319--322},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Structure-Borne Sound and Aurally Active Spaces},\n url\ - \ = {http://www.nime.org/proceedings/2014/nime2014_310.pdf},\n year = {2014}\n\ - }\n" + ID: Larsen2018 + abstract: 'Common emotional effects following a stroke include depression, apathy + and lack of motivation. We conducted a longitudinal case study to investigate + if enabling a post-stroke former guitarist re-learn to play guitar would help + increase motivation for self rehabilitation and quality of life after suffering + a stroke. The intervention lasted three weeks during which the participant had + a fully functional electrical guitar fitted with a strumming device controlled + by a foot pedal at his free disposal. The device replaced right strumming of the + strings, and the study showed that the participant, who was highly motivated, + played 20 sessions despite system latency and reduced musical expression. He incorporated + his own literature and equipment into his playing routine and improved greatly + as the study progressed. He was able to play alone and keep a steady rhythm in + time with backing tracks that went as fast as 120bpm. During the study he was + able to lower his error rate to 33%, while his average flutter also decreased.' + address: 'Blacksburg, Virginia, USA' + author: Jeppe Larsen and Hendrik Knoche and Dan Overholt + bibtex: "@inproceedings{Larsen2018,\n abstract = {Common emotional effects following\ + \ a stroke include depression, apathy and lack of motivation. 
We conducted a longitudinal\ + \ case study to investigate if enabling a post-stroke former guitarist re-learn\ + \ to play guitar would help increase motivation for self rehabilitation and quality\ + \ of life after suffering a stroke. The intervention lasted three weeks during\ + \ which the participant had a fully functional electrical guitar fitted with a\ + \ strumming device controlled by a foot pedal at his free disposal. The device\ + \ replaced right strumming of the strings, and the study showed that the participant,\ + \ who was highly motivated, played 20 sessions despite system latency and reduced\ + \ musical expression. He incorporated his own literature and equipment into his\ + \ playing routine and improved greatly as the study progressed. He was able to\ + \ play alone and keep a steady rhythm in time with backing tracks that went as\ + \ fast as 120bpm. During the study he was able to lower his error rate to 33%,\ + \ while his average flutter also decreased.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Jeppe Larsen and Hendrik Knoche and Dan Overholt},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302591},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {305--310},\n publisher = {Virginia Tech},\n title\ + \ = {A Longitudinal Field Trial with a Hemiplegic Guitarist Using The Actuated\ + \ Guitar},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0064.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178843 + doi: 10.5281/zenodo.1302591 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 319--322 - publisher: 'Goldsmiths, University of London' - title: Structure-Borne 
Sound and Aurally Active Spaces - url: http://www.nime.org/proceedings/2014/nime2014_310.pdf - year: 2014 + pages: 305--310 + publisher: Virginia Tech + title: A Longitudinal Field Trial with a Hemiplegic Guitarist Using The Actuated + Guitar + url: http://www.nime.org/proceedings/2018/nime2018_paper0064.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: dwikstrom2014 - abstract: 'In this paper an emotionally justified approach for controlling sound - with physiology is presented. Measurements of listeners'' physiology, while they - are listening to recorded music of their own choosing, are used to create a regression - model that predicts features extracted from music with the help of the listeners'' - physiological response patterns. This information can be used as a control signal - to drive musical composition and synthesis of new sounds an approach involving - concatenative sound synthesis is suggested. An evaluation study was conducted - to test the feasibility of the model. A multiple linear regression model and an - artificial neural network model were evaluated against a constant regressor, or - dummy model. The dummy model outperformed the other models in prediction accuracy, - but the artificial neural network model achieved significant correlations between - predictions and target values for many acoustic features.' - address: 'London, United Kingdom' - author: D. J. Valtteri Wikström - bibtex: "@inproceedings{dwikstrom2014,\n abstract = {In this paper an emotionally\ - \ justified approach for controlling sound with physiology is presented. 
Measurements\ - \ of listeners' physiology, while they are listening to recorded music of their\ - \ own choosing, are used to create a regression model that predicts features extracted\ - \ from music with the help of the listeners' physiological response patterns.\ - \ This information can be used as a control signal to drive musical composition\ - \ and synthesis of new sounds an approach involving concatenative sound synthesis\ - \ is suggested. An evaluation study was conducted to test the feasibility of the\ - \ model. A multiple linear regression model and an artificial neural network model\ - \ were evaluated against a constant regressor, or dummy model. The dummy model\ - \ outperformed the other models in prediction accuracy, but the artificial neural\ - \ network model achieved significant correlations between predictions and target\ - \ values for many acoustic features.},\n address = {London, United Kingdom},\n\ - \ author = {D. J. Valtteri Wikstr\\''om},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178981},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {549--552},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Musical Composition by Regressional Mapping\ - \ of Physiological Responses to Acoustic Features},\n url = {http://www.nime.org/proceedings/2014/nime2014_311.pdf},\n\ - \ year = {2014}\n}\n" + ID: Stapleton2018 + abstract: 'In this paper we report preliminary observations from an ongoing study + into how musicians explore and adapt to the parameter space of a virtual-acoustic + string bridge plate instrument. These observations inform (and are informed by) + a wider approach to understanding the development of skill and style in interactions + between musicians and musical instruments. 
We discuss a performance-driven ecosystemic + approach to studying musical relationships, drawing on arguments from the literature + which emphasise the need to go beyond simplistic notions of control and usability + when assessing exploratory and performatory musical interactions. Lastly, we focus + on processes of perceptual learning and co-tuning between musician and instrument, + and how these activities may contribute to the emergence of personal style as + a hallmark of skilful music-making.' + address: 'Blacksburg, Virginia, USA' + author: Paul Stapleton and Maarten van Walstijn and Sandor Mehes + bibtex: "@inproceedings{Stapleton2018,\n abstract = {In this paper we report preliminary\ + \ observations from an ongoing study into how musicians explore and adapt to the\ + \ parameter space of a virtual-acoustic string bridge plate instrument. These\ + \ observations inform (and are informed by) a wider approach to understanding\ + \ the development of skill and style in interactions between musicians and musical\ + \ instruments. We discuss a performance-driven ecosystemic approach to studying\ + \ musical relationships, drawing on arguments from the literature which emphasise\ + \ the need to go beyond simplistic notions of control and usability when assessing\ + \ exploratory and performatory musical interactions. 
Lastly, we focus on processes\ + \ of perceptual learning and co-tuning between musician and instrument, and how\ + \ these activities may contribute to the emergence of personal style as a hallmark\ + \ of skilful music-making.},\n address = {Blacksburg, Virginia, USA},\n author\ + \ = {Paul Stapleton and Maarten van Walstijn and Sandor Mehes},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302593},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {311--314},\n publisher = {Virginia Tech},\n title = {Co-Tuning Virtual-Acoustic\ + \ Performance Ecosystems: observations on the development of skill and style in\ + \ the study of musician-instrument relationships},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0065.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178981 + doi: 10.5281/zenodo.1302593 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 549--552 - publisher: 'Goldsmiths, University of London' - title: Musical Composition by Regressional Mapping of Physiological Responses to - Acoustic Features - url: http://www.nime.org/proceedings/2014/nime2014_311.pdf - year: 2014 + pages: 311--314 + publisher: Virginia Tech + title: 'Co-Tuning Virtual-Acoustic Performance Ecosystems: observations on the development + of skill and style in the study of musician-instrument relationships' + url: http://www.nime.org/proceedings/2018/nime2018_paper0065.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: jlong2014 - abstract: 'This paper describes the Robotic Taishogoto, a new robotic musical instrument - for performance, musical installations, and educational purposes. 
The primary - goals of its creation is to provide an easy to use, cost effective, compact and - integrated acoustic instrument which is fully automated and controllable via standard - MIDI commands. This paper describes the technical details of its design and implementation - including the mechanics, electronics and firmware. It also outlines various control - methodologies and use cases for the instrument.' - address: 'London, United Kingdom' - author: Jason Long - bibtex: "@inproceedings{jlong2014,\n abstract = {This paper describes the Robotic\ - \ Taishogoto, a new robotic musical instrument for performance, musical installations,\ - \ and educational purposes. The primary goals of its creation is to provide an\ - \ easy to use, cost effective, compact and integrated acoustic instrument which\ - \ is fully automated and controllable via standard MIDI commands. This paper describes\ - \ the technical details of its design and implementation including the mechanics,\ - \ electronics and firmware. It also outlines various control methodologies and\ - \ use cases for the instrument.},\n address = {London, United Kingdom},\n author\ - \ = {Jason Long},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178853},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {479--482},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {The Robotic Taishogoto: A New Plug 'n Play\ - \ Desktop Performance Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_313.pdf},\n\ - \ year = {2014}\n}\n" + ID: Fish2018 + abstract: 'The environment of zero gravity affords a unique medium for new modalities + of musical performance, both in the design of instruments, and human interactions + with said instruments. To explore this medium, we have created and flown Telemetron, + the first musical instrument specifically designed for and tested in the zero + gravity environment. 
The resultant instrument (leveraging gyroscopes and wireless + telemetry transmission) and recorded performance represent an initial exploration + of compositions that are unique to the physics and dynamics of outer space. We + describe the motivations for this instrument, and the unique constraints involved + in designing for this environment. This initial design suggests possibilities + for further experiments in musical instrument design for outer space.' + address: 'Blacksburg, Virginia, USA' + author: 'Fish II, Sands A. and Nicole L''Huillier' + bibtex: "@inproceedings{Fish2018,\n abstract = {The environment of zero gravity\ + \ affords a unique medium for new modalities of musical performance, both in the\ + \ design of instruments, and human interactions with said instruments. To explore\ + \ this medium, we have created and flown Telemetron, the first musical instrument\ + \ specifically designed for and tested in the zero gravity environment. The resultant\ + \ instrument (leveraging gyroscopes and wireless telemetry transmission) and recorded\ + \ performance represent an initial exploration of compositions that are unique\ + \ to the physics and dynamics of outer space. We describe the motivations for\ + \ this instrument, and the unique constraints involved in designing for this environment.\ + \ This initial design suggests possibilities for further experiments in musical\ + \ instrument design for outer space.},\n address = {Blacksburg, Virginia, USA},\n\ + \ author = {Fish II, Sands A. 
and Nicole L'Huillier},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302595},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {315--317},\n publisher = {Virginia Tech},\n title = {Telemetron: A\ + \ Musical Instrument for Performance in Zero Gravity},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0066.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178853 + doi: 10.5281/zenodo.1302595 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 479--482 - publisher: 'Goldsmiths, University of London' - title: 'The Robotic Taishogoto: A New Plug ''n Play Desktop Performance Instrument' - url: http://www.nime.org/proceedings/2014/nime2014_313.pdf - year: 2014 + pages: 315--317 + publisher: Virginia Tech + title: 'Telemetron: A Musical Instrument for Performance in Zero Gravity' + url: http://www.nime.org/proceedings/2018/nime2018_paper0066.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: pmathews2014 - abstract: 'Networked musical performance using networks of computers for live performance - of electronic music has evolved over a number of decades but has tended to rely - upon customized and highly specialized software designed specifically for particular - artistic goals. This paper presents Tangle, a flexible software framework designed - to provide a basis for performance on any number of distinct instruments. The - network includes features to simplify the control of robotic instruments, such - as automated latency compensation and self-testing, while being simple to extend - in order to implement device-specific logic and failsafes. 
Tangle has been tested - on two diverse systems incorporating a number of unique and complex mechatronic - instruments.' - address: 'London, United Kingdom' - author: Paul Mathews and Ness Morris and Jim Murphy and Ajay Kapur and Dale Carnegie - bibtex: "@inproceedings{pmathews2014,\n abstract = {Networked musical performance\ - \ using networks of computers for live performance of electronic music has evolved\ - \ over a number of decades but has tended to rely upon customized and highly specialized\ - \ software designed specifically for particular artistic goals. This paper presents\ - \ Tangle, a flexible software framework designed to provide a basis for performance\ - \ on any number of distinct instruments. The network includes features to simplify\ - \ the control of robotic instruments, such as automated latency compensation and\ - \ self-testing, while being simple to extend in order to implement device-specific\ - \ logic and failsafes. Tangle has been tested on two diverse systems incorporating\ - \ a number of unique and complex mechatronic instruments.},\n address = {London,\ - \ United Kingdom},\n author = {Paul Mathews and Ness Morris and Jim Murphy and\ - \ Ajay Kapur and Dale Carnegie},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178867},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {187--190},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Tangle: a Flexible Framework for Performance\ - \ with Advanced Robotic Musical Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_314.pdf},\n\ - \ year = {2014}\n}\n" + ID: Wilcox2018 + abstract: 'This paper covers the technical and aesthetic development of robotcowboy, + the author''s ongoing human-computer wearable performance project. 
Conceived as + an idiosyncratic manifesto on the embodiment of computational sound, the original + robotcowboy system was built in 2006-2007 using a belt-mounted industrial wearable + computer running GNU/Linux and Pure Data, external USB audio/MIDI interfaces, + HID gamepads, and guitar. Influenced by roadworthy analog gear, chief system requirements + were mobility, plug-and-play, reliability, and low cost. From 2007 to 2011, this + first iteration "Cabled Madness" melded rock music with realtime algorithmic composition + and revolved around cyborg human/system tension, aspects of improvisation, audience + feedback, and an inherent capability of failure. The second iteration "Onward + to Mars" explored storytelling from 2012-2015 through the one-way journey of the + first human on Mars with the computing system adapted into a self-contained spacesuit + backpack. Now 10 years on, a new robotcowboy 2.0 system powers a third iteration + with only an iPhone and PdParty, the author''s open-source iOS application which + runs Pure Data patches and provides full duplex stereo audio, MIDI, HID game controller + support, and Open Sound Control communication. The future is bright, do you have + room to wiggle?' + address: 'Blacksburg, Virginia, USA' + author: Dan Wilcox + bibtex: "@inproceedings{Wilcox2018,\n abstract = {This paper covers the technical\ + \ and aesthetic development of robotcowboy, the author's ongoing human-computer\ + \ wearable performance project. Conceived as an idiosyncratic manifesto on the\ + \ embodiment of computational sound, the original robotcowboy system was built\ + \ in 2006-2007 using a belt-mounted industrial wearable computer running GNU/Linux\ + \ and Pure Data, external USB audio/MIDI interfaces, HID gamepads, and guitar.\ + \ Influenced by roadworthy analog gear, chief system requirements were mobility,\ + \ plug-and-play, reliability, and low cost. 
From 2007 to 2011, this first iteration\ + \ \"Cabled Madness\" melded rock music with realtime algorithmic composition and\ + \ revolved around cyborg human/system tension, aspects of improvisation, audience\ + \ feedback, and an inherent capability of failure. The second iteration \"Onward\ + \ to Mars\" explored storytelling from 2012-2015 through the one-way journey of\ + \ the first human on Mars with the computing system adapted into a self-contained\ + \ spacesuit backpack. Now 10 years on, a new {robotcowboy 2.0} system powers a\ + \ third iteration with only an iPhone and PdParty, the author's open-source iOS\ + \ application which runs Pure Data patches and provides full duplex stereo audio,\ + \ MIDI, HID game controller support, and Open Sound Control communication. The\ + \ future is bright, do you have room to wiggle?},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Dan Wilcox},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302597},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {318--323},\n publisher = {Virginia\ + \ Tech},\n title = {robotcowboy: 10 Years of Wearable Computer Rock},\n url =\ + \ {http://www.nime.org/proceedings/2018/nime2018_paper0067.pdf},\n year = {2018}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178867 + doi: 10.5281/zenodo.1302597 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 187--190 - publisher: 'Goldsmiths, University of London' - title: 'Tangle: a Flexible Framework for Performance with Advanced Robotic Musical - Instruments' - url: http://www.nime.org/proceedings/2014/nime2014_314.pdf - year: 2014 + pages: 318--323 + publisher: Virginia Tech + title: 'robotcowboy: 10 Years of 
Wearable Computer Rock' + url: http://www.nime.org/proceedings/2018/nime2018_paper0067.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ofried2014 - abstract: 'The modern musician enjoys access to a staggering number of audio samples. - Composition software can ship with many gigabytes of data, and there are many - more to be found online. However, conventional methods for navigating these libraries - are still quite rudimentary, and often involve scrolling through alphabetical - lists. We present a system for sample exploration that allows audio clips to be - sorted according to user taste, and arranged in any desired 2D formation such - that similar samples are located near each other. Our method relies on two advances - in machine learning. First, metric learning allows the user to shape the audio - feature space to match their own preferences. Second, kernelized sorting finds - an optimal arrangement for the samples in 2D. We demonstrate our system with two - new interfaces for exploring audio samples, and evaluate the technology qualitatively - and quantitatively via a pair of user studies.' - address: 'London, United Kingdom' - author: Ohad Fried and Zeyu Jin and Reid Oda and Adam Finkelstein - bibtex: "@inproceedings{ofried2014,\n abstract = {The modern musician enjoys access\ - \ to a staggering number of audio samples. Composition software can ship with\ - \ many gigabytes of data, and there are many more to be found online. However,\ - \ conventional methods for navigating these libraries are still quite rudimentary,\ - \ and often involve scrolling through alphabetical lists. We present a system\ - \ for sample exploration that allows audio clips to be sorted according to user\ - \ taste, and arranged in any desired 2D formation such that similar samples are\ - \ located near each other. Our method relies on two advances in machine learning.\ - \ First, metric learning allows the user to shape the audio feature space to match\ - \ their own preferences. 
Second, kernelized sorting finds an optimal arrangement\ - \ for the samples in 2D. We demonstrate our system with two new interfaces for\ - \ exploring audio samples, and evaluate the technology qualitatively and quantitatively\ - \ via a pair of user studies.},\n address = {London, United Kingdom},\n author\ - \ = {Ohad Fried and Zeyu Jin and Reid Oda and Adam Finkelstein},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178766},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {281--286},\n publisher = {Goldsmiths, University of London},\n title = {AudioQuilt:\ - \ {2D} Arrangements of Audio Samples using Metric Learning and Kernelized Sorting},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_315.pdf},\n year = {2014}\n\ - }\n" + ID: Gonzalez2018 + abstract: 'This article describes the design and construction of a collection of + digitally-controlled augmented acoustic guitars, and the use of these guitars + in the installation Sverm-Resonans. The installation was built around the idea + of exploring `inverse'' sonic microinteraction, that is, controlling sounds by + the micromotion observed when attempting to stand still. It consisted of six acoustic + guitars, each equipped with a Bela embedded computer for sound processing (in + Pure Data), an infrared distance sensor to detect the presence of users, and an + actuator attached to the guitar body to produce sound. With an attached battery + pack, the result was a set of completely autonomous instruments that were easy + to hang in a gallery space. The installation encouraged explorations on the boundary + between the tactile and the kinesthetic, the body and the mind, and between motion + and sound. The use of guitars, albeit with an untraditional `performance'' technique, + made the experience both familiar and unfamiliar at the same time. 
Many users + reported heightened sensations of stillness, sound, and vibration, and that the + `inverse'' control of the instrument was both challenging and pleasant.' + address: 'Blacksburg, Virginia, USA' + author: 'Gonzalez Sanchez, Victor Evaristo and Martin, Charles Patrick and Agata + Zelechowska and Bjerkestrand, Kari Anne Vadstensvik and Victoria Johnson and + Jensenius, Alexander Refsum ' + bibtex: "@inproceedings{Gonzalez2018,\n abstract = {This article describes the design\ + \ and construction of a collection of digitally-controlled augmented acoustic\ + \ guitars, and the use of these guitars in the installation Sverm-Resonans. The\ + \ installation was built around the idea of exploring `inverse' sonic microinteraction,\ + \ that is, controlling sounds by the micromotion observed when attempting to stand\ + \ still. It consisted of six acoustic guitars, each equipped with a Bela embedded\ + \ computer for sound processing (in Pure Data), an infrared distance sensor to\ + \ detect the presence of users, and an actuator attached to the guitar body to\ + \ produce sound. With an attached battery pack, the result was a set of completely\ + \ autonomous instruments that were easy to hang in a gallery space. The installation\ + \ encouraged explorations on the boundary between the tactile and the kinesthetic,\ + \ the body and the mind, and between motion and sound. The use of guitars, albeit\ + \ with an untraditional `performance' technique, made the experience both familiar\ + \ and unfamiliar at the same time. 
Many users reported heightened sensations of\ + \ stillness, sound, and vibration, and that the `inverse' control of the instrument\ + \ was both challenging and pleasant.},\n address = {Blacksburg, Virginia, USA},\n\ + \ author = {Gonzalez Sanchez, Victor Evaristo and Martin, Charles Patrick and\ + \ Agata Zelechowska and Bjerkestrand, Kari Anne Vadstensvik and Victoria Johnson\ + \ and Jensenius, Alexander Refsum },\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302599},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {324--327},\n publisher = {Virginia\ + \ Tech},\n title = {Bela-Based Augmented Acoustic Guitars for Sonic Microinteraction},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0068.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178766 + doi: 10.5281/zenodo.1302599 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 281--286 - publisher: 'Goldsmiths, University of London' - title: 'AudioQuilt: 2D Arrangements of Audio Samples using Metric Learning and Kernelized - Sorting' - url: http://www.nime.org/proceedings/2014/nime2014_315.pdf - year: 2014 + pages: 324--327 + publisher: Virginia Tech + title: Bela-Based Augmented Acoustic Guitars for Sonic Microinteraction + url: http://www.nime.org/proceedings/2018/nime2018_paper0068.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: dgabanaarellano2014 - abstract: 'This paper presents a new circular tangible interface where one or multiple - users can collaborate and interact in real time by placing and moving passive - wooden pucks on a transparent tabletop in order to create music. 
The design encourages - physical intuition and visual feedback on the music being created. An arm with - six optical sensors rotates beneath a transparent surface, triggering sounds based - on the objects placed above. The interface''s simplicity and tangibility make - it easy to learn and suitable for a broad range of users.' - address: 'London, United Kingdom' - author: Daniel Gábana Arellano and Andrew McPherson - bibtex: "@inproceedings{dgabanaarellano2014,\n abstract = {This paper presents a\ - \ new circular tangible interface where one or multiple users can collaborate\ - \ and interact in real time by placing and moving passive wooden pucks on a transparent\ - \ tabletop in order to create music. The design encourages physical intuition\ - \ and visual feedback on the music being created. An arm with six optical sensors\ - \ rotates beneath a transparent surface, triggering sounds based on the objects\ - \ placed above. The interface's simplicity and tangibility make it easy to learn\ - \ and suitable for a broad range of users.},\n address = {London, United Kingdom},\n\ - \ author = {Daniel G\\'abana Arellano and Andrew McPherson},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178704},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {84--85},\n publisher = {Goldsmiths, University of London},\n title = {Radear:\ - \ A Tangible Spinning Music Sequencer},\n url = {http://www.nime.org/proceedings/2014/nime2014_324.pdf},\n\ - \ year = {2014}\n}\n" + ID: Lepri2018 + abstract: "Obsolete and old technologies are often used in interactive art and music\ + \ performance. DIY practices such as hardware hacking and circuit bending provide\n\ + effective methods to the integration of old machines into new artistic inventions.\ + \ This paper presents the Cembalo Scrivano .1, an interactive audio-visual installation\ + \ based on an augmented typewriter. 
Borrowing concepts from media archaeology\ + \ studies, tangible interaction design and digital lutherie, we discuss how investigations\ + \ into the historical and cultural evolution of a technology can suggest directions\ + \ for the regeneration of obsolete objects. The design approach outlined focuses\ + \ on the remediation of an old device and aims to evoke cultural and physical\ + \ properties associated to the source object." + address: 'Blacksburg, Virginia, USA' + author: Giacomo Lepri and Andrew P. McPherson + bibtex: "@inproceedings{Lepri2018,\n abstract = {Obsolete and old technologies are\ + \ often used in interactive art and music performance. DIY practices such as hardware\ + \ hacking and circuit bending provide\neffective methods to the integration of\ + \ old machines into new artistic inventions. This paper presents the Cembalo Scrivano\ + \ .1, an interactive audio-visual installation based on an augmented typewriter.\ + \ Borrowing concepts from media archaeology studies, tangible interaction design\ + \ and digital lutherie, we discuss how investigations into the historical and\ + \ cultural evolution of a technology can suggest directions for the regeneration\ + \ of obsolete objects. The design approach outlined focuses on the remediation\ + \ of an old device and aims to evoke cultural and physical properties associated\ + \ to the source object.},\n address = {Blacksburg, Virginia, USA},\n author =\ + \ {Giacomo Lepri and Andrew P. 
McPherson},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302601},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {328--333},\n publisher = {Virginia\ + \ Tech},\n title = {Mirroring the past, from typewriting to interactive art: an\ + \ approach to the re-design of a vintage technology},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0069.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178704 + doi: 10.5281/zenodo.1302601 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 84--85 - publisher: 'Goldsmiths, University of London' - title: 'Radear: A Tangible Spinning Music Sequencer' - url: http://www.nime.org/proceedings/2014/nime2014_324.pdf - year: 2014 + pages: 328--333 + publisher: Virginia Tech + title: 'Mirroring the past, from typewriting to interactive art: an approach to + the re-design of a vintage technology' + url: http://www.nime.org/proceedings/2018/nime2018_paper0069.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: aberndt2014 - abstract: 'We present the digital musical instrument TouchNoise that is based on - multitouch interaction with a particle system. It implements a novel interface - concept for modulating noise spectra. Each particle represents a sine oscillator - that moves through the two-dimensional frequency and stereo panning domain via - Brownian motion. Its behavior can be affected by multitouch gestures allowing - the shaping of the resulting sound in many different ways. Particles can be dragged, - attracted, repelled, accentuated, and their autonomous behavior can be manipulated. 
- In this paper we introduce the concepts behind this instrument, describe its implementation - and discuss the sonic design space emerging from it.' - address: 'London, United Kingdom' - author: Axel Berndt and Nadia Al-Kassab and Raimund Dachselt - bibtex: "@inproceedings{aberndt2014,\n abstract = {We present the digital musical\ - \ instrument TouchNoise that is based on multitouch interaction with a particle\ - \ system. It implements a novel interface concept for modulating noise spectra.\ - \ Each particle represents a sine oscillator that moves through the two-dimensional\ - \ frequency and stereo panning domain via Brownian motion. Its behavior can be\ - \ affected by multitouch gestures allowing the shaping of the resulting sound\ - \ in many different ways. Particles can be dragged, attracted, repelled, accentuated,\ - \ and their autonomous behavior can be manipulated. In this paper we introduce\ - \ the concepts behind this instrument, describe its implementation and discuss\ - \ the sonic design space emerging from it.},\n address = {London, United Kingdom},\n\ - \ author = {Axel Berndt and Nadia Al-Kassab and Raimund Dachselt},\n booktitle\ + ID: Thorn2018 + abstract: 'This paper describes a performer-centric approach to the design, sensor + selection, data interpretation, and mapping schema of a sensor-embedded glove + called the “alto.glove” that the author uses to extend his performance abilities + on violin. The alto.glove is a response to the limitations—both creative and technical—perceived + in feature extraction processes that rely on classification. The hardware answers + one problem: how to extend violin playing in a minimal yet powerful way; the software + answers another: how to create a rich, evolving response that enhances expression + in improvisation. The author approaches this problem from the various roles of + violinist, hardware technician, programmer, sound designer, composer, and improviser. 
+ Importantly, the alto.glove is designed to be cost-effective and relatively easy + to build.' + address: 'Blacksburg, Virginia, USA' + author: Seth Dominicus Thorn + bibtex: "@inproceedings{Thorn2018,\n abstract = {This paper describes a performer-centric\ + \ approach to the design, sensor selection, data interpretation, and mapping schema\ + \ of a sensor-embedded glove called the “alto.glove” that the author uses to extend\ + \ his performance abilities on violin. The alto.glove is a response to the limitations—both\ + \ creative and technical—perceived in feature extraction processes that rely on\ + \ classification. The hardware answers one problem: how to extend violin playing\ + \ in a minimal yet powerful way; the software answers another: how to create a\ + \ rich, evolving response that enhances expression in improvisation. The author\ + \ approaches this problem from the various roles of violinist, hardware technician,\ + \ programmer, sound designer, composer, and improviser. 
Importantly, the alto.glove\ + \ is designed to be cost-effective and relatively easy to build.},\n address =\ + \ {Blacksburg, Virginia, USA},\n author = {Seth Dominicus Thorn},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178714},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {323--326},\n publisher = {Goldsmiths, University of London},\n\ - \ title = {TouchNoise: A Particle-based Multitouch Noise Modulation Interface},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_325.pdf},\n year = {2014}\n\ - }\n" + \ Expression},\n doi = {10.5281/zenodo.1302603},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {334--339},\n publisher = {Virginia Tech},\n title\ + \ = {Alto.Glove: New Techniques for Augmented Violin},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0070.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178714 + doi: 10.5281/zenodo.1302603 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 323--326 - publisher: 'Goldsmiths, University of London' - title: 'TouchNoise: A Particle-based Multitouch Noise Modulation Interface' - url: http://www.nime.org/proceedings/2014/nime2014_325.pdf - year: 2014 + pages: 334--339 + publisher: Virginia Tech + title: 'Alto.Glove: New Techniques for Augmented Violin' + url: http://www.nime.org/proceedings/2018/nime2018_paper0070.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ynakanishi2014 - abstract: 'In this paper, the authors introduce a stand-alone synthesizer, ``B.O.M.B. - -Beat Of Magic Box --'''' for electronic music sessions and live performances. 
- ``B.O.M.B.'''' has a wireless communication system that synchronizes musical scale - and tempo (BPM) between multiple devices. In addition, participants can change - master/slave role between performers immediately. Our primary motivation is to - provide musicians and nonmusicians with opportunities to experience a collaborative - electronic music performance. Here, the hardware and interaction design of the - device is presented. To date, numerous collaborative musical instruments have - been developed in electronic music field [1][2][3]. The authors are interested - in formations of musical sessions using stand-alone devices and leader/follower - relationship in musical sessions. The authors specify three important requirements - of instrument design for musical session. They are as follows: (1) Simple Interface: - Interface that enables performers to control three sound elements (pitch, timbre, - and amplitude) with simple interaction. (2) Portable Stand-alone System: System - that runs standalone (with sound generators, speakers, and butteries). Because - musical sessions can be improvised at any place and time, the authors consider - that portability is essential in designing musical instruments for sessions. (3) - Wireless Synchronization: System that supports ensembles by automatically synchronizing - tempo (BPM) and tonality between multiple devices by air because of portability. - In addition, performers can switch master/slave roles smoothly such as leader/follower - relationship during a musical session. The authors gave ten live performances - using this device at domestic and international events. In these events, the authors - confirmed that our proposed wireless synchronization system worked stable. It - is suggested that our system demonstrate the practicality of wireless synchronization. - In future, the authors will evaluate the device in terms of its stability in multi-performer - musical sessions.' 
- address: 'London, United Kingdom' - author: Yoshihito Nakanishi and Seiichiro Matsumura and Chuichi Arakawa - bibtex: "@inproceedings{ynakanishi2014,\n abstract = {In this paper, the authors\ - \ introduce a stand-alone synthesizer, ``B.O.M.B. -Beat Of Magic Box --'' for\ - \ electronic music sessions and live performances. ``B.O.M.B.'' has a wireless\ - \ communication system that synchronizes musical scale and tempo (BPM) between\ - \ multiple devices. In addition, participants can change master/slave role between\ - \ performers immediately. Our primary motivation is to provide musicians and nonmusicians\ - \ with opportunities to experience a collaborative electronic music performance.\ - \ Here, the hardware and interaction design of the device is presented. To date,\ - \ numerous collaborative musical instruments have been developed in electronic\ - \ music field [1][2][3]. The authors are interested in formations of musical sessions\ - \ using stand-alone devices and leader/follower relationship in musical sessions.\ - \ The authors specify three important requirements of instrument design for musical\ - \ session. They are as follows: (1) Simple Interface: Interface that enables performers\ - \ to control three sound elements (pitch, timbre, and amplitude) with simple interaction.\ - \ (2) Portable Stand-alone System: System that runs standalone (with sound generators,\ - \ speakers, and butteries). Because musical sessions can be improvised at any\ - \ place and time, the authors consider that portability is essential in designing\ - \ musical instruments for sessions. (3) Wireless Synchronization: System that\ - \ supports ensembles by automatically synchronizing tempo (BPM) and tonality between\ - \ multiple devices by air because of portability. In addition, performers can\ - \ switch master/slave roles smoothly such as leader/follower relationship during\ - \ a musical session. 
The authors gave ten live performances using this device\ - \ at domestic and international events. In these events, the authors confirmed\ - \ that our proposed wireless synchronization system worked stable. It is suggested\ - \ that our system demonstrate the practicality of wireless synchronization. In\ - \ future, the authors will evaluate the device in terms of its stability in multi-performer\ - \ musical sessions.},\n address = {London, United Kingdom},\n author = {Yoshihito\ - \ Nakanishi and Seiichiro Matsumura and Chuichi Arakawa},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178889},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {80--81},\n publisher = {Goldsmiths, University of London},\n title = {B.O.M.B.\ - \ -Beat Of Magic Box -: Stand-Alone Synthesizer Using Wireless Synchronization\ - \ System For Musical Session and Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_327.pdf},\n\ - \ year = {2014}\n}\n" + ID: Liontiris2018 + abstract: 'This paper illustrates the development of a Feedback Resonating Double + Bass. The instrument is essentially the augmentation of an acoustic double bass + using positive feedback. The research aimed to reply the question of how to augment + and convert a double bass into a feedback resonating one without following an + invasive method. The conversion process illustrated here is applicable and adaptable + to double basses of any size, without making irreversible alterations to the instruments. ' + address: 'Blacksburg, Virginia, USA' + author: 'Liontiris, Thanos Polymeneas' + bibtex: "@inproceedings{Liontiris2018,\n abstract = {This paper illustrates the\ + \ development of a Feedback Resonating Double Bass. The instrument is essentially\ + \ the augmentation of an acoustic double bass using positive feedback. 
The research\ + \ aimed to reply the question of how to augment and convert a double bass into\ + \ a feedback resonating one without following an invasive method. The conversion\ + \ process illustrated here is applicable and adaptable to double basses of any\ + \ size, without making irreversible alterations to the instruments. },\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {Liontiris, Thanos Polymeneas},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1302605},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {340--341},\n publisher = {Virginia Tech},\n title\ + \ = {Low Frequency Feedback Drones: A non-invasive augmentation of the double\ + \ bass},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0071.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178889 + doi: 10.5281/zenodo.1302605 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 80--81 - publisher: 'Goldsmiths, University of London' - title: 'B.O.M.B. -Beat Of Magic Box -: Stand-Alone Synthesizer Using Wireless Synchronization - System For Musical Session and Performance' - url: http://www.nime.org/proceedings/2014/nime2014_327.pdf - year: 2014 + pages: 340--341 + publisher: Virginia Tech + title: 'Low Frequency Feedback Drones: A non-invasive augmentation of the double + bass' + url: http://www.nime.org/proceedings/2018/nime2018_paper0071.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: gwakefield2014 - abstract: 'We discuss live coding audio-visual worlds for large-scale virtual reality - environments. 
We describe Alive, an instrument allowing multiple users to develop - sonic and visual behaviors of agents in a virtual world, through a browserbased - collaborative code interface, accessible while being immersed through spatialized - audio and stereoscopic display. The interface adds terse syntax for query-based - precise or stochastic selections and declarative agent manipulations, lazily-evaluated - expressions for synthesis and behavior, event handling, and flexible scheduling.' - address: 'London, United Kingdom' - author: Graham Wakefield and Charlie Roberts and Matthew Wright and Timothy Wood - and Karl Yerkes - bibtex: "@inproceedings{gwakefield2014,\n abstract = {We discuss live coding audio-visual\ - \ worlds for large-scale virtual reality environments. We describe Alive, an instrument\ - \ allowing multiple users to develop sonic and visual behaviors of agents in a\ - \ virtual world, through a browserbased collaborative code interface, accessible\ - \ while being immersed through spatialized audio and stereoscopic display. 
The\ - \ interface adds terse syntax for query-based precise or stochastic selections\ - \ and declarative agent manipulations, lazily-evaluated expressions for synthesis\ - \ and behavior, event handling, and flexible scheduling.},\n address = {London,\ - \ United Kingdom},\n author = {Graham Wakefield and Charlie Roberts and Matthew\ - \ Wright and Timothy Wood and Karl Yerkes},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178975},\n issn = {2220-4806},\n month = {June},\n pages =\ - \ {505--508},\n publisher = {Goldsmiths, University of London},\n title = {Collaborative\ - \ Live-Coding with an Immersive Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_328.pdf},\n\ - \ year = {2014}\n}\n" + ID: Formo2018 + abstract: 'The Orchestra of Speech is a performance concept resulting from a recent + artistic research project exploring the relationship between music and speech, + in particular improvised music and everyday conversation. As a tool in this exploration, + a digital musical instrument system has been developed for “orchestrating” musical + features of speech into music, in real time. Through artistic practice, this system + has evolved into a personal electroacoustic performance concept.' + address: 'Blacksburg, Virginia, USA' + author: Daniel Formo + bibtex: "@inproceedings{Formo2018,\n abstract = {The Orchestra of Speech is a performance\ + \ concept resulting from a recent artistic research project exploring the relationship\ + \ between music and speech, in particular improvised music and everyday conversation.\ + \ As a tool in this exploration, a digital musical instrument system has been\ + \ developed for “orchestrating” musical features of speech into music, in real\ + \ time. 
Through artistic practice, this system has evolved into a personal electroacoustic\ + \ performance concept.},\n address = {Blacksburg, Virginia, USA},\n author = {Daniel\ + \ Formo},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1302607},\n editor = {Luke\ + \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ + \ {2220-4806},\n month = {June},\n pages = {342--343},\n publisher = {Virginia\ + \ Tech},\n title = {The Orchestra of Speech: a speech-based instrument system},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0072.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178975 + doi: 10.5281/zenodo.1302607 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 505--508 - publisher: 'Goldsmiths, University of London' - title: Collaborative Live-Coding with an Immersive Instrument - url: http://www.nime.org/proceedings/2014/nime2014_328.pdf - year: 2014 + pages: 342--343 + publisher: Virginia Tech + title: 'The Orchestra of Speech: a speech-based instrument system' + url: http://www.nime.org/proceedings/2018/nime2018_paper0072.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ssuh2014 - abstract: 'For the electric guitar, which takes a large proportion in modern pop - music, effects unit (or effector) is no longer optional. Many guitarists already - `play'' their effects with their instrument. However, it is not easy to control - these effects during the play, so lots of new controllers and interfaces have - been devised; one example is a pedal type effects that helps players to control - effects with a foot while their hands are busy. Some players put a controller - on their guitars. 
However, our instruments are so precious to drill a hole, and - the stage is too big for the player who is just kneeling behind the pedals and - turning the knobs. In this paper, we designed a new control system for electric - guitar and bass. This paper is about a gesture-based sound control system that - controls the electric guitar effects (like delay time, reverberation or pitch) - with the player''s hand gesture. This system utilizes TAPIR signal to trace player''s - hand motion. TAPIR signal is an acoustic signal that can rarely be received by - most people, because its frequency exists between 18 kHz to 22 kHz [TAPIR article]. - This system consists of a signal generator, an electric guitar and a sound processor. - From the generator that is attached on the player''s hand, the TAPIR signal transfers - to the magnetic pickup equipped on the electric guitar. Player''s gesture is captured - as a Doppler shift and the processor calculates the value as the sound effect - parameter. In this paper, we focused on the demonstration of the signal transfer - on aforementioned system.' - address: 'London, United Kingdom' - author: Sangwon Suh and Jeong-seob Lee and Woon Seung Yeo - bibtex: "@inproceedings{ssuh2014,\n abstract = {For the electric guitar, which takes\ - \ a large proportion in modern pop music, effects unit (or effector) is no longer\ - \ optional. Many guitarists already `play' their effects with their instrument.\ - \ However, it is not easy to control these effects during the play, so lots of\ - \ new controllers and interfaces have been devised; one example is a pedal type\ - \ effects that helps players to control effects with a foot while their hands\ - \ are busy. Some players put a controller on their guitars. However, our instruments\ - \ are so precious to drill a hole, and the stage is too big for the player who\ - \ is just kneeling behind the pedals and turning the knobs. 
In this paper, we\ - \ designed a new control system for electric guitar and bass. This paper is about\ - \ a gesture-based sound control system that controls the electric guitar effects\ - \ (like delay time, reverberation or pitch) with the player's hand gesture. This\ - \ system utilizes TAPIR signal to trace player's hand motion. TAPIR signal is\ - \ an acoustic signal that can rarely be received by most people, because its frequency\ - \ exists between 18 kHz to 22 kHz [TAPIR article]. This system consists of a signal\ - \ generator, an electric guitar and a sound processor. From the generator that\ - \ is attached on the player's hand, the TAPIR signal transfers to the magnetic\ - \ pickup equipped on the electric guitar. Player's gesture is captured as a Doppler\ - \ shift and the processor calculates the value as the sound effect parameter.\ - \ In this paper, we focused on the demonstration of the signal transfer on aforementioned\ - \ system.},\n address = {London, United Kingdom},\n author = {Sangwon Suh and\ - \ Jeong-seob Lee and Woon Seung Yeo},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178949},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {90--93},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {A Gesture Detection with Guitar Pickup and\ - \ Earphones},\n url = {http://www.nime.org/proceedings/2014/nime2014_333.pdf},\n\ - \ year = {2014}\n}\n" + ID: Weisling2018 + abstract: 'This paper presents a brief overview of an online survey conducted with + the objective of gaining insight into compositional and performance practices + of contemporary audiovisual practitioners. The survey gathered information regarding + how practitioners relate aural and visual media in their work, and how compositional + and performance practices involving multiple modalities might differ from other + practices. 
Discussed here are three themes: compositional approaches, transparency + and audience knowledge, and error and risk, which emerged from participants'' + responses. We believe these themes contribute to a discussion within the NIME + community regarding unique challenges and objectives presented when working with + multiple media.' + address: 'Blacksburg, Virginia, USA' + author: Anna Weisling and Anna Xambó and ireti olowe and Mathieu Barthet + bibtex: "@inproceedings{Weisling2018,\n abstract = {This paper presents a brief\ + \ overview of an online survey conducted with the objective of gaining insight\ + \ into compositional and performance practices of contemporary audiovisual practitioners.\ + \ The survey gathered information regarding how practitioners relate aural and\ + \ visual media in their work, and how compositional and performance practices\ + \ involving multiple modalities might differ from other practices. Discussed here\ + \ are three themes: compositional approaches, transparency and audience knowledge,\ + \ and error and risk, which emerged from participants' responses. 
We believe these\ + \ themes contribute to a discussion within the NIME community regarding unique\ + \ challenges and objectives presented when working with multiple media.},\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {Anna Weisling and Anna Xambó and\ + \ ireti olowe and Mathieu Barthet},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302609},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {344--345},\n publisher = {Virginia\ + \ Tech},\n title = {Surveying the Compositional and Performance Practices of Audiovisual\ + \ Practitioners},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0073.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178949 + doi: 10.5281/zenodo.1302609 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 90--93 - publisher: 'Goldsmiths, University of London' - title: A Gesture Detection with Guitar Pickup and Earphones - url: http://www.nime.org/proceedings/2014/nime2014_333.pdf - year: 2014 + pages: 344--345 + publisher: Virginia Tech + title: Surveying the Compositional and Performance Practices of Audiovisual Practitioners + url: http://www.nime.org/proceedings/2018/nime2018_paper0073.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: fberthaut2014 - abstract: 'This paper presents a collaborative digital musical instrument that uses - the ephemeral and physical properties of soap bubbles to explore the complexity - layers and oscillating parameters of electronic (bass) music. This instrument, - called Wubbles, aims at encouraging both individual and collaborative musical - manipulations.' 
- address: 'London, United Kingdom' - author: Florent Berthaut and Jarrod Knibbe - bibtex: "@inproceedings{fberthaut2014,\n abstract = {This paper presents a collaborative\ - \ digital musical instrument that uses the ephemeral and physical properties of\ - \ soap bubbles to explore the complexity layers and oscillating parameters of\ - \ electronic (bass) music. This instrument, called Wubbles, aims at encouraging\ - \ both individual and collaborative musical manipulations.},\n address = {London,\ - \ United Kingdom},\n author = {Florent Berthaut and Jarrod Knibbe},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178716},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {499--500},\n publisher = {Goldsmiths, University of London},\n\ - \ title = {Wubbles: A Collaborative Ephemeral Musical Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_334.pdf},\n\ - \ year = {2014}\n}\n" + ID: Marasco2018 + abstract: 'The author presents Sound Opinions, a custom software tool that uses + sentiment analysis to create sound art installations and music compositions. The + software runs inside the NodeRed.js programming environment. It scrapes text from + web pages, pre-processes it, performs sentiment analysis via a remote API, and + parses the resulting data for use in external digital audio programs. The sentiment + analysis itself is handled by IBM''s Watson Tone Analyzer. The author has used + this tool to create an interactive multimedia installation, titled Critique. Sources + of criticism of a chosen musical work are analyzed and the negative or positive + statements about that composition work to warp and change it. This allows the + audience to only hear the work through the lens of its critics, and not in the + original form that its creator intended.' + address: 'Blacksburg, Virginia, USA' + author: Anthony T. 
Marasco + bibtex: "@inproceedings{Marasco2018,\n abstract = {The author presents Sound Opinions,\ + \ a custom software tool that uses sentiment analysis to create sound art installations\ + \ and music compositions. The software runs inside the NodeRed.js programming\ + \ environment. It scrapes text from web pages, pre-processes it, performs sentiment\ + \ analysis via a remote API, and parses the resulting data for use in external\ + \ digital audio programs. The sentiment analysis itself is handled by IBM's Watson\ + \ Tone Analyzer. The author has used this tool to create an interactive multimedia\ + \ installation, titled Critique. Sources of criticism of a chosen musical work\ + \ are analyzed and the negative or positive statements about that composition\ + \ work to warp and change it. This allows the audience to only hear the work\ + \ through the lens of its critics, and not in the original form that its creator\ + \ intended.},\n address = {Blacksburg, Virginia, USA},\n author = {Anthony T.\ + \ Marasco},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302611},\n editor\ + \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {346--347},\n publisher = {Virginia\ + \ Tech},\n title = {Sound Opinions: Creating a Virtual Tool for Sound Art Installations\ + \ through Sentiment Analysis of Critical Reviews},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0074.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178716 + doi: 10.5281/zenodo.1302611 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 499--500 - publisher: 'Goldsmiths, University of London' - title: 'Wubbles: A Collaborative Ephemeral Musical Instrument' - url: 
http://www.nime.org/proceedings/2014/nime2014_334.pdf - year: 2014 + pages: 346--347 + publisher: Virginia Tech + title: 'Sound Opinions: Creating a Virtual Tool for Sound Art Installations through + Sentiment Analysis of Critical Reviews' + url: http://www.nime.org/proceedings/2018/nime2018_paper0074.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: lpardue2014 - abstract: 'This paper presents a multi-modal approach to musical instrument pitch - tracking combining audio and position sensor data. Finger location on a violin - fingerboard is measured using resistive sensors, allowing rapid detection of approximate - pitch. The initial pitch estimate is then used to restrict the search space of - an audio pitch tracking algorithm. Most audio-only pitch tracking algorithms face - a fundamental tradeoff between accuracy and latency, with longer analysis windows - producing better pitch estimates at the cost of noticeable lag in a live performance - environment. Conversely, sensor-only strategies struggle to achieve the fine pitch - accuracy a human listener would expect. By combining the two approaches, high - accuracy and low latency can be simultaneously achieved.' - address: 'London, United Kingdom' - author: Laurel Pardue and Dongjuan Nian and Christopher Harte and Andrew McPherson - bibtex: "@inproceedings{lpardue2014,\n abstract = {This paper presents a multi-modal\ - \ approach to musical instrument pitch tracking combining audio and position sensor\ - \ data. Finger location on a violin fingerboard is measured using resistive sensors,\ - \ allowing rapid detection of approximate pitch. The initial pitch estimate is\ - \ then used to restrict the search space of an audio pitch tracking algorithm.\ - \ Most audio-only pitch tracking algorithms face a fundamental tradeoff between\ - \ accuracy and latency, with longer analysis windows producing better pitch estimates\ - \ at the cost of noticeable lag in a live performance environment. 
Conversely,\ - \ sensor-only strategies struggle to achieve the fine pitch accuracy a human listener\ - \ would expect. By combining the two approaches, high accuracy and low latency\ - \ can be simultaneously achieved.},\n address = {London, United Kingdom},\n author\ - \ = {Laurel Pardue and Dongjuan Nian and Christopher Harte and Andrew McPherson},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178899},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {54--59},\n publisher = {Goldsmiths, University of\ - \ London},\n title = {Low-Latency Audio Pitch Tracking: A Multi-Modal Sensor-Assisted\ - \ Approach},\n url = {http://www.nime.org/proceedings/2014/nime2014_336.pdf},\n\ - \ year = {2014}\n}\n" + ID: Kritsis2018 + abstract: 'We present our work in progress on the development of a web-based system + for music performance with virtual instruments in a virtual 3D environment, which + provides three means of interaction (i.e physical, gestural and mixed), using + tracking data from a Leap Motion sensor. Moreover, our system is integrated as + a creative tool within the context of a STEAM education platform that promotes + science learning through musical activities. The presented system models string + and percussion instruments, with realistic sonic feedback based on Modalys, a + physical model-based sound synthesis engine. Our proposal meets the performance + requirements of real-time interactive systems and is implemented strictly with + web technologies.' 
+ address: 'Blacksburg, Virginia, USA' + author: Kosmas Kritsis and Aggelos Gkiokas and Carlos Árpád Acosta and Quentin Lamerand + and Robert Piéchaud and Maximos Kaliakatsos-Papakostas and Vassilis Katsouros + bibtex: "@inproceedings{Kritsis2018,\n abstract = {We present our work in progress\ + \ on the development of a web-based system for music performance with virtual\ + \ instruments in a virtual 3D environment, which provides three means of interaction\ + \ (i.e physical, gestural and mixed), using tracking data from a Leap Motion sensor.\ + \ Moreover, our system is integrated as a creative tool within the context of\ + \ a STEAM education platform that promotes science learning through musical activities.\ + \ The presented system models string and percussion instruments, with realistic\ + \ sonic feedback based on Modalys, a physical model-based sound synthesis engine.\ + \ Our proposal meets the performance requirements of real-time interactive systems\ + \ and is implemented strictly with web technologies.},\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Kosmas Kritsis and Aggelos Gkiokas and Carlos Árpád\ + \ Acosta and Quentin Lamerand and Robert Piéchaud and Maximos Kaliakatsos-Papakostas\ + \ and Vassilis Katsouros},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302613},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {348--349},\n publisher = {Virginia\ + \ Tech},\n title = {A web-based 3D environment for gestural interaction with virtual\ + \ music instruments as a STEAM education tool},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0075.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178899 + doi: 10.5281/zenodo.1302613 + editor: 'Luke Dahl, 
Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 54--59 - publisher: 'Goldsmiths, University of London' - title: 'Low-Latency Audio Pitch Tracking: A Multi-Modal Sensor-Assisted Approach' - url: http://www.nime.org/proceedings/2014/nime2014_336.pdf - year: 2014 + pages: 348--349 + publisher: Virginia Tech + title: A web-based 3D environment for gestural interaction with virtual music instruments + as a STEAM education tool + url: http://www.nime.org/proceedings/2018/nime2018_paper0075.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: nklugel12014 - abstract: 'In this contribution, we will discuss a prototype that allows a group - of users to design sound collaboratively in real time using a multi-touch tabletop. - We make use of a machine learning method to generate a mapping from perceptual - audio features to synthesis parameters. This mapping is then used for visualization - and interaction. Finally, we discuss the results of a comparative evaluation study.' - address: 'London, United Kingdom' - author: Niklas Klügel and Timo Becker and Georg Groh - bibtex: "@inproceedings{nklugel12014,\n abstract = {In this contribution, we will\ - \ discuss a prototype that allows a group of users to design sound collaboratively\ - \ in real time using a multi-touch tabletop. We make use of a machine learning\ - \ method to generate a mapping from perceptual audio features to synthesis parameters.\ - \ This mapping is then used for visualization and interaction. Finally, we discuss\ - \ the results of a comparative evaluation study.},\n address = {London, United\ - \ Kingdom},\n author = {Niklas Kl\\''ugel and Timo Becker and Georg Groh},\n booktitle\ + ID: Mannone2018 + abstract: 'We developed a new musical interface, CubeHarmonic, with the magnetic + tracking system, IM3D, created at Tohoku University. IM3D system precisely tracks + positions of tiny, wireless, battery-less, and identifiable LC coils in real time. 
+ The CubeHarmonic is a musical application of the Rubik''s cube, with notes on + each little piece. Scrambling the cube, we get different chords and chord sequences. + Positions of the pieces which contain LC coils are detected through IM3D, and + transmitted to the computer, that plays sounds. The central position of the cube + is also computed from the LC coils located into the corners of Rubik''s cube, + and, depending on the computed central position, we can manipulate overall loudness + and pitch changes, as in theremin playing. This new instrument, whose first idea + comes from mathematical theory of music, can be used as a teaching tool both for + math (group theory) and music (music theory, mathematical music theory), as well + as a composition device, a new instrument for avant-garde performances, and a + recreational tool.' + address: 'Blacksburg, Virginia, USA' + author: Maria C. Mannone and Eri Kitamura and Jiawei Huang and Ryo Sugawara and + Yoshifumi Kitamura + bibtex: "@inproceedings{Mannone2018,\n abstract = {We developed a new musical interface,\ + \ CubeHarmonic, with the magnetic tracking system, IM3D, created at Tohoku University.\ + \ IM3D system precisely tracks positions of tiny, wireless, battery-less, and\ + \ identifiable LC coils in real time. The CubeHarmonic is a musical application\ + \ of the Rubik's cube, with notes on each little piece. Scrambling the cube, we\ + \ get different chords and chord sequences. Positions of the pieces which contain\ + \ LC coils are detected through IM3D, and transmitted to the computer, that plays\ + \ sounds. The central position of the cube is also computed from the LC coils\ + \ located into the corners of Rubik's cube, and, depending on the computed central\ + \ position, we can manipulate overall loudness and pitch changes, as in theremin\ + \ playing. 
This new instrument, whose first idea comes from mathematical theory\ + \ of music, can be used as a teaching tool both for math (group theory) and music\ + \ (music theory, mathematical music theory), as well as a composition device,\ + \ a new instrument for avant-garde performances, and a recreational tool.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Maria C. Mannone and Eri\ + \ Kitamura and Jiawei Huang and Ryo Sugawara and Yoshifumi Kitamura},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178833},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {327--330},\n publisher = {Goldsmiths, University of London},\n\ - \ title = {Designing Sound Collaboratively Perceptually Motivated Audio Synthesis},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_339.pdf},\n year = {2014}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178833 - issn: 2220-4806 - month: June - pages: 327--330 - publisher: 'Goldsmiths, University of London' - title: Designing Sound Collaboratively Perceptually Motivated Audio Synthesis - url: http://www.nime.org/proceedings/2014/nime2014_339.pdf - year: 2014 - - -- ENTRYTYPE: inproceedings - ID: seloul2014 - abstract: 'We use Max/MSP framework to create a reliable but flexible approach for - managing live performances of music bands who rely on live playing with digital - music. This approach utilizes Max/MSP to allow any player an easy and low cost - way to apply and experiment innovative music interfaces for live performance, - without losing the professionalism required on stage. In that approach, every - 1-3 players is plugged to a unit consisting of a standard sound-card and laptop. 
- This unit is controlled by an interface that schedules and manages all the digital - sounds made by each player (VST effects, VST instruments and ''home-made'' interactive - interfaces). All the player''s units are then remotely controlled by a conductor - patch which is in charge of the synchronization of all the players and background - samples in real time, as well as providing sensitive metronome and scheduling - visual enhancement. Moreover, and not less important, we can take the advantage - of using virtual instruments and virtual effects in Max environment to manage - the mix, and routing the audio. This providing monitors and metronome to the players - ears, and virtual mixing via Max/MSP patch. This privilege almost eliminates the - dependency in the venue''s equipment, and in that way, the sound quality and music - ideas can be taken out directly from the studio to the stage.' - address: 'London, United Kingdom' - author: Yehiel Amo and Gil Zissu and Shaltiel Eloul and Eran Shlomi and Dima Schukin - and Almog Kalifa - bibtex: "@inproceedings{seloul2014,\n abstract = {We use Max/MSP framework to create\ - \ a reliable but flexible approach for managing live performances of music bands\ - \ who rely on live playing with digital music. This approach utilizes Max/MSP\ - \ to allow any player an easy and low cost way to apply and experiment innovative\ - \ music interfaces for live performance, without losing the professionalism required\ - \ on stage. In that approach, every 1-3 players is plugged to a unit consisting\ - \ of a standard sound-card and laptop. This unit is controlled by an interface\ - \ that schedules and manages all the digital sounds made by each player (VST effects,\ - \ VST instruments and 'home-made' interactive interfaces). 
All the player's units\ - \ are then remotely controlled by a conductor patch which is in charge of the\ - \ synchronization of all the players and background samples in real time, as well\ - \ as providing sensitive metronome and scheduling visual enhancement. Moreover,\ - \ and not less important, we can take the advantage of using virtual instruments\ - \ and virtual effects in Max environment to manage the mix, and routing the audio.\ - \ This providing monitors and metronome to the players ears, and virtual mixing\ - \ via Max/MSP patch. This privilege almost eliminates the dependency in the venue's\ - \ equipment, and in that way, the sound quality and music ideas can be taken out\ - \ directly from the studio to the stage.},\n address = {London, United Kingdom},\n\ - \ author = {Yehiel Amo and Gil Zissu and Shaltiel Eloul and Eran Shlomi and Dima\ - \ Schukin and Almog Kalifa},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178700},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {94--97},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {A Max/MSP Approach for Incorporating Digital\ - \ Music via Laptops in Live Performances of Music Bands},\n url = {http://www.nime.org/proceedings/2014/nime2014_340.pdf},\n\ - \ year = {2014}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1302615},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {350--351},\n publisher = {Virginia Tech},\n title\ + \ = {CubeHarmonic: A New Interface from a Magnetic 3D Motion Tracking System to\ + \ Music Performance},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0076.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178700 + doi: 10.5281/zenodo.1302615 + editor: 'Luke Dahl, 
Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 94--97 - publisher: 'Goldsmiths, University of London' - title: A Max/MSP Approach for Incorporating Digital Music via Laptops in Live Performances - of Music Bands - url: http://www.nime.org/proceedings/2014/nime2014_340.pdf - year: 2014 + pages: 350--351 + publisher: Virginia Tech + title: 'CubeHarmonic: A New Interface from a Magnetic 3D Motion Tracking System + to Music Performance' + url: http://www.nime.org/proceedings/2018/nime2018_paper0076.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: asa2014 - abstract: 'The text exposes a perceptual approach to instrument design and composition, - and it introduces an instrument that outputs acoustic sound, digital sound, and - digital image. We explore disparities between human perception and digital analysis - as creative material. Because the instrument repurposes software intended to create - video games, we establish a distinction between the notion of ``flow'''' in music - and gaming, questioning how it may substantiate in interaction design. Furthermore, - we extrapolate from cognition/attention research to describe how the projected - image creates a reactive stage scene without deviating attention from the music.' - address: 'London, United Kingdom' - author: Adriana Sa - bibtex: "@inproceedings{asa2014,\n abstract = {The text exposes a perceptual approach\ - \ to instrument design and composition, and it introduces an instrument that outputs\ - \ acoustic sound, digital sound, and digital image. We explore disparities between\ - \ human perception and digital analysis as creative material. Because the instrument\ - \ repurposes software intended to create video games, we establish a distinction\ - \ between the notion of ``flow'' in music and gaming, questioning how it may substantiate\ - \ in interaction design. 
Furthermore, we extrapolate from cognition/attention\ - \ research to describe how the projected image creates a reactive stage scene\ - \ without deviating attention from the music.},\n address = {London, United Kingdom},\n\ - \ author = {Adriana Sa},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178925},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {331--334},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Repurposing Video Game Software for Musical\ - \ Expression: A Perceptual Approach},\n url = {http://www.nime.org/proceedings/2014/nime2014_343.pdf},\n\ - \ year = {2014}\n}\n" + ID: Kristoffersen2018 + abstract: 'In this paper we present a novel digital effects controller for electric + guitar based upon the whammy bar as a user interface. The goal with the project + is to give guitarists a way to interact with dynamic effects control that feels + familiar to their instrument and playing style. A 3D-printed prototype has been + made. It replaces the whammy bar of a traditional Fender vibrato system with a + sensor-equipped whammy bar. The functionality of the present prototype includes + separate readings of force applied towards and from the guitar body, as well as + an end knob for variable control. Further functionality includes a hinged system + allowing for digital effect control either with or without the mechanical manipulation + of string tension. By incorporating digital sensors to the idiomatic whammy bar + interface, one would potentially bring guitarists a high level of control intimacy + with the device, and thus lead to a closer interaction with effects.' + address: 'Blacksburg, Virginia, USA' + author: Martin M Kristoffersen and Trond Engum + bibtex: "@inproceedings{Kristoffersen2018,\n abstract = {In this paper we present\ + \ a novel digital effects controller for electric guitar based upon the whammy\ + \ bar as a user interface. 
The goal with the project is to give guitarists a way\ + \ to interact with dynamic effects control that feels familiar to their instrument\ + \ and playing style. A 3D-printed prototype has been made. It replaces the whammy\ + \ bar of a traditional Fender vibrato system with a sensor-equipped whammy bar.\ + \ The functionality of the present prototype includes separate readings of force\ + \ applied towards and from the guitar body, as well as an end knob for variable\ + \ control. Further functionality includes a hinged system allowing for digital\ + \ effect control either with or without the mechanical manipulation of string\ + \ tension. By incorporating digital sensors to the idiomatic whammy bar interface,\ + \ one would potentially bring guitarists a high level of control intimacy with\ + \ the device, and thus lead to a closer interaction with effects.},\n address\ + \ = {Blacksburg, Virginia, USA},\n author = {Martin M Kristoffersen and Trond\ + \ Engum},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1302617},\n editor = {Luke\ + \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ + \ {2220-4806},\n month = {June},\n pages = {352--355},\n publisher = {Virginia\ + \ Tech},\n title = {The Whammy Bar as a Digital Effect Controller},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0077.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178925 + doi: 10.5281/zenodo.1302617 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 331--334 - publisher: 'Goldsmiths, University of London' - title: 'Repurposing Video Game Software for Musical Expression: A Perceptual Approach' - url: http://www.nime.org/proceedings/2014/nime2014_343.pdf - year: 2014 + pages: 352--355 + publisher: 
Virginia Tech + title: The Whammy Bar as a Digital Effect Controller + url: http://www.nime.org/proceedings/2018/nime2018_paper0077.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: jmurphy2014 - abstract: 'This paper presents a method for a self-tuning procedure for musical - robots capable of continuous pitch-shifting. Such a technique is useful for robots - consisting of many strings: the ability to self-tune allows for long-term installation - without human intervention as well as on-the-fly tuning scheme changes. The presented - method consists of comparing a detuned string''s pitch at runtime to a pre-compiled - table of string responses at varying tensions. The behavior of the current detuned - string is interpolated from the two nearest pre-characterized neighbors, and the - desired virtual fret positions are added to the interpolated model. This method - allows for rapid tuning at runtime, requiring only a single string actuation to - determine the pitch. After a detailed description of the self-tuning technique - and implementation, the results will be evaluated on the new Swivel 2 robotic - slide guitar. The paper concludes with a discussion of performance applications - and ideas for subsequent work on self-tuning musical robotic systems.' - address: 'London, United Kingdom' - author: Jim Murphy and Paul Mathews and Ajay Kapur and Dale Carnegie - bibtex: "@inproceedings{jmurphy2014,\n abstract = {This paper presents a method\ - \ for a self-tuning procedure for musical robots capable of continuous pitch-shifting.\ - \ Such a technique is useful for robots consisting of many strings: the ability\ - \ to self-tune allows for long-term installation without human intervention as\ - \ well as on-the-fly tuning scheme changes. The presented method consists of comparing\ - \ a detuned string's pitch at runtime to a pre-compiled table of string responses\ - \ at varying tensions. 
The behavior of the current detuned string is interpolated\ - \ from the two nearest pre-characterized neighbors, and the desired virtual fret\ - \ positions are added to the interpolated model. This method allows for rapid\ - \ tuning at runtime, requiring only a single string actuation to determine the\ - \ pitch. After a detailed description of the self-tuning technique and implementation,\ - \ the results will be evaluated on the new Swivel 2 robotic slide guitar. The\ - \ paper concludes with a discussion of performance applications and ideas for\ - \ subsequent work on self-tuning musical robotic systems.},\n address = {London,\ - \ United Kingdom},\n author = {Jim Murphy and Paul Mathews and Ajay Kapur and\ - \ Dale Carnegie},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178883},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {565--568},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Robot: Tune Yourself! Automatic Tuning for\ - \ Musical Robotics},\n url = {http://www.nime.org/proceedings/2014/nime2014_345.pdf},\n\ - \ year = {2014}\n}\n" + ID: Pond2018 + abstract: 'The process of learning to play a string instrument is a notoriously + difficult task. A new student to the instrument is faced with mastering multiple, + interconnected physical movements in order to become a skillful player. In their + development, one measure of a players quality is their tone, which is the result + of the combination of the physical characteristics of the instrument and their + technique in playing it. This paper describes preliminary research into creating + an intuitive, real-time device for evaluating the quality of tone generation on + the cello: a ``timbre-tuner'''' to aid cellists evaluate their tone quality. 
Data + for the study was collected from six post-secondary music students, consisting + of recordings of scales covering the entire range of the cello. Comprehensive + spectral audio analysis was performed on the data set in order to evaluate features + suitable to describe tone quality. An inverse relationship was found between the + harmonic centroid and pitch played, which became more pronounced when restricted + to the A string. In addition, a model for predicting the harmonic centroid at + different pitches on the A string was created. Results from informal listening + tests support the use of the harmonic centroid as an appropriate measure for tone + quality.' + address: 'Blacksburg, Virginia, USA' + author: Robert Pond and Alexander Klassen and Kirk McNally + bibtex: "@inproceedings{Pond2018,\n abstract = {The process of learning to play\ + \ a string instrument is a notoriously difficult task. A new student to the instrument\ + \ is faced with mastering multiple, interconnected physical movements in order\ + \ to become a skillful player. In their development, one measure of a players\ + \ quality is their tone, which is the result of the combination of the physical\ + \ characteristics of the instrument and their technique in playing it. This paper\ + \ describes preliminary research into creating an intuitive, real-time device\ + \ for evaluating the quality of tone generation on the cello: a ``timbre-tuner''\ + \ to aid cellists evaluate their tone quality. Data for the study was collected\ + \ from six post-secondary music students, consisting of recordings of scales covering\ + \ the entire range of the cello. Comprehensive spectral audio analysis was performed\ + \ on the data set in order to evaluate features suitable to describe tone quality.\ + \ An inverse relationship was found between the harmonic centroid and pitch played,\ + \ which became more pronounced when restricted to the A string. 
In addition, a\ + \ model for predicting the harmonic centroid at different pitches on the A string\ + \ was created. Results from informal listening tests support the use of the harmonic\ + \ centroid as an appropriate measure for tone quality.},\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Robert Pond and Alexander Klassen and Kirk McNally},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1302619},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {356--359},\n publisher = {Virginia Tech},\n title\ + \ = {Timbre Tuning: Variation in Cello Sprectrum Across Pitches and Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0078.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178883 + doi: 10.5281/zenodo.1302619 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 565--568 - publisher: 'Goldsmiths, University of London' - title: 'Robot: Tune Yourself! Automatic Tuning for Musical Robotics' - url: http://www.nime.org/proceedings/2014/nime2014_345.pdf - year: 2014 + pages: 356--359 + publisher: Virginia Tech + title: 'Timbre Tuning: Variation in Cello Sprectrum Across Pitches and Instruments' + url: http://www.nime.org/proceedings/2018/nime2018_paper0078.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: astark2014 - abstract: 'Real-time audio analysis has great potential for being used to create - musically responsive applications in live performances. There have been many examples - of such use, including sound-responsive visualisations, adaptive audio effects - and machine musicianship. 
However, at present, using audio analysis algorithms - in live performance requires either some detailed knowledge about the algorithms - themselves, or programming or both. Those wishing to use audio analysis in live - performances may not have either of these as their strengths. Rather, they may - instead wish to focus upon systems that respond to audio analysis data, such as - visual projections or sound generators. In response, this paper introduces the - Sound Analyser an audio plug-in allowing users to a) select a custom set of audio - analyses to be performed in real-time and b) send that information via OSC so - that it can easily be used by other systems to develop responsive applications - for live performances and installations. A description of the system architecture - and audio analysis algorithms implemented in the plug-in is presented before moving - on to two case studies where the plug-in has been used in the field with artists.' - address: 'London, United Kingdom' - author: Adam Stark - bibtex: "@inproceedings{astark2014,\n abstract = {Real-time audio analysis has great\ - \ potential for being used to create musically responsive applications in live\ - \ performances. There have been many examples of such use, including sound-responsive\ - \ visualisations, adaptive audio effects and machine musicianship. However, at\ - \ present, using audio analysis algorithms in live performance requires either\ - \ some detailed knowledge about the algorithms themselves, or programming or both.\ - \ Those wishing to use audio analysis in live performances may not have either\ - \ of these as their strengths. 
Rather, they may instead wish to focus upon systems\ - \ that respond to audio analysis data, such as visual projections or sound generators.\ - \ In response, this paper introduces the Sound Analyser an audio plug-in allowing\ - \ users to a) select a custom set of audio analyses to be performed in real-time\ - \ and b) send that information via OSC so that it can easily be used by other\ - \ systems to develop responsive applications for live performances and installations.\ - \ A description of the system architecture and audio analysis algorithms implemented\ - \ in the plug-in is presented before moving on to two case studies where the plug-in\ - \ has been used in the field with artists.},\n address = {London, United Kingdom},\n\ - \ author = {Adam Stark},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178945},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {183--186},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Sound Analyser: A Plug-In for Real-Time Audio\ - \ Analysis in Live Performances and Installations},\n url = {http://www.nime.org/proceedings/2014/nime2014_348.pdf},\n\ - \ year = {2014}\n}\n" + ID: Mosher2018 + abstract: 'This demonstration paper describes the concepts behind Tributaries of + Our Distant Palpability, an interactive sonified sculpture. It takes form as + a swelling sea anemone, while the sounds it produces recall the quagmire of a + digital ocean. The sculpture responds to changing light conditions with a dynamic + mix of audio tracks, mapping volume to light level. People passing by the sculpture, + or directly engaging it by creating light and shadows with their smart phone flashlights, + will trigger the audio. At the same time, it automatically adapts to gradual + environment light changes, such as the rise and fall of the sun. 
The piece was + inspired by the searching gestures people make, and emotions they have while, + idly browsing content on their smart devices. It was created through an interdisciplinary + collaboration between a musician, an interaction designer, and a ceramicist.' + address: 'Blacksburg, Virginia, USA' + author: Matthew Mosher and Danielle Wood and Tony Obr + bibtex: "@inproceedings{Mosher2018,\n abstract = {This demonstration paper describes\ + \ the concepts behind Tributaries of Our Distant Palpability, an interactive sonified\ + \ sculpture. It takes form as a swelling sea anemone, while the sounds it produces\ + \ recall the quagmire of a digital ocean. The sculpture responds to changing\ + \ light conditions with a dynamic mix of audio tracks, mapping volume to light\ + \ level. People passing by the sculpture, or directly engaging it by creating\ + \ light and shadows with their smart phone flashlights, will trigger the audio.\ + \ At the same time, it automatically adapts to gradual environment light changes,\ + \ such as the rise and fall of the sun. The piece was inspired by the searching\ + \ gestures people make, and emotions they have while, idly browsing content on\ + \ their smart devices. 
It was created through an interdisciplinary collaboration\ + \ between a musician, an interaction designer, and a ceramicist.},\n address =\ + \ {Blacksburg, Virginia, USA},\n author = {Matthew Mosher and Danielle Wood and\ + \ Tony Obr},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302621},\n editor\ + \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {360--361},\n publisher = {Virginia\ + \ Tech},\n title = {Tributaries of Our Lost Palpability},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0079.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178945 + doi: 10.5281/zenodo.1302621 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 183--186 - publisher: 'Goldsmiths, University of London' - title: 'Sound Analyser: A Plug-In for Real-Time Audio Analysis in Live Performances - and Installations' - url: http://www.nime.org/proceedings/2014/nime2014_348.pdf - year: 2014 + pages: 360--361 + publisher: Virginia Tech + title: Tributaries of Our Lost Palpability + url: http://www.nime.org/proceedings/2018/nime2018_paper0079.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: bjohnson2014 - abstract: 'This paper introduces recent developments in the Chronus series, a family - of custom controllers that afford a performer gestural interaction with surround - sound systems that can be easily integrated into their personal performance systems. - The controllers are built with the goal of encouraging more electronic musicians - to include the creation of dynamic pantophonic fields in performance. 
The paper - focuses on technical advances of the Chronus 2.0 prototype that extend the interface - to control both radial and angular positional data, and the controllers'' ease - of integration into electronic performance configurations, both for diffusion - and for performance from the wider electronic music community.' - address: 'London, United Kingdom' - author: Bridget Johnson and Michael Norris and Ajay Kapur - bibtex: "@inproceedings{bjohnson2014,\n abstract = {This paper introduces recent\ - \ developments in the Chronus series, a family of custom controllers that afford\ - \ a performer gestural interaction with surround sound systems that can be easily\ - \ integrated into their personal performance systems. The controllers are built\ - \ with the goal of encouraging more electronic musicians to include the creation\ - \ of dynamic pantophonic fields in performance. The paper focuses on technical\ - \ advances of the Chronus 2.0 prototype that extend the interface to control both\ - \ radial and angular positional data, and the controllers' ease of integration\ - \ into electronic performance configurations, both for diffusion and for performance\ - \ from the wider electronic music community.},\n address = {London, United Kingdom},\n\ - \ author = {Bridget Johnson and Michael Norris and Ajay Kapur},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178820},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {335--338},\n publisher = {Goldsmiths, University of London},\n title = {The\ - \ Development Of Physical Spatial Controllers},\n url = {http://www.nime.org/proceedings/2014/nime2014_349.pdf},\n\ - \ year = {2014}\n}\n" + ID: Piepenbrink2018 + abstract: 'We present a flexible, compact, and affordable embedded physical modeling + synthesizer which functions as a digital shaker. 
The instrument is self-contained, + battery-powered, wireless, and synthesizes various shakers, rattles, and other + handheld shaken percussion. Beyond modeling existing shakers, the instrument affords + new sonic interactions including hand mutes on its loudspeakers and self-sustaining + feedback. Both low-cost and high-performance versions of the instrument are discussed.' + address: 'Blacksburg, Virginia, USA' + author: Andrew Piepenbrink + bibtex: "@inproceedings{Piepenbrink2018,\n abstract = {We present a flexible, compact,\ + \ and affordable embedded physical modeling synthesizer which functions as a digital\ + \ shaker. The instrument is self-contained, battery-powered, wireless, and synthesizes\ + \ various shakers, rattles, and other handheld shaken percussion. Beyond modeling\ + \ existing shakers, the instrument affords new sonic interactions including hand\ + \ mutes on its loudspeakers and self-sustaining feedback. Both low-cost and high-performance\ + \ versions of the instrument are discussed.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Andrew Piepenbrink},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302623},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {362--363},\n publisher = {Virginia\ + \ Tech},\n title = {Embedded Digital Shakers: Handheld Physical Modeling Synthesizers},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0080.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178820 + doi: 10.5281/zenodo.1302623 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 335--338 - publisher: 'Goldsmiths, University of London' - title: The Development Of Physical Spatial 
Controllers - url: http://www.nime.org/proceedings/2014/nime2014_349.pdf - year: 2014 + pages: 362--363 + publisher: Virginia Tech + title: 'Embedded Digital Shakers: Handheld Physical Modeling Synthesizers' + url: http://www.nime.org/proceedings/2018/nime2018_paper0080.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ajensenius2014 - abstract: 'The term ''gesture'' has represented a buzzword in the NIME community - since the beginning of its conference series. But how often is it actually used, - what is it used to describe, and how does its usage here differ from its usage - in other fields of study? This paper presents a linguistic analysis of the motion-related - terminology used in all of the papers published in the NIME conference proceedings - to date (2001-2013). The results show that ''gesture'' is in fact used in 62 % - of all NIME papers, which is a significantly higher percentage than in other music - conferences (ICMC and SMC), and much more frequently than it is used in the HCI - and biomechanics communities. The results from a collocation analysis support - the claim that ''gesture'' is used broadly in the NIME community, and indicate - that it ranges from the description of concrete human motion and system control - to quite metaphorical applications.' - address: 'London, United Kingdom' - author: Alexander Refsum Jensenius - bibtex: "@inproceedings{ajensenius2014,\n abstract = {The term 'gesture' has represented\ - \ a buzzword in the NIME community since the beginning of its conference series.\ - \ But how often is it actually used, what is it used to describe, and how does\ - \ its usage here differ from its usage in other fields of study? This paper presents\ - \ a linguistic analysis of the motion-related terminology used in all of the papers\ - \ published in the NIME conference proceedings to date (2001-2013). 
The results\ - \ show that 'gesture' is in fact used in 62 % of all NIME papers, which is a significantly\ - \ higher percentage than in other music conferences (ICMC and SMC), and much more\ - \ frequently than it is used in the HCI and biomechanics communities. The results\ - \ from a collocation analysis support the claim that 'gesture' is used broadly\ - \ in the NIME community, and indicate that it ranges from the description of concrete\ - \ human motion and system control to quite metaphorical applications.},\n address\ - \ = {London, United Kingdom},\n author = {Alexander Refsum Jensenius},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178816},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {217--220},\n publisher = {Goldsmiths, University of London},\n\ - \ title = {To gesture or Not? {A}n Analysis of Terminology in {NIME} Proceedings\ - \ 2001--2013},\n url = {http://www.nime.org/proceedings/2014/nime2014_351.pdf},\n\ - \ year = {2014}\n}\n" + ID: Xambób2018 + abstract: 'The recent increase in the accessibility and size of personal and crowdsourced + digital sound collections brought about a valuable resource for music creation. + Finding and retrieving relevant sounds in performance leads to challenges that + can be approached using music information retrieval (MIR). In this paper, we explore + the use of MIR to retrieve and repurpose sounds in musical live coding. We present + a live coding system built on SuperCollider enabling the use of audio content + from online Creative Commons (CC) sound databases such as Freesound or personal + sound databases. The novelty of our approach lies in exploiting high-level MIR + methods (e.g., query by pitch or rhythmic cues) using live coding techniques applied + to sounds. We demonstrate its potential through the reflection of an illustrative + case study and the feedback from four expert users. 
The users tried the system + with either a personal database or a crowdsourced database and reported its potential + in facilitating tailorability of the tool to their own creative workflows.' + address: 'Blacksburg, Virginia, USA' + author: Anna Xambó and Gerard Roma and Alexander Lerch and Mathieu Barthet and György + Fazekas + bibtex: "@inproceedings{Xambób2018,\n abstract = {The recent increase in the accessibility\ + \ and size of personal and crowdsourced digital sound collections brought about\ + \ a valuable resource for music creation. Finding and retrieving relevant sounds\ + \ in performance leads to challenges that can be approached using music information\ + \ retrieval (MIR). In this paper, we explore the use of MIR to retrieve and repurpose\ + \ sounds in musical live coding. We present a live coding system built on SuperCollider\ + \ enabling the use of audio content from online Creative Commons (CC) sound databases\ + \ such as Freesound or personal sound databases. The novelty of our approach lies\ + \ in exploiting high-level MIR methods (e.g., query by pitch or rhythmic cues)\ + \ using live coding techniques applied to sounds. We demonstrate its potential\ + \ through the reflection of an illustrative case study and the feedback from four\ + \ expert users. 
The users tried the system with either a personal database or\ + \ a crowdsourced database and reported its potential in facilitating tailorability\ + \ of the tool to their own creative workflows.},\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Anna Xambó and Gerard Roma and Alexander Lerch and Mathieu\ + \ Barthet and György Fazekas},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302625},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {364--369},\n publisher = {Virginia\ + \ Tech},\n title = {Live Repurposing of Sounds: MIR Explorations with Personal\ + \ and Crowdsourced Databases},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0081.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178816 + doi: 10.5281/zenodo.1302625 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 217--220 - publisher: 'Goldsmiths, University of London' - title: 'To gesture or Not? An Analysis of Terminology in NIME Proceedings 2001--2013' - url: http://www.nime.org/proceedings/2014/nime2014_351.pdf - year: 2014 + pages: 364--369 + publisher: Virginia Tech + title: 'Live Repurposing of Sounds: MIR Explorations with Personal and Crowdsourced + Databases' + url: http://www.nime.org/proceedings/2018/nime2018_paper0081.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: etomas12014 - abstract: 'Tangible Scores are a new paradigm for musical instrument design with - a physical configuration inspired by graphic scores. In this paper we will focus - on the design aspects of this new interface as well as on some of the related - technical details. 
Creating an intuitive, modular and expressive instrument for - textural music was the primary driving force. Following these criteria, we literally - incorporated a musical score onto the surface of the instrument as a way of continuously - controlling several parameters of the sound synthesis. Tangible Scores are played - with both hands and they can adopt multiple physical forms. Complex and expressive - sound textures can be easily played over a variety of timbres, enabling precise - control in a natural manner.' - address: 'London, United Kingdom' - author: Enrique Tomás and Martin Kaltenbrunner - bibtex: "@inproceedings{etomas12014,\n abstract = {Tangible Scores are a new paradigm\ - \ for musical instrument design with a physical configuration inspired by graphic\ - \ scores. In this paper we will focus on the design aspects of this new interface\ - \ as well as on some of the related technical details. Creating an intuitive,\ - \ modular and expressive instrument for textural music was the primary driving\ - \ force. Following these criteria, we literally incorporated a musical score onto\ - \ the surface of the instrument as a way of continuously controlling several parameters\ - \ of the sound synthesis. Tangible Scores are played with both hands and they\ - \ can adopt multiple physical forms. 
Complex and expressive sound textures can\ - \ be easily played over a variety of timbres, enabling precise control in a natural\ - \ manner.},\n address = {London, United Kingdom},\n author = {Enrique Tom\\'as\ - \ and Martin Kaltenbrunner},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178953},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {609--614},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Tangible Scores: Shaping the Inherent Instrument\ - \ Score},\n url = {http://www.nime.org/proceedings/2014/nime2014_352.pdf},\n year\ - \ = {2014}\n}\n" + ID: Sarwate2018 + abstract: 'This paper explores the question of how live coding musicians can perform + with musicians who are not using code (such as acoustic instrumentalists or those + using graphical and tangible electronic interfaces). This paper investigates performance + systems that facilitate improvisation where the musicians can interact not just + by listening to each other and changing their own output, but also by manipulating + the data stream of the other performer(s). In a course of performance-led research + four prototypes were built and analyzed them using concepts from NIME and creative + collaboration literature. Based on this analysis it was found that the systems + should 1) provide a commonly modifiable visual representation of musical data + for both coder and non-coder, and 2) provide some independent means of sound production + for each user, giving the non-coder the ability to slow down and make non-realtime + decisions for greater performance flexibility. 
' + address: 'Blacksburg, Virginia, USA' + author: Avneesh Sarwate and Ryan Taylor Rose and Jason Freeman and Jack Armitage + bibtex: "@inproceedings{Sarwate2018,\n abstract = {This paper explores the question\ + \ of how live coding musicians can perform with musicians who are not using code\ + \ (such as acoustic instrumentalists or those using graphical and tangible electronic\ + \ interfaces). This paper investigates performance systems that facilitate improvisation\ + \ where the musicians can interact not just by listening to each other and changing\ + \ their own output, but also by manipulating the data stream of the other performer(s).\ + \ In a course of performance-led research four prototypes were built and analyzed\ + \ them using concepts from NIME and creative collaboration literature. Based on\ + \ this analysis it was found that the systems should 1) provide a commonly modifiable\ + \ visual representation of musical data for both coder and non-coder, and 2) provide\ + \ some independent means of sound production for each user, giving the non-coder\ + \ the ability to slow down and make non-realtime decisions for greater performance\ + \ flexibility. 
},\n address = {Blacksburg, Virginia, USA},\n author = {Avneesh\ + \ Sarwate and Ryan Taylor Rose and Jason Freeman and Jack Armitage},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302627},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {370--373},\n publisher = {Virginia Tech},\n title\ + \ = {Performance Systems for Live Coders and Non Coders},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0082.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178953 + doi: 10.5281/zenodo.1302627 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 609--614 - publisher: 'Goldsmiths, University of London' - title: 'Tangible Scores: Shaping the Inherent Instrument Score' - url: http://www.nime.org/proceedings/2014/nime2014_352.pdf - year: 2014 + pages: 370--373 + publisher: Virginia Tech + title: Performance Systems for Live Coders and Non Coders + url: http://www.nime.org/proceedings/2018/nime2018_paper0082.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: emorgan2014 - abstract: 'New technologies have led to the design of exciting interfaces for collaborative - music making. However we still have very little understanding of the underlying - affective and communicative processes which occur during such interactions. To - address this issue, we carried out a pilot study where we collected continuous - behavioural, physiological, and performance related measures from pairs of improvising - drummers. This paper presents preliminary findings, which could be useful for - the evaluation and design of user-centred collaborative interfaces for musical - creativity and expression.' 
- address: 'London, United Kingdom' - author: Evan Morgan and Hatice Gunes and Nick Bryan-Kinns - bibtex: "@inproceedings{emorgan2014,\n abstract = {New technologies have led to\ - \ the design of exciting interfaces for collaborative music making. However we\ - \ still have very little understanding of the underlying affective and communicative\ - \ processes which occur during such interactions. To address this issue, we carried\ - \ out a pilot study where we collected continuous behavioural, physiological,\ - \ and performance related measures from pairs of improvising drummers. This paper\ - \ presents preliminary findings, which could be useful for the evaluation and\ - \ design of user-centred collaborative interfaces for musical creativity and expression.},\n\ - \ address = {London, United Kingdom},\n author = {Evan Morgan and Hatice Gunes\ - \ and Nick Bryan-Kinns},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178877},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {23--28},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Instrumenting the Interaction: Affective and\ - \ Psychophysiological Features of Live Collaborative Musical Improvisation},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_353.pdf},\n year = {2014}\n\ + ID: Snyder2018 + abstract: 'This paper presents research on control of electronic signal feedback + in brass instruments through the development of a new augmented musical instrument, + the Feedback Trombone. The Feedback Trombone (FBT) extends the traditional acoustic + trombone interface with a speaker, microphone, and custom analog and digital hardware. 
' + address: 'Blacksburg, Virginia, USA' + author: Jeff Snyder and Michael R Mulshine and Rajeev S Erramilli + bibtex: "@inproceedings{Snyder2018,\n abstract = {This paper presents research on\ + \ control of electronic signal feedback in brass instruments through the development\ + \ of a new augmented musical instrument, the Feedback Trombone. The Feedback Trombone\ + \ (FBT) extends the traditional acoustic trombone interface with a speaker, microphone,\ + \ and custom analog and digital hardware. },\n address = {Blacksburg, Virginia,\ + \ USA},\n author = {Jeff Snyder and Michael R Mulshine and Rajeev S Erramilli},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1302629},\n editor = {Luke Dahl,\ + \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {374--379},\n publisher = {Virginia Tech},\n title\ + \ = {The Feedback Trombone: Controlling Feedback in Brass Instruments},\n url\ + \ = {http://www.nime.org/proceedings/2018/nime2018_paper0083.pdf},\n year = {2018}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178877 + doi: 10.5281/zenodo.1302629 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 23--28 - publisher: 'Goldsmiths, University of London' - title: 'Instrumenting the Interaction: Affective and Psychophysiological Features - of Live Collaborative Musical Improvisation' - url: http://www.nime.org/proceedings/2014/nime2014_353.pdf - year: 2014 + pages: 374--379 + publisher: Virginia Tech + title: 'The Feedback Trombone: Controlling Feedback in Brass Instruments' + url: http://www.nime.org/proceedings/2018/nime2018_paper0083.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: bjohnston2014 - abstract: 'This paper details the creation, design, implementation and 
uses of a - series of new mechanically bowed string instruments. These instruments have been - designed with the objective of allowing for multiple parameters of musical expressivity, - as well as including the physical and spatial features of the instruments to be - integral aspects of their perception as instruments and sonic objects. This paper - focuses on the hardware design, software implementation, and present musical uses - of the ensemble.' - address: 'London, United Kingdom' - author: Blake Johnston and Henry Dengate Thrush and Ajay Kapur and Jim Murphy and - Tane Moleta - bibtex: "@inproceedings{bjohnston2014,\n abstract = {This paper details the creation,\ - \ design, implementation and uses of a series of new mechanically bowed string\ - \ instruments. These instruments have been designed with the objective of allowing\ - \ for multiple parameters of musical expressivity, as well as including the physical\ - \ and spatial features of the instruments to be integral aspects of their perception\ - \ as instruments and sonic objects. This paper focuses on the hardware design,\ - \ software implementation, and present musical uses of the ensemble.},\n address\ - \ = {London, United Kingdom},\n author = {Blake Johnston and Henry Dengate Thrush\ - \ and Ajay Kapur and Jim Murphy and Tane Moleta},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178822},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {557--560},\n publisher = {Goldsmiths, University of London},\n title = {Polus:\ - \ The Design and Development of a New, Mechanically Bowed String Instrument Ensemble},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_355.pdf},\n year = {2014}\n\ - }\n" + ID: Sheffield2018 + abstract: The use of mechatronic components (e.g. 
DC motors and solenoids) as both + electronic sound source and locus of interaction is explored in a form of embedded + acoustic instruments called mechanoise instruments. Micro-controllers and embedded + computing devices provide a platform for live control of motor speeds and additional + sound processing by a human performer. Digital fabrication and use of salvaged + and found materials are emphasized. + address: 'Blacksburg, Virginia, USA' + author: Eric Sheffield + bibtex: "@inproceedings{Sheffield2018,\n abstract = {The use of mechatronic components\ + \ (e.g. DC motors and solenoids) as both electronic sound source and locus of\ + \ interaction is explored in a form of embedded acoustic instruments called mechanoise\ + \ instruments. Micro-controllers and embedded computing devices provide a platform\ + \ for live control of motor speeds and additional sound processing by a human\ + \ performer. Digital fabrication and use of salvaged and found materials are emphasized.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Eric Sheffield},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1302631},\n editor = {Luke Dahl, Douglas\ + \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {380--381},\n publisher = {Virginia Tech},\n title\ + \ = {Mechanoise: Mechatronic Sound and Interaction in Embedded Acoustic Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0084.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178822 - issn: 2220-4806 + doi: 10.5281/zenodo.1302631 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 + issn: 2220-4806 month: June - pages: 557--560 - publisher: 'Goldsmiths, University of London' - title: 'Polus: The Design and Development of 
a New, Mechanically Bowed String Instrument - Ensemble' - url: http://www.nime.org/proceedings/2014/nime2014_355.pdf - year: 2014 + pages: 380--381 + publisher: Virginia Tech + title: 'Mechanoise: Mechatronic Sound and Interaction in Embedded Acoustic Instruments' + url: http://www.nime.org/proceedings/2018/nime2018_paper0084.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: oizmirli2014 - abstract: 'We present a framework for imitation of percussion performances with - parameter-based learning for accurate reproduction. We constructed a robotic setup - involving pull-solenoids attached to drum sticks which communicate with a computer - through an Arduino microcontroller. The imitation framework allows for parameter - adaptation to different mechanical constructions by learning the capabilities - of the overall system being used. For the rhythmic vocabulary, we have considered - regular stroke, flam and drag styles. A learning and calibration system was developed - to efficiently perform grace notes for the drag rudiment as well as the single - stroke and the flam rudiment. A second pre-performance process is introduced to - minimize the latency difference between individual drum sticks in our mechanical - setup. We also developed an off-line onset detection method to reliably recognize - onsets from the microphone input. Once these pre-performance steps are taken, - our setup will then listen to a human drummer''s performance pattern, analyze - for onsets, loudness, and rudiment pattern, and then play back using the learned - parameters for the particular system. We conducted three different evaluations - of our constructed system.' 
- address: 'London, United Kingdom' - author: Ozgur Izmirli and Jake Faris - bibtex: "@inproceedings{oizmirli2014,\n abstract = {We present a framework for imitation\ - \ of percussion performances with parameter-based learning for accurate reproduction.\ - \ We constructed a robotic setup involving pull-solenoids attached to drum sticks\ - \ which communicate with a computer through an Arduino microcontroller. The imitation\ - \ framework allows for parameter adaptation to different mechanical constructions\ - \ by learning the capabilities of the overall system being used. For the rhythmic\ - \ vocabulary, we have considered regular stroke, flam and drag styles. A learning\ - \ and calibration system was developed to efficiently perform grace notes for\ - \ the drag rudiment as well as the single stroke and the flam rudiment. A second\ - \ pre-performance process is introduced to minimize the latency difference between\ - \ individual drum sticks in our mechanical setup. We also developed an off-line\ - \ onset detection method to reliably recognize onsets from the microphone input.\ - \ Once these pre-performance steps are taken, our setup will then listen to a\ - \ human drummer's performance pattern, analyze for onsets, loudness, and rudiment\ - \ pattern, and then play back using the learned parameters for the particular\ - \ system. 
We conducted three different evaluations of our constructed system.},\n\ - \ address = {London, United Kingdom},\n author = {Ozgur Izmirli and Jake Faris},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178814},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {483--486},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Imitation Framework for Percussion},\n url = {http://www.nime.org/proceedings/2014/nime2014_360.pdf},\n\ - \ year = {2014}\n}\n" + ID: Pigrem2018 + abstract: 'This paper explores the role of materiality in Digital Musical Instruments + and questions the influence of tacit understandings of sensor technology. Existing + research investigates the use of gesture, physical interaction and subsequent + parameter mapping. We suggest that a tacit knowledge of the ‘sensor layer'' brings + with it definitions, understandings and expectations that forge and guide our + approach to interaction. We argue that the influence of technology starts before + a sound is made, and comes from not only intuition of material properties, but + also received notions of what technology can and should do. On encountering an + instrument with obvious sensors, a potential performer will attempt to predict + what the sensors do and what the designer intends for them to do, becoming influenced + by a machine centered understanding of interaction and not a solely material centred + one. The paper presents an observational study of interaction using non-functional + prototype instruments designed to explore fundamental ideas and understandings + of instrumental interaction in the digital realm. We will show that this understanding + influences both gestural language and ability to characterise an expected sonic/musical + response. ' + address: 'Blacksburg, Virginia, USA' + author: 'Jon Pigrem and McPherson, Andrew P.' 
+ bibtex: "@inproceedings{Pigrem2018,\n abstract = {This paper explores the role of\ + \ materiality in Digital Musical Instruments and questions the influence of tacit\ + \ understandings of sensor technology. Existing research investigates the use\ + \ of gesture, physical interaction and subsequent parameter mapping. We suggest\ + \ that a tacit knowledge of the ‘sensor layer' brings with it definitions, understandings\ + \ and expectations that forge and guide our approach to interaction. We argue\ + \ that the influence of technology starts before a sound is made, and comes from\ + \ not only intuition of material properties, but also received notions of what\ + \ technology can and should do. On encountering an instrument with obvious sensors,\ + \ a potential performer will attempt to predict what the sensors do and what the\ + \ designer intends for them to do, becoming influenced by a machine centered understanding\ + \ of interaction and not a solely material centred one. The paper presents an\ + \ observational study of interaction using non-functional prototype instruments\ + \ designed to explore fundamental ideas and understandings of instrumental interaction\ + \ in the digital realm. We will show that this understanding influences both gestural\ + \ language and ability to characterise an expected sonic/musical response. },\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Jon Pigrem and McPherson,\ + \ Andrew P.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302633},\n editor\ + \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {382--385},\n publisher = {Virginia\ + \ Tech},\n title = {Do We Speak Sensor? 
Cultural Constraints of Embodied Interaction\ + \ },\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0085.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178814 + doi: 10.5281/zenodo.1302633 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 483--486 - publisher: 'Goldsmiths, University of London' - title: Imitation Framework for Percussion - url: http://www.nime.org/proceedings/2014/nime2014_360.pdf - year: 2014 + pages: 382--385 + publisher: Virginia Tech + title: 'Do We Speak Sensor? Cultural Constraints of Embodied Interaction ' + url: http://www.nime.org/proceedings/2018/nime2018_paper0085.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ailsar2014 - abstract: 'This paper discusses one particular mapping for a new gestural instrument - called the AirSticks. This mapping was designed to be used for improvised or rehearsed - duos and restricts the performer to only utilising the sound source of one other - musician playing an acoustic instrument. Several pieces with different musicians - were performed and documented, musicians were observed and interviews with these - musicians were transcribed. In this paper we will examine the thoughts of these - musicians to gather a better understanding of how to design effective ensemble - instruments of this type.' - address: 'London, United Kingdom' - author: Alon Ilsar and Mark Havryliv and Andrew Johnston - bibtex: "@inproceedings{ailsar2014,\n abstract = {This paper discusses one particular\ - \ mapping for a new gestural instrument called the AirSticks. This mapping was\ - \ designed to be used for improvised or rehearsed duos and restricts the performer\ - \ to only utilising the sound source of one other musician playing an acoustic\ - \ instrument. 
Several pieces with different musicians were performed and documented,\ - \ musicians were observed and interviews with these musicians were transcribed.\ - \ In this paper we will examine the thoughts of these musicians to gather a better\ - \ understanding of how to design effective ensemble instruments of this type.},\n\ - \ address = {London, United Kingdom},\n author = {Alon Ilsar and Mark Havryliv\ - \ and Andrew Johnston},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178812},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {339--342},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Evaluating the Performance of a New Gestural\ - \ Instrument Within an Ensemble},\n url = {http://www.nime.org/proceedings/2014/nime2014_363.pdf},\n\ - \ year = {2014}\n}\n" + ID: Salazarb2018 + abstract: 'At first glance, the practice of musical live coding seems distanced + from the gestures and sense of embodiment common in musical performance, electronic + or otherwise. This workshop seeks to explore the extent to which this assertion + is justified, to re-examine notions of gesture and embodiment in the context of + musical live coding performance, to consider historical approaches to synthesizing + musical programming and gesture, and to look to the future for new ways of doing + so. The workshop will consist firstly of a critical discussion of these issues + and related literature. This will be followed by applied practical experiments + involving ideas generated during these discussions. The workshop will conclude + with a recapitulation and examination of these experiments in the context of previous + research and proposed future directions. 
' + address: 'Blacksburg, Virginia, USA' + author: Spencer Salazar and Jack Armitage + bibtex: "@inproceedings{Salazarb2018,\n abstract = {At first glance, the practice\ + \ of musical live coding seems distanced from the gestures and sense of embodiment\ + \ common in musical performance, electronic or otherwise. This workshop seeks\ + \ to explore the extent to which this assertion is justified, to re-examine notions\ + \ of gesture and embodiment in the context of musical live coding performance,\ + \ to consider historical approaches to synthesizing musical programming and gesture,\ + \ and to look to the future for new ways of doing so. The workshop will consist\ + \ firstly of a critical discussion of these issues and related literature. This\ + \ will be followed by applied practical experiments involving ideas generated\ + \ during these discussions. The workshop will conclude with a recapitulation and\ + \ examination of these experiments in the context of previous research and proposed\ + \ future directions. 
},\n address = {Blacksburg, Virginia, USA},\n author = {Spencer\ + \ Salazar and Jack Armitage},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302635},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {386--389},\n publisher = {Virginia\ + \ Tech},\n title = {Re-engaging the Body and Gesture in Musical Live Coding},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0086.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178812 + doi: 10.5281/zenodo.1302635 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 339--342 - publisher: 'Goldsmiths, University of London' - title: Evaluating the Performance of a New Gestural Instrument Within an Ensemble - url: http://www.nime.org/proceedings/2014/nime2014_363.pdf - year: 2014 + pages: 386--389 + publisher: Virginia Tech + title: Re-engaging the Body and Gesture in Musical Live Coding + url: http://www.nime.org/proceedings/2018/nime2018_paper0086.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: abarenca2014 - abstract: The Manipuller is a musical interface based on strings and multi-dimensional - force sensing. This paper presents a new architectural approach to the original - interface design which has been materialized with the implementation of the Manipuller - II system prototype. Besides the short paper we would like to do a poster presentation - plus a demo of the new prototype where the public will be invited to play with - the new musical interface. 
- address: 'London, United Kingdom' - author: Adrián Barenca and Milos Corak - bibtex: "@inproceedings{abarenca2014,\n abstract = {The Manipuller is a musical\ - \ interface based on strings and multi-dimensional force sensing. This paper presents\ - \ a new architectural approach to the original interface design which has been\ - \ materialized with the implementation of the Manipuller II system prototype.\ - \ Besides the short paper we would like to do a poster presentation plus a demo\ - \ of the new prototype where the public will be invited to play with the new musical\ - \ interface.},\n address = {London, United Kingdom},\n author = {Adri{\\'a}n Barenca\ - \ and Milos Corak},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178706},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {589--592},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {The Manipuller II: Strings within a Force\ - \ Sensing Ring},\n url = {http://www.nime.org/proceedings/2014/nime2014_364.pdf},\n\ - \ year = {2014}\n}\n" + ID: Berdahl2018 + abstract: 'For the purpose of creating new musical instruments, chaotic dynamical + systems can be simulated in real time to synthesize complex sounds. This work + investigates a series of discrete-time chaotic maps, which have the potential + to generate intriguing sounds when they are adjusted to be on the edge of chaos. With + these chaotic maps as studied historically, the edge of chaos tends to be razor-thin, + which can make it difficult to employ them for making new musical instruments. The + authors therefore suggest connecting chaotic maps with digital waveguides, which + (1) make it easier to synthesize harmonic tones and (2) make it harder to fall + off of the edge of chaos while playing a musical instrument. The authors argue + therefore that this technique widens the razor-thin edge of chaos into a musical + highway.' 
+ address: 'Blacksburg, Virginia, USA' + author: Edgar Berdahl and Eric Sheffield and Andrew Pfalz and Anthony T. Marasco + bibtex: "@inproceedings{Berdahl2018,\n abstract = {For the purpose of creating new\ + \ musical instruments, chaotic dynamical systems can be simulated in real time\ + \ to synthesize complex sounds. This work investigates a series of discrete-time\ + \ chaotic maps, which have the potential to generate intriguing sounds when they\ + \ are adjusted to be on the edge of chaos. With these chaotic maps as studied\ + \ historically, the edge of chaos tends to be razor-thin, which can make it difficult\ + \ to employ them for making new musical instruments. The authors therefore suggest\ + \ connecting chaotic maps with digital waveguides, which (1) make it easier to\ + \ synthesize harmonic tones and (2) make it harder to fall off of the edge of\ + \ chaos while playing a musical instrument. The authors argue therefore that\ + \ this technique widens the razor-thin edge of chaos into a musical highway.},\n\ + \ address = {Blacksburg, Virginia, USA},\n author = {Edgar Berdahl and Eric Sheffield\ + \ and Andrew Pfalz and Anthony T. 
Marasco},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1302637},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {390--393},\n publisher = {Virginia Tech},\n title = {Widening the Razor-Thin\ + \ Edge of Chaos Into a Musical Highway: Connecting Chaotic Maps to Digital Waveguides},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0087.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178706 + doi: 10.5281/zenodo.1302637 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 589--592 - publisher: 'Goldsmiths, University of London' - title: 'The Manipuller II: Strings within a Force Sensing Ring' - url: http://www.nime.org/proceedings/2014/nime2014_364.pdf - year: 2014 + pages: 390--393 + publisher: Virginia Tech + title: 'Widening the Razor-Thin Edge of Chaos Into a Musical Highway: Connecting + Chaotic Maps to Digital Waveguides' + url: http://www.nime.org/proceedings/2018/nime2018_paper0087.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: osarier2014 - abstract: 'In the recent years many touch screen interfaces have been designed and - used for musical control. When compared with their physical counterparts, current - control paradigms employed in touch screen musical interfaces do not require the - same level of physical labor and this negatively affects the user experience in - terms of expressivity, engagement and enjoyment. This lack of physicality can - be remedied by using interaction elements, which are designed for the exertion - of the user. 
Employing intentionally difficult and inefficient interaction design - can enhance the user experience by allowing greater bodily expression, kinesthetic - feedback, more apparent skill acquisition, and performer satisfaction. Rub Synth - is a touch screen musical instrument with an exertion interface. It was made for - creating and testing exertion strategies that are possible by only using 2d touch - coordinates as input and evaluating the outcomes of implementing intentional difficulty. - This paper discusses the strategies that can be employed to model effort on touch - screens, the benefits of having physical difficulty, Rub Synth''s interaction - design, and user experience results of using such an interface.' - address: 'London, United Kingdom' - author: Ozan Sarier - bibtex: "@inproceedings{osarier2014,\n abstract = {In the recent years many touch\ - \ screen interfaces have been designed and used for musical control. When compared\ - \ with their physical counterparts, current control paradigms employed in touch\ - \ screen musical interfaces do not require the same level of physical labor and\ - \ this negatively affects the user experience in terms of expressivity, engagement\ - \ and enjoyment. This lack of physicality can be remedied by using interaction\ - \ elements, which are designed for the exertion of the user. Employing intentionally\ - \ difficult and inefficient interaction design can enhance the user experience\ - \ by allowing greater bodily expression, kinesthetic feedback, more apparent skill\ - \ acquisition, and performer satisfaction. Rub Synth is a touch screen musical\ - \ instrument with an exertion interface. It was made for creating and testing\ - \ exertion strategies that are possible by only using 2d touch coordinates as\ - \ input and evaluating the outcomes of implementing intentional difficulty. 
This\ - \ paper discusses the strategies that can be employed to model effort on touch\ - \ screens, the benefits of having physical difficulty, Rub Synth's interaction\ - \ design, and user experience results of using such an interface.},\n address\ - \ = {London, United Kingdom},\n author = {Ozan Sarier},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178931},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {179--182},\n publisher = {Goldsmiths, University of London},\n title = {Rub\ - \ Synth : A Study of Implementing Intentional Physical Difficulty Into Touch Screen\ - \ Music Controllers},\n url = {http://www.nime.org/proceedings/2014/nime2014_367.pdf},\n\ - \ year = {2014}\n}\n" + ID: Snyderb2018 + abstract: 'This paper describes a project to create a software instrument using + a biological model of neuron behavior for audio synthesis. The translation of + the model to a usable audio synthesis process is described, and a piece for laptop + orchestra created using the instrument is discussed.' + address: 'Blacksburg, Virginia, USA' + author: Jeff Snyder and Aatish Bhatia and Michael R Mulshine + bibtex: "@inproceedings{Snyderb2018,\n abstract = {This paper describes a project\ + \ to create a software instrument using a biological model of neuron behavior\ + \ for audio synthesis. 
The translation of the model to a usable audio synthesis\ + \ process is described, and a piece for laptop orchestra created using the instrument\ + \ is discussed.},\n address = {Blacksburg, Virginia, USA},\n author = {Jeff Snyder\ + \ and Aatish Bhatia and Michael R Mulshine},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1302639},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ + \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {394--397},\n publisher = {Virginia Tech},\n title = {Neuron-modeled Audio\ + \ Synthesis: Nonlinear Sound and Control},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0088.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178931 + doi: 10.5281/zenodo.1302639 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 179--182 - publisher: 'Goldsmiths, University of London' - title: 'Rub Synth : A Study of Implementing Intentional Physical Difficulty Into - Touch Screen Music Controllers' - url: http://www.nime.org/proceedings/2014/nime2014_367.pdf - year: 2014 + pages: 394--397 + publisher: Virginia Tech + title: 'Neuron-modeled Audio Synthesis: Nonlinear Sound and Control' + url: http://www.nime.org/proceedings/2018/nime2018_paper0088.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: lfyfe2014 - abstract: 'The Nexus Data Exchange Format (NDEF) is an Open Sound Control (OSC) - namespace specification designed to make connection and message management tasks - easier for OSC-based networked performance systems. New extensions to the NDEF - namespace improve both connection and message management between OSC client and - server nodes. 
Connection management between nodes now features human-readable - labels for connections and a new message exchange for pinging connections to determine - their status. Message management now has improved namespace synchronization via - a message count exchange and by the ability to add, remove, and replace messages - on connected nodes.' - address: 'London, United Kingdom' - author: Lawrence Fyfe and Adam Tindale and Sheelagh Carpendale - bibtex: "@inproceedings{lfyfe2014,\n abstract = {The Nexus Data Exchange Format\ - \ (NDEF) is an Open Sound Control (OSC) namespace specification designed to make\ - \ connection and message management tasks easier for OSC-based networked performance\ - \ systems. New extensions to the NDEF namespace improve both connection and message\ - \ management between OSC client and server nodes. Connection management between\ - \ nodes now features human-readable labels for connections and a new message exchange\ - \ for pinging connections to determine their status. Message management now has\ - \ improved namespace synchronization via a message count exchange and by the ability\ - \ to add, remove, and replace messages on connected nodes.},\n address = {London,\ - \ United Kingdom},\n author = {Lawrence Fyfe and Adam Tindale and Sheelagh Carpendale},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178768},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {343--346},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Extending the Nexus Data Exchange Format (NDEF) Specification},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_368.pdf},\n year = {2014}\n\ - }\n" + ID: Cádiz2018 + abstract: 'In computer or electroacoustic music, it is often the case that the compositional + act and the parametric control of the underlying synthesis algorithms or hardware + are not separable from each other. 
In these situations, composition and control + of the synthesis parameters are not easy to distinguish. One possible solution + is by means of fuzzy logic. This approach provides a simple, intuitive but powerful + control of the compositional process usually in interesting non-linear ways. Compositional + control in this context is achieved by the fuzzification of the relevant internal + synthesis parameters and the parallel computation of common sense fuzzy rules + of inference specified by the composer. This approach has been implemented computationally + as a software package entitled FLCTK (Fuzzy Logic Control Tool Kit) in the form + of external objects for the widely used real-time compositional environments Max/MSP + and Pd. In this article, we present an updated version of this tool. As a demonstration + of the wide range of situations in which this approach could be used, we provide + two examples of parametric fuzzy control: first, the fuzzy control of a water + tank simulation and second a particle-based sound synthesis technique by a fuzzy + approach. ' + address: 'Blacksburg, Virginia, USA' + author: Rodrigo F. Cádiz and Marie Gonzalez-Inostroza + bibtex: "@inproceedings{Cádiz2018,\n abstract = {In computer or electroacoustic\ + \ music, it is often the case that the compositional act and the parametric control\ + \ of the underlying synthesis algorithms or hardware are not separable from each\ + \ other. In these situations, composition and control of the synthesis parameters\ + \ are not easy to distinguish. One possible solution is by means of fuzzy logic.\ + \ This approach provides a simple, intuitive but powerful control of the compositional\ + \ process usually in interesting non-linear ways. Compositional control in this\ + \ context is achieved by the fuzzification of the relevant internal synthesis\ + \ parameters and the parallel computation of common sense fuzzy rules of inference\ + \ specified by the composer. 
This approach has been implemented computationally\ + \ as a software package entitled FLCTK (Fuzzy Logic Control Tool Kit) in the form\ + \ of external objects for the widely used real-time compositional environments\ + \ Max/MSP and Pd. In this article, we present an updated version of this tool.\ + \ As a demonstration of the wide range of situations in which this approach could\ + \ be used, we provide two examples of parametric fuzzy control: first, the fuzzy\ + \ control of a water tank simulation and second a particle-based sound synthesis\ + \ technique by a fuzzy approach. },\n address = {Blacksburg, Virginia, USA},\n\ + \ author = {Rodrigo F. Cádiz and Marie Gonzalez-Inostroza},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302641},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {398--402},\n publisher = {Virginia Tech},\n title = {Fuzzy Logic Control\ + \ Toolkit 2.0: composing and synthesis by fuzzyfication},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0089.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178768 + doi: 10.5281/zenodo.1302641 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 343--346 - publisher: 'Goldsmiths, University of London' - title: Extending the Nexus Data Exchange Format (NDEF) Specification - url: http://www.nime.org/proceedings/2014/nime2014_368.pdf - year: 2014 + pages: 398--402 + publisher: Virginia Tech + title: 'Fuzzy Logic Control Toolkit 2.0: composing and synthesis by fuzzyfication' + url: http://www.nime.org/proceedings/2018/nime2018_paper0089.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ihattwick12014 - abstract: This paper presents results from 
the development of a digital musical - instrument which uses audio feature extraction for the control of sound synthesis. - Our implementation utilizes multi-band audio analysis to generate control signals. - This technique is well-suited to instruments for which the gestural interface - is intentionally weakly defined. We present a percussion instrument utilizing - this technique in which the timbral characteristics of found objects are the primary - source of audio for analysis. - address: 'London, United Kingdom' - author: Ian Hattwick and Preston Beebe and Zachary Hale and Marcelo Wanderley and - Philippe Leroux and Fabrice Marandola - bibtex: "@inproceedings{ihattwick12014,\n abstract = {This paper presents results\ - \ from the development of a digital musical instrument which uses audio feature\ - \ extraction for the control of sound synthesis. Our implementation utilizes multi-band\ - \ audio analysis to generate control signals. This technique is well-suited to\ - \ instruments for which the gestural interface is intentionally weakly defined.\ - \ We present a percussion instrument utilizing this technique in which the timbral\ - \ characteristics of found objects are the primary source of audio for analysis.},\n\ - \ address = {London, United Kingdom},\n author = {Ian Hattwick and Preston Beebe\ - \ and Zachary Hale and Marcelo Wanderley and Philippe Leroux and Fabrice Marandola},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178790},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {597--600},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Unsounding Objects: Audio Feature Extraction for the\ - \ Control of Sound Synthesis},\n url = {http://www.nime.org/proceedings/2014/nime2014_369.pdf},\n\ - \ year = {2014}\n}\n" + ID: Leigh2018 + abstract: 'Playing musical instruments involves producing gradually more challenging + body movements and 
transitions, where the kinematic constraints of the body play + a crucial role in structuring the resulting music. We seek to make a bridge between + currently accessible motor patterns, and musical possibilities beyond those --- + afforded through the use of a robotic augmentation. Guitar Machine is a robotic + device that presses on guitar strings and assists a musician by fretting alongside + her on the same guitar. This paper discusses the design of the system, strategies + for using the system to create novel musical patterns, and a user study that looks + at the effects of the temporary acquisition of enhanced physical ability. Our + results indicate that the proposed human-robot interaction would equip users to + explore new musical avenues on the guitar, as well as provide an enhanced understanding + of the task at hand on the basis of the robotically acquired ability. ' + address: 'Blacksburg, Virginia, USA' + author: Sang-won Leigh and Pattie Maes + bibtex: "@inproceedings{Leigh2018,\n abstract = {Playing musical instruments involves\ + \ producing gradually more challenging body movements and transitions, where the\ + \ kinematic constraints of the body play a crucial role in structuring the resulting\ + \ music. We seek to make a bridge between currently accessible motor patterns,\ + \ and musical possibilities beyond those --- afforded through the use of a robotic\ + \ augmentation. Guitar Machine is a robotic device that presses on guitar strings\ + \ and assists a musician by fretting alongside her on the same guitar. This paper\ + \ discusses the design of the system, strategies for using the system to create\ + \ novel musical patterns, and a user study that looks at the effects of the temporary\ + \ acquisition of enhanced physical ability. 
Our results indicate that the proposed\ + \ human-robot interaction would equip users to explore new musical avenues on\ + \ the guitar, as well as provide an enhanced understanding of the task at hand\ + \ on the basis of the robotically acquired ability. },\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Sang-won Leigh and Pattie Maes},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302643},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {403--408},\n publisher = {Virginia Tech},\n title = {Guitar Machine:\ + \ Robotic Fretting Augmentation for Hybrid Human-Machine Guitar Play},\n url =\ + \ {http://www.nime.org/proceedings/2018/nime2018_paper0090.pdf},\n year = {2018}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178790 + doi: 10.5281/zenodo.1302643 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 597--600 - publisher: 'Goldsmiths, University of London' - title: 'Unsounding Objects: Audio Feature Extraction for the Control of Sound Synthesis' - url: http://www.nime.org/proceedings/2014/nime2014_369.pdf - year: 2014 + pages: 403--408 + publisher: Virginia Tech + title: 'Guitar Machine: Robotic Fretting Augmentation for Hybrid Human-Machine Guitar + Play' + url: http://www.nime.org/proceedings/2018/nime2018_paper0090.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: ihattwick2014 - abstract: 'Moving new DMIs from the research lab to professional artistic contexts - places new demands on both their design and manufacturing. 
Through a discussion - of the Prosthetic Instruments, a family of digital musical instruments we designed - for use in an interactive dance performance, we discuss four different approaches - to manufacturing -artisanal, building block, rapid prototyping, and industrial. - We discuss our use of these different approaches as we strove to reconcile the - many conflicting constraints placed upon the instruments'' design due to their - use as hypothetical prosthetic extensions to dancers'' bodies, as aesthetic objects, - and as instruments used in a professional touring context. Experiences and lessons - learned during the design and manufacturing process are discussed in relation - both to these manufacturing approaches as well as to Bill Buxton''s concept of - artist-spec design.' - address: 'London, United Kingdom' - author: Ian Hattwick and Joseph Malloch and Marcelo Wanderley - bibtex: "@inproceedings{ihattwick2014,\n abstract = {Moving new DMIs from the research\ - \ lab to professional artistic contexts places new demands on both their design\ - \ and manufacturing. Through a discussion of the Prosthetic Instruments, a family\ - \ of digital musical instruments we designed for use in an interactive dance performance,\ - \ we discuss four different approaches to manufacturing -artisanal, building block,\ - \ rapid prototyping, and industrial. 
We discuss our use of these different approaches\ - \ as we strove to reconcile the many conflicting constraints placed upon the instruments'\ - \ design due to their use as hypothetical prosthetic extensions to dancers' bodies,\ - \ as aesthetic objects, and as instruments used in a professional touring context.\ - \ Experiences and lessons learned during the design and manufacturing process\ - \ are discussed in relation both to these manufacturing approaches as well as\ - \ to Bill Buxton's concept of artist-spec design.},\n address = {London, United\ - \ Kingdom},\n author = {Ian Hattwick and Joseph Malloch and Marcelo Wanderley},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178792},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {443--448},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Forming Shapes to Bodies: Design for Manufacturing in\ - \ the Prosthetic Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_370.pdf},\n\ - \ year = {2014}\n}\n" + ID: Barton2018 + abstract: 'Percussive aerophones are configurable, modular, scalable, and can be + constructed from commonly found materials. They can produce rich timbres, a wide + range of pitches and complex polyphony. Their use by humans, perhaps most famously + by the Blue Man Group, inspired us to build an electromechanically-actuated version + of the instrument in order to explore expressive possibilities enabled by machines. + The Music, Perception, and Robotics Lab at WPI has iteratively designed, built + and composed for a robotic percussive aerophone since 2015, which has both taught + lessons in actuation and revealed promising musical capabilities of the instrument. 
' + address: 'Blacksburg, Virginia, USA' + author: Scott Barton and Karl Sundberg and Andrew Walter and Linda Sara Baker and + Tanuj Sane and Alexander O'Brien + bibtex: "@inproceedings{Barton2018,\n abstract = {Percussive aerophones are configurable,\ + \ modular, scalable, and can be constructed from commonly found materials. They\ + \ can produce rich timbres, a wide range of pitches and complex polyphony. Their\ + \ use by humans, perhaps most famously by the Blue Man Group, inspired us to build\ + \ an electromechanically-actuated version of the instrument in order to explore\ + \ expressive possibilities enabled by machines. The Music, Perception, and Robotics\ + \ Lab at WPI has iteratively designed, built and composed for a robotic percussive\ + \ aerophone since 2015, which has both taught lessons in actuation and revealed\ + \ promising musical capabilities of the instrument. },\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Scott Barton and Karl Sundberg and Andrew Walter\ + \ and Linda Sara Baker and Tanuj Sane and Alexander O'Brien},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1302645},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ + \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {409--412},\n publisher = {Virginia Tech},\n title = {Robotic Percussive\ + \ Aerophone},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0091.pdf},\n\ + \ year = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178792 + doi: 10.5281/zenodo.1302645 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 443--448 - publisher: 'Goldsmiths, University of London' - title: 'Forming Shapes to Bodies: Design for Manufacturing in the Prosthetic Instruments' - url: 
http://www.nime.org/proceedings/2014/nime2014_370.pdf - year: 2014 + pages: 409--412 + publisher: Virginia Tech + title: Robotic Percussive Aerophone + url: http://www.nime.org/proceedings/2018/nime2018_paper0091.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: cnash2014 - abstract: 'This paper explores the concept of end-user programming languages in - music composition, and introduces the Manhattan system, which integrates formulas - with a grid-based style of music sequencer. Following the paradigm of spreadsheets, - an established model of end-user programming, Manhattan is designed to bridge - the gap between traditional music editing methods (such as MIDI sequencing and - typesetting) and generative and algorithmic music -seeking both to reduce the - learning threshold of programming and support flexible integration of static and - dynamic musical elements in a single work. Interaction draws on rudimentary knowledge - of mathematics and spreadsheets to augment the sequencer notation with programming - concepts such as expressions, built-in functions, variables, pointers and arrays, - iteration (for loops), branching (goto), and conditional statements (if-then-else). - In contrast to other programming tools, formulas emphasise the visibility of musical - data (e.g. notes), rather than code, but also allow composers to interact with - notated music from a more abstract perspective of musical processes. To illustrate - the function and use cases of the system, several examples of traditional and - generative music are provided, the latter drawing on minimalism (process-based - music) as an accessible introduction to algorithmic composition. Throughout, the - system and approach are evaluated using the cognitive dimensions of notations - framework, together with early feedback for use by artists.' 
- address: 'London, United Kingdom' - author: Chris Nash - bibtex: "@inproceedings{cnash2014,\n abstract = {This paper explores the concept\ - \ of end-user programming languages in music composition, and introduces the Manhattan\ - \ system, which integrates formulas with a grid-based style of music sequencer.\ - \ Following the paradigm of spreadsheets, an established model of end-user programming,\ - \ Manhattan is designed to bridge the gap between traditional music editing methods\ - \ (such as MIDI sequencing and typesetting) and generative and algorithmic music\ - \ -seeking both to reduce the learning threshold of programming and support flexible\ - \ integration of static and dynamic musical elements in a single work. Interaction\ - \ draws on rudimentary knowledge of mathematics and spreadsheets to augment the\ - \ sequencer notation with programming concepts such as expressions, built-in functions,\ - \ variables, pointers and arrays, iteration (for loops), branching (goto), and\ - \ conditional statements (if-then-else). In contrast to other programming tools,\ - \ formulas emphasise the visibility of musical data (e.g. notes), rather than\ - \ code, but also allow composers to interact with notated music from a more abstract\ - \ perspective of musical processes. To illustrate the function and use cases of\ - \ the system, several examples of traditional and generative music are provided,\ - \ the latter drawing on minimalism (process-based music) as an accessible introduction\ - \ to algorithmic composition. 
Throughout, the system and approach are evaluated\ - \ using the cognitive dimensions of notations framework, together with early feedback\ - \ for use by artists.},\n address = {London, United Kingdom},\n author = {Chris\ - \ Nash},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178891},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {221--226},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Manhattan: End-User Programming for Music},\n url = {http://www.nime.org/proceedings/2014/nime2014_371.pdf},\n\ - \ year = {2014}\n}\n" + ID: VillicañaShaw2018 + abstract: 'This paper introduces seven mechatronic compositions performed over three + years at the xxxxx (xxxx). Each composition is discussed in regard to how it addresses + the performative elements of mechatronic music concerts. The compositions are + grouped into four classifications according to the types of interactions between + human and robotic performers they afford: Non-Interactive, Mechatronic Instruments + Played by Humans, Mechatronic Instruments Playing with Humans, and Social Interaction + as Performance. The orchestration of each composition is described along with + an overview of the piece''s compositional philosophy. Observations on how specific + extra-musical compositional techniques can be incorporated into future mechatronic + performances by human-robot performance ensembles are addressed.' + address: 'Blacksburg, Virginia, USA' + author: Nathan Daniel Villicaña-Shaw and Spencer Salazar and Ajay Kapur + bibtex: "@inproceedings{VillicañaShaw2018,\n abstract = {This paper introduces seven\ + \ mechatronic compositions performed over three years at the xxxxx (xxxx). Each\ + \ composition is discussed in regard to how it addresses the performative elements\ + \ of mechatronic music concerts. 
The compositions are grouped into four classifications\ + \ according to the types of interactions between human and robotic performers\ + \ they afford: Non-Interactive, Mechatronic Instruments Played by Humans, Mechatronic\ + \ Instruments Playing with Humans, and Social Interaction as Performance. The\ + \ orchestration of each composition is described along with an overview of the\ + \ piece's compositional philosophy. Observations on how specific extra-musical\ + \ compositional techniques can be incorporated into future mechatronic performances\ + \ by human-robot performance ensembles are addressed.},\n address = {Blacksburg,\ + \ Virginia, USA},\n author = {Nathan Daniel Villicaña-Shaw and Spencer Salazar\ + \ and Ajay Kapur},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302647},\n\ + \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {413--418},\n publisher = {Virginia\ + \ Tech},\n title = {Mechatronic Performance in Computer Music Compositions},\n\ + \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0092.pdf},\n year\ + \ = {2018}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178891 + doi: 10.5281/zenodo.1302647 + editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' + isbn: 978-1-949373-99-8 issn: 2220-4806 month: June - pages: 221--226 - publisher: 'Goldsmiths, University of London' - title: 'Manhattan: End-User Programming for Music' - url: http://www.nime.org/proceedings/2014/nime2014_371.pdf - year: 2014 + pages: 413--418 + publisher: Virginia Tech + title: Mechatronic Performance in Computer Music Compositions + url: http://www.nime.org/proceedings/2018/nime2018_paper0092.pdf + year: 2018 - ENTRYTYPE: inproceedings - ID: croberts2014 - abstract: 'We describe research enabling the rapid 
creation of digital musical instruments - and their publication to the Internet. This research comprises both high-level - abstractions for making continuous mappings between audio, interactive, and graphical - elements, as well as a centralized database for storing and accessing instruments. - Published instruments run in most devices capable of running a modern web browser. - Notation of instrument design is optimized for readability and expressivity.' - address: 'London, United Kingdom' - author: Charlie Roberts and Matthew Wright and JoAnn Kuchera-Morin and Tobias Höllerer - bibtex: "@inproceedings{croberts2014,\n abstract = {We describe research enabling\ - \ the rapid creation of digital musical instruments and their publication to the\ - \ Internet. This research comprises both high-level abstractions for making continuous\ - \ mappings between audio, interactive, and graphical elements, as well as a centralized\ - \ database for storing and accessing instruments. Published instruments run in\ - \ most devices capable of running a modern web browser. Notation of instrument\ - \ design is optimized for readability and expressivity.},\n address = {London,\ - \ United Kingdom},\n author = {Charlie Roberts and Matthew Wright and JoAnn Kuchera-Morin\ - \ and Tobias H{\\''o}llerer},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178919},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {239--242},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Rapid Creation and Publication of Digital\ - \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_373.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_1 + abstract: 'This paper provides figures and metrics over twenty years of New Interfaces + for Musical Expression conferences, which are derived by analyzing the publicly + available paper proceedings. 
Besides presenting statistical information and a + bibliometric study, we aim at identifying trends and patterns. The analysis shows + the growth and heterogeneity of the NIME demographic, as well the increase in + research output. The data presented in this paper allows the community to reflect + on several issues such as diversity and sustainability, and it provides insights + to address challenges and set future directions.' + address: 'Shanghai, China' + articleno: 1 + author: 'Fasciani, Stefano and Goode, Jackson' + bibtex: "@inproceedings{NIME21_1,\n abstract = {This paper provides figures and\ + \ metrics over twenty years of New Interfaces for Musical Expression conferences,\ + \ which are derived by analyzing the publicly available paper proceedings. Besides\ + \ presenting statistical information and a bibliometric study, we aim at identifying\ + \ trends and patterns. The analysis shows the growth and heterogeneity of the\ + \ NIME demographic, as well the increase in research output. 
The data presented\ + \ in this paper allows the community to reflect on several issues such as diversity\ + \ and sustainability, and it provides insights to address challenges and set future\ + \ directions.},\n address = {Shanghai, China},\n articleno = {1},\n author = {Fasciani,\ + \ Stefano and Goode, Jackson},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.b368bcd5},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/44W7dB7lzQg},\n\ + \ title = {20 NIMEs: Twenty Years of New Interfaces for Musical Expression},\n\ + \ url = {https://nime.pubpub.org/pub/20nimes},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178919 + doi: 10.21428/92fbeb44.b368bcd5 issn: 2220-4806 month: June - pages: 239--242 - publisher: 'Goldsmiths, University of London' - title: Rapid Creation and Publication of Digital Musical Instruments - url: http://www.nime.org/proceedings/2014/nime2014_373.pdf - year: 2014 + presentation-video: https://youtu.be/44W7dB7lzQg + title: '20 NIMEs: Twenty Years of New Interfaces for Musical Expression' + url: https://nime.pubpub.org/pub/20nimes + year: 2021 - ENTRYTYPE: inproceedings - ID: ibukvic2014 - abstract: 'The following paper showcases new integrated Pd-L2Ork system and its - K12 educational counterpart running on Raspberry Pi hardware. A collection of - new externals and abstractions in conjunction with the Modern Device LOP shield - transforms Raspberry Pi into a cost-efficient sensing hub providing Arduino-like - connectivity with 10 digital I/O pins (including both software and hardware implementations - of pulse width modulation) and 8 analog inputs, while offering a number of integrated - features, including audio I/O, USB and Ethernet connectivity and video output.' 
- address: 'London, United Kingdom' - author: Ivica Bukvic - bibtex: "@inproceedings{ibukvic2014,\n abstract = {The following paper showcases\ - \ new integrated Pd-L2Ork system and its K12 educational counterpart running on\ - \ Raspberry Pi hardware. A collection of new externals and abstractions in conjunction\ - \ with the Modern Device LOP shield transforms Raspberry Pi into a cost-efficient\ - \ sensing hub providing Arduino-like connectivity with 10 digital I/O pins (including\ - \ both software and hardware implementations of pulse width modulation) and 8\ - \ analog inputs, while offering a number of integrated features, including audio\ - \ I/O, USB and Ethernet connectivity and video output.},\n address = {London,\ - \ United Kingdom},\n author = {Ivica Bukvic},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178726},\n issn = {2220-4806},\n month = {June},\n pages =\ - \ {163--166},\n publisher = {Goldsmiths, University of London},\n title = {Pd-L2Ork\ - \ Raspberry Pi Toolkit as a Comprehensive Arduino Alternative in K-12 and Production\ - \ Scenarios},\n url = {http://www.nime.org/proceedings/2014/nime2014_377.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_10 + abstract: 'This paper investigates how the concept of score has been used in the + NIME community. To this end, we performed a systematic literature review of the + NIME proceedings, analyzing papers in which scores play a central role. We analyzed + the score not as an object per se but in relation to the users and the interactive + system(s). In other words, we primarily looked at the role that scores play in + the performance ecology. For this reason, to analyze the papers, we relied on + ARCAA, a recent framework created to investigate artifact ecologies in computer + music performances. Using the framework, we created a scheme for each paper and + clustered the papers according to similarities. 
Our analysis produced five main + categories that we present and discuss in relation to literature about musical + scores.' + address: 'Shanghai, China' + articleno: 10 + author: 'Masu, Raul and Correia, Nuno N. and Romao, Teresa' + bibtex: "@inproceedings{NIME21_10,\n abstract = {This paper investigates how the\ + \ concept of score has been used in the NIME community. To this end, we performed\ + \ a systematic literature review of the NIME proceedings, analyzing papers in\ + \ which scores play a central role. We analyzed the score not as an object per\ + \ se but in relation to the users and the interactive system(s). In other words,\ + \ we primarily looked at the role that scores play in the performance ecology.\ + \ For this reason, to analyze the papers, we relied on ARCAA, a recent framework\ + \ created to investigate artifact ecologies in computer music performances. Using\ + \ the framework, we created a scheme for each paper and clustered the papers according\ + \ to similarities. Our analysis produced five main categories that we present\ + \ and discuss in relation to literature about musical scores.},\n address = {Shanghai,\ + \ China},\n articleno = {10},\n author = {Masu, Raul and Correia, Nuno N. 
and\ + \ Romao, Teresa},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.3ffad95a},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/j7XmQvDdUPk},\n\ + \ title = {NIME Scores: a Systematic Review of How Scores Have Shaped Performance\ + \ Ecologies in NIME},\n url = {https://nime.pubpub.org/pub/41cj1pyt},\n year =\ + \ {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178726 + doi: 10.21428/92fbeb44.3ffad95a issn: 2220-4806 month: June - pages: 163--166 - publisher: 'Goldsmiths, University of London' - title: Pd-L2Ork Raspberry Pi Toolkit as a Comprehensive Arduino Alternative in K-12 - and Production Scenarios - url: http://www.nime.org/proceedings/2014/nime2014_377.pdf - year: 2014 + presentation-video: https://youtu.be/j7XmQvDdUPk + title: 'NIME Scores: a Systematic Review of How Scores Have Shaped Performance Ecologies + in NIME' + url: https://nime.pubpub.org/pub/41cj1pyt + year: 2021 - ENTRYTYPE: inproceedings - ID: rkleinberger2014 - abstract: 'Vocal Vibrations is a new project by the Opera of the Future group at - the MIT Media Lab that seeks to engage the public in thoughtful singing and vocalizing, - while exploring the relationship between human physiology and the resonant vibrations - of the voice. This paper describes the motivations, the technical implementation, - and the experience design of the Vocal Vibrations public installation. This installation - consists of a space for reflective listening to a vocal composition (the Chapel) - and an interactive space for personal vocal exploration (the Cocoon). In the interactive - experience, the participant also experiences a tangible exteriorization of his - voice by holding the ORB, a handheld device that translates his voice and singing - into tactile vibrations. 
This installation encourages visitors to explore the - physicality and expressivity of their voices in a rich musical context.' - address: 'London, United Kingdom' - author: Charles Holbrow and Elena Jessop and Rebecca Kleinberger - bibtex: "@inproceedings{rkleinberger2014,\n abstract = {Vocal Vibrations is a new\ - \ project by the Opera of the Future group at the MIT Media Lab that seeks to\ - \ engage the public in thoughtful singing and vocalizing, while exploring the\ - \ relationship between human physiology and the resonant vibrations of the voice.\ - \ This paper describes the motivations, the technical implementation, and the\ - \ experience design of the Vocal Vibrations public installation. This installation\ - \ consists of a space for reflective listening to a vocal composition (the Chapel)\ - \ and an interactive space for personal vocal exploration (the Cocoon). In the\ - \ interactive experience, the participant also experiences a tangible exteriorization\ - \ of his voice by holding the ORB, a handheld device that translates his voice\ - \ and singing into tactile vibrations. This installation encourages visitors to\ - \ explore the physicality and expressivity of their voices in a rich musical context.},\n\ - \ address = {London, United Kingdom},\n author = {Charles Holbrow and Elena Jessop\ - \ and Rebecca Kleinberger},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178800},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {431--434},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Vocal Vibrations: A Multisensory Experience\ - \ of the Voice},\n url = {http://www.nime.org/proceedings/2014/nime2014_378.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_11 + abstract: 'This paper presents the development of MapLooper: a live-looping system + for gesture-to-sound mappings. We first reviewed loop-based Digital Musical Instruments + (DMIs). 
We then developed a connectivity infrastructure for wireless embedded + musical instruments with distributed mapping and synchronization. We evaluated + our infrastructure in the context of the real-time constraints of music performance. + We measured a round-trip latency of 4.81 ms when mapping signals at 100 Hz with + embedded libmapper and an average inter-onset delay of 3.03 ms for synchronizing + with Ableton Link. On top of this infrastructure, we developed MapLooper: a live-looping + tool with 2 example musical applications: a harp synthesizer with SuperCollider + and embedded source-filter synthesis with FAUST on ESP32. Our system is based + on a novel approach to mapping, extrapolating from using FIR and IIR filters on + gestural data to using delay-lines as part of the mapping of DMIs. Our system + features rhythmic time quantization and a flexible loop manipulation system for + creative musical exploration. We open-source all of our components.' + address: 'Shanghai, China' + articleno: 11 + author: 'Frisson, Christian and Bredholt, Mathias and Malloch, Joseph and Wanderley, + Marcelo M.' + bibtex: "@inproceedings{NIME21_11,\n abstract = {This paper presents the development\ + \ of MapLooper: a live-looping system for gesture-to-sound mappings. We first\ + \ reviewed loop-based Digital Musical Instruments (DMIs). We then developed a\ + \ connectivity infrastructure for wireless embedded musical instruments with distributed\ + \ mapping and synchronization. We evaluated our infrastructure in the context\ + \ of the real-time constraints of music performance. 
We measured a round-trip\ + \ latency of 4.81 ms when mapping signals at 100 Hz with embedded libmapper and\ + \ an average inter-onset delay of 3.03 ms for synchronizing with Ableton Link.\ + \ On top of this infrastructure, we developed MapLooper: a live-looping tool with\ + \ 2 example musical applications: a harp synthesizer with SuperCollider and embedded\ + \ source-filter synthesis with FAUST on ESP32. Our system is based on a novel\ + \ approach to mapping, extrapolating from using FIR and IIR filters on gestural\ + \ data to using delay-lines as part of the mapping of DMIs. Our system features\ + \ rhythmic time quantization and a flexible loop manipulation system for creative\ + \ musical exploration. We open-source all of our components.},\n address = {Shanghai,\ + \ China},\n articleno = {11},\n author = {Frisson, Christian and Bredholt, Mathias\ + \ and Malloch, Joseph and Wanderley, Marcelo M.},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.21428/92fbeb44.47175201},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/9r0zDJA8qbs},\n title = {MapLooper: Live-looping of distributed\ + \ gesture-to-sound mappings},\n url = {https://nime.pubpub.org/pub/2pqbusk7},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178800 + doi: 10.21428/92fbeb44.47175201 issn: 2220-4806 month: June - pages: 431--434 - publisher: 'Goldsmiths, University of London' - title: 'Vocal Vibrations: A Multisensory Experience of the Voice' - url: http://www.nime.org/proceedings/2014/nime2014_378.pdf - year: 2014 + presentation-video: https://youtu.be/9r0zDJA8qbs + title: 'MapLooper: Live-looping of distributed gesture-to-sound mappings' + url: https://nime.pubpub.org/pub/2pqbusk7 + year: 2021 - ENTRYTYPE: inproceedings - ID: gdublon2014 - abstract: 'We present the FingerSynth, a 
wearable musical instrument made up of - a bracelet and set of rings that enable its player to produce sound by touching - nearly any surface in their environment. Each ring contains a small, independently - controlled exciter transducer commonly used for auditory bone conduction. The - rings sound loudly when they touch a hard object, and are practically silent otherwise. - When a wearer touches their own (or someone else''s) head, the contacted person - hears the sound through bone conduction, inaudible to others. The bracelet contains - a microcontroller, a set of FET transistors, an accelerometer, and a battery. - The microcontroller generates a separate audio signal for each ring, switched - through the FETs, and can take user input through the accelerometer in the form - of taps, flicks, and other gestures. The player controls the envelope and timbre - of the sound by varying the physical pressure and the angle of their finger on - the surface, or by touching differently resonant surfaces. Because its sound is - shaped by direct, physical contact with objects and people, the FingerSynth encourages - players to experiment with the materials around them and with one another, making - music with everything they touch.' - address: 'London, United Kingdom' - author: Gershon Dublon and Joseph A. Paradiso - bibtex: "@inproceedings{gdublon2014,\n abstract = {We present the FingerSynth, a\ - \ wearable musical instrument made up of a bracelet and set of rings that enable\ - \ its player to produce sound by touching nearly any surface in their environment.\ - \ Each ring contains a small, independently controlled exciter transducer commonly\ - \ used for auditory bone conduction. The rings sound loudly when they touch a\ - \ hard object, and are practically silent otherwise. When a wearer touches their\ - \ own (or someone else's) head, the contacted person hears the sound through bone\ - \ conduction, inaudible to others. 
The bracelet contains a microcontroller, a\ - \ set of FET transistors, an accelerometer, and a battery. The microcontroller\ - \ generates a separate audio signal for each ring, switched through the FETs,\ - \ and can take user input through the accelerometer in the form of taps, flicks,\ - \ and other gestures. The player controls the envelope and timbre of the sound\ - \ by varying the physical pressure and the angle of their finger on the surface,\ - \ or by touching differently resonant surfaces. Because its sound is shaped by\ - \ direct, physical contact with objects and people, the FingerSynth encourages\ - \ players to experiment with the materials around them and with one another, making\ - \ music with everything they touch.},\n address = {London, United Kingdom},\n\ - \ author = {Gershon Dublon and Joseph A. Paradiso},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178754},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {134--135},\n publisher = {Goldsmiths, University of London},\n title = {FingerSynth:\ - \ Wearable Transducers for Exploring the Environment and Playing Music Everywhere},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_379.pdf},\n year = {2014}\n\ + ID: NIME21_12 + abstract: 'Assessment of user experience (UX) is increasingly important in music + interaction evaluation, as witnessed in previous NIME reviews describing varied + and idiosyncratic evaluation strategies. This paper focuses on evaluations conducted + in the last four years of NIME (2017 to 2020), compares results to previous research, + and classifies evaluation types to describe how researchers approach and study + UX in NIME. 
While results of this review confirm patterns such as the prominence + of short-term, performer perspective evaluations, and the variety of evaluation + strategies used, they also show that UX-focused evaluations are typically exploratory + and limited to novice performers. Overall, these patterns indicate that current + UX evaluation strategies do not address dynamic factors such as skill development, + the evolution of the performer-instrument relationship, and hedonic and cognitive + aspects of UX. To address such limitations, we discuss a number of less common + tools developed within and outside of NIME that focus on dynamic aspects of UX, + potentially leading to more informative and meaningful evaluation insights.' + address: 'Shanghai, China' + articleno: 12 + author: 'Reimer, P. J. Charles and Wanderley, Marcelo M.' + bibtex: "@inproceedings{NIME21_12,\n abstract = {Assessment of user experience (UX)\ + \ is increasingly important in music interaction evaluation, as witnessed in previous\ + \ NIME reviews describing varied and idiosyncratic evaluation strategies. This\ + \ paper focuses on evaluations conducted in the last four years of NIME (2017\ + \ to 2020), compares results to previous research, and classifies evaluation types\ + \ to describe how researchers approach and study UX in NIME. While results of\ + \ this review confirm patterns such as the prominence of short-term, performer\ + \ perspective evaluations, and the variety of evaluation strategies used, they\ + \ also show that UX-focused evaluations are typically exploratory and limited\ + \ to novice performers. Overall, these patterns indicate that current UX evaluation\ + \ strategies do not address dynamic factors such as skill development, the evolution\ + \ of the performer-instrument relationship, and hedonic and cognitive aspects\ + \ of UX. 
To address such limitations, we discuss a number of less common tools\ + \ developed within and outside of NIME that focus on dynamic aspects of UX, potentially\ + \ leading to more informative and meaningful evaluation insights.},\n address\ + \ = {Shanghai, China},\n articleno = {12},\n author = {Reimer, P. J. Charles and\ + \ Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.807a000f},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/WTaee8NVtPg},\n\ + \ title = {Embracing Less Common Evaluation Strategies for Studying User Experience\ + \ in NIME},\n url = {https://nime.pubpub.org/pub/fidgs435},\n year = {2021}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178754 - issn: 2220-4806 - month: June - pages: 134--135 - publisher: 'Goldsmiths, University of London' - title: 'FingerSynth: Wearable Transducers for Exploring the Environment and Playing - Music Everywhere' - url: http://www.nime.org/proceedings/2014/nime2014_379.pdf - year: 2014 - - -- ENTRYTYPE: inproceedings - ID: fhashimoto2014 - abstract: 'In recent years, there has been an increase in the number of artists - who make use of automated music performances in their music and live concerts. - Automated music performance is a form of music production using programmed musical - notes. Some artists who introduce automated music performance operate parameters - of the sound in their performance for production of their music. In this paper, - we focus on the music production aspects and describe a method that realizes operation - of the sound parameters via computer. Further, in this study, the probability - distribution of the action (i.e., variation of parameters) is obtained within - the music, using Bayesian filters. 
The probability distribution of each piece - of music is transformed by passing through a Markov model. After the probability - distribution is obtained, sound parameters can be automatically controlled. We - have developed a system to reproduce the musical expressions of humans and confirmed - the possibilities of our method.' - address: 'London, United Kingdom' - author: Fumito Hashimoto and Motoki Miura - bibtex: "@inproceedings{fhashimoto2014,\n abstract = {In recent years, there has\ - \ been an increase in the number of artists who make use of automated music performances\ - \ in their music and live concerts. Automated music performance is a form of music\ - \ production using programmed musical notes. Some artists who introduce automated\ - \ music performance operate parameters of the sound in their performance for production\ - \ of their music. In this paper, we focus on the music production aspects and\ - \ describe a method that realizes operation of the sound parameters via computer.\ - \ Further, in this study, the probability distribution of the action (i.e., variation\ - \ of parameters) is obtained within the music, using Bayesian filters. The probability\ - \ distribution of each piece of music is transformed by passing through a Markov\ - \ model. After the probability distribution is obtained, sound parameters can\ - \ be automatically controlled. 
We have developed a system to reproduce the musical\ - \ expressions of humans and confirmed the possibilities of our method.},\n address\ - \ = {London, United Kingdom},\n author = {Fumito Hashimoto and Motoki Miura},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178788},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {347--350},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Operating Sound Parameters Using {Markov} Model and {Bayes}ian\ - \ Filters in Automated Music Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_380.pdf},\n\ - \ year = {2014}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178788 + doi: 10.21428/92fbeb44.807a000f issn: 2220-4806 month: June - pages: 347--350 - publisher: 'Goldsmiths, University of London' - title: Operating Sound Parameters Using Markov Model and Bayesian Filters in Automated - Music Performance - url: http://www.nime.org/proceedings/2014/nime2014_380.pdf - year: 2014 + presentation-video: https://youtu.be/WTaee8NVtPg + title: Embracing Less Common Evaluation Strategies for Studying User Experience + in NIME + url: https://nime.pubpub.org/pub/fidgs435 + year: 2021 - ENTRYTYPE: inproceedings - ID: rgupfinger2014 - abstract: 'Street art opens a new, broad research field in the context of urban - communication and sound aesthetics in public space. The primary focus of this - article is the relevance and effects of using sound technologies and audio devices - to shape urban landscape and soundscape. This paper examines the process of developing - an alternative type of street art that uses sound as its medium. It represents - multiple audio device prototypes, which encourage new chances for street artists - and activists to contribute their messages and signs in public spaces. 
Furthermore, - it documents different approaches to establishing this alternative urban practice - within the street art and new media art field. The findings also expose a research - space for sound and technical interventions in the context of street art.' - address: 'London, United Kingdom' - author: Reinhard Gupfinger and Martin Kaltenbrunner - bibtex: "@inproceedings{rgupfinger2014,\n abstract = {Street art opens a new, broad\ - \ research field in the context of urban communication and sound aesthetics in\ - \ public space. The primary focus of this article is the relevance and effects\ - \ of using sound technologies and audio devices to shape urban landscape and soundscape.\ - \ This paper examines the process of developing an alternative type of street\ - \ art that uses sound as its medium. It represents multiple audio device prototypes,\ - \ which encourage new chances for street artists and activists to contribute their\ - \ messages and signs in public spaces. Furthermore, it documents different approaches\ - \ to establishing this alternative urban practice within the street art and new\ - \ media art field. 
The findings also expose a research space for sound and technical\ - \ interventions in the context of street art.},\n address = {London, United Kingdom},\n\ - \ author = {Reinhard Gupfinger and Martin Kaltenbrunner},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178778},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {577--580},\n publisher = {Goldsmiths, University of London},\n title = {SOUND\ - \ TOSSING Audio Devices in the Context of Street Art},\n url = {http://www.nime.org/proceedings/2014/nime2014_385.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_13 + abstract: 'To tackle digital musical instrument (DMI) longevity and the problem + of the second performer, we proposed the T-Stick Music Creation Project, a series + of musical commissions along with workshops, mentorship, and technical support, + meant to foment composition and performance using the T-Stick and provide an opportunity + to improve technical and pedagogical support for the instrument. Based on the + project’s outcomes, we describe three main contributions: our approach; the artistic + works produced; and analysis of these works demonstrating the T-Stick as actuator, + modulator, and data provider.' + address: 'Shanghai, China' + articleno: 13 + author: 'Fukuda, Takuto and Meneses, Eduardo and West, Travis and Wanderley, Marcelo + M.' + bibtex: "@inproceedings{NIME21_13,\n abstract = {To tackle digital musical instrument\ + \ (DMI) longevity and the problem of the second performer, we proposed the T-Stick\ + \ Music Creation Project, a series of musical commissions along with workshops,\ + \ mentorship, and technical support, meant to foment composition and performance\ + \ using the T-Stick and provide an opportunity to improve technical and pedagogical\ + \ support for the instrument. 
Based on the project’s outcomes, we describe three\ + \ main contributions: our approach; the artistic works produced; and analysis\ + \ of these works demonstrating the T-Stick as actuator, modulator, and data provider.},\n\ + \ address = {Shanghai, China},\n articleno = {13},\n author = {Fukuda, Takuto\ + \ and Meneses, Eduardo and West, Travis and Wanderley, Marcelo M.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.26f33210},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/tfOUMr3p4b4},\n title = {The\ + \ T-Stick Music Creation Project: An approach to building a creative community\ + \ around a DMI},\n url = {https://nime.pubpub.org/pub/7c4qdj4u},\n year = {2021}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178778 + doi: 10.21428/92fbeb44.26f33210 issn: 2220-4806 month: June - pages: 577--580 - publisher: 'Goldsmiths, University of London' - title: SOUND TOSSING Audio Devices in the Context of Street Art - url: http://www.nime.org/proceedings/2014/nime2014_385.pdf - year: 2014 + presentation-video: https://youtu.be/tfOUMr3p4b4 + title: 'The T-Stick Music Creation Project: An approach to building a creative community + around a DMI' + url: https://nime.pubpub.org/pub/7c4qdj4u + year: 2021 - ENTRYTYPE: inproceedings - ID: tmitchell2014 - abstract: 'Wireless technology is growing increasingly prevalent in the development - of new interfaces for live music performance. However, with a number of different - wireless technologies operating in the 2.4 GHz band, there is a high risk of interference - and congestion, which has the potential to severely disrupt live performances. - With its high transmission power, channel bandwidth and throughput, Wi-Fi (IEEE - 802.11) presents an opportunity for highly robust wireless communications. 
This - paper presents our preliminary work optimising the components of a Wi-Fi system - for live performance scenarios. We summarise the manufacture and testing of a - prototype directional antenna that is designed to maximise sensitivity to a performer''s - signal while suppressing interference from elsewhere. We also propose a set of - recommended Wi-Fi configurations to reduce latency and increase throughput. Practical - investigations utilising these arrangements demonstrate a single x-OSC device - achieving a latency of <3 ms and a distributed network of 15 devices achieving - a net throughput of ~4800 packets per second (~320 per device); where each packet - is a 104-byte OSC message containing 16 analogue input channels acquired by the - device.' - address: 'London, United Kingdom' - author: Thomas Mitchell and Sebastian Madgwick and Simon Rankine and Geoffrey Hilton - and Adrian Freed and Andrew Nix - bibtex: "@inproceedings{tmitchell2014,\n abstract = {Wireless technology is growing\ - \ increasingly prevalent in the development of new interfaces for live music performance.\ - \ However, with a number of different wireless technologies operating in the 2.4\ - \ GHz band, there is a high risk of interference and congestion, which has the\ - \ potential to severely disrupt live performances. With its high transmission\ - \ power, channel bandwidth and throughput, Wi-Fi (IEEE 802.11) presents an opportunity\ - \ for highly robust wireless communications. This paper presents our preliminary\ - \ work optimising the components of a Wi-Fi system for live performance scenarios.\ - \ We summarise the manufacture and testing of a prototype directional antenna\ - \ that is designed to maximise sensitivity to a performer's signal while suppressing\ - \ interference from elsewhere. We also propose a set of recommended Wi-Fi configurations\ - \ to reduce latency and increase throughput. 
Practical investigations utilising\ - \ these arrangements demonstrate a single x-OSC device achieving a latency of\ - \ <3 ms and a distributed network of 15 devices achieving a net throughput of\ - \ ~4800 packets per second (~320 per device); where each packet is a 104-byte\ - \ OSC message containing 16 analogue input channels acquired by the device.},\n\ - \ address = {London, United Kingdom},\n author = {Thomas Mitchell and Sebastian\ - \ Madgwick and Simon Rankine and Geoffrey Hilton and Adrian Freed and Andrew Nix},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178875},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {251--256},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Making the Most of Wi-Fi: Optimisations for Robust Wireless\ - \ Live Music Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_386.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_14 + abstract: 'This paper provides initial efforts in developing and evaluating a real-time + movement sonification framework for physical activity practice and learning. Reactive + Video provides an interactive, vision-based, adaptive video playback with auditory + feedback on users'' performance to better support when learning and practicing + new physical skills. We implement the sonification for auditory feedback design + by extending the Web Audio API framework. The current application focuses on Tai-Chi + performance and provides two main audio cues to users for several Tai Chi exercises. + We provide our design approach, implementation, and sound generation and mapping, + specifically for interactive systems with direct video manipulation. Our observations + reveal the relationship between the movement-to-sound mapping and characteristics + of the physical activity.' 
+ address: 'Shanghai, China' + articleno: 14 + author: 'Cavdir, Doga and Clarke, Chris and Chiu, Patrick and Denoue, Laurent and + Kimber, Don' + bibtex: "@inproceedings{NIME21_14,\n abstract = {This paper provides initial efforts\ + \ in developing and evaluating a real-time movement sonification framework for\ + \ physical activity practice and learning. Reactive Video provides an interactive,\ + \ vision-based, adaptive video playback with auditory feedback on users' performance\ + \ to better support when learning and practicing new physical skills. We implement\ + \ the sonification for auditory feedback design by extending the Web Audio API\ + \ framework. The current application focuses on Tai-Chi performance and provides\ + \ two main audio cues to users for several Tai Chi exercises. We provide our design\ + \ approach, implementation, and sound generation and mapping, specifically for\ + \ interactive systems with direct video manipulation. Our observations reveal\ + \ the relationship between the movement-to-sound mapping and characteristics of\ + \ the physical activity.},\n address = {Shanghai, China},\n articleno = {14},\n\ + \ author = {Cavdir, Doga and Clarke, Chris and Chiu, Patrick and Denoue, Laurent\ + \ and Kimber, Don},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.eef53755},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/pbvZI80XgEU},\n\ + \ title = {Reactive Video: Movement Sonification for Learning Physical Activity\ + \ with Adaptive Video Playback},\n url = {https://nime.pubpub.org/pub/dzlsifz6},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178875 + doi: 10.21428/92fbeb44.eef53755 issn: 2220-4806 month: June - pages: 251--256 - publisher: 'Goldsmiths, University of London' - title: 'Making the Most of Wi-Fi: 
Optimisations for Robust Wireless Live Music Performance' - url: http://www.nime.org/proceedings/2014/nime2014_386.pdf - year: 2014 + presentation-video: https://youtu.be/pbvZI80XgEU + title: 'Reactive Video: Movement Sonification for Learning Physical Activity with + Adaptive Video Playback' + url: https://nime.pubpub.org/pub/dzlsifz6 + year: 2021 - ENTRYTYPE: inproceedings - ID: mmainsbridge2014 - abstract: 'This paper explores the challenge of achieving nuanced control and physical - engagement with gestural interfaces in performance. Performances with a prototype - gestural performance system, Gestate, provide the basis for insights into the - application of gestural systems in live contexts. These reflections stem from - a performer''s perspective, outlining the experience of prototyping and performing - with augmented instruments that extend vocal or instrumental technique through - ancillary gestures. Successful implementation of rapidly evolving gestural technologies - in real-time performance calls for new approaches to performing and musicianship, - centred around a growing understanding of the body''s physical and creative potential. - For musicians hoping to incorporate gestural control seamlessly into their performance - practice a balance of technical mastery and kinaesthetic awareness is needed to - adapt existing systems to their own purposes. Within non-tactile systems, visual - feedback mechanisms can support this process by providing explicit visual cues - that compensate for the absence of haptic or tangible feedback. Experience gained - through prototyping and performance can yield a deeper understanding of the broader - nature of gestural control and the way in which performers inhabit their own bodies.' 
- address: 'London, United Kingdom' - author: Mary Mainsbridge and Kirsty Beilharz - bibtex: "@inproceedings{mmainsbridge2014,\n abstract = {This paper explores the\ - \ challenge of achieving nuanced control and physical engagement with gestural\ - \ interfaces in performance. Performances with a prototype gestural performance\ - \ system, Gestate, provide the basis for insights into the application of gestural\ - \ systems in live contexts. These reflections stem from a performer's perspective,\ - \ outlining the experience of prototyping and performing with augmented instruments\ - \ that extend vocal or instrumental technique through ancillary gestures. Successful\ - \ implementation of rapidly evolving gestural technologies in real-time performance\ - \ calls for new approaches to performing and musicianship, centred around a growing\ - \ understanding of the body's physical and creative potential. For musicians hoping\ - \ to incorporate gestural control seamlessly into their performance practice a\ - \ balance of technical mastery and kinaesthetic awareness is needed to adapt existing\ - \ systems to their own purposes. Within non-tactile systems, visual feedback mechanisms\ - \ can support this process by providing explicit visual cues that compensate for\ - \ the absence of haptic or tangible feedback. 
Experience gained through prototyping\ - \ and performance can yield a deeper understanding of the broader nature of gestural\ - \ control and the way in which performers inhabit their own bodies.},\n address\ - \ = {London, United Kingdom},\n author = {Mary Mainsbridge and Kirsty Beilharz},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178859},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {110--113},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Body As Instrument: Performing with Gestural Interfaces},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_393.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_15 + abstract: 'We present hyper-hybrid flute, a new interface which can be toggled between + its electronic mode and its acoustic mode. In its acoustic mode, the interface + is identical to the regular six-hole recorder. In its electronic mode, the interface + detects the player''s fingering and breath velocity and translates them to MIDI + messages. Specifically, it maps higher breath velocity to higher octaves, with + the modulo remainder controlling the microtonal pitch bend. This novel mapping + reproduces a highly realistic flute-playing experience. Furthermore, changing + the parameters easily augments the interface into a hyperinstrument that allows + the player to control microtones more expressively via breathing techniques.' + address: 'Shanghai, China' + articleno: 15 + author: 'Chin, Daniel and Zhang, Ian and Xia, Gus' + bibtex: "@inproceedings{NIME21_15,\n abstract = {We present hyper-hybrid flute,\ + \ a new interface which can be toggled between its electronic mode and its acoustic\ + \ mode. In its acoustic mode, the interface is identical to the regular six-hole\ + \ recorder. In its electronic mode, the interface detects the player's fingering\ + \ and breath velocity and translates them to MIDI messages. 
Specifically, it maps\ + \ higher breath velocity to higher octaves, with the modulo remainder controlling\ + \ the microtonal pitch bend. This novel mapping reproduces a highly realistic\ + \ flute-playing experience. Furthermore, changing the parameters easily augments\ + \ the interface into a hyperinstrument that allows the player to control microtones\ + \ more expressively via breathing techniques.},\n address = {Shanghai, China},\n\ + \ articleno = {15},\n author = {Chin, Daniel and Zhang, Ian and Xia, Gus},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.c09d91be},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/UIqsYK9F4xo},\n title = {Hyper-hybrid\ + \ Flute: Simulating and Augmenting How Breath Affects Octave and Microtone},\n\ + \ url = {https://nime.pubpub.org/pub/eshr},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178859 + doi: 10.21428/92fbeb44.c09d91be issn: 2220-4806 month: June - pages: 110--113 - publisher: 'Goldsmiths, University of London' - title: 'Body As Instrument: Performing with Gestural Interfaces' - url: http://www.nime.org/proceedings/2014/nime2014_393.pdf - year: 2014 + presentation-video: https://youtu.be/UIqsYK9F4xo + title: 'Hyper-hybrid Flute: Simulating and Augmenting How Breath Affects Octave + and Microtone' + url: https://nime.pubpub.org/pub/eshr + year: 2021 - ENTRYTYPE: inproceedings - ID: hportner2014 - abstract: 'The Chimaera is a touch-less, expressive, polyphonic and electronic music - controller based on magnetic field sensing. An array of hall-effect sensors and - their vicinity make up a continuous 2D interaction space. The sensors are excited - with Neodymium magnets worn on fingers. 
The device continuously tracks position - and vicinity of multiple present magnets along the sensor array to produce event - signals accordingly. Apart from the two positional signals, an event also carries - the magnetic field polarization, a unique identifier and group association. We - like to think of it as a mixed analog/digital offspring of theremin and trautonium. - These general-purpose event signals are transmitted and eventually translated - into musical events according to custom mappings on a host system. With its touch-less - control (no friction), high update rates (2-4kHz), its quasi-continuous spatial - resolution and its low-latency (<1 ms), the Chimaera can react to most subtle - motions instantaneously and allows for a highly dynamic and expressive play. Its - open source design additionally gives the user all possibilities to further tune - hardware and firmware to his or her needs. The Chimaera is network-oriented and - configured with and communicated by OSC (Open Sound Control), which makes it straight-forward - to integrate into any setup.' - address: 'London, United Kingdom' - author: Hanspeter Portner - bibtex: "@inproceedings{hportner2014,\n abstract = {The Chimaera is a touch-less,\ - \ expressive, polyphonic and electronic music controller based on magnetic field\ - \ sensing. An array of hall-effect sensors and their vicinity make up a continuous\ - \ 2D interaction space. The sensors are excited with Neodymium magnets worn on\ - \ fingers. The device continuously tracks position and vicinity of multiple present\ - \ magnets along the sensor array to produce event signals accordingly. Apart from\ - \ the two positional signals, an event also carries the magnetic field polarization,\ - \ a unique identifier and group association. We like to think of it as a mixed\ - \ analog/digital offspring of theremin and trautonium. 
These general-purpose event\ - \ signals are transmitted and eventually translated into musical events according\ - \ to custom mappings on a host system. With its touch-less control (no friction),\ - \ high update rates (2-4kHz), its quasi-continuous spatial resolution and its\ - \ low-latency (<1 ms), the Chimaera can react to most subtle motions instantaneously\ - \ and allows for a highly dynamic and expressive play. Its open source design\ - \ additionally gives the user all possibilities to further tune hardware and firmware\ - \ to his or her needs. The Chimaera is network-oriented and configured with and\ - \ communicated by OSC (Open Sound Control), which makes it straight-forward to\ - \ integrate into any setup.},\n address = {London, United Kingdom},\n author =\ - \ {Hanspeter Portner},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178909},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {501--504},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {CHIMAERA The Poly-Magneto-Phonic Theremin\ - \ An Expressive Touch-Less Hall-Effect Sensor Array},\n url = {http://www.nime.org/proceedings/2014/nime2014_397.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_16 + abstract: 'This paper examines grid interfaces which are currently used in many + musical devices and instruments. This type of interface concept has been rooted + in the NIME community since the early 2000s. We provide an overview of research + projects and commercial products and conducted an expert interview as well as + an online survey. In summary this work shares: (1) an overview on grid controller + research, (2) a set of three usability issues deduced by a multi method approach, + and (3) an evaluation of user perceptions regarding persistent usability issues + and common reasons for the use of grid interfaces.' 
+ address: 'Shanghai, China' + articleno: 16 + author: 'Rossmy, Beat and Wiethoff, Alexander' + bibtex: "@inproceedings{NIME21_16,\n abstract = {This paper examines grid interfaces\ + \ which are currently used in many musical devices and instruments. This type\ + \ of interface concept has been rooted in the NIME community since the early 2000s.\ + \ We provide an overview of research projects and commercial products and conducted\ + \ an expert interview as well as an online survey. In summary this work shares:\ + \ (1) an overview on grid controller research, (2) a set of three usability issues\ + \ deduced by a multi method approach, and (3) an evaluation of user perceptions\ + \ regarding persistent usability issues and common reasons for the use of grid\ + \ interfaces.},\n address = {Shanghai, China},\n articleno = {16},\n author =\ + \ {Rossmy, Beat and Wiethoff, Alexander},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.6a2451e6},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/GuPIz2boJwA},\n\ + \ title = {Musical Grid Interfaces: Past, Present, and Future Directions},\n url\ + \ = {https://nime.pubpub.org/pub/grid-past-present-future},\n year = {2021}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178909 + doi: 10.21428/92fbeb44.6a2451e6 issn: 2220-4806 month: June - pages: 501--504 - publisher: 'Goldsmiths, University of London' - title: CHIMAERA The Poly-Magneto-Phonic Theremin An Expressive Touch-Less Hall-Effect - Sensor Array - url: http://www.nime.org/proceedings/2014/nime2014_397.pdf - year: 2014 + presentation-video: https://youtu.be/GuPIz2boJwA + title: 'Musical Grid Interfaces: Past, Present, and Future Directions' + url: https://nime.pubpub.org/pub/grid-past-present-future + year: 2021 - ENTRYTYPE: inproceedings - ID: twebster12014 - 
abstract: 'This paper introduces the OWL stage effects pedal and aims to present - the device within the context of Human Computer Interaction (HCI) research. The - OWL is a dedicated, programmable audio device designed to provide an alternative - to the use of laptop computers for bespoke audio processing on stage for music - performance. By creating a software framework that allows the user to program - their own code for the hardware in C++, the OWL project makes it possible to use - homemade audio processing on stage without the need for a laptop running a computer - music environment such as Pure Data or Supercollider. Moving away from the general-purpose - computer to a dedicated audio device means that some of the potential problems - and technical complexity of performing with a laptop computer onstage can be avoided, - allowing the user to focus more of their attention on the musical performance. - Within the format of a traditional guitar ''stomp box'', the OWL aims to integrate - seamlessly into a guitarist''s existing pedal board setup, and in this way presents - as an example of a ubiquitous and tangible computing device -a programmable computer - designed to fit into an existing mode of musical performance whilst being transparent - in use.' - address: 'London, United Kingdom' - author: Thomas Webster and Guillaume LeNost and Martin Klang - bibtex: "@inproceedings{twebster12014,\n abstract = {This paper introduces the OWL\ - \ stage effects pedal and aims to present the device within the context of Human\ - \ Computer Interaction (HCI) research. The OWL is a dedicated, programmable audio\ - \ device designed to provide an alternative to the use of laptop computers for\ - \ bespoke audio processing on stage for music performance. 
By creating a software\ - \ framework that allows the user to program their own code for the hardware in\ - \ C++, the OWL project makes it possible to use homemade audio processing on stage\ - \ without the need for a laptop running a computer music environment such as Pure\ - \ Data or Supercollider. Moving away from the general-purpose computer to a dedicated\ - \ audio device means that some of the potential problems and technical complexity\ - \ of performing with a laptop computer onstage can be avoided, allowing the user\ - \ to focus more of their attention on the musical performance. Within the format\ - \ of a traditional guitar 'stomp box', the OWL aims to integrate seamlessly into\ - \ a guitarist's existing pedal board setup, and in this way presents as an example\ - \ of a ubiquitous and tangible computing device -a programmable computer designed\ - \ to fit into an existing mode of musical performance whilst being transparent\ - \ in use.},\n address = {London, United Kingdom},\n author = {Thomas Webster and\ - \ Guillaume LeNost and Martin Klang},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178979},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {621--624},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {The OWL programmable stage effects pedal:\ - \ Revising the concept of the on-stage computer for live music performance},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_399.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_17 + abstract: 'Musical grid interfaces such as the monome grid have developed into standard + interfaces for musical equipment over the last 15 years. However, the types of + possible interactions more or less remained the same, only expanding grid capabilities + by external IO elements. 
Therefore, we propose to transfer capacitive touch technology + to grid devices to expand their input capabilities by combining tangible and capacitive-touch + based interaction paradigms. This enables to keep the generic nature of grid interfaces + which is a key feature for many users. In this paper we present the TouchGrid + concept and share our proof-of-concept implementation as well as an expert evaluation + regarding the general concept of touch interaction used on grid devices. TouchGrid + provides swipe and bezel interaction derived from smart phone interfaces to allow + navigation between applications and access to menu systems in a familiar way.' + address: 'Shanghai, China' + articleno: 17 + author: 'Rossmy, Beat and Unger, Sebastian and Wiethoff, Alexander' + bibtex: "@inproceedings{NIME21_17,\n abstract = {Musical grid interfaces such as\ + \ the monome grid have developed into standard interfaces for musical equipment\ + \ over the last 15 years. However, the types of possible interactions more or\ + \ less remained the same, only expanding grid capabilities by external IO elements.\ + \ Therefore, we propose to transfer capacitive touch technology to grid devices\ + \ to expand their input capabilities by combining tangible and capacitive-touch\ + \ based interaction paradigms. This enables to keep the generic nature of grid\ + \ interfaces which is a key feature for many users. In this paper we present the\ + \ TouchGrid concept and share our proof-of-concept implementation as well as an\ + \ expert evaluation regarding the general concept of touch interaction used on\ + \ grid devices. 
TouchGrid provides swipe and bezel interaction derived from smart\ + \ phone interfaces to allow navigation between applications and access to menu\ + \ systems in a familiar way.},\n address = {Shanghai, China},\n articleno = {17},\n\ + \ author = {Rossmy, Beat and Unger, Sebastian and Wiethoff, Alexander},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.303223db},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/ti2h_WK5NeU},\n title = {TouchGrid\ + \ – Combining Touch Interaction with Musical Grid Interfaces},\n url = {https://nime.pubpub.org/pub/touchgrid},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178979 + doi: 10.21428/92fbeb44.303223db issn: 2220-4806 month: June - pages: 621--624 - publisher: 'Goldsmiths, University of London' - title: 'The OWL programmable stage effects pedal: Revising the concept of the on-stage - computer for live music performance' - url: http://www.nime.org/proceedings/2014/nime2014_399.pdf - year: 2014 + presentation-video: https://youtu.be/ti2h_WK5NeU + title: TouchGrid – Combining Touch Interaction with Musical Grid Interfaces + url: https://nime.pubpub.org/pub/touchgrid + year: 2021 - ENTRYTYPE: inproceedings - ID: gtorre2014 - abstract: 'In this paper we describe the application of a movement-based design - process for digital musical instruments which led to the development of a prototype - DMI named the Twister. The development is described in two parts. Firstly, we - consider the design of the interface or physical controller. Following this we - describe the development of a specific sonic character, mapping approach and performance. - In both these parts an explicit consideration of the type of movement we would - like the device to engender in performance drove the design choices. 
By considering - these two parts separately we draw attention to two different levels at which - movement might be considered in the design of DMIs; at a general level of ranges - of movement in the creation of the controller and a more specific, but still quite - open, level in the creation of the final instrument and a particular performance. - In light of the results of this process the limitations of existing representations - of movement within the DMI design discourse is discussed. Further, the utility - of a movement focused design approach is discussed.' - address: 'London, United Kingdom' - author: Nicholas Ward and Giuseppe Torre - bibtex: "@inproceedings{gtorre2014,\n abstract = {In this paper we describe the\ - \ application of a movement-based design process for digital musical instruments\ - \ which led to the development of a prototype DMI named the Twister. The development\ - \ is described in two parts. Firstly, we consider the design of the interface\ - \ or physical controller. Following this we describe the development of a specific\ - \ sonic character, mapping approach and performance. In both these parts an explicit\ - \ consideration of the type of movement we would like the device to engender in\ - \ performance drove the design choices. By considering these two parts separately\ - \ we draw attention to two different levels at which movement might be considered\ - \ in the design of DMIs; at a general level of ranges of movement in the creation\ - \ of the controller and a more specific, but still quite open, level in the creation\ - \ of the final instrument and a particular performance. In light of the results\ - \ of this process the limitations of existing representations of movement within\ - \ the DMI design discourse is discussed. 
Further, the utility of a movement focused\ - \ design approach is discussed.},\n address = {London, United Kingdom},\n author\ - \ = {Nicholas Ward and Giuseppe Torre},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178977},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {449--454},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Constraining Movement as a Basis for {DMI}\ - \ Design and Performance.},\n url = {http://www.nime.org/proceedings/2014/nime2014_404.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_18 + abstract: 'Composing is a neglected area of music education. To increase participation, + many technologies provide open-ended interfaces to motivate child autodidactic + use, drawing influence from Papert’s LOGO philosophy to support children’s learning + through play. This paper presents a case study examining which interactions with + Codetta, a LOGO-inspired, block-based music platform, supports children’s creativity + in music composition. Interaction logs were collected from 20 children and correlated + against socially-validated creativity scores. To conclude, we recommend that the + transition between low-level edits and high-level processes should be carefully + scaffolded.' + address: 'Shanghai, China' + articleno: 18 + author: 'Ford, Corey and Bryan-Kinns, Nick and Nash, Chris' + bibtex: "@inproceedings{NIME21_18,\n abstract = {Composing is a neglected area of\ + \ music education. To increase participation, many technologies provide open-ended\ + \ interfaces to motivate child autodidactic use, drawing influence from Papert’s\ + \ LOGO philosophy to support children’s learning through play. This paper presents\ + \ a case study examining which interactions with Codetta, a LOGO-inspired, block-based\ + \ music platform, supports children’s creativity in music composition. 
Interaction\ + \ logs were collected from 20 children and correlated against socially-validated\ + \ creativity scores. To conclude, we recommend that the transition between low-level\ + \ edits and high-level processes should be carefully scaffolded.},\n address =\ + \ {Shanghai, China},\n articleno = {18},\n author = {Ford, Corey and Bryan-Kinns,\ + \ Nick and Nash, Chris},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.e83deee9},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/XpMiDWrxXMU},\n\ + \ title = {Creativity in Children's Digital Music Composition},\n url = {https://nime.pubpub.org/pub/ker5w948},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178977 + doi: 10.21428/92fbeb44.e83deee9 issn: 2220-4806 month: June - pages: 449--454 - publisher: 'Goldsmiths, University of London' - title: Constraining Movement as a Basis for DMI Design and Performance. - url: http://www.nime.org/proceedings/2014/nime2014_404.pdf - year: 2014 + presentation-video: https://youtu.be/XpMiDWrxXMU + title: Creativity in Children's Digital Music Composition + url: https://nime.pubpub.org/pub/ker5w948 + year: 2021 - ENTRYTYPE: inproceedings - ID: mdavies2014 - abstract: 'In this paper we present Improvasher a real-time musical accompaniment - system which creates an automatic mashup to accompany live musical input. Improvasher - is built around two music processing modules, the first, a performance following - technique, makes beat-synchronous predictions of chroma features from a live musical - input. The second, a music mashup system, determines the compatibility between - beat-synchronous chromagrams from different pieces of music. 
Through the combination - of these two techniques, a real-time time predict mashup can be generated towards - a new form of automatic accompaniment for interactive musical performance.' - address: 'London, United Kingdom' - author: Matthew Davies and Adam Stark and Fabien Gouyon and Masataka Goto - bibtex: "@inproceedings{mdavies2014,\n abstract = {In this paper we present Improvasher\ - \ a real-time musical accompaniment system which creates an automatic mashup to\ - \ accompany live musical input. Improvasher is built around two music processing\ - \ modules, the first, a performance following technique, makes beat-synchronous\ - \ predictions of chroma features from a live musical input. The second, a music\ - \ mashup system, determines the compatibility between beat-synchronous chromagrams\ - \ from different pieces of music. Through the combination of these two techniques,\ - \ a real-time time predict mashup can be generated towards a new form of automatic\ - \ accompaniment for interactive musical performance.},\n address = {London, United\ - \ Kingdom},\n author = {Matthew Davies and Adam Stark and Fabien Gouyon and Masataka\ - \ Goto},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178744},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {541--544},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Improvasher: A Real-Time Mashup System for Live Musical\ - \ Input},\n url = {http://www.nime.org/proceedings/2014/nime2014_405.pdf},\n year\ - \ = {2014}\n}\n" + ID: NIME21_19 + abstract: 'Various studies have shown that haptic interfaces could enhance the learning + efficiency in music learning, but most existing studies focus on training motor + skills of instrument playing such as finger motions. 
In this paper, we present + a wearable haptic device to guide diaphragmatic breathing, which can be used in + vocal training as well as the learning of wind instruments. The device is a wearable + strap vest, consisting of a spinal exoskeleton on the back for inhalation and + an elastic belt around the waist for exhalation. We first conducted case studies + to assess how convenient and comfortable to wear the device, and then evaluate + its effectiveness in guiding rhythm and breath. Results show users'' acceptance + of the haptic interface and the potential of haptic guidance in vocal training.' + address: 'Shanghai, China' + articleno: 19 + author: 'Li, Yinmiao and Piao, Ziyue and Xia, Gus' + bibtex: "@inproceedings{NIME21_19,\n abstract = {Various studies have shown that\ + \ haptic interfaces could enhance the learning efficiency in music learning, but\ + \ most existing studies focus on training motor skills of instrument playing such\ + \ as finger motions. In this paper, we present a wearable haptic device to guide\ + \ diaphragmatic breathing, which can be used in vocal training as well as the\ + \ learning of wind instruments. The device is a wearable strap vest, consisting\ + \ of a spinal exoskeleton on the back for inhalation and an elastic belt around\ + \ the waist for exhalation. We first conducted case studies to assess how convenient\ + \ and comfortable to wear the device, and then evaluate its effectiveness in guiding\ + \ rhythm and breath. 
Results show users' acceptance of the haptic interface and\ + \ the potential of haptic guidance in vocal training.},\n address = {Shanghai,\ + \ China},\n articleno = {19},\n author = {Li, Yinmiao and Piao, Ziyue and Xia,\ + \ Gus},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.6d342615},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/-t-u0V-27ng},\n title\ + \ = {A Wearable Haptic Interface for Breath Guidance in Vocal Training},\n url\ + \ = {https://nime.pubpub.org/pub/cgi7t0ta},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178744 + doi: 10.21428/92fbeb44.6d342615 issn: 2220-4806 month: June - pages: 541--544 - publisher: 'Goldsmiths, University of London' - title: 'Improvasher: A Real-Time Mashup System for Live Musical Input' - url: http://www.nime.org/proceedings/2014/nime2014_405.pdf - year: 2014 + presentation-video: https://youtu.be/-t-u0V-27ng + title: A Wearable Haptic Interface for Breath Guidance in Vocal Training + url: https://nime.pubpub.org/pub/cgi7t0ta + year: 2021 - ENTRYTYPE: inproceedings - ID: operrotin2014 - abstract: 'Conceiving digital musical instruments might be challenging in terms - of spectator accessibility. Depending on the interface and the complexity of the - software used as a transition between the controller and sound, a musician performance - can be totally opaque for the audience and loose its interest. This paper examines - the possibility of adding a visual feedback to help the public understanding, - and add expressivity to the performance. It explores the various mapping organizations - between controller and sound, giving different spaces of representation for the - visual feedback. It can be either an amplification of the controller parameters, - or a representation of the related musical parameters. 
Different examples of visualization - are presented and evaluated, taking the Cantor Digitalis as a support. It appears - the representation of musical parameters, little used compared to the representation - of controllers, received a good opinion from the audience, highlighting the musical - intention of the performers.' - address: 'London, United Kingdom' - author: Olivier Perrotin and Christophe d'Alessandro - bibtex: "@inproceedings{operrotin2014,\n abstract = {Conceiving digital musical\ - \ instruments might be challenging in terms of spectator accessibility. Depending\ - \ on the interface and the complexity of the software used as a transition between\ - \ the controller and sound, a musician performance can be totally opaque for the\ - \ audience and loose its interest. This paper examines the possibility of adding\ - \ a visual feedback to help the public understanding, and add expressivity to\ - \ the performance. It explores the various mapping organizations between controller\ - \ and sound, giving different spaces of representation for the visual feedback.\ - \ It can be either an amplification of the controller parameters, or a representation\ - \ of the related musical parameters. Different examples of visualization are presented\ - \ and evaluated, taking the Cantor Digitalis as a support. 
It appears the representation\ - \ of musical parameters, little used compared to the representation of controllers,\ - \ received a good opinion from the audience, highlighting the musical intention\ - \ of the performers.},\n address = {London, United Kingdom},\n author = {Olivier\ - \ Perrotin and Christophe d'Alessandro},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178901},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {605--608},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Visualizing Gestures in the Control of a Digital\ - \ Musical Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_406.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_2 + abstract: 'Ambient sounds such as breaking waves or rustling leaves are sometimes + used in music recording, composition and performance. However, as these sounds + lack a precise pitch, they can not be used melodically. This work describes Aeolis, + a virtual instrument producing pitched tones from a real-time ambient sound input + using subtractive synthesis. The produced tones retain the identifiable timbres + of the ambient sounds. Tones generated using input sounds from various environments, + such as sea waves, leaves rustle and traffic noise, are analyzed. A configuration + for a live in-situ performance is described, consisting of live streaming the + produced sounds. In this configuration, the environment itself acts as a ‘performer’ + of sorts, alongside the Aeolis player, providing both real-time input signals + and complementary visual cues.' + address: 'Shanghai, China' + articleno: 2 + author: 'Arbel, Lior' + bibtex: "@inproceedings{NIME21_2,\n abstract = {Ambient sounds such as breaking\ + \ waves or rustling leaves are sometimes used in music recording, composition\ + \ and performance. However, as these sounds lack a precise pitch, they can not\ + \ be used melodically. 
This work describes Aeolis, a virtual instrument producing\ + \ pitched tones from a real-time ambient sound input using subtractive synthesis.\ + \ The produced tones retain the identifiable timbres of the ambient sounds. Tones\ + \ generated using input sounds from various environments, such as sea waves, leaves\ + \ rustle and traffic noise, are analyzed. A configuration for a live in-situ performance\ + \ is described, consisting of live streaming the produced sounds. In this configuration,\ + \ the environment itself acts as a ‘performer’ of sorts, alongside the Aeolis\ + \ player, providing both real-time input signals and complementary visual cues.},\n\ + \ address = {Shanghai, China},\n articleno = {2},\n author = {Arbel, Lior},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.64f66047},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/C0WEeaYy0tQ},\n title\ + \ = {Aeolis: A Virtual Instrument Producing Pitched Tones With Soundscape Timbres},\n\ + \ url = {https://nime.pubpub.org/pub/c3w33wya},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178901 + doi: 10.21428/92fbeb44.64f66047 issn: 2220-4806 month: June - pages: 605--608 - publisher: 'Goldsmiths, University of London' - title: Visualizing Gestures in the Control of a Digital Musical Instrument - url: http://www.nime.org/proceedings/2014/nime2014_406.pdf - year: 2014 + presentation-video: https://youtu.be/C0WEeaYy0tQ + title: 'Aeolis: A Virtual Instrument Producing Pitched Tones With Soundscape Timbres' + url: https://nime.pubpub.org/pub/c3w33wya + year: 2021 - ENTRYTYPE: inproceedings - ID: ldonovan2014 - abstract: 'This paper presents the Talking Guitar, an electric guitar augmented - with a system which tracks the position of the headstock in real time and uses - that data to control 
the parameters of a formant-filtering effect which impresses - upon the guitar sound a sense of speech. A user study is conducted with the device - to establish an indication of the practicality of using headstock tracking to - control effect parameters and to suggest natural and useful mapping strategies. - Individual movements and gestures are evaluated in order to guide further development - of the system.' - address: 'London, United Kingdom' - author: Liam Donovan and Andrew McPherson - bibtex: "@inproceedings{ldonovan2014,\n abstract = {This paper presents the Talking\ - \ Guitar, an electric guitar augmented with a system which tracks the position\ - \ of the headstock in real time and uses that data to control the parameters of\ - \ a formant-filtering effect which impresses upon the guitar sound a sense of\ - \ speech. A user study is conducted with the device to establish an indication\ - \ of the practicality of using headstock tracking to control effect parameters\ - \ and to suggest natural and useful mapping strategies. Individual movements and\ - \ gestures are evaluated in order to guide further development of the system.},\n\ - \ address = {London, United Kingdom},\n author = {Liam Donovan and Andrew McPherson},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178752},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {351--354},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {The Talking Guitar: Headstock Tracking and Mapping Strategies},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_407.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_20 + abstract: 'The development of technologies for acquisition and display gives access + to a large variety of volumetric (3D) textures, either synthetic or obtained through + tomography. 
They constitute extremely rich data which is usually explored for
    informative purposes, in medical or engineering contexts. We believe that this
    exploration has a strong potential for musical expression. To that extent, we
    propose a design space for the musical exploration of volumetric textures. We
    describe the challenges for its implementation in Virtual and Mixed-Reality and
    we present a case study with an instrument called the Volume Sequencer which we
    analyse using our design space. Finally, we evaluate the impact on expressive
    exploration of two dimensions, namely the amount of visual feedback and the selection
    variability.'
  address: 'Shanghai, China'
  articleno: 20
  author: 'Berthaut, Florent'
  bibtex: "@inproceedings{NIME21_20,\n abstract = {The development of technologies\
    \ for acquisition and display gives access to a large variety of volumetric (3D)\
    \ textures, either synthetic or obtained through tomography. They constitute extremely\
    \ rich data which is usually explored for informative purposes, in medical or\
    \ engineering contexts. We believe that this exploration has a strong potential\
    \ for musical expression. To that extent, we propose a design space for the musical\
    \ exploration of volumetric textures. We describe the challenges for its implementation\
    \ in Virtual and Mixed-Reality and we present a case study with an instrument\
    \ called the Volume Sequencer which we analyse using our design space. 
Finally,\ + \ we evaluate the impact on expressive exploration of two dimensions, namely the\ + \ amount of visual feedback and the selection variability.},\n address = {Shanghai,\ + \ China},\n articleno = {20},\n author = {Berthaut, Florent},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.6607d04f},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/C9EiA3TSUag},\n title = {Musical Exploration\ + \ of Volumetric Textures in Mixed and Virtual Reality},\n url = {https://nime.pubpub.org/pub/sqceyucq},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178752 + doi: 10.21428/92fbeb44.6607d04f issn: 2220-4806 month: June - pages: 351--354 - publisher: 'Goldsmiths, University of London' - title: 'The Talking Guitar: Headstock Tracking and Mapping Strategies' - url: http://www.nime.org/proceedings/2014/nime2014_407.pdf - year: 2014 + presentation-video: https://youtu.be/C9EiA3TSUag + title: Musical Exploration of Volumetric Textures in Mixed and Virtual Reality + url: https://nime.pubpub.org/pub/sqceyucq + year: 2021 - ENTRYTYPE: inproceedings - ID: vzappi2014 - abstract: 'This paper investigates the process of appropriation in digital musical - instrument performance, examining the effect of instrument complexity on the emergence - of personal playing styles. Ten musicians of varying background were given a deliberately - constrained musical instrument, a wooden cube containing a touch/force sensor, - speaker and embedded computer. Each cube was identical in construction, but half - the instruments were configured for two degrees of freedom while the other half - allowed only a single degree. 
Each musician practiced at home and presented two - performances, in which their techniques and reactions were assessed through video, - sensor data logs, questionnaires and interviews. Results show that the addition - of a second degree of freedom had the counterintuitive effect of reducing the - exploration of the instrument''s affordances; this suggested the presence of a - dominant constraint in one of the two configurations which strongly differentiated - the process of appropriation across the two groups of participants.' - address: 'London, United Kingdom' - author: Victor Zappi and Andrew McPherson - bibtex: "@inproceedings{vzappi2014,\n abstract = {This paper investigates the process\ - \ of appropriation in digital musical instrument performance, examining the effect\ - \ of instrument complexity on the emergence of personal playing styles. Ten musicians\ - \ of varying background were given a deliberately constrained musical instrument,\ - \ a wooden cube containing a touch/force sensor, speaker and embedded computer.\ - \ Each cube was identical in construction, but half the instruments were configured\ - \ for two degrees of freedom while the other half allowed only a single degree.\ - \ Each musician practiced at home and presented two performances, in which their\ - \ techniques and reactions were assessed through video, sensor data logs, questionnaires\ - \ and interviews. 
Results show that the addition of a second degree of freedom\ - \ had the counterintuitive effect of reducing the exploration of the instrument's\ - \ affordances; this suggested the presence of a dominant constraint in one of\ - \ the two configurations which strongly differentiated the process of appropriation\ - \ across the two groups of participants.},\n address = {London, United Kingdom},\n\ - \ author = {Victor Zappi and Andrew McPherson},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178993},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {455--460},\n publisher = {Goldsmiths, University of London},\n title = {Dimensionality\ - \ and Appropriation in Digital Musical Instrument Design},\n url = {http://www.nime.org/proceedings/2014/nime2014_409.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_21 + abstract: 'The Girls Electronic Arts Retreat (GEAR) is a STEAM summer camp for ages + 8 - 11. In this paper, we compare and contrast lessons from the first two iterations + of GEAR, including one in-person and one remote session. We introduce our Teaching + Interfaces for Musical Expression (TIME) framework and use our analyses to compose + a list of best practices in TIME development and implementation.' + address: 'Shanghai, China' + articleno: 21 + author: 'Aresty, Abby and Gibson, Rachel' + bibtex: "@inproceedings{NIME21_21,\n abstract = {The Girls Electronic Arts Retreat\ + \ (GEAR) is a STEAM summer camp for ages 8 - 11. In this paper, we compare and\ + \ contrast lessons from the first two iterations of GEAR, including one in-person\ + \ and one remote session. 
We introduce our Teaching Interfaces for Musical Expression\ + \ (TIME) framework and use our analyses to compose a list of best practices in\ + \ TIME development and implementation.},\n address = {Shanghai, China},\n articleno\ + \ = {21},\n author = {Aresty, Abby and Gibson, Rachel},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.25757aca},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/8qeFjNGaEHc},\n title = {Changing GEAR:\ + \ The Girls Electronic Arts Retreat's Teaching Interfaces for Musical Expression},\n\ + \ url = {https://nime.pubpub.org/pub/8lop0zj4},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178993 + doi: 10.21428/92fbeb44.25757aca issn: 2220-4806 month: June - pages: 455--460 - publisher: 'Goldsmiths, University of London' - title: Dimensionality and Appropriation in Digital Musical Instrument Design - url: http://www.nime.org/proceedings/2014/nime2014_409.pdf - year: 2014 + presentation-video: https://youtu.be/8qeFjNGaEHc + title: 'Changing GEAR: The Girls Electronic Arts Retreat''s Teaching Interfaces + for Musical Expression' + url: https://nime.pubpub.org/pub/8lop0zj4 + year: 2021 - ENTRYTYPE: inproceedings - ID: mrodrigues2014 - abstract: 'Digital Musical Instruments (DMIs) have difficulties establishing themselves - after their creation. A huge number of DMIs is presented every year and few of - them actually remain in use. Several causes could explain this reality, among - them the lack of a proper instrumental technique, inadequacy of the traditional - musical notation and the non-existence of a repertoire dedicated to the instrument. - In this paper we present Entoa, the first written music for Intonaspacio, a DMI - we designed in our research project. 
We propose some strategies for mapping data - from sensors to sound processing, in order to accomplish an expressive performance. - Entoa is divided in five different sections that corresponds to five movements. - For each, a different mapping is designed, introducing subtle alterations that - progressively explore the ensemble of features of the instrument. The performer - is then required to adapt his repertoire of gestures along the piece. Indications - are expressed through a gestural notation, where freedom is give to performer - to control certain parameters at specific moments in the music.' - address: 'London, United Kingdom' - author: Clayton Mamedes and Mailis Rodrigues and Marcelo M. Wanderley and Jônatas - Manzolli and Denise H. L. Garcia and Paulo Ferreira-Lopes - bibtex: "@inproceedings{mrodrigues2014,\n abstract = {Digital Musical Instruments\ - \ (DMIs) have difficulties establishing themselves after their creation. A huge\ - \ number of DMIs is presented every year and few of them actually remain in use.\ - \ Several causes could explain this reality, among them the lack of a proper instrumental\ - \ technique, inadequacy of the traditional musical notation and the non-existence\ - \ of a repertoire dedicated to the instrument. In this paper we present Entoa,\ - \ the first written music for Intonaspacio, a DMI we designed in our research\ - \ project. We propose some strategies for mapping data from sensors to sound processing,\ - \ in order to accomplish an expressive performance. Entoa is divided in five different\ - \ sections that corresponds to five movements. For each, a different mapping is\ - \ designed, introducing subtle alterations that progressively explore the ensemble\ - \ of features of the instrument. The performer is then required to adapt his repertoire\ - \ of gestures along the piece. 
Indications are expressed through a gestural notation,\ - \ where freedom is give to performer to control certain parameters at specific\ - \ moments in the music.},\n address = {London, United Kingdom},\n author = {Clayton\ - \ Mamedes and Mailis Rodrigues and Marcelo M. Wanderley and J{\\^o}natas Manzolli\ - \ and Denise H. L. Garcia and Paulo Ferreira-Lopes},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178861},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {509--512},\n publisher = {Goldsmiths, University of London},\n title = {Composing\ - \ for {DMI}s Entoa, a Dedicate Piece for Intonaspacio},\n url = {http://www.nime.org/proceedings/2014/nime2014_411.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_22 + abstract: 'The praxis of using detailed visual models to illustrate complex ideas + is widely used in the sciences but less so in music theory. Taking the composer’s + notes as a starting point, we have developed a complete interactive 3D model of + Grisey’s Talea (1986). Our model presents a novel approach to music education + and theory by making understanding of complex musical structures accessible to + students and non-musicians, particularly those who struggle with traditional means + of learning or whose mode of learning is predominantly visual. The model builds + on the foundations of 1) the historical associations between visual and musical + arts and those concerning spectralists in particular 2) evidence of recurring + cross-modal associations in the general population and consistent associations + for individual synesthetes. Research into educational uses of the model is a topic + for future exploration.' 
+ address: 'Shanghai, China' + articleno: 22 + author: 'Andersen, Anne Sophie and Kwan, Derek' + bibtex: "@inproceedings{NIME21_22,\n abstract = {The praxis of using detailed visual\ + \ models to illustrate complex ideas is widely used in the sciences but less so\ + \ in music theory. Taking the composer’s notes as a starting point, we have developed\ + \ a complete interactive 3D model of Grisey’s Talea (1986). Our model presents\ + \ a novel approach to music education and theory by making understanding of complex\ + \ musical structures accessible to students and non-musicians, particularly those\ + \ who struggle with traditional means of learning or whose mode of learning is\ + \ predominantly visual. The model builds on the foundations of 1) the historical\ + \ associations between visual and musical arts and those concerning spectralists\ + \ in particular 2) evidence of recurring cross-modal associations in the general\ + \ population and consistent associations for individual synesthetes. 
Research\ + \ into educational uses of the model is a topic for future exploration.},\n address\ + \ = {Shanghai, China},\n articleno = {22},\n author = {Andersen, Anne Sophie and\ + \ Kwan, Derek},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.27d09832},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/PGYOkFjyrek},\n\ + \ title = {Grisey’s 'Talea': Musical Representation As An Interactive 3D Map},\n\ + \ url = {https://nime.pubpub.org/pub/oiwz8bb7},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178861 + doi: 10.21428/92fbeb44.27d09832 issn: 2220-4806 month: June - pages: 509--512 - publisher: 'Goldsmiths, University of London' - title: 'Composing for DMIs Entoa, a Dedicate Piece for Intonaspacio' - url: http://www.nime.org/proceedings/2014/nime2014_411.pdf - year: 2014 + presentation-video: https://youtu.be/PGYOkFjyrek + title: 'Grisey’s ''Talea'': Musical Representation As An Interactive 3D Map' + url: https://nime.pubpub.org/pub/oiwz8bb7 + year: 2021 - ENTRYTYPE: inproceedings - ID: dmazzanti2014 - abstract: 'Designing a collaborative performance requires the use of paradigms and - technologies which can deeply influence the whole piece experience. In this paper - we define a set of six variables, and use them to describe and evaluate a number - of platforms for participatory performances. Based on this evaluation, the Augmented - Stage is introduced. Such concept describes how Augmented Reality techniques can - be used to superimpose a performance stage with a virtual environment, populated - with interactive elements. The manipulation of these objects allows spectators - to contribute to the visual and sonic outcome of the performance through their - mobile devices, while keeping their freedom to focus on the stage. 
An interactive - acoustic rock performance based on this concept was staged. Questionnaires distributed - to the audience and performers'' comments have been analyzed, contributing to - an evaluation of the presented concept and platform done through the defined variables.' - address: 'London, United Kingdom' - author: Dario Mazzanti and Victor Zappi and Darwin Caldwell and Andrea Brogni - bibtex: "@inproceedings{dmazzanti2014,\n abstract = {Designing a collaborative performance\ - \ requires the use of paradigms and technologies which can deeply influence the\ - \ whole piece experience. In this paper we define a set of six variables, and\ - \ use them to describe and evaluate a number of platforms for participatory performances.\ - \ Based on this evaluation, the Augmented Stage is introduced. Such concept describes\ - \ how Augmented Reality techniques can be used to superimpose a performance stage\ - \ with a virtual environment, populated with interactive elements. The manipulation\ - \ of these objects allows spectators to contribute to the visual and sonic outcome\ - \ of the performance through their mobile devices, while keeping their freedom\ - \ to focus on the stage. An interactive acoustic rock performance based on this\ - \ concept was staged. 
Questionnaires distributed to the audience and performers'\ - \ comments have been analyzed, contributing to an evaluation of the presented\ - \ concept and platform done through the defined variables.},\n address = {London,\ - \ United Kingdom},\n author = {Dario Mazzanti and Victor Zappi and Darwin Caldwell\ - \ and Andrea Brogni},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178871},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {29--34},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Augmented Stage for Participatory Performances},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_413.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_23 + abstract: 'In this paper we discuss the beneficial aspects of incorporating energy-motion + models as a design pattern in musical interface design. These models can be understood + as archetypes of motion trajectories which are commonly applied in the analysis + and composition of acousmatic music. With the aim of exploring a new possible + paradigm for interface design, our research builds on the parallel investigation + of embodied music cognition theory and the praxis of acousmatic music. After having + run a large study for understanding a listener’s spontaneous rendering of form + and movement, we built a number of digital instruments especially designed to + emphasise a particular energy-motion profile. The evaluation through composition + and performance indicates that this design paradigm can foster musical inventiveness + and expression in the processes of composition and performance of gestural electronic + music.' 
+ address: 'Shanghai, China' + articleno: 23 + author: 'Tomás, Enrique and Gorbach, Thomas and Tellioğlu, Hilda and Kaltenbrunner, + Martin' + bibtex: "@inproceedings{NIME21_23,\n abstract = {In this paper we discuss the beneficial\ + \ aspects of incorporating energy-motion models as a design pattern in musical\ + \ interface design. These models can be understood as archetypes of motion trajectories\ + \ which are commonly applied in the analysis and composition of acousmatic music.\ + \ With the aim of exploring a new possible paradigm for interface design, our\ + \ research builds on the parallel investigation of embodied music cognition theory\ + \ and the praxis of acousmatic music. After having run a large study for understanding\ + \ a listener’s spontaneous rendering of form and movement, we built a number of\ + \ digital instruments especially designed to emphasise a particular energy-motion\ + \ profile. The evaluation through composition and performance indicates that this\ + \ design paradigm can foster musical inventiveness and expression in the processes\ + \ of composition and performance of gestural electronic music.},\n address = {Shanghai,\ + \ China},\n articleno = {23},\n author = {Tomás, Enrique and Gorbach, Thomas and\ + \ Tellioğlu, Hilda and Kaltenbrunner, Martin},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.ce8139a8},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/QDjCEnGYSC4},\n title = {Embodied Gestures: Sculpting Energy-Motion\ + \ Models into Musical Interfaces},\n url = {https://nime.pubpub.org/pub/gsx1wqt5},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178871 + doi: 10.21428/92fbeb44.ce8139a8 issn: 2220-4806 month: June - pages: 29--34 - publisher: 'Goldsmiths, University of London' - title: Augmented 
Stage for Participatory Performances - url: http://www.nime.org/proceedings/2014/nime2014_413.pdf - year: 2014 + presentation-video: https://youtu.be/QDjCEnGYSC4 + title: 'Embodied Gestures: Sculpting Energy-Motion Models into Musical Interfaces' + url: https://nime.pubpub.org/pub/gsx1wqt5 + year: 2021 - ENTRYTYPE: inproceedings - ID: rtubb2014 - abstract: 'This paper outlines a theoretical framework for creative technology based - on two contrasting processes: divergent exploration and convergent optimisation. - We claim that these two cases require different gesture-to-parameter mapping properties. - Results are presented from a user experiment that motivates this theory. The experiment - was conducted using a publicly available iPad app: ``Sonic Zoom''''. Participants - were encouraged to conduct an open ended exploration of synthesis timbre using - a combination of two different interfaces. The first was a standard interface - with ten sliders, hypothesised to be suited to the ``convergent'''' stage of creation. - The second was a mapping of the entire 10-D combinatorial space to a 2-D surface - using a space filling curve. This novel interface was intended to support the - ``divergent'''' aspect of creativity. The paths of around 250 users through both - 2-D and 10-D space were logged and analysed. Both the interaction data and questionnaire - results show that the different interfaces tended to be used for different aspects - of sound creation, and a combination of these two navigation styles was deemed - to be more useful than either individually. The study indicates that the predictable, - separate parameters found in most music technology are more appropriate for convergent - tasks.' 
- address: 'London, United Kingdom' - author: Robert Tubb and Simon Dixon - bibtex: "@inproceedings{rtubb2014,\n abstract = {This paper outlines a theoretical\ - \ framework for creative technology based on two contrasting processes: divergent\ - \ exploration and convergent optimisation. We claim that these two cases require\ - \ different gesture-to-parameter mapping properties. Results are presented from\ - \ a user experiment that motivates this theory. The experiment was conducted using\ - \ a publicly available iPad app: ``Sonic Zoom''. Participants were encouraged\ - \ to conduct an open ended exploration of synthesis timbre using a combination\ - \ of two different interfaces. The first was a standard interface with ten sliders,\ - \ hypothesised to be suited to the ``convergent'' stage of creation. The second\ - \ was a mapping of the entire 10-D combinatorial space to a 2-D surface using\ - \ a space filling curve. This novel interface was intended to support the ``divergent''\ - \ aspect of creativity. The paths of around 250 users through both 2-D and 10-D\ - \ space were logged and analysed. Both the interaction data and questionnaire\ - \ results show that the different interfaces tended to be used for different aspects\ - \ of sound creation, and a combination of these two navigation styles was deemed\ - \ to be more useful than either individually. 
The study indicates that the predictable,\ - \ separate parameters found in most music technology are more appropriate for\ - \ convergent tasks.},\n address = {London, United Kingdom},\n author = {Robert\ - \ Tubb and Simon Dixon},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178967},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {227--232},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {The Divergent Interface: Supporting Creative\ - \ Exploration of Parameter Spaces},\n url = {http://www.nime.org/proceedings/2014/nime2014_415.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_24 + abstract: 'This paper addresses environmental issues around NIME research and practice. + We discuss the formulation of an environmental statement for the conference as + well as the initiation of a NIME Eco Wiki containing information on environmental + concerns related to the creation of new musical instruments. We outline a number + of these concerns and, by systematically reviewing the proceedings of all previous + NIME conferences, identify a general lack of reflection on the environmental impact + of the research undertaken. Finally, we propose a framework for addressing the + making, testing, using, and disposal of NIMEs in the hope that sustainability + may become a central concern to researchers.' + address: 'Shanghai, China' + articleno: 24 + author: 'Masu, Raul and Melbye, Adam Pultz and Sullivan, John and Jensenius, Alexander + Refsum' + bibtex: "@inproceedings{NIME21_24,\n abstract = {This paper addresses environmental\ + \ issues around NIME research and practice. We discuss the formulation of an environmental\ + \ statement for the conference as well as the initiation of a NIME Eco Wiki containing\ + \ information on environmental concerns related to the creation of new musical\ + \ instruments. 
We outline a number of these concerns and, by systematically reviewing\ + \ the proceedings of all previous NIME conferences, identify a general lack of\ + \ reflection on the environmental impact of the research undertaken. Finally,\ + \ we propose a framework for addressing the making, testing, using, and disposal\ + \ of NIMEs in the hope that sustainability may become a central concern to researchers.},\n\ + \ address = {Shanghai, China},\n articleno = {24},\n author = {Masu, Raul and\ + \ Melbye, Adam Pultz and Sullivan, John and Jensenius, Alexander Refsum},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.5725ad8f},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/JE6YqYsV5Oo},\n title = {NIME\ + \ and the Environment: Toward a More Sustainable NIME Practice},\n url = {https://nime.pubpub.org/pub/4bbl5lod},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178967 + doi: 10.21428/92fbeb44.5725ad8f issn: 2220-4806 month: June - pages: 227--232 - publisher: 'Goldsmiths, University of London' - title: 'The Divergent Interface: Supporting Creative Exploration of Parameter Spaces' - url: http://www.nime.org/proceedings/2014/nime2014_415.pdf - year: 2014 + presentation-video: https://youtu.be/JE6YqYsV5Oo + title: 'NIME and the Environment: Toward a More Sustainable NIME Practice' + url: https://nime.pubpub.org/pub/4bbl5lod + year: 2021 - ENTRYTYPE: inproceedings - ID: sfavilla2014 - abstract: 'This paper presents new touch-screen collaborative music interaction - for people with dementia. The authors argue that dementia technology has yet to - focus on collaborative multi-user group musical interactions. The project aims - to contribute to dementia care while addressing a significant gap in current literature. 
- Two trials explore contrasting musical scenarios: the performance of abstract - electronic music and the distributed performance of J.S. Bach''s Goldberg Variations. - Findings presented in this paper; demonstrate that people with dementia can successfully - perform and engage in collaborative music performance activities with little or - no scaffolded instruction. Further findings suggest that people with dementia - can develop and retain musical performance skill over time. This paper proposes - a number of guidelines and design solutions.' - address: 'London, United Kingdom' - author: Stu Favilla and Sonja Pedell - bibtex: "@inproceedings{sfavilla2014,\n abstract = {This paper presents new touch-screen\ - \ collaborative music interaction for people with dementia. The authors argue\ - \ that dementia technology has yet to focus on collaborative multi-user group\ - \ musical interactions. The project aims to contribute to dementia care while\ - \ addressing a significant gap in current literature. Two trials explore contrasting\ - \ musical scenarios: the performance of abstract electronic music and the distributed\ - \ performance of J.S. Bach's Goldberg Variations. Findings presented in this paper;\ - \ demonstrate that people with dementia can successfully perform and engage in\ - \ collaborative music performance activities with little or no scaffolded instruction.\ - \ Further findings suggest that people with dementia can develop and retain musical\ - \ performance skill over time. 
This paper proposes a number of guidelines and\ - \ design solutions.},\n address = {London, United Kingdom},\n author = {Stu Favilla\ - \ and Sonja Pedell},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178760},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {35--39},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Touch Screen Collaborative Music: Designing\ - \ NIME for Older People with Dementia},\n url = {http://www.nime.org/proceedings/2014/nime2014_417.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_25 + abstract: 'The Global Hyperorgan is an intercontinental, creative space for acoustic + musicking. Existing pipe organs around the world are networked for real-time, + geographically-distant performance, with performers utilizing instruments and + other input devices to collaborate musically through the voices of the pipes in + each location. A pilot study was carried out in January 2021, connecting two large + pipe organs in Piteå, Sweden, and Amsterdam, the Netherlands. A quartet of performers + tested the Global Hyperorgan’s capacities for telematic musicking through a series + of pieces. The concept of modularity is useful when considering the artistic challenges + and possibilities of the Global Hyperorgan. We observe how the modular system + utilized in the pilot study afforded multiple experiences of shared instrumentality + from which new, synthetic voices emerge. As a long-term technological, artistic + and social research project, the Global Hyperorgan offers a platform for exploring + technology, agency, voice, and intersubjectivity in hyper-acoustic telematic musicking.' 
+ address: 'Shanghai, China' + articleno: 25 + author: 'Harlow, Randall and Petersson, Mattias and Ek, Robert and Visi, Federico + and Östersjö, Stefan' + bibtex: "@inproceedings{NIME21_25,\n abstract = {The Global Hyperorgan is an intercontinental,\ + \ creative space for acoustic musicking. Existing pipe organs around the world\ + \ are networked for real-time, geographically-distant performance, with performers\ + \ utilizing instruments and other input devices to collaborate musically through\ + \ the voices of the pipes in each location. A pilot study was carried out in January\ + \ 2021, connecting two large pipe organs in Piteå, Sweden, and Amsterdam, the\ + \ Netherlands. A quartet of performers tested the Global Hyperorgan’s capacities\ + \ for telematic musicking through a series of pieces. The concept of modularity\ + \ is useful when considering the artistic challenges and possibilities of the\ + \ Global Hyperorgan. We observe how the modular system utilized in the pilot study\ + \ afforded multiple experiences of shared instrumentality from which new, synthetic\ + \ voices emerge. 
As a long-term technological, artistic and social research project,\ + \ the Global Hyperorgan offers a platform for exploring technology, agency, voice,\ + \ and intersubjectivity in hyper-acoustic telematic musicking.},\n address = {Shanghai,\ + \ China},\n articleno = {25},\n author = {Harlow, Randall and Petersson, Mattias\ + \ and Ek, Robert and Visi, Federico and Östersjö, Stefan},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.d4146b2d},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/t88aIXdqBWQ},\n title = {Global Hyperorgan:\ + \ a platform for telematic musicking and research},\n url = {https://nime.pubpub.org/pub/a626cbqh},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178760 + doi: 10.21428/92fbeb44.d4146b2d issn: 2220-4806 month: June - pages: 35--39 - publisher: 'Goldsmiths, University of London' - title: 'Touch Screen Collaborative Music: Designing NIME for Older People with Dementia' - url: http://www.nime.org/proceedings/2014/nime2014_417.pdf - year: 2014 + presentation-video: https://youtu.be/t88aIXdqBWQ + title: 'Global Hyperorgan: a platform for telematic musicking and research' + url: https://nime.pubpub.org/pub/a626cbqh + year: 2021 - ENTRYTYPE: inproceedings - ID: jeaton2014 - abstract: 'The Space Between Us is a live performance piece for vocals, piano and - live electronics using a Brain-Computer Music Interface system for emotional control - of the score. The system not only aims to reflect emotional states but to direct - and induce emotional states through the real-time generation of the score, highlighting - the potential of direct neural-emotional manipulation in live performance. 
The - EEG of the vocalist and one audience member is measured throughout the performance - and the system generates a real-time score based on mapping the emotional features - within the EEG. We measure the two emotional descriptors, valence and arousal, - within EEG and map the two-dimensional correlate of averaged windows to musical - phrases. These pre-composed phrases contain associated emotional content based - on the KTH Performance Rules System (Director Musices). The piece is in three - movements, the first two are led by the emotions of each subject respectively, - whilst the third movement interpolates the combined response of the performer - and audience member. The system not only aims to reflect the individuals'' emotional - states but also attempts to induce a shared emotional experience by drawing the - two responses together. This work highlights the potential available in affecting - neural-emotional manipulation within live performance and demonstrates a new approach - to real-time, affectively-driven composition.' - address: 'London, United Kingdom' - author: Joel Eaton and Weiwei Jin and Eduardo Miranda - bibtex: "@inproceedings{jeaton2014,\n abstract = {The Space Between Us is a live\ - \ performance piece for vocals, piano and live electronics using a Brain-Computer\ - \ Music Interface system for emotional control of the score. The system not only\ - \ aims to reflect emotional states but to direct and induce emotional states through\ - \ the real-time generation of the score, highlighting the potential of direct\ - \ neural-emotional manipulation in live performance. The EEG of the vocalist and\ - \ one audience member is measured throughout the performance and the system generates\ - \ a real-time score based on mapping the emotional features within the EEG. We\ - \ measure the two emotional descriptors, valence and arousal, within EEG and map\ - \ the two-dimensional correlate of averaged windows to musical phrases. 
These\ - \ pre-composed phrases contain associated emotional content based on the KTH Performance\ - \ Rules System (Director Musices). The piece is in three movements, the first\ - \ two are led by the emotions of each subject respectively, whilst the third movement\ - \ interpolates the combined response of the performer and audience member. The\ - \ system not only aims to reflect the individuals' emotional states but also attempts\ - \ to induce a shared emotional experience by drawing the two responses together.\ - \ This work highlights the potential available in affecting neural-emotional manipulation\ - \ within live performance and demonstrates a new approach to real-time, affectively-driven\ - \ composition.},\n address = {London, United Kingdom},\n author = {Joel Eaton\ - \ and Weiwei Jin and Eduardo Miranda},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178756},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {593--596},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {The Space Between Us. A Live Performance with\ - \ Musical Score Generated via Emotional Levels Measured in {EEG} of One Performer\ - \ and an Audience Member},\n url = {http://www.nime.org/proceedings/2014/nime2014_418.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_26 + abstract: 'The nature of digital musical instruments (DMIs), often bespoke artefacts + devised by single or small groups of technologists, requires thought about how + they are shared and archived so that others can replicate or adapt designs. The + ability for replication contributes to an instrument’s longevity and creates opportunities + for both DMI designers and researchers. Research papers often omit necessary knowledge + for replicating research artefacts, but we argue that mitigating this situation + is not just about including design materials and documentation. 
Our way of approaching + this issue is by drawing on an age-old method as a way of disseminating knowledge, + the apprenticeship. We propose the DMI apprenticeship as a way of exploring the + procedural obstacles of replicating DMIs, while highlighting for both apprentice + and designer the elements of knowledge that are a challenge to communicate in + conventional documentation. Our own engagement with the DMI apprenticeship led + to successfully replicating an instrument, Strummi. Framing this process as an + apprenticeship highlighted the non-obvious areas of the documentation and manufacturing + process that are crucial in the successful replication of a DMI.' + address: 'Shanghai, China' + articleno: 26 + author: 'Zayas-Garin, Luis and Harrison, Jacob and Jack, Robert and McPherson, Andrew' + bibtex: "@inproceedings{NIME21_26,\n abstract = {The nature of digital musical instruments\ + \ (DMIs), often bespoke artefacts devised by single or small groups of technologists,\ + \ requires thought about how they are shared and archived so that others can replicate\ + \ or adapt designs. The ability for replication contributes to an instrument’s\ + \ longevity and creates opportunities for both DMI designers and researchers.\ + \ Research papers often omit necessary knowledge for replicating research artefacts,\ + \ but we argue that mitigating this situation is not just about including design\ + \ materials and documentation. Our way of approaching this issue is by drawing\ + \ on an age-old method as a way of disseminating knowledge, the apprenticeship.\ + \ We propose the DMI apprenticeship as a way of exploring the procedural obstacles\ + \ of replicating DMIs, while highlighting for both apprentice and designer the\ + \ elements of knowledge that are a challenge to communicate in conventional documentation.\ + \ Our own engagement with the DMI apprenticeship led to successfully replicating\ + \ an instrument, Strummi. 
Framing this process as an apprenticeship highlighted\ + \ the non-obvious areas of the documentation and manufacturing process that are\ + \ crucial in the successful replication of a DMI.},\n address = {Shanghai, China},\n\ + \ articleno = {26},\n author = {Zayas-Garin, Luis and Harrison, Jacob and Jack,\ + \ Robert and McPherson, Andrew},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.87f1d63e},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/zTMaubJjlzA},\n\ + \ title = {DMI Apprenticeship: Sharing and Replicating Musical Artefacts},\n url\ + \ = {https://nime.pubpub.org/pub/dmiapprenticeship},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178756 + doi: 10.21428/92fbeb44.87f1d63e issn: 2220-4806 month: June - pages: 593--596 - publisher: 'Goldsmiths, University of London' - title: The Space Between Us. A Live Performance with Musical Score Generated via - Emotional Levels Measured in EEG of One Performer and an Audience Member - url: http://www.nime.org/proceedings/2014/nime2014_418.pdf - year: 2014 + presentation-video: https://youtu.be/zTMaubJjlzA + title: 'DMI Apprenticeship: Sharing and Replicating Musical Artefacts' + url: https://nime.pubpub.org/pub/dmiapprenticeship + year: 2021 - ENTRYTYPE: inproceedings - ID: jmathew2014 - abstract: 'Recent technological improvements in audio reproduction systems increased - the possibilities to spatialize sources in a listening environment. The spatialization - of reproduced audio is however highly dependent on the recording technique, the - rendering method, and the loudspeaker configuration. 
While object-based audio - production has proven to reduce the dependency on loudspeaker configurations, - authoring tools are still considered to be difficult to interact with in current - production environments. In this paper, we investigate the issues of spatialization - techniques for object-based audio production and introduce the Spatial Audio Design - Spaces (SpADS) framework, that provides insights into the spatial manipulation - of object-based audio. Based on interviews with professional sound engineers, - this morphological analysis clarifies the relationships between recording and - rendering techniques that define audio-objects for 3D speaker configurations, - allowing the analysis and the design of advanced object-based controllers as well.' - address: 'London, United Kingdom' - author: Justin Mathew and Stéphane Huot and Alan Blum - bibtex: "@inproceedings{jmathew2014,\n abstract = {Recent technological improvements\ - \ in audio reproduction systems increased the possibilities to spatialize sources\ - \ in a listening environment. The spatialization of reproduced audio is however\ - \ highly dependent on the recording technique, the rendering method, and the loudspeaker\ - \ configuration. While object-based audio production has proven to reduce the\ - \ dependency on loudspeaker configurations, authoring tools are still considered\ - \ to be difficult to interact with in current production environments. 
In this\ - \ paper, we investigate the issues of spatialization techniques for object-based\ - \ audio production and introduce the Spatial Audio Design Spaces (SpADS) framework,\ - \ that provides insights into the spatial manipulation of object-based audio.\ - \ Based on interviews with professional sound engineers, this morphological analysis\ - \ clarifies the relationships between recording and rendering techniques that\ - \ define audio-objects for 3D speaker configurations, allowing the analysis and\ - \ the design of advanced object-based controllers as well.},\n address = {London,\ - \ United Kingdom},\n author = {Justin Mathew and St{\\'e}phane Huot and Alan Blum},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178865},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {415--420},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {A Morphological Analysis of Audio-Objects and their Control\ - \ Methods for {3D} Audio},\n url = {http://www.nime.org/proceedings/2014/nime2014_420.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_27 + abstract: 'This paper presents the soma design process of creating Body Electric: + a novel interface for the capture and use of biofeedback signals and physiological + changes generated in the body by breathing, during singing. This NIME design is + grounded in the performer''s experience of, and relationship to, their body and + their voice. We show that NIME design using principles from soma design can offer + creative opportunities in developing novel sensing mechanisms, which can in turn + inform composition and further elicit curious engagements between performer and + artefact, disrupting notions of performer-led control. 
As contributions, this + work 1) offers an example of NIME design for situated living, feeling, performing + bodies, and 2) presents the rich potential of soma design as a path for designing + in this context.' + address: 'Shanghai, China' + articleno: 27 + author: 'Cotton, Kelsey and Sanches, Pedro and Tsaknaki, Vasiliki and Karpashevich, + Pavel' + bibtex: "@inproceedings{NIME21_27,\n abstract = {This paper presents the soma design\ + \ process of creating Body Electric: a novel interface for the capture and use\ + \ of biofeedback signals and physiological changes generated in the body by breathing,\ + \ during singing. This NIME design is grounded in the performer's experience of,\ + \ and relationship to, their body and their voice. We show that NIME design using\ + \ principles from soma design can offer creative opportunities in developing novel\ + \ sensing mechanisms, which can in turn inform composition and further elicit\ + \ curious engagements between performer and artefact, disrupting notions of performer-led\ + \ control. 
As contributions, this work 1) offers an example of NIME design for\ + \ situated living, feeling, performing bodies, and 2) presents the rich potential\ + \ of soma design as a path for designing in this context.},\n address = {Shanghai,\ + \ China},\n articleno = {27},\n author = {Cotton, Kelsey and Sanches, Pedro and\ + \ Tsaknaki, Vasiliki and Karpashevich, Pavel},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.ec9f8fdd},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/zwzCgG8MXNA},\n title = {The Body Electric: A NIME designed\ + \ through and with the somatic experience of singing},\n url = {https://nime.pubpub.org/pub/ntm5kbux},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178865 + doi: 10.21428/92fbeb44.ec9f8fdd issn: 2220-4806 month: June - pages: 415--420 - publisher: 'Goldsmiths, University of London' - title: A Morphological Analysis of Audio-Objects and their Control Methods for 3D - Audio - url: http://www.nime.org/proceedings/2014/nime2014_420.pdf - year: 2014 + presentation-video: https://youtu.be/zwzCgG8MXNA + title: 'The Body Electric: A NIME designed through and with the somatic experience + of singing' + url: https://nime.pubpub.org/pub/ntm5kbux + year: 2021 - ENTRYTYPE: inproceedings - ID: rcanning2014 - abstract: 'This paper describes the Parallaxis Score System, part of the authors - ongoing research into to the development of technological tools that foster creative - interactions between improvising musicians and predefined instructional texts. - The Parallaxis platform places these texts within a networked, interactive environment - with a generalised set of controls in order to explore and devise ontologies of - network performance. 
As an interactive tool involved in music production the score - system itself undergoes a functional transformation and becomes a distributed - meta-instrument in its own right, independent from, yet intrinsically connected - to those instruments held by the performers.' - address: 'London, United Kingdom' - author: Rob Canning - bibtex: "@inproceedings{rcanning2014,\n abstract = {This paper describes the Parallaxis\ - \ Score System, part of the authors ongoing research into to the development of\ - \ technological tools that foster creative interactions between improvising musicians\ - \ and predefined instructional texts. The Parallaxis platform places these texts\ - \ within a networked, interactive environment with a generalised set of controls\ - \ in order to explore and devise ontologies of network performance. As an interactive\ - \ tool involved in music production the score system itself undergoes a functional\ - \ transformation and becomes a distributed meta-instrument in its own right, independent\ - \ from, yet intrinsically connected to those instruments held by the performers.},\n\ - \ address = {London, United Kingdom},\n author = {Rob Canning},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178728},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {144--146},\n publisher = {Goldsmiths, University of London},\n title = {Interactive\ - \ Parallax Scrolling Score Interface for Composed Networked Improvisation},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_421.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_28 + abstract: 'This paper discusses findings from a survey on interfaces for making + electronic music. We invited electronic music makers of varying experience to + reflect on their practice and setup and to imagine and describe their ideal interface + for music-making. 
We also asked them to reflect on the state of gestural controllers, + machine learning, and artificial intelligence in their practice. We had 118 people + respond to the survey, with 40.68% professional musicians, and 10.17% identifying + as living with a disability or access requirement. Results highlight limitations + of music-making setups as perceived by electronic music makers, reflections on + how imagined novel interfaces could address such limitations, and positive attitudes + towards ML and AI in general.' + address: 'Shanghai, China' + articleno: 28 + author: 'Frid, Emma and Ilsar, Alon' + bibtex: "@inproceedings{NIME21_28,\n abstract = {This paper discusses findings from\ + \ a survey on interfaces for making electronic music. We invited electronic music\ + \ makers of varying experience to reflect on their practice and setup and to imagine\ + \ and describe their ideal interface for music-making. We also asked them to reflect\ + \ on the state of gestural controllers, machine learning, and artificial intelligence\ + \ in their practice. 
We had 118 people respond to the survey, with 40.68% professional\ + \ musicians, and 10.17% identifying as living with a disability or access requirement.\ + \ Results highlight limitations of music-making setups as perceived by electronic\ + \ music makers, reflections on how imagined novel interfaces could address such\ + \ limitations, and positive attitudes towards ML and AI in general.},\n address\ + \ = {Shanghai, China},\n articleno = {28},\n author = {Frid, Emma and Ilsar, Alon},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.c37a2370},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/vX8B7fQki_w},\n title\ + \ = {Reimagining (Accessible) Digital Musical Instruments: A Survey on Electronic\ + \ Music-Making Tools},\n url = {https://nime.pubpub.org/pub/reimaginingadmis},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178728 + doi: 10.21428/92fbeb44.c37a2370 issn: 2220-4806 month: June - pages: 144--146 - publisher: 'Goldsmiths, University of London' - title: Interactive Parallax Scrolling Score Interface for Composed Networked Improvisation - url: http://www.nime.org/proceedings/2014/nime2014_421.pdf - year: 2014 + presentation-video: https://youtu.be/vX8B7fQki_w + title: 'Reimagining (Accessible) Digital Musical Instruments: A Survey on Electronic + Music-Making Tools' + url: https://nime.pubpub.org/pub/reimaginingadmis + year: 2021 - ENTRYTYPE: inproceedings - ID: arenaud2014 - abstract: 'This paper provides an overview of a proposed demonstration of 3DinMotion, - a system using real time motion capture of one or several subjects, which can - be used in interactive audiovisual pieces and network performances. 
The skeleton - of a subject is analyzed in real time and displayed as an abstract avatar as well - as sonified based on mappings and rules to make the interplay experience lively - and rewarding. A series of musical pieces have been composed for the interface - following cueing strategies. In addition a second display, ``the prompter'''' - guides the users through the piece. 3DinMotion has been developed from scratch - and natively, leading to a system with a very low latency, making it suitable - for real time music interactions. In addition, 3DinMotion is fully compatible - with the OpenSoundControl (OSC) protocol, allowing expansion to commonly used - musical and sound design applications.' - address: 'London, United Kingdom' - author: Alain Renaud and Caecilia Charbonnier and Sylvain Chagué - bibtex: "@inproceedings{arenaud2014,\n abstract = {This paper provides an overview\ - \ of a proposed demonstration of 3DinMotion, a system using real time motion capture\ - \ of one or several subjects, which can be used in interactive audiovisual pieces\ - \ and network performances. The skeleton of a subject is analyzed in real time\ - \ and displayed as an abstract avatar as well as sonified based on mappings and\ - \ rules to make the interplay experience lively and rewarding. A series of musical\ - \ pieces have been composed for the interface following cueing strategies. 
In\ - \ addition a second display, ``the prompter'' guides the users through the piece.\ - \ 3DinMotion has been developed from scratch and natively, leading to a system\ - \ with a very low latency, making it suitable for real time music interactions.\ - \ In addition, 3DinMotion is fully compatible with the OpenSoundControl (OSC)\ - \ protocol, allowing expansion to commonly used musical and sound design applications.},\n\ - \ address = {London, United Kingdom},\n author = {Alain Renaud and Caecilia Charbonnier\ - \ and Sylvain Chagu\\'e},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178915},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {495--496},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {{3D}inMotion A Mocap Based Interface for Real\ - \ Time Visualisation and Sonification of Multi-User Interactions},\n url = {http://www.nime.org/proceedings/2014/nime2014_423.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_29 + abstract: 'The Magnetic Resonator Piano (MRP) is a relatively well-established DMI + which significantly expands the capabilities of the acoustic piano. This paper + presents SoftMRP, a Max/MSP patch designed to emulate the physical MRP and thereby + to allow rehearsal of MRP repertoire and performance techniques using any MIDI + keyboard and expression pedal; it is hoped that the development of such a tool + will encourage even more widespread adoption of the original instrument amongst + composers and performers. 
This paper explains SoftMRP’s features and limitations, + discussing the challenges of approximating responses which rely upon the MRP’s + continuous sensing of key position, and considering ways in which the development + of the emulation might feed back into the development of the original instrument, + both specifically and more broadly: since it was designed by a composer, based + on his experience of writing for the instrument, it offers the MRP’s designers + an insight into how the instrument is conceptualised and understood by the musicians + who use it.' + address: 'Shanghai, China' + articleno: 29 + author: 'Pitkin, Jonathan' + bibtex: "@inproceedings{NIME21_29,\n abstract = {The Magnetic Resonator Piano (MRP)\ + \ is a relatively well-established DMI which significantly expands the capabilities\ + \ of the acoustic piano. This paper presents SoftMRP, a Max/MSP patch designed\ + \ to emulate the physical MRP and thereby to allow rehearsal of MRP repertoire\ + \ and performance techniques using any MIDI keyboard and expression pedal; it\ + \ is hoped that the development of such a tool will encourage even more widespread\ + \ adoption of the original instrument amongst composers and performers. 
This paper\ + \ explains SoftMRP’s features and limitations, discussing the challenges of approximating\ + \ responses which rely upon the MRP’s continuous sensing of key position, and\ + \ considering ways in which the development of the emulation might feed back into\ + \ the development of the original instrument, both specifically and more broadly:\ + \ since it was designed by a composer, based on his experience of writing for\ + \ the instrument, it offers the MRP’s designers an insight into how the instrument\ + \ is conceptualised and understood by the musicians who use it.},\n address =\ + \ {Shanghai, China},\n articleno = {29},\n author = {Pitkin, Jonathan},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.9e7da18f},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/Fw43nHVyGUg},\n title = {SoftMRP:\ + \ a Software Emulation of the Magnetic Resonator Piano},\n url = {https://nime.pubpub.org/pub/m9nhdm0p},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178915 + doi: 10.21428/92fbeb44.9e7da18f issn: 2220-4806 month: June - pages: 495--496 - publisher: 'Goldsmiths, University of London' - title: 3DinMotion A Mocap Based Interface for Real Time Visualisation and Sonification - of Multi-User Interactions - url: http://www.nime.org/proceedings/2014/nime2014_423.pdf - year: 2014 + presentation-video: https://youtu.be/Fw43nHVyGUg + title: 'SoftMRP: a Software Emulation of the Magnetic Resonator Piano' + url: https://nime.pubpub.org/pub/m9nhdm0p + year: 2021 - ENTRYTYPE: inproceedings - ID: tkelkar2014 - abstract: 'We propose a new musical interface, TrAP (TRace-A-Phrase) for generating - phrases of Hindustani Classical Music (HCM). In this system the user traces melodic - phrases on a tablet interface to create phrases in a raga. 
We begin by analyzing - tracings drawn by 28 participants, and train a classifier to categorize them into - one of four melodic categories from the theory of Hindustani Music. Then we create - a model based on note transitions from the raga grammar for the notes used in - the singable octaves in HCM. Upon being given a new tracing, the system segments - the tracing and computes a final phrase that best approximates the tracing.' - address: 'London, United Kingdom' - author: Udit Roy and Tejaswinee Kelkar and Bipin Indurkhya - bibtex: "@inproceedings{tkelkar2014,\n abstract = {We propose a new musical interface,\ - \ TrAP (TRace-A-Phrase) for generating phrases of Hindustani Classical Music (HCM).\ - \ In this system the user traces melodic phrases on a tablet interface to create\ - \ phrases in a raga. We begin by analyzing tracings drawn by 28 participants,\ - \ and train a classifier to categorize them into one of four melodic categories\ - \ from the theory of Hindustani Music. Then we create a model based on note transitions\ - \ from the raga grammar for the notes used in the singable octaves in HCM. 
Upon\ - \ being given a new tracing, the system segments the tracing and computes a final\ - \ phrase that best approximates the tracing.},\n address = {London, United Kingdom},\n\ - \ author = {Udit Roy and Tejaswinee Kelkar and Bipin Indurkhya},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178923},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {243--246},\n publisher = {Goldsmiths, University of London},\n title = {TrAP:\ - \ An Interactive System to Generate Valid Raga Phrases from Sound-Tracings},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_424.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_3 + abstract: 'This paper presents the design and preliminary evaluation of an Accessible + Digital Musical Instrument (ADMI) in the form of a tangible wooden step sequencer + that uses photoresistors and wooden blocks to trigger musical events. Furthermore, + the paper presents a short overview of design criteria for ADMIs based on literature + and first insights of an ongoing qualitative interview study with German Special + Educational Needs (SEN) teachers conducted by the first author. The preliminary + evaluation is realized by a reflection on the mentioned criteria. The instrument + was designed as a starting point for a participatory design process in music education + settings. The software is programmed in Pure Data and running on a Raspberry Pi + computer that fits inside the body of the instrument. While most similar developments + focus on professional performance and complex interactions, LoopBlocks focuses + on accessibility and Special Educational Needs settings. The main goal is to reduce + the cognitive load needed to play music by providing a clear and constrained interaction, + thus reducing intellectual and technical barriers to active music making.' 
+ address: 'Shanghai, China' + articleno: 3 + author: 'Förster, Andreas and Komesker, Mathias' + bibtex: "@inproceedings{NIME21_3,\n abstract = {This paper presents the design and\ + \ preliminary evaluation of an Accessible Digital Musical Instrument (ADMI) in\ + \ the form of a tangible wooden step sequencer that uses photoresistors and wooden\ + \ blocks to trigger musical events. Furthermore, the paper presents a short overview\ + \ of design criteria for ADMIs based on literature and first insights of an ongoing\ + \ qualitative interview study with German Special Educational Needs (SEN) teachers\ + \ conducted by the first author. The preliminary evaluation is realized by a reflection\ + \ on the mentioned criteria. The instrument was designed as a starting point for\ + \ a participatory design process in music education settings. The software is\ + \ programmed in Pure Data and running on a Raspberry Pi computer that fits inside\ + \ the body of the instrument. While most similar developments focus on professional\ + \ performance and complex interactions, LoopBlocks focuses on accessibility and\ + \ Special Educational Needs settings. 
The main goal is to reduce the cognitive\ + \ load needed to play music by providing a clear and constrained interaction,\ + \ thus reducing intellectual and technical barriers to active music making.},\n\ + \ address = {Shanghai, China},\n articleno = {3},\n author = {Förster, Andreas\ + \ and Komesker, Mathias},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.f45e1caf},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/u5o0gmB3MX8},\n\ + \ title = {LoopBlocks: Design and Preliminary Evaluation of an Accessible Tangible\ + \ Musical Step Sequencer},\n url = {https://nime.pubpub.org/pub/bj2w1gdx},\n year\ + \ = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178923 + doi: 10.21428/92fbeb44.f45e1caf issn: 2220-4806 month: June - pages: 243--246 - publisher: 'Goldsmiths, University of London' - title: 'TrAP: An Interactive System to Generate Valid Raga Phrases from Sound-Tracings' - url: http://www.nime.org/proceedings/2014/nime2014_424.pdf - year: 2014 + presentation-video: https://youtu.be/u5o0gmB3MX8 + title: 'LoopBlocks: Design and Preliminary Evaluation of an Accessible Tangible + Musical Step Sequencer' + url: https://nime.pubpub.org/pub/bj2w1gdx + year: 2021 - ENTRYTYPE: inproceedings - ID: ncollins2014 - abstract: 'The algorave movement has received reasonable international exposure - in the last two years, including a series of concerts in Europe and beyond, and - press coverage in a number of media. This paper seeks to illuminate some of the - historical precedents to the scene, its primary aesthetic goals, and the divergent - technological and musical approaches of representative participants. We keep in - mind the novel possibilities in musical expression explored by algoravers. 
The - scene is by no means homogeneous, and the very lack of uniformity of technique, - from new live coding languages through code DJing to plug-in combination, with - or without visual extension, is indicative of the flexibility of computers themselves - as general information processors.' - address: 'London, United Kingdom' - author: Nick Collins and Alex McLean - bibtex: "@inproceedings{ncollins2014,\n abstract = {The algorave movement has received\ - \ reasonable international exposure in the last two years, including a series\ - \ of concerts in Europe and beyond, and press coverage in a number of media. This\ - \ paper seeks to illuminate some of the historical precedents to the scene, its\ - \ primary aesthetic goals, and the divergent technological and musical approaches\ - \ of representative participants. We keep in mind the novel possibilities in musical\ - \ expression explored by algoravers. The scene is by no means homogeneous, and\ - \ the very lack of uniformity of technique, from new live coding languages through\ - \ code DJing to plug-in combination, with or without visual extension, is indicative\ - \ of the flexibility of computers themselves as general information processors.},\n\ - \ address = {London, United Kingdom},\n author = {Nick Collins and Alex McLean},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178734},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {355--358},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Algorave: Live Performance of Algorithmic Electronic\ - \ Dance Music},\n url = {http://www.nime.org/proceedings/2014/nime2014_426.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_30 + abstract: 'The NIME community has proposed a variety of interfaces that connect + making music and education. 
This paper reviews current literature, proposes a + method for developing educational NIMEs, and reflects on a way to manifest computational + thinking through music computing. A case study is presented and discussed in which + a programmable mechatronics educational NIME and a virtual simulation of the NIME + offered as a web application were developed.' + address: 'Shanghai, China' + articleno: 30 + author: 'Tsoukalas, Kyriakos and Bukvic, Ivica' + bibtex: "@inproceedings{NIME21_30,\n abstract = {The NIME community has proposed\ + \ a variety of interfaces that connect making music and education. This paper\ + \ reviews current literature, proposes a method for developing educational NIMEs,\ + \ and reflects on a way to manifest computational thinking through music computing.\ + \ A case study is presented and discussed in which a programmable mechatronics\ + \ educational NIME and a virtual simulation of the NIME offered as a web application\ + \ were developed.},\n address = {Shanghai, China},\n articleno = {30},\n author\ + \ = {Tsoukalas, Kyriakos and Bukvic, Ivica},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.1eeb3ada},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/pdsfZX_kJBo},\n title = {Music Computing and Computational\ + \ Thinking: A Case Study},\n url = {https://nime.pubpub.org/pub/t94aq9rf},\n year\ + \ = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178734 + doi: 10.21428/92fbeb44.1eeb3ada issn: 2220-4806 month: June - pages: 355--358 - publisher: 'Goldsmiths, University of London' - title: 'Algorave: Live Performance of Algorithmic Electronic Dance Music' - url: http://www.nime.org/proceedings/2014/nime2014_426.pdf - year: 2014 + presentation-video: https://youtu.be/pdsfZX_kJBo + title: 'Music Computing and Computational Thinking: 
A Case Study' + url: https://nime.pubpub.org/pub/t94aq9rf + year: 2021 - ENTRYTYPE: inproceedings - ID: jbowers12014 - abstract: 'In this paper we describe an artistic response to a collection of natural - history museum artefacts, developed as part of a residency organised around a - public participatory workshop. Drawing on a critical literature in studies of - material culture, the work incorporated data sonification, image audification, - field recordings and created a number of instruments for exploring geological - artefacts and meterological data as aesthetic material. The residency culminated - in an exhibition presented as a ''sensorium'' for the sensory exploration of museum - objects. In describing the methods and thinking behind the project this paper - presents an alternative approach to engaging artists and audiences with local - heritage and museum archives, which draws on research in NIME and allied literatures, - and which is devoted to enlivening collections as occasions for varied interpretation, - appropriation and aesthetic response.' - address: 'London, United Kingdom' - author: John Bowers and Tim Shaw - bibtex: "@inproceedings{jbowers12014,\n abstract = {In this paper we describe an\ - \ artistic response to a collection of natural history museum artefacts, developed\ - \ as part of a residency organised around a public participatory workshop. Drawing\ - \ on a critical literature in studies of material culture, the work incorporated\ - \ data sonification, image audification, field recordings and created a number\ - \ of instruments for exploring geological artefacts and meterological data as\ - \ aesthetic material. The residency culminated in an exhibition presented as a\ - \ 'sensorium' for the sensory exploration of museum objects. 
In describing the\ - \ methods and thinking behind the project this paper presents an alternative approach\ - \ to engaging artists and audiences with local heritage and museum archives, which\ - \ draws on research in NIME and allied literatures, and which is devoted to enlivening\ - \ collections as occasions for varied interpretation, appropriation and aesthetic\ - \ response.},\n address = {London, United Kingdom},\n author = {John Bowers and\ - \ Tim Shaw},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178720},\n issn\ - \ = {2220-4806},\n month = {June},\n pages = {175--178},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Reappropriating Museum Collections: Performing\ - \ Geology Specimens and Meterology Data as New Instruments for Musical Expression},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_429.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_31 + abstract: 'We present new results combining data from a previously published study + of the mapping design process and a new replication of the same method with a + group of participants having different background expertise. Our thematic analysis + of participants'' interview responses reveal some design criteria common to both + groups of participants: mappings must manage the balance of control between the + instrument and the player, and they should be easy to understand for the player + and audience. We also consider several criteria that distinguish the two groups'' + evaluation strategies. We conclude with important discussion of the mapping designer''s + perspective, performance with gestural controllers, and the difficulties of evaluating + mapping designs and musical instruments in general.' + address: 'Shanghai, China' + articleno: 31 + author: 'West, Travis and Caramiaux, Baptiste and Huot, Stéphane and Wanderley, + Marcelo M.' 
+ bibtex: "@inproceedings{NIME21_31,\n abstract = {We present new results combining\ + \ data from a previously published study of the mapping design process and a new\ + \ replication of the same method with a group of participants having different\ + \ background expertise. Our thematic analysis of participants' interview responses\ + \ reveal some design criteria common to both groups of participants: mappings\ + \ must manage the balance of control between the instrument and the player, and\ + \ they should be easy to understand for the player and audience. We also consider\ + \ several criteria that distinguish the two groups' evaluation strategies. We\ + \ conclude with important discussion of the mapping designer's perspective, performance\ + \ with gestural controllers, and the difficulties of evaluating mapping designs\ + \ and musical instruments in general.},\n address = {Shanghai, China},\n articleno\ + \ = {31},\n author = {West, Travis and Caramiaux, Baptiste and Huot, Stéphane\ + \ and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.04f0fc35},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/3hM531E_vlg},\n\ + \ title = {Making Mappings: Design Criteria for Live Performance},\n url = {https://nime.pubpub.org/pub/f1ueovwv},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178720 + doi: 10.21428/92fbeb44.04f0fc35 issn: 2220-4806 month: June - pages: 175--178 - publisher: 'Goldsmiths, University of London' - title: 'Reappropriating Museum Collections: Performing Geology Specimens and Meterology - Data as New Instruments for Musical Expression' - url: http://www.nime.org/proceedings/2014/nime2014_429.pdf - year: 2014 + presentation-video: https://youtu.be/3hM531E_vlg + title: 'Making Mappings: Design Criteria for 
Live Performance' + url: https://nime.pubpub.org/pub/f1ueovwv + year: 2021 - ENTRYTYPE: inproceedings - ID: ahadjakos12014 - abstract: 'Physical computing platforms such as the Arduino have significantly simplified - developing physical musical interfaces. However, those platforms typically target - everyday programmers rather than composers and media artists. On the other hand, - tangible user interface (TUI) toolkits, which provide an integrated, easy-to-use - solution have not gained momentum in modern music creation. We propose a concept - that hybridizes physical computing and TUI toolkit approaches. This helps to tackle - typical TUI toolkit weaknesses, namely quick sensor obsolescence and limited choices. - We developed a physical realization based on the idea of "universal pins", which - can be configured to perform a variety of duties, making it possible to connect - different sensor breakouts and modules. We evaluated our prototype by making performance - measurements and conducting a user study demonstrating the feasibility of our - approach.' - address: 'London, United Kingdom' - author: Aristotelis Hadjakos and Simon Waloschek - bibtex: "@inproceedings{ahadjakos12014,\n abstract = {Physical computing platforms\ - \ such as the Arduino have significantly simplified developing physical musical\ - \ interfaces. However, those platforms typically target everyday programmers rather\ - \ than composers and media artists. On the other hand, tangible user interface\ - \ (TUI) toolkits, which provide an integrated, easy-to-use solution have not gained\ - \ momentum in modern music creation. We propose a concept that hybridizes physical\ - \ computing and TUI toolkit approaches. This helps to tackle typical TUI toolkit\ - \ weaknesses, namely quick sensor obsolescence and limited choices. 
We developed\ - \ a physical realization based on the idea of \"universal pins\", which can be\ - \ configured to perform a variety of duties, making it possible to connect different\ - \ sensor breakouts and modules. We evaluated our prototype by making performance\ - \ measurements and conducting a user study demonstrating the feasibility of our\ - \ approach.},\n address = {London, United Kingdom},\n author = {Aristotelis Hadjakos\ - \ and Simon Waloschek},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178782},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {625--628},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {SPINE: A TUI Toolkit and Physical Computing\ - \ Hybrid},\n url = {http://www.nime.org/proceedings/2014/nime2014_430.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_32 + abstract: 'What is the relationship between a musician-designer''s auditory imagery + for a musical piece, a design idea for an augmented instrument to support the + realisation of that piece, and the aspiration to introduce the resulting instrument + to a community of like-minded performers? We explore this NIME topic in the context + of building the first iteration of an augmented acoustic guitar prototype for + percussive fingerstyle guitarists. The first author, himself a percussive fingerstyle + player, started the project of an augmented guitar with expectations and assumptions + made around his own playing style, and in particular around the arrangement of + one song. This input was complemented by the outcome of an interview study, in + which percussive guitarists highlighted functional and creative requirements to + suit their needs. We ran a pilot study to assess the resulting prototype, involving + two other players. 
We present their feedback on two configurations of the prototype, + one equalising the signal of surface sensors and the other based on sample triggering. + The equalisation-based setting was better received, however both participants + provided useful suggestions to improve the sample-triggering model following their + own auditory imagery.' + address: 'Shanghai, China' + articleno: 32 + author: 'Martelloni, Andrea and McPherson, Andrew and Barthet, Mathieu' + bibtex: "@inproceedings{NIME21_32,\n abstract = {What is the relationship between\ + \ a musician-designer's auditory imagery for a musical piece, a design idea for\ + \ an augmented instrument to support the realisation of that piece, and the aspiration\ + \ to introduce the resulting instrument to a community of like-minded performers?\ + \ We explore this NIME topic in the context of building the first iteration of\ + \ an augmented acoustic guitar prototype for percussive fingerstyle guitarists.\ + \ The first author, himself a percussive fingerstyle player, started the project\ + \ of an augmented guitar with expectations and assumptions made around his own\ + \ playing style, and in particular around the arrangement of one song. This input\ + \ was complemented by the outcome of an interview study, in which percussive guitarists\ + \ highlighted functional and creative requirements to suit their needs. We ran\ + \ a pilot study to assess the resulting prototype, involving two other players.\ + \ We present their feedback on two configurations of the prototype, one equalising\ + \ the signal of surface sensors and the other based on sample triggering. 
The\ + \ equalisation-based setting was better received, however both participants provided\ + \ useful suggestions to improve the sample-triggering model following their own\ + \ auditory imagery.},\n address = {Shanghai, China},\n articleno = {32},\n author\ + \ = {Martelloni, Andrea and McPherson, Andrew and Barthet, Mathieu},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.2f6db6e6},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/qeX6dUrJURY},\n title = {Guitar\ + \ augmentation for Percussive Fingerstyle: Combining self-reflexive practice and\ + \ user-centred design},\n url = {https://nime.pubpub.org/pub/zgj85mzv},\n year\ + \ = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178782 + doi: 10.21428/92fbeb44.2f6db6e6 issn: 2220-4806 month: June - pages: 625--628 - publisher: 'Goldsmiths, University of London' - title: 'SPINE: A TUI Toolkit and Physical Computing Hybrid' - url: http://www.nime.org/proceedings/2014/nime2014_430.pdf - year: 2014 + presentation-video: https://youtu.be/qeX6dUrJURY + title: 'Guitar augmentation for Percussive Fingerstyle: Combining self-reflexive + practice and user-centred design' + url: https://nime.pubpub.org/pub/zgj85mzv + year: 2021 - ENTRYTYPE: inproceedings - ID: ogreen2014 - abstract: 'To engage with questions of musicality is to invite into consideration - a complex network of topics beyond the mechanics of soundful interaction with - our interfaces. Drawing on the work of Born, I sketch an outline of the reach - of these topics. 
I suggest that practice-led methods, by dint of focussing on - the lived experience where many of these topics converge, may be able to serve - as a useful methodological `glue'' for NIME by helping stimulate useful agonistic - discussion on our objects of study, and map the untidy contours of contemporary - practices. I contextualise this discussion by presenting two recently developed - improvisation systems and drawing from these some starting suggestions for how - attention to the grain of lived practice could usefully contribute to considerations - for designers in terms of the pursuit of musicality and the care required in considering - performances in evaluation.' - address: 'London, United Kingdom' - author: Owen Green - bibtex: "@inproceedings{ogreen2014,\n abstract = {To engage with questions of musicality\ - \ is to invite into consideration a complex network of topics beyond the mechanics\ - \ of soundful interaction with our interfaces. Drawing on the work of Born, I\ - \ sketch an outline of the reach of these topics. I suggest that practice-led\ - \ methods, by dint of focussing on the lived experience where many of these topics\ - \ converge, may be able to serve as a useful methodological `glue' for NIME by\ - \ helping stimulate useful agonistic discussion on our objects of study, and map\ - \ the untidy contours of contemporary practices. 
I contextualise this discussion\ - \ by presenting two recently developed improvisation systems and drawing from\ - \ these some starting suggestions for how attention to the grain of lived practice\ - \ could usefully contribute to considerations for designers in terms of the pursuit\ - \ of musicality and the care required in considering performances in evaluation.},\n\ - \ address = {London, United Kingdom},\n author = {Owen Green},\n booktitle = {Proceedings\ + ID: NIME21_33 + abstract: 'Recent applications of Transformer neural networks in the field of music + have demonstrated their ability to effectively capture and emulate long-term dependencies + characteristic of human notions of musicality and creative merit. We propose a + novel approach to automated symbolic rhythm generation, where a Transformer-XL + model trained on the Magenta Groove MIDI Dataset is used for the tasks of sequence + generation and continuation. Hundreds of generations are evaluated using blind-listening + tests to determine the extent to which the aspects of rhythm we understand to + be valuable are learnt and reproduced. Our model is able to achieve a standard + of rhythmic production comparable to human playing across arbitrarily long time + periods and multiple playing styles.' + address: 'Shanghai, China' + articleno: 33 + author: 'Nuttall, Thomas and Haki, Behzad and Jorda, Sergi' + bibtex: "@inproceedings{NIME21_33,\n abstract = {Recent applications of Transformer\ + \ neural networks in the field of music have demonstrated their ability to effectively\ + \ capture and emulate long-term dependencies characteristic of human notions of\ + \ musicality and creative merit. 
We propose a novel approach to automated symbolic\ + \ rhythm generation, where a Transformer-XL model trained on the Magenta Groove\ + \ MIDI Dataset is used for the tasks of sequence generation and continuation.\ + \ Hundreds of generations are evaluated using blind-listening tests to determine\ + \ the extent to which the aspects of rhythm we understand to be valuable are learnt\ + \ and reproduced. Our model is able to achieve a standard of rhythmic production\ + \ comparable to human playing across arbitrarily long time periods and multiple\ + \ playing styles.},\n address = {Shanghai, China},\n articleno = {33},\n author\ + \ = {Nuttall, Thomas and Haki, Behzad and Jorda, Sergi},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178776},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {1--6},\n publisher = {Goldsmiths, University of London},\n title = {NIME,\ - \ Musicality and Practice-led Methods},\n url = {http://www.nime.org/proceedings/2014/nime2014_434.pdf},\n\ - \ year = {2014}\n}\n" + \ doi = {10.21428/92fbeb44.fe9a0d82},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/Ul9s8qSMUgU},\n title = {Transformer\ + \ Neural Networks for Automated Rhythm Generation},\n url = {https://nime.pubpub.org/pub/8947fhly},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178776 + doi: 10.21428/92fbeb44.fe9a0d82 issn: 2220-4806 month: June - pages: 1--6 - publisher: 'Goldsmiths, University of London' - title: 'NIME, Musicality and Practice-led Methods' - url: http://www.nime.org/proceedings/2014/nime2014_434.pdf - year: 2014 + presentation-video: https://youtu.be/Ul9s8qSMUgU + title: Transformer Neural Networks for Automated Rhythm Generation + url: https://nime.pubpub.org/pub/8947fhly + year: 2021 - ENTRYTYPE: inproceedings - ID: 
jsokolovskis2014 - abstract: 'This paper presents a method for locating the position of a strike on - an acoustic drumhead. Near-field optical sensors were installed underneath the - drumhead of a commercially available snare drum. By implementing time difference - of arrival (TDOA) algorithm accuracy within 2cm was achieved in approximating - the location of strikes. The system can be used for drum performance analysis, - timbre analysis and can form a basis for an augmented drum performance system.' - address: 'London, United Kingdom' - author: Janis Sokolovskis and Andrew McPherson - bibtex: "@inproceedings{jsokolovskis2014,\n abstract = {This paper presents a method\ - \ for locating the position of a strike on an acoustic drumhead. Near-field optical\ - \ sensors were installed underneath the drumhead of a commercially available snare\ - \ drum. By implementing time difference of arrival (TDOA) algorithm accuracy within\ - \ 2cm was achieved in approximating the location of strikes. The system can be\ - \ used for drum performance analysis, timbre analysis and can form a basis for\ - \ an augmented drum performance system.},\n address = {London, United Kingdom},\n\ - \ author = {Janis Sokolovskis and Andrew McPherson},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178943},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {70--73},\n publisher = {Goldsmiths, University of London},\n title = {Optical\ - \ Measurement of Acoustic Drum Strike Locations},\n url = {http://www.nime.org/proceedings/2014/nime2014_436.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_34 + abstract: 'This paper provides a study of a workshop which invited composers, musicians, + and sound designers to explore instruments from the history of electronic sound + in Sweden. 
The workshop participants applied media archaeology methods towards + analyzing one particular instrument from the past, the Dataton System 3000. They + then applied design fiction methods towards imagining several speculative instruments + of the future. Each stage of the workshop revealed very specific utopian ideas + surrounding the design of sound instruments. After introducing the background + and methods of the workshop, the authors present an overview and thematic analysis + of the workshop''s outcomes. The paper concludes with some reflections on the + use of this method-in-progress for investigating the ethics and affordances of + historical electronic sound instruments. It also suggests the significance of + ethics and affordances for the design of contemporary instruments.' + address: 'Shanghai, China' + articleno: 34 + author: 'Holzer, Derek and Frisk, Henrik and Holzapfel, Andre' + bibtex: "@inproceedings{NIME21_34,\n abstract = {This paper provides a study of\ + \ a workshop which invited composers, musicians, and sound designers to explore\ + \ instruments from the history of electronic sound in Sweden. The workshop participants\ + \ applied media archaeology methods towards analyzing one particular instrument\ + \ from the past, the Dataton System 3000. They then applied design fiction methods\ + \ towards imagining several speculative instruments of the future. Each stage\ + \ of the workshop revealed very specific utopian ideas surrounding the design\ + \ of sound instruments. After introducing the background and methods of the workshop,\ + \ the authors present an overview and thematic analysis of the workshop's outcomes.\ + \ The paper concludes with some reflections on the use of this method-in-progress\ + \ for investigating the ethics and affordances of historical electronic sound\ + \ instruments. 
It also suggests the significance of ethics and affordances for\ + \ the design of contemporary instruments.},\n address = {Shanghai, China},\n articleno\ + \ = {34},\n author = {Holzer, Derek and Frisk, Henrik and Holzapfel, Andre},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.2723647f},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/qBapYX7IOHA},\n title\ + \ = {Sounds of Futures Passed: Media Archaeology and Design Fiction as NIME Methodologies},\n\ + \ url = {https://nime.pubpub.org/pub/200fpd5a},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178943 + doi: 10.21428/92fbeb44.2723647f issn: 2220-4806 month: June - pages: 70--73 - publisher: 'Goldsmiths, University of London' - title: Optical Measurement of Acoustic Drum Strike Locations - url: http://www.nime.org/proceedings/2014/nime2014_436.pdf - year: 2014 + presentation-video: https://youtu.be/qBapYX7IOHA + title: 'Sounds of Futures Passed: Media Archaeology and Design Fiction as NIME Methodologies' + url: https://nime.pubpub.org/pub/200fpd5a + year: 2021 - ENTRYTYPE: inproceedings - ID: fmorreale2014 - abstract: 'This paper presents MINUET, a framework for musical interface design - grounded in the experience of the player. MINUET aims to provide new perspectives - on the design of musical interfaces, referred to as a general term that comprises - digital musical instruments and interactive installations. The ultimate purpose - is to reduce the complexity of the design space emphasizing the experience of - the player. MINUET is structured as a design process consisting of two stages: - goal and specifications. The reliability of MINUET is tested through a systematic - comparison with the related work and through a case study. 
To this end, we present - the design and prototyping of Hexagon, a new musical interface with learning purposes.' - address: 'London, United Kingdom' - author: Fabio Morreale and Antonella De Angeli and Sile O'Modhrain - bibtex: "@inproceedings{fmorreale2014,\n abstract = {This paper presents MINUET,\ - \ a framework for musical interface design grounded in the experience of the player.\ - \ MINUET aims to provide new perspectives on the design of musical interfaces,\ - \ referred to as a general term that comprises digital musical instruments and\ - \ interactive installations. The ultimate purpose is to reduce the complexity\ - \ of the design space emphasizing the experience of the player. MINUET is structured\ - \ as a design process consisting of two stages: goal and specifications. The reliability\ - \ of MINUET is tested through a systematic comparison with the related work and\ - \ through a case study. To this end, we present the design and prototyping of\ - \ Hexagon, a new musical interface with learning purposes.},\n address = {London,\ - \ United Kingdom},\n author = {Fabio Morreale and Antonella De Angeli and Sile\ - \ O'Modhrain},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178879},\n issn\ - \ = {2220-4806},\n month = {June},\n pages = {467--472},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Musical Interface Design: An Experience-oriented\ - \ Framework},\n url = {http://www.nime.org/proceedings/2014/nime2014_437.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_35 + abstract: 'Audio and haptic sensations have previously been linked in the development + of NIMEs and in other domains like human-computer interaction. Most efforts to + work with these modalities together tend to either treat haptics as secondary + to audio, or conversely, audio as secondary to haptics, and design sensations + in each modality separately. 
In this paper, we investigate the possibility of + designing audio and vibrotactile effects simultaneously by interpolating audio-haptic + control spaces. An inverse radial basis function method is used to dynamically + create a mapping from a two-dimensional space to a many-dimensional control space + for multimodal effects based on user-specified control points. Two proofs of concept + were developed focusing on modifying the same structure across modalities and + parallel structures.' + address: 'Shanghai, China' + articleno: 35 + author: 'Regimbal, Juliette and Wanderley, Marcelo M.' + bibtex: "@inproceedings{NIME21_35,\n abstract = {Audio and haptic sensations have\ + \ previously been linked in the development of NIMEs and in other domains like\ + \ human-computer interaction. Most efforts to work with these modalities together\ + \ tend to either treat haptics as secondary to audio, or conversely, audio as\ + \ secondary to haptics, and design sensations in each modality separately. In\ + \ this paper, we investigate the possibility of designing audio and vibrotactile\ + \ effects simultaneously by interpolating audio-haptic control spaces. An inverse\ + \ radial basis function method is used to dynamically create a mapping from a\ + \ two-dimensional space to a many-dimensional control space for multimodal effects\ + \ based on user-specified control points. 
Two proofs of concept were developed\ + \ focusing on modifying the same structure across modalities and parallel structures.},\n\ + \ address = {Shanghai, China},\n articleno = {35},\n author = {Regimbal, Juliette\ + \ and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.1084cb07},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/eH3mn1Ad5BE},\n\ + \ title = {Interpolating Audio and Haptic Control Spaces},\n url = {https://nime.pubpub.org/pub/zd2z1evu},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178879 + doi: 10.21428/92fbeb44.1084cb07 issn: 2220-4806 month: June - pages: 467--472 - publisher: 'Goldsmiths, University of London' - title: 'Musical Interface Design: An Experience-oriented Framework' - url: http://www.nime.org/proceedings/2014/nime2014_437.pdf - year: 2014 + presentation-video: https://youtu.be/eH3mn1Ad5BE + title: Interpolating Audio and Haptic Control Spaces + url: https://nime.pubpub.org/pub/zd2z1evu + year: 2021 - ENTRYTYPE: inproceedings - ID: jbowers2014 - abstract: 'This paper outlines a concept of hybrid resonant assemblages, combinations - of varied materials excited by sound transducers, feeding back to themselves via - digital signal processing. We ground our concept as an extension of work by David - Tudor, Nicolas Collins and Bowers and Archer [NIME 2005] and draw on a variety - of critical perspectives in the social sciences and philosophy to explore such - assemblages as an alternative to more familiar ideas of instruments and interfaces. - We lay out a conceptual framework for the exploration of hybrid resonant assemblages - and describe how we have approached implementing them. Our performance experience - is presented and implications for work are discussed. 
In the light of our work, - we urge a reconsideration of the implicit norms of performance which underlie - much research in NIME. In particular, drawing on the philosophical work of Jean-Luc - Nancy, we commend a wider notion of touch that also recognises the performative - value of withholding contact.' - address: 'London, United Kingdom' - author: John Bowers and Annika Haas - bibtex: "@inproceedings{jbowers2014,\n abstract = {This paper outlines a concept\ - \ of hybrid resonant assemblages, combinations of varied materials excited by\ - \ sound transducers, feeding back to themselves via digital signal processing.\ - \ We ground our concept as an extension of work by David Tudor, Nicolas Collins\ - \ and Bowers and Archer [NIME 2005] and draw on a variety of critical perspectives\ - \ in the social sciences and philosophy to explore such assemblages as an alternative\ - \ to more familiar ideas of instruments and interfaces. We lay out a conceptual\ - \ framework for the exploration of hybrid resonant assemblages and describe how\ - \ we have approached implementing them. Our performance experience is presented\ - \ and implications for work are discussed. In the light of our work, we urge a\ - \ reconsideration of the implicit norms of performance which underlie much research\ - \ in NIME. 
In particular, drawing on the philosophical work of Jean-Luc Nancy,\ - \ we commend a wider notion of touch that also recognises the performative value\ - \ of withholding contact.},\n address = {London, United Kingdom},\n author = {John\ - \ Bowers and Annika Haas},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178718},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {7--12},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Hybrid Resonant Assemblages: Rethinking Instruments,\ - \ Touch and Performance in New Interfaces for Musical Expression},\n url = {http://www.nime.org/proceedings/2014/nime2014_438.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_36 + abstract: 'Algorithmic Power Ballads is a performance for Saxophone and autonomous + improvisor, with an optional third performer who can use the web interface to + hand-write note sequences, and adjust synthesis parameters. The performance system + explores shifting power dynamics between acoustic, algorithmic and autonomous + performers through modifying the amount of control and agency they have over the + sound over the duration of the performance. A higher-level algorithm how strongly + the machine listening algorithms, which analyse the saxophone input, influence + the rhythmic and melodic patterns generated by the system. The autonomous improvisor + is trained on power ballad melodies prior to the performance and in lieu of influence + from the saxophonist and live coder strays towards melodic phrases from this musical + style. The piece is written in javascript and WebAudio API and uses MMLL a browser-based + machine listening library.' 
+ address: 'Shanghai, China' + articleno: 36 + author: 'Knotts, Shelly' + bibtex: "@inproceedings{NIME21_36,\n abstract = {Algorithmic Power Ballads is a\ + \ performance for Saxophone and autonomous improvisor, with an optional third\ + \ performer who can use the web interface to hand-write note sequences, and adjust\ + \ synthesis parameters. The performance system explores shifting power dynamics\ + \ between acoustic, algorithmic and autonomous performers through modifying the\ + \ amount of control and agency they have over the sound over the duration of the\ + \ performance. A higher-level algorithm how strongly the machine listening algorithms,\ + \ which analyse the saxophone input, influence the rhythmic and melodic patterns\ + \ generated by the system. The autonomous improvisor is trained on power ballad\ + \ melodies prior to the performance and in lieu of influence from the saxophonist\ + \ and live coder strays towards melodic phrases from this musical style. The piece\ + \ is written in javascript and WebAudio API and uses MMLL a browser-based machine\ + \ listening library.},\n address = {Shanghai, China},\n articleno = {36},\n author\ + \ = {Knotts, Shelly},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.548cca2b},\n\ + \ issn = {2220-4806},\n month = {June},\n title = {Algorithmic Power Ballads},\n\ + \ url = {https://nime.pubpub.org/pub/w2ubqkv4},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178718 + doi: 10.21428/92fbeb44.548cca2b issn: 2220-4806 month: June - pages: 7--12 - publisher: 'Goldsmiths, University of London' - title: 'Hybrid Resonant Assemblages: Rethinking Instruments, Touch and Performance - in New Interfaces for Musical Expression' - url: http://www.nime.org/proceedings/2014/nime2014_438.pdf - year: 2014 + title: Algorithmic Power Ballads + url: 
https://nime.pubpub.org/pub/w2ubqkv4 + year: 2021 - ENTRYTYPE: inproceedings - ID: cgeiger2014 - abstract: 'We present a system that allows users to experience singing without singing - using gesture-based interaction techniques. We designed a set of body-related - interaction and multi-modal feedback techniques and developed a singing voice - synthesizer system that is controlled by the user''s mouth shapes and arm gestures. - Based on the adaption of a number of digital media-related techniques such as - face and body tracking, 3D rendering, singing voice synthesis and physical computing, - we developed a media installation that allows users to perform an aria without - real singing and provide the look and feel from a 20th century performance of - an opera singer. We evaluated this system preliminarily with users.' - address: 'London, United Kingdom' - author: Cornelius Pöpel and Jochen Feitsch and Marco Strobel and Christian Geiger - bibtex: "@inproceedings{cgeiger2014,\n abstract = {We present a system that allows\ - \ users to experience singing without singing using gesture-based interaction\ - \ techniques. We designed a set of body-related interaction and multi-modal feedback\ - \ techniques and developed a singing voice synthesizer system that is controlled\ - \ by the user's mouth shapes and arm gestures. Based on the adaption of a number\ - \ of digital media-related techniques such as face and body tracking, 3D rendering,\ - \ singing voice synthesis and physical computing, we developed a media installation\ - \ that allows users to perform an aria without real singing and provide the look\ - \ and feel from a 20th century performance of an opera singer. 
We evaluated this\ - \ system preliminarily with users.},\n address = {London, United Kingdom},\n author\ - \ = {Cornelius Pöpel and Jochen Feitsch and Marco Strobel and Christian Geiger},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178905},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {359--362},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Design and Evaluation of a Gesture Controlled Singing\ - \ Voice Installation},\n url = {http://www.nime.org/proceedings/2014/nime2014_439.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_37 + abstract: 'In this paper, Entangled, a multi-modal instrument in virtual 3D space + with sound, graphics, and the smartphone-based gestural interface for multi-user + is introduced. Within the same network, the players can use their smartphone as + the controller by entering a specific URL into their smartphone’s browser. After + joining the network, by actuating the smartphone''s accelerometer, the players + apply gravitational force to a swarm of particles in the virtual space. Machine + learning-based gesture pattern recognition is parallelly used to increase the + functionality of the gestural command. Through this interface, the player can + achieve intuitive control of gravitation in virtual reality (VR) space. The gravitation + becomes the medium of the system involving physics, graphics, and sonification + which composes a multimodal compositional language with cross-modal correspondence. Entangled is + built on AlloLib, which is a cross-platform suite of C++ components for building + interactive multimedia tools and applications. Throughout the script, the reason + for each decision is elaborated arguing the importance of crossmodal correspondence + in the design procedure.' 
+ address: 'Shanghai, China' + articleno: 37 + author: 'Lee, Myungin' + bibtex: "@inproceedings{NIME21_37,\n abstract = {In this paper, Entangled, a multi-modal\ + \ instrument in virtual 3D space with sound, graphics, and the smartphone-based\ + \ gestural interface for multi-user is introduced. Within the same network, the\ + \ players can use their smartphone as the controller by entering a specific URL\ + \ into their smartphone’s browser. After joining the network, by actuating the\ + \ smartphone's accelerometer, the players apply gravitational force to a swarm\ + \ of particles in the virtual space. Machine learning-based gesture pattern recognition\ + \ is parallelly used to increase the functionality of the gestural command. Through\ + \ this interface, the player can achieve intuitive control of gravitation in virtual\ + \ reality (VR) space. The gravitation becomes the medium of the system involving\ + \ physics, graphics, and sonification which composes a multimodal compositional\ + \ language with cross-modal correspondence. Entangled is built on AlloLib, which\ + \ is a cross-platform suite of C++ components for building interactive multimedia\ + \ tools and applications. 
Throughout the script, the reason for each decision\ + \ is elaborated arguing the importance of crossmodal correspondence in the design\ + \ procedure.},\n address = {Shanghai, China},\n articleno = {37},\n author = {Lee,\ + \ Myungin},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.eae7c23f},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/NjpXFYDvuZw},\n\ + \ title = {Entangled: A Multi-Modal, Multi-User Interactive Instrument in Virtual\ + \ 3D Space Using the Smartphone for Gesture Control},\n url = {https://nime.pubpub.org/pub/4gt8wiy0},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178905 + doi: 10.21428/92fbeb44.eae7c23f issn: 2220-4806 month: June - pages: 359--362 - publisher: 'Goldsmiths, University of London' - title: Design and Evaluation of a Gesture Controlled Singing Voice Installation - url: http://www.nime.org/proceedings/2014/nime2014_439.pdf - year: 2014 + presentation-video: https://youtu.be/NjpXFYDvuZw + title: 'Entangled: A Multi-Modal, Multi-User Interactive Instrument in Virtual 3D + Space Using the Smartphone for Gesture Control' + url: https://nime.pubpub.org/pub/4gt8wiy0 + year: 2021 - ENTRYTYPE: inproceedings - ID: dwilliams2014 - abstract: 'This paper presents an implementation of a near real-time timbre morphing - signal processing system, designed to facilitate an element of `liveness'' and - unpredictability in a musical installation. The timbre morpher is a hybrid analysis - and synthesis technique based on Spectral Modeling Synthesis (an additive and - subtractive modeling technique). 
The musical installation forms an interactive - soundtrack in response to the series of Rosso Luana marble sculptures Shapes in - the Clouds, I, II, III, IV & V by artist Peter Randall-Page, exhibited at the - Peninsula Arts Gallery in Devon, UK, from 1 February to 29 March 2014. The timbre - morphing system is used to transform live input captured at each sculpture with - a discrete microphone array, by morphing towards noisy source signals that have - been associated with each sculpture as part of a pre-determined musical structure. - The resulting morphed audio is then fed-back to the gallery via a five-channel - speaker array. Visitors are encouraged to walk freely through the installation - and interact with the sound world, creating unique audio morphs based on their - own movements, voices, and incidental sounds.' - address: 'London, United Kingdom' - author: Duncan Williams and Peter Randall-Page and Eduardo Miranda - bibtex: "@inproceedings{dwilliams2014,\n abstract = {This paper presents an implementation\ - \ of a near real-time timbre morphing signal processing system, designed to facilitate\ - \ an element of `liveness' and unpredictability in a musical installation. The\ - \ timbre morpher is a hybrid analysis and synthesis technique based on Spectral\ - \ Modeling Synthesis (an additive and subtractive modeling technique). The musical\ - \ installation forms an interactive soundtrack in response to the series of Rosso\ - \ Luana marble sculptures Shapes in the Clouds, I, II, III, IV & V by artist Peter\ - \ Randall-Page, exhibited at the Peninsula Arts Gallery in Devon, UK, from 1 February\ - \ to 29 March 2014. The timbre morphing system is used to transform live input\ - \ captured at each sculpture with a discrete microphone array, by morphing towards\ - \ noisy source signals that have been associated with each sculpture as part of\ - \ a pre-determined musical structure. 
The resulting morphed audio is then fed-back\ - \ to the gallery via a five-channel speaker array. Visitors are encouraged to\ - \ walk freely through the installation and interact with the sound world, creating\ - \ unique audio morphs based on their own movements, voices, and incidental sounds.},\n\ - \ address = {London, United Kingdom},\n author = {Duncan Williams and Peter Randall-Page\ - \ and Eduardo Miranda},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178983},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {435--438},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Timbre morphing: near real-time hybrid synthesis\ - \ in a musical installation},\n url = {http://www.nime.org/proceedings/2014/nime2014_440.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_38 + abstract: 'We present Spire Muse, a co-creative musical agent that engages in different + kinds of interactive behaviors. The software utilizes corpora of solo instrumental + performances encoded as self-organized maps and outputs slices of the corpora + as concatenated, remodeled audio sequences. Transitions between behaviors can + be automated, and the interface enables the negotiation of these transitions through + feedback buttons that signal approval, force reversions to previous behaviors, + or request change. Musical responses are embedded in a pre-trained latent space, + emergent in the interaction, and influenced through the weighting of rhythmic, + spectral, harmonic, and melodic features. The training and run-time modules utilize + a modified version of the MASOM agent architecture. Our model stimulates spontaneous + creativity and reduces the need for the user to sustain analytical mind frames, + thereby optimizing flow. The agent traverses a system autonomy axis ranging from + reactive to proactive, which includes the behaviors of shadowing, mirroring, and + coupling. 
A fourth behavior—negotiation—is emergent from the interface between + agent and user. The synergy of corpora, interactive modes, and influences induces + musical responses along a musical similarity axis from converging to diverging. + We share preliminary observations from experiments with the agent and discuss + design challenges and future prospects.' + address: 'Shanghai, China' + articleno: 38 + author: 'Thelle, Notto J. W. and Pasquier, Philippe' + bibtex: "@inproceedings{NIME21_38,\n abstract = {We present Spire Muse, a co-creative\ + \ musical agent that engages in different kinds of interactive behaviors. The\ + \ software utilizes corpora of solo instrumental performances encoded as self-organized\ + \ maps and outputs slices of the corpora as concatenated, remodeled audio sequences.\ + \ Transitions between behaviors can be automated, and the interface enables the\ + \ negotiation of these transitions through feedback buttons that signal approval,\ + \ force reversions to previous behaviors, or request change. Musical responses\ + \ are embedded in a pre-trained latent space, emergent in the interaction, and\ + \ influenced through the weighting of rhythmic, spectral, harmonic, and melodic\ + \ features. The training and run-time modules utilize a modified version of the\ + \ MASOM agent architecture. Our model stimulates spontaneous creativity and reduces\ + \ the need for the user to sustain analytical mind frames, thereby optimizing\ + \ flow. The agent traverses a system autonomy axis ranging from reactive to proactive,\ + \ which includes the behaviors of shadowing, mirroring, and coupling. A fourth\ + \ behavior—negotiation—is emergent from the interface between agent and user.\ + \ The synergy of corpora, interactive modes, and influences induces musical responses\ + \ along a musical similarity axis from converging to diverging. 
We share preliminary\ + \ observations from experiments with the agent and discuss design challenges and\ + \ future prospects.},\n address = {Shanghai, China},\n articleno = {38},\n author\ + \ = {Thelle, Notto J. W. and Pasquier, Philippe},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.21428/92fbeb44.84c0b364},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/4QMQNyoGfOs},\n title = {Spire Muse: A Virtual Musical Partner\ + \ for Creative Brainstorming},\n url = {https://nime.pubpub.org/pub/wcj8sjee},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178983 + doi: 10.21428/92fbeb44.84c0b364 issn: 2220-4806 month: June - pages: 435--438 - publisher: 'Goldsmiths, University of London' - title: 'Timbre morphing: near real-time hybrid synthesis in a musical installation' - url: http://www.nime.org/proceedings/2014/nime2014_440.pdf - year: 2014 + presentation-video: https://youtu.be/4QMQNyoGfOs + title: 'Spire Muse: A Virtual Musical Partner for Creative Brainstorming' + url: https://nime.pubpub.org/pub/wcj8sjee + year: 2021 - ENTRYTYPE: inproceedings - ID: pvandertorren12014 - abstract: 'The Striso is a new expressive music instrument with an acoustic feel, - which is designed to be intuitive to play and playable everywhere. The sound of - every note can be precisely controlled using the direction and pressure sensitive - buttons, combined with instrument motion like tilting or shaking. It works standalone, - with an internal speaker and battery, and is meant as a self contained instrument - with its own distinct sound, but can also be connected to a computer to control - other synthesizers. The notes are arranged in an easy and systematic way, according - to the new DCompose note layout that is also presented in this paper. 
The DCompose - note layout is designed to be compact, ergonomic, easy to learn, and closely bound - to the harmonic properties of the notes.' - address: 'London, United Kingdom' - author: Piers Titus van der Torren - bibtex: "@inproceedings{pvandertorren12014,\n abstract = {The Striso is a new expressive\ - \ music instrument with an acoustic feel, which is designed to be intuitive to\ - \ play and playable everywhere. The sound of every note can be precisely controlled\ - \ using the direction and pressure sensitive buttons, combined with instrument\ - \ motion like tilting or shaking. It works standalone, with an internal speaker\ - \ and battery, and is meant as a self contained instrument with its own distinct\ - \ sound, but can also be connected to a computer to control other synthesizers.\ - \ The notes are arranged in an easy and systematic way, according to the new DCompose\ - \ note layout that is also presented in this paper. The DCompose note layout is\ - \ designed to be compact, ergonomic, easy to learn, and closely bound to the harmonic\ - \ properties of the notes.},\n address = {London, United Kingdom},\n author =\ - \ {Piers Titus van der Torren},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178957},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {615--620},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Striso, a Compact Expressive Instrument Based\ - \ on a New Isomorphic Note Layout},\n url = {http://www.nime.org/proceedings/2014/nime2014_442.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_39 + abstract: 'This paper introduces a new Electrumpet control system that affords for + quick and easy access to all its electro-acoustic features. The new implementation + uses virtuosic gestures learned on the acoustic trumpet for quick electronic control, + showing its effectiveness by controlling an innovative interactive harmoniser. 
+ Seamless transition from the smooth but rigid, often uncommunicative sound of + the harmoniser to a more noisy, open and chaotic sound world required the addition + of extra features and scenarios. This prepares the instrument for multiple musical + environments, including free improvised settings with large sonic diversity. The + system should particularly interest virtuoso improvising electroacoustic musicians + and hyperinstrument player/developers that combine many musical styles in their + art and that look for inspiration to use existing virtuosity for electronic control.' + address: 'Shanghai, China' + articleno: 39 + author: 'Leeuw, Hans' + bibtex: "@inproceedings{NIME21_39,\n abstract = {This paper introduces a new Electrumpet\ + \ control system that affords for quick and easy access to all its electro-acoustic\ + \ features. The new implementation uses virtuosic gestures learned on the acoustic\ + \ trumpet for quick electronic control, showing its effectiveness by controlling\ + \ an innovative interactive harmoniser. Seamless transition from the smooth but\ + \ rigid, often uncommunicative sound of the harmoniser to a more noisy, open and\ + \ chaotic sound world required the addition of extra features and scenarios. This\ + \ prepares the instrument for multiple musical environments, including free improvised\ + \ settings with large sonic diversity. 
The system should particularly interest\ + \ virtuoso improvising electroacoustic musicians and hyperinstrument player/developers\ + \ that combine many musical styles in their art and that look for inspiration\ + \ to use existing virtuosity for electronic control.},\n address = {Shanghai,\ + \ China},\n articleno = {39},\n author = {Leeuw, Hans},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.a8e0cceb},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/oHM_WfHOGUo},\n title = {Virtuoso mapping\ + \ for the Electrumpet, a hyperinstrument strategy},\n url = {https://nime.pubpub.org/pub/fxe52ym6},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178957 + doi: 10.21428/92fbeb44.a8e0cceb issn: 2220-4806 month: June - pages: 615--620 - publisher: 'Goldsmiths, University of London' - title: 'Striso, a Compact Expressive Instrument Based on a New Isomorphic Note Layout' - url: http://www.nime.org/proceedings/2014/nime2014_442.pdf - year: 2014 + presentation-video: https://youtu.be/oHM_WfHOGUo + title: 'Virtuoso mapping for the Electrumpet, a hyperinstrument strategy' + url: https://nime.pubpub.org/pub/fxe52ym6 + year: 2021 - ENTRYTYPE: inproceedings - ID: mbugge2014 - abstract: 'Reunion2012 is a work for electronically modified chessboard, chess players - and electronic instruments. The work is based on---but also departs from---John - Cage''s Reunion, which premiered at the Sightsoundsystems Festival, Toronto, 1968. - In the original performance, Cage and Marcel Duchamp played chess on an electronic - board constructed by Lowell Cross. The board `conducted'' various electronic sound - sources played by Cross, Gordon Mumma, David Tudor, and David Behrman, using photoresistors - fitted under the squares [1]. 
Reunion2012, on the other hand, utilises magnet - sensors via an Arduino. Like in Cage''s Variations V, this resulted in a musical - situation where the improvising musicians had full control over their own sound, - but no control regarding when their sound may be heard. In addition to a concert - version, this paper also describes an interactive installation based on the same - hardware.' - address: 'London, United Kingdom' - author: Anders Tveit and Hans Wilmers and Notto Thelle and Magnus Bugge and Thom - Johansen and Eskil Muan Sæther - bibtex: "@inproceedings{mbugge2014,\n abstract = {Reunion2012 is a work for electronically\ - \ modified chessboard, chess players and electronic instruments. The work is based\ - \ on---but also departs from---John Cage's Reunion, which premiered at the Sightsoundsystems\ - \ Festival, Toronto, 1968. In the original performance, Cage and Marcel Duchamp\ - \ played chess on an electronic board constructed by Lowell Cross. The board `conducted'\ - \ various electronic sound sources played by Cross, Gordon Mumma, David Tudor,\ - \ and David Behrman, using photoresistors fitted under the squares [1]. Reunion2012,\ - \ on the other hand, utilises magnet sensors via an Arduino. Like in Cage's Variations\ - \ V, this resulted in a musical situation where the improvising musicians had\ - \ full control over their own sound, but no control regarding when their sound\ - \ may be heard. 
In addition to a concert version, this paper also describes an\ - \ interactive installation based on the same hardware.},\n address = {London,\ - \ United Kingdom},\n author = {Anders Tveit and Hans Wilmers and Notto Thelle\ - \ and Magnus Bugge and Thom Johansen and Eskil Muan S{\\ae}ther},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178969},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {561--564},\n publisher = {Goldsmiths, University of London},\n\ - \ title = {{Reunion}2012: A Novel Interface for Sound Producing Actions Through\ - \ the Game of Chess},\n url = {http://www.nime.org/proceedings/2014/nime2014_443.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_4 + abstract: 'In this paper, we discuss the importance of replicability in Digital + Musical Instrument (DMI) design and the NIME community. Replication enables us + to: create new artifacts based on existing ones, experiment DMIs in different + contexts and cultures, and validate obtained results from evaluations. We investigate + how the papers present artifact documentation and source code by analyzing the + NIME proceedings from 2018, 2019, and 2020. We argue that the presence and the + quality of documentation are good indicators of replicability and can be beneficial + for the NIME community. Finally, we discuss the importance of documentation for + replication, propose a call to action towards more replicable projects, and present + a practical guide informing future steps toward replicability in the NIME community.' + address: 'Shanghai, China' + articleno: 4 + author: 'Calegario, Filipe and Tragtenberg, João and Frisson, Christian and Meneses, + Eduardo and Malloch, Joseph and Cusson, Vincent and Wanderley, Marcelo M.' 
+ bibtex: "@inproceedings{NIME21_4,\n abstract = {In this paper, we discuss the importance\ + \ of replicability in Digital Musical Instrument (DMI) design and the NIME community.\ + \ Replication enables us to: create new artifacts based on existing ones, experiment\ + \ DMIs in different contexts and cultures, and validate obtained results from\ + \ evaluations. We investigate how the papers present artifact documentation and\ + \ source code by analyzing the NIME proceedings from 2018, 2019, and 2020. We\ + \ argue that the presence and the quality of documentation are good indicators\ + \ of replicability and can be beneficial for the NIME community. Finally, we discuss\ + \ the importance of documentation for replication, propose a call to action towards\ + \ more replicable projects, and present a practical guide informing future steps\ + \ toward replicability in the NIME community.},\n address = {Shanghai, China},\n\ + \ articleno = {4},\n author = {Calegario, Filipe and Tragtenberg, João and Frisson,\ + \ Christian and Meneses, Eduardo and Malloch, Joseph and Cusson, Vincent and Wanderley,\ + \ Marcelo M.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.dc50e34d},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/ySh5SueLMAA},\n\ + \ title = {Documentation and Replicability in the NIME Community},\n url = {https://nime.pubpub.org/pub/czq0nt9i},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178969 + doi: 10.21428/92fbeb44.dc50e34d issn: 2220-4806 month: June - pages: 561--564 - publisher: 'Goldsmiths, University of London' - title: 'Reunion2012: A Novel Interface for Sound Producing Actions Through the Game - of Chess' - url: http://www.nime.org/proceedings/2014/nime2014_443.pdf - year: 2014 + presentation-video: 
https://youtu.be/ySh5SueLMAA + title: Documentation and Replicability in the NIME Community + url: https://nime.pubpub.org/pub/czq0nt9i + year: 2021 - ENTRYTYPE: inproceedings - ID: avantroyer2014 - abstract: 'Acoustic feedback controllers (AFCs) are typically applied to solve feedback - problems evident in applications such as public address (PA) systems, hearing - aids, and speech applications. Applying the techniques of AFCs to different contexts, - such as musical performance, sound installations, and product design, presents - a unique insight into the research of embodied sonic interfaces and environments. - This paper presents techniques that use digital acoustic feedback control algorithms - to augment the sonic properties of environments and discusses approaches to the - design of sonically playful experiences that apply such techniques. Three experimental - prototypes are described to illustrate how the techniques can be applied to versatile - environments and continuous coupling of users'' audible actions with sonically - augmented environments. The knowledge obtained from these prototypes has led to - Acoustic Feedback Ecology System (AFES) design patterns. The paper concludes with - some future research directions based on the prototypes and proposes several other - potentially useful applications ranging from musical performance to everyday contexts.' - address: 'London, United Kingdom' - author: Akito van Troyer - bibtex: "@inproceedings{avantroyer2014,\n abstract = {Acoustic feedback controllers\ - \ (AFCs) are typically applied to solve feedback problems evident in applications\ - \ such as public address (PA) systems, hearing aids, and speech applications.\ - \ Applying the techniques of AFCs to different contexts, such as musical performance,\ - \ sound installations, and product design, presents a unique insight into the\ - \ research of embodied sonic interfaces and environments. 
This paper presents\ - \ techniques that use digital acoustic feedback control algorithms to augment\ - \ the sonic properties of environments and discusses approaches to the design\ - \ of sonically playful experiences that apply such techniques. Three experimental\ - \ prototypes are described to illustrate how the techniques can be applied to\ - \ versatile environments and continuous coupling of users' audible actions with\ - \ sonically augmented environments. The knowledge obtained from these prototypes\ - \ has led to Acoustic Feedback Ecology System (AFES) design patterns. The paper\ - \ concludes with some future research directions based on the prototypes and proposes\ - \ several other potentially useful applications ranging from musical performance\ - \ to everyday contexts.},\n address = {London, United Kingdom},\n author = {Akito\ - \ van Troyer},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178961},\n issn\ - \ = {2220-4806},\n month = {June},\n pages = {118--121},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Composing Embodied Sonic Play Experiences:\ - \ Towards Acoustic Feedback Ecology},\n url = {http://www.nime.org/proceedings/2014/nime2014_444.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_40 + abstract: 'The use of crowdsourced sounds in live coding can be seen as an example + of asynchronous collaboration. It is not uncommon for crowdsourced databases to + return unexpected results to the queries submitted by a user. In such a situation, + a live coder is likely to require some degree of additional filtering to adapt + the results to her/his musical intentions. We refer to this context-dependent + decisions as situated musical actions. Here, we present directions for designing + a customisable virtual companion to help live coders in their practice. 
In particular, + we introduce a machine learning (ML) model that, based on a set of examples provided + by the live coder, filters the crowdsourced sounds retrieved from the Freesound + online database at performance time. We evaluated a first illustrative model using + objective and subjective measures. We tested a more generic live coding framework + in two performances and two workshops, where several ML models have been trained + and used. We discuss the promising results for ML in education, live coding practices + and the design of future NIMEs.' + address: 'Shanghai, China' + articleno: 40 + author: 'Xambó, Anna and Roma, Gerard and Roig, Sam and Solaz, Eduard' + bibtex: "@inproceedings{NIME21_40,\n abstract = {The use of crowdsourced sounds\ + \ in live coding can be seen as an example of asynchronous collaboration. It is\ + \ not uncommon for crowdsourced databases to return unexpected results to the\ + \ queries submitted by a user. In such a situation, a live coder is likely to\ + \ require some degree of additional filtering to adapt the results to her/his\ + \ musical intentions. We refer to this context-dependent decisions as situated\ + \ musical actions. Here, we present directions for designing a customisable virtual\ + \ companion to help live coders in their practice. In particular, we introduce\ + \ a machine learning (ML) model that, based on a set of examples provided by the\ + \ live coder, filters the crowdsourced sounds retrieved from the Freesound online\ + \ database at performance time. We evaluated a first illustrative model using\ + \ objective and subjective measures. We tested a more generic live coding framework\ + \ in two performances and two workshops, where several ML models have been trained\ + \ and used. 
We discuss the promising results for ML in education, live coding\ + \ practices and the design of future NIMEs.},\n address = {Shanghai, China},\n\ + \ articleno = {40},\n author = {Xambó, Anna and Roma, Gerard and Roig, Sam and\ + \ Solaz, Eduard},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.64c9f217},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/F4UoH1hRMoU},\n\ + \ title = {Live Coding with the Cloud and a Virtual Agent},\n url = {https://nime.pubpub.org/pub/zpdgg2fg},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178961 + doi: 10.21428/92fbeb44.64c9f217 issn: 2220-4806 month: June - pages: 118--121 - publisher: 'Goldsmiths, University of London' - title: 'Composing Embodied Sonic Play Experiences: Towards Acoustic Feedback Ecology' - url: http://www.nime.org/proceedings/2014/nime2014_444.pdf - year: 2014 + presentation-video: https://youtu.be/F4UoH1hRMoU + title: Live Coding with the Cloud and a Virtual Agent + url: https://nime.pubpub.org/pub/zpdgg2fg + year: 2021 - ENTRYTYPE: inproceedings - ID: mcartwright2014 - abstract: 'Programming an audio synthesizer can be a difficult task for many. However, - if a user has a general idea of the sound they are trying to program, they may - be able to imitate it with their voice. This paper presents SynthAssist, a system - for interactively searching the synthesis space of an audio synthesizer. In this - work, we present how to use the system for querying a database of audio synthesizer - patches (i.e. settings/parameters) by vocal imitation and user feedback. To account - for the limitations of the human voice, it uses both absolute and relative time - series representations of features and relevance feedback on both the feature - weights and time series to refine the query. 
The method presented in this paper - can be used to search through large databases of previously existing ``factory - presets'''' or program a synthesizer using the data-driven approach to automatic - synthesizer programming.' - address: 'London, United Kingdom' - author: Mark Cartwright and Bryan Pardo - bibtex: "@inproceedings{mcartwright2014,\n abstract = {Programming an audio synthesizer\ - \ can be a difficult task for many. However, if a user has a general idea of the\ - \ sound they are trying to program, they may be able to imitate it with their\ - \ voice. This paper presents SynthAssist, a system for interactively searching\ - \ the synthesis space of an audio synthesizer. In this work, we present how to\ - \ use the system for querying a database of audio synthesizer patches (i.e. settings/parameters)\ - \ by vocal imitation and user feedback. To account for the limitations of the\ - \ human voice, it uses both absolute and relative time series representations\ - \ of features and relevance feedback on both the feature weights and time series\ - \ to refine the query. The method presented in this paper can be used to search\ - \ through large databases of previously existing ``factory presets'' or program\ - \ a synthesizer using the data-driven approach to automatic synthesizer programming.},\n\ - \ address = {London, United Kingdom},\n author = {Mark Cartwright and Bryan Pardo},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178730},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {363--366},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {SynthAssist: Querying an Audio Synthesizer by Vocal Imitation},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_446.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_41 + abstract: 'In this paper, we propose COSMIC, a COnverSational Interface for Human-AI + MusIc Co-Creation. 
It is a chatbot with a two-fold design philosophy: to understand + human creative intent and to help humans in their creation. The core Natural Language + Processing (NLP) module is responsible for three functions: 1) understanding human + needs in chat, 2) cross-modal interaction between natural language understanding + and music generation models, and 3) mixing and coordinating multiple algorithms + to complete the composition.1' + address: 'Shanghai, China' + articleno: 41 + author: 'Zhang, Yixiao and Xia, Gus and Levy, Mark and Dixon, Simon' + bibtex: "@inproceedings{NIME21_41,\n abstract = {In this paper, we propose COSMIC,\ + \ a COnverSational Interface for Human-AI MusIc Co-Creation. It is a chatbot with\ + \ a two-fold design philosophy: to understand human creative intent and to help\ + \ humans in their creation. The core Natural Language Processing (NLP) module\ + \ is responsible for three functions: 1) understanding human needs in chat, 2)\ + \ cross-modal interaction between natural language understanding and music generation\ + \ models, and 3) mixing and coordinating multiple algorithms to complete the composition.1},\n\ + \ address = {Shanghai, China},\n articleno = {41},\n author = {Zhang, Yixiao and\ + \ Xia, Gus and Levy, Mark and Dixon, Simon},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.110a7a32},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/o5YO0ni7sng},\n title = {COSMIC: A Conversational Interface\ + \ for Human-AI Music Co-Creation},\n url = {https://nime.pubpub.org/pub/in6wsc9t},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178730 + doi: 10.21428/92fbeb44.110a7a32 issn: 2220-4806 month: June - pages: 363--366 - publisher: 'Goldsmiths, University of London' - title: 'SynthAssist: Querying an Audio 
Synthesizer by Vocal Imitation' - url: http://www.nime.org/proceedings/2014/nime2014_446.pdf - year: 2014 + presentation-video: https://youtu.be/o5YO0ni7sng + title: 'COSMIC: A Conversational Interface for Human-AI Music Co-Creation' + url: https://nime.pubpub.org/pub/in6wsc9t + year: 2021 - ENTRYTYPE: inproceedings - ID: chutchins2014 - abstract: 'Government spying on internet traffic has seemingly become ubiquitous. - Not to be left out, the private sector tracks our online footprint via our ISP - or with a little help from facebook. Web services, such as advertisement servers - and Google track our progress as we surf the net and click on links. The Mozilla - plugin, Lightbeam (formerly Collusion), shows the user a visual map of every site - a surfer sends data to. A interconnected web of advertisers and other (otherwise) - invisible data-gatherers quickly builds during normal usage. We propose modifying - this plugin so that as the graph builds, its state is broadcast visa OSC. Members - of BiLE will receive and interpret those OSC messages in SuperCollider and PD. - We will act as a translational object in a process of live-sonification. The collected - data is the material with which we will develop a set of music tracks based on - patterns we may discover. The findings of our data collection and the developed - music will be presented in the form of an audiovisual live performance. Snippets - of collected text and URLs will both form the basis of our audio interpretation, - but also be projected on to a screen, so an audience can voyeuristically experience - the actions taken on their behalf by governments and advertisers. After the concert, - all of the scripts and documentation related to the data collection and sharing - in the piece will be posted to github under a GPL license.' 
- address: 'London, United Kingdom' - author: Charles Hutchins and Holger Ballweg and Shelly Knotts and Jonas Hummel and - Antonio Roberts - bibtex: "@inproceedings{chutchins2014,\n abstract = {Government spying on internet\ - \ traffic has seemingly become ubiquitous. Not to be left out, the private sector\ - \ tracks our online footprint via our ISP or with a little help from facebook.\ - \ Web services, such as advertisement servers and Google track our progress as\ - \ we surf the net and click on links. The Mozilla plugin, Lightbeam (formerly\ - \ Collusion), shows the user a visual map of every site a surfer sends data to.\ - \ A interconnected web of advertisers and other (otherwise) invisible data-gatherers\ - \ quickly builds during normal usage. We propose modifying this plugin so that\ - \ as the graph builds, its state is broadcast visa OSC. Members of BiLE will receive\ - \ and interpret those OSC messages in SuperCollider and PD. We will act as a translational\ - \ object in a process of live-sonification. The collected data is the material\ - \ with which we will develop a set of music tracks based on patterns we may discover.\ - \ The findings of our data collection and the developed music will be presented\ - \ in the form of an audiovisual live performance. Snippets of collected text and\ - \ URLs will both form the basis of our audio interpretation, but also be projected\ - \ on to a screen, so an audience can voyeuristically experience the actions taken\ - \ on their behalf by governments and advertisers. 
After the concert, all of the\ - \ scripts and documentation related to the data collection and sharing in the\ - \ piece will be posted to github under a GPL license.},\n address = {London, United\ - \ Kingdom},\n author = {Charles Hutchins and Holger Ballweg and Shelly Knotts\ - \ and Jonas Hummel and Antonio Roberts},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178810},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {497--498},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Soundbeam: A Platform for Sonyfing Web Tracking},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_447.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_42 + abstract: 'This paper presents Living Sounds, an internet radio station and online + venue hosted by nature. The virtual space is animated by live sound from a restored + wetland wildlife sanctuary, spatially mixed from dozens of 24/7 streaming microphones + across the landscape. The station’s guests are invited artists and others whose + performances are responsive to and contingent upon the ever-changing environmental + sound. Subtle, sound-active drawings by different visual designers anchor the + one-page website. Using low latency, high fidelity WebRTC, our system allows guests + to mix themselves in, remix the raw nature streams, or run our multichannel sources + fully through their own processors. Created in early 2020 in response to the locked + down conditions of the COVID-19 pandemic, the site became a virtual oasis, with + usage data showing long duration visits. In collaboration with several festivals + that went online in 2020, programmed live content included music, storytelling, + and guided meditation. One festival commissioned a local microphone installation, + resulting in a second nature source for the station: 5-channels of sound from + a small Maine island. 
Catalyzed by recent events, when many have been separated + from environments of inspiration and restoration, we propose Living Sounds as + both a virtual nature space for cohabitation and a new kind of contingent online + venue.' + address: 'Shanghai, China' + articleno: 42 + author: 'Dublon, Gershon and Liu, Xin' + bibtex: "@inproceedings{NIME21_42,\n abstract = {This paper presents Living Sounds,\ + \ an internet radio station and online venue hosted by nature. The virtual space\ + \ is animated by live sound from a restored wetland wildlife sanctuary, spatially\ + \ mixed from dozens of 24/7 streaming microphones across the landscape. The station’s\ + \ guests are invited artists and others whose performances are responsive to and\ + \ contingent upon the ever-changing environmental sound. Subtle, sound-active\ + \ drawings by different visual designers anchor the one-page website. Using low\ + \ latency, high fidelity WebRTC, our system allows guests to mix themselves in,\ + \ remix the raw nature streams, or run our multichannel sources fully through\ + \ their own processors. Created in early 2020 in response to the locked down conditions\ + \ of the COVID-19 pandemic, the site became a virtual oasis, with usage data showing\ + \ long duration visits. 
In collaboration with several festivals that went online\ + \ in 2020, programmed live content included music, storytelling, and guided meditation.\ + \ One festival commissioned a local microphone installation, resulting in a second\ + \ nature source for the station: 5-channels of sound from a small Maine island.\ + \ Catalyzed by recent events, when many have been separated from environments\ + \ of inspiration and restoration, we propose Living Sounds as both a virtual nature\ + \ space for cohabitation and a new kind of contingent online venue.},\n address\ + \ = {Shanghai, China},\n articleno = {42},\n author = {Dublon, Gershon and Liu,\ + \ Xin},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.b90e0fcb},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/tE4YMDf-bQE},\n title\ + \ = {Living Sounds: Live Nature Sound as Online Performance Space},\n url = {https://nime.pubpub.org/pub/46by9xxn},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178810 + doi: 10.21428/92fbeb44.b90e0fcb issn: 2220-4806 month: June - pages: 497--498 - publisher: 'Goldsmiths, University of London' - title: 'Soundbeam: A Platform for Sonyfing Web Tracking' - url: http://www.nime.org/proceedings/2014/nime2014_447.pdf - year: 2014 + presentation-video: https://youtu.be/tE4YMDf-bQE + title: 'Living Sounds: Live Nature Sound as Online Performance Space' + url: https://nime.pubpub.org/pub/46by9xxn + year: 2021 - ENTRYTYPE: inproceedings - ID: jcomajuncosas2014 - abstract: 'According to the tradition, music ensembles are usually lead by a conductor - who is the responsible to coordinate and guide the group under a specific musical - criteria. 
Similarly, computer ensembles resort to a conductor to keep the synchronization - and structural coordination of the performance, often with the assistance of software. - Achieving integration and coherence in a networked performance, however, can be - challenging in certain scenarios. This is the case for configurations with a high - degree of mutual interdependence and shared control. This paper focuses on the - design strategies for developing a software based conductor assistant for collective - instruments. We propose a novel conductor dimension space representation for collective - instruments, which takes into account both its social and structural features. - We present a case study of a collective instrument implementing a software conductor. - Finally, we discuss the implications of human and machine conduction schemes in - the context of the proposed dimension space.' - address: 'London, United Kingdom' - author: Josep Comajuncosas and Enric Guaus - bibtex: "@inproceedings{jcomajuncosas2014,\n abstract = {According to the tradition,\ - \ music ensembles are usually lead by a conductor who is the responsible to coordinate\ - \ and guide the group under a specific musical criteria. Similarly, computer ensembles\ - \ resort to a conductor to keep the synchronization and structural coordination\ - \ of the performance, often with the assistance of software. Achieving integration\ - \ and coherence in a networked performance, however, can be challenging in certain\ - \ scenarios. This is the case for configurations with a high degree of mutual\ - \ interdependence and shared control. This paper focuses on the design strategies\ - \ for developing a software based conductor assistant for collective instruments.\ - \ We propose a novel conductor dimension space representation for collective instruments,\ - \ which takes into account both its social and structural features. 
We present\ - \ a case study of a collective instrument implementing a software conductor. Finally,\ - \ we discuss the implications of human and machine conduction schemes in the context\ - \ of the proposed dimension space.},\n address = {London, United Kingdom},\n author\ - \ = {Josep Comajuncosas and Enric Guaus},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178736},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {513--516},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Conducting Collective Instruments : A Case\ - \ Study},\n url = {http://www.nime.org/proceedings/2014/nime2014_448.pdf},\n year\ - \ = {2014}\n}\n" + ID: NIME21_43 + abstract: 'Speculātor is presented as a fist-sized, battery-powered, environmentally + aware, soundscape augmentation artifact that listens to the sonic environment + and provides real-time illuminated visual feedback in reaction to what it hears. + The visual soundscape augmentations these units offer allow for creating sonic + art installations whose artistic subject is the unaltered in-situ sonic environment. + Speculātor is designed to be quickly installed in exposed outdoor environments + without power infrastructure to allow maximum flexibility when selecting exhibition + locations. Data from light, temperature, and humidity sensors guide behavior to + maximize soundscape augmentation effectiveness and protect artifacts from operating + under dangerous environmental conditions. To highlight the music-like qualities + of cicada vocalizations, installations conducted between October 2019 and March + 2020, where multiple Speculātor units are installed in outdoor natural locations + are presented as an initial case study.' + address: 'Shanghai, China' + articleno: 43 + author: 'Villicaña-Shaw, Nathan and Carnegie, Dale A. 
and Murphy, Jim and Zareei, + Mo' + bibtex: "@inproceedings{NIME21_43,\n abstract = {Speculātor is presented as a fist-sized,\ + \ battery-powered, environmentally aware, soundscape augmentation artifact that\ + \ listens to the sonic environment and provides real-time illuminated visual feedback\ + \ in reaction to what it hears. The visual soundscape augmentations these units\ + \ offer allow for creating sonic art installations whose artistic subject is the\ + \ unaltered in-situ sonic environment. Speculātor is designed to be quickly installed\ + \ in exposed outdoor environments without power infrastructure to allow maximum\ + \ flexibility when selecting exhibition locations. Data from light, temperature,\ + \ and humidity sensors guide behavior to maximize soundscape augmentation effectiveness\ + \ and protect artifacts from operating under dangerous environmental conditions.\ + \ To highlight the music-like qualities of cicada vocalizations, installations\ + \ conducted between October 2019 and March 2020, where multiple Speculātor units\ + \ are installed in outdoor natural locations are presented as an initial case\ + \ study.},\n address = {Shanghai, China},\n articleno = {43},\n author = {Villicaña-Shaw,\ + \ Nathan and Carnegie, Dale A. 
and Murphy, Jim and Zareei, Mo},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.e521c5a4},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/kP3fDzAHXDw},\n title = {Speculātor:\ + \ visual soundscape augmentation of natural environments},\n url = {https://nime.pubpub.org/pub/pxr0grnk},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178736 + doi: 10.21428/92fbeb44.e521c5a4 issn: 2220-4806 month: June - pages: 513--516 - publisher: 'Goldsmiths, University of London' - title: 'Conducting Collective Instruments : A Case Study' - url: http://www.nime.org/proceedings/2014/nime2014_448.pdf - year: 2014 + presentation-video: https://youtu.be/kP3fDzAHXDw + title: 'Speculātor: visual soundscape augmentation of natural environments' + url: https://nime.pubpub.org/pub/pxr0grnk + year: 2021 - ENTRYTYPE: inproceedings - ID: mgurevich12014 - abstract: 'Drawing on concepts from systemics, cybernetics, and musical automata, - this paper proposes a mechatronic, electroacoustic instrument that allows for - shared control between programmed, mechanized motion and a human interactor. We - suggest that such an instrument, situated somewhere between a robotic musical - instrument and a passive controller, will foster the emergence of new, complex, - and meaningful modes of musical interaction. In line with the methodological principles - of practice as research, we describe the development and design of one such instrument-Stringtrees. - The design process also reflects the notion of ambiguity as a resource in design: - The instrument was endowed with a collection of sensors, controls, and actuators - without a highly specific or prescriptive model for how a musician would interact - with it.' 
- address: 'London, United Kingdom' - author: Michael Gurevich - bibtex: "@inproceedings{mgurevich12014,\n abstract = {Drawing on concepts from systemics,\ - \ cybernetics, and musical automata, this paper proposes a mechatronic, electroacoustic\ - \ instrument that allows for shared control between programmed, mechanized motion\ - \ and a human interactor. We suggest that such an instrument, situated somewhere\ - \ between a robotic musical instrument and a passive controller, will foster the\ - \ emergence of new, complex, and meaningful modes of musical interaction. In line\ - \ with the methodological principles of practice as research, we describe the\ - \ development and design of one such instrument-Stringtrees. The design process\ - \ also reflects the notion of ambiguity as a resource in design: The instrument\ - \ was endowed with a collection of sensors, controls, and actuators without a\ - \ highly specific or prescriptive model for how a musician would interact with\ - \ it.},\n address = {London, United Kingdom},\n author = {Michael Gurevich},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178780},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {487--490},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Distributed Control in a Mechatronic Musical Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_449.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_44 + abstract: 'This paper outlines a demonstration of an acoustic piano augmentation + that allows for infinite sustain of one or many notes. The result is a natural + sounding piano sustain that lasts for an unnatural period of time. Using a tactile + shaker, a contact microphone and an amplitude activated FFT-freeze Max patch, + this system is easily assembled and creates an infinitely sustaining piano.' 
+ address: 'Shanghai, China' + articleno: 44 + author: 'Thompson, William and Berdahl, Edgar' + bibtex: "@inproceedings{NIME21_44,\n abstract = {This paper outlines a demonstration\ + \ of an acoustic piano augmentation that allows for infinite sustain of one or\ + \ many notes. The result is a natural sounding piano sustain that lasts for an\ + \ unnatural period of time. Using a tactile shaker, a contact microphone and an\ + \ amplitude activated FFT-freeze Max patch, this system is easily assembled and\ + \ creates an infinitely sustaining piano.},\n address = {Shanghai, China},\n articleno\ + \ = {44},\n author = {Thompson, William and Berdahl, Edgar},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.2c4879f5},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/YRby0VdL8Nk},\n title = {An Infinitely\ + \ Sustaining Piano Achieved Through a Soundboard-Mounted Shaker },\n url = {https://nime.pubpub.org/pub/cde9r70r},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178780 + doi: 10.21428/92fbeb44.2c4879f5 issn: 2220-4806 month: June - pages: 487--490 - publisher: 'Goldsmiths, University of London' - title: Distributed Control in a Mechatronic Musical Instrument - url: http://www.nime.org/proceedings/2014/nime2014_449.pdf - year: 2014 + presentation-video: https://youtu.be/YRby0VdL8Nk + title: 'An Infinitely Sustaining Piano Achieved Through a Soundboard-Mounted Shaker ' + url: https://nime.pubpub.org/pub/cde9r70r + year: 2021 - ENTRYTYPE: inproceedings - ID: dschwarz12014 - abstract: 'We propose ways of enriching the timbral potential of gestural sonic - material captured via piezo or contact microphones, through latency-free convolution - of the microphone signal with grains from a sound corpus. 
This creates a new way - to combine the sonic richness of large sound corpora, easily accessible via navigation - through a timbral descriptor space, with the intuitive gestural interaction with - a surface, captured by any contact microphone. We use convolution to excite the - grains from the corpus via the microphone input, capturing the contact interaction - sounds, which allows articulation of the corpus by hitting, scratching, or strumming - a surface with various parts of the hands or objects. We also show how changes - of grains have to be carefully handled, how one can smoothly interpolate between - neighbouring grains, and finally evaluate the system against previous attempts.' - address: 'London, United Kingdom' - author: Diemo Schwarz and Pierre Alexandre Tremblay and Alex Harker - bibtex: "@inproceedings{dschwarz12014,\n abstract = {We propose ways of enriching\ - \ the timbral potential of gestural sonic material captured via piezo or contact\ - \ microphones, through latency-free convolution of the microphone signal with\ - \ grains from a sound corpus. This creates a new way to combine the sonic richness\ - \ of large sound corpora, easily accessible via navigation through a timbral descriptor\ - \ space, with the intuitive gestural interaction with a surface, captured by any\ - \ contact microphone. We use convolution to excite the grains from the corpus\ - \ via the microphone input, capturing the contact interaction sounds, which allows\ - \ articulation of the corpus by hitting, scratching, or strumming a surface with\ - \ various parts of the hands or objects. 
We also show how changes of grains have\ - \ to be carefully handled, how one can smoothly interpolate between neighbouring\ - \ grains, and finally evaluate the system against previous attempts.},\n address\ - \ = {London, United Kingdom},\n author = {Diemo Schwarz and Pierre Alexandre Tremblay\ - \ and Alex Harker},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178935},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {247--250},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Rich Contacts: Corpus-Based Convolution of\ - \ Contact Interaction Sound for Enhanced Musical Expression},\n url = {http://www.nime.org/proceedings/2014/nime2014_451.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_45 + abstract: 'Block-based coding environments enable novices to write code that bypasses + the syntactic complexities of text. However, we see a lack of effective block-based + tools that balance programming with expressive music making. We introduce Toneblocks1, + a prototype web application intended to be intuitive and engaging for novice users + with interests in computer programming and music. Toneblocks is designed to lower + the barrier of entry while increasing the ceiling of expression for advanced users. + In Toneblocks, users produce musical loops ranging from static sequences to generative + systems, and can manipulate their properties live. Pilot usability tests conducted + with two participants provide evidence that the current prototype is easy to use + and can produce complex musical output. An evaluation offers potential future + improvements including user-defined variables and functions, and rhythmic variability.' 
+ address: 'Shanghai, China' + articleno: 45 + author: 'Quigley, Michael and Payne, William' + bibtex: "@inproceedings{NIME21_45,\n abstract = {Block-based coding environments\ + \ enable novices to write code that bypasses the syntactic complexities of text.\ + \ However, we see a lack of effective block-based tools that balance programming\ + \ with expressive music making. We introduce Toneblocks1, a prototype web application\ + \ intended to be intuitive and engaging for novice users with interests in computer\ + \ programming and music. Toneblocks is designed to lower the barrier of entry\ + \ while increasing the ceiling of expression for advanced users. In Toneblocks,\ + \ users produce musical loops ranging from static sequences to generative systems,\ + \ and can manipulate their properties live. Pilot usability tests conducted with\ + \ two participants provide evidence that the current prototype is easy to use\ + \ and can produce complex musical output. An evaluation offers potential future\ + \ improvements including user-defined variables and functions, and rhythmic variability.},\n\ + \ address = {Shanghai, China},\n articleno = {45},\n author = {Quigley, Michael\ + \ and Payne, William},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.46c0f6ef},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/c64l1hK3QiY},\n\ + \ title = {Toneblocks: Block-based musical programming},\n url = {https://nime.pubpub.org/pub/qn6lqnzx},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178935 + doi: 10.21428/92fbeb44.46c0f6ef issn: 2220-4806 month: June - pages: 247--250 - publisher: 'Goldsmiths, University of London' - title: 'Rich Contacts: Corpus-Based Convolution of Contact Interaction Sound for - Enhanced Musical Expression' - url: 
http://www.nime.org/proceedings/2014/nime2014_451.pdf - year: 2014 + presentation-video: https://youtu.be/c64l1hK3QiY + title: 'Toneblocks: Block-based musical programming' + url: https://nime.pubpub.org/pub/qn6lqnzx + year: 2021 - ENTRYTYPE: inproceedings - ID: fvisi2014 - abstract: 'This work describes a new approach to gesture mapping in a performance - with a traditional musical instrument and live electronics based upon theories - of embodied music cognition (EMC) and musical gestures. Considerations on EMC - and how gestures affect the experience of music inform different mapping strategies. - Our intent is to enhance the expressiveness and the liveness of performance by - tracking gestures via a multimodal motion capture system and to use motion data - to control several features of the music. After a review of recent research in - the field, a proposed application of such theories to a performance with electric - guitar and live electronics will follow, focusing both on aspects of meaning formation - and motion capturing.' - address: 'London, United Kingdom' - author: Federico Visi and Rodrigo Schramm and Eduardo Miranda - bibtex: "@inproceedings{fvisi2014,\n abstract = {This work describes a new approach\ - \ to gesture mapping in a performance with a traditional musical instrument and\ - \ live electronics based upon theories of embodied music cognition (EMC) and musical\ - \ gestures. Considerations on EMC and how gestures affect the experience of music\ - \ inform different mapping strategies. 
Our intent is to enhance the expressiveness\ - \ and the liveness of performance by tracking gestures via a multimodal motion\ - \ capture system and to use motion data to control several features of the music.\ - \ After a review of recent research in the field, a proposed application of such\ - \ theories to a performance with electric guitar and live electronics will follow,\ - \ focusing both on aspects of meaning formation and motion capturing.},\n address\ - \ = {London, United Kingdom},\n author = {Federico Visi and Rodrigo Schramm and\ - \ Eduardo Miranda},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178973},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {601--604},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Use of Body Motion to Enhance Traditional\ - \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_460.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_46 + abstract: 'This paper introduces “Ripples”, an iOS application for the Atlanta Botanical + Garden that uses auditory augmented reality to provide an intuitive music guide + by seamlessly integrating information about the garden into the visiting experience. + For each point of interest nearby, “Ripples” generates music in real time, representing + a location through data collected from users’ smartphones. The music is then overlaid + onto the physical environment and binaural spatialization indicates real-world + coordinates of their represented places. By taking advantage of the human auditory + sense’s innate spatial sound source localization and source separation capabilities, + “Ripples” makes navigation intuitive and information easy to understand.' 
+ address: 'Shanghai, China' + articleno: 46 + author: 'Wu, Yi and Freeman, Jason' + bibtex: "@inproceedings{NIME21_46,\n abstract = {This paper introduces “Ripples”,\ + \ an iOS application for the Atlanta Botanical Garden that uses auditory augmented\ + \ reality to provide an intuitive music guide by seamlessly integrating information\ + \ about the garden into the visiting experience. For each point of interest nearby,\ + \ “Ripples” generates music in real time, representing a location through data\ + \ collected from users’ smartphones. The music is then overlaid onto the physical\ + \ environment and binaural spatialization indicates real-world coordinates of\ + \ their represented places. By taking advantage of the human auditory sense’s\ + \ innate spatial sound source localization and source separation capabilities,\ + \ “Ripples” makes navigation intuitive and information easy to understand.},\n\ + \ address = {Shanghai, China},\n articleno = {46},\n author = {Wu, Yi and Freeman,\ + \ Jason},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.b8e82252},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/T7EJVACX3QI},\n title\ + \ = {Ripples: An Auditory Augmented Reality iOS Application for the Atlanta Botanical\ + \ Garden},\n url = {https://nime.pubpub.org/pub/n1o19efr},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178973 + doi: 10.21428/92fbeb44.b8e82252 issn: 2220-4806 month: June - pages: 601--604 - publisher: 'Goldsmiths, University of London' - title: Use of Body Motion to Enhance Traditional Musical Instruments - url: http://www.nime.org/proceedings/2014/nime2014_460.pdf - year: 2014 + presentation-video: https://youtu.be/T7EJVACX3QI + title: 'Ripples: An Auditory Augmented Reality iOS Application for the Atlanta Botanical + Garden' + 
url: https://nime.pubpub.org/pub/n1o19efr + year: 2021 - ENTRYTYPE: inproceedings - ID: jjeon2014 - abstract: 'This paper presents an enhanced sonic data communication method using - TAPIR (Theoretically Audible, but Practically Inaudible Range: frequencies above - 18kHz) sound and a software toolkit as its implementation. Using inaudible sound - as a data medium, a digital data network among the audience and performer can - be easily built with microphones and speakers, without requiring any additional - hardware. ``TAPIR Sound Tag'''' is a smart device framework for inaudible data - communication that can be easily embedded in audience participatory performances - and interactive arts. With a bandwidth of 900 Hz, a high transmission rate of - 200 bps can be achieved, enabling peer-to-peer or broadcasting real-time data - communication among smart devices. This system can be used without any advanced - knowledge in signal processing and communication system theory; simply specifying - carrier frequency and bandwidth with a few lines of code can start data communication. - Several usage scenarios of the system are also presented, such as participating - in an interactive performance by adding and controlling sound, and collaborative - completion of an artist''s work by audience. We expect this framework to provide - a new way of audience interaction to artists, as well as further promoting audience - participation by simplifying the process: using personal smart devices as a medium - and not requiring additional hardware or complex settings.' - address: 'London, United Kingdom' - author: Jimin Jeon and Gunho Chae and Edward Jangwon Lee and Woon Seung Yeo - bibtex: "@inproceedings{jjeon2014,\n abstract = {This paper presents an enhanced\ - \ sonic data communication method using TAPIR (Theoretically Audible, but Practically\ - \ Inaudible Range: frequencies above 18kHz) sound and a software toolkit as its\ - \ implementation. 
Using inaudible sound as a data medium, a digital data network\ - \ among the audience and performer can be easily built with microphones and speakers,\ - \ without requiring any additional hardware. ``TAPIR Sound Tag'' is a smart device\ - \ framework for inaudible data communication that can be easily embedded in audience\ - \ participatory performances and interactive arts. With a bandwidth of 900 Hz,\ - \ a high transmission rate of 200 bps can be achieved, enabling peer-to-peer or\ - \ broadcasting real-time data communication among smart devices. This system can\ - \ be used without any advanced knowledge in signal processing and communication\ - \ system theory; simply specifying carrier frequency and bandwidth with a few\ - \ lines of code can start data communication. Several usage scenarios of the system\ - \ are also presented, such as participating in an interactive performance by adding\ - \ and controlling sound, and collaborative completion of an artist's work by audience.\ - \ We expect this framework to provide a new way of audience interaction to artists,\ - \ as well as further promoting audience participation by simplifying the process:\ - \ using personal smart devices as a medium and not requiring additional hardware\ - \ or complex settings.},\n address = {London, United Kingdom},\n author = {Jimin\ - \ Jeon and Gunho Chae and Edward Jangwon Lee and Woon Seung Yeo},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178818},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {367--370},\n publisher = {Goldsmiths, University of London},\n\ - \ title = {TAPIR Sound Tag: An Enhanced Sonic Communication Framework for Audience\ - \ Participatory Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_461.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_47 + abstract: 'This article describes Mono-Replay, a software environment designed for + sound 
animation. "Sound animation" in this context means musical performance based + on various modes of replay and transformation of all kinds of recorded music samples. + Sound animation using Mono-Replay is a two-step process, including an off-line + analysis phase and on-line performance or synthesis phase. The analysis phase + proceeds with time segmentation, and the set up of anchor points corresponding + to temporal musical discourse parameters (notes, pulses, events). This allows, + at the performance phase, for control of timing, playback position, playback speed, + and a variety of spectral effects, with the help of gesture interfaces. Animation + principles and software features of Mono-Replay are described. Two examples of + sound animation based on beat tracking and transient detection algorithms are + presented (a multi-track record of Superstition by Steve Wonder and Jeff Beck + and Accidents/Harmoniques, an electroacoustic piece by Bernard Parmegiani). With + the help of these two contrasted examples, the fundamental principles of “sound + animation” are reviewed: parameters of musical discourse, audio file segmentation, + gestural control and interaction for animation at the performance stage.' + address: 'Shanghai, China' + articleno: 47 + author: 'LUCAS, Thomas and d''Alessandro, Christophe and Laubier, Serge de' + bibtex: "@inproceedings{NIME21_47,\n abstract = {This article describes Mono-Replay,\ + \ a software environment designed for sound animation. \"Sound animation\" in\ + \ this context means musical performance based on various modes of replay and\ + \ transformation of all kinds of recorded music samples. Sound animation using\ + \ Mono-Replay is a two-step process, including an off-line analysis phase and\ + \ on-line performance or synthesis phase. The analysis phase proceeds with time\ + \ segmentation, and the set up of anchor points corresponding to temporal musical\ + \ discourse parameters (notes, pulses, events). 
This allows, at the performance\ + \ phase, for control of timing, playback position, playback speed, and a variety\ + \ of spectral effects, with the help of gesture interfaces. Animation principles\ + \ and software features of Mono-Replay are described. Two examples of sound animation\ + \ based on beat tracking and transient detection algorithms are presented (a multi-track\ + \ record of Superstition by Steve Wonder and Jeff Beck and Accidents/Harmoniques,\ + \ an electroacoustic piece by Bernard Parmegiani). With the help of these two\ + \ contrasted examples, the fundamental principles of “sound animation” are reviewed:\ + \ parameters of musical discourse, audio file segmentation, gestural control and\ + \ interaction for animation at the performance stage.},\n address = {Shanghai,\ + \ China},\n articleno = {47},\n author = {LUCAS, Thomas and d'Alessandro, Christophe\ + \ and Laubier, Serge de},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.7b843efe},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/Ck79wRgqXfU},\n\ + \ title = {Mono-Replay : a software tool for digitized sound animation},\n url\ + \ = {https://nime.pubpub.org/pub/8lqitvvq},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178818 + doi: 10.21428/92fbeb44.7b843efe issn: 2220-4806 month: June - pages: 367--370 - publisher: 'Goldsmiths, University of London' - title: 'TAPIR Sound Tag: An Enhanced Sonic Communication Framework for Audience - Participatory Performance' - url: http://www.nime.org/proceedings/2014/nime2014_461.pdf - year: 2014 + presentation-video: https://youtu.be/Ck79wRgqXfU + title: 'Mono-Replay : a software tool for digitized sound animation' + url: https://nime.pubpub.org/pub/8lqitvvq + year: 2021 - ENTRYTYPE: inproceedings - ID: asarasua2014 - abstract: 'Many 
musical interfaces have used the musical conductor metaphor, allowing - users to control the expressive aspects of a performance by imitating the gestures - of conductors. In most of them, the rules to control these expressive aspects - are predefined and users have to adapt to them. Other works have studied conductors'' - gestures in relation to the performance of the orchestra. The goal of this study - is to analyze, following the path initiated by this latter kind of works, how - simple motion capture descriptors can explain the relationship between the loudness - of a given performance and the way in which different subjects move when asked - to impersonate the conductor of that performance. Twenty-five subjects were asked - to impersonate the conductor of three classical music fragments while listening - to them. The results of different linear regression models with motion capture - descriptors as explanatory variables show that, by studying how descriptors correlate - to loudness differently among subjects, different tendencies can be found and - exploited to design models that better adjust to their expectations.' - address: 'London, United Kingdom' - author: Alvaro Sarasúa and Enric Guaus - bibtex: "@inproceedings{asarasua2014,\n abstract = {Many musical interfaces have\ - \ used the musical conductor metaphor, allowing users to control the expressive\ - \ aspects of a performance by imitating the gestures of conductors. In most of\ - \ them, the rules to control these expressive aspects are predefined and users\ - \ have to adapt to them. Other works have studied conductors' gestures in relation\ - \ to the performance of the orchestra. 
The goal of this study is to analyze, following\ - \ the path initiated by this latter kind of works, how simple motion capture descriptors\ - \ can explain the relationship between the loudness of a given performance and\ - \ the way in which different subjects move when asked to impersonate the conductor\ - \ of that performance. Twenty-five subjects were asked to impersonate the conductor\ - \ of three classical music fragments while listening to them. The results of different\ - \ linear regression models with motion capture descriptors as explanatory variables\ - \ show that, by studying how descriptors correlate to loudness differently among\ - \ subjects, different tendencies can be found and exploited to design models that\ - \ better adjust to their expectations.},\n address = {London, United Kingdom},\n\ - \ author = {Alvaro Saras\\'ua and Enric Guaus},\n booktitle = {Proceedings of\ + ID: NIME21_48 + abstract: 'This paper discusses Pandora''s Box, a novel idiosyncratic electroacoustic + instrument and performance utilizing feedback as sound generation principle. The + instrument''s signal path consists of a closed-loop through custom DSP algorithms + and a spring. Pandora''s Box is played by tactile interaction with the spring + and a control panel with faders and switches. The design and implementation are + described and rituals are explained referencing a video recording of a concert.' + address: 'Shanghai, China' + articleno: 48 + author: 'Slager, Ward J.' + bibtex: "@inproceedings{NIME21_48,\n abstract = {This paper discusses Pandora's\ + \ Box, a novel idiosyncratic electroacoustic instrument and performance utilizing\ + \ feedback as sound generation principle. The instrument's signal path consists\ + \ of a closed-loop through custom DSP algorithms and a spring. Pandora's Box is\ + \ played by tactile interaction with the spring and a control panel with faders\ + \ and switches. 
The design and implementation are described and rituals are explained\ + \ referencing a video recording of a concert.},\n address = {Shanghai, China},\n\ + \ articleno = {48},\n author = {Slager, Ward J.},\n booktitle = {Proceedings of\ \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178929},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {195--200},\n publisher = {Goldsmiths, University of London},\n title = {Dynamics\ - \ in Music Conducting: A Computational Comparative Study Among Subjects},\n url\ - \ = {http://www.nime.org/proceedings/2014/nime2014_464.pdf},\n year = {2014}\n\ + \ = {10.21428/92fbeb44.61b13baf},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/s89Ycd0QkDI},\n title = {Designing and performing with Pandora’s\ + \ Box: transforming feedback physically and with algorithms},\n url = {https://nime.pubpub.org/pub/kx6d0553},\n\ + \ year = {2021}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.21428/92fbeb44.61b13baf + issn: 2220-4806 + month: June + presentation-video: https://youtu.be/s89Ycd0QkDI + title: 'Designing and performing with Pandora’s Box: transforming feedback physically + and with algorithms' + url: https://nime.pubpub.org/pub/kx6d0553 + year: 2021 + + +- ENTRYTYPE: inproceedings + ID: NIME21_49 + abstract: 'Quadrant is a new human-computer interface based on an array of distance + sensors. The hardware consists of 4 time-of-flight detectors and is designed to + detect the position, velocity, and orientation of the user''s hand in free space. + Signal processing is used to recognize gestures and other events, which we map + to a variety of musical parameters to demonstrate possible applications. We have + developed Quadrant as an open-hardware circuit board, which acts as a USB controller + to a host computer.' 
+ address: 'Shanghai, China' + articleno: 49 + author: 'Chronopoulos, Chris' + bibtex: "@inproceedings{NIME21_49,\n abstract = {Quadrant is a new human-computer\ + \ interface based on an array of distance sensors. The hardware consists of 4\ + \ time-of-flight detectors and is designed to detect the position, velocity, and\ + \ orientation of the user's hand in free space. Signal processing is used to recognize\ + \ gestures and other events, which we map to a variety of musical parameters to\ + \ demonstrate possible applications. We have developed Quadrant as an open-hardware\ + \ circuit board, which acts as a USB controller to a host computer.},\n address\ + \ = {Shanghai, China},\n articleno = {49},\n author = {Chronopoulos, Chris},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.761367fd},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/p8flHKv17Y8},\n title\ + \ = {Quadrant: A Multichannel, Time-of-Flight Based Hand Tracking Interface for\ + \ Computer Music},\n url = {https://nime.pubpub.org/pub/quadrant},\n year = {2021}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178929 + doi: 10.21428/92fbeb44.761367fd issn: 2220-4806 month: June - pages: 195--200 - publisher: 'Goldsmiths, University of London' - title: 'Dynamics in Music Conducting: A Computational Comparative Study Among Subjects' - url: http://www.nime.org/proceedings/2014/nime2014_464.pdf - year: 2014 + presentation-video: https://youtu.be/p8flHKv17Y8 + title: 'Quadrant: A Multichannel, Time-of-Flight Based Hand Tracking Interface for + Computer Music' + url: https://nime.pubpub.org/pub/quadrant + year: 2021 - ENTRYTYPE: inproceedings - ID: kkeatch2014 - abstract: 'Sounds of Solitaire is a novel interface for musical expression based - on an extended peg solitaire board as a generator of 
live musical composition. - The classic puzzle game, for one person, is extended by mapping the moves of the - game through a self contained system using Arduino and Raspberry Pi, triggering - both analogue and digital sound. The solitaire board, as instrument, is presented - as a wood and Perspex box with the hardware inside. Ball bearings function as - both solitaire pegs and switches, while a purpose built solenoid controlled monochord - and ball bearing run provide the analogue sound source, which is digitally manipulated - in real-time, according to the sequences of game moves. The creative intention - of Sounds of Solitaire is that the playful approach to participation in a musical - experience, provided by the material for music making in real-time, demonstrates - an integrated approach to concepts of composing, performing and listening.' - address: 'London, United Kingdom' - author: Kirsty Keatch - bibtex: "@inproceedings{kkeatch2014,\n abstract = {Sounds of Solitaire is a novel\ - \ interface for musical expression based on an extended peg solitaire board as\ - \ a generator of live musical composition. The classic puzzle game, for one person,\ - \ is extended by mapping the moves of the game through a self contained system\ - \ using Arduino and Raspberry Pi, triggering both analogue and digital sound.\ - \ The solitaire board, as instrument, is presented as a wood and Perspex box with\ - \ the hardware inside. Ball bearings function as both solitaire pegs and switches,\ - \ while a purpose built solenoid controlled monochord and ball bearing run provide\ - \ the analogue sound source, which is digitally manipulated in real-time, according\ - \ to the sequences of game moves. 
The creative intention of Sounds of Solitaire\ - \ is that the playful approach to participation in a musical experience, provided\ - \ by the material for music making in real-time, demonstrates an integrated approach\ - \ to concepts of composing, performing and listening.},\n address = {London, United\ - \ Kingdom},\n author = {Kirsty Keatch},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178827},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {102--105},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {An Exploration of Peg Solitaire as a Compositional\ - \ Tool},\n url = {http://www.nime.org/proceedings/2014/nime2014_466.pdf},\n year\ - \ = {2014}\n}\n" + ID: NIME21_5 + abstract: 'This short article presents a reductionist infra-instrument. It concerns + a yellow die-cast aluminium box only featuring a key switch and a 1/4” TRS balanced + audio output as its UI. On the turn of the key, the device performs a certain + poem in Morse code and via very low frequency acoustic pulses; in this way, it + transforms poetry into bursts of intense acoustic energy that may resonate a hosting + architecture and any human bodies therein. It is argued that the instrument functions + at the very same time as a critical/speculative electronic object, as an ad-hoc + performance instrument, and as a piece of (conceptual) art on its own sake.' + address: 'Shanghai, China' + articleno: 5 + author: 'Koutsomichalis, Marinos' + bibtex: "@inproceedings{NIME21_5,\n abstract = {This short article presents a reductionist\ + \ infra-instrument. It concerns a yellow die-cast aluminium box only featuring\ + \ a key switch and a 1/4” TRS balanced audio output as its UI. 
On the turn of\ + \ the key, the device performs a certain poem in Morse code and via very low frequency\ + \ acoustic pulses; in this way, it transforms poetry into bursts of intense acoustic\ + \ energy that may resonate a hosting architecture and any human bodies therein.\ + \ It is argued that the instrument functions at the very same time as a critical/speculative\ + \ electronic object, as an ad-hoc performance instrument, and as a piece of (conceptual)\ + \ art on its own sake.},\n address = {Shanghai, China},\n articleno = {5},\n author\ + \ = {Koutsomichalis, Marinos},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.765a94a7},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/_IUT0tbtkBI},\n\ + \ title = {A Yellow Box with a Key Switch and a 1/4\" TRS Balanced Audio Output},\n\ + \ url = {https://nime.pubpub.org/pub/n69uznd4},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178827 + doi: 10.21428/92fbeb44.765a94a7 issn: 2220-4806 month: June - pages: 102--105 - publisher: 'Goldsmiths, University of London' - title: An Exploration of Peg Solitaire as a Compositional Tool - url: http://www.nime.org/proceedings/2014/nime2014_466.pdf - year: 2014 + presentation-video: https://youtu.be/_IUT0tbtkBI + title: A Yellow Box with a Key Switch and a 1/4" TRS Balanced Audio Output + url: https://nime.pubpub.org/pub/n69uznd4 + year: 2021 - ENTRYTYPE: inproceedings - ID: xxiao2014 - abstract: 'We present Andante, a representation of music as animated characters - walking along the piano keyboard that appear to play the physical keys with each - step. 
Based on a view of music pedagogy that emphasizes expressive, full-body - communication early in the learning process, Andante promotes an understanding - of the music rooted in the body, taking advantage of walking as one of the most - fundamental human rhythms. We describe three example visualizations on a preliminary - prototype as well as applications extending our examples for practice feedback, - improvisation and composition. Through our project, we reflect on some high level - considerations for the NIME community.' - address: 'London, United Kingdom' - author: Xiao Xiao and Basheer Tome and Hiroshi Ishii - bibtex: "@inproceedings{xxiao2014,\n abstract = {We present Andante, a representation\ - \ of music as animated characters walking along the piano keyboard that appear\ - \ to play the physical keys with each step. Based on a view of music pedagogy\ - \ that emphasizes expressive, full-body communication early in the learning process,\ - \ Andante promotes an understanding of the music rooted in the body, taking advantage\ - \ of walking as one of the most fundamental human rhythms. We describe three example\ - \ visualizations on a preliminary prototype as well as applications extending\ - \ our examples for practice feedback, improvisation and composition. 
Through our\ - \ project, we reflect on some high level considerations for the NIME community.},\n\ - \ address = {London, United Kingdom},\n author = {Xiao Xiao and Basheer Tome and\ - \ Hiroshi Ishii},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178987},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {629--632},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Andante: Walking Figures on the Piano Keyboard\ - \ to Visualize Musical Motion},\n url = {http://www.nime.org/proceedings/2014/nime2014_467.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_50 + abstract: 'In the present study a musician and a dancer explore the co-play between + them through sensory technology. The main questions concern the placement and + processing of motion sensors, and the choice of sound parameters that a dancer + can manipulate. Results indicate that sound parameters of delay and pitch altered + dancers’ experience most positively and that placement of sensors on each wrist + and ankle with a diagonal mapping of the sound parameters was the most suitable.' + address: 'Shanghai, China' + articleno: 50 + author: 'Andersson López, Lisa and Svenns, Thelma and Holzapfel, Andre' + bibtex: "@inproceedings{NIME21_50,\n abstract = {In the present study a musician\ + \ and a dancer explore the co-play between them through sensory technology. The\ + \ main questions concern the placement and processing of motion sensors, and the\ + \ choice of sound parameters that a dancer can manipulate. 
Results indicate that\ + \ sound parameters of delay and pitch altered dancers’ experience most positively\ + \ and that placement of sensors on each wrist and ankle with a diagonal mapping\ + \ of the sound parameters was the most suitable.},\n address = {Shanghai, China},\n\ + \ articleno = {50},\n author = {Andersson López, Lisa and Svenns, Thelma and Holzapfel,\ + \ Andre},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.18c3fc2b},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/Mo8mVJJrqx8},\n title\ + \ = {Sensitiv – Designing a Sonic Co-play Tool for Interactive Dance},\n url =\ + \ {https://nime.pubpub.org/pub/y1y5jolp},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178987 + doi: 10.21428/92fbeb44.18c3fc2b issn: 2220-4806 month: June - pages: 629--632 - publisher: 'Goldsmiths, University of London' - title: 'Andante: Walking Figures on the Piano Keyboard to Visualize Musical Motion' - url: http://www.nime.org/proceedings/2014/nime2014_467.pdf - year: 2014 + presentation-video: https://youtu.be/Mo8mVJJrqx8 + title: Sensitiv – Designing a Sonic Co-play Tool for Interactive Dance + url: https://nime.pubpub.org/pub/y1y5jolp + year: 2021 - ENTRYTYPE: inproceedings - ID: avantklooster2014 - abstract: Emotion is a complex topic much studied in music and arguably equally - central to the visual arts where this is usually referred to with the overarching - label of aesthetics. This paper explores how music and the arts have incorporated - the study of emotion. We then introduce the development of a live audio visual - interface entitled In A State that detects emotion from live audio (in this case - a piano performance) and generates visuals and electro acoustic music in response. 
- address: 'London, United Kingdom' - author: Adinda van 't Klooster and Nick Collins - bibtex: "@inproceedings{avantklooster2014,\n abstract = {Emotion is a complex topic\ - \ much studied in music and arguably equally central to the visual arts where\ - \ this is usually referred to with the overarching label of aesthetics. This paper\ - \ explores how music and the arts have incorporated the study of emotion. We then\ - \ introduce the development of a live audio visual interface entitled In A State\ - \ that detects emotion from live audio (in this case a piano performance) and\ - \ generates visuals and electro acoustic music in response.},\n address = {London,\ - \ United Kingdom},\n author = {Adinda van 't Klooster and Nick Collins},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178837},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {545--548},\n publisher = {Goldsmiths, University of London},\n\ - \ title = {In A State: Live Emotion Detection and Visualisation for Music Performance},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_469.pdf},\n year = {2014}\n\ + ID: NIME21_51 + abstract: 'Wireless sensor-based technologies are becoming increasingly accessible + and widely explored in interactive musical performance due to their ubiquity and + low-cost, which brings the necessity of understanding the capabilities and limitations + of these sensors. This is usually approached by using a reference system, such + as an optical motion capture system, to assess the signals’ properties. However, + this process raises the issue of synchronizing the signal and the reference data + streams, as each sensor is subject to different latency, time drift, reference + clocks and initialization timings. 
This paper presents an empirical quantification + of the latency communication stages in a setup consisting of a Qualisys optical + motion capture (mocap) system and a wireless microcontroller-based sensor device. + We performed event-to-end tests on the critical components of the hybrid setup + to determine the synchronization suitability. Overall, further synchronization + is viable because of the near individual average latencies of around 25ms for + both the mocap system and the wireless sensor interface.' + address: 'Shanghai, China' + articleno: 51 + author: 'Santos, Geise and Wang, Johnty and Brum, Carolina and Wanderley, Marcelo + M. and Tavares, Tiago and Rocha, Anderson' + bibtex: "@inproceedings{NIME21_51,\n abstract = {Wireless sensor-based technologies\ + \ are becoming increasingly accessible and widely explored in interactive musical\ + \ performance due to their ubiquity and low-cost, which brings the necessity of\ + \ understanding the capabilities and limitations of these sensors. This is usually\ + \ approached by using a reference system, such as an optical motion capture system,\ + \ to assess the signals’ properties. However, this process raises the issue of\ + \ synchronizing the signal and the reference data streams, as each sensor is subject\ + \ to different latency, time drift, reference clocks and initialization timings.\ + \ This paper presents an empirical quantification of the latency communication\ + \ stages in a setup consisting of a Qualisys optical motion capture (mocap) system\ + \ and a wireless microcontroller-based sensor device. We performed event-to-end\ + \ tests on the critical components of the hybrid setup to determine the synchronization\ + \ suitability. 
Overall, further synchronization is viable because of the near\ + \ individual average latencies of around 25ms for both the mocap system and the\ + \ wireless sensor interface.},\n address = {Shanghai, China},\n articleno = {51},\n\ + \ author = {Santos, Geise and Wang, Johnty and Brum, Carolina and Wanderley, Marcelo\ + \ M. and Tavares, Tiago and Rocha, Anderson},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.51b1c3a1},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/a1TVvr9F7hE},\n title = {Comparative Latency Analysis of\ + \ Optical and Inertial Motion Capture Systems for Gestural Analysis and Musical\ + \ Performance},\n url = {https://nime.pubpub.org/pub/wmcqkvw1},\n year = {2021}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178837 + doi: 10.21428/92fbeb44.51b1c3a1 issn: 2220-4806 month: June - pages: 545--548 - publisher: 'Goldsmiths, University of London' - title: 'In A State: Live Emotion Detection and Visualisation for Music Performance' - url: http://www.nime.org/proceedings/2014/nime2014_469.pdf - year: 2014 + presentation-video: https://youtu.be/a1TVvr9F7hE + title: Comparative Latency Analysis of Optical and Inertial Motion Capture Systems + for Gestural Analysis and Musical Performance + url: https://nime.pubpub.org/pub/wmcqkvw1 + year: 2021 - ENTRYTYPE: inproceedings - ID: doverholt2014 - abstract: 'We introduce and describe the initial evaluation of a new low-cost augmented - violin prototype, with research focused on the user experience when playing such - hybrid physical-digital instruments, and the exploration of novel interactive - performance techniques. Another goal of this work is wider platform accessibility - for players, via a simple `do-it-yourself'' approach described by the design herein. 
- While the hardware and software elements are open source, the build process can - nonetheless require non-insignificant investments of time and money, as well as - basic electronics construction skills. These have been kept to a minimum wherever - possible. Our initial prototype is based upon an inexpensive electric violin that - is widely available online for approximately $200 USD. This serves as the starting - point for construction, to which the design adds local Digital Signal Processing - (DSP), gestural sensing, and sound output. Real-time DSP algorithms are running - on a mobile device, which also incorporates orientation/gesture sensors for parameter - mapping, with the resulting sound amplified and rendered via small loudspeakers - mounted on the instrument. The platform combines all necessary elements for digitally-mediated - interactive performance; the need for a traditional computer only arises when - developing new DSP algorithms for the platform. An initial exploratory evaluation - with users is presented, in which performers explore different possibilities with - the proposed platform (various DSP implementations, mapping schemes, physical - setups, etc.) in order to better establish the needs of the performing artist. - Based on these results, future work is outlined leading towards the development - of a complete quartet of instruments.' - address: 'London, United Kingdom' - author: Dan Overholt and Steven Gelineck - bibtex: "@inproceedings{doverholt2014,\n abstract = {We introduce and describe the\ - \ initial evaluation of a new low-cost augmented violin prototype, with research\ - \ focused on the user experience when playing such hybrid physical-digital instruments,\ - \ and the exploration of novel interactive performance techniques. Another goal\ - \ of this work is wider platform accessibility for players, via a simple `do-it-yourself'\ - \ approach described by the design herein. 
While the hardware and software elements\ - \ are open source, the build process can nonetheless require non-insignificant\ - \ investments of time and money, as well as basic electronics construction skills.\ - \ These have been kept to a minimum wherever possible. Our initial prototype is\ - \ based upon an inexpensive electric violin that is widely available online for\ - \ approximately $200 USD. This serves as the starting point for construction,\ - \ to which the design adds local Digital Signal Processing (DSP), gestural sensing,\ - \ and sound output. Real-time DSP algorithms are running on a mobile device, which\ - \ also incorporates orientation/gesture sensors for parameter mapping, with the\ - \ resulting sound amplified and rendered via small loudspeakers mounted on the\ - \ instrument. The platform combines all necessary elements for digitally-mediated\ - \ interactive performance; the need for a traditional computer only arises when\ - \ developing new DSP algorithms for the platform. An initial exploratory evaluation\ - \ with users is presented, in which performers explore different possibilities\ - \ with the proposed platform (various DSP implementations, mapping schemes, physical\ - \ setups, etc.) 
in order to better establish the needs of the performing artist.\ - \ Based on these results, future work is outlined leading towards the development\ - \ of a complete quartet of instruments.},\n address = {London, United Kingdom},\n\ - \ author = {Dan Overholt and Steven Gelineck},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178897},\n issn = {2220-4806},\n month = {June},\n pages =\ - \ {122--125},\n publisher = {Goldsmiths, University of London},\n title = {Design\ - \ \\& Evaluation of an Accessible Hybrid Violin Platform},\n url = {http://www.nime.org/proceedings/2014/nime2014_470.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_52 + abstract: 'The work presented here is based on the Hybrid Augmented Saxophone of + Gestural Symbioses (HASGS) system with a focus on and its evolution over the last + five years, and an emphasis on its functional structure and the repertoire. The + HASGS system was intended to retain focus on the performance of the acoustic instrument, + keeping gestures centralised within the habitual practice of the instrument, and + reducing the use of external devices to control electronic parameters in mixed + music. Taking a reduced approach, the technology chosen to prototype HASGS was + developed in order to serve the aesthetic intentions of the pieces being written + for it. This strategy proved to avoid an overload of solutions that could bring + artefacts and superficial use of the augmentation processes, which sometimes occur + on augmented instruments, specially prototyped for improvisational intentionality. + Here, we discuss how the repertoire, hardware, and software of the system can + be mutually affected by this approach. 
We understand this project as an empirically-based + study which can both serve as a model for analysis, as well provide composers + and performers with pathways and creative strategies for the development of augmentation + processes.' + address: 'Shanghai, China' + articleno: 52 + author: 'Portovedo, Henrique and Lopes, Paulo Ferreira and Mendes, Ricardo and Gala, + Tiago' + bibtex: "@inproceedings{NIME21_52,\n abstract = {The work presented here is based\ + \ on the Hybrid Augmented Saxophone of Gestural Symbioses (HASGS) system with\ + \ a focus on and its evolution over the last five years, and an emphasis on its\ + \ functional structure and the repertoire. The HASGS system was intended to retain\ + \ focus on the performance of the acoustic instrument, keeping gestures centralised\ + \ within the habitual practice of the instrument, and reducing the use of external\ + \ devices to control electronic parameters in mixed music. Taking a reduced approach,\ + \ the technology chosen to prototype HASGS was developed in order to serve the\ + \ aesthetic intentions of the pieces being written for it. This strategy proved\ + \ to avoid an overload of solutions that could bring artefacts and superficial\ + \ use of the augmentation processes, which sometimes occur on augmented instruments,\ + \ specially prototyped for improvisational intentionality. Here, we discuss how\ + \ the repertoire, hardware, and software of the system can be mutually affected\ + \ by this approach. 
We understand this project as an empirically-based study which\ + \ can both serve as a model for analysis, as well provide composers and performers\ + \ with pathways and creative strategies for the development of augmentation processes.},\n\ + \ address = {Shanghai, China},\n articleno = {52},\n author = {Portovedo, Henrique\ + \ and Lopes, Paulo Ferreira and Mendes, Ricardo and Gala, Tiago},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.643abd8c},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/wRygkMgx2Oc},\n title = {HASGS:\ + \ Five Years of Reduced Augmented Evolution},\n url = {https://nime.pubpub.org/pub/1293exfw},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178897 + doi: 10.21428/92fbeb44.643abd8c issn: 2220-4806 month: June - pages: 122--125 - publisher: 'Goldsmiths, University of London' - title: Design & Evaluation of an Accessible Hybrid Violin Platform - url: http://www.nime.org/proceedings/2014/nime2014_470.pdf - year: 2014 + presentation-video: https://youtu.be/wRygkMgx2Oc + title: 'HASGS: Five Years of Reduced Augmented Evolution' + url: https://nime.pubpub.org/pub/1293exfw + year: 2021 - ENTRYTYPE: inproceedings - ID: axambo2014 - abstract: 'Co-located tabletop tangible user interfaces (TUIs) for music performance - are known for promoting multi-player collaboration with a shared interface, yet - it is still unclear how to best support the awareness of the workspace in terms - of understanding individual actions and the other group members actions, in parallel. - In this paper, we investigate the effects of providing auditory feedback using - ambisonics spatialisation, aimed at informing users about the location of the - tangibles on the tabletop surface, with groups of mixed musical backgrounds. 
Participants - were asked to improvise music on SoundXY4: The Art of Noise, a tabletop system - that includes sound samples inspired by Russolo''s taxonomy of noises. We compared - spatialisation vs. no-spatialisation conditions, and findings suggest that, when - using spatialisation, there was a clearer workspace awareness, and a greater engagement - in the musical activity as an immersive experience.' - address: 'London, United Kingdom' - author: Anna Xambó and Gerard Roma and Robin Laney and Chris Dobbyn and Sergi Jordà - bibtex: "@inproceedings{axambo2014,\n abstract = {Co-located tabletop tangible user\ - \ interfaces (TUIs) for music performance are known for promoting multi-player\ - \ collaboration with a shared interface, yet it is still unclear how to best support\ - \ the awareness of the workspace in terms of understanding individual actions\ - \ and the other group members actions, in parallel. In this paper, we investigate\ - \ the effects of providing auditory feedback using ambisonics spatialisation,\ - \ aimed at informing users about the location of the tangibles on the tabletop\ - \ surface, with groups of mixed musical backgrounds. Participants were asked to\ - \ improvise music on {SoundXY4: The Art of Noise}, a tabletop system that includes\ - \ sound samples inspired by Russolo's taxonomy of noises. We compared spatialisation\ - \ vs. 
no-spatialisation conditions, and findings suggest that, when using spatialisation,\ - \ there was a clearer workspace awareness, and a greater engagement in the musical\ - \ activity as an immersive experience.},\n address = {London, United Kingdom},\n\ - \ author = {Anna Xamb\\'o and Gerard Roma and Robin Laney and Chris Dobbyn and\ - \ Sergi Jord\\`a},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178985},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {40--45},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {SoundXY4: Supporting Tabletop Collaboration\ - \ and Awareness with Ambisonics Spatialisation},\n url = {http://www.nime.org/proceedings/2014/nime2014_471.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_53 + abstract: 'This paper presents a theoretical framework for describing interactive + sound installations, along with an interactive database, on a web application, + for visualizing various features of sound installations. A corpus of 195 interactive + sound installations was reviewed to derive a taxonomy describing them across three + perspectives: Artistic Intention, Interaction and System Design. A web application + is provided to dynamically visualize and explore the corpus of sound installations + using interactive charts (https://isi-database.herokuapp.com/). Our contribution + is two-sided: we provide a theoretical framework to characterize interactive sound + installations as well as a tool to inform sound artists and designers about up-to-date + practices regarding interactive sound installations design.' + address: 'Shanghai, China' + articleno: 53 + author: 'Fraisse, Valérian and Guastavino, Catherine and Wanderley, Marcelo M.' 
+ bibtex: "@inproceedings{NIME21_53,\n abstract = {This paper presents a theoretical\ + \ framework for describing interactive sound installations, along with an interactive\ + \ database, on a web application, for visualizing various features of sound installations.\ + \ A corpus of 195 interactive sound installations was reviewed to derive a taxonomy\ + \ describing them across three perspectives: Artistic Intention, Interaction and\ + \ System Design. A web application is provided to dynamically visualize and explore\ + \ the corpus of sound installations using interactive charts (https://isi-database.herokuapp.com/).\ + \ Our contribution is two-sided: we provide a theoretical framework to characterize\ + \ interactive sound installations as well as a tool to inform sound artists and\ + \ designers about up-to-date practices regarding interactive sound installations\ + \ design.},\n address = {Shanghai, China},\n articleno = {53},\n author = {Fraisse,\ + \ Valérian and Guastavino, Catherine and Wanderley, Marcelo M.},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.4fd9089c},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/MtIVB7P3bs4},\n title = {A Visualization\ + \ Tool to Explore Interactive Sound Installations},\n url = {https://nime.pubpub.org/pub/i1rx1t2e},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178985 + doi: 10.21428/92fbeb44.4fd9089c issn: 2220-4806 month: June - pages: 40--45 - publisher: 'Goldsmiths, University of London' - title: 'SoundXY4: Supporting Tabletop Collaboration and Awareness with Ambisonics - Spatialisation' - url: http://www.nime.org/proceedings/2014/nime2014_471.pdf - year: 2014 + presentation-video: https://youtu.be/MtIVB7P3bs4 + title: A Visualization Tool to Explore Interactive Sound Installations + 
url: https://nime.pubpub.org/pub/i1rx1t2e + year: 2021 - ENTRYTYPE: inproceedings - ID: smealla2014 - abstract: 'The maturation process of the NIME field has brought a growing interest - in teaching the design and implementation of Digital Music Instruments (DMI) as - well as in finding objective evaluation methods to assess the suitability of these - outcomes. In this paper we propose a methodology for teaching NIME design and - a set of tools meant to inform the design process. This approach has been applied - in a master course focused on the exploration of expressiveness and on the role - of the mapping component in the NIME creation chain, through hands-on and self-reflective - approach based on a restrictive setup consisting of smart-phones and the Pd programming - language. Working Groups were formed, and a 2-step DMI design process was applied, - including 2 performance stages. The evaluation tools assessed both System and - Performance aspects of each project, according to Listeners'' impressions after - each performance. Listeners'' previous music knowledge was also considered. Through - this methodology, students with different backgrounds were able to effectively - engage in the NIME design processes, developing working DMI prototypes according - to the demanded requirements; the assessment tools proved to be consistent for - evaluating NIMEs systems and performances, and the fact of informing the design - processes with the outcome of the evaluation, showed a traceable progress in the - students outcomes.' - address: 'London, United Kingdom' - author: Sergi Jordà and Sebastian Mealla - bibtex: "@inproceedings{smealla2014,\n abstract = {The maturation process of the\ - \ NIME field has brought a growing interest in teaching the design and implementation\ - \ of Digital Music Instruments (DMI) as well as in finding objective evaluation\ - \ methods to assess the suitability of these outcomes. 
In this paper we propose\ - \ a methodology for teaching NIME design and a set of tools meant to inform the\ - \ design process. This approach has been applied in a master course focused on\ - \ the exploration of expressiveness and on the role of the mapping component in\ - \ the NIME creation chain, through hands-on and self-reflective approach based\ - \ on a restrictive setup consisting of smart-phones and the Pd programming language.\ - \ Working Groups were formed, and a 2-step DMI design process was applied, including\ - \ 2 performance stages. The evaluation tools assessed both System and Performance\ - \ aspects of each project, according to Listeners' impressions after each performance.\ - \ Listeners' previous music knowledge was also considered. Through this methodology,\ - \ students with different backgrounds were able to effectively engage in the NIME\ - \ design processes, developing working DMI prototypes according to the demanded\ - \ requirements; the assessment tools proved to be consistent for evaluating NIMEs\ - \ systems and performances, and the fact of informing the design processes with\ - \ the outcome of the evaluation, showed a traceable progress in the students outcomes.},\n\ - \ address = {London, United Kingdom},\n author = {Sergi Jord\\`a and Sebastian\ - \ Mealla},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178824},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {233--238},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {A Methodological Framework for Teaching, Evaluating and\ - \ Informing NIME Design with a Focus on Mapping and Expressiveness},\n url = {http://www.nime.org/proceedings/2014/nime2014_472.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_54 + abstract: 'Self-resonating vibrotactile instruments (SRIs) are hybrid feedback instruments, + characterised by an electro-mechanical feedback loop that is both the 
means of + sound production and the expressive interface. Through the lens of contemporary + SRIs, we reflect on how they are characterised, designed, and played. By considering + reports from designers and players of this species of instrument-performance system, + we explore the experience of playing them. With a view to supporting future research + and practice in the field, we illustrate the value of conceptualising SRIs in + Cybernetic and systems theoretic terms and suggest that this offers an intuitive, + yet powerful basis for future performance, analysis and making; in doing so we + close the loop in the making, playing and conceptualisation of SRIs with the aim + of nourishing the evolution of theory, creative and technical practice in this + field.' + address: 'Shanghai, China' + articleno: 54 + author: 'Eldridge, Alice and Kiefer, Chris and Overholt, Dan and Ulfarsson, Halldor' + bibtex: "@inproceedings{NIME21_54,\n abstract = {Self-resonating vibrotactile instruments\ + \ (SRIs) are hybrid feedback instruments, characterised by an electro-mechanical\ + \ feedback loop that is both the means of sound production and the expressive\ + \ interface. Through the lens of contemporary SRIs, we reflect on how they are\ + \ characterised, designed, and played. By considering reports from designers and\ + \ players of this species of instrument-performance system, we explore the experience\ + \ of playing them. 
With a view to supporting future research and practice in the\ + \ field, we illustrate the value of conceptualising SRIs in Cybernetic and systems\ + \ theoretic terms and suggest that this offers an intuitive, yet powerful basis\ + \ for future performance, analysis and making; in doing so we close the loop in\ + \ the making, playing and conceptualisation of SRIs with the aim of nourishing\ + \ the evolution of theory, creative and technical practice in this field.},\n\ + \ address = {Shanghai, China},\n articleno = {54},\n author = {Eldridge, Alice\ + \ and Kiefer, Chris and Overholt, Dan and Ulfarsson, Halldor},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.1f29a09e},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/EP1G4vCVm_E},\n title = {Self-resonating\ + \ Vibrotactile Feedback Instruments {\\textbar}{\\textbar}: Making, Playing, Conceptualising\ + \ :{\\textbar}{\\textbar}},\n url = {https://nime.pubpub.org/pub/6mhrjiqt},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178824 + doi: 10.21428/92fbeb44.1f29a09e issn: 2220-4806 month: June - pages: 233--238 - publisher: 'Goldsmiths, University of London' - title: 'A Methodological Framework for Teaching, Evaluating and Informing NIME Design - with a Focus on Mapping and Expressiveness' - url: http://www.nime.org/proceedings/2014/nime2014_472.pdf - year: 2014 + presentation-video: https://youtu.be/EP1G4vCVm_E + title: 'Self-resonating Vibrotactile Feedback Instruments ||: Making, Playing, Conceptualising + :||' + url: https://nime.pubpub.org/pub/6mhrjiqt + year: 2021 - ENTRYTYPE: inproceedings - ID: btaylor2014 - abstract: 'Developing for mobile and multimodal platforms is more important now - than ever, as smartphones and tablets proliferate and mobile device orchestras - become 
commonplace. We detail NexusUI, a JavaScript framework that enables rapid - prototyping and development of expressive multitouch electronic instrument interfaces - within a web browser. Extensions of this project assist in easily creating dynamic - user interfaces. NexusUI contains several novel encapsulations of creative interface - objects, each accessible with one line of code. NexusUp assists in one-button - duplication of Max interfaces into mobile-friendly web pages that transmit to - Max automatically via Open Sound Control. NexusDrop enables drag-and-drop interface - building and saves interfaces to a central Nexus database. Finally, we provide - an overview of several projects made with NexusUI, including mobile instruments, - art installations, sound diffusion tools, and iOS games, and describe Nexus'' - possibilities as an architecture for our future Mobile App Orchestra.' - address: 'London, United Kingdom' - author: Benjamin Taylor and Jesse Allison and William Conlin and Yemin Oh and Daniel - Holmes - bibtex: "@inproceedings{btaylor2014,\n abstract = {Developing for mobile and multimodal\ - \ platforms is more important now than ever, as smartphones and tablets proliferate\ - \ and mobile device orchestras become commonplace. We detail NexusUI, a JavaScript\ - \ framework that enables rapid prototyping and development of expressive multitouch\ - \ electronic instrument interfaces within a web browser. Extensions of this project\ - \ assist in easily creating dynamic user interfaces. NexusUI contains several\ - \ novel encapsulations of creative interface objects, each accessible with one\ - \ line of code. NexusUp assists in one-button duplication of Max interfaces into\ - \ mobile-friendly web pages that transmit to Max automatically via Open Sound\ - \ Control. NexusDrop enables drag-and-drop interface building and saves interfaces\ - \ to a central Nexus database. 
Finally, we provide an overview of several projects\ - \ made with NexusUI, including mobile instruments, art installations, sound diffusion\ - \ tools, and iOS games, and describe Nexus' possibilities as an architecture for\ - \ our future Mobile App Orchestra.},\n address = {London, United Kingdom},\n author\ - \ = {Benjamin Taylor and Jesse Allison and William Conlin and Yemin Oh and Daniel\ - \ Holmes},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178951},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {257--262},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Simplified Expressive Mobile Development with NexusUI,\ - \ NexusUp, and NexusDrop},\n url = {http://www.nime.org/proceedings/2014/nime2014_480.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_55 + abstract: 'Virtual reality (VR) offers novel possibilities of design choices for + Digital Musical Instruments in terms of shapes, sizes, sounds or colours, removing + many constraints inherent to physical interfaces. In particular, the size and + position of the interface components of Immersive Virtual Musical Instruments + (IVMIs) can be freely chosen to elicit large or small hand gestures. In addition, + VR allows for the manipulation of what users visually perceive of their actual + physical actions, through redirections and changes in Control-Display Ratio (CDR). + Visual and gestural amplitudes can therefore be defined separately, potentially + affecting the user experience in new ways. In this paper, we investigate the use + of CDR to enrich the design with a control over the user perceived fatigue, sense + of presence and musical expression. Our findings suggest that the CDR has an impact + on the sense of presence, on the perceived difficulty of controlling the sound + and on the distance covered by the hand. 
From these results, we derive a set of + insights and guidelines for the design of IVMIs.' + address: 'Shanghai, China' + articleno: 55 + author: 'Reynaert, Vincent and Berthaut, Florent and Rekik, Yosra and grisoni, laurent' + bibtex: "@inproceedings{NIME21_55,\n abstract = {Virtual reality (VR) offers novel\ + \ possibilities of design choices for Digital Musical Instruments in terms of\ + \ shapes, sizes, sounds or colours, removing many constraints inherent to physical\ + \ interfaces. In particular, the size and position of the interface components\ + \ of Immersive Virtual Musical Instruments (IVMIs) can be freely chosen to elicit\ + \ large or small hand gestures. In addition, VR allows for the manipulation of\ + \ what users visually perceive of their actual physical actions, through redirections\ + \ and changes in Control-Display Ratio (CDR). Visual and gestural amplitudes can\ + \ therefore be defined separately, potentially affecting the user experience in\ + \ new ways. In this paper, we investigate the use of CDR to enrich the design\ + \ with a control over the user perceived fatigue, sense of presence and musical\ + \ expression. Our findings suggest that the CDR has an impact on the sense of\ + \ presence, on the perceived difficulty of controlling the sound and on the distance\ + \ covered by the hand. 
From these results, we derive a set of insights and guidelines\ + \ for the design of IVMIs.},\n address = {Shanghai, China},\n articleno = {55},\n\ + \ author = {Reynaert, Vincent and Berthaut, Florent and Rekik, Yosra and grisoni,\ + \ laurent},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.c47be986},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/d1DthYt8EUw},\n\ + \ title = {The Effect of Control-Display Ratio on User Experience in Immersive\ + \ Virtual Musical Instruments},\n url = {https://nime.pubpub.org/pub/8n8br4cc},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178951 + doi: 10.21428/92fbeb44.c47be986 issn: 2220-4806 month: June - pages: 257--262 - publisher: 'Goldsmiths, University of London' - title: 'Simplified Expressive Mobile Development with NexusUI, NexusUp, and NexusDrop' - url: http://www.nime.org/proceedings/2014/nime2014_480.pdf - year: 2014 + presentation-video: https://youtu.be/d1DthYt8EUw + title: The Effect of Control-Display Ratio on User Experience in Immersive Virtual + Musical Instruments + url: https://nime.pubpub.org/pub/8n8br4cc + year: 2021 - ENTRYTYPE: inproceedings - ID: mfunk2014 - abstract: 'When looking at modern music club settings, especially in the area of - electronic music, music is consumed in a unidirectional way -from DJ or producer - to the audience -with little direct means to influence and participate. In this - paper we challenge this phenomenon and aim for a new bond between the audience - and the DJ through the creation of an interactive dance concept: Experio. Experio - allows for multiple audience participants influencing the musical performance - through dance, facilitated by a musical moderator using a tailored interface. 
- This co-creation of electronic music on both novice and expert levels is a new - participatory live performance approach, which is evaluated on the basis of thousands - of visitors who interacted with Experio during several international exhibitions.' - address: 'London, United Kingdom' - author: Bastiaan van Hout and Luca Giacolini and Bart Hengeveld and Mathias Funk - and Joep Frens - bibtex: "@inproceedings{mfunk2014,\n abstract = {When looking at modern music club\ - \ settings, especially in the area of electronic music, music is consumed in a\ - \ unidirectional way -from DJ or producer to the audience -with little direct\ - \ means to influence and participate. In this paper we challenge this phenomenon\ - \ and aim for a new bond between the audience and the DJ through the creation\ - \ of an interactive dance concept: Experio. Experio allows for multiple audience\ - \ participants influencing the musical performance through dance, facilitated\ - \ by a musical moderator using a tailored interface. 
This co-creation of electronic\ - \ music on both novice and expert levels is a new participatory live performance\ - \ approach, which is evaluated on the basis of thousands of visitors who interacted\ - \ with Experio during several international exhibitions.},\n address = {London,\ - \ United Kingdom},\n author = {Bastiaan van Hout and Luca Giacolini and Bart Hengeveld\ - \ and Mathias Funk and Joep Frens},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178808},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {46--49},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Experio: a Design for Novel Audience Participation\ - \ in Club Settings},\n url = {http://www.nime.org/proceedings/2014/nime2014_481.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_56 + abstract: 'This paper explores ecological perspectives of human activity in the + use of digital musical instruments and assistive technology. While such perspectives + are relatively nascent in DMI design and evaluation, ecological frameworks have + a long-standing foundation in occupational therapy and the design of assistive + technology products and services. Informed by two case studies, the authors'' + critique, compare and marry concepts from each domain to guide future research + into accessible music technology. The authors discover that ecological frameworks + used by occupational therapists are helpful in describing the nature of individual + impairment, disability and situated context. However, such frameworks seemingly + flounder when attempting to describe the personal value of music-making.' 
+ address: 'Shanghai, China' + articleno: 56 + author: 'Lucas, Alex and Harrison, Jacob and Schroeder, Franziska and Ortiz, Miguel' + bibtex: "@inproceedings{NIME21_56,\n abstract = {This paper explores ecological\ + \ perspectives of human activity in the use of digital musical instruments and\ + \ assistive technology. While such perspectives are relatively nascent in DMI\ + \ design and evaluation, ecological frameworks have a long-standing foundation\ + \ in occupational therapy and the design of assistive technology products and\ + \ services. Informed by two case studies, the authors' critique, compare and marry\ + \ concepts from each domain to guide future research into accessible music technology.\ + \ The authors discover that ecological frameworks used by occupational therapists\ + \ are helpful in describing the nature of individual impairment, disability and\ + \ situated context. However, such frameworks seemingly flounder when attempting\ + \ to describe the personal value of music-making.},\n address = {Shanghai, China},\n\ + \ articleno = {56},\n author = {Lucas, Alex and Harrison, Jacob and Schroeder,\ + \ Franziska and Ortiz, Miguel},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.ff09de34},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/Khk05vKMrao},\n\ + \ title = {Cross-Pollinating Ecological Perspectives in ADMI Design and Evaluation},\n\ + \ url = {https://nime.pubpub.org/pub/d72sylsq},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178808 + doi: 10.21428/92fbeb44.ff09de34 issn: 2220-4806 month: June - pages: 46--49 - publisher: 'Goldsmiths, University of London' - title: 'Experio: a Design for Novel Audience Participation in Club Settings' - url: http://www.nime.org/proceedings/2014/nime2014_481.pdf - year: 2014 + 
presentation-video: https://youtu.be/Khk05vKMrao + title: Cross-Pollinating Ecological Perspectives in ADMI Design and Evaluation + url: https://nime.pubpub.org/pub/d72sylsq + year: 2021 - ENTRYTYPE: inproceedings - ID: jfrancoise12014 - abstract: We present a set of probabilistic models that support the design of movement - and sound relationships in interactive sonic systems. We focus on a mapping--by--demonstration - approach in which the relationships between motion and sound are defined by a - machine learning model that learns from a set of user examples. We describe four - probabilistic models with complementary characteristics in terms of multimodality - and temporality. We illustrate the practical use of each of the four models with - a prototype application for sound control built using our Max implementation. - address: 'London, United Kingdom' - author: Jules Françoise and Norbert Schnell and Riccardo Borghesi and Frédéric Bevilacqua - bibtex: "@inproceedings{jfrancoise12014,\n abstract = {We present a set of probabilistic\ - \ models that support the design of movement and sound relationships in interactive\ - \ sonic systems. We focus on a mapping--by--demonstration approach in which the\ - \ relationships between motion and sound are defined by a machine learning model\ - \ that learns from a set of user examples. 
We describe four probabilistic models\ - \ with complementary characteristics in terms of multimodality and temporality.\ - \ We illustrate the practical use of each of the four models with a prototype\ - \ application for sound control built using our Max implementation.},\n address\ - \ = {London, United Kingdom},\n author = {Jules Fran\\c{c}oise and Norbert Schnell\ - \ and Riccardo Borghesi and Fr\\'ed\\'eric Bevilacqua},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178764},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {287--292},\n publisher = {Goldsmiths, University of London},\n title = {Probabilistic\ - \ Models for Designing Motion and Sound Relationships},\n url = {http://www.nime.org/proceedings/2014/nime2014_482.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_57 + abstract: 'This paper describes Le Bâton, a new digital musical instrument based + on the nonlinear dynamics of the triple pendulum. The triple pendulum is a simple + physical system constructed by attaching three pendulums vertically such that + each joint can swing freely. When subjected to large oscillations, its motion + is chaotic and is often described as unexpectedly mesmerizing. Le Bâton uses wireless + inertial measurement units (IMUs) embedded in each pendulum arm to send real-time + motion data to Max/MSP. Additionally, we implemented a control mechanism, allowing + a user to remotely interact with it by setting the initial release angle. Here, + we explain the motivation and design of Le Bâton and describe mapping strategies. + To conclude, we discuss how its nature of user interaction complicates its status + as a digital musical instrument.' + address: 'Shanghai, China' + articleno: 57 + author: 'Skarha, Matthew and Cusson, Vincent and Frisson, Christian and Wanderley, + Marcelo M.' 
+ bibtex: "@inproceedings{NIME21_57,\n abstract = {This paper describes Le Bâton,\ + \ a new digital musical instrument based on the nonlinear dynamics of the triple\ + \ pendulum. The triple pendulum is a simple physical system constructed by attaching\ + \ three pendulums vertically such that each joint can swing freely. When subjected\ + \ to large oscillations, its motion is chaotic and is often described as unexpectedly\ + \ mesmerizing. Le Bâton uses wireless inertial measurement units (IMUs) embedded\ + \ in each pendulum arm to send real-time motion data to Max/MSP. Additionally,\ + \ we implemented a control mechanism, allowing a user to remotely interact with\ + \ it by setting the initial release angle. Here, we explain the motivation and\ + \ design of Le Bâton and describe mapping strategies. To conclude, we discuss\ + \ how its nature of user interaction complicates its status as a digital musical\ + \ instrument.},\n address = {Shanghai, China},\n articleno = {57},\n author =\ + \ {Skarha, Matthew and Cusson, Vincent and Frisson, Christian and Wanderley, Marcelo\ + \ M.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.09ecc54d},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/bLx5b9aqwgI},\n title\ + \ = {Le Bâton: A Digital Musical Instrument Based on the Chaotic Triple Pendulum},\n\ + \ url = {https://nime.pubpub.org/pub/uh1zfz1f},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178764 + doi: 10.21428/92fbeb44.09ecc54d issn: 2220-4806 month: June - pages: 287--292 - publisher: 'Goldsmiths, University of London' - title: Probabilistic Models for Designing Motion and Sound Relationships - url: http://www.nime.org/proceedings/2014/nime2014_482.pdf - year: 2014 + presentation-video: https://youtu.be/bLx5b9aqwgI + title: 'Le Bâton: A 
Digital Musical Instrument Based on the Chaotic Triple Pendulum' + url: https://nime.pubpub.org/pub/uh1zfz1f + year: 2021 - ENTRYTYPE: inproceedings - ID: atsiros12014 - abstract: 'This paper presents the findings of two exploratory studies. In these - studies participants performed a series of image-sound association tasks. The - aim of the studies was to investigate the perceived similarity and the efficacy - of two multidimensional mappings each consisting of three audio-visual associations. - The purpose of the mappings is to enable visual control of corpus-based concatenative - synthesis. More specifically the stimuli in the first study was designed to test - the perceived similarity of six audio-visual associations, between the two mappings - using three corpora resulting in 18 audio-visual stimuli. The corpora differ in - terms of two sound characteristics: harmonic contain and continuity. Data analysis - revealed no significant differences in the participant''s responses between the - three corpora, or between the two mappings. However highly significant differences - were revealed between the individual audio-visual association pairs. The second - study investigates the affects of the mapping and the corpus in the ability of - the participants to detect which image out of three similar images was used to - generate six audio stimuli. The data analysis revealed significant differences - in the ability of the participants'' to detect the correct image depending on - which corpus was used. Less significant was the effect of the mapping in the success - rate of the participant responses.' - address: 'London, United Kingdom' - author: Augoustinos Tsiros - bibtex: "@inproceedings{atsiros12014,\n abstract = {This paper presents the findings\ - \ of two exploratory studies. In these studies participants performed a series\ - \ of image-sound association tasks. 
The aim of the studies was to investigate\ - \ the perceived similarity and the efficacy of two multidimensional mappings each\ - \ consisting of three audio-visual associations. The purpose of the mappings is\ - \ to enable visual control of corpus-based concatenative synthesis. More specifically\ - \ the stimuli in the first study was designed to test the perceived similarity\ - \ of six audio-visual associations, between the two mappings using three corpora\ - \ resulting in 18 audio-visual stimuli. The corpora differ in terms of two sound\ - \ characteristics: harmonic contain and continuity. Data analysis revealed no\ - \ significant differences in the participant's responses between the three corpora,\ - \ or between the two mappings. However highly significant differences were revealed\ - \ between the individual audio-visual association pairs. The second study investigates\ - \ the affects of the mapping and the corpus in the ability of the participants\ - \ to detect which image out of three similar images was used to generate six audio\ - \ stimuli. The data analysis revealed significant differences in the ability of\ - \ the participants' to detect the correct image depending on which corpus was\ - \ used. 
Less significant was the effect of the mapping in the success rate of\ - \ the participant responses.},\n address = {London, United Kingdom},\n author\ - \ = {Augoustinos Tsiros},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178965},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {421--426},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Evaluating the Perceived Similarity Between\ - \ Audio-Visual Features Using Corpus-Based Concatenative Synthesis},\n url = {http://www.nime.org/proceedings/2014/nime2014_484.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_58 + abstract: 'The CHILLER (a Computer-Human Interface for the Live Labeling of Emotional + Responses) is a prototype of an affordable and easy-to-use wearable sensor for + the real-time detection and visualization of one of the most accurate biomarkers + of musical emotional processing:  the piloerection of the skin (i.e., the goosebumps) + that accompany musical chills (also known as musical frissons or shivers down + the spine). In controlled laboratory experiments, electrodermal activity (EDA) + has been traditionally used to measure fluctuations of musical emotion. EDA is, + however, ill-suited for real-world settings (e.g., live concerts) because of its + sensitivity to movement, electronic noise and variations in the contact between + the skin and the recording electrodes. The CHILLER, based on the Raspberry Pi + architecture, overcomes these limitations by using a well-known algorithm capable + of detecting goosebumps from a video recording of a patch of skin. The CHILLER + has potential applications in both academia and industry and could be used as + a tool to broaden participation in STEM, as it brings together concepts from experimental + psychology, neuroscience, physiology and computer science in an inexpensive, do-it-yourself + device well-suited for educational purposes.' 
+ address: 'Shanghai, China' + articleno: 58 + author: 'Pelofi, Claire and Goldstein, Michal and Bevilacqua, Dana and McPhee, Michael + and Abrams, Ellie and Ripollés, Pablo' + bibtex: "@inproceedings{NIME21_58,\n abstract = {The CHILLER (a Computer-Human Interface\ + \ for the Live Labeling of Emotional Responses) is a prototype of an affordable\ + \ and easy-to-use wearable sensor for the real-time detection and visualization\ + \ of one of the most accurate biomarkers of musical emotional processing:  the\ + \ piloerection of the skin (i.e., the goosebumps) that accompany musical chills\ + \ (also known as musical frissons or shivers down the spine). In controlled laboratory\ + \ experiments, electrodermal activity (EDA) has been traditionally used to measure\ + \ fluctuations of musical emotion. EDA is, however, ill-suited for real-world\ + \ settings (e.g., live concerts) because of its sensitivity to movement, electronic\ + \ noise and variations in the contact between the skin and the recording electrodes.\ + \ The CHILLER, based on the Raspberry Pi architecture, overcomes these limitations\ + \ by using a well-known algorithm capable of detecting goosebumps from a video\ + \ recording of a patch of skin. 
The CHILLER has potential applications in both\ + \ academia and industry and could be used as a tool to broaden participation in\ + \ STEM, as it brings together concepts from experimental psychology, neuroscience,\ + \ physiology and computer science in an inexpensive, do-it-yourself device well-suited\ + \ for educational purposes.},\n address = {Shanghai, China},\n articleno = {58},\n\ + \ author = {Pelofi, Claire and Goldstein, Michal and Bevilacqua, Dana and McPhee,\ + \ Michael and Abrams, Ellie and Ripollés, Pablo},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.21428/92fbeb44.5da1ca0b},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/JujnpqoSdR4},\n title = {CHILLER: a Computer Human Interface\ + \ for the Live Labeling of Emotional Responses},\n url = {https://nime.pubpub.org/pub/kdahf9fq},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178965 + doi: 10.21428/92fbeb44.5da1ca0b issn: 2220-4806 month: June - pages: 421--426 - publisher: 'Goldsmiths, University of London' - title: Evaluating the Perceived Similarity Between Audio-Visual Features Using Corpus-Based - Concatenative Synthesis - url: http://www.nime.org/proceedings/2014/nime2014_484.pdf - year: 2014 + presentation-video: https://youtu.be/JujnpqoSdR4 + title: 'CHILLER: a Computer Human Interface for the Live Labeling of Emotional Responses' + url: https://nime.pubpub.org/pub/kdahf9fq + year: 2021 - ENTRYTYPE: inproceedings - ID: ngold2014 - abstract: 'The Leap Motion(TM) sensor offers fine-grained gesture-recognition and - hand tracking. Since its release, there have been several uses of the device for - instrument design, musical interaction and expression control, documented through - online video. 
However, there has been little formal documented investigation of - the potential and challenges of the platform in this context. This paper presents - lessons learned from work-in-progress on the development of musical instruments - and control applications using the Leap Motion(TM) sensor. Two instruments are - presented: Air-Keys and Air-Pads and the potential for augmentation of a traditional - keyboard is explored. The results show that the platform is promising in this - context but requires various challenges, both physical and logical, to be overcome.' - address: 'London, United Kingdom' - author: Jihyun Han and Nicolas Gold - bibtex: "@inproceedings{ngold2014,\n abstract = {The Leap Motion(TM) sensor offers\ - \ fine-grained gesture-recognition and hand tracking. Since its release, there\ - \ have been several uses of the device for instrument design, musical interaction\ - \ and expression control, documented through online video. However, there has\ - \ been little formal documented investigation of the potential and challenges\ - \ of the platform in this context. This paper presents lessons learned from work-in-progress\ - \ on the development of musical instruments and control applications using the\ - \ Leap Motion(TM) sensor. Two instruments are presented: Air-Keys and Air-Pads\ - \ and the potential for augmentation of a traditional keyboard is explored. 
The\ - \ results show that the platform is promising in this context but requires various\ - \ challenges, both physical and logical, to be overcome.},\n address = {London,\ - \ United Kingdom},\n author = {Jihyun Han and Nicolas Gold},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178784},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {371--374},\n publisher = {Goldsmiths, University of London},\n title = {Lessons\ - \ Learned in Exploring the Leap Motion(TM) Sensor for Gesture-based Instrument\ - \ Design},\n url = {http://www.nime.org/proceedings/2014/nime2014_485.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_59 + abstract: 'Creating an artificially intelligent (AI) aid for music composers requires + a practical and modular approach, one that allows the composer to manipulate the + technology when needed in the search for new sounds. Many existing approaches + fail to capture the interest of composers as they are limited beyond their demonstrative + purposes, allow for only minimal interaction from the composer or require GPU + access to generate samples quickly. This paper introduces Score-Transformer (ST), + a practical integration of deep learning technology to aid in the creation of + new music which works seamlessly alongside any popular software notation (Finale, + Sibelius, etc.). Score-Transformer is built upon a variant of the powerful transformer + model, currently used in state-of-the-art natural language models. Owing to hierarchical + and sequential similarities between music and language, the transformer model + can learn to write polyphonic MIDI music based on any styles, genres, or composers + it is trained upon. This paper briefly outlines how the model learns and later + notates music based upon any prompt given to it from the user. 
Furthermore, ST + can be updated at any time on additional MIDI recordings minimizing the risk of + the software becoming outdated or impractical for continued use.' + address: 'Shanghai, China' + articleno: 59 + author: 'Lupker, Jeffrey A. T.' + bibtex: "@inproceedings{NIME21_59,\n abstract = {Creating an artificially intelligent\ + \ (AI) aid for music composers requires a practical and modular approach, one\ + \ that allows the composer to manipulate the technology when needed in the search\ + \ for new sounds. Many existing approaches fail to capture the interest of composers\ + \ as they are limited beyond their demonstrative purposes, allow for only minimal\ + \ interaction from the composer or require GPU access to generate samples quickly.\ + \ This paper introduces Score-Transformer (ST), a practical integration of deep\ + \ learning technology to aid in the creation of new music which works seamlessly\ + \ alongside any popular software notation (Finale, Sibelius, etc.). Score-Transformer\ + \ is built upon a variant of the powerful transformer model, currently used in\ + \ state-of-the-art natural language models. Owing to hierarchical and sequential\ + \ similarities between music and language, the transformer model can learn to\ + \ write polyphonic MIDI music based on any styles, genres, or composers it is\ + \ trained upon. This paper briefly outlines how the model learns and later notates\ + \ music based upon any prompt given to it from the user. Furthermore, ST can be\ + \ updated at any time on additional MIDI recordings minimizing the risk of the\ + \ software becoming outdated or impractical for continued use.},\n address = {Shanghai,\ + \ China},\n articleno = {59},\n author = {Lupker, Jeffrey A. 
T.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.21d4fd1f},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/CZO8nj6YzVI},\n title = {Score-Transformer:\ + \ A Deep Learning Aid for Music Composition},\n url = {https://nime.pubpub.org/pub/7a6ij1ak},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178784 + doi: 10.21428/92fbeb44.21d4fd1f issn: 2220-4806 month: June - pages: 371--374 - publisher: 'Goldsmiths, University of London' - title: Lessons Learned in Exploring the Leap Motion(TM) Sensor for Gesture-based - Instrument Design - url: http://www.nime.org/proceedings/2014/nime2014_485.pdf - year: 2014 + presentation-video: https://youtu.be/CZO8nj6YzVI + title: 'Score-Transformer: A Deep Learning Aid for Music Composition' + url: https://nime.pubpub.org/pub/7a6ij1ak + year: 2021 - ENTRYTYPE: inproceedings - ID: jlarsen2014 - abstract: 'People with a physical handicap are often not able to engage and embrace - the world of music on the same terms as normal functioning people. Musical instruments - have been refined the last centuries which makes them highly specialized instruments - that nearly all requires at least two functioning hands. In this study we try - to enable people with hemiplegia to play a real electrical guitar by modifying - it in a way that make people with hemiplegia able to actually play the guitar. - We developed the guitar platform to utilize sensors to capture the rhythmic motion - of alternative fully functioning limbs, such as a foot, knee or the head to activate - a motorized fader moving a pick back and forth across the strings. 
The approach - employs the flexibility of a programmable digital system which allows us to scale - and map different ranges of data from various sensors to the motion of the actuator - and thereby making it easier adapt to individual users. To validate and test the - instrument platform we collaborated with the Helena Elsass Center during their - 2013 Summer Camp to see if we actually succeeded in creating an electrical guitar - that children with hemiplegia could actually play. The initial user studies showed - that children with hemiplegia were able to play the actuated guitar by producing - rhythmical movement across the strings that enables them to enter a world of music - they so often see as closed.' - address: 'London, United Kingdom' - author: Jeppe Larsen and Dan Overholt and Thomas Moeslund - bibtex: "@inproceedings{jlarsen2014,\n abstract = {People with a physical handicap\ - \ are often not able to engage and embrace the world of music on the same terms\ - \ as normal functioning people. Musical instruments have been refined the last\ - \ centuries which makes them highly specialized instruments that nearly all requires\ - \ at least two functioning hands. In this study we try to enable people with hemiplegia\ - \ to play a real electrical guitar by modifying it in a way that make people with\ - \ hemiplegia able to actually play the guitar. We developed the guitar platform\ - \ to utilize sensors to capture the rhythmic motion of alternative fully functioning\ - \ limbs, such as a foot, knee or the head to activate a motorized fader moving\ - \ a pick back and forth across the strings. The approach employs the flexibility\ - \ of a programmable digital system which allows us to scale and map different\ - \ ranges of data from various sensors to the motion of the actuator and thereby\ - \ making it easier adapt to individual users. 
To validate and test the instrument\ - \ platform we collaborated with the Helena Elsass Center during their 2013 Summer\ - \ Camp to see if we actually succeeded in creating an electrical guitar that children\ - \ with hemiplegia could actually play. The initial user studies showed that children\ - \ with hemiplegia were able to play the actuated guitar by producing rhythmical\ - \ movement across the strings that enables them to enter a world of music they\ - \ so often see as closed.},\n address = {London, United Kingdom},\n author = {Jeppe\ - \ Larsen and Dan Overholt and Thomas Moeslund},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178845},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {60--65},\n publisher = {Goldsmiths, University of London},\n title = {The\ - \ Actuated guitar: Implementation and user test on children with Hemiplegia},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_486.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_6 + abstract: 'We propose and evaluate an approach to incorporating multiple user-provided + inputs, each demonstrating a complementary set of musical characteristics, to + guide the output of a generative model for synthesizing short music performances + or loops. We focus on user inputs that describe both “what to play” (via scores + in MIDI format) and “how to play it” (via rhythmic inputs to specify expressive + timing and dynamics). Through experiments, we demonstrate that our method can + facilitate human-AI co-creation of drum loops with diverse and customizable outputs. + In the process, we argue for the interaction paradigm of mapping by demonstration + as a promising approach to working with deep learning models that are capable + of generating complex and realistic musical parts.' 
+ address: 'Shanghai, China' + articleno: 6 + author: 'Gillick, Jon and Bamman, David' + bibtex: "@inproceedings{NIME21_6,\n abstract = {We propose and evaluate an approach\ + \ to incorporating multiple user-provided inputs, each demonstrating a complementary\ + \ set of musical characteristics, to guide the output of a generative model for\ + \ synthesizing short music performances or loops. We focus on user inputs that\ + \ describe both “what to play” (via scores in MIDI format) and “how to play it”\ + \ (via rhythmic inputs to specify expressive timing and dynamics). Through experiments,\ + \ we demonstrate that our method can facilitate human-AI co-creation of drum loops\ + \ with diverse and customizable outputs. In the process, we argue for the interaction\ + \ paradigm of mapping by demonstration as a promising approach to working with\ + \ deep learning models that are capable of generating complex and realistic musical\ + \ parts.},\n address = {Shanghai, China},\n articleno = {6},\n author = {Gillick,\ + \ Jon and Bamman, David},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.06e2d5f4},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/Q2M_smiN6oo},\n\ + \ title = {What to Play and How to Play it: Guiding Generative Music Models with\ + \ Multiple Demonstrations},\n url = {https://nime.pubpub.org/pub/s3x60926},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178845 + doi: 10.21428/92fbeb44.06e2d5f4 issn: 2220-4806 month: June - pages: 60--65 - publisher: 'Goldsmiths, University of London' - title: 'The Actuated guitar: Implementation and user test on children with Hemiplegia' - url: http://www.nime.org/proceedings/2014/nime2014_486.pdf - year: 2014 + presentation-video: https://youtu.be/Q2M_smiN6oo + title: 'What to Play and How to Play 
it: Guiding Generative Music Models with Multiple + Demonstrations' + url: https://nime.pubpub.org/pub/s3x60926 + year: 2021 - ENTRYTYPE: inproceedings - ID: tresch2014 - abstract: This paper proposes a simple architecture for creating (indoor) audio - walks by using a server running Max/MSP together with the external object fhnw.audiowalk.state - and smartphone clients running either under Android or iOS using LibPd. Server - and smartphone clients communicate over WLAN by exchanging OSC messages. Server - and client have been designed in a way that allows artists with only little programming - skills to create position-based audio walks. - address: 'London, United Kingdom' - author: Thomas Resch and Matthias Krebs - bibtex: "@inproceedings{tresch2014,\n abstract = {This paper proposes a simple architecture\ - \ for creating (indoor) audio walks by using a server running Max/MSP together\ - \ with the external object fhnw.audiowalk.state and smartphone clients running\ - \ either under Android or iOS using LibPd. Server and smartphone clients communicate\ - \ over WLAN by exchanging OSC messages. 
Server and client have been designed in\ - \ a way that allows artists with only little programming skills to create position-based\ - \ audio walks.},\n address = {London, United Kingdom},\n author = {Thomas Resch\ - \ and Matthias Krebs},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178917},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {269--272},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {A Simple Architecture for Server-based (Indoor)\ - \ Audio Walks},\n url = {http://www.nime.org/proceedings/2014/nime2014_491.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_60 + abstract: 'Amstramgrame is a music technology STEAM (Science Technology Engineering + Arts and Mathematics) project aiming at making more tangible abstract scientific + concepts through the programming of a Digital Musical Instrument (DMI): the Gramophone. + Various custom tools ranging from online programming environments to the Gramophone + itself have been developed as part of this project. An innovative method anchored + in the reality of the field as well as a wide range of key-turn pedagogical scenarios + are also part of the Amtramgrame toolkit. This article presents the tools and + the method of Amstramgrame as well as the results of its pilot phase. Future directions + along with some insights on the implementation of this kind of project are provided + as well.' + address: 'Shanghai, China' + articleno: 60 + author: 'Michon, Romain and Dumitrascu, Catinca and Chudet, Sandrine and Orlarey, + Yann and Letz, Stéphane and Fober, Dominique' + bibtex: "@inproceedings{NIME21_60,\n abstract = {Amstramgrame is a music technology\ + \ STEAM (Science Technology Engineering Arts and Mathematics) project aiming at\ + \ making more tangible abstract scientific concepts through the programming of\ + \ a Digital Musical Instrument (DMI): the Gramophone. 
Various custom tools ranging\ + \ from online programming environments to the Gramophone itself have been developed\ + \ as part of this project. An innovative method anchored in the reality of the\ + \ field as well as a wide range of key-turn pedagogical scenarios are also part\ + \ of the Amtramgrame toolkit. This article presents the tools and the method of\ + \ Amstramgrame as well as the results of its pilot phase. Future directions along\ + \ with some insights on the implementation of this kind of project are provided\ + \ as well.},\n address = {Shanghai, China},\n articleno = {60},\n author = {Michon,\ + \ Romain and Dumitrascu, Catinca and Chudet, Sandrine and Orlarey, Yann and Letz,\ + \ Stéphane and Fober, Dominique},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.a84edd3f},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/KTgl4suQ_Ks},\n\ + \ title = {Amstramgrame: Making Scientific Concepts More Tangible Through Music\ + \ Technology at School},\n url = {https://nime.pubpub.org/pub/3zeala6v},\n year\ + \ = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178917 + doi: 10.21428/92fbeb44.a84edd3f issn: 2220-4806 month: June - pages: 269--272 - publisher: 'Goldsmiths, University of London' - title: A Simple Architecture for Server-based (Indoor) Audio Walks - url: http://www.nime.org/proceedings/2014/nime2014_491.pdf - year: 2014 + presentation-video: https://youtu.be/KTgl4suQ_Ks + title: 'Amstramgrame: Making Scientific Concepts More Tangible Through Music Technology + at School' + url: https://nime.pubpub.org/pub/3zeala6v + year: 2021 - ENTRYTYPE: inproceedings - ID: strail2014 - abstract: 'The El-Lamellophone (El-La) is a Lamellophone hyperinstrument incorporating - electronic sensors and integrated DSP. 
Initial investigations have been made into - digitallycontrolled physical actuation of the acoustic tines. An embedded Linux - micro-computer supplants the laptop. A piezoelectric pickup is mounted to the - underside of the body of the instrument for direct audio acquisition providing - a robust signal with little interference. The signal is used for electric sound-reinforcement, - creative signal processing and audio analysis developed in Puredata (Pd). This - signal inputs and outputs the micro computer via stereo 1/8th inch phono jacks. - Sensors provide gesture recognition affording the performer a broader, more dynamic - range of musical human computer interaction (MHCI) over specific DSP functions. - Work has been done toward electromagnetic actuation of the tines, aiming to allow - performer control and sensation via both traditional Lamellophone techniques, - as well as extended playing techniques that incorporate shared human/computer - control of the resulting sound. The goal is to achieve this without compromising - the traditional sound production methods of the acoustic instrument while leveraging - inherent performance gestures with embedded continuous controller values essential - to MHCI. The result is an intuitive, performer designed, hybrid electro-acoustic - instrument, idiomatic computer interface, and robotic acoustic instrument in one - framework.' - address: 'London, United Kingdom' - author: Shawn Trail and Duncan MacConnell and Leo Jenkins and Jeff Snyder and George - Tzanetakis and Peter Driessen - bibtex: "@inproceedings{strail2014,\n abstract = {The El-Lamellophone (El-La) is\ - \ a Lamellophone hyperinstrument incorporating electronic sensors and integrated\ - \ DSP. Initial investigations have been made into digitallycontrolled physical\ - \ actuation of the acoustic tines. An embedded Linux micro-computer supplants\ - \ the laptop. 
A piezoelectric pickup is mounted to the underside of the body of\ - \ the instrument for direct audio acquisition providing a robust signal with little\ - \ interference. The signal is used for electric sound-reinforcement, creative\ - \ signal processing and audio analysis developed in Puredata (Pd). This signal\ - \ inputs and outputs the micro computer via stereo 1/8th inch phono jacks. Sensors\ - \ provide gesture recognition affording the performer a broader, more dynamic\ - \ range of musical human computer interaction (MHCI) over specific DSP functions.\ - \ Work has been done toward electromagnetic actuation of the tines, aiming to\ - \ allow performer control and sensation via both traditional Lamellophone techniques,\ - \ as well as extended playing techniques that incorporate shared human/computer\ - \ control of the resulting sound. The goal is to achieve this without compromising\ - \ the traditional sound production methods of the acoustic instrument while leveraging\ - \ inherent performance gestures with embedded continuous controller values essential\ - \ to MHCI. 
The result is an intuitive, performer designed, hybrid electro-acoustic\ - \ instrument, idiomatic computer interface, and robotic acoustic instrument in\ - \ one framework.},\n address = {London, United Kingdom},\n author = {Shawn Trail\ - \ and Duncan MacConnell and Leo Jenkins and Jeff Snyder and George Tzanetakis\ - \ and Peter Driessen},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178959},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {537--540},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {El-Lamellophone A Low-cost, DIY, Open Framework\ - \ for Acoustic Lemellophone Based Hyperinstruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_492.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_61 + abstract: 'We study the question of how wireless, self-contained CMOS-synthesizers + with built-in speakers can be used to achieve low-threshold operability of multichannel + sound fields. We deliberately use low-tech and DIY approaches to build simple + sound modules for music interaction and education in order to ensure accessibility + of the technology. The modules are operated by wireless power transfer (WPT). + A multichannel sound field can be easily generated and modulated by placing several + sound objects in proximity to the induction coils. Alterations in sound are caused + by repositioning, moving or grouping the sound modules. Although not physically + linked to each other, the objects start interacting electro-acoustically when + they share the same magnetic field. Because they are equipped with electronic + sound generators and transducers, the sound modules can work independently from + a sound studio situation.' 
+ address: 'Shanghai, China' + articleno: 61 + author: 'Reuter, Vivian and Schwarz, Lorenz' + bibtex: "@inproceedings{NIME21_61,\n abstract = {We study the question of how wireless,\ + \ self-contained CMOS-synthesizers with built-in speakers can be used to achieve\ + \ low-threshold operability of multichannel sound fields. We deliberately use\ + \ low-tech and DIY approaches to build simple sound modules for music interaction\ + \ and education in order to ensure accessibility of the technology. The modules\ + \ are operated by wireless power transfer (WPT). A multichannel sound field can\ + \ be easily generated and modulated by placing several sound objects in proximity\ + \ to the induction coils. Alterations in sound are caused by repositioning, moving\ + \ or grouping the sound modules. Although not physically linked to each other,\ + \ the objects start interacting electro-acoustically when they share the same\ + \ magnetic field. Because they are equipped with electronic sound generators and\ + \ transducers, the sound modules can work independently from a sound studio situation.},\n\ + \ address = {Shanghai, China},\n articleno = {61},\n author = {Reuter, Vivian\ + \ and Schwarz, Lorenz},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.07c72a46},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/08kfv74Z880},\n\ + \ title = {Wireless Sound Modules},\n url = {https://nime.pubpub.org/pub/muvvx0y5},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178959 + doi: 10.21428/92fbeb44.07c72a46 issn: 2220-4806 month: June - pages: 537--540 - publisher: 'Goldsmiths, University of London' - title: 'El-Lamellophone A Low-cost, DIY, Open Framework for Acoustic Lemellophone - Based Hyperinstruments' - url: 
http://www.nime.org/proceedings/2014/nime2014_492.pdf - year: 2014 + presentation-video: https://youtu.be/08kfv74Z880 + title: Wireless Sound Modules + url: https://nime.pubpub.org/pub/muvvx0y5 + year: 2021 - ENTRYTYPE: inproceedings - ID: nklugel2014 - abstract: In this contribution we will show three prototypical applications that - allow users to collaboratively create rhythmic structures with successively more - degrees of freedom to generate rhythmic complexity. By means of a user study we - analyze the impact of this on the users' satisfaction and further compare it to - data logged during the experiments that allow us to measure the rhythmic complexity - created. - address: 'London, United Kingdom' - author: Niklas Klügel and Gerhard Hagerer and Georg Groh - bibtex: "@inproceedings{nklugel2014,\n abstract = {In this contribution we will\ - \ show three prototypical applications that allow users to collaboratively create\ - \ rhythmic structures with successively more degrees of freedom to generate rhythmic\ - \ complexity. By means of a user study we analyze the impact of this on the users'\ - \ satisfaction and further compare it to data logged during the experiments that\ - \ allow us to measure the rhythmic complexity created.},\n address = {London,\ - \ United Kingdom},\n author = {Niklas Kl\\''ugel and Gerhard Hagerer and Georg\ - \ Groh},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178835},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {50--53},\n publisher = {Goldsmiths, University of\ - \ London},\n title = {TreeQuencer: Collaborative Rhythm Sequencing A Comparative\ - \ Study},\n url = {http://www.nime.org/proceedings/2014/nime2014_498.pdf},\n year\ - \ = {2014}\n}\n" + ID: NIME21_62 + abstract: 'When two sounds are played at the same loudness, pitch, and duration, + what sets them apart are their timbres. 
This study documents the design and implementation + of the Timbre Explorer, a synthesizer interface based on efforts to dimensionalize + this perceptual concept. The resulting prototype controls four perceptually salient + dimensions of timbre in real-time: attack time, brightness, spectral flux, and + spectral density. A graphical user interface supports user understanding with + live visualizations of the effects of each dimension. The applications of this + interface are three-fold; further perceptual timbre studies, usage as a practical + shortcut for synthesizers, and educating users about the frequency domain, sound + synthesis, and the concept of timbre. The project has since been expanded to a + standalone version independent of a computer and a purely online web-audio version.' + address: 'Shanghai, China' + articleno: 62 + author: 'Lam, Joshua Ryan and Saitis, Charalampos' + bibtex: "@inproceedings{NIME21_62,\n abstract = {When two sounds are played at the\ + \ same loudness, pitch, and duration, what sets them apart are their timbres.\ + \ This study documents the design and implementation of the Timbre Explorer, a\ + \ synthesizer interface based on efforts to dimensionalize this perceptual concept.\ + \ The resulting prototype controls four perceptually salient dimensions of timbre\ + \ in real-time: attack time, brightness, spectral flux, and spectral density.\ + \ A graphical user interface supports user understanding with live visualizations\ + \ of the effects of each dimension. The applications of this interface are three-fold;\ + \ further perceptual timbre studies, usage as a practical shortcut for synthesizers,\ + \ and educating users about the frequency domain, sound synthesis, and the concept\ + \ of timbre. 
The project has since been expanded to a standalone version independent\ + \ of a computer and a purely online web-audio version.},\n address = {Shanghai,\ + \ China},\n articleno = {62},\n author = {Lam, Joshua Ryan and Saitis, Charalampos},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.92a95683},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/EJ0ZAhOdBTw},\n title\ + \ = {The Timbre Explorer: A Synthesizer Interface for Educational Purposes and\ + \ Perceptual Studies},\n url = {https://nime.pubpub.org/pub/q5oc20wg},\n year\ + \ = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178835 + doi: 10.21428/92fbeb44.92a95683 issn: 2220-4806 month: June - pages: 50--53 - publisher: 'Goldsmiths, University of London' - title: 'TreeQuencer: Collaborative Rhythm Sequencing A Comparative Study' - url: http://www.nime.org/proceedings/2014/nime2014_498.pdf - year: 2014 + presentation-video: https://youtu.be/EJ0ZAhOdBTw + title: 'The Timbre Explorer: A Synthesizer Interface for Educational Purposes and + Perceptual Studies' + url: https://nime.pubpub.org/pub/q5oc20wg + year: 2021 - ENTRYTYPE: inproceedings - ID: dschlienger2014 - abstract: 'This paper provides a rationale for choosing acoustic localisation techniques - as an alternative to other principles to provide spatial positions in interactive - locative audio applications (ILAA). By comparing positioning technology in existing - ILAAs to the expected performance of acoustic positioning systems (APS), we can - evaluate if APS would perform equivalently in a particular application. In this - paper, the titles of NIME conference proceedings from 2001 to 2013 were searched - for presentations on ILAA using positioning technology. Over 80 relevant articles - were found. 
For each of the systems we evaluated if and why APS would be a contender - or not. The results showed that for over 73 percent of the reviewed applications, - APS could possibly provide competitive alternatives and at very low cost.' - address: 'London, United Kingdom' - author: Dominik Schlienger and Sakari Tervo - bibtex: "@inproceedings{dschlienger2014,\n abstract = {This paper provides a rationale\ - \ for choosing acoustic localisation techniques as an alternative to other principles\ - \ to provide spatial positions in interactive locative audio applications (ILAA).\ - \ By comparing positioning technology in existing ILAAs to the expected performance\ - \ of acoustic positioning systems (APS), we can evaluate if APS would perform\ - \ equivalently in a particular application. In this paper, the titles of NIME\ - \ conference proceedings from 2001 to 2013 were searched for presentations on\ - \ ILAA using positioning technology. Over 80 relevant articles were found. For\ - \ each of the systems we evaluated if and why APS would be a contender or not.\ - \ The results showed that for over 73 percent of the reviewed applications, APS\ - \ could possibly provide competitive alternatives and at very low cost.},\n address\ - \ = {London, United Kingdom},\n author = {Dominik Schlienger and Sakari Tervo},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178933},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {439--442},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Acoustic Localisation as an Alternative to Positioning\ - \ Principles in Applications presented at NIME 2001-2013},\n url = {http://www.nime.org/proceedings/2014/nime2014_501.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_63 + abstract: 'Music education is an important part of the school curriculum; it teaches + children to be creative and to collaborate with others. 
Music gives individuals + another medium to communicate through, which is especially important for individuals + with cognitive or physical disabilities. Teachers of children with severe disabilities + have expressed a lack of musical instruments adapted for these children, which + leads to an incomplete music education for this group. This study aims at designing + and evaluating a set of collaborative musical instruments for children with cognitive + and physical disabilities, and the research is done together with the special + education school Rullen in Stockholm, Sweden. The process was divided into three + main parts; a pre-study, building and designing, and finally a user study. Based + on findings from previous research, together with input received from teachers + at Rullen during the pre-study, the resulting design consists of four musical + instruments that are connected to a central hub. The results show that the instruments + functioned as intended and that the design makes musical learning accessible in + a way traditional instruments do not, as well as creates a good basis for a collaborative + musical experience. However, fully evaluating the effect of playing together requires + more time for the children to get comfortable with the instruments and also for + the experiment leaders to test different setups to optimize the conditions for + a good interplay.' + address: 'Shanghai, China' + articleno: 63 + author: 'Svahn, Maria and Hölling, Josefine and Curtsson, Fanny and Nokelainen, + Nina' + bibtex: "@inproceedings{NIME21_63,\n abstract = {Music education is an important\ + \ part of the school curriculum; it teaches children to be creative and to collaborate\ + \ with others. 
Music gives individuals another medium to communicate through,\ + \ which is especially important for individuals with cognitive or physical disabilities.\ + \ Teachers of children with severe disabilities have expressed a lack of musical\ + \ instruments adapted for these children, which leads to an incomplete music education\ + \ for this group. This study aims at designing and evaluating a set of collaborative\ + \ musical instruments for children with cognitive and physical disabilities, and\ + \ the research is done together with the special education school Rullen in Stockholm,\ + \ Sweden. The process was divided into three main parts; a pre-study, building\ + \ and designing, and finally a user study. Based on findings from previous research,\ + \ together with input received from teachers at Rullen during the pre-study, the\ + \ resulting design consists of four musical instruments that are connected to\ + \ a central hub. The results show that the instruments functioned as intended\ + \ and that the design makes musical learning accessible in a way traditional instruments\ + \ do not, as well as creates a good basis for a collaborative musical experience.\ + \ However, fully evaluating the effect of playing together requires more time\ + \ for the children to get comfortable with the instruments and also for the experiment\ + \ leaders to test different setups to optimize the conditions for a good interplay.},\n\ + \ address = {Shanghai, China},\n articleno = {63},\n author = {Svahn, Maria and\ + \ Hölling, Josefine and Curtsson, Fanny and Nokelainen, Nina},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.e795c9b5},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/2cD9f493oJM},\n title = {The Rullen Band},\n\ + \ url = {https://nime.pubpub.org/pub/pvd6davm},\n year = {2021}\n}\n" booktitle: Proceedings of the International 
Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178933 + doi: 10.21428/92fbeb44.e795c9b5 issn: 2220-4806 month: June - pages: 439--442 - publisher: 'Goldsmiths, University of London' - title: Acoustic Localisation as an Alternative to Positioning Principles in Applications - presented at NIME 2001-2013 - url: http://www.nime.org/proceedings/2014/nime2014_501.pdf - year: 2014 + presentation-video: https://youtu.be/2cD9f493oJM + title: The Rullen Band + url: https://nime.pubpub.org/pub/pvd6davm + year: 2021 - ENTRYTYPE: inproceedings - ID: cfaubel12014 - abstract: 'In the paper I present a robotic device that offers new ways of interaction - for producing rhythmic patterns. The apparatus is placed on an overhead projector - and a visual presentation of these rhythmic patterns is delivered as a shadow - play. The rhythmic patterns can be manipulated by modifying the environment of - the robot, through direct physical interaction with the robot, by rewiring the - internal connectivity, and by adjusting internal parameters. The theory of embodied - cognition provides the theoretical basis of this device. The core postulate of - embodied cognition is that biological behavior can only be understood through - an understanding of the real-time interactions of an organism''s nervous system, - the organism''s body and the environment. One the one hand the device illustrates - this theory because the patterns that are created equally depend on the real-time - interactions of the electronics, the physical structure of the device and the - environment. On the other hand the device presents a synthesis of these ideas - and it is effectively possible to play with it at all the three levels, the electronics, - the physical configuration of the robot and the environment.' 
- address: 'London, United Kingdom' - author: Christian Faubel - bibtex: "@inproceedings{cfaubel12014,\n abstract = {In the paper I present a robotic\ - \ device that offers new ways of interaction for producing rhythmic patterns.\ - \ The apparatus is placed on an overhead projector and a visual presentation of\ - \ these rhythmic patterns is delivered as a shadow play. The rhythmic patterns\ - \ can be manipulated by modifying the environment of the robot, through direct\ - \ physical interaction with the robot, by rewiring the internal connectivity,\ - \ and by adjusting internal parameters. The theory of embodied cognition provides\ - \ the theoretical basis of this device. The core postulate of embodied cognition\ - \ is that biological behavior can only be understood through an understanding\ - \ of the real-time interactions of an organism's nervous system, the organism's\ - \ body and the environment. One the one hand the device illustrates this theory\ - \ because the patterns that are created equally depend on the real-time interactions\ - \ of the electronics, the physical structure of the device and the environment.\ - \ On the other hand the device presents a synthesis of these ideas and it is effectively\ - \ possible to play with it at all the three levels, the electronics, the physical\ - \ configuration of the robot and the environment.},\n address = {London, United\ - \ Kingdom},\n author = {Christian Faubel},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180950},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {491--494},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Rhythm Apparatus on Overhead},\n url = {http://www.nime.org/proceedings/2014/nime2014_503.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_64 + abstract: 'Sequencer-based live performances of electronic music require a variety + of interactions. 
These interactions depend strongly on the affordances and constraints + of the used instrument. Musicians may perceive the available interactions offered + by the used instrument as limiting. For furthering the development of instruments + for live performances and expanding the interaction possibilities, first, a systematic + overview of interactions in current sequencer-based music performance is needed. To + that end, we propose a taxonomy of interactions in sequencer-based music performances + of electronic music. We identify two performance modes sequencing and sound design + and four interaction classes creation, modification, selection, and evaluation. + Furthermore, we discuss the influence of the different interaction classes on + both, musicians as well as the audience and use the proposed taxonomy to analyze + six commercially available hardware devices.' + address: 'Shanghai, China' + articleno: 64 + author: 'Püst, Stefan and Gieseke, Lena and Brennecke, Angela' + bibtex: "@inproceedings{NIME21_64,\n abstract = {Sequencer-based live performances\ + \ of electronic music require a variety of interactions. These interactions depend\ + \ strongly on the affordances and constraints of the used instrument. Musicians\ + \ may perceive the available interactions offered by the used instrument as limiting.\ + \ For furthering the development of instruments for live performances and expanding\ + \ the interaction possibilities, first, a systematic overview of interactions\ + \ in current sequencer-based music performance is needed. To that end, we propose\ + \ a taxonomy of interactions in sequencer-based music performances of electronic\ + \ music. We identify two performance modes sequencing and sound design and four\ + \ interaction classes creation, modification, selection, and evaluation. 
Furthermore,\ + \ we discuss the influence of the different interaction classes on both, musicians\ + \ as well as the audience and use the proposed taxonomy to analyze six commercially\ + \ available hardware devices.},\n address = {Shanghai, China},\n articleno = {64},\n\ + \ author = {Püst, Stefan and Gieseke, Lena and Brennecke, Angela},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.0d5ab18d},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/c4MUKWpneg0},\n title = {Interaction\ + \ Taxonomy for Sequencer-Based Music Performances},\n url = {https://nime.pubpub.org/pub/gq2ukghi},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180950 + doi: 10.21428/92fbeb44.0d5ab18d issn: 2220-4806 month: June - pages: 491--494 - publisher: 'Goldsmiths, University of London' - title: Rhythm Apparatus on Overhead - url: http://www.nime.org/proceedings/2014/nime2014_503.pdf - year: 2014 + presentation-video: https://youtu.be/c4MUKWpneg0 + title: Interaction Taxonomy for Sequencer-Based Music Performances + url: https://nime.pubpub.org/pub/gq2ukghi + year: 2021 - ENTRYTYPE: inproceedings - ID: ahazzard2014 - abstract: 'Music plays a vital role in accompanying all manner of our experiences. - Soundtracks within films, video games and ceremonies possess a unique ability - to enhance a narrative, suggest emotional content and mark key transitions. Moreover, - soundtracks often achieve all of this without being the primary focus, on the - contrary they typically assume a supporting role. The proliferation of mobile - devices increasingly leads us to listen to music while on the move and musicians - are seizing on locative technologies as a tool for creating new kinds of music - that directly respond to people''s movements through space. 
In light of these - trends, we consider the interesting question of how composers might set about - creating musical soundtracks to accompany mobile experiences. What we have in - mind are experiences such as guided walks, tours and even pervasive games. The - novelty of our research here is in the music serving as an accompaniment to enhance - a location specific activity, much as a soundtrack does for a film. This calls - for composers to take into account the key features of the experience, and its - setting, to gently complement them through the music. We examine this process - from a composer''s perspective by presenting `from the field'' an account of how - they address the multifaceted challenges of designing a soundtrack for public - sculpture park. We chart a composer''s rationale as they developed a soundtrack - for this site over multiple iterations of design, testing and refinement. We expose - key relationships between the raw materials of music (melody, harmony, timbre, - rhythm and dynamics) and those of the physical setting, that enable the composer - to gracefully mesh the music into the fabric of the space. The result is to propose - a set of recommendations to inform the composition of mobile soundtracks that - we intend to guide future practice and research.' - address: 'London, United Kingdom' - author: Adrian Hazzard and Steve Benford and Gary Burnett - bibtex: "@inproceedings{ahazzard2014,\n abstract = {Music plays a vital role in\ - \ accompanying all manner of our experiences. Soundtracks within films, video\ - \ games and ceremonies possess a unique ability to enhance a narrative, suggest\ - \ emotional content and mark key transitions. Moreover, soundtracks often achieve\ - \ all of this without being the primary focus, on the contrary they typically\ - \ assume a supporting role. 
The proliferation of mobile devices increasingly leads\ - \ us to listen to music while on the move and musicians are seizing on locative\ - \ technologies as a tool for creating new kinds of music that directly respond\ - \ to people's movements through space. In light of these trends, we consider the\ - \ interesting question of how composers might set about creating musical soundtracks\ - \ to accompany mobile experiences. What we have in mind are experiences such as\ - \ guided walks, tours and even pervasive games. The novelty of our research here\ - \ is in the music serving as an accompaniment to enhance a location specific activity,\ - \ much as a soundtrack does for a film. This calls for composers to take into\ - \ account the key features of the experience, and its setting, to gently complement\ - \ them through the music. We examine this process from a composer's perspective\ - \ by presenting `from the field' an account of how they address the multifaceted\ - \ challenges of designing a soundtrack for public sculpture park. We chart a composer's\ - \ rationale as they developed a soundtrack for this site over multiple iterations\ - \ of design, testing and refinement. We expose key relationships between the raw\ - \ materials of music (melody, harmony, timbre, rhythm and dynamics) and those\ - \ of the physical setting, that enable the composer to gracefully mesh the music\ - \ into the fabric of the space. 
The result is to propose a set of recommendations\ - \ to inform the composition of mobile soundtracks that we intend to guide future\ - \ practice and research.},\n address = {London, United Kingdom},\n author = {Adrian\ - \ Hazzard and Steve Benford and Gary Burnett},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178794},\n issn = {2220-4806},\n month = {June},\n pages =\ - \ {411--414},\n publisher = {Goldsmiths, University of London},\n title = {You'll\ - \ Never Walk Alone: Composing Location-Based Soundtracks},\n url = {http://www.nime.org/proceedings/2014/nime2014_506.pdf},\n\ - \ year = {2014}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178794 - issn: 2220-4806 - month: June - pages: 411--414 - publisher: 'Goldsmiths, University of London' - title: 'You''ll Never Walk Alone: Composing Location-Based Soundtracks' - url: http://www.nime.org/proceedings/2014/nime2014_506.pdf - year: 2014 - - -- ENTRYTYPE: inproceedings - ID: kyerkes2014 - abstract: 'Twkyr is a new interface for musical expression that emphasizes realtime - manipulation, audification, and visualization of waveforms with a multitouch surface, - offering different interactivity at different time scales, within the same waveform. - The interactive audiovisual design of Tweakyr is motivated by the need for increased - parsimony and transparency in electronic musical instruments and draws from the - work of Curtis Roads on time scales as qualitative musical parameters, and Edward - Tufte''s ``data-ink'''' principles for the improvement of data graphics.' 
- address: 'London, United Kingdom' - author: Karl Yerkes and Matthew Wright - bibtex: "@inproceedings{kyerkes2014,\n abstract = {Twkyr is a new interface for\ - \ musical expression that emphasizes realtime manipulation, audification, and\ - \ visualization of waveforms with a multitouch surface, offering different interactivity\ - \ at different time scales, within the same waveform. The interactive audiovisual\ - \ design of Tweakyr is motivated by the need for increased parsimony and transparency\ - \ in electronic musical instruments and draws from the work of Curtis Roads on\ - \ time scales as qualitative musical parameters, and Edward Tufte's ``data-ink''\ - \ principles for the improvement of data graphics.},\n address = {London, United\ - \ Kingdom},\n author = {Karl Yerkes and Matthew Wright},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178989},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {375--378},\n publisher = {Goldsmiths, University of London},\n title = {Twkyr:\ - \ a Multitouch Waveform Looper},\n url = {http://www.nime.org/proceedings/2014/nime2014_508.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_65 + abstract: 'From an epistemological perspective, this work presents a discussion + of how the paradigm of enactive music cognition is related to improvisation in + the context of the skills and needs of 21st-century music learners. Improvisation + in music education is addressed within the perspective of an alternative but an + increasingly influential enactive approach to mind (Varela et al., 1993) followed + by the four theories known as the 4E of cognition - embedded, embodied, enactive + and extended - which naturally have characteristics in common that led them to + be grouped in this way. 
I discuss the “autopoietic” (self-maintain systems that + auto-reproduce over time based on their own set of internal rules) nature of the + embodied musical mind. To conclude, an overview concerning the enactivist approach + within DMIs design in order to provide a better understanding of the experiences + and benefits of using new technologies in musical learning contexts is outlined.' + address: 'Shanghai, China' + articleno: 65 + author: 'Corintha, Isabela and Cabral, Giordano' + bibtex: "@inproceedings{NIME21_65,\n abstract = {From an epistemological perspective,\ + \ this work presents a discussion of how the paradigm of enactive music cognition\ + \ is related to improvisation in the context of the skills and needs of 21st-century\ + \ music learners. Improvisation in music education is addressed within the perspective\ + \ of an alternative but an increasingly influential enactive approach to mind\ + \ (Varela et al., 1993) followed by the four theories known as the 4E of cognition\ + \ - embedded, embodied, enactive and extended - which naturally have characteristics\ + \ in common that led them to be grouped in this way. I discuss the “autopoietic”\ + \ (self-maintain systems that auto-reproduce over time based on their own set\ + \ of internal rules) nature of the embodied musical mind. 
To conclude, an overview\ + \ concerning the enactivist approach within DMIs design in order to provide a\ + \ better understanding of the experiences and benefits of using new technologies\ + \ in musical learning contexts is outlined.},\n address = {Shanghai, China},\n\ + \ articleno = {65},\n author = {Corintha, Isabela and Cabral, Giordano},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.56a01d33},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/dGb5tl_tA58},\n title = {Improvised\ + \ Sound-Making within Musical Apprenticeship and Enactivism: An Intersection between\ + \ the 4E`s Model and DMIs},\n url = {https://nime.pubpub.org/pub/e4lsrn6c},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178989 + doi: 10.21428/92fbeb44.56a01d33 issn: 2220-4806 month: June - pages: 375--378 - publisher: 'Goldsmiths, University of London' - title: 'Twkyr: a Multitouch Waveform Looper' - url: http://www.nime.org/proceedings/2014/nime2014_508.pdf - year: 2014 + presentation-video: https://youtu.be/dGb5tl_tA58 + title: 'Improvised Sound-Making within Musical Apprenticeship and Enactivism: An + Intersection between the 4E`s Model and DMIs' + url: https://nime.pubpub.org/pub/e4lsrn6c + year: 2021 - ENTRYTYPE: inproceedings - ID: tmays2014 - abstract: 'In this paper we expose the need to go beyond the composer/performer - model of electronic instrument design and programming to encourage the transmission - of compositions and the creation of a repertory via notation of repeatable performance - practice. 
Drawing on 4 years of practice using the Karlax controller (Da Fact) - as a base for new digital musical instruments, we present our notation system - in detail and cite some mapping strategies and examples from to pieces in a growing - repertory of chamber music compositions for electronic and acoustic instruments' - address: 'London, United Kingdom' - author: Tom Mays and Francis Faber - bibtex: "@inproceedings{tmays2014,\n abstract = {In this paper we expose the need\ - \ to go beyond the composer/performer model of electronic instrument design and\ - \ programming to encourage the transmission of compositions and the creation of\ - \ a repertory via notation of repeatable performance practice. Drawing on 4 years\ - \ of practice using the Karlax controller (Da Fact) as a base for new digital\ - \ musical instruments, we present our notation system in detail and cite some\ - \ mapping strategies and examples from to pieces in a growing repertory of chamber\ - \ music compositions for electronic and acoustic instruments},\n address = {London,\ - \ United Kingdom},\n author = {Tom Mays and Francis Faber},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178869},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {553--556},\n publisher = {Goldsmiths, University of London},\n title = {A\ - \ Notation System for the Karlax Controller},\n url = {http://www.nime.org/proceedings/2014/nime2014_509.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_66 + abstract: 'In many contexts, creating mappings for gestural interactions can form + part of an artistic process. Creators seeking a mapping that is expressive, novel, + and affords them a sense of authorship may not know how to program it up in a + signal processing patch. Tools like Wekinator [1] and MIMIC [2] allow creators + to use supervised machine learning to learn mappings from example input/output + pairings. 
However, a creator may know a good mapping when they encounter it yet + start with little sense of what the inputs or outputs should be. We call this + an open-ended mapping process. Addressing this need, we introduce the latent mapping, + which leverages the latent space of an unsupervised machine learning algorithm + such as a Variational Autoencoder trained on a corpus of unlabelled gestural data + from the creator. We illustrate it with Sonified Body, a system mapping full-body + movement to sound which we explore in a residency with three dancers.' + address: 'Shanghai, China' + articleno: 66 + author: 'Murray-Browne, Tim and Tigas, Panagiotis' + bibtex: "@inproceedings{NIME21_66,\n abstract = {In many contexts, creating mappings\ + \ for gestural interactions can form part of an artistic process. Creators seeking\ + \ a mapping that is expressive, novel, and affords them a sense of authorship\ + \ may not know how to program it up in a signal processing patch. Tools like Wekinator\ + \ [1] and MIMIC [2] allow creators to use supervised machine learning to learn\ + \ mappings from example input/output pairings. However, a creator may know a good\ + \ mapping when they encounter it yet start with little sense of what the inputs\ + \ or outputs should be. We call this an open-ended mapping process. Addressing\ + \ this need, we introduce the latent mapping, which leverages the latent space\ + \ of an unsupervised machine learning algorithm such as a Variational Autoencoder\ + \ trained on a corpus of unlabelled gestural data from the creator. 
We illustrate\ + \ it with Sonified Body, a system mapping full-body movement to sound which we\ + \ explore in a residency with three dancers.},\n address = {Shanghai, China},\n\ + \ articleno = {66},\n author = {Murray-Browne, Tim and Tigas, Panagiotis},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.9d4bcd4b},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/zBOHWyIGaYc},\n title = {Latent\ + \ Mappings: Generating Open-Ended Expressive Mappings Using Variational Autoencoders},\n\ + \ url = {https://nime.pubpub.org/pub/latent-mappings},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178869 + doi: 10.21428/92fbeb44.9d4bcd4b issn: 2220-4806 month: June - pages: 553--556 - publisher: 'Goldsmiths, University of London' - title: A Notation System for the Karlax Controller - url: http://www.nime.org/proceedings/2014/nime2014_509.pdf - year: 2014 + presentation-video: https://youtu.be/zBOHWyIGaYc + title: 'Latent Mappings: Generating Open-Ended Expressive Mappings Using Variational + Autoencoders' + url: https://nime.pubpub.org/pub/latent-mappings + year: 2021 - ENTRYTYPE: inproceedings - ID: avanzandt2014 - abstract: 'When performing a piece, a pianist''s interpretation is communicated - both through the sound produced and through body gestures. We present PiaF (Piano - Follower), a prototype for augmenting piano performance by measuring gesture variations. - We survey other augmented piano projects, several of which focus on gestural recognition, - and present our prototype which uses machine learning techniques for gesture classification - and estimation of gesture variations in real-time. Our implementation uses the - Kinect depth sensor to track body motion in space, which is used as input data. 
- During an initial learning phase, the system is taught a set of reference gestures, - or templates. During performance, the live gesture is classified in real-time, - and variations with respect to the recognized template are computed. These values - can then be mapped to audio processing parameters, to control digital effects - which are applied to the acoustic output of the piano in real-time. We discuss - initial tests using PiaF with a pianist, as well as potential applications beyond - live performance, including pedagogy and embodiment of recorded performance.' - address: 'London, United Kingdom' - author: Alejandro Van Zandt-Escobar and Baptiste Caramiaux and Atau Tanaka - bibtex: "@inproceedings{avanzandt2014,\n abstract = {When performing a piece, a\ - \ pianist's interpretation is communicated both through the sound produced and\ - \ through body gestures. We present PiaF (Piano Follower), a prototype for augmenting\ - \ piano performance by measuring gesture variations. We survey other augmented\ - \ piano projects, several of which focus on gestural recognition, and present\ - \ our prototype which uses machine learning techniques for gesture classification\ - \ and estimation of gesture variations in real-time. Our implementation uses the\ - \ Kinect depth sensor to track body motion in space, which is used as input data.\ - \ During an initial learning phase, the system is taught a set of reference gestures,\ - \ or templates. During performance, the live gesture is classified in real-time,\ - \ and variations with respect to the recognized template are computed. These values\ - \ can then be mapped to audio processing parameters, to control digital effects\ - \ which are applied to the acoustic output of the piano in real-time. 
We discuss\ - \ initial tests using PiaF with a pianist, as well as potential applications beyond\ - \ live performance, including pedagogy and embodiment of recorded performance.},\n\ - \ address = {London, United Kingdom},\n author = {Alejandro Van Zandt-Escobar\ - \ and Baptiste Caramiaux and Atau Tanaka},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178991},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {167--170},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {PiaF: A Tool for Augmented Piano Performance\ - \ Using Gesture Variation Following},\n url = {http://www.nime.org/proceedings/2014/nime2014_511.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_67 + abstract: 'This paper describes Oopsy, which provides a streamlined process for + editing digital signal processing algorithms for precise and sample accurate sound + generation, transformation and modulation, and placing them in the context of + embedded hardware and modular synthesizers. This pipeline gives digital instrument + designers the development flexibility of established software with the deployment + benefits of working on hardware. Specifically, algorithm design takes place in + the flexible context of gen~ in Max, and Oopsy automatically and fluently translates + this and uploads it onto the open-ended Daisy embedded hardware. The paper locates + this work in the context of related software/hardware workflows, and provides + detail of its contributions in design, implementation, and use.' 
+ address: 'Shanghai, China' + articleno: 67 + author: 'Wakefield, Graham' + bibtex: "@inproceedings{NIME21_67,\n abstract = {This paper describes Oopsy, which\ + \ provides a streamlined process for editing digital signal processing algorithms\ + \ for precise and sample accurate sound generation, transformation and modulation,\ + \ and placing them in the context of embedded hardware and modular synthesizers.\ + \ This pipeline gives digital instrument designers the development flexibility\ + \ of established software with the deployment benefits of working on hardware.\ + \ Specifically, algorithm design takes place in the flexible context of gen~ in\ + \ Max, and Oopsy automatically and fluently translates this and uploads it onto\ + \ the open-ended Daisy embedded hardware. The paper locates this work in the context\ + \ of related software/hardware workflows, and provides detail of its contributions\ + \ in design, implementation, and use.},\n address = {Shanghai, China},\n articleno\ + \ = {67},\n author = {Wakefield, Graham},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.e32fde90},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/xJwI9F9Spbo},\n\ + \ title = {A streamlined workflow from Max/gen{\\textasciitilde} to modular hardware},\n\ + \ url = {https://nime.pubpub.org/pub/0u3ruj23},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178991 + doi: 10.21428/92fbeb44.e32fde90 issn: 2220-4806 month: June - pages: 167--170 - publisher: 'Goldsmiths, University of London' - title: 'PiaF: A Tool for Augmented Piano Performance Using Gesture Variation Following' - url: http://www.nime.org/proceedings/2014/nime2014_511.pdf - year: 2014 + presentation-video: https://youtu.be/xJwI9F9Spbo + title: A streamlined workflow from Max/gen~ to modular hardware + 
url: https://nime.pubpub.org/pub/0u3ruj23 + year: 2021 - ENTRYTYPE: inproceedings - ID: pdahlstedt12014 - abstract: 'The idea behind the YouHero was two-fold. First, to make an expressive - instrument out of the computer game toy guitar controller from the famous game - GuitarHero. With its limited amount of control parameters, this was a challenge. - Second, through this instrument we wanted to provide an alternative to the view - that you become a hero by perfect imitation of your idols. Instead, play yourself. - You are the hero. In this paper, we describe the design of the instrument, including - its novel mapping approach based on switched timbre vectors scaled by accellerometer - data, unconventional sound engines and the sound and mapping editing features, - including manual editing of individual vectors. The instrument is evaluated through - its practical applications during the whole project, with workshops with teenagers, - a set of state-funded commissions from professional composers, and the development - of considerable skill by the key performers. We have also submitted a performance - proposal for this project.' - address: 'London, United Kingdom' - author: Palle Dahlstedt and Patrik Karlsson and Katarina Widell and Tony Blomdahl - bibtex: "@inproceedings{pdahlstedt12014,\n abstract = {The idea behind the YouHero\ - \ was two-fold. First, to make an expressive instrument out of the computer game\ - \ toy guitar controller from the famous game GuitarHero. With its limited amount\ - \ of control parameters, this was a challenge. Second, through this instrument\ - \ we wanted to provide an alternative to the view that you become a hero by perfect\ - \ imitation of your idols. Instead, play yourself. You are the hero. 
In this paper,\ - \ we describe the design of the instrument, including its novel mapping approach\ - \ based on switched timbre vectors scaled by accellerometer data, unconventional\ - \ sound engines and the sound and mapping editing features, including manual editing\ - \ of individual vectors. The instrument is evaluated through its practical applications\ - \ during the whole project, with workshops with teenagers, a set of state-funded\ - \ commissions from professional composers, and the development of considerable\ - \ skill by the key performers. We have also submitted a performance proposal for\ - \ this project.},\n address = {London, United Kingdom},\n author = {Palle Dahlstedt\ - \ and Patrik Karlsson and Katarina Widell and Tony Blomdahl},\n booktitle = {Proceedings\ + ID: NIME21_68 + abstract: 'In response to the 2020 pandemic, a new work was composed inspired by + the limitations and challenges of performing over the network. Since synchronization + is one of the big challenges, or perhaps something to be avoided due to network + latency, this work explicitly calls for desynchronization in a controlled way, + using metronomes running at different rates to take performers in and out of approximate + synchronization. A special editor was developed to visualize the music because + conventional editors do not support multiple continuously varying tempi.' + address: 'Shanghai, China' + articleno: 68 + author: 'Dannenberg, Roger B.' + bibtex: "@inproceedings{NIME21_68,\n abstract = {In response to the 2020 pandemic,\ + \ a new work was composed inspired by the limitations and challenges of performing\ + \ over the network. Since synchronization is one of the big challenges, or perhaps\ + \ something to be avoided due to network latency, this work explicitly calls for\ + \ desynchronization in a controlled way, using metronomes running at different\ + \ rates to take performers in and out of approximate synchronization. 
A special\ + \ editor was developed to visualize the music because conventional editors do\ + \ not support multiple continuously varying tempi.},\n address = {Shanghai, China},\n\ + \ articleno = {68},\n author = {Dannenberg, Roger B.},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178742},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {403--406},\n publisher = {Goldsmiths, University of London},\n title = {YouHero\ - \ Making an Expressive Concert Instrument from the GuitarHero Controller},\n url\ - \ = {http://www.nime.org/proceedings/2014/nime2014_513.pdf},\n year = {2014}\n\ - }\n" + \ doi = {10.21428/92fbeb44.a41fe2c5},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/MhcZyE2SCck},\n title = {Canons for Conlon:\ + \ Composing and Performing Multiple Tempi on the Web},\n url = {https://nime.pubpub.org/pub/jxo0v8r7},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178742 + doi: 10.21428/92fbeb44.a41fe2c5 issn: 2220-4806 month: June - pages: 403--406 - publisher: 'Goldsmiths, University of London' - title: YouHero Making an Expressive Concert Instrument from the GuitarHero Controller - url: http://www.nime.org/proceedings/2014/nime2014_513.pdf - year: 2014 + presentation-video: https://youtu.be/MhcZyE2SCck + title: 'Canons for Conlon: Composing and Performing Multiple Tempi on the Web' + url: https://nime.pubpub.org/pub/jxo0v8r7 + year: 2021 - ENTRYTYPE: inproceedings - ID: ldahl2014 - abstract: 'Motion sensing technologies enable musical interfaces where a performer - moves their body "in the air" without manipulating or contacting a physical object. 
- These interfaces work well when the movement and sound are smooth and continuous, - but it has proven difficult to design a system which triggers discrete sounds - with precision that allows for complex rhythmic performance. We conducted a study - where participants perform ``air-drumming'''' gestures in time to rhythmic sounds. - These movements are recorded, and the timing of various movement features with - respect to the onset of audio events is analyzed. A novel algorithm for detecting - sudden changes in direction is used to find the end of the strike gesture. We - find that these occur on average after the audio onset and that this timing varies - with the tempo of the movement. Sharp peaks in magnitude acceleration occur before - the audio onset and do not vary with tempo. These results suggest that detecting - peaks in acceleration will lead to more naturally responsive air gesture instruments.' - address: 'London, United Kingdom' - author: Luke Dahl - bibtex: "@inproceedings{ldahl2014,\n abstract = {Motion sensing technologies enable\ - \ musical interfaces where a performer moves their body \"in the air\" without\ - \ manipulating or contacting a physical object. These interfaces work well when\ - \ the movement and sound are smooth and continuous, but it has proven difficult\ - \ to design a system which triggers discrete sounds with precision that allows\ - \ for complex rhythmic performance. We conducted a study where participants perform\ - \ ``air-drumming'' gestures in time to rhythmic sounds. These movements are recorded,\ - \ and the timing of various movement features with respect to the onset of audio\ - \ events is analyzed. A novel algorithm for detecting sudden changes in direction\ - \ is used to find the end of the strike gesture. 
We find that these occur on average\ - \ after the audio onset and that this timing varies with the tempo of the movement.\ - \ Sharp peaks in magnitude acceleration occur before the audio onset and do not\ - \ vary with tempo. These results suggest that detecting peaks in acceleration\ - \ will lead to more naturally responsive air gesture instruments.},\n address\ - \ = {London, United Kingdom},\n author = {Luke Dahl},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178738},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {201--206},\n publisher = {Goldsmiths, University of London},\n title = {Triggering\ - \ Sounds from Discrete Air Gestures: What Movement Feature Has the Best Timing?},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_514.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_69 + abstract: 'This paper describes a subversive compositional approach to machine learning, + focused on the exploration of AI bias and computational aesthetic evaluation. + In Bias, for bass clarinet and Interactive Music System, a computer music system + using two Neural Networks trained to develop “aesthetic bias” interacts with the + musician by evaluating the sound input based on its “subjective” aesthetic judgments. + The composition problematizes the discrepancies between the concepts of error + and accuracy, associated with supervised machine learning, and aesthetic judgments + as inherently subjective and intangible. The methods used in the compositional + process are discussed with respect to the objective of balancing the trade-off + between musical authorship and interpretative freedom in interactive musical works.' 
+ address: 'Shanghai, China' + articleno: 69 + author: 'Gioti, Artemi-Maria' + bibtex: "@inproceedings{NIME21_69,\n abstract = {This paper describes a subversive\ + \ compositional approach to machine learning, focused on the exploration of AI\ + \ bias and computational aesthetic evaluation. In Bias, for bass clarinet and\ + \ Interactive Music System, a computer music system using two Neural Networks\ + \ trained to develop “aesthetic bias” interacts with the musician by evaluating\ + \ the sound input based on its “subjective” aesthetic judgments. The composition\ + \ problematizes the discrepancies between the concepts of error and accuracy,\ + \ associated with supervised machine learning, and aesthetic judgments as inherently\ + \ subjective and intangible. The methods used in the compositional process are\ + \ discussed with respect to the objective of balancing the trade-off between musical\ + \ authorship and interpretative freedom in interactive musical works.},\n address\ + \ = {Shanghai, China},\n articleno = {69},\n author = {Gioti, Artemi-Maria},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.de74b046},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/9l8NeGmvpDU},\n title\ + \ = {A Compositional Exploration of Computational Aesthetic Evaluation and AI\ + \ Bias.},\n url = {https://nime.pubpub.org/pub/zpvgmv74},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178738 + doi: 10.21428/92fbeb44.de74b046 issn: 2220-4806 month: June - pages: 201--206 - publisher: 'Goldsmiths, University of London' - title: 'Triggering Sounds from Discrete Air Gestures: What Movement Feature Has - the Best Timing?' 
- url: http://www.nime.org/proceedings/2014/nime2014_514.pdf - year: 2014 + presentation-video: https://youtu.be/9l8NeGmvpDU + title: A Compositional Exploration of Computational Aesthetic Evaluation and AI + Bias. + url: https://nime.pubpub.org/pub/zpvgmv74 + year: 2021 - ENTRYTYPE: inproceedings - ID: chonigman2014 - abstract: 'This paper introduces a new technique for creating Swept Frequency Capacitive - Sensing with open source technology for use in creating richer and more complex - musical gestures. This new style of capacitive touch sensing is extremely robust - compared to older versions and will allow greater implementation of gesture recognition - and touch control in the development of NIMEs. Inspired by the Touché project, - this paper discusses how to implement this technique using the community standard - hardware Arduino instead of custom designed electronics. The technique requires - only passive components and can be used to enhance the touch sensitivity of many - everyday objects and even biological materials and substances such as plants, - which this paper will focus on as a case study through the project known as Cultivating - Frequencies. This paper will discuss different techniques of filtering data captured - by this system, different methods for creating gesture recognition unique to the - object being used, and the implications of this technology as it pertains to the - goal of ubiquitous sensing. Furthermore, this paper will introduce a new Arduino - Library, SweepingCapSense, which simplifies the coding required to implement this - technique.' - address: 'London, United Kingdom' - author: Colin Honigman and Jordan Hochenbaum and Ajay Kapur - bibtex: "@inproceedings{chonigman2014,\n abstract = {This paper introduces a new\ - \ technique for creating Swept Frequency Capacitive Sensing with open source technology\ - \ for use in creating richer and more complex musical gestures. 
This new style\ - \ of capacitive touch sensing is extremely robust compared to older versions and\ - \ will allow greater implementation of gesture recognition and touch control in\ - \ the development of NIMEs. Inspired by the Touch{\\'e} project, this paper discusses\ - \ how to implement this technique using the community standard hardware Arduino\ - \ instead of custom designed electronics. The technique requires only passive\ - \ components and can be used to enhance the touch sensitivity of many everyday\ - \ objects and even biological materials and substances such as plants, which this\ - \ paper will focus on as a case study through the project known as Cultivating\ - \ Frequencies. This paper will discuss different techniques of filtering data\ - \ captured by this system, different methods for creating gesture recognition\ - \ unique to the object being used, and the implications of this technology as\ - \ it pertains to the goal of ubiquitous sensing. Furthermore, this paper will\ - \ introduce a new Arduino Library, SweepingCapSense, which simplifies the coding\ - \ required to implement this technique.},\n address = {London, United Kingdom},\n\ - \ author = {Colin Honigman and Jordan Hochenbaum and Ajay Kapur},\n booktitle\ + ID: NIME21_7 + abstract: "Can random digit data be transformed and utilized as a sound installation\ + \ that provides a referential connection between a book and the electromechanical\ + \ computer? What happens when the text of A Million Random Digits with 100,000\ + \ Normal Deviates is ‘vocalized’ by an electro-mechanical object? Using a media\ + \ archaeological research approach, Click::RAND^(#)2, an indeterminate sound sculpture\ + \ utilising relays as sound objects, is an audio-visual reinterpretation and representation\ + \ of an historical relationship between a book of random digits and the electromechanical\ + \ relay. 
Developed by the first author, Click::RAND^(#)2 is the physical re-presentation\ + \ of random digit data sets as compositional elements to complement the physical\ + \ presence of the work through spatialized sound patterns framed within the context\ + \ of Henri Lefebvre’s rhythmanalysis and experienced as synchronous, syncopated\ + \ or discordant rhythms." + address: 'Shanghai, China' + articleno: 7 + author: 'Dunham, Paul and Zareei, Dr. Mo H. and Carnegie, Prof. Dale and McKinnon, + Dr. Dugal' + bibtex: "@inproceedings{NIME21_7,\n abstract = {Can random digit data be transformed\ + \ and utilized as a sound installation that provides a referential connection\ + \ between a book and the electromechanical computer? What happens when the text\ + \ of A Million Random Digits with 100,000 Normal Deviates is ‘vocalized’ by an\ + \ electro-mechanical object? Using a media archaeological research approach, Click::RAND^(#)2,\ + \ an indeterminate sound sculpture utilising relays as sound objects, is an audio-visual\ + \ reinterpretation and representation of an historical relationship between a\ + \ book of random digits and the electromechanical relay. Developed by the first\ + \ author, Click::RAND^(#)2 is the physical re-presentation of random digit data\ + \ sets as compositional elements to complement the physical presence of the work\ + \ through spatialized sound patterns framed within the context of Henri Lefebvre’s\ + \ rhythmanalysis and experienced as synchronous, syncopated or discordant rhythms.},\n\ + \ address = {Shanghai, China},\n articleno = {7},\n author = {Dunham, Paul and\ + \ Zareei, Dr. Mo H. and Carnegie, Prof. Dale and McKinnon, Dr. 
Dugal},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178802},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {74--77},\n publisher = {Goldsmiths, University of London},\n\ - \ title = {Techniques in Swept Frequency Capacitive Sensing: An Open Source Approach},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_515.pdf},\n year = {2014}\n\ - }\n" + \ Expression},\n doi = {10.21428/92fbeb44.5cc6d157},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/vJynbs8txuA},\n title = {Click::RAND#2.\ + \ An Indeterminate Sound Sculpture},\n url = {https://nime.pubpub.org/pub/lac4s48h},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178802 + doi: 10.21428/92fbeb44.5cc6d157 issn: 2220-4806 month: June - pages: 74--77 - publisher: 'Goldsmiths, University of London' - title: 'Techniques in Swept Frequency Capacitive Sensing: An Open Source Approach' - url: http://www.nime.org/proceedings/2014/nime2014_515.pdf - year: 2014 + presentation-video: https://youtu.be/vJynbs8txuA + title: "Click::RAND#2. An Indeterminate Sound Sculpture" + url: https://nime.pubpub.org/pub/lac4s48h + year: 2021 - ENTRYTYPE: inproceedings - ID: hdiao2014 - abstract: 'Sketching is a natural way for one person to convey their thoughts and - intentions to another. With the recent rise of tablet-based computing, the use - of sketching as a control and interaction paradigm is one that deserves exploration. - In this paper we present an interactive sketch-based music composition and performance - system called Drawchestra. The aim of the system is to give users an intuitive - way to convey their musical ideas to a computer system with the minimum of technical - training thus enabling them to focus on the creative tasks of composition and - performance. 
The system provides the user with a canvas upon which they may create - their own instruments by sketching shapes on the tablet screen. The system recognises - a certain set of shapes which it treats as virtual instruments or effects. Once - recognised, these virtual instruments can then be played by the user in real time. - The size of a sketched instrument shape is used to control certain parameters - of the sound so the user can build complex orchestras containing many different - shapes of different sizes. The sketched shapes may also be moved and resized as - desired making it possible to customise and edit the virtual orchestra as the - user goes along. The system has been implemented in Python and user tests conducted - using an iPad as the control surface. We report the results of the user study - at the end of the paper before briefly discussing the outcome and outlining the - next steps for the system design.' - address: 'London, United Kingdom' - author: Haojing Diao and Yanchao Zhou and Christopher Andrew Harte and Nick Bryan-Kinns - bibtex: "@inproceedings{hdiao2014,\n abstract = {Sketching is a natural way for\ - \ one person to convey their thoughts and intentions to another. With the recent\ - \ rise of tablet-based computing, the use of sketching as a control and interaction\ - \ paradigm is one that deserves exploration. In this paper we present an interactive\ - \ sketch-based music composition and performance system called Drawchestra. The\ - \ aim of the system is to give users an intuitive way to convey their musical\ - \ ideas to a computer system with the minimum of technical training thus enabling\ - \ them to focus on the creative tasks of composition and performance. The system\ - \ provides the user with a canvas upon which they may create their own instruments\ - \ by sketching shapes on the tablet screen. The system recognises a certain set\ - \ of shapes which it treats as virtual instruments or effects. 
Once recognised,\ - \ these virtual instruments can then be played by the user in real time. The size\ - \ of a sketched instrument shape is used to control certain parameters of the\ - \ sound so the user can build complex orchestras containing many different shapes\ - \ of different sizes. The sketched shapes may also be moved and resized as desired\ - \ making it possible to customise and edit the virtual orchestra as the user goes\ - \ along. The system has been implemented in Python and user tests conducted using\ - \ an iPad as the control surface. We report the results of the user study at the\ - \ end of the paper before briefly discussing the outcome and outlining the next\ - \ steps for the system design.},\n address = {London, United Kingdom},\n author\ - \ = {Haojing Diao and Yanchao Zhou and Christopher Andrew Harte and Nick Bryan-Kinns},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178748},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {569--572},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Sketch-Based Musical Composition and Performance},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_517.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_70 + abstract: 'We present a novel robotic violinist that is designed to play Carnatic + music - a music system popular in the southern part of India. The robot plays + the D string and uses a single finger mechanism inspired by the Chitravina - a + fretless Indian lute. A fingerboard traversal system with a dynamic finger tip + apparatus enables the robot to play gamakas - pitch based embellishments in-between + notes, which are at the core of Carnatic music. A double roller design is used + for bowing which reduces space, produces a tone that resembles the tone of a conventional + violin bow, and facilitates super human playing techniques such as infinite bowing. 
+ The design also enables the user to change the bow hair tightness to help capture + a variety of performing techniques in different musical styles. Objective assessments + and subjective listening tests were conducted to evaluate our design, indicating + that the robot can play gamakas in a realistic manner and thus, can perform Carnatic + music.' + address: 'Shanghai, China' + articleno: 70 + author: 'Sankaranarayanan, Raghavasimhan and Weinberg, Gil' + bibtex: "@inproceedings{NIME21_70,\n abstract = {We present a novel robotic violinist\ + \ that is designed to play Carnatic music - a music system popular in the southern\ + \ part of India. The robot plays the D string and uses a single finger mechanism\ + \ inspired by the Chitravina - a fretless Indian lute. A fingerboard traversal\ + \ system with a dynamic finger tip apparatus enables the robot to play gamakas\ + \ - pitch based embellishments in-between notes, which are at the core of Carnatic\ + \ music. A double roller design is used for bowing which reduces space, produces\ + \ a tone that resembles the tone of a conventional violin bow, and facilitates\ + \ super human playing techniques such as infinite bowing. The design also enables\ + \ the user to change the bow hair tightness to help capture a variety of performing\ + \ techniques in different musical styles. 
Objective assessments and subjective\ + \ listening tests were conducted to evaluate our design, indicating that the robot\ + \ can play gamakas in a realistic manner and thus, can perform Carnatic music.},\n\ + \ address = {Shanghai, China},\n articleno = {70},\n author = {Sankaranarayanan,\ + \ Raghavasimhan and Weinberg, Gil},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.0ad83109},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/4vNZm2Zewqs},\n\ + \ title = {Design of Hathaani - A Robotic Violinist for Carnatic Music},\n url\ + \ = {https://nime.pubpub.org/pub/225tmviw},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178748 + doi: 10.21428/92fbeb44.0ad83109 issn: 2220-4806 month: June - pages: 569--572 - publisher: 'Goldsmiths, University of London' - title: Sketch-Based Musical Composition and Performance - url: http://www.nime.org/proceedings/2014/nime2014_517.pdf - year: 2014 + presentation-video: https://youtu.be/4vNZm2Zewqs + title: Design of Hathaani - A Robotic Violinist for Carnatic Music + url: https://nime.pubpub.org/pub/225tmviw + year: 2021 - ENTRYTYPE: inproceedings - ID: jratcliffe2014 - abstract: 'This paper presents a control surface interface for music mixing using - real time computer vision. Two input sensors are considered: the Leap Motion and - the Microsoft Kinect. The author presents significant design considerations, including - improving of the user''s sense of depth and panorama, maintaining broad accessibility - by integrating the system with Digital Audio Workstation (DAW) software, and implementing - a system that is portable and affordable. To provide the user with a heightened - sense of sound spatialization over the traditional channel strip, the concept - of depth is addressed directly using the stage metaphor. 
Sound sources are represented - as colored spheres in a graphical user interface to provide the user with visual - feedback. Moving sources back and forward controls volume, while left to right - controls panning. To provide broader accessibility, the interface is configured - to control mixing within the Ableton Live DAW. The author also discusses future - plans to expand functionality and evaluate the system.' - address: 'London, United Kingdom' - author: Jarrod Ratcliffe - bibtex: "@inproceedings{jratcliffe2014,\n abstract = {This paper presents a control\ - \ surface interface for music mixing using real time computer vision. Two input\ - \ sensors are considered: the Leap Motion and the Microsoft Kinect. The author\ - \ presents significant design considerations, including improving of the user's\ - \ sense of depth and panorama, maintaining broad accessibility by integrating\ - \ the system with Digital Audio Workstation (DAW) software, and implementing a\ - \ system that is portable and affordable. To provide the user with a heightened\ - \ sense of sound spatialization over the traditional channel strip, the concept\ - \ of depth is addressed directly using the stage metaphor. Sound sources are represented\ - \ as colored spheres in a graphical user interface to provide the user with visual\ - \ feedback. Moving sources back and forward controls volume, while left to right\ - \ controls panning. To provide broader accessibility, the interface is configured\ - \ to control mixing within the Ableton Live DAW. 
The author also discusses future\ - \ plans to expand functionality and evaluate the system.},\n address = {London,\ - \ United Kingdom},\n author = {Jarrod Ratcliffe},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178911},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {136--139},\n publisher = {Goldsmiths, University of London},\n title = {Hand\ - \ and Finger Motion-Controlled Audio Mixing Interface},\n url = {http://www.nime.org/proceedings/2014/nime2014_518.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_71 + abstract: 'The current generation of commercial hardware and software for virtual + reality and immersive environments presents possibilities for a wealth of creative + solutions for new musical expression and interaction. This paper explores the + affordances of virtual musical environments with the disabled music-making community + of Drake Music Project Northern Ireland. Recent collaborations have investigated + strategies for Guided Interactions in Virtual Musical Environments (GIVME), a + novel concept the authors introduce here. This paper gives some background on + disabled music-making with digital musical instruments before sharing recent research + projects that facilitate disabled music performance in virtual reality immersive + environments. We expand on the premise of GIVME as a potential guideline for musical + interaction design for disabled musicians in VR, and take an explorative look + at the possibilities and constraints for instrument design for disabled musicians + as virtual worlds integrate ever more closely with the real.' 
+ address: 'Shanghai, China' + articleno: 71 + author: 'Mills, Damian and Schroeder, Franziska and D''Arcy, John' + bibtex: "@inproceedings{NIME21_71,\n abstract = {The current generation of commercial\ + \ hardware and software for virtual reality and immersive environments presents\ + \ possibilities for a wealth of creative solutions for new musical expression\ + \ and interaction. This paper explores the affordances of virtual musical environments\ + \ with the disabled music-making community of Drake Music Project Northern Ireland.\ + \ Recent collaborations have investigated strategies for Guided Interactions in\ + \ Virtual Musical Environments (GIVME), a novel concept the authors introduce\ + \ here. This paper gives some background on disabled music-making with digital\ + \ musical instruments before sharing recent research projects that facilitate\ + \ disabled music performance in virtual reality immersive environments. We expand\ + \ on the premise of GIVME as a potential guideline for musical interaction design\ + \ for disabled musicians in VR, and take an explorative look at the possibilities\ + \ and constraints for instrument design for disabled musicians as virtual worlds\ + \ integrate ever more closely with the real.},\n address = {Shanghai, China},\n\ + \ articleno = {71},\n author = {Mills, Damian and Schroeder, Franziska and D'Arcy,\ + \ John},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.5443652c},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/sI0K9sMYc80},\n title\ + \ = {GIVME: Guided Interactions in Virtual Musical Environments: },\n url = {https://nime.pubpub.org/pub/h14o4oit},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178911 + doi: 10.21428/92fbeb44.5443652c issn: 2220-4806 month: June - pages: 136--139 - 
publisher: 'Goldsmiths, University of London' - title: Hand and Finger Motion-Controlled Audio Mixing Interface - url: http://www.nime.org/proceedings/2014/nime2014_518.pdf - year: 2014 + presentation-video: https://youtu.be/sI0K9sMYc80 + title: 'GIVME: Guided Interactions in Virtual Musical Environments: ' + url: https://nime.pubpub.org/pub/h14o4oit + year: 2021 - ENTRYTYPE: inproceedings - ID: cmckinney2014 - abstract: 'With the growing adoption of internet connectivity across the world, - online collaboration is still a difficult and slow endeavor. Many amazing languages - and tools such as SuperCollider, ChucK, and Max/MSP all facilitate networking - and collaboration, however these languages and tools were not created explicitly - to make group performances simple and intuitive. New web standards such as Web - Audio and Web GL introduce the capability for web browsers to duplicate many of - the features in computer music tools. This paper introduces Lich.js, an effort - to bring musicians together over the internet with minimal effort by leveraging - web technologies.' - address: 'London, United Kingdom' - author: Chad McKinney - bibtex: "@inproceedings{cmckinney2014,\n abstract = {With the growing adoption of\ - \ internet connectivity across the world, online collaboration is still a difficult\ - \ and slow endeavor. Many amazing languages and tools such as SuperCollider, ChucK,\ - \ and Max/MSP all facilitate networking and collaboration, however these languages\ - \ and tools were not created explicitly to make group performances simple and\ - \ intuitive. 
New web standards such as Web Audio and Web GL introduce the capability\ - \ for web browsers to duplicate many of the features in computer music tools.\ - \ This paper introduces Lich.js, an effort to bring musicians together over the\ - \ internet with minimal effort by leveraging web technologies.},\n address = {London,\ - \ United Kingdom},\n author = {Chad McKinney},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178873},\n issn = {2220-4806},\n month = {June},\n pages =\ - \ {379--382},\n publisher = {Goldsmiths, University of London},\n title = {Quick\ - \ Live Coding Collaboration In The Web Browser},\n url = {http://www.nime.org/proceedings/2014/nime2014_519.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_72 + abstract: 'In this article, we discuss the creation of The Furies: A LaptOpera, + a new opera for laptop orchestra and live vocal soloists that tells the story + of the Greek tragedy Electra. We outline the principles that guided our instrument + design with the aim of forging direct and visceral connections between the music, + the narrative, and the relationship between characters in ways we can simultaneously + hear, see, and feel. Through detailed case studies of three instruments—The Rope + and BeatPlayer, the tether chorus, and the autonomous speaker orchestra—this paper + offers tools and reflections to guide instrument-building in service of narrative-based + works through a unified multimedia art form.' + address: 'Shanghai, China' + articleno: 72 + author: 'Hege, Anne and Noufi, Camille and Georgieva, Elena and Wang, Ge' + bibtex: "@inproceedings{NIME21_72,\n abstract = {In this article, we discuss the\ + \ creation of The Furies: A LaptOpera, a new opera for laptop orchestra and live\ + \ vocal soloists that tells the story of the Greek tragedy Electra. 
We outline\ + \ the principles that guided our instrument design with the aim of forging direct\ + \ and visceral connections between the music, the narrative, and the relationship\ + \ between characters in ways we can simultaneously hear, see, and feel. Through\ + \ detailed case studies of three instruments—The Rope and BeatPlayer, the tether\ + \ chorus, and the autonomous speaker orchestra—this paper offers tools and reflections\ + \ to guide instrument-building in service of narrative-based works through a unified\ + \ multimedia art form.},\n address = {Shanghai, China},\n articleno = {72},\n\ + \ author = {Hege, Anne and Noufi, Camille and Georgieva, Elena and Wang, Ge},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.dde5029a},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/QC_-h4cVVog},\n title\ + \ = {Instrument Design for The Furies: A LaptOpera},\n url = {https://nime.pubpub.org/pub/gx6klqui},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178873 + doi: 10.21428/92fbeb44.dde5029a issn: 2220-4806 month: June - pages: 379--382 - publisher: 'Goldsmiths, University of London' - title: Quick Live Coding Collaboration In The Web Browser - url: http://www.nime.org/proceedings/2014/nime2014_519.pdf - year: 2014 + presentation-video: https://youtu.be/QC_-h4cVVog + title: 'Instrument Design for The Furies: A LaptOpera' + url: https://nime.pubpub.org/pub/gx6klqui + year: 2021 - ENTRYTYPE: inproceedings - ID: sknotts2014 - abstract: 'This paper reports the results of an online survey of 160 laptop ensembles - and the relative democracy of their organisational and social structures. 
For - the purposes of this research a laptop ensemble is defined as a performing group - of three or more musicians for whom the laptop is the main sound generating source - and who typically perform together in the same room. The concept of democracy - (i.e. governance by members of the group) has been used as a starting point to - assess firstly what types of organisational structures are currently used in laptop - ensembles and secondarily to what extent laptop ensembles consider the implications - of organisational and social structure on their musical output. To assess this - I recorded a number of data points including ensemble size, whether the group - has a director or conductor, use of homogenous vs. heterogenous hardware and software, - whether they perform composed pieces or mainly improvise, the level of network - interaction and whether or not the ensemble has an academic affiliation. The survey - allowed me to define a scale of democracy in laptop ensembles and typical features - of the most and least democratic groups. Some examples are given of democratic - and autocratic activity in existing laptop ensembles. This work is part of a larger - scale project investigating the effect of social structures on the musical output - of laptop ensembles.' - address: 'London, United Kingdom' - author: Shelly Knotts and Nick Collins - bibtex: "@inproceedings{sknotts2014,\n abstract = {This paper reports the results\ - \ of an online survey of 160 laptop ensembles and the relative democracy of their\ - \ organisational and social structures. For the purposes of this research a laptop\ - \ ensemble is defined as a performing group of three or more musicians for whom\ - \ the laptop is the main sound generating source and who typically perform together\ - \ in the same room. The concept of democracy (i.e. 
governance by members of the\ - \ group) has been used as a starting point to assess firstly what types of organisational\ - \ structures are currently used in laptop ensembles and secondarily to what extent\ - \ laptop ensembles consider the implications of organisational and social structure\ - \ on their musical output. To assess this I recorded a number of data points including\ - \ ensemble size, whether the group has a director or conductor, use of homogenous\ - \ vs. heterogenous hardware and software, whether they perform composed pieces\ - \ or mainly improvise, the level of network interaction and whether or not the\ - \ ensemble has an academic affiliation. The survey allowed me to define a scale\ - \ of democracy in laptop ensembles and typical features of the most and least\ - \ democratic groups. Some examples are given of democratic and autocratic activity\ - \ in existing laptop ensembles. This work is part of a larger scale project investigating\ - \ the effect of social structures on the musical output of laptop ensembles.},\n\ - \ address = {London, United Kingdom},\n author = {Shelly Knotts and Nick Collins},\n\ + ID: NIME21_73 + abstract: 'As technologies and interfaces for the instrumental control of musical + sound get ever better at tracking aspects of human position and motion in space, + a fundamental problem emerges: Unintended or even counter-intentional control + may result when humans themselves become a source of positional noise. A clear + case of what is meant by this, is the “stillness movement” of a body part, occurring + despite the simultaneous explicit intention for that body part to remain still. + In this paper, we present the results of a randomized, controlled experiment investigating + this phenomenon along a vertical axis relative to the human fingertip. The results + include characterizations of both the spatial distribution and frequency distribution + of the stillness movement observed. 
Also included are results indicating a possible + role for constant forces and viscosities in reducing stillness movement amplitude, + thereby potentially enabling the implementation of more positional control of + musical sound within the same available spatial range. Importantly, the above + is summarized in a form that is directly interpretable for anyone designing technologies, + interactions, or performances that involve fingertip control of musical sound. + Also, a complete data set of the experimental results is included in the separate + Appendices to this paper, again in a format that is directly interpretable.' + address: 'Shanghai, China' + articleno: 73 + author: 'de Jong, Staas' + bibtex: "@inproceedings{NIME21_73,\n abstract = {As technologies and interfaces\ + \ for the instrumental control of musical sound get ever better at tracking aspects\ + \ of human position and motion in space, a fundamental problem emerges: Unintended\ + \ or even counter-intentional control may result when humans themselves become\ + \ a source of positional noise. A clear case of what is meant by this, is the\ + \ “stillness movement” of a body part, occurring despite the simultaneous explicit\ + \ intention for that body part to remain still. In this paper, we present the\ + \ results of a randomized, controlled experiment investigating this phenomenon\ + \ along a vertical axis relative to the human fingertip. The results include characterizations\ + \ of both the spatial distribution and frequency distribution of the stillness\ + \ movement observed. Also included are results indicating a possible role for\ + \ constant forces and viscosities in reducing stillness movement amplitude, thereby\ + \ potentially enabling the implementation of more positional control of musical\ + \ sound within the same available spatial range. 
Importantly, the above is summarized\ + \ in a form that is directly interpretable for anyone designing technologies,\ + \ interactions, or performances that involve fingertip control of musical sound.\ + \ Also, a complete data set of the experimental results is included in the separate\ + \ Appendices to this paper, again in a format that is directly interpretable.},\n\ + \ address = {Shanghai, China},\n articleno = {73},\n author = {de Jong, Staas},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178839},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {191--194},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {The Politics of Laptop Ensembles: A Survey of 160 Laptop\ - \ Ensembles and their Organisational Structures},\n url = {http://www.nime.org/proceedings/2014/nime2014_521.pdf},\n\ - \ year = {2014}\n}\n" + \ Musical Expression},\n doi = {10.21428/92fbeb44.9765f11d},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/L_WhJ3N-v8c},\n title\ + \ = {Human noise at the fingertip: Positional (non)control under varying haptic\ + \ × musical conditions},\n url = {https://nime.pubpub.org/pub/bol2r7nr},\n year\ + \ = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178839 + doi: 10.21428/92fbeb44.9765f11d issn: 2220-4806 month: June - pages: 191--194 - publisher: 'Goldsmiths, University of London' - title: 'The Politics of Laptop Ensembles: A Survey of 160 Laptop Ensembles and their - Organisational Structures' - url: http://www.nime.org/proceedings/2014/nime2014_521.pdf - year: 2014 + presentation-video: https://youtu.be/L_WhJ3N-v8c + title: 'Human noise at the fingertip: Positional (non)control under varying haptic + × musical conditions' + url: https://nime.pubpub.org/pub/bol2r7nr + year: 2021 - ENTRYTYPE: inproceedings - ID: lfeugere2014 
- abstract: 'In this demonstration, the mapping and the gestural control strategy - developed in the Digitartic are presented. Digitartic is a musical instrument - able to control sung syllables. Performative rule-based synthesis allows for controlling - semi-consonants, plosive, fricative and nasal consonants with a same gesture, - despite the structural differences in natural production of such vocal segments. - A graphic pen tablet is used for capturing the gesture with a high sampling rate - and resolution. This system alows for both performing various manners of articulation - and having a continuous control over the articulation.' - address: 'London, United Kingdom' - author: Lionel Feugère and Christophe d'Alessandro - bibtex: "@inproceedings{lfeugere2014,\n abstract = {In this demonstration, the mapping\ - \ and the gestural control strategy developed in the Digitartic are presented.\ - \ Digitartic is a musical instrument able to control sung syllables. Performative\ - \ rule-based synthesis allows for controlling semi-consonants, plosive, fricative\ - \ and nasal consonants with a same gesture, despite the structural differences\ - \ in natural production of such vocal segments. A graphic pen tablet is used for\ - \ capturing the gesture with a high sampling rate and resolution. 
This system\ - \ alows for both performing various manners of articulation and having a continuous\ - \ control over the articulation.},\n address = {London, United Kingdom},\n author\ - \ = {Lionel Feug\\`ere and Christophe d'Alessandro},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178762},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {86--87},\n publisher = {Goldsmiths, University of London},\n title = {Rule-Based\ - \ Performative Synthesis of Sung Syllables},\n url = {http://www.nime.org/proceedings/2014/nime2014_522.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_74 + abstract: 'In this paper I show how it is possible to create polyrhythmic patterns + with analogue oscillators by setting up a network of variable resistances that + connect these oscillators. The system I present is build with electronic circuits + connected to dc-motors and allows for a very tangible and playful exploration + of the dynamic properties of artificial neural networks. The theoretical underpinnings + of this approach stem from observation and models of synchronization in living + organisms, where synchronization and phase-locking is not only an observable phenomenon + but can also be seen as a marker of the quality of interaction. Realized as a + technical system of analogue oscillators synchronization also appears between + oscillators tuned at different basic rhythm and stable polyrhythmic patterns emerge + as the result of electrical connections.' + address: 'Shanghai, China' + articleno: 74 + author: 'Faubel, Christian' + bibtex: "@inproceedings{NIME21_74,\n abstract = {In this paper I show how it is\ + \ possible to create polyrhythmic patterns with analogue oscillators by setting\ + \ up a network of variable resistances that connect these oscillators. 
The system\ + \ I present is build with electronic circuits connected to dc-motors and allows\ + \ for a very tangible and playful exploration of the dynamic properties of artificial\ + \ neural networks. The theoretical underpinnings of this approach stem from observation\ + \ and models of synchronization in living organisms, where synchronization and\ + \ phase-locking is not only an observable phenomenon but can also be seen as a\ + \ marker of the quality of interaction. Realized as a technical system of analogue\ + \ oscillators synchronization also appears between oscillators tuned at different\ + \ basic rhythm and stable polyrhythmic patterns emerge as the result of electrical\ + \ connections.},\n address = {Shanghai, China},\n articleno = {74},\n author =\ + \ {Faubel, Christian},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.e66a8542},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/pJlxVJTMRto},\n\ + \ title = {Emergent Polyrhythmic Patterns with a Neuromorph Electronic Network},\n\ + \ url = {https://nime.pubpub.org/pub/g04egsqn},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178762 + doi: 10.21428/92fbeb44.e66a8542 issn: 2220-4806 month: June - pages: 86--87 - publisher: 'Goldsmiths, University of London' - title: Rule-Based Performative Synthesis of Sung Syllables - url: http://www.nime.org/proceedings/2014/nime2014_522.pdf - year: 2014 + presentation-video: https://youtu.be/pJlxVJTMRto + title: Emergent Polyrhythmic Patterns with a Neuromorph Electronic Network + url: https://nime.pubpub.org/pub/g04egsqn + year: 2021 - ENTRYTYPE: inproceedings - ID: jharriman2014 - abstract: 'What do new possibilities for music and art making look like in a world - in which the biological and mechanical are increasingly entangled? 
Can a contrived - environment envelope the senses to the point that one feel fully immersed in it? - It was with these questions in mind that the interactive mechanical sound art - installation endo/exo came into being. Through the use of networked technology - the system becomes more like a self-aware organism, passing messages from node - to node as cells communicate through chemical signals with their neighbors. In - an artistic context, the communication network resembles, but differs from, other - mechanical systems. Issues such as latency are often considered negative factors, - yet they can contribute a touch of personality in this context. This paper is - a reflection on these and other considerations gained from the experience of designing - and constructing endo/exo as well as future implications for the Honeycomb platform - as a tool for creating musical interactions within a new paradigm which allows - for emergent behavior across vast physical spaces. The use of swarming and self-organization, - as well as playful interaction, creates an ``aliveness'''' in the mechanism, and - renders its exploration pleasurable, intriguing and uncanny.' - address: 'London, United Kingdom' - author: Jiffer Harriman and Michael Theodore and Nikolaus Correll and Hunter Ewen - bibtex: "@inproceedings{jharriman2014,\n abstract = {What do new possibilities for\ - \ music and art making look like in a world in which the biological and mechanical\ - \ are increasingly entangled? Can a contrived environment envelope the senses\ - \ to the point that one feel fully immersed in it? It was with these questions\ - \ in mind that the interactive mechanical sound art installation endo/exo came\ - \ into being. Through the use of networked technology the system becomes more\ - \ like a self-aware organism, passing messages from node to node as cells communicate\ - \ through chemical signals with their neighbors. 
In an artistic context, the communication\ - \ network resembles, but differs from, other mechanical systems. Issues such as\ - \ latency are often considered negative factors, yet they can contribute a touch\ - \ of personality in this context. This paper is a reflection on these and other\ - \ considerations gained from the experience of designing and constructing endo/exo\ - \ as well as future implications for the Honeycomb platform as a tool for creating\ - \ musical interactions within a new paradigm which allows for emergent behavior\ - \ across vast physical spaces. The use of swarming and self-organization, as well\ - \ as playful interaction, creates an ``aliveness'' in the mechanism, and renders\ - \ its exploration pleasurable, intriguing and uncanny.},\n address = {London,\ - \ United Kingdom},\n author = {Jiffer Harriman and Michael Theodore and Nikolaus\ - \ Correll and Hunter Ewen},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178786},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {383--386},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {endo/exo Making Art and Music with Distributed\ - \ Computing},\n url = {http://www.nime.org/proceedings/2014/nime2014_523.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_75 + abstract: 'Over past editions of the NIME Conference, there has been a growing concern + towards diversity and inclusion. It is relevant for an international community + whose vast majority of its members are in Europe, the USA, and Canada to seek + a richer cultural diversity. To contribute to a decolonial perspective in the + inclusion of underrepresented countries and ethnic/racial groups, we discuss Gambiarra + and Techno-Vernacular Creativity concepts. We believe these concepts may help + structure and stimulate individuals from these underrepresented contexts to perform + research in the NIME field.' 
+ address: 'Shanghai, China' + articleno: 75 + author: 'Tragtenberg, João and Albuquerque, Gabriel and Calegario, Filipe' + bibtex: "@inproceedings{NIME21_75,\n abstract = {Over past editions of the NIME\ + \ Conference, there has been a growing concern towards diversity and inclusion.\ + \ It is relevant for an international community whose vast majority of its members\ + \ are in Europe, the USA, and Canada to seek a richer cultural diversity. To contribute\ + \ to a decolonial perspective in the inclusion of underrepresented countries and\ + \ ethnic/racial groups, we discuss Gambiarra and Techno-Vernacular Creativity\ + \ concepts. We believe these concepts may help structure and stimulate individuals\ + \ from these underrepresented contexts to perform research in the NIME field.},\n\ + \ address = {Shanghai, China},\n articleno = {75},\n author = {Tragtenberg, João\ + \ and Albuquerque, Gabriel and Calegario, Filipe},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.98354a15},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/iJ8g7vBPFYw},\n title = {Gambiarra and\ + \ Techno-Vernacular Creativity in NIME Research},\n url = {https://nime.pubpub.org/pub/aqm27581},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178786 + doi: 10.21428/92fbeb44.98354a15 issn: 2220-4806 month: June - pages: 383--386 - publisher: 'Goldsmiths, University of London' - title: endo/exo Making Art and Music with Distributed Computing - url: http://www.nime.org/proceedings/2014/nime2014_523.pdf - year: 2014 + presentation-video: https://youtu.be/iJ8g7vBPFYw + title: Gambiarra and Techno-Vernacular Creativity in NIME Research + url: https://nime.pubpub.org/pub/aqm27581 + year: 2021 - ENTRYTYPE: inproceedings - ID: rgraham2014 - abstract: 'This paper describes the 
theoretical underpinnings, design, and development - of a hyper--instrumental performance system driven by gestural data obtained from - an electric guitar. The system combines a multichannel audio feed from the guitar - (which is parsed for its pitch, spectral content and note inter--onset time data - to provide abstractions of sounded performance gestures) with motion tracking - of the performer''s larger--scale bodily movements using a Microsoft Xbox Kinect - sensor. These gestural materials are used to provide the basis for the structures - of relational mappings, informed by the embodied image schema structures of Lakoff - and Johnson. These theoretical perspectives are refined via larger-scale ecological-embodied - structural relationships in electroacoustic music outlined in Smalley''s theory - of spectromorphology, alongside the incorporation of an additional active-agential - response structure through the use of the boids flocking algorithm by Reynolds - to control the spatialization of outputs and other textural processes. The paper - aims to advance a broadly-applicable ''performance gesture ecology'', providing - a shared spatial-relational mapping (a ''basic gestural space'') which allows - for creative (but still coherent) mappings from the performance gestures to the - control of textural and spatial structures.' - address: 'London, United Kingdom' - author: Ricky Graham and Brian Bridges - bibtex: "@inproceedings{rgraham2014,\n abstract = {This paper describes the theoretical\ - \ underpinnings, design, and development of a hyper--instrumental performance\ - \ system driven by gestural data obtained from an electric guitar. The system\ - \ combines a multichannel audio feed from the guitar (which is parsed for its\ - \ pitch, spectral content and note inter--onset time data to provide abstractions\ - \ of sounded performance gestures) with motion tracking of the performer's larger--scale\ - \ bodily movements using a Microsoft Xbox Kinect sensor. 
These gestural materials\ - \ are used to provide the basis for the structures of relational mappings, informed\ - \ by the embodied image schema structures of Lakoff and Johnson. These theoretical\ - \ perspectives are refined via larger-scale ecological-embodied structural relationships\ - \ in electroacoustic music outlined in Smalley's theory of spectromorphology,\ - \ alongside the incorporation of an additional active-agential response structure\ - \ through the use of the boids flocking algorithm by Reynolds to control the spatialization\ - \ of outputs and other textural processes. The paper aims to advance a broadly-applicable\ - \ 'performance gesture ecology', providing a shared spatial-relational mapping\ - \ (a 'basic gestural space') which allows for creative (but still coherent) mappings\ - \ from the performance gestures to the control of textural and spatial structures.},\n\ - \ address = {London, United Kingdom},\n author = {Ricky Graham and Brian Bridges},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178774},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {581--584},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Gesture and Embodied Metaphor in Spatial Music Performance\ - \ Systems Design.},\n url = {http://www.nime.org/proceedings/2014/nime2014_526.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_76 + abstract: 'Digital musical instrument (DMI) design and performance is primarily + practiced by those with backgrounds in music technology and human-computer interaction. + Research on these topics is rarely led by performers, much less by those without + backgrounds in technology. In this study, we explore DMI design and performance + from the perspective of a singular community of classically-trained percussionists. 
+ We use a practiced-based methodology informed by our skillset as percussionists + to study how instrumental skills and sensibilities can be incorporated into the + personalization of, and performance with, DMIs. We introduced a simple and adaptable + digital musical instrument, built using the Arduino Uno, that individuals (percussionists) + could personalize and extend in order to improvise, compose and create music (études). + Our analysis maps parallel percussion practices emerging from the resultant DMI + compositions and performances by examining the functionality of each Arduino instrument + through the lens of material-oriented and communication-oriented approaches to + interactivity.' + address: 'Shanghai, China' + articleno: 76 + author: 'Roth, Timothy and Huang, Aiyun and Cunningham, Tyler' + bibtex: "@inproceedings{NIME21_76,\n abstract = {Digital musical instrument (DMI)\ + \ design and performance is primarily practiced by those with backgrounds in music\ + \ technology and human-computer interaction. Research on these topics is rarely\ + \ led by performers, much less by those without backgrounds in technology. In\ + \ this study, we explore DMI design and performance from the perspective of a\ + \ singular community of classically-trained percussionists. We use a practiced-based\ + \ methodology informed by our skillset as percussionists to study how instrumental\ + \ skills and sensibilities can be incorporated into the personalization of, and\ + \ performance with, DMIs. 
We introduced a simple and adaptable digital musical\ + \ instrument, built using the Arduino Uno, that individuals (percussionists) could\ + \ personalize and extend in order to improvise, compose and create music (études).\ + \ Our analysis maps parallel percussion practices emerging from the resultant\ + \ DMI compositions and performances by examining the functionality of each Arduino\ + \ instrument through the lens of material-oriented and communication-oriented\ + \ approaches to interactivity.},\n address = {Shanghai, China},\n articleno =\ + \ {76},\n author = {Roth, Timothy and Huang, Aiyun and Cunningham, Tyler},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.c61b9546},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/kjQDN907FXs},\n title = {On\ + \ Parallel Performance Practices: Some Observations on Personalizing DMIs as Percussionists},\n\ + \ url = {https://nime.pubpub.org/pub/226jlaug},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178774 + doi: 10.21428/92fbeb44.c61b9546 issn: 2220-4806 month: June - pages: 581--584 - publisher: 'Goldsmiths, University of London' - title: Gesture and Embodied Metaphor in Spatial Music Performance Systems Design. - url: http://www.nime.org/proceedings/2014/nime2014_526.pdf - year: 2014 + presentation-video: https://youtu.be/kjQDN907FXs + title: 'On Parallel Performance Practices: Some Observations on Personalizing DMIs + as Percussionists' + url: https://nime.pubpub.org/pub/226jlaug + year: 2021 - ENTRYTYPE: inproceedings - ID: ckiefer2014 - abstract: 'Echo State Networks (ESNs), a form of recurrent neural network developed - in the field of Reservoir Computing, show significant potential for use as a tool - in the design of mappings for digital musical instruments. 
They have, however, - seldom been used in this area, so this paper explores their possible uses. This - project contributes a new open source library, which was developed to allow ESNs - to run in the Pure Data dataflow environment. Several use cases were explored, - focusing on addressing current issues in mapping research. ESNs were found to - work successfully in scenarios of pattern classification, multiparametric control, - explorative mapping and the design of nonlinearities and uncontrol. Un-trained - behaviours are proposed, as augmentations to the conventional reservoir system - that allow the player to introduce potentially interesting non-linearities and - uncontrol into the reservoir. Interactive evolution style controls are proposed - as strategies to help design these behaviours, which are otherwise dependent on - arbitrary parameters. A study on sound classification shows that ESNs can reliably - differentiate between two drum sounds, and also generalise to other similar input. - Following evaluation of the use cases, heuristics are proposed to aid the use - of ESNs in computer music scenarios.' - address: 'London, United Kingdom' - author: Chris Kiefer - bibtex: "@inproceedings{ckiefer2014,\n abstract = {Echo State Networks (ESNs), a\ - \ form of recurrent neural network developed in the field of Reservoir Computing,\ - \ show significant potential for use as a tool in the design of mappings for digital\ - \ musical instruments. They have, however, seldom been used in this area, so this\ - \ paper explores their possible uses. This project contributes a new open source\ - \ library, which was developed to allow ESNs to run in the Pure Data dataflow\ - \ environment. Several use cases were explored, focusing on addressing current\ - \ issues in mapping research. ESNs were found to work successfully in scenarios\ - \ of pattern classification, multiparametric control, explorative mapping and\ - \ the design of nonlinearities and uncontrol. 
Un-trained behaviours are proposed,\ - \ as augmentations to the conventional reservoir system that allow the player\ - \ to introduce potentially interesting non-linearities and uncontrol into the\ - \ reservoir. Interactive evolution style controls are proposed as strategies to\ - \ help design these behaviours, which are otherwise dependent on arbitrary parameters.\ - \ A study on sound classification shows that ESNs can reliably differentiate between\ - \ two drum sounds, and also generalise to other similar input. Following evaluation\ - \ of the use cases, heuristics are proposed to aid the use of ESNs in computer\ - \ music scenarios.},\n address = {London, United Kingdom},\n author = {Chris Kiefer},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178829},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {293--298},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {Musical Instrument Mapping Design with Echo State Networks},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_530.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_77 + abstract: 'The Seals are a political, feminist, noise, and AI-inspired electronic + sorta-surf rock band composed of the talents of Margaret Schedel, Susie Green, + Sophia Sun, Ria Rajan, and Sofy Yuditskaya, augmented by the S.E.A.L. (Synthetic + Erudition Assist Lattice), as we call the collection of AIs that assist us in + creating usable content with which to mold and shape our music and visuals. Our + concerts begin by invoking one another through internet conferencing software; + during the concert, we play skull augmented theremins while reading GPT2 & GPT3 + (Machine Learning language models) generated dialogue over pre-generated songs. + As a distributed band we designed our performance to take place over video conferencing + systems deliberately incorporating the glitch artifacts that they bring. 
We use + one of the oldest forms of generative operations, throwing dice, as well as the + latest in ML technology to create our collaborative music over a distance. In + this paper, we illustrate how we leverage the multiple novel interfaces that we + use to create our unique sound.' + address: 'Shanghai, China' + articleno: 77 + author: 'Yuditskaya, Sofy and Sun, Sophia and Schedel, Margaret' + bibtex: "@inproceedings{NIME21_77,\n abstract = {The Seals are a political, feminist,\ + \ noise, and AI-inspired electronic sorta-surf rock band composed of the talents\ + \ of Margaret Schedel, Susie Green, Sophia Sun, Ria Rajan, and Sofy Yuditskaya,\ + \ augmented by the S.E.A.L. (Synthetic Erudition Assist Lattice), as we call the\ + \ collection of AIs that assist us in creating usable content with which to mold\ + \ and shape our music and visuals. Our concerts begin by invoking one another\ + \ through internet conferencing software; during the concert, we play skull augmented\ + \ theremins while reading GPT2 & GPT3 (Machine Learning language models) generated\ + \ dialogue over pre-generated songs. As a distributed band we designed our performance\ + \ to take place over video conferencing systems deliberately incorporating the\ + \ glitch artifacts that they bring. We use one of the oldest forms of generative\ + \ operations, throwing dice, as well as the latest in ML technology to create\ + \ our collaborative music over a distance. 
In this paper, we illustrate how we\ + \ leverage the multiple novel interfaces that we use to create our unique sound.},\n\ + \ address = {Shanghai, China},\n articleno = {77},\n author = {Yuditskaya, Sofy\ + \ and Sun, Sophia and Schedel, Margaret},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.0282a79c},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/FmTbEUyePXg},\n\ + \ title = {Synthetic Erudition Assist Lattice},\n url = {https://nime.pubpub.org/pub/5oupvoun},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178829 + doi: 10.21428/92fbeb44.0282a79c issn: 2220-4806 month: June - pages: 293--298 - publisher: 'Goldsmiths, University of London' - title: Musical Instrument Mapping Design with Echo State Networks - url: http://www.nime.org/proceedings/2014/nime2014_530.pdf - year: 2014 + presentation-video: https://youtu.be/FmTbEUyePXg + title: Synthetic Erudition Assist Lattice + url: https://nime.pubpub.org/pub/5oupvoun + year: 2021 - ENTRYTYPE: inproceedings - ID: jallison2014 - abstract: 'Mobile music applications are typically quite limiting to musicians, - as they either attempt to mimic non-touch screen interfaces or do not offer enough - control. Pitch Canvas is a musical interface that was built specifically for the - touchscreen. Pitches are laid out in a hexagonal pattern that allow for easy scale, - chord, and arpeggiation patterns. Notes are played by touch, but are sustained - through continuous movement. Pitch bends can be achieved by passing through the - space between the notes. Its current implementation runs only on Apple iPad tablet - computers using a libPd to convert user interaction into audio. An iPad overlay - offers physical feedback for the circles as well as the pitch bend area between - the circles. 
A performable version of the application has been built, though several - active developments allow alternative sonic interpretation of the gestures, enhanced - visual response to user interaction, and the ability to control the instrument - with multiple devices.' - address: 'London, United Kingdom' - author: Bradley Strylowski and Jesse Allison and Jesse Guessford - bibtex: "@inproceedings{jallison2014,\n abstract = {Mobile music applications are\ - \ typically quite limiting to musicians, as they either attempt to mimic non-touch\ - \ screen interfaces or do not offer enough control. Pitch Canvas is a musical\ - \ interface that was built specifically for the touchscreen. Pitches are laid\ - \ out in a hexagonal pattern that allow for easy scale, chord, and arpeggiation\ - \ patterns. Notes are played by touch, but are sustained through continuous movement.\ - \ Pitch bends can be achieved by passing through the space between the notes.\ - \ Its current implementation runs only on Apple iPad tablet computers using a\ - \ libPd to convert user interaction into audio. 
An iPad overlay offers physical\ - \ feedback for the circles as well as the pitch bend area between the circles.\ - \ A performable version of the application has been built, though several active\ - \ developments allow alternative sonic interpretation of the gestures, enhanced\ - \ visual response to user interaction, and the ability to control the instrument\ - \ with multiple devices.},\n address = {London, United Kingdom},\n author = {Bradley\ - \ Strylowski and Jesse Allison and Jesse Guessford},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178947},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {171--174},\n publisher = {Goldsmiths, University of London},\n title = {Pitch\ - \ Canvas: Touchscreen Based Mobile Music Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_533.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_78 + abstract: This study investigates how accurately users can continuously control + a variety of one degree of freedom sensors commonly used in electronic music interfaces. + Analysis within an information-theoretic model yields channel capacities of maximum + information throughput in bits/sec that can support a unified comparison. The + results may inform the design of digital musical instruments and the design of + systems with similarly demanding control tasks. + address: 'Shanghai, China' + articleno: 78 + author: 'Blandino, Michael and Berdahl, Edgar' + bibtex: "@inproceedings{NIME21_78,\n abstract = {This study investigates how accurately\ + \ users can continuously control a variety of one degree of freedom sensors commonly\ + \ used in electronic music interfaces. Analysis within an information-theoretic\ + \ model yields channel capacities of maximum information throughput in bits/sec\ + \ that can support a unified comparison. 
The results may inform the design of\ + \ digital musical instruments and the design of systems with similarly demanding\ + \ control tasks.},\n address = {Shanghai, China},\n articleno = {78},\n author\ + \ = {Blandino, Michael and Berdahl, Edgar},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.c2b5a672},\n issn = {2220-4806},\n month = {June},\n presentation-video\ + \ = {https://youtu.be/-p7mp3LFsQg},\n title = {Using a Pursuit Tracking Task to\ + \ Compare Continuous Control of Various NIME Sensors},\n url = {https://nime.pubpub.org/pub/using-a-pursuit-tracking-task-to-compare-continuous-control-of-various-nime-sensors},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178947 + doi: 10.21428/92fbeb44.c2b5a672 issn: 2220-4806 month: June - pages: 171--174 - publisher: 'Goldsmiths, University of London' - title: 'Pitch Canvas: Touchscreen Based Mobile Music Instrument' - url: http://www.nime.org/proceedings/2014/nime2014_533.pdf - year: 2014 + presentation-video: https://youtu.be/-p7mp3LFsQg + title: Using a Pursuit Tracking Task to Compare Continuous Control of Various NIME + Sensors + url: https://nime.pubpub.org/pub/using-a-pursuit-tracking-task-to-compare-continuous-control-of-various-nime-sensors + year: 2021 - ENTRYTYPE: inproceedings - ID: pdahlstedt2014 - abstract: 'Two related versions of an unstable live algorithm for the Disklavier - player piano are presented. The underlying generative feedback system consists - of four virtual musicians, listening to each other in a circular configuration. 
- There is no temporal form, and all parameters of the system are controlled by - the performer through an intricate but direct mapping, in an attempt to combine - the experienced musician''s physical control of gesture and phrasing, with the - structural complexities and richness of generative music. In the first version, - Circle Squared, the interface is an array of pressure sensors, and the performer - performs on the system without participating directly, like a puppet master. In - the second version, control parameters are derived directly from playing on the - same piano that performs the output of the system. Here, the performer both plays - with and on the system in an intricate dance with the unpredictable output of - the unstable virtual ensemble. The underlying mapping strategies are presented, - together with the structure of the generative system. Experiences from a series - of performances are discussed, primarily from the perspective of the improvising - musician.' - address: 'London, United Kingdom' - author: Palle Dahlstedt - bibtex: "@inproceedings{pdahlstedt2014,\n abstract = {Two related versions of an\ - \ unstable live algorithm for the Disklavier player piano are presented. The underlying\ - \ generative feedback system consists of four virtual musicians, listening to\ - \ each other in a circular configuration. There is no temporal form, and all parameters\ - \ of the system are controlled by the performer through an intricate but direct\ - \ mapping, in an attempt to combine the experienced musician's physical control\ - \ of gesture and phrasing, with the structural complexities and richness of generative\ - \ music. In the first version, Circle Squared, the interface is an array of pressure\ - \ sensors, and the performer performs on the system without participating directly,\ - \ like a puppet master. 
In the second version, control parameters are derived\ - \ directly from playing on the same piano that performs the output of the system.\ - \ Here, the performer both plays with and on the system in an intricate dance\ - \ with the unpredictable output of the unstable virtual ensemble. The underlying\ - \ mapping strategies are presented, together with the structure of the generative\ - \ system. Experiences from a series of performances are discussed, primarily from\ - \ the perspective of the improvising musician.},\n address = {London, United Kingdom},\n\ - \ author = {Palle Dahlstedt},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178740},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {114--117},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Circle Squared and Circle Keys Performing\ - \ on and with an Unstable Live Algorithm for the Disklavier},\n url = {http://www.nime.org/proceedings/2014/nime2014_534.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_79 + abstract: 'Contending with ecosystem silencing in the Anthropocene, RhumbLine: Plectrohyla + Exquisita is an installation-scale instrument featuring an ensemble of zoomorphic + musical robots that generate an acoustic soundscape from behind an acousmatic + veil, highlighting the spatial attributes of acoustic sound. Originally conceived + as a physical installation, the global COVID-19 pandemic catalyzed a reconceptualization + of the work that allowed it to function remotely and collaboratively with users + seeding robotic frog callers with improvised rhythmic calls via the internet—transforming + a physical installation into a web-based performable installation-scale instrument. + The performed calls from online visitors evolve using AI as they pass through + the frog collective. 
After performing a rhythm, audiences listen ambisonically + from behind a virtual veil and attempt to map the formation of the frogs, based + on the spatial information embedded in their calls. After listening, audience + members can reveal the frogs and their formation. By reconceiving rhumb lines—navigational + tools that create paths of constant bearing to navigate space—as sonic tools to + spatially orient listeners, RhumbLine: Plectrohyla Exquisita functions as a new + interface for spatial musical expression (NISME) in both its physical and virtual + instantiations.' + address: 'Shanghai, China' + articleno: 79 + author: 'Schedel, Margaret and Smith, Brian and Cosgrove, Robert and Hwang, Nick' + bibtex: "@inproceedings{NIME21_79,\n abstract = {Contending with ecosystem silencing\ + \ in the Anthropocene, RhumbLine: Plectrohyla Exquisita is an installation-scale\ + \ instrument featuring an ensemble of zoomorphic musical robots that generate\ + \ an acoustic soundscape from behind an acousmatic veil, highlighting the spatial\ + \ attributes of acoustic sound. Originally conceived as a physical installation,\ + \ the global COVID-19 pandemic catalyzed a reconceptualization of the work that\ + \ allowed it to function remotely and collaboratively with users seeding robotic\ + \ frog callers with improvised rhythmic calls via the internet—transforming a\ + \ physical installation into a web-based performable installation-scale instrument.\ + \ The performed calls from online visitors evolve using AI as they pass through\ + \ the frog collective. After performing a rhythm, audiences listen ambisonically\ + \ from behind a virtual veil and attempt to map the formation of the frogs, based\ + \ on the spatial information embedded in their calls. After listening, audience\ + \ members can reveal the frogs and their formation. 
By reconceiving rhumb lines—navigational\ + \ tools that create paths of constant bearing to navigate space—as sonic tools\ + \ to spatially orient listeners, RhumbLine: Plectrohyla Exquisita functions as\ + \ a new interface for spatial musical expression (NISME) in both its physical\ + \ and virtual instantiations.},\n address = {Shanghai, China},\n articleno = {79},\n\ + \ author = {Schedel, Margaret and Smith, Brian and Cosgrove, Robert and Hwang,\ + \ Nick},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.9e1312b1},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/twzpxObh9jw},\n title\ + \ = {RhumbLine: Plectrohyla Exquisita — Spatial Listening of Zoomorphic Musical\ + \ Robots},\n url = {https://nime.pubpub.org/pub/f5jtuy87},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178740 + doi: 10.21428/92fbeb44.9e1312b1 issn: 2220-4806 month: June - pages: 114--117 - publisher: 'Goldsmiths, University of London' - title: Circle Squared and Circle Keys Performing on and with an Unstable Live Algorithm - for the Disklavier - url: http://www.nime.org/proceedings/2014/nime2014_534.pdf - year: 2014 + presentation-video: https://youtu.be/twzpxObh9jw + title: 'RhumbLine: Plectrohyla Exquisita — Spatial Listening of Zoomorphic Musical + Robots' + url: https://nime.pubpub.org/pub/f5jtuy87 + year: 2021 - ENTRYTYPE: inproceedings - ID: fthalmann2014 - abstract: 'This paper introduces an extension of the Rubato Composer software''s - BigBang rubette module for gestural composition. The extension enables composers - and improvisers to operate BigBang using the Leap Motion controller, which uses - two cameras to detect hand motions in three-dimensional space. 
The low latency - and high precision of the device make it a good fit for BigBang''s functionality, - which is based on immediate visual and auditive feedback. With the new extensions, - users can define an infinite variety of musical objects, such as oscillators, - pitches, chord progressions, or frequency modulators, in real-time and transform - them in order to generate more complex musical structures on any level of abstraction.' - address: 'London, United Kingdom' - author: Daniel Tormoen and Florian Thalmann and Guerino Mazzola - bibtex: "@inproceedings{fthalmann2014,\n abstract = {This paper introduces an extension\ - \ of the Rubato Composer software's BigBang rubette module for gestural composition.\ - \ The extension enables composers and improvisers to operate BigBang using the\ - \ Leap Motion controller, which uses two cameras to detect hand motions in three-dimensional\ - \ space. The low latency and high precision of the device make it a good fit for\ - \ BigBang's functionality, which is based on immediate visual and auditive feedback.\ - \ With the new extensions, users can define an infinite variety of musical objects,\ - \ such as oscillators, pitches, chord progressions, or frequency modulators, in\ - \ real-time and transform them in order to generate more complex musical structures\ - \ on any level of abstraction.},\n address = {London, United Kingdom},\n author\ - \ = {Daniel Tormoen and Florian Thalmann and Guerino Mazzola},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178955},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {207--212},\n publisher = {Goldsmiths, University of London},\n title = {The\ - \ Composing Hand: Musical Creation with Leap Motion and the BigBang Rubette},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_536.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_8 + abstract: 'Recent work in NIME has questioned the 
political and social implications + of work in this field, and has called for direct action on problems in the areas + of diversity, representation and political engagement. Though there is motivation + to address these problems, there is an open question of how to meaningfully do + so. This paper proposes that NIME’s historical record is the best tool for understanding + our own output but this record is incomplete, and makes the case for collective + action to improve how we document our work. I begin by contrasting NIME’s output + with its discourse, and explore the nature of this discourse through NIME history + and examine our inherited epistemological complexity. I assert that, if left unexamined, + this complexity can undermine our community values of diversity and inclusion. + I argue that meaningfully addressing current problems demands critical reflection + on our work, and explore how NIME’s historical record is currently used as a means + of doing so. I then review what NIME''s historical record contains (and what it + does not), and evaluate its fitness for use as a tool of inquiry. Finally I make + the case for collective action to establish better documentation practices, and + suggest features that may be helpful for the process as well as the result.' + address: 'Shanghai, China' + articleno: 8 + author: 'Bin, S. M. Astrid' + bibtex: "@inproceedings{NIME21_8,\n abstract = {Recent work in NIME has questioned\ + \ the political and social implications of work in this field, and has called\ + \ for direct action on problems in the areas of diversity, representation and\ + \ political engagement. Though there is motivation to address these problems,\ + \ there is an open question of how to meaningfully do so. This paper proposes\ + \ that NIME’s historical record is the best tool for understanding our own output\ + \ but this record is incomplete, and makes the case for collective action to improve\ + \ how we document our work. 
I begin by contrasting NIME’s output with its discourse,\ + \ and explore the nature of this discourse through NIME history and examine our\ + \ inherited epistemological complexity. I assert that, if left unexamined, this\ + \ complexity can undermine our community values of diversity and inclusion. I\ + \ argue that meaningfully addressing current problems demands critical reflection\ + \ on our work, and explore how NIME’s historical record is currently used as a\ + \ means of doing so. I then review what NIME's historical record contains (and\ + \ what it does not), and evaluate its fitness for use as a tool of inquiry. Finally\ + \ I make the case for collective action to establish better documentation practices,\ + \ and suggest features that may be helpful for the process as well as the result.},\n\ + \ address = {Shanghai, China},\n articleno = {8},\n author = {Bin, S. M. Astrid},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.ac5d43e1},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/omnMRlj7miA},\n title\ + \ = {Discourse is critical: Towards a collaborative NIME history},\n url = {https://nime.pubpub.org/pub/nbrrk8ll},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178955 + doi: 10.21428/92fbeb44.ac5d43e1 issn: 2220-4806 month: June - pages: 207--212 - publisher: 'Goldsmiths, University of London' - title: 'The Composing Hand: Musical Creation with Leap Motion and the BigBang Rubette' - url: http://www.nime.org/proceedings/2014/nime2014_536.pdf - year: 2014 + presentation-video: https://youtu.be/omnMRlj7miA + title: 'Discourse is critical: Towards a collaborative NIME history' + url: https://nime.pubpub.org/pub/nbrrk8ll + year: 2021 - ENTRYTYPE: inproceedings - ID: obown2014 - abstract: 'Audiences of live laptop music frequently 
express dismay at the opacity - of performer activity and question how ``live'''' performances actually are. Yet - motionless laptop performers endure as musical spectacles from clubs to concert - halls, suggesting that for many this is a non-issue. Understanding these perceptions - might help performers better achieve their intentions, inform interface design - within the NIME field and help develop theories of liveness and performance. To - this end, a study of listeners'' perception of liveness and performer control - in laptop performance was carried out, in which listeners examined several short - audio-only excerpts of laptop performances and answered questions about their - perception of the performance: what they thought was happening and its sense of - liveness. Our results suggest that audiences are likely to associate liveness - with perceived performer activity such as improvisation and the audibility of - gestures, whereas perceptions of generative material, backing tracks, or other - preconceived material do not appear to inhibit perceptions of liveness.' - address: 'London, United Kingdom' - author: Oliver Bown and Renick Bell and Adam Parkinson - bibtex: "@inproceedings{obown2014,\n abstract = {Audiences of live laptop music\ - \ frequently express dismay at the opacity of performer activity and question\ - \ how ``live'' performances actually are. Yet motionless laptop performers endure\ - \ as musical spectacles from clubs to concert halls, suggesting that for many\ - \ this is a non-issue. Understanding these perceptions might help performers better\ - \ achieve their intentions, inform interface design within the NIME field and\ - \ help develop theories of liveness and performance. 
To this end, a study of listeners'\ - \ perception of liveness and performer control in laptop performance was carried\ - \ out, in which listeners examined several short audio-only excerpts of laptop\ - \ performances and answered questions about their perception of the performance:\ - \ what they thought was happening and its sense of liveness. Our results suggest\ - \ that audiences are likely to associate liveness with perceived performer activity\ - \ such as improvisation and the audibility of gestures, whereas perceptions of\ - \ generative material, backing tracks, or other preconceived material do not appear\ - \ to inhibit perceptions of liveness.},\n address = {London, United Kingdom},\n\ - \ author = {Oliver Bown and Renick Bell and Adam Parkinson},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178722},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {13--18},\n publisher = {Goldsmiths, University of London},\n title = {Examining\ - \ the Perception of Liveness and Activity in Laptop Music: Listeners' Inference\ - \ about what the Performer is Doing from the Audio Alone},\n url = {http://www.nime.org/proceedings/2014/nime2014_538.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_80 + abstract: 'In this paper we present the recent developments in the AI-terity instrument. + AI-terity is a deformable, non-rigid musical instrument that comprises a particular + artificial intelligence (AI) method for generating audio samples for real-time + audio synthesis. As an improvement, we developed the control interface structure + with additional sensor hardware. In addition, we implemented a new hybrid deep + learning architecture, GANSpaceSynth, in which we applied the GANSpace method + on the GANSynth model. 
Following the deep learning model improvement, we developed + new autonomous features for the instrument that aim at keeping the musician in + an active and uncertain state of exploration. Through these new features, the + instrument enables more accurate control on GAN latent space. Further, we intend + to investigate the current developments through a musical composition that idiomatically + reflects the new autonomous features of the AI-terity instrument. We argue that + the present technology of AI is suitable for enabling alternative autonomous features + in audio domain for the creative practices of musicians.' + address: 'Shanghai, China' + articleno: 80 + author: 'Tahiroğlu, Koray and Kastemaa, Miranda and Koli, Oskar' + bibtex: "@inproceedings{NIME21_80,\n abstract = {In this paper we present the recent\ + \ developments in the AI-terity instrument. AI-terity is a deformable, non-rigid\ + \ musical instrument that comprises a particular artificial intelligence (AI)\ + \ method for generating audio samples for real-time audio synthesis. As an improvement,\ + \ we developed the control interface structure with additional sensor hardware.\ + \ In addition, we implemented a new hybrid deep learning architecture, GANSpaceSynth,\ + \ in which we applied the GANSpace method on the GANSynth model. Following the\ + \ deep learning model improvement, we developed new autonomous features for the\ + \ instrument that aim at keeping the musician in an active and uncertain state\ + \ of exploration. Through these new features, the instrument enables more accurate\ + \ control on GAN latent space. Further, we intend to investigate the current developments\ + \ through a musical composition that idiomatically reflects the new autonomous\ + \ features of the AI-terity instrument. 
We argue that the present technology of\ + \ AI is suitable for enabling alternative autonomous features in audio domain\ + \ for the creative practices of musicians.},\n address = {Shanghai, China},\n\ + \ articleno = {80},\n author = {Tahiroğlu, Koray and Kastemaa, Miranda and Koli,\ + \ Oskar},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.3d0e9e12},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/WVAIPwI-3P8},\n title\ + \ = {AI-terity 2.0: An Autonomous NIME Featuring GANSpaceSynth Deep Learning Model},\n\ + \ url = {https://nime.pubpub.org/pub/9zu49nu5},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178722 + doi: 10.21428/92fbeb44.3d0e9e12 issn: 2220-4806 month: June - pages: 13--18 - publisher: 'Goldsmiths, University of London' - title: 'Examining the Perception of Liveness and Activity in Laptop Music: Listeners'' - Inference about what the Performer is Doing from the Audio Alone' - url: http://www.nime.org/proceedings/2014/nime2014_538.pdf - year: 2014 + presentation-video: https://youtu.be/WVAIPwI-3P8 + title: 'AI-terity 2.0: An Autonomous NIME Featuring GANSpaceSynth Deep Learning + Model' + url: https://nime.pubpub.org/pub/9zu49nu5 + year: 2021 - ENTRYTYPE: inproceedings - ID: jsnyder12014 - abstract: 'This paper discusses the Birl, an electronic wind instrument developed - by the authors. It uses artificial neural nets to apply machine learning to the - mapping of fingering systems and embouchure position. The design features of the - instrument are described, and the machine learning mapping strategy is discussed.' - address: 'London, United Kingdom' - author: Jeff Snyder and Danny Ryan - bibtex: "@inproceedings{jsnyder12014,\n abstract = {This paper discusses the Birl,\ - \ an electronic wind instrument developed by the authors. 
It uses artificial neural\ - \ nets to apply machine learning to the mapping of fingering systems and embouchure\ - \ position. The design features of the instrument are described, and the machine\ - \ learning mapping strategy is discussed.},\n address = {London, United Kingdom},\n\ - \ author = {Jeff Snyder and Danny Ryan},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178939},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {585--588},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {The Birl: An Electronic Wind Instrument Based\ - \ on an Artificial Neural Network Parameter Mapping Structure},\n url = {http://www.nime.org/proceedings/2014/nime2014_540.pdf},\n\ - \ year = {2014}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178939 - issn: 2220-4806 - month: June - pages: 585--588 - publisher: 'Goldsmiths, University of London' - title: 'The Birl: An Electronic Wind Instrument Based on an Artificial Neural Network - Parameter Mapping Structure' - url: http://www.nime.org/proceedings/2014/nime2014_540.pdf - year: 2014 - - -- ENTRYTYPE: inproceedings - ID: ahindle12014 - abstract: 'One problem with live computer music performance is the transport of - computers to a venue and the following setup of the computers used in playing - and rendering music. The more computers involved the longer the setup and tear-down - of a performance. Each computer adds power and cabling requirements that the venue - must accommodate. Cloud computing can change of all this by simplifying the setup - of many (10s, 100s) of machines at the click of a button. But there''s a catch, - the cloud is not physically near you, you cannot run an audio cable to the cloud. - The audio from a computer music instrument in the cloud needs to streamed back - to the performer and listeners. 
There are many solutions for streaming audio over - networks and the internet, most of them suffer from high latency, heavy buffering, - or proprietary/non-portable clients. In this paper we propose a portable cloud-friendly - method of streaming, almost a cloud soundcard, whereby performers can use mobile - devices (Android, iOS, laptops) to stream audio from the cloud with far lower - latency than technologies like icecast. This technology enables near-realtime - control over power computer music networks enabling performers to travel light - and perform live with more computers than ever before.' - address: 'London, United Kingdom' - author: Abram Hindle - bibtex: "@inproceedings{ahindle12014,\n abstract = {One problem with live computer\ - \ music performance is the transport of computers to a venue and the following\ - \ setup of the computers used in playing and rendering music. The more computers\ - \ involved the longer the setup and tear-down of a performance. Each computer\ - \ adds power and cabling requirements that the venue must accommodate. Cloud computing\ - \ can change of all this by simplifying the setup of many (10s, 100s) of machines\ - \ at the click of a button. But there's a catch, the cloud is not physically near\ - \ you, you cannot run an audio cable to the cloud. The audio from a computer music\ - \ instrument in the cloud needs to streamed back to the performer and listeners.\ - \ There are many solutions for streaming audio over networks and the internet,\ - \ most of them suffer from high latency, heavy buffering, or proprietary/non-portable\ - \ clients. In this paper we propose a portable cloud-friendly method of streaming,\ - \ almost a cloud soundcard, whereby performers can use mobile devices (Android,\ - \ iOS, laptops) to stream audio from the cloud with far lower latency than technologies\ - \ like icecast. 
This technology enables near-realtime control over power computer\ - \ music networks enabling performers to travel light and perform live with more\ - \ computers than ever before.},\n address = {London, United Kingdom},\n author\ - \ = {Abram Hindle},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178798},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {277--280},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {CloudOrch: A Portable SoundCard in the Cloud},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_541.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_81 + abstract: 'A novel, high-fidelity, shape-sensing technology, BendShape [1], is investigated + as an expressive music controller for sound effects, direct sound manipulation, + and voice synthesis. Various approaches are considered for developing mapping + strategies that create transparent metaphors to facilitate expression for both + the performer and the audience. We explore strategies in the input, intermediate, + and output mapping layers using a two-step approach guided by Perry’s Principles +  [2]. First, we use trial-and-error to establish simple mappings between single + input parameter control and effects to identify promising directions for further + study. Then, we compose a specific piece that supports different uses of the BendShape + mappings in a performance context: this allows us to study a performer trying + different types of expressive techniques, enabling us to analyse the role each + mapping has in facilitating musical expression. We also investigate the effects + these mapping strategies have on performer bandwidth. 
Our main finding is that + the high fidelity of the novel BendShape sensor facilitates creating interpretable + input representations to control sound representations, and thereby match interpretations + that provide better expressive mappings, such as with vocal shape to vocal sound + and bumpiness control; however, direct mappings of individual, independent sensor + mappings to effects does not provide obvious advantages over simpler controls. + Furthermore, while the BendShape sensor enables rich explorations for sound, the + ability to find expressive interpretable shape-to-sound representations while + respecting the performer’s bandwidth limitations (caused by having many coupled + input degrees of freedom) remains a challenge and an opportunity.' + address: 'Shanghai, China' + articleno: 81 + author: 'Champagne, Alex and Pritchard, Bob and Dietz, Paul and Fels, Sidney' + bibtex: "@inproceedings{NIME21_81,\n abstract = {A novel, high-fidelity, shape-sensing\ + \ technology, BendShape [1], is investigated as an expressive music controller\ + \ for sound effects, direct sound manipulation, and voice synthesis. Various approaches\ + \ are considered for developing mapping strategies that create transparent metaphors\ + \ to facilitate expression for both the performer and the audience. We explore\ + \ strategies in the input, intermediate, and output mapping layers using a two-step\ + \ approach guided by Perry’s Principles  [2]. First, we use trial-and-error to\ + \ establish simple mappings between single input parameter control and effects\ + \ to identify promising directions for further study. Then, we compose a specific\ + \ piece that supports different uses of the BendShape mappings in a performance\ + \ context: this allows us to study a performer trying different types of expressive\ + \ techniques, enabling us to analyse the role each mapping has in facilitating\ + \ musical expression. 
We also investigate the effects these mapping strategies\ + \ have on performer bandwidth. Our main finding is that the high fidelity of the\ + \ novel BendShape sensor facilitates creating interpretable input representations\ + \ to control sound representations, and thereby match interpretations that provide\ + \ better expressive mappings, such as with vocal shape to vocal sound and bumpiness\ + \ control; however, direct mappings of individual, independent sensor mappings\ + \ to effects does not provide obvious advantages over simpler controls. Furthermore,\ + \ while the BendShape sensor enables rich explorations for sound, the ability\ + \ to find expressive interpretable shape-to-sound representations while respecting\ + \ the performer’s bandwidth limitations (caused by having many coupled input degrees\ + \ of freedom) remains a challenge and an opportunity.},\n address = {Shanghai,\ + \ China},\n articleno = {81},\n author = {Champagne, Alex and Pritchard, Bob and\ + \ Dietz, Paul and Fels, Sidney},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.a72b68dd},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/CnJmH6fX6XA},\n\ + \ title = {Investigation of a Novel Shape Sensor for Musical Expression},\n url\ + \ = {https://nime.pubpub.org/pub/bu2jb1d6},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178798 + doi: 10.21428/92fbeb44.a72b68dd issn: 2220-4806 month: June - pages: 277--280 - publisher: 'Goldsmiths, University of London' - title: 'CloudOrch: A Portable SoundCard in the Cloud' - url: http://www.nime.org/proceedings/2014/nime2014_541.pdf - year: 2014 + presentation-video: https://youtu.be/CnJmH6fX6XA + title: Investigation of a Novel Shape Sensor for Musical Expression + url: https://nime.pubpub.org/pub/bu2jb1d6 + year: 2021 - ENTRYTYPE: 
inproceedings - ID: jsnyder2014 - abstract: 'In this paper, we present the ``Mobile Marching Band'''' (MMB) as a new - mode of musical performance with mobile computing devices. We define an MMB to - be, at its most general, any ensemble utilizing mobile computation that can travel - as it performs, with the performance being independent of its location. We will - discuss the affordances and limitations of mobile-based instrument design and - performance, specifically within the context of a ``moving'''' ensemble. We will - also discuss the use of a Mobile Marching Band as an educational tool. Finally, - we will explore our implementation of a Mobile Parade, a digital Brazilian samba - ensemble.' - address: 'London, United Kingdom' - author: Jeff Snyder and Avneesh Sarwate - bibtex: "@inproceedings{jsnyder2014,\n abstract = {In this paper, we present the\ - \ ``Mobile Marching Band'' (MMB) as a new mode of musical performance with mobile\ - \ computing devices. We define an MMB to be, at its most general, any ensemble\ - \ utilizing mobile computation that can travel as it performs, with the performance\ - \ being independent of its location. We will discuss the affordances and limitations\ - \ of mobile-based instrument design and performance, specifically within the context\ - \ of a ``moving'' ensemble. We will also discuss the use of a Mobile Marching\ - \ Band as an educational tool. 
Finally, we will explore our implementation of\ - \ a Mobile Parade, a digital Brazilian samba ensemble.},\n address = {London,\ - \ United Kingdom},\n author = {Jeff Snyder and Avneesh Sarwate},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178941},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {147--150},\n publisher = {Goldsmiths, University of London},\n title = {Mobile\ - \ Device Percussion Parade},\n url = {http://www.nime.org/proceedings/2014/nime2014_542.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_82 + abstract: 'Debris is a playful interface for direct manipulation of audio waveforms. + Audio data is represented as a collection of waveform elements, which provide + a low-resolution visualisation of the audio sample. Each element, however, can + be individually examined, re-positioned, or broken down into smaller fragments, + thereby becoming a tangible representation of a moment in the sample. Debris is + built around the idea of looking at a sound not as a linear event to be played + from beginning to end, but as a non-linear collection of moments, timbres, and + sound fragments which can be explored, closely examined and interacted with. This + paper positions the work among conceptually related NIME interfaces, details the + various user interactions and their mappings and ends with a discussion around + the interface’s constraints.' + address: 'Shanghai, China' + articleno: 82 + author: 'Robinson, Frederic Anthony' + bibtex: "@inproceedings{NIME21_82,\n abstract = {Debris is a playful interface for\ + \ direct manipulation of audio waveforms. Audio data is represented as a collection\ + \ of waveform elements, which provide a low-resolution visualisation of the audio\ + \ sample. 
Each element, however, can be individually examined, re-positioned,\ + \ or broken down into smaller fragments, thereby becoming a tangible representation\ + \ of a moment in the sample. Debris is built around the idea of looking at a sound\ + \ not as a linear event to be played from beginning to end, but as a non-linear\ + \ collection of moments, timbres, and sound fragments which can be explored, closely\ + \ examined and interacted with. This paper positions the work among conceptually\ + \ related NIME interfaces, details the various user interactions and their mappings\ + \ and ends with a discussion around the interface’s constraints.},\n address =\ + \ {Shanghai, China},\n articleno = {82},\n author = {Robinson, Frederic Anthony},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.02005035},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/H04LgbZqc-c},\n title\ + \ = {Debris: A playful interface for direct manipulation of audio waveforms},\n\ + \ url = {https://nime.pubpub.org/pub/xn761337},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178941 + doi: 10.21428/92fbeb44.02005035 issn: 2220-4806 month: June - pages: 147--150 - publisher: 'Goldsmiths, University of London' - title: Mobile Device Percussion Parade - url: http://www.nime.org/proceedings/2014/nime2014_542.pdf - year: 2014 + presentation-video: https://youtu.be/H04LgbZqc-c + title: 'Debris: A playful interface for direct manipulation of audio waveforms' + url: https://nime.pubpub.org/pub/xn761337 + year: 2021 - ENTRYTYPE: inproceedings - ID: dvannort2014 - abstract: 'This paper discusses an approach to instrument conception that is based - on a careful consideration of the coupling of tactile and sonic gestural action - both into and out of the performance system. 
To this end we propose a design approach - that not only considers the materiality of the instrument, but that leverages - it as a central part of the conception of the sonic quality, the control structuring - and what generally falls under the umbrella of "mapping" design. As we will discuss, - this extended computational matter-centric view is of benefit towards holistically - understanding an ``instrument'''' gestural engagement, as it is realized through - physical material, sonic gestural matter and felt human engagement. We present - instrumental systems that have arisen as a result of this approach to instrument - design.' - address: 'London, United Kingdom' - author: Navid Navab and Doug Van Nort and Sha Xin Wei - bibtex: "@inproceedings{dvannort2014,\n abstract = {This paper discusses an approach\ - \ to instrument conception that is based on a careful consideration of the coupling\ - \ of tactile and sonic gestural action both into and out of the performance system.\ - \ To this end we propose a design approach that not only considers the materiality\ - \ of the instrument, but that leverages it as a central part of the conception\ - \ of the sonic quality, the control structuring and what generally falls under\ - \ the umbrella of \"mapping\" design. As we will discuss, this extended computational\ - \ matter-centric view is of benefit towards holistically understanding an ``instrument''\ - \ gestural engagement, as it is realized through physical material, sonic gestural\ - \ matter and felt human engagement. 
We present instrumental systems that have\ - \ arisen as a result of this approach to instrument design.},\n address = {London,\ - \ United Kingdom},\n author = {Navid Navab and Doug Van Nort and Sha Xin Wei},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178893},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {387--390},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {A Material Computation Perspective on Audio Mosaicing\ - \ and Gestural Conditioning},\n url = {http://www.nime.org/proceedings/2014/nime2014_544.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_83 + abstract: 'Musical audio synthesis often requires systems-level knowledge and uniquely + analytical approaches to music making, thus a number of machine learning systems + have been proposed to replace traditional parameter spaces with more intuitive + control spaces based on spatial arrangement of sonic qualities. Some prior evaluations + of simplified control spaces have shown increased user efficacy via quantitative + metrics in sound design tasks, and some indicate that simplification may lower + barriers to entry to synthesis. However, the level and nature of the appeal of + simplified interfaces to synthesists merits investigation, particularly in relation + to the type of task, prior expertise, and aesthetic values. Toward addressing + these unknowns, this work investigates user experience in a sample of 20 musicians + with varying degrees of synthesis expertise, and uses a one-week, at-home, multi-task + evaluation of a novel instrument presenting a simplified mode of control alongside + the full parameter space. 
We find that our participants generally give primacy + to parameter space and seek understanding of parameter-sound relationships, yet + most do report finding some creative utility in timbre-space control for discovery + of sounds, timbral transposition, and expressive modulations of parameters. Although + we find some articulations of particular aesthetic values, relationships to user + experience remain difficult to characterize generally.' + address: 'Shanghai, China' + articleno: 83 + author: 'Gregorio, Jeff and Kim, Youngmoo E.' + bibtex: "@inproceedings{NIME21_83,\n abstract = {Musical audio synthesis often requires\ + \ systems-level knowledge and uniquely analytical approaches to music making,\ + \ thus a number of machine learning systems have been proposed to replace traditional\ + \ parameter spaces with more intuitive control spaces based on spatial arrangement\ + \ of sonic qualities. Some prior evaluations of simplified control spaces have\ + \ shown increased user efficacy via quantitative metrics in sound design tasks,\ + \ and some indicate that simplification may lower barriers to entry to synthesis.\ + \ However, the level and nature of the appeal of simplified interfaces to synthesists\ + \ merits investigation, particularly in relation to the type of task, prior expertise,\ + \ and aesthetic values. Toward addressing these unknowns, this work investigates\ + \ user experience in a sample of 20 musicians with varying degrees of synthesis\ + \ expertise, and uses a one-week, at-home, multi-task evaluation of a novel instrument\ + \ presenting a simplified mode of control alongside the full parameter space.\ + \ We find that our participants generally give primacy to parameter space and\ + \ seek understanding of parameter-sound relationships, yet most do report finding\ + \ some creative utility in timbre-space control for discovery of sounds, timbral\ + \ transposition, and expressive modulations of parameters. 
Although we find some\ + \ articulations of particular aesthetic values, relationships to user experience\ + \ remain difficult to characterize generally.},\n address = {Shanghai, China},\n\ + \ articleno = {83},\n author = {Gregorio, Jeff and Kim, Youngmoo E.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.31419bf9},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/m7IqWceQmuk},\n title = {Evaluation\ + \ of Timbre-Based Control of a Parametric Synthesizer},\n url = {https://nime.pubpub.org/pub/adtb2zl5},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178893 + doi: 10.21428/92fbeb44.31419bf9 issn: 2220-4806 month: June - pages: 387--390 - publisher: 'Goldsmiths, University of London' - title: A Material Computation Perspective on Audio Mosaicing and Gestural Conditioning - url: http://www.nime.org/proceedings/2014/nime2014_544.pdf - year: 2014 + presentation-video: https://youtu.be/m7IqWceQmuk + title: Evaluation of Timbre-Based Control of a Parametric Synthesizer + url: https://nime.pubpub.org/pub/adtb2zl5 + year: 2021 - ENTRYTYPE: inproceedings - ID: slee12014 - abstract: 'This work introduces a way to distribute mobile applications using mobile - ad-hoc network in the context of audience participation. The goal is to minimize - user configuration so that the process is highly accessible for casual smartphone - users. The prototype mobile applications utilize WiFiDirect and Service Discovery - Protocol to distribute code. With the aid of these two technologies, the prototype - system requires no infrastructure and minimum user configuration.' - address: 'London, United Kingdom' - author: Sang Won Lee and Georg Essl and Z. 
Morley Mao - bibtex: "@inproceedings{slee12014,\n abstract = {This work introduces a way to distribute\ - \ mobile applications using mobile ad-hoc network in the context of audience participation.\ - \ The goal is to minimize user configuration so that the process is highly accessible\ - \ for casual smartphone users. The prototype mobile applications utilize WiFiDirect\ - \ and Service Discovery Protocol to distribute code. With the aid of these two\ - \ technologies, the prototype system requires no infrastructure and minimum user\ - \ configuration.},\n address = {London, United Kingdom},\n author = {Sang Won\ - \ Lee and Georg Essl and Z. Morley Mao},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178849},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {533--536},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Distributing Mobile Music Applications for\ - \ Audience Participation Using Mobile Ad-hoc Network ({MANET})},\n url = {http://www.nime.org/proceedings/2014/nime2014_546.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_84 + abstract: 'Hybridization No. 1 is a wireless hand-held rotary instrument that allows + the performer to simultaneously interact with physical and virtual spaces. The + instrument emits visible laser lights and invisible ultrasonic waves which scan + the architecture of a physical space. The instrument is also connected to a virtual + 3D model of the same space, which allows the performer to create an immersive + audiovisual composition that blurs the limits between physical and virtual space. + In this paper I describe the instrument, its operation and its integrated multimedia + system.' + address: 'Shanghai, China' + articleno: 84 + author: 'Riaño, Milton' + bibtex: "@inproceedings{NIME21_84,\n abstract = {Hybridization No. 
1 is a wireless\ + \ hand-held rotary instrument that allows the performer to simultaneously interact\ + \ with physical and virtual spaces. The instrument emits visible laser lights\ + \ and invisible ultrasonic waves which scan the architecture of a physical space.\ + \ The instrument is also connected to a virtual 3D model of the same space, which\ + \ allows the performer to create an immersive audiovisual composition that blurs\ + \ the limits between physical and virtual space. In this paper I describe the\ + \ instrument, its operation and its integrated multimedia system.},\n address\ + \ = {Shanghai, China},\n articleno = {84},\n author = {Riaño, Milton},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.d3354ff3},\n issn = {2220-4806},\n month\ + \ = {June},\n title = {Hybridization No. 1: Standing at the Boundary between Physical\ + \ and Virtual Space},\n url = {https://nime.pubpub.org/pub/h1},\n year = {2021}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178849 + doi: 10.21428/92fbeb44.d3354ff3 issn: 2220-4806 month: June - pages: 533--536 - publisher: 'Goldsmiths, University of London' - title: Distributing Mobile Music Applications for Audience Participation Using Mobile - Ad-hoc Network (MANET) - url: http://www.nime.org/proceedings/2014/nime2014_546.pdf - year: 2014 + title: 'Hybridization No. 1: Standing at the Boundary between Physical and Virtual + Space' + url: https://nime.pubpub.org/pub/h1 + year: 2021 - ENTRYTYPE: inproceedings - ID: jherrera2014 - abstract: 'A recently developed system that uses pitched sounds to discover relative - 3D positions of a group of devices located in the same physical space is described. 
- The measurements are coordinated over an IP network in a decentralized manner, - while the actual measurements are carried out measuring the time-of-flight of - the notes played by different devices. Approaches to sonify the discovery process - are discussed. A specific instantiation of the system is described in detail. - The melody is specified in the form of a score, available to every device in the - network. The system performs the melody by playing different notes consecutively - on different devices, keeping a consistent timing, while carrying out the inter-device - measurements necessary to discover the geometrical configuration of the devices - in the physical space.' - address: 'London, United Kingdom' - author: Hyung Suk Kim and Jorge Herrera and Ge Wang - bibtex: "@inproceedings{jherrera2014,\n abstract = {A recently developed system\ - \ that uses pitched sounds to discover relative 3D positions of a group of devices\ - \ located in the same physical space is described. The measurements are coordinated\ - \ over an IP network in a decentralized manner, while the actual measurements\ - \ are carried out measuring the time-of-flight of the notes played by different\ - \ devices. Approaches to sonify the discovery process are discussed. A specific\ - \ instantiation of the system is described in detail. The melody is specified\ - \ in the form of a score, available to every device in the network. 
The system\ - \ performs the melody by playing different notes consecutively on different devices,\ - \ keeping a consistent timing, while carrying out the inter-device measurements\ - \ necessary to discover the geometrical configuration of the devices in the physical\ - \ space.},\n address = {London, United Kingdom},\n author = {Hyung Suk Kim and\ - \ Jorge Herrera and Ge Wang},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178831},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {273--276},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Ping-Pong: Musically Discovering Locations},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_550.pdf},\n year = {2014}\n\ - }\n" + ID: NIME21_85 + abstract: 'We present the methods and findings of a multi-day performance research + lab that evaluated the efficacy of a novel nerve sensor in the context of a physically + inclusive performance practice. Nerve sensors are a variant of surface electromyography + that are optimized to detect signals from nerve firings rather than skeletal muscle + movement, allowing performers with altered muscle physiology or control to use + the sensors more effectively. Through iterative co-design and musical performance + evaluation, we compared the performative affordances and limitations of the nerve + sensor to other contemporary sensor-based gestural instruments. The nerve sensor + afforded the communication of gestural effort in a manner that other gestural + instruments did not, while offering a smaller palette of reliably classifiable + gestures.' 
+ address: 'Shanghai, China' + articleno: 85 + author: 'May, Lloyd and Larsson, Peter' + bibtex: "@inproceedings{NIME21_85,\n abstract = {We present the methods and findings\ + \ of a multi-day performance research lab that evaluated the efficacy of a novel\ + \ nerve sensor in the context of a physically inclusive performance practice.\ + \ Nerve sensors are a variant of surface electromyography that are optimized to\ + \ detect signals from nerve firings rather than skeletal muscle movement, allowing\ + \ performers with altered muscle physiology or control to use the sensors more\ + \ effectively. Through iterative co-design and musical performance evaluation,\ + \ we compared the performative affordances and limitations of the nerve sensor\ + \ to other contemporary sensor-based gestural instruments. The nerve sensor afforded\ + \ the communication of gestural effort in a manner that other gestural instruments\ + \ did not, while offering a smaller palette of reliably classifiable gestures.},\n\ + \ address = {Shanghai, China},\n articleno = {85},\n author = {May, Lloyd and\ + \ Larsson, Peter},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.82c5626f},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/qsRVcBl2gAo},\n\ + \ title = {Nerve Sensors in Inclusive Musical Performance},\n url = {https://nime.pubpub.org/pub/yxcp36ii},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178831 + doi: 10.21428/92fbeb44.82c5626f issn: 2220-4806 month: June - pages: 273--276 - publisher: 'Goldsmiths, University of London' - title: 'Ping-Pong: Musically Discovering Locations' - url: http://www.nime.org/proceedings/2014/nime2014_550.pdf - year: 2014 + presentation-video: https://youtu.be/qsRVcBl2gAo + title: Nerve Sensors in Inclusive Musical Performance + url: 
https://nime.pubpub.org/pub/yxcp36ii + year: 2021 - ENTRYTYPE: inproceedings - ID: eberdahl2014 - abstract: 'An embedded acoustic instrument is an embedded musical instrument that - provides a direct acoustic output. This paper describes how to make embedded acoustic - instruments using laser cutting for digital fabrication. Several tips are given - for improving the acoustic quality including: employing maximally stiff material, - placing loudspeaker drivers in the corners of enclosure faces, increasing the - stiffness of ``loudspeaker'''' faces by doubling their thickness, choosing side-lengths - with non-integer ratios, and incorporating bracing. Various versions of an open - design of the ``LapBox'''' are provided to help community members replicate and - extend the work. A procedure is suggested for testing and optimizing the acoustic - quality.' - address: 'London, United Kingdom' - author: Edgar Berdahl - bibtex: "@inproceedings{eberdahl2014,\n abstract = {An embedded acoustic instrument\ - \ is an embedded musical instrument that provides a direct acoustic output. This\ - \ paper describes how to make embedded acoustic instruments using laser cutting\ - \ for digital fabrication. Several tips are given for improving the acoustic quality\ - \ including: employing maximally stiff material, placing loudspeaker drivers in\ - \ the corners of enclosure faces, increasing the stiffness of ``loudspeaker''\ - \ faces by doubling their thickness, choosing side-lengths with non-integer ratios,\ - \ and incorporating bracing. Various versions of an open design of the ``LapBox''\ - \ are provided to help community members replicate and extend the work. 
A procedure\ - \ is suggested for testing and optimizing the acoustic quality.},\n address =\ - \ {London, United Kingdom},\n author = {Edgar Berdahl},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178710},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {140--143},\n publisher = {Goldsmiths, University of London},\n title = {How\ - \ to Make Embedded Acoustic Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_551.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_86 + abstract: 'This paper introduces Tune Field, a 3-dimensional tangible interface + that combines and alters previously existing concepts of topographical, field + sensing and capacitive touch interfaces as a method for musical expression and + sound visualization. Users are invited to create experimental sound textures while + modifying the topography of antennas. The interface’s touch antennas are randomly + located on a box promoting exploration and discovery of gesture-to-sound relationships. + This way, the interface opens space to playfully producing sound and triggering + visuals; thus, converting Tune Field into a sensorial experience.' + address: 'Shanghai, China' + articleno: 86 + author: 'Fernandez, Guadalupe Babio and Larson, Kent' + bibtex: "@inproceedings{NIME21_86,\n abstract = {This paper introduces Tune Field,\ + \ a 3-dimensional tangible interface that combines and alters previously existing\ + \ concepts of topographical, field sensing and capacitive touch interfaces as\ + \ a method for musical expression and sound visualization. Users are invited to\ + \ create experimental sound textures while modifying the topography of antennas.\ + \ The interface’s touch antennas are randomly located on a box promoting exploration\ + \ and discovery of gesture-to-sound relationships. 
This way, the interface opens\ + \ space to playfully producing sound and triggering visuals; thus, converting\ + \ Tune Field into a sensorial experience.},\n address = {Shanghai, China},\n articleno\ + \ = {86},\n author = {Fernandez, Guadalupe Babio and Larson, Kent},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.2305755b},\n issn = {2220-4806},\n month\ + \ = {June},\n presentation-video = {https://youtu.be/2lB8idO_yDs},\n title = {Tune\ + \ Field},\n url = {https://nime.pubpub.org/pub/eqvxspw3},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178710 + doi: 10.21428/92fbeb44.2305755b issn: 2220-4806 month: June - pages: 140--143 - publisher: 'Goldsmiths, University of London' - title: How to Make Embedded Acoustic Instruments - url: http://www.nime.org/proceedings/2014/nime2014_551.pdf - year: 2014 + presentation-video: https://youtu.be/2lB8idO_yDs + title: Tune Field + url: https://nime.pubpub.org/pub/eqvxspw3 + year: 2021 - ENTRYTYPE: inproceedings - ID: cdominguez2014 - abstract: 'This paper presents a project that discusses a brief history of artistic - systems that use photoresistors (light-dependent resistors) and results in the - construction of an interface and performance controller. The controller combines - an Arduino microcontroller with a grid of photoresistors set into a slab of wood - covered with a thin acrylic sheet. A brief background on past uses of these components - for music and film composition and instrument-building introduces a few different - implementations and performance contexts for the controller. Topics such as implementation, - construction, and performance possibilities (including electroacoustic and audio-visual - performance) of the controller are also discussed.' 
- address: 'London, United Kingdom' - author: Carlos Dominguez - bibtex: "@inproceedings{cdominguez2014,\n abstract = {This paper presents a project\ - \ that discusses a brief history of artistic systems that use photoresistors (light-dependent\ - \ resistors) and results in the construction of an interface and performance controller.\ - \ The controller combines an Arduino microcontroller with a grid of photoresistors\ - \ set into a slab of wood covered with a thin acrylic sheet. A brief background\ - \ on past uses of these components for music and film composition and instrument-building\ - \ introduces a few different implementations and performance contexts for the\ - \ controller. Topics such as implementation, construction, and performance possibilities\ - \ (including electroacoustic and audio-visual performance) of the controller are\ - \ also discussed.},\n address = {London, United Kingdom},\n author = {Carlos Dominguez},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178750},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {78--79},\n publisher = {Goldsmiths, University of\ - \ London},\n title = {16-{CdS}: A Surface Controller for the Simultaneous Manipulation\ - \ of Multiple Analog Components},\n url = {http://www.nime.org/proceedings/2014/nime2014_552.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_87 + abstract: 'The basic role of DJs is creating a seamless sequence of music tracks. + In order to make the DJ mix a single continuous audio stream, DJs control various + audio effects on a DJ mixer system particularly in the transition region between + one track and the next track and modify the audio signals in terms of volume, + timbre, tempo, and other musical elements. There have been research efforts to + imitate the DJ mixing techniques but they are mainly rule-based approaches based + on domain knowledge. 
In this paper, we propose a method to analyze the DJ mixer + control from real-world DJ mixes toward a data-driven approach to imitate the + DJ performance. Specifically, we estimate the mixing gain trajectories between + the two tracks using sub-band analysis with constrained convex optimization. We + evaluate the method by reconstructing the original tracks using the two source + tracks and the gain estimate, and show that the proposed method outperforms the + linear crossfading as a baseline and the single-band analysis. A listening test + from the survey of 14 participants also confirms that our proposed method is superior + among them.' + address: 'Shanghai, China' + articleno: 87 + author: 'Kim, Taejun and Yang, Yi-Hsuan and Nam, Juhan' + bibtex: "@inproceedings{NIME21_87,\n abstract = {The basic role of DJs is creating\ + \ a seamless sequence of music tracks. In order to make the DJ mix a single continuous\ + \ audio stream, DJs control various audio effects on a DJ mixer system particularly\ + \ in the transition region between one track and the next track and modify the\ + \ audio signals in terms of volume, timbre, tempo, and other musical elements.\ + \ There have been research efforts to imitate the DJ mixing techniques but they\ + \ are mainly rule-based approaches based on domain knowledge. In this paper, we\ + \ propose a method to analyze the DJ mixer control from real-world DJ mixes toward\ + \ a data-driven approach to imitate the DJ performance. Specifically, we estimate\ + \ the mixing gain trajectories between the two tracks using sub-band analysis\ + \ with constrained convex optimization. We evaluate the method by reconstructing\ + \ the original tracks using the two source tracks and the gain estimate, and show\ + \ that the proposed method outperforms the linear crossfading as a baseline and\ + \ the single-band analysis. 
A listening test from the survey of 14 participants\ + \ also confirms that our proposed method is superior among them.},\n address =\ + \ {Shanghai, China},\n articleno = {87},\n author = {Kim, Taejun and Yang, Yi-Hsuan\ + \ and Nam, Juhan},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.4b2fc7b9},\n\ + \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/ju0P-Zq8Bwo},\n\ + \ title = {Reverse-Engineering The Transition Regions of Real-World DJ Mixes using\ + \ Sub-band Analysis with Convex Optimization},\n url = {https://nime.pubpub.org/pub/g7avj1a7},\n\ + \ year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178750 + doi: 10.21428/92fbeb44.4b2fc7b9 issn: 2220-4806 month: June - pages: 78--79 - publisher: 'Goldsmiths, University of London' - title: '16-CdS: A Surface Controller for the Simultaneous Manipulation of Multiple - Analog Components' - url: http://www.nime.org/proceedings/2014/nime2014_552.pdf - year: 2014 + presentation-video: https://youtu.be/ju0P-Zq8Bwo + title: Reverse-Engineering The Transition Regions of Real-World DJ Mixes using Sub-band + Analysis with Convex Optimization + url: https://nime.pubpub.org/pub/g7avj1a7 + year: 2021 - ENTRYTYPE: inproceedings - ID: slee2014 - abstract: 'In the setting of collaborative live coding, a number of issues emerge: - (1) need for communication, (2) issues of conflicts in sharing program state space, - and (3) remote control of code execution. In this paper, we propose solutions - to these problems. In the recent extension of UrMus, a programming environment - for mobile music application development, we introduce a paradigm of shared and - individual namespaces safeguard against conflicts in parallel coding activities. 
- We also develop live variable view that communicates live changes in state among - live coders, networked performers, and the audience. Lastly, we integrate collaborative - aspects of programming execution into built-in live chat, which enables not only - communication with others, but also distributed execution of code.' - address: 'London, United Kingdom' - author: Sang Won Lee and Georg Essl - bibtex: "@inproceedings{slee2014,\n abstract = {In the setting of collaborative\ - \ live coding, a number of issues emerge: (1) need for communication, (2) issues\ - \ of conflicts in sharing program state space, and (3) remote control of code\ - \ execution. In this paper, we propose solutions to these problems. In the recent\ - \ extension of UrMus, a programming environment for mobile music application development,\ - \ we introduce a paradigm of shared and individual namespaces safeguard against\ - \ conflicts in parallel coding activities. We also develop live variable view\ - \ that communicates live changes in state among live coders, networked performers,\ - \ and the audience. Lastly, we integrate collaborative aspects of programming\ - \ execution into built-in live chat, which enables not only communication with\ - \ others, but also distributed execution of code.},\n address = {London, United\ - \ Kingdom},\n author = {Sang Won Lee and Georg Essl},\n booktitle = {Proceedings\ + ID: NIME21_88 + abstract: 'This paper reports on a project aimed to break away from the portability + concerns of native DSP code between different platforms, thus freeing the instrument + designer from the burden of porting new Digital Musical Instruments (DMIs) to + different architectures. Bespoke Anywhere is a live modular style software DMI + with an instance of the Audio Anywhere (AA) framework, that enables working with + audio plugins that are compiled once and run anywhere. 
At the heart of Audio Anywhere + is an audio engine whose Digital Signal Processing (DSP) components are written + in Faust and deployed with Web Assembly (Wasm). We demonstrate Bespoke Anywhere + as a hosting application, for live performance, and music production. We focus + on an instance of AA using Faust for DSP, that is statically complied to portable + Wasm, and Graphical User Interfaces (GUIs) described in JSON, both of which are + loaded dynamically into our modified version of Bespoke.' + address: 'Shanghai, China' + articleno: 88 + author: 'Gaster, Benedict and Challinor, Ryan' + bibtex: "@inproceedings{NIME21_88,\n abstract = {This paper reports on a project\ + \ aimed to break away from the portability concerns of native DSP code between\ + \ different platforms, thus freeing the instrument designer from the burden of\ + \ porting new Digital Musical Instruments (DMIs) to different architectures. Bespoke\ + \ Anywhere is a live modular style software DMI with an instance of the Audio\ + \ Anywhere (AA) framework, that enables working with audio plugins that are compiled\ + \ once and run anywhere. At the heart of Audio Anywhere is an audio engine whose\ + \ Digital Signal Processing (DSP) components are written in Faust and deployed\ + \ with Web Assembly (Wasm). We demonstrate Bespoke Anywhere as a hosting application,\ + \ for live performance, and music production. 
We focus on an instance of AA using\ + \ Faust for DSP, that is statically complied to portable Wasm, and Graphical User\ + \ Interfaces (GUIs) described in JSON, both of which are loaded dynamically into\ + \ our modified version of Bespoke.},\n address = {Shanghai, China},\n articleno\ + \ = {88},\n author = {Gaster, Benedict and Challinor, Ryan},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178847},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {263--268},\n publisher = {Goldsmiths, University of London},\n title = {Communication,\ - \ Control, and State Sharing in Collaborative Live Coding},\n url = {http://www.nime.org/proceedings/2014/nime2014_554.pdf},\n\ - \ year = {2014}\n}\n" + \ doi = {10.21428/92fbeb44.02c348fb},\n issn = {2220-4806},\n month = {June},\n\ + \ presentation-video = {https://youtu.be/ayJzFVRXPMs},\n title = {Bespoke Anywhere},\n\ + \ url = {https://nime.pubpub.org/pub/8jaqbl7m},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178847 + doi: 10.21428/92fbeb44.02c348fb issn: 2220-4806 month: June - pages: 263--268 - publisher: 'Goldsmiths, University of London' - title: 'Communication, Control, and State Sharing in Collaborative Live Coding' - url: http://www.nime.org/proceedings/2014/nime2014_554.pdf - year: 2014 + presentation-video: https://youtu.be/ayJzFVRXPMs + title: Bespoke Anywhere + url: https://nime.pubpub.org/pub/8jaqbl7m + year: 2021 - ENTRYTYPE: inproceedings - ID: rcollecchia2014 - abstract: 'Sirens evoke images of alarm, public service, war, and forthcoming air - raid. Outside of the music of Edgard Varese, sirens have rarely been framed as - musical instruments. By connecting air hoses to spinning disks with evenly-spaced - perforations, the siren timbre is translated musically. 
Polyphony gives our instrument - an organ-like personality: keys are mapped to different frequencies and the pressure - applied to them determines volume. The siren organ can produce a large range of - sounds both timbrally and dynamically. In addition to a siren timbre, the instrument - produces similar sounds to a harmonica. Portability, robustness, and electronic - stability are all areas for improvement.' - address: 'London, United Kingdom' - author: Regina Collecchia and Dan Somen and Kevin McElroy - bibtex: "@inproceedings{rcollecchia2014,\n abstract = {Sirens evoke images of alarm,\ - \ public service, war, and forthcoming air raid. Outside of the music of Edgard\ - \ Varese, sirens have rarely been framed as musical instruments. By connecting\ - \ air hoses to spinning disks with evenly-spaced perforations, the siren timbre\ - \ is translated musically. Polyphony gives our instrument an organ-like personality:\ - \ keys are mapped to different frequencies and the pressure applied to them determines\ - \ volume. The siren organ can produce a large range of sounds both timbrally and\ - \ dynamically. In addition to a siren timbre, the instrument produces similar\ - \ sounds to a harmonica. Portability, robustness, and electronic stability are\ - \ all areas for improvement.},\n address = {London, United Kingdom},\n author\ - \ = {Regina Collecchia and Dan Somen and Kevin McElroy},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178732},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {391--394},\n publisher = {Goldsmiths, University of London},\n title = {The\ - \ Siren Organ},\n url = {http://www.nime.org/proceedings/2014/nime2014_558.pdf},\n\ - \ year = {2014}\n}\n" + ID: NIME21_9 + abstract: 'Learning advanced skills on a musical instrument takes a range of physical + and cognitive efforts. 
For instance, practicing polyrhythm is a complex task that + requires the development of both musical and physical skills. This paper explores + the use of automation in the context of learning advanced skills on the guitar. + Our robotic guitar is capable of physically plucking on the strings along with + a musician, providing both haptic and audio guidance to the musician. We hypothesize + that a multimodal and first-person experience of “being able to play” could increase + learning efficacy. We discuss the novel learning application and a user study, + through which we illustrate the implication and potential issues in systems that + provide temporary skills and in-situ multimodal guidance for learning.' + address: 'Shanghai, China' + articleno: 9 + author: 'Leigh, Sang-won and Lee, Jeonghyun (Jonna)' + bibtex: "@inproceedings{NIME21_9,\n abstract = {Learning advanced skills on a musical\ + \ instrument takes a range of physical and cognitive efforts. For instance, practicing\ + \ polyrhythm is a complex task that requires the development of both musical and\ + \ physical skills. This paper explores the use of automation in the context of\ + \ learning advanced skills on the guitar. Our robotic guitar is capable of physically\ + \ plucking on the strings along with a musician, providing both haptic and audio\ + \ guidance to the musician. We hypothesize that a multimodal and first-person\ + \ experience of “being able to play” could increase learning efficacy. 
We discuss\ + \ the novel learning application and a user study, through which we illustrate\ + \ the implication and potential issues in systems that provide temporary skills\ + \ and in-situ multimodal guidance for learning.},\n address = {Shanghai, China},\n\ + \ articleno = {9},\n author = {Leigh, Sang-won and Lee, Jeonghyun (Jonna)},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.002be215},\n issn = {2220-4806},\n\ + \ month = {June},\n presentation-video = {https://youtu.be/MeXrN95jajU},\n title\ + \ = {A Study on Learning Advanced Skills on Co-Playable Robotic Instruments},\n\ + \ url = {https://nime.pubpub.org/pub/h5dqsvpm},\n year = {2021}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178732 + doi: 10.21428/92fbeb44.002be215 issn: 2220-4806 month: June - pages: 391--394 - publisher: 'Goldsmiths, University of London' - title: The Siren Organ - url: http://www.nime.org/proceedings/2014/nime2014_558.pdf - year: 2014 + presentation-video: https://youtu.be/MeXrN95jajU + title: A Study on Learning Advanced Skills on Co-Playable Robotic Instruments + url: https://nime.pubpub.org/pub/h5dqsvpm + year: 2021 - ENTRYTYPE: inproceedings - ID: drector2014 - abstract: 'Actuated instruments is a growing area of activity for research and composition, - yet there has been little focus on membrane-based instruments. This paper describes - a novel design for an internally actuated drum based on the mechanical principles - of a loudspeaker. Implementation is described in detail; in particular, two modes - of actuation, a moving-coil electromagnet and a moving-magnet design, are described. - We evaluate the drum using a synthesized frequency sweep, and find that the instrument - has a broad frequency response and exhibits qualities of both a drum and speaker.' 
- address: 'London, United Kingdom' - author: David Rector and Spencer Topel - bibtex: "@inproceedings{drector2014,\n abstract = {Actuated instruments is a growing\ - \ area of activity for research and composition, yet there has been little focus\ - \ on membrane-based instruments. This paper describes a novel design for an internally\ - \ actuated drum based on the mechanical principles of a loudspeaker. Implementation\ - \ is described in detail; in particular, two modes of actuation, a moving-coil\ - \ electromagnet and a moving-magnet design, are described. We evaluate the drum\ - \ using a synthesized frequency sweep, and find that the instrument has a broad\ - \ frequency response and exhibits qualities of both a drum and speaker.},\n address\ - \ = {London, United Kingdom},\n author = {David Rector and Spencer Topel},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178913},\n issn = {2220-4806},\n month\ - \ = {June},\n pages = {395--398},\n publisher = {Goldsmiths, University of London},\n\ - \ title = {Internally Actuated Drums for Expressive Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_559.pdf},\n\ - \ year = {2014}\n}\n" + ID: KimBoyle2008 + abstract: "The rapid development of network communicationtechnologies has allowed\ + \ composers to create new ways inwhich to directly engage participants in the\ + \ exploration of newmusical environments. A number of distinctive aestheticapproaches\ + \ to the musical application of networks will beoutlined in this paper each of\ + \ which is mediated andconditioned by the technical and aesthetic foundations\ + \ of thenetwork technologies themselves. Recent work in the field byartists such\ + \ as Atau Tanaka and Metraform will be examined, aswill some of the earlier pioneering\ + \ work in the genre by MaxNeuhaus. 
While recognizing the historical context ofcollaborative\ + \ work, the ,\n,\nauthor will examine how the strategiesemployed in the work of\ + \ these artists have helped redefine anew aesthetics of engagement in which play,\ + \ spatial andtemporal dislocation are amongst the genre's definingcharacteristics." + address: 'Genoa, Italy' + author: 'Kim-Boyle, David' + bibtex: "@inproceedings{KimBoyle2008,\n abstract = {The rapid development of network\ + \ communicationtechnologies has allowed composers to create new ways inwhich to\ + \ directly engage participants in the exploration of newmusical environments.\ + \ A number of distinctive aestheticapproaches to the musical application of networks\ + \ will beoutlined in this paper each of which is mediated andconditioned by the\ + \ technical and aesthetic foundations of thenetwork technologies themselves. Recent\ + \ work in the field byartists such as Atau Tanaka and Metraform will be examined,\ + \ aswill some of the earlier pioneering work in the genre by MaxNeuhaus. While\ + \ recognizing the historical context ofcollaborative work, the ,\n,\nauthor will\ + \ examine how the strategiesemployed in the work of these artists have helped\ + \ redefine anew aesthetics of engagement in which play, spatial andtemporal dislocation\ + \ are amongst the genre's definingcharacteristics.},\n address = {Genoa, Italy},\n\ + \ author = {Kim-Boyle, David},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179579},\n\ + \ issn = {2220-4806},\n keywords = {Networks, collaborative, open-form, play,\ + \ interface. 
},\n pages = {3--8},\n title = {Network Musics --- Play , Engagement\ + \ and the Democratization of Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_003.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178913 + doi: 10.5281/zenodo.1179579 issn: 2220-4806 - month: June - pages: 395--398 - publisher: 'Goldsmiths, University of London' - title: Internally Actuated Drums for Expressive Performance - url: http://www.nime.org/proceedings/2014/nime2014_559.pdf - year: 2014 + keywords: 'Networks, collaborative, open-form, play, interface. ' + pages: 3--8 + title: 'Network Musics --- Play , Engagement and the Democratization of Performance' + url: http://www.nime.org/proceedings/2008/nime2008_003.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: ssalazar2014 - abstract: 'Effective software interaction design must consider all of the capabilities - and limitations of the platform for which it is developed. To this end, we propose - a new model for computer music system design on touchscreen devices, combining - both pen/stylus input and multitouch gestures. Such a model surpasses the barrier - of touchscreen-based keyboard input, preserving the primary interaction of touch - and direct manipulation throughout the development of a complex musical program. - We have implemented an iPad software application utilizing these principles, called - ``Auraglyph.'''' Auraglyph offers a number of fundamental audio processing and - control operators, as well as facilities for structured input and output. All - of these software objects are created, parameterized, and interconnected via stylus - and touch input. Underlying this application is an advanced handwriting recognition - framework, LipiTk, which can be trained to recognize both alphanumeric characters - and arbitrary figures, shapes, and patterns.' 
- address: 'London, United Kingdom' - author: Spencer Salazar and Ge Wang - bibtex: "@inproceedings{ssalazar2014,\n abstract = {Effective software interaction\ - \ design must consider all of the capabilities and limitations of the platform\ - \ for which it is developed. To this end, we propose a new model for computer\ - \ music system design on touchscreen devices, combining both pen/stylus input\ - \ and multitouch gestures. Such a model surpasses the barrier of touchscreen-based\ - \ keyboard input, preserving the primary interaction of touch and direct manipulation\ - \ throughout the development of a complex musical program. We have implemented\ - \ an iPad software application utilizing these principles, called ``Auraglyph.''\ - \ Auraglyph offers a number of fundamental audio processing and control operators,\ - \ as well as facilities for structured input and output. All of these software\ - \ objects are created, parameterized, and interconnected via stylus and touch\ - \ input. Underlying this application is an advanced handwriting recognition framework,\ - \ LipiTk, which can be trained to recognize both alphanumeric characters and arbitrary\ - \ figures, shapes, and patterns.},\n address = {London, United Kingdom},\n author\ - \ = {Spencer Salazar and Ge Wang},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178927},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {106--109},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Auraglyph: Handwritten Computer Music Composition\ - \ and Design},\n url = {http://www.nime.org/proceedings/2014/nime2014_560.pdf},\n\ - \ year = {2014}\n}\n" + ID: Barbosa2008 + abstract: 'This paper presents the latest developments of the Public Sound Objects + (PSOs) system, an experimental framework to implement and test new concepts for + Networked Music. 
The project of a Public interactive installation using the PSOs + system was commissioned in 2007 by Casa da Musica, the main concert hall space + in Porto. It resulted in a distributed musical structure with up to ten interactive + performance terminals distributed along the Casa da Musica''s hallways, collectively + controlling a shared acoustic piano. The installation allows the visitors to collaborate + remotely with each other, within the building, using a software interface custom + developed to facilitate collaborative music practices and with no requirements + in terms previous knowledge of musical performance. ' + address: 'Genoa, Italy' + author: 'Barbosa, Àlvaro' + bibtex: "@inproceedings{Barbosa2008,\n abstract = {This paper presents the latest\ + \ developments of the Public Sound Objects (PSOs) system, an experimental framework\ + \ to implement and test new concepts for Networked Music. The project of a Public\ + \ interactive installation using the PSOs system was commissioned in 2007 by Casa\ + \ da Musica, the main concert hall space in Porto. It resulted in a distributed\ + \ musical structure with up to ten interactive performance terminals distributed\ + \ along the Casa da Musica's hallways, collectively controlling a shared acoustic\ + \ piano. The installation allows the visitors to collaborate remotely with each\ + \ other, within the building, using a software interface custom developed to facilitate\ + \ collaborative music practices and with no requirements in terms previous knowledge\ + \ of musical performance. 
},\n address = {Genoa, Italy},\n author = {Barbosa,\ + \ \\`{A}lvaro},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179487},\n\ + \ issn = {2220-4806},\n keywords = {algorithmic composition,behavioral driven,electronic\ + \ music instruments,interfaces,network music instruments,nime08,performance,public\ + \ music,real-time collaborative,sound},\n pages = {9--12},\n title = {Ten-Hand\ + \ Piano : A Networked Music Installation},\n url = {http://www.nime.org/proceedings/2008/nime2008_009.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178927 + doi: 10.5281/zenodo.1179487 issn: 2220-4806 - month: June - pages: 106--109 - publisher: 'Goldsmiths, University of London' - title: 'Auraglyph: Handwritten Computer Music Composition and Design' - url: http://www.nime.org/proceedings/2014/nime2014_560.pdf - year: 2014 + keywords: 'algorithmic composition,behavioral driven,electronic music instruments,interfaces,network + music instruments,nime08,performance,public music,real-time collaborative,sound' + pages: 9--12 + title: 'Ten-Hand Piano : A Networked Music Installation' + url: http://www.nime.org/proceedings/2008/nime2008_009.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: ahornof2014 - abstract: 'Although new sensor devices and data streams are increasingly used for - musical expression, and although eye-tracking devices have become increasingly - cost-effective and prevalent in research and as a means of communication for people - with severe motor impairments, eye-controlled musical expression nonetheless remains - somewhat elusive and minimally explored. 
This paper (a) identifies a number of - fundamental human eye movement capabilities and constraints which determine in - part what can and cannot be musically expressed with eye movements, (b) reviews - prior work on eye-controlled musical expression, and (c) analyzes and provides - a taxonomy of what has been done, and what will need to be addressed in future - eye-controlled musical instruments. The fundamental human constraints and processes - that govern eye movements create a challenge for eye-controlled music in that - the instrument needs to be designed to motivate or at least permit specific unique - visual goals, each of which when accomplished must then be mapped, using the eye - tracker and some sort of sound generator, to different musical outcomes. The control - of the musical instrument is less direct than if it were played with muscles that - can be controlled in a more direct manner, such as the muscles in the hands.' - address: 'London, United Kingdom' - author: Anthony Hornof - bibtex: "@inproceedings{ahornof2014,\n abstract = {Although new sensor devices and\ - \ data streams are increasingly used for musical expression, and although eye-tracking\ - \ devices have become increasingly cost-effective and prevalent in research and\ - \ as a means of communication for people with severe motor impairments, eye-controlled\ - \ musical expression nonetheless remains somewhat elusive and minimally explored.\ - \ This paper (a) identifies a number of fundamental human eye movement capabilities\ - \ and constraints which determine in part what can and cannot be musically expressed\ - \ with eye movements, (b) reviews prior work on eye-controlled musical expression,\ - \ and (c) analyzes and provides a taxonomy of what has been done, and what will\ - \ need to be addressed in future eye-controlled musical instruments. 
The fundamental\ - \ human constraints and processes that govern eye movements create a challenge\ - \ for eye-controlled music in that the instrument needs to be designed to motivate\ - \ or at least permit specific unique visual goals, each of which when accomplished\ - \ must then be mapped, using the eye tracker and some sort of sound generator,\ - \ to different musical outcomes. The control of the musical instrument is less\ - \ direct than if it were played with muscles that can be controlled in a more\ - \ direct manner, such as the muscles in the hands.},\n address = {London, United\ - \ Kingdom},\n author = {Anthony Hornof},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178804},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {461--466},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {The Prospects For Eye-Controlled Musical Performance},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_562.pdf},\n year = {2014}\n\ - }\n" + ID: Wozniewski2008 + abstract: 'New application spaces and artistic forms can emerge whenusers are freed + from constraints. In the general case ofhuman-computer interfaces, users are often + confined to afixed location, severely limiting mobility. To overcome thisconstraint + in the context of musical interaction, we presenta system to manage large-scale + collaborative mobile audioenvironments, driven by user movement. Multiple participants + navigate through physical space while sharing overlaid virtual elements. Each + user is equipped with a mobilecomputing device, GPS receiver, orientation sensor, + microphone, headphones, or various combinations of these technologies. We investigate + methods of location tracking, wireless audio streaming, and state management between + mobiledevices and centralized servers. 
The result is a system thatallows mobile + users, with subjective 3-D audio rendering,to share virtual scenes. The audio + elements of these scenescan be organized into large-scale spatial audio interfaces,thus + allowing for immersive mobile performance, locativeaudio installations, and many + new forms of collaborativesonic activity.' + address: 'Genoa, Italy' + author: 'Wozniewski, Mike and Bouillot, Nicolas and Settel, Zack and Cooperstock, + Jeremy R.' + bibtex: "@inproceedings{Wozniewski2008,\n abstract = {New application spaces and\ + \ artistic forms can emerge whenusers are freed from constraints. In the general\ + \ case ofhuman-computer interfaces, users are often confined to afixed location,\ + \ severely limiting mobility. To overcome thisconstraint in the context of musical\ + \ interaction, we presenta system to manage large-scale collaborative mobile audioenvironments,\ + \ driven by user movement. Multiple participants navigate through physical space\ + \ while sharing overlaid virtual elements. Each user is equipped with a mobilecomputing\ + \ device, GPS receiver, orientation sensor, microphone, headphones, or various\ + \ combinations of these technologies. We investigate methods of location tracking,\ + \ wireless audio streaming, and state management between mobiledevices and centralized\ + \ servers. The result is a system thatallows mobile users, with subjective 3-D\ + \ audio rendering,to share virtual scenes. 
The audio elements of these scenescan\ + \ be organized into large-scale spatial audio interfaces,thus allowing for immersive\ + \ mobile performance, locativeaudio installations, and many new forms of collaborativesonic\ + \ activity.},\n address = {Genoa, Italy},\n author = {Wozniewski, Mike and Bouillot,\ + \ Nicolas and Settel, Zack and Cooperstock, Jeremy R.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179651},\n issn = {2220-4806},\n keywords = {sonic navigation,\ + \ mobile music, spatial interaction, wireless audio streaming, locative media,\ + \ collaborative interfaces },\n pages = {13--18},\n title = {Large-Scale Mobile\ + \ Audio Environments for Collaborative Musical Interaction},\n url = {http://www.nime.org/proceedings/2008/nime2008_013.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178804 + doi: 10.5281/zenodo.1179651 issn: 2220-4806 - month: June - pages: 461--466 - publisher: 'Goldsmiths, University of London' - title: The Prospects For Eye-Controlled Musical Performance - url: http://www.nime.org/proceedings/2014/nime2014_562.pdf - year: 2014 + keywords: 'sonic navigation, mobile music, spatial interaction, wireless audio streaming, + locative media, collaborative interfaces ' + pages: 13--18 + title: Large-Scale Mobile Audio Environments for Collaborative Musical Interaction + url: http://www.nime.org/proceedings/2008/nime2008_013.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: aplace2014 - abstract: 'This paper explores the design process of the AlphaSphere, an experimental - new musical instrument that has transitioned into scale production and international - distribution. Initially, the design intentions and engineering processes are covered. 
- The paper continues by briefly evaluating the user testing process and outlining - the ergonomics, communication protocol and software of the device. The paper closes - by questioning what it takes to evaluate success as a musical instrument.' - address: 'London, United Kingdom' - author: Adam Place and Liam Lacey and Thomas Mitchell - bibtex: "@inproceedings{aplace2014,\n abstract = {This paper explores the design\ - \ process of the AlphaSphere, an experimental new musical instrument that has\ - \ transitioned into scale production and international distribution. Initially,\ - \ the design intentions and engineering processes are covered. The paper continues\ - \ by briefly evaluating the user testing process and outlining the ergonomics,\ - \ communication protocol and software of the device. The paper closes by questioning\ - \ what it takes to evaluate success as a musical instrument.},\n address = {London,\ - \ United Kingdom},\n author = {Adam Place and Liam Lacey and Thomas Mitchell},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178903},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {399--402},\n publisher = {Goldsmiths, University\ - \ of London},\n title = {AlphaSphere from Prototype to Product},\n url = {http://www.nime.org/proceedings/2014/nime2014_568.pdf},\n\ - \ year = {2014}\n}\n" + ID: Fraietta2008 + abstract: 'Open Sound Control (OSC) is being used successfully as amessaging protocol + among many computers, gesturalcontrollers and multimedia systems. Although OSC + hasaddressed some of the shortcomings of MIDI, OSC cannotdeliver on its promises + as a real-time communication protocolfor constrained embedded systems. This paper + will examinesome of the advantages but also dispel some of the mythsconcerning + OSC. 
The paper will also describe how some of thebest features of OSC can be used + to develop a lightweightprotocol that is microcontroller friendly.' + address: 'Genoa, Italy' + author: 'Fraietta, Angelo' + bibtex: "@inproceedings{Fraietta2008,\n abstract = {Open Sound Control (OSC) is\ + \ being used successfully as amessaging protocol among many computers, gesturalcontrollers\ + \ and multimedia systems. Although OSC hasaddressed some of the shortcomings of\ + \ MIDI, OSC cannotdeliver on its promises as a real-time communication protocolfor\ + \ constrained embedded systems. This paper will examinesome of the advantages\ + \ but also dispel some of the mythsconcerning OSC. The paper will also describe\ + \ how some of thebest features of OSC can be used to develop a lightweightprotocol\ + \ that is microcontroller friendly.},\n address = {Genoa, Italy},\n author = {Fraietta,\ + \ Angelo},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179537},\n issn = {2220-4806},\n\ + \ keywords = {a,data transmission protocols,gestural controllers,has been implemented\ + \ as,midi,nime08,open sound control,osc},\n pages = {19--23},\n title = {Open\ + \ Sound Control : Constraints and Limitations},\n url = {http://www.nime.org/proceedings/2008/nime2008_019.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178903 + doi: 10.5281/zenodo.1179537 issn: 2220-4806 - month: June - pages: 399--402 - publisher: 'Goldsmiths, University of London' - title: AlphaSphere from Prototype to Product - url: http://www.nime.org/proceedings/2014/nime2014_568.pdf - year: 2014 + keywords: 'a,data transmission protocols,gestural controllers,has been implemented + as,midi,nime08,open sound control,osc' + pages: 19--23 + title: 'Open Sound Control : Constraints and Limitations' + url: 
http://www.nime.org/proceedings/2008/nime2008_019.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: aandersson2014 - abstract: 'In this paper we explore how we compose sound for an interactive tangible - and mobile interface; where the goal is to improve health and well-being for families - with children with disabilities. We describe the composition process from how - we decompose a linear beat-based and vocal sound material; recompose it with real-time - audio synthesis and composition rules into interactive Scenes. Scenes that make - it possible for the user to select, explore and recreate different ``sound worlds'''' - with the tangible interface as an instrument; create and play with it as a friend; - improvise and create; or relax with it as an ambient sounding furniture. We continue - discussing a user story, how the Scenes are recreated by amateur users, persons - with severe disabilities and family members; improvising with the mobile tangibles. - We discuss composition techniques for mixing sound, tangible-physical and lighting - elements in the Scenes. Based on observations we explore how a diverse audience - in the family and at school can recreate and improvise their own sound experience - and play together with others. We conclude by discussing the possible impact of - our findings for the NIME-community; how the techniques of decomposing, recomposing - and recreating sound, based on a relational perspective, could contribute to the - design of new instruments for musical expression.' 
- address: 'London, United Kingdom' - author: Anders-Petter Andersson and Birgitta Cappelen and Fredrik Olofsson - bibtex: "@inproceedings{aandersson2014,\n abstract = {In this paper we explore how\ - \ we compose sound for an interactive tangible and mobile interface; where the\ - \ goal is to improve health and well-being for families with children with disabilities.\ - \ We describe the composition process from how we decompose a linear beat-based\ - \ and vocal sound material; recompose it with real-time audio synthesis and composition\ - \ rules into interactive Scenes. Scenes that make it possible for the user to\ - \ select, explore and recreate different ``sound worlds'' with the tangible interface\ - \ as an instrument; create and play with it as a friend; improvise and create;\ - \ or relax with it as an ambient sounding furniture. We continue discussing a\ - \ user story, how the Scenes are recreated by amateur users, persons with severe\ - \ disabilities and family members; improvising with the mobile tangibles. We discuss\ - \ composition techniques for mixing sound, tangible-physical and lighting elements\ - \ in the Scenes. Based on observations we explore how a diverse audience in the\ - \ family and at school can recreate and improvise their own sound experience and\ - \ play together with others. 
We conclude by discussing the possible impact of\ - \ our findings for the NIME-community; how the techniques of decomposing, recomposing\ - \ and recreating sound, based on a relational perspective, could contribute to\ - \ the design of new instruments for musical expression.},\n address = {London,\ - \ United Kingdom},\n author = {Anders-Petter Andersson and Birgitta Cappelen and\ - \ Fredrik Olofsson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178702},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {529--532},\n publisher = {Goldsmiths,\ - \ University of London},\n title = {Designing Sound for Recreation and Well-Being},\n\ - \ url = {http://www.nime.org/proceedings/2014/nime2014_572.pdf},\n year = {2014}\n\ - }\n" + ID: Bozzolan2008 + abstract: 'The continuous evolutions in the human-computer interfaces field have + allowed the development of control devicesthat let have a more and more intuitive, + gestural and noninvasive interaction.Such devices find a natural employment also + in the musicapplied informatics and in particular in the electronic music,always + searching for new expressive means.This paper presents a prototype of a system + for the realtime control of sound spatialization in a multichannel configuration + with a multimodal interaction interface. The spatializer, called SMuSIM, employs + interaction devices thatrange from the simple and well-established mouse and keyboard + to a classical gaming used joystick (gamepad), finallyexploiting more advanced + and innovative typologies basedon image analysis (as a webcam).' 
+ address: 'Genoa, Italy' + author: 'Bozzolan, Matteo and Cospito, Giovanni' + bibtex: "@inproceedings{Bozzolan2008,\n abstract = {The continuous evolutions in\ + \ the human-computer interfaces field have allowed the development of control\ + \ devicesthat let have a more and more intuitive, gestural and noninvasive interaction.Such\ + \ devices find a natural employment also in the musicapplied informatics and in\ + \ particular in the electronic music,always searching for new expressive means.This\ + \ paper presents a prototype of a system for the realtime control of sound spatialization\ + \ in a multichannel configuration with a multimodal interaction interface. The\ + \ spatializer, called SMuSIM, employs interaction devices thatrange from the simple\ + \ and well-established mouse and keyboard to a classical gaming used joystick\ + \ (gamepad), finallyexploiting more advanced and innovative typologies basedon\ + \ image analysis (as a webcam).},\n address = {Genoa, Italy},\n author = {Bozzolan,\ + \ Matteo and Cospito, Giovanni},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179501},\n\ + \ issn = {2220-4806},\n keywords = {Sound spatialization, multimodal interaction,\ + \ interaction interfaces, EyesWeb, Pure data. 
},\n pages = {24--27},\n title =\ + \ {SMuSIM : a Prototype of Multichannel Spatialization System with Multimodal\ + \ Interaction Interface},\n url = {http://www.nime.org/proceedings/2008/nime2008_024.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178702 + doi: 10.5281/zenodo.1179501 issn: 2220-4806 - month: June - pages: 529--532 - publisher: 'Goldsmiths, University of London' - title: Designing Sound for Recreation and Well-Being - url: http://www.nime.org/proceedings/2014/nime2014_572.pdf - year: 2014 + keywords: 'Sound spatialization, multimodal interaction, interaction interfaces, + EyesWeb, Pure data. ' + pages: 24--27 + title: 'SMuSIM : a Prototype of Multichannel Spatialization System with Multimodal + Interaction Interface' + url: http://www.nime.org/proceedings/2008/nime2008_024.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Gaye2006 - address: 'Paris, France' - author: 'Gaye, Lalya and Holmquist, Lars E. and Behrendt, Frauke and Tanaka, Atau' - bibtex: "@inproceedings{Gaye2006,\n address = {Paris, France},\n author = {Gaye,\ - \ Lalya and Holmquist, Lars E. and Behrendt, Frauke and Tanaka, Atau},\n booktitle\ + ID: Nash2008 + abstract: 'Over the last century, composers have made increasingly ambitious experiments + with musical time, but have been impeded in expressing more temporally-complex + musical processes by the limitations of both music notations and human performers. + In this paper, we describe a computer-based notation and gestural control system + for independently manipulating the tempi of musical parts within a piece, at performance + time. We describe how the problem was approached, drawing upon feedback and suggestions + from consultations across multiple disciplines, seeking analogous problems in + other fields. 
Throughout, our approach is guided and, ultimately, assessed by + an established professional composer, who was able to interact with a working + prototype of the system. ' + address: 'Genoa, Italy' + author: 'Nash, Chris and Blackwell, Alan' + bibtex: "@inproceedings{Nash2008,\n abstract = {Over the last century, composers\ + \ have made increasingly ambitious experiments with musical time, but have been\ + \ impeded in expressing more temporally-complex musical processes by the limitations\ + \ of both music notations and human performers. In this paper, we describe a computer-based\ + \ notation and gestural control system for independently manipulating the tempi\ + \ of musical parts within a piece, at performance time. We describe how the problem\ + \ was approached, drawing upon feedback and suggestions from consultations across\ + \ multiple disciplines, seeking analogous problems in other fields. Throughout,\ + \ our approach is guided and, ultimately, assessed by an established professional\ + \ composer, who was able to interact with a working prototype of the system. 
},\n\ + \ address = {Genoa, Italy},\n author = {Nash, Chris and Blackwell, Alan},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176909},\n issn = {2220-4806},\n pages\ - \ = {22--25},\n title = {Mobile Music Technology: Report on an Emerging Community},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_022.pdf},\n year = {2006}\n\ - }\n" + \ Expression},\n doi = {10.5281/zenodo.1179603},\n issn = {2220-4806},\n keywords\ + \ = {composition,gesture,nime08,performance,polytempi,realtime,tempo},\n pages\ + \ = {28--33},\n title = {Realtime Representation and Gestural Control of Musical\ + \ Polytempi},\n url = {http://www.nime.org/proceedings/2008/nime2008_028.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176909 + doi: 10.5281/zenodo.1179603 issn: 2220-4806 - pages: 22--25 - title: 'Mobile Music Technology: Report on an Emerging Community' - url: http://www.nime.org/proceedings/2006/nime2006_022.pdf - year: 2006 + keywords: composition,gesture,nime08,performance,polytempi,realtime,tempo + pages: 28--33 + title: Realtime Representation and Gestural Control of Musical Polytempi + url: http://www.nime.org/proceedings/2008/nime2008_028.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Tanaka2006 - abstract: 'This paper presents the concepts and techniques used in afamily of location - based multimedia works. The paper hasthree main sections: 1.) to describe the - architecture of anaudio-visual hardware/software framework we havedeveloped for - the realization of a series of locative mediaartworks, 2.) to discuss the theoretical - and conceptualunderpinnings motivating the design of the technicalframework, and - 3.) to elicit from this, fundamental issuesand questions that can be generalized - and applicable to thegrowing practice of locative media.' 
- address: 'Paris, France' - author: 'Tanaka, Atau and Gemeinboeck, Petra' - bibtex: "@inproceedings{Tanaka2006,\n abstract = {This paper presents the concepts\ - \ and techniques used in afamily of location based multimedia works. The paper\ - \ hasthree main sections: 1.) to describe the architecture of anaudio-visual hardware/software\ - \ framework we havedeveloped for the realization of a series of locative mediaartworks,\ - \ 2.) to discuss the theoretical and conceptualunderpinnings motivating the design\ - \ of the technicalframework, and 3.) to elicit from this, fundamental issuesand\ - \ questions that can be generalized and applicable to thegrowing practice of locative\ - \ media.},\n address = {Paris, France},\n author = {Tanaka, Atau and Gemeinboeck,\ - \ Petra},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177013},\n issn = {2220-4806},\n\ - \ keywords = {Mobile music, urban fiction, locative media. 
},\n pages = {26--30},\n\ - \ title = {A Framework for Spatial Interaction in Locative Media},\n url = {http://www.nime.org/proceedings/2006/nime2006_026.pdf},\n\ - \ year = {2006}\n}\n" + ID: Laurson2008 + address: 'Genoa, Italy' + author: 'Laurson, Mikael and Kuuskankare, Mika' + bibtex: "@inproceedings{Laurson2008,\n address = {Genoa, Italy},\n author = {Laurson,\ + \ Mikael and Kuuskankare, Mika},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179589},\n\ + \ issn = {2220-4806},\n keywords = {synthesis control, expressive timing, playing\ + \ styles },\n pages = {34--37},\n title = {Towards Idiomatic and Flexible Score-based\ + \ Gestural Control with a Scripting Language},\n url = {http://www.nime.org/proceedings/2008/nime2008_034.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177013 + doi: 10.5281/zenodo.1179589 issn: 2220-4806 - keywords: 'Mobile music, urban fiction, locative media. ' - pages: 26--30 - title: A Framework for Spatial Interaction in Locative Media - url: http://www.nime.org/proceedings/2006/nime2006_026.pdf - year: 2006 + keywords: 'synthesis control, expressive timing, playing styles ' + pages: 34--37 + title: Towards Idiomatic and Flexible Score-based Gestural Control with a Scripting + Language + url: http://www.nime.org/proceedings/2008/nime2008_034.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Rohs2006 - address: 'Paris, France' - author: 'Rohs, Michael and Essl, Georg and Roth, Martin' - bibtex: "@inproceedings{Rohs2006,\n address = {Paris, France},\n author = {Rohs,\ - \ Michael and Essl, Georg and Roth, Martin},\n booktitle = {Proceedings of the\ + ID: Bouenard2008 + abstract: 'A new interface for visualizing and analyzing percussion gestures is + presented, proposing enhancements of existing motion capture analysis tools. 
This + is achieved by offering apercussion gesture analysis protocol using motion capture.A + virtual character dynamic model is then designed in order to take advantage of + gesture characteristics, yielding toimprove gesture analysis with visualization + and interactioncues of different types.' + address: 'Genoa, Italy' + author: 'Bouënard, Alexandre and Gibet, Sylvie and Wanderley, Marcelo M.' + bibtex: "@inproceedings{Bouenard2008,\n abstract = {A new interface for visualizing\ + \ and analyzing percussion gestures is presented, proposing enhancements of existing\ + \ motion capture analysis tools. This is achieved by offering apercussion gesture\ + \ analysis protocol using motion capture.A virtual character dynamic model is\ + \ then designed in order to take advantage of gesture characteristics, yielding\ + \ toimprove gesture analysis with visualization and interactioncues of different\ + \ types.},\n address = {Genoa, Italy},\n author = {Bou\\''{e}nard, Alexandre and\ + \ Gibet, Sylvie and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176997},\n issn = {2220-4806},\n pages = {31--36},\n title\ - \ = {CaMus: Live Music Performance using Camera Phones and Visual Grid Tracking},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_031.pdf},\n year = {2006}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176997 - issn: 2220-4806 - pages: 31--36 - title: 'CaMus: Live Music Performance using Camera Phones and Visual Grid Tracking' - url: http://www.nime.org/proceedings/2006/nime2006_031.pdf - year: 2006 - - -- ENTRYTYPE: inproceedings - ID: Schiemer2006 - abstract: 'This paper describes two new live performance scenarios for performing - music using bluetooth-enabled mobile phones. 
Interaction between mobile phones - via wireless link is a key feature of the performance interface for each scenario. - Both scenarios are discussed in the context of two publicly performed works for - an ensemble of players in which mobile phone handsets are used both as sound sources - and as hand-held controllers. In both works mobile phones are mounted in a specially - devised pouch attached to a cord and physically swung to produce audio chorusing. - During performance some players swing phones while others operate phones as hand-held - controllers. Wireless connectivity enables interaction between flying and hand-held - phones. Each work features different bluetooth implementations. In one a dedicated - mobile phone acts as a server that interconnects multiple clients, while in the - other point to point communication takes place between clients on an ad hoc basis. - The paper summarises bluetooth tools designed for live performance realisation - and concludes with a comparative evaluation of both scenarios for future implementation - of performance by large ensembles of nonexpert players performing microtonal music - using ubiquitous technology. ' - address: 'Paris, France' - author: 'Schiemer, Greg and Havryliv, Mark' - bibtex: "@inproceedings{Schiemer2006,\n abstract = {This paper describes two new\ - \ live performance scenarios for performing music using bluetooth-enabled mobile\ - \ phones. Interaction between mobile phones via wireless link is a key feature\ - \ of the performance interface for each scenario. Both scenarios are discussed\ - \ in the context of two publicly performed works for an ensemble of players in\ - \ which mobile phone handsets are used both as sound sources and as hand-held\ - \ controllers. In both works mobile phones are mounted in a specially devised\ - \ pouch attached to a cord and physically swung to produce audio chorusing. 
During\ - \ performance some players swing phones while others operate phones as hand-held\ - \ controllers. Wireless connectivity enables interaction between flying and hand-held\ - \ phones. Each work features different bluetooth implementations. In one a dedicated\ - \ mobile phone acts as a server that interconnects multiple clients, while in\ - \ the other point to point communication takes place between clients on an ad\ - \ hoc basis. The paper summarises bluetooth tools designed for live performance\ - \ realisation and concludes with a comparative evaluation of both scenarios for\ - \ future implementation of performance by large ensembles of nonexpert players\ - \ performing microtonal music using ubiquitous technology. },\n address = {Paris,\ - \ France},\n author = {Schiemer, Greg and Havryliv, Mark},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176999},\n issn = {2220-4806},\n keywords = {Java 2 Micro\ - \ Edition; j2me; Pure Data; PD; Real-Time Media Performance; Just Intonation.\ - \ },\n pages = {37--42},\n title = {Pocket Gamelan: Tuneable Trajectories for\ - \ Flying Sources in Mandala 3 and Mandala 4},\n url = {http://www.nime.org/proceedings/2006/nime2006_037.pdf},\n\ - \ year = {2006}\n}\n" + \ {10.5281/zenodo.1179497},\n issn = {2220-4806},\n keywords = {Gesture and sound,\ + \ interface, percussion gesture, virtual character, interaction. },\n pages =\ + \ {38--43},\n title = {Enhancing the Visualization of Percussion Gestures by Virtual\ + \ Character Animation},\n url = {http://www.nime.org/proceedings/2008/nime2008_038.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176999 + doi: 10.5281/zenodo.1179497 issn: 2220-4806 - keywords: 'Java 2 Micro Edition; j2me; Pure Data; PD; Real-Time Media Performance; - Just Intonation. 
' - pages: 37--42 - title: 'Pocket Gamelan: Tuneable Trajectories for Flying Sources in Mandala 3 and - Mandala 4' - url: http://www.nime.org/proceedings/2006/nime2006_037.pdf - year: 2006 + keywords: 'Gesture and sound, interface, percussion gesture, virtual character, + interaction. ' + pages: 38--43 + title: Enhancing the Visualization of Percussion Gestures by Virtual Character Animation + url: http://www.nime.org/proceedings/2008/nime2008_038.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Birchfield2006 - abstract: 'Physically situated public art poses significant challenges for the design - and realization of interactive, electronic sound works. Consideration of diverse - audiences, environmental sensitivity, exhibition conditions, and logistics must - guide the artwork. We describe our work in this area, using a recently installed - public piece, Transition Soundings, as a case study that reveals a specialized - interface and open-ended approach to interactive music making. This case study - serves as a vehicle for examination of the real world challenges posed by public - art and its outcomes. ' - address: 'Paris, France' - author: 'Birchfield, David and Phillips, Kelly and Kidané, Assegid and Lorig, David' - bibtex: "@inproceedings{Birchfield2006,\n abstract = {Physically situated public\ - \ art poses significant challenges for the design and realization of interactive,\ - \ electronic sound works. Consideration of diverse audiences, environmental sensitivity,\ - \ exhibition conditions, and logistics must guide the artwork. We describe our\ - \ work in this area, using a recently installed public piece, Transition Soundings,\ - \ as a case study that reveals a specialized interface and open-ended approach\ - \ to interactive music making. This case study serves as a vehicle for examination\ - \ of the real world challenges posed by public art and its outcomes. 
},\n address\ - \ = {Paris, France},\n author = {Birchfield, David and Phillips, Kelly and Kidan\\\ - '{e}, Assegid and Lorig, David},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176873},\n\ - \ issn = {2220-4806},\n keywords = {Music, Sound, Interactivity, Arts, Public\ - \ Art, Network Systems, Sculpture, Installation Art, Embedded Electronics. },\n\ - \ pages = {43--48},\n title = {Interactive Public Sound Art: a case study},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_043.pdf},\n year = {2006}\n\ + ID: Young2008 + address: 'Genoa, Italy' + author: 'Young, Diana' + bibtex: "@inproceedings{Young2008,\n address = {Genoa, Italy},\n author = {Young,\ + \ Diana},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177457},\n issn = {2220-4806},\n\ + \ keywords = {bowing, gesture, playing technique, principal component anal- ysis,\ + \ classification },\n pages = {44--48},\n title = {Classification of Common Violin\ + \ Bowing Techniques Using Gesture Data from a Playable Measurement System},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_044.pdf},\n year = {2008}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176873 + doi: 10.5281/zenodo.1177457 issn: 2220-4806 - keywords: 'Music, Sound, Interactivity, Arts, Public Art, Network Systems, Sculpture, - Installation Art, Embedded Electronics. 
' - pages: 43--48 - title: 'Interactive Public Sound Art: a case study' - url: http://www.nime.org/proceedings/2006/nime2006_043.pdf - year: 2006 + keywords: 'bowing, gesture, playing technique, principal component anal- ysis, classification ' + pages: 44--48 + title: Classification of Common Violin Bowing Techniques Using Gesture Data from + a Playable Measurement System + url: http://www.nime.org/proceedings/2008/nime2008_044.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Wang2006 - address: 'Paris, France' - author: 'Wang, Ge and Misra, Ananya and Cook, Perry R.' - bibtex: "@inproceedings{Wang2006,\n address = {Paris, France},\n author = {Wang,\ - \ Ge and Misra, Ananya and Cook, Perry R.},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177017},\n issn = {2220-4806},\n keywords = {Graphical interfaces,\ - \ collaborative performance, networking, computer music ensemble, emergence, visualization,\ - \ education. },\n pages = {49--52},\n title = {Building Collaborative Graphical\ - \ interFaces in the Audicle},\n url = {http://www.nime.org/proceedings/2006/nime2006_049.pdf},\n\ - \ year = {2006}\n}\n" + ID: Pakarinen2008 + abstract: 'This article discusses a virtual slide guitar instrument, recently introduced + in [7]. The instrument consists of a novelphysics-based synthesis model and a + gestural user interface.The synthesis engine uses energy-compensated time-varyingdigital + waveguides. The string algorithm also contains aparametric model for synthesizing + the tube-string contactsounds. The real-time virtual slide guitar user interface + employs optical gesture recognition, so that the user can playthis virtual instrument + simply by making slide guitar playing gestures in front of a camera.' 
+ address: 'Genoa, Italy' + author: 'Pakarinen, Jyri and Välimäki, Vesa and Puputti, Tapio' + bibtex: "@inproceedings{Pakarinen2008,\n abstract = {This article discusses a virtual\ + \ slide guitar instrument, recently introduced in [7]. The instrument consists\ + \ of a novelphysics-based synthesis model and a gestural user interface.The synthesis\ + \ engine uses energy-compensated time-varyingdigital waveguides. The string algorithm\ + \ also contains aparametric model for synthesizing the tube-string contactsounds.\ + \ The real-time virtual slide guitar user interface employs optical gesture recognition,\ + \ so that the user can playthis virtual instrument simply by making slide guitar\ + \ playing gestures in front of a camera.},\n address = {Genoa, Italy},\n author\ + \ = {Pakarinen, Jyri and V\\''{a}lim\\''{a}ki, Vesa and Puputti, Tapio},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179607},\n issn = {2220-4806},\n keywords\ + \ = {Sound synthesis, slide guitar, gesture control, physical mod- eling },\n\ + \ pages = {49--52},\n title = {Slide Guitar Synthesizer with Gestural Control},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_049.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177017 + doi: 10.5281/zenodo.1179607 issn: 2220-4806 - keywords: 'Graphical interfaces, collaborative performance, networking, computer - music ensemble, emergence, visualization, education. 
' + keywords: 'Sound synthesis, slide guitar, gesture control, physical mod- eling ' pages: 49--52 - title: Building Collaborative Graphical interFaces in the Audicle - url: http://www.nime.org/proceedings/2006/nime2006_049.pdf - year: 2006 + title: Slide Guitar Synthesizer with Gestural Control + url: http://www.nime.org/proceedings/2008/nime2008_049.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Rebelo2006 - abstract: 'The culture of laptop improvisation has grown tremendously in recent - years. The development of personalized software instruments presents interesting - issues in the context of improvised group performances. This paper examines an - approach that is aimed at increasing the modes of interactivity between laptop - performers and at the same time suggests ways in which audiences can better discern - and identify the sonic characteristics of each laptop performer. We refer to software - implementation that was developed for the BLISS networked laptop ensemble with - view to designing a shared format for the exchange of messages within local and - internet based networks. ' - address: 'Paris, France' - author: 'Rebelo, Pedro and Renaud, Alain B.' - bibtex: "@inproceedings{Rebelo2006,\n abstract = {The culture of laptop improvisation\ - \ has grown tremendously in recent years. The development of personalized software\ - \ instruments presents interesting issues in the context of improvised group performances.\ - \ This paper examines an approach that is aimed at increasing the modes of interactivity\ - \ between laptop performers and at the same time suggests ways in which audiences\ - \ can better discern and identify the sonic characteristics of each laptop performer.\ - \ We refer to software implementation that was developed for the BLISS networked\ - \ laptop ensemble with view to designing a shared format for the exchange of messages\ - \ within local and internet based networks. 
},\n address = {Paris, France},\n\ - \ author = {Rebelo, Pedro and Renaud, Alain B.},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176993},\n issn = {2220-4806},\n keywords = {Networked audio\ - \ technologies, laptop ensemble, centralized audio server, improvisation },\n\ - \ pages = {53--56},\n title = {The Frequencyliator -- Distributing Structures\ - \ for Networked Laptop Improvisation},\n url = {http://www.nime.org/proceedings/2006/nime2006_053.pdf},\n\ - \ year = {2006}\n}\n" + ID: Lahdeoja2008 + address: 'Genoa, Italy' + author: 'L\''''{a}hdeoja, Otso' + bibtex: "@inproceedings{Lahdeoja2008,\n address = {Genoa, Italy},\n author = {L\\\ + ''{a}hdeoja, Otso},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179585},\n\ + \ issn = {2220-4806},\n keywords = {Augmented instrument, electric guitar, gesture-sound\ + \ relationship },\n pages = {53--56},\n title = {An Approach to Instrument Augmentation\ + \ : the Electric Guitar},\n url = {http://www.nime.org/proceedings/2008/nime2008_053.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176993 + doi: 10.5281/zenodo.1179585 issn: 2220-4806 - keywords: 'Networked audio technologies, laptop ensemble, centralized audio server, - improvisation ' + keywords: 'Augmented instrument, electric guitar, gesture-sound relationship ' pages: 53--56 - title: The Frequencyliator -- Distributing Structures for Networked Laptop Improvisation - url: http://www.nime.org/proceedings/2006/nime2006_053.pdf - year: 2006 + title: 'An Approach to Instrument Augmentation : the Electric Guitar' + url: http://www.nime.org/proceedings/2008/nime2008_053.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Naef2006 - address: 'Paris, France' - author: 'Naef, Martin and 
Collicott, Daniel' - bibtex: "@inproceedings{Naef2006,\n address = {Paris, France},\n author = {Naef,\ - \ Martin and Collicott, Daniel},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176975},\n\ - \ issn = {2220-4806},\n pages = {57--60},\n title = {A VR Interface for Collaborative\ - \ {3D} Audio Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_057.pdf},\n\ - \ year = {2006}\n}\n" + ID: Raisanen2008 + abstract: 'This paper describes the Sormina, a new virtual and tangibleinstrument, + which has its origins in both virtual technology andthe heritage of traditional + instrument design. The motivationbehind the project is presented, as well as hardware + andsoftware design. Insights gained through collaboration withacoustic musicians + are presented, as well as comparison tohistorical instrument design.' + address: 'Genoa, Italy' + author: 'Räisänen, Juhani' + bibtex: "@inproceedings{Raisanen2008,\n abstract = {This paper describes the Sormina,\ + \ a new virtual and tangibleinstrument, which has its origins in both virtual\ + \ technology andthe heritage of traditional instrument design. The motivationbehind\ + \ the project is presented, as well as hardware andsoftware design. Insights gained\ + \ through collaboration withacoustic musicians are presented, as well as comparison\ + \ tohistorical instrument design.},\n address = {Genoa, Italy},\n author = {R\\\ + ''{a}is\\''{a}nen, Juhani},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179617},\n\ + \ issn = {2220-4806},\n keywords = {Gestural controller, digital musical instrument,\ + \ usability, music history, design. 
},\n pages = {57--60},\n title = {Sormina\ + \ -- a New Virtual and Tangible Instrument},\n url = {http://www.nime.org/proceedings/2008/nime2008_057.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176975 + doi: 10.5281/zenodo.1179617 issn: 2220-4806 + keywords: 'Gestural controller, digital musical instrument, usability, music history, + design. ' pages: 57--60 - title: 'A VR Interface for Collaborative {3D} Audio Performance' - url: http://www.nime.org/proceedings/2006/nime2006_057.pdf - year: 2006 - - -- ENTRYTYPE: inproceedings - ID: Geiger2006 - address: 'Paris, France' - author: 'Geiger, G\''''{u}nter' - bibtex: "@inproceedings{Geiger2006,\n address = {Paris, France},\n author = {Geiger,\ - \ G\\''{u}nter},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176911},\n\ - \ issn = {2220-4806},\n keywords = {touch screen, PDA, Pure Data, controller,\ - \ mobile musical instrument, human computer interaction },\n pages = {61--64},\n\ - \ title = {Using the Touch Screen as a Controller for Portable Computer Music\ - \ Instruments},\n url = {http://www.nime.org/proceedings/2006/nime2006_061.pdf},\n\ - \ year = {2006}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176911 - issn: 2220-4806 - keywords: 'touch screen, PDA, Pure Data, controller, mobile musical instrument, - human computer interaction ' - pages: 61--64 - title: Using the Touch Screen as a Controller for Portable Computer Music Instruments - url: http://www.nime.org/proceedings/2006/nime2006_061.pdf - year: 2006 + title: Sormina -- a New Virtual and Tangible Instrument + url: http://www.nime.org/proceedings/2008/nime2008_057.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Holm2006 - abstract: 'This paper discusses the concept of using 
background music to control - video game parameters and thus actions on the screen. Each song selected by the - player makes the game look different and behave variedly. The concept is explored - by modifying an existing video game and playtesting it with different kinds of - MIDI music. Several examples of mapping MIDI parameters to game events are presented. - As mobile phones'' MIDI players do not usually have a dedicated callback API, - a real-time MIDI analysis software for Symbian OS was implemented. Future developments - including real-time group performance as a way to control game content are also - considered. ' - address: 'Paris, France' - author: 'Holm, Jukka and Arrasvuori, Juha and Havukainen, Kai' - bibtex: "@inproceedings{Holm2006,\n abstract = {This paper discusses the concept\ - \ of using background music to control video game parameters and thus actions\ - \ on the screen. Each song selected by the player makes the game look different\ - \ and behave variedly. The concept is explored by modifying an existing video\ - \ game and playtesting it with different kinds of MIDI music. Several examples\ - \ of mapping MIDI parameters to game events are presented. As mobile phones' MIDI\ - \ players do not usually have a dedicated callback API, a real-time MIDI analysis\ - \ software for Symbian OS was implemented. Future developments including real-time\ - \ group performance as a way to control game content are also considered. },\n\ - \ address = {Paris, France},\n author = {Holm, Jukka and Arrasvuori, Juha and\ - \ Havukainen, Kai},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176925},\n\ - \ issn = {2220-4806},\n keywords = {Games, MIDI, music, rhythm games, background\ - \ music reactive games, musically controlled games, MIDI-controlled games, Virtual\ - \ Sequencer. 
},\n pages = {65--70},\n title = {Using {MIDI} to Modify Video Game\ - \ Content},\n url = {http://www.nime.org/proceedings/2006/nime2006_065.pdf},\n\ - \ year = {2006}\n}\n" + ID: Berdahl2008a + abstract: 'The music community has long had a strong interest in haptic technology. + Recently, more effort has been put into making it more and more accessible to + instrument designers.This paper covers some of these technologies with the aimof + helping instrument designers add haptic feedback to theirinstruments. We begin + by giving a brief overview of practicalactuators. Next, we compare and contrast + using embeddedmicrocontrollers versus general purpose computers as controllers. + Along the way, we mention some common softwareenvironments for implementing control + algorithms. Then wediscuss the fundamental haptic control algorithms as well assome + more complex ones. Finally, we present two practicaland effective haptic musical + instruments: the haptic drumand the Cellomobo.' + address: 'Genoa, Italy' + author: 'Berdahl, Edgar and Steiner, Hans-Christoph and Oldham, Collin' + bibtex: "@inproceedings{Berdahl2008a,\n abstract = {The music community has long\ + \ had a strong interest in haptic technology. Recently, more effort has been put\ + \ into making it more and more accessible to instrument designers.This paper covers\ + \ some of these technologies with the aimof helping instrument designers add haptic\ + \ feedback to theirinstruments. We begin by giving a brief overview of practicalactuators.\ + \ Next, we compare and contrast using embeddedmicrocontrollers versus general\ + \ purpose computers as controllers. Along the way, we mention some common softwareenvironments\ + \ for implementing control algorithms. Then wediscuss the fundamental haptic control\ + \ algorithms as well assome more complex ones. 
Finally, we present two practicaland\ + \ effective haptic musical instruments: the haptic drumand the Cellomobo.},\n\ + \ address = {Genoa, Italy},\n author = {Berdahl, Edgar and Steiner, Hans-Christoph\ + \ and Oldham, Collin},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179495},\n\ + \ issn = {2220-4806},\n keywords = {haptic, actuator, practical, immersion, embedded,\ + \ sampling rate, woofer, haptic drum, Cellomobo },\n pages = {61--66},\n title\ + \ = {Practical Hardware and Algorithms for Creating Haptic Musical Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_061.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176925 + doi: 10.5281/zenodo.1179495 issn: 2220-4806 - keywords: 'Games, MIDI, music, rhythm games, background music reactive games, musically - controlled games, MIDI-controlled games, Virtual Sequencer. ' - pages: 65--70 - title: Using MIDI to Modify Video Game Content - url: http://www.nime.org/proceedings/2006/nime2006_065.pdf - year: 2006 + keywords: 'haptic, actuator, practical, immersion, embedded, sampling rate, woofer, + haptic drum, Cellomobo ' + pages: 61--66 + title: Practical Hardware and Algorithms for Creating Haptic Musical Instruments + url: http://www.nime.org/proceedings/2008/nime2008_061.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Lippit2006 - abstract: "Turntable musicians have yet to explore new expressions with digital\ - \ technology. New higher-level development tools open possibilities for these\ - \ artists to build their own instruments that can achieve artistic goals commercial\ - \ products cannot. This paper will present a rough overview on the practice and\ - \ recent development of turntable music, followed by descriptions of two projects\ - \ by the ,\n,\nauthor. 
" - address: 'Paris, France' - author: 'Lippit, Takuro M.' - bibtex: "@inproceedings{Lippit2006,\n abstract = {Turntable musicians have yet to\ - \ explore new expressions with digital technology. New higher-level development\ - \ tools open possibilities for these artists to build their own instruments that\ - \ can achieve artistic goals commercial products cannot. This paper will present\ - \ a rough overview on the practice and recent development of turntable music,\ - \ followed by descriptions of two projects by the ,\n,\nauthor. },\n address =\ - \ {Paris, France},\n author = {Lippit, Takuro M.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176965},\n issn = {2220-4806},\n keywords = {Turntable\ - \ music, DJ, turntablist, improvisation, Max/MSP, PIC Microcontroller, Physical\ - \ Computing },\n pages = {71--74},\n title = {Turntable Music in the Digital Era:\ - \ Designing Alternative Tools for New Turntable Expression},\n url = {http://www.nime.org/proceedings/2006/nime2006_071.pdf},\n\ - \ year = {2006}\n}\n" + ID: Zoran2008 + address: 'Genoa, Italy' + author: 'Zoran, Amit and Maes, Pattie' + bibtex: "@inproceedings{Zoran2008,\n address = {Genoa, Italy},\n author = {Zoran,\ + \ Amit and Maes, Pattie},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177463},\n\ + \ issn = {2220-4806},\n keywords = {nime08},\n pages = {67--70},\n title = {Considering\ + \ Virtual \\& Physical Aspects in Acoustic Guitar Design},\n url = {http://www.nime.org/proceedings/2008/nime2008_067.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176965 + doi: 10.5281/zenodo.1177463 issn: 2220-4806 - keywords: 'Turntable music, DJ, turntablist, improvisation, Max/MSP, PIC Microcontroller, - Physical Computing ' - 
pages: 71--74 - title: 'Turntable Music in the Digital Era: Designing Alternative Tools for New - Turntable Expression' - url: http://www.nime.org/proceedings/2006/nime2006_071.pdf - year: 2006 + keywords: nime08 + pages: 67--70 + title: Considering Virtual \& Physical Aspects in Acoustic Guitar Design + url: http://www.nime.org/proceedings/2008/nime2008_067.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Kiser2006 - abstract: 'This report presents an interface for musical performance called the - spinCycle. spinCycle enables performers to make visual patterns with brightly - colored objects on a spinning turntable platter that get translated into musical - arrangements in realtime. I will briefly describe the hardware implementation - and the sound generation logic used, as well as provide a historical background - for the project.' - address: 'Paris, France' - author: 'Kiser, Spencer' - bibtex: "@inproceedings{Kiser2006,\n abstract = {This report presents an interface\ - \ for musical performance called the spinCycle. spinCycle enables performers to\ - \ make visual patterns with brightly colored objects on a spinning turntable platter\ - \ that get translated into musical arrangements in realtime. 
I will briefly describe\ - \ the hardware implementation and the sound generation logic used, as well as\ - \ provide a historical background for the project.},\n address = {Paris, France},\n\ - \ author = {Kiser, Spencer},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176941},\n\ - \ issn = {2220-4806},\n keywords = {Color-tracking, turntable, visualization,\ - \ interactivity, synesthesia },\n pages = {75--76},\n title = {spinCycle: a Color-Tracking\ - \ Turntable Sequencer},\n url = {http://www.nime.org/proceedings/2006/nime2006_075.pdf},\n\ - \ year = {2006}\n}\n" + ID: Menzies2008 + abstract: 'Phya is an open source C++ library originally designed foradding physically + modeled contact sounds into computergame environments equipped with physics engines. + We review some aspects of this system, and also consider it fromthe purely aesthetic + perspective of musical expression.' + address: 'Genoa, Italy' + author: 'Menzies, Dylan' + bibtex: "@inproceedings{Menzies2008,\n abstract = {Phya is an open source C++ library\ + \ originally designed foradding physically modeled contact sounds into computergame\ + \ environments equipped with physics engines. 
We review some aspects of this system,\ + \ and also consider it fromthe purely aesthetic perspective of musical expression.},\n\ + \ address = {Genoa, Italy},\n author = {Menzies, Dylan},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179599},\n issn = {2220-4806},\n keywords = {NIME, musical\ + \ expression, virtual reality, physical model- ing, audio synthesis },\n pages\ + \ = {71--76},\n title = {Virtual Intimacy : Phya as an Instrument},\n url = {http://www.nime.org/proceedings/2008/nime2008_071.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176941 + doi: 10.5281/zenodo.1179599 issn: 2220-4806 - keywords: 'Color-tracking, turntable, visualization, interactivity, synesthesia ' - pages: 75--76 - title: 'spinCycle: a Color-Tracking Turntable Sequencer' - url: http://www.nime.org/proceedings/2006/nime2006_075.pdf - year: 2006 + keywords: 'NIME, musical expression, virtual reality, physical model- ing, audio + synthesis ' + pages: 71--76 + title: 'Virtual Intimacy : Phya as an Instrument' + url: http://www.nime.org/proceedings/2008/nime2008_071.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Lee2006a - address: 'Paris, France' - author: 'Lee, Jason' - bibtex: "@inproceedings{Lee2006a,\n address = {Paris, France},\n author = {Lee,\ - \ Jason},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176959},\n issn = {2220-4806},\n\ - \ pages = {77--78},\n title = {The Chopping Board: Real-time Sample Editor},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_077.pdf},\n year = {2006}\n\ - }\n" + ID: Butler2008 + abstract: 'In this paper I discuss the importance of and need forpedagogical materials + to support the development of newinterfaces and new instruments for electronic + music. 
I describemy method for creating a graduated series of pedagogicaletudes + composed using Max/MSP. The etudes will helpperformers and instrument designers + learn the most commonlyused basic skills necessary to perform with interactiveelectronic + music instruments. My intention is that the finalseries will guide a beginner + from these initial steps through agraduated method, eventually incorporating some + of the moreadvanced techniques regularly used by electronic musiccomposers.I describe + the order of the series, and discuss the benefits (bothto performers and to composers) + of having a logical sequence ofskill-based etudes. I also connect the significance + of skilledperformers to the development of two essential areas that Iperceive + are still just emerging in this field: the creation of acomposed repertoire and + an increase in musical expressionduring performance.' + address: 'Genoa, Italy' + author: 'Butler, Jennifer' + bibtex: "@inproceedings{Butler2008,\n abstract = {In this paper I discuss the importance\ + \ of and need forpedagogical materials to support the development of newinterfaces\ + \ and new instruments for electronic music. I describemy method for creating a\ + \ graduated series of pedagogicaletudes composed using Max/MSP. The etudes will\ + \ helpperformers and instrument designers learn the most commonlyused basic skills\ + \ necessary to perform with interactiveelectronic music instruments. My intention\ + \ is that the finalseries will guide a beginner from these initial steps through\ + \ agraduated method, eventually incorporating some of the moreadvanced techniques\ + \ regularly used by electronic musiccomposers.I describe the order of the series,\ + \ and discuss the benefits (bothto performers and to composers) of having a logical\ + \ sequence ofskill-based etudes. 
I also connect the significance of skilledperformers\ + \ to the development of two essential areas that Iperceive are still just emerging\ + \ in this field: the creation of acomposed repertoire and an increase in musical\ + \ expressionduring performance.},\n address = {Genoa, Italy},\n author = {Butler,\ + \ Jennifer},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179503},\n issn\ + \ = {2220-4806},\n keywords = {composition,etudes,max,msp,musical controllers,musical\ + \ expression,nime08,pedagogy,repertoire},\n pages = {77--80},\n title = {Creating\ + \ Pedagogical Etudes for Interactive Instruments},\n url = {http://www.nime.org/proceedings/2008/nime2008_077.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176959 + doi: 10.5281/zenodo.1179503 issn: 2220-4806 - pages: 77--78 - title: 'The Chopping Board: Real-time Sample Editor' - url: http://www.nime.org/proceedings/2006/nime2006_077.pdf - year: 2006 + keywords: 'composition,etudes,max,msp,musical controllers,musical expression,nime08,pedagogy,repertoire' + pages: 77--80 + title: Creating Pedagogical Etudes for Interactive Instruments + url: http://www.nime.org/proceedings/2008/nime2008_077.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: DeJong2006 - address: 'Paris, France' - author: 'de Jong, Staas' - bibtex: "@inproceedings{DeJong2006,\n address = {Paris, France},\n author = {de\ - \ Jong, Staas},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176935},\n\ - \ issn = {2220-4806},\n pages = {79--80},\n title = {A Tactile Closed-Loop Device\ - \ for Musical Interaction},\n url = {http://www.nime.org/proceedings/2006/nime2006_079.pdf},\n\ - \ year = {2006}\n}\n" + ID: Stowell2008 + abstract: 'The expressive and creative affordances of an 
interface aredifficult + to evaluate, particularly with quantitative methods.However, rigorous qualitative + methods do exist and can beused to investigate such topics. We present a methodologybased + around user studies involving Discourse Analysis ofspeech. We also present an + example of the methodologyin use: we evaluate a musical interface which utilises + vocaltimbre, with a user group of beatboxers.' + address: 'Genoa, Italy' + author: 'Stowell, Dan and Plumbley, Mark D. and Bryan-Kinns, Nick' + bibtex: "@inproceedings{Stowell2008,\n abstract = {The expressive and creative affordances\ + \ of an interface aredifficult to evaluate, particularly with quantitative methods.However,\ + \ rigorous qualitative methods do exist and can beused to investigate such topics.\ + \ We present a methodologybased around user studies involving Discourse Analysis\ + \ ofspeech. We also present an example of the methodologyin use: we evaluate a\ + \ musical interface which utilises vocaltimbre, with a user group of beatboxers.},\n\ + \ address = {Genoa, Italy},\n author = {Stowell, Dan and Plumbley, Mark D. 
and\ + \ Bryan-Kinns, Nick},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179631},\n\ + \ issn = {2220-4806},\n keywords = {discourse analysis,evaluation,nime08,qualitative\ + \ methods,voice},\n pages = {81--86},\n title = {Discourse Analysis Evaluation\ + \ Method for Expressive Musical Interfaces},\n url = {http://www.nime.org/proceedings/2008/nime2008_081.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176935 + doi: 10.5281/zenodo.1179631 issn: 2220-4806 - pages: 79--80 - title: A Tactile Closed-Loop Device for Musical Interaction - url: http://www.nime.org/proceedings/2006/nime2006_079.pdf - year: 2006 + keywords: 'discourse analysis,evaluation,nime08,qualitative methods,voice' + pages: 81--86 + title: Discourse Analysis Evaluation Method for Expressive Musical Interfaces + url: http://www.nime.org/proceedings/2008/nime2008_081.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Bennett2006 - abstract: 'The PETECUBE project consists of a series of musical interfaces designed - to explore multi-modal feedback. This paper will briefly describe the definition - of multimodal feedback, the aim of the project, the development of the first PETECUBE - and proposed further work. ' - address: 'Paris, France' - author: 'Bennett, Peter' - bibtex: "@inproceedings{Bennett2006,\n abstract = {The PETECUBE project consists\ - \ of a series of musical interfaces designed to explore multi-modal feedback.\ - \ This paper will briefly describe the definition of multimodal feedback, the\ - \ aim of the project, the development of the first PETECUBE and proposed further\ - \ work. 
},\n address = {Paris, France},\n author = {Bennett, Peter},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176869},\n issn = {2220-4806},\n keywords\ - \ = {Multi-modal Feedback. Haptics. Musical Instrument. },\n pages = {81--84},\n\ - \ title = {{PET}ECUBE: a Multimodal Feedback Interface},\n url = {http://www.nime.org/proceedings/2006/nime2006_081.pdf},\n\ - \ year = {2006}\n}\n" + ID: Kiefer2008 + abstract: 'There is small but useful body of research concerning theevaluation of + musical interfaces with HCI techniques. Inthis paper, we present a case study + in implementing thesetechniques; we describe a usability experiment which evaluated + the Nintendo Wiimote as a musical controller, andreflect on the effectiveness + of our choice of HCI methodologies in this context. The study offered some valuable + results,but our picture of the Wiimote was incomplete as we lackeddata concerning + the participants'' instantaneous musical experience. Recent trends in HCI are + leading researchers totackle this problem of evaluating user experience; we reviewsome + of their work and suggest that with some adaptation itcould provide useful new + tools and methodologies for computer musicians.' 
+ address: 'Genoa, Italy' + author: 'Kiefer, Chris and Collins, Nick and Fitzpatrick, Geraldine' + bibtex: "@inproceedings{Kiefer2008,\n abstract = {There is small but useful body\ + \ of research concerning theevaluation of musical interfaces with HCI techniques.\ + \ Inthis paper, we present a case study in implementing thesetechniques; we describe\ + \ a usability experiment which evaluated the Nintendo Wiimote as a musical controller,\ + \ andreflect on the effectiveness of our choice of HCI methodologies in this context.\ + \ The study offered some valuable results,but our picture of the Wiimote was incomplete\ + \ as we lackeddata concerning the participants' instantaneous musical experience.\ + \ Recent trends in HCI are leading researchers totackle this problem of evaluating\ + \ user experience; we reviewsome of their work and suggest that with some adaptation\ + \ itcould provide useful new tools and methodologies for computer musicians.},\n\ + \ address = {Genoa, Italy},\n author = {Kiefer, Chris and Collins, Nick and Fitzpatrick,\ + \ Geraldine},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179577},\n issn\ + \ = {2220-4806},\n keywords = {HCI Methodology, Wiimote, Evaluating Musical Interaction\ + \ },\n pages = {87--90},\n title = {HCI Methodology For Evaluating Musical Controllers\ + \ : A Case Study},\n url = {http://www.nime.org/proceedings/2008/nime2008_087.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176869 + doi: 10.5281/zenodo.1179577 issn: 2220-4806 - keywords: 'Multi-modal Feedback. Haptics. Musical Instrument. 
' - pages: 81--84 - title: 'PETECUBE: a Multimodal Feedback Interface' - url: http://www.nime.org/proceedings/2006/nime2006_081.pdf - year: 2006 + keywords: 'HCI Methodology, Wiimote, Evaluating Musical Interaction ' + pages: 87--90 + title: 'HCI Methodology For Evaluating Musical Controllers : A Case Study' + url: http://www.nime.org/proceedings/2008/nime2008_087.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Lebel2006 - address: 'Paris, France' - author: 'Lebel, Denis and Malloch, Joseph' - bibtex: "@inproceedings{Lebel2006,\n address = {Paris, France},\n author = {Lebel,\ - \ Denis and Malloch, Joseph},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176955},\n\ - \ issn = {2220-4806},\n keywords = {Digital musical instrument, kinesthetic feedback\ - \ },\n pages = {85--88},\n title = {The G-Spring Controller},\n url = {http://www.nime.org/proceedings/2006/nime2006_085.pdf},\n\ - \ year = {2006}\n}\n" + ID: Bau2008 + abstract: 'We combine two concepts, the musical instrument as metaphorand technology + probes, to explore how tangible interfaces canexploit the semantic richness of + sound. Using participatorydesign methods from Human-Computer Interaction (HCI), + wedesigned and tested the A20, a polyhedron-shaped, multichannel audio input/output + device. The software maps soundaround the edges and responds to the user''s gestural + input,allowing both aural and haptic modes of interaction as well asdirect manipulation + of media content. The software is designedto be very flexible and can be adapted + to a wide range ofshapes. Our tests of the A20''s perceptual and interactionproperties + showed that users can successfully detect soundplacement, movement and haptic + effects on this device. Ourparticipatory design workshops explored the possibilities + of theA20 as a generative tool for the design of an extended,collaborative personal + music player. 
The A20 helped users toenact scenarios of everyday mobile music + player use and togenerate new design ideas.' + address: 'Genoa, Italy' + author: 'Bau, Olivier and Tanaka, Atau and Mackay, Wendy E.' + bibtex: "@inproceedings{Bau2008,\n abstract = {We combine two concepts, the musical\ + \ instrument as metaphorand technology probes, to explore how tangible interfaces\ + \ canexploit the semantic richness of sound. Using participatorydesign methods\ + \ from Human-Computer Interaction (HCI), wedesigned and tested the A20, a polyhedron-shaped,\ + \ multichannel audio input/output device. The software maps soundaround the edges\ + \ and responds to the user's gestural input,allowing both aural and haptic modes\ + \ of interaction as well asdirect manipulation of media content. The software\ + \ is designedto be very flexible and can be adapted to a wide range ofshapes.\ + \ Our tests of the A20's perceptual and interactionproperties showed that users\ + \ can successfully detect soundplacement, movement and haptic effects on this\ + \ device. Ourparticipatory design workshops explored the possibilities of theA20\ + \ as a generative tool for the design of an extended,collaborative personal music\ + \ player. 
The A20 helped users toenact scenarios of everyday mobile music player\ + \ use and togenerate new design ideas.},\n address = {Genoa, Italy},\n author\ + \ = {Bau, Olivier and Tanaka, Atau and Mackay, Wendy E.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179489},\n issn = {2220-4806},\n keywords = {Generative\ + \ design tools, Instrument building, Multi-faceted audio, Personal music devices,\ + \ Tangible user interfaces, Technology probes },\n pages = {91--96},\n title =\ + \ {The A20 : Musical Metaphors for Interface Design},\n url = {http://www.nime.org/proceedings/2008/nime2008_091.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176955 + doi: 10.5281/zenodo.1179489 issn: 2220-4806 - keywords: 'Digital musical instrument, kinesthetic feedback ' - pages: 85--88 - title: The G-Spring Controller - url: http://www.nime.org/proceedings/2006/nime2006_085.pdf - year: 2006 + keywords: 'Generative design tools, Instrument building, Multi-faceted audio, Personal + music devices, Tangible user interfaces, Technology probes ' + pages: 91--96 + title: 'The A20 : Musical Metaphors for Interface Design' + url: http://www.nime.org/proceedings/2008/nime2008_091.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Lock2006 - abstract: 'The Orbophone is a new interface that radiates rather thanprojects sound - and image. It provides a cohesive platformfor audio and visual presentation in - situations where bothmedia are transmitted from the same location andlocalization - in both media is perceptually correlated. 
Thispaper discusses the advantages of - radiation overconventional sound and image projection for certain kindsof interactive - public multimedia exhibits and describes theartistic motivation for its development - against a historicalbackdrop of sound systems used in public spaces. Oneexhibit - using the Orbophone is described in detail togetherwith description and critique - of the prototype, discussingaspects of its design and construction. The paper - concludeswith an outline of the Orbophone version 2.' - address: 'Paris, France' - author: 'Lock, Damien and Schiemer, Greg' - bibtex: "@inproceedings{Lock2006,\n abstract = {The Orbophone is a new interface\ - \ that radiates rather thanprojects sound and image. It provides a cohesive platformfor\ - \ audio and visual presentation in situations where bothmedia are transmitted\ - \ from the same location andlocalization in both media is perceptually correlated.\ - \ Thispaper discusses the advantages of radiation overconventional sound and image\ - \ projection for certain kindsof interactive public multimedia exhibits and describes\ - \ theartistic motivation for its development against a historicalbackdrop of sound\ - \ systems used in public spaces. Oneexhibit using the Orbophone is described in\ - \ detail togetherwith description and critique of the prototype, discussingaspects\ - \ of its design and construction. The paper concludeswith an outline of the Orbophone\ - \ version 2.},\n address = {Paris, France},\n author = {Lock, Damien and Schiemer,\ - \ Greg},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176967},\n issn = {2220-4806},\n\ - \ keywords = {Immersive Sound; Multi-channel Sound; Loud-speaker Array; Multimedia;\ - \ Streaming Media; Real-Time Media Performance; Sound Installation. 
},\n pages\ - \ = {89--92},\n title = {Orbophone: a New Interface for Radiating Sound and Image},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_089.pdf},\n year = {2006}\n\ + ID: Grosshauser2008 + abstract: 'The described project is a new approach to use highly sensitive low force + pressure sensor matrices for malposition, cramping and tension of hands and fingers, + gesture and keystroke analysis and for new musical expression. In the latter, + sensors are used as additional touch sensitive switches and keys. In pedagogical + issues, new ways of technology enhanced teaching, self teaching and exercising + are described. The used sensors are custom made in collaboration with the ReactiveS + Sensorlab. ' + address: 'Genoa, Italy' + author: 'Grosshauser, Tobias' + bibtex: "@inproceedings{Grosshauser2008,\n abstract = {The described project is\ + \ a new approach to use highly sensitive low force pressure sensor matrices for\ + \ malposition, cramping and tension of hands and fingers, gesture and keystroke\ + \ analysis and for new musical expression. In the latter, sensors are used as\ + \ additional touch sensitive switches and keys. In pedagogical issues, new ways\ + \ of technology enhanced teaching, self teaching and exercising are described.\ + \ The used sensors are custom made in collaboration with the ReactiveS Sensorlab.\ + \ },\n address = {Genoa, Italy},\n author = {Grosshauser, Tobias},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179551},\n issn = {2220-4806},\n keywords\ + \ = {Pressure Measurement, Force, Sensor, Finger, Violin, Strings, Piano, Left\ + \ Hand, Right Hand, Time Line, Cramping, Gesture and Posture Analysis. 
},\n pages\ + \ = {97--102},\n title = {Low Force Pressure Measurement : Pressure Sensor Matrices\ + \ for Gesture Analysis , Stiffness Recognition and Augmented Instruments},\n url\ + \ = {http://www.nime.org/proceedings/2008/nime2008_097.pdf},\n year = {2008}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176967 + doi: 10.5281/zenodo.1179551 issn: 2220-4806 - keywords: 'Immersive Sound; Multi-channel Sound; Loud-speaker Array; Multimedia; - Streaming Media; Real-Time Media Performance; Sound Installation. ' - pages: 89--92 - title: 'Orbophone: a New Interface for Radiating Sound and Image' - url: http://www.nime.org/proceedings/2006/nime2006_089.pdf - year: 2006 + keywords: 'Pressure Measurement, Force, Sensor, Finger, Violin, Strings, Piano, + Left Hand, Right Hand, Time Line, Cramping, Gesture and Posture Analysis. ' + pages: 97--102 + title: 'Low Force Pressure Measurement : Pressure Sensor Matrices for Gesture Analysis + , Stiffness Recognition and Augmented Instruments' + url: http://www.nime.org/proceedings/2008/nime2008_097.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Kartadinata2006 - abstract: 'The gluion is a sensor interface that was designed to overcomesome of - the limitations of more traditional designs based onmicrocontrollers, which only - provide a small, fixed number ofdigital modules such as counters and serial interfaces. - These areoften required to handle sensors where the physical parametercannot easily - be converted into a voltage. Other sensors arepacked into modules that include - converters and communicatevia SPI or I2C. Finallly, many designs require outputcapabilities - beyond simple on/off.The gluion approaches these challenges thru its FPGA-baseddesign - which allows for a large number of digital I/O modules.It also provides superior - flexibility regarding theirconfiguration, resolution, and functionality. 
In addition, - theFPGA enables a software implementation of the host link --- inthe case of the - gluion the OSC protocol as well as theunderlying Ethernet layers.' - address: 'Paris, France' - author: 'Kartadinata, Sukandar' - bibtex: "@inproceedings{Kartadinata2006,\n abstract = {The gluion is a sensor interface\ - \ that was designed to overcomesome of the limitations of more traditional designs\ - \ based onmicrocontrollers, which only provide a small, fixed number ofdigital\ - \ modules such as counters and serial interfaces. These areoften required to handle\ - \ sensors where the physical parametercannot easily be converted into a voltage.\ - \ Other sensors arepacked into modules that include converters and communicatevia\ - \ SPI or I2C. Finallly, many designs require outputcapabilities beyond simple\ - \ on/off.The gluion approaches these challenges thru its FPGA-baseddesign which\ - \ allows for a large number of digital I/O modules.It also provides superior flexibility\ - \ regarding theirconfiguration, resolution, and functionality. In addition, theFPGA\ - \ enables a software implementation of the host link --- inthe case of the gluion\ - \ the OSC protocol as well as theunderlying Ethernet layers.},\n address = {Paris,\ - \ France},\n author = {Kartadinata, Sukandar},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176937},\n issn = {2220-4806},\n keywords = {actuators,digital\ - \ sensors,fpga,osc,sensor interfaces},\n pages = {93--96},\n title = {The Gluion\ - \ Advantages of an {FPGA}-based Sensor Interface},\n url = {http://www.nime.org/proceedings/2006/nime2006_093.pdf},\n\ - \ year = {2006}\n}\n" + ID: Torre2008 + abstract: 'In this paper, we describe an algorithm for the numericalevaluation of + the orientation of an object to which a clusterof accelerometers, gyroscopes and + magnetometers has beenattached. 
The algorithm is implemented through a set ofMax/Msp + and pd new externals. Through the successfulimplementation of the algorithm, we + introduce Pointingat, a new gesture device for the control of sound in a 3Denvironment. + This work has been at the core of the Celeritas Project, an interdisciplinary + research project on motiontracking technology and multimedia live performances + between the Tyndall Institute of Cork and the InteractionDesign Centre of Limerick.' + address: 'Genoa, Italy' + author: 'Torre, Giuseppe and Torres, Javier and Fernström, Mikael' + bibtex: "@inproceedings{Torre2008,\n abstract = {In this paper, we describe an algorithm\ + \ for the numericalevaluation of the orientation of an object to which a clusterof\ + \ accelerometers, gyroscopes and magnetometers has beenattached. The algorithm\ + \ is implemented through a set ofMax/Msp and pd new externals. Through the successfulimplementation\ + \ of the algorithm, we introduce Pointingat, a new gesture device for the control\ + \ of sound in a 3Denvironment. 
This work has been at the core of the Celeritas\ + \ Project, an interdisciplinary research project on motiontracking technology\ + \ and multimedia live performances between the Tyndall Institute of Cork and the\ + \ InteractionDesign Centre of Limerick.},\n address = {Genoa, Italy},\n author\ + \ = {Torre, Giuseppe and Torres, Javier and Fernstr\\''{o}m, Mikael},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179641},\n issn = {2220-4806},\n keywords\ + \ = {eu-,ler,max,micro-electro-mechanical,msp,nime08,orientation matrix,pd,pitch\ + \ yaw and roll,quaternion,sensors,surement unit,tracking orientation,wimu,wireless\ + \ inertial mea-},\n pages = {103--106},\n title = {The Development of Motion Tracking\ + \ Algorithms for Low Cost Inertial Measurement Units},\n url = {http://www.nime.org/proceedings/2008/nime2008_103.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176937 + doi: 10.5281/zenodo.1179641 issn: 2220-4806 - keywords: 'actuators,digital sensors,fpga,osc,sensor interfaces' - pages: 93--96 - title: The Gluion Advantages of an FPGA-based Sensor Interface - url: http://www.nime.org/proceedings/2006/nime2006_093.pdf - year: 2006 + keywords: 'eu-,ler,max,micro-electro-mechanical,msp,nime08,orientation matrix,pd,pitch + yaw and roll,quaternion,sensors,surement unit,tracking orientation,wimu,wireless + inertial mea-' + pages: 103--106 + title: The Development of Motion Tracking Algorithms for Low Cost Inertial Measurement + Units + url: http://www.nime.org/proceedings/2008/nime2008_103.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Freed2006 - abstract: A new sensor integration system and its first incarnation i sdescribed. 
- As well as supporting existing analog sensorarrays a new architecture allows for - easy integration of thenew generation of low-cost digital sensors used in computermusic - performance instruments and installation art. - address: 'Paris, France' - author: 'Freed, Adrian and Avizienis, Rimas and Wright, Matthew' - bibtex: "@inproceedings{Freed2006,\n abstract = {A new sensor integration system\ - \ and its first incarnation i sdescribed. As well as supporting existing analog\ - \ sensorarrays a new architecture allows for easy integration of thenew generation\ - \ of low-cost digital sensors used in computermusic performance instruments and\ - \ installation art.},\n address = {Paris, France},\n author = {Freed, Adrian and\ - \ Avizienis, Rimas and Wright, Matthew},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176903},\n\ - \ issn = {2220-4806},\n keywords = {Gesture, sensor, MEMS, FPGA, network, OSC,\ - \ configurability },\n pages = {97--100},\n title = {Beyond 0-5{V}: Expanding\ - \ Sensor Integration Architectures},\n url = {http://www.nime.org/proceedings/2006/nime2006_097.pdf},\n\ - \ year = {2006}\n}\n" + ID: Freed2008 + abstract: 'The paper introduces new fiber and malleable materials,including piezoresistive + fabric and conductive heat-shrinktubing, and shows techniques and examples of + how they maybe used for rapid prototyping and agile development of musicalinstrument + controllers. New implementations of well-knowndesigns are covered as well as enhancements + of existingcontrollers. Finally, two new controllers are introduced that aremade + possible by these recently available materials andconstruction techniques.' 
+ address: 'Genoa, Italy' + author: 'Freed, Adrian' + bibtex: "@inproceedings{Freed2008,\n abstract = {The paper introduces new fiber\ + \ and malleable materials,including piezoresistive fabric and conductive heat-shrinktubing,\ + \ and shows techniques and examples of how they maybe used for rapid prototyping\ + \ and agile development of musicalinstrument controllers. New implementations\ + \ of well-knowndesigns are covered as well as enhancements of existingcontrollers.\ + \ Finally, two new controllers are introduced that aremade possible by these recently\ + \ available materials andconstruction techniques.},\n address = {Genoa, Italy},\n\ + \ author = {Freed, Adrian},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179539},\n\ + \ issn = {2220-4806},\n keywords = {Agile Development, Rapid Prototyping, Conductive\ + \ fabric, Piezoresistive fabric, conductive heatshrink tubing, augmented instruments.\ + \ },\n pages = {107--112},\n title = {Application of new Fiber and Malleable Materials\ + \ for Agile Development of Augmented Instruments and Controllers},\n url = {http://www.nime.org/proceedings/2008/nime2008_107.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176903 + doi: 10.5281/zenodo.1179539 issn: 2220-4806 - keywords: 'Gesture, sensor, MEMS, FPGA, network, OSC, configurability ' - pages: 97--100 - title: 'Beyond 0-5V: Expanding Sensor Integration Architectures' - url: http://www.nime.org/proceedings/2006/nime2006_097.pdf - year: 2006 + keywords: 'Agile Development, Rapid Prototyping, Conductive fabric, Piezoresistive + fabric, conductive heatshrink tubing, augmented instruments. 
' + pages: 107--112 + title: Application of new Fiber and Malleable Materials for Agile Development of + Augmented Instruments and Controllers + url: http://www.nime.org/proceedings/2008/nime2008_107.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Johnson2006 - abstract: 'How can we provide interfaces to synthesis algorithms thatwill allow - us to manipulate timbre directly, using the sametimbre-words that are used by - human musicians to communicate about timbre? This paper describes ongoingwork - that uses machine learning methods (principally genetic algorithms and neural - networks) to learn (1) to recognise timbral characteristics of sound and (2) to - adjust timbral characteristics of existing synthesized sounds.' - address: 'Paris, France' - author: 'Johnson, Colin G. and Gounaropoulos, Alex' - bibtex: "@inproceedings{Johnson2006,\n abstract = {How can we provide interfaces\ - \ to synthesis algorithms thatwill allow us to manipulate timbre directly, using\ - \ the sametimbre-words that are used by human musicians to communicate about timbre?\ - \ This paper describes ongoingwork that uses machine learning methods (principally\ - \ genetic algorithms and neural networks) to learn (1) to recognise timbral characteristics\ - \ of sound and (2) to adjust timbral characteristics of existing synthesized sounds.},\n\ - \ address = {Paris, France},\n author = {Johnson, Colin G. 
and Gounaropoulos,\ - \ Alex},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176933},\n issn = {2220-4806},\n\ - \ keywords = {timbre; natural language; neural networks },\n pages = {101--102},\n\ - \ title = {Timbre Interfaces using Adjectives and Adverbs},\n url = {http://www.nime.org/proceedings/2006/nime2006_101.pdf},\n\ - \ year = {2006}\n}\n" + ID: Crevoisier2008 + abstract: 'In this paper, we describe a set of hardware and software tools for creating + musical controllers with any flat surface or simple object, such as tables, walls, + metallic plates, wood boards, etc. The system makes possible to transform such + physical objects and surfaces into virtual control interfaces, by using computer + vision technologies to track the interaction made by the musician, either with + the hands, mallets or sticks. These new musical interfaces, freely reconfigurable, + can be used to control standard sound modules or effect processors, by defining + zones on their surface and assigning them musical commands, such as the triggering + of notes or the modulation of parameters.' + address: 'Genoa, Italy' + author: 'Crevoisier, Alain and Kellum, Greg' + bibtex: "@inproceedings{Crevoisier2008,\n abstract = {In this paper, we describe\ + \ a set of hardware and software tools for creating musical controllers with any\ + \ flat surface or simple object, such as tables, walls, metallic plates, wood\ + \ boards, etc. The system makes possible to transform such physical objects and\ + \ surfaces into virtual control interfaces, by using computer vision technologies\ + \ to track the interaction made by the musician, either with the hands, mallets\ + \ or sticks. 
These new musical interfaces, freely reconfigurable, can be used\ + \ to control standard sound modules or effect processors, by defining zones on\ + \ their surface and assigning them musical commands, such as the triggering of\ + \ notes or the modulation of parameters.},\n address = {Genoa, Italy},\n author\ + \ = {Crevoisier, Alain and Kellum, Greg},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179517},\n\ + \ issn = {2220-4806},\n keywords = {Computer Vision, Multi-touch Interaction,\ + \ Musical Interfaces. },\n pages = {113--116},\n title = {Transforming Ordinary\ + \ Surfaces into Multi-touch Controllers},\n url = {http://www.nime.org/proceedings/2008/nime2008_113.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176933 + doi: 10.5281/zenodo.1179517 issn: 2220-4806 - keywords: 'timbre; natural language; neural networks ' - pages: 101--102 - title: Timbre Interfaces using Adjectives and Adverbs - url: http://www.nime.org/proceedings/2006/nime2006_101.pdf - year: 2006 + keywords: 'Computer Vision, Multi-touch Interaction, Musical Interfaces. ' + pages: 113--116 + title: Transforming Ordinary Surfaces into Multi-touch Controllers + url: http://www.nime.org/proceedings/2008/nime2008_113.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Stewart2006 - address: 'Paris, France' - author: 'Stewart, D. Andrew' - bibtex: "@inproceedings{Stewart2006,\n address = {Paris, France},\n author = {Stewart,\ - \ D. 
Andrew},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177011},\n issn\ - \ = {2220-4806},\n keywords = {composition, process, materials, gesture, controller,\ - \ cross- modal interaction },\n pages = {103--105},\n title = {SonicJumper Composer},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_103.pdf},\n year = {2006}\n\ - }\n" + ID: Ward2008 + abstract: 'This paper presents a comparison of the movement styles of two theremin + players based on observation and analysis of video recordings. The premise behind + this research is that a consideration of musicians'' movements could form the + basis for a new framework for the design of new instruments. Laban Movement Analysis + is used to qualitatively analyse the movement styles of the musicians and to argue + that the Recuperation phase of their phrasing is essential to achieve satisfactory + performance. ' + address: 'Genoa, Italy' + author: 'Ward, Nicholas and Penfield, Kedzie and O''Modhrain, Sile and Knapp, Benjamin' + bibtex: "@inproceedings{Ward2008,\n abstract = {This paper presents a comparison\ + \ of the movement styles of two theremin players based on observation and analysis\ + \ of video recordings. The premise behind this research is that a consideration\ + \ of musicians' movements could form the basis for a new framework for the design\ + \ of new instruments. Laban Movement Analysis is used to qualitatively analyse\ + \ the movement styles of the musicians and to argue that the Recuperation phase\ + \ of their phrasing is essential to achieve satisfactory performance. 
},\n address\ + \ = {Genoa, Italy},\n author = {Ward, Nicholas and Penfield, Kedzie and O'Modhrain,\ + \ Sile and Knapp, Benjamin},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179649},\n\ + \ issn = {2220-4806},\n keywords = {Effort Phrasing, Recuperation, Laban Movement\ + \ Analysis, Theremin },\n pages = {117--121},\n title = {A Study of Two Thereminists\ + \ : Towards Movement Informed Instrument Design},\n url = {http://www.nime.org/proceedings/2008/nime2008_117.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177011 + doi: 10.5281/zenodo.1179649 issn: 2220-4806 - keywords: 'composition, process, materials, gesture, controller, cross- modal interaction ' - pages: 103--105 - title: SonicJumper Composer - url: http://www.nime.org/proceedings/2006/nime2006_103.pdf - year: 2006 + keywords: 'Effort Phrasing, Recuperation, Laban Movement Analysis, Theremin ' + pages: 117--121 + title: 'A Study of Two Thereminists : Towards Movement Informed Instrument Design' + url: http://www.nime.org/proceedings/2008/nime2008_117.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Steiner2006 - address: 'Paris, France' - author: 'Steiner, Hans-Christoph' - bibtex: "@inproceedings{Steiner2006,\n address = {Paris, France},\n author = {Steiner,\ - \ Hans-Christoph},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177009},\n\ - \ issn = {2220-4806},\n pages = {106--109},\n title = {Towards a Catalog and Software\ - \ Library of Mapping Methods},\n url = {http://www.nime.org/proceedings/2006/nime2006_106.pdf},\n\ - \ year = {2006}\n}\n" + ID: Maniatakos2008 + address: 'Genoa, Italy' + author: 'Maniatakos, Vassilios-Fivos A. 
and Jacquemin, Christian' + bibtex: "@inproceedings{Maniatakos2008,\n address = {Genoa, Italy},\n author = {Maniatakos,\ + \ Vassilios-Fivos A. and Jacquemin, Christian},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1179595},\n issn = {2220-4806},\n keywords = {affective computing,\ + \ interactive performance, HMM, gesture recognition, intelligent mapping, affective\ + \ interface },\n pages = {122--127},\n title = {Towards an Affective Gesture Interface\ + \ for Expressive Music Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_122.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177009 + doi: 10.5281/zenodo.1179595 issn: 2220-4806 - pages: 106--109 - title: Towards a Catalog and Software Library of Mapping Methods - url: http://www.nime.org/proceedings/2006/nime2006_106.pdf - year: 2006 + keywords: 'affective computing, interactive performance, HMM, gesture recognition, + intelligent mapping, affective interface ' + pages: 122--127 + title: Towards an Affective Gesture Interface for Expressive Music Performance + url: http://www.nime.org/proceedings/2008/nime2008_122.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Kobori2006 - address: 'Paris, France' - author: 'Kobori, Daisuke and Kagawa, Kojiro and Iida, Makoto and Arakawa, Chuichi' - bibtex: "@inproceedings{Kobori2006,\n address = {Paris, France},\n author = {Kobori,\ - \ Daisuke and Kagawa, Kojiro and Iida, Makoto and Arakawa, Chuichi},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176947},\n issn = {2220-4806},\n pages\ - \ = {110--113},\n title = {LINE: Interactive Sound and Light Installation},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_110.pdf},\n year = {2006}\n\ - }\n" + ID: 
Kallblad2008 + abstract: 'It started with an idea to create an empty space in which you activated + music and light as you moved around. In responding to the music and lighting you + would activate more or different sounds and thereby communicate with the space + through your body. This led to an artistic research project in which children''s + spontaneous movement was observed, a choreography made based on the children''s + movements and music written and recorded for the choreography. This music was + then decomposed and choreographed into an empty space at Botkyrka konsthall creating + an interactive dance installation. It was realized using an interactive sound + and light system in which 5 video cameras were detecting the motion in the room + connected to a 4-channel sound system and a set of 14 light modules. During five + weeks people of all ages came to dance and move around in the installation. The + installation attracted a wide range of people of all ages and the tentative evaluation + indicates that it was very positively received and that it encouraged free movement + in the intended way. Besides observing the activity in the installation interviews + were made with schoolchildren age 7 who had participated in the installation. ' + address: 'Genoa, Italy' + author: 'Källblad, Anna and Friberg, Anders and Svensson, Karl and Edelholm, Elisabet + S.' + bibtex: "@inproceedings{Kallblad2008,\n abstract = {It started with an idea to create\ + \ an empty space in which you activated music and light as you moved around. In\ + \ responding to the music and lighting you would activate more or different sounds\ + \ and thereby communicate with the space through your body. This led to an artistic\ + \ research project in which children's spontaneous movement was observed, a choreography\ + \ made based on the children's movements and music written and recorded for the\ + \ choreography. 
This music was then decomposed and choreographed into an empty\ + \ space at Botkyrka konsthall creating an interactive dance installation. It was\ + \ realized using an interactive sound and light system in which 5 video cameras\ + \ were detecting the motion in the room connected to a 4-channel sound system\ + \ and a set of 14 light modules. During five weeks people of all ages came to\ + \ dance and move around in the installation. The installation attracted a wide\ + \ range of people of all ages and the tentative evaluation indicates that it was\ + \ very positively received and that it encouraged free movement in the intended\ + \ way. Besides observing the activity in the installation interviews were made\ + \ with schoolchildren age 7 who had participated in the installation. },\n address\ + \ = {Genoa, Italy},\n author = {K\\''{a}llblad, Anna and Friberg, Anders and Svensson,\ + \ Karl and Edelholm, Elisabet S.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179573},\n\ + \ issn = {2220-4806},\n keywords = {Installation, dance, video recognition, children's\ + \ movement, interactive multimedia },\n pages = {128--133},\n title = {Hoppsa\ + \ Universum -- An Interactive Dance Installation for Children},\n url = {http://www.nime.org/proceedings/2008/nime2008_128.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176947 + doi: 10.5281/zenodo.1179573 issn: 2220-4806 - pages: 110--113 - title: 'LINE: Interactive Sound and Light Installation' - url: http://www.nime.org/proceedings/2006/nime2006_110.pdf - year: 2006 + keywords: 'Installation, dance, video recognition, children''s movement, interactive + multimedia ' + pages: 128--133 + title: Hoppsa Universum -- An Interactive Dance Installation for Children + url: http://www.nime.org/proceedings/2008/nime2008_128.pdf + year: 
2008 - ENTRYTYPE: inproceedings - ID: BryanKinns2006 - abstract: 'This paper reports on ongoing studies of the design and use ofsupport - for remote group music making. In this paper weoutline the initial findings of - a recent study focusing on thefunction of decay of contributions in collaborative - musicmaking. Findings indicate that persistent contributions lendthemselves to - individual musical composition and learningnovel interfaces, whilst contributions - that quickly decayengender a more focused musical interaction in experiencedparticipants.' - address: 'Paris, France' - author: 'Bryan-Kinns, Nick and Healey, Patrick G.' - bibtex: "@inproceedings{BryanKinns2006,\n abstract = {This paper reports on ongoing\ - \ studies of the design and use ofsupport for remote group music making. In this\ - \ paper weoutline the initial findings of a recent study focusing on thefunction\ - \ of decay of contributions in collaborative musicmaking. Findings indicate that\ - \ persistent contributions lendthemselves to individual musical composition and\ - \ learningnovel interfaces, whilst contributions that quickly decayengender a\ - \ more focused musical interaction in experiencedparticipants.},\n address = {Paris,\ - \ France},\n author = {Bryan-Kinns, Nick and Healey, Patrick G.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176885},\n issn = {2220-4806},\n keywords\ - \ = {creativity,design,group interaction,music improvisation},\n pages = {114--117},\n\ - \ title = {Decay in Collaborative Music Making},\n url = {http://www.nime.org/proceedings/2006/nime2006_114.pdf},\n\ - \ year = {2006}\n}\n" + ID: Camurri2008 + address: 'Genoa, Italy' + author: 'Camurri, Antonio and Canepa, Corrado and Coletta, Paolo and Mazzarino, + Barbara and Volpe, Gualtiero' + bibtex: "@inproceedings{Camurri2008,\n address = {Genoa, Italy},\n author = {Camurri,\ + \ Antonio and Canepa, Corrado and 
Coletta, Paolo and Mazzarino, Barbara and Volpe,\ + \ Gualtiero},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179505},\n issn\ + \ = {2220-4806},\n keywords = {Active listening of music, expressive interfaces,\ + \ full-body motion analysis and expressive gesture processing, multimodal interactive\ + \ systems for music and performing arts applications, collaborative environments,\ + \ social interaction. },\n pages = {134--139},\n title = {Mappe per Affetti Erranti\ + \ : a Multimodal System for Social Active Listening and Expressive Performance},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_134.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176885 + doi: 10.5281/zenodo.1179505 issn: 2220-4806 - keywords: 'creativity,design,group interaction,music improvisation' - pages: 114--117 - title: Decay in Collaborative Music Making - url: http://www.nime.org/proceedings/2006/nime2006_114.pdf - year: 2006 + keywords: 'Active listening of music, expressive interfaces, full-body motion analysis + and expressive gesture processing, multimodal interactive systems for music and + performing arts applications, collaborative environments, social interaction. 
' + pages: 134--139 + title: 'Mappe per Affetti Erranti : a Multimodal System for Social Active Listening + and Expressive Performance' + url: http://www.nime.org/proceedings/2008/nime2008_134.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Gurevich2006 - address: 'Paris, France' - author: 'Gurevich, Michael' - bibtex: "@inproceedings{Gurevich2006,\n address = {Paris, France},\n author = {Gurevich,\ - \ Michael},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176915},\n issn\ - \ = {2220-4806},\n keywords = {Collaborative interface, remote jamming, network\ - \ music, interaction design, novice, media space INTRODUCTION Most would agree\ - \ that music is an inherently social ac- tivity [30], but since the },\n pages\ - \ = {118--123},\n title = {JamSpace: Designing A Collaborative Networked Music\ - \ Space for Novices},\n url = {http://www.nime.org/proceedings/2006/nime2006_118.pdf},\n\ - \ year = {2006}\n}\n" + ID: Canazza2008 + abstract: "Musical open works can be often thought like sequences of musical structures,\ + \ which can be arranged by anyone who had access to them and who wished to realize\ + \ the work. This paper proposes an innovative agent-based system to model the\ + \ information and organize it in structured knowledge; to create effective, graph-centric\ + \ browsing perspectives and views for the user; to use ,\n,\nauthoring tools for\ + \ the performance of open work of electro-acoustic music. " + address: 'Genoa, Italy' + author: 'Canazza, Sergio and Dattolo, Antonina' + bibtex: "@inproceedings{Canazza2008,\n abstract = {Musical open works can be often\ + \ thought like sequences of musical structures, which can be arranged by anyone\ + \ who had access to them and who wished to realize the work. 
This paper proposes\ + \ an innovative agent-based system to model the information and organize it in\ + \ structured knowledge; to create effective, graph-centric browsing perspectives\ + \ and views for the user; to use ,\n,\nauthoring tools for the performance of\ + \ open work of electro-acoustic music. },\n address = {Genoa, Italy},\n author\ + \ = {Canazza, Sergio and Dattolo, Antonina},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179507},\n issn = {2220-4806},\n keywords = {Musical Open Work,\ + \ Multimedia Information Systems, Software Agents, zz-structures. },\n pages =\ + \ {140--143},\n title = {New Data Structure for Old Musical Open Works},\n url\ + \ = {http://www.nime.org/proceedings/2008/nime2008_140.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176915 + doi: 10.5281/zenodo.1179507 issn: 2220-4806 - keywords: 'Collaborative interface, remote jamming, network music, interaction design, - novice, media space INTRODUCTION Most would agree that music is an inherently - social ac- tivity [30], but since the ' - pages: 118--123 - title: 'JamSpace: Designing A Collaborative Networked Music Space for Novices' - url: http://www.nime.org/proceedings/2006/nime2006_118.pdf - year: 2006 + keywords: 'Musical Open Work, Multimedia Information Systems, Software Agents, zz-structures. 
' + pages: 140--143 + title: New Data Structure for Old Musical Open Works + url: http://www.nime.org/proceedings/2008/nime2008_140.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Knapp2006 - abstract: "In this paper, we describe the networking of multiple Integral Music\ - \ Controllers (IMCs) to enable an entirely new method for creating music by tapping\ - \ into the composite gestures and emotions of not just one, but many performers.\ - \ The concept and operation of an IMC is reviewed as well as its use in a network\ - \ of IMC controllers. We then introduce a new technique of Integral Music Control\ - \ by assessing the composite gesture(s) and emotion(s) of a group of performers\ - \ through the use of a wireless mesh network. The Telemuse, an IMC designed precisely\ - \ for this kind of performance, is described and its use in a new musical performance\ - \ project under development by the ,\n,\nauthors is discussed. " - address: 'Paris, France' - author: 'Knapp, Benjamin and Cook, Perry R.' - bibtex: "@inproceedings{Knapp2006,\n abstract = {In this paper, we describe the\ - \ networking of multiple Integral Music Controllers (IMCs) to enable an entirely\ - \ new method for creating music by tapping into the composite gestures and emotions\ - \ of not just one, but many performers. The concept and operation of an IMC is\ - \ reviewed as well as its use in a network of IMC controllers. We then introduce\ - \ a new technique of Integral Music Control by assessing the composite gesture(s)\ - \ and emotion(s) of a group of performers through the use of a wireless mesh network.\ - \ The Telemuse, an IMC designed precisely for this kind of performance, is described\ - \ and its use in a new musical performance project under development by the ,\n\ - ,\nauthors is discussed. 
},\n address = {Paris, France},\n author = {Knapp, Benjamin\ - \ and Cook, Perry R.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176943},\n\ - \ issn = {2220-4806},\n keywords = {Community-Institutional Relations,Health Services\ - \ Accessibility,Medically Uninsured,Organizational Case Studies,Primary Health\ - \ Care,Public-Private Sector Partnerships,San Francisco},\n pages = {124--128},\n\ - \ title = {Creating a Network of Integral Music Controllers},\n url = {http://www.nime.org/proceedings/2006/nime2006_124.pdf},\n\ - \ year = {2006}\n}\n" + ID: Eigenfeldt2008 + abstract: 'This paper presents an agent-based architecture for robotic musical instruments + that generate polyphonic rhythmic patterns that continuously evolve and develop + in a musically "intelligent" manner. Agent-based software offers a new method + for real-time composition that allows for complex interactions between individual + voices while requiring very little user interaction or supervision. The system + described, Kinetic Engine, is an environment in which individual software agents, + emulate drummers improvising within a percussion ensemble. Player agents assume + roles and personalities within the ensemble, and communicate with one another + to create complex rhythmic interactions. In this project, the ensemble is comprised + of a 12-armed musical robot, MahaDeviBot, in which each limb has its own software + agent controlling what it performs. ' + address: 'Genoa, Italy' + author: 'Eigenfeldt, Arne and Kapur, Ajay' + bibtex: "@inproceedings{Eigenfeldt2008,\n abstract = {This paper presents an agent-based\ + \ architecture for robotic musical instruments that generate polyphonic rhythmic\ + \ patterns that continuously evolve and develop in a musically \"intelligent\"\ + \ manner. 
Agent-based software offers a new method for real-time composition that\ + \ allows for complex interactions between individual voices while requiring very\ + \ little user interaction or supervision. The system described, Kinetic Engine,\ + \ is an environment in which individual software agents, emulate drummers improvising\ + \ within a percussion ensemble. Player agents assume roles and personalities within\ + \ the ensemble, and communicate with one another to create complex rhythmic interactions.\ + \ In this project, the ensemble is comprised of a 12-armed musical robot, MahaDeviBot,\ + \ in which each limb has its own software agent controlling what it performs.\ + \ },\n address = {Genoa, Italy},\n author = {Eigenfeldt, Arne and Kapur, Ajay},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179527},\n issn = {2220-4806},\n\ + \ keywords = {Robotic Musical Instruments, Agents, Machine Musicianship. },\n\ + \ pages = {144--149},\n title = {An Agent-based System for Robotic Musical Performance},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_144.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176943 + doi: 10.5281/zenodo.1179527 issn: 2220-4806 - keywords: 'Community-Institutional Relations,Health Services Accessibility,Medically - Uninsured,Organizational Case Studies,Primary Health Care,Public-Private Sector - Partnerships,San Francisco' - pages: 124--128 - title: Creating a Network of Integral Music Controllers - url: http://www.nime.org/proceedings/2006/nime2006_124.pdf - year: 2006 + keywords: 'Robotic Musical Instruments, Agents, Machine Musicianship. 
' + pages: 144--149 + title: An Agent-based System for Robotic Musical Performance + url: http://www.nime.org/proceedings/2008/nime2008_144.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Burtner2006 - abstract: "This paper explores the use of perturbation in designing multiperformer\ - \ or multi-agent interactive musical interfaces. A problem with the multi-performer\ - \ approach is how to cohesively organize the independent data inputs into useable\ - \ control information for synthesis engines. Perturbation has proven useful for\ - \ navigating multi-agent NIMEs. The ,\n,\nauthor's Windtree is discussed as an\ - \ example multi-performer instrument in which perturbation is used for multichannel\ - \ ecological modeling. The Windtree uses a physical system turbulence model controlled\ - \ in real time by four performers. " - address: 'Paris, France' - author: 'Burtner, Matthew' - bibtex: "@inproceedings{Burtner2006,\n abstract = {This paper explores the use of\ - \ perturbation in designing multiperformer or multi-agent interactive musical\ - \ interfaces. A problem with the multi-performer approach is how to cohesively\ - \ organize the independent data inputs into useable control information for synthesis\ - \ engines. Perturbation has proven useful for navigating multi-agent NIMEs. The\ - \ ,\n,\nauthor's Windtree is discussed as an example multi-performer instrument\ - \ in which perturbation is used for multichannel ecological modeling. 
The Windtree\ - \ uses a physical system turbulence model controlled in real time by four performers.\ - \ },\n address = {Paris, France},\n author = {Burtner, Matthew},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176887},\n issn = {2220-4806},\n keywords = {interface,mapping,movement,multi-agent,multi-performer,music\ - \ composition,perturbation},\n pages = {129--133},\n title = {Perturbation Techniques\ - \ for Multi-Performer or Multi- Agent Interactive Musical Interfaces},\n url =\ - \ {http://www.nime.org/proceedings/2006/nime2006_129.pdf},\n year = {2006}\n}\n" + ID: Goina2008 + abstract: 'In this paper, we investigate the relationships between gesture and sound + by means of an elementary gesture sonification. This work takes inspiration from + Bauhaus'' ideals and Paul Klee''s investigation into forms and pictorial representation. + In line with these ideas, the main aim of this work is to reduce gesture to a + combination of a small number of elementary components (gestalts) used to control + a corresponding small set of sounds. By means of a demonstrative tool, we introduce + here a line of research that is at its initial stage. The envisaged goal of future + developments is a novel system that could be a composing/improvising tool as well + as an interface for interactive dance and performance. ' + address: 'Genoa, Italy' + author: 'Goina, Maurizio and Polotti, Pietro' + bibtex: "@inproceedings{Goina2008,\n abstract = {In this paper, we investigate the\ + \ relationships between gesture and sound by means of an elementary gesture sonification.\ + \ This work takes inspiration from Bauhaus' ideals and Paul Klee's investigation\ + \ into forms and pictorial representation. 
In line with these ideas, the main\ + \ aim of this work is to reduce gesture to a combination of a small number of\ + \ elementary components (gestalts) used to control a corresponding small set of\ + \ sounds. By means of a demonstrative tool, we introduce here a line of research\ + \ that is at its initial stage. The envisaged goal of future developments is a\ + \ novel system that could be a composing/improvising tool as well as an interface\ + \ for interactive dance and performance. },\n address = {Genoa, Italy},\n author\ + \ = {Goina, Maurizio and Polotti, Pietro},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179549},\n\ + \ issn = {2220-4806},\n keywords = {Bauhaus, Klee, gesture analysis, sonification.\ + \ },\n pages = {150--153},\n title = {Elementary Gestalts for Gesture Sonification},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_150.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176887 + doi: 10.5281/zenodo.1179549 issn: 2220-4806 - keywords: 'interface,mapping,movement,multi-agent,multi-performer,music composition,perturbation' - pages: 129--133 - title: Perturbation Techniques for Multi-Performer or Multi- Agent Interactive Musical - Interfaces - url: http://www.nime.org/proceedings/2006/nime2006_129.pdf - year: 2006 + keywords: 'Bauhaus, Klee, gesture analysis, sonification. ' + pages: 150--153 + title: Elementary Gestalts for Gesture Sonification + url: http://www.nime.org/proceedings/2008/nime2008_150.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Aylward2006 - abstract: 'We describe the design of a system of compact, wireless sensor modules - meant to capture expressive motion whenworn at the wrists and ankles of a dancer. 
- The sensors form ahigh-speed RF network geared toward real-time dataacquisition - from multiple devices simultaneously, enabling asmall dance ensemble to become - a collective interface formusic control. Each sensor node includes a 6-axis inertialmeasurement - unit (IMU) comprised of three orthogonalgyroscopes and accelerometers in order - to capture localdynamics, as well as a capacitive sensor to measure closerange - node-to-node proximity. The nodes may also beaugmented with other digital or analog - sensors. This paperdescribes application goals, presents the prototype hardwaredesign, - introduces concepts for feature extraction andinterpretation, and discusses early - test results.' - address: 'Paris, France' - author: 'Aylward, Ryan and Paradiso, Joseph A.' - bibtex: "@inproceedings{Aylward2006,\n abstract = {We describe the design of a system\ - \ of compact, wireless sensor modules meant to capture expressive motion whenworn\ - \ at the wrists and ankles of a dancer. The sensors form ahigh-speed RF network\ - \ geared toward real-time dataacquisition from multiple devices simultaneously,\ - \ enabling asmall dance ensemble to become a collective interface formusic control.\ - \ Each sensor node includes a 6-axis inertialmeasurement unit (IMU) comprised\ - \ of three orthogonalgyroscopes and accelerometers in order to capture localdynamics,\ - \ as well as a capacitive sensor to measure closerange node-to-node proximity.\ - \ The nodes may also beaugmented with other digital or analog sensors. 
This paperdescribes\ - \ application goals, presents the prototype hardwaredesign, introduces concepts\ - \ for feature extraction andinterpretation, and discusses early test results.},\n\ - \ address = {Paris, France},\n author = {Aylward, Ryan and Paradiso, Joseph A.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176865},\n issn = {2220-4806},\n\ - \ keywords = {Interactive dance, wearable sensor networks, inertial gesture tracking,\ - \ collective motion analysis, multi-user interface },\n pages = {134--139},\n\ - \ title = {Sensemble: A Wireless, Compact, Multi-User Sensor System for Interactive\ - \ Dance},\n url = {http://www.nime.org/proceedings/2006/nime2006_134.pdf},\n year\ - \ = {2006}\n}\n" + ID: DelleMonache2008 + abstract: 'We present our work with augmented everyday objectstransformed into sound + sources for music generation. The idea isto give voice to objects through technology. + More specifically, theparadigm of the birth of musical instruments as a sonification + ofobjects used in domestic or work everyday environments is hereconsidered and + transposed into the technologically augmentedscenarios of our contemporary world.' + address: 'Genoa, Italy' + author: 'Delle Monache, Stefano and Polotti, Pietro and Papetti, Stefano and Rocchesso, + Davide' + bibtex: "@inproceedings{DelleMonache2008,\n abstract = {We present our work with\ + \ augmented everyday objectstransformed into sound sources for music generation.\ + \ The idea isto give voice to objects through technology. 
More specifically, theparadigm\ + \ of the birth of musical instruments as a sonification ofobjects used in domestic\ + \ or work everyday environments is hereconsidered and transposed into the technologically\ + \ augmentedscenarios of our contemporary world.},\n address = {Genoa, Italy},\n\ + \ author = {Delle Monache, Stefano and Polotti, Pietro and Papetti, Stefano and\ + \ Rocchesso, Davide},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179519},\n\ + \ issn = {2220-4806},\n keywords = {Rag-time washboard, sounding objects, physics-based\ + \ sound synthesis, interactivity, sonification, augmented everyday objects. },\n\ + \ pages = {154--157},\n title = {Sonically Augmented Found Objects},\n url = {http://www.nime.org/proceedings/2008/nime2008_154.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176865 + doi: 10.5281/zenodo.1179519 issn: 2220-4806 - keywords: 'Interactive dance, wearable sensor networks, inertial gesture tracking, - collective motion analysis, multi-user interface ' - pages: 134--139 - title: 'Sensemble: A Wireless, Compact, Multi-User Sensor System for Interactive - Dance' - url: http://www.nime.org/proceedings/2006/nime2006_134.pdf - year: 2006 + keywords: 'Rag-time washboard, sounding objects, physics-based sound synthesis, + interactivity, sonification, augmented everyday objects. 
' + pages: 154--157 + title: Sonically Augmented Found Objects + url: http://www.nime.org/proceedings/2008/nime2008_154.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Ramakrishnan2006 - address: 'Paris, France' - author: 'Ramakrishnan, Chandrasekhar and Go\ss man, Joachim and Br\''''{u}mmer, - Ludger' - bibtex: "@inproceedings{Ramakrishnan2006,\n address = {Paris, France},\n author\ - \ = {Ramakrishnan, Chandrasekhar and Go\\ss man, Joachim and Br\\''{u}mmer, Ludger},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176991},\n issn = {2220-4806},\n\ - \ keywords = {Sound Spatialization, Ambisonics, Vector Based Additive Panning\ - \ (VBAP), Wave Field Synthesis, Acousmatic Music },\n pages = {140--143},\n title\ - \ = {The ZKM Klangdom},\n url = {http://www.nime.org/proceedings/2006/nime2006_140.pdf},\n\ - \ year = {2006}\n}\n" + ID: Pelletier2008 + abstract: 'This paper describes a generalized motion-based framework forthe generation + of large musical control fields from imaging data.The framework is general in + the sense that it does not depend ona particular source of sensing data. Real-time + images of stageperformers, pre-recorded and live video, as well as more exoticdata + from imaging systems such as thermography, pressuresensor arrays, etc. can be + used as a source of control. Featurepoints are extracted from the candidate images, + from whichmotion vector fields are calculated. After some processing, thesemotion + vectors are mapped individually to sound synthesisparameters. Suitable synthesis + techniques include granular andmicrosonic algorithms, additive synthesis and micro-polyphonicorchestration. + Implementation details of this framework isdiscussed, as well as suitable creative + and artistic uses andapproaches.' 
+ address: 'Genoa, Italy' + author: 'Pelletier, Jean-Marc' + bibtex: "@inproceedings{Pelletier2008,\n abstract = {This paper describes a generalized\ + \ motion-based framework forthe generation of large musical control fields from\ + \ imaging data.The framework is general in the sense that it does not depend ona\ + \ particular source of sensing data. Real-time images of stageperformers, pre-recorded\ + \ and live video, as well as more exoticdata from imaging systems such as thermography,\ + \ pressuresensor arrays, etc. can be used as a source of control. Featurepoints\ + \ are extracted from the candidate images, from whichmotion vector fields are\ + \ calculated. After some processing, thesemotion vectors are mapped individually\ + \ to sound synthesisparameters. Suitable synthesis techniques include granular\ + \ andmicrosonic algorithms, additive synthesis and micro-polyphonicorchestration.\ + \ Implementation details of this framework isdiscussed, as well as suitable creative\ + \ and artistic uses andapproaches.},\n address = {Genoa, Italy},\n author = {Pelletier,\ + \ Jean-Marc},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179611},\n issn\ + \ = {2220-4806},\n keywords = {Computer vision, control field, image analysis,\ + \ imaging, mapping, microsound, motion flow, sonification, synthesis },\n pages\ + \ = {158--163},\n title = {Sonified Motion Flow Fields as a Means of Musical Expression},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_158.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176991 + doi: 10.5281/zenodo.1179611 issn: 2220-4806 - keywords: 'Sound Spatialization, Ambisonics, Vector Based Additive Panning (VBAP), - Wave Field Synthesis, Acousmatic Music ' - pages: 140--143 - title: The ZKM Klangdom - url: 
http://www.nime.org/proceedings/2006/nime2006_140.pdf - year: 2006 + keywords: 'Computer vision, control field, image analysis, imaging, mapping, microsound, + motion flow, sonification, synthesis ' + pages: 158--163 + title: Sonified Motion Flow Fields as a Means of Musical Expression + url: http://www.nime.org/proceedings/2008/nime2008_158.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Wozniewski2006 - abstract: 'Traditional uses of virtual audio environments tend to focus onperceptually - accurate acoustic representations. Though spatialization of sound sources is important, - it is necessary to leveragecontrol of the sonic representation when considering - musical applications. The proposed framework allows for the creation ofperceptually - immersive scenes that function as musical instruments. Loudspeakers and microphones - are modeled within thescene along with the listener/performer, creating a navigable - 3Dsonic space where sound sources and sinks process audio according to user-defined - spatial mappings.' - address: 'Paris, France' - author: 'Wozniewski, Mike and Settel, Zack and Cooperstock, Jeremy R.' - bibtex: "@inproceedings{Wozniewski2006,\n abstract = {Traditional uses of virtual\ - \ audio environments tend to focus onperceptually accurate acoustic representations.\ - \ Though spatialization of sound sources is important, it is necessary to leveragecontrol\ - \ of the sonic representation when considering musical applications. The proposed\ - \ framework allows for the creation ofperceptually immersive scenes that function\ - \ as musical instruments. 
Loudspeakers and microphones are modeled within thescene\ - \ along with the listener/performer, creating a navigable 3Dsonic space where\ - \ sound sources and sinks process audio according to user-defined spatial mappings.},\n\ - \ address = {Paris, France},\n author = {Wozniewski, Mike and Settel, Zack and\ - \ Cooperstock, Jeremy R.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177021},\n\ - \ issn = {2220-4806},\n keywords = {Control paradigms, 3D audio, spatialization,\ - \ immersive audio environments, auditory display, acoustic modeling, spatial inter-\ - \ faces, virtual instrument design },\n pages = {144--149},\n title = {A Framework\ - \ for Immersive Spatial Audio Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_144.pdf},\n\ - \ year = {2006}\n}\n" + ID: Dubrau2008 + address: 'Genoa, Italy' + author: 'Dubrau, Josh and Havryliv, Mark' + bibtex: "@inproceedings{Dubrau2008,\n address = {Genoa, Italy},\n author = {Dubrau,\ + \ Josh and Havryliv, Mark},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179525},\n\ + \ issn = {2220-4806},\n keywords = {Poetry, language sonification, psychoanalysis,\ + \ linguistics, Freud, realtime poetry. 
},\n pages = {164--167},\n title = {P[a]ra[pra]xis\ + \ : Poetry in Motion},\n url = {http://www.nime.org/proceedings/2008/nime2008_164.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177021 + doi: 10.5281/zenodo.1179525 issn: 2220-4806 - keywords: 'Control paradigms, 3D audio, spatialization, immersive audio environments, - auditory display, acoustic modeling, spatial inter- faces, virtual instrument - design ' - pages: 144--149 - title: A Framework for Immersive Spatial Audio Performance - url: http://www.nime.org/proceedings/2006/nime2006_144.pdf - year: 2006 + keywords: 'Poetry, language sonification, psychoanalysis, linguistics, Freud, realtime + poetry. ' + pages: 164--167 + title: 'P[a]ra[pra]xis : Poetry in Motion' + url: http://www.nime.org/proceedings/2008/nime2008_164.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Francois2006 - address: 'Paris, France' - author: 'Francois, Alexander R. and Chew, Elaine' - bibtex: "@inproceedings{Francois2006,\n address = {Paris, France},\n author = {Francois,\ - \ Alexander R. and Chew, Elaine},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176901},\n\ - \ issn = {2220-4806},\n keywords = {Software Architecture, Interactive Systems,\ - \ Music soft- ware },\n pages = {150--155},\n title = {An Architectural Framework\ - \ for Interactive Music Systems},\n url = {http://www.nime.org/proceedings/2006/nime2006_150.pdf},\n\ - \ year = {2006}\n}\n" + ID: Schacher2008 + abstract: 'Moving out of doors with digital tools and electronic music and creating + musically rich experiences is made possible by the increased availability of ever + smaller and more powerful mobile computers. 
Composing music for and in a landscape + instead of for a closed architectural space offers new perspectives but also raises + questions about interaction and composition of electronic music. The work we present + here was commissioned by a festival and ran on a daily basis over a period of + three months. A GPS-enabled embedded Linux system is assembled to serve as a location-aware + sound platform. Several challenges have to be overcome both technically and artistically + to achieve a seamless experience and provide a simple device to be handed to the + public. By building this interactive experience, which relies as much on the user''s + willingness to explore the invisible sonic landscape as on the ability to deploy + the technology, a number of new avenues for exploring electronic music and interactivity + in location-based media open up. New ways of composing music for and in a landscape + and for creating audience interaction are explored. ' + address: 'Genoa, Italy' + author: 'Schacher, Jan C.' + bibtex: "@inproceedings{Schacher2008,\n abstract = {Moving out of doors with digital\ + \ tools and electronic music and creating musically rich experiences is made possible\ + \ by the increased availability of ever smaller and more powerful mobile computers.\ + \ Composing music for and in a landscape instead of for a closed architectural\ + \ space offers new perspectives but also raises questions about interaction and\ + \ composition of electronic music. The work we present here was commissioned by\ + \ a festival and ran on a daily basis over a period of three months. A GPS-enabled\ + \ embedded Linux system is assembled to serve as a location-aware sound platform.\ + \ Several challenges have to be overcome both technically and artistically to\ + \ achieve a seamless experience and provide a simple device to be handed to the\ + \ public. 
By building this interactive experience, which relies as much on the\ + \ user's willingness to explore the invisible sonic landscape as on the ability\ + \ to deploy the technology, a number of new avenues for exploring electronic music\ + \ and interactivity in location-based media open up. New ways of composing music\ + \ for and in a landscape and for creating audience interaction are explored. },\n\ + \ address = {Genoa, Italy},\n author = {Schacher, Jan C.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179623},\n issn = {2220-4806},\n keywords = {Location-based,\ + \ electronic music, composition, embedded Linux, GPS, Pure Data, interaction,\ + \ mapping, soundscape },\n pages = {168--171},\n title = {Davos Soundscape, a\ + \ Location Based Interactive Composition},\n url = {http://www.nime.org/proceedings/2008/nime2008_168.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176901 + doi: 10.5281/zenodo.1179623 issn: 2220-4806 - keywords: 'Software Architecture, Interactive Systems, Music soft- ware ' - pages: 150--155 - title: An Architectural Framework for Interactive Music Systems - url: http://www.nime.org/proceedings/2006/nime2006_150.pdf - year: 2006 + keywords: 'Location-based, electronic music, composition, embedded Linux, GPS, Pure + Data, interaction, mapping, soundscape ' + pages: 168--171 + title: 'Davos Soundscape, a Location Based Interactive Composition' + url: http://www.nime.org/proceedings/2008/nime2008_168.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Jacquemin2006 - address: 'Paris, France' - author: 'Jacquemin, Christian and de Laubier, Serge' - bibtex: "@inproceedings{Jacquemin2006,\n address = {Paris, France},\n author = {Jacquemin,\ - \ Christian and de Laubier, Serge},\n booktitle = {Proceedings of the International\ - \ Conference on New 
Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176929},\n\ - \ issn = {2220-4806},\n keywords = {audio-visual composition,feedback,transmodality},\n\ - \ pages = {156--161},\n title = {Transmodal Feedback as a New Perspective for\ - \ Audio-visual Effects},\n url = {http://www.nime.org/proceedings/2006/nime2006_156.pdf},\n\ - \ year = {2006}\n}\n" + ID: Schmeder2008 + abstract: 'A general-purpose firmware for a low cost microcontroller is described + that employs the Open Sound Control protocol over USB. The firmware is designed + with considerations for integration in new musical interfaces and embedded devices. + Features of note include stateless design, efficient floating-point support, temporally + correct data handling, and protocol completeness. A timing performance analysis + is conducted.' + address: 'Genoa, Italy' + author: 'Schmeder, Andrew and Freed, Adrian' + bibtex: "@inproceedings{Schmeder2008,\n abstract = {A general-purpose firmware for\ + \ a low cost microcontroller is described that employs the Open Sound Control\ + \ protocol over USB. The firmware is designed with considerations for integration\ + \ in new musical interfaces and embedded devices. Features of note include stateless\ + \ design, efficient floating-point support, temporally correct data handling,\ + \ and protocol completeness. 
A timing performance analysis is conducted.},\n address\ + \ = {Genoa, Italy},\n author = {Schmeder, Andrew and Freed, Adrian},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179627},\n issn = {2220-4806},\n keywords\ + \ = {jitter,latency,nime08,open sound control,pic microcontroller,usb},\n pages\ + \ = {175--180},\n title = {uOSC : The Open Sound Control Reference Platform for\ + \ Embedded Devices},\n url = {http://www.nime.org/proceedings/2008/nime2008_175.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176929 + doi: 10.5281/zenodo.1179627 issn: 2220-4806 - keywords: 'audio-visual composition,feedback,transmodality' - pages: 156--161 - title: Transmodal Feedback as a New Perspective for Audio-visual Effects - url: http://www.nime.org/proceedings/2006/nime2006_156.pdf - year: 2006 + keywords: 'jitter,latency,nime08,open sound control,pic microcontroller,usb' + pages: 175--180 + title: 'uOSC : The Open Sound Control Reference Platform for Embedded Devices' + url: http://www.nime.org/proceedings/2008/nime2008_175.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Magnusson2006 - abstract: 'The ixi software project started in 2000 with the intention to explore - new interactive patterns and virtual interfaces in computer music software. The - aim of this paper is not to describe these programs, as they have been described - elsewhere [14][15], but rather explicate the theoretical background that underlies - the design of these screen-based instruments. After an analysis of the similarities - and differences in the design of acoustic and screen-based instruments, the paper - describes how the creation of an interface is essentially the creation of a semiotic - system that affects and influences the musician and the composer. 
Finally the - terminology of this semiotics is explained as an interaction model. ' - address: 'Paris, France' - author: 'Magnusson, Thor' - bibtex: "@inproceedings{Magnusson2006,\n abstract = {The ixi software project started\ - \ in 2000 with the intention to explore new interactive patterns and virtual interfaces\ - \ in computer music software. The aim of this paper is not to describe these programs,\ - \ as they have been described elsewhere [14][15], but rather explicate the theoretical\ - \ background that underlies the design of these screen-based instruments. After\ - \ an analysis of the similarities and differences in the design of acoustic and\ - \ screen-based instruments, the paper describes how the creation of an interface\ - \ is essentially the creation of a semiotic system that affects and influences\ - \ the musician and the composer. Finally the terminology of this semiotics is\ - \ explained as an interaction model. },\n address = {Paris, France},\n author\ - \ = {Magnusson, Thor},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176969},\n\ - \ issn = {2220-4806},\n keywords = {Interfaces, interaction design, HCI, semiotics,\ - \ actors, OSC, mapping, interaction models, creative tools. },\n pages = {162--167},\n\ - \ title = {Screen-Based Musical Interfaces as Semiotic Machines},\n url = {http://www.nime.org/proceedings/2006/nime2006_162.pdf},\n\ - \ year = {2006}\n}\n" + ID: Place2008 + abstract: 'An approach for creating structured Open Sound Control(OSC) messages + by separating the addressing of node valuesand node properties is suggested. This + includes a methodfor querying values and properties. As a result, it is possibleto + address complex nodes as classes inside of more complextree structures using an + OSC namespace. This is particularly useful for creating flexible communication + in modularsystems. 
A prototype implementation is presented and discussed.' + address: 'Genoa, Italy' + author: 'Place, Timothy and Lossius, Trond and Jensenius, Alexander R. and Peters, + Nils' + bibtex: "@inproceedings{Place2008,\n abstract = {An approach for creating structured\ + \ Open Sound Control(OSC) messages by separating the addressing of node valuesand\ + \ node properties is suggested. This includes a methodfor querying values and\ + \ properties. As a result, it is possibleto address complex nodes as classes inside\ + \ of more complextree structures using an OSC namespace. This is particularly\ + \ useful for creating flexible communication in modularsystems. A prototype implementation\ + \ is presented and discussed.},\n address = {Genoa, Italy},\n author = {Place,\ + \ Timothy and Lossius, Trond and Jensenius, Alexander R. and Peters, Nils},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179613},\n issn = {2220-4806},\n\ + \ keywords = {jamoma,namespace,nime08,osc,standardization},\n pages = {181--184},\n\ + \ title = {Addressing Classes by Differentiating Values and Properties in OSC},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_181.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176969 + doi: 10.5281/zenodo.1179613 issn: 2220-4806 - keywords: 'Interfaces, interaction design, HCI, semiotics, actors, OSC, mapping, - interaction models, creative tools. 
' - pages: 162--167 - title: Screen-Based Musical Interfaces as Semiotic Machines - url: http://www.nime.org/proceedings/2006/nime2006_162.pdf - year: 2006 + keywords: jamoma,namespace,nime08,osc,standardization + pages: 181--184 + title: Addressing Classes by Differentiating Values and Properties in OSC + url: http://www.nime.org/proceedings/2008/nime2008_181.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Zadel2006 - address: 'Paris, France' - author: 'Zadel, Mark and Scavone, Gary' - bibtex: "@inproceedings{Zadel2006,\n address = {Paris, France},\n author = {Zadel,\ - \ Mark and Scavone, Gary},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177025},\n\ - \ issn = {2220-4806},\n keywords = {Software control of computer music, laptop\ - \ performance, graphical interfaces, freehand input, dynamic simulation },\n pages\ - \ = {168--171},\n title = {Different Strokes: a Prototype Software System for\ - \ Laptop Performance and Improvisation},\n url = {http://www.nime.org/proceedings/2006/nime2006_168.pdf},\n\ - \ year = {2006}\n}\n" + ID: Platz2008 + abstract: 'Many mobile devices, specifically mobile phones, come equipped with a + microphone. Microphones are high-fidelity sensors that can pick up sounds relating + to a range of physical phenomena. Using simple feature extraction methods,parameters + can be found that sensibly map to synthesis algorithms to allow expressive and + interactive performance.For example blowing noise can be used as a wind instrument + excitation source. Also other types of interactionscan be detected via microphones, + such as striking. Hencethe microphone, in addition to allowing literal recording,serves + as an additional source of input to the developingfield of mobile phone performance.' 
+ address: 'Genoa, Italy' + author: 'Ananya, Misra and Essl, Georg and Rohs, Michael' + bibtex: "@inproceedings{Platz2008,\n abstract = {Many mobile devices, specifically\ + \ mobile phones, come equipped with a microphone. Microphones are high-fidelity\ + \ sensors that can pick up sounds relating to a range of physical phenomena. Using\ + \ simple feature extraction methods,parameters can be found that sensibly map\ + \ to synthesis algorithms to allow expressive and interactive performance.For\ + \ example blowing noise can be used as a wind instrument excitation source. Also\ + \ other types of interactionscan be detected via microphones, such as striking.\ + \ Hencethe microphone, in addition to allowing literal recording,serves as an\ + \ additional source of input to the developingfield of mobile phone performance.},\n\ + \ address = {Genoa, Italy},\n author = {Ananya, Misra and Essl, Georg and Rohs,\ + \ Michael},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179485},\n issn\ + \ = {2220-4806},\n keywords = {mobile music making, microphone, mobile-stk },\n\ + \ pages = {185--188},\n title = {Microphone as Sensor in Mobile Phone Performance},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_185.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177025 + doi: 10.5281/zenodo.1179485 issn: 2220-4806 - keywords: 'Software control of computer music, laptop performance, graphical interfaces, - freehand input, dynamic simulation ' - pages: 168--171 - title: 'Different Strokes: a Prototype Software System for Laptop Performance and - Improvisation' - url: http://www.nime.org/proceedings/2006/nime2006_168.pdf - year: 2006 + keywords: 'mobile music making, microphone, mobile-stk ' + pages: 185--188 + title: Microphone as Sensor in Mobile Phone Performance + url: 
http://www.nime.org/proceedings/2008/nime2008_185.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Nishibori2006 - abstract: 'Development of a musical interface which allows people to play music - intuitively and create music visibly. ' - address: 'Paris, France' - author: 'Nishibori, Yu and Iwai, Toshio' - bibtex: "@inproceedings{Nishibori2006,\n abstract = {Development of a musical interface\ - \ which allows people to play music intuitively and create music visibly. },\n\ - \ address = {Paris, France},\n author = {Nishibori, Yu and Iwai, Toshio},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176979},\n issn = {2220-4806},\n pages\ - \ = {172--175},\n title = {TENORI-ON},\n url = {http://www.nime.org/proceedings/2006/nime2006_172.pdf},\n\ - \ year = {2006}\n}\n" + ID: Bouillot2008 + address: 'Genoa, Italy' + author: 'Bouillot, Nicolas and Wozniewski, Mike and Settel, Zack and Cooperstock, + Jeremy R.' 
+ bibtex: "@inproceedings{Bouillot2008,\n address = {Genoa, Italy},\n author = {Bouillot,\ + \ Nicolas and Wozniewski, Mike and Settel, Zack and Cooperstock, Jeremy R.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179499},\n issn = {2220-4806},\n\ + \ keywords = {nime08},\n pages = {189--192},\n title = {A Mobile Wireless Augmented\ + \ Guitar},\n url = {http://www.nime.org/proceedings/2008/nime2008_189.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176979 + doi: 10.5281/zenodo.1179499 issn: 2220-4806 - pages: 172--175 - title: TENORI-ON - url: http://www.nime.org/proceedings/2006/nime2006_172.pdf - year: 2006 + keywords: nime08 + pages: 189--192 + title: A Mobile Wireless Augmented Guitar + url: http://www.nime.org/proceedings/2008/nime2008_189.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Jensenius2006a - abstract: 'This paper presents our need for a Gesture Description Interchange Format - (GDIF) for storing, retrieving and sharing information about music-related gestures. - Ideally, it should be possible to store all sorts of data from various commercial - and custom made controllers, motion capture and computer vision systems, as well - as results from different types of gesture analysis, in a coherent and consistent - way. This would make it possible to use the information with different software, - platforms and devices, and also allow for sharing data between research institutions. - We present some of the data types that should be included, and discuss issues - which need to be resolved.' 
- address: 'Paris, France' - author: 'Jensenius, Alexander Refsum and Kvifte, Tellef and Godøy, Rolf Inge' - bibtex: "@inproceedings{Jensenius2006a,\n abstract = {This paper presents our need\ - \ for a Gesture Description Interchange Format (GDIF) for storing, retrieving\ - \ and sharing information about music-related gestures. Ideally, it should be\ - \ possible to store all sorts of data from various commercial and custom made\ - \ controllers, motion capture and computer vision systems, as well as results\ - \ from different types of gesture analysis, in a coherent and consistent way.\ - \ This would make it possible to use the information with different software,\ - \ platforms and devices, and also allow for sharing data between research institutions.\ - \ We present some of the data types that should be included, and discuss issues\ - \ which need to be resolved.},\n address = {Paris, France},\n author = {Jensenius,\ - \ Alexander Refsum and Kvifte, Tellef and Godøy, Rolf Inge},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176931},\n issn = {2220-4806},\n keywords = {Gesture\ - \ description, gesture analysis, standards },\n pages = {176--179},\n title =\ - \ {Towards a Gesture Description Interchange Format},\n url = {http://www.nime.org/proceedings/2006/nime2006_176.pdf},\n\ - \ year = {2006}\n}\n" + ID: Jacobs2008 + abstract: None + address: 'Genoa, Italy' + author: 'Jacobs, Robert and Feldmeier, Mark and Paradiso, Joseph A.' 
+ bibtex: "@inproceedings{Jacobs2008,\n abstract = {None},\n address = {Genoa, Italy},\n\ + \ author = {Jacobs, Robert and Feldmeier, Mark and Paradiso, Joseph A.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179567},\n issn = {2220-4806},\n keywords\ + \ = {None},\n pages = {193--196},\n title = {A Mobile Music Environment Using\ + \ a PD Compiler and Wireless Sensors},\n url = {http://www.nime.org/proceedings/2008/nime2008_193.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176931 + doi: 10.5281/zenodo.1179567 issn: 2220-4806 - keywords: 'Gesture description, gesture analysis, standards ' - pages: 176--179 - title: Towards a Gesture Description Interchange Format - url: http://www.nime.org/proceedings/2006/nime2006_176.pdf - year: 2006 + keywords: None + pages: 193--196 + title: A Mobile Music Environment Using a PD Compiler and Wireless Sensors + url: http://www.nime.org/proceedings/2008/nime2008_193.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Wanderley2006 - address: 'Paris, France' - author: 'Wanderley, Marcelo M. and Birnbaum, David and Malloch, Joseph and Sinyor, - Elliot and Boissinot, Julien' - bibtex: "@inproceedings{Wanderley2006,\n address = {Paris, France},\n author = {Wanderley,\ - \ Marcelo M. 
and Birnbaum, David and Malloch, Joseph and Sinyor, Elliot and Boissinot,\ - \ Julien},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177015},\n issn = {2220-4806},\n\ - \ keywords = {sensors, Wiki, collaborative website, open content },\n pages =\ - \ {180--183},\n title = {SensorWiki.org: A Collaborative Resource for Researchers\ - \ and Interface Designers},\n url = {http://www.nime.org/proceedings/2006/nime2006_180.pdf},\n\ - \ year = {2006}\n}\n" + ID: Bencina2008 + address: 'Genoa, Italy' + author: 'Bencina, Ross and Wilde, Danielle and Langley, Somaya' + bibtex: "@inproceedings{Bencina2008,\n address = {Genoa, Italy},\n author = {Bencina,\ + \ Ross and Wilde, Danielle and Langley, Somaya},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1179491},\n issn = {2220-4806},\n keywords = {gestural control,mapping,nime08,prototyping,three-axis\ + \ accelerometers,vocal,wii remote},\n pages = {197--202},\n title = {Gesture=Sound\ + \ Experiments : Process and Mappings},\n url = {http://www.nime.org/proceedings/2008/nime2008_197.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177015 + doi: 10.5281/zenodo.1179491 issn: 2220-4806 - keywords: 'sensors, Wiki, collaborative website, open content ' - pages: 180--183 - title: 'SensorWiki.org: A Collaborative Resource for Researchers and Interface Designers' - url: http://www.nime.org/proceedings/2006/nime2006_180.pdf - year: 2006 + keywords: 'gestural control,mapping,nime08,prototyping,three-axis accelerometers,vocal,wii + remote' + pages: 197--202 + title: 'Gesture=Sound Experiments : Process and Mappings' + url: http://www.nime.org/proceedings/2008/nime2008_197.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Dimitrov2006 - address: 'Paris, France' 
- author: 'Dimitrov, Smilen and Serafin, Stefania' - bibtex: "@inproceedings{Dimitrov2006,\n address = {Paris, France},\n author = {Dimitrov,\ - \ Smilen and Serafin, Stefania},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176891},\n\ - \ issn = {2220-4806},\n pages = {184--187},\n title = {A Simple Practical Approach\ - \ to a Wireless Data Acquisition Board},\n url = {http://www.nime.org/proceedings/2006/nime2006_184.pdf},\n\ - \ year = {2006}\n}\n" + ID: Ciglar2008 + address: 'Genoa, Italy' + author: 'Ciglar, Miha' + bibtex: "@inproceedings{Ciglar2008,\n address = {Genoa, Italy},\n author = {Ciglar,\ + \ Miha},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179511},\n issn = {2220-4806},\n\ + \ keywords = {dancer, fig, from the system in, gesture recognition, haptic feedback,\ + \ in, markers attached to the, motion tracking, nime08, s limbs, the dancer receives\ + \ feedback, two ways},\n pages = {203--206},\n title = {\"3rd. Pole\" -- A Composition\ + \ Performed via Gestural Cues},\n url = {http://www.nime.org/proceedings/2008/nime2008_203.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176891 + doi: 10.5281/zenodo.1179511 issn: 2220-4806 - pages: 184--187 - title: A Simple Practical Approach to a Wireless Data Acquisition Board - url: http://www.nime.org/proceedings/2006/nime2006_184.pdf - year: 2006 + keywords: 'dancer, fig, from the system in, gesture recognition, haptic feedback, + in, markers attached to the, motion tracking, nime08, s limbs, the dancer receives + feedback, two ways' + pages: 203--206 + title: '"3rd. 
Pole" -- A Composition Performed via Gestural Cues' + url: http://www.nime.org/proceedings/2008/nime2008_203.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Hansen2006 - address: 'Paris, France' - author: 'Hansen, Kjetil F. and Bresin, Roberto' - bibtex: "@inproceedings{Hansen2006,\n address = {Paris, France},\n author = {Hansen,\ - \ Kjetil F. and Bresin, Roberto},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176921},\n\ - \ issn = {2220-4806},\n keywords = {controllers,dj,instrument mapping,scratching,virtual},\n\ - \ pages = {188--191},\n title = {Mapping Strategies in DJ Scratching},\n url =\ - \ {http://www.nime.org/proceedings/2006/nime2006_188.pdf},\n year = {2006}\n}\n" + ID: Hansen2008 + abstract: 'This paper describes a project started for implementing DJscratching + techniques on the reactable. By interacting withobjects representing scratch patterns + commonly performedon the turntable and the crossfader, the musician can playwith + DJ techniques and manipulate how they are executedin a performance. This is a + novel approach to the digital DJapplications and hardware. Two expert musicians + practisedand performed on the reactable in order to both evaluate theplayability + and improve the design of the DJ techniques.' + address: 'Genoa, Italy' + author: 'Hansen, Kjetil F. and Alonso, Marcos' + bibtex: "@inproceedings{Hansen2008,\n abstract = {This paper describes a project\ + \ started for implementing DJscratching techniques on the reactable. By interacting\ + \ withobjects representing scratch patterns commonly performedon the turntable\ + \ and the crossfader, the musician can playwith DJ techniques and manipulate how\ + \ they are executedin a performance. This is a novel approach to the digital DJapplications\ + \ and hardware. 
Two expert musicians practisedand performed on the reactable in\ + \ order to both evaluate theplayability and improve the design of the DJ techniques.},\n\ + \ address = {Genoa, Italy},\n author = {Hansen, Kjetil F. and Alonso, Marcos},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179555},\n issn = {2220-4806},\n\ + \ keywords = {dj scratch techniques,interfaces,nime08,playability,reactable},\n\ + \ pages = {207--210},\n title = {More DJ Techniques on the reactable},\n url =\ + \ {http://www.nime.org/proceedings/2008/nime2008_207.pdf},\n year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176921 + doi: 10.5281/zenodo.1179555 issn: 2220-4806 - keywords: 'controllers,dj,instrument mapping,scratching,virtual' - pages: 188--191 - title: Mapping Strategies in DJ Scratching - url: http://www.nime.org/proceedings/2006/nime2006_188.pdf - year: 2006 + keywords: 'dj scratch techniques,interfaces,nime08,playability,reactable' + pages: 207--210 + title: More DJ Techniques on the reactable + url: http://www.nime.org/proceedings/2008/nime2008_207.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Kessous2006 - abstract: 'In this paper we describe a new guitar-like musical controller. The ''GXtar'' - is an instrument which takes as a starting point a guitar but his role is to bring - different and new musical possibilities while preserving the spirit and techniques - of guitar. Therefore, it was conceived and carried out starting from the body - of an electric guitar. The fingerboard of this guitar was equipped with two lines - of sensors: linear position sensors, and tactile pressure sensors. These two lines - of sensors are used as two virtual strings. Their two ends are the bridge and - the nut of the guitar. 
The design of the instrument is made in a way that the - position of a finger, on one of these virtual strings, corresponds to the note, - which would have been played on a real and vibrating string. On the soundboard - of the guitar, a controller, with 3 degrees of freedom, allows to drive other - synthesis parameters. We then describe how this interface is integrated in a musical - audio system and serves as a musical instrument. ' - address: 'Paris, France' - author: 'Kessous, Loïc and Castet, Julien and Arfib, Daniel' - bibtex: "@inproceedings{Kessous2006,\n abstract = {In this paper we describe a new\ - \ guitar-like musical controller. The 'GXtar' is an instrument which takes as\ - \ a starting point a guitar but his role is to bring different and new musical\ - \ possibilities while preserving the spirit and techniques of guitar. Therefore,\ - \ it was conceived and carried out starting from the body of an electric guitar.\ - \ The fingerboard of this guitar was equipped with two lines of sensors: linear\ - \ position sensors, and tactile pressure sensors. These two lines of sensors are\ - \ used as two virtual strings. Their two ends are the bridge and the nut of the\ - \ guitar. The design of the instrument is made in a way that the position of a\ - \ finger, on one of these virtual strings, corresponds to the note, which would\ - \ have been played on a real and vibrating string. On the soundboard of the guitar,\ - \ a controller, with 3 degrees of freedom, allows to drive other synthesis parameters.\ - \ We then describe how this interface is integrated in a musical audio system\ - \ and serves as a musical instrument. 
},\n address = {Paris, France},\n author\ - \ = {Kessous, Lo\\\"{i}c and Castet, Julien and Arfib, Daniel},\n booktitle =\ + ID: Dimitrov2008 + abstract: 'This paper reports on a Short-Term Scientific Mission (STSM)sponsored + by the Sonic Interaction Design (SID) EuropeanCOST Action IC601.Prototypes of + objects for the novel instrument Reactablewere developed, with the goal of studying + sonification ofmovements on this platform using physical models. A physical model + of frictional interactions between rubbed dry surfaces was used as an audio generation + engine, which alloweddevelopment in two directions --- a set of objects that affordsmotions + similar to sliding, and a single object aiming tosonify contact friction sound. + Informal evaluation was obtained from a Reactable expert user, regarding these + sets ofobjects. Experiments with the objects were also performed- related to both + audio filtering, and interfacing with otherobjects for the Reactable.' + address: 'Genoa, Italy' + author: 'Dimitrov, Smilen and Alonso, Marcos and Serafin, Stefania' + bibtex: "@inproceedings{Dimitrov2008,\n abstract = {This paper reports on a Short-Term\ + \ Scientific Mission (STSM)sponsored by the Sonic Interaction Design (SID) EuropeanCOST\ + \ Action IC601.Prototypes of objects for the novel instrument Reactablewere developed,\ + \ with the goal of studying sonification ofmovements on this platform using physical\ + \ models. A physical model of frictional interactions between rubbed dry surfaces\ + \ was used as an audio generation engine, which alloweddevelopment in two directions\ + \ --- a set of objects that affordsmotions similar to sliding, and a single object\ + \ aiming tosonify contact friction sound. Informal evaluation was obtained from\ + \ a Reactable expert user, regarding these sets ofobjects. 
Experiments with the\ + \ objects were also performed- related to both audio filtering, and interfacing\ + \ with otherobjects for the Reactable.},\n address = {Genoa, Italy},\n author\ + \ = {Dimitrov, Smilen and Alonso, Marcos and Serafin, Stefania},\n booktitle =\ \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176857},\n issn = {2220-4806},\n keywords = {Guitar,\ - \ alternate controller, sensors, synthesizer, multidimensional control. },\n pages\ - \ = {192--195},\n title = {'GXtar', an Interface Using Guitar Techniques},\n url\ - \ = {http://www.nime.org/proceedings/2006/nime2006_192.pdf},\n year = {2006}\n\ + \ doi = {10.5281/zenodo.1179523},\n issn = {2220-4806},\n keywords = {Reactable,\ + \ physical model, motion sonification, contact fric- tion },\n pages = {211--214},\n\ + \ title = {Developing Block-Movement, Physical-Model Based Objects for the Reactable},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_211.pdf},\n year = {2008}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176857 + doi: 10.5281/zenodo.1179523 issn: 2220-4806 - keywords: 'Guitar, alternate controller, sensors, synthesizer, multidimensional - control. ' - pages: 192--195 - title: '''GXtar'', an Interface Using Guitar Techniques' - url: http://www.nime.org/proceedings/2006/nime2006_192.pdf - year: 2006 + keywords: 'Reactable, physical model, motion sonification, contact fric- tion ' + pages: 211--214 + title: 'Developing Block-Movement, Physical-Model Based Objects for the Reactable' + url: http://www.nime.org/proceedings/2008/nime2008_211.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Burns2006 - address: 'Paris, France' - author: 'Burns, Anne-Marie and Wanderley, Marcelo M.' 
- bibtex: "@inproceedings{Burns2006,\n address = {Paris, France},\n author = {Burns,\ - \ Anne-Marie and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176850},\n\ - \ issn = {2220-4806},\n keywords = {finger-tracking,gesture,guitar fingering,hough\ - \ transform},\n pages = {196--199},\n title = {Visual Methods for the Retrieval\ - \ of Guitarist Fingering},\n url = {http://www.nime.org/proceedings/2006/nime2006_196.pdf},\n\ - \ year = {2006}\n}\n" + ID: Thiebaut2008 + abstract: 'This research focuses on real-time gesture learning and recognition. + Events arrive in a continuous stream without explicitly given boundaries. To obtain + temporal accuracy, weneed to consider the lag between the detection of an eventand + any effects we wish to trigger with it. Two methodsfor real time gesture recognition + using a Nintendo Wii controller are presented. The first detects gestures similar + to agiven template using either a Euclidean distance or a cosinesimilarity measure. + The second method uses novel information theoretic methods to detect and categorize + gestures inan unsupervised way. The role of supervision, detection lagand the + importance of haptic feedback are discussed.' + address: 'Genoa, Italy' + author: 'Thiebaut, Jean-Baptiste and Abdallah, Samer and Robertson, Andrew and Bryan-Kinns, + Nick and Plumbley, Mark D.' + bibtex: "@inproceedings{Thiebaut2008,\n abstract = {This research focuses on real-time\ + \ gesture learning and recognition. Events arrive in a continuous stream without\ + \ explicitly given boundaries. To obtain temporal accuracy, weneed to consider\ + \ the lag between the detection of an eventand any effects we wish to trigger\ + \ with it. Two methodsfor real time gesture recognition using a Nintendo Wii controller\ + \ are presented. 
The first detects gestures similar to agiven template using either\ + \ a Euclidean distance or a cosinesimilarity measure. The second method uses novel\ + \ information theoretic methods to detect and categorize gestures inan unsupervised\ + \ way. The role of supervision, detection lagand the importance of haptic feedback\ + \ are discussed.},\n address = {Genoa, Italy},\n author = {Thiebaut, Jean-Baptiste\ + \ and Abdallah, Samer and Robertson, Andrew and Bryan-Kinns, Nick and Plumbley,\ + \ Mark D.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179639},\n issn\ + \ = {2220-4806},\n keywords = {Gesture recognition, supervised and unsupervised\ + \ learning, interaction, haptic feedback, information dynamics, HMMs },\n pages\ + \ = {215--218},\n title = {Real Time Gesture Learning and Recognition : Towards\ + \ Automatic Categorization},\n url = {http://www.nime.org/proceedings/2008/nime2008_215.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176850 + doi: 10.5281/zenodo.1179639 issn: 2220-4806 - keywords: 'finger-tracking,gesture,guitar fingering,hough transform' - pages: 196--199 - title: Visual Methods for the Retrieval of Guitarist Fingering - url: http://www.nime.org/proceedings/2006/nime2006_196.pdf - year: 2006 + keywords: 'Gesture recognition, supervised and unsupervised learning, interaction, + haptic feedback, information dynamics, HMMs ' + pages: 215--218 + title: 'Real Time Gesture Learning and Recognition : Towards Automatic Categorization' + url: http://www.nime.org/proceedings/2008/nime2008_215.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Schoonderwaldt2006 - abstract: 'A cost-effective method was developed for the estimation of the bow velocity - in violin playing, using an accelerometer on the bow in combination with point - tracking using a standard 
video camera. The video data are used to detect the - moments of bow direction changes. This information is used for piece-wise integration - of the accelerometer signal, resulting in a drift-free reconstructed velocity - signal with a high temporal resolution. The method was evaluated using a 3D motion - capturing system, providing a reliable reference of the actual bow velocity. The - method showed good results when the accelerometer and video stream are synchronized. - Additional latency and jitter of the camera stream can importantly decrease the - performance of the method, depending on the bow stroke type. ' - address: 'Paris, France' - author: 'Schoonderwaldt, Erwin and Rasamimanana, Nicolas and Bevilacqua, Frédéric' - bibtex: "@inproceedings{Schoonderwaldt2006,\n abstract = {A cost-effective method\ - \ was developed for the estimation of the bow velocity in violin playing, using\ - \ an accelerometer on the bow in combination with point tracking using a standard\ - \ video camera. The video data are used to detect the moments of bow direction\ - \ changes. This information is used for piece-wise integration of the accelerometer\ - \ signal, resulting in a drift-free reconstructed velocity signal with a high\ - \ temporal resolution. The method was evaluated using a 3D motion capturing system,\ - \ providing a reliable reference of the actual bow velocity. The method showed\ - \ good results when the accelerometer and video stream are synchronized. Additional\ - \ latency and jitter of the camera stream can importantly decrease the performance\ - \ of the method, depending on the bow stroke type. 
},\n address = {Paris, France},\n\ - \ author = {Schoonderwaldt, Erwin and Rasamimanana, Nicolas and Bevilacqua, Fr\\\ - '{e}d\\'{e}ric},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177003},\n\ - \ issn = {2220-4806},\n keywords = {Bowing gestures, bowed string, violin, bow\ - \ velocity, accelerometer, video tracking. },\n pages = {200--203},\n title =\ - \ {Combining Accelerometer and Video Camera: Reconstruction of Bow Velocity Profiles},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_200.pdf},\n year = {2006}\n\ - }\n" + ID: Kimura2008 + abstract: 'This paper describes the compositional process for creatingthe interactive + work for violin entitled VITESSIMO using theAugmented Violin [1].' + address: 'Genoa, Italy' + author: 'Kimura, Mari' + bibtex: "@inproceedings{Kimura2008,\n abstract = {This paper describes the compositional\ + \ process for creatingthe interactive work for violin entitled VITESSIMO using\ + \ theAugmented Violin [1].},\n address = {Genoa, Italy},\n author = {Kimura, Mari},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179581},\n issn = {2220-4806},\n\ + \ keywords = {Augmented Violin, gesture tracking, interactive performance },\n\ + \ pages = {219--220},\n title = {Making of VITESSIMO for Augmented Violin : Compositional\ + \ Process and Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_219.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177003 + doi: 10.5281/zenodo.1179581 issn: 2220-4806 - keywords: 'Bowing gestures, bowed string, violin, bow velocity, accelerometer, video - tracking. 
' - pages: 200--203 - title: 'Combining Accelerometer and Video Camera: Reconstruction of Bow Velocity - Profiles' - url: http://www.nime.org/proceedings/2006/nime2006_200.pdf - year: 2006 + keywords: 'Augmented Violin, gesture tracking, interactive performance ' + pages: 219--220 + title: 'Making of VITESSIMO for Augmented Violin : Compositional Process and Performance' + url: http://www.nime.org/proceedings/2008/nime2008_219.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Leroy2006 - address: 'Paris, France' - author: 'Leroy, Nicolas and Fl\''{e}ty, Emmanuel and Bevilacqua, Fr\''{e}d\''{e}ric' - bibtex: "@inproceedings{Leroy2006,\n address = {Paris, France},\n author = {Leroy,\ - \ Nicolas and Fl\\'{e}ty, Emmanuel and Bevilacqua, Fr\\'{e}d\\'{e}ric},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176859},\n issn = {2220-4806},\n pages\ - \ = {204--207},\n title = {Reflective Optical Pickup For Violin},\n url = {http://www.nime.org/proceedings/2006/nime2006_204.pdf},\n\ - \ year = {2006}\n}\n" + ID: Loviscach2008 + abstract: 'Sound libraries for music synthesizers easily comprise one thousand or + more programs (''''patches''''). Thus, there are enough raw data to apply data + mining to reveal typical settings and to extract dependencies. Intelligent user + interfaces for music synthesizers can be based on such statistics. This paper + proposes two approaches: First, the user sets any number of parameters and then + lets the system find the nearest sounds in the database, a kind of patch autocompletion. + Second, all parameters are "live" as usual, but turning one knob or setting a + switch will also change the settings of other, statistically related controls. + Both approaches canbe used with the standard interface of the synthesizer. On + top of that, this paper introduces alternative or additional interfaces based + on data visualization.' 
+ address: 'Genoa, Italy' + author: 'Loviscach, Jörn' + bibtex: "@inproceedings{Loviscach2008,\n abstract = {Sound libraries for music synthesizers\ + \ easily comprise one thousand or more programs (''patches''). Thus, there are\ + \ enough raw data to apply data mining to reveal typical settings and to extract\ + \ dependencies. Intelligent user interfaces for music synthesizers can be based\ + \ on such statistics. This paper proposes two approaches: First, the user sets\ + \ any number of parameters and then lets the system find the nearest sounds in\ + \ the database, a kind of patch autocompletion. Second, all parameters are \"\ + live\" as usual, but turning one knob or setting a switch will also change the\ + \ settings of other, statistically related controls. Both approaches canbe used\ + \ with the standard interface of the synthesizer. On top of that, this paper introduces\ + \ alternative or additional interfaces based on data visualization.},\n address\ + \ = {Genoa, Italy},\n author = {Loviscach, J\\''{o}rn},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179591},\n issn = {2220-4806},\n keywords = {Information\ + \ visualization, mutual information, intelligent user interfaces },\n pages =\ + \ {221--224},\n title = {Programming a Music Synthesizer through Data Mining},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_221.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176859 + doi: 10.5281/zenodo.1179591 issn: 2220-4806 - pages: 204--207 - title: Reflective Optical Pickup For Violin - url: http://www.nime.org/proceedings/2006/nime2006_204.pdf - year: 2006 + keywords: 'Information visualization, mutual information, intelligent user interfaces ' + pages: 221--224 + title: Programming a Music Synthesizer through Data Mining + url: 
http://www.nime.org/proceedings/2008/nime2008_221.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Jorda2006 - abstract: 'This paper introduces the scoreTable*, a tangible interactive music score - editor which started as a simple application for demoing "traditional" approaches - to music creation, using the reacTable* technology, and which has evolved into - an independent research project on its own. After a brief discussion on the role - of pitch in music, we present a brief overview of related tangible music editors, - and discuss several paradigms in computer music creation, contrasting synchronous - with asynchronous approaches. The final part of the paper describes the current - state of the scoreTable* as well as its future lines of research.' - address: 'Paris, France' - author: 'Jordà, Sergi and Alonso, Marcos' - bibtex: "@inproceedings{Jorda2006,\n abstract = {This paper introduces the scoreTable*,\ - \ a tangible interactive music score editor which started as a simple application\ - \ for demoing \"traditional\" approaches to music creation, using the reacTable*\ - \ technology, and which has evolved into an independent research project on its\ - \ own. After a brief discussion on the role of pitch in music, we present a brief\ - \ overview of related tangible music editors, and discuss several paradigms in\ - \ computer music creation, contrasting synchronous with asynchronous approaches.\ - \ The final part of the paper describes the current state of the scoreTable* as\ - \ well as its future lines of research.},\n address = {Paris, France},\n author\ - \ = {Jord\\`{a}, Sergi and Alonso, Marcos},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176855},\n issn = {2220-4806},\n keywords = {Musical instrument,\ - \ Collaborative Music, Computer Supported Collaborative Work, Tangible User Interface,\ - \ Music Theory. 
},\n pages = {208--211},\n title = {Mary Had a Little scoreTable*\ - \ or the reacTable* Goes Melodic},\n url = {http://www.nime.org/proceedings/2006/nime2006_208.pdf},\n\ - \ year = {2006}\n}\n" + ID: Ng2008 + abstract: 'This paper presents a project called i-Maestro (www.i-maestro.org) which + develops interactive multimedia environments for technology enhanced music education. + The project explores novel solutions for music training in both theory and performance, + building on recent innovations resulting from the development of computer and + information technologies, by exploiting new pedagogical paradigms with cooperative + and interactive self-learning environments, gesture interfaces, and augmented + instruments. This paper discusses the general context along with the background + and current developments of the project, together with an overview of the framework + and discussions on a number of selected tools to support technology-enhanced music + learning and teaching. ' + address: 'Genoa, Italy' + author: 'Ng, Kia and Nesi, Paolo' + bibtex: "@inproceedings{Ng2008,\n abstract = {This paper presents a project called\ + \ i-Maestro (www.i-maestro.org) which develops interactive multimedia environments\ + \ for technology enhanced music education. The project explores novel solutions\ + \ for music training in both theory and performance, building on recent innovations\ + \ resulting from the development of computer and information technologies, by\ + \ exploiting new pedagogical paradigms with cooperative and interactive self-learning\ + \ environments, gesture interfaces, and augmented instruments. 
This paper discusses\ + \ the general context along with the background and current developments of the\ + \ project, together with an overview of the framework and discussions on a number\ + \ of selected tools to support technology-enhanced music learning and teaching.\ + \ },\n address = {Genoa, Italy},\n author = {Ng, Kia and Nesi, Paolo},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179605},\n issn = {2220-4806},\n keywords\ + \ = {augmented instrument,education,gesture,interactive,interface,motion,multimedia,music,nime08,notation,sensor,sonification,technology-enhanced\ + \ learning,visualisation},\n pages = {225--228},\n title = {i-Maestro : Technology-Enhanced\ + \ Learning and Teaching for Music},\n url = {http://www.nime.org/proceedings/2008/nime2008_225.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176855 + doi: 10.5281/zenodo.1179605 issn: 2220-4806 - keywords: 'Musical instrument, Collaborative Music, Computer Supported Collaborative - Work, Tangible User Interface, Music Theory. 
' - pages: 208--211 - title: Mary Had a Little scoreTable* or the reacTable* Goes Melodic - url: http://www.nime.org/proceedings/2006/nime2006_208.pdf - year: 2006 + keywords: 'augmented instrument,education,gesture,interactive,interface,motion,multimedia,music,nime08,notation,sensor,sonification,technology-enhanced + learning,visualisation' + pages: 225--228 + title: 'i-Maestro : Technology-Enhanced Learning and Teaching for Music' + url: http://www.nime.org/proceedings/2008/nime2008_225.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Crevoisier2006 - address: 'Paris, France' - author: 'Crevoisier, Alain and Bornand, C\''{e}dric and Guichard, Arnaud and Matsumura, - Seiichiro and Arakawa, Chuichi' - bibtex: "@inproceedings{Crevoisier2006,\n address = {Paris, France},\n author =\ - \ {Crevoisier, Alain and Bornand, C\\'{e}dric and Guichard, Arnaud and Matsumura,\ - \ Seiichiro and Arakawa, Chuichi},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176853},\n\ - \ issn = {2220-4806},\n pages = {212--215},\n title = {Sound Rose: Creating Music\ - \ and Images with a Touch Table},\n url = {http://www.nime.org/proceedings/2006/nime2006_212.pdf},\n\ - \ year = {2006}\n}\n" + ID: Kuyken2008 + abstract: 'This paper describes the HOP system. It consists of a wireless module + built up by multiple nodes and a base station. The nodes detect acceleration of + e.g. human movement. At a rate of 100 Hertz the base station collects the acceleration + samples. The data can be acquired in real-time software like Pure Data and Max/MSP. + The data can be used to analyze and/or sonify movement. 
' + address: 'Genoa, Italy' + author: 'Kuyken, Bart and Verstichel, Wouter and Bossuyt, Frederick and Vanfleteren, + Jan and Demey, Michiel and Leman, Marc' + bibtex: "@inproceedings{Kuyken2008,\n abstract = {This paper describes the HOP system.\ + \ It consists of a wireless module built up by multiple nodes and a base station.\ + \ The nodes detect acceleration of e.g. human movement. At a rate of 100 Hertz\ + \ the base station collects the acceleration samples. The data can be acquired\ + \ in real-time software like Pure Data and Max/MSP. The data can be used to analyze\ + \ and/or sonify movement. },\n address = {Genoa, Italy},\n author = {Kuyken, Bart\ + \ and Verstichel, Wouter and Bossuyt, Frederick and Vanfleteren, Jan and Demey,\ + \ Michiel and Leman, Marc},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179583},\n\ + \ issn = {2220-4806},\n keywords = {Digital Musical Instrument, Wireless Sensors,\ + \ Inertial Sensing, Hop Sensor },\n pages = {229--232},\n title = {The HOP Sensor\ + \ : Wireless Motion Sensor},\n url = {http://www.nime.org/proceedings/2008/nime2008_229.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176853 + doi: 10.5281/zenodo.1179583 issn: 2220-4806 - pages: 212--215 - title: 'Sound Rose: Creating Music and Images with a Touch Table' - url: http://www.nime.org/proceedings/2006/nime2006_212.pdf - year: 2006 + keywords: 'Digital Musical Instrument, Wireless Sensors, Inertial Sensing, Hop Sensor ' + pages: 229--232 + title: 'The HOP Sensor : Wireless Motion Sensor' + url: http://www.nime.org/proceedings/2008/nime2008_229.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Davidson2006 - abstract: 'In this paper, we describe our experience in musical interface design - for a large scale, high-resolution, multi-touch display surface. 
We provide an - overview of historical and presentday context in multi-touch audio interaction, - and describe our approach to analysis of tracked multi-finger, multi-hand data - for controlling live audio synthesis.' - address: 'Paris, France' - author: 'Davidson, Philip L. and Han, Jefferson Y.' - bibtex: "@inproceedings{Davidson2006,\n abstract = {In this paper, we describe our\ - \ experience in musical interface design for a large scale, high-resolution, multi-touch\ - \ display surface. We provide an overview of historical and presentday context\ - \ in multi-touch audio interaction, and describe our approach to analysis of tracked\ - \ multi-finger, multi-hand data for controlling live audio synthesis.},\n address\ - \ = {Paris, France},\n author = {Davidson, Philip L. and Han, Jefferson Y.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176889},\n issn = {2220-4806},\n\ - \ keywords = {multi-touch, touch, tactile, bi-manual, multi-user, synthesis, dynamic\ - \ patching },\n pages = {216--219},\n title = {Synthesis and Control on Large\ - \ Scale Multi-Touch Sensing Displays},\n url = {http://www.nime.org/proceedings/2006/nime2006_216.pdf},\n\ - \ year = {2006}\n}\n" + ID: Coghlan2008 + address: 'Genoa, Italy' + author: 'Coghlan, Niall and Knapp, Benjamin' + bibtex: "@inproceedings{Coghlan2008,\n address = {Genoa, Italy},\n author = {Coghlan,\ + \ Niall and Knapp, Benjamin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179513},\n\ + \ issn = {2220-4806},\n keywords = {Ubiquitous computing, context -awareness,\ + \ networking, embedded systems, chairs, digital artefacts, emotional state sensing,\ + \ affective computing, biosignals. 
},\n pages = {233--236},\n title = {Sensory\ + \ Chairs : A System for Biosignal Research and Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_233.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176889 + doi: 10.5281/zenodo.1179513 issn: 2220-4806 - keywords: 'multi-touch, touch, tactile, bi-manual, multi-user, synthesis, dynamic - patching ' - pages: 216--219 - title: Synthesis and Control on Large Scale Multi-Touch Sensing Displays - url: http://www.nime.org/proceedings/2006/nime2006_216.pdf - year: 2006 + keywords: 'Ubiquitous computing, context -awareness, networking, embedded systems, + chairs, digital artefacts, emotional state sensing, affective computing, biosignals. ' + pages: 233--236 + title: 'Sensory Chairs : A System for Biosignal Research and Performance' + url: http://www.nime.org/proceedings/2008/nime2008_233.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Kvifte2006 - abstract: 'This paper discusses the need for a framework for describing musical - instruments and their design, and discusses some possible elements in such a framework. - The framework is meant as an aid in the development of a coherent terminology - for describing, comparing and discussing different musical instruments and musical - instrument designs. Three different perspectives are presented; that of the listener, - the performer, and the constructor, and various levels of descriptions are introduced.' - address: 'Paris, France' - author: 'Kvifte, Tellef and Jensenius, Alexander Refsum' - bibtex: "@inproceedings{Kvifte2006,\n abstract = {This paper discusses the need\ - \ for a framework for describing musical instruments and their design, and discusses\ - \ some possible elements in such a framework. 
The framework is meant as an aid\ - \ in the development of a coherent terminology for describing, comparing and discussing\ - \ different musical instruments and musical instrument designs. Three different\ - \ perspectives are presented; that of the listener, the performer, and the constructor,\ - \ and various levels of descriptions are introduced.},\n address = {Paris, France},\n\ - \ author = {Kvifte, Tellef and Jensenius, Alexander Refsum},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176951},\n issn = {2220-4806},\n keywords = {Musical\ - \ instrument design, mapping, gestures, organology. },\n pages = {220--225},\n\ - \ title = {Towards a Coherent Terminology and Model of Instrument Description\ - \ and Design},\n url = {http://www.nime.org/proceedings/2006/nime2006_220.pdf},\n\ - \ year = {2006}\n}\n" + ID: Godbehere2008 + abstract: 'We present examples of a wireless sensor network as applied to wearable + digital music controllers. Recent advances in wireless Personal Area Networks + (PANs) have precipitated the IEEE 802.15.4 standard for low-power, low-cost wireless + sensor networks. We have applied this new technology to create a fully wireless, + wearable network of accelerometers which are small enough to be hidden under clothing. + Various motion analysis and machine learning techniques are applied to the raw + accelerometer data in real-time to generate and control music on the fly. ' + address: 'Genoa, Italy' + author: 'Godbehere, Andrew B. and Ward, Nathan J.' + bibtex: "@inproceedings{Godbehere2008,\n abstract = {We present examples of a wireless\ + \ sensor network as applied to wearable digital music controllers. Recent advances\ + \ in wireless Personal Area Networks (PANs) have precipitated the IEEE 802.15.4\ + \ standard for low-power, low-cost wireless sensor networks. 
We have applied this\ + \ new technology to create a fully wireless, wearable network of accelerometers\ + \ which are small enough to be hidden under clothing. Various motion analysis\ + \ and machine learning techniques are applied to the raw accelerometer data in\ + \ real-time to generate and control music on the fly. },\n address = {Genoa, Italy},\n\ + \ author = {Godbehere, Andrew B. and Ward, Nathan J.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179547},\n issn = {2220-4806},\n keywords = {Wearable\ + \ computing, personal area networks, accelerometers, 802.15.4, motion analysis,\ + \ human-computer interaction, live performance, digital musical controllers, gestural\ + \ control },\n pages = {237--240},\n title = {Wearable Interfaces for Cyberphysical\ + \ Musical Expression},\n url = {http://www.nime.org/proceedings/2008/nime2008_237.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176951 + doi: 10.5281/zenodo.1179547 issn: 2220-4806 - keywords: 'Musical instrument design, mapping, gestures, organology. ' - pages: 220--225 - title: Towards a Coherent Terminology and Model of Instrument Description and Design - url: http://www.nime.org/proceedings/2006/nime2006_220.pdf - year: 2006 + keywords: 'Wearable computing, personal area networks, accelerometers, 802.15.4, + motion analysis, human-computer interaction, live performance, digital musical + controllers, gestural control ' + pages: 237--240 + title: Wearable Interfaces for Cyberphysical Musical Expression + url: http://www.nime.org/proceedings/2008/nime2008_237.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Marshall2006 - address: 'Paris, France' - author: 'Marshall, Mark T. and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Marshall2006,\n address = {Paris, France},\n author = {Marshall,\ - \ Mark T. 
and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176973},\n\ - \ issn = {2220-4806},\n keywords = {digital musical instruments,tactile feedback,vibro-tactile},\n\ - \ pages = {226--229},\n title = {Vibrotactile Feedback in Digital Musical Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_226.pdf},\n year = {2006}\n\ - }\n" + ID: Hayafuchi2008 + abstract: 'This research aims to develop a wearable musical interface which enables + to control audio and video signals by using hand gestures and human body motions. + We have been developing an audio-visual manipulation system that realizes tracks + control, time-based operations and searching for tracks from massive music library. + It aims to build an emotional and affecting musical interaction, and will provide + a better method of music listening to people. A sophisticated glove-like device + with an acceleration sensor and several strain sensors has been developed. A realtime + signal processing and musical control are executed as a result of gesture recognition. + We also developed a stand-alone device that performs as a musical controller and + player at the same time. In this paper, we describe the development of a compact + and sophisticated sensor device, and demonstrate its performance of audio and + video signals control.' + address: 'Genoa, Italy' + author: 'Hayafuchi, Kouki and Suzuki, Kenji' + bibtex: "@inproceedings{Hayafuchi2008,\n abstract = {This research aims to develop\ + \ a wearable musical interface which enables to control audio and video signals\ + \ by using hand gestures and human body motions. We have been developing an audio-visual\ + \ manipulation system that realizes tracks control, time-based operations and\ + \ searching for tracks from massive music library. 
It aims to build an emotional\ + \ and affecting musical interaction, and will provide a better method of music\ + \ listening to people. A sophisticated glove-like device with an acceleration\ + \ sensor and several strain sensors has been developed. A realtime signal processing\ + \ and musical control are executed as a result of gesture recognition. We also\ + \ developed a stand-alone device that performs as a musical controller and player\ + \ at the same time. In this paper, we describe the development of a compact and\ + \ sophisticated sensor device, and demonstrate its performance of audio and video\ + \ signals control.},\n address = {Genoa, Italy},\n author = {Hayafuchi, Kouki\ + \ and Suzuki, Kenji},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179561},\n\ + \ issn = {2220-4806},\n keywords = {Embodied Sound Media, Music Controller, Gestures,\ + \ Body Motion, Musical Interface },\n pages = {241--244},\n title = {MusicGlove:\ + \ A Wearable Musical Controller for Massive Media Library},\n url = {http://www.nime.org/proceedings/2008/nime2008_241.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176973 + doi: 10.5281/zenodo.1179561 issn: 2220-4806 - keywords: 'digital musical instruments,tactile feedback,vibro-tactile' - pages: 226--229 - title: Vibrotactile Feedback in Digital Musical Instruments - url: http://www.nime.org/proceedings/2006/nime2006_226.pdf - year: 2006 + keywords: 'Embodied Sound Media, Music Controller, Gestures, Body Motion, Musical + Interface ' + pages: 241--244 + title: 'MusicGlove: A Wearable Musical Controller for Massive Media Library' + url: http://www.nime.org/proceedings/2008/nime2008_241.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Koehly2006 - abstract: 'This paper presents the development of novel "home-made" touch sensors - using 
conductive pigments and various substrate materials. We show that it is - possible to build one''s own position, pressure and bend sensors with various - electrical characteristics, sizes and shapes, and this for a very competitive - price. We give examples and provide results from experimental tests of such developments. ' - address: 'Paris, France' - author: 'Koehly, Rodolphe and Curtil, Denis and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Koehly2006,\n abstract = {This paper presents the development\ - \ of novel \"home-made\" touch sensors using conductive pigments and various substrate\ - \ materials. We show that it is possible to build one's own position, pressure\ - \ and bend sensors with various electrical characteristics, sizes and shapes,\ - \ and this for a very competitive price. We give examples and provide results\ - \ from experimental tests of such developments. },\n address = {Paris, France},\n\ - \ author = {Koehly, Rodolphe and Curtil, Denis and Wanderley, Marcelo M.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176949},\n issn = {2220-4806},\n keywords\ - \ = {Touch sensors, piezoresistive technology, conductive pigments, sensitive\ - \ materials, interface design },\n pages = {230--233},\n title = {Paper FSRs and\ - \ Latex/Fabric Traction Sensors: Methods for the Development of Home-Made Touch\ - \ Sensors},\n url = {http://www.nime.org/proceedings/2006/nime2006_230.pdf},\n\ - \ year = {2006}\n}\n" + ID: Zbyszynski2008 + abstract: 'This paper proposes the creation of a method book for tabletbased instruments, + evaluating pedagogical materials fortraditional instruments as well as research + in human-computerinteraction and tablet interfaces.' 
+ address: 'Genoa, Italy' + author: 'Zbyszynski, Michael' + bibtex: "@inproceedings{Zbyszynski2008,\n abstract = {This paper proposes the creation\ + \ of a method book for tabletbased instruments, evaluating pedagogical materials\ + \ fortraditional instruments as well as research in human-computerinteraction\ + \ and tablet interfaces.},\n address = {Genoa, Italy},\n author = {Zbyszynski,\ + \ Michael},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177461},\n issn\ + \ = {2220-4806},\n keywords = {Wacom tablet, digitizing tablet, expressivity,\ + \ gesture, mapping, pedagogy, practice },\n pages = {245--248},\n title = {An\ + \ Elementary Method for Tablet},\n url = {http://www.nime.org/proceedings/2008/nime2008_245.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176949 + doi: 10.5281/zenodo.1177461 issn: 2220-4806 - keywords: 'Touch sensors, piezoresistive technology, conductive pigments, sensitive - materials, interface design ' - pages: 230--233 - title: 'Paper FSRs and Latex/Fabric Traction Sensors: Methods for the Development - of Home-Made Touch Sensors' - url: http://www.nime.org/proceedings/2006/nime2006_230.pdf - year: 2006 + keywords: 'Wacom tablet, digitizing tablet, expressivity, gesture, mapping, pedagogy, + practice ' + pages: 245--248 + title: An Elementary Method for Tablet + url: http://www.nime.org/proceedings/2008/nime2008_245.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Bowers2006 - address: 'Paris, France' - author: 'Bowers, John and Villar, Nicolas' - bibtex: "@inproceedings{Bowers2006,\n address = {Paris, France},\n author = {Bowers,\ - \ John and Villar, Nicolas},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176881},\n\ - \ issn = {2220-4806},\n keywords = {Ad 
hoc instruments, Pin&Play, physical interfaces,\ - \ music performance, new interfaces for musical expression. },\n pages = {234--239},\n\ - \ title = {Creating Ad Hoc Instruments with Pin\\&Play\\&Perform},\n url = {http://www.nime.org/proceedings/2006/nime2006_234.pdf},\n\ - \ year = {2006}\n}\n" + ID: Roma2008 + abstract: We present an audio waveform editor that can be operated in real time + through a tabletop interface. The systemcombines multi-touch and tangible interaction + techniques inorder to implement the metaphor of a toolkit that allows direct manipulation + of a sound sample. The resulting instrument is well suited for live performance + based on evolvingloops. + address: 'Genoa, Italy' + author: 'Roma, Gerard and Xambó, Anna' + bibtex: "@inproceedings{Roma2008,\n abstract = {We present an audio waveform editor\ + \ that can be operated in real time through a tabletop interface. The systemcombines\ + \ multi-touch and tangible interaction techniques inorder to implement the metaphor\ + \ of a toolkit that allows direct manipulation of a sound sample. 
The resulting\ + \ instrument is well suited for live performance based on evolvingloops.},\n address\ + \ = {Genoa, Italy},\n author = {Roma, Gerard and Xamb\\'{o}, Anna},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179621},\n issn = {2220-4806},\n keywords\ + \ = {tangible interface, tabletop interface, musical performance, interaction\ + \ techniques },\n pages = {249--252},\n title = {A Tabletop Waveform Editor for\ + \ Live Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_249.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176881 + doi: 10.5281/zenodo.1179621 issn: 2220-4806 - keywords: 'Ad hoc instruments, Pin&Play, physical interfaces, music performance, - new interfaces for musical expression. ' - pages: 234--239 - title: Creating Ad Hoc Instruments with Pin\&Play\&Perform - url: http://www.nime.org/proceedings/2006/nime2006_234.pdf - year: 2006 + keywords: 'tangible interface, tabletop interface, musical performance, interaction + techniques ' + pages: 249--252 + title: A Tabletop Waveform Editor for Live Performance + url: http://www.nime.org/proceedings/2008/nime2008_249.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Serafin2006 - abstract: 'In this paper we introduce the Croaker, a novel input deviceinspired - by Russolo''s Intonarumori. We describe the components of the controller and the - sound synthesis engine whichallows to reproduce several everyday sounds.' - address: 'Paris, France' - author: 'Serafin, Stefania and de Götzen, Amalia and Böttcher, Niels and Gelineck, - Steven' - bibtex: "@inproceedings{Serafin2006,\n abstract = {In this paper we introduce the\ - \ Croaker, a novel input deviceinspired by Russolo's Intonarumori. 
We describe\ - \ the components of the controller and the sound synthesis engine whichallows\ - \ to reproduce several everyday sounds.},\n address = {Paris, France},\n author\ - \ = {Serafin, Stefania and de G\\''{o}tzen, Amalia and B\\''{o}ttcher, Niels and\ - \ Gelineck, Steven},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177005},\n\ - \ issn = {2220-4806},\n keywords = {Noise machines, everyday sounds, physical\ - \ models. },\n pages = {240--245},\n title = {Synthesis and Control of Everyday\ - \ Sounds Reconstructing Russolo's Intonarumori},\n url = {http://www.nime.org/proceedings/2006/nime2006_240.pdf},\n\ - \ year = {2006}\n}\n" + ID: Valle2008a + address: 'Genoa, Italy' + author: 'Valle, Andrea' + bibtex: "@inproceedings{Valle2008a,\n address = {Genoa, Italy},\n author = {Valle,\ + \ Andrea},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179645},\n issn = {2220-4806},\n\ + \ keywords = {algorithmic composition,automatic notation,nime08},\n pages = {253--256},\n\ + \ title = {Integrated Algorithmic Composition Fluid systems for including notation\ + \ in music composition cycle},\n url = {http://www.nime.org/proceedings/2008/nime2008_253.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177005 + doi: 10.5281/zenodo.1179645 issn: 2220-4806 - keywords: 'Noise machines, everyday sounds, physical models. 
' - pages: 240--245 - title: Synthesis and Control of Everyday Sounds Reconstructing Russolo's Intonarumori - url: http://www.nime.org/proceedings/2006/nime2006_240.pdf - year: 2006 + keywords: 'algorithmic composition,automatic notation,nime08' + pages: 253--256 + title: Integrated Algorithmic Composition Fluid systems for including notation in + music composition cycle + url: http://www.nime.org/proceedings/2008/nime2008_253.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Weinberg2006 - address: 'Paris, France' - author: 'Weinberg, Gil and Thatcher, Travis' - bibtex: "@inproceedings{Weinberg2006,\n address = {Paris, France},\n author = {Weinberg,\ - \ Gil and Thatcher, Travis},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177019},\n\ - \ issn = {2220-4806},\n keywords = {1,background and motivations,biological research,interactive\ - \ auditory display,neural patterns,scholars are,sonification,with new developments\ - \ in},\n pages = {246--249},\n title = {Interactive Sonification of Neural Activity},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_246.pdf},\n year = {2006}\n\ - }\n" + ID: Valle2008 + abstract: 'This paper is about GeoGraphy, a graph-based system forthe control of + both musical composition and interactive performance and its implementation in + a real-time, interactiveapplication. The implementation includes a flexible userinterface + system.' + address: 'Genoa, Italy' + author: 'Valle, Andrea' + bibtex: "@inproceedings{Valle2008,\n abstract = {This paper is about GeoGraphy,\ + \ a graph-based system forthe control of both musical composition and interactive\ + \ performance and its implementation in a real-time, interactiveapplication. 
The\ + \ implementation includes a flexible userinterface system.},\n address = {Genoa,\ + \ Italy},\n author = {Valle, Andrea},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179643},\n\ + \ issn = {2220-4806},\n keywords = {a graph,composition,figure 1,interfaces,left,live\ + \ coding,musical algorithmic composition,nime08,performance,vertex durations and\ + \ coor-},\n pages = {257--260},\n title = {GeoGraphy : a Real-Time, Graph-Based\ + \ Composition Environment},\n url = {http://www.nime.org/proceedings/2008/nime2008_257.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177019 + doi: 10.5281/zenodo.1179643 issn: 2220-4806 - keywords: '1,background and motivations,biological research,interactive auditory - display,neural patterns,scholars are,sonification,with new developments in' - pages: 246--249 - title: Interactive Sonification of Neural Activity - url: http://www.nime.org/proceedings/2006/nime2006_246.pdf - year: 2006 + keywords: 'a graph,composition,figure 1,interfaces,left,live coding,musical algorithmic + composition,nime08,performance,vertex durations and coor-' + pages: 257--260 + title: 'GeoGraphy : a Real-Time, Graph-Based Composition Environment' + url: http://www.nime.org/proceedings/2008/nime2008_257.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Remus2006 - address: 'Paris, France' - author: 'R\''{e}mus, Jacques' - bibtex: "@inproceedings{Remus2006,\n address = {Paris, France},\n author = {R\\\ - '{e}mus, Jacques},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176989},\n\ - \ issn = {2220-4806},\n keywords = {camera musicale,interface,jacques r\\'{e}mus,machines,musical\ - \ camera,musical hand,non haptic instrument,s mappings,sculptures and mechanical\ - \ 
musical,sound},\n pages = {250--253},\n title = {Non Haptic Control of Music\ - \ by Video Analysis of Hand Movements: 14 Years of Experience with the `Cam\\\ - '{e}ra Musicale'},\n url = {http://www.nime.org/proceedings/2006/nime2006_250.pdf},\n\ - \ year = {2006}\n}\n" + ID: Zannos2008 + abstract: 'In this paper, we describe the development of multi-platform tools for + Audiovisual and Kinetic installations. These involve the connection of three development + environments: Python, SuperCollider and Processing, in order to drive kinetic + art installations and to combine these with digital synthesis of sound and image + in real time. By connecting these three platforms via the OSC protocol, we enable + the control in real time of analog physical media (a device that draws figures + on sand), sound synthesis and image synthesis. We worked on the development of + algorithms for drawing figures and synthesizing images and sound on all three + platforms and experimented with various mechanisms for coordinating synthesis + and rendering in different media. Several problems were addressed: How to coordinate + the timing between different platforms? What configuration to use? Clientserver + (who is the client who the server?), equal partners, mixed configurations. A library + was developed in SuperCollider to enable the packaging of algorithms into modules + with automatic generation of GUI from specifications, and the saving of configurations + of modules into session files as scripts in SuperCollider code. The application + of this library as a framework for both driving graphic synthesis in Processing + and receiving control data from it resulted in an environment for experimentation + that is also being used successfully in teaching interactive audiovisual media. 
' + address: 'Genoa, Italy' + author: 'Zannos, Iannis' + bibtex: "@inproceedings{Zannos2008,\n abstract = {In this paper, we describe the\ + \ development of multi-platform tools for Audiovisual and Kinetic installations.\ + \ These involve the connection of three development environments: Python, SuperCollider\ + \ and Processing, in order to drive kinetic art installations and to combine these\ + \ with digital synthesis of sound and image in real time. By connecting these\ + \ three platforms via the OSC protocol, we enable the control in real time of\ + \ analog physical media (a device that draws figures on sand), sound synthesis\ + \ and image synthesis. We worked on the development of algorithms for drawing\ + \ figures and synthesizing images and sound on all three platforms and experimented\ + \ with various mechanisms for coordinating synthesis and rendering in different\ + \ media. Several problems were addressed: How to coordinate the timing between\ + \ different platforms? What configuration to use? Clientserver (who is the client\ + \ who the server?), equal partners, mixed configurations. A library was developed\ + \ in SuperCollider to enable the packaging of algorithms into modules with automatic\ + \ generation of GUI from specifications, and the saving of configurations of modules\ + \ into session files as scripts in SuperCollider code. 
The application of this\ + \ library as a framework for both driving graphic synthesis in Processing and\ + \ receiving control data from it resulted in an environment for experimentation\ + \ that is also being used successfully in teaching interactive audiovisual media.\ + \ },\n address = {Genoa, Italy},\n author = {Zannos, Iannis},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177459},\n issn = {2220-4806},\n keywords = {kinetic\ + \ art, audiovisual installations, python, SuperCollider, Processing, algorithmic\ + \ art, tools for multi-platform development },\n pages = {261--264},\n title =\ + \ {Multi-Platform Development of Audiovisual and Kinetic Installations},\n url\ + \ = {http://www.nime.org/proceedings/2008/nime2008_261.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176989 + doi: 10.5281/zenodo.1177459 issn: 2220-4806 - keywords: 'camera musicale,interface,jacques r\''{e}mus,machines,musical camera,musical - hand,non haptic instrument,s mappings,sculptures and mechanical musical,sound' - pages: 250--253 - title: 'Non Haptic Control of Music by Video Analysis of Hand Movements: 14 Years - of Experience with the `Cam\''{e}ra Musicale''' - url: http://www.nime.org/proceedings/2006/nime2006_250.pdf - year: 2006 + keywords: 'kinetic art, audiovisual installations, python, SuperCollider, Processing, + algorithmic art, tools for multi-platform development ' + pages: 261--264 + title: Multi-Platform Development of Audiovisual and Kinetic Installations + url: http://www.nime.org/proceedings/2008/nime2008_261.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Borchers2006 - abstract: 'The MICON is an electronic music stand extending Maestro!, the latest - in a series of interactive conducting exhibits that use real orchestral audio - and video recordings. 
The MICON uses OpenGL-based rendering to display and animate - score pages with a high degree of realism. It offers three different score display - formats to match the user''s level of expertise. A realtime animated visual cueing - system helps users with their conducting. The MICON has been evaluated with music - students. ' - address: 'Paris, France' - author: 'Borchers, Jan and Hadjakos, Aristotelis and Mühlhäuser, Max' - bibtex: "@inproceedings{Borchers2006,\n abstract = {The MICON is an electronic music\ - \ stand extending Maestro!, the latest in a series of interactive conducting exhibits\ - \ that use real orchestral audio and video recordings. The MICON uses OpenGL-based\ - \ rendering to display and animate score pages with a high degree of realism.\ - \ It offers three different score display formats to match the user's level of\ - \ expertise. A realtime animated visual cueing system helps users with their conducting.\ - \ The MICON has been evaluated with music students. },\n address = {Paris, France},\n\ - \ author = {Borchers, Jan and Hadjakos, Aristotelis and M\\''{u}hlh\\''{a}user,\ - \ Max},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176877},\n issn = {2220-4806},\n\ - \ keywords = {Music stand, score display, exhibit, conducting. },\n pages = {254--259},\n\ - \ title = {MICON A Music Stand for Interactive Conducting},\n url = {http://www.nime.org/proceedings/2006/nime2006_254.pdf},\n\ - \ year = {2006}\n}\n" + ID: Corness2008 + abstract: 'Through the developing of tools for analyzing the performerssonic and + movement-based gestures, research into the systemperformer interaction has focused + on the computer''s ability torespond to the performer. 
Where as such work shows + interestwithin the community in developing an interaction paradigmmodeled on the + player, by focusing on the perception andreasoning of the system, this research + assumes that theperformer''s manner of interaction is in agreement with thiscomputational + model. My study presents an alternative model ofinteraction designed for improvisatory + performance centered onthe perception of the performer as understood by theories + takenfrom performance practices and cognitive science.' + address: 'Genoa, Italy' + author: 'Corness, Greg' + bibtex: "@inproceedings{Corness2008,\n abstract = {Through the developing of tools\ + \ for analyzing the performerssonic and movement-based gestures, research into\ + \ the systemperformer interaction has focused on the computer's ability torespond\ + \ to the performer. Where as such work shows interestwithin the community in developing\ + \ an interaction paradigmmodeled on the player, by focusing on the perception\ + \ andreasoning of the system, this research assumes that theperformer's manner\ + \ of interaction is in agreement with thiscomputational model. 
My study presents\ + \ an alternative model ofinteraction designed for improvisatory performance centered\ + \ onthe perception of the performer as understood by theories takenfrom performance\ + \ practices and cognitive science.},\n address = {Genoa, Italy},\n author = {Corness,\ + \ Greg},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179515},\n issn = {2220-4806},\n\ + \ keywords = {Interactive performance, Perception, HCI },\n pages = {265--268},\n\ + \ title = {Performer Model : Towards a Framework for Interactive Performance Based\ + \ on Perceived Intention},\n url = {http://www.nime.org/proceedings/2008/nime2008_265.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176877 + doi: 10.5281/zenodo.1179515 issn: 2220-4806 - keywords: 'Music stand, score display, exhibit, conducting. ' - pages: 254--259 - title: MICON A Music Stand for Interactive Conducting - url: http://www.nime.org/proceedings/2006/nime2006_254.pdf - year: 2006 + keywords: 'Interactive performance, Perception, HCI ' + pages: 265--268 + title: 'Performer Model : Towards a Framework for Interactive Performance Based + on Perceived Intention' + url: http://www.nime.org/proceedings/2008/nime2008_265.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Lee2006 - abstract: 'Designing a conducting gesture analysis system for public spacesposes - unique challenges. We present conga, a software framework that enables automatic - recognition and interpretation ofconducting gestures. conga is able to recognize - multiple types ofgestures with varying levels of difficulty for the user to perform,from - a standard four-beat pattern, to simplified up-down conducting movements, to no - pattern at all. 
conga provides an extendablelibrary of feature detectors linked - together into a directed acyclicgraph; these graphs represent the various conducting - patterns asgesture profiles. At run-time, conga searches for the best profileto - match a user''s gestures in real-time, and uses a beat prediction algorithm to - provide results at the sub-beat level, in additionto output values such as tempo, - gesture size, and the gesture''sgeometric center. Unlike some previous approaches, - conga doesnot need to be trained with sample data before use. Our preliminary - user tests show that conga has a beat recognition rate ofover 90%. conga is deployed - as the gesture recognition systemfor Maestro!, an interactive conducting exhibit - that opened in theBetty Brinn Children''s Museum in Milwaukee, USA in March2006.' - address: 'Paris, France' - author: 'Lee, Eric and Grüll, Ingo and Keil, Henning and Borchers, Jan' - bibtex: "@inproceedings{Lee2006,\n abstract = {Designing a conducting gesture analysis\ - \ system for public spacesposes unique challenges. We present conga, a software\ - \ framework that enables automatic recognition and interpretation ofconducting\ - \ gestures. conga is able to recognize multiple types ofgestures with varying\ - \ levels of difficulty for the user to perform,from a standard four-beat pattern,\ - \ to simplified up-down conducting movements, to no pattern at all. conga provides\ - \ an extendablelibrary of feature detectors linked together into a directed acyclicgraph;\ - \ these graphs represent the various conducting patterns asgesture profiles. At\ - \ run-time, conga searches for the best profileto match a user's gestures in real-time,\ - \ and uses a beat prediction algorithm to provide results at the sub-beat level,\ - \ in additionto output values such as tempo, gesture size, and the gesture'sgeometric\ - \ center. Unlike some previous approaches, conga doesnot need to be trained with\ - \ sample data before use. 
Our preliminary user tests show that conga has a beat\ - \ recognition rate ofover 90%. conga is deployed as the gesture recognition systemfor\ - \ Maestro!, an interactive conducting exhibit that opened in theBetty Brinn Children's\ - \ Museum in Milwaukee, USA in March2006.},\n address = {Paris, France},\n author\ - \ = {Lee, Eric and Gr\\''{u}ll, Ingo and Keil, Henning and Borchers, Jan},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176957},\n issn = {2220-4806},\n keywords\ - \ = {gesture recognition, conducting, software gesture frameworks },\n pages =\ - \ {260--265},\n title = {conga: A Framework for Adaptive Conducting Gesture Analysis},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_260.pdf},\n year = {2006}\n\ + ID: Teles2008 + address: 'Genoa, Italy' + author: 'Teles, Paulo C. and Boyle, Aidan' + bibtex: "@inproceedings{Teles2008,\n address = {Genoa, Italy},\n author = {Teles,\ + \ Paulo C. 
and Boyle, Aidan},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179637},\n\ + \ issn = {2220-4806},\n keywords = {nime08},\n pages = {269--272},\n title = {Developing\ + \ an \"Antigenous\" Art Installation Based on a Touchless Endosystem Interface},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_269.pdf},\n year = {2008}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176957 + doi: 10.5281/zenodo.1179637 issn: 2220-4806 - keywords: 'gesture recognition, conducting, software gesture frameworks ' - pages: 260--265 - title: 'conga: A Framework for Adaptive Conducting Gesture Analysis' - url: http://www.nime.org/proceedings/2006/nime2006_260.pdf - year: 2006 + keywords: nime08 + pages: 269--272 + title: Developing an "Antigenous" Art Installation Based on a Touchless Endosystem + Interface + url: http://www.nime.org/proceedings/2008/nime2008_269.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: dAlessandro2006 - address: 'Paris, France' - author: 'd''Alessandro, Nicolas and d''Alessandro, Christophe and Le Beux, Sylvain - and Doval, Boris' - bibtex: "@inproceedings{dAlessandro2006,\n address = {Paris, France},\n author =\ - \ {d'Alessandro, Nicolas and d'Alessandro, Christophe and Le Beux, Sylvain and\ - \ Doval, Boris},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176863},\n\ - \ issn = {2220-4806},\n keywords = {Singing synthesis, voice source, voice quality,\ - \ spectral model, formant synthesis, instrument, gestural control. 
},\n pages\ - \ = {266--271},\n title = {Real-time CALM Synthesizer: New Approaches in Hands-Controlled\ - \ Voice Synthesis},\n url = {http://www.nime.org/proceedings/2006/nime2006_266.pdf},\n\ - \ year = {2006}\n}\n" + ID: Lanzalone2008 + address: 'Genoa, Italy' + author: 'Lanzalone, Silvia' + bibtex: "@inproceedings{Lanzalone2008,\n address = {Genoa, Italy},\n author = {Lanzalone,\ + \ Silvia},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179587},\n issn = {2220-4806},\n\ + \ keywords = {nime08},\n pages = {273--276},\n title = {The 'Suspended Clarinet'\ + \ with the 'Uncaused Sound' : Description of a Renewed Musical Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_273.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176863 + doi: 10.5281/zenodo.1179587 issn: 2220-4806 - keywords: 'Singing synthesis, voice source, voice quality, spectral model, formant - synthesis, instrument, gestural control. ' - pages: 266--271 - title: 'Real-time CALM Synthesizer: New Approaches in Hands-Controlled Voice Synthesis' - url: http://www.nime.org/proceedings/2006/nime2006_266.pdf - year: 2006 - - + keywords: nime08 + pages: 273--276 + title: 'The ''Suspended Clarinet'' with the ''Uncaused Sound'' : Description of + a Renewed Musical Instrument' + url: http://www.nime.org/proceedings/2008/nime2008_273.pdf + year: 2008 + + - ENTRYTYPE: inproceedings - ID: Pritchard2006 - abstract: 'We describe the implementation of an environment for Gesturally-Realized - Audio, Speech and Song Performance (GRASSP), which includes a glove-based interface, - a mapping/training interface, and a collection of Max/MSP/Jitter bpatchers that - allow the user to improvise speech, song, sound synthesis, sound processing, sound - localization, and video processing. 
The mapping/training interface provides a - framework for performers to specify by example the mapping between gesture and - sound or video controls. We demonstrate the effectiveness of the GRASSP environment - for gestural control of musical expression by creating a gesture-to-voice system - that is currently being used by performers. ' - address: 'Paris, France' - author: 'Pritchard, Bob and Fels, Sidney S.' - bibtex: "@inproceedings{Pritchard2006,\n abstract = {We describe the implementation\ - \ of an environment for Gesturally-Realized Audio, Speech and Song Performance\ - \ (GRASSP), which includes a glove-based interface, a mapping/training interface,\ - \ and a collection of Max/MSP/Jitter bpatchers that allow the user to improvise\ - \ speech, song, sound synthesis, sound processing, sound localization, and video\ - \ processing. The mapping/training interface provides a framework for performers\ - \ to specify by example the mapping between gesture and sound or video controls.\ - \ We demonstrate the effectiveness of the GRASSP environment for gestural control\ - \ of musical expression by creating a gesture-to-voice system that is currently\ - \ being used by performers. 
},\n address = {Paris, France},\n author = {Pritchard,\ - \ Bob and Fels, Sidney S.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176987},\n\ - \ issn = {2220-4806},\n keywords = {Speech synthesis, parallel formant speech\ - \ synthesizer, gesture control, Max/MSP, Jitter, Cyberglove, Polhemus, sound diffusion,\ - \ UBC Toolbox, Glove-Talk, },\n pages = {272--276},\n title = {GRASSP: Gesturally-Realized\ - \ Audio, Speech and Song Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_272.pdf},\n\ - \ year = {2006}\n}\n" + ID: Hashida2008 + abstract: 'One of the advantages of case-based systems is that theycan generate + expressions even if the user doesn''t know howthe system applies expression rules. + However, the systemscannot avoid the problem of data sparseness and do notpermit + a user to improve the expression of a certain part ofa melody directly. After + discussing the functions requiredfor user-oriented interface for performance rendering + systems, this paper proposes a directable case-based performance rendering system, + called Itopul. Itopul is characterized by 1) a combination of the phrasing model + and thepulse model, 2) the use of a hierarchical music structure foravoiding from + the data sparseness problem, 3) visualizationof the processing progress, and 4) + music structures directlymodifiable by the user.' + address: 'Genoa, Italy' + author: 'Hashida, Mitsuyo and Ito, Yosuke and Katayose, Haruhiro' + bibtex: "@inproceedings{Hashida2008,\n abstract = {One of the advantages of case-based\ + \ systems is that theycan generate expressions even if the user doesn't know howthe\ + \ system applies expression rules. However, the systemscannot avoid the problem\ + \ of data sparseness and do notpermit a user to improve the expression of a certain\ + \ part ofa melody directly. 
After discussing the functions requiredfor user-oriented\ + \ interface for performance rendering systems, this paper proposes a directable\ + \ case-based performance rendering system, called Itopul. Itopul is characterized\ + \ by 1) a combination of the phrasing model and thepulse model, 2) the use of\ + \ a hierarchical music structure foravoiding from the data sparseness problem,\ + \ 3) visualizationof the processing progress, and 4) music structures directlymodifiable\ + \ by the user.},\n address = {Genoa, Italy},\n author = {Hashida, Mitsuyo and\ + \ Ito, Yosuke and Katayose, Haruhiro},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179559},\n\ + \ issn = {2220-4806},\n keywords = {Performance Rendering, User Interface, Case-based\ + \ Approach },\n pages = {277--280},\n title = {A Directable Performance Rendering\ + \ System: Itopul},\n url = {http://www.nime.org/proceedings/2008/nime2008_277.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176987 + doi: 10.5281/zenodo.1179559 issn: 2220-4806 - keywords: 'Speech synthesis, parallel formant speech synthesizer, gesture control, - Max/MSP, Jitter, Cyberglove, Polhemus, sound diffusion, UBC Toolbox, Glove-Talk, ' - pages: 272--276 - title: 'GRASSP: Gesturally-Realized Audio, Speech and Song Performance' - url: http://www.nime.org/proceedings/2006/nime2006_272.pdf - year: 2006 + keywords: 'Performance Rendering, User Interface, Case-based Approach ' + pages: 277--280 + title: 'A Directable Performance Rendering System: Itopul' + url: http://www.nime.org/proceedings/2008/nime2008_277.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Dobrian2006 - abstract: 'Is there a distinction between New Interfaces for MusicalExpression and - New Interfaces for Controlling Sound? 
Thisarticle begins with a brief overview - of expression in musicalperformance, and examines some of the characteristics - ofeffective "expressive" computer music instruments. Itbecomes apparent that sophisticated - musical expressionrequires not only a good control interface but also virtuosicmastery - of the instrument it controls. By studying effectiveacoustic instruments, choosing - intuitive but complexgesture-sound mappings that take advantage of establishedinstrumental - skills, designing intelligent characterizationsof performance gestures, and promoting - long-term dedicatedpractice on a new interface, computer music instrumentdesigners - can enhance the expressive quality of computermusic performance.' - address: 'Paris, France' - author: 'Dobrian, Christopher and Koppelman, Daniel' - bibtex: "@inproceedings{Dobrian2006,\n abstract = {Is there a distinction between\ - \ New Interfaces for MusicalExpression and New Interfaces for Controlling Sound?\ - \ Thisarticle begins with a brief overview of expression in musicalperformance,\ - \ and examines some of the characteristics ofeffective \"expressive\" computer\ - \ music instruments. Itbecomes apparent that sophisticated musical expressionrequires\ - \ not only a good control interface but also virtuosicmastery of the instrument\ - \ it controls. 
By studying effectiveacoustic instruments, choosing intuitive but\ - \ complexgesture-sound mappings that take advantage of establishedinstrumental\ - \ skills, designing intelligent characterizationsof performance gestures, and\ - \ promoting long-term dedicatedpractice on a new interface, computer music instrumentdesigners\ - \ can enhance the expressive quality of computermusic performance.},\n address\ - \ = {Paris, France},\n author = {Dobrian, Christopher and Koppelman, Daniel},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176893},\n issn = {2220-4806},\n\ - \ keywords = {Expression, instrument design, performance, virtuosity. },\n pages\ - \ = {277--282},\n title = {The E in NIME: Musical Expression with New Computer\ - \ Interfaces},\n url = {http://www.nime.org/proceedings/2006/nime2006_277.pdf},\n\ - \ year = {2006}\n}\n" + ID: Hazlewood2008 + abstract: 'In this work we describe our initial explorations in building a musical + instrument specifically for providing listenerswith simple, but useful, ambient + information. The termAmbient Musical Information Systems (AMIS) is proposedto + describe this kind of research. Instruments like these differ from standard musical + instruments in that they are tobe perceived indirectly from outside one''s primary + focus ofattention. We describe our rationale for creating such a device, a discussion + on the appropriate qualities of sound fordelivering ambient information, and a + description of an instrument created for use in a series of experiments that wewill + use to test out ideas. We conclude with a discussion ofour initial findings, and + some further directions we wish toexplore.' + address: 'Genoa, Italy' + author: 'Hazlewood, William R. 
and Knopke, Ian' + bibtex: "@inproceedings{Hazlewood2008,\n abstract = {In this work we describe our\ + \ initial explorations in building a musical instrument specifically for providing\ + \ listenerswith simple, but useful, ambient information. The termAmbient Musical\ + \ Information Systems (AMIS) is proposedto describe this kind of research. Instruments\ + \ like these differ from standard musical instruments in that they are tobe perceived\ + \ indirectly from outside one's primary focus ofattention. We describe our rationale\ + \ for creating such a device, a discussion on the appropriate qualities of sound\ + \ fordelivering ambient information, and a description of an instrument created\ + \ for use in a series of experiments that wewill use to test out ideas. We conclude\ + \ with a discussion ofour initial findings, and some further directions we wish\ + \ toexplore.},\n address = {Genoa, Italy},\n author = {Hazlewood, William R. and\ + \ Knopke, Ian},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179563},\n\ + \ issn = {2220-4806},\n keywords = {Ambient Musical Information Systems, musical\ + \ instruments, human computer interaction, Markov chain, probability, al- gorithmic\ + \ composition },\n pages = {281--284},\n title = {Designing Ambient Musical Information\ + \ Systems},\n url = {http://www.nime.org/proceedings/2008/nime2008_281.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176893 + doi: 10.5281/zenodo.1179563 issn: 2220-4806 - keywords: 'Expression, instrument design, performance, virtuosity. 
' - pages: 277--282 - title: 'The E in NIME: Musical Expression with New Computer Interfaces' - url: http://www.nime.org/proceedings/2006/nime2006_277.pdf - year: 2006 + keywords: 'Ambient Musical Information Systems, musical instruments, human computer + interaction, Markov chain, probability, al- gorithmic composition ' + pages: 281--284 + title: Designing Ambient Musical Information Systems + url: http://www.nime.org/proceedings/2008/nime2008_281.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Richards2006 - abstract: 'Why is a seemingly mundane issue such as airline baggageallowance of - great significance in regards to the performancepractice of electronic music? - This paper discusses how aperformance practice has evolved that seeks to question - thebinary and corporate digital world. New ''instruments'' andapproaches have - emerged that explore ''dirty electronics'' and''punktronics'': DIY electronic - instruments made from junk.These instruments are not instruments in the traditionalsense, - defined by physical dimensions or by a set number ofparameters, but modular systems, - constantly evolving, nevercomplete, infinitely variable and designed to be portable. - Acombination of lo- and hi-fi, analogue and digital,synchronous and asynchronous - devices offer new modes ofexpression. The development of these new interfaces - formusical expression run side-by-side with an emerging postdigital aesthetic.' - address: 'Paris, France' - author: 'Richards, John' - bibtex: "@inproceedings{Richards2006,\n abstract = {Why is a seemingly mundane issue\ - \ such as airline baggageallowance of great significance in regards to the performancepractice\ - \ of electronic music? This paper discusses how aperformance practice has evolved\ - \ that seeks to question thebinary and corporate digital world. 
New 'instruments'\ - \ andapproaches have emerged that explore 'dirty electronics' and'punktronics':\ - \ DIY electronic instruments made from junk.These instruments are not instruments\ - \ in the traditionalsense, defined by physical dimensions or by a set number ofparameters,\ - \ but modular systems, constantly evolving, nevercomplete, infinitely variable\ - \ and designed to be portable. Acombination of lo- and hi-fi, analogue and digital,synchronous\ - \ and asynchronous devices offer new modes ofexpression. The development of these\ - \ new interfaces formusical expression run side-by-side with an emerging postdigital\ - \ aesthetic.},\n address = {Paris, France},\n author = {Richards, John},\n booktitle\ + ID: Hadjakos2008 + abstract: 'The Elbow Piano distinguishes two types of piano touch: a touchwith movement + in the elbow joint and a touch without. A playednote is first mapped to the left + or right hand by visual tracking.Custom-built goniometers attached to the player''s + arms are usedto detect the type of touch. The two different types of touchesare + sonified by different instrument sounds. This gives theplayer an increased awareness + of his elbow movements, which isconsidered valuable for piano education. We have + implementedthe system and evaluated it with a group of music students.' + address: 'Genoa, Italy' + author: 'Hadjakos, Aristotelis and Aitenbichler, Erwin and Mühlhäuser, Max' + bibtex: "@inproceedings{Hadjakos2008,\n abstract = {The Elbow Piano distinguishes\ + \ two types of piano touch: a touchwith movement in the elbow joint and a touch\ + \ without. A playednote is first mapped to the left or right hand by visual tracking.Custom-built\ + \ goniometers attached to the player's arms are usedto detect the type of touch.\ + \ The two different types of touchesare sonified by different instrument sounds.\ + \ This gives theplayer an increased awareness of his elbow movements, which isconsidered\ + \ valuable for piano education. 
We have implementedthe system and evaluated it\ + \ with a group of music students.},\n address = {Genoa, Italy},\n author = {Hadjakos,\ + \ Aristotelis and Aitenbichler, Erwin and M\\''{u}hlh\\''{a}user, Max},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176995},\n issn = {2220-4806},\n keywords\ - \ = {bastardisation,dirty electronics,diy,ebay,live,modular,performance,portability,post-digital,punktronics},\n\ - \ pages = {283--287},\n title = {32kg: Performance Systems for a Post-Digital\ - \ Age},\n url = {http://www.nime.org/proceedings/2006/nime2006_283.pdf},\n year\ - \ = {2006}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1179553},\n issn = {2220-4806},\n keywords\ + \ = {Piano, education, sonification, feedback, gesture. },\n pages = {285--288},\n\ + \ title = {The Elbow Piano : Sonification of Piano Playing Movements},\n url =\ + \ {http://www.nime.org/proceedings/2008/nime2008_285.pdf},\n year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176995 + doi: 10.5281/zenodo.1179553 issn: 2220-4806 - keywords: 'bastardisation,dirty electronics,diy,ebay,live,modular,performance,portability,post-digital,punktronics' - pages: 283--287 - title: '32kg: Performance Systems for a Post-Digital Age' - url: http://www.nime.org/proceedings/2006/nime2006_283.pdf - year: 2006 + keywords: 'Piano, education, sonification, feedback, gesture. 
' + pages: 285--288 + title: 'The Elbow Piano : Sonification of Piano Playing Movements' + url: http://www.nime.org/proceedings/2008/nime2008_285.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: DeLaubier2006 - address: 'Paris, France' - author: 'de Laubier, Serge and Goudard, Vincent' - bibtex: "@inproceedings{DeLaubier2006,\n address = {Paris, France},\n author = {de\ - \ Laubier, Serge and Goudard, Vincent},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176953},\n\ - \ issn = {2220-4806},\n keywords = {1,audio-graphic portable instrument,ethernet,from\ - \ 1983 to 1988,genesis of the project,on,puce muse studios,r\\'{e}pertoire,we\ - \ worked at the,wifi},\n pages = {288--291},\n title = {Meta-Instrument 3: a Look\ - \ over 17 Years of Practice},\n url = {http://www.nime.org/proceedings/2006/nime2006_288.pdf},\n\ - \ year = {2006}\n}\n" + ID: Takegawa2008 + abstract: 'Musical keyboard instruments have a long history, whichresulted in many + kinds of keyboards (claviers) today. Sincethe hardware of conventional musical + keyboards cannot bechanged, such as the number of keys, musicians have tocarry + these large keyboards for playing music that requiresonly a small diapason. To + solve this problem, the goal ofour study is to construct UnitKeyboard, which has + only 12keys (7 white keys and 5 black keys) and connectors fordocking with other + UnitKeyboards. We can build variouskinds of musical keyboard configurations by + connecting oneUnitKeyboard to others, since they have automatic settingsfor multiple + keyboard instruments. We discuss the usabilityof the UnitKeyboard from reviews + by several amateur andprofessional pianists who used the UnitKeyboard.' 
+ address: 'Genoa, Italy' + author: 'Takegawa, Yoshinari and Tsukamoto, Masahiko' + bibtex: "@inproceedings{Takegawa2008,\n abstract = {Musical keyboard instruments\ + \ have a long history, whichresulted in many kinds of keyboards (claviers) today.\ + \ Sincethe hardware of conventional musical keyboards cannot bechanged, such as\ + \ the number of keys, musicians have tocarry these large keyboards for playing\ + \ music that requiresonly a small diapason. To solve this problem, the goal ofour\ + \ study is to construct UnitKeyboard, which has only 12keys (7 white keys and\ + \ 5 black keys) and connectors fordocking with other UnitKeyboards. We can build\ + \ variouskinds of musical keyboard configurations by connecting oneUnitKeyboard\ + \ to others, since they have automatic settingsfor multiple keyboard instruments.\ + \ We discuss the usabilityof the UnitKeyboard from reviews by several amateur\ + \ andprofessional pianists who used the UnitKeyboard.},\n address = {Genoa, Italy},\n\ + \ author = {Takegawa, Yoshinari and Tsukamoto, Masahiko},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179635},\n issn = {2220-4806},\n keywords = {Portable\ + \ keyboard instruments, block interface, Automatic settings },\n pages = {289--292},\n\ + \ title = {UnitKeyboard : An Easily Configurable Compact Clavier},\n url = {http://www.nime.org/proceedings/2008/nime2008_289.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176953 + doi: 10.5281/zenodo.1179635 issn: 2220-4806 - keywords: '1,audio-graphic portable instrument,ethernet,from 1983 to 1988,genesis - of the project,on,puce muse studios,r\''{e}pertoire,we worked at the,wifi' - pages: 288--291 - title: 'Meta-Instrument 3: a Look over 17 Years of Practice' - url: http://www.nime.org/proceedings/2006/nime2006_288.pdf - year: 2006 + 
keywords: 'Portable keyboard instruments, block interface, Automatic settings ' + pages: 289--292 + title: 'UnitKeyboard : An Easily Configurable Compact Clavier' + url: http://www.nime.org/proceedings/2008/nime2008_289.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Goto2006 - abstract: 'This paper is intended to introduce the system, which combines "BodySuit" - and "RoboticMusic", as well as its possibilities and its uses in an artistic application. - "BodySuit" refers to a gesture controller in a Data Suit type. "RoboticMusic" - refers to percussion robots, which are appliedto a humanoid robot type. In this - paper, I will discuss their aesthetics and the concept, as well as the idea of - the "Extended Body".' - address: 'Paris, France' - author: 'Goto, Suguru' - bibtex: "@inproceedings{Goto2006,\n abstract = {This paper is intended to introduce\ - \ the system, which combines \"BodySuit\" and \"RoboticMusic\", as well as its\ - \ possibilities and its uses in an artistic application. \"BodySuit\" refers to\ - \ a gesture controller in a Data Suit type. \"RoboticMusic\" refers to percussion\ - \ robots, which are appliedto a humanoid robot type. 
In this paper, I will discuss\ - \ their aesthetics and the concept, as well as the idea of the \"Extended Body\"\ - .},\n address = {Paris, France},\n author = {Goto, Suguru},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176913},\n issn = {2220-4806},\n keywords = {Robot, Gesture\ - \ Controller, Humanoid Robot, Artificial Intelligence, Interaction },\n pages\ - \ = {292--295},\n title = {The Case Study of An Application of The System, `BodySuit'\ - \ and `RoboticMusic': Its Introduction and Aesthetics},\n url = {http://www.nime.org/proceedings/2006/nime2006_292.pdf},\n\ - \ year = {2006}\n}\n" + ID: PalacioQuintin2008 + abstract: 'After eight years of practice on the first hyper-flute prototype (a flute + extended with sensors), this article presentsa retrospective of its instrumental + practice and the newdevelopments planned from both technological and musical perspectives. + Design, performance skills, and mappingstrategies are discussed, as well as interactive + compositionand improvisation.' + address: 'Genoa, Italy' + author: 'Palacio-Quintin, Cléo' + bibtex: "@inproceedings{PalacioQuintin2008,\n abstract = {After eight years of practice\ + \ on the first hyper-flute prototype (a flute extended with sensors), this article\ + \ presentsa retrospective of its instrumental practice and the newdevelopments\ + \ planned from both technological and musical perspectives. 
Design, performance\ + \ skills, and mappingstrategies are discussed, as well as interactive compositionand\ + \ improvisation.},\n address = {Genoa, Italy},\n author = {Palacio-Quintin, Cl\\\ + '{e}o},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179609},\n issn = {2220-4806},\n\ + \ keywords = {composition,gestural control,hyper-flute,hyper-instruments,improvisation,interactive\ + \ music,mapping,nime08,sensors},\n pages = {293--298},\n title = {Eight Years\ + \ of Practice on the Hyper-Flute : Technological and Musical Perspectives},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_293.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176913 + doi: 10.5281/zenodo.1179609 issn: 2220-4806 - keywords: 'Robot, Gesture Controller, Humanoid Robot, Artificial Intelligence, Interaction ' - pages: 292--295 - title: 'The Case Study of An Application of The System, `BodySuit'' and `RoboticMusic'': - Its Introduction and Aesthetics' - url: http://www.nime.org/proceedings/2006/nime2006_292.pdf - year: 2006 + keywords: 'composition,gestural control,hyper-flute,hyper-instruments,improvisation,interactive + music,mapping,nime08,sensors' + pages: 293--298 + title: 'Eight Years of Practice on the Hyper-Flute : Technological and Musical Perspectives' + url: http://www.nime.org/proceedings/2008/nime2008_293.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Hindman2006 - address: 'Paris, France' - author: 'Hindman, David' - bibtex: "@inproceedings{Hindman2006,\n address = {Paris, France},\n author = {Hindman,\ - \ David},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176923},\n issn = {2220-4806},\n\ - \ pages = {296--299},\n title = {Modal Kombat: Competition and Choreography in\ - \ Synesthetic 
Musical Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_296.pdf},\n\ - \ year = {2006}\n}\n" + ID: Berdahl2008 + abstract: 'We introduce physically motivated interfaces for playing virtual musical + instruments, and we suggest that they lie somewhere in between commonplace interfaces + and haptic interfaces in terms of their complexity. Next, we review guitarlike + interfaces, and we design an interface to a virtual string.The excitation signal + and pitch are sensed separately usingtwo independent string segments. These parameters + controla two-axis digital waveguide virtual string, which modelsvibrations in + the horizontal and vertical transverse axes aswell as the coupling between them. + Finally, we consider theadvantages of using a multi-axis pickup for measuring + theexcitation signal.' + address: 'Genoa, Italy' + author: 'Berdahl, Edgar and Smith, Julius O.' + bibtex: "@inproceedings{Berdahl2008,\n abstract = {We introduce physically motivated\ + \ interfaces for playing virtual musical instruments, and we suggest that they\ + \ lie somewhere in between commonplace interfaces and haptic interfaces in terms\ + \ of their complexity. Next, we review guitarlike interfaces, and we design an\ + \ interface to a virtual string.The excitation signal and pitch are sensed separately\ + \ usingtwo independent string segments. These parameters controla two-axis digital\ + \ waveguide virtual string, which modelsvibrations in the horizontal and vertical\ + \ transverse axes aswell as the coupling between them. 
Finally, we consider theadvantages\ + \ of using a multi-axis pickup for measuring theexcitation signal.},\n address\ + \ = {Genoa, Italy},\n author = {Berdahl, Edgar and Smith, Julius O.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179493},\n issn = {2220-4806},\n keywords\ + \ = {physically motivated, physical, models, modeling, vibrating string, guitar,\ + \ pitch detection, interface, excitation, coupled strings, haptic },\n pages =\ + \ {299--302},\n title = {A Tangible Virtual Vibrating String : A Physically Motivated\ + \ Virtual Musical Instrument Interface},\n url = {http://www.nime.org/proceedings/2008/nime2008_299.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176923 + doi: 10.5281/zenodo.1179493 issn: 2220-4806 - pages: 296--299 - title: 'Modal Kombat: Competition and Choreography in Synesthetic Musical Performance' - url: http://www.nime.org/proceedings/2006/nime2006_296.pdf - year: 2006 + keywords: 'physically motivated, physical, models, modeling, vibrating string, guitar, + pitch detection, interface, excitation, coupled strings, haptic ' + pages: 299--302 + title: 'A Tangible Virtual Vibrating String : A Physically Motivated Virtual Musical + Instrument Interface' + url: http://www.nime.org/proceedings/2008/nime2008_299.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Lehrman2006 - address: 'Paris, France' - author: 'Lehrman, Paul D. and Singer, Eric' - bibtex: "@inproceedings{Lehrman2006,\n address = {Paris, France},\n author = {Lehrman,\ - \ Paul D. 
and Singer, Eric},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176961},\n\ - \ issn = {2220-4806},\n keywords = {Robotics, computer control, MIDI, player pianos,\ - \ mechanical music, percussion, sound effects, Dadaism. },\n pages = {300--303},\n\ - \ title = {A \"Ballet M\\'{e}canique\" for the 21{s}t Century: Performing George\ - \ Antheil's Dadaist Masterpiece with Robots},\n url = {http://www.nime.org/proceedings/2006/nime2006_300.pdf},\n\ - \ year = {2006}\n}\n" + ID: Geiger2008 + abstract: 'Being one of the earliest electronic instruments the basic principles + of the Theremin have often been used to design new musical interfaces. We present + the structured design and evaluation of a set of 3D interfaces for a virtual Theremin, + the VRemin. The variants differ in the size of the interaction space, the interface + complexity, and the applied IO devices. We conducted a formal evaluation based + on the well-known AttrakDiff questionnaire for evaluating the hedonic and pragmatic + quality of interactive products. The presented work is a first approach towards + a participatory design process for musical interfaces that includes user evaluation + at early design phases. ' + address: 'Genoa, Italy' + author: 'Geiger, Christian and Reckter, Holger and Paschke, David and Schulz, Florian + and Poepel, Cornelius' + bibtex: "@inproceedings{Geiger2008,\n abstract = {Being one of the earliest electronic\ + \ instruments the basic principles of the Theremin have often been used to design\ + \ new musical interfaces. We present the structured design and evaluation of a\ + \ set of 3D interfaces for a virtual Theremin, the VRemin. The variants differ\ + \ in the size of the interaction space, the interface complexity, and the applied\ + \ IO devices. 
We conducted a formal evaluation based on the well-known AttrakDiff\ + \ questionnaire for evaluating the hedonic and pragmatic quality of interactive\ + \ products. The presented work is a first approach towards a participatory design\ + \ process for musical interfaces that includes user evaluation at early design\ + \ phases. },\n address = {Genoa, Italy},\n author = {Geiger, Christian and Reckter,\ + \ Holger and Paschke, David and Schulz, Florian and Poepel, Cornelius},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179545},\n issn = {2220-4806},\n keywords\ + \ = {3d interaction techniques,an important concept for,both hands,evaluation,few\ + \ wimp interface concepts,in contrast the use,make efficient use of,nime08,of\ + \ both hands is,theremin-based interfaces},\n pages = {303--306},\n title = {Towards\ + \ Participatory Design and Evaluation of Theremin-based Musical Interfaces},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_303.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176961 + doi: 10.5281/zenodo.1179545 issn: 2220-4806 - keywords: 'Robotics, computer control, MIDI, player pianos, mechanical music, percussion, - sound effects, Dadaism. 
' - pages: 300--303 - title: 'A "Ballet M\''{e}canique" for the 21{s}t Century: Performing George Antheil''s - Dadaist Masterpiece with Robots' - url: http://www.nime.org/proceedings/2006/nime2006_300.pdf - year: 2006 + keywords: '3d interaction techniques,an important concept for,both hands,evaluation,few + wimp interface concepts,in contrast the use,make efficient use of,nime08,of both + hands is,theremin-based interfaces' + pages: 303--306 + title: Towards Participatory Design and Evaluation of Theremin-based Musical Interfaces + url: http://www.nime.org/proceedings/2008/nime2008_303.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Lemouton2006 - abstract: This paper deals with the first musical usage of anexperimental system - dedicated to the optical detection ofthe position of a trombone's slide. - address: 'Paris, France' - author: 'Lemouton, Serge and Stroppa, Marco and Sluchin, Benny' - bibtex: "@inproceedings{Lemouton2006,\n abstract = {This paper deals with the first\ - \ musical usage of anexperimental system dedicated to the optical detection ofthe\ - \ position of a trombone's slide.},\n address = {Paris, France},\n author = {Lemouton,\ - \ Serge and Stroppa, Marco and Sluchin, Benny},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176963},\n issn = {2220-4806},\n keywords = {augmented instrument,chamber\ - \ electronics,computer,interaction,musical motivation,performer,trombone},\n pages\ - \ = {304--307},\n title = {Using the Augmented Trombone in \"I will not kiss your\ - \ f.ing flag\"},\n url = {http://www.nime.org/proceedings/2006/nime2006_304.pdf},\n\ - \ year = {2006}\n}\n" + ID: Henriques2008 + address: 'Genoa, Italy' + author: 'Henriques, Tom\''{a}s' + bibtex: "@inproceedings{Henriques2008,\n address = {Genoa, Italy},\n author = {Henriques,\ + \ Tom\\'{a}s},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical 
Expression},\n doi = {10.5281/zenodo.1179565},\n issn\ + \ = {2220-4806},\n keywords = {computer music,musical instrument,nime08,sensor\ + \ technologies},\n pages = {307--310},\n title = {META-{EV}I Innovative Performance\ + \ Paths with a Wind Controller},\n url = {http://www.nime.org/proceedings/2008/nime2008_307.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176963 + doi: 10.5281/zenodo.1179565 issn: 2220-4806 - keywords: 'augmented instrument,chamber electronics,computer,interaction,musical - motivation,performer,trombone' - pages: 304--307 - title: Using the Augmented Trombone in "I will not kiss your f.ing flag" - url: http://www.nime.org/proceedings/2006/nime2006_304.pdf - year: 2006 + keywords: 'computer music,musical instrument,nime08,sensor technologies' + pages: 307--310 + title: 'META-{EV}I Innovative Performance Paths with a Wind Controller' + url: http://www.nime.org/proceedings/2008/nime2008_307.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Schiesser2006 - address: 'Paris, France' - author: 'Schiesser, S\''{e}bastien and Traube, Caroline' - bibtex: "@inproceedings{Schiesser2006,\n address = {Paris, France},\n author = {Schiesser,\ - \ S\\'{e}bastien and Traube, Caroline},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177001},\n\ - \ issn = {2220-4806},\n keywords = {saxophone, augmented instrument, live electronics,\ - \ perfor- mance, gestural control },\n pages = {308--313},\n title = {On Making\ - \ and Playing an Electronically-augmented Saxophone},\n url = {http://www.nime.org/proceedings/2006/nime2006_308.pdf},\n\ - \ year = {2006}\n}\n" + ID: Price2008 + address: 'Genoa, Italy' + author: 'Price, Robin and Rebelo, Pedro' + bibtex: "@inproceedings{Price2008,\n address = {Genoa, Italy},\n author = {Price,\ + \ Robin and Rebelo, Pedro},\n booktitle = 
{Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179615},\n\ + \ issn = {2220-4806},\n keywords = {Mapping, database, audiovisual, radio, installation\ + \ art. },\n pages = {311--314},\n title = {Database and Mapping Design for Audiovisual\ + \ Prepared Radio Set Installation},\n url = {http://www.nime.org/proceedings/2008/nime2008_311.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177001 + doi: 10.5281/zenodo.1179615 issn: 2220-4806 - keywords: 'saxophone, augmented instrument, live electronics, perfor- mance, gestural - control ' - pages: 308--313 - title: On Making and Playing an Electronically-augmented Saxophone - url: http://www.nime.org/proceedings/2006/nime2006_308.pdf - year: 2006 - - -- ENTRYTYPE: inproceedings - ID: Smyth2006 - address: 'Paris, France' - author: 'Smyth, Tamara' - bibtex: "@inproceedings{Smyth2006,\n address = {Paris, France},\n author = {Smyth,\ - \ Tamara},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177007},\n issn = {2220-4806},\n\ - \ keywords = {khaen, sound synthesis control, mapping, musical acoustics },\n\ - \ pages = {314--317},\n title = {Handheld Acoustic Filter Bank for Musical Control},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_314.pdf},\n year = {2006}\n\ - }\n" + keywords: 'Mapping, database, audiovisual, radio, installation art. ' + pages: 311--314 + title: Database and Mapping Design for Audiovisual Prepared Radio Set Installation + url: http://www.nime.org/proceedings/2008/nime2008_311.pdf + year: 2008 + + +- ENTRYTYPE: inproceedings + ID: Jo2008 + abstract: 'Monalisa is a software platform that enables to "see the sound, hear + the image". 
It consists of three software: Monalisa Application, Monalisa-Audio + Unit, and Monalisa-Image Unit, and an installation: Monalisa "shadow of the sound". + In this paper, we describe the implementation of each software and installation + with the explanation of the basic algorithms to treat the image data and the sound + data transparently.' + address: 'Genoa, Italy' + author: 'Jo, Kazuhiro and Nagano, Norihisa' + bibtex: "@inproceedings{Jo2008,\n abstract = {Monalisa is a software platform that\ + \ enables to \"see the sound, hear the image\". It consists of three software:\ + \ Monalisa Application, Monalisa-Audio Unit, and Monalisa-Image Unit, and an installation:\ + \ Monalisa \"shadow of the sound\". In this paper, we describe the implementation\ + \ of each software and installation with the explanation of the basic algorithms\ + \ to treat the image data and the sound data transparently.},\n address = {Genoa,\ + \ Italy},\n author = {Jo, Kazuhiro and Nagano, Norihisa},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179569},\n issn = {2220-4806},\n keywords = {Sound and\ + \ Image Processing Software, Plug-in, Installation },\n pages = {315--318},\n\ + \ title = {Monalisa : \"See the Sound , Hear the Image\"},\n url = {http://www.nime.org/proceedings/2008/nime2008_315.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177007 + doi: 10.5281/zenodo.1179569 issn: 2220-4806 - keywords: 'khaen, sound synthesis control, mapping, musical acoustics ' - pages: 314--317 - title: Handheld Acoustic Filter Bank for Musical Control - url: http://www.nime.org/proceedings/2006/nime2006_314.pdf - year: 2006 + keywords: 'Sound and Image Processing Software, Plug-in, Installation ' + pages: 315--318 + title: 'Monalisa : "See the Sound , Hear the Image"' + url: 
http://www.nime.org/proceedings/2008/nime2008_315.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Nixdorf2006 - abstract: 'In this paper we will report on the use of real-time soundspatialization - in Challenging Bodies, a trans-disciplinaryperformance project at the University - of Regina. Usingwell-understood spatialization techniques mapped to a custom interface, - a computer system was built that allowedlive spatial control of ten sound signals - from on-stage performers. This spatial control added a unique dynamic element - to an already ultramodern performance. The systemis described in detail, including - the main advantages overexisting spatialization systems: simplicity, usability, - customization and scalability' - address: 'Paris, France' - author: 'Nixdorf, Joshua J. and Gerhard, David' - bibtex: "@inproceedings{Nixdorf2006,\n abstract = {In this paper we will report\ - \ on the use of real-time soundspatialization in Challenging Bodies, a trans-disciplinaryperformance\ - \ project at the University of Regina. Usingwell-understood spatialization techniques\ - \ mapped to a custom interface, a computer system was built that allowedlive spatial\ - \ control of ten sound signals from on-stage performers. This spatial control\ - \ added a unique dynamic element to an already ultramodern performance. The systemis\ - \ described in detail, including the main advantages overexisting spatialization\ - \ systems: simplicity, usability, customization and scalability},\n address =\ - \ {Paris, France},\n author = {Nixdorf, Joshua J. 
and Gerhard, David},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176981},\n issn = {2220-4806},\n keywords\ - \ = {gem,live systems,pd,performance sys-,real-time systems,sound architecture,sound\ - \ localization,sound spatialization,surround sound,tems},\n pages = {318--321},\n\ - \ title = {Real-time Sound Source Spatialization as Used in Challenging Bodies:\ - \ Implementation and Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_318.pdf},\n\ - \ year = {2006}\n}\n" + ID: Robertson2008 + address: 'Genoa, Italy' + author: 'Robertson, Andrew and Plumbley, Mark D. and Bryan-Kinns, Nick' + bibtex: "@inproceedings{Robertson2008,\n address = {Genoa, Italy},\n author = {Robertson,\ + \ Andrew and Plumbley, Mark D. and Bryan-Kinns, Nick},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179619},\n issn = {2220-4806},\n keywords = {Automatic\ + \ Accompaniment, Beat Tracking, Human-Computer Interaction, Musical Interface\ + \ Evaluation },\n pages = {319--324},\n title = {A Turing Test for B-Keeper :\ + \ Evaluating an Interactive},\n url = {http://www.nime.org/proceedings/2008/nime2008_319.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176981 + doi: 10.5281/zenodo.1179619 issn: 2220-4806 - keywords: 'gem,live systems,pd,performance sys-,real-time systems,sound architecture,sound - localization,sound spatialization,surround sound,tems' - pages: 318--321 - title: 'Real-time Sound Source Spatialization as Used in Challenging Bodies: Implementation - and Performance' - url: http://www.nime.org/proceedings/2006/nime2006_318.pdf - year: 2006 + keywords: 'Automatic Accompaniment, Beat Tracking, Human-Computer Interaction, Musical + Interface Evaluation ' + pages: 319--324 + 
title: 'A Turing Test for B-Keeper : Evaluating an Interactive' + url: http://www.nime.org/proceedings/2008/nime2008_319.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Bottoni2006 - address: 'Paris, France' - author: 'Bottoni, Paolo and Faralli, Stefano and Labella, Anna and Pierro, Mario' - bibtex: "@inproceedings{Bottoni2006,\n address = {Paris, France},\n author = {Bottoni,\ - \ Paolo and Faralli, Stefano and Labella, Anna and Pierro, Mario},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176879},\n issn = {2220-4806},\n keywords\ - \ = {mapping, planning, agent, Max/MSP },\n pages = {322--325},\n title = {Mapping\ - \ with Planning Agents in the Max/MSP Environment: the GO/Max Language},\n url\ - \ = {http://www.nime.org/proceedings/2006/nime2006_322.pdf},\n year = {2006}\n\ + ID: Gatzsche2008 + abstract: 'In this paper, we present a pitch space based musical interface approach. + A pitch space arranges tones in a way that meaningful tone combinations can be + easily generated. Using a touch sensitive surface or a 3D-Joystick a player can + move through the pitch space and create the desired sound by selecting tones. + The more optimal the tones are geometrically arranged, the less control parameters + are required to move through the space and to select the desired pitches. For + this the quality of pitch space based musical interfaces depends on two factors: + 1. the way how the tones are organized within the pitch space and 2. the way how + the parameters of a given controller are used to move through the space and to + select pitches. This paper presents a musical interface based on a tonal pitch + space derived from a four dimensional model found by the music psychologists [11], + [2]. The proposed pitch space particularly eases the creation of tonal harmonic + music. Simultaneously it outlines music psychological and theoretical principles + of music. 
' + address: 'Genoa, Italy' + author: 'Gatzsche, Gabriel and Mehnert, Markus and Stöcklmeier, Christian' + bibtex: "@inproceedings{Gatzsche2008,\n abstract = {In this paper, we present a\ + \ pitch space based musical interface approach. A pitch space arranges tones in\ + \ a way that meaningful tone combinations can be easily generated. Using a touch\ + \ sensitive surface or a 3D-Joystick a player can move through the pitch space\ + \ and create the desired sound by selecting tones. The more optimal the tones\ + \ are geometrically arranged, the less control parameters are required to move\ + \ through the space and to select the desired pitches. For this the quality of\ + \ pitch space based musical interfaces depends on two factors: 1. the way how\ + \ the tones are organized within the pitch space and 2. the way how the parameters\ + \ of a given controller are used to move through the space and to select pitches.\ + \ This paper presents a musical interface based on a tonal pitch space derived\ + \ from a four dimensional model found by the music psychologists [11], [2]. The\ + \ proposed pitch space particularly eases the creation of tonal harmonic music.\ + \ Simultaneously it outlines music psychological and theoretical principles of\ + \ music. },\n address = {Genoa, Italy},\n author = {Gatzsche, Gabriel and Mehnert,\ + \ Markus and St\\''{o}cklmeier, Christian},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179541},\n issn = {2220-4806},\n keywords = {Pitch space, musical\ + \ interface, Carol L. 
Krumhansl, music psychology, music theory, western tonal\ + \ music, 3D tonality model, spiral of thirds, 3D, Hardware controller, Symmetry\ + \ model },\n pages = {325--330},\n title = {Interaction with Tonal Pitch Spaces},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_325.pdf},\n year = {2008}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176879 + doi: 10.5281/zenodo.1179541 issn: 2220-4806 - keywords: 'mapping, planning, agent, Max/MSP ' - pages: 322--325 - title: 'Mapping with Planning Agents in the Max/MSP Environment: the GO/Max Language' - url: http://www.nime.org/proceedings/2006/nime2006_322.pdf - year: 2006 + keywords: 'Pitch space, musical interface, Carol L. Krumhansl, music psychology, + music theory, western tonal music, 3D tonality model, spiral of thirds, 3D, Hardware + controller, Symmetry model ' + pages: 325--330 + title: Interaction with Tonal Pitch Spaces + url: http://www.nime.org/proceedings/2008/nime2008_325.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Bonardi2006 - abstract: 'In this article, we present the first step of our research work todesign - a Virtual Assistant for Performers and Stage Directors,able to give a feedback - from performances. We use amethodology to automatically construct fuzzy rules - in a FuzzyRule-Based System that detects contextual emotions from anactor''s performance - during a show. We collect video data from a lot of performances of the sameshow - from which it should be possible to visualize all the emotions and intents or - more precisely "intent graphs". To perform this, the collected data defining low-level - descriptors are aggregated and converted into high-level characterizations. Then, - depending on the retrieved data and on their distributionon the axis, we partition - the universes into classes. 
The last stepis the building of the fuzzy rules that - are obtained from the classes and that permit to give conclusions to label the - detected emotions.' - address: 'Paris, France' - author: 'Bonardi, Alain and Truck, Isis and Akdag, Herman' - bibtex: "@inproceedings{Bonardi2006,\n abstract = {In this article, we present the\ - \ first step of our research work todesign a Virtual Assistant for Performers\ - \ and Stage Directors,able to give a feedback from performances. We use amethodology\ - \ to automatically construct fuzzy rules in a FuzzyRule-Based System that detects\ - \ contextual emotions from anactor's performance during a show. We collect video\ - \ data from a lot of performances of the sameshow from which it should be possible\ - \ to visualize all the emotions and intents or more precisely \"intent graphs\"\ - . To perform this, the collected data defining low-level descriptors are aggregated\ - \ and converted into high-level characterizations. Then, depending on the retrieved\ - \ data and on their distributionon the axis, we partition the universes into classes.\ - \ The last stepis the building of the fuzzy rules that are obtained from the classes\ - \ and that permit to give conclusions to label the detected emotions.},\n address\ - \ = {Paris, France},\n author = {Bonardi, Alain and Truck, Isis and Akdag, Herman},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176875},\n issn = {2220-4806},\n\ - \ keywords = {Virtual Assistant, Intents, Emotion detector, Fuzzy Classes, Stage\ - \ Director, Performance. 
},\n pages = {326--329},\n title = {Towards a Virtual\ - \ Assistant for Performers and Stage Directors},\n url = {http://www.nime.org/proceedings/2006/nime2006_326.pdf},\n\ - \ year = {2006}\n}\n" + ID: Chordia2008 + abstract: 'We describe a system that can listen to a performance of Indian music + and recognize the raag, the fundamental melodicframework that Indian classical + musicians improvise within.In addition to determining the most likely raag being + performed, the system displays the estimated the likelihoodof each of the other + possible raags, visualizing the changesover time. The system computes the pitch-class + distributionand uses a Bayesian decision rule to classify the resultingtwelve + dimensional feature vector, where each feature represents the relative use of + each pitch class. We show that thesystem achieves high performance on a variety + of sources,making it a viable tool for interactive performance.' + address: 'Genoa, Italy' + author: 'Chordia, Parag and Rae, Alex' + bibtex: "@inproceedings{Chordia2008,\n abstract = {We describe a system that can\ + \ listen to a performance of Indian music and recognize the raag, the fundamental\ + \ melodicframework that Indian classical musicians improvise within.In addition\ + \ to determining the most likely raag being performed, the system displays the\ + \ estimated the likelihoodof each of the other possible raags, visualizing the\ + \ changesover time. The system computes the pitch-class distributionand uses a\ + \ Bayesian decision rule to classify the resultingtwelve dimensional feature vector,\ + \ where each feature represents the relative use of each pitch class. 
We show\ + \ that thesystem achieves high performance on a variety of sources,making it a\ + \ viable tool for interactive performance.},\n address = {Genoa, Italy},\n author\ + \ = {Chordia, Parag and Rae, Alex},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179509},\n\ + \ issn = {2220-4806},\n keywords = {automatic recognition,indian music,nime08,raag,raga},\n\ + \ pages = {331--334},\n title = {Real-Time Raag Recognition for Interactive Music},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_331.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176875 + doi: 10.5281/zenodo.1179509 issn: 2220-4806 - keywords: 'Virtual Assistant, Intents, Emotion detector, Fuzzy Classes, Stage Director, - Performance. ' - pages: 326--329 - title: Towards a Virtual Assistant for Performers and Stage Directors - url: http://www.nime.org/proceedings/2006/nime2006_326.pdf - year: 2006 + keywords: 'automatic recognition,indian music,nime08,raag,raga' + pages: 331--334 + title: Real-Time Raag Recognition for Interactive Music + url: http://www.nime.org/proceedings/2008/nime2008_331.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Nagashima2006 - abstract: 'This is a studio report of researches and projects in SUAC(Shizuoka University - of Art and Culture). SUAC was foundedin April 2000, and organized NIME04 as you - know. SUAC has "Faculty of Design" and "Department of Art and Science" and all - students study interactive systems and media arts.SUAC has organized Media Art - Festival (MAF) from 2001 to2005. Domestic/overseas artists participated in SUAC - MAF,and SUAC students'' projects also joined and exhibited theirworks in MAF. 
- I will introduce the production cases withinteractive media-installations by SUAC - students'' projectsfrom the aspect experiences with novel interfaces ineducation - and entertainment and reports on students projectsin the framework of NIME related - courses.' - address: 'Paris, France' - author: 'Nagashima, Yoichi' - bibtex: "@inproceedings{Nagashima2006,\n abstract = {This is a studio report of\ - \ researches and projects in SUAC(Shizuoka University of Art and Culture). SUAC\ - \ was foundedin April 2000, and organized NIME04 as you know. SUAC has \"Faculty\ - \ of Design\" and \"Department of Art and Science\" and all students study interactive\ - \ systems and media arts.SUAC has organized Media Art Festival (MAF) from 2001\ - \ to2005. Domestic/overseas artists participated in SUAC MAF,and SUAC students'\ - \ projects also joined and exhibited theirworks in MAF. I will introduce the production\ - \ cases withinteractive media-installations by SUAC students' projectsfrom the\ - \ aspect experiences with novel interfaces ineducation and entertainment and reports\ - \ on students projectsin the framework of NIME related courses.},\n address =\ - \ {Paris, France},\n author = {Nagashima, Yoichi},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176977},\n issn = {2220-4806},\n keywords = {Interactive\ - \ Installation, Sensors, Media Arts, Studio Reports },\n pages = {330--333},\n\ - \ title = {Students' Projects of Interactive Media-installations in SUAC},\n url\ - \ = {http://www.nime.org/proceedings/2006/nime2006_330.pdf},\n year = {2006}\n\ + ID: Vinjar2008 + abstract: 'A general CAC1-environment charged with physical-modelling capabilities + is described. 
It combines CommonMusic,ODE and Fluxus in a modular way, making + a powerful andflexible environment for experimenting with physical modelsin composition.Composition + in this respect refers to the generation andmanipulation of structure typically + on or above a note, phrase or voice-level. Compared to efforts in synthesisand + performance little work has gone into applying physicalmodels to composition. + Potentials in composition-applications are presumably large.The implementation + of the physically equipped CAC-environment is described in detail.' + address: 'Genoa, Italy' + author: 'Vinjar, Anders' + bibtex: "@inproceedings{Vinjar2008,\n abstract = {A general CAC1-environment charged\ + \ with physical-modelling capabilities is described. It combines CommonMusic,ODE\ + \ and Fluxus in a modular way, making a powerful andflexible environment for experimenting\ + \ with physical modelsin composition.Composition in this respect refers to the\ + \ generation andmanipulation of structure typically on or above a note, phrase\ + \ or voice-level. Compared to efforts in synthesisand performance little work\ + \ has gone into applying physicalmodels to composition. 
Potentials in composition-applications\ + \ are presumably large.The implementation of the physically equipped CAC-environment\ + \ is described in detail.},\n address = {Genoa, Italy},\n author = {Vinjar, Anders},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179647},\n issn = {2220-4806},\n\ + \ keywords = {Physical Models in composition, CommonMusic, Musical mapping },\n\ + \ pages = {335--338},\n title = {Bending Common Music with Physical Models},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_335.pdf},\n year = {2008}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176977 + doi: 10.5281/zenodo.1179647 issn: 2220-4806 - keywords: 'Interactive Installation, Sensors, Media Arts, Studio Reports ' - pages: 330--333 - title: Students' Projects of Interactive Media-installations in SUAC - url: http://www.nime.org/proceedings/2006/nime2006_330.pdf - year: 2006 + keywords: 'Physical Models in composition, CommonMusic, Musical mapping ' + pages: 335--338 + title: Bending Common Music with Physical Models + url: http://www.nime.org/proceedings/2008/nime2008_335.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Breinbjerg2006 - abstract: 'In this paper we describe the intentions, the design and functionality - of an Acousmatic Composition Environment that allows children or musical novices - to educate their auditory curiosity by recording, manipulating and mixing sounds - of everyday life. The environment consists of three stands: A stand for sound - recording with a soundproof box that ensure good recording facilities in a noisy - environment; a stand for sound manipulation with five simple, tangible interfaces; - a stand for sound mixing with a graphical computer interface presented on two - touch screens. 
' - address: 'Paris, France' - author: 'Breinbjerg, Morten and Caprani, Ole and Lunding, Rasmus and Kramhoft, Line' - bibtex: "@inproceedings{Breinbjerg2006,\n abstract = {In this paper we describe\ - \ the intentions, the design and functionality of an Acousmatic Composition Environment\ - \ that allows children or musical novices to educate their auditory curiosity\ - \ by recording, manipulating and mixing sounds of everyday life. The environment\ - \ consists of three stands: A stand for sound recording with a soundproof box\ - \ that ensure good recording facilities in a noisy environment; a stand for sound\ - \ manipulation with five simple, tangible interfaces; a stand for sound mixing\ - \ with a graphical computer interface presented on two touch screens. },\n address\ - \ = {Paris, France},\n author = {Breinbjerg, Morten and Caprani, Ole and Lunding,\ - \ Rasmus and Kramhoft, Line},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176883},\n\ - \ issn = {2220-4806},\n keywords = {Acousmatic listening, aesthetics, tangible\ - \ interfaces. },\n pages = {334--337},\n title = {An Acousmatic Composition Environment},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_334.pdf},\n year = {2006}\n\ - }\n" + ID: Schedel2008 + abstract: 'The Color of Waiting is an interactive theater workwith music, dance, + and video which was developed atSTEIM in Amsterdam and further refined at CMMASin + Morelia Mexico with funding from Meet theComposer. Using Max/MSP/ Jitter a cellist + is able tocontrol sound and video during the performancewhile performing a structured + improvisation inresponse to the dancer''s movement. In order toensure. repeated + performances of The Color o fWaiting , Kinesthetech Sense created the scorecontained + in this paper. 
Performance is essential tothe practice of time-based art as a + living form, buthas been complicated by the unique challenges ininterpretation + and re-creation posed by worksincorporating technology. Creating a detailed scoreis + one of the ways artists working with technologycan combat obsolescence.' + address: 'Genoa, Italy' + author: 'Schedel, Margaret and Rootberg, Alison and de Martelly, Elizabeth' + bibtex: "@inproceedings{Schedel2008,\n abstract = {The Color of Waiting is an interactive\ + \ theater workwith music, dance, and video which was developed atSTEIM in Amsterdam\ + \ and further refined at CMMASin Morelia Mexico with funding from Meet theComposer.\ + \ Using Max/MSP/ Jitter a cellist is able tocontrol sound and video during the\ + \ performancewhile performing a structured improvisation inresponse to the dancer's\ + \ movement. In order toensure. repeated performances of The Color o fWaiting ,\ + \ Kinesthetech Sense created the scorecontained in this paper. Performance is\ + \ essential tothe practice of time-based art as a living form, buthas been complicated\ + \ by the unique challenges ininterpretation and re-creation posed by worksincorporating\ + \ technology. 
Creating a detailed scoreis one of the ways artists working with\ + \ technologycan combat obsolescence.},\n address = {Genoa, Italy},\n author =\ + \ {Schedel, Margaret and Rootberg, Alison and de Martelly, Elizabeth},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179625},\n issn = {2220-4806},\n keywords\ + \ = {nime08},\n pages = {339--342},\n title = {Scoring an Interactive, Multimedia\ + \ Performance Work},\n url = {http://www.nime.org/proceedings/2008/nime2008_339.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176883 + doi: 10.5281/zenodo.1179625 issn: 2220-4806 - keywords: 'Acousmatic listening, aesthetics, tangible interfaces. ' - pages: 334--337 - title: An Acousmatic Composition Environment - url: http://www.nime.org/proceedings/2006/nime2006_334.pdf - year: 2006 + keywords: nime08 + pages: 339--342 + title: 'Scoring an Interactive, Multimedia Performance Work' + url: http://www.nime.org/proceedings/2008/nime2008_339.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Hamilton2006 - address: 'Paris, France' - author: 'Hamilton, Robert' - bibtex: "@inproceedings{Hamilton2006,\n address = {Paris, France},\n author = {Hamilton,\ - \ Robert},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176919},\n issn = {2220-4806},\n\ - \ keywords = {Bioinformatics, composition, real-time score generation. },\n pages\ - \ = {338--341},\n title = {Bioinformatic Feedback: Performer Bio-data as a Driver\ - \ for Real-time Composition},\n url = {http://www.nime.org/proceedings/2006/nime2006_338.pdf},\n\ - \ year = {2006}\n}\n" + ID: Endo2008 + abstract: 'We developed a rhythmic instruments ensemble simulator generating animation + using game controllers. 
The motion of a player is transformed into musical expression + data of MIDI to generate sounds, and MIDI data are transformed into animation + control parameters to generate movies. These animations and music are shown as + the reflection of player performance. Multiple players can perform a musical ensemble + to make more varied patterns of animation. Our system is so easy that everyone + can enjoy performing a fusion of music and animation. ' + address: 'Genoa, Italy' + author: 'Endo, Ayaka and Kuhara, Yasuo' + bibtex: "@inproceedings{Endo2008,\n abstract = {We developed a rhythmic instruments\ + \ ensemble simulator generating animation using game controllers. The motion of\ + \ a player is transformed into musical expression data of MIDI to generate sounds,\ + \ and MIDI data are transformed into animation control parameters to generate\ + \ movies. These animations and music are shown as the reflection of player performance.\ + \ Multiple players can perform a musical ensemble to make more varied patterns\ + \ of animation. Our system is so easy that everyone can enjoy performing a fusion\ + \ of music and animation. },\n address = {Genoa, Italy},\n author = {Endo, Ayaka\ + \ and Kuhara, Yasuo},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179529},\n\ + \ issn = {2220-4806},\n keywords = {Wii Remote, Wireless game controller, MIDI,\ + \ Max/MSP, Flash movie, Gesture music and animation. },\n pages = {345--346},\n\ + \ title = {Rhythmic Instruments Ensemble Simulator Generating Animation Movies\ + \ Using {Bluetooth} Game Controller},\n url = {http://www.nime.org/proceedings/2008/nime2008_345.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176919 + doi: 10.5281/zenodo.1179529 issn: 2220-4806 - keywords: 'Bioinformatics, composition, real-time score generation. 
' - pages: 338--341 - title: 'Bioinformatic Feedback: Performer Bio-data as a Driver for Real-time Composition' - url: http://www.nime.org/proceedings/2006/nime2006_338.pdf - year: 2006 + keywords: 'Wii Remote, Wireless game controller, MIDI, Max/MSP, Flash movie, Gesture + music and animation. ' + pages: 345--346 + title: Rhythmic Instruments Ensemble Simulator Generating Animation Movies Using + Bluetooth Game Controller + url: http://www.nime.org/proceedings/2008/nime2008_345.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Pak2006 - address: 'Paris, France' - author: 'Pak, Jonathan' - bibtex: "@inproceedings{Pak2006,\n address = {Paris, France},\n author = {Pak, Jonathan},\n\ + ID: McMillen2008 + abstract: 'The demonstration of a series of properly weighted and balanced Bluetooth + sensor bows for violin, viola, cello and bass. ' + address: 'Genoa, Italy' + author: 'McMillen, Keith A.' + bibtex: "@inproceedings{McMillen2008,\n abstract = {The demonstration of a series\ + \ of properly weighted and balanced Bluetooth sensor bows for violin, viola, cello\ + \ and bass. 
},\n address = {Genoa, Italy},\n author = {McMillen, Keith A.},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176983},\n issn = {2220-4806},\n\ - \ pages = {342--345},\n title = {The Light Matrix: An Interface for Musical Expression\ - \ and Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_342.pdf},\n\ - \ year = {2006}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1179597},\n issn = {2220-4806},\n\ + \ keywords = {Sensor bow, stringed instruments, bluetooth },\n pages = {347--348},\n\ + \ title = {Stage-Worthy Sensor Bows for Stringed Instruments},\n url = {http://www.nime.org/proceedings/2008/nime2008_347.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176983 + doi: 10.5281/zenodo.1179597 issn: 2220-4806 - pages: 342--345 - title: 'The Light Matrix: An Interface for Musical Expression and Performance' - url: http://www.nime.org/proceedings/2006/nime2006_342.pdf - year: 2006 + keywords: 'Sensor bow, stringed instruments, bluetooth ' + pages: 347--348 + title: Stage-Worthy Sensor Bows for Stringed Instruments + url: http://www.nime.org/proceedings/2008/nime2008_347.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Kobayashi2006 - address: 'Paris, France' - author: 'Kobayashi, Shigeru and Endo, Takanori and Harada, Katsuhiko and Oishi, - Shosei' - bibtex: "@inproceedings{Kobayashi2006,\n address = {Paris, France},\n author = {Kobayashi,\ - \ Shigeru and Endo, Takanori and Harada, Katsuhiko and Oishi, Shosei},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176945},\n issn = {2220-4806},\n keywords\ - \ = {learning,rapid prototyping,reconfigurable,sensor interface},\n pages = {346--351},\n\ - \ title = {GAINER: A Reconfigurable {I/O} Module and Software 
Libraries for Education},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_346.pdf},\n year = {2006}\n\ - }\n" + ID: Flanigan2008 + abstract: Plink Jet is a robotic musical instrument made from scavenged inkjet printers + and guitar parts. We investigate the expressive capabilities of everyday machine + technology by recontextualizing the relatively high-tech mechanisms of typical + office debris into an electro-acoustic musical instrument. We also explore the + performative relationship between human and machine. + address: 'Genoa, Italy' + author: 'Flanigan, Lesley and Doro, Andrew' + bibtex: "@inproceedings{Flanigan2008,\n abstract = {Plink Jet is a robotic musical\ + \ instrument made from scavenged inkjet printers and guitar parts. We investigate\ + \ the expressive capabilities of everyday machine technology by recontextualizing\ + \ the relatively high-tech mechanisms of typical office debris into an electro-acoustic\ + \ musical instrument. We also explore the performative relationship between human\ + \ and machine.},\n address = {Genoa, Italy},\n author = {Flanigan, Lesley and\ + \ Doro, Andrew},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179533},\n\ + \ issn = {2220-4806},\n keywords = {Interaction Design, Repurposing of Consumer\ + \ Technology, DIY, Performing Technology, Robotics, Automation, Infra-Instrument\ + \ },\n pages = {349--351},\n title = {Plink Jet},\n url = {http://www.nime.org/proceedings/2008/nime2008_349.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176945 + doi: 10.5281/zenodo.1179533 issn: 2220-4806 - keywords: 'learning,rapid prototyping,reconfigurable,sensor interface' - pages: 346--351 - title: 'GAINER: A Reconfigurable {I/O} Module and Software Libraries for Education' - url: http://www.nime.org/proceedings/2006/nime2006_346.pdf 
- year: 2006 + keywords: 'Interaction Design, Repurposing of Consumer Technology, DIY, Performing + Technology, Robotics, Automation, Infra-Instrument ' + pages: 349--351 + title: Plink Jet + url: http://www.nime.org/proceedings/2008/nime2008_349.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Beilharz2006 - abstract: 'Hyper-shaku (Border-Crossing) is an interactive sensor environment that - uses motion sensors to trigger immediate responses and generative processes augmenting - the Japanese bamboo shakuhachi in both the auditory and visual domain. The latter - differentiates this process from many hyper-instruments by building a performance - of visual design as well as electronic music on top of the acoustic performance. - It utilizes a combination of computer vision and wireless sensing technologies - conflated from preceding works. This paper outlines the use of gesture in these - preparatory sound and audio-visual performative, installation and sonification - works, leading to a description of the Hyper-shaku environment integrating sonification - and generative elements. ' - address: 'Paris, France' - author: 'Beilharz, Kirsty and Jakovich, Joanne and Ferguson, Sam' - bibtex: "@inproceedings{Beilharz2006,\n abstract = {Hyper-shaku (Border-Crossing)\ - \ is an interactive sensor environment that uses motion sensors to trigger immediate\ - \ responses and generative processes augmenting the Japanese bamboo shakuhachi\ - \ in both the auditory and visual domain. The latter differentiates this process\ - \ from many hyper-instruments by building a performance of visual design as well\ - \ as electronic music on top of the acoustic performance. It utilizes a combination\ - \ of computer vision and wireless sensing technologies conflated from preceding\ - \ works. 
This paper outlines the use of gesture in these preparatory sound and\ - \ audio-visual performative, installation and sonification works, leading to a\ - \ description of the Hyper-shaku environment integrating sonification and generative\ - \ elements. },\n address = {Paris, France},\n author = {Beilharz, Kirsty and Jakovich,\ - \ Joanne and Ferguson, Sam},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176867},\n\ - \ issn = {2220-4806},\n keywords = {Gesture-controllers, sonification, hyper-instrument\ - \ },\n pages = {352--357},\n title = {Hyper-shaku (Border-crossing): Towards the\ - \ Multi-modal Gesture-controlled Hyper-Instrument},\n url = {http://www.nime.org/proceedings/2006/nime2006_352.pdf},\n\ - \ year = {2006}\n}\n" + ID: Kamiyama2008 + address: 'Genoa, Italy' + author: 'Kamiyama, Yusuke and Tanaka, Mai and Tanaka, Hiroya' + bibtex: "@inproceedings{Kamiyama2008,\n address = {Genoa, Italy},\n author = {Kamiyama,\ + \ Yusuke and Tanaka, Mai and Tanaka, Hiroya},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179575},\n issn = {2220-4806},\n keywords = {umbrella, musical\ + \ expression, sound generating device, 3D sound system, sound-field arrangement.\ + \ },\n pages = {352--353},\n title = {Oto-Shigure : An Umbrella-Shaped Sound Generator\ + \ for Musical Expression},\n url = {http://www.nime.org/proceedings/2008/nime2008_352.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176867 + doi: 10.5281/zenodo.1179575 issn: 2220-4806 - keywords: 'Gesture-controllers, sonification, hyper-instrument ' - pages: 352--357 - title: 'Hyper-shaku (Border-crossing): Towards the Multi-modal Gesture-controlled - Hyper-Instrument' - url: http://www.nime.org/proceedings/2006/nime2006_352.pdf - 
year: 2006 + keywords: 'umbrella, musical expression, sound generating device, 3D sound system, + sound-field arrangement. ' + pages: 352--353 + title: 'Oto-Shigure : An Umbrella-Shaped Sound Generator for Musical Expression' + url: http://www.nime.org/proceedings/2008/nime2008_352.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Farwell2006 - abstract: 'Three electro-acoustic systems were devised for a newtrombone work, Rouse. - This paper presents the technicalsystems and outlines their musical context and - motivation. TheuSlide measures trombone slide-extension by a minimalhardware ultrasonic - technique. An easy calibration proceduremaps linear extension to the slide "positions" - of the player. TheeMouth is a driver that replaces the mouthpiece, with softwareemulation - of trombone tone and algorithmic musical lines,allowing the trombone to appear - to play itself. The eMute isbuilt around a loudspeaker unit, driven so that it - affects stronglythe player''s embouchure, allowing fine control of complex beatpatterns. - eMouth and eMute, under control of the uSlide, set upimprovisatory worlds that - are part of the composed architectureof Rouse.' - address: 'Paris, France' - author: 'Farwell, Neal' - bibtex: "@inproceedings{Farwell2006,\n abstract = {Three electro-acoustic systems\ - \ were devised for a newtrombone work, Rouse. This paper presents the technicalsystems\ - \ and outlines their musical context and motivation. TheuSlide measures trombone\ - \ slide-extension by a minimalhardware ultrasonic technique. An easy calibration\ - \ proceduremaps linear extension to the slide \"positions\" of the player. TheeMouth\ - \ is a driver that replaces the mouthpiece, with softwareemulation of trombone\ - \ tone and algorithmic musical lines,allowing the trombone to appear to play itself.\ - \ The eMute isbuilt around a loudspeaker unit, driven so that it affects stronglythe\ - \ player's embouchure, allowing fine control of complex beatpatterns. 
eMouth and\ - \ eMute, under control of the uSlide, set upimprovisatory worlds that are part\ - \ of the composed architectureof Rouse.},\n address = {Paris, France},\n author\ - \ = {Farwell, Neal},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176895},\n\ - \ issn = {2220-4806},\n keywords = {composition,electro-acoustic adaptation,emulation,illusion,improvisation,mapping,mute,trombone,ultrasonic},\n\ - \ pages = {358--363},\n title = {Adapting the Trombone: a Suite of Electro-acoustic\ - \ Interventions for the Piece},\n url = {http://www.nime.org/proceedings/2006/nime2006_358.pdf},\n\ - \ year = {2006}\n}\n" + ID: Follmer2008 + address: 'Genoa, Italy' + author: 'Follmer, Sean and Warren, Chris and Marquez-Borbon, Adnan' + bibtex: "@inproceedings{Follmer2008,\n address = {Genoa, Italy},\n author = {Follmer,\ + \ Sean and Warren, Chris and Marquez-Borbon, Adnan},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179535},\n issn = {2220-4806},\n keywords = {nime08},\n\ + \ pages = {354--355},\n title = {The Pond : Interactive Multimedia Installation},\n\ + \ url = {http://www.nime.org/proceedings/2008/nime2008_354.pdf},\n year = {2008}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176895 + doi: 10.5281/zenodo.1179535 issn: 2220-4806 - keywords: 'composition,electro-acoustic adaptation,emulation,illusion,improvisation,mapping,mute,trombone,ultrasonic' - pages: 358--363 - title: 'Adapting the Trombone: a Suite of Electro-acoustic Interventions for the - Piece' - url: http://www.nime.org/proceedings/2006/nime2006_358.pdf - year: 2006 + keywords: nime08 + pages: 354--355 + title: 'The Pond : Interactive Multimedia Installation' + url: http://www.nime.org/proceedings/2008/nime2008_354.pdf + year: 2008 - ENTRYTYPE: 
inproceedings - ID: MakiPatola2006 - address: 'Paris, France' - author: 'Maki-Patola, Teemu and H\''''{a}m\''''{a}l\''''{a}inen, Perttu and Kanerva, - Aki' - bibtex: "@inproceedings{MakiPatola2006,\n address = {Paris, France},\n author =\ - \ {Maki-Patola, Teemu and H\\''{a}m\\''{a}l\\''{a}inen, Perttu and Kanerva, Aki},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176971},\n issn = {2220-4806},\n\ - \ keywords = {1,2,2 9,3897,39,425,43,7,8,9},\n pages = {364--369},\n title = {The\ - \ Augmented Djembe Drum --- Sculpting Rhythms},\n url = {http://www.nime.org/proceedings/2006/nime2006_364.pdf},\n\ - \ year = {2006}\n}\n" + ID: Hartman2008 + address: 'Genoa, Italy' + author: 'Hartman, Ethan and Cooper, Jeff and Spratt, Kyle' + bibtex: "@inproceedings{Hartman2008,\n address = {Genoa, Italy},\n author = {Hartman,\ + \ Ethan and Cooper, Jeff and Spratt, Kyle},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179557},\n issn = {2220-4806},\n keywords = {nime08},\n pages\ + \ = {356--357},\n title = {Swing Set : Musical Controllers with Inherent Physical\ + \ Dynamics},\n url = {http://www.nime.org/proceedings/2008/nime2008_356.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176971 + doi: 10.5281/zenodo.1179557 issn: 2220-4806 - keywords: '1,2,2 9,3897,39,425,43,7,8,9' - pages: 364--369 - title: The Augmented Djembe Drum --- Sculpting Rhythms - url: http://www.nime.org/proceedings/2006/nime2006_364.pdf - year: 2006 + keywords: nime08 + pages: 356--357 + title: 'Swing Set : Musical Controllers with Inherent Physical Dynamics' + url: http://www.nime.org/proceedings/2008/nime2008_356.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Favilla2006 - address: 'Paris, France' - author: 'Favilla, 
Stuart and Cannon, Joanne' - bibtex: "@inproceedings{Favilla2006,\n address = {Paris, France},\n author = {Favilla,\ - \ Stuart and Cannon, Joanne},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176897},\n\ - \ issn = {2220-4806},\n pages = {370--375},\n title = {Children of Grainger: Leather\ - \ Instruments for Free Music},\n url = {http://www.nime.org/proceedings/2006/nime2006_370.pdf},\n\ - \ year = {2006}\n}\n" + ID: Modler2008 + address: 'Genoa, Italy' + author: 'Modler, Paul and Myatt, Tony' + bibtex: "@inproceedings{Modler2008,\n address = {Genoa, Italy},\n author = {Modler,\ + \ Paul and Myatt, Tony},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179601},\n\ + \ issn = {2220-4806},\n keywords = {nime08},\n pages = {358--359},\n title = {Video\ + \ Based Recognition of Hand Gestures by Neural Networks for the Control of Sound\ + \ and Music},\n url = {http://www.nime.org/proceedings/2008/nime2008_358.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176897 + doi: 10.5281/zenodo.1179601 issn: 2220-4806 - pages: 370--375 - title: 'Children of Grainger: Leather Instruments for Free Music' - url: http://www.nime.org/proceedings/2006/nime2006_370.pdf - year: 2006 + keywords: nime08 + pages: 358--359 + title: Video Based Recognition of Hand Gestures by Neural Networks for the Control + of Sound and Music + url: http://www.nime.org/proceedings/2008/nime2008_358.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Hsu2006 - abstract: 'This paper describes recent enhancements in an interactive system designed - to improvise with saxophonist John Butcher [1]. 
In addition to musical parameters - such as pitch and loudness, our system is able to analyze timbral characteristics - of the saxophone tone in real-time, and use timbral information to guide the generation - of response material. We capture each saxophone gesture on the fly, extract a - set of gestural and timbral contours, and store them in a repository. Improvising - agents can consult the repository when generating responses. The gestural or timbral - progression of a saxophone phrase can be remapped or transformed; this enables - a variety of response material that also references audible contours of the original - saxophone gestures. A single simple framework is used to manage gestural and timbral - information extracted from analysis, and for expressive control of virtual instruments - in a free improvisation context. ' - address: 'Paris, France' - author: 'Hsu, William' - bibtex: "@inproceedings{Hsu2006,\n abstract = {This paper describes recent enhancements\ - \ in an interactive system designed to improvise with saxophonist John Butcher\ - \ [1]. In addition to musical parameters such as pitch and loudness, our system\ - \ is able to analyze timbral characteristics of the saxophone tone in real-time,\ - \ and use timbral information to guide the generation of response material. We\ - \ capture each saxophone gesture on the fly, extract a set of gestural and timbral\ - \ contours, and store them in a repository. Improvising agents can consult the\ - \ repository when generating responses. The gestural or timbral progression of\ - \ a saxophone phrase can be remapped or transformed; this enables a variety of\ - \ response material that also references audible contours of the original saxophone\ - \ gestures. A single simple framework is used to manage gestural and timbral information\ - \ extracted from analysis, and for expressive control of virtual instruments in\ - \ a free improvisation context. 
},\n address = {Paris, France},\n author = {Hsu,\ - \ William},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176927},\n issn\ - \ = {2220-4806},\n keywords = {Interactive music systems, timbre analysis, instrument\ - \ control. },\n pages = {376--379},\n title = {Managing Gesture and Timbre for\ - \ Analysis and Instrument Control in an Interactive Environment},\n url = {http://www.nime.org/proceedings/2006/nime2006_376.pdf},\n\ - \ year = {2006}\n}\n" + ID: Suzuki2008 + abstract: 'This research aims to develop a novel instrument for sociomusical interaction + where a number of participants can produce sounds by feet in collaboration with + each other. Thedeveloped instrument, beacon, is regarded as embodied soundmedia + product that will provide an interactive environmentaround it. The beacon produces + laser beams lying on theground and rotating. Audio sounds are then produced whenthe + beams pass individual performer''s foot. As the performers are able to control + the pitch and sound length accordingto the foot location and angles facing the + instrument, theperformer''s body motion and foot behavior can be translated into + sound and music in an intuitive manner.' + address: 'Genoa, Italy' + author: 'Suzuki, Kenji and Kyoya, Miho and Kamatani, Takahiro and Uchiyama, Toshiaki' + bibtex: "@inproceedings{Suzuki2008,\n abstract = {This research aims to develop\ + \ a novel instrument for sociomusical interaction where a number of participants\ + \ can produce sounds by feet in collaboration with each other. Thedeveloped instrument,\ + \ beacon, is regarded as embodied soundmedia product that will provide an interactive\ + \ environmentaround it. The beacon produces laser beams lying on theground and\ + \ rotating. Audio sounds are then produced whenthe beams pass individual performer's\ + \ foot. 
As the performers are able to control the pitch and sound length accordingto\ + \ the foot location and angles facing the instrument, theperformer's body motion\ + \ and foot behavior can be translated into sound and music in an intuitive manner.},\n\ + \ address = {Genoa, Italy},\n author = {Suzuki, Kenji and Kyoya, Miho and Kamatani,\ + \ Takahiro and Uchiyama, Toshiaki},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179633},\n\ + \ issn = {2220-4806},\n keywords = {Embodied sound media, Hyper-instrument, Laser\ + \ beams },\n pages = {360--361},\n title = {beacon : Embodied Sound Media Environment\ + \ for Socio-Musical Interaction},\n url = {http://www.nime.org/proceedings/2008/nime2008_360.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176927 + doi: 10.5281/zenodo.1179633 issn: 2220-4806 - keywords: 'Interactive music systems, timbre analysis, instrument control. 
' - pages: 376--379 - title: Managing Gesture and Timbre for Analysis and Instrument Control in an Interactive - Environment - url: http://www.nime.org/proceedings/2006/nime2006_376.pdf - year: 2006 + keywords: 'Embodied sound media, Hyper-instrument, Laser beams ' + pages: 360--361 + title: 'beacon : Embodied Sound Media Environment for Socio-Musical Interaction' + url: http://www.nime.org/proceedings/2008/nime2008_360.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Hamel2006 - address: 'Paris, France' - author: 'Hamel, Keith' - bibtex: "@inproceedings{Hamel2006,\n address = {Paris, France},\n author = {Hamel,\ - \ Keith},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176917},\n issn = {2220-4806},\n\ - \ pages = {380--383},\n title = {Integrated Interactive Music Performance Environment},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_380.pdf},\n year = {2006}\n\ - }\n" + ID: Sjuve2008 + abstract: 'This paper describes the development of a wireless wearablecontroller, + GO, for both sound processing and interactionwith wearable lights. Pure Data is + used for sound processing.The GO prototype is built using a PIC microcontroller + usingvarious sensors for receiving information from physicalmovements.' + address: 'Genoa, Italy' + author: 'Sjuve, Eva' + bibtex: "@inproceedings{Sjuve2008,\n abstract = {This paper describes the development\ + \ of a wireless wearablecontroller, GO, for both sound processing and interactionwith\ + \ wearable lights. 
Pure Data is used for sound processing.The GO prototype is\ + \ built using a PIC microcontroller usingvarious sensors for receiving information\ + \ from physicalmovements.},\n address = {Genoa, Italy},\n author = {Sjuve, Eva},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179629},\n issn = {2220-4806},\n\ + \ keywords = {Wireless controller, Pure Data, Gestural interface, Interactive\ + \ Lights. },\n pages = {362--363},\n title = {Prototype GO : Wireless Controller\ + \ for Pure Data},\n url = {http://www.nime.org/proceedings/2008/nime2008_362.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176917 + doi: 10.5281/zenodo.1179629 issn: 2220-4806 - pages: 380--383 - title: Integrated Interactive Music Performance Environment - url: http://www.nime.org/proceedings/2006/nime2006_380.pdf - year: 2006 + keywords: 'Wireless controller, Pure Data, Gestural interface, Interactive Lights. 
' + pages: 362--363 + title: 'Prototype GO : Wireless Controller for Pure Data' + url: http://www.nime.org/proceedings/2008/nime2008_362.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Ferguson2006 - address: 'Paris, France' - author: 'Ferguson, Sam' - bibtex: "@inproceedings{Ferguson2006,\n address = {Paris, France},\n author = {Ferguson,\ - \ Sam},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176899},\n issn = {2220-4806},\n\ - \ keywords = {interactive sonification,music,sonification,sound visualization},\n\ - \ pages = {384--389},\n title = {Learning Musical Instrument Skills Through Interactive\ - \ Sonification},\n url = {http://www.nime.org/proceedings/2006/nime2006_384.pdf},\n\ - \ year = {2006}\n}\n" + ID: Macrae2008 + address: 'Genoa, Italy' + author: 'Macrae, Robert and Dixon, Simon' + bibtex: "@inproceedings{Macrae2008,\n address = {Genoa, Italy},\n author = {Macrae,\ + \ Robert and Dixon, Simon},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179593},\n\ + \ issn = {2220-4806},\n keywords = {Graphical Interface, Computer Game, MIDI Display\ + \ },\n pages = {364--365},\n title = {From Toy to Tutor : Note-Scroller is a Game\ + \ to Teach Music},\n url = {http://www.nime.org/proceedings/2008/nime2008_364.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176899 + doi: 10.5281/zenodo.1179593 issn: 2220-4806 - keywords: 'interactive sonification,music,sonification,sound visualization' - pages: 384--389 - title: Learning Musical Instrument Skills Through Interactive Sonification - url: http://www.nime.org/proceedings/2006/nime2006_384.pdf - year: 2006 + keywords: 'Graphical Interface, Computer Game, MIDI Display ' + pages: 364--365 + title: 'From Toy to Tutor : Note-Scroller is a Game to 
Teach Music' + url: http://www.nime.org/proceedings/2008/nime2008_364.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Poepel2006 - abstract: 'In this paper, some of the more recent developments in musical instruments - related to the violin family are described, and analyzed according to several - criteria adapted from other publications. While it is impossible to cover all - such developments, we have tried to sample a variety of instruments from the last - decade or so, with a greater focus on those published in the computer music literature. - Experiences in the field of string players focusing on such developments are presented. - Conclusions are drawn in which further research into violin-related digital instruments - for string players may benefit from the presented criteria as well as the experiences. ' - address: 'Paris, France' - author: 'Poepel, Cornelius and Overholt, Dan' - bibtex: "@inproceedings{Poepel2006,\n abstract = {In this paper, some of the more\ - \ recent developments in musical instruments related to the violin family are\ - \ described, and analyzed according to several criteria adapted from other publications.\ - \ While it is impossible to cover all such developments, we have tried to sample\ - \ a variety of instruments from the last decade or so, with a greater focus on\ - \ those published in the computer music literature. Experiences in the field of\ - \ string players focusing on such developments are presented. 
Conclusions are\ - \ drawn in which further research into violin-related digital instruments for\ - \ string players may benefit from the presented criteria as well as the experiences.\ - \ },\n address = {Paris, France},\n author = {Poepel, Cornelius and Overholt,\ - \ Dan},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176985},\n issn = {2220-4806},\n\ - \ keywords = {Violin, viola, cello, bass, digital, electronic, synthesis, controller.\ - \ },\n pages = {390--395},\n title = {Recent Developments in Violin-related Digital\ - \ Musical Instruments: Where Are We and Where Are We Going?},\n url = {http://www.nime.org/proceedings/2006/nime2006_390.pdf},\n\ - \ year = {2006}\n}\n" + ID: Favilla2008 + abstract: 'This demonstration presents three new augmented and metasaxophone interface/instruments, + built by the Bent LeatherBand. The instruments are designed for virtuosic liveperformance + and make use of Sukandar Kartadinata''s Gluion[OSC] interfaces. The project rationale + and research outcomesfor the first twelve months is discussed. Instruments/interfacesdescribed + include the Gluisop, Gluialto and Leathersop.' + address: 'Genoa, Italy' + author: 'Favilla, Stuart and Cannon, Joanne and Hicks, Tony and Chant, Dale and + Favilla, Paris' + bibtex: "@inproceedings{Favilla2008,\n abstract = {This demonstration presents three\ + \ new augmented and metasaxophone interface/instruments, built by the Bent LeatherBand.\ + \ The instruments are designed for virtuosic liveperformance and make use of Sukandar\ + \ Kartadinata's Gluion[OSC] interfaces. The project rationale and research outcomesfor\ + \ the first twelve months is discussed. 
Instruments/interfacesdescribed include\ + \ the Gluisop, Gluialto and Leathersop.},\n address = {Genoa, Italy},\n author\ + \ = {Favilla, Stuart and Cannon, Joanne and Hicks, Tony and Chant, Dale and Favilla,\ + \ Paris},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179531},\n issn = {2220-4806},\n\ + \ keywords = {Augmented saxophone, Gluion, OSC, virtuosic performance systems\ + \ },\n pages = {366--369},\n title = {Gluisax : Bent Leather Band's Augmented\ + \ Saxophone Project},\n url = {http://www.nime.org/proceedings/2008/nime2008_366.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176985 + doi: 10.5281/zenodo.1179531 issn: 2220-4806 - keywords: 'Violin, viola, cello, bass, digital, electronic, synthesis, controller. ' - pages: 390--395 - title: 'Recent Developments in Violin-related Digital Musical Instruments: Where - Are We and Where Are We Going?' - url: http://www.nime.org/proceedings/2006/nime2006_390.pdf - year: 2006 + keywords: 'Augmented saxophone, Gluion, OSC, virtuosic performance systems ' + pages: 366--369 + title: 'Gluisax : Bent Leather Band''s Augmented Saxophone Project' + url: http://www.nime.org/proceedings/2008/nime2008_366.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Young2006 - abstract: 'In this paper we present progress of an ongoingcollaboration between - researchers at the MIT MediaLaboratory and the Royal Academy of Music (RAM). The - aimof this project is to further explore the expressive musicalpotential of the - Hyperbow, a custom music controller firstdesigned for use in violin performance. - Through the creationof new repertoire, we hope to stimulate the evolution of thisinterface, - advancing its usability and refining itscapabilities. 
In preparation for this - work, the Hyperbowsystem has been adapted for cello (acoustic and electric)performance. - The structure of our collaboration is described,and two of the pieces currently - in progress are presented.Feedback from the performers is also discussed, as well - asfuture plans.' - address: 'Paris, France' - author: 'Young, Diana and Nunn, Patrick and Vassiliev, Artem' - bibtex: "@inproceedings{Young2006,\n abstract = {In this paper we present progress\ - \ of an ongoingcollaboration between researchers at the MIT MediaLaboratory and\ - \ the Royal Academy of Music (RAM). The aimof this project is to further explore\ - \ the expressive musicalpotential of the Hyperbow, a custom music controller firstdesigned\ - \ for use in violin performance. Through the creationof new repertoire, we hope\ - \ to stimulate the evolution of thisinterface, advancing its usability and refining\ - \ itscapabilities. In preparation for this work, the Hyperbowsystem has been adapted\ - \ for cello (acoustic and electric)performance. The structure of our collaboration\ - \ is described,and two of the pieces currently in progress are presented.Feedback\ - \ from the performers is also discussed, as well asfuture plans.},\n address =\ - \ {Paris, France},\n author = {Young, Diana and Nunn, Patrick and Vassiliev, Artem},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177023},\n issn = {2220-4806},\n\ - \ keywords = {Cello, bow, controller, electroacoustic music, composition. 
},\n\ - \ pages = {396--401},\n title = {Composing for Hyperbow: A Collaboration Between\ - \ {MIT} and the Royal Academy of Music},\n url = {http://www.nime.org/proceedings/2006/nime2006_396.pdf},\n\ - \ year = {2006}\n}\n" + ID: DeJong2008 + address: 'Genoa, Italy' + author: 'de Jong, Staas' + bibtex: "@inproceedings{DeJong2008,\n address = {Genoa, Italy},\n author = {de Jong,\ + \ Staas},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179571},\n issn = {2220-4806},\n\ + \ keywords = {nime08},\n pages = {370--371},\n title = {The Cyclotactor : Towards\ + \ a Tactile Platform for Musical Interaction},\n url = {http://www.nime.org/proceedings/2008/nime2008_370.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177023 + doi: 10.5281/zenodo.1179571 issn: 2220-4806 - keywords: 'Cello, bow, controller, electroacoustic music, composition. 
' - pages: 396--401 - title: 'Composing for Hyperbow: A Collaboration Between MIT and the Royal Academy - of Music' - url: http://www.nime.org/proceedings/2006/nime2006_396.pdf - year: 2006 + keywords: nime08 + pages: 370--371 + title: 'The Cyclotactor : Towards a Tactile Platform for Musical Interaction' + url: http://www.nime.org/proceedings/2008/nime2008_370.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Bevilacqua2006 - address: 'Paris, France' - author: 'Bevilacqua, Fr\''{e}d\''{e}ric and Rasamimanana, Nicolas and Fl\''{e}ty, - Emmanuel and Lemouton, Serge and Baschet, Florence' - bibtex: "@inproceedings{Bevilacqua2006,\n address = {Paris, France},\n author =\ - \ {Bevilacqua, Fr\\'{e}d\\'{e}ric and Rasamimanana, Nicolas and Fl\\'{e}ty, Emmanuel\ - \ and Lemouton, Serge and Baschet, Florence},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176871},\n issn = {2220-4806},\n pages = {402--406},\n title\ - \ = {The Augmented Violin Project: Research, Composition and Performance Report},\n\ - \ url = {http://www.nime.org/proceedings/2006/nime2006_402.pdf},\n year = {2006}\n\ - }\n" + ID: Demey2008 + abstract: 'The Musical Synchrotron is a software interface that connects wireless + motion sensors to a real-time interactive environment (Pure Data, Max/MSP). In + addition to the measurement of movement, the system provides audio playback and + visual feedback. The Musical Synchrotron outputs a score with the degree in which + synchronization with the presented music is successful. The interface has been + used to measure how people move in response to music. The system was used for + experiments at public events. 
' + address: 'Genoa, Italy' + author: 'Demey, Michiel and Leman, Marc and Bossuyt, Frederick and Vanfleteren, + Jan' + bibtex: "@inproceedings{Demey2008,\n abstract = {The Musical Synchrotron is a software\ + \ interface that connects wireless motion sensors to a real-time interactive environment\ + \ (Pure Data, Max/MSP). In addition to the measurement of movement, the system\ + \ provides audio playback and visual feedback. The Musical Synchrotron outputs\ + \ a score with the degree in which synchronization with the presented music is\ + \ successful. The interface has been used to measure how people move in response\ + \ to music. The system was used for experiments at public events. },\n address\ + \ = {Genoa, Italy},\n author = {Demey, Michiel and Leman, Marc and Bossuyt, Frederick\ + \ and Vanfleteren, Jan},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179521},\n\ + \ issn = {2220-4806},\n keywords = {Wireless sensors, tempo perception, social\ + \ interaction, music and movement, embodied music cognition },\n pages = {372--373},\n\ + \ title = {The Musical Synchrotron : Using Wireless Motion Sensors to Study How\ + \ Social Interaction Affects Synchronization with Musical Tempo},\n url = {http://www.nime.org/proceedings/2008/nime2008_372.pdf},\n\ + \ year = {2008}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176871 + doi: 10.5281/zenodo.1179521 issn: 2220-4806 - pages: 402--406 - title: 'The Augmented Violin Project: Research, Composition and Performance Report' - url: http://www.nime.org/proceedings/2006/nime2006_402.pdf - year: 2006 + keywords: 'Wireless sensors, tempo perception, social interaction, music and movement, + embodied music cognition ' + pages: 372--373 + title: 'The Musical Synchrotron : Using Wireless Motion Sensors to Study How Social + Interaction Affects Synchronization 
with Musical Tempo' + url: http://www.nime.org/proceedings/2008/nime2008_372.pdf + year: 2008 - ENTRYTYPE: inproceedings - ID: Kimura2006 - abstract: 'This is a description of a demonstration, regarding theuse of auditory - illusions and psycho-acoustic phenomenonused in the interactive work of Jean-Claude - Risset, writtenfor violinist Mari Kimura.' - address: 'Paris, France' - author: 'Kimura, Mari and Risset, Jean-Claude' - bibtex: "@inproceedings{Kimura2006,\n abstract = {This is a description of a demonstration,\ - \ regarding theuse of auditory illusions and psycho-acoustic phenomenonused in\ - \ the interactive work of Jean-Claude Risset, writtenfor violinist Mari Kimura.},\n\ - \ address = {Paris, France},\n author = {Kimura, Mari and Risset, Jean-Claude},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176939},\n issn = {2220-4806},\n\ - \ keywords = {Violin, psycho-acoustic phenomena, auditory illusions, sig- nal\ - \ processing, subharmonics, Risset, Kimura. },\n pages = {407--408},\n title =\ - \ {Auditory Illusion and Violin: Demonstration of a Work by Jean-Claude Risset\ - \ Written for Mari Kimura},\n url = {http://www.nime.org/proceedings/2006/nime2006_407.pdf},\n\ - \ year = {2006}\n}\n" + ID: Sheffield2016 + abstract: |- + The Haptic Capstans are two rotational force-feedback knobs + circumscribed by eye-catching LED rings. In this work, the Haptic Capstans are + programmed using physical models in order to experiment with audio-visual-haptic + interactions for music applications. + address: 'Brisbane, Australia' + author: Eric Sheffield and Edgar Berdahl and Andrew Pfalz + bibtex: "@inproceedings{Sheffield2016,\n abstract = {The Haptic Capstans are two\ + \ rotational force-feedback knobs\ncircumscribed by eye-catching LED rings. 
In\ + \ this work, the Haptic Capstans are\nprogrammed using physical models in order\ + \ to experiment with audio-visual-haptic\ninteractions for music applications.},\n\ + \ address = {Brisbane, Australia},\n author = {Eric Sheffield and Edgar Berdahl\ + \ and Andrew Pfalz},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176002},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {1--2},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {The Haptic Capstans:\ + \ Rotational Force Feedback for Music using a FireFader Derivative Device},\n\ + \ track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00012.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176939 + doi: 10.5281/zenodo.1176002 + isbn: 978-1-925455-13-7 issn: 2220-4806 - keywords: 'Violin, psycho-acoustic phenomena, auditory illusions, sig- nal processing, - subharmonics, Risset, Kimura. ' - pages: 407--408 - title: 'Auditory Illusion and Violin: Demonstration of a Work by Jean-Claude Risset - Written for Mari Kimura' - url: http://www.nime.org/proceedings/2006/nime2006_407.pdf - year: 2006 + pages: 1--2 + publisher: Queensland Conservatorium Griffith University + title: 'The Haptic Capstans: Rotational Force Feedback for Music using a FireFader + Derivative Device' + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper00012.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: Freed2006a - abstract: 'Software and hardware enhancements to an electric 6-stringcello are described - with a focus on a new mechanical tuningdevice, a novel rotary sensor for bow interaction - and controlstrategies to leverage a suite of polyphonic soundprocessing effects.' 
- address: 'Paris, France' - author: 'Freed, Adrian and Wessel, David and Zbyszynski, Michael and Uitti, Frances - M.' - bibtex: "@inproceedings{Freed2006a,\n abstract = {Software and hardware enhancements\ - \ to an electric 6-stringcello are described with a focus on a new mechanical\ - \ tuningdevice, a novel rotary sensor for bow interaction and controlstrategies\ - \ to leverage a suite of polyphonic soundprocessing effects.},\n address = {Paris,\ - \ France},\n author = {Freed, Adrian and Wessel, David and Zbyszynski, Michael\ - \ and Uitti, Frances M.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176905},\n\ - \ issn = {2220-4806},\n keywords = {Cello, chordophone, FSR, Rotary Absolute Position\ - \ Encoder, Double Bowing, triple stops, double stops, convolution. },\n pages\ - \ = {409--413},\n title = {Augmenting the Cello},\n url = {http://www.nime.org/proceedings/2006/nime2006_409.pdf},\n\ - \ year = {2006}\n}\n" + ID: Long2016 + abstract: 'Musical robots provide artists and musicians with the ability to realise + complex new musical ideas in real acoustic space. However, most musical robots + are created with open-loop control systems, many of which require time consuming + calibration and do not reach the level of reliability of other electronic musical + instruments such as synthesizers. This paper outlines the construction of a new + robotic musical instrument, the Closed-Loop Robotic Glockenspiel, and discusses + the improved robustness, usability and expressive capabilities that closed-loop + control systems and embedded musical information retrieval processes can afford + robotic musical instruments. The hardware design of the instrument is described + along with the firmware of the embedded MIR system. 
The result is a new desktop + robotic musical instrument that is capable of continuous unaided re-calibration, + is as simple to use as more traditional hardware electronic sound-sources and + provides musicians with new expressive capabilities. ' + address: 'Brisbane, Australia' + author: Jason Long and Dale Carnegie and Ajay Kapur + bibtex: "@inproceedings{Long2016,\n abstract = {Musical robots provide artists and\ + \ musicians with the ability to realise complex new musical ideas in real acoustic\ + \ space. However, most musical robots are created with open-loop control systems,\ + \ many of which require time consuming calibration and do not reach the level\ + \ of reliability of other electronic musical instruments such as synthesizers.\ + \ This paper outlines the construction of a new robotic musical instrument, the\ + \ Closed-Loop Robotic Glockenspiel, and discusses the improved robustness, usability\ + \ and expressive capabilities that closed-loop control systems and embedded musical\ + \ information retrieval processes can afford robotic musical instruments. The\ + \ hardware design of the instrument is described along with the firmware of the\ + \ embedded MIR system. The result is a new desktop robotic musical instrument\ + \ that is capable of continuous unaided re-calibration, is as simple to use as\ + \ more traditional hardware electronic sound-sources and provides musicians with\ + \ new expressive capabilities. 
},\n address = {Brisbane, Australia},\n author\ + \ = {Jason Long and Dale Carnegie and Ajay Kapur},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3964607},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {2--7},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {The Closed-Loop Robotic Glockenspiel: Improving Musical Robots with\ + \ Embedded Musical Information Retrieval},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0002.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176905 + doi: 10.5281/zenodo.3964607 + isbn: 978-1-925455-13-7 issn: 2220-4806 - keywords: 'Cello, chordophone, FSR, Rotary Absolute Position Encoder, Double Bowing, - triple stops, double stops, convolution. ' - pages: 409--413 - title: Augmenting the Cello - url: http://www.nime.org/proceedings/2006/nime2006_409.pdf - year: 2006 + pages: 2--7 + publisher: Queensland Conservatorium Griffith University + title: 'The Closed-Loop Robotic Glockenspiel: Improving Musical Robots with Embedded + Musical Information Retrieval' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0002.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_0 - abstract: 'We introduce faust2smartphone, a tool to generate an edit-ready project - for musical mobile application, which connects Faust programming language and - mobile application’s development. It is an extended implementation of faust2api. - Faust DSP objects can be easily embedded as a high level API so that the developers - can access various functions and elements across different mobile platforms. 
This - paper provides several modes and technical details on the structures and implementation - of this system as well as some applications and future directions for this tool.' - address: 'Birmingham, UK' - author: 'Weng, Ruolun' - bibtex: "@inproceedings{NIME20_0,\n abstract = {We introduce faust2smartphone, a\ - \ tool to generate an edit-ready project for musical mobile application, which\ - \ connects Faust programming language and mobile application’s development. It\ - \ is an extended implementation of faust2api. Faust DSP objects can be easily\ - \ embedded as a high level API so that the developers can access various functions\ - \ and elements across different mobile platforms. This paper provides several\ - \ modes and technical details on the structures and implementation of this system\ - \ as well as some applications and future directions for this tool.},\n address\ - \ = {Birmingham, UK},\n author = {Weng, Ruolun},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.4813164},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {1--4},\n publisher = {Birmingham\ - \ City University},\n title = {Interactive Mobile Musical Application using faust2smartphone},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper0.pdf},\n year =\ - \ {2020}\n}\n" + ID: Carey2016a + abstract: "This paper describes a package of modular tools developed for use\nwith\ + \ virtual reality peripherals to allow for music composition, performance and\n\ + viewing in `real-time' across networks within a spectralist paradigm.\nThe central\ + \ tool is SpectraScore, a Max/MSP abstraction for analysing audio\nsignals and\ + \ ranking the resultant partials according to their harmonic pitch\nclass profiles.\ + \ This data triggers the generation of objects in a virtual world\nbased on the\ + \ `topography' of the source sound, 
which is experienced\nby network clients via\ + \ Google Cardboard headsets. They use their movements to\ntrigger audio in various\ + \ microtonal tunings and incidentally generate scores.\nThese scores are transmitted\ + \ to performers who improvise music from this notation\nusing Leap Motion Theremins,\ + \ also in VR space. Finally, the performance is\nbroadcast via a web audio stream,\ + \ which can be heard by the composer-audience in\nthe initial virtual world. The\ + \ `real-time composers' and performers\nare not required to have any prior knowledge\ + \ of complex computer systems and\ninteract either using head position tracking,\ + \ or with a Oculus Rift DK2 and a\nLeap Motion Camera. " + address: 'Brisbane, Australia' + author: Benedict Carey + bibtex: "@inproceedings{Carey2016a,\n abstract = {This paper describes a package\ + \ of modular tools developed for use\nwith virtual reality peripherals to allow\ + \ for music composition, performance and\nviewing in `real-time' across networks\ + \ within a spectralist paradigm.\nThe central tool is SpectraScore, a Max/MSP\ + \ abstraction for analysing audio\nsignals and ranking the resultant partials\ + \ according to their harmonic pitch\nclass profiles. This data triggers the generation\ + \ of objects in a virtual world\nbased on the `topography' of the source sound,\ + \ which is experienced\nby network clients via Google Cardboard headsets. They\ + \ use their movements to\ntrigger audio in various microtonal tunings and incidentally\ + \ generate scores.\nThese scores are transmitted to performers who improvise music\ + \ from this notation\nusing Leap Motion Theremins, also in VR space. Finally,\ + \ the performance is\nbroadcast via a web audio stream, which can be heard by\ + \ the composer-audience in\nthe initial virtual world. 
The `real-time composers'\ + \ and performers\nare not required to have any prior knowledge of complex computer\ + \ systems and\ninteract either using head position tracking, or with a Oculus\ + \ Rift DK2 and a\nLeap Motion Camera. },\n address = {Brisbane, Australia},\n\ + \ author = {Benedict Carey},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176004},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {3--4},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {SpectraScore VR:\ + \ Networkable virtual reality software tools for real-time composition and performance},\n\ + \ track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00022.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813164 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176004 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 1--4 - publisher: Birmingham City University - title: Interactive Mobile Musical Application using faust2smartphone - url: https://www.nime.org/proceedings/2020/nime2020_paper0.pdf - year: 2020 + pages: 3--4 + publisher: Queensland Conservatorium Griffith University + title: 'SpectraScore VR: Networkable virtual reality software tools for real-time + composition and performance' + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper00022.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_1 - abstract: 'This paper reports on the user-driven redesign of an embedded digital - musical instrument that has yielded a trio of new instruments, informed by early - user feedback and co-design workshops organized with active musicians. 
Collectively, - they share a stand-alone design, digitally fabricated enclosures, and a common - sensor acquisition and sound synthesis architecture, yet each is unique in its - playing technique and sonic output. We focus on the technical design of the instruments - and provide examples of key design specifications that were derived from user - input, while reflecting on the challenges to, and opportunities for, creating - instruments that support active practices of performing musicians.' - address: 'Birmingham, UK' - author: 'Sullivan, John and Vanasse, Julian and Guastavino, Catherine and Wanderley, - Marcelo' - bibtex: "@inproceedings{NIME20_1,\n abstract = {This paper reports on the user-driven\ - \ redesign of an embedded digital musical instrument that has yielded a trio of\ - \ new instruments, informed by early user feedback and co-design workshops organized\ - \ with active musicians. Collectively, they share a stand-alone design, digitally\ - \ fabricated enclosures, and a common sensor acquisition and sound synthesis architecture,\ - \ yet each is unique in its playing technique and sonic output. 
We focus on the\ - \ technical design of the instruments and provide examples of key design specifications\ - \ that were derived from user input, while reflecting on the challenges to, and\ - \ opportunities for, creating instruments that support active practices of performing\ - \ musicians.},\n address = {Birmingham, UK},\n author = {Sullivan, John and Vanasse,\ - \ Julian and Guastavino, Catherine and Wanderley, Marcelo},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813166},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {5--10},\n presentation-video\ - \ = {https://youtu.be/DUMXJw-CTVo},\n publisher = {Birmingham City University},\n\ - \ title = {Reinventing the Noisebox: Designing Embedded Instruments for Active\ - \ Musicians},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper1.pdf},\n\ - \ year = {2020}\n}\n" + ID: Chang2016 + abstract: 'This paper discusses a new approach to acoustic amplitude modulation. + Building on prior work with electromagnetic augmentation of acoustic instruments, + we begin with a theory of operation model to describe the mechanical forces necessary + to produce acoustic amplitude modulation synthesis. We then propose an implementation + of our model as an instrumental prototype. The results illustrate that our acoustic + amplitude modulation system produces controllable sideband components, and that + synthesis generated from our corresponding numerical dynamic system model closely + approximates the acoustic result of the physical system.' + address: 'Brisbane, Australia' + author: Herbert H.C. Chang and Spencer Topel + bibtex: "@inproceedings{Chang2016,\n abstract = {This paper discusses a new approach\ + \ to acoustic amplitude modulation. 
Building on prior work with electromagnetic\ + \ augmentation of acoustic instruments, we begin with a theory of operation model\ + \ to describe the mechanical forces necessary to produce acoustic amplitude modulation\ + \ synthesis. We then propose an implementation of our model as an instrumental\ + \ prototype. The results illustrate that our acoustic amplitude modulation system\ + \ produces controllable sideband components, and that synthesis generated from\ + \ our corresponding numerical dynamic system model closely approximates the acoustic\ + \ result of the physical system.},\n address = {Brisbane, Australia},\n author\ + \ = {Herbert H.C. Chang and Spencer Topel},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.3964599},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {8--13},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Electromagnetically Actuated Acoustic Amplitude Modulation Synthesis},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0003.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813166 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.3964599 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 5--10 - presentation-video: https://youtu.be/DUMXJw-CTVo - publisher: Birmingham City University - title: 'Reinventing the Noisebox: Designing Embedded Instruments for Active Musicians' - url: https://www.nime.org/proceedings/2020/nime2020_paper1.pdf - year: 2020 + pages: 8--13 + publisher: Queensland Conservatorium Griffith University + title: Electromagnetically Actuated Acoustic Amplitude Modulation Synthesis + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0003.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: 
NIME20_10 - abstract: 'This paper presents a new visualization paradigm for graphical interpolation - systems, known as Star Interpolation, that has been specifically created for sound - design applications. Through the presented investigation of previous visualizations, - it becomes apparent that the existing visuals in this class of system, generally - relate to the interpolation model that determines the weightings of the presets - and not the sonic output. The Star Interpolator looks to resolve this deficiency - by providing visual cues that relate to the parameter space. Through comparative - exploration it has been found this visualization provides a number of benefits - over the previous systems. It is also shown that hybrid visualization can be generated - that combined benefits of the new visualization with the existing interpolation - models. These can then be accessed by using an Interactive Visualization (IV) - approach. The results from our exploration of these visualizations are encouraging - and they appear to be advantageous when using the interpolators for sound designs - tasks. Therefore, it is proposed that formal usability testing is undertaken to - measure the potential value of this form of visualization.' - address: 'Birmingham, UK' - author: 'Gibson, Darrell J and Polfreman, Richard' - bibtex: "@inproceedings{NIME20_10,\n abstract = {This paper presents a new visualization\ - \ paradigm for graphical interpolation systems, known as Star Interpolation, that\ - \ has been specifically created for sound design applications. Through the presented\ - \ investigation of previous visualizations, it becomes apparent that the existing\ - \ visuals in this class of system, generally relate to the interpolation model\ - \ that determines the weightings of the presets and not the sonic output. The\ - \ Star Interpolator looks to resolve this deficiency by providing visual cues\ - \ that relate to the parameter space. 
Through comparative exploration it has been\ - \ found this visualization provides a number of benefits over the previous systems.\ - \ It is also shown that hybrid visualization can be generated that combined benefits\ - \ of the new visualization with the existing interpolation models. These can then\ - \ be accessed by using an Interactive Visualization (IV) approach. The results\ - \ from our exploration of these visualizations are encouraging and they appear\ - \ to be advantageous when using the interpolators for sound designs tasks. Therefore,\ - \ it is proposed that formal usability testing is undertaken to measure the potential\ - \ value of this form of visualization.},\n address = {Birmingham, UK},\n author\ - \ = {Gibson, Darrell J and Polfreman, Richard},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.4813168},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {49--54},\n presentation-video\ - \ = {https://youtu.be/3ImRZdSsP-M},\n publisher = {Birmingham City University},\n\ - \ title = {Star Interpolator – A Novel Visualization Paradigm for Graphical Interpolators},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper10.pdf},\n year =\ - \ {2020}\n}\n" + ID: Berdahl2016 + abstract: "New interfaces for vibrotactile interaction with touchscreens are\nrealized.\ + \ An electromagnetic design for wireless actuation of 3D-printed\nconductive tokens\ + \ is analyzed. Example music interactions are implemented using\nphysical modeling\ + \ paradigms, each investigated within the context of a particular\ntoken that\ + \ suggests a different interaction metaphor." + address: 'Brisbane, Australia' + author: Edgar Berdahl and Danny Holmes and Eric Sheffield + bibtex: "@inproceedings{Berdahl2016,\n abstract = {New interfaces for vibrotactile\ + \ interaction with touchscreens are\nrealized. 
An electromagnetic design for wireless\ + \ actuation of 3D-printed\nconductive tokens is analyzed. Example music interactions\ + \ are implemented using\nphysical modeling paradigms, each investigated within\ + \ the context of a particular\ntoken that suggests a different interaction metaphor.},\n\ + \ address = {Brisbane, Australia},\n author = {Edgar Berdahl and Danny Holmes\ + \ and Eric Sheffield},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1175984},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {5--6},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Wireless Vibrotactile\ + \ Tokens for Audio-Haptic Interaction with Touchscreen Interfaces},\n track =\ + \ {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00032.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813168 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175984 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 49--54 - presentation-video: https://youtu.be/3ImRZdSsP-M - publisher: Birmingham City University - title: Star Interpolator – A Novel Visualization Paradigm for Graphical Interpolators - url: https://www.nime.org/proceedings/2020/nime2020_paper10.pdf - year: 2020 + pages: 5--6 + publisher: Queensland Conservatorium Griffith University + title: Wireless Vibrotactile Tokens for Audio-Haptic Interaction with Touchscreen + Interfaces + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper00032.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_100 - abstract: 'This paper reports on the process of development of a virtual-acoustic - proto-instrument, Vodhrán, based on a physical model of a plate, within a musical - performance-driven ecosystemic environment. 
Performers explore the plate model - via tactile interaction through a Sensel Morph interface, chosen to allow damping - and localised striking consistent with playing hand percussion. Through an iteration - of prototypes, we have designed an embedded proto-instrument that allows a bodily - interaction between the performer and the virtual-acoustic plate in a way that - redirects from the perception of the Sensel as a touchpad and reframes it as a - percussive surface. Due to the computational effort required to run such a rich - physical model and the necessity to provide a natural interaction, the audio processing - is implemented on a high powered single board computer. We describe the design - challenges and report on the technological solutions we have found in the implementation - of Vodhrán which we believe are valuable to the wider NIME community.' - address: 'Birmingham, UK' - author: 'Pardue, Laurel S and Ortiz, Miguel and van Walstijn, Maarten and Stapleton, - Paul and Rodger, Matthew' - bibtex: "@inproceedings{NIME20_100,\n abstract = {This paper reports on the process\ - \ of development of a virtual-acoustic proto-instrument, Vodhrán, based on a physical\ - \ model of a plate, within a musical performance-driven ecosystemic environment.\ - \ Performers explore the plate model via tactile interaction through a Sensel\ - \ Morph interface, chosen to allow damping and localised striking consistent with\ - \ playing hand percussion. Through an iteration of prototypes, we have designed\ - \ an embedded proto-instrument that allows a bodily interaction between the performer\ - \ and the virtual-acoustic plate in a way that redirects from the perception of\ - \ the Sensel as a touchpad and reframes it as a percussive surface. Due to the\ - \ computational effort required to run such a rich physical model and the necessity\ - \ to provide a natural interaction, the audio processing is implemented on a high\ - \ powered single board computer. 
We describe the design challenges and report\ - \ on the technological solutions we have found in the implementation of Vodhrán\ - \ which we believe are valuable to the wider NIME community.},\n address = {Birmingham,\ - \ UK},\n author = {Pardue, Laurel S and Ortiz, Miguel and van Walstijn, Maarten\ - \ and Stapleton, Paul and Rodger, Matthew},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.4813170},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {523--524},\n publisher = {Birmingham\ - \ City University},\n title = {Vodhrán: collaborative design for evolving a physical\ - \ model and interface into a proto-instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper100.pdf},\n\ - \ year = {2020}\n}\n" + ID: Baldwin2016 + abstract: 'An interactive museum exhibit of a digitally augmented medieval musical + instrument, the tromba marina, is presented. The tromba marina is a curious single + stringed instrument with a rattling bridge, from which a trumpet-like timbre is + produced. The physical instrument was constructed as a replica of one found in + Musikmuseet in Frederiksberg. The replicated instrument was augmented with a pickup, + speakers and digital signal processing to create a more reliable, approachable + and appropriate instrument for interactive display in the museum. We report on + the evaluation of the instrument performed at the Danish museum of musical instruments.' + address: 'Brisbane, Australia' + author: Alex Baldwin and Troels Hammer and Edvinas Pechiulis and Peter Williams + and Dan Overholt and Stefania Serafin + bibtex: "@inproceedings{Baldwin2016,\n abstract = {An interactive museum exhibit\ + \ of a digitally augmented medieval musical instrument, the tromba marina, is\ + \ presented. 
The tromba marina is a curious single stringed instrument with a\ + \ rattling bridge, from which a trumpet-like timbre is produced. The physical\ + \ instrument was constructed as a replica of one found in Musikmuseet in Frederiksberg.\ + \ The replicated instrument was augmented with a pickup, speakers and digital\ + \ signal processing to create a more reliable, approachable and appropriate instrument\ + \ for interactive display in the museum. We report on the evaluation of the instrument\ + \ performed at the Danish museum of musical instruments.},\n address = {Brisbane,\ + \ Australia},\n author = {Alex Baldwin and Troels Hammer and Edvinas Pechiulis\ + \ and Peter Williams and Dan Overholt and Stefania Serafin},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3964592},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {14--19},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Tromba Moderna: A Digitally Augmented Medieval Instrument},\n track\ + \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0004.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813170 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.3964592 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 523--524 - publisher: Birmingham City University - title: 'Vodhrán: collaborative design for evolving a physical model and interface - into a proto-instrument' - url: https://www.nime.org/proceedings/2020/nime2020_paper100.pdf - year: 2020 + pages: 14--19 + publisher: Queensland Conservatorium Griffith University + title: 'Tromba Moderna: A Digitally Augmented Medieval Instrument' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0004.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: 
NIME20_101 - abstract: 'Brain-computer interfaces (BCIs) are beneficial for patients who are - suffering from motor disabilities because it offers them a way of creative expression, - which improves mental well-being. BCIs aim to establish a direct communication - medium between the brain and the computer. Therefore, unlike conventional musical - interfaces, it does not require muscular power. This paper explores the potential - of building sound synthesisers with BCIs that are based on steady-state visually - evoked potential (SSVEP). It investigates novel ways to enable patients with motor - disabilities to express themselves. It presents a new concept called sonic expression, - that is to express oneself purely by the synthesis of sound. It introduces new - layouts and designs for BCI-based sound synthesisers and the limitations of these - interfaces are discussed. An evaluation of different sound synthesis techniques - is conducted to find an appropriate one for such systems. Synthesis techniques - are evaluated and compared based on a framework governed by sonic expression.' - address: 'Birmingham, UK' - author: 'Venkatesh, Satvik and Braund, Edward and Miranda, Eduardo' - bibtex: "@inproceedings{NIME20_101,\n abstract = {Brain-computer interfaces (BCIs)\ - \ are beneficial for patients who are suffering from motor disabilities because\ - \ it offers them a way of creative expression, which improves mental well-being.\ - \ BCIs aim to establish a direct communication medium between the brain and the\ - \ computer. Therefore, unlike conventional musical interfaces, it does not require\ - \ muscular power. This paper explores the potential of building sound synthesisers\ - \ with BCIs that are based on steady-state visually evoked potential (SSVEP).\ - \ It investigates novel ways to enable patients with motor disabilities to express\ - \ themselves. 
It presents a new concept called sonic expression, that is to express\ - \ oneself purely by the synthesis of sound. It introduces new layouts and designs\ - \ for BCI-based sound synthesisers and the limitations of these interfaces are\ - \ discussed. An evaluation of different sound synthesis techniques is conducted\ - \ to find an appropriate one for such systems. Synthesis techniques are evaluated\ - \ and compared based on a framework governed by sonic expression.},\n address\ - \ = {Birmingham, UK},\n author = {Venkatesh, Satvik and Braund, Edward and Miranda,\ - \ Eduardo},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813172},\n editor\ - \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ - \ pages = {525--530},\n publisher = {Birmingham City University},\n title = {Designing\ - \ Brain-computer Interfaces for Sonic Expression},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper101.pdf},\n\ - \ year = {2020}\n}\n" + ID: Berg2016 + abstract: |- + This demonstration describes Tango, software for + Computer-Human Improvisation developed for more than 25 years by Henning Berg. + Tango listens to an improvising musician, analyses what it hears + and plays musical responses which relate directly to the musical input. + If the improviser in turn reacts to these answers, a musical loop between the + human and the machine can emerge. The way input and reaction correlate and the + predictability of Tango's responses can be defined by the user via a setup + of improvising environments, called Rooms. + Real-time sampling with knowledge of the musical content behind the samples and + Midi-handling are unified via Tango's own monophonic audio-to-Midi, time + stretching and pitch shifting algorithms. Both audio and Midi can be used by + Tango's modules (e.g. Listeners, Players, Modifiers, Metronomes or Harmony) + for input and output. 
+ A flexible real time control system allows for internal and external remote + control and scaling of most parameters. + The free software for Windows7 with all necessary folders, English and German + manuals, many example-Rooms and a few videos can be downloaded at + www.henning-berg.de. + address: 'Brisbane, Australia' + author: Henning Berg + bibtex: "@inproceedings{Berg2016,\n abstract = {This demonstration describes Tango,\ + \ software for\nComputer-Human Improvisation developed for more than 25 years\ + \ by Henning Berg.\nTango listens to an improvising musician, analyses what it\ + \ hears\nand plays musical responses which relate directly to the musical input.\n\ + If the improviser in turn reacts to these answers, a musical loop between the\n\ + human and the machine can emerge. The way input and reaction correlate and the\n\ + predictability of Tango's responses can be defined by the user via a setup\nof\ + \ improvising environments, called Rooms.\nReal-time sampling with knowledge of\ + \ the musical content behind the samples and\nMidi-handling are unified via Tango's\ + \ own monophonic audio-to-Midi, time\nstretching and pitch shifting algorithms.\ + \ Both audio and Midi can be used by\nTango's modules (e.g. 
Listeners, Players,\ + \ Modifiers, Metronomes or Harmony)\nfor input and output.\nA flexible real time\ + \ control system allows for internal and external remote\ncontrol and scaling\ + \ of most parameters.\nThe free software for Windows7 with all necessary folders,\ + \ English and German\nmanuals, many example-Rooms and a few videos can be downloaded\ + \ at\nwww.henning-berg.de.},\n address = {Brisbane, Australia},\n author = {Henning\ + \ Berg},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1175990},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {7--8},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Tango: Software for Computer-Human Improvisation},\n\ + \ track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00042.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813172 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175990 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 525--530 - publisher: Birmingham City University - title: Designing Brain-computer Interfaces for Sonic Expression - url: https://www.nime.org/proceedings/2020/nime2020_paper101.pdf - year: 2020 + pages: 7--8 + publisher: Queensland Conservatorium Griffith University + title: 'Tango: Software for Computer-Human Improvisation' + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper00042.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_102 - abstract: 'Music has previously been shown to be beneficial in improving runners - performance in treadmill based experiments. 
This paper evaluates a generative - music system, HEARTBEATS, designed to create biosignal synchronous music in real-time - according to an individual athlete’s heart-rate or cadence (steps per minute). - The tempo, melody, and timbral features of the generated music are modulated according - to biosensor input from each runner using a wearable Bluetooth sensor. We compare - the relative performance of athletes listening to heart-rate and cadence synchronous - music, across a randomized trial (N=57) on a trail course with 76ft of elevation. - Participants were instructed to continue until perceived effort went beyond an - 18 using the Borg rating of perceived exertion scale. We found that cadence-synchronous - music improved performance and decreased perceived effort in male runners, and - improved performance but not perceived effort in female runners, in comparison - to heart-rate synchronous music. This work has implications for the future design - and implementation of novel portable music systems and in music-assisted coaching.' - address: 'Birmingham, UK' - author: 'Williams, Duncan A.H. and Fazenda, Bruno and Williamson, Victoria J. and - Fazekas, Gyorgy' - bibtex: "@inproceedings{NIME20_102,\n abstract = {Music has previously been shown\ - \ to be beneficial in improving runners performance in treadmill based experiments.\ - \ This paper evaluates a generative music system, HEARTBEATS, designed to create\ - \ biosignal synchronous music in real-time according to an individual athlete’s\ - \ heart-rate or cadence (steps per minute). The tempo, melody, and timbral features\ - \ of the generated music are modulated according to biosensor input from each\ - \ runner using a wearable Bluetooth sensor. We compare the relative performance\ - \ of athletes listening to heart-rate and cadence synchronous music, across a\ - \ randomized trial (N=57) on a trail course with 76ft of elevation. 
Participants\ - \ were instructed to continue until perceived effort went beyond an 18 using the\ - \ Borg rating of perceived exertion scale. We found that cadence-synchronous music\ - \ improved performance and decreased perceived effort in male runners, and improved\ - \ performance but not perceived effort in female runners, in comparison to heart-rate\ - \ synchronous music. This work has implications for the future design and implementation\ - \ of novel portable music systems and in music-assisted coaching.},\n address\ - \ = {Birmingham, UK},\n author = {Williams, Duncan A.H. and Fazenda, Bruno and\ - \ Williamson, Victoria J. and Fazekas, Gyorgy},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.4813174},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {531--536},\n publisher = {Birmingham\ - \ City University},\n title = {Biophysiologically synchronous computer generated\ - \ music improves performance and reduces perceived effort in trail runners},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper102.pdf},\n year\ - \ = {2020}\n}\n" + ID: McPherson2016 + abstract: "The importance of low and consistent latency in interactive music\nsystems\ + \ is well-established. So how do commonly-used tools for creating digital\nmusical\ + \ instruments and other tangible interfaces perform in terms of latency\nfrom\ + \ user action to sound output? This paper examines several common\nconfigurations\ + \ where a microcontroller (e.g. Arduino) or wireless device\ncommunicates with\ + \ computer-based sound generator (e.g. Max/MSP, Pd). We find\nthat, perhaps surprisingly,\ + \ almost none of the tested configurations meet\ngenerally-accepted guidelines\ + \ for latency and jitter. 
To address this limitation,\nthe paper presents a new\ + \ embedded platform, Bela, which is capable of complex\naudio and sensor processing\ + \ at submillisecond latency." + address: 'Brisbane, Australia' + author: Andrew McPherson and Robert Jack and Giulio Moro + bibtex: "@inproceedings{McPherson2016,\n abstract = {The importance of low and consistent\ + \ latency in interactive music\nsystems is well-established. So how do commonly-used\ + \ tools for creating digital\nmusical instruments and other tangible interfaces\ + \ perform in terms of latency\nfrom user action to sound output? This paper examines\ + \ several common\nconfigurations where a microcontroller (e.g. Arduino) or wireless\ + \ device\ncommunicates with computer-based sound generator (e.g. Max/MSP, Pd).\ + \ We find\nthat, perhaps surprisingly, almost none of the tested configurations\ + \ meet\ngenerally-accepted guidelines for latency and jitter. To address this\ + \ limitation,\nthe paper presents a new embedded platform, Bela, which is capable\ + \ of complex\naudio and sensor processing at submillisecond latency.},\n address\ + \ = {Brisbane, Australia},\n author = {Andrew McPherson and Robert Jack and Giulio\ + \ Moro},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.3964611},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {20--25},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Action-Sound Latency: Are Our Tools Fast Enough?},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0005.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813174 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.3964611 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 531--536 - publisher: Birmingham City 
University - title: Biophysiologically synchronous computer generated music improves performance - and reduces perceived effort in trail runners - url: https://www.nime.org/proceedings/2020/nime2020_paper102.pdf - year: 2020 + pages: 20--25 + publisher: Queensland Conservatorium Griffith University + title: 'Action-Sound Latency: Are Our Tools Fast Enough?' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0005.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_103 - abstract: 'Audio content-based processing has become a pervasive methodology for - techno-fluent musicians. System architectures typically create thumbnail audio - descriptions, based on signal processing methods, to visualize, retrieve and transform - musical audio efficiently. Towards enhanced usability of these descriptor-based - frameworks for the music community, the paper advances a minimal content-based - audio description scheme, rooted on primary musical notation attributes at the - threefold sound object, meso and macro hierarchies. Multiple perceptually-guided - viewpoints from rhythmic, harmonic, timbral and dynamic attributes define a discrete - and finite alphabet with minimal formal and subjective assumptions using unsupervised - and user-guided methods. The Factor Oracle automaton is then adopted to model - and visualize temporal morphology. The generative musical applications enabled - by the descriptor-based framework at multiple structural hierarchies are discussed.' - address: 'Birmingham, UK' - author: 'Bernardes, Gilberto and Bernardes, Gilberto' - bibtex: "@inproceedings{NIME20_103,\n abstract = {Audio content-based processing\ - \ has become a pervasive methodology for techno-fluent musicians. System architectures\ - \ typically create thumbnail audio descriptions, based on signal processing methods,\ - \ to visualize, retrieve and transform musical audio efficiently. 
Towards enhanced\ - \ usability of these descriptor-based frameworks for the music community, the\ - \ paper advances a minimal content-based audio description scheme, rooted on primary\ - \ musical notation attributes at the threefold sound object, meso and macro hierarchies.\ - \ Multiple perceptually-guided viewpoints from rhythmic, harmonic, timbral and\ - \ dynamic attributes define a discrete and finite alphabet with minimal formal\ - \ and subjective assumptions using unsupervised and user-guided methods. The Factor\ - \ Oracle automaton is then adopted to model and visualize temporal morphology.\ - \ The generative musical applications enabled by the descriptor-based framework\ - \ at multiple structural hierarchies are discussed.},\n address = {Birmingham,\ - \ UK},\n author = {Bernardes, Gilberto and Bernardes, Gilberto},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813176},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {537--542},\n presentation-video\ - \ = {https://youtu.be/zEg9Cpir8zA},\n publisher = {Birmingham City University},\n\ - \ title = {Interfacing Sounds: Hierarchical Audio-Content Morphologies for Creative\ - \ Re-purposing in earGram 2.0},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper103.pdf},\n\ - \ year = {2020}\n}\n" + ID: Berdahl2016a + abstract: "Virtual ``slack'' strings are designed for and employed by the\nLaptop\ + \ Orchestra of Louisiana. These virtual strings are ``slack'' in the sense\nthat\ + \ they can be very easily displaced, bent, tugged upon, etc. This enables\nforce-feedback\ + \ control of widely ranging pitch glides, by as much as an octave or\nmore, simply\ + \ by bending the virtual string. 
To realize a slack string design, a\nvirtual\ + \ spring with a specific nonlinear characteristic curve is designed.\nViolin,\ + \ viola, and cello-scale models are tuned and employed by the Laptop\nOrchestra\ + \ of Louisiana in Quartet for Strings." + address: 'Brisbane, Australia' + author: Edgar Berdahl and Andrew Pfalz and Stephen David Beck + bibtex: "@inproceedings{Berdahl2016a,\n abstract = {Virtual ``slack'' strings are\ + \ designed for and employed by the\nLaptop Orchestra of Louisiana. These virtual\ + \ strings are ``slack'' in the sense\nthat they can be very easily displaced,\ + \ bent, tugged upon, etc. This enables\nforce-feedback control of widely ranging\ + \ pitch glides, by as much as an octave or\nmore, simply by bending the virtual\ + \ string. To realize a slack string design, a\nvirtual spring with a specific\ + \ nonlinear characteristic curve is designed.\nViolin, viola, and cello-scale\ + \ models are tuned and employed by the Laptop\nOrchestra of Louisiana in Quartet\ + \ for Strings.},\n address = {Brisbane, Australia},\n author = {Edgar Berdahl\ + \ and Andrew Pfalz and Stephen David Beck},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1175988},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {9--10},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Very Slack Strings: A Physical Model and Its Use in the Composition\ + \ Quartet for Strings},\n track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00052.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813176 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175988 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 537--542 - presentation-video: https://youtu.be/zEg9Cpir8zA - 
publisher: Birmingham City University - title: 'Interfacing Sounds: Hierarchical Audio-Content Morphologies for Creative - Re-purposing in earGram 2.0' - url: https://www.nime.org/proceedings/2020/nime2020_paper103.pdf - year: 2020 + pages: 9--10 + publisher: Queensland Conservatorium Griffith University + title: 'Very Slack Strings: A Physical Model and Its Use in the Composition Quartet + for Strings' + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper00052.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_104 - abstract: 'For a long time, magnetic tape has been commonly utilized as one of physical - media for recording and playing music. In this research, we propose a novel interactive - musical instrument called ParaSampling that utilizes the technology of magnetic - sound recording, and a improvisational sound playing method based on the instrument. - While a conventional cassette tape player has a single tapehead, which rigidly - placed, our instrument utilizes multiple handheld tapehead modules as an interface. - Players can hold the interfaces and press them against the rotating magnetic tape - at an any point to record or reproduce sounds The player can also easily erase - and rewrite the sound recorded on the tape. With this instrument, they can achieve - improvised and unique musical expressions through tangible and spatial interactions. - In this paper, we describe the system design of ParaSampling, the implementation - of the prototype system, and discuss music expressions enabled by the system.' - address: 'Birmingham, UK' - author: 'Han, Joung Min and Kakehi, Yasuaki' - bibtex: "@inproceedings{NIME20_104,\n abstract = {For a long time, magnetic tape\ - \ has been commonly utilized as one of physical media for recording and playing\ - \ music. 
In this research, we propose a novel interactive musical instrument called\ - \ ParaSampling that utilizes the technology of magnetic sound recording, and a\ - \ improvisational sound playing method based on the instrument. While a conventional\ - \ cassette tape player has a single tapehead, which rigidly placed, our instrument\ - \ utilizes multiple handheld tapehead modules as an interface. Players can hold\ - \ the interfaces and press them against the rotating magnetic tape at an any point\ - \ to record or reproduce sounds The player can also easily erase and rewrite the\ - \ sound recorded on the tape. With this instrument, they can achieve improvised\ - \ and unique musical expressions through tangible and spatial interactions. In\ - \ this paper, we describe the system design of ParaSampling, the implementation\ - \ of the prototype system, and discuss music expressions enabled by the system.},\n\ - \ address = {Birmingham, UK},\n author = {Han, Joung Min and Kakehi, Yasuaki},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813178},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {543--544},\n publisher = {Birmingham City University},\n title = {ParaSampling:\ - \ A Musical Instrument with Handheld Tapehead Interfaces for Impromptu Recording\ - \ and Playing on a Magnetic Tape},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper104.pdf},\n\ - \ year = {2020}\n}\n" + ID: Oda2016 + abstract: "At a time in the near future, many computers (including devices\nsuch\ + \ as smart-phones) will have system clocks that are synchronized to a high\ndegree\ + \ (less than 1 ms of error). 
This will enable us to coordinate events across\n\ + unconnected devices with a degree of accuracy that was previously impossible.\ + \ In\nparticular, high clock synchronization means that we can use these clocks\ + \ to\nsynchronize tempo between humans or sequencers with little-to-no communication\n\ + between the devices. To facilitate this low-overhead tempo synchronization, we\n\ + propose the Global Metronome, which is a simple, computationally cheap method\ + \ to\nobtain absolute tempo synchronization. We present experimental results\n\ + demonstrating the effectiveness of using the Global Metronome and compare the\n\ + performance to MIDI clock sync, a common synchronization method. Finally, we\n\ + present an open source implementation of a Global Metronome server using a\nGPS-connected\ + \ Raspberry Pi that can be built for under $100." + address: 'Brisbane, Australia' + author: Reid Oda and Rebecca Fiebrink + bibtex: "@inproceedings{Oda2016,\n abstract = {At a time in the near future, many\ + \ computers (including devices\nsuch as smart-phones) will have system clocks\ + \ that are synchronized to a high\ndegree (less than 1 ms of error). This will\ + \ enable us to coordinate events across\nunconnected devices with a degree of\ + \ accuracy that was previously impossible. In\nparticular, high clock synchronization\ + \ means that we can use these clocks to\nsynchronize tempo between humans or sequencers\ + \ with little-to-no communication\nbetween the devices. To facilitate this low-overhead\ + \ tempo synchronization, we\npropose the Global Metronome, which is a simple,\ + \ computationally cheap method to\nobtain absolute tempo synchronization. 
We present\ + \ experimental results\ndemonstrating the effectiveness of using the Global Metronome\ + \ and compare the\nperformance to MIDI clock sync, a common synchronization method.\ + \ Finally, we\npresent an open source implementation of a Global Metronome server\ + \ using a\nGPS-connected Raspberry Pi that can be built for under $100.},\n address\ + \ = {Brisbane, Australia},\n author = {Reid Oda and Rebecca Fiebrink},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176096},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {26--31},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {The Global Metronome: Absolute Tempo Sync For\ + \ Networked Musical Performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0006.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813178 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176096 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 543--544 - publisher: Birmingham City University - title: 'ParaSampling: A Musical Instrument with Handheld Tapehead Interfaces for - Impromptu Recording and Playing on a Magnetic Tape' - url: https://www.nime.org/proceedings/2020/nime2020_paper104.pdf - year: 2020 + pages: 26--31 + publisher: Queensland Conservatorium Griffith University + title: 'The Global Metronome: Absolute Tempo Sync For Networked Musical Performance' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0006.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_105 - abstract: 'A variety of controllers with multifarious sensors and functions have - maximized the real time performers control capabilities. 
The idea behind this - project was to create an interface which enables the interaction between the performers - and the effect processor measuring their brain waves amplitudes, e.g., alpha, - beta, theta, delta and gamma, not necessarily with the user’s awareness. We achieved - this by using an electroencephalography (EEG) sensor for detecting performer’s - different emotional states and, based on these, sending midi messages for digital - processing units automation. The aim is to create a new generation of digital - processor units that could be automatically configured in real-time given the - emotions or thoughts of the performer or the audience. By introducing emotional - state information in the real time control of several aspects of artistic expression, - we highlight the impact of surprise and uniqueness in the artistic performance.' - address: 'Birmingham, UK' - author: 'Filandrianos, Giorgos and Kotsani, Natalia and Dervakos, Edmund G and Stamou, - Giorgos and Amprazis, Vaios and Kiourtzoglou, Panagiotis' - bibtex: "@inproceedings{NIME20_105,\n abstract = {A variety of controllers with\ - \ multifarious sensors and functions have maximized the real time performers control\ - \ capabilities. The idea behind this project was to create an interface which\ - \ enables the interaction between the performers and the effect processor measuring\ - \ their brain waves amplitudes, e.g., alpha, beta, theta, delta and gamma, not\ - \ necessarily with the user’s awareness. We achieved this by using an electroencephalography\ - \ (EEG) sensor for detecting performer’s different emotional states and, based\ - \ on these, sending midi messages for digital processing units automation. The\ - \ aim is to create a new generation of digital processor units that could be automatically\ - \ configured in real-time given the emotions or thoughts of the performer or the\ - \ audience. 
By introducing emotional state information in the real time control\ - \ of several aspects of artistic expression, we highlight the impact of surprise\ - \ and uniqueness in the artistic performance.},\n address = {Birmingham, UK},\n\ - \ author = {Filandrianos, Giorgos and Kotsani, Natalia and Dervakos, Edmund G\ - \ and Stamou, Giorgos and Amprazis, Vaios and Kiourtzoglou, Panagiotis},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813180},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {545--546},\n\ - \ publisher = {Birmingham City University},\n title = {Brainwaves-driven Effects\ - \ Automation in Musical Performance},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper105.pdf},\n\ - \ year = {2020}\n}\n" + ID: Smallwood2016 + abstract: "This paper describes the development, creation, and deployment of\na\ + \ sound installation entitled Coronium 3500 (Lucie's Halo), commissioned by\n\ + the Caramoor Center for Music and the Arts. The piece, a 12-channel immersive\n\ + sound installation driven by solar power, was exhibited as part of the exhibition\n\ + In the Garden of Sonic Delights from June 7 to Nov. 4, 2014, and again for\nsimilar\ + \ duration in 2015. Herein I describe the aesthetic and technical details\nof\ + \ the piece and its ultimate deployment, as well as reflecting on the results\n\ + and the implications for future work." + address: 'Brisbane, Australia' + author: Scott Smallwood + bibtex: "@inproceedings{Smallwood2016,\n abstract = {This paper describes the development,\ + \ creation, and deployment of\na sound installation entitled Coronium 3500 (Lucie's\ + \ Halo), commissioned by\nthe Caramoor Center for Music and the Arts. 
The piece,\ + \ a 12-channel immersive\nsound installation driven by solar power, was exhibited\ + \ as part of the exhibition\nIn the Garden of Sonic Delights from June 7 to Nov.\ + \ 4, 2014, and again for\nsimilar duration in 2015. Herein I describe the aesthetic\ + \ and technical details\nof the piece and its ultimate deployment, as well as\ + \ reflecting on the results\nand the implications for future work.},\n address\ + \ = {Brisbane, Australia},\n author = {Scott Smallwood},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176127},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {32--35},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Coronium 3500: A Solarsonic Installation for Caramoor},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0007.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813180 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176127 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 545--546 - publisher: Birmingham City University - title: Brainwaves-driven Effects Automation in Musical Performance - url: https://www.nime.org/proceedings/2020/nime2020_paper105.pdf - year: 2020 + pages: 32--35 + publisher: Queensland Conservatorium Griffith University + title: 'Coronium 3500: A Solarsonic Installation for Caramoor' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0007.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_106 - abstract: 'This article focuses on the rich potential of hybrid domain translation - of modular synthesis (MS) into virtual reality (VR). 
It asks: to what extent can - what is valued in studio-based MS practice find a natural home or rich new interpretations - in the immersive capacities of VR? The article attends particularly to the relative - affordances and constraints of each as they inform the design and development - of a new system ("Mischmasch") supporting collaborative and performative patching - of Max gen~ patches and operators within a shared room-scale VR space.' - address: 'Birmingham, UK' - author: 'Wakefield, Graham and Palumbo, Michael and Zonta, Alexander' - bibtex: "@inproceedings{NIME20_106,\n abstract = {This article focuses on the rich\ - \ potential of hybrid domain translation of modular synthesis (MS) into virtual\ - \ reality (VR). It asks: to what extent can what is valued in studio-based MS\ - \ practice find a natural home or rich new interpretations in the immersive capacities\ - \ of VR? The article attends particularly to the relative affordances and constraints\ - \ of each as they inform the design and development of a new system (\"Mischmasch\"\ - ) supporting collaborative and performative patching of Max gen~ patches and operators\ - \ within a shared room-scale VR space.},\n address = {Birmingham, UK},\n author\ - \ = {Wakefield, Graham and Palumbo, Michael and Zonta, Alexander},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813182},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {547--550},\n\ - \ publisher = {Birmingham City University},\n title = {Affordances and Constraints\ - \ of Modular Synthesis in Virtual Reality},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper106.pdf},\n\ - \ year = {2020}\n}\n" + ID: Laurenzo2016 + abstract: |- + In the period between June 2014 and June 2015, at least 5,500 + immigrants died trying to reach Europe from Africa while crossing the + Mediterranean Sea. 
+ In this paper we present 5500, a piano performance that is a part of an on-going + project that investigates the incorporation of electrical muscle stimulation + (EMS) into musical performances, with a particular interest in the political + significance of the negotiation of control that arises. + 5500 consists of a performance of Beethoven's Sonata Pathétique, where the + pianist's execution is disrupted using computer-controlled electrodes + which stimulate the muscles in his or her arms causing their involuntary + contractions and affecting the final musical result. + address: 'Brisbane, Australia' + author: Tomas Laurenzo + bibtex: "@inproceedings{Laurenzo2016,\n abstract = {In the period between June 2014\ + \ and June 2015, at least 5,500\nimmigrants died trying to reach Europe from Africa\ + \ while crossing the\nMediterranean Sea.\nIn this paper we present 5500, a piano\ + \ performance that is a part of an on-going\nproject that investigates the incorporation\ + \ of electrical muscle stimulation\n(EMS) into musical performances, with a particular\ + \ interest in the political\nsignificance of the negotiation of control that arises.\n\ + 5500 consists of a performance of Beethoven's Sonata Path\\'{e}tique, where the\n\ + pianist's execution is disrupted using computer-controlled electrodes\nwhich stimulate\ + \ the muscles in his or her arms causing their involuntary\ncontractions and affecting\ + \ the final musical result.},\n address = {Brisbane, Australia},\n author = {Tomas\ + \ Laurenzo},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176058},\n isbn\ + \ = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {36--40},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {5500: performance,\ + \ control, and politics},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0008.pdf},\n\ + \ year = {2016}\n}\n" 
booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813182 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176058 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 547--550 - publisher: Birmingham City University - title: Affordances and Constraints of Modular Synthesis in Virtual Reality - url: https://www.nime.org/proceedings/2020/nime2020_paper106.pdf - year: 2020 + pages: 36--40 + publisher: Queensland Conservatorium Griffith University + title: '5500: performance, control, and politics' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0008.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_107 - abstract: 'Focusing on interactive performance works borne out of dancer-musician - collaborations, this paper investigates the relationship between the mediums of - sound and movement through a conceptual interpretation of the biological phenomenon - of symbiosis. Describing the close and persistent interactions between organisms - of different species, symbioses manifest across a spectrum of relationship types, - each identified according to the health effect experienced by the engaged organisms. - This biological taxonomy is appropriated within a framework which identifies specific - modes of interaction between sound and movement according to the collaborating - practitioners’ intended outcome, and required provisions, cognition of affect, - and system operation. Using the symbiotic framework as an analytical tool, six - dancer-musician collaborations from the field of NIME are examined in respect - to the employed modes of interaction within each of the four examined areas. The - findings reveal the emergence of multiple modes in each work, as well as examples - of mutation between different modes over the course of a performance. 
Furthermore, - the symbiotic concept provides a novel understanding of the ways gesture recognition - technologies (GRTs) have redefined the relationship dynamics between dancers and - musicians, and suggests a more efficient and inclusive approach in communicating - the potential and limitations presented by Human-Computer Interaction tools.' - address: 'Birmingham, UK' - author: 'moraitis, emmanouil' - bibtex: "@inproceedings{NIME20_107,\n abstract = {Focusing on interactive performance\ - \ works borne out of dancer-musician collaborations, this paper investigates the\ - \ relationship between the mediums of sound and movement through a conceptual\ - \ interpretation of the biological phenomenon of symbiosis. Describing the close\ - \ and persistent interactions between organisms of different species, symbioses\ - \ manifest across a spectrum of relationship types, each identified according\ - \ to the health effect experienced by the engaged organisms. This biological taxonomy\ - \ is appropriated within a framework which identifies specific modes of interaction\ - \ between sound and movement according to the collaborating practitioners’ intended\ - \ outcome, and required provisions, cognition of affect, and system operation.\ - \ Using the symbiotic framework as an analytical tool, six dancer-musician collaborations\ - \ from the field of NIME are examined in respect to the employed modes of interaction\ - \ within each of the four examined areas. The findings reveal the emergence of\ - \ multiple modes in each work, as well as examples of mutation between different\ - \ modes over the course of a performance. 
Furthermore, the symbiotic concept provides\ - \ a novel understanding of the ways gesture recognition technologies (GRTs) have\ - \ redefined the relationship dynamics between dancers and musicians, and suggests\ - \ a more efficient and inclusive approach in communicating the potential and limitations\ - \ presented by Human-Computer Interaction tools.},\n address = {Birmingham, UK},\n\ - \ author = {moraitis, emmanouil},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813184},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {551--556},\n presentation-video = {https://youtu.be/5X6F_nL8SOg},\n\ - \ publisher = {Birmingham City University},\n title = {Symbiosis: a biological\ - \ taxonomy for modes of interaction in dance-music collaborations},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper107.pdf},\n\ - \ year = {2020}\n}\n" + ID: Johnson2016 + abstract: "This paper provides an overview of a new mechatronic loudspeaker\nsystem:\ + \ speaker.motion. The system affords automated positioning of a loudspeaker\n\ + in real-time in order to manipulate the spatial qualities of electronic music.\n\ + The paper gives a technical overview of how the system's hardware and\nsoftware\ + \ were developed and the design criteria and methodology. There is\ndiscussion\ + \ of the unique features of the speaker.motion spatialisation system and\nthe\ + \ methods of user interaction, as well as a look at the creative possibilities\n\ + that the loudspeakers afford. The creative affordances are explored through the\n\ + case study of two new pieces written for the speaker.motion system. 
It is hoped\n\ + that the speaker.motion system will afford composers and performers with a new\n\ + range of spatial aesthetics to use in spatial performances, and encourage\nexploration\ + \ of the acoustic properties of physical performance and installation\nspaces\ + \ in electronic music." + address: 'Brisbane, Australia' + author: Bridget Johnson and Michael Norris and Ajay Kapur + bibtex: "@inproceedings{Johnson2016,\n abstract = {This paper provides an overview\ + \ of a new mechatronic loudspeaker\nsystem: speaker.motion. The system affords\ + \ automated positioning of a loudspeaker\nin real-time in order to manipulate\ + \ the spatial qualities of electronic music.\nThe paper gives a technical overview\ + \ of how the system's hardware and\nsoftware were developed and the design criteria\ + \ and methodology. There is\ndiscussion of the unique features of the speaker.motion\ + \ spatialisation system and\nthe methods of user interaction, as well as a look\ + \ at the creative possibilities\nthat the loudspeakers afford. The creative affordances\ + \ are explored through the\ncase study of two new pieces written for the speaker.motion\ + \ system. 
It is hoped\nthat the speaker.motion system will afford composers and\ + \ performers with a new\nrange of spatial aesthetics to use in spatial performances,\ + \ and encourage\nexploration of the acoustic properties of physical performance\ + \ and installation\nspaces in electronic music.},\n address = {Brisbane, Australia},\n\ + \ author = {Bridget Johnson and Michael Norris and Ajay Kapur},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176046},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {41--45},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {speaker.motion: A Mechatronic Loudspeaker System for Live Spatialisation},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0009.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813184 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176046 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 551--556 - presentation-video: https://youtu.be/5X6F_nL8SOg - publisher: Birmingham City University - title: 'Symbiosis: a biological taxonomy for modes of interaction in dance-music - collaborations' - url: https://www.nime.org/proceedings/2020/nime2020_paper107.pdf - year: 2020 + pages: 41--45 + publisher: Queensland Conservatorium Griffith University + title: 'speaker.motion: A Mechatronic Loudspeaker System for Live Spatialisation' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0009.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_108 - abstract: 'We present Olly, a musical textile tangible user interface (TUI) designed - around the observations of a group of five children with autism who like music. 
- The intention is to support scaffolding social interactions and sensory regulation - during a semi-structured and open-ended playful activity. Olly was tested in the - dance studio of a special education needs (SEN) school in North-East London, UK, - for a period of 5 weeks, every Thursday afternoon for 30 minutes. Olly uses one - Bare touch board in midi mode and four stretch analog sensors embedded inside - four elastic ribbons. These ribbons top the main body of the installation which - is made by using an inflatable gym ball wrapped in felt. Each of the ribbons plays - a different instrument and triggers different harmonic chords. Olly allows to - play pleasant melodies if interacting with it in solo mode and more complex harmonies - when playing together with others. Results show great potentials for carefully - designed musical TUI implementation aimed at scaffolding social play while affording - self-regulation in SEN contexts. We present a brief introduction on the background - and motivations, design considerations and results.' - address: 'Birmingham, UK' - author: 'Nonnis, Antonella and Bryan-Kinns, Nick' - bibtex: "@inproceedings{NIME20_108,\n abstract = {We present Olly, a musical textile\ - \ tangible user interface (TUI) designed around the observations of a group of\ - \ five children with autism who like music. The intention is to support scaffolding\ - \ social interactions and sensory regulation during a semi-structured and open-ended\ - \ playful activity. Olly was tested in the dance studio of a special education\ - \ needs (SEN) school in North-East London, UK, for a period of 5 weeks, every\ - \ Thursday afternoon for 30 minutes. Olly uses one Bare touch board in midi mode\ - \ and four stretch analog sensors embedded inside four elastic ribbons. These\ - \ ribbons top the main body of the installation which is made by using an inflatable\ - \ gym ball wrapped in felt. 
Each of the ribbons plays a different instrument and\ - \ triggers different harmonic chords. Olly allows to play pleasant melodies if\ - \ interacting with it in solo mode and more complex harmonies when playing together\ - \ with others. Results show great potentials for carefully designed musical TUI\ - \ implementation aimed at scaffolding social play while affording self-regulation\ - \ in SEN contexts. We present a brief introduction on the background and motivations,\ - \ design considerations and results.},\n address = {Birmingham, UK},\n author\ - \ = {Nonnis, Antonella and Bryan-Kinns, Nick},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.4813186},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {557--558},\n publisher = {Birmingham\ - \ City University},\n title = {Όλοι: music making to scaffold social playful activities\ - \ and self-regulation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper108.pdf},\n\ - \ year = {2020}\n}\n" + ID: Nort2016 + abstract: |- + This paper presents our work towards a database of performance + activity that is grounded in an embodied view on meaning creation that crosses + sense modalities. Our system design is informed by the philosophical and + aesthestic intentions of the laboratory context within which it is designed, + focused on distribution of performance activity across temporal and spatial + dimensions, and expanded notions of the instrumental system as environmental + performative agent. We focus here on design decisions that result from this + overarching worldview on digitally-mediated performance. 
+ address: 'Brisbane, Australia' + author: Doug Van Nort and Ian Jarvis and Michael Palumbo + bibtex: "@inproceedings{Nort2016,\n abstract = {This paper presents our work towards\ + \ a database of performance\nactivity that is grounded in an embodied view on\ + \ meaning creation that crosses\nsense modalities. Our system design is informed\ + \ by the philosophical and\naesthestic intentions of the laboratory context within\ + \ which it is designed,\nfocused on distribution of performance activity across\ + \ temporal and spatial\ndimensions, and expanded notions of the instrumental system\ + \ as environmental\nperformative agent. We focus here on design decisions that\ + \ result from this\noverarching worldview on digitally-mediated performance.},\n\ + \ address = {Brisbane, Australia},\n author = {Doug Van Nort and Ian Jarvis and\ + \ Michael Palumbo},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176092},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {46--50},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Towards a Mappable\ + \ Database of Emergent Gestural Meaning},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0010.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813186 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176092 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 557--558 - publisher: Birmingham City University - title: 'Όλοι: music making to scaffold social playful activities and self-regulation' - url: https://www.nime.org/proceedings/2020/nime2020_paper108.pdf - year: 2020 + pages: 46--50 + publisher: Queensland Conservatorium Griffith University + title: Towards a Mappable Database of Emergent Gestural Meaning + track: Papers + 
url: http://www.nime.org/proceedings/2016/nime2016_paper0010.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_109 - abstract: 'Identity is inextricably linked to culture and sustained through creation - and performance of music and dance, yet discussion of agency and cultural tools - informing design and performance application of gestural controllers is not widely - discussed. The purpose of this paper is to discuss the cultural body, its consideration - in existing gestural controller design, and how cultural design methods have the - potential to extend musical/social identities and/or traditions within a technological - context. In an effort to connect and reconnect with the author’s personal Nikkei - heritage, this paper will discuss the design of Nami – a custom built gestural - controller and its applicability to extend the author’s cultural body through - a community-centric case study performance.' - address: 'Birmingham, UK' - author: 'Sithi-Amnuai, Sara' - bibtex: "@inproceedings{NIME20_109,\n abstract = {Identity is inextricably linked\ - \ to culture and sustained through creation and performance of music and dance,\ - \ yet discussion of agency and cultural tools informing design and performance\ - \ application of gestural controllers is not widely discussed. 
The purpose of\ - \ this paper is to discuss the cultural body, its consideration in existing gestural\ - \ controller design, and how cultural design methods have the potential to extend\ - \ musical/social identities and/or traditions within a technological context.\ - \ In an effort to connect and reconnect with the author’s personal Nikkei heritage,\ - \ this paper will discuss the design of Nami – a custom built gestural controller\ - \ and its applicability to extend the author’s cultural body through a community-centric\ - \ case study performance.},\n address = {Birmingham, UK},\n author = {Sithi-Amnuai,\ - \ Sara},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.4813188},\n editor = {Romain\ - \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ - \ = {559--563},\n presentation-video = {https://youtu.be/QCUGtE_z1LE},\n publisher\ - \ = {Birmingham City University},\n title = {Exploring Identity Through Design:\ - \ A Focus on the Cultural Body Via Nami},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper109.pdf},\n\ - \ year = {2020}\n}\n" + ID: Long2016a + abstract: "The majority of musical robotics performances, projects and\ninstallations\ + \ utilise microcontroller hardware to digitally interface the robotic\ninstruments\ + \ with sequencer software and other musical controllers, often via a\npersonal\ + \ computer. While in many ways digital interfacing offers considerable\npower\ + \ and flexibility, digital protocols, equipment and audio workstations often\n\ + tend to suggest particular music-making work-flows and have resolution and timing\n\ + limitations. 
This paper describes the creation of a hardware interface that\n\ + allows direct communication between analogue synthesizer equipment and simple\n\ + robotic musical instruments entirely in the analogue domain without the use of\n\ + computers, microcontrollers or software of any kind. Several newly created\nmusical\ + \ robots of various designs are presented, together with a custom built\nhardware\ + \ interface with circuitry that enables analogue synthesizers to interface\nwith\ + \ the robots without any digital intermediary. This enables novel methods of\n\ + musical expression, creates new music-making work-flows for composing and\nimprovising\ + \ with musical robots and takes advantage of the low latency and\ninfinite resolution\ + \ of analogue circuits." + address: 'Brisbane, Australia' + author: Jason Long and Ajay Kapur and Dale Carnegie + bibtex: "@inproceedings{Long2016a,\n abstract = {The majority of musical robotics\ + \ performances, projects and\ninstallations utilise microcontroller hardware to\ + \ digitally interface the robotic\ninstruments with sequencer software and other\ + \ musical controllers, often via a\npersonal computer. While in many ways digital\ + \ interfacing offers considerable\npower and flexibility, digital protocols, equipment\ + \ and audio workstations often\ntend to suggest particular music-making work-flows\ + \ and have resolution and timing\nlimitations. This paper describes the creation\ + \ of a hardware interface that\nallows direct communication between analogue synthesizer\ + \ equipment and simple\nrobotic musical instruments entirely in the analogue domain\ + \ without the use of\ncomputers, microcontrollers or software of any kind. Several\ + \ newly created\nmusical robots of various designs are presented, together with\ + \ a custom built\nhardware interface with circuitry that enables analogue synthesizers\ + \ to interface\nwith the robots without any digital intermediary. 
This enables\ + \ novel methods of\nmusical expression, creates new music-making work-flows for\ + \ composing and\nimprovising with musical robots and takes advantage of the low\ + \ latency and\ninfinite resolution of analogue circuits.},\n address = {Brisbane,\ + \ Australia},\n author = {Jason Long and Ajay Kapur and Dale Carnegie},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176072},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {51--54},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {An Analogue Interface for Musical Robots},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0011.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813188 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176072 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 559--563 - presentation-video: https://youtu.be/QCUGtE_z1LE - publisher: Birmingham City University - title: 'Exploring Identity Through Design: A Focus on the Cultural Body Via Nami' - url: https://www.nime.org/proceedings/2020/nime2020_paper109.pdf - year: 2020 + pages: 51--54 + publisher: Queensland Conservatorium Griffith University + title: An Analogue Interface for Musical Robots + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0011.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_11 - abstract: 'With the development of web audio standards, it has quickly become technically - easy to develop and deploy software for inviting audiences to participate in musical - performances using their mobile phones. Thus, a new audience-centric musical genre - has emerged, which aligns with artistic manifestations where there is an explicit - inclusion of the public (e.g. 
participatory art, cinema or theatre). Previous - research has focused on analysing this new genre from historical, social organisation - and technical perspectives. This follow-up paper contributes with reflections - on technical and aesthetic aspects of composing within this audience-centric approach. - We propose a set of 13 composition dimensions that deal with the role of the performer, - the role of the audience, the location of sound and the type of feedback, among - others. From a reflective approach, four participatory pieces developed by the - authors are analysed using the proposed dimensions. Finally, we discuss a set - of recommendations and challenges for the composers-developers of this new and - promising musical genre. This paper concludes discussing the implications of this - research for the NIME community.' - address: 'Birmingham, UK' - author: 'Xambó, Anna and Roma, Gerard' - bibtex: "@inproceedings{NIME20_11,\n abstract = {With the development of web audio\ - \ standards, it has quickly become technically easy to develop and deploy software\ - \ for inviting audiences to participate in musical performances using their mobile\ - \ phones. Thus, a new audience-centric musical genre has emerged, which aligns\ - \ with artistic manifestations where there is an explicit inclusion of the public\ - \ (e.g. participatory art, cinema or theatre). Previous research has focused on\ - \ analysing this new genre from historical, social organisation and technical\ - \ perspectives. This follow-up paper contributes with reflections on technical\ - \ and aesthetic aspects of composing within this audience-centric approach. We\ - \ propose a set of 13 composition dimensions that deal with the role of the performer,\ - \ the role of the audience, the location of sound and the type of feedback, among\ - \ others. From a reflective approach, four participatory pieces developed by the\ - \ authors are analysed using the proposed dimensions. 
Finally, we discuss a set\ - \ of recommendations and challenges for the composers-developers of this new and\ - \ promising musical genre. This paper concludes discussing the implications of\ - \ this research for the NIME community.},\n address = {Birmingham, UK},\n author\ - \ = {Xambó, Anna and Roma, Gerard},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813192},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {55--60},\n publisher = {Birmingham City University},\n\ - \ title = {Performing Audiences: Composition Strategies for Network Music using\ - \ Mobile Phones},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper11.pdf},\n\ - \ year = {2020}\n}\n" + ID: Barrett2016 + abstract: "Despite increasingly accessible and user-friendly multi-channel\ncompositional\ + \ tools, many composers still choose stereo formats for their work,\nwhere the\ + \ compositional process is allied to diffusion performance over a\n`classical'\ + \ loudspeaker orchestra. Although such orchestras remain\ncommon within UK institutions\ + \ as well as in France, they are in decline in the\nrest of the world. In contrast,\ + \ permanent, high-density loudspeaker arrays are on\nthe rise, as is the practical\ + \ application of 3-D audio technologies. Looking to\nthe future, we need to reconcile\ + \ the performance of historical and new stereo\nworks, side-by-side native 3-D\ + \ compositions. In anticipation of this growing\nneed, we have designed and tested\ + \ a prototype `Virtualmonium'. 
The\nVirtualmonium is an instrument for classical\ + \ diffusion performance over an\nacousmonium emulated in higher-order Ambisonics.\ + \ It allows composers to\ncustom-design loudspeaker orchestra emulations for the\ + \ performance of their\nworks, rehearse and refine performances off-site, and\ + \ perform classical\nrepertoire alongside native 3-D formats in the same concert.\ + \ This paper describes\nthe technical design of the Virtualmonium, assesses the\ + \ success of the prototype\nin some preliminary listening tests and concerts,\ + \ and speculates how the\ninstrument can further composition and performance practice." + address: 'Brisbane, Australia' + author: Natasha Barrett and Alexander Refsum Jensenius + bibtex: "@inproceedings{Barrett2016,\n abstract = {Despite increasingly accessible\ + \ and user-friendly multi-channel\ncompositional tools, many composers still choose\ + \ stereo formats for their work,\nwhere the compositional process is allied to\ + \ diffusion performance over a\n`classical' loudspeaker orchestra. Although such\ + \ orchestras remain\ncommon within UK institutions as well as in France, they\ + \ are in decline in the\nrest of the world. In contrast, permanent, high-density\ + \ loudspeaker arrays are on\nthe rise, as is the practical application of 3-D\ + \ audio technologies. Looking to\nthe future, we need to reconcile the performance\ + \ of historical and new stereo\nworks, side-by-side native 3-D compositions. In\ + \ anticipation of this growing\nneed, we have designed and tested a prototype\ + \ `Virtualmonium'. The\nVirtualmonium is an instrument for classical diffusion\ + \ performance over an\nacousmonium emulated in higher-order Ambisonics. It allows\ + \ composers to\ncustom-design loudspeaker orchestra emulations for the performance\ + \ of their\nworks, rehearse and refine performances off-site, and perform classical\n\ + repertoire alongside native 3-D formats in the same concert. 
This paper describes\n\ + the technical design of the Virtualmonium, assesses the success of the prototype\n\ + in some preliminary listening tests and concerts, and speculates how the\ninstrument\ + \ can further composition and performance practice.},\n address = {Brisbane, Australia},\n\ + \ author = {Natasha Barrett and Alexander Refsum Jensenius},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1175974},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {55--60},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {The `Virtualmonium': an instrument for classical sound diffusion over\ + \ a virtual loudspeaker orchestra},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0012.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813192 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175974 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July pages: 55--60 - publisher: Birmingham City University - title: 'Performing Audiences: Composition Strategies for Network Music using Mobile - Phones' - url: https://www.nime.org/proceedings/2020/nime2020_paper11.pdf - year: 2020 + publisher: Queensland Conservatorium Griffith University + title: 'The `Virtualmonium'': an instrument for classical sound diffusion over a + virtual loudspeaker orchestra' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0012.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_110 - abstract: 'This paper reflects on players'' first responses to a constrained Accessible - Digital Musical Instrument (ADMI) in open, child-led sessions with seven children - at a special school. 
Each player''s gestures with the instrument were sketched, - categorised and compared with those of others among the group. Additionally, sensor - data from the instruments was recorded and analysed to give a secondary indication - of playing style, based on note and silence durations. In accord with previous - studies, the high degree of constraints led to a diverse range of playing styles, - allowing each player to appropriate and explore the instruments within a short - inaugural session. The open, undirected sessions also provided insights which - could potentially direct future work based on each person''s responses to the - instruments. The paper closes with a short discussion of these diverse styles, - and the potential role constrained ADMIs could serve as ''ice-breakers'' in musical - projects that seek to co-produce or co-design with neurodiverse children and young - people.' - address: 'Birmingham, UK' - author: 'Wright, Joe' - bibtex: "@inproceedings{NIME20_110,\n abstract = {This paper reflects on players'\ - \ first responses to a constrained Accessible Digital Musical Instrument (ADMI)\ - \ in open, child-led sessions with seven children at a special school. Each player's\ - \ gestures with the instrument were sketched, categorised and compared with those\ - \ of others among the group. Additionally, sensor data from the instruments was\ - \ recorded and analysed to give a secondary indication of playing style, based\ - \ on note and silence durations. In accord with previous studies, the high degree\ - \ of constraints led to a diverse range of playing styles, allowing each player\ - \ to appropriate and explore the instruments within a short inaugural session.\ - \ The open, undirected sessions also provided insights which could potentially\ - \ direct future work based on each person's responses to the instruments. 
The\ - \ paper closes with a short discussion of these diverse styles, and the potential\ - \ role constrained ADMIs could serve as 'ice-breakers' in musical projects that\ - \ seek to co-produce or co-design with neurodiverse children and young people.},\n\ - \ address = {Birmingham, UK},\n author = {Wright, Joe},\n booktitle = {Proceedings\ + ID: Arango2016 + abstract: "This paper reports the goals, procedures and recent activities of\nthe\ + \ Smartphone Ensemble, an academic group of musicians and designers exploring\n\ + mobile phones social mediation in musical contexts. The SE was created in the\n\ + Design and Creation program at the Caldas University in Manizales, Colombia and\n\ + includes six regular members. The group intends to enhance links among musicians,\n\ + and between the musicians and their audience, by leveraging the network\ncapabilities\ + \ and mobility of smart phones, and exploring the expressivity of\nurban space.\ + \ Through the creation of pieces and interventions that are related to\nurban\ + \ experiences, the Smartphone Ensemble envisions alternatives to the standard\n\ + musical performance space. In this regard, the performances intend to be urban\n\ + interventions, not traditional concerts, they progress according to previously\n\ + defined tours around the city that the group embarks while playing" + address: 'Brisbane, Australia' + author: Julian Jaramillo Arango and Daniel Melàn Giraldo + bibtex: "@inproceedings{Arango2016,\n abstract = {This paper reports the goals,\ + \ procedures and recent activities of\nthe Smartphone Ensemble, an academic group\ + \ of musicians and designers exploring\nmobile phones social mediation in musical\ + \ contexts. The SE was created in the\nDesign and Creation program at the Caldas\ + \ University in Manizales, Colombia and\nincludes six regular members. 
The group\ + \ intends to enhance links among musicians,\nand between the musicians and their\ + \ audience, by leveraging the network\ncapabilities and mobility of smart phones,\ + \ and exploring the expressivity of\nurban space. Through the creation of pieces\ + \ and interventions that are related to\nurban experiences, the Smartphone Ensemble\ + \ envisions alternatives to the standard\nmusical performance space. In this regard,\ + \ the performances intend to be urban\ninterventions, not traditional concerts,\ + \ they progress according to previously\ndefined tours around the city that the\ + \ group embarks while playing},\n address = {Brisbane, Australia},\n author =\ + \ {Julian Jaramillo Arango and Daniel Mel\\`{a}n Giraldo},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813194},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {564--569},\n presentation-video\ - \ = {https://youtu.be/RhaIzCXQ3uo},\n publisher = {Birmingham City University},\n\ - \ title = {The Appropriation and Utility of Constrained ADMIs},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper110.pdf},\n\ - \ year = {2020}\n}\n" + \ doi = {10.5281/zenodo.1175850},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {61--64},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {The Smartphone Ensemble. 
Exploring mobile computer mediation in collaborative\ + \ musical performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0013.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813194 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175850 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 564--569 - presentation-video: https://youtu.be/RhaIzCXQ3uo - publisher: Birmingham City University - title: The Appropriation and Utility of Constrained ADMIs - url: https://www.nime.org/proceedings/2020/nime2020_paper110.pdf - year: 2020 + pages: 61--64 + publisher: Queensland Conservatorium Griffith University + title: The Smartphone Ensemble. Exploring mobile computer mediation in collaborative + musical performance + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0013.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_111 - abstract: 'When performing with new instruments, musicians often develop new performative - gestures and playing techniques. Music performance studies on new instruments - often consider interfaces that feature a spectrum of gestures similar to already - existing sound production techniques. This paper considers the choices performers - make when creating an idiomatic gestural language for an entirely unfamiliar instrument. - We designed a musical interface with a unique large-scale layout to encourage - new performers to create fully original instrument-body interactions. We conducted - a study where trained musicians were invited to perform one of two versions of - the same instrument, each physically identical but with a different tone mapping. - The study results reveal insights into how musicians develop novel performance - gestures when encountering a new instrument characterised by an unfamiliar shape - and size. 
Our discussion highlights the impact of an instrument’s scale and layout - on the emergence of new gestural vocabularies and on the qualities of the music - performed.' - address: 'Birmingham, UK' - author: 'Mice, Lia and McPherson, Andrew' - bibtex: "@inproceedings{NIME20_111,\n abstract = {When performing with new instruments,\ - \ musicians often develop new performative gestures and playing techniques. Music\ - \ performance studies on new instruments often consider interfaces that feature\ - \ a spectrum of gestures similar to already existing sound production techniques.\ - \ This paper considers the choices performers make when creating an idiomatic\ - \ gestural language for an entirely unfamiliar instrument. We designed a musical\ - \ interface with a unique large-scale layout to encourage new performers to create\ - \ fully original instrument-body interactions. We conducted a study where trained\ - \ musicians were invited to perform one of two versions of the same instrument,\ - \ each physically identical but with a different tone mapping. The study results\ - \ reveal insights into how musicians develop novel performance gestures when encountering\ - \ a new instrument characterised by an unfamiliar shape and size. 
Our discussion\ - \ highlights the impact of an instrument’s scale and layout on the emergence of\ - \ new gestural vocabularies and on the qualities of the music performed.},\n address\ - \ = {Birmingham, UK},\n author = {Mice, Lia and McPherson, Andrew},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813200},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {570--575},\n\ - \ presentation-video = {https://youtu.be/mnJN8ELneUU},\n publisher = {Birmingham\ - \ City University},\n title = {From miming to NIMEing: the development of idiomatic\ - \ gestural language on large scale DMIs},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper111.pdf},\n\ - \ year = {2020}\n}\n" + ID: Hofmann2016 + abstract: "Electronic pickup systems for acoustic instruments are often used\nin\ + \ popular and contemporary music performances because they allow amplification\n\ + and modification of a clean and direct signal. Strain gauge sensors on saxophone\n\ + and clarinet reeds have been shown to be a useful tool to gain insight into\n\ + tongue articulation during performance but also capture the reed vibrations. In\n\ + our previous design, we used a procedure with epoxy adhesive to glue the strain\n\ + gauge sensors to the flat side of the synthetic single reeds. The new design\n\ + integrates the sensor inside a synthetic reed, respectively between layers of\n\ + fibre polymer and wood. This allows an industrial production of sensor reeds.\n\ + Sensor reeds open up new possibilities to pick up woodwind instruments and to\n\ + analyse, to modify, and to amplify the signal. A signal-to-noise analysis of the\n\ + signals from both designs showed that a sensor, glued to the outside of the reed,\n\ + produced a cleaner signal." 
+ address: 'Brisbane, Australia' + author: Alex Hofmann and Vasileios Chatziioannou and Alexander Mayer and Harry Hartmann + bibtex: "@inproceedings{Hofmann2016,\n abstract = {Electronic pickup systems for\ + \ acoustic instruments are often used\nin popular and contemporary music performances\ + \ because they allow amplification\nand modification of a clean and direct signal.\ + \ Strain gauge sensors on saxophone\nand clarinet reeds have been shown to be\ + \ a useful tool to gain insight into\ntongue articulation during performance but\ + \ also capture the reed vibrations. In\nour previous design, we used a procedure\ + \ with epoxy adhesive to glue the strain\ngauge sensors to the flat side of the\ + \ synthetic single reeds. The new design\nintegrates the sensor inside a synthetic\ + \ reed, respectively between layers of\nfibre polymer and wood. This allows an\ + \ industrial production of sensor reeds.\nSensor reeds open up new possibilities\ + \ to pick up woodwind instruments and to\nanalyse, to modify, and to amplify the\ + \ signal. 
A signal-to-noise analysis of the\nsignals from both designs showed\ + \ that a sensor, glued to the outside of the reed,\nproduced a cleaner signal.},\n\ + \ address = {Brisbane, Australia},\n author = {Alex Hofmann and Vasileios Chatziioannou\ + \ and Alexander Mayer and Harry Hartmann},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176028},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {65--68},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Development of\ + \ Fibre Polymer Sensor {Reed}s for Saxophone and Clarinet},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0014.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813200 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176028 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 570--575 - presentation-video: https://youtu.be/mnJN8ELneUU - publisher: Birmingham City University - title: 'From miming to NIMEing: the development of idiomatic gestural language on - large scale DMIs' - url: https://www.nime.org/proceedings/2020/nime2020_paper111.pdf - year: 2020 + pages: 65--68 + publisher: Queensland Conservatorium Griffith University + title: Development of Fibre Polymer Sensor Reeds for Saxophone and Clarinet + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0014.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_112 - abstract: 'The Cyclops is an eye-gaze controlled instrument designed for live performance - and improvisation. It is primarily mo- tivated by a need for expressive musical - instruments that are more easily accessible to people who rely on eye track- ers - for computer access, such as those with amyotrophic lateral sclerosis (ALS). 
At - its current implementation, the Cyclops contains a synthesizer and sequencer, - and provides the ability to easily create and automate musical parameters and - effects through recording eye-gaze gestures on a two- dimensional canvas. In this - paper, we frame our prototype in the context of previous eye-controlled instruments, - and we discuss we designed the Cyclops to make gaze-controlled music making as - fun, accessible, and seamless as possible despite notable interaction challenges - like latency, inaccu- racy, and “Midas Touch.”' - address: 'Birmingham, UK' - author: 'Payne, William C and Paradiso, Ann and Kane, Shaun' - bibtex: "@inproceedings{NIME20_112,\n abstract = {The Cyclops is an eye-gaze controlled\ - \ instrument designed for live performance and improvisation. It is primarily\ - \ mo- tivated by a need for expressive musical instruments that are more easily\ - \ accessible to people who rely on eye track- ers for computer access, such as\ - \ those with amyotrophic lateral sclerosis (ALS). At its current implementation,\ - \ the Cyclops contains a synthesizer and sequencer, and provides the ability to\ - \ easily create and automate musical parameters and effects through recording\ - \ eye-gaze gestures on a two- dimensional canvas. In this paper, we frame our\ - \ prototype in the context of previous eye-controlled instruments, and we discuss\ - \ we designed the Cyclops to make gaze-controlled music making as fun, accessible,\ - \ and seamless as possible despite notable interaction challenges like latency,\ - \ inaccu- racy, and “Midas Touch.”},\n address = {Birmingham, UK},\n author =\ - \ {Payne, William C and Paradiso, Ann and Kane, Shaun},\n booktitle = {Proceedings\ + ID: Kapur2016 + abstract: 'This paper presents two new musical robot systems and an accompanying + driver electronics array. These systems are designed to allow for modular extensibility + and ease of use with different instrument systems. 
The first system discussed + is MalletOTon, a mechatronic mallet instrument player that may be re-configured + to play a number of different instruments. Secondly, the Modulet mechatronic noisemakers + are presented. These instruments are discrete modules that may be installed throughout + a space in a wide variety of configurations. In addition to presenting the aforementioned + new instruments, the Novalis system is shown. Novalis is an open-ended driver + system for mechatronic instruments, designed to afford rapid deployment and modularity. + Where prior mechatronic instruments are often purpose-built, the robots and robot + electronics presented in this paper may be re-deployed in a wide-ranging, diverse + manner. Taken as a whole, the design practices discussed in this paper go toward + establishing a paradigm of modular and extensible mechatronic instrument development.' + address: 'Brisbane, Australia' + author: Ajay Kapur and Jim Murphy and Michael Darling and Eric Heep and Bruce Lott + and Ness Morris + bibtex: "@inproceedings{Kapur2016,\n abstract = {This paper presents two new musical\ + \ robot systems and an accompanying driver electronics array. These systems are\ + \ designed to allow for modular extensibility and ease of use with different instrument\ + \ systems. The first system discussed is MalletOTon, a mechatronic mallet instrument\ + \ player that may be re-configured to play a number of different instruments.\ + \ Secondly, the Modulet mechatronic noisemakers are presented. These instruments\ + \ are discrete modules that may be installed throughout a space in a wide variety\ + \ of configurations. In addition to presenting the aforementioned new instruments,\ + \ the Novalis system is shown. Novalis is an open-ended driver system for mechatronic\ + \ instruments, designed to afford rapid deployment and modularity. 
Where prior\ + \ mechatronic instruments are often purpose-built, the robots and robot electronics\ + \ presented in this paper may be re-deployed in a wide-ranging, diverse manner.\ + \ Taken as a whole, the design practices discussed in this paper go toward establishing\ + \ a paradigm of modular and extensible mechatronic instrument development.},\n\ + \ address = {Brisbane, Australia},\n author = {Ajay Kapur and Jim Murphy and Michael\ + \ Darling and Eric Heep and Bruce Lott and Ness Morris},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813204},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {576--580},\n presentation-video\ - \ = {https://youtu.be/G6dxngoCx60},\n publisher = {Birmingham City University},\n\ - \ title = {Cyclops: Designing an eye-controlled instrument for accessibility and\ - \ flexible use},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper112.pdf},\n\ - \ year = {2020}\n}\n" + \ doi = {10.5281/zenodo.1176050},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {69--72},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {MalletOTon and the Modulets: Modular and Extensible Musical Robots},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0015.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813204 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176050 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 576--580 - presentation-video: https://youtu.be/G6dxngoCx60 - publisher: Birmingham City University - title: 'Cyclops: Designing an eye-controlled instrument for accessibility and flexible - use' - url: https://www.nime.org/proceedings/2020/nime2020_paper112.pdf - year: 
2020 + pages: 69--72 + publisher: Queensland Conservatorium Griffith University + title: 'MalletOTon and the Modulets: Modular and Extensible Musical Robots' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0015.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_113 - abstract: 'This paper presents the results of an observational study focusing on - the collaborative learning processes of a group of performers with an interactive - musical system. The main goal of this study was to implement methods for learning - and developing practice with these technological objects in order to generate - future pedagogical methods. During the research period of six months, four participants - regularly engaged in workshop-type scenarios where learning objectives were proposed - and guided by themselves.The principal researcher, working as participant-observer, - did not impose or prescribed learning objectives to the other members of the group. - Rather, all participants had equal say in what was to be done and how it was to - be accomplished. Results show that the group learning environment is rich in opportunities - for learning, mutual teaching, and for establishing a comunal practice for a given - interactive musical system.Key findings suggest that learning by demonstration, - observation and modelling are significant for learning in this context. Additionally, - it was observed that a dialogue and a continuous flow of information between the - members of the community is needed in order to motivate and further their learning.' - address: 'Birmingham, UK' - author: 'Marquez-Borbon, Adnan' - bibtex: "@inproceedings{NIME20_113,\n abstract = {This paper presents the results\ - \ of an observational study focusing on the collaborative learning processes of\ - \ a group of performers with an interactive musical system. 
The main goal of this\ - \ study was to implement methods for learning and developing practice with these\ - \ technological objects in order to generate future pedagogical methods. During\ - \ the research period of six months, four participants regularly engaged in workshop-type\ - \ scenarios where learning objectives were proposed and guided by themselves.The\ - \ principal researcher, working as participant-observer, did not impose or prescribed\ - \ learning objectives to the other members of the group. Rather, all participants\ - \ had equal say in what was to be done and how it was to be accomplished. Results\ - \ show that the group learning environment is rich in opportunities for learning,\ - \ mutual teaching, and for establishing a comunal practice for a given interactive\ - \ musical system.Key findings suggest that learning by demonstration, observation\ - \ and modelling are significant for learning in this context. Additionally, it\ - \ was observed that a dialogue and a continuous flow of information between the\ - \ members of the community is needed in order to motivate and further their learning.},\n\ - \ address = {Birmingham, UK},\n author = {Marquez-Borbon, Adnan},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813206},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {581--586},\n\ - \ presentation-video = {https://youtu.be/1G0bOVlWwyI},\n publisher = {Birmingham\ - \ City University},\n title = {Collaborative Learning with Interactive Music Systems},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper113.pdf},\n year\ - \ = {2020}\n}\n" + ID: Olson2016 + abstract: "Video games and music have influenced each other since the\nbeginning\ + \ of the consumer video game era. 
In particular the chiptune genre of\nmusic uses\ + \ sounds from 8-bit video games; these sounds have even found their way\ninto\ + \ contemporary popular music. However, in this genre, game sounds are arranged\n\ + using conventional musical interfaces, meaning the games themselves (their\nalgorithms,\ + \ design and interactivity) play no role in the creation of the music.\nThis paper\ + \ describes a new way of creating music with 8-bit games, by reverse\nengineering\ + \ and augmenting them with run-time scripts. A new API, Emstrument, is\npresented\ + \ which allows these scripts to send MIDI to music production software.\nThe end\ + \ result is game-derived musical interfaces any computer musician can use\nwith\ + \ their existing workflow. This enhances prior work in repurposing games as\n\ + musical interfaces by allowing musicians to use the original games instead of\n\ + having to build new versions with added musical capabilities.\nSeveral examples\ + \ of both new musical instruments and dynamic interactive musical\ncompositions\ + \ using Emstrument are presented, using iconic games from the 8-bit\nera." + address: 'Brisbane, Australia' + author: Ben Olson + bibtex: "@inproceedings{Olson2016,\n abstract = {Video games and music have influenced\ + \ each other since the\nbeginning of the consumer video game era. In particular\ + \ the chiptune genre of\nmusic uses sounds from 8-bit video games; these sounds\ + \ have even found their way\ninto contemporary popular music. However, in this\ + \ genre, game sounds are arranged\nusing conventional musical interfaces, meaning\ + \ the games themselves (their\nalgorithms, design and interactivity) play no role\ + \ in the creation of the music.\nThis paper describes a new way of creating music\ + \ with 8-bit games, by reverse\nengineering and augmenting them with run-time\ + \ scripts. 
A new API, Emstrument, is\npresented which allows these scripts to\ + \ send MIDI to music production software.\nThe end result is game-derived musical\ + \ interfaces any computer musician can use\nwith their existing workflow. This\ + \ enhances prior work in repurposing games as\nmusical interfaces by allowing\ + \ musicians to use the original games instead of\nhaving to build new versions\ + \ with added musical capabilities.\nSeveral examples of both new musical instruments\ + \ and dynamic interactive musical\ncompositions using Emstrument are presented,\ + \ using iconic games from the 8-bit\nera.},\n address = {Brisbane, Australia},\n\ + \ author = {Ben Olson},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176100},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {73--77},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Transforming 8-Bit\ + \ Video Games into Musical Interfaces via Reverse Engineering and Augmentation},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0016.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813206 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176100 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 581--586 - presentation-video: https://youtu.be/1G0bOVlWwyI - publisher: Birmingham City University - title: Collaborative Learning with Interactive Music Systems - url: https://www.nime.org/proceedings/2020/nime2020_paper113.pdf - year: 2020 + pages: 73--77 + publisher: Queensland Conservatorium Griffith University + title: Transforming 8-Bit Video Games into Musical Interfaces via Reverse Engineering + and Augmentation + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0016.pdf + year: 2016 - 
ENTRYTYPE: inproceedings - ID: NIME20_114 - abstract: 'This paper presents WELLE, a web-based music environment for blind people, - and describes its development, design, notation syntax and first experiences. - WELLE is intended to serve as a collaborative, performative and educational tool - to quickly create and record musical ideas. It is pattern-oriented, based on textual - notation and focuses on accessibility, playful interaction and ease of use. WELLE - was developed as part of the research project Tangible Signals and will also serve - as a platform for the integration of upcoming new interfaces.' - address: 'Birmingham, UK' - author: 'Vetter, Jens' - bibtex: "@inproceedings{NIME20_114,\n abstract = {This paper presents WELLE, a web-based\ - \ music environment for blind people, and describes its development, design, notation\ - \ syntax and first experiences. WELLE is intended to serve as a collaborative,\ - \ performative and educational tool to quickly create and record musical ideas.\ - \ It is pattern-oriented, based on textual notation and focuses on accessibility,\ - \ playful interaction and ease of use. 
WELLE was developed as part of the research\ - \ project Tangible Signals and will also serve as a platform for the integration\ - \ of upcoming new interfaces.},\n address = {Birmingham, UK},\n author = {Vetter,\ - \ Jens},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.4813208},\n editor = {Romain\ - \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ - \ = {587--590},\n publisher = {Birmingham City University},\n title = {WELLE -\ - \ a web-based music environment for the blind},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper114.pdf},\n\ - \ year = {2020}\n}\n" + ID: Cherston2016 + abstract: |- + We present a sonification platform for generating audio driven by + real-time particle collision data from the ATLAS experiment at CERN. This paper + provides a description of the data-to-audio mapping interfaces supported by the + project's composition tool as well as a preliminary evaluation of the platform's + evolution to meet the aesthetic needs of vastly distinct musical styles and + presentation venues. Our work has been conducted in collaboration with the ATLAS + Outreach team and is part of a broad vision to better harness real-time sensor + data as a canvas for artistic expression. Data-driven streaming audio can be + treated as a reimagined form of live radio for which composers craft the + instruments but real-time particle collisions pluck the strings. + address: 'Brisbane, Australia' + author: Juliana Cherston and Ewan Hill and Steven Goldfarb and Joseph Paradiso + bibtex: "@inproceedings{Cherston2016,\n abstract = {We present a sonification platform\ + \ for generating audio driven by\nreal-time particle collision data from the ATLAS\ + \ experiment at CERN. 
This paper\nprovides a description of the data-to-audio\ + \ mapping interfaces supported by the\nproject's composition tool as well as a\ + \ preliminary evaluation of the platform's\nevolution to meet the aesthetic needs\ + \ of vastly distinct musical styles and\npresentation venues. Our work has been\ + \ conducted in collaboration with the ATLAS\nOutreach team and is part of a broad\ + \ vision to better harness real-time sensor\ndata as a canvas for artistic expression.\ + \ Data-driven streaming audio can be\ntreated as a reimagined form of live radio\ + \ for which composers craft the\ninstruments but real-time particle collisions\ + \ pluck the strings.},\n address = {Brisbane, Australia},\n author = {Juliana\ + \ Cherston and Ewan Hill and Steven Goldfarb and Joseph Paradiso},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176012},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {78--83},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Musician and Mega-Machine: Compositions Driven\ + \ by Real-Time Particle Collision Data from the ATLAS Detector},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0017.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813208 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176012 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 587--590 - publisher: Birmingham City University - title: WELLE - a web-based music environment for the blind - url: https://www.nime.org/proceedings/2020/nime2020_paper114.pdf - year: 2020 + pages: 78--83 + publisher: Queensland Conservatorium Griffith University + title: 'Musician and Mega-Machine: Compositions Driven by Real-Time Particle Collision + Data from the ATLAS Detector' + 
track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0017.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_115 - abstract: 'This paper presents an overview of the design principles behind Digital - Music Instruments (DMIs) for education across all editions of the International - Conference on New Interfaces for Music Expression (NIME). We compiled a comprehensive - catalogue of over hundred DMIs with varying degrees of applicability in the educational - practice. Each catalogue entry is annotated according to a proposed taxonomy for - DMIs for education, rooted in the mechanics of control, mapping and feedback of - an interactive music system, along with the required expertise of target user - groups and the instrument learning curve. Global statistics unpack underlying - trends and design goals across the chronological period of the NIME conference. - In recent years, we note a growing number of DMIs targeting non-experts and with - reduced requirements in terms of expertise. Stemming from the identified trends, - we discuss future challenges in the design of DMIs for education towards enhanced - degrees of variation and unpredictability.' - address: 'Birmingham, UK' - author: 'Pessoa, Margarida and Parauta, Cláudio and Luís, Pedro and Corintha, Isabela - and Bernardes, Gilberto' - bibtex: "@inproceedings{NIME20_115,\n abstract = {This paper presents an overview\ - \ of the design principles behind Digital Music Instruments (DMIs) for education\ - \ across all editions of the International Conference on New Interfaces for Music\ - \ Expression (NIME). We compiled a comprehensive catalogue of over hundred DMIs\ - \ with varying degrees of applicability in the educational practice. 
Each catalogue\ - \ entry is annotated according to a proposed taxonomy for DMIs for education,\ - \ rooted in the mechanics of control, mapping and feedback of an interactive music\ - \ system, along with the required expertise of target user groups and the instrument\ - \ learning curve. Global statistics unpack underlying trends and design goals\ - \ across the chronological period of the NIME conference. In recent years, we\ - \ note a growing number of DMIs targeting non-experts and with reduced requirements\ - \ in terms of expertise. Stemming from the identified trends, we discuss future\ - \ challenges in the design of DMIs for education towards enhanced degrees of variation\ - \ and unpredictability.},\n address = {Birmingham, UK},\n author = {Pessoa, Margarida\ - \ and Parauta, Cláudio and Luís, Pedro and Corintha, Isabela and Bernardes, Gilberto},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813210},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {591--595},\n publisher = {Birmingham City University},\n title = {Examining\ - \ Temporal Trends and Design Goals of Digital Music Instruments for Education\ - \ in NIME: A Proposed Taxonomy},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper115.pdf},\n\ - \ year = {2020}\n}\n" + ID: Lind2016 + abstract: "Digitalization has enabled material decoupling of sound from the\nphysical\ + \ devices traditionally used to conceive it. This paper reports an\nartistic exploration\ + \ of novel mappings between everyday objects and digital\nsound. The Wheel Quintet---a\ + \ novel musical instrument comprising four\nbicycle wheels and a skateboard---was\ + \ created using off-the-shelf\ncomponents and visual programming in Max/MSP. 
The\ + \ use of everyday objects sought\nto enable people to quickly master the instrument,\ + \ regardless of their musical\nbackgrounds, and collectively create polytempic\ + \ musical textures in a\nparticipatory art context. Applying an action research\ + \ approach, the paper\nexamines in detail two key cycles of planning, action,\ + \ and analysis related to\nthe instrument, involving an interactive museum exhibition\ + \ open to the public and\na concert hall performance conducted by an animated\ + \ music notation system.\nDrawing on insights from the study, the paper contributes\ + \ new knowledge\nconcerning the creation and use of novel interfaces for music\ + \ composition and\nperformance enabled by digitalization." + address: 'Brisbane, Australia' + author: Anders Lind and Daniel Nylén + bibtex: "@inproceedings{Lind2016,\n abstract = {Digitalization has enabled material\ + \ decoupling of sound from the\nphysical devices traditionally used to conceive\ + \ it. This paper reports an\nartistic exploration of novel mappings between everyday\ + \ objects and digital\nsound. The Wheel Quintet---a novel musical instrument comprising\ + \ four\nbicycle wheels and a skateboard---was created using off-the-shelf\ncomponents\ + \ and visual programming in Max/MSP. The use of everyday objects sought\nto enable\ + \ people to quickly master the instrument, regardless of their musical\nbackgrounds,\ + \ and collectively create polytempic musical textures in a\nparticipatory art\ + \ context. 
Applying an action research approach, the paper\nexamines in detail\ + \ two key cycles of planning, action, and analysis related to\nthe instrument,\ + \ involving an interactive museum exhibition open to the public and\na concert\ + \ hall performance conducted by an animated music notation system.\nDrawing on\ + \ insights from the study, the paper contributes new knowledge\nconcerning the\ + \ creation and use of novel interfaces for music composition and\nperformance\ + \ enabled by digitalization.},\n address = {Brisbane, Australia},\n author = {Anders\ + \ Lind and Daniel Nyl\\'{e}n},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176064},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {84--89},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Mapping Everyday\ + \ Objects to Digital Materiality in The Wheel Quintet: Polytempic Music and Participatory\ + \ Art},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0018.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813210 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176064 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 591--595 - publisher: Birmingham City University - title: 'Examining Temporal Trends and Design Goals of Digital Music Instruments - for Education in NIME: A Proposed Taxonomy' - url: https://www.nime.org/proceedings/2020/nime2020_paper115.pdf - year: 2020 + pages: 84--89 + publisher: Queensland Conservatorium Griffith University + title: 'Mapping Everyday Objects to Digital Materiality in The Wheel Quintet: Polytempic + Music and Participatory Art' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0018.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_116 - 
abstract: 'The tabla is a traditional pitched two-piece Indian drum set, popular - not only within South East Asian music, but whose sounds also regularly feature - in western music. Yet tabla remains an aural tradition, taught largely through - a guru system heavy in custom and mystique. Tablas can also pose problems for - school and professional performance environments as they are physically bulky, - fragile, and reactive to environmental factors such as damp and heat. As part - of a broader project to demystify tabla, we present an electronic tabla that plays - nearly identically to an acoustic tabla and was created in order to make the tabla - acces- sible and practical for a wider audience of students, pro- fessional musicians - and composers. Along with develop- ment of standardised tabla notation and instructional - educational aides, the electronic tabla is designed to be compact, robust, easily - tuned, and the electronic nature allows for scoring tabla through playing. Further, - used as an interface, it allows the use of learned tabla technique to control - other percussive sounds. We also discuss the technological approaches used to - accurately capture the localized multi-touch rapid-fire strikes and damping that - combine to make tabla such a captivating and virtuosic instrument.' - address: 'Birmingham, UK' - author: 'Pardue, Laurel S and Bhamra, Kuljit and England, Graham and Eddershaw, - Phil and Menzies, Duncan ' - bibtex: "@inproceedings{NIME20_116,\n abstract = {The tabla is a traditional pitched\ - \ two-piece Indian drum set, popular not only within South East Asian music, but\ - \ whose sounds also regularly feature in western music. Yet tabla remains an aural\ - \ tradition, taught largely through a guru system heavy in custom and mystique.\ - \ Tablas can also pose problems for school and professional performance environments\ - \ as they are physically bulky, fragile, and reactive to environmental factors\ - \ such as damp and heat. 
As part of a broader project to demystify tabla, we present\ - \ an electronic tabla that plays nearly identically to an acoustic tabla and was\ - \ created in order to make the tabla acces- sible and practical for a wider audience\ - \ of students, pro- fessional musicians and composers. Along with develop- ment\ - \ of standardised tabla notation and instructional educational aides, the electronic\ - \ tabla is designed to be compact, robust, easily tuned, and the electronic nature\ - \ allows for scoring tabla through playing. Further, used as an interface, it\ - \ allows the use of learned tabla technique to control other percussive sounds.\ - \ We also discuss the technological approaches used to accurately capture the\ - \ localized multi-touch rapid-fire strikes and damping that combine to make tabla\ - \ such a captivating and virtuosic instrument.},\n address = {Birmingham, UK},\n\ - \ author = {Pardue, Laurel S and Bhamra, Kuljit and England, Graham and Eddershaw,\ - \ Phil and Menzies, Duncan },\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813212},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {596--599},\n presentation-video = {https://youtu.be/PPaHq8fQjB0},\n\ - \ publisher = {Birmingham City University},\n title = {Demystifying tabla through\ - \ the development of an electronic drum},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper116.pdf},\n\ - \ year = {2020}\n}\n" + ID: Balandra2016 + abstract: 'An entertainment environment to enrich music listening experience is + purposed. This environment is composed of 3 modules: a MIDI player, a music animation + and a haptic module that translates the notes played by one instrument into a + resemblant vibration. 
To create the haptic vibration, the notes'' relative pitch + in the song are calculated, then these positions are mapped into the haptic signals'' + amplitude and frequency. Also, the envelope of the haptic signal is modified, + by using an ADSR filter, to have the same envelope as the audio signal. To evaluate + the perceived cross-modal similarity between users, two experiments were performed. + In both, the users used the complete entertainment environment to rank the similarity + between 3 different haptic signals, with triangular, square and analogue envelopes + and 4 different instruments in a classical song. The first experiment was performed + with the purposed amplitude and frequency technique, while the second experiment + was performed with constant frequency and amplitude. Results, show different envelope + user preferences. The square and triangular envelopes were preferred in the first + experiment, while only analogue envelopes were preferred in the second. This suggests + that the users'' envelope perception was masked by the changes in amplitude and + frequency.' + address: 'Brisbane, Australia' + author: Alfonso Balandra and Hironori Mitake and Shoichi Hasegawa + bibtex: "@inproceedings{Balandra2016,\n abstract = {An entertainment environment\ + \ to enrich music listening experience is purposed. This environment is composed\ + \ of 3 modules: a MIDI player, a music animation and a haptic module that translates\ + \ the notes played by one instrument into a resemblant vibration. To create the\ + \ haptic vibration, the notes' relative pitch in the song are calculated, then\ + \ these positions are mapped into the haptic signals' amplitude and frequency.\ + \ Also, the envelope of the haptic signal is modified, by using an ADSR filter,\ + \ to have the same envelope as the audio signal. To evaluate the perceived cross-modal\ + \ similarity between users, two experiments were performed. 
In both, the users\ + \ used the complete entertainment environment to rank the similarity between 3\ + \ different haptic signals, with triangular, square and analogue envelopes and\ + \ 4 different instruments in a classical song. The first experiment was performed\ + \ with the purposed amplitude and frequency technique, while the second experiment\ + \ was performed with constant frequency and amplitude. Results, show different\ + \ envelope user preferences. The square and triangular envelopes were preferred\ + \ in the first experiment, while only analogue envelopes were preferred in the\ + \ second. This suggests that the users' envelope perception was masked by the\ + \ changes in amplitude and frequency.},\n address = {Brisbane, Australia},\n author\ + \ = {Alfonso Balandra and Hironori Mitake and Shoichi Hasegawa},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1175968},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {90--95},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Haptic Music Player---Synthetic audio-tactile stimuli generation based\ + \ on the notes' pitch and instruments' envelope mapping},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0019.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813212 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175968 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 596--599 - presentation-video: https://youtu.be/PPaHq8fQjB0 - publisher: Birmingham City University - title: Demystifying tabla through the development of an electronic drum - url: https://www.nime.org/proceedings/2020/nime2020_paper116.pdf - year: 2020 + pages: 90--95 + publisher: Queensland Conservatorium Griffith University + 
title: Haptic Music Player---Synthetic audio-tactile stimuli generation based on + the notes' pitch and instruments' envelope mapping + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0019.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_117 - abstract: 'SpeakerDrum is an instrument composed of multiple Dual Voice Coil speakers - (DVC) where two coils are used to drive the same membrane. However, in this case, - one of them is used as a microphone which is then used by the performer as an - input interface of percussive gestures. Of course, this leads to poten- tial feedback, - but with enough control, a compelling exploration of resonance haptic feedback - and sound embodiment is possible.' - address: 'Birmingham, UK' - author: 'Sierra, Juan D' - bibtex: "@inproceedings{NIME20_117,\n abstract = {SpeakerDrum is an instrument composed\ - \ of multiple Dual Voice Coil speakers (DVC) where two coils are used to drive\ - \ the same membrane. However, in this case, one of them is used as a microphone\ - \ which is then used by the performer as an input interface of percussive gestures.\ - \ Of course, this leads to poten- tial feedback, but with enough control, a compelling\ - \ exploration of resonance haptic feedback and sound embodiment is possible.},\n\ - \ address = {Birmingham, UK},\n author = {Sierra, Juan D},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813216},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {600--604},\n publisher = {Birmingham\ - \ City University},\n title = {SpeakerDrum},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper117.pdf},\n\ - \ year = {2020}\n}\n" + ID: Huberth2016 + abstract: 'Notation systems are used in almost all fields, especially for the communication + and expression of ideas. 
This paper proposes and discusses a notation system for + Gametrak-based computer music instruments. The notation system''s design is informed + both by Western music notation and dance notation, as well as common mappings + used in laptop orchestras. It is designed to be sound-agnostic, primarily instructing + the performer in their motions. While the discussion of such a notation system + may be particularly timely due to the growing commercially-available 3D motion + tracking controllers, the notation system may prove especially useful in the context + of Gametrak and laptop orchestra, for which score-based representation can help + clarify performer interaction and serve as a teaching tool in documenting prior + work.' + address: 'Brisbane, Australia' + author: Madeline Huberth and Chryssie Nanou + bibtex: "@inproceedings{Huberth2016,\n abstract = {Notation systems are used in\ + \ almost all fields, especially for the communication and expression of ideas.\ + \ This paper proposes and discusses a notation system for Gametrak-based computer\ + \ music instruments. The notation system's design is informed both by Western\ + \ music notation and dance notation, as well as common mappings used in laptop\ + \ orchestras. It is designed to be sound-agnostic, primarily instructing the performer\ + \ in their motions. 
While the discussion of such a notation system may be particularly\ + \ timely due to the growing commercially-available 3D motion tracking controllers,\ + \ the notation system may prove especially useful in the context of Gametrak and\ + \ laptop orchestra, for which score-based representation can help clarify performer\ + \ interaction and serve as a teaching tool in documenting prior work.},\n address\ + \ = {Brisbane, Australia},\n author = {Madeline Huberth and Chryssie Nanou},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176034},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {96--105},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Notation for {3D} Motion Tracking Controllers:\ + \ A Gametrak Case Study},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0020.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813216 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176034 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 600--604 - publisher: Birmingham City University - title: SpeakerDrum - url: https://www.nime.org/proceedings/2020/nime2020_paper117.pdf - year: 2020 + pages: 96--105 + publisher: Queensland Conservatorium Griffith University + title: 'Notation for 3D Motion Tracking Controllers: A Gametrak Case Study' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0020.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_118 - abstract: 'This paper presents the KeyWI, an electronic wind instrument design based - on the melodica that both improves upon limitations in current systems and is - general and powerful enough to support a variety of applications. 
Four opportunities - for growth are identified in current electronic wind instrument systems, which - then are used as focuses in the development and evaluation of the instrument. - The instrument features a breath pressure sensor with a large dynamic range, a - keyboard that allows for polyphonic pitch selection, and a completely integrated - construction. Sound synthesis is performed with Faust code compiled to the Bela - Mini, which offers low-latency audio and a simple yet powerful development workflow. - In order to be as accessible and versatile as possible, the hardware and software - is entirely open-source, and fabrication requires only common maker tools.' - address: 'Birmingham, UK' - author: 'Caren, Matthew and Michon, Romain and Wright, Matthew' - bibtex: "@inproceedings{NIME20_118,\n abstract = {This paper presents the KeyWI,\ - \ an electronic wind instrument design based on the melodica that both improves\ - \ upon limitations in current systems and is general and powerful enough to support\ - \ a variety of applications. Four opportunities for growth are identified in current\ - \ electronic wind instrument systems, which then are used as focuses in the development\ - \ and evaluation of the instrument. The instrument features a breath pressure\ - \ sensor with a large dynamic range, a keyboard that allows for polyphonic pitch\ - \ selection, and a completely integrated construction. Sound synthesis is performed\ - \ with Faust code compiled to the Bela Mini, which offers low-latency audio and\ - \ a simple yet powerful development workflow. 
In order to be as accessible and\ - \ versatile as possible, the hardware and software is entirely open-source, and\ - \ fabrication requires only common maker tools.},\n address = {Birmingham, UK},\n\ - \ author = {Caren, Matthew and Michon, Romain and Wright, Matthew},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813218},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {605--608},\n\ - \ publisher = {Birmingham City University},\n title = {The KeyWI: An Expressive\ - \ and Accessible Electronic Wind Instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper118.pdf},\n\ - \ year = {2020}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.4813218 - editor: Romain Michon and Franziska Schroeder + ID: Cakmak2016 + abstract: "In this paper, we describe a novel multimedia system for networked\n\ + musical collaboration. Our system, called Monad, offers a 3D virtual environment\n\ + that can be shared by multiple participants to collaborate remotely on a musical\n\ + performance. With Monad, we explore how various features of this environment in\n\ + relation to game mechanics, network architecture, and audiovisual aesthetics can\n\ + be used to mitigate problems inherent to networked musical performance, such as\n\ + time delays, data loss, and reduced agency of users. Finally, we describe the\n\ + results of a series of qualitative user studies that illustrate the effectiveness\n\ + of some of our design decisions with two separate versions of Monad." + address: 'Brisbane, Australia' + author: Cem Cakmak and Anil Camci and Angus Forbes + bibtex: "@inproceedings{Cakmak2016,\n abstract = {In this paper, we describe a novel\ + \ multimedia system for networked\nmusical collaboration. 
Our system, called Monad,\ + \ offers a 3D virtual environment\nthat can be shared by multiple participants\ + \ to collaborate remotely on a musical\nperformance. With Monad, we explore how\ + \ various features of this environment in\nrelation to game mechanics, network\ + \ architecture, and audiovisual aesthetics can\nbe used to mitigate problems inherent\ + \ to networked musical performance, such as\ntime delays, data loss, and reduced\ + \ agency of users. Finally, we describe the\nresults of a series of qualitative\ + \ user studies that illustrate the effectiveness\nof some of our design decisions\ + \ with two separate versions of Monad.},\n address = {Brisbane, Australia},\n\ + \ author = {Cem Cakmak and Anil Camci and Angus Forbes},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176002},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {106--111},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Networked Virtual Environments as Collaborative Music Spaces},\n track\ + \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0022.pdf},\n\ + \ year = {2016}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176002 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 605--608 - publisher: Birmingham City University - title: 'The KeyWI: An Expressive and Accessible Electronic Wind Instrument' - url: https://www.nime.org/proceedings/2020/nime2020_paper118.pdf - year: 2020 + pages: 106--111 + publisher: Queensland Conservatorium Griffith University + title: Networked Virtual Environments as Collaborative Music Spaces + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0022.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_119 - abstract: 'In this paper we provide a detailed description of 
the development of - a new interface for musical expression, the da ̈ıs, with focus on an iterative - development process, control of physical models for sounds synthesis, and haptic - feedback. The development process, consisting of three iterations, is covered - along with a discussion of the tools and methods used. The sound synthesis algorithm - for the da ̈ıs, a physical model of a bowed string, is covered and the mapping - from the interface parameters to those of the synthesis algorithms is described - in detail. Using a qualitative test the affordances, advantages, and disadvantages - of the chosen design, synthesis algorithm, and parameter mapping is highlighted. - Lastly, the possibilities for future work is discussed with special focus on alternate - sounds and mappings.' - address: 'Birmingham, UK' - author: 'Christensen, Pelle Juul and Overholt, Dan and Serafin, Stefania' - bibtex: "@inproceedings{NIME20_119,\n abstract = {In this paper we provide a detailed\ - \ description of the development of a new interface for musical expression, the\ - \ da ̈ıs, with focus on an iterative development process, control of physical\ - \ models for sounds synthesis, and haptic feedback. The development process, consisting\ - \ of three iterations, is covered along with a discussion of the tools and methods\ - \ used. The sound synthesis algorithm for the da ̈ıs, a physical model of a bowed\ - \ string, is covered and the mapping from the interface parameters to those of\ - \ the synthesis algorithms is described in detail. Using a qualitative test the\ - \ affordances, advantages, and disadvantages of the chosen design, synthesis algorithm,\ - \ and parameter mapping is highlighted. 
Lastly, the possibilities for future work\ - \ is discussed with special focus on alternate sounds and mappings.},\n address\ - \ = {Birmingham, UK},\n author = {Christensen, Pelle Juul and Overholt, Dan and\ - \ Serafin, Stefania},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813220},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {609--612},\n presentation-video = {https://youtu.be/XOvnc_AKKX8},\n\ - \ publisher = {Birmingham City University},\n title = {The Da ̈ıs: A Haptically\ - \ Enabled New Interface for Musical Expression for Controlling Physical Models\ - \ for Sound Synthesis},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper119.pdf},\n\ - \ year = {2020}\n}\n" + ID: Becking2016 + abstract: "Most instruments traditionally used to teach music in early\neducation,\ + \ like xylophones or flutes, encumber children with the additional\ndifficulty\ + \ of an unfamiliar and unnatural interface. The most simple expressive\ninteraction,\ + \ that even the smallest children use in order to make music, is\npounding at\ + \ surfaces. Through the design of an instrument with a simple\ninterface, like\ + \ a drum, but which produces a melodic sound, children can be\nprovided with an\ + \ easy and intuitive means to produce consonance. This should then\nbe further\ + \ complemented with information from analysis and interpretation of\nchildlike\ + \ gestures and dance moves, reflecting their natural understanding\nof musical\ + \ structure and motion. Based on these assumptions we propose a modular\nand reactive\ + \ system for dynamic composition with accessible interfaces, divided\ninto distinct\ + \ plugins usable in a standard digital audio workstation. This paper\ndescribes\ + \ our concept and how it can facilitate access to collaborative music\nmaking\ + \ for small children. 
A first prototypical implementation has been\ndesigned and\ + \ developed during the ongoing research project\nDrum-Dance-Music-Machine (DDMM),\ + \ a cooperation with the local social welfare\nassociation AWO Hagen and the chair\ + \ of musical education at the University of\nApplied Sciences Bielefeld." + address: 'Brisbane, Australia' + author: Dominic Becking and Christine Steinmeier and Philipp Kroos + bibtex: "@inproceedings{Becking2016,\n abstract = {Most instruments traditionally\ + \ used to teach music in early\neducation, like xylophones or flutes, encumber\ + \ children with the additional\ndifficulty of an unfamiliar and unnatural interface.\ + \ The most simple expressive\ninteraction, that even the smallest children use\ + \ in order to make music, is\npounding at surfaces. Through the design of an instrument\ + \ with a simple\ninterface, like a drum, but which produces a melodic sound, children\ + \ can be\nprovided with an easy and intuitive means to produce consonance. This\ + \ should then\nbe further complemented with information from analysis and interpretation\ + \ of\nchildlike gestures and dance moves, reflecting their natural understanding\n\ + of musical structure and motion. Based on these assumptions we propose a modular\n\ + and reactive system for dynamic composition with accessible interfaces, divided\n\ + into distinct plugins usable in a standard digital audio workstation. This paper\n\ + describes our concept and how it can facilitate access to collaborative music\n\ + making for small children. 
A first prototypical implementation has been\ndesigned\ + \ and developed during the ongoing research project\nDrum-Dance-Music-Machine\ + \ (DDMM), a cooperation with the local social welfare\nassociation AWO Hagen and\ + \ the chair of musical education at the University of\nApplied Sciences Bielefeld.},\n\ + \ address = {Brisbane, Australia},\n author = {Dominic Becking and Christine Steinmeier\ + \ and Philipp Kroos},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1175980},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {112--117},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Drum-Dance-Music-Machine:\ + \ Construction of a Technical Toolset for Low-Threshold Access to Collaborative\ + \ Musical Performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0023.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813220 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175980 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 609--612 - presentation-video: https://youtu.be/XOvnc_AKKX8 - publisher: Birmingham City University - title: 'The Da ̈ıs: A Haptically Enabled New Interface for Musical Expression for - Controlling Physical Models for Sound Synthesis' - url: https://www.nime.org/proceedings/2020/nime2020_paper119.pdf - year: 2020 + pages: 112--117 + publisher: Queensland Conservatorium Griffith University + title: 'Drum-Dance-Music-Machine: Construction of a Technical Toolset for Low-Threshold + Access to Collaborative Musical Performance' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0023.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_12 - abstract: 'Computer composed music remains a novel and challenging problem to 
solve. - Despite an abundance of techniques and systems little research has explored how - these might be useful for end-users looking to compose with generative and algorithmic - music techniques. User interfaces for generative music systems are often inaccessible - to non-programmers and neglect established composition workflow and design paradigms - that are familiar to computer-based music composers. We have developed a system - called the Interactive Generative Music Environment (IGME) that attempts to bridge - the gap between generative music and music sequencing software, through an easy - to use score editing interface. This paper discusses a series of user studies - in which users explore generative music composition with IGME. A questionnaire - evaluates the user’s perception of interacting with generative music and from - this provide recommendations for future generative music systems and interfaces.' - address: 'Birmingham, UK' - author: 'Hunt, Samuel J and Mitchell, Tom and Nash, Chris' - bibtex: "@inproceedings{NIME20_12,\n abstract = {Computer composed music remains\ - \ a novel and challenging problem to solve. Despite an abundance of techniques\ - \ and systems little research has explored how these might be useful for end-users\ - \ looking to compose with generative and algorithmic music techniques. User interfaces\ - \ for generative music systems are often inaccessible to non-programmers and neglect\ - \ established composition workflow and design paradigms that are familiar to computer-based\ - \ music composers. We have developed a system called the Interactive Generative\ - \ Music Environment (IGME) that attempts to bridge the gap between generative\ - \ music and music sequencing software, through an easy to use score editing interface.\ - \ This paper discusses a series of user studies in which users explore generative\ - \ music composition with IGME. 
A questionnaire evaluates the user’s perception\ - \ of interacting with generative music and from this provide recommendations for\ - \ future generative music systems and interfaces.},\n address = {Birmingham, UK},\n\ - \ author = {Hunt, Samuel J and Mitchell, Tom and Nash, Chris},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813222},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {61--66},\n publisher = {Birmingham\ - \ City University},\n title = {Composing computer generated music, an observational\ - \ study using IGME: the Interactive Generative Music Environment},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper12.pdf},\n\ - \ year = {2020}\n}\n" + ID: Leitman2016 + abstract: "Music Maker is a free online resource that provides files for 3D\nprinting\ + \ woodwind and brass mouthpieces and tutorials for using those mouthpieces\nto\ + \ learn about acoustics and music. The mouthpieces are designed to fit into\n\ + standard plumbing and automobile parts that can be easily purchased at home\n\ + improvement and automotive stores. The result is a musical tool that can be used\n\ + as simply as a set of building blocks to bridge the gap between our increasingly\n\ + digital world of fabrication and the real-world materials that make up our daily\n\ + lives.\nAn increasing number of schools, libraries and community groups are purchasing\ + \ 3D\nprinters but many are still struggling to create engaging and relevant curriculum\n\ + that ties into academic subjects. Making new musical instruments is a fantastic\n\ + way to learn about acoustics, physics and mathematics." 
+ address: 'Brisbane, Australia' + author: Sasha Leitman and John Granzow + bibtex: "@inproceedings{Leitman2016,\n abstract = {Music Maker is a free online\ + \ resource that provides files for 3D\nprinting woodwind and brass mouthpieces\ + \ and tutorials for using those mouthpieces\nto learn about acoustics and music.\ + \ The mouthpieces are designed to fit into\nstandard plumbing and automobile parts\ + \ that can be easily purchased at home\nimprovement and automotive stores. The\ + \ result is a musical tool that can be used\nas simply as a set of building blocks\ + \ to bridge the gap between our increasingly\ndigital world of fabrication and\ + \ the real-world materials that make up our daily\nlives.\nAn increasing number\ + \ of schools, libraries and community groups are purchasing 3D\nprinters but many\ + \ are still struggling to create engaging and relevant curriculum\nthat ties into\ + \ academic subjects. Making new musical instruments is a fantastic\nway to learn\ + \ about acoustics, physics and mathematics.},\n address = {Brisbane, Australia},\n\ + \ author = {Sasha Leitman and John Granzow},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176062},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {118--121},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Music Maker: 3d Printing and Acoustics Curriculum},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0024.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813222 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176062 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 61--66 - publisher: Birmingham City University - title: 'Composing computer generated music, an observational study using 
IGME: the - Interactive Generative Music Environment' - url: https://www.nime.org/proceedings/2020/nime2020_paper12.pdf - year: 2020 + pages: 118--121 + publisher: Queensland Conservatorium Griffith University + title: 'Music Maker: 3d Printing and Acoustics Curriculum' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0024.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_120 - abstract: 'Many opportunities and challenges in both the control and performative - aspects of today’s modular synthesizers exist. The user interface prevailing in - the world of synthesizers and music controllers has always been revolving around - knobs, faders, switches, dials, buttons, or capacitive touchpads, to name a few. - This paper presents a novel way of interaction with a modular synthesizer by exploring - the affordances of cord-base UIs. A special patch cable was developed us- ing - commercially available piezo-resistive rubber cords, and was adapted to fit to - the 3.5 mm mono audio jack, making it compatible with the Eurorack modular-synth - standard. Moreover, a module was developed to condition this stretch- able sensor/cable, - to allow multiple Patch-cordes to be used in a given patch simultaneously. This - paper also presents a vocabulary of interactions, labeled through various physical - actions, turning the patch cable into an expressive controller that complements - traditional patching techniques.' - address: 'Birmingham, UK' - author: 'Wilbert, Joao and Haddad, Don D and Ishii, Hiroshi and Paradiso, Joseph' - bibtex: "@inproceedings{NIME20_120,\n abstract = {Many opportunities and challenges\ - \ in both the control and performative aspects of today’s modular synthesizers\ - \ exist. The user interface prevailing in the world of synthesizers and music\ - \ controllers has always been revolving around knobs, faders, switches, dials,\ - \ buttons, or capacitive touchpads, to name a few. 
This paper presents a novel\ - \ way of interaction with a modular synthesizer by exploring the affordances of\ - \ cord-base UIs. A special patch cable was developed us- ing commercially available\ - \ piezo-resistive rubber cords, and was adapted to fit to the 3.5 mm mono audio\ - \ jack, making it compatible with the Eurorack modular-synth standard. Moreover,\ - \ a module was developed to condition this stretch- able sensor/cable, to allow\ - \ multiple Patch-cordes to be used in a given patch simultaneously. This paper\ - \ also presents a vocabulary of interactions, labeled through various physical\ - \ actions, turning the patch cable into an expressive controller that complements\ - \ traditional patching techniques.},\n address = {Birmingham, UK},\n author =\ - \ {Wilbert, Joao and Haddad, Don D and Ishii, Hiroshi and Paradiso, Joseph},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813224},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {613--616},\n presentation-video = {https://youtu.be/7gklx8ek8U8},\n publisher\ - \ = {Birmingham City University},\n title = {Patch-corde: an expressive patch-cable\ - \ for the modular synthesizer.},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper120.pdf},\n\ - \ year = {2020}\n}\n" + ID: Sello2016 + abstract: |- + This paper introduces the Hexenkessel---an augmented musical + instrument for interactive multimedia arts. The Hexenkessel is a classical + timpani with its drumhead acting as a tangible user interface for expressive + multimedia performances on stage. + address: 'Brisbane, Australia' + author: Jacob T. Sello + bibtex: "@inproceedings{Sello2016,\n abstract = {This paper introduces the Hexenkessel---an\ + \ augmented musical\ninstrument for interactive multimedia arts. 
The Hexenkessel\ + \ is a classical\ntimpani with its drumhead acting as a tangible user interface\ + \ for expressive\nmultimedia performances on stage.},\n address = {Brisbane, Australia},\n\ + \ author = {Jacob T. Sello},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176118},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {122--131},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {The Hexenkessel:\ + \ A Hybrid Musical Instrument for Multimedia Performances},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0025.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813224 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176118 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 613--616 - presentation-video: https://youtu.be/7gklx8ek8U8 - publisher: Birmingham City University - title: 'Patch-corde: an expressive patch-cable for the modular synthesizer.' - url: https://www.nime.org/proceedings/2020/nime2020_paper120.pdf - year: 2020 + pages: 122--131 + publisher: Queensland Conservatorium Griffith University + title: 'The Hexenkessel: A Hybrid Musical Instrument for Multimedia Performances' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0025.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_121 - abstract: 'The artistic sonification offers a creative method for putting direct - semantic layers to the abstract sounds. This paper is dedicated to the sound installation - “Soil choir v.1.3” that sonifies soil moisture in different depths and transforms - this non-musical phenomenon into organized sound structures. 
The sonification - of natural soil moisture processes tests the limits of our attention, patience - and willingness to still perceive ultra-slow reactions and examines the mechanisms - of our sense adaptation. Although the musical time of the installation is set - to almost non-human – environmental time scale (changes happen within hours, days, - weeks or even months…) this system can be explored and even played also as an - instrument by putting sensors to different soil areas or pouring liquid into the - soil and waiting for changes... The crucial aspect of the work was to design the - sonification architecture that deals with extreme slow changes of input data – - measured values from moisture sensors. The result is the sound installation consisting - of three objects – each with different types of soil. Every object is compact, - independent unit consisting of three low-cost capacitive soil moisture sensors, - 1m long perspex tube filled with soil, full range loudspeaker and Bela platform - with custom Supercollider code. I developed this installation during the year - 2019 and this paper gives insight into the aspects and issues connected with creating - this installation.' - address: 'Birmingham, UK' - author: 'Suchánek, Jiří' - bibtex: "@inproceedings{NIME20_121,\n abstract = {The artistic sonification offers\ - \ a creative method for putting direct semantic layers to the abstract sounds.\ - \ This paper is dedicated to the sound installation “Soil choir v.1.3” that sonifies\ - \ soil moisture in different depths and transforms this non-musical phenomenon\ - \ into organized sound structures. The sonification of natural soil moisture processes\ - \ tests the limits of our attention, patience and willingness to still perceive\ - \ ultra-slow reactions and examines the mechanisms of our sense adaptation. 
Although\ - \ the musical time of the installation is set to almost non-human – environmental\ - \ time scale (changes happen within hours, days, weeks or even months…) this system\ - \ can be explored and even played also as an instrument by putting sensors to\ - \ different soil areas or pouring liquid into the soil and waiting for changes...\ - \ The crucial aspect of the work was to design the sonification architecture that\ - \ deals with extreme slow changes of input data – measured values from moisture\ - \ sensors. The result is the sound installation consisting of three objects –\ - \ each with different types of soil. Every object is compact, independent unit\ - \ consisting of three low-cost capacitive soil moisture sensors, 1m long perspex\ - \ tube filled with soil, full range loudspeaker and Bela platform with custom\ - \ Supercollider code. I developed this installation during the year 2019 and this\ - \ paper gives insight into the aspects and issues connected with creating this\ - \ installation.},\n address = {Birmingham, UK},\n author = {Suchánek, Jiří},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813226},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {617--618},\n publisher = {Birmingham City University},\n title = {SOIL CHOIR\ - \ v.1.3 - soil moisture sonification installation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper121.pdf},\n\ - \ year = {2020}\n}\n" + ID: Lnicode228hdeoja2016 + abstract: "This paper presents an ongoing project for augmenting acoustic\ninstruments\ + \ with active acoustics. Active acoustics are defined as audio-rate\nvibration\ + \ driven into the instruments physical structure, inducing air-borne\nsound output.\ + \ The instrument's acoustic sound is thus doubled by an\nelectronic soundscape\ + \ radiating from the same source. 
The article is centered on\na case study on\ + \ two guitars, one with hexaphonic sound capture and the other with\nmonophonic\ + \ pickup. The article discusses the design, implementation, acoustics,\nsound\ + \ capture and processing of an active acoustic instrument, as well as\ngestural\ + \ control using the Leap Motion sensor. Extensions towards other\ninstruments\ + \ are presented, in connection with related artistic projects and\n`electronic\ + \ chamber music' aesthetics." + address: 'Brisbane, Australia' + author: Otso Lähdeoja + bibtex: "@inproceedings{Lnicode228hdeoja2016,\n abstract = {This paper presents\ + \ an ongoing project for augmenting acoustic\ninstruments with active acoustics.\ + \ Active acoustics are defined as audio-rate\nvibration driven into the instruments\ + \ physical structure, inducing air-borne\nsound output. The instrument's acoustic\ + \ sound is thus doubled by an\nelectronic soundscape radiating from the same source.\ + \ The article is centered on\na case study on two guitars, one with hexaphonic\ + \ sound capture and the other with\nmonophonic pickup. 
The article discusses the\ + \ design, implementation, acoustics,\nsound capture and processing of an active\ + \ acoustic instrument, as well as\ngestural control using the Leap Motion sensor.\ + \ Extensions towards other\ninstruments are presented, in connection with related\ + \ artistic projects and\n`electronic chamber music' aesthetics.},\n address =\ + \ {Brisbane, Australia},\n author = {Otso L\\''{a}hdeoja},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176054},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {132--136},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Active Acoustic Instruments for Electronic Chamber Music},\n track\ + \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0027.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813226 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176054 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 617--618 - publisher: Birmingham City University - title: SOIL CHOIR v.1.3 - soil moisture sonification installation - url: https://www.nime.org/proceedings/2020/nime2020_paper121.pdf - year: 2020 + pages: 132--136 + publisher: Queensland Conservatorium Griffith University + title: Active Acoustic Instruments for Electronic Chamber Music + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0027.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_122 - abstract: 'Three DIY electronic instruments that the author has used in real-life - multimedia performance contexts are scrutinised herein. 
The instruments are made - intentionally rough-hewn, non-optimal and user-unfriendly in several respects, - and are shown to draw upon experimental traits in electronics de- sign and interfaces - for music expression. The various different ways in which such design traits affects - their performance are outlined, as are their overall consequence to the artistic - outcome and to individual experiences of it. It is shown that, to a varying extent, - they all embody, mediate, and aid actualise the specifics their parent projects - revolve around. It is eventually suggested that in the context of an exploratory - and hybrid artistic practice, bespoke instruments of sorts, their improvised performance, - the material traits or processes they implement or pivot on, and the ideas/narratives - that perturb thereof, may all intertwine and fuse into one another so that a clear - distinction between one another is not always possible, or meaningful. In such - a vein, this paper aims at being an account of such a practice upon which prospective - researchers/artists may further build upon.' - address: 'Birmingham, UK' - author: 'Koutsomichalis, Marinos' - bibtex: "@inproceedings{NIME20_122,\n abstract = {Three DIY electronic instruments\ - \ that the author has used in real-life multimedia performance contexts are scrutinised\ - \ herein. The instruments are made intentionally rough-hewn, non-optimal and user-unfriendly\ - \ in several respects, and are shown to draw upon experimental traits in electronics\ - \ de- sign and interfaces for music expression. The various different ways in\ - \ which such design traits affects their performance are outlined, as are their\ - \ overall consequence to the artistic outcome and to individual experiences of\ - \ it. It is shown that, to a varying extent, they all embody, mediate, and aid\ - \ actualise the specifics their parent projects revolve around. 
It is eventually\ - \ suggested that in the context of an exploratory and hybrid artistic practice,\ - \ bespoke instruments of sorts, their improvised performance, the material traits\ - \ or processes they implement or pivot on, and the ideas/narratives that perturb\ - \ thereof, may all intertwine and fuse into one another so that a clear distinction\ - \ between one another is not always possible, or meaningful. In such a vein, this\ - \ paper aims at being an account of such a practice upon which prospective researchers/artists\ - \ may further build upon.},\n address = {Birmingham, UK},\n author = {Koutsomichalis,\ - \ Marinos},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813228},\n editor\ - \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ - \ pages = {619--624},\n presentation-video = {https://youtu.be/DWecR7exl8k},\n\ - \ publisher = {Birmingham City University},\n title = {Rough-hewn Hertzian Multimedia\ - \ Instruments},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper122.pdf},\n\ - \ year = {2020}\n}\n" + ID: Lynch2016 + abstract: "We present a composition framework that facilitates novel musical\nmappings\ + \ for large-scale distributed networks of environmental sensors. A library\nof\ + \ C-externals called ChainFlow for the graphical programming language Max/MSP\n\ + that provides an interface to real-time and historical data for large sensor\n\ + deployments was designed and implemented. This library along with spatialized\n\ + audio techniques were used to create immersive musical compositions which can\ + \ be\npresented on their own or complemented by a graphical 3D virtual world.\ + \ Musical\nworks driven by a sensor network deployed in a wetland restoration\ + \ project called\nTidmarsh are presented as case studies in augmented presence\ + \ through musical\nmapping." 
+ address: 'Brisbane, Australia' + author: Evan Lynch and Joseph Paradiso + bibtex: "@inproceedings{Lynch2016,\n abstract = {We present a composition framework\ + \ that facilitates novel musical\nmappings for large-scale distributed networks\ + \ of environmental sensors. A library\nof C-externals called ChainFlow for the\ + \ graphical programming language Max/MSP\nthat provides an interface to real-time\ + \ and historical data for large sensor\ndeployments was designed and implemented.\ + \ This library along with spatialized\naudio techniques were used to create immersive\ + \ musical compositions which can be\npresented on their own or complemented by\ + \ a graphical 3D virtual world. Musical\nworks driven by a sensor network deployed\ + \ in a wetland restoration project called\nTidmarsh are presented as case studies\ + \ in augmented presence through musical\nmapping.},\n address = {Brisbane, Australia},\n\ + \ author = {Evan Lynch and Joseph Paradiso},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176074},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {137--142},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {SensorChimes: Musical Mapping for Sensor Networks},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0028.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813228 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176074 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 619--624 - presentation-video: https://youtu.be/DWecR7exl8k - publisher: Birmingham City University - title: Rough-hewn Hertzian Multimedia Instruments - url: https://www.nime.org/proceedings/2020/nime2020_paper122.pdf - year: 2020 + pages: 137--142 + publisher: Queensland 
Conservatorium Griffith University + title: 'SensorChimes: Musical Mapping for Sensor Networks' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0028.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_123 - abstract: 'The visual-audioizer is a patch created in Max in which the concept of - fluid-time animation techniques, in tandem with basic computer vision tracking - methods, can be used as a tool to allow the visual time-based media artist to - create music. Visual aspects relating to the animator’s knowledge of motion, animated - loops, and auditory synchronization derived from computer vision tracking methods, - allow an immediate connection between the generated audio derived from visuals—becoming - a new way to experience and create audio-visual media. A conceptual overview, - comparisons of past/current audio-visual contributors, and a summary of the Max - patch will be discussed. The novelty of practice-based animation methods in the - field of musical expression, considerations of utilizing the visual-audioizer, - and the future of fluid-time animation techniques as a tool of musical creativity - will also be addressed. ' - address: 'Birmingham, UK' - author: 'Olsen, Taylor J' - bibtex: "@inproceedings{NIME20_123,\n abstract = {The visual-audioizer is a patch\ - \ created in Max in which the concept of fluid-time animation techniques, in tandem\ - \ with basic computer vision tracking methods, can be used as a tool to allow\ - \ the visual time-based media artist to create music. Visual aspects relating\ - \ to the animator’s knowledge of motion, animated loops, and auditory synchronization\ - \ derived from computer vision tracking methods, allow an immediate connection\ - \ between the generated audio derived from visuals—becoming a new way to experience\ - \ and create audio-visual media. 
A conceptual overview, comparisons of past/current\ - \ audio-visual contributors, and a summary of the Max patch will be discussed.\ - \ The novelty of practice-based animation methods in the field of musical expression,\ - \ considerations of utilizing the visual-audioizer, and the future of fluid-time\ - \ animation techniques as a tool of musical creativity will also be addressed.\ - \ },\n address = {Birmingham, UK},\n author = {Olsen, Taylor J},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813230},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {625--630},\n publisher = {Birmingham\ - \ City University},\n title = {Animation, Sonification, and Fluid-Time: A Visual-Audioizer\ - \ Prototype},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper123.pdf},\n\ - \ year = {2020}\n}\n" + ID: Nakanishi2016 + abstract: "NAKANISYNTH is a synthesiser application available on iOS devices\nthat\ + \ provides a simple and intuitive interface, allowing users to produce sound\n\ + loops by freehand drawing sound waves and envelope curves. The interface provides\n\ + a simple way of interacting: the only input required involves drawing two\nwaveforms,\ + \ meaning that users can easily produce various sounds intuitively\nwithout the\ + \ need for complex manipulation. The application's interface comprises\nof an\ + \ interchangeable ribbon and keyboard feature, plus two panels where users\ncan\ + \ edit waveforms, allowing users to make sounds. This simple approach to the\n\ + interface means that it is easy for users to understand the relationship between\n\ + a waveform and the sound that it produces. 
" + address: 'Brisbane, Australia' + author: Kyosuke Nakanishi and Paul Haimes and Tetsuaki Baba and Kumiko Kushiyama + bibtex: "@inproceedings{Nakanishi2016,\n abstract = {NAKANISYNTH is a synthesiser\ + \ application available on iOS devices\nthat provides a simple and intuitive interface,\ + \ allowing users to produce sound\nloops by freehand drawing sound waves and envelope\ + \ curves. The interface provides\na simple way of interacting: the only input\ + \ required involves drawing two\nwaveforms, meaning that users can easily produce\ + \ various sounds intuitively\nwithout the need for complex manipulation. The application's\ + \ interface comprises\nof an interchangeable ribbon and keyboard feature, plus\ + \ two panels where users\ncan edit waveforms, allowing users to make sounds. This\ + \ simple approach to the\ninterface means that it is easy for users to understand\ + \ the relationship between\na waveform and the sound that it produces. },\n address\ + \ = {Brisbane, Australia},\n author = {Kyosuke Nakanishi and Paul Haimes and Tetsuaki\ + \ Baba and Kumiko Kushiyama},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176086},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {143--145},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {NAKANISYNTH: An\ + \ Intuitive Freehand Drawing Waveform Synthesiser Application for iOS Devices},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0029.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813230 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176086 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 625--630 - publisher: Birmingham City University - title: 'Animation, Sonification, and Fluid-Time: A 
Visual-Audioizer Prototype' - url: https://www.nime.org/proceedings/2020/nime2020_paper123.pdf - year: 2020 + pages: 143--145 + publisher: Queensland Conservatorium Griffith University + title: 'NAKANISYNTH: An Intuitive Freehand Drawing Waveform Synthesiser Application + for iOS Devices' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0029.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_124 - abstract: This paper describes a system for automating the generation of mapping - schemes between human interaction with extramusical objects and electronic dance - music. These mappings are determined through the comparison of sensor input to - a synthesized matrix of sequenced audio. The goal of the system is to facilitate - live performances that feature quotidian objects in the place of traditional musical - instruments. The practical and artistic applications of musical control with quotidian - objects is discussed. The associated object-manipulating gesture vocabularies - are mapped to musical output so that the objects themselves may be perceived as - DMIs. This strategy is used in a performance to explore the liveness qualities - of the system. - address: 'Birmingham, UK' - author: 'de las Pozas, Virginia' - bibtex: "@inproceedings{NIME20_124,\n abstract = {This paper describes a system\ - \ for automating the generation of mapping schemes between human interaction with\ - \ extramusical objects and electronic dance music. These mappings are determined\ - \ through the comparison of sensor input to a synthesized matrix of sequenced\ - \ audio. The goal of the system is to facilitate live performances that feature\ - \ quotidian objects in the place of traditional musical instruments. The practical\ - \ and artistic applications of musical control with quotidian objects is discussed.\ - \ The associated object-manipulating gesture vocabularies are mapped to musical\ - \ output so that the objects themselves may be perceived as DMIs. 
This strategy\ - \ is used in a performance to explore the liveness qualities of the system.},\n\ - \ address = {Birmingham, UK},\n author = {de las Pozas, Virginia},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813232},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {631--634},\n\ - \ publisher = {Birmingham City University},\n title = {Semi-Automated Mappings\ - \ for Object-Manipulating Gestural Control of Electronic Music},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper124.pdf},\n\ - \ year = {2020}\n}\n" + ID: Vindriis2016 + abstract: "StrumBot is a novel standalone six stringed robotic guitar\nconsisting\ + \ of mechanisms designed to enable musical expressivity and minimise\nacoustic\ + \ noise. It is desirable for less than 60 dBA of noise at 1 m to be\nemitted to\ + \ allow StrumBot to play in intimate venues such as cafés or\nrestaurants without\ + \ loud motor noises detracting from the musical experience.\nStrumBot improves\ + \ upon previous RMI's by allowing additional expressive\nopportunities for a composer\ + \ to utilise. StrumBot can perform slides, vibrato,\nmuting techniques, pitch\ + \ bends, pluck power variances, timbre control, complex\nchords and fast strumming\ + \ patterns.\nA MIDI input allows commercial or custom controllers to operate StrumBot.\ + \ Novel\nnote allocation algorithms were created to allow a single MIDI stream\ + \ of notes to\nbe allocated across the six guitar strings.\nLatency measurements\ + \ from MIDI input to string pluck are as low as 40 ms for a\nbest case scenario\ + \ strum, allowing StrumBot to accompany a live musician with\nminimal audible\ + \ delay.\nA relay based loop switcher is incorporated, allowing StrumBot to activate\n\ + standard commercial guitar pedals based on a MIDI instruction. 
" + address: 'Brisbane, Australia' + author: Richard Vindriis and Dale Carnegie + bibtex: "@inproceedings{Vindriis2016,\n abstract = {StrumBot is a novel standalone\ + \ six stringed robotic guitar\nconsisting of mechanisms designed to enable musical\ + \ expressivity and minimise\nacoustic noise. It is desirable for less than 60\ + \ dBA of noise at 1 m to be\nemitted to allow StrumBot to play in intimate venues\ + \ such as caf\\'{e}s or\nrestaurants without loud motor noises detracting from\ + \ the musical experience.\nStrumBot improves upon previous RMI's by allowing additional\ + \ expressive\nopportunities for a composer to utilise. StrumBot can perform slides,\ + \ vibrato,\nmuting techniques, pitch bends, pluck power variances, timbre control,\ + \ complex\nchords and fast strumming patterns.\nA MIDI input allows commercial\ + \ or custom controllers to operate StrumBot. Novel\nnote allocation algorithms\ + \ were created to allow a single MIDI stream of notes to\nbe allocated across\ + \ the six guitar strings.\nLatency measurements from MIDI input to string pluck\ + \ are as low as 40 ms for a\nbest case scenario strum, allowing StrumBot to accompany\ + \ a live musician with\nminimal audible delay.\nA relay based loop switcher is\ + \ incorporated, allowing StrumBot to activate\nstandard commercial guitar pedals\ + \ based on a MIDI instruction. 
},\n address = {Brisbane, Australia},\n author\ + \ = {Richard Vindriis and Dale Carnegie},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176135},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {146--151},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {StrumBot---An\ + \ Overview of a Strumming Guitar Robot},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0030.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813232 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176135 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 631--634 - publisher: Birmingham City University - title: Semi-Automated Mappings for Object-Manipulating Gestural Control of Electronic - Music - url: https://www.nime.org/proceedings/2020/nime2020_paper124.pdf - year: 2020 + pages: 146--151 + publisher: Queensland Conservatorium Griffith University + title: StrumBot---An Overview of a Strumming Guitar Robot + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0030.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_125 - abstract: 'During theBaroque period, improvisation was a key element of music performance - and education. Great musicians, such as J.S. Bach, were better known as improvisers - than composers. Today, however, there is a lack of improvisation culture - in classical music performance and education; classical musicians either are not - trained to improvise, or cannot find other people to improvise with. Motivated - by this observation, we develop BachDuet, a system that enables real-time - counterpoint improvisation between a human anda machine. 
This system uses a recurrent - neural network toprocess the human musician’s monophonic performance ona MIDI - keyboard and generates the machine’s monophonic performance in real time. We develop - a GUI to visualize the generated music content and to facilitate this interaction. - We conduct user studies with 13 musically trained users and show the feasibility of two-party duet counterpoint - improvisation and the effectiveness of BachDuet for this purpose. We also conduct - listening tests with 48 participants and show that they cannot tell the difference - between duets generated by human-machine improvisation using BachDuet and those - generated by human-human improvisation. Objective evaluation is also conducted - to assess the degree to which these improvisations adhere to common rules of counterpoint, - showing promising results.' - address: 'Birmingham, UK' - author: 'Benetatos, Christodoulos and VanderStel, Joseph and Duan, Zhiyao' - bibtex: "@inproceedings{NIME20_125,\n abstract = {During theBaroque period, improvisation\ - \ was a key element of music performance and education. Great musicians, such\ - \ as J.S. Bach, were better known as improvisers than composers. Today, however,\ - \ there is a lack of improvisation culture in classical music performance\ - \ and education; classical musicians either are not trained to improvise, or cannot\ - \ find other people to improvise with. Motivated by this observation, we develop\ - \ BachDuet, a system that enables real-time counterpoint improvisation between\ - \ a human anda machine. This system uses a recurrent neural network toprocess\ - \ the human musician’s monophonic performance ona MIDI keyboard and generates\ - \ the machine’s monophonic performance in real time. We develop a GUI to visualize\ - \ the generated music content and to facilitate this interaction. 
We conduct\ - \ user studies with 13 musically trained users and show the feasibility\ - \ of two-party duet counterpoint improvisation and the effectiveness of BachDuet\ - \ for this purpose. We also conduct listening tests with 48 participants and\ - \ show that they cannot tell the difference between duets generated by human-machine\ - \ improvisation using BachDuet and those generated by human-human improvisation.\ - \ Objective evaluation is also conducted to assess the degree to which these\ - \ improvisations adhere to common rules of counterpoint, showing promising results.},\n\ - \ address = {Birmingham, UK},\n author = {Benetatos, Christodoulos and VanderStel,\ - \ Joseph and Duan, Zhiyao},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813234},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {635--640},\n presentation-video = {https://youtu.be/wFGW0QzuPPk},\n\ - \ publisher = {Birmingham City University},\n title = {BachDuet: A Deep Learning\ - \ System for Human-Machine Counterpoint Improvisation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper125.pdf},\n\ - \ year = {2020}\n}\n" + ID: Shaw2016 + abstract: "This paper describes a long term, collaborative project Sound\nSpaces.\ + \ Within this project we creatively investigated various environments and\nbuilt\ + \ a collection of artworks in response to material gathered through a number\n\ + of practical field visits. Our responses were presented in numerous,\nidiosyncratic\ + \ ways and took shape through a number of concerted making\nactivities. The work\ + \ was conducted both in and with the public, allowing\nparticipants to inform\ + \ the creative decisions made throughout the project as well\nas experiencing\ + \ the building of the artworks. 
Within this essay we report on our\nprocess, presentation\ + \ and offer alternative methods for collecting material and\npresenting representations\ + \ of space. We describe the many responses made during\nour time and related these\ + \ to research concerns relevant to the NIME community.\nWe conclude with our findings\ + \ and, through the production of an annotated\nportfolio, offer our main emerging\ + \ themes as points of discussion. " + address: 'Brisbane, Australia' + author: Tim Shaw and Simon Bowen and John Bowers + bibtex: "@inproceedings{Shaw2016,\n abstract = {This paper describes a long term,\ + \ collaborative project Sound\nSpaces. Within this project we creatively investigated\ + \ various environments and\nbuilt a collection of artworks in response to material\ + \ gathered through a number\nof practical field visits. Our responses were presented\ + \ in numerous,\nidiosyncratic ways and took shape through a number of concerted\ + \ making\nactivities. The work was conducted both in and with the public, allowing\n\ + participants to inform the creative decisions made throughout the project as well\n\ + as experiencing the building of the artworks. Within this essay we report on our\n\ + process, presentation and offer alternative methods for collecting material and\n\ + presenting representations of space. We describe the many responses made during\n\ + our time and related these to research concerns relevant to the NIME community.\n\ + We conclude with our findings and, through the production of an annotated\nportfolio,\ + \ offer our main emerging themes as points of discussion. 
},\n address = {Brisbane,\ + \ Australia},\n author = {Tim Shaw and Simon Bowen and John Bowers},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176122},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {152--157},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Unfoldings: Multiple Explorations of Sound\ + \ and Space},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0031.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813234 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176122 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 635--640 - presentation-video: https://youtu.be/wFGW0QzuPPk - publisher: Birmingham City University - title: 'BachDuet: A Deep Learning System for Human-Machine Counterpoint Improvisation' - url: https://www.nime.org/proceedings/2020/nime2020_paper125.pdf - year: 2020 + pages: 152--157 + publisher: Queensland Conservatorium Griffith University + title: 'Unfoldings: Multiple Explorations of Sound and Space' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0031.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_13 - abstract: 'Because they break the physical link between gestures and sound, Digital - Musical Instruments offer countless opportunities for musical expression. For - the same reason however, they may hinder the audience experience, making the musician - contribution and expressiveness difficult to perceive. In order to cope with this - issue without altering the instruments, researchers and artists alike have designed - techniques to augment their performances with additional information, through - audio, haptic or visual modalities. 
These techniques have however only been designed - to offer a fixed level of information, without taking into account the variety - of spectators expertise and preferences. In this paper, we investigate the design, - implementation and effect on audience experience of visual augmentations with - controllable level of detail (LOD). We conduct a controlled experiment with 18 - participants, including novices and experts. Our results show contrasts in the - impact of LOD on experience and comprehension for experts and novices, and highlight - the diversity of usage of visual augmentations by spectators.' - address: 'Birmingham, UK' - author: 'Capra, Olivier and Berthaut, Florent and Grisoni, Laurent' - bibtex: "@inproceedings{NIME20_13,\n abstract = {Because they break the physical\ - \ link between gestures and sound, Digital Musical Instruments offer countless\ - \ opportunities for musical expression. For the same reason however, they may\ - \ hinder the audience experience, making the musician contribution and expressiveness\ - \ difficult to perceive. In order to cope with this issue without altering the\ - \ instruments, researchers and artists alike have designed techniques to augment\ - \ their performances with additional information, through audio, haptic or visual\ - \ modalities. These techniques have however only been designed to offer a fixed\ - \ level of information, without taking into account the variety of spectators\ - \ expertise and preferences. In this paper, we investigate the design, implementation\ - \ and effect on audience experience of visual augmentations with controllable\ - \ level of detail (LOD). We conduct a controlled experiment with 18 participants,\ - \ including novices and experts. 
Our results show contrasts in the impact of LOD\ - \ on experience and comprehension for experts and novices, and highlight the diversity\ - \ of usage of visual augmentations by spectators.},\n address = {Birmingham, UK},\n\ - \ author = {Capra, Olivier and Berthaut, Florent and Grisoni, Laurent},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813236},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {67--72},\n\ - \ presentation-video = {https://youtu.be/3hIGu9QDn4o},\n publisher = {Birmingham\ - \ City University},\n title = {All You Need Is LOD : Levels of Detail in Visual\ - \ Augmentations for the Audience},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper13.pdf},\n\ - \ year = {2020}\n}\n" + ID: Rieger2016 + abstract: "The Driftwood is a maneuverable sculptural instrument &\ncontroller.\ + \ Tactilely, it is a micro-terrain one can explore with the hands as\nwith the\ + \ ears. Closed circuit sensors, moving wooden parts and Piezo microphones\nare\ + \ discussed in the design phase alongside background and musical implementation\n\ + concepts. Electronics and nature converge in this instrument harmoniously\nreferencing\ + \ our changing world and environment. When engaging with the sonic\nsculpture\ + \ silent objects become audible and rest-wood is venerated. It is\nrevealed to\ + \ the musician interacting with Driftwood that our actions intervene\ndirectly\ + \ with issues relating to sustainability and the amount of value we place\non\ + \ the world we live in. Every scrap of wood was once a tree, Driftwood reminds\n\ + us of this in a multi-sensory playing experience. The Driftwood proposes a\nreinterpretation\ + \ of the process of music creation, awareness and expression." 
+ address: 'Brisbane, Australia' + author: Alexandra Rieger and Spencer Topel + bibtex: "@inproceedings{Rieger2016,\n abstract = {The Driftwood is a maneuverable\ + \ sculptural instrument &\ncontroller. Tactilely, it is a micro-terrain one can\ + \ explore with the hands as\nwith the ears. Closed circuit sensors, moving wooden\ + \ parts and Piezo microphones\nare discussed in the design phase alongside background\ + \ and musical implementation\nconcepts. Electronics and nature converge in this\ + \ instrument harmoniously\nreferencing our changing world and environment. When\ + \ engaging with the sonic\nsculpture silent objects become audible and rest-wood\ + \ is venerated. It is\nrevealed to the musician interacting with Driftwood that\ + \ our actions intervene\ndirectly with issues relating to sustainability and the\ + \ amount of value we place\non the world we live in. Every scrap of wood was once\ + \ a tree, Driftwood reminds\nus of this in a multi-sensory playing experience.\ + \ The Driftwood proposes a\nreinterpretation of the process of music creation,\ + \ awareness and expression.},\n address = {Brisbane, Australia},\n author = {Alexandra\ + \ Rieger and Spencer Topel},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176110},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {158--159},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Driftwood: Redefining\ + \ Sound Sculpture Controllers},\n track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0032.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813236 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176110 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 67--72 - presentation-video: 
https://youtu.be/3hIGu9QDn4o - publisher: Birmingham City University - title: 'All You Need Is LOD : Levels of Detail in Visual Augmentations for the Audience' - url: https://www.nime.org/proceedings/2020/nime2020_paper13.pdf - year: 2020 + pages: 158--159 + publisher: Queensland Conservatorium Griffith University + title: 'Driftwood: Redefining Sound Sculpture Controllers' + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper0032.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_14 - abstract: 'In this work we test the performance of multiple ESP32microcontrollers - used as WiFi sensor interfaces in the context of real-time interactive systems. - The number of devices from 1 to 13, and individual sending rates from 50 to 2300 - messages per second are tested to provide examples of various network load situations - that may resemble a performance configuration. The overall end-to-end latency - and bandwidth are measured as the basic performance metrics of interest. The results - show that a maximum message rate of 2300 Hz is possible on a 2.4 GHz network for - a single embedded device and decreases as the number of devices are added. During - testing it was possible to have up to 7 devices transmitting at 100 Hz while attaining - less than 10 ms latency, but performance degrades with increasing sending rates - and number of devices. Performance can also vary significantly from day to day - depending on network usage in a crowded environment.' - address: 'Birmingham, UK' - author: 'Wang, Johnty and Meneses, Eduardo and Wanderley, Marcelo' - bibtex: "@inproceedings{NIME20_14,\n abstract = {In this work we test the performance\ - \ of multiple ESP32microcontrollers used as WiFi sensor interfaces in the context\ - \ of real-time interactive systems. 
The number of devices from 1 to 13, and individual\ - \ sending rates from 50 to 2300 messages per second are tested to provide examples\ - \ of various network load situations that may resemble a performance configuration.\ - \ The overall end-to-end latency and bandwidth are measured as the basic performance\ - \ metrics of interest. The results show that a maximum message rate of 2300 Hz\ - \ is possible on a 2.4 GHz network for a single embedded device and decreases\ - \ as the number of devices are added. During testing it was possible to have up\ - \ to 7 devices transmitting at 100 Hz while attaining less than 10 ms latency,\ - \ but performance degrades with increasing sending rates and number of devices.\ - \ Performance can also vary significantly from day to day depending on network\ - \ usage in a crowded environment.},\n address = {Birmingham, UK},\n author = {Wang,\ - \ Johnty and Meneses, Eduardo and Wanderley, Marcelo},\n booktitle = {Proceedings\ + ID: Kleinberger2016 + abstract: "The following paper documents the prototype of a musical door that\n\ + interactively plays sounds, melodies, and sound textures when in use. We took\ + \ the\nnatural interactions people have with doors---grabbing and turning\nthe\ + \ knob and pushing and puling motions---and turned them into\nmusical activities.\ + \ The idea behind this project comes from the fact that the\nactivity of using\ + \ a door is almost always accompanied by a sound that is\ngenerally ignored by\ + \ the user. We believe that this sound can be considered\nmusically rich and expressive\ + \ because each door has specific sound\ncharacteristics and each person makes\ + \ it sound slightly different. By augmenting\nthe door to create an unexpected\ + \ sound, this project encourages us to listen to\nour daily lives with a musician's\ + \ critical ear, and reminds us of the musicality\nof our everyday activities." 
+ address: 'Brisbane, Australia' + author: Rebecca Kleinberger and Akito van Troyer + bibtex: "@inproceedings{Kleinberger2016,\n abstract = {The following paper documents\ + \ the prototype of a musical door that\ninteractively plays sounds, melodies,\ + \ and sound textures when in use. We took the\nnatural interactions people have\ + \ with doors---grabbing and turning\nthe knob and pushing and puling motions---and\ + \ turned them into\nmusical activities. The idea behind this project comes from\ + \ the fact that the\nactivity of using a door is almost always accompanied by\ + \ a sound that is\ngenerally ignored by the user. We believe that this sound can\ + \ be considered\nmusically rich and expressive because each door has specific\ + \ sound\ncharacteristics and each person makes it sound slightly different. By\ + \ augmenting\nthe door to create an unexpected sound, this project encourages\ + \ us to listen to\nour daily lives with a musician's critical ear, and reminds\ + \ us of the musicality\nof our everyday activities.},\n address = {Brisbane, Australia},\n\ + \ author = {Rebecca Kleinberger and Akito van Troyer},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813239},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {73--76},\n publisher = {Birmingham\ - \ City University},\n title = {The Scalability of WiFi for Mobile Embedded Sensor\ - \ Interfaces},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper14.pdf},\n\ - \ year = {2020}\n}\n" + \ doi = {10.5281/zenodo.1176052},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {160--161},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Dooremi: a Doorway to Music},\n track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0033.pdf},\n\ + \ year = {2016}\n}\n" booktitle: 
Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813239 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176052 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 73--76 - publisher: Birmingham City University - title: The Scalability of WiFi for Mobile Embedded Sensor Interfaces - url: https://www.nime.org/proceedings/2020/nime2020_paper14.pdf - year: 2020 + pages: 160--161 + publisher: Queensland Conservatorium Griffith University + title: 'Dooremi: a Doorway to Music' + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper0033.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_15 - abstract: 'Advanced musical cooperation, such as concurrent control of musical parameters - or sharing data between instruments,has previously been investigated using multi-user instruments or orchestras of identical instruments. In the case - of heterogeneous digital orchestras, where the instruments, interfaces, and control - gestures can be very different, a number of issues may impede such collaboration - opportunities. These include the lack of a standard method for sharing - data or control, the incompatibility of parameter types, and limited awareness of other musicians’ activity and instrument structure. As a result, most collaborations remain - limited to synchronising tempo or applying effects to audio outputs. In this paper - we present two interfaces for real-time group collaboration amongst musicians - with heterogeneous instruments. We conducted a qualitative study to investigate - how these interfaces impact musicians’ experience and their musical output, we - performed a thematic analysis of inter-views, and we analysed logs of interactions. 
From these - results we derive principles and guidelines for the design of advanced - collaboration systems for heterogeneous digital orchestras, namely Adapting (to) the System, - Support Development, Default to Openness, and Minimise Friction to Support - Expressivity.' - address: 'Birmingham, UK' - author: 'Berthaut, Florent and Dahl, Luke' - bibtex: "@inproceedings{NIME20_15,\n abstract = {Advanced musical cooperation, such\ - \ as concurrent control of musical parameters or sharing data between instruments,has\ - \ previously been investigated using multi-user instruments or orchestras\ - \ of identical instruments. In the case of heterogeneous digital orchestras,\ - \ where the instruments, interfaces, and control gestures can be very different,\ - \ a number of issues may impede such collaboration opportunities. These include\ - \ the lack of a standard method for sharing data or control, the incompatibility\ - \ of parameter types, and limited awareness of other musicians’ activity\ - \ and instrument structure. As a result, most collaborations remain\ - \ limited to synchronising tempo or applying effects to audio outputs. In this\ - \ paper we present two interfaces for real-time group collaboration amongst musicians\ - \ with heterogeneous instruments. We conducted a qualitative study to \ - \ investigate how these interfaces impact musicians’ experience and their musical\ - \ output, we performed a thematic analysis of inter-views, and we analysed\ - \ logs of interactions. 
From these results we derive principles and\ - \ guidelines for the design of advanced collaboration systems for heterogeneous\ - \ digital orchestras, namely Adapting (to) the System, Support Development,\ - \ Default to Openness, and Minimise Friction to Support Expressivity.},\n address\ - \ = {Birmingham, UK},\n author = {Berthaut, Florent and Dahl, Luke},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813241},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {77--82},\n\ - \ presentation-video = {https://youtu.be/jGpKkbWq_TY},\n publisher = {Birmingham\ - \ City University},\n title = {Adapting & Openness: Dynamics of Collaboration\ - \ Interfaces for Heterogeneous Digital Orchestras},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper15.pdf},\n\ - \ year = {2020}\n}\n" + ID: Normark2016 + abstract: "This paper describes how a classical instrument, the clarinet, can\n\ + be extended with modern technology to create a new and easy to use augmented\n\ + instrument. The paper describes the design process, technical details and how\ + \ a\nmusician can use the instrument. The clarinet bell is extended with sensor\n\ + technology in order to improve the ways the clarinet is traditionally played and\n\ + improve the performing artist's musical and performative expressions. New\nways\ + \ of performing music with a clarinet also opens up for novel ways of\ncomposing\ + \ musical pieces. The design is iterated in two versions with improved\nhardware\ + \ and form factor where everything is packaged into the clarinet bell. The\nclarinet\ + \ uses electronics that wirelessly sends sensor data to a computer that\nprocesses\ + \ a live audio feed via the software MAX 7 and plays it back via\nloudspeakers\ + \ on the stage. 
The extended clarinet provides several ways of\ntransforming audio\ + \ and also adds several ways of making performances more\nvisually interesting.\ + \ It is shown that this way of using sensor technology in a\ntraditional musical\ + \ instrument adds new dimensions to the performance and allows\ncreative persons\ + \ to express themselves in new ways as well as giving the audience\nan improved\ + \ experience. " + address: 'Brisbane, Australia' + author: 'Normark, Carl Jürgen and Peter Parnes and Robert Ek and Harald Andersson' + bibtex: "@inproceedings{Normark2016,\n abstract = {This paper describes how a classical\ + \ instrument, the clarinet, can\nbe extended with modern technology to create\ + \ a new and easy to use augmented\ninstrument. The paper describes the design\ + \ process, technical details and how a\nmusician can use the instrument. The clarinet\ + \ bell is extended with sensor\ntechnology in order to improve the ways the clarinet\ + \ is traditionally played and\nimprove the performing artist's musical and performative\ + \ expressions. New\nways of performing music with a clarinet also opens up for\ + \ novel ways of\ncomposing musical pieces. The design is iterated in two versions\ + \ with improved\nhardware and form factor where everything is packaged into the\ + \ clarinet bell. The\nclarinet uses electronics that wirelessly sends sensor data\ + \ to a computer that\nprocesses a live audio feed via the software MAX 7 and plays\ + \ it back via\nloudspeakers on the stage. The extended clarinet provides several\ + \ ways of\ntransforming audio and also adds several ways of making performances\ + \ more\nvisually interesting. It is shown that this way of using sensor technology\ + \ in a\ntraditional musical instrument adds new dimensions to the performance\ + \ and allows\ncreative persons to express themselves in new ways as well as giving\ + \ the audience\nan improved experience. 
},\n address = {Brisbane, Australia},\n\ + \ author = {Normark, Carl J\\''{u}rgen and Peter Parnes and Robert Ek and Harald\ + \ Andersson},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176090},\n isbn\ + \ = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {162--167},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {The extended clarinet},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0034.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813241 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176090 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 77--82 - presentation-video: https://youtu.be/jGpKkbWq_TY - publisher: Birmingham City University - title: 'Adapting & Openness: Dynamics of Collaboration Interfaces for Heterogeneous - Digital Orchestras' - url: https://www.nime.org/proceedings/2020/nime2020_paper15.pdf - year: 2020 - - -- ENTRYTYPE: inproceedings - ID: NIME20_16 - abstract: 'Music technology can provide persons who experience physical and/or intellectual - barriers using traditional musical instruments with a unique access to active - music making. This applies particularly but not exclusively to the so-called group - of people with physical and/or mental disabilities. This paper presents two Accessible - Digital Musical Instruments (ADMIs) that were specifically designed for the students - of a Special Educational Needs (SEN) school with a focus on intellectual disabilities. - With SnoeSky, we present an ADMI in the form of an interactive starry sky that - integrates into the Snoezel-Room. Here, users can ''play'' with ''melodic constellations'' - using a flashlight. 
SonicDive is an interactive installation that enables users - to explore a complex water soundscape through their movement inside a ball pool. - The underlying goal of both ADMIs was the promotion of self-efficacy experiences - while stimulating the users'' relaxation and activation. This paper reports on - the design process involving the users and their environment. In addition, it - describes some details of the technical implementaion of the ADMIs as well as - first indices for their effectiveness.' - address: 'Birmingham, UK' - author: 'Förster, Andreas and Komesker, Christina and Schnell, Norbert' - bibtex: "@inproceedings{NIME20_16,\n abstract = {Music technology can provide persons\ - \ who experience physical and/or intellectual barriers using traditional musical\ - \ instruments with a unique access to active music making. This applies particularly\ - \ but not exclusively to the so-called group of people with physical and/or mental\ - \ disabilities. This paper presents two Accessible Digital Musical Instruments\ - \ (ADMIs) that were specifically designed for the students of a Special Educational\ - \ Needs (SEN) school with a focus on intellectual disabilities. With SnoeSky,\ - \ we present an ADMI in the form of an interactive starry sky that integrates\ - \ into the Snoezel-Room. Here, users can 'play' with 'melodic constellations'\ - \ using a flashlight. SonicDive is an interactive installation that enables users\ - \ to explore a complex water soundscape through their movement inside a ball pool.\ - \ The underlying goal of both ADMIs was the promotion of self-efficacy experiences\ - \ while stimulating the users' relaxation and activation. This paper reports on\ - \ the design process involving the users and their environment. 
In addition, it\ - \ describes some details of the technical implementaion of the ADMIs as well as\ - \ first indices for their effectiveness.},\n address = {Birmingham, UK},\n author\ - \ = {Förster, Andreas and Komesker, Christina and Schnell, Norbert},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813243},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {83--88},\n\ - \ publisher = {Birmingham City University},\n title = {SnoeSky and SonicDive -\ - \ Design and Evaluation of Two Accessible Digital Musical Instruments for a SEN\ - \ School},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper16.pdf},\n\ - \ year = {2020}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.4813243 - editor: Romain Michon and Franziska Schroeder - issn: 2220-4806 - month: July - pages: 83--88 - publisher: Birmingham City University - title: SnoeSky and SonicDive - Design and Evaluation of Two Accessible Digital Musical - Instruments for a SEN School - url: https://www.nime.org/proceedings/2020/nime2020_paper16.pdf - year: 2020 + pages: 162--167 + publisher: Queensland Conservatorium Griffith University + title: The extended clarinet + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0034.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_17 - abstract: 'The NuiTrack IDE supports writing code for an active infrared camera - to track up to six bodies, with up to 25 target points on each person. The system - automatically assigns IDs to performers/users as they enter the tracking area, - but when occlusion of a performer occurs, or when a user exits and then re-enters - the tracking area, upon rediscovery of the user the system generates a new tracking - ID. 
Because of this any assigned and registered target tracking points for specific - users are lost, as are the linked abilities of that performer to control media - based on their movements. We describe a single camera system for overcoming this - problem by assigning IDs based on the colours worn by the performers, and then - using the colour tracking for updating and confirming identification when the - performer reappears after occlusion or upon re-entry. A video link is supplied - showing the system used for an interactive dance work with four dancers controlling - individual audio tracks. ' - address: 'Birmingham, UK' - author: 'Pritchard, Robert and Lavery, Ian' - bibtex: "@inproceedings{NIME20_17,\n abstract = {The NuiTrack IDE supports writing\ - \ code for an active infrared camera to track up to six bodies, with up to 25\ - \ target points on each person. The system automatically assigns IDs to performers/users\ - \ as they enter the tracking area, but when occlusion of a performer occurs, or\ - \ when a user exits and then re-enters the tracking area, upon rediscovery of\ - \ the user the system generates a new tracking ID. Because of this any assigned\ - \ and registered target tracking points for specific users are lost, as are the\ - \ linked abilities of that performer to control media based on their movements.\ - \ We describe a single camera system for overcoming this problem by assigning\ - \ IDs based on the colours worn by the performers, and then using the colour tracking\ - \ for updating and confirming identification when the performer reappears after\ - \ occlusion or upon re-entry. A video link is supplied showing the system used\ - \ for an interactive dance work with four dancers controlling individual audio\ - \ tracks. 
},\n address = {Birmingham, UK},\n author = {Pritchard, Robert and Lavery,\ - \ Ian},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.4813245},\n editor = {Romain\ - \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ - \ = {89--92},\n publisher = {Birmingham City University},\n title = {Inexpensive\ - \ Colour Tracking to Overcome Performer ID Loss },\n url = {https://www.nime.org/proceedings/2020/nime2020_paper17.pdf},\n\ - \ year = {2020}\n}\n" + ID: Nagashim2016 + abstract: |- + This is a report of a novel tactile musical instrument. This + instrument is called Multi Rubbing Tactile Instrument (MRTI2015), using ten + pieces of PAW sensor produced by the RT corporation. Previous research was + focused on untouchable instruments, but this approach is fully tactile---rub + and touch. The ten PAW sensors are assigned on the surface of the egg-like + plastic case to fit the ten fingers grasping the instrument. The controller is + mbed (NucleoF401RE), and it communicates with the host PC via high speed serial + (115200bps) by an MIDI-like protocol. Inside the egg-like plastic case, this + instrument has eight blue-LEDs which are controlled by the host in order to + display the grasping nuances. The prototype of this instrument contains realtime + visualizing system with chaotic graphics by Open-GL. I will report on the + principle of the sensor, and details about realizing the new system. + address: 'Brisbane, Australia' + author: Yoichi Nagashima + bibtex: "@inproceedings{Nagashim2016,\n abstract = {This is a report of a novel\ + \ tactile musical instrument. This\ninstrument is called Multi Rubbing Tactile\ + \ Instrument (MRTI2015), using ten\npieces of PAW sensor produced by the RT corporation.\ + \ Previous research was\nfocused on untouchable instruments, but this approach\ + \ is fully tactile---rub\nand touch. 
The ten PAW sensors are assigned on the surface\ + \ of the egg-like\nplastic case to fit the ten fingers grasping the instrument.\ + \ The controller is\nmbed (NucleoF401RE), and it communicates with the host PC\ + \ via high speed serial\n(115200bps) by an MIDI-like protocol. Inside the egg-like\ + \ plastic case, this\ninstrument has eight blue-LEDs which are controlled by the\ + \ host in order to\ndisplay the grasping nuances. The prototype of this instrument\ + \ contains realtime\nvisualizing system with chaotic graphics by Open-GL. I will\ + \ report on the\nprinciple of the sensor, and details about realizing the new\ + \ system.},\n address = {Brisbane, Australia},\n author = {Yoichi Nagashima},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176084},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {168--169},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Multi Rubbing Tactile Instrument},\n track\ + \ = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0035.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813245 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176084 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 89--92 - publisher: Birmingham City University - title: 'Inexpensive Colour Tracking to Overcome Performer ID Loss ' - url: https://www.nime.org/proceedings/2020/nime2020_paper17.pdf - year: 2020 + pages: 168--169 + publisher: Queensland Conservatorium Griffith University + title: Multi Rubbing Tactile Instrument + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper0035.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_18 - abstract: 'In this study, an analog synthesizer module using Aloe vera was 
proposed - as a biomemristor. The recent revival of analog modular synthesizers explores - novel possibilities of sounds based on unconventional technologies such as integrating - biological forms and structures into traditional circuits. A biosignal has been - used in experimental music as the material for composition. However, the recent - development of a biocomputor using a slime mold biomemristor expands the use of - biomemristors in music. Based on prior research, characteristics of Aloe vera - as a biomemristor were electrically measured, and two types of analog synthesizer - modules were developed, current to voltage converter and current spike to voltage - converter. For this application, a live performance was conducted with the CVC - module and the possibilities as a new interface for musical expression were examined.' - address: 'Birmingham, UK' - author: 'Nishida, Kiyu and jo, kazuhiro' - bibtex: "@inproceedings{NIME20_18,\n abstract = {In this study, an analog synthesizer\ - \ module using Aloe vera was proposed as a biomemristor. The recent revival of\ - \ analog modular synthesizers explores novel possibilities of sounds based on\ - \ unconventional technologies such as integrating biological forms and structures\ - \ into traditional circuits. A biosignal has been used in experimental music as\ - \ the material for composition. However, the recent development of a biocomputor\ - \ using a slime mold biomemristor expands the use of biomemristors in music. Based\ - \ on prior research, characteristics of Aloe vera as a biomemristor were electrically\ - \ measured, and two types of analog synthesizer modules were developed, current\ - \ to voltage converter and current spike to voltage converter. 
For this application,\ - \ a live performance was conducted with the CVC module and the possibilities as\ - \ a new interface for musical expression were examined.},\n address = {Birmingham,\ - \ UK},\n author = {Nishida, Kiyu and jo, kazuhiro},\n booktitle = {Proceedings\ + ID: Zhang2016 + abstract: "This paper presents a web-based application enabling audiences to\ncollaboratively\ + \ contribute to the creative process during live music\nperformances. The system\ + \ aims at enhancing audience engagement and creating new\nforms of live music\ + \ experiences. Interaction between audience and performers is\nmade possible through\ + \ a client/server architecture enabling bidirectional\ncommunication of creative\ + \ data. Audience members can vote for pre-determined\nmusical attributes using\ + \ a smartphone-friendly and cross-platform web\napplication. The system gathers\ + \ audience members' votes and provide feedback\nthrough visualisations that can\ + \ be tailored for specific needs. In order to\nsupport multiple performers and\ + \ large audiences, automatic audience-to-performer\ngroupings are handled by the\ + \ application. The framework was applied to support\nlive interactive musical\ + \ improvisations where creative roles are shared amongst\naudience and performers\ + \ (Open Symphony). Qualitative analyses of user surveys\nhighlighted very positive\ + \ feedback related to themes such as engagement and\ncreativity and also identified\ + \ further design challenges around audience sense of\ncontrol and latency." + address: 'Brisbane, Australia' + author: Leshao Zhang and Yongmeng Wu and Mathieu Barthet + bibtex: "@inproceedings{Zhang2016,\n abstract = {This paper presents a web-based\ + \ application enabling audiences to\ncollaboratively contribute to the creative\ + \ process during live music\nperformances. The system aims at enhancing audience\ + \ engagement and creating new\nforms of live music experiences. 
Interaction between\ + \ audience and performers is\nmade possible through a client/server architecture\ + \ enabling bidirectional\ncommunication of creative data. Audience members can\ + \ vote for pre-determined\nmusical attributes using a smartphone-friendly and\ + \ cross-platform web\napplication. The system gathers audience members' votes\ + \ and provide feedback\nthrough visualisations that can be tailored for specific\ + \ needs. In order to\nsupport multiple performers and large audiences, automatic\ + \ audience-to-performer\ngroupings are handled by the application. The framework\ + \ was applied to support\nlive interactive musical improvisations where creative\ + \ roles are shared amongst\naudience and performers (Open Symphony). Qualitative\ + \ analyses of user surveys\nhighlighted very positive feedback related to themes\ + \ such as engagement and\ncreativity and also identified further design challenges\ + \ around audience sense of\ncontrol and latency.},\n address = {Brisbane, Australia},\n\ + \ author = {Leshao Zhang and Yongmeng Wu and Mathieu Barthet},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813249},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {93--96},\n presentation-video\ - \ = {https://youtu.be/bZaCd6igKEA},\n publisher = {Birmingham City University},\n\ - \ title = {Modules for analog synthesizers using Aloe vera biomemristor},\n url\ - \ = {https://www.nime.org/proceedings/2020/nime2020_paper18.pdf},\n year = {2020}\n\ - }\n" + \ doi = {10.5281/zenodo.1176147},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {170--175},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {A Web Application for Audience Participation in Live Music Performance:\ + \ The Open Symphony Use Case},\n track = {Papers},\n url = 
{http://www.nime.org/proceedings/2016/nime2016_paper0036.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813249 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176147 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 93--96 - presentation-video: https://youtu.be/bZaCd6igKEA - publisher: Birmingham City University - title: Modules for analog synthesizers using Aloe vera biomemristor - url: https://www.nime.org/proceedings/2020/nime2020_paper18.pdf - year: 2020 + pages: 170--175 + publisher: Queensland Conservatorium Griffith University + title: 'A Web Application for Audience Participation in Live Music Performance: + The Open Symphony Use Case' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0036.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_19 - abstract: 'On several acoustic and electromechanical keyboard instruments, the produced - sound is not always strictly dependent exclusively on a discrete key velocity - parameter, and minute gesture details can affect the final sonic result. By contrast, - subtle variations in articulation have a relatively limited effect on the sound - generation when the keyboard controller uses the MIDI standard, used in the vast - majority of digital keyboards. In this paper we present an embedded platform that - can generate sound in response to a controller capable of sensing the continuous - position of keys on a keyboard. This platform enables the creation of keyboard-based - DMIs which allow for a richer set of interaction gestures than would be possible - through a MIDI keyboard, which we demonstrate through two example instruments. - First, in a Hammond organ emulator, the sensing device allows to recreate the - nuances of the interaction with the original instrument in a way a velocity-based - MIDI controller could not. 
Second, a nonlinear waveguide flute synthesizer is - shown as an example of the expressive capabilities that a continuous-keyboard - controller opens up in the creation of new keyboard-based DMIs.' - address: 'Birmingham, UK' - author: 'Moro, Giulio and McPherson, Andrew' - bibtex: "@inproceedings{NIME20_19,\n abstract = {On several acoustic and electromechanical\ - \ keyboard instruments, the produced sound is not always strictly dependent exclusively\ - \ on a discrete key velocity parameter, and minute gesture details can affect\ - \ the final sonic result. By contrast, subtle variations in articulation have\ - \ a relatively limited effect on the sound generation when the keyboard controller\ - \ uses the MIDI standard, used in the vast majority of digital keyboards. In this\ - \ paper we present an embedded platform that can generate sound in response to\ - \ a controller capable of sensing the continuous position of keys on a keyboard.\ - \ This platform enables the creation of keyboard-based DMIs which allow for a\ - \ richer set of interaction gestures than would be possible through a MIDI keyboard,\ - \ which we demonstrate through two example instruments. First, in a Hammond organ\ - \ emulator, the sensing device allows to recreate the nuances of the interaction\ - \ with the original instrument in a way a velocity-based MIDI controller could\ - \ not. 
Second, a nonlinear waveguide flute synthesizer is shown as an example\ - \ of the expressive capabilities that a continuous-keyboard controller opens up\ - \ in the creation of new keyboard-based DMIs.},\n address = {Birmingham, UK},\n\ - \ author = {Moro, Giulio and McPherson, Andrew},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.4813253},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {97--102},\n presentation-video\ - \ = {https://youtu.be/Y137M9UoKKg},\n publisher = {Birmingham City University},\n\ - \ title = {A platform for low-latency continuous keyboard sensing and sound generation},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper19.pdf},\n year =\ - \ {2020}\n}\n" + ID: CarvalhoJunior2016 + abstract: |- + Cloud services allow musicians and developers to build audience + participation software with minimal network configuration for audience and no + need for server-side development. In this paper we discuss how a cloud service + supported the audience participation music performance, Crowd in C[loud], which + enables audience participation on a large scale using the audience audience's + smartphones. + We present the detail of the cloud service technology and an analysis of the + network transaction data regarding the performance. + This helps us to understand the nature of cloud-based audience participation + pieces based on the characteristics of a performance reality and provides cues + about the technology's scalability. + address: 'Brisbane, Australia' + author: Antonio Deusany de Carvalho Junior and Sang Won Lee and Georg Essl + bibtex: "@inproceedings{CarvalhoJunior2016,\n abstract = {Cloud services allow musicians\ + \ and developers to build audience\nparticipation software with minimal network\ + \ configuration for audience and no\nneed for server-side development. 
In this\ + \ paper we discuss how a cloud service\nsupported the audience participation music\ + \ performance, Crowd in C[loud], which\nenables audience participation on a large\ + \ scale using the audience audience's\nsmartphones.\nWe present the detail of\ + \ the cloud service technology and an analysis of the\nnetwork transaction data\ + \ regarding the performance.\nThis helps us to understand the nature of cloud-based\ + \ audience participation\npieces based on the characteristics of a performance\ + \ reality and provides cues\nabout the technology's scalability.},\n address =\ + \ {Brisbane, Australia},\n author = {Antonio Deusany de Carvalho Junior and Sang\ + \ Won Lee and Georg Essl},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176008},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {176--181},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Understanding\ + \ Cloud Support for the Audience Participation Concert Performance of Crowd in\ + \ C[loud]},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0037.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813253 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176008 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 97--102 - presentation-video: https://youtu.be/Y137M9UoKKg - publisher: Birmingham City University - title: A platform for low-latency continuous keyboard sensing and sound generation - url: https://www.nime.org/proceedings/2020/nime2020_paper19.pdf - year: 2020 + pages: 176--181 + publisher: Queensland Conservatorium Griffith University + title: 'Understanding Cloud Support for the Audience Participation Concert Performance + of Crowd in C[loud]' + track: Papers + url: 
http://www.nime.org/proceedings/2016/nime2016_paper0037.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_2 - abstract: 'Excello is a spreadsheet-based music composition and programming environment. - We co-developed Excello with feedback from 21 musicians at varying levels of musical - and computing experience. We asked: can the spreadsheet interface be used for - programmatic music creation? Our design process encountered questions such as - how time should be represented, whether amplitude and octave should be encoded - as properties of individual notes or entire phrases, and how best to leverage - standard spreadsheet features, such as formulae and copy-paste. We present the - user-centric rationale for our current design, and report a user study suggesting - that Excello''s notation retains similar cognitive dimensions to conventional - music composition tools, while allowing the user to write substantially complex - programmatic music.' - address: 'Birmingham, UK' - author: 'Sarkar, Advait and Mattinson, Henry' - bibtex: "@inproceedings{NIME20_2,\n abstract = {Excello is a spreadsheet-based music\ - \ composition and programming environment. We co-developed Excello with feedback\ - \ from 21 musicians at varying levels of musical and computing experience. We\ - \ asked: can the spreadsheet interface be used for programmatic music creation?\ - \ Our design process encountered questions such as how time should be represented,\ - \ whether amplitude and octave should be encoded as properties of individual notes\ - \ or entire phrases, and how best to leverage standard spreadsheet features, such\ - \ as formulae and copy-paste. 
We present the user-centric rationale for our current\ - \ design, and report a user study suggesting that Excello's notation retains similar\ - \ cognitive dimensions to conventional music composition tools, while allowing\ - \ the user to write substantially complex programmatic music.},\n address = {Birmingham,\ - \ UK},\n author = {Sarkar, Advait and Mattinson, Henry},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813256},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {11--16},\n publisher = {Birmingham\ - \ City University},\n title = {Excello: exploring spreadsheets for music composition},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper2.pdf},\n year =\ - \ {2020}\n}\n" + ID: Wang2016 + abstract: |- + This article presents observations and strategies for designing + game-like elements for expressive mobile musical interactions. The designs of + several popular commercial mobile music instruments are discussed and compared, + along with the different ways they integrate musical information and game-like + elements. In particular, issues of designing goals, rules, and interactions are + balanced with articulating expressiveness. These experiences aim to invite and + engage users with game design while maintaining and encouraging open-ended + musical expression and exploration. A set of observations is derived, leading to + a broader design motivation and philosophy. + address: 'Brisbane, Australia' + author: Ge Wang + bibtex: "@inproceedings{Wang2016,\n abstract = {This article presents observations\ + \ and strategies for designing\ngame-like elements for expressive mobile musical\ + \ interactions. The designs of\nseveral popular commercial mobile music instruments\ + \ are discussed and compared,\nalong with the different ways they integrate musical\ + \ information and game-like\nelements. 
In particular, issues of designing goals,\ + \ rules, and interactions are\nbalanced with articulating expressiveness. These\ + \ experiences aim to invite and\nengage users with game design while maintaining\ + \ and encouraging open-ended\nmusical expression and exploration. A set of observations\ + \ is derived, leading to\na broader design motivation and philosophy.},\n address\ + \ = {Brisbane, Australia},\n author = {Ge Wang},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176141},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {182--187},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Game Design for Expressive Mobile Music},\n track = {Papers},\n url\ + \ = {http://www.nime.org/proceedings/2016/nime2016_paper0038.pdf},\n year = {2016}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813256 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176141 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 11--16 - publisher: Birmingham City University - title: 'Excello: exploring spreadsheets for music composition' - url: https://www.nime.org/proceedings/2020/nime2020_paper2.pdf - year: 2020 + pages: 182--187 + publisher: Queensland Conservatorium Griffith University + title: Game Design for Expressive Mobile Music + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0038.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_20 - abstract: 'In NIME design, thorough attention has been devoted to feedback modalities, - including auditory, visual and haptic feedback. How the performer executes the - gestures to achieve a sound on an instrument, by contrast, appears to be less - examined. 
Previous research showed that auditory imagery, or the ability to hear - or recreate sounds in the mind even when no audible sound is present, is essential - to the sensorimotor control involved in playing an instrument. In this paper, - we enquire whether auditory imagery can also help to support skill transfer between - musical instruments resulting in possible implications for new instrument design. - To answer this question, we performed two experimental studies on pitch accuracy - and fluency where professional violinists were asked to play a modified violin. - Results showed altered or even possibly irrelevant auditory feedback on a modified - violin does not appear to be a significant impediment to performance. However, - performers need to have coherent imagery of what they want to do, and the sonic - outcome needs to be coupled to the motor program to achieve it. This finding shows - that the design lens should be shifted from a direct feedback model of instrumental - playing toward a model where imagery guides the playing process. This result is - in agreement with recent research on skilled sensorimotor control that highlights - the value of feedforward anticipation in embodied musical performance. It is also - of primary importance for the design of new instruments: new sounds that cannot - easily be imagined and that are not coupled to a motor program are not likely - to be easily performed on the instrument.' - address: 'Birmingham, UK' - author: 'Guidi, Andrea and Morreale, Fabio and McPherson, Andrew' - bibtex: "@inproceedings{NIME20_20,\n abstract = {In NIME design, thorough attention\ - \ has been devoted to feedback modalities, including auditory, visual and haptic\ - \ feedback. How the performer executes the gestures to achieve a sound on an instrument,\ - \ by contrast, appears to be less examined. 
Previous research showed that auditory\ - \ imagery, or the ability to hear or recreate sounds in the mind even when no\ - \ audible sound is present, is essential to the sensorimotor control involved\ - \ in playing an instrument. In this paper, we enquire whether auditory imagery\ - \ can also help to support skill transfer between musical instruments resulting\ - \ in possible implications for new instrument design. To answer this question,\ - \ we performed two experimental studies on pitch accuracy and fluency where professional\ - \ violinists were asked to play a modified violin. Results showed altered or even\ - \ possibly irrelevant auditory feedback on a modified violin does not appear to\ - \ be a significant impediment to performance. However, performers need to have\ - \ coherent imagery of what they want to do, and the sonic outcome needs to be\ - \ coupled to the motor program to achieve it. This finding shows that the design\ - \ lens should be shifted from a direct feedback model of instrumental playing\ - \ toward a model where imagery guides the playing process. This result is in agreement\ - \ with recent research on skilled sensorimotor control that highlights the value\ - \ of feedforward anticipation in embodied musical performance. 
It is also of primary\ - \ importance for the design of new instruments: new sounds that cannot easily\ - \ be imagined and that are not coupled to a motor program are not likely to be\ - \ easily performed on the instrument.},\n address = {Birmingham, UK},\n author\ - \ = {Guidi, Andrea and Morreale, Fabio and McPherson, Andrew},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813260},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {103--108},\n presentation-video\ - \ = {https://youtu.be/yK7Tg1kW2No},\n publisher = {Birmingham City University},\n\ - \ title = {Design for auditory imagery: altering instruments to explore performer\ - \ fluency},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper20.pdf},\n\ - \ year = {2020}\n}\n" + ID: Banas2016 + abstract: "An auditory game has been developed as a part of research in\nWavefield\ + \ Synthesis. In order to design and implement this game, a number of\ntechnologies\ + \ have been incorporated in the development process. By pairing motion\ncapture\ + \ with a WiiMote new dimension of movement input was achieved.\nWe present an\ + \ evaluation study where the game was assessed." + address: 'Brisbane, Australia' + author: Jan Banas and Razvan Paisa and Iakovos Vogiatzoglou and Francesco Grani + and Stefania Serafin + bibtex: "@inproceedings{Banas2016,\n abstract = {An auditory game has been developed\ + \ as a part of research in\nWavefield Synthesis. In order to design and implement\ + \ this game, a number of\ntechnologies have been incorporated in the development\ + \ process. 
By pairing motion\ncapture with a WiiMote new dimension of movement\ + \ input was achieved.\nWe present an evaluation study where the game was assessed.},\n\ + \ address = {Brisbane, Australia},\n author = {Jan Banas and Razvan Paisa and\ + \ Iakovos Vogiatzoglou and Francesco Grani and Stefania Serafin},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1175972},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {188--193},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Design and evaluation of a gesture driven wave\ + \ field synthesis auditory game},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0039.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813260 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175972 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 103--108 - presentation-video: https://youtu.be/yK7Tg1kW2No - publisher: Birmingham City University - title: 'Design for auditory imagery: altering instruments to explore performer fluency' - url: https://www.nime.org/proceedings/2020/nime2020_paper20.pdf - year: 2020 + pages: 188--193 + publisher: Queensland Conservatorium Griffith University + title: Design and evaluation of a gesture driven wave field synthesis auditory game + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0039.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_21 - abstract: 'In this paper, we introduce the concept of VR Open Scores: aleatoric - score-based virtual scenarios where an aleatoric score is embedded in a virtual - environment. This idea builds upon the notion of graphic scores and composed instrument, - and apply them in a new context. 
Our proposal also explores possible parallels - between open meaning in interaction design, and aleatoric score, conceptualized - as Open Work by the Italian philosopher Umberto Eco. Our approach has two aims. - The first aim is to create an environment where users can immerse themselves in - the visual elements of a score while listening to the corresponding music. The - second aim is to facilitate users to develop a personal relationship with both - the system and the score. To achieve those aims, as a practical implementation - of our proposed concept, we developed two immersive scenarios: a 360º video and - an interactive space. We conclude presenting how our design aims were accomplished - in the two scenarios, and describing positive and negative elements of our implementations.' - address: 'Birmingham, UK' - author: 'Masu, Raul and Bala, Paulo and Ahmad, Muhammad and Correia, Nuno N. and - Nisi, Valentina and Nunes, Nuno and Romão, Teresa' - bibtex: "@inproceedings{NIME20_21,\n abstract = {In this paper, we introduce the\ - \ concept of VR Open Scores: aleatoric score-based virtual scenarios where an\ - \ aleatoric score is embedded in a virtual environment. This idea builds upon\ - \ the notion of graphic scores and composed instrument, and apply them in a new\ - \ context. Our proposal also explores possible parallels between open meaning\ - \ in interaction design, and aleatoric score, conceptualized as Open Work by the\ - \ Italian philosopher Umberto Eco. Our approach has two aims. The first aim is\ - \ to create an environment where users can immerse themselves in the visual elements\ - \ of a score while listening to the corresponding music. The second aim is to\ - \ facilitate users to develop a personal relationship with both the system and\ - \ the score. To achieve those aims, as a practical implementation of our proposed\ - \ concept, we developed two immersive scenarios: a 360º video and an interactive\ - \ space. 
We conclude presenting how our design aims were accomplished in the two\ - \ scenarios, and describing positive and negative elements of our implementations.},\n\ - \ address = {Birmingham, UK},\n author = {Masu, Raul and Bala, Paulo and Ahmad,\ - \ Muhammad and Correia, Nuno N. and Nisi, Valentina and Nunes, Nuno and Romão,\ - \ Teresa},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.4813262},\n editor = {Romain\ - \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ - \ = {109--114},\n presentation-video = {https://youtu.be/JSM6Rydz7iE},\n publisher\ - \ = {Birmingham City University},\n title = {VR Open Scores: Scores as Inspiration\ - \ for VR Scenarios},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper21.pdf},\n\ - \ year = {2020}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.4813262 - editor: Romain Michon and Franziska Schroeder - issn: 2220-4806 - month: July - pages: 109--114 - presentation-video: https://youtu.be/JSM6Rydz7iE - publisher: Birmingham City University - title: 'VR Open Scores: Scores as Inspiration for VR Scenarios' - url: https://www.nime.org/proceedings/2020/nime2020_paper21.pdf - year: 2020 - - -- ENTRYTYPE: inproceedings - ID: NIME20_22 - abstract: 'The project takes a Universal Design approach to exploring the possibility - of creating a software platform to facilitate a Networked Ensemble for Disabled - musicians. In accordance with the Nothing About Us Without Us (Charlton, 1998) - principle I worked with a group of 15 professional musicians who are also disabled. - The group gave interviews as to their perspectives and needs around networked - music practices and this data was then analysed to look at how software design - could be developed to make it more accessible. 
We also identified key messages - for the wider design of digital musical instrument makers, performers and event - organisers to improve practice around working with and for disabled musicians. ' - address: 'Birmingham, UK' - author: 'Skuse, Amble H C and Knotts, Shelly' - bibtex: "@inproceedings{NIME20_22,\n abstract = {The project takes a Universal Design\ - \ approach to exploring the possibility of creating a software platform to facilitate\ - \ a Networked Ensemble for Disabled musicians. In accordance with the Nothing\ - \ About Us Without Us (Charlton, 1998) principle I worked with a group of 15 professional\ - \ musicians who are also disabled. The group gave interviews as to their perspectives\ - \ and needs around networked music practices and this data was then analysed to\ - \ look at how software design could be developed to make it more accessible. We\ - \ also identified key messages for the wider design of digital musical instrument\ - \ makers, performers and event organisers to improve practice around working with\ - \ and for disabled musicians. 
},\n address = {Birmingham, UK},\n author = {Skuse,\ - \ Amble H C and Knotts, Shelly},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813266},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {115--120},\n presentation-video = {https://youtu.be/m4D4FBuHpnE},\n\ - \ publisher = {Birmingham City University},\n title = {Creating an Online Ensemble\ - \ for Home Based Disabled Musicians: Disabled Access and Universal Design - why\ - \ disabled people must be at the heart of developing technology.},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper22.pdf},\n\ - \ year = {2020}\n}\n" + ID: Baytas2016 + abstract: |- + In this paper, we investigate how watching a live-sequenced + electronic music performance, compared to merely hearing the music, contributes + to spectators' experiences of tension. We also explore the role of the + performers' effective and ancillary gestures in conveying tension, when they can + be seen. To this end, we conducted an experiment where 30 participants heard, + saw, or both heard and saw a live-sequenced techno music performance recording + while they produced continuous judgments on their experience of tension. Eye + tracking data was also recorded from participants who saw the visuals, to reveal + aspects of the performance that influenced their tension judgments. We analysed + the data to explore how auditory and visual components and the performer's + movements contribute to spectators' experience of tension. Our results show that + their perception of emotional intensity is consistent across hearing and sight, + suggesting that gestures in live-sequencing can be a medium + for expressive performance. 
+ address: 'Brisbane, Australia' + author: 'Baytas, Mehmet Aydin and Tilbe Goksun and Oguzhan Ozcan' + bibtex: "@inproceedings{Baytas2016,\n abstract = {In this paper, we investigate\ + \ how watching a live-sequenced\nelectronic music performance, compared to merely\ + \ hearing the music, contributes\nto spectators' experiences of tension. We also\ + \ explore the role of the\nperformers' effective and ancillary gestures in conveying\ + \ tension, when they can\nbe seen. To this end, we conducted an experiment where\ + \ 30 participants heard,\nsaw, or both heard and saw a live-sequenced techno music\ + \ performance recording\nwhile they produced continuous judgments on their experience\ + \ of tension. Eye\ntracking data was also recorded from participants who saw the\ + \ visuals, to reveal\naspects of the performance that influenced their tension\ + \ judgments. We analysed\nthe data to explore how auditory and visual components\ + \ and the performer's\nmovements contribute to spectators' experience of tension.\ + \ Our results show that\ntheir perception of emotional intensity is consistent\ + \ across hearing and sight,\nsuggesting that gestures in live-sequencing can be\ + \ a medium\nfor expressive performance.},\n address = {Brisbane, Australia},\n\ + \ author = {Baytas, Mehmet Aydin and Tilbe Goksun and Oguzhan Ozcan},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1175978},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {194--199},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {The Perception of Live-sequenced Electronic\ + \ Music via Hearing and Sight},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0040.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813266 - editor: Romain 
Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175978 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 115--120 - presentation-video: https://youtu.be/m4D4FBuHpnE - publisher: Birmingham City University - title: 'Creating an Online Ensemble for Home Based Disabled Musicians: Disabled - Access and Universal Design - why disabled people must be at the heart of developing - technology.' - url: https://www.nime.org/proceedings/2020/nime2020_paper22.pdf - year: 2020 + pages: 194--199 + publisher: Queensland Conservatorium Griffith University + title: The Perception of Live-sequenced Electronic Music via Hearing and Sight + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0040.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_23 - abstract: 'As virtual reality (VR) continues to gain prominence as a medium for - artistic expression, a growing number of projects explore the use of VR for musical - interaction design. In this paper, we discuss the concept of VIMEs (Virtual Interfaces - for Musical Expression) through four case studies that explore different aspects - of musical interactions in virtual environments. We then describe a user study - designed to evaluate these VIMEs in terms of various usability considerations, - such as immersion, perception of control, learnability and physical effort. We - offer the results of the study, articulating the relationship between the design - of a VIME and the various performance behaviors observed among its users. Finally, - we discuss how these results, combined with recent developments in VR technology, - can inform the design of new VIMEs.' - address: 'Birmingham, UK' - author: 'Çamcı, Anıl and Vilaplana, Matias and Wang, Ruth' - bibtex: "@inproceedings{NIME20_23,\n abstract = {As virtual reality (VR) continues\ - \ to gain prominence as a medium for artistic expression, a growing number of\ - \ projects explore the use of VR for musical interaction design. 
In this paper,\ - \ we discuss the concept of VIMEs (Virtual Interfaces for Musical Expression)\ - \ through four case studies that explore different aspects of musical interactions\ - \ in virtual environments. We then describe a user study designed to evaluate\ - \ these VIMEs in terms of various usability considerations, such as immersion,\ - \ perception of control, learnability and physical effort. We offer the results\ - \ of the study, articulating the relationship between the design of a VIME and\ - \ the various performance behaviors observed among its users. Finally, we discuss\ - \ how these results, combined with recent developments in VR technology, can inform\ - \ the design of new VIMEs.},\n address = {Birmingham, UK},\n author = {Çamcı,\ - \ Anıl and Vilaplana, Matias and Wang, Ruth},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.4813268},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {121--126},\n publisher = {Birmingham\ - \ City University},\n title = {Exploring the Affordances of VR for Musical Interaction\ - \ Design with VIMEs},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper23.pdf},\n\ - \ year = {2020}\n}\n" + ID: Bin2016 + abstract: "This paper explores the roles of technical and musical familiarity\n\ + in shaping audience response to digital musical instrument (DMI) performances.\ + \ In\nan audience study conducted during an evening concert, we examined two primary\n\ + questions: first, whether a deeper understanding of how a DMI works increases\ + \ an\naudience's enjoyment and interest in the performance; and second, given\ + \ the same\nDMI and same performer, whether playing in a conventional (vernacular)\ + \ versus an\nexperimental musical style affects an audience's response. 
We held\ + \ a concert in\nwhich two DMI creator-performers each played two pieces in differing\ + \ styles.\nBefore the concert, each half the 64-person audience was given a technical\n\ + explanation of one of the instruments. Results showed that receiving an\nexplanation\ + \ increased the reported understanding of that instrument, but had no\neffect\ + \ on either the reported level of interest or enjoyment. On the other hand,\n\ + performances in experimental versus conventional style on the same instrument\n\ + received widely divergent audience responses. We discuss implications of these\n\ + findings for DMI design." + address: 'Brisbane, Australia' + author: S. Astrid Bin and Nick Bryan-Kinns and Andrew P. McPherson + bibtex: "@inproceedings{Bin2016,\n abstract = {This paper explores the roles of\ + \ technical and musical familiarity\nin shaping audience response to digital musical\ + \ instrument (DMI) performances. In\nan audience study conducted during an evening\ + \ concert, we examined two primary\nquestions: first, whether a deeper understanding\ + \ of how a DMI works increases an\naudience's enjoyment and interest in the performance;\ + \ and second, given the same\nDMI and same performer, whether playing in a conventional\ + \ (vernacular) versus an\nexperimental musical style affects an audience's response.\ + \ We held a concert in\nwhich two DMI creator-performers each played two pieces\ + \ in differing styles.\nBefore the concert, each half the 64-person audience was\ + \ given a technical\nexplanation of one of the instruments. Results showed that\ + \ receiving an\nexplanation increased the reported understanding of that instrument,\ + \ but had no\neffect on either the reported level of interest or enjoyment. On\ + \ the other hand,\nperformances in experimental versus conventional style on the\ + \ same instrument\nreceived widely divergent audience responses. 
We discuss implications\ + \ of these\nfindings for DMI design.},\n address = {Brisbane, Australia},\n author\ + \ = {S. Astrid Bin and Nick Bryan-Kinns and Andrew P. McPherson},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1175994},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {200--205},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Skip the Pre-Concert Demo: How Technical Familiarity\ + \ and Musical Style Affect Audience Response},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0041.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813268 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175994 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 121--126 - publisher: Birmingham City University - title: Exploring the Affordances of VR for Musical Interaction Design with VIMEs - url: https://www.nime.org/proceedings/2020/nime2020_paper23.pdf - year: 2020 + pages: 200--205 + publisher: Queensland Conservatorium Griffith University + title: 'Skip the Pre-Concert Demo: How Technical Familiarity and Musical Style Affect + Audience Response' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0041.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_24 - abstract: 'The continued growth of modern VR (virtual reality) platforms into mass - adoption is fundamentally driven by the work of content creators who offer engaging - experiences. It is therefore essential to design accessible creativity support - tools that can facilitate the work of a broad range of practitioners in this domain. - In this paper, we focus on one facet of VR content creation, namely immersive - audio design. 
We discuss a suite of design tools that enable both novice and expert - users to rapidly prototype immersive sonic environments across desktop, virtual - reality and augmented reality platforms. We discuss the design considerations - adopted for each implementation, and how the individual systems informed one another - in terms of interaction design. We then offer a preliminary evaluation of these - systems with reports from first-time users. Finally, we discuss our road-map for - improving individual and collaborative creative experiences across platforms and - realities in the context of immersive audio.' - address: 'Birmingham, UK' - author: 'Çamcı, Anıl and Willette, Aaron and Gargi, Nachiketa and Kim, Eugene and - Xu, Julia and Lai, Tanya' - bibtex: "@inproceedings{NIME20_24,\n abstract = {The continued growth of modern\ - \ VR (virtual reality) platforms into mass adoption is fundamentally driven by\ - \ the work of content creators who offer engaging experiences. It is therefore\ - \ essential to design accessible creativity support tools that can facilitate\ - \ the work of a broad range of practitioners in this domain. In this paper, we\ - \ focus on one facet of VR content creation, namely immersive audio design. We\ - \ discuss a suite of design tools that enable both novice and expert users to\ - \ rapidly prototype immersive sonic environments across desktop, virtual reality\ - \ and augmented reality platforms. We discuss the design considerations adopted\ - \ for each implementation, and how the individual systems informed one another\ - \ in terms of interaction design. We then offer a preliminary evaluation of these\ - \ systems with reports from first-time users. 
Finally, we discuss our road-map\ - \ for improving individual and collaborative creative experiences across platforms\ - \ and realities in the context of immersive audio.},\n address = {Birmingham,\ - \ UK},\n author = {Çamcı, Anıl and Willette, Aaron and Gargi, Nachiketa and Kim,\ - \ Eugene and Xu, Julia and Lai, Tanya},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813270},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {127--130},\n publisher = {Birmingham City University},\n\ - \ title = {Cross-platform and Cross-reality Design of Immersive Sonic Environments},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper24.pdf},\n year =\ - \ {2020}\n}\n" + ID: Wu2016 + abstract: "This paper presents an empirical evaluation of a digital music\ninstrument\ + \ (DMI) for electroacoustic vocal performance, the Tibetan Singing\nPrayer Wheel\ + \ (TSPW). Specifically, we study audience preference for the way it\nmaps horizontal\ + \ spinning gestures to vocal processing parameters. We filmed six\nsongs with\ + \ the singer using the TSPW, and created two alternative soundtracks for\neach\ + \ song: one desynchronized, and one with the mapping inverted. Participants\n\ + viewed all six songs with either the original or desynchronized soundtrack\n(Experiment\ + \ 1), or either the original or inverted-mapping soundtrack (Experiment\n2). Participants\ + \ were asked several questions via questionnaire after each song.\nOverall, they\ + \ reported higher engagement and preference for the original\nversions, suggesting\ + \ that audiences of the TSPW prefer more highly synchronized\nperformances, as\ + \ well as more intuitive mappings, though level of perceived\nexpression of the\ + \ performer only significantly differed in Experiment 1. 
Further,\nwe believe\ + \ that our experimental methods contribute to how DMIs can be evaluated\nfrom\ + \ the audience's (a recently noted underrepresented stakeholder)\nperspective." + address: 'Brisbane, Australia' + author: Jiayue Cecilia Wu and Madeline Huberth and Yoo Hsiu Yeh and Matt Wright + bibtex: "@inproceedings{Wu2016,\n abstract = {This paper presents an empirical evaluation\ + \ of a digital music\ninstrument (DMI) for electroacoustic vocal performance,\ + \ the Tibetan Singing\nPrayer Wheel (TSPW). Specifically, we study audience preference\ + \ for the way it\nmaps horizontal spinning gestures to vocal processing parameters.\ + \ We filmed six\nsongs with the singer using the TSPW, and created two alternative\ + \ soundtracks for\neach song: one desynchronized, and one with the mapping inverted.\ + \ Participants\nviewed all six songs with either the original or desynchronized\ + \ soundtrack\n(Experiment 1), or either the original or inverted-mapping soundtrack\ + \ (Experiment\n2). Participants were asked several questions via questionnaire\ + \ after each song.\nOverall, they reported higher engagement and preference for\ + \ the original\nversions, suggesting that audiences of the TSPW prefer more highly\ + \ synchronized\nperformances, as well as more intuitive mappings, though level\ + \ of perceived\nexpression of the performer only significantly differed in Experiment\ + \ 1. 
Further,\nwe believe that our experimental methods contribute to how DMIs\ + \ can be evaluated\nfrom the audience's (a recently noted underrepresented stakeholder)\n\ + perspective.},\n address = {Brisbane, Australia},\n author = {Jiayue Cecilia Wu\ + \ and Madeline Huberth and Yoo Hsiu Yeh and Matt Wright},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176143},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {206--211},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Evaluating the Audience's Perception of Real-time Gestural Control\ + \ and Mapping Mechanisms in Electroacoustic Vocal Performance},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0042.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813270 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176143 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 127--130 - publisher: Birmingham City University - title: Cross-platform and Cross-reality Design of Immersive Sonic Environments - url: https://www.nime.org/proceedings/2020/nime2020_paper24.pdf - year: 2020 + pages: 206--211 + publisher: Queensland Conservatorium Griffith University + title: Evaluating the Audience's Perception of Real-time Gestural Control and Mapping + Mechanisms in Electroacoustic Vocal Performance + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0042.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_25 - abstract: 'Silver is an artwork that deals with the emotional feeling of contact - by exaggerating it acoustically. It originates from an interactive room installation, - where several textile sculptures merge with sounds. 
Silver is made from a wire - mesh and its surface is reactive to closeness and touch. This material property - forms a hybrid of artwork and parametric controller for the real-time sound generation. - The textile quality of the fine steel wire-mesh evokes a haptic familiarity inherent - to textile materials. This makes it easy for the audience to overcome the initial - threshold barrier to get in touch with the artwork in an exhibition situation. - Additionally, the interaction is not dependent on visuals. The characteristics - of the surface sensor allows a user to play the instrument without actually touching - it.' - address: 'Birmingham, UK' - author: 'Schebella, Marius and Fischbacher, Gertrud and Mosher, Matthew' - bibtex: "@inproceedings{NIME20_25,\n abstract = {Silver is an artwork that deals\ - \ with the emotional feeling of contact by exaggerating it acoustically. It originates\ - \ from an interactive room installation, where several textile sculptures merge\ - \ with sounds. Silver is made from a wire mesh and its surface is reactive to\ - \ closeness and touch. This material property forms a hybrid of artwork and parametric\ - \ controller for the real-time sound generation. The textile quality of the fine\ - \ steel wire-mesh evokes a haptic familiarity inherent to textile materials. \ - \ This makes it easy for the audience to overcome the initial threshold barrier\ - \ to get in touch with the artwork in an exhibition situation. Additionally, the\ - \ interaction is not dependent on visuals. 
The characteristics of the surface\ - \ sensor allows a user to play the instrument without actually touching it.},\n\ - \ address = {Birmingham, UK},\n author = {Schebella, Marius and Fischbacher, Gertrud\ - \ and Mosher, Matthew},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813272},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {131--132},\n publisher = {Birmingham City University},\n\ - \ title = {Silver: A Textile Wireframe Interface for the Interactive Sound Installation\ - \ Idiosynkrasia},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper25.pdf},\n\ - \ year = {2020}\n}\n" + ID: Lee2016 + abstract: 'This paper suggests a novel form of audiovisual performance --- live + writing --- that transforms creative writing into a real-time performing art. + The process of typing a poem on the fly is captured and augmented to create an + audiovisual performance that establishes natural links among the components of + typing gestures, the poem being written on the fly, and audiovisual artifacts. + Live writing draws upon ideas from the tradition of live coding in which the process + of programming is revealed to the audience in real-time. This paper discusses + the motivation behind the idea, interaction schemes and a performance interface + for such a performance practice. Our live writing performance system is enabled + by a custom text editor, writing-sound mapping strategies of our choice, a poem-sonification, + and temporal typography. We describe two live writing performances that take different + approaches as they vary the degree of composition and improvisation in writing.' 
+ address: 'Brisbane, Australia' + author: Sang Won Lee and Georg Essl and Mari Martinez + bibtex: "@inproceedings{Lee2016,\n abstract = {This paper suggests a novel form\ + \ of audiovisual performance --- live writing --- that transforms creative writing\ + \ into a real-time performing art. The process of typing a poem on the fly is\ + \ captured and augmented to create an audiovisual performance that establishes\ + \ natural links among the components of typing gestures, the poem being written\ + \ on the fly, and audiovisual artifacts. Live writing draws upon ideas from the\ + \ tradition of live coding in which the process of programming is revealed to\ + \ the audience in real-time. This paper discusses the motivation behind the idea,\ + \ interaction schemes and a performance interface for such a performance practice.\ + \ Our live writing performance system is enabled by a custom text editor, writing-sound\ + \ mapping strategies of our choice, a poem-sonification, and temporal typography.\ + \ We describe two live writing performances that take different approaches as\ + \ they vary the degree of composition and improvisation in writing.},\n address\ + \ = {Brisbane, Australia},\n author = {Sang Won Lee and Georg Essl and Mari Martinez},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176060},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {212--217},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Live Writing : Writing as a Real-time Audiovisual\ + \ Performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0043.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813272 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176060 + isbn: 978-1-925455-13-7 issn: 
2220-4806 - month: July - pages: 131--132 - publisher: Birmingham City University - title: 'Silver: A Textile Wireframe Interface for the Interactive Sound Installation - Idiosynkrasia' - url: https://www.nime.org/proceedings/2020/nime2020_paper25.pdf - year: 2020 + pages: 212--217 + publisher: Queensland Conservatorium Griffith University + title: 'Live Writing : Writing as a Real-time Audiovisual Performance' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0043.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_26 - abstract: 'Musical expressivity is an important aspect of musical performance for - humans as well as robotic musicians. We present a novel mechatronics-driven implementation - of Brushless Direct Current (BLDC) motors in a robotic marimba player, named ANON, - designed to improve speed, dynamic range (loudness), and ultimately perceived - musical expressivity in comparison to state-of-the-art robotic percussionist actuators. - In an objective test of dynamic range, we find that our implementation provides - wider and more consistent dynamic range response in comparison with solenoid-based - robotic percussionists. Our implementation also outperforms both solenoid and - human marimba players in striking speed. In a subjective listening test measuring - musical expressivity, our system performs significantly better than a solenoid-based - system and is statistically indistinguishable from human performers.' - address: 'Birmingham, UK' - author: 'Yang, Ning and Savery, Richard and Sankaranarayanan, Raghavasimhan and - Zahray, Lisa and Weinberg, Gil' - bibtex: "@inproceedings{NIME20_26,\n abstract = {Musical expressivity is an important\ - \ aspect of musical performance for humans as well as robotic musicians. 
We present\ - \ a novel mechatronics-driven implementation of Brushless Direct Current (BLDC)\ - \ motors in a robotic marimba player, named ANON, designed to improve speed, dynamic\ - \ range (loudness), and ultimately perceived musical expressivity in comparison\ - \ to state-of-the-art robotic percussionist actuators. In an objective test of\ - \ dynamic range, we find that our implementation provides wider and more consistent\ - \ dynamic range response in comparison with solenoid-based robotic percussionists.\ - \ Our implementation also outperforms both solenoid and human marimba players\ - \ in striking speed. In a subjective listening test measuring musical expressivity,\ - \ our system performs significantly better than a solenoid-based system and is\ - \ statistically indistinguishable from human performers.},\n address = {Birmingham,\ - \ UK},\n author = {Yang, Ning and Savery, Richard and Sankaranarayanan, Raghavasimhan\ - \ and Zahray, Lisa and Weinberg, Gil},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813274},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {133--138},\n presentation-video = {https://youtu.be/KsQNlArUv2k},\n\ - \ publisher = {Birmingham City University},\n title = {Mechatronics-Driven Musical\ - \ Expressivity for Robotic Percussionists},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper26.pdf},\n\ - \ year = {2020}\n}\n" + ID: Jathal2016 + abstract: |- + The majority of electronic percussion controllers on the market + today are based on location-oriented striking techniques, resulting in a finger + drumming interaction paradigm, that is both fundamentally eclectic as well as + imposingly contr. 
The few controllers that allow hand-drumming + techniques also invariably conform to region-based triggering design, or, in + trade-off for expressivity, end up excluding hardware connectivity options that + are vital to the context of the modern electronic rhythm producer. The HandSolo + is a timbre-based drum controller that allows the use of natural, hand-drumming + strokes, whilst offering the same end-goal functionality that percussion + controller users have come to expect over the past decade. + address: 'Brisbane, Australia' + author: Kunal Jathal and Tae-Hong Park + bibtex: "@inproceedings{Jathal2016,\n abstract = {The majority of electronic percussion\ + \ controllers on the market\ntoday are based on location-oriented striking techniques,\ + \ resulting in a finger\ndrumming interaction paradigm, that is both fundamentally\ + \ eclectic as well as\nimposingly contr. The few controllers that allow hand-drumming\n\ + techniques also invariably conform to region-based triggering design, or, in\n\ + trade-off for expressivity, end up excluding hardware connectivity options that\n\ + are vital to the context of the modern electronic rhythm producer. 
The HandSolo\n\ + is a timbre-based drum controller that allows the use of natural, hand-drumming\n\ + strokes, whilst offering the same end-goal functionality that percussion\ncontroller\ + \ users have come to expect over the past decade.},\n address = {Brisbane, Australia},\n\ + \ author = {Kunal Jathal and Tae-Hong Park},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176042},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {218--223},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {The HandSolo: A Hand Drum Controller for Natural Rhythm Entry and Production},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0044.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813274 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176042 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 133--138 - presentation-video: https://youtu.be/KsQNlArUv2k - publisher: Birmingham City University - title: Mechatronics-Driven Musical Expressivity for Robotic Percussionists - url: https://www.nime.org/proceedings/2020/nime2020_paper26.pdf - year: 2020 + pages: 218--223 + publisher: Queensland Conservatorium Griffith University + title: 'The HandSolo: A Hand Drum Controller for Natural Rhythm Entry and Production' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0044.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_27 - abstract: 'Discovering outmoded or obsolete technologies and appropriating them - in creative practice can uncover new relationships between those technologies. - Using a media archaeological research approach, this paper presents the electromechanical - relay and a book of random numbers as related forms of obsolete media. 
Situated - within the context of electromechanical sound art, the work uses a non-deterministic - approach to explore the non-linear and unpredictable agency and materiality of - the objects in the work. Developed by the first author, Click::RAND is an object-based - sound installation. The work has been developed as an audio-visual representation - of a genealogy of connections between these two forms of media in the history - of computing.' - address: 'Birmingham, UK' - author: 'Dunham, Paul' - bibtex: "@inproceedings{NIME20_27,\n abstract = {Discovering outmoded or obsolete\ - \ technologies and appropriating them in creative practice can uncover new relationships\ - \ between those technologies. Using a media archaeological research approach,\ - \ this paper presents the electromechanical relay and a book of random numbers\ - \ as related forms of obsolete media. Situated within the context of electromechanical\ - \ sound art, the work uses a non-deterministic approach to explore the non-linear\ - \ and unpredictable agency and materiality of the objects in the work. Developed\ - \ by the first author, Click::RAND is an object-based sound installation. The\ - \ work has been developed as an audio-visual representation of a genealogy of\ - \ connections between these two forms of media in the history of computing.},\n\ - \ address = {Birmingham, UK},\n author = {Dunham, Paul},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813276},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {139--142},\n presentation-video\ - \ = {https://youtu.be/vWKw8H0F9cI},\n publisher = {Birmingham City University},\n\ - \ title = {Click::RAND. 
A Minimalist Sound Sculpture.},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper27.pdf},\n\ - \ year = {2020}\n}\n" + ID: Nash2016 + abstract: "This paper presents a development of the ubiquitous computer\nkeyboard\ + \ to capture velocity and other continuous musical properties, in order to\nsupport\ + \ more expressive interaction with music software. Building on existing\n`virtual\ + \ piano' utilities, the device is designed to provide a richer\nmechanism for\ + \ note entry within predominantly non-realtime editing tasks, in\napplications\ + \ where keyboard interaction is a central component of the user\nexperience (score\ + \ editors, sequencers, DAWs, trackers, live coding), and in which\nusers draw\ + \ on virtuosities in both music and computing.\nIn the keyboard, additional hardware\ + \ combines existing scan code (key press)\ndata with accelerometer readings to\ + \ create a secondary USB device, using the same\ncable but visible to software\ + \ as a separate USB MIDI device aside existing USB\nHID functionality. This paper\ + \ presents and evaluates an initial prototype,\ndeveloped using an Arduino board\ + \ and inexpensive sensors, and discusses design\nconsiderations and test findings\ + \ in musical applications, drawing on user studies\nof keyboard-mediated music\ + \ interaction. Without challenging more established (and\nexpensive) performance\ + \ devices; significant benefits are demonstrated in\nnotation-mediated interaction,\ + \ where the user's focus rests with\nsoftware." + address: 'Brisbane, Australia' + author: Chris Nash + bibtex: "@inproceedings{Nash2016,\n abstract = {This paper presents a development\ + \ of the ubiquitous computer\nkeyboard to capture velocity and other continuous\ + \ musical properties, in order to\nsupport more expressive interaction with music\ + \ software. 
Building on existing\n`virtual piano' utilities, the device is designed\ + \ to provide a richer\nmechanism for note entry within predominantly non-realtime\ + \ editing tasks, in\napplications where keyboard interaction is a central component\ + \ of the user\nexperience (score editors, sequencers, DAWs, trackers, live coding),\ + \ and in which\nusers draw on virtuosities in both music and computing.\nIn the\ + \ keyboard, additional hardware combines existing scan code (key press)\ndata\ + \ with accelerometer readings to create a secondary USB device, using the same\n\ + cable but visible to software as a separate USB MIDI device aside existing USB\n\ + HID functionality. This paper presents and evaluates an initial prototype,\ndeveloped\ + \ using an Arduino board and inexpensive sensors, and discusses design\nconsiderations\ + \ and test findings in musical applications, drawing on user studies\nof keyboard-mediated\ + \ music interaction. Without challenging more established (and\nexpensive) performance\ + \ devices; significant benefits are demonstrated in\nnotation-mediated interaction,\ + \ where the user's focus rests with\nsoftware.},\n address = {Brisbane, Australia},\n\ + \ author = {Chris Nash},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176088},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {224--229},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {The 'E' in QWERTY:\ + \ Musical Expression with Old Computer Interfaces},\n track = {Papers},\n url\ + \ = {http://www.nime.org/proceedings/2016/nime2016_paper0045.pdf},\n year = {2016}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813276 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176088 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 
139--142 - presentation-video: https://youtu.be/vWKw8H0F9cI - publisher: Birmingham City University - title: 'Click::RAND. A Minimalist Sound Sculpture.' - url: https://www.nime.org/proceedings/2020/nime2020_paper27.pdf - year: 2020 + pages: 224--229 + publisher: Queensland Conservatorium Griffith University + title: 'The ''E'' in QWERTY: Musical Expression with Old Computer Interfaces' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0045.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_28 - abstract: 'This paper reports on the experience gained after five years of teaching - a NIME master course designed specifically for artists. A playful pedagogical - approach based on practice-based methods is presented and evaluated. My goal was - introducing the art of NIME design and performance giving less emphasis to technology. - Instead of letting technology determine how we teach and think during the class, - I propose fostering at first the student''s active construction and understanding - of the field experimenting with physical materials,sound production and bodily - movements. For this intention I developed a few classroom exercises which my students - had to study and practice. During this period of five years, 95 students attended - the course. At the end of the semester course, each student designed, built and - performed a new interface for musical expression in front of an audience. Thus, - in this paper I describe and discuss the benefits of applying playfulness and - practice-based methods for teaching NIME in art universities. I introduce the - methods and classroom exercises developed and finally I present some lessons learned - from this pedagogical experience.' - address: 'Birmingham, UK' - author: 'Tomás, Enrique' - bibtex: "@inproceedings{NIME20_28,\n abstract = {This paper reports on the experience\ - \ gained after five years of teaching a NIME master course designed specifically\ - \ for artists. 
A playful pedagogical approach based on practice-based methods\ - \ is presented and evaluated. My goal was introducing the art of NIME design and\ - \ performance giving less emphasis to technology. Instead of letting technology\ - \ determine how we teach and think during the class, I propose fostering at first\ - \ the student's active construction and understanding of the field experimenting\ - \ with physical materials,sound production and bodily movements. For this intention\ - \ I developed a few classroom exercises which my students had to study and practice.\ - \ During this period of five years, 95 students attended the course. At the end\ - \ of the semester course, each student designed, built and performed a new interface\ - \ for musical expression in front of an audience. Thus, in this paper I describe\ - \ and discuss the benefits of applying playfulness and practice-based methods\ - \ for teaching NIME in art universities. I introduce the methods and classroom\ - \ exercises developed and finally I present some lessons learned from this pedagogical\ - \ experience.},\n address = {Birmingham, UK},\n author = {Tomás, Enrique},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813280},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {143--148},\n\ - \ presentation-video = {https://youtu.be/94o3J3ozhMs},\n publisher = {Birmingham\ - \ City University},\n title = {A Playful Approach to Teaching NIME: Pedagogical\ - \ Methods from a Practice-Based Perspective},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper28.pdf},\n\ - \ year = {2020}\n}\n" + ID: Greenhill2016 + abstract: "We present Focal, an eye-tracking musical expression controller\nwhich\ + \ allows\nhands-free control over audio effects and synthesis parameters during\n\ + peformance. 
A see-through head-mounted display projects virtual dials and\nswitches\ + \ into the visual field. The performer controls these with a single\nexpression\ + \ pedal, switching context by glancing at the object they wish to\ncontrol. This\ + \ simple interface allows for minimal physical disturbance to the\ninstrumental\ + \ musician, whilst enabling the control of many parameters otherwise\nonly achievable\ + \ with multiple foot pedalboards. We describe the development of\nthe system,\ + \ including the construction of the eye-tracking display, and the\ndesign of the\ + \ musical interface. We also present a comparison of a performance\nbetween Focal\ + \ and conventional controllers. " + address: 'Brisbane, Australia' + author: Stewart Greenhill and Cathie Travers + bibtex: "@inproceedings{Greenhill2016,\n abstract = {We present Focal, an eye-tracking\ + \ musical expression controller\nwhich allows\nhands-free control over audio effects\ + \ and synthesis parameters during\npeformance. A see-through head-mounted display\ + \ projects virtual dials and\nswitches into the visual field. The performer controls\ + \ these with a single\nexpression pedal, switching context by glancing at the\ + \ object they wish to\ncontrol. This simple interface allows for minimal physical\ + \ disturbance to the\ninstrumental musician, whilst enabling the control of many\ + \ parameters otherwise\nonly achievable with multiple foot pedalboards. We describe\ + \ the development of\nthe system, including the construction of the eye-tracking\ + \ display, and the\ndesign of the musical interface. We also present a comparison\ + \ of a performance\nbetween Focal and conventional controllers. 
},\n address =\ + \ {Brisbane, Australia},\n author = {Stewart Greenhill and Cathie Travers},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176022},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {230--235},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Focal : An Eye-Tracking Musical Expression\ + \ Controller},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0046.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813280 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176022 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 143--148 - presentation-video: https://youtu.be/94o3J3ozhMs - publisher: Birmingham City University - title: 'A Playful Approach to Teaching NIME: Pedagogical Methods from a Practice-Based - Perspective' - url: https://www.nime.org/proceedings/2020/nime2020_paper28.pdf - year: 2020 + pages: 230--235 + publisher: Queensland Conservatorium Griffith University + title: 'Focal : An Eye-Tracking Musical Expression Controller' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0046.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_29 - abstract: 'Using open-source and creative coding frameworks, a team of artist-engineers - from Portland Community College working with artists that experience Intellectual/Developmental - disabilities prototyped an ensemble of adapted instruments and synthesizers that - facilitate real-time in-key collaboration. The instruments employ a variety of - sensors, sending the resulting musical controls to software sound generators via - MIDI. Careful consideration was given to the balance between freedom of expression, - and curating the possible sonic outcomes as adaptation. 
Evaluation of adapted - instrument design may differ greatly from frameworks for evaluating traditional - instruments or products intended for mass-market, though the results of such focused - and individualised design have a variety of possible applications.' - address: 'Birmingham, UK' - author: 'Jarvis Holland, Quinn D and Quartez, Crystal and Botello, Francisco and - Gammill, Nathan' - bibtex: "@inproceedings{NIME20_29,\n abstract = {Using open-source and creative\ - \ coding frameworks, a team of artist-engineers from Portland Community College\ - \ working with artists that experience Intellectual/Developmental disabilities\ - \ prototyped an ensemble of adapted instruments and synthesizers that facilitate\ - \ real-time in-key collaboration. The instruments employ a variety of sensors,\ - \ sending the resulting musical controls to software sound generators via MIDI.\ - \ Careful consideration was given to the balance between freedom of expression,\ - \ and curating the possible sonic outcomes as adaptation. Evaluation of adapted\ - \ instrument design may differ greatly from frameworks for evaluating traditional\ - \ instruments or products intended for mass-market, though the results of such\ - \ focused and individualised design have a variety of possible applications.},\n\ - \ address = {Birmingham, UK},\n author = {Jarvis Holland, Quinn D and Quartez,\ - \ Crystal and Botello, Francisco and Gammill, Nathan},\n booktitle = {Proceedings\ + ID: Meacham2016 + abstract: |- + The `Laptop Accordion' co-opts the commodity laptop + computer + to craft an expressive, whimsical accordion-like instrument. + It utilizes the opening and closing of the laptop + screen as a physical metaphor for accordion bellows, and the + laptop keyboard as musical buttonboard. Motion is tracked + using the laptop camera via optical flow and mapped to continuous + control over dynamics, while the sound is generated + in real-time. 
The instrument uses both skeuomorphic and + abstract onscreen graphics which further reference the core + mechanics of `squeezebox' instruments. The laptop accordion + provides several game modes, while overall offering an + unconventional aesthetic experience in music making. + address: 'Brisbane, Australia' + author: Aidan Meacham and Sanjay Kannan and Ge Wang + bibtex: "@inproceedings{Meacham2016,\n abstract = {The `Laptop Accordion' co-opts\ + \ the commodity laptop\ncomputer\nto craft an expressive, whimsical accordion-like\ + \ instrument.\nIt utilizes the opening and closing of the laptop\nscreen as a\ + \ physical metaphor for accordion bellows, and the\nlaptop keyboard as musical\ + \ buttonboard. Motion is tracked\nusing the laptop camera via optical flow and\ + \ mapped to continuous\ncontrol over dynamics, while the sound is generated\n\ + in real-time. The instrument uses both skeuomorphic and\nabstract onscreen graphics\ + \ which further reference the core\nmechanics of `squeezebox' instruments. 
The\ + \ laptop accordion\nprovides several game modes, while overall offering an\nunconventional\ + \ aesthetic experience in music making.},\n address = {Brisbane, Australia},\n\ + \ author = {Aidan Meacham and Sanjay Kannan and Ge Wang},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813286},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {149--153},\n publisher = {Birmingham\ - \ City University},\n title = {EXPANDING ACCESS TO MUSIC TECHNOLOGY- Rapid Prototyping\ - \ Accessible Instrument Solutions For Musicians With Intellectual Disabilities},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper29.pdf},\n year =\ - \ {2020}\n}\n" + \ doi = {10.5281/zenodo.1176078},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {236--240},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {The Laptop Accordion},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0047.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813286 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176078 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 149--153 - publisher: Birmingham City University - title: EXPANDING ACCESS TO MUSIC TECHNOLOGY- Rapid Prototyping Accessible Instrument - Solutions For Musicians With Intellectual Disabilities - url: https://www.nime.org/proceedings/2020/nime2020_paper29.pdf - year: 2020 + pages: 236--240 + publisher: Queensland Conservatorium Griffith University + title: The Laptop Accordion + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0047.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_3 - abstract: 'Non-rigid interfaces allow for exploring new interactive 
paradigms that - rely on deformable input and shape change, and whose possible applications span - several branches of human-computer interaction (HCI). While extensively explored - as deformable game controllers, bendable smartphones, and shape-changing displays, - non-rigid interfaces are rarely framed in a musical context, and their use for - composition and performance is rather sparse and unsystematic. With this work, - we start a systematic exploration of this relatively uncharted research area, - by means of (1) briefly reviewing existing musical interfaces that capitalize - on deformable input,and (2) surveying 11 among experts and pioneers in the field - about their experience with and vision on non-rigid musical interfaces.Based on - experts’ input, we suggest possible next steps of musical appropriation with deformable - and shape-changing technologies.We conclude by discussing how cross-overs between - NIME and HCI research will benefit non-rigid interfaces.' - address: 'Birmingham, UK' - author: 'Boem, Alberto and Troiano, Giovanni M and and Lepri, Giacomo and Zappi, - Victor' - bibtex: "@inproceedings{NIME20_3,\n abstract = {Non-rigid interfaces allow for exploring\ - \ new interactive paradigms that rely on deformable input and shape change, and\ - \ whose possible applications span several branches of human-computer interaction\ - \ (HCI). While extensively explored as deformable game controllers, bendable smartphones,\ - \ and shape-changing displays, non-rigid interfaces are rarely framed in a musical\ - \ context, and their use for composition and performance is rather sparse and\ - \ unsystematic. 
With this work, we start a systematic exploration of this relatively\ - \ uncharted research area, by means of (1) briefly reviewing existing musical\ - \ interfaces that capitalize on deformable input,and (2) surveying 11 among experts\ - \ and pioneers in the field about their experience with and vision on non-rigid\ - \ musical interfaces.Based on experts’ input, we suggest possible next steps of\ - \ musical appropriation with deformable and shape-changing technologies.We conclude\ - \ by discussing how cross-overs between NIME and HCI research will benefit non-rigid\ - \ interfaces.},\n address = {Birmingham, UK},\n author = {Boem, Alberto and Troiano,\ - \ Giovanni M and and Lepri, Giacomo and Zappi, Victor},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813288},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {17--22},\n presentation-video\ - \ = {https://youtu.be/o4CuAglHvf4},\n publisher = {Birmingham City University},\n\ - \ title = {Non-Rigid Musical Interfaces: Exploring Practices, Takes, and Future\ - \ Perspective},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper3.pdf},\n\ - \ year = {2020}\n}\n" + ID: Jakobsen2016 + abstract: "This paper presents a novel platform for expressive music making\ncalled\ + \ Hitmachine. Hitmachine lets you build and play your own musical\ninstruments\ + \ from Legos and sensors and is aimed towards empowering everyone to\nengage in\ + \ rich music making despite of prior musical experience. The paper\npresents findings\ + \ from a 4-day workshop where more that 150 children from ages\n3-13 built and\ + \ played their own musical instruments. The children used different\nsensors for\ + \ playing and performed with their instruments on stage. 
The findings\nshow how\ + \ age influenced the children's musical understanding and\nexpressivity, and gives\ + \ insight into important aspects to consider when designing\nfor expressive music\ + \ for novices." + address: 'Brisbane, Australia' + author: Kasper buhl Jakobsen and Marianne Graves Petersen and Majken Kirkegaard + Rasmussen and Jens Emil Groenbaek and jakob winge and jeppe stougaard + bibtex: "@inproceedings{Jakobsen2016,\n abstract = {This paper presents a novel\ + \ platform for expressive music making\ncalled Hitmachine. Hitmachine lets you\ + \ build and play your own musical\ninstruments from Legos and sensors and is aimed\ + \ towards empowering everyone to\nengage in rich music making despite of prior\ + \ musical experience. The paper\npresents findings from a 4-day workshop where\ + \ more that 150 children from ages\n3-13 built and played their own musical instruments.\ + \ The children used different\nsensors for playing and performed with their instruments\ + \ on stage. 
The findings\nshow how age influenced the children's musical understanding\ + \ and\nexpressivity, and gives insight into important aspects to consider when\ + \ designing\nfor expressive music for novices.},\n address = {Brisbane, Australia},\n\ + \ author = {Kasper buhl Jakobsen and Marianne Graves Petersen and Majken Kirkegaard\ + \ Rasmussen and Jens Emil Groenbaek and jakob winge and jeppe stougaard},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176038},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {241--246},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Hitmachine: Collective Musical Expressivity\ + \ for Novices},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0048.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813288 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176038 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 17--22 - presentation-video: https://youtu.be/o4CuAglHvf4 - publisher: Birmingham City University - title: 'Non-Rigid Musical Interfaces: Exploring Practices, Takes, and Future Perspective' - url: https://www.nime.org/proceedings/2020/nime2020_paper3.pdf - year: 2020 + pages: 241--246 + publisher: Queensland Conservatorium Griffith University + title: 'Hitmachine: Collective Musical Expressivity for Novices' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0048.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_30 - abstract: 'Despite a history spanning nearly 30 years, best practices for the use - of virtual reality (VR) in computer music performance remain exploratory. 
Here, - we present a case study of a laptop orchestra performance entitled Resilience, - involving one VR performer and an ensemble of instrumental performers, in order - to explore values and design principles for incorporating this emerging technology - into computer music performance. We present a brief history at the intersection - of VR and the laptop orchestra. We then present the design of the piece and distill - it into a set of design principles. Broadly, these design principles address the - interplay between the different conflicting perspectives at play: those of the - VR performer, the ensemble, and the audience. For example, one principle suggests - that the perceptual link between the physical and virtual world maybe enhanced - for the audience by improving the performers'' sense of embodiment. We argue that - these design principles are a form of generalized knowledge about how we might - design laptop orchestra pieces involving virtual reality.' - address: 'Birmingham, UK' - author: 'Atherton, Jack and Wang, Ge' - bibtex: "@inproceedings{NIME20_30,\n abstract = {Despite a history spanning nearly\ - \ 30 years, best practices for the use of virtual reality (VR) in computer music\ - \ performance remain exploratory. Here, we present a case study of a laptop orchestra\ - \ performance entitled Resilience, involving one VR performer and an ensemble\ - \ of instrumental performers, in order to explore values and design principles\ - \ for incorporating this emerging technology into computer music performance.\ - \ We present a brief history at the intersection of VR and the laptop orchestra.\ - \ We then present the design of the piece and distill it into a set of design\ - \ principles. Broadly, these design principles address the interplay between the\ - \ different conflicting perspectives at play: those of the VR performer, the ensemble,\ - \ and the audience. 
For example, one principle suggests that the perceptual link\ - \ between the physical and virtual world maybe enhanced for the audience by improving\ - \ the performers' sense of embodiment. We argue that these design principles are\ - \ a form of generalized knowledge about how we might design laptop orchestra pieces\ - \ involving virtual reality.},\n address = {Birmingham, UK},\n author = {Atherton,\ - \ Jack and Wang, Ge},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813290},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {154--159},\n presentation-video = {https://youtu.be/tmeDO5hg56Y},\n\ - \ publisher = {Birmingham City University},\n title = {Curating Perspectives:\ - \ Incorporating Virtual Reality into Laptop Orchestra Performance},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper30.pdf},\n\ - \ year = {2020}\n}\n" + ID: Michon2016 + abstract: "In this paper, we present the BladeAxe: an iPad-based musical\ninstrument\ + \ leveraging the concepts of augmented mobile device and hybrid\nphysical model\ + \ controller. By being almost fully standalone, it can be used\neasily on stage\ + \ in the frame of a live performance by simply plugging it to a\ntraditional guitar\ + \ amplifier or to any sound system. Its acoustical plucking\nsystem provides the\ + \ performer with an extended expressive potential compared to a\nstandard controller.\n\ + After presenting an intermediate version of the BladeAxe, we'll describe\nour\ + \ final design. We will also introduce a similar instrument: the PlateAxe." 
+ address: 'Brisbane, Australia' + author: Romain Michon and Julius Orion Iii Smith and Matthew Wright and Chris Chafe + bibtex: "@inproceedings{Michon2016,\n abstract = {In this paper, we present the\ + \ BladeAxe: an iPad-based musical\ninstrument leveraging the concepts of augmented\ + \ mobile device and hybrid\nphysical model controller. By being almost fully standalone,\ + \ it can be used\neasily on stage in the frame of a live performance by simply\ + \ plugging it to a\ntraditional guitar amplifier or to any sound system. Its acoustical\ + \ plucking\nsystem provides the performer with an extended expressive potential\ + \ compared to a\nstandard controller.\nAfter presenting an intermediate version\ + \ of the BladeAxe, we'll describe\nour final design. We will also introduce a\ + \ similar instrument: the PlateAxe.},\n address = {Brisbane, Australia},\n author\ + \ = {Romain Michon and Julius Orion Iii Smith and Matthew Wright and Chris Chafe},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176080},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {247--252},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Augmenting the iPad: the BladeAxe},\n track\ + \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0049.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813290 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176080 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 154--159 - presentation-video: https://youtu.be/tmeDO5hg56Y - publisher: Birmingham City University - title: 'Curating Perspectives: Incorporating Virtual Reality into Laptop Orchestra - Performance' - url: https://www.nime.org/proceedings/2020/nime2020_paper30.pdf - year: 2020 + pages: 247--252 
+ publisher: Queensland Conservatorium Griffith University + title: 'Augmenting the iPad: the BladeAxe' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0049.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_31 - abstract: 'So far, NIME research has been mostly inward-looking, dedicated to divulging - and studying our own work and having limited engagement with trends outside our - community. Though musical instruments as cultural artefacts are inherently political, - we have so far not sufficiently engaged with confronting these themes in our own - research. In this paper we argue that we should consider how our work is also - political, and begin to develop a clear political agenda that includes social, - ethical, and cultural considerations through which to consider not only our own - musical instruments, but also those not created by us. Failing to do so would - result in an unintentional but tacit acceptance and support of such ideologies. - We explore one item to be included in this political agenda: the recent trend - in music technology of ``democratising music'''', which carries implicit political - ideologies grounded in techno-solutionism. We conclude with a number of recommendations - for stimulating community-wide discussion on these themes in the hope that this - leads to the development of an outward-facing perspective that fully engages with - political topics.' - address: 'Birmingham, UK' - author: 'Morreale, Fabio and Bin, S. M. Astrid and McPherson, Andrew and Stapleton, - Paul and Wanderley, Marcelo' - bibtex: "@inproceedings{NIME20_31,\n abstract = {So far, NIME research has been\ - \ mostly inward-looking, dedicated to divulging and studying our own work and\ - \ having limited engagement with trends outside our community. Though musical\ - \ instruments as cultural artefacts are inherently political, we have so far not\ - \ sufficiently engaged with confronting these themes in our own research. 
In this\ - \ paper we argue that we should consider how our work is also political, and begin\ - \ to develop a clear political agenda that includes social, ethical, and cultural\ - \ considerations through which to consider not only our own musical instruments,\ - \ but also those not created by us. Failing to do so would result in an unintentional\ - \ but tacit acceptance and support of such ideologies. We explore one item to\ - \ be included in this political agenda: the recent trend in music technology of\ - \ ``democratising music'', which carries implicit political ideologies grounded\ - \ in techno-solutionism. We conclude with a number of recommendations for stimulating\ - \ community-wide discussion on these themes in the hope that this leads to the\ - \ development of an outward-facing perspective that fully engages with political\ - \ topics.},\n address = {Birmingham, UK},\n author = {Morreale, Fabio and Bin,\ - \ S. M. Astrid and McPherson, Andrew and Stapleton, Paul and Wanderley, Marcelo},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813294},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {160--165},\n presentation-video = {https://youtu.be/y2iDN24ZLTg},\n publisher\ - \ = {Birmingham City University},\n title = {A NIME Of The Times: Developing an\ - \ Outward-Looking Political Agenda For This Community},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper31.pdf},\n\ - \ year = {2020}\n}\n" + ID: Hnicode233onMorissette2016 + abstract: "The author's artistic practice as a composer and performer is\ntransdisciplinary.\ + \ The body as a vector associated with sound, gesture, video,\nphysical space,\ + \ and technological space, constitute the six founding elements.\nThey give rise\ + \ to works between music and dance, between musical theater and\nmultimedia works\ + \ leading to a new 
hybrid performative practice. These works are\nrealized using\ + \ a motion capture system by computer vision, SICMAP (Systéme\nInteractif de Captation\ + \ du Mouvement en Art Performatif --- Interactive\nMotion Capture System For The\ + \ Performative Arts). In this paper, the author\nsituates her artistic practice\ + \ founded by the three pillars of transdisciplinary\nresearch methodology. The\ + \ path taken by the performer-creator, leading to the\nconception of the SICMAP,\ + \ is then explained through a reflection on the\n`dream instrument'. Followed\ + \ by a technical description, the SICMAP\nis contextualized using theoretical\ + \ models: the instrumental continuum and energy\ncontinuum, the `dream instrument'\ + \ and the typology of the\ninstrumental gesture. Initiated by the SICMAP, these\ + \ are then applied to a new\nparadigm the gesture-sound space and subsequently\ + \ put into practice through the\ncreation of the work From Infinity To Within." + address: 'Brisbane, Australia' + author: Barah Héon-Morissette + bibtex: "@inproceedings{Hnicode233onMorissette2016,\n abstract = {The author's artistic\ + \ practice as a composer and performer is\ntransdisciplinary. The body as a vector\ + \ associated with sound, gesture, video,\nphysical space, and technological space,\ + \ constitute the six founding elements.\nThey give rise to works between music\ + \ and dance, between musical theater and\nmultimedia works leading to a new hybrid\ + \ performative practice. These works are\nrealized using a motion capture system\ + \ by computer vision, SICMAP (Syst\\'{e}me\nInteractif de Captation du Mouvement\ + \ en Art Performatif --- Interactive\nMotion Capture System For The Performative\ + \ Arts). In this paper, the author\nsituates her artistic practice founded by\ + \ the three pillars of transdisciplinary\nresearch methodology. 
The path taken\ + \ by the performer-creator, leading to the\nconception of the SICMAP, is then\ + \ explained through a reflection on the\n`dream instrument'. Followed by a technical\ + \ description, the SICMAP\nis contextualized using theoretical models: the instrumental\ + \ continuum and energy\ncontinuum, the `dream instrument' and the typology of\ + \ the\ninstrumental gesture. Initiated by the SICMAP, these are then applied to\ + \ a new\nparadigm the gesture-sound space and subsequently put into practice through\ + \ the\ncreation of the work From Infinity To Within.},\n address = {Brisbane,\ + \ Australia},\n author = {Barah H\\'{e}on-Morissette},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176024},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {253--258},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Transdisciplinary Methodology: from Theory to the Stage, Creation for\ + \ the {SIC}MAP},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0050.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813294 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176024 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 160--165 - presentation-video: https://youtu.be/y2iDN24ZLTg - publisher: Birmingham City University - title: 'A NIME Of The Times: Developing an Outward-Looking Political Agenda For - This Community' - url: https://www.nime.org/proceedings/2020/nime2020_paper31.pdf - year: 2020 + pages: 253--258 + publisher: Queensland Conservatorium Griffith University + title: 'Transdisciplinary Methodology: from Theory to the Stage, Creation for the + SICMAP' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0050.pdf + year: 2016 - 
ENTRYTYPE: inproceedings - ID: NIME20_32 - abstract: 'We present TRAVIS II, an augmented acoustic violin with touch sensors - integrated into its 3D printed fingerboard that track left-hand finger gestures - in real time. The fingerboard has four strips of conductive PLA filament which - produce an electric signal when fingers press down on each string. While these - sensors are physically robust, they are mechanically assembled and thus easy to - replace if damaged. The performer can also trigger presets via four FSRs attached - to the body of the violin. The instrument is completely wireless, giving the performer - the freedom to move throughout the performance space. While the sensing fingerboard - is installed in place of the traditional fingerboard, all other electronics can - be removed from the augmented instrument, maintaining the aesthetics of a traditional - violin. Our design allows violinists to naturally create music for interactive - performance and improvisation without requiring new instrumental techniques. In - this paper, we describe the design of the instrument, experiments leading to the - sensing fingerboard, and performative applications of the instrument.' - address: 'Birmingham, UK' - author: 'Ko, Chantelle L and Oehlberg, Lora' - bibtex: "@inproceedings{NIME20_32,\n abstract = {We present TRAVIS II, an augmented\ - \ acoustic violin with touch sensors integrated into its 3D printed fingerboard\ - \ that track left-hand finger gestures in real time. The fingerboard has four\ - \ strips of conductive PLA filament which produce an electric signal when fingers\ - \ press down on each string. While these sensors are physically robust, they are\ - \ mechanically assembled and thus easy to replace if damaged. The performer can\ - \ also trigger presets via four FSRs attached to the body of the violin. The instrument\ - \ is completely wireless, giving the performer the freedom to move throughout\ - \ the performance space. 
While the sensing fingerboard is installed in place of\ - \ the traditional fingerboard, all other electronics can be removed from the augmented\ - \ instrument, maintaining the aesthetics of a traditional violin. Our design allows\ - \ violinists to naturally create music for interactive performance and improvisation\ - \ without requiring new instrumental techniques. In this paper, we describe the\ - \ design of the instrument, experiments leading to the sensing fingerboard, and\ - \ performative applications of the instrument.},\n address = {Birmingham, UK},\n\ - \ author = {Ko, Chantelle L and Oehlberg, Lora},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.4813300},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {166--171},\n presentation-video\ - \ = {https://youtu.be/XIAd_dr9PHE},\n publisher = {Birmingham City University},\n\ - \ title = {Touch Responsive Augmented Violin Interface System II: Integrating\ - \ Sensors into a 3D Printed Fingerboard},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper32.pdf},\n\ - \ year = {2020}\n}\n" + ID: Xiao2016 + abstract: "This paper explores how an actuated pin-based shape display may\nserve\ + \ as a platform on which to build musical instruments and controllers. We\ndesigned\ + \ and prototyped three new instruments that use the shape display not only\nas\ + \ an input device, but also as a source of acoustic sound. These cover a range\n\ + of interaction paradigms to generate ambient textures, polyrhythms, and melodies.\n\ + This paper first presents existing work from which we drew interactions and\n\ + metaphors for our designs. We then introduce each of our instruments and the\n\ + back-end software we used to prototype them. Finally, we offer reflections on\n\ + some central themes of NIME, including the relationship between musician and\n\ + machine." 
+ address: 'Brisbane, Australia' + author: Xiao Xiao and Donald Derek Haddad and Thomas Sanchez and Akito van Troyer + and Rébecca Kleinberger and Penny Webb and Joe Paradiso and Tod Machover and Hiroshi + Ishii + bibtex: "@inproceedings{Xiao2016,\n abstract = {This paper explores how an actuated\ + \ pin-based shape display may\nserve as a platform on which to build musical instruments\ + \ and controllers. We\ndesigned and prototyped three new instruments that use\ + \ the shape display not only\nas an input device, but also as a source of acoustic\ + \ sound. These cover a range\nof interaction paradigms to generate ambient textures,\ + \ polyrhythms, and melodies.\nThis paper first presents existing work from which\ + \ we drew interactions and\nmetaphors for our designs. We then introduce each\ + \ of our instruments and the\nback-end software we used to prototype them. Finally,\ + \ we offer reflections on\nsome central themes of NIME, including the relationship\ + \ between musician and\nmachine.},\n address = {Brisbane, Australia},\n author\ + \ = {Xiao Xiao and Donald Derek Haddad and Thomas Sanchez and Akito van Troyer\ + \ and R\\'{e}becca Kleinberger and Penny Webb and Joe Paradiso and Tod Machover\ + \ and Hiroshi Ishii},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176145},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {259--264},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Kin\\'{e}phone:\ + \ Exploring the Musical Potential of an Actuated Pin-Based Shape Display},\n track\ + \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0051.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813300 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176145 + isbn: 
978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 166--171 - presentation-video: https://youtu.be/XIAd_dr9PHE - publisher: Birmingham City University - title: 'Touch Responsive Augmented Violin Interface System II: Integrating Sensors - into a 3D Printed Fingerboard' - url: https://www.nime.org/proceedings/2020/nime2020_paper32.pdf - year: 2020 + pages: 259--264 + publisher: Queensland Conservatorium Griffith University + title: 'Kinéphone: Exploring the Musical Potential of an Actuated Pin-Based Shape + Display' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0051.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_33 - abstract: 'The expressive control of sound and music through body movements is well-studied. For - some people, body movement is demanding, and although they would prefer to express - themselves freely using gestural control, they are unable to use such interfaces - without difficulty. In this paper, we present the P(l)aying Attention framework - for manipulating recorded music to support these people, and to help the therapists - that work with them. The aim is to facilitate body awareness, exploration, and - expressivity by allowing the manipulation of a pre-recorded ‘ensemble’ through - an interpretation of body movement, provided by a machine-learning system trained - on physiotherapist assessments and movement data from people with chronic pain. The - system considers the nature of a person’s movement (e.g. protective) and offers - an interpretation in terms of the joint-groups that are playing a major role in - the determination at that point in the movement, and to which attention should - perhaps be given (or the opposite at the user’s discretion). Using music to convey - the interpretation offers informational (through movement sonification) and creative - (through manipulating the ensemble by movement) possibilities. 
The approach offers - the opportunity to explore movement and music at multiple timescales and under - varying musical aesthetics.' - address: 'Birmingham, UK' - author: 'Gold, Nicolas E and Wang, Chongyang and Olugbade, Temitayo and Berthouze, - Nadia and Williams, Amanda' - bibtex: "@inproceedings{NIME20_33,\n abstract = {The expressive control of sound\ - \ and music through body movements is well-studied. For some people, body movement\ - \ is demanding, and although they would prefer to express themselves freely using\ - \ gestural control, they are unable to use such interfaces without difficulty.\ - \ In this paper, we present the P(l)aying Attention framework for manipulating\ - \ recorded music to support these people, and to help the therapists that work\ - \ with them. The aim is to facilitate body awareness, exploration, and expressivity\ - \ by allowing the manipulation of a pre-recorded ‘ensemble’ through an interpretation\ - \ of body movement, provided by a machine-learning system trained on physiotherapist\ - \ assessments and movement data from people with chronic pain. The system considers\ - \ the nature of a person’s movement (e.g. protective) and offers an interpretation\ - \ in terms of the joint-groups that are playing a major role in the determination\ - \ at that point in the movement, and to which attention should perhaps be given\ - \ (or the opposite at the user’s discretion). Using music to convey the interpretation\ - \ offers informational (through movement sonification) and creative (through manipulating\ - \ the ensemble by movement) possibilities. 
The approach offers the opportunity\ - \ to explore movement and music at multiple timescales and under varying musical\ - \ aesthetics.},\n address = {Birmingham, UK},\n author = {Gold, Nicolas E and\ - \ Wang, Chongyang and Olugbade, Temitayo and Berthouze, Nadia and Williams, Amanda},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813303},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {172--175},\n publisher = {Birmingham City University},\n title = {P(l)aying\ - \ Attention: Multi-modal, multi-temporal music control},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper33.pdf},\n\ - \ year = {2020}\n}\n" + ID: Waite2016 + abstract: "This paper presents a brief review of current literature detailing\n\ + some of the issues and trends in composition and performance with interactive\n\ + music systems. Of particular interest is how musicians interact with a separate\n\ + machine entity that exercises agency over the creative process. The use of\nreal-world\ + \ metaphors as a strategy for increasing audience engagement is also\ndiscussed.\n\ + The composition and system Church Belles is presented, analyzed and evaluated\ + \ in\nterms of its architecture, how it relates to existing studies of musician-machine\n\ + creative interaction and how the use of a real-world metaphor can promote\naudience\ + \ perceptions of liveness. This develops previous NIME work by offering a\ndetailed\ + \ case study of the development process of both a system and a piece for\npopular,\ + \ non-improvisational vocal/guitar music." + address: 'Brisbane, Australia' + author: Si Waite + bibtex: "@inproceedings{Waite2016,\n abstract = {This paper presents a brief review\ + \ of current literature detailing\nsome of the issues and trends in composition\ + \ and performance with interactive\nmusic systems. 
Of particular interest is how\ + \ musicians interact with a separate\nmachine entity that exercises agency over\ + \ the creative process. The use of\nreal-world metaphors as a strategy for increasing\ + \ audience engagement is also\ndiscussed.\nThe composition and system Church Belles\ + \ is presented, analyzed and evaluated in\nterms of its architecture, how it relates\ + \ to existing studies of musician-machine\ncreative interaction and how the use\ + \ of a real-world metaphor can promote\naudience perceptions of liveness. This\ + \ develops previous NIME work by offering a\ndetailed case study of the development\ + \ process of both a system and a piece for\npopular, non-improvisational vocal/guitar\ + \ music.},\n address = {Brisbane, Australia},\n author = {Si Waite},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176139},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {265--270},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Church Belles: An Interactive System and Composition\ + \ Using Real-World Metaphors},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0052.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813303 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176139 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 172--175 - publisher: Birmingham City University - title: 'P(l)aying Attention: Multi-modal, multi-temporal music control' - url: https://www.nime.org/proceedings/2020/nime2020_paper33.pdf - year: 2020 + pages: 265--270 + publisher: Queensland Conservatorium Griffith University + title: 'Church Belles: An Interactive System and Composition Using Real-World Metaphors' + track: Papers + url: 
http://www.nime.org/proceedings/2016/nime2016_paper0052.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_34 - abstract: 'We present a musical interface specifically designed for inclusive performance - that offers a shared experience for both individuals who are deaf and hard of - hearing as well as those who are not. This interface borrows gestures (with or - without overt meaning) from American Sign Language (ASL), rendered using low-frequency - sounds that can be felt by everyone in the performance. The Deaf and Hard of Hearing - cannot experience the sound in the same way. Instead, they are able to physically - experience the vibrations, nuances, contours, as well as its correspondence with - the hand gestures. Those who are not hard of hearing can experience the sound, - but also feel it just the same, with the knowledge that the same physical vibrations - are shared by everyone. The employment of sign language adds another aesthetic - dimension to the instrument --a nuanced borrowing of a functional communication - medium for an artistic end. ' - address: 'Birmingham, UK' - author: 'Cavdir, Doga and Wang, Ge' - bibtex: "@inproceedings{NIME20_34,\n abstract = {We present a musical interface\ - \ specifically designed for inclusive performance that offers a shared experience\ - \ for both individuals who are deaf and hard of hearing as well as those who are\ - \ not. This interface borrows gestures (with or without overt meaning) from American\ - \ Sign Language (ASL), rendered using low-frequency sounds that can be felt by\ - \ everyone in the performance. The Deaf and Hard of Hearing cannot experience\ - \ the sound in the same way. 
Instead, they are able to physically experience the\ - \ vibrations, nuances, contours, as well as its correspondence with the hand gestures.\ - \ Those who are not hard of hearing can experience the sound, but also feel it\ - \ just the same, with the knowledge that the same physical vibrations are shared\ - \ by everyone. The employment of sign language adds another aesthetic dimension\ - \ to the instrument --a nuanced borrowing of a functional communication medium\ - \ for an artistic end. },\n address = {Birmingham, UK},\n author = {Cavdir, Doga\ - \ and Wang, Ge},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813305},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {176--181},\n presentation-video = {https://youtu.be/JCvlHu4UaZ0},\n\ - \ publisher = {Birmingham City University},\n title = {Felt Sound: A Shared Musical\ - \ Experience for the Deaf and Hard of Hearing},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper34.pdf},\n\ - \ year = {2020}\n}\n" + ID: Olowe2016 + abstract: "We propose residUUm, an audiovisual performance tool that uses\nsonification\ + \ to orchestrate a particle system of shapes, as an attempt to build\nan audiovisual\ + \ user interface in which all the actions of a performer on a laptop\nare intended\ + \ to be explicitly interpreted by the audience. We propose two\napproaches to\ + \ performing with residUUm and discuss the methods utilized to\nfulfill the promise\ + \ of audience-visible interaction: mapping and performance\nstrategies applied\ + \ to express audiovisual interactions with multilayered\nsound-image relationships.\ + \ The system received positive feedback from 34 audience\nparticipants on aspects\ + \ such as aesthetics and audiovisual integration, and we\nidentified further design\ + \ challenges around performance clarity and strategy. 
We\ndiscuss residUUm's development\ + \ objectives, modes of interaction and the impact of\nan audience-visible interface\ + \ on the performer and observer. " + address: 'Brisbane, Australia' + author: Ireti Olowe and Giulio Moro and Mathieu Barthet + bibtex: "@inproceedings{Olowe2016,\n abstract = {We propose residUUm, an audiovisual\ + \ performance tool that uses\nsonification to orchestrate a particle system of\ + \ shapes, as an attempt to build\nan audiovisual user interface in which all the\ + \ actions of a performer on a laptop\nare intended to be explicitly interpreted\ + \ by the audience. We propose two\napproaches to performing with residUUm and\ + \ discuss the methods utilized to\nfulfill the promise of audience-visible interaction:\ + \ mapping and performance\nstrategies applied to express audiovisual interactions\ + \ with multilayered\nsound-image relationships. The system received positive feedback\ + \ from 34 audience\nparticipants on aspects such as aesthetics and audiovisual\ + \ integration, and we\nidentified further design challenges around performance\ + \ clarity and strategy. We\ndiscuss residUUm's development objectives, modes of\ + \ interaction and the impact of\nan audience-visible interface on the performer\ + \ and observer. 
},\n address = {Brisbane, Australia},\n author = {Ireti Olowe\ + \ and Giulio Moro and Mathieu Barthet},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176098},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {271--276},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {residUUm: user\ + \ mapping and performance strategies for multilayered live audiovisual generation},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0053.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813305 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176098 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 176--181 - presentation-video: https://youtu.be/JCvlHu4UaZ0 - publisher: Birmingham City University - title: 'Felt Sound: A Shared Musical Experience for the Deaf and Hard of Hearing' - url: https://www.nime.org/proceedings/2020/nime2020_paper34.pdf - year: 2020 + pages: 271--276 + publisher: Queensland Conservatorium Griffith University + title: 'residUUm: user mapping and performance strategies for multilayered live + audiovisual generation' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0053.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_35 - abstract: 'This paper examines the use of Sound Sensors and audio as input material - for New Interfaces for Musical Expression (NIMEs), exploring the unique affordances - and character of the interactions and instruments that leverage it. Examples of - previous work in the literature that use audio as sensor input data are examined - for insights into how the use of Sound Sensors provides unique opportunities within - the NIME context. 
We present the results of a user study comparing sound-based - sensors to other sensing modalities within the context of controlling parameters. The - study suggests that the use of Sound Sensors can enhance gestural flexibility - and nuance but that they also present challenges in accuracy and repeatability.' - address: 'Birmingham, UK' - author: 'Leitman, Sasha' - bibtex: "@inproceedings{NIME20_35,\n abstract = {This paper examines the use of\ - \ Sound Sensors and audio as input material for New Interfaces for Musical Expression\ - \ (NIMEs), exploring the unique affordances and character of the interactions\ - \ and instruments that leverage it. Examples of previous work in the literature\ - \ that use audio as sensor input data are examined for insights into how the use\ - \ of Sound Sensors provides unique opportunities within the NIME context. We\ - \ present the results of a user study comparing sound-based sensors to other sensing\ - \ modalities within the context of controlling parameters. The study suggests\ - \ that the use of Sound Sensors can enhance gestural flexibility and nuance but\ - \ that they also present challenges in accuracy and repeatability.},\n address\ - \ = {Birmingham, UK},\n author = {Leitman, Sasha},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813309},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {182--187},\n publisher = {Birmingham\ - \ City University},\n title = {Sound Based Sensors for NIMEs},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper35.pdf},\n\ - \ year = {2020}\n}\n" + ID: Bhumber2016 + abstract: "This paper describes the processes involved in developing Pendula,\n\ + a performance environment and interactive installation using swings, interactive\n\ + video, and audio. 
A presentation of the project is described using three swings.\n\ + Gyroscopic and accelerometer data were used in each of the setups to control\n\ + audio and visual parameters.The installation was presented as both an interactive\n\ + environment and as a performance instrument, with multiple public performances.\n\ + Construction of the physical devices used, circuits built, and software created\n\ + is covered in this paper, along with a discussion of problems and their solutions\n\ + encountered during the development of Pendula." + address: 'Brisbane, Australia' + author: Kirandeep Bhumber and Nancy Lee and Brian Topp + bibtex: "@inproceedings{Bhumber2016,\n abstract = {This paper describes the processes\ + \ involved in developing Pendula,\na performance environment and interactive installation\ + \ using swings, interactive\nvideo, and audio. A presentation of the project is\ + \ described using three swings.\nGyroscopic and accelerometer data were used in\ + \ each of the setups to control\naudio and visual parameters.The installation\ + \ was presented as both an interactive\nenvironment and as a performance instrument,\ + \ with multiple public performances.\nConstruction of the physical devices used,\ + \ circuits built, and software created\nis covered in this paper, along with a\ + \ discussion of problems and their solutions\nencountered during the development\ + \ of Pendula.},\n address = {Brisbane, Australia},\n author = {Kirandeep Bhumber\ + \ and Nancy Lee and Brian Topp},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1175992},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {277--285},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Pendula: An Interactive\ + \ Swing Installation and Performance Environment},\n track = {Papers},\n url =\ + \ {http://www.nime.org/proceedings/2016/nime2016_paper0054.pdf},\n year = 
{2016}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813309 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175992 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 182--187 - publisher: Birmingham City University - title: Sound Based Sensors for NIMEs - url: https://www.nime.org/proceedings/2020/nime2020_paper35.pdf - year: 2020 + pages: 277--285 + publisher: Queensland Conservatorium Griffith University + title: 'Pendula: An Interactive Swing Installation and Performance Environment' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0054.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_36 - abstract: 'This paper presents a novel interactive system for creating audio-visual - expressions on tabletop display by dynamically manipulating solids of revolution - called spheroids. The four types of basic spinning and rolling movements of spheroids - are recognized from the physical conditions such as the contact area, the location - of the centroid, the (angular) velocity, and the curvature of the locus all obtained - from sensor data on the display. They are then used for interactively generating - audio-visual effects that match each of the movements. We developed a digital - content that integrated these functionalities and enabled composition and live - performance through manipulation of spheroids.' - address: 'Birmingham, UK' - author: 'Ikawa, Yuma and Matsuura, Akihiro' - bibtex: "@inproceedings{NIME20_36,\n abstract = {This paper presents a novel interactive\ - \ system for creating audio-visual expressions on tabletop display by dynamically\ - \ manipulating solids of revolution called spheroids. 
The four types of basic\ - \ spinning and rolling movements of spheroids are recognized from the physical\ - \ conditions such as the contact area, the location of the centroid, the (angular)\ - \ velocity, and the curvature of the locus all obtained from sensor data on the\ - \ display. They are then used for interactively generating audio-visual effects\ - \ that match each of the movements. We developed a digital content that integrated\ - \ these functionalities and enabled composition and live performance through manipulation\ - \ of spheroids.},\n address = {Birmingham, UK},\n author = {Ikawa, Yuma and Matsuura,\ - \ Akihiro},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813311},\n editor\ - \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ - \ pages = {188--189},\n publisher = {Birmingham City University},\n title = {Playful\ - \ Audio-Visual Interaction with Spheroids },\n url = {https://www.nime.org/proceedings/2020/nime2020_paper36.pdf},\n\ - \ year = {2020}\n}\n" + ID: Dabin2016 + abstract: "This project explores the potential for 3D modelling and printing\nto\ + \ create customised flutes that can play music in a variety of microtonal\nscales.\ + \ One of the challenges in the field of microtonality is that conventional\nmusical\ + \ instruments are inadequate for realising the abundance of theoretical\ntunings\ + \ that musicians wish to investigate. This paper focuses on the development\n\ + of two types of flutes, the recorder and transverse flute, with interchangeable\n\ + mouthpieces. 
These flutes are designed to play subharmonic microtonal scales.\ + \ The\ndiscussion provides an overview of the design and implementation process,\n\ + including calculation methods for acoustic modelling and 3D printing\ntechnologies,\ + \ as well as an evaluation of some of the difficulties encountered.\nResults from\ + \ our 3D printed flutes suggest that whilst further refinements are\nnecessary\ + \ in our designs, 3D modelling and printing techniques offer new and\nvaluable\ + \ methods for the design and production of customised musical instruments.\nThe\ + \ long term goal of this project is to create a system in which users can\nspecify\ + \ the tuning of their instrument to generate a 3D model and have it printed\n\ + on demand. " + address: 'Brisbane, Australia' + author: Matthew Dabin and Terumi Narushima and Stephen Beirne and Christian Ritz + and Kraig Grady + bibtex: "@inproceedings{Dabin2016,\n abstract = {This project explores the potential\ + \ for 3D modelling and printing\nto create customised flutes that can play music\ + \ in a variety of microtonal\nscales. One of the challenges in the field of microtonality\ + \ is that conventional\nmusical instruments are inadequate for realising the abundance\ + \ of theoretical\ntunings that musicians wish to investigate. This paper focuses\ + \ on the development\nof two types of flutes, the recorder and transverse flute,\ + \ with interchangeable\nmouthpieces. These flutes are designed to play subharmonic\ + \ microtonal scales. 
The\ndiscussion provides an overview of the design and implementation\ + \ process,\nincluding calculation methods for acoustic modelling and 3D printing\n\ + technologies, as well as an evaluation of some of the difficulties encountered.\n\ + Results from our 3D printed flutes suggest that whilst further refinements are\n\ + necessary in our designs, 3D modelling and printing techniques offer new and\n\ + valuable methods for the design and production of customised musical instruments.\n\ + The long term goal of this project is to create a system in which users can\n\ + specify the tuning of their instrument to generate a 3D model and have it printed\n\ + on demand. },\n address = {Brisbane, Australia},\n author = {Matthew Dabin and\ + \ Terumi Narushima and Stephen Beirne and Christian Ritz and Kraig Grady},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176014},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {286--290},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {{3D} Modelling and Printing of Microtonal Flutes},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0056.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813311 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176014 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 188--189 - publisher: Birmingham City University - title: 'Playful Audio-Visual Interaction with Spheroids ' - url: https://www.nime.org/proceedings/2020/nime2020_paper36.pdf - year: 2020 + pages: 286--290 + publisher: Queensland Conservatorium Griffith University + title: 3D Modelling and Printing of Microtonal Flutes + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0056.pdf + year: 2016 - ENTRYTYPE: 
inproceedings - ID: NIME20_37 - abstract: 'This paper presents ARLooper, an augmented reality mobile interface that - allows multiple users to record sound and perform together in a shared AR space. - ARLooper is an attempt to explore the potential of collaborative mobile AR instruments - in supporting non-verbal communication for musical performances. With ARLooper, - the user can record, manipulate, and play sounds being visualized as 3D waveforms - in an AR space. ARLooper provides a shared AR environment wherein multiple users - can observe each other''s activities in real time, supporting increasing the understanding - of collaborative contexts. This paper provides the background of the research - and the design and technical implementation of ARLooper, followed by a user study.' - address: 'Birmingham, UK' - author: 'Park, Sihwa' - bibtex: "@inproceedings{NIME20_37,\n abstract = {This paper presents ARLooper, an\ - \ augmented reality mobile interface that allows multiple users to record sound\ - \ and perform together in a shared AR space. ARLooper is an attempt to explore\ - \ the potential of collaborative mobile AR instruments in supporting non-verbal\ - \ communication for musical performances. With ARLooper, the user can record,\ - \ manipulate, and play sounds being visualized as 3D waveforms in an AR space.\ - \ ARLooper provides a shared AR environment wherein multiple users can observe\ - \ each other's activities in real time, supporting increasing the understanding\ - \ of collaborative contexts. 
This paper provides the background of the research\ - \ and the design and technical implementation of ARLooper, followed by a user\ - \ study.},\n address = {Birmingham, UK},\n author = {Park, Sihwa},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813313},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {190--195},\n\ - \ presentation-video = {https://youtu.be/Trw4epKeUbM},\n publisher = {Birmingham\ - \ City University},\n title = {Collaborative Mobile Instruments in a Shared AR\ - \ Space: a Case of ARLooper},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper37.pdf},\n\ - \ year = {2020}\n}\n" + ID: Hofmann2016a + abstract: "Low cost, credit card size computers like the Raspberry Pi allow\nmusicians\ + \ to experiment with building software-based standalone musical\ninstruments.\ + \ The COSMO Project aims to provide an easy-to-use hardware and\nsoftware framework\ + \ to build Csound based instruments as hardware devices. Inside\nthe instrument,\ + \ the Csound software is running on a Raspberry Pi computer,\nconnected to a custom\ + \ designed interface board (COSMO-HAT) that allows to connect\npotentiometers,\ + \ switches, LED's, and sensors. A classic stomp box design is used\nto demonstrate\ + \ how Csound can be brought on stage as a stand-alone hardware\neffect instrument." + address: 'Brisbane, Australia' + author: Alex Hofmann and Bernt Waerstad and Kristoffer Koch + bibtex: "@inproceedings{Hofmann2016a,\n abstract = {Low cost, credit card size computers\ + \ like the Raspberry Pi allow\nmusicians to experiment with building software-based\ + \ standalone musical\ninstruments. The COSMO Project aims to provide an easy-to-use\ + \ hardware and\nsoftware framework to build Csound based instruments as hardware\ + \ devices. 
Inside\nthe instrument, the Csound software is running on a Raspberry\ + \ Pi computer,\nconnected to a custom designed interface board (COSMO-HAT) that\ + \ allows to connect\npotentiometers, switches, LED's, and sensors. A classic stomp\ + \ box design is used\nto demonstrate how Csound can be brought on stage as a stand-alone\ + \ hardware\neffect instrument.},\n address = {Brisbane, Australia},\n author =\ + \ {Alex Hofmann and Bernt Waerstad and Kristoffer Koch},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176030},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {291--294},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Csound Instruments On Stage},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0057.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813313 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176030 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 190--195 - presentation-video: https://youtu.be/Trw4epKeUbM - publisher: Birmingham City University - title: 'Collaborative Mobile Instruments in a Shared AR Space: a Case of ARLooper' - url: https://www.nime.org/proceedings/2020/nime2020_paper37.pdf - year: 2020 - - -- ENTRYTYPE: inproceedings - ID: NIME20_38 - abstract: 'Expressive 2D multi-touch interfaces have in recent years moved from - research prototypes to industrial products, from repurposed generic computer input - devices to controllers specially designed for musical expression. A host of practicioners - use this type of devices in many different ways, with different gestures and sound - synthesis or transformation methods. 
In order to get an overview of existing and - desired usages, we launched an on-line survey that collected 37 answers from practicioners - in and outside of academic and design communities. In the survey we inquired about - the participants'' devices, their strengths and weaknesses, the layout of control - dimensions, the used gestures and mappings, the synthesis software or hardware - and the use of audio descriptors and machine learning. The results can inform - the design of future interfaces, gesture analysis and mapping, and give directions - for the need and use of machine learning for user adaptation.' - address: 'Birmingham, UK' - author: 'Schwarz, Diemo and Liu, Abby Wanyu and Bevilacqua, Frederic' - bibtex: "@inproceedings{NIME20_38,\n abstract = {Expressive 2D multi-touch interfaces\ - \ have in recent years moved from research prototypes to industrial products,\ - \ from repurposed generic computer input devices to controllers specially designed\ - \ for musical expression. A host of practicioners use this type of devices in\ - \ many different ways, with different gestures and sound synthesis or transformation\ - \ methods. In order to get an overview of existing and desired usages, we launched\ - \ an on-line survey that collected 37 answers from practicioners in and outside\ - \ of academic and design communities. In the survey we inquired about the participants'\ - \ devices, their strengths and weaknesses, the layout of control dimensions, the\ - \ used gestures and mappings, the synthesis software or hardware and the use of\ - \ audio descriptors and machine learning. 
The results can inform the design of\ - \ future interfaces, gesture analysis and mapping, and give directions for the\ - \ need and use of machine learning for user adaptation.},\n address = {Birmingham,\ - \ UK},\n author = {Schwarz, Diemo and Liu, Abby Wanyu and Bevilacqua, Frederic},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813318},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {196--201},\n presentation-video = {https://youtu.be/eE8I3mecaB8},\n publisher\ - \ = {Birmingham City University},\n title = {A Survey on the Use of 2D Touch Interfaces\ - \ for Musical Expression},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper38.pdf},\n\ - \ year = {2020}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.4813318 - editor: Romain Michon and Franziska Schroeder - issn: 2220-4806 - month: July - pages: 196--201 - presentation-video: https://youtu.be/eE8I3mecaB8 - publisher: Birmingham City University - title: A Survey on the Use of 2D Touch Interfaces for Musical Expression - url: https://www.nime.org/proceedings/2020/nime2020_paper38.pdf - year: 2020 + pages: 291--294 + publisher: Queensland Conservatorium Griffith University + title: Csound Instruments On Stage + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0057.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_39 - abstract: 'General-Purpose GPU computing is becoming an increasingly viable option - for acceleration, including in the audio domain. Although it can improve performance, - the intrinsic nature of a device like the GPU involves data transfers and execution - commands which requires time to complete. Therefore, there is an understandable - caution concerning the overhead involved with using the GPU for audio computation. 
- This paper aims to clarify the limitations by presenting a performance benchmarking - suite. The benchmarks utilize OpenCL and CUDA across various tests to highlight - the considerations and limitations of processing audio in the GPU environment. - The benchmarking suite has been used to gather a collection of results across - various hardware. Salient results have been reviewed in order to highlight the - benefits and limitations of the GPU for digital audio. The results in this work - show that the minimal GPU overhead fits into the real-time audio requirements - provided the buffer size is selected carefully. The baseline overhead is shown - to be roughly 0.1ms, depending on the GPU. This means buffer sizes 8 and above - are completed within the allocated time frame. Results from more demanding tests, - involving physical modelling synthesis, demonstrated a balance was needed between - meeting the sample rate and keeping within limits for latency and jitter. Buffer - sizes from 1 to 16 failed to sustain the sample rate whilst buffer sizes 512 to - 32768 exceeded either latency or jitter limits. Buffer sizes in between these - ranges, such as 256, satisfied the sample rate, latency and jitter requirements - chosen for this paper.' - address: 'Birmingham, UK' - author: 'Renney, Harri L and Mitchell, Tom and Gaster, Benedict' - bibtex: "@inproceedings{NIME20_39,\n abstract = {General-Purpose GPU computing is\ - \ becoming an increasingly viable option for acceleration, including in the audio\ - \ domain. Although it can improve performance, the intrinsic nature of a device\ - \ like the GPU involves data transfers and execution commands which requires time\ - \ to complete. Therefore, there is an understandable caution concerning the overhead\ - \ involved with using the GPU for audio computation. This paper aims to clarify\ - \ the limitations by presenting a performance benchmarking suite. 
The benchmarks\ - \ utilize OpenCL and CUDA across various tests to highlight the considerations\ - \ and limitations of processing audio in the GPU environment. The benchmarking\ - \ suite has been used to gather a collection of results across various hardware.\ - \ Salient results have been reviewed in order to highlight the benefits and limitations\ - \ of the GPU for digital audio. The results in this work show that the minimal\ - \ GPU overhead fits into the real-time audio requirements provided the buffer\ - \ size is selected carefully. The baseline overhead is shown to be roughly 0.1ms,\ - \ depending on the GPU. This means buffer sizes 8 and above are completed within\ - \ the allocated time frame. Results from more demanding tests, involving physical\ - \ modelling synthesis, demonstrated a balance was needed between meeting the sample\ - \ rate and keeping within limits for latency and jitter. Buffer sizes from 1 to\ - \ 16 failed to sustain the sample rate whilst buffer sizes 512 to 32768 exceeded\ - \ either latency or jitter limits. 
Buffer sizes in between these ranges, such\ - \ as 256, satisfied the sample rate, latency and jitter requirements chosen for\ - \ this paper.},\n address = {Birmingham, UK},\n author = {Renney, Harri L and\ - \ Mitchell, Tom and Gaster, Benedict},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813320},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {202--207},\n presentation-video = {https://youtu.be/xAVEHJZRIx0},\n\ - \ publisher = {Birmingham City University},\n title = {There and Back Again: The\ - \ Practicality of GPU Accelerated Digital Audio},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper39.pdf},\n\ - \ year = {2020}\n}\n" + ID: Resch2016 + abstract: "This paper describes a setup for embedding complex virtual\ninstruments\ + \ such as a physical model of the prepared piano sound synthesis in the\nsequencing\ + \ library note~ for Max. Based on the requirements of contemporary music\nand\ + \ media arts, note~ introduces computer-aided composition techniques and\ngraphical\ + \ user interfaces for sequencing and editing into the real time world of\nMax/MSP.\ + \ A piano roll, a microtonal musical score and the capability to attach\nfloating-point\ + \ lists of (theoretically) arbitrary length to a single note-on\nevent, enables\ + \ artists to play, edit and record compound sound synthesis with the\nnecessary\ + \ precision." + address: 'Brisbane, Australia' + author: Thomas Resch and Stefan Bilbao + bibtex: "@inproceedings{Resch2016,\n abstract = {This paper describes a setup for\ + \ embedding complex virtual\ninstruments such as a physical model of the prepared\ + \ piano sound synthesis in the\nsequencing library note~ for Max. 
Based on the\ + \ requirements of contemporary music\nand media arts, note~ introduces computer-aided\ + \ composition techniques and\ngraphical user interfaces for sequencing and editing\ + \ into the real time world of\nMax/MSP. A piano roll, a microtonal musical score\ + \ and the capability to attach\nfloating-point lists of (theoretically) arbitrary\ + \ length to a single note-on\nevent, enables artists to play, edit and record\ + \ compound sound synthesis with the\nnecessary precision.},\n address = {Brisbane,\ + \ Australia},\n author = {Thomas Resch and Stefan Bilbao},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176108},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {295--299},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Controlling complex virtuel instruments---A setup with note~ for Max\ + \ and prepared piano sound synthesis},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0058.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813320 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176108 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 202--207 - presentation-video: https://youtu.be/xAVEHJZRIx0 - publisher: Birmingham City University - title: 'There and Back Again: The Practicality of GPU Accelerated Digital Audio' - url: https://www.nime.org/proceedings/2020/nime2020_paper39.pdf - year: 2020 + pages: 295--299 + publisher: Queensland Conservatorium Griffith University + title: Controlling complex virtuel instruments---A setup with note~ for Max and + prepared piano sound synthesis + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0058.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_4 - abstract: 
'Ambulation is a sound walk that uses field recording techniques and listening - technologies to create a walking performance using environmental sound. Ambulation - engages with the act of recording as an improvised performance in response to - the soundscapes it is presented within. In this paper we describe the work and - place it in relationship to other artists engaged with field recording and extended - sound walking practices. We will give technical details of the Ambulation system - we developed as part of the creation of the piece, and conclude with a collection - of observations that emerged from the project. The research around the development - and presentation of Ambulation contributes to the idea of field recording as a - live, procedural practice, moving away from the ideas of the movement of documentary - material from one place to another. We will show how having an open, improvisational - approach to technologically supported sound walking enables rich and unexpected - results to occur and how this way of working can contribute to NIME design and - thinking.' - address: 'Birmingham, UK' - author: 'Shaw, Tim and Bowers, John' - bibtex: "@inproceedings{NIME20_4,\n abstract = {Ambulation is a sound walk that\ - \ uses field recording techniques and listening technologies to create a walking\ - \ performance using environmental sound. Ambulation engages with the act of recording\ - \ as an improvised performance in response to the soundscapes it is presented\ - \ within. In this paper we describe the work and place it in relationship to other\ - \ artists engaged with field recording and extended sound walking practices. We\ - \ will give technical details of the Ambulation system we developed as part of\ - \ the creation of the piece, and conclude with a collection of observations that\ - \ emerged from the project. 
The research around the development and presentation\ - \ of Ambulation contributes to the idea of field recording as a live, procedural\ - \ practice, moving away from the ideas of the movement of documentary material\ - \ from one place to another. We will show how having an open, improvisational\ - \ approach to technologically supported sound walking enables rich and unexpected\ - \ results to occur and how this way of working can contribute to NIME design and\ - \ thinking.},\n address = {Birmingham, UK},\n author = {Shaw, Tim and Bowers,\ - \ John},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.4813322},\n editor = {Romain\ - \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ - \ = {23--28},\n presentation-video = {https://youtu.be/dDXkNnQXdN4},\n publisher\ - \ = {Birmingham City University},\n title = {Ambulation: Exploring Listening Technologies\ - \ for an Extended Sound Walking Practice},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper4.pdf},\n\ - \ year = {2020}\n}\n" + ID: Brown2016 + abstract: "Camera-based motion tracking has become a popular enabling\ntechnology\ + \ for gestural human-computer interaction. However, the approach suffers\nfrom\ + \ several limitations which have been shown to be particularly problematic\nwhen\ + \ employed within musical contexts. This paper presents Leimu, a wrist mount\n\ + that couples a Leap Motion optical sensor with an inertial measurement unit to\n\ + combine the benefits of wearable and camera-based motion tracking. Leimu is\n\ + designed, developed and then evaluated using discourse and statistical analysis\n\ + methods. The results indicate that the Leimu is an effective interface for\ngestural\ + \ music interaction and offers improved tracking precision over Leap\nMotion positioned\ + \ on a table top. 
" + address: 'Brisbane, Australia' + author: Dom Brown and Nathan Renney and Adam Stark and Chris Nash and Tom Mitchell + bibtex: "@inproceedings{Brown2016,\n abstract = {Camera-based motion tracking has\ + \ become a popular enabling\ntechnology for gestural human-computer interaction.\ + \ However, the approach suffers\nfrom several limitations which have been shown\ + \ to be particularly problematic\nwhen employed within musical contexts. This\ + \ paper presents Leimu, a wrist mount\nthat couples a Leap Motion optical sensor\ + \ with an inertial measurement unit to\ncombine the benefits of wearable and camera-based\ + \ motion tracking. Leimu is\ndesigned, developed and then evaluated using discourse\ + \ and statistical analysis\nmethods. The results indicate that the Leimu is an\ + \ effective interface for\ngestural music interaction and offers improved tracking\ + \ precision over Leap\nMotion positioned on a table top. },\n address = {Brisbane,\ + \ Australia},\n author = {Dom Brown and Nathan Renney and Adam Stark and Chris\ + \ Nash and Tom Mitchell},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176000},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {300--304},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Leimu: Gloveless\ + \ Music Interaction Using a Wrist Mounted Leap Motion},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0059.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813322 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176000 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 23--28 - presentation-video: https://youtu.be/dDXkNnQXdN4 - publisher: Birmingham City University - title: 'Ambulation: Exploring Listening 
Technologies for an Extended Sound Walking - Practice' - url: https://www.nime.org/proceedings/2020/nime2020_paper4.pdf - year: 2020 + pages: 300--304 + publisher: Queensland Conservatorium Griffith University + title: 'Leimu: Gloveless Music Interaction Using a Wrist Mounted Leap Motion' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0059.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_40 - abstract: 'Learning to play an instrument is intrinsically multimodal, and we have - seen a trend of applying visual and haptic feedback in music games and computer-aided - music tutoring systems. However, most current systems are still designed to master - individual pieces of music; it is unclear how well the learned skills can be generalized - to new pieces. We aim to explore this question. In this study, we contribute Interactive - Rainbow Score, an interactive visual system to boost the learning of sight-playing, - the general musical skill to read music and map the visual representations to - performance motions. The key design of Interactive Rainbow Score is to associate - pitches (and the corresponding motions) with colored notation and further strengthen - such association via real-time interactions. Quantitative results show that the - interactive feature on average increases the learning efficiency by 31.1%. Further - analysis indicates that it is critical to apply the interaction in the early period - of learning.' - address: 'Birmingham, UK' - author: 'Xia, Gus and Chin, Daniel and Zhang, Yian and Zhang, Tianyu and Zhao, Junbo' - bibtex: "@inproceedings{NIME20_40,\n abstract = {Learning to play an instrument\ - \ is intrinsically multimodal, and we have seen a trend of applying visual and\ - \ haptic feedback in music games and computer-aided music tutoring systems. 
However,\ - \ most current systems are still designed to master individual pieces of music;\ - \ it is unclear how well the learned skills can be generalized to new pieces.\ - \ We aim to explore this question. In this study, we contribute Interactive Rainbow\ - \ Score, an interactive visual system to boost the learning of sight-playing,\ - \ the general musical skill to read music and map the visual representations to\ - \ performance motions. The key design of Interactive Rainbow Score is to associate\ - \ pitches (and the corresponding motions) with colored notation and further strengthen\ - \ such association via real-time interactions. Quantitative results show that\ - \ the interactive feature on average increases the learning efficiency by 31.1%.\ - \ Further analysis indicates that it is critical to apply the interaction in the\ - \ early period of learning.},\n address = {Birmingham, UK},\n author = {Xia, Gus\ - \ and Chin, Daniel and Zhang, Yian and Zhang, Tianyu and Zhao, Junbo},\n booktitle\ + ID: Gnicode243mez2016 + abstract: "This paper presents the design of a Max/MSP flexible workflow\nframework\ + \ built for complex real-time interactive performances. This system was\ndeveloped\ + \ for Emovere, an interdisciplinary piece for dance, biosignals, sound\nand visuals,\ + \ yet it was conceived to accommodate interactive performances of\ndifferent nature\ + \ and of heterogeneous technical requirements, which we believe to\nrepresent\ + \ a common underlying structure among these.\nThe work presented in this document\ + \ proposes a framework that takes care of the\nsignal input/output stages, as\ + \ well as storing and recalling presets and scenes,\nthus allowing the user to\ + \ focus on the programming of interaction models and\nsound synthesis or sound\ + \ processing. Results are presented with Emovere as an\nexample case, discussing\ + \ the advantages and further challenges that this\nframework offers for other\ + \ performance scenarios." 
+ address: 'Brisbane, Australia' + author: Esteban Gómez and Javier Jaimovich + bibtex: "@inproceedings{Gnicode243mez2016,\n abstract = {This paper presents the\ + \ design of a Max/MSP flexible workflow\nframework built for complex real-time\ + \ interactive performances. This system was\ndeveloped for Emovere, an interdisciplinary\ + \ piece for dance, biosignals, sound\nand visuals, yet it was conceived to accommodate\ + \ interactive performances of\ndifferent nature and of heterogeneous technical\ + \ requirements, which we believe to\nrepresent a common underlying structure among\ + \ these.\nThe work presented in this document proposes a framework that takes\ + \ care of the\nsignal input/output stages, as well as storing and recalling presets\ + \ and scenes,\nthus allowing the user to focus on the programming of interaction\ + \ models and\nsound synthesis or sound processing. Results are presented with\ + \ Emovere as an\nexample case, discussing the advantages and further challenges\ + \ that this\nframework offers for other performance scenarios.},\n address = {Brisbane,\ + \ Australia},\n author = {Esteban G\\'{o}mez and Javier Jaimovich},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813324},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {208--213},\n\ - \ publisher = {Birmingham City University},\n title = {Interactive Rainbow Score:\ - \ A Visual-centered Multimodal Flute Tutoring System},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper40.pdf},\n\ - \ year = {2020}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1176018},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {305--309},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Designing a Flexible Workflow for Complex Real-Time\ + \ Interactive Performances},\n track = {Papers},\n url = 
{http://www.nime.org/proceedings/2016/nime2016_paper0060.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813324 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176018 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 208--213 - publisher: Birmingham City University - title: 'Interactive Rainbow Score: A Visual-centered Multimodal Flute Tutoring - System' - url: https://www.nime.org/proceedings/2020/nime2020_paper40.pdf - year: 2020 + pages: 305--309 + publisher: Queensland Conservatorium Griffith University + title: Designing a Flexible Workflow for Complex Real-Time Interactive Performances + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0060.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_41 - abstract: 'Research on Accessible Digital Musical Instruments (ADMIs) has received - growing attention over the past decades, carving out an increasingly large space - in the literature. Despite the recent publication of state-of-the-art review works, - there are still few systematic studies on ADMIs design analysis. In this paper - we propose a formal tool to explore the main design aspects of ADMIs based on - Dimension Space Analysis, a well established methodology in the NIME literature - which allows to generate an effective visual representation of the design space. - We therefore propose a set of relevant dimensions, which are based both on categories - proposed in recent works in the research context, and on original contributions. - We then proceed to demonstrate its applicability by selecting a set of relevant - case studies, and analyzing a sample set of ADMIs found in the literature.' 
- address: 'Birmingham, UK' - author: 'Davanzo, Nicola and Avanzini, Federico' - bibtex: "@inproceedings{NIME20_41,\n abstract = {Research on Accessible Digital\ - \ Musical Instruments (ADMIs) has received growing attention over the past decades,\ - \ carving out an increasingly large space in the literature. Despite the recent\ - \ publication of state-of-the-art review works, there are still few systematic\ - \ studies on ADMIs design analysis. In this paper we propose a formal tool to\ - \ explore the main design aspects of ADMIs based on Dimension Space Analysis,\ - \ a well established methodology in the NIME literature which allows to generate\ - \ an effective visual representation of the design space. We therefore propose\ - \ a set of relevant dimensions, which are based both on categories proposed in\ - \ recent works in the research context, and on original contributions. We then\ - \ proceed to demonstrate its applicability by selecting a set of relevant case\ - \ studies, and analyzing a sample set of ADMIs found in the literature.},\n address\ - \ = {Birmingham, UK},\n author = {Davanzo, Nicola and Avanzini, Federico},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813326},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {214--220},\n\ - \ presentation-video = {https://youtu.be/pJlB5k8TV9M},\n publisher = {Birmingham\ - \ City University},\n title = {A Dimension Space for the Evaluation of Accessible\ - \ Digital Musical Instruments},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper41.pdf},\n\ - \ year = {2020}\n}\n" + ID: Volioti2016 + abstract: "There is a growing interest in `unlocking' the motor\nskills of expert\ + \ musicians. 
Motivated by this need, the main objective of this\npaper is to present\ + \ a new way of modeling expressive gesture variations in\nmusical performance.\ + \ For this purpose, the 3D gesture recognition engine\n`x2Gesture' (eXpert eXpressive\ + \ Gesture) has been developed, inspired\nby the Gesture Variation Follower, which\ + \ is initially designed and developed at\nIRCAM in Paris and then extended at\ + \ Goldsmiths College in London. x2Gesture\nsupports both learning of musical gestures\ + \ and live performing, through gesture\nsonification, as a unified user experience.\ + \ The deeper understanding of the\nexpressive gestural variations permits to define\ + \ the confidence bounds of the\nexpert's gestures, which are used during the decoding\ + \ phase of the\nrecognition. The first experiments show promising results in terms\ + \ of recognition\naccuracy and temporal alignment between template and performed\ + \ gesture, which\nleads to a better fluidity and immediacy and thus gesture sonification. " + address: 'Brisbane, Australia' + author: Christina Volioti and Sotiris Manitsaris and Eleni Katsouli and Athanasios + Manitsaris + bibtex: "@inproceedings{Volioti2016,\n abstract = {There is a growing interest in\ + \ `unlocking' the motor\nskills of expert musicians. Motivated by this need, the\ + \ main objective of this\npaper is to present a new way of modeling expressive\ + \ gesture variations in\nmusical performance. For this purpose, the 3D gesture\ + \ recognition engine\n`x2Gesture' (eXpert eXpressive Gesture) has been developed,\ + \ inspired\nby the Gesture Variation Follower, which is initially designed and\ + \ developed at\nIRCAM in Paris and then extended at Goldsmiths College in London.\ + \ x2Gesture\nsupports both learning of musical gestures and live performing, through\ + \ gesture\nsonification, as a unified user experience. 
The deeper understanding\ + \ of the\nexpressive gestural variations permits to define the confidence bounds\ + \ of the\nexpert's gestures, which are used during the decoding phase of the\n\ + recognition. The first experiments show promising results in terms of recognition\n\ + accuracy and temporal alignment between template and performed gesture, which\n\ + leads to a better fluidity and immediacy and thus gesture sonification. },\n address\ + \ = {Brisbane, Australia},\n author = {Christina Volioti and Sotiris Manitsaris\ + \ and Eleni Katsouli and Athanasios Manitsaris},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176137},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {310--315},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {x2Gesture: how machines could learn expressive gesture variations of\ + \ expert musicians},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0061.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813326 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176137 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 214--220 - presentation-video: https://youtu.be/pJlB5k8TV9M - publisher: Birmingham City University - title: A Dimension Space for the Evaluation of Accessible Digital Musical Instruments - url: https://www.nime.org/proceedings/2020/nime2020_paper41.pdf - year: 2020 + pages: 310--315 + publisher: Queensland Conservatorium Griffith University + title: 'x2Gesture: how machines could learn expressive gesture variations of expert + musicians' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0061.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_42 - abstract: "This paper describes physical and 
digital design strategies for the Feedback-Actuated\ - \ Augmented Bass - a self-contained feedback double bass with embedded DSP capabilities.\ - \ A primary goal of the research project is to create an instrument that responds\ - \ well to the use of extended playing techniques and can manifest complex harmonic\ - \ spectra while retaining the feel and sonic \nfingerprint of an acoustic double\ - \ bass. While the physical con\nfiguration of the instrument builds on similar\ - \ feedback string instruments being developed in recent years, this project focuses\ - \ on modifying the feedback behaviour through low-level audio feature extractions\ - \ coupled to computationally lightweight \nfiltering and amplitude management\ - \ algorithms. We discuss these adaptive and time-variant processing strategies\ - \ and how we apply them in sculpting the system's dynamic and complex behaviour\ - \ to our liking." - address: 'Birmingham, UK' - author: 'Melbye, Adam Pultz and Ulfarsson, Halldor A' - bibtex: "@inproceedings{NIME20_42,\n abstract = {This paper describes physical and\ - \ digital design strategies for the Feedback-Actuated Augmented Bass - a self-contained\ - \ feedback double bass with embedded DSP capabilities. A primary goal of the research\ - \ project is to create an instrument that responds well to the use of extended\ - \ playing techniques and can manifest complex harmonic spectra while retaining\ - \ the feel and sonic \nfingerprint of an acoustic double bass. While the physical\ - \ con\nfiguration of the instrument builds on similar feedback string instruments\ - \ being developed in recent years, this project focuses on modifying the feedback\ - \ behaviour through low-level audio feature extractions coupled to computationally\ - \ lightweight \nfiltering and amplitude management algorithms. 
We discuss these\ - \ adaptive and time-variant processing strategies and how we apply them in sculpting\ - \ the system's dynamic and complex behaviour to our liking.},\n address = {Birmingham,\ - \ UK},\n author = {Melbye, Adam Pultz and Ulfarsson, Halldor A},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813328},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {221--226},\n presentation-video\ - \ = {https://youtu.be/jXePge1MS8A},\n publisher = {Birmingham City University},\n\ - \ title = {Sculpting the behaviour of the Feedback-Actuated Augmented Bass: Design\ - \ strategies for subtle manipulations of string feedback using simple adaptive\ - \ algorithms},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper42.pdf},\n\ - \ year = {2020}\n}\n" + ID: Jaimovich2016 + abstract: 'This paper presents the work developed for Emovere: an interactive real-time + interdisciplinary performance that measures physiological signals from dancers + to drive a piece that explores and reflects around the biology of emotion. This + document focuses on the design of a series of interaction modes and materials + that were developed for this performance, and are believed to be a contribution + for the creation of artistic projects that work with dancers and physiological + signals. The paper introduces the motivation and theoretical framework behind + this project, to then deliver a detailed description and analysis of four different + interaction modes built to drive this performance using electromyography and electrocardiography. + Readers will find a discussion of the results obtained with these designs, as + well as comments on future work.' 
+ address: 'Brisbane, Australia' + author: Javier Jaimovich + bibtex: "@inproceedings{Jaimovich2016,\n abstract = {This paper presents the work\ + \ developed for Emovere: an interactive real-time interdisciplinary performance\ + \ that measures physiological signals from dancers to drive a piece that explores\ + \ and reflects around the biology of emotion. This document focuses on the design\ + \ of a series of interaction modes and materials that were developed for this\ + \ performance, and are believed to be a contribution for the creation of artistic\ + \ projects that work with dancers and physiological signals. The paper introduces\ + \ the motivation and theoretical framework behind this project, to then deliver\ + \ a detailed description and analysis of four different interaction modes built\ + \ to drive this performance using electromyography and electrocardiography. Readers\ + \ will find a discussion of the results obtained with these designs, as well as\ + \ comments on future work.},\n address = {Brisbane, Australia},\n author = {Javier\ + \ Jaimovich},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176036},\n isbn\ + \ = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {316--320},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Emovere: Designing\ + \ Sound Interactions for Biosignals and Dancers},\n track = {Papers},\n url =\ + \ {http://www.nime.org/proceedings/2016/nime2016_paper0062.pdf},\n year = {2016}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813328 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176036 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 221--226 - presentation-video: https://youtu.be/jXePge1MS8A - publisher: Birmingham City University - title: 'Sculpting the behaviour of the 
Feedback-Actuated Augmented Bass: Design - strategies for subtle manipulations of string feedback using simple adaptive algorithms' - url: https://www.nime.org/proceedings/2020/nime2020_paper42.pdf - year: 2020 + pages: 316--320 + publisher: Queensland Conservatorium Griffith University + title: 'Emovere: Designing Sound Interactions for Biosignals and Dancers' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0062.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_43 - abstract: 'The comparative study presented in this paper focuses on two approaches - for the search of sound presets using a specific geometric touch app. The first - approach is based on independent sliders on screen and is called analytic. The - second is based on interpolation between presets represented by polygons on screen - and is called holistic. Participants had to listen to, memorize, and search for - sound presets characterized by four parameters. Ten different configurations of - sound synthesis and processing were presented to each participant, once for each - approach. The performance scores of 28 participants (not including early testers) - were computed using two measured values: the search duration, and the parametric - distance between the reference and answered presets. Compared to the analytic - sliders-based interface, the holistic interpolation-based interface demonstrated - a significant performance improvement for 60% of sound synthesizers. The other - 40% led to equivalent results for the analytic and holistic interfaces. Using - sliders, expert users performed nearly as well as they did with interpolation. - Beginners and intermediate users struggled more with sliders, while the interpolation - allowed them to get quite close to experts’ results.' 
- address: 'Birmingham, UK' - author: 'Le Vaillant, Gwendal and Dutoit, Thierry and Giot, Rudi' - bibtex: "@inproceedings{NIME20_43,\n abstract = {The comparative study presented\ - \ in this paper focuses on two approaches for the search of sound presets using\ - \ a specific geometric touch app. The first approach is based on independent sliders\ - \ on screen and is called analytic. The second is based on interpolation between\ - \ presets represented by polygons on screen and is called holistic. Participants\ - \ had to listen to, memorize, and search for sound presets characterized by four\ - \ parameters. Ten different configurations of sound synthesis and processing were\ - \ presented to each participant, once for each approach. The performance scores\ - \ of 28 participants (not including early testers) were computed using two measured\ - \ values: the search duration, and the parametric distance between the reference\ - \ and answered presets. Compared to the analytic sliders-based interface, the\ - \ holistic interpolation-based interface demonstrated a significant performance\ - \ improvement for 60% of sound synthesizers. The other 40% led to equivalent results\ - \ for the analytic and holistic interfaces. Using sliders, expert users performed\ - \ nearly as well as they did with interpolation. Beginners and intermediate users\ - \ struggled more with sliders, while the interpolation allowed them to get quite\ - \ close to experts’ results.},\n address = {Birmingham, UK},\n author = {Le Vaillant,\ - \ Gwendal and Dutoit, Thierry and Giot, Rudi},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.4813330},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {227--232},\n presentation-video\ - \ = {https://youtu.be/Korw3J_QvQE},\n publisher = {Birmingham City University},\n\ - \ title = {Analytic vs. 
holistic approaches for the live search of sound presets\ - \ using graphical interpolation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper43.pdf},\n\ - \ year = {2020}\n}\n" + ID: Snicode248derberg2016 + abstract: "This paper explores the possibility of breaking the barrier\nbetween\ + \ deaf and hearing people when it comes to the subject of making music.\nSuggestions\ + \ on how deaf and hearing people can collaborate in creating music\ntogether,\ + \ are presented. The conducted research will focus on deaf people with a\ngeneral\ + \ interest in music as well as hearing musicians as target groups. Through\nreviewing\ + \ different related research areas, it is found that visualization of\nsound along\ + \ with a haptic feedback can help deaf people interpret and interact\nwith music.\ + \ With this in mind, three variations of a collaborative user interface\nare presented,\ + \ in which deaf and hearing people are meant to collaborate in\ncreating short\ + \ beats and melody sequences. Through evaluating the three\nprototypes, with two\ + \ deaf people and two hearing musicians, it is found that the\ntarget groups can\ + \ collaborate to some extent in creating beats. However, in order\nfor the target\ + \ groups to create melodic sequences together in a satisfactory\nmanner, more\ + \ detailed visualization and distributed haptic output is necessary,\nmostly due\ + \ to the fact that the deaf test participants struggle in distinguishing\nbetween\ + \ higher pitch and timbre. 
" + address: 'Brisbane, Australia' + author: 'Söderberg, Ene Alicia and Odgaard, Rasmus Emil and Sarah Bitsch and Oliver + Höeg-Jensen and Christensen, Nikolaj Schildt and Poulsen, Sören Dahl and Steven + Gelineck' + bibtex: "@inproceedings{Snicode248derberg2016,\n abstract = {This paper explores\ + \ the possibility of breaking the barrier\nbetween deaf and hearing people when\ + \ it comes to the subject of making music.\nSuggestions on how deaf and hearing\ + \ people can collaborate in creating music\ntogether, are presented. The conducted\ + \ research will focus on deaf people with a\ngeneral interest in music as well\ + \ as hearing musicians as target groups. Through\nreviewing different related\ + \ research areas, it is found that visualization of\nsound along with a haptic\ + \ feedback can help deaf people interpret and interact\nwith music. With this\ + \ in mind, three variations of a collaborative user interface\nare presented,\ + \ in which deaf and hearing people are meant to collaborate in\ncreating short\ + \ beats and melody sequences. Through evaluating the three\nprototypes, with two\ + \ deaf people and two hearing musicians, it is found that the\ntarget groups can\ + \ collaborate to some extent in creating beats. However, in order\nfor the target\ + \ groups to create melodic sequences together in a satisfactory\nmanner, more\ + \ detailed visualization and distributed haptic output is necessary,\nmostly due\ + \ to the fact that the deaf test participants struggle in distinguishing\nbetween\ + \ higher pitch and timbre. 
},\n address = {Brisbane, Australia},\n author = {S\\\ + ''{o}derberg, Ene Alicia and Odgaard, Rasmus Emil and Sarah Bitsch and Oliver\ + \ H\\''{o}eg-Jensen and Christensen, Nikolaj Schildt and Poulsen, S\\''{o}ren\ + \ Dahl and Steven Gelineck},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176112},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {321--326},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Music Aid---Towards\ + \ a Collaborative Experience for Deaf and Hearing People in Creating Music},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0063.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813330 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176112 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 227--232 - presentation-video: https://youtu.be/Korw3J_QvQE - publisher: Birmingham City University - title: Analytic vs. holistic approaches for the live search of sound presets using - graphical interpolation - url: https://www.nime.org/proceedings/2020/nime2020_paper43.pdf - year: 2020 + pages: 321--326 + publisher: Queensland Conservatorium Griffith University + title: Music Aid---Towards a Collaborative Experience for Deaf and Hearing People + in Creating Music + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0063.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_44 - abstract: 'The purpose of this project is to develop an interface for writing and - performing music using sequencers in virtual reality (VR). 
The VR sequencer deals - with chance-based operations to select audio clips for playback and spatial orientation-based - rhythm and melody generation, while incorporating three-dimensional (3-D) objects - as omnidirectional playheads. Spheres which grow from a variable minimum size - to a variable maximum size at a variable speed, constantly looping, represent - the passage of time in this VR sequencer. The 3-D assets which represent samples - are actually sample containers that come in six common dice shapes. As the dice - come into contact with a sphere, their samples are triggered to play. This behavior - mimics digital audio workstation (DAW) playheads reading MIDI left-to-right in - popular professional and consumer software sequencers. To incorporate height into - VR music making, the VR sequencer is capable of generating terrain at the press - of a button. Each terrain will gradually change, creating the possibility for - the dice to roll on their own. Audio effects are built in to each scene and mapped - to terrain parameters, creating another opportunity for chance operations in the - music making process. The chance-based sample selection, spatial orientation-defined - rhythms, and variable terrain mapped to audio effects lead to indeterminacy in - performance and replication of a single piece of music. This project aims to give - the gaming community access to experimental music making by means of consumer - virtual reality hardware.' - address: 'Birmingham, UK' - author: 'Mitchusson, Chase' - bibtex: "@inproceedings{NIME20_44,\n abstract = {The purpose of this project is\ - \ to develop an interface for writing and performing music using sequencers in\ - \ virtual reality (VR). The VR sequencer deals with chance-based operations to\ - \ select audio clips for playback and spatial orientation-based rhythm and melody\ - \ generation, while incorporating three-dimensional (3-D) objects as omnidirectional\ - \ playheads. 
Spheres which grow from a variable minimum size to a variable maximum\ - \ size at a variable speed, constantly looping, represent the passage of time\ - \ in this VR sequencer. The 3-D assets which represent samples are actually sample\ - \ containers that come in six common dice shapes. As the dice come into contact\ - \ with a sphere, their samples are triggered to play. This behavior mimics digital\ - \ audio workstation (DAW) playheads reading MIDI left-to-right in popular professional\ - \ and consumer software sequencers. To incorporate height into VR music making,\ - \ the VR sequencer is capable of generating terrain at the press of a button.\ - \ Each terrain will gradually change, creating the possibility for the dice to\ - \ roll on their own. Audio effects are built in to each scene and mapped to terrain\ - \ parameters, creating another opportunity for chance operations in the music\ - \ making process. The chance-based sample selection, spatial orientation-defined\ - \ rhythms, and variable terrain mapped to audio effects lead to indeterminacy\ - \ in performance and replication of a single piece of music. This project aims\ - \ to give the gaming community access to experimental music making by means of\ - \ consumer virtual reality hardware.},\n address = {Birmingham, UK},\n author\ - \ = {Mitchusson, Chase},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813332},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {233--236},\n publisher = {Birmingham City University},\n\ - \ title = {Indeterminate Sample Sequencing in Virtual Reality},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper44.pdf},\n\ - \ year = {2020}\n}\n" + ID: Larsen2016 + abstract: "Many forms of enabling technologies exist today. 
While\ntechnologies\ + \ aimed at enabling basic tasks in everyday life (locomotion, eating,\netc.) are\ + \ more common, musical instruments for people with disabilities can\nprovide a\ + \ chance for emotional enjoyment, as well as improve physical conditions\nthrough\ + \ therapeutic use. The field of musical instruments for people with\nphysical\ + \ disabilities, however, is still an emerging area of research. In this\narticle,\ + \ we look at the current state of developments, including a survey of\ncustom\ + \ designed instruments, augmentations / modifications of existing\ninstruments,\ + \ music-supported therapy, and recent trends in the area. The overview\nis extrapolated\ + \ to look at where the research is headed, providing insights for\npotential future\ + \ work." + address: 'Brisbane, Australia' + author: Jeppe Veirum Larsen and Dan Overholt and Thomas B. Moeslund + bibtex: "@inproceedings{Larsen2016,\n abstract = {Many forms of enabling technologies\ + \ exist today. While\ntechnologies aimed at enabling basic tasks in everyday life\ + \ (locomotion, eating,\netc.) are more common, musical instruments for people\ + \ with disabilities can\nprovide a chance for emotional enjoyment, as well as\ + \ improve physical conditions\nthrough therapeutic use. The field of musical instruments\ + \ for people with\nphysical disabilities, however, is still an emerging area of\ + \ research. In this\narticle, we look at the current state of developments, including\ + \ a survey of\ncustom designed instruments, augmentations / modifications of existing\n\ + instruments, music-supported therapy, and recent trends in the area. The overview\n\ + is extrapolated to look at where the research is headed, providing insights for\n\ + potential future work.},\n address = {Brisbane, Australia},\n author = {Jeppe\ + \ Veirum Larsen and Dan Overholt and Thomas B. 
Moeslund},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176056},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {327--331},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {The Prospects of Musical Instruments For People with Physical Disabilities},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0064.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813332 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176056 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 233--236 - publisher: Birmingham City University - title: Indeterminate Sample Sequencing in Virtual Reality - url: https://www.nime.org/proceedings/2020/nime2020_paper44.pdf - year: 2020 + pages: 327--331 + publisher: Queensland Conservatorium Griffith University + title: The Prospects of Musical Instruments For People with Physical Disabilities + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0064.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_45 - abstract: 'Machine learning (ML) has been used to create mappings for digital musical - instruments for over twenty-five years, and numerous ML toolkits have been developed - for the NIME community. However, little published work has studied how ML has - been used in sustained instrument building and performance practices. This paper - examines the experiences of instrument builder and performer Laetitia Sonami, - who has been using ML to build and refine her Spring Spyre instrument since 2012. - Using Sonami’s current practice as a case study, this paper explores the utility, - opportunities, and challenges involved in using ML in practice over many years. 
- This paper also reports the perspective of Rebecca Fiebrink, the creator of the - Wekinator ML tool used by Sonami, revealing how her work with Sonami has led to - changes to the software and to her teaching. This paper thus contributes a deeper - understanding of the value of ML for NIME practitioners, and it can inform design - considerations for future ML toolkits as well as NIME pedagogy. Further, it provides - new perspectives on familiar NIME conversations about mapping strategies, expressivity, - and control, informed by a dedicated practice over many years.' - address: 'Birmingham, UK' - author: 'Fiebrink, Rebecca and Sonami, Laetitia' - bibtex: "@inproceedings{NIME20_45,\n abstract = {Machine learning (ML) has been\ - \ used to create mappings for digital musical instruments for over twenty-five\ - \ years, and numerous ML toolkits have been developed for the NIME community.\ - \ However, little published work has studied how ML has been used in sustained\ - \ instrument building and performance practices. This paper examines the experiences\ - \ of instrument builder and performer Laetitia Sonami, who has been using ML to\ - \ build and refine her Spring Spyre instrument since 2012. Using Sonami’s current\ - \ practice as a case study, this paper explores the utility, opportunities, and\ - \ challenges involved in using ML in practice over many years. This paper also\ - \ reports the perspective of Rebecca Fiebrink, the creator of the Wekinator ML\ - \ tool used by Sonami, revealing how her work with Sonami has led to changes to\ - \ the software and to her teaching. This paper thus contributes a deeper understanding\ - \ of the value of ML for NIME practitioners, and it can inform design considerations\ - \ for future ML toolkits as well as NIME pedagogy. 
Further, it provides new perspectives\ - \ on familiar NIME conversations about mapping strategies, expressivity, and control,\ - \ informed by a dedicated practice over many years.},\n address = {Birmingham,\ - \ UK},\n author = {Fiebrink, Rebecca and Sonami, Laetitia},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813334},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {237--242},\n presentation-video\ - \ = {https://youtu.be/EvXZ9NayZhA},\n publisher = {Birmingham City University},\n\ - \ title = {Reflections on Eight Years of Instrument Creation with Machine Learning},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper45.pdf},\n year =\ - \ {2020}\n}\n" + ID: Benson2016 + abstract: "We present an innovative sound spatialization and shaping\ninterface,\ + \ called SoundMorpheus, which allows the placement of sounds in space,\nas well\ + \ as the altering of sound characteristics, via arm movements that resemble\n\ + those of a conductor. The interface displays sounds (or their attributes) to the\n\ + user, who reaches for them with one or both hands, grabs them, and gently or\n\ + forcefully sends them around in space, in a 360° circle. The system\ncombines\ + \ MIDI and traditional instruments with one or more myoelectric sensors.\nThese\ + \ components may be physically collocated or distributed in various locales\n\ + connected via the Internet. This system also supports the performance of\nacousmatic\ + \ and electronic music, enabling performances where the traditionally\ncentral\ + \ mixing board, need not be touched at all (or minimally touched for\ncalibration).\ + \ Finally, the system may facilitate the recording of a visual score\nof a performance,\ + \ which can be stored for later playback and additional\nmanipulation. 
We present\ + \ three projects that utilize SoundMorpheus and\ndemonstrate its capabilities\ + \ and potential." + address: 'Brisbane, Australia' + author: Christopher Benson and Bill Manaris and Seth Stoudenmier and Timothy Ward + bibtex: "@inproceedings{Benson2016,\n abstract = {We present an innovative sound\ + \ spatialization and shaping\ninterface, called SoundMorpheus, which allows the\ + \ placement of sounds in space,\nas well as the altering of sound characteristics,\ + \ via arm movements that resemble\nthose of a conductor. The interface displays\ + \ sounds (or their attributes) to the\nuser, who reaches for them with one or\ + \ both hands, grabs them, and gently or\nforcefully sends them around in space,\ + \ in a 360$^{\\circ}$ circle. The system\ncombines MIDI and traditional instruments\ + \ with one or more myoelectric sensors.\nThese components may be physically collocated\ + \ or distributed in various locales\nconnected via the Internet. This system also\ + \ supports the performance of\nacousmatic and electronic music, enabling performances\ + \ where the traditionally\ncentral mixing board, need not be touched at all (or\ + \ minimally touched for\ncalibration). Finally, the system may facilitate the\ + \ recording of a visual score\nof a performance, which can be stored for later\ + \ playback and additional\nmanipulation. 
We present three projects that utilize\ + \ SoundMorpheus and\ndemonstrate its capabilities and potential.},\n address =\ + \ {Brisbane, Australia},\n author = {Christopher Benson and Bill Manaris and Seth\ + \ Stoudenmier and Timothy Ward},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1175982},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {332--337},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {SoundMorpheus:\ + \ A Myoelectric-Sensor Based Interface for Sound Spatialization and Shaping},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0065.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813334 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175982 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 237--242 - presentation-video: https://youtu.be/EvXZ9NayZhA - publisher: Birmingham City University - title: Reflections on Eight Years of Instrument Creation with Machine Learning - url: https://www.nime.org/proceedings/2020/nime2020_paper45.pdf - year: 2020 + pages: 332--337 + publisher: Queensland Conservatorium Griffith University + title: 'SoundMorpheus: A Myoelectric-Sensor Based Interface for Sound Spatialization + and Shaping' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0065.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_46 - abstract: 'Based on the experience garnered through a longitudinal ethnographic - study, the authors reflect on the practice of designing and fabricating bespoke, - accessible music tech- nologies. Of particular focus are the social, technical - and environmental factors at play which make the provision of such technology - a reality. 
The authors make suggestions of ways to achieve long-term, sustained - use. Seemingly those involved in its design, fabrication and use could benefit - from a concerted effort to share resources, knowledge and skill as a mobilised - community of practitioners.' - address: 'Birmingham, UK' - author: 'Lucas, Alex and Ortiz, Miguel and Schroeder, Franziska' - bibtex: "@inproceedings{NIME20_46,\n abstract = {Based on the experience garnered\ - \ through a longitudinal ethnographic study, the authors reflect on the practice\ - \ of designing and fabricating bespoke, accessible music tech- nologies. Of particular\ - \ focus are the social, technical and environmental factors at play which make\ - \ the provision of such technology a reality. The authors make suggestions of\ - \ ways to achieve long-term, sustained use. Seemingly those involved in its design,\ - \ fabrication and use could benefit from a concerted effort to share resources,\ - \ knowledge and skill as a mobilised community of practitioners.},\n address =\ - \ {Birmingham, UK},\n author = {Lucas, Alex and Ortiz, Miguel and Schroeder, Franziska},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813338},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {243--248},\n presentation-video = {https://youtu.be/cLguyuZ9weI},\n publisher\ - \ = {Birmingham City University},\n title = {The Longevity of Bespoke, Accessible\ - \ Music Technology: A Case for Community},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper46.pdf},\n\ - \ year = {2020}\n}\n" + ID: Ozdemir2016 + abstract: "PORTAL is an interactive performance tool that uses a laser\nprojector\ + \ to visualize computer-generated audio signals. In this paper, we first\noffer\ + \ an overview of earlier work on audiovisual and laser art that inspired the\n\ + current project. 
We then discuss our own implementation, focusing not only on\ + \ the\ntechnical issues related to the use of a laser projector in an artistic\ + \ context,\nbut also on the aesthetic considerations in dealing with the translation\ + \ of\nsounds into visuals, and vice versa. We provide detailed descriptions of\ + \ our\nhardware implementation, our software system, and its desktop and mobile\n\ + interfaces, which are made available online. Finally, we offer the results of\ + \ a\nuser study we conducted in the form of an interactive online survey on audience\n\ + perception of the relationship between analogous sounds and visuals, which was\n\ + explored as part of our performance practice." + address: 'Brisbane, Australia' + author: Gorkem Ozdemir and Anil Camci and Angus Forbes + bibtex: "@inproceedings{Ozdemir2016,\n abstract = {PORTAL is an interactive performance\ + \ tool that uses a laser\nprojector to visualize computer-generated audio signals.\ + \ In this paper, we first\noffer an overview of earlier work on audiovisual and\ + \ laser art that inspired the\ncurrent project. We then discuss our own implementation,\ + \ focusing not only on the\ntechnical issues related to the use of a laser projector\ + \ in an artistic context,\nbut also on the aesthetic considerations in dealing\ + \ with the translation of\nsounds into visuals, and vice versa. We provide detailed\ + \ descriptions of our\nhardware implementation, our software system, and its desktop\ + \ and mobile\ninterfaces, which are made available online. 
Finally, we offer the\ + \ results of a\nuser study we conducted in the form of an interactive online survey\ + \ on audience\nperception of the relationship between analogous sounds and visuals,\ + \ which was\nexplored as part of our performance practice.},\n address = {Brisbane,\ + \ Australia},\n author = {Gorkem Ozdemir and Anil Camci and Angus Forbes},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176102},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {338--343},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {PORTAL: An Audiovisual Laser Performance System},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0066.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813338 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176102 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 243--248 - presentation-video: https://youtu.be/cLguyuZ9weI - publisher: Birmingham City University - title: 'The Longevity of Bespoke, Accessible Music Technology: A Case for Community' - url: https://www.nime.org/proceedings/2020/nime2020_paper46.pdf - year: 2020 + pages: 338--343 + publisher: Queensland Conservatorium Griffith University + title: 'PORTAL: An Audiovisual Laser Performance System' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0066.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_47 - abstract: 'With the proliferation of venues equipped with the high density loudspeaker - arrays there is a growing interest in developing new interfaces for spatial musical - expression (NISME). Of particular interest are interfaces that focus on the emancipation - of the spatial domain as the primary dimension for musical expression. 
Here we - present Monet NISME that leverages multitouch pressure-sensitive surface and the - D4 library''s spatial mask and thereby allows for a unique approach to interactive - spatialization. Further, we present a study with 22 participants designed to assess - its usefulness and compare it to the Locus, a NISME introduced in 2019 as part - of a localization study which is built on the same design principles of using - natural gestural interaction with the spatial content. Lastly, we briefly discuss - the utilization of both NISMEs in two artistic performances and propose a set - of guidelines for further exploration in the NISME domain.' - address: 'Birmingham, UK' - author: 'Bukvic, Ivica I and Sardana, Disha and Joo, Woohun' - bibtex: "@inproceedings{NIME20_47,\n abstract = {With the proliferation of venues\ - \ equipped with the high density loudspeaker arrays there is a growing interest\ - \ in developing new interfaces for spatial musical expression (NISME). Of particular\ - \ interest are interfaces that focus on the emancipation of the spatial domain\ - \ as the primary dimension for musical expression. Here we present Monet NISME\ - \ that leverages multitouch pressure-sensitive surface and the D4 library's spatial\ - \ mask and thereby allows for a unique approach to interactive spatialization.\ - \ Further, we present a study with 22 participants designed to assess its usefulness\ - \ and compare it to the Locus, a NISME introduced in 2019 as part of a localization\ - \ study which is built on the same design principles of using natural gestural\ - \ interaction with the spatial content. 
Lastly, we briefly discuss the utilization\ - \ of both NISMEs in two artistic performances and propose a set of guidelines\ - \ for further exploration in the NISME domain.},\n address = {Birmingham, UK},\n\ - \ author = {Bukvic, Ivica I and Sardana, Disha and Joo, Woohun},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813342},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {249--254},\n presentation-video\ - \ = {https://youtu.be/GQ0552Lc1rw},\n publisher = {Birmingham City University},\n\ - \ title = {New Interfaces for Spatial Musical Expression},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper47.pdf},\n\ - \ year = {2020}\n}\n" + ID: Lindell2016 + abstract: "We organised an elven day intense course in materiality for\nmusical\ + \ expressions to explore underlying principles of New Interfaces for\nMusical\ + \ Expression (NIME) in higher education. We grounded the course in\ndifferent\ + \ aspects of materiality and gathered interdisciplinary student teams\nfrom three\ + \ Nordic universities. Electronic music instrument makers participated\nin providing\ + \ the course. In eleven days the students designed and built\ninterfaces for musical\ + \ expressions, composed a piece, and performed at the\nNorberg electronic music\ + \ festival. The students explored the relationship\nbetween technology and possible\ + \ musical expression with a strong connection to\nculture and place. The emphasis\ + \ on performance provided closure and motivated\nteams to move forward in their\ + \ design and artistic processes. On the basis of the\ncourse we discuss an interdisciplinary\ + \ NIME course syllabus, and we infer that it\nbenefits from grounding in materiality\ + \ and in the place with a strong reference\nto culture." 
+ address: 'Brisbane, Australia' + author: Rikard Lindell and Koray Tahiroglu and Morten Riis and Jennie Schaeffer + bibtex: "@inproceedings{Lindell2016,\n abstract = {We organised an elven day intense\ + \ course in materiality for\nmusical expressions to explore underlying principles\ + \ of New Interfaces for\nMusical Expression (NIME) in higher education. We grounded\ + \ the course in\ndifferent aspects of materiality and gathered interdisciplinary\ + \ student teams\nfrom three Nordic universities. Electronic music instrument makers\ + \ participated\nin providing the course. In eleven days the students designed\ + \ and built\ninterfaces for musical expressions, composed a piece, and performed\ + \ at the\nNorberg electronic music festival. The students explored the relationship\n\ + between technology and possible musical expression with a strong connection to\n\ + culture and place. The emphasis on performance provided closure and motivated\n\ + teams to move forward in their design and artistic processes. 
On the basis of\ + \ the\ncourse we discuss an interdisciplinary NIME course syllabus, and we infer\ + \ that it\nbenefits from grounding in materiality and in the place with a strong\ + \ reference\nto culture.},\n address = {Brisbane, Australia},\n author = {Rikard\ + \ Lindell and Koray Tahiroglu and Morten Riis and Jennie Schaeffer},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176066},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {344--349},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Materiality for Musical Expressions: an Approach\ + \ to Interdisciplinary Syllabus Development for NIME},\n track = {Papers},\n url\ + \ = {http://www.nime.org/proceedings/2016/nime2016_paper0067.pdf},\n year = {2016}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813342 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176066 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 249--254 - presentation-video: https://youtu.be/GQ0552Lc1rw - publisher: Birmingham City University - title: New Interfaces for Spatial Musical Expression - url: https://www.nime.org/proceedings/2020/nime2020_paper47.pdf - year: 2020 + pages: 344--349 + publisher: Queensland Conservatorium Griffith University + title: 'Materiality for Musical Expressions: an Approach to Interdisciplinary Syllabus + Development for NIME' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0067.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_48 - abstract: 'This study presents an ecosystemic approach to music interaction, through - the practice-based development of a mixed reality installation artwork. 
It fuses - a generative, immersive audio composition with augmented reality visualisation, - within an architectural space as part of a blended experience. Participants are - encouraged to explore and interact with this combination of elements through physical - engagement, to then develop an understanding of how the blending of real and virtual - space occurs as the installation unfolds. The sonic layer forms a link between - the two, as a three-dimensional sound composition. Connections in the system allow - for multiple streams of data to run between the layers, which are used for the - real-time modulation of parameters. These feedback mechanisms form a complete - loop between the participant in real space, soundscape, and mixed reality visualisation, - providing a participant mediated experience that exists somewhere between creator - and observer.' - address: 'Birmingham, UK' - author: 'Durham, Mark' - bibtex: "@inproceedings{NIME20_48,\n abstract = {This study presents an ecosystemic\ - \ approach to music interaction, through the practice-based development of a mixed\ - \ reality installation artwork. It fuses a generative, immersive audio composition\ - \ with augmented reality visualisation, within an architectural space as part\ - \ of a blended experience. Participants are encouraged to explore and interact\ - \ with this combination of elements through physical engagement, to then develop\ - \ an understanding of how the blending of real and virtual space occurs as the\ - \ installation unfolds. The sonic layer forms a link between the two, as a three-dimensional\ - \ sound composition. 
Connections in the system allow for multiple streams of data\ - \ to run between the layers, which are used for the real-time modulation of parameters.\ - \ These feedback mechanisms form a complete loop between the participant in real\ - \ space, soundscape, and mixed reality visualisation, providing a participant\ - \ mediated experience that exists somewhere between creator and observer.},\n\ - \ address = {Birmingham, UK},\n author = {Durham, Mark},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813344},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {255--258},\n publisher = {Birmingham\ - \ City University},\n title = {Inhabiting the Instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper48.pdf},\n\ - \ year = {2020}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.4813344 - editor: Romain Michon and Franziska Schroeder - issn: 2220-4806 - month: July - pages: 255--258 - publisher: Birmingham City University - title: Inhabiting the Instrument - url: https://www.nime.org/proceedings/2020/nime2020_paper48.pdf - year: 2020 - - -- ENTRYTYPE: inproceedings - ID: NIME20_49 - abstract: 'This paper details technologies and artistic approaches to crowd-driven - music, discussed in the context of a live public installation in which activity - in a public space (a busy railway platform) is used to drive the automated composition - and performance of music. The approach presented uses realtime machine vision - applied to a live video feed of a scene, from which detected objects and people - are fed into Manhattan (Nash, 2014), a digital music notation that integrates - sequencing and programming to support the live creation of complex musical works - that combine static, algorithmic, and interactive elements. 
The paper discusses - the technical details of the system and artistic development of specific musical - works, introducing novel techniques for mapping chaotic systems to musical expression - and exploring issues of agency, aesthetic, accessibility and adaptability relating - to composing interactive music for crowds and public spaces. In particular, performances - as part of an installation for BBC Music Day 2018 are described. The paper subsequently - details a practical workshop, delivered digitally, exploring the development of - interactive performances in which the audience or general public actively or passively - control live generation of a musical piece. Exercises support discussions on technical, - aesthetic, and ontological issues arising from the identification and mapping - of structure, order, and meaning in non-musical domains to analogous concepts - in musical expression. Materials for the workshop are available freely with the - Manhattan software.' - address: 'Birmingham, UK' - author: 'Nash, Chris' - bibtex: "@inproceedings{NIME20_49,\n abstract = {This paper details technologies\ - \ and artistic approaches to crowd-driven music, discussed in the context of a\ - \ live public installation in which activity in a public space (a busy railway\ - \ platform) is used to drive the automated composition and performance of music.\ - \ The approach presented uses realtime machine vision applied to a live video\ - \ feed of a scene, from which detected objects and people are fed into Manhattan\ - \ (Nash, 2014), a digital music notation that integrates sequencing and programming\ - \ to support the live creation of complex musical works that combine static, algorithmic,\ - \ and interactive elements. 
The paper discusses the technical details of the system\ - \ and artistic development of specific musical works, introducing novel techniques\ - \ for mapping chaotic systems to musical expression and exploring issues of agency,\ - \ aesthetic, accessibility and adaptability relating to composing interactive\ - \ music for crowds and public spaces. In particular, performances as part of an\ - \ installation for BBC Music Day 2018 are described. The paper subsequently details\ - \ a practical workshop, delivered digitally, exploring the development of interactive\ - \ performances in which the audience or general public actively or passively control\ - \ live generation of a musical piece. Exercises support discussions on technical,\ - \ aesthetic, and ontological issues arising from the identification and mapping\ - \ of structure, order, and meaning in non-musical domains to analogous concepts\ - \ in musical expression. Materials for the workshop are available freely with\ - \ the Manhattan software.},\n address = {Birmingham, UK},\n author = {Nash, Chris},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813346},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {259--264},\n presentation-video = {https://youtu.be/DHIowP2lOsA},\n publisher\ - \ = {Birmingham City University},\n title = {Crowd-driven Music: Interactive and\ - \ Generative Approaches using Machine Vision and Manhattan},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper49.pdf},\n\ - \ year = {2020}\n}\n" + ID: Gimenes2016 + abstract: "This paper introduces Performance Without Borders and Embodied\niSound,\ + \ two sound installations performed at the 2016 Peninsula Arts Contemporary\n\ + Music Festival at Plymouth University. 
Sharing in common the use of smartphones\n\ + to afford real-time audience participation, two bespoke distributed computer\n\ + systems (Sherwell and Levinsky Music, respectively). Whilst the first one\nimplements\ + \ a cloud-based voting system, the second implements movement tracking\nand iBeacon-based\ + \ indoor-positioning to control the choice of soundtracks, audio\nsynthesis, and\ + \ surround sound positioning, among other parameters. The general\nconcepts of\ + \ the installations, in particular design and interactive possibilities\nafforded\ + \ by the computer systems are presented." + address: 'Brisbane, Australia' + author: Marcelo Gimenes and Pierre-Emmanuel Largeron and Eduardo Miranda + bibtex: "@inproceedings{Gimenes2016,\n abstract = {This paper introduces Performance\ + \ Without Borders and Embodied\niSound, two sound installations performed at the\ + \ 2016 Peninsula Arts Contemporary\nMusic Festival at Plymouth University. Sharing\ + \ in common the use of smartphones\nto afford real-time audience participation,\ + \ two bespoke distributed computer\nsystems (Sherwell and Levinsky Music, respectively).\ + \ Whilst the first one\nimplements a cloud-based voting system, the second implements\ + \ movement tracking\nand iBeacon-based indoor-positioning to control the choice\ + \ of soundtracks, audio\nsynthesis, and surround sound positioning, among other\ + \ parameters. 
The general\nconcepts of the installations, in particular design\ + \ and interactive possibilities\nafforded by the computer systems are presented.},\n\ + \ address = {Brisbane, Australia},\n author = {Marcelo Gimenes and Pierre-Emmanuel\ + \ Largeron and Eduardo Miranda},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176020},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {350--354},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Frontiers: Expanding\ + \ Musical Imagination With Audience Participation},\n track = {Papers},\n url\ + \ = {http://www.nime.org/proceedings/2016/nime2016_paper0068.pdf},\n year = {2016}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813346 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176020 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 259--264 - presentation-video: https://youtu.be/DHIowP2lOsA - publisher: Birmingham City University - title: 'Crowd-driven Music: Interactive and Generative Approaches using Machine - Vision and Manhattan' - url: https://www.nime.org/proceedings/2020/nime2020_paper49.pdf - year: 2020 + pages: 350--354 + publisher: Queensland Conservatorium Griffith University + title: 'Frontiers: Expanding Musical Imagination With Audience Participation' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0068.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_5 - abstract: 'This paper discusses the design of a musical synthesizer that takes words - as input, and attempts to generate music that somehow underscores those words. - This is considered as a tool for sound designers who could, for example, enter - dialogue from a film script and generate appropriate back- ground music. 
The synthesizer - uses emotional valence and arousal as a common representation between words and - mu- sic. It draws on previous studies that relate words and mu- sical features - to valence and arousal. The synthesizer was evaluated with a user study. Participants - listened to music generated by the synthesizer, and described the music with words. - The arousal of the words they entered was highly correlated with the intended - arousal of the music. The same was, surprisingly, not true for valence. The synthesizer - is online, at [redacted URL].' - address: 'Birmingham, UK' - author: 'Krzyzaniak, Michael J' - bibtex: "@inproceedings{NIME20_5,\n abstract = {This paper discusses the design\ - \ of a musical synthesizer that takes words as input, and attempts to generate\ - \ music that somehow underscores those words. This is considered as a tool for\ - \ sound designers who could, for example, enter dialogue from a film script and\ - \ generate appropriate back- ground music. The synthesizer uses emotional valence\ - \ and arousal as a common representation between words and mu- sic. It draws on\ - \ previous studies that relate words and mu- sical features to valence and arousal.\ - \ The synthesizer was evaluated with a user study. Participants listened to music\ - \ generated by the synthesizer, and described the music with words. The arousal\ - \ of the words they entered was highly correlated with the intended arousal of\ - \ the music. The same was, surprisingly, not true for valence. 
The synthesizer\ - \ is online, at [redacted URL].},\n address = {Birmingham, UK},\n author = {Krzyzaniak,\ - \ Michael J},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813350},\n editor\ - \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ - \ pages = {29--34},\n publisher = {Birmingham City University},\n title = {Words\ - \ to Music Synthesis},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper5.pdf},\n\ - \ year = {2020}\n}\n" + ID: Schlei2016 + abstract: |- + The PourOver Sensor Framework is an open iOS framework designed to + connect iOS control sources (hardware sensors, user input, custom algorithms) to + an audio graph's parameters. The design of the framework, motivation, and use + cases are discussed. The framework is demonstrated in an end-user friendly iOS + app PourOver, in which users can run Pd patches with easy access to hardware + sensors and iOS APIs. + address: 'Brisbane, Australia' + author: Kevin Schlei and Chris Burns and Aidan Menuge + bibtex: "@inproceedings{Schlei2016,\n abstract = {The PourOver Sensor Framework\ + \ is an open iOS framework designed to\nconnect iOS control sources (hardware\ + \ sensors, user input, custom algorithms) to\nan audio graph's parameters. The\ + \ design of the framework, motivation, and use\ncases are discussed. 
The framework\ + \ is demonstrated in an end-user friendly iOS\napp PourOver, in which users can\ + \ run Pd patches with easy access to hardware\nsensors and iOS APIs.},\n address\ + \ = {Brisbane, Australia},\n author = {Kevin Schlei and Chris Burns and Aidan\ + \ Menuge},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176114},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {355--358},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {PourOver: A Sensor-Driven Generative Music\ + \ Platform},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0069.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813350 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176114 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 29--34 - publisher: Birmingham City University - title: Words to Music Synthesis - url: https://www.nime.org/proceedings/2020/nime2020_paper5.pdf - year: 2020 + pages: 355--358 + publisher: Queensland Conservatorium Griffith University + title: 'PourOver: A Sensor-Driven Generative Music Platform' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0069.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_50 - abstract: 'This paper brings together two main perspectives on algorithmic pattern. - First, the writing of musical patterns in live coding performance, and second, - the weaving of patterns in textiles. In both cases, algorithmic pattern is an - interface between the human and the outcome, where small changes have far-reaching - impact on the results. By bringing contemporary live coding and ancient textile - approaches together, we reach a common view of pattern as algorithmic movement - (e.g. 
looping, shifting, reflecting, interfering) in the making of things. This - works beyond the usual definition of pattern used in musical interfaces, of mere - repeating sequences. We conclude by considering the place of algorithmic pattern - in a wider activity of making.' - address: 'Birmingham, UK' - author: 'Mclean, Alex' - bibtex: "@inproceedings{NIME20_50,\n abstract = {This paper brings together two\ - \ main perspectives on algorithmic pattern. First, the writing of musical patterns\ - \ in live coding performance, and second, the weaving of patterns in textiles.\ - \ In both cases, algorithmic pattern is an interface between the human and the\ - \ outcome, where small changes have far-reaching impact on the results. By bringing\ - \ contemporary live coding and ancient textile approaches together, we reach a\ - \ common view of pattern as algorithmic movement (e.g. looping, shifting, reflecting,\ - \ interfering) in the making of things. This works beyond the usual definition\ - \ of pattern used in musical interfaces, of mere repeating sequences. We conclude\ - \ by considering the place of algorithmic pattern in a wider activity of making.},\n\ - \ address = {Birmingham, UK},\n author = {Mclean, Alex},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813352},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {265--270},\n presentation-video\ - \ = {https://youtu.be/X9AkOAEDV08},\n publisher = {Birmingham City University},\n\ - \ title = {Algorithmic Pattern},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper50.pdf},\n\ - \ year = {2020}\n}\n" + ID: Hindle2016 + abstract: |2- + NIMEs typically focus on novelty but the cost of novelty is + often to + ignore other non-functional requirements and concerns such as + usability or security. 
Digital security has probably not been a + concern for performers due to the duration of their performances and + lack of disrespectful hackers, known as crackers, in attendance + carrying the appropriate equipment and software necessary to hack a + performance. Yet many modern NIMEs could be hacked from smart-phones + in the audience. The lack of security hardening makes NIMEs an easy + target --- but a question arises: if hacking can interrupt or modify + a performance couldn't hacking itself also be performance? Thus + would music hacking, live-hacking, be similar to live-coding? In + this paper we discuss how NIMEs are in danger of being hacked, and + yet how hacking can be an act of performance too. + address: 'Brisbane, Australia' + author: Abram Hindle + bibtex: "@inproceedings{Hindle2016,\n abstract = { NIMEs typically focus on novelty\ + \ but the cost of novelty is\noften to\nignore other non-functional requirements\ + \ and concerns such as\nusability or security. Digital security has probably not\ + \ been a\nconcern for performers due to the duration of their performances and\n\ + lack of disrespectful hackers, known as crackers, in attendance\ncarrying the\ + \ appropriate equipment and software necessary to hack a\nperformance. Yet many\ + \ modern NIMEs could be hacked from smart-phones\nin the audience. The lack of\ + \ security hardening makes NIMEs an easy\ntarget --- but a question arises: if\ + \ hacking can interrupt or modify\na performance couldn't hacking itself also\ + \ be performance? 
Thus\nwould music hacking, live-hacking, be similar to live-coding?\ + \ In\nthis paper we discuss how NIMEs are in danger of being hacked, and\nyet\ + \ how hacking can be an act of performance too.},\n address = {Brisbane, Australia},\n\ + \ author = {Abram Hindle},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176026},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {359--364},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Hacking NIMEs},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0070.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813352 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176026 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 265--270 - presentation-video: https://youtu.be/X9AkOAEDV08 - publisher: Birmingham City University - title: Algorithmic Pattern - url: https://www.nime.org/proceedings/2020/nime2020_paper50.pdf - year: 2020 + pages: 359--364 + publisher: Queensland Conservatorium Griffith University + title: Hacking NIMEs + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0070.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_51 - abstract: 'Interactive machine learning (IML) is an approach to building interactive - systems, including DMIs, focusing on iterative end-user data provision and direct - evaluation. This paper describes the implementation of a Javascript library, encapsulating - many of the boilerplate needs of building IML systems for creative tasks with - minimal code inclusion and low barrier to entry. 
Further, we present a set of - complimentary Audio Worklet-backed instruments to allow for in-browser creation - of new musical systems able to run concurrently with various computationally expensive - feature extractor and lightweight machine learning models without the interference - often seen in interactive Web Audio applications.' - address: 'Birmingham, UK' - author: 'McCallum, Louis and Grierson, Mick S' - bibtex: "@inproceedings{NIME20_51,\n abstract = {Interactive machine learning (IML)\ - \ is an approach to building interactive systems, including DMIs, focusing on\ - \ iterative end-user data provision and direct evaluation. This paper describes\ - \ the implementation of a Javascript library, encapsulating many of the boilerplate\ - \ needs of building IML systems for creative tasks with minimal code inclusion\ - \ and low barrier to entry. Further, we present a set of complimentary Audio Worklet-backed\ - \ instruments to allow for in-browser creation of new musical systems able to\ - \ run concurrently with various computationally expensive feature extractor and\ - \ lightweight machine learning models without the interference often seen in interactive\ - \ Web Audio applications.},\n address = {Birmingham, UK},\n author = {McCallum,\ - \ Louis and Grierson, Mick S},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813357},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {271--272},\n publisher = {Birmingham City University},\n\ - \ title = {Supporting Interactive Machine Learning Approaches to Building Musical\ - \ Instruments in the Browser},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper51.pdf},\n\ - \ year = {2020}\n}\n" + ID: Jordnicode2252016 + abstract: 'This paper presents a generative drumming agent built from the results + of an extensive survey carried out with electronic music 
producers, in two phases. + Following the techniques of user-centered interaction design, an international + group of beat producers was reviewed on the possibility of using AI algorithms + to help them in the beat production workflow. The analyzed results of these tests + were used as design requirements for constructing a system that would indeed perform + some tasks alongside the producer. The first results of this working prototype + are presented with a description of the system. The prototype is a stylistic drum + generator that creates new rhythmic patterns after being trained with a collection + of drum tracks. Further stages of development and potential algorithms are discussed.' + address: 'Brisbane, Australia' + author: Sergi Jordà and Daniel Gómez-Marín and Ángel Faraldo and Perfecto Herrera + bibtex: "@inproceedings{Jordnicode2252016,\n abstract = {This paper presents a generative\ + \ drumming agent built from the results of an extensive survey carried out with\ + \ electronic music producers, in two phases. Following the techniques of user-centered\ + \ interaction design, an international group of beat producers was reviewed on\ + \ the possibility of using AI algorithms to help them in the beat production workflow.\ + \ The analyzed results of these tests were used as design requirements for constructing\ + \ a system that would indeed perform some tasks alongside the producer. The first\ + \ results of this working prototype are presented with a description of the system.\ + \ The prototype is a stylistic drum generator that creates new rhythmic patterns\ + \ after being trained with a collection of drum tracks. 
Further stages of development\ + \ and potential algorithms are discussed.},\n address = {Brisbane, Australia},\n\ + \ author = {Sergi Jord\\`{a} and Daniel G\\'{o}mez-Mar\\'{i}n and \\'{A}ngel Faraldo\ + \ and Perfecto Herrera},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176048},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {365--370},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Drumming with\ + \ style: From user needs to a working prototype},\n track = {Papers},\n url =\ + \ {http://www.nime.org/proceedings/2016/nime2016_paper0071.pdf},\n year = {2016}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813357 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176048 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 271--272 - publisher: Birmingham City University - title: Supporting Interactive Machine Learning Approaches to Building Musical Instruments - in the Browser - url: https://www.nime.org/proceedings/2020/nime2020_paper51.pdf - year: 2020 + pages: 365--370 + publisher: Queensland Conservatorium Griffith University + title: 'Drumming with style: From user needs to a working prototype' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0071.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_52 - abstract: 'TorqueTuner is an embedded module that allows Digital Musical Instrument - (DMI) designers to map sensors to parameters of haptic effects and dynamically - modify rotary force feedback in real-time. We embedded inside TorqueTuner a collection - of haptic effects (Wall, Magnet, Detents, Spring, Friction, Spin, Free) and a - bi-directional interface through libmapper, a software library for making connections - between data signals on a shared network. 
To increase affordability and portability - of force-feedback implementations in DMI design, we designed our platform to be - wireless, self-contained and built from commercially available components. To - provide examples of modularity and portability, we integrated TorqueTuner into - a standalone haptic knob and into an existing DMI, the T-Stick. We implemented - 3 musical applications (Pitch wheel, Turntable and Exciter), by mapping sensors - to sound synthesis in audio programming environment SuperCollider. While the original - goal was to simulate the haptic feedback associated with turning a knob, we found - that the platform allows for further expanding interaction possibilities in application - scenarios where rotary control is familiar.' - address: 'Birmingham, UK' - author: 'Kirkegaard, Mathias S and Bredholt, Mathias and Frisson, Christian and - Wanderley, Marcelo' - bibtex: "@inproceedings{NIME20_52,\n abstract = {TorqueTuner is an embedded module\ - \ that allows Digital Musical Instrument (DMI) designers to map sensors to parameters\ - \ of haptic effects and dynamically modify rotary force feedback in real-time.\ - \ We embedded inside TorqueTuner a collection of haptic effects (Wall, Magnet,\ - \ Detents, Spring, Friction, Spin, Free) and a bi-directional interface through\ - \ libmapper, a software library for making connections between data signals on\ - \ a shared network. To increase affordability and portability of force-feedback\ - \ implementations in DMI design, we designed our platform to be wireless, self-contained\ - \ and built from commercially available components. To provide examples of modularity\ - \ and portability, we integrated TorqueTuner into a standalone haptic knob and\ - \ into an existing DMI, the T-Stick. We implemented 3 musical applications (Pitch\ - \ wheel, Turntable and Exciter), by mapping sensors to sound synthesis in audio\ - \ programming environment SuperCollider. 
While the original goal was to simulate\ - \ the haptic feedback associated with turning a knob, we found that the platform\ - \ allows for further expanding interaction possibilities in application scenarios\ - \ where rotary control is familiar.},\n address = {Birmingham, UK},\n author =\ - \ {Kirkegaard, Mathias S and Bredholt, Mathias and Frisson, Christian and Wanderley,\ - \ Marcelo},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813359},\n editor\ - \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ - \ pages = {273--278},\n presentation-video = {https://youtu.be/V8WDMbuX9QA},\n\ - \ publisher = {Birmingham City University},\n title = {TorqueTuner: A self contained\ - \ module for designing rotary haptic force feedback for digital musical instruments},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper52.pdf},\n year =\ - \ {2020}\n}\n" + ID: Bown2016 + abstract: |- + We describe a project in which a game of lawn bowls was recreated + using Distributed Interactive Audio Devices (DIADs), to create an interactive + musical experience in the form of a game. This paper details the design of the + underlying digital music system, some of the compositional and design + considerations, and the technical challenges involved. We discuss future + directions for our system and compositional method. + address: 'Brisbane, Australia' + author: Oliver Bown and Sam Ferguson + bibtex: "@inproceedings{Bown2016,\n abstract = {We describe a project in which a\ + \ game of lawn bowls was recreated\nusing Distributed Interactive Audio Devices\ + \ (DIADs), to create an interactive\nmusical experience in the form of a game.\ + \ This paper details the design of the\nunderlying digital music system, some\ + \ of the compositional and design\nconsiderations, and the technical challenges\ + \ involved. 
We discuss future\ndirections for our system and compositional method.},\n\ + \ address = {Brisbane, Australia},\n author = {Oliver Bown and Sam Ferguson},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1175998},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {371--372},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {A Musical Game of Bowls Using the DIADs},\n\ + \ track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0072.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813359 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175998 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 273--278 - presentation-video: https://youtu.be/V8WDMbuX9QA - publisher: Birmingham City University - title: 'TorqueTuner: A self contained module for designing rotary haptic force feedback - for digital musical instruments' - url: https://www.nime.org/proceedings/2020/nime2020_paper52.pdf - year: 2020 + pages: 371--372 + publisher: Queensland Conservatorium Griffith University + title: A Musical Game of Bowls Using the DIADs + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper0072.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_53 - abstract: 'Iterative design methods involving children and educators are difficult - to conduct, given both the ethical implications and time commitments understandably - required. The qualitative design process presented here recruits introductory - teacher training students, towards discovering useful design insights relevant - to music education technologies “by proxy”. Therefore, some of the barriers present - in child-computer interaction research are avoided. 
As an example, the method - is applied to the creation of a block-based music notation system, named Codetta. - Building upon successful educational technologies that intersect both music and - computer programming, Codetta seeks to enable child composition, whilst aiding - generalist educator’s confidence in teaching music.' - address: 'Birmingham, UK' - author: 'Ford, Corey J and Nash, Chris' - bibtex: "@inproceedings{NIME20_53,\n abstract = {Iterative design methods involving\ - \ children and educators are difficult to conduct, given both the ethical implications\ - \ and time commitments understandably required. The qualitative design process\ - \ presented here recruits introductory teacher training students, towards discovering\ - \ useful design insights relevant to music education technologies “by proxy”.\ - \ Therefore, some of the barriers present in child-computer interaction research\ - \ are avoided. As an example, the method is applied to the creation of a block-based\ - \ music notation system, named Codetta. Building upon successful educational technologies\ - \ that intersect both music and computer programming, Codetta seeks to enable\ - \ child composition, whilst aiding generalist educator’s confidence in teaching\ - \ music.},\n address = {Birmingham, UK},\n author = {Ford, Corey J and Nash, Chris},\n\ + ID: Eyes2016 + abstract: "During an electronic music performance it is common to see light\nand\ + \ sound interacting electronically in many different ways. From sound and light\n\ + shows, whereby light reacts to sound, or generated visuals are projected onto\ + \ a\nscreen behind the performer. However we asked the question what if we could\n\ + convert sound to light and back again and control sound with light? Inspired by\n\ + the huge acoustic of the Mimerlaven at Norberg festival we built a `light\ninstrument'\ + \ that allowed us to interrupt and disrupt sound using light\nforming the basis\ + \ of our piece `Interruption'." 
+ address: 'Brisbane, Australia' + author: Benjamin James Eyes and Laurits Esben Jongejan + bibtex: "@inproceedings{Eyes2016,\n abstract = {During an electronic music performance\ + \ it is common to see light\nand sound interacting electronically in many different\ + \ ways. From sound and light\nshows, whereby light reacts to sound, or generated\ + \ visuals are projected onto a\nscreen behind the performer. However we asked\ + \ the question what if we could\nconvert sound to light and back again and control\ + \ sound with light? Inspired by\nthe huge acoustic of the Mimerlaven at Norberg\ + \ festival we built a `light\ninstrument' that allowed us to interrupt and disrupt\ + \ sound using light\nforming the basis of our piece `Interruption'.},\n address\ + \ = {Brisbane, Australia},\n author = {Benjamin James Eyes and Laurits Esben Jongejan},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813361},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {279--284},\n presentation-video = {https://youtu.be/fPbZMQ5LEmk},\n publisher\ - \ = {Birmingham City University},\n title = {An Iterative Design ‘by proxy’ Method\ - \ for Developing Educational Music Interfaces},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper53.pdf},\n\ - \ year = {2020}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1176016},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {373--374},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {How to Stop Sound: Creating a light instrument\ + \ and `Interruption' a piece for the Mimerlaven, Norberg Festival 2015.},\n track\ + \ = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0073.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - 
doi: 10.5281/zenodo.4813361 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176016 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 279--284 - presentation-video: https://youtu.be/fPbZMQ5LEmk - publisher: Birmingham City University - title: An Iterative Design ‘by proxy’ Method for Developing Educational Music Interfaces - url: https://www.nime.org/proceedings/2020/nime2020_paper53.pdf - year: 2020 + pages: 373--374 + publisher: Queensland Conservatorium Griffith University + title: 'How to Stop Sound: Creating a light instrument and `Interruption'' a piece + for the Mimerlaven, Norberg Festival 2015.' + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper0073.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_54 - abstract: 'Probatio is an open-source toolkit for prototyping new digital musical - instruments created in 2016. Based on a morphological chart of postures and controls - of musical instruments, it comprises a set of blocks, bases, hubs, and supports - that, when combined, allows designers, artists, and musicians to experiment with - different input devices for musical interaction in different positions and postures. - Several musicians have used the system, and based on these past experiences, we - assembled a list of improvements to implement version 1.0 of the toolkit through - a unique international partnership between two laboratories in Brazil and Canada. - In this paper, we present the original toolkit and its use so far, summarize the - main lessons learned from musicians using it, and present the requirements behind, - and the final design of, v1.0 of the project. We also detail the work developed - in digital fabrication using two different techniques: laser cutting and 3D printing, - comparing their pros and cons. We finally discuss the opportunities and challenges - of fully sharing the project online and replicating its parts in both countries.' 
- address: 'Birmingham, UK' - author: 'Calegario, Filipe and Wanderley, Marcelo and Tragtenberg, João and Meneses, - Eduardo and Wang, Johnty and Sullivan, John and Franco, Ivan and Kirkegaard, Mathias - S and Bredholt, Mathias and Rohs, Josh' - bibtex: "@inproceedings{NIME20_54,\n abstract = {Probatio is an open-source toolkit\ - \ for prototyping new digital musical instruments created in 2016. Based on a\ - \ morphological chart of postures and controls of musical instruments, it comprises\ - \ a set of blocks, bases, hubs, and supports that, when combined, allows designers,\ - \ artists, and musicians to experiment with different input devices for musical\ - \ interaction in different positions and postures. Several musicians have used\ - \ the system, and based on these past experiences, we assembled a list of improvements\ - \ to implement version 1.0 of the toolkit through a unique international partnership\ - \ between two laboratories in Brazil and Canada. In this paper, we present the\ - \ original toolkit and its use so far, summarize the main lessons learned from\ - \ musicians using it, and present the requirements behind, and the final design\ - \ of, v1.0 of the project. We also detail the work developed in digital fabrication\ - \ using two different techniques: laser cutting and 3D printing, comparing their\ - \ pros and cons. 
We finally discuss the opportunities and challenges of fully\ - \ sharing the project online and replicating its parts in both countries.},\n\ - \ address = {Birmingham, UK},\n author = {Calegario, Filipe and Wanderley, Marcelo\ - \ and Tragtenberg, João and Meneses, Eduardo and Wang, Johnty and Sullivan, John\ - \ and Franco, Ivan and Kirkegaard, Mathias S and Bredholt, Mathias and Rohs, Josh},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813363},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {285--290},\n presentation-video = {https://youtu.be/jkFnZZUA3xs},\n publisher\ - \ = {Birmingham City University},\n title = {Probatio 1.0: collaborative development\ - \ of a toolkit for functional DMI prototypes},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper54.pdf},\n\ - \ year = {2020}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.4813363 - editor: Romain Michon and Franziska Schroeder - issn: 2220-4806 - month: July - pages: 285--290 - presentation-video: https://youtu.be/jkFnZZUA3xs - publisher: Birmingham City University - title: 'Probatio 1.0: collaborative development of a toolkit for functional DMI - prototypes' - url: https://www.nime.org/proceedings/2020/nime2020_paper54.pdf - year: 2020 - - -- ENTRYTYPE: inproceedings - ID: NIME20_55 - abstract: 'We conducted a study which examines mappings from a relatively unexplored - perspective: how they are made. Twelve skilled NIME users designed a mapping from - a T-Stick to a subtractive synthesizer, and were interviewed about their approach - to mapping design. We present a thematic analysis of the interviews, with reference - to data recordings captured while the designers worked. 
Our results suggest that - the mapping design process is an iterative process that alternates between two - working modes: diffuse exploration and directed experimentation. ' - address: 'Birmingham, UK' - author: 'West, Travis J and Wanderley, Marcelo and Caramiaux, Baptiste' - bibtex: "@inproceedings{NIME20_55,\n abstract = {We conducted a study which examines\ - \ mappings from a relatively unexplored perspective: how they are made. Twelve\ - \ skilled NIME users designed a mapping from a T-Stick to a subtractive synthesizer,\ - \ and were interviewed about their approach to mapping design. We present a thematic\ - \ analysis of the interviews, with reference to data recordings captured while\ - \ the designers worked. Our results suggest that the mapping design process is\ - \ an iterative process that alternates between two working modes: diffuse exploration\ - \ and directed experimentation. },\n address = {Birmingham, UK},\n author = {West,\ - \ Travis J and Wanderley, Marcelo and Caramiaux, Baptiste},\n booktitle = {Proceedings\ + ID: Hope2016 + abstract: "This paper-demonstration provides an overview of an generative\nmusic\ + \ score adapted for the iPad by the Decibel new music ensemble. The original\n\ + score `Loaded (NSFW)' (2015) is by Western Australian composer Laura\nJane Lowther,\ + \ and is scored for ensemble and electronics, commissioned for a\nperformance\ + \ in April 2015 at the Perth Institute of Contemporary Arts. It engages\nand develops\ + \ the Decibel Score Player application, a score reader and generator\nfor the\ + \ iPad as a tool for displaying an interactive score that requires\nperformers\ + \ to react to news headlines through musical means. The paper will\nintroduce\ + \ the concept for the player, how it was developed, and how it was used\nin the\ + \ premiere performance. The associated demonstration shows how the score\nappears\ + \ on the iPads. 
" + address: 'Brisbane, Australia' + author: Cat Hope and Stuart James and Aaron Wyatt + bibtex: "@inproceedings{Hope2016,\n abstract = {This paper-demonstration provides\ + \ an overview of an generative\nmusic score adapted for the iPad by the Decibel\ + \ new music ensemble. The original\nscore `Loaded (NSFW)' (2015) is by Western\ + \ Australian composer Laura\nJane Lowther, and is scored for ensemble and electronics,\ + \ commissioned for a\nperformance in April 2015 at the Perth Institute of Contemporary\ + \ Arts. It engages\nand develops the Decibel Score Player application, a score\ + \ reader and generator\nfor the iPad as a tool for displaying an interactive score\ + \ that requires\nperformers to react to news headlines through musical means.\ + \ The paper will\nintroduce the concept for the player, how it was developed,\ + \ and how it was used\nin the premiere performance. The associated demonstration\ + \ shows how the score\nappears on the iPads. },\n address = {Brisbane, Australia},\n\ + \ author = {Cat Hope and Stuart James and Aaron Wyatt},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813365},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {291--296},\n presentation-video\ - \ = {https://youtu.be/aaoResYjqmE},\n publisher = {Birmingham City University},\n\ - \ title = {Making Mappings: Examining the Design Process},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper55.pdf},\n\ - \ year = {2020}\n}\n" + \ doi = {10.5281/zenodo.1176032},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {375--376},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Headline grabs for music: The development of the iPad score generator\ + \ for `Loaded (NSFW)'},\n track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0074.pdf},\n\ + 
\ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813365 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176032 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 291--296 - presentation-video: https://youtu.be/aaoResYjqmE - publisher: Birmingham City University - title: 'Making Mappings: Examining the Design Process' - url: https://www.nime.org/proceedings/2020/nime2020_paper55.pdf - year: 2020 + pages: 375--376 + publisher: Queensland Conservatorium Griffith University + title: 'Headline grabs for music: The development of the iPad score generator for + `Loaded (NSFW)''' + track: Demonstrations + url: http://www.nime.org/proceedings/2016/nime2016_paper0074.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_56 - abstract: 'Parthenope is a robotic musical siren developed to produce unique timbres - and sonic gestures. Parthenope uses perforated spinning disks through which air - is directed to produce sound. Computer-control of disk speed and air flow in conjunction - with a variety of nozzles allow pitches to be precisely produced at different - volumes. The instrument is controlled via Open Sound Control (OSC) messages sent - over an ethernet connection and can interface with common DAWs and physical controllers. - Parthenope is capable of microtonal tuning, portamenti, rapid and precise articulation - (and thus complex rhythms) and distinct timbres that result from its aerophonic - character. It occupies a unique place among robotic musical instruments.' - address: 'Birmingham, UK' - author: 'Sidler, Michael and Bisson, Matthew C and Grotz, Jordan and Barton, Scott' - bibtex: "@inproceedings{NIME20_56,\n abstract = {Parthenope is a robotic musical\ - \ siren developed to produce unique timbres and sonic gestures. Parthenope uses\ - \ perforated spinning disks through which air is directed to produce sound. 
Computer-control\ - \ of disk speed and air flow in conjunction with a variety of nozzles allow pitches\ - \ to be precisely produced at different volumes. The instrument is controlled\ - \ via Open Sound Control (OSC) messages sent over an ethernet connection and can\ - \ interface with common DAWs and physical controllers. Parthenope is capable of\ - \ microtonal tuning, portamenti, rapid and precise articulation (and thus complex\ - \ rhythms) and distinct timbres that result from its aerophonic character. It\ - \ occupies a unique place among robotic musical instruments.},\n address = {Birmingham,\ - \ UK},\n author = {Sidler, Michael and Bisson, Matthew C and Grotz, Jordan and\ - \ Barton, Scott},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813367},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {297--300},\n presentation-video = {https://youtu.be/HQuR0aBJ70Y},\n\ - \ publisher = {Birmingham City University},\n title = {Parthenope: A Robotic Musical\ - \ Siren},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper56.pdf},\n\ - \ year = {2020}\n}\n" + ID: Carey2016 + abstract: |- + This paper discusses practice-based research in the context of + live performance with interactive systems. We focus on two approaches, both of + which are concerned with documenting, examining and reflecting on the real-world + behaviours and experiences of people and artefacts involved in the creation of + new works. The first approach is primarily based on reflections by an individual + performer/developer (auto-ethnography) and the second on interviews and + observations. The rationales for both approaches are presented along with + findings from research which applied them in order to illustrate and explore the + characteristics of both. 
Challenges, including the difficulty of balancing + rigour and relevance and the risks of negatively impacting on creative practices + are articulated, as are the potential benefits. + address: 'Brisbane, Australia' + author: Benjamin Carey and Andrew Johnston + bibtex: "@inproceedings{Carey2016,\n abstract = {This paper discusses practice-based\ + \ research in the context of\nlive performance with interactive systems. We focus\ + \ on two approaches, both of\nwhich are concerned with documenting, examining\ + \ and reflecting on the real-world\nbehaviours and experiences of people and artefacts\ + \ involved in the creation of\nnew works. The first approach is primarily based\ + \ on reflections by an individual\nperformer/developer (auto-ethnography) and\ + \ the second on interviews and\nobservations. The rationales for both approaches\ + \ are presented along with\nfindings from research which applied them in order\ + \ to illustrate and explore the\ncharacteristics of both. Challenges, including\ + \ the difficulty of balancing\nrigour and relevance and the risks of negatively\ + \ impacting on creative practices\nare articulated, as are the potential benefits.},\n\ + \ address = {Brisbane, Australia},\n author = {Benjamin Carey and Andrew Johnston},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176006},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {377--382},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {Reflection On Action in NIME Research: Two\ + \ Complementary Perspectives},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0075.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813367 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176006 + isbn: 
978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 297--300 - presentation-video: https://youtu.be/HQuR0aBJ70Y - publisher: Birmingham City University - title: 'Parthenope: A Robotic Musical Siren' - url: https://www.nime.org/proceedings/2020/nime2020_paper56.pdf - year: 2020 + pages: 377--382 + publisher: Queensland Conservatorium Griffith University + title: 'Reflection On Action in NIME Research: Two Complementary Perspectives' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0075.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_57 - abstract: 'The Tremolo-Harp is a twelve-stringed robotic instrument, where each - string is actuated with a DC vibration motor to produce a mechatronic “tremolo” - effect. It was inspired by instruments and musical styles that employ tremolo - as a primary performance technique, including the hammered dulcimer, pipa, banjo, - flamenco guitar, and surf rock guitar. Additionally, the Tremolo-Harp is designed - to produce long, sustained textures and continuous dynamic variation. These capabilities - represent a different approach from the majority of existing robotic string instruments, - which tend to focus on actuation speed and rhythmic precision. The composition - Tremolo-Harp Study 1 (2019) presents an initial exploration of the Tremolo-Harp’s - unique timbre and capability for continuous dynamic variation. ' - address: 'Birmingham, UK' - author: 'Kemper, Steven' - bibtex: "@inproceedings{NIME20_57,\n abstract = {The Tremolo-Harp is a twelve-stringed\ - \ robotic instrument, where each string is actuated with a DC vibration motor\ - \ to produce a mechatronic “tremolo” effect. It was inspired by instruments and\ - \ musical styles that employ tremolo as a primary performance technique, including\ - \ the hammered dulcimer, pipa, banjo, flamenco guitar, and surf rock guitar. 
Additionally,\ - \ the Tremolo-Harp is designed to produce long, sustained textures and continuous\ - \ dynamic variation. These capabilities represent a different approach from the\ - \ majority of existing robotic string instruments, which tend to focus on actuation\ - \ speed and rhythmic precision. The composition Tremolo-Harp Study 1 (2019) presents\ - \ an initial exploration of the Tremolo-Harp’s unique timbre and capability for\ - \ continuous dynamic variation. },\n address = {Birmingham, UK},\n author = {Kemper,\ - \ Steven},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.4813369},\n editor = {Romain\ - \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ - \ = {301--304},\n publisher = {Birmingham City University},\n title = {Tremolo-Harp:\ - \ A Vibration-Motor Actuated Robotic String Instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper57.pdf},\n\ - \ year = {2020}\n}\n" + ID: Nuannicode225in2016 + abstract: "In this paper we describe an approach for generating and\nvisualising\ + \ new rhythmic patterns from existing audio in real-time using\nconcatenative\ + \ synthesis. We introduce a graph-based model enabling novel\nvisualisation and\ + \ manipulation of new patterns that mimics the rhythmic and\ntimbral character\ + \ of an existing target seed pattern using a separate database of\npalette sounds.\ + \ Our approach is described, reporting on those features that may\nbe useful in\ + \ describing units of sound related to rhythm and how they might then\nbe projected\ + \ into two-dimensional space for visualisation using reduction\ntechniques and\ + \ clustering. We conclude the paper with our qualitative appraisal\nof using the\ + \ interface and outline scope for future work." 
+ address: 'Brisbane, Australia' + author: Càrthach Ó Nuanàin and Sergi Jordà and Perfecto Herrera + bibtex: "@inproceedings{Nuannicode225in2016,\n abstract = {In this paper we describe\ + \ an approach for generating and\nvisualising new rhythmic patterns from existing\ + \ audio in real-time using\nconcatenative synthesis. We introduce a graph-based\ + \ model enabling novel\nvisualisation and manipulation of new patterns that mimics\ + \ the rhythmic and\ntimbral character of an existing target seed pattern using\ + \ a separate database of\npalette sounds. Our approach is described, reporting\ + \ on those features that may\nbe useful in describing units of sound related to\ + \ rhythm and how they might then\nbe projected into two-dimensional space for\ + \ visualisation using reduction\ntechniques and clustering. We conclude the paper\ + \ with our qualitative appraisal\nof using the interface and outline scope for\ + \ future work.},\n address = {Brisbane, Australia},\n author = {C\\`{a}rthach\ + \ \\'{O} Nuan\\`{a}in and Sergi Jord\\`{a} and Perfecto Herrera},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176094},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {383--387},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {An Interactive Software Instrument for Real-time\ + \ Rhythmic Concatenative Synthesis},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0076.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813369 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176094 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 301--304 - publisher: Birmingham City University - title: 'Tremolo-Harp: A Vibration-Motor Actuated Robotic String Instrument' - url: 
https://www.nime.org/proceedings/2020/nime2020_paper57.pdf - year: 2020 + pages: 383--387 + publisher: Queensland Conservatorium Griffith University + title: An Interactive Software Instrument for Real-time Rhythmic Concatenative Synthesis + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0076.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_58 - abstract: 'We propose ExSampling: an integrated system of recording application - and Deep Learning environment for a real-time music performance of environmental - sounds sampled by field recording. Automated sound mapping to Ableton Live tracks - by Deep Learning enables field recording to be applied to real-time performance, - and create interactions among sound recorder, composers and performers.' - address: 'Birmingham, UK' - author: 'Kobayashi, Atsuya and Anzai, Reo and Tokui, Nao' - bibtex: "@inproceedings{NIME20_58,\n abstract = {We propose ExSampling: an integrated\ - \ system of recording application and Deep Learning environment for a real-time\ - \ music performance of environmental sounds sampled by field recording. 
Automated\ - \ sound mapping to Ableton Live tracks by Deep Learning enables field recording\ - \ to be applied to real-time performance, and create interactions among sound\ - \ recorder, composers and performers.},\n address = {Birmingham, UK},\n author\ - \ = {Kobayashi, Atsuya and Anzai, Reo and Tokui, Nao},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813371},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {305--308},\n publisher = {Birmingham\ - \ City University},\n title = {ExSampling: a system for the real-time ensemble\ - \ performance of field-recorded environmental sounds},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper58.pdf},\n\ - \ year = {2020}\n}\n" + ID: Milne2016 + abstract: "We present an application XronoMorph for the\nalgorithmic generation\ + \ of rhythms in the context of creative composition and\nperformance, and of musical\ + \ analysis and education. XronoMorph makes use of\nvisual and geometrical conceptualizations\ + \ of rhythms, and allows the user to\nsmoothly morph between rhythms. Sonification\ + \ of the user generated geometrical\nconstructs is possible using a built-in sampler,\ + \ VST and AU plugins, or\nstandalone synthesizers via MIDI. The algorithms are\ + \ based on two underlying\nmathematical principles: perfect balance and well-formedness,\ + \ both of which can\nbe derived from coefficients of the discrete Fourier transform\ + \ of the rhythm. The\nmathematical background, musical implications, and their\ + \ implementation in the\nsoftware are discussed." + address: 'Brisbane, Australia' + author: Andrew J. Milne and Steffen A. Herff and David Bulger and William A. Sethares + and Roger T. 
Dean + bibtex: "@inproceedings{Milne2016,\n abstract = {We present an application XronoMorph\ + \ for the\nalgorithmic generation of rhythms in the context of creative composition\ + \ and\nperformance, and of musical analysis and education. XronoMorph makes use\ + \ of\nvisual and geometrical conceptualizations of rhythms, and allows the user\ + \ to\nsmoothly morph between rhythms. Sonification of the user generated geometrical\n\ + constructs is possible using a built-in sampler, VST and AU plugins, or\nstandalone\ + \ synthesizers via MIDI. The algorithms are based on two underlying\nmathematical\ + \ principles: perfect balance and well-formedness, both of which can\nbe derived\ + \ from coefficients of the discrete Fourier transform of the rhythm. The\nmathematical\ + \ background, musical implications, and their implementation in the\nsoftware\ + \ are discussed.},\n address = {Brisbane, Australia},\n author = {Andrew J. Milne\ + \ and Steffen A. Herff and David Bulger and William A. Sethares and Roger T. 
Dean},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176082},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {388--393},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {XronoMorph: Algorithmic Generation of Perfectly\ + \ Balanced and Well-Formed Rhythms},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0077.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813371 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176082 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 305--308 - publisher: Birmingham City University - title: 'ExSampling: a system for the real-time ensemble performance of field-recorded - environmental sounds' - url: https://www.nime.org/proceedings/2020/nime2020_paper58.pdf - year: 2020 + pages: 388--393 + publisher: Queensland Conservatorium Griffith University + title: 'XronoMorph: Algorithmic Generation of Perfectly Balanced and Well-Formed + Rhythms' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0077.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_59 - abstract: 'The exploration of musical robots has been an area of interest due to - the timbral and mechanical advantages they offer for music generation and performance. - However, one of the greatest challenges in mechatronic music is to enable these - robots to deliver a nuanced and expressive performance. This depends on their - capability to integrate dynamics, articulation, and a variety of ornamental techniques - while playing a given musical passage. In this paper we introduce a robot arm - pitch shifter for a mechatronic monochord prototype. 
This is a fast, precise, - and mechanically quiet system that enables sliding techniques during musical performance. - We discuss the design and construction process, as well as the system''s advantages - and restrictions. We also review the quantitative evaluation process used to assess - if the instrument meets the design requirements. This process reveals how the - pitch shifter outperforms existing configurations, and potential areas of improvement - for future work.' - address: 'Birmingham, UK' - author: 'Yepez Placencia, Juan Pablo and Murphy, Jim and Carnegie, Dale' - bibtex: "@inproceedings{NIME20_59,\n abstract = {The exploration of musical robots\ - \ has been an area of interest due to the timbral and mechanical advantages they\ - \ offer for music generation and performance. However, one of the greatest challenges\ - \ in mechatronic music is to enable these robots to deliver a nuanced and expressive\ - \ performance. This depends on their capability to integrate dynamics, articulation,\ - \ and a variety of ornamental techniques while playing a given musical passage.\ - \ In this paper we introduce a robot arm pitch shifter for a mechatronic monochord\ - \ prototype. This is a fast, precise, and mechanically quiet system that enables\ - \ sliding techniques during musical performance. We discuss the design and construction\ - \ process, as well as the system's advantages and restrictions. We also review\ - \ the quantitative evaluation process used to assess if the instrument meets the\ - \ design requirements. 
This process reveals how the pitch shifter outperforms\ - \ existing configurations, and potential areas of improvement for future work.},\n\ - \ address = {Birmingham, UK},\n author = {Yepez Placencia, Juan Pablo and Murphy,\ - \ Jim and Carnegie, Dale},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813375},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {309--314},\n presentation-video = {https://youtu.be/rpX8LTZd-Zs},\n\ - \ publisher = {Birmingham City University},\n title = {Designing an Expressive\ - \ Pitch Shifting Mechanism for Mechatronic Chordophones},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper59.pdf},\n\ - \ year = {2020}\n}\n" + ID: Vickery2016 + abstract: "The rhizome concept explored by Deleuze and Guatarri has had an\nimportant\ + \ influence on formal thinking in music and new media. This paper\nexplores the\ + \ development of rhizomatic musical scores that are arranged\ncartographically\ + \ with nodal points allowing for alternate pathways to be\ntraversed. The challenges\ + \ of pre-digital exemplars of rhizomatic structure are\ndiscussed. It follows\ + \ the development of concepts and technology used in the\ncreation of five works\ + \ by the author Ubahn c. 1985: the Rosenberg Variations\n[2012], The Last Years\ + \ [2012], Sacrificial Zones [2014], detritus [2015] and\ntrash vortex [2015].\ + \ The paper discusses the potential for the evolution of novel\nformal structures\ + \ using rhizomatic structures. " + address: 'Brisbane, Australia' + author: Lindsay Vickery + bibtex: "@inproceedings{Vickery2016,\n abstract = {The rhizome concept explored\ + \ by Deleuze and Guatarri has had an\nimportant influence on formal thinking in\ + \ music and new media. 
This paper\nexplores the development of rhizomatic musical\ + \ scores that are arranged\ncartographically with nodal points allowing for alternate\ + \ pathways to be\ntraversed. The challenges of pre-digital exemplars of rhizomatic\ + \ structure are\ndiscussed. It follows the development of concepts and technology\ + \ used in the\ncreation of five works by the author Ubahn c. 1985: the Rosenberg\ + \ Variations\n[2012], The Last Years [2012], Sacrificial Zones [2014], detritus\ + \ [2015] and\ntrash vortex [2015]. The paper discusses the potential for the evolution\ + \ of novel\nformal structures using rhizomatic structures. },\n address = {Brisbane,\ + \ Australia},\n author = {Lindsay Vickery},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176133},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {394--400},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Rhizomatic approaches to screen-based music notation},\n track = {Papers},\n\ + \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0078.pdf},\n year\ + \ = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813375 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176133 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 309--314 - presentation-video: https://youtu.be/rpX8LTZd-Zs - publisher: Birmingham City University - title: Designing an Expressive Pitch Shifting Mechanism for Mechatronic Chordophones - url: https://www.nime.org/proceedings/2020/nime2020_paper59.pdf - year: 2020 + pages: 394--400 + publisher: Queensland Conservatorium Griffith University + title: Rhizomatic approaches to screen-based music notation + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0078.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: 
NIME20_6 - abstract: 'Flexible strings with piezoelectric properties have been developed but - until date not evaluated for the use as part of a musical instrument. This paper - is assessing the properties of these new fibers, looking at their possibilities - for NIME applications.' - address: 'Birmingham, UK' - author: 'Ehrhardt, Marcel and Neupert, Max and Wegener, Clemens' - bibtex: "@inproceedings{NIME20_6,\n abstract = {Flexible strings with piezoelectric\ - \ properties have been developed but until date not evaluated for the use as part\ - \ of a musical instrument. This paper is assessing the properties of these new\ - \ fibers, looking at their possibilities for NIME applications.},\n address =\ - \ {Birmingham, UK},\n author = {Ehrhardt, Marcel and Neupert, Max and Wegener,\ - \ Clemens},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813377},\n editor\ - \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ - \ pages = {35--36},\n publisher = {Birmingham City University},\n title = {Piezoelectric\ - \ strings as a musical interface},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper6.pdf},\n\ - \ year = {2020}\n}\n" + ID: James2016 + abstract: |- + This paper documents a method of controlling complex sound + synthesis processes such as granular synthesis, additive synthesis, timbre + morphology, swarm-based spatialisation, spectral spatialisation, and timbre + spatialisation via a multi-parametric 2D interface. This paper evaluates the use + of audio-rate control signals for sound synthesis, and discussing approaches to + de-interleaving, synchronization, and mapping. The paper also outlines a number + of ways of extending the expressivity of such a control interface by coupling + this with another 2D multi-parametric nodes interface and audio-rate 2D table + lookup. 
The paper proceeds to review methods of navigating multi-parameter sets
    via interpolation and transformation. Some case studies are finally discussed in
    the paper. The author has used this method to control complex sound synthesis
    processes that require control data for more than a thousand parameters.
  address: 'Brisbane, Australia'
  author: Stuart James
  bibtex: "@inproceedings{James2016,\n abstract = {This paper documents a method of\
    \ controlling complex sound\nsynthesis processes such as granular synthesis, additive\
    \ synthesis, timbre\nmorphology, swarm-based spatialisation, spectral spatialisation,\
    \ and timbre\nspatialisation via a multi-parametric 2D interface. This paper evaluates\
    \ the use\nof audio-rate control signals for sound synthesis, and discussing approaches\
    \ to\nde-interleaving, synchronization, and mapping. The paper also outlines a\
    \ number\nof ways of extending the expressivity of such a control interface by\
    \ coupling\nthis with another 2D multi-parametric nodes interface and audio-rate\
    \ 2D table\nlookup. 
The author has used this method to control complex sound synthesis\n\
    \ processes that require control data for more than a thousand parameters.},\n address\
    \ = {Brisbane, Australia},\n author = {Stuart James},\n booktitle = {Proceedings\
    \ of the International Conference on New Interfaces for Musical Expression},\n\
    \ doi = {10.5281/zenodo.1176040},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\
    \ pages = {401--406},\n publisher = {Queensland Conservatorium Griffith University},\n\
    \ title = {A Multi-Point {2D} Interface: Audio-Rate Signals for Controlling Complex\
    \ Multi-Parametric Sound Synthesis},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0079.pdf},\n\
    \ year = {2016}\n}\n"
  booktitle: Proceedings of the International Conference on New Interfaces for Musical
    Expression
  doi: 10.5281/zenodo.1176040
  isbn: 978-1-925455-13-7
  issn: 2220-4806
  pages: 401--406
  publisher: Queensland Conservatorium Griffith University
  title: 'A Multi-Point 2D Interface: Audio-Rate Signals for Controlling Complex Multi-Parametric
    Sound Synthesis'
  track: Papers
  url: http://www.nime.org/proceedings/2016/nime2016_paper0079.pdf
  year: 2016


- ENTRYTYPE: inproceedings
  ID: NIME20_60
  abstract: 'This paper outlines the development process of an audio-visual gestural
    instrument—the AirSticks—and elaborates on the role ‘miming’ has played in the
    formation of new mappings for the instrument. The AirSticks, although fully-functioning,
    were used as props in live performances in order to evaluate potential mapping
    strategies that were later implemented for real. 
This use of mime when designing - Digital Musical Instruments (DMIs) can help overcome choice paralysis, break from - established habits, and liberate creators to realise more meaningful parameter - mappings. Bringing this process into an interactive performance environment acknowledges - the audience as stakeholders in the design of these instruments, and also leads - us to reflect upon the beliefs and assumptions made by an audience when engaging - with the performance of such ‘magical’ devices. This paper establishes two opposing - strategies to parameter mapping, ‘movement-first’ mapping, and the less conventional - ‘sound-first’ mapping that incorporates mime. We discuss the performance ‘One - Five Nine’, its transformation from a partial mime into a fully interactive presentation, - and the influence this process has had on the outcome of the performance and the - AirSticks as a whole.' - address: 'Birmingham, UK' - author: 'Ilsar, Alon A and Hughes, Matthew and Johnston, Andrew' - bibtex: "@inproceedings{NIME20_60,\n abstract = {This paper outlines the development\ - \ process of an audio-visual gestural instrument—the AirSticks—and elaborates\ - \ on the role ‘miming’ has played in the formation of new mappings for the instrument.\ - \ The AirSticks, although fully-functioning, were used as props in live performances\ - \ in order to evaluate potential mapping strategies that were later implemented\ - \ for real. This use of mime when designing Digital Musical Instruments (DMIs)\ - \ can help overcome choice paralysis, break from established habits, and liberate\ - \ creators to realise more meaningful parameter mappings. Bringing this process\ - \ into an interactive performance environment acknowledges the audience as stakeholders\ - \ in the design of these instruments, and also leads us to reflect upon the beliefs\ - \ and assumptions made by an audience when engaging with the performance of such\ - \ ‘magical’ devices. 
This paper establishes two opposing strategies to parameter\ - \ mapping, ‘movement-first’ mapping, and the less conventional ‘sound-first’ mapping\ - \ that incorporates mime. We discuss the performance ‘One Five Nine’, its transformation\ - \ from a partial mime into a fully interactive presentation, and the influence\ - \ this process has had on the outcome of the performance and the AirSticks as\ - \ a whole.},\n address = {Birmingham, UK},\n author = {Ilsar, Alon A and Hughes,\ - \ Matthew and Johnston, Andrew},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813383},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {315--320},\n presentation-video = {https://youtu.be/ZFQKKI3dFhE},\n\ - \ publisher = {Birmingham City University},\n title = {NIME or Mime: A Sound-First\ - \ Approach to Developing an Audio-Visual Gestural Instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper60.pdf},\n\ - \ year = {2020}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.4813383 - editor: Romain Michon and Franziska Schroeder - issn: 2220-4806 - month: July - pages: 315--320 - presentation-video: https://youtu.be/ZFQKKI3dFhE - publisher: Birmingham City University - title: 'NIME or Mime: A Sound-First Approach to Developing an Audio-Visual Gestural - Instrument' - url: https://www.nime.org/proceedings/2020/nime2020_paper60.pdf - year: 2020 - - -- ENTRYTYPE: inproceedings - ID: NIME20_61 - abstract: 'This demonstration presents URack, a custom-built audio-visual composition - and performance environment that combines the Unity video-game engine with the - VCV Rack software modular synthesiser. 
In alternative cross-modal solutions, a - compromise is likely made in either the sonic or visual output, or the consistency - and intuitiveness of the composition environment. By integrating control mechanisms - for graphics inside VCV Rack, the music-making metaphors used to build a patch - are extended into the visual domain. Users familiar with modular synthesizers - are immediately able to start building high-fidelity graphics using the same control - voltages regularly used to compose sound. Without needing to interact with two - separate development environments, languages or metaphorical domains, users are - encouraged to freely, creatively and enjoyably construct their own highly-integrated - audio-visual instruments. This demonstration will showcase the construction of - an audio-visual patch using URack, focusing on the integration of flexible GPU - particle systems present in Unity with the vast library of creative audio composition - modules inside VCV.' - address: 'Birmingham, UK' - author: 'Hughes, Matthew and Johnston, Andrew' - bibtex: "@inproceedings{NIME20_61,\n abstract = {This demonstration presents URack,\ - \ a custom-built audio-visual composition and performance environment that combines\ - \ the Unity video-game engine with the VCV Rack software modular synthesiser.\ - \ In alternative cross-modal solutions, a compromise is likely made in either\ - \ the sonic or visual output, or the consistency and intuitiveness of the composition\ - \ environment. By integrating control mechanisms for graphics inside VCV Rack,\ - \ the music-making metaphors used to build a patch are extended into the visual\ - \ domain. Users familiar with modular synthesizers are immediately able to start\ - \ building high-fidelity graphics using the same control voltages regularly used\ - \ to compose sound. 
Without needing to interact with two separate development\ - \ environments, languages or metaphorical domains, users are encouraged to freely,\ - \ creatively and enjoyably construct their own highly-integrated audio-visual\ - \ instruments. This demonstration will showcase the construction of an audio-visual\ - \ patch using URack, focusing on the integration of flexible GPU particle systems\ - \ present in Unity with the vast library of creative audio composition modules\ - \ inside VCV.},\n address = {Birmingham, UK},\n author = {Hughes, Matthew and\ - \ Johnston, Andrew},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813389},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {321--322},\n publisher = {Birmingham City University},\n\ - \ title = {URack: Audio-visual Composition and Performance using Unity and VCV\ - \ Rack},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper61.pdf},\n\ - \ year = {2020}\n}\n" + ID: Schlienger2016 + abstract: "Despite the near ubiquitous availability of interfaces for spatial\n\ + interaction, standard audio spatialisation technology makes very little use of\n\ + it. In fact, we find that audio technology often impedes spatial interaction:\ + \ In\nthe workshop on music, space and interaction we thus developed the idea\ + \ of a\nreal-time panning whereby a moving sound source is reproduced as a virtual\ + \ source\non a panning trajectory. We define a series of application scenarios\ + \ where we\ndescribe in detail what functionality is required to inform an implementation.\ + \ In\nour earlier work we showed that Acoustic Localisation (AL) potentially can\n\ + provide a pervasive technique for spatially interactive audio applications.\n\ + Playing through the application scenarios with AL in mind provides interesting\n\ + approaches. 
For one scenario we show an example implementation as proof of\nconcept." + address: 'Brisbane, Australia' + author: Dominik Schlienger + bibtex: "@inproceedings{Schlienger2016,\n abstract = {Despite the near ubiquitous\ + \ availability of interfaces for spatial\ninteraction, standard audio spatialisation\ + \ technology makes very little use of\nit. In fact, we find that audio technology\ + \ often impedes spatial interaction: In\nthe workshop on music, space and interaction\ + \ we thus developed the idea of a\nreal-time panning whereby a moving sound source\ + \ is reproduced as a virtual source\non a panning trajectory. We define a series\ + \ of application scenarios where we\ndescribe in detail what functionality is\ + \ required to inform an implementation. In\nour earlier work we showed that Acoustic\ + \ Localisation (AL) potentially can\nprovide a pervasive technique for spatially\ + \ interactive audio applications.\nPlaying through the application scenarios with\ + \ AL in mind provides interesting\napproaches. 
For one scenario we show an example\ + \ implementation as proof of\nconcept.},\n address = {Brisbane, Australia},\n\ + \ author = {Dominik Schlienger},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176116},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {407--412},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Acoustic Localisation\ + \ for Spatial Reproduction of Moving Sound Source: Application Scenarios \\& Proof\ + \ of Concept},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0080.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813389 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176116 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 321--322 - publisher: Birmingham City University - title: 'URack: Audio-visual Composition and Performance using Unity and VCV Rack' - url: https://www.nime.org/proceedings/2020/nime2020_paper61.pdf - year: 2020 + pages: 407--412 + publisher: Queensland Conservatorium Griffith University + title: 'Acoustic Localisation for Spatial Reproduction of Moving Sound Source: Application + Scenarios & Proof of Concept' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0080.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_62 - abstract: 'In this work, we have developed a textile-based interactive surface fabricated - through digital knitting technology. Our prototype explores intarsia, interlock - patterning, and a collection of functional and non-functional fibers to create - a piano-pattern textile for expressive and virtuosic sonic interaction. 
We combined - conductive, thermochromic, and composite yarns with high-flex polyester yarns - to develop KnittedKeyboard with its soft physical properties and responsive sensing - and display capabilities. The individual and combination of each key could simultaneously - sense discrete touch, as well as continuous proximity and pressure. The KnittedKeyboard - enables performers to experience fabric-based multimodal interaction as they explore - the seamless texture and materiality of the electronic textile.' - address: 'Birmingham, UK' - author: 'Wicaksono, Irmandy and Paradiso, Joseph' - bibtex: "@inproceedings{NIME20_62,\n abstract = {In this work, we have developed\ - \ a textile-based interactive surface fabricated through digital knitting technology.\ - \ Our prototype explores intarsia, interlock patterning, and a collection of functional\ - \ and non-functional fibers to create a piano-pattern textile for expressive and\ - \ virtuosic sonic interaction. We combined conductive, thermochromic, and composite\ - \ yarns with high-flex polyester yarns to develop KnittedKeyboard with its soft\ - \ physical properties and responsive sensing and display capabilities. The individual\ - \ and combination of each key could simultaneously sense discrete touch, as well\ - \ as continuous proximity and pressure. 
The KnittedKeyboard enables performers\ - \ to experience fabric-based multimodal interaction as they explore the seamless\ - \ texture and materiality of the electronic textile.},\n address = {Birmingham,\ - \ UK},\n author = {Wicaksono, Irmandy and Paradiso, Joseph},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813391},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {323--326},\n publisher = {Birmingham\ - \ City University},\n title = {KnittedKeyboard: Digital Knitting of Electronic\ - \ Textile Musical Controllers},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper62.pdf},\n\ - \ year = {2020}\n}\n" + ID: Soraghan2016 + abstract: "Many commercial software applications for timbre creation and\nmanipulation\ + \ feature an engineering-focused, parametric layout. This paper argues\nthe case\ + \ for a perceptually motivated approach to interface design in such tools.\n`Perceptually\ + \ motivated' in this context refers to the use of common semantic\ntimbre descriptors\ + \ to influence the digital representation of timbre. A review is\ngiven of existing\ + \ research into semantic descriptors of timbre, as well as\ncorresponding acoustic\ + \ features of timbre. Discussion is also given on existing\ninterface design techniques.\ + \ The perceptually motivated approach to interface\ndesign is demonstrated using\ + \ an example system, which makes use of perceptually\nrelevant mappings from acoustic\ + \ timbre features to semantic timbre descriptors\nand visualises sounds as physical\ + \ objects." + address: 'Brisbane, Australia' + author: Sean Soraghan and Alain Renaud and Ben Supper + bibtex: "@inproceedings{Soraghan2016,\n abstract = {Many commercial software applications\ + \ for timbre creation and\nmanipulation feature an engineering-focused, parametric\ + \ layout. 
This paper argues\nthe case for a perceptually motivated approach to\ + \ interface design in such tools.\n`Perceptually motivated' in this context refers\ + \ to the use of common semantic\ntimbre descriptors to influence the digital representation\ + \ of timbre. A review is\ngiven of existing research into semantic descriptors\ + \ of timbre, as well as\ncorresponding acoustic features of timbre. Discussion\ + \ is also given on existing\ninterface design techniques. The perceptually motivated\ + \ approach to interface\ndesign is demonstrated using an example system, which\ + \ makes use of perceptually\nrelevant mappings from acoustic timbre features to\ + \ semantic timbre descriptors\nand visualises sounds as physical objects.},\n\ + \ address = {Brisbane, Australia},\n author = {Sean Soraghan and Alain Renaud\ + \ and Ben Supper},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176129},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {413--418},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Towards a perceptual\ + \ framework for interface design in digital environments for timbre manipulation},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0081.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813391 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176129 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 323--326 - publisher: Birmingham City University - title: 'KnittedKeyboard: Digital Knitting of Electronic Textile Musical Controllers' - url: https://www.nime.org/proceedings/2020/nime2020_paper62.pdf - year: 2020 + pages: 413--418 + publisher: Queensland Conservatorium Griffith University + title: Towards a perceptual framework for interface 
design in digital environments + for timbre manipulation + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0081.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_63 - abstract: 'In the context of artistic performances, the complexity and diversity - of digital interfaces may impair the spectator experience, in particular hiding - the engagement and virtuosity of the performers. Artists and researchers have - made attempts at solving this by augmenting performances with additional information - provided through visual, haptic or sonic modalities. However, the proposed techniques - have not yet been formalized and we believe a clarification of their many aspects - is necessary for future research. In this paper, we propose a taxonomy for what - we define as Spectator Experience Augmentation Techniques (SEATs). We use it to - analyse existing techniques and we demonstrate how it can serve as a basis for - the exploration of novel ones.' - address: 'Birmingham, UK' - author: 'Capra, Olivier and Berthaut, Florent and Grisoni, Laurent' - bibtex: "@inproceedings{NIME20_63,\n abstract = {In the context of artistic performances,\ - \ the complexity and diversity of digital interfaces may impair the spectator\ - \ experience, in particular hiding the engagement and virtuosity of the performers.\ - \ Artists and researchers have made attempts at solving this by augmenting performances\ - \ with additional information provided through visual, haptic or sonic modalities.\ - \ However, the proposed techniques have not yet been formalized and we believe\ - \ a clarification of their many aspects is necessary for future research. In this\ - \ paper, we propose a taxonomy for what we define as Spectator Experience Augmentation\ - \ Techniques (SEATs). 
We use it to analyse existing techniques and we demonstrate\ - \ how it can serve as a basis for the exploration of novel ones.},\n address =\ - \ {Birmingham, UK},\n author = {Capra, Olivier and Berthaut, Florent and Grisoni,\ - \ Laurent},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813396},\n editor\ - \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ - \ pages = {327--330},\n publisher = {Birmingham City University},\n title = {A\ - \ Taxonomy of Spectator Experience Augmentation Techniques},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper63.pdf},\n\ - \ year = {2020}\n}\n" + ID: Reid2016 + abstract: "This paper describes the design of a Minimally Invasive Gesture\nSensing\ + \ Interface (MIGSI) for trumpet. The interface attaches effortlessly to any\n\ + B-flat or C trumpet and requires no permanent modifications to the\nhost-instrument.\ + \ It was designed first and foremost with accessibility in\nmind an approach that\ + \ is uncommon in augmented instrument design and\nseeks to strike a balance between\ + \ minimal design and robust control. MIGSI uses\nsensor technology to capture\ + \ gestural data such as valve displacement, hand\ntension, and instrument position,\ + \ to offer extended control and expressivity to\ntrumpet players. Several streams\ + \ of continuous data are transmitted wirelessly\nfrom MIGSI to the receiving computer,\ + \ where MIGSI Mapping application (a simple\ngraphical user interface) parses\ + \ the incoming data into individually accessible\nvariables. It is our hope that\ + \ MIGSI will be adopted by trumpet players and\ncomposers, and that over time\ + \ a new body of repertoire for the augmented trumpet\nwill emerge." 
+ address: 'Brisbane, Australia' + author: Sarah Reid and Ryan Gaston and Colin Honigman and Ajay Kapur + bibtex: "@inproceedings{Reid2016,\n abstract = {This paper describes the design\ + \ of a Minimally Invasive Gesture\nSensing Interface (MIGSI) for trumpet. The\ + \ interface attaches effortlessly to any\nB-flat or C trumpet and requires no\ + \ permanent modifications to the\nhost-instrument. It was designed first and foremost\ + \ with accessibility in\nmind an approach that is uncommon in augmented instrument\ + \ design and\nseeks to strike a balance between minimal design and robust control.\ + \ MIGSI uses\nsensor technology to capture gestural data such as valve displacement,\ + \ hand\ntension, and instrument position, to offer extended control and expressivity\ + \ to\ntrumpet players. Several streams of continuous data are transmitted wirelessly\n\ + from MIGSI to the receiving computer, where MIGSI Mapping application (a simple\n\ + graphical user interface) parses the incoming data into individually accessible\n\ + variables. 
It is our hope that MIGSI will be adopted by trumpet players and\n\ + composers, and that over time a new body of repertoire for the augmented trumpet\n\ + will emerge.},\n address = {Brisbane, Australia},\n author = {Sarah Reid and Ryan\ + \ Gaston and Colin Honigman and Ajay Kapur},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176106},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {419--424},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Minimally Invasive Gesture Sensing Interface (MIGSI) for Trumpet},\n\ + \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0082.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813396 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176106 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 327--330 - publisher: Birmingham City University - title: A Taxonomy of Spectator Experience Augmentation Techniques - url: https://www.nime.org/proceedings/2020/nime2020_paper63.pdf - year: 2020 + pages: 419--424 + publisher: Queensland Conservatorium Griffith University + title: Minimally Invasive Gesture Sensing Interface (MIGSI) for Trumpet + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0082.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_64 - abstract: 'Existing applications of mobile music tools are often concerned with - the simulation of acoustic or digital musical instruments, extended with graphical - representations of keys, pads, etc. Following an intensive review of existing - tools and approaches to mobile music making, we implemented a digital drawing - tool, employing a time-based graphical/gestural interface for music composition - and performance. 
In this paper, we introduce our Sounding Brush project, through - which we explore music making in various forms with the natural gestures of drawing - and mark making on a tablet device. Subsequently, we present the design and development - of the Sounding Brush application. Utilising this project idea, we discuss the - act of drawing as an activity that is not separated from the act of playing musical - instrument. Drawing is essentially the act of playing music by means of a continuous - process of observation, individualisation and exploring time and space in a unique - way.' - address: 'Birmingham, UK' - author: 'Sen, Sourya and Tahiroğlu, Koray and Lohmann, Julia' - bibtex: "@inproceedings{NIME20_64,\n abstract = {Existing applications of mobile\ - \ music tools are often concerned with the simulation of acoustic or digital\ - \ musical instruments, extended with graphical representations of keys, pads,\ - \ etc. Following an intensive review of existing tools and approaches to mobile\ - \ music making, we implemented a digital drawing tool, employing a time-based\ - \ graphical/gestural interface for music composition and performance. In this\ - \ paper, we introduce our Sounding Brush project, through which we explore music\ - \ making in various forms with the natural gestures of drawing and mark making\ - \ on a tablet device. Subsequently, we present the design and development of the\ - \ Sounding Brush application. Utilising this project idea, we discuss the act\ - \ of drawing as an activity that is not separated from the act of playing musical\ - \ instrument. 
Drawing is essentially the act of playing music by means of a continuous\ - \ process of observation, individualisation and exploring time and space in a\ - \ unique way.},\n address = {Birmingham, UK},\n author = {Sen, Sourya and Tahiroğlu,\ - \ Koray and Lohmann, Julia},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813398},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {331--336},\n presentation-video = {https://youtu.be/7RkGbyGM-Ho},\n\ - \ publisher = {Birmingham City University},\n title = {Sounding Brush: A Tablet\ - \ based Musical Instrument for Drawing and Mark Making},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper64.pdf},\n\ - \ year = {2020}\n}\n" + ID: Paine2016 + abstract: "The question of sound as an experience of now, as a conduit to the\n\ + quality of our belonging to the present, is challenging. Yet it is a crucial\n\ + issue in discussions about ecological listening. I have come to think of sound\ + \ as\na viscous material, a vibrating field of energy that has texture and density\ + \ and\na physicality that is unlike most other media.\nNow suggests a desire of\ + \ becoming present in the resonating sound field of our\nimmediate environment.\ + \ The energy in the field constantly modulates and drifts. I\ndraw on voices and\ + \ forces from the natural environment, humans and machines. The\nwork seeks to\ + \ draw the listeners into an inner space in which they can be both\npresent and\ + \ aware of their sonic environment and become immersed in it. Now is\npartly inspired\ + \ by Samuel Beckett's novel Watt, specifically Watt's\nmysterious journey into\ + \ to the unknown." 
+ address: 'Brisbane, Australia' + author: Garth Paine + bibtex: "@inproceedings{Paine2016,\n abstract = {The question of sound as an experience\ + \ of now, as a conduit to the\nquality of our belonging to the present, is challenging.\ + \ Yet it is a crucial\nissue in discussions about ecological listening. I have\ + \ come to think of sound as\na viscous material, a vibrating field of energy that\ + \ has texture and density and\na physicality that is unlike most other media.\n\ + Now suggests a desire of becoming present in the resonating sound field of our\n\ + immediate environment. The energy in the field constantly modulates and drifts.\ + \ I\ndraw on voices and forces from the natural environment, humans and machines.\ + \ The\nwork seeks to draw the listeners into an inner space in which they can\ + \ be both\npresent and aware of their sonic environment and become immersed in\ + \ it. Now is\npartly inspired by Samuel Beckett's novel Watt, specifically Watt's\n\ + mysterious journey into to the unknown.},\n address = {Brisbane, Australia},\n\ + \ author = {Garth Paine},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176104},\n\ + \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {425--426},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Now},\n track\ + \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0083.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813398 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176104 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 331--336 - presentation-video: https://youtu.be/7RkGbyGM-Ho - publisher: Birmingham City University - title: 'Sounding Brush: A Tablet based Musical Instrument for Drawing and Mark Making' - url: 
https://www.nime.org/proceedings/2020/nime2020_paper64.pdf - year: 2020 + pages: 425--426 + publisher: Queensland Conservatorium Griffith University + title: Now + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0083.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_65 - abstract: 'A deformable musical instrument can take numerous distinct shapes with - its non-rigid features. Building audio synthesis module for such an interface - behaviour can be challenging. In this paper, we present the Al-terity, a non-rigid - musical instrument that comprises a deep learning model with generative adversarial - network architecture and use it for generating audio samples for real-time audio - synthesis. The particular deep learning model we use for this instrument was trained - with existing data set as input for purposes of further experimentation. The main - benefits of the model used are the ability to produce the realistic range of timbre - of the trained data set and the ability to generate new audio samples in real-time, - in the moment of playing, with the characteristics of sounds that the performer - ever heard before. We argue that these advanced intelligence features on the - audio synthesis level could allow us to explore performing music with particular - response features that define the instrument''s digital idiomaticity and allow - us reinvent the instrument in the act of music performance.' - address: 'Birmingham, UK' - author: 'Tahiroğlu, Koray and Kastemaa, Miranda and Koli, Oskar' - bibtex: "@inproceedings{NIME20_65,\n abstract = {A deformable musical instrument\ - \ can take numerous distinct shapes with its non-rigid features. Building audio\ - \ synthesis module for such an interface behaviour can be challenging. 
In this\ - \ paper, we present the Al-terity, a non-rigid musical instrument that comprises\ - \ a deep learning model with generative adversarial network architecture and\ - \ use it for generating audio samples for real-time audio synthesis. The particular\ - \ deep learning model we use for this instrument was trained with existing data\ - \ set as input for purposes of further experimentation. The main benefits of the\ - \ model used are the ability to produce the realistic range of timbre of the trained\ - \ data set and the ability to generate new audio samples in real-time, in the\ - \ moment of playing, with the characteristics of sounds that the performer ever\ - \ heard before. We argue that these advanced intelligence features on the audio\ - \ synthesis level could allow us to explore performing music with particular response\ - \ features that define the instrument's digital idiomaticity and allow us reinvent\ - \ the instrument in the act of music performance.},\n address = {Birmingham, UK},\n\ - \ author = {Tahiroğlu, Koray and Kastemaa, Miranda and Koli, Oskar},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813402},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {337--342},\n\ - \ presentation-video = {https://youtu.be/giYxFovZAvQ},\n publisher = {Birmingham\ - \ City University},\n title = {Al-terity: Non-Rigid Musical Instrument with Artificial\ - \ Intelligence Applied to Real-Time Audio Synthesis},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper65.pdf},\n\ - \ year = {2020}\n}\n" + ID: Shapiro2016 + abstract: "NIME research realizes a vision of performance by means of\ncomputational\ + \ expression, linking body and space to sound and imagery through\neclectic forms\ + \ of sensing and interaction. 
This vision could dramatically impact\ncomputer\ + \ science education, simultaneously modernizing the field and drawing in\ndiverse\ + \ new participants. We describe our work creating a NIME-inspired computer\nmusic\ + \ toolkit for kids called BlockyTalky; the toolkit enables users to create\nnetworks\ + \ of sensing devices and synthesizers. We offer findings from our research\non\ + \ student learning through programming and performance. We conclude by\nsuggesting\ + \ a number of future directions for NIME researchers interested in\neducation." + address: 'Brisbane, Australia' + author: R. Benjamin Shapiro and Rebecca Fiebrink and Matthew Ahrens and Annie Kelly + bibtex: "@inproceedings{Shapiro2016,\n abstract = {NIME research realizes a vision\ + \ of performance by means of\ncomputational expression, linking body and space\ + \ to sound and imagery through\neclectic forms of sensing and interaction. This\ + \ vision could dramatically impact\ncomputer science education, simultaneously\ + \ modernizing the field and drawing in\ndiverse new participants. We describe\ + \ our work creating a NIME-inspired computer\nmusic toolkit for kids called BlockyTalky;\ + \ the toolkit enables users to create\nnetworks of sensing devices and synthesizers.\ + \ We offer findings from our research\non student learning through programming\ + \ and performance. We conclude by\nsuggesting a number of future directions for\ + \ NIME researchers interested in\neducation.},\n address = {Brisbane, Australia},\n\ + \ author = {R. 
Benjamin Shapiro and Rebecca Fiebrink and Matthew Ahrens and Annie\ + \ Kelly},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176120},\n isbn = {978-1-925455-13-7},\n\ + \ issn = {2220-4806},\n pages = {427--432},\n publisher = {Queensland Conservatorium\ + \ Griffith University},\n title = {BlockyTalky: A Physical and Distributed Computer\ + \ Music Toolkit for Kids},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0084.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813402 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176120 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 337--342 - presentation-video: https://youtu.be/giYxFovZAvQ - publisher: Birmingham City University - title: 'Al-terity: Non-Rigid Musical Instrument with Artificial Intelligence Applied - to Real-Time Audio Synthesis' - url: https://www.nime.org/proceedings/2020/nime2020_paper65.pdf - year: 2020 + pages: 427--432 + publisher: Queensland Conservatorium Griffith University + title: 'BlockyTalky: A Physical and Distributed Computer Music Toolkit for Kids' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0084.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_66 - abstract: 'Feedback instruments offer radical new ways of engaging with instrument - design and musicianship. They are defined by recurrent circulation of signals - through the instrument, which give the instrument ‘a life of its own’ and a ''stimulating - uncontrollability''. Arguably, the most interesting musical behaviour in these - instruments happens when their dynamic complexity is maximised, without falling - into saturating feedback. 
It is often challenging to keep the instrument in this - zone; this research looks at algorithmic ways to manage the behaviour of feedback - loops in order to make feedback instruments more playable and musical; to expand - and maintain the `sweet spot''. We propose a solution that manages gain dynamics - based on measurement of complexity, using a realtime implementation of the Effort - to Compress algorithm. The system was evaluated with four musicians, each of whom - have different variations of string-based feedback instruments, following an autobiographical - design approach. Qualitative feedback was gathered, showing that the system was - successful in modifying the behaviour of these instruments to allow easier access - to edge transition zones, sometimes at the expense of losing some of the more - compelling dynamics of the instruments. The basic efficacy of the system is evidenced - by descriptive audio analysis. This paper is accompanied by a dataset of sounds - collected during the study, and the open source software that was written to support - the research.' - address: 'Birmingham, UK' - author: 'Kiefer, Chris and Overholt, Dan and Eldridge, Alice' - bibtex: "@inproceedings{NIME20_66,\n abstract = {Feedback instruments offer radical\ - \ new ways of engaging with instrument design and musicianship. They are defined\ - \ by recurrent circulation of signals through the instrument, which give the instrument\ - \ ‘a life of its own’ and a 'stimulating uncontrollability'. Arguably, the most\ - \ interesting musical behaviour in these instruments happens when their dynamic\ - \ complexity is maximised, without falling into saturating feedback. It is often\ - \ challenging to keep the instrument in this zone; this research looks at algorithmic\ - \ ways to manage the behaviour of feedback loops in order to make feedback instruments\ - \ more playable and musical; to expand and maintain the `sweet spot'. 
We propose\ - \ a solution that manages gain dynamics based on measurement of complexity, using\ - \ a realtime implementation of the Effort to Compress algorithm. The system was\ - \ evaluated with four musicians, each of whom have different variations of string-based\ - \ feedback instruments, following an autobiographical design approach. Qualitative\ - \ feedback was gathered, showing that the system was successful in modifying the\ - \ behaviour of these instruments to allow easier access to edge transition zones,\ - \ sometimes at the expense of losing some of the more compelling dynamics of the\ - \ instruments. The basic efficacy of the system is evidenced by descriptive audio\ - \ analysis. This paper is accompanied by a dataset of sounds collected during\ - \ the study, and the open source software that was written to support the research.},\n\ - \ address = {Birmingham, UK},\n author = {Kiefer, Chris and Overholt, Dan and\ - \ Eldridge, Alice},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813406},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {343--348},\n presentation-video = {https://youtu.be/sf6FwsUX-84},\n\ - \ publisher = {Birmingham City University},\n title = {Shaping the behaviour of\ - \ feedback instruments with complexity-controlled gain dynamics},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper66.pdf},\n\ - \ year = {2020}\n}\n" + ID: Bowers2016 + abstract: 'This paper describes an instance of what we call `curated research'', + a concerted thinking, making and performance activity between two research teams + with a dedicated interest in the creation of experimental musical instruments + and the development of new performance practices. 
Our work builds theoretically + upon critical work in philosophy, anthropology and aesthetics, and practically + upon previous explorations of strategies for facilitating rapid, collaborative, + publicly-oriented making in artistic settings. We explored an orientation to making + which promoted the creation of a family of instruments and performance environments + that were responses to the self-consciously provocative theme of `One Knob To + Rule Them All''. A variety of design issues were explored including: mapping, + physicality, the question of control in interface design, reductionist aesthetics + and design strategies, and questions of gender and power in musical culture. We + discuss not only the technologies which were made but also reflect on the value + of such concerted, provocatively thematised, collective making activities for + addressing foundational design issues. As such, our work is intended not just + as a technical and practical contribution to NIME but also a reflective provocation + into how we conduct research itself in a curated critical manner.' 
+ address: 'Brisbane, Australia' + author: John Bowers and John Richards and Tim Shaw and Jim Frieze and Ben Freeth + and Sam Topley and Neal Spowage and Steve Jones and Amit Patel and Li Rui + bibtex: "@inproceedings{Bowers2016,\n abstract = {This paper describes an instance\ + \ of what we call `curated research', a concerted thinking, making and performance\ + \ activity between two research teams with a dedicated interest in the creation\ + \ of experimental musical instruments and the development of new performance practices.\ + \ Our work builds theoretically upon critical work in philosophy, anthropology\ + \ and aesthetics, and practically upon previous explorations of strategies for\ + \ facilitating rapid, collaborative, publicly-oriented making in artistic settings.\ + \ We explored an orientation to making which promoted the creation of a family\ + \ of instruments and performance environments that were responses to the self-consciously\ + \ provocative theme of `One Knob To Rule Them All'. A variety of design issues\ + \ were explored including: mapping, physicality, the question of control in interface\ + \ design, reductionist aesthetics and design strategies, and questions of gender\ + \ and power in musical culture. We discuss not only the technologies which were\ + \ made but also reflect on the value of such concerted, provocatively thematised,\ + \ collective making activities for addressing foundational design issues. 
As such,\ + \ our work is intended not just as a technical and practical contribution to NIME\ + \ but also a reflective provocation into how we conduct research itself in a curated\ + \ critical manner.},\n address = {Brisbane, Australia},\n author = {John Bowers\ + \ and John Richards and Tim Shaw and Jim Frieze and Ben Freeth and Sam Topley\ + \ and Neal Spowage and Steve Jones and Amit Patel and Li Rui},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1175996},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {433--438},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {One Knob To Rule Them All: Reductionist Interfaces for Expansionist\ + \ Research},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0085.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813406 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1175996 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 343--348 - presentation-video: https://youtu.be/sf6FwsUX-84 - publisher: Birmingham City University - title: Shaping the behaviour of feedback instruments with complexity-controlled - gain dynamics - url: https://www.nime.org/proceedings/2020/nime2020_paper66.pdf - year: 2020 + pages: 433--438 + publisher: Queensland Conservatorium Griffith University + title: 'One Knob To Rule Them All: Reductionist Interfaces for Expansionist Research' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0085.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_67 - abstract: 'Brain-computer interfacing (BCI) offers novel methods to facilitate participation - in audio engineering, providing access for individuals who might otherwise be - unable to take part (either due to lack of training, 
or physical disability). This - paper describes the development of a BCI system for conscious, or ‘active’, control - of parameters on an audio mixer by generation of synchronous MIDI Machine Control - messages. The mapping between neurophysiological cues and audio parameter must - be intuitive for a neophyte audience (i.e., one without prior training or the - physical skills developed by professional audio engineers when working with tactile - interfaces). The prototype is dubbed MINDMIX (a portmanteau of ‘mind’ and ‘mixer’), - combining discrete and many-to-many mappings of audio mixer parameters and BCI - control signals measured via Electronecephalograph (EEG). In future, specific - evaluation of discrete mappings would be useful for iterative system design.' - address: 'Birmingham, UK' - author: 'Williams, Duncan A.H.' - bibtex: "@inproceedings{NIME20_67,\n abstract = {Brain-computer interfacing (BCI)\ - \ offers novel methods to facilitate participation in audio engineering, providing\ - \ access for individuals who might otherwise be unable to take part (either due\ - \ to lack of training, or physical disability). This paper describes the development\ - \ of a BCI system for conscious, or ‘active’, control of parameters on an audio\ - \ mixer by generation of synchronous MIDI Machine Control messages. The mapping\ - \ between neurophysiological cues and audio parameter must be intuitive for a\ - \ neophyte audience (i.e., one without prior training or the physical skills developed\ - \ by professional audio engineers when working with tactile interfaces). The prototype\ - \ is dubbed MINDMIX (a portmanteau of ‘mind’ and ‘mixer’), combining discrete\ - \ and many-to-many mappings of audio mixer parameters and BCI control signals\ - \ measured via Electronecephalograph (EEG). 
In future, specific evaluation of\ - \ discrete mappings would be useful for iterative system design.},\n address =\ - \ {Birmingham, UK},\n author = {Williams, Duncan A.H.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813408},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {349--352},\n publisher = {Birmingham\ - \ City University},\n title = {MINDMIX: Mapping of brain activity to congruent\ - \ audio mixing features},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper67.pdf},\n\ - \ year = {2020}\n}\n" + ID: Jensenius2016 + abstract: "This paper provides an overview of the process of editing the\nforthcoming\ + \ anthology A NIME Reader---Fifteen years of New Interfaces for\nMusical Expression.\ + \ The selection process is presented, and we reflect on some\nof the trends we\ + \ have observed in re-discovering the collection of more than 1200\nNIME papers\ + \ published throughout the 15 yearlong history of the conference. An\nanthology\ + \ is necessarily selective, and ours is no exception. As we present in\nthis paper,\ + \ the aim has been to represent the wide range of artistic,\nscientific, and technological\ + \ approaches that characterize the NIME conference.\nThe anthology also includes\ + \ critical discourse, and through acknowledgment of the\nstrengths and weaknesses\ + \ of the NIME community, we propose activities which could\nfurther diversify\ + \ and strengthen the field." + address: 'Brisbane, Australia' + author: Alexander Refsum Jensenius and Michael J. Lyons + bibtex: "@inproceedings{Jensenius2016,\n abstract = {This paper provides an overview\ + \ of the process of editing the\nforthcoming anthology A NIME Reader---Fifteen\ + \ years of New Interfaces for\nMusical Expression. 
The selection process is presented,\ + \ and we reflect on some\nof the trends we have observed in re-discovering the\ + \ collection of more than 1200\nNIME papers published throughout the 15 yearlong\ + \ history of the conference. An\nanthology is necessarily selective, and ours\ + \ is no exception. As we present in\nthis paper, the aim has been to represent\ + \ the wide range of artistic,\nscientific, and technological approaches that characterize\ + \ the NIME conference.\nThe anthology also includes critical discourse, and through\ + \ acknowledgment of the\nstrengths and weaknesses of the NIME community, we propose\ + \ activities which could\nfurther diversify and strengthen the field.},\n address\ + \ = {Brisbane, Australia},\n author = {Alexander Refsum Jensenius and Michael\ + \ J. Lyons},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176044},\n isbn\ + \ = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {439--443},\n publisher\ + \ = {Queensland Conservatorium Griffith University},\n title = {Trends at NIME---Reflections\ + \ on Editing A NIME Reader},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0086.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813408 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176044 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 349--352 - publisher: Birmingham City University - title: 'MINDMIX: Mapping of brain activity to congruent audio mixing features' - url: https://www.nime.org/proceedings/2020/nime2020_paper67.pdf - year: 2020 + pages: 439--443 + publisher: Queensland Conservatorium Griffith University + title: Trends at NIME---Reflections on Editing A NIME Reader + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0086.pdf + 
year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_68 - abstract: 'We present SQUISHBOI, a continuous touch controller for interacting with - complex musical systems. An elastic rubber membrane forms the playing surface - of the instrument, while machine learning is used for dimensionality reduction - and gesture recognition. The membrane is stretched over a hollow shell which permits - considerable depth excursion, with an array of distance sensors tracking the surface - displacement from underneath. The inherent dynamics of the membrane lead to cross-coupling - between nearby sensors, however we do not see this as a flaw or limitation. Instead - we find this coupling gives structure to the playing techniques and mapping schemes - chosen by the user. The instrument is best utilized as a tool for actively designing - abstraction and forming a relative control structure within a given system, one - which allows for intuitive gestural control beyond what can be accomplished with - conventional musical controllers.' - address: 'Birmingham, UK' - author: 'DeSmith, Marcel O and Piepenbrink, Andrew and Kapur, Ajay' - bibtex: "@inproceedings{NIME20_68,\n abstract = {We present SQUISHBOI, a continuous\ - \ touch controller for interacting with complex musical systems. An elastic rubber\ - \ membrane forms the playing surface of the instrument, while machine learning\ - \ is used for dimensionality reduction and gesture recognition. The membrane is\ - \ stretched over a hollow shell which permits considerable depth excursion, with\ - \ an array of distance sensors tracking the surface displacement from underneath.\ - \ The inherent dynamics of the membrane lead to cross-coupling between nearby\ - \ sensors, however we do not see this as a flaw or limitation. Instead we find\ - \ this coupling gives structure to the playing techniques and mapping schemes\ - \ chosen by the user. 
The instrument is best utilized as a tool for actively designing\ - \ abstraction and forming a relative control structure within a given system,\ - \ one which allows for intuitive gestural control beyond what can be accomplished\ - \ with conventional musical controllers.},\n address = {Birmingham, UK},\n author\ - \ = {DeSmith, Marcel O and Piepenbrink, Andrew and Kapur, Ajay},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813412},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {353--356},\n publisher = {Birmingham\ - \ City University},\n title = {SQUISHBOI: A Multidimensional Controller for Complex\ - \ Musical Interactions using Machine Learning},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper68.pdf},\n\ - \ year = {2020}\n}\n" + ID: Tahironicode287lu2016 + abstract: |- + In this paper we present the new development of a semi-autonomous + response module for the NOISA system. NOISA is an interactive music system that + predicts performer's engagement levels, learns from the performer, decides what + to do and does it at the right moment. As an improvement for the above, we + implemented real-time adaptive features that respond to a detailed monitoring of + the performer's engagement and to overall sonic space, while evaluating the + impact of its actions. Through these new features, the response module produces + meaningful and non-intrusive counter actions, attempting to deepen and maintain + the performer's engagement in musical interaction. In a formative study we + compared our designed response module against a random control system of events, + in which the former performed consistently better than the latter. 
+ address: 'Brisbane, Australia' + author: Koray Tahiroglu and Juan Carlos Vasquez and Johan Kildal + bibtex: "@inproceedings{Tahironicode287lu2016,\n abstract = {In this paper we present\ + \ the new development of a semi-autonomous\nresponse module for the NOISA system.\ + \ NOISA is an interactive music system that\npredicts performer's engagement levels,\ + \ learns from the performer, decides what\nto do and does it at the right moment.\ + \ As an improvement for the above, we\nimplemented real-time adaptive features\ + \ that respond to a detailed monitoring of\nthe performer's engagement and to\ + \ overall sonic space, while evaluating the\nimpact of its actions. Through these\ + \ new features, the response module produces\nmeaningful and non-intrusive counter\ + \ actions, attempting to deepen and maintain\nthe performer's engagement in musical\ + \ interaction. In a formative study we\ncompared our designed response module\ + \ against a random control system of events,\nin which the former performed consistently\ + \ better than the latter.},\n address = {Brisbane, Australia},\n author = {Koray\ + \ Tahiroglu and Juan Carlos Vasquez and Johan Kildal},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176131},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ + \ pages = {444--449},\n publisher = {Queensland Conservatorium Griffith University},\n\ + \ title = {Non-intrusive Counter-actions: Maintaining Progressively Engaging Interactions\ + \ for Music Performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0087.pdf},\n\ + \ year = {2016}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813412 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1176131 + isbn: 978-1-925455-13-7 issn: 2220-4806 - month: July - pages: 353--356 - 
publisher: Birmingham City University - title: 'SQUISHBOI: A Multidimensional Controller for Complex Musical Interactions - using Machine Learning' - url: https://www.nime.org/proceedings/2020/nime2020_paper68.pdf - year: 2020 + pages: 444--449 + publisher: Queensland Conservatorium Griffith University + title: 'Non-intrusive Counter-actions: Maintaining Progressively Engaging Interactions + for Music Performance' + track: Papers + url: http://www.nime.org/proceedings/2016/nime2016_paper0087.pdf + year: 2016 - ENTRYTYPE: inproceedings - ID: NIME20_69 - abstract: 'Digital technologies play a fundamental role in New Interfaces for Musical - Expression as well as music making and consumption more widely. This paper reports - on two workshops with music professionals and researchers who undertook an initial - exploration of the differences between digital platforms (software and online - services) for music in the UK and China. Differences were found in primary target - user groups of digital platforms in the UK and China as well as the stages of - the culture creation cycle they were developed for. Reasons for the divergence - of digital platforms include differences in culture, regulation, and infrastructure, - as well as the inherent Western bias of software for music making such as Digital - Audio Workstations. Using AI to bridge between Western and Chinese music traditions - is suggested as an opportunity to address aspects of the divergent landscape of - digital platforms for music inside and outside China.' - address: 'Birmingham, UK' - author: 'Bryan-Kinns, Nick and ZIJIN, LI and Sun, Xiaohua' - bibtex: "@inproceedings{NIME20_69,\n abstract = {Digital technologies play a fundamental\ - \ role in New Interfaces for Musical Expression as well as music making and consumption\ - \ more widely. 
This paper reports on two workshops with music professionals\ - \ and researchers who undertook an initial exploration of the differences between\ - \ digital platforms (software and online services) for music in the UK and China.\ - \ Differences were found in primary target user groups of digital platforms in\ - \ the UK and China as well as the stages of the culture creation cycle they were\ - \ developed for. Reasons for the divergence of digital platforms include differences\ - \ in culture, regulation, and infrastructure, as well as the inherent Western\ - \ bias of software for music making such as Digital Audio Workstations. Using\ - \ AI to bridge between Western and Chinese music traditions is suggested as an\ - \ opportunity to address aspects of the divergent landscape of digital platforms\ - \ for music inside and outside China.},\n address = {Birmingham, UK},\n author\ - \ = {Bryan-Kinns, Nick and ZIJIN, LI and Sun, Xiaohua},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813414},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {357--360},\n presentation-video\ - \ = {https://youtu.be/c7nkCBBTnDA},\n publisher = {Birmingham City University},\n\ - \ title = {On Digital Platforms and AI for Music in the UK and China},\n url =\ - \ {https://www.nime.org/proceedings/2020/nime2020_paper69.pdf},\n year = {2020}\n\ + ID: Jones2007 + abstract: 'Physical modeling has proven to be a successful method ofsynthesizing + highly expressive sounds. However, providingdeep methods of real time musical + control remains a majorchallenge. In this paper we describe our work towards aninstrument + for percussion synthesis, in which a waveguidemesh is both excited and damped + by a 2D matrix of forcesfrom a sensor. 
By emulating a drum skin both as controllerand + sound generator, our instrument has reproduced someof the expressive qualities + of hand drumming. Details of ourimplementation are discussed, as well as qualitative + resultsand experience gleaned from live performances.' + address: 'New York City, NY, United States' + author: 'Jones, Randy and Schloss, Andrew' + bibtex: "@inproceedings{Jones2007,\n abstract = {Physical modeling has proven to\ + \ be a successful method ofsynthesizing highly expressive sounds. However, providingdeep\ + \ methods of real time musical control remains a majorchallenge. In this paper\ + \ we describe our work towards aninstrument for percussion synthesis, in which\ + \ a waveguidemesh is both excited and damped by a 2D matrix of forcesfrom a sensor.\ + \ By emulating a drum skin both as controllerand sound generator, our instrument\ + \ has reproduced someof the expressive qualities of hand drumming. Details of\ + \ ourimplementation are discussed, as well as qualitative resultsand experience\ + \ gleaned from live performances.},\n address = {New York City, NY, United States},\n\ + \ author = {Jones, Randy and Schloss, Andrew},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177131},\n issn = {2220-4806},\n keywords = {Physical modeling,\ + \ instrument design, expressive control, multi-touch, performance },\n pages =\ + \ {27--30},\n title = {Controlling a Physical Model with a {2D} Force Matrix},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_027.pdf},\n year = {2007}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813414 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177131 issn: 2220-4806 - month: July - pages: 357--360 - presentation-video: https://youtu.be/c7nkCBBTnDA - publisher: Birmingham City University - title: On Digital 
Platforms and AI for Music in the UK and China - url: https://www.nime.org/proceedings/2020/nime2020_paper69.pdf - year: 2020 + keywords: 'Physical modeling, instrument design, expressive control, multi-touch, + performance ' + pages: 27--30 + title: Controlling a Physical Model with a 2D Force Matrix + url: http://www.nime.org/proceedings/2007/nime2007_027.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_7 - abstract: 'Digitally integrating the materiality, form, and tactility in everyday - objects (e.g., pottery) provides inspiration for new ways of musical expression - and performance. In this project we reinterpret the creative process and aesthetic - philosophy of pottery as algorithmic music to help users rediscover the latent - story behind pottery through a synesthetic experience. Projects Mobius I and Mobius - II illustrate two potential directions toward a musical interface, one focusing - on the circular form, and the other, on graphical ornaments of pottery. Six conductive - graphics on the pottery function as capacitive sensors while retaining their resemblance - to traditional ornamental patterns in pottery. Offering pottery as a musical interface, - we invite users to orchestrate algorithmic music by physically touching the different - graphics.' - address: 'Birmingham, UK' - author: 'Chu, Jean and Choi, Jaewon' - bibtex: "@inproceedings{NIME20_7,\n abstract = {Digitally integrating the materiality,\ - \ form, and tactility in everyday objects (e.g., pottery) provides inspiration\ - \ for new ways of musical expression and performance. In this project we reinterpret\ - \ the creative process and aesthetic philosophy of pottery as algorithmic music\ - \ to help users rediscover the latent story behind pottery through a synesthetic\ - \ experience. Projects Mobius I and Mobius II illustrate two potential directions\ - \ toward a musical interface, one focusing on the circular form, and the other,\ - \ on graphical ornaments of pottery. 
Six conductive graphics on the pottery function\ - \ as capacitive sensors while retaining their resemblance to traditional ornamental\ - \ patterns in pottery. Offering pottery as a musical interface, we invite users\ - \ to orchestrate algorithmic music by physically touching the different graphics.},\n\ - \ address = {Birmingham, UK},\n author = {Chu, Jean and Choi, Jaewon},\n booktitle\ + ID: B2007 + abstract: 'In this paper we describe the design and implementation of the PHYSMISM: + an interface for exploring the possibilities for improving the creative use of + physical modelling sound synthesis. The PHYSMISM is implemented in a software + and hardware version. Moreover, four different physical modelling techniques are + implemented, to explore the implications of using and combining different techniques. + In order to evaluate the creative use of physical models, a test was performed + using 11 experienced musicians as test subjects. Results show that the capability + of combining the physical models and the use of a physical interface engaged the + musicians in creative exploration of physical models.' + address: 'New York City, NY, United States' + author: 'Bottcher, Niels and Gelineck, Steven and Serafin, Stefania' + bibtex: "@inproceedings{B2007,\n abstract = {In this paper we describe the design\ + \ and implementation of the PHYSMISM: an interface for exploring the possibilities\ + \ for improving the creative use of physical modelling sound synthesis. The PHYSMISM\ + \ is implemented in a software and hardware version. Moreover, four different\ + \ physical modelling techniques are implemented, to explore the implications of\ + \ using and combining different techniques. In order to evaluate the creative\ + \ use of physical models, a test was performed using 11 experienced musicians\ + \ as test subjects. 
Results show that the capability of combining the physical\ + \ models and the use of a physical interface engaged the musicians in creative\ + \ exploration of physical models.},\n address = {New York City, NY, United States},\n\ + \ author = {Bottcher, Niels and Gelineck, Steven and Serafin, Stefania},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813416},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {37--38},\n\ - \ publisher = {Birmingham City University},\n title = {Reinterpretation of Pottery\ - \ as a Musical Interface},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper7.pdf},\n\ - \ year = {2020}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1177051},\n issn = {2220-4806},\n keywords\ + \ = {Physical models, hybrid instruments, excitation, resonator. },\n pages =\ + \ {31--36},\n title = {{PHY}SMISM : A Control Interface for Creative Exploration\ + \ of Physical Models},\n url = {http://www.nime.org/proceedings/2007/nime2007_031.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813416 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177051 issn: 2220-4806 - month: July - pages: 37--38 - publisher: Birmingham City University - title: Reinterpretation of Pottery as a Musical Interface - url: https://www.nime.org/proceedings/2020/nime2020_paper7.pdf - year: 2020 + keywords: 'Physical models, hybrid instruments, excitation, resonator. 
' + pages: 31--36 + title: 'PHYSMISM : A Control Interface for Creative Exploration of Physical Models' + url: http://www.nime.org/proceedings/2007/nime2007_031.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_70 - abstract: 'In this paper we adopt the theory of force dynamics in human cognition - as a fundamental design principle for the development of mid-air musical interfaces. - We argue that this principle can provide more intuitive user experiences when - the interface does not provide direct haptic feedback – such as interfaces made - with various gesture-tracking technologies. Grounded in five concepts from the - theoretical literature on force dynamics in musical cognition, the paper presents - a set of principles for interaction design focused on five force schemas: Path - restraint, Containment restraint, Counter-force, Attraction, and Compulsion. We - describe an initial set of examples that implement these principles using a Leap - Motion sensor for gesture tracking and SuperCollider for interactive audio design. - Finally, the paper presents a pilot experiment that provides initial ratings of - intuitiveness in the user experience.' - address: 'Birmingham, UK' - author: 'Eskildsen, Anders and Walther-Hansen, Mads' - bibtex: "@inproceedings{NIME20_70,\n abstract = {In this paper we adopt the theory\ - \ of force dynamics in human cognition as a fundamental design principle for the\ - \ development of mid-air musical interfaces. We argue that this principle can\ - \ provide more intuitive user experiences when the interface does not provide\ - \ direct haptic feedback – such as interfaces made with various gesture-tracking\ - \ technologies. Grounded in five concepts from the theoretical literature on force\ - \ dynamics in musical cognition, the paper presents a set of principles for interaction\ - \ design focused on five force schemas: Path restraint, Containment restraint,\ - \ Counter-force, Attraction, and Compulsion. 
We describe an initial set of examples\ - \ that implement these principles using a Leap Motion sensor for gesture tracking\ - \ and SuperCollider for interactive audio design. Finally, the paper presents\ - \ a pilot experiment that provides initial ratings of intuitiveness in the user\ - \ experience.},\n address = {Birmingham, UK},\n author = {Eskildsen, Anders and\ - \ Walther-Hansen, Mads},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813418},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {361--366},\n presentation-video = {https://youtu.be/REe967aGVN4},\n\ - \ publisher = {Birmingham City University},\n title = {Force dynamics as a design\ - \ framework for mid-air musical interfaces},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper70.pdf},\n\ - \ year = {2020}\n}\n" + ID: Chuchacz2007 + abstract: 'A novel electronic percussion synthesizer prototype is presented. Our + ambition is to design an instrument that will produce a high quality, realistic + sound based on a physical modelling sound synthesis algorithm. This is achieved + using a real-time Field Programmable Gate Array (FPGA) implementation of the model + coupled to an interface that aims to make efficient use of all the subtle nuanced + gestures of the instrumentalist. It is based on a complex physical model of the + vibrating plate --- the source of sound in the majority of percussion instruments. + A Xilinx Virtex II pro FPGA core handles the sound synthesis computations with + an 8 billion operations per second performance and has been designed in such a + way to allow a high level of control and flexibility. Strategies are also presented + to that allow the parametric space of the model to be mapped to the playing gestures + of the percussionist.' 
+ address: 'New York City, NY, United States' + author: 'Chuchacz, Katarzyna and O''Modhrain, Sile and Woods, Roger' + bibtex: "@inproceedings{Chuchacz2007,\n abstract = {A novel electronic percussion\ + \ synthesizer prototype is presented. Our ambition is to design an instrument\ + \ that will produce a high quality, realistic sound based on a physical modelling\ + \ sound synthesis algorithm. This is achieved using a real-time Field Programmable\ + \ Gate Array (FPGA) implementation of the model coupled to an interface that aims\ + \ to make efficient use of all the subtle nuanced gestures of the instrumentalist.\ + \ It is based on a complex physical model of the vibrating plate --- the source\ + \ of sound in the majority of percussion instruments. A Xilinx Virtex II pro FPGA\ + \ core handles the sound synthesis computations with an 8 billion operations per\ + \ second performance and has been designed in such a way to allow a high level\ + \ of control and flexibility. Strategies are also presented to that allow the\ + \ parametric space of the model to be mapped to the playing gestures of the percussionist.},\n\ + \ address = {New York City, NY, United States},\n author = {Chuchacz, Katarzyna\ + \ and O'Modhrain, Sile and Woods, Roger},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177071},\n\ + \ issn = {2220-4806},\n keywords = {Physical Model, Electronic Percussion Instrument,\ + \ FPGA. 
},\n pages = {37--40},\n title = {Physical Models and Musical Controllers\ + \ -- Designing a Novel Electronic Percussion Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_037.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813418 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177071 issn: 2220-4806 - month: July - pages: 361--366 - presentation-video: https://youtu.be/REe967aGVN4 - publisher: Birmingham City University - title: Force dynamics as a design framework for mid-air musical interfaces - url: https://www.nime.org/proceedings/2020/nime2020_paper70.pdf - year: 2020 + keywords: 'Physical Model, Electronic Percussion Instrument, FPGA. ' + pages: 37--40 + title: Physical Models and Musical Controllers -- Designing a Novel Electronic Percussion + Instrument + url: http://www.nime.org/proceedings/2007/nime2007_037.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_71 - abstract: 'Continuous MIDI controllers commonly output their position only, with - no influence of the performative energy with which they were set. In this paper, - creative uses of time as a parameter in continuous controller mapping are demonstrated: - the speed of movement affects the position mapping and control output. A set of - SuperCollider classes are presented, developed in the author’s practice in computer - music, where they have been used together with commercial MIDI controllers. The - creative applications employ various approaches and metaphors for scaling time, - but also machine learning for recognising patterns. In the techniques, performer, - controller and synthesis ‘intra-act’, to use Karen Barad’s term: because position - and velocity are derived from the same data, sound output cannot be predicted - without the temporal context of performance.' 
- address: 'Birmingham, UK' - author: 'Nyström, Erik' - bibtex: "@inproceedings{NIME20_71,\n abstract = {Continuous MIDI controllers commonly\ - \ output their position only, with no influence of the performative energy with\ - \ which they were set. In this paper, creative uses of time as a parameter in\ - \ continuous controller mapping are demonstrated: the speed of movement affects\ - \ the position mapping and control output. A set of SuperCollider classes are\ - \ presented, developed in the author’s practice in computer music, where they\ - \ have been used together with commercial MIDI controllers. The creative applications\ - \ employ various approaches and metaphors for scaling time, but also machine learning\ - \ for recognising patterns. In the techniques, performer, controller and synthesis\ - \ ‘intra-act’, to use Karen Barad’s term: because position and velocity are derived\ - \ from the same data, sound output cannot be predicted without the temporal context\ - \ of performance.},\n address = {Birmingham, UK},\n author = {Nyström, Erik},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813420},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {367--368},\n publisher = {Birmingham City University},\n title = {Intra-Actions:\ - \ Experiments with Velocity and Position in Continuous Controllers},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper71.pdf},\n\ - \ year = {2020}\n}\n" + ID: Wessel2007 + abstract: 'We describe the design, implementation, and evaluation with musical applications + of force sensitive multi-touch arrays of touchpads. Each of the touchpads supports + a three dimensional representation of musical material: two spatial dimensions + plus a force measurement we typically use to control dynamics. 
We have developed + two pad systems, one with 24 pads and a second with 2 arrays of 16 pads each. + We emphasize the treatment of gestures as sub-sampled audio signals. This tight + coupling of gesture with audio provides for a high degree of control intimacy. + Our experiments with the pad arrays demonstrate that we can efficiently deal with + large numbers of audio encoded gesture channels – 72 for the 24 pad array and + 96 for the two 16 pad arrays.' + address: 'New York City, NY, United States' + author: 'Wessel, David and Avizienis, Rimas and Freed, Adrian and Wright, Matthew' + bibtex: "@inproceedings{Wessel2007,\n abstract = {We describe the design, implementation,\ + \ and evaluation with musical applications of force sensitive multi-touch arrays\ + \ of touchpads. Each of the touchpads supports a three dimensional representation\ + \ of musical material: two spatial dimensions plus a force measurement we typically\ + \ use to control dynamics. We have developed two pad systems, one with 24 pads\ + \ and a second with 2 arrays of 16 pads each. We emphasize the treatment of gestures\ + \ as sub-sampled audio signals. This tight coupling of gesture with audio provides\ + \ for a high degree of control intimacy. 
Our experiments with the pad arrays demonstrate\ + \ that we can efficiently deal with large numbers of audio encoded gesture channels\ + \ – 72 for the 24 pad array and 96 for the two 16 pad arrays.},\n address = {New\ + \ York City, NY, United States},\n author = {Wessel, David and Avizienis, Rimas\ + \ and Freed, Adrian and Wright, Matthew},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179479},\n\ + \ issn = {2220-4806},\n keywords = {Pressure and force sensing, High-resolution\ + \ gestural signals, Touchpad, VersaPad.},\n pages = {41--45},\n title = {A Force\ + \ Sensitive Multi-Touch Array Supporting Multiple {2-D} Musical Control Structures},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_041.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813420 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1179479 issn: 2220-4806 - month: July - pages: 367--368 - publisher: Birmingham City University - title: 'Intra-Actions: Experiments with Velocity and Position in Continuous Controllers' - url: https://www.nime.org/proceedings/2020/nime2020_paper71.pdf - year: 2020 + keywords: 'Pressure and force sensing, High-resolution gestural signals, Touchpad, + VersaPad.' + pages: 41--45 + title: A Force Sensitive Multi-Touch Array Supporting Multiple 2-D Musical Control + Structures + url: http://www.nime.org/proceedings/2007/nime2007_041.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_72 - abstract: 'This paper presents an ongoing research on hand gesture interactive sonification - in dance performances. For this purpose, a conceptual framework and a multilayered - mapping model issued from an experimental case study will be proposed. The goal - of this research is twofold. 
On the one hand, we aim to determine action-based - perceptual invariants that allow us to establish pertinent relations between gesture - qualities and sound features. On the other hand, we are interested in analysing - how an interactive model-based sonification can provide useful and effective feedback - for dance practitioners. From this point of view, our research explicitly addresses - the convergence between the scientific understandings provided by the field of - movement sonification and the traditional know-how developed over the years within - the digital instrument and interaction design communities. A key component of - our study is the combination between physically-based sound synthesis and motion - features analysis. This approach has proven effective in providing interesting - insights for devising novel sonification models for artistic and scientific purposes, - and for developing a collaborative platform involving the designer, the musician - and the performer.' - address: 'Birmingham, UK' - author: 'Leonard, James and Giomi, Andrea' - bibtex: "@inproceedings{NIME20_72,\n abstract = {This paper presents an ongoing\ - \ research on hand gesture interactive sonification in dance performances. For\ - \ this purpose, a conceptual framework and a multilayered mapping model issued\ - \ from an experimental case study will be proposed. The goal of this research\ - \ is twofold. On the one hand, we aim to determine action-based perceptual invariants\ - \ that allow us to establish pertinent relations between gesture qualities and\ - \ sound features. On the other hand, we are interested in analysing how an interactive\ - \ model-based sonification can provide useful and effective feedback for dance\ - \ practitioners. 
From this point of view, our research explicitly addresses the\ - \ convergence between the scientific understandings provided by the field of movement\ - \ sonification and the traditional know-how developed over the years within the\ - \ digital instrument and interaction design communities. A key component of our\ - \ study is the combination between physically-based sound synthesis and motion\ - \ features analysis. This approach has proven effective in providing interesting\ - \ insights for devising novel sonification models for artistic and scientific\ - \ purposes, and for developing a collaborative platform involving the designer,\ - \ the musician and the performer.},\n address = {Birmingham, UK},\n author = {Leonard,\ - \ James and Giomi, Andrea},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813422},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {369--374},\n presentation-video = {https://youtu.be/HQqIjL-Z8dA},\n\ - \ publisher = {Birmingham City University},\n title = {Towards an Interactive\ - \ Model-Based Sonification of Hand Gesture for Dance Performance},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper72.pdf},\n\ - \ year = {2020}\n}\n" + ID: Chang2007 + abstract: 'FigureWe present Zstretch, a textile music controller that supports expressive + haptic interactions. The musical controller takes advantage of the fabric''s topological + constraints to enable proportional control of musical parameters. This novel interface + explores ways in which one might treat music as a sheet of cloth. This paper proposes + an approach to engage simple technologies for supporting ordinary hand interactions. + We show that this combination of basic technology with general tactile movements + can result in an expressive musical interface. 
a' + address: 'New York City, NY, United States' + author: 'Chang, Angela and Ishii, Hiroshi' + bibtex: "@inproceedings{Chang2007,\n abstract = {FigureWe present Zstretch, a textile\ + \ music controller that supports expressive haptic interactions. The musical controller\ + \ takes advantage of the fabric's topological constraints to enable proportional\ + \ control of musical parameters. This novel interface explores ways in which one\ + \ might treat music as a sheet of cloth. This paper proposes an approach to engage\ + \ simple technologies for supporting ordinary hand interactions. We show that\ + \ this combination of basic technology with general tactile movements can result\ + \ in an expressive musical interface. a},\n address = {New York City, NY, United\ + \ States},\n author = {Chang, Angela and Ishii, Hiroshi},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177067},\n issn = {2220-4806},\n keywords = {Tangible\ + \ interfaces, textiles, tactile design, musical expressivity },\n pages = {46--49},\n\ + \ title = {Zstretch : A Stretchy Fabric Music Controller},\n url = {http://www.nime.org/proceedings/2007/nime2007_046.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813422 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177067 issn: 2220-4806 - month: July - pages: 369--374 - presentation-video: https://youtu.be/HQqIjL-Z8dA - publisher: Birmingham City University - title: Towards an Interactive Model-Based Sonification of Hand Gesture for Dance - Performance - url: https://www.nime.org/proceedings/2020/nime2020_paper72.pdf - year: 2020 + keywords: 'Tangible interfaces, textiles, tactile design, musical expressivity ' + pages: 46--49 + title: 'Zstretch : A Stretchy Fabric Music Controller' + url: 
http://www.nime.org/proceedings/2007/nime2007_046.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_73 - abstract: 'Lack of access to technological devices is a common exponent of a new - form of social exclusion. Coupled with this, there are also the risk of increasing - inequality between developed and underdeveloped countries when concerning technology - access. Regarding Internet access, the percentage of young Africans who do not - have access to this technology is around 60%, while in Europe the figure is 4%. - This limitation also expands for musical instruments, whether electronic or not. - In light of this worldwide problem, this paper aims to showcase a method for building - a MIDI Controller, a prominent instrument for musical production and live performance, - in an economically viable form that can be accessible to the poorest populations. - It is also desirable that the equipment is suitable for teaching various subjects - such as Music, Computer Science and Engineering. The outcome of this research - is not an amazing controller or a brandy new cool interface but the experience - of building a controller concerning all the bad conditions of doing it.' - address: 'Birmingham, UK' - author: 'Vieira, Romulo A and Schiavoni, Flávio Luiz' - bibtex: "@inproceedings{NIME20_73,\n abstract = {Lack of access to technological\ - \ devices is a common exponent of a new form of social exclusion. Coupled with\ - \ this, there are also the risk of increasing inequality between developed and\ - \ underdeveloped countries when concerning technology access. Regarding Internet\ - \ access, the percentage of young Africans who do not have access to this technology\ - \ is around 60%, while in Europe the figure is 4%. This limitation also expands\ - \ for musical instruments, whether electronic or not. 
In light of this worldwide\ - \ problem, this paper aims to showcase a method for building a MIDI Controller,\ - \ a prominent instrument for musical production and live performance, in an economically\ - \ viable form that can be accessible to the poorest populations. It is also desirable\ - \ that the equipment is suitable for teaching various subjects such as Music,\ - \ Computer Science and Engineering. The outcome of this research is not an amazing\ - \ controller or a brandy new cool interface but the experience of building a controller\ - \ concerning all the bad conditions of doing it.},\n address = {Birmingham, UK},\n\ - \ author = {Vieira, Romulo A and Schiavoni, Flávio Luiz},\n booktitle = {Proceedings\ + ID: Kim2007 + abstract: 'In this paper, we describe the musical development of a new system for + performing electronic music where a video-based eye movement recording system, + known as Oculog, is used to control sound. Its development is discussed against + a background that includes a brief history of biologically based interfaces for + performing music, together with a survey of various recording systems currently + in use for monitoring eye movement in clinical applications. Oculog is discussed + with specific reference to its implementation as a performance interface for electronic + music. A new work features algorithms driven by eye movement response and allows + the user to interact with audio synthesis and introduces new possibilities for + microtonal performance. Discussion reflects an earlier technological paradigm + and concludes by reviewing possibilities for future development.' + address: 'New York City, NY, United States' + author: 'Kim, Juno and Schiemer, Greg and Narushima, Terumi' + bibtex: "@inproceedings{Kim2007,\n abstract = {In this paper, we describe the musical\ + \ development of a new system for performing electronic music where a video-based\ + \ eye movement recording system, known as Oculog, is used to control sound. 
Its\ + \ development is discussed against a background that includes a brief history\ + \ of biologically based interfaces for performing music, together with a survey\ + \ of various recording systems currently in use for monitoring eye movement in\ + \ clinical applications. Oculog is discussed with specific reference to its implementation\ + \ as a performance interface for electronic music. A new work features algorithms\ + \ driven by eye movement response and allows the user to interact with audio synthesis\ + \ and introduces new possibilities for microtonal performance. Discussion reflects\ + \ an earlier technological paradigm and concludes by reviewing possibilities for\ + \ future development.},\n address = {New York City, NY, United States},\n author\ + \ = {Kim, Juno and Schiemer, Greg and Narushima, Terumi},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813424},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {375--379},\n presentation-video\ - \ = {https://youtu.be/X1GE5jk2cgc},\n publisher = {Birmingham City University},\n\ - \ title = {Fliperama: An affordable Arduino based MIDI Controller},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper73.pdf},\n\ - \ year = {2020}\n}\n" + \ doi = {10.5281/zenodo.1177145},\n issn = {2220-4806},\n keywords = {1,algorithmic\ + \ composition,expressive control interfaces,eye movement recording,microtonal\ + \ tuning,midi,nime07,pure data,video},\n pages = {50--55},\n title = {Oculog :\ + \ Playing with Eye Movements},\n url = {http://www.nime.org/proceedings/2007/nime2007_050.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813424 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177145 issn: 2220-4806 - month: July - pages: 375--379 - 
presentation-video: https://youtu.be/X1GE5jk2cgc - publisher: Birmingham City University - title: 'Fliperama: An affordable Arduino based MIDI Controller' - url: https://www.nime.org/proceedings/2020/nime2020_paper73.pdf - year: 2020 + keywords: '1,algorithmic composition,expressive control interfaces,eye movement + recording,microtonal tuning,midi,nime07,pure data,video' + pages: 50--55 + title: 'Oculog : Playing with Eye Movements' + url: http://www.nime.org/proceedings/2007/nime2007_050.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_74 - abstract: 'This paper reports on a project that aimed to break apart the isolation - of VR and share an experience between both the wearer of a headset and a room - of observers. It presented the user with an acoustically playable virtual environment - in which their interactions with objects spawned audio events from the room’s - 80 loudspeakers and animations on the room’s 3 display walls. This required the - use of several Unity engines running on separate machines and SuperCollider running - as the audio engine. The perspectives into what the wearer of the headset was - doing allowed the audience to connect their movements to the sounds and images - being experienced, effectively allowing them all to participate in the installation - simultaneously.' - address: 'Birmingham, UK' - author: 'MacLean, Alex' - bibtex: "@inproceedings{NIME20_74,\n abstract = {This paper reports on a project\ - \ that aimed to break apart the isolation of VR and share an experience between\ - \ both the wearer of a headset and a room of observers. It presented the user\ - \ with an acoustically playable virtual environment in which their interactions\ - \ with objects spawned audio events from the room’s 80 loudspeakers and animations\ - \ on the room’s 3 display walls. 
This required the use of several Unity engines\ - \ running on separate machines and SuperCollider running as the audio engine.\ - \ The perspectives into what the wearer of the headset was doing allowed the audience\ - \ to connect their movements to the sounds and images being experienced, effectively\ - \ allowing them all to participate in the installation simultaneously.},\n address\ - \ = {Birmingham, UK},\n author = {MacLean, Alex},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.4813426},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {380--381},\n publisher = {Birmingham\ - \ City University},\n title = {Immersive Dreams: A Shared VR Experience},\n url\ - \ = {https://www.nime.org/proceedings/2020/nime2020_paper74.pdf},\n year = {2020}\n\ - }\n" + ID: Camurri2007 + abstract: 'In this paper, we present a new system, the Orchestra Explorer, enabling + a novel paradigm for active fruition of sound and music content. The Orchestra + Explorer allows users to physically navigate inside a virtual orchestra, to actively + explore the music piece the orchestra is playing, to modify and mold the sound + and music content in real-time through their expressive full-body movement and + gesture. An implementation of the Orchestra Explorer was developed and presented + in the framework of the science exhibition Cimenti di Invenzione e Armonia, held + at Casa Paganini, Genova, from October 2006 to January 2007. ' + address: 'New York City, NY, United States' + author: 'Camurri, Antonio and Canepa, Corrado and Volpe, Gualtiero' + bibtex: "@inproceedings{Camurri2007,\n abstract = {In this paper, we present a new\ + \ system, the Orchestra Explorer, enabling a novel paradigm for active fruition\ + \ of sound and music content. 
The Orchestra Explorer allows users to physically\ + \ navigate inside a virtual orchestra, to actively explore the music piece the\ + \ orchestra is playing, to modify and mold the sound and music content in real-time\ + \ through their expressive full-body movement and gesture. An implementation of\ + \ the Orchestra Explorer was developed and presented in the framework of the science\ + \ exhibition {Cimenti di Invenzione e Armonia}, held at Casa Paganini, Genova,\ + \ from October 2006 to January 2007. },\n address = {New York City, NY, United\ + \ States},\n author = {Camurri, Antonio and Canepa, Corrado and Volpe, Gualtiero},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177059},\n issn = {2220-4806},\n\ + \ keywords = {Active listening of music, expressive interfaces, full-body motion\ + \ analysis and expressive gesture processing, multimodal interactive systems for\ + \ music and performing arts applications. },\n pages = {56--61},\n title = {Active\ + \ Listening to a Virtual Orchestra Through an Expressive Gestural Interface :\ + \ The Orchestra Explorer},\n url = {http://www.nime.org/proceedings/2007/nime2007_056.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813426 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177059 issn: 2220-4806 - month: July - pages: 380--381 - publisher: Birmingham City University - title: 'Immersive Dreams: A Shared VR Experience' - url: https://www.nime.org/proceedings/2020/nime2020_paper74.pdf - year: 2020 + keywords: 'Active listening of music, expressive interfaces, full-body motion analysis + and expressive gesture processing, multimodal interactive systems for music and + performing arts applications. 
' + pages: 56--61 + title: 'Active Listening to a Virtual Orchestra Through an Expressive Gestural Interface + : The Orchestra Explorer' + url: http://www.nime.org/proceedings/2007/nime2007_056.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_75 - abstract: 'There are many studies of Digital Musical Instrument (DMI) design, but - there is little research on the cross-cultural co-creation of DMIs drawing on - traditional musical instruments. We present a study of cross-cultural co-creation - inspired by the Duxianqin - a traditional Chinese Jing ethnic minority single - stringed musical instrument. We report on how we structured the co-creation with - European and Chinese participants ranging from DMI designers to composers and - performers. We discuss how we identified the `essence'' of the Duxianqin and used - this to drive co-creation of three Duxianqin reimagined through digital technologies. - Music was specially composed for these reimagined Duxianqin and performed in public - as the culmination of the design process. We reflect on our co-creation process - and how others could use such an approach to identify the essence of traditional - instruments and reimagine them in the digital age.' - address: 'Birmingham, UK' - author: 'Bryan-Kinns, Nick and ZIJIN, LI' - bibtex: "@inproceedings{NIME20_75,\n abstract = {There are many studies of Digital\ - \ Musical Instrument (DMI) design, but there is little research on the cross-cultural\ - \ co-creation of DMIs drawing on traditional musical instruments. We present a\ - \ study of cross-cultural co-creation inspired by the Duxianqin - a traditional\ - \ Chinese Jing ethnic minority single stringed musical instrument. We report on\ - \ how we structured the co-creation with European and Chinese participants ranging\ - \ from DMI designers to composers and performers. 
We discuss how we identified\ - \ the `essence' of the Duxianqin and used this to drive co-creation of three Duxianqin\ - \ reimagined through digital technologies. Music was specially composed for these\ - \ reimagined Duxianqin and performed in public as the culmination of the design\ - \ process. We reflect on our co-creation process and how others could use such\ - \ an approach to identify the essence of traditional instruments and reimagine\ - \ them in the digital age.},\n address = {Birmingham, UK},\n author = {Bryan-Kinns,\ - \ Nick and ZIJIN, LI},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813428},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {382--387},\n presentation-video = {https://youtu.be/NvHcUQea82I},\n\ - \ publisher = {Birmingham City University},\n title = {ReImagining: Cross-cultural\ - \ Co-Creation of a Chinese Traditional Musical Instrument with Digital Technologies},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper75.pdf},\n year =\ - \ {2020}\n}\n" + ID: Bell2007 + abstract: 'We present the Multimodal Music Stand (MMMS) for the untethered sensing + of performance gestures and the interactive control of music. Using e-field sensing, + audio analysis, and computer vision, the MMMS captures a performer''s continuous + expressive gestures and robustly identifies discrete cues in a musical performance. + Continuous and discrete gestures are sent to an interactive music system featuring + custom designed software that performs real-time spectral transformation of audio. 
' + address: 'New York City, NY, United States' + author: 'Bell, Bo and Kleban, Jim and Overholt, Dan and Putnam, Lance and Thompson, + John and Morin-Kuchera, JoAnn' + bibtex: "@inproceedings{Bell2007,\n abstract = {We present the Multimodal Music\ + \ Stand (MMMS) for the untethered sensing of performance gestures and the interactive\ + \ control of music. Using e-field sensing, audio analysis, and computer vision,\ + \ the MMMS captures a performer's continuous expressive gestures and robustly\ + \ identifies discrete cues in a musical performance. Continuous and discrete gestures\ + \ are sent to an interactive music system featuring custom designed software that\ + \ performs real-time spectral transformation of audio. },\n address = {New York\ + \ City, NY, United States},\n author = {Bell, Bo and Kleban, Jim and Overholt,\ + \ Dan and Putnam, Lance and Thompson, John and Morin-Kuchera, JoAnn},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177039},\n issn = {2220-4806},\n keywords\ + \ = {Multimodal, interactivity, computer vision, e-field sensing, untethered control.\ + \ },\n pages = {62--65},\n title = {The Multimodal Music Stand},\n url = {http://www.nime.org/proceedings/2007/nime2007_062.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813428 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177039 issn: 2220-4806 - month: July - pages: 382--387 - presentation-video: https://youtu.be/NvHcUQea82I - publisher: Birmingham City University - title: 'ReImagining: Cross-cultural Co-Creation of a Chinese Traditional Musical - Instrument with Digital Technologies' - url: https://www.nime.org/proceedings/2020/nime2020_paper75.pdf - year: 2020 + keywords: 'Multimodal, interactivity, computer vision, e-field sensing, untethered + control. 
' + pages: 62--65 + title: The Multimodal Music Stand + url: http://www.nime.org/proceedings/2007/nime2007_062.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_76 - abstract: 'This paper presents a discussion of Dark Matter, a sonification project - using live coding and just-in-time programming techniques. The project uses data - from proton-proton collisions produced by the Large Hadron Collider (LHC) at CERN, - Switzerland, and then detected and reconstructed by the Compact Muon Solenoid - (CMS) experiment, and was developed with the support of the art@CMS project. Work - for the Dark Matter project included the development of a custom-made environment - in the SuperCollider (SC) programming language that lets the performers of the - group engage in collective improvisations using dynamic interventions and networked - music systems. This paper will also provide information about a spin-off project - entitled the Interactive Physics Sonification System (IPSOS), an interactive and - standalone online application developed in the JavaScript programming language. - It provides a web-based interface that allows users to map particle data to sound - on commonly used web browsers, mobile devices, such as smartphones, tablets etc. - The project was developed as an educational outreach tool to engage young students - and the general public with data derived from LHC collisions.' - address: 'Birmingham, UK' - author: 'Vasilakos, Konstantinos n/a and Wilson, Scott and McCauley, Thomas and - Yeung, Tsun Winston and Margetson, Emma and Khosravi Mardakheh, Milad' - bibtex: "@inproceedings{NIME20_76,\n abstract = {This paper presents a discussion\ - \ of Dark Matter, a sonification project using live coding and just-in-time programming\ - \ techniques. 
The project uses data from proton-proton collisions produced by\ - \ the Large Hadron Collider (LHC) at CERN, Switzerland, and then detected and\ - \ reconstructed by the Compact Muon Solenoid (CMS) experiment, and was developed\ - \ with the support of the art@CMS project. Work for the Dark Matter project included\ - \ the development of a custom-made environment in the SuperCollider (SC) programming\ - \ language that lets the performers of the group engage in collective improvisations\ - \ using dynamic interventions and networked music systems. This paper will also\ - \ provide information about a spin-off project entitled the Interactive Physics\ - \ Sonification System (IPSOS), an interactive and standalone online application\ - \ developed in the JavaScript programming language. It provides a web-based interface\ - \ that allows users to map particle data to sound on commonly used web browsers,\ - \ mobile devices, such as smartphones, tablets etc. The project was developed\ - \ as an educational outreach tool to engage young students and the general public\ - \ with data derived from LHC collisions.},\n address = {Birmingham, UK},\n author\ - \ = {Vasilakos, Konstantinos n/a and Wilson, Scott and McCauley, Thomas and Yeung,\ - \ Tsun Winston and Margetson, Emma and Khosravi Mardakheh, Milad},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813430},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {388--393},\n\ - \ presentation-video = {https://youtu.be/1vS_tFUyz7g},\n publisher = {Birmingham\ - \ City University},\n title = {Sonification of High Energy Physics Data Using\ - \ Live Coding and Web Based Interfaces.},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper76.pdf},\n\ - \ year = {2020}\n}\n" + ID: Malloch2007 + abstract: 'This paper describes the T-Stick, a new family of digitalmusical 
instruments. + It presents the motivation behind theproject, hardware and software design, and + presents insightsgained through collaboration with performers who have collectively + practised and performed with the T-Stick for hundreds of hours, and with composers + who have written piecesfor the instrument in the context of McGill University''s + Digital Orchestra project. Each of the T-Sticks is based on thesame general structure + and sensing platform, but each alsodiffers from its siblings in size, weight, + timbre and range.' + address: 'New York City, NY, United States' + author: 'Malloch, Joseph and Wanderley, Marcelo M.' + bibtex: "@inproceedings{Malloch2007,\n abstract = {This paper describes the T-Stick,\ + \ a new family of digitalmusical instruments. It presents the motivation behind\ + \ theproject, hardware and software design, and presents insightsgained through\ + \ collaboration with performers who have collectively practised and performed\ + \ with the T-Stick for hundreds of hours, and with composers who have written\ + \ piecesfor the instrument in the context of McGill University's Digital Orchestra\ + \ project. 
Each of the T-Sticks is based on thesame general structure and sensing\ + \ platform, but each alsodiffers from its siblings in size, weight, timbre and\ + \ range.},\n address = {New York City, NY, United States},\n author = {Malloch,\ + \ Joseph and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177175},\n\ + \ issn = {2220-4806},\n keywords = {gestural controller, digital musical instrument,\ + \ families of instruments },\n pages = {66--69},\n title = {The T-Stick : From\ + \ Musical Interface to Musical Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_066.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813430 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177175 issn: 2220-4806 - month: July - pages: 388--393 - presentation-video: https://youtu.be/1vS_tFUyz7g - publisher: Birmingham City University - title: Sonification of High Energy Physics Data Using Live Coding and Web Based - Interfaces. - url: https://www.nime.org/proceedings/2020/nime2020_paper76.pdf - year: 2020 + keywords: 'gestural controller, digital musical instrument, families of instruments ' + pages: 66--69 + title: 'The T-Stick : From Musical Interface to Musical Instrument' + url: http://www.nime.org/proceedings/2007/nime2007_066.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_77 - abstract: 'Our goal is to develop an improvisational ensemble support system for - music beginners who do not have knowledge of chord progressions and do not have - enough experience of playing an instrument. We hypothesized that a music beginner - cannot determine tonal pitches of melody over a particular chord but can use body - movements to specify the pitch contour (i.e., melodic outline) and the attack - timings (i.e., rhythm). 
We aim to realize a performance interface for supporting - expressing intuitive pitch contour and attack timings using body motion and outputting - harmonious pitches over the chord progression of the background music. Since the - intended users of this system are not limited to people with music experience, - we plan to develop a system that uses Android smartphones, which many people have. - Our system consists of three modules: a module for specifying attack timing using - smartphone sensors, module for estimating the vertical movement of the smartphone - using smartphone sensors, and module for estimating the sound height using smartphone - vertical movement and background chord progression. Each estimation module is - developed using long short-term memory (LSTM), which is often used to estimate - time series data. We conduct evaluation experiments for each module. As a result, - the attack timing estimation had zero misjudgments, and the mean error time of - the estimated attack timing was smaller than the sensor-acquisition interval. - The accuracy of the vertical motion estimation was 64%, and that of the pitch - estimation was 7.6%. The results indicate that the attack timing is accurate enough, - but the vertical motion estimation and the pitch estimation need to be improved - for actual use.' - address: 'Birmingham, UK' - author: 'Takase, Haruya and Shiramatsu, Shun' - bibtex: "@inproceedings{NIME20_77,\n abstract = {Our goal is to develop an improvisational\ - \ ensemble support system for music beginners who do not have knowledge of chord\ - \ progressions and do not have enough experience of playing an instrument. We\ - \ hypothesized that a music beginner cannot determine tonal pitches of melody\ - \ over a particular chord but can use body movements to specify the pitch contour\ - \ (i.e., melodic outline) and the attack timings (i.e., rhythm). 
We aim to realize\ - \ a performance interface for supporting expressing intuitive pitch contour and\ - \ attack timings using body motion and outputting harmonious pitches over the\ - \ chord progression of the background music. Since the intended users of this\ - \ system are not limited to people with music experience, we plan to develop a\ - \ system that uses Android smartphones, which many people have. Our system consists\ - \ of three modules: a module for specifying attack timing using smartphone sensors,\ - \ module for estimating the vertical movement of the smartphone using smartphone\ - \ sensors, and module for estimating the sound height using smartphone vertical\ - \ movement and background chord progression. Each estimation module is developed\ - \ using long short-term memory (LSTM), which is often used to estimate time series\ - \ data. We conduct evaluation experiments for each module. As a result, the attack\ - \ timing estimation had zero misjudgments, and the mean error time of the estimated\ - \ attack timing was smaller than the sensor-acquisition interval. The accuracy\ - \ of the vertical motion estimation was 64%, and that of the pitch estimation\ - \ was 7.6%. 
The results indicate that the attack timing is accurate enough, but\ - \ the vertical motion estimation and the pitch estimation need to be improved\ - \ for actual use.},\n address = {Birmingham, UK},\n author = {Takase, Haruya and\ - \ Shiramatsu, Shun},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813434},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {394--398},\n presentation-video = {https://youtu.be/WhrGhas9Cvc},\n\ - \ publisher = {Birmingham City University},\n title = {Support System for Improvisational\ - \ Ensemble Based on Long Short-Term Memory Using Smartphone Sensor},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper77.pdf},\n\ - \ year = {2020}\n}\n" + ID: Paine2007 + abstract: 'This paper presents the Thummer Mapping Project (ThuMP), an industry + partnership project between ThumMotion P/L and The University of Western Sydney + (UWS). ThuMP sought to developing mapping strategies for new interfaces for musical + expression (NIME), specifically the ThummerTM, which provides thirteen simultaneous + degrees of freedom. This research presents a new approach to the mapping problem + resulting from a primary design research phase and a prototype testing and evaluation + phase. In order to establish an underlying design approach for the ThummerTM mapping + strategies, a number of interviews were carried out with high-level acoustic instrumental + performers, the majority of whom play with the Sydney Symphony Orchestra, Sydney, + Australia. Mapping strategies were developed from analysis of these interviews + and then evaluated in trial usability testing.' 
+ address: 'New York City, NY, United States' + author: 'Paine, Garth and Stevenson, Ian and Pearce, Angela' + bibtex: "@inproceedings{Paine2007,\n abstract = {This paper presents the Thummer\ + \ Mapping Project (ThuMP), an industry partnership project between ThumMotion\ + \ P/L and The University of Western Sydney (UWS). ThuMP sought to developing mapping\ + \ strategies for new interfaces for musical expression (NIME), specifically the\ + \ ThummerTM, which provides thirteen simultaneous degrees of freedom. This research\ + \ presents a new approach to the mapping problem resulting from a primary design\ + \ research phase and a prototype testing and evaluation phase. In order to establish\ + \ an underlying design approach for the ThummerTM mapping strategies, a number\ + \ of interviews were carried out with high-level acoustic instrumental performers,\ + \ the majority of whom play with the Sydney Symphony Orchestra, Sydney, Australia.\ + \ Mapping strategies were developed from analysis of these interviews and then\ + \ evaluated in trial usability testing.},\n address = {New York City, NY, United\ + \ States},\n author = {Paine, Garth and Stevenson, Ian and Pearce, Angela},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177217},\n issn = {2220-4806},\n\ + \ keywords = {Musical Instrument Design, Mapping, Musicianship, evaluation, testing.\ + \ },\n pages = {70--77},\n title = {The Thummer Mapping Project (ThuMP)},\n url\ + \ = {http://www.nime.org/proceedings/2007/nime2007_070.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813434 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177217 issn: 2220-4806 - month: July - pages: 394--398 - presentation-video: https://youtu.be/WhrGhas9Cvc - publisher: Birmingham City University - title: Support System 
for Improvisational Ensemble Based on Long Short-Term Memory - Using Smartphone Sensor - url: https://www.nime.org/proceedings/2020/nime2020_paper77.pdf - year: 2020 + keywords: 'Musical Instrument Design, Mapping, Musicianship, evaluation, testing. ' + pages: 70--77 + title: The Thummer Mapping Project (ThuMP) + url: http://www.nime.org/proceedings/2007/nime2007_070.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_78 - abstract: 'In this paper, we contribute to the discussion on how to best design - human-centric MIR tools for live audio mixing by bridging the gap between research - on complex systems, the psychology of automation and the design of tools that - support creativity in music production. We present the design of the Channel-AI, - an embedded AI system which performs instrument recognition and generates parameter - settings suggestions for gain levels, gating, compression and equalization which - are specific to the input signal and the instrument type. We discuss what we believe - to be the key design principles and perspectives on the making of intelligent - tools for creativity and for experts in the loop. We demonstrate how these principles - have been applied to inform the design of the interaction between expert live - audio mixing engineers with the Channel-AI (i.e. a corpus of AI features embedded - in the Midas HD Console. We report the findings from a preliminary evaluation - we conducted with three professional mixing engineers and reflect on mixing engineers’ - comments about the Channel-AI on social media.' 
- address: 'Birmingham, UK' - author: 'Tsiros, Augoustinos and Palladini, Alessandro' - bibtex: "@inproceedings{NIME20_78,\n abstract = {In this paper, we contribute to\ - \ the discussion on how to best design human-centric MIR tools for live audio\ - \ mixing by bridging the gap between research on complex systems, the psychology\ - \ of automation and the design of tools that support creativity in music production.\ - \ We present the design of the Channel-AI, an embedded AI system which performs\ - \ instrument recognition and generates parameter settings suggestions for gain\ - \ levels, gating, compression and equalization which are specific to the input\ - \ signal and the instrument type. We discuss what we believe to be the key design\ - \ principles and perspectives on the making of intelligent tools for creativity\ - \ and for experts in the loop. We demonstrate how these principles have been applied\ - \ to inform the design of the interaction between expert live audio mixing engineers\ - \ with the Channel-AI (i.e. 
a corpus of AI features embedded in the Midas HD Console.\ - \ We report the findings from a preliminary evaluation we conducted with three\ - \ professional mixing engineers and reflect on mixing engineers’ comments about\ - \ the Channel-AI on social media.},\n address = {Birmingham, UK},\n author = {Tsiros,\ - \ Augoustinos and Palladini, Alessandro},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813436},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {399--404},\n publisher = {Birmingham City University},\n\ - \ title = {Towards a Human-Centric Design Framework for AI Assisted Music Production},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper78.pdf},\n year =\ - \ {2020}\n}\n" + ID: dAlessandro2007 + abstract: 'In this paper, we present a new bi-manual gestural controller, called + HandSketch, composed of purchasable devices : pen tablet and pressure-sensing + surfaces. It aims at achieving real-time manipulation of several continuous and + articulated aspects of pitched sounds synthesis, with a focus on expressive voice. + Both prefered and non-prefered hand issues are discussed. Concrete playing diagrams + and mapping strategies are described. These results are integrated and a compact + controller is proposed.' + address: 'New York City, NY, United States' + author: 'd''Alessandro, Nicolas and Dutoit, Thierry' + bibtex: "@inproceedings{dAlessandro2007,\n abstract = {In this paper, we present\ + \ a new bi-manual gestural controller, called HandSketch, composed of purchasable\ + \ devices : pen tablet and pressure-sensing surfaces. It aims at achieving real-time\ + \ manipulation of several continuous and articulated aspects of pitched sounds\ + \ synthesis, with a focus on expressive voice. Both prefered and non-prefered\ + \ hand issues are discussed. 
Concrete playing diagrams and mapping strategies\ + \ are described. These results are integrated and a compact controller is proposed.},\n\ + \ address = {New York City, NY, United States},\n author = {d'Alessandro, Nicolas\ + \ and Dutoit, Thierry},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177027},\n\ + \ issn = {2220-4806},\n keywords = {Pen tablet, FSR, bi-manual gestural control.\ + \ },\n pages = {78--81},\n title = {HandSketch Bi-Manual Controller Investigation\ + \ on Expressive Control Issues of an Augmented Tablet},\n url = {http://www.nime.org/proceedings/2007/nime2007_078.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813436 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177027 issn: 2220-4806 - month: July - pages: 399--404 - publisher: Birmingham City University - title: Towards a Human-Centric Design Framework for AI Assisted Music Production - url: https://www.nime.org/proceedings/2020/nime2020_paper78.pdf - year: 2020 + keywords: 'Pen tablet, FSR, bi-manual gestural control. ' + pages: 78--81 + title: HandSketch Bi-Manual Controller Investigation on Expressive Control Issues + of an Augmented Tablet + url: http://www.nime.org/proceedings/2007/nime2007_078.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_79 - abstract: 'Understanding the question of what makes a good musical instrument raises - several conceptual challenges. Researchers have regularly adopted tools from traditional - HCI as a framework to address this issue, in which instrumental musical activities - are taken to comprise a device and a user, and should be evaluated as such. We - argue that this approach is not equipped to fully address the conceptual issues - raised by this question. 
It is worth reflecting on what exactly an instrument - is, and how instruments contribute toward meaningful musical experiences. Based - on a theoretical framework that incorporates ideas from ecological psychology, - enactivism, and phenomenology, we propose an alternative approach to studying - musical instruments. According to this approach, instruments are better understood - in terms of processes rather than as devices, while musicians are not users, but - rather agents in musical ecologies. A consequence of this reframing is that any - evaluations of instruments, if warranted, should align with the specificities - of the relevant processes and ecologies concerned. We present an outline of this - argument and conclude with a description of a current research project to illustrate - how our approach can shape the design and performance of a musical instrument - in-progress.' - address: 'Birmingham, UK' - author: 'Rodger, Matthew and Stapleton, Paul and van Walstijn, Maarten and Ortiz, - Miguel and Pardue, Laurel S' - bibtex: "@inproceedings{NIME20_79,\n abstract = {Understanding the question of what\ - \ makes a good musical instrument raises several conceptual challenges. Researchers\ - \ have regularly adopted tools from traditional HCI as a framework to address\ - \ this issue, in which instrumental musical activities are taken to comprise a\ - \ device and a user, and should be evaluated as such. We argue that this approach\ - \ is not equipped to fully address the conceptual issues raised by this question.\ - \ It is worth reflecting on what exactly an instrument is, and how instruments\ - \ contribute toward meaningful musical experiences. Based on a theoretical framework\ - \ that incorporates ideas from ecological psychology, enactivism, and phenomenology,\ - \ we propose an alternative approach to studying musical instruments. 
According\ - \ to this approach, instruments are better understood in terms of processes rather\ - \ than as devices, while musicians are not users, but rather agents in musical\ - \ ecologies. A consequence of this reframing is that any evaluations of instruments,\ - \ if warranted, should align with the specificities of the relevant processes\ - \ and ecologies concerned. We present an outline of this argument and conclude\ - \ with a description of a current research project to illustrate how our approach\ - \ can shape the design and performance of a musical instrument in-progress.},\n\ - \ address = {Birmingham, UK},\n author = {Rodger, Matthew and Stapleton, Paul\ - \ and van Walstijn, Maarten and Ortiz, Miguel and Pardue, Laurel S},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813438},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {405--410},\n\ - \ presentation-video = {https://youtu.be/ADLo-QdSwBc},\n publisher = {Birmingham\ - \ City University},\n title = {What Makes a Good Musical Instrument? A Matter\ - \ of Processes, Ecologies and Specificities },\n url = {https://www.nime.org/proceedings/2020/nime2020_paper79.pdf},\n\ - \ year = {2020}\n}\n" + ID: Takegawa2007 + abstract: 'Musical performers need to show off their virtuosity for selfexpression + and communicate with other people. Therefore, they are prepared to perform at + any time and anywhere. However, a musical keyboard of 88 keys is too large and + too heavy to carry around. When a portable keyboard that is suitable for carrying + around is played over a wide range, the notes being played frequently cause the + diapason of the keyboard to protrude. It is common to use Key Transpose in conventional + portable keyboards, which shifts the diapason of the keyboard. 
However, this function + creates several problems such as the feeling of discomfort from the misalignment + between the keying positions and their output sounds. Therefore, the goal of our + study is to construct Mobile Clavier, which enables the diapason to be changed + smoothly. Mobile Clavier resolves the problems with Key Transpose by having black + keys inserted between any two side-by-side white keys. This paper also discusses + how effective Mobile Clavier was in an experiment conducted using professional + pianists. We can play music at any time and anywhere with Mobile Clavier.' + address: 'New York City, NY, United States' + author: 'Takegawa, Yoshinari and Terada, Tsutomu' + bibtex: "@inproceedings{Takegawa2007,\n abstract = {Musical performers need to show\ + \ off their virtuosity for selfexpression and communicate with other people. Therefore,\ + \ they are prepared to perform at any time and anywhere. However, a musical keyboard\ + \ of 88 keys is too large and too heavy to carry around. When a portable keyboard\ + \ that is suitable for carrying around is played over a wide range, the notes\ + \ being played frequently cause the diapason of the keyboard to protrude. It is\ + \ common to use Key Transpose in conventional portable keyboards, which shifts\ + \ the diapason of the keyboard. However, this function creates several problems\ + \ such as the feeling of discomfort from the misalignment between the keying positions\ + \ and their output sounds. Therefore, the goal of our study is to construct Mobile\ + \ Clavier, which enables the diapason to be changed smoothly. Mobile Clavier resolves\ + \ the problems with Key Transpose by having black keys inserted between any two\ + \ side-by-side white keys. This paper also discusses how effective Mobile Clavier\ + \ was in an experiment conducted using professional pianists. 
We can play music\ + \ at any time and anywhere with Mobile Clavier.},\n address = {New York City,\ + \ NY, United States},\n author = {Takegawa, Yoshinari and Terada, Tsutomu},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177255},\n issn = {2220-4806},\n\ + \ keywords = {Portable keyboard, Additional black keys, Diapason change },\n pages\ + \ = {82--87},\n title = {Mobile Clavier : New Music Keyboard for Flexible Key\ + \ Transpose},\n url = {http://www.nime.org/proceedings/2007/nime2007_082.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813438 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177255 issn: 2220-4806 - month: July - pages: 405--410 - presentation-video: https://youtu.be/ADLo-QdSwBc - publisher: Birmingham City University - title: 'What Makes a Good Musical Instrument? A Matter of Processes, Ecologies and - Specificities ' - url: https://www.nime.org/proceedings/2020/nime2020_paper79.pdf - year: 2020 + keywords: 'Portable keyboard, Additional black keys, Diapason change ' + pages: 82--87 + title: 'Mobile Clavier : New Music Keyboard for Flexible Key Transpose' + url: http://www.nime.org/proceedings/2007/nime2007_082.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_8 - abstract: 'We describe a sonic artwork, "Listening To Listening", that has been - designed to accompany a real-world sculpture with two prototype interaction schemes. - Our artwork is created for the HoloLens platform so that users can have an individual - experience in a mixed reality context. Personal AR systems have recently become - available and practical for integration into public art projects, however research - into sonic sculpture works has yet to account for the affordances of current portable - and mainstream AR systems. 
In this work, we take advantage of the HoloLens'' spatial - awareness to build sonic spaces that have a precise spatial relationship to a - given sculpture and where the sculpture itself is modelled in the augmented scene - as an "invisible hologram". We describe the artistic rationale for our artwork, - the design of the two interaction schemes, and the technical and usability feedback - that we have obtained from demonstrations during iterative development. This work - appears to be the first time that head-mounted AR has been used to build an interactive - sonic landscape to engage with a public sculpture.' - address: 'Birmingham, UK' - author: 'Martin, Charles Patrick and Liu, Zeruo and Wang, Yichen and He, Wennan - and Gardner, Henry' - bibtex: "@inproceedings{NIME20_8,\n abstract = {We describe a sonic artwork, \"\ - Listening To Listening\", that has been designed to accompany a real-world sculpture\ - \ with two prototype interaction schemes. Our artwork is created for the HoloLens\ - \ platform so that users can have an individual experience in a mixed reality\ - \ context. Personal AR systems have recently become available and practical for\ - \ integration into public art projects, however research into sonic sculpture\ - \ works has yet to account for the affordances of current portable and mainstream\ - \ AR systems. In this work, we take advantage of the HoloLens' spatial awareness\ - \ to build sonic spaces that have a precise spatial relationship to a given sculpture\ - \ and where the sculpture itself is modelled in the augmented scene as an \"invisible\ - \ hologram\". We describe the artistic rationale for our artwork, the design of\ - \ the two interaction schemes, and the technical and usability feedback that we\ - \ have obtained from demonstrations during iterative development. 
This work appears\ - \ to be the first time that head-mounted AR has been used to build an interactive\ - \ sonic landscape to engage with a public sculpture.},\n address = {Birmingham,\ - \ UK},\n author = {Martin, Charles Patrick and Liu, Zeruo and Wang, Yichen and\ - \ He, Wennan and Gardner, Henry},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813445},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {39--42},\n presentation-video = {https://youtu.be/RlTWXnFOLN8},\n\ - \ publisher = {Birmingham City University},\n title = {Sonic Sculpture: Activating\ - \ Engagement with Head-Mounted Augmented Reality},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper8.pdf},\n\ - \ year = {2020}\n}\n" + ID: Ojanen2007 + abstract: This paper presents a line of historic electronic musical instruments + designed by Erkki Kurenniemi in the 1960's and1970's. Kurenniemi's instruments + were influenced by digitallogic and an experimental attitude towards user interfacedesign. + The paper presents an overview of Kurenniemi'sinstruments and a detailed description + of selected devices.Emphasis is put on user interface issues such as unconventional + interactive real-time control and programming methods. + address: 'New York City, NY, United States' + author: 'Ojanen, Mikko and Suominen, Jari and Kallio, Titti and Lassfolk, Kai' + bibtex: "@inproceedings{Ojanen2007,\n abstract = {This paper presents a line of\ + \ historic electronic musical instruments designed by Erkki Kurenniemi in the\ + \ 1960's and1970's. Kurenniemi's instruments were influenced by digitallogic and\ + \ an experimental attitude towards user interfacedesign. 
The paper presents an\ + \ overview of Kurenniemi'sinstruments and a detailed description of selected devices.Emphasis\ + \ is put on user interface issues such as unconventional interactive real-time\ + \ control and programming methods.},\n address = {New York City, NY, United States},\n\ + \ author = {Ojanen, Mikko and Suominen, Jari and Kallio, Titti and Lassfolk, Kai},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177211},\n issn = {2220-4806},\n\ + \ keywords = {Erkki Kurenniemi, Dimi, Synthesizer, Digital electronics, User interface\ + \ design },\n pages = {88--93},\n title = {Design Principles and User Interfaces\ + \ of Erkki Kurenniemi's Electronic Musical Instruments of the 1960's and 1970's},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_088.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813445 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177211 issn: 2220-4806 - month: July - pages: 39--42 - presentation-video: https://youtu.be/RlTWXnFOLN8 - publisher: Birmingham City University - title: 'Sonic Sculpture: Activating Engagement with Head-Mounted Augmented Reality' - url: https://www.nime.org/proceedings/2020/nime2020_paper8.pdf - year: 2020 + keywords: 'Erkki Kurenniemi, Dimi, Synthesizer, Digital electronics, User interface + design ' + pages: 88--93 + title: Design Principles and User Interfaces of Erkki Kurenniemi's Electronic Musical + Instruments of the 1960's and 1970's + url: http://www.nime.org/proceedings/2007/nime2007_088.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_80 - abstract: 'Augmented instruments have been a widely explored research topic since - the late 80s. 
The possibility to use sensors for providing an input for sound - processing/synthesis units let composers and sound artist open up new ways for - experimentation. Augmented Reality, by rendering virtual objects in the real world - and by making those objects interactive (via some sensor-generated input), provides - a new frame for this research field. In fact, the 3D visual feedback, delivering - a precise indication of the spatial configuration/function of each virtual interface, - can make the instrumental augmentation process more intuitive for the interpreter - and more resourceful for a composer/creator: interfaces can change their behavior - over time, can be reshaped, activated or deactivated. Each of these modifications - can be made obvious to the performer by using strategies of visual feedback. In - addition, it is possible to accurately sample space and to map it with differentiated - functions. Augmenting interfaces can also be considered a visual expressive tool - for the audience and designed accordingly: the performer’s point of view (or another - point of view provided by an external camera) can be mirrored to a projector. - This article will show some example of different designs of AR piano augmentation - from the composition Studi sulla realtà nuova.' - address: 'Birmingham, UK' - author: 'Santini, Giovanni ' - bibtex: "@inproceedings{NIME20_80,\n abstract = {Augmented instruments have been\ - \ a widely explored research topic since the late 80s. The possibility to use\ - \ sensors for providing an input for sound processing/synthesis units let composers\ - \ and sound artist open up new ways for experimentation. 
Augmented Reality, by\ - \ rendering virtual objects in the real world and by making those objects interactive\ - \ (via some sensor-generated input), provides a new frame for this research field.\ - \ In fact, the 3D visual feedback, delivering a precise indication of the spatial\ - \ configuration/function of each virtual interface, can make the instrumental\ - \ augmentation process more intuitive for the interpreter and more resourceful\ - \ for a composer/creator: interfaces can change their behavior over time, can\ - \ be reshaped, activated or deactivated. Each of these modifications can be made\ - \ obvious to the performer by using strategies of visual feedback. In addition,\ - \ it is possible to accurately sample space and to map it with differentiated\ - \ functions. Augmenting interfaces can also be considered a visual expressive\ - \ tool for the audience and designed accordingly: the performer’s point of view\ - \ (or another point of view provided by an external camera) can be mirrored to\ - \ a projector. This article will show some example of different designs of AR\ - \ piano augmentation from the composition Studi sulla realtà nuova.},\n address\ - \ = {Birmingham, UK},\n author = {Santini, Giovanni },\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813449},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {411--415},\n presentation-video\ - \ = {https://youtu.be/3HBWvKj2cqc},\n publisher = {Birmingham City University},\n\ - \ title = {Augmented Piano in Augmented Reality},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper80.pdf},\n\ - \ year = {2020}\n}\n" + ID: Magnusson2007 + abstract: 'This paper reports on a survey conducted in the autumn of 2006 with the + objective to understand people''s relationship to their musical tools. 
The survey + focused on the question of embodiment and its different modalities in the fields + of acoustic and digital instruments. The questions of control, instrumental entropy, + limitations and creativity were addressed in relation to people''s activities + of playing, creating or modifying their instruments. The approach used in the + survey was phenomenological, i.e. we were concerned with the experience of playing, + composing for and designing digital or acoustic instruments. At the time of analysis, + we had 209 replies from musicians, composers, engineers, designers, artists and + others interested in this topic. The survey was mainly aimed at instrumentalists + and people who create their own instruments or compositions in flexible audio + programming environments such as SuperCollider, Pure Data, ChucK, Max/MSP, CSound, + etc. ' + address: 'New York City, NY, United States' + author: 'Magnusson, Thor and Mendieta, Enrike H.' + bibtex: "@inproceedings{Magnusson2007,\n abstract = {This paper reports on a survey\ + \ conducted in the autumn of 2006 with the objective to understand people's relationship\ + \ to their musical tools. The survey focused on the question of embodiment and\ + \ its different modalities in the fields of acoustic and digital instruments.\ + \ The questions of control, instrumental entropy, limitations and creativity were\ + \ addressed in relation to people's activities of playing, creating or modifying\ + \ their instruments. The approach used in the survey was phenomenological, i.e.\ + \ we were concerned with the experience of playing, composing for and designing\ + \ digital or acoustic instruments. At the time of analysis, we had 209 replies\ + \ from musicians, composers, engineers, designers, artists and others interested\ + \ in this topic. 
The survey was mainly aimed at instrumentalists and people who\ + \ create their own instruments or compositions in flexible audio programming environments\ + \ such as SuperCollider, Pure Data, ChucK, Max/MSP, CSound, etc. },\n address\ + \ = {New York City, NY, United States},\n author = {Magnusson, Thor and Mendieta,\ + \ Enrike H.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177171},\n issn\ + \ = {2220-4806},\n keywords = {Survey, musical instruments, usability, ergonomics,\ + \ embodiment, mapping, affordances, constraints, instrumental entropy, audio programming.\ + \ },\n pages = {94--99},\n title = {The Acoustic, the Digital and the Body : A\ + \ Survey on Musical Instruments},\n url = {http://www.nime.org/proceedings/2007/nime2007_094.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813449 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177171 issn: 2220-4806 - month: July - pages: 411--415 - presentation-video: https://youtu.be/3HBWvKj2cqc - publisher: Birmingham City University - title: Augmented Piano in Augmented Reality - url: https://www.nime.org/proceedings/2020/nime2020_paper80.pdf - year: 2020 + keywords: 'Survey, musical instruments, usability, ergonomics, embodiment, mapping, + affordances, constraints, instrumental entropy, audio programming. ' + pages: 94--99 + title: 'The Acoustic, the Digital and the Body : A Survey on Musical Instruments' + url: http://www.nime.org/proceedings/2007/nime2007_094.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_81 - abstract: 'Whilst there is a large body of NIME papers that concentrate on the presentation - of new technologies there are fewer papers that have focused on a longitudinal - understanding of NIMEs in practice. 
This paper embodies the more recent acknowledgement - of the importance of practice-based methods of evaluation [1,2,3,4] concerning - the use of NIMEs within performance and the recognition that it is only within - the situation of practice that the context is available to actually interpret - and evaluate the instrument [2]. Within this context this paper revisits the Feral - Cello performance system that was first presented at NIME 2017 [5]. This paper - explores what has been learned through the artistic practice of performing and - workshopping in this context by drawing heavily on the experiences of the performer/composer - who has become an integral part of this project and co-author of this paper. The - original philosophical context is also revisited and reflections are made on the - tensions between this position and the need to ‘get something to work’. The authors - feel the presentation of the semi-structured interview within the paper is the - best method of staying truthful to Hayes understanding of musical improvisation - as an enactive framework ‘in its ability to demonstrate the importance of participatory, - relational, emergent, and embodied musical activities and processes’ [4].' 
- address: 'Birmingham, UK' - author: 'Davis, Tom and Reid, Laura' - bibtex: "@inproceedings{NIME20_81,\n abstract = {Whilst there is a large body of\ - \ NIME papers that concentrate on the presentation of new technologies there are\ - \ fewer papers that have focused on a longitudinal understanding of NIMEs in practice.\ - \ This paper embodies the more recent acknowledgement of the importance of practice-based\ - \ methods of evaluation [1,2,3,4] concerning the use of NIMEs within performance\ - \ and the recognition that it is only within the situation of practice that the\ - \ context is available to actually interpret and evaluate the instrument [2].\ - \ Within this context this paper revisits the Feral Cello performance system that\ - \ was first presented at NIME 2017 [5]. This paper explores what has been learned\ - \ through the artistic practice of performing and workshopping in this context\ - \ by drawing heavily on the experiences of the performer/composer who has become\ - \ an integral part of this project and co-author of this paper. The original philosophical\ - \ context is also revisited and reflections are made on the tensions between this\ - \ position and the need to ‘get something to work’. 
The authors feel the presentation\ - \ of the semi-structured interview within the paper is the best method of staying\ - \ truthful to Hayes understanding of musical improvisation as an enactive framework\ - \ ‘in its ability to demonstrate the importance of participatory, relational,\ - \ emergent, and embodied musical activities and processes’ [4].},\n address =\ - \ {Birmingham, UK},\n author = {Davis, Tom and Reid, Laura},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813453},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {416--421},\n presentation-video\ - \ = {https://youtu.be/9npR0T6YGiA},\n publisher = {Birmingham City University},\n\ - \ title = {Taking Back Control: Taming the Feral Cello},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper81.pdf},\n\ - \ year = {2020}\n}\n" + ID: Zbyszynski2007 + abstract: 'We summarize a decade of musical projects and research employing Wacom + digitizing tablets as musical controllers, discussing general implementation schemes + using Max/MSP and OpenSoundControl, and specific implementations in musical improvisation, + interactive sound installation, interactive multimedia performance, and as a compositional + assistant. We examine two-handed sensing strategies and schemes for gestural mapping. ' + address: 'New York City, NY, United States' + author: 'Zbyszynski, Michael and Wright, Matthew and Momeni, Ali and Cullen, Daniel' + bibtex: "@inproceedings{Zbyszynski2007,\n abstract = {We summarize a decade of musical\ + \ projects and research employing Wacom digitizing tablets as musical controllers,\ + \ discussing general implementation schemes using Max/MSP and OpenSoundControl,\ + \ and specific implementations in musical improvisation, interactive sound installation,\ + \ interactive multimedia performance, and as a compositional assistant. 
We examine\ + \ two-handed sensing strategies and schemes for gestural mapping. },\n address\ + \ = {New York City, NY, United States},\n author = {Zbyszynski, Michael and Wright,\ + \ Matthew and Momeni, Ali and Cullen, Daniel},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179483},\n issn = {2220-4806},\n keywords = {1,algorithmic\ + \ composition,digitizing tablet,expressivity,gesture,mapping,nime07,position sensing,wacom\ + \ tablet,why the wacom tablet},\n pages = {100--105},\n title = {Ten Years of\ + \ Tablet Musical Interfaces at CNMAT},\n url = {http://www.nime.org/proceedings/2007/nime2007_100.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813453 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1179483 issn: 2220-4806 - month: July - pages: 416--421 - presentation-video: https://youtu.be/9npR0T6YGiA - publisher: Birmingham City University - title: 'Taking Back Control: Taming the Feral Cello' - url: https://www.nime.org/proceedings/2020/nime2020_paper81.pdf - year: 2020 + keywords: '1,algorithmic composition,digitizing tablet,expressivity,gesture,mapping,nime07,position + sensing,wacom tablet,why the wacom tablet' + pages: 100--105 + title: Ten Years of Tablet Musical Interfaces at CNMAT + url: http://www.nime.org/proceedings/2007/nime2007_100.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_82 - abstract: 'Becoming a practical musician traditionally requires an extensive amount - of preparatory work to master the technical and theoretical challenges of the - particular instrument and musical style before being able to devote oneself to - musical expression. In particular, in jazz improvisation, one of the major barriers - is the mastery and appropriate selection of scales from a wide range, according - to harmonic context and style. 
In this paper, we present AutoScale, an interactive - software for making jazz improvisation more accessible by lifting the burden of - scale selection from the musician while still allowing full controllability if - desired. This is realized by implementing a MIDI effect that dynamically maps - the desired scales onto a standardized layout. Scale selection can be pre-programmed, - automated based on algorithmic lead sheet analysis, or interactively adapted. - We discuss the music-theoretical foundations underlying our approach, the design - choices taken for building an intuitive user interface, and provide implementations - as VST plugin and web applications for use with a Launchpad or traditional MIDI - keyboard.' - address: 'Birmingham, UK' - author: 'Jaccard, Thibault and Lieck, Robert and Rohrmeier, Martin' - bibtex: "@inproceedings{NIME20_82,\n abstract = {Becoming a practical musician traditionally\ - \ requires an extensive amount of preparatory work to master the technical and\ - \ theoretical challenges of the particular instrument and musical style before\ - \ being able to devote oneself to musical expression. In particular, in jazz improvisation,\ - \ one of the major barriers is the mastery and appropriate selection of scales\ - \ from a wide range, according to harmonic context and style. In this paper, we\ - \ present AutoScale, an interactive software for making jazz improvisation more\ - \ accessible by lifting the burden of scale selection from the musician while\ - \ still allowing full controllability if desired. This is realized by implementing\ - \ a MIDI effect that dynamically maps the desired scales onto a standardized layout.\ - \ Scale selection can be pre-programmed, automated based on algorithmic lead sheet\ - \ analysis, or interactively adapted. 
We discuss the music-theoretical foundations\ - \ underlying our approach, the design choices taken for building an intuitive\ - \ user interface, and provide implementations as VST plugin and web applications\ - \ for use with a Launchpad or traditional MIDI keyboard.},\n address = {Birmingham,\ - \ UK},\n author = {Jaccard, Thibault and Lieck, Robert and Rohrmeier, Martin},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813457},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {422--427},\n presentation-video = {https://youtu.be/KqGpTTQ9ZrE},\n publisher\ - \ = {Birmingham City University},\n title = {AutoScale: Automatic and Dynamic\ - \ Scale Selection for Live Jazz Improvisation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper82.pdf},\n\ - \ year = {2020}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.4813457 - editor: Romain Michon and Franziska Schroeder - issn: 2220-4806 - month: July - pages: 422--427 - presentation-video: https://youtu.be/KqGpTTQ9ZrE - publisher: Birmingham City University - title: 'AutoScale: Automatic and Dynamic Scale Selection for Live Jazz Improvisation' - url: https://www.nime.org/proceedings/2020/nime2020_paper82.pdf - year: 2020 - - -- ENTRYTYPE: inproceedings - ID: NIME20_83 - abstract: 'Nearly two decades after its inception as a workshop at the ACM Conference - on Human Factors in Computing Systems, NIME exists as an established international - conference significantly distinct from its precursor. While this origin story - is often noted, the implications of NIME''s history as emerging from a field predominantly - dealing with human-computer interaction have rarely been discussed. 
In this paper - we highlight many of the recent—and some not so recent—challenges that have been - brought upon the NIME community as it attempts to maintain and expand its identity - as a platform for multidisciplinary research into HCI, interface design, and electronic - and computer music. We discuss the relationship between the market demands of - the neoliberal university—which have underpinned academia''s drive for innovation—and - the quantification and economisation of research performance which have facilitated - certain disciplinary and social frictions to emerge within NIME-related research - and practice. Drawing on work that engages with feminist theory and cultural studies, - we suggest that critical reflection and moreover mediation is necessary in order - to address burgeoning concerns which have been raised within the NIME discourse - in relation to methodological approaches,''diversity and inclusion'', ''accessibility'', - and the fostering of rigorous interdisciplinary research.' - address: 'Birmingham, UK' - author: 'Hayes, Lauren and Marquez-Borbon, Adnan' - bibtex: "@inproceedings{NIME20_83,\n abstract = {Nearly two decades after its inception\ - \ as a workshop at the ACM Conference on Human Factors in Computing Systems, NIME\ - \ exists as an established international conference significantly distinct from\ - \ its precursor. While this origin story is often noted, the implications of NIME's\ - \ history as emerging from a field predominantly dealing with human-computer interaction\ - \ have rarely been discussed. In this paper we highlight many of the recent—and\ - \ some not so recent—challenges that have been brought upon the NIME community\ - \ as it attempts to maintain and expand its identity as a platform for multidisciplinary\ - \ research into HCI, interface design, and electronic and computer music. 
We discuss\ - \ the relationship between the market demands of the neoliberal university—which\ - \ have underpinned academia's drive for innovation—and the quantification and\ - \ economisation of research performance which have facilitated certain disciplinary\ - \ and social frictions to emerge within NIME-related research and practice. Drawing\ - \ on work that engages with feminist theory and cultural studies, we suggest that\ - \ critical reflection and moreover mediation is necessary in order to address\ - \ burgeoning concerns which have been raised within the NIME discourse in relation\ - \ to methodological approaches,'diversity and inclusion', 'accessibility', and\ - \ the fostering of rigorous interdisciplinary research.},\n address = {Birmingham,\ - \ UK},\n author = {Hayes, Lauren and Marquez-Borbon, Adnan},\n booktitle = {Proceedings\ + ID: Gurevich2007 + abstract: 'We describe the prevailing model of musical expression, which assumes + a binary formulation of "the text" and "the act", along with its implied roles + of composer and performer. We argue that this model not only excludes some contemporary + aesthetic values but also limits the communicative ability of new music interfaces. + As an alternative, an ecology of musical creation accounts for both a diversity + of aesthetic goals and the complex interrelation of human and non-human agents. + An ecological perspective on several approaches to musical creation with interactive + technologies reveals an expanded, more inclusive view of artistic interaction + that facilitates novel, compelling ways to use technology for music. This paper + is fundamentally a call to consider the role of aesthetic values in the analysis + of artistic processes and technologies. 
' + address: 'New York City, NY, United States' + author: 'Gurevich, Michael and Treviño, Jeffrey' + bibtex: "@inproceedings{Gurevich2007,\n abstract = {We describe the prevailing model\ + \ of musical expression, which assumes a binary formulation of \"the text\" and\ + \ \"the act\", along with its implied roles of composer and performer. We argue\ + \ that this model not only excludes some contemporary aesthetic values but also\ + \ limits the communicative ability of new music interfaces. As an alternative,\ + \ an ecology of musical creation accounts for both a diversity of aesthetic goals\ + \ and the complex interrelation of human and non-human agents. An ecological perspective\ + \ on several approaches to musical creation with interactive technologies reveals\ + \ an expanded, more inclusive view of artistic interaction that facilitates novel,\ + \ compelling ways to use technology for music. This paper is fundamentally a call\ + \ to consider the role of aesthetic values in the analysis of artistic processes\ + \ and technologies. 
},\n address = {New York City, NY, United States},\n author\ + \ = {Gurevich, Michael and Trevi\\~{n}o, Jeffrey},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813459},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {428--433},\n presentation-video\ - \ = {https://youtu.be/4UERHlFUQzo},\n publisher = {Birmingham City University},\n\ - \ title = {Nuanced and Interrelated Mediations and Exigencies (NIME): Addressing\ - \ the Prevailing Political and Epistemological Crises},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper83.pdf},\n\ - \ year = {2020}\n}\n" + \ doi = {10.5281/zenodo.1177107},\n issn = {2220-4806},\n keywords = {Expression,\ + \ expressivity, non-expressive, emotion, discipline, model, construct, discourse,\ + \ aesthetic goal, experience, transparency, evaluation, communication },\n pages\ + \ = {106--111},\n title = {Expression and Its Discontents : Toward an Ecology\ + \ of Musical Creation},\n url = {http://www.nime.org/proceedings/2007/nime2007_106.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813459 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177107 issn: 2220-4806 - month: July - pages: 428--433 - presentation-video: https://youtu.be/4UERHlFUQzo - publisher: Birmingham City University - title: 'Nuanced and Interrelated Mediations and Exigencies (NIME): Addressing the - Prevailing Political and Epistemological Crises' - url: https://www.nime.org/proceedings/2020/nime2020_paper83.pdf - year: 2020 + keywords: 'Expression, expressivity, non-expressive, emotion, discipline, model, + construct, discourse, aesthetic goal, experience, transparency, evaluation, communication ' + pages: 106--111 + title: 'Expression and Its Discontents : Toward an Ecology of Musical 
Creation' + url: http://www.nime.org/proceedings/2007/nime2007_106.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_84 - abstract: 'Digital musical instrument design is often presented as an open-ended - creative process in which technology is adopted and adapted to serve the musical - will of the designer. The real-time music programming languages powering many - new instruments often provide access to audio manipulation at a low level, theoretically - allowing the creation of any sonic structure from primitive operations. As a result, - designers may assume that these seemingly omnipotent tools are pliable vehicles - for the expression of musical ideas. We present the outcomes of a compositional - game in which sound designers were invited to create simple instruments using - common sensors and the Pure Data programming language. We report on the patterns - and structures that often emerged during the exercise, arguing that designers - respond strongly to suggestions offered by the tools they use. We discuss the - idea that current music programming languages may be as culturally loaded as the - communities of practice that produce and use them. Instrument making is then best - viewed as a protracted negotiation between designer and tools.' - address: 'Birmingham, UK' - author: 'McPherson, Andrew and Lepri, Giacomo' - bibtex: "@inproceedings{NIME20_84,\n abstract = {Digital musical instrument design\ - \ is often presented as an open-ended creative process in which technology is\ - \ adopted and adapted to serve the musical will of the designer. The real-time\ - \ music programming languages powering many new instruments often provide access\ - \ to audio manipulation at a low level, theoretically allowing the creation of\ - \ any sonic structure from primitive operations. As a result, designers may assume\ - \ that these seemingly omnipotent tools are pliable vehicles for the expression\ - \ of musical ideas. 
We present the outcomes of a compositional game in which sound\ - \ designers were invited to create simple instruments using common sensors and\ - \ the Pure Data programming language. We report on the patterns and structures\ - \ that often emerged during the exercise, arguing that designers respond strongly\ - \ to suggestions offered by the tools they use. We discuss the idea that current\ - \ music programming languages may be as culturally loaded as the communities of\ - \ practice that produce and use them. Instrument making is then best viewed as\ - \ a protracted negotiation between designer and tools.},\n address = {Birmingham,\ - \ UK},\n author = {McPherson, Andrew and Lepri, Giacomo},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813461},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {434--439},\n presentation-video\ - \ = {https://youtu.be/-nRtaucPKx4},\n publisher = {Birmingham City University},\n\ - \ title = {Beholden to our tools: negotiating with technology while sketching\ - \ digital instruments},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper84.pdf},\n\ - \ year = {2020}\n}\n" + ID: Nilson2007 + abstract: 'Live coding is almost the antithesis of immediate physical musicianship, + and yet, has attracted the attentions of a number of computer-literate musicians, + as well as the music-savvy programmers that might be more expected. It is within + the context of live coding that I seek to explore the question of practising a + contemporary digital musical instrument, which is often raised as an aside but + more rarely carried out in research (though see [12]). At what stage of expertise + are the members of the live coding movement, and what practice regimes might help + them to find their true potential?' 
+ address: 'New York City, NY, United States' + author: 'Nilson, Click' + bibtex: "@inproceedings{Nilson2007,\n abstract = {Live coding is almost the antithesis\ + \ of immediate physical musicianship, and yet, has attracted the attentions of\ + \ a number of computer-literate musicians, as well as the music-savvy programmers\ + \ that might be more expected. It is within the context of live coding that I\ + \ seek to explore the question of practising a contemporary digital musical instrument,\ + \ which is often raised as an aside but more rarely carried out in research (though\ + \ see [12]). At what stage of expertise are the members of the live coding movement,\ + \ and what practice regimes might help them to find their true potential?},\n\ + \ address = {New York City, NY, United States},\n author = {Nilson, Click},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177209},\n issn = {2220-4806},\n\ + \ keywords = {Practice, practising, live coding },\n pages = {112--117},\n title\ + \ = {Live Coding Practice},\n url = {http://www.nime.org/proceedings/2007/nime2007_112.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813461 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177209 issn: 2220-4806 - month: July - pages: 434--439 - presentation-video: https://youtu.be/-nRtaucPKx4 - publisher: Birmingham City University - title: 'Beholden to our tools: negotiating with technology while sketching digital - instruments' - url: https://www.nime.org/proceedings/2020/nime2020_paper84.pdf - year: 2020 + keywords: 'Practice, practising, live coding ' + pages: 112--117 + title: Live Coding Practice + url: http://www.nime.org/proceedings/2007/nime2007_112.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_85 - abstract: 'Percussive fingerstyle is a playing 
technique adopted by many contemporary - acoustic guitarists, and it has grown substantially in popularity over the last - decade. Its foundations lie in the use of the guitar''s body for percussive lines, - and in the extended range given by the novel use of altered tunings. There are - very few formal accounts of percussive fingerstyle, therefore, we devised an interview - study to investigate its approach to composition, performance and musical experimentation. - Our aim was to gain insight into the technique from a gesture-based point of view, - observe whether modern fingerstyle shares similarities to the approaches in NIME - practice and investigate possible avenues for guitar augmentations inspired by - the percussive technique. We conducted an inductive thematic analysis on the transcribed - interviews: our findings highlight the participants'' material-based approach - to musical interaction and we present a three-zone model of the most common percussive - gestures on the guitar''s body. Furthermore, we examine current trends in Digital - Musical Instruments, especially in guitar augmentation, and we discuss possible - future directions in augmented guitars in light of the interviewees'' perspectives.' - address: 'Birmingham, UK' - author: 'Martelloni, Andrea and McPherson, Andrew and Barthet, Mathieu' - bibtex: "@inproceedings{NIME20_85,\n abstract = {Percussive fingerstyle is a playing\ - \ technique adopted by many contemporary acoustic guitarists, and it has grown\ - \ substantially in popularity over the last decade. Its foundations lie in the\ - \ use of the guitar's body for percussive lines, and in the extended range given\ - \ by the novel use of altered tunings. There are very few formal accounts of percussive\ - \ fingerstyle, therefore, we devised an interview study to investigate its approach\ - \ to composition, performance and musical experimentation. 
Our aim was to gain\ - \ insight into the technique from a gesture-based point of view, observe whether\ - \ modern fingerstyle shares similarities to the approaches in NIME practice and\ - \ investigate possible avenues for guitar augmentations inspired by the percussive\ - \ technique. We conducted an inductive thematic analysis on the transcribed interviews:\ - \ our findings highlight the participants' material-based approach to musical\ - \ interaction and we present a three-zone model of the most common percussive\ - \ gestures on the guitar's body. Furthermore, we examine current trends in Digital\ - \ Musical Instruments, especially in guitar augmentation, and we discuss possible\ - \ future directions in augmented guitars in light of the interviewees' perspectives.},\n\ - \ address = {Birmingham, UK},\n author = {Martelloni, Andrea and McPherson, Andrew\ - \ and Barthet, Mathieu},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813463},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {440--445},\n presentation-video = {https://youtu.be/ON8ckEBcQ98},\n\ - \ publisher = {Birmingham City University},\n title = {Percussive Fingerstyle\ - \ Guitar through the Lens of NIME: an Interview Study},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper85.pdf},\n\ - \ year = {2020}\n}\n" + ID: Mann2007 + abstract: 'This paper presents two main ideas: (1) Various newly invented liquid-based + or underwater musical instruments are proposed that function like woodwind instruments + but use water instead of air. These “woodwater” instruments expand the space of + known instruments to include all three states of matter: solid (strings, percussion); + liquid (the proposed instruments); and gas (brass and woodwinds). Instruments + that use the fourth state of matter (plasma) are also proposed. 
(2) Although the + current trend in musical interfaces has been to expand versatililty and generality + by separating the interface from the sound-producing medium, this paper identifies + an opposite trend in musical interface design inspired by instruments such as + the harp, the acoustic or electric guitar, the tin whistle, and the Neanderthal + flute, that have a directness of user-interface, where the fingers of the musician + are in direct physical contact with the sound-producing medium. The newly invented + instruments are thus designed to have this sensually tempting intimacy not be + lost behind layers of abstraction, while also allowing for the high degree of + virtuosity. Examples presented include the poseidophone, an instrument made from + an array of ripple tanks, each tuned for a particular note, and the hydraulophone, + an instrument in which sound is produced by pressurized hydraulic fluid that is + in direct physical contact with the fingers of the player. Instruments based on + these primordial media tend to fall outside existing classifications and taxonomies + of known musical instruments which only consider instruments that make sound with + solid or gaseous states of matter. To better understand and contextualize some + of the new primordial user interfaces, a broader concept of musical instrument + classification is proposed that considers the states of matter of both the user-interface + and the sound production medium.' 
+ address: 'New York City, NY, United States' + author: 'Mann, Steve' + bibtex: "@inproceedings{Mann2007,\n abstract = {This paper presents two main ideas:\ + \ (1) Various newly invented liquid-based or underwater musical instruments are\ + \ proposed that function like woodwind instruments but use water instead of air.\ + \ These “woodwater” instruments expand the space of known instruments to include\ + \ all three states of matter: solid (strings, percussion); liquid (the proposed\ + \ instruments); and gas (brass and woodwinds). Instruments that use the fourth\ + \ state of matter (plasma) are also proposed. (2) Although the current trend in\ + \ musical interfaces has been to expand versatililty and generality by separating\ + \ the interface from the sound-producing medium, this paper identifies an opposite\ + \ trend in musical interface design inspired by instruments such as the harp,\ + \ the acoustic or electric guitar, the tin whistle, and the Neanderthal flute,\ + \ that have a directness of user-interface, where the fingers of the musician\ + \ are in direct physical contact with the sound-producing medium. The newly invented\ + \ instruments are thus designed to have this sensually tempting intimacy not be\ + \ lost behind layers of abstraction, while also allowing for the high degree of\ + \ virtuosity. Examples presented include the poseidophone, an instrument made\ + \ from an array of ripple tanks, each tuned for a particular note, and the hydraulophone,\ + \ an instrument in which sound is produced by pressurized hydraulic fluid that\ + \ is in direct physical contact with the fingers of the player. Instruments based\ + \ on these primordial media tend to fall outside existing classifications and\ + \ taxonomies of known musical instruments which only consider instruments that\ + \ make sound with solid or gaseous states of matter. 
To better understand and\ + \ contextualize some of the new primordial user interfaces, a broader concept\ + \ of musical instrument classification is proposed that considers the states of\ + \ matter of both the user-interface and the sound production medium.},\n address\ + \ = {New York City, NY, United States},\n author = {Mann, Steve},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177181},\n issn = {2220-4806},\n keywords\ + \ = {all or part of,ethnomusicology,hydraulophone,is granted without fee,nime07,or\ + \ hard copies of,permission to make digital,personal or classroom use,provided\ + \ that copies are,tangible user interface,this work for},\n pages = {118--123},\n\ + \ title = {Natural Interfaces for Musical Expression : Physiphones and a Physics-Based\ + \ Organology},\n url = {http://www.nime.org/proceedings/2007/nime2007_118.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813463 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177181 issn: 2220-4806 - month: July - pages: 440--445 - presentation-video: https://youtu.be/ON8ckEBcQ98 - publisher: Birmingham City University - title: 'Percussive Fingerstyle Guitar through the Lens of NIME: an Interview Study' - url: https://www.nime.org/proceedings/2020/nime2020_paper85.pdf - year: 2020 + keywords: 'all or part of,ethnomusicology,hydraulophone,is granted without fee,nime07,or + hard copies of,permission to make digital,personal or classroom use,provided that + copies are,tangible user interface,this work for' + pages: 118--123 + title: 'Natural Interfaces for Musical Expression : Physiphones and a Physics-Based + Organology' + url: http://www.nime.org/proceedings/2007/nime2007_118.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_86 - abstract: 'In the field of human computer interaction 
(HCI) the limitations of prototypes - as the primary artefact used in research are being realised. Prototypes often - remain open in their design, are partially-finished, and have a focus on a specific - aspect of interaction. Previous authors have proposed `research products'' as - a specific category of artefact distinct from both research prototypes and commercial - products. The characteristics of research products are their holistic completeness - as a design artefact, their situatedness in a specific cultural context, and the - fact that they are evaluated for what they are, not what they will become. This - paper discusses the ways in which many instruments created within the context - of New Interfaces for Musical Expression (NIME), including those that are used - in performances, often fall into the category of prototype. We shall discuss why - research products might be a useful framing for NIME research. Research products - shall be weighed up against some of the main themes of NIME research: technological - innovation; musical expression; instrumentality. We conclude this paper with a - case study of Strummi, a digital musical instrument which we frame as research - product.' - address: 'Birmingham, UK' - author: 'Jack, Robert and Harrison, Jacob and McPherson, Andrew' - bibtex: "@inproceedings{NIME20_86,\n abstract = {In the field of human computer\ - \ interaction (HCI) the limitations of prototypes as the primary artefact used\ - \ in research are being realised. Prototypes often remain open in their design,\ - \ are partially-finished, and have a focus on a specific aspect of interaction.\ - \ Previous authors have proposed `research products' as a specific category of\ - \ artefact distinct from both research prototypes and commercial products. 
The\ - \ characteristics of research products are their holistic completeness as a design\ - \ artefact, their situatedness in a specific cultural context, and the fact that\ - \ they are evaluated for what they are, not what they will become. This paper\ - \ discusses the ways in which many instruments created within the context of New\ - \ Interfaces for Musical Expression (NIME), including those that are used in performances,\ - \ often fall into the category of prototype. We shall discuss why research products\ - \ might be a useful framing for NIME research. Research products shall be weighed\ - \ up against some of the main themes of NIME research: technological innovation;\ - \ musical expression; instrumentality. We conclude this paper with a case study\ - \ of Strummi, a digital musical instrument which we frame as research product.},\n\ - \ address = {Birmingham, UK},\n author = {Jack, Robert and Harrison, Jacob and\ - \ McPherson, Andrew},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813465},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {446--451},\n presentation-video = {https://youtu.be/luJwlZBeBqY},\n\ - \ publisher = {Birmingham City University},\n title = {Digital Musical Instruments\ - \ as Research Products},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper86.pdf},\n\ - \ year = {2020}\n}\n" + ID: Bevilacqua2007 + abstract: 'We present in this paper a complete gestural interface built to support + music pedagogy. The development of this prototype concerned both hardware and + software components: a small wireless sensor interface including accelerometers + and gyroscopes, and an analysis system enabling gesture following and recognition. + A first set of experiments was conducted with teenagers in a music theory class. 
+ The preliminary results were encouraging concerning the suitability of these developments + in music education. ' + address: 'New York City, NY, United States' + author: 'Bevilacqua, Frédéric and Guédy, Fabrice and Schnell, Norbert and Fléty, + Emmanuel and Leroy, Nicolas' + bibtex: "@inproceedings{Bevilacqua2007,\n abstract = {We present in this paper a\ + \ complete gestural interface built to support music pedagogy. The development\ + \ of this prototype concerned both hardware and software components: a small wireless\ + \ sensor interface including accelerometers and gyroscopes, and an analysis system\ + \ enabling gesture following and recognition. A first set of experiments was conducted\ + \ with teenagers in a music theory class. The preliminary results were encouraging\ + \ concerning the suitability of these developments in music education. },\n address\ + \ = {New York City, NY, United States},\n author = {Bevilacqua, Fr\\'{e}d\\'{e}ric\ + \ and Gu\\'{e}dy, Fabrice and Schnell, Norbert and Fl\\'{e}ty, Emmanuel and Leroy,\ + \ Nicolas},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177045},\n issn\ + \ = {2220-4806},\n keywords = {Technology-enhanced learning, music pedagogy, wireless\ + \ interface, gesture-follower, gesture recognition },\n pages = {124--129},\n\ + \ title = {Wireless Sensor Interface and Gesture-Follower for Music Pedagogy},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_124.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813465 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177045 issn: 2220-4806 - month: July - pages: 446--451 - presentation-video: https://youtu.be/luJwlZBeBqY - publisher: Birmingham City University - title: Digital Musical Instruments as Research Products - url: 
https://www.nime.org/proceedings/2020/nime2020_paper86.pdf - year: 2020 + keywords: 'Technology-enhanced learning, music pedagogy, wireless interface, gesture-follower, + gesture recognition ' + pages: 124--129 + title: Wireless Sensor Interface and Gesture-Follower for Music Pedagogy + url: http://www.nime.org/proceedings/2007/nime2007_124.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_87 - abstract: 'This paper presents a micro-residency in a pop-up shop and collaborative - making amongst a group of researchers and practitioners. The making extends to - sound(-making) objects, instruments, workshop, sound installation, performance - and discourse on DIY electronic music. Our research builds on creative workshopping - and speculative design and is informed by ideas of collective making. The ad hoc - and temporary pop-up space is seen as formative in shaping the outcomes of the - work. Through the lens of curated research, working together with a provocative - brief, we explored handmade objects, craft, non-craft, human error, and the spirit - of DIY, DIYness. We used the Studio Bench - a method that brings making, recording - and performance together in one space - and viewed workshopping and performance - as a holistic event. A range of methodologies were investigated in relation to - NIME. These included the Hardware Mash-up, Speculative Sound Circuits and Reverse - Design, from product to prototype, resulting in the instrument the Radical Nails. - Finally, our work drew on the notion of design as performance and making in public - and further developed our understanding of workshop-installation and performance-installation.' - address: 'Birmingham, UK' - author: 'Patel, Amit D and Richards, John ' - bibtex: "@inproceedings{NIME20_87,\n abstract = {This paper presents a micro-residency\ - \ in a pop-up shop and collaborative making amongst a group of researchers and\ - \ practitioners. 
The making extends to sound(-making) objects, instruments, workshop,\ - \ sound installation, performance and discourse on DIY electronic music. Our research\ - \ builds on creative workshopping and speculative design and is informed by ideas\ - \ of collective making. The ad hoc and temporary pop-up space is seen as formative\ - \ in shaping the outcomes of the work. Through the lens of curated research, working\ - \ together with a provocative brief, we explored handmade objects, craft, non-craft,\ - \ human error, and the spirit of DIY, DIYness. We used the Studio Bench - a method\ - \ that brings making, recording and performance together in one space - and viewed\ - \ workshopping and performance as a holistic event. A range of methodologies were\ - \ investigated in relation to NIME. These included the Hardware Mash-up, Speculative\ - \ Sound Circuits and Reverse Design, from product to prototype, resulting in the\ - \ instrument the Radical Nails. Finally, our work drew on the notion of design\ - \ as performance and making in public and further developed our understanding\ - \ of workshop-installation and performance-installation.},\n address = {Birmingham,\ - \ UK},\n author = {Patel, Amit D and Richards, John },\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813473},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {452--457},\n publisher = {Birmingham\ - \ City University},\n title = {Pop-up for Collaborative Music-making},\n url =\ - \ {https://www.nime.org/proceedings/2020/nime2020_paper87.pdf},\n year = {2020}\n\ - }\n" + ID: Dannenberg2007 + abstract: 'Augmenting performances of live popular music with computer systems poses + many new challenges. 
Here, "popular music" is taken to mean music with a mostly + steady tempo, some improvisational elements, and largely predetermined melodies, + harmonies, and other parts. The overall problem is studied by developing a framework + consisting of constraints and subproblems that any solution should address. These + problems include beat acquisition, beat phase, score location, sound synthesis, + data preparation, and adaptation. A prototype system is described that offers + a set of solutions to the problems posed by the framework, and future work is + suggested. ' + address: 'New York City, NY, United States' + author: 'Dannenberg, Roger B.' + bibtex: "@inproceedings{Dannenberg2007,\n abstract = {Augmenting performances of\ + \ live popular music with computer systems poses many new challenges. Here, \"\ + popular music\" is taken to mean music with a mostly steady tempo, some improvisational\ + \ elements, and largely predetermined melodies, harmonies, and other parts. The\ + \ overall problem is studied by developing a framework consisting of constraints\ + \ and subproblems that any solution should address. These problems include beat\ + \ acquisition, beat phase, score location, sound synthesis, data preparation,\ + \ and adaptation. A prototype system is described that offers a set of solutions\ + \ to the problems posed by the framework, and future work is suggested. 
},\n address\ + \ = {New York City, NY, United States},\n author = {Dannenberg, Roger B.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177081},\n issn = {2220-4806},\n keywords\ + \ = {accompaniment,beat,conducting,intelligent,music synchronization,nime07,synthetic\ + \ performer,tracking,virtual orchestra},\n pages = {130--135},\n title = {New\ + \ Interfaces for Popular Music Performance},\n url = {http://www.nime.org/proceedings/2007/nime2007_130.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813473 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177081 issn: 2220-4806 - month: July - pages: 452--457 - publisher: Birmingham City University - title: Pop-up for Collaborative Music-making - url: https://www.nime.org/proceedings/2020/nime2020_paper87.pdf - year: 2020 + keywords: 'accompaniment,beat,conducting,intelligent,music synchronization,nime07,synthetic + performer,tracking,virtual orchestra' + pages: 130--135 + title: New Interfaces for Popular Music Performance + url: http://www.nime.org/proceedings/2007/nime2007_130.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_88 - abstract: 'This paper introduces a new method for direct control using the voice - via measurement of vocal muscular activation with surface electromyography (sEMG). - Digital musical interfaces based on the voice have typically used indirect control, - in which features extracted from audio signals control the parameters of sound - generation, for example in audio to MIDI controllers. By contrast, focusing on - the musculature of the singing voice allows direct muscular control, or alternatively, - combined direct and indirect control in an augmented vocal instrument. 
In this - way we aim to both preserve the intimate relationship a vocalist has with their - instrument and key timbral and stylistic characteristics of the voice while expanding - its sonic capabilities. This paper discusses other digital instruments which effectively - utilise a combination of indirect and direct control as well as a history of controllers - involving the voice. Subsequently, a new method of direct control from physiological - aspects of singing through sEMG and its capabilities are discussed. Future developments - of the system are further outlined along with usage in performance studies, interactive - live vocal performance, and educational and practice tools.' - address: 'Birmingham, UK' - author: 'Reed, Courtney and McPherson, Andrew' - bibtex: "@inproceedings{NIME20_88,\n abstract = {This paper introduces a new method\ - \ for direct control using the voice via measurement of vocal muscular activation\ - \ with surface electromyography (sEMG). Digital musical interfaces based on the\ - \ voice have typically used indirect control, in which features extracted from\ - \ audio signals control the parameters of sound generation, for example in audio\ - \ to MIDI controllers. By contrast, focusing on the musculature of the singing\ - \ voice allows direct muscular control, or alternatively, combined direct and\ - \ indirect control in an augmented vocal instrument. In this way we aim to both\ - \ preserve the intimate relationship a vocalist has with their instrument and\ - \ key timbral and stylistic characteristics of the voice while expanding its sonic\ - \ capabilities. This paper discusses other digital instruments which effectively\ - \ utilise a combination of indirect and direct control as well as a history of\ - \ controllers involving the voice. Subsequently, a new method of direct control\ - \ from physiological aspects of singing through sEMG and its capabilities are\ - \ discussed. 
Future developments of the system are further outlined along with\ - \ usage in performance studies, interactive live vocal performance, and educational\ - \ and practice tools.},\n address = {Birmingham, UK},\n author = {Reed, Courtney\ - \ and McPherson, Andrew},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813475},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {458--463},\n presentation-video = {https://youtu.be/1nWLgQGNh0g},\n\ - \ publisher = {Birmingham City University},\n title = {Surface Electromyography\ - \ for Direct Vocal Control},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper88.pdf},\n\ - \ year = {2020}\n}\n" + ID: Lee2007 + abstract: 'We present a system for rhythmic analysis of human motion inreal-time. + Using a combination of both spectral (Fourier) andspatial analysis of onsets, + we are able to extract repeating rhythmic patterns from data collected using accelerometers. + These extracted rhythmic patterns show the relative magnitudes of accentuated + movements and their spacing in time. Inspired by previouswork in automatic beat + detection of audio recordings, we designedour algorithms to be robust to changes + in timing using multipleanalysis techniques and methods for sensor fusion, filtering + andclustering. We tested our system using a limited set of movements,as well as + dance movements collected from a professional, bothwith promising results.' + address: 'New York City, NY, United States' + author: 'Lee, Eric and Enke, Urs and Borchers, Jan and de Jong, Leo' + bibtex: "@inproceedings{Lee2007,\n abstract = {We present a system for rhythmic\ + \ analysis of human motion inreal-time. Using a combination of both spectral (Fourier)\ + \ andspatial analysis of onsets, we are able to extract repeating rhythmic patterns\ + \ from data collected using accelerometers. 
These extracted rhythmic patterns\ + \ show the relative magnitudes of accentuated movements and their spacing in time.\ + \ Inspired by previouswork in automatic beat detection of audio recordings, we\ + \ designedour algorithms to be robust to changes in timing using multipleanalysis\ + \ techniques and methods for sensor fusion, filtering andclustering. We tested\ + \ our system using a limited set of movements,as well as dance movements collected\ + \ from a professional, bothwith promising results.},\n address = {New York City,\ + \ NY, United States},\n author = {Lee, Eric and Enke, Urs and Borchers, Jan and\ + \ de Jong, Leo},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177159},\n\ + \ issn = {2220-4806},\n keywords = {rhythm analysis, dance movement analysis,\ + \ onset analysis },\n pages = {136--141},\n title = {Towards Rhythmic Analysis\ + \ of Human Motion Using Acceleration-Onset Times},\n url = {http://www.nime.org/proceedings/2007/nime2007_136.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813475 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177159 issn: 2220-4806 - month: July - pages: 458--463 - presentation-video: https://youtu.be/1nWLgQGNh0g - publisher: Birmingham City University - title: Surface Electromyography for Direct Vocal Control - url: https://www.nime.org/proceedings/2020/nime2020_paper88.pdf - year: 2020 + keywords: 'rhythm analysis, dance movement analysis, onset analysis ' + pages: 136--141 + title: Towards Rhythmic Analysis of Human Motion Using Acceleration-Onset Times + url: http://www.nime.org/proceedings/2007/nime2007_136.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_89 - abstract: 'The presented sound synthesis system allows the individual spatialization - of spectral components in real-time, using a 
sinusoidal modeling approach within - 3-dimensional sound reproduction systems. A co-developed, dedicated haptic interface - is used to jointly control spectral and spatial attributes of the sound. Within - a user study, participants were asked to create an individual mapping between - control parameters of the interface and rendering parameters of sound synthesis - and spatialization, using a visual programming environment. Resulting mappings - of all participants are evaluated, indicating the preference of single control - parameters for specific tasks. In comparison with mappings intended by the development - team, the results validate certain design decisions and indicate new directions.' - address: 'Birmingham, UK' - author: 'von Coler, Henrik and Lepa, Steffen and Weinzierl, Stefan' - bibtex: "@inproceedings{NIME20_89,\n abstract = {The presented sound synthesis system\ - \ allows the individual spatialization of spectral components in real-time, using\ - \ a sinusoidal modeling approach within 3-dimensional sound reproduction systems.\ - \ A co-developed, dedicated haptic interface is used to jointly control spectral\ - \ and spatial attributes of the sound. Within a user study, participants were\ - \ asked to create an individual mapping between control parameters of the interface\ - \ and rendering parameters of sound synthesis and spatialization, using a visual\ - \ programming environment. Resulting mappings of all participants are evaluated,\ - \ indicating the preference of single control parameters for specific tasks. 
In\ - \ comparison with mappings intended by the development team, the results validate\ - \ certain design decisions and indicate new directions.},\n address = {Birmingham,\ - \ UK},\n author = {von Coler, Henrik and Lepa, Steffen and Weinzierl, Stefan},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813477},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {464--469},\n publisher = {Birmingham City University},\n title = {User-Defined\ - \ Mappings for Spatial Sound Synthesis},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper89.pdf},\n\ - \ year = {2020}\n}\n" + ID: Bouillot2007 + abstract: 'Remote real-time musical interaction is a domain where endto-end latency + is a well known problem. Today, the mainexplored approach aims to keep it below + the musicians perception threshold. In this paper, we explore another approach, + where end-to-end delays rise to several seconds, butcomputed in a controlled (and + synchronized) way dependingon the structure of the musical pieces. Thanks to our + fullydistributed prototype called nJam, we perform user experiments to show how + this new kind of interactivity breaks theactual end-to-end latency bounds.' + address: 'New York City, NY, United States' + author: 'Bouillot, Nicolas' + bibtex: "@inproceedings{Bouillot2007,\n abstract = {Remote real-time musical interaction\ + \ is a domain where endto-end latency is a well known problem. Today, the mainexplored\ + \ approach aims to keep it below the musicians perception threshold. In this paper,\ + \ we explore another approach, where end-to-end delays rise to several seconds,\ + \ butcomputed in a controlled (and synchronized) way dependingon the structure\ + \ of the musical pieces. 
Thanks to our fullydistributed prototype called nJam,\ + \ we perform user experiments to show how this new kind of interactivity breaks\ + \ theactual end-to-end latency bounds.},\n address = {New York City, NY, United\ + \ States},\n author = {Bouillot, Nicolas},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177055},\n\ + \ issn = {2220-4806},\n keywords = {Remote real-time musical interaction, end-to-end\ + \ delays, syn- chronization, user experiments, distributed metronome, NMP. },\n\ + \ pages = {142--147},\n title = {nJam User Experiments : Enabling Remote Musical\ + \ Interaction from Milliseconds to Seconds},\n url = {http://www.nime.org/proceedings/2007/nime2007_142.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813477 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177055 issn: 2220-4806 - month: July - pages: 464--469 - publisher: Birmingham City University - title: User-Defined Mappings for Spatial Sound Synthesis - url: https://www.nime.org/proceedings/2020/nime2020_paper89.pdf - year: 2020 + keywords: 'Remote real-time musical interaction, end-to-end delays, syn- chronization, + user experiments, distributed metronome, NMP. ' + pages: 142--147 + title: 'nJam User Experiments : Enabling Remote Musical Interaction from Milliseconds + to Seconds' + url: http://www.nime.org/proceedings/2007/nime2007_142.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_9 - abstract: 'The popularity of applying machine learning techniques in musical domains - has created an inherent availability of freely accessible pre-trained neural network - (NN) models ready for use in creative applications. 
This work outlines the implementation - of one such application in the form of an assistance tool designed for live improvisational - performances by laptop ensembles. The primary intention was to leverage off-the-shelf - pre-trained NN models as a basis for assisting individual performers either as - musical novices looking to engage with more experienced performers or as a tool - to expand musical possibilities through new forms of creative expression. The - system expands upon a variety of ideas found in different research areas including - new interfaces for musical expression, generative music and group performance - to produce a networked performance solution served via a web-browser interface. - The final implementation of the system offers performers a mixture of high and - low-level controls to influence the shape of sequences of notes output by locally - run NN models in real time, also allowing performers to define their level of - engagement with the assisting generative models. Two test performances were played, - with the system shown to feasibly support four performers over a four minute piece - while producing musically cohesive and engaging music. Iterations on the design - of the system exposed technical constraints on the use of a JavaScript environment - for generative models in a live music context, largely derived from inescapable - processing overheads.' - address: 'Birmingham, UK' - author: 'Proctor, Rohan and Martin, Charles Patrick' - bibtex: "@inproceedings{NIME20_9,\n abstract = {The popularity of applying machine\ - \ learning techniques in musical domains has created an inherent availability\ - \ of freely accessible pre-trained neural network (NN) models ready for use in\ - \ creative applications. This work outlines the implementation of one such application\ - \ in the form of an assistance tool designed for live improvisational performances\ - \ by laptop ensembles. 
The primary intention was to leverage off-the-shelf pre-trained\ - \ NN models as a basis for assisting individual performers either as musical novices\ - \ looking to engage with more experienced performers or as a tool to expand musical\ - \ possibilities through new forms of creative expression. The system expands upon\ - \ a variety of ideas found in different research areas including new interfaces\ - \ for musical expression, generative music and group performance to produce a\ - \ networked performance solution served via a web-browser interface. The final\ - \ implementation of the system offers performers a mixture of high and low-level\ - \ controls to influence the shape of sequences of notes output by locally run\ - \ NN models in real time, also allowing performers to define their level of engagement\ - \ with the assisting generative models. Two test performances were played, with\ - \ the system shown to feasibly support four performers over a four minute piece\ - \ while producing musically cohesive and engaging music. Iterations on the design\ - \ of the system exposed technical constraints on the use of a JavaScript environment\ - \ for generative models in a live music context, largely derived from inescapable\ - \ processing overheads.},\n address = {Birmingham, UK},\n author = {Proctor, Rohan\ - \ and Martin, Charles Patrick},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813481},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {43--48},\n publisher = {Birmingham City University},\n\ - \ title = {A Laptop Ensemble Performance System using Recurrent Neural Networks},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper9.pdf},\n year =\ - \ {2020}\n}\n" + ID: Moody2007 + abstract: 'This paper describes the Ashitaka audiovisual instrumentand the process + used to develop it. 
The main idea guidingthe design of the instrument is that + motion can be used toconnect audio and visuals, and the first part of the paperconsists + of an exploration of this idea. The issue of mappings is raised, discussing both + audio-visual mappings andthe mappings between the interface and synthesis methods.The + paper concludes with a detailed look at the instrumentitself, including the interface, + synthesis methods, and mappings used.' + address: 'New York City, NY, United States' + author: 'Moody, Niall and Fells, Nick and Bailey, Nicholas' + bibtex: "@inproceedings{Moody2007,\n abstract = {This paper describes the Ashitaka\ + \ audiovisual instrumentand the process used to develop it. The main idea guidingthe\ + \ design of the instrument is that motion can be used toconnect audio and visuals,\ + \ and the first part of the paperconsists of an exploration of this idea. The\ + \ issue of mappings is raised, discussing both audio-visual mappings andthe mappings\ + \ between the interface and synthesis methods.The paper concludes with a detailed\ + \ look at the instrumentitself, including the interface, synthesis methods, and\ + \ mappings used.},\n address = {New York City, NY, United States},\n author =\ + \ {Moody, Niall and Fells, Nick and Bailey, Nicholas},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177199},\n issn = {2220-4806},\n keywords = {audiovisual,instrument,mappings,nime07,synchresis,x3d},\n\ + \ pages = {148--153},\n title = {Ashitaka : An Audiovisual Instrument},\n url\ + \ = {http://www.nime.org/proceedings/2007/nime2007_148.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813481 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177199 issn: 2220-4806 - month: July - pages: 43--48 - publisher: Birmingham City University - 
title: A Laptop Ensemble Performance System using Recurrent Neural Networks - url: https://www.nime.org/proceedings/2020/nime2020_paper9.pdf - year: 2020 + keywords: audiovisual,instrument,mappings,nime07,synchresis,x3d + pages: 148--153 + title: 'Ashitaka : An Audiovisual Instrument' + url: http://www.nime.org/proceedings/2007/nime2007_148.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_90 - abstract: 'In this paper, we present and evaluate Elemental, a NIME (New Interface - for Musical Expression) based on audio synthesis of sounds of meteorological phenomena, - namely rain, wind and thunder, intended for application in contemporary music/sound - art, performing arts and entertainment. We first describe the system, controlled - by the performer’s arms through Inertial Measuring Units and Electromyography - sensors. The produced data is analyzed and used through mapping strategies as - input of the sound synthesis engine. We conducted user studies to refine the sound - synthesis engine, the choice of gestures and the mappings between them, and to - finally evaluate this proof of concept. Indeed, the users approached the system - with their own awareness ranging from the manipulation of abstract sound to the - direct simulation of atmospheric phenomena - in the latter case, it could even - be to revive memories or to create novel situations. This suggests that the approach - of instrumentalization of sounds of known source may be a fruitful strategy for - constructing expressive interactive sonic systems.' 
- address: 'Birmingham, UK' - author: 'Brizolara, Tiago and Gibet, Sylvie and Larboulette, Caroline ' - bibtex: "@inproceedings{NIME20_90,\n abstract = {In this paper, we present and evaluate\ - \ Elemental, a NIME (New Interface for Musical Expression) based on audio synthesis\ - \ of sounds of meteorological phenomena, namely rain, wind and thunder, intended\ - \ for application in contemporary music/sound art, performing arts and entertainment.\ - \ We first describe the system, controlled by the performer’s arms through Inertial\ - \ Measuring Units and Electromyography sensors. The produced data is analyzed\ - \ and used through mapping strategies as input of the sound synthesis engine.\ - \ We conducted user studies to refine the sound synthesis engine, the choice of\ - \ gestures and the mappings between them, and to finally evaluate this proof of\ - \ concept. Indeed, the users approached the system with their own awareness ranging\ - \ from the manipulation of abstract sound to the direct simulation of atmospheric\ - \ phenomena - in the latter case, it could even be to revive memories or to create\ - \ novel situations. 
This suggests that the approach of instrumentalization of\ - \ sounds of known source may be a fruitful strategy for constructing expressive\ - \ interactive sonic systems.},\n address = {Birmingham, UK},\n author = {Brizolara,\ - \ Tiago and Gibet, Sylvie and Larboulette, Caroline },\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813483},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {470--476},\n publisher = {Birmingham\ - \ City University},\n title = {Elemental: a Gesturally Controlled System to Perform\ - \ Meteorological Sounds},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper90.pdf},\n\ - \ year = {2020}\n}\n" + ID: Aimi2007 + abstract: 'This paper describes several example hybrid acoustic / electronic percussion + instruments using realtime convolution toaugment and modify the apparent acoustics + of damped physical objects. Examples of cymbal, frame drum, practice pad,brush, + and bass drum controllers are described.' + address: 'New York City, NY, United States' + author: 'Aimi, Roberto' + bibtex: "@inproceedings{Aimi2007,\n abstract = {This paper describes several example\ + \ hybrid acoustic / electronic percussion instruments using realtime convolution\ + \ toaugment and modify the apparent acoustics of damped physical objects. 
Examples\ + \ of cymbal, frame drum, practice pad,brush, and bass drum controllers are described.},\n\ + \ address = {New York City, NY, United States},\n author = {Aimi, Roberto},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177033},\n issn = {2220-4806},\n\ + \ keywords = {Musical controllers, extended acoustic instruments },\n pages =\ + \ {154--159},\n title = {Percussion Instruments Using Realtime Convolution : Physical\ + \ Controllers},\n url = {http://www.nime.org/proceedings/2007/nime2007_154.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813483 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177033 issn: 2220-4806 - month: July - pages: 470--476 - publisher: Birmingham City University - title: 'Elemental: a Gesturally Controlled System to Perform Meteorological Sounds' - url: https://www.nime.org/proceedings/2020/nime2020_paper90.pdf - year: 2020 + keywords: 'Musical controllers, extended acoustic instruments ' + pages: 154--159 + title: 'Percussion Instruments Using Realtime Convolution : Physical Controllers' + url: http://www.nime.org/proceedings/2007/nime2007_154.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_91 - abstract: 'This paper describes the ongoing process of developing RAW, a collaborative - body–machine instrument that relies on ''sculpting'' the sonification of raw EMG - signals. The instrument is built around two Myo armbands located on the forearms - of the performer. These are used to investigate muscle contraction, which is again - used as the basis for the sonic interaction design. Using a practice-based approach, - the aim is to explore the musical aesthetics of naturally occurring bioelectric - signals. 
We are particularly interested in exploring the differences between processing - at audio rate versus control rate, and how the level of detail in the signal–and - the complexity of the mappings–influence the experience of control in the instrument. - This is exemplified through reflections on four concerts in which RAW has been - used in different types of collective improvisation.' - address: 'Birmingham, UK' - author: 'Erdem, Çağrı and Jensenius, Alexander Refsum' - bibtex: "@inproceedings{NIME20_91,\n abstract = {This paper describes the ongoing\ - \ process of developing RAW, a collaborative body–machine instrument that relies\ - \ on 'sculpting' the sonification of raw EMG signals. The instrument is built\ - \ around two Myo armbands located on the forearms of the performer. These are\ - \ used to investigate muscle contraction, which is again used as the basis for\ - \ the sonic interaction design. Using a practice-based approach, the aim is to\ - \ explore the musical aesthetics of naturally occurring bioelectric signals. We\ - \ are particularly interested in exploring the differences between processing\ - \ at audio rate versus control rate, and how the level of detail in the signal–and\ - \ the complexity of the mappings–influence the experience of control in the instrument.\ - \ This is exemplified through reflections on four concerts in which RAW has been\ - \ used in different types of collective improvisation.},\n address = {Birmingham,\ - \ UK},\n author = {Erdem, Çağrı and Jensenius, Alexander Refsum},\n booktitle\ + ID: Rohs2007 + abstract: 'CaMus2 allows collaborative performance with mobile camera phones. The + original CaMus project was extended tosupport multiple phones performing in the + same space andgenerating MIDI signals to control sound generation andmanipulation + software or hardware. Through an opticalflow technology the system can be used + without a referencemarker grid. 
When using a marker grid, the use of dynamicdigital + zoom extends the range of performance. Semanticinformation display helps guide + the performer visually.' + address: 'New York City, NY, United States' + author: 'Rohs, Michael and Essl, Georg' + bibtex: "@inproceedings{Rohs2007,\n abstract = {CaMus2 allows collaborative performance\ + \ with mobile camera phones. The original CaMus project was extended tosupport\ + \ multiple phones performing in the same space andgenerating MIDI signals to control\ + \ sound generation andmanipulation software or hardware. Through an opticalflow\ + \ technology the system can be used without a referencemarker grid. When using\ + \ a marker grid, the use of dynamicdigital zoom extends the range of performance.\ + \ Semanticinformation display helps guide the performer visually.},\n address\ + \ = {New York City, NY, United States},\n author = {Rohs, Michael and Essl, Georg},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177233},\n issn = {2220-4806},\n\ + \ keywords = {Camera phone, mobile phone, music performance, mobile sound generation,\ + \ sensing-based interaction, collaboration },\n pages = {160--163},\n title =\ + \ {CaMus 2 -- Optical Flow and Collaboration in Camera Phone Music Performance},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_160.pdf},\n year = {2007}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177233 + issn: 2220-4806 + keywords: 'Camera phone, mobile phone, music performance, mobile sound generation, + sensing-based interaction, collaboration ' + pages: 160--163 + title: CaMus 2 -- Optical Flow and Collaboration in Camera Phone Music Performance + url: http://www.nime.org/proceedings/2007/nime2007_160.pdf + year: 2007 + + +- ENTRYTYPE: inproceedings + ID: Fiebrink2007 + abstract: 'We draw on our experiences with 
the Princeton Laptop Orchestra to discuss + novel uses of the laptop’s native physical inputs for flexible and expressive + control. We argue that instruments designed using these built-in inputs offer + benefits over custom standalone controllers, particularly in certain group performance + settings; creatively thinking about native capabilities can lead to interesting + and unique new interfaces. We discuss a variety of example instruments that use + the laptop’s native capabilities and suggest avenues for future work. We also + describe a new toolkit for rapidly experimenting with these capabilities.' + address: 'New York City, NY, United States' + author: 'Fiebrink, Rebecca and Wang, Ge and Cook, Perry R.' + bibtex: "@inproceedings{Fiebrink2007,\n abstract = {We draw on our experiences with\ + \ the Princeton Laptop Orchestra to discuss novel uses of the laptop’s native\ + \ physical inputs for flexible and expressive control. We argue that instruments\ + \ designed using these built-in inputs offer benefits over custom standalone controllers,\ + \ particularly in certain group performance settings; creatively thinking about\ + \ native capabilities can lead to interesting and unique new interfaces. We discuss\ + \ a variety of example instruments that use the laptop’s native capabilities and\ + \ suggest avenues for future work. 
We also describe a new toolkit for rapidly\ + \ experimenting with these capabilities.},\n address = {New York City, NY, United\ + \ States},\n author = {Fiebrink, Rebecca and Wang, Ge and Cook, Perry R.},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4813485},\n editor = {Romain Michon and\ - \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {477--482},\n\ - \ presentation-video = {https://youtu.be/gX-X1iw7uWE},\n publisher = {Birmingham\ - \ City University},\n title = {RAW: Exploring Control Structures for Muscle-based\ - \ Interaction in Collective Improvisation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper91.pdf},\n\ - \ year = {2020}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1177087},\n issn = {2220-4806},\n keywords\ + \ = {Mapping strategies. Laptop-based physical interfaces. Collaborative laptop\ + \ performance.},\n pages = {164--167},\n title = {Don't Forget the Laptop : Using\ + \ Native Input Capabilities for Expressive Musical Control},\n url = {http://www.nime.org/proceedings/2007/nime2007_164.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813485 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177087 issn: 2220-4806 - month: July - pages: 477--482 - presentation-video: https://youtu.be/gX-X1iw7uWE - publisher: Birmingham City University - title: 'RAW: Exploring Control Structures for Muscle-based Interaction in Collective - Improvisation' - url: https://www.nime.org/proceedings/2020/nime2020_paper91.pdf - year: 2020 + keywords: Mapping strategies. Laptop-based physical interfaces. Collaborative laptop + performance. 
+ pages: 164--167 + title: 'Don''t Forget the Laptop : Using Native Input Capabilities for Expressive + Musical Control' + url: http://www.nime.org/proceedings/2007/nime2007_164.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_92 - abstract: 'Mobile devices provide musicians with the convenience of musical accompaniment - wherever they are, granting them new methods for developing their craft. We developed - the application SmartDrone to give users the freedom to practice in different - harmonic settings with the assistance of their smartphone. This application further - explores the area of dynamic accompaniment by implementing functionality so that - chords are generated based on the key in which the user is playing. Since this - app was designed to be a tool for scale practice, drone-like accompaniment was - chosen so that musicians could experiment with combinations of melody and harmony. - The details of the application development process are discussed in this paper, - with the main focus on scale analysis and harmonic transposition. By using these - two components, the application is able to dynamically alter key to reflect the - user''s playing. As well as the design and implementation details, this paper - reports and examines feedback from a small user study of undergraduate music students - who used the app. ' - address: 'Birmingham, UK' - author: 'MacDonald, Travis C and Hughes, James and MacKenzie, Barry' - bibtex: "@inproceedings{NIME20_92,\n abstract = {Mobile devices provide musicians\ - \ with the convenience of musical accompaniment wherever they are, granting them\ - \ new methods for developing their craft. We developed the application SmartDrone\ - \ to give users the freedom to practice in different harmonic settings with the\ - \ assistance of their smartphone. 
This application further explores the area of\ - \ dynamic accompaniment by implementing functionality so that chords are generated\ - \ based on the key in which the user is playing. Since this app was designed to\ - \ be a tool for scale practice, drone-like accompaniment was chosen so that musicians\ - \ could experiment with combinations of melody and harmony. The details of the\ - \ application development process are discussed in this paper, with the main focus\ - \ on scale analysis and harmonic transposition. By using these two components,\ - \ the application is able to dynamically alter key to reflect the user's playing.\ - \ As well as the design and implementation details, this paper reports and examines\ - \ feedback from a small user study of undergraduate music students who used the\ - \ app. },\n address = {Birmingham, UK},\n author = {MacDonald, Travis C and Hughes,\ - \ James and MacKenzie, Barry},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813488},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {483--488},\n publisher = {Birmingham City University},\n\ - \ title = {SmartDrone: An Aurally Interactive Harmonic Drone},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper92.pdf},\n\ - \ year = {2020}\n}\n" + ID: Moriwaki2007 + abstract: 'In this paper the authors present the MIDI Scrapyard Challenge (MSC) + workshop, a one-day hands-on experience which asks participants to create musical + controllers out of cast-off electronics, found materials and junk. The workshop + experience, principles, and considerations are detailed, along with sample projects + which have been created in various MSC workshops. Observations and implications + as well as future developments for the workshop are discussed.' 
+ address: 'New York City, NY, United States' + author: 'Moriwaki, Katherine and Brucken-Cohen, Jonah' + bibtex: "@inproceedings{Moriwaki2007,\n abstract = {In this paper the authors present\ + \ the MIDI Scrapyard Challenge (MSC) workshop, a one-day hands-on experience which\ + \ asks participants to create musical controllers out of cast-off electronics,\ + \ found materials and junk. The workshop experience, principles, and considerations\ + \ are detailed, along with sample projects which have been created in various\ + \ MSC workshops. Observations and implications as well as future developments\ + \ for the workshop are discussed.},\n address = {New York City, NY, United States},\n\ + \ author = {Moriwaki, Katherine and Brucken-Cohen, Jonah},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177201},\n issn = {2220-4806},\n keywords = {Workshop,\ + \ MIDI, Interaction Design, Creativity, Performance},\n pages = {168--172},\n\ + \ title = {MIDI Scrapyard Challenge Workshops},\n url = {http://www.nime.org/proceedings/2007/nime2007_168.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813488 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177201 issn: 2220-4806 - month: July - pages: 483--488 - publisher: Birmingham City University - title: 'SmartDrone: An Aurally Interactive Harmonic Drone' - url: https://www.nime.org/proceedings/2020/nime2020_paper92.pdf - year: 2020 + keywords: 'Workshop, MIDI, Interaction Design, Creativity, Performance' + pages: 168--172 + title: MIDI Scrapyard Challenge Workshops + url: http://www.nime.org/proceedings/2007/nime2007_168.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_93 - abstract: 'Previous research on musical embodiment has reported that expert performers - often regard their instruments as an extension 
of their body. Not every digital - musical instrument seeks to create a close relationship between body and instrument, - but even for the many that do, the design process often focuses heavily on technical - and sonic factors, with relatively less attention to the bodily experience of - the performer. In this paper we propose Somaesthetic design as an alternative - to explore this space. The Soma method aims to attune the sensibilities of designers, - as well as their experience of their body, and make use of these notions as a - resource for creative design. We then report on a series of workshops exploring - the relationship between the body and the guitar with a Soma design approach. - The workshops resulted in a series of guitar-related artefacts and NIMEs that - emerged from the somatic exploration of balance and tension during guitar performance. - Lastly we present lessons learned from our research that could inform future Soma-based - musical instrument design, and how NIME research may also inform Soma design.' - address: 'Birmingham, UK' - author: 'Martinez Avila, Juan P and Tsaknaki, Vasiliki and Karpashevich, Pavel - and Windlin, Charles and Valenti, Niklas and Höök, Kristina and McPherson, Andrew - and Benford, Steve' - bibtex: "@inproceedings{NIME20_93,\n abstract = {Previous research on musical embodiment\ - \ has reported that expert performers often regard their instruments as an extension\ - \ of their body. Not every digital musical instrument seeks to create a close\ - \ relationship between body and instrument, but even for the many that do, the\ - \ design process often focuses heavily on technical and sonic factors, with relatively\ - \ less attention to the bodily experience of the performer. In this paper we propose\ - \ Somaesthetic design as an alternative to explore this space. 
The Soma method\ - \ aims to attune the sensibilities of designers, as well as their experience of\ - \ their body, and make use of these notions as a resource for creative design.\ - \ We then report on a series of workshops exploring the relationship between the\ - \ body and the guitar with a Soma design approach. The workshops resulted in a\ - \ series of guitar-related artefacts and NIMEs that emerged from the somatic exploration\ - \ of balance and tension during guitar performance. Lastly we present lessons\ - \ learned from our research that could inform future Soma-based musical instrument\ - \ design, and how NIME research may also inform Soma design.},\n address = {Birmingham,\ - \ UK},\n author = {Martinez Avila, Juan P and Tsaknaki, Vasiliki and Karpashevich,\ - \ Pavel and Windlin, Charles and Valenti, Niklas and Höök, Kristina and McPherson,\ - \ Andrew and Benford, Steve},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813491},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {489--494},\n presentation-video = {https://youtu.be/i4UN_23A_SE},\n\ - \ publisher = {Birmingham City University},\n title = {Soma Design for NIME},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper93.pdf},\n year =\ - \ {2020}\n}\n" + ID: Lee2007a + abstract: 'We present REXband, an interactive music exhibit for collaborative improvisation + to medieval music. This audio-only system consists of three digitally augmented + medieval instrument replicas: thehurdy gurdy, harp, and frame drum. The instruments + communicate with software that provides users with both musical support and feedback + on their performance using a "virtual audience" set in a medieval tavern. 
REXband + builds upon previous work in interactive music exhibits by incorporating aspects + of e-learning to educate, in addition to interaction design patterns to entertain; + care was also taken to ensure historic authenticity. Feedback from user testing + in both controlled (laboratory) and public (museum) environments has been extremely + positive. REXband is part of the Regensburg Experience, an exhibition scheduled + to open in July 2007 to showcase the rich history of Regensburg, Germany.' + address: 'New York City, NY, United States' + author: 'Lee, Eric and Wolf, Marius and Jansen, Yvonne and Borchers, Jan' + bibtex: "@inproceedings{Lee2007a,\n abstract = {We present REXband, an interactive\ + \ music exhibit for collaborative improvisation to medieval music. This audio-only\ + \ system consists of three digitally augmented medieval instrument replicas: thehurdy\ + \ gurdy, harp, and frame drum. The instruments communicate with software that\ + \ provides users with both musical support and feedback on their performance using\ + \ a \"virtual audience\" set in a medieval tavern. REXband builds upon previous\ + \ work in interactive music exhibits by incorporating aspects of e-learning to\ + \ educate, in addition to interaction design patterns to entertain; care was also\ + \ taken to ensure historic authenticity. Feedback from user testing in both controlled\ + \ (laboratory) and public (museum) environments has been extremely positive. 
REXband\ + \ is part of the Regensburg Experience, an exhibition scheduled to open in July\ + \ 2007 to showcase the rich history of Regensburg, Germany.},\n address = {New\ + \ York City, NY, United States},\n author = {Lee, Eric and Wolf, Marius and Jansen,\ + \ Yvonne and Borchers, Jan},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177163},\n\ + \ issn = {2220-4806},\n keywords = {interactive music exhibits, medieval music,\ + \ augmented instruments, e-learning, education },\n pages = {172--177},\n title\ + \ = {REXband : A Multi-User Interactive Exhibit for Exploring Medieval Music},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_172.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813491 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177163 issn: 2220-4806 - month: July - pages: 489--494 - presentation-video: https://youtu.be/i4UN_23A_SE - publisher: Birmingham City University - title: Soma Design for NIME - url: https://www.nime.org/proceedings/2020/nime2020_paper93.pdf - year: 2020 + keywords: 'interactive music exhibits, medieval music, augmented instruments, e-learning, + education ' + pages: 172--177 + title: 'REXband : A Multi-User Interactive Exhibit for Exploring Medieval Music' + url: http://www.nime.org/proceedings/2007/nime2007_172.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_94 - abstract: 'The khipu is an information processing and transmission device used mainly - by the Inca empire and previous Andean societies. This mnemotechnic interface - is one of the first textile computers known, consisting of a central wool or cotton - cord to which other strings are attached with knots of different shapes, colors, - and sizes encrypting different kinds of values and information. 
The system was - widely used until the Spanish colonization that banned their use and destroyed - a large number of these devices. This paper introduces the creation process of - a NIME based in a khipu converted into an electronic instrument for the interaction - and generation of live experimental sound by weaving knots with conductive rubber - cords, and its implementation in the performance Knotting the memory//Encoding - the Khipu_ that aim to pay homage to this system, from a decolonial perspective - continuing the interrupted legacy of this ancestral practice in a different experience - of tangible live coding and computer music, as well as weaving the past with the - present of the indigenous and people resistance of the Andean territory with their - sounds.' - address: 'Birmingham, UK' - author: 'Cadavid, Laddy P' - bibtex: "@inproceedings{NIME20_94,\n abstract = {The khipu is an information processing\ - \ and transmission device used mainly by the Inca empire and previous Andean societies.\ - \ This mnemotechnic interface is one of the first textile computers known, consisting\ - \ of a central wool or cotton cord to which other strings are attached with knots\ - \ of different shapes, colors, and sizes encrypting different kinds of values\ - \ and information. The system was widely used until the Spanish colonization that\ - \ banned their use and destroyed a large number of these devices. 
This paper introduces\ - \ the creation process of a NIME based in a khipu converted into an electronic\ - \ instrument for the interaction and generation of live experimental sound by\ - \ weaving knots with conductive rubber cords, and its implementation in the performance\ - \ Knotting the memory//Encoding the Khipu_ that aim to pay homage to this system,\ - \ from a decolonial perspective continuing the interrupted legacy of this ancestral\ - \ practice in a different experience of tangible live coding and computer music,\ - \ as well as weaving the past with the present of the indigenous and people resistance\ - \ of the Andean territory with their sounds.},\n address = {Birmingham, UK},\n\ - \ author = {Cadavid, Laddy P},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813495},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {495--498},\n presentation-video = {https://youtu.be/nw5rbc15pT8},\n\ - \ publisher = {Birmingham City University},\n title = {Knotting the memory//Encoding\ - \ the Khipu_: Reuse of an ancient Andean device as a NIME },\n url = {https://www.nime.org/proceedings/2020/nime2020_paper94.pdf},\n\ - \ year = {2020}\n}\n" + ID: Baalman2007 + abstract: 'This paper describes work on a newly created large-scale interactive + theater performance entitled Schwelle (Thresholds). The authors discuss an innovative + approach towards the conception, development and implementation of dynamic and + responsive audio scenography: a constantly evolving, multi-layered sound design + generated by continuous input from a series of distributed wireless sensors deployed + both on the body of a performer and placed within the physical stage environment. + The paper is divided into conceptual and technological parts. 
We first describe + the project’s dramaturgical and conceptual context in order to situate the artistic + framework that has guided the technological system design. Specifically, this + framework discusses the team’s approach in combining techniques from situated + computing, theatrical sound design practice and dynamical systems in order to + create a new kind of adaptive audio scenographic environment augmented by wireless, + distributed sensing for use in live theatrical performance. The goal of this adaptive + sound design is to move beyond both existing playback models used in theatre sound + as well as the purely humancentered, controller-instrument approach used in much + current interactive performance practice.' + address: 'New York City, NY, United States' + author: 'Baalman, Marije A. and Moody-Grigsby, Daniel and Salter, Christopher L.' + bibtex: "@inproceedings{Baalman2007,\n abstract = {This paper describes work on\ + \ a newly created large-scale interactive theater performance entitled Schwelle\ + \ (Thresholds). The authors discuss an innovative approach towards the conception,\ + \ development and implementation of dynamic and responsive audio scenography:\ + \ a constantly evolving, multi-layered sound design generated by continuous input\ + \ from a series of distributed wireless sensors deployed both on the body of a\ + \ performer and placed within the physical stage environment. The paper is divided\ + \ into conceptual and technological parts. We first describe the project’s dramaturgical\ + \ and conceptual context in order to situate the artistic framework that has guided\ + \ the technological system design. 
Specifically, this framework discusses the\ + \ team’s approach in combining techniques from situated computing, theatrical\ + \ sound design practice and dynamical systems in order to create a new kind of\ + \ adaptive audio scenographic environment augmented by wireless, distributed sensing\ + \ for use in live theatrical performance. The goal of this adaptive sound design\ + \ is to move beyond both existing playback models used in theatre sound as well\ + \ as the purely humancentered, controller-instrument approach used in much current\ + \ interactive performance practice.},\n address = {New York City, NY, United States},\n\ + \ author = {Baalman, Marije A. and Moody-Grigsby, Daniel and Salter, Christopher\ + \ L.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177035},\n issn = {2220-4806},\n\ + \ keywords = {Interactive performance, dynamical systems, wireless sens- ing,\ + \ adaptive audio scenography, audio dramaturgy, situated computing, sound design\ + \ },\n pages = {178--184},\n title = {Schwelle : Sensor Augmented, Adaptive Sound\ + \ Design for Live Theatrical Performance},\n url = {http://www.nime.org/proceedings/2007/nime2007_178.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813495 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177035 issn: 2220-4806 - month: July - pages: 495--498 - presentation-video: https://youtu.be/nw5rbc15pT8 - publisher: Birmingham City University - title: 'Knotting the memory//Encoding the Khipu_: Reuse of an ancient Andean device - as a NIME ' - url: https://www.nime.org/proceedings/2020/nime2020_paper94.pdf - year: 2020 + keywords: 'Interactive performance, dynamical systems, wireless sens- ing, adaptive + audio scenography, audio dramaturgy, situated computing, sound design ' + pages: 178--184 + title: 
'Schwelle : Sensor Augmented, Adaptive Sound Design for Live Theatrical Performance' + url: http://www.nime.org/proceedings/2007/nime2007_178.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_95 - abstract: 'The recent proliferation of commercial software claiming ground in the - field of music AI has provided opportunity to engage with AI in music making without - the need to use libraries aimed at those with programming skills. Pre-packaged - music AI software has the potential to broaden access to machine learning tools - but it is unclear how widely these softwares are used by music technologists or - how engagement affects attitudes towards AI in music making. To interrogate these - questions we undertook a survey in October 2019, gaining 117 responses. The survey - collected statistical information on the use of pre-packaged and self-written - music AI software. Respondents reported a range of musical outputs including producing - recordings, live performance and generative work across many genres of music making. - The survey also gauged general attitudes towards AI in music and provided an open - field for general comments. The responses to the survey suggested a forward-looking - attitude to music AI with participants often pointing to the future potential - of AI tools, rather than present utility. Optimism was partially related to programming - skill with those with more experience showing higher skepticism towards the current - state and future potential of AI.' - address: 'Birmingham, UK' - author: 'Knotts, Shelly and Collins, Nick' - bibtex: "@inproceedings{NIME20_95,\n abstract = {The recent proliferation of commercial\ - \ software claiming ground in the field of music AI has provided opportunity to\ - \ engage with AI in music making without the need to use libraries aimed at those\ - \ with programming skills. 
Pre-packaged music AI software has the potential to\ - \ broaden access to machine learning tools but it is unclear how widely these\ - \ softwares are used by music technologists or how engagement affects attitudes\ - \ towards AI in music making. To interrogate these questions we undertook a survey\ - \ in October 2019, gaining 117 responses. The survey collected statistical information\ - \ on the use of pre-packaged and self-written music AI software. Respondents reported\ - \ a range of musical outputs including producing recordings, live performance\ - \ and generative work across many genres of music making. The survey also gauged\ - \ general attitudes towards AI in music and provided an open field for general\ - \ comments. The responses to the survey suggested a forward-looking attitude to\ - \ music AI with participants often pointing to the future potential of AI tools,\ - \ rather than present utility. Optimism was partially related to programming skill\ - \ with those with more experience showing higher skepticism towards the current\ - \ state and future potential of AI.},\n address = {Birmingham, UK},\n author =\ - \ {Knotts, Shelly and Collins, Nick},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813499},\n\ - \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ - \ = {July},\n pages = {499--504},\n presentation-video = {https://youtu.be/v6hT3ED3N60},\n\ - \ publisher = {Birmingham City University},\n title = {A survey on the uptake\ - \ of Music AI Software},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper95.pdf},\n\ - \ year = {2020}\n}\n" + ID: Jakovich2007 + abstract: 'Architectural space is a key contributor to the perceptual world we experience + daily. 
We present ‘ParticleTecture’, a soundspace installation system that extends + spatial perception of ordinary architectural space through gestural interaction + with sound in space. ParticleTecture employs a particle metaphor to produce granular + synthesis soundspaces in response to video-tracking of human movement. It incorporates + an adaptive mechanism that utilizes a measure of engagement to inform ongoing + audio patterns in response to human activity. By identifying engaging features + in its response, the system is able to predict, pre-empt and shape its evolving + responses in accordance with the most engaging, compelling, interesting attributes + of the active environment. An implementation of ParticleTecture for gallery installation + is presented and discussed as one form of architectural space.' + address: 'New York City, NY, United States' + author: 'Jakovich, Joanne and Beilharz, Kirsty' + bibtex: "@inproceedings{Jakovich2007,\n abstract = {Architectural space is a key\ + \ contributor to the perceptual world we experience daily. We present ‘ParticleTecture’,\ + \ a soundspace installation system that extends spatial perception of ordinary\ + \ architectural space through gestural interaction with sound in space. ParticleTecture\ + \ employs a particle metaphor to produce granular synthesis soundspaces in response\ + \ to video-tracking of human movement. It incorporates an adaptive mechanism that\ + \ utilizes a measure of engagement to inform ongoing audio patterns in response\ + \ to human activity. 
By identifying engaging features in its response, the system\ + \ is able to predict, pre-empt and shape its evolving responses in accordance\ + \ with the most engaging, compelling, interesting attributes of the active environment.\ + \ An implementation of ParticleTecture for gallery installation is presented and\ + \ discussed as one form of architectural space.},\n address = {New York City,\ + \ NY, United States},\n author = {Jakovich, Joanne and Beilharz, Kirsty},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177127},\n issn = {2220-4806},\n keywords\ + \ = {Architecture, installation, interaction, granular synthesis, adaptation,\ + \ engagement. },\n pages = {185--190},\n title = {ParticleTecture : Interactive\ + \ Granular Soundspaces for Architectural Design},\n url = {http://www.nime.org/proceedings/2007/nime2007_185.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813499 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177127 issn: 2220-4806 - month: July - pages: 499--504 - presentation-video: https://youtu.be/v6hT3ED3N60 - publisher: Birmingham City University - title: A survey on the uptake of Music AI Software - url: https://www.nime.org/proceedings/2020/nime2020_paper95.pdf - year: 2020 + keywords: 'Architecture, installation, interaction, granular synthesis, adaptation, + engagement. ' + pages: 185--190 + title: 'ParticleTecture : Interactive Granular Soundspaces for Architectural Design' + url: http://www.nime.org/proceedings/2007/nime2007_185.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_96 - abstract: 'Cycle is a software tool for musical composition and improvisation that - represents events along a circular timeline. 
In doing so, it breaks from the linear - representational conventions of European Art music and modern Digital Audio Workstations. - A user specifies time points on different layers, each of which corresponds to - a particular sound. The layers are superimposed on a single circle, which allows - a unique visual perspective on the relationships between musical voices given - their geometric positions. Positions in-between quantizations are possible, which - encourages experimentation with expressive timing and machine rhythms. User-selected - transformations affect groups of notes, layers, and the pattern as a whole. Past - and future states are also represented, synthesizing linear and cyclical notions - of time. This paper will contemplate philosophical questions raised by circular - rhythmic notation and will reflect on the ways in which the representational novelties - and editing functions of Cycle have inspired creativity in musical composition.' - address: 'Birmingham, UK' - author: 'Barton, Scott' - bibtex: "@inproceedings{NIME20_96,\n abstract = {Cycle is a software tool for musical\ - \ composition and improvisation that represents events along a circular timeline.\ - \ In doing so, it breaks from the linear representational conventions of European\ - \ Art music and modern Digital Audio Workstations. A user specifies time points\ - \ on different layers, each of which corresponds to a particular sound. The layers\ - \ are superimposed on a single circle, which allows a unique visual perspective\ - \ on the relationships between musical voices given their geometric positions.\ - \ Positions in-between quantizations are possible, which encourages experimentation\ - \ with expressive timing and machine rhythms. User-selected transformations affect\ - \ groups of notes, layers, and the pattern as a whole. Past and future states\ - \ are also represented, synthesizing linear and cyclical notions of time. 
This\ - \ paper will contemplate philosophical questions raised by circular rhythmic notation\ - \ and will reflect on the ways in which the representational novelties and editing\ - \ functions of Cycle have inspired creativity in musical composition.},\n address\ - \ = {Birmingham, UK},\n author = {Barton, Scott},\n booktitle = {Proceedings of\ + ID: Franinovic2007 + abstract: 'The distinctive features of interactive sound installations in public + space are considered, with special attention to the rich, if undoubtedly difficult, + environments in which they exist. It is argued that such environments, and the + social contexts that they imply, are among the most valuable features of these + works for the approach that we have adopted to creation as research practice. + The discussion is articulated through case studies drawn from two of our installations, + Recycled Soundscapes (2004) and Skyhooks (2006). Implications for the broader + design of new musical instruments are presented.' + address: 'New York City, NY, United States' + author: 'Franinovic, Karmen and Visell, Yon' + bibtex: "@inproceedings{Franinovic2007,\n abstract = {The distinctive features of\ + \ interactive sound installations in public space are considered, with special\ + \ attention to the rich, if undoubtedly difficult, environments in which they\ + \ exist. It is argued that such environments, and the social contexts that they\ + \ imply, are among the most valuable features of these works for the approach\ + \ that we have adopted to creation as research practice. The discussion is articulated\ + \ through case studies drawn from two of our installations, Recycled Soundscapes\ + \ (2004) and Skyhooks (2006). 
Implications for the broader design of new musical\ + \ instruments are presented.},\n address = {New York City, NY, United States},\n\ + \ author = {Franinovic, Karmen and Visell, Yon},\n booktitle = {Proceedings of\ \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.4813501},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {505--508},\n presentation-video\ - \ = {https://youtu.be/0CEKbyJUSw4},\n publisher = {Birmingham City University},\n\ - \ title = {Circularity in Rhythmic Representation and Composition},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper96.pdf},\n\ - \ year = {2020}\n}\n" + \ = {10.5281/zenodo.1177093},\n issn = {2220-4806},\n keywords = {architecture,interaction,music,nime07,sound\ + \ in-,urban design},\n pages = {191--196},\n title = {New Musical Interfaces in\ + \ Context : Sonic Interaction Design in the Urban Setting},\n url = {http://www.nime.org/proceedings/2007/nime2007_191.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813501 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177093 issn: 2220-4806 - month: July - pages: 505--508 - presentation-video: https://youtu.be/0CEKbyJUSw4 - publisher: Birmingham City University - title: Circularity in Rhythmic Representation and Composition - url: https://www.nime.org/proceedings/2020/nime2020_paper96.pdf - year: 2020 + keywords: 'architecture,interaction,music,nime07,sound in-,urban design' + pages: 191--196 + title: 'New Musical Interfaces in Context : Sonic Interaction Design in the Urban + Setting' + url: http://www.nime.org/proceedings/2007/nime2007_191.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_97 - abstract: 'This lab report discusses recent projects and activities of the Experimental - Music Technologies Lab at the University 
of Sussex. The lab was founded in 2014 - and has contributed to the development of the field of new musical technologies. - The report introduces the lab’s agenda, gives examples of its activities through - common themes and gives short description of lab members’ work. The lab environment, - funding income and future vision are also presented.' - address: 'Birmingham, UK' - author: 'Magnusson, Thor' - bibtex: "@inproceedings{NIME20_97,\n abstract = {This lab report discusses recent\ - \ projects and activities of the Experimental Music Technologies Lab at the University\ - \ of Sussex. The lab was founded in 2014 and has contributed to the development\ - \ of the field of new musical technologies. The report introduces the lab’s agenda,\ - \ gives examples of its activities through common themes and gives short description\ - \ of lab members’ work. The lab environment, funding income and future vision\ - \ are also presented.},\n address = {Birmingham, UK},\n author = {Magnusson, Thor},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813503},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {509--513},\n publisher = {Birmingham City University},\n title = {Instrumental\ - \ Investigations at Emute Lab},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper97.pdf},\n\ - \ year = {2020}\n}\n" + ID: Gimenes2007 + abstract: 'In this paper we introduce a System conceived to serve as the "musical + brain" of autonomous musical robots or agent-based software simulations of robotic + systems. Our research goal is to provide robots with the ability to integrate + with the musical culture of their surroundings. In a multi-agent configuration, + the System can simulate an environment in which autonomous agents interact with + each other as well as with external agents (e.g., robots, human beings or other + systems). 
The main outcome of these interactions is the transformation and development + of their musical styles as well as the musical style of the environment in which + they live. ' + address: 'New York City, NY, United States' + author: 'Gimenes, Marcelo and Miranda, Eduardo and Johnson, Chris' + bibtex: "@inproceedings{Gimenes2007,\n abstract = {In this paper we introduce a\ + \ System conceived to serve as the \"musical brain\" of autonomous musical robots\ + \ or agent-based software simulations of robotic systems. Our research goal is\ + \ to provide robots with the ability to integrate with the musical culture of\ + \ their surroundings. In a multi-agent configuration, the System can simulate\ + \ an environment in which autonomous agents interact with each other as well as\ + \ with external agents (e.g., robots, human beings or other systems). The main\ + \ outcome of these interactions is the transformation and development of their\ + \ musical styles as well as the musical style of the environment in which they\ + \ live. 
},\n address = {New York City, NY, United States},\n author = {Gimenes,\ + \ Marcelo and Miranda, Eduardo and Johnson, Chris},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177099},\n issn = {2220-4806},\n keywords = {artificial\ + \ life,musical style,musicianship,nime07},\n pages = {197--202},\n title = {Musicianship\ + \ for Robots with Style},\n url = {http://www.nime.org/proceedings/2007/nime2007_197.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813503 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177099 issn: 2220-4806 - month: July - pages: 509--513 - publisher: Birmingham City University - title: Instrumental Investigations at Emute Lab - url: https://www.nime.org/proceedings/2020/nime2020_paper97.pdf - year: 2020 + keywords: 'artificial life,musical style,musicianship,nime07' + pages: 197--202 + title: Musicianship for Robots with Style + url: http://www.nime.org/proceedings/2007/nime2007_197.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_98 - abstract: 'Creative systems such as algorithmic composers often use Artificial Intelligence - models like Markov chains, Artificial Neural Networks, and Genetic Algorithms - in order to model stochastic processes. Unconventional Computing (UC) technologies - explore non-digital ways of data storage, processing, input, and output. UC paradigms - such as Quantum Computing and Biocomputing delve into domains beyond the binary - bit to handle complex non-linear functions. In this paper, we harness Physarum - polycephalum as memristors to process and generate creative data for popular music. - While there has been research conducted in this area, the literature lacks examples - of popular music and how the organism''s non-linear behaviour can be controlled - while composing music. 
This is important because non-linear forms of representation - are not as obvious as conventional digital means. This study aims at disseminating - this technology to non-experts and musicians so that they can incorporate it in - their creative processes. Furthermore, it combines resistors and memristors to - have more flexibility while generating music and optimises parameters for faster - processing and performance. ' - address: 'Birmingham, UK' - author: 'Venkatesh, Satvik and Braund, Edward and Miranda, Eduardo' - bibtex: "@inproceedings{NIME20_98,\n abstract = {Creative systems such as algorithmic\ - \ composers often use Artificial Intelligence models like Markov chains, Artificial\ - \ Neural Networks, and Genetic Algorithms in order to model stochastic processes.\ - \ Unconventional Computing (UC) technologies explore non-digital ways of data\ - \ storage, processing, input, and output. UC paradigms such as Quantum Computing\ - \ and Biocomputing delve into domains beyond the binary bit to handle complex\ - \ non-linear functions. In this paper, we harness Physarum polycephalum as memristors\ - \ to process and generate creative data for popular music. While there has been\ - \ research conducted in this area, the literature lacks examples of popular music\ - \ and how the organism's non-linear behaviour can be controlled while composing\ - \ music. This is important because non-linear forms of representation are not\ - \ as obvious as conventional digital means. This study aims at disseminating this\ - \ technology to non-experts and musicians so that they can incorporate it in their\ - \ creative processes. Furthermore, it combines resistors and memristors to have\ - \ more flexibility while generating music and optimises parameters for faster\ - \ processing and performance. 
},\n address = {Birmingham, UK},\n author = {Venkatesh,\ - \ Satvik and Braund, Edward and Miranda, Eduardo},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.4813507},\n editor = {Romain Michon and Franziska Schroeder},\n\ - \ issn = {2220-4806},\n month = {July},\n pages = {514--519},\n presentation-video\ - \ = {https://youtu.be/NBLa-KoMUh8},\n publisher = {Birmingham City University},\n\ - \ title = {Composing Popular Music with Physarum polycephalum-based Memristors},\n\ - \ url = {https://www.nime.org/proceedings/2020/nime2020_paper98.pdf},\n year =\ - \ {2020}\n}\n" + ID: Topper2007 + abstract: ' WISEAR (Wireless Sensor Array)8, provides a robust andscalable platform + for virtually limitless types of data input tosoftware synthesis engines. It is + essentially a Linux based SBC(Single Board Computer) with 802.11a/b/g wireless + capability.The device, with batteries, only weighs a few pounds and can beworn + by a dancer or other live performer. Past work has focusedon connecting "conventional" + sensors (eg., bend sensors,accelerometers, FSRs, etc...) to the board and using + it as a datarelay, sending the data as real time control messages to synthesisengines + like Max/MSP and RTcmix1. Current research hasextended the abilities of the device + to take real-time audio andvideo data from USB cameras and audio devices, as well + asrunning synthesis engines on board the device itself. Given itsgeneric network + ability (eg., being an 802.11a/b/g device) there istheoretically no limit to the + number of WISEAR boxes that canbe used simultaneously in a performance, facilitating + multiperformer compositions. This paper will present the basic design philosophy + behindWISEAR, explain some of the basic concepts and methods, aswell as provide + a live demonstration of the running device, wornby the author.' 
+ address: 'New York City, NY, United States' + author: 'Topper, David' + bibtex: "@inproceedings{Topper2007,\n abstract = { WISEAR (Wireless Sensor Array)8,\ + \ provides a robust andscalable platform for virtually limitless types of data\ + \ input tosoftware synthesis engines. It is essentially a Linux based SBC(Single\ + \ Board Computer) with 802.11a/b/g wireless capability.The device, with batteries,\ + \ only weighs a few pounds and can beworn by a dancer or other live performer.\ + \ Past work has focusedon connecting \"conventional\" sensors (eg., bend sensors,accelerometers,\ + \ FSRs, etc...) to the board and using it as a datarelay, sending the data as\ + \ real time control messages to synthesisengines like Max/MSP and RTcmix1. Current\ + \ research hasextended the abilities of the device to take real-time audio andvideo\ + \ data from USB cameras and audio devices, as well asrunning synthesis engines\ + \ on board the device itself. Given itsgeneric network ability (eg., being an\ + \ 802.11a/b/g device) there istheoretically no limit to the number of WISEAR boxes\ + \ that canbe used simultaneously in a performance, facilitating multiperformer\ + \ compositions. 
This paper will present the basic design philosophy behindWISEAR,\ + \ explain some of the basic concepts and methods, aswell as provide a live demonstration\ + \ of the running device, wornby the author.},\n address = {New York City, NY,\ + \ United States},\n author = {Topper, David},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177261},\n issn = {2220-4806},\n keywords = {Wireless, sensors,\ + \ embedded devices, linux, real-time audio, real- time video },\n pages = {203--204},\n\ + \ title = {Extended Applications of the Wireless Sensor Array (WISEAR)},\n url\ + \ = {http://www.nime.org/proceedings/2007/nime2007_203.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813507 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1177261 issn: 2220-4806 - month: July - pages: 514--519 - presentation-video: https://youtu.be/NBLa-KoMUh8 - publisher: Birmingham City University - title: Composing Popular Music with Physarum polycephalum-based Memristors - url: https://www.nime.org/proceedings/2020/nime2020_paper98.pdf - year: 2020 + keywords: 'Wireless, sensors, embedded devices, linux, real-time audio, real- time + video ' + pages: 203--204 + title: Extended Applications of the Wireless Sensor Array (WISEAR) + url: http://www.nime.org/proceedings/2007/nime2007_203.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NIME20_99 - abstract: 'PathoSonic is a VR experience that enables a participant to visualize - and perform a sound file based on timbre feature descriptors displayed in space. - The name comes from the different paths the participant can create through their - sonic explorations. 
The goal of this research is to leverage affordances of virtual - reality technology to visualize sound through different levels of performance-based - interactivity that immerses the participant''s body in a spatial virtual environment. - Through implementation of a multi-sensory experience, including visual aesthetics, - sound, and haptic feedback, we explore inclusive approaches to sound visualization, - making it more accessible to a wider audience including those with hearing, and - mobility impairments. The online version of the paper can be accessed here: https://fdch.github.io/pathosonic' - address: 'Birmingham, UK' - author: 'Camara Halac, Fede and Addy, Shadrick' - bibtex: "@inproceedings{NIME20_99,\n abstract = {PathoSonic is a VR experience that\ - \ enables a participant to visualize and perform a sound file based on timbre\ - \ feature descriptors displayed in space. The name comes from the different paths\ - \ the participant can create through their sonic explorations. The goal of this\ - \ research is to leverage affordances of virtual reality technology to visualize\ - \ sound through different levels of performance-based interactivity that immerses\ - \ the participant's body in a spatial virtual environment. Through implementation\ - \ of a multi-sensory experience, including visual aesthetics, sound, and haptic\ - \ feedback, we explore inclusive approaches to sound visualization, making it\ - \ more accessible to a wider audience including those with hearing, and mobility\ - \ impairments. 
The online version of the paper can be accessed here: https://fdch.github.io/pathosonic},\n\ - \ address = {Birmingham, UK},\n author = {Camara Halac, Fede and Addy, Shadrick},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.4813510},\n editor = {Romain Michon\ - \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ - \ {520--522},\n publisher = {Birmingham City University},\n title = {PathoSonic:\ - \ Performing Sound In Virtual Reality Feature Space},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper99.pdf},\n\ - \ year = {2020}\n}\n" + ID: Fernstrom2007 + abstract: 'In this paper, we describe a new wearable wireless sensor system for + solo or group dance performances. The system consists of a number of 25mm Wireless + Inertial Measurement Unit (WIMU) nodes designed at the Tyndall National Institute. + Each sensor node has two dual-axis accelerometers, three single axis gyroscopes + and two dual axis magnetometers, providing 6 Degrees of Freedom (DOF) movement + tracking. All sensors transmit data wirelessly to a basestation at a frequency + band and power that does not require licensing. The interface process has been + developed at the Interaction Design Center of the University of Limerick (Ireland). + The data are acquired and manipulated in well-know real-time software like pd + and Max/MSP. This paper presents the new system, describes the interface design + and outlines the main achievements of this collaborative research, which has been + named ‘Celeritas’.' 
+ address: 'New York City, NY, United States' + author: 'Torre, Giuseppe and Fernström, Mikael and O''Flynn, Brendan and Angove, + Philip' + bibtex: "@inproceedings{Fernstrom2007,\n abstract = {In this paper, we describe\ + \ a new wearable wireless sensor system for solo or group dance performances.\ + \ The system consists of a number of 25mm Wireless Inertial Measurement Unit (WIMU)\ + \ nodes designed at the Tyndall National Institute. Each sensor node has two dual-axis\ + \ accelerometers, three single axis gyroscopes and two dual axis magnetometers,\ + \ providing 6 Degrees of Freedom (DOF) movement tracking. All sensors transmit\ + \ data wirelessly to a basestation at a frequency band and power that does not\ + \ require licensing. The interface process has been developed at the Interaction\ + \ Design Center of the University of Limerick (Ireland). The data are acquired\ + \ and manipulated in well-know real-time software like pd and Max/MSP. This paper\ + \ presents the new system, describes the interface design and outlines the main\ + \ achievements of this collaborative research, which has been named ‘Celeritas’.},\n\ + \ address = {New York City, NY, United States},\n author = {Torre, Giuseppe and\ + \ Fernstr\\''{o}m, Mikael and O'Flynn, Brendan and Angove, Philip},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179463},\n issn = {2220-4806},\n keywords\ + \ = {Inertial Measurement Unit, IMU, Position Tracking, Interactive Dance Performance,\ + \ Graphical Object, Mapping. 
},\n pages = {205--208},\n title = {Celeritas : Wearable\ + \ Wireless System},\n url = {http://www.nime.org/proceedings/2007/nime2007_205.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4813510 - editor: Romain Michon and Franziska Schroeder + doi: 10.5281/zenodo.1179463 issn: 2220-4806 - month: July - pages: 520--522 - publisher: Birmingham City University - title: 'PathoSonic: Performing Sound In Virtual Reality Feature Space' - url: https://www.nime.org/proceedings/2020/nime2020_paper99.pdf - year: 2020 + keywords: 'Inertial Measurement Unit, IMU, Position Tracking, Interactive Dance + Performance, Graphical Object, Mapping. ' + pages: 205--208 + title: 'Celeritas : Wearable Wireless System' + url: http://www.nime.org/proceedings/2007/nime2007_205.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Cannon2003 - abstract: 'In this paper we present a design for the EpipE, a newexpressive electronic - music controller based on the IrishUilleann Pipes, a 7-note polyphonic reeded - woodwind. Thecore of this proposed controller design is a continuouselectronic - tonehole-sensing arrangement, equally applicableto other woodwind interfaces like - those of the flute, recorder orJapanese shakuhachi. The controller will initially - be used todrive a physically-based synthesis model, with the eventualgoal being - the development of a mapping layer allowing theEpipE interface to operate as a - MIDI-like controller of arbitrarysynthesis models.' - address: 'Montreal, Canada' - author: 'Cannon, Cormac and Hughes, Stephen and O''Modhrain, Sile' - bibtex: "@inproceedings{Cannon2003,\n abstract = {In this paper we present a design\ - \ for the EpipE, a newexpressive electronic music controller based on the IrishUilleann\ - \ Pipes, a 7-note polyphonic reeded woodwind. 
Thecore of this proposed controller\ - \ design is a continuouselectronic tonehole-sensing arrangement, equally applicableto\ - \ other woodwind interfaces like those of the flute, recorder orJapanese shakuhachi.\ - \ The controller will initially be used todrive a physically-based synthesis model,\ - \ with the eventualgoal being the development of a mapping layer allowing theEpipE\ - \ interface to operate as a MIDI-like controller of arbitrarysynthesis models.},\n\ - \ address = {Montreal, Canada},\n author = {Cannon, Cormac and Hughes, Stephen\ - \ and O'Modhrain, Sile},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ - \ = {10.5281/zenodo.1176497},\n issn = {2220-4806},\n keywords = {Controllers,\ - \ continuous woodwind tonehole sensor, uilleann pipes, Irish bagpipe, physical\ - \ modelling, double reed, conical bore, tonehole. },\n pages = {3--8},\n title\ - \ = {EpipE: Exploration of the Uilleann Pipes as a Potential Controller for Computer-based\ - \ Music},\n url = {http://www.nime.org/proceedings/2003/nime2003_003.pdf},\n year\ - \ = {2003}\n}\n" + ID: Sinclair2007 + abstract: 'This paper presents an approach to audio-haptic integration that utilizes + Open Sound Control, an increasingly wellsupported standard for audio communication, + to initializeand communicate with dynamic virtual environments thatwork with off-the-shelf + force-feedback devices.' + address: 'New York City, NY, United States' + author: 'Sinclair, Stephen and Wanderley, Marcelo M.' 
+ bibtex: "@inproceedings{Sinclair2007,\n abstract = {This paper presents an approach\ + \ to audio-haptic integration that utilizes Open Sound Control, an increasingly\ + \ wellsupported standard for audio communication, to initializeand communicate\ + \ with dynamic virtual environments thatwork with off-the-shelf force-feedback\ + \ devices.},\n address = {New York City, NY, United States},\n author = {Sinclair,\ + \ Stephen and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177245},\n\ + \ issn = {2220-4806},\n keywords = {Haptics, control, multi-modal, audio, force-feedback\ + \ },\n pages = {209--212},\n title = {Defining a Control Standard for Easily Integrating\ + \ Haptic Virtual Environments with Existing Audio / Visual Systems},\n url = {http://www.nime.org/proceedings/2007/nime2007_209.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176497 + doi: 10.5281/zenodo.1177245 issn: 2220-4806 - keywords: 'Controllers, continuous woodwind tonehole sensor, uilleann pipes, Irish - bagpipe, physical modelling, double reed, conical bore, tonehole. ' - pages: 3--8 - title: 'EpipE: Exploration of the Uilleann Pipes as a Potential Controller for Computer-based - Music' - url: http://www.nime.org/proceedings/2003/nime2003_003.pdf - year: 2003 + keywords: 'Haptics, control, multi-modal, audio, force-feedback ' + pages: 209--212 + title: Defining a Control Standard for Easily Integrating Haptic Virtual Environments + with Existing Audio / Visual Systems + url: http://www.nime.org/proceedings/2007/nime2007_209.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Young2003 - abstract: HyperPuja is a novel controller that closely mimicks the behavior of a - Tibetan Singing Bowl rubbed with a "puja" stick. 
Our design hides the electronics - from the performer to maintain the original look and feel of the instrument and - the performance. This is achieved by using wireless technology to keep the stick - un-tethered as well as burying the electronics inside the the core of the stick. - The measured parameters closely resemble the input parameters of a related physical - synthesis model allowing for convenient mapping of sensor parameters to synthesis - input. The new controller allows for flexible choice of sound synthesis while - fully maintaining the characteristics of the physical interaction of the original - instrument. - address: 'Montreal, Canada' - author: 'Young, Diana and Essl, Georg' - bibtex: "@inproceedings{Young2003,\n abstract = {HyperPuja is a novel controller\ - \ that closely mimicks the behavior of a Tibetan Singing Bowl rubbed with a \"\ - puja\" stick. Our design hides the electronics from the performer to maintain\ - \ the original look and feel of the instrument and the performance. This is achieved\ - \ by using wireless technology to keep the stick un-tethered as well as burying\ - \ the electronics inside the the core of the stick. The measured parameters closely\ - \ resemble the input parameters of a related physical synthesis model allowing\ - \ for convenient mapping of sensor parameters to synthesis input. 
The new controller\ - \ allows for flexible choice of sound synthesis while fully maintaining the characteristics\ - \ of the physical interaction of the original instrument.},\n address = {Montreal,\ - \ Canada},\n author = {Young, Diana and Essl, Georg},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176577},\n issn = {2220-4806},\n\ - \ pages = {9--14},\n title = {HyperPuja: A Tibetan Singing Bowl Controller},\n\ - \ url = {http://www.nime.org/proceedings/2003/nime2003_009.pdf},\n year = {2003}\n\ - }\n" + ID: Donaldson2007 + abstract: 'Chroma based representations of acoustic phenomenon are representations + of sound as pitched acoustic energy. A framewise chroma distribution over an entire + musical piece is a useful and straightforward representation of its musical pitch + over time. This paper examines a method of condensing the block-wise chroma information + of a musical piece into a two dimensional embedding. Such an embedding is a representation + or map of the different pitched energies in a song, and how these energies relate + to each other in the context of the song. The paper presents an interactive version + of this representation as an exploratory analytical tool or instrument for granular + synthesis. Pointing and clicking on the interactive map recreates the acoustical + energy present in the chroma blocks at that location, providing an effective way + of both exploring the relationships between sounds in the original piece, and + recreating a synthesized approximation of these sounds in an instrumental fashion. 
' + address: 'New York City, NY, United States' + author: 'Donaldson, Justin and Knopke, Ian and Raphael, Chris' + bibtex: "@inproceedings{Donaldson2007,\n abstract = {Chroma based representations\ + \ of acoustic phenomenon are representations of sound as pitched acoustic energy.\ + \ A framewise chroma distribution over an entire musical piece is a useful and\ + \ straightforward representation of its musical pitch over time. This paper examines\ + \ a method of condensing the block-wise chroma information of a musical piece\ + \ into a two dimensional embedding. Such an embedding is a representation or map\ + \ of the different pitched energies in a song, and how these energies relate to\ + \ each other in the context of the song. The paper presents an interactive version\ + \ of this representation as an exploratory analytical tool or instrument for granular\ + \ synthesis. Pointing and clicking on the interactive map recreates the acoustical\ + \ energy present in the chroma blocks at that location, providing an effective\ + \ way of both exploring the relationships between sounds in the original piece,\ + \ and recreating a synthesized approximation of these sounds in an instrumental\ + \ fashion. 
},\n address = {New York City, NY, United States},\n author = {Donaldson,\ + \ Justin and Knopke, Ian and Raphael, Chris},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177085},\n issn = {2220-4806},\n keywords = {Chroma, granular\ + \ synthesis, dimensionality reduction },\n pages = {213--219},\n title = {Chroma\ + \ Palette : Chromatic Maps of Sound As Granular Synthesis Interface},\n url =\ + \ {http://www.nime.org/proceedings/2007/nime2007_213.pdf},\n year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176577 + doi: 10.5281/zenodo.1177085 issn: 2220-4806 - pages: 9--14 - title: 'HyperPuja: A Tibetan Singing Bowl Controller' - url: http://www.nime.org/proceedings/2003/nime2003_009.pdf - year: 2003 + keywords: 'Chroma, granular synthesis, dimensionality reduction ' + pages: 213--219 + title: 'Chroma Palette : Chromatic Maps of Sound As Granular Synthesis Interface' + url: http://www.nime.org/proceedings/2007/nime2007_213.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Scavone2003 - abstract: 'The Pipe is an experimental, general purpose music input device designed - and built in the form of a compact MIDI wind controller. The development of this - device was motivated in part by an interest in exploring breath pressure as a - control input. The Pipe provides a variety of common sensor types, including force - sensing resistors, momentary switches, accelerometers, potentiometers, and an - air pressure transducer, which allow maximum flexibility in the design of a sensor - mapping scheme. The Pipe uses a programmable BASIC Stamp 2sx microprocessor which - outputs control messages via a standard MIDI jack.' 
- address: 'Montreal, Canada' - author: 'Scavone, Gary' - bibtex: "@inproceedings{Scavone2003,\n abstract = {The Pipe is an experimental,\ - \ general purpose music input device designed and built in the form of a compact\ - \ MIDI wind controller. The development of this device was motivated in part by\ - \ an interest in exploring breath pressure as a control input. The Pipe provides\ - \ a variety of common sensor types, including force sensing resistors, momentary\ - \ switches, accelerometers, potentiometers, and an air pressure transducer, which\ - \ allow maximum flexibility in the design of a sensor mapping scheme. The Pipe\ - \ uses a programmable BASIC Stamp 2sx microprocessor which outputs control messages\ - \ via a standard MIDI jack.},\n address = {Montreal, Canada},\n author = {Scavone,\ - \ Gary},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176557},\n\ - \ issn = {2220-4806},\n keywords = {MIDI Controller, Wind Controller, Breath Control,\ - \ Human Computer Interaction. 
},\n pages = {15--18},\n title = {THE PIPE: Explorations\ - \ with Breath Control},\n url = {http://www.nime.org/proceedings/2003/nime2003_015.pdf},\n\ - \ year = {2003}\n}\n" + ID: Collins2007 + address: 'New York City, NY, United States' + author: 'Collins, Nick' + bibtex: "@inproceedings{Collins2007,\n address = {New York City, NY, United States},\n\ + \ author = {Collins, Nick},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177075},\n\ + \ issn = {2220-4806},\n keywords = {accompaniment,concatenative sound syn-,feature\ + \ matching,inner parts,interactive mu-,melodic similarity,nime07,thesis},\n pages\ + \ = {220--223},\n title = {Matching Parts : Inner Voice Led Control for Symbolic\ + \ and Audio Accompaniment},\n url = {http://www.nime.org/proceedings/2007/nime2007_220.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176557 + doi: 10.5281/zenodo.1177075 issn: 2220-4806 - keywords: 'MIDI Controller, Wind Controller, Breath Control, Human Computer Interaction. ' - pages: 15--18 - title: 'THE PIPE: Explorations with Breath Control' - url: http://www.nime.org/proceedings/2003/nime2003_015.pdf - year: 2003 + keywords: 'accompaniment,concatenative sound syn-,feature matching,inner parts,interactive + mu-,melodic similarity,nime07,thesis' + pages: 220--223 + title: 'Matching Parts : Inner Voice Led Control for Symbolic and Audio Accompaniment' + url: http://www.nime.org/proceedings/2007/nime2007_220.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Baalman2003 - abstract: The STRIMIDILATOR is an instrument that uses the deviation and the vibration - of strings as MIDI-controllers. Thismethod of control gives the user direct tactile - force feedbackand allows for subtle control. 
The development of the instrument - and its different functions are described. - address: 'Montreal, Canada' - author: 'Baalman, Marije A.' - bibtex: "@inproceedings{Baalman2003,\n abstract = {The STRIMIDILATOR is an instrument\ - \ that uses the deviation and the vibration of strings as MIDI-controllers. Thismethod\ - \ of control gives the user direct tactile force feedbackand allows for subtle\ - \ control. The development of the instrument and its different functions are described.},\n\ - \ address = {Montreal, Canada},\n author = {Baalman, Marije A.},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176486},\n issn = {2220-4806},\n\ - \ keywords = {MIDI controllers, tactile force feedback, strings. Figure The STRIMIDILATOR\ - \ },\n pages = {19--23},\n title = {The {STRIMIDILATOR}: a String Controlled {MIDI}-Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2003/nime2003_019.pdf},\n year = {2003}\n\ + ID: Cartwright2007 + abstract: "This report presents the design and construct ion of Rage in Conjunction\ + \ with the Machine, a simple but novel pairing of musical interface and sound\ + \ sculpture. The ,\n,\nauthors discuss the design and creation of this instrument\ + \ , focusing on the unique aspects of it, including the use of physical systems,\ + \ large gestural input, scale, and the electronic coupling of a physical input\ + \ to a physical output." + address: 'New York City, NY, United States' + author: 'Cartwright, Mark and Jones, Matt and Terasawa, Hiroko' + bibtex: "@inproceedings{Cartwright2007,\n abstract = {This report presents the design\ + \ and construct ion of Rage in Conjunction with the Machine, a simple but novel\ + \ pairing of musical interface and sound sculpture. 
The ,\n,\nauthors discuss\ + \ the design and creation of this instrument , focusing on the unique aspects\ + \ of it, including the use of physical systems, large gestural input, scale, and\ + \ the electronic coupling of a physical input to a physical output.},\n address\ + \ = {New York City, NY, United States},\n author = {Cartwright, Mark and Jones,\ + \ Matt and Terasawa, Hiroko},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177063},\n\ + \ issn = {2220-4806},\n keywords = {audience participation,inflatable,instrume\ + \ nt design,instrume nt size,mapping,musical,new musical instrument,nime07,physical\ + \ systems,sound scultpure},\n pages = {224--227},\n title = {Rage in Conjunction\ + \ with the Machine},\n url = {http://www.nime.org/proceedings/2007/nime2007_224.pdf},\n\ + \ year = {2007}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177063 + issn: 2220-4806 + keywords: 'audience participation,inflatable,instrume nt design,instrume nt size,mapping,musical,new + musical instrument,nime07,physical systems,sound scultpure' + pages: 224--227 + title: Rage in Conjunction with the Machine + url: http://www.nime.org/proceedings/2007/nime2007_224.pdf + year: 2007 + + +- ENTRYTYPE: inproceedings + ID: Weinberg2007 + abstract: 'The paper presents the theoretical background and the design scheme for + a perceptual and improvisational robotic marimba player that interacts with human + musicians in a visual and acoustic manner. Informed by an evaluation of a previously + developed robotic percussionist, we present the extension of our work to melodic + and harmonic realms with the design of a robotic player that listens to, analyzes + and improvises pitch-based musical materials. 
After a presentation of the motivation + for the project, theoretical background and related work, we present a set of + research questions followed by a description of hardware and software approaches + that address these questions. The paper concludes with a description of our plans + to implement and embed these approaches in a robotic marimba player that will + be used in workshops and concerts.' + address: 'New York City, NY, United States' + author: 'Weinberg, Gil and Driscoll, Scott' + bibtex: "@inproceedings{Weinberg2007,\n abstract = {The paper presents the theoretical\ + \ background and the design scheme for a perceptual and improvisational robotic\ + \ marimba player that interacts with human musicians in a visual and acoustic\ + \ manner. Informed by an evaluation of a previously developed robotic percussionist,\ + \ we present the extension of our work to melodic and harmonic realms with the\ + \ design of a robotic player that listens to, analyzes and improvises pitch-based\ + \ musical materials. 
After a presentation of the motivation for the project, theoretical\ + \ background and related work, we present a set of research questions followed\ + \ by a description of hardware and software approaches that address these questions.\ + \ The paper concludes with a description of our plans to implement and embed these\ + \ approaches in a robotic marimba player that will be used in workshops and concerts.},\n\ + \ address = {New York City, NY, United States},\n author = {Weinberg, Gil and\ + \ Driscoll, Scott},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179477},\n\ + \ issn = {2220-4806},\n keywords = {human-machine interaction,improvisation,nime07,perceptual\ + \ modeling,robotic musicianship},\n pages = {228--233},\n title = {The Design\ + \ of a Robotic Marimba Player -- Introducing Pitch into Robotic Musicianship},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_228.pdf},\n year = {2007}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176486 + doi: 10.5281/zenodo.1179477 issn: 2220-4806 - keywords: 'MIDI controllers, tactile force feedback, strings. Figure The STRIMIDILATOR ' - pages: 19--23 - title: 'The STRIMIDILATOR: a String Controlled MIDI-Instrument' - url: http://www.nime.org/proceedings/2003/nime2003_019.pdf - year: 2003 + keywords: 'human-machine interaction,improvisation,nime07,perceptual modeling,robotic + musicianship' + pages: 228--233 + title: The Design of a Robotic Marimba Player -- Introducing Pitch into Robotic + Musicianship + url: http://www.nime.org/proceedings/2007/nime2007_228.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Wilson2003 - abstract: 'Over the past year the instructors of the Human ComputerInteraction courses - at CCRMA have undertaken a technology shift to a much more powerful teaching platform. 
- Wedescribe the technical features of the new Atmel AVR basedplatform, contrasting - it with the Parallax BASIC Stampplatform used in the past. The successes and failures - ofthe new platform are considered, and some student projectsuccess stories described.' - address: 'Montreal, Canada' - author: 'Wilson, Scott and Gurevich, Michael and Verplank, Bill and Stang, Pascal' - bibtex: "@inproceedings{Wilson2003,\n abstract = {Over the past year the instructors\ - \ of the Human ComputerInteraction courses at CCRMA have undertaken a technology\ - \ shift to a much more powerful teaching platform. Wedescribe the technical features\ - \ of the new Atmel AVR basedplatform, contrasting it with the Parallax BASIC Stampplatform\ - \ used in the past. The successes and failures ofthe new platform are considered,\ - \ and some student projectsuccess stories described.},\n address = {Montreal,\ - \ Canada},\n author = {Wilson, Scott and Gurevich, Michael and Verplank, Bill\ - \ and Stang, Pascal},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ - \ = {10.5281/zenodo.1176571},\n issn = {2220-4806},\n keywords = {Microcontrollers,\ - \ Music Controllers, Pedagogy, Atmel AVR, BASIC Stamp.},\n pages = {24--29},\n\ - \ title = {Microcontrollers in Music HCI Instruction: Reflections on our Switch\ - \ to the Atmel AVR Platform},\n url = {http://www.nime.org/proceedings/2003/nime2003_024.pdf},\n\ - \ year = {2003}\n}\n" + ID: Robertson2007 + abstract: 'This paper describes the development of B-Keeper, a reatime beat tracking + system implemented in Java and Max/MSP,which is capable of maintaining synchronisation + between anelectronic sequencer and a drummer. This enables musicians to interact + with electronic parts which are triggeredautomatically by the computer from performance + information. We describe an implementation which functions withthe sequencer Ableton + Live.' 
+ address: 'New York City, NY, United States' + author: 'Robertson, Andrew and Plumbley, Mark D.' + bibtex: "@inproceedings{Robertson2007,\n abstract = {This paper describes the development\ + \ of B-Keeper, a reatime beat tracking system implemented in Java and Max/MSP,which\ + \ is capable of maintaining synchronisation between anelectronic sequencer and\ + \ a drummer. This enables musicians to interact with electronic parts which are\ + \ triggeredautomatically by the computer from performance information. We describe\ + \ an implementation which functions withthe sequencer Ableton Live.},\n address\ + \ = {New York City, NY, United States},\n author = {Robertson, Andrew and Plumbley,\ + \ Mark D.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177231},\n issn\ + \ = {2220-4806},\n keywords = {Human-Computer Interaction, Automatic Accompaniment,\ + \ Performance },\n pages = {234--237},\n title = {B-Keeper : A Beat-Tracker for\ + \ Live Performance},\n url = {http://www.nime.org/proceedings/2007/nime2007_234.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176571 + doi: 10.5281/zenodo.1177231 issn: 2220-4806 - keywords: 'Microcontrollers, Music Controllers, Pedagogy, Atmel AVR, BASIC Stamp.' 
- pages: 24--29 - title: 'Microcontrollers in Music HCI Instruction: Reflections on our Switch to - the Atmel AVR Platform' - url: http://www.nime.org/proceedings/2003/nime2003_024.pdf - year: 2003 + keywords: 'Human-Computer Interaction, Automatic Accompaniment, Performance ' + pages: 234--237 + title: 'B-Keeper : A Beat-Tracker for Live Performance' + url: http://www.nime.org/proceedings/2007/nime2007_234.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Andersen2003 - abstract: 'The Disc Jockey (DJ) software system Mixxx is presented.Mixxx makes it - possible to conduct studies of new interaction techniques in connection with the - DJ situation, by itsopen design and easy integration of new software modulesand - MIDI connection to external controllers. To gain a better understanding of working - practices, and to aid the designprocess of new interfaces, interviews with two - contemporarymusicians and DJ''s are presented. In contact with thesemusicians - development of several novel prototypes for DJinteraction have been made. Finally - implementation detailsof Mixxx are described.' - address: 'Montreal, Canada' - author: 'Andersen, Tue H.' - bibtex: "@inproceedings{Andersen2003,\n abstract = {The Disc Jockey (DJ) software\ - \ system Mixxx is presented.Mixxx makes it possible to conduct studies of new\ - \ interaction techniques in connection with the DJ situation, by itsopen design\ - \ and easy integration of new software modulesand MIDI connection to external\ - \ controllers. To gain a better understanding of working practices, and to aid\ - \ the designprocess of new interfaces, interviews with two contemporarymusicians\ - \ and DJ's are presented. In contact with thesemusicians development of several\ - \ novel prototypes for DJinteraction have been made. 
Finally implementation detailsof\ - \ Mixxx are described.},\n address = {Montreal, Canada},\n author = {Andersen,\ - \ Tue H.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176484},\n\ - \ issn = {2220-4806},\n keywords = {DJ, software, interaction, visualization,\ - \ controllers, augmented reality.},\n pages = {30--35},\n title = {Mixxx : Towards\ - \ Novel DJ Interfaces},\n url = {http://www.nime.org/proceedings/2003/nime2003_030.pdf},\n\ - \ year = {2003}\n}\n" + ID: Kapur2007 + abstract: 'This paper describes a system enabling a human to perform music with + a robot in real-time, in the context of North Indian classical music. We modify + a traditional acoustic sitar into a hyperinstrument in order to capture performance + gestures for musical analysis. A custom built four-armed robotic Indian drummer + was built using a microchip, solenoids, aluminum and folk frame drums. Algorithms + written towards "intelligent" machine musicianship are described. The final goal + of this research is to have a robotic drummer accompany a professional human sitar + player live in performance. ' + address: 'New York City, NY, United States' + author: 'Kapur, Ajay and Singer, Eric and Benning, Manjinder S. and Tzanetakis, + George and Trimpin, Trimpin' + bibtex: "@inproceedings{Kapur2007,\n abstract = {This paper describes a system enabling\ + \ a human to perform music with a robot in real-time, in the context of North\ + \ Indian classical music. We modify a traditional acoustic sitar into a hyperinstrument\ + \ in order to capture performance gestures for musical analysis. A custom built\ + \ four-armed robotic Indian drummer was built using a microchip, solenoids, aluminum\ + \ and folk frame drums. Algorithms written towards \"intelligent\" machine musicianship\ + \ are described. 
The final goal of this research is to have a robotic drummer\ + \ accompany a professional human sitar player live in performance. },\n address\ + \ = {New York City, NY, United States},\n author = {Kapur, Ajay and Singer, Eric\ + \ and Benning, Manjinder S. and Tzanetakis, George and Trimpin, Trimpin},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177137},\n issn = {2220-4806},\n keywords\ + \ = {Musical Robotics, Electronic Sitar, Hyperinstruments, Music Information Retrieval\ + \ (MIR). },\n pages = {238--241},\n title = {Integrating HyperInstruments , Musical\ + \ Robots \\& Machine Musicianship for North {India}n Classical Music},\n url =\ + \ {http://www.nime.org/proceedings/2007/nime2007_238.pdf},\n year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176484 + doi: 10.5281/zenodo.1177137 issn: 2220-4806 - keywords: 'DJ, software, interaction, visualization, controllers, augmented reality.' - pages: 30--35 - title: 'Mixxx : Towards Novel DJ Interfaces' - url: http://www.nime.org/proceedings/2003/nime2003_030.pdf - year: 2003 + keywords: 'Musical Robotics, Electronic Sitar, Hyperinstruments, Music Information + Retrieval (MIR). ' + pages: 238--241 + title: 'Integrating HyperInstruments , Musical Robots & Machine Musicianship for + North Indian Classical Music' + url: http://www.nime.org/proceedings/2007/nime2007_238.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Orio2003 - abstract: 'Score following is the synchronisation of a computer with a performer - playing a known musical score. It now has a history of about twenty years as a - research and musical topic, and is an ongoing project at Ircam. 
We present an - overview of existing and historical score following systems, followed by fundamental - definitions and terminology, and considerations about score formats, evaluation - of score followers, and training. The score follower that we developed at Ircam - is based on a Hidden Markov Model and on the modeling of the expected signal received - from the performer. The model has been implemented in an audio and a Midi version, - and is now being used in production. We report here our first experiences and - our first steps towards a complete evaluation of system performances. Finally, - we indicate directions how score following can go beyond the artistic applications - known today.' - address: 'Montreal, Canada' - author: 'Orio, Nicola and Lemouton, Serge and Schwarz, Diemo' - bibtex: "@inproceedings{Orio2003,\n abstract = {Score following is the synchronisation\ - \ of a computer with a performer playing a known musical score. It now has a history\ - \ of about twenty years as a research and musical topic, and is an ongoing project\ - \ at Ircam. We present an overview of existing and historical score following\ - \ systems, followed by fundamental definitions and terminology, and considerations\ - \ about score formats, evaluation of score followers, and training. The score\ - \ follower that we developed at Ircam is based on a Hidden Markov Model and on\ - \ the modeling of the expected signal received from the performer. The model has\ - \ been implemented in an audio and a Midi version, and is now being used in production.\ - \ We report here our first experiences and our first steps towards a complete\ - \ evaluation of system performances. 
Finally, we indicate directions how score\ - \ following can go beyond the artistic applications known today.},\n address =\ - \ {Montreal, Canada},\n author = {Orio, Nicola and Lemouton, Serge and Schwarz,\ - \ Diemo},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176547},\n\ - \ issn = {2220-4806},\n keywords = {Score following, score recognition, real time\ - \ audio alignment, virtual accompaniment.},\n pages = {36--41},\n title = {Score\ - \ Following: State of the Art and New Developments},\n url = {http://www.nime.org/proceedings/2003/nime2003_036.pdf},\n\ - \ year = {2003}\n}\n" + ID: Clay2007 + abstract: 'The starting point for this project is the want to produce a music controller + that could be employed in such a manner that even lay public could enjoy the possibilities + of mobile art. All of the works that are discussed here are in relation to a new + GPS-based controller, the Wrist-Conductor. The works are technically based around + the synchronizing possibilities using the GPS Time Mark and are aesthetically + rooted in works that function in an open public space such as a city or a forest. + One of the works intended for the controller, China Gates, is discussed here in + detail in order to describe how the GPS Wrist-Controller is actually used in a + public art context. The other works, CitySonics, The Enchanted Forest and Get + a Pot & a Spoon are described briefly in order to demonstrate that even a simple + controller can be used to create a body of works. This paper also addresses the + breaking of the media bubble via the concept of the “open audience”, or how mobile + art can engage pedestrians as viewers or listeners within public space and not + remain an isolated experience for performers only.' 
+ address: 'New York City, NY, United States' + author: 'Clay, Arthur and Majoe, Dennis' + bibtex: "@inproceedings{Clay2007,\n abstract = {The starting point for this project\ + \ is the want to produce a music controller that could be employed in such a manner\ + \ that even lay public could enjoy the possibilities of mobile art. All of the\ + \ works that are discussed here are in relation to a new GPS-based controller,\ + \ the Wrist-Conductor. The works are technically based around the synchronizing\ + \ possibilities using the GPS Time Mark and are aesthetically rooted in works\ + \ that function in an open public space such as a city or a forest. One of the\ + \ works intended for the controller, China Gates, is discussed here in detail\ + \ in order to describe how the GPS Wrist-Controller is actually used in a public\ + \ art context. The other works, CitySonics, The Enchanted Forest and Get a Pot\ + \ \\& a Spoon are described briefly in order to demonstrate that even a simple\ + \ controller can be used to create a body of works. 
This paper also addresses\ + \ the breaking of the media bubble via the concept of the “open audience”, or\ + \ how mobile art can engage pedestrians as viewers or listeners within public\ + \ space and not remain an isolated experience for performers only.},\n address\ + \ = {New York City, NY, United States},\n author = {Clay, Arthur and Majoe, Dennis},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177073},\n issn = {2220-4806},\n\ + \ keywords = {Mobile Music, GPS, Controller, Collaborative Performance },\n pages\ + \ = {242--245},\n title = {The Wrist-Conductor},\n url = {http://www.nime.org/proceedings/2007/nime2007_242.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176547 + doi: 10.5281/zenodo.1177073 issn: 2220-4806 - keywords: 'Score following, score recognition, real time audio alignment, virtual - accompaniment.' - pages: 36--41 - title: 'Score Following: State of the Art and New Developments' - url: http://www.nime.org/proceedings/2003/nime2003_036.pdf - year: 2003 + keywords: 'Mobile Music, GPS, Controller, Collaborative Performance ' + pages: 242--245 + title: The Wrist-Conductor + url: http://www.nime.org/proceedings/2007/nime2007_242.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Traube2003 - abstract: 'In this paper, we describe a multi-level approach for the extraction - of instrumental gesture parameters taken from the characteristics of the signal - captured by a microphone and based on the knowledge of physical mechanisms taking - place on the instrument. 
We also explore the relationships between some features - of timbre and gesture parameters, taking as a starting point for the exploration - the timbre descriptors commonly used by professional musicians when they verbally - describe the sounds they produce with their instrument. Finally, we present how - this multi-level approach can be applied to the study of the timbre space of the - classical guitar.' - address: 'Montreal, Canada' - author: 'Traube, Caroline and Depalle, Philippe and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Traube2003,\n abstract = {In this paper, we describe a multi-level\ - \ approach for the extraction of instrumental gesture parameters taken from the\ - \ characteristics of the signal captured by a microphone and based on the knowledge\ - \ of physical mechanisms taking place on the instrument. We also explore the relationships\ - \ between some features of timbre and gesture parameters, taking as a starting\ - \ point for the exploration the timbre descriptors commonly used by professional\ - \ musicians when they verbally describe the sounds they produce with their instrument.\ - \ Finally, we present how this multi-level approach can be applied to the study\ - \ of the timbre space of the classical guitar.},\n address = {Montreal, Canada},\n\ - \ author = {Traube, Caroline and Depalle, Philippe and Wanderley, Marcelo M.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176567},\n\ - \ issn = {2220-4806},\n keywords = {Signal analysis, indirect acquisition of instrumental\ - \ gesture, guitar},\n pages = {42--47},\n title = {Indirect Acquisition of Instrumental\ - \ Gesture Based on Signal , Physical and Perceptual Information},\n url = {http://www.nime.org/proceedings/2003/nime2003_042.pdf},\n\ - \ year = {2003}\n}\n" + ID: Hollinger2007 + abstract: 'This paper presents an electronic piano keyboard and computer 
mouse designed + for use in a magnetic resonance imaging scanner. The interface allows neuroscientists + studying motor learning of musical tasks to perform functional scans of a subject''s + brain while synchronizing the scanner, auditory and visual stimuli, and auditory + feedback with the onset, offset, and velocity of the piano keys. The design of + the initial prototype and environment-specific issues are described, as well as + prior work in the field. Preliminary results are positive and were unable to show + the existence of image artifacts caused by the interface. Recommendations to improve + the optical assembly are provided in order to increase the robustness of the design. ' + address: 'New York City, NY, United States' + author: 'Hollinger, Avrum and Steele, Christopher and Penhune, Virginia and Zatorre, + Robert and Wanderley, Marcelo M.' + bibtex: "@inproceedings{Hollinger2007,\n abstract = {This paper presents an electronic\ + \ piano keyboard and computer mouse designed for use in a magnetic resonance imaging\ + \ scanner. The interface allows neuroscientists studying motor learning of musical\ + \ tasks to perform functional scans of a subject's brain while synchronizing the\ + \ scanner, auditory and visual stimuli, and auditory feedback with the onset,\ + \ offset, and velocity of the piano keys. The design of the initial prototype\ + \ and environment-specific issues are described, as well as prior work in the\ + \ field. Preliminary results are positive and were unable to show the existence\ + \ of image artifacts caused by the interface. Recommendations to improve the optical\ + \ assembly are provided in order to increase the robustness of the design. 
},\n\ + \ address = {New York City, NY, United States},\n author = {Hollinger, Avrum and\ + \ Steele, Christopher and Penhune, Virginia and Zatorre, Robert and Wanderley,\ + \ Marcelo M.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177119},\n issn\ + \ = {2220-4806},\n keywords = {Input device, MRI-compatible, fMRI, motor learning,\ + \ optical sensing. },\n pages = {246--249},\n title = {fMRI-Compatible Electronic\ + \ Controllers},\n url = {http://www.nime.org/proceedings/2007/nime2007_246.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176567 + doi: 10.5281/zenodo.1177119 issn: 2220-4806 - keywords: 'Signal analysis, indirect acquisition of instrumental gesture, guitar' - pages: 42--47 - title: 'Indirect Acquisition of Instrumental Gesture Based on Signal , Physical - and Perceptual Information' - url: http://www.nime.org/proceedings/2003/nime2003_042.pdf - year: 2003 + keywords: 'Input device, MRI-compatible, fMRI, motor learning, optical sensing. ' + pages: 246--249 + title: fMRI-Compatible Electronic Controllers + url: http://www.nime.org/proceedings/2007/nime2007_246.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Nagashima2003 - abstract: 'This is a report of research and some experimental applications of human-computer - interaction in multi-media performing arts. The human performer and the computer - systems perform computer graphic and computer music interactively in real-time. - In general, many sensors are used for the interactive communication as interfaces, - and the performer receives the output of the system via graphics, sounds and physical - reactions of interfaces like musical instruments. I have produced many types of - interfaces, not only with physical/electrical sensors but also with biological/physiological - sensors. 
This paper is intended as an investigation of some special approaches: - (1) 16-channel electromyogram sensor called “MiniBioMuse-III” and its application - work called “BioCosmicStorm-II” performed in Paris, Kassel and Hamburg in 2001, - (2) sensing/reacting with “breathing” in performing arts, (3) 8-channel electric-feedback - system and its experiments of “body-hearing sounds” and “body-listening to music”.' - address: 'Montreal, Canada' + ID: Nagashima2007 + abstract: 'This is a report of research project about developing novel musical instruments + for interactive computer music. The project''s name - "GHI project" means that + "It might be good that musical instrument shines, isn''t it?" in Japanese. I examined + the essences of musical instruments again on proverb "Taking a lesson from the + past". At the first step, my project targeted and chose "Kendang" - the traditional + musical instrument of Indonesia.' + address: 'New York City, NY, United States' author: 'Nagashima, Yoichi' - bibtex: "@inproceedings{Nagashima2003,\n abstract = {This is a report of research\ - \ and some experimental applications of human-computer interaction in multi-media\ - \ performing arts. The human performer and the computer systems perform computer\ - \ graphic and computer music interactively in real-time. In general, many sensors\ - \ are used for the interactive communication as interfaces, and the performer\ - \ receives the output of the system via graphics, sounds and physical reactions\ - \ of interfaces like musical instruments. I have produced many types of interfaces,\ - \ not only with physical/electrical sensors but also with biological/physiological\ - \ sensors. 
This paper is intended as an investigation of some special approaches:\ - \ (1) 16-channel electromyogram sensor called “MiniBioMuse-III” and its application\ - \ work called “BioCosmicStorm-II” performed in Paris, Kassel and Hamburg in 2001,\ - \ (2) sensing/reacting with “breathing” in performing arts, (3) 8-channel electric-feedback\ - \ system and its experiments of “body-hearing sounds” and “body-listening to music”.},\n\ - \ address = {Montreal, Canada},\n author = {Nagashima, Yoichi},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176539},\n issn = {2220-4806},\n\ - \ pages = {48--53},\n title = {Bio-Sensing Systems and Bio-Feedback Systems for\ - \ Interactive Media Arts},\n url = {http://www.nime.org/proceedings/2003/nime2003_048.pdf},\n\ - \ year = {2003}\n}\n" + bibtex: "@inproceedings{Nagashima2007,\n abstract = {This is a report of research\ + \ project about developing novel musical instruments for interactive computer\ + \ music. The project's name - \"GHI project\" means that \"It might be good that\ + \ musical instrument shines, isn't it?\" in Japanese. I examined the essences\ + \ of musical instruments again on proverb \"Taking a lesson from the past\". 
At\ + \ the first step, my project targeted and chose \"Kendang\" - the traditional\ + \ musical instrument of Indonesia.},\n address = {New York City, NY, United States},\n\ + \ author = {Nagashima, Yoichi},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177205},\n\ + \ issn = {2220-4806},\n keywords = {kendang, media arts, new instruments, sound\ + \ and light},\n pages = {250--253},\n title = {GHI project and \"Cyber Kendang\"\ + },\n url = {http://www.nime.org/proceedings/2007/nime2007_250.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176539 + doi: 10.5281/zenodo.1177205 issn: 2220-4806 - pages: 48--53 - title: Bio-Sensing Systems and Bio-Feedback Systems for Interactive Media Arts - url: http://www.nime.org/proceedings/2003/nime2003_048.pdf - year: 2003 + keywords: 'kendang, media arts, new instruments, sound and light' + pages: 250--253 + title: GHI project and "Cyber Kendang" + url: http://www.nime.org/proceedings/2007/nime2007_250.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Momeni2003 - abstract: 'In this paper, we examine the use of spatial layouts of musicalmaterial - for live performance control. Emphasis is given tosoftware tools that provide - for the simple and intuitivegeometric organization of sound material, sound processingparameters, - and higher-level musical structures.' - address: 'Montreal, Canada' - author: 'Momeni, Ali and Wessel, David' - bibtex: "@inproceedings{Momeni2003,\n abstract = {In this paper, we examine the\ - \ use of spatial layouts of musicalmaterial for live performance control. 
Emphasis\ - \ is given tosoftware tools that provide for the simple and intuitivegeometric\ - \ organization of sound material, sound processingparameters, and higher-level\ - \ musical structures.},\n address = {Montreal, Canada},\n author = {Momeni, Ali\ - \ and Wessel, David},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ - \ = {10.5281/zenodo.1176535},\n issn = {2220-4806},\n keywords = {Perceptual Spaces,\ - \ Graphical Models, Real-time Instruments, Dimensionality Reduction, Multidimensional\ - \ Scaling, Live Performance, Gestural Controllers, Live Interaction, High-level\ - \ Control.},\n pages = {54--62},\n title = {Characterizing and Controlling Musical\ - \ Material Intuitively with Geometric Models},\n url = {http://www.nime.org/proceedings/2003/nime2003_054.pdf},\n\ - \ year = {2003}\n}\n" + ID: Toyoda2007 + abstract: 'This study proposes new possibilities for interaction design pertaining + to music piece creation. Specifically, the study created an environment wherein + a wide range of users are able to easily experience new musical expressions via + a combination of newly developed software and the Nintendo Wii Remote controller. ' + address: 'New York City, NY, United States' + author: 'Toyoda, Shinichiro' + bibtex: "@inproceedings{Toyoda2007,\n abstract = {This study proposes new possibilities\ + \ for interaction design pertaining to music piece creation. Specifically, the\ + \ study created an environment wherein a wide range of users are able to easily\ + \ experience new musical expressions via a combination of newly developed software\ + \ and the Nintendo Wii Remote controller. 
},\n address = {New York City, NY, United\ + \ States},\n author = {Toyoda, Shinichiro},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179465},\n issn = {2220-4806},\n keywords = {Interactive systems,\ + \ improvisation, gesture, composition INTRODUCTION Though music related research\ + \ focusing on the interaction between people and computers is currently experiencing\ + \ wide range development, the history of approaches wherein the creation of new\ + \ musical expression is made possible via the active },\n pages = {254--255},\n\ + \ title = {Sensillum : An Improvisational Approach to Composition},\n url = {http://www.nime.org/proceedings/2007/nime2007_254.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176535 + doi: 10.5281/zenodo.1179465 issn: 2220-4806 - keywords: 'Perceptual Spaces, Graphical Models, Real-time Instruments, Dimensionality - Reduction, Multidimensional Scaling, Live Performance, Gestural Controllers, Live - Interaction, High-level Control.' 
- pages: 54--62 - title: Characterizing and Controlling Musical Material Intuitively with Geometric - Models - url: http://www.nime.org/proceedings/2003/nime2003_054.pdf - year: 2003 + keywords: 'Interactive systems, improvisation, gesture, composition INTRODUCTION + Though music related research focusing on the interaction between people and computers + is currently experiencing wide range development, the history of approaches wherein + the creation of new musical expression is made possible via the active ' + pages: 254--255 + title: 'Sensillum : An Improvisational Approach to Composition' + url: http://www.nime.org/proceedings/2007/nime2007_254.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Burtner2003 - abstract: This paper explores compositional and notational approaches for working - with controllers. The notational systems devised for the composition (dis)Appearances - are discussed in depth in an attempt to formulate a new approach to composition - using ensembles that navigates a performative space between reality and virtuality. - address: 'Montreal, Canada' - author: 'Burtner, Matthew' - bibtex: "@inproceedings{Burtner2003,\n abstract = {This paper explores compositional\ - \ and notational approaches for working with controllers. 
The notational systems\ - \ devised for the composition (dis)Appearances are discussed in depth in an attempt\ - \ to formulate a new approach to composition using ensembles that navigates a\ - \ performative space between reality and virtuality.},\n address = {Montreal,\ - \ Canada},\n author = {Burtner, Matthew},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176492},\n issn = {2220-4806},\n keywords = {Composition,\ - \ notation systems, virtual reality, controllers, physical modeling, string, violin.},\n\ - \ pages = {63--69},\n title = {Composing for the (dis)Embodied Ensemble : Notational\ - \ Systems in (dis)Appearances},\n url = {http://www.nime.org/proceedings/2003/nime2003_063.pdf},\n\ - \ year = {2003}\n}\n" + ID: Gruenbaum2007 + abstract: 'Almost all traditional musical instruments have a one-to-one correspondence + between a given fingering and the pitch that sounds for that fingering. The Samchillian + Tip Tip Tip Cheeepeeeee does not --- it is a keyboard MIDI controller that is + based on intervals rather than fixed pitches. That is, a given keypress will sound + a pitch a number of steps away from the last note sounded (within the key signature + and scale selected) according to the ''delta'' value assigned to that key. The + advantages of such a system are convenience, speed, and the ability to play difficult, + unusual and/or unintended passages extemporaneously. ' + address: 'New York City, NY, United States' + author: 'Gruenbaum, Leon' + bibtex: "@inproceedings{Gruenbaum2007,\n abstract = {Almost all traditional musical\ + \ instruments have a one-to-one correspondence between a given fingering and the\ + \ pitch that sounds for that fingering. The Samchillian Tip Tip Tip Cheeepeeeee\ + \ does not --- it is a keyboard MIDI controller that is based on intervals rather\ + \ than fixed pitches. 
That is, a given keypress will sound a pitch a number of\ + \ steps away from the last note sounded (within the key signature and scale selected)\ + \ according to the 'delta' value assigned to that key. The advantages of such\ + \ a system are convenience, speed, and the ability to play difficult, unusual\ + \ and/or unintended passages extemporaneously. },\n address = {New York City,\ + \ NY, United States},\n author = {Gruenbaum, Leon},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177103},\n issn = {2220-4806},\n keywords = {samchillian,\ + \ keyboard, MIDI controller, relative, interval, microtonal, computer keyboard,\ + \ pitch, musical instrument },\n pages = {256--259},\n title = {The Samchillian\ + \ Tip Tip Tip Cheeepeeeee : A Relativistic Keyboard Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_256.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176492 + doi: 10.5281/zenodo.1177103 issn: 2220-4806 - keywords: 'Composition, notation systems, virtual reality, controllers, physical - modeling, string, violin.' 
- pages: 63--69 - title: 'Composing for the (dis)Embodied Ensemble : Notational Systems in (dis)Appearances' - url: http://www.nime.org/proceedings/2003/nime2003_063.pdf - year: 2003 + keywords: 'samchillian, keyboard, MIDI controller, relative, interval, microtonal, + computer keyboard, pitch, musical instrument ' + pages: 256--259 + title: 'The Samchillian Tip Tip Tip Cheeepeeeee : A Relativistic Keyboard Instrument' + url: http://www.nime.org/proceedings/2007/nime2007_256.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Jorda2003 - abstract: 'This paper first introduces two previous software-based musicinstruments - designed by the author, and analyses the crucialimportance of the visual feedback - introduced by theirinterfaces. A quick taxonomy and analysis of the visualcomponents - in current trends of interactive music software isthen proposed, before introducing - the reacTable*, a newproject that is currently under development. The reacTable* - isa collaborative music instrument, aimed both at novices andadvanced musicians, - which employs computer vision andtangible interfaces technologies, and pushes - further the visualfeedback interface ideas and techniques aforementioned.' - address: 'Montreal, Canada' - author: 'Jordà, Sergi' - bibtex: "@inproceedings{Jorda2003,\n abstract = {This paper first introduces two\ - \ previous software-based musicinstruments designed by the author, and analyses\ - \ the crucialimportance of the visual feedback introduced by theirinterfaces.\ - \ A quick taxonomy and analysis of the visualcomponents in current trends of interactive\ - \ music software isthen proposed, before introducing the reacTable*, a newproject\ - \ that is currently under development. 
The reacTable* isa collaborative music\ - \ instrument, aimed both at novices andadvanced musicians, which employs computer\ - \ vision andtangible interfaces technologies, and pushes further the visualfeedback\ - \ interface ideas and techniques aforementioned.},\n address = {Montreal, Canada},\n\ - \ author = {Jord\\`{a}, Sergi},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176519},\n issn = {2220-4806},\n keywords = {Interactive\ - \ music instruments, audio visualization, visual interfaces, visual feedback,\ - \ tangible interfaces, computer vision, augmented reality, music instruments for\ - \ novices, collaborative music.},\n pages = {70--76},\n title = {Sonigraphical\ - \ Instruments: From {FM}OL to the reacTable*},\n url = {http://www.nime.org/proceedings/2003/nime2003_070.pdf},\n\ - \ year = {2003}\n}\n" + ID: Freeman2007 + abstract: 'Graph Theory links the creative music-making activities of web site visitors + to the dynamic generation of an instrumental score for solo violin. Participants + use a web-based interface to navigate among short, looping musical fragments to + create their own unique path through the open-form composition. Before each concert + performance, the violinist prints out a new copy of the score that orders the + fragments based on the decisions made by web visitors. ' + address: 'New York City, NY, United States' + author: 'Freeman, Jason' + bibtex: "@inproceedings{Freeman2007,\n abstract = {Graph Theory links the creative\ + \ music-making activities of web site visitors to the dynamic generation of an\ + \ instrumental score for solo violin. Participants use a web-based interface to\ + \ navigate among short, looping musical fragments to create their own unique path\ + \ through the open-form composition. 
Before each concert performance, the violinist\ + \ prints out a new copy of the score that orders the fragments based on the decisions\ + \ made by web visitors. },\n address = {New York City, NY, United States},\n author\ + \ = {Freeman, Jason},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177095},\n\ + \ issn = {2220-4806},\n keywords = {Music, Composition, Residency, Audience Interaction,\ + \ Collaboration, Violin, Graph, Flash, Internet, Traveling Salesman. },\n pages\ + \ = {260--263},\n title = {Graph Theory : Interfacing Audiences Into the Compositional\ + \ Process},\n url = {http://www.nime.org/proceedings/2007/nime2007_260.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176519 + doi: 10.5281/zenodo.1177095 issn: 2220-4806 - keywords: 'Interactive music instruments, audio visualization, visual interfaces, - visual feedback, tangible interfaces, computer vision, augmented reality, music - instruments for novices, collaborative music.' - pages: 70--76 - title: 'Sonigraphical Instruments: From FMOL to the reacTable*' - url: http://www.nime.org/proceedings/2003/nime2003_070.pdf - year: 2003 + keywords: 'Music, Composition, Residency, Audience Interaction, Collaboration, Violin, + Graph, Flash, Internet, Traveling Salesman. ' + pages: 260--263 + title: 'Graph Theory : Interfacing Audiences Into the Compositional Process' + url: http://www.nime.org/proceedings/2007/nime2007_260.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Hatanaka2003 - abstract: 'A handheld electronic musical instrument, named the BentoBox, was developed. - The motivation was to develop aninstrument which one can easily carry around and - play inmoments of free time, for example when riding public transportation or - during short breaks at work. 
The device wasdesigned to enable quick learning by - having various scalesprogrammed for different styles of music, and also beexpressive - by having hand controlled timbral effects whichcan be manipulated while playing. - Design analysis anditeration lead to a compact and ergonomic device. This paperfocuses - on the ergonomic design process of the hardware.' - address: 'Montreal, Canada' - author: 'Hatanaka, Motohide' - bibtex: "@inproceedings{Hatanaka2003,\n abstract = {A handheld electronic musical\ - \ instrument, named the BentoBox, was developed. The motivation was to develop\ - \ aninstrument which one can easily carry around and play inmoments of free time,\ - \ for example when riding public transportation or during short breaks at work.\ - \ The device wasdesigned to enable quick learning by having various scalesprogrammed\ - \ for different styles of music, and also beexpressive by having hand controlled\ - \ timbral effects whichcan be manipulated while playing. Design analysis anditeration\ - \ lead to a compact and ergonomic device. This paperfocuses on the ergonomic design\ - \ process of the hardware.},\n address = {Montreal, Canada},\n author = {Hatanaka,\ - \ Motohide},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176509},\n\ - \ issn = {2220-4806},\n keywords = {MIDI controller, electronic musical instrument,\ - \ musical instrument design, ergonomics, playability, human computer interface.\ - \ },\n pages = {77--82},\n title = {Ergonomic Design of A Portable Musical Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2003/nime2003_077.pdf},\n year = {2003}\n\ - }\n" + ID: Villar2007 + abstract: 'This paper describes the design and implementation of a new interface + prototype for live music mixing. 
The ColorDex system employs a completely new + operational metaphor which allows the mix DJ to prepare up to six tracks at once, + and perform mixes between up to three of those at a time. The basic premises of + the design are: 1) Build a performance tool that multiplies the possible choices + a DJ has in respect in how and when tracks are prepared and mixed; 2) Design the + system in such a way that the tool does not overload the performer with unnecessary + complexity, and 3) Make use of novel technology to make the performance of live + music mixing more engaging for both the performer and the audience. The core components + of the system are: A software program to load, visualize and playback digitally + encoded tracks; the HDDJ device (built chiefly out of a repurposed hard disk drive), + which provides tactile manipulation of the playback speed and position of tracks; + and the Cubic Crossfader, a wireless sensor cube that controls of the volume of + individual tracks, and allows the DJ to mix these in interesting ways. ' + address: 'New York City, NY, United States' + author: 'Villar, Nicolas and Gellersen, Hans and Jervis, Matt and Lang, Alexander' + bibtex: "@inproceedings{Villar2007,\n abstract = {This paper describes the design\ + \ and implementation of a new interface prototype for live music mixing. The ColorDex\ + \ system employs a completely new operational metaphor which allows the mix DJ\ + \ to prepare up to six tracks at once, and perform mixes between up to three of\ + \ those at a time. The basic premises of the design are: 1) Build a performance\ + \ tool that multiplies the possible choices a DJ has in respect in how and when\ + \ tracks are prepared and mixed; 2) Design the system in such a way that the tool\ + \ does not overload the performer with unnecessary complexity, and 3) Make use\ + \ of novel technology to make the performance of live music mixing more engaging\ + \ for both the performer and the audience. 
The core components of the system are:\ + \ A software program to load, visualize and playback digitally encoded tracks;\ + \ the HDDJ device (built chiefly out of a repurposed hard disk drive), which provides\ + \ tactile manipulation of the playback speed and position of tracks; and the Cubic\ + \ Crossfader, a wireless sensor cube that controls of the volume of individual\ + \ tracks, and allows the DJ to mix these in interesting ways. },\n address = {New\ + \ York City, NY, United States},\n author = {Villar, Nicolas and Gellersen, Hans\ + \ and Jervis, Matt and Lang, Alexander},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179475},\n\ + \ issn = {2220-4806},\n keywords = {Novel interfaces, live music-mixing, cube-based\ + \ interfaces, crossfading, repurposing HDDs, accelerometer-based cubic control\ + \ },\n pages = {264--269},\n title = {The ColorDex DJ System : A New Interface\ + \ for Live Music Mixing},\n url = {http://www.nime.org/proceedings/2007/nime2007_264.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176509 + doi: 10.5281/zenodo.1179475 issn: 2220-4806 - keywords: 'MIDI controller, electronic musical instrument, musical instrument design, - ergonomics, playability, human computer interface. 
' - pages: 77--82 - title: Ergonomic Design of A Portable Musical Instrument - url: http://www.nime.org/proceedings/2003/nime2003_077.pdf - year: 2003 + keywords: 'Novel interfaces, live music-mixing, cube-based interfaces, crossfading, + repurposing HDDs, accelerometer-based cubic control ' + pages: 264--269 + title: 'The ColorDex DJ System : A New Interface for Live Music Mixing' + url: http://www.nime.org/proceedings/2007/nime2007_264.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Shiraiwa2003 - abstract: 'This paper presents a novel use of a chemical experiments’ framework - as a control layer and sound source in a con- cert situation. Signal fluctuations - from electrolytic batteries made out of household chemicals, and acoustic samples - obtained from an acid/base reaction are used for musical purposes beyond the standard - data sonification role. The batteries are controlled in handy ways such as warming, - stirring and pouring that are also visually engaging. Audio mappings include synthetic - and sampled sounds completing a recipe that concocts a live performance of computer - music.' - address: 'Montreal, Canada' - author: 'Shiraiwa, Hiroko and Segnini, Rodrigo and Woo, Vivian' - bibtex: "@inproceedings{Shiraiwa2003,\n abstract = {This paper presents a novel\ - \ use of a chemical experiments’ framework as a control layer and sound source\ - \ in a con- cert situation. Signal fluctuations from electrolytic batteries made\ - \ out of household chemicals, and acoustic samples obtained from an acid/base\ - \ reaction are used for musical purposes beyond the standard data sonification\ - \ role. The batteries are controlled in handy ways such as warming, stirring and\ - \ pouring that are also visually engaging. 
Audio mappings include synthetic and\ - \ sampled sounds completing a recipe that concocts a live performance of computer\ - \ music.},\n address = {Montreal, Canada},\n author = {Shiraiwa, Hiroko and Segnini,\ - \ Rodrigo and Woo, Vivian},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ - \ = {10.5281/zenodo.1176561},\n issn = {2220-4806},\n keywords = {Chemical music,\ - \ Applied chemistry, Battery Controller.},\n pages = {83--86},\n title = {Sound\ - \ Kitchen: Designing a Chemically Controlled Musical Performance},\n url = {http://www.nime.org/proceedings/2003/nime2003_083.pdf},\n\ - \ year = {2003}\n}\n" + ID: Dahl2007 + abstract: 'In this paper, we describe a musical controller – the WaveSaw – for directly + manipulating a wavetable. The WaveSaw consists of a long, flexible metal strip + with handles on either end, somewhat analogous to a saw. The user plays the WaveSaw + by holding the handles and bending the metal strip. We use sensors to measure + the strip’s curvature and reconstruct its shape as a wavetable stored in a computer. + This provides a direct gestural mapping from the shape of the WaveSaw to the timbral + characteristics of the computer-generated sound. Additional sensors provide control + of pitch, amplitude, and other musical parameters.' + address: 'New York City, NY, United States' + author: 'Dahl, Luke and Whetsell, Nathan and Van Stoecker, John' + bibtex: "@inproceedings{Dahl2007,\n abstract = {In this paper, we describe a musical\ + \ controller – the WaveSaw – for directly manipulating a wavetable. The WaveSaw\ + \ consists of a long, flexible metal strip with handles on either end, somewhat\ + \ analogous to a saw. The user plays the WaveSaw by holding the handles and bending\ + \ the metal strip. We use sensors to measure the strip’s curvature and reconstruct\ + \ its shape as a wavetable stored in a computer. 
This provides a direct gestural\ + \ mapping from the shape of the WaveSaw to the timbral characteristics of the\ + \ computer-generated sound. Additional sensors provide control of pitch, amplitude,\ + \ and other musical parameters.},\n address = {New York City, NY, United States},\n\ + \ author = {Dahl, Luke and Whetsell, Nathan and Van Stoecker, John},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177079},\n issn = {2220-4806},\n keywords\ + \ = {Musical controller, Puredata, scanned synthesis, flex sensors. },\n pages\ + \ = {270--272},\n title = {The WaveSaw : A Flexible Instrument for Direct Timbral\ + \ Manipulation},\n url = {http://www.nime.org/proceedings/2007/nime2007_270.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176561 + doi: 10.5281/zenodo.1177079 issn: 2220-4806 - keywords: 'Chemical music, Applied chemistry, Battery Controller.' - pages: 83--86 - title: 'Sound Kitchen: Designing a Chemically Controlled Musical Performance' - url: http://www.nime.org/proceedings/2003/nime2003_083.pdf - year: 2003 + keywords: 'Musical controller, Puredata, scanned synthesis, flex sensors. ' + pages: 270--272 + title: 'The WaveSaw : A Flexible Instrument for Direct Timbral Manipulation' + url: http://www.nime.org/proceedings/2007/nime2007_270.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Ryan2003 - abstract: 'This report details work on the interdisciplinary mediaproject TGarden. - The authors discuss the challengesencountered while developing a responsive musicalenvironment - for the general public involving wearable,sensor-integrated clothing as the central - interface and input device. 
The project''s dramaturgical andtechnical/implementation - background are detailed toprovide a framework for the creation of a responsive - hardwareand software system that reinforces a tangible relationshipbetween the - participant''s improvised movement and musicalresponse. Finally, the authors take - into consideration testingscenarios gathered from public prototypes in two Europeanlocales - in 2001 to evaluate user experience of the system.' - address: 'Montreal, Canada' - author: 'Ryan, Joel and Salter, Christopher L.' - bibtex: "@inproceedings{Ryan2003,\n abstract = {This report details work on the\ - \ interdisciplinary mediaproject TGarden. The authors discuss the challengesencountered\ - \ while developing a responsive musicalenvironment for the general public involving\ - \ wearable,sensor-integrated clothing as the central interface and input device.\ - \ The project's dramaturgical andtechnical/implementation background are detailed\ - \ toprovide a framework for the creation of a responsive hardwareand software\ - \ system that reinforces a tangible relationshipbetween the participant's improvised\ - \ movement and musicalresponse. 
Finally, the authors take into consideration testingscenarios\ - \ gathered from public prototypes in two Europeanlocales in 2001 to evaluate user\ - \ experience of the system.},\n address = {Montreal, Canada},\n author = {Ryan,\ - \ Joel and Salter, Christopher L.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176555},\n issn = {2220-4806},\n keywords = {Gesture,\ - \ interaction, embodied action, enaction, physical model, responsive environment,\ - \ interactive musical systems, affordance, interface, phenomenology, energy, kinetics,\ - \ time constant, induced ballistics, wearable computing, accelerometer, audience\ - \ participation, dynamical system, dynamic compliance, effort, wearable instrument,\ - \ augmented physicality. },\n pages = {87--90},\n title = {TGarden: Wearable Instruments\ - \ and Augmented Physicality},\n url = {http://www.nime.org/proceedings/2003/nime2003_087.pdf},\n\ - \ year = {2003}\n}\n" + ID: Bennett2007 + abstract: 'This paper proposes that the physicality of an instrument be considered + an important aspect in the design of new interfaces for musical expression. The + use of Laban''s theory of effort in the design of new effortful interfaces, in + particular looking at effortspace modulation, is investigated, and a platform + for effortful interface development (named the DAMPER) is described. Finally, + future work is described and further areas of research are highlighted. ' + address: 'New York City, NY, United States' + author: 'Bennett, Peter and Ward, Nicholas and O''Modhrain, Sile and Rebelo, Pedro' + bibtex: "@inproceedings{Bennett2007,\n abstract = {This paper proposes that the\ + \ physicality of an instrument be considered an important aspect in the design\ + \ of new interfaces for musical expression. 
The use of Laban's theory of effort\ + \ in the design of new effortful interfaces, in particular looking at effortspace\ + \ modulation, is investigated, and a platform for effortful interface development\ + \ (named the DAMPER) is described. Finally, future work is described and further\ + \ areas of research are highlighted. },\n address = {New York City, NY, United\ + \ States},\n author = {Bennett, Peter and Ward, Nicholas and O'Modhrain, Sile\ + \ and Rebelo, Pedro},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177041},\n\ + \ issn = {2220-4806},\n keywords = {Effortful Interaction. Haptics. Laban Analysis.\ + \ Physicality. HCI. },\n pages = {273--276},\n title = {DAMPER : A Platform for\ + \ Effortful Interface Development},\n url = {http://www.nime.org/proceedings/2007/nime2007_273.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176555 + doi: 10.5281/zenodo.1177041 issn: 2220-4806 - keywords: 'Gesture, interaction, embodied action, enaction, physical model, responsive - environment, interactive musical systems, affordance, interface, phenomenology, - energy, kinetics, time constant, induced ballistics, wearable computing, accelerometer, - audience participation, dynamical system, dynamic compliance, effort, wearable - instrument, augmented physicality. ' - pages: 87--90 - title: 'TGarden: Wearable Instruments and Augmented Physicality' - url: http://www.nime.org/proceedings/2003/nime2003_087.pdf - year: 2003 + keywords: 'Effortful Interaction. Haptics. Laban Analysis. Physicality. HCI. 
' + pages: 273--276 + title: 'DAMPER : A Platform for Effortful Interface Development' + url: http://www.nime.org/proceedings/2007/nime2007_273.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Ventura2003 - abstract: 'We present a sensor-doll interface as a musical outlet forpersonal expression. - A doll serves the dual role of being bothan expressive agent and a playmate by - allowing solo andaccompanied performance. An internal computer and sensorsystem - allow the doll to receive input from the user and itssurroundings, and then respond - accordingly with musicalfeedback. Sets of musical timbres and melodies may bechanged - by presenting the doll with a series of themed clothhats, each suggesting a different - style of play. The doll mayperform by itself and play a number of melodies, or - it maycollaborate with the user when its limbs are squeezed or bent.Shared play - is further encouraged by a basic set of aural tonesmimicking conversation.' - address: 'Montreal, Canada' - author: 'Ventura, David and Mase, Kenji' - bibtex: "@inproceedings{Ventura2003,\n abstract = {We present a sensor-doll interface\ - \ as a musical outlet forpersonal expression. A doll serves the dual role of being\ - \ bothan expressive agent and a playmate by allowing solo andaccompanied performance.\ - \ An internal computer and sensorsystem allow the doll to receive input from the\ - \ user and itssurroundings, and then respond accordingly with musicalfeedback.\ - \ Sets of musical timbres and melodies may bechanged by presenting the doll with\ - \ a series of themed clothhats, each suggesting a different style of play. 
The\ - \ doll mayperform by itself and play a number of melodies, or it maycollaborate\ - \ with the user when its limbs are squeezed or bent.Shared play is further encouraged\ - \ by a basic set of aural tonesmimicking conversation.},\n address = {Montreal,\ - \ Canada},\n author = {Ventura, David and Mase, Kenji},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176569},\n issn = {2220-4806},\n\ - \ keywords = {Musical improvisation, toy interface agent, sensor doll, context\ - \ awareness. },\n pages = {91--94},\n title = {Duet Musical Companion: Improvisational\ - \ Interfaces for Children},\n url = {http://www.nime.org/proceedings/2003/nime2003_091.pdf},\n\ - \ year = {2003}\n}\n" + ID: Francois2007 + abstract: 'This paper describes the design of Mimi, a multi-modal interactive musical + improvisation system that explores the potential and powerful impact of visual + feedback in performermachine interaction. Mimi is a performer-centric tool designed + for use in performance and teaching. Its key andnovel component is its visual + interface, designed to providethe performer with instantaneous and continuous + information on the state of the system. For human improvisation,in which context + and planning are paramount, the relevantstate of the system extends to the near + future and recentpast. Mimi''s visual interface allows for a peculiar blendof + raw reflex typically associated with improvisation, andpreparation and timing + more closely affiliated with scorebased reading. Mimi is not only an effective + improvisationpartner, it has also proven itself to be an invaluable platformthrough + which to interrogate the mental models necessaryfor successful improvisation.' + address: 'New York City, NY, United States' + author: 'François, Alexandre R. 
and Chew, Elaine and Thurmond, Dennis' + bibtex: "@inproceedings{Francois2007,\n abstract = {This paper describes the design\ + \ of Mimi, a multi-modal interactive musical improvisation system that explores\ + \ the potential and powerful impact of visual feedback in performermachine interaction.\ + \ Mimi is a performer-centric tool designed for use in performance and teaching.\ + \ Its key andnovel component is its visual interface, designed to providethe performer\ + \ with instantaneous and continuous information on the state of the system. For\ + \ human improvisation,in which context and planning are paramount, the relevantstate\ + \ of the system extends to the near future and recentpast. Mimi's visual interface\ + \ allows for a peculiar blendof raw reflex typically associated with improvisation,\ + \ andpreparation and timing more closely affiliated with scorebased reading. Mimi\ + \ is not only an effective improvisationpartner, it has also proven itself to\ + \ be an invaluable platformthrough which to interrogate the mental models necessaryfor\ + \ successful improvisation.},\n address = {New York City, NY, United States},\n\ + \ author = {Fran\\c{c}ois, Alexandre R. 
and Chew, Elaine and Thurmond, Dennis},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177091},\n issn = {2220-4806},\n\ + \ keywords = {Performer-machine interaction, visualization design, machine improvisation\ + \ },\n pages = {277--280},\n title = {Visual Feedback in Performer-Machine Interaction\ + \ for Musical Improvisation},\n url = {http://www.nime.org/proceedings/2007/nime2007_277.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176569 + doi: 10.5281/zenodo.1177091 issn: 2220-4806 - keywords: 'Musical improvisation, toy interface agent, sensor doll, context awareness. ' - pages: 91--94 - title: 'Duet Musical Companion: Improvisational Interfaces for Children' - url: http://www.nime.org/proceedings/2003/nime2003_091.pdf - year: 2003 + keywords: 'Performer-machine interaction, visualization design, machine improvisation ' + pages: 277--280 + title: Visual Feedback in Performer-Machine Interaction for Musical Improvisation + url: http://www.nime.org/proceedings/2007/nime2007_277.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Howard2003 - abstract: 'A physical modelling music synthesis system known as ‘Cymatic’ is described - that enables ‘virtual instruments’ to be controlled in real-time via a force-feedback - joystick and a force-feedback mouse. These serve to provide the user with gestural - controllers whilst in addition giving tactile feedback to the user. Cymatic virtual - instruments are set up via a graphical user interface in a manner that is highly - intuitive. Users design and play these virtual instruments by interacting directly - with their physical shape and structure in terms of the physical properties of - basic objects such as strings, membranes and solids which can be interconnected - to form complex structures. 
The virtual instrument can be excited at any point - mass by the following: bowing, plucking, striking, sine/square/sawtooth/random - waveform, or an external sound source. Virtual microphones can be placed at any - point masses to deliver the acoustic output. This paper describes the underlying - structure and principles upon which Cymatic is based, and illustrates its acoustic - output.' - address: 'Montreal, Canada' - author: 'Howard, David M. and Rimell, Stuart and Hunt, Andy D.' - bibtex: "@inproceedings{Howard2003,\n abstract = {A physical modelling music synthesis\ - \ system known as ‘Cymatic’ is described that enables ‘virtual instruments’ to\ - \ be controlled in real-time via a force-feedback joystick and a force-feedback\ - \ mouse. These serve to provide the user with gestural controllers whilst in addition\ - \ giving tactile feedback to the user. Cymatic virtual instruments are set up\ - \ via a graphical user interface in a manner that is highly intuitive. Users design\ - \ and play these virtual instruments by interacting directly with their physical\ - \ shape and structure in terms of the physical properties of basic objects such\ - \ as strings, membranes and solids which can be interconnected to form complex\ - \ structures. The virtual instrument can be excited at any point mass by the following:\ - \ bowing, plucking, striking, sine/square/sawtooth/random waveform, or an external\ - \ sound source. Virtual microphones can be placed at any point masses to deliver\ - \ the acoustic output. This paper describes the underlying structure and principles\ - \ upon which Cymatic is based, and illustrates its acoustic output.},\n address\ - \ = {Montreal, Canada},\n author = {Howard, David M. 
and Rimell, Stuart and Hunt,\ - \ Andy D.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176515},\n\ - \ issn = {2220-4806},\n keywords = {Physical modeling, haptic controllers, gesture\ - \ control, force feedback.},\n pages = {95--98},\n title = {Force Feedback Gesture\ - \ Controlled Physical Modelling Synthesis},\n url = {http://www.nime.org/proceedings/2003/nime2003_095.pdf},\n\ - \ year = {2003}\n}\n" + ID: Poepel2007 + abstract: 'Many fascinating new developments in the area bowed stringed instruments + have been developed in recent years. However, the majority of these new applications + are either not well known, used orconsidered in a broader context by their target + users. The necessaryexchange between the world of developers and the players is + ratherlimited. A group of performers, researchers, instrument developersand composers + was founded in order to share expertise and experiences and to give each other + feedback on the work done to developnew instruments. Instruments incorporating + new interfaces, synthesis methods, sensor technology, new materials like carbon + fiber andwood composites as well as composite materials and research outcome are + presented and discussed in the group. This paper gives anintroduction to the group + and reports about activities and outcomesin the last two years.' + address: 'New York City, NY, United States' + author: 'Poepel, Cornelius and Marx, Günter' + bibtex: "@inproceedings{Poepel2007,\n abstract = {Many fascinating new developments\ + \ in the area bowed stringed instruments have been developed in recent years.\ + \ However, the majority of these new applications are either not well known, used\ + \ orconsidered in a broader context by their target users. The necessaryexchange\ + \ between the world of developers and the players is ratherlimited. 
A group of\ + \ performers, researchers, instrument developersand composers was founded in order\ + \ to share expertise and experiences and to give each other feedback on the work\ + \ done to developnew instruments. Instruments incorporating new interfaces, synthesis\ + \ methods, sensor technology, new materials like carbon fiber andwood composites\ + \ as well as composite materials and research outcome are presented and discussed\ + \ in the group. This paper gives anintroduction to the group and reports about\ + \ activities and outcomesin the last two years.},\n address = {New York City,\ + \ NY, United States},\n author = {Poepel, Cornelius and Marx, G\\''{u}nter},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177221},\n issn = {2220-4806},\n\ + \ keywords = {Interdisciplinary user group, electronic bowed string instrument,\ + \ evaluation of computer based musical instruments },\n pages = {281--284},\n\ + \ title = {>hot\\_strings SIG},\n url = {http://www.nime.org/proceedings/2007/nime2007_281.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176515 + doi: 10.5281/zenodo.1177221 issn: 2220-4806 - keywords: 'Physical modeling, haptic controllers, gesture control, force feedback.' 
- pages: 95--98 - title: Force Feedback Gesture Controlled Physical Modelling Synthesis - url: http://www.nime.org/proceedings/2003/nime2003_095.pdf - year: 2003 + keywords: 'Interdisciplinary user group, electronic bowed string instrument, evaluation + of computer based musical instruments ' + pages: 281--284 + title: '>hot_strings SIG' + url: http://www.nime.org/proceedings/2007/nime2007_281.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Hoskinson2003 - abstract: 'We describe the design and implementation of an adaptive system to map - control parameters to modal audio synthesis parameters in real-time. The modal - parameters describe the linear response of a virtual vibrating solid, which is - played as a musical instrument by a separate interface. The system uses a three - layer feedforward backpropagation neural network which is trained by a discrete - set of input-output examples. After training, the network extends the training - set, which functions as the specification by example of the controller, to a continuous - mapping allowing the real-time morphing of synthetic sound models. We have implemented - a prototype application using a controller which collects data from a hand-drawn - digital picture. The virtual instrument consists of a bank of modal resonators - whose frequencies, dampings, and gains are the parameters we control. We train - the system by providing pictorial representations of physical objects such as - a bell or a lamp, and associate high quality modal models obtained from measurements - on real objects with these inputs. After training, the user can draw pictures - interactively and “play” modal models which provide interesting (though unrealistic) - interpolations of the models from the training set in real-time.' - address: 'Montreal, Canada' - author: 'Hoskinson, Reynald and van den Doel, Kees and Fels, Sidney S.' 
- bibtex: "@inproceedings{Hoskinson2003,\n abstract = {We describe the design and\ - \ implementation of an adaptive system to map control parameters to modal audio\ - \ synthesis parameters in real-time. The modal parameters describe the linear\ - \ response of a virtual vibrating solid, which is played as a musical instrument\ - \ by a separate interface. The system uses a three layer feedforward backpropagation\ - \ neural network which is trained by a discrete set of input-output examples.\ - \ After training, the network extends the training set, which functions as the\ - \ specification by example of the controller, to a continuous mapping allowing\ - \ the real-time morphing of synthetic sound models. We have implemented a prototype\ - \ application using a controller which collects data from a hand-drawn digital\ - \ picture. The virtual instrument consists of a bank of modal resonators whose\ - \ frequencies, dampings, and gains are the parameters we control. We train the\ - \ system by providing pictorial representations of physical objects such as a\ - \ bell or a lamp, and associate high quality modal models obtained from measurements\ - \ on real objects with these inputs. 
After training, the user can draw pictures\ - \ interactively and “play” modal models which provide interesting (though unrealistic)\ - \ interpolations of the models from the training set in real-time.},\n address\ - \ = {Montreal, Canada},\n author = {Hoskinson, Reynald and van den Doel, Kees\ - \ and Fels, Sidney S.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ - \ = {10.5281/zenodo.1176513},\n issn = {2220-4806},\n pages = {99--103},\n title\ - \ = {Real-time Adaptive Control of Modal Synthesis},\n url = {http://www.nime.org/proceedings/2003/nime2003_099.pdf},\n\ - \ year = {2003}\n}\n" + ID: Cook2007 + abstract: 'Description of a project, inspired by the theory of affordance, exploring + the issues of visceral expression and audience engagement in the realm of computer + performance. Describes interaction design research techniques in novel application, + used to engage and gain insight into the culture and mindset of the improvising + musician. This research leads to the design and implementation of a prototype + system that allows musicians to play an object of their choice as a musical instrument.' + address: 'New York City, NY, United States' + author: 'Cook, Andrew A. and Pullin, Graham' + bibtex: "@inproceedings{Cook2007,\n abstract = {Description of a project, inspired\ + \ by the theory of affordance, exploring the issues of visceral expression and\ + \ audience engagement in the realm of computer performance. Describes interaction\ + \ design research techniques in novel application, used to engage and gain insight\ + \ into the culture and mindset of the improvising musician. This research leads\ + \ to the design and implementation of a prototype system that allows musicians\ + \ to play an object of their choice as a musical instrument.},\n address = {New\ + \ York City, NY, United States},\n author = {Cook, Andrew A. 
and Pullin, Graham},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177077},\n issn = {2220-4806},\n\ + \ keywords = {1,affordance,background and problem space,cultural probes,design\ + \ research,improvisation,interaction design,nime07,performance},\n pages = {285--288},\n\ + \ title = {Tactophonics : Your Favourite Thing Wants to Sing},\n url = {http://www.nime.org/proceedings/2007/nime2007_285.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176513 + doi: 10.5281/zenodo.1177077 issn: 2220-4806 - pages: 99--103 - title: Real-time Adaptive Control of Modal Synthesis - url: http://www.nime.org/proceedings/2003/nime2003_099.pdf - year: 2003 + keywords: '1,affordance,background and problem space,cultural probes,design research,improvisation,interaction + design,nime07,performance' + pages: 285--288 + title: 'Tactophonics : Your Favourite Thing Wants to Sing' + url: http://www.nime.org/proceedings/2007/nime2007_285.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Young2003a - abstract: 'Driving a bowed string physical model using a bow controller, we explore - the potentials of using the real gestures of a violinist to simulate violin sound - using a virtual instrument. After a description of the software and hardware developed, - preliminary results and future work are discussed.' - address: 'Montreal, Canada' - author: 'Young, Diana and Serafin, Stefania' - bibtex: "@inproceedings{Young2003a,\n abstract = {Driving a bowed string physical\ - \ model using a bow controller, we explore the potentials of using the real gestures\ - \ of a violinist to simulate violin sound using a virtual instrument. 
After a\ - \ description of the software and hardware developed, preliminary results and\ - \ future work are discussed.},\n address = {Montreal, Canada},\n author = {Young,\ - \ Diana and Serafin, Stefania},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176579},\n issn = {2220-4806},\n pages = {104--108},\n\ - \ title = {Playability Evaluation of a Virtual Bowed String Instrument},\n url\ - \ = {http://www.nime.org/proceedings/2003/nime2003_104.pdf},\n year = {2003}\n\ - }\n" + ID: Perez2007 + abstract: 'In this paper, we describe the composition of a piece for choir and Integral + Music Controller. We focus more on the aesthetic, conceptual, and practical aspects + of the interface and less on the technological details. We especially stress the + influence that the designed interface poses on the compositional process and how + we approach the expressive organisation of musical materials during the composition + of the piece, as well as the addition of nuances (personal real-time expression) + by the musicians at performance time. ' + address: 'New York City, NY, United States' + author: 'Pérez, Miguel A. and Knapp, Benjamin and Alcorn, Michael' + bibtex: "@inproceedings{Perez2007,\n abstract = {In this paper, we describe the\ + \ composition of a piece for choir and Integral Music Controller. We focus more\ + \ on the aesthetic, conceptual, and practical aspects of the interface and less\ + \ on the technological details. We especially stress the influence that the designed\ + \ interface poses on the compositional process and how we approach the expressive\ + \ organisation of musical materials during the composition of the piece, as well\ + \ as the addition of nuances (personal real-time expression) by the musicians\ + \ at performance time. },\n address = {New York City, NY, United States},\n author\ + \ = {P\\'{e}rez, Miguel A. 
and Knapp, Benjamin and Alcorn, Michael},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177215},\n issn = {2220-4806},\n keywords\ + \ = {Composition, Integral Music Controller, Emotion measurement, Physiological\ + \ Measurement, Spatialisation. },\n pages = {289--292},\n title = {D\\'{\\i}amair\ + \ : Composing for Choir and Integral Music Controller},\n url = {http://www.nime.org/proceedings/2007/nime2007_289.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176579 + doi: 10.5281/zenodo.1177215 issn: 2220-4806 - pages: 104--108 - title: Playability Evaluation of a Virtual Bowed String Instrument - url: http://www.nime.org/proceedings/2003/nime2003_104.pdf - year: 2003 + keywords: 'Composition, Integral Music Controller, Emotion measurement, Physiological + Measurement, Spatialisation. ' + pages: 289--292 + title: 'Díamair : Composing for Choir and Integral Music Controller' + url: http://www.nime.org/proceedings/2007/nime2007_289.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Gaye2003 - abstract: 'In the project Sonic City, we have developed a system thatenables users - to create electronic music in real time by walkingthrough and interacting with - the urban environment. Weexplore the use of public space and everyday behaviours - forcreative purposes, in particular the city as an interface andmobility as an - interaction model for electronic music making.A multi-disciplinary design process - resulted in theimplementation of a wearable, context-aware prototype. Thesystem - produces music by retrieving information aboutcontext and user action and mapping - it to real-time processingof urban sounds. Potentials, constraints, and implications - ofthis type of music creation are discussed.' 
- address: 'Montreal, Canada' - author: 'Gaye, Lalya and Mazé, Ramia and Holmquist, Lars E.' - bibtex: "@inproceedings{Gaye2003,\n abstract = {In the project Sonic City, we have\ - \ developed a system thatenables users to create electronic music in real time\ - \ by walkingthrough and interacting with the urban environment. Weexplore the\ - \ use of public space and everyday behaviours forcreative purposes, in particular\ - \ the city as an interface andmobility as an interaction model for electronic\ - \ music making.A multi-disciplinary design process resulted in theimplementation\ - \ of a wearable, context-aware prototype. Thesystem produces music by retrieving\ - \ information aboutcontext and user action and mapping it to real-time processingof\ - \ urban sounds. Potentials, constraints, and implications ofthis type of music\ - \ creation are discussed.},\n address = {Montreal, Canada},\n author = {Gaye,\ - \ Lalya and Maz\\'{e}, Ramia and Holmquist, Lars E.},\n booktitle = {Proceedings\ + ID: Fornari2007 + abstract: 'We present an interactive sound spatialization and synthesis system based + on Interaural Time Difference (ITD) model and Evolutionary Computation. We define + a Sonic Localization Field using sound attenuation and ITD azimuth angle parameters + and, in order to control an adaptive algorithm, we used pairs of these parameters + as Spatial Sound Genotypes (SSG). They are extracted from waveforms which are + considered individuals of a Population Set. A user-interface receives input from + a generic gesture interface (such as a NIME device) and interprets them as ITD + cues. Trajectories provided by these signals are used as Target Sets of an evolutionary + algorithm. A Fitness procedure optimizes locally the distance between the Target + Set and the SSG pairs. Through a parametric score the user controls dynamic changes + in the sound output. ' + address: 'New York City, NY, United States' + author: 'Fornari, Jose and Maia, Adolfo Jr. 
and Manzolli, Jonatas' + bibtex: "@inproceedings{Fornari2007,\n abstract = {We present an interactive sound\ + \ spatialization and synthesis system based on Interaural Time Difference (ITD)\ + \ model and Evolutionary Computation. We define a Sonic Localization Field using\ + \ sound attenuation and ITD azimuth angle parameters and, in order to control\ + \ an adaptive algorithm, we used pairs of these parameters as Spatial Sound Genotypes\ + \ (SSG). They are extracted from waveforms which are considered individuals of\ + \ a Population Set. A user-interface receives input from a generic gesture interface\ + \ (such as a NIME device) and interprets them as ITD cues. Trajectories provided\ + \ by these signals are used as Target Sets of an evolutionary algorithm. A Fitness\ + \ procedure optimizes locally the distance between the Target Set and the SSG\ + \ pairs. Through a parametric score the user controls dynamic changes in the sound\ + \ output. },\n address = {New York City, NY, United States},\n author = {Fornari,\ + \ Jose and Maia, Adolfo Jr. and Manzolli, Jonatas},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176507},\n issn = {2220-4806},\n\ - \ keywords = {Interactive music, interaction design, urban environment, wearable\ - \ computing, context-awareness, mobility},\n pages = {109--115},\n title = {Sonic\ - \ City: The Urban Environment as a Musical Interface},\n url = {http://www.nime.org/proceedings/2003/nime2003_109.pdf},\n\ - \ year = {2003}\n}\n" + \ doi = {10.5281/zenodo.1177089},\n issn = {2220-4806},\n keywords = {interactive,\ + \ sound, spatialization, evolutionary, adaptation. 
},\n pages = {293--298},\n\ + \ title = {Interactive Spatialization and Sound Design using an Evolutionary System},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_293.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176507 + doi: 10.5281/zenodo.1177089 issn: 2220-4806 - keywords: 'Interactive music, interaction design, urban environment, wearable computing, - context-awareness, mobility' - pages: 109--115 - title: 'Sonic City: The Urban Environment as a Musical Interface' - url: http://www.nime.org/proceedings/2003/nime2003_109.pdf - year: 2003 + keywords: 'interactive, sound, spatialization, evolutionary, adaptation. ' + pages: 293--298 + title: Interactive Spatialization and Sound Design using an Evolutionary System + url: http://www.nime.org/proceedings/2007/nime2007_293.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Lyons2003 - abstract: 'The role of the face and mouth in speech production as well asnon-verbal - communication suggests the use of facial action tocontrol musical sound. Here - we document work on theMouthesizer, a system which uses a headworn miniaturecamera - and computer vision algorithm to extract shapeparameters from the mouth opening - and output these as MIDIcontrol changes. We report our experience with variousgesture-to-sound - mappings and musical applications, anddescribe a live performance which used the - Mouthesizerinterface.' - address: 'Montreal, Canada' - author: 'Lyons, Michael J. and Haehnel, Michael and Tetsutani, Nobuji' - bibtex: "@inproceedings{Lyons2003,\n abstract = {The role of the face and mouth\ - \ in speech production as well asnon-verbal communication suggests the use of\ - \ facial action tocontrol musical sound. 
Here we document work on theMouthesizer,\ - \ a system which uses a headworn miniaturecamera and computer vision algorithm\ - \ to extract shapeparameters from the mouth opening and output these as MIDIcontrol\ - \ changes. We report our experience with variousgesture-to-sound mappings and\ - \ musical applications, anddescribe a live performance which used the Mouthesizerinterface.},\n\ - \ address = {Montreal, Canada},\n author = {Lyons, Michael J. and Haehnel, Michael\ - \ and Tetsutani, Nobuji},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ - \ = {10.5281/zenodo.1176529},\n issn = {2220-4806},\n keywords = {Video-based\ - \ interface; mouth controller; alternative input devices. },\n pages = {116--121},\n\ - \ title = {Designing, Playing, and Performing with a Vision-based Mouth Interface},\n\ - \ url = {http://www.nime.org/proceedings/2003/nime2003_116.pdf},\n year = {2003}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176529 + ID: Hornof2007 + abstract: 'In this project, eye tracking researchers and computer music composers + collaborate to create musical compositions that are played with the eyes. A commercial + eye tracker (LC Technologies Eyegaze) is connected to a music and multimedia authoring + environment (Max/MSP/Jitter). The project addresses issues of both noise and control: + How will the performance benefit from the noise inherent in eye trackers and eye + movements, and to what extent should the composition encourage the performer to + try to control a specific musical outcome? Providing one set of answers to these + two questions, the authors create an eye-controlled composition, EyeMusic v1.0, + which was selected by juries for live performance at computer music conferences.' 
+ address: 'New York City, NY, United States' + author: 'Hornof, Anthony J. and Rogers, Troy and Halverson, Tim' + bibtex: "@inproceedings{Hornof2007,\n abstract = {In this project, eye tracking\ + \ researchers and computer music composers collaborate to create musical compositions\ + \ that are played with the eyes. A commercial eye tracker (LC Technologies Eyegaze)\ + \ is connected to a music and multimedia authoring environment (Max/MSP/Jitter).\ + \ The project addresses issues of both noise and control: How will the performance\ + \ benefit from the noise inherent in eye trackers and eye movements, and to what\ + \ extent should the composition encourage the performer to try to control a specific\ + \ musical outcome? Providing one set of answers to these two questions, the authors\ + \ create an eye-controlled composition, EyeMusic v1.0, which was selected by juries\ + \ for live performance at computer music conferences.},\n address = {New York\ + \ City, NY, United States},\n author = {Hornof, Anthony J. and Rogers, Troy and\ + \ Halverson, Tim},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177121},\n\ + \ issn = {2220-4806},\n keywords = {H.5.2 [Information Interfaces and Presentation]\ + \ User Interfaces --- input devices and strategies, interaction styles. J.5 [Arts\ + \ and Humanities] Fine arts, performing arts. },\n pages = {299--300},\n title\ + \ = {EyeMusic : Performing Live Music and Multimedia Compositions with Eye Movements},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_299.pdf},\n year = {2007}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177121 issn: 2220-4806 - keywords: 'Video-based interface; mouth controller; alternative input devices. 
' - pages: 116--121 - title: 'Designing, Playing, and Performing with a Vision-based Mouth Interface' - url: http://www.nime.org/proceedings/2003/nime2003_116.pdf - year: 2003 + keywords: 'H.5.2 [Information Interfaces and Presentation] User Interfaces --- input + devices and strategies, interaction styles. J.5 [Arts and Humanities] Fine arts, + performing arts. ' + pages: 299--300 + title: 'EyeMusic : Performing Live Music and Multimedia Compositions with Eye Movements' + url: http://www.nime.org/proceedings/2007/nime2007_299.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Hewitt2003 - abstract: 'This paper describes work in progress for the development of a gestural - controller interface for contemporary vocal performance and electronic processing. - The paper includes a preliminary investigation of the gestures and movements of - vocalists who use microphones and microphone stands. This repertoire of gestures - forms the foundation of a well-practiced ‘language’ and social code for communication - between performers and audiences and serves as a basis for alternate controller - design principles. A prototype design, based on a modified microphone stand, is - presented along with a discussion of possible controller mapping strategies and - identification of directions for future research.' - address: 'Montreal, Canada' - author: 'Hewitt, Donna and Stevenson, Ian' - bibtex: "@inproceedings{Hewitt2003,\n abstract = {This paper describes work in progress\ - \ for the development of a gestural controller interface for contemporary vocal\ - \ performance and electronic processing. The paper includes a preliminary investigation\ - \ of the gestures and movements of vocalists who use microphones and microphone\ - \ stands. This repertoire of gestures forms the foundation of a well-practiced\ - \ ‘language’ and social code for communication between performers and audiences\ - \ and serves as a basis for alternate controller design principles. 
A prototype\ - \ design, based on a modified microphone stand, is presented along with a discussion\ - \ of possible controller mapping strategies and identification of directions for\ - \ future research.},\n address = {Montreal, Canada},\n author = {Hewitt, Donna\ - \ and Stevenson, Ian},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ - \ = {10.5281/zenodo.1176511},\n issn = {2220-4806},\n keywords = {Alternate controller,\ - \ gesture, microphone technique, vocal performance, performance interface, electronic\ - \ music. },\n pages = {122--128},\n title = {E-mic: Extended Mic-stand Interface\ - \ Controller},\n url = {http://www.nime.org/proceedings/2003/nime2003_122.pdf},\n\ - \ year = {2003}\n}\n" + ID: Kirk2007 + abstract: 'The FrankenPipe project is an attempt to convert a traditionalHighland + Bagpipe into a controller capable of driving both realtime synthesis on a laptop + as well as a radio-controlled (RC) car.Doing so engages musical creativity while + enabling novel, oftenhumorous, performance art. The chanter is outfitted withphotoresistors + (CdS photoconductive cells) underneath each hole,allowing a full range of MIDI + values to be produced with eachfinger and giving the player a natural feel. An + air-pressure sensoris also deployed in the bag to provide another element of controlwhile + capturing a fundamental element of bagpipe performance.The final product navigates + the realm of both musical instrumentand toy, allowing the performer to create + a novel yet richperformance experience for the audience.' 
+ address: 'New York City, NY, United States' + author: 'Kirk, Turner and Leider, Colby' + bibtex: "@inproceedings{Kirk2007,\n abstract = {The FrankenPipe project is an attempt\ + \ to convert a traditionalHighland Bagpipe into a controller capable of driving\ + \ both realtime synthesis on a laptop as well as a radio-controlled (RC) car.Doing\ + \ so engages musical creativity while enabling novel, oftenhumorous, performance\ + \ art. The chanter is outfitted withphotoresistors (CdS photoconductive cells)\ + \ underneath each hole,allowing a full range of MIDI values to be produced with\ + \ eachfinger and giving the player a natural feel. An air-pressure sensoris also\ + \ deployed in the bag to provide another element of controlwhile capturing a fundamental\ + \ element of bagpipe performance.The final product navigates the realm of both\ + \ musical instrumentand toy, allowing the performer to create a novel yet richperformance\ + \ experience for the audience.},\n address = {New York City, NY, United States},\n\ + \ author = {Kirk, Turner and Leider, Colby},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177151},\n issn = {2220-4806},\n keywords = {FrankenPipe, alternate\ + \ controller, MIDI, bagpipe, photoresistor, chanter. },\n pages = {301--304},\n\ + \ title = {The FrankenPipe : A Novel Bagpipe Controller},\n url = {http://www.nime.org/proceedings/2007/nime2007_301.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176511 + doi: 10.5281/zenodo.1177151 issn: 2220-4806 - keywords: 'Alternate controller, gesture, microphone technique, vocal performance, - performance interface, electronic music. 
' - pages: 122--128 - title: 'E-mic: Extended Mic-stand Interface Controller' - url: http://www.nime.org/proceedings/2003/nime2003_122.pdf - year: 2003 + keywords: 'FrankenPipe, alternate controller, MIDI, bagpipe, photoresistor, chanter. ' + pages: 301--304 + title: 'The FrankenPipe : A Novel Bagpipe Controller' + url: http://www.nime.org/proceedings/2007/nime2007_301.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Blaine2003 - abstract: 'We explore a variety of design criteria applicable to thecreation of - collaborative interfaces for musical experience. Themain factor common to the - design of most collaborativeinterfaces for novices is that musical control is - highlyrestricted, which makes it possible to easily learn andparticipate in the - collective experience. Balancing this tradeoff is a key concern for designers, - as this happens at theexpense of providing an upward path to virtuosity with theinterface. - We attempt to identify design considerationsexemplified by a sampling of recent - collaborative devicesprimarily oriented toward novice interplay. It is our intentionto - provide a non-technical overview of design issues inherentin configuring multiplayer - experiences, particularly for entrylevel players.' - address: 'Montreal, Canada' - author: 'Blaine, Tina and Fels, Sidney S.' - bibtex: "@inproceedings{Blaine2003,\n abstract = {We explore a variety of design\ - \ criteria applicable to thecreation of collaborative interfaces for musical experience.\ - \ Themain factor common to the design of most collaborativeinterfaces for novices\ - \ is that musical control is highlyrestricted, which makes it possible to easily\ - \ learn andparticipate in the collective experience. Balancing this tradeoff is\ - \ a key concern for designers, as this happens at theexpense of providing an upward\ - \ path to virtuosity with theinterface. 
We attempt to identify design considerationsexemplified\ - \ by a sampling of recent collaborative devicesprimarily oriented toward novice\ - \ interplay. It is our intentionto provide a non-technical overview of design\ - \ issues inherentin configuring multiplayer experiences, particularly for entrylevel\ - \ players.},\n address = {Montreal, Canada},\n author = {Blaine, Tina and Fels,\ - \ Sidney S.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176490},\n\ - \ issn = {2220-4806},\n keywords = {Design, collaborative interface, musical experience,\ - \ multiplayer, novice, musical control. },\n pages = {129--134},\n title = {Contexts\ - \ of Collaborative Musical Experiences},\n url = {http://www.nime.org/proceedings/2003/nime2003_129.pdf},\n\ - \ year = {2003}\n}\n" + ID: Camurri2007a + abstract: 'EyesWeb XMI (for eXtended Multimodal Interaction) is the new version + of the well-known EyesWeb platform. It has a main focus on multimodality and the + main design target of this new release has been to improve the ability to process + and correlate several streams of data. It has been used extensively to build a + set of interactive systems for performing arts applications for Festival della + Scienza 2006, Genoa, Italy. The purpose of this paper is to describe the developed + installations as well as the new EyesWeb features that helped in their development.' + address: 'New York City, NY, United States' + author: 'Camurri, Antonio and Coletta, Paolo and Varni, Giovanna and Ghisio, Simone' + bibtex: "@inproceedings{Camurri2007a,\n abstract = {EyesWeb XMI (for eXtended Multimodal\ + \ Interaction) is the new version of the well-known EyesWeb platform. 
It has a\ + \ main focus on multimodality and the main design target of this new release has\ + \ been to improve the ability to process and correlate several streams of data.\ + \ It has been used extensively to build a set of interactive systems for performing\ + \ arts applications for Festival della Scienza 2006, Genoa, Italy. The purpose\ + \ of this paper is to describe the developed installations as well as the new\ + \ EyesWeb features that helped in their development.},\n address = {New York City,\ + \ NY, United States},\n author = {Camurri, Antonio and Coletta, Paolo and Varni,\ + \ Giovanna and Ghisio, Simone},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177061},\n\ + \ issn = {2220-4806},\n keywords = {EyesWeb, multimodal interactive systems, performing\ + \ arts. },\n pages = {305--308},\n title = {Developing Multimodal Interactive\ + \ Systems with EyesWeb XMI},\n url = {http://www.nime.org/proceedings/2007/nime2007_305.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176490 + doi: 10.5281/zenodo.1177061 issn: 2220-4806 - keywords: 'Design, collaborative interface, musical experience, multiplayer, novice, - musical control. ' - pages: 129--134 - title: Contexts of Collaborative Musical Experiences - url: http://www.nime.org/proceedings/2003/nime2003_129.pdf - year: 2003 + keywords: 'EyesWeb, multimodal interactive systems, performing arts. 
' + pages: 305--308 + title: Developing Multimodal Interactive Systems with EyesWeb XMI + url: http://www.nime.org/proceedings/2007/nime2007_305.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Hunt2003 - abstract: 'MidiGrid is a computer-based musical instrument, primarilycontrolled - with the computer mouse, which allows liveperformance of MIDI-based musical material - by mapping 2dimensional position onto musical events. Since itsinvention in 1987, - it has gained a small, but enthusiastic,band of users, and has become the primary - instrument forseveral people with physical disabilities. This paper reviewsits - development, uses and user interface issues, and highlightsthe work currently - in progress for its transformation intoMediaGrid.' - address: 'Montreal, Canada' - author: 'Hunt, Andy D. and Kirk, Ross' - bibtex: "@inproceedings{Hunt2003,\n abstract = {MidiGrid is a computer-based musical\ - \ instrument, primarilycontrolled with the computer mouse, which allows liveperformance\ - \ of MIDI-based musical material by mapping 2dimensional position onto musical\ - \ events. Since itsinvention in 1987, it has gained a small, but enthusiastic,band\ - \ of users, and has become the primary instrument forseveral people with physical\ - \ disabilities. This paper reviewsits development, uses and user interface issues,\ - \ and highlightsthe work currently in progress for its transformation intoMediaGrid.},\n\ - \ address = {Montreal, Canada},\n author = {Hunt, Andy D. 
and Kirk, Ross},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176517},\n\ - \ issn = {2220-4806},\n keywords = {Live performance, Computer-based musical instruments,\ - \ Human Computer Interaction for Music},\n pages = {135--139},\n title = {MidiGrid:\ - \ Past, Present and Future},\n url = {http://www.nime.org/proceedings/2003/nime2003_135.pdf},\n\ - \ year = {2003}\n}\n" + ID: Hoffman2007 + abstract: 'A crucial set of decisions in digital musical instrument design deals + with choosing mappings between parameters controlled by the performer and the + synthesis algorithms that actually generate sound. Feature-based synthesis offers + a way to parameterize audio synthesis in terms of the quantifiable perceptual + characteristics, or features, the performer wishes the sound to take on. Techniques + for accomplishing such mappings and enabling feature-based synthesis to be performed + in real time are discussed. An example is given of how a real-time performance + system might be designed to take advantage of feature-based synthesis''s ability + to provide perceptually meaningful control over a large number of synthesis parameters. ' + address: 'New York City, NY, United States' + author: 'Hoffman, Matt and Cook, Perry R.' + bibtex: "@inproceedings{Hoffman2007,\n abstract = {A crucial set of decisions in\ + \ digital musical instrument design deals with choosing mappings between parameters\ + \ controlled by the performer and the synthesis algorithms that actually generate\ + \ sound. Feature-based synthesis offers a way to parameterize audio synthesis\ + \ in terms of the quantifiable perceptual characteristics, or features, the performer\ + \ wishes the sound to take on. 
Techniques for accomplishing such mappings and\ + \ enabling feature-based synthesis to be performed in real time are discussed.\ + \ An example is given of how a real-time performance system might be designed\ + \ to take advantage of feature-based synthesis's ability to provide perceptually\ + \ meaningful control over a large number of synthesis parameters. },\n address\ + \ = {New York City, NY, United States},\n author = {Hoffman, Matt and Cook, Perry\ + \ R.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177117},\n issn = {2220-4806},\n\ + \ keywords = {Feature, Synthesis, Analysis, Mapping, Real-time. },\n pages = {309--312},\n\ + \ title = {Real-Time Feature-Based Synthesis for Live Musical Performance},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_309.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176517 + doi: 10.5281/zenodo.1177117 issn: 2220-4806 - keywords: 'Live performance, Computer-based musical instruments, Human Computer - Interaction for Music' - pages: 135--139 - title: 'MidiGrid: Past, Present and Future' - url: http://www.nime.org/proceedings/2003/nime2003_135.pdf - year: 2003 + keywords: 'Feature, Synthesis, Analysis, Mapping, Real-time. ' + pages: 309--312 + title: Real-Time Feature-Based Synthesis for Live Musical Performance + url: http://www.nime.org/proceedings/2007/nime2007_309.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Kessous2003 - abstract: 'This paper presents a study of bimanual control applied tosound synthesis. - This study deals with coordination,cooperation, and abilities of our hands in - musical context. Wedescribe examples of instruments made using subtractivesynthesis, - scanned synthesis in Max/MSP and commercialstand-alone software synthesizers via - MIDI communicationprotocol. 
These instruments have been designed according to - amulti-layer-mapping model, which provides modular design.They have been used - in concerts and performanceconsiderations are discussed too.' - address: 'Montreal, Canada' - author: 'Kessous, Loïc and Arfib, Daniel' - bibtex: "@inproceedings{Kessous2003,\n abstract = {This paper presents a study of\ - \ bimanual control applied tosound synthesis. This study deals with coordination,cooperation,\ - \ and abilities of our hands in musical context. Wedescribe examples of instruments\ - \ made using subtractivesynthesis, scanned synthesis in Max/MSP and commercialstand-alone\ - \ software synthesizers via MIDI communicationprotocol. These instruments have\ - \ been designed according to amulti-layer-mapping model, which provides modular\ - \ design.They have been used in concerts and performanceconsiderations are discussed\ - \ too.},\n address = {Montreal, Canada},\n author = {Kessous, Lo\\\"{i}c and Arfib,\ - \ Daniel},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176523},\n\ - \ issn = {2220-4806},\n keywords = {Gesture control, mapping, alternate controllers,\ - \ musical instruments. },\n pages = {140--145},\n title = {Bimanuality in Alternate\ - \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2003/nime2003_140.pdf},\n\ - \ year = {2003}\n}\n" + ID: Hashida2007 + abstract: 'This paper introduces jPop-E (java-based PolyPhrase Ensemble), an assistant + system for the Pop-E performancerendering system. Using this assistant system, + MIDI dataincluding expressive tempo changes or velocity control canbe created + based on the user''s musical intention. Pop-E(PolyPhrase Ensemble) is one of the + few machine systemsdevoted to creating expressive musical performances thatcan + deal with the structure of polyphonic music and theuser''s interpretation of the + music. 
A well-designed graphical user interface is required to make full use of + the potential ability of Pop-E. In this paper, we discuss the necessaryelements + of the user interface for Pop-E, and describe theimplemented system, jPop-E.' + address: 'New York City, NY, United States' + author: 'Hashida, Mitsuyo and Nagata, Noriko and Katayose, Haruhiro' + bibtex: "@inproceedings{Hashida2007,\n abstract = {This paper introduces jPop-E\ + \ (java-based PolyPhrase Ensemble), an assistant system for the Pop-E performancerendering\ + \ system. Using this assistant system, MIDI dataincluding expressive tempo changes\ + \ or velocity control canbe created based on the user's musical intention. Pop-E(PolyPhrase\ + \ Ensemble) is one of the few machine systemsdevoted to creating expressive musical\ + \ performances thatcan deal with the structure of polyphonic music and theuser's\ + \ interpretation of the music. A well-designed graphical user interface is required\ + \ to make full use of the potential ability of Pop-E. 
In this paper, we discuss\ + \ the necessaryelements of the user interface for Pop-E, and describe theimplemented\ + \ system, jPop-E.},\n address = {New York City, NY, United States},\n author =\ + \ {Hashida, Mitsuyo and Nagata, Noriko and Katayose, Haruhiro},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177111},\n issn = {2220-4806},\n keywords = {Performance\ + \ Rendering, User Interface, Ensemble Music Ex- pression },\n pages = {313--316},\n\ + \ title = {jPop-E : An Assistant System for Performance Rendering of Ensemble\ + \ Music},\n url = {http://www.nime.org/proceedings/2007/nime2007_313.pdf},\n year\ + \ = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176523 + doi: 10.5281/zenodo.1177111 issn: 2220-4806 - keywords: 'Gesture control, mapping, alternate controllers, musical instruments. 
' - pages: 140--145 - title: Bimanuality in Alternate Musical Instruments - url: http://www.nime.org/proceedings/2003/nime2003_140.pdf - year: 2003 + keywords: 'Performance Rendering, User Interface, Ensemble Music Ex- pression ' + pages: 313--316 + title: 'jPop-E : An Assistant System for Performance Rendering of Ensemble Music' + url: http://www.nime.org/proceedings/2007/nime2007_313.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Modler2003 - abstract: 'This paper describes the implementation of Time Delay NeuralNetworks - (TDNN) to recognize gestures from video images.Video sources are used because - they are non-invasive and do notinhibit performer''s physical movement or require - specialistdevices to be attached to the performer which experience hasshown to - be a significant problem that impacts musiciansperformance and can focus musical - rehearsals and performancesupon technical rather than musical concerns (Myatt - 2003).We describe a set of hand gestures learned by an artificial neuralnetwork - to control musical parameters expressively in real time.The set is made up of - different types of gestures in order toinvestigate:-aspects of the recognition - process-expressive musical control-schemes of parameter mapping-generalization - issues for an extended set for musicalcontrolThe learning procedure of the Neural - Network is describedwhich is based on variations by affine transformations of - imagesequences of the hand gestures.The whole application including the gesture - capturing isimplemented in jMax to achieve real time conditions and easyintegration - into a musical environment to realize differentmappings and routings of the control - stream.The system represents a practice-based research using actualmusic models - like compositions and processes of compositionwhich will follow the work described - in the paper.' 
- address: 'Montreal, Canada' - author: 'Modler, Paul and Myatt, Tony and Saup, Michael' - bibtex: "@inproceedings{Modler2003,\n abstract = {This paper describes the implementation\ - \ of Time Delay NeuralNetworks (TDNN) to recognize gestures from video images.Video\ - \ sources are used because they are non-invasive and do notinhibit performer's\ - \ physical movement or require specialistdevices to be attached to the performer\ - \ which experience hasshown to be a significant problem that impacts musiciansperformance\ - \ and can focus musical rehearsals and performancesupon technical rather than\ - \ musical concerns (Myatt 2003).We describe a set of hand gestures learned by\ - \ an artificial neuralnetwork to control musical parameters expressively in real\ - \ time.The set is made up of different types of gestures in order toinvestigate:-aspects\ - \ of the recognition process-expressive musical control-schemes of parameter mapping-generalization\ - \ issues for an extended set for musicalcontrolThe learning procedure of the Neural\ - \ Network is describedwhich is based on variations by affine transformations of\ - \ imagesequences of the hand gestures.The whole application including the gesture\ - \ capturing isimplemented in jMax to achieve real time conditions and easyintegration\ - \ into a musical environment to realize differentmappings and routings of the\ - \ control stream.The system represents a practice-based research using actualmusic\ - \ models like compositions and processes of compositionwhich will follow the work\ - \ described in the paper.},\n address = {Montreal, Canada},\n author = {Modler,\ - \ Paul and Myatt, Tony and Saup, Michael},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176533},\n issn = {2220-4806},\n keywords = {Gesture\ - \ Recognition, Artificial Neural Network, Expressive Control, Real-time Interaction\ 
- \ },\n pages = {146--150},\n title = {An Experimental Set of Hand Gestures for\ - \ Expressive Control of Musical Parameters in Realtime},\n url = {http://www.nime.org/proceedings/2003/nime2003_146.pdf},\n\ - \ year = {2003}\n}\n" + ID: Sarkar2007 + abstract: 'Playing music over the Internet, whether for real-time jamming, network + performance or distance education, is constrained by the speed of light which + introduces, over long distances, time delays unsuitable for musical applications. + Current musical collaboration systems generally transmit compressed audio streams + over low-latency and high-bandwidthnetworks to optimize musician synchronization. + This paperproposes an alternative approach based on pattern recognition and music + prediction. Trained for a particular typeof music, here the Indian tabla drum, + the system calledTablaNet identifies rhythmic patterns by recognizing individual + strokes played by a musician and mapping them dynamically to known musical constructs. + Symbols representing these musical structures are sent over the network toa corresponding + computer system. The computer at thereceiving end anticipates incoming events + by analyzing previous phrases and synthesizes an estimated audio output.Although + such a system may introduce variants due to prediction approximations, resulting + in a slightly different musical experience at both ends, we find that it demonstratesa + high level of playability with an immediacy not present inother systems, and functions + well as an educational tool.' + address: 'New York City, NY, United States' + author: 'Sarkar, Mihir and Vercoe, Barry' + bibtex: "@inproceedings{Sarkar2007,\n abstract = {Playing music over the Internet,\ + \ whether for real-time jamming, network performance or distance education, is\ + \ constrained by the speed of light which introduces, over long distances, time\ + \ delays unsuitable for musical applications. 
Current musical collaboration systems\ + \ generally transmit compressed audio streams over low-latency and high-bandwidthnetworks\ + \ to optimize musician synchronization. This paperproposes an alternative approach\ + \ based on pattern recognition and music prediction. Trained for a particular\ + \ typeof music, here the Indian tabla drum, the system calledTablaNet identifies\ + \ rhythmic patterns by recognizing individual strokes played by a musician and\ + \ mapping them dynamically to known musical constructs. Symbols representing these\ + \ musical structures are sent over the network toa corresponding computer system.\ + \ The computer at thereceiving end anticipates incoming events by analyzing previous\ + \ phrases and synthesizes an estimated audio output.Although such a system may\ + \ introduce variants due to prediction approximations, resulting in a slightly\ + \ different musical experience at both ends, we find that it demonstratesa high\ + \ level of playability with an immediacy not present inother systems, and functions\ + \ well as an educational tool.},\n address = {New York City, NY, United States},\n\ + \ author = {Sarkar, Mihir and Vercoe, Barry},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177239},\n issn = {2220-4806},\n keywords = {network music\ + \ performance, real-time online musical collab- oration, Indian percussions, tabla\ + \ bols, strokes recognition, music prediction },\n pages = {317--320},\n title\ + \ = {Recognition and Prediction in a Network Music Performance System for {India}n\ + \ Percussion},\n url = {http://www.nime.org/proceedings/2007/nime2007_317.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176533 + doi: 10.5281/zenodo.1177239 issn: 2220-4806 - keywords: 'Gesture Recognition, Artificial 
Neural Network, Expressive Control, Real-time - Interaction ' - pages: 146--150 - title: An Experimental Set of Hand Gestures for Expressive Control of Musical Parameters - in Realtime - url: http://www.nime.org/proceedings/2003/nime2003_146.pdf - year: 2003 + keywords: 'network music performance, real-time online musical collab- oration, + Indian percussions, tabla bols, strokes recognition, music prediction ' + pages: 317--320 + title: Recognition and Prediction in a Network Music Performance System for Indian + Percussion + url: http://www.nime.org/proceedings/2007/nime2007_317.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Nakra2003 - abstract: 'This paper describes the artistic projects undertaken at ImmersionMusic, - Inc. (www.immersionmusic.org) during its three-yearexistence. We detail work in - interactive performance systems,computer-based training systems, and concert production.' - address: 'Montreal, Canada' - author: 'Nakra, Teresa M.' - bibtex: "@inproceedings{Nakra2003,\n abstract = {This paper describes the artistic\ - \ projects undertaken at ImmersionMusic, Inc. (www.immersionmusic.org) during\ - \ its three-yearexistence. We detail work in interactive performance systems,computer-based\ - \ training systems, and concert production.},\n address = {Montreal, Canada},\n\ - \ author = {Nakra, Teresa M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176541},\n issn = {2220-4806},\n keywords = {Interactive\ - \ computer music systems, gestural interaction, Conductor's Jacket, Digital Baton\ - \ },\n pages = {151--152},\n title = {Immersion Music: a Progress Report},\n url\ - \ = {http://www.nime.org/proceedings/2003/nime2003_151.pdf},\n year = {2003}\n\ - }\n" + ID: Vigoda2007 + abstract: 'JamiOki-PureJoy is a novel electronically mediated musical performance + system. 
PureJoy is a musical instrument; A highly flexible looper, sampler, effects + processor and sound manipulation interface based on Pure Data, with input from + a joystick controller and headset microphone. PureJoy allows the player to essentially + sculpt their voice with their hands. JamiOki is an engine for running group-player + musical game pieces. JamiOki helps each player by ‘whispering instructions’ in + their ear. Players track and control their progress through the game using a graphical + display and a touch-sensitive footpad. JamiOki is an architecture for bringing + groups of players together to express themselves musically in a way that is both + spontaneous and formally satisfying. The flexibility of the PureJoy instrument + offers to JamiOki the ability for any player to play any requested role in the + music at any time. The musical structure provided by JamiOki helps PureJoy players + create more complex pieces of music on the fly with spontaneous sounds, silences, + themes, recapitulation, tight transitions, structural hierarchy, interesting interactions, + and even friendly competition. As a combined system JamiOki-PureJoy is exciting + and fun to play.' + address: 'New York City, NY, United States' + author: 'Vigoda, Benjamin and Merrill, David' + bibtex: "@inproceedings{Vigoda2007,\n abstract = {JamiOki-PureJoy is a novel electronically\ + \ mediated musical performance system. PureJoy is a musical instrument; A highly\ + \ flexible looper, sampler, effects processor and sound manipulation interface\ + \ based on Pure Data, with input from a joystick controller and headset microphone.\ + \ PureJoy allows the player to essentially sculpt their voice with their hands.\ + \ JamiOki is an engine for running group-player musical game pieces. JamiOki helps\ + \ each player by ‘whispering instructions’ in their ear. Players track and control\ + \ their progress through the game using a graphical display and a touch-sensitive\ + \ footpad. 
JamiOki is an architecture for bringing groups of players together\ + \ to express themselves musically in a way that is both spontaneous and formally\ + \ satisfying. The flexibility of the PureJoy instrument offers to JamiOki the\ + \ ability for any player to play any requested role in the music at any time.\ + \ The musical structure provided by JamiOki helps PureJoy players create more\ + \ complex pieces of music on the fly with spontaneous sounds, silences, themes,\ + \ recapitulation, tight transitions, structural hierarchy, interesting interactions,\ + \ and even friendly competition. As a combined system JamiOki-PureJoy is exciting\ + \ and fun to play.},\n address = {New York City, NY, United States},\n author\ + \ = {Vigoda, Benjamin and Merrill, David},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179473},\n\ + \ issn = {2220-4806},\n keywords = {JamiOki, PureJoy, collaborative performance,\ + \ structured im- provisation, electronically-mediated performance, found sound\ + \ },\n pages = {321--326},\n title = {JamiOki-PureJoy : A Game Engine and Instrument\ + \ for Electronically-Mediated Musical Improvisation},\n url = {http://www.nime.org/proceedings/2007/nime2007_321.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176541 + doi: 10.5281/zenodo.1179473 issn: 2220-4806 - keywords: 'Interactive computer music systems, gestural interaction, Conductor''s - Jacket, Digital Baton ' - pages: 151--152 - title: 'Immersion Music: a Progress Report' - url: http://www.nime.org/proceedings/2003/nime2003_151.pdf - year: 2003 + keywords: 'JamiOki, PureJoy, collaborative performance, structured im- provisation, + electronically-mediated performance, found sound ' + pages: 321--326 + title: 'JamiOki-PureJoy : A Game Engine and Instrument for 
Electronically-Mediated + Musical Improvisation' + url: http://www.nime.org/proceedings/2007/nime2007_321.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Wright2003 - abstract: 'OpenSound Control (“OSC”) is a protocol for communication among computers, - sound synthesizers, and other multimedia devices that is optimized for modern - networking technology. OSC has achieved wide use in the field of computer-based - new interfaces for musical expression for wide-area and local-area networked distributed - music systems, inter-process communication, and even within a single application.' - address: 'Montreal, Canada' - author: 'Wright, Matthew and Freed, Adrian and Momeni, Ali' - bibtex: "@inproceedings{Wright2003,\n abstract = {OpenSound Control (“OSC”) is a\ - \ protocol for communication among computers, sound synthesizers, and other multimedia\ - \ devices that is optimized for modern networking technology. OSC has achieved\ - \ wide use in the field of computer-based new interfaces for musical expression\ - \ for wide-area and local-area networked distributed music systems, inter-process\ - \ communication, and even within a single application.},\n address = {Montreal,\ - \ Canada},\n author = {Wright, Matthew and Freed, Adrian and Momeni, Ali},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176575},\n\ - \ issn = {2220-4806},\n keywords = {OpenSound Control, Networking, client/server\ - \ communication},\n pages = {153--159},\n title = {OpenSound Control: State of\ - \ the Art 2003},\n url = {http://www.nime.org/proceedings/2003/nime2003_153.pdf},\n\ - \ year = {2003}\n}\n" + ID: Gomez2007 + abstract: In this article we want to show how graphical languages can be used successfully + for monitoring and controlling a digital musical instrument. 
An overview of the + design and development stages of this instrument shows how we can create models + which will simplify the control and use of different kinds of musical algorithms + for synthesis and sequencing. + address: 'New York City, NY, United States' + author: 'Gómez, Daniel and Donner, Tjebbe and Posada, Andrés' + bibtex: "@inproceedings{Gomez2007,\n abstract = {In this article we want to show\ + \ how graphical languages can be used successfully for monitoring and controlling\ + \ a digital musical instrument. An overview of the design and development stages\ + \ of this instrument shows how we can create models which will simplify the control\ + \ and use of different kinds of musical algorithms for synthesis and sequencing.},\n\ + \ address = {New York City, NY, United States},\n author = {G\\'{o}mez, Daniel\ + \ and Donner, Tjebbe and Posada, Andr\\'{e}s},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177097},\n issn = {2220-4806},\n keywords = {nime07},\n pages\ + \ = {327--329},\n title = {A Look at the Design and Creation of a Graphically\ + \ Controlled Digital Musical Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_327.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176575 + doi: 10.5281/zenodo.1177097 issn: 2220-4806 - keywords: 'OpenSound Control, Networking, client/server communication' - pages: 153--159 - title: 'OpenSound Control: State of the Art 2003' - url: http://www.nime.org/proceedings/2003/nime2003_153.pdf - year: 2003 + keywords: nime07 + pages: 327--329 + title: A Look at the Design and Creation of a Graphically Controlled Digital Musical + Instrument + url: http://www.nime.org/proceedings/2007/nime2007_327.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Dobrian2003 - abstract: 'This article 
reports on a project that uses unfettered gestural motion - for expressive musical purposes. The project involves the development of, and - experimentation with, software to receive data from a Vicon motion capture system, - and to translate and map that data into data for the control of music and other - media such as lighting. In addition to the commercially standard MIDI-which allows - direct control of external synthesizers, processors, and other devices-other mappings - are used for direct software control of digital audio and video. This report describes - the design and implementation of the software, discusses specific experiments - performed with it, and evaluates its application in terms of aesthetic pros and - cons.' - address: 'Montreal, Canada' - author: 'Dobrian, Christopher and Bevilacqua, Frédéric' - bibtex: "@inproceedings{Dobrian2003,\n abstract = {This article reports on a project\ - \ that uses unfettered gestural motion for expressive musical purposes. The project\ - \ involves the development of, and experimentation with, software to receive data\ - \ from a Vicon motion capture system, and to translate and map that data into\ - \ data for the control of music and other media such as lighting. In addition\ - \ to the commercially standard MIDI-which allows direct control of external synthesizers,\ - \ processors, and other devices-other mappings are used for direct software control\ - \ of digital audio and video. 
This report describes the design and implementation\ - \ of the software, discusses specific experiments performed with it, and evaluates\ - \ its application in terms of aesthetic pros and cons.},\n address = {Montreal,\ - \ Canada},\n author = {Dobrian, Christopher and Bevilacqua, Fr\\'{e}d\\'{e}ric},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176503},\n\ - \ issn = {2220-4806},\n keywords = {Motion capture, gestural control, mapping.\ - \ },\n pages = {161--163},\n title = {Gestural Control of Music Using the Vicon\ - \ 8 Motion Capture System},\n url = {http://www.nime.org/proceedings/2003/nime2003_161.pdf},\n\ - \ year = {2003}\n}\n" + ID: Vanegas2007 + abstract: 'The guitar pick has traditionally been used to strike or rakethe strings + of a guitar or bass, and in rarer instances, ashamisen, lute, or other stringed + instrument. The pressure exerted on it, however, has until now been ignored.The + MIDI Pick, an enhanced guitar pick, embraces this dimension, acting as a trigger + for serial data, audio samples,MIDI messages 1, Max/MSP patches, and on/off messages.This + added scope expands greatly the stringed instrumentplayer''s musical dynamic in + the studio or on stage.' + address: 'New York City, NY, United States' + author: 'Vanegas, Roy' + bibtex: "@inproceedings{Vanegas2007,\n abstract = {The guitar pick has traditionally\ + \ been used to strike or rakethe strings of a guitar or bass, and in rarer instances,\ + \ ashamisen, lute, or other stringed instrument. 
The pressure exerted on it, however,\ + \ has until now been ignored.The MIDI Pick, an enhanced guitar pick, embraces\ + \ this dimension, acting as a trigger for serial data, audio samples,MIDI messages\ + \ 1, Max/MSP patches, and on/off messages.This added scope expands greatly the\ + \ stringed instrumentplayer's musical dynamic in the studio or on stage.},\n address\ + \ = {New York City, NY, United States},\n author = {Vanegas, Roy},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179471},\n issn = {2220-4806},\n keywords\ + \ = {guitar, MIDI, pick, plectrum, wireless, bluetooth, ZigBee, Arduino, NIME,\ + \ ITP },\n pages = {330--333},\n title = {The {MIDI} Pick : Trigger Serial Data\ + \ , Samples, and {MIDI} from a Guitar Pick},\n url = {http://www.nime.org/proceedings/2007/nime2007_330.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176503 + doi: 10.5281/zenodo.1179471 issn: 2220-4806 - keywords: 'Motion capture, gestural control, mapping. ' - pages: 161--163 - title: Gestural Control of Music Using the Vicon 8 Motion Capture System - url: http://www.nime.org/proceedings/2003/nime2003_161.pdf - year: 2003 + keywords: 'guitar, MIDI, pick, plectrum, wireless, bluetooth, ZigBee, Arduino, NIME, + ITP ' + pages: 330--333 + title: 'The MIDI Pick : Trigger Serial Data , Samples, and MIDI from a Guitar Pick' + url: http://www.nime.org/proceedings/2007/nime2007_330.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Nishimoto2003 - abstract: 'In this paper, we discuss a design principle for the musical instruments - that are useful for both novices and professional musicians and that facilitate - musically rich expression. We believe that the versatility of conventional musical - instruments causes difficulty in performance. 
By dynamically specializing a musical - instrument for performing a specific (genre of) piece, the musical instrument - could become more useful for performing the piece and facilitates expressive performance. - Based on this idea, we developed two new types of musical instruments, i.e., a - "given-melody-based musical instrument" and a "harmonic-function-based musical - instrument". From the experimental results using two prototypes, we demonstrate - the efficiency of the design principle.' - address: 'Montreal, Canada' - author: 'Nishimoto, Kazushi and Oshima, Chika and Miyagawa, Yohei' - bibtex: "@inproceedings{Nishimoto2003,\n abstract = {In this paper, we discuss a\ - \ design principle for the musical instruments that are useful for both novices\ - \ and professional musicians and that facilitate musically rich expression. We\ - \ believe that the versatility of conventional musical instruments causes difficulty\ - \ in performance. By dynamically specializing a musical instrument for performing\ - \ a specific (genre of) piece, the musical instrument could become more useful\ - \ for performing the piece and facilitates expressive performance. Based on this\ - \ idea, we developed two new types of musical instruments, i.e., a \"given-melody-based\ - \ musical instrument\" and a \"harmonic-function-based musical instrument\". 
From\ - \ the experimental results using two prototypes, we demonstrate the efficiency\ - \ of the design principle.},\n address = {Montreal, Canada},\n author = {Nishimoto,\ - \ Kazushi and Oshima, Chika and Miyagawa, Yohei},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n date\ - \ = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176545},\n issn = {2220-4806},\n\ - \ keywords = {Musical instruments, expression, design principle, degree of freedom,\ - \ dynamic specialization},\n pages = {164--169},\n title = {Why Always Versatile?\ - \ Dynamically Customizable Musical Instruments Facilitate Expressive Performances},\n\ - \ url = {http://www.nime.org/proceedings/2003/nime2003_164.pdf},\n year = {2003}\n\ - }\n" + ID: Benning2007 + abstract: 'This paper describes the design and experimentation of a Kalman Filter + used to improve position tracking of a 3-D gesture-based musical controller known + as the Radiodrum. The Singer dynamic model for target tracking is used to describe + the evolution of a Radiodrum''s stick position in time. The autocorrelation time + constant of a gesture''s acceleration and the variance of the gesture acceleration + are used to tune the model to various performance modes. Multiple Kalman Filters + tuned to each gesture type are run in parallel and an Interacting Multiple Model + (IMM) is implemented to decide on the best combination of filter outputs to track + the current gesture. Our goal is to accurately track Radiodrum gestures through + noisy measurement signals. ' + address: 'New York City, NY, United States' + author: 'Benning, Manjinder S. and McGuire, Michael and Driessen, Peter' + bibtex: "@inproceedings{Benning2007,\n abstract = {This paper describes the design\ + \ and experimentation of a Kalman Filter used to improve position tracking of\ + \ a 3-D gesture-based musical controller known as the Radiodrum. 
The Singer dynamic\ + \ model for target tracking is used to describe the evolution of a Radiodrum's\ + \ stick position in time. The autocorrelation time constant of a gesture's acceleration\ + \ and the variance of the gesture acceleration are used to tune the model to various\ + \ performance modes. Multiple Kalman Filters tuned to each gesture type are run\ + \ in parallel and an Interacting Multiple Model (IMM) is implemented to decide\ + \ on the best combination of filter outputs to track the current gesture. Our\ + \ goal is to accurately track Radiodrum gestures through noisy measurement signals.\ + \ },\n address = {New York City, NY, United States},\n author = {Benning, Manjinder\ + \ S. and McGuire, Michael and Driessen, Peter},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177043},\n issn = {2220-4806},\n keywords = {Kalman Filtering,\ + \ Radiodrum, Gesture Tracking, Interacting Multiple Model INTRODUCTION Intention\ + \ is a key aspect of traditional music performance. The ability for an artist\ + \ to reliably reproduce sound, pitch, rhythms, and emotion is paramount to the\ + \ design of any instrument. With the },\n pages = {334--337},\n title = {Improved\ + \ Position Tracking of a {3-D} Gesture-Based Musical Controller Using a {Kalman}\ + \ Filter},\n url = {http://www.nime.org/proceedings/2007/nime2007_334.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176545 + doi: 10.5281/zenodo.1177043 issn: 2220-4806 - keywords: 'Musical instruments, expression, design principle, degree of freedom, - dynamic specialization' - pages: 164--169 - title: 'Why Always Versatile? 
Dynamically Customizable Musical Instruments Facilitate - Expressive Performances' - url: http://www.nime.org/proceedings/2003/nime2003_164.pdf - year: 2003 + keywords: 'Kalman Filtering, Radiodrum, Gesture Tracking, Interacting Multiple Model + INTRODUCTION Intention is a key aspect of traditional music performance. The ability + for an artist to reliably reproduce sound, pitch, rhythms, and emotion is paramount + to the design of any instrument. With the ' + pages: 334--337 + title: Improved Position Tracking of a 3-D Gesture-Based Musical Controller Using + a Kalman Filter + url: http://www.nime.org/proceedings/2007/nime2007_334.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: NewtonDunn2003 - abstract: 'In this paper, we introduce Block Jam, a Tangible UserInterface that - controls a dynamic polyrhythmic sequencerusing 26 physical artifacts. These physical - artifacts, that wecall blocks, are a new type of input device for manipulatingan - interactive music system. The blocks'' functional andtopological statuses are - tightly coupled to an ad hocsequencer, interpreting the user''s arrangement of - the blocksas meaningful musical phrases and structures.We demonstrate that we - have created both a tangible andvisual language that enables both the novice and - musicallytrained users by taking advantage of both their explorativeand intuitive - abilities. The tangible nature of the blocks andthe intuitive interface promotes - face-to-face collaborationand social interaction within a single system. The principleof - collaboration is further extended by linking two BlockJam systems together to - create a network.We discuss our project vision, design rational, relatedworks, - and the implementation of Block Jam prototypes.Figure 1. 
A cluster of blocks, - note the mother block on thebottom right' - address: 'Montreal, Canada' - author: 'Newton-Dunn, Henry and Nakano, Hiroaki and Gibson, James' - bibtex: "@inproceedings{NewtonDunn2003,\n abstract = {In this paper, we introduce\ - \ Block Jam, a Tangible UserInterface that controls a dynamic polyrhythmic sequencerusing\ - \ 26 physical artifacts. These physical artifacts, that wecall blocks, are a new\ - \ type of input device for manipulatingan interactive music system. The blocks'\ - \ functional andtopological statuses are tightly coupled to an ad hocsequencer,\ - \ interpreting the user's arrangement of the blocksas meaningful musical phrases\ - \ and structures.We demonstrate that we have created both a tangible andvisual\ - \ language that enables both the novice and musicallytrained users by taking advantage\ - \ of both their explorativeand intuitive abilities. The tangible nature of the\ - \ blocks andthe intuitive interface promotes face-to-face collaborationand social\ - \ interaction within a single system. The principleof collaboration is further\ - \ extended by linking two BlockJam systems together to create a network.We discuss\ - \ our project vision, design rational, relatedworks, and the implementation of\ - \ Block Jam prototypes.Figure 1. A cluster of blocks, note the mother block on\ - \ thebottom right},\n address = {Montreal, Canada},\n author = {Newton-Dunn, Henry\ - \ and Nakano, Hiroaki and Gibson, James},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176543},\n issn = {2220-4806},\n keywords = {Tangible\ - \ interface, modular system, polyrhythmic sequencer. VISION We believe in a future\ - \ where music will no longer be considered a linear composition, but a dynamic\ - \ structure, and musical composition will extend to interaction. 
We also believe\ - \ that through the },\n pages = {170--177},\n title = {Block Jam: A Tangible Interface\ - \ for Interactive Music},\n url = {http://www.nime.org/proceedings/2003/nime2003_170.pdf},\n\ - \ year = {2003}\n}\n" + ID: Keating2007 + abstract: 'In this paper, design scenarios made possible by the use of an interactive + illuminated floor as the basis of an audiovisual environment are presented. By + interfacing a network of pressure sensitive, light-emitting tiles with a 7.1 channel + speaker system and requisite audio software, many avenues for collaborative expression + emerge, as do heretofore unexplored modes of multiplayer music and dance gaming. + By giving users light and sound cues that both guide and respond to their movement, + a rich environment is created that playfully integrates the auditory, the visual, + and the kinesthetic into a unified interactive experience.' + address: 'New York City, NY, United States' + author: 'Keating, Noah H.' + bibtex: "@inproceedings{Keating2007,\n abstract = {In this paper, design scenarios\ + \ made possible by the use of an interactive illuminated floor as the basis of\ + \ an audiovisual environment are presented. By interfacing a network of pressure\ + \ sensitive, light-emitting tiles with a 7.1 channel speaker system and requisite\ + \ audio software, many avenues for collaborative expression emerge, as do heretofore\ + \ unexplored modes of multiplayer music and dance gaming. 
By giving users light\ + \ and sound cues that both guide and respond to their movement, a rich environment\ + \ is created that playfully integrates the auditory, the visual, and the kinesthetic\ + \ into a unified interactive experience.},\n address = {New York City, NY, United\ + \ States},\n author = {Keating, Noah H.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177141},\n\ + \ issn = {2220-4806},\n keywords = {Responsive Environments, Audiovisual Play,\ + \ Kinetic Games, Movement Rich Game Play, Immersive Dance, Smart Floor },\n pages\ + \ = {338--343},\n title = {The Lambent Reactive : An Audiovisual Environment for\ + \ Kinesthetic Playforms},\n url = {http://www.nime.org/proceedings/2007/nime2007_338.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176543 + doi: 10.5281/zenodo.1177141 issn: 2220-4806 - keywords: 'Tangible interface, modular system, polyrhythmic sequencer. VISION We - believe in a future where music will no longer be considered a linear composition, - but a dynamic structure, and musical composition will extend to interaction. We - also believe that through the ' - pages: 170--177 - title: 'Block Jam: A Tangible Interface for Interactive Music' - url: http://www.nime.org/proceedings/2003/nime2003_170.pdf - year: 2003 + keywords: 'Responsive Environments, Audiovisual Play, Kinetic Games, Movement Rich + Game Play, Immersive Dance, Smart Floor ' + pages: 338--343 + title: 'The Lambent Reactive : An Audiovisual Environment for Kinesthetic Playforms' + url: http://www.nime.org/proceedings/2007/nime2007_338.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Kartadinata2003 - abstract: 'In this paper I present the gluiph, a single-board computer thatwas conceived - as a platform for integrated electronic musicalinstruments. 
It aims to provide - new instruments as well asexisting ones with a stronger identity by untethering - themfrom the often lab-like stage setups built around general purpose computers. - The key additions to its core are a flexiblesensor subsystem and multi-channel - audio I/O. In contrast toother stand-alone approaches it retains a higher degree - offlexibility by supporting popular music programming languages, with Miller Puckette''s - pd [1] being the current focus.' - address: 'Montreal, Canada' - author: 'Kartadinata, Sukandar' - bibtex: "@inproceedings{Kartadinata2003,\n abstract = {In this paper I present the\ - \ gluiph, a single-board computer thatwas conceived as a platform for integrated\ - \ electronic musicalinstruments. It aims to provide new instruments as well asexisting\ - \ ones with a stronger identity by untethering themfrom the often lab-like stage\ - \ setups built around general purpose computers. The key additions to its core\ - \ are a flexiblesensor subsystem and multi-channel audio I/O. 
In contrast toother\ - \ stand-alone approaches it retains a higher degree offlexibility by supporting\ - \ popular music programming languages, with Miller Puckette's pd [1] being the\ - \ current focus.},\n address = {Montreal, Canada},\n author = {Kartadinata, Sukandar},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176521},\n\ - \ issn = {2220-4806},\n keywords = {Musical instrument, integration, single-board\ - \ computer (SBC), embedded system, stand-alone system, pd, DSP, sensor, latency,\ - \ flexibility, coherency.},\n pages = {180--183},\n title = {The Gluiph: a Nucleus\ - \ for Integrated Instruments},\n url = {http://www.nime.org/proceedings/2003/nime2003_180.pdf},\n\ - \ year = {2003}\n}\n" + ID: Stark2007 + abstract: 'We present a new group of audio effects that use beat tracking, the detection + of beats in an audio signal, to relate effectparameters to the beats in an input + signal. Conventional audio effects are augmented so that their operation is related + tothe output of a beat tracking system. We present a temposynchronous delay effect + and a set of beat synchronous lowfrequency oscillator effects including tremolo, + vibrato andauto-wah. All effects are implemented in real-time as VSTplug-ins to + allow for their use in live performance.' + address: 'New York City, NY, United States' + author: 'Stark, Adam M. and Plumbley, Mark D. and Davies, Matthew E.' + bibtex: "@inproceedings{Stark2007,\n abstract = {We present a new group of audio\ + \ effects that use beat tracking, the detection of beats in an audio signal, to\ + \ relate effectparameters to the beats in an input signal. Conventional audio\ + \ effects are augmented so that their operation is related tothe output of a beat\ + \ tracking system. 
We present a temposynchronous delay effect and a set of beat\ + \ synchronous lowfrequency oscillator effects including tremolo, vibrato andauto-wah.\ + \ All effects are implemented in real-time as VSTplug-ins to allow for their use\ + \ in live performance.},\n address = {New York City, NY, United States},\n author\ + \ = {Stark, Adam M. and Plumbley, Mark D. and Davies, Matthew E.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177249},\n issn = {2220-4806},\n keywords\ + \ = {a beat-synchronous tremolo effect,audio effects,beat tracking,figure 1,im-,nime07,plemented\ + \ as a vst,plug-in,real-time,the rate is controlled,vst plug-in},\n pages = {344--345},\n\ + \ title = {Real-Time Beat-Synchronous Audio Effects},\n url = {http://www.nime.org/proceedings/2007/nime2007_344.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176521 + doi: 10.5281/zenodo.1177249 issn: 2220-4806 - keywords: 'Musical instrument, integration, single-board computer (SBC), embedded - system, stand-alone system, pd, DSP, sensor, latency, flexibility, coherency.' 
- pages: 180--183 - title: 'The Gluiph: a Nucleus for Integrated Instruments' - url: http://www.nime.org/proceedings/2003/nime2003_180.pdf - year: 2003 + keywords: 'a beat-synchronous tremolo effect,audio effects,beat tracking,figure + 1,im-,nime07,plemented as a vst,plug-in,real-time,the rate is controlled,vst plug-in' + pages: 344--345 + title: Real-Time Beat-Synchronous Audio Effects + url: http://www.nime.org/proceedings/2007/nime2007_344.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Couturier2003 - abstract: 'In this paper, we describe a new interface for musicalperformance, using - the interaction with a graphical userinterface in a powerful manner: the user - directly touches ascreen where graphical objects are displayed and can useseveral - fingers simultaneously to interact with the objects. Theconcept of this interface - is based on the superposition of thegesture spatial place and the visual feedback - spatial place; i tgives the impression that the graphical objects are real. Thisconcept - enables a huge freedom in designing interfaces. Thegesture device we have created - gives the position of fourfingertips using 3D sensors and the data is performed - in theMax/MSP environment. We have realized two practicalexamples of musical use - of such a device, using PhotosonicSynthesis and Scanned Synthesis.' - address: 'Montreal, Canada' - author: 'Couturier, Jean-Michel and Arfib, Daniel' - bibtex: "@inproceedings{Couturier2003,\n abstract = {In this paper, we describe\ - \ a new interface for musicalperformance, using the interaction with a graphical\ - \ userinterface in a powerful manner: the user directly touches ascreen where\ - \ graphical objects are displayed and can useseveral fingers simultaneously to\ - \ interact with the objects. Theconcept of this interface is based on the superposition\ - \ of thegesture spatial place and the visual feedback spatial place; i tgives\ - \ the impression that the graphical objects are real. 
Thisconcept enables a huge\ - \ freedom in designing interfaces. Thegesture device we have created gives the\ - \ position of fourfingertips using 3D sensors and the data is performed in theMax/MSP\ - \ environment. We have realized two practicalexamples of musical use of such a\ - \ device, using PhotosonicSynthesis and Scanned Synthesis.},\n address = {Montreal,\ - \ Canada},\n author = {Couturier, Jean-Michel and Arfib, Daniel},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176501},\n\ - \ issn = {2220-4806},\n keywords = {HCI, touch screen, multimodality, mapping,\ - \ direct interaction, gesture devices, bimanual interaction, two-handed, Max/MSP.\ - \ },\n pages = {184--187},\n title = {Pointing Fingers: Using Multiple Direct\ - \ Interactions with Visual Objects to Perform Music},\n url = {http://www.nime.org/proceedings/2003/nime2003_184.pdf},\n\ - \ year = {2003}\n}\n" + ID: Wulfson2007 + abstract: 'This article presents various custom software tools called Automatic + Notation Generators (ANG''s) developed by the authors to aid in the creation of + algorithmic instrumental compositions. The unique possibilities afforded by ANG + software are described, along with relevant examples of their compositional output. + These avenues of exploration include: mappings of spectral data directly into + notated music, the creation of software transcribers that enable users to generate + multiple realizations of algorithmic compositions, and new types of spontaneous + performance with live generated screen-based music notation. The authors present + their existing software tools along with suggestions for future research and artistic + inquiry. ' + address: 'New York City, NY, United States' + author: 'Wulfson, Harris and Barrett, G. 
Douglas and Winter, Michael' + bibtex: "@inproceedings{Wulfson2007,\n abstract = {This article presents various\ + \ custom software tools called Automatic Notation Generators (ANG's) developed\ + \ by the authors to aid in the creation of algorithmic instrumental compositions.\ + \ The unique possibilities afforded by ANG software are described, along with\ + \ relevant examples of their compositional output. These avenues of exploration\ + \ include: mappings of spectral data directly into notated music, the creation\ + \ of software transcribers that enable users to generate multiple realizations\ + \ of algorithmic compositions, and new types of spontaneous performance with live\ + \ generated screen-based music notation. The authors present their existing software\ + \ tools along with suggestions for future research and artistic inquiry. },\n\ + \ address = {New York City, NY, United States},\n author = {Wulfson, Harris and\ + \ Barrett, G. Douglas and Winter, Michael},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176861},\n issn = {2220-4806},\n keywords = {nime07},\n pages\ + \ = {346--351},\n title = {Automatic Notation Generators},\n url = {http://www.nime.org/proceedings/2007/nime2007_346.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176501 + doi: 10.5281/zenodo.1176861 issn: 2220-4806 - keywords: 'HCI, touch screen, multimodality, mapping, direct interaction, gesture - devices, bimanual interaction, two-handed, Max/MSP. 
' - pages: 184--187 - title: 'Pointing Fingers: Using Multiple Direct Interactions with Visual Objects - to Perform Music' - url: http://www.nime.org/proceedings/2003/nime2003_184.pdf - year: 2003 + keywords: nime07 + pages: 346--351 + title: Automatic Notation Generators + url: http://www.nime.org/proceedings/2007/nime2007_346.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Singer2003a - abstract: 'This paper describes the LEMUR GuitarBot, a robotic musical instrument - composed of four independent MIDI controllable single-stringed movable bridge - units. Design methodology, development and fabrication process, control specification - and results are discussed.' - address: 'Montreal, Canada' - author: 'Singer, Eric and Larke, Kevin and Bianciardi, David' - bibtex: "@inproceedings{Singer2003a,\n abstract = {This paper describes the LEMUR\ - \ GuitarBot, a robotic musical instrument composed of four independent MIDI controllable\ - \ single-stringed movable bridge units. Design methodology, development and fabrication\ - \ process, control specification and results are discussed.},\n address = {Montreal,\ - \ Canada},\n author = {Singer, Eric and Larke, Kevin and Bianciardi, David},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176565},\n\ - \ issn = {2220-4806},\n keywords = {Robotics, interactive, performance, MIDI,\ - \ string instrument.},\n pages = {188--191},\n title = {{LEMUR} GuitarBot: {MIDI}\ - \ Robotic String Instrument},\n url = {http://www.nime.org/proceedings/2003/nime2003_188.pdf},\n\ - \ year = {2003}\n}\n" + ID: Young2007 + abstract: 'This paper presents a newly created database containing calibrated gesture + and audio data corresponding to various violin bowstrokes, as well as video and + motion capture data in some cases. The database is web-accessible and searchable + by keywords and subject. 
It also has several important features designed to improve + accessibility to the data and to foster collaboration between researchers in fields + related to bowed string synthesis, acoustics, and gesture.' + address: 'New York City, NY, United States' + author: 'Young, Diana and Deshmane, Anagha' + bibtex: "@inproceedings{Young2007,\n abstract = {This paper presents a newly created\ + \ database containing calibrated gesture and audio data corresponding to various\ + \ violin bowstrokes, as well as video and motion capture data in some cases. The\ + \ database is web-accessible and searchable by keywords and subject. It also has\ + \ several important features designed to improve accessibility to the data and\ + \ to foster collaboration between researchers in fields related to bowed string\ + \ synthesis, acoustics, and gesture.},\n address = {New York City, NY, United\ + \ States},\n author = {Young, Diana and Deshmane, Anagha},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179481},\n issn = {2220-4806},\n keywords = {violin,\ + \ bowed string, bowstroke, bowing, bowing parameters, technique, gesture, audio\ + \ },\n pages = {352--357},\n title = {Bowstroke Database : A Web-Accessible Archive\ + \ of Violin Bowing Data},\n url = {http://www.nime.org/proceedings/2007/nime2007_352.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176565 + doi: 10.5281/zenodo.1179481 issn: 2220-4806 - keywords: 'Robotics, interactive, performance, MIDI, string instrument.' 
- pages: 188--191 - title: 'LEMUR GuitarBot: MIDI Robotic String Instrument' - url: http://www.nime.org/proceedings/2003/nime2003_188.pdf - year: 2003 + keywords: 'violin, bowed string, bowstroke, bowing, bowing parameters, technique, + gesture, audio ' + pages: 352--357 + title: 'Bowstroke Database : A Web-Accessible Archive of Violin Bowing Data' + url: http://www.nime.org/proceedings/2007/nime2007_352.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Peiper2003 - abstract: We introduce a software system for real-time classification of violin - bow strokes (articulations). The system uses an electromagnetic motion tracking - system to capture raw gesture data. The data is analyzed to extract stroke features. - These features are provided to a decision tree for training and classification. - Feedback from feature and classification data is presented visually in an immersive - graphic environment. - address: 'Montreal, Canada' - author: 'Peiper, Chad and Warden, David and Garnett, Guy' - bibtex: "@inproceedings{Peiper2003,\n abstract = {We introduce a software system\ - \ for real-time classification of violin bow strokes (articulations). The system\ - \ uses an electromagnetic motion tracking system to capture raw gesture data.\ - \ The data is analyzed to extract stroke features. These features are provided\ - \ to a decision tree for training and classification. 
Feedback from feature and\ - \ classification data is presented visually in an immersive graphic environment.},\n\ - \ address = {Montreal, Canada},\n author = {Peiper, Chad and Warden, David and\ - \ Garnett, Guy},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi =\ - \ {10.5281/zenodo.1176553},\n issn = {2220-4806},\n pages = {192--196},\n title\ - \ = {An Interface for Real-time Classification of Articulations Produced by Violin\ - \ Bowing},\n url = {http://www.nime.org/proceedings/2003/nime2003_192.pdf},\n\ - \ year = {2003}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176553 - issn: 2220-4806 - pages: 192--196 - title: An Interface for Real-time Classification of Articulations Produced by Violin - Bowing - url: http://www.nime.org/proceedings/2003/nime2003_192.pdf - year: 2003 + ID: Schacher2007 + abstract: 'This paper presents a methodology and a set of tools for gesture control + of sources in 3D surround sound. The techniques for rendering acoustic events + on multi-speaker or headphone-based surround systems have evolved considerably, + making it possible to use them in real-time performances on light equipment. Controlling + the placement of sound sources is usually done in idiosyncratic ways and has not + yet been fully explored and formalized. This issue is addressed here with the + proposition of a methodical approach. The mapping of gestures to source motion + is implemented by giving the sources physical object properties and manipulating + these characteristics with standard geometrical transforms through hierarchical + or emergent relationships. ' + address: 'New York City, NY, United States' + author: 'Schacher, Jan C.' 
+ bibtex: "@inproceedings{Schacher2007,\n abstract = {This paper presents a methodology\ + \ and a set of tools for gesture control of sources in 3D surround sound. The\ + \ techniques for rendering acoustic events on multi-speaker or headphone-based\ + \ surround systems have evolved considerably, making it possible to use them in\ + \ real-time performances on light equipment. Controlling the placement of sound\ + \ sources is usually done in idiosyncratic ways and has not yet been fully explored\ + \ and formalized. This issue is addressed here with the proposition of a methodical\ + \ approach. The mapping of gestures to source motion is implemented by giving\ + \ the sources physical object properties and manipulating these characteristics\ + \ with standard geometrical transforms through hierarchical or emergent relationships.\ + \ },\n address = {New York City, NY, United States},\n author = {Schacher, Jan\ + \ C.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177241},\n issn = {2220-4806},\n\ + \ keywords = {Gesture, Surround Sound, Mapping, Trajectory, Transform Matrix,\ + \ Tree Hierarchy, Emergent Structures. },\n pages = {358--362},\n title = {Gesture\ + \ Control of Sounds in {3D} Space},\n url = {http://www.nime.org/proceedings/2007/nime2007_358.pdf},\n\ + \ year = {2007}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177241 + issn: 2220-4806 + keywords: 'Gesture, Surround Sound, Mapping, Trajectory, Transform Matrix, Tree + Hierarchy, Emergent Structures. ' + pages: 358--362 + title: Gesture Control of Sounds in 3D Space + url: http://www.nime.org/proceedings/2007/nime2007_358.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Settel2003 - abstract: 'The subject of instrument design is quite broad. Much work has been done - at Ircam, MIT, CNMAT, Stanford and elsewhere in the area. 
In this paper we will - present our own developed approach to designing and using instruments in composition - and performance for the authors’ “Convolution Brothers” pieces. The presentation - of this paper is accompanied by a live Convolution Brothers demonstration.' - address: 'Montreal, Canada' - author: 'Settel, Zack and Lippe, Cort' - bibtex: "@inproceedings{Settel2003,\n abstract = {The subject of instrument design\ - \ is quite broad. Much work has been done at Ircam, MIT, CNMAT, Stanford and elsewhere\ - \ in the area. In this paper we will present our own developed approach to designing\ - \ and using instruments in composition and performance for the authors’ “Convolution\ - \ Brothers” pieces. The presentation of this paper is accompanied by a live Convolution\ - \ Brothers demonstration.},\n address = {Montreal, Canada},\n author = {Settel,\ - \ Zack and Lippe, Cort},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ - \ = {10.5281/zenodo.1176559},\n issn = {2220-4806},\n pages = {197--200},\n title\ - \ = {Convolution Brother's Instrument Design},\n url = {http://www.nime.org/proceedings/2003/nime2003_197.pdf},\n\ - \ year = {2003}\n}\n" + ID: Porres2007 + abstract: 'This work presents an interactive device to control an adaptive tuning + and synthesis system. The gestural controller is based on the theremin concept + in which only an antenna is used as a proximity sensor. This interactive process + is guided by sensorial consonance curves and adaptive tuning related to psychoacoustical + studies. We used an algorithm to calculate the dissonance values according to + amplitudes and frequencies of a given sound spectrum. The theoretical background + is presented followed by interactive composition strategies and sound results. ' + address: 'New York City, NY, United States' + author: 'Porres, Alexandre T. 
and Manzolli, Jonatas' + bibtex: "@inproceedings{Porres2007,\n abstract = {This work presents an interactive\ + \ device to control an adaptive tuning and synthesis system. The gestural controller\ + \ is based on the theremin concept in which only an antenna is used as a proximity\ + \ sensor. This interactive process is guided by sensorial consonance curves and\ + \ adaptive tuning related to psychoacoustical studies. We used an algorithm to\ + \ calculate the dissonance values according to amplitudes and frequencies of a\ + \ given sound spectrum. The theoretical background is presented followed by interactive\ + \ composition strategies and sound results. },\n address = {New York City, NY,\ + \ United States},\n author = {Porres, Alexandre T. and Manzolli, Jonatas},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177223},\n issn = {2220-4806},\n keywords\ + \ = {Interaction, adaptive tuning, theremin, sensorial dissonance, synthesis.\ + \ },\n pages = {363--366},\n title = {Adaptive Tuning Using Theremin as Gestural\ + \ Controller},\n url = {http://www.nime.org/proceedings/2007/nime2007_363.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176559 + doi: 10.5281/zenodo.1177223 issn: 2220-4806 - pages: 197--200 - title: Convolution Brother's Instrument Design - url: http://www.nime.org/proceedings/2003/nime2003_197.pdf - year: 2003 + keywords: 'Interaction, adaptive tuning, theremin, sensorial dissonance, synthesis. 
' + pages: 363--366 + title: Adaptive Tuning Using Theremin as Gestural Controller + url: http://www.nime.org/proceedings/2007/nime2007_363.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Choi2003 - abstract: 'This paper suggests that there is a need for formalizing acomponent model - of gestural primitive throughput in musicinstrument design. The purpose of this - model is to construct acoherent and meaningful interaction between performer andinstrument. - Such a model has been implicit in previous researchfor interactive performance - systems. The model presented heredistinguishes gestural primitives from units - of measure ofgestures. The throughput model identifies symmetry betweenperformance - gestures and musical gestures, and indicates a rolefor gestural primitives when - a performer navigates regions ofstable oscillations in a musical instrument. The - use of a highdimensional interface tool is proposed for instrument design, forfine-tuning - the mapping between movement sensor data andsound synthesis control data.' - address: 'Montreal, Canada' - author: 'Choi, Insook' - bibtex: "@inproceedings{Choi2003,\n abstract = {This paper suggests that there is\ - \ a need for formalizing acomponent model of gestural primitive throughput in\ - \ musicinstrument design. The purpose of this model is to construct acoherent\ - \ and meaningful interaction between performer andinstrument. Such a model has\ - \ been implicit in previous researchfor interactive performance systems. The model\ - \ presented heredistinguishes gestural primitives from units of measure ofgestures.\ - \ The throughput model identifies symmetry betweenperformance gestures and musical\ - \ gestures, and indicates a rolefor gestural primitives when a performer navigates\ - \ regions ofstable oscillations in a musical instrument. 
The use of a highdimensional\ - \ interface tool is proposed for instrument design, forfine-tuning the mapping\ - \ between movement sensor data andsound synthesis control data.},\n address =\ - \ {Montreal, Canada},\n author = {Choi, Insook},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n date\ - \ = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176499},\n issn = {2220-4806},\n\ - \ keywords = {Performance gestures, musical gestures, instrument design, mapping,\ - \ tuning, affordances, stability. },\n pages = {201--204},\n title = {A Component\ - \ Model of Gestural Primitive Throughput},\n url = {http://www.nime.org/proceedings/2003/nime2003_201.pdf},\n\ - \ year = {2003}\n}\n" + ID: Hsu2007 + abstract: 'In previous publications (see for example [2] and [3]), we described + an interactive music system, designed to improvise with saxophonist John Butcher; + our system analyzes timbral and gestural features in real-time, and uses this + information to guide response generation. This paper overviews our recent work + with the system''s interaction management component (IMC). We explore several + options for characterizing improvisation at a higher level, and managing decisions + for interactive performance in a rich timbral environment. We developed a simple, + efficient framework using a small number of features suggested by recent work + in mood modeling in music. We describe and evaluate the first version of the IMC, + which was used in performance at the Live Algorithms for Music (LAM) conference + in December 2006. We touch on developments on the system since LAM, and discuss + future plans to address perceived shortcomings in responsiveness, and the ability + of the system to make long-term adaptations. 
' + address: 'New York City, NY, United States' + author: 'Hsu, William' + bibtex: "@inproceedings{Hsu2007,\n abstract = {In previous publications (see for\ + \ example [2] and [3]), we described an interactive music system, designed to\ + \ improvise with saxophonist John Butcher; our system analyzes timbral and gestural\ + \ features in real-time, and uses this information to guide response generation.\ + \ This paper overviews our recent work with the system's interaction management\ + \ component (IMC). We explore several options for characterizing improvisation\ + \ at a higher level, and managing decisions for interactive performance in a rich\ + \ timbral environment. We developed a simple, efficient framework using a small\ + \ number of features suggested by recent work in mood modeling in music. We describe\ + \ and evaluate the first version of the IMC, which was used in performance at\ + \ the Live Algorithms for Music (LAM) conference in December 2006. We touch on\ + \ developments on the system since LAM, and discuss future plans to address perceived\ + \ shortcomings in responsiveness, and the ability of the system to make long-term\ + \ adaptations. },\n address = {New York City, NY, United States},\n author = {Hsu,\ + \ William},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177123},\n issn\ + \ = {2220-4806},\n keywords = {Interactive music systems, timbral analysis, free\ + \ improvisation. 
},\n pages = {367--370},\n title = {Design Issues in Interaction\ + \ Modeling for Free Improvisation},\n url = {http://www.nime.org/proceedings/2007/nime2007_367.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176499 + doi: 10.5281/zenodo.1177123 issn: 2220-4806 - keywords: 'Performance gestures, musical gestures, instrument design, mapping, tuning, - affordances, stability. ' - pages: 201--204 - title: A Component Model of Gestural Primitive Throughput - url: http://www.nime.org/proceedings/2003/nime2003_201.pdf - year: 2003 + keywords: 'Interactive music systems, timbral analysis, free improvisation. ' + pages: 367--370 + title: Design Issues in Interaction Modeling for Free Improvisation + url: http://www.nime.org/proceedings/2007/nime2007_367.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: PalacioQuintin2003 - abstract: 'The Hyper-Flute is a standard Boehm flute (the model used is a Powell - 2100, made in Boston) extended via electronic sensors that link it to a computer, - enabling control of digital sound processing parameters while performing. The - instrument’s electronic extensions are described in some detail, and performance - applications are briefly discussed.' - address: 'Montreal, Canada' - author: 'Palacio-Quintin, Cléo' - bibtex: "@inproceedings{PalacioQuintin2003,\n abstract = {The Hyper-Flute is a standard\ - \ Boehm flute (the model used is a Powell 2100, made in Boston) extended via electronic\ - \ sensors that link it to a computer, enabling control of digital sound processing\ - \ parameters while performing. 
The instrument’s electronic extensions are described\ - \ in some detail, and performance applications are briefly discussed.},\n address\ - \ = {Montreal, Canada},\n author = {Palacio-Quintin, Cl\\'{e}o},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176549},\n issn = {2220-4806},\n\ - \ keywords = {Digital sound processing, flute, hyper-instrument, interactive music,\ - \ live electronics, performance, sensors.},\n pages = {206--207},\n title = {The\ - \ Hyper-Flute},\n url = {http://www.nime.org/proceedings/2003/nime2003_206.pdf},\n\ - \ year = {2003}\n}\n" + ID: Groux2007 + abstract: 'Until recently, the sonification of Virtual Environments had often been + reduced to its simplest expression. Too often soundscapes and background music + are predetermined, repetitive and somewhat predictable. Yet, there is room for + more complex and interesting sonification schemes that can improve the sensation + of presence in a Virtual Environment. In this paper we propose a system that automatically + generates original background music in real-time called VR-RoBoser. As a test + case we present the application of VR-RoBoser to a dynamic avatar that explores + its environment. We show that the musical events are directly and continuously + generated and influenced by the behavior of the avatar in three-dimensional virtual + space, generating a context dependent sonification. ' + address: 'New York City, NY, United States' + author: 'le Groux, Sylvain and Manzolli, Jonatas and Verschure, Paul F.' + bibtex: "@inproceedings{Groux2007,\n abstract = {Until recently, the sonification\ + \ of Virtual Environments had often been reduced to its simplest expression. Too\ + \ often soundscapes and background music are predetermined, repetitive and somewhat\ + \ predictable. 
Yet, there is room for more complex and interesting sonification\ + \ schemes that can improve the sensation of presence in a Virtual Environment.\ + \ In this paper we propose a system that automatically generates original background\ + \ music in real-time called VR-RoBoser. As a test case we present the application\ + \ of VR-RoBoser to a dynamic avatar that explores its environment. We show that\ + \ the musical events are directly and continuously generated and influenced by\ + \ the behavior of the avatar in three-dimensional virtual space, generating a\ + \ context dependent sonification. },\n address = {New York City, NY, United States},\n\ + \ author = {le Groux, Sylvain and Manzolli, Jonatas and Verschure, Paul F.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177101},\n issn = {2220-4806},\n\ + \ keywords = {Real-time Composition, Interactive Sonification, Real-time Neural\ + \ Processing, Multimedia, Virtual Environment, Avatar. },\n pages = {371--374},\n\ + \ title = {VR-RoBoser : Real-Time Adaptive Sonification of Virtual Environments\ + \ Based on Avatar Behavior},\n url = {http://www.nime.org/proceedings/2007/nime2007_371.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176549 + doi: 10.5281/zenodo.1177101 issn: 2220-4806 - keywords: 'Digital sound processing, flute, hyper-instrument, interactive music, - live electronics, performance, sensors.' - pages: 206--207 - title: The Hyper-Flute - url: http://www.nime.org/proceedings/2003/nime2003_206.pdf - year: 2003 + keywords: 'Real-time Composition, Interactive Sonification, Real-time Neural Processing, + Multimedia, Virtual Environment, Avatar. 
' + pages: 371--374 + title: 'VR-RoBoser : Real-Time Adaptive Sonification of Virtual Environments Based + on Avatar Behavior' + url: http://www.nime.org/proceedings/2007/nime2007_371.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Allison2003 - abstract: 'SensorBox is a low cost, low latency, high-resolutioninterface for obtaining - gestural data from sensors for use inrealtime with a computer-based interactive - system. Wediscuss its implementation, benefits, current limitations, andcompare - it with several popular interfaces for gestural dataacquisition.' - address: 'Montreal, Canada' - author: 'Allison, Jesse T. and Place, Timothy' - bibtex: "@inproceedings{Allison2003,\n abstract = {SensorBox is a low cost, low\ - \ latency, high-resolutioninterface for obtaining gestural data from sensors for\ - \ use inrealtime with a computer-based interactive system. Wediscuss its implementation,\ - \ benefits, current limitations, andcompare it with several popular interfaces\ - \ for gestural dataacquisition.},\n address = {Montreal, Canada},\n author = {Allison,\ - \ Jesse T. and Place, Timothy},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176482},\n issn = {2220-4806},\n keywords = {Sensors,\ - \ gestural acquisition, audio interface, interactive music, SensorBox. },\n pages\ - \ = {208--210},\n title = {SensorBox: Practical Audio Interface for Gestural Performance},\n\ - \ url = {http://www.nime.org/proceedings/2003/nime2003_208.pdf},\n year = {2003}\n\ - }\n" + ID: Steiner2007 + abstract: 'In this paper we discuss our progress on the HID toolkit, a collection + of software modules for the Pure Data and Max/MSP programming environments that + provide unified, user-friendly and cross-platform access to human interface devices + (HIDs) such as joysticks, digitizer tablets, and stomp-pads. 
These HIDs are ubiquitous, + inexpensive and capable of sensing a wide range of human gesture, making them + appealing interfaces for interactive media control. However, it is difficult to + utilize many of these devices for custom-made applications, particularly for novices. + The modules we discuss in this paper are [hidio], which handles incoming and outgoing + data between a patch and a HID, and [input noticer], which monitors HID plug/unplug + events. The goal in creating these modules is to preserve maximal flexibility + in accessing the input and output capabilities of HIDs, in a manner that is ap- + proachable for both sophisticated and beginning designers. This paper documents + our design notes and implementa- tion considerations, current progress, and ideas + for future extensions to the HID toolkit.' + address: 'New York City, NY, United States' + author: 'Steiner, Hans-Christoph and Merrill, David and Matthes, Olaf' + bibtex: "@inproceedings{Steiner2007,\n abstract = {In this paper we discuss our\ + \ progress on the HID toolkit, a collection of software modules for the Pure Data\ + \ and Max/MSP programming environments that provide unified, user-friendly and\ + \ cross-platform access to human interface devices (HIDs) such as joysticks, digitizer\ + \ tablets, and stomp-pads. These HIDs are ubiquitous, inexpensive and capable\ + \ of sensing a wide range of human gesture, making them appealing interfaces for\ + \ interactive media control. However, it is difficult to utilize many of these\ + \ devices for custom-made applications, particularly for novices. The modules\ + \ we discuss in this paper are [hidio], which handles incoming and outgoing data\ + \ between a patch and a HID, and [input noticer], which monitors HID plug/unplug\ + \ events. 
The goal in creating these modules is to preserve maximal flexibility\ + \ in accessing the input and output capabilities of HIDs, in a manner that is\ + \ ap- proachable for both sophisticated and beginning designers. This paper documents\ + \ our design notes and implementa- tion considerations, current progress, and\ + \ ideas for future extensions to the HID toolkit.},\n address = {New York City,\ + \ NY, United States},\n author = {Steiner, Hans-Christoph and Merrill, David and\ + \ Matthes, Olaf},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177251},\n\ + \ issn = {2220-4806},\n keywords = {nime07},\n pages = {375--378},\n title = {A\ + \ Unified Toolkit for Accessing Human Interface Devices in Pure Data and Max /\ + \ MSP},\n url = {http://www.nime.org/proceedings/2007/nime2007_375.pdf},\n year\ + \ = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176482 + doi: 10.5281/zenodo.1177251 issn: 2220-4806 - keywords: 'Sensors, gestural acquisition, audio interface, interactive music, SensorBox. ' - pages: 208--210 - title: 'SensorBox: Practical Audio Interface for Gestural Performance' - url: http://www.nime.org/proceedings/2003/nime2003_208.pdf - year: 2003 + keywords: nime07 + pages: 375--378 + title: A Unified Toolkit for Accessing Human Interface Devices in Pure Data and + Max / MSP + url: http://www.nime.org/proceedings/2007/nime2007_375.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Baird2003 - abstract: 'This software tool, developed in Max/MSP, presentsperformers with image - files consisting of traditional notationas well as conducting in the form of video - playback. Theimpetus for this work was the desire to allow the musicalmaterial - for each performer of a given piece to differ withregard to content and tempo.' 
- address: 'Montreal, Canada' - author: 'Baird, Kevin C.' - bibtex: "@inproceedings{Baird2003,\n abstract = {This software tool, developed in\ - \ Max/MSP, presentsperformers with image files consisting of traditional notationas\ - \ well as conducting in the form of video playback. Theimpetus for this work was\ - \ the desire to allow the musicalmaterial for each performer of a given piece\ - \ to differ withregard to content and tempo.},\n address = {Montreal, Canada},\n\ - \ author = {Baird, Kevin C.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176488},\n issn = {2220-4806},\n keywords = {Open form,\ - \ notation, polymeter, polytempi, Max/MSP. },\n pages = {211--212},\n title =\ - \ {Multi-Conductor: An Onscreen Polymetrical Conducting and Notation Display System},\n\ - \ url = {http://www.nime.org/proceedings/2003/nime2003_211.pdf},\n year = {2003}\n\ - }\n" + ID: Nort2007 + abstract: 'This paper describes musical experiments aimed at designing control structures + for navigating complex and continuous sonic spaces. The focus is on sound processing + techniques which contain a high number of control parameters,and which exhibit + subtle and interesting micro-variationsand textural qualities when controlled + properly. The examples all use a simple low-dimensional controller --- a standard + graphics tablet --- and the task of initimate and subtle textural manipulations + is left to the design of proper mappings,created using a custom toolbox of mapping + functions. Thiswork further acts to contextualize past theoretical results bythe + given musical presentations, and arrives at some conclusions about the interplay + between musical intention, controlstrategies and the process of their design.' + address: 'New York City, NY, United States' + author: 'Van Nort, Doug and Wanderley, Marcelo M.' 
+ bibtex: "@inproceedings{Nort2007,\n abstract = {This paper describes musical experiments\ + \ aimed at designing control structures for navigating complex and continuous\ + \ sonic spaces. The focus is on sound processing techniques which contain a high\ + \ number of control parameters,and which exhibit subtle and interesting micro-variationsand\ + \ textural qualities when controlled properly. The examples all use a simple low-dimensional\ + \ controller --- a standard graphics tablet --- and the task of initimate and\ + \ subtle textural manipulations is left to the design of proper mappings,created\ + \ using a custom toolbox of mapping functions. Thiswork further acts to contextualize\ + \ past theoretical results bythe given musical presentations, and arrives at some\ + \ conclusions about the interplay between musical intention, controlstrategies\ + \ and the process of their design.},\n address = {New York City, NY, United States},\n\ + \ author = {Van Nort, Doug and Wanderley, Marcelo M.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179469},\n issn = {2220-4806},\n keywords = {Mapping,\ + \ Control, Sound Texture, Musical Gestures },\n pages = {379--383},\n title =\ + \ {Control Strategies for Navigation of Complex Sonic Spaces Transformation of\ + \ Resonant Models},\n url = {http://www.nime.org/proceedings/2007/nime2007_379.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176488 + doi: 10.5281/zenodo.1179469 issn: 2220-4806 - keywords: 'Open form, notation, polymeter, polytempi, Max/MSP. 
' - pages: 211--212 - title: 'Multi-Conductor: An Onscreen Polymetrical Conducting and Notation Display - System' - url: http://www.nime.org/proceedings/2003/nime2003_211.pdf - year: 2003 + keywords: 'Mapping, Control, Sound Texture, Musical Gestures ' + pages: 379--383 + title: Control Strategies for Navigation of Complex Sonic Spaces Transformation + of Resonant Models + url: http://www.nime.org/proceedings/2007/nime2007_379.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Kleinsasser2003 - abstract: 'This document describes modular software supporting livesignal processing - and sound file playback within theMax/MSP environment. Dsp.rack integrates signalprocessing, - memory buffer recording, and pre-recordedmulti-channel file playback using an - interconnected,programmable signal flow matrix, and an eight-channel i/oformat.' - address: 'Montreal, Canada' - author: 'Kleinsasser, William' - bibtex: "@inproceedings{Kleinsasser2003,\n abstract = {This document describes modular\ - \ software supporting livesignal processing and sound file playback within theMax/MSP\ - \ environment. Dsp.rack integrates signalprocessing, memory buffer recording,\ - \ and pre-recordedmulti-channel file playback using an interconnected,programmable\ - \ signal flow matrix, and an eight-channel i/oformat.},\n address = {Montreal,\ - \ Canada},\n author = {Kleinsasser, William},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n date =\ - \ {22-24 May, 2003},\n doi = {10.5281/zenodo.1176525},\n issn = {2220-4806},\n\ - \ keywords = {Digital signal processing, Max/MSP, computer music performance,\ - \ matrix routing, live performance processing. 
},\n pages = {213--215},\n title\ - \ = {Dsp.rack: Laptop-based Modular, Programmable Digital Signal Processing and\ - \ Mixing for Live Performance},\n url = {http://www.nime.org/proceedings/2003/nime2003_213.pdf},\n\ - \ year = {2003}\n}\n" + ID: Knorig2007 + abstract: 'In this paper we present the concept and prototype of a new musical interface + that utilizes the close relationship between gestural expression in the act of + painting and that of playing a musical instrument in order to provide non-musicians + the opportunity to create musical expression. A physical brush on a canvas acts + as the instrument. The characteristics of its stroke are intuitively mapped to + a conductor program, defining expressive parameters of the tone in real-time. + Two different interaction modes highlight the importance of bodily expression + in making music as well as the value of a metaphorical visual representation.' + address: 'New York City, NY, United States' + author: 'Knörig, André and Müller, Boris and Wettach, Reto' + bibtex: "@inproceedings{Knorig2007,\n abstract = {In this paper we present the concept\ + \ and prototype of a new musical interface that utilizes the close relationship\ + \ between gestural expression in the act of painting and that of playing a musical\ + \ instrument in order to provide non-musicians the opportunity to create musical\ + \ expression. A physical brush on a canvas acts as the instrument. The characteristics\ + \ of its stroke are intuitively mapped to a conductor program, defining expressive\ + \ parameters of the tone in real-time. 
Two different interaction modes highlight\ + \ the importance of bodily expression in making music as well as the value of\ + \ a metaphorical visual representation.},\n address = {New York City, NY, United\ + \ States},\n author = {Kn\\''{o}rig, Andr\\'{e} and M\\''{u}ller, Boris and Wettach,\ + \ Reto},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177155},\n issn = {2220-4806},\n\ + \ keywords = {musical interface, musical expression, expressive gesture, musical\ + \ education, natural interface },\n pages = {384--385},\n title = {Articulated\ + \ Paint : Musical Expression for Non-Musicians},\n url = {http://www.nime.org/proceedings/2007/nime2007_384.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176525 + doi: 10.5281/zenodo.1177155 issn: 2220-4806 - keywords: 'Digital signal processing, Max/MSP, computer music performance, matrix - routing, live performance processing. ' - pages: 213--215 - title: 'Dsp.rack: Laptop-based Modular, Programmable Digital Signal Processing and - Mixing for Live Performance' - url: http://www.nime.org/proceedings/2003/nime2003_213.pdf - year: 2003 + keywords: 'musical interface, musical expression, expressive gesture, musical education, + natural interface ' + pages: 384--385 + title: 'Articulated Paint : Musical Expression for Non-Musicians' + url: http://www.nime.org/proceedings/2007/nime2007_384.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Laibowitz2003 - abstract: 'This paper is a demo proposal for a new musical interfacebased on a DNA-like - double-helix and concepts in charactergeneration. It contains a description of - the interface,motivations behind developing such an interface, variousmappings - of the interface to musical applications, and therequirements to demo the interface.' 
- address: 'Montreal, Canada' - author: 'Laibowitz, Mat' - bibtex: "@inproceedings{Laibowitz2003,\n abstract = {This paper is a demo proposal\ - \ for a new musical interfacebased on a DNA-like double-helix and concepts in\ - \ charactergeneration. It contains a description of the interface,motivations\ - \ behind developing such an interface, variousmappings of the interface to musical\ - \ applications, and therequirements to demo the interface.},\n address = {Montreal,\ - \ Canada},\n author = {Laibowitz, Mat},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176527},\n issn = {2220-4806},\n keywords = {Performance,\ - \ Design, Experimentation, DNA, Big Five. },\n pages = {216--217},\n title = {BASIS:\ - \ A Genesis in Musical Interfaces},\n url = {http://www.nime.org/proceedings/2003/nime2003_216.pdf},\n\ - \ year = {2003}\n}\n" + ID: Baba2007 + abstract: 'Freqtric Drums is a new musical, corporal electronic instrument that + allows us not only to recover face-to-face communication, but also makes possible + body-to-body communication so that a self image based on the sense of being a + separate body can be signicant altered through an openness toand even a sense + of becoming part of another body. FreqtricDrums is a device that turns audiences + surrounding a performer into drums so that the performer, as a drummer, cancommunicate + with audience members as if they were a setof drums. We describe our concept and + the implementationand process of evolution of Freqtric Drums.' 
+ address: 'New York City, NY, United States' + author: 'Baba, Tetsuaki and Ushiama, Taketoshi and Tomimatsu, Kiyoshi' + bibtex: "@inproceedings{Baba2007,\n abstract = {Freqtric Drums is a new musical,\ + \ corporal electronic instrument that allows us not only to recover face-to-face\ + \ communication, but also makes possible body-to-body communication so that a\ + \ self image based on the sense of being a separate body can be signicant altered\ + \ through an openness toand even a sense of becoming part of another body. FreqtricDrums\ + \ is a device that turns audiences surrounding a performer into drums so that\ + \ the performer, as a drummer, cancommunicate with audience members as if they\ + \ were a setof drums. We describe our concept and the implementationand process\ + \ of evolution of Freqtric Drums.},\n address = {New York City, NY, United States},\n\ + \ author = {Baba, Tetsuaki and Ushiama, Taketoshi and Tomimatsu, Kiyoshi},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177037},\n issn = {2220-4806},\n keywords\ + \ = {interpersonal communication, musical instrument, interaction design, skin\ + \ contact, touch },\n pages = {386--387},\n title = {Freqtric Drums : A Musical\ + \ Instrument that Uses Skin Contact as an Interface},\n url = {http://www.nime.org/proceedings/2007/nime2007_386.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176527 + doi: 10.5281/zenodo.1177037 issn: 2220-4806 - keywords: 'Performance, Design, Experimentation, DNA, Big Five. 
' - pages: 216--217 - title: 'BASIS: A Genesis in Musical Interfaces' - url: http://www.nime.org/proceedings/2003/nime2003_216.pdf - year: 2003 + keywords: 'interpersonal communication, musical instrument, interaction design, + skin contact, touch ' + pages: 386--387 + title: 'Freqtric Drums : A Musical Instrument that Uses Skin Contact as an Interface' + url: http://www.nime.org/proceedings/2007/nime2007_386.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Merrill2003 - abstract: This paper describes a system which uses the output fromhead-tracking - and gesture recognition software to drive aparameterized guitar effects synthesizer - in real-time. - address: 'Montreal, Canada' - author: 'Merrill, David' - bibtex: "@inproceedings{Merrill2003,\n abstract = {This paper describes a system\ - \ which uses the output fromhead-tracking and gesture recognition software to\ - \ drive aparameterized guitar effects synthesizer in real-time.},\n address =\ - \ {Montreal, Canada},\n author = {Merrill, David},\n booktitle = {Proceedings\ + ID: Han2007 + abstract: 'Project Scriabin is an interactive implementation of Alexander Scriabin’s + experimentation with “opposite mapping direction”, that is, mapping from hue (colour) + to pitch (sound). Main colour to sound coding was implemented by Scriabin’s colour + scale.' + address: 'New York City, NY, United States' + author: 'Han, Chang Min' + bibtex: "@inproceedings{Han2007,\n abstract = {Project Scriabin is an interactive\ + \ implementation of Alexander Scriabin’s experimentation with “opposite mapping\ + \ direction”, that is, mapping from hue (colour) to pitch (sound). 
Main colour\ + \ to sound coding was implemented by Scriabin’s colour scale.},\n address = {New\ + \ York City, NY, United States},\n author = {Han, Chang Min},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176531},\n issn = {2220-4806},\n\ - \ keywords = {Head-tracking, gestural control, continuous control, parameterized\ - \ effects processor. },\n pages = {218--219},\n title = {Head-Tracking for Gestural\ - \ and Continuous Control of Parameterized Audio Effects},\n url = {http://www.nime.org/proceedings/2003/nime2003_218.pdf},\n\ - \ year = {2003}\n}\n" + \ doi = {10.5281/zenodo.1177109},\n issn = {2220-4806},\n keywords = {Synaesthesia,\ + \ Sonification, Touch Screen},\n pages = {388--389},\n title = {Project Scriabin\ + \ v.3},\n url = {http://www.nime.org/proceedings/2007/nime2007_388.pdf},\n year\ + \ = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176531 + doi: 10.5281/zenodo.1177109 issn: 2220-4806 - keywords: 'Head-tracking, gestural control, continuous control, parameterized effects - processor. ' - pages: 218--219 - title: Head-Tracking for Gestural and Continuous Control of Parameterized Audio - Effects - url: http://www.nime.org/proceedings/2003/nime2003_218.pdf - year: 2003 + keywords: 'Synaesthesia, Sonification, Touch Screen' + pages: 388--389 + title: Project Scriabin v.3 + url: http://www.nime.org/proceedings/2007/nime2007_388.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Singer2003 - abstract: 'This paper describes the Sonic Banana, a bend-sensor based alternative - MIDI controller.' 
- address: 'Montreal, Canada' - author: 'Singer, Eric' - bibtex: "@inproceedings{Singer2003,\n abstract = {This paper describes the Sonic\ - \ Banana, a bend-sensor based alternative MIDI controller.},\n address = {Montreal,\ - \ Canada},\n author = {Singer, Eric},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176563},\n issn = {2220-4806},\n keywords = {Interactive,\ - \ controller, bend, sensors, performance, MIDI.},\n pages = {220--221},\n title\ - \ = {Sonic Banana: A Novel Bend-Sensor-Based {MIDI} Controller},\n url = {http://www.nime.org/proceedings/2003/nime2003_220.pdf},\n\ - \ year = {2003}\n}\n" + ID: Castellano2007 + abstract: 'In this paper we describe a system which allows users to use their full-body + for controlling in real-time the generation of an expressive audio-visual feedback. + The system extracts expressive motion features from the user''s full-body movements + and gestures. The values of these motion features are mapped both onto acoustic + parameters for the real-time expressive rendering of a piece of music, and onto + real-time generated visual feedback projected on a screen in front of the user. ' + address: 'New York City, NY, United States' + author: 'Castellano, Ginevra and Bresin, Roberto and Camurri, Antonio and Volpe, + Gualtiero' + bibtex: "@inproceedings{Castellano2007,\n abstract = {In this paper we describe\ + \ a system which allows users to use their full-body for controlling in real-time\ + \ the generation of an expressive audio-visual feedback. The system extracts expressive\ + \ motion features from the user's full-body movements and gestures. The values\ + \ of these motion features are mapped both onto acoustic parameters for the real-time\ + \ expressive rendering of a piece of music, and onto real-time generated visual\ + \ feedback projected on a screen in front of the user. 
},\n address = {New York\ + \ City, NY, United States},\n author = {Castellano, Ginevra and Bresin, Roberto\ + \ and Camurri, Antonio and Volpe, Gualtiero},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177065},\n issn = {2220-4806},\n keywords = {Expressive interaction;\ + \ multimodal environments; interactive music systems },\n pages = {390--391},\n\ + \ title = {Expressive Control of Music and Visual Media by Full-Body Movement},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_390.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176563 + doi: 10.5281/zenodo.1177065 issn: 2220-4806 - keywords: 'Interactive, controller, bend, sensors, performance, MIDI.' - pages: 220--221 - title: 'Sonic Banana: A Novel Bend-Sensor-Based MIDI Controller' - url: http://www.nime.org/proceedings/2003/nime2003_220.pdf - year: 2003 + keywords: 'Expressive interaction; multimodal environments; interactive music systems ' + pages: 390--391 + title: Expressive Control of Music and Visual Media by Full-Body Movement + url: http://www.nime.org/proceedings/2007/nime2007_390.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Muth2003 - abstract: 'Sodaconductor is a musical interface for generating OSCcontrol data based - on the dynamic physical simulation toolSodaconstructor as it can be seen and heard - onhttp://www.sodaplay.com.' 
- address: 'Montreal, Canada' - author: 'Muth, David and Burton, Ed' - bibtex: "@inproceedings{Muth2003,\n abstract = {Sodaconductor is a musical interface\ - \ for generating OSCcontrol data based on the dynamic physical simulation toolSodaconstructor\ - \ as it can be seen and heard onhttp://www.sodaplay.com.},\n address = {Montreal,\ - \ Canada},\n author = {Muth, David and Burton, Ed},\n booktitle = {Proceedings\ + ID: Tindale2007 + abstract: This paper describes a hybrid method to allow drummers to expressively + utilize electronics. Commercial electronic drum hardware is made more expressive + by replacing the sample playback “drum brain” with a physical modeling algorithm + implemented in Max/MSP. Timbre recognition techniques identify striking implement + and location as symbolic data that can be used to modify the parameters of the + physical model. + address: 'New York City, NY, United States' + author: 'Tindale, Adam R.' + bibtex: "@inproceedings{Tindale2007,\n abstract = {This paper describes a hybrid\ + \ method to allow drummers to expressively utilize electronics. Commercial electronic\ + \ drum hardware is made more expressive by replacing the sample playback “drum\ + \ brain” with a physical modeling algorithm implemented in Max/MSP. 
Timbre recognition\ + \ techniques identify striking implement and location as symbolic data that can\ + \ be used to modify the parameters of the physical model.},\n address = {New York\ + \ City, NY, United States},\n author = {Tindale, Adam R.},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176537},\n issn = {2220-4806},\n\ - \ keywords = {Sodaconstrucor, Soda, Open Sound Control, Networked Performance,\ - \ Physical Simulation, Generative Composition, Java Application, Non-Linear Sequencing.},\n\ - \ pages = {222--224},\n title = {Sodaconductor},\n url = {http://www.nime.org/proceedings/2003/nime2003_222.pdf},\n\ - \ year = {2003}\n}\n" + \ doi = {10.5281/zenodo.1177259},\n issn = {2220-4806},\n keywords = {electronic\ + \ percussion,nime07,physical modeling,timbre recognition},\n pages = {392--393},\n\ + \ title = {A Hybrid Method for Extended Percussive Gesture},\n url = {http://www.nime.org/proceedings/2007/nime2007_392.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176537 + doi: 10.5281/zenodo.1177259 issn: 2220-4806 - keywords: 'Sodaconstrucor, Soda, Open Sound Control, Networked Performance, Physical - Simulation, Generative Composition, Java Application, Non-Linear Sequencing.' 
- pages: 222--224 - title: Sodaconductor - url: http://www.nime.org/proceedings/2003/nime2003_222.pdf - year: 2003 + keywords: 'electronic percussion,nime07,physical modeling,timbre recognition' + pages: 392--393 + title: A Hybrid Method for Extended Percussive Gesture + url: http://www.nime.org/proceedings/2007/nime2007_392.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Flety2003 - abstract: 'Ircam has been deeply involved into gesture analysis and sensingfor about - four years now, as several artistic projects demonstrate.Ircam has often been - solicited for sharing software and hardwaretools for gesture sensing, especially - devices for the acquisition andconversion of sensor data, such as the AtoMIC Pro - [1][2]. Thisdemo-paper describes the recent design of a new sensor to MIDIinterface - called EoBody1' - address: 'Montreal, Canada' - author: 'Fléty, Emmanuel and Sirguy, Marc' - bibtex: "@inproceedings{Flety2003,\n abstract = {Ircam has been deeply involved\ - \ into gesture analysis and sensingfor about four years now, as several artistic\ - \ projects demonstrate.Ircam has often been solicited for sharing software and\ - \ hardwaretools for gesture sensing, especially devices for the acquisition andconversion\ - \ of sensor data, such as the AtoMIC Pro [1][2]. Thisdemo-paper describes the\ - \ recent design of a new sensor to MIDIinterface called EoBody1},\n address =\ - \ {Montreal, Canada},\n author = {Fl\\'{e}ty, Emmanuel and Sirguy, Marc},\n booktitle\ + ID: Bottoni2007 + abstract: This paper reports our experiments on using a dual-coreDSP processor in + the construction of a user-programmablemusical instrument and controller called + the TouchBox. 
+ address: 'New York City, NY, United States' + author: 'Bottoni, Paolo and Caporali, Riccardo and Capuano, Daniele and Faralli, + Stefano and Labella, Anna and Pierro, Mario' + bibtex: "@inproceedings{Bottoni2007,\n abstract = {This paper reports our experiments\ + \ on using a dual-coreDSP processor in the construction of a user-programmablemusical\ + \ instrument and controller called the TouchBox.},\n address = {New York City,\ + \ NY, United States},\n author = {Bottoni, Paolo and Caporali, Riccardo and Capuano,\ + \ Daniele and Faralli, Stefano and Labella, Anna and Pierro, Mario},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176505},\n\ - \ issn = {2220-4806},\n keywords = {Gestural controller, Sensor, MIDI, Computer\ - \ Music. },\n pages = {225--226},\n title = {EoBody : a Follow-up to AtoMIC Pro's\ - \ Technology},\n url = {http://www.nime.org/proceedings/2003/nime2003_225.pdf},\n\ - \ year = {2003}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1177053},\n issn = {2220-4806},\n keywords\ + \ = {dual-core, DSP, touch-screen, synthesizer, controller },\n pages = {394--395},\n\ + \ title = {Use of a Dual-Core {DSP} in a Low-Cost, Touch-Screen Based Musical\ + \ Instrument},\n url = {http://www.nime.org/proceedings/2007/nime2007_394.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176505 + doi: 10.5281/zenodo.1177053 issn: 2220-4806 - keywords: 'Gestural controller, Sensor, MIDI, Computer Music. 
' - pages: 225--226 - title: 'EoBody : a Follow-up to AtoMIC Pro''s Technology' - url: http://www.nime.org/proceedings/2003/nime2003_225.pdf - year: 2003 + keywords: 'dual-core, DSP, touch-screen, synthesizer, controller ' + pages: 394--395 + title: 'Use of a Dual-Core DSP in a Low-Cost, Touch-Screen Based Musical Instrument' + url: http://www.nime.org/proceedings/2007/nime2007_394.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Paradiso2003 - abstract: 'Several well-known alternative musical controllers were inspired by sensor - systems developed in other fields, often coming to their musical application via - surprising routes. Correspondingly, work on electronic music controllers has relevance - to other applications and broader research themes. In this article, I give a tour - though several controller systems that I have been involved with over the past - decade and outline their connections with other areas of inquiry.' - address: 'Montreal, Canada' - author: 'Paradiso, Joseph A.' - bibtex: "@inproceedings{Paradiso2003,\n abstract = {Several well-known alternative\ - \ musical controllers were inspired by sensor systems developed in other fields,\ - \ often coming to their musical application via surprising routes. Correspondingly,\ - \ work on electronic music controllers has relevance to other applications and\ - \ broader research themes. 
In this article, I give a tour though several controller\ - \ systems that I have been involved with over the past decade and outline their\ - \ connections with other areas of inquiry.},\n address = {Montreal, Canada},\n\ - \ author = {Paradiso, Joseph A.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ - \ doi = {10.5281/zenodo.1176551},\n issn = {2220-4806},\n pages = {228--234},\n\ - \ title = {Dual-Use Technologies for Electronic Music Controllers: A Personal\ - \ Perspective},\n url = {http://www.nime.org/proceedings/2003/nime2003_228.pdf},\n\ - \ year = {2003}\n}\n" + ID: Kanebako2007 + abstract: 'This instrument is a part of the “Gangu Project” at IAMAS, which aim + to develop digital toys for improving children’s social behavior in the future. + It was further developed as part of the IAMAS-Interface Cultures exchange program. + “Mountain Guitar” is a new musical instrument that enables musical expression + through a custom-made sensor technology, which captures and transforms the height + at which the instrument is held to the musical outcome during the playing session. + One of the goals of “Mountain Guitar” is to let untrained users easily and intuitively + play guitar through their body movements. In addition to capturing the users’ + body movements, “Mountain Guitar” also simulates standard guitar playing techniques + such as vibrato, choking, and mute. “Mountain Guitar’s” goal is to provide playing + pleasure for guitar training sessions. This poster describes the “Mountain Guitar’s” + fundamental principles and its mode of operation.' + address: 'New York City, NY, United States' + author: 'Kanebako, Junichi and Gibson, James and Mignonneau, Laurent' + bibtex: "@inproceedings{Kanebako2007,\n abstract = {This instrument is a part of\ + \ the “Gangu Project” at IAMAS, which aim to develop digital toys for improving\ + \ children’s social behavior in the future. 
It was further developed as part of\ + \ the IAMAS-Interface Cultures exchange program. “Mountain Guitar” is a new musical\ + \ instrument that enables musical expression through a custom-made sensor technology,\ + \ which captures and transforms the height at which the instrument is held to\ + \ the musical outcome during the playing session. One of the goals of “Mountain\ + \ Guitar” is to let untrained users easily and intuitively play guitar through\ + \ their body movements. In addition to capturing the users’ body movements, “Mountain\ + \ Guitar” also simulates standard guitar playing techniques such as vibrato, choking,\ + \ and mute. “Mountain Guitar’s” goal is to provide playing pleasure for guitar\ + \ training sessions. This poster describes the “Mountain Guitar’s” fundamental\ + \ principles and its mode of operation.},\n address = {New York City, NY, United\ + \ States},\n author = {Kanebako, Junichi and Gibson, James and Mignonneau, Laurent},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177133},\n issn = {2220-4806},\n\ + \ keywords = {Musical Expression, Guitar Instrument, MIDI to sensor mapping, Physical\ + \ Computing, Intuitive Interaction},\n pages = {396--398},\n title = {Mountain\ + \ Guitar : a Musical Instrument for Everyone},\n url = {http://www.nime.org/proceedings/2007/nime2007_396.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176551 + doi: 10.5281/zenodo.1177133 issn: 2220-4806 - pages: 228--234 - title: 'Dual-Use Technologies for Electronic Music Controllers: A Personal Perspective' - url: http://www.nime.org/proceedings/2003/nime2003_228.pdf - year: 2003 + keywords: 'Musical Expression, Guitar Instrument, MIDI to sensor mapping, Physical + Computing, Intuitive Interaction' + pages: 396--398 + title: 'Mountain Guitar 
: a Musical Instrument for Everyone' + url: http://www.nime.org/proceedings/2007/nime2007_396.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Cadoz2003 - address: 'Montreal, Canada' - author: 'Cadoz, Claude and Luciani, Annie and Florens, Jean-Loup and Castagn\''{e}, - Nicolas' - bibtex: "@inproceedings{Cadoz2003,\n address = {Montreal, Canada},\n author = {Cadoz,\ - \ Claude and Luciani, Annie and Florens, Jean-Loup and Castagn\\'{e}, Nicolas},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176494},\n\ - \ issn = {2220-4806},\n pages = {235--246},\n title = {{AC}ROE --- {ICA} Artistic\ - \ Creation and Computer Interactive Multisensory Simulation Force Feedback Gesture\ - \ Transducers},\n url = {http://www.nime.org/proceedings/2003/nime2003_235.pdf},\n\ - \ year = {2003}\n}\n" + ID: Sirguy2007 + abstract: 'Eowave and Ircam have been deeply involved into gestureanalysis and sensing + for a few years by now, as severalartistic projects demonstrate (1). In 2004, + Eowave has beenworking with Ircam on the development of the Eobodysensor system, + and since that, Eowave''s range of sensors hasbeen increased with new sensors + sometimes developed innarrow collaboration with artists for custom sensor systemsfor + installations and performances. This demo-paperdescribes the recent design of + a new USB/MIDI-to-sensorinterface called Eobody2.' + address: 'New York City, NY, United States' + author: 'Sirguy, Marc and Gallin, Emmanuelle' + bibtex: "@inproceedings{Sirguy2007,\n abstract = {Eowave and Ircam have been deeply\ + \ involved into gestureanalysis and sensing for a few years by now, as severalartistic\ + \ projects demonstrate (1). 
In 2004, Eowave has beenworking with Ircam on the\ + \ development of the Eobodysensor system, and since that, Eowave's range of sensors\ + \ hasbeen increased with new sensors sometimes developed innarrow collaboration\ + \ with artists for custom sensor systemsfor installations and performances. This\ + \ demo-paperdescribes the recent design of a new USB/MIDI-to-sensorinterface called\ + \ Eobody2.},\n address = {New York City, NY, United States},\n author = {Sirguy,\ + \ Marc and Gallin, Emmanuelle},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177247},\n\ + \ issn = {2220-4806},\n keywords = {Gestural controller, Sensor, MIDI, USB, Computer\ + \ music, Relays, Motors, Robots, Wireless. },\n pages = {401--402},\n title =\ + \ {Eobody2 : A Follow-up to Eobody's Technology},\n url = {http://www.nime.org/proceedings/2007/nime2007_401.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '22-24 May, 2003' - doi: 10.5281/zenodo.1176494 + doi: 10.5281/zenodo.1177247 issn: 2220-4806 - pages: 235--246 - title: '{AC}ROE --- {ICA} Artistic Creation and Computer Interactive Multisensory - Simulation Force Feedback Gesture Transducers' - url: http://www.nime.org/proceedings/2003/nime2003_235.pdf - year: 2003 + keywords: 'Gestural controller, Sensor, MIDI, USB, Computer music, Relays, Motors, + Robots, Wireless. ' + pages: 401--402 + title: 'Eobody2 : A Follow-up to Eobody''s Technology' + url: http://www.nime.org/proceedings/2007/nime2007_401.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: KimBoyle2008 - abstract: "The rapid development of network communicationtechnologies has allowed\ - \ composers to create new ways inwhich to directly engage participants in the\ - \ exploration of newmusical environments. 
A number of distinctive aestheticapproaches\ - \ to the musical application of networks will beoutlined in this paper each of\ - \ which is mediated andconditioned by the technical and aesthetic foundations\ - \ of thenetwork technologies themselves. Recent work in the field byartists such\ - \ as Atau Tanaka and Metraform will be examined, aswill some of the earlier pioneering\ - \ work in the genre by MaxNeuhaus. While recognizing the historical context ofcollaborative\ - \ work, the ,\n,\nauthor will examine how the strategiesemployed in the work of\ - \ these artists have helped redefine anew aesthetics of engagement in which play,\ - \ spatial andtemporal dislocation are amongst the genre's definingcharacteristics." - address: 'Genoa, Italy' - author: 'Kim-Boyle, David' - bibtex: "@inproceedings{KimBoyle2008,\n abstract = {The rapid development of network\ - \ communicationtechnologies has allowed composers to create new ways inwhich to\ - \ directly engage participants in the exploration of newmusical environments.\ - \ A number of distinctive aestheticapproaches to the musical application of networks\ - \ will beoutlined in this paper each of which is mediated andconditioned by the\ - \ technical and aesthetic foundations of thenetwork technologies themselves. Recent\ - \ work in the field byartists such as Atau Tanaka and Metraform will be examined,\ - \ aswill some of the earlier pioneering work in the genre by MaxNeuhaus. 
While\ - \ recognizing the historical context ofcollaborative work, the ,\n,\nauthor will\ - \ examine how the strategiesemployed in the work of these artists have helped\ - \ redefine anew aesthetics of engagement in which play, spatial andtemporal dislocation\ - \ are amongst the genre's definingcharacteristics.},\n address = {Genoa, Italy},\n\ - \ author = {Kim-Boyle, David},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179579},\n\ - \ issn = {2220-4806},\n keywords = {Networks, collaborative, open-form, play,\ - \ interface. },\n pages = {3--8},\n title = {Network Musics --- Play , Engagement\ - \ and the Democratization of Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_003.pdf},\n\ - \ year = {2008}\n}\n" + ID: Till2007 + abstract: 'The WISP is a novel wireless sensor that uses 3 axis magnetometers, accelerometers, + and rate gyroscopes to provide a real-time measurement of its own orientation + in space. Orientation data are transmitted via the Open Sound Control protocol + (OSC) to a synthesis engine for interactive live dance performance. ' + address: 'New York City, NY, United States' + author: 'Till, Bernie C. and Benning, Manjinder S. and Livingston, Nigel' + bibtex: "@inproceedings{Till2007,\n abstract = {The WISP is a novel wireless sensor\ + \ that uses 3 axis magnetometers, accelerometers, and rate gyroscopes to provide\ + \ a real-time measurement of its own orientation in space. Orientation data are\ + \ transmitted via the Open Sound Control protocol (OSC) to a synthesis engine\ + \ for interactive live dance performance. },\n address = {New York City, NY, United\ + \ States},\n author = {Till, Bernie C. and Benning, Manjinder S. 
and Livingston,\ + \ Nigel},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177257},\n issn = {2220-4806},\n\ + \ keywords = {Music Controller, Human-Computer Interaction, Wireless Sensing,\ + \ Inertial Sensing. },\n pages = {403--404},\n title = {Wireless Inertial Sensor\ + \ Package (WISP)},\n url = {http://www.nime.org/proceedings/2007/nime2007_403.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179579 + doi: 10.5281/zenodo.1177257 issn: 2220-4806 - keywords: 'Networks, collaborative, open-form, play, interface. ' - pages: 3--8 - title: 'Network Musics --- Play , Engagement and the Democratization of Performance' - url: http://www.nime.org/proceedings/2008/nime2008_003.pdf - year: 2008 + keywords: 'Music Controller, Human-Computer Interaction, Wireless Sensing, Inertial + Sensing. ' + pages: 403--404 + title: Wireless Inertial Sensor Package (WISP) + url: http://www.nime.org/proceedings/2007/nime2007_403.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Barbosa2008 - abstract: 'This paper presents the latest developments of the Public Sound Objects - (PSOs) system, an experimental framework to implement and test new concepts for - Networked Music. The project of a Public interactive installation using the PSOs - system was commissioned in 2007 by Casa da Musica, the main concert hall space - in Porto. It resulted in a distributed musical structure with up to ten interactive - performance terminals distributed along the Casa da Musica''s hallways, collectively - controlling a shared acoustic piano. The installation allows the visitors to collaborate - remotely with each other, within the building, using a software interface custom - developed to facilitate collaborative music practices and with no requirements - in terms previous knowledge of musical performance. 
' - address: 'Genoa, Italy' - author: 'Barbosa, Àlvaro' - bibtex: "@inproceedings{Barbosa2008,\n abstract = {This paper presents the latest\ - \ developments of the Public Sound Objects (PSOs) system, an experimental framework\ - \ to implement and test new concepts for Networked Music. The project of a Public\ - \ interactive installation using the PSOs system was commissioned in 2007 by Casa\ - \ da Musica, the main concert hall space in Porto. It resulted in a distributed\ - \ musical structure with up to ten interactive performance terminals distributed\ - \ along the Casa da Musica's hallways, collectively controlling a shared acoustic\ - \ piano. The installation allows the visitors to collaborate remotely with each\ - \ other, within the building, using a software interface custom developed to facilitate\ - \ collaborative music practices and with no requirements in terms previous knowledge\ - \ of musical performance. },\n address = {Genoa, Italy},\n author = {Barbosa,\ - \ \\`{A}lvaro},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179487},\n\ - \ issn = {2220-4806},\n keywords = {algorithmic composition,behavioral driven,electronic\ - \ music instruments,interfaces,network music instruments,nime08,performance,public\ - \ music,real-time collaborative,sound},\n pages = {9--12},\n title = {Ten-Hand\ - \ Piano : A Networked Music Installation},\n url = {http://www.nime.org/proceedings/2008/nime2008_009.pdf},\n\ - \ year = {2008}\n}\n" + ID: Loewenstein2007 + abstract: 'The ”Acoustic Map“ is an interactive soundinstallation developed for + the “Hallakustika” Festival in Hall (Tyrolia, Austria) using the motion tracking + software Eyes-Web and Max-MSP. For the NIME 07 a simulation of the motion tracking + part of the original work will be shown. 
Its aim was to create an interactive + city portrait of the city of Hall and to offer the possibility to enhance six + sites of the city on an acoustical basis with what I called an “acoustic zoom”.' + address: 'New York City, NY, United States' + author: 'Loewenstein, Stefan' + bibtex: "@inproceedings{Loewenstein2007,\n abstract = {The ”Acoustic Map“ is an\ + \ interactive soundinstallation developed for the “Hallakustika” Festival in Hall\ + \ (Tyrolia, Austria) using the motion tracking software Eyes-Web and Max-MSP.\ + \ For the NIME 07 a simulation of the motion tracking part of the original work\ + \ will be shown. Its aim was to create an interactive city portrait of the city\ + \ of Hall and to offer the possibility to enhance six sites of the city on an\ + \ acoustical basis with what I called an “acoustic zoom”.},\n address = {New York\ + \ City, NY, United States},\n author = {Loewenstein, Stefan},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177167},\n issn = {2220-4806},\n keywords = {nime07},\n\ + \ pages = {405--406},\n title = {\"Acoustic Map\" -- An Interactive Cityportrait},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_405.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179487 + doi: 10.5281/zenodo.1177167 issn: 2220-4806 - keywords: 'algorithmic composition,behavioral driven,electronic music instruments,interfaces,network - music instruments,nime08,performance,public music,real-time collaborative,sound' - pages: 9--12 - title: 'Ten-Hand Piano : A Networked Music Installation' - url: http://www.nime.org/proceedings/2008/nime2008_009.pdf - year: 2008 + keywords: nime07 + pages: 405--406 + title: '"Acoustic Map" -- An Interactive Cityportrait' + url: http://www.nime.org/proceedings/2007/nime2007_405.pdf + year: 2007 - ENTRYTYPE: inproceedings 
- ID: Wozniewski2008 - abstract: 'New application spaces and artistic forms can emerge whenusers are freed - from constraints. In the general case ofhuman-computer interfaces, users are often - confined to afixed location, severely limiting mobility. To overcome thisconstraint - in the context of musical interaction, we presenta system to manage large-scale - collaborative mobile audioenvironments, driven by user movement. Multiple participants - navigate through physical space while sharing overlaid virtual elements. Each - user is equipped with a mobilecomputing device, GPS receiver, orientation sensor, - microphone, headphones, or various combinations of these technologies. We investigate - methods of location tracking, wireless audio streaming, and state management between - mobiledevices and centralized servers. The result is a system thatallows mobile - users, with subjective 3-D audio rendering,to share virtual scenes. The audio - elements of these scenescan be organized into large-scale spatial audio interfaces,thus - allowing for immersive mobile performance, locativeaudio installations, and many - new forms of collaborativesonic activity.' - address: 'Genoa, Italy' - author: 'Wozniewski, Mike and Bouillot, Nicolas and Settel, Zack and Cooperstock, - Jeremy R.' - bibtex: "@inproceedings{Wozniewski2008,\n abstract = {New application spaces and\ - \ artistic forms can emerge whenusers are freed from constraints. In the general\ - \ case ofhuman-computer interfaces, users are often confined to afixed location,\ - \ severely limiting mobility. To overcome thisconstraint in the context of musical\ - \ interaction, we presenta system to manage large-scale collaborative mobile audioenvironments,\ - \ driven by user movement. Multiple participants navigate through physical space\ - \ while sharing overlaid virtual elements. 
Each user is equipped with a mobilecomputing\ - \ device, GPS receiver, orientation sensor, microphone, headphones, or various\ - \ combinations of these technologies. We investigate methods of location tracking,\ - \ wireless audio streaming, and state management between mobiledevices and centralized\ - \ servers. The result is a system thatallows mobile users, with subjective 3-D\ - \ audio rendering,to share virtual scenes. The audio elements of these scenescan\ - \ be organized into large-scale spatial audio interfaces,thus allowing for immersive\ - \ mobile performance, locativeaudio installations, and many new forms of collaborativesonic\ - \ activity.},\n address = {Genoa, Italy},\n author = {Wozniewski, Mike and Bouillot,\ - \ Nicolas and Settel, Zack and Cooperstock, Jeremy R.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179651},\n issn = {2220-4806},\n keywords = {sonic navigation,\ - \ mobile music, spatial interaction, wireless audio streaming, locative media,\ - \ collaborative interfaces },\n pages = {13--18},\n title = {Large-Scale Mobile\ - \ Audio Environments for Collaborative Musical Interaction},\n url = {http://www.nime.org/proceedings/2008/nime2008_013.pdf},\n\ - \ year = {2008}\n}\n" + ID: Hashida2007a + abstract: 'This paper introduces a system for improvisational musical expression + that enables all users, novice and experienced, to perform intuitively and expressively. + Users can generate musically consistent results through intuitive action, inputting + rhythm in a decent tempo. We demonstrate novel mapping ways that reflect user’s + input information more interactively and effectively in generating the music. + We also present various input devices that allow users more creative liberty.' 
+ address: 'New York City, NY, United States' + author: 'Hashida, Tomoko and Naemura, Takeshi and Sato, Takao' + bibtex: "@inproceedings{Hashida2007a,\n abstract = {This paper introduces a system\ + \ for improvisational musical expression that enables all users, novice and experienced,\ + \ to perform intuitively and expressively. Users can generate musically consistent\ + \ results through intuitive action, inputting rhythm in a decent tempo. We demonstrate\ + \ novel mapping ways that reflect user’s input information more interactively\ + \ and effectively in generating the music. We also present various input devices\ + \ that allow users more creative liberty.},\n address = {New York City, NY, United\ + \ States},\n author = {Hashida, Tomoko and Naemura, Takeshi and Sato, Takao},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177113},\n issn = {2220-4806},\n\ + \ keywords = {Improvisation, interactive music, a sense of tempo },\n pages =\ + \ {407--408},\n title = {A System for Improvisational Musical Expression Based\ + \ on Player's Sense of Tempo},\n url = {http://www.nime.org/proceedings/2007/nime2007_407.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179651 + doi: 10.5281/zenodo.1177113 issn: 2220-4806 - keywords: 'sonic navigation, mobile music, spatial interaction, wireless audio streaming, - locative media, collaborative interfaces ' - pages: 13--18 - title: Large-Scale Mobile Audio Environments for Collaborative Musical Interaction - url: http://www.nime.org/proceedings/2008/nime2008_013.pdf - year: 2008 + keywords: 'Improvisation, interactive music, a sense of tempo ' + pages: 407--408 + title: A System for Improvisational Musical Expression Based on Player's Sense of + Tempo + url: http://www.nime.org/proceedings/2007/nime2007_407.pdf + year: 2007 - 
ENTRYTYPE: inproceedings - ID: Fraietta2008 - abstract: 'Open Sound Control (OSC) is being used successfully as amessaging protocol - among many computers, gesturalcontrollers and multimedia systems. Although OSC - hasaddressed some of the shortcomings of MIDI, OSC cannotdeliver on its promises - as a real-time communication protocolfor constrained embedded systems. This paper - will examinesome of the advantages but also dispel some of the mythsconcerning - OSC. The paper will also describe how some of thebest features of OSC can be used - to develop a lightweightprotocol that is microcontroller friendly.' - address: 'Genoa, Italy' - author: 'Fraietta, Angelo' - bibtex: "@inproceedings{Fraietta2008,\n abstract = {Open Sound Control (OSC) is\ - \ being used successfully as amessaging protocol among many computers, gesturalcontrollers\ - \ and multimedia systems. Although OSC hasaddressed some of the shortcomings of\ - \ MIDI, OSC cannotdeliver on its promises as a real-time communication protocolfor\ - \ constrained embedded systems. This paper will examinesome of the advantages\ - \ but also dispel some of the mythsconcerning OSC. The paper will also describe\ - \ how some of thebest features of OSC can be used to develop a lightweightprotocol\ - \ that is microcontroller friendly.},\n address = {Genoa, Italy},\n author = {Fraietta,\ - \ Angelo},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179537},\n issn = {2220-4806},\n\ - \ keywords = {a,data transmission protocols,gestural controllers,has been implemented\ - \ as,midi,nime08,open sound control,osc},\n pages = {19--23},\n title = {Open\ - \ Sound Control : Constraints and Limitations},\n url = {http://www.nime.org/proceedings/2008/nime2008_019.pdf},\n\ - \ year = {2008}\n}\n" + ID: Nakamoto2007 + abstract: 'We proposed a circle canon system for enjoying a musical ensemble supported + by a computer and network. 
Using the song Frog round, which is a popular circle + canon chorus originated from a German folk song, we produced a singing ensemble + opportunity where everyone plays the music together at the same time. The aim + of our system is that anyone can experience the joyful feeling of actually playing + the music as well as sharing it with others. ' + address: 'New York City, NY, United States' + author: 'Nakamoto, Misako and Kuhara, Yasuo' + bibtex: "@inproceedings{Nakamoto2007,\n abstract = {We proposed a circle canon system\ + \ for enjoying a musical ensemble supported by a computer and network. Using the\ + \ song {Frog round}, which is a popular circle canon chorus originated from a\ + \ German folk song, we produced a singing ensemble opportunity where everyone\ + \ plays the music together at the same time. The aim of our system is that anyone\ + \ can experience the joyful feeling of actually playing the music as well as sharing\ + \ it with others. },\n address = {New York City, NY, United States},\n author\ + \ = {Nakamoto, Misako and Kuhara, Yasuo},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177207},\n\ + \ issn = {2220-4806},\n keywords = {Circle canon, Chorus, Song, Frog round, Ensemble,\ + \ Internet, Max/MSP, MySQL database. 
},\n pages = {409--410},\n title = {Circle\ + \ Canon Chorus System Used To Enjoy A Musical Ensemble Singing \"Frog Round\"\ + },\n url = {http://www.nime.org/proceedings/2007/nime2007_409.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179537 + doi: 10.5281/zenodo.1177207 issn: 2220-4806 - keywords: 'a,data transmission protocols,gestural controllers,has been implemented - as,midi,nime08,open sound control,osc' - pages: 19--23 - title: 'Open Sound Control : Constraints and Limitations' - url: http://www.nime.org/proceedings/2008/nime2008_019.pdf - year: 2008 + keywords: 'Circle canon, Chorus, Song, Frog round, Ensemble, Internet, Max/MSP, + MySQL database. ' + pages: 409--410 + title: Circle Canon Chorus System Used To Enjoy A Musical Ensemble Singing "Frog + Round" + url: http://www.nime.org/proceedings/2007/nime2007_409.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Bozzolan2008 - abstract: 'The continuous evolutions in the human-computer interfaces field have - allowed the development of control devicesthat let have a more and more intuitive, - gestural and noninvasive interaction.Such devices find a natural employment also - in the musicapplied informatics and in particular in the electronic music,always - searching for new expressive means.This paper presents a prototype of a system - for the realtime control of sound spatialization in a multichannel configuration - with a multimodal interaction interface. The spatializer, called SMuSIM, employs - interaction devices thatrange from the simple and well-established mouse and keyboard - to a classical gaming used joystick (gamepad), finallyexploiting more advanced - and innovative typologies basedon image analysis (as a webcam).' 
- address: 'Genoa, Italy' - author: 'Bozzolan, Matteo and Cospito, Giovanni' - bibtex: "@inproceedings{Bozzolan2008,\n abstract = {The continuous evolutions in\ - \ the human-computer interfaces field have allowed the development of control\ - \ devicesthat let have a more and more intuitive, gestural and noninvasive interaction.Such\ - \ devices find a natural employment also in the musicapplied informatics and in\ - \ particular in the electronic music,always searching for new expressive means.This\ - \ paper presents a prototype of a system for the realtime control of sound spatialization\ - \ in a multichannel configuration with a multimodal interaction interface. The\ - \ spatializer, called SMuSIM, employs interaction devices thatrange from the simple\ - \ and well-established mouse and keyboard to a classical gaming used joystick\ - \ (gamepad), finallyexploiting more advanced and innovative typologies basedon\ - \ image analysis (as a webcam).},\n address = {Genoa, Italy},\n author = {Bozzolan,\ - \ Matteo and Cospito, Giovanni},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179501},\n\ - \ issn = {2220-4806},\n keywords = {Sound spatialization, multimodal interaction,\ - \ interaction interfaces, EyesWeb, Pure data. },\n pages = {24--27},\n title =\ - \ {SMuSIM : a Prototype of Multichannel Spatialization System with Multimodal\ - \ Interaction Interface},\n url = {http://www.nime.org/proceedings/2008/nime2008_024.pdf},\n\ - \ year = {2008}\n}\n" + ID: Estrada2007 + abstract: 'Loop-R is a real-time video performance tool, based in the exploration + of low-tech, used technology and human engineering research. With this tool its + author is giving a shout to industry, using existing and mistreated technology + in innovative ways, combining concepts and interfaces: blending segregated interfaces + (GUI and Physical) into one. 
After graspable interfaces and the “end” of WIMP + interfaces, hardware and software blend themselves in a new genre providing free + control of video-loops in an expressive hybrid tool.' + address: 'New York City, NY, United States' + author: 'Pereira, Rui' + bibtex: "@inproceedings{Estrada2007,\n abstract = {Loop-R is a real-time video performance\ + \ tool, based in the exploration of low-tech, used technology and human engineering\ + \ research. With this tool its author is giving a shout to industry, using existing\ + \ and mistreated technology in innovative ways, combining concepts and interfaces:\ + \ blending segregated interfaces (GUI and Physical) into one. After graspable\ + \ interfaces and the “end” of WIMP interfaces, hardware and software blend themselves\ + \ in a new genre providing free control of video-loops in an expressive hybrid\ + \ tool.},\n address = {New York City, NY, United States},\n author = {Pereira,\ + \ Rui},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177219},\n issn = {2220-4806},\n\ + \ keywords = {Real-time; video; interface; live-visuals; loop; },\n pages = {411--414},\n\ + \ title = {Loop-R : Real-Time Video Interface},\n url = {http://www.nime.org/proceedings/2007/nime2007_411.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179501 + doi: 10.5281/zenodo.1177219 issn: 2220-4806 - keywords: 'Sound spatialization, multimodal interaction, interaction interfaces, - EyesWeb, Pure data. 
' - pages: 24--27 - title: 'SMuSIM : a Prototype of Multichannel Spatialization System with Multimodal - Interaction Interface' - url: http://www.nime.org/proceedings/2008/nime2008_024.pdf - year: 2008 + keywords: 'Real-time; video; interface; live-visuals; loop; ' + pages: 411--414 + title: 'Loop-R : Real-Time Video Interface' + url: http://www.nime.org/proceedings/2007/nime2007_411.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Nash2008 - abstract: 'Over the last century, composers have made increasingly ambitious experiments - with musical time, but have been impeded in expressing more temporally-complex - musical processes by the limitations of both music notations and human performers. - In this paper, we describe a computer-based notation and gestural control system - for independently manipulating the tempi of musical parts within a piece, at performance - time. We describe how the problem was approached, drawing upon feedback and suggestions - from consultations across multiple disciplines, seeking analogous problems in - other fields. Throughout, our approach is guided and, ultimately, assessed by - an established professional composer, who was able to interact with a working - prototype of the system. ' - address: 'Genoa, Italy' - author: 'Nash, Chris and Blackwell, Alan' - bibtex: "@inproceedings{Nash2008,\n abstract = {Over the last century, composers\ - \ have made increasingly ambitious experiments with musical time, but have been\ - \ impeded in expressing more temporally-complex musical processes by the limitations\ - \ of both music notations and human performers. In this paper, we describe a computer-based\ - \ notation and gestural control system for independently manipulating the tempi\ - \ of musical parts within a piece, at performance time. We describe how the problem\ - \ was approached, drawing upon feedback and suggestions from consultations across\ - \ multiple disciplines, seeking analogous problems in other fields. 
Throughout,\ - \ our approach is guided and, ultimately, assessed by an established professional\ - \ composer, who was able to interact with a working prototype of the system. },\n\ - \ address = {Genoa, Italy},\n author = {Nash, Chris and Blackwell, Alan},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179603},\n issn = {2220-4806},\n keywords\ - \ = {composition,gesture,nime08,performance,polytempi,realtime,tempo},\n pages\ - \ = {28--33},\n title = {Realtime Representation and Gestural Control of Musical\ - \ Polytempi},\n url = {http://www.nime.org/proceedings/2008/nime2008_028.pdf},\n\ - \ year = {2008}\n}\n" + ID: Rigler2007 + abstract: 'The Music Cre8tor is an interactive music composition system controlled + by motion sensors specifically designed for children with disabilities although + not exclusively for this population. The player(s) of the Music Cre8tor can either + hold or attach accelerometer sensors to trigger a variety of computer-generated + sounds, MIDI instruments and/or pre-recorded sound files. The sensitivity of the + sensors can be modified for each unique individual so that even the smallest movement + can control a sound. The flexibility of the system is such that either four people + can play simultaneously and/or one or more players can use up to four sensors. + The original goal of this program was to empower students with disabilities to + create music and encourage them to perform with other musicians, however this + same goal has expanded to include other populations.' 
+ address: 'New York City, NY, United States' + author: 'Rigler, Jane and Seldess, Zachary' + bibtex: "@inproceedings{Rigler2007,\n abstract = {The Music Cre8tor is an interactive\ + \ music composition system controlled by motion sensors specifically designed\ + \ for children with disabilities although not exclusively for this population.\ + \ The player(s) of the Music Cre8tor can either hold or attach accelerometer sensors\ + \ to trigger a variety of computer-generated sounds, MIDI instruments and/or pre-recorded\ + \ sound files. The sensitivity of the sensors can be modified for each unique\ + \ individual so that even the smallest movement can control a sound. The flexibility\ + \ of the system is such that either four people can play simultaneously and/or\ + \ one or more players can use up to four sensors. The original goal of this program\ + \ was to empower students with disabilities to create music and encourage them\ + \ to perform with other musicians, however this same goal has expanded to include\ + \ other populations.},\n address = {New York City, NY, United States},\n author\ + \ = {Rigler, Jane and Seldess, Zachary},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177227},\n\ + \ issn = {2220-4806},\n keywords = {Music Education, disabilities, special education,\ + \ motion sensors, music composition, interactive performance. 
},\n pages = {415--416},\n\ + \ title = {The Music Cre8tor : an Interactive System for Musical Exploration and\ + \ Education},\n url = {http://www.nime.org/proceedings/2007/nime2007_415.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179603 + doi: 10.5281/zenodo.1177227 issn: 2220-4806 - keywords: composition,gesture,nime08,performance,polytempi,realtime,tempo - pages: 28--33 - title: Realtime Representation and Gestural Control of Musical Polytempi - url: http://www.nime.org/proceedings/2008/nime2008_028.pdf - year: 2008 + keywords: 'Music Education, disabilities, special education, motion sensors, music + composition, interactive performance. ' + pages: 415--416 + title: 'The Music Cre8tor : an Interactive System for Musical Exploration and Education' + url: http://www.nime.org/proceedings/2007/nime2007_415.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Laurson2008 - address: 'Genoa, Italy' - author: 'Laurson, Mikael and Kuuskankare, Mika' - bibtex: "@inproceedings{Laurson2008,\n address = {Genoa, Italy},\n author = {Laurson,\ - \ Mikael and Kuuskankare, Mika},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179589},\n\ - \ issn = {2220-4806},\n keywords = {synthesis control, expressive timing, playing\ - \ styles },\n pages = {34--37},\n title = {Towards Idiomatic and Flexible Score-based\ - \ Gestural Control with a Scripting Language},\n url = {http://www.nime.org/proceedings/2008/nime2008_034.pdf},\n\ - \ year = {2008}\n}\n" + ID: Guedes2007 + abstract: 'In this demonstration, I exemplify how a musical channel ofcommunication + can be established in computer-mediatedinteraction between musicians and dancers + in real time. 
Thischannel of communication uses a software libraryimplemented + as a library of external objects for Max/MSP[1],that processes data from an object + or library that performsframe-differencing analysis of a video stream in real + time inthis programming environment.' + address: 'New York City, NY, United States' + author: 'Guedes, Carlos' + bibtex: "@inproceedings{Guedes2007,\n abstract = {In this demonstration, I exemplify\ + \ how a musical channel ofcommunication can be established in computer-mediatedinteraction\ + \ between musicians and dancers in real time. Thischannel of communication uses\ + \ a software libraryimplemented as a library of external objects for Max/MSP[1],that\ + \ processes data from an object or library that performsframe-differencing analysis\ + \ of a video stream in real time inthis programming environment.},\n address =\ + \ {New York City, NY, United States},\n author = {Guedes, Carlos},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177105},\n issn = {2220-4806},\n keywords\ + \ = {dance,in dance,interaction between music and,interactive,interactive dance,interactive\ + \ performance,musical rhythm and rhythm,nime07,performance systems},\n pages =\ + \ {417--419},\n title = {Establishing a Musical Channel of Communication between\ + \ Dancers and Musicians in Computer-Mediated Collaborations in Dance Performance},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_417.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179589 + doi: 10.5281/zenodo.1177105 issn: 2220-4806 - keywords: 'synthesis control, expressive timing, playing styles ' - pages: 34--37 - title: Towards Idiomatic and Flexible Score-based Gestural Control with a Scripting - Language - url: http://www.nime.org/proceedings/2008/nime2008_034.pdf - year: 2008 + keywords: 
'dance,in dance,interaction between music and,interactive,interactive + dance,interactive performance,musical rhythm and rhythm,nime07,performance systems' + pages: 417--419 + title: Establishing a Musical Channel of Communication between Dancers and Musicians + in Computer-Mediated Collaborations in Dance Performance + url: http://www.nime.org/proceedings/2007/nime2007_417.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Bouenard2008 - abstract: 'A new interface for visualizing and analyzing percussion gestures is - presented, proposing enhancements of existing motion capture analysis tools. This - is achieved by offering apercussion gesture analysis protocol using motion capture.A - virtual character dynamic model is then designed in order to take advantage of - gesture characteristics, yielding toimprove gesture analysis with visualization - and interactioncues of different types.' - address: 'Genoa, Italy' - author: 'Bouënard, Alexandre and Gibet, Sylvie and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Bouenard2008,\n abstract = {A new interface for visualizing\ - \ and analyzing percussion gestures is presented, proposing enhancements of existing\ - \ motion capture analysis tools. This is achieved by offering apercussion gesture\ - \ analysis protocol using motion capture.A virtual character dynamic model is\ - \ then designed in order to take advantage of gesture characteristics, yielding\ - \ toimprove gesture analysis with visualization and interactioncues of different\ - \ types.},\n address = {Genoa, Italy},\n author = {Bou\\''{e}nard, Alexandre and\ - \ Gibet, Sylvie and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179497},\n issn = {2220-4806},\n keywords = {Gesture and sound,\ - \ interface, percussion gesture, virtual character, interaction. 
},\n pages =\ - \ {38--43},\n title = {Enhancing the Visualization of Percussion Gestures by Virtual\ - \ Character Animation},\n url = {http://www.nime.org/proceedings/2008/nime2008_038.pdf},\n\ - \ year = {2008}\n}\n" + ID: Bull2007 + address: 'New York City, NY, United States' + author: 'Bull, Steve and Gresham-Lancaster, Scot and Mintchev, Kalin and Svoboda, + Terese' + bibtex: "@inproceedings{Bull2007,\n address = {New York City, NY, United States},\n\ + \ author = {Bull, Steve and Gresham-Lancaster, Scot and Mintchev, Kalin and Svoboda,\ + \ Terese},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177057},\n issn = {2220-4806},\n\ + \ keywords = {nime07},\n pages = {420--420},\n title = {Cellphonia : WET},\n url\ + \ = {http://www.nime.org/proceedings/2007/nime2007_420.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179497 + doi: 10.5281/zenodo.1177057 issn: 2220-4806 - keywords: 'Gesture and sound, interface, percussion gesture, virtual character, - interaction. 
' - pages: 38--43 - title: Enhancing the Visualization of Percussion Gestures by Virtual Character Animation - url: http://www.nime.org/proceedings/2008/nime2008_038.pdf - year: 2008 + keywords: nime07 + pages: 420--420 + title: 'Cellphonia : WET' + url: http://www.nime.org/proceedings/2007/nime2007_420.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Young2008 - address: 'Genoa, Italy' - author: 'Young, Diana' - bibtex: "@inproceedings{Young2008,\n address = {Genoa, Italy},\n author = {Young,\ - \ Diana},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177457},\n issn = {2220-4806},\n\ - \ keywords = {bowing, gesture, playing technique, principal component anal- ysis,\ - \ classification },\n pages = {44--48},\n title = {Classification of Common Violin\ - \ Bowing Techniques Using Gesture Data from a Playable Measurement System},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_044.pdf},\n year = {2008}\n\ + ID: Court2007 + address: 'New York City, NY, United States' + author: Collective Dearraindrop + bibtex: "@inproceedings{Court2007,\n address = {New York City, NY, United States},\n\ + \ author = {Collective Dearraindrop},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177083},\n\ + \ issn = {2220-4806},\n keywords = {nime07},\n pages = {421--421},\n title = {Miller},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_421.pdf},\n year = {2007}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177457 + doi: 10.5281/zenodo.1177083 issn: 2220-4806 - keywords: 'bowing, gesture, playing technique, principal component anal- ysis, classification ' - pages: 44--48 - title: Classification of Common Violin Bowing Techniques Using Gesture Data from - a Playable Measurement System - url: 
http://www.nime.org/proceedings/2008/nime2008_044.pdf - year: 2008 + keywords: nime07 + pages: 421--421 + title: Miller + url: http://www.nime.org/proceedings/2007/nime2007_421.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Pakarinen2008 - abstract: 'This article discusses a virtual slide guitar instrument, recently introduced - in [7]. The instrument consists of a novelphysics-based synthesis model and a - gestural user interface.The synthesis engine uses energy-compensated time-varyingdigital - waveguides. The string algorithm also contains aparametric model for synthesizing - the tube-string contactsounds. The real-time virtual slide guitar user interface - employs optical gesture recognition, so that the user can playthis virtual instrument - simply by making slide guitar playing gestures in front of a camera.' - address: 'Genoa, Italy' - author: 'Pakarinen, Jyri and Välimäki, Vesa and Puputti, Tapio' - bibtex: "@inproceedings{Pakarinen2008,\n abstract = {This article discusses a virtual\ - \ slide guitar instrument, recently introduced in [7]. The instrument consists\ - \ of a novelphysics-based synthesis model and a gestural user interface.The synthesis\ - \ engine uses energy-compensated time-varyingdigital waveguides. 
The string algorithm\ - \ also contains aparametric model for synthesizing the tube-string contactsounds.\ - \ The real-time virtual slide guitar user interface employs optical gesture recognition,\ - \ so that the user can playthis virtual instrument simply by making slide guitar\ - \ playing gestures in front of a camera.},\n address = {Genoa, Italy},\n author\ - \ = {Pakarinen, Jyri and V\\''{a}lim\\''{a}ki, Vesa and Puputti, Tapio},\n booktitle\ + ID: Hauert2007 + address: 'New York City, NY, United States' + author: 'Hauert, Sibylle and Reichmuth, Daniel and B\"{o}hm, Volker' + bibtex: "@inproceedings{Hauert2007,\n address = {New York City, NY, United States},\n\ + \ author = {Hauert, Sibylle and Reichmuth, Daniel and B\\\"{o}hm, Volker},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179607},\n issn = {2220-4806},\n keywords\ - \ = {Sound synthesis, slide guitar, gesture control, physical mod- eling },\n\ - \ pages = {49--52},\n title = {Slide Guitar Synthesizer with Gestural Control},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_049.pdf},\n year = {2008}\n\ - }\n" + \ Expression},\n doi = {10.5281/zenodo.1177115},\n issn = {2220-4806},\n keywords\ + \ = {nime07},\n pages = {422--422},\n title = {Instant City, a Music Building\ + \ Game Table},\n url = {http://www.nime.org/proceedings/2007/nime2007_422.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179607 + doi: 10.5281/zenodo.1177115 issn: 2220-4806 - keywords: 'Sound synthesis, slide guitar, gesture control, physical mod- eling ' - pages: 49--52 - title: Slide Guitar Synthesizer with Gestural Control - url: http://www.nime.org/proceedings/2008/nime2008_049.pdf - year: 2008 + keywords: nime07 + pages: 422--422 + title: 'Instant City, a Music Building Game Table' + url: 
http://www.nime.org/proceedings/2007/nime2007_422.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Lahdeoja2008 - address: 'Genoa, Italy' - author: 'L\''''{a}hdeoja, Otso' - bibtex: "@inproceedings{Lahdeoja2008,\n address = {Genoa, Italy},\n author = {L\\\ - ''{a}hdeoja, Otso},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179585},\n\ - \ issn = {2220-4806},\n keywords = {Augmented instrument, electric guitar, gesture-sound\ - \ relationship },\n pages = {53--56},\n title = {An Approach to Instrument Augmentation\ - \ : the Electric Guitar},\n url = {http://www.nime.org/proceedings/2008/nime2008_053.pdf},\n\ - \ year = {2008}\n}\n" + ID: Milmoe2007 + address: 'New York City, NY, United States' + author: 'Milmoe, Andrew' + bibtex: "@inproceedings{Milmoe2007,\n address = {New York City, NY, United States},\n\ + \ author = {Milmoe, Andrew},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177197},\n\ + \ issn = {2220-4806},\n keywords = {nime07},\n pages = {423--423},\n title = {NIME\ + \ Performance \\& Installation : Sonic Pong V3.0},\n url = {http://www.nime.org/proceedings/2007/nime2007_423.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179585 + doi: 10.5281/zenodo.1177197 issn: 2220-4806 - keywords: 'Augmented instrument, electric guitar, gesture-sound relationship ' - pages: 53--56 - title: 'An Approach to Instrument Augmentation : the Electric Guitar' - url: http://www.nime.org/proceedings/2008/nime2008_053.pdf - year: 2008 + keywords: nime07 + pages: 423--423 + title: 'NIME Performance \& Installation : Sonic Pong V3.0' + url: http://www.nime.org/proceedings/2007/nime2007_423.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Raisanen2008 - abstract: 'This paper describes the Sormina, a new 
virtual and tangibleinstrument, - which has its origins in both virtual technology andthe heritage of traditional - instrument design. The motivationbehind the project is presented, as well as hardware - andsoftware design. Insights gained through collaboration withacoustic musicians - are presented, as well as comparison tohistorical instrument design.' - address: 'Genoa, Italy' - author: 'Räisänen, Juhani' - bibtex: "@inproceedings{Raisanen2008,\n abstract = {This paper describes the Sormina,\ - \ a new virtual and tangibleinstrument, which has its origins in both virtual\ - \ technology andthe heritage of traditional instrument design. The motivationbehind\ - \ the project is presented, as well as hardware andsoftware design. Insights gained\ - \ through collaboration withacoustic musicians are presented, as well as comparison\ - \ tohistorical instrument design.},\n address = {Genoa, Italy},\n author = {R\\\ - ''{a}is\\''{a}nen, Juhani},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179617},\n\ - \ issn = {2220-4806},\n keywords = {Gestural controller, digital musical instrument,\ - \ usability, music history, design. 
},\n pages = {57--60},\n title = {Sormina\ - \ -- a New Virtual and Tangible Instrument},\n url = {http://www.nime.org/proceedings/2008/nime2008_057.pdf},\n\ - \ year = {2008}\n}\n" + ID: Biggs2007 + address: 'New York City, NY, United States' + author: 'Biggs, Betsey' + bibtex: "@inproceedings{Biggs2007,\n address = {New York City, NY, United States},\n\ + \ author = {Biggs, Betsey},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177047},\n\ + \ issn = {2220-4806},\n keywords = {nime07},\n pages = {424--424},\n title = {The\ + \ Tipping Point},\n url = {http://www.nime.org/proceedings/2007/nime2007_424.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179617 + doi: 10.5281/zenodo.1177047 issn: 2220-4806 - keywords: 'Gestural controller, digital musical instrument, usability, music history, - design. ' - pages: 57--60 - title: Sormina -- a New Virtual and Tangible Instrument - url: http://www.nime.org/proceedings/2008/nime2008_057.pdf - year: 2008 + keywords: nime07 + pages: 424--424 + title: The Tipping Point + url: http://www.nime.org/proceedings/2007/nime2007_424.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Berdahl2008a - abstract: 'The music community has long had a strong interest in haptic technology. - Recently, more effort has been put into making it more and more accessible to - instrument designers.This paper covers some of these technologies with the aimof - helping instrument designers add haptic feedback to theirinstruments. We begin - by giving a brief overview of practicalactuators. Next, we compare and contrast - using embeddedmicrocontrollers versus general purpose computers as controllers. - Along the way, we mention some common softwareenvironments for implementing control - algorithms. 
Then wediscuss the fundamental haptic control algorithms as well assome - more complex ones. Finally, we present two practicaland effective haptic musical - instruments: the haptic drumand the Cellomobo.' - address: 'Genoa, Italy' - author: 'Berdahl, Edgar and Steiner, Hans-Christoph and Oldham, Collin' - bibtex: "@inproceedings{Berdahl2008a,\n abstract = {The music community has long\ - \ had a strong interest in haptic technology. Recently, more effort has been put\ - \ into making it more and more accessible to instrument designers.This paper covers\ - \ some of these technologies with the aimof helping instrument designers add haptic\ - \ feedback to theirinstruments. We begin by giving a brief overview of practicalactuators.\ - \ Next, we compare and contrast using embeddedmicrocontrollers versus general\ - \ purpose computers as controllers. Along the way, we mention some common softwareenvironments\ - \ for implementing control algorithms. Then wediscuss the fundamental haptic control\ - \ algorithms as well assome more complex ones. 
Finally, we present two practicaland\ - \ effective haptic musical instruments: the haptic drumand the Cellomobo.},\n\ - \ address = {Genoa, Italy},\n author = {Berdahl, Edgar and Steiner, Hans-Christoph\ - \ and Oldham, Collin},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179495},\n\ - \ issn = {2220-4806},\n keywords = {haptic, actuator, practical, immersion, embedded,\ - \ sampling rate, woofer, haptic drum, Cellomobo },\n pages = {61--66},\n title\ - \ = {Practical Hardware and Algorithms for Creating Haptic Musical Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_061.pdf},\n year = {2008}\n\ + ID: Morris2007 + address: 'New York City, NY, United States' + author: 'Morris, Simon' + bibtex: "@inproceedings{Morris2007,\n address = {New York City, NY, United States},\n\ + \ author = {Morris, Simon},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177203},\n\ + \ issn = {2220-4806},\n keywords = {nime07},\n pages = {425--425},\n title = {Musique\ + \ Concrete : Transforming Space , Sound and the City Through Skateboarding},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_425.pdf},\n year = {2007}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179495 + doi: 10.5281/zenodo.1177203 issn: 2220-4806 - keywords: 'haptic, actuator, practical, immersion, embedded, sampling rate, woofer, - haptic drum, Cellomobo ' - pages: 61--66 - title: Practical Hardware and Algorithms for Creating Haptic Musical Instruments - url: http://www.nime.org/proceedings/2008/nime2008_061.pdf - year: 2008 + keywords: nime07 + pages: 425--425 + title: 'Musique Concrete : Transforming Space , Sound and the City Through Skateboarding' + url: http://www.nime.org/proceedings/2007/nime2007_425.pdf + year: 2007 - 
ENTRYTYPE: inproceedings - ID: Zoran2008 - address: 'Genoa, Italy' - author: 'Zoran, Amit and Maes, Pattie' - bibtex: "@inproceedings{Zoran2008,\n address = {Genoa, Italy},\n author = {Zoran,\ - \ Amit and Maes, Pattie},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177463},\n\ - \ issn = {2220-4806},\n keywords = {nime08},\n pages = {67--70},\n title = {Considering\ - \ Virtual \\& Physical Aspects in Acoustic Guitar Design},\n url = {http://www.nime.org/proceedings/2008/nime2008_067.pdf},\n\ - \ year = {2008}\n}\n" + ID: Uozumi2007 + address: 'New York City, NY, United States' + author: 'Uozumi, Yuta and Takahashi, Masato and Kobayashi, Ryoho' + bibtex: "@inproceedings{Uozumi2007,\n address = {New York City, NY, United States},\n\ + \ author = {Uozumi, Yuta and Takahashi, Masato and Kobayashi, Ryoho},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179467},\n issn = {2220-4806},\n keywords\ + \ = {nime07},\n pages = {426--426},\n title = {Bd : A Sound Installation with\ + \ Swarming Robots},\n url = {http://www.nime.org/proceedings/2007/nime2007_426.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177463 + doi: 10.5281/zenodo.1179467 issn: 2220-4806 - keywords: nime08 - pages: 67--70 - title: Considering Virtual \& Physical Aspects in Acoustic Guitar Design - url: http://www.nime.org/proceedings/2008/nime2008_067.pdf - year: 2008 + keywords: nime07 + pages: 426--426 + title: 'Bd : A Sound Installation with Swarming Robots' + url: http://www.nime.org/proceedings/2007/nime2007_426.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Menzies2008 - abstract: 'Phya is an open source C++ library originally designed foradding physically - modeled contact sounds into computergame environments equipped 
with physics engines. - We review some aspects of this system, and also consider it fromthe purely aesthetic - perspective of musical expression.' - address: 'Genoa, Italy' - author: 'Menzies, Dylan' - bibtex: "@inproceedings{Menzies2008,\n abstract = {Phya is an open source C++ library\ - \ originally designed foradding physically modeled contact sounds into computergame\ - \ environments equipped with physics engines. We review some aspects of this system,\ - \ and also consider it fromthe purely aesthetic perspective of musical expression.},\n\ - \ address = {Genoa, Italy},\n author = {Menzies, Dylan},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179599},\n issn = {2220-4806},\n keywords = {NIME, musical\ - \ expression, virtual reality, physical model- ing, audio synthesis },\n pages\ - \ = {71--76},\n title = {Virtual Intimacy : Phya as an Instrument},\n url = {http://www.nime.org/proceedings/2008/nime2008_071.pdf},\n\ - \ year = {2008}\n}\n" + ID: Stanza2007 + address: 'New York City, NY, United States' + author: ', Stanza' + bibtex: "@inproceedings{Stanza2007,\n address = {New York City, NY, United States},\n\ + \ author = {, Stanza},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177029},\n\ + \ issn = {2220-4806},\n keywords = {nime07},\n pages = {427--427},\n title = {Sensity},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_427.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179599 + doi: 10.5281/zenodo.1177029 issn: 2220-4806 - keywords: 'NIME, musical expression, virtual reality, physical model- ing, audio - synthesis ' - pages: 71--76 - title: 'Virtual Intimacy : Phya as an Instrument' - url: http://www.nime.org/proceedings/2008/nime2008_071.pdf - year: 2008 + 
keywords: nime07 + pages: 427--427 + title: Sensity + url: http://www.nime.org/proceedings/2007/nime2007_427.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Butler2008 - abstract: 'In this paper I discuss the importance of and need forpedagogical materials - to support the development of newinterfaces and new instruments for electronic - music. I describemy method for creating a graduated series of pedagogicaletudes - composed using Max/MSP. The etudes will helpperformers and instrument designers - learn the most commonlyused basic skills necessary to perform with interactiveelectronic - music instruments. My intention is that the finalseries will guide a beginner - from these initial steps through agraduated method, eventually incorporating some - of the moreadvanced techniques regularly used by electronic musiccomposers.I describe - the order of the series, and discuss the benefits (bothto performers and to composers) - of having a logical sequence ofskill-based etudes. I also connect the significance - of skilledperformers to the development of two essential areas that Iperceive - are still just emerging in this field: the creation of acomposed repertoire and - an increase in musical expressionduring performance.' - address: 'Genoa, Italy' - author: 'Butler, Jennifer' - bibtex: "@inproceedings{Butler2008,\n abstract = {In this paper I discuss the importance\ - \ of and need forpedagogical materials to support the development of newinterfaces\ - \ and new instruments for electronic music. I describemy method for creating a\ - \ graduated series of pedagogicaletudes composed using Max/MSP. The etudes will\ - \ helpperformers and instrument designers learn the most commonlyused basic skills\ - \ necessary to perform with interactiveelectronic music instruments. 
My intention\ - \ is that the finalseries will guide a beginner from these initial steps through\ - \ agraduated method, eventually incorporating some of the moreadvanced techniques\ - \ regularly used by electronic musiccomposers.I describe the order of the series,\ - \ and discuss the benefits (bothto performers and to composers) of having a logical\ - \ sequence ofskill-based etudes. I also connect the significance of skilledperformers\ - \ to the development of two essential areas that Iperceive are still just emerging\ - \ in this field: the creation of acomposed repertoire and an increase in musical\ - \ expressionduring performance.},\n address = {Genoa, Italy},\n author = {Butler,\ - \ Jennifer},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179503},\n issn\ - \ = {2220-4806},\n keywords = {composition,etudes,max,msp,musical controllers,musical\ - \ expression,nime08,pedagogy,repertoire},\n pages = {77--80},\n title = {Creating\ - \ Pedagogical Etudes for Interactive Instruments},\n url = {http://www.nime.org/proceedings/2008/nime2008_077.pdf},\n\ - \ year = {2008}\n}\n" + ID: Sa2007 + address: 'New York City, NY, United States' + author: 'Sa, Adriana' + bibtex: "@inproceedings{Sa2007,\n address = {New York City, NY, United States},\n\ + \ author = {Sa, Adriana},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177235},\n\ + \ issn = {2220-4806},\n keywords = {nime07},\n pages = {428--428},\n title = {Thresholds},\n\ + \ url = {http://www.nime.org/proceedings/2007/nime2007_428.pdf},\n year = {2007}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179503 + doi: 10.5281/zenodo.1177235 issn: 2220-4806 - keywords: 'composition,etudes,max,msp,musical controllers,musical expression,nime08,pedagogy,repertoire' - pages: 
77--80 - title: Creating Pedagogical Etudes for Interactive Instruments - url: http://www.nime.org/proceedings/2008/nime2008_077.pdf - year: 2008 + keywords: nime07 + pages: 428--428 + title: Thresholds + url: http://www.nime.org/proceedings/2007/nime2007_428.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Stowell2008 - abstract: 'The expressive and creative affordances of an interface aredifficult - to evaluate, particularly with quantitative methods.However, rigorous qualitative - methods do exist and can beused to investigate such topics. We present a methodologybased - around user studies involving Discourse Analysis ofspeech. We also present an - example of the methodologyin use: we evaluate a musical interface which utilises - vocaltimbre, with a user group of beatboxers.' - address: 'Genoa, Italy' - author: 'Stowell, Dan and Plumbley, Mark D. and Bryan-Kinns, Nick' - bibtex: "@inproceedings{Stowell2008,\n abstract = {The expressive and creative affordances\ - \ of an interface aredifficult to evaluate, particularly with quantitative methods.However,\ - \ rigorous qualitative methods do exist and can beused to investigate such topics.\ - \ We present a methodologybased around user studies involving Discourse Analysis\ - \ ofspeech. We also present an example of the methodologyin use: we evaluate a\ - \ musical interface which utilises vocaltimbre, with a user group of beatboxers.},\n\ - \ address = {Genoa, Italy},\n author = {Stowell, Dan and Plumbley, Mark D. 
and\ - \ Bryan-Kinns, Nick},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179631},\n\ - \ issn = {2220-4806},\n keywords = {discourse analysis,evaluation,nime08,qualitative\ - \ methods,voice},\n pages = {81--86},\n title = {Discourse Analysis Evaluation\ - \ Method for Expressive Musical Interfaces},\n url = {http://www.nime.org/proceedings/2008/nime2008_081.pdf},\n\ - \ year = {2008}\n}\n" + ID: Takahashi2007 + address: 'New York City, NY, United States' + author: 'Takahashi, Masato and Tanaka, Hiroya' + bibtex: "@inproceedings{Takahashi2007,\n address = {New York City, NY, United States},\n\ + \ author = {Takahashi, Masato and Tanaka, Hiroya},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177253},\n issn = {2220-4806},\n keywords = {nime07},\n\ + \ pages = {429--429},\n title = {bog : Instrumental Aliens},\n url = {http://www.nime.org/proceedings/2007/nime2007_429.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179631 + doi: 10.5281/zenodo.1177253 issn: 2220-4806 - keywords: 'discourse analysis,evaluation,nime08,qualitative methods,voice' - pages: 81--86 - title: Discourse Analysis Evaluation Method for Expressive Musical Interfaces - url: http://www.nime.org/proceedings/2008/nime2008_081.pdf - year: 2008 + keywords: nime07 + pages: 429--429 + title: 'bog : Instrumental Aliens' + url: http://www.nime.org/proceedings/2007/nime2007_429.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Kiefer2008 - abstract: 'There is small but useful body of research concerning theevaluation of - musical interfaces with HCI techniques. 
Inthis paper, we present a case study - in implementing thesetechniques; we describe a usability experiment which evaluated - the Nintendo Wiimote as a musical controller, andreflect on the effectiveness - of our choice of HCI methodologies in this context. The study offered some valuable - results,but our picture of the Wiimote was incomplete as we lackeddata concerning - the participants'' instantaneous musical experience. Recent trends in HCI are - leading researchers totackle this problem of evaluating user experience; we reviewsome - of their work and suggest that with some adaptation itcould provide useful new - tools and methodologies for computer musicians.' - address: 'Genoa, Italy' - author: 'Kiefer, Chris and Collins, Nick and Fitzpatrick, Geraldine' - bibtex: "@inproceedings{Kiefer2008,\n abstract = {There is small but useful body\ - \ of research concerning theevaluation of musical interfaces with HCI techniques.\ - \ Inthis paper, we present a case study in implementing thesetechniques; we describe\ - \ a usability experiment which evaluated the Nintendo Wiimote as a musical controller,\ - \ andreflect on the effectiveness of our choice of HCI methodologies in this context.\ - \ The study offered some valuable results,but our picture of the Wiimote was incomplete\ - \ as we lackeddata concerning the participants' instantaneous musical experience.\ - \ Recent trends in HCI are leading researchers totackle this problem of evaluating\ - \ user experience; we reviewsome of their work and suggest that with some adaptation\ - \ itcould provide useful new tools and methodologies for computer musicians.},\n\ - \ address = {Genoa, Italy},\n author = {Kiefer, Chris and Collins, Nick and Fitzpatrick,\ - \ Geraldine},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179577},\n issn\ - \ = {2220-4806},\n keywords = {HCI Methodology, Wiimote, Evaluating Musical Interaction\ - \ },\n 
pages = {87--90},\n title = {HCI Methodology For Evaluating Musical Controllers\ - \ : A Case Study},\n url = {http://www.nime.org/proceedings/2008/nime2008_087.pdf},\n\ - \ year = {2008}\n}\n" + ID: Oliver2007 + address: 'New York City, NY, United States' + author: 'Oliver, Julian and Pickles, Steven' + bibtex: "@inproceedings{Oliver2007,\n address = {New York City, NY, United States},\n\ + \ author = {Oliver, Julian and Pickles, Steven},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177213},\n issn = {2220-4806},\n keywords = {nime07},\n pages\ + \ = {430--430},\n title = {Fijuu2 : A Game-Based Audio-Visual Performance and\ + \ Composition Engine},\n url = {http://www.nime.org/proceedings/2007/nime2007_430.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179577 + doi: 10.5281/zenodo.1177213 issn: 2220-4806 - keywords: 'HCI Methodology, Wiimote, Evaluating Musical Interaction ' - pages: 87--90 - title: 'HCI Methodology For Evaluating Musical Controllers : A Case Study' - url: http://www.nime.org/proceedings/2008/nime2008_087.pdf - year: 2008 + keywords: nime07 + pages: 430--430 + title: 'Fijuu2 : A Game-Based Audio-Visual Performance and Composition Engine' + url: http://www.nime.org/proceedings/2007/nime2007_430.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Bau2008 - abstract: 'We combine two concepts, the musical instrument as metaphorand technology - probes, to explore how tangible interfaces canexploit the semantic richness of - sound. Using participatorydesign methods from Human-Computer Interaction (HCI), - wedesigned and tested the A20, a polyhedron-shaped, multichannel audio input/output - device. 
The software maps soundaround the edges and responds to the user''s gestural - input,allowing both aural and haptic modes of interaction as well asdirect manipulation - of media content. The software is designedto be very flexible and can be adapted - to a wide range ofshapes. Our tests of the A20''s perceptual and interactionproperties - showed that users can successfully detect soundplacement, movement and haptic - effects on this device. Ourparticipatory design workshops explored the possibilities - of theA20 as a generative tool for the design of an extended,collaborative personal - music player. The A20 helped users toenact scenarios of everyday mobile music - player use and togenerate new design ideas.' - address: 'Genoa, Italy' - author: 'Bau, Olivier and Tanaka, Atau and Mackay, Wendy E.' - bibtex: "@inproceedings{Bau2008,\n abstract = {We combine two concepts, the musical\ - \ instrument as metaphorand technology probes, to explore how tangible interfaces\ - \ canexploit the semantic richness of sound. Using participatorydesign methods\ - \ from Human-Computer Interaction (HCI), wedesigned and tested the A20, a polyhedron-shaped,\ - \ multichannel audio input/output device. The software maps soundaround the edges\ - \ and responds to the user's gestural input,allowing both aural and haptic modes\ - \ of interaction as well asdirect manipulation of media content. The software\ - \ is designedto be very flexible and can be adapted to a wide range ofshapes.\ - \ Our tests of the A20's perceptual and interactionproperties showed that users\ - \ can successfully detect soundplacement, movement and haptic effects on this\ - \ device. Ourparticipatory design workshops explored the possibilities of theA20\ - \ as a generative tool for the design of an extended,collaborative personal music\ - \ player. 
The A20 helped users toenact scenarios of everyday mobile music player\ - \ use and togenerate new design ideas.},\n address = {Genoa, Italy},\n author\ - \ = {Bau, Olivier and Tanaka, Atau and Mackay, Wendy E.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179489},\n issn = {2220-4806},\n keywords = {Generative\ - \ design tools, Instrument building, Multi-faceted audio, Personal music devices,\ - \ Tangible user interfaces, Technology probes },\n pages = {91--96},\n title =\ - \ {The A20 : Musical Metaphors for Interface Design},\n url = {http://www.nime.org/proceedings/2008/nime2008_091.pdf},\n\ - \ year = {2008}\n}\n" + ID: Corness2007 + address: 'New York City, NY, United States' + author: 'Seo, Jinsil and Corness, Greg' + bibtex: "@inproceedings{Corness2007,\n address = {New York City, NY, United States},\n\ + \ author = {Seo, Jinsil and Corness, Greg},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177243},\n issn = {2220-4806},\n keywords = {nime07},\n pages\ + \ = {431--431},\n title = {nite\\_aura : An Audio-Visual Interactive Immersive\ + \ Installation},\n url = {http://www.nime.org/proceedings/2007/nime2007_431.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179489 + doi: 10.5281/zenodo.1177243 issn: 2220-4806 - keywords: 'Generative design tools, Instrument building, Multi-faceted audio, Personal - music devices, Tangible user interfaces, Technology probes ' - pages: 91--96 - title: 'The A20 : Musical Metaphors for Interface Design' - url: http://www.nime.org/proceedings/2008/nime2008_091.pdf - year: 2008 + keywords: nime07 + pages: 431--431 + title: 'nite\_aura : An Audio-Visual Interactive Immersive Installation' + url: 
http://www.nime.org/proceedings/2007/nime2007_431.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Grosshauser2008 - abstract: 'The described project is a new approach to use highly sensitive low force - pressure sensor matrices for malposition, cramping and tension of hands and fingers, - gesture and keystroke analysis and for new musical expression. In the latter, - sensors are used as additional touch sensitive switches and keys. In pedagogical - issues, new ways of technology enhanced teaching, self teaching and exercising - are described. The used sensors are custom made in collaboration with the ReactiveS - Sensorlab. ' - address: 'Genoa, Italy' - author: 'Grosshauser, Tobias' - bibtex: "@inproceedings{Grosshauser2008,\n abstract = {The described project is\ - \ a new approach to use highly sensitive low force pressure sensor matrices for\ - \ malposition, cramping and tension of hands and fingers, gesture and keystroke\ - \ analysis and for new musical expression. In the latter, sensors are used as\ - \ additional touch sensitive switches and keys. In pedagogical issues, new ways\ - \ of technology enhanced teaching, self teaching and exercising are described.\ - \ The used sensors are custom made in collaboration with the ReactiveS Sensorlab.\ - \ },\n address = {Genoa, Italy},\n author = {Grosshauser, Tobias},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179551},\n issn = {2220-4806},\n keywords\ - \ = {Pressure Measurement, Force, Sensor, Finger, Violin, Strings, Piano, Left\ - \ Hand, Right Hand, Time Line, Cramping, Gesture and Posture Analysis. 
},\n pages\ - \ = {97--102},\n title = {Low Force Pressure Measurement : Pressure Sensor Matrices\ - \ for Gesture Analysis , Stiffness Recognition and Augmented Instruments},\n url\ - \ = {http://www.nime.org/proceedings/2008/nime2008_097.pdf},\n year = {2008}\n\ - }\n" + ID: Historia2007 + address: 'New York City, NY, United States' + author: '\''{A}lvarez-Fern\''{a}ndez, Miguel and Kersten, Stefan and Piascik, Asia' + bibtex: "@inproceedings{Historia2007,\n address = {New York City, NY, United States},\n\ + \ author = {\\'{A}lvarez-Fern\\'{a}ndez, Miguel and Kersten, Stefan and Piascik,\ + \ Asia},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177031},\n issn = {2220-4806},\n\ + \ keywords = {nime07},\n pages = {432--432},\n title = {Soundanism},\n url = {http://www.nime.org/proceedings/2007/nime2007_432.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179551 + doi: 10.5281/zenodo.1177031 issn: 2220-4806 - keywords: 'Pressure Measurement, Force, Sensor, Finger, Violin, Strings, Piano, - Left Hand, Right Hand, Time Line, Cramping, Gesture and Posture Analysis. ' - pages: 97--102 - title: 'Low Force Pressure Measurement : Pressure Sensor Matrices for Gesture Analysis - , Stiffness Recognition and Augmented Instruments' - url: http://www.nime.org/proceedings/2008/nime2008_097.pdf - year: 2008 + keywords: nime07 + pages: 432--432 + title: Soundanism + url: http://www.nime.org/proceedings/2007/nime2007_432.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Torre2008 - abstract: 'In this paper, we describe an algorithm for the numericalevaluation of - the orientation of an object to which a clusterof accelerometers, gyroscopes and - magnetometers has beenattached. The algorithm is implemented through a set ofMax/Msp - and pd new externals. 
Through the successfulimplementation of the algorithm, we - introduce Pointingat, a new gesture device for the control of sound in a 3Denvironment. - This work has been at the core of the Celeritas Project, an interdisciplinary - research project on motiontracking technology and multimedia live performances - between the Tyndall Institute of Cork and the InteractionDesign Centre of Limerick.' - address: 'Genoa, Italy' - author: 'Torre, Giuseppe and Torres, Javier and Fernström, Mikael' - bibtex: "@inproceedings{Torre2008,\n abstract = {In this paper, we describe an algorithm\ - \ for the numericalevaluation of the orientation of an object to which a clusterof\ - \ accelerometers, gyroscopes and magnetometers has beenattached. The algorithm\ - \ is implemented through a set ofMax/Msp and pd new externals. Through the successfulimplementation\ - \ of the algorithm, we introduce Pointingat, a new gesture device for the control\ - \ of sound in a 3Denvironment. This work has been at the core of the Celeritas\ - \ Project, an interdisciplinary research project on motiontracking technology\ - \ and multimedia live performances between the Tyndall Institute of Cork and the\ - \ InteractionDesign Centre of Limerick.},\n address = {Genoa, Italy},\n author\ - \ = {Torre, Giuseppe and Torres, Javier and Fernstr\\''{o}m, Mikael},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179641},\n issn = {2220-4806},\n keywords\ - \ = {eu-,ler,max,micro-electro-mechanical,msp,nime08,orientation matrix,pd,pitch\ - \ yaw and roll,quaternion,sensors,surement unit,tracking orientation,wimu,wireless\ - \ inertial mea-},\n pages = {103--106},\n title = {The Development of Motion Tracking\ - \ Algorithms for Low Cost Inertial Measurement Units},\n url = {http://www.nime.org/proceedings/2008/nime2008_103.pdf},\n\ - \ year = {2008}\n}\n" + ID: Quessy2007 + address: 'New York City, NY, United States' + 
author: 'Quessy, Alexandre' + bibtex: "@inproceedings{Quessy2007,\n address = {New York City, NY, United States},\n\ + \ author = {Quessy, Alexandre},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177225},\n\ + \ issn = {2220-4806},\n keywords = {nime07},\n pages = {433--433},\n title = {Human\ + \ Sequencer},\n url = {http://www.nime.org/proceedings/2007/nime2007_433.pdf},\n\ + \ year = {2007}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179641 + doi: 10.5281/zenodo.1177225 issn: 2220-4806 - keywords: 'eu-,ler,max,micro-electro-mechanical,msp,nime08,orientation matrix,pd,pitch - yaw and roll,quaternion,sensors,surement unit,tracking orientation,wimu,wireless - inertial mea-' - pages: 103--106 - title: The Development of Motion Tracking Algorithms for Low Cost Inertial Measurement - Units - url: http://www.nime.org/proceedings/2008/nime2008_103.pdf - year: 2008 + keywords: nime07 + pages: 433--433 + title: Human Sequencer + url: http://www.nime.org/proceedings/2007/nime2007_433.pdf + year: 2007 - ENTRYTYPE: inproceedings - ID: Freed2008 - abstract: 'The paper introduces new fiber and malleable materials,including piezoresistive - fabric and conductive heat-shrinktubing, and shows techniques and examples of - how they maybe used for rapid prototyping and agile development of musicalinstrument - controllers. New implementations of well-knowndesigns are covered as well as enhancements - of existingcontrollers. Finally, two new controllers are introduced that aremade - possible by these recently available materials andconstruction techniques.' 
- address: 'Genoa, Italy' - author: 'Freed, Adrian' - bibtex: "@inproceedings{Freed2008,\n abstract = {The paper introduces new fiber\ - \ and malleable materials,including piezoresistive fabric and conductive heat-shrinktubing,\ - \ and shows techniques and examples of how they maybe used for rapid prototyping\ - \ and agile development of musicalinstrument controllers. New implementations\ - \ of well-knowndesigns are covered as well as enhancements of existingcontrollers.\ - \ Finally, two new controllers are introduced that aremade possible by these recently\ - \ available materials andconstruction techniques.},\n address = {Genoa, Italy},\n\ - \ author = {Freed, Adrian},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179539},\n\ - \ issn = {2220-4806},\n keywords = {Agile Development, Rapid Prototyping, Conductive\ - \ fabric, Piezoresistive fabric, conductive heatshrink tubing, augmented instruments.\ - \ },\n pages = {107--112},\n title = {Application of new Fiber and Malleable Materials\ - \ for Agile Development of Augmented Instruments and Controllers},\n url = {http://www.nime.org/proceedings/2008/nime2008_107.pdf},\n\ - \ year = {2008}\n}\n" + ID: Collicutt2009 + abstract: 'This paper presents an evaluation and comparison of four input devices + for percussion tasks: a standard tom drum, Roland V-Drum, and two established + examples of gestural controllers: the Buchla Lightning II, and the Radio Baton. + The primary goal of this study was to determine how players'' actions changed + when moving from an acoustic instrument like the tom drum, to a gestural controller + like the Buchla Lightning, which bears little resemblance to an acoustic percussion + instrument. Motion capture data was analyzed by comparing a subject''s hand height + variability and timing accuracy across the four instruments as they performed + simple musical tasks. 
Results suggest that certain gestures such as hand height + amplitude can be adapted to these gestural controllers with little change and + that in general subjects'' timing variability is significantly affected when playing + on the Lightning and Radio Baton when compared to the more familiar tom drum and + VDrum. Possible explanations and other observations are also presented. ' + address: 'Pittsburgh, PA, United States' + author: 'Collicutt, Mike and Casciato, Carmine and Wanderley, Marcelo M.' + bibtex: "@inproceedings{Collicutt2009,\n abstract = {This paper presents an evaluation\ + \ and comparison of four input devices for percussion tasks: a standard tom drum,\ + \ Roland V-Drum, and two established examples of gestural controllers: the Buchla\ + \ Lightning II, and the Radio Baton. The primary goal of this study was to determine\ + \ how players' actions changed when moving from an acoustic instrument like the\ + \ tom drum, to a gestural controller like the Buchla Lightning, which bears little\ + \ resemblance to an acoustic percussion instrument. Motion capture data was analyzed\ + \ by comparing a subject's hand height variability and timing accuracy across\ + \ the four instruments as they performed simple musical tasks. Results suggest\ + \ that certain gestures such as hand height amplitude can be adapted to these\ + \ gestural controllers with little change and that in general subjects' timing\ + \ variability is significantly affected when playing on the Lightning and Radio\ + \ Baton when compared to the more familiar tom drum and VDrum. Possible explanations\ + \ and other observations are also presented. 
},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Collicutt, Mike and Casciato, Carmine and Wanderley, Marcelo\ + \ M.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177491},\n issn = {2220-4806},\n\ + \ keywords = {Evaluation of Input Devices, Motion Capture, Buchla Lightning II,\ + \ Radio Baton. },\n pages = {1--6},\n title = {From Real to Virtual : A Comparison\ + \ of Input Devices for Percussion Tasks},\n url = {http://www.nime.org/proceedings/2009/nime2009_001.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179539 + doi: 10.5281/zenodo.1177491 issn: 2220-4806 - keywords: 'Agile Development, Rapid Prototyping, Conductive fabric, Piezoresistive - fabric, conductive heatshrink tubing, augmented instruments. ' - pages: 107--112 - title: Application of new Fiber and Malleable Materials for Agile Development of - Augmented Instruments and Controllers - url: http://www.nime.org/proceedings/2008/nime2008_107.pdf - year: 2008 + keywords: 'Evaluation of Input Devices, Motion Capture, Buchla Lightning II, Radio + Baton. ' + pages: 1--6 + title: 'From Real to Virtual : A Comparison of Input Devices for Percussion Tasks' + url: http://www.nime.org/proceedings/2009/nime2009_001.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Crevoisier2008 - abstract: 'In this paper, we describe a set of hardware and software tools for creating - musical controllers with any flat surface or simple object, such as tables, walls, - metallic plates, wood boards, etc. The system makes possible to transform such - physical objects and surfaces into virtual control interfaces, by using computer - vision technologies to track the interaction made by the musician, either with - the hands, mallets or sticks. 
These new musical interfaces, freely reconfigurable, - can be used to control standard sound modules or effect processors, by defining - zones on their surface and assigning them musical commands, such as the triggering - of notes or the modulation of parameters.' - address: 'Genoa, Italy' - author: 'Crevoisier, Alain and Kellum, Greg' - bibtex: "@inproceedings{Crevoisier2008,\n abstract = {In this paper, we describe\ - \ a set of hardware and software tools for creating musical controllers with any\ - \ flat surface or simple object, such as tables, walls, metallic plates, wood\ - \ boards, etc. The system makes possible to transform such physical objects and\ - \ surfaces into virtual control interfaces, by using computer vision technologies\ - \ to track the interaction made by the musician, either with the hands, mallets\ - \ or sticks. These new musical interfaces, freely reconfigurable, can be used\ - \ to control standard sound modules or effect processors, by defining zones on\ - \ their surface and assigning them musical commands, such as the triggering of\ - \ notes or the modulation of parameters.},\n address = {Genoa, Italy},\n author\ - \ = {Crevoisier, Alain and Kellum, Greg},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179517},\n\ - \ issn = {2220-4806},\n keywords = {Computer Vision, Multi-touch Interaction,\ - \ Musical Interfaces. },\n pages = {113--116},\n title = {Transforming Ordinary\ - \ Surfaces into Multi-touch Controllers},\n url = {http://www.nime.org/proceedings/2008/nime2008_113.pdf},\n\ - \ year = {2008}\n}\n" + ID: Hadjakos2009 + abstract: 'Measurement of pianists'' arm movement provides a signal,which is composed + of controlled movements and noise. Thenoise is composed of uncontrolled movement + generated bythe interaction of the arm with the piano action and measurement error. 
+ We propose a probabilistic model for armtouch movements, which allows to estimate + the amount ofnoise in a joint. This estimation helps to interpret the movement + signal, which is of interest for augmented piano andpiano pedagogy applications.' + address: 'Pittsburgh, PA, United States' + author: 'Hadjakos, Aristotelis and Aitenbichler, Erwin and Mühlhäuser, Max' + bibtex: "@inproceedings{Hadjakos2009,\n abstract = {Measurement of pianists' arm\ + \ movement provides a signal,which is composed of controlled movements and noise.\ + \ Thenoise is composed of uncontrolled movement generated bythe interaction of\ + \ the arm with the piano action and measurement error. We propose a probabilistic\ + \ model for armtouch movements, which allows to estimate the amount ofnoise in\ + \ a joint. This estimation helps to interpret the movement signal, which is of\ + \ interest for augmented piano andpiano pedagogy applications.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Hadjakos, Aristotelis and Aitenbichler, Erwin\ + \ and M\\''{u}hlh\\''{a}user, Max},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177567},\n\ + \ issn = {2220-4806},\n keywords = {Piano, arm movement, gesture, classification,\ + \ augmented instrument, inertial sensing. },\n pages = {7--12},\n title = {Probabilistic\ + \ Model of Pianists' Arm Touch Movements},\n url = {http://www.nime.org/proceedings/2009/nime2009_007.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179517 + doi: 10.5281/zenodo.1177567 issn: 2220-4806 - keywords: 'Computer Vision, Multi-touch Interaction, Musical Interfaces. 
' - pages: 113--116 - title: Transforming Ordinary Surfaces into Multi-touch Controllers - url: http://www.nime.org/proceedings/2008/nime2008_113.pdf - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: Ward2008 - abstract: 'This paper presents a comparison of the movement styles of two theremin - players based on observation and analysis of video recordings. The premise behind - this research is that a consideration of musicians'' movements could form the - basis for a new framework for the design of new instruments. Laban Movement Analysis - is used to qualitatively analyse the movement styles of the musicians and to argue - that the Recuperation phase of their phrasing is essential to achieve satisfactory - performance. ' - address: 'Genoa, Italy' - author: 'Ward, Nicholas and Penfield, Kedzie and O''Modhrain, Sile and Knapp, Benjamin' - bibtex: "@inproceedings{Ward2008,\n abstract = {This paper presents a comparison\ - \ of the movement styles of two theremin players based on observation and analysis\ - \ of video recordings. The premise behind this research is that a consideration\ - \ of musicians' movements could form the basis for a new framework for the design\ - \ of new instruments. Laban Movement Analysis is used to qualitatively analyse\ - \ the movement styles of the musicians and to argue that the Recuperation phase\ - \ of their phrasing is essential to achieve satisfactory performance. 
},\n address\ - \ = {Genoa, Italy},\n author = {Ward, Nicholas and Penfield, Kedzie and O'Modhrain,\ - \ Sile and Knapp, Benjamin},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179649},\n\ - \ issn = {2220-4806},\n keywords = {Effort Phrasing, Recuperation, Laban Movement\ - \ Analysis, Theremin },\n pages = {117--121},\n title = {A Study of Two Thereminists\ - \ : Towards Movement Informed Instrument Design},\n url = {http://www.nime.org/proceedings/2008/nime2008_117.pdf},\n\ - \ year = {2008}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1179649 - issn: 2220-4806 - keywords: 'Effort Phrasing, Recuperation, Laban Movement Analysis, Theremin ' - pages: 117--121 - title: 'A Study of Two Thereminists : Towards Movement Informed Instrument Design' - url: http://www.nime.org/proceedings/2008/nime2008_117.pdf - year: 2008 + keywords: 'Piano, arm movement, gesture, classification, augmented instrument, inertial + sensing. ' + pages: 7--12 + title: Probabilistic Model of Pianists' Arm Touch Movements + url: http://www.nime.org/proceedings/2009/nime2009_007.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Maniatakos2008 - address: 'Genoa, Italy' - author: 'Maniatakos, Vassilios-Fivos A. and Jacquemin, Christian' - bibtex: "@inproceedings{Maniatakos2008,\n address = {Genoa, Italy},\n author = {Maniatakos,\ - \ Vassilios-Fivos A. 
and Jacquemin, Christian},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1179595},\n issn = {2220-4806},\n keywords = {affective computing,\ - \ interactive performance, HMM, gesture recognition, intelligent mapping, affective\ - \ interface },\n pages = {122--127},\n title = {Towards an Affective Gesture Interface\ - \ for Expressive Music Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_122.pdf},\n\ - \ year = {2008}\n}\n" + ID: Gelineck2009 + abstract: 'This paper presents a HCI inspired evaluation of simple physical interfaces + used to control physical models. Specifically knobs and sliders are compared in + a creative and exploratory framework, which simulates the natural environment + in which an electronic musician would normally explore a new instrument. No significant + difference was measured between using knobs and sliders for controlling parameters + of a physical modeling electronic instrument. Thereported difference between the + tested instruments were mostlydue to the sound synthesis models.' + address: 'Pittsburgh, PA, United States' + author: 'Gelineck, Steven and Serafin, Stefania' + bibtex: "@inproceedings{Gelineck2009,\n abstract = {This paper presents a HCI inspired\ + \ evaluation of simple physical interfaces used to control physical models. Specifically\ + \ knobs and sliders are compared in a creative and exploratory framework, which\ + \ simulates the natural environment in which an electronic musician would normally\ + \ explore a new instrument. No significant difference was measured between using\ + \ knobs and sliders for controlling parameters of a physical modeling electronic\ + \ instrument. 
Thereported difference between the tested instruments were mostlydue\ + \ to the sound synthesis models.},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Gelineck, Steven and Serafin, Stefania},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177549},\n issn = {2220-4806},\n keywords = {Evaluation,\ + \ Interfaces, Sliders, Knobs, Physi- cal Modeling, Electronic Musicians, Exploration,\ + \ Creativ- ity, Affordances. },\n pages = {13--18},\n title = {A Quantitative\ + \ Evaluation of the Differences between Knobs and Sliders},\n url = {http://www.nime.org/proceedings/2009/nime2009_013.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179595 + doi: 10.5281/zenodo.1177549 issn: 2220-4806 - keywords: 'affective computing, interactive performance, HMM, gesture recognition, - intelligent mapping, affective interface ' - pages: 122--127 - title: Towards an Affective Gesture Interface for Expressive Music Performance - url: http://www.nime.org/proceedings/2008/nime2008_122.pdf - year: 2008 + keywords: 'Evaluation, Interfaces, Sliders, Knobs, Physi- cal Modeling, Electronic + Musicians, Exploration, Creativ- ity, Affordances. ' + pages: 13--18 + title: A Quantitative Evaluation of the Differences between Knobs and Sliders + url: http://www.nime.org/proceedings/2009/nime2009_013.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Kallblad2008 - abstract: 'It started with an idea to create an empty space in which you activated - music and light as you moved around. In responding to the music and lighting you - would activate more or different sounds and thereby communicate with the space - through your body. 
This led to an artistic research project in which children''s - spontaneous movement was observed, a choreography made based on the children''s - movements and music written and recorded for the choreography. This music was - then decomposed and choreographed into an empty space at Botkyrka konsthall creating - an interactive dance installation. It was realized using an interactive sound - and light system in which 5 video cameras were detecting the motion in the room - connected to a 4-channel sound system and a set of 14 light modules. During five - weeks people of all ages came to dance and move around in the installation. The - installation attracted a wide range of people of all ages and the tentative evaluation - indicates that it was very positively received and that it encouraged free movement - in the intended way. Besides observing the activity in the installation interviews - were made with schoolchildren age 7 who had participated in the installation. ' - address: 'Genoa, Italy' - author: 'Källblad, Anna and Friberg, Anders and Svensson, Karl and Edelholm, Elisabet - S.' - bibtex: "@inproceedings{Kallblad2008,\n abstract = {It started with an idea to create\ - \ an empty space in which you activated music and light as you moved around. In\ - \ responding to the music and lighting you would activate more or different sounds\ - \ and thereby communicate with the space through your body. This led to an artistic\ - \ research project in which children's spontaneous movement was observed, a choreography\ - \ made based on the children's movements and music written and recorded for the\ - \ choreography. This music was then decomposed and choreographed into an empty\ - \ space at Botkyrka konsthall creating an interactive dance installation. It was\ - \ realized using an interactive sound and light system in which 5 video cameras\ - \ were detecting the motion in the room connected to a 4-channel sound system\ - \ and a set of 14 light modules. 
During five weeks people of all ages came to\ - \ dance and move around in the installation. The installation attracted a wide\ - \ range of people of all ages and the tentative evaluation indicates that it was\ - \ very positively received and that it encouraged free movement in the intended\ - \ way. Besides observing the activity in the installation interviews were made\ - \ with schoolchildren age 7 who had participated in the installation. },\n address\ - \ = {Genoa, Italy},\n author = {K\\''{a}llblad, Anna and Friberg, Anders and Svensson,\ - \ Karl and Edelholm, Elisabet S.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179573},\n\ - \ issn = {2220-4806},\n keywords = {Installation, dance, video recognition, children's\ - \ movement, interactive multimedia },\n pages = {128--133},\n title = {Hoppsa\ - \ Universum -- An Interactive Dance Installation for Children},\n url = {http://www.nime.org/proceedings/2008/nime2008_128.pdf},\n\ - \ year = {2008}\n}\n" + ID: Pedrosa2009 + abstract: 'Haptic feedback is an important element that needs to be carefully designed + in computer music interfaces. This paper presents an evaluation of several force + renderings for target acquisition in space when used to support a music related + task. The study presented here addresses only one musical aspect: the need to + repeat elements accurately in time and in content. Several force scenarios will + be rendered over a simple 3D target acquisition task and users'' performance will + be quantitatively and qualitatively evaluated. The results show how the users'' + subjective preference for a particular kind of force support does not always correlate + to a quantitative measurement of performance enhancement. We describe a way in + which a control mapping for a musical interface could be achieved without contradicting + the users'' preferences as obtained from the study. 
' + address: 'Pittsburgh, PA, United States' + author: 'Pedrosa, Ricardo and Maclean, Karon E.' + bibtex: "@inproceedings{Pedrosa2009,\n abstract = {Haptic feedback is an important\ + \ element that needs to be carefully designed in computer music interfaces. This\ + \ paper presents an evaluation of several force renderings for target acquisition\ + \ in space when used to support a music related task. The study presented here\ + \ addresses only one musical aspect: the need to repeat elements accurately in\ + \ time and in content. Several force scenarios will be rendered over a simple\ + \ 3D target acquisition task and users' performance will be quantitatively and\ + \ qualitatively evaluated. The results show how the users' subjective preference\ + \ for a particular kind of force support does not always correlate to a quantitative\ + \ measurement of performance enhancement. We describe a way in which a control\ + \ mapping for a musical interface could be achieved without contradicting the\ + \ users' preferences as obtained from the study. },\n address = {Pittsburgh, PA,\ + \ United States},\n author = {Pedrosa, Ricardo and Maclean, Karon E.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177657},\n issn = {2220-4806},\n keywords\ + \ = {music interfaces, force feedback, tempo, comfort, target acquisition. 
},\n\ + \ pages = {19--24},\n title = {Evaluation of {3D} Haptic Target Rendering to Support\ + \ Timing in Music Tasks},\n url = {http://www.nime.org/proceedings/2009/nime2009_019.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179573 + doi: 10.5281/zenodo.1177657 issn: 2220-4806 - keywords: 'Installation, dance, video recognition, children''s movement, interactive - multimedia ' - pages: 128--133 - title: Hoppsa Universum -- An Interactive Dance Installation for Children - url: http://www.nime.org/proceedings/2008/nime2008_128.pdf - year: 2008 + keywords: 'music interfaces, force feedback, tempo, comfort, target acquisition. ' + pages: 19--24 + title: Evaluation of 3D Haptic Target Rendering to Support Timing in Music Tasks + url: http://www.nime.org/proceedings/2009/nime2009_019.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Camurri2008 - address: 'Genoa, Italy' - author: 'Camurri, Antonio and Canepa, Corrado and Coletta, Paolo and Mazzarino, - Barbara and Volpe, Gualtiero' - bibtex: "@inproceedings{Camurri2008,\n address = {Genoa, Italy},\n author = {Camurri,\ - \ Antonio and Canepa, Corrado and Coletta, Paolo and Mazzarino, Barbara and Volpe,\ - \ Gualtiero},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179505},\n issn\ - \ = {2220-4806},\n keywords = {Active listening of music, expressive interfaces,\ - \ full-body motion analysis and expressive gesture processing, multimodal interactive\ - \ systems for music and performing arts applications, collaborative environments,\ - \ social interaction. 
},\n pages = {134--139},\n title = {Mappe per Affetti Erranti\ - \ : a Multimodal System for Social Active Listening and Expressive Performance},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_134.pdf},\n year = {2008}\n\ - }\n" + ID: Hsu2009 + abstract: 'In this paper, we discuss a number of issues related to the design of + evaluation tests for comparing interactive music systems for improvisation. Our + testing procedure covers rehearsal and performance environments, and captures + the experiences of a musician/participant as well as an audience member/observer. + We attempt to isolate salient components of system behavior, and test whether + the musician or audience are able to discern between systems with significantly + different behavioral components. We report on our experiences with our testing + methodology, in comparative studies of our London and ARHS improvisation systems + [1]. ' + address: 'Pittsburgh, PA, United States' + author: 'Hsu, William and Sosnick, Marc' + bibtex: "@inproceedings{Hsu2009,\n abstract = {In this paper, we discuss a number\ + \ of issues related to the design of evaluation tests for comparing interactive\ + \ music systems for improvisation. Our testing procedure covers rehearsal and\ + \ performance environments, and captures the experiences of a musician/participant\ + \ as well as an audience member/observer. We attempt to isolate salient components\ + \ of system behavior, and test whether the musician or audience are able to discern\ + \ between systems with significantly different behavioral components. We report\ + \ on our experiences with our testing methodology, in comparative studies of our\ + \ London and ARHS improvisation systems [1]. 
},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Hsu, William and Sosnick, Marc},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177579},\n issn = {2220-4806},\n keywords = {Interactive\ + \ music systems, human computer interaction, evaluation tests. },\n pages = {25--28},\n\ + \ title = {Evaluating Interactive Music Systems : An HCI Approach},\n url = {http://www.nime.org/proceedings/2009/nime2009_025.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179505 + doi: 10.5281/zenodo.1177579 issn: 2220-4806 - keywords: 'Active listening of music, expressive interfaces, full-body motion analysis - and expressive gesture processing, multimodal interactive systems for music and - performing arts applications, collaborative environments, social interaction. ' - pages: 134--139 - title: 'Mappe per Affetti Erranti : a Multimodal System for Social Active Listening - and Expressive Performance' - url: http://www.nime.org/proceedings/2008/nime2008_134.pdf - year: 2008 + keywords: 'Interactive music systems, human computer interaction, evaluation tests. ' + pages: 25--28 + title: 'Evaluating Interactive Music Systems : An HCI Approach' + url: http://www.nime.org/proceedings/2009/nime2009_025.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Canazza2008 - abstract: "Musical open works can be often thought like sequences of musical structures,\ - \ which can be arranged by anyone who had access to them and who wished to realize\ - \ the work. This paper proposes an innovative agent-based system to model the\ - \ information and organize it in structured knowledge; to create effective, graph-centric\ - \ browsing perspectives and views for the user; to use ,\n,\nauthoring tools for\ - \ the performance of open work of electro-acoustic music. 
" - address: 'Genoa, Italy' - author: 'Canazza, Sergio and Dattolo, Antonina' - bibtex: "@inproceedings{Canazza2008,\n abstract = {Musical open works can be often\ - \ thought like sequences of musical structures, which can be arranged by anyone\ - \ who had access to them and who wished to realize the work. This paper proposes\ - \ an innovative agent-based system to model the information and organize it in\ - \ structured knowledge; to create effective, graph-centric browsing perspectives\ - \ and views for the user; to use ,\n,\nauthoring tools for the performance of\ - \ open work of electro-acoustic music. },\n address = {Genoa, Italy},\n author\ - \ = {Canazza, Sergio and Dattolo, Antonina},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179507},\n issn = {2220-4806},\n keywords = {Musical Open Work,\ - \ Multimedia Information Systems, Software Agents, zz-structures. },\n pages =\ - \ {140--143},\n title = {New Data Structure for Old Musical Open Works},\n url\ - \ = {http://www.nime.org/proceedings/2008/nime2008_140.pdf},\n year = {2008}\n\ + ID: Spowage2009 + abstract: 'Due to the accelerating development of ‘rapidly to become redundant’ + technologies, there is a growing mountain of perfectly serviceable discarded electronic + devices hiding quietly at the bottom of almost every domestic rubbish pile or + at the back of nearly every second hand shop. If you add in to this scenario the + accelerating nature of our society where people don’t have time or the motivation + in their lives to sell or auction their redundant electronics, one can discover + a plethora of discarded materials available for salvage. Using this as a starting + point, I have produced a portable noise instrument from recycled materials, that + is primarily an artistic led venture, built specifically for live performance.' 
+ address: 'Pittsburgh, PA, United States' + author: 'Spowage, Neal' + bibtex: "@inproceedings{Spowage2009,\n abstract = {Due to the accelerating development\ + \ of ‘rapidly to become redundant’ technologies, there is a growing mountain of\ + \ perfectly serviceable discarded electronic devices hiding quietly at the bottom\ + \ of almost every domestic rubbish pile or at the back of nearly every second\ + \ hand shop. If you add in to this scenario the accelerating nature of our society\ + \ where people don’t have time or the motivation in their lives to sell or auction\ + \ their redundant electronics, one can discover a plethora of discarded materials\ + \ available for salvage. Using this as a starting point, I have produced a portable\ + \ noise instrument from recycled materials, that is primarily an artistic led\ + \ venture, built specifically for live performance.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Spowage, Neal},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177683},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ + \ = {29--30},\n title = {The Ghetto Bastard : A Portable Noise Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2009/nime2009_029.pdf},\n year = {2009}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179507 + doi: 10.5281/zenodo.1177683 issn: 2220-4806 - keywords: 'Musical Open Work, Multimedia Information Systems, Software Agents, zz-structures. 
' - pages: 140--143 - title: New Data Structure for Old Musical Open Works - url: http://www.nime.org/proceedings/2008/nime2008_140.pdf - year: 2008 + keywords: nime09 + pages: 29--30 + title: 'The Ghetto Bastard : A Portable Noise Instrument' + url: http://www.nime.org/proceedings/2009/nime2009_029.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Eigenfeldt2008 - abstract: 'This paper presents an agent-based architecture for robotic musical instruments - that generate polyphonic rhythmic patterns that continuously evolve and develop - in a musically "intelligent" manner. Agent-based software offers a new method - for real-time composition that allows for complex interactions between individual - voices while requiring very little user interaction or supervision. The system - described, Kinetic Engine, is an environment in which individual software agents, - emulate drummers improvising within a percussion ensemble. Player agents assume - roles and personalities within the ensemble, and communicate with one another - to create complex rhythmic interactions. In this project, the ensemble is comprised - of a 12-armed musical robot, MahaDeviBot, in which each limb has its own software - agent controlling what it performs. ' - address: 'Genoa, Italy' - author: 'Eigenfeldt, Arne and Kapur, Ajay' - bibtex: "@inproceedings{Eigenfeldt2008,\n abstract = {This paper presents an agent-based\ - \ architecture for robotic musical instruments that generate polyphonic rhythmic\ - \ patterns that continuously evolve and develop in a musically \"intelligent\"\ - \ manner. Agent-based software offers a new method for real-time composition that\ - \ allows for complex interactions between individual voices while requiring very\ - \ little user interaction or supervision. The system described, Kinetic Engine,\ - \ is an environment in which individual software agents, emulate drummers improvising\ - \ within a percussion ensemble. 
Player agents assume roles and personalities within\ - \ the ensemble, and communicate with one another to create complex rhythmic interactions.\ - \ In this project, the ensemble is comprised of a 12-armed musical robot, MahaDeviBot,\ - \ in which each limb has its own software agent controlling what it performs.\ - \ },\n address = {Genoa, Italy},\n author = {Eigenfeldt, Arne and Kapur, Ajay},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179527},\n issn = {2220-4806},\n\ - \ keywords = {Robotic Musical Instruments, Agents, Machine Musicianship. },\n\ - \ pages = {144--149},\n title = {An Agent-based System for Robotic Musical Performance},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_144.pdf},\n year = {2008}\n\ - }\n" + ID: Humphrey2009 + abstract: 'Motivated by previous work aimed at developing mathematical models to + describe expressive timing in music, and specifically the final ritardandi, using + measured kinematic data, we further investigate the linkage of locomotion and + timing in music. The natural running behavior of four subjects is measured with + a wearable sensor prototype and analyzed to create normalized tempo curves. The + resulting curves are then used to modulate the final ritard of MIDI scores, which + are also performed by an expert musician. A Turing-inspired listening test is + conducted to observe a human listener''s ability to determine the nature of the + performer. ' + address: 'Pittsburgh, PA, United States' + author: 'Humphrey, Eric and Leider, Colby' + bibtex: "@inproceedings{Humphrey2009,\n abstract = {Motivated by previous work aimed\ + \ at developing mathematical models to describe expressive timing in music, and\ + \ specifically the final ritardandi, using measured kinematic data, we further\ + \ investigate the linkage of locomotion and timing in music. 
The natural running\ + \ behavior of four subjects is measured with a wearable sensor prototype and analyzed\ + \ to create normalized tempo curves. The resulting curves are then used to modulate\ + \ the final ritard of MIDI scores, which are also performed by an expert musician.\ + \ A Turing-inspired listening test is conducted to observe a human listener's\ + \ ability to determine the nature of the performer. },\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Humphrey, Eric and Leider, Colby},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177581},\n issn = {2220-4806},\n keywords\ + \ = {Musical kinematics, expressive tempo, machine music. },\n pages = {31--32},\n\ + \ title = {The Navi Activity Monitor : Toward Using Kinematic Data to Humanize\ + \ Computer Music},\n url = {http://www.nime.org/proceedings/2009/nime2009_031.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179527 + doi: 10.5281/zenodo.1177581 issn: 2220-4806 - keywords: 'Robotic Musical Instruments, Agents, Machine Musicianship. ' - pages: 144--149 - title: An Agent-based System for Robotic Musical Performance - url: http://www.nime.org/proceedings/2008/nime2008_144.pdf - year: 2008 + keywords: 'Musical kinematics, expressive tempo, machine music. ' + pages: 31--32 + title: 'The Navi Activity Monitor : Toward Using Kinematic Data to Humanize Computer + Music' + url: http://www.nime.org/proceedings/2009/nime2009_031.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Goina2008 - abstract: 'In this paper, we investigate the relationships between gesture and sound - by means of an elementary gesture sonification. This work takes inspiration from - Bauhaus'' ideals and Paul Klee''s investigation into forms and pictorial representation. 
- In line with these ideas, the main aim of this work is to reduce gesture to a - combination of a small number of elementary components (gestalts) used to control - a corresponding small set of sounds. By means of a demonstrative tool, we introduce - here a line of research that is at its initial stage. The envisaged goal of future - developments is a novel system that could be a composing/improvising tool as well - as an interface for interactive dance and performance. ' - address: 'Genoa, Italy' - author: 'Goina, Maurizio and Polotti, Pietro' - bibtex: "@inproceedings{Goina2008,\n abstract = {In this paper, we investigate the\ - \ relationships between gesture and sound by means of an elementary gesture sonification.\ - \ This work takes inspiration from Bauhaus' ideals and Paul Klee's investigation\ - \ into forms and pictorial representation. In line with these ideas, the main\ - \ aim of this work is to reduce gesture to a combination of a small number of\ - \ elementary components (gestalts) used to control a corresponding small set of\ - \ sounds. By means of a demonstrative tool, we introduce here a line of research\ - \ that is at its initial stage. The envisaged goal of future developments is a\ - \ novel system that could be a composing/improvising tool as well as an interface\ - \ for interactive dance and performance. },\n address = {Genoa, Italy},\n author\ - \ = {Goina, Maurizio and Polotti, Pietro},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179549},\n\ - \ issn = {2220-4806},\n keywords = {Bauhaus, Klee, gesture analysis, sonification.\ - \ },\n pages = {150--153},\n title = {Elementary Gestalts for Gesture Sonification},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_150.pdf},\n year = {2008}\n\ - }\n" + ID: Muller2009 + abstract: 'Vibetone is a musical input device which was build to explore tactile + feedback in gesture based interaction. 
It is a prototype aimed to allow the performer + to play both continuously and discrete pitched sounds in the same space. Our primary + focus is on tactile feedback to guide the artist''s movements during his performance. + Thus, also untrained users are enabled to musical expression through bodily actions + and precisely arm movements, guided through tactile feedback signals. ' + address: 'Pittsburgh, PA, United States' + author: 'Müller, Alexander and Essl, Georg' + bibtex: "@inproceedings{Muller2009,\n abstract = {Vibetone is a musical input device\ + \ which was build to explore tactile feedback in gesture based interaction. It\ + \ is a prototype aimed to allow the performer to play both continuously and discrete\ + \ pitched sounds in the same space. Our primary focus is on tactile feedback to\ + \ guide the artist's movements during his performance. Thus, also untrained users\ + \ are enabled to musical expression through bodily actions and precisely arm movements,\ + \ guided through tactile feedback signals. },\n address = {Pittsburgh, PA, United\ + \ States},\n author = {M\\''{u}ller, Alexander and Essl, Georg},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177623},\n issn = {2220-4806},\n keywords = {tactile\ + \ feedback, intuitive interaction, gestural interaction, MIDI controller },\n\ + \ pages = {33--34},\n title = {Utilizing Tactile Feedback to Guide Movements Between\ + \ Sounds},\n url = {http://www.nime.org/proceedings/2009/nime2009_033.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179549 + doi: 10.5281/zenodo.1177623 issn: 2220-4806 - keywords: 'Bauhaus, Klee, gesture analysis, sonification. 
' - pages: 150--153 - title: Elementary Gestalts for Gesture Sonification - url: http://www.nime.org/proceedings/2008/nime2008_150.pdf - year: 2008 + keywords: 'tactile feedback, intuitive interaction, gestural interaction, MIDI controller ' + pages: 33--34 + title: Utilizing Tactile Feedback to Guide Movements Between Sounds + url: http://www.nime.org/proceedings/2009/nime2009_033.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: DelleMonache2008 - abstract: 'We present our work with augmented everyday objectstransformed into sound - sources for music generation. The idea isto give voice to objects through technology. - More specifically, theparadigm of the birth of musical instruments as a sonification - ofobjects used in domestic or work everyday environments is hereconsidered and - transposed into the technologically augmentedscenarios of our contemporary world.' - address: 'Genoa, Italy' - author: 'Delle Monache, Stefano and Polotti, Pietro and Papetti, Stefano and Rocchesso, - Davide' - bibtex: "@inproceedings{DelleMonache2008,\n abstract = {We present our work with\ - \ augmented everyday objectstransformed into sound sources for music generation.\ - \ The idea isto give voice to objects through technology. More specifically, theparadigm\ - \ of the birth of musical instruments as a sonification ofobjects used in domestic\ - \ or work everyday environments is hereconsidered and transposed into the technologically\ - \ augmentedscenarios of our contemporary world.},\n address = {Genoa, Italy},\n\ - \ author = {Delle Monache, Stefano and Polotti, Pietro and Papetti, Stefano and\ - \ Rocchesso, Davide},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179519},\n\ - \ issn = {2220-4806},\n keywords = {Rag-time washboard, sounding objects, physics-based\ - \ sound synthesis, interactivity, sonification, augmented everyday objects. 
},\n\ - \ pages = {154--157},\n title = {Sonically Augmented Found Objects},\n url = {http://www.nime.org/proceedings/2008/nime2008_154.pdf},\n\ - \ year = {2008}\n}\n" + ID: Ferguson2009 + abstract: 'Sonification is generally considered in a statistical data analysis context. + This research discusses the development of an interface for live control of sonification + – for controlling and altering sonifications over the course of their playback. + This is designed primarily with real-time sources in mind, rather than with static + datasets, and is intended as a performative, live data-art creative activity. + The interface enables the performer to use the interface as an instrument for + iterative interpretations and variations of sonifications of multiple datastreams. + Using the interface, the performer can alter the scale, granularity, timbre, hierarchy + of elements, spatialisation, spectral filtering, key/modality, rhythmic distribution + and register ‘on-the-fly’ to both perform data-generated music, and investigate + data in a live exploratory, interactive manner.' + address: 'Pittsburgh, PA, United States' + author: 'Ferguson, Sam and Beilharz, Kirsty' + bibtex: "@inproceedings{Ferguson2009,\n abstract = {Sonification is generally considered\ + \ in a statistical data analysis context. This research discusses the development\ + \ of an interface for live control of sonification – for controlling and altering\ + \ sonifications over the course of their playback. This is designed primarily\ + \ with real-time sources in mind, rather than with static datasets, and is intended\ + \ as a performative, live data-art creative activity. The interface enables the\ + \ performer to use the interface as an instrument for iterative interpretations\ + \ and variations of sonifications of multiple datastreams. 
Using the interface,\ + \ the performer can alter the scale, granularity, timbre, hierarchy of elements,\ + \ spatialisation, spectral filtering, key/modality, rhythmic distribution and\ + \ register ‘on-the-fly’ to both perform data-generated music, and investigate\ + \ data in a live exploratory, interactive manner.},\n address = {Pittsburgh, PA,\ + \ United States},\n author = {Ferguson, Sam and Beilharz, Kirsty},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177511},\n issn = {2220-4806},\n keywords\ + \ = {Sonification, Interactive Sonification, Auditory Display. },\n pages = {35--36},\n\ + \ title = {An Interface for Live Interactive Sonification},\n url = {http://www.nime.org/proceedings/2009/nime2009_035.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179519 + doi: 10.5281/zenodo.1177511 issn: 2220-4806 - keywords: 'Rag-time washboard, sounding objects, physics-based sound synthesis, - interactivity, sonification, augmented everyday objects. ' - pages: 154--157 - title: Sonically Augmented Found Objects - url: http://www.nime.org/proceedings/2008/nime2008_154.pdf - year: 2008 + keywords: 'Sonification, Interactive Sonification, Auditory Display. ' + pages: 35--36 + title: An Interface for Live Interactive Sonification + url: http://www.nime.org/proceedings/2009/nime2009_035.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Pelletier2008 - abstract: 'This paper describes a generalized motion-based framework forthe generation - of large musical control fields from imaging data.The framework is general in - the sense that it does not depend ona particular source of sensing data. Real-time - images of stageperformers, pre-recorded and live video, as well as more exoticdata - from imaging systems such as thermography, pressuresensor arrays, etc. 
can be - used as a source of control. Featurepoints are extracted from the candidate images, - from whichmotion vector fields are calculated. After some processing, thesemotion - vectors are mapped individually to sound synthesisparameters. Suitable synthesis - techniques include granular andmicrosonic algorithms, additive synthesis and micro-polyphonicorchestration. - Implementation details of this framework isdiscussed, as well as suitable creative - and artistic uses andapproaches.' - address: 'Genoa, Italy' - author: 'Pelletier, Jean-Marc' - bibtex: "@inproceedings{Pelletier2008,\n abstract = {This paper describes a generalized\ - \ motion-based framework forthe generation of large musical control fields from\ - \ imaging data.The framework is general in the sense that it does not depend ona\ - \ particular source of sensing data. Real-time images of stageperformers, pre-recorded\ - \ and live video, as well as more exoticdata from imaging systems such as thermography,\ - \ pressuresensor arrays, etc. can be used as a source of control. Featurepoints\ - \ are extracted from the candidate images, from whichmotion vector fields are\ - \ calculated. After some processing, thesemotion vectors are mapped individually\ - \ to sound synthesisparameters. 
Suitable synthesis techniques include granular\ - \ andmicrosonic algorithms, additive synthesis and micro-polyphonicorchestration.\ - \ Implementation details of this framework isdiscussed, as well as suitable creative\ - \ and artistic uses andapproaches.},\n address = {Genoa, Italy},\n author = {Pelletier,\ - \ Jean-Marc},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179611},\n issn\ - \ = {2220-4806},\n keywords = {Computer vision, control field, image analysis,\ - \ imaging, mapping, microsound, motion flow, sonification, synthesis },\n pages\ - \ = {158--163},\n title = {Sonified Motion Flow Fields as a Means of Musical Expression},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_158.pdf},\n year = {2008}\n\ - }\n" + ID: Reben2009 + abstract: 'In this project we have developed reactive instruments for performance. + Reactive instruments provide feedback for the performer thereby providing a more + dynamic experience. This is achieved through the use of haptics and robotics. + Haptics provide a feedback system to the control surface. Robotics provides a + way to actuate the instruments and their control surfaces. This allows a highly + coordinated "dance" between performer and the instrument. An application for this + idea is presented as a linear slide interface. Reactive interfaces represent a + dynamic way for music to be portrayed in performance. ' + address: 'Pittsburgh, PA, United States' + author: 'Reben, Alexander and Laibowitz, Mat and Paradiso, Joseph A.' + bibtex: "@inproceedings{Reben2009,\n abstract = {In this project we have developed\ + \ reactive instruments for performance. Reactive instruments provide feedback\ + \ for the performer thereby providing a more dynamic experience. This is achieved\ + \ through the use of haptics and robotics. Haptics provide a feedback system to\ + \ the control surface. 
Robotics provides a way to actuate the instruments and\ + \ their control surfaces. This allows a highly coordinated \"dance\" between performer\ + \ and the instrument. An application for this idea is presented as a linear slide\ + \ interface. Reactive interfaces represent a dynamic way for music to be portrayed\ + \ in performance. },\n address = {Pittsburgh, PA, United States},\n author = {Reben,\ + \ Alexander and Laibowitz, Mat and Paradiso, Joseph A.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177663},\n issn = {2220-4806},\n keywords = {haptics,\ + \ robotics, dynamic interfaces },\n pages = {37--38},\n title = {Responsive Music\ + \ Interfaces for Performance},\n url = {http://www.nime.org/proceedings/2009/nime2009_037.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179611 + doi: 10.5281/zenodo.1177663 issn: 2220-4806 - keywords: 'Computer vision, control field, image analysis, imaging, mapping, microsound, - motion flow, sonification, synthesis ' - pages: 158--163 - title: Sonified Motion Flow Fields as a Means of Musical Expression - url: http://www.nime.org/proceedings/2008/nime2008_158.pdf - year: 2008 + keywords: 'haptics, robotics, dynamic interfaces ' + pages: 37--38 + title: Responsive Music Interfaces for Performance + url: http://www.nime.org/proceedings/2009/nime2009_037.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Dubrau2008 - address: 'Genoa, Italy' - author: 'Dubrau, Josh and Havryliv, Mark' - bibtex: "@inproceedings{Dubrau2008,\n address = {Genoa, Italy},\n author = {Dubrau,\ - \ Josh and Havryliv, Mark},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179525},\n\ - \ issn = {2220-4806},\n keywords = {Poetry, language sonification, psychoanalysis,\ - \ 
linguistics, Freud, realtime poetry. },\n pages = {164--167},\n title = {P[a]ra[pra]xis\ - \ : Poetry in Motion},\n url = {http://www.nime.org/proceedings/2008/nime2008_164.pdf},\n\ - \ year = {2008}\n}\n" + ID: Lai2009 + abstract: 'Hands On Stage, designed from a percussionist''s perspective, is a new + performance interface designed for audiovisual improvisation. It comprises a custom-built + table interface and a performance system programmed in two environments, SuperCollider + 3 and Isadora. This paper traces the interface''s evolution over matters of relevant + technology, concept, construction, system design, and its creative outcomes. ' + address: 'Pittsburgh, PA, United States' + author: 'Lai, Chi-Hsia' + bibtex: "@inproceedings{Lai2009,\n abstract = {Hands On Stage, designed from a percussionist's\ + \ perspective, is a new performance interface designed for audiovisual improvisation.\ + \ It comprises a custom-built table interface and a performance system programmed\ + \ in two environments, SuperCollider 3 and Isadora. This paper traces the interface's\ + \ evolution over matters of relevant technology, concept, construction, system\ + \ design, and its creative outcomes. },\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Lai, Chi-Hsia},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177609},\n\ + \ issn = {2220-4806},\n keywords = {audiovisual, interface design, performance.\ + \ },\n pages = {39--40},\n title = {Hands On Stage : A Sound and Image Performance\ + \ Interface},\n url = {http://www.nime.org/proceedings/2009/nime2009_039.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179525 + doi: 10.5281/zenodo.1177609 issn: 2220-4806 - keywords: 'Poetry, language sonification, psychoanalysis, linguistics, Freud, realtime - poetry. 
' - pages: 164--167 - title: 'P[a]ra[pra]xis : Poetry in Motion' - url: http://www.nime.org/proceedings/2008/nime2008_164.pdf - year: 2008 + keywords: 'audiovisual, interface design, performance. ' + pages: 39--40 + title: 'Hands On Stage : A Sound and Image Performance Interface' + url: http://www.nime.org/proceedings/2009/nime2009_039.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Schacher2008 - abstract: 'Moving out of doors with digital tools and electronic music and creating - musically rich experiences is made possible by the increased availability of ever - smaller and more powerful mobile computers. Composing music for and in a landscape - instead of for a closed architectural space offers new perspectives but also raises - questions about interaction and composition of electronic music. The work we present - here was commissioned by a festival and ran on a daily basis over a period of - three months. A GPS-enabled embedded Linux system is assembled to serve as a location-aware - sound platform. Several challenges have to be overcome both technically and artistically - to achieve a seamless experience and provide a simple device to be handed to the - public. By building this interactive experience, which relies as much on the user''s - willingness to explore the invisible sonic landscape as on the ability to deploy - the technology, a number of new avenues for exploring electronic music and interactivity - in location-based media open up. New ways of composing music for and in a landscape - and for creating audience interaction are explored. ' - address: 'Genoa, Italy' - author: 'Schacher, Jan C.' 
- bibtex: "@inproceedings{Schacher2008,\n abstract = {Moving out of doors with digital\ - \ tools and electronic music and creating musically rich experiences is made possible\ - \ by the increased availability of ever smaller and more powerful mobile computers.\ - \ Composing music for and in a landscape instead of for a closed architectural\ - \ space offers new perspectives but also raises questions about interaction and\ - \ composition of electronic music. The work we present here was commissioned by\ - \ a festival and ran on a daily basis over a period of three months. A GPS-enabled\ - \ embedded Linux system is assembled to serve as a location-aware sound platform.\ - \ Several challenges have to be overcome both technically and artistically to\ - \ achieve a seamless experience and provide a simple device to be handed to the\ - \ public. By building this interactive experience, which relies as much on the\ - \ user's willingness to explore the invisible sonic landscape as on the ability\ - \ to deploy the technology, a number of new avenues for exploring electronic music\ - \ and interactivity in location-based media open up. New ways of composing music\ - \ for and in a landscape and for creating audience interaction are explored. },\n\ - \ address = {Genoa, Italy},\n author = {Schacher, Jan C.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179623},\n issn = {2220-4806},\n keywords = {Location-based,\ - \ electronic music, composition, embedded Linux, GPS, Pure Data, interaction,\ - \ mapping, soundscape },\n pages = {168--171},\n title = {Davos Soundscape, a\ - \ Location Based Interactive Composition},\n url = {http://www.nime.org/proceedings/2008/nime2008_168.pdf},\n\ - \ year = {2008}\n}\n" + ID: McDonald2009 + abstract: 'The Vibrobyte is a wireless haptic interface specialized forco-located + musical performance. 
The hardware is designedaround the open source Arduino platform, + with haptic control data encapsulated in OSC messages, and OSC/hardwarecommunications + handled by Processing. The Vibrobyte wasfeatured at the International Computer + Music Conference2008 (ICMC) in a telematic performance between ensembles in Belfast, + Palo Alto (California, USA), and Troy (NewYork, USA).' + address: 'Pittsburgh, PA, United States' + author: 'McDonald, Kyle and Kouttron, Dane and Bahn, Curtis and Braasch, Jonas and + Oliveros, Pauline' + bibtex: "@inproceedings{McDonald2009,\n abstract = {The Vibrobyte is a wireless\ + \ haptic interface specialized forco-located musical performance. The hardware\ + \ is designedaround the open source Arduino platform, with haptic control data\ + \ encapsulated in OSC messages, and OSC/hardwarecommunications handled by Processing.\ + \ The Vibrobyte wasfeatured at the International Computer Music Conference2008\ + \ (ICMC) in a telematic performance between ensembles in Belfast, Palo Alto (California,\ + \ USA), and Troy (NewYork, USA).},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {McDonald, Kyle and Kouttron, Dane and Bahn, Curtis and Braasch, Jonas\ + \ and Oliveros, Pauline},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177627},\n\ + \ issn = {2220-4806},\n keywords = {haptics,interface,nime09,performance,telematic},\n\ + \ pages = {41--42},\n title = {The Vibrobyte : A Haptic Interface for Co-Located\ + \ Performance},\n url = {http://www.nime.org/proceedings/2009/nime2009_041.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179623 + doi: 10.5281/zenodo.1177627 issn: 2220-4806 - keywords: 'Location-based, electronic music, composition, embedded Linux, GPS, Pure - Data, interaction, mapping, soundscape ' - pages: 168--171 - title: 'Davos 
Soundscape, a Location Based Interactive Composition' - url: http://www.nime.org/proceedings/2008/nime2008_168.pdf - year: 2008 + keywords: haptics,interface,nime09,performance,telematic + pages: 41--42 + title: 'The Vibrobyte : A Haptic Interface for Co-Located Performance' + url: http://www.nime.org/proceedings/2009/nime2009_041.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Schmeder2008 - abstract: 'A general-purpose firmware for a low cost microcontroller is described - that employs the Open Sound Control protocol over USB. The firmware is designed - with considerations for integration in new musical interfaces and embedded devices. - Features of note include stateless design, efficient floating-point support, temporally - correct data handling, and protocol completeness. A timing performance analysis - is conducted.' - address: 'Genoa, Italy' - author: 'Schmeder, Andrew and Freed, Adrian' - bibtex: "@inproceedings{Schmeder2008,\n abstract = {A general-purpose firmware for\ - \ a low cost microcontroller is described that employs the Open Sound Control\ - \ protocol over USB. The firmware is designed with considerations for integration\ - \ in new musical interfaces and embedded devices. Features of note include stateless\ - \ design, efficient floating-point support, temporally correct data handling,\ - \ and protocol completeness. 
A timing performance analysis is conducted.},\n address\ - \ = {Genoa, Italy},\n author = {Schmeder, Andrew and Freed, Adrian},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179627},\n issn = {2220-4806},\n keywords\ - \ = {jitter,latency,nime08,open sound control,pic microcontroller,usb},\n pages\ - \ = {175--180},\n title = {uOSC : The Open Sound Control Reference Platform for\ - \ Embedded Devices},\n url = {http://www.nime.org/proceedings/2008/nime2008_175.pdf},\n\ - \ year = {2008}\n}\n" + ID: Wiley2009 + abstract: 'This paper describes a cost-effective, modular, open source framework + for a laser interface design that is open to community development, interaction + and user modification. The following paper highlights ways in which we are implementing + the multi-laser gestural interface in musical, visual, and robotic contexts. ' + address: 'Pittsburgh, PA, United States' + author: 'Wiley, Meason and Kapur, Ajay' + bibtex: "@inproceedings{Wiley2009,\n abstract = {This paper describes a cost-effective,\ + \ modular, open source framework for a laser interface design that is open to\ + \ community development, interaction and user modification. The following paper\ + \ highlights ways in which we are implementing the multi-laser gestural interface\ + \ in musical, visual, and robotic contexts. 
},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Wiley, Meason and Kapur, Ajay},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177709},\n issn = {2220-4806},\n keywords = {Lasers,\ + \ photocell sensor, UltraSound, Open Source controller design, digital gamelan,\ + \ digital tanpura },\n pages = {43--44},\n title = {Multi-Laser Gestural Interface\ + \ --- Solutions for Cost-Effective and Open Source Controllers},\n url = {http://www.nime.org/proceedings/2009/nime2009_043.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179627 + doi: 10.5281/zenodo.1177709 issn: 2220-4806 - keywords: 'jitter,latency,nime08,open sound control,pic microcontroller,usb' - pages: 175--180 - title: 'uOSC : The Open Sound Control Reference Platform for Embedded Devices' - url: http://www.nime.org/proceedings/2008/nime2008_175.pdf - year: 2008 + keywords: 'Lasers, photocell sensor, UltraSound, Open Source controller design, + digital gamelan, digital tanpura ' + pages: 43--44 + title: Multi-Laser Gestural Interface --- Solutions for Cost-Effective and Open + Source Controllers + url: http://www.nime.org/proceedings/2009/nime2009_043.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Place2008 - abstract: 'An approach for creating structured Open Sound Control(OSC) messages - by separating the addressing of node valuesand node properties is suggested. This - includes a methodfor querying values and properties. As a result, it is possibleto - address complex nodes as classes inside of more complextree structures using an - OSC namespace. This is particularly useful for creating flexible communication - in modularsystems. A prototype implementation is presented and discussed.' - address: 'Genoa, Italy' - author: 'Place, Timothy and Lossius, Trond and Jensenius, Alexander R. 
and Peters, - Nils' - bibtex: "@inproceedings{Place2008,\n abstract = {An approach for creating structured\ - \ Open Sound Control(OSC) messages by separating the addressing of node valuesand\ - \ node properties is suggested. This includes a methodfor querying values and\ - \ properties. As a result, it is possibleto address complex nodes as classes inside\ - \ of more complextree structures using an OSC namespace. This is particularly\ - \ useful for creating flexible communication in modularsystems. A prototype implementation\ - \ is presented and discussed.},\n address = {Genoa, Italy},\n author = {Place,\ - \ Timothy and Lossius, Trond and Jensenius, Alexander R. and Peters, Nils},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179613},\n issn = {2220-4806},\n\ - \ keywords = {jamoma,namespace,nime08,osc,standardization},\n pages = {181--184},\n\ - \ title = {Addressing Classes by Differentiating Values and Properties in OSC},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_181.pdf},\n year = {2008}\n\ - }\n" + ID: Kanda2009 + abstract: 'We introduce Mims, which is an interactive-multimedia live-performance + system, where pieces rendered by a performer’s voice are translated into floating + objects called voice objects. The voice objects are generated from the performer’s + current position on the screen, and absorbed by another flying object called Mims. + Voice sounds are modulated by the behavior of Mims. Performers can control these + objects and sound effects by using their own gestures. Mims provides performers + and their audiences with expressive visual feedback in terms of sound manipulations + and results.' 
+ address: 'Pittsburgh, PA, United States' + author: 'Kanda, Ryo and Hashida, Mitsuyo and Katayose, Haruhiro' + bibtex: "@inproceedings{Kanda2009,\n abstract = {We introduce Mims, which is an\ + \ interactive-multimedia live-performance system, where pieces rendered by a performer’s\ + \ voice are translated into floating objects called voice objects. The voice objects\ + \ are generated from the performer’s current position on the screen, and absorbed\ + \ by another flying object called Mims. Voice sounds are modulated by the behavior\ + \ of Mims. Performers can control these objects and sound effects by using their\ + \ own gestures. Mims provides performers and their audiences with expressive visual\ + \ feedback in terms of sound manipulations and results.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Kanda, Ryo and Hashida, Mitsuyo and Katayose,\ + \ Haruhiro},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177595},\n issn\ + \ = {2220-4806},\n keywords = {Interaction, audience, performer, visualize, sensor,\ + \ physical, gesture. },\n pages = {45--47},\n title = {Mims : Interactive Multimedia\ + \ Live Performance System},\n url = {http://www.nime.org/proceedings/2009/nime2009_045.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179613 + doi: 10.5281/zenodo.1177595 issn: 2220-4806 - keywords: jamoma,namespace,nime08,osc,standardization - pages: 181--184 - title: Addressing Classes by Differentiating Values and Properties in OSC - url: http://www.nime.org/proceedings/2008/nime2008_181.pdf - year: 2008 + keywords: 'Interaction, audience, performer, visualize, sensor, physical, gesture. 
' + pages: 45--47 + title: 'Mims : Interactive Multimedia Live Performance System' + url: http://www.nime.org/proceedings/2009/nime2009_045.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Platz2008 - abstract: 'Many mobile devices, specifically mobile phones, come equipped with a - microphone. Microphones are high-fidelity sensors that can pick up sounds relating - to a range of physical phenomena. Using simple feature extraction methods,parameters - can be found that sensibly map to synthesis algorithms to allow expressive and - interactive performance.For example blowing noise can be used as a wind instrument - excitation source. Also other types of interactionscan be detected via microphones, - such as striking. Hencethe microphone, in addition to allowing literal recording,serves - as an additional source of input to the developingfield of mobile phone performance.' - address: 'Genoa, Italy' - author: 'Ananya, Misra and Essl, Georg and Rohs, Michael' - bibtex: "@inproceedings{Platz2008,\n abstract = {Many mobile devices, specifically\ - \ mobile phones, come equipped with a microphone. Microphones are high-fidelity\ - \ sensors that can pick up sounds relating to a range of physical phenomena. Using\ - \ simple feature extraction methods,parameters can be found that sensibly map\ - \ to synthesis algorithms to allow expressive and interactive performance.For\ - \ example blowing noise can be used as a wind instrument excitation source. 
Also\ - \ other types of interactionscan be detected via microphones, such as striking.\ - \ Hencethe microphone, in addition to allowing literal recording,serves as an\ - \ additional source of input to the developingfield of mobile phone performance.},\n\ - \ address = {Genoa, Italy},\n author = {Ananya, Misra and Essl, Georg and Rohs,\ - \ Michael},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179485},\n issn\ - \ = {2220-4806},\n keywords = {mobile music making, microphone, mobile-stk },\n\ - \ pages = {185--188},\n title = {Microphone as Sensor in Mobile Phone Performance},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_185.pdf},\n year = {2008}\n\ + ID: Goto2009a + abstract: 'This is intended to introduce the system, which combines BodySuit, especially + Powered Suit, and Second Life, as well as its possibilities and its uses in a + musical performance application. The system which we propose contains both a gesture + controller and robots at the same time. In this system, the Data Suit, BodySuit + controls the avatar in Second Life and Second Life controls the exoskeleton, Powered + Suit in real time. These are related with each other in conjunction with Second + Life in Internet. BodySuit doesn''t contain a hand-held controller. A performer, + for example a dancer, wears a suit. Gestures are transformed into electronic signals + by sensors. Powered Suit is another suit that a dancer wears, but gestures are + generated by motors. This is a sort of wearable robot. Second Life is software + that is developed by Linden Lab. It allows creating a virtual world and a virtual + human (avatar) in Internet. Working together with BodySuit, Powered Suit, and + Second Life the idea behind the system is that a human body is augmented by electronic + signals and is reflected in a virtual world in order to be able to perform interactively. 
' + address: 'Pittsburgh, PA, United States' + author: 'Goto, Suguru and Powell, Rob' + bibtex: "@inproceedings{Goto2009a,\n abstract = {This is intended to introduce the\ + \ system, which combines BodySuit, especially Powered Suit, and Second Life, as\ + \ well as its possibilities and its uses in a musical performance application.\ + \ The system which we propose contains both a gesture controller and robots at\ + \ the same time. In this system, the Data Suit, BodySuit controls the avatar in\ + \ Second Life and Second Life controls the exoskeleton, Powered Suit in real time.\ + \ These are related with each other in conjunction with Second Life in Internet.\ + \ BodySuit doesn't contain a hand-held controller. A performer, for example a\ + \ dancer, wears a suit. Gestures are transformed into electronic signals by sensors.\ + \ Powered Suit is another suit that a dancer wears, but gestures are generated\ + \ by motors. This is a sort of wearable robot. Second Life is software that is\ + \ developed by Linden Lab. It allows creating a virtual world and a virtual human\ + \ (avatar) in Internet. 
Working together with BodySuit, Powered Suit, and Second\ + \ Life the idea behind the system is that a human body is augmented by electronic\ + \ signals and is reflected in a virtual world in order to be able to perform interactively.\ + \ },\n address = {Pittsburgh, PA, United States},\n author = {Goto, Suguru and\ + \ Powell, Rob},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177559},\n\ + \ issn = {2220-4806},\n keywords = {artificial intelligence,gesture controller,humanoid\ + \ robot,interaction,internet,nime09,robot},\n pages = {48--49},\n title = {netBody\ + \ --- \"Augmented Body and Virtual Body II\" with the System, BodySuit, Powered\ + \ Suit and Second Life --- Its Introduction of an Application of the System},\n\ + \ url = {http://www.nime.org/proceedings/2009/nime2009_048.pdf},\n year = {2009}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179485 - issn: 2220-4806 - keywords: 'mobile music making, microphone, mobile-stk ' - pages: 185--188 - title: Microphone as Sensor in Mobile Phone Performance - url: http://www.nime.org/proceedings/2008/nime2008_185.pdf - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: Bouillot2008 - address: 'Genoa, Italy' - author: 'Bouillot, Nicolas and Wozniewski, Mike and Settel, Zack and Cooperstock, - Jeremy R.' 
- bibtex: "@inproceedings{Bouillot2008,\n address = {Genoa, Italy},\n author = {Bouillot,\ - \ Nicolas and Wozniewski, Mike and Settel, Zack and Cooperstock, Jeremy R.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179499},\n issn = {2220-4806},\n\ - \ keywords = {nime08},\n pages = {189--192},\n title = {A Mobile Wireless Augmented\ - \ Guitar},\n url = {http://www.nime.org/proceedings/2008/nime2008_189.pdf},\n\ - \ year = {2008}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1179499 + doi: 10.5281/zenodo.1177559 issn: 2220-4806 - keywords: nime08 - pages: 189--192 - title: A Mobile Wireless Augmented Guitar - url: http://www.nime.org/proceedings/2008/nime2008_189.pdf - year: 2008 + keywords: 'artificial intelligence,gesture controller,humanoid robot,interaction,internet,nime09,robot' + pages: 48--49 + title: 'netBody --- "Augmented Body and Virtual Body II" with the System, BodySuit, + Powered Suit and Second Life --- Its Introduction of an Application of the System' + url: http://www.nime.org/proceedings/2009/nime2009_048.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Jacobs2008 - abstract: None - address: 'Genoa, Italy' - author: 'Jacobs, Robert and Feldmeier, Mark and Paradiso, Joseph A.' 
- bibtex: "@inproceedings{Jacobs2008,\n abstract = {None},\n address = {Genoa, Italy},\n\ - \ author = {Jacobs, Robert and Feldmeier, Mark and Paradiso, Joseph A.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179567},\n issn = {2220-4806},\n keywords\ - \ = {None},\n pages = {193--196},\n title = {A Mobile Music Environment Using\ - \ a PD Compiler and Wireless Sensors},\n url = {http://www.nime.org/proceedings/2008/nime2008_193.pdf},\n\ - \ year = {2008}\n}\n" + ID: Ogawa2009 + abstract: 'We developed a system called Life Game Orchestra that generates music + by translating cellular patterns of Conway''s Game of Life into musical scales. + A performer can compose music by controlling varying cell patterns and sounds + with visual and auditory fun. A performer assigns the elements of tone to two-dimensional + cell patterns in the matrix of the Game of Life. Our system searches defined cell + patterns in the varying matrix dynamically. If the patterns are matched, corresponding + tones are generated. A performer can make cells in the matrix by moving in front + of a camera and interactively influencing the generation of music. The progress + of the Game of Life is controlled with a clock defined by the performer to configure + the groove of the music. By running multiple matrices with different pattern mapping, + clock timing, and instruments, we can perform an ensemble. The Life Game Orchestra + is a fusion system of the design of a performer and the emergence of cellular + automata as a complex system. ' + address: 'Pittsburgh, PA, United States' + author: 'Ogawa, Keisuke and Kuhara, Yasuo' + bibtex: "@inproceedings{Ogawa2009,\n abstract = {We developed a system called Life\ + \ Game Orchestra that generates music by translating cellular patterns of Conway's\ + \ Game of Life into musical scales. 
A performer can compose music by controlling\ + \ varying cell patterns and sounds with visual and auditory fun. A performer assigns\ + \ the elements of tone to two-dimensional cell patterns in the matrix of the Game\ + \ of Life. Our system searches defined cell patterns in the varying matrix dynamically.\ + \ If the patterns are matched, corresponding tones are generated. A performer\ + \ can make cells in the matrix by moving in front of a camera and interactively\ + \ influencing the generation of music. The progress of the Game of Life is controlled\ + \ with a clock defined by the performer to configure the groove of the music.\ + \ By running multiple matrices with different pattern mapping, clock timing, and\ + \ instruments, we can perform an ensemble. The Life Game Orchestra is a fusion\ + \ system of the design of a performer and the emergence of cellular automata as\ + \ a complex system. },\n address = {Pittsburgh, PA, United States},\n author =\ + \ {Ogawa, Keisuke and Kuhara, Yasuo},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177647},\n\ + \ issn = {2220-4806},\n keywords = {Conway's Game of Life, Cellular automata,\ + \ Cell pattern, scale, Interactive composition, performance. 
},\n pages = {50--51},\n\ + \ title = {Life Game Orchestra as an Interactive Music Composition System Translating\ + \ Cellular Patterns of Automata into Musical Scales},\n url = {http://www.nime.org/proceedings/2009/nime2009_050.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179567 + doi: 10.5281/zenodo.1177647 issn: 2220-4806 - keywords: None - pages: 193--196 - title: A Mobile Music Environment Using a PD Compiler and Wireless Sensors - url: http://www.nime.org/proceedings/2008/nime2008_193.pdf - year: 2008 + keywords: 'Conway''s Game of Life, Cellular automata, Cell pattern, scale, Interactive + composition, performance. ' + pages: 50--51 + title: Life Game Orchestra as an Interactive Music Composition System Translating + Cellular Patterns of Automata into Musical Scales + url: http://www.nime.org/proceedings/2009/nime2009_050.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Bencina2008 - address: 'Genoa, Italy' - author: 'Bencina, Ross and Wilde, Danielle and Langley, Somaya' - bibtex: "@inproceedings{Bencina2008,\n address = {Genoa, Italy},\n author = {Bencina,\ - \ Ross and Wilde, Danielle and Langley, Somaya},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1179491},\n issn = {2220-4806},\n keywords = {gestural control,mapping,nime08,prototyping,three-axis\ - \ accelerometers,vocal,wii remote},\n pages = {197--202},\n title = {Gesture=Sound\ - \ Experiments : Process and Mappings},\n url = {http://www.nime.org/proceedings/2008/nime2008_197.pdf},\n\ - \ year = {2008}\n}\n" + ID: Toenjes2009 + abstract: 'This article describes the implications of design and materials of computer + controllers used in the context of interactive dance performance. 
Size, shape, + and layout all influence audience perception of the performer, and materials imply + context for further interpretation of the interactive performance work. It describes + the construction of the "Control/Recorder" and the "VideoLyre", two custom computer + control surfaces made for Leonardo''s Chimes, a work by Toenjes, Marchant and + Smith, and how these controllers contribute to theatrical aesthetic intent. ' + address: 'Pittsburgh, PA, United States' + author: 'Toenjes, John' + bibtex: "@inproceedings{Toenjes2009,\n abstract = {This article describes the implications\ + \ of design and materials of computer controllers used in the context of interactive\ + \ dance performance. Size, shape, and layout all influence audience perception\ + \ of the performer, and materials imply context for further interpretation of\ + \ the interactive performance work. It describes the construction of the \"Control/Recorder\"\ + \ and the \"VideoLyre\", two custom computer control surfaces made for Leonardo's\ + \ Chimes, a work by Toenjes, Marchant and Smith, and how these controllers contribute\ + \ to theatrical aesthetic intent. },\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Toenjes, John},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177693},\n\ + \ issn = {2220-4806},\n keywords = {control surface, interface, tactile, natural,\ + \ organic, interactive dance. 
},\n pages = {52--53},\n title = {Natural Materials\ + \ on Stage : Custom Controllers for Aesthetic Effect},\n url = {http://www.nime.org/proceedings/2009/nime2009_052.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179491 + doi: 10.5281/zenodo.1177693 issn: 2220-4806 - keywords: 'gestural control,mapping,nime08,prototyping,three-axis accelerometers,vocal,wii - remote' - pages: 197--202 - title: 'Gesture=Sound Experiments : Process and Mappings' - url: http://www.nime.org/proceedings/2008/nime2008_197.pdf - year: 2008 + keywords: 'control surface, interface, tactile, natural, organic, interactive dance. ' + pages: 52--53 + title: 'Natural Materials on Stage : Custom Controllers for Aesthetic Effect' + url: http://www.nime.org/proceedings/2009/nime2009_052.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Ciglar2008 - address: 'Genoa, Italy' - author: 'Ciglar, Miha' - bibtex: "@inproceedings{Ciglar2008,\n address = {Genoa, Italy},\n author = {Ciglar,\ - \ Miha},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179511},\n issn = {2220-4806},\n\ - \ keywords = {dancer, fig, from the system in, gesture recognition, haptic feedback,\ - \ in, markers attached to the, motion tracking, nime08, s limbs, the dancer receives\ - \ feedback, two ways},\n pages = {203--206},\n title = {\"3rd. Pole\" -- A Composition\ - \ Performed via Gestural Cues},\n url = {http://www.nime.org/proceedings/2008/nime2008_203.pdf},\n\ - \ year = {2008}\n}\n" + ID: Keith2009 + abstract: 'Deviate generates multiple streams of melodic and rhythmic output in + real-time, according to user-specified control parameters. This performance system + has been implemented using Max 5 [1] within the genre of popular contemporary + electronic music, incorporating techno, IDM, and related forms. 
The aim of this + project is not musical style synthesis, but to construct an environment in which + a range of creative and musical goals may be achieved. A key aspect is control + over generative processes, as well as consistent yet varied output. An approach + is described which frees the user from determining note-level output while allowing + control to be maintained over larger structural details, focusing specifically + on the melodic aspect of this system. Audio examples are located online at http://www.cetenbaath.com/cb/about-deviate/. ' + address: 'Pittsburgh, PA, United States' + author: 'Keith, Sarah' + bibtex: "@inproceedings{Keith2009,\n abstract = {Deviate generates multiple streams\ + \ of melodic and rhythmic output in real-time, according to user-specified control\ + \ parameters. This performance system has been implemented using Max 5 [1] within\ + \ the genre of popular contemporary electronic music, incorporating techno, IDM,\ + \ and related forms. The aim of this project is not musical style synthesis, but\ + \ to construct an environment in which a range of creative and musical goals may\ + \ be achieved. A key aspect is control over generative processes, as well as consistent\ + \ yet varied output. An approach is described which frees the user from determining\ + \ note-level output while allowing control to be maintained over larger structural\ + \ details, focusing specifically on the melodic aspect of this system. Audio examples\ + \ are located online at http://www.cetenbaath.com/cb/about-deviate/. 
},\n address\ + \ = {Pittsburgh, PA, United States},\n author = {Keith, Sarah},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177599},\n issn = {2220-4806},\n keywords = {generative,\ + \ performance, laptop, popular music },\n pages = {54--55},\n title = {Controlling\ + \ Live Generative Electronic Music with Deviate},\n url = {http://www.nime.org/proceedings/2009/nime2009_054.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179511 + doi: 10.5281/zenodo.1177599 issn: 2220-4806 - keywords: 'dancer, fig, from the system in, gesture recognition, haptic feedback, - in, markers attached to the, motion tracking, nime08, s limbs, the dancer receives - feedback, two ways' - pages: 203--206 - title: '"3rd. Pole" -- A Composition Performed via Gestural Cues' - url: http://www.nime.org/proceedings/2008/nime2008_203.pdf - year: 2008 + keywords: 'generative, performance, laptop, popular music ' + pages: 54--55 + title: Controlling Live Generative Electronic Music with Deviate + url: http://www.nime.org/proceedings/2009/nime2009_054.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Hansen2008 - abstract: 'This paper describes a project started for implementing DJscratching - techniques on the reactable. By interacting withobjects representing scratch patterns - commonly performedon the turntable and the crossfader, the musician can playwith - DJ techniques and manipulate how they are executedin a performance. This is a - novel approach to the digital DJapplications and hardware. Two expert musicians - practisedand performed on the reactable in order to both evaluate theplayability - and improve the design of the DJ techniques.' - address: 'Genoa, Italy' - author: 'Hansen, Kjetil F. 
and Alonso, Marcos' - bibtex: "@inproceedings{Hansen2008,\n abstract = {This paper describes a project\ - \ started for implementing DJscratching techniques on the reactable. By interacting\ - \ withobjects representing scratch patterns commonly performedon the turntable\ - \ and the crossfader, the musician can playwith DJ techniques and manipulate how\ - \ they are executedin a performance. This is a novel approach to the digital DJapplications\ - \ and hardware. Two expert musicians practisedand performed on the reactable in\ - \ order to both evaluate theplayability and improve the design of the DJ techniques.},\n\ - \ address = {Genoa, Italy},\n author = {Hansen, Kjetil F. and Alonso, Marcos},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179555},\n issn = {2220-4806},\n\ - \ keywords = {dj scratch techniques,interfaces,nime08,playability,reactable},\n\ - \ pages = {207--210},\n title = {More DJ Techniques on the reactable},\n url =\ - \ {http://www.nime.org/proceedings/2008/nime2008_207.pdf},\n year = {2008}\n}\n" + ID: Dolphin2009 + abstract: 'SpiralSet is a sound toy incorporating game enginesoftware used in conjunction + with a spectral synthesissound engine constructed in Max/MSP/Jitter. SpiralSetwas + presented as an interactive installation piece at theSonic Arts Expo 2008, in + Brighton, UK. A custom madesensor-based interface is used for control of the system.The + user interactions are designed to be quickly accessiblein an installation context, + yet allowing the potential forsonic depth and variation.' + address: 'Pittsburgh, PA, United States' + author: 'Dolphin, Andy' + bibtex: "@inproceedings{Dolphin2009,\n abstract = {SpiralSet is a sound toy incorporating\ + \ game enginesoftware used in conjunction with a spectral synthesissound engine\ + \ constructed in Max/MSP/Jitter. 
SpiralSetwas presented as an interactive installation\ + \ piece at theSonic Arts Expo 2008, in Brighton, UK. A custom madesensor-based\ + \ interface is used for control of the system.The user interactions are designed\ + \ to be quickly accessiblein an installation context, yet allowing the potential\ + \ forsonic depth and variation.},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Dolphin, Andy},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177497},\n\ + \ issn = {2220-4806},\n keywords = {Sound Toys, Game Engines, Animated Interfaces,\ + \ Spectral Synthesis, Open Work, Max/MSP. },\n pages = {56--57},\n title = {SpiralSet\ + \ : A Sound Toy Utilizing Game Engine Technologies},\n url = {http://www.nime.org/proceedings/2009/nime2009_056.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179555 + doi: 10.5281/zenodo.1177497 issn: 2220-4806 - keywords: 'dj scratch techniques,interfaces,nime08,playability,reactable' - pages: 207--210 - title: More DJ Techniques on the reactable - url: http://www.nime.org/proceedings/2008/nime2008_207.pdf - year: 2008 + keywords: 'Sound Toys, Game Engines, Animated Interfaces, Spectral Synthesis, Open + Work, Max/MSP. ' + pages: 56--57 + title: 'SpiralSet : A Sound Toy Utilizing Game Engine Technologies' + url: http://www.nime.org/proceedings/2009/nime2009_056.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Dimitrov2008 - abstract: 'This paper reports on a Short-Term Scientific Mission (STSM)sponsored - by the Sonic Interaction Design (SID) EuropeanCOST Action IC601.Prototypes of - objects for the novel instrument Reactablewere developed, with the goal of studying - sonification ofmovements on this platform using physical models. 
A physical model - of frictional interactions between rubbed dry surfaces was used as an audio generation - engine, which alloweddevelopment in two directions --- a set of objects that affordsmotions - similar to sliding, and a single object aiming tosonify contact friction sound. - Informal evaluation was obtained from a Reactable expert user, regarding these - sets ofobjects. Experiments with the objects were also performed- related to both - audio filtering, and interfacing with otherobjects for the Reactable.' - address: 'Genoa, Italy' - author: 'Dimitrov, Smilen and Alonso, Marcos and Serafin, Stefania' - bibtex: "@inproceedings{Dimitrov2008,\n abstract = {This paper reports on a Short-Term\ - \ Scientific Mission (STSM)sponsored by the Sonic Interaction Design (SID) EuropeanCOST\ - \ Action IC601.Prototypes of objects for the novel instrument Reactablewere developed,\ - \ with the goal of studying sonification ofmovements on this platform using physical\ - \ models. A physical model of frictional interactions between rubbed dry surfaces\ - \ was used as an audio generation engine, which alloweddevelopment in two directions\ - \ --- a set of objects that affordsmotions similar to sliding, and a single object\ - \ aiming tosonify contact friction sound. Informal evaluation was obtained from\ - \ a Reactable expert user, regarding these sets ofobjects. 
Experiments with the\ - \ objects were also performed- related to both audio filtering, and interfacing\ - \ with otherobjects for the Reactable.},\n address = {Genoa, Italy},\n author\ - \ = {Dimitrov, Smilen and Alonso, Marcos and Serafin, Stefania},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179523},\n issn = {2220-4806},\n keywords = {Reactable,\ - \ physical model, motion sonification, contact fric- tion },\n pages = {211--214},\n\ - \ title = {Developing Block-Movement, Physical-Model Based Objects for the Reactable},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_211.pdf},\n year = {2008}\n\ - }\n" + ID: Gao2009 + abstract: 'This paper explores a rapidly developed, new musical interface involving + a touch-screen, 32 pressure sensitive button pads, infrared sensor, 8 knobs and + cross-fader. We provide a versatile platform for computer-based music performance + and production using a human computer interface that has strong visual and tactile + feedback as well as robust software that exploits the strengths of each individual + system component. ' + address: 'Pittsburgh, PA, United States' + author: 'Gao, Mingfei and Hanson, Craig' + bibtex: "@inproceedings{Gao2009,\n abstract = {This paper explores a rapidly developed,\ + \ new musical interface involving a touch-screen, 32 pressure sensitive button\ + \ pads, infrared sensor, 8 knobs and cross-fader. We provide a versatile platform\ + \ for computer-based music performance and production using a human computer interface\ + \ that has strong visual and tactile feedback as well as robust software that\ + \ exploits the strengths of each individual system component. 
},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Gao, Mingfei and Hanson, Craig},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177547},\n issn = {2220-4806},\n keywords\ + \ = {live performance interface,lumi,nime09,pressure},\n pages = {58--59},\n title\ + \ = {LUMI : Live Performance Paradigms Utilizing Software Integrated Touch Screen\ + \ and Pressure Sensitive Button Matrix},\n url = {http://www.nime.org/proceedings/2009/nime2009_058.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179523 + doi: 10.5281/zenodo.1177547 issn: 2220-4806 - keywords: 'Reactable, physical model, motion sonification, contact fric- tion ' - pages: 211--214 - title: 'Developing Block-Movement, Physical-Model Based Objects for the Reactable' - url: http://www.nime.org/proceedings/2008/nime2008_211.pdf - year: 2008 + keywords: 'live performance interface,lumi,nime09,pressure' + pages: 58--59 + title: 'LUMI : Live Performance Paradigms Utilizing Software Integrated Touch Screen + and Pressure Sensitive Button Matrix' + url: http://www.nime.org/proceedings/2009/nime2009_058.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Thiebaut2008 - abstract: 'This research focuses on real-time gesture learning and recognition. - Events arrive in a continuous stream without explicitly given boundaries. To obtain - temporal accuracy, weneed to consider the lag between the detection of an eventand - any effects we wish to trigger with it. Two methodsfor real time gesture recognition - using a Nintendo Wii controller are presented. The first detects gestures similar - to agiven template using either a Euclidean distance or a cosinesimilarity measure. - The second method uses novel information theoretic methods to detect and categorize - gestures inan unsupervised way. 
The role of supervision, detection lagand the - importance of haptic feedback are discussed.' - address: 'Genoa, Italy' - author: 'Thiebaut, Jean-Baptiste and Abdallah, Samer and Robertson, Andrew and Bryan-Kinns, - Nick and Plumbley, Mark D.' - bibtex: "@inproceedings{Thiebaut2008,\n abstract = {This research focuses on real-time\ - \ gesture learning and recognition. Events arrive in a continuous stream without\ - \ explicitly given boundaries. To obtain temporal accuracy, weneed to consider\ - \ the lag between the detection of an eventand any effects we wish to trigger\ - \ with it. Two methodsfor real time gesture recognition using a Nintendo Wii controller\ - \ are presented. The first detects gestures similar to agiven template using either\ - \ a Euclidean distance or a cosinesimilarity measure. The second method uses novel\ - \ information theoretic methods to detect and categorize gestures inan unsupervised\ - \ way. The role of supervision, detection lagand the importance of haptic feedback\ - \ are discussed.},\n address = {Genoa, Italy},\n author = {Thiebaut, Jean-Baptiste\ - \ and Abdallah, Samer and Robertson, Andrew and Bryan-Kinns, Nick and Plumbley,\ - \ Mark D.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179639},\n issn\ - \ = {2220-4806},\n keywords = {Gesture recognition, supervised and unsupervised\ - \ learning, interaction, haptic feedback, information dynamics, HMMs },\n pages\ - \ = {215--218},\n title = {Real Time Gesture Learning and Recognition : Towards\ - \ Automatic Categorization},\n url = {http://www.nime.org/proceedings/2008/nime2008_215.pdf},\n\ - \ year = {2008}\n}\n" + ID: Gillian2009 + abstract: 'This paper presents the SARC EyesWeb Catalog (SEC), agroup of blocks + designed for real-time gesture recognitionthat have been developed for the open + source program EyesWeb. 
We describe how the recognition of real-time bodymovements + can be used for musician-computer-interaction.' + address: 'Pittsburgh, PA, United States' + author: 'Gillian, Nicholas and Knapp, Benjamin and O''Modhrain, Sile' + bibtex: "@inproceedings{Gillian2009,\n abstract = {This paper presents the SARC\ + \ EyesWeb Catalog (SEC), agroup of blocks designed for real-time gesture recognitionthat\ + \ have been developed for the open source program EyesWeb. We describe how the\ + \ recognition of real-time bodymovements can be used for musician-computer-interaction.},\n\ + \ address = {Pittsburgh, PA, United States},\n author = {Gillian, Nicholas and\ + \ Knapp, Benjamin and O'Modhrain, Sile},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177551},\n\ + \ issn = {2220-4806},\n keywords = {SARC EyesWeb Catalog, gesture recognition\ + \ },\n pages = {60--61},\n title = {The {SAR}C EyesWeb Catalog : A Pattern Recognition\ + \ Toolbox for Musician-Computer Interaction},\n url = {http://www.nime.org/proceedings/2009/nime2009_060.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179639 + doi: 10.5281/zenodo.1177551 issn: 2220-4806 - keywords: 'Gesture recognition, supervised and unsupervised learning, interaction, - haptic feedback, information dynamics, HMMs ' - pages: 215--218 - title: 'Real Time Gesture Learning and Recognition : Towards Automatic Categorization' - url: http://www.nime.org/proceedings/2008/nime2008_215.pdf - year: 2008 + keywords: 'SARC EyesWeb Catalog, gesture recognition ' + pages: 60--61 + title: 'The SARC EyesWeb Catalog : A Pattern Recognition Toolbox for Musician-Computer + Interaction' + url: http://www.nime.org/proceedings/2009/nime2009_060.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Kimura2008 - abstract: 'This paper describes the compositional process 
for creatingthe interactive - work for violin entitled VITESSIMO using theAugmented Violin [1].' - address: 'Genoa, Italy' - author: 'Kimura, Mari' - bibtex: "@inproceedings{Kimura2008,\n abstract = {This paper describes the compositional\ - \ process for creatingthe interactive work for violin entitled VITESSIMO using\ - \ theAugmented Violin [1].},\n address = {Genoa, Italy},\n author = {Kimura, Mari},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179581},\n issn = {2220-4806},\n\ - \ keywords = {Augmented Violin, gesture tracking, interactive performance },\n\ - \ pages = {219--220},\n title = {Making of VITESSIMO for Augmented Violin : Compositional\ - \ Process and Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_219.pdf},\n\ - \ year = {2008}\n}\n" + ID: Nishino2009 + abstract: 'We describe a new method for 2D fiducial tracking. We use region adjacency + information together with angles between regions to encode IDs inside fiducials, + whereas previous research by Kaltenbrunner and Bencina utilize region adjacency + tree. Our method supports a wide ID range and is fast enough to accommodate real-time + video. It is also very robust against false positive detection. ' + address: 'Pittsburgh, PA, United States' + author: 'Nishino, Hiroki' + bibtex: "@inproceedings{Nishino2009,\n abstract = {We describe a new method for\ + \ 2D fiducial tracking. We use region adjacency information together with angles\ + \ between regions to encode IDs inside fiducials, whereas previous research by\ + \ Kaltenbrunner and Bencina utilize region adjacency tree. Our method supports\ + \ a wide ID range and is fast enough to accommodate real-time video. It is also\ + \ very robust against false positive detection. 
},\n address = {Pittsburgh, PA,\ + \ United States},\n author = {Nishino, Hiroki},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177643},\n issn = {2220-4806},\n keywords = {fiducial tracking,\ + \ computer vision, tangible user interface, interaction techniques. },\n pages\ + \ = {62--63},\n title = {A {2D} Fiducial Tracking Method based on Topological\ + \ Region Adjacency and Angle Information},\n url = {http://www.nime.org/proceedings/2009/nime2009_062.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179581 + doi: 10.5281/zenodo.1177643 issn: 2220-4806 - keywords: 'Augmented Violin, gesture tracking, interactive performance ' - pages: 219--220 - title: 'Making of VITESSIMO for Augmented Violin : Compositional Process and Performance' - url: http://www.nime.org/proceedings/2008/nime2008_219.pdf - year: 2008 + keywords: 'fiducial tracking, computer vision, tangible user interface, interaction + techniques. ' + pages: 62--63 + title: A 2D Fiducial Tracking Method based on Topological Region Adjacency and Angle + Information + url: http://www.nime.org/proceedings/2009/nime2009_062.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Loviscach2008 - abstract: 'Sound libraries for music synthesizers easily comprise one thousand or - more programs (''''patches''''). Thus, there are enough raw data to apply data - mining to reveal typical settings and to extract dependencies. Intelligent user - interfaces for music synthesizers can be based on such statistics. This paper - proposes two approaches: First, the user sets any number of parameters and then - lets the system find the nearest sounds in the database, a kind of patch autocompletion. 
- Second, all parameters are "live" as usual, but turning one knob or setting a - switch will also change the settings of other, statistically related controls. - Both approaches canbe used with the standard interface of the synthesizer. On - top of that, this paper introduces alternative or additional interfaces based - on data visualization.' - address: 'Genoa, Italy' - author: 'Loviscach, Jörn' - bibtex: "@inproceedings{Loviscach2008,\n abstract = {Sound libraries for music synthesizers\ - \ easily comprise one thousand or more programs (''patches''). Thus, there are\ - \ enough raw data to apply data mining to reveal typical settings and to extract\ - \ dependencies. Intelligent user interfaces for music synthesizers can be based\ - \ on such statistics. This paper proposes two approaches: First, the user sets\ - \ any number of parameters and then lets the system find the nearest sounds in\ - \ the database, a kind of patch autocompletion. Second, all parameters are \"\ - live\" as usual, but turning one knob or setting a switch will also change the\ - \ settings of other, statistically related controls. Both approaches canbe used\ - \ with the standard interface of the synthesizer. 
On top of that, this paper introduces\ - \ alternative or additional interfaces based on data visualization.},\n address\ - \ = {Genoa, Italy},\n author = {Loviscach, J\\''{o}rn},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179591},\n issn = {2220-4806},\n keywords = {Information\ - \ visualization, mutual information, intelligent user interfaces },\n pages =\ - \ {221--224},\n title = {Programming a Music Synthesizer through Data Mining},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_221.pdf},\n year = {2008}\n\ - }\n" + ID: Solis2009 + abstract: 'During several decades, the research at Waseda University has been focused + on developing anthropomorphic robots capable performing musical instruments. As + a result of our research efforts, the Waseda Flutist Robot WF-4RIV and the Waseda + Saxophonist Robot WAS-1 have been designed to reproduce the human player performance. + As a long-term goal, we are proposing to enable the interaction between musical + performance robots as well as with human players. In general the communication + of humans within a band is a special case of conventional human social behavior. + Rhythm, harmony and timbre of the music played represent the emotional states + of the musicians. So the development of an artificial entity that participates + in such an interaction may contribute to the better understanding of some of the + mechanisms that enable the communication of humans in musical terms. Therefore, + we are not considering a musical performance robot (MPR) just as a mere sophisticated + MIDI instrument. Instead, its human-like design and the integration of perceptual + capabilities may enable to act on its own autonomous initiative based on models + which consider its own physical constrains. 
In this paper, we present an overview + of our research approaches towards enabling the interaction between musical performance + robots as well as with musicians. ' + address: 'Pittsburgh, PA, United States' + author: 'Solis, Jorge and Ninomiya, Takeshi and Petersen, Klaus and Takeuchi, Masaki + and Takanishi, Atsuo' + bibtex: "@inproceedings{Solis2009,\n abstract = {During several decades, the research\ + \ at Waseda University has been focused on developing anthropomorphic robots capable\ + \ performing musical instruments. As a result of our research efforts, the Waseda\ + \ Flutist Robot WF-4RIV and the Waseda Saxophonist Robot WAS-1 have been designed\ + \ to reproduce the human player performance. As a long-term goal, we are proposing\ + \ to enable the interaction between musical performance robots as well as with\ + \ human players. In general the communication of humans within a band is a special\ + \ case of conventional human social behavior. Rhythm, harmony and timbre of the\ + \ music played represent the emotional states of the musicians. So the development\ + \ of an artificial entity that participates in such an interaction may contribute\ + \ to the better understanding of some of the mechanisms that enable the communication\ + \ of humans in musical terms. Therefore, we are not considering a musical performance\ + \ robot (MPR) just as a mere sophisticated MIDI instrument. 
Instead, its human-like\ + \ design and the integration of perceptual capabilities may enable to act on its\ + \ own autonomous initiative based on models which consider its own physical constrains.\ + \ In this paper, we present an overview of our research approaches towards enabling\ + \ the interaction between musical performance robots as well as with musicians.\ + \ },\n address = {Pittsburgh, PA, United States},\n author = {Solis, Jorge and\ + \ Ninomiya, Takeshi and Petersen, Klaus and Takeuchi, Masaki and Takanishi, Atsuo},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177681},\n issn = {2220-4806},\n\ + \ keywords = {nime09},\n pages = {64--69},\n title = {Anthropomorphic Musical\ + \ Performance Robots at Waseda University : Increasing Understanding of the Nature\ + \ of Human Musical Interaction Abstract},\n url = {http://www.nime.org/proceedings/2009/nime2009_064.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179591 + doi: 10.5281/zenodo.1177681 issn: 2220-4806 - keywords: 'Information visualization, mutual information, intelligent user interfaces ' - pages: 221--224 - title: Programming a Music Synthesizer through Data Mining - url: http://www.nime.org/proceedings/2008/nime2008_221.pdf - year: 2008 + keywords: nime09 + pages: 64--69 + title: 'Anthropomorphic Musical Performance Robots at Waseda University : Increasing + Understanding of the Nature of Human Musical Interaction Abstract' + url: http://www.nime.org/proceedings/2009/nime2009_064.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Ng2008 - abstract: 'This paper presents a project called i-Maestro (www.i-maestro.org) which - develops interactive multimedia environments for technology enhanced music education. 
- The project explores novel solutions for music training in both theory and performance, - building on recent innovations resulting from the development of computer and - information technologies, by exploiting new pedagogical paradigms with cooperative - and interactive self-learning environments, gesture interfaces, and augmented - instruments. This paper discusses the general context along with the background - and current developments of the project, together with an overview of the framework - and discussions on a number of selected tools to support technology-enhanced music - learning and teaching. ' - address: 'Genoa, Italy' - author: 'Ng, Kia and Nesi, Paolo' - bibtex: "@inproceedings{Ng2008,\n abstract = {This paper presents a project called\ - \ i-Maestro (www.i-maestro.org) which develops interactive multimedia environments\ - \ for technology enhanced music education. The project explores novel solutions\ - \ for music training in both theory and performance, building on recent innovations\ - \ resulting from the development of computer and information technologies, by\ - \ exploiting new pedagogical paradigms with cooperative and interactive self-learning\ - \ environments, gesture interfaces, and augmented instruments. This paper discusses\ - \ the general context along with the background and current developments of the\ - \ project, together with an overview of the framework and discussions on a number\ - \ of selected tools to support technology-enhanced music learning and teaching.\ - \ },\n address = {Genoa, Italy},\n author = {Ng, Kia and Nesi, Paolo},\n booktitle\ + ID: Weinberg2009a + abstract: 'This paper presents an interactive and improvisational jam session, including + human players and two robotic musicians. The project was developed in an effort + to create novel and inspiring music through human-robot collaboration. 
The jam + session incorporates Shimon, a newly-developed socially-interactive robotic marimba + player, and Haile, a perceptual robotic percussionist developed in previous work. + The paper gives an overview of the musical perception modules, adaptive improvisation + modes and human-robot musical interaction models that were developed for the session. + The paper also addresses the musical output that can be created from increased + interconnections in an expanded multiple-robot multiplehuman ensemble, and suggests + directions for future work. ' + address: 'Pittsburgh, PA, United States' + author: 'Weinberg, Gil and Blosser, Brian and Mallikarjuna, Trishul and Raman, Aparna' + bibtex: "@inproceedings{Weinberg2009a,\n abstract = {This paper presents an interactive\ + \ and improvisational jam session, including human players and two robotic musicians.\ + \ The project was developed in an effort to create novel and inspiring music through\ + \ human-robot collaboration. The jam session incorporates Shimon, a newly-developed\ + \ socially-interactive robotic marimba player, and Haile, a perceptual robotic\ + \ percussionist developed in previous work. The paper gives an overview of the\ + \ musical perception modules, adaptive improvisation modes and human-robot musical\ + \ interaction models that were developed for the session. The paper also addresses\ + \ the musical output that can be created from increased interconnections in an\ + \ expanded multiple-robot multiplehuman ensemble, and suggests directions for\ + \ future work. 
},\n address = {Pittsburgh, PA, United States},\n author = {Weinberg,\ + \ Gil and Blosser, Brian and Mallikarjuna, Trishul and Raman, Aparna},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179605},\n issn = {2220-4806},\n keywords\ - \ = {augmented instrument,education,gesture,interactive,interface,motion,multimedia,music,nime08,notation,sensor,sonification,technology-enhanced\ - \ learning,visualisation},\n pages = {225--228},\n title = {i-Maestro : Technology-Enhanced\ - \ Learning and Teaching for Music},\n url = {http://www.nime.org/proceedings/2008/nime2008_225.pdf},\n\ - \ year = {2008}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1177705},\n issn = {2220-4806},\n keywords\ + \ = {Robotic musicianship, Shimon, Haile. },\n pages = {70--73},\n title = {The\ + \ Creation of a Multi-Human, Multi-Robot Interactive Jam Session},\n url = {http://www.nime.org/proceedings/2009/nime2009_070.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179605 + doi: 10.5281/zenodo.1177705 issn: 2220-4806 - keywords: 'augmented instrument,education,gesture,interactive,interface,motion,multimedia,music,nime08,notation,sensor,sonification,technology-enhanced - learning,visualisation' - pages: 225--228 - title: 'i-Maestro : Technology-Enhanced Learning and Teaching for Music' - url: http://www.nime.org/proceedings/2008/nime2008_225.pdf - year: 2008 + keywords: 'Robotic musicianship, Shimon, Haile. ' + pages: 70--73 + title: 'The Creation of a Multi-Human, Multi-Robot Interactive Jam Session' + url: http://www.nime.org/proceedings/2009/nime2009_070.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Kuyken2008 - abstract: 'This paper describes the HOP system. It consists of a wireless module - built up by multiple nodes and a base station. The nodes detect acceleration of - e.g. human movement. 
At a rate of 100 Hertz the base station collects the acceleration - samples. The data can be acquired in real-time software like Pure Data and Max/MSP. - The data can be used to analyze and/or sonify movement. ' - address: 'Genoa, Italy' - author: 'Kuyken, Bart and Verstichel, Wouter and Bossuyt, Frederick and Vanfleteren, - Jan and Demey, Michiel and Leman, Marc' - bibtex: "@inproceedings{Kuyken2008,\n abstract = {This paper describes the HOP system.\ - \ It consists of a wireless module built up by multiple nodes and a base station.\ - \ The nodes detect acceleration of e.g. human movement. At a rate of 100 Hertz\ - \ the base station collects the acceleration samples. The data can be acquired\ - \ in real-time software like Pure Data and Max/MSP. The data can be used to analyze\ - \ and/or sonify movement. },\n address = {Genoa, Italy},\n author = {Kuyken, Bart\ - \ and Verstichel, Wouter and Bossuyt, Frederick and Vanfleteren, Jan and Demey,\ - \ Michiel and Leman, Marc},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179583},\n\ - \ issn = {2220-4806},\n keywords = {Digital Musical Instrument, Wireless Sensors,\ - \ Inertial Sensing, Hop Sensor },\n pages = {229--232},\n title = {The HOP Sensor\ - \ : Wireless Motion Sensor},\n url = {http://www.nime.org/proceedings/2008/nime2008_229.pdf},\n\ - \ year = {2008}\n}\n" + ID: Gong2009 + abstract: 'In this project, we have developed a real-time writing instrument for + music control. The controller, MusicGrip, can capture the subtle dynamics of the + user''s grip while writing or drawing and map this to musical control signals + and sonic outputs. This paper discusses this conversion of the common motor motion + of handwriting into an innovative form of music expression. 
The presented example + instrument can be used to integrate the composing aspect of music with painting + and writing, creating a new art form from the resultant aural and visual representation + of the collaborative performing process. ' + address: 'Pittsburgh, PA, United States' + author: 'Gong, Nan-Wei and Laibowitz, Mat and Paradiso, Joseph A.' + bibtex: "@inproceedings{Gong2009,\n abstract = {In this project, we have developed\ + \ a real-time writing instrument for music control. The controller, MusicGrip,\ + \ can capture the subtle dynamics of the user's grip while writing or drawing\ + \ and map this to musical control signals and sonic outputs. This paper discusses\ + \ this conversion of the common motor motion of handwriting into an innovative\ + \ form of music expression. The presented example instrument can be used to integrate\ + \ the composing aspect of music with painting and writing, creating a new art\ + \ form from the resultant aural and visual representation of the collaborative\ + \ performing process. 
},\n address = {Pittsburgh, PA, United States},\n author\ + \ = {Gong, Nan-Wei and Laibowitz, Mat and Paradiso, Joseph A.},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177555},\n issn = {2220-4806},\n keywords = {Interactive\ + \ music control, writing instrument, pen controller, MIDI, group performing activity.\ + \ },\n pages = {74--77},\n title = {MusicGrip : A Writing Instrument for Music\ + \ Control},\n url = {http://www.nime.org/proceedings/2009/nime2009_074.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179583 + doi: 10.5281/zenodo.1177555 issn: 2220-4806 - keywords: 'Digital Musical Instrument, Wireless Sensors, Inertial Sensing, Hop Sensor ' - pages: 229--232 - title: 'The HOP Sensor : Wireless Motion Sensor' - url: http://www.nime.org/proceedings/2008/nime2008_229.pdf - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: Coghlan2008 - address: 'Genoa, Italy' - author: 'Coghlan, Niall and Knapp, Benjamin' - bibtex: "@inproceedings{Coghlan2008,\n address = {Genoa, Italy},\n author = {Coghlan,\ - \ Niall and Knapp, Benjamin},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179513},\n\ - \ issn = {2220-4806},\n keywords = {Ubiquitous computing, context -awareness,\ - \ networking, embedded systems, chairs, digital artefacts, emotional state sensing,\ - \ affective computing, biosignals. 
},\n pages = {233--236},\n title = {Sensory\ - \ Chairs : A System for Biosignal Research and Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_233.pdf},\n\ - \ year = {2008}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1179513 - issn: 2220-4806 - keywords: 'Ubiquitous computing, context -awareness, networking, embedded systems, - chairs, digital artefacts, emotional state sensing, affective computing, biosignals. ' - pages: 233--236 - title: 'Sensory Chairs : A System for Biosignal Research and Performance' - url: http://www.nime.org/proceedings/2008/nime2008_233.pdf - year: 2008 + keywords: 'Interactive music control, writing instrument, pen controller, MIDI, + group performing activity. ' + pages: 74--77 + title: 'MusicGrip : A Writing Instrument for Music Control' + url: http://www.nime.org/proceedings/2009/nime2009_074.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Godbehere2008 - abstract: 'We present examples of a wireless sensor network as applied to wearable - digital music controllers. Recent advances in wireless Personal Area Networks - (PANs) have precipitated the IEEE 802.15.4 standard for low-power, low-cost wireless - sensor networks. We have applied this new technology to create a fully wireless, - wearable network of accelerometers which are small enough to be hidden under clothing. - Various motion analysis and machine learning techniques are applied to the raw - accelerometer data in real-time to generate and control music on the fly. ' - address: 'Genoa, Italy' - author: 'Godbehere, Andrew B. and Ward, Nathan J.' - bibtex: "@inproceedings{Godbehere2008,\n abstract = {We present examples of a wireless\ - \ sensor network as applied to wearable digital music controllers. Recent advances\ - \ in wireless Personal Area Networks (PANs) have precipitated the IEEE 802.15.4\ - \ standard for low-power, low-cost wireless sensor networks. 
We have applied this\ - \ new technology to create a fully wireless, wearable network of accelerometers\ - \ which are small enough to be hidden under clothing. Various motion analysis\ - \ and machine learning techniques are applied to the raw accelerometer data in\ - \ real-time to generate and control music on the fly. },\n address = {Genoa, Italy},\n\ - \ author = {Godbehere, Andrew B. and Ward, Nathan J.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179547},\n issn = {2220-4806},\n keywords = {Wearable\ - \ computing, personal area networks, accelerometers, 802.15.4, motion analysis,\ - \ human-computer interaction, live performance, digital musical controllers, gestural\ - \ control },\n pages = {237--240},\n title = {Wearable Interfaces for Cyberphysical\ - \ Musical Expression},\n url = {http://www.nime.org/proceedings/2008/nime2008_237.pdf},\n\ - \ year = {2008}\n}\n" + ID: Partridge2009 + abstract: 'Tabletops—and by extension, tabletop computers— naturally facilitate + group work. In particular, they provide a fascinating platform for exploring the + possibilities of collaborative audio improvisation. Existing tabletop instruments + (and digital instruments in general) tend to impose either a steep learning curve + on novice players or a frustrating ceiling of expressivity upon experts. We introduce + WallBalls, an intuitive tabletop instrument designed to support both novice and + expert performance. At first glance, WallBalls resembles a toy, game or whimsical + sketchpad, but it quickly reveals itself as a deeply expressive and highly adaptable + sample-based instrument capable of facilitating a startling variety of collaborative + sound art.' 
+ address: 'Pittsburgh, PA, United States' + author: 'Partridge, Grant and Irani, Pourang and Fitzell, Gordon' + bibtex: "@inproceedings{Partridge2009,\n abstract = {Tabletops—and by extension,\ + \ tabletop computers— naturally facilitate group work. In particular, they provide\ + \ a fascinating platform for exploring the possibilities of collaborative audio\ + \ improvisation. Existing tabletop instruments (and digital instruments in general)\ + \ tend to impose either a steep learning curve on novice players or a frustrating\ + \ ceiling of expressivity upon experts. We introduce WallBalls, an intuitive tabletop\ + \ instrument designed to support both novice and expert performance. At first\ + \ glance, WallBalls resembles a toy, game or whimsical sketchpad, but it quickly\ + \ reveals itself as a deeply expressive and highly adaptable sample-based instrument\ + \ capable of facilitating a startling variety of collaborative sound art.},\n\ + \ address = {Pittsburgh, PA, United States},\n author = {Partridge, Grant and\ + \ Irani, Pourang and Fitzell, Gordon},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177655},\n\ + \ issn = {2220-4806},\n keywords = {Tabletop computers, collaborative instruments,\ + \ collaborative composition, group improvisation, spatial audio interfaces, customizable\ + \ instruments. 
},\n pages = {78--81},\n title = {Let Loose with WallBalls, a Collaborative\ + \ Tabletop Instrument for Tomorrow},\n url = {http://www.nime.org/proceedings/2009/nime2009_078.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179547 + doi: 10.5281/zenodo.1177655 issn: 2220-4806 - keywords: 'Wearable computing, personal area networks, accelerometers, 802.15.4, - motion analysis, human-computer interaction, live performance, digital musical - controllers, gestural control ' - pages: 237--240 - title: Wearable Interfaces for Cyberphysical Musical Expression - url: http://www.nime.org/proceedings/2008/nime2008_237.pdf - year: 2008 + keywords: 'Tabletop computers, collaborative instruments, collaborative composition, + group improvisation, spatial audio interfaces, customizable instruments. ' + pages: 78--81 + title: 'Let Loose with WallBalls, a Collaborative Tabletop Instrument for Tomorrow' + url: http://www.nime.org/proceedings/2009/nime2009_078.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Hayafuchi2008 - abstract: 'This research aims to develop a wearable musical interface which enables - to control audio and video signals by using hand gestures and human body motions. - We have been developing an audio-visual manipulation system that realizes tracks - control, time-based operations and searching for tracks from massive music library. - It aims to build an emotional and affecting musical interaction, and will provide - a better method of music listening to people. A sophisticated glove-like device - with an acceleration sensor and several strain sensors has been developed. A realtime - signal processing and musical control are executed as a result of gesture recognition. - We also developed a stand-alone device that performs as a musical controller and - player at the same time. 
In this paper, we describe the development of a compact - and sophisticated sensor device, and demonstrate its performance of audio and - video signals control.' - address: 'Genoa, Italy' - author: 'Hayafuchi, Kouki and Suzuki, Kenji' - bibtex: "@inproceedings{Hayafuchi2008,\n abstract = {This research aims to develop\ - \ a wearable musical interface which enables to control audio and video signals\ - \ by using hand gestures and human body motions. We have been developing an audio-visual\ - \ manipulation system that realizes tracks control, time-based operations and\ - \ searching for tracks from massive music library. It aims to build an emotional\ - \ and affecting musical interaction, and will provide a better method of music\ - \ listening to people. A sophisticated glove-like device with an acceleration\ - \ sensor and several strain sensors has been developed. A realtime signal processing\ - \ and musical control are executed as a result of gesture recognition. We also\ - \ developed a stand-alone device that performs as a musical controller and player\ - \ at the same time. 
In this paper, we describe the development of a compact and\ - \ sophisticated sensor device, and demonstrate its performance of audio and video\ - \ signals control.},\n address = {Genoa, Italy},\n author = {Hayafuchi, Kouki\ - \ and Suzuki, Kenji},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179561},\n\ - \ issn = {2220-4806},\n keywords = {Embodied Sound Media, Music Controller, Gestures,\ - \ Body Motion, Musical Interface },\n pages = {241--244},\n title = {MusicGlove:\ - \ A Wearable Musical Controller for Massive Media Library},\n url = {http://www.nime.org/proceedings/2008/nime2008_241.pdf},\n\ - \ year = {2008}\n}\n" + ID: Min2009 + abstract: 'It is surely not difficult for anyone with experience in thesubject known + as Music Theory to realize that there is avery definite and precise relationship + between music andmathematics. This paper describes the SoriSu, a newelectronic + musical instrument based on Sudoku puzzles,which probe the expressive possibilities + of mathematicalconcepts in music. The concept proposes a new way ofmapping numbers + to sound. This interface was designed toprovide easy and pleasing access to music + for users whoare unfamiliar or uncomfortable with current musicaldevices. The + motivation behind the project is presented, aswell as hardware and software design.' + address: 'Pittsburgh, PA, United States' + author: 'Min, Hye Ki' + bibtex: "@inproceedings{Min2009,\n abstract = {It is surely not difficult for anyone\ + \ with experience in thesubject known as Music Theory to realize that there is\ + \ avery definite and precise relationship between music andmathematics. This paper\ + \ describes the SoriSu, a newelectronic musical instrument based on Sudoku puzzles,which\ + \ probe the expressive possibilities of mathematicalconcepts in music. The concept\ + \ proposes a new way ofmapping numbers to sound. 
This interface was designed toprovide\ + \ easy and pleasing access to music for users whoare unfamiliar or uncomfortable\ + \ with current musicaldevices. The motivation behind the project is presented,\ + \ aswell as hardware and software design.},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Min, Hye Ki},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177631},\n\ + \ issn = {2220-4806},\n keywords = {Numbers, Game Interfaces, Mathematics and\ + \ Sound, Mathematics in Music, Puzzles, Tangible User Interfaces. },\n pages =\ + \ {82--85},\n title = {SORISU : Sound with Numbers},\n url = {http://www.nime.org/proceedings/2009/nime2009_082.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179561 + doi: 10.5281/zenodo.1177631 issn: 2220-4806 - keywords: 'Embodied Sound Media, Music Controller, Gestures, Body Motion, Musical - Interface ' - pages: 241--244 - title: 'MusicGlove: A Wearable Musical Controller for Massive Media Library' - url: http://www.nime.org/proceedings/2008/nime2008_241.pdf - year: 2008 + keywords: 'Numbers, Game Interfaces, Mathematics and Sound, Mathematics in Music, + Puzzles, Tangible User Interfaces. ' + pages: 82--85 + title: 'SORISU : Sound with Numbers' + url: http://www.nime.org/proceedings/2009/nime2009_082.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Zbyszynski2008 - abstract: 'This paper proposes the creation of a method book for tabletbased instruments, - evaluating pedagogical materials fortraditional instruments as well as research - in human-computerinteraction and tablet interfaces.' 
- address: 'Genoa, Italy' - author: 'Zbyszynski, Michael' - bibtex: "@inproceedings{Zbyszynski2008,\n abstract = {This paper proposes the creation\ - \ of a method book for tabletbased instruments, evaluating pedagogical materials\ - \ fortraditional instruments as well as research in human-computerinteraction\ - \ and tablet interfaces.},\n address = {Genoa, Italy},\n author = {Zbyszynski,\ - \ Michael},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177461},\n issn\ - \ = {2220-4806},\n keywords = {Wacom tablet, digitizing tablet, expressivity,\ - \ gesture, mapping, pedagogy, practice },\n pages = {245--248},\n title = {An\ - \ Elementary Method for Tablet},\n url = {http://www.nime.org/proceedings/2008/nime2008_245.pdf},\n\ - \ year = {2008}\n}\n" + ID: Mann2009 + abstract: 'This paper describes the inspiration and implementation of a tactile, + tabletop synthesizer/step sequencer. The Tactus is an expandable and inexpensive + musical interface for the creation of loop-based music inspired by the Bubblegum + Sequencer [2]. An optical camera, coupled with a computer running Max/MSP/Jitter + can turn almost any matrix-like object into a step sequencer. The empty cells + in the gridded object are filled with a fitting, colored object; the placement + of which is analogous to adding an instrument or switching on a box in a step + sequencer grid. The color and column position of every element in the matrix are + used as parameters for a synthesizer while the row position of that element corresponds + to the moment within the loop that entry is sounded. The two dimensional array + can be positioned anywhere within the camera''s visibility. Both the translation + and rotation of the physical matrix are assigned to global parameters that affect + the music while preserving the color and order of the cells. 
A rotation of 180 + degrees, for example, will not reverse the sequence, but instead change an assigned + global parameter.' + address: 'Pittsburgh, PA, United States' + author: 'Mann, Yotam and Lubow, Jeff and Freed, Adrian' + bibtex: "@inproceedings{Mann2009,\n abstract = {This paper describes the inspiration\ + \ and implementation of a tactile, tabletop synthesizer/step sequencer. The Tactus\ + \ is an expandable and inexpensive musical interface for the creation of loop-based\ + \ music inspired by the Bubblegum Sequencer [2]. An optical camera, coupled with\ + \ a computer running Max/MSP/Jitter can turn almost any matrix-like object into\ + \ a step sequencer. The empty cells in the gridded object are filled with a fitting,\ + \ colored object; the placement of which is analogous to adding an instrument\ + \ or switching on a box in a step sequencer grid. The color and column position\ + \ of every element in the matrix are used as parameters for a synthesizer while\ + \ the row position of that element corresponds to the moment within the loop that\ + \ entry is sounded. The two dimensional array can be positioned anywhere within\ + \ the camera's visibility. Both the translation and rotation of the physical matrix\ + \ are assigned to global parameters that affect the music while preserving the\ + \ color and order of the cells. 
A rotation of 180 degrees, for example, will not\ + \ reverse the sequence, but instead change an assigned global parameter.},\n address\ + \ = {Pittsburgh, PA, United States},\n author = {Mann, Yotam and Lubow, Jeff and\ + \ Freed, Adrian},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177625},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {86--89},\n title = {The\ + \ Tactus : a Tangible , Rhythmic Grid Interface Using Found-Objects},\n url =\ + \ {http://www.nime.org/proceedings/2009/nime2009_086.pdf},\n year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177461 + doi: 10.5281/zenodo.1177625 issn: 2220-4806 - keywords: 'Wacom tablet, digitizing tablet, expressivity, gesture, mapping, pedagogy, - practice ' - pages: 245--248 - title: An Elementary Method for Tablet - url: http://www.nime.org/proceedings/2008/nime2008_245.pdf - year: 2008 + keywords: nime09 + pages: 86--89 + title: 'The Tactus : a Tangible , Rhythmic Grid Interface Using Found-Objects' + url: http://www.nime.org/proceedings/2009/nime2009_086.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Roma2008 - abstract: We present an audio waveform editor that can be operated in real time - through a tabletop interface. The systemcombines multi-touch and tangible interaction - techniques inorder to implement the metaphor of a toolkit that allows direct manipulation - of a sound sample. The resulting instrument is well suited for live performance - based on evolvingloops. - address: 'Genoa, Italy' - author: 'Roma, Gerard and Xambó, Anna' - bibtex: "@inproceedings{Roma2008,\n abstract = {We present an audio waveform editor\ - \ that can be operated in real time through a tabletop interface. 
The systemcombines\ - \ multi-touch and tangible interaction techniques inorder to implement the metaphor\ - \ of a toolkit that allows direct manipulation of a sound sample. The resulting\ - \ instrument is well suited for live performance based on evolvingloops.},\n address\ - \ = {Genoa, Italy},\n author = {Roma, Gerard and Xamb\\'{o}, Anna},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179621},\n issn = {2220-4806},\n keywords\ - \ = {tangible interface, tabletop interface, musical performance, interaction\ - \ techniques },\n pages = {249--252},\n title = {A Tabletop Waveform Editor for\ - \ Live Performance},\n url = {http://www.nime.org/proceedings/2008/nime2008_249.pdf},\n\ - \ year = {2008}\n}\n" + ID: Hockman2009 + abstract: 'This paper presents a method for using a runner''s pacefor real-time + control of the time-scaling facility of a phasevocoder, resulting in the automated + synchronization of anaudio track tempo to the generated control signal. The increase + in usage of portable music players during exercisehas given rise to the development + of new personal exerciseaids, most notably the Nike+iPod system, which relies + onembedded sensor technologies to provide kinematic workout statistics. There + are also systems that select songs basedon the measured step frequency of a runner. + The proposedsystem also uses the pace of a runner, but this information isused + to change the tempo of the music.' + address: 'Pittsburgh, PA, United States' + author: 'Hockman, Jason A. and Wanderley, Marcelo M. and Fujinaga, Ichiro' + bibtex: "@inproceedings{Hockman2009,\n abstract = {This paper presents a method\ + \ for using a runner's pacefor real-time control of the time-scaling facility\ + \ of a phasevocoder, resulting in the automated synchronization of anaudio track\ + \ tempo to the generated control signal. 
The increase in usage of portable music\ + \ players during exercisehas given rise to the development of new personal exerciseaids,\ + \ most notably the Nike+iPod system, which relies onembedded sensor technologies\ + \ to provide kinematic workout statistics. There are also systems that select\ + \ songs basedon the measured step frequency of a runner. The proposedsystem also\ + \ uses the pace of a runner, but this information isused to change the tempo of\ + \ the music.},\n address = {Pittsburgh, PA, United States},\n author = {Hockman,\ + \ Jason A. and Wanderley, Marcelo M. and Fujinaga, Ichiro},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177575},\n issn = {2220-4806},\n keywords = {NIME, synchronization,\ + \ exercise, time-scaling. },\n pages = {90--93},\n title = {Real-Time Phase Vocoder\ + \ Manipulation by Runner's Pace},\n url = {http://www.nime.org/proceedings/2009/nime2009_090.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179621 + doi: 10.5281/zenodo.1177575 issn: 2220-4806 - keywords: 'tangible interface, tabletop interface, musical performance, interaction - techniques ' - pages: 249--252 - title: A Tabletop Waveform Editor for Live Performance - url: http://www.nime.org/proceedings/2008/nime2008_249.pdf - year: 2008 + keywords: 'NIME, synchronization, exercise, time-scaling. 
' + pages: 90--93 + title: Real-Time Phase Vocoder Manipulation by Runner's Pace + url: http://www.nime.org/proceedings/2009/nime2009_090.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Valle2008a - address: 'Genoa, Italy' - author: 'Valle, Andrea' - bibtex: "@inproceedings{Valle2008a,\n address = {Genoa, Italy},\n author = {Valle,\ - \ Andrea},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179645},\n issn = {2220-4806},\n\ - \ keywords = {algorithmic composition,automatic notation,nime08},\n pages = {253--256},\n\ - \ title = {Integrated Algorithmic Composition Fluid systems for including notation\ - \ in music composition cycle},\n url = {http://www.nime.org/proceedings/2008/nime2008_253.pdf},\n\ - \ year = {2008}\n}\n" + ID: Nymoen2009 + abstract: 'The paper presents Nymophone2, an acoustic instrument with a complex + relationship between performance actions and emergent sound. A method for describing + the multidimensional control actions needed to play the instrument is presented + and discussed.' + address: 'Pittsburgh, PA, United States' + author: 'Nymoen, Kristian and Jensenius, Alexander R.' + bibtex: "@inproceedings{Nymoen2009,\n abstract = {The paper presents Nymophone2,\ + \ an acoustic instrument with a complex relationship between performance actions\ + \ and emergent sound. 
A method for describing the multidimensional control actions\ + \ needed to play the instrument is presented and discussed.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Nymoen, Kristian and Jensenius, Alexander R.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177645},\n issn = {2220-4806},\n\ + \ keywords = {nime09},\n pages = {94--97},\n title = {A Discussion of Multidimensional\ + \ Mapping in Nymophone2},\n url = {http://www.nime.org/proceedings/2009/nime2009_094.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179645 + doi: 10.5281/zenodo.1177645 issn: 2220-4806 - keywords: 'algorithmic composition,automatic notation,nime08' - pages: 253--256 - title: Integrated Algorithmic Composition Fluid systems for including notation in - music composition cycle - url: http://www.nime.org/proceedings/2008/nime2008_253.pdf - year: 2008 + keywords: nime09 + pages: 94--97 + title: A Discussion of Multidimensional Mapping in Nymophone2 + url: http://www.nime.org/proceedings/2009/nime2009_094.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Valle2008 - abstract: 'This paper is about GeoGraphy, a graph-based system forthe control of - both musical composition and interactive performance and its implementation in - a real-time, interactiveapplication. The implementation includes a flexible userinterface - system.' - address: 'Genoa, Italy' - author: 'Valle, Andrea' - bibtex: "@inproceedings{Valle2008,\n abstract = {This paper is about GeoGraphy,\ - \ a graph-based system forthe control of both musical composition and interactive\ - \ performance and its implementation in a real-time, interactiveapplication. 
The\ - \ implementation includes a flexible userinterface system.},\n address = {Genoa,\ - \ Italy},\n author = {Valle, Andrea},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179643},\n\ - \ issn = {2220-4806},\n keywords = {a graph,composition,figure 1,interfaces,left,live\ - \ coding,musical algorithmic composition,nime08,performance,vertex durations and\ - \ coor-},\n pages = {257--260},\n title = {GeoGraphy : a Real-Time, Graph-Based\ - \ Composition Environment},\n url = {http://www.nime.org/proceedings/2008/nime2008_257.pdf},\n\ - \ year = {2008}\n}\n" + ID: Schlessinger2009 + abstract: ' We present the Kalichord: a small, handheld electro/acoustic instrument + in which the player''s right hand plucks virtual strings while his left hand uses + buttons to play independent bass lines. The Kalichord uses the analog signal from + plucked acoustic tines to excite a physical string model, allowing a nuanced and + intuitive plucking experience. First, we catalog instruments related to the Kalichord. + Then we examine the use of analog signals to excite a physical string model and + discuss the expressiveness and form factors that this technique affords. We then + describe the overall construction of the Kalichord and possible playing styles, + and finally we consider ways we hope to improve upon the current prototype. ' + address: 'Pittsburgh, PA, United States' + author: 'Schlessinger, Daniel and Smith, Julius O.' + bibtex: "@inproceedings{Schlessinger2009,\n abstract = { We present the Kalichord:\ + \ a small, handheld electro/acoustic instrument in which the player's right hand\ + \ plucks virtual strings while his left hand uses buttons to play independent\ + \ bass lines. The Kalichord uses the analog signal from plucked acoustic tines\ + \ to excite a physical string model, allowing a nuanced and intuitive plucking\ + \ experience. 
First, we catalog instruments related to the Kalichord. Then we\ + \ examine the use of analog signals to excite a physical string model and discuss\ + \ the expressiveness and form factors that this technique affords. We then describe\ + \ the overall construction of the Kalichord and possible playing styles, and finally\ + \ we consider ways we hope to improve upon the current prototype. },\n address\ + \ = {Pittsburgh, PA, United States},\n author = {Schlessinger, Daniel and Smith,\ + \ Julius O.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177671},\n issn\ + \ = {2220-4806},\n keywords = {Kalichord, physical model, tine, piezo, plucked\ + \ string, electro-acoustic instruments, kalimba, accordion },\n pages = {98--101},\n\ + \ title = {The Kalichord : A Physically Modeled Electro-Acoustic Plucked String\ + \ Instrument},\n url = {http://www.nime.org/proceedings/2009/nime2009_098.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179643 + doi: 10.5281/zenodo.1177671 issn: 2220-4806 - keywords: 'a graph,composition,figure 1,interfaces,left,live coding,musical algorithmic - composition,nime08,performance,vertex durations and coor-' - pages: 257--260 - title: 'GeoGraphy : a Real-Time, Graph-Based Composition Environment' - url: http://www.nime.org/proceedings/2008/nime2008_257.pdf - year: 2008 + keywords: 'Kalichord, physical model, tine, piezo, plucked string, electro-acoustic + instruments, kalimba, accordion ' + pages: 98--101 + title: 'The Kalichord : A Physically Modeled Electro-Acoustic Plucked String Instrument' + url: http://www.nime.org/proceedings/2009/nime2009_098.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Zannos2008 - abstract: 'In this paper, we describe the development of multi-platform tools for - Audiovisual and Kinetic installations. 
These involve the connection of three development - environments: Python, SuperCollider and Processing, in order to drive kinetic - art installations and to combine these with digital synthesis of sound and image - in real time. By connecting these three platforms via the OSC protocol, we enable - the control in real time of analog physical media (a device that draws figures - on sand), sound synthesis and image synthesis. We worked on the development of - algorithms for drawing figures and synthesizing images and sound on all three - platforms and experimented with various mechanisms for coordinating synthesis - and rendering in different media. Several problems were addressed: How to coordinate - the timing between different platforms? What configuration to use? Clientserver - (who is the client who the server?), equal partners, mixed configurations. A library - was developed in SuperCollider to enable the packaging of algorithms into modules - with automatic generation of GUI from specifications, and the saving of configurations - of modules into session files as scripts in SuperCollider code. The application - of this library as a framework for both driving graphic synthesis in Processing - and receiving control data from it resulted in an environment for experimentation - that is also being used successfully in teaching interactive audiovisual media. ' - address: 'Genoa, Italy' - author: 'Zannos, Iannis' - bibtex: "@inproceedings{Zannos2008,\n abstract = {In this paper, we describe the\ - \ development of multi-platform tools for Audiovisual and Kinetic installations.\ - \ These involve the connection of three development environments: Python, SuperCollider\ - \ and Processing, in order to drive kinetic art installations and to combine these\ - \ with digital synthesis of sound and image in real time. 
By connecting these\ - \ three platforms via the OSC protocol, we enable the control in real time of\ - \ analog physical media (a device that draws figures on sand), sound synthesis\ - \ and image synthesis. We worked on the development of algorithms for drawing\ - \ figures and synthesizing images and sound on all three platforms and experimented\ - \ with various mechanisms for coordinating synthesis and rendering in different\ - \ media. Several problems were addressed: How to coordinate the timing between\ - \ different platforms? What configuration to use? Clientserver (who is the client\ - \ who the server?), equal partners, mixed configurations. A library was developed\ - \ in SuperCollider to enable the packaging of algorithms into modules with automatic\ - \ generation of GUI from specifications, and the saving of configurations of modules\ - \ into session files as scripts in SuperCollider code. The application of this\ - \ library as a framework for both driving graphic synthesis in Processing and\ - \ receiving control data from it resulted in an environment for experimentation\ - \ that is also being used successfully in teaching interactive audiovisual media.\ - \ },\n address = {Genoa, Italy},\n author = {Zannos, Iannis},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177459},\n issn = {2220-4806},\n keywords = {kinetic\ - \ art, audiovisual installations, python, SuperCollider, Processing, algorithmic\ - \ art, tools for multi-platform development },\n pages = {261--264},\n title =\ - \ {Multi-Platform Development of Audiovisual and Kinetic Installations},\n url\ - \ = {http://www.nime.org/proceedings/2008/nime2008_261.pdf},\n year = {2008}\n\ - }\n" + ID: Lahdeoja2009 + abstract: 'In this paper we describe an approach for introducing newelectronic percussive + sound possibilities for stringinstruments by "listening" to the sounds of the + instrument''sbody 
and extracting audio and data from the wood''sacoustic vibrations. + A method for capturing, localizing andanalyzing the percussive hits on the instrument''s + body ispresented, in connection with an audio-driven electronicpercussive sound + module. The system introduces a newgesture-sound relationship in the electric + string instrumentplaying environment, namely the use of percussivetechniques on + the instrument''s body which are null inregular circumstances due to selective + and exclusivemicrophone use for the strings. Instrument bodypercussions are widely + used in the acoustic instrumentalpraxis. They yield a strong potential for providing + anextended soundscape via instrument augmentation, directlycontrolled by the musician + through haptic manipulation ofthe instrument itself. The research work was carried + out onthe electric guitar, but the method used can apply to anystring instrument + with a resonating body.' + address: 'Pittsburgh, PA, United States' + author: 'Lähdeoja, Otso' + bibtex: "@inproceedings{Lahdeoja2009,\n abstract = {In this paper we describe an\ + \ approach for introducing newelectronic percussive sound possibilities for stringinstruments\ + \ by \"listening\" to the sounds of the instrument'sbody and extracting audio\ + \ and data from the wood'sacoustic vibrations. A method for capturing, localizing\ + \ andanalyzing the percussive hits on the instrument's body ispresented, in connection\ + \ with an audio-driven electronicpercussive sound module. The system introduces\ + \ a newgesture-sound relationship in the electric string instrumentplaying environment,\ + \ namely the use of percussivetechniques on the instrument's body which are null\ + \ inregular circumstances due to selective and exclusivemicrophone use for the\ + \ strings. 
Instrument bodypercussions are widely used in the acoustic instrumentalpraxis.\ + \ They yield a strong potential for providing anextended soundscape via instrument\ + \ augmentation, directlycontrolled by the musician through haptic manipulation\ + \ ofthe instrument itself. The research work was carried out onthe electric guitar,\ + \ but the method used can apply to anystring instrument with a resonating body.},\n\ + \ address = {Pittsburgh, PA, United States},\n author = {L\\''{a}hdeoja, Otso},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177607},\n issn = {2220-4806},\n\ + \ keywords = {augmented instrument,chordophone,contact microphone systems,electric,electronic\ + \ percussion,even with,guitar,leaving the instrument body,nime09,there is always\ + \ a,trade-off,virtually mute},\n pages = {102--105},\n title = {Augmenting Chordophones\ + \ with Hybrid Percussive Sound Possibilities},\n url = {http://www.nime.org/proceedings/2009/nime2009_102.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177459 + doi: 10.5281/zenodo.1177607 issn: 2220-4806 - keywords: 'kinetic art, audiovisual installations, python, SuperCollider, Processing, - algorithmic art, tools for multi-platform development ' - pages: 261--264 - title: Multi-Platform Development of Audiovisual and Kinetic Installations - url: http://www.nime.org/proceedings/2008/nime2008_261.pdf - year: 2008 + keywords: 'augmented instrument,chordophone,contact microphone systems,electric,electronic + percussion,even with,guitar,leaving the instrument body,nime09,there is always + a,trade-off,virtually mute' + pages: 102--105 + title: Augmenting Chordophones with Hybrid Percussive Sound Possibilities + url: http://www.nime.org/proceedings/2009/nime2009_102.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Corness2008 - 
abstract: 'Through the developing of tools for analyzing the performerssonic and - movement-based gestures, research into the systemperformer interaction has focused - on the computer''s ability torespond to the performer. Where as such work shows - interestwithin the community in developing an interaction paradigmmodeled on the - player, by focusing on the perception andreasoning of the system, this research - assumes that theperformer''s manner of interaction is in agreement with thiscomputational - model. My study presents an alternative model ofinteraction designed for improvisatory - performance centered onthe perception of the performer as understood by theories - takenfrom performance practices and cognitive science.' - address: 'Genoa, Italy' - author: 'Corness, Greg' - bibtex: "@inproceedings{Corness2008,\n abstract = {Through the developing of tools\ - \ for analyzing the performerssonic and movement-based gestures, research into\ - \ the systemperformer interaction has focused on the computer's ability torespond\ - \ to the performer. Where as such work shows interestwithin the community in developing\ - \ an interaction paradigmmodeled on the player, by focusing on the perception\ - \ andreasoning of the system, this research assumes that theperformer's manner\ - \ of interaction is in agreement with thiscomputational model. 
My study presents\ - \ an alternative model ofinteraction designed for improvisatory performance centered\ - \ onthe perception of the performer as understood by theories takenfrom performance\ - \ practices and cognitive science.},\n address = {Genoa, Italy},\n author = {Corness,\ - \ Greg},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179515},\n issn = {2220-4806},\n\ - \ keywords = {Interactive performance, Perception, HCI },\n pages = {265--268},\n\ - \ title = {Performer Model : Towards a Framework for Interactive Performance Based\ - \ on Perceived Intention},\n url = {http://www.nime.org/proceedings/2008/nime2008_265.pdf},\n\ - \ year = {2008}\n}\n" + ID: Kahrs2009 + abstract: 'Large vibrating plates are used as thunder sheets in orchestras. We have + extended the use of flat plates by cementing aflat panel electroacoustic transducer + on a large brass sheet.Because of the thickness of the panel, the output is subject + tononlinear distortion. When combined with a real-time inputand signal processing + algorithm, the active brass plate canbecome an effective musical instrument for + performance ofnew music.' + address: 'Pittsburgh, PA, United States' + author: 'Kahrs, Mark and Skulina, David and Bilbao, Stefan and Campbell, Murray' + bibtex: "@inproceedings{Kahrs2009,\n abstract = {Large vibrating plates are used\ + \ as thunder sheets in orchestras. 
We have extended the use of flat plates by\ + \ cementing aflat panel electroacoustic transducer on a large brass sheet.Because\ + \ of the thickness of the panel, the output is subject tononlinear distortion.\ + \ When combined with a real-time inputand signal processing algorithm, the active\ + \ brass plate canbecome an effective musical instrument for performance ofnew\ + \ music.},\n address = {Pittsburgh, PA, United States},\n author = {Kahrs, Mark\ + \ and Skulina, David and Bilbao, Stefan and Campbell, Murray},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177593},\n issn = {2220-4806},\n keywords = {Electroacoustics,\ + \ flat panel },\n pages = {106--109},\n title = {An Electroacoustically Controlled\ + \ Vibrating Plate},\n url = {http://www.nime.org/proceedings/2009/nime2009_106.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179515 + doi: 10.5281/zenodo.1177593 issn: 2220-4806 - keywords: 'Interactive performance, Perception, HCI ' - pages: 265--268 - title: 'Performer Model : Towards a Framework for Interactive Performance Based - on Perceived Intention' - url: http://www.nime.org/proceedings/2008/nime2008_265.pdf - year: 2008 + keywords: 'Electroacoustics, flat panel ' + pages: 106--109 + title: An Electroacoustically Controlled Vibrating Plate + url: http://www.nime.org/proceedings/2009/nime2009_106.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Teles2008 - address: 'Genoa, Italy' - author: 'Teles, Paulo C. and Boyle, Aidan' - bibtex: "@inproceedings{Teles2008,\n address = {Genoa, Italy},\n author = {Teles,\ - \ Paulo C. 
and Boyle, Aidan},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179637},\n\ - \ issn = {2220-4806},\n keywords = {nime08},\n pages = {269--272},\n title = {Developing\ - \ an \"Antigenous\" Art Installation Based on a Touchless Endosystem Interface},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_269.pdf},\n year = {2008}\n\ - }\n" + ID: Smallwood2009a + abstract: 'This paper gives a historical overview of the development of alternative + sonic display systems at Princeton University; in particular, the design, construction, + and use in live performance of a series of spherical and hemispherical speaker + systems. We also provide a DIY guide to constructing the latest series of loudspeakers + that we are currently using in our research and music making. ' + address: 'Pittsburgh, PA, United States' + author: 'Smallwood, Scott and Cook, Perry R. and Trueman, Dan and McIntyre, Lawrence' + bibtex: "@inproceedings{Smallwood2009a,\n abstract = {This paper gives a historical\ + \ overview of the development of alternative sonic display systems at Princeton\ + \ University; in particular, the design, construction, and use in live performance\ + \ of a series of spherical and hemispherical speaker systems. We also provide\ + \ a DIY guide to constructing the latest series of loudspeakers that we are currently\ + \ using in our research and music making. },\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Smallwood, Scott and Cook, Perry R. and Trueman, Dan and\ + \ McIntyre, Lawrence},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177679},\n\ + \ issn = {2220-4806},\n keywords = {loudspeakers, hemispherical speakers, sonic\ + \ display systems, laptop orchestras. 
},\n pages = {110--115},\n title = {Don't\ + \ Forget the Loudspeaker --- A History of Hemispherical Speakers at Princeton\ + \ , Plus a DIY Guide},\n url = {http://www.nime.org/proceedings/2009/nime2009_110.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179637 + doi: 10.5281/zenodo.1177679 issn: 2220-4806 - keywords: nime08 - pages: 269--272 - title: Developing an "Antigenous" Art Installation Based on a Touchless Endosystem - Interface - url: http://www.nime.org/proceedings/2008/nime2008_269.pdf - year: 2008 + keywords: 'loudspeakers, hemispherical speakers, sonic display systems, laptop orchestras. ' + pages: 110--115 + title: 'Don''t Forget the Loudspeaker --- A History of Hemispherical Speakers at + Princeton , Plus a DIY Guide' + url: http://www.nime.org/proceedings/2009/nime2009_110.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Lanzalone2008 - address: 'Genoa, Italy' - author: 'Lanzalone, Silvia' - bibtex: "@inproceedings{Lanzalone2008,\n address = {Genoa, Italy},\n author = {Lanzalone,\ - \ Silvia},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179587},\n issn = {2220-4806},\n\ - \ keywords = {nime08},\n pages = {273--276},\n title = {The 'Suspended Clarinet'\ - \ with the 'Uncaused Sound' : Description of a Renewed Musical Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_273.pdf},\n year = {2008}\n\ + ID: Freed2009a + abstract: 'The history and future of Open Sound Control (OSC) is discussed and the + next iteration of the OSC specification is introduced with discussion of new features + to support NIME community activities. The roadmap to a major revision of OSC is + developed. 
' + address: 'Pittsburgh, PA, United States' + author: 'Freed, Adrian and Schmeder, Andrew' + bibtex: "@inproceedings{Freed2009a,\n abstract = {The history and future of Open\ + \ Sound Control (OSC) is discussed and the next iteration of the OSC specification\ + \ is introduced with discussion of new features to support NIME community activities.\ + \ The roadmap to a major revision of OSC is developed. },\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Freed, Adrian and Schmeder, Andrew},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177517},\n issn = {2220-4806},\n keywords\ + \ = {Open Sound Control, Time Tag, OSC, Reservation Protocols. },\n pages = {116--120},\n\ + \ title = {Features and Future of Open Sound Control version 1.1 for NIME},\n\ + \ url = {http://www.nime.org/proceedings/2009/nime2009_116.pdf},\n year = {2009}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179587 + doi: 10.5281/zenodo.1177517 issn: 2220-4806 - keywords: nime08 - pages: 273--276 - title: 'The ''Suspended Clarinet'' with the ''Uncaused Sound'' : Description of - a Renewed Musical Instrument' - url: http://www.nime.org/proceedings/2008/nime2008_273.pdf - year: 2008 + keywords: 'Open Sound Control, Time Tag, OSC, Reservation Protocols. ' + pages: 116--120 + title: Features and Future of Open Sound Control version 1.1 for NIME + url: http://www.nime.org/proceedings/2009/nime2009_116.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Hashida2008 - abstract: 'One of the advantages of case-based systems is that theycan generate - expressions even if the user doesn''t know howthe system applies expression rules. - However, the systemscannot avoid the problem of data sparseness and do notpermit - a user to improve the expression of a certain part ofa melody directly. 
After - discussing the functions requiredfor user-oriented interface for performance rendering - systems, this paper proposes a directable case-based performance rendering system, - called Itopul. Itopul is characterized by 1) a combination of the phrasing model - and thepulse model, 2) the use of a hierarchical music structure foravoiding from - the data sparseness problem, 3) visualizationof the processing progress, and 4) - music structures directlymodifiable by the user.' - address: 'Genoa, Italy' - author: 'Hashida, Mitsuyo and Ito, Yosuke and Katayose, Haruhiro' - bibtex: "@inproceedings{Hashida2008,\n abstract = {One of the advantages of case-based\ - \ systems is that theycan generate expressions even if the user doesn't know howthe\ - \ system applies expression rules. However, the systemscannot avoid the problem\ - \ of data sparseness and do notpermit a user to improve the expression of a certain\ - \ part ofa melody directly. After discussing the functions requiredfor user-oriented\ - \ interface for performance rendering systems, this paper proposes a directable\ - \ case-based performance rendering system, called Itopul. 
Itopul is characterized\ - \ by 1) a combination of the phrasing model and thepulse model, 2) the use of\ - \ a hierarchical music structure foravoiding from the data sparseness problem,\ - \ 3) visualizationof the processing progress, and 4) music structures directlymodifiable\ - \ by the user.},\n address = {Genoa, Italy},\n author = {Hashida, Mitsuyo and\ - \ Ito, Yosuke and Katayose, Haruhiro},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179559},\n\ - \ issn = {2220-4806},\n keywords = {Performance Rendering, User Interface, Case-based\ - \ Approach },\n pages = {277--280},\n title = {A Directable Performance Rendering\ - \ System: Itopul},\n url = {http://www.nime.org/proceedings/2008/nime2008_277.pdf},\n\ - \ year = {2008}\n}\n" + ID: Schmeder2009 + abstract: 'An on-the-fly reconfigurable low-level embedded servicearchitecture is + presented as a means to improve scalability, improve conceptual comprehensibility, + reduce humanerror and reduce development time when designing newsensor-based electronic + musical instruments with real-timeresponsiveness. The implementation of the concept + ina project called micro-OSC is described. Other sensorinterfacing products are + evaluated in the context of DIYprototyping of musical instruments. The capabilities + ofthe micro-OSC platform are demonstrated through a set ofexamples including resistive + sensing, mixed digital-analogsystems, many-channel sensor interfaces and time-basedmeasurement + methods.' 
+ address: 'Pittsburgh, PA, United States' + author: 'Schmeder, Andrew and Freed, Adrian' + bibtex: "@inproceedings{Schmeder2009,\n abstract = {An on-the-fly reconfigurable\ + \ low-level embedded servicearchitecture is presented as a means to improve scalability,\ + \ improve conceptual comprehensibility, reduce humanerror and reduce development\ + \ time when designing newsensor-based electronic musical instruments with real-timeresponsiveness.\ + \ The implementation of the concept ina project called micro-OSC is described.\ + \ Other sensorinterfacing products are evaluated in the context of DIYprototyping\ + \ of musical instruments. The capabilities ofthe micro-OSC platform are demonstrated\ + \ through a set ofexamples including resistive sensing, mixed digital-analogsystems,\ + \ many-channel sensor interfaces and time-basedmeasurement methods.},\n address\ + \ = {Pittsburgh, PA, United States},\n author = {Schmeder, Andrew and Freed, Adrian},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177673},\n issn = {2220-4806},\n\ + \ keywords = {real-time musical interface, DIY design, em- bedded web services,\ + \ rapid prototyping, reconfigurable firmware },\n pages = {121--124},\n title\ + \ = {A Low-level Embedded Service Architecture for Rapid DIY Design of Real-time\ + \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_121.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179559 + doi: 10.5281/zenodo.1177673 issn: 2220-4806 - keywords: 'Performance Rendering, User Interface, Case-based Approach ' - pages: 277--280 - title: 'A Directable Performance Rendering System: Itopul' - url: http://www.nime.org/proceedings/2008/nime2008_277.pdf - year: 2008 + keywords: 'real-time musical interface, DIY design, em- bedded web services, rapid + 
prototyping, reconfigurable firmware ' + pages: 121--124 + title: A Low-level Embedded Service Architecture for Rapid DIY Design of Real-time + Musical Instruments + url: http://www.nime.org/proceedings/2009/nime2009_121.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Hazlewood2008 - abstract: 'In this work we describe our initial explorations in building a musical - instrument specifically for providing listenerswith simple, but useful, ambient - information. The termAmbient Musical Information Systems (AMIS) is proposedto - describe this kind of research. Instruments like these differ from standard musical - instruments in that they are tobe perceived indirectly from outside one''s primary - focus ofattention. We describe our rationale for creating such a device, a discussion - on the appropriate qualities of sound fordelivering ambient information, and a - description of an instrument created for use in a series of experiments that wewill - use to test out ideas. We conclude with a discussion ofour initial findings, and - some further directions we wish toexplore.' - address: 'Genoa, Italy' - author: 'Hazlewood, William R. and Knopke, Ian' - bibtex: "@inproceedings{Hazlewood2008,\n abstract = {In this work we describe our\ - \ initial explorations in building a musical instrument specifically for providing\ - \ listenerswith simple, but useful, ambient information. The termAmbient Musical\ - \ Information Systems (AMIS) is proposedto describe this kind of research. Instruments\ - \ like these differ from standard musical instruments in that they are tobe perceived\ - \ indirectly from outside one's primary focus ofattention. We describe our rationale\ - \ for creating such a device, a discussion on the appropriate qualities of sound\ - \ fordelivering ambient information, and a description of an instrument created\ - \ for use in a series of experiments that wewill use to test out ideas. 
We conclude\ - \ with a discussion ofour initial findings, and some further directions we wish\ - \ toexplore.},\n address = {Genoa, Italy},\n author = {Hazlewood, William R. and\ - \ Knopke, Ian},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179563},\n\ - \ issn = {2220-4806},\n keywords = {Ambient Musical Information Systems, musical\ - \ instruments, human computer interaction, Markov chain, probability, al- gorithmic\ - \ composition },\n pages = {281--284},\n title = {Designing Ambient Musical Information\ - \ Systems},\n url = {http://www.nime.org/proceedings/2008/nime2008_281.pdf},\n\ - \ year = {2008}\n}\n" + ID: Steiner2009 + abstract: 'Firmata is a generic protocol for communicating with microcontrollers + from software on a host computer. The central goal is to make the microcontroller + an extension of theprogramming environment on the host computer in a manner that + feels natural in that programming environment. Itwas designed to be open and flexible + so that any programming environment can support it, and simple to implementboth + on the microcontroller and the host computer to ensurea wide range of implementations. + The current reference implementation is a library for Arduino/Wiring and is includedwith + Arduino software package since version 0012. Thereare matching software modules + for a number of languages,like Pd, OpenFrameworks, Max/MSP, and Processing.' + address: 'Pittsburgh, PA, United States' + author: 'Steiner, Hans-Christoph' + bibtex: "@inproceedings{Steiner2009,\n abstract = {Firmata is a generic protocol\ + \ for communicating with microcontrollers from software on a host computer. 
The\ + \ central goal is to make the microcontroller an extension of theprogramming environment\ + \ on the host computer in a manner that feels natural in that programming environment.\ + \ Itwas designed to be open and flexible so that any programming environment can\ + \ support it, and simple to implementboth on the microcontroller and the host\ + \ computer to ensurea wide range of implementations. The current reference implementation\ + \ is a library for Arduino/Wiring and is includedwith Arduino software package\ + \ since version 0012. Thereare matching software modules for a number of languages,like\ + \ Pd, OpenFrameworks, Max/MSP, and Processing.},\n address = {Pittsburgh, PA,\ + \ United States},\n author = {Steiner, Hans-Christoph},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177689},\n issn = {2220-4806},\n keywords = {arduino,microcontroller,nime09,processing,pure\ + \ data},\n pages = {125--130},\n title = {Firmata : Towards Making Microcontrollers\ + \ Act Like Extensions of the Computer},\n url = {http://www.nime.org/proceedings/2009/nime2009_125.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179563 + doi: 10.5281/zenodo.1177689 issn: 2220-4806 - keywords: 'Ambient Musical Information Systems, musical instruments, human computer - interaction, Markov chain, probability, al- gorithmic composition ' - pages: 281--284 - title: Designing Ambient Musical Information Systems - url: http://www.nime.org/proceedings/2008/nime2008_281.pdf - year: 2008 + keywords: 'arduino,microcontroller,nime09,processing,pure data' + pages: 125--130 + title: 'Firmata : Towards Making Microcontrollers Act Like Extensions of the Computer' + url: http://www.nime.org/proceedings/2009/nime2009_125.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Hadjakos2008 - abstract: 'The Elbow 
Piano distinguishes two types of piano touch: a touchwith movement - in the elbow joint and a touch without. A playednote is first mapped to the left - or right hand by visual tracking.Custom-built goniometers attached to the player''s - arms are usedto detect the type of touch. The two different types of touchesare - sonified by different instrument sounds. This gives theplayer an increased awareness - of his elbow movements, which isconsidered valuable for piano education. We have - implementedthe system and evaluated it with a group of music students.' - address: 'Genoa, Italy' - author: 'Hadjakos, Aristotelis and Aitenbichler, Erwin and Mühlhäuser, Max' - bibtex: "@inproceedings{Hadjakos2008,\n abstract = {The Elbow Piano distinguishes\ - \ two types of piano touch: a touchwith movement in the elbow joint and a touch\ - \ without. A playednote is first mapped to the left or right hand by visual tracking.Custom-built\ - \ goniometers attached to the player's arms are usedto detect the type of touch.\ - \ The two different types of touchesare sonified by different instrument sounds.\ - \ This gives theplayer an increased awareness of his elbow movements, which isconsidered\ - \ valuable for piano education. We have implementedthe system and evaluated it\ - \ with a group of music students.},\n address = {Genoa, Italy},\n author = {Hadjakos,\ - \ Aristotelis and Aitenbichler, Erwin and M\\''{u}hlh\\''{a}user, Max},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179553},\n issn = {2220-4806},\n keywords\ - \ = {Piano, education, sonification, feedback, gesture. 
},\n pages = {285--288},\n\ - \ title = {The Elbow Piano : Sonification of Piano Playing Movements},\n url =\ - \ {http://www.nime.org/proceedings/2008/nime2008_285.pdf},\n year = {2008}\n}\n" + ID: Baalman2009a + abstract: 'The SenseWorld DataNetwork framework addresses the is- sue of sharing + and manipulating multiple data streams among different media systems in a heterogenous + interactive per- formance environment. It is intended to facilitate the cre- ation, + rehearsal process and performance practice of collab- orative interactive media + art works, by making the sharing of data (from sensors or internal processes) + between collab- orators easier, faster and more flexible.' + address: 'Pittsburgh, PA, United States' + author: 'Baalman, Marije A. and Smoak, Harry C. and Salter, Christopher L. and Malloch, + Joseph and Wanderley, Marcelo M.' + bibtex: "@inproceedings{Baalman2009a,\n abstract = {The SenseWorld DataNetwork framework\ + \ addresses the is- sue of sharing and manipulating multiple data streams among\ + \ different media systems in a heterogenous interactive per- formance environment.\ + \ It is intended to facilitate the cre- ation, rehearsal process and performance\ + \ practice of collab- orative interactive media art works, by making the sharing\ + \ of data (from sensors or internal processes) between collab- orators easier,\ + \ faster and more flexible.},\n address = {Pittsburgh, PA, United States},\n author\ + \ = {Baalman, Marije A. and Smoak, Harry C. and Salter, Christopher L. 
and Malloch,\ + \ Joseph and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177471},\n\ + \ issn = {2220-4806},\n keywords = {Data exchange, collaborative performance,\ + \ interactive performance, interactive art works, sensor data, OpenSoundControl,\ + \ SuperCollider, Max/MSP},\n pages = {131--134},\n title = {Sharing Data in Collaborative,\ + \ Interactive Performances : the SenseWorld DataNetwork},\n url = {http://www.nime.org/proceedings/2009/nime2009_131.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179553 + doi: 10.5281/zenodo.1177471 issn: 2220-4806 - keywords: 'Piano, education, sonification, feedback, gesture. ' - pages: 285--288 - title: 'The Elbow Piano : Sonification of Piano Playing Movements' - url: http://www.nime.org/proceedings/2008/nime2008_285.pdf - year: 2008 + keywords: 'Data exchange, collaborative performance, interactive performance, interactive + art works, sensor data, OpenSoundControl, SuperCollider, Max/MSP' + pages: 131--134 + title: 'Sharing Data in Collaborative, Interactive Performances : the SenseWorld + DataNetwork' + url: http://www.nime.org/proceedings/2009/nime2009_131.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Takegawa2008 - abstract: 'Musical keyboard instruments have a long history, whichresulted in many - kinds of keyboards (claviers) today. Sincethe hardware of conventional musical - keyboards cannot bechanged, such as the number of keys, musicians have tocarry - these large keyboards for playing music that requiresonly a small diapason. To - solve this problem, the goal ofour study is to construct UnitKeyboard, which has - only 12keys (7 white keys and 5 black keys) and connectors fordocking with other - UnitKeyboards. 
We can build variouskinds of musical keyboard configurations by - connecting oneUnitKeyboard to others, since they have automatic settingsfor multiple - keyboard instruments. We discuss the usabilityof the UnitKeyboard from reviews - by several amateur andprofessional pianists who used the UnitKeyboard.' - address: 'Genoa, Italy' - author: 'Takegawa, Yoshinari and Tsukamoto, Masahiko' - bibtex: "@inproceedings{Takegawa2008,\n abstract = {Musical keyboard instruments\ - \ have a long history, whichresulted in many kinds of keyboards (claviers) today.\ - \ Sincethe hardware of conventional musical keyboards cannot bechanged, such as\ - \ the number of keys, musicians have tocarry these large keyboards for playing\ - \ music that requiresonly a small diapason. To solve this problem, the goal ofour\ - \ study is to construct UnitKeyboard, which has only 12keys (7 white keys and\ - \ 5 black keys) and connectors fordocking with other UnitKeyboards. We can build\ - \ variouskinds of musical keyboard configurations by connecting oneUnitKeyboard\ - \ to others, since they have automatic settingsfor multiple keyboard instruments.\ - \ We discuss the usabilityof the UnitKeyboard from reviews by several amateur\ - \ andprofessional pianists who used the UnitKeyboard.},\n address = {Genoa, Italy},\n\ - \ author = {Takegawa, Yoshinari and Tsukamoto, Masahiko},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179635},\n issn = {2220-4806},\n keywords = {Portable\ - \ keyboard instruments, block interface, Automatic settings },\n pages = {289--292},\n\ - \ title = {UnitKeyboard : An Easily Configurable Compact Clavier},\n url = {http://www.nime.org/proceedings/2008/nime2008_289.pdf},\n\ - \ year = {2008}\n}\n" + ID: Bouillot2009 + abstract: 'Low-latency streaming of high-quality audio has the potential to dramatically + transform the world of interactive musical applications. 
We provide methods for + accurately measuring the end-to-end latency and audio quality of a delivered audio + stream and apply these methods to an empirical evaluation of several streaming + engines. In anticipationof future demands for emerging applications involving + audio interaction, we also review key features of streamingengines and discuss + potential challenges that remain to beovercome.' + address: 'Pittsburgh, PA, United States' + author: 'Bouillot, Nicolas and Cooperstock, Jeremy R.' + bibtex: "@inproceedings{Bouillot2009,\n abstract = {Low-latency streaming of high-quality\ + \ audio has the potential to dramatically transform the world of interactive musical\ + \ applications. We provide methods for accurately measuring the end-to-end latency\ + \ and audio quality of a delivered audio stream and apply these methods to an\ + \ empirical evaluation of several streaming engines. In anticipationof future\ + \ demands for emerging applications involving audio interaction, we also review\ + \ key features of streamingengines and discuss potential challenges that remain\ + \ to beovercome.},\n address = {Pittsburgh, PA, United States},\n author = {Bouillot,\ + \ Nicolas and Cooperstock, Jeremy R.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177485},\n\ + \ issn = {2220-4806},\n keywords = {Networked Musical Performance, high-fidelity\ + \ audio streaming, glitch detection, latency measurement },\n pages = {135--140},\n\ + \ title = {Challenges and Performance of High-Fidelity Audio Streaming for Interactive\ + \ Performances},\n url = {http://www.nime.org/proceedings/2009/nime2009_135.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179635 + doi: 10.5281/zenodo.1177485 issn: 2220-4806 - keywords: 'Portable keyboard instruments, block interface, Automatic settings ' - 
pages: 289--292 - title: 'UnitKeyboard : An Easily Configurable Compact Clavier' - url: http://www.nime.org/proceedings/2008/nime2008_289.pdf - year: 2008 + keywords: 'Networked Musical Performance, high-fidelity audio streaming, glitch + detection, latency measurement ' + pages: 135--140 + title: Challenges and Performance of High-Fidelity Audio Streaming for Interactive + Performances + url: http://www.nime.org/proceedings/2009/nime2009_135.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: PalacioQuintin2008 - abstract: 'After eight years of practice on the first hyper-flute prototype (a flute - extended with sensors), this article presentsa retrospective of its instrumental - practice and the newdevelopments planned from both technological and musical perspectives. - Design, performance skills, and mappingstrategies are discussed, as well as interactive - compositionand improvisation.' - address: 'Genoa, Italy' - author: 'Palacio-Quintin, Cléo' - bibtex: "@inproceedings{PalacioQuintin2008,\n abstract = {After eight years of practice\ - \ on the first hyper-flute prototype (a flute extended with sensors), this article\ - \ presentsa retrospective of its instrumental practice and the newdevelopments\ - \ planned from both technological and musical perspectives. 
Design, performance\ - \ skills, and mappingstrategies are discussed, as well as interactive compositionand\ - \ improvisation.},\n address = {Genoa, Italy},\n author = {Palacio-Quintin, Cl\\\ - '{e}o},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179609},\n issn = {2220-4806},\n\ - \ keywords = {composition,gestural control,hyper-flute,hyper-instruments,improvisation,interactive\ - \ music,mapping,nime08,sensors},\n pages = {293--298},\n title = {Eight Years\ - \ of Practice on the Hyper-Flute : Technological and Musical Perspectives},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_293.pdf},\n year = {2008}\n\ - }\n" + ID: Todoroff2009 + abstract: "''Extension du corps sonore'' is long-term project initiatedby Musiques\ + \ Nouvelles [4], a contemporary music ensemble in Mons. It aims at giving instrumental\ + \ music performers an extended control over the sound of their instrument byextending\ + \ the understanding of the sound body from the instrument only to the combination\ + \ of the instrument and thewhole body of the performer. The development started\ + \ atARTeM and got the benefit of a three month numediartresearch project [1] that\ + \ focused on three axes of research:pre-processing of sensor data, gesture recognition\ + \ and mapping through interpolation. The objectives were the development of computing\ + \ methods and flexible Max/MSP externals to be later integrated in the ARTeM software\ + \ framework for the concerts with viola player Dominica Eyckmans. They could be\ + \ used in a variety of other artistic worksand will be made available on the numediart\ + \ website [1],where more detailed information can be found in the Quarterly Progress\ + \ Scientific Report #4." 
+ address: 'Pittsburgh, PA, United States' + author: 'Todoroff, Todor and Bettens, Frédéric and Reboursière, Loïc and Chu, Wen-Yang' + bibtex: "@inproceedings{Todoroff2009,\n abstract = {''Extension du corps sonore''\ + \ is long-term project initiatedby Musiques Nouvelles [4], a contemporary music\ + \ ensemble in Mons. It aims at giving instrumental music performers an extended\ + \ control over the sound of their instrument byextending the understanding of\ + \ the sound body from the instrument only to the combination of the instrument\ + \ and thewhole body of the performer. The development started atARTeM and got\ + \ the benefit of a three month numediartresearch project [1] that focused on three\ + \ axes of research:pre-processing of sensor data, gesture recognition and mapping\ + \ through interpolation. The objectives were the development of computing methods\ + \ and flexible Max/MSP externals to be later integrated in the ARTeM software\ + \ framework for the concerts with viola player Dominica Eyckmans. 
They could be\ + \ used in a variety of other artistic worksand will be made available on the numediart\ + \ website [1],where more detailed information can be found in the Quarterly Progress\ + \ Scientific Report \\#4.},\n address = {Pittsburgh, PA, United States},\n author\ + \ = {Todoroff, Todor and Bettens, Fr\\'{e}d\\'{e}ric and Reboursi\\`{e}re, Lo\\\ + \"{i}c and Chu, Wen-Yang},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177691},\n\ + \ issn = {2220-4806},\n keywords = {Sensor data pre-processing, gesture recognition,\ + \ mapping, interpolation, extension du corps sonore },\n pages = {141--146},\n\ + \ title = {''Extension du Corps Sonore'' --- Dancing Viola},\n url = {http://www.nime.org/proceedings/2009/nime2009_141.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179609 + doi: 10.5281/zenodo.1177691 issn: 2220-4806 - keywords: 'composition,gestural control,hyper-flute,hyper-instruments,improvisation,interactive - music,mapping,nime08,sensors' - pages: 293--298 - title: 'Eight Years of Practice on the Hyper-Flute : Technological and Musical Perspectives' - url: http://www.nime.org/proceedings/2008/nime2008_293.pdf - year: 2008 + keywords: 'Sensor data pre-processing, gesture recognition, mapping, interpolation, + extension du corps sonore ' + pages: 141--146 + title: '''''Extension du Corps Sonore'''' --- Dancing Viola' + url: http://www.nime.org/proceedings/2009/nime2009_141.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Berdahl2008 - abstract: 'We introduce physically motivated interfaces for playing virtual musical - instruments, and we suggest that they lie somewhere in between commonplace interfaces - and haptic interfaces in terms of their complexity. 
Next, we review guitarlike - interfaces, and we design an interface to a virtual string.The excitation signal - and pitch are sensed separately usingtwo independent string segments. These parameters - controla two-axis digital waveguide virtual string, which modelsvibrations in - the horizontal and vertical transverse axes aswell as the coupling between them. - Finally, we consider theadvantages of using a multi-axis pickup for measuring - theexcitation signal.' - address: 'Genoa, Italy' - author: 'Berdahl, Edgar and Smith, Julius O.' - bibtex: "@inproceedings{Berdahl2008,\n abstract = {We introduce physically motivated\ - \ interfaces for playing virtual musical instruments, and we suggest that they\ - \ lie somewhere in between commonplace interfaces and haptic interfaces in terms\ - \ of their complexity. Next, we review guitarlike interfaces, and we design an\ - \ interface to a virtual string.The excitation signal and pitch are sensed separately\ - \ usingtwo independent string segments. These parameters controla two-axis digital\ - \ waveguide virtual string, which modelsvibrations in the horizontal and vertical\ - \ transverse axes aswell as the coupling between them. 
Finally, we consider theadvantages\ - \ of using a multi-axis pickup for measuring theexcitation signal.},\n address\ - \ = {Genoa, Italy},\n author = {Berdahl, Edgar and Smith, Julius O.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179493},\n issn = {2220-4806},\n keywords\ - \ = {physically motivated, physical, models, modeling, vibrating string, guitar,\ - \ pitch detection, interface, excitation, coupled strings, haptic },\n pages =\ - \ {299--302},\n title = {A Tangible Virtual Vibrating String : A Physically Motivated\ - \ Virtual Musical Instrument Interface},\n url = {http://www.nime.org/proceedings/2008/nime2008_299.pdf},\n\ - \ year = {2008}\n}\n" + ID: Leider2009a + abstract: 'We describe initial prototypes and a design strategy for new, user-customized + audio-manipulation and editing tools. These tools are designed to enable intuitive + control of audio-processing tasks while anthropomorphically matching the target + user. ' + address: 'Pittsburgh, PA, United States' + author: 'Leider, Colby and Mann, Doug and Plazas, Daniel and Battaglia, Michael + and Draper, Reid' + bibtex: "@inproceedings{Leider2009a,\n abstract = {We describe initial prototypes\ + \ and a design strategy for new, user-customized audio-manipulation and editing\ + \ tools. These tools are designed to enable intuitive control of audio-processing\ + \ tasks while anthropomorphically matching the target user. 
},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Leider, Colby and Mann, Doug and Plazas, Daniel\ + \ and Battaglia, Michael and Draper, Reid},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177617},\n issn = {2220-4806},\n keywords = {user modeling,\ + \ user customization },\n pages = {147--148},\n title = {The elBo and footPad\ + \ : Toward Personalized Hardware for Audio Manipulation},\n url = {http://www.nime.org/proceedings/2009/nime2009_147.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179493 + doi: 10.5281/zenodo.1177617 issn: 2220-4806 - keywords: 'physically motivated, physical, models, modeling, vibrating string, guitar, - pitch detection, interface, excitation, coupled strings, haptic ' - pages: 299--302 - title: 'A Tangible Virtual Vibrating String : A Physically Motivated Virtual Musical - Instrument Interface' - url: http://www.nime.org/proceedings/2008/nime2008_299.pdf - year: 2008 + keywords: 'user modeling, user customization ' + pages: 147--148 + title: 'The elBo and footPad : Toward Personalized Hardware for Audio Manipulation' + url: http://www.nime.org/proceedings/2009/nime2009_147.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Geiger2008 - abstract: 'Being one of the earliest electronic instruments the basic principles - of the Theremin have often been used to design new musical interfaces. We present - the structured design and evaluation of a set of 3D interfaces for a virtual Theremin, - the VRemin. The variants differ in the size of the interaction space, the interface - complexity, and the applied IO devices. We conducted a formal evaluation based - on the well-known AttrakDiff questionnaire for evaluating the hedonic and pragmatic - quality of interactive products. 
The presented work is a first approach towards - a participatory design process for musical interfaces that includes user evaluation - at early design phases. ' - address: 'Genoa, Italy' - author: 'Geiger, Christian and Reckter, Holger and Paschke, David and Schulz, Florian - and Poepel, Cornelius' - bibtex: "@inproceedings{Geiger2008,\n abstract = {Being one of the earliest electronic\ - \ instruments the basic principles of the Theremin have often been used to design\ - \ new musical interfaces. We present the structured design and evaluation of a\ - \ set of 3D interfaces for a virtual Theremin, the VRemin. The variants differ\ - \ in the size of the interaction space, the interface complexity, and the applied\ - \ IO devices. We conducted a formal evaluation based on the well-known AttrakDiff\ - \ questionnaire for evaluating the hedonic and pragmatic quality of interactive\ - \ products. The presented work is a first approach towards a participatory design\ - \ process for musical interfaces that includes user evaluation at early design\ - \ phases. },\n address = {Genoa, Italy},\n author = {Geiger, Christian and Reckter,\ - \ Holger and Paschke, David and Schulz, Florian and Poepel, Cornelius},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179545},\n issn = {2220-4806},\n keywords\ - \ = {3d interaction techniques,an important concept for,both hands,evaluation,few\ - \ wimp interface concepts,in contrast the use,make efficient use of,nime08,of\ - \ both hands is,theremin-based interfaces},\n pages = {303--306},\n title = {Towards\ - \ Participatory Design and Evaluation of Theremin-based Musical Interfaces},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_303.pdf},\n year = {2008}\n\ - }\n" + ID: Crawford2009 + abstract: The MIDI-Airguitar is a hand held musical controller based on Force Sensing + Resister (FSR) and Accelerometer technology. 
The hardware and software implementation + of the MIDI-Airguitars are described below. Current practices of the authors in + performance are discussed. + address: 'Pittsburgh, PA, United States' + author: 'Crawford, Langdon and Fastenow, William D.' + bibtex: "@inproceedings{Crawford2009,\n abstract = {The MIDI-Airguitar is a hand\ + \ held musical controller based on Force Sensing Resister (FSR) and Accelerometer\ + \ technology. The hardware and software implementation of the MIDI-Airguitars\ + \ are described below. Current practices of the authors in performance are discussed.},\n\ + \ address = {Pittsburgh, PA, United States},\n author = {Crawford, Langdon and\ + \ Fastenow, William D.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177495},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {149--150},\n title = {The\ + \ Midi-AirGuitar , A serious Musical Controller with a Funny Name Music Technology\ + \ Program},\n url = {http://www.nime.org/proceedings/2009/nime2009_149.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179545 + doi: 10.5281/zenodo.1177495 issn: 2220-4806 - keywords: '3d interaction techniques,an important concept for,both hands,evaluation,few - wimp interface concepts,in contrast the use,make efficient use of,nime08,of both - hands is,theremin-based interfaces' - pages: 303--306 - title: Towards Participatory Design and Evaluation of Theremin-based Musical Interfaces - url: http://www.nime.org/proceedings/2008/nime2008_303.pdf - year: 2008 + keywords: nime09 + pages: 149--150 + title: 'The Midi-AirGuitar , A serious Musical Controller with a Funny Name Music + Technology Program' + url: http://www.nime.org/proceedings/2009/nime2009_149.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Henriques2008 - address: 'Genoa, Italy' - author: 'Henriques, 
Tom\''{a}s' - bibtex: "@inproceedings{Henriques2008,\n address = {Genoa, Italy},\n author = {Henriques,\ - \ Tom\\'{a}s},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179565},\n issn\ - \ = {2220-4806},\n keywords = {computer music,musical instrument,nime08,sensor\ - \ technologies},\n pages = {307--310},\n title = {META-{EV}I Innovative Performance\ - \ Paths with a Wind Controller},\n url = {http://www.nime.org/proceedings/2008/nime2008_307.pdf},\n\ - \ year = {2008}\n}\n" + ID: Bottcher2009 + abstract: 'In this poster we present the early prototype of the augmented Psychophone + --- a saxophone with various applied sensors, allowing the saxophone player to + attach effects like pitch shifting, wah-wah and ring modulation to the saxophone, + simply by moving the saxophone as one would do when really being enthusiastic + and involved in the performance. The possibility of scratching on the previously + recorded sound is also possible directly on the saxophone. ' + address: 'Pittsburgh, PA, United States' + author: 'Böttcher, Niels and Dimitrov, Smilen' + bibtex: "@inproceedings{Bottcher2009,\n abstract = {In this poster we present the\ + \ early prototype of the augmented Psychophone --- a saxophone with various applied\ + \ sensors, allowing the saxophone player to attach effects like pitch shifting,\ + \ wah-wah and ring modulation to the saxophone, simply by moving the saxophone\ + \ as one would do when really being enthusiastic and involved in the performance.\ + \ The possibility of scratching on the previously recorded sound is also possible\ + \ directly on the saxophone. 
},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {B\\''{o}ttcher, Niels and Dimitrov, Smilen},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177467},\n issn = {2220-4806},\n keywords = {Augmented\ + \ saxophone, Physical computing, hyper instruments, mapping. },\n pages = {151--152},\n\ + \ title = {An Early Prototype of the Augmented PsychoPhone},\n url = {http://www.nime.org/proceedings/2009/nime2009_151.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179565 + doi: 10.5281/zenodo.1177467 issn: 2220-4806 - keywords: 'computer music,musical instrument,nime08,sensor technologies' - pages: 307--310 - title: 'META-{EV}I Innovative Performance Paths with a Wind Controller' - url: http://www.nime.org/proceedings/2008/nime2008_307.pdf - year: 2008 + keywords: 'Augmented saxophone, Physical computing, hyper instruments, mapping. ' + pages: 151--152 + title: An Early Prototype of the Augmented PsychoPhone + url: http://www.nime.org/proceedings/2009/nime2009_151.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Price2008 - address: 'Genoa, Italy' - author: 'Price, Robin and Rebelo, Pedro' - bibtex: "@inproceedings{Price2008,\n address = {Genoa, Italy},\n author = {Price,\ - \ Robin and Rebelo, Pedro},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179615},\n\ - \ issn = {2220-4806},\n keywords = {Mapping, database, audiovisual, radio, installation\ - \ art. 
},\n pages = {311--314},\n title = {Database and Mapping Design for Audiovisual\ - \ Prepared Radio Set Installation},\n url = {http://www.nime.org/proceedings/2008/nime2008_311.pdf},\n\ - \ year = {2008}\n}\n" + ID: Siwiak2009 + abstract: 'Catch Your Breath is an interactive audiovisual bio-feedbacksystem adapted + from a project designed to reduce respiratory irregularity in patients undergoing + 4D CT scans for oncological diagnosis. The system is currently implementedand + assessed as a potential means to reduce motion-induceddistortion in CT images.A + museum installation based on the same principle wascreated in which an inexpensive + wall-mounted web camera tracks an IR sensor embedded into a pendant worn bythe + user. The motion of the subjects breathing is trackedand interpreted as a real-time + variable tempo adjustment toa stored musical file. The subject can then adjust + his/herbreathing to synchronize with a separate accompanimentline. When the breathing + is regular and is at the desiredtempo, the audible result sounds synchronous and + harmonious. The accompaniment''s tempo progresses and gradually decrease which + causes the breathing to synchronize andslow down, thus increasing relaxation.' + address: 'Pittsburgh, PA, United States' + author: 'Siwiak, Diana and Berger, Jonathan and Yang, Yao' + bibtex: "@inproceedings{Siwiak2009,\n abstract = {Catch Your Breath is an interactive\ + \ audiovisual bio-feedbacksystem adapted from a project designed to reduce respiratory\ + \ irregularity in patients undergoing 4D CT scans for oncological diagnosis. The\ + \ system is currently implementedand assessed as a potential means to reduce motion-induceddistortion\ + \ in CT images.A museum installation based on the same principle wascreated in\ + \ which an inexpensive wall-mounted web camera tracks an IR sensor embedded into\ + \ a pendant worn bythe user. 
The motion of the subjects breathing is trackedand\ + \ interpreted as a real-time variable tempo adjustment toa stored musical file.\ + \ The subject can then adjust his/herbreathing to synchronize with a separate\ + \ accompanimentline. When the breathing is regular and is at the desiredtempo,\ + \ the audible result sounds synchronous and harmonious. The accompaniment's tempo\ + \ progresses and gradually decrease which causes the breathing to synchronize\ + \ andslow down, thus increasing relaxation.},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Siwiak, Diana and Berger, Jonathan and Yang, Yao},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177675},\n issn = {2220-4806},\n keywords\ + \ = {sensor, music, auditory display. },\n pages = {153--154},\n title = {Catch\ + \ Your Breath --- Musical Biofeedback for Breathing Regulation},\n url = {http://www.nime.org/proceedings/2009/nime2009_153.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179615 + doi: 10.5281/zenodo.1177675 issn: 2220-4806 - keywords: 'Mapping, database, audiovisual, radio, installation art. ' - pages: 311--314 - title: Database and Mapping Design for Audiovisual Prepared Radio Set Installation - url: http://www.nime.org/proceedings/2008/nime2008_311.pdf - year: 2008 + keywords: 'sensor, music, auditory display. ' + pages: 153--154 + title: Catch Your Breath --- Musical Biofeedback for Breathing Regulation + url: http://www.nime.org/proceedings/2009/nime2009_153.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Jo2008 - abstract: 'Monalisa is a software platform that enables to "see the sound, hear - the image". It consists of three software: Monalisa Application, Monalisa-Audio - Unit, and Monalisa-Image Unit, and an installation: Monalisa "shadow of the sound". 
- In this paper, we describe the implementation of each software and installation - with the explanation of the basic algorithms to treat the image data and the sound - data transparently.' - address: 'Genoa, Italy' - author: 'Jo, Kazuhiro and Nagano, Norihisa' - bibtex: "@inproceedings{Jo2008,\n abstract = {Monalisa is a software platform that\ - \ enables to \"see the sound, hear the image\". It consists of three software:\ - \ Monalisa Application, Monalisa-Audio Unit, and Monalisa-Image Unit, and an installation:\ - \ Monalisa \"shadow of the sound\". In this paper, we describe the implementation\ - \ of each software and installation with the explanation of the basic algorithms\ - \ to treat the image data and the sound data transparently.},\n address = {Genoa,\ - \ Italy},\n author = {Jo, Kazuhiro and Nagano, Norihisa},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179569},\n issn = {2220-4806},\n keywords = {Sound and\ - \ Image Processing Software, Plug-in, Installation },\n pages = {315--318},\n\ - \ title = {Monalisa : \"See the Sound , Hear the Image\"},\n url = {http://www.nime.org/proceedings/2008/nime2008_315.pdf},\n\ - \ year = {2008}\n}\n" + ID: Peng2009 + abstract: 'With the increase of sales of Wii game consoles, it is becoming commonplace + for the Wii remote to be used as analternative input device for other computer + systems. In thispaper, we present a system which makes use of the infraredcamera + within the Wii remote to capture the gestures of aconductor using a baton with + an infrared LED and battery.Our system then performs data analysis with gesture + classification and following, and finally displays the gestures using visual baton + trajectories and audio feedback. Gesturetrajectories are displayed in real time + and can be comparedto the corresponding diagram shown in a textbook. 
In addition, + since a conductor normally does not look at a screenwhile conducting, tones are + played to represent a certainbeat in a conducting gesture. Further, the system + can be controlled entirely with the baton, removing the need to switchfrom baton + to mouse. The interface is intended to be usedfor pedagogy purposes.' + address: 'Pittsburgh, PA, United States' + author: 'Peng, Lijuan and Gerhard, David' + bibtex: "@inproceedings{Peng2009,\n abstract = {With the increase of sales of Wii\ + \ game consoles, it is becoming commonplace for the Wii remote to be used as analternative\ + \ input device for other computer systems. In thispaper, we present a system which\ + \ makes use of the infraredcamera within the Wii remote to capture the gestures\ + \ of aconductor using a baton with an infrared LED and battery.Our system then\ + \ performs data analysis with gesture classification and following, and finally\ + \ displays the gestures using visual baton trajectories and audio feedback. Gesturetrajectories\ + \ are displayed in real time and can be comparedto the corresponding diagram shown\ + \ in a textbook. In addition, since a conductor normally does not look at a screenwhile\ + \ conducting, tones are played to represent a certainbeat in a conducting gesture.\ + \ Further, the system can be controlled entirely with the baton, removing the\ + \ need to switchfrom baton to mouse. 
The interface is intended to be usedfor pedagogy\ + \ purposes.},\n address = {Pittsburgh, PA, United States},\n author = {Peng, Lijuan\ + \ and Gerhard, David},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177659},\n\ + \ issn = {2220-4806},\n keywords = {Conducting, Gesture, Infrared, Learning, Wii.\ + \ },\n pages = {155--156},\n title = {A Wii-Based Gestural Interface for Computer\ + \ Conducting Systems},\n url = {http://www.nime.org/proceedings/2009/nime2009_155.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179569 + doi: 10.5281/zenodo.1177659 issn: 2220-4806 - keywords: 'Sound and Image Processing Software, Plug-in, Installation ' - pages: 315--318 - title: 'Monalisa : "See the Sound , Hear the Image"' - url: http://www.nime.org/proceedings/2008/nime2008_315.pdf - year: 2008 + keywords: 'Conducting, Gesture, Infrared, Learning, Wii. ' + pages: 155--156 + title: A Wii-Based Gestural Interface for Computer Conducting Systems + url: http://www.nime.org/proceedings/2009/nime2009_155.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Robertson2008 - address: 'Genoa, Italy' - author: 'Robertson, Andrew and Plumbley, Mark D. and Bryan-Kinns, Nick' - bibtex: "@inproceedings{Robertson2008,\n address = {Genoa, Italy},\n author = {Robertson,\ - \ Andrew and Plumbley, Mark D. 
and Bryan-Kinns, Nick},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179619},\n issn = {2220-4806},\n keywords = {Automatic\ - \ Accompaniment, Beat Tracking, Human-Computer Interaction, Musical Interface\ - \ Evaluation },\n pages = {319--324},\n title = {A Turing Test for B-Keeper :\ - \ Evaluating an Interactive},\n url = {http://www.nime.org/proceedings/2008/nime2008_319.pdf},\n\ - \ year = {2008}\n}\n" + ID: Parson2009 + abstract: '''''Music for 32 Chess Pieces'''' is a software system that supports + composing, performing and improvising music by playing a chess game. A game server + stores a representation of the state of a game, validates proposed moves by players, + updates game state, and extracts a graph of piece-to-piece relationships. It also + loads a plugin code module that acts as a composition. A plugin maps pieces and + relationships on the board, such as support or attack relationships, to a timed + sequence of notes and accents. The server transmits notes in a sequence to an + audio renderer process via network datagrams. Two players can perform a composition + by playing chess, and a player can improvise by adjusting a plugin''s music mapping + parameters via a graphical user interface. A composer can create a new composition + by writing a new plugin that uses a distinct algorithm for mapping game rules + and states to music. A composer can also write a new note-to-sound mapping program + in the audio renderer language. This software is available at http://faculty.kutztown.edu/parson/music/ParsonMusic.html. ' + address: 'Pittsburgh, PA, United States' + author: 'Parson, Dale E.' + bibtex: "@inproceedings{Parson2009,\n abstract = {''Music for 32 Chess Pieces''\ + \ is a software system that supports composing, performing and improvising music\ + \ by playing a chess game. 
A game server stores a representation of the state\ + \ of a game, validates proposed moves by players, updates game state, and extracts\ + \ a graph of piece-to-piece relationships. It also loads a plugin code module\ + \ that acts as a composition. A plugin maps pieces and relationships on the board,\ + \ such as support or attack relationships, to a timed sequence of notes and accents.\ + \ The server transmits notes in a sequence to an audio renderer process via network\ + \ datagrams. Two players can perform a composition by playing chess, and a player\ + \ can improvise by adjusting a plugin's music mapping parameters via a graphical\ + \ user interface. A composer can create a new composition by writing a new plugin\ + \ that uses a distinct algorithm for mapping game rules and states to music. A\ + \ composer can also write a new note-to-sound mapping program in the audio renderer\ + \ language. This software is available at http://faculty.kutztown.edu/parson/music/ParsonMusic.html.\ + \ },\n address = {Pittsburgh, PA, United States},\n author = {Parson, Dale E.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177653},\n issn = {2220-4806},\n\ + \ keywords = {algorithmic composition, chess, ChucK, improvisation, Max/MSP, SuperCollider.\ + \ },\n pages = {157--158},\n title = {Chess-Based Composition and Improvisation\ + \ for Non-Musicians},\n url = {http://www.nime.org/proceedings/2009/nime2009_157.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179619 + doi: 10.5281/zenodo.1177653 issn: 2220-4806 - keywords: 'Automatic Accompaniment, Beat Tracking, Human-Computer Interaction, Musical - Interface Evaluation ' - pages: 319--324 - title: 'A Turing Test for B-Keeper : Evaluating an Interactive' - url: http://www.nime.org/proceedings/2008/nime2008_319.pdf - year: 2008 + 
keywords: 'algorithmic composition, chess, ChucK, improvisation, Max/MSP, SuperCollider. ' + pages: 157--158 + title: Chess-Based Composition and Improvisation for Non-Musicians + url: http://www.nime.org/proceedings/2009/nime2009_157.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Gatzsche2008 - abstract: 'In this paper, we present a pitch space based musical interface approach. - A pitch space arranges tones in a way that meaningful tone combinations can be - easily generated. Using a touch sensitive surface or a 3D-Joystick a player can - move through the pitch space and create the desired sound by selecting tones. - The more optimal the tones are geometrically arranged, the less control parameters - are required to move through the space and to select the desired pitches. For - this the quality of pitch space based musical interfaces depends on two factors: - 1. the way how the tones are organized within the pitch space and 2. the way how - the parameters of a given controller are used to move through the space and to - select pitches. This paper presents a musical interface based on a tonal pitch - space derived from a four dimensional model found by the music psychologists [11], - [2]. The proposed pitch space particularly eases the creation of tonal harmonic - music. Simultaneously it outlines music psychological and theoretical principles - of music. ' - address: 'Genoa, Italy' - author: 'Gatzsche, Gabriel and Mehnert, Markus and Stöcklmeier, Christian' - bibtex: "@inproceedings{Gatzsche2008,\n abstract = {In this paper, we present a\ - \ pitch space based musical interface approach. A pitch space arranges tones in\ - \ a way that meaningful tone combinations can be easily generated. Using a touch\ - \ sensitive surface or a 3D-Joystick a player can move through the pitch space\ - \ and create the desired sound by selecting tones. 
The more optimal the tones\ - \ are geometrically arranged, the less control parameters are required to move\ - \ through the space and to select the desired pitches. For this the quality of\ - \ pitch space based musical interfaces depends on two factors: 1. the way how\ - \ the tones are organized within the pitch space and 2. the way how the parameters\ - \ of a given controller are used to move through the space and to select pitches.\ - \ This paper presents a musical interface based on a tonal pitch space derived\ - \ from a four dimensional model found by the music psychologists [11], [2]. The\ - \ proposed pitch space particularly eases the creation of tonal harmonic music.\ - \ Simultaneously it outlines music psychological and theoretical principles of\ - \ music. },\n address = {Genoa, Italy},\n author = {Gatzsche, Gabriel and Mehnert,\ - \ Markus and St\\''{o}cklmeier, Christian},\n booktitle = {Proceedings of the\ + ID: Dolphin2009a + abstract: 'This paper reports on work in progress on the creativeproject MagNular, + part of a wider practical study of thepotential collaborative compositional applications + of gameengine technologies. MagNular is a sound toy utilizingcomputer game and + physics engine technologies to createan animated interface used in conjunction + with an externalsound engine developed within Max/MSP. The playercontrols virtual + magnets that attract or repel numerousparticle objects, moving them freely around + the virtualspace. Particle object collision data is mapped to controlsound onsets + and synthesis/DSP (Digital SignalProcessing) parameters. The user "composes" bycontrolling + and influencing the simulated physicalbehaviors of the particle objects within + the animatedinterface.' 
+ address: 'Pittsburgh, PA, United States' + author: 'Dolphin, Andy' + bibtex: "@inproceedings{Dolphin2009a,\n abstract = {This paper reports on work in\ + \ progress on the creativeproject MagNular, part of a wider practical study of\ + \ thepotential collaborative compositional applications of gameengine technologies.\ + \ MagNular is a sound toy utilizingcomputer game and physics engine technologies\ + \ to createan animated interface used in conjunction with an externalsound engine\ + \ developed within Max/MSP. The playercontrols virtual magnets that attract or\ + \ repel numerousparticle objects, moving them freely around the virtualspace.\ + \ Particle object collision data is mapped to controlsound onsets and synthesis/DSP\ + \ (Digital SignalProcessing) parameters. The user \"composes\" bycontrolling and\ + \ influencing the simulated physicalbehaviors of the particle objects within the\ + \ animatedinterface.},\n address = {Pittsburgh, PA, United States},\n author =\ + \ {Dolphin, Andy},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177499},\n\ + \ issn = {2220-4806},\n keywords = {Sound Toys, Open Work, Game Engines, Animated\ + \ Interfaces, Max/MSP. },\n pages = {159--160},\n title = {MagNular : Symbolic\ + \ Control of an External Sound Engine Using an Animated Interface},\n url = {http://www.nime.org/proceedings/2009/nime2009_159.pdf},\n\ + \ year = {2009}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177499 + issn: 2220-4806 + keywords: 'Sound Toys, Open Work, Game Engines, Animated Interfaces, Max/MSP. 
' + pages: 159--160 + title: 'MagNular : Symbolic Control of an External Sound Engine Using an Animated + Interface' + url: http://www.nime.org/proceedings/2009/nime2009_159.pdf + year: 2009 + + +- ENTRYTYPE: inproceedings + ID: Feehan2009 + abstract: 'AUDIO ORIENTEERING is a collaborative performance environment in which + physical tokens are used to navigate an invisible sonic landscape. In this paper, + I describe the hardware and software used to implement a prototype audio terrain + with multiple interaction modes and sonic behaviors mapped onto three-dimensional + space. ' + address: 'Pittsburgh, PA, United States' + author: 'Feehan, Noah' + bibtex: "@inproceedings{Feehan2009,\n abstract = {AUDIO ORIENTEERING is a collaborative\ + \ performance environment in which physical tokens are used to navigate an invisible\ + \ sonic landscape. In this paper, I describe the hardware and software used to\ + \ implement a prototype audio terrain with multiple interaction modes and sonic\ + \ behaviors mapped onto three-dimensional space. },\n address = {Pittsburgh, PA,\ + \ United States},\n author = {Feehan, Noah},\n booktitle = {Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179541},\n issn = {2220-4806},\n keywords = {Pitch space, musical\ - \ interface, Carol L. Krumhansl, music psychology, music theory, western tonal\ - \ music, 3D tonality model, spiral of thirds, 3D, Hardware controller, Symmetry\ - \ model },\n pages = {325--330},\n title = {Interaction with Tonal Pitch Spaces},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_325.pdf},\n year = {2008}\n\ - }\n" + \ {10.5281/zenodo.1177505},\n issn = {2220-4806},\n keywords = {wii, 3-d positioning,\ + \ audio terrain, collaborative performance. 
},\n pages = {161--162},\n title =\ + \ {Audio Orienteering -- Navigating an Invisible Terrain},\n url = {http://www.nime.org/proceedings/2009/nime2009_161.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179541 + doi: 10.5281/zenodo.1177505 issn: 2220-4806 - keywords: 'Pitch space, musical interface, Carol L. Krumhansl, music psychology, - music theory, western tonal music, 3D tonality model, spiral of thirds, 3D, Hardware - controller, Symmetry model ' - pages: 325--330 - title: Interaction with Tonal Pitch Spaces - url: http://www.nime.org/proceedings/2008/nime2008_325.pdf - year: 2008 + keywords: 'wii, 3-d positioning, audio terrain, collaborative performance. ' + pages: 161--162 + title: Audio Orienteering -- Navigating an Invisible Terrain + url: http://www.nime.org/proceedings/2009/nime2009_161.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Chordia2008 - abstract: 'We describe a system that can listen to a performance of Indian music - and recognize the raag, the fundamental melodicframework that Indian classical - musicians improvise within.In addition to determining the most likely raag being - performed, the system displays the estimated the likelihoodof each of the other - possible raags, visualizing the changesover time. The system computes the pitch-class - distributionand uses a Bayesian decision rule to classify the resultingtwelve - dimensional feature vector, where each feature represents the relative use of - each pitch class. We show that thesystem achieves high performance on a variety - of sources,making it a viable tool for interactive performance.' 
- address: 'Genoa, Italy' - author: 'Chordia, Parag and Rae, Alex' - bibtex: "@inproceedings{Chordia2008,\n abstract = {We describe a system that can\ - \ listen to a performance of Indian music and recognize the raag, the fundamental\ - \ melodicframework that Indian classical musicians improvise within.In addition\ - \ to determining the most likely raag being performed, the system displays the\ - \ estimated the likelihoodof each of the other possible raags, visualizing the\ - \ changesover time. The system computes the pitch-class distributionand uses a\ - \ Bayesian decision rule to classify the resultingtwelve dimensional feature vector,\ - \ where each feature represents the relative use of each pitch class. We show\ - \ that thesystem achieves high performance on a variety of sources,making it a\ - \ viable tool for interactive performance.},\n address = {Genoa, Italy},\n author\ - \ = {Chordia, Parag and Rae, Alex},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179509},\n\ - \ issn = {2220-4806},\n keywords = {automatic recognition,indian music,nime08,raag,raga},\n\ - \ pages = {331--334},\n title = {Real-Time Raag Recognition for Interactive Music},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_331.pdf},\n year = {2008}\n\ - }\n" + ID: DeJong2009 + abstract: 'This paper presents developments in the technology underlying the cyclotactor, + a finger-based tactile I/O device for musical interaction. These include significant + improvements both in the basic characteristics of tactile interaction and in the + related (vibro)tactile sample rates, latencies, and timing precision. After presenting + the new prototype''s tactile output force landscape, some of the new possibilities + for interaction are discussed, especially those for musical interaction with zero + audio/tactile latency.' 
+ address: 'Pittsburgh, PA, United States' + author: 'de Jong, Staas' + bibtex: "@inproceedings{DeJong2009,\n abstract = {This paper presents developments\ + \ in the technology underlying the cyclotactor, a finger-based tactile I/O device\ + \ for musical interaction. These include significant improvements both in the\ + \ basic characteristics of tactile interaction and in the related (vibro)tactile\ + \ sample rates, latencies, and timing precision. After presenting the new prototype's\ + \ tactile output force landscape, some of the new possibilities for interaction\ + \ are discussed, especially those for musical interaction with zero audio/tactile\ + \ latency.},\n address = {Pittsburgh, PA, United States},\n author = {de Jong,\ + \ Staas},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177591},\n issn = {2220-4806},\n\ + \ keywords = {Musical controller, tactile interface. },\n pages = {163--164},\n\ + \ title = {Developing the Cyclotactor},\n url = {http://www.nime.org/proceedings/2009/nime2009_163.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179509 + doi: 10.5281/zenodo.1177591 issn: 2220-4806 - keywords: 'automatic recognition,indian music,nime08,raag,raga' - pages: 331--334 - title: Real-Time Raag Recognition for Interactive Music - url: http://www.nime.org/proceedings/2008/nime2008_331.pdf - year: 2008 + keywords: 'Musical controller, tactile interface. ' + pages: 163--164 + title: Developing the Cyclotactor + url: http://www.nime.org/proceedings/2009/nime2009_163.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Vinjar2008 - abstract: 'A general CAC1-environment charged with physical-modelling capabilities - is described. 
It combines CommonMusic,ODE and Fluxus in a modular way, making - a powerful andflexible environment for experimenting with physical modelsin composition.Composition - in this respect refers to the generation andmanipulation of structure typically - on or above a note, phrase or voice-level. Compared to efforts in synthesisand - performance little work has gone into applying physicalmodels to composition. - Potentials in composition-applications are presumably large.The implementation - of the physically equipped CAC-environment is described in detail.' - address: 'Genoa, Italy' - author: 'Vinjar, Anders' - bibtex: "@inproceedings{Vinjar2008,\n abstract = {A general CAC1-environment charged\ - \ with physical-modelling capabilities is described. It combines CommonMusic,ODE\ - \ and Fluxus in a modular way, making a powerful andflexible environment for experimenting\ - \ with physical modelsin composition.Composition in this respect refers to the\ - \ generation andmanipulation of structure typically on or above a note, phrase\ - \ or voice-level. Compared to efforts in synthesisand performance little work\ - \ has gone into applying physicalmodels to composition. Potentials in composition-applications\ - \ are presumably large.The implementation of the physically equipped CAC-environment\ - \ is described in detail.},\n address = {Genoa, Italy},\n author = {Vinjar, Anders},\n\ + ID: Schiesser2009 + abstract: 'A MIDI-to-OSC converter is implemented on a commercially available embedded + linux system, tighly integratedwith a microcontroller. A layered method is developed + whichpermits the conversion of serial data such as MIDI to OSCformatted network + packets with an overall system latencybelow 5 milliseconds for common MIDI messages.The + Gumstix embedded computer provide an interesting and modular platform for the + development of such anembedded applications. 
The project shows great potentialto + evolve into a generic sensors-to-OSC ethernet converterwhich should be very useful + for artistic purposes and couldbe used as a fast prototyping interface for gesture + acquisitiondevices.' + address: 'Pittsburgh, PA, United States' + author: 'Schiesser, Sébastien' + bibtex: "@inproceedings{Schiesser2009,\n abstract = {A MIDI-to-OSC converter is\ + \ implemented on a commercially available embedded linux system, tighly integratedwith\ + \ a microcontroller. A layered method is developed whichpermits the conversion\ + \ of serial data such as MIDI to OSCformatted network packets with an overall\ + \ system latencybelow 5 milliseconds for common MIDI messages.The Gumstix embedded\ + \ computer provide an interesting and modular platform for the development of\ + \ such anembedded applications. The project shows great potentialto evolve into\ + \ a generic sensors-to-OSC ethernet converterwhich should be very useful for artistic\ + \ purposes and couldbe used as a fast prototyping interface for gesture acquisitiondevices.},\n\ + \ address = {Pittsburgh, PA, United States},\n author = {Schiesser, S\\'{e}bastien},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179647},\n issn = {2220-4806},\n\ - \ keywords = {Physical Models in composition, CommonMusic, Musical mapping },\n\ - \ pages = {335--338},\n title = {Bending Common Music with Physical Models},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_335.pdf},\n year = {2008}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1179647 - issn: 2220-4806 - keywords: 'Physical Models in composition, CommonMusic, Musical mapping ' - pages: 335--338 - title: Bending Common Music with Physical Models - url: http://www.nime.org/proceedings/2008/nime2008_335.pdf - year: 2008 - - -- ENTRYTYPE: inproceedings - ID: 
Schedel2008 - abstract: 'The Color of Waiting is an interactive theater workwith music, dance, - and video which was developed atSTEIM in Amsterdam and further refined at CMMASin - Morelia Mexico with funding from Meet theComposer. Using Max/MSP/ Jitter a cellist - is able tocontrol sound and video during the performancewhile performing a structured - improvisation inresponse to the dancer''s movement. In order toensure. repeated - performances of The Color o fWaiting , Kinesthetech Sense created the scorecontained - in this paper. Performance is essential tothe practice of time-based art as a - living form, buthas been complicated by the unique challenges ininterpretation - and re-creation posed by worksincorporating technology. Creating a detailed scoreis - one of the ways artists working with technologycan combat obsolescence.' - address: 'Genoa, Italy' - author: 'Schedel, Margaret and Rootberg, Alison and de Martelly, Elizabeth' - bibtex: "@inproceedings{Schedel2008,\n abstract = {The Color of Waiting is an interactive\ - \ theater workwith music, dance, and video which was developed atSTEIM in Amsterdam\ - \ and further refined at CMMASin Morelia Mexico with funding from Meet theComposer.\ - \ Using Max/MSP/ Jitter a cellist is able tocontrol sound and video during the\ - \ performancewhile performing a structured improvisation inresponse to the dancer's\ - \ movement. In order toensure. repeated performances of The Color o fWaiting ,\ - \ Kinesthetech Sense created the scorecontained in this paper. Performance is\ - \ essential tothe practice of time-based art as a living form, buthas been complicated\ - \ by the unique challenges ininterpretation and re-creation posed by worksincorporating\ - \ technology. 
Creating a detailed scoreis one of the ways artists working with\ - \ technologycan combat obsolescence.},\n address = {Genoa, Italy},\n author =\ - \ {Schedel, Margaret and Rootberg, Alison and de Martelly, Elizabeth},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179625},\n issn = {2220-4806},\n keywords\ - \ = {nime08},\n pages = {339--342},\n title = {Scoring an Interactive, Multimedia\ - \ Performance Work},\n url = {http://www.nime.org/proceedings/2008/nime2008_339.pdf},\n\ - \ year = {2008}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1177669},\n issn = {2220-4806},\n\ + \ keywords = {MIDI, Open Sound Control, converter, gumstix },\n pages = {165--168},\n\ + \ title = {midOSC : a Gumstix-Based {MIDI-to-OSC} Converter},\n url = {http://www.nime.org/proceedings/2009/nime2009_165.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179625 + doi: 10.5281/zenodo.1177669 issn: 2220-4806 - keywords: nime08 - pages: 339--342 - title: 'Scoring an Interactive, Multimedia Performance Work' - url: http://www.nime.org/proceedings/2008/nime2008_339.pdf - year: 2008 + keywords: 'MIDI, Open Sound Control, converter, gumstix ' + pages: 165--168 + title: 'midOSC : a Gumstix-Based MIDI-to-OSC Converter' + url: http://www.nime.org/proceedings/2009/nime2009_165.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Endo2008 - abstract: 'We developed a rhythmic instruments ensemble simulator generating animation - using game controllers. The motion of a player is transformed into musical expression - data of MIDI to generate sounds, and MIDI data are transformed into animation - control parameters to generate movies. These animations and music are shown as - the reflection of player performance. Multiple players can perform a musical ensemble - to make more varied patterns of animation. 
Our system is so easy that everyone - can enjoy performing a fusion of music and animation. ' - address: 'Genoa, Italy' - author: 'Endo, Ayaka and Kuhara, Yasuo' - bibtex: "@inproceedings{Endo2008,\n abstract = {We developed a rhythmic instruments\ - \ ensemble simulator generating animation using game controllers. The motion of\ - \ a player is transformed into musical expression data of MIDI to generate sounds,\ - \ and MIDI data are transformed into animation control parameters to generate\ - \ movies. These animations and music are shown as the reflection of player performance.\ - \ Multiple players can perform a musical ensemble to make more varied patterns\ - \ of animation. Our system is so easy that everyone can enjoy performing a fusion\ - \ of music and animation. },\n address = {Genoa, Italy},\n author = {Endo, Ayaka\ - \ and Kuhara, Yasuo},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179529},\n\ - \ issn = {2220-4806},\n keywords = {Wii Remote, Wireless game controller, MIDI,\ - \ Max/MSP, Flash movie, Gesture music and animation. },\n pages = {345--346},\n\ - \ title = {Rhythmic Instruments Ensemble Simulator Generating Animation Movies\ - \ Using {Bluetooth} Game Controller},\n url = {http://www.nime.org/proceedings/2008/nime2008_345.pdf},\n\ - \ year = {2008}\n}\n" + ID: Nagashima2009 + abstract: 'This is a technical and experimental report of parallel processing, using + the "Propeller" chip. Its eight 32 bits processors (cogs) can operate simultaneously, + either independently or cooperatively, sharing common resources through a central + hub. I introduce this unique processor and discuss about the possibility to develop + interactive systems and smart interfaces in media arts, because we need many kinds + of tasks at a same time with NIMErelated systems and installations. 
I will report + about (1) Propeller chip and its powerful IDE, (2) external interfaces for analog/digital + inputs/outputs, (3) VGA/NTSC/PAL video generation, (4) audio signal processing, + and (5) originally-developed MIDI input/output method. I also introduce three + experimental prototype systems.' + address: 'Pittsburgh, PA, United States' + author: 'Nagashima, Yoichi' + bibtex: "@inproceedings{Nagashima2009,\n abstract = {This is a technical and experimental\ + \ report of parallel processing, using the \"Propeller\" chip. Its eight 32 bits\ + \ processors (cogs) can operate simultaneously, either independently or cooperatively,\ + \ sharing common resources through a central hub. I introduce this unique processor\ + \ and discuss about the possibility to develop interactive systems and smart interfaces\ + \ in media arts, because we need many kinds of tasks at a same time with NIMErelated\ + \ systems and installations. I will report about (1) Propeller chip and its powerful\ + \ IDE, (2) external interfaces for analog/digital inputs/outputs, (3) VGA/NTSC/PAL\ + \ video generation, (4) audio signal processing, and (5) originally-developed\ + \ MIDI input/output method. I also introduce three experimental prototype systems.},\n\ + \ address = {Pittsburgh, PA, United States},\n author = {Nagashima, Yoichi},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177635},\n issn = {2220-4806},\n\ + \ keywords = {Propeller, parallel processing, MIDI, sensor, interfaces. 
},\n pages\ + \ = {169--170},\n title = {Parallel Processing System Design with \"Propeller\"\ + \ Processor},\n url = {http://www.nime.org/proceedings/2009/nime2009_169.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179529 + doi: 10.5281/zenodo.1177635 issn: 2220-4806 - keywords: 'Wii Remote, Wireless game controller, MIDI, Max/MSP, Flash movie, Gesture - music and animation. ' - pages: 345--346 - title: Rhythmic Instruments Ensemble Simulator Generating Animation Movies Using - Bluetooth Game Controller - url: http://www.nime.org/proceedings/2008/nime2008_345.pdf - year: 2008 + keywords: 'Propeller, parallel processing, MIDI, sensor, interfaces. ' + pages: 169--170 + title: Parallel Processing System Design with "Propeller" Processor + url: http://www.nime.org/proceedings/2009/nime2009_169.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: McMillen2008 - abstract: 'The demonstration of a series of properly weighted and balanced Bluetooth - sensor bows for violin, viola, cello and bass. ' - address: 'Genoa, Italy' - author: 'McMillen, Keith A.' - bibtex: "@inproceedings{McMillen2008,\n abstract = {The demonstration of a series\ - \ of properly weighted and balanced Bluetooth sensor bows for violin, viola, cello\ - \ and bass. },\n address = {Genoa, Italy},\n author = {McMillen, Keith A.},\n\ + ID: Fyans2009 + abstract: 'The development of new interfaces for musical expressionhas created a + need to study how spectators comprehend newperformance technologies and practices. + As part of a largerproject examining how interactions with technology can becommunicated + with the spectator, we relate our model ofspectator understanding of error to + the NIME discourse surrounding transparency, mapping, skill and success.' + address: 'Pittsburgh, PA, United States' + author: 'Fyans, A. 
Cavan and Gurevich, Michael and Stapleton, Paul' + bibtex: "@inproceedings{Fyans2009,\n abstract = {The development of new interfaces\ + \ for musical expressionhas created a need to study how spectators comprehend\ + \ newperformance technologies and practices. As part of a largerproject examining\ + \ how interactions with technology can becommunicated with the spectator, we relate\ + \ our model ofspectator understanding of error to the NIME discourse surrounding\ + \ transparency, mapping, skill and success.},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Fyans, A. Cavan and Gurevich, Michael and Stapleton, Paul},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179597},\n issn = {2220-4806},\n\ - \ keywords = {Sensor bow, stringed instruments, bluetooth },\n pages = {347--348},\n\ - \ title = {Stage-Worthy Sensor Bows for Stringed Instruments},\n url = {http://www.nime.org/proceedings/2008/nime2008_347.pdf},\n\ - \ year = {2008}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1177519},\n issn = {2220-4806},\n\ + \ keywords = {performance, skill, transparency, design, HCI },\n pages = {171--172},\n\ + \ title = {Where Did It All Go Wrong ? A Model of Error From the Spectator's Perspective},\n\ + \ url = {http://www.nime.org/proceedings/2009/nime2009_171.pdf},\n year = {2009}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179597 + doi: 10.5281/zenodo.1177519 issn: 2220-4806 - keywords: 'Sensor bow, stringed instruments, bluetooth ' - pages: 347--348 - title: Stage-Worthy Sensor Bows for Stringed Instruments - url: http://www.nime.org/proceedings/2008/nime2008_347.pdf - year: 2008 + keywords: 'performance, skill, transparency, design, HCI ' + pages: 171--172 + title: 'Where Did It All Go Wrong ? 
A Model of Error From the Spectator''s Perspective' + url: http://www.nime.org/proceedings/2009/nime2009_171.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Flanigan2008 - abstract: Plink Jet is a robotic musical instrument made from scavenged inkjet printers - and guitar parts. We investigate the expressive capabilities of everyday machine - technology by recontextualizing the relatively high-tech mechanisms of typical - office debris into an electro-acoustic musical instrument. We also explore the - performative relationship between human and machine. - address: 'Genoa, Italy' - author: 'Flanigan, Lesley and Doro, Andrew' - bibtex: "@inproceedings{Flanigan2008,\n abstract = {Plink Jet is a robotic musical\ - \ instrument made from scavenged inkjet printers and guitar parts. We investigate\ - \ the expressive capabilities of everyday machine technology by recontextualizing\ - \ the relatively high-tech mechanisms of typical office debris into an electro-acoustic\ - \ musical instrument. We also explore the performative relationship between human\ - \ and machine.},\n address = {Genoa, Italy},\n author = {Flanigan, Lesley and\ - \ Doro, Andrew},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179533},\n\ - \ issn = {2220-4806},\n keywords = {Interaction Design, Repurposing of Consumer\ - \ Technology, DIY, Performing Technology, Robotics, Automation, Infra-Instrument\ - \ },\n pages = {349--351},\n title = {Plink Jet},\n url = {http://www.nime.org/proceedings/2008/nime2008_349.pdf},\n\ - \ year = {2008}\n}\n" + ID: dAlessandro2009 + abstract: 'In this paper we present new issues and challenges relatedto the vertical + tablet playing. The approach is based on apreviously presented instrument, the + HANDSKETCH. Thisinstrument has now been played regularly for more than twoyears + by several performers. 
Therefore this is an opportunityto propose a better understanding + of the performing strategy.We present the behavior of the whole body as an underlyingaspect + in the manipulation of the instrument.' + address: 'Pittsburgh, PA, United States' + author: 'd''Alessandro, Nicolas and Dutoit, Thierry' + bibtex: "@inproceedings{dAlessandro2009,\n abstract = {In this paper we present\ + \ new issues and challenges relatedto the vertical tablet playing. The approach\ + \ is based on apreviously presented instrument, the HANDSKETCH. Thisinstrument\ + \ has now been played regularly for more than twoyears by several performers.\ + \ Therefore this is an opportunityto propose a better understanding of the performing\ + \ strategy.We present the behavior of the whole body as an underlyingaspect in\ + \ the manipulation of the instrument.},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {d'Alessandro, Nicolas and Dutoit, Thierry},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177465},\n issn = {2220-4806},\n keywords = {graphic\ + \ tablet, playing position, techniques },\n pages = {173--174},\n title = {Advanced\ + \ Techniques for Vertical Tablet Playing A Overview of Two Years of Practicing\ + \ the HandSketch 1.x},\n url = {http://www.nime.org/proceedings/2009/nime2009_173.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179533 + doi: 10.5281/zenodo.1177465 issn: 2220-4806 - keywords: 'Interaction Design, Repurposing of Consumer Technology, DIY, Performing - Technology, Robotics, Automation, Infra-Instrument ' - pages: 349--351 - title: Plink Jet - url: http://www.nime.org/proceedings/2008/nime2008_349.pdf - year: 2008 + keywords: 'graphic tablet, playing position, techniques ' + pages: 173--174 + title: Advanced Techniques for Vertical Tablet Playing A Overview of 
Two Years of + Practicing the HandSketch 1.x + url: http://www.nime.org/proceedings/2009/nime2009_173.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Kamiyama2008 - address: 'Genoa, Italy' - author: 'Kamiyama, Yusuke and Tanaka, Mai and Tanaka, Hiroya' - bibtex: "@inproceedings{Kamiyama2008,\n address = {Genoa, Italy},\n author = {Kamiyama,\ - \ Yusuke and Tanaka, Mai and Tanaka, Hiroya},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179575},\n issn = {2220-4806},\n keywords = {umbrella, musical\ - \ expression, sound generating device, 3D sound system, sound-field arrangement.\ - \ },\n pages = {352--353},\n title = {Oto-Shigure : An Umbrella-Shaped Sound Generator\ - \ for Musical Expression},\n url = {http://www.nime.org/proceedings/2008/nime2008_352.pdf},\n\ - \ year = {2008}\n}\n" + ID: Hoofer2009 + abstract: This paper describes a method for classification of different beat gestures + within traditional beat patterns based on gyroscope data and machine learning + techniques and provides a quantitative evaluation. 
+ address: 'Pittsburgh, PA, United States' + author: 'Höofer, Andreas and Hadjakos, Aristotelis and Mühlhäuser, Max' + bibtex: "@inproceedings{Hoofer2009,\n abstract = {This paper describes a method\ + \ for classification of different beat gestures within traditional beat patterns\ + \ based on gyroscope data and machine learning techniques and provides a quantitative\ + \ evaluation.},\n address = {Pittsburgh, PA, United States},\n author = {H\\''{o}ofer,\ + \ Andreas and Hadjakos, Aristotelis and M\\''{u}hlh\\''{a}user, Max},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177565},\n issn = {2220-4806},\n keywords\ + \ = {nime09},\n pages = {175--176},\n title = {Gyroscope-Based Conducting Gesture\ + \ Recognition},\n url = {http://www.nime.org/proceedings/2009/nime2009_175.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179575 + doi: 10.5281/zenodo.1177565 issn: 2220-4806 - keywords: 'umbrella, musical expression, sound generating device, 3D sound system, - sound-field arrangement. 
' - pages: 352--353 - title: 'Oto-Shigure : An Umbrella-Shaped Sound Generator for Musical Expression' - url: http://www.nime.org/proceedings/2008/nime2008_352.pdf - year: 2008 + keywords: nime09 + pages: 175--176 + title: Gyroscope-Based Conducting Gesture Recognition + url: http://www.nime.org/proceedings/2009/nime2009_175.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Follmer2008 - address: 'Genoa, Italy' - author: 'Follmer, Sean and Warren, Chris and Marquez-Borbon, Adnan' - bibtex: "@inproceedings{Follmer2008,\n address = {Genoa, Italy},\n author = {Follmer,\ - \ Sean and Warren, Chris and Marquez-Borbon, Adnan},\n booktitle = {Proceedings\ + ID: Berdahl2009b + abstract: 'Haptic technology, providing force cues and creating a programmable physical + instrument interface, can assist musicians in making gestures. The finite reaction + time of thehuman motor control system implies that the execution of abrief musical + gesture does not rely on immediate feedbackfrom the senses, rather it is preprogrammed + to some degree.Consequently, we suggest designing relatively simple anddeterministic + interfaces for providing haptic assistance.In this paper, we consider the specific + problem of assisting a musician in selecting pitches from a continuous range.We + build on a prior study by O''Modhrain of the accuracyof pitches selected by musicians + on a Theremin-like hapticinterface. To improve the assistance, we augment the + interface with programmed detents so that the musician can feelthe locations of + equal tempered pitches. Nevertheless, themusician can still perform arbitrary + pitch inflections such asglissandi, falls, and scoops. We investigate various + formsof haptic detents, including fixed detent levels and forcesensitive detent + levels. Preliminary results from a subjecttest confirm improved accuracy in pitch + selection broughtabout by detents.' 
+ address: 'Pittsburgh, PA, United States' + author: 'Berdahl, Edgar and Niemeyer, Günter and Smith, Julius O.' + bibtex: "@inproceedings{Berdahl2009b,\n abstract = {Haptic technology, providing\ + \ force cues and creating a programmable physical instrument interface, can assist\ + \ musicians in making gestures. The finite reaction time of thehuman motor control\ + \ system implies that the execution of abrief musical gesture does not rely on\ + \ immediate feedbackfrom the senses, rather it is preprogrammed to some degree.Consequently,\ + \ we suggest designing relatively simple anddeterministic interfaces for providing\ + \ haptic assistance.In this paper, we consider the specific problem of assisting\ + \ a musician in selecting pitches from a continuous range.We build on a prior\ + \ study by O'Modhrain of the accuracyof pitches selected by musicians on a Theremin-like\ + \ hapticinterface. To improve the assistance, we augment the interface with programmed\ + \ detents so that the musician can feelthe locations of equal tempered pitches.\ + \ Nevertheless, themusician can still perform arbitrary pitch inflections such\ + \ asglissandi, falls, and scoops. We investigate various formsof haptic detents,\ + \ including fixed detent levels and forcesensitive detent levels. 
Preliminary\ + \ results from a subjecttest confirm improved accuracy in pitch selection broughtabout\ + \ by detents.},\n address = {Pittsburgh, PA, United States},\n author = {Berdahl,\ + \ Edgar and Niemeyer, G\\''{u}nter and Smith, Julius O.},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179535},\n issn = {2220-4806},\n keywords = {nime08},\n\ - \ pages = {354--355},\n title = {The Pond : Interactive Multimedia Installation},\n\ - \ url = {http://www.nime.org/proceedings/2008/nime2008_354.pdf},\n year = {2008}\n\ - }\n" + \ doi = {10.5281/zenodo.1177481},\n issn = {2220-4806},\n keywords = {Haptic,\ + \ detent, pitch selection, human motor system, feedback control, response time,\ + \ gravity well },\n pages = {177--182},\n title = {Using Haptics to Assist Performers\ + \ in Making Gestures to a Musical Instrument},\n url = {http://www.nime.org/proceedings/2009/nime2009_177.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179535 + doi: 10.5281/zenodo.1177481 issn: 2220-4806 - keywords: nime08 - pages: 354--355 - title: 'The Pond : Interactive Multimedia Installation' - url: http://www.nime.org/proceedings/2008/nime2008_354.pdf - year: 2008 + keywords: 'Haptic, detent, pitch selection, human motor system, feedback control, + response time, gravity well ' + pages: 177--182 + title: Using Haptics to Assist Performers in Making Gestures to a Musical Instrument + url: http://www.nime.org/proceedings/2009/nime2009_177.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Hartman2008 - address: 'Genoa, Italy' - author: 'Hartman, Ethan and Cooper, Jeff and Spratt, Kyle' - bibtex: "@inproceedings{Hartman2008,\n address = {Genoa, Italy},\n author = {Hartman,\ - \ Ethan and Cooper, Jeff and Spratt, Kyle},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces 
for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179557},\n issn = {2220-4806},\n keywords = {nime08},\n pages\ - \ = {356--357},\n title = {Swing Set : Musical Controllers with Inherent Physical\ - \ Dynamics},\n url = {http://www.nime.org/proceedings/2008/nime2008_356.pdf},\n\ - \ year = {2008}\n}\n" + ID: Berdahl2009a + abstract: 'A haptic musical instrument is an electronic musical instrument that + provides the musician not only with audio feedback but also with force feedback. + By programming feedback controllers to emulate the laws of physics, many haptic + musical instruments have been previously designed thatmimic real acoustic musical + instruments. The controllerprograms have been implemented using finite difference + and(approximate) hybrid digital waveguide models. We presenta novel method for + constructing haptic musical instrumentsin which a haptic device is directly interfaced + with a conventional digital waveguide model by way of a junction element, improving + the quality of the musician''s interactionwith the virtual instrument. We introduce + both the explicitdigital waveguide control junction and the implicit digitalwaveguide + control junction.' + address: 'Pittsburgh, PA, United States' + author: 'Berdahl, Edgar and Niemeyer, Günter and Smith, Julius O.' + bibtex: "@inproceedings{Berdahl2009a,\n abstract = {A haptic musical instrument\ + \ is an electronic musical instrument that provides the musician not only with\ + \ audio feedback but also with force feedback. By programming feedback controllers\ + \ to emulate the laws of physics, many haptic musical instruments have been previously\ + \ designed thatmimic real acoustic musical instruments. The controllerprograms\ + \ have been implemented using finite difference and(approximate) hybrid digital\ + \ waveguide models. 
We presenta novel method for constructing haptic musical instrumentsin\ + \ which a haptic device is directly interfaced with a conventional digital waveguide\ + \ model by way of a junction element, improving the quality of the musician's\ + \ interactionwith the virtual instrument. We introduce both the explicitdigital\ + \ waveguide control junction and the implicit digitalwaveguide control junction.},\n\ + \ address = {Pittsburgh, PA, United States},\n author = {Berdahl, Edgar and Niemeyer,\ + \ G\\''{u}nter and Smith, Julius O.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177479},\n\ + \ issn = {2220-4806},\n keywords = {haptic musical instrument, digital waveguide,\ + \ control junction, explicit, implicit, teleoperation },\n pages = {183--186},\n\ + \ title = {Using Haptic Devices to Interface Directly with Digital Waveguide-Based\ + \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_183.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179557 + doi: 10.5281/zenodo.1177479 issn: 2220-4806 - keywords: nime08 - pages: 356--357 - title: 'Swing Set : Musical Controllers with Inherent Physical Dynamics' - url: http://www.nime.org/proceedings/2008/nime2008_356.pdf - year: 2008 + keywords: 'haptic musical instrument, digital waveguide, control junction, explicit, + implicit, teleoperation ' + pages: 183--186 + title: Using Haptic Devices to Interface Directly with Digital Waveguide-Based Musical + Instruments + url: http://www.nime.org/proceedings/2009/nime2009_183.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Modler2008 - address: 'Genoa, Italy' - author: 'Modler, Paul and Myatt, Tony' - bibtex: "@inproceedings{Modler2008,\n address = {Genoa, Italy},\n author = {Modler,\ - \ Paul and Myatt, Tony},\n booktitle = {Proceedings of the International 
Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179601},\n\ - \ issn = {2220-4806},\n keywords = {nime08},\n pages = {358--359},\n title = {Video\ - \ Based Recognition of Hand Gestures by Neural Networks for the Control of Sound\ - \ and Music},\n url = {http://www.nime.org/proceedings/2008/nime2008_358.pdf},\n\ - \ year = {2008}\n}\n" + ID: Havryliv2009 + abstract: 'The carillon is one of the few instruments that elicit sophisticated + haptic interaction from amateur and professional players alike. Like the piano + keyboard, the velocity of a player''s impact on each carillon key, or baton, affects + the quality of the resultant tone; unlike the piano, each carillon baton returns + a different forcefeedback. Force-feedback varies widely from one baton to the + next across the entire range of the instrument and with further idiosyncratic + variation from one instrument to another. This makes the carillon an ideal candidate + for haptic simulation. The application of synthesized forcefeedback based on an + analysis of forces operating in a typical carillon mechanism offers a blueprint + for the design of an electronic practice clavier and with it the solution to a + problem that has vexed carillonists for centuries, namely the inability to rehearse + repertoire in private. This paper will focus on design and implementation of a + haptic carillon clavier derived from an analysis of the Australian National Carillon + in Canberra. ' + address: 'Pittsburgh, PA, United States' + author: 'Havryliv, Mark and Naghdy, Fazel and Schiemer, Greg and Hurd, Timothy' + bibtex: "@inproceedings{Havryliv2009,\n abstract = {The carillon is one of the few\ + \ instruments that elicit sophisticated haptic interaction from amateur and professional\ + \ players alike. 
Like the piano keyboard, the velocity of a player's impact on\ + \ each carillon key, or baton, affects the quality of the resultant tone; unlike\ + \ the piano, each carillon baton returns a different forcefeedback. Force-feedback\ + \ varies widely from one baton to the next across the entire range of the instrument\ + \ and with further idiosyncratic variation from one instrument to another. This\ + \ makes the carillon an ideal candidate for haptic simulation. The application\ + \ of synthesized forcefeedback based on an analysis of forces operating in a typical\ + \ carillon mechanism offers a blueprint for the design of an electronic practice\ + \ clavier and with it the solution to a problem that has vexed carillonists for\ + \ centuries, namely the inability to rehearse repertoire in private. This paper\ + \ will focus on design and implementation of a haptic carillon clavier derived\ + \ from an analysis of the Australian National Carillon in Canberra. },\n address\ + \ = {Pittsburgh, PA, United States},\n author = {Havryliv, Mark and Naghdy, Fazel\ + \ and Schiemer, Greg and Hurd, Timothy},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177569},\n\ + \ issn = {2220-4806},\n keywords = {Haptics, force-feedback, mechanical analysis.\ + \ },\n pages = {187--192},\n title = {Haptic Carillon -- Analysis \\& Design of\ + \ the Carillon Mechanism},\n url = {http://www.nime.org/proceedings/2009/nime2009_187.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179601 + doi: 10.5281/zenodo.1177569 issn: 2220-4806 - keywords: nime08 - pages: 358--359 - title: Video Based Recognition of Hand Gestures by Neural Networks for the Control - of Sound and Music - url: http://www.nime.org/proceedings/2008/nime2008_358.pdf - year: 2008 + keywords: 'Haptics, force-feedback, mechanical analysis. 
' + pages: 187--192 + title: Haptic Carillon -- Analysis & Design of the Carillon Mechanism + url: http://www.nime.org/proceedings/2009/nime2009_187.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Suzuki2008 - abstract: 'This research aims to develop a novel instrument for sociomusical interaction - where a number of participants can produce sounds by feet in collaboration with - each other. Thedeveloped instrument, beacon, is regarded as embodied soundmedia - product that will provide an interactive environmentaround it. The beacon produces - laser beams lying on theground and rotating. Audio sounds are then produced whenthe - beams pass individual performer''s foot. As the performers are able to control - the pitch and sound length accordingto the foot location and angles facing the - instrument, theperformer''s body motion and foot behavior can be translated into - sound and music in an intuitive manner.' - address: 'Genoa, Italy' - author: 'Suzuki, Kenji and Kyoya, Miho and Kamatani, Takahiro and Uchiyama, Toshiaki' - bibtex: "@inproceedings{Suzuki2008,\n abstract = {This research aims to develop\ - \ a novel instrument for sociomusical interaction where a number of participants\ - \ can produce sounds by feet in collaboration with each other. Thedeveloped instrument,\ - \ beacon, is regarded as embodied soundmedia product that will provide an interactive\ - \ environmentaround it. The beacon produces laser beams lying on theground and\ - \ rotating. Audio sounds are then produced whenthe beams pass individual performer's\ - \ foot. 
As the performers are able to control the pitch and sound length accordingto\ - \ the foot location and angles facing the instrument, theperformer's body motion\ - \ and foot behavior can be translated into sound and music in an intuitive manner.},\n\ - \ address = {Genoa, Italy},\n author = {Suzuki, Kenji and Kyoya, Miho and Kamatani,\ - \ Takahiro and Uchiyama, Toshiaki},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179633},\n\ - \ issn = {2220-4806},\n keywords = {Embodied sound media, Hyper-instrument, Laser\ - \ beams },\n pages = {360--361},\n title = {beacon : Embodied Sound Media Environment\ - \ for Socio-Musical Interaction},\n url = {http://www.nime.org/proceedings/2008/nime2008_360.pdf},\n\ - \ year = {2008}\n}\n" + ID: Leeuw2009 + abstract: 'The Electrumpet is an enhancement of a normal trumpet with a variety + of electronic sensors and buttons. It is a new hybrid instrument that facilitates + simultaneous acoustic and electronic playing. The normal playing skills of a trumpet + player apply to the new instrument. The placing of the buttons and sensors is + not a hindrance to acoustic use of the instrument and they are conveniently located. + The device can be easily attached to and detached from a normal Bb-trumpet. The + device has a wireless connection with the computer through Bluetooth-serial (Arduino). + Audio and data processing in the computer is effected by three separate instances + of MAX/MSP connected through OSC (controller data) and Soundflower (sound data). + The current prototype consists of 7 analogue sensors (4 valve-like potentiometers, + 2 pressure sensors, 1 "Ribbon" controller) and 9 digital switches. An LCD screen + that is controlled by a separate Arduino (mini) is attached to the trumpet and + displays the current controller settings that are sent through a serial connection. 
' + address: 'Pittsburgh, PA, United States' + author: 'Leeuw, Hans' + bibtex: "@inproceedings{Leeuw2009,\n abstract = {The Electrumpet is an enhancement\ + \ of a normal trumpet with a variety of electronic sensors and buttons. It is\ + \ a new hybrid instrument that facilitates simultaneous acoustic and electronic\ + \ playing. The normal playing skills of a trumpet player apply to the new instrument.\ + \ The placing of the buttons and sensors is not a hindrance to acoustic use of\ + \ the instrument and they are conveniently located. The device can be easily attached\ + \ to and detached from a normal Bb-trumpet. The device has a wireless connection\ + \ with the computer through Bluetooth-serial (Arduino). Audio and data processing\ + \ in the computer is effected by three separate instances of MAX/MSP connected\ + \ through OSC (controller data) and Soundflower (sound data). The current prototype\ + \ consists of 7 analogue sensors (4 valve-like potentiometers, 2 pressure sensors,\ + \ 1 \"Ribbon\" controller) and 9 digital switches. An LCD screen that is controlled\ + \ by a separate Arduino (mini) is attached to the trumpet and displays the current\ + \ controller settings that are sent through a serial connection. },\n address\ + \ = {Pittsburgh, PA, United States},\n author = {Leeuw, Hans},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177613},\n issn = {2220-4806},\n keywords = {Trumpet,\ + \ multiple Arduinos, Bluetooth, LCD, low latency, OSC, MAX/MSP. 
},\n pages = {193--198},\n\ + \ title = {The Electrumpet , a Hybrid Electro-Acoustic Instrument},\n url = {http://www.nime.org/proceedings/2009/nime2009_193.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179633 + doi: 10.5281/zenodo.1177613 issn: 2220-4806 - keywords: 'Embodied sound media, Hyper-instrument, Laser beams ' - pages: 360--361 - title: 'beacon : Embodied Sound Media Environment for Socio-Musical Interaction' - url: http://www.nime.org/proceedings/2008/nime2008_360.pdf - year: 2008 + keywords: 'Trumpet, multiple Arduinos, Bluetooth, LCD, low latency, OSC, MAX/MSP. ' + pages: 193--198 + title: 'The Electrumpet , a Hybrid Electro-Acoustic Instrument' + url: http://www.nime.org/proceedings/2009/nime2009_193.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Sjuve2008 - abstract: 'This paper describes the development of a wireless wearablecontroller, - GO, for both sound processing and interactionwith wearable lights. Pure Data is - used for sound processing.The GO prototype is built using a PIC microcontroller - usingvarious sensors for receiving information from physicalmovements.' - address: 'Genoa, Italy' - author: 'Sjuve, Eva' - bibtex: "@inproceedings{Sjuve2008,\n abstract = {This paper describes the development\ - \ of a wireless wearablecontroller, GO, for both sound processing and interactionwith\ - \ wearable lights. Pure Data is used for sound processing.The GO prototype is\ - \ built using a PIC microcontroller usingvarious sensors for receiving information\ - \ from physicalmovements.},\n address = {Genoa, Italy},\n author = {Sjuve, Eva},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179629},\n issn = {2220-4806},\n\ - \ keywords = {Wireless controller, Pure Data, Gestural interface, Interactive\ - \ Lights. 
},\n pages = {362--363},\n title = {Prototype GO : Wireless Controller\ - \ for Pure Data},\n url = {http://www.nime.org/proceedings/2008/nime2008_362.pdf},\n\ - \ year = {2008}\n}\n" + ID: Gallin2009 + abstract: 'Starting from a parallelism between the effervescence of the 1920s in + the exploration of new ways of controlling music and the actual revolution in + the design of new control possibilities, this paper aims to explore the possibilities + of rethinking instruments from the past towards instruments of the future. Through + three examples (the experience of the Persephone, the design of the Persephone2 + and the 4 strings ribbon cello project), I will explore the contemporary notion + of “instruments of the future” vs. controls that people expect from such instruments + nowadays.' + address: 'Pittsburgh, PA, United States' + author: 'Gallin, Emmanuelle and Sirguy, Marc' + bibtex: "@inproceedings{Gallin2009,\n abstract = {Starting from a parallelism between\ + \ the effervescence of the 1920s in the exploration of new ways of controlling\ + \ music and the actual revolution in the design of new control possibilities,\ + \ this paper aims to explore the possibilities of rethinking instruments from\ + \ the past towards instruments of the future. Through three examples (the experience\ + \ of the Persephone, the design of the Persephone2 and the 4 strings ribbon cello\ + \ project), I will explore the contemporary notion of “instruments of the future”\ + \ vs. controls that people expect from such instruments nowadays.},\n address\ + \ = {Pittsburgh, PA, United States},\n author = {Gallin, Emmanuelle and Sirguy,\ + \ Marc},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177521},\n issn = {2220-4806},\n\ + \ keywords = {Controller, Sensor, MIDI, USB, Computer Music, ribbon controllers,\ + \ ribbon cello. 
},\n pages = {199--202},\n title = {Sensor Technology and the\ + \ Remaking of Instruments from the Past},\n url = {http://www.nime.org/proceedings/2009/nime2009_199.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179629 + doi: 10.5281/zenodo.1177521 issn: 2220-4806 - keywords: 'Wireless controller, Pure Data, Gestural interface, Interactive Lights. ' - pages: 362--363 - title: 'Prototype GO : Wireless Controller for Pure Data' - url: http://www.nime.org/proceedings/2008/nime2008_362.pdf - year: 2008 + keywords: 'Controller, Sensor, MIDI, USB, Computer Music, ribbon controllers, ribbon + cello. ' + pages: 199--202 + title: Sensor Technology and the Remaking of Instruments from the Past + url: http://www.nime.org/proceedings/2009/nime2009_199.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Macrae2008 - address: 'Genoa, Italy' - author: 'Macrae, Robert and Dixon, Simon' - bibtex: "@inproceedings{Macrae2008,\n address = {Genoa, Italy},\n author = {Macrae,\ - \ Robert and Dixon, Simon},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179593},\n\ - \ issn = {2220-4806},\n keywords = {Graphical Interface, Computer Game, MIDI Display\ - \ },\n pages = {364--365},\n title = {From Toy to Tutor : Note-Scroller is a Game\ - \ to Teach Music},\n url = {http://www.nime.org/proceedings/2008/nime2008_364.pdf},\n\ - \ year = {2008}\n}\n" + ID: Nicolls2009 + abstract: '“The reinvigoration of the role of the human body” - as John Richards + recently described trends in using homemade electronics to move away from laptop + performance [1] - is mirrored in an ambition of instrumentalists to interact more + closely with the electronic sounds they are helping to create. 
For these players, + there has often been a one-way street of the ‘instrument feeds MAX patch’ paradigm + and arguments are made here for more complete performance feedback systems. Instrumentalists + come to the question of interactivity with a whole array of gestures, sounds and + associations already in place, so must choose carefully the means by which the + instrumental performance is augmented. Frances-Marie Uitti [2] is a pioneer in + the field, creating techniques to amplify the cellist’s innate performative gestures + and in parallel developing the instrument. This paper intends to give an overview + of the author’s work in developing interactivity in piano performance, mechanical + augmentation of the piano and possible structural developments of the instrument + to bring it into the twenty-first century.' + address: 'Pittsburgh, PA, United States' + author: 'Nicolls, Sarah' + bibtex: "@inproceedings{Nicolls2009,\n abstract = {“The reinvigoration of the role\ + \ of the human body” - as John Richards recently described trends in using homemade\ + \ electronics to move away from laptop performance [1] - is mirrored in an ambition\ + \ of instrumentalists to interact more closely with the electronic sounds they\ + \ are helping to create. For these players, there has often been a one-way street\ + \ of the ‘instrument feeds MAX patch’ paradigm and arguments are made here for\ + \ more complete performance feedback systems. Instrumentalists come to the question\ + \ of interactivity with a whole array of gestures, sounds and associations already\ + \ in place, so must choose carefully the means by which the instrumental performance\ + \ is augmented. Frances-Marie Uitti [2] is a pioneer in the field, creating techniques\ + \ to amplify the cellist’s innate performative gestures and in parallel developing\ + \ the instrument. 
This paper intends to give an overview of the author’s work\ + \ in developing interactivity in piano performance, mechanical augmentation of\ + \ the piano and possible structural developments of the instrument to bring it\ + \ into the twenty-first century.},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Nicolls, Sarah},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177641},\n\ + \ issn = {2220-4806},\n keywords = {sensor, gestural, technology, performance,\ + \ piano, motors, interactive },\n pages = {203--206},\n title = {Twenty-First\ + \ Century Piano},\n url = {http://www.nime.org/proceedings/2009/nime2009_203.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179593 + doi: 10.5281/zenodo.1177641 issn: 2220-4806 - keywords: 'Graphical Interface, Computer Game, MIDI Display ' - pages: 364--365 - title: 'From Toy to Tutor : Note-Scroller is a Game to Teach Music' - url: http://www.nime.org/proceedings/2008/nime2008_364.pdf - year: 2008 + keywords: 'sensor, gestural, technology, performance, piano, motors, interactive ' + pages: 203--206 + title: Twenty-First Century Piano + url: http://www.nime.org/proceedings/2009/nime2009_203.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Favilla2008 - abstract: 'This demonstration presents three new augmented and metasaxophone interface/instruments, - built by the Bent LeatherBand. The instruments are designed for virtuosic liveperformance - and make use of Sukandar Kartadinata''s Gluion[OSC] interfaces. The project rationale - and research outcomesfor the first twelve months is discussed. Instruments/interfacesdescribed - include the Gluisop, Gluialto and Leathersop.' 
- address: 'Genoa, Italy' - author: 'Favilla, Stuart and Cannon, Joanne and Hicks, Tony and Chant, Dale and - Favilla, Paris' - bibtex: "@inproceedings{Favilla2008,\n abstract = {This demonstration presents three\ - \ new augmented and metasaxophone interface/instruments, built by the Bent LeatherBand.\ - \ The instruments are designed for virtuosic liveperformance and make use of Sukandar\ - \ Kartadinata's Gluion[OSC] interfaces. The project rationale and research outcomesfor\ - \ the first twelve months is discussed. Instruments/interfacesdescribed include\ - \ the Gluisop, Gluialto and Leathersop.},\n address = {Genoa, Italy},\n author\ - \ = {Favilla, Stuart and Cannon, Joanne and Hicks, Tony and Chant, Dale and Favilla,\ - \ Paris},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179531},\n issn = {2220-4806},\n\ - \ keywords = {Augmented saxophone, Gluion, OSC, virtuosic performance systems\ - \ },\n pages = {366--369},\n title = {Gluisax : Bent Leather Band's Augmented\ - \ Saxophone Project},\n url = {http://www.nime.org/proceedings/2008/nime2008_366.pdf},\n\ - \ year = {2008}\n}\n" + ID: Johnston2009 + abstract: 'In this paper we describe an interaction framework which classifies musicians'' + interactions with virtual musical instruments into three modes: instrumental, + ornamental and conversational. We argue that conversational interactions are the + most difficult to design for, but also the most interesting. To illustrate our + approach to designing for conversational interactions we describe the performance + work Partial Reflections 3 for two clarinets and interactive software. This software + uses simulated physical models to create a virtual sound sculpture which both + responds to and produces sounds and visuals.' 
+ address: 'Pittsburgh, PA, United States' + author: 'Johnston, Andrew and Candy, Linda and Edmonds, Ernest' + bibtex: "@inproceedings{Johnston2009,\n abstract = {In this paper we describe an\ + \ interaction framework which classifies musicians' interactions with virtual\ + \ musical instruments into three modes: instrumental, ornamental and conversational.\ + \ We argue that conversational interactions are the most difficult to design for,\ + \ but also the most interesting. To illustrate our approach to designing for conversational\ + \ interactions we describe the performance work Partial Reflections 3 for two\ + \ clarinets and interactive software. This software uses simulated physical models\ + \ to create a virtual sound sculpture which both responds to and produces sounds\ + \ and visuals.},\n address = {Pittsburgh, PA, United States},\n author = {Johnston,\ + \ Andrew and Candy, Linda and Edmonds, Ernest},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177585},\n issn = {2220-4806},\n keywords = {Music, instruments,\ + \ interaction. },\n pages = {207--212},\n title = {Designing for Conversational\ + \ Interaction},\n url = {http://www.nime.org/proceedings/2009/nime2009_207.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179531 + doi: 10.5281/zenodo.1177585 issn: 2220-4806 - keywords: 'Augmented saxophone, Gluion, OSC, virtuosic performance systems ' - pages: 366--369 - title: 'Gluisax : Bent Leather Band''s Augmented Saxophone Project' - url: http://www.nime.org/proceedings/2008/nime2008_366.pdf - year: 2008 + keywords: 'Music, instruments, interaction. 
' + pages: 207--212 + title: Designing for Conversational Interaction + url: http://www.nime.org/proceedings/2009/nime2009_207.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: DeJong2008 - address: 'Genoa, Italy' - author: 'de Jong, Staas' - bibtex: "@inproceedings{DeJong2008,\n address = {Genoa, Italy},\n author = {de Jong,\ - \ Staas},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179571},\n issn = {2220-4806},\n\ - \ keywords = {nime08},\n pages = {370--371},\n title = {The Cyclotactor : Towards\ - \ a Tactile Platform for Musical Interaction},\n url = {http://www.nime.org/proceedings/2008/nime2008_370.pdf},\n\ - \ year = {2008}\n}\n" + ID: Gurevich2009 + abstract: 'In this paper we discuss the concept of style, focusing in particular + on methods of designing new instruments that facilitate the cultivation and recognition + of style. We distinguishbetween style and structure of an interaction and discuss + thesignificance of this formulation within the context of NIME.Two workshops that + were conducted to explore style in interaction design are described, from which + we identify elements of style that can inform and influence the design process. + From these, we suggest steps toward designing forstyle in new musical interactions.' + address: 'Pittsburgh, PA, United States' + author: 'Gurevich, Michael and Stapleton, Paul and Bennett, Peter' + bibtex: "@inproceedings{Gurevich2009,\n abstract = {In this paper we discuss the\ + \ concept of style, focusing in particular on methods of designing new instruments\ + \ that facilitate the cultivation and recognition of style. 
We distinguishbetween\ + \ style and structure of an interaction and discuss thesignificance of this formulation\ + \ within the context of NIME.Two workshops that were conducted to explore style\ + \ in interaction design are described, from which we identify elements of style\ + \ that can inform and influence the design process. From these, we suggest steps\ + \ toward designing forstyle in new musical interactions.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Gurevich, Michael and Stapleton, Paul and Bennett,\ + \ Peter},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177563},\n issn = {2220-4806},\n\ + \ keywords = {expression, style, structure, skill, virtuosity },\n pages = {213--217},\n\ + \ title = {Designing for Style in New Musical Interactions},\n url = {http://www.nime.org/proceedings/2009/nime2009_213.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179571 + doi: 10.5281/zenodo.1177563 issn: 2220-4806 - keywords: nime08 - pages: 370--371 - title: 'The Cyclotactor : Towards a Tactile Platform for Musical Interaction' - url: http://www.nime.org/proceedings/2008/nime2008_370.pdf - year: 2008 + keywords: 'expression, style, structure, skill, virtuosity ' + pages: 213--217 + title: Designing for Style in New Musical Interactions + url: http://www.nime.org/proceedings/2009/nime2009_213.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Demey2008 - abstract: 'The Musical Synchrotron is a software interface that connects wireless - motion sensors to a real-time interactive environment (Pure Data, Max/MSP). In - addition to the measurement of movement, the system provides audio playback and - visual feedback. The Musical Synchrotron outputs a score with the degree in which - synchronization with the presented music is successful. 
The interface has been - used to measure how people move in response to music. The system was used for - experiments at public events. ' - address: 'Genoa, Italy' - author: 'Demey, Michiel and Leman, Marc and Bossuyt, Frederick and Vanfleteren, - Jan' - bibtex: "@inproceedings{Demey2008,\n abstract = {The Musical Synchrotron is a software\ - \ interface that connects wireless motion sensors to a real-time interactive environment\ - \ (Pure Data, Max/MSP). In addition to the measurement of movement, the system\ - \ provides audio playback and visual feedback. The Musical Synchrotron outputs\ - \ a score with the degree in which synchronization with the presented music is\ - \ successful. The interface has been used to measure how people move in response\ - \ to music. The system was used for experiments at public events. },\n address\ - \ = {Genoa, Italy},\n author = {Demey, Michiel and Leman, Marc and Bossuyt, Frederick\ - \ and Vanfleteren, Jan},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179521},\n\ - \ issn = {2220-4806},\n keywords = {Wireless sensors, tempo perception, social\ - \ interaction, music and movement, embodied music cognition },\n pages = {372--373},\n\ - \ title = {The Musical Synchrotron : Using Wireless Motion Sensors to Study How\ - \ Social Interaction Affects Synchronization with Musical Tempo},\n url = {http://www.nime.org/proceedings/2008/nime2008_372.pdf},\n\ - \ year = {2008}\n}\n" + ID: Cook2009 + abstract: 'This paper revisits/extends “Principles for Designing Computer Music + Controllers” (NIME 2001), subsequently updated in a NIME 2007 keynote address. + A redesign of SqueezeVox Maggie (a reoccurring NIME character) is used as an example + of which principles have held fast over the years, and which have changed due + to advances in technology. A few new principles are also added to the list.' 
+ address: 'Pittsburgh, PA, United States' + author: 'Cook, Perry R.' + bibtex: "@inproceedings{Cook2009,\n abstract = {This paper revisits/extends “Principles\ + \ for Designing Computer Music Controllers” (NIME 2001), subsequently updated\ + \ in a NIME 2007 keynote address. A redesign of SqueezeVox Maggie (a reoccurring\ + \ NIME character) is used as an example of which principles have held fast over\ + \ the years, and which have changed due to advances in technology. A few new principles\ + \ are also added to the list.},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Cook, Perry R.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177493},\n\ + \ issn = {2220-4806},\n keywords = {HCI, Composed Instruments, Voice Synthesis,\ + \ Wireless, Batteries, Laptop Orchestras, SenSAs.},\n pages = {218--221},\n title\ + \ = {Re-Designing Principles for Computer Music Controllers : a Case Study of\ + \ SqueezeVox Maggie},\n url = {http://www.nime.org/proceedings/2009/nime2009_218.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179521 + doi: 10.5281/zenodo.1177493 issn: 2220-4806 - keywords: 'Wireless sensors, tempo perception, social interaction, music and movement, - embodied music cognition ' - pages: 372--373 - title: 'The Musical Synchrotron : Using Wireless Motion Sensors to Study How Social - Interaction Affects Synchronization with Musical Tempo' - url: http://www.nime.org/proceedings/2008/nime2008_372.pdf - year: 2008 + keywords: 'HCI, Composed Instruments, Voice Synthesis, Wireless, Batteries, Laptop + Orchestras, SenSAs.' 
+ pages: 218--221 + title: 'Re-Designing Principles for Computer Music Controllers : a Case Study of + SqueezeVox Maggie' + url: http://www.nime.org/proceedings/2009/nime2009_218.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Sheffield2016 - abstract: |- - The Haptic Capstans are two rotational force-feedback knobs - circumscribed by eye-catching LED rings. In this work, the Haptic Capstans are - programmed using physical models in order to experiment with audio-visual-haptic - interactions for music applications. - address: 'Brisbane, Australia' - author: Eric Sheffield and Edgar Berdahl and Andrew Pfalz - bibtex: "@inproceedings{Sheffield2016,\n abstract = {The Haptic Capstans are two\ - \ rotational force-feedback knobs\ncircumscribed by eye-catching LED rings. In\ - \ this work, the Haptic Capstans are\nprogrammed using physical models in order\ - \ to experiment with audio-visual-haptic\ninteractions for music applications.},\n\ - \ address = {Brisbane, Australia},\n author = {Eric Sheffield and Edgar Berdahl\ - \ and Andrew Pfalz},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176002},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {1--2},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {The Haptic Capstans:\ - \ Rotational Force Feedback for Music using a FireFader Derivative Device},\n\ - \ track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00012.pdf},\n\ - \ year = {2016}\n}\n" + ID: Kapuscinski2009 + abstract: 'This paper reports on initial stages of research leading to the development + of an intermedia performance Counterlines --- a duet for Disklavier and Wacom + Cintiq, in which both performers generate audiovisual gestures that relate to + each other contrapuntally. 
The pianist generates graphic elements while playing + music and the graphic performer generates piano notes by drawing lines. The paper + focuses on interfacing sounds and images performed by the pianist. It provides + rationale for the choice of materials of great simplicity and describes our approach + to mapping. ' + address: 'Pittsburgh, PA, United States' + author: 'Kapuscinski, Jaroslaw and Sanchez, Javier' + bibtex: "@inproceedings{Kapuscinski2009,\n abstract = {This paper reports on initial\ + \ stages of research leading to the development of an intermedia performance Counterlines\ + \ --- a duet for Disklavier and Wacom Cintiq, in which both performers generate\ + \ audiovisual gestures that relate to each other contrapuntally. The pianist generates\ + \ graphic elements while playing music and the graphic performer generates piano\ + \ notes by drawing lines. The paper focuses on interfacing sounds and images performed\ + \ by the pianist. It provides rationale for the choice of materials of great simplicity\ + \ and describes our approach to mapping. 
},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Kapuscinski, Jaroslaw and Sanchez, Javier},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177597},\n issn = {2220-4806},\n keywords\ + \ = {intermedia, Disklavier, piano, Wacom Cintiq, mapping, visual music },\n pages\ + \ = {222--225},\n title = {Interfacing Graphic and Musical Elements in Counterlines},\n\ + \ url = {http://www.nime.org/proceedings/2009/nime2009_222.pdf},\n year = {2009}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176002 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177597 issn: 2220-4806 - pages: 1--2 - publisher: Queensland Conservatorium Griffith University - title: 'The Haptic Capstans: Rotational Force Feedback for Music using a FireFader - Derivative Device' - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper00012.pdf - year: 2016 + keywords: 'intermedia, Disklavier, piano, Wacom Cintiq, mapping, visual music ' + pages: 222--225 + title: Interfacing Graphic and Musical Elements in Counterlines + url: http://www.nime.org/proceedings/2009/nime2009_222.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Long2016 - abstract: 'Musical robots provide artists and musicians with the ability to realise - complex new musical ideas in real acoustic space. However, most musical robots - are created with open-loop control systems, many of which require time consuming - calibration and do not reach the level of reliability of other electronic musical - instruments such as synthesizers. 
This paper outlines the construction of a new - robotic musical instrument, the Closed-Loop Robotic Glockenspiel, and discusses - the improved robustness, usability and expressive capabilities that closed-loop - control systems and embedded musical information retrieval processes can afford - robotic musical instruments. The hardware design of the instrument is described - along with the firmware of the embedded MIR system. The result is a new desktop - robotic musical instrument that is capable of continuous unaided re-calibration, - is as simple to use as more traditional hardware electronic sound-sources and - provides musicians with new expressive capabilities. ' - address: 'Brisbane, Australia' - author: Jason Long and Dale Carnegie and Ajay Kapur - bibtex: "@inproceedings{Long2016,\n abstract = {Musical robots provide artists and\ - \ musicians with the ability to realise complex new musical ideas in real acoustic\ - \ space. However, most musical robots are created with open-loop control systems,\ - \ many of which require time consuming calibration and do not reach the level\ - \ of reliability of other electronic musical instruments such as synthesizers.\ - \ This paper outlines the construction of a new robotic musical instrument, the\ - \ Closed-Loop Robotic Glockenspiel, and discusses the improved robustness, usability\ - \ and expressive capabilities that closed-loop control systems and embedded musical\ - \ information retrieval processes can afford robotic musical instruments. The\ - \ hardware design of the instrument is described along with the firmware of the\ - \ embedded MIR system. The result is a new desktop robotic musical instrument\ - \ that is capable of continuous unaided re-calibration, is as simple to use as\ - \ more traditional hardware electronic sound-sources and provides musicians with\ - \ new expressive capabilities. 
},\n address = {Brisbane, Australia},\n author\ - \ = {Jason Long and Dale Carnegie and Ajay Kapur},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3964607},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {2--7},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {The Closed-Loop Robotic Glockenspiel: Improving Musical Robots with\ - \ Embedded Musical Information Retrieval},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0002.pdf},\n\ - \ year = {2016}\n}\n" + ID: Polfreman2009 + abstract: 'Music composition on computer is a challenging task, involving a range + of data types to be managed within a single software tool. A composition typically + comprises a complex arrangement of material, with many internal relationships + between data in different locations repetition, inversion, retrograde, reversal + and more sophisticated transformations. The creation of such complex artefacts + is labour intensive, and current systems typically place a significant cognitive + burden on the composer in terms of maintaining a work as a coherent whole. FrameWorks + 3D is an attempt to improve support for composition tasks within a Digital Audio + Workstation (DAW) style environment via a novel three-dimensional (3D) user-interface. + In addition to the standard paradigm of tracks, regions and tape recording analogy, + FrameWorks displays hierarchical and transformational information in a single, + fully navigable workspace. The implementation combines Java with Max/MSP to create + a cross-platform, user-extensible package and will be used to assess the viability + of such a tool and to develop the ideas further. 
' + address: 'Pittsburgh, PA, United States' + author: 'Polfreman, Richard' + bibtex: "@inproceedings{Polfreman2009,\n abstract = {Music composition on computer\ + \ is a challenging task, involving a range of data types to be managed within\ + \ a single software tool. A composition typically comprises a complex arrangement\ + \ of material, with many internal relationships between data in different locations\ + \ repetition, inversion, retrograde, reversal and more sophisticated transformations.\ + \ The creation of such complex artefacts is labour intensive, and current systems\ + \ typically place a significant cognitive burden on the composer in terms of maintaining\ + \ a work as a coherent whole. FrameWorks 3D is an attempt to improve support for\ + \ composition tasks within a Digital Audio Workstation (DAW) style environment\ + \ via a novel three-dimensional (3D) user-interface. In addition to the standard\ + \ paradigm of tracks, regions and tape recording analogy, FrameWorks displays\ + \ hierarchical and transformational information in a single, fully navigable workspace.\ + \ The implementation combines Java with Max/MSP to create a cross-platform, user-extensible\ + \ package and will be used to assess the viability of such a tool and to develop\ + \ the ideas further. },\n address = {Pittsburgh, PA, United States},\n author\ + \ = {Polfreman, Richard},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177661},\n\ + \ issn = {2220-4806},\n keywords = {Digital Audio Workstation, graphical user-interfaces,\ + \ 3D graphics, Max/MSP, Java. 
},\n pages = {226--229},\n title = {FrameWorks {3D}\ + \ : Composition in the Third Dimension},\n url = {http://www.nime.org/proceedings/2009/nime2009_226.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3964607 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177661 issn: 2220-4806 - pages: 2--7 - publisher: Queensland Conservatorium Griffith University - title: 'The Closed-Loop Robotic Glockenspiel: Improving Musical Robots with Embedded - Musical Information Retrieval' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0002.pdf - year: 2016 + keywords: 'Digital Audio Workstation, graphical user-interfaces, 3D graphics, Max/MSP, + Java. ' + pages: 226--229 + title: 'FrameWorks 3D : Composition in the Third Dimension' + url: http://www.nime.org/proceedings/2009/nime2009_226.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Carey2016a - abstract: "This paper describes a package of modular tools developed for use\nwith\ - \ virtual reality peripherals to allow for music composition, performance and\n\ - viewing in `real-time' across networks within a spectralist paradigm.\nThe central\ - \ tool is SpectraScore, a Max/MSP abstraction for analysing audio\nsignals and\ - \ ranking the resultant partials according to their harmonic pitch\nclass profiles.\ - \ This data triggers the generation of objects in a virtual world\nbased on the\ - \ `topography' of the source sound, which is experienced\nby network clients via\ - \ Google Cardboard headsets. They use their movements to\ntrigger audio in various\ - \ microtonal tunings and incidentally generate scores.\nThese scores are transmitted\ - \ to performers who improvise music from this notation\nusing Leap Motion Theremins,\ - \ also in VR space. Finally, the performance is\nbroadcast via a web audio stream,\ - \ which can be heard by the composer-audience in\nthe initial virtual world. 
The\ - \ `real-time composers' and performers\nare not required to have any prior knowledge\ - \ of complex computer systems and\ninteract either using head position tracking,\ - \ or with a Oculus Rift DK2 and a\nLeap Motion Camera. " - address: 'Brisbane, Australia' - author: Benedict Carey - bibtex: "@inproceedings{Carey2016a,\n abstract = {This paper describes a package\ - \ of modular tools developed for use\nwith virtual reality peripherals to allow\ - \ for music composition, performance and\nviewing in `real-time' across networks\ - \ within a spectralist paradigm.\nThe central tool is SpectraScore, a Max/MSP\ - \ abstraction for analysing audio\nsignals and ranking the resultant partials\ - \ according to their harmonic pitch\nclass profiles. This data triggers the generation\ - \ of objects in a virtual world\nbased on the `topography' of the source sound,\ - \ which is experienced\nby network clients via Google Cardboard headsets. They\ - \ use their movements to\ntrigger audio in various microtonal tunings and incidentally\ - \ generate scores.\nThese scores are transmitted to performers who improvise music\ - \ from this notation\nusing Leap Motion Theremins, also in VR space. Finally,\ - \ the performance is\nbroadcast via a web audio stream, which can be heard by\ - \ the composer-audience in\nthe initial virtual world. The `real-time composers'\ - \ and performers\nare not required to have any prior knowledge of complex computer\ - \ systems and\ninteract either using head position tracking, or with a Oculus\ - \ Rift DK2 and a\nLeap Motion Camera. 
},\n address = {Brisbane, Australia},\n\ - \ author = {Benedict Carey},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176004},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {3--4},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {SpectraScore VR:\ - \ Networkable virtual reality software tools for real-time composition and performance},\n\ - \ track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00022.pdf},\n\ - \ year = {2016}\n}\n" + ID: Freed2009 + abstract: 'A compendium of foundational circuits for interfacing resistive pressure + and position sensors is presented with example applications for music controllers + and tangible interfaces. ' + address: 'Pittsburgh, PA, United States' + author: 'Freed, Adrian' + bibtex: "@inproceedings{Freed2009,\n abstract = {A compendium of foundational circuits\ + \ for interfacing resistive pressure and position sensors is presented with example\ + \ applications for music controllers and tangible interfaces. },\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Freed, Adrian},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177515},\n issn = {2220-4806},\n keywords = {Piezoresistive\ + \ Touch Sensor Pressure Sensing Current Steering Multitouch. 
},\n pages = {230--235},\n\ + \ title = {Novel and Forgotten Current-steering Techniques for Resistive Multitouch,\ + \ Duotouch, and Polytouch Position Sensing with Pressure},\n url = {http://www.nime.org/proceedings/2009/nime2009_230.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176004 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177515 issn: 2220-4806 - pages: 3--4 - publisher: Queensland Conservatorium Griffith University - title: 'SpectraScore VR: Networkable virtual reality software tools for real-time - composition and performance' - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper00022.pdf - year: 2016 + keywords: 'Piezoresistive Touch Sensor Pressure Sensing Current Steering Multitouch. ' + pages: 230--235 + title: 'Novel and Forgotten Current-steering Techniques for Resistive Multitouch, + Duotouch, and Polytouch Position Sensing with Pressure' + url: http://www.nime.org/proceedings/2009/nime2009_230.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Chang2016 - abstract: 'This paper discusses a new approach to acoustic amplitude modulation. - Building on prior work with electromagnetic augmentation of acoustic instruments, - we begin with a theory of operation model to describe the mechanical forces necessary - to produce acoustic amplitude modulation synthesis. We then propose an implementation - of our model as an instrumental prototype. The results illustrate that our acoustic - amplitude modulation system produces controllable sideband components, and that - synthesis generated from our corresponding numerical dynamic system model closely - approximates the acoustic result of the physical system.' - address: 'Brisbane, Australia' - author: Herbert H.C. Chang and Spencer Topel - bibtex: "@inproceedings{Chang2016,\n abstract = {This paper discusses a new approach\ - \ to acoustic amplitude modulation. 
Building on prior work with electromagnetic\ - \ augmentation of acoustic instruments, we begin with a theory of operation model\ - \ to describe the mechanical forces necessary to produce acoustic amplitude modulation\ - \ synthesis. We then propose an implementation of our model as an instrumental\ - \ prototype. The results illustrate that our acoustic amplitude modulation system\ - \ produces controllable sideband components, and that synthesis generated from\ - \ our corresponding numerical dynamic system model closely approximates the acoustic\ - \ result of the physical system.},\n address = {Brisbane, Australia},\n author\ - \ = {Herbert H.C. Chang and Spencer Topel},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.3964599},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {8--13},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Electromagnetically Actuated Acoustic Amplitude Modulation Synthesis},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0003.pdf},\n\ - \ year = {2016}\n}\n" + ID: Jones2009a + abstract: 'This paper presents a new force-sensitive surface designedfor playing + music. A prototype system has been implemented using a passive capacitive sensor, + a commodity multichannel audio interface, and decoding software running ona laptop + computer. This setup has been a successful, lowcost route to a number of experiments + in intimate musicalcontrol.' + address: 'Pittsburgh, PA, United States' + author: 'Jones, Randy and Driessen, Peter and Schloss, Andrew and Tzanetakis, George' + bibtex: "@inproceedings{Jones2009a,\n abstract = {This paper presents a new force-sensitive\ + \ surface designedfor playing music. 
A prototype system has been implemented using\ + \ a passive capacitive sensor, a commodity multichannel audio interface, and decoding\ + \ software running ona laptop computer. This setup has been a successful, lowcost\ + \ route to a number of experiments in intimate musicalcontrol.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Jones, Randy and Driessen, Peter and Schloss,\ + \ Andrew and Tzanetakis, George},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177589},\n\ + \ issn = {2220-4806},\n keywords = {Multitouch, sensors, tactile, capacitive,\ + \ percussion controllers. },\n pages = {236--241},\n title = {A Force-Sensitive\ + \ Surface for Intimate Control},\n url = {http://www.nime.org/proceedings/2009/nime2009_236.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3964599 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177589 issn: 2220-4806 - pages: 8--13 - publisher: Queensland Conservatorium Griffith University - title: Electromagnetically Actuated Acoustic Amplitude Modulation Synthesis - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0003.pdf - year: 2016 + keywords: 'Multitouch, sensors, tactile, capacitive, percussion controllers. ' + pages: 236--241 + title: A Force-Sensitive Surface for Intimate Control + url: http://www.nime.org/proceedings/2009/nime2009_236.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Berdahl2016 - abstract: "New interfaces for vibrotactile interaction with touchscreens are\nrealized.\ - \ An electromagnetic design for wireless actuation of 3D-printed\nconductive tokens\ - \ is analyzed. Example music interactions are implemented using\nphysical modeling\ - \ paradigms, each investigated within the context of a particular\ntoken that\ - \ suggests a different interaction metaphor." 
- address: 'Brisbane, Australia' - author: Edgar Berdahl and Danny Holmes and Eric Sheffield - bibtex: "@inproceedings{Berdahl2016,\n abstract = {New interfaces for vibrotactile\ - \ interaction with touchscreens are\nrealized. An electromagnetic design for wireless\ - \ actuation of 3D-printed\nconductive tokens is analyzed. Example music interactions\ - \ are implemented using\nphysical modeling paradigms, each investigated within\ - \ the context of a particular\ntoken that suggests a different interaction metaphor.},\n\ - \ address = {Brisbane, Australia},\n author = {Edgar Berdahl and Danny Holmes\ - \ and Eric Sheffield},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1175984},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {5--6},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Wireless Vibrotactile\ - \ Tokens for Audio-Haptic Interaction with Touchscreen Interfaces},\n track =\ - \ {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00032.pdf},\n\ - \ year = {2016}\n}\n" + ID: Kellum2009 + abstract: 'This paper introduces a flexible mapping editor, which transforms multi-touch + devices into musical instruments. The editor enables users to create interfaces + by dragging and dropping components onto the interface and attaching actions to + them, which will be executed when certain userdefined conditions obtain. The editor + receives touch information via the non-proprietary communication protocol, TUIO + [9], and can, therefore, be used together with a variety of different multi-touch + input devices. 
' + address: 'Pittsburgh, PA, United States' + author: 'Kellum, Greg and Crevoisier, Alain' + bibtex: "@inproceedings{Kellum2009,\n abstract = {This paper introduces a flexible\ + \ mapping editor, which transforms multi-touch devices into musical instruments.\ + \ The editor enables users to create interfaces by dragging and dropping components\ + \ onto the interface and attaching actions to them, which will be executed when\ + \ certain userdefined conditions obtain. The editor receives touch information\ + \ via the non-proprietary communication protocol, TUIO [9], and can, therefore,\ + \ be used together with a variety of different multi-touch input devices. },\n\ + \ address = {Pittsburgh, PA, United States},\n author = {Kellum, Greg and Crevoisier,\ + \ Alain},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177601},\n issn = {2220-4806},\n\ + \ keywords = {NIME, multi-touch, multi-modal interface, sonic interaction design.\ + \ },\n pages = {242--245},\n title = {A Flexible Mapping Editor for Multi-touch\ + \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_242.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175984 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177601 issn: 2220-4806 - pages: 5--6 - publisher: Queensland Conservatorium Griffith University - title: Wireless Vibrotactile Tokens for Audio-Haptic Interaction with Touchscreen - Interfaces - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper00032.pdf - year: 2016 + keywords: 'NIME, multi-touch, multi-modal interface, sonic interaction design. 
' + pages: 242--245 + title: A Flexible Mapping Editor for Multi-touch Musical Instruments + url: http://www.nime.org/proceedings/2009/nime2009_242.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Baldwin2016 - abstract: 'An interactive museum exhibit of a digitally augmented medieval musical - instrument, the tromba marina, is presented. The tromba marina is a curious single - stringed instrument with a rattling bridge, from which a trumpet-like timbre is - produced. The physical instrument was constructed as a replica of one found in - Musikmuseet in Frederiksberg. The replicated instrument was augmented with a pickup, - speakers and digital signal processing to create a more reliable, approachable - and appropriate instrument for interactive display in the museum. We report on - the evaluation of the instrument performed at the Danish museum of musical instruments.' - address: 'Brisbane, Australia' - author: Alex Baldwin and Troels Hammer and Edvinas Pechiulis and Peter Williams - and Dan Overholt and Stefania Serafin - bibtex: "@inproceedings{Baldwin2016,\n abstract = {An interactive museum exhibit\ - \ of a digitally augmented medieval musical instrument, the tromba marina, is\ - \ presented. The tromba marina is a curious single stringed instrument with a\ - \ rattling bridge, from which a trumpet-like timbre is produced. The physical\ - \ instrument was constructed as a replica of one found in Musikmuseet in Frederiksberg.\ - \ The replicated instrument was augmented with a pickup, speakers and digital\ - \ signal processing to create a more reliable, approachable and appropriate instrument\ - \ for interactive display in the museum. 
We report on the evaluation of the instrument\ - \ performed at the Danish museum of musical instruments.},\n address = {Brisbane,\ - \ Australia},\n author = {Alex Baldwin and Troels Hammer and Edvinas Pechiulis\ - \ and Peter Williams and Dan Overholt and Stefania Serafin},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.3964592},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {14--19},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Tromba Moderna: A Digitally Augmented Medieval Instrument},\n track\ - \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0004.pdf},\n\ - \ year = {2016}\n}\n" + ID: Kiefer2009 + abstract: 'Phalanger is a system which facilitates the control of music software + with hand and finger motion, with the aim of creating a fluid style of interaction + that promotes musicality. The system is purely video based, requires no wearables + or accessories and uses affordable and accessible technology. It employs a neural + network for background segmentation, a combination of imaging techniques for frame + analysis, and a support vector machine (SVM) for recognition of hand positions. + System evaluation showed the SVM to reliably differentiate between eight different + classes. An initial formative user evaluation with ten musicians was carried out + to help build a picture of how users responded to the system; this highlighted + areas that need improvement and lent some insight into useful features for the + next version.' + address: 'Pittsburgh, PA, United States' + author: 'Kiefer, Chris and Collins, Nick and Fitzpatrick, Geraldine' + bibtex: "@inproceedings{Kiefer2009,\n abstract = {Phalanger is a system which facilitates\ + \ the control of music software with hand and finger motion, with the aim of creating\ + \ a fluid style of interaction that promotes musicality. 
The system is purely\ + \ video based, requires no wearables or accessories and uses affordable and accessible\ + \ technology. It employs a neural network for background segmentation, a combination\ + \ of imaging techniques for frame analysis, and a support vector machine (SVM)\ + \ for recognition of hand positions. System evaluation showed the SVM to reliably\ + \ differentiate between eight different classes. An initial formative user evaluation\ + \ with ten musicians was carried out to help build a picture of how users responded\ + \ to the system; this highlighted areas that need improvement and lent some insight\ + \ into useful features for the next version.},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Kiefer, Chris and Collins, Nick and Fitzpatrick, Geraldine},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177603},\n issn = {2220-4806},\n\ + \ keywords = {nime09},\n pages = {246--249},\n title = {Phalanger : Controlling\ + \ Music Software With Hand Movement Using A Computer Vision and Machine Learning\ + \ Approach},\n url = {http://www.nime.org/proceedings/2009/nime2009_246.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3964592 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177603 issn: 2220-4806 - pages: 14--19 - publisher: Queensland Conservatorium Griffith University - title: 'Tromba Moderna: A Digitally Augmented Medieval Instrument' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0004.pdf - year: 2016 + keywords: nime09 + pages: 246--249 + title: 'Phalanger : Controlling Music Software With Hand Movement Using A Computer + Vision and Machine Learning Approach' + url: http://www.nime.org/proceedings/2009/nime2009_246.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Berg2016 - abstract: |- - This demonstration 
describes Tango, software for - Computer-Human Improvisation developed for more than 25 years by Henning Berg. - Tango listens to an improvising musician, analyses what it hears - and plays musical responses which relate directly to the musical input. - If the improviser in turn reacts to these answers, a musical loop between the - human and the machine can emerge. The way input and reaction correlate and the - predictability of Tango's responses can be defined by the user via a setup - of improvising environments, called Rooms. - Real-time sampling with knowledge of the musical content behind the samples and - Midi-handling are unified via Tango's own monophonic audio-to-Midi, time - stretching and pitch shifting algorithms. Both audio and Midi can be used by - Tango's modules (e.g. Listeners, Players, Modifiers, Metronomes or Harmony) - for input and output. - A flexible real time control system allows for internal and external remote - control and scaling of most parameters. - The free software for Windows7 with all necessary folders, English and German - manuals, many example-Rooms and a few videos can be downloaded at - www.henning-berg.de. - address: 'Brisbane, Australia' - author: Henning Berg - bibtex: "@inproceedings{Berg2016,\n abstract = {This demonstration describes Tango,\ - \ software for\nComputer-Human Improvisation developed for more than 25 years\ - \ by Henning Berg.\nTango listens to an improvising musician, analyses what it\ - \ hears\nand plays musical responses which relate directly to the musical input.\n\ - If the improviser in turn reacts to these answers, a musical loop between the\n\ - human and the machine can emerge. 
The way input and reaction correlate and the\n\ - predictability of Tango's responses can be defined by the user via a setup\nof\ - \ improvising environments, called Rooms.\nReal-time sampling with knowledge of\ - \ the musical content behind the samples and\nMidi-handling are unified via Tango's\ - \ own monophonic audio-to-Midi, time\nstretching and pitch shifting algorithms.\ - \ Both audio and Midi can be used by\nTango's modules (e.g. Listeners, Players,\ - \ Modifiers, Metronomes or Harmony)\nfor input and output.\nA flexible real time\ - \ control system allows for internal and external remote\ncontrol and scaling\ - \ of most parameters.\nThe free software for Windows7 with all necessary folders,\ - \ English and German\nmanuals, many example-Rooms and a few videos can be downloaded\ - \ at\nwww.henning-berg.de.},\n address = {Brisbane, Australia},\n author = {Henning\ - \ Berg},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1175990},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {7--8},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Tango: Software for Computer-Human Improvisation},\n\ - \ track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00042.pdf},\n\ - \ year = {2016}\n}\n" + ID: Nakra2009 + abstract: 'The UBS Virtual Maestro is an interactive conducting system designed + by Immersion Music to simulate the experience of orchestral conducting for the + general public attending a classical music concert. The system utilizes the Wii + Remote, which users hold and move like a conducting baton to affect the tempo + and dynamics of an orchestral video/audio recording. The accelerometer data from + the Wii Remote is used to control playback speed and volume in real-time. 
The + system is housed in a UBSbranded kiosk that has toured classical performing arts + venues throughout the United States and Europe in 2007 and 2008. In this paper + we share our experiences in designing this standalone system for thousands of + users, and lessons that we learned from the project. ' + address: 'Pittsburgh, PA, United States' + author: 'Nakra, Teresa M. and Ivanov, Yuri and Smaragdis, Paris and Ault, Chris' + bibtex: "@inproceedings{Nakra2009,\n abstract = {The UBS Virtual Maestro is an interactive\ + \ conducting system designed by Immersion Music to simulate the experience of\ + \ orchestral conducting for the general public attending a classical music concert.\ + \ The system utilizes the Wii Remote, which users hold and move like a conducting\ + \ baton to affect the tempo and dynamics of an orchestral video/audio recording.\ + \ The accelerometer data from the Wii Remote is used to control playback speed\ + \ and volume in real-time. The system is housed in a UBSbranded kiosk that has\ + \ toured classical performing arts venues throughout the United States and Europe\ + \ in 2007 and 2008. 
In this paper we share our experiences in designing this standalone\ + \ system for thousands of users, and lessons that we learned from the project.\ + \ },\n address = {Pittsburgh, PA, United States},\n author = {Nakra, Teresa M.\ + \ and Ivanov, Yuri and Smaragdis, Paris and Ault, Chris},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177637},\n issn = {2220-4806},\n keywords = {conducting,\ + \ gesture, interactive installations, Wii Remote },\n pages = {250--255},\n title\ + \ = {The UBS Virtual Maestro : an Interactive Conducting System},\n url = {http://www.nime.org/proceedings/2009/nime2009_250.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175990 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177637 issn: 2220-4806 - pages: 7--8 - publisher: Queensland Conservatorium Griffith University - title: 'Tango: Software for Computer-Human Improvisation' - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper00042.pdf - year: 2016 + keywords: 'conducting, gesture, interactive installations, Wii Remote ' + pages: 250--255 + title: 'The UBS Virtual Maestro : an Interactive Conducting System' + url: http://www.nime.org/proceedings/2009/nime2009_250.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: McPherson2016 - abstract: "The importance of low and consistent latency in interactive music\nsystems\ - \ is well-established. So how do commonly-used tools for creating digital\nmusical\ - \ instruments and other tangible interfaces perform in terms of latency\nfrom\ - \ user action to sound output? This paper examines several common\nconfigurations\ - \ where a microcontroller (e.g. Arduino) or wireless device\ncommunicates with\ - \ computer-based sound generator (e.g. Max/MSP, Pd). 
We find\nthat, perhaps surprisingly,\ - \ almost none of the tested configurations meet\ngenerally-accepted guidelines\ - \ for latency and jitter. To address this limitation,\nthe paper presents a new\ - \ embedded platform, Bela, which is capable of complex\naudio and sensor processing\ - \ at submillisecond latency." - address: 'Brisbane, Australia' - author: Andrew McPherson and Robert Jack and Giulio Moro - bibtex: "@inproceedings{McPherson2016,\n abstract = {The importance of low and consistent\ - \ latency in interactive music\nsystems is well-established. So how do commonly-used\ - \ tools for creating digital\nmusical instruments and other tangible interfaces\ - \ perform in terms of latency\nfrom user action to sound output? This paper examines\ - \ several common\nconfigurations where a microcontroller (e.g. Arduino) or wireless\ - \ device\ncommunicates with computer-based sound generator (e.g. Max/MSP, Pd).\ - \ We find\nthat, perhaps surprisingly, almost none of the tested configurations\ - \ meet\ngenerally-accepted guidelines for latency and jitter. 
To address this\ - \ limitation,\nthe paper presents a new embedded platform, Bela, which is capable\ - \ of complex\naudio and sensor processing at submillisecond latency.},\n address\ - \ = {Brisbane, Australia},\n author = {Andrew McPherson and Robert Jack and Giulio\ - \ Moro},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.3964611},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {20--25},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Action-Sound Latency: Are Our Tools Fast Enough?},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0005.pdf},\n\ - \ year = {2016}\n}\n" + ID: Jessop2009 + abstract: 'This paper describes The Vocal Augmentation and Manipulation Prosthesis + (VAMP) a gesture-based wearable controller for live-time vocal performance. This + controller allows a singer to capture and manipulate single notes that he or she + sings, using a gestural vocabulary developed from that of choral conducting. By + drawing from a familiar gestural vocabulary, this controller and the associated + mappings can be more intuitive and expressive for both performer and audience. ' + address: 'Pittsburgh, PA, United States' + author: 'Jessop, Elena' + bibtex: "@inproceedings{Jessop2009,\n abstract = {This paper describes The Vocal\ + \ Augmentation and Manipulation Prosthesis (VAMP) a gesture-based wearable controller\ + \ for live-time vocal performance. This controller allows a singer to capture\ + \ and manipulate single notes that he or she sings, using a gestural vocabulary\ + \ developed from that of choral conducting. By drawing from a familiar gestural\ + \ vocabulary, this controller and the associated mappings can be more intuitive\ + \ and expressive for both performer and audience. 
},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Jessop, Elena},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177583},\n issn = {2220-4806},\n keywords = {musical expressivity,\ + \ vocal performance, gestural control, conducting. },\n pages = {256--259},\n\ + \ title = {The Vocal Augmentation and Manipulation Prosthesis (VAMP): A Conducting-Based\ + \ Gestural Controller for Vocal Performance},\n url = {http://www.nime.org/proceedings/2009/nime2009_256.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.3964611 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177583 issn: 2220-4806 - pages: 20--25 - publisher: Queensland Conservatorium Griffith University - title: 'Action-Sound Latency: Are Our Tools Fast Enough?' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0005.pdf - year: 2016 + keywords: 'musical expressivity, vocal performance, gestural control, conducting. ' + pages: 256--259 + title: 'The Vocal Augmentation and Manipulation Prosthesis (VAMP): A Conducting-Based + Gestural Controller for Vocal Performance' + url: http://www.nime.org/proceedings/2009/nime2009_256.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Berdahl2016a - abstract: "Virtual ``slack'' strings are designed for and employed by the\nLaptop\ - \ Orchestra of Louisiana. These virtual strings are ``slack'' in the sense\nthat\ - \ they can be very easily displaced, bent, tugged upon, etc. This enables\nforce-feedback\ - \ control of widely ranging pitch glides, by as much as an octave or\nmore, simply\ - \ by bending the virtual string. 
To realize a slack string design, a\nvirtual\ - \ spring with a specific nonlinear characteristic curve is designed.\nViolin,\ - \ viola, and cello-scale models are tuned and employed by the Laptop\nOrchestra\ - \ of Louisiana in Quartet for Strings." - address: 'Brisbane, Australia' - author: Edgar Berdahl and Andrew Pfalz and Stephen David Beck - bibtex: "@inproceedings{Berdahl2016a,\n abstract = {Virtual ``slack'' strings are\ - \ designed for and employed by the\nLaptop Orchestra of Louisiana. These virtual\ - \ strings are ``slack'' in the sense\nthat they can be very easily displaced,\ - \ bent, tugged upon, etc. This enables\nforce-feedback control of widely ranging\ - \ pitch glides, by as much as an octave or\nmore, simply by bending the virtual\ - \ string. To realize a slack string design, a\nvirtual spring with a specific\ - \ nonlinear characteristic curve is designed.\nViolin, viola, and cello-scale\ - \ models are tuned and employed by the Laptop\nOrchestra of Louisiana in Quartet\ - \ for Strings.},\n address = {Brisbane, Australia},\n author = {Edgar Berdahl\ - \ and Andrew Pfalz and Stephen David Beck},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1175988},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {9--10},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Very Slack Strings: A Physical Model and Its Use in the Composition\ - \ Quartet for Strings},\n track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper00052.pdf},\n\ - \ year = {2016}\n}\n" + ID: Henriques2009 + abstract: 'The Double Slide Controller is a new electronic music instrument that + departs from the slide trombone as a model for its design. 
Going much beyond a + mere simulation of its acoustic counterpart it introduces truly innovative features: + two powerful and versatile sets of gesture driven interfaces actuated by the hands + of the performer, as well as featuring two independent slides, one for each hand/arm + of the musician. The combination of these features make this instrument a great + tool to explore new venues in musical expression, given the many degrees of technical + and musical complexity that can be achieved during its performance.' + address: 'Pittsburgh, PA, United States' + author: 'Henriques, Tomás' + bibtex: "@inproceedings{Henriques2009,\n abstract = {The Double Slide Controller\ + \ is a new electronic music instrument that departs from the slide trombone as\ + \ a model for its design. Going much beyond a mere simulation of its acoustic\ + \ counterpart it introduces truly innovative features: two powerful and versatile\ + \ sets of gesture driven interfaces actuated by the hands of the performer, as\ + \ well as featuring two independent slides, one for each hand/arm of the musician.\ + \ The combination of these features make this instrument a great tool to explore\ + \ new venues in musical expression, given the many degrees of technical and musical\ + \ complexity that can be achieved during its performance.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Henriques, Tom\\'{a}s},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177571},\n issn = {2220-4806},\n keywords = {Musical\ + \ Instrument, Sensor technologies, Computer Music, Hardware and Software Design.},\n\ + \ pages = {260--261},\n title = {Double Slide Controller},\n url = {http://www.nime.org/proceedings/2009/nime2009_260.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175988 - isbn: 978-1-925455-13-7 + 
doi: 10.5281/zenodo.1177571 issn: 2220-4806 - pages: 9--10 - publisher: Queensland Conservatorium Griffith University - title: 'Very Slack Strings: A Physical Model and Its Use in the Composition Quartet - for Strings' - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper00052.pdf - year: 2016 + keywords: 'Musical Instrument, Sensor technologies, Computer Music, Hardware and + Software Design.' + pages: 260--261 + title: Double Slide Controller + url: http://www.nime.org/proceedings/2009/nime2009_260.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Oda2016 - abstract: "At a time in the near future, many computers (including devices\nsuch\ - \ as smart-phones) will have system clocks that are synchronized to a high\ndegree\ - \ (less than 1 ms of error). This will enable us to coordinate events across\n\ - unconnected devices with a degree of accuracy that was previously impossible.\ - \ In\nparticular, high clock synchronization means that we can use these clocks\ - \ to\nsynchronize tempo between humans or sequencers with little-to-no communication\n\ - between the devices. To facilitate this low-overhead tempo synchronization, we\n\ - propose the Global Metronome, which is a simple, computationally cheap method\ - \ to\nobtain absolute tempo synchronization. We present experimental results\n\ - demonstrating the effectiveness of using the Global Metronome and compare the\n\ - performance to MIDI clock sync, a common synchronization method. Finally, we\n\ - present an open source implementation of a Global Metronome server using a\nGPS-connected\ - \ Raspberry Pi that can be built for under $100." - address: 'Brisbane, Australia' - author: Reid Oda and Rebecca Fiebrink - bibtex: "@inproceedings{Oda2016,\n abstract = {At a time in the near future, many\ - \ computers (including devices\nsuch as smart-phones) will have system clocks\ - \ that are synchronized to a high\ndegree (less than 1 ms of error). 
This will\ - \ enable us to coordinate events across\nunconnected devices with a degree of\ - \ accuracy that was previously impossible. In\nparticular, high clock synchronization\ - \ means that we can use these clocks to\nsynchronize tempo between humans or sequencers\ - \ with little-to-no communication\nbetween the devices. To facilitate this low-overhead\ - \ tempo synchronization, we\npropose the Global Metronome, which is a simple,\ - \ computationally cheap method to\nobtain absolute tempo synchronization. We present\ - \ experimental results\ndemonstrating the effectiveness of using the Global Metronome\ - \ and compare the\nperformance to MIDI clock sync, a common synchronization method.\ - \ Finally, we\npresent an open source implementation of a Global Metronome server\ - \ using a\nGPS-connected Raspberry Pi that can be built for under $100.},\n address\ - \ = {Brisbane, Australia},\n author = {Reid Oda and Rebecca Fiebrink},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176096},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {26--31},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {The Global Metronome: Absolute Tempo Sync For\ - \ Networked Musical Performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0006.pdf},\n\ - \ year = {2016}\n}\n" + ID: Berdahl2009 + abstract: 'When we asked a colleague of ours why people do not make more haptic + musical instruments, he replied that he thought they were “too hard to program + and too expensive.” We decided to solve these perceived problems by introducing + HSP, a simple platform for implementing haptic musical instruments. HSP obviates + the need for employing low-level embedded control software because the haptic + device is controlled directly from within the Pure Data (Pd) software running + on a general purpose computer. 
Positions can be read from the haptic device, and + forces can be written to the device using messages in Pd. Various additional objects + have been created to facilitate rapid prototyping of useful haptic musical instruments + in Pd. HSP operates under Linux, OS X, and Windows and supports the mass-produced + Falcon haptic device from NovInt, which can currently be obtained for as little + as US$150. All of the above make HSP an especially excellent choice for pedagogical + environments where multiple workstations are required and example programs should + be complete yet simple.' + address: 'Pittsburgh, PA, United States' + author: 'Berdahl, Edgar and Niemeyer, Günter and Smith, Julius O.' + bibtex: "@inproceedings{Berdahl2009,\n abstract = {When we asked a colleague of\ + \ ours why people do not make more haptic musical instruments, he replied that\ + \ he thought they were “too hard to program and too expensive.” We decided to\ + \ solve these perceived problems by introducing HSP, a simple platform for implementing\ + \ haptic musical instruments. HSP obviates the need for employing low-level embedded\ + \ control software because the haptic device is controlled directly from within\ + \ the Pure Data (Pd) software running on a general purpose computer. Positions\ + \ can be read from the haptic device, and forces can be written to the device\ + \ using messages in Pd. Various additional objects have been created to facilitate\ + \ rapid prototyping of useful haptic musical instruments in Pd. HSP operates under\ + \ Linux, OS X, and Windows and supports the mass-produced Falcon haptic device\ + \ from NovInt, which can currently be obtained for as little as US\\$150. 
All\ + \ of the above make HSP an especially excellent choice for pedagogical environments\ + \ where multiple workstations are required and example programs should be complete\ + \ yet simple.},\n address = {Pittsburgh, PA, United States},\n author = {Berdahl,\ + \ Edgar and Niemeyer, G\\''{u}nter and Smith, Julius O.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177477},\n issn = {2220-4806},\n keywords = { haptic\ + \ musical instrument, HSP, haptics, computer music, physical modeling, Pure Data\ + \ (Pd), NovInt},\n pages = {262--263},\n title = {HSP : A Simple and Effective\ + \ Open-Source Platform for Implementing Haptic Musical Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_262.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176096 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177477 issn: 2220-4806 - pages: 26--31 - publisher: Queensland Conservatorium Griffith University - title: 'The Global Metronome: Absolute Tempo Sync For Networked Musical Performance' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0006.pdf - year: 2016 + keywords: ' haptic musical instrument, HSP, haptics, computer music, physical modeling, + Pure Data (Pd), NovInt' + pages: 262--263 + title: 'HSP : A Simple and Effective Open-Source Platform for Implementing Haptic + Musical Instruments' + url: http://www.nime.org/proceedings/2009/nime2009_262.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Smallwood2016 - abstract: "This paper describes the development, creation, and deployment of\na\ - \ sound installation entitled Coronium 3500 (Lucie's Halo), commissioned by\n\ - the Caramoor Center for Music and the Arts. 
The piece, a 12-channel immersive\n\ - sound installation driven by solar power, was exhibited as part of the exhibition\n\ - In the Garden of Sonic Delights from June 7 to Nov. 4, 2014, and again for\nsimilar\ - \ duration in 2015. Herein I describe the aesthetic and technical details\nof\ - \ the piece and its ultimate deployment, as well as reflecting on the results\n\ - and the implications for future work." - address: 'Brisbane, Australia' - author: Scott Smallwood - bibtex: "@inproceedings{Smallwood2016,\n abstract = {This paper describes the development,\ - \ creation, and deployment of\na sound installation entitled Coronium 3500 (Lucie's\ - \ Halo), commissioned by\nthe Caramoor Center for Music and the Arts. The piece,\ - \ a 12-channel immersive\nsound installation driven by solar power, was exhibited\ - \ as part of the exhibition\nIn the Garden of Sonic Delights from June 7 to Nov.\ - \ 4, 2014, and again for\nsimilar duration in 2015. Herein I describe the aesthetic\ - \ and technical details\nof the piece and its ultimate deployment, as well as\ - \ reflecting on the results\nand the implications for future work.},\n address\ - \ = {Brisbane, Australia},\n author = {Scott Smallwood},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176127},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {32--35},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Coronium 3500: A Solarsonic Installation for Caramoor},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0007.pdf},\n year\ - \ = {2016}\n}\n" + ID: Barri2009 + abstract: 'This paper introduces the new audiovisual sequencing system "Versum" + that allows users to compose in three dimensions. In the present paper the conceptual + soil from which this system has sprung is discussed first. 
Secondly, the basic + concepts with which Versum operates are explained, providing a general idea of + what is meant by sequencing in three dimensions and explaining what compositions + made in Versum can look and sound like. Thirdly, the practical ways in which a + composer can use Versum to make his own audiovisual compositions are presented + by means of a more detailed description of the different graphical user interface + elements. Fourthly, a short description is given of the modular structure of the + software underlying Versum. Finally, several foresights regarding the directions + in which Versum will continue to develop in the near future are presented. ' + address: 'Pittsburgh, PA, United States' + author: 'Barri, Tarik' + bibtex: "@inproceedings{Barri2009,\n abstract = {This paper introduces the new audiovisual\ + \ sequencing system \"Versum\" that allows users to compose in three dimensions.\ + \ In the present paper the conceptual soil from which this system has sprung is\ + \ discussed first. Secondly, the basic concepts with which Versum operates are\ + \ explained, providing a general idea of what is meant by sequencing in three\ + \ dimensions and explaining what compositions made in Versum can look and sound\ + \ like. Thirdly, the practical ways in which a composer can use Versum to make\ + \ his own audiovisual compositions are presented by means of a more detailed description\ + \ of the different graphical user interface elements. Fourthly, a short description\ + \ is given of the modular structure of the software underlying Versum. Finally,\ + \ several foresights regarding the directions in which Versum will continue to\ + \ develop in the near future are presented. 
},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Barri, Tarik},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177473},\n\ + \ issn = {2220-4806},\n keywords = {audiovisual, sequencing, collaboration. },\n\ + \ pages = {264--265},\n title = {Versum : Audiovisual Composing in 3d},\n url\ + \ = {http://www.nime.org/proceedings/2009/nime2009_264.pdf},\n year = {2009}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176127 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177473 issn: 2220-4806 - pages: 32--35 - publisher: Queensland Conservatorium Griffith University - title: 'Coronium 3500: A Solarsonic Installation for Caramoor' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0007.pdf - year: 2016 + keywords: 'audiovisual, sequencing, collaboration. ' + pages: 264--265 + title: 'Versum : Audiovisual Composing in 3d' + url: http://www.nime.org/proceedings/2009/nime2009_264.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Laurenzo2016 - abstract: |- - In the period between June 2014 and June 2015, at least 5,500 - immigrants died trying to reach Europe from Africa while crossing the - Mediterranean Sea. - In this paper we present 5500, a piano performance that is a part of an on-going - project that investigates the incorporation of electrical muscle stimulation - (EMS) into musical performances, with a particular interest in the political - significance of the negotiation of control that arises. - 5500 consists of a performance of Beethoven's Sonata Pathétique, where the - pianist's execution is disrupted using computer-controlled electrodes - which stimulate the muscles in his or her arms causing their involuntary - contractions and affecting the final musical result. 
- address: 'Brisbane, Australia' - author: Tomas Laurenzo - bibtex: "@inproceedings{Laurenzo2016,\n abstract = {In the period between June 2014\ - \ and June 2015, at least 5,500\nimmigrants died trying to reach Europe from Africa\ - \ while crossing the\nMediterranean Sea.\nIn this paper we present 5500, a piano\ - \ performance that is a part of an on-going\nproject that investigates the incorporation\ - \ of electrical muscle stimulation\n(EMS) into musical performances, with a particular\ - \ interest in the political\nsignificance of the negotiation of control that arises.\n\ - 5500 consists of a performance of Beethoven's Sonata Path\\'{e}tique, where the\n\ - pianist's execution is disrupted using computer-controlled electrodes\nwhich stimulate\ - \ the muscles in his or her arms causing their involuntary\ncontractions and affecting\ - \ the final musical result.},\n address = {Brisbane, Australia},\n author = {Tomas\ - \ Laurenzo},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176058},\n isbn\ - \ = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {36--40},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {5500: performance,\ - \ control, and politics},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0008.pdf},\n\ - \ year = {2016}\n}\n" + ID: Bullock2009 + abstract: In this paper we describe findings related to user interfacerequirements + for live electronic music arising from researchconducted as part of the first + three-year phase of the EUfunded Integra project. A number of graphical user interface(GUI) + prototypes developed during the Integra project initial phase are described and + conclusions drawn about theirdesign and implementation. 
+ address: 'Pittsburgh, PA, United States' + author: 'Bullock, Jamie and Coccioli, Lamberto' + bibtex: "@inproceedings{Bullock2009,\n abstract = {In this paper we describe findings\ + \ related to user interfacerequirements for live electronic music arising from\ + \ researchconducted as part of the first three-year phase of the EUfunded Integra\ + \ project. A number of graphical user interface(GUI) prototypes developed during\ + \ the Integra project initial phase are described and conclusions drawn about\ + \ theirdesign and implementation.},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Bullock, Jamie and Coccioli, Lamberto},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177489},\n issn = {2220-4806},\n keywords = {Integra,\ + \ User Interface, Usability, Design, Live Electronics, Music Technology },\n pages\ + \ = {266--267},\n title = {Towards a Humane Graphical User Interface for Live\ + \ Electronic Music},\n url = {http://www.nime.org/proceedings/2009/nime2009_266.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176058 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177489 issn: 2220-4806 - pages: 36--40 - publisher: Queensland Conservatorium Griffith University - title: '5500: performance, control, and politics' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0008.pdf - year: 2016 + keywords: 'Integra, User Interface, Usability, Design, Live Electronics, Music Technology ' + pages: 266--267 + title: Towards a Humane Graphical User Interface for Live Electronic Music + url: http://www.nime.org/proceedings/2009/nime2009_266.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Johnson2016 - abstract: "This paper provides an overview of a new mechatronic loudspeaker\nsystem:\ - \ speaker.motion. 
The system affords automated positioning of a loudspeaker\n\ - in real-time in order to manipulate the spatial qualities of electronic music.\n\ - The paper gives a technical overview of how the system's hardware and\nsoftware\ - \ were developed and the design criteria and methodology. There is\ndiscussion\ - \ of the unique features of the speaker.motion spatialisation system and\nthe\ - \ methods of user interaction, as well as a look at the creative possibilities\n\ - that the loudspeakers afford. The creative affordances are explored through the\n\ - case study of two new pieces written for the speaker.motion system. It is hoped\n\ - that the speaker.motion system will afford composers and performers with a new\n\ - range of spatial aesthetics to use in spatial performances, and encourage\nexploration\ - \ of the acoustic properties of physical performance and installation\nspaces\ - \ in electronic music." - address: 'Brisbane, Australia' - author: Bridget Johnson and Michael Norris and Ajay Kapur - bibtex: "@inproceedings{Johnson2016,\n abstract = {This paper provides an overview\ - \ of a new mechatronic loudspeaker\nsystem: speaker.motion. The system affords\ - \ automated positioning of a loudspeaker\nin real-time in order to manipulate\ - \ the spatial qualities of electronic music.\nThe paper gives a technical overview\ - \ of how the system's hardware and\nsoftware were developed and the design criteria\ - \ and methodology. There is\ndiscussion of the unique features of the speaker.motion\ - \ spatialisation system and\nthe methods of user interaction, as well as a look\ - \ at the creative possibilities\nthat the loudspeakers afford. The creative affordances\ - \ are explored through the\ncase study of two new pieces written for the speaker.motion\ - \ system. 
It is hoped\nthat the speaker.motion system will afford composers and\ - \ performers with a new\nrange of spatial aesthetics to use in spatial performances,\ - \ and encourage\nexploration of the acoustic properties of physical performance\ - \ and installation\nspaces in electronic music.},\n address = {Brisbane, Australia},\n\ - \ author = {Bridget Johnson and Michael Norris and Ajay Kapur},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176046},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {41--45},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {speaker.motion: A Mechatronic Loudspeaker System for Live Spatialisation},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0009.pdf},\n\ - \ year = {2016}\n}\n" + ID: Laurenzo2009 + abstract: 'In this paper, we present YARMI, a collaborative, networked, tangible, + musical instrument. YARMI operates on augmented-reality space (shared between + the performers and the public), presenting a multiple tabletop interface where + several musical sequencers and real–time effects machines can be operated.' + address: 'Pittsburgh, PA, United States' + author: 'Laurenzo, Tomas and Rodríguez, Ernesto and Castro, Juan Fabrizio' + bibtex: "@inproceedings{Laurenzo2009,\n abstract = {In this paper, we present YARMI,\ + \ a collaborative, networked, tangible, musical instrument. 
YARMI operates on\ + \ augmented-reality space (shared between the performers and the public), presenting\ + \ a multiple tabletop interface where several musical sequencers and real–time\ + \ effects machines can be operated.},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Laurenzo, Tomas and Rodr\\'{\\i}guez, Ernesto and Castro, Juan Fabrizio},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177611},\n issn = {2220-4806},\n\ + \ keywords = {Interactive music instruments, visual interfaces, visual feedback,\ + \ tangible interfaces, augmented reality, collaborative music, networked musical\ + \ instruments, real-time musical systems, musical sequencer. },\n pages = {268--269},\n\ + \ title = {YARMI : an Augmented Reality Musical Instrument},\n url = {http://www.nime.org/proceedings/2009/nime2009_268.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176046 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177611 issn: 2220-4806 - pages: 41--45 - publisher: Queensland Conservatorium Griffith University - title: 'speaker.motion: A Mechatronic Loudspeaker System for Live Spatialisation' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0009.pdf - year: 2016 + keywords: 'Interactive music instruments, visual interfaces, visual feedback, tangible + interfaces, augmented reality, collaborative music, networked musical instruments, + real-time musical systems, musical sequencer. 
' + pages: 268--269 + title: 'YARMI : an Augmented Reality Musical Instrument' + url: http://www.nime.org/proceedings/2009/nime2009_268.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Nort2016 - abstract: |- - This paper presents our work towards a database of performance - activity that is grounded in an embodied view on meaning creation that crosses - sense modalities. Our system design is informed by the philosophical and - aesthestic intentions of the laboratory context within which it is designed, - focused on distribution of performance activity across temporal and spatial - dimensions, and expanded notions of the instrumental system as environmental - performative agent. We focus here on design decisions that result from this - overarching worldview on digitally-mediated performance. - address: 'Brisbane, Australia' - author: Doug Van Nort and Ian Jarvis and Michael Palumbo - bibtex: "@inproceedings{Nort2016,\n abstract = {This paper presents our work towards\ - \ a database of performance\nactivity that is grounded in an embodied view on\ - \ meaning creation that crosses\nsense modalities. Our system design is informed\ - \ by the philosophical and\naesthestic intentions of the laboratory context within\ - \ which it is designed,\nfocused on distribution of performance activity across\ - \ temporal and spatial\ndimensions, and expanded notions of the instrumental system\ - \ as environmental\nperformative agent. 
We focus here on design decisions that\ - \ result from this\noverarching worldview on digitally-mediated performance.},\n\ - \ address = {Brisbane, Australia},\n author = {Doug Van Nort and Ian Jarvis and\ - \ Michael Palumbo},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176092},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {46--50},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Towards a Mappable\ - \ Database of Emergent Gestural Meaning},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0010.pdf},\n\ - \ year = {2016}\n}\n" + ID: Essl2009 + abstract: When creating new musical instruments on a mobile phone platform one has + to map sensory input to synthesis algorithms. We propose that the very task of + this mapping belongs in the creative process and to this end we develop a way + to rapidly and on-the-fly edit the mapping of mobile phone instruments. The result + is that the meaning of the instruments can continuously be changed during a live + performance. + address: 'Pittsburgh, PA, United States' + author: 'Essl, Georg' + bibtex: "@inproceedings{Essl2009,\n abstract = {When creating new musical instruments\ + \ on a mobile phone platform one has to map sensory input to synthesis algorithms.\ + \ We propose that the very task of this mapping belongs in the creative process\ + \ and to this end we develop a way to rapidly and on-the-fly edit the mapping\ + \ of mobile phone instruments. 
The result is that the meaning of the instruments\ + \ can continuously be changed during a live performance.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Essl, Georg},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177503},\n issn = {2220-4806},\n keywords = {mobile phone\ + \ instruments,nime,nime09,on-the-fly},\n pages = {270--273},\n title = {SpeedDial\ + \ : Rapid and On-The-Fly Mapping of Mobile Phone Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_270.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176092 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177503 issn: 2220-4806 - pages: 46--50 - publisher: Queensland Conservatorium Griffith University - title: Towards a Mappable Database of Emergent Gestural Meaning - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0010.pdf - year: 2016 + keywords: 'mobile phone instruments,nime,nime09,on-the-fly' + pages: 270--273 + title: 'SpeedDial : Rapid and On-The-Fly Mapping of Mobile Phone Instruments' + url: http://www.nime.org/proceedings/2009/nime2009_270.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Long2016a - abstract: "The majority of musical robotics performances, projects and\ninstallations\ - \ utilise microcontroller hardware to digitally interface the robotic\ninstruments\ - \ with sequencer software and other musical controllers, often via a\npersonal\ - \ computer. While in many ways digital interfacing offers considerable\npower\ - \ and flexibility, digital protocols, equipment and audio workstations often\n\ - tend to suggest particular music-making work-flows and have resolution and timing\n\ - limitations. 
This paper describes the creation of a hardware interface that\n\ - allows direct communication between analogue synthesizer equipment and simple\n\ - robotic musical instruments entirely in the analogue domain without the use of\n\ - computers, microcontrollers or software of any kind. Several newly created\nmusical\ - \ robots of various designs are presented, together with a custom built\nhardware\ - \ interface with circuitry that enables analogue synthesizers to interface\nwith\ - \ the robots without any digital intermediary. This enables novel methods of\n\ - musical expression, creates new music-making work-flows for composing and\nimprovising\ - \ with musical robots and takes advantage of the low latency and\ninfinite resolution\ - \ of analogue circuits." - address: 'Brisbane, Australia' - author: Jason Long and Ajay Kapur and Dale Carnegie - bibtex: "@inproceedings{Long2016a,\n abstract = {The majority of musical robotics\ - \ performances, projects and\ninstallations utilise microcontroller hardware to\ - \ digitally interface the robotic\ninstruments with sequencer software and other\ - \ musical controllers, often via a\npersonal computer. While in many ways digital\ - \ interfacing offers considerable\npower and flexibility, digital protocols, equipment\ - \ and audio workstations often\ntend to suggest particular music-making work-flows\ - \ and have resolution and timing\nlimitations. This paper describes the creation\ - \ of a hardware interface that\nallows direct communication between analogue synthesizer\ - \ equipment and simple\nrobotic musical instruments entirely in the analogue domain\ - \ without the use of\ncomputers, microcontrollers or software of any kind. Several\ - \ newly created\nmusical robots of various designs are presented, together with\ - \ a custom built\nhardware interface with circuitry that enables analogue synthesizers\ - \ to interface\nwith the robots without any digital intermediary. 
This enables\ - \ novel methods of\nmusical expression, creates new music-making work-flows for\ - \ composing and\nimprovising with musical robots and takes advantage of the low\ - \ latency and\ninfinite resolution of analogue circuits.},\n address = {Brisbane,\ - \ Australia},\n author = {Jason Long and Ajay Kapur and Dale Carnegie},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176072},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {51--54},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {An Analogue Interface for Musical Robots},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0011.pdf},\n\ - \ year = {2016}\n}\n" + ID: Fels2009 + abstract: 'We have constructed an easy-to-use portable, wearable gesture-to-speech + system based on the Glove-TalkII [1] and GRASSP [2] Digital Ventriloquized Actors + (DIVAs). Our new portable system, called a ForTouch, is a specific model of a + DIVA and refines the use of a formant speech synthesizer. Using ForTouch, a user + can speak using hand gestures mapped to synthetic sound using a mapping function + that preserves gesture trajectories. By making ForTouch portable and self-contained, + speakers can communicate with others in the community and perform in new music/theatre + stage productions. Figure 1 shows one performer using the ForTouch. ForTouch performers + also allow us to study the relation between gestures and speech/song production.' + address: 'Pittsburgh, PA, United States' + author: 'Fels, Sidney S. and Pritchard, Bob and Lenters, Allison' + bibtex: "@inproceedings{Fels2009,\n abstract = {We have constructed an easy-to-use\ + \ portable, wearable gesture-to-speech system based on the Glove-TalkII [1] and\ + \ GRASSP [2] Digital Ventriloquized Actors (DIVAs). 
Our new portable system, called\ + \ a ForTouch, is a specific model of a DIVA and refines the use of a formant speech\ + \ synthesizer. Using ForTouch, a user can speak using hand gestures mapped to\ + \ synthetic sound using a mapping function that preserves gesture trajectories.\ + \ By making ForTouch portable and self-contained, speakers can communicate with\ + \ others in the community and perform in new music/theatre stage productions.\ + \ Figure 1 shows one performer using the ForTouch. ForTouch performers also allow\ + \ us to study the relation between gestures and speech/song production.},\n address\ + \ = {Pittsburgh, PA, United States},\n author = {Fels, Sidney S. and Pritchard,\ + \ Bob and Lenters, Allison},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177509},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {274--275},\n title = {ForTouch\ + \ : A Wearable Digital Ventriloquized Actor},\n url = {http://www.nime.org/proceedings/2009/nime2009_274.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176072 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177509 issn: 2220-4806 - pages: 51--54 - publisher: Queensland Conservatorium Griffith University - title: An Analogue Interface for Musical Robots - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0011.pdf - year: 2016 + keywords: nime09 + pages: 274--275 + title: 'ForTouch : A Wearable Digital Ventriloquized Actor' + url: http://www.nime.org/proceedings/2009/nime2009_274.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Barrett2016 - abstract: "Despite increasingly accessible and user-friendly multi-channel\ncompositional\ - \ tools, many composers still choose stereo formats for their work,\nwhere the\ - \ compositional process is allied to diffusion performance over 
a\n`classical'\ - \ loudspeaker orchestra. Although such orchestras remain\ncommon within UK institutions\ - \ as well as in France, they are in decline in the\nrest of the world. In contrast,\ - \ permanent, high-density loudspeaker arrays are on\nthe rise, as is the practical\ - \ application of 3-D audio technologies. Looking to\nthe future, we need to reconcile\ - \ the performance of historical and new stereo\nworks, side-by-side native 3-D\ - \ compositions. In anticipation of this growing\nneed, we have designed and tested\ - \ a prototype `Virtualmonium'. The\nVirtualmonium is an instrument for classical\ - \ diffusion performance over an\nacousmonium emulated in higher-order Ambisonics.\ - \ It allows composers to\ncustom-design loudspeaker orchestra emulations for the\ - \ performance of their\nworks, rehearse and refine performances off-site, and\ - \ perform classical\nrepertoire alongside native 3-D formats in the same concert.\ - \ This paper describes\nthe technical design of the Virtualmonium, assesses the\ - \ success of the prototype\nin some preliminary listening tests and concerts,\ - \ and speculates how the\ninstrument can further composition and performance practice." - address: 'Brisbane, Australia' - author: Natasha Barrett and Alexander Refsum Jensenius - bibtex: "@inproceedings{Barrett2016,\n abstract = {Despite increasingly accessible\ - \ and user-friendly multi-channel\ncompositional tools, many composers still choose\ - \ stereo formats for their work,\nwhere the compositional process is allied to\ - \ diffusion performance over a\n`classical' loudspeaker orchestra. Although such\ - \ orchestras remain\ncommon within UK institutions as well as in France, they\ - \ are in decline in the\nrest of the world. In contrast, permanent, high-density\ - \ loudspeaker arrays are on\nthe rise, as is the practical application of 3-D\ - \ audio technologies. 
Looking to\nthe future, we need to reconcile the performance\ - \ of historical and new stereo\nworks, side-by-side native 3-D compositions. In\ - \ anticipation of this growing\nneed, we have designed and tested a prototype\ - \ `Virtualmonium'. The\nVirtualmonium is an instrument for classical diffusion\ - \ performance over an\nacousmonium emulated in higher-order Ambisonics. It allows\ - \ composers to\ncustom-design loudspeaker orchestra emulations for the performance\ - \ of their\nworks, rehearse and refine performances off-site, and perform classical\n\ - repertoire alongside native 3-D formats in the same concert. This paper describes\n\ - the technical design of the Virtualmonium, assesses the success of the prototype\n\ - in some preliminary listening tests and concerts, and speculates how the\ninstrument\ - \ can further composition and performance practice.},\n address = {Brisbane, Australia},\n\ - \ author = {Natasha Barrett and Alexander Refsum Jensenius},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1175974},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {55--60},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {The `Virtualmonium': an instrument for classical sound diffusion over\ - \ a virtual loudspeaker orchestra},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0012.pdf},\n\ - \ year = {2016}\n}\n" + ID: Mclean2009 + abstract: 'Phonetic symbols describe movements of the vocal tract,tongue and lips, + and are combined into complex movementsforming the words of language. In music, + vocables are wordsthat describe musical sounds, by relating vocal movementsto + articulations of a musical instrument. 
We posit that vocable words allow the composers + and listeners to engageclosely with dimensions of timbre, and that vocables couldsee + greater use in electronic music interfaces. A preliminarysystem for controlling + percussive physical modelling synthesis with textual words is introduced, with + particular application in expressive specification of timbre during computer music + performances.' + address: 'Pittsburgh, PA, United States' + author: 'Mclean, Alex and Wiggins, Geraint' + bibtex: "@inproceedings{Mclean2009,\n abstract = {Phonetic symbols describe movements\ + \ of the vocal tract,tongue and lips, and are combined into complex movementsforming\ + \ the words of language. In music, vocables are wordsthat describe musical sounds,\ + \ by relating vocal movementsto articulations of a musical instrument. We posit\ + \ that vocable words allow the composers and listeners to engageclosely with dimensions\ + \ of timbre, and that vocables couldsee greater use in electronic music interfaces.\ + \ A preliminarysystem for controlling percussive physical modelling synthesis\ + \ with textual words is introduced, with particular application in expressive\ + \ specification of timbre during computer music performances.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Mclean, Alex and Wiggins, Geraint},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177629},\n issn = {2220-4806},\n keywords\ + \ = {nime09,timbre,vocable synthesis},\n pages = {276--279},\n title = {Words\ + \ , Movement and Timbre},\n url = {http://www.nime.org/proceedings/2009/nime2009_276.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175974 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177629 issn: 2220-4806 - pages: 55--60 - publisher: Queensland Conservatorium Griffith University - 
title: 'The `Virtualmonium'': an instrument for classical sound diffusion over a - virtual loudspeaker orchestra' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0012.pdf - year: 2016 + keywords: 'nime09,timbre,vocable synthesis' + pages: 276--279 + title: 'Words , Movement and Timbre' + url: http://www.nime.org/proceedings/2009/nime2009_276.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Arango2016 - abstract: "This paper reports the goals, procedures and recent activities of\nthe\ - \ Smartphone Ensemble, an academic group of musicians and designers exploring\n\ - mobile phones social mediation in musical contexts. The SE was created in the\n\ - Design and Creation program at the Caldas University in Manizales, Colombia and\n\ - includes six regular members. The group intends to enhance links among musicians,\n\ - and between the musicians and their audience, by leveraging the network\ncapabilities\ - \ and mobility of smart phones, and exploring the expressivity of\nurban space.\ - \ Through the creation of pieces and interventions that are related to\nurban\ - \ experiences, the Smartphone Ensemble envisions alternatives to the standard\n\ - musical performance space. In this regard, the performances intend to be urban\n\ - interventions, not traditional concerts, they progress according to previously\n\ - defined tours around the city that the group embarks while playing" - address: 'Brisbane, Australia' - author: Julian Jaramillo Arango and Daniel Melàn Giraldo - bibtex: "@inproceedings{Arango2016,\n abstract = {This paper reports the goals,\ - \ procedures and recent activities of\nthe Smartphone Ensemble, an academic group\ - \ of musicians and designers exploring\nmobile phones social mediation in musical\ - \ contexts. The SE was created in the\nDesign and Creation program at the Caldas\ - \ University in Manizales, Colombia and\nincludes six regular members. 
The group\ - \ intends to enhance links among musicians,\nand between the musicians and their\ - \ audience, by leveraging the network\ncapabilities and mobility of smart phones,\ - \ and exploring the expressivity of\nurban space. Through the creation of pieces\ - \ and interventions that are related to\nurban experiences, the Smartphone Ensemble\ - \ envisions alternatives to the standard\nmusical performance space. In this regard,\ - \ the performances intend to be urban\ninterventions, not traditional concerts,\ - \ they progress according to previously\ndefined tours around the city that the\ - \ group embarks while playing},\n address = {Brisbane, Australia},\n author =\ - \ {Julian Jaramillo Arango and Daniel Mel\\`{a}n Giraldo},\n booktitle = {Proceedings\ + ID: Fiebrink2009 + abstract: 'Supervised learning methods have long been used to allow musical interface + designers to generate new mappings by example. We propose a method for harnessing + machine learning algorithms within a radically interactive paradigm, in which + the designer may repeatedly generate examples, train a learner, evaluate outcomes, + and modify parameters in real-time within a single software environment. We describe + our meta-instrument, the Wekinator, which allows a user to engage in on-the-fly + learning using arbitrary control modalities and sound synthesis environments. + We provide details regarding the system implementation and discuss our experiences + using the Wekinator for experimentation and performance. ' + address: 'Pittsburgh, PA, United States' + author: 'Fiebrink, Rebecca and Trueman, Dan and Cook, Perry R.' + bibtex: "@inproceedings{Fiebrink2009,\n abstract = {Supervised learning methods\ + \ have long been used to allow musical interface designers to generate new mappings\ + \ by example. 
We propose a method for harnessing machine learning algorithms within\ + \ a radically interactive paradigm, in which the designer may repeatedly generate\ + \ examples, train a learner, evaluate outcomes, and modify parameters in real-time\ + \ within a single software environment. We describe our meta-instrument, the Wekinator,\ + \ which allows a user to engage in on-the-fly learning using arbitrary control\ + \ modalities and sound synthesis environments. We provide details regarding the\ + \ system implementation and discuss our experiences using the Wekinator for experimentation\ + \ and performance. },\n address = {Pittsburgh, PA, United States},\n author =\ + \ {Fiebrink, Rebecca and Trueman, Dan and Cook, Perry R.},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1175850},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {61--64},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {The Smartphone Ensemble. Exploring mobile computer mediation in collaborative\ - \ musical performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0013.pdf},\n\ - \ year = {2016}\n}\n" + \ doi = {10.5281/zenodo.1177513},\n issn = {2220-4806},\n keywords = {Machine\ + \ learning, mapping, tools. },\n pages = {280--285},\n title = {A Meta-Instrument\ + \ for Interactive, On-the-Fly Machine Learning},\n url = {http://www.nime.org/proceedings/2009/nime2009_280.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175850 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177513 issn: 2220-4806 - pages: 61--64 - publisher: Queensland Conservatorium Griffith University - title: The Smartphone Ensemble. 
Exploring mobile computer mediation in collaborative - musical performance - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0013.pdf - year: 2016 + keywords: 'Machine learning, mapping, tools. ' + pages: 280--285 + title: 'A Meta-Instrument for Interactive, On-the-Fly Machine Learning' + url: http://www.nime.org/proceedings/2009/nime2009_280.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Hofmann2016 - abstract: "Electronic pickup systems for acoustic instruments are often used\nin\ - \ popular and contemporary music performances because they allow amplification\n\ - and modification of a clean and direct signal. Strain gauge sensors on saxophone\n\ - and clarinet reeds have been shown to be a useful tool to gain insight into\n\ - tongue articulation during performance but also capture the reed vibrations. In\n\ - our previous design, we used a procedure with epoxy adhesive to glue the strain\n\ - gauge sensors to the flat side of the synthetic single reeds. The new design\n\ - integrates the sensor inside a synthetic reed, respectively between layers of\n\ - fibre polymer and wood. This allows an industrial production of sensor reeds.\n\ - Sensor reeds open up new possibilities to pick up woodwind instruments and to\n\ - analyse, to modify, and to amplify the signal. A signal-to-noise analysis of the\n\ - signals from both designs showed that a sensor, glued to the outside of the reed,\n\ - produced a cleaner signal." 
- address: 'Brisbane, Australia' - author: Alex Hofmann and Vasileios Chatziioannou and Alexander Mayer and Harry Hartmann - bibtex: "@inproceedings{Hofmann2016,\n abstract = {Electronic pickup systems for\ - \ acoustic instruments are often used\nin popular and contemporary music performances\ - \ because they allow amplification\nand modification of a clean and direct signal.\ - \ Strain gauge sensors on saxophone\nand clarinet reeds have been shown to be\ - \ a useful tool to gain insight into\ntongue articulation during performance but\ - \ also capture the reed vibrations. In\nour previous design, we used a procedure\ - \ with epoxy adhesive to glue the strain\ngauge sensors to the flat side of the\ - \ synthetic single reeds. The new design\nintegrates the sensor inside a synthetic\ - \ reed, respectively between layers of\nfibre polymer and wood. This allows an\ - \ industrial production of sensor reeds.\nSensor reeds open up new possibilities\ - \ to pick up woodwind instruments and to\nanalyse, to modify, and to amplify the\ - \ signal. A signal-to-noise analysis of the\nsignals from both designs showed\ - \ that a sensor, glued to the outside of the reed,\nproduced a cleaner signal.},\n\ - \ address = {Brisbane, Australia},\n author = {Alex Hofmann and Vasileios Chatziioannou\ - \ and Alexander Mayer and Harry Hartmann},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176028},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {65--68},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Development of\ - \ Fibre Polymer Sensor {Reed}s for Saxophone and Clarinet},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0014.pdf},\n year\ - \ = {2016}\n}\n" + ID: Schacher2009 + abstract: 'In this paper mappings and adaptation in the context of interactive sound + installations are discussed. 
Starting from an ecological perspective on non-expert + audience interaction a brief overview and discussion of mapping strategies with + a special focus on adaptive systems using machine learning algorithms is given. + An audio-visual interactive installation is analyzed and its implementation used + to illustrate the issues of audience engagement and to discuss the efficiency + of adaptive mappings. ' + address: 'Pittsburgh, PA, United States' + author: 'Schacher, Jan C.' + bibtex: "@inproceedings{Schacher2009,\n abstract = {In this paper mappings and adaptation\ + \ in the context of interactive sound installations are discussed. Starting from\ + \ an ecological perspective on non-expert audience interaction a brief overview\ + \ and discussion of mapping strategies with a special focus on adaptive systems\ + \ using machine learning algorithms is given. An audio-visual interactive installation\ + \ is analyzed and its implementation used to illustrate the issues of audience\ + \ engagement and to discuss the efficiency of adaptive mappings. 
},\n address\ + \ = {Pittsburgh, PA, United States},\n author = {Schacher, Jan C.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177667},\n issn = {2220-4806},\n keywords\ + \ = {Interaction, adaptive mapping, machine learning, audience engagement },\n\ + \ pages = {286--289},\n title = {Action and Perception in Interactive Sound Installations\ + \ : An Ecological Approach},\n url = {http://www.nime.org/proceedings/2009/nime2009_286.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176028 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177667 issn: 2220-4806 - pages: 65--68 - publisher: Queensland Conservatorium Griffith University - title: Development of Fibre Polymer Sensor Reeds for Saxophone and Clarinet - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0014.pdf - year: 2016 + keywords: 'Interaction, adaptive mapping, machine learning, audience engagement ' + pages: 286--289 + title: 'Action and Perception in Interactive Sound Installations : An Ecological + Approach' + url: http://www.nime.org/proceedings/2009/nime2009_286.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Kapur2016 - abstract: 'This paper presents two new musical robot systems and an accompanying - driver electronics array. These systems are designed to allow for modular extensibility - and ease of use with different instrument systems. The first system discussed - is MalletOTon, a mechatronic mallet instrument player that may be re-configured - to play a number of different instruments. Secondly, the Modulet mechatronic noisemakers - are presented. These instruments are discrete modules that may be installed throughout - a space in a wide variety of configurations. In addition to presenting the aforementioned - new instruments, the Novalis system is shown. 
Novalis is an open-ended driver - system for mechatronic instruments, designed to afford rapid deployment and modularity. - Where prior mechatronic instruments are often purpose-built, the robots and robot - electronics presented in this paper may be re-deployed in a wide-ranging, diverse - manner. Taken as a whole, the design practices discussed in this paper go toward - establishing a paradigm of modular and extensible mechatronic instrument development.' - address: 'Brisbane, Australia' - author: Ajay Kapur and Jim Murphy and Michael Darling and Eric Heep and Bruce Lott - and Ness Morris - bibtex: "@inproceedings{Kapur2016,\n abstract = {This paper presents two new musical\ - \ robot systems and an accompanying driver electronics array. These systems are\ - \ designed to allow for modular extensibility and ease of use with different instrument\ - \ systems. The first system discussed is MalletOTon, a mechatronic mallet instrument\ - \ player that may be re-configured to play a number of different instruments.\ - \ Secondly, the Modulet mechatronic noisemakers are presented. These instruments\ - \ are discrete modules that may be installed throughout a space in a wide variety\ - \ of configurations. In addition to presenting the aforementioned new instruments,\ - \ the Novalis system is shown. Novalis is an open-ended driver system for mechatronic\ - \ instruments, designed to afford rapid deployment and modularity. 
Where prior\ - \ mechatronic instruments are often purpose-built, the robots and robot electronics\ - \ presented in this paper may be re-deployed in a wide-ranging, diverse manner.\ - \ Taken as a whole, the design practices discussed in this paper go toward establishing\ - \ a paradigm of modular and extensible mechatronic instrument development.},\n\ - \ address = {Brisbane, Australia},\n author = {Ajay Kapur and Jim Murphy and Michael\ - \ Darling and Eric Heep and Bruce Lott and Ness Morris},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176050},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {69--72},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {MalletOTon and the Modulets: Modular and Extensible Musical Robots},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0015.pdf},\n\ - \ year = {2016}\n}\n" + ID: Kirk2009 + abstract: 'In this paper we describe and analyze The Argus Project, a sound installation + involving the real-time processing and spatialized projection of sound sources + from beneath a pond’s surface. The primary aim of The Argus Project is to project + the natural sound sources from below the pond’s surface while tracking the changes + in the environmental factors above the surface so as to map this data onto the + real-time audio processing. The project takes as its conceptual model that of + a feedback network, or, a process in which the factors that produce a result are + themselves modified and reinforced by that result. Examples are given of the compositional + process, the execution, and processing techniques.' 
+ address: 'Pittsburgh, PA, United States' + author: 'Kirk, Jonathon and Weisert, Lee' + bibtex: "@inproceedings{Kirk2009,\n abstract = {In this paper we describe and analyze\ + \ The Argus Project, a sound installation involving the real-time processing and\ + \ spatialized projection of sound sources from beneath a pond’s surface. The primary\ + \ aim of The Argus Project is to project the natural sound sources from below\ + \ the pond’s surface while tracking the changes in the environmental factors above\ + \ the surface so as to map this data onto the real-time audio processing. The\ + \ project takes as its conceptual model that of a feedback network, or, a process\ + \ in which the factors that produce a result are themselves modified and reinforced\ + \ by that result. Examples are given of the compositional process, the execution,\ + \ and processing techniques.},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Kirk, Jonathon and Weisert, Lee},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177605},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ + \ = {290--292},\n title = {The Argus Project : Underwater Soundscape Composition\ + \ with Laser- Controlled Modulation},\n url = {http://www.nime.org/proceedings/2009/nime2009_290.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176050 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177605 issn: 2220-4806 - pages: 69--72 - publisher: Queensland Conservatorium Griffith University - title: 'MalletOTon and the Modulets: Modular and Extensible Musical Robots' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0015.pdf - year: 2016 + keywords: nime09 + pages: 290--292 + title: 'The Argus Project : Underwater Soundscape Composition with Laser- Controlled + Modulation' + url: 
http://www.nime.org/proceedings/2009/nime2009_290.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Olson2016 - abstract: "Video games and music have influenced each other since the\nbeginning\ - \ of the consumer video game era. In particular the chiptune genre of\nmusic uses\ - \ sounds from 8-bit video games; these sounds have even found their way\ninto\ - \ contemporary popular music. However, in this genre, game sounds are arranged\n\ - using conventional musical interfaces, meaning the games themselves (their\nalgorithms,\ - \ design and interactivity) play no role in the creation of the music.\nThis paper\ - \ describes a new way of creating music with 8-bit games, by reverse\nengineering\ - \ and augmenting them with run-time scripts. A new API, Emstrument, is\npresented\ - \ which allows these scripts to send MIDI to music production software.\nThe end\ - \ result is game-derived musical interfaces any computer musician can use\nwith\ - \ their existing workflow. This enhances prior work in repurposing games as\n\ - musical interfaces by allowing musicians to use the original games instead of\n\ - having to build new versions with added musical capabilities.\nSeveral examples\ - \ of both new musical instruments and dynamic interactive musical\ncompositions\ - \ using Emstrument are presented, using iconic games from the 8-bit\nera." - address: 'Brisbane, Australia' - author: Ben Olson - bibtex: "@inproceedings{Olson2016,\n abstract = {Video games and music have influenced\ - \ each other since the\nbeginning of the consumer video game era. In particular\ - \ the chiptune genre of\nmusic uses sounds from 8-bit video games; these sounds\ - \ have even found their way\ninto contemporary popular music. 
However, in this\ - \ genre, game sounds are arranged\nusing conventional musical interfaces, meaning\ - \ the games themselves (their\nalgorithms, design and interactivity) play no role\ - \ in the creation of the music.\nThis paper describes a new way of creating music\ - \ with 8-bit games, by reverse\nengineering and augmenting them with run-time\ - \ scripts. A new API, Emstrument, is\npresented which allows these scripts to\ - \ send MIDI to music production software.\nThe end result is game-derived musical\ - \ interfaces any computer musician can use\nwith their existing workflow. This\ - \ enhances prior work in repurposing games as\nmusical interfaces by allowing\ - \ musicians to use the original games instead of\nhaving to build new versions\ - \ with added musical capabilities.\nSeveral examples of both new musical instruments\ - \ and dynamic interactive musical\ncompositions using Emstrument are presented,\ - \ using iconic games from the 8-bit\nera.},\n address = {Brisbane, Australia},\n\ - \ author = {Ben Olson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176100},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {73--77},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Transforming 8-Bit\ - \ Video Games into Musical Interfaces via Reverse Engineering and Augmentation},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0016.pdf},\n\ - \ year = {2016}\n}\n" + ID: StClair2009 + abstract: 'We describe a novel transformation of a playground - merry-go-round, + teeter-totter (also referred to as a see-saw), swings, and climbing structure + – from its traditional purpose to a collaborative and interactive musical performance + system by equipping key structures with sensors that communicate with a computer. 
+ A set of Max/ MSP patches translate the physical gestures of playground play into + a variety of performer-selected musical mappings. In addition to the electro-acoustic + interactivity, the climbing structure incorporates acoustic musical instruments.' + address: 'Pittsburgh, PA, United States' + author: 'St. Clair, Michael and Leitman, Sasha' + bibtex: "@inproceedings{StClair2009,\n abstract = {We describe a novel transformation\ + \ of a playground - merry-go-round, teeter-totter (also referred to as a see-saw),\ + \ swings, and climbing structure – from its traditional purpose to a collaborative\ + \ and interactive musical performance system by equipping key structures with\ + \ sensors that communicate with a computer. A set of Max/ MSP patches translate\ + \ the physical gestures of playground play into a variety of performer-selected\ + \ musical mappings. In addition to the electro-acoustic interactivity, the climbing\ + \ structure incorporates acoustic musical instruments.},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {St. 
Clair, Michael and Leitman, Sasha},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177685},\n issn = {2220-4806},\n keywords\ + \ = {Real-time, Music, Playground, Interactive, Installation, Radical Collaboration,\ + \ Play.},\n pages = {293--296},\n title = {PlaySoundGround : An Interactive Musical\ + \ Playground},\n url = {http://www.nime.org/proceedings/2009/nime2009_293.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176100 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177685 issn: 2220-4806 - pages: 73--77 - publisher: Queensland Conservatorium Griffith University - title: Transforming 8-Bit Video Games into Musical Interfaces via Reverse Engineering - and Augmentation - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0016.pdf - year: 2016 + keywords: 'Real-time, Music, Playground, Interactive, Installation, Radical Collaboration, + Play.' + pages: 293--296 + title: 'PlaySoundGround : An Interactive Musical Playground' + url: http://www.nime.org/proceedings/2009/nime2009_293.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Cherston2016 - abstract: |- - We present a sonification platform for generating audio driven by - real-time particle collision data from the ATLAS experiment at CERN. This paper - provides a description of the data-to-audio mapping interfaces supported by the - project's composition tool as well as a preliminary evaluation of the platform's - evolution to meet the aesthetic needs of vastly distinct musical styles and - presentation venues. Our work has been conducted in collaboration with the ATLAS - Outreach team and is part of a broad vision to better harness real-time sensor - data as a canvas for artistic expression. 
Data-driven streaming audio can be - treated as a reimagined form of live radio for which composers craft the - instruments but real-time particle collisions pluck the strings. - address: 'Brisbane, Australia' - author: Juliana Cherston and Ewan Hill and Steven Goldfarb and Joseph Paradiso - bibtex: "@inproceedings{Cherston2016,\n abstract = {We present a sonification platform\ - \ for generating audio driven by\nreal-time particle collision data from the ATLAS\ - \ experiment at CERN. This paper\nprovides a description of the data-to-audio\ - \ mapping interfaces supported by the\nproject's composition tool as well as a\ - \ preliminary evaluation of the platform's\nevolution to meet the aesthetic needs\ - \ of vastly distinct musical styles and\npresentation venues. Our work has been\ - \ conducted in collaboration with the ATLAS\nOutreach team and is part of a broad\ - \ vision to better harness real-time sensor\ndata as a canvas for artistic expression.\ - \ Data-driven streaming audio can be\ntreated as a reimagined form of live radio\ - \ for which composers craft the\ninstruments but real-time particle collisions\ - \ pluck the strings.},\n address = {Brisbane, Australia},\n author = {Juliana\ - \ Cherston and Ewan Hill and Steven Goldfarb and Joseph Paradiso},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176012},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {78--83},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Musician and Mega-Machine: Compositions Driven\ - \ by Real-Time Particle Collision Data from the ATLAS Detector},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0017.pdf},\n year\ - \ = {2016}\n}\n" + ID: Jones2009 + abstract: 'The Fragmented Orchestra is a distributed musical instrument which combines + live audio streams from geographically disparate sites, and 
granulates each according + to thespike timings of an artificial spiking neural network. Thispaper introduces + the work, outlining its historical context,technical architecture, neuronal model + and network infrastructure, making specific reference to modes of interactionwith + the public.' + address: 'Pittsburgh, PA, United States' + author: 'Jones, Daniel and Hodgson, Tim and Grant, Jane and Matthias, John and Outram, + Nicholas and Ryan, Nick' + bibtex: "@inproceedings{Jones2009,\n abstract = {The Fragmented Orchestra is a distributed\ + \ musical instrument which combines live audio streams from geographically disparate\ + \ sites, and granulates each according to thespike timings of an artificial spiking\ + \ neural network. Thispaper introduces the work, outlining its historical context,technical\ + \ architecture, neuronal model and network infrastructure, making specific reference\ + \ to modes of interactionwith the public.},\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Jones, Daniel and Hodgson, Tim and Grant, Jane and Matthias,\ + \ John and Outram, Nicholas and Ryan, Nick},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177587},\n issn = {2220-4806},\n keywords = {distributed,emergent,environmental,installation,neural\ + \ network,nime09,sound,streaming audio},\n pages = {297--302},\n title = {The\ + \ Fragmented Orchestra},\n url = {http://www.nime.org/proceedings/2009/nime2009_297.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176012 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177587 issn: 2220-4806 - pages: 78--83 - publisher: Queensland Conservatorium Griffith University - title: 'Musician and Mega-Machine: Compositions Driven by Real-Time Particle Collision - Data from the ATLAS Detector' - track: Papers - url: 
http://www.nime.org/proceedings/2016/nime2016_paper0017.pdf - year: 2016 + keywords: 'distributed,emergent,environmental,installation,neural network,nime09,sound,streaming + audio' + pages: 297--302 + title: The Fragmented Orchestra + url: http://www.nime.org/proceedings/2009/nime2009_297.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Lind2016 - abstract: "Digitalization has enabled material decoupling of sound from the\nphysical\ - \ devices traditionally used to conceive it. This paper reports an\nartistic exploration\ - \ of novel mappings between everyday objects and digital\nsound. The Wheel Quintet---a\ - \ novel musical instrument comprising four\nbicycle wheels and a skateboard---was\ - \ created using off-the-shelf\ncomponents and visual programming in Max/MSP. The\ - \ use of everyday objects sought\nto enable people to quickly master the instrument,\ - \ regardless of their musical\nbackgrounds, and collectively create polytempic\ - \ musical textures in a\nparticipatory art context. Applying an action research\ - \ approach, the paper\nexamines in detail two key cycles of planning, action,\ - \ and analysis related to\nthe instrument, involving an interactive museum exhibition\ - \ open to the public and\na concert hall performance conducted by an animated\ - \ music notation system.\nDrawing on insights from the study, the paper contributes\ - \ new knowledge\nconcerning the creation and use of novel interfaces for music\ - \ composition and\nperformance enabled by digitalization." - address: 'Brisbane, Australia' - author: Anders Lind and Daniel Nylén - bibtex: "@inproceedings{Lind2016,\n abstract = {Digitalization has enabled material\ - \ decoupling of sound from the\nphysical devices traditionally used to conceive\ - \ it. This paper reports an\nartistic exploration of novel mappings between everyday\ - \ objects and digital\nsound. 
The Wheel Quintet---a novel musical instrument comprising\ - \ four\nbicycle wheels and a skateboard---was created using off-the-shelf\ncomponents\ - \ and visual programming in Max/MSP. The use of everyday objects sought\nto enable\ - \ people to quickly master the instrument, regardless of their musical\nbackgrounds,\ - \ and collectively create polytempic musical textures in a\nparticipatory art\ - \ context. Applying an action research approach, the paper\nexamines in detail\ - \ two key cycles of planning, action, and analysis related to\nthe instrument,\ - \ involving an interactive museum exhibition open to the public and\na concert\ - \ hall performance conducted by an animated music notation system.\nDrawing on\ - \ insights from the study, the paper contributes new knowledge\nconcerning the\ - \ creation and use of novel interfaces for music composition and\nperformance\ - \ enabled by digitalization.},\n address = {Brisbane, Australia},\n author = {Anders\ - \ Lind and Daniel Nyl\\'{e}n},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176064},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {84--89},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Mapping Everyday\ - \ Objects to Digital Materiality in The Wheel Quintet: Polytempic Music and Participatory\ - \ Art},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0018.pdf},\n\ - \ year = {2016}\n}\n" + ID: Wang2009 + abstract: 'The Smule Ocarina is a wind instrument designed for the iPhone, fully + leveraging its wide array of technologies: microphone input (for breath input), + multitouch (for fingering), accelerometer, real-time sound synthesis, highperformance + graphics, GPS/location, and persistent data connection. 
In this mobile musical + artifact, the interactions of the ancient flute-like instrument are both preserved + and transformed via breath-control and multitouch finger-holes, while the onboard + global positioning and persistent data connection provide the opportunity to create + a new social experience, allowing the users of Ocarina to listen to one another. + In this way, Ocarina is also a type of social instrument that enables a different, + perhaps even magical, sense of global connectivity. ' + address: 'Pittsburgh, PA, United States' + author: 'Wang, Ge' + bibtex: "@inproceedings{Wang2009,\n abstract = {The Smule Ocarina is a wind instrument\ + \ designed for the iPhone, fully leveraging its wide array of technologies: microphone\ + \ input (for breath input), multitouch (for fingering), accelerometer, real-time\ + \ sound synthesis, highperformance graphics, GPS/location, and persistent data\ + \ connection. In this mobile musical artifact, the interactions of the ancient\ + \ flute-like instrument are both preserved and transformed via breath-control\ + \ and multitouch finger-holes, while the onboard global positioning and persistent\ + \ data connection provide the opportunity to create a new social experience, allowing\ + \ the users of Ocarina to listen to one another. In this way, Ocarina is also\ + \ a type of social instrument that enables a different, perhaps even magical,\ + \ sense of global connectivity. 
},\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Wang, Ge},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177697},\n\ + \ issn = {2220-4806},\n keywords = {chuck,design,in,in real-time,interface,iphone,mobile\ + \ music,multitouch,nime09,ocarina,pulsing waves,social,sonically and onscreen\ + \ and,sound synthesis takes place,the breath is visualized},\n pages = {303--307},\n\ + \ title = {Designing Smule's Ocarina : The iPhone's Magic Flute},\n url = {http://www.nime.org/proceedings/2009/nime2009_303.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176064 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177697 issn: 2220-4806 - pages: 84--89 - publisher: Queensland Conservatorium Griffith University - title: 'Mapping Everyday Objects to Digital Materiality in The Wheel Quintet: Polytempic - Music and Participatory Art' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0018.pdf - year: 2016 + keywords: 'chuck,design,in,in real-time,interface,iphone,mobile music,multitouch,nime09,ocarina,pulsing + waves,social,sonically and onscreen and,sound synthesis takes place,the breath + is visualized' + pages: 303--307 + title: 'Designing Smule''s Ocarina : The iPhone''s Magic Flute' + url: http://www.nime.org/proceedings/2009/nime2009_303.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Balandra2016 - abstract: 'An entertainment environment to enrich music listening experience is - purposed. This environment is composed of 3 modules: a MIDI player, a music animation - and a haptic module that translates the notes played by one instrument into a - resemblant vibration. To create the haptic vibration, the notes'' relative pitch - in the song are calculated, then these positions are mapped into the haptic signals'' - amplitude and frequency. 
Also, the envelope of the haptic signal is modified, - by using an ADSR filter, to have the same envelope as the audio signal. To evaluate - the perceived cross-modal similarity between users, two experiments were performed. - In both, the users used the complete entertainment environment to rank the similarity - between 3 different haptic signals, with triangular, square and analogue envelopes - and 4 different instruments in a classical song. The first experiment was performed - with the purposed amplitude and frequency technique, while the second experiment - was performed with constant frequency and amplitude. Results, show different envelope - user preferences. The square and triangular envelopes were preferred in the first - experiment, while only analogue envelopes were preferred in the second. This suggests - that the users'' envelope perception was masked by the changes in amplitude and - frequency.' - address: 'Brisbane, Australia' - author: Alfonso Balandra and Hironori Mitake and Shoichi Hasegawa - bibtex: "@inproceedings{Balandra2016,\n abstract = {An entertainment environment\ - \ to enrich music listening experience is purposed. This environment is composed\ - \ of 3 modules: a MIDI player, a music animation and a haptic module that translates\ - \ the notes played by one instrument into a resemblant vibration. To create the\ - \ haptic vibration, the notes' relative pitch in the song are calculated, then\ - \ these positions are mapped into the haptic signals' amplitude and frequency.\ - \ Also, the envelope of the haptic signal is modified, by using an ADSR filter,\ - \ to have the same envelope as the audio signal. To evaluate the perceived cross-modal\ - \ similarity between users, two experiments were performed. In both, the users\ - \ used the complete entertainment environment to rank the similarity between 3\ - \ different haptic signals, with triangular, square and analogue envelopes and\ - \ 4 different instruments in a classical song. 
The first experiment was performed\ - \ with the purposed amplitude and frequency technique, while the second experiment\ - \ was performed with constant frequency and amplitude. Results, show different\ - \ envelope user preferences. The square and triangular envelopes were preferred\ - \ in the first experiment, while only analogue envelopes were preferred in the\ - \ second. This suggests that the users' envelope perception was masked by the\ - \ changes in amplitude and frequency.},\n address = {Brisbane, Australia},\n author\ - \ = {Alfonso Balandra and Hironori Mitake and Shoichi Hasegawa},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1175968},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {90--95},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Haptic Music Player---Synthetic audio-tactile stimuli generation based\ - \ on the notes' pitch and instruments' envelope mapping},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0019.pdf},\n year\ - \ = {2016}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1175968 - isbn: 978-1-925455-13-7 - issn: 2220-4806 - pages: 90--95 - publisher: Queensland Conservatorium Griffith University - title: Haptic Music Player---Synthetic audio-tactile stimuli generation based on - the notes' pitch and instruments' envelope mapping - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0019.pdf - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: Huberth2016 - abstract: 'Notation systems are used in almost all fields, especially for the communication - and expression of ideas. This paper proposes and discusses a notation system for - Gametrak-based computer music instruments. 
The notation system''s design is informed - both by Western music notation and dance notation, as well as common mappings - used in laptop orchestras. It is designed to be sound-agnostic, primarily instructing - the performer in their motions. While the discussion of such a notation system - may be particularly timely due to the growing commercially-available 3D motion - tracking controllers, the notation system may prove especially useful in the context - of Gametrak and laptop orchestra, for which score-based representation can help - clarify performer interaction and serve as a teaching tool in documenting prior - work.' - address: 'Brisbane, Australia' - author: Madeline Huberth and Chryssie Nanou - bibtex: "@inproceedings{Huberth2016,\n abstract = {Notation systems are used in\ - \ almost all fields, especially for the communication and expression of ideas.\ - \ This paper proposes and discusses a notation system for Gametrak-based computer\ - \ music instruments. The notation system's design is informed both by Western\ - \ music notation and dance notation, as well as common mappings used in laptop\ - \ orchestras. It is designed to be sound-agnostic, primarily instructing the performer\ - \ in their motions. 
While the discussion of such a notation system may be particularly\ - \ timely due to the growing commercially-available 3D motion tracking controllers,\ - \ the notation system may prove especially useful in the context of Gametrak and\ - \ laptop orchestra, for which score-based representation can help clarify performer\ - \ interaction and serve as a teaching tool in documenting prior work.},\n address\ - \ = {Brisbane, Australia},\n author = {Madeline Huberth and Chryssie Nanou},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176034},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {96--105},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Notation for {3D} Motion Tracking Controllers:\ - \ A Gametrak Case Study},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0020.pdf},\n\ - \ year = {2016}\n}\n" + ID: Gillian2009a + abstract: 'This paper presents Scratch-Off, a new musical multiplayer DJ game that + has been designed for a mobile phone. We describe how the game is used as a test + platform for experimenting with various types of multimodal feedback. The game + uses movement gestures made by the players to scratch a record and control crossfades + between tracks, with the objective of the game to make the correct scratch at + the correct time in relation to the music. Gestures are detected using the devices + built-in tri-axis accelerometer and multi-touch screen display. The players receive + visual, audio and various types of vibrotactile feedback to help them make the + correct scratch on the beat of the music track. We also discuss the results of + a pilot study using this interface. 
' + address: 'Pittsburgh, PA, United States' + author: 'Gillian, Nicholas and O''Modhrain, Sile and Essl, Georg' + bibtex: "@inproceedings{Gillian2009a,\n abstract = {This paper presents {Scratch-Off},\ + \ a new musical multiplayer DJ game that has been designed for a mobile phone.\ + \ We describe how the game is used as a test platform for experimenting with various\ + \ types of multimodal feedback. The game uses movement gestures made by the players\ + \ to scratch a record and control crossfades between tracks, with the objective\ + \ of the game to make the correct scratch at the correct time in relation to the\ + \ music. Gestures are detected using the devices built-in tri-axis accelerometer\ + \ and multi-touch screen display. The players receive visual, audio and various\ + \ types of vibrotactile feedback to help them make the correct scratch on the\ + \ beat of the music track. We also discuss the results of a pilot study using\ + \ this interface. },\n address = {Pittsburgh, PA, United States},\n author = {Gillian,\ + \ Nicholas and O'Modhrain, Sile and Essl, Georg},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177553},\n issn = {2220-4806},\n keywords = {Mobile devices,\ + \ gesture, audio games. 
},\n pages = {308--311},\n title = {Scratch-Off : A Gesture\ + \ Based Mobile Music Game with Tactile Feedback},\n url = {http://www.nime.org/proceedings/2009/nime2009_308.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176034 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177553 issn: 2220-4806 - pages: 96--105 - publisher: Queensland Conservatorium Griffith University - title: 'Notation for 3D Motion Tracking Controllers: A Gametrak Case Study' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0020.pdf - year: 2016 + keywords: 'Mobile devices, gesture, audio games. ' + pages: 308--311 + title: 'Scratch-Off : A Gesture Based Mobile Music Game with Tactile Feedback' + url: http://www.nime.org/proceedings/2009/nime2009_308.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Cakmak2016 - abstract: "In this paper, we describe a novel multimedia system for networked\n\ - musical collaboration. Our system, called Monad, offers a 3D virtual environment\n\ - that can be shared by multiple participants to collaborate remotely on a musical\n\ - performance. With Monad, we explore how various features of this environment in\n\ - relation to game mechanics, network architecture, and audiovisual aesthetics can\n\ - be used to mitigate problems inherent to networked musical performance, such as\n\ - time delays, data loss, and reduced agency of users. Finally, we describe the\n\ - results of a series of qualitative user studies that illustrate the effectiveness\n\ - of some of our design decisions with two separate versions of Monad." - address: 'Brisbane, Australia' - author: Cem Cakmak and Anil Camci and Angus Forbes - bibtex: "@inproceedings{Cakmak2016,\n abstract = {In this paper, we describe a novel\ - \ multimedia system for networked\nmusical collaboration. 
Our system, called Monad,\ - \ offers a 3D virtual environment\nthat can be shared by multiple participants\ - \ to collaborate remotely on a musical\nperformance. With Monad, we explore how\ - \ various features of this environment in\nrelation to game mechanics, network\ - \ architecture, and audiovisual aesthetics can\nbe used to mitigate problems inherent\ - \ to networked musical performance, such as\ntime delays, data loss, and reduced\ - \ agency of users. Finally, we describe the\nresults of a series of qualitative\ - \ user studies that illustrate the effectiveness\nof some of our design decisions\ - \ with two separate versions of Monad.},\n address = {Brisbane, Australia},\n\ - \ author = {Cem Cakmak and Anil Camci and Angus Forbes},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176002},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {106--111},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Networked Virtual Environments as Collaborative Music Spaces},\n track\ - \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0022.pdf},\n\ - \ year = {2016}\n}\n" + ID: Weinberg2009 + abstract: 'ZooZBeat is a gesture-based mobile music studio. It is designed to provide + users with expressive and creative access to music making on the go. ZooZBeat + users shake the phone or tap the screen to enter notes. The result is quantized, + mapped onto a musical scale, and looped. Users can then use tilt and shake movements + to manipulate and share their creation in a group. Emphasis is placed on finding + intuitive metaphors for mobile music creation and maintaining a balance between + control and ease-of-use that allows non-musicians to begin creating music with + the application immediately. 
' + address: 'Pittsburgh, PA, United States' + author: 'Weinberg, Gil and Beck, Andrew and Godfrey, Mark' + bibtex: "@inproceedings{Weinberg2009,\n abstract = {ZooZBeat is a gesture-based\ + \ mobile music studio. It is designed to provide users with expressive and creative\ + \ access to music making on the go. ZooZBeat users shake the phone or tap the\ + \ screen to enter notes. The result is quantized, mapped onto a musical scale,\ + \ and looped. Users can then use tilt and shake movements to manipulate and share\ + \ their creation in a group. Emphasis is placed on finding intuitive metaphors\ + \ for mobile music creation and maintaining a balance between control and ease-of-use\ + \ that allows non-musicians to begin creating music with the application immediately.\ + \ },\n address = {Pittsburgh, PA, United States},\n author = {Weinberg, Gil and\ + \ Beck, Andrew and Godfrey, Mark},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177703},\n\ + \ issn = {2220-4806},\n keywords = {mobile music, gestural control },\n pages\ + \ = {312--315},\n title = {ZooZBeat : a Gesture-based Mobile Music Studio},\n\ + \ url = {http://www.nime.org/proceedings/2009/nime2009_312.pdf},\n year = {2009}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176002 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177703 issn: 2220-4806 - pages: 106--111 - publisher: Queensland Conservatorium Griffith University - title: Networked Virtual Environments as Collaborative Music Spaces - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0022.pdf - year: 2016 + keywords: 'mobile music, gestural control ' + pages: 312--315 + title: 'ZooZBeat : a Gesture-based Mobile Music Studio' + url: http://www.nime.org/proceedings/2009/nime2009_312.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Becking2016 - abstract: 
"Most instruments traditionally used to teach music in early\neducation,\ - \ like xylophones or flutes, encumber children with the additional\ndifficulty\ - \ of an unfamiliar and unnatural interface. The most simple expressive\ninteraction,\ - \ that even the smallest children use in order to make music, is\npounding at\ - \ surfaces. Through the design of an instrument with a simple\ninterface, like\ - \ a drum, but which produces a melodic sound, children can be\nprovided with an\ - \ easy and intuitive means to produce consonance. This should then\nbe further\ - \ complemented with information from analysis and interpretation of\nchildlike\ - \ gestures and dance moves, reflecting their natural understanding\nof musical\ - \ structure and motion. Based on these assumptions we propose a modular\nand reactive\ - \ system for dynamic composition with accessible interfaces, divided\ninto distinct\ - \ plugins usable in a standard digital audio workstation. This paper\ndescribes\ - \ our concept and how it can facilitate access to collaborative music\nmaking\ - \ for small children. A first prototypical implementation has been\ndesigned and\ - \ developed during the ongoing research project\nDrum-Dance-Music-Machine (DDMM),\ - \ a cooperation with the local social welfare\nassociation AWO Hagen and the chair\ - \ of musical education at the University of\nApplied Sciences Bielefeld." - address: 'Brisbane, Australia' - author: Dominic Becking and Christine Steinmeier and Philipp Kroos - bibtex: "@inproceedings{Becking2016,\n abstract = {Most instruments traditionally\ - \ used to teach music in early\neducation, like xylophones or flutes, encumber\ - \ children with the additional\ndifficulty of an unfamiliar and unnatural interface.\ - \ The most simple expressive\ninteraction, that even the smallest children use\ - \ in order to make music, is\npounding at surfaces. 
Through the design of an instrument\ - \ with a simple\ninterface, like a drum, but which produces a melodic sound, children\ - \ can be\nprovided with an easy and intuitive means to produce consonance. This\ - \ should then\nbe further complemented with information from analysis and interpretation\ - \ of\nchildlike gestures and dance moves, reflecting their natural understanding\n\ - of musical structure and motion. Based on these assumptions we propose a modular\n\ - and reactive system for dynamic composition with accessible interfaces, divided\n\ - into distinct plugins usable in a standard digital audio workstation. This paper\n\ - describes our concept and how it can facilitate access to collaborative music\n\ - making for small children. A first prototypical implementation has been\ndesigned\ - \ and developed during the ongoing research project\nDrum-Dance-Music-Machine\ - \ (DDMM), a cooperation with the local social welfare\nassociation AWO Hagen and\ - \ the chair of musical education at the University of\nApplied Sciences Bielefeld.},\n\ - \ address = {Brisbane, Australia},\n author = {Dominic Becking and Christine Steinmeier\ - \ and Philipp Kroos},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1175980},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {112--117},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Drum-Dance-Music-Machine:\ - \ Construction of a Technical Toolset for Low-Threshold Access to Collaborative\ - \ Musical Performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0023.pdf},\n\ - \ year = {2016}\n}\n" + ID: Bianchi2009 + abstract: 'It has been shown that collaborative musical interfaces encourage novice + users to explore the sound space and promote their participation as music performers. 
+ Nevertheless, such interfaces are generally physically situated and can limit + the possibility of movements on the stage, a critical factor in live music performance. + In this paper we introduce the Drummer, a networked digital musical interface + that allows multiple performers to design and play drum kits simultaneously while, + at the same time, keeping their ability to freely move on the stage. The system + consists of multiple Nintendo DS clients with an intuitive, user-configurable + interface and a server computer which plays drum sounds. The Drummer Machine, + a small piece of hardware to augment the performance of the Drummer, is also introduced. ' + address: 'Pittsburgh, PA, United States' + author: 'Bianchi, Andrea and Yeo, Woon Seung' + bibtex: "@inproceedings{Bianchi2009,\n abstract = {It has been shown that collaborative\ + \ musical interfaces encourage novice users to explore the sound space and promote\ + \ their participation as music performers. Nevertheless, such interfaces are generally\ + \ physically situated and can limit the possibility of movements on the stage,\ + \ a critical factor in live music performance. In this paper we introduce the\ + \ Drummer, a networked digital musical interface that allows multiple performers\ + \ to design and play drum kits simultaneously while, at the same time, keeping\ + \ their ability to freely move on the stage. The system consists of multiple Nintendo\ + \ DS clients with an intuitive, user-configurable interface and a server computer\ + \ which plays drum sounds. The Drummer Machine, a small piece of hardware to augment\ + \ the performance of the Drummer, is also introduced. 
},\n address = {Pittsburgh,\ + \ PA, United States},\n author = {Bianchi, Andrea and Yeo, Woon Seung},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177483},\n issn = {2220-4806},\n keywords\ + \ = {collaborative interface, multiplayer, musical expression, musical control,\ + \ game control, Nintendo DS.},\n pages = {316--319},\n title = {The Drummer :\ + \ a Collaborative Musical Interface with Mobility},\n url = {http://www.nime.org/proceedings/2009/nime2009_316.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175980 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177483 issn: 2220-4806 - pages: 112--117 - publisher: Queensland Conservatorium Griffith University - title: 'Drum-Dance-Music-Machine: Construction of a Technical Toolset for Low-Threshold - Access to Collaborative Musical Performance' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0023.pdf - year: 2016 + keywords: 'collaborative interface, multiplayer, musical expression, musical control, + game control, Nintendo DS.' + pages: 316--319 + title: 'The Drummer : a Collaborative Musical Interface with Mobility' + url: http://www.nime.org/proceedings/2009/nime2009_316.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Leitman2016 - abstract: "Music Maker is a free online resource that provides files for 3D\nprinting\ - \ woodwind and brass mouthpieces and tutorials for using those mouthpieces\nto\ - \ learn about acoustics and music. The mouthpieces are designed to fit into\n\ - standard plumbing and automobile parts that can be easily purchased at home\n\ - improvement and automotive stores. 
The result is a musical tool that can be used\n\ - as simply as a set of building blocks to bridge the gap between our increasingly\n\ - digital world of fabrication and the real-world materials that make up our daily\n\ - lives.\nAn increasing number of schools, libraries and community groups are purchasing\ - \ 3D\nprinters but many are still struggling to create engaging and relevant curriculum\n\ - that ties into academic subjects. Making new musical instruments is a fantastic\n\ - way to learn about acoustics, physics and mathematics." - address: 'Brisbane, Australia' - author: Sasha Leitman and John Granzow - bibtex: "@inproceedings{Leitman2016,\n abstract = {Music Maker is a free online\ - \ resource that provides files for 3D\nprinting woodwind and brass mouthpieces\ - \ and tutorials for using those mouthpieces\nto learn about acoustics and music.\ - \ The mouthpieces are designed to fit into\nstandard plumbing and automobile parts\ - \ that can be easily purchased at home\nimprovement and automotive stores. The\ - \ result is a musical tool that can be used\nas simply as a set of building blocks\ - \ to bridge the gap between our increasingly\ndigital world of fabrication and\ - \ the real-world materials that make up our daily\nlives.\nAn increasing number\ - \ of schools, libraries and community groups are purchasing 3D\nprinters but many\ - \ are still struggling to create engaging and relevant curriculum\nthat ties into\ - \ academic subjects. 
Making new musical instruments is a fantastic\nway to learn\ - \ about acoustics, physics and mathematics.},\n address = {Brisbane, Australia},\n\ - \ author = {Sasha Leitman and John Granzow},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176062},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {118--121},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Music Maker: 3d Printing and Acoustics Curriculum},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0024.pdf},\n year\ - \ = {2016}\n}\n" + ID: Wechsler2009 + address: 'Pittsburgh, PA, United States' + author: 'Wechsler, Robert' + bibtex: "@inproceedings{Wechsler2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Wechsler, Robert},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177701},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {320--320},\n title = {The\ + \ Oklo Phenomenon},\n url = {http://www.nime.org/proceedings/2009/nime2009_320.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176062 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177701 issn: 2220-4806 - pages: 118--121 - publisher: Queensland Conservatorium Griffith University - title: 'Music Maker: 3d Printing and Acoustics Curriculum' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0024.pdf - year: 2016 + keywords: nime09 + pages: 320--320 + title: The Oklo Phenomenon + url: http://www.nime.org/proceedings/2009/nime2009_320.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Sello2016 - abstract: |- - This paper introduces the Hexenkessel---an augmented musical - instrument for interactive multimedia arts. 
The Hexenkessel is a classical - timpani with its drumhead acting as a tangible user interface for expressive - multimedia performances on stage. - address: 'Brisbane, Australia' - author: Jacob T. Sello - bibtex: "@inproceedings{Sello2016,\n abstract = {This paper introduces the Hexenkessel---an\ - \ augmented musical\ninstrument for interactive multimedia arts. The Hexenkessel\ - \ is a classical\ntimpani with its drumhead acting as a tangible user interface\ - \ for expressive\nmultimedia performances on stage.},\n address = {Brisbane, Australia},\n\ - \ author = {Jacob T. Sello},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176118},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {122--131},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {The Hexenkessel:\ - \ A Hybrid Musical Instrument for Multimedia Performances},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0025.pdf},\n year\ - \ = {2016}\n}\n" + ID: Lieberman2009 + address: 'Pittsburgh, PA, United States' + author: 'Lieberman, David' + bibtex: "@inproceedings{Lieberman2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Lieberman, David},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177619},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {321--321},\n title = {Anigraphical\ + \ Etude 9},\n url = {http://www.nime.org/proceedings/2009/nime2009_321.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176118 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177619 issn: 2220-4806 - pages: 122--131 - publisher: Queensland Conservatorium Griffith University - title: 'The Hexenkessel: A Hybrid Musical Instrument for Multimedia 
Performances' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0025.pdf - year: 2016 + keywords: nime09 + pages: 321--321 + title: Anigraphical Etude 9 + url: http://www.nime.org/proceedings/2009/nime2009_321.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Lnicode228hdeoja2016 - abstract: "This paper presents an ongoing project for augmenting acoustic\ninstruments\ - \ with active acoustics. Active acoustics are defined as audio-rate\nvibration\ - \ driven into the instruments physical structure, inducing air-borne\nsound output.\ - \ The instrument's acoustic sound is thus doubled by an\nelectronic soundscape\ - \ radiating from the same source. The article is centered on\na case study on\ - \ two guitars, one with hexaphonic sound capture and the other with\nmonophonic\ - \ pickup. The article discusses the design, implementation, acoustics,\nsound\ - \ capture and processing of an active acoustic instrument, as well as\ngestural\ - \ control using the Leap Motion sensor. Extensions towards other\ninstruments\ - \ are presented, in connection with related artistic projects and\n`electronic\ - \ chamber music' aesthetics." - address: 'Brisbane, Australia' - author: Otso Lähdeoja - bibtex: "@inproceedings{Lnicode228hdeoja2016,\n abstract = {This paper presents\ - \ an ongoing project for augmenting acoustic\ninstruments with active acoustics.\ - \ Active acoustics are defined as audio-rate\nvibration driven into the instruments\ - \ physical structure, inducing air-borne\nsound output. The instrument's acoustic\ - \ sound is thus doubled by an\nelectronic soundscape radiating from the same source.\ - \ The article is centered on\na case study on two guitars, one with hexaphonic\ - \ sound capture and the other with\nmonophonic pickup. 
The article discusses the\ - \ design, implementation, acoustics,\nsound capture and processing of an active\ - \ acoustic instrument, as well as\ngestural control using the Leap Motion sensor.\ - \ Extensions towards other\ninstruments are presented, in connection with related\ - \ artistic projects and\n`electronic chamber music' aesthetics.},\n address =\ - \ {Brisbane, Australia},\n author = {Otso L\\''{a}hdeoja},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176054},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {132--136},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Active Acoustic Instruments for Electronic Chamber Music},\n track\ - \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0027.pdf},\n\ - \ year = {2016}\n}\n" + ID: Hong2009 + address: 'Pittsburgh, PA, United States' + author: 'Hong, Min Eui' + bibtex: "@inproceedings{Hong2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Hong, Min Eui},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177577},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {322--322},\n title = {Cosmic\ + \ Strings II},\n url = {http://www.nime.org/proceedings/2009/nime2009_322.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176054 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177577 issn: 2220-4806 - pages: 132--136 - publisher: Queensland Conservatorium Griffith University - title: Active Acoustic Instruments for Electronic Chamber Music - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0027.pdf - year: 2016 + keywords: nime09 + pages: 322--322 + title: Cosmic Strings II + url: 
http://www.nime.org/proceedings/2009/nime2009_322.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Lynch2016 - abstract: "We present a composition framework that facilitates novel musical\nmappings\ - \ for large-scale distributed networks of environmental sensors. A library\nof\ - \ C-externals called ChainFlow for the graphical programming language Max/MSP\n\ - that provides an interface to real-time and historical data for large sensor\n\ - deployments was designed and implemented. This library along with spatialized\n\ - audio techniques were used to create immersive musical compositions which can\ - \ be\npresented on their own or complemented by a graphical 3D virtual world.\ - \ Musical\nworks driven by a sensor network deployed in a wetland restoration\ - \ project called\nTidmarsh are presented as case studies in augmented presence\ - \ through musical\nmapping." - address: 'Brisbane, Australia' - author: Evan Lynch and Joseph Paradiso - bibtex: "@inproceedings{Lynch2016,\n abstract = {We present a composition framework\ - \ that facilitates novel musical\nmappings for large-scale distributed networks\ - \ of environmental sensors. A library\nof C-externals called ChainFlow for the\ - \ graphical programming language Max/MSP\nthat provides an interface to real-time\ - \ and historical data for large sensor\ndeployments was designed and implemented.\ - \ This library along with spatialized\naudio techniques were used to create immersive\ - \ musical compositions which can be\npresented on their own or complemented by\ - \ a graphical 3D virtual world. 
Musical\nworks driven by a sensor network deployed\ - \ in a wetland restoration project called\nTidmarsh are presented as case studies\ - \ in augmented presence through musical\nmapping.},\n address = {Brisbane, Australia},\n\ - \ author = {Evan Lynch and Joseph Paradiso},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176074},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {137--142},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {SensorChimes: Musical Mapping for Sensor Networks},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0028.pdf},\n year\ - \ = {2016}\n}\n" + ID: Rogers2009 + address: 'Pittsburgh, PA, United States' + author: 'Rogers, Troy and Kemper, Steven and Barton, Scott' + bibtex: "@inproceedings{Rogers2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Rogers, Troy and Kemper, Steven and Barton, Scott},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177665},\n issn = {2220-4806},\n keywords = {nime09},\n\ + \ pages = {323--323},\n title = {Study no. 1 for {PAM} and MADI},\n url = {http://www.nime.org/proceedings/2009/nime2009_323.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176074 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177665 issn: 2220-4806 - pages: 137--142 - publisher: Queensland Conservatorium Griffith University - title: 'SensorChimes: Musical Mapping for Sensor Networks' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0028.pdf - year: 2016 + keywords: nime09 + pages: 323--323 + title: 'Study no. 
1 for {PAM} and MADI' + url: http://www.nime.org/proceedings/2009/nime2009_323.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Nakanishi2016 - abstract: "NAKANISYNTH is a synthesiser application available on iOS devices\nthat\ - \ provides a simple and intuitive interface, allowing users to produce sound\n\ - loops by freehand drawing sound waves and envelope curves. The interface provides\n\ - a simple way of interacting: the only input required involves drawing two\nwaveforms,\ - \ meaning that users can easily produce various sounds intuitively\nwithout the\ - \ need for complex manipulation. The application's interface comprises\nof an\ - \ interchangeable ribbon and keyboard feature, plus two panels where users\ncan\ - \ edit waveforms, allowing users to make sounds. This simple approach to the\n\ - interface means that it is easy for users to understand the relationship between\n\ - a waveform and the sound that it produces. " - address: 'Brisbane, Australia' - author: Kyosuke Nakanishi and Paul Haimes and Tetsuaki Baba and Kumiko Kushiyama - bibtex: "@inproceedings{Nakanishi2016,\n abstract = {NAKANISYNTH is a synthesiser\ - \ application available on iOS devices\nthat provides a simple and intuitive interface,\ - \ allowing users to produce sound\nloops by freehand drawing sound waves and envelope\ - \ curves. The interface provides\na simple way of interacting: the only input\ - \ required involves drawing two\nwaveforms, meaning that users can easily produce\ - \ various sounds intuitively\nwithout the need for complex manipulation. The application's\ - \ interface comprises\nof an interchangeable ribbon and keyboard feature, plus\ - \ two panels where users\ncan edit waveforms, allowing users to make sounds. This\ - \ simple approach to the\ninterface means that it is easy for users to understand\ - \ the relationship between\na waveform and the sound that it produces. 
},\n address\ - \ = {Brisbane, Australia},\n author = {Kyosuke Nakanishi and Paul Haimes and Tetsuaki\ - \ Baba and Kumiko Kushiyama},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176086},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {143--145},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {NAKANISYNTH: An\ - \ Intuitive Freehand Drawing Waveform Synthesiser Application for iOS Devices},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0029.pdf},\n\ - \ year = {2016}\n}\n" + ID: Paine2009 + address: 'Pittsburgh, PA, United States' + author: 'Paine, Garth and Atherton, Michael' + bibtex: "@inproceedings{Paine2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Paine, Garth and Atherton, Michael},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177651},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ + \ = {324--324},\n title = {Fue Sho -- Electrofusion},\n url = {http://www.nime.org/proceedings/2009/nime2009_324.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176086 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177651 issn: 2220-4806 - pages: 143--145 - publisher: Queensland Conservatorium Griffith University - title: 'NAKANISYNTH: An Intuitive Freehand Drawing Waveform Synthesiser Application - for iOS Devices' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0029.pdf - year: 2016 + keywords: nime09 + pages: 324--324 + title: Fue Sho -- Electrofusion + url: http://www.nime.org/proceedings/2009/nime2009_324.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Vindriis2016 - abstract: "StrumBot is a novel standalone six stringed robotic guitar\nconsisting\ - 
\ of mechanisms designed to enable musical expressivity and minimise\nacoustic\ - \ noise. It is desirable for less than 60 dBA of noise at 1 m to be\nemitted to\ - \ allow StrumBot to play in intimate venues such as cafés or\nrestaurants without\ - \ loud motor noises detracting from the musical experience.\nStrumBot improves\ - \ upon previous RMI's by allowing additional expressive\nopportunities for a composer\ - \ to utilise. StrumBot can perform slides, vibrato,\nmuting techniques, pitch\ - \ bends, pluck power variances, timbre control, complex\nchords and fast strumming\ - \ patterns.\nA MIDI input allows commercial or custom controllers to operate StrumBot.\ - \ Novel\nnote allocation algorithms were created to allow a single MIDI stream\ - \ of notes to\nbe allocated across the six guitar strings.\nLatency measurements\ - \ from MIDI input to string pluck are as low as 40 ms for a\nbest case scenario\ - \ strum, allowing StrumBot to accompany a live musician with\nminimal audible\ - \ delay.\nA relay based loop switcher is incorporated, allowing StrumBot to activate\n\ - standard commercial guitar pedals based on a MIDI instruction. " - address: 'Brisbane, Australia' - author: Richard Vindriis and Dale Carnegie - bibtex: "@inproceedings{Vindriis2016,\n abstract = {StrumBot is a novel standalone\ - \ six stringed robotic guitar\nconsisting of mechanisms designed to enable musical\ - \ expressivity and minimise\nacoustic noise. It is desirable for less than 60\ - \ dBA of noise at 1 m to be\nemitted to allow StrumBot to play in intimate venues\ - \ such as caf\\'{e}s or\nrestaurants without loud motor noises detracting from\ - \ the musical experience.\nStrumBot improves upon previous RMI's by allowing additional\ - \ expressive\nopportunities for a composer to utilise. 
StrumBot can perform slides,\ - \ vibrato,\nmuting techniques, pitch bends, pluck power variances, timbre control,\ - \ complex\nchords and fast strumming patterns.\nA MIDI input allows commercial\ - \ or custom controllers to operate StrumBot. Novel\nnote allocation algorithms\ - \ were created to allow a single MIDI stream of notes to\nbe allocated across\ - \ the six guitar strings.\nLatency measurements from MIDI input to string pluck\ - \ are as low as 40 ms for a\nbest case scenario strum, allowing StrumBot to accompany\ - \ a live musician with\nminimal audible delay.\nA relay based loop switcher is\ - \ incorporated, allowing StrumBot to activate\nstandard commercial guitar pedals\ - \ based on a MIDI instruction. },\n address = {Brisbane, Australia},\n author\ - \ = {Richard Vindriis and Dale Carnegie},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176135},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {146--151},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {StrumBot---An\ - \ Overview of a Strumming Guitar Robot},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0030.pdf},\n\ - \ year = {2016}\n}\n" + ID: Barri2009a + address: 'Pittsburgh, PA, United States' + author: 'Barri, Tarik' + bibtex: "@inproceedings{Barri2009a,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Barri, Tarik},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177475},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {325--325},\n title = {Versum\ + \ -- Fluor},\n url = {http://www.nime.org/proceedings/2009/nime2009_325.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176135 - isbn: 978-1-925455-13-7 + doi: 
10.5281/zenodo.1177475 issn: 2220-4806 - pages: 146--151 - publisher: Queensland Conservatorium Griffith University - title: StrumBot---An Overview of a Strumming Guitar Robot - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0030.pdf - year: 2016 + keywords: nime09 + pages: 325--325 + title: Versum -- Fluor + url: http://www.nime.org/proceedings/2009/nime2009_325.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Shaw2016 - abstract: "This paper describes a long term, collaborative project Sound\nSpaces.\ - \ Within this project we creatively investigated various environments and\nbuilt\ - \ a collection of artworks in response to material gathered through a number\n\ - of practical field visits. Our responses were presented in numerous,\nidiosyncratic\ - \ ways and took shape through a number of concerted making\nactivities. The work\ - \ was conducted both in and with the public, allowing\nparticipants to inform\ - \ the creative decisions made throughout the project as well\nas experiencing\ - \ the building of the artworks. Within this essay we report on our\nprocess, presentation\ - \ and offer alternative methods for collecting material and\npresenting representations\ - \ of space. We describe the many responses made during\nour time and related these\ - \ to research concerns relevant to the NIME community.\nWe conclude with our findings\ - \ and, through the production of an annotated\nportfolio, offer our main emerging\ - \ themes as points of discussion. " - address: 'Brisbane, Australia' - author: Tim Shaw and Simon Bowen and John Bowers - bibtex: "@inproceedings{Shaw2016,\n abstract = {This paper describes a long term,\ - \ collaborative project Sound\nSpaces. Within this project we creatively investigated\ - \ various environments and\nbuilt a collection of artworks in response to material\ - \ gathered through a number\nof practical field visits. 
Our responses were presented\ - \ in numerous,\nidiosyncratic ways and took shape through a number of concerted\ - \ making\nactivities. The work was conducted both in and with the public, allowing\n\ - participants to inform the creative decisions made throughout the project as well\n\ - as experiencing the building of the artworks. Within this essay we report on our\n\ - process, presentation and offer alternative methods for collecting material and\n\ - presenting representations of space. We describe the many responses made during\n\ - our time and related these to research concerns relevant to the NIME community.\n\ - We conclude with our findings and, through the production of an annotated\nportfolio,\ - \ offer our main emerging themes as points of discussion. },\n address = {Brisbane,\ - \ Australia},\n author = {Tim Shaw and Simon Bowen and John Bowers},\n booktitle\ + ID: Miyama2009 + address: 'Pittsburgh, PA, United States' + author: 'Miyama, Chikashi' + bibtex: "@inproceedings{Miyama2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Miyama, Chikashi},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177633},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {326--326},\n title = {Angry\ + \ Sparrow},\n url = {http://www.nime.org/proceedings/2009/nime2009_326.pdf},\n\ + \ year = {2009}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177633 + issn: 2220-4806 + keywords: nime09 + pages: 326--326 + title: Angry Sparrow + url: http://www.nime.org/proceedings/2009/nime2009_326.pdf + year: 2009 + + +- ENTRYTYPE: inproceedings + ID: Lyon2009 + address: 'Pittsburgh, PA, United States' + author: 'Lyon, Eric and Knapp, Benjamin and Ouzounian, Gascia' + bibtex: "@inproceedings{Lyon2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Lyon, Eric and Knapp, Benjamin 
and Ouzounian, Gascia},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176122},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {152--157},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Unfoldings: Multiple Explorations of Sound\ - \ and Space},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0031.pdf},\n\ - \ year = {2016}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1177621},\n issn = {2220-4806},\n keywords\ + \ = {nime09},\n pages = {327--327},\n title = {Biomuse Trio},\n url = {http://www.nime.org/proceedings/2009/nime2009_327.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176122 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177621 issn: 2220-4806 - pages: 152--157 - publisher: Queensland Conservatorium Griffith University - title: 'Unfoldings: Multiple Explorations of Sound and Space' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0031.pdf - year: 2016 + keywords: nime09 + pages: 327--327 + title: Biomuse Trio + url: http://www.nime.org/proceedings/2009/nime2009_327.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Rieger2016 - abstract: "The Driftwood is a maneuverable sculptural instrument &\ncontroller.\ - \ Tactilely, it is a micro-terrain one can explore with the hands as\nwith the\ - \ ears. Closed circuit sensors, moving wooden parts and Piezo microphones\nare\ - \ discussed in the design phase alongside background and musical implementation\n\ - concepts. Electronics and nature converge in this instrument harmoniously\nreferencing\ - \ our changing world and environment. When engaging with the sonic\nsculpture\ - \ silent objects become audible and rest-wood is venerated. 
It is\nrevealed to\ - \ the musician interacting with Driftwood that our actions intervene\ndirectly\ - \ with issues relating to sustainability and the amount of value we place\non\ - \ the world we live in. Every scrap of wood was once a tree, Driftwood reminds\n\ - us of this in a multi-sensory playing experience. The Driftwood proposes a\nreinterpretation\ - \ of the process of music creation, awareness and expression." - address: 'Brisbane, Australia' - author: Alexandra Rieger and Spencer Topel - bibtex: "@inproceedings{Rieger2016,\n abstract = {The Driftwood is a maneuverable\ - \ sculptural instrument &\ncontroller. Tactilely, it is a micro-terrain one can\ - \ explore with the hands as\nwith the ears. Closed circuit sensors, moving wooden\ - \ parts and Piezo microphones\nare discussed in the design phase alongside background\ - \ and musical implementation\nconcepts. Electronics and nature converge in this\ - \ instrument harmoniously\nreferencing our changing world and environment. When\ - \ engaging with the sonic\nsculpture silent objects become audible and rest-wood\ - \ is venerated. It is\nrevealed to the musician interacting with Driftwood that\ - \ our actions intervene\ndirectly with issues relating to sustainability and the\ - \ amount of value we place\non the world we live in. 
Every scrap of wood was once\ - \ a tree, Driftwood reminds\nus of this in a multi-sensory playing experience.\ - \ The Driftwood proposes a\nreinterpretation of the process of music creation,\ - \ awareness and expression.},\n address = {Brisbane, Australia},\n author = {Alexandra\ - \ Rieger and Spencer Topel},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176110},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {158--159},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Driftwood: Redefining\ - \ Sound Sculpture Controllers},\n track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0032.pdf},\n\ - \ year = {2016}\n}\n" + ID: Goto2009 + address: 'Pittsburgh, PA, United States' + author: 'Goto, Suguru' + bibtex: "@inproceedings{Goto2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Goto, Suguru},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177557},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {328--328},\n title = {BodyJack},\n\ + \ url = {http://www.nime.org/proceedings/2009/nime2009_328.pdf},\n year = {2009}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176110 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177557 issn: 2220-4806 - pages: 158--159 - publisher: Queensland Conservatorium Griffith University - title: 'Driftwood: Redefining Sound Sculpture Controllers' - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper0032.pdf - year: 2016 + keywords: nime09 + pages: 328--328 + title: BodyJack + url: http://www.nime.org/proceedings/2009/nime2009_328.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Kleinberger2016 - abstract: "The following paper documents the prototype 
of a musical door that\n\ - interactively plays sounds, melodies, and sound textures when in use. We took\ - \ the\nnatural interactions people have with doors---grabbing and turning\nthe\ - \ knob and pushing and puling motions---and turned them into\nmusical activities.\ - \ The idea behind this project comes from the fact that the\nactivity of using\ - \ a door is almost always accompanied by a sound that is\ngenerally ignored by\ - \ the user. We believe that this sound can be considered\nmusically rich and expressive\ - \ because each door has specific sound\ncharacteristics and each person makes\ - \ it sound slightly different. By augmenting\nthe door to create an unexpected\ - \ sound, this project encourages us to listen to\nour daily lives with a musician's\ - \ critical ear, and reminds us of the musicality\nof our everyday activities." - address: 'Brisbane, Australia' - author: Rebecca Kleinberger and Akito van Troyer - bibtex: "@inproceedings{Kleinberger2016,\n abstract = {The following paper documents\ - \ the prototype of a musical door that\ninteractively plays sounds, melodies,\ - \ and sound textures when in use. We took the\nnatural interactions people have\ - \ with doors---grabbing and turning\nthe knob and pushing and puling motions---and\ - \ turned them into\nmusical activities. The idea behind this project comes from\ - \ the fact that the\nactivity of using a door is almost always accompanied by\ - \ a sound that is\ngenerally ignored by the user. We believe that this sound can\ - \ be considered\nmusically rich and expressive because each door has specific\ - \ sound\ncharacteristics and each person makes it sound slightly different. 
By\ - \ augmenting\nthe door to create an unexpected sound, this project encourages\ - \ us to listen to\nour daily lives with a musician's critical ear, and reminds\ - \ us of the musicality\nof our everyday activities.},\n address = {Brisbane, Australia},\n\ - \ author = {Rebecca Kleinberger and Akito van Troyer},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176052},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {160--161},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Dooremi: a Doorway to Music},\n track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0033.pdf},\n\ - \ year = {2016}\n}\n" + ID: Baalman2009 + address: 'Pittsburgh, PA, United States' + author: 'Baalman, Marije A.' + bibtex: "@inproceedings{Baalman2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Baalman, Marije A.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177469},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {329--329},\n title = {Code\ + \ LiveCode Live, or livecode Embodied},\n url = {http://www.nime.org/proceedings/2009/nime2009_329.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176052 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177469 issn: 2220-4806 - pages: 160--161 - publisher: Queensland Conservatorium Griffith University - title: 'Dooremi: a Doorway to Music' - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper0033.pdf - year: 2016 + keywords: nime09 + pages: 329--329 + title: 'Code LiveCode Live, or livecode Embodied' + url: http://www.nime.org/proceedings/2009/nime2009_329.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Normark2016 - abstract: "This 
paper describes how a classical instrument, the clarinet, can\n\ - be extended with modern technology to create a new and easy to use augmented\n\ - instrument. The paper describes the design process, technical details and how\ - \ a\nmusician can use the instrument. The clarinet bell is extended with sensor\n\ - technology in order to improve the ways the clarinet is traditionally played and\n\ - improve the performing artist's musical and performative expressions. New\nways\ - \ of performing music with a clarinet also opens up for novel ways of\ncomposing\ - \ musical pieces. The design is iterated in two versions with improved\nhardware\ - \ and form factor where everything is packaged into the clarinet bell. The\nclarinet\ - \ uses electronics that wirelessly sends sensor data to a computer that\nprocesses\ - \ a live audio feed via the software MAX 7 and plays it back via\nloudspeakers\ - \ on the stage. The extended clarinet provides several ways of\ntransforming audio\ - \ and also adds several ways of making performances more\nvisually interesting.\ - \ It is shown that this way of using sensor technology in a\ntraditional musical\ - \ instrument adds new dimensions to the performance and allows\ncreative persons\ - \ to express themselves in new ways as well as giving the audience\nan improved\ - \ experience. " - address: 'Brisbane, Australia' - author: 'Normark, Carl Jürgen and Peter Parnes and Robert Ek and Harald Andersson' - bibtex: "@inproceedings{Normark2016,\n abstract = {This paper describes how a classical\ - \ instrument, the clarinet, can\nbe extended with modern technology to create\ - \ a new and easy to use augmented\ninstrument. The paper describes the design\ - \ process, technical details and how a\nmusician can use the instrument. The clarinet\ - \ bell is extended with sensor\ntechnology in order to improve the ways the clarinet\ - \ is traditionally played and\nimprove the performing artist's musical and performative\ - \ expressions. 
New\nways of performing music with a clarinet also opens up for\ - \ novel ways of\ncomposing musical pieces. The design is iterated in two versions\ - \ with improved\nhardware and form factor where everything is packaged into the\ - \ clarinet bell. The\nclarinet uses electronics that wirelessly sends sensor data\ - \ to a computer that\nprocesses a live audio feed via the software MAX 7 and plays\ - \ it back via\nloudspeakers on the stage. The extended clarinet provides several\ - \ ways of\ntransforming audio and also adds several ways of making performances\ - \ more\nvisually interesting. It is shown that this way of using sensor technology\ - \ in a\ntraditional musical instrument adds new dimensions to the performance\ - \ and allows\ncreative persons to express themselves in new ways as well as giving\ - \ the audience\nan improved experience. },\n address = {Brisbane, Australia},\n\ - \ author = {Normark, Carl J\\''{u}rgen and Peter Parnes and Robert Ek and Harald\ - \ Andersson},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176090},\n isbn\ - \ = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {162--167},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {The extended clarinet},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0034.pdf},\n\ - \ year = {2016}\n}\n" + ID: Torre2009 + address: 'Pittsburgh, PA, United States' + author: 'Torre, Giuseppe and Sazdov, Robert and Konczewska, Dorota' + bibtex: "@inproceedings{Torre2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Torre, Giuseppe and Sazdov, Robert and Konczewska, Dorota},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177695},\n issn = {2220-4806},\n keywords\ + \ = {nime09},\n pages = {330--330},\n title = {MOLITVA --- Composition for 
Voice,\ + \ Live Electronics, Pointing-At Glove Device and {3-D} Setup of Speakers},\n url\ + \ = {http://www.nime.org/proceedings/2009/nime2009_330.pdf},\n year = {2009}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176090 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177695 issn: 2220-4806 - pages: 162--167 - publisher: Queensland Conservatorium Griffith University - title: The extended clarinet - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0034.pdf - year: 2016 + keywords: nime09 + pages: 330--330 + title: 'MOLITVA --- Composition for Voice, Live Electronics, Pointing-At Glove Device + and {3-D} Setup of Speakers' + url: http://www.nime.org/proceedings/2009/nime2009_330.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Nagashim2016 - abstract: |- - This is a report of a novel tactile musical instrument. This - instrument is called Multi Rubbing Tactile Instrument (MRTI2015), using ten - pieces of PAW sensor produced by the RT corporation. Previous research was - focused on untouchable instruments, but this approach is fully tactile---rub - and touch. The ten PAW sensors are assigned on the surface of the egg-like - plastic case to fit the ten fingers grasping the instrument. The controller is - mbed (NucleoF401RE), and it communicates with the host PC via high speed serial - (115200bps) by an MIDI-like protocol. Inside the egg-like plastic case, this - instrument has eight blue-LEDs which are controlled by the host in order to - display the grasping nuances. The prototype of this instrument contains realtime - visualizing system with chaotic graphics by Open-GL. I will report on the - principle of the sensor, and details about realizing the new system. - address: 'Brisbane, Australia' - author: Yoichi Nagashima - bibtex: "@inproceedings{Nagashim2016,\n abstract = {This is a report of a novel\ - \ tactile musical instrument. 
This\ninstrument is called Multi Rubbing Tactile\ - \ Instrument (MRTI2015), using ten\npieces of PAW sensor produced by the RT corporation.\ - \ Previous research was\nfocused on untouchable instruments, but this approach\ - \ is fully tactile---rub\nand touch. The ten PAW sensors are assigned on the surface\ - \ of the egg-like\nplastic case to fit the ten fingers grasping the instrument.\ - \ The controller is\nmbed (NucleoF401RE), and it communicates with the host PC\ - \ via high speed serial\n(115200bps) by an MIDI-like protocol. Inside the egg-like\ - \ plastic case, this\ninstrument has eight blue-LEDs which are controlled by the\ - \ host in order to\ndisplay the grasping nuances. The prototype of this instrument\ - \ contains realtime\nvisualizing system with chaotic graphics by Open-GL. I will\ - \ report on the\nprinciple of the sensor, and details about realizing the new\ - \ system.},\n address = {Brisbane, Australia},\n author = {Yoichi Nagashima},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176084},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {168--169},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Multi Rubbing Tactile Instrument},\n track\ - \ = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0035.pdf},\n\ - \ year = {2016}\n}\n" + ID: Neill2009 + address: 'Pittsburgh, PA, United States' + author: 'Neill, Ben and Singer, Eric' + bibtex: "@inproceedings{Neill2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Neill, Ben and Singer, Eric},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177639},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {331--331},\n title = {Ben\ + \ Neill and LEMUR},\n url = {http://www.nime.org/proceedings/2009/nime2009_331.pdf},\n\ + \ 
year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176084 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177639 issn: 2220-4806 - pages: 168--169 - publisher: Queensland Conservatorium Griffith University - title: Multi Rubbing Tactile Instrument - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper0035.pdf - year: 2016 + keywords: nime09 + pages: 331--331 + title: Ben Neill and LEMUR + url: http://www.nime.org/proceedings/2009/nime2009_331.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Zhang2016 - abstract: "This paper presents a web-based application enabling audiences to\ncollaboratively\ - \ contribute to the creative process during live music\nperformances. The system\ - \ aims at enhancing audience engagement and creating new\nforms of live music\ - \ experiences. Interaction between audience and performers is\nmade possible through\ - \ a client/server architecture enabling bidirectional\ncommunication of creative\ - \ data. Audience members can vote for pre-determined\nmusical attributes using\ - \ a smartphone-friendly and cross-platform web\napplication. The system gathers\ - \ audience members' votes and provide feedback\nthrough visualisations that can\ - \ be tailored for specific needs. In order to\nsupport multiple performers and\ - \ large audiences, automatic audience-to-performer\ngroupings are handled by the\ - \ application. The framework was applied to support\nlive interactive musical\ - \ improvisations where creative roles are shared amongst\naudience and performers\ - \ (Open Symphony). Qualitative analyses of user surveys\nhighlighted very positive\ - \ feedback related to themes such as engagement and\ncreativity and also identified\ - \ further design challenges around audience sense of\ncontrol and latency." 
- address: 'Brisbane, Australia' - author: Leshao Zhang and Yongmeng Wu and Mathieu Barthet - bibtex: "@inproceedings{Zhang2016,\n abstract = {This paper presents a web-based\ - \ application enabling audiences to\ncollaboratively contribute to the creative\ - \ process during live music\nperformances. The system aims at enhancing audience\ - \ engagement and creating new\nforms of live music experiences. Interaction between\ - \ audience and performers is\nmade possible through a client/server architecture\ - \ enabling bidirectional\ncommunication of creative data. Audience members can\ - \ vote for pre-determined\nmusical attributes using a smartphone-friendly and\ - \ cross-platform web\napplication. The system gathers audience members' votes\ - \ and provide feedback\nthrough visualisations that can be tailored for specific\ - \ needs. In order to\nsupport multiple performers and large audiences, automatic\ - \ audience-to-performer\ngroupings are handled by the application. The framework\ - \ was applied to support\nlive interactive musical improvisations where creative\ - \ roles are shared amongst\naudience and performers (Open Symphony). 
Qualitative\ - \ analyses of user surveys\nhighlighted very positive feedback related to themes\ - \ such as engagement and\ncreativity and also identified further design challenges\ - \ around audience sense of\ncontrol and latency.},\n address = {Brisbane, Australia},\n\ - \ author = {Leshao Zhang and Yongmeng Wu and Mathieu Barthet},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176147},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {170--175},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {A Web Application for Audience Participation in Live Music Performance:\ - \ The Open Symphony Use Case},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0036.pdf},\n\ - \ year = {2016}\n}\n" + ID: Hindman2009 + address: 'Pittsburgh, PA, United States' + author: 'Hindman, David and Drummond, Evan' + bibtex: "@inproceedings{Hindman2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Hindman, David and Drummond, Evan},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177573},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ + \ = {332--332},\n title = {Performance: Modal Kombat Plays {PON}G},\n url = {http://www.nime.org/proceedings/2009/nime2009_332.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176147 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177573 issn: 2220-4806 - pages: 170--175 - publisher: Queensland Conservatorium Griffith University - title: 'A Web Application for Audience Participation in Live Music Performance: - The Open Symphony Use Case' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0036.pdf - year: 2016 + keywords: nime09 + pages: 332--332 + 
title: 'Performance: Modal Kombat Plays {PON}G' + url: http://www.nime.org/proceedings/2009/nime2009_332.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: CarvalhoJunior2016 - abstract: |- - Cloud services allow musicians and developers to build audience - participation software with minimal network configuration for audience and no - need for server-side development. In this paper we discuss how a cloud service - supported the audience participation music performance, Crowd in C[loud], which - enables audience participation on a large scale using the audience audience's - smartphones. - We present the detail of the cloud service technology and an analysis of the - network transaction data regarding the performance. - This helps us to understand the nature of cloud-based audience participation - pieces based on the characteristics of a performance reality and provides cues - about the technology's scalability. - address: 'Brisbane, Australia' - author: Antonio Deusany de Carvalho Junior and Sang Won Lee and Georg Essl - bibtex: "@inproceedings{CarvalhoJunior2016,\n abstract = {Cloud services allow musicians\ - \ and developers to build audience\nparticipation software with minimal network\ - \ configuration for audience and no\nneed for server-side development. 
In this\ - \ paper we discuss how a cloud service\nsupported the audience participation music\ - \ performance, Crowd in C[loud], which\nenables audience participation on a large\ - \ scale using the audience audience's\nsmartphones.\nWe present the detail of\ - \ the cloud service technology and an analysis of the\nnetwork transaction data\ - \ regarding the performance.\nThis helps us to understand the nature of cloud-based\ - \ audience participation\npieces based on the characteristics of a performance\ - \ reality and provides cues\nabout the technology's scalability.},\n address =\ - \ {Brisbane, Australia},\n author = {Antonio Deusany de Carvalho Junior and Sang\ - \ Won Lee and Georg Essl},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176008},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {176--181},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Understanding\ - \ Cloud Support for the Audience Participation Concert Performance of Crowd in\ - \ C[loud]},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0037.pdf},\n\ - \ year = {2016}\n}\n" + ID: Leider2009 + address: 'Pittsburgh, PA, United States' + author: 'Leider, Colby' + bibtex: "@inproceedings{Leider2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Leider, Colby},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177615},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {333--333},\n title = {Afflux/Reflux},\n\ + \ url = {http://www.nime.org/proceedings/2009/nime2009_333.pdf},\n year = {2009}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176008 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177615 issn: 2220-4806 - pages: 176--181 - 
publisher: Queensland Conservatorium Griffith University - title: 'Understanding Cloud Support for the Audience Participation Concert Performance - of Crowd in C[loud]' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0037.pdf - year: 2016 + keywords: nime09 + pages: 333--333 + title: Afflux/Reflux + url: http://www.nime.org/proceedings/2009/nime2009_333.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Wang2016 - abstract: |- - This article presents observations and strategies for designing - game-like elements for expressive mobile musical interactions. The designs of - several popular commercial mobile music instruments are discussed and compared, - along with the different ways they integrate musical information and game-like - elements. In particular, issues of designing goals, rules, and interactions are - balanced with articulating expressiveness. These experiences aim to invite and - engage users with game design while maintaining and encouraging open-ended - musical expression and exploration. A set of observations is derived, leading to - a broader design motivation and philosophy. - address: 'Brisbane, Australia' - author: Ge Wang - bibtex: "@inproceedings{Wang2016,\n abstract = {This article presents observations\ - \ and strategies for designing\ngame-like elements for expressive mobile musical\ - \ interactions. The designs of\nseveral popular commercial mobile music instruments\ - \ are discussed and compared,\nalong with the different ways they integrate musical\ - \ information and game-like\nelements. In particular, issues of designing goals,\ - \ rules, and interactions are\nbalanced with articulating expressiveness. These\ - \ experiences aim to invite and\nengage users with game design while maintaining\ - \ and encouraging open-ended\nmusical expression and exploration. 
A set of observations\ - \ is derived, leading to\na broader design motivation and philosophy.},\n address\ - \ = {Brisbane, Australia},\n author = {Ge Wang},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176141},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {182--187},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Game Design for Expressive Mobile Music},\n track = {Papers},\n url\ - \ = {http://www.nime.org/proceedings/2016/nime2016_paper0038.pdf},\n year = {2016}\n\ - }\n" + ID: Wang2009a + address: 'Pittsburgh, PA, United States' + author: 'Wang, Ge and Fiebrink, Rebecca' + bibtex: "@inproceedings{Wang2009a,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Wang, Ge and Fiebrink, Rebecca},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177699},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ + \ = {334--334},\n title = {PLOrk Beat Science 2.0},\n url = {http://www.nime.org/proceedings/2009/nime2009_334.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176141 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177699 issn: 2220-4806 - pages: 182--187 - publisher: Queensland Conservatorium Griffith University - title: Game Design for Expressive Mobile Music - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0038.pdf - year: 2016 + keywords: nime09 + pages: 334--334 + title: PLOrk Beat Science 2.0 + url: http://www.nime.org/proceedings/2009/nime2009_334.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Banas2016 - abstract: "An auditory game has been developed as a part of research in\nWavefield\ - \ Synthesis. 
In order to design and implement this game, a number of\ntechnologies\ - \ have been incorporated in the development process. By pairing motion\ncapture\ - \ with a WiiMote new dimension of movement input was achieved.\nWe present an\ - \ evaluation study where the game was assessed." - address: 'Brisbane, Australia' - author: Jan Banas and Razvan Paisa and Iakovos Vogiatzoglou and Francesco Grani - and Stefania Serafin - bibtex: "@inproceedings{Banas2016,\n abstract = {An auditory game has been developed\ - \ as a part of research in\nWavefield Synthesis. In order to design and implement\ - \ this game, a number of\ntechnologies have been incorporated in the development\ - \ process. By pairing motion\ncapture with a WiiMote new dimension of movement\ - \ input was achieved.\nWe present an evaluation study where the game was assessed.},\n\ - \ address = {Brisbane, Australia},\n author = {Jan Banas and Razvan Paisa and\ - \ Iakovos Vogiatzoglou and Francesco Grani and Stefania Serafin},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1175972},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {188--193},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Design and evaluation of a gesture driven wave\ - \ field synthesis auditory game},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0039.pdf},\n\ - \ year = {2016}\n}\n" + ID: Wessel2009 + address: 'Pittsburgh, PA, United States' + author: 'Wessel, David' + bibtex: "@inproceedings{Wessel2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Wessel, David},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177707},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {335--335},\n title = {Hands\ + \ On --- A New Work from SLABS Controller and Generative 
Algorithms},\n url =\ + \ {http://www.nime.org/proceedings/2009/nime2009_335.pdf},\n year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175972 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177707 issn: 2220-4806 - pages: 188--193 - publisher: Queensland Conservatorium Griffith University - title: Design and evaluation of a gesture driven wave field synthesis auditory game - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0039.pdf - year: 2016 + keywords: nime09 + pages: 335--335 + title: Hands On --- A New Work from SLABS Controller and Generative Algorithms + url: http://www.nime.org/proceedings/2009/nime2009_335.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Baytas2016 - abstract: |- - In this paper, we investigate how watching a live-sequenced - electronic music performance, compared to merely hearing the music, contributes - to spectators' experiences of tension. We also explore the role of the - performers' effective and ancillary gestures in conveying tension, when they can - be seen. To this end, we conducted an experiment where 30 participants heard, - saw, or both heard and saw a live-sequenced techno music performance recording - while they produced continuous judgments on their experience of tension. Eye - tracking data was also recorded from participants who saw the visuals, to reveal - aspects of the performance that influenced their tension judgments. We analysed - the data to explore how auditory and visual components and the performer's - movements contribute to spectators' experience of tension. Our results show that - their perception of emotional intensity is consistent across hearing and sight, - suggesting that gestures in live-sequencing can be a medium - for expressive performance. 
- address: 'Brisbane, Australia' - author: 'Baytas, Mehmet Aydin and Tilbe Goksun and Oguzhan Ozcan' - bibtex: "@inproceedings{Baytas2016,\n abstract = {In this paper, we investigate\ - \ how watching a live-sequenced\nelectronic music performance, compared to merely\ - \ hearing the music, contributes\nto spectators' experiences of tension. We also\ - \ explore the role of the\nperformers' effective and ancillary gestures in conveying\ - \ tension, when they can\nbe seen. To this end, we conducted an experiment where\ - \ 30 participants heard,\nsaw, or both heard and saw a live-sequenced techno music\ - \ performance recording\nwhile they produced continuous judgments on their experience\ - \ of tension. Eye\ntracking data was also recorded from participants who saw the\ - \ visuals, to reveal\naspects of the performance that influenced their tension\ - \ judgments. We analysed\nthe data to explore how auditory and visual components\ - \ and the performer's\nmovements contribute to spectators' experience of tension.\ - \ Our results show that\ntheir perception of emotional intensity is consistent\ - \ across hearing and sight,\nsuggesting that gestures in live-sequencing can be\ - \ a medium\nfor expressive performance.},\n address = {Brisbane, Australia},\n\ - \ author = {Baytas, Mehmet Aydin and Tilbe Goksun and Oguzhan Ozcan},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1175978},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {194--199},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {The Perception of Live-sequenced Electronic\ - \ Music via Hearing and Sight},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0040.pdf},\n\ - \ year = {2016}\n}\n" + ID: Dubois2009 + address: 'Pittsburgh, PA, United States' + author: 'Dubois, R. 
Luke and Flanigan, Lesley' + bibtex: "@inproceedings{Dubois2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Dubois, R. Luke and Flanigan, Lesley},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177501},\n issn = {2220-4806},\n keywords = {nime09},\n\ + \ pages = {336--336},\n title = {Bioluminescence},\n url = {http://www.nime.org/proceedings/2009/nime2009_336.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175978 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177501 issn: 2220-4806 - pages: 194--199 - publisher: Queensland Conservatorium Griffith University - title: The Perception of Live-sequenced Electronic Music via Hearing and Sight - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0040.pdf - year: 2016 + keywords: nime09 + pages: 336--336 + title: Bioluminescence + url: http://www.nime.org/proceedings/2009/nime2009_336.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Bin2016 - abstract: "This paper explores the roles of technical and musical familiarity\n\ - in shaping audience response to digital musical instrument (DMI) performances.\ - \ In\nan audience study conducted during an evening concert, we examined two primary\n\ - questions: first, whether a deeper understanding of how a DMI works increases\ - \ an\naudience's enjoyment and interest in the performance; and second, given\ - \ the same\nDMI and same performer, whether playing in a conventional (vernacular)\ - \ versus an\nexperimental musical style affects an audience's response. We held\ - \ a concert in\nwhich two DMI creator-performers each played two pieces in differing\ - \ styles.\nBefore the concert, each half the 64-person audience was given a technical\n\ - explanation of one of the instruments. 
Results showed that receiving an\nexplanation\ - \ increased the reported understanding of that instrument, but had no\neffect\ - \ on either the reported level of interest or enjoyment. On the other hand,\n\ - performances in experimental versus conventional style on the same instrument\n\ - received widely divergent audience responses. We discuss implications of these\n\ - findings for DMI design." - address: 'Brisbane, Australia' - author: S. Astrid Bin and Nick Bryan-Kinns and Andrew P. McPherson - bibtex: "@inproceedings{Bin2016,\n abstract = {This paper explores the roles of\ - \ technical and musical familiarity\nin shaping audience response to digital musical\ - \ instrument (DMI) performances. In\nan audience study conducted during an evening\ - \ concert, we examined two primary\nquestions: first, whether a deeper understanding\ - \ of how a DMI works increases an\naudience's enjoyment and interest in the performance;\ - \ and second, given the same\nDMI and same performer, whether playing in a conventional\ - \ (vernacular) versus an\nexperimental musical style affects an audience's response.\ - \ We held a concert in\nwhich two DMI creator-performers each played two pieces\ - \ in differing styles.\nBefore the concert, each half the 64-person audience was\ - \ given a technical\nexplanation of one of the instruments. Results showed that\ - \ receiving an\nexplanation increased the reported understanding of that instrument,\ - \ but had no\neffect on either the reported level of interest or enjoyment. On\ - \ the other hand,\nperformances in experimental versus conventional style on the\ - \ same instrument\nreceived widely divergent audience responses. We discuss implications\ - \ of these\nfindings for DMI design.},\n address = {Brisbane, Australia},\n author\ - \ = {S. Astrid Bin and Nick Bryan-Kinns and Andrew P. 
McPherson},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1175994},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {200--205},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Skip the Pre-Concert Demo: How Technical Familiarity\ - \ and Musical Style Affect Audience Response},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0041.pdf},\n\ - \ year = {2016}\n}\n" + ID: Bukvic2009 + address: 'Pittsburgh, PA, United States' + author: 'Bukvic, Ivika and Standley, Eric' + bibtex: "@inproceedings{Bukvic2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Bukvic, Ivika and Standley, Eric},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177487},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ + \ = {337--337},\n title = {Elemental \\& Cyrene Reefs},\n url = {http://www.nime.org/proceedings/2009/nime2009_337.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175994 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177487 issn: 2220-4806 - pages: 200--205 - publisher: Queensland Conservatorium Griffith University - title: 'Skip the Pre-Concert Demo: How Technical Familiarity and Musical Style Affect - Audience Response' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0041.pdf - year: 2016 + keywords: nime09 + pages: 337--337 + title: Elemental \& Cyrene Reefs + url: http://www.nime.org/proceedings/2009/nime2009_337.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Wu2016 - abstract: "This paper presents an empirical evaluation of a digital music\ninstrument\ - \ (DMI) for electroacoustic vocal performance, the Tibetan Singing\nPrayer Wheel\ - \ (TSPW). 
Specifically, we study audience preference for the way it\nmaps horizontal\ - \ spinning gestures to vocal processing parameters. We filmed six\nsongs with\ - \ the singer using the TSPW, and created two alternative soundtracks for\neach\ - \ song: one desynchronized, and one with the mapping inverted. Participants\n\ - viewed all six songs with either the original or desynchronized soundtrack\n(Experiment\ - \ 1), or either the original or inverted-mapping soundtrack (Experiment\n2). Participants\ - \ were asked several questions via questionnaire after each song.\nOverall, they\ - \ reported higher engagement and preference for the original\nversions, suggesting\ - \ that audiences of the TSPW prefer more highly synchronized\nperformances, as\ - \ well as more intuitive mappings, though level of perceived\nexpression of the\ - \ performer only significantly differed in Experiment 1. Further,\nwe believe\ - \ that our experimental methods contribute to how DMIs can be evaluated\nfrom\ - \ the audience's (a recently noted underrepresented stakeholder)\nperspective." - address: 'Brisbane, Australia' - author: Jiayue Cecilia Wu and Madeline Huberth and Yoo Hsiu Yeh and Matt Wright - bibtex: "@inproceedings{Wu2016,\n abstract = {This paper presents an empirical evaluation\ - \ of a digital music\ninstrument (DMI) for electroacoustic vocal performance,\ - \ the Tibetan Singing\nPrayer Wheel (TSPW). Specifically, we study audience preference\ - \ for the way it\nmaps horizontal spinning gestures to vocal processing parameters.\ - \ We filmed six\nsongs with the singer using the TSPW, and created two alternative\ - \ soundtracks for\neach song: one desynchronized, and one with the mapping inverted.\ - \ Participants\nviewed all six songs with either the original or desynchronized\ - \ soundtrack\n(Experiment 1), or either the original or inverted-mapping soundtrack\ - \ (Experiment\n2). 
Participants were asked several questions via questionnaire\ - \ after each song.\nOverall, they reported higher engagement and preference for\ - \ the original\nversions, suggesting that audiences of the TSPW prefer more highly\ - \ synchronized\nperformances, as well as more intuitive mappings, though level\ - \ of perceived\nexpression of the performer only significantly differed in Experiment\ - \ 1. Further,\nwe believe that our experimental methods contribute to how DMIs\ - \ can be evaluated\nfrom the audience's (a recently noted underrepresented stakeholder)\n\ - perspective.},\n address = {Brisbane, Australia},\n author = {Jiayue Cecilia Wu\ - \ and Madeline Huberth and Yoo Hsiu Yeh and Matt Wright},\n booktitle = {Proceedings\ + ID: GreshamLancaster2009 + address: 'Pittsburgh, PA, United States' + author: 'Gresham-Lancaster, Scot and Bull, Steve' + bibtex: "@inproceedings{GreshamLancaster2009,\n address = {Pittsburgh, PA, United\ + \ States},\n author = {Gresham-Lancaster, Scot and Bull, Steve},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177561},\n issn = {2220-4806},\n keywords = {nime09},\n\ + \ pages = {338--338},\n title = {Cellphonia: 4'33},\n url = {http://www.nime.org/proceedings/2009/nime2009_338.pdf},\n\ + \ year = {2009}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177561 + issn: 2220-4806 + keywords: nime09 + pages: 338--338 + title: 'Cellphonia: 4''33' + url: http://www.nime.org/proceedings/2009/nime2009_338.pdf + year: 2009 + + +- ENTRYTYPE: inproceedings + ID: Overholt2009 + address: 'Pittsburgh, PA, United States' + author: 'Overholt, Dan and Lahey, Byron and Skriver Hansen, Anne-Marie and Burleson, + Winslow and Norrgaard Jensen, Camilla' + bibtex: "@inproceedings{Overholt2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Overholt, Dan and Lahey, 
Byron and Skriver Hansen, Anne-Marie and\ + \ Burleson, Winslow and Norrgaard Jensen, Camilla},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176143},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {206--211},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Evaluating the Audience's Perception of Real-time Gestural Control\ - \ and Mapping Mechanisms in Electroacoustic Vocal Performance},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0042.pdf},\n year\ - \ = {2016}\n}\n" + \ doi = {10.5281/zenodo.1177649},\n issn = {2220-4806},\n keywords = {nime09},\n\ + \ pages = {339--339},\n title = {Pendaphonics},\n url = {http://www.nime.org/proceedings/2009/nime2009_339.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176143 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177649 issn: 2220-4806 - pages: 206--211 - publisher: Queensland Conservatorium Griffith University - title: Evaluating the Audience's Perception of Real-time Gestural Control and Mapping - Mechanisms in Electroacoustic Vocal Performance - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0042.pdf - year: 2016 + keywords: nime09 + pages: 339--339 + title: Pendaphonics + url: http://www.nime.org/proceedings/2009/nime2009_339.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Lee2016 - abstract: 'This paper suggests a novel form of audiovisual performance --- live - writing --- that transforms creative writing into a real-time performing art. - The process of typing a poem on the fly is captured and augmented to create an - audiovisual performance that establishes natural links among the components of - typing gestures, the poem being written on the fly, and audiovisual artifacts. 
- Live writing draws upon ideas from the tradition of live coding in which the process - of programming is revealed to the audience in real-time. This paper discusses - the motivation behind the idea, interaction schemes and a performance interface - for such a performance practice. Our live writing performance system is enabled - by a custom text editor, writing-sound mapping strategies of our choice, a poem-sonification, - and temporal typography. We describe two live writing performances that take different - approaches as they vary the degree of composition and improvisation in writing.' - address: 'Brisbane, Australia' - author: Sang Won Lee and Georg Essl and Mari Martinez - bibtex: "@inproceedings{Lee2016,\n abstract = {This paper suggests a novel form\ - \ of audiovisual performance --- live writing --- that transforms creative writing\ - \ into a real-time performing art. The process of typing a poem on the fly is\ - \ captured and augmented to create an audiovisual performance that establishes\ - \ natural links among the components of typing gestures, the poem being written\ - \ on the fly, and audiovisual artifacts. Live writing draws upon ideas from the\ - \ tradition of live coding in which the process of programming is revealed to\ - \ the audience in real-time. 
This paper discusses the motivation behind the idea,\ - \ interaction schemes and a performance interface for such a performance practice.\ - \ Our live writing performance system is enabled by a custom text editor, writing-sound\ - \ mapping strategies of our choice, a poem-sonification, and temporal typography.\ - \ We describe two live writing performances that take different approaches as\ - \ they vary the degree of composition and improvisation in writing.},\n address\ - \ = {Brisbane, Australia},\n author = {Sang Won Lee and Georg Essl and Mari Martinez},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176060},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {212--217},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Live Writing : Writing as a Real-time Audiovisual\ - \ Performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0043.pdf},\n\ - \ year = {2016}\n}\n" + ID: Smallwood2009 + address: 'Pittsburgh, PA, United States' + author: 'Smallwood, Scott' + bibtex: "@inproceedings{Smallwood2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Smallwood, Scott},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177677},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {340--340},\n title = {Sound\ + \ Lanterns},\n url = {http://www.nime.org/proceedings/2009/nime2009_340.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176060 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177677 issn: 2220-4806 - pages: 212--217 - publisher: Queensland Conservatorium Griffith University - title: 'Live Writing : Writing as a Real-time Audiovisual Performance' - track: Papers - url: 
http://www.nime.org/proceedings/2016/nime2016_paper0043.pdf - year: 2016 + keywords: nime09 + pages: 340--340 + title: Sound Lanterns + url: http://www.nime.org/proceedings/2009/nime2009_340.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Jathal2016 - abstract: |- - The majority of electronic percussion controllers on the market - today are based on location-oriented striking techniques, resulting in a finger - drumming interaction paradigm, that is both fundamentally eclectic as well as - imposingly contr. The few controllers that allow hand-drumming - techniques also invariably conform to region-based triggering design, or, in - trade-off for expressivity, end up excluding hardware connectivity options that - are vital to the context of the modern electronic rhythm producer. The HandSolo - is a timbre-based drum controller that allows the use of natural, hand-drumming - strokes, whilst offering the same end-goal functionality that percussion - controller users have come to expect over the past decade. - address: 'Brisbane, Australia' - author: Kunal Jathal and Tae-Hong Park - bibtex: "@inproceedings{Jathal2016,\n abstract = {The majority of electronic percussion\ - \ controllers on the market\ntoday are based on location-oriented striking techniques,\ - \ resulting in a finger\ndrumming interaction paradigm, that is both fundamentally\ - \ eclectic as well as\nimposingly contr. The few controllers that allow hand-drumming\n\ - techniques also invariably conform to region-based triggering design, or, in\n\ - trade-off for expressivity, end up excluding hardware connectivity options that\n\ - are vital to the context of the modern electronic rhythm producer. 
The HandSolo\n\ - is a timbre-based drum controller that allows the use of natural, hand-drumming\n\ - strokes, whilst offering the same end-goal functionality that percussion\ncontroller\ - \ users have come to expect over the past decade.},\n address = {Brisbane, Australia},\n\ - \ author = {Kunal Jathal and Tae-Hong Park},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176042},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {218--223},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {The HandSolo: A Hand Drum Controller for Natural Rhythm Entry and Production},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0044.pdf},\n\ - \ year = {2016}\n}\n" + ID: Stearns2009 + address: 'Pittsburgh, PA, United States' + author: 'Stearns, Phillip' + bibtex: "@inproceedings{Stearns2009,\n address = {Pittsburgh, PA, United States},\n\ + \ author = {Stearns, Phillip},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177687},\n\ + \ issn = {2220-4806},\n keywords = {nime09},\n pages = {341--341},\n title = {AANN:\ + \ Artificial Analog Neural Network},\n url = {http://www.nime.org/proceedings/2009/nime2009_341.pdf},\n\ + \ year = {2009}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176042 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.1177687 issn: 2220-4806 - pages: 218--223 - publisher: Queensland Conservatorium Griffith University - title: 'The HandSolo: A Hand Drum Controller for Natural Rhythm Entry and Production' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0044.pdf - year: 2016 + keywords: nime09 + pages: 341--341 + title: 'AANN: Artificial Analog Neural Network' + url: 
http://www.nime.org/proceedings/2009/nime2009_341.pdf + year: 2009 - ENTRYTYPE: inproceedings - ID: Nash2016 - abstract: "This paper presents a development of the ubiquitous computer\nkeyboard\ - \ to capture velocity and other continuous musical properties, in order to\nsupport\ - \ more expressive interaction with music software. Building on existing\n`virtual\ - \ piano' utilities, the device is designed to provide a richer\nmechanism for\ - \ note entry within predominantly non-realtime editing tasks, in\napplications\ - \ where keyboard interaction is a central component of the user\nexperience (score\ - \ editors, sequencers, DAWs, trackers, live coding), and in which\nusers draw\ - \ on virtuosities in both music and computing.\nIn the keyboard, additional hardware\ - \ combines existing scan code (key press)\ndata with accelerometer readings to\ - \ create a secondary USB device, using the same\ncable but visible to software\ - \ as a separate USB MIDI device aside existing USB\nHID functionality. This paper\ - \ presents and evaluates an initial prototype,\ndeveloped using an Arduino board\ - \ and inexpensive sensors, and discusses design\nconsiderations and test findings\ - \ in musical applications, drawing on user studies\nof keyboard-mediated music\ - \ interaction. Without challenging more established (and\nexpensive) performance\ - \ devices; significant benefits are demonstrated in\nnotation-mediated interaction,\ - \ where the user's focus rests with\nsoftware." - address: 'Brisbane, Australia' - author: Chris Nash - bibtex: "@inproceedings{Nash2016,\n abstract = {This paper presents a development\ - \ of the ubiquitous computer\nkeyboard to capture velocity and other continuous\ - \ musical properties, in order to\nsupport more expressive interaction with music\ - \ software. 
Building on existing\n`virtual piano' utilities, the device is designed\ - \ to provide a richer\nmechanism for note entry within predominantly non-realtime\ - \ editing tasks, in\napplications where keyboard interaction is a central component\ - \ of the user\nexperience (score editors, sequencers, DAWs, trackers, live coding),\ - \ and in which\nusers draw on virtuosities in both music and computing.\nIn the\ - \ keyboard, additional hardware combines existing scan code (key press)\ndata\ - \ with accelerometer readings to create a secondary USB device, using the same\n\ - cable but visible to software as a separate USB MIDI device aside existing USB\n\ - HID functionality. This paper presents and evaluates an initial prototype,\ndeveloped\ - \ using an Arduino board and inexpensive sensors, and discusses design\nconsiderations\ - \ and test findings in musical applications, drawing on user studies\nof keyboard-mediated\ - \ music interaction. Without challenging more established (and\nexpensive) performance\ - \ devices; significant benefits are demonstrated in\nnotation-mediated interaction,\ - \ where the user's focus rests with\nsoftware.},\n address = {Brisbane, Australia},\n\ - \ author = {Chris Nash},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176088},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {224--229},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {The 'E' in QWERTY:\ - \ Musical Expression with Old Computer Interfaces},\n track = {Papers},\n url\ - \ = {http://www.nime.org/proceedings/2016/nime2016_paper0045.pdf},\n year = {2016}\n\ - }\n" + ID: Tomas2019 + abstract: 'This paper reports on a workshop where participants produced physical + mock-ups of musical interfaces directly after miming control of short electroacoustic + music pieces. 
Our goal was understanding how people envision and materialize their + own sound-producing gestures from spontaneous cognitive mappings. During the workshop, + 50 participants from different creative backgrounds modeled more than 180 physical + artifacts. Participants were filmed and interviewed for the later analysis of + their different multimodal associations about music. Our initial hypothesis was + that most of the physical mock-ups would be similar to the sound-producing objects + that participants would identify in the musical pieces. Although the majority + of artifacts clearly showed correlated design trajectories, our results indicate + that a relevant number of participants intuitively decided to engineer alternative + solutions emphasizing their personal design preferences. Therefore, in this paper + we present and discuss the workshop format, its results and the possible applications + for designing new musical interfaces.' + address: 'Porto Alegre, Brazil' + author: Enrique Tomas and Thomas Gorbach and Hilda Tellioglu and Martin Kaltenbrunner + bibtex: "@inproceedings{Tomas2019,\n abstract = {This paper reports on a workshop\ + \ where participants produced physical mock-ups of musical interfaces directly\ + \ after miming control of short electroacoustic music pieces. Our goal was understanding\ + \ how people envision and materialize their own sound-producing gestures from\ + \ spontaneous cognitive mappings. During the workshop, 50 participants from different\ + \ creative backgrounds modeled more than 180 physical artifacts. Participants\ + \ were filmed and interviewed for the later analysis of their different multimodal\ + \ associations about music. Our initial hypothesis was that most of the physical\ + \ mock-ups would be similar to the sound-producing objects that participants would\ + \ identify in the musical pieces. 
Although the majority of artifacts clearly showed\ + \ correlated design trajectories, our results indicate that a relevant number\ + \ of participants intuitively decided to engineer alternative solutions emphasizing\ + \ their personal design preferences. Therefore, in this paper we present and discuss\ + \ the workshop format, its results and the possible applications for designing\ + \ new musical interfaces.},\n address = {Porto Alegre, Brazil},\n author = {Enrique\ + \ Tomas and Thomas Gorbach and Hilda Tellioglu and Martin Kaltenbrunner},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.3672842},\n editor = {Marcelo Queiroz and\ + \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {1--6},\n\ + \ publisher = {UFRGS},\n title = {Material embodiments of electroacoustic music:\ + \ an experimental workshop study},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper001.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176088 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672842 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 224--229 - publisher: Queensland Conservatorium Griffith University - title: 'The ''E'' in QWERTY: Musical Expression with Old Computer Interfaces' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0045.pdf - year: 2016 + month: June + pages: 1--6 + publisher: UFRGS + title: 'Material embodiments of electroacoustic music: an experimental workshop + study' + url: http://www.nime.org/proceedings/2019/nime2019_paper001.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Greenhill2016 - abstract: "We present Focal, an eye-tracking musical expression controller\nwhich\ - \ allows\nhands-free control over audio effects and synthesis parameters during\n\ - peformance. 
A see-through head-mounted display projects virtual dials and\nswitches\ - \ into the visual field. The performer controls these with a single\nexpression\ - \ pedal, switching context by glancing at the object they wish to\ncontrol. This\ - \ simple interface allows for minimal physical disturbance to the\ninstrumental\ - \ musician, whilst enabling the control of many parameters otherwise\nonly achievable\ - \ with multiple foot pedalboards. We describe the development of\nthe system,\ - \ including the construction of the eye-tracking display, and the\ndesign of the\ - \ musical interface. We also present a comparison of a performance\nbetween Focal\ - \ and conventional controllers. " - address: 'Brisbane, Australia' - author: Stewart Greenhill and Cathie Travers - bibtex: "@inproceedings{Greenhill2016,\n abstract = {We present Focal, an eye-tracking\ - \ musical expression controller\nwhich allows\nhands-free control over audio effects\ - \ and synthesis parameters during\npeformance. A see-through head-mounted display\ - \ projects virtual dials and\nswitches into the visual field. The performer controls\ - \ these with a single\nexpression pedal, switching context by glancing at the\ - \ object they wish to\ncontrol. This simple interface allows for minimal physical\ - \ disturbance to the\ninstrumental musician, whilst enabling the control of many\ - \ parameters otherwise\nonly achievable with multiple foot pedalboards. We describe\ - \ the development of\nthe system, including the construction of the eye-tracking\ - \ display, and the\ndesign of the musical interface. We also present a comparison\ - \ of a performance\nbetween Focal and conventional controllers. 
},\n address =\ - \ {Brisbane, Australia},\n author = {Stewart Greenhill and Cathie Travers},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176022},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {230--235},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Focal : An Eye-Tracking Musical Expression\ - \ Controller},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0046.pdf},\n\ - \ year = {2016}\n}\n" + ID: Lu2019 + abstract: 'In this paper, collaborative performance is defined as the performance + of the piano by the performer and accompanied by an automatic harp. The automatic + harp can play music based on the electronic score and change its speed according + to the speed of the performer. We built a 32-channel automatic harp and designed + a layered modular framework integrating both hardware and software, for experimental + real-time control protocols. Considering that MIDI keyboard lacking information + of force (acceleration) and fingering detection, both of which are important for + expression, we designed force-sensor glove and achieved basic image recognition. + They are used to accurately detect speed, force (corresponding to velocity in + MIDI) and pitch when a performer plays the piano.' + address: 'Porto Alegre, Brazil' + author: Yupu Lu and Yijie Wu and Shijie Zhu + bibtex: "@inproceedings{Lu2019,\n abstract = {In this paper, collaborative performance\ + \ is defined as the performance of the piano by the performer and accompanied\ + \ by an automatic harp. The automatic harp can play music based on the electronic\ + \ score and change its speed according to the speed of the performer. We built\ + \ a 32-channel automatic harp and designed a layered modular framework integrating\ + \ both hardware and software, for experimental real-time control protocols. 
Considering\ + \ that MIDI keyboard lacking information of force (acceleration) and fingering\ + \ detection, both of which are important for expression, we designed force-sensor\ + \ glove and achieved basic image recognition. They are used to accurately detect\ + \ speed, force (corresponding to velocity in MIDI) and pitch when a performer\ + \ plays the piano.},\n address = {Porto Alegre, Brazil},\n author = {Yupu Lu and\ + \ Yijie Wu and Shijie Zhu},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672846},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {7--8},\n publisher = {UFRGS},\n title = {Collaborative\ + \ Musical Performances with Automatic Harp Based on Image Recognition and Force\ + \ Sensing Resistors},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper002.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176022 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672846 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 230--235 - publisher: Queensland Conservatorium Griffith University - title: 'Focal : An Eye-Tracking Musical Expression Controller' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0046.pdf - year: 2016 + month: June + pages: 7--8 + publisher: UFRGS + title: Collaborative Musical Performances with Automatic Harp Based on Image Recognition + and Force Sensing Resistors + url: http://www.nime.org/proceedings/2019/nime2019_paper002.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Meacham2016 - abstract: |- - The `Laptop Accordion' co-opts the commodity laptop - computer - to craft an expressive, whimsical accordion-like instrument. 
- It utilizes the opening and closing of the laptop - screen as a physical metaphor for accordion bellows, and the - laptop keyboard as musical buttonboard. Motion is tracked - using the laptop camera via optical flow and mapped to continuous - control over dynamics, while the sound is generated - in real-time. The instrument uses both skeuomorphic and - abstract onscreen graphics which further reference the core - mechanics of `squeezebox' instruments. The laptop accordion - provides several game modes, while overall offering an - unconventional aesthetic experience in music making. - address: 'Brisbane, Australia' - author: Aidan Meacham and Sanjay Kannan and Ge Wang - bibtex: "@inproceedings{Meacham2016,\n abstract = {The `Laptop Accordion' co-opts\ - \ the commodity laptop\ncomputer\nto craft an expressive, whimsical accordion-like\ - \ instrument.\nIt utilizes the opening and closing of the laptop\nscreen as a\ - \ physical metaphor for accordion bellows, and the\nlaptop keyboard as musical\ - \ buttonboard. Motion is tracked\nusing the laptop camera via optical flow and\ - \ mapped to continuous\ncontrol over dynamics, while the sound is generated\n\ - in real-time. The instrument uses both skeuomorphic and\nabstract onscreen graphics\ - \ which further reference the core\nmechanics of `squeezebox' instruments. The\ - \ laptop accordion\nprovides several game modes, while overall offering an\nunconventional\ - \ aesthetic experience in music making.},\n address = {Brisbane, Australia},\n\ - \ author = {Aidan Meacham and Sanjay Kannan and Ge Wang},\n booktitle = {Proceedings\ + ID: Arbel2019 + abstract: 'The Symbaline is an active instrument comprised of several partly-filled + wine glasses excited by electromagnetic coils. This work describes an electromechanical + system for incorporating frequency and amplitude modulation to the Symbaline''s + sound. A pendulum having a magnetic bob is suspended inside the liquid in the + wine glass. 
The pendulum is put into oscillation by driving infra-sound signals + through the coil. The pendulum''s movement causes the liquid in the glass to slosh + back and forth. Simultaneously, wine glass sounds are produced by driving audio-range + signals through the coil, inducing vibrations in a small magnet attached to the + glass surface and exciting glass vibrations. As the glass vibrates, the sloshing + liquid periodically changes the glass''s resonance frequencies and dampens the + glass, thus modulating both wine glass pitch and sound intensity.' + address: 'Porto Alegre, Brazil' + author: Lior Arbel and Yoav Y. Schechner and Noam Amir + bibtex: "@inproceedings{Arbel2019,\n abstract = {The Symbaline is an active instrument\ + \ comprised of several partly-filled wine glasses excited by electromagnetic coils.\ + \ This work describes an electromechanical system for incorporating frequency\ + \ and amplitude modulation to the Symbaline's sound. A pendulum having a magnetic\ + \ bob is suspended inside the liquid in the wine glass. The pendulum is put into\ + \ oscillation by driving infra-sound signals through the coil. The pendulum's\ + \ movement causes the liquid in the glass to slosh back and forth. Simultaneously,\ + \ wine glass sounds are produced by driving audio-range signals through the coil,\ + \ inducing vibrations in a small magnet attached to the glass surface and exciting\ + \ glass vibrations. As the glass vibrates, the sloshing liquid periodically changes\ + \ the glass's resonance frequencies and dampens the glass, thus modulating both\ + \ wine glass pitch and sound intensity.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Lior Arbel and Yoav Y. 
Schechner and Noam Amir},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176078},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {236--240},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {The Laptop Accordion},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0047.pdf},\n\ - \ year = {2016}\n}\n" + \ doi = {10.5281/zenodo.3672848},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {9--14},\n publisher = {UFRGS},\n\ + \ title = {The Symbaline --- An Active Wine Glass Instrument with a Liquid Sloshing\ + \ Vibrato Mechanism},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper003.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176078 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672848 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 236--240 - publisher: Queensland Conservatorium Griffith University - title: The Laptop Accordion - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0047.pdf - year: 2016 + month: June + pages: 9--14 + publisher: UFRGS + title: The Symbaline --- An Active Wine Glass Instrument with a Liquid Sloshing + Vibrato Mechanism + url: http://www.nime.org/proceedings/2019/nime2019_paper003.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Jakobsen2016 - abstract: "This paper presents a novel platform for expressive music making\ncalled\ - \ Hitmachine. Hitmachine lets you build and play your own musical\ninstruments\ - \ from Legos and sensors and is aimed towards empowering everyone to\nengage in\ - \ rich music making despite of prior musical experience. 
The paper\npresents findings\ - \ from a 4-day workshop where more that 150 children from ages\n3-13 built and\ - \ played their own musical instruments. The children used different\nsensors for\ - \ playing and performed with their instruments on stage. The findings\nshow how\ - \ age influenced the children's musical understanding and\nexpressivity, and gives\ - \ insight into important aspects to consider when designing\nfor expressive music\ - \ for novices." - address: 'Brisbane, Australia' - author: Kasper buhl Jakobsen and Marianne Graves Petersen and Majken Kirkegaard - Rasmussen and Jens Emil Groenbaek and jakob winge and jeppe stougaard - bibtex: "@inproceedings{Jakobsen2016,\n abstract = {This paper presents a novel\ - \ platform for expressive music making\ncalled Hitmachine. Hitmachine lets you\ - \ build and play your own musical\ninstruments from Legos and sensors and is aimed\ - \ towards empowering everyone to\nengage in rich music making despite of prior\ - \ musical experience. The paper\npresents findings from a 4-day workshop where\ - \ more that 150 children from ages\n3-13 built and played their own musical instruments.\ - \ The children used different\nsensors for playing and performed with their instruments\ - \ on stage. 
The findings\nshow how age influenced the children's musical understanding\ - \ and\nexpressivity, and gives insight into important aspects to consider when\ - \ designing\nfor expressive music for novices.},\n address = {Brisbane, Australia},\n\ - \ author = {Kasper buhl Jakobsen and Marianne Graves Petersen and Majken Kirkegaard\ - \ Rasmussen and Jens Emil Groenbaek and jakob winge and jeppe stougaard},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176038},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {241--246},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Hitmachine: Collective Musical Expressivity\ - \ for Novices},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0048.pdf},\n\ - \ year = {2016}\n}\n" + ID: deSouzaNunes2019 + abstract: "This paper presents the SIBILIM, a low-cost musical interface composed\ + \ of a resonance box made of cardboard containing customised push buttons that\ + \ interact with a smartphone through its video camera. Each button can be mapped\ + \ to a set of MIDI notes or control parameters. The sound is generated through\ + \ synthesis or sample playback and can be amplified with the help of a transducer,\ + \ which excites the resonance box. An essential contribution of this interface\ + \ is the possibility of reconfiguration of the buttons layout without the need\ + \ to hard rewire the system since it uses only the smartphone built-in camera.\ + \ This features allow for quick instrument customisation for different use cases,\n\ + such as low cost projects for schools or instrument building workshops. Our case\ + \ study used the Sibilim for music education, where it was designed to develop\ + \ the conscious of music perception and to stimulate creativity through exercises\ + \ of short tonal musical compositions. 
We conducted a study with a group of twelve\ + \ participants in an experimental workshop to verify its validity." + address: 'Porto Alegre, Brazil' + author: Helena de Souza Nunes and Federico Visi and Lydia Helena Wohl Coelho and + Rodrigo Schramm + bibtex: "@inproceedings{deSouzaNunes2019,\n abstract = {This paper presents the\ + \ SIBILIM, a low-cost musical interface composed of a resonance box made of cardboard\ + \ containing customised push buttons that interact with a smartphone through its\ + \ video camera. Each button can be mapped to a set of MIDI notes or control parameters.\ + \ The sound is generated through synthesis or sample playback and can be amplified\ + \ with the help of a transducer, which excites the resonance box. An essential\ + \ contribution of this interface is the possibility of reconfiguration of the\ + \ buttons layout without the need to hard rewire the system since it uses only\ + \ the smartphone built-in camera. This features allow for quick instrument customisation\ + \ for different use cases,\nsuch as low cost projects for schools or instrument\ + \ building workshops. Our case study used the Sibilim for music education, where\ + \ it was designed to develop the conscious of music perception and to stimulate\ + \ creativity through exercises of short tonal musical compositions. 
We conducted\ + \ a study with a group of twelve participants in an experimental workshop to verify\ + \ its validity.},\n address = {Porto Alegre, Brazil},\n author = {Helena de Souza\ + \ Nunes and Federico Visi and Lydia Helena Wohl Coelho and Rodrigo Schramm},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672850},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {15--20},\n\ + \ publisher = {UFRGS},\n title = {SIBILIM: A low-cost customizable wireless musical\ + \ interface},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper004.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176038 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672850 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 241--246 - publisher: Queensland Conservatorium Griffith University - title: 'Hitmachine: Collective Musical Expressivity for Novices' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0048.pdf - year: 2016 + month: June + pages: 15--20 + publisher: UFRGS + title: 'SIBILIM: A low-cost customizable wireless musical interface' + url: http://www.nime.org/proceedings/2019/nime2019_paper004.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Michon2016 - abstract: "In this paper, we present the BladeAxe: an iPad-based musical\ninstrument\ - \ leveraging the concepts of augmented mobile device and hybrid\nphysical model\ - \ controller. By being almost fully standalone, it can be used\neasily on stage\ - \ in the frame of a live performance by simply plugging it to a\ntraditional guitar\ - \ amplifier or to any sound system. 
Its acoustical plucking\nsystem provides the\ - \ performer with an extended expressive potential compared to a\nstandard controller.\n\ - After presenting an intermediate version of the BladeAxe, we'll describe\nour\ - \ final design. We will also introduce a similar instrument: the PlateAxe." - address: 'Brisbane, Australia' - author: Romain Michon and Julius Orion Iii Smith and Matthew Wright and Chris Chafe - bibtex: "@inproceedings{Michon2016,\n abstract = {In this paper, we present the\ - \ BladeAxe: an iPad-based musical\ninstrument leveraging the concepts of augmented\ - \ mobile device and hybrid\nphysical model controller. By being almost fully standalone,\ - \ it can be used\neasily on stage in the frame of a live performance by simply\ - \ plugging it to a\ntraditional guitar amplifier or to any sound system. Its acoustical\ - \ plucking\nsystem provides the performer with an extended expressive potential\ - \ compared to a\nstandard controller.\nAfter presenting an intermediate version\ - \ of the BladeAxe, we'll describe\nour final design. 
We will also introduce a\ - \ similar instrument: the PlateAxe.},\n address = {Brisbane, Australia},\n author\ - \ = {Romain Michon and Julius Orion Iii Smith and Matthew Wright and Chris Chafe},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176080},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {247--252},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Augmenting the iPad: the BladeAxe},\n track\ - \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0049.pdf},\n\ - \ year = {2016}\n}\n" + ID: Bell2019 + abstract: 'The combination of graphic/animated scores, acoustic signals (audio-scores) + and Head-Mounted Display (HMD) technology offers promising potentials in the context + of distributed notation, for live performances and concerts involving voices, + instruments and electronics. After an explanation of what SmartVox is technically, + and how it is used by composers and performers, this paper explains why this form + of technology-aided performance might help musicians for synchronization to an + electronic tape and (spectral) tuning. Then, from an exploration of the concepts + of distributed notation and networked music performances, it proposes solutions + (in conjunction with INScore, BabelScores and the Decibel Score Player) seeking + for the expansion of distributed notation practice to a wider community. It finally + presents findings relative to the use of SmartVox with HMDs.' + address: 'Porto Alegre, Brazil' + author: Jonathan Bell + bibtex: "@inproceedings{Bell2019,\n abstract = {The combination of graphic/animated\ + \ scores, acoustic signals (audio-scores) and Head-Mounted Display (HMD) technology\ + \ offers promising potentials in the context of distributed notation, for live\ + \ performances and concerts involving voices, instruments and electronics. 
After\ + \ an explanation of what SmartVox is technically, and how it is used by composers\ + \ and performers, this paper explains why this form of technology-aided performance\ + \ might help musicians for synchronization to an electronic tape and (spectral)\ + \ tuning. Then, from an exploration of the concepts of distributed notation and\ + \ networked music performances, it proposes solutions (in conjunction with INScore,\ + \ BabelScores and the Decibel Score Player) seeking for the expansion of distributed\ + \ notation practice to a wider community. It finally presents findings relative\ + \ to the use of SmartVox with HMDs.},\n address = {Porto Alegre, Brazil},\n author\ + \ = {Jonathan Bell},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672852},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {21--24},\n publisher = {UFRGS},\n title = {The Risset Cycle,\ + \ Recent Use Cases With SmartVox},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper005.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176080 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672852 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 247--252 - publisher: Queensland Conservatorium Griffith University - title: 'Augmenting the iPad: the BladeAxe' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0049.pdf - year: 2016 + month: June + pages: 21--24 + publisher: UFRGS + title: 'The Risset Cycle, Recent Use Cases With SmartVox' + url: http://www.nime.org/proceedings/2019/nime2019_paper005.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Hnicode233onMorissette2016 - abstract: "The author's artistic practice as a composer and performer is\ntransdisciplinary.\ - \ The body as a vector 
associated with sound, gesture, video,\nphysical space,\ - \ and technological space, constitute the six founding elements.\nThey give rise\ - \ to works between music and dance, between musical theater and\nmultimedia works\ - \ leading to a new hybrid performative practice. These works are\nrealized using\ - \ a motion capture system by computer vision, SICMAP (Systéme\nInteractif de Captation\ - \ du Mouvement en Art Performatif --- Interactive\nMotion Capture System For The\ - \ Performative Arts). In this paper, the author\nsituates her artistic practice\ - \ founded by the three pillars of transdisciplinary\nresearch methodology. The\ - \ path taken by the performer-creator, leading to the\nconception of the SICMAP,\ - \ is then explained through a reflection on the\n`dream instrument'. Followed\ - \ by a technical description, the SICMAP\nis contextualized using theoretical\ - \ models: the instrumental continuum and energy\ncontinuum, the `dream instrument'\ - \ and the typology of the\ninstrumental gesture. Initiated by the SICMAP, these\ - \ are then applied to a new\nparadigm the gesture-sound space and subsequently\ - \ put into practice through the\ncreation of the work From Infinity To Within." - address: 'Brisbane, Australia' - author: Barah Héon-Morissette - bibtex: "@inproceedings{Hnicode233onMorissette2016,\n abstract = {The author's artistic\ - \ practice as a composer and performer is\ntransdisciplinary. The body as a vector\ - \ associated with sound, gesture, video,\nphysical space, and technological space,\ - \ constitute the six founding elements.\nThey give rise to works between music\ - \ and dance, between musical theater and\nmultimedia works leading to a new hybrid\ - \ performative practice. These works are\nrealized using a motion capture system\ - \ by computer vision, SICMAP (Syst\\'{e}me\nInteractif de Captation du Mouvement\ - \ en Art Performatif --- Interactive\nMotion Capture System For The Performative\ - \ Arts). 
In this paper, the author\nsituates her artistic practice founded by\ - \ the three pillars of transdisciplinary\nresearch methodology. The path taken\ - \ by the performer-creator, leading to the\nconception of the SICMAP, is then\ - \ explained through a reflection on the\n`dream instrument'. Followed by a technical\ - \ description, the SICMAP\nis contextualized using theoretical models: the instrumental\ - \ continuum and energy\ncontinuum, the `dream instrument' and the typology of\ - \ the\ninstrumental gesture. Initiated by the SICMAP, these are then applied to\ - \ a new\nparadigm the gesture-sound space and subsequently put into practice through\ - \ the\ncreation of the work From Infinity To Within.},\n address = {Brisbane,\ - \ Australia},\n author = {Barah H\\'{e}on-Morissette},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176024},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {253--258},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Transdisciplinary Methodology: from Theory to the Stage, Creation for\ - \ the {SIC}MAP},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0050.pdf},\n\ - \ year = {2016}\n}\n" + ID: Wang2019 + abstract: 'This paper documents the key issues of performance and compatibility + working with Musical Instrument Digital Interface (MIDI) via Bluetooth Low Energy + (BLE) as a wireless interface for sensor or controller data and inter-module communication + in the context of building interactive digital systems. An overview of BLE MIDI + is presented along with a comparison of the protocol from the perspective of theoretical + limits and interoperability, showing its widespread compatibility across platforms + compared with other alternatives. 
Then we perform three complementary tests on + BLE MIDI and alternative interfaces using prototype and commercial devices, showing + that BLE MIDI has comparable performance with the tested WiFi implementations, + with end-to-end (sensor input to audio output) latencies of under 10ms under certain + conditions. Overall, BLE MIDI is an ideal choice for controllers and sensor interfaces + that are designed to work on a wide variety of platforms.' + address: 'Porto Alegre, Brazil' + author: Johnty Wang and Axel Mulder and Marcelo Wanderley + bibtex: "@inproceedings{Wang2019,\n abstract = {This paper documents the key issues\ + \ of performance and compatibility working with Musical Instrument Digital Interface\ + \ (MIDI) via Bluetooth Low Energy (BLE) as a wireless interface for sensor or\ + \ controller data and inter-module communication in the context of building interactive\ + \ digital systems. An overview of BLE MIDI is presented along with a comparison\ + \ of the protocol from the perspective of theoretical limits and interoperability,\ + \ showing its widespread compatibility across platforms compared with other alternatives.\ + \ Then we perform three complementary tests on BLE MIDI and alternative interfaces\ + \ using prototype and commercial devices, showing that BLE MIDI has comparable\ + \ performance with the tested WiFi implementations, with end-to-end (sensor input\ + \ to audio output) latencies of under 10ms under certain conditions. 
Overall,\ + \ BLE MIDI is an ideal choice for controllers and sensor interfaces that are designed\ + \ to work on a wide variety of platforms.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Johnty Wang and Axel Mulder and Marcelo Wanderley},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672854},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {25--30},\n publisher = {UFRGS},\n\ + \ title = {Practical Considerations for {MIDI} over Bluetooth Low Energy as a\ + \ Wireless Interface},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper006.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176024 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672854 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 253--258 - publisher: Queensland Conservatorium Griffith University - title: 'Transdisciplinary Methodology: from Theory to the Stage, Creation for the - SICMAP' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0050.pdf - year: 2016 + month: June + pages: 25--30 + publisher: UFRGS + title: Practical Considerations for MIDI over Bluetooth Low Energy as a Wireless + Interface + url: http://www.nime.org/proceedings/2019/nime2019_paper006.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Xiao2016 - abstract: "This paper explores how an actuated pin-based shape display may\nserve\ - \ as a platform on which to build musical instruments and controllers. We\ndesigned\ - \ and prototyped three new instruments that use the shape display not only\nas\ - \ an input device, but also as a source of acoustic sound. 
These cover a range\n\ - of interaction paradigms to generate ambient textures, polyrhythms, and melodies.\n\ - This paper first presents existing work from which we drew interactions and\n\ - metaphors for our designs. We then introduce each of our instruments and the\n\ - back-end software we used to prototype them. Finally, we offer reflections on\n\ - some central themes of NIME, including the relationship between musician and\n\ - machine." - address: 'Brisbane, Australia' - author: Xiao Xiao and Donald Derek Haddad and Thomas Sanchez and Akito van Troyer - and Rébecca Kleinberger and Penny Webb and Joe Paradiso and Tod Machover and Hiroshi - Ishii - bibtex: "@inproceedings{Xiao2016,\n abstract = {This paper explores how an actuated\ - \ pin-based shape display may\nserve as a platform on which to build musical instruments\ - \ and controllers. We\ndesigned and prototyped three new instruments that use\ - \ the shape display not only\nas an input device, but also as a source of acoustic\ - \ sound. These cover a range\nof interaction paradigms to generate ambient textures,\ - \ polyrhythms, and melodies.\nThis paper first presents existing work from which\ - \ we drew interactions and\nmetaphors for our designs. We then introduce each\ - \ of our instruments and the\nback-end software we used to prototype them. 
Finally,\ - \ we offer reflections on\nsome central themes of NIME, including the relationship\ - \ between musician and\nmachine.},\n address = {Brisbane, Australia},\n author\ - \ = {Xiao Xiao and Donald Derek Haddad and Thomas Sanchez and Akito van Troyer\ - \ and R\\'{e}becca Kleinberger and Penny Webb and Joe Paradiso and Tod Machover\ - \ and Hiroshi Ishii},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176145},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {259--264},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Kin\\'{e}phone:\ - \ Exploring the Musical Potential of an Actuated Pin-Based Shape Display},\n track\ - \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0051.pdf},\n\ - \ year = {2016}\n}\n" + ID: Ramchurn2019 + abstract: 'We report on the design and deployment of systems for the performance + of live score accompaniment to an interactive movie by a Networked Musical Ensemble. + In this case, the audio-visual content of the movie is selected in real time based + on user input to a Brain-Computer Interface (BCI). Our system supports musical + improvisation between human performers and automated systems responding to the + BCI. We explore the performers'' roles during two performances when these socio-technical + systems were implemented, in terms of coordination, problem-solving, managing + uncertainty and musical responses to system constraints. This allows us to consider + how features of these systems and practices might be incorporated into a general + tool, aimed at any musician, which could scale for use in different performance + settings involving interactive media. 
' + address: 'Porto Alegre, Brazil' + author: Richard Ramchurn and Juan Pablo Martinez-Avila and Sarah Martindale and + Alan Chamberlain and Max L Wilson and Steve Benford + bibtex: "@inproceedings{Ramchurn2019,\n abstract = {We report on the design and\ + \ deployment of systems for the performance of live score accompaniment to an\ + \ interactive movie by a Networked Musical Ensemble. In this case, the audio-visual\ + \ content of the movie is selected in real time based on user input to a Brain-Computer\ + \ Interface (BCI). Our system supports musical improvisation between human performers\ + \ and automated systems responding to the BCI. We explore the performers' roles\ + \ during two performances when these socio-technical systems were implemented,\ + \ in terms of coordination, problem-solving, managing uncertainty and musical\ + \ responses to system constraints. This allows us to consider how features of\ + \ these systems and practices might be incorporated into a general tool, aimed\ + \ at any musician, which could scale for use in different performance settings\ + \ involving interactive media. 
},\n address = {Porto Alegre, Brazil},\n author\ + \ = {Richard Ramchurn and Juan Pablo Martinez-Avila and Sarah Martindale and Alan\ + \ Chamberlain and Max L Wilson and Steve Benford},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672856},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {31--36},\n publisher = {UFRGS},\n\ + \ title = {Improvising a Live Score to an Interactive Brain-Controlled Film},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_paper007.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176145 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672856 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 259--264 - publisher: Queensland Conservatorium Griffith University - title: 'Kinéphone: Exploring the Musical Potential of an Actuated Pin-Based Shape - Display' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0051.pdf - year: 2016 + month: June + pages: 31--36 + publisher: UFRGS + title: Improvising a Live Score to an Interactive Brain-Controlled Film + url: http://www.nime.org/proceedings/2019/nime2019_paper007.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Waite2016 - abstract: "This paper presents a brief review of current literature detailing\n\ - some of the issues and trends in composition and performance with interactive\n\ - music systems. Of particular interest is how musicians interact with a separate\n\ - machine entity that exercises agency over the creative process. 
The use of\nreal-world\ - \ metaphors as a strategy for increasing audience engagement is also\ndiscussed.\n\ - The composition and system Church Belles is presented, analyzed and evaluated\ - \ in\nterms of its architecture, how it relates to existing studies of musician-machine\n\ - creative interaction and how the use of a real-world metaphor can promote\naudience\ - \ perceptions of liveness. This develops previous NIME work by offering a\ndetailed\ - \ case study of the development process of both a system and a piece for\npopular,\ - \ non-improvisational vocal/guitar music." - address: 'Brisbane, Australia' - author: Si Waite - bibtex: "@inproceedings{Waite2016,\n abstract = {This paper presents a brief review\ - \ of current literature detailing\nsome of the issues and trends in composition\ - \ and performance with interactive\nmusic systems. Of particular interest is how\ - \ musicians interact with a separate\nmachine entity that exercises agency over\ - \ the creative process. The use of\nreal-world metaphors as a strategy for increasing\ - \ audience engagement is also\ndiscussed.\nThe composition and system Church Belles\ - \ is presented, analyzed and evaluated in\nterms of its architecture, how it relates\ - \ to existing studies of musician-machine\ncreative interaction and how the use\ - \ of a real-world metaphor can promote\naudience perceptions of liveness. 
This\ - \ develops previous NIME work by offering a\ndetailed case study of the development\ - \ process of both a system and a piece for\npopular, non-improvisational vocal/guitar\ - \ music.},\n address = {Brisbane, Australia},\n author = {Si Waite},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176139},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {265--270},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Church Belles: An Interactive System and Composition\ - \ Using Real-World Metaphors},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0052.pdf},\n\ - \ year = {2016}\n}\n" + ID: Tom2019 + abstract: 'Although several Digital Musical Instruments (DMIs) have been presented + at NIME, very few of them remain accessible to the community. Rebuilding a DMI + is often a necessary step to allow for performance with NIMEs. Rebuilding a DMI + exactly similar to its original, however, might not be possible due to technology + obsolescence, lack of documentation or other reasons. It might then be interesting + to re-interpret a DMI and build an instrument inspired by the original one, creating + novel performance opportunities. This paper presents the challenges and approaches + involved in rebuilding and re-interpreting an existing DMI, The Sponge by Martin + Marier. The rebuilt versions make use of newer/improved technology and customized + design aspects like addition of vibrotactile feedback and implementation of different + mapping strategies. It also discusses the implications of embedding sound synthesis + within the DMI, by using the Prynth framework and further presents a comparison + between this approach and the more traditional ground-up approach. 
As a result + of the evaluation and comparison of the two rebuilt DMIs, we present a third version + which combines the benefits and discuss performance issues with these devices.' + address: 'Porto Alegre, Brazil' + author: Ajin Jiji Tom and Harish Jayanth Venkatesan and Ivan Franco and Marcelo + Wanderley + bibtex: "@inproceedings{Tom2019,\n abstract = {Although several Digital Musical\ + \ Instruments (DMIs) have been presented at NIME, very few of them remain accessible\ + \ to the community. Rebuilding a DMI is often a necessary step to allow for performance\ + \ with NIMEs. Rebuilding a DMI exactly similar to its original, however, might\ + \ not be possible due to technology obsolescence, lack of documentation or other\ + \ reasons. It might then be interesting to re-interpret a DMI and build an instrument\ + \ inspired by the original one, creating novel performance opportunities. This\ + \ paper presents the challenges and approaches involved in rebuilding and re-interpreting\ + \ an existing DMI, The Sponge by Martin Marier. The rebuilt versions make use\ + \ of newer/improved technology and customized design aspects like addition of\ + \ vibrotactile feedback and implementation of different mapping strategies. It\ + \ also discusses the implications of embedding sound synthesis within the DMI,\ + \ by using the Prynth framework and further presents a comparison between this\ + \ approach and the more traditional ground-up approach. 
As a result of the evaluation\ + \ and comparison of the two rebuilt DMIs, we present a third version which combines\ + \ the benefits and discuss performance issues with these devices.},\n address\ + \ = {Porto Alegre, Brazil},\n author = {Ajin Jiji Tom and Harish Jayanth Venkatesan\ + \ and Ivan Franco and Marcelo Wanderley},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672858},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {37--42},\n publisher = {UFRGS},\n title = {Rebuilding and\ + \ Reinterpreting a Digital Musical Instrument --- The Sponge},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper008.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176139 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672858 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 265--270 - publisher: Queensland Conservatorium Griffith University - title: 'Church Belles: An Interactive System and Composition Using Real-World Metaphors' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0052.pdf - year: 2016 + month: June + pages: 37--42 + publisher: UFRGS + title: Rebuilding and Reinterpreting a Digital Musical Instrument --- The Sponge + url: http://www.nime.org/proceedings/2019/nime2019_paper008.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Olowe2016 - abstract: "We propose residUUm, an audiovisual performance tool that uses\nsonification\ - \ to orchestrate a particle system of shapes, as an attempt to build\nan audiovisual\ - \ user interface in which all the actions of a performer on a laptop\nare intended\ - \ to be explicitly interpreted by the audience. 
We propose two\napproaches to\ - \ performing with residUUm and discuss the methods utilized to\nfulfill the promise\ - \ of audience-visible interaction: mapping and performance\nstrategies applied\ - \ to express audiovisual interactions with multilayered\nsound-image relationships.\ - \ The system received positive feedback from 34 audience\nparticipants on aspects\ - \ such as aesthetics and audiovisual integration, and we\nidentified further design\ - \ challenges around performance clarity and strategy. We\ndiscuss residUUm's development\ - \ objectives, modes of interaction and the impact of\nan audience-visible interface\ - \ on the performer and observer. " - address: 'Brisbane, Australia' - author: Ireti Olowe and Giulio Moro and Mathieu Barthet - bibtex: "@inproceedings{Olowe2016,\n abstract = {We propose residUUm, an audiovisual\ - \ performance tool that uses\nsonification to orchestrate a particle system of\ - \ shapes, as an attempt to build\nan audiovisual user interface in which all the\ - \ actions of a performer on a laptop\nare intended to be explicitly interpreted\ - \ by the audience. We propose two\napproaches to performing with residUUm and\ - \ discuss the methods utilized to\nfulfill the promise of audience-visible interaction:\ - \ mapping and performance\nstrategies applied to express audiovisual interactions\ - \ with multilayered\nsound-image relationships. The system received positive feedback\ - \ from 34 audience\nparticipants on aspects such as aesthetics and audiovisual\ - \ integration, and we\nidentified further design challenges around performance\ - \ clarity and strategy. We\ndiscuss residUUm's development objectives, modes of\ - \ interaction and the impact of\nan audience-visible interface on the performer\ - \ and observer. 
},\n address = {Brisbane, Australia},\n author = {Ireti Olowe\ - \ and Giulio Moro and Mathieu Barthet},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176098},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {271--276},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {residUUm: user\ - \ mapping and performance strategies for multilayered live audiovisual generation},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0053.pdf},\n\ - \ year = {2016}\n}\n" + ID: Nishida2019 + abstract: 'Recent technological advances, such as increased CPU/GPU processing speed, + along with the miniaturization of devices and sensors, have created new possibilities + for integrating immersive technologies in music and performance art. Virtual and + Augmented Reality (VR/AR) have become increasingly interesting as mobile device + platforms, such as up-to-date smartphones, with necessary CPU resources entered + the consumer market. In combination with recent web technologies, any mobile device + can simply connect with a browser to a local server to access the latest technology. + The web platform also eases the integration of collaborative situated media in + participatory artwork. In this paper, we present the interactive music improvisation + piece ‘Border,'' premiered in 2018 at the Beyond Festival at the Center for Art + and Media Karlsruhe (ZKM). This piece explores the interaction between a performer + and the audience using web-based applications – including AR, real-time 3D audio/video + streaming, advanced web audio, and gesture-controlled virtual instruments – on + smart mobile devices.' 
+ address: 'Porto Alegre, Brazil' + author: Kiyu Nishida and Akishige Yuguchi and kazuhiro jo and Paul Modler and Markus + Noisternig + bibtex: "@inproceedings{Nishida2019,\n abstract = {Recent technological advances,\ + \ such as increased CPU/GPU processing speed, along with the miniaturization of\ + \ devices and sensors, have created new possibilities for integrating immersive\ + \ technologies in music and performance art. Virtual and Augmented Reality (VR/AR)\ + \ have become increasingly interesting as mobile device platforms, such as up-to-date\ + \ smartphones, with necessary CPU resources entered the consumer market. In combination\ + \ with recent web technologies, any mobile device can simply connect with a browser\ + \ to a local server to access the latest technology. The web platform also eases\ + \ the integration of collaborative situated media in participatory artwork. In\ + \ this paper, we present the interactive music improvisation piece ‘Border,' premiered\ + \ in 2018 at the Beyond Festival at the Center for Art and Media Karlsruhe (ZKM).\ + \ This piece explores the interaction between a performer and the audience using\ + \ web-based applications – including AR, real-time 3D audio/video streaming, advanced\ + \ web audio, and gesture-controlled virtual instruments – on smart mobile devices.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Kiyu Nishida and Akishige Yuguchi\ + \ and kazuhiro jo and Paul Modler and Markus Noisternig},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672860},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {43--46},\n publisher = {UFRGS},\n\ + \ title = {Border: A Live Performance Based on Web {AR} and a Gesture-Controlled\ + \ Virtual Instrument},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper009.pdf},\n\ + \ year = {2019}\n}\n" booktitle: 
Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176098 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672860 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 271--276 - publisher: Queensland Conservatorium Griffith University - title: 'residUUm: user mapping and performance strategies for multilayered live - audiovisual generation' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0053.pdf - year: 2016 + month: June + pages: 43--46 + publisher: UFRGS + title: 'Border: A Live Performance Based on Web AR and a Gesture-Controlled Virtual + Instrument' + url: http://www.nime.org/proceedings/2019/nime2019_paper009.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Bhumber2016 - abstract: "This paper describes the processes involved in developing Pendula,\n\ - a performance environment and interactive installation using swings, interactive\n\ - video, and audio. A presentation of the project is described using three swings.\n\ - Gyroscopic and accelerometer data were used in each of the setups to control\n\ - audio and visual parameters.The installation was presented as both an interactive\n\ - environment and as a performance instrument, with multiple public performances.\n\ - Construction of the physical devices used, circuits built, and software created\n\ - is covered in this paper, along with a discussion of problems and their solutions\n\ - encountered during the development of Pendula." - address: 'Brisbane, Australia' - author: Kirandeep Bhumber and Nancy Lee and Brian Topp - bibtex: "@inproceedings{Bhumber2016,\n abstract = {This paper describes the processes\ - \ involved in developing Pendula,\na performance environment and interactive installation\ - \ using swings, interactive\nvideo, and audio. 
A presentation of the project is\ - \ described using three swings.\nGyroscopic and accelerometer data were used in\ - \ each of the setups to control\naudio and visual parameters.The installation\ - \ was presented as both an interactive\nenvironment and as a performance instrument,\ - \ with multiple public performances.\nConstruction of the physical devices used,\ - \ circuits built, and software created\nis covered in this paper, along with a\ - \ discussion of problems and their solutions\nencountered during the development\ - \ of Pendula.},\n address = {Brisbane, Australia},\n author = {Kirandeep Bhumber\ - \ and Nancy Lee and Brian Topp},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1175992},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {277--285},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Pendula: An Interactive\ - \ Swing Installation and Performance Environment},\n track = {Papers},\n url =\ - \ {http://www.nime.org/proceedings/2016/nime2016_paper0054.pdf},\n year = {2016}\n\ - }\n" + ID: Dahlstedt2019 + abstract: 'Libration Perturbed is a performance and an improvisation instrument, + originally composed and designed for a multi-speaker dome. The performer controls + a bank of 64 virtual inter-connected resonating strings, with individual and direct + control of tuning and resonance characteristics through a multitouch-enhanced + klavier interface (TouchKeys). It is a hybrid acoustic-electronic instrument, + as all string vibrations originate from physical vibrations in the klavier and + its casing, captured through contact microphones. In addition, there are gestural + strings, called ropes, excited by performed musical gestures. All strings and + ropes are connected, and inter-resonate together as a ”super-harp”, internally + and through the performance space. 
With strong resonance, strings may go into + chaotic motion or emergent quasi-periodic patterns, but custom adaptive leveling + mechanisms keep loudness under the musician''s control at all times. The hybrid + digital/acoustic approach and the enhanced keyboard provide for an expressive + and very physical interaction, and a strong multi-channel immersive experience. + The paper describes the aesthetic choices behind the design of the system, as + well as the technical implementation, and – primarily – the interaction design, + as it emerges from mapping, sound design, physical modeling and integration of + the acoustic, the gestural, and the virtual. The work is evaluated based on the + experiences from a series of performances.' + address: 'Porto Alegre, Brazil' + author: Palle Dahlstedt + bibtex: "@inproceedings{Dahlstedt2019,\n abstract = {Libration Perturbed is a performance\ + \ and an improvisation instrument, originally composed and designed for a multi-speaker\ + \ dome. The performer controls a bank of 64 virtual inter-connected resonating\ + \ strings, with individual and direct control of tuning and resonance characteristics\ + \ through a multitouch-enhanced klavier interface (TouchKeys). It is a hybrid\ + \ acoustic-electronic instrument, as all string vibrations originate from physical\ + \ vibrations in the klavier and its casing, captured through contact microphones.\ + \ In addition, there are gestural strings, called ropes, excited by performed\ + \ musical gestures. All strings and ropes are connected, and inter-resonate together\ + \ as a ”super-harp”, internally and through the performance space. With strong\ + \ resonance, strings may go into chaotic motion or emergent quasi-periodic patterns,\ + \ but custom adaptive leveling mechanisms keep loudness under the musician's control\ + \ at all times. 
The hybrid digital/acoustic approach and the enhanced keyboard\ + \ provide for an expressive and very physical interaction, and a strong multi-channel\ + \ immersive experience. The paper describes the aesthetic choices behind the design\ + \ of the system, as well as the technical implementation, and – primarily – the\ + \ interaction design, as it emerges from mapping, sound design, physical modeling\ + \ and integration of the acoustic, the gestural, and the virtual. The work is\ + \ evaluated based on the experiences from a series of performances.},\n address\ + \ = {Porto Alegre, Brazil},\n author = {Palle Dahlstedt},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672862},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {47--52},\n publisher = {UFRGS},\n\ + \ title = {Taming and Tickling the Beast --- Multi-Touch Keyboard as Interface\ + \ for a Physically Modelled Interconnected Resonating Super-Harp},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper010.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175992 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672862 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 277--285 - publisher: Queensland Conservatorium Griffith University - title: 'Pendula: An Interactive Swing Installation and Performance Environment' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0054.pdf - year: 2016 + month: June + pages: 47--52 + publisher: UFRGS + title: Taming and Tickling the Beast --- Multi-Touch Keyboard as Interface for a + Physically Modelled Interconnected Resonating Super-Harp + url: http://www.nime.org/proceedings/2019/nime2019_paper010.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Dabin2016 - abstract: "This 
project explores the potential for 3D modelling and printing\nto\ - \ create customised flutes that can play music in a variety of microtonal\nscales.\ - \ One of the challenges in the field of microtonality is that conventional\nmusical\ - \ instruments are inadequate for realising the abundance of theoretical\ntunings\ - \ that musicians wish to investigate. This paper focuses on the development\n\ - of two types of flutes, the recorder and transverse flute, with interchangeable\n\ - mouthpieces. These flutes are designed to play subharmonic microtonal scales.\ - \ The\ndiscussion provides an overview of the design and implementation process,\n\ - including calculation methods for acoustic modelling and 3D printing\ntechnologies,\ - \ as well as an evaluation of some of the difficulties encountered.\nResults from\ - \ our 3D printed flutes suggest that whilst further refinements are\nnecessary\ - \ in our designs, 3D modelling and printing techniques offer new and\nvaluable\ - \ methods for the design and production of customised musical instruments.\nThe\ - \ long term goal of this project is to create a system in which users can\nspecify\ - \ the tuning of their instrument to generate a 3D model and have it printed\n\ - on demand. " - address: 'Brisbane, Australia' - author: Matthew Dabin and Terumi Narushima and Stephen Beirne and Christian Ritz - and Kraig Grady - bibtex: "@inproceedings{Dabin2016,\n abstract = {This project explores the potential\ - \ for 3D modelling and printing\nto create customised flutes that can play music\ - \ in a variety of microtonal\nscales. One of the challenges in the field of microtonality\ - \ is that conventional\nmusical instruments are inadequate for realising the abundance\ - \ of theoretical\ntunings that musicians wish to investigate. This paper focuses\ - \ on the development\nof two types of flutes, the recorder and transverse flute,\ - \ with interchangeable\nmouthpieces. 
These flutes are designed to play subharmonic\ - \ microtonal scales. The\ndiscussion provides an overview of the design and implementation\ - \ process,\nincluding calculation methods for acoustic modelling and 3D printing\n\ - technologies, as well as an evaluation of some of the difficulties encountered.\n\ - Results from our 3D printed flutes suggest that whilst further refinements are\n\ - necessary in our designs, 3D modelling and printing techniques offer new and\n\ - valuable methods for the design and production of customised musical instruments.\n\ - The long term goal of this project is to create a system in which users can\n\ - specify the tuning of their instrument to generate a 3D model and have it printed\n\ - on demand. },\n address = {Brisbane, Australia},\n author = {Matthew Dabin and\ - \ Terumi Narushima and Stephen Beirne and Christian Ritz and Kraig Grady},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176014},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {286--290},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {{3D} Modelling and Printing of Microtonal Flutes},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0056.pdf},\n\ - \ year = {2016}\n}\n" + ID: Cavdir2019 + abstract: 'This research represents an evolution and evaluation of the embodied + physical laptop instruments. Specifically, these are instruments that are physical + in that they use bodily interaction, take advantage of the physical affordances + of the laptop. They are embodied in the sense that instruments are played in such + ways where the sound is embedded to be close to the instrument. Three distinct + laptop instruments, Taptop, Armtop, and Blowtop, are introduced in this paper. + We discuss the integrity of the design process with composing for laptop instruments + and performing with them. 
In this process, our aim is to blur the boundaries of + the composer and designer/engineer roles. How the physicality is achieved by leveraging + musical gestures gained through traditional instrument practice is studied, as + well as those inspired by body gestures. We aim to explore how using such interaction + methods affects the communication between the ensemble and the audience. An aesthetic-first + qualitative evaluation of these interfaces is discussed, through works and performances + crafted specifically for these instruments and presented in the concert setting + of the laptop orchestra. In so doing, we reflect on how such physical, embodied + instrument design practices can inform a different kind of expressive and performance + mindset.' + address: 'Porto Alegre, Brazil' + author: Doga Cavdir and Juan Sierra and Ge Wang + bibtex: "@inproceedings{Cavdir2019,\n abstract = {This research represents an evolution\ + \ and evaluation of the embodied physical laptop instruments. Specifically, these\ + \ are instruments that are physical in that they use bodily interaction, take\ + \ advantage of the physical affordances of the laptop. They are embodied in the\ + \ sense that instruments are played in such ways where the sound is embedded to\ + \ be close to the instrument. Three distinct laptop instruments, Taptop, Armtop,\ + \ and Blowtop, are introduced in this paper. We discuss the integrity of the design\ + \ process with composing for laptop instruments and performing with them. In this\ + \ process, our aim is to blur the boundaries of the composer and designer/engineer\ + \ roles. How the physicality is achieved by leveraging musical gestures gained\ + \ through traditional instrument practice is studied, as well as those inspired\ + \ by body gestures. We aim to explore how using such interaction methods affects\ + \ the communication between the ensemble and the audience. 
An aesthetic-first\ + \ qualitative evaluation of these interfaces is discussed, through works and performances\ + \ crafted specifically for these instruments and presented in the concert setting\ + \ of the laptop orchestra. In so doing, we reflect on how such physical, embodied\ + \ instrument design practices can inform a different kind of expressive and performance\ + \ mindset.},\n address = {Porto Alegre, Brazil},\n author = {Doga Cavdir and Juan\ + \ Sierra and Ge Wang},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672864},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {53--58},\n publisher = {UFRGS},\n title = {Taptop, Armtop,\ + \ Blowtop: Evolving the Physical Laptop Instrument},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper011.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176014 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672864 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 286--290 - publisher: Queensland Conservatorium Griffith University - title: 3D Modelling and Printing of Microtonal Flutes - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0056.pdf - year: 2016 + month: June + pages: 53--58 + publisher: UFRGS + title: 'Taptop, Armtop, Blowtop: Evolving the Physical Laptop Instrument' + url: http://www.nime.org/proceedings/2019/nime2019_paper011.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Hofmann2016a - abstract: "Low cost, credit card size computers like the Raspberry Pi allow\nmusicians\ - \ to experiment with building software-based standalone musical\ninstruments.\ - \ The COSMO Project aims to provide an easy-to-use hardware and\nsoftware framework\ - \ to build Csound based instruments as hardware devices. 
Inside\nthe instrument,\ - \ the Csound software is running on a Raspberry Pi computer,\nconnected to a custom\ - \ designed interface board (COSMO-HAT) that allows to connect\npotentiometers,\ - \ switches, LED's, and sensors. A classic stomp box design is used\nto demonstrate\ - \ how Csound can be brought on stage as a stand-alone hardware\neffect instrument." - address: 'Brisbane, Australia' - author: Alex Hofmann and Bernt Waerstad and Kristoffer Koch - bibtex: "@inproceedings{Hofmann2016a,\n abstract = {Low cost, credit card size computers\ - \ like the Raspberry Pi allow\nmusicians to experiment with building software-based\ - \ standalone musical\ninstruments. The COSMO Project aims to provide an easy-to-use\ - \ hardware and\nsoftware framework to build Csound based instruments as hardware\ - \ devices. Inside\nthe instrument, the Csound software is running on a Raspberry\ - \ Pi computer,\nconnected to a custom designed interface board (COSMO-HAT) that\ - \ allows to connect\npotentiometers, switches, LED's, and sensors. A classic stomp\ - \ box design is used\nto demonstrate how Csound can be brought on stage as a stand-alone\ - \ hardware\neffect instrument.},\n address = {Brisbane, Australia},\n author =\ - \ {Alex Hofmann and Bernt Waerstad and Kristoffer Koch},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176030},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {291--294},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Csound Instruments On Stage},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0057.pdf},\n\ - \ year = {2016}\n}\n" + ID: GomezJauregui2019 + abstract: 'This work aims to explore the use of a new gesture-based interaction + built on automatic recognition of Soundpainting structured gestural language. 
+ In the proposed approach, a composer (called Soundpainter) performs Soundpainting + gestures facing a Kinect sensor. Then, a gesture recognition system captures gestures + that are sent to a sound generator software. The proposed method was used to stage + an artistic show in which a Soundpainter had to improvise with 6 different gestures + to generate a musical composition from different sounds in real time. The accuracy + of the gesture recognition system was evaluated as well as Soundpainter''s user + experience. In addition, a user evaluation study for using our proposed system + in a learning context was also conducted. Current results open up perspectives + for the design of new artistic expressions based on the use of automatic gestural + recognition supported by Soundpainting language.' + address: 'Porto Alegre, Brazil' + author: David Antonio Gómez Jáuregui and Irvin Dongo and Nadine Couture + bibtex: "@inproceedings{GomezJauregui2019,\n abstract = {This work aims to explore\ + \ the use of a new gesture-based interaction built on automatic recognition of\ + \ Soundpainting structured gestural language. In the proposed approach, a composer\ + \ (called Soundpainter) performs Soundpainting gestures facing a Kinect sensor.\ + \ Then, a gesture recognition system captures gestures that are sent to a sound\ + \ generator software. The proposed method was used to stage an artistic show in\ + \ which a Soundpainter had to improvise with 6 different gestures to generate\ + \ a musical composition from different sounds in real time. The accuracy of the\ + \ gesture recognition system was evaluated as well as Soundpainter's user experience.\ + \ In addition, a user evaluation study for using our proposed system in a learning\ + \ context was also conducted. 
Current results open up perspectives for the design\ + \ of new artistic expressions based on the use of automatic gestural recognition\ + \ supported by Soundpainting language.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {David Antonio Gómez Jáuregui and Irvin Dongo and Nadine Couture},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672866},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {59--64},\n\ + \ publisher = {UFRGS},\n title = {Automatic Recognition of Soundpainting for the\ + \ Generation of Electronic Music Sounds},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper012.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176030 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672866 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 291--294 - publisher: Queensland Conservatorium Griffith University - title: Csound Instruments On Stage - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0057.pdf - year: 2016 + month: June + pages: 59--64 + publisher: UFRGS + title: Automatic Recognition of Soundpainting for the Generation of Electronic Music + Sounds + url: http://www.nime.org/proceedings/2019/nime2019_paper012.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Resch2016 - abstract: "This paper describes a setup for embedding complex virtual\ninstruments\ - \ such as a physical model of the prepared piano sound synthesis in the\nsequencing\ - \ library note~ for Max. 
Based on the requirements of contemporary music\nand\ - \ media arts, note~ introduces computer-aided composition techniques and\ngraphical\ - \ user interfaces for sequencing and editing into the real time world of\nMax/MSP.\ - \ A piano roll, a microtonal musical score and the capability to attach\nfloating-point\ - \ lists of (theoretically) arbitrary length to a single note-on\nevent, enables\ - \ artists to play, edit and record compound sound synthesis with the\nnecessary\ - \ precision." - address: 'Brisbane, Australia' - author: Thomas Resch and Stefan Bilbao - bibtex: "@inproceedings{Resch2016,\n abstract = {This paper describes a setup for\ - \ embedding complex virtual\ninstruments such as a physical model of the prepared\ - \ piano sound synthesis in the\nsequencing library note~ for Max. Based on the\ - \ requirements of contemporary music\nand media arts, note~ introduces computer-aided\ - \ composition techniques and\ngraphical user interfaces for sequencing and editing\ - \ into the real time world of\nMax/MSP. 
A piano roll, a microtonal musical score\ - \ and the capability to attach\nfloating-point lists of (theoretically) arbitrary\ - \ length to a single note-on\nevent, enables artists to play, edit and record\ - \ compound sound synthesis with the\nnecessary precision.},\n address = {Brisbane,\ - \ Australia},\n author = {Thomas Resch and Stefan Bilbao},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176108},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {295--299},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Controlling complex virtuel instruments---A setup with note~ for Max\ - \ and prepared piano sound synthesis},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0058.pdf},\n\ - \ year = {2016}\n}\n" + ID: Morreale2019 + abstract: 'This paper introduces the Magpick, an augmented pick for electric guitar + that uses electromagnetic induction to sense the motion of the pick with respect + to the permanent magnets in the guitar pickup. The Magpick provides the guitarist + with nuanced control of the sound which coexists with traditional plucking-hand + technique. The paper presents three ways that the signal from the pick can modulate + the guitar sound, followed by a case study of its use in which 11 guitarists tested + the Magpick for five days and composed a piece with it. Reflecting on their comments + and experiences, we outline the innovative features of this technology from the + point of view of performance practice. In particular, compared to other augmentations, + the high temporal resolution, low latency, and large dynamic range of the Magpick + support a highly nuanced control over the sound. Our discussion highlights the + utility of having the locus of augmentation coincide with the locus of interaction.' 
+ address: 'Porto Alegre, Brazil' + author: Fabio Morreale and Andrea Guidi and Andrew P. McPherson + bibtex: "@inproceedings{Morreale2019,\n abstract = {This paper introduces the Magpick,\ + \ an augmented pick for electric guitar that uses electromagnetic induction to\ + \ sense the motion of the pick with respect to the permanent magnets in the guitar\ + \ pickup. The Magpick provides the guitarist with nuanced control of the sound\ + \ which coexists with traditional plucking-hand technique. The paper presents\ + \ three ways that the signal from the pick can modulate the guitar sound, followed\ + \ by a case study of its use in which 11 guitarists tested the Magpick for five\ + \ days and composed a piece with it. Reflecting on their comments and experiences,\ + \ we outline the innovative features of this technology from the point of view\ + \ of performance practice. In particular, compared to other augmentations, the\ + \ high temporal resolution, low latency, and large dynamic range of the Magpick\ + \ support a highly nuanced control over the sound. Our discussion highlights the\ + \ utility of having the locus of augmentation coincide with the locus of interaction.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Fabio Morreale and Andrea Guidi\ + \ and Andrew P. 
McPherson},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672868},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {65--70},\n publisher = {UFRGS},\n title = {Magpick: an\ + \ Augmented Guitar Pick for Nuanced Control},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper013.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176108 - isbn: 978-1-925455-13-7 - issn: 2220-4806 - pages: 295--299 - publisher: Queensland Conservatorium Griffith University - title: Controlling complex virtuel instruments---A setup with note~ for Max and - prepared piano sound synthesis - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0058.pdf - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: Brown2016 - abstract: "Camera-based motion tracking has become a popular enabling\ntechnology\ - \ for gestural human-computer interaction. However, the approach suffers\nfrom\ - \ several limitations which have been shown to be particularly problematic\nwhen\ - \ employed within musical contexts. This paper presents Leimu, a wrist mount\n\ - that couples a Leap Motion optical sensor with an inertial measurement unit to\n\ - combine the benefits of wearable and camera-based motion tracking. Leimu is\n\ - designed, developed and then evaluated using discourse and statistical analysis\n\ - methods. The results indicate that the Leimu is an effective interface for\ngestural\ - \ music interaction and offers improved tracking precision over Leap\nMotion positioned\ - \ on a table top. 
" - address: 'Brisbane, Australia' - author: Dom Brown and Nathan Renney and Adam Stark and Chris Nash and Tom Mitchell - bibtex: "@inproceedings{Brown2016,\n abstract = {Camera-based motion tracking has\ - \ become a popular enabling\ntechnology for gestural human-computer interaction.\ - \ However, the approach suffers\nfrom several limitations which have been shown\ - \ to be particularly problematic\nwhen employed within musical contexts. This\ - \ paper presents Leimu, a wrist mount\nthat couples a Leap Motion optical sensor\ - \ with an inertial measurement unit to\ncombine the benefits of wearable and camera-based\ - \ motion tracking. Leimu is\ndesigned, developed and then evaluated using discourse\ - \ and statistical analysis\nmethods. The results indicate that the Leimu is an\ - \ effective interface for\ngestural music interaction and offers improved tracking\ - \ precision over Leap\nMotion positioned on a table top. },\n address = {Brisbane,\ - \ Australia},\n author = {Dom Brown and Nathan Renney and Adam Stark and Chris\ - \ Nash and Tom Mitchell},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176000},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {300--304},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Leimu: Gloveless\ - \ Music Interaction Using a Wrist Mounted Leap Motion},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0059.pdf},\n year\ - \ = {2016}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176000 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672868 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 300--304 - publisher: Queensland Conservatorium Griffith University - title: 'Leimu: Gloveless Music Interaction Using a Wrist Mounted Leap Motion' - track: Papers 
- url: http://www.nime.org/proceedings/2016/nime2016_paper0059.pdf - year: 2016 + month: June + pages: 65--70 + publisher: UFRGS + title: 'Magpick: an Augmented Guitar Pick for Nuanced Control' + url: http://www.nime.org/proceedings/2019/nime2019_paper013.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Gnicode243mez2016 - abstract: "This paper presents the design of a Max/MSP flexible workflow\nframework\ - \ built for complex real-time interactive performances. This system was\ndeveloped\ - \ for Emovere, an interdisciplinary piece for dance, biosignals, sound\nand visuals,\ - \ yet it was conceived to accommodate interactive performances of\ndifferent nature\ - \ and of heterogeneous technical requirements, which we believe to\nrepresent\ - \ a common underlying structure among these.\nThe work presented in this document\ - \ proposes a framework that takes care of the\nsignal input/output stages, as\ - \ well as storing and recalling presets and scenes,\nthus allowing the user to\ - \ focus on the programming of interaction models and\nsound synthesis or sound\ - \ processing. Results are presented with Emovere as an\nexample case, discussing\ - \ the advantages and further challenges that this\nframework offers for other\ - \ performance scenarios." - address: 'Brisbane, Australia' - author: Esteban Gómez and Javier Jaimovich - bibtex: "@inproceedings{Gnicode243mez2016,\n abstract = {This paper presents the\ - \ design of a Max/MSP flexible workflow\nframework built for complex real-time\ - \ interactive performances. 
This system was\ndeveloped for Emovere, an interdisciplinary\ - \ piece for dance, biosignals, sound\nand visuals, yet it was conceived to accommodate\ - \ interactive performances of\ndifferent nature and of heterogeneous technical\ - \ requirements, which we believe to\nrepresent a common underlying structure among\ - \ these.\nThe work presented in this document proposes a framework that takes\ - \ care of the\nsignal input/output stages, as well as storing and recalling presets\ - \ and scenes,\nthus allowing the user to focus on the programming of interaction\ - \ models and\nsound synthesis or sound processing. Results are presented with\ - \ Emovere as an\nexample case, discussing the advantages and further challenges\ - \ that this\nframework offers for other performance scenarios.},\n address = {Brisbane,\ - \ Australia},\n author = {Esteban G\\'{o}mez and Javier Jaimovich},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176018},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {305--309},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Designing a Flexible Workflow for Complex Real-Time\ - \ Interactive Performances},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0060.pdf},\n\ - \ year = {2016}\n}\n" + ID: Petit2019 + abstract: 'Skini is a platform for composing and producing live performances with + audience participating using connected devices (smartphones, tablets, PC, etc.). + The music composer creates beforehand musical elements such as melodic patterns, + sound patterns, instruments, group of instruments, and a dynamic score that governs + the way the basic elements will behave according to events produced by the audience. + During the concert or the performance, the audience, by interacting with the system, + gives birth to an original music composition. 
Skini music scores are expressed + in terms of constraints that establish relationships between instruments. A constraint + maybe instantaneous, for instance one may disable violins while trumpets are playing. + A constraint may also be temporal, for instance, the piano cannot play more than + 30 consecutive seconds. The Skini platform is implemented in Hop.js and HipHop.js. + HipHop.js, a synchronous reactive DLS, is used for implementing the music scores + as its elementary constructs consisting of high level operators such as parallel + executions, sequences, awaits, synchronization points, etc, form an ideal core + language for implementing Skini constraints. This paper presents the Skini platform. + It reports about live performances and an educational project. It briefly overviews + the use of HipHop.js for representing score.' + address: 'Porto Alegre, Brazil' + author: Bertrand Petit and manuel serrano + bibtex: "@inproceedings{Petit2019,\n abstract = {Skini is a platform for composing\ + \ and producing live performances with audience participating using connected\ + \ devices (smartphones, tablets, PC, etc.). The music composer creates beforehand\ + \ musical elements such as melodic patterns, sound patterns, instruments, group\ + \ of instruments, and a dynamic score that governs the way the basic elements\ + \ will behave according to events produced by the audience. During the concert\ + \ or the performance, the audience, by interacting with the system, gives birth\ + \ to an original music composition. Skini music scores are expressed in terms\ + \ of constraints that establish relationships between instruments. A constraint\ + \ maybe instantaneous, for instance one may disable violins while trumpets are\ + \ playing. A constraint may also be temporal, for instance, the piano cannot play\ + \ more than 30 consecutive seconds. The Skini platform is implemented in Hop.js\ + \ and HipHop.js. 
HipHop.js, a synchronous reactive DLS, is used for implementing\ + \ the music scores as its elementary constructs consisting of high level operators\ + \ such as parallel executions, sequences, awaits, synchronization points, etc,\ + \ form an ideal core language for implementing Skini constraints. This paper presents\ + \ the Skini platform. It reports about live performances and an educational project.\ + \ It briefly overviews the use of HipHop.js for representing score.},\n address\ + \ = {Porto Alegre, Brazil},\n author = {Bertrand Petit and manuel serrano},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672870},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {71--76},\n\ + \ publisher = {UFRGS},\n title = {Composing and executing Interactive music using\ + \ the HipHop.js language},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper014.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176018 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672870 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 305--309 - publisher: Queensland Conservatorium Griffith University - title: Designing a Flexible Workflow for Complex Real-Time Interactive Performances - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0060.pdf - year: 2016 + month: June + pages: 71--76 + publisher: UFRGS + title: Composing and executing Interactive music using the HipHop.js language + url: http://www.nime.org/proceedings/2019/nime2019_paper014.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Volioti2016 - abstract: "There is a growing interest in `unlocking' the motor\nskills of expert\ - \ musicians. 
Motivated by this need, the main objective of this\npaper is to present\ - \ a new way of modeling expressive gesture variations in\nmusical performance.\ - \ For this purpose, the 3D gesture recognition engine\n`x2Gesture' (eXpert eXpressive\ - \ Gesture) has been developed, inspired\nby the Gesture Variation Follower, which\ - \ is initially designed and developed at\nIRCAM in Paris and then extended at\ - \ Goldsmiths College in London. x2Gesture\nsupports both learning of musical gestures\ - \ and live performing, through gesture\nsonification, as a unified user experience.\ - \ The deeper understanding of the\nexpressive gestural variations permits to define\ - \ the confidence bounds of the\nexpert's gestures, which are used during the decoding\ - \ phase of the\nrecognition. The first experiments show promising results in terms\ - \ of recognition\naccuracy and temporal alignment between template and performed\ - \ gesture, which\nleads to a better fluidity and immediacy and thus gesture sonification. " - address: 'Brisbane, Australia' - author: Christina Volioti and Sotiris Manitsaris and Eleni Katsouli and Athanasios - Manitsaris - bibtex: "@inproceedings{Volioti2016,\n abstract = {There is a growing interest in\ - \ `unlocking' the motor\nskills of expert musicians. Motivated by this need, the\ - \ main objective of this\npaper is to present a new way of modeling expressive\ - \ gesture variations in\nmusical performance. For this purpose, the 3D gesture\ - \ recognition engine\n`x2Gesture' (eXpert eXpressive Gesture) has been developed,\ - \ inspired\nby the Gesture Variation Follower, which is initially designed and\ - \ developed at\nIRCAM in Paris and then extended at Goldsmiths College in London.\ - \ x2Gesture\nsupports both learning of musical gestures and live performing, through\ - \ gesture\nsonification, as a unified user experience. 
The deeper understanding\ - \ of the\nexpressive gestural variations permits to define the confidence bounds\ - \ of the\nexpert's gestures, which are used during the decoding phase of the\n\ - recognition. The first experiments show promising results in terms of recognition\n\ - accuracy and temporal alignment between template and performed gesture, which\n\ - leads to a better fluidity and immediacy and thus gesture sonification. },\n address\ - \ = {Brisbane, Australia},\n author = {Christina Volioti and Sotiris Manitsaris\ - \ and Eleni Katsouli and Athanasios Manitsaris},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176137},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {310--315},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {x2Gesture: how machines could learn expressive gesture variations of\ - \ expert musicians},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0061.pdf},\n\ - \ year = {2016}\n}\n" + ID: Rocha2019 + abstract: 'Due to video game controls great presence in popular culture and its + ease of access, even people who are not in the habit of playing electronic games + possibly interacted with this kind of interface once in a lifetime. Thus, gestures + like pressing a sequence of buttons, pressing them simultaneously or sliding your + fingers through the control can be mapped for musical creation. This work aims + the elaboration of a strategy in which several gestures performed in a joystick + control can influence one or several parameters of the sound synthesis, making + a mapping denominated many to many. Buttons combinations used to perform game + actions that are common in fighting games, like Street Fighter, were mapped to + the synthesizer to create a music. 
Experiments show that this mapping is capable + of influencing the musical expression of a DMI making it closer to an acoustic + instrument.' + address: 'Porto Alegre, Brazil' + author: Gabriel Lopes Rocha and João Teixera Araújo and Flávio Luiz Schiavoni + bibtex: "@inproceedings{Rocha2019,\n abstract = {Due to video game controls great\ + \ presence in popular culture and its ease of access, even people who are not\ + \ in the habit of playing electronic games possibly interacted with this kind\ + \ of interface once in a lifetime. Thus, gestures like pressing a sequence of\ + \ buttons, pressing them simultaneously or sliding your fingers through the control\ + \ can be mapped for musical creation. This work aims the elaboration of a strategy\ + \ in which several gestures performed in a joystick control can influence one\ + \ or several parameters of the sound synthesis, making a mapping denominated many\ + \ to many. Buttons combinations used to perform game actions that are common in\ + \ fighting games, like Street Fighter, were mapped to the synthesizer to create\ + \ a music. 
Experiments show that this mapping is capable of influencing the musical\ + \ expression of a DMI making it closer to an acoustic instrument.},\n address\ + \ = {Porto Alegre, Brazil},\n author = {Gabriel Lopes Rocha and João Teixera Araújo\ + \ and Flávio Luiz Schiavoni},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672872},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {77--78},\n publisher = {UFRGS},\n title = {Ha Dou Ken Music:\ + \ Different mappings to play music with joysticks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper015.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176137 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672872 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 310--315 - publisher: Queensland Conservatorium Griffith University - title: 'x2Gesture: how machines could learn expressive gesture variations of expert - musicians' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0061.pdf - year: 2016 + month: June + pages: 77--78 + publisher: UFRGS + title: 'Ha Dou Ken Music: Different mappings to play music with joysticks' + url: http://www.nime.org/proceedings/2019/nime2019_paper015.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Jaimovich2016 - abstract: 'This paper presents the work developed for Emovere: an interactive real-time - interdisciplinary performance that measures physiological signals from dancers - to drive a piece that explores and reflects around the biology of emotion. 
This - document focuses on the design of a series of interaction modes and materials - that were developed for this performance, and are believed to be a contribution - for the creation of artistic projects that work with dancers and physiological - signals. The paper introduces the motivation and theoretical framework behind - this project, to then deliver a detailed description and analysis of four different - interaction modes built to drive this performance using electromyography and electrocardiography. - Readers will find a discussion of the results obtained with these designs, as - well as comments on future work.' - address: 'Brisbane, Australia' - author: Javier Jaimovich - bibtex: "@inproceedings{Jaimovich2016,\n abstract = {This paper presents the work\ - \ developed for Emovere: an interactive real-time interdisciplinary performance\ - \ that measures physiological signals from dancers to drive a piece that explores\ - \ and reflects around the biology of emotion. This document focuses on the design\ - \ of a series of interaction modes and materials that were developed for this\ - \ performance, and are believed to be a contribution for the creation of artistic\ - \ projects that work with dancers and physiological signals. The paper introduces\ - \ the motivation and theoretical framework behind this project, to then deliver\ - \ a detailed description and analysis of four different interaction modes built\ - \ to drive this performance using electromyography and electrocardiography. 
Readers\ - \ will find a discussion of the results obtained with these designs, as well as\ - \ comments on future work.},\n address = {Brisbane, Australia},\n author = {Javier\ - \ Jaimovich},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176036},\n isbn\ - \ = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {316--320},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Emovere: Designing\ - \ Sound Interactions for Biosignals and Dancers},\n track = {Papers},\n url =\ - \ {http://www.nime.org/proceedings/2016/nime2016_paper0062.pdf},\n year = {2016}\n\ - }\n" + ID: Næss2019 + abstract: 'This paper describes a new intelligent interactive instrument, based + on an embedded computing platform, where deep neural networks are applied to interactive + music generation. Even though using neural networks for music composition is not + uncommon, a lot of these models tend to not support any form of user interaction. + We introduce a self-contained intelligent instrument using generative models, + with support for real-time interaction where the user can adjust high-level parameters + to modify the music generated by the instrument. We describe the technical details + of our generative model and discuss the experience of using the system as part + of musical performance.' + address: 'Porto Alegre, Brazil' + author: Torgrim Rudland Næss and Charles Patrick Martin + bibtex: "@inproceedings{Næss2019,\n abstract = {This paper describes a new intelligent\ + \ interactive instrument, based on an embedded computing platform, where deep\ + \ neural networks are applied to interactive music generation. Even though using\ + \ neural networks for music composition is not uncommon, a lot of these models\ + \ tend to not support any form of user interaction. 
We introduce a self-contained\ + \ intelligent instrument using generative models, with support for real-time interaction\ + \ where the user can adjust high-level parameters to modify the music generated\ + \ by the instrument. We describe the technical details of our generative model\ + \ and discuss the experience of using the system as part of musical performance.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Torgrim Rudland Næss and Charles\ + \ Patrick Martin},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672874},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {79--82},\n publisher = {UFRGS},\n title = {A Physical Intelligent\ + \ Instrument using Recurrent Neural Networks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper016.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176036 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672874 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 316--320 - publisher: Queensland Conservatorium Griffith University - title: 'Emovere: Designing Sound Interactions for Biosignals and Dancers' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0062.pdf - year: 2016 + month: June + pages: 79--82 + publisher: UFRGS + title: A Physical Intelligent Instrument using Recurrent Neural Networks + url: http://www.nime.org/proceedings/2019/nime2019_paper016.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Snicode248derberg2016 - abstract: "This paper explores the possibility of breaking the barrier\nbetween\ - \ deaf and hearing people when it comes to the subject of making music.\nSuggestions\ - \ on how deaf and hearing people can collaborate in creating music\ntogether,\ - \ are presented. 
The conducted research will focus on deaf people with a\ngeneral\ - \ interest in music as well as hearing musicians as target groups. Through\nreviewing\ - \ different related research areas, it is found that visualization of\nsound along\ - \ with a haptic feedback can help deaf people interpret and interact\nwith music.\ - \ With this in mind, three variations of a collaborative user interface\nare presented,\ - \ in which deaf and hearing people are meant to collaborate in\ncreating short\ - \ beats and melody sequences. Through evaluating the three\nprototypes, with two\ - \ deaf people and two hearing musicians, it is found that the\ntarget groups can\ - \ collaborate to some extent in creating beats. However, in order\nfor the target\ - \ groups to create melodic sequences together in a satisfactory\nmanner, more\ - \ detailed visualization and distributed haptic output is necessary,\nmostly due\ - \ to the fact that the deaf test participants struggle in distinguishing\nbetween\ - \ higher pitch and timbre. " - address: 'Brisbane, Australia' - author: 'Söderberg, Ene Alicia and Odgaard, Rasmus Emil and Sarah Bitsch and Oliver - Höeg-Jensen and Christensen, Nikolaj Schildt and Poulsen, Sören Dahl and Steven - Gelineck' - bibtex: "@inproceedings{Snicode248derberg2016,\n abstract = {This paper explores\ - \ the possibility of breaking the barrier\nbetween deaf and hearing people when\ - \ it comes to the subject of making music.\nSuggestions on how deaf and hearing\ - \ people can collaborate in creating music\ntogether, are presented. The conducted\ - \ research will focus on deaf people with a\ngeneral interest in music as well\ - \ as hearing musicians as target groups. Through\nreviewing different related\ - \ research areas, it is found that visualization of\nsound along with a haptic\ - \ feedback can help deaf people interpret and interact\nwith music. 
With this\ - \ in mind, three variations of a collaborative user interface\nare presented,\ - \ in which deaf and hearing people are meant to collaborate in\ncreating short\ - \ beats and melody sequences. Through evaluating the three\nprototypes, with two\ - \ deaf people and two hearing musicians, it is found that the\ntarget groups can\ - \ collaborate to some extent in creating beats. However, in order\nfor the target\ - \ groups to create melodic sequences together in a satisfactory\nmanner, more\ - \ detailed visualization and distributed haptic output is necessary,\nmostly due\ - \ to the fact that the deaf test participants struggle in distinguishing\nbetween\ - \ higher pitch and timbre. },\n address = {Brisbane, Australia},\n author = {S\\\ - ''{o}derberg, Ene Alicia and Odgaard, Rasmus Emil and Sarah Bitsch and Oliver\ - \ H\\''{o}eg-Jensen and Christensen, Nikolaj Schildt and Poulsen, S\\''{o}ren\ - \ Dahl and Steven Gelineck},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176112},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {321--326},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Music Aid---Towards\ - \ a Collaborative Experience for Deaf and Hearing People in Creating Music},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0063.pdf},\n\ - \ year = {2016}\n}\n" + ID: Fraietta2019 + abstract: 'This paper details the mapping strategy of the work Order and Progress: + a sonic segue across A Auriverde, a composition based upon the skyscape represented + on the Brazilian flag. This work uses the Stellarium planetarium software as a + performance interface, blending the political symbology, scientific data and musical + mapping of each star represented on the flag as a multimedia performance. 
The + work is interfaced through the Stellar Command module, a Java based program that + converts the visible field of view from the Stellarium planetarium interface to + astronomical data through the VizieR database of astronomical catalogues. This + scientific data is then mapped to musical parameters through a Java based programming + environment. I will discuss the strategies employed to create a work that was + not only artistically novel, but also visually engaging and scientifically accurate.' + address: 'Porto Alegre, Brazil' + author: Angelo Fraietta + bibtex: "@inproceedings{Fraietta2019,\n abstract = {This paper details the mapping\ + \ strategy of the work Order and Progress: a sonic segue across A Auriverde, a\ + \ composition based upon the skyscape represented on the Brazilian flag. This\ + \ work uses the Stellarium planetarium software as a performance interface, blending\ + \ the political symbology, scientific data and musical mapping of each star represented\ + \ on the flag as a multimedia performance. The work is interfaced through the\ + \ Stellar Command module, a Java based program that converts the visible field\ + \ of view from the Stellarium planetarium interface to astronomical data through\ + \ the VizieR database of astronomical catalogues. This scientific data is then\ + \ mapped to musical parameters through a Java based programming environment. 
I\ + \ will discuss the strategies employed to create a work that was not only artistically\ + \ novel, but also visually engaging and scientifically accurate.},\n address =\ + \ {Porto Alegre, Brazil},\n author = {Angelo Fraietta},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672876},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {83--88},\n publisher = {UFRGS},\n\ + \ title = {Creating Order and Progress},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper017.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176112 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672876 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 321--326 - publisher: Queensland Conservatorium Griffith University - title: Music Aid---Towards a Collaborative Experience for Deaf and Hearing People - in Creating Music - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0063.pdf - year: 2016 + month: June + pages: 83--88 + publisher: UFRGS + title: Creating Order and Progress + url: http://www.nime.org/proceedings/2019/nime2019_paper017.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Larsen2016 - abstract: "Many forms of enabling technologies exist today. While\ntechnologies\ - \ aimed at enabling basic tasks in everyday life (locomotion, eating,\netc.) are\ - \ more common, musical instruments for people with disabilities can\nprovide a\ - \ chance for emotional enjoyment, as well as improve physical conditions\nthrough\ - \ therapeutic use. The field of musical instruments for people with\nphysical\ - \ disabilities, however, is still an emerging area of research. 
In this\narticle,\ - \ we look at the current state of developments, including a survey of\ncustom\ - \ designed instruments, augmentations / modifications of existing\ninstruments,\ - \ music-supported therapy, and recent trends in the area. The overview\nis extrapolated\ - \ to look at where the research is headed, providing insights for\npotential future\ - \ work." - address: 'Brisbane, Australia' - author: Jeppe Veirum Larsen and Dan Overholt and Thomas B. Moeslund - bibtex: "@inproceedings{Larsen2016,\n abstract = {Many forms of enabling technologies\ - \ exist today. While\ntechnologies aimed at enabling basic tasks in everyday life\ - \ (locomotion, eating,\netc.) are more common, musical instruments for people\ - \ with disabilities can\nprovide a chance for emotional enjoyment, as well as\ - \ improve physical conditions\nthrough therapeutic use. The field of musical instruments\ - \ for people with\nphysical disabilities, however, is still an emerging area of\ - \ research. In this\narticle, we look at the current state of developments, including\ - \ a survey of\ncustom designed instruments, augmentations / modifications of existing\n\ - instruments, music-supported therapy, and recent trends in the area. The overview\n\ - is extrapolated to look at where the research is headed, providing insights for\n\ - potential future work.},\n address = {Brisbane, Australia},\n author = {Jeppe\ - \ Veirum Larsen and Dan Overholt and Thomas B. 
Moeslund},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176056},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {327--331},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {The Prospects of Musical Instruments For People with Physical Disabilities},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0064.pdf},\n\ - \ year = {2016}\n}\n" + ID: Tragtenberg2019 + abstract: 'This paper discusses the creation of instruments in which music is intentionally + generated by dance. We introduce the conceptual framework of Digital Dance and + Music Instruments (DDMI). Several DDMI have already been created, but they have + been developed isolatedly, and there is still a lack of a common process of ideation + and development. Knowledge about Digital Musical Instruments (DMIs) and Interactive + Dance Systems (IDSs) can contribute to the design of DDMI, but the former brings + few contributions to the body''s expressiveness, and the latter brings few references + to an instrumental relationship with music. Because of those different premises, + the integration between both paradigms can be an arduous task for the designer + of DDMI. The conceptual framework of DDMI can also serve as a bridge between DMIs + and IDSs, serving as a lingua franca between both communities and facilitating + the exchange of knowledge. The conceptual framework has shown to be a promising + analytical tool for the design, development, and evaluation of new digital dance + and music instrument.' + address: 'Porto Alegre, Brazil' + author: João Nogueira Tragtenberg and Filipe Calegario and Giordano Cabral and Geber + L. 
Ramalho + bibtex: "@inproceedings{Tragtenberg2019,\n abstract = {This paper discusses the\ + \ creation of instruments in which music is intentionally generated by dance.\ + \ We introduce the conceptual framework of Digital Dance and Music Instruments\ + \ (DDMI). Several DDMI have already been created, but they have been developed\ + \ isolatedly, and there is still a lack of a common process of ideation and development.\ + \ Knowledge about Digital Musical Instruments (DMIs) and Interactive Dance Systems\ + \ (IDSs) can contribute to the design of DDMI, but the former brings few contributions\ + \ to the body's expressiveness, and the latter brings few references to an instrumental\ + \ relationship with music. Because of those different premises, the integration\ + \ between both paradigms can be an arduous task for the designer of DDMI. The\ + \ conceptual framework of DDMI can also serve as a bridge between DMIs and IDSs,\ + \ serving as a lingua franca between both communities and facilitating the exchange\ + \ of knowledge. The conceptual framework has shown to be a promising analytical\ + \ tool for the design, development, and evaluation of new digital dance and music\ + \ instrument.},\n address = {Porto Alegre, Brazil},\n author = {João Nogueira\ + \ Tragtenberg and Filipe Calegario and Giordano Cabral and Geber L. 
Ramalho},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672878},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {89--94},\n\ + \ publisher = {UFRGS},\n title = {Towards the Concept of Digital Dance and Music\ + \ Instruments},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper018.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176056 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672878 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 327--331 - publisher: Queensland Conservatorium Griffith University - title: The Prospects of Musical Instruments For People with Physical Disabilities - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0064.pdf - year: 2016 + month: June + pages: 89--94 + publisher: UFRGS + title: Towards the Concept of Digital Dance and Music Instruments + url: http://www.nime.org/proceedings/2019/nime2019_paper018.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Benson2016 - abstract: "We present an innovative sound spatialization and shaping\ninterface,\ - \ called SoundMorpheus, which allows the placement of sounds in space,\nas well\ - \ as the altering of sound characteristics, via arm movements that resemble\n\ - those of a conductor. The interface displays sounds (or their attributes) to the\n\ - user, who reaches for them with one or both hands, grabs them, and gently or\n\ - forcefully sends them around in space, in a 360° circle. The system\ncombines\ - \ MIDI and traditional instruments with one or more myoelectric sensors.\nThese\ - \ components may be physically collocated or distributed in various locales\n\ - connected via the Internet. 
This system also supports the performance of\nacousmatic\ - \ and electronic music, enabling performances where the traditionally\ncentral\ - \ mixing board, need not be touched at all (or minimally touched for\ncalibration).\ - \ Finally, the system may facilitate the recording of a visual score\nof a performance,\ - \ which can be stored for later playback and additional\nmanipulation. We present\ - \ three projects that utilize SoundMorpheus and\ndemonstrate its capabilities\ - \ and potential." - address: 'Brisbane, Australia' - author: Christopher Benson and Bill Manaris and Seth Stoudenmier and Timothy Ward - bibtex: "@inproceedings{Benson2016,\n abstract = {We present an innovative sound\ - \ spatialization and shaping\ninterface, called SoundMorpheus, which allows the\ - \ placement of sounds in space,\nas well as the altering of sound characteristics,\ - \ via arm movements that resemble\nthose of a conductor. The interface displays\ - \ sounds (or their attributes) to the\nuser, who reaches for them with one or\ - \ both hands, grabs them, and gently or\nforcefully sends them around in space,\ - \ in a 360$^{\\circ}$ circle. The system\ncombines MIDI and traditional instruments\ - \ with one or more myoelectric sensors.\nThese components may be physically collocated\ - \ or distributed in various locales\nconnected via the Internet. This system also\ - \ supports the performance of\nacousmatic and electronic music, enabling performances\ - \ where the traditionally\ncentral mixing board, need not be touched at all (or\ - \ minimally touched for\ncalibration). Finally, the system may facilitate the\ - \ recording of a visual score\nof a performance, which can be stored for later\ - \ playback and additional\nmanipulation. 
We present three projects that utilize\ - \ SoundMorpheus and\ndemonstrate its capabilities and potential.},\n address =\ - \ {Brisbane, Australia},\n author = {Christopher Benson and Bill Manaris and Seth\ - \ Stoudenmier and Timothy Ward},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1175982},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {332--337},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {SoundMorpheus:\ - \ A Myoelectric-Sensor Based Interface for Sound Spatialization and Shaping},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0065.pdf},\n\ - \ year = {2016}\n}\n" + ID: Bomba2019 + abstract: 'Visitors interact with a blindfolded artist''s body, the motions of which + are tracked and translated into synthesized four-channel sound, surrounding the + participants. Through social-physical and aural interactions, they play his instrument-body, + in a mutual dance. Crucial for this work has been the motion-to-sound mapping + design, and the investigations of bodily interaction with normal lay-people and + with professional contact-improvisation dancers. The extra layer of social-physical + interaction both constrains and inspires the participant-artist relation and the + sonic exploration, and through this, his body is transformed into an instrument, + and physical space is transformed into a sound-space. The project aims to explore + the experience of interaction between human and technology and its impact on one''s + bodily perception and embodiment, as well as the relation between body and space, + departing from a set of existing theories on embodiment. In the paper, its underlying + aesthetics are described and discussed, as well as the sensitive motion research + process behind it, and the technical implementation of the work. 
It is evaluated + based on participant behavior and experiences and analysis of its premiere exhibition + in 2018.' + address: 'Porto Alegre, Brazil' + author: Maros Suran Bomba and Palle Dahlstedt + bibtex: "@inproceedings{Bomba2019,\n abstract = {Visitors interact with a blindfolded\ + \ artist's body, the motions of which are tracked and translated into synthesized\ + \ four-channel sound, surrounding the participants. Through social-physical and\ + \ aural interactions, they play his instrument-body, in a mutual dance. Crucial\ + \ for this work has been the motion-to-sound mapping design, and the investigations\ + \ of bodily interaction with normal lay-people and with professional contact-improvisation\ + \ dancers. The extra layer of social-physical interaction both constrains and\ + \ inspires the participant-artist relation and the sonic exploration, and through\ + \ this, his body is transformed into an instrument, and physical space is transformed\ + \ into a sound-space. The project aims to explore the experience of interaction\ + \ between human and technology and its impact on one's bodily perception and embodiment,\ + \ as well as the relation between body and space, departing from a set of existing\ + \ theories on embodiment. In the paper, its underlying aesthetics are described\ + \ and discussed, as well as the sensitive motion research process behind it, and\ + \ the technical implementation of the work. 
It is evaluated based on participant\ + \ behavior and experiences and analysis of its premiere exhibition in 2018.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Maros Suran Bomba and Palle Dahlstedt},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672880},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {95--100},\n\ + \ publisher = {UFRGS},\n title = {Somacoustics: Interactive Body-as-Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_paper019.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175982 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672880 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 332--337 - publisher: Queensland Conservatorium Griffith University - title: 'SoundMorpheus: A Myoelectric-Sensor Based Interface for Sound Spatialization - and Shaping' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0065.pdf - year: 2016 + month: June + pages: 95--100 + publisher: UFRGS + title: 'Somacoustics: Interactive Body-as-Instrument' + url: http://www.nime.org/proceedings/2019/nime2019_paper019.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Ozdemir2016 - abstract: "PORTAL is an interactive performance tool that uses a laser\nprojector\ - \ to visualize computer-generated audio signals. In this paper, we first\noffer\ - \ an overview of earlier work on audiovisual and laser art that inspired the\n\ - current project. We then discuss our own implementation, focusing not only on\ - \ the\ntechnical issues related to the use of a laser projector in an artistic\ - \ context,\nbut also on the aesthetic considerations in dealing with the translation\ - \ of\nsounds into visuals, and vice versa. 
We provide detailed descriptions of\ - \ our\nhardware implementation, our software system, and its desktop and mobile\n\ - interfaces, which are made available online. Finally, we offer the results of\ - \ a\nuser study we conducted in the form of an interactive online survey on audience\n\ - perception of the relationship between analogous sounds and visuals, which was\n\ - explored as part of our performance practice." - address: 'Brisbane, Australia' - author: Gorkem Ozdemir and Anil Camci and Angus Forbes - bibtex: "@inproceedings{Ozdemir2016,\n abstract = {PORTAL is an interactive performance\ - \ tool that uses a laser\nprojector to visualize computer-generated audio signals.\ - \ In this paper, we first\noffer an overview of earlier work on audiovisual and\ - \ laser art that inspired the\ncurrent project. We then discuss our own implementation,\ - \ focusing not only on the\ntechnical issues related to the use of a laser projector\ - \ in an artistic context,\nbut also on the aesthetic considerations in dealing\ - \ with the translation of\nsounds into visuals, and vice versa. We provide detailed\ - \ descriptions of our\nhardware implementation, our software system, and its desktop\ - \ and mobile\ninterfaces, which are made available online. 
Finally, we offer the\ - \ results of a\nuser study we conducted in the form of an interactive online survey\ - \ on audience\nperception of the relationship between analogous sounds and visuals,\ - \ which was\nexplored as part of our performance practice.},\n address = {Brisbane,\ - \ Australia},\n author = {Gorkem Ozdemir and Anil Camci and Angus Forbes},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176102},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {338--343},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {PORTAL: An Audiovisual Laser Performance System},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0066.pdf},\n\ - \ year = {2016}\n}\n" + ID: Turczan2019 + abstract: 'The Scale Navigator is a graphical interface implementation of Dmitri + Tymoczko''s scale network designed to help generate algorithmic harmony and harmonically + synchronize performers in a laptop or electro-acoustic orchestra. The user manipulates + the Scale Navigator to direct harmony on a chord-to-chord level and on a scale-to-scale + level. In a live performance setting, the interface broadcasts control data, MIDI, + and real-time notation to an ensemble of live electronic performers, sight-reading + improvisers, and musical generative algorithms. ' + address: 'Porto Alegre, Brazil' + author: Nathan Turczan and Ajay Kapur + bibtex: "@inproceedings{Turczan2019,\n abstract = {The Scale Navigator is a graphical\ + \ interface implementation of Dmitri Tymoczko's scale network designed to help\ + \ generate algorithmic harmony and harmonically synchronize performers in a laptop\ + \ or electro-acoustic orchestra. The user manipulates the Scale Navigator to direct\ + \ harmony on a chord-to-chord level and on a scale-to-scale level. 
In a live performance\ + \ setting, the interface broadcasts control data, MIDI, and real-time notation\ + \ to an ensemble of live electronic performers, sight-reading improvisers, and\ + \ musical generative algorithms. },\n address = {Porto Alegre, Brazil},\n author\ + \ = {Nathan Turczan and Ajay Kapur},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672882},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {101--104},\n publisher = {UFRGS},\n title = {The Scale\ + \ Navigator: A System for Networked Algorithmic Harmony},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper020.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176102 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672882 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 338--343 - publisher: Queensland Conservatorium Griffith University - title: 'PORTAL: An Audiovisual Laser Performance System' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0066.pdf - year: 2016 + month: June + pages: 101--104 + publisher: UFRGS + title: 'The Scale Navigator: A System for Networked Algorithmic Harmony' + url: http://www.nime.org/proceedings/2019/nime2019_paper020.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Lindell2016 - abstract: "We organised an elven day intense course in materiality for\nmusical\ - \ expressions to explore underlying principles of New Interfaces for\nMusical\ - \ Expression (NIME) in higher education. We grounded the course in\ndifferent\ - \ aspects of materiality and gathered interdisciplinary student teams\nfrom three\ - \ Nordic universities. Electronic music instrument makers participated\nin providing\ - \ the course. 
In eleven days the students designed and built\ninterfaces for musical\ - \ expressions, composed a piece, and performed at the\nNorberg electronic music\ - \ festival. The students explored the relationship\nbetween technology and possible\ - \ musical expression with a strong connection to\nculture and place. The emphasis\ - \ on performance provided closure and motivated\nteams to move forward in their\ - \ design and artistic processes. On the basis of the\ncourse we discuss an interdisciplinary\ - \ NIME course syllabus, and we infer that it\nbenefits from grounding in materiality\ - \ and in the place with a strong reference\nto culture." - address: 'Brisbane, Australia' - author: Rikard Lindell and Koray Tahiroglu and Morten Riis and Jennie Schaeffer - bibtex: "@inproceedings{Lindell2016,\n abstract = {We organised an elven day intense\ - \ course in materiality for\nmusical expressions to explore underlying principles\ - \ of New Interfaces for\nMusical Expression (NIME) in higher education. We grounded\ - \ the course in\ndifferent aspects of materiality and gathered interdisciplinary\ - \ student teams\nfrom three Nordic universities. Electronic music instrument makers\ - \ participated\nin providing the course. In eleven days the students designed\ - \ and built\ninterfaces for musical expressions, composed a piece, and performed\ - \ at the\nNorberg electronic music festival. The students explored the relationship\n\ - between technology and possible musical expression with a strong connection to\n\ - culture and place. The emphasis on performance provided closure and motivated\n\ - teams to move forward in their design and artistic processes. 
On the basis of\ - \ the\ncourse we discuss an interdisciplinary NIME course syllabus, and we infer\ - \ that it\nbenefits from grounding in materiality and in the place with a strong\ - \ reference\nto culture.},\n address = {Brisbane, Australia},\n author = {Rikard\ - \ Lindell and Koray Tahiroglu and Morten Riis and Jennie Schaeffer},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176066},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {344--349},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Materiality for Musical Expressions: an Approach\ - \ to Interdisciplinary Syllabus Development for NIME},\n track = {Papers},\n url\ - \ = {http://www.nime.org/proceedings/2016/nime2016_paper0067.pdf},\n year = {2016}\n\ - }\n" + ID: Lucas2019 + abstract: 'In this paper, the authors describe the evaluation of a collection of + bespoke knob cap designs intended to improve the ease in which a specific musician + with dyskinetic cerebral palsy can operate rotary controls in a musical context. + The authors highlight the importance of the performers perspective when using + design as a means for overcoming access barriers to music. Also, while the authors + were not able to find an ideal solution for the musician within the confines of + this study, several useful observations on the process of evaluating bespoke assistive + music technology are described; observations which may prove useful to digital + musical instrument designers working within the field of inclusive music.' + address: 'Porto Alegre, Brazil' + author: Alex Michael Lucas and Miguel Ortiz and Dr. 
Franziska Schroeder + bibtex: "@inproceedings{Lucas2019,\n abstract = {In this paper, the authors describe\ + \ the evaluation of a collection of bespoke knob cap designs intended to improve\ + \ the ease in which a specific musician with dyskinetic cerebral palsy can operate\ + \ rotary controls in a musical context. The authors highlight the importance of\ + \ the performers perspective when using design as a means for overcoming access\ + \ barriers to music. Also, while the authors were not able to find an ideal solution\ + \ for the musician within the confines of this study, several useful observations\ + \ on the process of evaluating bespoke assistive music technology are described;\ + \ observations which may prove useful to digital musical instrument designers\ + \ working within the field of inclusive music.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Alex Michael Lucas and Miguel Ortiz and Dr. Franziska Schroeder},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672884},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {105--109},\n\ + \ publisher = {UFRGS},\n title = {Bespoke Design for Inclusive Music: The Challenges\ + \ of Evaluation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper021.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176066 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672884 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 344--349 - publisher: Queensland Conservatorium Griffith University - title: 'Materiality for Musical Expressions: an Approach to Interdisciplinary Syllabus - Development for NIME' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0067.pdf - year: 2016 + month: June + pages: 105--109 + publisher: 
UFRGS + title: 'Bespoke Design for Inclusive Music: The Challenges of Evaluation' + url: http://www.nime.org/proceedings/2019/nime2019_paper021.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Gimenes2016 - abstract: "This paper introduces Performance Without Borders and Embodied\niSound,\ - \ two sound installations performed at the 2016 Peninsula Arts Contemporary\n\ - Music Festival at Plymouth University. Sharing in common the use of smartphones\n\ - to afford real-time audience participation, two bespoke distributed computer\n\ - systems (Sherwell and Levinsky Music, respectively). Whilst the first one\nimplements\ - \ a cloud-based voting system, the second implements movement tracking\nand iBeacon-based\ - \ indoor-positioning to control the choice of soundtracks, audio\nsynthesis, and\ - \ surround sound positioning, among other parameters. The general\nconcepts of\ - \ the installations, in particular design and interactive possibilities\nafforded\ - \ by the computer systems are presented." - address: 'Brisbane, Australia' - author: Marcelo Gimenes and Pierre-Emmanuel Largeron and Eduardo Miranda - bibtex: "@inproceedings{Gimenes2016,\n abstract = {This paper introduces Performance\ - \ Without Borders and Embodied\niSound, two sound installations performed at the\ - \ 2016 Peninsula Arts Contemporary\nMusic Festival at Plymouth University. Sharing\ - \ in common the use of smartphones\nto afford real-time audience participation,\ - \ two bespoke distributed computer\nsystems (Sherwell and Levinsky Music, respectively).\ - \ Whilst the first one\nimplements a cloud-based voting system, the second implements\ - \ movement tracking\nand iBeacon-based indoor-positioning to control the choice\ - \ of soundtracks, audio\nsynthesis, and surround sound positioning, among other\ - \ parameters. 
The general\nconcepts of the installations, in particular design\ - \ and interactive possibilities\nafforded by the computer systems are presented.},\n\ - \ address = {Brisbane, Australia},\n author = {Marcelo Gimenes and Pierre-Emmanuel\ - \ Largeron and Eduardo Miranda},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176020},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {350--354},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Frontiers: Expanding\ - \ Musical Imagination With Audience Participation},\n track = {Papers},\n url\ - \ = {http://www.nime.org/proceedings/2016/nime2016_paper0068.pdf},\n year = {2016}\n\ - }\n" + ID: Xiao2019 + abstract: 'T-Voks is an augmented theremin that controls Voks, a performative singing + synthesizer. Originally developed for control with a graphic tablet interface, + Voks allows for real-time pitch and time scaling, vocal effort modification and + syllable sequencing for pre-recorded voice utterances. For T-Voks the theremin''s + frequency antenna modifies the output pitch of the target utterance while the + amplitude antenna controls not only volume as usual but also voice quality and + vocal effort. Syllabic sequencing is handled by an additional pressure sensor + attached to the player''s volume-control hand. This paper presents the system + architecture of T-Voks, the preparation procedure for a song, playing gestures, + and practice techniques, along with musical and poetic examples across four different + languages and styles.' + address: 'Porto Alegre, Brazil' + author: Xiao Xiao and Grégoire Locqueville and Christophe d'Alessandro and Boris + Doval + bibtex: "@inproceedings{Xiao2019,\n abstract = {T-Voks is an augmented theremin\ + \ that controls Voks, a performative singing synthesizer. 
Originally developed\ + \ for control with a graphic tablet interface, Voks allows for real-time pitch\ + \ and time scaling, vocal effort modification and syllable sequencing for pre-recorded\ + \ voice utterances. For T-Voks the theremin's frequency antenna modifies the output\ + \ pitch of the target utterance while the amplitude antenna controls not only\ + \ volume as usual but also voice quality and vocal effort. Syllabic sequencing\ + \ is handled by an additional pressure sensor attached to the player's volume-control\ + \ hand. This paper presents the system architecture of T-Voks, the preparation\ + \ procedure for a song, playing gestures, and practice techniques, along with\ + \ musical and poetic examples across four different languages and styles.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Xiao Xiao and Grégoire Locqueville\ + \ and Christophe d'Alessandro and Boris Doval},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.3672886},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {110--115},\n publisher = {UFRGS},\n\ + \ title = {T-Voks: the Singing and Speaking Theremin},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper022.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176020 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672886 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 350--354 - publisher: Queensland Conservatorium Griffith University - title: 'Frontiers: Expanding Musical Imagination With Audience Participation' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0068.pdf - year: 2016 + month: June + pages: 110--115 + publisher: UFRGS + title: 'T-Voks: the Singing and Speaking Theremin' + url: 
http://www.nime.org/proceedings/2019/nime2019_paper022.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Schlei2016 - abstract: |- - The PourOver Sensor Framework is an open iOS framework designed to - connect iOS control sources (hardware sensors, user input, custom algorithms) to - an audio graph's parameters. The design of the framework, motivation, and use - cases are discussed. The framework is demonstrated in an end-user friendly iOS - app PourOver, in which users can run Pd patches with easy access to hardware - sensors and iOS APIs. - address: 'Brisbane, Australia' - author: Kevin Schlei and Chris Burns and Aidan Menuge - bibtex: "@inproceedings{Schlei2016,\n abstract = {The PourOver Sensor Framework\ - \ is an open iOS framework designed to\nconnect iOS control sources (hardware\ - \ sensors, user input, custom algorithms) to\nan audio graph's parameters. The\ - \ design of the framework, motivation, and use\ncases are discussed. The framework\ - \ is demonstrated in an end-user friendly iOS\napp PourOver, in which users can\ - \ run Pd patches with easy access to hardware\nsensors and iOS APIs.},\n address\ - \ = {Brisbane, Australia},\n author = {Kevin Schlei and Chris Burns and Aidan\ - \ Menuge},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176114},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {355--358},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {PourOver: A Sensor-Driven Generative Music\ - \ Platform},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0069.pdf},\n\ - \ year = {2016}\n}\n" + ID: Brown2019 + abstract: 'Recent developments in music technology have enabled novel timbres to + be acoustically synthesized using various actuation and excitation methods. 
Utilizing + recent work in nonlinear acoustic synthesis, we propose a transducer based augmented + percussion implement entitled DRMMR. This design enables the user to sustain computer + sequencer-like drum rolls at faster speeds while also enabling the user to achieve + nonlinear acoustic synthesis effects. Our acoustic evaluation shows drum rolls + executed by DRMMR easily exhibit greater levels of regularity, speed, and precision + than comparable transducer and electromagnetic-based actuation methods. DRMMR''s + nonlinear acoustic synthesis functionality also presents possibilities for new + kinds of sonic interactions on the surface of drum membranes.' + address: 'Porto Alegre, Brazil' + author: Hunter Brown and spencer topel + bibtex: "@inproceedings{Brown2019,\n abstract = {Recent developments in music technology\ + \ have enabled novel timbres to be acoustically synthesized using various actuation\ + \ and excitation methods. Utilizing recent work in nonlinear acoustic synthesis,\ + \ we propose a transducer based augmented percussion implement entitled DRMMR.\ + \ This design enables the user to sustain computer sequencer-like drum rolls at\ + \ faster speeds while also enabling the user to achieve nonlinear acoustic synthesis\ + \ effects. Our acoustic evaluation shows drum rolls executed by DRMMR easily exhibit\ + \ greater levels of regularity, speed, and precision than comparable transducer\ + \ and electromagnetic-based actuation methods. 
DRMMR's nonlinear acoustic synthesis\ + \ functionality also presents possibilities for new kinds of sonic interactions\ + \ on the surface of drum membranes.},\n address = {Porto Alegre, Brazil},\n author\ + \ = {Hunter Brown and spencer topel},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672888},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {116--121},\n publisher = {UFRGS},\n title = {{DRMMR}: An\ + \ Augmented Percussion Implement},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper023.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176114 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672888 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 355--358 - publisher: Queensland Conservatorium Griffith University - title: 'PourOver: A Sensor-Driven Generative Music Platform' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0069.pdf - year: 2016 + month: June + pages: 116--121 + publisher: UFRGS + title: 'DRMMR: An Augmented Percussion Implement' + url: http://www.nime.org/proceedings/2019/nime2019_paper023.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Hindle2016 - abstract: |2- - NIMEs typically focus on novelty but the cost of novelty is - often to - ignore other non-functional requirements and concerns such as - usability or security. Digital security has probably not been a - concern for performers due to the duration of their performances and - lack of disrespectful hackers, known as crackers, in attendance - carrying the appropriate equipment and software necessary to hack a - performance. Yet many modern NIMEs could be hacked from smart-phones - in the audience. 
The lack of security hardening makes NIMEs an easy - target --- but a question arises: if hacking can interrupt or modify - a performance couldn't hacking itself also be performance? Thus - would music hacking, live-hacking, be similar to live-coding? In - this paper we discuss how NIMEs are in danger of being hacked, and - yet how hacking can be an act of performance too. - address: 'Brisbane, Australia' - author: Abram Hindle - bibtex: "@inproceedings{Hindle2016,\n abstract = { NIMEs typically focus on novelty\ - \ but the cost of novelty is\noften to\nignore other non-functional requirements\ - \ and concerns such as\nusability or security. Digital security has probably not\ - \ been a\nconcern for performers due to the duration of their performances and\n\ - lack of disrespectful hackers, known as crackers, in attendance\ncarrying the\ - \ appropriate equipment and software necessary to hack a\nperformance. Yet many\ - \ modern NIMEs could be hacked from smart-phones\nin the audience. The lack of\ - \ security hardening makes NIMEs an easy\ntarget --- but a question arises: if\ - \ hacking can interrupt or modify\na performance couldn't hacking itself also\ - \ be performance? 
Thus\nwould music hacking, live-hacking, be similar to live-coding?\ - \ In\nthis paper we discuss how NIMEs are in danger of being hacked, and\nyet\ - \ how hacking can be an act of performance too.},\n address = {Brisbane, Australia},\n\ - \ author = {Abram Hindle},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176026},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {359--364},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Hacking NIMEs},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0070.pdf},\n\ - \ year = {2016}\n}\n" + ID: Lepri2019 + abstract: 'The emergence of a new technology can be considered as the result of + social, cultural and technical process. Instrument designs are particularly influenced + by cultural and aesthetic values linked to the specific contexts and communities + that produced them. In previous work, we ran a design fiction workshop in which + musicians created non-functional instrument mockups. In the current paper, we + report on an online survey in which music technologists were asked to speculate + on the background of the musicians who designed particular instruments. Our results + showed several cues for the interpretation of the artefacts'' origins, including + physical features, body-instrument interactions, use of language and references + to established music practices and tools. Tacit musical and cultural values were + also identified based on intuitive and holistic judgments. Our discussion highlights + the importance of cultural awareness and context-dependent values on the design + and use of interactive musical systems.' + address: 'Porto Alegre, Brazil' + author: Giacomo Lepri and Andrew P. 
McPherson + bibtex: "@inproceedings{Lepri2019,\n abstract = {The emergence of a new technology\ + \ can be considered as the result of social, cultural and technical process. Instrument\ + \ designs are particularly influenced by cultural and aesthetic values linked\ + \ to the specific contexts and communities that produced them. In previous work,\ + \ we ran a design fiction workshop in which musicians created non-functional instrument\ + \ mockups. In the current paper, we report on an online survey in which music\ + \ technologists were asked to speculate on the background of the musicians who\ + \ designed particular instruments. Our results showed several cues for the interpretation\ + \ of the artefacts' origins, including physical features, body-instrument interactions,\ + \ use of language and references to established music practices and tools. Tacit\ + \ musical and cultural values were also identified based on intuitive and holistic\ + \ judgments. Our discussion highlights the importance of cultural awareness and\ + \ context-dependent values on the design and use of interactive musical systems.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Giacomo Lepri and Andrew P. 
McPherson},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672890},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {122--127},\n\ + \ publisher = {UFRGS},\n title = {Fictional instruments, real values: discovering\ + \ musical backgrounds with non-functional prototypes},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper024.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176026 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672890 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 359--364 - publisher: Queensland Conservatorium Griffith University - title: Hacking NIMEs - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0070.pdf - year: 2016 + month: June + pages: 122--127 + publisher: UFRGS + title: 'Fictional instruments, real values: discovering musical backgrounds with + non-functional prototypes' + url: http://www.nime.org/proceedings/2019/nime2019_paper024.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Jordnicode2252016 - abstract: 'This paper presents a generative drumming agent built from the results - of an extensive survey carried out with electronic music producers, in two phases. - Following the techniques of user-centered interaction design, an international - group of beat producers was reviewed on the possibility of using AI algorithms - to help them in the beat production workflow. The analyzed results of these tests - were used as design requirements for constructing a system that would indeed perform - some tasks alongside the producer. The first results of this working prototype - are presented with a description of the system. 
The prototype is a stylistic drum - generator that creates new rhythmic patterns after being trained with a collection - of drum tracks. Further stages of development and potential algorithms are discussed.' - address: 'Brisbane, Australia' - author: Sergi Jordà and Daniel Gómez-Marín and Ángel Faraldo and Perfecto Herrera - bibtex: "@inproceedings{Jordnicode2252016,\n abstract = {This paper presents a generative\ - \ drumming agent built from the results of an extensive survey carried out with\ - \ electronic music producers, in two phases. Following the techniques of user-centered\ - \ interaction design, an international group of beat producers was reviewed on\ - \ the possibility of using AI algorithms to help them in the beat production workflow.\ - \ The analyzed results of these tests were used as design requirements for constructing\ - \ a system that would indeed perform some tasks alongside the producer. The first\ - \ results of this working prototype are presented with a description of the system.\ - \ The prototype is a stylistic drum generator that creates new rhythmic patterns\ - \ after being trained with a collection of drum tracks. 
Further stages of development\ - \ and potential algorithms are discussed.},\n address = {Brisbane, Australia},\n\ - \ author = {Sergi Jord\\`{a} and Daniel G\\'{o}mez-Mar\\'{i}n and \\'{A}ngel Faraldo\ - \ and Perfecto Herrera},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176048},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {365--370},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Drumming with\ - \ style: From user needs to a working prototype},\n track = {Papers},\n url =\ - \ {http://www.nime.org/proceedings/2016/nime2016_paper0071.pdf},\n year = {2016}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176048 - isbn: 978-1-925455-13-7 - issn: 2220-4806 - pages: 365--370 - publisher: Queensland Conservatorium Griffith University - title: 'Drumming with style: From user needs to a working prototype' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0071.pdf - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: Bown2016 - abstract: |- - We describe a project in which a game of lawn bowls was recreated - using Distributed Interactive Audio Devices (DIADs), to create an interactive - musical experience in the form of a game. This paper details the design of the - underlying digital music system, some of the compositional and design - considerations, and the technical challenges involved. We discuss future - directions for our system and compositional method. 
- address: 'Brisbane, Australia' - author: Oliver Bown and Sam Ferguson - bibtex: "@inproceedings{Bown2016,\n abstract = {We describe a project in which a\ - \ game of lawn bowls was recreated\nusing Distributed Interactive Audio Devices\ - \ (DIADs), to create an interactive\nmusical experience in the form of a game.\ - \ This paper details the design of the\nunderlying digital music system, some\ - \ of the compositional and design\nconsiderations, and the technical challenges\ - \ involved. We discuss future\ndirections for our system and compositional method.},\n\ - \ address = {Brisbane, Australia},\n author = {Oliver Bown and Sam Ferguson},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1175998},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {371--372},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {A Musical Game of Bowls Using the DIADs},\n\ - \ track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0072.pdf},\n\ - \ year = {2016}\n}\n" + ID: Dewey2019 + abstract: 'This paper presents the first stage in the design and evaluation of a + novel container metaphor interface for equalisation control. The prototype system + harnesses the Pepper''s Ghost illusion to project mid-air a holographic data visualisation + of an audio track''s long-term average and real-time frequency content as a deformable + shape manipulated directly via hand gestures. The system uses HTML 5, JavaScript + and the Web Audio API in conjunction with a Leap Motion controller and bespoke + low budget projection system. During subjective evaluation users commented that + the novel system was simpler and more intuitive to use than commercially established + equalisation interface paradigms and most suited to creative, expressive and explorative + equalisation tasks.' 
+ address: 'Porto Alegre, Brazil' + author: Christopher Dewey and Jonathan P. Wakefield + bibtex: "@inproceedings{Dewey2019,\n abstract = {This paper presents the first stage\ + \ in the design and evaluation of a novel container metaphor interface for equalisation\ + \ control. The prototype system harnesses the Pepper's Ghost illusion to project\ + \ mid-air a holographic data visualisation of an audio track's long-term average\ + \ and real-time frequency content as a deformable shape manipulated directly via\ + \ hand gestures. The system uses HTML 5, JavaScript and the Web Audio API in conjunction\ + \ with a Leap Motion controller and bespoke low budget projection system. During\ + \ subjective evaluation users commented that the novel system was simpler and\ + \ more intuitive to use than commercially established equalisation interface paradigms\ + \ and most suited to creative, expressive and explorative equalisation tasks.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Christopher Dewey and Jonathan\ + \ P. 
Wakefield},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672892},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {128--129},\n publisher = {UFRGS},\n title = {Exploring\ + \ the Container Metaphor for Equalisation Manipulation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper025.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175998 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672892 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 371--372 - publisher: Queensland Conservatorium Griffith University - title: A Musical Game of Bowls Using the DIADs - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper0072.pdf - year: 2016 + month: June + pages: 128--129 + publisher: UFRGS + title: Exploring the Container Metaphor for Equalisation Manipulation + url: http://www.nime.org/proceedings/2019/nime2019_paper025.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Eyes2016 - abstract: "During an electronic music performance it is common to see light\nand\ - \ sound interacting electronically in many different ways. From sound and light\n\ - shows, whereby light reacts to sound, or generated visuals are projected onto\ - \ a\nscreen behind the performer. However we asked the question what if we could\n\ - convert sound to light and back again and control sound with light? Inspired by\n\ - the huge acoustic of the Mimerlaven at Norberg festival we built a `light\ninstrument'\ - \ that allowed us to interrupt and disrupt sound using light\nforming the basis\ - \ of our piece `Interruption'." 
- address: 'Brisbane, Australia' - author: Benjamin James Eyes and Laurits Esben Jongejan - bibtex: "@inproceedings{Eyes2016,\n abstract = {During an electronic music performance\ - \ it is common to see light\nand sound interacting electronically in many different\ - \ ways. From sound and light\nshows, whereby light reacts to sound, or generated\ - \ visuals are projected onto a\nscreen behind the performer. However we asked\ - \ the question what if we could\nconvert sound to light and back again and control\ - \ sound with light? Inspired by\nthe huge acoustic of the Mimerlaven at Norberg\ - \ festival we built a `light\ninstrument' that allowed us to interrupt and disrupt\ - \ sound using light\nforming the basis of our piece `Interruption'.},\n address\ - \ = {Brisbane, Australia},\n author = {Benjamin James Eyes and Laurits Esben Jongejan},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176016},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {373--374},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {How to Stop Sound: Creating a light instrument\ - \ and `Interruption' a piece for the Mimerlaven, Norberg Festival 2015.},\n track\ - \ = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0073.pdf},\n\ - \ year = {2016}\n}\n" + ID: Hofmann2019 + abstract: 'Physics-based sound synthesis allows to shape the sound by modifying + parameters that reference to real world properties of acoustic instruments. This + paper presents a hybrid physical modeling single reed instrument, where a virtual + tube is coupled to a real mouthpiece with a sensor-equipped clarinet reed. The + tube model is provided as an opcode for Csound which is running on the low-latency + embedded audio-platform Bela. An actuator is connected to the audio output and + the sensor-reed signal is fed back into the input of Bela. 
The performer can control + the coupling between reed and actuator, and is also provided with a 3D-printed + slider/knob interface to change parameters of the tube model in real-time.' + address: 'Porto Alegre, Brazil' + author: Alex Hofmann and Vasileios Chatziioannou and Sebastian Schmutzhard and Gökberk + Erdogan and Alexander Mayer + bibtex: "@inproceedings{Hofmann2019,\n abstract = {Physics-based sound synthesis\ + \ allows to shape the sound by modifying parameters that reference to real world\ + \ properties of acoustic instruments. This paper presents a hybrid physical modeling\ + \ single reed instrument, where a virtual tube is coupled to a real mouthpiece\ + \ with a sensor-equipped clarinet reed. The tube model is provided as an opcode\ + \ for Csound which is running on the low-latency embedded audio-platform Bela.\ + \ An actuator is connected to the audio output and the sensor-reed signal is fed\ + \ back into the input of Bela. The performer can control the coupling between\ + \ reed and actuator, and is also provided with a 3D-printed slider/knob interface\ + \ to change parameters of the tube model in real-time.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Alex Hofmann and Vasileios Chatziioannou and Sebastian\ + \ Schmutzhard and Gökberk Erdogan and Alexander Mayer},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672896},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {130--133},\n publisher = {UFRGS},\n\ + \ title = {The Half-Physler: An oscillating real-time interface to a tube resonator\ + \ model},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper026.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176016 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672896 + editor: 
Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 373--374 - publisher: Queensland Conservatorium Griffith University - title: 'How to Stop Sound: Creating a light instrument and `Interruption'' a piece - for the Mimerlaven, Norberg Festival 2015.' - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper0073.pdf - year: 2016 + month: June + pages: 130--133 + publisher: UFRGS + title: 'The Half-Physler: An oscillating real-time interface to a tube resonator + model' + url: http://www.nime.org/proceedings/2019/nime2019_paper026.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Hope2016 - abstract: "This paper-demonstration provides an overview of an generative\nmusic\ - \ score adapted for the iPad by the Decibel new music ensemble. The original\n\ - score `Loaded (NSFW)' (2015) is by Western Australian composer Laura\nJane Lowther,\ - \ and is scored for ensemble and electronics, commissioned for a\nperformance\ - \ in April 2015 at the Perth Institute of Contemporary Arts. It engages\nand develops\ - \ the Decibel Score Player application, a score reader and generator\nfor the\ - \ iPad as a tool for displaying an interactive score that requires\nperformers\ - \ to react to news headlines through musical means. The paper will\nintroduce\ - \ the concept for the player, how it was developed, and how it was used\nin the\ - \ premiere performance. The associated demonstration shows how the score\nappears\ - \ on the iPads. " - address: 'Brisbane, Australia' - author: Cat Hope and Stuart James and Aaron Wyatt - bibtex: "@inproceedings{Hope2016,\n abstract = {This paper-demonstration provides\ - \ an overview of an generative\nmusic score adapted for the iPad by the Decibel\ - \ new music ensemble. 
The original\nscore `Loaded (NSFW)' (2015) is by Western\ - \ Australian composer Laura\nJane Lowther, and is scored for ensemble and electronics,\ - \ commissioned for a\nperformance in April 2015 at the Perth Institute of Contemporary\ - \ Arts. It engages\nand develops the Decibel Score Player application, a score\ - \ reader and generator\nfor the iPad as a tool for displaying an interactive score\ - \ that requires\nperformers to react to news headlines through musical means.\ - \ The paper will\nintroduce the concept for the player, how it was developed,\ - \ and how it was used\nin the premiere performance. The associated demonstration\ - \ shows how the score\nappears on the iPads. },\n address = {Brisbane, Australia},\n\ - \ author = {Cat Hope and Stuart James and Aaron Wyatt},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176032},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {375--376},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Headline grabs for music: The development of the iPad score generator\ - \ for `Loaded (NSFW)'},\n track = {Demonstrations},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0074.pdf},\n\ - \ year = {2016}\n}\n" + ID: Bussigel2019 + abstract: 'There is rich history of using found or “readymade” objects in music + performances and sound installations. John Cage''s Water Walk, Carolee Schneeman''s + Noise Bodies, and David Tudor''s Rainforest all lean on both the sonic and cultural + affordances of found objects. Today, composers and sound artists continue to look + at the everyday, combining readymades with microcontrollers and homemade electronics + and repurposing known interfaces for their latent sonic potential. 
This paper + gives a historical overview of work at the intersection of music and the readymade + and then describes three recent sound installations/performances by the authors + that further explore this space. The emphasis is on processes involved in working + with found objects--the complex, practical, and playful explorations into sound + and material culture.' + address: 'Porto Alegre, Brazil' + author: Peter Bussigel and Stephan Moore and Scott Smallwood + bibtex: "@inproceedings{Bussigel2019,\n abstract = {There is rich history of using\ + \ found or “readymade” objects in music performances and sound installations.\ + \ John Cage's Water Walk, Carolee Schneeman's Noise Bodies, and David Tudor's\ + \ Rainforest all lean on both the sonic and cultural affordances of found objects.\ + \ Today, composers and sound artists continue to look at the everyday, combining\ + \ readymades with microcontrollers and homemade electronics and repurposing known\ + \ interfaces for their latent sonic potential. This paper gives a historical overview\ + \ of work at the intersection of music and the readymade and then describes three\ + \ recent sound installations/performances by the authors that further explore\ + \ this space. 
The emphasis is on processes involved in working with found objects--the\ + \ complex, practical, and playful explorations into sound and material culture.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Peter Bussigel and Stephan Moore\ + \ and Scott Smallwood},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672898},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {134--139},\n publisher = {UFRGS},\n title = {Reanimating\ + \ the Readymade},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper027.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176032 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672898 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 375--376 - publisher: Queensland Conservatorium Griffith University - title: 'Headline grabs for music: The development of the iPad score generator for - `Loaded (NSFW)''' - track: Demonstrations - url: http://www.nime.org/proceedings/2016/nime2016_paper0074.pdf - year: 2016 + month: June + pages: 134--139 + publisher: UFRGS + title: Reanimating the Readymade + url: http://www.nime.org/proceedings/2019/nime2019_paper027.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Carey2016 - abstract: |- - This paper discusses practice-based research in the context of - live performance with interactive systems. We focus on two approaches, both of - which are concerned with documenting, examining and reflecting on the real-world - behaviours and experiences of people and artefacts involved in the creation of - new works. The first approach is primarily based on reflections by an individual - performer/developer (auto-ethnography) and the second on interviews and - observations. 
The rationales for both approaches are presented along with - findings from research which applied them in order to illustrate and explore the - characteristics of both. Challenges, including the difficulty of balancing - rigour and relevance and the risks of negatively impacting on creative practices - are articulated, as are the potential benefits. - address: 'Brisbane, Australia' - author: Benjamin Carey and Andrew Johnston - bibtex: "@inproceedings{Carey2016,\n abstract = {This paper discusses practice-based\ - \ research in the context of\nlive performance with interactive systems. We focus\ - \ on two approaches, both of\nwhich are concerned with documenting, examining\ - \ and reflecting on the real-world\nbehaviours and experiences of people and artefacts\ - \ involved in the creation of\nnew works. The first approach is primarily based\ - \ on reflections by an individual\nperformer/developer (auto-ethnography) and\ - \ the second on interviews and\nobservations. The rationales for both approaches\ - \ are presented along with\nfindings from research which applied them in order\ - \ to illustrate and explore the\ncharacteristics of both. 
Challenges, including\ - \ the difficulty of balancing\nrigour and relevance and the risks of negatively\ - \ impacting on creative practices\nare articulated, as are the potential benefits.},\n\ - \ address = {Brisbane, Australia},\n author = {Benjamin Carey and Andrew Johnston},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176006},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {377--382},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {Reflection On Action in NIME Research: Two\ - \ Complementary Perspectives},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0075.pdf},\n\ - \ year = {2016}\n}\n" + ID: Zhang2019 + abstract: 'Haptic interfaces have untapped the sense of touch to assist multimodal + music learning. We have recently seen various improvements of interface design + on tactile feedback and force guidance aiming to make instrument learning more + effective. However, most interfaces are still quite static; they cannot yet sense + the learning progress and adjust the tutoring strategy accordingly. To solve this + problem, we contribute an adaptive haptic interface based on the latest design + of haptic flute. We first adopted a clutch mechanism to enable the interface to + turn on and off the haptic control flexibly in real time. The interactive tutor + is then able to follow human performances and apply the “teacher force” only when + the software instructs so. Finally, we incorporated the adaptive interface with + a step-by-step dynamic learning strategy. Experimental results showed that dynamic + learning dramatically outperforms static learning, which boosts the learning rate + by 45.3% and shrinks the forgetting chance by 86%.' 
+ address: 'Porto Alegre, Brazil' + author: Yian Zhang and Yinmiao Li and Daniel Chin and Gus Xia + bibtex: "@inproceedings{Zhang2019,\n abstract = {Haptic interfaces have untapped\ + \ the sense of touch to assist multimodal music learning. We have recently seen\ + \ various improvements of interface design on tactile feedback and force guidance\ + \ aiming to make instrument learning more effective. However, most interfaces\ + \ are still quite static; they cannot yet sense the learning progress and adjust\ + \ the tutoring strategy accordingly. To solve this problem, we contribute an adaptive\ + \ haptic interface based on the latest design of haptic flute. We first adopted\ + \ a clutch mechanism to enable the interface to turn on and off the haptic control\ + \ flexibly in real time. The interactive tutor is then able to follow human performances\ + \ and apply the “teacher force” only when the software instructs so. Finally,\ + \ we incorporated the adaptive interface with a step-by-step dynamic learning\ + \ strategy. 
Experimental results showed that dynamic learning dramatically outperforms\ + \ static learning, which boosts the learning rate by 45.3% and shrinks the forgetting\ + \ chance by 86%.},\n address = {Porto Alegre, Brazil},\n author = {Yian Zhang\ + \ and Yinmiao Li and Daniel Chin and Gus Xia},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.3672900},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {140--145},\n publisher = {UFRGS},\n\ + \ title = {Adaptive Multimodal Music Learning via Interactive Haptic Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_paper028.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176006 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672900 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 377--382 - publisher: Queensland Conservatorium Griffith University - title: 'Reflection On Action in NIME Research: Two Complementary Perspectives' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0075.pdf - year: 2016 + month: June + pages: 140--145 + publisher: UFRGS + title: Adaptive Multimodal Music Learning via Interactive Haptic Instrument + url: http://www.nime.org/proceedings/2019/nime2019_paper028.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Nuannicode225in2016 - abstract: "In this paper we describe an approach for generating and\nvisualising\ - \ new rhythmic patterns from existing audio in real-time using\nconcatenative\ - \ synthesis. 
We introduce a graph-based model enabling novel\nvisualisation and\ - \ manipulation of new patterns that mimics the rhythmic and\ntimbral character\ - \ of an existing target seed pattern using a separate database of\npalette sounds.\ - \ Our approach is described, reporting on those features that may\nbe useful in\ - \ describing units of sound related to rhythm and how they might then\nbe projected\ - \ into two-dimensional space for visualisation using reduction\ntechniques and\ - \ clustering. We conclude the paper with our qualitative appraisal\nof using the\ - \ interface and outline scope for future work." - address: 'Brisbane, Australia' - author: Càrthach Ó Nuanàin and Sergi Jordà and Perfecto Herrera - bibtex: "@inproceedings{Nuannicode225in2016,\n abstract = {In this paper we describe\ - \ an approach for generating and\nvisualising new rhythmic patterns from existing\ - \ audio in real-time using\nconcatenative synthesis. We introduce a graph-based\ - \ model enabling novel\nvisualisation and manipulation of new patterns that mimics\ - \ the rhythmic and\ntimbral character of an existing target seed pattern using\ - \ a separate database of\npalette sounds. Our approach is described, reporting\ - \ on those features that may\nbe useful in describing units of sound related to\ - \ rhythm and how they might then\nbe projected into two-dimensional space for\ - \ visualisation using reduction\ntechniques and clustering. We conclude the paper\ - \ with our qualitative appraisal\nof using the interface and outline scope for\ - \ future work.},\n address = {Brisbane, Australia},\n author = {C\\`{a}rthach\ - \ \\'{O} Nuan\\`{a}in and Sergi Jord\\`{a} and Perfecto Herrera},\n booktitle\ + ID: Sguiglia2019 + abstract: 'We present El mapa no es el territorio (MNT), a set of open source tools + that facilitate the design of visual and musical mappings for interactive installations + and performance pieces. 
MNT is being developed by a multidisciplinary group that + explores gestural control of audio-visual environments and virtual instruments. + Along with these tools, this paper will present two projects in which they were + used -interactive installation Memorias Migrantes and stage performance Recorte + de Jorge Cárdenas Cayendo-, showing how MNT allows us to develop collaborative + artworks that articulate body movement and generative audiovisual systems, and + how its current version was influenced by these successive implementations.' + address: 'Porto Alegre, Brazil' + author: Fabián Sguiglia and Pauli Coton and Fernando Toth + bibtex: "@inproceedings{Sguiglia2019,\n abstract = {We present El mapa no es el\ + \ territorio (MNT), a set of open source tools that facilitate the design of visual\ + \ and musical mappings for interactive installations and performance pieces. MNT\ + \ is being developed by a multidisciplinary group that explores gestural control\ + \ of audio-visual environments and virtual instruments. 
Along with these tools,\ + \ this paper will present two projects in which they were used -interactive installation\ + \ Memorias Migrantes and stage performance Recorte de Jorge Cárdenas Cayendo-,\ + \ showing how MNT allows us to develop collaborative artworks that articulate\ + \ body movement and generative audiovisual systems, and how its current version\ + \ was influenced by these successive implementations.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Fabián Sguiglia and Pauli Coton and Fernando Toth},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176094},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {383--387},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {An Interactive Software Instrument for Real-time\ - \ Rhythmic Concatenative Synthesis},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0076.pdf},\n\ - \ year = {2016}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.3672902},\n editor = {Marcelo Queiroz and\ + \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {146--149},\n\ + \ publisher = {UFRGS},\n title = {El mapa no es el territorio: Sensor mapping\ + \ for audiovisual performances},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper029.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176094 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672902 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 383--387 - publisher: Queensland Conservatorium Griffith University - title: An Interactive Software Instrument for Real-time Rhythmic Concatenative Synthesis - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0076.pdf - year: 2016 + month: June + pages: 146--149 + publisher: UFRGS + title: 'El mapa no 
es el territorio: Sensor mapping for audiovisual performances' + url: http://www.nime.org/proceedings/2019/nime2019_paper029.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Milne2016 - abstract: "We present an application XronoMorph for the\nalgorithmic generation\ - \ of rhythms in the context of creative composition and\nperformance, and of musical\ - \ analysis and education. XronoMorph makes use of\nvisual and geometrical conceptualizations\ - \ of rhythms, and allows the user to\nsmoothly morph between rhythms. Sonification\ - \ of the user generated geometrical\nconstructs is possible using a built-in sampler,\ - \ VST and AU plugins, or\nstandalone synthesizers via MIDI. The algorithms are\ - \ based on two underlying\nmathematical principles: perfect balance and well-formedness,\ - \ both of which can\nbe derived from coefficients of the discrete Fourier transform\ - \ of the rhythm. The\nmathematical background, musical implications, and their\ - \ implementation in the\nsoftware are discussed." - address: 'Brisbane, Australia' - author: Andrew J. Milne and Steffen A. Herff and David Bulger and William A. Sethares - and Roger T. Dean - bibtex: "@inproceedings{Milne2016,\n abstract = {We present an application XronoMorph\ - \ for the\nalgorithmic generation of rhythms in the context of creative composition\ - \ and\nperformance, and of musical analysis and education. XronoMorph makes use\ - \ of\nvisual and geometrical conceptualizations of rhythms, and allows the user\ - \ to\nsmoothly morph between rhythms. Sonification of the user generated geometrical\n\ - constructs is possible using a built-in sampler, VST and AU plugins, or\nstandalone\ - \ synthesizers via MIDI. The algorithms are based on two underlying\nmathematical\ - \ principles: perfect balance and well-formedness, both of which can\nbe derived\ - \ from coefficients of the discrete Fourier transform of the rhythm. 
The\nmathematical\ - \ background, musical implications, and their implementation in the\nsoftware\ - \ are discussed.},\n address = {Brisbane, Australia},\n author = {Andrew J. Milne\ - \ and Steffen A. Herff and David Bulger and William A. Sethares and Roger T. Dean},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176082},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {388--393},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {XronoMorph: Algorithmic Generation of Perfectly\ - \ Balanced and Well-Formed Rhythms},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0077.pdf},\n\ - \ year = {2016}\n}\n" + ID: Yaremchuk2019 + abstract: 'The Rulers is a Digital Musical Instrument with 7 metal beams, each of + which is fixed at one end. It uses infrared sensors, Hall sensors, and strain + gauges to estimate deflection. These sensors each perform better or worse depending + on the class of gesture the user is making, motivating sensor fusion practices. + Residuals between Kalman filters and sensor output are calculated and used as + input to a recurrent neural network which outputs a classification that determines + which processing parameters and sensor measurements are employed. Multiple instances + (30) of layer recurrent neural networks with a single hidden layer varying in + size from 1 to 10 processing units were trained, and tested on previously unseen + data. The best performing neural network has only 3 hidden units and has a sufficiently + low error rate to be good candidate for gesture classification. 
This paper demonstrates + that: dynamic networks out-perform feedforward networks for this type of gesture + classification, a small network can handle a problem of this level of complexity, + recurrent networks of this size are fast enough for real-time applications of + this type, and the importance of training multiple instances of each network architecture + and selecting the best performing one from within that set.' + address: 'Porto Alegre, Brazil' + author: Vanessa Yaremchuk and Carolina Brum Medeiros and Marcelo Wanderley + bibtex: "@inproceedings{Yaremchuk2019,\n abstract = {The Rulers is a Digital Musical\ + \ Instrument with 7 metal beams, each of which is fixed at one end. It uses infrared\ + \ sensors, Hall sensors, and strain gauges to estimate deflection. These sensors\ + \ each perform better or worse depending on the class of gesture the user is making,\ + \ motivating sensor fusion practices. Residuals between Kalman filters and sensor\ + \ output are calculated and used as input to a recurrent neural network which\ + \ outputs a classification that determines which processing parameters and sensor\ + \ measurements are employed. Multiple instances (30) of layer recurrent neural\ + \ networks with a single hidden layer varying in size from 1 to 10 processing\ + \ units were trained, and tested on previously unseen data. The best performing\ + \ neural network has only 3 hidden units and has a sufficiently low error rate\ + \ to be good candidate for gesture classification. 
This paper demonstrates that:\ + \ dynamic networks out-perform feedforward networks for this type of gesture classification,\ + \ a small network can handle a problem of this level of complexity, recurrent\ + \ networks of this size are fast enough for real-time applications of this type,\ + \ and the importance of training multiple instances of each network architecture\ + \ and selecting the best performing one from within that set.},\n address = {Porto\ + \ Alegre, Brazil},\n author = {Vanessa Yaremchuk and Carolina Brum Medeiros and\ + \ Marcelo Wanderley},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672904},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {150--155},\n publisher = {UFRGS},\n title = {Small Dynamic\ + \ Neural Networks for Gesture Classification with The Rulers (a Digital Musical\ + \ Instrument)},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper030.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176082 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672904 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 388--393 - publisher: Queensland Conservatorium Griffith University - title: 'XronoMorph: Algorithmic Generation of Perfectly Balanced and Well-Formed - Rhythms' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0077.pdf - year: 2016 + month: June + pages: 150--155 + publisher: UFRGS + title: Small Dynamic Neural Networks for Gesture Classification with The Rulers + (a Digital Musical Instrument) + url: http://www.nime.org/proceedings/2019/nime2019_paper030.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Vickery2016 - abstract: "The rhizome concept explored by Deleuze and Guatarri has had an\nimportant\ - \ influence on formal 
thinking in music and new media. This paper\nexplores the\ - \ development of rhizomatic musical scores that are arranged\ncartographically\ - \ with nodal points allowing for alternate pathways to be\ntraversed. The challenges\ - \ of pre-digital exemplars of rhizomatic structure are\ndiscussed. It follows\ - \ the development of concepts and technology used in the\ncreation of five works\ - \ by the author Ubahn c. 1985: the Rosenberg Variations\n[2012], The Last Years\ - \ [2012], Sacrificial Zones [2014], detritus [2015] and\ntrash vortex [2015].\ - \ The paper discusses the potential for the evolution of novel\nformal structures\ - \ using rhizomatic structures. " - address: 'Brisbane, Australia' - author: Lindsay Vickery - bibtex: "@inproceedings{Vickery2016,\n abstract = {The rhizome concept explored\ - \ by Deleuze and Guatarri has had an\nimportant influence on formal thinking in\ - \ music and new media. This paper\nexplores the development of rhizomatic musical\ - \ scores that are arranged\ncartographically with nodal points allowing for alternate\ - \ pathways to be\ntraversed. The challenges of pre-digital exemplars of rhizomatic\ - \ structure are\ndiscussed. It follows the development of concepts and technology\ - \ used in the\ncreation of five works by the author Ubahn c. 1985: the Rosenberg\ - \ Variations\n[2012], The Last Years [2012], Sacrificial Zones [2014], detritus\ - \ [2015] and\ntrash vortex [2015]. The paper discusses the potential for the evolution\ - \ of novel\nformal structures using rhizomatic structures. 
},\n address = {Brisbane,\ - \ Australia},\n author = {Lindsay Vickery},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176133},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {394--400},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Rhizomatic approaches to screen-based music notation},\n track = {Papers},\n\ - \ url = {http://www.nime.org/proceedings/2016/nime2016_paper0078.pdf},\n year\ - \ = {2016}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176133 - isbn: 978-1-925455-13-7 - issn: 2220-4806 - pages: 394--400 - publisher: Queensland Conservatorium Griffith University - title: Rhizomatic approaches to screen-based music notation - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0078.pdf - year: 2016 - - -- ENTRYTYPE: inproceedings - ID: James2016 - abstract: |- - This paper documents a method of controlling complex sound - synthesis processes such as granular synthesis, additive synthesis, timbre - morphology, swarm-based spatialisation, spectral spatialisation, and timbre - spatialisation via a multi-parametric 2D interface. This paper evaluates the use - of audio-rate control signals for sound synthesis, and discussing approaches to - de-interleaving, synchronization, and mapping. The paper also outlines a number - of ways of extending the expressivity of such a control interface by coupling - this with another 2D multi-parametric nodes interface and audio-rate 2D table - lookup. The paper proceeds to review methods of navigating multi-parameter sets - via interpolation and transformation. Some case studies are finally discussed in - the paper. The author has used this method to control complex sound synthesis - processes that require control data for more that a thousand parameters. 
- address: 'Brisbane, Australia' - author: Stuart James - bibtex: "@inproceedings{James2016,\n abstract = {This paper documents a method of\ - \ controlling complex sound\nsynthesis processes such as granular synthesis, additive\ - \ synthesis, timbre\nmorphology, swarm-based spatialisation, spectral spatialisation,\ - \ and timbre\nspatialisation via a multi-parametric 2D interface. This paper evaluates\ - \ the use\nof audio-rate control signals for sound synthesis, and discussing approaches\ - \ to\nde-interleaving, synchronization, and mapping. The paper also outlines a\ - \ number\nof ways of extending the expressivity of such a control interface by\ - \ coupling\nthis with another 2D multi-parametric nodes interface and audio-rate\ - \ 2D table\nlookup. The paper proceeds to review methods of navigating multi-parameter\ - \ sets\nvia interpolation and transformation. Some case studies are finally discussed\ - \ in\nthe paper. The author has used this method to control complex sound synthesis\n\ - processes that require control data for more that a thousand parameters.},\n address\ - \ = {Brisbane, Australia},\n author = {Stuart James},\n booktitle = {Proceedings\ + ID: Dahlstedtb2019 + abstract: 'We present a work where a space of realtime synthesized sounds is explored + through ear (Oto) and movement (Kinesis) by one or two dancers. Movement is tracked + and mapped through extensive pre-processing to a high-dimensional acoustic space, + using a many-to-many mapping, so that every small body movement matters. Designed + for improvised exploration, it works as both performance and installation. Through + this re-translation of bodily action, position, and posture into infinite-dimensional + sound texture and timbre, the performers are invited to re-think and re-learn + position and posture as sound, effort as gesture, and timbre as a bodily construction. + The sound space can be shared by two people, with added modes of presence, proximity + and interaction. 
The aesthetic background and technical implementation of the + system are described, and the system is evaluated based on a number of performances, + workshops and installation exhibits. Finally, the aesthetic and choreographic + motivations behind the performance narrative are explained, and discussed in the + light of the design of the sonification.' + address: 'Porto Alegre, Brazil' + author: Palle Dahlstedt and Ami Skånberg Dahlstedt + bibtex: "@inproceedings{Dahlstedtb2019,\n abstract = {We present a work where a\ + \ space of realtime synthesized sounds is explored through ear (Oto) and movement\ + \ (Kinesis) by one or two dancers. Movement is tracked and mapped through extensive\ + \ pre-processing to a high-dimensional acoustic space, using a many-to-many mapping,\ + \ so that every small body movement matters. Designed for improvised exploration,\ + \ it works as both performance and installation. Through this re-translation of\ + \ bodily action, position, and posture into infinite-dimensional sound texture\ + \ and timbre, the performers are invited to re-think and re-learn position and\ + \ posture as sound, effort as gesture, and timbre as a bodily construction. The\ + \ sound space can be shared by two people, with added modes of presence, proximity\ + \ and interaction. The aesthetic background and technical implementation of the\ + \ system are described, and the system is evaluated based on a number of performances,\ + \ workshops and installation exhibits. 
Finally, the aesthetic and choreographic\ + \ motivations behind the performance narrative are explained, and discussed in\ + \ the light of the design of the sonification.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Palle Dahlstedt and Ami Skånberg Dahlstedt},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176040},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {401--406},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {A Multi-Point {2D} Interface: Audio-Rate Signals for Controlling Complex\ - \ Multi-Parametric Sound Synthesis},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0079.pdf},\n\ - \ year = {2016}\n}\n" + \ doi = {10.5281/zenodo.3672906},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {156--161},\n publisher = {UFRGS},\n\ + \ title = {OtoKin: Mapping for Sound Space Exploration through Dance Improvisation},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_paper031.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176040 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672906 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 401--406 - publisher: Queensland Conservatorium Griffith University - title: 'A Multi-Point 2D Interface: Audio-Rate Signals for Controlling Complex Multi-Parametric - Sound Synthesis' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0079.pdf - year: 2016 + month: June + pages: 156--161 + publisher: UFRGS + title: 'OtoKin: Mapping for Sound Space Exploration through Dance Improvisation' + url: http://www.nime.org/proceedings/2019/nime2019_paper031.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Schlienger2016 - abstract: "Despite the near 
ubiquitous availability of interfaces for spatial\n\ - interaction, standard audio spatialisation technology makes very little use of\n\ - it. In fact, we find that audio technology often impedes spatial interaction:\ - \ In\nthe workshop on music, space and interaction we thus developed the idea\ - \ of a\nreal-time panning whereby a moving sound source is reproduced as a virtual\ - \ source\non a panning trajectory. We define a series of application scenarios\ - \ where we\ndescribe in detail what functionality is required to inform an implementation.\ - \ In\nour earlier work we showed that Acoustic Localisation (AL) potentially can\n\ - provide a pervasive technique for spatially interactive audio applications.\n\ - Playing through the application scenarios with AL in mind provides interesting\n\ - approaches. For one scenario we show an example implementation as proof of\nconcept." - address: 'Brisbane, Australia' - author: Dominik Schlienger - bibtex: "@inproceedings{Schlienger2016,\n abstract = {Despite the near ubiquitous\ - \ availability of interfaces for spatial\ninteraction, standard audio spatialisation\ - \ technology makes very little use of\nit. In fact, we find that audio technology\ - \ often impedes spatial interaction: In\nthe workshop on music, space and interaction\ - \ we thus developed the idea of a\nreal-time panning whereby a moving sound source\ - \ is reproduced as a virtual source\non a panning trajectory. We define a series\ - \ of application scenarios where we\ndescribe in detail what functionality is\ - \ required to inform an implementation. In\nour earlier work we showed that Acoustic\ - \ Localisation (AL) potentially can\nprovide a pervasive technique for spatially\ - \ interactive audio applications.\nPlaying through the application scenarios with\ - \ AL in mind provides interesting\napproaches. 
For one scenario we show an example\ - \ implementation as proof of\nconcept.},\n address = {Brisbane, Australia},\n\ - \ author = {Dominik Schlienger},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176116},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {407--412},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Acoustic Localisation\ - \ for Spatial Reproduction of Moving Sound Source: Application Scenarios \\& Proof\ - \ of Concept},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0080.pdf},\n\ - \ year = {2016}\n}\n" + ID: Wright2019 + abstract: 'Taking inspiration from research into deliberately constrained musical + technologies and the emergence of neurodiverse, child-led musical groups such + as the Artism Ensemble, the interplay between design-constraints, inclusivity + and appro- priation is explored. A small scale review covers systems from two + prominent UK-based companies, and two itera- tions of a new prototype system that + were developed in collaboration with a small group of young people on the autistic + spectrum. Amongst these technologies, the aspects of musical experience that are + made accessible differ with re- spect to the extent and nature of each system''s + constraints. It is argued that the design-constraints of the new prototype system + facilitated the diverse playing styles and techniques observed during its development. + Based on these obser- vations, we propose that deliberately constrained musical + instruments may be one way of providing more opportuni- ties for the emergence + of personal practices and preferences in neurodiverse groups of children and young + people, and that this is a fitting subject for further research.' 
+ address: 'Porto Alegre, Brazil' + author: Joe Wright and James Dooley + bibtex: "@inproceedings{Wright2019,\n abstract = {Taking inspiration from research\ + \ into deliberately constrained musical technologies and the emergence of neurodiverse,\ + \ child-led musical groups such as the Artism Ensemble, the interplay between\ + \ design-constraints, inclusivity and appro- priation is explored. A small scale\ + \ review covers systems from two prominent UK-based companies, and two itera-\ + \ tions of a new prototype system that were developed in collaboration with a\ + \ small group of young people on the autistic spectrum. Amongst these technologies,\ + \ the aspects of musical experience that are made accessible differ with re- spect\ + \ to the extent and nature of each system's constraints. It is argued that the\ + \ design-constraints of the new prototype system facilitated the diverse playing\ + \ styles and techniques observed during its development. Based on these obser-\ + \ vations, we propose that deliberately constrained musical instruments may be\ + \ one way of providing more opportuni- ties for the emergence of personal practices\ + \ and preferences in neurodiverse groups of children and young people, and that\ + \ this is a fitting subject for further research.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Joe Wright and James Dooley},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672908},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {162--167},\n publisher = {UFRGS},\n\ + \ title = {On the Inclusivity of Constraint: Creative Appropriation in Instruments\ + \ for Neurodiverse Children and Young People},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper032.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical 
Expression - doi: 10.5281/zenodo.1176116 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672908 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 407--412 - publisher: Queensland Conservatorium Griffith University - title: 'Acoustic Localisation for Spatial Reproduction of Moving Sound Source: Application - Scenarios & Proof of Concept' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0080.pdf - year: 2016 + month: June + pages: 162--167 + publisher: UFRGS + title: 'On the Inclusivity of Constraint: Creative Appropriation in Instruments + for Neurodiverse Children and Young People' + url: http://www.nime.org/proceedings/2019/nime2019_paper032.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Soraghan2016 - abstract: "Many commercial software applications for timbre creation and\nmanipulation\ - \ feature an engineering-focused, parametric layout. This paper argues\nthe case\ - \ for a perceptually motivated approach to interface design in such tools.\n`Perceptually\ - \ motivated' in this context refers to the use of common semantic\ntimbre descriptors\ - \ to influence the digital representation of timbre. A review is\ngiven of existing\ - \ research into semantic descriptors of timbre, as well as\ncorresponding acoustic\ - \ features of timbre. Discussion is also given on existing\ninterface design techniques.\ - \ The perceptually motivated approach to interface\ndesign is demonstrated using\ - \ an example system, which makes use of perceptually\nrelevant mappings from acoustic\ - \ timbre features to semantic timbre descriptors\nand visualises sounds as physical\ - \ objects." - address: 'Brisbane, Australia' - author: Sean Soraghan and Alain Renaud and Ben Supper - bibtex: "@inproceedings{Soraghan2016,\n abstract = {Many commercial software applications\ - \ for timbre creation and\nmanipulation feature an engineering-focused, parametric\ - \ layout. 
This paper argues\nthe case for a perceptually motivated approach to\ - \ interface design in such tools.\n`Perceptually motivated' in this context refers\ - \ to the use of common semantic\ntimbre descriptors to influence the digital representation\ - \ of timbre. A review is\ngiven of existing research into semantic descriptors\ - \ of timbre, as well as\ncorresponding acoustic features of timbre. Discussion\ - \ is also given on existing\ninterface design techniques. The perceptually motivated\ - \ approach to interface\ndesign is demonstrated using an example system, which\ - \ makes use of perceptually\nrelevant mappings from acoustic timbre features to\ - \ semantic timbre descriptors\nand visualises sounds as physical objects.},\n\ - \ address = {Brisbane, Australia},\n author = {Sean Soraghan and Alain Renaud\ - \ and Ben Supper},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176129},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {413--418},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Towards a perceptual\ - \ framework for interface design in digital environments for timbre manipulation},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0081.pdf},\n\ - \ year = {2016}\n}\n" + ID: Almeida2019 + abstract: 'We present AMIGO, a real-time computer music system that assists novice + users in the composition process through guided musical improvisation. The system + consists of 1) a computational analysis-generation algorithm, which not only formalizes + musical principles from examples, but also guides the user in selecting note sequences; + 2) a MIDI keyboard controller with an integrated LED stripe, which provides visual + feedback to the user; and 3) a real-time music notation, which displays the generated + output. 
Ultimately, AMIGO allows the intuitive creation of new musical structures + and the acquisition of Western music formalisms, such as musical notation.' + address: 'Porto Alegre, Brazil' + author: Isabela Corintha Almeida and Giordano Cabral and Professor Gilberto Bernardes + Almeida + bibtex: "@inproceedings{Almeida2019,\n abstract = {We present AMIGO, a real-time\ + \ computer music system that assists novice users in the composition process through\ + \ guided musical improvisation. The system consists of 1) a computational analysis-generation\ + \ algorithm, which not only formalizes musical principles from examples, but also\ + \ guides the user in selecting note sequences; 2) a MIDI keyboard controller with\ + \ an integrated LED stripe, which provides visual feedback to the user; and 3)\ + \ a real-time music notation, which displays the generated output. Ultimately,\ + \ AMIGO allows the intuitive creation of new musical structures and the acquisition\ + \ of Western music formalisms, such as musical notation.},\n address = {Porto\ + \ Alegre, Brazil},\n author = {Isabela Corintha Almeida and Giordano Cabral and\ + \ Professor Gilberto Bernardes Almeida},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672910},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {168--169},\n publisher = {UFRGS},\n title = {{AMIGO}: An\ + \ Assistive Musical Instrument to Engage, Create and Learn Music},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper033.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176129 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672910 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 413--418 - publisher: Queensland Conservatorium Griffith University - title: Towards a 
perceptual framework for interface design in digital environments - for timbre manipulation - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0081.pdf - year: 2016 + month: June + pages: 168--169 + publisher: UFRGS + title: 'AMIGO: An Assistive Musical Instrument to Engage, Create and Learn Music' + url: http://www.nime.org/proceedings/2019/nime2019_paper033.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Reid2016 - abstract: "This paper describes the design of a Minimally Invasive Gesture\nSensing\ - \ Interface (MIGSI) for trumpet. The interface attaches effortlessly to any\n\ - B-flat or C trumpet and requires no permanent modifications to the\nhost-instrument.\ - \ It was designed first and foremost with accessibility in\nmind an approach that\ - \ is uncommon in augmented instrument design and\nseeks to strike a balance between\ - \ minimal design and robust control. MIGSI uses\nsensor technology to capture\ - \ gestural data such as valve displacement, hand\ntension, and instrument position,\ - \ to offer extended control and expressivity to\ntrumpet players. Several streams\ - \ of continuous data are transmitted wirelessly\nfrom MIGSI to the receiving computer,\ - \ where MIGSI Mapping application (a simple\ngraphical user interface) parses\ - \ the incoming data into individually accessible\nvariables. It is our hope that\ - \ MIGSI will be adopted by trumpet players and\ncomposers, and that over time\ - \ a new body of repertoire for the augmented trumpet\nwill emerge." - address: 'Brisbane, Australia' - author: Sarah Reid and Ryan Gaston and Colin Honigman and Ajay Kapur - bibtex: "@inproceedings{Reid2016,\n abstract = {This paper describes the design\ - \ of a Minimally Invasive Gesture\nSensing Interface (MIGSI) for trumpet. The\ - \ interface attaches effortlessly to any\nB-flat or C trumpet and requires no\ - \ permanent modifications to the\nhost-instrument. 
It was designed first and foremost\ - \ with accessibility in\nmind an approach that is uncommon in augmented instrument\ - \ design and\nseeks to strike a balance between minimal design and robust control.\ - \ MIGSI uses\nsensor technology to capture gestural data such as valve displacement,\ - \ hand\ntension, and instrument position, to offer extended control and expressivity\ - \ to\ntrumpet players. Several streams of continuous data are transmitted wirelessly\n\ - from MIGSI to the receiving computer, where MIGSI Mapping application (a simple\n\ - graphical user interface) parses the incoming data into individually accessible\n\ - variables. It is our hope that MIGSI will be adopted by trumpet players and\n\ - composers, and that over time a new body of repertoire for the augmented trumpet\n\ - will emerge.},\n address = {Brisbane, Australia},\n author = {Sarah Reid and Ryan\ - \ Gaston and Colin Honigman and Ajay Kapur},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176106},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {419--424},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Minimally Invasive Gesture Sensing Interface (MIGSI) for Trumpet},\n\ - \ track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0082.pdf},\n\ - \ year = {2016}\n}\n" + ID: Figueiró2019 + abstract: 'ESMERIL is an application developed for Android with a toolchain based + on Puredata and OpenFrameworks (with Ofelia library). The application enables + music creation in a specific expanded format: four separate mono tracks, each + one able to manipulate up to eight audio samples per channel. It works also as + a performance instrument that stimulates collaborative remixings from compositions + of scored interaction gestures called “scenes”. 
The interface also aims to be + a platform to exchange those sample packs as artistic releases, a format similar + to the popular idea of an “album”, but prepared to those four channel packs of + samples and scores of interaction. It uses an adaptive audio slicing mechanism + and it is based on interaction design for multi-touch screen features. A timing + sequencer enhances the interaction between pre-set sequences (the “scenes”) and + screen manipulation scratching, expanding and moving graphic sound waves. This + paper describes the graphical interface features, some development decisions up + to now and perspectives to its continuity.' + address: 'Porto Alegre, Brazil' + author: Cristiano Figueiró and Guilherme Soares and Bruno Rohde + bibtex: "@inproceedings{Figueiró2019,\n abstract = {ESMERIL is an application developed\ + \ for Android with a toolchain based on Puredata and OpenFrameworks (with Ofelia\ + \ library). The application enables music creation in a specific expanded format:\ + \ four separate mono tracks, each one able to manipulate up to eight audio samples\ + \ per channel. It works also as a performance instrument that stimulates collaborative\ + \ remixings from compositions of scored interaction gestures called “scenes”.\ + \ The interface also aims to be a platform to exchange those sample packs as artistic\ + \ releases, a format similar to the popular idea of an “album”, but prepared to\ + \ those four channel packs of samples and scores of interaction. It uses an adaptive\ + \ audio slicing mechanism and it is based on interaction design for multi-touch\ + \ screen features. A timing sequencer enhances the interaction between pre-set\ + \ sequences (the “scenes”) and screen manipulation scratching, expanding and moving\ + \ graphic sound waves. 
This paper describes the graphical interface features,\ + \ some development decisions up to now and perspectives to its continuity.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Cristiano Figueiró and Guilherme\ + \ Soares and Bruno Rohde},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672912},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {170--173},\n publisher = {UFRGS},\n title = {{ESMERIL}\ + \ --- An interactive audio player and composition system for collaborative experimental\ + \ music netlabels},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper034.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176106 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672912 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 419--424 - publisher: Queensland Conservatorium Griffith University - title: Minimally Invasive Gesture Sensing Interface (MIGSI) for Trumpet - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0082.pdf - year: 2016 + month: June + pages: 170--173 + publisher: UFRGS + title: ESMERIL --- An interactive audio player and composition system for collaborative + experimental music netlabels + url: http://www.nime.org/proceedings/2019/nime2019_paper034.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Paine2016 - abstract: "The question of sound as an experience of now, as a conduit to the\n\ - quality of our belonging to the present, is challenging. Yet it is a crucial\n\ - issue in discussions about ecological listening. 
I have come to think of sound\ - \ as\na viscous material, a vibrating field of energy that has texture and density\ - \ and\na physicality that is unlike most other media.\nNow suggests a desire of\ - \ becoming present in the resonating sound field of our\nimmediate environment.\ - \ The energy in the field constantly modulates and drifts. I\ndraw on voices and\ - \ forces from the natural environment, humans and machines. The\nwork seeks to\ - \ draw the listeners into an inner space in which they can be both\npresent and\ - \ aware of their sonic environment and become immersed in it. Now is\npartly inspired\ - \ by Samuel Beckett's novel Watt, specifically Watt's\nmysterious journey into\ - \ to the unknown." - address: 'Brisbane, Australia' - author: Garth Paine - bibtex: "@inproceedings{Paine2016,\n abstract = {The question of sound as an experience\ - \ of now, as a conduit to the\nquality of our belonging to the present, is challenging.\ - \ Yet it is a crucial\nissue in discussions about ecological listening. I have\ - \ come to think of sound as\na viscous material, a vibrating field of energy that\ - \ has texture and density and\na physicality that is unlike most other media.\n\ - Now suggests a desire of becoming present in the resonating sound field of our\n\ - immediate environment. The energy in the field constantly modulates and drifts.\ - \ I\ndraw on voices and forces from the natural environment, humans and machines.\ - \ The\nwork seeks to draw the listeners into an inner space in which they can\ - \ be both\npresent and aware of their sonic environment and become immersed in\ - \ it. 
Now is\npartly inspired by Samuel Beckett's novel Watt, specifically Watt's\n\ - mysterious journey into to the unknown.},\n address = {Brisbane, Australia},\n\ - \ author = {Garth Paine},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176104},\n\ - \ isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {425--426},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Now},\n track\ - \ = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0083.pdf},\n\ - \ year = {2016}\n}\n" + ID: Weber2019 + abstract: 'We introduce a machine learning technique to autonomously generate novel + melodies that are variations of an arbitrary base melody. These are produced by + a neural network that ensures that (with high probability) the melodic and rhythmic + structure of the new melody is consistent with a given set of sample songs. We + train a Variational Autoencoder network to identify a low-dimensional set of variables + that allows for the compression and representation of sample songs. By perturbing + these variables with Perlin Noise---a temporally-consistent parameterized noise + function---it is possible to generate smoothly-changing novel melodies. We show + that (1) by regulating the amount of noise, one can specify how much of the base + song will be preserved; and (2) there is a direct correlation between the noise + signal and the differences between the statistical properties of novel melodies + and the original one. Users can interpret the controllable noise as a type of + "creativity knob": the higher it is, the more leeway the network has to generate + significantly different melodies. We present a physical prototype that allows + musicians to use a keyboard to provide base melodies and to adjust the network''s + "creativity knobs" to regulate in real-time the process that proposes new melody + ideas.' 
+ address: 'Porto Alegre, Brazil' + author: Aline Weber and Lucas Nunes Alegre and Jim Torresen and Bruno C. da Silva + bibtex: "@inproceedings{Weber2019,\n abstract = {We introduce a machine learning\ + \ technique to autonomously generate novel melodies that are variations of an\ + \ arbitrary base melody. These are produced by a neural network that ensures that\ + \ (with high probability) the melodic and rhythmic structure of the new melody\ + \ is consistent with a given set of sample songs. We train a Variational Autoencoder\ + \ network to identify a low-dimensional set of variables that allows for the compression\ + \ and representation of sample songs. By perturbing these variables with Perlin\ + \ Noise---a temporally-consistent parameterized noise function---it is possible\ + \ to generate smoothly-changing novel melodies. We show that (1) by regulating\ + \ the amount of noise, one can specify how much of the base song will be preserved;\ + \ and (2) there is a direct correlation between the noise signal and the differences\ + \ between the statistical properties of novel melodies and the original one. Users\ + \ can interpret the controllable noise as a type of \"creativity knob\": the higher\ + \ it is, the more leeway the network has to generate significantly different melodies.\ + \ We present a physical prototype that allows musicians to use a keyboard to provide\ + \ base melodies and to adjust the network's \"creativity knobs\" to regulate in\ + \ real-time the process that proposes new melody ideas.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Aline Weber and Lucas Nunes Alegre and Jim Torresen and\ + \ Bruno C. 
da Silva},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672914},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {174--179},\n publisher = {UFRGS},\n title = {Parameterized\ + \ Melody Generation with Autoencoders and Temporally-Consistent Noise},\n url\ + \ = {http://www.nime.org/proceedings/2019/nime2019_paper035.pdf},\n year = {2019}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176104 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672914 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 425--426 - publisher: Queensland Conservatorium Griffith University - title: Now - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0083.pdf - year: 2016 + month: June + pages: 174--179 + publisher: UFRGS + title: Parameterized Melody Generation with Autoencoders and Temporally-Consistent + Noise + url: http://www.nime.org/proceedings/2019/nime2019_paper035.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Shapiro2016 - abstract: "NIME research realizes a vision of performance by means of\ncomputational\ - \ expression, linking body and space to sound and imagery through\neclectic forms\ - \ of sensing and interaction. This vision could dramatically impact\ncomputer\ - \ science education, simultaneously modernizing the field and drawing in\ndiverse\ - \ new participants. We describe our work creating a NIME-inspired computer\nmusic\ - \ toolkit for kids called BlockyTalky; the toolkit enables users to create\nnetworks\ - \ of sensing devices and synthesizers. We offer findings from our research\non\ - \ student learning through programming and performance. We conclude by\nsuggesting\ - \ a number of future directions for NIME researchers interested in\neducation." 
- address: 'Brisbane, Australia' - author: R. Benjamin Shapiro and Rebecca Fiebrink and Matthew Ahrens and Annie Kelly - bibtex: "@inproceedings{Shapiro2016,\n abstract = {NIME research realizes a vision\ - \ of performance by means of\ncomputational expression, linking body and space\ - \ to sound and imagery through\neclectic forms of sensing and interaction. This\ - \ vision could dramatically impact\ncomputer science education, simultaneously\ - \ modernizing the field and drawing in\ndiverse new participants. We describe\ - \ our work creating a NIME-inspired computer\nmusic toolkit for kids called BlockyTalky;\ - \ the toolkit enables users to create\nnetworks of sensing devices and synthesizers.\ - \ We offer findings from our research\non student learning through programming\ - \ and performance. We conclude by\nsuggesting a number of future directions for\ - \ NIME researchers interested in\neducation.},\n address = {Brisbane, Australia},\n\ - \ author = {R. Benjamin Shapiro and Rebecca Fiebrink and Matthew Ahrens and Annie\ - \ Kelly},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176120},\n isbn = {978-1-925455-13-7},\n\ - \ issn = {2220-4806},\n pages = {427--432},\n publisher = {Queensland Conservatorium\ - \ Griffith University},\n title = {BlockyTalky: A Physical and Distributed Computer\ - \ Music Toolkit for Kids},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0084.pdf},\n\ - \ year = {2016}\n}\n" + ID: Tanaka2019 + abstract: 'This paper presents a system that allows users to quickly try different + ways to train neural networks and temporal modeling techniques to associate arm + gestures with time varying sound. We created a software framework for this, and + designed three interactive sounds and presented them to participants in a workshop + based study. 
We build upon previous work in sound-tracing and mapping-by-demonstration + to ask the participants to design gestures with which to perform the given sounds + using a multimodal, inertial measurement (IMU) and muscle sensing (EMG) device. + We presented the user with four techniques for associating sensor input to synthesizer + parameter output. Two were classical techniques from the literature, and two proposed + different ways to capture dynamic gesture in a neural network. These four techniques + were: 1.) A Static Position regression training procedure, 2.) A Hidden Markov + based temporal modeler, 3.) Whole Gesture capture to a neural network, and 4.) + a Windowed method using the position-based procedure on the fly during the performance + of a dynamic gesture. Our results show trade-offs between accurate, predictable + reproduction of the source sounds and exploration of the gesture-sound space. + Several of the users were attracted to our new windowed method for capturing gesture + anchor points on the fly as training data for neural network based regression. + This paper will be of interest to musicians interested in going from sound design + to gesture design and offers a workflow for quickly trying different mapping-by-demonstration + techniques.' + address: 'Porto Alegre, Brazil' + author: 'Atau Tanaka and Di Donato, Balandino and Michael Zbyszynski and Geert Roks' + bibtex: "@inproceedings{Tanaka2019,\n abstract = {This paper presents a system that\ + \ allows users to quickly try different ways to train neural networks and temporal\ + \ modeling techniques to associate arm gestures with time varying sound. We created\ + \ a software framework for this, and designed three interactive sounds and presented\ + \ them to participants in a workshop based study. 
We build upon previous work\ + \ in sound-tracing and mapping-by-demonstration to ask the participants to design\ + \ gestures with which to perform the given sounds using a multimodal, inertial\ + \ measurement (IMU) and muscle sensing (EMG) device. We presented the user with\ + \ four techniques for associating sensor input to synthesizer parameter output.\ + \ Two were classical techniques from the literature, and two proposed different\ + \ ways to capture dynamic gesture in a neural network. These four techniques were:\ + \ 1.) A Static Position regression training procedure, 2.) A Hidden Markov based\ + \ temporal modeler, 3.) Whole Gesture capture to a neural network, and 4.) a Windowed\ + \ method using the position-based procedure on the fly during the performance\ + \ of a dynamic gesture. Our results show trade-offs between accurate, predictable\ + \ reproduction of the source sounds and exploration of the gesture-sound space.\ + \ Several of the users were attracted to our new windowed method for capturing\ + \ gesture anchor points on the fly as training data for neural network based regression.\ + \ This paper will be of interest to musicians interested in going from sound design\ + \ to gesture design and offers a workflow for quickly trying different mapping-by-demonstration\ + \ techniques.},\n address = {Porto Alegre, Brazil},\n author = {Atau Tanaka and\ + \ Di Donato, Balandino and Michael Zbyszynski and Geert Roks},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672916},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {180--185},\n publisher = {UFRGS},\n\ + \ title = {Designing Gestures for Continuous Sonic Interaction},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper036.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical 
Expression - doi: 10.5281/zenodo.1176120 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672916 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 427--432 - publisher: Queensland Conservatorium Griffith University - title: 'BlockyTalky: A Physical and Distributed Computer Music Toolkit for Kids' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0084.pdf - year: 2016 + month: June + pages: 180--185 + publisher: UFRGS + title: Designing Gestures for Continuous Sonic Interaction + url: http://www.nime.org/proceedings/2019/nime2019_paper036.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Bowers2016 - abstract: 'This paper describes an instance of what we call `curated research'', - a concerted thinking, making and performance activity between two research teams - with a dedicated interest in the creation of experimental musical instruments - and the development of new performance practices. Our work builds theoretically - upon critical work in philosophy, anthropology and aesthetics, and practically - upon previous explorations of strategies for facilitating rapid, collaborative, - publicly-oriented making in artistic settings. We explored an orientation to making - which promoted the creation of a family of instruments and performance environments - that were responses to the self-consciously provocative theme of `One Knob To - Rule Them All''. A variety of design issues were explored including: mapping, - physicality, the question of control in interface design, reductionist aesthetics - and design strategies, and questions of gender and power in musical culture. We - discuss not only the technologies which were made but also reflect on the value - of such concerted, provocatively thematised, collective making activities for - addressing foundational design issues. 
As such, our work is intended not just - as a technical and practical contribution to NIME but also a reflective provocation - into how we conduct research itself in a curated critical manner.' - address: 'Brisbane, Australia' - author: John Bowers and John Richards and Tim Shaw and Jim Frieze and Ben Freeth - and Sam Topley and Neal Spowage and Steve Jones and Amit Patel and Li Rui - bibtex: "@inproceedings{Bowers2016,\n abstract = {This paper describes an instance\ - \ of what we call `curated research', a concerted thinking, making and performance\ - \ activity between two research teams with a dedicated interest in the creation\ - \ of experimental musical instruments and the development of new performance practices.\ - \ Our work builds theoretically upon critical work in philosophy, anthropology\ - \ and aesthetics, and practically upon previous explorations of strategies for\ - \ facilitating rapid, collaborative, publicly-oriented making in artistic settings.\ - \ We explored an orientation to making which promoted the creation of a family\ - \ of instruments and performance environments that were responses to the self-consciously\ - \ provocative theme of `One Knob To Rule Them All'. A variety of design issues\ - \ were explored including: mapping, physicality, the question of control in interface\ - \ design, reductionist aesthetics and design strategies, and questions of gender\ - \ and power in musical culture. We discuss not only the technologies which were\ - \ made but also reflect on the value of such concerted, provocatively thematised,\ - \ collective making activities for addressing foundational design issues. 
As such,\ - \ our work is intended not just as a technical and practical contribution to NIME\ - \ but also a reflective provocation into how we conduct research itself in a curated\ - \ critical manner.},\n address = {Brisbane, Australia},\n author = {John Bowers\ - \ and John Richards and Tim Shaw and Jim Frieze and Ben Freeth and Sam Topley\ - \ and Neal Spowage and Steve Jones and Amit Patel and Li Rui},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1175996},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {433--438},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {One Knob To Rule Them All: Reductionist Interfaces for Expansionist\ - \ Research},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0085.pdf},\n\ - \ year = {2016}\n}\n" + ID: Erdem2019 + abstract: 'This paper describes the process of developing a shared instrument for + music--dance performance, with a particular focus on exploring the boundaries + between standstill vs motion, and silence vs sound. The piece Vrengt grew from + the idea of enabling a true partnership between a musician and a dancer, developing + an instrument that would allow for active co-performance. Using a participatory + design approach, we worked with sonification as a tool for systematically exploring + the dancer''s bodily expressions. The exploration used a "spatiotemporal matrix", + with a particular focus on sonic microinteraction. In the final performance, two + Myo armbands were used for capturing muscle activity of the arm and leg of the + dancer, together with a wireless headset microphone capturing the sound of breathing. 
+ In the paper we reflect on multi-user instrument paradigms, discuss our approach + to creating a shared instrument using sonification as a tool for the sound design, + and reflect on the performers'' subjective evaluation of the instrument. ' + address: 'Porto Alegre, Brazil' + author: 'Cagri Erdem and Katja Henriksen Schia and Jensenius, Alexander Refsum' + bibtex: "@inproceedings{Erdem2019,\n abstract = {This paper describes the process\ + \ of developing a shared instrument for music--dance performance, with a particular\ + \ focus on exploring the boundaries between standstill vs motion, and silence\ + \ vs sound. The piece Vrengt grew from the idea of enabling a true partnership\ + \ between a musician and a dancer, developing an instrument that would allow for\ + \ active co-performance. Using a participatory design approach, we worked with\ + \ sonification as a tool for systematically exploring the dancer's bodily expressions.\ + \ The exploration used a \"spatiotemporal matrix\", with a particular focus on\ + \ sonic microinteraction. In the final performance, two Myo armbands were used\ + \ for capturing muscle activity of the arm and leg of the dancer, together with\ + \ a wireless headset microphone capturing the sound of breathing. In the paper\ + \ we reflect on multi-user instrument paradigms, discuss our approach to creating\ + \ a shared instrument using sonification as a tool for the sound design, and reflect\ + \ on the performers' subjective evaluation of the instrument. 
},\n address =\ + \ {Porto Alegre, Brazil},\n author = {Cagri Erdem and Katja Henriksen Schia and\ + \ Jensenius, Alexander Refsum},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672918},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {186--191},\n publisher = {UFRGS},\n title = {Vrengt: A\ + \ Shared Body-Machine Instrument for Music-Dance Performance},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper037.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1175996 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672918 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 433--438 - publisher: Queensland Conservatorium Griffith University - title: 'One Knob To Rule Them All: Reductionist Interfaces for Expansionist Research' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0085.pdf - year: 2016 + month: June + pages: 186--191 + publisher: UFRGS + title: 'Vrengt: A Shared Body-Machine Instrument for Music-Dance Performance' + url: http://www.nime.org/proceedings/2019/nime2019_paper037.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: Jensenius2016 - abstract: "This paper provides an overview of the process of editing the\nforthcoming\ - \ anthology A NIME Reader---Fifteen years of New Interfaces for\nMusical Expression.\ - \ The selection process is presented, and we reflect on some\nof the trends we\ - \ have observed in re-discovering the collection of more than 1200\nNIME papers\ - \ published throughout the 15 yearlong history of the conference. An\nanthology\ - \ is necessarily selective, and ours is no exception. 
As we present in\nthis paper,\ - \ the aim has been to represent the wide range of artistic,\nscientific, and technological\ - \ approaches that characterize the NIME conference.\nThe anthology also includes\ - \ critical discourse, and through acknowledgment of the\nstrengths and weaknesses\ - \ of the NIME community, we propose activities which could\nfurther diversify\ - \ and strengthen the field." - address: 'Brisbane, Australia' - author: Alexander Refsum Jensenius and Michael J. Lyons - bibtex: "@inproceedings{Jensenius2016,\n abstract = {This paper provides an overview\ - \ of the process of editing the\nforthcoming anthology A NIME Reader---Fifteen\ - \ years of New Interfaces for\nMusical Expression. The selection process is presented,\ - \ and we reflect on some\nof the trends we have observed in re-discovering the\ - \ collection of more than 1200\nNIME papers published throughout the 15 yearlong\ - \ history of the conference. An\nanthology is necessarily selective, and ours\ - \ is no exception. As we present in\nthis paper, the aim has been to represent\ - \ the wide range of artistic,\nscientific, and technological approaches that characterize\ - \ the NIME conference.\nThe anthology also includes critical discourse, and through\ - \ acknowledgment of the\nstrengths and weaknesses of the NIME community, we propose\ - \ activities which could\nfurther diversify and strengthen the field.},\n address\ - \ = {Brisbane, Australia},\n author = {Alexander Refsum Jensenius and Michael\ - \ J. 
Lyons},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176044},\n isbn\ - \ = {978-1-925455-13-7},\n issn = {2220-4806},\n pages = {439--443},\n publisher\ - \ = {Queensland Conservatorium Griffith University},\n title = {Trends at NIME---Reflections\ - \ on Editing A NIME Reader},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0086.pdf},\n\ - \ year = {2016}\n}\n" + ID: ParkeWolfe2019 + abstract: 'We have built a new software toolkit that enables music therapists and + teachers to create custom digital musical interfaces for children with diverse + disabilities. It was designed in collaboration with music therapists, teachers, + and children. It uses interactive machine learning to create new sensor- and vision-based + musical interfaces using demonstrations of actions and sound, making interface + building fast and accessible to people without programming or engineering expertise. + Interviews with two music therapy and education professionals who have used the + software extensively illustrate how richly customised, sensor-based interfaces + can be used in music therapy contexts; they also reveal how properties of input + devices, music-making approaches, and mapping techniques can support a variety + of interaction styles and therapy goals.' + address: 'Porto Alegre, Brazil' + author: Samuel Thompson Parke-Wolfe and Hugo Scurto and Rebecca Fiebrink + bibtex: "@inproceedings{ParkeWolfe2019,\n abstract = {We have built a new software\ + \ toolkit that enables music therapists and teachers to create custom digital\ + \ musical interfaces for children with diverse disabilities. It was designed in\ + \ collaboration with music therapists, teachers, and children. 
It uses interactive\ + \ machine learning to create new sensor- and vision-based musical interfaces using\ + \ demonstrations of actions and sound, making interface building fast and accessible\ + \ to people without programming or engineering expertise. Interviews with two\ + \ music therapy and education professionals who have used the software extensively\ + \ illustrate how richly customised, sensor-based interfaces can be used in music\ + \ therapy contexts; they also reveal how properties of input devices, music-making\ + \ approaches, and mapping techniques can support a variety of interaction styles\ + \ and therapy goals.},\n address = {Porto Alegre, Brazil},\n author = {Samuel\ + \ Thompson Parke-Wolfe and Hugo Scurto and Rebecca Fiebrink},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672920},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {192--197},\n publisher = {UFRGS},\n\ + \ title = {Sound Control: Supporting Custom Musical Interface Design for Children\ + \ with Disabilities},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper038.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176044 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672920 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 439--443 - publisher: Queensland Conservatorium Griffith University - title: Trends at NIME---Reflections on Editing A NIME Reader - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0086.pdf - year: 2016 + month: June + pages: 192--197 + publisher: UFRGS + title: 'Sound Control: Supporting Custom Musical Interface Design for Children with + Disabilities' + url: http://www.nime.org/proceedings/2019/nime2019_paper038.pdf + year: 2019 - ENTRYTYPE: inproceedings - 
ID: Tahironicode287lu2016 - abstract: |- - In this paper we present the new development of a semi-autonomous - response module for the NOISA system. NOISA is an interactive music system that - predicts performer's engagement levels, learns from the performer, decides what - to do and does it at the right moment. As an improvement for the above, we - implemented real-time adaptive features that respond to a detailed monitoring of - the performer's engagement and to overall sonic space, while evaluating the - impact of its actions. Through these new features, the response module produces - meaningful and non-intrusive counter actions, attempting to deepen and maintain - the performer's engagement in musical interaction. In a formative study we - compared our designed response module against a random control system of events, - in which the former performed consistently better than the latter. - address: 'Brisbane, Australia' - author: Koray Tahiroglu and Juan Carlos Vasquez and Johan Kildal - bibtex: "@inproceedings{Tahironicode287lu2016,\n abstract = {In this paper we present\ - \ the new development of a semi-autonomous\nresponse module for the NOISA system.\ - \ NOISA is an interactive music system that\npredicts performer's engagement levels,\ - \ learns from the performer, decides what\nto do and does it at the right moment.\ - \ As an improvement for the above, we\nimplemented real-time adaptive features\ - \ that respond to a detailed monitoring of\nthe performer's engagement and to\ - \ overall sonic space, while evaluating the\nimpact of its actions. Through these\ - \ new features, the response module produces\nmeaningful and non-intrusive counter\ - \ actions, attempting to deepen and maintain\nthe performer's engagement in musical\ - \ interaction. 
In a formative study we\ncompared our designed response module\ - \ against a random control system of events,\nin which the former performed consistently\ - \ better than the latter.},\n address = {Brisbane, Australia},\n author = {Koray\ - \ Tahiroglu and Juan Carlos Vasquez and Johan Kildal},\n booktitle = {Proceedings\ + ID: Hödl2019 + abstract: 'With a new digital music instrument (DMI), the interface itself, the + sound generation, the composition, and the performance are often closely related + and even intrinsically linked with each other. Similarly, the instrument designer, + composer, and performer are often the same person. The Academic Festival Overture + is a new piece of music for the DMI Trombosonic and symphonic orchestra written + by a composer who had no prior experience with the instrument. The piece underwent + the phases of a composition competition, rehearsals, a music video production, + and a public live performance. This whole process was evaluated reflecting on + the experience of three involved key stakeholder: the composer, the conductor, + and the instrument designer as performer. `Blending dimensions'' of these stakeholder + and decoupling the composition from the instrument designer inspired the newly + involved composer to completely rethink the DMI''s interaction and sound concept. + Thus, to deliberately avoid an early collaboration between a DMI designer and + a composer bears the potential for new inspiration and at the same time the challenge + to seek such a collaboration in the need of clarifying possible misunderstandings + and improvement.' + address: 'Porto Alegre, Brazil' + author: Oliver Hödl + bibtex: "@inproceedings{Hödl2019,\n abstract = {With a new digital music instrument\ + \ (DMI), the interface itself, the sound generation, the composition, and the\ + \ performance are often closely related and even intrinsically linked with each\ + \ other. 
Similarly, the instrument designer, composer, and performer are often\ + \ the same person. The Academic Festival Overture is a new piece of music for\ + \ the DMI Trombosonic and symphonic orchestra written by a composer who had no\ + \ prior experience with the instrument. The piece underwent the phases of a composition\ + \ competition, rehearsals, a music video production, and a public live performance.\ + \ This whole process was evaluated reflecting on the experience of three involved\ + \ key stakeholder: the composer, the conductor, and the instrument designer as\ + \ performer. `Blending dimensions' of these stakeholder and decoupling the composition\ + \ from the instrument designer inspired the newly involved composer to completely\ + \ rethink the DMI's interaction and sound concept. Thus, to deliberately avoid\ + \ an early collaboration between a DMI designer and a composer bears the potential\ + \ for new inspiration and at the same time the challenge to seek such a collaboration\ + \ in the need of clarifying possible misunderstandings and improvement.},\n address\ + \ = {Porto Alegre, Brazil},\n author = {Oliver Hödl},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176131},\n isbn = {978-1-925455-13-7},\n issn = {2220-4806},\n\ - \ pages = {444--449},\n publisher = {Queensland Conservatorium Griffith University},\n\ - \ title = {Non-intrusive Counter-actions: Maintaining Progressively Engaging Interactions\ - \ for Music Performance},\n track = {Papers},\n url = {http://www.nime.org/proceedings/2016/nime2016_paper0087.pdf},\n\ - \ year = {2016}\n}\n" + \ doi = {10.5281/zenodo.3672922},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {198--203},\n publisher = {UFRGS},\n\ + \ title = {'Blending Dimensions' when Composing for {DMI} and Symphonic Orchestra},\n\ + \ url = 
{http://www.nime.org/proceedings/2019/nime2019_paper039.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176131 - isbn: 978-1-925455-13-7 + doi: 10.5281/zenodo.3672922 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 - pages: 444--449 - publisher: Queensland Conservatorium Griffith University - title: 'Non-intrusive Counter-actions: Maintaining Progressively Engaging Interactions - for Music Performance' - track: Papers - url: http://www.nime.org/proceedings/2016/nime2016_paper0087.pdf - year: 2016 + month: June + pages: 198--203 + publisher: UFRGS + title: '''Blending Dimensions'' when Composing for DMI and Symphonic Orchestra' + url: http://www.nime.org/proceedings/2019/nime2019_paper039.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_1 - abstract: 'This paper provides figures and metrics over twenty years of New Interfaces - for Musical Expression conferences, which are derived by analyzing the publicly - available paper proceedings. Besides presenting statistical information and a - bibliometric study, we aim at identifying trends and patterns. The analysis shows - the growth and heterogeneity of the NIME demographic, as well the increase in - research output. The data presented in this paper allows the community to reflect - on several issues such as diversity and sustainability, and it provides insights - to address challenges and set future directions.' - address: 'Shanghai, China' - articleno: 1 - author: 'Fasciani, Stefano and Goode, Jackson' - bibtex: "@inproceedings{NIME21_1,\n abstract = {This paper provides figures and\ - \ metrics over twenty years of New Interfaces for Musical Expression conferences,\ - \ which are derived by analyzing the publicly available paper proceedings. Besides\ - \ presenting statistical information and a bibliometric study, we aim at identifying\ - \ trends and patterns. 
The analysis shows the growth and heterogeneity of the\ - \ NIME demographic, as well the increase in research output. The data presented\ - \ in this paper allows the community to reflect on several issues such as diversity\ - \ and sustainability, and it provides insights to address challenges and set future\ - \ directions.},\n address = {Shanghai, China},\n articleno = {1},\n author = {Fasciani,\ - \ Stefano and Goode, Jackson},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.b368bcd5},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/44W7dB7lzQg},\n\ - \ title = {20 NIMEs: Twenty Years of New Interfaces for Musical Expression},\n\ - \ url = {https://nime.pubpub.org/pub/20nimes},\n year = {2021}\n}\n" + ID: haki2019 + abstract: 'This paper presents a detailed explanation of a system generating basslines + that are stylistically and rhythmically interlocked with a provided audio drum + loop. The proposed system is based on a natural language processing technique: + word-based sequence-to-sequence learning using LSTM units. The novelty of the + proposed method lies in the fact that the system is not reliant on a voice-by-voice + transcription of drums; instead, in this method, a drum representation is used + as an input sequence from which a translated bassline is obtained at the output. + The drum representation consists of fixed size sequences of onsets detected from + a 2-bar audio drum loop in eight different frequency bands. The basslines generated + by this method consist of pitched notes with different duration. The proposed + system was trained on two distinct datasets compiled for this project by the authors. + Each dataset contains a variety of 2-bar drum loops with annotated basslines from + two different styles of dance music: House and Soca. 
A listening experiment designed + based on the system revealed that the proposed system is capable of generating + basslines that are interesting and are well rhythmically interlocked with the + drum loops from which they were generated.' + address: 'Porto Alegre, Brazil' + author: behzad haki and Sergi Jorda + bibtex: "@inproceedings{haki2019,\n abstract = {This paper presents a detailed explanation\ + \ of a system generating basslines that are stylistically and rhythmically interlocked\ + \ with a provided audio drum loop. The proposed system is based on a natural language\ + \ processing technique: word-based sequence-to-sequence learning using LSTM units.\ + \ The novelty of the proposed method lies in the fact that the system is not reliant\ + \ on a voice-by-voice transcription of drums; instead, in this method, a drum\ + \ representation is used as an input sequence from which a translated bassline\ + \ is obtained at the output. The drum representation consists of fixed size sequences\ + \ of onsets detected from a 2-bar audio drum loop in eight different frequency\ + \ bands. The basslines generated by this method consist of pitched notes with\ + \ different duration. The proposed system was trained on two distinct datasets\ + \ compiled for this project by the authors. Each dataset contains a variety of\ + \ 2-bar drum loops with annotated basslines from two different styles of dance\ + \ music: House and Soca. 
A listening experiment designed based on the system revealed\ + \ that the proposed system is capable of generating basslines that are interesting\ + \ and are well rhythmically interlocked with the drum loops from which they were\ + \ generated.},\n address = {Porto Alegre, Brazil},\n author = {behzad haki and\ + \ Sergi Jorda},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672928},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {204--209},\n publisher = {UFRGS},\n title = {A Bassline\ + \ Generation System Based on Sequence-to-Sequence Learning},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper040.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.b368bcd5 + doi: 10.5281/zenodo.3672928 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/44W7dB7lzQg - title: '20 NIMEs: Twenty Years of New Interfaces for Musical Expression' - url: https://nime.pubpub.org/pub/20nimes - year: 2021 + pages: 204--209 + publisher: UFRGS + title: A Bassline Generation System Based on Sequence-to-Sequence Learning + url: http://www.nime.org/proceedings/2019/nime2019_paper040.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_10 - abstract: 'This paper investigates how the concept of score has been used in the - NIME community. To this end, we performed a systematic literature review of the - NIME proceedings, analyzing papers in which scores play a central role. We analyzed - the score not as an object per se but in relation to the users and the interactive - system(s). In other words, we primarily looked at the role that scores play in - the performance ecology. 
For this reason, to analyze the papers, we relied on - ARCAA, a recent framework created to investigate artifact ecologies in computer - music performances. Using the framework, we created a scheme for each paper and - clustered the papers according to similarities. Our analysis produced five main - categories that we present and discuss in relation to literature about musical - scores.' - address: 'Shanghai, China' - articleno: 10 - author: 'Masu, Raul and Correia, Nuno N. and Romao, Teresa' - bibtex: "@inproceedings{NIME21_10,\n abstract = {This paper investigates how the\ - \ concept of score has been used in the NIME community. To this end, we performed\ - \ a systematic literature review of the NIME proceedings, analyzing papers in\ - \ which scores play a central role. We analyzed the score not as an object per\ - \ se but in relation to the users and the interactive system(s). In other words,\ - \ we primarily looked at the role that scores play in the performance ecology.\ - \ For this reason, to analyze the papers, we relied on ARCAA, a recent framework\ - \ created to investigate artifact ecologies in computer music performances. Using\ - \ the framework, we created a scheme for each paper and clustered the papers according\ - \ to similarities. Our analysis produced five main categories that we present\ - \ and discuss in relation to literature about musical scores.},\n address = {Shanghai,\ - \ China},\n articleno = {10},\n author = {Masu, Raul and Correia, Nuno N. 
and\ - \ Romao, Teresa},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.3ffad95a},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/j7XmQvDdUPk},\n\ - \ title = {NIME Scores: a Systematic Review of How Scores Have Shaped Performance\ - \ Ecologies in NIME},\n url = {https://nime.pubpub.org/pub/41cj1pyt},\n year =\ - \ {2021}\n}\n" + ID: May2019 + abstract: 'This paper presents a novel physical fuzz pedal effect system named BLIKSEM. + Our approach applies previous work in nonlinear acoustic synthesis via a driven + cantilever soundboard configuration for the purpose of generating fuzz pedal-like + effects as well as a variety of novel audio effects. Following a presentation + of our pedal design, we compare the performance of our system with various various + classic and contemporary fuzz pedals using an electric guitar. Our results show + that BLIKSEM is capable of generating signals that approximate the timbre and + dynamic behaviors of conventional fuzz pedals, as well as offer new mechanisms + for expressive interactions and a range of new effects in different configurations.' + address: 'Porto Alegre, Brazil' + author: Lloyd May and spencer topel + bibtex: "@inproceedings{May2019,\n abstract = {This paper presents a novel physical\ + \ fuzz pedal effect system named BLIKSEM. Our approach applies previous work in\ + \ nonlinear acoustic synthesis via a driven cantilever soundboard configuration\ + \ for the purpose of generating fuzz pedal-like effects as well as a variety of\ + \ novel audio effects. Following a presentation of our pedal design, we compare\ + \ the performance of our system with various various classic and contemporary\ + \ fuzz pedals using an electric guitar. 
Our results show that BLIKSEM is capable\ + \ of generating signals that approximate the timbre and dynamic behaviors of conventional\ + \ fuzz pedals, as well as offer new mechanisms for expressive interactions and\ + \ a range of new effects in different configurations.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Lloyd May and spencer topel},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672930},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {210--215},\n publisher = {UFRGS},\n\ + \ title = {{BLIKSEM}: An Acoustic Synthesis Fuzz Pedal},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper041.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.3ffad95a + doi: 10.5281/zenodo.3672930 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/j7XmQvDdUPk - title: 'NIME Scores: a Systematic Review of How Scores Have Shaped Performance Ecologies - in NIME' - url: https://nime.pubpub.org/pub/41cj1pyt - year: 2021 + pages: 210--215 + publisher: UFRGS + title: 'BLIKSEM: An Acoustic Synthesis Fuzz Pedal' + url: http://www.nime.org/proceedings/2019/nime2019_paper041.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_11 - abstract: 'This paper presents the development of MapLooper: a live-looping system - for gesture-to-sound mappings. We first reviewed loop-based Digital Musical Instruments - (DMIs). We then developed a connectivity infrastructure for wireless embedded - musical instruments with distributed mapping and synchronization. We evaluated - our infrastructure in the context of the real-time constraints of music performance. 
- We measured a round-trip latency of 4.81 ms when mapping signals at 100 Hz with - embedded libmapper and an average inter-onset delay of 3.03 ms for synchronizing - with Ableton Link. On top of this infrastructure, we developed MapLooper: a live-looping - tool with 2 example musical applications: a harp synthesizer with SuperCollider - and embedded source-filter synthesis with FAUST on ESP32. Our system is based - on a novel approach to mapping, extrapolating from using FIR and IIR filters on - gestural data to using delay-lines as part of the mapping of DMIs. Our system - features rhythmic time quantization and a flexible loop manipulation system for - creative musical exploration. We open-source all of our components.' - address: 'Shanghai, China' - articleno: 11 - author: 'Frisson, Christian and Bredholt, Mathias and Malloch, Joseph and Wanderley, - Marcelo M.' - bibtex: "@inproceedings{NIME21_11,\n abstract = {This paper presents the development\ - \ of MapLooper: a live-looping system for gesture-to-sound mappings. We first\ - \ reviewed loop-based Digital Musical Instruments (DMIs). We then developed a\ - \ connectivity infrastructure for wireless embedded musical instruments with distributed\ - \ mapping and synchronization. We evaluated our infrastructure in the context\ - \ of the real-time constraints of music performance. We measured a round-trip\ - \ latency of 4.81 ms when mapping signals at 100 Hz with embedded libmapper and\ - \ an average inter-onset delay of 3.03 ms for synchronizing with Ableton Link.\ - \ On top of this infrastructure, we developed MapLooper: a live-looping tool with\ - \ 2 example musical applications: a harp synthesizer with SuperCollider and embedded\ - \ source-filter synthesis with FAUST on ESP32. Our system is based on a novel\ - \ approach to mapping, extrapolating from using FIR and IIR filters on gestural\ - \ data to using delay-lines as part of the mapping of DMIs. 
Our system features\ - \ rhythmic time quantization and a flexible loop manipulation system for creative\ - \ musical exploration. We open-source all of our components.},\n address = {Shanghai,\ - \ China},\n articleno = {11},\n author = {Frisson, Christian and Bredholt, Mathias\ - \ and Malloch, Joseph and Wanderley, Marcelo M.},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.21428/92fbeb44.47175201},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/9r0zDJA8qbs},\n title = {MapLooper: Live-looping of distributed\ - \ gesture-to-sound mappings},\n url = {https://nime.pubpub.org/pub/2pqbusk7},\n\ - \ year = {2021}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.21428/92fbeb44.47175201 - issn: 2220-4806 - month: June - presentation-video: https://youtu.be/9r0zDJA8qbs - title: 'MapLooper: Live-looping of distributed gesture-to-sound mappings' - url: https://nime.pubpub.org/pub/2pqbusk7 - year: 2021 - - -- ENTRYTYPE: inproceedings - ID: NIME21_12 - abstract: 'Assessment of user experience (UX) is increasingly important in music - interaction evaluation, as witnessed in previous NIME reviews describing varied - and idiosyncratic evaluation strategies. This paper focuses on evaluations conducted - in the last four years of NIME (2017 to 2020), compares results to previous research, - and classifies evaluation types to describe how researchers approach and study - UX in NIME. While results of this review confirm patterns such as the prominence - of short-term, performer perspective evaluations, and the variety of evaluation - strategies used, they also show that UX-focused evaluations are typically exploratory - and limited to novice performers. 
Overall, these patterns indicate that current - UX evaluation strategies do not address dynamic factors such as skill development, - the evolution of the performer-instrument relationship, and hedonic and cognitive - aspects of UX. To address such limitations, we discuss a number of less common - tools developed within and outside of NIME that focus on dynamic aspects of UX, - potentially leading to more informative and meaningful evaluation insights.' - address: 'Shanghai, China' - articleno: 12 - author: 'Reimer, P. J. Charles and Wanderley, Marcelo M.' - bibtex: "@inproceedings{NIME21_12,\n abstract = {Assessment of user experience (UX)\ - \ is increasingly important in music interaction evaluation, as witnessed in previous\ - \ NIME reviews describing varied and idiosyncratic evaluation strategies. This\ - \ paper focuses on evaluations conducted in the last four years of NIME (2017\ - \ to 2020), compares results to previous research, and classifies evaluation types\ - \ to describe how researchers approach and study UX in NIME. While results of\ - \ this review confirm patterns such as the prominence of short-term, performer\ - \ perspective evaluations, and the variety of evaluation strategies used, they\ - \ also show that UX-focused evaluations are typically exploratory and limited\ - \ to novice performers. Overall, these patterns indicate that current UX evaluation\ - \ strategies do not address dynamic factors such as skill development, the evolution\ - \ of the performer-instrument relationship, and hedonic and cognitive aspects\ - \ of UX. To address such limitations, we discuss a number of less common tools\ - \ developed within and outside of NIME that focus on dynamic aspects of UX, potentially\ - \ leading to more informative and meaningful evaluation insights.},\n address\ - \ = {Shanghai, China},\n articleno = {12},\n author = {Reimer, P. J. 
Charles and\ - \ Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.807a000f},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/WTaee8NVtPg},\n\ - \ title = {Embracing Less Common Evaluation Strategies for Studying User Experience\ - \ in NIME},\n url = {https://nime.pubpub.org/pub/fidgs435},\n year = {2021}\n\ - }\n" + ID: Xambó2019 + abstract: 'In this paper, we present a workshop of physical computing applied to + NIME design based on science, technology, engineering, arts, and mathematics (STEAM) + education. The workshop is designed for master students with multidisciplinary + backgrounds. They are encouraged to work in teams from two university campuses + remotely connected through a portal space. The components of the workshop are + prototyping, music improvisation and reflective practice. We report the results + of this course, which show a positive impact on the students'' confidence in prototyping + and intention to continue in STEM fields. We also present the challenges and lessons + learned on how to improve the teaching of hybrid technologies and programming + skills in an interdisciplinary context across two locations, with the aim of satisfying + both beginners and experts. We conclude with a broader discussion on how these + new pedagogical perspectives can improve NIME-related courses.' + address: 'Porto Alegre, Brazil' + author: Anna Xambó and Sigurd Saue and Alexander Refsum Jensenius and Robin Støckert + and Oeyvind Brandtsegg + bibtex: "@inproceedings{Xambó2019,\n abstract = {In this paper, we present a workshop\ + \ of physical computing applied to NIME design based on science, technology, engineering,\ + \ arts, and mathematics (STEAM) education. The workshop is designed for master\ + \ students with multidisciplinary backgrounds. 
They are encouraged to work in\ + \ teams from two university campuses remotely connected through a portal space.\ + \ The components of the workshop are prototyping, music improvisation and reflective\ + \ practice. We report the results of this course, which show a positive impact\ + \ on the students' confidence in prototyping and intention to continue in STEM\ + \ fields. We also present the challenges and lessons learned on how to improve\ + \ the teaching of hybrid technologies and programming skills in an interdisciplinary\ + \ context across two locations, with the aim of satisfying both beginners and\ + \ experts. We conclude with a broader discussion on how these new pedagogical\ + \ perspectives can improve NIME-related courses.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Anna Xambó and Sigurd Saue and Alexander Refsum Jensenius\ + \ and Robin Støckert and Oeyvind Brandtsegg},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.3672932},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {216--221},\n publisher = {UFRGS},\n\ + \ title = {{NIME} Prototyping in Teams: A Participatory Approach to Teaching Physical\ + \ Computing},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper042.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.807a000f + doi: 10.5281/zenodo.3672932 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/WTaee8NVtPg - title: Embracing Less Common Evaluation Strategies for Studying User Experience - in NIME - url: https://nime.pubpub.org/pub/fidgs435 - year: 2021 + pages: 216--221 + publisher: UFRGS + title: 'NIME Prototyping in Teams: A Participatory Approach to Teaching Physical + Computing' + url: 
http://www.nime.org/proceedings/2019/nime2019_paper042.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_13 - abstract: 'To tackle digital musical instrument (DMI) longevity and the problem - of the second performer, we proposed the T-Stick Music Creation Project, a series - of musical commissions along with workshops, mentorship, and technical support, - meant to foment composition and performance using the T-Stick and provide an opportunity - to improve technical and pedagogical support for the instrument. Based on the - project’s outcomes, we describe three main contributions: our approach; the artistic - works produced; and analysis of these works demonstrating the T-Stick as actuator, - modulator, and data provider.' - address: 'Shanghai, China' - articleno: 13 - author: 'Fukuda, Takuto and Meneses, Eduardo and West, Travis and Wanderley, Marcelo - M.' - bibtex: "@inproceedings{NIME21_13,\n abstract = {To tackle digital musical instrument\ - \ (DMI) longevity and the problem of the second performer, we proposed the T-Stick\ - \ Music Creation Project, a series of musical commissions along with workshops,\ - \ mentorship, and technical support, meant to foment composition and performance\ - \ using the T-Stick and provide an opportunity to improve technical and pedagogical\ - \ support for the instrument. 
Based on the project’s outcomes, we describe three\ - \ main contributions: our approach; the artistic works produced; and analysis\ - \ of these works demonstrating the T-Stick as actuator, modulator, and data provider.},\n\ - \ address = {Shanghai, China},\n articleno = {13},\n author = {Fukuda, Takuto\ - \ and Meneses, Eduardo and West, Travis and Wanderley, Marcelo M.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.26f33210},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/tfOUMr3p4b4},\n title = {The\ - \ T-Stick Music Creation Project: An approach to building a creative community\ - \ around a DMI},\n url = {https://nime.pubpub.org/pub/7c4qdj4u},\n year = {2021}\n\ + ID: Meneses2019 + abstract: 'The increasing availability of accessible sensor technologies, single + board computers, and prototyping platforms have resulted in a growing number of + frameworks explicitly geared towards the design and construction of Digital and + Augmented Musical Instruments. Developing such instruments can be facilitated + by choosing the most suitable framework for each project. In the process of selecting + a framework for implementing an augmented guitar instrument, we have tested three + Linux-based open-source platforms that have been designed for real-time sensor + interfacing, audio processing, and synthesis. Factors such as acquisition latency, + workload measurements, documentation, and software implementation are compared + and discussed to determine the suitability of each environment for our particular + project.' 
+ address: 'Porto Alegre, Brazil' + author: Eduardo Meneses and Johnty Wang and Sergio Freire and Marcelo Wanderley + bibtex: "@inproceedings{Meneses2019,\n abstract = {The increasing availability of\ + \ accessible sensor technologies, single board computers, and prototyping platforms\ + \ have resulted in a growing number of frameworks explicitly geared towards the\ + \ design and construction of Digital and Augmented Musical Instruments. Developing\ + \ such instruments can be facilitated by choosing the most suitable framework\ + \ for each project. In the process of selecting a framework for implementing an\ + \ augmented guitar instrument, we have tested three Linux-based open-source platforms\ + \ that have been designed for real-time sensor interfacing, audio processing,\ + \ and synthesis. Factors such as acquisition latency, workload measurements, documentation,\ + \ and software implementation are compared and discussed to determine the suitability\ + \ of each environment for our particular project.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Eduardo Meneses and Johnty Wang and Sergio Freire and Marcelo\ + \ Wanderley},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672934},\n editor\ + \ = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {222--227},\n publisher = {UFRGS},\n title = {A Comparison of Open-Source\ + \ Linux Frameworks for an Augmented Musical Instrument Implementation},\n url\ + \ = {http://www.nime.org/proceedings/2019/nime2019_paper043.pdf},\n year = {2019}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.26f33210 + doi: 10.5281/zenodo.3672934 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/tfOUMr3p4b4 - title: 'The T-Stick Music Creation Project: An 
approach to building a creative community - around a DMI' - url: https://nime.pubpub.org/pub/7c4qdj4u - year: 2021 + pages: 222--227 + publisher: UFRGS + title: A Comparison of Open-Source Linux Frameworks for an Augmented Musical Instrument + Implementation + url: http://www.nime.org/proceedings/2019/nime2019_paper043.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_14 - abstract: 'This paper provides initial efforts in developing and evaluating a real-time - movement sonification framework for physical activity practice and learning. Reactive - Video provides an interactive, vision-based, adaptive video playback with auditory - feedback on users'' performance to better support when learning and practicing - new physical skills. We implement the sonification for auditory feedback design - by extending the Web Audio API framework. The current application focuses on Tai-Chi - performance and provides two main audio cues to users for several Tai Chi exercises. - We provide our design approach, implementation, and sound generation and mapping, - specifically for interactive systems with direct video manipulation. Our observations - reveal the relationship between the movement-to-sound mapping and characteristics - of the physical activity.' - address: 'Shanghai, China' - articleno: 14 - author: 'Cavdir, Doga and Clarke, Chris and Chiu, Patrick and Denoue, Laurent and - Kimber, Don' - bibtex: "@inproceedings{NIME21_14,\n abstract = {This paper provides initial efforts\ - \ in developing and evaluating a real-time movement sonification framework for\ - \ physical activity practice and learning. Reactive Video provides an interactive,\ - \ vision-based, adaptive video playback with auditory feedback on users' performance\ - \ to better support when learning and practicing new physical skills. We implement\ - \ the sonification for auditory feedback design by extending the Web Audio API\ - \ framework. 
The current application focuses on Tai-Chi performance and provides\ - \ two main audio cues to users for several Tai Chi exercises. We provide our design\ - \ approach, implementation, and sound generation and mapping, specifically for\ - \ interactive systems with direct video manipulation. Our observations reveal\ - \ the relationship between the movement-to-sound mapping and characteristics of\ - \ the physical activity.},\n address = {Shanghai, China},\n articleno = {14},\n\ - \ author = {Cavdir, Doga and Clarke, Chris and Chiu, Patrick and Denoue, Laurent\ - \ and Kimber, Don},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.eef53755},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/pbvZI80XgEU},\n\ - \ title = {Reactive Video: Movement Sonification for Learning Physical Activity\ - \ with Adaptive Video Playback},\n url = {https://nime.pubpub.org/pub/dzlsifz6},\n\ - \ year = {2021}\n}\n" + ID: MatusLerner2019 + abstract: 'During the twentieth century several Latin American nations (such as + Argentina, Brazil, Chile, Cuba and Mexico) have originated relevant antecedents + in the NIME field. Their innovative authors have interrelated musical composition, + lutherie, electronics and computing. This paper provides a panoramic view of their + original electronic instruments and experimental sound practices, as well as a + perspective of them regarding other inventions around the World.' + address: 'Porto Alegre, Brazil' + author: 'Lerner, Martin Matus' + bibtex: "@inproceedings{MatusLerner2019,\n abstract = {During the twentieth century\ + \ several Latin American nations (such as Argentina, Brazil, Chile, Cuba and Mexico)\ + \ have originated relevant antecedents in the NIME field. 
Their innovative authors\ + \ have interrelated musical composition, lutherie, electronics and computing.\ + \ This paper provides a panoramic view of their original electronic instruments\ + \ and experimental sound practices, as well as a perspective of them regarding\ + \ other inventions around the World.},\n address = {Porto Alegre, Brazil},\n author\ + \ = {Lerner, Martin Matus},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672936},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {228--233},\n publisher = {UFRGS},\n title = {Latin American\ + \ {NIME}s: Electronic Musical Instruments and Experimental Sound Devices in the\ + \ Twentieth Century},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper044.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.eef53755 + doi: 10.5281/zenodo.3672936 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/pbvZI80XgEU - title: 'Reactive Video: Movement Sonification for Learning Physical Activity with - Adaptive Video Playback' - url: https://nime.pubpub.org/pub/dzlsifz6 - year: 2021 + pages: 228--233 + publisher: UFRGS + title: 'Latin American NIMEs: Electronic Musical Instruments and Experimental Sound + Devices in the Twentieth Century' + url: http://www.nime.org/proceedings/2019/nime2019_paper044.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_15 - abstract: 'We present hyper-hybrid flute, a new interface which can be toggled between - its electronic mode and its acoustic mode. In its acoustic mode, the interface - is identical to the regular six-hole recorder. In its electronic mode, the interface - detects the player''s fingering and breath velocity and translates them to MIDI - messages. 
Specifically, it maps higher breath velocity to higher octaves, with - the modulo remainder controlling the microtonal pitch bend. This novel mapping - reproduces a highly realistic flute-playing experience. Furthermore, changing - the parameters easily augments the interface into a hyperinstrument that allows - the player to control microtones more expressively via breathing techniques.' - address: 'Shanghai, China' - articleno: 15 - author: 'Chin, Daniel and Zhang, Ian and Xia, Gus' - bibtex: "@inproceedings{NIME21_15,\n abstract = {We present hyper-hybrid flute,\ - \ a new interface which can be toggled between its electronic mode and its acoustic\ - \ mode. In its acoustic mode, the interface is identical to the regular six-hole\ - \ recorder. In its electronic mode, the interface detects the player's fingering\ - \ and breath velocity and translates them to MIDI messages. Specifically, it maps\ - \ higher breath velocity to higher octaves, with the modulo remainder controlling\ - \ the microtonal pitch bend. This novel mapping reproduces a highly realistic\ - \ flute-playing experience. 
Furthermore, changing the parameters easily augments\ - \ the interface into a hyperinstrument that allows the player to control microtones\ - \ more expressively via breathing techniques.},\n address = {Shanghai, China},\n\ - \ articleno = {15},\n author = {Chin, Daniel and Zhang, Ian and Xia, Gus},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.c09d91be},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/UIqsYK9F4xo},\n title = {Hyper-hybrid\ - \ Flute: Simulating and Augmenting How Breath Affects Octave and Microtone},\n\ - \ url = {https://nime.pubpub.org/pub/eshr},\n year = {2021}\n}\n" + ID: Reid2019 + abstract: 'This paper presents four years of development in performance and compositional + practice on an electronically augmented trumpet called MIGSI. Discussion is focused + on conceptual and technical approaches to data mapping, sonic interaction, and + composition that are inspired by philosophical questions of time: what is now? + Is time linear or multi-directional? Can we operate in multiple modes of temporal + perception simultaneously? A number of mapping strategies are presented which + explore these ideas through the manipulation of temporal separation between user + input and sonic output. In addition to presenting technical progress, this paper + will introduce a body of original repertoire composed for MIGSI, in order to illustrate + how these tools and approaches have been utilized in live performance and how + they may find use in other creative applications.' + address: 'Porto Alegre, Brazil' + author: Sarah Reid and Ryan Gaston and Ajay Kapur + bibtex: "@inproceedings{Reid2019,\n abstract = {This paper presents four years of\ + \ development in performance and compositional practice on an electronically augmented\ + \ trumpet called MIGSI. 
Discussion is focused on conceptual and technical approaches\ + \ to data mapping, sonic interaction, and composition that are inspired by philosophical\ + \ questions of time: what is now? Is time linear or multi-directional? Can we\ + \ operate in multiple modes of temporal perception simultaneously? A number of\ + \ mapping strategies are presented which explore these ideas through the manipulation\ + \ of temporal separation between user input and sonic output. In addition to presenting\ + \ technical progress, this paper will introduce a body of original repertoire\ + \ composed for MIGSI, in order to illustrate how these tools and approaches have\ + \ been utilized in live performance and how they may find use in other creative\ + \ applications.},\n address = {Porto Alegre, Brazil},\n author = {Sarah Reid and\ + \ Ryan Gaston and Ajay Kapur},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672940},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {234--239},\n publisher = {UFRGS},\n title = {Perspectives\ + \ on Time: performance practice, mapping strategies, \\& composition with {MIGSI}},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_paper045.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.c09d91be + doi: 10.5281/zenodo.3672940 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/UIqsYK9F4xo - title: 'Hyper-hybrid Flute: Simulating and Augmenting How Breath Affects Octave - and Microtone' - url: https://nime.pubpub.org/pub/eshr - year: 2021 + pages: 234--239 + publisher: UFRGS + title: 'Perspectives on Time: performance practice, mapping strategies, & composition + with MIGSI' + url: 
http://www.nime.org/proceedings/2019/nime2019_paper045.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_16 - abstract: 'This paper examines grid interfaces which are currently used in many - musical devices and instruments. This type of interface concept has been rooted - in the NIME community since the early 2000s. We provide an overview of research - projects and commercial products and conducted an expert interview as well as - an online survey. In summary this work shares: (1) an overview on grid controller - research, (2) a set of three usability issues deduced by a multi method approach, - and (3) an evaluation of user perceptions regarding persistent usability issues - and common reasons for the use of grid interfaces.' - address: 'Shanghai, China' - articleno: 16 - author: 'Rossmy, Beat and Wiethoff, Alexander' - bibtex: "@inproceedings{NIME21_16,\n abstract = {This paper examines grid interfaces\ - \ which are currently used in many musical devices and instruments. This type\ - \ of interface concept has been rooted in the NIME community since the early 2000s.\ - \ We provide an overview of research projects and commercial products and conducted\ - \ an expert interview as well as an online survey. 
In summary this work shares:\ - \ (1) an overview on grid controller research, (2) a set of three usability issues\ - \ deduced by a multi method approach, and (3) an evaluation of user perceptions\ - \ regarding persistent usability issues and common reasons for the use of grid\ - \ interfaces.},\n address = {Shanghai, China},\n articleno = {16},\n author =\ - \ {Rossmy, Beat and Wiethoff, Alexander},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.6a2451e6},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/GuPIz2boJwA},\n\ - \ title = {Musical Grid Interfaces: Past, Present, and Future Directions},\n url\ - \ = {https://nime.pubpub.org/pub/grid-past-present-future},\n year = {2021}\n\ - }\n" + ID: Lamounier2019 + abstract: 'The present work explores the design of multimodal interfaces that capture + hand gestures and promote interactions between dance, music and wearable technologic + garment. We aim at studying the design strategies used to interface music to other + domains of the performance, in special, the application of wearable technologies + into music performances. The project describes the development of the music and + wearable interfaces, which comprise a hand interface and a mechanical actuator + attached to the dancer''s dress. The performance resulted from the study is inspired + in the butoh dances and attempts to add a technological poetic as music-dance-wearable + interactions to the traditional dialogue between dance and music. ' + address: 'Porto Alegre, Brazil' + author: Natacha Lamounier and Luiz Naveda and Adriana Bicalho + bibtex: "@inproceedings{Lamounier2019,\n abstract = {The present work explores the\ + \ design of multimodal interfaces that capture hand gestures and promote interactions\ + \ between dance, music and wearable technologic garment. 
We aim at studying the\ + \ design strategies used to interface music to other domains of the performance,\ + \ in special, the application of wearable technologies into music performances.\ + \ The project describes the development of the music and wearable interfaces,\ + \ which comprise a hand interface and a mechanical actuator attached to the dancer's\ + \ dress. The performance resulted from the study is inspired in the butoh dances\ + \ and attempts to add a technological poetic as music-dance-wearable interactions\ + \ to the traditional dialogue between dance and music. },\n address = {Porto Alegre,\ + \ Brazil},\n author = {Natacha Lamounier and Luiz Naveda and Adriana Bicalho},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672942},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {240--245},\n\ + \ publisher = {UFRGS},\n title = {The design of technological interfaces for interactions\ + \ between music, dance and garment movements},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper046.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.6a2451e6 + doi: 10.5281/zenodo.3672942 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/GuPIz2boJwA - title: 'Musical Grid Interfaces: Past, Present, and Future Directions' - url: https://nime.pubpub.org/pub/grid-past-present-future - year: 2021 + pages: 240--245 + publisher: UFRGS + title: 'The design of technological interfaces for interactions between music, dance + and garment movements' + url: http://www.nime.org/proceedings/2019/nime2019_paper046.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_17 - abstract: 'Musical grid interfaces such as the monome grid have developed into standard - 
interfaces for musical equipment over the last 15 years. However, the types of - possible interactions more or less remained the same, only expanding grid capabilities - by external IO elements. Therefore, we propose to transfer capacitive touch technology - to grid devices to expand their input capabilities by combining tangible and capacitive-touch - based interaction paradigms. This enables to keep the generic nature of grid interfaces - which is a key feature for many users. In this paper we present the TouchGrid - concept and share our proof-of-concept implementation as well as an expert evaluation - regarding the general concept of touch interaction used on grid devices. TouchGrid - provides swipe and bezel interaction derived from smart phone interfaces to allow - navigation between applications and access to menu systems in a familiar way.' - address: 'Shanghai, China' - articleno: 17 - author: 'Rossmy, Beat and Unger, Sebastian and Wiethoff, Alexander' - bibtex: "@inproceedings{NIME21_17,\n abstract = {Musical grid interfaces such as\ - \ the monome grid have developed into standard interfaces for musical equipment\ - \ over the last 15 years. However, the types of possible interactions more or\ - \ less remained the same, only expanding grid capabilities by external IO elements.\ - \ Therefore, we propose to transfer capacitive touch technology to grid devices\ - \ to expand their input capabilities by combining tangible and capacitive-touch\ - \ based interaction paradigms. This enables to keep the generic nature of grid\ - \ interfaces which is a key feature for many users. In this paper we present the\ - \ TouchGrid concept and share our proof-of-concept implementation as well as an\ - \ expert evaluation regarding the general concept of touch interaction used on\ - \ grid devices. 
TouchGrid provides swipe and bezel interaction derived from smart\ - \ phone interfaces to allow navigation between applications and access to menu\ - \ systems in a familiar way.},\n address = {Shanghai, China},\n articleno = {17},\n\ - \ author = {Rossmy, Beat and Unger, Sebastian and Wiethoff, Alexander},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.303223db},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/ti2h_WK5NeU},\n title = {TouchGrid\ - \ – Combining Touch Interaction with Musical Grid Interfaces},\n url = {https://nime.pubpub.org/pub/touchgrid},\n\ - \ year = {2021}\n}\n" + ID: AlarconDiaz2019 + abstract: 'INTIMAL is a physical virtual embodied system for relational listening + that integrates body movement, oral archives, and voice expression through telematic + improvisatory performance in migratory contexts. It has been informed by nine + Colombian migrant women who express their migratory journeys through free body + movement, voice and spoken word improvisation. These improvisations have been + recorded using Motion Capture, in order to develop interfaces for co-located and + telematic interactions for the sharing of narratives of migration. In this paper, + using data from the Motion Capture experiments, we are exploring two specific + movements from improvisers: displacements on space (walking, rotating), and breathing + data. Here we envision how co-relations between walking and breathing, might be + further studied to implement interfaces that help the making of connections between + place, and the feeling of presence for people in-between distant locations.' 
+ address: 'Porto Alegre, Brazil' + author: Ximena Alarcon Diaz and Victor Evaristo Gonzalez Sanchez and Cagri Erdem + bibtex: "@inproceedings{AlarconDiaz2019,\n abstract = {INTIMAL is a physical virtual\ + \ embodied system for relational listening that integrates body movement, oral\ + \ archives, and voice expression through telematic improvisatory performance in\ + \ migratory contexts. It has been informed by nine Colombian migrant women who\ + \ express their migratory journeys through free body movement, voice and spoken\ + \ word improvisation. These improvisations have been recorded using Motion Capture,\ + \ in order to develop interfaces for co-located and telematic interactions for\ + \ the sharing of narratives of migration. In this paper, using data from the Motion\ + \ Capture experiments, we are exploring two specific movements from improvisers:\ + \ displacements on space (walking, rotating), and breathing data. Here we envision\ + \ how co-relations between walking and breathing, might be further studied to\ + \ implement interfaces that help the making of connections between place, and\ + \ the feeling of presence for people in-between distant locations.},\n address\ + \ = {Porto Alegre, Brazil},\n author = {Ximena Alarcon Diaz and Victor Evaristo\ + \ Gonzalez Sanchez and Cagri Erdem},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672944},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {246--249},\n publisher = {UFRGS},\n title = {{INTIMAL}:\ + \ Walking to Find Place, Breathing to Feel Presence},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper047.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.303223db + doi: 10.5281/zenodo.3672944 + editor: Marcelo Queiroz and Anna Xambó Sedó 
issn: 2220-4806 month: June - presentation-video: https://youtu.be/ti2h_WK5NeU - title: TouchGrid – Combining Touch Interaction with Musical Grid Interfaces - url: https://nime.pubpub.org/pub/touchgrid - year: 2021 + pages: 246--249 + publisher: UFRGS + title: 'INTIMAL: Walking to Find Place, Breathing to Feel Presence' + url: http://www.nime.org/proceedings/2019/nime2019_paper047.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_18 - abstract: 'Composing is a neglected area of music education. To increase participation, - many technologies provide open-ended interfaces to motivate child autodidactic - use, drawing influence from Papert’s LOGO philosophy to support children’s learning - through play. This paper presents a case study examining which interactions with - Codetta, a LOGO-inspired, block-based music platform, supports children’s creativity - in music composition. Interaction logs were collected from 20 children and correlated - against socially-validated creativity scores. To conclude, we recommend that the - transition between low-level edits and high-level processes should be carefully - scaffolded.' - address: 'Shanghai, China' - articleno: 18 - author: 'Ford, Corey and Bryan-Kinns, Nick and Nash, Chris' - bibtex: "@inproceedings{NIME21_18,\n abstract = {Composing is a neglected area of\ - \ music education. To increase participation, many technologies provide open-ended\ - \ interfaces to motivate child autodidactic use, drawing influence from Papert’s\ - \ LOGO philosophy to support children’s learning through play. This paper presents\ - \ a case study examining which interactions with Codetta, a LOGO-inspired, block-based\ - \ music platform, supports children’s creativity in music composition. Interaction\ - \ logs were collected from 20 children and correlated against socially-validated\ - \ creativity scores. 
To conclude, we recommend that the transition between low-level\ - \ edits and high-level processes should be carefully scaffolded.},\n address =\ - \ {Shanghai, China},\n articleno = {18},\n author = {Ford, Corey and Bryan-Kinns,\ - \ Nick and Nash, Chris},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.e83deee9},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/XpMiDWrxXMU},\n\ - \ title = {Creativity in Children's Digital Music Composition},\n url = {https://nime.pubpub.org/pub/ker5w948},\n\ - \ year = {2021}\n}\n" + ID: Sardana2019 + abstract: "Locus is a NIME designed specifically for an interactive, immersive high\ + \ density loudspeaker array environment. The system is based on a pointing mechanism\ + \ to interact with a sound scene comprising 128 speakers. Users can point anywhere\ + \ to interact with the system, and the spatial interaction utilizes motion capture,\ + \ so it does not require a screen. Instead, it is completely controlled via hand\ + \ gestures using a glove that is populated with motion-tracking markers.\n\nThe\ + \ main purpose of this system is to offer intuitive physical interaction with\ + \ the perimeter-based spatial sound sources. Further, its goal is to minimize\ + \ user-worn technology and thereby enhance freedom of motion by utilizing environmental\ + \ sensing devices, such as motion capture cameras or infrared sensors. The ensuing\ + \ creativity enabling technology is applicable to a broad array of possible scenarios,\ + \ from researching limits of human spatial hearing perception to facilitating\ + \ learning and artistic performances, including dance. In this paper, we describe\ + \ our NIME design and implementation, its preliminary assessment, and offer a\ + \ Unity-based toolkit to facilitate its broader deployment and adoption." 
+ address: 'Porto Alegre, Brazil' + author: Disha Sardana and Woohun Joo and Ivica Ico Bukvic and Greg Earle + bibtex: "@inproceedings{Sardana2019,\n abstract = {Locus is a NIME designed specifically\ + \ for an interactive, immersive high density loudspeaker array environment. The\ + \ system is based on a pointing mechanism to interact with a sound scene comprising\ + \ 128 speakers. Users can point anywhere to interact with the system, and the\ + \ spatial interaction utilizes motion capture, so it does not require a screen.\ + \ Instead, it is completely controlled via hand gestures using a glove that is\ + \ populated with motion-tracking markers.\n\nThe main purpose of this system is\ + \ to offer intuitive physical interaction with the perimeter-based spatial sound\ + \ sources. Further, its goal is to minimize user-worn technology and thereby enhance\ + \ freedom of motion by utilizing environmental sensing devices, such as motion\ + \ capture cameras or infrared sensors. The ensuing creativity enabling technology\ + \ is applicable to a broad array of possible scenarios, from researching limits\ + \ of human spatial hearing perception to facilitating learning and artistic performances,\ + \ including dance. 
In this paper, we describe our NIME design and implementation,\ + \ its preliminary assessment, and offer a Unity-based toolkit to facilitate its\ + \ broader deployment and adoption.},\n address = {Porto Alegre, Brazil},\n author\ + \ = {Disha Sardana and Woohun Joo and Ivica Ico Bukvic and Greg Earle},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.3672946},\n editor = {Marcelo Queiroz and\ + \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {250--255},\n\ + \ publisher = {UFRGS},\n title = {Introducing Locus: a {NIME} for Immersive Exocentric\ + \ Aural Environments},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper048.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.e83deee9 + doi: 10.5281/zenodo.3672946 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/XpMiDWrxXMU - title: Creativity in Children's Digital Music Composition - url: https://nime.pubpub.org/pub/ker5w948 - year: 2021 + pages: 250--255 + publisher: UFRGS + title: 'Introducing Locus: a NIME for Immersive Exocentric Aural Environments' + url: http://www.nime.org/proceedings/2019/nime2019_paper048.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_19 - abstract: 'Various studies have shown that haptic interfaces could enhance the learning - efficiency in music learning, but most existing studies focus on training motor - skills of instrument playing such as finger motions. In this paper, we present - a wearable haptic device to guide diaphragmatic breathing, which can be used in - vocal training as well as the learning of wind instruments. The device is a wearable - strap vest, consisting of a spinal exoskeleton on the back for inhalation and - an elastic belt around the waist for exhalation. 
We first conducted case studies - to assess how convenient and comfortable to wear the device, and then evaluate - its effectiveness in guiding rhythm and breath. Results show users'' acceptance - of the haptic interface and the potential of haptic guidance in vocal training.' - address: 'Shanghai, China' - articleno: 19 - author: 'Li, Yinmiao and Piao, Ziyue and Xia, Gus' - bibtex: "@inproceedings{NIME21_19,\n abstract = {Various studies have shown that\ - \ haptic interfaces could enhance the learning efficiency in music learning, but\ - \ most existing studies focus on training motor skills of instrument playing such\ - \ as finger motions. In this paper, we present a wearable haptic device to guide\ - \ diaphragmatic breathing, which can be used in vocal training as well as the\ - \ learning of wind instruments. The device is a wearable strap vest, consisting\ - \ of a spinal exoskeleton on the back for inhalation and an elastic belt around\ - \ the waist for exhalation. We first conducted case studies to assess how convenient\ - \ and comfortable to wear the device, and then evaluate its effectiveness in guiding\ - \ rhythm and breath. Results show users' acceptance of the haptic interface and\ - \ the potential of haptic guidance in vocal training.},\n address = {Shanghai,\ - \ China},\n articleno = {19},\n author = {Li, Yinmiao and Piao, Ziyue and Xia,\ - \ Gus},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.6d342615},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/-t-u0V-27ng},\n title\ - \ = {A Wearable Haptic Interface for Breath Guidance in Vocal Training},\n url\ - \ = {https://nime.pubpub.org/pub/cgi7t0ta},\n year = {2021}\n}\n" + ID: Ho2019 + abstract: 'This paper presents an ongoing process of examining and reinventing the + Guqin, to forge a contemporary engagement with this unique traditional Chinese + string instrument. 
The SlowQin is both a hybrid resemblance of the Guqin and a + fully functioning wireless interface to interact with computer software. It has + been developed and performed during the last decade. Instead of aiming for virtuosic + perfection of playing the instrument, SlowQin emphasizes the openness for continuously + rethinking and reinventing the Guqin''s possibilities. Through a combination of + conceptual work and practical production, Echo Ho''s SlowQin project works as + an experimental twist on Historically Informed Performance, with the motivation + of conveying artistic gestures that tackle philosophical, ideological, and socio-political + subjects embedded in our living environment in globalised conditions. In particular, + this paper touches the history of the Guqin, gives an overview of the technical + design concepts of the instrument, and discusses the aesthetical approaches of + the SlowQin performances that have been realised so far.' + address: 'Porto Alegre, Brazil' + author: Echo Ho and Prof. Dr. Phil. Alberto de Campo and Hannes Hoelzl + bibtex: "@inproceedings{Ho2019,\n abstract = {This paper presents an ongoing process\ + \ of examining and reinventing the Guqin, to forge a contemporary engagement with\ + \ this unique traditional Chinese string instrument. The SlowQin is both a hybrid\ + \ resemblance of the Guqin and a fully functioning wireless interface to interact\ + \ with computer software. It has been developed and performed during the last\ + \ decade. Instead of aiming for virtuosic perfection of playing the instrument,\ + \ SlowQin emphasizes the openness for continuously rethinking and reinventing\ + \ the Guqin's possibilities. 
Through a combination of conceptual work and practical\ + \ production, Echo Ho's SlowQin project works as an experimental twist on Historically\ + \ Informed Performance, with the motivation of conveying artistic gestures that\ + \ tackle philosophical, ideological, and socio-political subjects embedded in\ + \ our living environment in globalised conditions. In particular, this paper touches\ + \ the history of the Guqin, gives an overview of the technical design concepts\ + \ of the instrument, and discusses the aesthetical approaches of the SlowQin performances\ + \ that have been realised so far.},\n address = {Porto Alegre, Brazil},\n author\ + \ = {Echo Ho and Prof. Dr. Phil. Alberto de Campo and Hannes Hoelzl},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.3672948},\n editor = {Marcelo Queiroz and\ + \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {256--259},\n\ + \ publisher = {UFRGS},\n title = {The SlowQin: An Interdisciplinary Approach to\ + \ reinventing the Guqin},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper049.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.6d342615 + doi: 10.5281/zenodo.3672948 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/-t-u0V-27ng - title: A Wearable Haptic Interface for Breath Guidance in Vocal Training - url: https://nime.pubpub.org/pub/cgi7t0ta - year: 2021 + pages: 256--259 + publisher: UFRGS + title: 'The SlowQin: An Interdisciplinary Approach to reinventing the Guqin' + url: http://www.nime.org/proceedings/2019/nime2019_paper049.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_2 - abstract: 'Ambient sounds such as breaking waves or rustling leaves are sometimes - used in music recording, composition and performance. 
However, as these sounds - lack a precise pitch, they can not be used melodically. This work describes Aeolis, - a virtual instrument producing pitched tones from a real-time ambient sound input - using subtractive synthesis. The produced tones retain the identifiable timbres - of the ambient sounds. Tones generated using input sounds from various environments, - such as sea waves, leaves rustle and traffic noise, are analyzed. A configuration - for a live in-situ performance is described, consisting of live streaming the - produced sounds. In this configuration, the environment itself acts as a ‘performer’ - of sorts, alongside the Aeolis player, providing both real-time input signals - and complementary visual cues.' - address: 'Shanghai, China' - articleno: 2 - author: 'Arbel, Lior' - bibtex: "@inproceedings{NIME21_2,\n abstract = {Ambient sounds such as breaking\ - \ waves or rustling leaves are sometimes used in music recording, composition\ - \ and performance. However, as these sounds lack a precise pitch, they can not\ - \ be used melodically. This work describes Aeolis, a virtual instrument producing\ - \ pitched tones from a real-time ambient sound input using subtractive synthesis.\ - \ The produced tones retain the identifiable timbres of the ambient sounds. Tones\ - \ generated using input sounds from various environments, such as sea waves, leaves\ - \ rustle and traffic noise, are analyzed. A configuration for a live in-situ performance\ - \ is described, consisting of live streaming the produced sounds. 
In this configuration,\ - \ the environment itself acts as a ‘performer’ of sorts, alongside the Aeolis\ - \ player, providing both real-time input signals and complementary visual cues.},\n\ - \ address = {Shanghai, China},\n articleno = {2},\n author = {Arbel, Lior},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.64f66047},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/C0WEeaYy0tQ},\n title\ - \ = {Aeolis: A Virtual Instrument Producing Pitched Tones With Soundscape Timbres},\n\ - \ url = {https://nime.pubpub.org/pub/c3w33wya},\n year = {2021}\n}\n" + ID: Martin2019 + abstract: 'This paper is about creating digital musical instruments where a predictive + neural network model is integrated into the interactive system. Rather than predicting + symbolic music (e.g., MIDI notes), we suggest that predicting future control data + from the user and precise temporal information can lead to new and interesting + interactive possibilities. We propose that a mixture density recurrent neural + network (MDRNN) is an appropriate model for this task. The predictions can be + used to fill-in control data when the user stops performing, or as a kind of filter + on the user''s own input. We present an interactive MDRNN prediction server that + allows rapid prototyping of new NIMEs featuring predictive musical interaction + by recording datasets, training MDRNN models, and experimenting with interaction + modes. We illustrate our system with several example NIMEs applying this idea. + Our evaluation shows that real-time predictive interaction is viable even on single-board + computers and that small models are appropriate for small datasets.' 
+ address: 'Porto Alegre, Brazil' + author: Charles Patrick Martin and Jim Torresen + bibtex: "@inproceedings{Martin2019,\n abstract = {This paper is about creating digital\ + \ musical instruments where a predictive neural network model is integrated into\ + \ the interactive system. Rather than predicting symbolic music (e.g., MIDI notes),\ + \ we suggest that predicting future control data from the user and precise temporal\ + \ information can lead to new and interesting interactive possibilities. We propose\ + \ that a mixture density recurrent neural network (MDRNN) is an appropriate model\ + \ for this task. The predictions can be used to fill-in control data when the\ + \ user stops performing, or as a kind of filter on the user's own input. We present\ + \ an interactive MDRNN prediction server that allows rapid prototyping of new\ + \ NIMEs featuring predictive musical interaction by recording datasets, training\ + \ MDRNN models, and experimenting with interaction modes. We illustrate our system\ + \ with several example NIMEs applying this idea. 
Our evaluation shows that real-time\ + \ predictive interaction is viable even on single-board computers and that small\ + \ models are appropriate for small datasets.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Charles Patrick Martin and Jim Torresen},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672952},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {260--265},\n publisher = {UFRGS},\n\ + \ title = {An Interactive Musical Prediction System with Mixture Density Recurrent\ + \ Neural Networks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper050.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.64f66047 + doi: 10.5281/zenodo.3672952 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/C0WEeaYy0tQ - title: 'Aeolis: A Virtual Instrument Producing Pitched Tones With Soundscape Timbres' - url: https://nime.pubpub.org/pub/c3w33wya - year: 2021 + pages: 260--265 + publisher: UFRGS + title: An Interactive Musical Prediction System with Mixture Density Recurrent Neural + Networks + url: http://www.nime.org/proceedings/2019/nime2019_paper050.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_20 - abstract: 'The development of technologies for acquisition and display gives access - to a large variety of volumetric (3D) textures, either synthetic or obtained through - tomography. They constitute extremely rich data which is usually explored for - informative purposes, in medical or engineering contexts. We believe that this - exploration has a strong potential for musical expression. To that extent, we - propose a design space for the musical exploration of volumetric textures. 
We - describe the challenges for its implementation in Virtual and Mixed-Reality and - we present a case study with an instrument called the Volume Sequencer which we - analyse using your design space. Finally, we evaluate the impact on expressive - exploration of two dimensions, namely the amount of visual feedback and the selection - variability.' - address: 'Shanghai, China' - articleno: 20 - author: 'Berthaut, Florent' - bibtex: "@inproceedings{NIME21_20,\n abstract = {The development of technologies\ - \ for acquisition and display gives access to a large variety of volumetric (3D)\ - \ textures, either synthetic or obtained through tomography. They constitute extremely\ - \ rich data which is usually explored for informative purposes, in medical or\ - \ engineering contexts. We believe that this exploration has a strong potential\ - \ for musical expression. To that extent, we propose a design space for the musical\ - \ exploration of volumetric textures. We describe the challenges for its implementation\ - \ in Virtual and Mixed-Reality and we present a case study with an instrument\ - \ called the Volume Sequencer which we analyse using your design space. 
Finally,\ - \ we evaluate the impact on expressive exploration of two dimensions, namely the\ - \ amount of visual feedback and the selection variability.},\n address = {Shanghai,\ - \ China},\n articleno = {20},\n author = {Berthaut, Florent},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.6607d04f},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/C9EiA3TSUag},\n title = {Musical Exploration\ - \ of Volumetric Textures in Mixed and Virtual Reality},\n url = {https://nime.pubpub.org/pub/sqceyucq},\n\ - \ year = {2021}\n}\n" + ID: Bazoge2019 + abstract: 'The paper presents the electronic music performance project Vis Insita + implementing the design of experimental instrumental interfaces based on optical + motion capture technology with passive infrared markers (MoCap), and the analysis + of their use in a real scenic presentation context. Because of MoCap''s predisposition + to capture the movements of the body, a lot of research and musical applications + in the performing arts concern dance or the sonification of gesture. For our research, + we wanted to move away from the capture of the human body to analyse the possibilities + of a kinetic object handled by a performer, both in terms of musical expression, + but also in the broader context of a multimodal scenic interpretation.' + address: 'Porto Alegre, Brazil' + author: Nicolas Bazoge and Ronan Gaugne and Florian Nouviale and Valérie Gouranton + and Bruno Bossis + bibtex: "@inproceedings{Bazoge2019,\n abstract = {The paper presents the electronic\ + \ music performance project Vis Insita implementing the design of experimental\ + \ instrumental interfaces based on optical motion capture technology with passive\ + \ infrared markers (MoCap), and the analysis of their use in a real scenic presentation\ + \ context. 
Because of MoCap's predisposition to capture the movements of the body,\ + \ a lot of research and musical applications in the performing arts concern dance\ + \ or the sonification of gesture. For our research, we wanted to move away from\ + \ the capture of the human body to analyse the possibilities of a kinetic object\ + \ handled by a performer, both in terms of musical expression, but also in the\ + \ broader context of a multimodal scenic interpretation.},\n address = {Porto\ + \ Alegre, Brazil},\n author = {Nicolas Bazoge and Ronan Gaugne and Florian Nouviale\ + \ and Valérie Gouranton and Bruno Bossis},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672954},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {266--271},\n publisher = {UFRGS},\n title = {Expressive\ + \ potentials of motion capture in musical performance},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper051.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.6607d04f + doi: 10.5281/zenodo.3672954 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/C9EiA3TSUag - title: Musical Exploration of Volumetric Textures in Mixed and Virtual Reality - url: https://nime.pubpub.org/pub/sqceyucq - year: 2021 + pages: 266--271 + publisher: UFRGS + title: Expressive potentials of motion capture in musical performance + url: http://www.nime.org/proceedings/2019/nime2019_paper051.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_21 - abstract: 'The Girls Electronic Arts Retreat (GEAR) is a STEAM summer camp for ages - 8 - 11. In this paper, we compare and contrast lessons from the first two iterations - of GEAR, including one in-person and one remote session. 
We introduce our Teaching - Interfaces for Musical Expression (TIME) framework and use our analyses to compose - a list of best practices in TIME development and implementation.' - address: 'Shanghai, China' - articleno: 21 - author: 'Aresty, Abby and Gibson, Rachel' - bibtex: "@inproceedings{NIME21_21,\n abstract = {The Girls Electronic Arts Retreat\ - \ (GEAR) is a STEAM summer camp for ages 8 - 11. In this paper, we compare and\ - \ contrast lessons from the first two iterations of GEAR, including one in-person\ - \ and one remote session. We introduce our Teaching Interfaces for Musical Expression\ - \ (TIME) framework and use our analyses to compose a list of best practices in\ - \ TIME development and implementation.},\n address = {Shanghai, China},\n articleno\ - \ = {21},\n author = {Aresty, Abby and Gibson, Rachel},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.25757aca},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/8qeFjNGaEHc},\n title = {Changing GEAR:\ - \ The Girls Electronic Arts Retreat's Teaching Interfaces for Musical Expression},\n\ - \ url = {https://nime.pubpub.org/pub/8lop0zj4},\n year = {2021}\n}\n" + ID: VanTroyer2019 + abstract: 'This paper explores the potential of image-to-image translation techniques + in aiding the design of new hardware-based musical interfaces such as MIDI keyboard, + grid-based controller, drum machine, and analog modular synthesizers. We collected + an extensive image database of such interfaces and implemented image-to-image + translation techniques using variants of Generative Adversarial Networks. The + created models learn the mapping between input and output images using a training + set of either paired or unpaired images. 
We qualitatively assess the visual outcomes + based on three image-to-image translation models: reconstructing interfaces from + edge maps, and collection style transfers based on two image sets: visuals of + mosaic tile patterns and geometric abstract two-dimensional arts. This paper aims + to demonstrate that synthesizing interface layouts based on image-to-image translation + techniques can yield insights for researchers, musicians, music technology industrial + designers, and the broader NIME community.' + address: 'Porto Alegre, Brazil' + author: Akito Van Troyer and Rebecca Kleinberger + bibtex: "@inproceedings{VanTroyer2019,\n abstract = {This paper explores the potential\ + \ of image-to-image translation techniques in aiding the design of new hardware-based\ + \ musical interfaces such as MIDI keyboard, grid-based controller, drum machine,\ + \ and analog modular synthesizers. We collected an extensive image database of\ + \ such interfaces and implemented image-to-image translation techniques using\ + \ variants of Generative Adversarial Networks. The created models learn the mapping\ + \ between input and output images using a training set of either paired or unpaired\ + \ images. We qualitatively assess the visual outcomes based on three image-to-image\ + \ translation models: reconstructing interfaces from edge maps, and collection\ + \ style transfers based on two image sets: visuals of mosaic tile patterns and\ + \ geometric abstract two-dimensional arts. 
This paper aims to demonstrate that\ + \ synthesizing interface layouts based on image-to-image translation techniques\ + \ can yield insights for researchers, musicians, music technology industrial designers,\ + \ and the broader NIME community.},\n address = {Porto Alegre, Brazil},\n author\ + \ = {Akito Van Troyer and Rebecca Kleinberger},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.3672956},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {272--277},\n publisher = {UFRGS},\n\ + \ title = {From Mondrian to Modular Synth: Rendering {NIME} using Generative Adversarial\ + \ Networks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper052.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.25757aca + doi: 10.5281/zenodo.3672956 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/8qeFjNGaEHc - title: 'Changing GEAR: The Girls Electronic Arts Retreat''s Teaching Interfaces - for Musical Expression' - url: https://nime.pubpub.org/pub/8lop0zj4 - year: 2021 + pages: 272--277 + publisher: UFRGS + title: 'From Mondrian to Modular Synth: Rendering NIME using Generative Adversarial + Networks' + url: http://www.nime.org/proceedings/2019/nime2019_paper052.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_22 - abstract: 'The praxis of using detailed visual models to illustrate complex ideas - is widely used in the sciences but less so in music theory. Taking the composer’s - notes as a starting point, we have developed a complete interactive 3D model of - Grisey’s Talea (1986). 
Our model presents a novel approach to music education - and theory by making understanding of complex musical structures accessible to - students and non-musicians, particularly those who struggle with traditional means - of learning or whose mode of learning is predominantly visual. The model builds - on the foundations of 1) the historical associations between visual and musical - arts and those concerning spectralists in particular 2) evidence of recurring - cross-modal associations in the general population and consistent associations - for individual synesthetes. Research into educational uses of the model is a topic - for future exploration.' - address: 'Shanghai, China' - articleno: 22 - author: 'Andersen, Anne Sophie and Kwan, Derek' - bibtex: "@inproceedings{NIME21_22,\n abstract = {The praxis of using detailed visual\ - \ models to illustrate complex ideas is widely used in the sciences but less so\ - \ in music theory. Taking the composer’s notes as a starting point, we have developed\ - \ a complete interactive 3D model of Grisey’s Talea (1986). Our model presents\ - \ a novel approach to music education and theory by making understanding of complex\ - \ musical structures accessible to students and non-musicians, particularly those\ - \ who struggle with traditional means of learning or whose mode of learning is\ - \ predominantly visual. The model builds on the foundations of 1) the historical\ - \ associations between visual and musical arts and those concerning spectralists\ - \ in particular 2) evidence of recurring cross-modal associations in the general\ - \ population and consistent associations for individual synesthetes. 
Research\ - \ into educational uses of the model is a topic for future exploration.},\n address\ - \ = {Shanghai, China},\n articleno = {22},\n author = {Andersen, Anne Sophie and\ - \ Kwan, Derek},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.27d09832},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/PGYOkFjyrek},\n\ - \ title = {Grisey’s 'Talea': Musical Representation As An Interactive 3D Map},\n\ - \ url = {https://nime.pubpub.org/pub/oiwz8bb7},\n year = {2021}\n}\n" + ID: Pardue2019 + abstract: 'When designing an augmented acoustic instrument, it is often of interest + to retain an instrument''s sound quality and nuanced response while leveraging + the richness of digital synthesis. Digital audio has traditionally been generated + through speakers, separating sound generation from the instrument itself, or by + adding an actuator within the instrument''s resonating body, imparting new sounds + along with the original. We offer a third option, isolating the playing interface + from the actuated resonating body, allowing us to rewrite the relationship between + performance action and sound result while retaining the general form and feel + of the acoustic instrument. We present a hybrid acoustic-electronic violin based + on a stick-body electric violin and an electrodynamic polyphonic pick-up capturing + individual string displacements. A conventional violin body acts as the resonator, + actuated using digitally altered audio of the string inputs. By attaching the + electric violin above the body with acoustic isolation, we retain the physical + playing experience of a normal violin along with some of the acoustic filtering + and radiation of a traditional build. 
We propose the use of the hybrid instrument + with digitally automated pitch and tone correction to make an easy violin for + use as a potential motivational tool for beginning violinists.' + address: 'Porto Alegre, Brazil' + author: Laurel Pardue and Kurijn Buys and Dan Overholt and Andrew P. McPherson and + Michael Edinger + bibtex: "@inproceedings{Pardue2019,\n abstract = {When designing an augmented acoustic\ + \ instrument, it is often of interest to retain an instrument's sound quality\ + \ and nuanced response while leveraging the richness of digital synthesis. Digital\ + \ audio has traditionally been generated through speakers, separating sound generation\ + \ from the instrument itself, or by adding an actuator within the instrument's\ + \ resonating body, imparting new sounds along with the original. We offer a third\ + \ option, isolating the playing interface from the actuated resonating body, allowing\ + \ us to rewrite the relationship between performance action and sound result while\ + \ retaining the general form and feel of the acoustic instrument. We present\ + \ a hybrid acoustic-electronic violin based on a stick-body electric violin and\ + \ an electrodynamic polyphonic pick-up capturing individual string displacements.\ + \ A conventional violin body acts as the resonator, actuated using digitally\ + \ altered audio of the string inputs. By attaching the electric violin above\ + \ the body with acoustic isolation, we retain the physical playing experience\ + \ of a normal violin along with some of the acoustic filtering and radiation of\ + \ a traditional build. We propose the use of the hybrid instrument with digitally\ + \ automated pitch and tone correction to make an easy violin for use as a potential\ + \ motivational tool for beginning violinists.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Laurel Pardue and Kurijn Buys and Dan Overholt and Andrew P. 
McPherson\ + \ and Michael Edinger},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672958},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {278--283},\n publisher = {UFRGS},\n title = {Separating\ + \ sound from source: sonic transformation of the violin through electrodynamic\ + \ pickups and acoustic actuation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper053.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.27d09832 + doi: 10.5281/zenodo.3672958 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/PGYOkFjyrek - title: 'Grisey’s ''Talea'': Musical Representation As An Interactive 3D Map' - url: https://nime.pubpub.org/pub/oiwz8bb7 - year: 2021 + pages: 278--283 + publisher: UFRGS + title: 'Separating sound from source: sonic transformation of the violin through + electrodynamic pickups and acoustic actuation' + url: http://www.nime.org/proceedings/2019/nime2019_paper053.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_23 - abstract: 'In this paper we discuss the beneficial aspects of incorporating energy-motion - models as a design pattern in musical interface design. These models can be understood - as archetypes of motion trajectories which are commonly applied in the analysis - and composition of acousmatic music. With the aim of exploring a new possible - paradigm for interface design, our research builds on the parallel investigation - of embodied music cognition theory and the praxis of acousmatic music. After having - run a large study for understanding a listener’s spontaneous rendering of form - and movement, we built a number of digital instruments especially designed to - emphasise a particular energy-motion profile. 
The evaluation through composition - and performance indicates that this design paradigm can foster musical inventiveness - and expression in the processes of composition and performance of gestural electronic - music.' - address: 'Shanghai, China' - articleno: 23 - author: 'Tomás, Enrique and Gorbach, Thomas and Tellioğlu, Hilda and Kaltenbrunner, - Martin' - bibtex: "@inproceedings{NIME21_23,\n abstract = {In this paper we discuss the beneficial\ - \ aspects of incorporating energy-motion models as a design pattern in musical\ - \ interface design. These models can be understood as archetypes of motion trajectories\ - \ which are commonly applied in the analysis and composition of acousmatic music.\ - \ With the aim of exploring a new possible paradigm for interface design, our\ - \ research builds on the parallel investigation of embodied music cognition theory\ - \ and the praxis of acousmatic music. After having run a large study for understanding\ - \ a listener’s spontaneous rendering of form and movement, we built a number of\ - \ digital instruments especially designed to emphasise a particular energy-motion\ - \ profile. 
The evaluation through composition and performance indicates that this\ - \ design paradigm can foster musical inventiveness and expression in the processes\ - \ of composition and performance of gestural electronic music.},\n address = {Shanghai,\ - \ China},\n articleno = {23},\n author = {Tomás, Enrique and Gorbach, Thomas and\ - \ Tellioğlu, Hilda and Kaltenbrunner, Martin},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.ce8139a8},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/QDjCEnGYSC4},\n title = {Embodied Gestures: Sculpting Energy-Motion\ - \ Models into Musical Interfaces},\n url = {https://nime.pubpub.org/pub/gsx1wqt5},\n\ - \ year = {2021}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.21428/92fbeb44.ce8139a8 - issn: 2220-4806 - month: June - presentation-video: https://youtu.be/QDjCEnGYSC4 - title: 'Embodied Gestures: Sculpting Energy-Motion Models into Musical Interfaces' - url: https://nime.pubpub.org/pub/gsx1wqt5 - year: 2021 - - -- ENTRYTYPE: inproceedings - ID: NIME21_24 - abstract: 'This paper addresses environmental issues around NIME research and practice. - We discuss the formulation of an environmental statement for the conference as - well as the initiation of a NIME Eco Wiki containing information on environmental - concerns related to the creation of new musical instruments. We outline a number - of these concerns and, by systematically reviewing the proceedings of all previous - NIME conferences, identify a general lack of reflection on the environmental impact - of the research undertaken. Finally, we propose a framework for addressing the - making, testing, using, and disposal of NIMEs in the hope that sustainability - may become a central concern to researchers.' 
- address: 'Shanghai, China' - articleno: 24 - author: 'Masu, Raul and Melbye, Adam Pultz and Sullivan, John and Jensenius, Alexander - Refsum' - bibtex: "@inproceedings{NIME21_24,\n abstract = {This paper addresses environmental\ - \ issues around NIME research and practice. We discuss the formulation of an environmental\ - \ statement for the conference as well as the initiation of a NIME Eco Wiki containing\ - \ information on environmental concerns related to the creation of new musical\ - \ instruments. We outline a number of these concerns and, by systematically reviewing\ - \ the proceedings of all previous NIME conferences, identify a general lack of\ - \ reflection on the environmental impact of the research undertaken. Finally,\ - \ we propose a framework for addressing the making, testing, using, and disposal\ - \ of NIMEs in the hope that sustainability may become a central concern to researchers.},\n\ - \ address = {Shanghai, China},\n articleno = {24},\n author = {Masu, Raul and\ - \ Melbye, Adam Pultz and Sullivan, John and Jensenius, Alexander Refsum},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.5725ad8f},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/JE6YqYsV5Oo},\n title = {NIME\ - \ and the Environment: Toward a More Sustainable NIME Practice},\n url = {https://nime.pubpub.org/pub/4bbl5lod},\n\ - \ year = {2021}\n}\n" + ID: Advincula2019 + abstract: 'This paper introduces the Grain Prism, a hybrid of a granular synthesizer + and sampler that, through a capacitive sensing interface presented in obscure + glyphs, invites users to create experimental sound textures with their own recorded + voice. The capacitive sensing system, activated through skin contact over single + glyphs or a combination of them, instigates the user to decipher the hidden sonic + messages. 
The mysterious interface open space to aleatoricism in the act of conjuring + sound, and therefore new discoveries. The users, when forced to abandon preconceived + ways of playing a synthesizer, look at themselves in a different light, as their + voice is the source material.' + address: 'Porto Alegre, Brazil' + author: Gabriela Bila Advincula and Don Derek Haddad and Kent Larson + bibtex: "@inproceedings{Advincula2019,\n abstract = {This paper introduces the Grain\ + \ Prism, a hybrid of a granular synthesizer and sampler that, through a capacitive\ + \ sensing interface presented in obscure glyphs, invites users to create experimental\ + \ sound textures with their own recorded voice. The capacitive sensing system,\ + \ activated through skin contact over single glyphs or a combination of them,\ + \ instigates the user to decipher the hidden sonic messages. The mysterious interface\ + \ open space to aleatoricism in the act of conjuring sound, and therefore new\ + \ discoveries. The users, when forced to abandon preconceived ways of playing\ + \ a synthesizer, look at themselves in a different light, as their voice is the\ + \ source material.},\n address = {Porto Alegre, Brazil},\n author = {Gabriela\ + \ Bila Advincula and Don Derek Haddad and Kent Larson},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672960},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {284--285},\n publisher = {UFRGS},\n\ + \ title = {Grain Prism: Hieroglyphic Interface for Granular Sampling},\n url =\ + \ {http://www.nime.org/proceedings/2019/nime2019_paper054.pdf},\n year = {2019}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.5725ad8f + doi: 10.5281/zenodo.3672960 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - 
presentation-video: https://youtu.be/JE6YqYsV5Oo - title: 'NIME and the Environment: Toward a More Sustainable NIME Practice' - url: https://nime.pubpub.org/pub/4bbl5lod - year: 2021 + pages: 284--285 + publisher: UFRGS + title: 'Grain Prism: Hieroglyphic Interface for Granular Sampling' + url: http://www.nime.org/proceedings/2019/nime2019_paper054.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_25 - abstract: 'The Global Hyperorgan is an intercontinental, creative space for acoustic - musicking. Existing pipe organs around the world are networked for real-time, - geographically-distant performance, with performers utilizing instruments and - other input devices to collaborate musically through the voices of the pipes in - each location. A pilot study was carried out in January 2021, connecting two large - pipe organs in Piteå, Sweden, and Amsterdam, the Netherlands. A quartet of performers - tested the Global Hyperorgan’s capacities for telematic musicking through a series - of pieces. The concept of modularity is useful when considering the artistic challenges - and possibilities of the Global Hyperorgan. We observe how the modular system - utilized in the pilot study afforded multiple experiences of shared instrumentality - from which new, synthetic voices emerge. As a long-term technological, artistic - and social research project, the Global Hyperorgan offers a platform for exploring - technology, agency, voice, and intersubjectivity in hyper-acoustic telematic musicking.' - address: 'Shanghai, China' - articleno: 25 - author: 'Harlow, Randall and Petersson, Mattias and Ek, Robert and Visi, Federico - and Östersjö, Stefan' - bibtex: "@inproceedings{NIME21_25,\n abstract = {The Global Hyperorgan is an intercontinental,\ - \ creative space for acoustic musicking. 
Existing pipe organs around the world\ - \ are networked for real-time, geographically-distant performance, with performers\ - \ utilizing instruments and other input devices to collaborate musically through\ - \ the voices of the pipes in each location. A pilot study was carried out in January\ - \ 2021, connecting two large pipe organs in Piteå, Sweden, and Amsterdam, the\ - \ Netherlands. A quartet of performers tested the Global Hyperorgan’s capacities\ - \ for telematic musicking through a series of pieces. The concept of modularity\ - \ is useful when considering the artistic challenges and possibilities of the\ - \ Global Hyperorgan. We observe how the modular system utilized in the pilot study\ - \ afforded multiple experiences of shared instrumentality from which new, synthetic\ - \ voices emerge. As a long-term technological, artistic and social research project,\ - \ the Global Hyperorgan offers a platform for exploring technology, agency, voice,\ - \ and intersubjectivity in hyper-acoustic telematic musicking.},\n address = {Shanghai,\ - \ China},\n articleno = {25},\n author = {Harlow, Randall and Petersson, Mattias\ - \ and Ek, Robert and Visi, Federico and Östersjö, Stefan},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.d4146b2d},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/t88aIXdqBWQ},\n title = {Global Hyperorgan:\ - \ a platform for telematic musicking and research},\n url = {https://nime.pubpub.org/pub/a626cbqh},\n\ - \ year = {2021}\n}\n" + ID: Bown2019 + abstract: 'We present an audio-focused creative coding toolkit for deploying music + programs to remote networked devices. 
It is designed to support efficient creative + exploratory search in the context of the Internet of Things (IoT), where one or + more devices must be configured, programmed and interact over a network, with + applications in digital musical instruments, networked music performance and other + digital experiences. Users can easily monitor and hack what multiple devices are + doing on the fly, enhancing their ability to perform ``exploratory search'''' + in a creative workflow. We present two creative case studies using the system: + the creation of a dance performance and the creation of a distributed musical + installation. Analysing different activities within the production process, with + a particular focus on the trade-off between more creative exploratory tasks and + more standard configuring and problem-solving tasks, we show how the system supports + creative exploratory search for multiple networked devices. ' + address: 'Porto Alegre, Brazil' + author: Oliver Bown and Angelo Fraietta and Sam Ferguson and Lian Loke and Liam + Bray + bibtex: "@inproceedings{Bown2019,\n abstract = {We present an audio-focused creative\ + \ coding toolkit for deploying music programs to remote networked devices. It\ + \ is designed to support efficient creative exploratory search in the context\ + \ of the Internet of Things (IoT), where one or more devices must be configured,\ + \ programmed and interact over a network, with applications in digital musical\ + \ instruments, networked music performance and other digital experiences. Users\ + \ can easily monitor and hack what multiple devices are doing on the fly, enhancing\ + \ their ability to perform ``exploratory search'' in a creative workflow. We present\ + \ two creative case studies using the system: the creation of a dance performance\ + \ and the creation of a distributed musical installation. 
Analysing different\ + \ activities within the production process, with a particular focus on the trade-off\ + \ between more creative exploratory tasks and more standard configuring and problem-solving\ + \ tasks, we show how the system supports creative exploratory search for multiple\ + \ networked devices. },\n address = {Porto Alegre, Brazil},\n author = {Oliver\ + \ Bown and Angelo Fraietta and Sam Ferguson and Lian Loke and Liam Bray},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.3672962},\n editor = {Marcelo Queiroz and\ + \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {286--291},\n\ + \ publisher = {UFRGS},\n title = {Facilitating Creative Exploratory Search with\ + \ Multiple Networked Audio Devices Using HappyBrackets},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper055.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.d4146b2d + doi: 10.5281/zenodo.3672962 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/t88aIXdqBWQ - title: 'Global Hyperorgan: a platform for telematic musicking and research' - url: https://nime.pubpub.org/pub/a626cbqh - year: 2021 + pages: 286--291 + publisher: UFRGS + title: Facilitating Creative Exploratory Search with Multiple Networked Audio Devices + Using HappyBrackets + url: http://www.nime.org/proceedings/2019/nime2019_paper055.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_26 - abstract: 'The nature of digital musical instruments (DMIs), often bespoke artefacts - devised by single or small groups of technologists, requires thought about how - they are shared and archived so that others can replicate or adapt designs. 
The - ability for replication contributes to an instrument’s longevity and creates opportunities - for both DMI designers and researchers. Research papers often omit necessary knowledge - for replicating research artefacts, but we argue that mitigating this situation - is not just about including design materials and documentation. Our way of approaching - this issue is by drawing on an age-old method as a way of disseminating knowledge, - the apprenticeship. We propose the DMI apprenticeship as a way of exploring the - procedural obstacles of replicating DMIs, while highlighting for both apprentice - and designer the elements of knowledge that are a challenge to communicate in - conventional documentation. Our own engagement with the DMI apprenticeship led - to successfully replicating an instrument, Strummi. Framing this process as an - apprenticeship highlighted the non-obvious areas of the documentation and manufacturing - process that are crucial in the successful replication of a DMI.' - address: 'Shanghai, China' - articleno: 26 - author: 'Zayas-Garin, Luis and Harrison, Jacob and Jack, Robert and McPherson, Andrew' - bibtex: "@inproceedings{NIME21_26,\n abstract = {The nature of digital musical instruments\ - \ (DMIs), often bespoke artefacts devised by single or small groups of technologists,\ - \ requires thought about how they are shared and archived so that others can replicate\ - \ or adapt designs. The ability for replication contributes to an instrument’s\ - \ longevity and creates opportunities for both DMI designers and researchers.\ - \ Research papers often omit necessary knowledge for replicating research artefacts,\ - \ but we argue that mitigating this situation is not just about including design\ - \ materials and documentation. 
Our way of approaching this issue is by drawing\ - \ on an age-old method as a way of disseminating knowledge, the apprenticeship.\ - \ We propose the DMI apprenticeship as a way of exploring the procedural obstacles\ - \ of replicating DMIs, while highlighting for both apprentice and designer the\ - \ elements of knowledge that are a challenge to communicate in conventional documentation.\ - \ Our own engagement with the DMI apprenticeship led to successfully replicating\ - \ an instrument, Strummi. Framing this process as an apprenticeship highlighted\ - \ the non-obvious areas of the documentation and manufacturing process that are\ - \ crucial in the successful replication of a DMI.},\n address = {Shanghai, China},\n\ - \ articleno = {26},\n author = {Zayas-Garin, Luis and Harrison, Jacob and Jack,\ - \ Robert and McPherson, Andrew},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.87f1d63e},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/zTMaubJjlzA},\n\ - \ title = {DMI Apprenticeship: Sharing and Replicating Musical Artefacts},\n url\ - \ = {https://nime.pubpub.org/pub/dmiapprenticeship},\n year = {2021}\n}\n" + ID: FernandesSantos2019 + abstract: 'During the musical performance, expert musicians consciously manipulate + acoustical parameters expressing their interpretative choices. Also, players make + physical motions and, in many cases, these gestures are related to the musicians'' + artistic intentions. However, it''s not clear if the sound manipulation reflects + in physical motions. The understanding of the musical structure of the work being + performed in its many levels may impact the projection of artistic intentions, + and performers alter it in micro and macro-sections, such as in musical motifs, + phrases and sessions. 
Therefore, this paper investigates the timing manipulation + and how such variations may reflect in physical gestures. The study involved musicians + (flute, clarinet, and bassoon players) performing a unison excerpt by G. Rossini. + We analyzed the relationship between timing variation (the Inter Onsets Interval + deviations) and physical motion based on the traveled distance of the flute under + different conditions. The flutists were asked to play the musical excerpt in three + experimental conditions: (1) playing solo and playing in duets with previous recordings + by other instrumentalists, (2) clarinetist and (3) bassoonist. The finding suggests + that: 1) the movements, which seem to be related to the sense of pulse, are recurrent + and stable, 2) the timing variability in micro or macro sections reflects in gestures'' + amplitude performed by flutists.' + address: 'Porto Alegre, Brazil' + author: Thais Fernandes Santos + bibtex: "@inproceedings{FernandesSantos2019,\n abstract = {During the musical performance,\ + \ expert musicians consciously manipulate acoustical parameters expressing their\ + \ interpretative choices. Also, players make physical motions and, in many cases,\ + \ these gestures are related to the musicians' artistic intentions. However, it's\ + \ not clear if the sound manipulation reflects in physical motions. The understanding\ + \ of the musical structure of the work being performed in its many levels may\ + \ impact the projection of artistic intentions, and performers alter it in micro\ + \ and macro-sections, such as in musical motifs, phrases and sessions. Therefore,\ + \ this paper investigates the timing manipulation and how such variations may\ + \ reflect in physical gestures. The study involved musicians (flute, clarinet,\ + \ and bassoon players) performing a unison excerpt by G. Rossini. 
We analyzed\ + \ the relationship between timing variation (the Inter Onsets Interval deviations)\ + \ and physical motion based on the traveled distance of the flute under different\ + \ conditions. The flutists were asked to play the musical excerpt in three experimental\ + \ conditions: (1) playing solo and playing in duets with previous recordings by\ + \ other instrumentalists, (2) clarinetist and (3) bassoonist. The finding suggests\ + \ that: 1) the movements, which seem to be related to the sense of pulse, are\ + \ recurrent and stable, 2) the timing variability in micro or macro sections reflects\ + \ in gestures' amplitude performed by flutists.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Thais Fernandes Santos},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672966},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {292--297},\n publisher = {UFRGS},\n title = {The reciprocity\ + \ between ancillary gesture and music structure performed by expert musicians},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_paper056.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.87f1d63e + doi: 10.5281/zenodo.3672966 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/zTMaubJjlzA - title: 'DMI Apprenticeship: Sharing and Replicating Musical Artefacts' - url: https://nime.pubpub.org/pub/dmiapprenticeship - year: 2021 + pages: 292--297 + publisher: UFRGS + title: The reciprocity between ancillary gesture and music structure performed by + expert musicians + url: http://www.nime.org/proceedings/2019/nime2019_paper056.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_27 - abstract: 'This paper presents the soma design process of creating 
Body Electric: - a novel interface for the capture and use of biofeedback signals and physiological - changes generated in the body by breathing, during singing. This NIME design is - grounded in the performer''s experience of, and relationship to, their body and - their voice. We show that NIME design using principles from soma design can offer - creative opportunities in developing novel sensing mechanisms, which can in turn - inform composition and further elicit curious engagements between performer and - artefact, disrupting notions of performer-led control. As contributions, this - work 1) offers an example of NIME design for situated living, feeling, performing - bodies, and 2) presents the rich potential of soma design as a path for designing - in this context.' - address: 'Shanghai, China' - articleno: 27 - author: 'Cotton, Kelsey and Sanches, Pedro and Tsaknaki, Vasiliki and Karpashevich, - Pavel' - bibtex: "@inproceedings{NIME21_27,\n abstract = {This paper presents the soma design\ - \ process of creating Body Electric: a novel interface for the capture and use\ - \ of biofeedback signals and physiological changes generated in the body by breathing,\ - \ during singing. This NIME design is grounded in the performer's experience of,\ - \ and relationship to, their body and their voice. We show that NIME design using\ - \ principles from soma design can offer creative opportunities in developing novel\ - \ sensing mechanisms, which can in turn inform composition and further elicit\ - \ curious engagements between performer and artefact, disrupting notions of performer-led\ - \ control. 
As contributions, this work 1) offers an example of NIME design for\ - \ situated living, feeling, performing bodies, and 2) presents the rich potential\ - \ of soma design as a path for designing in this context.},\n address = {Shanghai,\ - \ China},\n articleno = {27},\n author = {Cotton, Kelsey and Sanches, Pedro and\ - \ Tsaknaki, Vasiliki and Karpashevich, Pavel},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.ec9f8fdd},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/zwzCgG8MXNA},\n title = {The Body Electric: A NIME designed\ - \ through and with the somatic experience of singing},\n url = {https://nime.pubpub.org/pub/ntm5kbux},\n\ - \ year = {2021}\n}\n" + ID: Paisa2019 + abstract: 'This project describes a novel approach to hybrid electro-acoustical + instruments by augmenting the Sensel Morph, with real-time audio sensing capabilities. + The actual action-sounds are captured with a piezoelectric transducer and processed + in Max 8 to extend the sonic range existing in the acoustical domain alone. The + control parameters are captured by the Morph and mapped to audio algorithm proprieties + like filter cutoff frequency, frequency shift or overdrive. The instrument opens + up the possibility for a large selection of different interaction techniques that + have a direct impact on the output sound. The instrument is evaluated from a sound + designer''s perspective, encouraging exploration in the materials used as well + as techniques. The contribution are two-fold. First, the use of a piezo transducer + to augment the Sensel Morph affords an extra dimension of control on top of the + offerings. Second, the use of acoustic sounds from physical interactions as a + source for excitation and manipulation of an audio processing system offers a + large variety of new sounds to be discovered. 
The methodology involved an exploratory + process of iterative instrument making, interspersed with observations gathered + via improvisatory trials, focusing on the new interactions made possible through + the fusion of audio-rate inputs with the Morph''s default interaction methods.' + address: 'Porto Alegre, Brazil' + author: Razvan Paisa and Dan Overholt + bibtex: "@inproceedings{Paisa2019,\n abstract = {This project describes a novel\ + \ approach to hybrid electro-acoustical instruments by augmenting the Sensel Morph,\ + \ with real-time audio sensing capabilities. The actual action-sounds are captured\ + \ with a piezoelectric transducer and processed in Max 8 to extend the sonic range\ + \ existing in the acoustical domain alone. The control parameters are captured\ + \ by the Morph and mapped to audio algorithm proprieties like filter cutoff frequency,\ + \ frequency shift or overdrive. The instrument opens up the possibility for a\ + \ large selection of different interaction techniques that have a direct impact\ + \ on the output sound. The instrument is evaluated from a sound designer's perspective,\ + \ encouraging exploration in the materials used as well as techniques. The contribution\ + \ are two-fold. First, the use of a piezo transducer to augment the Sensel Morph\ + \ affords an extra dimension of control on top of the offerings. Second, the use\ + \ of acoustic sounds from physical interactions as a source for excitation and\ + \ manipulation of an audio processing system offers a large variety of new sounds\ + \ to be discovered. 
The methodology involved an exploratory process of iterative\ + \ instrument making, interspersed with observations gathered via improvisatory\ + \ trials, focusing on the new interactions made possible through the fusion of\ + \ audio-rate inputs with the Morph's default interaction methods.},\n address\ + \ = {Porto Alegre, Brazil},\n author = {Razvan Paisa and Dan Overholt},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.3672968},\n editor = {Marcelo Queiroz and\ + \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {298--302},\n\ + \ publisher = {UFRGS},\n title = {Enhancing the Expressivity of the Sensel Morph\ + \ via Audio-rate Sensing},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper057.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.ec9f8fdd + doi: 10.5281/zenodo.3672968 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/zwzCgG8MXNA - title: 'The Body Electric: A NIME designed through and with the somatic experience - of singing' - url: https://nime.pubpub.org/pub/ntm5kbux - year: 2021 + pages: 298--302 + publisher: UFRGS + title: Enhancing the Expressivity of the Sensel Morph via Audio-rate Sensing + url: http://www.nime.org/proceedings/2019/nime2019_paper057.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_28 - abstract: 'This paper discusses findings from a survey on interfaces for making - electronic music. We invited electronic music makers of varying experience to - reflect on their practice and setup and to imagine and describe their ideal interface - for music-making. We also asked them to reflect on the state of gestural controllers, - machine learning, and artificial intelligence in their practice. 
We had 118 people - respond to the survey, with 40.68% professional musicians, and 10.17% identifying - as living with a disability or access requirement. Results highlight limitations - of music-making setups as perceived by electronic music makers, reflections on - how imagined novel interfaces could address such limitations, and positive attitudes - towards ML and AI in general.' - address: 'Shanghai, China' - articleno: 28 - author: 'Frid, Emma and Ilsar, Alon' - bibtex: "@inproceedings{NIME21_28,\n abstract = {This paper discusses findings from\ - \ a survey on interfaces for making electronic music. We invited electronic music\ - \ makers of varying experience to reflect on their practice and setup and to imagine\ - \ and describe their ideal interface for music-making. We also asked them to reflect\ - \ on the state of gestural controllers, machine learning, and artificial intelligence\ - \ in their practice. We had 118 people respond to the survey, with 40.68% professional\ - \ musicians, and 10.17% identifying as living with a disability or access requirement.\ - \ Results highlight limitations of music-making setups as perceived by electronic\ - \ music makers, reflections on how imagined novel interfaces could address such\ - \ limitations, and positive attitudes towards ML and AI in general.},\n address\ - \ = {Shanghai, China},\n articleno = {28},\n author = {Frid, Emma and Ilsar, Alon},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.c37a2370},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/vX8B7fQki_w},\n title\ - \ = {Reimagining (Accessible) Digital Musical Instruments: A Survey on Electronic\ - \ Music-Making Tools},\n url = {https://nime.pubpub.org/pub/reimaginingadmis},\n\ - \ year = {2021}\n}\n" + ID: Ramos2019 + abstract: 'This paper presents a description of the design and usage of Eolos, a + wireless MIDI wind 
controller. The main goal of Eolos is to provide an interface + that facilitates the production of music for any individual, regardless of their + playing skills or previous musical knowledge. Its features are: open design, lower + cost than commercial alternatives, wireless MIDI operation, rechargeable battery + power, graphical user interface, tactile keys, sensitivity to air pressure, left-right + reversible design and two FSR sensors. There is also a mention about its participation + in the 1st Collaborative Concert over the Internet between Argentina and Cuba + "Tradición y Nuevas Sonoridades".' + address: 'Porto Alegre, Brazil' + author: Juan Mariano Ramos + bibtex: "@inproceedings{Ramos2019,\n abstract = {This paper presents a description\ + \ of the design and usage of Eolos, a wireless MIDI wind controller. The main\ + \ goal of Eolos is to provide an interface that facilitates the production of\ + \ music for any individual, regardless of their playing skills or previous musical\ + \ knowledge. Its features are: open design, lower cost than commercial alternatives,\ + \ wireless MIDI operation, rechargeable battery power, graphical user interface,\ + \ tactile keys, sensitivity to air pressure, left-right reversible design and\ + \ two FSR sensors. 
There is also a mention about its participation in the 1st\ + \ Collaborative Concert over the Internet between Argentina and Cuba \"Tradición\ + \ y Nuevas Sonoridades\".},\n address = {Porto Alegre, Brazil},\n author = {Juan\ + \ Mariano Ramos},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672972},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {303--306},\n publisher = {UFRGS},\n title = {Eolos: a wireless\ + \ {MIDI} wind controller},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper058.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.c37a2370 + doi: 10.5281/zenodo.3672972 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/vX8B7fQki_w - title: 'Reimagining (Accessible) Digital Musical Instruments: A Survey on Electronic - Music-Making Tools' - url: https://nime.pubpub.org/pub/reimaginingadmis - year: 2021 + pages: 303--306 + publisher: UFRGS + title: 'Eolos: a wireless MIDI wind controller' + url: http://www.nime.org/proceedings/2019/nime2019_paper058.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_29 - abstract: 'The Magnetic Resonator Piano (MRP) is a relatively well-established DMI - which significantly expands the capabilities of the acoustic piano. This paper - presents SoftMRP, a Max/MSP patch designed to emulate the physical MRP and thereby - to allow rehearsal of MRP repertoire and performance techniques using any MIDI - keyboard and expression pedal; it is hoped that the development of such a tool - will encourage even more widespread adoption of the original instrument amongst - composers and performers. 
This paper explains SoftMRP’s features and limitations, - discussing the challenges of approximating responses which rely upon the MRP’s - continuous sensing of key position, and considering ways in which the development - of the emulation might feed back into the development of the original instrument, - both specifically and more broadly: since it was designed by a composer, based - on his experience of writing for the instrument, it offers the MRP’s designers - an insight into how the instrument is conceptualised and understood by the musicians - who use it.' - address: 'Shanghai, China' - articleno: 29 - author: 'Pitkin, Jonathan' - bibtex: "@inproceedings{NIME21_29,\n abstract = {The Magnetic Resonator Piano (MRP)\ - \ is a relatively well-established DMI which significantly expands the capabilities\ - \ of the acoustic piano. This paper presents SoftMRP, a Max/MSP patch designed\ - \ to emulate the physical MRP and thereby to allow rehearsal of MRP repertoire\ - \ and performance techniques using any MIDI keyboard and expression pedal; it\ - \ is hoped that the development of such a tool will encourage even more widespread\ - \ adoption of the original instrument amongst composers and performers. 
This paper\ - \ explains SoftMRP’s features and limitations, discussing the challenges of approximating\ - \ responses which rely upon the MRP’s continuous sensing of key position, and\ - \ considering ways in which the development of the emulation might feed back into\ - \ the development of the original instrument, both specifically and more broadly:\ - \ since it was designed by a composer, based on his experience of writing for\ - \ the instrument, it offers the MRP’s designers an insight into how the instrument\ - \ is conceptualised and understood by the musicians who use it.},\n address =\ - \ {Shanghai, China},\n articleno = {29},\n author = {Pitkin, Jonathan},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.9e7da18f},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/Fw43nHVyGUg},\n title = {SoftMRP:\ - \ a Software Emulation of the Magnetic Resonator Piano},\n url = {https://nime.pubpub.org/pub/m9nhdm0p},\n\ - \ year = {2021}\n}\n" + ID: Yang2019 + abstract: 'Variational Autoencoder has already achieved great results on image generation and + recently made promising progress on music sequence generation. However, the model + is still quite difficult to control in the sense that the learned latent representations + lack meaningful music semantics. What users really need is to interact with certain music + features, such as rhythm and pitch contour, in the creation process so that they + can easily test different composition ideas. In this paper, we propose a disentanglement + by augmentation method to inspect the pitch and rhythm interpretations of the + latent representations. Based on the interpretable representations, an intuitive + graphical user interface demo is designed for users to better direct the music + creation process by manipulating the pitch contours and rhythmic complexity.' 
+ address: 'Porto Alegre, Brazil' + author: Ruihan Yang and Tianyao Chen and Yiyi Zhang and gus xia + bibtex: "@inproceedings{Yang2019,\n abstract = {Variational Autoencoder has already\ + \ achieved great results on image generation and recently made promising progress\ + \ on music sequence generation. However, the model is still quite difficult to\ + \ control in the sense that the learned latent representations lack meaningful\ + \ music semantics. What users really need is to interact with certain music features,\ + \ such as rhythm and pitch contour, in the creation process so that they can easily\ + \ test different composition ideas. In this paper, we propose a disentanglement\ + \ by augmentation method to inspect the pitch and rhythm interpretations of the\ + \ latent representations. Based on the interpretable representations, an intuitive\ + \ graphical user interface demo is designed for users to better direct the music\ + \ creation process by manipulating the pitch contours and rhythmic complexity.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Ruihan Yang and Tianyao Chen and\ + \ Yiyi Zhang and gus xia},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672974},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {307--312},\n publisher = {UFRGS},\n title = {Inspecting\ + \ and Interacting with Meaningful Music Representations using {VAE}},\n url =\ + \ {http://www.nime.org/proceedings/2019/nime2019_paper059.pdf},\n year = {2019}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.9e7da18f + doi: 10.5281/zenodo.3672974 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/Fw43nHVyGUg - title: 'SoftMRP: a Software Emulation of the Magnetic Resonator Piano' - url: 
https://nime.pubpub.org/pub/m9nhdm0p - year: 2021 - - -- ENTRYTYPE: inproceedings - ID: NIME21_3 - abstract: 'This paper presents the design and preliminary evaluation of an Accessible - Digital Musical Instrument (ADMI) in the form of a tangible wooden step sequencer - that uses photoresistors and wooden blocks to trigger musical events. Furthermore, - the paper presents a short overview of design criteria for ADMIs based on literature - and first insights of an ongoing qualitative interview study with German Special - Educational Needs (SEN) teachers conducted by the first author. The preliminary - evaluation is realized by a reflection on the mentioned criteria. The instrument - was designed as a starting point for a participatory design process in music education - settings. The software is programmed in Pure Data and running on a Raspberry Pi - computer that fits inside the body of the instrument. While most similar developments - focus on professional performance and complex interactions, LoopBlocks focuses - on accessibility and Special Educational Needs settings. The main goal is to reduce - the cognitive load needed to play music by providing a clear and constrained interaction, - thus reducing intellectual and technical barriers to active music making.' - address: 'Shanghai, China' - articleno: 3 - author: 'Förster, Andreas and Komesker, Mathias' - bibtex: "@inproceedings{NIME21_3,\n abstract = {This paper presents the design and\ - \ preliminary evaluation of an Accessible Digital Musical Instrument (ADMI) in\ - \ the form of a tangible wooden step sequencer that uses photoresistors and wooden\ - \ blocks to trigger musical events. Furthermore, the paper presents a short overview\ - \ of design criteria for ADMIs based on literature and first insights of an ongoing\ - \ qualitative interview study with German Special Educational Needs (SEN) teachers\ - \ conducted by the first author. 
The preliminary evaluation is realized by a reflection\ - \ on the mentioned criteria. The instrument was designed as a starting point for\ - \ a participatory design process in music education settings. The software is\ - \ programmed in Pure Data and running on a Raspberry Pi computer that fits inside\ - \ the body of the instrument. While most similar developments focus on professional\ - \ performance and complex interactions, LoopBlocks focuses on accessibility and\ - \ Special Educational Needs settings. The main goal is to reduce the cognitive\ - \ load needed to play music by providing a clear and constrained interaction,\ - \ thus reducing intellectual and technical barriers to active music making.},\n\ - \ address = {Shanghai, China},\n articleno = {3},\n author = {Förster, Andreas\ - \ and Komesker, Mathias},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.f45e1caf},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/u5o0gmB3MX8},\n\ - \ title = {LoopBlocks: Design and Preliminary Evaluation of an Accessible Tangible\ - \ Musical Step Sequencer},\n url = {https://nime.pubpub.org/pub/bj2w1gdx},\n year\ - \ = {2021}\n}\n" + pages: 307--312 + publisher: UFRGS + title: Inspecting and Interacting with Meaningful Music Representations using VAE + url: http://www.nime.org/proceedings/2019/nime2019_paper059.pdf + year: 2019 + + +- ENTRYTYPE: inproceedings + ID: Roma2019 + abstract: 'Descriptor spaces have become an ubiquitous interaction paradigm for + music based on collections of audio samples. However, most systems rely on a small + predefined set of descriptors, which the user is often required to understand + and choose from. There is no guarantee that the chosen descriptors are relevant + for a given collection. 
In addition, this method does not scale to longer samples + that require higher-dimensional descriptions, which biases systems towards the + use of short samples. In this paper we propose novel framework for automatic creation + of interactive sound spaces from sound collections using feature learning and + dimensionality reduction. The framework is implemented as a software library using + the SuperCollider language. We compare several algorithms and describe some example + interfaces for interacting with the resulting spaces. Our experiments signal the + potential of unsupervised algorithms for creating data-driven musical interfaces.' + address: 'Porto Alegre, Brazil' + author: Gerard Roma and Owen Green and Pierre Alexandre Tremblay + bibtex: "@inproceedings{Roma2019,\n abstract = {Descriptor spaces have become an\ + \ ubiquitous interaction paradigm for music based on collections of audio samples.\ + \ However, most systems rely on a small predefined set of descriptors, which the\ + \ user is often required to understand and choose from. There is no guarantee\ + \ that the chosen descriptors are relevant for a given collection. In addition,\ + \ this method does not scale to longer samples that require higher-dimensional\ + \ descriptions, which biases systems towards the use of short samples. In this\ + \ paper we propose novel framework for automatic creation of interactive sound\ + \ spaces from sound collections using feature learning and dimensionality reduction.\ + \ The framework is implemented as a software library using the SuperCollider language.\ + \ We compare several algorithms and describe some example interfaces for interacting\ + \ with the resulting spaces. 
Our experiments signal the potential of unsupervised\ + \ algorithms for creating data-driven musical interfaces.},\n address = {Porto\ + \ Alegre, Brazil},\n author = {Gerard Roma and Owen Green and Pierre Alexandre\ + \ Tremblay},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672976},\n editor\ + \ = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {313--318},\n publisher = {UFRGS},\n title = {Adaptive Mapping of Sound\ + \ Collections for Data-driven Musical Interfaces},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper060.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.f45e1caf + doi: 10.5281/zenodo.3672976 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/u5o0gmB3MX8 - title: 'LoopBlocks: Design and Preliminary Evaluation of an Accessible Tangible - Musical Step Sequencer' - url: https://nime.pubpub.org/pub/bj2w1gdx - year: 2021 + pages: 313--318 + publisher: UFRGS + title: Adaptive Mapping of Sound Collections for Data-driven Musical Interfaces + url: http://www.nime.org/proceedings/2019/nime2019_paper060.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_30 - abstract: 'The NIME community has proposed a variety of interfaces that connect - making music and education. This paper reviews current literature, proposes a - method for developing educational NIMEs, and reflects on a way to manifest computational - thinking through music computing. A case study is presented and discussed in which - a programmable mechatronics educational NIME and a virtual simulation of the NIME - offered as a web application were developed.' 
- address: 'Shanghai, China' - articleno: 30 - author: 'Tsoukalas, Kyriakos and Bukvic, Ivica' - bibtex: "@inproceedings{NIME21_30,\n abstract = {The NIME community has proposed\ - \ a variety of interfaces that connect making music and education. This paper\ - \ reviews current literature, proposes a method for developing educational NIMEs,\ - \ and reflects on a way to manifest computational thinking through music computing.\ - \ A case study is presented and discussed in which a programmable mechatronics\ - \ educational NIME and a virtual simulation of the NIME offered as a web application\ - \ were developed.},\n address = {Shanghai, China},\n articleno = {30},\n author\ - \ = {Tsoukalas, Kyriakos and Bukvic, Ivica},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.1eeb3ada},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/pdsfZX_kJBo},\n title = {Music Computing and Computational\ - \ Thinking: A Case Study},\n url = {https://nime.pubpub.org/pub/t94aq9rf},\n year\ - \ = {2021}\n}\n" + ID: Norilo2019 + abstract: 'This paper presents Veneer, a visual, touch-ready programming interface + for the Kronos programming language. The challenges of representing high-level + data flow abstractions, including higher order functions, are described. The tension + between abstraction and spontaneity in programming is addressed, and gradual abstraction + in live programming is proposed as a potential solution. Several novel user interactions + for patching on a touch device are shown. In addition, the paper describes some + of the current issues of web audio music applications and offers strategies for + integrating a web-based presentation layer with a low-latency native processing + backend.' 
+ address: 'Porto Alegre, Brazil' + author: Vesa Petri Norilo + bibtex: "@inproceedings{Norilo2019,\n abstract = {This paper presents Veneer, a\ + \ visual, touch-ready programming interface for the Kronos programming language.\ + \ The challenges of representing high-level data flow abstractions, including\ + \ higher order functions, are described. The tension between abstraction and spontaneity\ + \ in programming is addressed, and gradual abstraction in live programming is\ + \ proposed as a potential solution. Several novel user interactions for patching\ + \ on a touch device are shown. In addition, the paper describes some of the current\ + \ issues of web audio music applications and offers strategies for integrating\ + \ a web-based presentation layer with a low-latency native processing backend.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Vesa Petri Norilo},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.3672978},\n editor = {Marcelo Queiroz and\ + \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {319--324},\n\ + \ publisher = {UFRGS},\n title = {Veneer: Visual and Touch-based Programming for\ + \ Audio},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper061.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.1eeb3ada + doi: 10.5281/zenodo.3672978 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/pdsfZX_kJBo - title: 'Music Computing and Computational Thinking: A Case Study' - url: https://nime.pubpub.org/pub/t94aq9rf - year: 2021 + pages: 319--324 + publisher: UFRGS + title: 'Veneer: Visual and Touch-based Programming for Audio' + url: http://www.nime.org/proceedings/2019/nime2019_paper061.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_31 - abstract: 'We 
present new results combining data from a previously published study - of the mapping design process and a new replication of the same method with a - group of participants having different background expertise. Our thematic analysis - of participants'' interview responses reveal some design criteria common to both - groups of participants: mappings must manage the balance of control between the - instrument and the player, and they should be easy to understand for the player - and audience. We also consider several criteria that distinguish the two groups'' - evaluation strategies. We conclude with important discussion of the mapping designer''s - perspective, performance with gestural controllers, and the difficulties of evaluating - mapping designs and musical instruments in general.' - address: 'Shanghai, China' - articleno: 31 - author: 'West, Travis and Caramiaux, Baptiste and Huot, Stéphane and Wanderley, - Marcelo M.' - bibtex: "@inproceedings{NIME21_31,\n abstract = {We present new results combining\ - \ data from a previously published study of the mapping design process and a new\ - \ replication of the same method with a group of participants having different\ - \ background expertise. Our thematic analysis of participants' interview responses\ - \ reveal some design criteria common to both groups of participants: mappings\ - \ must manage the balance of control between the instrument and the player, and\ - \ they should be easy to understand for the player and audience. We also consider\ - \ several criteria that distinguish the two groups' evaluation strategies. 
We\ - \ conclude with important discussion of the mapping designer's perspective, performance\ - \ with gestural controllers, and the difficulties of evaluating mapping designs\ - \ and musical instruments in general.},\n address = {Shanghai, China},\n articleno\ - \ = {31},\n author = {West, Travis and Caramiaux, Baptiste and Huot, Stéphane\ - \ and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.04f0fc35},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/3hM531E_vlg},\n\ - \ title = {Making Mappings: Design Criteria for Live Performance},\n url = {https://nime.pubpub.org/pub/f1ueovwv},\n\ - \ year = {2021}\n}\n" + ID: Faitas2019 + abstract: 'Generating convincing music via deep neural networks is a challenging + problem that shows promise for many applications including interactive musical + creation. One part of this challenge is the problem of generating convincing accompaniment + parts to a given melody, as could be used in an automatic accompaniment system. + Despite much progress in this area, systems that can automatically learn to generate + interesting sounding, as well as harmonically plausible, accompanying melodies + remain somewhat elusive. In this paper we explore the problem of sequence to sequence + music generation where a human user provides a sequence of notes, and a neural + network model responds with a harmonically suitable sequence of equal length. + We consider two sequence-to-sequence models; one featuring standard unidirectional + long short-term memory (LSTM) architecture, and the other featuring bidirectional + LSTM, both successfully trained to produce a sequence based on the given input. + Both of these are fairly dated models, as part of the investigation is to see + what can be achieved with such models. 
These are evaluated and compared via a + qualitative study that features 106 respondents listening to eight random samples + from our set of generated music, as well as two human samples. From the results + we see a preference for the sequences generated by the bidirectional model as + well as an indication that these sequences sound more human.' + address: 'Porto Alegre, Brazil' + author: Andrei Faitas and Synne Engdahl Baumann and Torgrim Rudland Næss and Jim + Torresen and Charles Patrick Martin + bibtex: "@inproceedings{Faitas2019,\n abstract = {Generating convincing music via\ + \ deep neural networks is a challenging problem that shows promise for many applications\ + \ including interactive musical creation. One part of this challenge is the problem\ + \ of generating convincing accompaniment parts to a given melody, as could be\ + \ used in an automatic accompaniment system. Despite much progress in this area,\ + \ systems that can automatically learn to generate interesting sounding, as well\ + \ as harmonically plausible, accompanying melodies remain somewhat elusive. In\ + \ this paper we explore the problem of sequence to sequence music generation where\ + \ a human user provides a sequence of notes, and a neural network model responds\ + \ with a harmonically suitable sequence of equal length. We consider two sequence-to-sequence\ + \ models; one featuring standard unidirectional long short-term memory (LSTM)\ + \ architecture, and the other featuring bidirectional LSTM, both successfully\ + \ trained to produce a sequence based on the given input. Both of these are fairly\ + \ dated models, as part of the investigation is to see what can be achieved with\ + \ such models. These are evaluated and compared via a qualitative study that features\ + \ 106 respondents listening to eight random samples from our set of generated\ + \ music, as well as two human samples. 
From the results we see a preference for\ + \ the sequences generated by the bidirectional model as well as an indication\ + \ that these sequences sound more human.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Andrei Faitas and Synne Engdahl Baumann and Torgrim Rudland Næss and\ + \ Jim Torresen and Charles Patrick Martin},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.3672980},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {325--330},\n publisher = {UFRGS},\n\ + \ title = {Generating Convincing Harmony Parts with Simple Long Short-Term Memory\ + \ Networks},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper062.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.04f0fc35 + doi: 10.5281/zenodo.3672980 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/3hM531E_vlg - title: 'Making Mappings: Design Criteria for Live Performance' - url: https://nime.pubpub.org/pub/f1ueovwv - year: 2021 + pages: 325--330 + publisher: UFRGS + title: Generating Convincing Harmony Parts with Simple Long Short-Term Memory Networks + url: http://www.nime.org/proceedings/2019/nime2019_paper062.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_32 - abstract: 'What is the relationship between a musician-designer''s auditory imagery - for a musical piece, a design idea for an augmented instrument to support the - realisation of that piece, and the aspiration to introduce the resulting instrument - to a community of like-minded performers? We explore this NIME topic in the context - of building the first iteration of an augmented acoustic guitar prototype for - percussive fingerstyle guitarists. 
The first author, himself a percussive fingerstyle - player, started the project of an augmented guitar with expectations and assumptions - made around his own playing style, and in particular around the arrangement of - one song. This input was complemented by the outcome of an interview study, in - which percussive guitarists highlighted functional and creative requirements to - suit their needs. We ran a pilot study to assess the resulting prototype, involving - two other players. We present their feedback on two configurations of the prototype, - one equalising the signal of surface sensors and the other based on sample triggering. - The equalisation-based setting was better received, however both participants - provided useful suggestions to improve the sample-triggering model following their - own auditory imagery.' - address: 'Shanghai, China' - articleno: 32 - author: 'Martelloni, Andrea and McPherson, Andrew and Barthet, Mathieu' - bibtex: "@inproceedings{NIME21_32,\n abstract = {What is the relationship between\ - \ a musician-designer's auditory imagery for a musical piece, a design idea for\ - \ an augmented instrument to support the realisation of that piece, and the aspiration\ - \ to introduce the resulting instrument to a community of like-minded performers?\ - \ We explore this NIME topic in the context of building the first iteration of\ - \ an augmented acoustic guitar prototype for percussive fingerstyle guitarists.\ - \ The first author, himself a percussive fingerstyle player, started the project\ - \ of an augmented guitar with expectations and assumptions made around his own\ - \ playing style, and in particular around the arrangement of one song. This input\ - \ was complemented by the outcome of an interview study, in which percussive guitarists\ - \ highlighted functional and creative requirements to suit their needs. 
We ran\ - \ a pilot study to assess the resulting prototype, involving two other players.\ - \ We present their feedback on two configurations of the prototype, one equalising\ - \ the signal of surface sensors and the other based on sample triggering. The\ - \ equalisation-based setting was better received, however both participants provided\ - \ useful suggestions to improve the sample-triggering model following their own\ - \ auditory imagery.},\n address = {Shanghai, China},\n articleno = {32},\n author\ - \ = {Martelloni, Andrea and McPherson, Andrew and Barthet, Mathieu},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.2f6db6e6},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/qeX6dUrJURY},\n title = {Guitar\ - \ augmentation for Percussive Fingerstyle: Combining self-reflexive practice and\ - \ user-centred design},\n url = {https://nime.pubpub.org/pub/zgj85mzv},\n year\ - \ = {2021}\n}\n" + ID: Marasco2019 + abstract: 'Bendit_I/O is a system that allows for wireless, networked performance + of circuit-bent devices, giving artists a new outlet for performing with repurposed + technology. In a typical setup, a user pre-bends a device using the Bendit_I/O + board as an intermediary, replacing physical switches and potentiometers with + the board''s reed relays, motor driver, and digital potentiometer signals. Bendit_I/O + brings the networking techniques of distributed music performances to the hardware + hacking realm, opening the door for creative implementation of multiple circuit-bent + devices in audiovisual experiences. Consisting of a Wi-Fi- enabled I/O board and + a Node-based server, the system provides performers with a variety of interaction + and control possibilities between connected users and hacked devices. 
Moreover, + it is user-friendly, low-cost, and modular, making it a flexible toolset for artists + of diverse experience levels.' + address: 'Porto Alegre, Brazil' + author: Anthony T. Marasco and Edgar Berdahl and Jesse Allison + bibtex: "@inproceedings{Marasco2019,\n abstract = {Bendit\\_I/O is a system that\ + \ allows for wireless, networked performance of circuit-bent devices, giving artists\ + \ a new outlet for performing with repurposed technology. In a typical setup,\ + \ a user pre-bends a device using the Bendit\\_I/O board as an intermediary, replacing\ + \ physical switches and potentiometers with the board's reed relays, motor driver,\ + \ and digital potentiometer signals. Bendit\\_I/O brings the networking techniques\ + \ of distributed music performances to the hardware hacking realm, opening the\ + \ door for creative implementation of multiple circuit-bent devices in audiovisual\ + \ experiences. Consisting of a Wi-Fi- enabled I/O board and a Node-based server,\ + \ the system provides performers with a variety of interaction and control possibilities\ + \ between connected users and hacked devices. Moreover, it is user-friendly, low-cost,\ + \ and modular, making it a flexible toolset for artists of diverse experience\ + \ levels.},\n address = {Porto Alegre, Brazil},\n author = {Anthony T. 
Marasco\ + \ and Edgar Berdahl and Jesse Allison},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672982},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {331--334},\n publisher = {UFRGS},\n title = {{Bendit\\\ + _I/O}: A System for Networked Performance of Circuit-Bent Devices},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper063.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.2f6db6e6 + doi: 10.5281/zenodo.3672982 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/qeX6dUrJURY - title: 'Guitar augmentation for Percussive Fingerstyle: Combining self-reflexive - practice and user-centred design' - url: https://nime.pubpub.org/pub/zgj85mzv - year: 2021 + pages: 331--334 + publisher: UFRGS + title: 'Bendit_I/O: A System for Networked Performance of Circuit-Bent Devices' + url: http://www.nime.org/proceedings/2019/nime2019_paper063.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_33 - abstract: 'Recent applications of Transformer neural networks in the field of music - have demonstrated their ability to effectively capture and emulate long-term dependencies - characteristic of human notions of musicality and creative merit. We propose a - novel approach to automated symbolic rhythm generation, where a Transformer-XL - model trained on the Magenta Groove MIDI Dataset is used for the tasks of sequence - generation and continuation. Hundreds of generations are evaluated using blind-listening - tests to determine the extent to which the aspects of rhythm we understand to - be valuable are learnt and reproduced. 
Our model is able to achieve a standard - of rhythmic production comparable to human playing across arbitrarily long time - periods and multiple playing styles.' - address: 'Shanghai, China' - articleno: 33 - author: 'Nuttall, Thomas and Haki, Behzad and Jorda, Sergi' - bibtex: "@inproceedings{NIME21_33,\n abstract = {Recent applications of Transformer\ - \ neural networks in the field of music have demonstrated their ability to effectively\ - \ capture and emulate long-term dependencies characteristic of human notions of\ - \ musicality and creative merit. We propose a novel approach to automated symbolic\ - \ rhythm generation, where a Transformer-XL model trained on the Magenta Groove\ - \ MIDI Dataset is used for the tasks of sequence generation and continuation.\ - \ Hundreds of generations are evaluated using blind-listening tests to determine\ - \ the extent to which the aspects of rhythm we understand to be valuable are learnt\ - \ and reproduced. Our model is able to achieve a standard of rhythmic production\ - \ comparable to human playing across arbitrarily long time periods and multiple\ - \ playing styles.},\n address = {Shanghai, China},\n articleno = {33},\n author\ - \ = {Nuttall, Thomas and Haki, Behzad and Jorda, Sergi},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.fe9a0d82},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/Ul9s8qSMUgU},\n title = {Transformer\ - \ Neural Networks for Automated Rhythm Generation},\n url = {https://nime.pubpub.org/pub/8947fhly},\n\ - \ year = {2021}\n}\n" + ID: Macionis2019 + abstract: '''Where Is The Quiet?'' is a mixed-media installation that utilizes immersive + experience design, mechatronics, and machine learning in order to enhance wellness + and increase connectivity to the natural world. 
Individuals interact with the + installation by wearing a brainwave interface that measures the strength of the + alpha wave signal. The interface then transmits the data to a computer that uses + it in order to determine the individual''s overall state of relaxation. As the + individual achieves higher states of relaxation, mechatronic instruments respond + and provide feedback. This feedback not only encourages self-awareness but also + it motivates the individual to relax further. Visitors without the headset experience + the installation by watching a film and listening to an original musical score. + Through the novel arrangement of technologies and features, ''Where Is The Quiet?'' + demonstrates that mediated technological experiences are capable of evoking meditative + states of consciousness, facilitating individual and group connectivity, and deepening + awareness of the natural world. As such, this installation opens the door to future + research regarding the possibility of immersive experiences supporting humanitarian + needs.' + address: 'Porto Alegre, Brazil' + author: McLean J Macionis and Ajay Kapur + bibtex: "@inproceedings{Macionis2019,\n abstract = {'Where Is The Quiet?' is a mixed-media\ + \ installation that utilizes immersive experience design, mechatronics, and machine\ + \ learning in order to enhance wellness and increase connectivity to the natural\ + \ world. Individuals interact with the installation by wearing a brainwave interface\ + \ that measures the strength of the alpha wave signal. The interface then transmits\ + \ the data to a computer that uses it in order to determine the individual's overall\ + \ state of relaxation. As the individual achieves higher states of relaxation,\ + \ mechatronic instruments respond and provide feedback. 
This feedback not only\ + \ encourages self-awareness but also it motivates the individual to relax further.\ + \ Visitors without the headset experience the installation by watching a film\ + \ and listening to an original musical score. Through the novel arrangement of\ + \ technologies and features, 'Where Is The Quiet?' demonstrates that mediated\ + \ technological experiences are capable of evoking meditative states of consciousness,\ + \ facilitating individual and group connectivity, and deepening awareness of the\ + \ natural world. As such, this installation opens the door to future research\ + \ regarding the possibility of immersive experiences supporting humanitarian needs.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {McLean J Macionis and Ajay Kapur},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672984},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {335--338},\n\ + \ publisher = {UFRGS},\n title = {Where Is The Quiet: Immersive Experience Design\ + \ Using the Brain, Mechatronics, and Machine Learning},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper064.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.fe9a0d82 + doi: 10.5281/zenodo.3672984 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/Ul9s8qSMUgU - title: Transformer Neural Networks for Automated Rhythm Generation - url: https://nime.pubpub.org/pub/8947fhly - year: 2021 + pages: 335--338 + publisher: UFRGS + title: 'Where Is The Quiet: Immersive Experience Design Using the Brain, Mechatronics, + and Machine Learning' + url: http://www.nime.org/proceedings/2019/nime2019_paper064.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_34 - abstract: 'This paper 
provides a study of a workshop which invited composers, musicians, - and sound designers to explore instruments from the history of electronic sound - in Sweden. The workshop participants applied media archaeology methods towards - analyzing one particular instrument from the past, the Dataton System 3000. They - then applied design fiction methods towards imagining several speculative instruments - of the future. Each stage of the workshop revealed very specific utopian ideas - surrounding the design of sound instruments. After introducing the background - and methods of the workshop, the authors present an overview and thematic analysis - of the workshop''s outcomes. The paper concludes with some reflections on the - use of this method-in-progress for investigating the ethics and affordances of - historical electronic sound instruments. It also suggests the significance of - ethics and affordances for the design of contemporary instruments.' - address: 'Shanghai, China' - articleno: 34 - author: 'Holzer, Derek and Frisk, Henrik and Holzapfel, Andre' - bibtex: "@inproceedings{NIME21_34,\n abstract = {This paper provides a study of\ - \ a workshop which invited composers, musicians, and sound designers to explore\ - \ instruments from the history of electronic sound in Sweden. The workshop participants\ - \ applied media archaeology methods towards analyzing one particular instrument\ - \ from the past, the Dataton System 3000. They then applied design fiction methods\ - \ towards imagining several speculative instruments of the future. Each stage\ - \ of the workshop revealed very specific utopian ideas surrounding the design\ - \ of sound instruments. 
After introducing the background and methods of the workshop,\ - \ the authors present an overview and thematic analysis of the workshop's outcomes.\ - \ The paper concludes with some reflections on the use of this method-in-progress\ - \ for investigating the ethics and affordances of historical electronic sound\ - \ instruments. It also suggests the significance of ethics and affordances for\ - \ the design of contemporary instruments.},\n address = {Shanghai, China},\n articleno\ - \ = {34},\n author = {Holzer, Derek and Frisk, Henrik and Holzapfel, Andre},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.2723647f},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/qBapYX7IOHA},\n title\ - \ = {Sounds of Futures Passed: Media Archaeology and Design Fiction as NIME Methodologies},\n\ - \ url = {https://nime.pubpub.org/pub/200fpd5a},\n year = {2021}\n}\n" + ID: Carson2019 + abstract: 'Mesh Garden explores participatory music-making with smart- phones using + an audio sequencer game made up of a distributed smartphone speaker system. The + piece allows a group of people in a relaxed situation to create a piece of ambient + music using their smartphones networked through the internet. The players'' interactions + with the music are derived from the orientations of their phones. The work also + has a gameplay aspect; if two players'' phones match in orientation, one player + has the option to take the other player''s note, building up a bank of notes that + will be used to form a melody.' + address: 'Porto Alegre, Brazil' + author: Tate Carson + bibtex: "@inproceedings{Carson2019,\n abstract = {Mesh Garden explores participatory\ + \ music-making with smart- phones using an audio sequencer game made up of a distributed\ + \ smartphone speaker system. 
The piece allows a group of people in a relaxed situation\ + \ to create a piece of ambient music using their smartphones networked through\ + \ the internet. The players' interactions with the music are derived from the\ + \ orientations of their phones. The work also has a gameplay aspect; if two players'\ + \ phones match in orientation, one player has the option to take the other player's\ + \ note, building up a bank of notes that will be used to form a melody.},\n address\ + \ = {Porto Alegre, Brazil},\n author = {Tate Carson},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672986},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {339--342},\n publisher = {UFRGS},\n\ + \ title = {Mesh Garden: A creative-based musical game for participatory musical\ + \ performance },\n url = {http://www.nime.org/proceedings/2019/nime2019_paper065.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.2723647f + doi: 10.5281/zenodo.3672986 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/qBapYX7IOHA - title: 'Sounds of Futures Passed: Media Archaeology and Design Fiction as NIME Methodologies' - url: https://nime.pubpub.org/pub/200fpd5a - year: 2021 + pages: 339--342 + publisher: UFRGS + title: 'Mesh Garden: A creative-based musical game for participatory musical performance ' + url: http://www.nime.org/proceedings/2019/nime2019_paper065.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_35 - abstract: 'Audio and haptic sensations have previously been linked in the development - of NIMEs and in other domains like human-computer interaction. 
Most efforts to - work with these modalities together tend to either treat haptics as secondary - to audio, or conversely, audio as secondary to haptics, and design sensations - in each modality separately. In this paper, we investigate the possibility of - designing audio and vibrotactile effects simultaneously by interpolating audio-haptic - control spaces. An inverse radial basis function method is used to dynamically - create a mapping from a two-dimensional space to a many-dimensional control space - for multimodal effects based on user-specified control points. Two proofs of concept - were developed focusing on modifying the same structure across modalities and - parallel structures.' - address: 'Shanghai, China' - articleno: 35 - author: 'Regimbal, Juliette and Wanderley, Marcelo M.' - bibtex: "@inproceedings{NIME21_35,\n abstract = {Audio and haptic sensations have\ - \ previously been linked in the development of NIMEs and in other domains like\ - \ human-computer interaction. Most efforts to work with these modalities together\ - \ tend to either treat haptics as secondary to audio, or conversely, audio as\ - \ secondary to haptics, and design sensations in each modality separately. In\ - \ this paper, we investigate the possibility of designing audio and vibrotactile\ - \ effects simultaneously by interpolating audio-haptic control spaces. An inverse\ - \ radial basis function method is used to dynamically create a mapping from a\ - \ two-dimensional space to a many-dimensional control space for multimodal effects\ - \ based on user-specified control points. 
Two proofs of concept were developed\ - \ focusing on modifying the same structure across modalities and parallel structures.},\n\ - \ address = {Shanghai, China},\n articleno = {35},\n author = {Regimbal, Juliette\ - \ and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.1084cb07},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/eH3mn1Ad5BE},\n\ - \ title = {Interpolating Audio and Haptic Control Spaces},\n url = {https://nime.pubpub.org/pub/zd2z1evu},\n\ - \ year = {2021}\n}\n" + ID: Rossmy2019 + abstract: 'In this paper we draw a picture that captures the increasing interest + in the format of modular synthesizers today. We therefore provide a historical + summary, which includes the origins, the fall and the rediscovery of that technology. + Further an empirical analysis is performed based on statements given by artists + and manufacturers taken from published interviews. These statements were aggregated, + objectified and later reviewed by an expert group consisting of modular synthesizer + vendors. Their responses provide the basis for the discussion on how emerging + trends in synthesizer interface design reveal challenges and opportunities for + the NIME community. ' + address: 'Porto Alegre, Brazil' + author: Beat Rossmy and Alexander Wiethoff + bibtex: "@inproceedings{Rossmy2019,\n abstract = {In this paper we draw a picture\ + \ that captures the increasing interest in the format of modular synthesizers\ + \ today. We therefore provide a historical summary, which includes the origins,\ + \ the fall and the rediscovery of that technology. Further an empirical analysis\ + \ is performed based on statements given by artists and manufacturers taken from\ + \ published interviews. These statements were aggregated, objectified and later\ + \ reviewed by an expert group consisting of modular synthesizer vendors. 
Their\ + \ responses provide the basis for the discussion on how emerging trends in synthesizer\ + \ interface design reveal challenges and opportunities for the NIME community.\ + \ },\n address = {Porto Alegre, Brazil},\n author = {Beat Rossmy and Alexander\ + \ Wiethoff},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672988},\n editor\ + \ = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {343--348},\n publisher = {UFRGS},\n title = {The Modular Backward Evolution\ + \ --- Why to Use Outdated Technologies},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper066.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.1084cb07 + doi: 10.5281/zenodo.3672988 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/eH3mn1Ad5BE - title: Interpolating Audio and Haptic Control Spaces - url: https://nime.pubpub.org/pub/zd2z1evu - year: 2021 + pages: 343--348 + publisher: UFRGS + title: The Modular Backward Evolution --- Why to Use Outdated Technologies + url: http://www.nime.org/proceedings/2019/nime2019_paper066.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_36 - abstract: 'Algorithmic Power Ballads is a performance for Saxophone and autonomous - improvisor, with an optional third performer who can use the web interface to - hand-write note sequences, and adjust synthesis parameters. The performance system - explores shifting power dynamics between acoustic, algorithmic and autonomous - performers through modifying the amount of control and agency they have over the - sound over the duration of the performance. 
A higher-level algorithm how strongly - the machine listening algorithms, which analyse the saxophone input, influence - the rhythmic and melodic patterns generated by the system. The autonomous improvisor - is trained on power ballad melodies prior to the performance and in lieu of influence - from the saxophonist and live coder strays towards melodic phrases from this musical - style. The piece is written in javascript and WebAudio API and uses MMLL a browser-based - machine listening library.' - address: 'Shanghai, China' - articleno: 36 - author: 'Knotts, Shelly' - bibtex: "@inproceedings{NIME21_36,\n abstract = {Algorithmic Power Ballads is a\ - \ performance for Saxophone and autonomous improvisor, with an optional third\ - \ performer who can use the web interface to hand-write note sequences, and adjust\ - \ synthesis parameters. The performance system explores shifting power dynamics\ - \ between acoustic, algorithmic and autonomous performers through modifying the\ - \ amount of control and agency they have over the sound over the duration of the\ - \ performance. A higher-level algorithm how strongly the machine listening algorithms,\ - \ which analyse the saxophone input, influence the rhythmic and melodic patterns\ - \ generated by the system. The autonomous improvisor is trained on power ballad\ - \ melodies prior to the performance and in lieu of influence from the saxophonist\ - \ and live coder strays towards melodic phrases from this musical style. 
The piece\ - \ is written in javascript and WebAudio API and uses MMLL a browser-based machine\ - \ listening library.},\n address = {Shanghai, China},\n articleno = {36},\n author\ - \ = {Knotts, Shelly},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.548cca2b},\n\ - \ issn = {2220-4806},\n month = {June},\n title = {Algorithmic Power Ballads},\n\ - \ url = {https://nime.pubpub.org/pub/w2ubqkv4},\n year = {2021}\n}\n" + ID: Goudard2019 + abstract: 'This article questions the notion of ephemerality of digital musical + instruments (DMI). Longevity is generally regarded as a valuable quality that + good design criteria should help to achieve. However, the nature of the tools, + of the performance conditions and of the music itself may lead to think of ephemerality + as an intrinsic modality of the existence of DMIs. In particular, the conditions + of contemporary musical production suggest that contextual adaptations of instrumental + devices beyond the monolithic unity of classical instruments should be considered. + The first two parts of this article analyse various reasons to reassess the issue + of longevity and ephemerality. The last two sections attempt to propose an articulation + of these two aspects to inform both the design of the DMI and their learning.' + address: 'Porto Alegre, Brazil' + author: Vincent Goudard + bibtex: "@inproceedings{Goudard2019,\n abstract = {This article questions the notion\ + \ of ephemerality of digital musical instruments (DMI). Longevity is generally\ + \ regarded as a valuable quality that good design criteria should help to achieve.\ + \ However, the nature of the tools, of the performance conditions and of the music\ + \ itself may lead to think of ephemerality as an intrinsic modality of the existence\ + \ of DMIs. 
In particular, the conditions of contemporary musical production suggest\ + \ that contextual adaptations of instrumental devices beyond the monolithic unity\ + \ of classical instruments should be considered. The first two parts of this article\ + \ analyse various reasons to reassess the issue of longevity and ephemerality.\ + \ The last two sections attempt to propose an articulation of these two aspects\ + \ to inform both the design of the DMI and their learning.},\n address = {Porto\ + \ Alegre, Brazil},\n author = {Vincent Goudard},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.3672990},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {349--354},\n publisher = {UFRGS},\n\ + \ title = {Ephemeral instruments},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper067.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.548cca2b + doi: 10.5281/zenodo.3672990 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - title: Algorithmic Power Ballads - url: https://nime.pubpub.org/pub/w2ubqkv4 - year: 2021 + pages: 349--354 + publisher: UFRGS + title: Ephemeral instruments + url: http://www.nime.org/proceedings/2019/nime2019_paper067.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_37 - abstract: 'In this paper, Entangled, a multi-modal instrument in virtual 3D space - with sound, graphics, and the smartphone-based gestural interface for multi-user - is introduced. Within the same network, the players can use their smartphone as - the controller by entering a specific URL into their smartphone’s browser. After - joining the network, by actuating the smartphone''s accelerometer, the players - apply gravitational force to a swarm of particles in the virtual space. 
Machine - learning-based gesture pattern recognition is parallelly used to increase the - functionality of the gestural command. Through this interface, the player can - achieve intuitive control of gravitation in virtual reality (VR) space. The gravitation - becomes the medium of the system involving physics, graphics, and sonification - which composes a multimodal compositional language with cross-modal correspondence. Entangled is - built on AlloLib, which is a cross-platform suite of C++ components for building - interactive multimedia tools and applications. Throughout the script, the reason - for each decision is elaborated arguing the importance of crossmodal correspondence - in the design procedure.' - address: 'Shanghai, China' - articleno: 37 - author: 'Lee, Myungin' - bibtex: "@inproceedings{NIME21_37,\n abstract = {In this paper, Entangled, a multi-modal\ - \ instrument in virtual 3D space with sound, graphics, and the smartphone-based\ - \ gestural interface for multi-user is introduced. Within the same network, the\ - \ players can use their smartphone as the controller by entering a specific URL\ - \ into their smartphone’s browser. After joining the network, by actuating the\ - \ smartphone's accelerometer, the players apply gravitational force to a swarm\ - \ of particles in the virtual space. Machine learning-based gesture pattern recognition\ - \ is parallelly used to increase the functionality of the gestural command. Through\ - \ this interface, the player can achieve intuitive control of gravitation in virtual\ - \ reality (VR) space. The gravitation becomes the medium of the system involving\ - \ physics, graphics, and sonification which composes a multimodal compositional\ - \ language with cross-modal correspondence. Entangled is built on AlloLib, which\ - \ is a cross-platform suite of C++ components for building interactive multimedia\ - \ tools and applications. 
Throughout the script, the reason for each decision\ - \ is elaborated arguing the importance of crossmodal correspondence in the design\ - \ procedure.},\n address = {Shanghai, China},\n articleno = {37},\n author = {Lee,\ - \ Myungin},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.eae7c23f},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/NjpXFYDvuZw},\n\ - \ title = {Entangled: A Multi-Modal, Multi-User Interactive Instrument in Virtual\ - \ 3D Space Using the Smartphone for Gesture Control},\n url = {https://nime.pubpub.org/pub/4gt8wiy0},\n\ - \ year = {2021}\n}\n" + ID: Jaramillo2019 + abstract: 'This paper reports the conception, design, implementation and evaluation + processes of PICO, a portable audio effect system created with Pure Data and the + Raspberry Pi, which augments traditional plucked string instruments such as the + Brazilian Cavaquinho, the Venezuelan Cuatro, the Colombian Tiple and the Peruvian/Bolivian + Charango. A fabric soft case fixed to the instrument`s body holds the PICO modules: + the touchscreen, the single board computer, the sound card, the speaker system + and the DC power bank. The device audio specifications arose from musicological + insights about the social role of performers in their musical contexts and the + instruments'' playing techniques. They were taken as design challenges in the + creation process of PICO`s first prototype, which was submitted to a short evaluation. + Along with the construction of PICO, we reflected over the design of an interactive + audio interface as a mode of research. Therefore, the paper will also discuss + methodological aspects of audio hardware design.' 
+ address: 'Porto Alegre, Brazil' + author: Julian Jaramillo and Fernando Iazzetta + bibtex: "@inproceedings{Jaramillo2019,\n abstract = {This paper reports the conception,\ + \ design, implementation and evaluation processes of PICO, a portable audio effect\ + \ system created with Pure Data and the Raspberry Pi, which augments traditional\ + \ plucked string instruments such as the Brazilian Cavaquinho, the Venezuelan\ + \ Cuatro, the Colombian Tiple and the Peruvian/Bolivian Charango. A fabric soft\ + \ case fixed to the instrument`s body holds the PICO modules: the touchscreen,\ + \ the single board computer, the sound card, the speaker system and the DC power\ + \ bank. The device audio specifications arose from musicological insights about\ + \ the social role of performers in their musical contexts and the instruments'\ + \ playing techniques. They were taken as design challenges in the creation process\ + \ of PICO`s first prototype, which was submitted to a short evaluation. Along\ + \ with the construction of PICO, we reflected over the design of an interactive\ + \ audio interface as a mode of research. 
Therefore, the paper will also discuss\ + \ methodological aspects of audio hardware design.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {Julian Jaramillo and Fernando Iazzetta},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.3672992},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {355--360},\n publisher = {UFRGS},\n\ + \ title = {{PICO}: A portable audio effect box for traditional plucked-string\ + \ instruments},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper068.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.eae7c23f + doi: 10.5281/zenodo.3672992 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/NjpXFYDvuZw - title: 'Entangled: A Multi-Modal, Multi-User Interactive Instrument in Virtual 3D - Space Using the Smartphone for Gesture Control' - url: https://nime.pubpub.org/pub/4gt8wiy0 - year: 2021 + pages: 355--360 + publisher: UFRGS + title: 'PICO: A portable audio effect box for traditional plucked-string instruments' + url: http://www.nime.org/proceedings/2019/nime2019_paper068.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_38 - abstract: 'We present Spire Muse, a co-creative musical agent that engages in different - kinds of interactive behaviors. The software utilizes corpora of solo instrumental - performances encoded as self-organized maps and outputs slices of the corpora - as concatenated, remodeled audio sequences. Transitions between behaviors can - be automated, and the interface enables the negotiation of these transitions through - feedback buttons that signal approval, force reversions to previous behaviors, - or request change. 
Musical responses are embedded in a pre-trained latent space, - emergent in the interaction, and influenced through the weighting of rhythmic, - spectral, harmonic, and melodic features. The training and run-time modules utilize - a modified version of the MASOM agent architecture. Our model stimulates spontaneous - creativity and reduces the need for the user to sustain analytical mind frames, - thereby optimizing flow. The agent traverses a system autonomy axis ranging from - reactive to proactive, which includes the behaviors of shadowing, mirroring, and - coupling. A fourth behavior—negotiation—is emergent from the interface between - agent and user. The synergy of corpora, interactive modes, and influences induces - musical responses along a musical similarity axis from converging to diverging. - We share preliminary observations from experiments with the agent and discuss - design challenges and future prospects.' - address: 'Shanghai, China' - articleno: 38 - author: 'Thelle, Notto J. W. and Pasquier, Philippe' - bibtex: "@inproceedings{NIME21_38,\n abstract = {We present Spire Muse, a co-creative\ - \ musical agent that engages in different kinds of interactive behaviors. The\ - \ software utilizes corpora of solo instrumental performances encoded as self-organized\ - \ maps and outputs slices of the corpora as concatenated, remodeled audio sequences.\ - \ Transitions between behaviors can be automated, and the interface enables the\ - \ negotiation of these transitions through feedback buttons that signal approval,\ - \ force reversions to previous behaviors, or request change. Musical responses\ - \ are embedded in a pre-trained latent space, emergent in the interaction, and\ - \ influenced through the weighting of rhythmic, spectral, harmonic, and melodic\ - \ features. The training and run-time modules utilize a modified version of the\ - \ MASOM agent architecture. 
Our model stimulates spontaneous creativity and reduces\ - \ the need for the user to sustain analytical mind frames, thereby optimizing\ - \ flow. The agent traverses a system autonomy axis ranging from reactive to proactive,\ - \ which includes the behaviors of shadowing, mirroring, and coupling. A fourth\ - \ behavior—negotiation—is emergent from the interface between agent and user.\ - \ The synergy of corpora, interactive modes, and influences induces musical responses\ - \ along a musical similarity axis from converging to diverging. We share preliminary\ - \ observations from experiments with the agent and discuss design challenges and\ - \ future prospects.},\n address = {Shanghai, China},\n articleno = {38},\n author\ - \ = {Thelle, Notto J. W. and Pasquier, Philippe},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.21428/92fbeb44.84c0b364},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/4QMQNyoGfOs},\n title = {Spire Muse: A Virtual Musical Partner\ - \ for Creative Brainstorming},\n url = {https://nime.pubpub.org/pub/wcj8sjee},\n\ - \ year = {2021}\n}\n" + ID: Bertissolo2019 + abstract: ' This paper focuses on ongoing research in music composition based on the + study of cognitive research in musical meaning. As a method and result at the + same time, we propose the creation of experiments related to key issues in composition + and music cognition, such as music and movement, memory, expectation and metaphor + in creative process. The theoretical reference approached is linked to the embodied + cognition, with unfolding related to the cognitive semantics and the enactivist + current of cognitive sciences, among other domains of contemporary sciences of + mind and neuroscience. 
The experiments involve the relationship between music + and movement, based on prior research using as a reference context in which it + is not possible to establish a clear distinction between them: the Capoeira. Finally, + we proposes a discussion about the application of the theoretical approach in + two compositions: Boreal IV, for Steel Drums and real time electronics, and Converse, + collaborative multimedia piece for piano, real-time audio (Puredata) and video + processing (GEM and live video) and a dancer.' + address: 'Porto Alegre, Brazil' + author: Guilherme Bertissolo + bibtex: "@inproceedings{Bertissolo2019,\n abstract = { This paper focuses on ongoing\ + \ research in music composition based on the study of cognitive research in\ + \ musical meaning. As a method and result at the same time, we propose the creation\ + \ of experiments related to key issues in composition and music cognition, such\ + \ as music and movement, memory, expectation and metaphor in creative process.\ + \ The theoretical reference approached is linked to the embodied cognition, with\ + \ unfolding related to the cognitive semantics and the enactivist current of cognitive\ + \ sciences, among other domains of contemporary sciences of mind and neuroscience.\ + \ The experiments involve the relationship between music and movement, based on\ + \ prior research using as a reference context in which it is not possible to establish\ + \ a clear distinction between them: the Capoeira. 
Finally, we proposes a discussion\ + \ about the application of the theoretical approach in two compositions: Boreal\ + \ IV, for Steel Drums and real time electronics, and Converse, collaborative multimedia\ + \ piece for piano, real-time audio (Puredata) and video processing (GEM and live\ + \ video) and a dancer.},\n address = {Porto Alegre, Brazil},\n author = {Guilherme\ + \ Bertissolo},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3672994},\n editor\ + \ = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n\ + \ pages = {361--364},\n publisher = {UFRGS},\n title = {Composing Understandings:\ + \ music, motion, gesture and embodied cognition},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper069.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.84c0b364 + doi: 10.5281/zenodo.3672994 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/4QMQNyoGfOs - title: 'Spire Muse: A Virtual Musical Partner for Creative Brainstorming' - url: https://nime.pubpub.org/pub/wcj8sjee - year: 2021 + pages: 361--364 + publisher: UFRGS + title: 'Composing Understandings: music, motion, gesture and embodied cognition' + url: http://www.nime.org/proceedings/2019/nime2019_paper069.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_39 - abstract: 'This paper introduces a new Electrumpet control system that affords for - quick and easy access to all its electro-acoustic features. The new implementation - uses virtuosic gestures learned on the acoustic trumpet for quick electronic control, - showing its effectiveness by controlling an innovative interactive harmoniser. 
- Seamless transition from the smooth but rigid, often uncommunicative sound of - the harmoniser to a more noisy, open and chaotic sound world required the addition - of extra features and scenarios. This prepares the instrument for multiple musical - environments, including free improvised settings with large sonic diversity. The - system should particularly interest virtuoso improvising electroacoustic musicians - and hyperinstrument player/developers that combine many musical styles in their - art and that look for inspiration to use existing virtuosity for electronic control.' - address: 'Shanghai, China' - articleno: 39 - author: 'Leeuw, Hans' - bibtex: "@inproceedings{NIME21_39,\n abstract = {This paper introduces a new Electrumpet\ - \ control system that affords for quick and easy access to all its electro-acoustic\ - \ features. The new implementation uses virtuosic gestures learned on the acoustic\ - \ trumpet for quick electronic control, showing its effectiveness by controlling\ - \ an innovative interactive harmoniser. Seamless transition from the smooth but\ - \ rigid, often uncommunicative sound of the harmoniser to a more noisy, open and\ - \ chaotic sound world required the addition of extra features and scenarios. This\ - \ prepares the instrument for multiple musical environments, including free improvised\ - \ settings with large sonic diversity. The system should particularly interest\ - \ virtuoso improvising electroacoustic musicians and hyperinstrument player/developers\ - \ that combine many musical styles in their art and that look for inspiration\ - \ to use existing virtuosity for electronic control.},\n address = {Shanghai,\ - \ China},\n articleno = {39},\n author = {Leeuw, Hans},\n booktitle = {Proceedings\ + ID: RamosFlores2019 + abstract: 'New interfaces allow performers to access new possibilities of musical + expression. 
Even though interfaces are often designed to be adaptable to different + software, most of them rely on external speakers or similar transducers. This + often results on disembodiment and acoustic disengagement from the interface, + and in the case of augmented instruments, from the instruments themselves. This + paper describes a project in which a hybrid system allows an acoustic integration + between the sound of acoustic saxophone and electronics.' + address: 'Porto Alegre, Brazil' + author: Cristohper Ramos Flores and Jim Murphy and Michael Norris + bibtex: "@inproceedings{RamosFlores2019,\n abstract = {New interfaces allow performers\ + \ to access new possibilities of musical expression. Even though interfaces are\ + \ often designed to be adaptable to different software, most of them rely on external\ + \ speakers or similar transducers. This often results on disembodiment and acoustic\ + \ disengagement from the interface, and in the case of augmented instruments,\ + \ from the instruments themselves. 
This paper describes a project in which a hybrid\ + \ system allows an acoustic integration between the sound of acoustic saxophone\ + \ and electronics.},\n address = {Porto Alegre, Brazil},\n author = {Cristohper\ + \ Ramos Flores and Jim Murphy and Michael Norris},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.a8e0cceb},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/oHM_WfHOGUo},\n title = {Virtuoso mapping\ - \ for the Electrumpet, a hyperinstrument strategy},\n url = {https://nime.pubpub.org/pub/fxe52ym6},\n\ - \ year = {2021}\n}\n" + \ doi = {10.5281/zenodo.3672996},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {365--370},\n publisher = {UFRGS},\n\ + \ title = {HypeSax: Saxophone acoustic augmentation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper070.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.a8e0cceb + doi: 10.5281/zenodo.3672996 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/oHM_WfHOGUo - title: 'Virtuoso mapping for the Electrumpet, a hyperinstrument strategy' - url: https://nime.pubpub.org/pub/fxe52ym6 - year: 2021 + pages: 365--370 + publisher: UFRGS + title: 'HypeSax: Saxophone acoustic augmentation' + url: http://www.nime.org/proceedings/2019/nime2019_paper070.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_4 - abstract: 'In this paper, we discuss the importance of replicability in Digital - Musical Instrument (DMI) design and the NIME community. Replication enables us - to: create new artifacts based on existing ones, experiment DMIs in different - contexts and cultures, and validate obtained results from evaluations. 
We investigate - how the papers present artifact documentation and source code by analyzing the - NIME proceedings from 2018, 2019, and 2020. We argue that the presence and the - quality of documentation are good indicators of replicability and can be beneficial - for the NIME community. Finally, we discuss the importance of documentation for - replication, propose a call to action towards more replicable projects, and present - a practical guide informing future steps toward replicability in the NIME community.' - address: 'Shanghai, China' - articleno: 4 - author: 'Calegario, Filipe and Tragtenberg, João and Frisson, Christian and Meneses, - Eduardo and Malloch, Joseph and Cusson, Vincent and Wanderley, Marcelo M.' - bibtex: "@inproceedings{NIME21_4,\n abstract = {In this paper, we discuss the importance\ - \ of replicability in Digital Musical Instrument (DMI) design and the NIME community.\ - \ Replication enables us to: create new artifacts based on existing ones, experiment\ - \ DMIs in different contexts and cultures, and validate obtained results from\ - \ evaluations. We investigate how the papers present artifact documentation and\ - \ source code by analyzing the NIME proceedings from 2018, 2019, and 2020. We\ - \ argue that the presence and the quality of documentation are good indicators\ - \ of replicability and can be beneficial for the NIME community. 
Finally, we discuss\ - \ the importance of documentation for replication, propose a call to action towards\ - \ more replicable projects, and present a practical guide informing future steps\ - \ toward replicability in the NIME community.},\n address = {Shanghai, China},\n\ - \ articleno = {4},\n author = {Calegario, Filipe and Tragtenberg, João and Frisson,\ - \ Christian and Meneses, Eduardo and Malloch, Joseph and Cusson, Vincent and Wanderley,\ - \ Marcelo M.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.dc50e34d},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/ySh5SueLMAA},\n\ - \ title = {Documentation and Replicability in the NIME Community},\n url = {https://nime.pubpub.org/pub/czq0nt9i},\n\ - \ year = {2021}\n}\n" + ID: Chwalek2019 + abstract: 'We describe the design of an untethered digital synthesizer that can + be held and manipulated while broadcasting audio data to a receiving off-the-shelf + Bluetooth receiver. The synthesizer allows the user to freely rotate and reorient + the instrument while exploiting non-contact light sensing for a truly expressive + performance. The system consists of a suite of sensors that convert rotation, + orientation, touch, and user proximity into various audio filters and effects + operated on preset wave tables, while offering a persistence of vision display + for input visualization. This paper discusses the design of the system, including + the circuit, mechanics, and software layout, as well as how this device may be + incorporated into a performance. ' + address: 'Porto Alegre, Brazil' + author: Patrick Chwalek and Joe Paradiso + bibtex: "@inproceedings{Chwalek2019,\n abstract = {We describe the design of an\ + \ untethered digital synthesizer that can be held and manipulated while broadcasting\ + \ audio data to a receiving off-the-shelf Bluetooth receiver. 
The synthesizer\ + \ allows the user to freely rotate and reorient the instrument while exploiting\ + \ non-contact light sensing for a truly expressive performance. The system consists\ + \ of a suite of sensors that convert rotation, orientation, touch, and user proximity\ + \ into various audio filters and effects operated on preset wave tables, while\ + \ offering a persistence of vision display for input visualization. This paper\ + \ discusses the design of the system, including the circuit, mechanics, and software\ + \ layout, as well as how this device may be incorporated into a performance. },\n\ + \ address = {Porto Alegre, Brazil},\n author = {Patrick Chwalek and Joe Paradiso},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672998},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {371--374},\n\ + \ publisher = {UFRGS},\n title = {CD-Synth: a Rotating, Untethered, Digital Synthesizer},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_paper071.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.dc50e34d + doi: 10.5281/zenodo.3672998 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/ySh5SueLMAA - title: Documentation and Replicability in the NIME Community - url: https://nime.pubpub.org/pub/czq0nt9i - year: 2021 + pages: 371--374 + publisher: UFRGS + title: 'CD-Synth: a Rotating, Untethered, Digital Synthesizer' + url: http://www.nime.org/proceedings/2019/nime2019_paper071.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_40 - abstract: 'The use of crowdsourced sounds in live coding can be seen as an example - of asynchronous collaboration. 
It is not uncommon for crowdsourced databases to - return unexpected results to the queries submitted by a user. In such a situation, - a live coder is likely to require some degree of additional filtering to adapt - the results to her/his musical intentions. We refer to this context-dependent - decisions as situated musical actions. Here, we present directions for designing - a customisable virtual companion to help live coders in their practice. In particular, - we introduce a machine learning (ML) model that, based on a set of examples provided - by the live coder, filters the crowdsourced sounds retrieved from the Freesound - online database at performance time. We evaluated a first illustrative model using - objective and subjective measures. We tested a more generic live coding framework - in two performances and two workshops, where several ML models have been trained - and used. We discuss the promising results for ML in education, live coding practices - and the design of future NIMEs.' - address: 'Shanghai, China' - articleno: 40 - author: 'Xambó, Anna and Roma, Gerard and Roig, Sam and Solaz, Eduard' - bibtex: "@inproceedings{NIME21_40,\n abstract = {The use of crowdsourced sounds\ - \ in live coding can be seen as an example of asynchronous collaboration. It is\ - \ not uncommon for crowdsourced databases to return unexpected results to the\ - \ queries submitted by a user. In such a situation, a live coder is likely to\ - \ require some degree of additional filtering to adapt the results to her/his\ - \ musical intentions. We refer to this context-dependent decisions as situated\ - \ musical actions. Here, we present directions for designing a customisable virtual\ - \ companion to help live coders in their practice. In particular, we introduce\ - \ a machine learning (ML) model that, based on a set of examples provided by the\ - \ live coder, filters the crowdsourced sounds retrieved from the Freesound online\ - \ database at performance time. 
We evaluated a first illustrative model using\ - \ objective and subjective measures. We tested a more generic live coding framework\ - \ in two performances and two workshops, where several ML models have been trained\ - \ and used. We discuss the promising results for ML in education, live coding\ - \ practices and the design of future NIMEs.},\n address = {Shanghai, China},\n\ - \ articleno = {40},\n author = {Xambó, Anna and Roma, Gerard and Roig, Sam and\ - \ Solaz, Eduard},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.64c9f217},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/F4UoH1hRMoU},\n\ - \ title = {Live Coding with the Cloud and a Virtual Agent},\n url = {https://nime.pubpub.org/pub/zpdgg2fg},\n\ - \ year = {2021}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.21428/92fbeb44.64c9f217 - issn: 2220-4806 - month: June - presentation-video: https://youtu.be/F4UoH1hRMoU - title: Live Coding with the Cloud and a Virtual Agent - url: https://nime.pubpub.org/pub/zpdgg2fg - year: 2021 + ID: Granieri2019 + abstract: 'This paper presents Reach, a keyboard-based gesture recog- nition system + for live piano sound modulation. Reach is a system built using the Leap Motion + Orion SDK, Pure Data and a custom C++ OSC mapper1. It provides control over the + sound modulation of an acoustic piano using the pi- anist''s ancillary gestures. + The system was developed using an iterative design pro- cess, incorporating research + findings from two user studies and several case studies. The results that emerged + show the potential of recognising and utilising the pianist''s existing technique + when designing keyboard-based DMIs, reducing the requirement to learn additional + techniques.' 
+ address: 'Porto Alegre, Brazil' + author: Niccolò Granieri and James Dooley + bibtex: "@inproceedings{Granieri2019,\n abstract = {This paper presents Reach, a\ + \ keyboard-based gesture recog- nition system for live piano sound modulation.\ + \ Reach is a system built using the Leap Motion Orion SDK, Pure Data and a custom\ + \ C++ OSC mapper1. It provides control over the sound modulation of an acoustic\ + \ piano using the pi- anist's ancillary gestures. The system was developed using\ + \ an iterative design pro- cess, incorporating research findings from two user\ + \ studies and several case studies. The results that emerged show the potential\ + \ of recognising and utilising the pianist's existing technique when designing\ + \ keyboard-based DMIs, reducing the requirement to learn additional techniques.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Niccolò Granieri and James Dooley},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3673000},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {375--376},\n\ + \ publisher = {UFRGS},\n title = {Reach: a keyboard-based gesture recognition\ + \ system for live piano sound modulation},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper072.pdf},\n\ + \ year = {2019}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.3673000 + editor: Marcelo Queiroz and Anna Xambó Sedó + issn: 2220-4806 + month: June + pages: 375--376 + publisher: UFRGS + title: 'Reach: a keyboard-based gesture recognition system for live piano sound + modulation' + url: http://www.nime.org/proceedings/2019/nime2019_paper072.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_41 - abstract: 'In this paper, we propose COSMIC, a COnverSational Interface for Human-AI - MusIc Co-Creation. 
It is a chatbot with a two-fold design philosophy: to understand - human creative intent and to help humans in their creation. The core Natural Language - Processing (NLP) module is responsible for three functions: 1) understanding human - needs in chat, 2) cross-modal interaction between natural language understanding - and music generation models, and 3) mixing and coordinating multiple algorithms - to complete the composition.1' - address: 'Shanghai, China' - articleno: 41 - author: 'Zhang, Yixiao and Xia, Gus and Levy, Mark and Dixon, Simon' - bibtex: "@inproceedings{NIME21_41,\n abstract = {In this paper, we propose COSMIC,\ - \ a COnverSational Interface for Human-AI MusIc Co-Creation. It is a chatbot with\ - \ a two-fold design philosophy: to understand human creative intent and to help\ - \ humans in their creation. The core Natural Language Processing (NLP) module\ - \ is responsible for three functions: 1) understanding human needs in chat, 2)\ - \ cross-modal interaction between natural language understanding and music generation\ - \ models, and 3) mixing and coordinating multiple algorithms to complete the composition.1},\n\ - \ address = {Shanghai, China},\n articleno = {41},\n author = {Zhang, Yixiao and\ - \ Xia, Gus and Levy, Mark and Dixon, Simon},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.110a7a32},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/o5YO0ni7sng},\n title = {COSMIC: A Conversational Interface\ - \ for Human-AI Music Co-Creation},\n url = {https://nime.pubpub.org/pub/in6wsc9t},\n\ - \ year = {2021}\n}\n" + ID: schedel2019 + abstract: 'This paper describes the creation of a NIME created from an iron and + wooden ironing board. The ironing board acts as a resonator for the system which + includes sensors embedded in the iron such as pressure, and piezo microphones. 
+ The iron has LEDs wired to the sides and at either end of the board are CCDs; + using machine learning we can identify what kind of fabric is being ironed, and + the position of the iron along the x and y-axes as well as its rotation and tilt. + This instrument is part of a larger project, Women''s Labor, that juxtaposes traditional + musical instruments such as spinets and virginals designated for “ladies” with + new interfaces for musical expression that repurpose older tools of women''s work. + Using embedded technologies, we reimagine domestic tools as musical interfaces, + creating expressive instruments from the appliances of women''s chores.' + address: 'Porto Alegre, Brazil' + author: margaret schedel and Jocelyn Ho and Matthew Blessing + bibtex: "@inproceedings{schedel2019,\n abstract = {This paper describes the creation\ + \ of a NIME created from an iron and wooden ironing board. The ironing board acts\ + \ as a resonator for the system which includes sensors embedded in the iron such\ + \ as pressure, and piezo microphones. The iron has LEDs wired to the sides and\ + \ at either end of the board are CCDs; using machine learning we can identify\ + \ what kind of fabric is being ironed, and the position of the iron along the\ + \ x and y-axes as well as its rotation and tilt. This instrument is part of a\ + \ larger project, Women's Labor, that juxtaposes traditional musical instruments\ + \ such as spinets and virginals designated for “ladies” with new interfaces for\ + \ musical expression that repurpose older tools of women's work. 
Using embedded\ + \ technologies, we reimagine domestic tools as musical interfaces, creating expressive\ + \ instruments from the appliances of women's chores.},\n address = {Porto Alegre,\ + \ Brazil},\n author = {margaret schedel and Jocelyn Ho and Matthew Blessing},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3672729},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {377--380},\n\ + \ publisher = {UFRGS},\n title = {Women's Labor: Creating {NIME}s from Domestic\ + \ Tools },\n url = {http://www.nime.org/proceedings/2019/nime2019_paper073.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.110a7a32 + doi: 10.5281/zenodo.3672729 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/o5YO0ni7sng - title: 'COSMIC: A Conversational Interface for Human-AI Music Co-Creation' - url: https://nime.pubpub.org/pub/in6wsc9t - year: 2021 + pages: 377--380 + publisher: UFRGS + title: 'Women''s Labor: Creating NIMEs from Domestic Tools ' + url: http://www.nime.org/proceedings/2019/nime2019_paper073.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_42 - abstract: 'This paper presents Living Sounds, an internet radio station and online - venue hosted by nature. The virtual space is animated by live sound from a restored - wetland wildlife sanctuary, spatially mixed from dozens of 24/7 streaming microphones - across the landscape. The station’s guests are invited artists and others whose - performances are responsive to and contingent upon the ever-changing environmental - sound. Subtle, sound-active drawings by different visual designers anchor the - one-page website. 
Using low latency, high fidelity WebRTC, our system allows guests - to mix themselves in, remix the raw nature streams, or run our multichannel sources - fully through their own processors. Created in early 2020 in response to the locked - down conditions of the COVID-19 pandemic, the site became a virtual oasis, with - usage data showing long duration visits. In collaboration with several festivals - that went online in 2020, programmed live content included music, storytelling, - and guided meditation. One festival commissioned a local microphone installation, - resulting in a second nature source for the station: 5-channels of sound from - a small Maine island. Catalyzed by recent events, when many have been separated - from environments of inspiration and restoration, we propose Living Sounds as - both a virtual nature space for cohabitation and a new kind of contingent online - venue.' - address: 'Shanghai, China' - articleno: 42 - author: 'Dublon, Gershon and Liu, Xin' - bibtex: "@inproceedings{NIME21_42,\n abstract = {This paper presents Living Sounds,\ - \ an internet radio station and online venue hosted by nature. The virtual space\ - \ is animated by live sound from a restored wetland wildlife sanctuary, spatially\ - \ mixed from dozens of 24/7 streaming microphones across the landscape. The station’s\ - \ guests are invited artists and others whose performances are responsive to and\ - \ contingent upon the ever-changing environmental sound. Subtle, sound-active\ - \ drawings by different visual designers anchor the one-page website. Using low\ - \ latency, high fidelity WebRTC, our system allows guests to mix themselves in,\ - \ remix the raw nature streams, or run our multichannel sources fully through\ - \ their own processors. Created in early 2020 in response to the locked down conditions\ - \ of the COVID-19 pandemic, the site became a virtual oasis, with usage data showing\ - \ long duration visits. 
In collaboration with several festivals that went online\ - \ in 2020, programmed live content included music, storytelling, and guided meditation.\ - \ One festival commissioned a local microphone installation, resulting in a second\ - \ nature source for the station: 5-channels of sound from a small Maine island.\ - \ Catalyzed by recent events, when many have been separated from environments\ - \ of inspiration and restoration, we propose Living Sounds as both a virtual nature\ - \ space for cohabitation and a new kind of contingent online venue.},\n address\ - \ = {Shanghai, China},\n articleno = {42},\n author = {Dublon, Gershon and Liu,\ - \ Xin},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.b90e0fcb},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/tE4YMDf-bQE},\n title\ - \ = {Living Sounds: Live Nature Sound as Online Performance Space},\n url = {https://nime.pubpub.org/pub/46by9xxn},\n\ - \ year = {2021}\n}\n" + ID: RauberDuBois2019 + abstract: 'This paper presents HMusic, a domain specific language based on music + patterns that can be used to write music and live coding. The main abstractions + provided by the language are patterns and tracks. Code written in HMusic looks + like patterns and multi-tracks available in music sequencers and drum machines. + HMusic provides primitives to design and compose patterns generating new patterns. + The basic abstractions provided by the language have an inductive definition and + HMusic is embedded in the Haskell functional programming language, programmers + can design functions to manipulate music on the fly.' 
+ address: 'Porto Alegre, Brazil' + author: Andre Rauber Du Bois and Rodrigo Geraldo Ribeiro + bibtex: "@inproceedings{RauberDuBois2019,\n abstract = {This paper presents HMusic,\ + \ a domain specific language based on music patterns that can be used to write\ + \ music and live coding. The main abstractions provided by the language are patterns\ + \ and tracks. Code written in HMusic looks like patterns and multi-tracks available\ + \ in music sequencers and drum machines. HMusic provides primitives to design\ + \ and compose patterns generating new patterns. The basic abstractions provided\ + \ by the language have an inductive definition and HMusic is embedded in the Haskell\ + \ functional programming language, programmers can design functions to manipulate\ + \ music on the fly.},\n address = {Porto Alegre, Brazil},\n author = {Andre Rauber\ + \ Du Bois and Rodrigo Geraldo Ribeiro},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673003},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {381--386},\n publisher = {UFRGS},\n title = {HMusic: A\ + \ domain specific language for music programming and live coding},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper074.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.b90e0fcb + doi: 10.5281/zenodo.3673003 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/tE4YMDf-bQE - title: 'Living Sounds: Live Nature Sound as Online Performance Space' - url: https://nime.pubpub.org/pub/46by9xxn - year: 2021 + pages: 381--386 + publisher: UFRGS + title: 'HMusic: A domain specific language for music programming and live coding' + url: http://www.nime.org/proceedings/2019/nime2019_paper074.pdf + year: 2019 - 
ENTRYTYPE: inproceedings - ID: NIME21_43 - abstract: 'Speculātor is presented as a fist-sized, battery-powered, environmentally - aware, soundscape augmentation artifact that listens to the sonic environment - and provides real-time illuminated visual feedback in reaction to what it hears. - The visual soundscape augmentations these units offer allow for creating sonic - art installations whose artistic subject is the unaltered in-situ sonic environment. - Speculātor is designed to be quickly installed in exposed outdoor environments - without power infrastructure to allow maximum flexibility when selecting exhibition - locations. Data from light, temperature, and humidity sensors guide behavior to - maximize soundscape augmentation effectiveness and protect artifacts from operating - under dangerous environmental conditions. To highlight the music-like qualities - of cicada vocalizations, installations conducted between October 2019 and March - 2020, where multiple Speculātor units are installed in outdoor natural locations - are presented as an initial case study.' - address: 'Shanghai, China' - articleno: 43 - author: 'Villicaña-Shaw, Nathan and Carnegie, Dale A. and Murphy, Jim and Zareei, - Mo' - bibtex: "@inproceedings{NIME21_43,\n abstract = {Speculātor is presented as a fist-sized,\ - \ battery-powered, environmentally aware, soundscape augmentation artifact that\ - \ listens to the sonic environment and provides real-time illuminated visual feedback\ - \ in reaction to what it hears. The visual soundscape augmentations these units\ - \ offer allow for creating sonic art installations whose artistic subject is the\ - \ unaltered in-situ sonic environment. Speculātor is designed to be quickly installed\ - \ in exposed outdoor environments without power infrastructure to allow maximum\ - \ flexibility when selecting exhibition locations. 
Data from light, temperature,\ - \ and humidity sensors guide behavior to maximize soundscape augmentation effectiveness\ - \ and protect artifacts from operating under dangerous environmental conditions.\ - \ To highlight the music-like qualities of cicada vocalizations, installations\ - \ conducted between October 2019 and March 2020, where multiple Speculātor units\ - \ are installed in outdoor natural locations are presented as an initial case\ - \ study.},\n address = {Shanghai, China},\n articleno = {43},\n author = {Villicaña-Shaw,\ - \ Nathan and Carnegie, Dale A. and Murphy, Jim and Zareei, Mo},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.e521c5a4},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/kP3fDzAHXDw},\n title = {Speculātor:\ - \ visual soundscape augmentation of natural environments},\n url = {https://nime.pubpub.org/pub/pxr0grnk},\n\ - \ year = {2021}\n}\n" + ID: Fraiettab2019 + abstract: "This paper presents the use of Stellarium planetarium software coupled\ + \ with the VizieR database of astronomical catalogues as an interface mechanism\ + \ for creating astronomy based multimedia performances, and as a music composition\ + \ interface. The celestial display from Stellarium is used to query VizieR, which\ + \ then obtains scienti\nc astronomical data from the stars displayed--including\ + \ colour, celestial position, magnitude and distance--and sends it as input data\ + \ for music composition or performance. Stellarium and VizieR are controlled through\ + \ Stellar Command, a software library that couples the two systems and can be\ + \ used as both a standalone command line utility using Open Sound Control, and\ + \ as a software library." 
+ address: 'Porto Alegre, Brazil' + author: Angelo Fraietta + bibtex: "@inproceedings{Fraiettab2019,\n abstract = {This paper presents the use\ + \ of Stellarium planetarium software coupled with the VizieR database of astronomical\ + \ catalogues as an interface mechanism for creating astronomy based multimedia\ + \ performances, and as a music composition interface. The celestial display from\ + \ Stellarium is used to query VizieR, which then obtains scienti\nc astronomical\ + \ data from the stars displayed--including colour, celestial position, magnitude\ + \ and distance--and sends it as input data for music composition or performance.\ + \ Stellarium and VizieR are controlled through Stellar Command, a software library\ + \ that couples the two systems and can be used as both a standalone command line\ + \ utility using Open Sound Control, and as a software library.},\n address = {Porto\ + \ Alegre, Brazil},\n author = {Angelo Fraietta},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.3673005},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {387--392},\n publisher = {UFRGS},\n\ + \ title = {Stellar Command: a planetarium software based cosmic performance interface},\n\ + \ url = {http://www.nime.org/proceedings/2019/nime2019_paper075.pdf},\n year =\ + \ {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.e521c5a4 + doi: 10.5281/zenodo.3673005 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/kP3fDzAHXDw - title: 'Speculātor: visual soundscape augmentation of natural environments' - url: https://nime.pubpub.org/pub/pxr0grnk - year: 2021 + pages: 387--392 + publisher: UFRGS + title: 'Stellar Command: a planetarium software based cosmic performance interface' + url: 
http://www.nime.org/proceedings/2019/nime2019_paper075.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_44 - abstract: 'This paper outlines a demonstration of an acoustic piano augmentation - that allows for infinite sustain of one or many notes. The result is a natural - sounding piano sustain that lasts for an unnatural period of time. Using a tactile - shaker, a contact microphone and an amplitude activated FFT-freeze Max patch, - this system is easily assembled and creates an infinitely sustaining piano.' - address: 'Shanghai, China' - articleno: 44 - author: 'Thompson, William and Berdahl, Edgar' - bibtex: "@inproceedings{NIME21_44,\n abstract = {This paper outlines a demonstration\ - \ of an acoustic piano augmentation that allows for infinite sustain of one or\ - \ many notes. The result is a natural sounding piano sustain that lasts for an\ - \ unnatural period of time. Using a tactile shaker, a contact microphone and an\ - \ amplitude activated FFT-freeze Max patch, this system is easily assembled and\ - \ creates an infinitely sustaining piano.},\n address = {Shanghai, China},\n articleno\ - \ = {44},\n author = {Thompson, William and Berdahl, Edgar},\n booktitle = {Proceedings\ + ID: Müller2019 + abstract: 'Telematic performances connect two or more locations so that participants + are able to interact in real time. Such practices blend a variety of dimensions, + insofar as the representation of remote performers on a local stage intrinsically + occurs on auditory, as well as visual and scenic, levels. Due to their multimodal + nature, the analysis or creation of such performances can quickly descend into + a house of mirrors wherein certain intensely interdependent dimensions come to + the fore, while others are multiplied, seem hidden or are made invisible. 
In order + to have a better understanding of such performances, Dimension Space Analysis, + with its capacity to review multifaceted components of a system, can be applied + to telematic performances, understood here as (a bundle of) NIMEs. In the second + part of the paper, some telematic works from the practices of the authors are + described with the toolset developed.' + address: 'Porto Alegre, Brazil' + author: Patrick Müller and Johannes Michael Schuett + bibtex: "@inproceedings{Müller2019,\n abstract = {Telematic performances connect\ + \ two or more locations so that participants are able to interact in real time.\ + \ Such practices blend a variety of dimensions, insofar as the representation\ + \ of remote performers on a local stage intrinsically occurs on auditory, as well\ + \ as visual and scenic, levels. Due to their multimodal nature, the analysis or\ + \ creation of such performances can quickly descend into a house of mirrors wherein\ + \ certain intensely interdependent dimensions come to the fore, while others are\ + \ multiplied, seem hidden or are made invisible. In order to have a better understanding\ + \ of such performances, Dimension Space Analysis, with its capacity to review\ + \ multifaceted components of a system, can be applied to telematic performances,\ + \ understood here as (a bundle of) NIMEs. 
In the second part of the paper, some\ + \ telematic works from the practices of the authors are described with the toolset\ + \ developed.},\n address = {Porto Alegre, Brazil},\n author = {Patrick Müller\ + \ and Johannes Michael Schuett},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673007},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {393--400},\n publisher = {UFRGS},\n title = {Towards a\ + \ Telematic Dimension Space},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper076.pdf},\n\ + \ year = {2019}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.3673007 + editor: Marcelo Queiroz and Anna Xambó Sedó + issn: 2220-4806 + month: June + pages: 393--400 + publisher: UFRGS + title: Towards a Telematic Dimension Space + url: http://www.nime.org/proceedings/2019/nime2019_paper076.pdf + year: 2019 + + +- ENTRYTYPE: inproceedings + ID: Lucasb2019 + abstract: 'Unity is one of the most used engines in the game industry and several + extensions have been implemented to increase its features in order to create multimedia + products in a more effective and efficient way. From the point of view of audio + development, Unity has included an Audio Mixer from version 5 which facilitates + the organization of sounds, effects, and the mixing process in general; however, + this module can be manipulated only through its graphical interface. This work + describes the design and implementation of an extension tool to map parameters + from the Audio Mixer to MIDI external devices, like controllers with sliders and + knobs, such way the developer can easily mix a game with the feeling of a physical + interface. 
' + address: 'Porto Alegre, Brazil' + author: Pedro Pablo Lucas + bibtex: "@inproceedings{Lucasb2019,\n abstract = {Unity is one of the most used\ + \ engines in the game industry and several extensions have been implemented to\ + \ increase its features in order to create multimedia products in a more effective\ + \ and efficient way. From the point of view of audio development, Unity has included\ + \ an Audio Mixer from version 5 which facilitates the organization of sounds,\ + \ effects, and the mixing process in general; however, this module can be manipulated\ + \ only through its graphical interface. This work describes the design and implementation\ + \ of an extension tool to map parameters from the Audio Mixer to MIDI external\ + \ devices, like controllers with sliders and knobs, such way the developer can\ + \ easily mix a game with the feeling of a physical interface. },\n address = {Porto\ + \ Alegre, Brazil},\n author = {Pedro Pablo Lucas},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.2c4879f5},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/YRby0VdL8Nk},\n title = {An Infinitely\ - \ Sustaining Piano Achieved Through a Soundboard-Mounted Shaker },\n url = {https://nime.pubpub.org/pub/cde9r70r},\n\ - \ year = {2021}\n}\n" + \ doi = {10.5281/zenodo.3673009},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {401--404},\n publisher = {UFRGS},\n\ + \ title = {A {MIDI} Controller Mapper for the Built-in Audio Mixer in the Unity\ + \ Game Engine},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper077.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.2c4879f5 + doi: 10.5281/zenodo.3673009 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June 
- presentation-video: https://youtu.be/YRby0VdL8Nk - title: 'An Infinitely Sustaining Piano Achieved Through a Soundboard-Mounted Shaker ' - url: https://nime.pubpub.org/pub/cde9r70r - year: 2021 + pages: 401--404 + publisher: UFRGS + title: A MIDI Controller Mapper for the Built-in Audio Mixer in the Unity Game Engine + url: http://www.nime.org/proceedings/2019/nime2019_paper077.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_45 - abstract: 'Block-based coding environments enable novices to write code that bypasses - the syntactic complexities of text. However, we see a lack of effective block-based - tools that balance programming with expressive music making. We introduce Toneblocks1, - a prototype web application intended to be intuitive and engaging for novice users - with interests in computer programming and music. Toneblocks is designed to lower - the barrier of entry while increasing the ceiling of expression for advanced users. - In Toneblocks, users produce musical loops ranging from static sequences to generative - systems, and can manipulate their properties live. Pilot usability tests conducted - with two participants provide evidence that the current prototype is easy to use - and can produce complex musical output. An evaluation offers potential future - improvements including user-defined variables and functions, and rhythmic variability.' - address: 'Shanghai, China' - articleno: 45 - author: 'Quigley, Michael and Payne, William' - bibtex: "@inproceedings{NIME21_45,\n abstract = {Block-based coding environments\ - \ enable novices to write code that bypasses the syntactic complexities of text.\ - \ However, we see a lack of effective block-based tools that balance programming\ - \ with expressive music making. We introduce Toneblocks1, a prototype web application\ - \ intended to be intuitive and engaging for novice users with interests in computer\ - \ programming and music. 
Toneblocks is designed to lower the barrier of entry\ - \ while increasing the ceiling of expression for advanced users. In Toneblocks,\ - \ users produce musical loops ranging from static sequences to generative systems,\ - \ and can manipulate their properties live. Pilot usability tests conducted with\ - \ two participants provide evidence that the current prototype is easy to use\ - \ and can produce complex musical output. An evaluation offers potential future\ - \ improvements including user-defined variables and functions, and rhythmic variability.},\n\ - \ address = {Shanghai, China},\n articleno = {45},\n author = {Quigley, Michael\ - \ and Payne, William},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.46c0f6ef},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/c64l1hK3QiY},\n\ - \ title = {Toneblocks: Block-based musical programming},\n url = {https://nime.pubpub.org/pub/qn6lqnzx},\n\ - \ year = {2021}\n}\n" + ID: Lucasc2019 + abstract: 'AuSynthAR is a digital instrument based on Augmented Reality (AR), which + allows sound synthesis modules to create simple sound networks. It only requires + a mobile device, a set of tokens, a sound output device and, optionally, a MIDI + controller, which makes it an affordable instrument. An application running on + the device generates the sounds and the graphical augmentations over the tokens.' + address: 'Porto Alegre, Brazil' + author: Pedro Pablo Lucas + bibtex: "@inproceedings{Lucasc2019,\n abstract = {AuSynthAR is a digital instrument\ + \ based on Augmented Reality (AR), which allows sound synthesis modules to create\ + \ simple sound networks. It only requires a mobile device, a set of tokens, a\ + \ sound output device and, optionally, a MIDI controller, which makes it an affordable\ + \ instrument. 
An application running on the device generates the sounds and the\ + \ graphical augmentations over the tokens.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Pedro Pablo Lucas},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673011},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {405--406},\n publisher = {UFRGS},\n title = {AuSynthAR:\ + \ A simple low-cost modular synthesizer based on Augmented Reality},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper078.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.46c0f6ef + doi: 10.5281/zenodo.3673011 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/c64l1hK3QiY - title: 'Toneblocks: Block-based musical programming' - url: https://nime.pubpub.org/pub/qn6lqnzx - year: 2021 + pages: 405--406 + publisher: UFRGS + title: 'AuSynthAR: A simple low-cost modular synthesizer based on Augmented Reality' + url: http://www.nime.org/proceedings/2019/nime2019_paper078.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_46 - abstract: 'This paper introduces “Ripples”, an iOS application for the Atlanta Botanical - Garden that uses auditory augmented reality to provide an intuitive music guide - by seamlessly integrating information about the garden into the visiting experience. - For each point of interest nearby, “Ripples” generates music in real time, representing - a location through data collected from users’ smartphones. The music is then overlaid - onto the physical environment and binaural spatialization indicates real-world - coordinates of their represented places. 
By taking advantage of the human auditory - sense’s innate spatial sound source localization and source separation capabilities, - “Ripples” makes navigation intuitive and information easy to understand.' - address: 'Shanghai, China' - articleno: 46 - author: 'Wu, Yi and Freeman, Jason' - bibtex: "@inproceedings{NIME21_46,\n abstract = {This paper introduces “Ripples”,\ - \ an iOS application for the Atlanta Botanical Garden that uses auditory augmented\ - \ reality to provide an intuitive music guide by seamlessly integrating information\ - \ about the garden into the visiting experience. For each point of interest nearby,\ - \ “Ripples” generates music in real time, representing a location through data\ - \ collected from users’ smartphones. The music is then overlaid onto the physical\ - \ environment and binaural spatialization indicates real-world coordinates of\ - \ their represented places. By taking advantage of the human auditory sense’s\ - \ innate spatial sound source localization and source separation capabilities,\ - \ “Ripples” makes navigation intuitive and information easy to understand.},\n\ - \ address = {Shanghai, China},\n articleno = {46},\n author = {Wu, Yi and Freeman,\ - \ Jason},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.b8e82252},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/T7EJVACX3QI},\n title\ - \ = {Ripples: An Auditory Augmented Reality iOS Application for the Atlanta Botanical\ - \ Garden},\n url = {https://nime.pubpub.org/pub/n1o19efr},\n year = {2021}\n}\n" + ID: Haddad2019 + abstract: 'This paper introduces a versatile module for Eurorack synthesizers that + allows multiple modular synthesizers to be patched together remotely through the + world wide web. 
The module is configured from a read-eval-print-loop environment + running in the web browser, that can be used to send signals to the modular synthesizer + from a live coding interface or from various data sources on the internet.' + address: 'Porto Alegre, Brazil' + author: Don Derek Haddad and Joe Paradiso + bibtex: "@inproceedings{Haddad2019,\n abstract = {This paper introduces a versatile\ + \ module for Eurorack synthesizers that allows multiple modular synthesizers to\ + \ be patched together remotely through the world wide web. The module is configured\ + \ from a read-eval-print-loop environment running in the web browser, that can\ + \ be used to send signals to the modular synthesizer from a live coding interface\ + \ or from various data sources on the internet.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Don Derek Haddad and Joe Paradiso},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.3673013},\n editor = {Marcelo Queiroz and Anna Xambó Sedó},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {407--410},\n publisher = {UFRGS},\n\ + \ title = {The World Wide Web in an Analog Patchbay},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper079.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.b8e82252 + doi: 10.5281/zenodo.3673013 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/T7EJVACX3QI - title: 'Ripples: An Auditory Augmented Reality iOS Application for the Atlanta Botanical - Garden' - url: https://nime.pubpub.org/pub/n1o19efr - year: 2021 + pages: 407--410 + publisher: UFRGS + title: The World Wide Web in an Analog Patchbay + url: http://www.nime.org/proceedings/2019/nime2019_paper079.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_47 - abstract: 'This article 
describes Mono-Replay, a software environment designed for - sound animation. "Sound animation" in this context means musical performance based - on various modes of replay and transformation of all kinds of recorded music samples. - Sound animation using Mono-Replay is a two-step process, including an off-line - analysis phase and on-line performance or synthesis phase. The analysis phase - proceeds with time segmentation, and the set up of anchor points corresponding - to temporal musical discourse parameters (notes, pulses, events). This allows, - at the performance phase, for control of timing, playback position, playback speed, - and a variety of spectral effects, with the help of gesture interfaces. Animation - principles and software features of Mono-Replay are described. Two examples of - sound animation based on beat tracking and transient detection algorithms are - presented (a multi-track record of Superstition by Steve Wonder and Jeff Beck - and Accidents/Harmoniques, an electroacoustic piece by Bernard Parmegiani). With - the help of these two contrasted examples, the fundamental principles of “sound - animation” are reviewed: parameters of musical discourse, audio file segmentation, - gestural control and interaction for animation at the performance stage.' - address: 'Shanghai, China' - articleno: 47 - author: 'LUCAS, Thomas and d''Alessandro, Christophe and Laubier, Serge de' - bibtex: "@inproceedings{NIME21_47,\n abstract = {This article describes Mono-Replay,\ - \ a software environment designed for sound animation. \"Sound animation\" in\ - \ this context means musical performance based on various modes of replay and\ - \ transformation of all kinds of recorded music samples. Sound animation using\ - \ Mono-Replay is a two-step process, including an off-line analysis phase and\ - \ on-line performance or synthesis phase. 
The analysis phase proceeds with time\ - \ segmentation, and the set up of anchor points corresponding to temporal musical\ - \ discourse parameters (notes, pulses, events). This allows, at the performance\ - \ phase, for control of timing, playback position, playback speed, and a variety\ - \ of spectral effects, with the help of gesture interfaces. Animation principles\ - \ and software features of Mono-Replay are described. Two examples of sound animation\ - \ based on beat tracking and transient detection algorithms are presented (a multi-track\ - \ record of Superstition by Steve Wonder and Jeff Beck and Accidents/Harmoniques,\ - \ an electroacoustic piece by Bernard Parmegiani). With the help of these two\ - \ contrasted examples, the fundamental principles of “sound animation” are reviewed:\ - \ parameters of musical discourse, audio file segmentation, gestural control and\ - \ interaction for animation at the performance stage.},\n address = {Shanghai,\ - \ China},\n articleno = {47},\n author = {LUCAS, Thomas and d'Alessandro, Christophe\ - \ and Laubier, Serge de},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.7b843efe},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/Ck79wRgqXfU},\n\ - \ title = {Mono-Replay : a software tool for digitized sound animation},\n url\ - \ = {https://nime.pubpub.org/pub/8lqitvvq},\n year = {2021}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.21428/92fbeb44.7b843efe - issn: 2220-4806 - month: June - presentation-video: https://youtu.be/Ck79wRgqXfU - title: 'Mono-Replay : a software tool for digitized sound animation' - url: https://nime.pubpub.org/pub/8lqitvvq - year: 2021 - - -- ENTRYTYPE: inproceedings - ID: NIME21_48 - abstract: 'This paper discusses Pandora''s Box, a novel idiosyncratic electroacoustic - instrument and performance 
utilizing feedback as sound generation principle. The - instrument''s signal path consists of a closed-loop through custom DSP algorithms - and a spring. Pandora''s Box is played by tactile interaction with the spring - and a control panel with faders and switches. The design and implementation are - described and rituals are explained referencing a video recording of a concert.' - address: 'Shanghai, China' - articleno: 48 - author: 'Slager, Ward J.' - bibtex: "@inproceedings{NIME21_48,\n abstract = {This paper discusses Pandora's\ - \ Box, a novel idiosyncratic electroacoustic instrument and performance utilizing\ - \ feedback as sound generation principle. The instrument's signal path consists\ - \ of a closed-loop through custom DSP algorithms and a spring. Pandora's Box is\ - \ played by tactile interaction with the spring and a control panel with faders\ - \ and switches. The design and implementation are described and rituals are explained\ - \ referencing a video recording of a concert.},\n address = {Shanghai, China},\n\ - \ articleno = {48},\n author = {Slager, Ward J.},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.21428/92fbeb44.61b13baf},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/s89Ycd0QkDI},\n title = {Designing and performing with Pandora’s\ - \ Box: transforming feedback physically and with algorithms},\n url = {https://nime.pubpub.org/pub/kx6d0553},\n\ - \ year = {2021}\n}\n" + ID: Yoshimura2019 + abstract: 'In this paper, we propose a “voice” instrument based on vocal tract models + with a soft material for a 3D printer and an electrolarynx. In our practice, we + explore the incongruity of the voice instrument through the accompanying music + production and performance. 
With the instrument, we aim to return to the fact + that the “Machine speaks out.” With the production of a song “Vocalise (Incomplete),” + and performances, we reveal how the instrument could work with the audiences and + explore the uncultivated field of voices.' + address: 'Porto Alegre, Brazil' + author: Fou Yoshimura and kazuhiro jo + bibtex: "@inproceedings{Yoshimura2019,\n abstract = {In this paper, we propose a\ + \ “voice” instrument based on vocal tract models with a soft material for a 3D\ + \ printer and an electrolarynx. In our practice, we explore the incongruity of\ + \ the voice instrument through the accompanying music production and performance.\ + \ With the instrument, we aim to return to the fact that the “Machine speaks out.”\ + \ With the production of a song “Vocalise (Incomplete),” and performances, we\ + \ reveal how the instrument could work with the audiences and explore the uncultivated\ + \ field of voices.},\n address = {Porto Alegre, Brazil},\n author = {Fou Yoshimura\ + \ and kazuhiro jo},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673015},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {411--412},\n publisher = {UFRGS},\n title = {A \"voice\"\ + \ instrument based on vocal tract models by using soft material for a 3D printer\ + \ and an electrolarynx},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper080.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.61b13baf + doi: 10.5281/zenodo.3673015 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/s89Ycd0QkDI - title: 'Designing and performing with Pandora’s Box: transforming feedback physically - and with algorithms' - url: https://nime.pubpub.org/pub/kx6d0553 - 
year: 2021 + pages: 411--412 + publisher: UFRGS + title: A "voice" instrument based on vocal tract models by using soft material for + a 3D printer and an electrolarynx + url: http://www.nime.org/proceedings/2019/nime2019_paper080.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_49 - abstract: 'Quadrant is a new human-computer interface based on an array of distance - sensors. The hardware consists of 4 time-of-flight detectors and is designed to - detect the position, velocity, and orientation of the user''s hand in free space. - Signal processing is used to recognize gestures and other events, which we map - to a variety of musical parameters to demonstrate possible applications. We have - developed Quadrant as an open-hardware circuit board, which acts as a USB controller - to a host computer.' - address: 'Shanghai, China' - articleno: 49 - author: 'Chronopoulos, Chris' - bibtex: "@inproceedings{NIME21_49,\n abstract = {Quadrant is a new human-computer\ - \ interface based on an array of distance sensors. The hardware consists of 4\ - \ time-of-flight detectors and is designed to detect the position, velocity, and\ - \ orientation of the user's hand in free space. Signal processing is used to recognize\ - \ gestures and other events, which we map to a variety of musical parameters to\ - \ demonstrate possible applications. 
We have developed Quadrant as an open-hardware\ - \ circuit board, which acts as a USB controller to a host computer.},\n address\ - \ = {Shanghai, China},\n articleno = {49},\n author = {Chronopoulos, Chris},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.761367fd},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/p8flHKv17Y8},\n title\ - \ = {Quadrant: A Multichannel, Time-of-Flight Based Hand Tracking Interface for\ - \ Computer Music},\n url = {https://nime.pubpub.org/pub/quadrant},\n year = {2021}\n\ - }\n" + ID: YepezPlacencia2019 + abstract: 'Mechatronic chordophones have become increasingly common in mechatronic + music. As expressive instruments, they offer multiple techniques to create and + manipulate sounds using their actuation mechanisms. Chordophone designs have taken + multiple forms, from frames that play a guitar-like instrument, to machines that + integrate strings and actuators as part of their frame. However, few of these + instruments have taken advantage of dynamics, which have been largely unexplored. + This paper details the design and construction of a new picking mechanism prototype + which enables expressive techniques through fast and precise movement and actuation. + We have adopted iterative design and rapid prototyping strategies to develop and + refine a compact picker capable of creating dynamic variations reliably. Finally, + a quantitative evaluation process demonstrates that this system offers the speed + and consistency of previously existing picking mechanisms, while providing increased + control over musical dynamics and articulations.' + address: 'Porto Alegre, Brazil' + author: Juan Pablo Yepez Placencia and Jim Murphy and Dale Carnegie + bibtex: "@inproceedings{YepezPlacencia2019,\n abstract = {Mechatronic chordophones\ + \ have become increasingly common in mechatronic music. 
As expressive instruments,\ + \ they offer multiple techniques to create and manipulate sounds using their actuation\ + \ mechanisms. Chordophone designs have taken multiple forms, from frames that\ + \ play a guitar-like instrument, to machines that integrate strings and actuators\ + \ as part of their frame. However, few of these instruments have taken advantage\ + \ of dynamics, which have been largely unexplored. This paper details the design\ + \ and construction of a new picking mechanism prototype which enables expressive\ + \ techniques through fast and precise movement and actuation. We have adopted\ + \ iterative design and rapid prototyping strategies to develop and refine a compact\ + \ picker capable of creating dynamic variations reliably. Finally, a quantitative\ + \ evaluation process demonstrates that this system offers the speed and consistency\ + \ of previously existing picking mechanisms, while providing increased control\ + \ over musical dynamics and articulations.},\n address = {Porto Alegre, Brazil},\n\ + \ author = {Juan Pablo Yepez Placencia and Jim Murphy and Dale Carnegie},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.3673017},\n editor = {Marcelo Queiroz and\ + \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {413--418},\n\ + \ publisher = {UFRGS},\n title = {Exploring Dynamic Variations for Expressive\ + \ Mechatronic Chordophones},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper081.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.761367fd + doi: 10.5281/zenodo.3673017 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/p8flHKv17Y8 - title: 'Quadrant: A Multichannel, Time-of-Flight Based Hand Tracking Interface for - Computer Music' - url: 
https://nime.pubpub.org/pub/quadrant - year: 2021 + pages: 413--418 + publisher: UFRGS + title: Exploring Dynamic Variations for Expressive Mechatronic Chordophones + url: http://www.nime.org/proceedings/2019/nime2019_paper081.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_5 - abstract: 'This short article presents a reductionist infra-instrument. It concerns - a yellow die-cast aluminium box only featuring a key switch and a 1/4” TRS balanced - audio output as its UI. On the turn of the key, the device performs a certain - poem in Morse code and via very low frequency acoustic pulses; in this way, it - transforms poetry into bursts of intense acoustic energy that may resonate a hosting - architecture and any human bodies therein. It is argued that the instrument functions - at the very same time as a critical/speculative electronic object, as an ad-hoc - performance instrument, and as a piece of (conceptual) art on its own sake.' - address: 'Shanghai, China' - articleno: 5 - author: 'Koutsomichalis, Marinos' - bibtex: "@inproceedings{NIME21_5,\n abstract = {This short article presents a reductionist\ - \ infra-instrument. It concerns a yellow die-cast aluminium box only featuring\ - \ a key switch and a 1/4” TRS balanced audio output as its UI. 
On the turn of\ - \ the key, the device performs a certain poem in Morse code and via very low frequency\ - \ acoustic pulses; in this way, it transforms poetry into bursts of intense acoustic\ - \ energy that may resonate a hosting architecture and any human bodies therein.\ - \ It is argued that the instrument functions at the very same time as a critical/speculative\ - \ electronic object, as an ad-hoc performance instrument, and as a piece of (conceptual)\ - \ art on its own sake.},\n address = {Shanghai, China},\n articleno = {5},\n author\ - \ = {Koutsomichalis, Marinos},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.765a94a7},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/_IUT0tbtkBI},\n\ - \ title = {A Yellow Box with a Key Switch and a 1/4\" TRS Balanced Audio Output},\n\ - \ url = {https://nime.pubpub.org/pub/n69uznd4},\n year = {2021}\n}\n" + ID: Chauhan2019 + abstract: 'In this paper, we introduce and explore a novel Virtual Reality musical + interaction system (named REVOLVE) that utilises a user-guided evolutionary algorithm + to personalise musical instruments to users'' individual preferences. REVOLVE + is designed towards being an `endlessly entertaining'' experience through the + potentially infinite number of sounds that can be produced. Our hypothesis is + that using evolutionary algorithms with VR for musical interactions will lead + to increased user telepresence. In addition to this, REVOLVE was designed to inform + novel research into this unexplored area. Think aloud trials and thematic analysis + revealed 5 main themes: control, comparison to the real world, immersion, general + usability and limitations, in addition to practical improvements. Overall, it + was found that this combination of technologies did improve telepresence levels, + proving the original hypothesis correct.' 
+ address: 'Porto Alegre, Brazil' + author: Dhruv Chauhan and Peter Bennett + bibtex: "@inproceedings{Chauhan2019,\n abstract = {In this paper, we introduce and\ + \ explore a novel Virtual Reality musical interaction system (named REVOLVE) that\ + \ utilises a user-guided evolutionary algorithm to personalise musical instruments\ + \ to users' individual preferences. REVOLVE is designed towards being an `endlessly\ + \ entertaining' experience through the potentially infinite number of sounds that\ + \ can be produced. Our hypothesis is that using evolutionary algorithms with VR\ + \ for musical interactions will lead to increased user telepresence. In addition\ + \ to this, REVOLVE was designed to inform novel research into this unexplored\ + \ area. Think aloud trials and thematic analysis revealed 5 main themes: control,\ + \ comparison to the real world, immersion, general usability and limitations,\ + \ in addition to practical improvements. Overall, it was found that this combination\ + \ of technologies did improve telepresence levels, proving the original hypothesis\ + \ correct.},\n address = {Porto Alegre, Brazil},\n author = {Dhruv Chauhan and\ + \ Peter Bennett},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673019},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {419--422},\n publisher = {UFRGS},\n title = {Searching\ + \ for the Perfect Instrument: Increased Telepresence through Interactive Evolutionary\ + \ Instrument Design},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper082.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.765a94a7 + doi: 10.5281/zenodo.3673019 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/_IUT0tbtkBI - 
title: A Yellow Box with a Key Switch and a 1/4" TRS Balanced Audio Output - url: https://nime.pubpub.org/pub/n69uznd4 - year: 2021 + pages: 419--422 + publisher: UFRGS + title: 'Searching for the Perfect Instrument: Increased Telepresence through Interactive + Evolutionary Instrument Design' + url: http://www.nime.org/proceedings/2019/nime2019_paper082.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_50 - abstract: 'In the present study a musician and a dancer explore the co-play between - them through sensory technology. The main questions concern the placement and - processing of motion sensors, and the choice of sound parameters that a dancer - can manipulate. Results indicate that sound parameters of delay and pitch altered - dancers’ experience most positively and that placement of sensors on each wrist - and ankle with a diagonal mapping of the sound parameters was the most suitable.' - address: 'Shanghai, China' - articleno: 50 - author: 'Andersson López, Lisa and Svenns, Thelma and Holzapfel, Andre' - bibtex: "@inproceedings{NIME21_50,\n abstract = {In the present study a musician\ - \ and a dancer explore the co-play between them through sensory technology. The\ - \ main questions concern the placement and processing of motion sensors, and the\ - \ choice of sound parameters that a dancer can manipulate. 
Results indicate that\ - \ sound parameters of delay and pitch altered dancers’ experience most positively\ - \ and that placement of sensors on each wrist and ankle with a diagonal mapping\ - \ of the sound parameters was the most suitable.},\n address = {Shanghai, China},\n\ - \ articleno = {50},\n author = {Andersson López, Lisa and Svenns, Thelma and Holzapfel,\ - \ Andre},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.18c3fc2b},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/Mo8mVJJrqx8},\n title\ - \ = {Sensitiv – Designing a Sonic Co-play Tool for Interactive Dance},\n url =\ - \ {https://nime.pubpub.org/pub/y1y5jolp},\n year = {2021}\n}\n" + ID: Savery2019 + abstract: 'Harriet Padberg wrote Computer-Composed Canon and Free Fugue as part + of her 1964 dissertation in Mathematics and Music at Saint Louis University. This + program is one of the earliest examples of text-to-music software and algorithmic + composition, which are areas of great interest in the present-day field of music + technology. This paper aims to analyze the technological innovation, aesthetic + design process, and impact of Harriet Padberg''s original 1964 thesis as well + as the design of a modern recreation and utilization, in order to gain insight + to the nature of revisiting older works. Here, we present our open source recreation + of Padberg''s program with a modern interface and, through its use as an artistic + tool by three composers, show how historical works can be effectively used for + new creative purposes in contemporary contexts. Not Even One by Molly Jones draws + on the historical and social significance of Harriet Padberg through using her + program in a piece about the lack of representation of women judges in composition + competitions. 
Brevity by Anna Savery utilizes the original software design as + a composition tool, and The Padberg Piano by Anthony Caulkins uses the melodic + generation of the original to create a software instrument.' + address: 'Porto Alegre, Brazil' + author: Richard J Savery and Benjamin Genchel and Jason Brent Smith and Anthony + Caulkins and Molly E Jones and Anna Savery + bibtex: "@inproceedings{Savery2019,\n abstract = {Harriet Padberg wrote Computer-Composed\ + \ Canon and Free Fugue as part of her 1964 dissertation in Mathematics and Music\ + \ at Saint Louis University. This program is one of the earliest examples of text-to-music\ + \ software and algorithmic composition, which are areas of great interest in the\ + \ present-day field of music technology. This paper aims to analyze the technological\ + \ innovation, aesthetic design process, and impact of Harriet Padberg's original\ + \ 1964 thesis as well as the design of a modern recreation and utilization, in\ + \ order to gain insight to the nature of revisiting older works. Here, we present\ + \ our open source recreation of Padberg's program with a modern interface and,\ + \ through its use as an artistic tool by three composers, show how historical\ + \ works can be effectively used for new creative purposes in contemporary contexts.\ + \ Not Even One by Molly Jones draws on the historical and social significance\ + \ of Harriet Padberg through using her program in a piece about the lack of representation\ + \ of women judges in composition competitions. 
Brevity by Anna Savery utilizes\ + \ the original software design as a composition tool, and The Padberg Piano by\ + \ Anthony Caulkins uses the melodic generation of the original to create a software\ + \ instrument.},\n address = {Porto Alegre, Brazil},\n author = {Richard J Savery\ + \ and Benjamin Genchel and Jason Brent Smith and Anthony Caulkins and Molly E\ + \ Jones and Anna Savery},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673021},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {423--428},\n publisher = {UFRGS},\n title = {Learning from\ + \ History: Recreating and Repurposing Harriet Padberg's Computer Composed Canon\ + \ and Free Fugue},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper083.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.18c3fc2b + doi: 10.5281/zenodo.3673021 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/Mo8mVJJrqx8 - title: Sensitiv – Designing a Sonic Co-play Tool for Interactive Dance - url: https://nime.pubpub.org/pub/y1y5jolp - year: 2021 + pages: 423--428 + publisher: UFRGS + title: 'Learning from History: Recreating and Repurposing Harriet Padberg''s Computer + Composed Canon and Free Fugue' + url: http://www.nime.org/proceedings/2019/nime2019_paper083.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_51 - abstract: 'Wireless sensor-based technologies are becoming increasingly accessible - and widely explored in interactive musical performance due to their ubiquity and - low-cost, which brings the necessity of understanding the capabilities and limitations - of these sensors. 
This is usually approached by using a reference system, such - as an optical motion capture system, to assess the signals’ properties. However, - this process raises the issue of synchronizing the signal and the reference data - streams, as each sensor is subject to different latency, time drift, reference - clocks and initialization timings. This paper presents an empirical quantification - of the latency communication stages in a setup consisting of a Qualisys optical - motion capture (mocap) system and a wireless microcontroller-based sensor device. - We performed event-to-end tests on the critical components of the hybrid setup - to determine the synchronization suitability. Overall, further synchronization - is viable because of the near individual average latencies of around 25ms for - both the mocap system and the wireless sensor interface.' - address: 'Shanghai, China' - articleno: 51 - author: 'Santos, Geise and Wang, Johnty and Brum, Carolina and Wanderley, Marcelo - M. and Tavares, Tiago and Rocha, Anderson' - bibtex: "@inproceedings{NIME21_51,\n abstract = {Wireless sensor-based technologies\ - \ are becoming increasingly accessible and widely explored in interactive musical\ - \ performance due to their ubiquity and low-cost, which brings the necessity of\ - \ understanding the capabilities and limitations of these sensors. This is usually\ - \ approached by using a reference system, such as an optical motion capture system,\ - \ to assess the signals’ properties. However, this process raises the issue of\ - \ synchronizing the signal and the reference data streams, as each sensor is subject\ - \ to different latency, time drift, reference clocks and initialization timings.\ - \ This paper presents an empirical quantification of the latency communication\ - \ stages in a setup consisting of a Qualisys optical motion capture (mocap) system\ - \ and a wireless microcontroller-based sensor device. 
We performed event-to-end\ - \ tests on the critical components of the hybrid setup to determine the synchronization\ - \ suitability. Overall, further synchronization is viable because of the near\ - \ individual average latencies of around 25ms for both the mocap system and the\ - \ wireless sensor interface.},\n address = {Shanghai, China},\n articleno = {51},\n\ - \ author = {Santos, Geise and Wang, Johnty and Brum, Carolina and Wanderley, Marcelo\ - \ M. and Tavares, Tiago and Rocha, Anderson},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.51b1c3a1},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/a1TVvr9F7hE},\n title = {Comparative Latency Analysis of\ - \ Optical and Inertial Motion Capture Systems for Gestural Analysis and Musical\ - \ Performance},\n url = {https://nime.pubpub.org/pub/wmcqkvw1},\n year = {2021}\n\ - }\n" + ID: Berdahl2019 + abstract: 'The design of a Spatially Distributed Vibrotactile Actuator Array (SDVAA) + for the fingertips is presented. It provides high-fidelity vibrotactile stimulation + at the audio sampling rate. Prior works are discussed, and the system is demonstrated + using two music compositions by the authors.' + address: 'Porto Alegre, Brazil' + author: Edgar Berdahl and Austin Franklin and Eric Sheffield + bibtex: "@inproceedings{Berdahl2019,\n abstract = {The design of a Spatially Distributed\ + \ Vibrotactile Actuator Array (SDVAA) for the fingertips is presented. It provides\ + \ high-fidelity vibrotactile stimulation at the audio sampling rate. 
Prior works\ + \ are discussed, and the system is demonstrated using two music compositions by\ + \ the authors.},\n address = {Porto Alegre, Brazil},\n author = {Edgar Berdahl\ + \ and Austin Franklin and Eric Sheffield},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673023},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {429--430},\n publisher = {UFRGS},\n title = {A Spatially\ + \ Distributed Vibrotactile Actuator Array for the Fingertips},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper084.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.51b1c3a1 + doi: 10.5281/zenodo.3673023 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/a1TVvr9F7hE - title: Comparative Latency Analysis of Optical and Inertial Motion Capture Systems - for Gestural Analysis and Musical Performance - url: https://nime.pubpub.org/pub/wmcqkvw1 - year: 2021 + pages: 429--430 + publisher: UFRGS + title: A Spatially Distributed Vibrotactile Actuator Array for the Fingertips + url: http://www.nime.org/proceedings/2019/nime2019_paper084.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_52 - abstract: 'The work presented here is based on the Hybrid Augmented Saxophone of - Gestural Symbioses (HASGS) system with a focus on and its evolution over the last - five years, and an emphasis on its functional structure and the repertoire. The - HASGS system was intended to retain focus on the performance of the acoustic instrument, - keeping gestures centralised within the habitual practice of the instrument, and - reducing the use of external devices to control electronic parameters in mixed - music. 
Taking a reduced approach, the technology chosen to prototype HASGS was - developed in order to serve the aesthetic intentions of the pieces being written - for it. This strategy proved to avoid an overload of solutions that could bring - artefacts and superficial use of the augmentation processes, which sometimes occur - on augmented instruments, specially prototyped for improvisational intentionality. - Here, we discuss how the repertoire, hardware, and software of the system can - be mutually affected by this approach. We understand this project as an empirically-based - study which can both serve as a model for analysis, as well provide composers - and performers with pathways and creative strategies for the development of augmentation - processes.' - address: 'Shanghai, China' - articleno: 52 - author: 'Portovedo, Henrique and Lopes, Paulo Ferreira and Mendes, Ricardo and Gala, - Tiago' - bibtex: "@inproceedings{NIME21_52,\n abstract = {The work presented here is based\ - \ on the Hybrid Augmented Saxophone of Gestural Symbioses (HASGS) system with\ - \ a focus on and its evolution over the last five years, and an emphasis on its\ - \ functional structure and the repertoire. The HASGS system was intended to retain\ - \ focus on the performance of the acoustic instrument, keeping gestures centralised\ - \ within the habitual practice of the instrument, and reducing the use of external\ - \ devices to control electronic parameters in mixed music. Taking a reduced approach,\ - \ the technology chosen to prototype HASGS was developed in order to serve the\ - \ aesthetic intentions of the pieces being written for it. This strategy proved\ - \ to avoid an overload of solutions that could bring artefacts and superficial\ - \ use of the augmentation processes, which sometimes occur on augmented instruments,\ - \ specially prototyped for improvisational intentionality. 
Here, we discuss how\ - \ the repertoire, hardware, and software of the system can be mutually affected\ - \ by this approach. We understand this project as an empirically-based study which\ - \ can both serve as a model for analysis, as well provide composers and performers\ - \ with pathways and creative strategies for the development of augmentation processes.},\n\ - \ address = {Shanghai, China},\n articleno = {52},\n author = {Portovedo, Henrique\ - \ and Lopes, Paulo Ferreira and Mendes, Ricardo and Gala, Tiago},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.643abd8c},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/wRygkMgx2Oc},\n title = {HASGS:\ - \ Five Years of Reduced Augmented Evolution},\n url = {https://nime.pubpub.org/pub/1293exfw},\n\ - \ year = {2021}\n}\n" + ID: Gregorio2019 + abstract: 'Feature-based synthesis applies machine learning and signal processing + methods to the development of alternative interfaces for controlling parametric + synthesis algorithms. One approach, geared toward real-time control, uses low + dimensional gestural controllers and learned mappings from control spaces to parameter + spaces, making use of an intermediate latent timbre distribution, such that the + control space affords a spatially-intuitive arrangement of sonic possibilities. + Whereas many existing systems present alternatives to the traditional parametric + interfaces, the proposed system explores ways in which feature-based synthesis + can augment one-to-one parameter control, made possible by fully invertible mappings + between control and parameter spaces.' 
+ address: 'Porto Alegre, Brazil' + author: Jeff Gregorio and Youngmoo Kim + bibtex: "@inproceedings{Gregorio2019,\n abstract = {Feature-based synthesis applies\ + \ machine learning and signal processing methods to the development of alternative\ + \ interfaces for controlling parametric synthesis algorithms. One approach, geared\ + \ toward real-time control, uses low dimensional gestural controllers and learned\ + \ mappings from control spaces to parameter spaces, making use of an intermediate\ + \ latent timbre distribution, such that the control space affords a spatially-intuitive\ + \ arrangement of sonic possibilities. Whereas many existing systems present alternatives\ + \ to the traditional parametric interfaces, the proposed system explores ways\ + \ in which feature-based synthesis can augment one-to-one parameter control, made\ + \ possible by fully invertible mappings between control and parameter spaces.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Jeff Gregorio and Youngmoo Kim},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.3673025},\n editor = {Marcelo Queiroz\ + \ and Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {431--436},\n\ + \ publisher = {UFRGS},\n title = {Augmenting Parametric Synthesis with Learned\ + \ Timbral Controllers},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper085.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.643abd8c + doi: 10.5281/zenodo.3673025 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/wRygkMgx2Oc - title: 'HASGS: Five Years of Reduced Augmented Evolution' - url: https://nime.pubpub.org/pub/1293exfw - year: 2021 + pages: 431--436 + publisher: UFRGS + title: Augmenting Parametric Synthesis with Learned Timbral 
Controllers + url: http://www.nime.org/proceedings/2019/nime2019_paper085.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_53 - abstract: 'This paper presents a theoretical framework for describing interactive - sound installations, along with an interactive database, on a web application, - for visualizing various features of sound installations. A corpus of 195 interactive - sound installations was reviewed to derive a taxonomy describing them across three - perspectives: Artistic Intention, Interaction and System Design. A web application - is provided to dynamically visualize and explore the corpus of sound installations - using interactive charts (https://isi-database.herokuapp.com/). Our contribution - is two-sided: we provide a theoretical framework to characterize interactive sound - installations as well as a tool to inform sound artists and designers about up-to-date - practices regarding interactive sound installations design.' - address: 'Shanghai, China' - articleno: 53 - author: 'Fraisse, Valérian and Guastavino, Catherine and Wanderley, Marcelo M.' - bibtex: "@inproceedings{NIME21_53,\n abstract = {This paper presents a theoretical\ - \ framework for describing interactive sound installations, along with an interactive\ - \ database, on a web application, for visualizing various features of sound installations.\ - \ A corpus of 195 interactive sound installations was reviewed to derive a taxonomy\ - \ describing them across three perspectives: Artistic Intention, Interaction and\ - \ System Design. 
A web application is provided to dynamically visualize and explore\ - \ the corpus of sound installations using interactive charts (https://isi-database.herokuapp.com/).\ - \ Our contribution is two-sided: we provide a theoretical framework to characterize\ - \ interactive sound installations as well as a tool to inform sound artists and\ - \ designers about up-to-date practices regarding interactive sound installations\ - \ design.},\n address = {Shanghai, China},\n articleno = {53},\n author = {Fraisse,\ - \ Valérian and Guastavino, Catherine and Wanderley, Marcelo M.},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.4fd9089c},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/MtIVB7P3bs4},\n title = {A Visualization\ - \ Tool to Explore Interactive Sound Installations},\n url = {https://nime.pubpub.org/pub/i1rx1t2e},\n\ - \ year = {2021}\n}\n" + ID: Leigh2019 + abstract: 'This paper introduces studies conducted with musicians that aim to understand + modes of human-robot interaction, situated between automation and human augmentation. + Our robotic guitar system used for the study consists of various sound generating + mechanisms, either driven by software or by a musician directly. The control mechanism + allows the musician to have a varying degree of agency over the overall musical + direction. We present interviews and discussions on open-ended experiments conducted + with music students and musicians. The outcome of this research includes new modes + of playing the guitar given the robotic capabilities, and an understanding of + how automation can be integrated into instrument-playing processes. The results + present insights into how a human-machine hybrid system can increase the efficacy + of training or exploration, without compromising human engagement with a task.' 
+ address: 'Porto Alegre, Brazil' + author: Sang-won Leigh and Abhinandan Jain and Pattie Maes + bibtex: "@inproceedings{Leigh2019,\n abstract = {This paper introduces studies conducted\ + \ with musicians that aim to understand modes of human-robot interaction, situated\ + \ between automation and human augmentation. Our robotic guitar system used for\ + \ the study consists of various sound generating mechanisms, either driven by\ + \ software or by a musician directly. The control mechanism allows the musician\ + \ to have a varying degree of agency over the overall musical direction. We present\ + \ interviews and discussions on open-ended experiments conducted with music students\ + \ and musicians. The outcome of this research includes new modes of playing the\ + \ guitar given the robotic capabilities, and an understanding of how automation\ + \ can be integrated into instrument-playing processes. The results present insights\ + \ into how a human-machine hybrid system can increase the efficacy of training\ + \ or exploration, without compromising human engagement with a task.},\n address\ + \ = {Porto Alegre, Brazil},\n author = {Sang-won Leigh and Abhinandan Jain and\ + \ Pattie Maes},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673027},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {437--442},\n publisher = {UFRGS},\n title = {Exploring\ + \ Human-Machine Synergy and Interaction on a Robotic Instrument},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper086.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.4fd9089c + doi: 10.5281/zenodo.3673027 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/MtIVB7P3bs4 - title: A Visualization 
Tool to Explore Interactive Sound Installations - url: https://nime.pubpub.org/pub/i1rx1t2e - year: 2021 + pages: 437--442 + publisher: UFRGS + title: Exploring Human-Machine Synergy and Interaction on a Robotic Instrument + url: http://www.nime.org/proceedings/2019/nime2019_paper086.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_54 - abstract: 'Self-resonating vibrotactile instruments (SRIs) are hybrid feedback instruments, - characterised by an electro-mechanical feedback loop that is both the means of - sound production and the expressive interface. Through the lens of contemporary - SRIs, we reflect on how they are characterised, designed, and played. By considering - reports from designers and players of this species of instrument-performance system, - we explore the experience of playing them. With a view to supporting future research - and practice in the field, we illustrate the value of conceptualising SRIs in - Cybernetic and systems theoretic terms and suggest that this offers an intuitive, - yet powerful basis for future performance, analysis and making; in doing so we - close the loop in the making, playing and conceptualisation of SRIs with the aim - of nourishing the evolution of theory, creative and technical practice in this - field.' - address: 'Shanghai, China' - articleno: 54 - author: 'Eldridge, Alice and Kiefer, Chris and Overholt, Dan and Ulfarsson, Halldor' - bibtex: "@inproceedings{NIME21_54,\n abstract = {Self-resonating vibrotactile instruments\ - \ (SRIs) are hybrid feedback instruments, characterised by an electro-mechanical\ - \ feedback loop that is both the means of sound production and the expressive\ - \ interface. Through the lens of contemporary SRIs, we reflect on how they are\ - \ characterised, designed, and played. By considering reports from designers and\ - \ players of this species of instrument-performance system, we explore the experience\ - \ of playing them. 
With a view to supporting future research and practice in the\ - \ field, we illustrate the value of conceptualising SRIs in Cybernetic and systems\ - \ theoretic terms and suggest that this offers an intuitive, yet powerful basis\ - \ for future performance, analysis and making; in doing so we close the loop in\ - \ the making, playing and conceptualisation of SRIs with the aim of nourishing\ - \ the evolution of theory, creative and technical practice in this field.},\n\ - \ address = {Shanghai, China},\n articleno = {54},\n author = {Eldridge, Alice\ - \ and Kiefer, Chris and Overholt, Dan and Ulfarsson, Halldor},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.1f29a09e},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/EP1G4vCVm_E},\n title = {Self-resonating\ - \ Vibrotactile Feedback Instruments {\\textbar}{\\textbar}: Making, Playing, Conceptualising\ - \ :{\\textbar}{\\textbar}},\n url = {https://nime.pubpub.org/pub/6mhrjiqt},\n\ - \ year = {2021}\n}\n" + ID: Lee2019 + abstract: 'Modern computer music performances often involve a musical instrument + that is primarily digital; software runs on a computer, and the physical form + of the instrument is the computer. In such a practice, the performance interface + is rendered on a computer screen for the performer. There has been a concern in + using a laptop as a musical instrument from the audience''s perspective, in that + having ``a laptop performer sitting behind the screen'''' makes it difficult for + the audience to understand how the performer is creating music. Mirroring a computer + screen on a projection screen has been one way to address the concern and reveal + the performer''s instrument. 
This paper introduces and discusses the author''s + computer music practice, in which a performer actively considers screen mirroring + as an essential part of the performance, beyond visualization of music. In this + case, screen mirroring is not complementary, but inevitable from the inception + of the performance. The related works listed within explore various roles of screen + mirroring in computer music performance and helps us understand empirical and + logistical findings in such practices.' + address: 'Porto Alegre, Brazil' + author: Sang Won Lee + bibtex: "@inproceedings{Lee2019,\n abstract = {Modern computer music performances\ + \ often involve a musical instrument that is primarily digital; software runs\ + \ on a computer, and the physical form of the instrument is the computer. In such\ + \ a practice, the performance interface is rendered on a computer screen for the\ + \ performer. There has been a concern in using a laptop as a musical instrument\ + \ from the audience's perspective, in that having ``a laptop performer sitting\ + \ behind the screen'' makes it difficult for the audience to understand how the\ + \ performer is creating music. Mirroring a computer screen on a projection screen\ + \ has been one way to address the concern and reveal the performer's instrument.\ + \ This paper introduces and discusses the author's computer music practice, in\ + \ which a performer actively considers screen mirroring as an essential part of\ + \ the performance, beyond visualization of music. 
In this case, screen mirroring\ + \ is not complementary, but inevitable from the inception of the performance.\ + \ The related works listed within explore various roles of screen mirroring in\ + \ computer music performance and helps us understand empirical and logistical\ + \ findings in such practices.},\n address = {Porto Alegre, Brazil},\n author =\ + \ {Sang Won Lee},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.3673029},\n\ + \ editor = {Marcelo Queiroz and Anna Xambó Sedó},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {443--448},\n publisher = {UFRGS},\n title = {Show Them\ + \ My Screen: Mirroring a Laptop Screen as an Expressive and Communicative Means\ + \ in Computer Music},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper087.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.1f29a09e + doi: 10.5281/zenodo.3673029 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/EP1G4vCVm_E - title: 'Self-resonating Vibrotactile Feedback Instruments ||: Making, Playing, Conceptualising - :||' - url: https://nime.pubpub.org/pub/6mhrjiqt - year: 2021 + pages: 443--448 + publisher: UFRGS + title: 'Show Them My Screen: Mirroring a Laptop Screen as an Expressive and Communicative + Means in Computer Music' + url: http://www.nime.org/proceedings/2019/nime2019_paper087.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_55 - abstract: 'Virtual reality (VR) offers novel possibilities of design choices for - Digital Musical Instruments in terms of shapes, sizes, sounds or colours, removing - many constraints inherent to physical interfaces. 
In particular, the size and - position of the interface components of Immersive Virtual Musical Instruments - (IVMIs) can be freely chosen to elicit large or small hand gestures. In addition, - VR allows for the manipulation of what users visually perceive of their actual - physical actions, through redirections and changes in Control-Display Ratio (CDR). - Visual and gestural amplitudes can therefore be defined separately, potentially - affecting the user experience in new ways. In this paper, we investigate the use - of CDR to enrich the design with a control over the user perceived fatigue, sense - of presence and musical expression. Our findings suggest that the CDR has an impact - on the sense of presence, on the perceived difficulty of controlling the sound - and on the distance covered by the hand. From these results, we derive a set of - insights and guidelines for the design of IVMIs.' - address: 'Shanghai, China' - articleno: 55 - author: 'Reynaert, Vincent and Berthaut, Florent and Rekik, Yosra and grisoni, laurent' - bibtex: "@inproceedings{NIME21_55,\n abstract = {Virtual reality (VR) offers novel\ - \ possibilities of design choices for Digital Musical Instruments in terms of\ - \ shapes, sizes, sounds or colours, removing many constraints inherent to physical\ - \ interfaces. In particular, the size and position of the interface components\ - \ of Immersive Virtual Musical Instruments (IVMIs) can be freely chosen to elicit\ - \ large or small hand gestures. In addition, VR allows for the manipulation of\ - \ what users visually perceive of their actual physical actions, through redirections\ - \ and changes in Control-Display Ratio (CDR). Visual and gestural amplitudes can\ - \ therefore be defined separately, potentially affecting the user experience in\ - \ new ways. In this paper, we investigate the use of CDR to enrich the design\ - \ with a control over the user perceived fatigue, sense of presence and musical\ - \ expression. 
Our findings suggest that the CDR has an impact on the sense of\ - \ presence, on the perceived difficulty of controlling the sound and on the distance\ - \ covered by the hand. From these results, we derive a set of insights and guidelines\ - \ for the design of IVMIs.},\n address = {Shanghai, China},\n articleno = {55},\n\ - \ author = {Reynaert, Vincent and Berthaut, Florent and Rekik, Yosra and grisoni,\ - \ laurent},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.c47be986},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/d1DthYt8EUw},\n\ - \ title = {The Effect of Control-Display Ratio on User Experience in Immersive\ - \ Virtual Musical Instruments},\n url = {https://nime.pubpub.org/pub/8n8br4cc},\n\ - \ year = {2021}\n}\n" + ID: Davis2019 + abstract: 'We present IllumiWear, a novel eTextile prototype that uses fiber optics + as interactive input and visual output. Fiber optic cables are separated into + bundles and then woven like a basket into a bendable glowing fabric. By equipping + light emitting diodes to one side of these bundles and photodiode light intensity + sensors to the other, loss of light intensity can be measured when the fabric + is bent. The sensing technique of IllumiWear is not only able to discriminate + between discreet touch, slight bends, and harsh bends, but also recover the location + of deformation. In this way, our computational fabric prototype uses its intrinsic + means of visual output (light) as a tool for interactive input. We provide design + and implementation details for our prototype as well as a technical evaluation + of its effectiveness and limitations as an interactive computational textile. + In addition, we examine the potential of this prototype''s interactive capabilities + by extending our eTextile to create a tangible user interface for audio and visual + manipulation.' 
+ address: 'Porto Alegre, Brazil' + author: Josh Urban Davis + bibtex: "@inproceedings{Davis2019,\n abstract = {We present IllumiWear, a novel\ + \ eTextile prototype that uses fiber optics as interactive input and visual output.\ + \ Fiber optic cables are separated into bundles and then woven like a basket into\ + \ a bendable glowing fabric. By equipping light emitting diodes to one side of\ + \ these bundles and photodiode light intensity sensors to the other, loss of light\ + \ intensity can be measured when the fabric is bent. The sensing technique of\ + \ IllumiWear is not only able to discriminate between discreet touch, slight bends,\ + \ and harsh bends, but also recover the location of deformation. In this way,\ + \ our computational fabric prototype uses its intrinsic means of visual output\ + \ (light) as a tool for interactive input. We provide design and implementation\ + \ details for our prototype as well as a technical evaluation of its effectiveness\ + \ and limitations as an interactive computational textile. 
In addition, we examine\ + \ the potential of this prototype's interactive capabilities by extending our\ + \ eTextile to create a tangible user interface for audio and visual manipulation.},\n\ + \ address = {Porto Alegre, Brazil},\n author = {Josh Urban Davis},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.3673033},\n editor = {Marcelo Queiroz and\ + \ Anna Xambó Sedó},\n issn = {2220-4806},\n month = {June},\n pages = {449--454},\n\ + \ publisher = {UFRGS},\n title = {IllumiWear: A Fiber-Optic eTextile for MultiMedia\ + \ Interactions},\n url = {http://www.nime.org/proceedings/2019/nime2019_paper088.pdf},\n\ + \ year = {2019}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.c47be986 + doi: 10.5281/zenodo.3673033 + editor: Marcelo Queiroz and Anna Xambó Sedó issn: 2220-4806 month: June - presentation-video: https://youtu.be/d1DthYt8EUw - title: The Effect of Control-Display Ratio on User Experience in Immersive Virtual - Musical Instruments - url: https://nime.pubpub.org/pub/8n8br4cc - year: 2021 + pages: 449--454 + publisher: UFRGS + title: 'IllumiWear: A Fiber-Optic eTextile for MultiMedia Interactions' + url: http://www.nime.org/proceedings/2019/nime2019_paper088.pdf + year: 2019 - ENTRYTYPE: inproceedings - ID: NIME21_56 - abstract: 'This paper explores ecological perspectives of human activity in the - use of digital musical instruments and assistive technology. While such perspectives - are relatively nascent in DMI design and evaluation, ecological frameworks have - a long-standing foundation in occupational therapy and the design of assistive - technology products and services. Informed by two case studies, the authors'' - critique, compare and marry concepts from each domain to guide future research - into accessible music technology. 
The authors discover that ecological frameworks - used by occupational therapists are helpful in describing the nature of individual - impairment, disability and situated context. However, such frameworks seemingly - flounder when attempting to describe the personal value of music-making.' - address: 'Shanghai, China' - articleno: 56 - author: 'Lucas, Alex and Harrison, Jacob and Schroeder, Franziska and Ortiz, Miguel' - bibtex: "@inproceedings{NIME21_56,\n abstract = {This paper explores ecological\ - \ perspectives of human activity in the use of digital musical instruments and\ - \ assistive technology. While such perspectives are relatively nascent in DMI\ - \ design and evaluation, ecological frameworks have a long-standing foundation\ - \ in occupational therapy and the design of assistive technology products and\ - \ services. Informed by two case studies, the authors' critique, compare and marry\ - \ concepts from each domain to guide future research into accessible music technology.\ - \ The authors discover that ecological frameworks used by occupational therapists\ - \ are helpful in describing the nature of individual impairment, disability and\ - \ situated context. 
However, such frameworks seemingly flounder when attempting\ - \ to describe the personal value of music-making.},\n address = {Shanghai, China},\n\ - \ articleno = {56},\n author = {Lucas, Alex and Harrison, Jacob and Schroeder,\ - \ Franziska and Ortiz, Miguel},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.ff09de34},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/Khk05vKMrao},\n\ - \ title = {Cross-Pollinating Ecological Perspectives in ADMI Design and Evaluation},\n\ - \ url = {https://nime.pubpub.org/pub/d72sylsq},\n year = {2021}\n}\n" + ID: Nakanishi2013 + abstract: 'In this paper, the authors introduce an interactive device, ``POWDER + BOX''''for use by novices in musical sessions. ``POWDER BOX'''' is equipped withsensor-based + replaceable interfaces, which enable participants to discover andselect their + favorite playing styles of musical instruments during a musicalsession. In addition, + it has a wireless communication function thatsynchronizes musical scale and BPM + between multiple devices. To date, various kinds of ``inventive'''' electronic + musical instruments havebeen created in the field of Computer Music field. The + authors are interestedin formations of musical sessions, aiming for a balance + between simpleinteraction and musical expression. This study focuses on the development + ofperformance playing styles.Musicians occasionally change their playing styles + (e.g., guitar pluckingstyle) during a musical session. Generally, it is difficult + for nonmusicians toachieve this kind of smooth changing depends on levels of their + skillacquisition. However, it is essentially important for enjoying musical sessionswhether + people could acquire these skills. Here, the authors attempted to develop the + device that supports nonmusicians toconquer this point using replaceable interfaces. 
+ The authors expected thatchanging interfaces would bring similar effect as changing + playing style by theskillful player. This research aims to establish an environment + in whichnonmusicians and musicians share their individual musical ideas easily. + Here,the interaction design and configuration of the ``POWDER BOX'''' is presented.' + address: 'Daejeon, Republic of Korea' + author: Yoshihito Nakanishi and Seiichiro Matsumura and Chuichi Arakawa + bibtex: "@inproceedings{Nakanishi2013,\n abstract = {In this paper, the authors\ + \ introduce an interactive device, ``POWDER BOX''for use by novices in musical\ + \ sessions. ``POWDER BOX'' is equipped withsensor-based replaceable interfaces,\ + \ which enable participants to discover andselect their favorite playing styles\ + \ of musical instruments during a musicalsession. In addition, it has a wireless\ + \ communication function thatsynchronizes musical scale and BPM between multiple\ + \ devices. To date, various kinds of ``inventive'' electronic musical instruments\ + \ havebeen created in the field of Computer Music field. The authors are interestedin\ + \ formations of musical sessions, aiming for a balance between simpleinteraction\ + \ and musical expression. This study focuses on the development ofperformance\ + \ playing styles.Musicians occasionally change their playing styles (e.g., guitar\ + \ pluckingstyle) during a musical session. Generally, it is difficult for nonmusicians\ + \ toachieve this kind of smooth changing depends on levels of their skillacquisition.\ + \ However, it is essentially important for enjoying musical sessionswhether people\ + \ could acquire these skills. Here, the authors attempted to develop the device\ + \ that supports nonmusicians toconquer this point using replaceable interfaces.\ + \ The authors expected thatchanging interfaces would bring similar effect as changing\ + \ playing style by theskillful player. 
This research aims to establish an environment\ + \ in whichnonmusicians and musicians share their individual musical ideas easily.\ + \ Here,the interaction design and configuration of the ``POWDER BOX'' is presented.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Yoshihito Nakanishi and\ + \ Seiichiro Matsumura and Chuichi Arakawa},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178620},\n issn = {2220-4806},\n keywords = {Musical instrument,\ + \ synthesizer, replaceable interface, sensors},\n month = {May},\n pages = {373--376},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {{POWDER}\ + \ {BOX}: An Interactive Device with Sensor Based Replaceable Interface For Musical\ + \ Session},\n url = {http://www.nime.org/proceedings/2013/nime2013_101.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.ff09de34 + doi: 10.5281/zenodo.1178620 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/Khk05vKMrao - title: Cross-Pollinating Ecological Perspectives in ADMI Design and Evaluation - url: https://nime.pubpub.org/pub/d72sylsq - year: 2021 + keywords: 'Musical instrument, synthesizer, replaceable interface, sensors' + month: May + pages: 373--376 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'POWDER BOX: An Interactive Device with Sensor Based Replaceable Interface + For Musical Session' + url: http://www.nime.org/proceedings/2013/nime2013_101.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_57 - abstract: 'This paper describes Le Bâton, a new digital musical instrument based - on the nonlinear dynamics of the triple pendulum. The triple pendulum is a simple - physical system constructed by attaching three pendulums vertically such that - each joint can swing freely. 
When subjected to large oscillations, its motion - is chaotic and is often described as unexpectedly mesmerizing. Le Bâton uses wireless - inertial measurement units (IMUs) embedded in each pendulum arm to send real-time - motion data to Max/MSP. Additionally, we implemented a control mechanism, allowing - a user to remotely interact with it by setting the initial release angle. Here, - we explain the motivation and design of Le Bâton and describe mapping strategies. - To conclude, we discuss how its nature of user interaction complicates its status - as a digital musical instrument.' - address: 'Shanghai, China' - articleno: 57 - author: 'Skarha, Matthew and Cusson, Vincent and Frisson, Christian and Wanderley, - Marcelo M.' - bibtex: "@inproceedings{NIME21_57,\n abstract = {This paper describes Le Bâton,\ - \ a new digital musical instrument based on the nonlinear dynamics of the triple\ - \ pendulum. The triple pendulum is a simple physical system constructed by attaching\ - \ three pendulums vertically such that each joint can swing freely. When subjected\ - \ to large oscillations, its motion is chaotic and is often described as unexpectedly\ - \ mesmerizing. Le Bâton uses wireless inertial measurement units (IMUs) embedded\ - \ in each pendulum arm to send real-time motion data to Max/MSP. Additionally,\ - \ we implemented a control mechanism, allowing a user to remotely interact with\ - \ it by setting the initial release angle. Here, we explain the motivation and\ - \ design of Le Bâton and describe mapping strategies. 
To conclude, we discuss\ - \ how its nature of user interaction complicates its status as a digital musical\ - \ instrument.},\n address = {Shanghai, China},\n articleno = {57},\n author =\ - \ {Skarha, Matthew and Cusson, Vincent and Frisson, Christian and Wanderley, Marcelo\ - \ M.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.09ecc54d},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/bLx5b9aqwgI},\n title\ - \ = {Le Bâton: A Digital Musical Instrument Based on the Chaotic Triple Pendulum},\n\ - \ url = {https://nime.pubpub.org/pub/uh1zfz1f},\n year = {2021}\n}\n" + ID: Fohl2013 + abstract: 'This paper presents the design and implementation of agesture control + interface for a wave field synthesis system.The user''s motion is tracked by a + IR-camera-based trackingsystem. The developed connecting software processes thetracker + data to modify the positions of the virtual soundsources of the wave field synthesis + system. Due to the mod-ular design of the software, the triggered actions of the + ges-tures may easily be modified. Three elementary gestureswere designed and implemented: + Select / deselect, circularmovement and radial movement. The guidelines for gesturedesign + and detection are presented, and the user experiencesare discussed.' + address: 'Daejeon, Republic of Korea' + author: Wolfgang Fohl and Malte Nogalski + bibtex: "@inproceedings{Fohl2013,\n abstract = {This paper presents the design and\ + \ implementation of agesture control interface for a wave field synthesis system.The\ + \ user's motion is tracked by a IR-camera-based trackingsystem. The developed\ + \ connecting software processes thetracker data to modify the positions of the\ + \ virtual soundsources of the wave field synthesis system. Due to the mod-ular\ + \ design of the software, the triggered actions of the ges-tures may easily be\ + \ modified. 
Three elementary gestureswere designed and implemented: Select / deselect,\ + \ circularmovement and radial movement. The guidelines for gesturedesign and detection\ + \ are presented, and the user experiencesare discussed.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Wolfgang Fohl and Malte Nogalski},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178522},\n issn = {2220-4806},\n keywords\ + \ = {Wave field synthesis, gesture control},\n month = {May},\n pages = {341--346},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {A Gesture\ + \ Control Interface for a Wave Field Synthesis System},\n url = {http://www.nime.org/proceedings/2013/nime2013_106.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.09ecc54d + doi: 10.5281/zenodo.1178522 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/bLx5b9aqwgI - title: 'Le Bâton: A Digital Musical Instrument Based on the Chaotic Triple Pendulum' - url: https://nime.pubpub.org/pub/uh1zfz1f - year: 2021 + keywords: 'Wave field synthesis, gesture control' + month: May + pages: 341--346 + publisher: 'Graduate School of Culture Technology, KAIST' + title: A Gesture Control Interface for a Wave Field Synthesis System + url: http://www.nime.org/proceedings/2013/nime2013_106.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_58 - abstract: 'The CHILLER (a Computer-Human Interface for the Live Labeling of Emotional - Responses) is a prototype of an affordable and easy-to-use wearable sensor for - the real-time detection and visualization of one of the most accurate biomarkers - of musical emotional processing:  the piloerection of the skin (i.e., the goosebumps) - that accompany musical chills (also known as musical frissons or shivers down - the spine). 
In controlled laboratory experiments, electrodermal activity (EDA) - has been traditionally used to measure fluctuations of musical emotion. EDA is, - however, ill-suited for real-world settings (e.g., live concerts) because of its - sensitivity to movement, electronic noise and variations in the contact between - the skin and the recording electrodes. The CHILLER, based on the Raspberry Pi - architecture, overcomes these limitations by using a well-known algorithm capable - of detecting goosebumps from a video recording of a patch of skin. The CHILLER - has potential applications in both academia and industry and could be used as - a tool to broaden participation in STEM, as it brings together concepts from experimental - psychology, neuroscience, physiology and computer science in an inexpensive, do-it-yourself - device well-suited for educational purposes.' - address: 'Shanghai, China' - articleno: 58 - author: 'Pelofi, Claire and Goldstein, Michal and Bevilacqua, Dana and McPhee, Michael - and Abrams, Ellie and Ripollés, Pablo' - bibtex: "@inproceedings{NIME21_58,\n abstract = {The CHILLER (a Computer-Human Interface\ - \ for the Live Labeling of Emotional Responses) is a prototype of an affordable\ - \ and easy-to-use wearable sensor for the real-time detection and visualization\ - \ of one of the most accurate biomarkers of musical emotional processing:  the\ - \ piloerection of the skin (i.e., the goosebumps) that accompany musical chills\ - \ (also known as musical frissons or shivers down the spine). In controlled laboratory\ - \ experiments, electrodermal activity (EDA) has been traditionally used to measure\ - \ fluctuations of musical emotion. 
EDA is, however, ill-suited for real-world\ - \ settings (e.g., live concerts) because of its sensitivity to movement, electronic\ - \ noise and variations in the contact between the skin and the recording electrodes.\ - \ The CHILLER, based on the Raspberry Pi architecture, overcomes these limitations\ - \ by using a well-known algorithm capable of detecting goosebumps from a video\ - \ recording of a patch of skin. The CHILLER has potential applications in both\ - \ academia and industry and could be used as a tool to broaden participation in\ - \ STEM, as it brings together concepts from experimental psychology, neuroscience,\ - \ physiology and computer science in an inexpensive, do-it-yourself device well-suited\ - \ for educational purposes.},\n address = {Shanghai, China},\n articleno = {58},\n\ - \ author = {Pelofi, Claire and Goldstein, Michal and Bevilacqua, Dana and McPhee,\ - \ Michael and Abrams, Ellie and Ripollés, Pablo},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.21428/92fbeb44.5da1ca0b},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/JujnpqoSdR4},\n title = {CHILLER: a Computer Human Interface\ - \ for the Live Labeling of Emotional Responses},\n url = {https://nime.pubpub.org/pub/kdahf9fq},\n\ - \ year = {2021}\n}\n" + ID: Burlet2013 + abstract: 'Sensor-based gesture recognition is investigated as a possible solution + to theproblem of managing an overwhelming number of audio effects in live guitarperformances. + A realtime gesture recognition system, which automaticallytoggles digital audio + effects according to gestural information captured by anaccelerometer attached + to the body of a guitar, is presented. To supplement theseveral predefined gestures + provided by the recognition system, personalizedgestures may be trained by the + user. 
Upon successful recognition of a gesture,the corresponding audio effects + are applied to the guitar signal and visualfeedback is provided to the user. An + evaluation of the system yielded 86%accuracy for user-independent recognition + and 99% accuracy for user-dependentrecognition, on average.' + address: 'Daejeon, Republic of Korea' + author: Gregory Burlet and Ichiro Fujinaga + bibtex: "@inproceedings{Burlet2013,\n abstract = {Sensor-based gesture recognition\ + \ is investigated as a possible solution to theproblem of managing an overwhelming\ + \ number of audio effects in live guitarperformances. A realtime gesture recognition\ + \ system, which automaticallytoggles digital audio effects according to gestural\ + \ information captured by anaccelerometer attached to the body of a guitar, is\ + \ presented. To supplement theseveral predefined gestures provided by the recognition\ + \ system, personalizedgestures may be trained by the user. Upon successful recognition\ + \ of a gesture,the corresponding audio effects are applied to the guitar signal\ + \ and visualfeedback is provided to the user. 
An evaluation of the system yielded\ + \ 86%accuracy for user-independent recognition and 99% accuracy for user-dependentrecognition,\ + \ on average.},\n address = {Daejeon, Republic of Korea},\n author = {Gregory\ + \ Burlet and Ichiro Fujinaga},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178488},\n\ + \ issn = {2220-4806},\n keywords = {Augmented instrument, gesture recognition,\ + \ accelerometer, pattern recognition, performance practice},\n month = {May},\n\ + \ pages = {41--44},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Stompboxes: Kicking the Habit},\n url = {http://www.nime.org/proceedings/2013/nime2013_109.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.5da1ca0b + doi: 10.5281/zenodo.1178488 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/JujnpqoSdR4 - title: 'CHILLER: a Computer Human Interface for the Live Labeling of Emotional Responses' - url: https://nime.pubpub.org/pub/kdahf9fq - year: 2021 - - -- ENTRYTYPE: inproceedings - ID: NIME21_59 - abstract: 'Creating an artificially intelligent (AI) aid for music composers requires - a practical and modular approach, one that allows the composer to manipulate the - technology when needed in the search for new sounds. Many existing approaches - fail to capture the interest of composers as they are limited beyond their demonstrative - purposes, allow for only minimal interaction from the composer or require GPU - access to generate samples quickly. This paper introduces Score-Transformer (ST), - a practical integration of deep learning technology to aid in the creation of - new music which works seamlessly alongside any popular software notation (Finale, - Sibelius, etc.). 
Score-Transformer is built upon a variant of the powerful transformer - model, currently used in state-of-the-art natural language models. Owing to hierarchical - and sequential similarities between music and language, the transformer model - can learn to write polyphonic MIDI music based on any styles, genres, or composers - it is trained upon. This paper briefly outlines how the model learns and later - notates music based upon any prompt given to it from the user. Furthermore, ST - can be updated at any time on additional MIDI recordings minimizing the risk of - the software becoming outdated or impractical for continued use.' - address: 'Shanghai, China' - articleno: 59 - author: 'Lupker, Jeffrey A. T.' - bibtex: "@inproceedings{NIME21_59,\n abstract = {Creating an artificially intelligent\ - \ (AI) aid for music composers requires a practical and modular approach, one\ - \ that allows the composer to manipulate the technology when needed in the search\ - \ for new sounds. Many existing approaches fail to capture the interest of composers\ - \ as they are limited beyond their demonstrative purposes, allow for only minimal\ - \ interaction from the composer or require GPU access to generate samples quickly.\ - \ This paper introduces Score-Transformer (ST), a practical integration of deep\ - \ learning technology to aid in the creation of new music which works seamlessly\ - \ alongside any popular software notation (Finale, Sibelius, etc.). Score-Transformer\ - \ is built upon a variant of the powerful transformer model, currently used in\ - \ state-of-the-art natural language models. Owing to hierarchical and sequential\ - \ similarities between music and language, the transformer model can learn to\ - \ write polyphonic MIDI music based on any styles, genres, or composers it is\ - \ trained upon. This paper briefly outlines how the model learns and later notates\ - \ music based upon any prompt given to it from the user. 
Furthermore, ST can be\ - \ updated at any time on additional MIDI recordings minimizing the risk of the\ - \ software becoming outdated or impractical for continued use.},\n address = {Shanghai,\ - \ China},\n articleno = {59},\n author = {Lupker, Jeffrey A. T.},\n booktitle\ + keywords: 'Augmented instrument, gesture recognition, accelerometer, pattern recognition, + performance practice' + month: May + pages: 41--44 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Stompboxes: Kicking the Habit' + url: http://www.nime.org/proceedings/2013/nime2013_109.pdf + year: 2013 + + +- ENTRYTYPE: inproceedings + ID: Jensenius2013 + abstract: 'The paper presents the Kinectofon, an instrument for creating sounds + through free-hand interaction in a 3D space. The instrument is based on the RGB + anddepth image streams retrieved from a Microsoft Kinect sensor device. These + twoimage streams are used to create different types of motiongrams, which, again, + are used as the source material for a sonification process based on inverse FFT. + The instrument is intuitive to play, allowing the performer to createsound by + "touching" a virtual sound wall.' + address: 'Daejeon, Republic of Korea' + author: Alexander Refsum Jensenius + bibtex: "@inproceedings{Jensenius2013,\n abstract = {The paper presents the Kinectofon,\ + \ an instrument for creating sounds through free-hand interaction in a 3D space.\ + \ The instrument is based on the RGB anddepth image streams retrieved from a Microsoft\ + \ Kinect sensor device. These twoimage streams are used to create different types\ + \ of motiongrams, which, again, are used as the source material for a sonification\ + \ process based on inverse FFT. 
The instrument is intuitive to play, allowing\ + \ the performer to createsound by \"touching\" a virtual sound wall.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {Alexander Refsum Jensenius},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.21d4fd1f},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/CZO8nj6YzVI},\n title = {Score-Transformer:\ - \ A Deep Learning Aid for Music Composition},\n url = {https://nime.pubpub.org/pub/7a6ij1ak},\n\ - \ year = {2021}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1178564},\n issn = {2220-4806},\n keywords\ + \ = {Kinect, motiongram, sonification, video analysis},\n month = {May},\n pages\ + \ = {196--197},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Kinectofon: Performing with Shapes in Planes},\n url = {http://www.nime.org/proceedings/2013/nime2013_110.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.21d4fd1f + doi: 10.5281/zenodo.1178564 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/CZO8nj6YzVI - title: 'Score-Transformer: A Deep Learning Aid for Music Composition' - url: https://nime.pubpub.org/pub/7a6ij1ak - year: 2021 + keywords: 'Kinect, motiongram, sonification, video analysis' + month: May + pages: 196--197 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Kinectofon: Performing with Shapes in Planes' + url: http://www.nime.org/proceedings/2013/nime2013_110.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_6 - abstract: 'We propose and evaluate an approach to incorporating multiple user-provided - inputs, each demonstrating a complementary set of musical characteristics, to - guide the output of a generative model for synthesizing short music performances - or loops. 
We focus on user inputs that describe both “what to play” (via scores - in MIDI format) and “how to play it” (via rhythmic inputs to specify expressive - timing and dynamics). Through experiments, we demonstrate that our method can - facilitate human-AI co-creation of drum loops with diverse and customizable outputs. - In the process, we argue for the interaction paradigm of mapping by demonstration - as a promising approach to working with deep learning models that are capable - of generating complex and realistic musical parts.' - address: 'Shanghai, China' - articleno: 6 - author: 'Gillick, Jon and Bamman, David' - bibtex: "@inproceedings{NIME21_6,\n abstract = {We propose and evaluate an approach\ - \ to incorporating multiple user-provided inputs, each demonstrating a complementary\ - \ set of musical characteristics, to guide the output of a generative model for\ - \ synthesizing short music performances or loops. We focus on user inputs that\ - \ describe both “what to play” (via scores in MIDI format) and “how to play it”\ - \ (via rhythmic inputs to specify expressive timing and dynamics). Through experiments,\ - \ we demonstrate that our method can facilitate human-AI co-creation of drum loops\ - \ with diverse and customizable outputs. 
In the process, we argue for the interaction\ - \ paradigm of mapping by demonstration as a promising approach to working with\ - \ deep learning models that are capable of generating complex and realistic musical\ - \ parts.},\n address = {Shanghai, China},\n articleno = {6},\n author = {Gillick,\ - \ Jon and Bamman, David},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.06e2d5f4},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/Q2M_smiN6oo},\n\ - \ title = {What to Play and How to Play it: Guiding Generative Music Models with\ - \ Multiple Demonstrations},\n url = {https://nime.pubpub.org/pub/s3x60926},\n\ - \ year = {2021}\n}\n" + ID: Fried2013 + abstract: 'We present a method for automatic feature extraction and cross-modal + mappingusing deep learning. Our system uses stacked autoencoders to learn a layeredfeature + representation of the data. Feature vectors from two (or more)different domains + are mapped to each other, effectively creating a cross-modalmapping. Our system + can either run fully unsupervised, or it can use high-levellabeling to fine-tune + the mapping according a user''s needs. We show severalapplications for our method, + mapping sound to or from images or gestures. Weevaluate system performance both + in standalone inference tasks and incross-modal mappings.' + address: 'Daejeon, Republic of Korea' + author: Ohad Fried and Rebecca Fiebrink + bibtex: "@inproceedings{Fried2013,\n abstract = {We present a method for automatic\ + \ feature extraction and cross-modal mappingusing deep learning. Our system uses\ + \ stacked autoencoders to learn a layeredfeature representation of the data. Feature\ + \ vectors from two (or more)different domains are mapped to each other, effectively\ + \ creating a cross-modalmapping. 
Our system can either run fully unsupervised,\ + \ or it can use high-levellabeling to fine-tune the mapping according a user's\ + \ needs. We show severalapplications for our method, mapping sound to or from\ + \ images or gestures. Weevaluate system performance both in standalone inference\ + \ tasks and incross-modal mappings.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Ohad Fried and Rebecca Fiebrink},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178528},\n issn = {2220-4806},\n keywords = {Deep learning,\ + \ feature learning, mapping, gestural control},\n month = {May},\n pages = {531--534},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Cross-modal\ + \ Sound Mapping Using Deep Learning},\n url = {http://www.nime.org/proceedings/2013/nime2013_111.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.06e2d5f4 + doi: 10.5281/zenodo.1178528 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/Q2M_smiN6oo - title: 'What to Play and How to Play it: Guiding Generative Music Models with Multiple - Demonstrations' - url: https://nime.pubpub.org/pub/s3x60926 - year: 2021 + keywords: 'Deep learning, feature learning, mapping, gestural control' + month: May + pages: 531--534 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Cross-modal Sound Mapping Using Deep Learning + url: http://www.nime.org/proceedings/2013/nime2013_111.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_60 - abstract: 'Amstramgrame is a music technology STEAM (Science Technology Engineering - Arts and Mathematics) project aiming at making more tangible abstract scientific - concepts through the programming of a Digital Musical Instrument (DMI): the Gramophone. 
- Various custom tools ranging from online programming environments to the Gramophone - itself have been developed as part of this project. An innovative method anchored - in the reality of the field as well as a wide range of key-turn pedagogical scenarios - are also part of the Amtramgrame toolkit. This article presents the tools and - the method of Amstramgrame as well as the results of its pilot phase. Future directions - along with some insights on the implementation of this kind of project are provided - as well.' - address: 'Shanghai, China' - articleno: 60 - author: 'Michon, Romain and Dumitrascu, Catinca and Chudet, Sandrine and Orlarey, - Yann and Letz, Stéphane and Fober, Dominique' - bibtex: "@inproceedings{NIME21_60,\n abstract = {Amstramgrame is a music technology\ - \ STEAM (Science Technology Engineering Arts and Mathematics) project aiming at\ - \ making more tangible abstract scientific concepts through the programming of\ - \ a Digital Musical Instrument (DMI): the Gramophone. Various custom tools ranging\ - \ from online programming environments to the Gramophone itself have been developed\ - \ as part of this project. An innovative method anchored in the reality of the\ - \ field as well as a wide range of key-turn pedagogical scenarios are also part\ - \ of the Amtramgrame toolkit. This article presents the tools and the method of\ - \ Amstramgrame as well as the results of its pilot phase. 
Future directions along\ - \ with some insights on the implementation of this kind of project are provided\ - \ as well.},\n address = {Shanghai, China},\n articleno = {60},\n author = {Michon,\ - \ Romain and Dumitrascu, Catinca and Chudet, Sandrine and Orlarey, Yann and Letz,\ - \ Stéphane and Fober, Dominique},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.a84edd3f},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/KTgl4suQ_Ks},\n\ - \ title = {Amstramgrame: Making Scientific Concepts More Tangible Through Music\ - \ Technology at School},\n url = {https://nime.pubpub.org/pub/3zeala6v},\n year\ - \ = {2021}\n}\n" + ID: Kapur2013 + abstract: 'This paper describes the creation of new interfaces that extend traditionalKorean + music and dance. Specifically, this research resulted in the design ofthe eHaegum + (Korean bowed instrument), eJanggu (Korean drum), and ZiOm wearableinterfaces. + The paper describes the process of making these new interfaces aswell as how they + have been used to create new music and forms of digital artmaking that blend traditional + practice with modern techniques.' + address: 'Daejeon, Republic of Korea' + author: Ajay Kapur and Dae Hong Kim and Raakhi Kapur and Kisoon Eom + bibtex: "@inproceedings{Kapur2013,\n abstract = {This paper describes the creation\ + \ of new interfaces that extend traditionalKorean music and dance. Specifically,\ + \ this research resulted in the design ofthe eHaegum (Korean bowed instrument),\ + \ eJanggu (Korean drum), and ZiOm wearableinterfaces. 
The paper describes the\ + \ process of making these new interfaces aswell as how they have been used to\ + \ create new music and forms of digital artmaking that blend traditional practice\ + \ with modern techniques.},\n address = {Daejeon, Republic of Korea},\n author\ + \ = {Ajay Kapur and Dae Hong Kim and Raakhi Kapur and Kisoon Eom},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178576},\n issn = {2220-4806},\n keywords\ + \ = {Hyperinstrument, Korean interface design, wearable sensors, dance controllers,\ + \ bowed controllers, drum controllers},\n month = {May},\n pages = {45--48},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {New Interfaces\ + \ for Traditional Korean Music and Dance},\n url = {http://www.nime.org/proceedings/2013/nime2013_113.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.a84edd3f + doi: 10.5281/zenodo.1178576 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/KTgl4suQ_Ks - title: 'Amstramgrame: Making Scientific Concepts More Tangible Through Music Technology - at School' - url: https://nime.pubpub.org/pub/3zeala6v - year: 2021 + keywords: 'Hyperinstrument, Korean interface design, wearable sensors, dance controllers, + bowed controllers, drum controllers' + month: May + pages: 45--48 + publisher: 'Graduate School of Culture Technology, KAIST' + title: New Interfaces for Traditional Korean Music and Dance + url: http://www.nime.org/proceedings/2013/nime2013_113.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_61 - abstract: 'We study the question of how wireless, self-contained CMOS-synthesizers - with built-in speakers can be used to achieve low-threshold operability of multichannel - sound fields. 
We deliberately use low-tech and DIY approaches to build simple - sound modules for music interaction and education in order to ensure accessibility - of the technology. The modules are operated by wireless power transfer (WPT). - A multichannel sound field can be easily generated and modulated by placing several - sound objects in proximity to the induction coils. Alterations in sound are caused - by repositioning, moving or grouping the sound modules. Although not physically - linked to each other, the objects start interacting electro-acoustically when - they share the same magnetic field. Because they are equipped with electronic - sound generators and transducers, the sound modules can work independently from - a sound studio situation.' - address: 'Shanghai, China' - articleno: 61 - author: 'Reuter, Vivian and Schwarz, Lorenz' - bibtex: "@inproceedings{NIME21_61,\n abstract = {We study the question of how wireless,\ - \ self-contained CMOS-synthesizers with built-in speakers can be used to achieve\ - \ low-threshold operability of multichannel sound fields. We deliberately use\ - \ low-tech and DIY approaches to build simple sound modules for music interaction\ - \ and education in order to ensure accessibility of the technology. The modules\ - \ are operated by wireless power transfer (WPT). A multichannel sound field can\ - \ be easily generated and modulated by placing several sound objects in proximity\ - \ to the induction coils. Alterations in sound are caused by repositioning, moving\ - \ or grouping the sound modules. Although not physically linked to each other,\ - \ the objects start interacting electro-acoustically when they share the same\ - \ magnetic field. 
Because they are equipped with electronic sound generators and\ - \ transducers, the sound modules can work independently from a sound studio situation.},\n\ - \ address = {Shanghai, China},\n articleno = {61},\n author = {Reuter, Vivian\ - \ and Schwarz, Lorenz},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.07c72a46},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/08kfv74Z880},\n\ - \ title = {Wireless Sound Modules},\n url = {https://nime.pubpub.org/pub/muvvx0y5},\n\ - \ year = {2021}\n}\n" + ID: Zhang2013 + abstract: 'The Microsoft Kinect is a popular and versatile input devicefor musical + interfaces. However, using the Kinect for suchinterfaces requires not only signi_x000C_cant + programming experience,but also the use of complex geometry or machinelearning + techniques to translate joint positions into higherlevel gestures. We created + the Kinect Instrument Builder(KIB) to address these di_x000E_culties by structuring + gesturalinterfaces as combinations of gestural widgets. KIB allowsthe user to + design an instrument by con_x000C_guring gesturalprimitives, each with a set of + simple but attractive visualfeedback elements. After designing an instrument on + KIB''sweb interface, users can play the instrument on KIB''s performanceinterface, + which displays visualizations and transmitsOSC messages to other applications + for sound synthesisor further remapping.' + address: 'Daejeon, Republic of Korea' + author: Edward Zhang + bibtex: "@inproceedings{Zhang2013,\n abstract = {The Microsoft Kinect is a popular\ + \ and versatile input devicefor musical interfaces. However, using the Kinect\ + \ for suchinterfaces requires not only signi_x000C_cant programming experience,but\ + \ also the use of complex geometry or machinelearning techniques to translate\ + \ joint positions into higherlevel gestures. 
We created the Kinect Instrument\
+    \ Builder(KIB) to address these difficulties by structuring gesturalinterfaces\
+    \ as combinations of gestural widgets. KIB allowsthe user to design an instrument\
+    \ by configuring gesturalprimitives, each with a set of simple but attractive\
+    \ visualfeedback elements. After designing an instrument on KIB'sweb interface,\
+    \ users can play the instrument on KIB's performanceinterface, which displays\
+    \ visualizations and transmitsOSC messages to other applications for sound synthesisor\
+    \ further remapping.},\n address = {Daejeon, Republic of Korea},\n author = {Edward\
+    \ Zhang},\n booktitle = {Proceedings of the International Conference on New Interfaces\
+    \ for Musical Expression},\n doi = {10.5281/zenodo.1178698},\n issn = {2220-4806},\n\
+    \ keywords = {Kinect, gesture, widgets, OSC, mapping},\n month = {May},\n pages\
+    \ = {519--524},\n publisher = {Graduate School of Culture Technology, KAIST},\n\
+    \ title = {KIB: Simplifying Gestural Instrument Creation Using Widgets},\n url\
+    \ = {http://www.nime.org/proceedings/2013/nime2013_114.pdf},\n year = {2013}\n\
+    }\n"
   booktitle: Proceedings of the International Conference on New Interfaces for Musical
     Expression
-  doi: 10.21428/92fbeb44.07c72a46
+  doi: 10.5281/zenodo.1178698
   issn: 2220-4806
-  month: June
-  presentation-video: https://youtu.be/08kfv74Z880
-  title: Wireless Sound Modules
-  url: https://nime.pubpub.org/pub/muvvx0y5
-  year: 2021
+  keywords: 'Kinect, gesture, widgets, OSC, mapping'
+  month: May
+  pages: 519--524
+  publisher: 'Graduate School of Culture Technology, KAIST'
+  title: 'KIB: Simplifying Gestural Instrument Creation Using Widgets'
+  url: http://www.nime.org/proceedings/2013/nime2013_114.pdf
+  year: 2013
 
 
 - ENTRYTYPE: inproceedings
-  ID: NIME21_62
-  abstract: 'When two sounds are played at the same loudness, pitch, and duration,
-    what sets them apart are their timbres. 
This study documents the design and implementation - of the Timbre Explorer, a synthesizer interface based on efforts to dimensionalize - this perceptual concept. The resulting prototype controls four perceptually salient - dimensions of timbre in real-time: attack time, brightness, spectral flux, and - spectral density. A graphical user interface supports user understanding with - live visualizations of the effects of each dimension. The applications of this - interface are three-fold; further perceptual timbre studies, usage as a practical - shortcut for synthesizers, and educating users about the frequency domain, sound - synthesis, and the concept of timbre. The project has since been expanded to a - standalone version independent of a computer and a purely online web-audio version.' - address: 'Shanghai, China' - articleno: 62 - author: 'Lam, Joshua Ryan and Saitis, Charalampos' - bibtex: "@inproceedings{NIME21_62,\n abstract = {When two sounds are played at the\ - \ same loudness, pitch, and duration, what sets them apart are their timbres.\ - \ This study documents the design and implementation of the Timbre Explorer, a\ - \ synthesizer interface based on efforts to dimensionalize this perceptual concept.\ - \ The resulting prototype controls four perceptually salient dimensions of timbre\ - \ in real-time: attack time, brightness, spectral flux, and spectral density.\ - \ A graphical user interface supports user understanding with live visualizations\ - \ of the effects of each dimension. The applications of this interface are three-fold;\ - \ further perceptual timbre studies, usage as a practical shortcut for synthesizers,\ - \ and educating users about the frequency domain, sound synthesis, and the concept\ - \ of timbre. 
The project has since been expanded to a standalone version independent\ - \ of a computer and a purely online web-audio version.},\n address = {Shanghai,\ - \ China},\n articleno = {62},\n author = {Lam, Joshua Ryan and Saitis, Charalampos},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.92a95683},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/EJ0ZAhOdBTw},\n title\ - \ = {The Timbre Explorer: A Synthesizer Interface for Educational Purposes and\ - \ Perceptual Studies},\n url = {https://nime.pubpub.org/pub/q5oc20wg},\n year\ - \ = {2021}\n}\n" + ID: Hochenbaum2013 + abstract: 'Music education is a rich subject with many approaches and methodologies + thathave developed over hundreds of years. More than ever, technology playsimportant + roles at many levels of a musician''s practice. This paper begins toexplore some + of the ways in which technology developed out of the NIMEcommunity (specifically + hyperinstruments), can inform a musician''s dailypractice, through short and long + term metrics tracking and data visualization.' + address: 'Daejeon, Republic of Korea' + author: Jordan Hochenbaum and Ajay Kapur + bibtex: "@inproceedings{Hochenbaum2013,\n abstract = {Music education is a rich\ + \ subject with many approaches and methodologies thathave developed over hundreds\ + \ of years. More than ever, technology playsimportant roles at many levels of\ + \ a musician's practice. 
This paper begins toexplore some of the ways in which\ + \ technology developed out of the NIMEcommunity (specifically hyperinstruments),\ + \ can inform a musician's dailypractice, through short and long term metrics tracking\ + \ and data visualization.},\n address = {Daejeon, Republic of Korea},\n author\ + \ = {Jordan Hochenbaum and Ajay Kapur},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178552},\n\ + \ issn = {2220-4806},\n keywords = {Hyperinstruments, Pedagogy, Metrics, Ezither,\ + \ Practice Room},\n month = {May},\n pages = {307--312},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Toward The Future Practice\ + \ Room: Empowering Musical Pedagogy through Hyperinstruments},\n url = {http://www.nime.org/proceedings/2013/nime2013_116.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.92a95683 + doi: 10.5281/zenodo.1178552 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/EJ0ZAhOdBTw - title: 'The Timbre Explorer: A Synthesizer Interface for Educational Purposes and - Perceptual Studies' - url: https://nime.pubpub.org/pub/q5oc20wg - year: 2021 + keywords: 'Hyperinstruments, Pedagogy, Metrics, Ezither, Practice Room' + month: May + pages: 307--312 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Toward The Future Practice Room: Empowering Musical Pedagogy through Hyperinstruments' + url: http://www.nime.org/proceedings/2013/nime2013_116.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_63 - abstract: 'Music education is an important part of the school curriculum; it teaches - children to be creative and to collaborate with others. Music gives individuals - another medium to communicate through, which is especially important for individuals - with cognitive or physical disabilities. 
Teachers of children with severe disabilities - have expressed a lack of musical instruments adapted for these children, which - leads to an incomplete music education for this group. This study aims at designing - and evaluating a set of collaborative musical instruments for children with cognitive - and physical disabilities, and the research is done together with the special - education school Rullen in Stockholm, Sweden. The process was divided into three - main parts; a pre-study, building and designing, and finally a user study. Based - on findings from previous research, together with input received from teachers - at Rullen during the pre-study, the resulting design consists of four musical - instruments that are connected to a central hub. The results show that the instruments - functioned as intended and that the design makes musical learning accessible in - a way traditional instruments do not, as well as creates a good basis for a collaborative - musical experience. However, fully evaluating the effect of playing together requires - more time for the children to get comfortable with the instruments and also for - the experiment leaders to test different setups to optimize the conditions for - a good interplay.' - address: 'Shanghai, China' - articleno: 63 - author: 'Svahn, Maria and Hölling, Josefine and Curtsson, Fanny and Nokelainen, - Nina' - bibtex: "@inproceedings{NIME21_63,\n abstract = {Music education is an important\ - \ part of the school curriculum; it teaches children to be creative and to collaborate\ - \ with others. Music gives individuals another medium to communicate through,\ - \ which is especially important for individuals with cognitive or physical disabilities.\ - \ Teachers of children with severe disabilities have expressed a lack of musical\ - \ instruments adapted for these children, which leads to an incomplete music education\ - \ for this group. 
This study aims at designing and evaluating a set of collaborative\ - \ musical instruments for children with cognitive and physical disabilities, and\ - \ the research is done together with the special education school Rullen in Stockholm,\ - \ Sweden. The process was divided into three main parts; a pre-study, building\ - \ and designing, and finally a user study. Based on findings from previous research,\ - \ together with input received from teachers at Rullen during the pre-study, the\ - \ resulting design consists of four musical instruments that are connected to\ - \ a central hub. The results show that the instruments functioned as intended\ - \ and that the design makes musical learning accessible in a way traditional instruments\ - \ do not, as well as creates a good basis for a collaborative musical experience.\ - \ However, fully evaluating the effect of playing together requires more time\ - \ for the children to get comfortable with the instruments and also for the experiment\ - \ leaders to test different setups to optimize the conditions for a good interplay.},\n\ - \ address = {Shanghai, China},\n articleno = {63},\n author = {Svahn, Maria and\ - \ Hölling, Josefine and Curtsson, Fanny and Nokelainen, Nina},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.e795c9b5},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/2cD9f493oJM},\n title = {The Rullen Band},\n\ - \ url = {https://nime.pubpub.org/pub/pvd6davm},\n year = {2021}\n}\n" + ID: Michon2013 + abstract: 'Black Box is a site based installation that allows users to create uniquesounds + through physical interaction. The installation consists of a geodesicdome, surround + sound speakers, and a custom instrument suspended from the apexof thedome. 
Audience + members entering the space are able to create sound by strikingor rubbing the + cube, and are able to control a delay system by moving the cubewithin the space.' + address: 'Daejeon, Republic of Korea' + author: Romain Michon and Myles Borins and David Meisenholder + bibtex: "@inproceedings{Michon2013,\n abstract = {Black Box is a site based installation\ + \ that allows users to create uniquesounds through physical interaction. The installation\ + \ consists of a geodesicdome, surround sound speakers, and a custom instrument\ + \ suspended from the apexof thedome. Audience members entering the space are able\ + \ to create sound by strikingor rubbing the cube, and are able to control a delay\ + \ system by moving the cubewithin the space.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Romain Michon and Myles Borins and David Meisenholder},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178612},\n issn = {2220-4806},\n\ + \ keywords = {Satellite CCRMA, Beagleboard, PureData, Faust, Embedded-Linux, Open\ + \ Sound Control},\n month = {May},\n pages = {464--465},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {The Black Box},\n url = {http://www.nime.org/proceedings/2013/nime2013_117.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.e795c9b5 + doi: 10.5281/zenodo.1178612 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/2cD9f493oJM - title: The Rullen Band - url: https://nime.pubpub.org/pub/pvd6davm - year: 2021 + keywords: 'Satellite CCRMA, Beagleboard, PureData, Faust, Embedded-Linux, Open Sound + Control' + month: May + pages: 464--465 + publisher: 'Graduate School of Culture Technology, KAIST' + title: The Black Box + url: http://www.nime.org/proceedings/2013/nime2013_117.pdf + year: 2013 - 
ENTRYTYPE: inproceedings - ID: NIME21_64 - abstract: 'Sequencer-based live performances of electronic music require a variety - of interactions. These interactions depend strongly on the affordances and constraints - of the used instrument. Musicians may perceive the available interactions offered - by the used instrument as limiting. For furthering the development of instruments - for live performances and expanding the interaction possibilities, first, a systematic - overview of interactions in current sequencer-based music performance is needed. To - that end, we propose a taxonomy of interactions in sequencer-based music performances - of electronic music. We identify two performance modes sequencing and sound design - and four interaction classes creation, modification, selection, and evaluation. - Furthermore, we discuss the influence of the different interaction classes on - both, musicians as well as the audience and use the proposed taxonomy to analyze - six commercially available hardware devices.' - address: 'Shanghai, China' - articleno: 64 - author: 'Püst, Stefan and Gieseke, Lena and Brennecke, Angela' - bibtex: "@inproceedings{NIME21_64,\n abstract = {Sequencer-based live performances\ - \ of electronic music require a variety of interactions. These interactions depend\ - \ strongly on the affordances and constraints of the used instrument. Musicians\ - \ may perceive the available interactions offered by the used instrument as limiting.\ - \ For furthering the development of instruments for live performances and expanding\ - \ the interaction possibilities, first, a systematic overview of interactions\ - \ in current sequencer-based music performance is needed. To that end, we propose\ - \ a taxonomy of interactions in sequencer-based music performances of electronic\ - \ music. We identify two performance modes sequencing and sound design and four\ - \ interaction classes creation, modification, selection, and evaluation. 
Furthermore,\ - \ we discuss the influence of the different interaction classes on both, musicians\ - \ as well as the audience and use the proposed taxonomy to analyze six commercially\ - \ available hardware devices.},\n address = {Shanghai, China},\n articleno = {64},\n\ - \ author = {Püst, Stefan and Gieseke, Lena and Brennecke, Angela},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.0d5ab18d},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/c4MUKWpneg0},\n title = {Interaction\ - \ Taxonomy for Sequencer-Based Music Performances},\n url = {https://nime.pubpub.org/pub/gq2ukghi},\n\ - \ year = {2021}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.21428/92fbeb44.0d5ab18d - issn: 2220-4806 - month: June - presentation-video: https://youtu.be/c4MUKWpneg0 - title: Interaction Taxonomy for Sequencer-Based Music Performances - url: https://nime.pubpub.org/pub/gq2ukghi - year: 2021 - - -- ENTRYTYPE: inproceedings - ID: NIME21_65 - abstract: 'From an epistemological perspective, this work presents a discussion - of how the paradigm of enactive music cognition is related to improvisation in - the context of the skills and needs of 21st-century music learners. Improvisation - in music education is addressed within the perspective of an alternative but an - increasingly influential enactive approach to mind (Varela et al., 1993) followed - by the four theories known as the 4E of cognition - embedded, embodied, enactive - and extended - which naturally have characteristics in common that led them to - be grouped in this way. I discuss the “autopoietic” (self-maintain systems that - auto-reproduce over time based on their own set of internal rules) nature of the - embodied musical mind. 
To conclude, an overview concerning the enactivist approach - within DMIs design in order to provide a better understanding of the experiences - and benefits of using new technologies in musical learning contexts is outlined.' - address: 'Shanghai, China' - articleno: 65 - author: 'Corintha, Isabela and Cabral, Giordano' - bibtex: "@inproceedings{NIME21_65,\n abstract = {From an epistemological perspective,\ - \ this work presents a discussion of how the paradigm of enactive music cognition\ - \ is related to improvisation in the context of the skills and needs of 21st-century\ - \ music learners. Improvisation in music education is addressed within the perspective\ - \ of an alternative but an increasingly influential enactive approach to mind\ - \ (Varela et al., 1993) followed by the four theories known as the 4E of cognition\ - \ - embedded, embodied, enactive and extended - which naturally have characteristics\ - \ in common that led them to be grouped in this way. I discuss the “autopoietic”\ - \ (self-maintain systems that auto-reproduce over time based on their own set\ - \ of internal rules) nature of the embodied musical mind. 
To conclude, an overview\ - \ concerning the enactivist approach within DMIs design in order to provide a\ - \ better understanding of the experiences and benefits of using new technologies\ - \ in musical learning contexts is outlined.},\n address = {Shanghai, China},\n\ - \ articleno = {65},\n author = {Corintha, Isabela and Cabral, Giordano},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.56a01d33},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/dGb5tl_tA58},\n title = {Improvised\ - \ Sound-Making within Musical Apprenticeship and Enactivism: An Intersection between\ - \ the 4E`s Model and DMIs},\n url = {https://nime.pubpub.org/pub/e4lsrn6c},\n\ - \ year = {2021}\n}\n" + ID: Choi2013 + abstract: 'The advent of Web Audio API in 2011 marked a significant advance for + web-basedmusic systems by enabling real-time sound synthesis on web browsers simply + bywriting JavaScript code. While this powerful functionality has arrived there + isa yet unaddressed need for an extension to the API to fully reveal itspotential. + To meet this need, a JavaScript library dubbed WAAX was created tofacilitate music + and audio programming based on Web Audio API bypassingunderlying tasks and augmenting + useful features. In this paper, we describecommon issues in web audio programming, + illustrate how WAAX can speed up thedevelopment, and discuss future developments.' + address: 'Daejeon, Republic of Korea' + author: Hongchan Choi and Jonathan Berger + bibtex: "@inproceedings{Choi2013,\n abstract = {The advent of Web Audio API in 2011\ + \ marked a significant advance for web-basedmusic systems by enabling real-time\ + \ sound synthesis on web browsers simply bywriting JavaScript code. While this\ + \ powerful functionality has arrived there isa yet unaddressed need for an extension\ + \ to the API to fully reveal itspotential. 
To meet this need, a JavaScript library\ + \ dubbed WAAX was created tofacilitate music and audio programming based on Web\ + \ Audio API bypassingunderlying tasks and augmenting useful features. In this\ + \ paper, we describecommon issues in web audio programming, illustrate how WAAX\ + \ can speed up thedevelopment, and discuss future developments.},\n address =\ + \ {Daejeon, Republic of Korea},\n author = {Hongchan Choi and Jonathan Berger},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178494},\n issn = {2220-4806},\n\ + \ keywords = {Web Audio API, Chrome, JavaScript, web-based music system, collaborative\ + \ music making, audience participation},\n month = {May},\n pages = {499--502},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {WAAX:\ + \ Web Audio {API} eXtension},\n url = {http://www.nime.org/proceedings/2013/nime2013_119.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.56a01d33 + doi: 10.5281/zenodo.1178494 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/dGb5tl_tA58 - title: 'Improvised Sound-Making within Musical Apprenticeship and Enactivism: An - Intersection between the 4E`s Model and DMIs' - url: https://nime.pubpub.org/pub/e4lsrn6c - year: 2021 + keywords: 'Web Audio API, Chrome, JavaScript, web-based music system, collaborative + music making, audience participation' + month: May + pages: 499--502 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'WAAX: Web Audio API eXtension' + url: http://www.nime.org/proceedings/2013/nime2013_119.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_66 - abstract: 'In many contexts, creating mappings for gestural interactions can form - part of an artistic process. 
Creators seeking a mapping that is expressive, novel, - and affords them a sense of authorship may not know how to program it up in a - signal processing patch. Tools like Wekinator [1] and MIMIC [2] allow creators - to use supervised machine learning to learn mappings from example input/output - pairings. However, a creator may know a good mapping when they encounter it yet - start with little sense of what the inputs or outputs should be. We call this - an open-ended mapping process. Addressing this need, we introduce the latent mapping, - which leverages the latent space of an unsupervised machine learning algorithm - such as a Variational Autoencoder trained on a corpus of unlabelled gestural data - from the creator. We illustrate it with Sonified Body, a system mapping full-body - movement to sound which we explore in a residency with three dancers.' - address: 'Shanghai, China' - articleno: 66 - author: 'Murray-Browne, Tim and Tigas, Panagiotis' - bibtex: "@inproceedings{NIME21_66,\n abstract = {In many contexts, creating mappings\ - \ for gestural interactions can form part of an artistic process. Creators seeking\ - \ a mapping that is expressive, novel, and affords them a sense of authorship\ - \ may not know how to program it up in a signal processing patch. Tools like Wekinator\ - \ [1] and MIMIC [2] allow creators to use supervised machine learning to learn\ - \ mappings from example input/output pairings. However, a creator may know a good\ - \ mapping when they encounter it yet start with little sense of what the inputs\ - \ or outputs should be. We call this an open-ended mapping process. Addressing\ - \ this need, we introduce the latent mapping, which leverages the latent space\ - \ of an unsupervised machine learning algorithm such as a Variational Autoencoder\ - \ trained on a corpus of unlabelled gestural data from the creator. 
We illustrate\ - \ it with Sonified Body, a system mapping full-body movement to sound which we\ - \ explore in a residency with three dancers.},\n address = {Shanghai, China},\n\ - \ articleno = {66},\n author = {Murray-Browne, Tim and Tigas, Panagiotis},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.9d4bcd4b},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/zBOHWyIGaYc},\n title = {Latent\ - \ Mappings: Generating Open-Ended Expressive Mappings Using Variational Autoencoders},\n\ - \ url = {https://nime.pubpub.org/pub/latent-mappings},\n year = {2021}\n}\n" + ID: Hamano2013 + abstract: 'Electroencephalography (EEG) has been used to generate music for over + 40 years,but the most recent developments in brain--computer interfaces (BCI) + allowgreater control and more flexible expression for using new musical instrumentswith + EEG. We developed a real-time musical performance system using BCItechnology and + sonification techniques to generate imagined musical chords withorganically fluctuating + timbre. We aim to emulate the expressivity oftraditional acoustic instruments. + The BCI part of the system extracts patternsfrom the neural activity while a performer + imagines a score of music. Thesonification part of the system captures non-stationary + changes in the brainwaves and reflects them in the timbre by additive synthesis. + In this paper, wediscuss the conceptual design, system development, and the performance + of thisinstrument.' 
+ address: 'Daejeon, Republic of Korea' + author: Takayuki Hamano and Tomasz Rutkowski and Hiroko Terasawa and Kazuo Okanoya + and Kiyoshi Furukawa + bibtex: "@inproceedings{Hamano2013,\n abstract = {Electroencephalography (EEG) has\ + \ been used to generate music for over 40 years,but the most recent developments\ + \ in brain--computer interfaces (BCI) allowgreater control and more flexible expression\ + \ for using new musical instrumentswith EEG. We developed a real-time musical\ + \ performance system using BCItechnology and sonification techniques to generate\ + \ imagined musical chords withorganically fluctuating timbre. We aim to emulate\ + \ the expressivity oftraditional acoustic instruments. The BCI part of the system\ + \ extracts patternsfrom the neural activity while a performer imagines a score\ + \ of music. Thesonification part of the system captures non-stationary changes\ + \ in the brainwaves and reflects them in the timbre by additive synthesis. In\ + \ this paper, wediscuss the conceptual design, system development, and the performance\ + \ of thisinstrument.},\n address = {Daejeon, Republic of Korea},\n author = {Takayuki\ + \ Hamano and Tomasz Rutkowski and Hiroko Terasawa and Kazuo Okanoya and Kiyoshi\ + \ Furukawa},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178542},\n issn\ + \ = {2220-4806},\n keywords = {Brain-computer interface (BCI), qualitative and\ + \ quantitative information, classification, sonification},\n month = {May},\n\ + \ pages = {49--54},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Generating an Integrated Musical Expression with a Brain--Computer\ + \ Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_120.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.9d4bcd4b + doi: 
10.5281/zenodo.1178542 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/zBOHWyIGaYc - title: 'Latent Mappings: Generating Open-Ended Expressive Mappings Using Variational - Autoencoders' - url: https://nime.pubpub.org/pub/latent-mappings - year: 2021 + keywords: 'Brain-computer interface (BCI), qualitative and quantitative information, + classification, sonification' + month: May + pages: 49--54 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Generating an Integrated Musical Expression with a Brain--Computer Interface + url: http://www.nime.org/proceedings/2013/nime2013_120.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_67 - abstract: 'This paper describes Oopsy, which provides a streamlined process for - editing digital signal processing algorithms for precise and sample accurate sound - generation, transformation and modulation, and placing them in the context of - embedded hardware and modular synthesizers. This pipeline gives digital instrument - designers the development flexibility of established software with the deployment - benefits of working on hardware. Specifically, algorithm design takes place in - the flexible context of gen~ in Max, and Oopsy automatically and fluently translates - this and uploads it onto the open-ended Daisy embedded hardware. The paper locates - this work in the context of related software/hardware workflows, and provides - detail of its contributions in design, implementation, and use.' 
- address: 'Shanghai, China' - articleno: 67 - author: 'Wakefield, Graham' - bibtex: "@inproceedings{NIME21_67,\n abstract = {This paper describes Oopsy, which\ - \ provides a streamlined process for editing digital signal processing algorithms\ - \ for precise and sample accurate sound generation, transformation and modulation,\ - \ and placing them in the context of embedded hardware and modular synthesizers.\ - \ This pipeline gives digital instrument designers the development flexibility\ - \ of established software with the deployment benefits of working on hardware.\ - \ Specifically, algorithm design takes place in the flexible context of gen~ in\ - \ Max, and Oopsy automatically and fluently translates this and uploads it onto\ - \ the open-ended Daisy embedded hardware. The paper locates this work in the context\ - \ of related software/hardware workflows, and provides detail of its contributions\ - \ in design, implementation, and use.},\n address = {Shanghai, China},\n articleno\ - \ = {67},\n author = {Wakefield, Graham},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.e32fde90},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/xJwI9F9Spbo},\n\ - \ title = {A streamlined workflow from Max/gen{\\textasciitilde} to modular hardware},\n\ - \ url = {https://nime.pubpub.org/pub/0u3ruj23},\n year = {2021}\n}\n" + ID: Martin2013 + abstract: 'This paper describes the development of an Apple iPhone based mobile + computersystem for vibraphone and its use in a series of the author''s performanceprojects + in 2011 and 2012.This artistic research was motivated by a desire to develop an + alternative tolaptop computers for the author''s existing percussion and computer + performancepractice. 
The aims were to develop a light, compact and flexible system + usingmobile devices that would allow computer music to infiltrate solo and ensembleperformance + situations where it is difficult to use a laptop computer.The project began with + a system that brought computer elements to NordligVinter, a suite of percussion + duos, using an iPhone, RjDj, Pure Data and ahome-made pickup system. This process + was documented with video recordings andanalysed using ethnographic methods.The + mobile computer music setup proved to be elegant and convenient inperformance + situations with very little time and space to set up, as well as inperformance + classes and workshops. The simple mobile system encouragedexperimentation and + the platforms used enabled sharing with a wider audience.' + address: 'Daejeon, Republic of Korea' + author: Charles Martin + bibtex: "@inproceedings{Martin2013,\n abstract = {This paper describes the development\ + \ of an Apple iPhone based mobile computersystem for vibraphone and its use in\ + \ a series of the author's performanceprojects in 2011 and 2012.This artistic\ + \ research was motivated by a desire to develop an alternative tolaptop computers\ + \ for the author's existing percussion and computer performancepractice. The aims\ + \ were to develop a light, compact and flexible system usingmobile devices that\ + \ would allow computer music to infiltrate solo and ensembleperformance situations\ + \ where it is difficult to use a laptop computer.The project began with a system\ + \ that brought computer elements to NordligVinter, a suite of percussion duos,\ + \ using an iPhone, RjDj, Pure Data and ahome-made pickup system. This process\ + \ was documented with video recordings andanalysed using ethnographic methods.The\ + \ mobile computer music setup proved to be elegant and convenient inperformance\ + \ situations with very little time and space to set up, as well as inperformance\ + \ classes and workshops. 
The simple mobile system encouragedexperimentation and\ + \ the platforms used enabled sharing with a wider audience.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Charles Martin},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178602},\n issn = {2220-4806},\n keywords = {percussion,\ + \ mobile computer music, Apple iOS, collaborative performance practice, ethnography,\ + \ artistic research},\n month = {May},\n pages = {377--380},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Performing with a Mobile Computer\ + \ System for Vibraphone},\n url = {http://www.nime.org/proceedings/2013/nime2013_121.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.e32fde90 + doi: 10.5281/zenodo.1178602 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/xJwI9F9Spbo - title: A streamlined workflow from Max/gen~ to modular hardware - url: https://nime.pubpub.org/pub/0u3ruj23 - year: 2021 + keywords: 'percussion, mobile computer music, Apple iOS, collaborative performance + practice, ethnography, artistic research' + month: May + pages: 377--380 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Performing with a Mobile Computer System for Vibraphone + url: http://www.nime.org/proceedings/2013/nime2013_121.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_68 - abstract: 'In response to the 2020 pandemic, a new work was composed inspired by - the limitations and challenges of performing over the network. Since synchronization - is one of the big challenges, or perhaps something to be avoided due to network - latency, this work explicitly calls for desynchronization in a controlled way, - using metronomes running at different rates to take performers in and out of approximate - synchronization. 
A special editor was developed to visualize the music because - conventional editors do not support multiple continuously varying tempi.' - address: 'Shanghai, China' - articleno: 68 - author: 'Dannenberg, Roger B.' - bibtex: "@inproceedings{NIME21_68,\n abstract = {In response to the 2020 pandemic,\ - \ a new work was composed inspired by the limitations and challenges of performing\ - \ over the network. Since synchronization is one of the big challenges, or perhaps\ - \ something to be avoided due to network latency, this work explicitly calls for\ - \ desynchronization in a controlled way, using metronomes running at different\ - \ rates to take performers in and out of approximate synchronization. A special\ - \ editor was developed to visualize the music because conventional editors do\ - \ not support multiple continuously varying tempi.},\n address = {Shanghai, China},\n\ - \ articleno = {68},\n author = {Dannenberg, Roger B.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.a41fe2c5},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/MhcZyE2SCck},\n title = {Canons for Conlon:\ - \ Composing and Performing Multiple Tempi on the Web},\n url = {https://nime.pubpub.org/pub/jxo0v8r7},\n\ - \ year = {2021}\n}\n" + ID: McLean2013 + abstract: 'The Human vocal tract is considered for its sonorous qualities incarrying + prosodic information, which implicates vision in theperceptual processes of speech. + These considerations are put in thecontext of previous work in NIME, forming background + for theintroduction of two sound installations; ``Microphone'''', which uses acamera + and computer vision to translate mouth shapes to sounds, and``Microphone II'''', + a work-in-progress, which adds physical modellingsynthesis as a sound source, + and visualisation of mouth movements.' 
+ address: 'Daejeon, Republic of Korea' + author: Alex McLean and EunJoo Shin and Kia Ng + bibtex: "@inproceedings{McLean2013,\n abstract = {The Human vocal tract is considered\ + \ for its sonorous qualities incarrying prosodic information, which implicates\ + \ vision in theperceptual processes of speech. These considerations are put in\ + \ thecontext of previous work in NIME, forming background for theintroduction\ + \ of two sound installations; ``Microphone'', which uses acamera and computer\ + \ vision to translate mouth shapes to sounds, and``Microphone II'', a work-in-progress,\ + \ which adds physical modellingsynthesis as a sound source, and visualisation\ + \ of mouth movements.},\n address = {Daejeon, Republic of Korea},\n author = {Alex\ + \ McLean and EunJoo Shin and Kia Ng},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178608},\n\ + \ issn = {2220-4806},\n keywords = {face tracking, computer vision, installation,\ + \ microphone},\n month = {May},\n pages = {381--384},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Paralinguistic Microphone},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_122.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.a41fe2c5 + doi: 10.5281/zenodo.1178608 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/MhcZyE2SCck - title: 'Canons for Conlon: Composing and Performing Multiple Tempi on the Web' - url: https://nime.pubpub.org/pub/jxo0v8r7 - year: 2021 + keywords: 'face tracking, computer vision, installation, microphone' + month: May + pages: 381--384 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Paralinguistic Microphone + url: http://www.nime.org/proceedings/2013/nime2013_122.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_69 - abstract: 'This 
paper describes a subversive compositional approach to machine learning, - focused on the exploration of AI bias and computational aesthetic evaluation. - In Bias, for bass clarinet and Interactive Music System, a computer music system - using two Neural Networks trained to develop “aesthetic bias” interacts with the - musician by evaluating the sound input based on its “subjective” aesthetic judgments. - The composition problematizes the discrepancies between the concepts of error - and accuracy, associated with supervised machine learning, and aesthetic judgments - as inherently subjective and intangible. The methods used in the compositional - process are discussed with respect to the objective of balancing the trade-off - between musical authorship and interpretative freedom in interactive musical works.' - address: 'Shanghai, China' - articleno: 69 - author: 'Gioti, Artemi-Maria' - bibtex: "@inproceedings{NIME21_69,\n abstract = {This paper describes a subversive\ - \ compositional approach to machine learning, focused on the exploration of AI\ - \ bias and computational aesthetic evaluation. In Bias, for bass clarinet and\ - \ Interactive Music System, a computer music system using two Neural Networks\ - \ trained to develop “aesthetic bias” interacts with the musician by evaluating\ - \ the sound input based on its “subjective” aesthetic judgments. The composition\ - \ problematizes the discrepancies between the concepts of error and accuracy,\ - \ associated with supervised machine learning, and aesthetic judgments as inherently\ - \ subjective and intangible. 
The methods used in the compositional process are\ - \ discussed with respect to the objective of balancing the trade-off between musical\ - \ authorship and interpretative freedom in interactive musical works.},\n address\ - \ = {Shanghai, China},\n articleno = {69},\n author = {Gioti, Artemi-Maria},\n\ + ID: Bisig2013 + abstract: 'This paper presents a proof of concept implementation of an interface + entitledCoral. The interface serves as a physical and haptic extension of a simulatedcomplex + system, which will be employed as an intermediate mechanism for thecreation of + generative music and imagery. The paper discusses the motivationand conceptual + context that underly the implementation, describes its technicalrealisation and + presents some first interaction experiments. The paper focuseson the following + two aspects: the interrelation between the physical andvirtual behaviours and + properties of the interface and simulation, and thecapability of the interface + to enable an intuitive and tangible exploration ofthis hybrid dynamical system.' + address: 'Daejeon, Republic of Korea' + author: Daniel Bisig and Sébastien Schiesser + bibtex: "@inproceedings{Bisig2013,\n abstract = {This paper presents a proof of\ + \ concept implementation of an interface entitledCoral. The interface serves as\ + \ a physical and haptic extension of a simulatedcomplex system, which will be\ + \ employed as an intermediate mechanism for thecreation of generative music and\ + \ imagery. The paper discusses the motivationand conceptual context that underly\ + \ the implementation, describes its technicalrealisation and presents some first\ + \ interaction experiments. 
The paper focuseson the following two aspects: the\ + \ interrelation between the physical andvirtual behaviours and properties of the\ + \ interface and simulation, and thecapability of the interface to enable an intuitive\ + \ and tangible exploration ofthis hybrid dynamical system.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Daniel Bisig and S{\\'e}bastien Schiesser},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.de74b046},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/9l8NeGmvpDU},\n title\ - \ = {A Compositional Exploration of Computational Aesthetic Evaluation and AI\ - \ Bias.},\n url = {https://nime.pubpub.org/pub/zpvgmv74},\n year = {2021}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1178482},\n issn = {2220-4806},\n\ + \ keywords = {haptic interface, swarm simulation, generative art},\n month = {May},\n\ + \ pages = {385--388},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Coral -- a Physical and Haptic Extension of a Swarm Simulation},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_126.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.de74b046 + doi: 10.5281/zenodo.1178482 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/9l8NeGmvpDU - title: A Compositional Exploration of Computational Aesthetic Evaluation and AI - Bias. 
- url: https://nime.pubpub.org/pub/zpvgmv74 - year: 2021 + keywords: 'haptic interface, swarm simulation, generative art' + month: May + pages: 385--388 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Coral -- a Physical and Haptic Extension of a Swarm Simulation + url: http://www.nime.org/proceedings/2013/nime2013_126.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_7 - abstract: "Can random digit data be transformed and utilized as a sound installation\ - \ that provides a referential connection between a book and the electromechanical\ - \ computer? What happens when the text of A Million Random Digits with 100,000\ - \ Normal Deviates is ‘vocalized’ by an electro-mechanical object? Using a media\ - \ archaeological research approach, Click::RAND^(#)2, an indeterminate sound sculpture\ - \ utilising relays as sound objects, is an audio-visual reinterpretation and representation\ - \ of an historical relationship between a book of random digits and the electromechanical\ - \ relay. Developed by the first author, Click::RAND^(#)2 is the physical re-presentation\ - \ of random digit data sets as compositional elements to complement the physical\ - \ presence of the work through spatialized sound patterns framed within the context\ - \ of Henri Lefebvre’s rhythmanalysis and experienced as synchronous, syncopated\ - \ or discordant rhythms." - address: 'Shanghai, China' - articleno: 7 - author: 'Dunham, Paul and Zareei, Dr. Mo H. and Carnegie, Prof. Dale and McKinnon, - Dr. Dugal' - bibtex: "@inproceedings{NIME21_7,\n abstract = {Can random digit data be transformed\ - \ and utilized as a sound installation that provides a referential connection\ - \ between a book and the electromechanical computer? What happens when the text\ - \ of A Million Random Digits with 100,000 Normal Deviates is ‘vocalized’ by an\ - \ electro-mechanical object? 
Using a media archaeological research approach, Click::RAND^(#)2,\ - \ an indeterminate sound sculpture utilising relays as sound objects, is an audio-visual\ - \ reinterpretation and representation of an historical relationship between a\ - \ book of random digits and the electromechanical relay. Developed by the first\ - \ author, Click::RAND^(#)2 is the physical re-presentation of random digit data\ - \ sets as compositional elements to complement the physical presence of the work\ - \ through spatialized sound patterns framed within the context of Henri Lefebvre’s\ - \ rhythmanalysis and experienced as synchronous, syncopated or discordant rhythms.},\n\ - \ address = {Shanghai, China},\n articleno = {7},\n author = {Dunham, Paul and\ - \ Zareei, Dr. Mo H. and Carnegie, Prof. Dale and McKinnon, Dr. Dugal},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.5cc6d157},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/vJynbs8txuA},\n title = {Click::RAND#2.\ - \ An Indeterminate Sound Sculpture},\n url = {https://nime.pubpub.org/pub/lac4s48h},\n\ - \ year = {2021}\n}\n" + ID: Schacher2013 + abstract: 'This article documents a class that teaches gestural interaction and + juxtaposestraditional instrumental skills with digital musical instrument concepts. + Inorder to show the principles and reflections that informed the choices made + indeveloping this syllabus, fundamental elements of an instrument-bodyrelationship + and the perceptual import of sensori-motor integration areinvestigated. The methods + used to let participants learn in practicalexperimental settings are discussed, + showing a way to conceptualise andexperience the entire workflow from instrumental + sound to electronictransformations by blending gestural interaction with digital + musicalinstrument techniques and traditional instrumental playing skills. 
Thetechnical + interfaces and software that were deployed are explained, focussingof the interactive + potential offered by each solution. In an attempt tosummarise and evaluate the + impact of this course, a number of insights relatingto this specific pedagogical + situation are put forward. Finally, concreteexamples of interactive situations + that were developed by the participants areshown in order to demonstrate the validity + of this approach.' + address: 'Daejeon, Republic of Korea' + author: Jan C. Schacher + bibtex: "@inproceedings{Schacher2013,\n abstract = {This article documents a class\ + \ that teaches gestural interaction and juxtaposestraditional instrumental skills\ + \ with digital musical instrument concepts. Inorder to show the principles and\ + \ reflections that informed the choices made indeveloping this syllabus, fundamental\ + \ elements of an instrument-bodyrelationship and the perceptual import of sensori-motor\ + \ integration areinvestigated. The methods used to let participants learn in practicalexperimental\ + \ settings are discussed, showing a way to conceptualise andexperience the entire\ + \ workflow from instrumental sound to electronictransformations by blending gestural\ + \ interaction with digital musicalinstrument techniques and traditional instrumental\ + \ playing skills. Thetechnical interfaces and software that were deployed are\ + \ explained, focussingof the interactive potential offered by each solution. In\ + \ an attempt tosummarise and evaluate the impact of this course, a number of insights\ + \ relatingto this specific pedagogical situation are put forward. Finally, concreteexamples\ + \ of interactive situations that were developed by the participants areshown in\ + \ order to demonstrate the validity of this approach.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Jan C. 
Schacher},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178656},\n issn = {2220-4806},\n keywords = {gestural\ + \ interaction, digital musical instruments, pedagogy, mapping, enactive approach},\n\ + \ month = {May},\n pages = {55--60},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Hybrid Musicianship --- Teaching Gestural Interaction\ + \ with Traditional and Digital Instruments},\n url = {http://www.nime.org/proceedings/2013/nime2013_127.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.5cc6d157 + doi: 10.5281/zenodo.1178656 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/vJynbs8txuA - title: "Click::RAND#2. An Indeterminate Sound Sculpture" - url: https://nime.pubpub.org/pub/lac4s48h - year: 2021 + keywords: 'gestural interaction, digital musical instruments, pedagogy, mapping, + enactive approach' + month: May + pages: 55--60 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Hybrid Musicianship --- Teaching Gestural Interaction with Traditional and + Digital Instruments + url: http://www.nime.org/proceedings/2013/nime2013_127.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_70 - abstract: 'We present a novel robotic violinist that is designed to play Carnatic - music - a music system popular in the southern part of India. The robot plays - the D string and uses a single finger mechanism inspired by the Chitravina - a - fretless Indian lute. A fingerboard traversal system with a dynamic finger tip - apparatus enables the robot to play gamakas - pitch based embellishments in-between - notes, which are at the core of Carnatic music. 
A double roller design is used - for bowing which reduces space, produces a tone that resembles the tone of a conventional - violin bow, and facilitates super human playing techniques such as infinite bowing. - The design also enables the user to change the bow hair tightness to help capture - a variety of performing techniques in different musical styles. Objective assessments - and subjective listening tests were conducted to evaluate our design, indicating - that the robot can play gamakas in a realistic manner and thus, can perform Carnatic - music.' - address: 'Shanghai, China' - articleno: 70 - author: 'Sankaranarayanan, Raghavasimhan and Weinberg, Gil' - bibtex: "@inproceedings{NIME21_70,\n abstract = {We present a novel robotic violinist\ - \ that is designed to play Carnatic music - a music system popular in the southern\ - \ part of India. The robot plays the D string and uses a single finger mechanism\ - \ inspired by the Chitravina - a fretless Indian lute. A fingerboard traversal\ - \ system with a dynamic finger tip apparatus enables the robot to play gamakas\ - \ - pitch based embellishments in-between notes, which are at the core of Carnatic\ - \ music. A double roller design is used for bowing which reduces space, produces\ - \ a tone that resembles the tone of a conventional violin bow, and facilitates\ - \ super human playing techniques such as infinite bowing. The design also enables\ - \ the user to change the bow hair tightness to help capture a variety of performing\ - \ techniques in different musical styles. 
Objective assessments and subjective\ - \ listening tests were conducted to evaluate our design, indicating that the robot\ - \ can play gamakas in a realistic manner and thus, can perform Carnatic music.},\n\ - \ address = {Shanghai, China},\n articleno = {70},\n author = {Sankaranarayanan,\ - \ Raghavasimhan and Weinberg, Gil},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.0ad83109},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/4vNZm2Zewqs},\n\ - \ title = {Design of Hathaani - A Robotic Violinist for Carnatic Music},\n url\ - \ = {https://nime.pubpub.org/pub/225tmviw},\n year = {2021}\n}\n" + ID: Jackie2013 + abstract: 'SoloTouch is a guitar inspired pocket sized controller system that consists + ofa capacitive touch trigger and a lick-based note selector. The touch triggerallows + an intuitive way to play both velocity sensitive notes and vibratoexpressively + using only one finger. The lick-based note selector is an originalconcept that + provides the player an easy way to play expressive melodic linesby combining pre-programmed + ``licks'''' without the need to learn the actualnotes. The two-part controller + is primarily used as a basic MIDI controller forplaying MIDI controlled virtual + instruments, normally played by keyboardcontrollers. The controller is targeted + towards novice musicians, playerswithout prior musical training could play musical + and expressive solos,suitable for improvised jamming along modern popular music.' + address: 'Daejeon, Republic of Korea' + author: Jackie and Yi Tang Chui and Mubarak Marafa and Samson and Ka Fai Young + bibtex: "@inproceedings{Jackie2013,\n abstract = {SoloTouch is a guitar inspired\ + \ pocket sized controller system that consists ofa capacitive touch trigger and\ + \ a lick-based note selector. 
The touch triggerallows an intuitive way to play\ + \ both velocity sensitive notes and vibratoexpressively using only one finger.\ + \ The lick-based note selector is an originalconcept that provides the player\ + \ an easy way to play expressive melodic linesby combining pre-programmed ``licks''\ + \ without the need to learn the actualnotes. The two-part controller is primarily\ + \ used as a basic MIDI controller forplaying MIDI controlled virtual instruments,\ + \ normally played by keyboardcontrollers. The controller is targeted towards novice\ + \ musicians, playerswithout prior musical training could play musical and expressive\ + \ solos,suitable for improvised jamming along modern popular music.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {Jackie and Yi Tang Chui and Mubarak\ + \ Marafa and Samson and Ka Fai Young},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178560},\n\ + \ issn = {2220-4806},\n keywords = {Capacitive touch controller, automated note\ + \ selector, virtual instrument MIDI controller, novice musicians.},\n month =\ + \ {May},\n pages = {389--393},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {SoloTouch: A Capacitive Touch Controller with Lick-based\ + \ Note Selector},\n url = {http://www.nime.org/proceedings/2013/nime2013_130.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.0ad83109 + doi: 10.5281/zenodo.1178560 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/4vNZm2Zewqs - title: Design of Hathaani - A Robotic Violinist for Carnatic Music - url: https://nime.pubpub.org/pub/225tmviw - year: 2021 + keywords: 'Capacitive touch controller, automated note selector, virtual instrument + MIDI controller, novice musicians.' 
+ month: May + pages: 389--393 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'SoloTouch: A Capacitive Touch Controller with Lick-based Note Selector' + url: http://www.nime.org/proceedings/2013/nime2013_130.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_71 - abstract: 'The current generation of commercial hardware and software for virtual - reality and immersive environments presents possibilities for a wealth of creative - solutions for new musical expression and interaction. This paper explores the - affordances of virtual musical environments with the disabled music-making community - of Drake Music Project Northern Ireland. Recent collaborations have investigated - strategies for Guided Interactions in Virtual Musical Environments (GIVME), a - novel concept the authors introduce here. This paper gives some background on - disabled music-making with digital musical instruments before sharing recent research - projects that facilitate disabled music performance in virtual reality immersive - environments. We expand on the premise of GIVME as a potential guideline for musical - interaction design for disabled musicians in VR, and take an explorative look - at the possibilities and constraints for instrument design for disabled musicians - as virtual worlds integrate ever more closely with the real.' - address: 'Shanghai, China' - articleno: 71 - author: 'Mills, Damian and Schroeder, Franziska and D''Arcy, John' - bibtex: "@inproceedings{NIME21_71,\n abstract = {The current generation of commercial\ - \ hardware and software for virtual reality and immersive environments presents\ - \ possibilities for a wealth of creative solutions for new musical expression\ - \ and interaction. 
This paper explores the affordances of virtual musical environments\ - \ with the disabled music-making community of Drake Music Project Northern Ireland.\ - \ Recent collaborations have investigated strategies for Guided Interactions in\ - \ Virtual Musical Environments (GIVME), a novel concept the authors introduce\ - \ here. This paper gives some background on disabled music-making with digital\ - \ musical instruments before sharing recent research projects that facilitate\ - \ disabled music performance in virtual reality immersive environments. We expand\ - \ on the premise of GIVME as a potential guideline for musical interaction design\ - \ for disabled musicians in VR, and take an explorative look at the possibilities\ - \ and constraints for instrument design for disabled musicians as virtual worlds\ - \ integrate ever more closely with the real.},\n address = {Shanghai, China},\n\ - \ articleno = {71},\n author = {Mills, Damian and Schroeder, Franziska and D'Arcy,\ - \ John},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.5443652c},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/sI0K9sMYc80},\n title\ - \ = {GIVME: Guided Interactions in Virtual Musical Environments: },\n url = {https://nime.pubpub.org/pub/h14o4oit},\n\ - \ year = {2021}\n}\n" + ID: Mital2013 + abstract: 'We present an interactive content-based MIR environment specifically + designedto aid in the exploration of databases of experimental electronic music,particularly + in cases where little or no metadata exist. In recent years,several rare archives + of early experimental electronic music have becomeavailable. The Daphne Oram Collection + contains one such archive, consisting ofapproximately 120 hours of 1/4 inch tape + recordings and representing a perioddating from circa 1957. 
This collection is + recognized as an importantmusicological resource, representing aspects of the + evolution of electronicmusic practices, including early tape editing methods, + experimental synthesistechniques and composition. However, it is extremely challenging + to derivemeaningful information from this dataset, primarily for three reasons. + First,the dataset is very large. Second, there is limited metadata --- some titles,track + lists, and occasional handwritten notes exist, but where this is true,the reliability + of the annotations are unknown. Finally, and mostsignificantly, as this is a collection + of early experimental electronic music,the sonic characteristics of the material + are often not consistent withtraditional musical information. In other words, + there is no score, no knowninstrumentation, and often no recognizable acoustic + source. We present amethod for the construction of a frequency component dictionary + derived fromthe collection via Probabilistic Latent Component Analysis (PLCA), + anddemonstrate how an interactive 3D visualization of the relationships betweenthe + PLCA-derived dictionary and the archive is facilitating researcher''sunderstanding + of the data.' + address: 'Daejeon, Republic of Korea' + author: Parag Kumar Mital and Mick Grierson + bibtex: "@inproceedings{Mital2013,\n abstract = {We present an interactive content-based\ + \ MIR environment specifically designedto aid in the exploration of databases\ + \ of experimental electronic music,particularly in cases where little or no metadata\ + \ exist. In recent years,several rare archives of early experimental electronic\ + \ music have becomeavailable. The Daphne Oram Collection contains one such archive,\ + \ consisting ofapproximately 120 hours of 1/4 inch tape recordings and representing\ + \ a perioddating from circa 1957. 
This collection is recognized as an importantmusicological\ + \ resource, representing aspects of the evolution of electronicmusic practices,\ + \ including early tape editing methods, experimental synthesistechniques and composition.\ + \ However, it is extremely challenging to derivemeaningful information from this\ + \ dataset, primarily for three reasons. First,the dataset is very large. Second,\ + \ there is limited metadata --- some titles,track lists, and occasional handwritten\ + \ notes exist, but where this is true,the reliability of the annotations are unknown.\ + \ Finally, and mostsignificantly, as this is a collection of early experimental\ + \ electronic music,the sonic characteristics of the material are often not consistent\ + \ withtraditional musical information. In other words, there is no score, no knowninstrumentation,\ + \ and often no recognizable acoustic source. We present amethod for the construction\ + \ of a frequency component dictionary derived fromthe collection via Probabilistic\ + \ Latent Component Analysis (PLCA), anddemonstrate how an interactive 3D visualization\ + \ of the relationships betweenthe PLCA-derived dictionary and the archive is facilitating\ + \ researcher'sunderstanding of the data.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Parag Kumar Mital and Mick Grierson},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178614},\n issn = {2220-4806},\n keywords = {mir, plca, mfcc,\ + \ 3d browser, daphne oram, content-based information retrieval, interactive visualization},\n\ + \ month = {May},\n pages = {227--232},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Mining Unlabeled Electronic Music Databases through\ + \ {3D} Interactive Visualization of Latent Component Relationships},\n url = {http://www.nime.org/proceedings/2013/nime2013_132.pdf},\n\ + \ year = {2013}\n}\n" booktitle: 
Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.5443652c + doi: 10.5281/zenodo.1178614 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/sI0K9sMYc80 - title: 'GIVME: Guided Interactions in Virtual Musical Environments: ' - url: https://nime.pubpub.org/pub/h14o4oit - year: 2021 + keywords: 'mir, plca, mfcc, 3d browser, daphne oram, content-based information retrieval, + interactive visualization' + month: May + pages: 227--232 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Mining Unlabeled Electronic Music Databases through 3D Interactive Visualization + of Latent Component Relationships + url: http://www.nime.org/proceedings/2013/nime2013_132.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_72 - abstract: 'In this article, we discuss the creation of The Furies: A LaptOpera, - a new opera for laptop orchestra and live vocal soloists that tells the story - of the Greek tragedy Electra. We outline the principles that guided our instrument - design with the aim of forging direct and visceral connections between the music, - the narrative, and the relationship between characters in ways we can simultaneously - hear, see, and feel. Through detailed case studies of three instruments—The Rope - and BeatPlayer, the tether chorus, and the autonomous speaker orchestra—this paper - offers tools and reflections to guide instrument-building in service of narrative-based - works through a unified multimedia art form.' - address: 'Shanghai, China' - articleno: 72 - author: 'Hege, Anne and Noufi, Camille and Georgieva, Elena and Wang, Ge' - bibtex: "@inproceedings{NIME21_72,\n abstract = {In this article, we discuss the\ - \ creation of The Furies: A LaptOpera, a new opera for laptop orchestra and live\ - \ vocal soloists that tells the story of the Greek tragedy Electra. 
We outline\ - \ the principles that guided our instrument design with the aim of forging direct\ - \ and visceral connections between the music, the narrative, and the relationship\ - \ between characters in ways we can simultaneously hear, see, and feel. Through\ - \ detailed case studies of three instruments—The Rope and BeatPlayer, the tether\ - \ chorus, and the autonomous speaker orchestra—this paper offers tools and reflections\ - \ to guide instrument-building in service of narrative-based works through a unified\ - \ multimedia art form.},\n address = {Shanghai, China},\n articleno = {72},\n\ - \ author = {Hege, Anne and Noufi, Camille and Georgieva, Elena and Wang, Ge},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.dde5029a},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/QC_-h4cVVog},\n title\ - \ = {Instrument Design for The Furies: A LaptOpera},\n url = {https://nime.pubpub.org/pub/gx6klqui},\n\ - \ year = {2021}\n}\n" + ID: Hong2013 + abstract: "Laptap is a laptop-based, real-time sound synthesis/control system for\ + \ musicand multimedia performance. The system produces unique sounds by positive\ + \ audiofeedback between the on-board microphone and the speaker of a laptop com-puter.\ + \ Users can make a variety of sounds by touching the laptop computer inseveral\ + \ different ways, and control their timbre with the gestures of the otherhand\ + \ above the mi,\ncrophone and the speaker to manipulate the characteristicsof\ + \ the acoustic feedback path. We introduce the basic con,\ncept of this audiofeedback\ + \ system, describe its features for sound generation and manipulation,and discuss\ + \ the result of an experimental performance. Finally we suggest somerelevant research\ + \ topics that might follow in the future." 
+ address: 'Daejeon, Republic of Korea' + author: Dae Ryong Hong and Woon Seung Yeo + bibtex: "@inproceedings{Hong2013,\n abstract = {Laptap is a laptop-based, real-time\ + \ sound synthesis/control system for musicand multimedia performance. The system\ + \ produces unique sounds by positive audiofeedback between the on-board microphone\ + \ and the speaker of a laptop com-puter. Users can make a variety of sounds by\ + \ touching the laptop computer inseveral different ways, and control their timbre\ + \ with the gestures of the otherhand above the mi,\ncrophone and the speaker to\ + \ manipulate the characteristicsof the acoustic feedback path. We introduce the\ + \ basic con,\ncept of this audiofeedback system, describe its features for sound\ + \ generation and manipulation,and discuss the result of an experimental performance.\ + \ Finally we suggest somerelevant research topics that might follow in the future.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Dae Ryong Hong and Woon\ + \ Seung Yeo},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178554},\n issn\ + \ = {2220-4806},\n keywords = {Laptop music, laptop computer, audio feedback,\ + \ hand gesture, gestural control, musical mapping, audio visualization, musical\ + \ notation},\n month = {May},\n pages = {233--236},\n publisher = {Graduate School\ + \ of Culture Technology, KAIST},\n title = {Laptap: Laptop Computer as a Musical\ + \ Instrument using Audio Feedback},\n url = {http://www.nime.org/proceedings/2013/nime2013_137.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.dde5029a + doi: 10.5281/zenodo.1178554 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/QC_-h4cVVog - title: 'Instrument Design for The Furies: A LaptOpera' - url: https://nime.pubpub.org/pub/gx6klqui - year: 
2021 + keywords: 'Laptop music, laptop computer, audio feedback, hand gesture, gestural + control, musical mapping, audio visualization, musical notation' + month: May + pages: 233--236 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Laptap: Laptop Computer as a Musical Instrument using Audio Feedback' + url: http://www.nime.org/proceedings/2013/nime2013_137.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_73 - abstract: 'As technologies and interfaces for the instrumental control of musical - sound get ever better at tracking aspects of human position and motion in space, - a fundamental problem emerges: Unintended or even counter-intentional control - may result when humans themselves become a source of positional noise. A clear - case of what is meant by this, is the “stillness movement” of a body part, occurring - despite the simultaneous explicit intention for that body part to remain still. - In this paper, we present the results of a randomized, controlled experiment investigating - this phenomenon along a vertical axis relative to the human fingertip. The results - include characterizations of both the spatial distribution and frequency distribution - of the stillness movement observed. Also included are results indicating a possible - role for constant forces and viscosities in reducing stillness movement amplitude, - thereby potentially enabling the implementation of more positional control of - musical sound within the same available spatial range. Importantly, the above - is summarized in a form that is directly interpretable for anyone designing technologies, - interactions, or performances that involve fingertip control of musical sound. - Also, a complete data set of the experimental results is included in the separate - Appendices to this paper, again in a format that is directly interpretable.' 
- address: 'Shanghai, China' - articleno: 73 - author: 'de Jong, Staas' - bibtex: "@inproceedings{NIME21_73,\n abstract = {As technologies and interfaces\ - \ for the instrumental control of musical sound get ever better at tracking aspects\ - \ of human position and motion in space, a fundamental problem emerges: Unintended\ - \ or even counter-intentional control may result when humans themselves become\ - \ a source of positional noise. A clear case of what is meant by this, is the\ - \ “stillness movement” of a body part, occurring despite the simultaneous explicit\ - \ intention for that body part to remain still. In this paper, we present the\ - \ results of a randomized, controlled experiment investigating this phenomenon\ - \ along a vertical axis relative to the human fingertip. The results include characterizations\ - \ of both the spatial distribution and frequency distribution of the stillness\ - \ movement observed. Also included are results indicating a possible role for\ - \ constant forces and viscosities in reducing stillness movement amplitude, thereby\ - \ potentially enabling the implementation of more positional control of musical\ - \ sound within the same available spatial range. 
Importantly, the above is summarized\ - \ in a form that is directly interpretable for anyone designing technologies,\ - \ interactions, or performances that involve fingertip control of musical sound.\ - \ Also, a complete data set of the experimental results is included in the separate\ - \ Appendices to this paper, again in a format that is directly interpretable.},\n\ - \ address = {Shanghai, China},\n articleno = {73},\n author = {de Jong, Staas},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.9765f11d},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/L_WhJ3N-v8c},\n title\ - \ = {Human noise at the fingertip: Positional (non)control under varying haptic\ - \ × musical conditions},\n url = {https://nime.pubpub.org/pub/bol2r7nr},\n year\ - \ = {2021}\n}\n" + ID: Bragg2013 + abstract: 'This paper presents a graph-theoretic model that supports the design + andanalysis of data flow within digital musical instruments (DMIs). The state + ofthe art in DMI design fails to provide any standards for the scheduling ofcomputations + within a DMI''s data flow. It does not provide a theoreticalframework within which + we can analyze different scheduling protocols and theirimpact on the DMI''s performance. + Indeed, the mapping between the DMI''s sensoryinputs and sonic outputs is classically + treated as a black box. DMI designersand builders are forced to design and schedule + the flow of data through thisblack box on their own. Improper design of the data + flow can produceundesirable results, ranging from overflowing buffers that cause + system crashesto misaligned sensory data that result in strange or disordered + sonic events.In this paper, we attempt to remedy this problem by providing a framework + forthe design and analysis of the DMI data flow. 
We also provide a schedulingalgorithm + built upon that framework that guarantees desirable properties forthe resulting + DMI.' + address: 'Daejeon, Republic of Korea' + author: Danielle Bragg + bibtex: "@inproceedings{Bragg2013,\n abstract = {This paper presents a graph-theoretic\ + \ model that supports the design andanalysis of data flow within digital musical\ + \ instruments (DMIs). The state ofthe art in DMI design fails to provide any standards\ + \ for the scheduling ofcomputations within a DMI's data flow. It does not provide\ + \ a theoreticalframework within which we can analyze different scheduling protocols\ + \ and theirimpact on the DMI's performance. Indeed, the mapping between the DMI's\ + \ sensoryinputs and sonic outputs is classically treated as a black box. DMI designersand\ + \ builders are forced to design and schedule the flow of data through thisblack\ + \ box on their own. Improper design of the data flow can produceundesirable results,\ + \ ranging from overflowing buffers that cause system crashesto misaligned sensory\ + \ data that result in strange or disordered sonic events.In this paper, we attempt\ + \ to remedy this problem by providing a framework forthe design and analysis of\ + \ the DMI data flow. 
We also provide a schedulingalgorithm built upon that framework\ + \ that guarantees desirable properties forthe resulting DMI.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Danielle Bragg},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178486},\n issn = {2220-4806},\n keywords = {DMI design,\ + \ data flow, mapping function},\n month = {May},\n pages = {237--242},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {Synchronous Data\ + \ Flow Modeling for {DMI}s},\n url = {http://www.nime.org/proceedings/2013/nime2013_139.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.9765f11d + doi: 10.5281/zenodo.1178486 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/L_WhJ3N-v8c - title: 'Human noise at the fingertip: Positional (non)control under varying haptic - × musical conditions' - url: https://nime.pubpub.org/pub/bol2r7nr - year: 2021 + keywords: 'DMI design, data flow, mapping function' + month: May + pages: 237--242 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Synchronous Data Flow Modeling for DMIs + url: http://www.nime.org/proceedings/2013/nime2013_139.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_74 - abstract: 'In this paper I show how it is possible to create polyrhythmic patterns - with analogue oscillators by setting up a network of variable resistances that - connect these oscillators. The system I present is build with electronic circuits - connected to dc-motors and allows for a very tangible and playful exploration - of the dynamic properties of artificial neural networks. 
The theoretical underpinnings - of this approach stem from observation and models of synchronization in living - organisms, where synchronization and phase-locking is not only an observable phenomenon - but can also be seen as a marker of the quality of interaction. Realized as a - technical system of analogue oscillators synchronization also appears between - oscillators tuned at different basic rhythm and stable polyrhythmic patterns emerge - as the result of electrical connections.' - address: 'Shanghai, China' - articleno: 74 - author: 'Faubel, Christian' - bibtex: "@inproceedings{NIME21_74,\n abstract = {In this paper I show how it is\ - \ possible to create polyrhythmic patterns with analogue oscillators by setting\ - \ up a network of variable resistances that connect these oscillators. The system\ - \ I present is build with electronic circuits connected to dc-motors and allows\ - \ for a very tangible and playful exploration of the dynamic properties of artificial\ - \ neural networks. The theoretical underpinnings of this approach stem from observation\ - \ and models of synchronization in living organisms, where synchronization and\ - \ phase-locking is not only an observable phenomenon but can also be seen as a\ - \ marker of the quality of interaction. 
Realized as a technical system of analogue\ - \ oscillators synchronization also appears between oscillators tuned at different\ - \ basic rhythm and stable polyrhythmic patterns emerge as the result of electrical\ - \ connections.},\n address = {Shanghai, China},\n articleno = {74},\n author =\ - \ {Faubel, Christian},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.e66a8542},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/pJlxVJTMRto},\n\ - \ title = {Emergent Polyrhythmic Patterns with a Neuromorph Electronic Network},\n\ - \ url = {https://nime.pubpub.org/pub/g04egsqn},\n year = {2021}\n}\n" + ID: Feugere2013 + abstract: 'Digitartic, a system for bi-manual gestural control of Vowel-Consonant-Vowelperformative + singing synthesis is presented. This system is an extension of areal-time gesture-controlled + vowel singing instrument developed in the Max/MSPlanguage. In addition to pitch, + vowels and voice strength control, Digitarticis designed for gestural control + of articulation parameters for a wide set onconsonant, including various places + and manners of articulation. The phases ofarticulation between two phonemes are + continuously controlled and can bedriven in real time without noticeable delay, + at any stage of the syntheticphoneme production. Thus, as in natural singing, + very accurate rhythmicpatterns are produced and adapted while playing with other + musicians. Theinstrument features two (augmented) pen tablets for controlling + voiceproduction: one is dealing with the glottal source and vowels, the second + oneis dealing with consonant/vowel articulation. The results show very naturalconsonant + and vowel synthesis. Virtual choral practice confirms theeffectiveness of Digitartic + as an expressive musical instrument.' 
+ address: 'Daejeon, Republic of Korea' + author: Lionel Feugère and Christophe d'Alessandro + bibtex: "@inproceedings{Feugere2013,\n abstract = {Digitartic, a system for bi-manual\ + \ gestural control of Vowel-Consonant-Vowelperformative singing synthesis is presented.\ + \ This system is an extension of areal-time gesture-controlled vowel singing instrument\ + \ developed in the Max/MSPlanguage. In addition to pitch, vowels and voice strength\ + \ control, Digitarticis designed for gestural control of articulation parameters\ + \ for a wide set onconsonant, including various places and manners of articulation.\ + \ The phases ofarticulation between two phonemes are continuously controlled and\ + \ can bedriven in real time without noticeable delay, at any stage of the syntheticphoneme\ + \ production. Thus, as in natural singing, very accurate rhythmicpatterns are\ + \ produced and adapted while playing with other musicians. Theinstrument features\ + \ two (augmented) pen tablets for controlling voiceproduction: one is dealing\ + \ with the glottal source and vowels, the second oneis dealing with consonant/vowel\ + \ articulation. The results show very naturalconsonant and vowel synthesis. 
Virtual\ + \ choral practice confirms theeffectiveness of Digitartic as an expressive musical\ + \ instrument.},\n address = {Daejeon, Republic of Korea},\n author = {Lionel Feug{\\\ + `e}re and Christophe d'Alessandro},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178520},\n\ + \ issn = {2220-4806},\n keywords = {singing voice synthesis, gestural control,\ + \ syllabic synthesis, articulation, formants synthesis},\n month = {May},\n pages\ + \ = {331--336},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Digitartic: bi-manual gestural control of articulation in performative\ + \ singing synthesis},\n url = {http://www.nime.org/proceedings/2013/nime2013_143.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.e66a8542 + doi: 10.5281/zenodo.1178520 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/pJlxVJTMRto - title: Emergent Polyrhythmic Patterns with a Neuromorph Electronic Network - url: https://nime.pubpub.org/pub/g04egsqn - year: 2021 + keywords: 'singing voice synthesis, gestural control, syllabic synthesis, articulation, + formants synthesis' + month: May + pages: 331--336 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Digitartic: bi-manual gestural control of articulation in performative singing + synthesis' + url: http://www.nime.org/proceedings/2013/nime2013_143.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_75 - abstract: 'Over past editions of the NIME Conference, there has been a growing concern - towards diversity and inclusion. It is relevant for an international community - whose vast majority of its members are in Europe, the USA, and Canada to seek - a richer cultural diversity. 
To contribute to a decolonial perspective in the - inclusion of underrepresented countries and ethnic/racial groups, we discuss Gambiarra - and Techno-Vernacular Creativity concepts. We believe these concepts may help - structure and stimulate individuals from these underrepresented contexts to perform - research in the NIME field.' - address: 'Shanghai, China' - articleno: 75 - author: 'Tragtenberg, João and Albuquerque, Gabriel and Calegario, Filipe' - bibtex: "@inproceedings{NIME21_75,\n abstract = {Over past editions of the NIME\ - \ Conference, there has been a growing concern towards diversity and inclusion.\ - \ It is relevant for an international community whose vast majority of its members\ - \ are in Europe, the USA, and Canada to seek a richer cultural diversity. To contribute\ - \ to a decolonial perspective in the inclusion of underrepresented countries and\ - \ ethnic/racial groups, we discuss Gambiarra and Techno-Vernacular Creativity\ - \ concepts. We believe these concepts may help structure and stimulate individuals\ - \ from these underrepresented contexts to perform research in the NIME field.},\n\ - \ address = {Shanghai, China},\n articleno = {75},\n author = {Tragtenberg, João\ - \ and Albuquerque, Gabriel and Calegario, Filipe},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.98354a15},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/iJ8g7vBPFYw},\n title = {Gambiarra and\ - \ Techno-Vernacular Creativity in NIME Research},\n url = {https://nime.pubpub.org/pub/aqm27581},\n\ - \ year = {2021}\n}\n" + ID: Schacher2013a + abstract: 'This article describes the motivations and reflections that led to thedevelopment + of a gestural sensor instrument called the Quarterstaff. In aniterative design + and fabrication process, several versions of this interfacewere build, tested + and evaluated in performances. 
A detailed explanation of thedesign choices concerning + the shape but also the sensing capabilities of theinstrument illustrates the emphasis + on establishing an `enactive''instrumental relationship. A musical practice for + this type of instrument isshown by discussing the methods used in the exploration + of the gesturalpotential of the interface and the strategies deployed for the + development ofmappings and compositions. Finally, to gain more information about + how thisinstrument compares with similar designs, two dimension-space analyses + are madethat show a clear positioning in relation to instruments that precede + theQuarterstaff.' + address: 'Daejeon, Republic of Korea' + author: Jan C. Schacher + bibtex: "@inproceedings{Schacher2013a,\n abstract = {This article describes the\ + \ motivations and reflections that led to thedevelopment of a gestural sensor\ + \ instrument called the Quarterstaff. In aniterative design and fabrication process,\ + \ several versions of this interfacewere build, tested and evaluated in performances.\ + \ A detailed explanation of thedesign choices concerning the shape but also the\ + \ sensing capabilities of theinstrument illustrates the emphasis on establishing\ + \ an `enactive'instrumental relationship. A musical practice for this type of\ + \ instrument isshown by discussing the methods used in the exploration of the\ + \ gesturalpotential of the interface and the strategies deployed for the development\ + \ ofmappings and compositions. Finally, to gain more information about how thisinstrument\ + \ compares with similar designs, two dimension-space analyses are madethat show\ + \ a clear positioning in relation to instruments that precede theQuarterstaff.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Jan C. 
Schacher},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178658},\n issn = {2220-4806},\n keywords\ + \ = {Gestural sensor interface, instrument design, body-object relation, composition\ + \ and performance practice, dimension space analysis},\n month = {May},\n pages\ + \ = {535--540},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {The Quarterstaff, a Gestural Sensor Instrument},\n url = {http://www.nime.org/proceedings/2013/nime2013_144.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.98354a15 + doi: 10.5281/zenodo.1178658 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/iJ8g7vBPFYw - title: Gambiarra and Techno-Vernacular Creativity in NIME Research - url: https://nime.pubpub.org/pub/aqm27581 - year: 2021 + keywords: 'Gestural sensor interface, instrument design, body-object relation, composition + and performance practice, dimension space analysis' + month: May + pages: 535--540 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'The Quarterstaff, a Gestural Sensor Instrument' + url: http://www.nime.org/proceedings/2013/nime2013_144.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_76 - abstract: 'Digital musical instrument (DMI) design and performance is primarily - practiced by those with backgrounds in music technology and human-computer interaction. - Research on these topics is rarely led by performers, much less by those without - backgrounds in technology. In this study, we explore DMI design and performance - from the perspective of a singular community of classically-trained percussionists. 
- We use a practiced-based methodology informed by our skillset as percussionists - to study how instrumental skills and sensibilities can be incorporated into the - personalization of, and performance with, DMIs. We introduced a simple and adaptable - digital musical instrument, built using the Arduino Uno, that individuals (percussionists) - could personalize and extend in order to improvise, compose and create music (études). - Our analysis maps parallel percussion practices emerging from the resultant DMI - compositions and performances by examining the functionality of each Arduino instrument - through the lens of material-oriented and communication-oriented approaches to - interactivity.' - address: 'Shanghai, China' - articleno: 76 - author: 'Roth, Timothy and Huang, Aiyun and Cunningham, Tyler' - bibtex: "@inproceedings{NIME21_76,\n abstract = {Digital musical instrument (DMI)\ - \ design and performance is primarily practiced by those with backgrounds in music\ - \ technology and human-computer interaction. Research on these topics is rarely\ - \ led by performers, much less by those without backgrounds in technology. In\ - \ this study, we explore DMI design and performance from the perspective of a\ - \ singular community of classically-trained percussionists. We use a practiced-based\ - \ methodology informed by our skillset as percussionists to study how instrumental\ - \ skills and sensibilities can be incorporated into the personalization of, and\ - \ performance with, DMIs. 
We introduced a simple and adaptable digital musical\ - \ instrument, built using the Arduino Uno, that individuals (percussionists) could\ - \ personalize and extend in order to improvise, compose and create music (études).\ - \ Our analysis maps parallel percussion practices emerging from the resultant\ - \ DMI compositions and performances by examining the functionality of each Arduino\ - \ instrument through the lens of material-oriented and communication-oriented\ - \ approaches to interactivity.},\n address = {Shanghai, China},\n articleno =\ - \ {76},\n author = {Roth, Timothy and Huang, Aiyun and Cunningham, Tyler},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.c61b9546},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/kjQDN907FXs},\n title = {On\ - \ Parallel Performance Practices: Some Observations on Personalizing DMIs as Percussionists},\n\ - \ url = {https://nime.pubpub.org/pub/226jlaug},\n year = {2021}\n}\n" + ID: Altavilla2013 + abstract: 'We present a study that explores the affordance evoked by sound andsound-gesture + mappings. In order to do this, we make use of a sensor systemwith minimal form + factor in a user study that minimizes cultural associationThe present study focuses + on understanding how participants describe sounds andgestures produced while playing + designed sonic interaction mappings. Thisapproach seeks to move from object-centric + affordance towards investigatingembodied gestural sonic affordances.' + address: 'Daejeon, Republic of Korea' + author: Alessandro Altavilla and Baptiste Caramiaux and Atau Tanaka + bibtex: "@inproceedings{Altavilla2013,\n abstract = {We present a study that explores\ + \ the affordance evoked by sound andsound-gesture mappings. 
In order to do this,\ + \ we make use of a sensor systemwith minimal form factor in a user study that\ + \ minimizes cultural associationThe present study focuses on understanding how\ + \ participants describe sounds andgestures produced while playing designed sonic\ + \ interaction mappings. Thisapproach seeks to move from object-centric affordance\ + \ towards investigatingembodied gestural sonic affordances.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Alessandro Altavilla and Baptiste Caramiaux\ + \ and Atau Tanaka},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178463},\n\ + \ issn = {2220-4806},\n keywords = {Gestural embodiment of sound, Affordances,\ + \ Mapping},\n month = {May},\n pages = {61--64},\n publisher = {Graduate School\ + \ of Culture Technology, KAIST},\n title = {Towards Gestural Sonic Affordances},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_145.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.c61b9546 + doi: 10.5281/zenodo.1178463 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/kjQDN907FXs - title: 'On Parallel Performance Practices: Some Observations on Personalizing DMIs - as Percussionists' - url: https://nime.pubpub.org/pub/226jlaug - year: 2021 + keywords: 'Gestural embodiment of sound, Affordances, Mapping' + month: May + pages: 61--64 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Towards Gestural Sonic Affordances + url: http://www.nime.org/proceedings/2013/nime2013_145.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_77 - abstract: 'The Seals are a political, feminist, noise, and AI-inspired electronic - sorta-surf rock band composed of the talents of Margaret Schedel, Susie Green, - Sophia Sun, Ria Rajan, and Sofy Yuditskaya, augmented by the S.E.A.L. 
(Synthetic - Erudition Assist Lattice), as we call the collection of AIs that assist us in - creating usable content with which to mold and shape our music and visuals. Our - concerts begin by invoking one another through internet conferencing software; - during the concert, we play skull augmented theremins while reading GPT2 & GPT3 - (Machine Learning language models) generated dialogue over pre-generated songs. - As a distributed band we designed our performance to take place over video conferencing - systems deliberately incorporating the glitch artifacts that they bring. We use - one of the oldest forms of generative operations, throwing dice, as well as the - latest in ML technology to create our collaborative music over a distance. In - this paper, we illustrate how we leverage the multiple novel interfaces that we - use to create our unique sound.' - address: 'Shanghai, China' - articleno: 77 - author: 'Yuditskaya, Sofy and Sun, Sophia and Schedel, Margaret' - bibtex: "@inproceedings{NIME21_77,\n abstract = {The Seals are a political, feminist,\ - \ noise, and AI-inspired electronic sorta-surf rock band composed of the talents\ - \ of Margaret Schedel, Susie Green, Sophia Sun, Ria Rajan, and Sofy Yuditskaya,\ - \ augmented by the S.E.A.L. (Synthetic Erudition Assist Lattice), as we call the\ - \ collection of AIs that assist us in creating usable content with which to mold\ - \ and shape our music and visuals. Our concerts begin by invoking one another\ - \ through internet conferencing software; during the concert, we play skull augmented\ - \ theremins while reading GPT2 & GPT3 (Machine Learning language models) generated\ - \ dialogue over pre-generated songs. As a distributed band we designed our performance\ - \ to take place over video conferencing systems deliberately incorporating the\ - \ glitch artifacts that they bring. 
We use one of the oldest forms of generative\ - \ operations, throwing dice, as well as the latest in ML technology to create\ - \ our collaborative music over a distance. In this paper, we illustrate how we\ - \ leverage the multiple novel interfaces that we use to create our unique sound.},\n\ - \ address = {Shanghai, China},\n articleno = {77},\n author = {Yuditskaya, Sofy\ - \ and Sun, Sophia and Schedel, Margaret},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.0282a79c},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/FmTbEUyePXg},\n\ - \ title = {Synthetic Erudition Assist Lattice},\n url = {https://nime.pubpub.org/pub/5oupvoun},\n\ - \ year = {2021}\n}\n" + ID: Cerqueira2013 + abstract: 'SoundCraft is a framework that enables real-time data gathering from + aStarCraft 2 game to external software applications, allowing for musicalinterpretation + of the game''s internal structure and strategies in novel ways.While players battle + each other for victory within the game world, a customStarCraft 2 map collects + and writes out data about players'' decision-making,performance, and current focus + on the map. This data is parsed and transmittedover Open Sound Control (OSC) in + real-time, becoming the source for thesoundscape that accompanies the player''s + game. Using SoundCraft, we havecomposed a musical work for two em StarCraft 2 + players, entitled GG Music. Thispaper details the technical and aesthetic development + of SoundCraft, includingdata collection and sonic mapping. Please see the attached + video file for a performance of GG Music using theSoundCraft framework.' 
+ address: 'Daejeon, Republic of Korea' + author: Mark Cerqueira and Spencer Salazar and Ge Wang + bibtex: "@inproceedings{Cerqueira2013,\n abstract = {SoundCraft is a framework that\ + \ enables real-time data gathering from aStarCraft 2 game to external software\ + \ applications, allowing for musicalinterpretation of the game's internal structure\ + \ and strategies in novel ways.While players battle each other for victory within\ + \ the game world, a customStarCraft 2 map collects and writes out data about players'\ + \ decision-making,performance, and current focus on the map. This data is parsed\ + \ and transmittedover Open Sound Control (OSC) in real-time, becoming the source\ + \ for thesoundscape that accompanies the player's game. Using SoundCraft, we havecomposed\ + \ a musical work for two em StarCraft 2 players, entitled GG Music. Thispaper\ + \ details the technical and aesthetic development of SoundCraft, includingdata\ + \ collection and sonic mapping. Please see the attached video file for a performance\ + \ of GG Music using theSoundCraft framework.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Mark Cerqueira and Spencer Salazar and Ge Wang},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178492},\n issn = {2220-4806},\n keywords\ + \ = {interactive sonification, interactive game music, StarCraft 2},\n month =\ + \ {May},\n pages = {243--247},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {SoundCraft: Transducing StarCraft 2},\n url = {http://www.nime.org/proceedings/2013/nime2013_146.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.0282a79c + doi: 10.5281/zenodo.1178492 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/FmTbEUyePXg - title: Synthetic Erudition Assist Lattice - url: 
https://nime.pubpub.org/pub/5oupvoun - year: 2021 + keywords: 'interactive sonification, interactive game music, StarCraft 2' + month: May + pages: 243--247 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'SoundCraft: Transducing StarCraft 2' + url: http://www.nime.org/proceedings/2013/nime2013_146.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_78 - abstract: This study investigates how accurately users can continuously control - a variety of one degree of freedom sensors commonly used in electronic music interfaces. - Analysis within an information-theoretic model yields channel capacities of maximum - information throughput in bits/sec that can support a unified comparison. The - results may inform the design of digital musical instruments and the design of - systems with similarly demanding control tasks. - address: 'Shanghai, China' - articleno: 78 - author: 'Blandino, Michael and Berdahl, Edgar' - bibtex: "@inproceedings{NIME21_78,\n abstract = {This study investigates how accurately\ - \ users can continuously control a variety of one degree of freedom sensors commonly\ - \ used in electronic music interfaces. Analysis within an information-theoretic\ - \ model yields channel capacities of maximum information throughput in bits/sec\ - \ that can support a unified comparison. 
The results may inform the design of\ - \ digital musical instruments and the design of systems with similarly demanding\ - \ control tasks.},\n address = {Shanghai, China},\n articleno = {78},\n author\ - \ = {Blandino, Michael and Berdahl, Edgar},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.c2b5a672},\n issn = {2220-4806},\n month = {June},\n presentation-video\ - \ = {https://youtu.be/-p7mp3LFsQg},\n title = {Using a Pursuit Tracking Task to\ - \ Compare Continuous Control of Various NIME Sensors},\n url = {https://nime.pubpub.org/pub/using-a-pursuit-tracking-task-to-compare-continuous-control-of-various-nime-sensors},\n\ - \ year = {2021}\n}\n" + ID: Fan2013 + abstract: 'We show how body-centric sensing can be integrated in musical interface + toenable more flexible gestural control. We present a barehanded body-centricinteraction + paradigm where users are able to interact in a spontaneous waythroughperforming + gestures. The paradigm employs a wearable camera and see-throughdisplay to enable + flexible interaction in the 3D space. We designed andimplemented a prototype called + Air Violin, a virtual musical instrument usingdepth camera, to demonstrate the + proposed interaction paradigm. We describedthe design and implementation details.' + address: 'Daejeon, Republic of Korea' + author: Xin Fan and Georg Essl + bibtex: "@inproceedings{Fan2013,\n abstract = {We show how body-centric sensing\ + \ can be integrated in musical interface toenable more flexible gestural control.\ + \ We present a barehanded body-centricinteraction paradigm where users are able\ + \ to interact in a spontaneous waythroughperforming gestures. The paradigm employs\ + \ a wearable camera and see-throughdisplay to enable flexible interaction in the\ + \ 3D space. 
We designed andimplemented a prototype called Air Violin, a virtual\ + \ musical instrument usingdepth camera, to demonstrate the proposed interaction\ + \ paradigm. We describedthe design and implementation details.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Xin Fan and Georg Essl},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178512},\n issn = {2220-4806},\n keywords = {NIME, musical\ + \ instrument, interaction, gesture, Kinect},\n month = {May},\n pages = {122--123},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Air Violin:\ + \ A Body-centric Style Musical Instrument},\n url = {http://www.nime.org/proceedings/2013/nime2013_149.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.c2b5a672 + doi: 10.5281/zenodo.1178512 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/-p7mp3LFsQg - title: Using a Pursuit Tracking Task to Compare Continuous Control of Various NIME - Sensors - url: https://nime.pubpub.org/pub/using-a-pursuit-tracking-task-to-compare-continuous-control-of-various-nime-sensors - year: 2021 + keywords: 'NIME, musical instrument, interaction, gesture, Kinect' + month: May + pages: 122--123 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Air Violin: A Body-centric Style Musical Instrument' + url: http://www.nime.org/proceedings/2013/nime2013_149.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_79 - abstract: 'Contending with ecosystem silencing in the Anthropocene, RhumbLine: Plectrohyla - Exquisita is an installation-scale instrument featuring an ensemble of zoomorphic - musical robots that generate an acoustic soundscape from behind an acousmatic - veil, highlighting the spatial attributes of acoustic sound. 
Originally conceived - as a physical installation, the global COVID-19 pandemic catalyzed a reconceptualization - of the work that allowed it to function remotely and collaboratively with users - seeding robotic frog callers with improvised rhythmic calls via the internet—transforming - a physical installation into a web-based performable installation-scale instrument. - The performed calls from online visitors evolve using AI as they pass through - the frog collective. After performing a rhythm, audiences listen ambisonically - from behind a virtual veil and attempt to map the formation of the frogs, based - on the spatial information embedded in their calls. After listening, audience - members can reveal the frogs and their formation. By reconceiving rhumb lines—navigational - tools that create paths of constant bearing to navigate space—as sonic tools to - spatially orient listeners, RhumbLine: Plectrohyla Exquisita functions as a new - interface for spatial musical expression (NISME) in both its physical and virtual - instantiations.' - address: 'Shanghai, China' - articleno: 79 - author: 'Schedel, Margaret and Smith, Brian and Cosgrove, Robert and Hwang, Nick' - bibtex: "@inproceedings{NIME21_79,\n abstract = {Contending with ecosystem silencing\ - \ in the Anthropocene, RhumbLine: Plectrohyla Exquisita is an installation-scale\ - \ instrument featuring an ensemble of zoomorphic musical robots that generate\ - \ an acoustic soundscape from behind an acousmatic veil, highlighting the spatial\ - \ attributes of acoustic sound. 
Originally conceived as a physical installation,\ - \ the global COVID-19 pandemic catalyzed a reconceptualization of the work that\ - \ allowed it to function remotely and collaboratively with users seeding robotic\ - \ frog callers with improvised rhythmic calls via the internet—transforming a\ - \ physical installation into a web-based performable installation-scale instrument.\ - \ The performed calls from online visitors evolve using AI as they pass through\ - \ the frog collective. After performing a rhythm, audiences listen ambisonically\ - \ from behind a virtual veil and attempt to map the formation of the frogs, based\ - \ on the spatial information embedded in their calls. After listening, audience\ - \ members can reveal the frogs and their formation. By reconceiving rhumb lines—navigational\ - \ tools that create paths of constant bearing to navigate space—as sonic tools\ - \ to spatially orient listeners, RhumbLine: Plectrohyla Exquisita functions as\ - \ a new interface for spatial musical expression (NISME) in both its physical\ - \ and virtual instantiations.},\n address = {Shanghai, China},\n articleno = {79},\n\ - \ author = {Schedel, Margaret and Smith, Brian and Cosgrove, Robert and Hwang,\ - \ Nick},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.9e1312b1},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/twzpxObh9jw},\n title\ - \ = {RhumbLine: Plectrohyla Exquisita — Spatial Listening of Zoomorphic Musical\ - \ Robots},\n url = {https://nime.pubpub.org/pub/f5jtuy87},\n year = {2021}\n}\n" + ID: Wang2013 + abstract: 'By building a wired passive stylus we have added pressure sensitivity + toexisting capacitive touch screen devices for less than ' + address: 'Daejeon, Republic of Korea' + author: Johnty Wang and Nicolas d'Alessandro and Aura Pon and Sidney Fels + bibtex: "@inproceedings{Wang2013,\n abstract = {By building 
a wired passive stylus\
    \ we have added pressure sensitivity toexisting capacitive touch screen devices\
    \ for less than \\$10 in materials, about 1/10th the cost of existing solutions.\
    \ The stylus makes use of the built inaudio interface that is available on most\
    \ smartphones and tablets on the markettoday. Limitations of the device include\
    \ the physical constraint of wires, theoccupation of one audio input and output\
    \ channel, and increased latency equalto the period of at least one audio buffer\
    \ duration. The stylus has beendemonstrated in two cases thus far: a visual musical\
    \ score drawing and asinging synthesis application.},\n address = {Daejeon, Republic\
    \ of Korea},\n author = {Johnty Wang and Nicolas d'Alessandro and Aura Pon and\
    \ Sidney Fels},\n booktitle = {Proceedings of the International Conference on\
    \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178686},\n\
    \ issn = {2220-4806},\n keywords = {input interfaces, touch screens, tablets,\
    \ pressure-sensitive, low-cost},\n month = {May},\n publisher = {Graduate School\
    \ of Culture Technology, KAIST},\n title = {PENny: An Extremely Low-Cost Pressure-Sensitive\
    \ Stylus for Existing Capacitive Touchscreens},\n url = {http://www.nime.org/proceedings/2013/nime2013_150.pdf},\n\
    \ year = {2013}\n}\n"
  booktitle: Proceedings of the International Conference on New Interfaces for Musical
    Expression
  doi: 10.5281/zenodo.1178686
  issn: 2220-4806
  keywords: 'input interfaces, touch screens, tablets, pressure-sensitive, low-cost'
  month: May
  publisher: 'Graduate School of Culture Technology, KAIST'
  title: 'PENny: An Extremely Low-Cost Pressure-Sensitive Stylus for Existing Capacitive
    Touchscreens'
  url: http://www.nime.org/proceedings/2013/nime2013_150.pdf
  year: 2013


- ENTRYTYPE: inproceedings
  ID: NIME21_8
  abstract: 'Recent work in NIME has questioned the political and social implications
    of work in this field, and has called for direct action on problems in the areas
    of diversity, representation and political engagement. Though there is motivation
    to address these problems, there is an open question of how to meaningfully do
    so. This paper proposes that NIME’s historical record is the best tool for understanding
    our own output but this record is incomplete, and makes the case for collective
    action to improve how we document our work. I begin by contrasting NIME’s output
    with its discourse, and explore the nature of this discourse through NIME history
    and examine our inherited epistemological complexity. I assert that, if left unexamined,
    this complexity can undermine our community values of diversity and inclusion.
    I argue that meaningfully addressing current problems demands critical reflection
    on our work, and explore how NIME’s historical record is currently used as a means
    of doing so. I then review what NIME's historical record contains (and what it
    does not), and evaluate its fitness for use as a tool of inquiry. 
Finally I make - the case for collective action to establish better documentation practices, and - suggest features that may be helpful for the process as well as the result.' - address: 'Shanghai, China' - articleno: 8 - author: 'Bin, S. M. Astrid' - bibtex: "@inproceedings{NIME21_8,\n abstract = {Recent work in NIME has questioned\ - \ the political and social implications of work in this field, and has called\ - \ for direct action on problems in the areas of diversity, representation and\ - \ political engagement. Though there is motivation to address these problems,\ - \ there is an open question of how to meaningfully do so. This paper proposes\ - \ that NIME’s historical record is the best tool for understanding our own output\ - \ but this record is incomplete, and makes the case for collective action to improve\ - \ how we document our work. I begin by contrasting NIME’s output with its discourse,\ - \ and explore the nature of this discourse through NIME history and examine our\ - \ inherited epistemological complexity. I assert that, if left unexamined, this\ - \ complexity can undermine our community values of diversity and inclusion. I\ - \ argue that meaningfully addressing current problems demands critical reflection\ - \ on our work, and explore how NIME’s historical record is currently used as a\ - \ means of doing so. I then review what NIME's historical record contains (and\ - \ what it does not), and evaluate its fitness for use as a tool of inquiry. Finally\ - \ I make the case for collective action to establish better documentation practices,\ - \ and suggest features that may be helpful for the process as well as the result.},\n\ - \ address = {Shanghai, China},\n articleno = {8},\n author = {Bin, S. M. 
Astrid},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.ac5d43e1},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/omnMRlj7miA},\n title\ - \ = {Discourse is critical: Towards a collaborative NIME history},\n url = {https://nime.pubpub.org/pub/nbrrk8ll},\n\ - \ year = {2021}\n}\n" + ID: Johnston2013 + abstract: 'This paper describes an audio-visual performance system based on real-timefluid + simulation. The aim is to provide a rich environment for works whichblur the boundaries + between dance and instrumental performance -- and sound andvisuals -- while maintaining + transparency for audiences and new performers. The system uses infra-red motion + tracking to allow performers to manipulate areal-time fluid simulation, which + in turn provides control data forcomputer-generated audio and visuals. It also + provides a control andconfiguration system which allows the behaviour of the interactive + system to bechanged over time, enabling the structure within which interactions + take placeto be `composed''.' + address: 'Daejeon, Republic of Korea' + author: Andrew Johnston + bibtex: "@inproceedings{Johnston2013,\n abstract = {This paper describes an audio-visual\ + \ performance system based on real-timefluid simulation. The aim is to provide\ + \ a rich environment for works whichblur the boundaries between dance and instrumental\ + \ performance -- and sound andvisuals -- while maintaining transparency for audiences\ + \ and new performers. The system uses infra-red motion tracking to allow performers\ + \ to manipulate areal-time fluid simulation, which in turn provides control data\ + \ forcomputer-generated audio and visuals. 
It also provides a control andconfiguration\ + \ system which allows the behaviour of the interactive system to bechanged over\ + \ time, enabling the structure within which interactions take placeto be `composed'.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Andrew Johnston},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178572},\n issn = {2220-4806},\n keywords\ + \ = {performance, dance, fluid simulation, composition},\n month = {May},\n pages\ + \ = {132--135},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Fluid Simulation as Full Body Audio-Visual Instrument},\n url = {http://www.nime.org/proceedings/2013/nime2013_151.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.ac5d43e1 + doi: 10.5281/zenodo.1178572 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/omnMRlj7miA - title: 'Discourse is critical: Towards a collaborative NIME history' - url: https://nime.pubpub.org/pub/nbrrk8ll - year: 2021 + keywords: 'performance, dance, fluid simulation, composition' + month: May + pages: 132--135 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Fluid Simulation as Full Body Audio-Visual Instrument + url: http://www.nime.org/proceedings/2013/nime2013_151.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_80 - abstract: 'In this paper we present the recent developments in the AI-terity instrument. - AI-terity is a deformable, non-rigid musical instrument that comprises a particular - artificial intelligence (AI) method for generating audio samples for real-time - audio synthesis. As an improvement, we developed the control interface structure - with additional sensor hardware. 
In addition, we implemented a new hybrid deep - learning architecture, GANSpaceSynth, in which we applied the GANSpace method - on the GANSynth model. Following the deep learning model improvement, we developed - new autonomous features for the instrument that aim at keeping the musician in - an active and uncertain state of exploration. Through these new features, the - instrument enables more accurate control on GAN latent space. Further, we intend - to investigate the current developments through a musical composition that idiomatically - reflects the new autonomous features of the AI-terity instrument. We argue that - the present technology of AI is suitable for enabling alternative autonomous features - in audio domain for the creative practices of musicians.' - address: 'Shanghai, China' - articleno: 80 - author: 'Tahiroğlu, Koray and Kastemaa, Miranda and Koli, Oskar' - bibtex: "@inproceedings{NIME21_80,\n abstract = {In this paper we present the recent\ - \ developments in the AI-terity instrument. AI-terity is a deformable, non-rigid\ - \ musical instrument that comprises a particular artificial intelligence (AI)\ - \ method for generating audio samples for real-time audio synthesis. As an improvement,\ - \ we developed the control interface structure with additional sensor hardware.\ - \ In addition, we implemented a new hybrid deep learning architecture, GANSpaceSynth,\ - \ in which we applied the GANSpace method on the GANSynth model. Following the\ - \ deep learning model improvement, we developed new autonomous features for the\ - \ instrument that aim at keeping the musician in an active and uncertain state\ - \ of exploration. Through these new features, the instrument enables more accurate\ - \ control on GAN latent space. Further, we intend to investigate the current developments\ - \ through a musical composition that idiomatically reflects the new autonomous\ - \ features of the AI-terity instrument. 
We argue that the present technology of\ - \ AI is suitable for enabling alternative autonomous features in audio domain\ - \ for the creative practices of musicians.},\n address = {Shanghai, China},\n\ - \ articleno = {80},\n author = {Tahiroğlu, Koray and Kastemaa, Miranda and Koli,\ - \ Oskar},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.3d0e9e12},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/WVAIPwI-3P8},\n title\ - \ = {AI-terity 2.0: An Autonomous NIME Featuring GANSpaceSynth Deep Learning Model},\n\ - \ url = {https://nime.pubpub.org/pub/9zu49nu5},\n year = {2021}\n}\n" + ID: Fan2013a + abstract: 'The BioSync interface presented in this paper merges the heart-rate basedparadigm + with the brain-wave based paradigm into one mobile unit which isscalable for large + audience real-time applications. The goal of BioSync is toprovide a hybrid interface, + which uses audience biometric responses foraudience participation techniques. + To provide an affordable and scalablesolution, BioSync collects the user''s heart + rate via mobile phone pulseoximetry and the EEG data via Bluetooth communication + with the off-the-shelfMindWave Mobile hardware. Various interfaces have been designed + and implementedin the development of audience participation techniques and systems. + In thedesign and concept of BioSync, we first summarize recent interface research + foraudience participation within the NIME-related context, followed by the outlineof + the BioSync methodology and interface design. We then present a techniquefor dynamic + tempo control based on the audience biometric responses and anearly prototype + of a mobile dual-channel pulse oximetry and EEG bi-directionalinterface for iOS + device (BioSync). 
Finally, we present discussions and ideasfor future applications, + as well as plans for a series of experiments, whichinvestigate if temporal parameters + of an audience''s physiological metricsencourage crowd synchronization during + a live event or performance, acharacteristic, which we see as having great potential + in the creation offuture live musical and audiovisual performance applications.' + address: 'Daejeon, Republic of Korea' + author: Yuan-Yi Fan and Myles Sciotto + bibtex: "@inproceedings{Fan2013a,\n abstract = {The BioSync interface presented\ + \ in this paper merges the heart-rate basedparadigm with the brain-wave based\ + \ paradigm into one mobile unit which isscalable for large audience real-time\ + \ applications. The goal of BioSync is toprovide a hybrid interface, which uses\ + \ audience biometric responses foraudience participation techniques. To provide\ + \ an affordable and scalablesolution, BioSync collects the user's heart rate via\ + \ mobile phone pulseoximetry and the EEG data via Bluetooth communication with\ + \ the off-the-shelfMindWave Mobile hardware. Various interfaces have been designed\ + \ and implementedin the development of audience participation techniques and systems.\ + \ In thedesign and concept of BioSync, we first summarize recent interface research\ + \ foraudience participation within the NIME-related context, followed by the outlineof\ + \ the BioSync methodology and interface design. We then present a techniquefor\ + \ dynamic tempo control based on the audience biometric responses and anearly\ + \ prototype of a mobile dual-channel pulse oximetry and EEG bi-directionalinterface\ + \ for iOS device (BioSync). 
Finally, we present discussions and ideasfor future\ + \ applications, as well as plans for a series of experiments, whichinvestigate\ + \ if temporal parameters of an audience's physiological metricsencourage crowd\ + \ synchronization during a live event or performance, acharacteristic, which we\ + \ see as having great potential in the creation offuture live musical and audiovisual\ + \ performance applications.},\n address = {Daejeon, Republic of Korea},\n author\ + \ = {Yuan-Yi Fan and Myles Sciotto},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178514},\n\ + \ issn = {2220-4806},\n keywords = {Mobile, Biometrics, Synchronous Interaction,\ + \ Social, Audience, Experience},\n month = {May},\n pages = {248--251},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {BioSync: An Informed\ + \ Participatory Interface for Audience Dynamics and Audiovisual Content Co-creation\ + \ using Mobile PPG and {EEG}},\n url = {http://www.nime.org/proceedings/2013/nime2013_152.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.3d0e9e12 + doi: 10.5281/zenodo.1178514 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/WVAIPwI-3P8 - title: 'AI-terity 2.0: An Autonomous NIME Featuring GANSpaceSynth Deep Learning - Model' - url: https://nime.pubpub.org/pub/9zu49nu5 - year: 2021 + keywords: 'Mobile, Biometrics, Synchronous Interaction, Social, Audience, Experience' + month: May + pages: 248--251 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'BioSync: An Informed Participatory Interface for Audience Dynamics and Audiovisual + Content Co-creation using Mobile PPG and EEG' + url: http://www.nime.org/proceedings/2013/nime2013_152.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_81 - abstract: 'A novel, high-fidelity, 
shape-sensing technology, BendShape [1], is investigated - as an expressive music controller for sound effects, direct sound manipulation, - and voice synthesis. Various approaches are considered for developing mapping - strategies that create transparent metaphors to facilitate expression for both - the performer and the audience. We explore strategies in the input, intermediate, - and output mapping layers using a two-step approach guided by Perry’s Principles -  [2]. First, we use trial-and-error to establish simple mappings between single - input parameter control and effects to identify promising directions for further - study. Then, we compose a specific piece that supports different uses of the BendShape - mappings in a performance context: this allows us to study a performer trying - different types of expressive techniques, enabling us to analyse the role each - mapping has in facilitating musical expression. We also investigate the effects - these mapping strategies have on performer bandwidth. Our main finding is that - the high fidelity of the novel BendShape sensor facilitates creating interpretable - input representations to control sound representations, and thereby match interpretations - that provide better expressive mappings, such as with vocal shape to vocal sound - and bumpiness control; however, direct mappings of individual, independent sensor - mappings to effects does not provide obvious advantages over simpler controls. - Furthermore, while the BendShape sensor enables rich explorations for sound, the - ability to find expressive interpretable shape-to-sound representations while - respecting the performer’s bandwidth limitations (caused by having many coupled - input degrees of freedom) remains a challenge and an opportunity.' 
- address: 'Shanghai, China' - articleno: 81 - author: 'Champagne, Alex and Pritchard, Bob and Dietz, Paul and Fels, Sidney' - bibtex: "@inproceedings{NIME21_81,\n abstract = {A novel, high-fidelity, shape-sensing\ - \ technology, BendShape [1], is investigated as an expressive music controller\ - \ for sound effects, direct sound manipulation, and voice synthesis. Various approaches\ - \ are considered for developing mapping strategies that create transparent metaphors\ - \ to facilitate expression for both the performer and the audience. We explore\ - \ strategies in the input, intermediate, and output mapping layers using a two-step\ - \ approach guided by Perry’s Principles  [2]. First, we use trial-and-error to\ - \ establish simple mappings between single input parameter control and effects\ - \ to identify promising directions for further study. Then, we compose a specific\ - \ piece that supports different uses of the BendShape mappings in a performance\ - \ context: this allows us to study a performer trying different types of expressive\ - \ techniques, enabling us to analyse the role each mapping has in facilitating\ - \ musical expression. We also investigate the effects these mapping strategies\ - \ have on performer bandwidth. Our main finding is that the high fidelity of the\ - \ novel BendShape sensor facilitates creating interpretable input representations\ - \ to control sound representations, and thereby match interpretations that provide\ - \ better expressive mappings, such as with vocal shape to vocal sound and bumpiness\ - \ control; however, direct mappings of individual, independent sensor mappings\ - \ to effects does not provide obvious advantages over simpler controls. 
Furthermore,\ - \ while the BendShape sensor enables rich explorations for sound, the ability\ - \ to find expressive interpretable shape-to-sound representations while respecting\ - \ the performer’s bandwidth limitations (caused by having many coupled input degrees\ - \ of freedom) remains a challenge and an opportunity.},\n address = {Shanghai,\ - \ China},\n articleno = {81},\n author = {Champagne, Alex and Pritchard, Bob and\ - \ Dietz, Paul and Fels, Sidney},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.a72b68dd},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/CnJmH6fX6XA},\n\ - \ title = {Investigation of a Novel Shape Sensor for Musical Expression},\n url\ - \ = {https://nime.pubpub.org/pub/bu2jb1d6},\n year = {2021}\n}\n" + ID: Yang2013 + abstract: 'What is the function of visuals in the design of an augmented keyboardperformance + device with projection? We address this question by thinkingthrough the impact + of choices made in three examples on notions of locus ofattention, visual anticipation + and causal gestalt to articulate a space ofdesign choices. Visuals can emphasize + and deemphasize aspects of performanceand help clarify the role input has to the + performance. We suggest that thisprocess might help thinking through visual feedback + design in NIMEs withrespect to the performer or the audience.' + address: 'Daejeon, Republic of Korea' + author: Qi Yang and Georg Essl + bibtex: "@inproceedings{Yang2013,\n abstract = {What is the function of visuals\ + \ in the design of an augmented keyboardperformance device with projection? We\ + \ address this question by thinkingthrough the impact of choices made in three\ + \ examples on notions of locus ofattention, visual anticipation and causal gestalt\ + \ to articulate a space ofdesign choices. 
Visuals can emphasize and deemphasize\ + \ aspects of performanceand help clarify the role input has to the performance.\ + \ We suggest that thisprocess might help thinking through visual feedback design\ + \ in NIMEs withrespect to the performer or the audience.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Qi Yang and Georg Essl},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178694},\n issn = {2220-4806},\n keywords = {Visual feedback,\ + \ interaction, NIME, musical instrument, interaction, augmented keyboard, gesture,\ + \ Kinect},\n month = {May},\n pages = {252--255},\n publisher = {Graduate School\ + \ of Culture Technology, KAIST},\n title = {Visual Associations in Augmented Keyboard\ + \ Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_156.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.a72b68dd + doi: 10.5281/zenodo.1178694 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/CnJmH6fX6XA - title: Investigation of a Novel Shape Sensor for Musical Expression - url: https://nime.pubpub.org/pub/bu2jb1d6 - year: 2021 + keywords: 'Visual feedback, interaction, NIME, musical instrument, interaction, + augmented keyboard, gesture, Kinect' + month: May + pages: 252--255 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Visual Associations in Augmented Keyboard Performance + url: http://www.nime.org/proceedings/2013/nime2013_156.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_82 - abstract: 'Debris is a playful interface for direct manipulation of audio waveforms. - Audio data is represented as a collection of waveform elements, which provide - a low-resolution visualisation of the audio sample. 
Each element, however, can - be individually examined, re-positioned, or broken down into smaller fragments, - thereby becoming a tangible representation of a moment in the sample. Debris is - built around the idea of looking at a sound not as a linear event to be played - from beginning to end, but as a non-linear collection of moments, timbres, and - sound fragments which can be explored, closely examined and interacted with. This - paper positions the work among conceptually related NIME interfaces, details the - various user interactions and their mappings and ends with a discussion around - the interface’s constraints.' - address: 'Shanghai, China' - articleno: 82 - author: 'Robinson, Frederic Anthony' - bibtex: "@inproceedings{NIME21_82,\n abstract = {Debris is a playful interface for\ - \ direct manipulation of audio waveforms. Audio data is represented as a collection\ - \ of waveform elements, which provide a low-resolution visualisation of the audio\ - \ sample. Each element, however, can be individually examined, re-positioned,\ - \ or broken down into smaller fragments, thereby becoming a tangible representation\ - \ of a moment in the sample. Debris is built around the idea of looking at a sound\ - \ not as a linear event to be played from beginning to end, but as a non-linear\ - \ collection of moments, timbres, and sound fragments which can be explored, closely\ - \ examined and interacted with. 
This paper positions the work among conceptually\ - \ related NIME interfaces, details the various user interactions and their mappings\ - \ and ends with a discussion around the interface’s constraints.},\n address =\ - \ {Shanghai, China},\n articleno = {82},\n author = {Robinson, Frederic Anthony},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.02005035},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/H04LgbZqc-c},\n title\ - \ = {Debris: A playful interface for direct manipulation of audio waveforms},\n\ - \ url = {https://nime.pubpub.org/pub/xn761337},\n year = {2021}\n}\n" + ID: Thorogood2013 + abstract: 'Soundscape composition in improvisation and performance contexts involves + manyprocesses that can become overwhelming for a performer, impacting on thequality + of the composition. One important task is evaluating the mood of acomposition + for evoking accurate associations and memories of a soundscape. Anew system that + uses supervised machine learning is presented for theacquisition and realtime + feedback of soundscape affect. A model of sound-scape mood is created by users + entering evaluations of audio environmentsusing a mobile device. The same device + then provides feedback to the user ofthe predicted mood of other audio environments. + We used a features vector ofTotal Loudness and MFCC extracted from an audio signal + to build a multipleregression models. The evaluation of the system shows the tool + is effective inpredicting soundscape affect.' + address: 'Daejeon, Republic of Korea' + author: Miles Thorogood and Philippe Pasquier + bibtex: "@inproceedings{Thorogood2013,\n abstract = {Soundscape composition in improvisation\ + \ and performance contexts involves manyprocesses that can become overwhelming\ + \ for a performer, impacting on thequality of the composition. 
One important task\ + \ is evaluating the mood of acomposition for evoking accurate associations and\ + \ memories of a soundscape. Anew system that uses supervised machine learning\ + \ is presented for theacquisition and realtime feedback of soundscape affect.\ + \ A model of sound-scape mood is created by users entering evaluations of audio\ + \ environmentsusing a mobile device. The same device then provides feedback to\ + \ the user ofthe predicted mood of other audio environments. We used a features\ + \ vector ofTotal Loudness and MFCC extracted from an audio signal to build a multipleregression\ + \ models. The evaluation of the system shows the tool is effective inpredicting\ + \ soundscape affect.},\n address = {Daejeon, Republic of Korea},\n author = {Miles\ + \ Thorogood and Philippe Pasquier},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178674},\n\ + \ issn = {2220-4806},\n keywords = {soundscape, performance, machine learning,\ + \ audio features, affect grid},\n month = {May},\n pages = {256--260},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {Impress: A Machine\ + \ Learning Approach to Soundscape Affect Classification for a Music Performance\ + \ Environment},\n url = {http://www.nime.org/proceedings/2013/nime2013_157.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.02005035 + doi: 10.5281/zenodo.1178674 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/H04LgbZqc-c - title: 'Debris: A playful interface for direct manipulation of audio waveforms' - url: https://nime.pubpub.org/pub/xn761337 - year: 2021 + keywords: 'soundscape, performance, machine learning, audio features, affect grid' + month: May + pages: 256--260 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Impress: A Machine 
Learning Approach to Soundscape Affect Classification + for a Music Performance Environment' + url: http://www.nime.org/proceedings/2013/nime2013_157.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_83 - abstract: 'Musical audio synthesis often requires systems-level knowledge and uniquely - analytical approaches to music making, thus a number of machine learning systems - have been proposed to replace traditional parameter spaces with more intuitive - control spaces based on spatial arrangement of sonic qualities. Some prior evaluations - of simplified control spaces have shown increased user efficacy via quantitative - metrics in sound design tasks, and some indicate that simplification may lower - barriers to entry to synthesis. However, the level and nature of the appeal of - simplified interfaces to synthesists merits investigation, particularly in relation - to the type of task, prior expertise, and aesthetic values. Toward addressing - these unknowns, this work investigates user experience in a sample of 20 musicians - with varying degrees of synthesis expertise, and uses a one-week, at-home, multi-task - evaluation of a novel instrument presenting a simplified mode of control alongside - the full parameter space. We find that our participants generally give primacy - to parameter space and seek understanding of parameter-sound relationships, yet - most do report finding some creative utility in timbre-space control for discovery - of sounds, timbral transposition, and expressive modulations of parameters. Although - we find some articulations of particular aesthetic values, relationships to user - experience remain difficult to characterize generally.' - address: 'Shanghai, China' - articleno: 83 - author: 'Gregorio, Jeff and Kim, Youngmoo E.' 
- bibtex: "@inproceedings{NIME21_83,\n abstract = {Musical audio synthesis often requires\ - \ systems-level knowledge and uniquely analytical approaches to music making,\ - \ thus a number of machine learning systems have been proposed to replace traditional\ - \ parameter spaces with more intuitive control spaces based on spatial arrangement\ - \ of sonic qualities. Some prior evaluations of simplified control spaces have\ - \ shown increased user efficacy via quantitative metrics in sound design tasks,\ - \ and some indicate that simplification may lower barriers to entry to synthesis.\ - \ However, the level and nature of the appeal of simplified interfaces to synthesists\ - \ merits investigation, particularly in relation to the type of task, prior expertise,\ - \ and aesthetic values. Toward addressing these unknowns, this work investigates\ - \ user experience in a sample of 20 musicians with varying degrees of synthesis\ - \ expertise, and uses a one-week, at-home, multi-task evaluation of a novel instrument\ - \ presenting a simplified mode of control alongside the full parameter space.\ - \ We find that our participants generally give primacy to parameter space and\ - \ seek understanding of parameter-sound relationships, yet most do report finding\ - \ some creative utility in timbre-space control for discovery of sounds, timbral\ - \ transposition, and expressive modulations of parameters. 
Although we find some\ - \ articulations of particular aesthetic values, relationships to user experience\ - \ remain difficult to characterize generally.},\n address = {Shanghai, China},\n\ - \ articleno = {83},\n author = {Gregorio, Jeff and Kim, Youngmoo E.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.31419bf9},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/m7IqWceQmuk},\n title = {Evaluation\ - \ of Timbre-Based Control of a Parametric Synthesizer},\n url = {https://nime.pubpub.org/pub/adtb2zl5},\n\ - \ year = {2021}\n}\n" + ID: Park2013a + abstract: 'In this paper, we designed a sound effect device, which was applicable + forspray paint art process. For the applicability research of the device, wedesigned + a prototype which had a form not far off the traditional spray cans,using Arduino + and various sensors. Through the test process of the prototype,we verified the + elements that would be necessary to apply our newly designeddevice to real spray + paint art activities. Thus we checked the possibility ofvarious musical expressions + by expanding the functions of the designed device.' + address: 'Daejeon, Republic of Korea' + author: Gibeom Park and Kyogu Lee + bibtex: "@inproceedings{Park2013a,\n abstract = {In this paper, we designed a sound\ + \ effect device, which was applicable forspray paint art process. For the applicability\ + \ research of the device, wedesigned a prototype which had a form not far off\ + \ the traditional spray cans,using Arduino and various sensors. Through the test\ + \ process of the prototype,we verified the elements that would be necessary to\ + \ apply our newly designeddevice to real spray paint art activities. 
Thus we checked\ + \ the possibility ofvarious musical expressions by expanding the functions of\ + \ the designed device.},\n address = {Daejeon, Republic of Korea},\n author =\ + \ {Gibeom Park and Kyogu Lee},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178634},\n\ + \ issn = {2220-4806},\n keywords = {Sound effect device, Spray paint art, Arduino,\ + \ Pure Data},\n month = {May},\n pages = {65--68},\n publisher = {Graduate School\ + \ of Culture Technology, KAIST},\n title = {Sound Spray --- can-shaped sound effect\ + \ device},\n url = {http://www.nime.org/proceedings/2013/nime2013_158.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.31419bf9 + doi: 10.5281/zenodo.1178634 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/m7IqWceQmuk - title: Evaluation of Timbre-Based Control of a Parametric Synthesizer - url: https://nime.pubpub.org/pub/adtb2zl5 - year: 2021 + keywords: 'Sound effect device, Spray paint art, Arduino, Pure Data' + month: May + pages: 65--68 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Sound Spray --- can-shaped sound effect device + url: http://www.nime.org/proceedings/2013/nime2013_158.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_84 - abstract: 'Hybridization No. 1 is a wireless hand-held rotary instrument that allows - the performer to simultaneously interact with physical and virtual spaces. The - instrument emits visible laser lights and invisible ultrasonic waves which scan - the architecture of a physical space. The instrument is also connected to a virtual - 3D model of the same space, which allows the performer to create an immersive - audiovisual composition that blurs the limits between physical and virtual space. 
- In this paper I describe the instrument, its operation and its integrated multimedia - system.' - address: 'Shanghai, China' - articleno: 84 - author: 'Riaño, Milton' - bibtex: "@inproceedings{NIME21_84,\n abstract = {Hybridization No. 1 is a wireless\ - \ hand-held rotary instrument that allows the performer to simultaneously interact\ - \ with physical and virtual spaces. The instrument emits visible laser lights\ - \ and invisible ultrasonic waves which scan the architecture of a physical space.\ - \ The instrument is also connected to a virtual 3D model of the same space, which\ - \ allows the performer to create an immersive audiovisual composition that blurs\ - \ the limits between physical and virtual space. In this paper I describe the\ - \ instrument, its operation and its integrated multimedia system.},\n address\ - \ = {Shanghai, China},\n articleno = {84},\n author = {Riaño, Milton},\n booktitle\ + ID: Tobise2013 + abstract: 'In guitar performance, fingering is an important factor, and complicated. + In particular, the fingering of the left hand comprises various relationshipsbetween + the finger and the string, such as a finger touching the strings, afinger pressing + the strings, and a finger releasing the strings. The recognition of the precise + fingering of the left hand is applied to aself-learning support system, which + is able to detect strings being muted by afinger, and which transcribes music + automatically, including the details offingering techniques. Therefore, the goal + of our study is the construction of a system forrecognizing the touch of strings + for the guitar. We propose a method for recognizing the touch of strings based + on theconductive characteristics of strings and frets. We develop a prototype + system, and evaluate its effectiveness.Furthermore, we propose an application + which utilizes our system.' 
+ address: 'Daejeon, Republic of Korea' + author: Hayami Tobise and Yoshinari Takegawa and Tsutomu Terada and Masahiko Tsukamoto + bibtex: "@inproceedings{Tobise2013,\n abstract = {In guitar performance, fingering\ + \ is an important factor, and complicated. In particular, the fingering of the\ + \ left hand comprises various relationshipsbetween the finger and the string,\ + \ such as a finger touching the strings, afinger pressing the strings, and a finger\ + \ releasing the strings. The recognition of the precise fingering of the left\ + \ hand is applied to aself-learning support system, which is able to detect strings\ + \ being muted by afinger, and which transcribes music automatically, including\ + \ the details offingering techniques. Therefore, the goal of our study is the\ + \ construction of a system forrecognizing the touch of strings for the guitar.\ + \ We propose a method for recognizing the touch of strings based on theconductive\ + \ characteristics of strings and frets. We develop a prototype system, and evaluate\ + \ its effectiveness.Furthermore, we propose an application which utilizes our\ + \ system.},\n address = {Daejeon, Republic of Korea},\n author = {Hayami Tobise\ + \ and Yoshinari Takegawa and Tsutomu Terada and Masahiko Tsukamoto},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.d3354ff3},\n issn = {2220-4806},\n month\ - \ = {June},\n title = {Hybridization No. 
1: Standing at the Boundary between Physical\ - \ and Virtual Space},\n url = {https://nime.pubpub.org/pub/h1},\n year = {2021}\n\ + \ Expression},\n doi = {10.5281/zenodo.1178676},\n issn = {2220-4806},\n keywords\ + \ = {Guitar, Touched strings, Fingering recognition},\n month = {May},\n pages\ + \ = {261--266},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Construction of a System for Recognizing Touch of Strings for Guitar},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_159.pdf},\n year = {2013}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.d3354ff3 + doi: 10.5281/zenodo.1178676 issn: 2220-4806 - month: June - title: 'Hybridization No. 1: Standing at the Boundary between Physical and Virtual - Space' - url: https://nime.pubpub.org/pub/h1 - year: 2021 + keywords: 'Guitar, Touched strings, Fingering recognition' + month: May + pages: 261--266 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Construction of a System for Recognizing Touch of Strings for Guitar + url: http://www.nime.org/proceedings/2013/nime2013_159.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_85 - abstract: 'We present the methods and findings of a multi-day performance research - lab that evaluated the efficacy of a novel nerve sensor in the context of a physically - inclusive performance practice. Nerve sensors are a variant of surface electromyography - that are optimized to detect signals from nerve firings rather than skeletal muscle - movement, allowing performers with altered muscle physiology or control to use - the sensors more effectively. Through iterative co-design and musical performance - evaluation, we compared the performative affordances and limitations of the nerve - sensor to other contemporary sensor-based gestural instruments. 
The nerve sensor - afforded the communication of gestural effort in a manner that other gestural - instruments did not, while offering a smaller palette of reliably classifiable - gestures.' - address: 'Shanghai, China' - articleno: 85 - author: 'May, Lloyd and Larsson, Peter' - bibtex: "@inproceedings{NIME21_85,\n abstract = {We present the methods and findings\ - \ of a multi-day performance research lab that evaluated the efficacy of a novel\ - \ nerve sensor in the context of a physically inclusive performance practice.\ - \ Nerve sensors are a variant of surface electromyography that are optimized to\ - \ detect signals from nerve firings rather than skeletal muscle movement, allowing\ - \ performers with altered muscle physiology or control to use the sensors more\ - \ effectively. Through iterative co-design and musical performance evaluation,\ - \ we compared the performative affordances and limitations of the nerve sensor\ - \ to other contemporary sensor-based gestural instruments. The nerve sensor afforded\ - \ the communication of gestural effort in a manner that other gestural instruments\ - \ did not, while offering a smaller palette of reliably classifiable gestures.},\n\ - \ address = {Shanghai, China},\n articleno = {85},\n author = {May, Lloyd and\ - \ Larsson, Peter},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.82c5626f},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/qsRVcBl2gAo},\n\ - \ title = {Nerve Sensors in Inclusive Musical Performance},\n url = {https://nime.pubpub.org/pub/yxcp36ii},\n\ - \ year = {2021}\n}\n" + ID: Tokunaga2013 + abstract: 'We are exploring the design and implementation of artificial expressions,kinetic + audio-visual representations of real-time physiological data whichreflect emotional + and cognitive state. 
In this work we demonstrate a prototype,the Enactive Mandala, + which maps real-time EEG signals to modulate ambientmusic and animated visual + music. The design draws inspiration from the visualmusic of the Whitney brothers + as well as traditional meditative practices.Transparent real-time audio-visual + feedback ofbrainwave qualities supports intuitive insight into the connection + betweenthoughts and physiological states. Our method is constructive: by linkingphysiology + with an dynamic a/v display, and embedding the human-machine systemin the social + contexts that arise in real-time play, we hope to seed new, andas yet unknown + forms, of non-verbal communication, or ``artificialexpressions''''.' + address: 'Daejeon, Republic of Korea' + author: Tomohiro Tokunaga and Michael J. Lyons + bibtex: "@inproceedings{Tokunaga2013,\n abstract = {We are exploring the design\ + \ and implementation of artificial expressions,kinetic audio-visual representations\ + \ of real-time physiological data whichreflect emotional and cognitive state.\ + \ In this work we demonstrate a prototype,the Enactive Mandala, which maps real-time\ + \ EEG signals to modulate ambientmusic and animated visual music. The design draws\ + \ inspiration from the visualmusic of the Whitney brothers as well as traditional\ + \ meditative practices.Transparent real-time audio-visual feedback ofbrainwave\ + \ qualities supports intuitive insight into the connection betweenthoughts and\ + \ physiological states. 
Our method is constructive: by linkingphysiology with\ + \ an dynamic a/v display, and embedding the human-machine systemin the social\ + \ contexts that arise in real-time play, we hope to seed new, andas yet unknown\ + \ forms, of non-verbal communication, or ``artificialexpressions''.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {Tomohiro Tokunaga and Michael J.\ + \ Lyons},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178678},\n issn = {2220-4806},\n\ + \ keywords = {Brain-computer Interfaces, BCI, EEG, Sonification, Visualization,\ + \ Artificial Expressions, NIME, Visual Music},\n month = {May},\n pages = {118--119},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Enactive\ + \ Mandala: Audio-visualizing Brain Waves},\n url = {http://www.nime.org/proceedings/2013/nime2013_16.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.82c5626f + doi: 10.5281/zenodo.1178678 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/qsRVcBl2gAo - title: Nerve Sensors in Inclusive Musical Performance - url: https://nime.pubpub.org/pub/yxcp36ii - year: 2021 + keywords: 'Brain-computer Interfaces, BCI, EEG, Sonification, Visualization, Artificial + Expressions, NIME, Visual Music' + month: May + pages: 118--119 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Enactive Mandala: Audio-visualizing Brain Waves' + url: http://www.nime.org/proceedings/2013/nime2013_16.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_86 - abstract: 'This paper introduces Tune Field, a 3-dimensional tangible interface - that combines and alters previously existing concepts of topographical, field - sensing and capacitive touch interfaces as a method for musical expression and - sound visualization. 
Users are invited to create experimental sound textures while - modifying the topography of antennas. The interface’s touch antennas are randomly - located on a box promoting exploration and discovery of gesture-to-sound relationships. - This way, the interface opens space to playfully producing sound and triggering - visuals; thus, converting Tune Field into a sensorial experience.' - address: 'Shanghai, China' - articleno: 86 - author: 'Fernandez, Guadalupe Babio and Larson, Kent' - bibtex: "@inproceedings{NIME21_86,\n abstract = {This paper introduces Tune Field,\ - \ a 3-dimensional tangible interface that combines and alters previously existing\ - \ concepts of topographical, field sensing and capacitive touch interfaces as\ - \ a method for musical expression and sound visualization. Users are invited to\ - \ create experimental sound textures while modifying the topography of antennas.\ - \ The interface’s touch antennas are randomly located on a box promoting exploration\ - \ and discovery of gesture-to-sound relationships. This way, the interface opens\ - \ space to playfully producing sound and triggering visuals; thus, converting\ - \ Tune Field into a sensorial experience.},\n address = {Shanghai, China},\n articleno\ - \ = {86},\n author = {Fernandez, Guadalupe Babio and Larson, Kent},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.2305755b},\n issn = {2220-4806},\n month\ - \ = {June},\n presentation-video = {https://youtu.be/2lB8idO_yDs},\n title = {Tune\ - \ Field},\n url = {https://nime.pubpub.org/pub/eqvxspw3},\n year = {2021}\n}\n" + ID: Dobda2013 + abstract: 'Breaking musical and creative expression into elements, layers, and formulas, + we explore how live listeners create unique sonic experiences from a palette of + these elements and their interactions. 
Bringing us to present-day creative applications, + a social and historical overview of silent disco is presented. The advantages + of this active listening interface are outlined by the author''s expressions requiring + discrete elements, such as binaural beats, 3D audio effects, and multiple live + music acts in the same space. Events and prototypes as well as hardware and software + proposals for live multi-listener manipulation of multielemental sound and music + are presented. Examples in audio production, sound healing, music composition, + tempo phasing, and spatial audio illustrate the applications.' + address: 'Daejeon, Republic of Korea' + author: Russell Eric Dobda + bibtex: "@inproceedings{Dobda2013,\n abstract = {Breaking musical and creative expression\ + \ into elements, layers, and formulas, we explore how live listeners create unique\ + \ sonic experiences from a palette of these elements and their interactions. Bringing\ + \ us to present-day creative applications, a social and historical overview of\ + \ silent disco is presented. The advantages of this active listening interface\ + \ are outlined by the author's expressions requiring discrete elements, such as\ + \ binaural beats, 3D audio effects, and multiple live music acts in the same space.\ + \ Events and prototypes as well as hardware and software proposals for live multi-listener\ + \ manipulation of multielemental sound and music are presented. 
Examples in audio\ + \ production, sound healing, music composition, tempo phasing, and spatial audio\ + \ illustrate the applications.},\n address = {Daejeon, Republic of Korea},\n author\ + \ = {Russell Eric Dobda},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178502},\n\ + \ issn = {2220-4806},\n keywords = {wireless headphones, music production, silent\ + \ disco, headphone concert, binaural beats, multi-track audio, active music listening,\ + \ sound healing, mobile clubbing, smart-phone apps},\n month = {May},\n pages\ + \ = {69--72},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Applied and Proposed Installations with Silent Disco Headphones for\ + \ Multi-Elemental Creative Expression},\n url = {http://www.nime.org/proceedings/2013/nime2013_161.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.2305755b + doi: 10.5281/zenodo.1178502 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/2lB8idO_yDs - title: Tune Field - url: https://nime.pubpub.org/pub/eqvxspw3 - year: 2021 + keywords: 'wireless headphones, music production, silent disco, headphone concert, + binaural beats, multi-track audio, active music listening, sound healing, mobile + clubbing, smart-phone apps' + month: May + pages: 69--72 + publisher: 'Graduate School of Culture Technology, KAIST' + title: Applied and Proposed Installations with Silent Disco Headphones for Multi-Elemental + Creative Expression + url: http://www.nime.org/proceedings/2013/nime2013_161.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_87 - abstract: 'The basic role of DJs is creating a seamless sequence of music tracks. 
- In order to make the DJ mix a single continuous audio stream, DJs control various - audio effects on a DJ mixer system particularly in the transition region between - one track and the next track and modify the audio signals in terms of volume, - timbre, tempo, and other musical elements. There have been research efforts to - imitate the DJ mixing techniques but they are mainly rule-based approaches based - on domain knowledge. In this paper, we propose a method to analyze the DJ mixer - control from real-world DJ mixes toward a data-driven approach to imitate the - DJ performance. Specifically, we estimate the mixing gain trajectories between - the two tracks using sub-band analysis with constrained convex optimization. We - evaluate the method by reconstructing the original tracks using the two source - tracks and the gain estimate, and show that the proposed method outperforms the - linear crossfading as a baseline and the single-band analysis. A listening test - from the survey of 14 participants also confirms that our proposed method is superior - among them.' - address: 'Shanghai, China' - articleno: 87 - author: 'Kim, Taejun and Yang, Yi-Hsuan and Nam, Juhan' - bibtex: "@inproceedings{NIME21_87,\n abstract = {The basic role of DJs is creating\ - \ a seamless sequence of music tracks. In order to make the DJ mix a single continuous\ - \ audio stream, DJs control various audio effects on a DJ mixer system particularly\ - \ in the transition region between one track and the next track and modify the\ - \ audio signals in terms of volume, timbre, tempo, and other musical elements.\ - \ There have been research efforts to imitate the DJ mixing techniques but they\ - \ are mainly rule-based approaches based on domain knowledge. In this paper, we\ - \ propose a method to analyze the DJ mixer control from real-world DJ mixes toward\ - \ a data-driven approach to imitate the DJ performance. 
Specifically, we estimate\ - \ the mixing gain trajectories between the two tracks using sub-band analysis\ - \ with constrained convex optimization. We evaluate the method by reconstructing\ - \ the original tracks using the two source tracks and the gain estimate, and show\ - \ that the proposed method outperforms the linear crossfading as a baseline and\ - \ the single-band analysis. A listening test from the survey of 14 participants\ - \ also confirms that our proposed method is superior among them.},\n address =\ - \ {Shanghai, China},\n articleno = {87},\n author = {Kim, Taejun and Yang, Yi-Hsuan\ - \ and Nam, Juhan},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.4b2fc7b9},\n\ - \ issn = {2220-4806},\n month = {June},\n presentation-video = {https://youtu.be/ju0P-Zq8Bwo},\n\ - \ title = {Reverse-Engineering The Transition Regions of Real-World DJ Mixes using\ - \ Sub-band Analysis with Convex Optimization},\n url = {https://nime.pubpub.org/pub/g7avj1a7},\n\ - \ year = {2021}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.21428/92fbeb44.4b2fc7b9 - issn: 2220-4806 - month: June - presentation-video: https://youtu.be/ju0P-Zq8Bwo - title: Reverse-Engineering The Transition Regions of Real-World DJ Mixes using Sub-band - Analysis with Convex Optimization - url: https://nime.pubpub.org/pub/g7avj1a7 - year: 2021 - - -- ENTRYTYPE: inproceedings - ID: NIME21_88 - abstract: 'This paper reports on a project aimed to break away from the portability - concerns of native DSP code between different platforms, thus freeing the instrument - designer from the burden of porting new Digital Musical Instruments (DMIs) to - different architectures. 
Bespoke Anywhere is a live modular style software DMI - with an instance of the Audio Anywhere (AA) framework, that enables working with - audio plugins that are compiled once and run anywhere. At the heart of Audio Anywhere - is an audio engine whose Digital Signal Processing (DSP) components are written - in Faust and deployed with Web Assembly (Wasm). We demonstrate Bespoke Anywhere - as a hosting application, for live performance, and music production. We focus - on an instance of AA using Faust for DSP, that is statically complied to portable - Wasm, and Graphical User Interfaces (GUIs) described in JSON, both of which are - loaded dynamically into our modified version of Bespoke.' - address: 'Shanghai, China' - articleno: 88 - author: 'Gaster, Benedict and Challinor, Ryan' - bibtex: "@inproceedings{NIME21_88,\n abstract = {This paper reports on a project\ - \ aimed to break away from the portability concerns of native DSP code between\ - \ different platforms, thus freeing the instrument designer from the burden of\ - \ porting new Digital Musical Instruments (DMIs) to different architectures. Bespoke\ - \ Anywhere is a live modular style software DMI with an instance of the Audio\ - \ Anywhere (AA) framework, that enables working with audio plugins that are compiled\ - \ once and run anywhere. At the heart of Audio Anywhere is an audio engine whose\ - \ Digital Signal Processing (DSP) components are written in Faust and deployed\ - \ with Web Assembly (Wasm). We demonstrate Bespoke Anywhere as a hosting application,\ - \ for live performance, and music production. 
We focus on an instance of AA using\ - \ Faust for DSP, that is statically complied to portable Wasm, and Graphical User\ - \ Interfaces (GUIs) described in JSON, both of which are loaded dynamically into\ - \ our modified version of Bespoke.},\n address = {Shanghai, China},\n articleno\ - \ = {88},\n author = {Gaster, Benedict and Challinor, Ryan},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.02c348fb},\n issn = {2220-4806},\n month = {June},\n\ - \ presentation-video = {https://youtu.be/ayJzFVRXPMs},\n title = {Bespoke Anywhere},\n\ - \ url = {https://nime.pubpub.org/pub/8jaqbl7m},\n year = {2021}\n}\n" + ID: Christopher2013 + abstract: 'This paper describes Kontrol, a new hand interface that extends the intuitivecontrol + of electronic music to traditional instrumentalist and dancers. Thegoal of the + authors has been to provide users with a device that is capable ofdetecting the + highly intricate and expressive gestures of the master performer,in order for + that information to be interpreted and used for control ofelectronic music. This + paper discusses related devices, the architecture ofKontrol, it''s potential as + a gesture recognition device, and severalperformance applications.' + address: 'Daejeon, Republic of Korea' + author: Kameron Christopher and Jingyin He and Raakhi Kapur and Ajay Kapur + bibtex: "@inproceedings{Christopher2013,\n abstract = {This paper describes Kontrol,\ + \ a new hand interface that extends the intuitivecontrol of electronic music to\ + \ traditional instrumentalist and dancers. Thegoal of the authors has been to\ + \ provide users with a device that is capable ofdetecting the highly intricate\ + \ and expressive gestures of the master performer,in order for that information\ + \ to be interpreted and used for control ofelectronic music. 
This paper discusses\ + \ related devices, the architecture ofKontrol, it's potential as a gesture recognition\ + \ device, and severalperformance applications.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Kameron Christopher and Jingyin He and Raakhi Kapur and\ + \ Ajay Kapur},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178496},\n issn\ + \ = {2220-4806},\n keywords = {Hand controller, computational ethnomusicology,\ + \ dance interface, conducting interface, Wekinator, wearable sensors},\n month\ + \ = {May},\n pages = {267--270},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {Kontrol: Hand Gesture Recognition for Music and Dance Interaction},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_164.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.02c348fb + doi: 10.5281/zenodo.1178496 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/ayJzFVRXPMs - title: Bespoke Anywhere - url: https://nime.pubpub.org/pub/8jaqbl7m - year: 2021 + keywords: 'Hand controller, computational ethnomusicology, dance interface, conducting + interface, Wekinator, wearable sensors' + month: May + pages: 267--270 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Kontrol: Hand Gesture Recognition for Music and Dance Interaction' + url: http://www.nime.org/proceedings/2013/nime2013_164.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: NIME21_9 - abstract: 'Learning advanced skills on a musical instrument takes a range of physical - and cognitive efforts. For instance, practicing polyrhythm is a complex task that - requires the development of both musical and physical skills. This paper explores - the use of automation in the context of learning advanced skills on the guitar. 
- Our robotic guitar is capable of physically plucking on the strings along with - a musician, providing both haptic and audio guidance to the musician. We hypothesize - that a multimodal and first-person experience of “being able to play” could increase - learning efficacy. We discuss the novel learning application and a user study, - through which we illustrate the implication and potential issues in systems that - provide temporary skills and in-situ multimodal guidance for learning.' - address: 'Shanghai, China' - articleno: 9 - author: 'Leigh, Sang-won and Lee, Jeonghyun (Jonna)' - bibtex: "@inproceedings{NIME21_9,\n abstract = {Learning advanced skills on a musical\ - \ instrument takes a range of physical and cognitive efforts. For instance, practicing\ - \ polyrhythm is a complex task that requires the development of both musical and\ - \ physical skills. This paper explores the use of automation in the context of\ - \ learning advanced skills on the guitar. Our robotic guitar is capable of physically\ - \ plucking on the strings along with a musician, providing both haptic and audio\ - \ guidance to the musician. We hypothesize that a multimodal and first-person\ - \ experience of “being able to play” could increase learning efficacy. 
We discuss\ - \ the novel learning application and a user study, through which we illustrate\ - \ the implication and potential issues in systems that provide temporary skills\ - \ and in-situ multimodal guidance for learning.},\n address = {Shanghai, China},\n\ - \ articleno = {9},\n author = {Leigh, Sang-won and Lee, Jeonghyun (Jonna)},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.002be215},\n issn = {2220-4806},\n\ - \ month = {June},\n presentation-video = {https://youtu.be/MeXrN95jajU},\n title\ - \ = {A Study on Learning Advanced Skills on Co-Playable Robotic Instruments},\n\ - \ url = {https://nime.pubpub.org/pub/h5dqsvpm},\n year = {2021}\n}\n" + ID: Han2013a + abstract: 'This paper presents a framework that transforms fingerprint patterns + intoaudio. We describe Digiti Sonus, an interactive installation performingfingerprint + sonification and visualization, including novel techniques forrepresenting user-intended + fingerprint expression as audio parameters. In orderto enable personalized sonification + and broaden timbre of sound, theinstallation employs sound synthesis based on + various visual feature analysissuch as minutiae extraction, area, angle, and push + pressure of fingerprints.The sonification results are discussed and the diverse + timbres of soundretrieved from different fingerprints are compared.' + address: 'Daejeon, Republic of Korea' + author: Yoon Chung Han and Byeong-jun Han and Matthew Wright + bibtex: "@inproceedings{Han2013a,\n abstract = {This paper presents a framework\ + \ that transforms fingerprint patterns intoaudio. We describe Digiti Sonus, an\ + \ interactive installation performingfingerprint sonification and visualization,\ + \ including novel techniques forrepresenting user-intended fingerprint expression\ + \ as audio parameters. 
In orderto enable personalized sonification and broaden\ + \ timbre of sound, theinstallation employs sound synthesis based on various visual\ + \ feature analysissuch as minutiae extraction, area, angle, and push pressure\ + \ of fingerprints.The sonification results are discussed and the diverse timbres\ + \ of soundretrieved from different fingerprints are compared.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Yoon Chung Han and Byeong-jun Han and Matthew\ + \ Wright},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178548},\n issn = {2220-4806},\n\ + \ keywords = {Fingerprint, Fingerprint sonification, interactive sonification,\ + \ sound synthesis, biometric data},\n month = {May},\n pages = {136--141},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {Digiti Sonus: Advanced\ + \ Interactive Fingerprint Sonification Using Visual Feature Analysis},\n url =\ + \ {http://www.nime.org/proceedings/2013/nime2013_170.pdf},\n year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.002be215 + doi: 10.5281/zenodo.1178548 issn: 2220-4806 - month: June - presentation-video: https://youtu.be/MeXrN95jajU - title: A Study on Learning Advanced Skills on Co-Playable Robotic Instruments - url: https://nime.pubpub.org/pub/h5dqsvpm - year: 2021 + keywords: 'Fingerprint, Fingerprint sonification, interactive sonification, sound + synthesis, biometric data' + month: May + pages: 136--141 + publisher: 'Graduate School of Culture Technology, KAIST' + title: 'Digiti Sonus: Advanced Interactive Fingerprint Sonification Using Visual + Feature Analysis' + url: http://www.nime.org/proceedings/2013/nime2013_170.pdf + year: 2013 - ENTRYTYPE: inproceedings - ID: Nakanishi2013 - abstract: 'In this paper, the authors introduce an interactive device, ``POWDER - BOX''''for use by 
novices in musical sessions. ``POWDER BOX'''' is equipped withsensor-based - replaceable interfaces, which enable participants to discover andselect their - favorite playing styles of musical instruments during a musicalsession. In addition, - it has a wireless communication function thatsynchronizes musical scale and BPM - between multiple devices. To date, various kinds of ``inventive'''' electronic - musical instruments havebeen created in the field of Computer Music field. The - authors are interestedin formations of musical sessions, aiming for a balance - between simpleinteraction and musical expression. This study focuses on the development - ofperformance playing styles.Musicians occasionally change their playing styles - (e.g., guitar pluckingstyle) during a musical session. Generally, it is difficult - for nonmusicians toachieve this kind of smooth changing depends on levels of their - skillacquisition. However, it is essentially important for enjoying musical sessionswhether - people could acquire these skills. Here, the authors attempted to develop the - device that supports nonmusicians toconquer this point using replaceable interfaces. - The authors expected thatchanging interfaces would bring similar effect as changing - playing style by theskillful player. This research aims to establish an environment - in whichnonmusicians and musicians share their individual musical ideas easily. - Here,the interaction design and configuration of the ``POWDER BOX'''' is presented.' + ID: Perrotin2013 + abstract: 'Touch user interfaces such as touchpad or pen tablet are often used forcontinuous + pitch control in synthesis devices. Usually, pitch is set at thecontact point + on the interface, thus introducing possible pitch inaccuracies atthe note onset. + This paper proposes a new algorithm, based on an adaptiveattraction mapping, for + improving initial pitch accuracy with touch userinterfaces with continuous control. 
+ At each new contact on the interface, thealgorithm adjusts the mapping to produce + the most likely targeted note of thescale in the vicinity of the contact point. + Then, pitch remains continuouslyadjustable as long as the contact is maintained, + allowing for vibrato,portamento and other subtle melodic control. The results + of experimentscomparing the users'' pitch accuracy with and without the help of + the algorithmshow that such a correction enables to play sharply in tune at the + contact withthe interface, regardless the musical background of the player. Therefore, + thedynamic mapping algorithm allows for a clean and accurate attack when playing + touch user interfaces for controlling continuous pitch instruments like voicesynthesizers.' address: 'Daejeon, Republic of Korea' - author: Yoshihito Nakanishi and Seiichiro Matsumura and Chuichi Arakawa - bibtex: "@inproceedings{Nakanishi2013,\n abstract = {In this paper, the authors\ - \ introduce an interactive device, ``POWDER BOX''for use by novices in musical\ - \ sessions. ``POWDER BOX'' is equipped withsensor-based replaceable interfaces,\ - \ which enable participants to discover andselect their favorite playing styles\ - \ of musical instruments during a musicalsession. In addition, it has a wireless\ - \ communication function thatsynchronizes musical scale and BPM between multiple\ - \ devices. To date, various kinds of ``inventive'' electronic musical instruments\ - \ havebeen created in the field of Computer Music field. The authors are interestedin\ - \ formations of musical sessions, aiming for a balance between simpleinteraction\ - \ and musical expression. This study focuses on the development ofperformance\ - \ playing styles.Musicians occasionally change their playing styles (e.g., guitar\ - \ pluckingstyle) during a musical session. 
Generally, it is difficult for nonmusicians\ - \ toachieve this kind of smooth changing depends on levels of their skillacquisition.\ - \ However, it is essentially important for enjoying musical sessionswhether people\ - \ could acquire these skills. Here, the authors attempted to develop the device\ - \ that supports nonmusicians toconquer this point using replaceable interfaces.\ - \ The authors expected thatchanging interfaces would bring similar effect as changing\ - \ playing style by theskillful player. This research aims to establish an environment\ - \ in whichnonmusicians and musicians share their individual musical ideas easily.\ - \ Here,the interaction design and configuration of the ``POWDER BOX'' is presented.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Yoshihito Nakanishi and\ - \ Seiichiro Matsumura and Chuichi Arakawa},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178620},\n issn = {2220-4806},\n keywords = {Musical instrument,\ - \ synthesizer, replaceable interface, sensors},\n month = {May},\n pages = {373--376},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {{POWDER}\ - \ {BOX}: An Interactive Device with Sensor Based Replaceable Interface For Musical\ - \ Session},\n url = {http://www.nime.org/proceedings/2013/nime2013_101.pdf},\n\ - \ year = {2013}\n}\n" + author: Olivier Perrotin and Christophe d'Alessandro + bibtex: "@inproceedings{Perrotin2013,\n abstract = {Touch user interfaces such as\ + \ touchpad or pen tablet are often used forcontinuous pitch control in synthesis\ + \ devices. Usually, pitch is set at thecontact point on the interface, thus introducing\ + \ possible pitch inaccuracies atthe note onset. This paper proposes a new algorithm,\ + \ based on an adaptiveattraction mapping, for improving initial pitch accuracy\ + \ with touch userinterfaces with continuous control. 
At each new contact on the\ + \ interface, thealgorithm adjusts the mapping to produce the most likely targeted\ + \ note of thescale in the vicinity of the contact point. Then, pitch remains continuouslyadjustable\ + \ as long as the contact is maintained, allowing for vibrato,portamento and other\ + \ subtle melodic control. The results of experimentscomparing the users' pitch\ + \ accuracy with and without the help of the algorithmshow that such a correction\ + \ enables to play sharply in tune at the contact withthe interface, regardless\ + \ the musical background of the player. Therefore, thedynamic mapping algorithm\ + \ allows for a clean and accurate attack when playing touch user interfaces for\ + \ controlling continuous pitch instruments like voicesynthesizers.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {Olivier Perrotin and Christophe\ + \ d'Alessandro},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178640},\n\ + \ issn = {2220-4806},\n keywords = {Sound synthesis control, touch user interfaces,\ + \ pen tablet, automatic correction, accuracy, precision},\n month = {May},\n pages\ + \ = {186--189},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Adaptive mapping for improved pitch accuracy on touch user interfaces},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_178.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178620 + doi: 10.5281/zenodo.1178640 issn: 2220-4806 - keywords: 'Musical instrument, synthesizer, replaceable interface, sensors' + keywords: 'Sound synthesis control, touch user interfaces, pen tablet, automatic + correction, accuracy, precision' month: May - pages: 373--376 + pages: 186--189 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'POWDER BOX: An Interactive Device with 
Sensor Based Replaceable Interface - For Musical Session' - url: http://www.nime.org/proceedings/2013/nime2013_101.pdf + title: Adaptive mapping for improved pitch accuracy on touch user interfaces + url: http://www.nime.org/proceedings/2013/nime2013_178.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Fohl2013 - abstract: 'This paper presents the design and implementation of agesture control - interface for a wave field synthesis system.The user''s motion is tracked by a - IR-camera-based trackingsystem. The developed connecting software processes thetracker - data to modify the positions of the virtual soundsources of the wave field synthesis - system. Due to the mod-ular design of the software, the triggered actions of the - ges-tures may easily be modified. Three elementary gestureswere designed and implemented: - Select / deselect, circularmovement and radial movement. The guidelines for gesturedesign - and detection are presented, and the user experiencesare discussed.' + ID: Kikukawa2013 + abstract: 'So far, there are few studies of string instruments with bows because + there aremany parameters to acquire skills and it is difficult to measure theseparameters. + Therefore, the aim of this paper is to propose a design of alearning environment + for a novice learner to acquire an accurate fingerposition skill. For achieving + the aim, we developed a learning environmentwhich can diagnose learner''s finger + position and give the learner advice byusing magnetic position sensors. The system + shows three windows; a fingerposition window for visualization of finger position, + a score window fordiagnosing finger position along the score and command prompt + window forshowing states of system and advices. Finally, we evaluated the system + by anexperiment. The experimental group improved accuracy values about fingerpositions + and also improved accuracy of pitches of sounds compared withcontrol group. These + results shows significant differences.' 
address: 'Daejeon, Republic of Korea' - author: Wolfgang Fohl and Malte Nogalski - bibtex: "@inproceedings{Fohl2013,\n abstract = {This paper presents the design and\ - \ implementation of agesture control interface for a wave field synthesis system.The\ - \ user's motion is tracked by a IR-camera-based trackingsystem. The developed\ - \ connecting software processes thetracker data to modify the positions of the\ - \ virtual soundsources of the wave field synthesis system. Due to the mod-ular\ - \ design of the software, the triggered actions of the ges-tures may easily be\ - \ modified. Three elementary gestureswere designed and implemented: Select / deselect,\ - \ circularmovement and radial movement. The guidelines for gesturedesign and detection\ - \ are presented, and the user experiencesare discussed.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Wolfgang Fohl and Malte Nogalski},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178522},\n issn = {2220-4806},\n keywords\ - \ = {Wave field synthesis, gesture control},\n month = {May},\n pages = {341--346},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {A Gesture\ - \ Control Interface for a Wave Field Synthesis System},\n url = {http://www.nime.org/proceedings/2013/nime2013_106.pdf},\n\ + author: Fumitaka Kikukawa and Sojiro Ishihara and Masato Soga and Hirokazu Taki + bibtex: "@inproceedings{Kikukawa2013,\n abstract = {So far, there are few studies\ + \ of string instruments with bows because there aremany parameters to acquire\ + \ skills and it is difficult to measure theseparameters. Therefore, the aim of\ + \ this paper is to propose a design of alearning environment for a novice learner\ + \ to acquire an accurate fingerposition skill. 
For achieving the aim, we developed\ + \ a learning environmentwhich can diagnose learner's finger position and give\ + \ the learner advice byusing magnetic position sensors. The system shows three\ + \ windows; a fingerposition window for visualization of finger position, a score\ + \ window fordiagnosing finger position along the score and command prompt window\ + \ forshowing states of system and advices. Finally, we evaluated the system by\ + \ anexperiment. The experimental group improved accuracy values about fingerpositions\ + \ and also improved accuracy of pitches of sounds compared withcontrol group.\ + \ These results shows significant differences.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Fumitaka Kikukawa and Sojiro Ishihara and Masato Soga\ + \ and Hirokazu Taki},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178580},\n\ + \ issn = {2220-4806},\n keywords = {Magnetic Position Sensors, String Instruments,\ + \ Skill, Learning Environment, Finger Position},\n month = {May},\n pages = {271--276},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Development\ + \ of A Learning Environment for Playing Erhu by Diagnosis and Advice regarding\ + \ Finger Position on Strings},\n url = {http://www.nime.org/proceedings/2013/nime2013_181.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178522 + doi: 10.5281/zenodo.1178580 issn: 2220-4806 - keywords: 'Wave field synthesis, gesture control' + keywords: 'Magnetic Position Sensors, String Instruments, Skill, Learning Environment, + Finger Position' month: May - pages: 341--346 + pages: 271--276 publisher: 'Graduate School of Culture Technology, KAIST' - title: A Gesture Control Interface for a Wave Field Synthesis System - url: http://www.nime.org/proceedings/2013/nime2013_106.pdf + 
title: Development of A Learning Environment for Playing Erhu by Diagnosis and Advice + regarding Finger Position on Strings + url: http://www.nime.org/proceedings/2013/nime2013_181.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Burlet2013 - abstract: 'Sensor-based gesture recognition is investigated as a possible solution - to theproblem of managing an overwhelming number of audio effects in live guitarperformances. - A realtime gesture recognition system, which automaticallytoggles digital audio - effects according to gestural information captured by anaccelerometer attached - to the body of a guitar, is presented. To supplement theseveral predefined gestures - provided by the recognition system, personalizedgestures may be trained by the - user. Upon successful recognition of a gesture,the corresponding audio effects - are applied to the guitar signal and visualfeedback is provided to the user. An - evaluation of the system yielded 86%accuracy for user-independent recognition - and 99% accuracy for user-dependentrecognition, on average.' + ID: Bortz2013 + abstract: 'Mountains and Valleys (an anonymous name for confidentiality) is a communal,site-specific + installation that takes shape as a spatially-responsiveaudio-visual field. The + public participates in the creation of theinstallation, resulting in shared ownership + of the work between both theartists and participants. Furthermore, the installation + takes new shape in eachrealization, both to incorporate the constraints and affordances + of eachspecific site, as well as to address the lessons learned from the previousiteration. + This paper describes the development and execution of Mountains andValleys over + its most recent version, with an eye toward the next iteration ata prestigious + art museum during a national festival in Washington, D.C.' 
address: 'Daejeon, Republic of Korea' - author: Gregory Burlet and Ichiro Fujinaga - bibtex: "@inproceedings{Burlet2013,\n abstract = {Sensor-based gesture recognition\ - \ is investigated as a possible solution to theproblem of managing an overwhelming\ - \ number of audio effects in live guitarperformances. A realtime gesture recognition\ - \ system, which automaticallytoggles digital audio effects according to gestural\ - \ information captured by anaccelerometer attached to the body of a guitar, is\ - \ presented. To supplement theseveral predefined gestures provided by the recognition\ - \ system, personalizedgestures may be trained by the user. Upon successful recognition\ - \ of a gesture,the corresponding audio effects are applied to the guitar signal\ - \ and visualfeedback is provided to the user. An evaluation of the system yielded\ - \ 86%accuracy for user-independent recognition and 99% accuracy for user-dependentrecognition,\ - \ on average.},\n address = {Daejeon, Republic of Korea},\n author = {Gregory\ - \ Burlet and Ichiro Fujinaga},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178488},\n\ - \ issn = {2220-4806},\n keywords = {Augmented instrument, gesture recognition,\ - \ accelerometer, pattern recognition, performance practice},\n month = {May},\n\ - \ pages = {41--44},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Stompboxes: Kicking the Habit},\n url = {http://www.nime.org/proceedings/2013/nime2013_109.pdf},\n\ + author: Brennon Bortz and Aki Ishida and Ivica Ico Bukvic and R. Benjamin Knapp + bibtex: "@inproceedings{Bortz2013,\n abstract = {Mountains and Valleys (an anonymous\ + \ name for confidentiality) is a communal,site-specific installation that takes\ + \ shape as a spatially-responsiveaudio-visual field. 
The public participates in\ + \ the creation of theinstallation, resulting in shared ownership of the work between\ + \ both theartists and participants. Furthermore, the installation takes new shape\ + \ in eachrealization, both to incorporate the constraints and affordances of eachspecific\ + \ site, as well as to address the lessons learned from the previousiteration.\ + \ This paper describes the development and execution of Mountains andValleys over\ + \ its most recent version, with an eye toward the next iteration ata prestigious\ + \ art museum during a national festival in Washington, D.C.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Brennon Bortz and Aki Ishida and Ivica Ico Bukvic\ + \ and R. Benjamin Knapp},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178484},\n\ + \ issn = {2220-4806},\n keywords = {Participatory creation, communal interaction,\ + \ fields, interactive installation, Japanese lanterns},\n month = {May},\n pages\ + \ = {73--78},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Lantern Field: Exploring Participatory Design of a Communal, Spatially\ + \ Responsive Installation},\n url = {http://www.nime.org/proceedings/2013/nime2013_192.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178488 + doi: 10.5281/zenodo.1178484 issn: 2220-4806 - keywords: 'Augmented instrument, gesture recognition, accelerometer, pattern recognition, - performance practice' + keywords: 'Participatory creation, communal interaction, fields, interactive installation, + Japanese lanterns' month: May - pages: 41--44 + pages: 73--78 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Stompboxes: Kicking the Habit' - url: http://www.nime.org/proceedings/2013/nime2013_109.pdf + title: 'Lantern Field: Exploring Participatory 
Design of a Communal, Spatially Responsive + Installation' + url: http://www.nime.org/proceedings/2013/nime2013_192.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Jensenius2013 - abstract: 'The paper presents the Kinectofon, an instrument for creating sounds - through free-hand interaction in a 3D space. The instrument is based on the RGB - anddepth image streams retrieved from a Microsoft Kinect sensor device. These - twoimage streams are used to create different types of motiongrams, which, again, - are used as the source material for a sonification process based on inverse FFT. - The instrument is intuitive to play, allowing the performer to createsound by - "touching" a virtual sound wall.' + ID: Soria2013 + abstract: 'This work presents a general framework method for cre-ating spatialization + systems focused on electroacoustic andacousmatic music performance and creation. + Although weused the logistic equation as orbit generator, any dynami-cal system + could be suitable. The main idea lies on generating vectors of Rn with entriesfrom + data series of di_x000B_erent orbits from an speci_x000C_c dynami-cal system. + Such vectors will be called system vectors. Ourproposal is to create ordered paths + between those pointsor system vectors using the Splines Quark library by Felix,1which + allow us to generate smooth curves joining the points.Finally, interpolating that + result with a _x000C_xed sample value,we are able to obtain speci_x000C_c and + independent multidimen-sional panning trajectories for each speaker array and + forany number of sound sources.Our contribution is intended to be at the very + root of the compositionalprocess giving to the creator a method for exploring + new ways for spatialsound placement over time for a wide range of speakers ar-rangements. 
+ The advantage of using controlled chaotic dy-namical systems like the logistic + equation, lies on the factthat the composer can freely and consciously choose + be-tween stable or irregular behaviour for the orbits that willgenerate his/her + panning trajectories. Besides, with the useof isometries, it is possible to generate + di_x000B_erent related or-bits with one single evaluation of the system. The use + ofthe spline method in SuperCollider allows the possibilityof joining and relating + those values from orbits into a wellde_x000C_ned and coherent general system. + Further research willinclude controlling synthesis parameters in the same waywe + created panning trajectories.' address: 'Daejeon, Republic of Korea' - author: Alexander Refsum Jensenius - bibtex: "@inproceedings{Jensenius2013,\n abstract = {The paper presents the Kinectofon,\ - \ an instrument for creating sounds through free-hand interaction in a 3D space.\ - \ The instrument is based on the RGB anddepth image streams retrieved from a Microsoft\ - \ Kinect sensor device. These twoimage streams are used to create different types\ - \ of motiongrams, which, again, are used as the source material for a sonification\ - \ process based on inverse FFT. 
The instrument is intuitive to play, allowing\ - \ the performer to createsound by \"touching\" a virtual sound wall.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {Alexander Refsum Jensenius},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178564},\n issn = {2220-4806},\n keywords\ - \ = {Kinect, motiongram, sonification, video analysis},\n month = {May},\n pages\ - \ = {196--197},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Kinectofon: Performing with Shapes in Planes},\n url = {http://www.nime.org/proceedings/2013/nime2013_110.pdf},\n\ + author: Edmar Soria and Roberto Morales-Manzanares + bibtex: "@inproceedings{Soria2013,\n abstract = {This work presents a general framework\ + \ method for cre-ating spatialization systems focused on electroacoustic andacousmatic\ + \ music performance and creation. Although weused the logistic equation as orbit\ + \ generator, any dynami-cal system could be suitable. The main idea lies on generating\ + \ vectors of Rn with entriesfrom data series of di_x000B_erent orbits from an\ + \ speci_x000C_c dynami-cal system. Such vectors will be called system vectors.\ + \ Ourproposal is to create ordered paths between those pointsor system vectors\ + \ using the Splines Quark library by Felix,1which allow us to generate smooth\ + \ curves joining the points.Finally, interpolating that result with a _x000C_xed\ + \ sample value,we are able to obtain speci_x000C_c and independent multidimen-sional\ + \ panning trajectories for each speaker array and forany number of sound sources.Our\ + \ contribution is intended to be at the very root of the compositionalprocess\ + \ giving to the creator a method for exploring new ways for spatialsound placement\ + \ over time for a wide range of speakers ar-rangements. 
The advantage of using\ + \ controlled chaotic dy-namical systems like the logistic equation, lies on the\ + \ factthat the composer can freely and consciously choose be-tween stable or irregular\ + \ behaviour for the orbits that willgenerate his/her panning trajectories. Besides,\ + \ with the useof isometries, it is possible to generate di_x000B_erent related\ + \ or-bits with one single evaluation of the system. The use ofthe spline method\ + \ in SuperCollider allows the possibilityof joining and relating those values\ + \ from orbits into a wellde_x000C_ned and coherent general system. Further research\ + \ willinclude controlling synthesis parameters in the same waywe created panning\ + \ trajectories.},\n address = {Daejeon, Republic of Korea},\n author = {Edmar\ + \ Soria and Roberto Morales-Manzanares},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178664},\n\ + \ issn = {2220-4806},\n keywords = {NIME, spatialization, dynamical systems, chaos},\n\ + \ month = {May},\n pages = {79--83},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Multidimensional sound spatialization by means\ + \ of chaotic dynamical systems},\n url = {http://www.nime.org/proceedings/2013/nime2013_195.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178564 + doi: 10.5281/zenodo.1178664 issn: 2220-4806 - keywords: 'Kinect, motiongram, sonification, video analysis' + keywords: 'NIME, spatialization, dynamical systems, chaos' month: May - pages: 196--197 + pages: 79--83 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Kinectofon: Performing with Shapes in Planes' - url: http://www.nime.org/proceedings/2013/nime2013_110.pdf + title: Multidimensional sound spatialization by means of chaotic dynamical systems + url: 
http://www.nime.org/proceedings/2013/nime2013_195.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Fried2013 - abstract: 'We present a method for automatic feature extraction and cross-modal - mappingusing deep learning. Our system uses stacked autoencoders to learn a layeredfeature - representation of the data. Feature vectors from two (or more)different domains - are mapped to each other, effectively creating a cross-modalmapping. Our system - can either run fully unsupervised, or it can use high-levellabeling to fine-tune - the mapping according a user''s needs. We show severalapplications for our method, - mapping sound to or from images or gestures. Weevaluate system performance both - in standalone inference tasks and incross-modal mappings.' + ID: Rosselet2013 + abstract: 'This paper presents the musical interactions aspects of the design anddevelopment + of a web-based interactive music collaboration system called JamOn. Following + a design science approach, this system is being built accordingto principles taken + from usability engineering and human computer interaction(HCI). The goal of the + system is to allow people with no to little musicalbackground to play a song collaboratively. + The musicians control the musicalcontent and structure of the song thanks to an + interface relying on the freeinking metaphor. One contribution of this interface + is that it displays musicalpatterns of different lengths in the same space. The + design of Jam On is basedon a list of performance criteria aimed at ensuring the + musicality of theperformance and the interactivity of the technical system. The + paper comparestwo alternative interfaces used for the system and explores the + various stagesof the design process aimed at making the system as musical and + interactive aspossible.' 
address: 'Daejeon, Republic of Korea' - author: Ohad Fried and Rebecca Fiebrink - bibtex: "@inproceedings{Fried2013,\n abstract = {We present a method for automatic\ - \ feature extraction and cross-modal mappingusing deep learning. Our system uses\ - \ stacked autoencoders to learn a layeredfeature representation of the data. Feature\ - \ vectors from two (or more)different domains are mapped to each other, effectively\ - \ creating a cross-modalmapping. Our system can either run fully unsupervised,\ - \ or it can use high-levellabeling to fine-tune the mapping according a user's\ - \ needs. We show severalapplications for our method, mapping sound to or from\ - \ images or gestures. Weevaluate system performance both in standalone inference\ - \ tasks and incross-modal mappings.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Ohad Fried and Rebecca Fiebrink},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178528},\n issn = {2220-4806},\n keywords = {Deep learning,\ - \ feature learning, mapping, gestural control},\n month = {May},\n pages = {531--534},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Cross-modal\ - \ Sound Mapping Using Deep Learning},\n url = {http://www.nime.org/proceedings/2013/nime2013_111.pdf},\n\ + author: Ulysse Rosselet and Alain Renaud + bibtex: "@inproceedings{Rosselet2013,\n abstract = {This paper presents the musical\ + \ interactions aspects of the design anddevelopment of a web-based interactive\ + \ music collaboration system called JamOn. Following a design science approach,\ + \ this system is being built accordingto principles taken from usability engineering\ + \ and human computer interaction(HCI). The goal of the system is to allow people\ + \ with no to little musicalbackground to play a song collaboratively. 
The musicians\ + \ control the musicalcontent and structure of the song thanks to an interface\ + \ relying on the freeinking metaphor. One contribution of this interface is that\ + \ it displays musicalpatterns of different lengths in the same space. The design\ + \ of Jam On is basedon a list of performance criteria aimed at ensuring the musicality\ + \ of theperformance and the interactivity of the technical system. The paper comparestwo\ + \ alternative interfaces used for the system and explores the various stagesof\ + \ the design process aimed at making the system as musical and interactive aspossible.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Ulysse Rosselet and Alain\ + \ Renaud},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178650},\n issn = {2220-4806},\n\ + \ keywords = {Networked performance, interface design, mapping, web-based music\ + \ application},\n month = {May},\n pages = {394--399},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Jam On: a new interface for\ + \ web-based collective music performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_196.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178528 + doi: 10.5281/zenodo.1178650 issn: 2220-4806 - keywords: 'Deep learning, feature learning, mapping, gestural control' + keywords: 'Networked performance, interface design, mapping, web-based music application' month: May - pages: 531--534 + pages: 394--399 publisher: 'Graduate School of Culture Technology, KAIST' - title: Cross-modal Sound Mapping Using Deep Learning - url: http://www.nime.org/proceedings/2013/nime2013_111.pdf + title: 'Jam On: a new interface for web-based collective music performance' + url: http://www.nime.org/proceedings/2013/nime2013_196.pdf year: 2013 - ENTRYTYPE: 
inproceedings - ID: Kapur2013 - abstract: 'This paper describes the creation of new interfaces that extend traditionalKorean - music and dance. Specifically, this research resulted in the design ofthe eHaegum - (Korean bowed instrument), eJanggu (Korean drum), and ZiOm wearableinterfaces. - The paper describes the process of making these new interfaces aswell as how they - have been used to create new music and forms of digital artmaking that blend traditional - practice with modern techniques.' + ID: Lai2013 + abstract: 'This paper presents observations from investigating audience experience + of apractice-based research in live sound performance with electronics. In seekingto + understand the communication flow and the engagement between performer andaudience + in this particular performance context, we designed an experiment thatinvolved + the following steps: (a) performing WOSAWIP at a new media festival,(b) conducting + a qualitative research study with audience members and (c)analyzing the data for + new insights.' address: 'Daejeon, Republic of Korea' - author: Ajay Kapur and Dae Hong Kim and Raakhi Kapur and Kisoon Eom - bibtex: "@inproceedings{Kapur2013,\n abstract = {This paper describes the creation\ - \ of new interfaces that extend traditionalKorean music and dance. Specifically,\ - \ this research resulted in the design ofthe eHaegum (Korean bowed instrument),\ - \ eJanggu (Korean drum), and ZiOm wearableinterfaces. 
The paper describes the\ - \ process of making these new interfaces aswell as how they have been used to\ - \ create new music and forms of digital artmaking that blend traditional practice\ - \ with modern techniques.},\n address = {Daejeon, Republic of Korea},\n author\ - \ = {Ajay Kapur and Dae Hong Kim and Raakhi Kapur and Kisoon Eom},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178576},\n issn = {2220-4806},\n keywords\ - \ = {Hyperinstrument, Korean interface design, wearable sensors, dance controllers,\ - \ bowed controllers, drum controllers},\n month = {May},\n pages = {45--48},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {New Interfaces\ - \ for Traditional Korean Music and Dance},\n url = {http://www.nime.org/proceedings/2013/nime2013_113.pdf},\n\ - \ year = {2013}\n}\n" + author: Chi-Hsia Lai and Till Bovermann + bibtex: "@inproceedings{Lai2013,\n abstract = {This paper presents observations\ + \ from investigating audience experience of apractice-based research in live sound\ + \ performance with electronics. 
In seekingto understand the communication flow\ + \ and the engagement between performer andaudience in this particular performance\ + \ context, we designed an experiment thatinvolved the following steps: (a) performing\ + \ WOSAWIP at a new media festival,(b) conducting a qualitative research study\ + \ with audience members and (c)analyzing the data for new insights.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {Chi-Hsia Lai and Till Bovermann},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178590},\n issn = {2220-4806},\n\ + \ keywords = {Audience Experience Study, Live Performance, Evaluation, Research\ + \ Methods},\n month = {May},\n pages = {170--173},\n publisher = {Graduate School\ + \ of Culture Technology, KAIST},\n title = {Audience Experience in Sound Performance},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_197.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178576 + doi: 10.5281/zenodo.1178590 issn: 2220-4806 - keywords: 'Hyperinstrument, Korean interface design, wearable sensors, dance controllers, - bowed controllers, drum controllers' + keywords: 'Audience Experience Study, Live Performance, Evaluation, Research Methods' month: May - pages: 45--48 + pages: 170--173 publisher: 'Graduate School of Culture Technology, KAIST' - title: New Interfaces for Traditional Korean Music and Dance - url: http://www.nime.org/proceedings/2013/nime2013_113.pdf + title: Audience Experience in Sound Performance + url: http://www.nime.org/proceedings/2013/nime2013_197.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Zhang2013 - abstract: 'The Microsoft Kinect is a popular and versatile input devicefor musical - interfaces. 
However, using the Kinect for suchinterfaces requires not only signi_x000C_cant - programming experience,but also the use of complex geometry or machinelearning - techniques to translate joint positions into higherlevel gestures. We created - the Kinect Instrument Builder(KIB) to address these di_x000E_culties by structuring - gesturalinterfaces as combinations of gestural widgets. KIB allowsthe user to - design an instrument by con_x000C_guring gesturalprimitives, each with a set of - simple but attractive visualfeedback elements. After designing an instrument on - KIB''sweb interface, users can play the instrument on KIB''s performanceinterface, - which displays visualizations and transmitsOSC messages to other applications - for sound synthesisor further remapping.' + ID: Everett2013 + abstract: 'This presentation-demonstration discusses the creation of FIRST LIFE, + a75-minute mixed media performance for string quartet, live audio processing,live + motion capture video, and audience participation utilizing stochasticmodels of + chemical data provided by Martha Grover''s Research Group at theSchool of Chemical + and Biomolecular Engineering at Georgia Institute ofTechnology. Each section of + this work is constructed from contingent outcomesdrawn from biochemical research + exploring possible early Earth formations oforganic compounds. Audio-video excerpts + of the composition will be played during the presentation.Max patches for sonification + and for generating stochastic processes will bedemonstrated as well.' address: 'Daejeon, Republic of Korea' - author: Edward Zhang - bibtex: "@inproceedings{Zhang2013,\n abstract = {The Microsoft Kinect is a popular\ - \ and versatile input devicefor musical interfaces. However, using the Kinect\ - \ for suchinterfaces requires not only signi_x000C_cant programming experience,but\ - \ also the use of complex geometry or machinelearning techniques to translate\ - \ joint positions into higherlevel gestures. 
We created the Kinect Instrument\ - \ Builder(KIB) to address these di_x000E_culties by structuring gesturalinterfaces\ - \ as combinations of gestural widgets. KIB allowsthe user to design an instrument\ - \ by con_x000C_guring gesturalprimitives, each with a set of simple but attractive\ - \ visualfeedback elements. After designing an instrument on KIB'sweb interface,\ - \ users can play the instrument on KIB's performanceinterface, which displays\ - \ visualizations and transmitsOSC messages to other applications for sound synthesisor\ - \ further remapping.},\n address = {Daejeon, Republic of Korea},\n author = {Edward\ - \ Zhang},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178698},\n issn = {2220-4806},\n\ - \ keywords = {Kinect, gesture, widgets, OSC, mapping},\n month = {May},\n pages\ - \ = {519--524},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {KIB: Simplifying Gestural Instrument Creation Using Widgets},\n url\ - \ = {http://www.nime.org/proceedings/2013/nime2013_114.pdf},\n year = {2013}\n\ - }\n" + author: Steve Everett + bibtex: "@inproceedings{Everett2013,\n abstract = {This presentation-demonstration\ + \ discusses the creation of FIRST LIFE, a75-minute mixed media performance for\ + \ string quartet, live audio processing,live motion capture video, and audience\ + \ participation utilizing stochasticmodels of chemical data provided by Martha\ + \ Grover's Research Group at theSchool of Chemical and Biomolecular Engineering\ + \ at Georgia Institute ofTechnology. Each section of this work is constructed\ + \ from contingent outcomesdrawn from biochemical research exploring possible early\ + \ Earth formations oforganic compounds. 
Audio-video excerpts of the composition\ + \ will be played during the presentation.Max patches for sonification and for\ + \ generating stochastic processes will bedemonstrated as well.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Steve Everett},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178508},\n issn = {2220-4806},\n keywords = {Data-driven\ + \ composition, sonification, live electronics-video},\n month = {May},\n pages\ + \ = {277--278},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Sonifying Chemical Evolution},\n url = {http://www.nime.org/proceedings/2013/nime2013_198.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178698 + doi: 10.5281/zenodo.1178508 issn: 2220-4806 - keywords: 'Kinect, gesture, widgets, OSC, mapping' + keywords: 'Data-driven composition, sonification, live electronics-video' month: May - pages: 519--524 + pages: 277--278 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'KIB: Simplifying Gestural Instrument Creation Using Widgets' - url: http://www.nime.org/proceedings/2013/nime2013_114.pdf + title: Sonifying Chemical Evolution + url: http://www.nime.org/proceedings/2013/nime2013_198.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Hochenbaum2013 - abstract: 'Music education is a rich subject with many approaches and methodologies - thathave developed over hundreds of years. More than ever, technology playsimportant - roles at many levels of a musician''s practice. This paper begins toexplore some - of the ways in which technology developed out of the NIMEcommunity (specifically - hyperinstruments), can inform a musician''s dailypractice, through short and long - term metrics tracking and data visualization.' 
+ ID: McKinney2013 + abstract: 'In this paper we present Shoggoth, a 3D graphics based program for performingnetwork + music. In Shoggoth, users utilize video game style controls to navigateand manipulate + a grid of malleable height maps. Sequences can be created bydefining paths through + the maps which trigger and modulate audio playback. Withrespect to a context of + computer music performance, and specific problems innetwork music, design goals + and technical challenges are outlined. The systemis evaluated through established + taxonomies for describing interfaces, followedby an enumeration of the merits + of 3D graphics in networked performance. Indiscussing proposed improvements to + Shoggoth, design suggestions for otherdevelopers and network musicians are drawn + out.' address: 'Daejeon, Republic of Korea' - author: Jordan Hochenbaum and Ajay Kapur - bibtex: "@inproceedings{Hochenbaum2013,\n abstract = {Music education is a rich\ - \ subject with many approaches and methodologies thathave developed over hundreds\ - \ of years. More than ever, technology playsimportant roles at many levels of\ - \ a musician's practice. 
This paper begins toexplore some of the ways in which\ - \ technology developed out of the NIMEcommunity (specifically hyperinstruments),\ - \ can inform a musician's dailypractice, through short and long term metrics tracking\ - \ and data visualization.},\n address = {Daejeon, Republic of Korea},\n author\ - \ = {Jordan Hochenbaum and Ajay Kapur},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178552},\n\ - \ issn = {2220-4806},\n keywords = {Hyperinstruments, Pedagogy, Metrics, Ezither,\ - \ Practice Room},\n month = {May},\n pages = {307--312},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Toward The Future Practice\ - \ Room: Empowering Musical Pedagogy through Hyperinstruments},\n url = {http://www.nime.org/proceedings/2013/nime2013_116.pdf},\n\ + author: Chad McKinney and Nick Collins + bibtex: "@inproceedings{McKinney2013,\n abstract = {In this paper we present Shoggoth,\ + \ a 3D graphics based program for performingnetwork music. In Shoggoth, users\ + \ utilize video game style controls to navigateand manipulate a grid of malleable\ + \ height maps. Sequences can be created bydefining paths through the maps which\ + \ trigger and modulate audio playback. Withrespect to a context of computer music\ + \ performance, and specific problems innetwork music, design goals and technical\ + \ challenges are outlined. The systemis evaluated through established taxonomies\ + \ for describing interfaces, followedby an enumeration of the merits of 3D graphics\ + \ in networked performance. 
Indiscussing proposed improvements to Shoggoth, design\ + \ suggestions for otherdevelopers and network musicians are drawn out.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {Chad McKinney and Nick Collins},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178606},\n issn = {2220-4806},\n\ + \ keywords = {3D, Generative, Network, Environment},\n month = {May},\n pages\ + \ = {400--405},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {An Interactive {3D} Network Music Space},\n url = {http://www.nime.org/proceedings/2013/nime2013_199.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178552 + doi: 10.5281/zenodo.1178606 issn: 2220-4806 - keywords: 'Hyperinstruments, Pedagogy, Metrics, Ezither, Practice Room' + keywords: '3D, Generative, Network, Environment' month: May - pages: 307--312 + pages: 400--405 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Toward The Future Practice Room: Empowering Musical Pedagogy through Hyperinstruments' - url: http://www.nime.org/proceedings/2013/nime2013_116.pdf + title: An Interactive 3D Network Music Space + url: http://www.nime.org/proceedings/2013/nime2013_199.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Michon2013 - abstract: 'Black Box is a site based installation that allows users to create uniquesounds - through physical interaction. The installation consists of a geodesicdome, surround - sound speakers, and a custom instrument suspended from the apexof thedome. Audience - members entering the space are able to create sound by strikingor rubbing the - cube, and are able to control a delay system by moving the cubewithin the space.' 
+ ID: Ferguson2013 + abstract: 'Feedback created by guitars and amplifiers is difficult to use in musicalsettings + -- parameters such as pitch and loudness are hard to specify preciselyby fretting + a string or by holding the guitar near an amplifier. This researchinvestigates + methods for controlling the level and pitch of the feedbackproduced by a guitar + and amplifier, which are based on incorporatingcorpus-based control into the system. + Two parameters are used to define thecontrol parameter space -- a simple automatic + gain control system to controlthe output level, and a band-pass filter frequency + for controlling the pitch ofthe feedback. This control parameter space is mapped + to a corpus of soundscreated by these parameters and recorded, and these sounds + are analysed usingsoftware created for concatenative synthesis. Following this + process, thedescriptors taken from the analysis can be used to select control + parametersfrom the feedback system.' address: 'Daejeon, Republic of Korea' - author: Romain Michon and Myles Borins and David Meisenholder - bibtex: "@inproceedings{Michon2013,\n abstract = {Black Box is a site based installation\ - \ that allows users to create uniquesounds through physical interaction. The installation\ - \ consists of a geodesicdome, surround sound speakers, and a custom instrument\ - \ suspended from the apexof thedome. 
Audience members entering the space are able\ - \ to create sound by strikingor rubbing the cube, and are able to control a delay\ - \ system by moving the cubewithin the space.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Romain Michon and Myles Borins and David Meisenholder},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178612},\n issn = {2220-4806},\n\ - \ keywords = {Satellite CCRMA, Beagleboard, PureData, Faust, Embedded-Linux, Open\ - \ Sound Control},\n month = {May},\n pages = {464--465},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {The Black Box},\n url = {http://www.nime.org/proceedings/2013/nime2013_117.pdf},\n\ + author: Sam Ferguson and Aengus Martin and Andrew Johnston + bibtex: "@inproceedings{Ferguson2013,\n abstract = {Feedback created by guitars\ + \ and amplifiers is difficult to use in musicalsettings -- parameters such as\ + \ pitch and loudness are hard to specify preciselyby fretting a string or by holding\ + \ the guitar near an amplifier. This researchinvestigates methods for controlling\ + \ the level and pitch of the feedbackproduced by a guitar and amplifier, which\ + \ are based on incorporatingcorpus-based control into the system. Two parameters\ + \ are used to define thecontrol parameter space -- a simple automatic gain control\ + \ system to controlthe output level, and a band-pass filter frequency for controlling\ + \ the pitch ofthe feedback. This control parameter space is mapped to a corpus\ + \ of soundscreated by these parameters and recorded, and these sounds are analysed\ + \ usingsoftware created for concatenative synthesis. 
Following this process, thedescriptors\ + \ taken from the analysis can be used to select control parametersfrom the feedback\ + \ system.},\n address = {Daejeon, Republic of Korea},\n author = {Sam Ferguson\ + \ and Aengus Martin and Andrew Johnston},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178518},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {541--546},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {A corpus-based method for controlling\ + \ guitar feedback},\n url = {http://www.nime.org/proceedings/2013/nime2013_200.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178612 + doi: 10.5281/zenodo.1178518 issn: 2220-4806 - keywords: 'Satellite CCRMA, Beagleboard, PureData, Faust, Embedded-Linux, Open Sound - Control' month: May - pages: 464--465 + pages: 541--546 publisher: 'Graduate School of Culture Technology, KAIST' - title: The Black Box - url: http://www.nime.org/proceedings/2013/nime2013_117.pdf + title: A corpus-based method for controlling guitar feedback + url: http://www.nime.org/proceedings/2013/nime2013_200.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Choi2013 - abstract: 'The advent of Web Audio API in 2011 marked a significant advance for - web-basedmusic systems by enabling real-time sound synthesis on web browsers simply - bywriting JavaScript code. While this powerful functionality has arrived there - isa yet unaddressed need for an extension to the API to fully reveal itspotential. - To meet this need, a JavaScript library dubbed WAAX was created tofacilitate music - and audio programming based on Web Audio API bypassingunderlying tasks and augmenting - useful features. 
In this paper, we describecommon issues in web audio programming, - illustrate how WAAX can speed up thedevelopment, and discuss future developments.' + ID: KITA2013 + abstract: 'When people learn using Web-based educational resources such as an LMS(Learning + Management System) or other e-learning related systems, they aresitting in front + of their own computer at home and are often physicallyisolated from other online + learners. In some courses they are typically gettingin touch online with each + others for doing some particular group workassignments, but most of the time they + must do their own learning tasks alone.In other courses simply the individual + assignments and quizzes are provided, sothe learners are alone all the time from + the beginning until the end of thecourse.In order to keep the learners'' motivation, + it helps to feel other learnersdoing the same learning activities and belonging + to the same course.Communicating formally or informally with other learners via + Social NetworkingServices or something is one way for learners to get such a feeling, + though ina way it might sometimes disturb their learning. Sonification of the + access logof the e-learning system could be another indirect way to provide such + afeeling.' address: 'Daejeon, Republic of Korea' - author: Hongchan Choi and Jonathan Berger - bibtex: "@inproceedings{Choi2013,\n abstract = {The advent of Web Audio API in 2011\ - \ marked a significant advance for web-basedmusic systems by enabling real-time\ - \ sound synthesis on web browsers simply bywriting JavaScript code. While this\ - \ powerful functionality has arrived there isa yet unaddressed need for an extension\ - \ to the API to fully reveal itspotential. To meet this need, a JavaScript library\ - \ dubbed WAAX was created tofacilitate music and audio programming based on Web\ - \ Audio API bypassingunderlying tasks and augmenting useful features. 
In this\ - \ paper, we describecommon issues in web audio programming, illustrate how WAAX\ - \ can speed up thedevelopment, and discuss future developments.},\n address =\ - \ {Daejeon, Republic of Korea},\n author = {Hongchan Choi and Jonathan Berger},\n\ + author: Toshihiro KITA and Naotoshi Osaka + bibtex: "@inproceedings{KITA2013,\n abstract = {When people learn using Web-based\ + \ educational resources such as an LMS(Learning Management System) or other e-learning\ + \ related systems, they aresitting in front of their own computer at home and\ + \ are often physicallyisolated from other online learners. In some courses they\ + \ are typically gettingin touch online with each others for doing some particular\ + \ group workassignments, but most of the time they must do their own learning\ + \ tasks alone.In other courses simply the individual assignments and quizzes are\ + \ provided, sothe learners are alone all the time from the beginning until the\ + \ end of thecourse.In order to keep the learners' motivation, it helps to feel\ + \ other learnersdoing the same learning activities and belonging to the same course.Communicating\ + \ formally or informally with other learners via Social NetworkingServices or\ + \ something is one way for learners to get such a feeling, though ina way it might\ + \ sometimes disturb their learning. 
Sonification of the access logof the e-learning\ + \ system could be another indirect way to provide such afeeling.},\n address =\ + \ {Daejeon, Republic of Korea},\n author = {Toshihiro KITA and Naotoshi Osaka},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178494},\n issn = {2220-4806},\n\ - \ keywords = {Web Audio API, Chrome, JavaScript, web-based music system, collaborative\ - \ music making, audience participation},\n month = {May},\n pages = {499--502},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {WAAX:\ - \ Web Audio {API} eXtension},\n url = {http://www.nime.org/proceedings/2013/nime2013_119.pdf},\n\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178584},\n issn = {2220-4806},\n\ + \ keywords = {e-learning, online learners, Moodle, Csound, realtime sonification,\ + \ OSC (Open Sound Control)},\n month = {May},\n pages = {198--199},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {Providing a feeling\ + \ of other remote learners' presence in an online learning environment via realtime\ + \ sonification of Moodle access log},\n url = {http://www.nime.org/proceedings/2013/nime2013_203.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178494 + doi: 10.5281/zenodo.1178584 issn: 2220-4806 - keywords: 'Web Audio API, Chrome, JavaScript, web-based music system, collaborative - music making, audience participation' + keywords: 'e-learning, online learners, Moodle, Csound, realtime sonification, OSC + (Open Sound Control)' month: May - pages: 499--502 + pages: 198--199 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'WAAX: Web Audio API eXtension' - url: http://www.nime.org/proceedings/2013/nime2013_119.pdf + title: Providing a feeling of other remote learners' presence in an online learning + 
environment via realtime sonification of Moodle access log + url: http://www.nime.org/proceedings/2013/nime2013_203.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Hamano2013 - abstract: 'Electroencephalography (EEG) has been used to generate music for over - 40 years,but the most recent developments in brain--computer interfaces (BCI) - allowgreater control and more flexible expression for using new musical instrumentswith - EEG. We developed a real-time musical performance system using BCItechnology and - sonification techniques to generate imagined musical chords withorganically fluctuating - timbre. We aim to emulate the expressivity oftraditional acoustic instruments. - The BCI part of the system extracts patternsfrom the neural activity while a performer - imagines a score of music. Thesonification part of the system captures non-stationary - changes in the brainwaves and reflects them in the timbre by additive synthesis. - In this paper, wediscuss the conceptual design, system development, and the performance - of thisinstrument.' + ID: Gelineck2013 + abstract: 'This paper presents the continuous work towards the development of an + interface for music mixing targeted towards expert sound technicians and producers. + The mixing interface uses a stage metaphor mapping scheme where audio channels + arerepresented as digital widgets on a 2D surface. These can be controlled bymulti + touch or by smart tangibles, which are tangible blocks with embedded sensors. + The smart tangibles developed for this interface are able to sense howthey are + grasped by the user. The paper presents the design of the mixing interface including + the smart tangible as well as a preliminary user study involving a hands-on focus + group session where 5 different control technologiesare contrasted and discussed. 
+ Preliminary findings suggest that smart tangibles were preferred, but that an + optimal interface would include a combination of touch, smart tangibles and an + extra function control tangible for extending the functionality of the smart tangibles. + Finally, the interface should incorporate both an edit and mix mode---the latter + displaying very limited visual feedback in order to force users to focus their + attention to listening instead of the interface.' address: 'Daejeon, Republic of Korea' - author: Takayuki Hamano and Tomasz Rutkowski and Hiroko Terasawa and Kazuo Okanoya - and Kiyoshi Furukawa - bibtex: "@inproceedings{Hamano2013,\n abstract = {Electroencephalography (EEG) has\ - \ been used to generate music for over 40 years,but the most recent developments\ - \ in brain--computer interfaces (BCI) allowgreater control and more flexible expression\ - \ for using new musical instrumentswith EEG. We developed a real-time musical\ - \ performance system using BCItechnology and sonification techniques to generate\ - \ imagined musical chords withorganically fluctuating timbre. We aim to emulate\ - \ the expressivity oftraditional acoustic instruments. The BCI part of the system\ - \ extracts patternsfrom the neural activity while a performer imagines a score\ - \ of music. Thesonification part of the system captures non-stationary changes\ - \ in the brainwaves and reflects them in the timbre by additive synthesis. 
In\ - \ this paper, wediscuss the conceptual design, system development, and the performance\ - \ of thisinstrument.},\n address = {Daejeon, Republic of Korea},\n author = {Takayuki\ - \ Hamano and Tomasz Rutkowski and Hiroko Terasawa and Kazuo Okanoya and Kiyoshi\ - \ Furukawa},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178542},\n issn\ - \ = {2220-4806},\n keywords = {Brain-computer interface (BCI), qualitative and\ - \ quantitative information, classification, sonification},\n month = {May},\n\ - \ pages = {49--54},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Generating an Integrated Musical Expression with a Brain--Computer\ - \ Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_120.pdf},\n\ - \ year = {2013}\n}\n" + author: Steven Gelineck and Dan Overholt and Morten Büchert and Jesper Andersen + bibtex: "@inproceedings{Gelineck2013,\n abstract = {This paper presents the continuous\ + \ work towards the development of an interface for music mixing targeted towards\ + \ expert sound technicians and producers. The mixing interface uses a stage metaphor\ + \ mapping scheme where audio channels arerepresented as digital widgets on a 2D\ + \ surface. These can be controlled bymulti touch or by smart tangibles, which\ + \ are tangible blocks with embedded sensors. The smart tangibles developed for\ + \ this interface are able to sense howthey are grasped by the user. The paper\ + \ presents the design of the mixing interface including the smart tangible as\ + \ well as a preliminary user study involving a hands-on focus group session where\ + \ 5 different control technologiesare contrasted and discussed. 
Preliminary findings\ + \ suggest that smart tangibles were preferred, but that an optimal interface would\ + \ include a combination of touch, smart tangibles and an extra function control\ + \ tangible for extending the functionality of the smart tangibles. Finally, the\ + \ interface should incorporate both an edit and mix mode---the latter displaying\ + \ very limited visual feedback in order to force users to focus their attention\ + \ to listening instead of the interface.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Steven Gelineck and Dan Overholt and Morten B{\\''u}chert and Jesper\ + \ Andersen},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178532},\n issn\ + \ = {2220-4806},\n keywords = {music mixing, tangibles, smart objects, multi-touch,\ + \ control surface, graspables, physical-digital interface, tangible user interface,\ + \ wireless sensing, sketching in hardware},\n month = {May},\n pages = {180--185},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Towards\ + \ an Interface for Music Mixing based on Smart Tangibles and Multitouch},\n url\ + \ = {http://www.nime.org/proceedings/2013/nime2013_206.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178542 + doi: 10.5281/zenodo.1178532 issn: 2220-4806 - keywords: 'Brain-computer interface (BCI), qualitative and quantitative information, - classification, sonification' + keywords: 'music mixing, tangibles, smart objects, multi-touch, control surface, + graspables, physical-digital interface, tangible user interface, wireless sensing, + sketching in hardware' month: May - pages: 49--54 + pages: 180--185 publisher: 'Graduate School of Culture Technology, KAIST' - title: Generating an Integrated Musical Expression with a Brain--Computer Interface - url: 
http://www.nime.org/proceedings/2013/nime2013_120.pdf + title: Towards an Interface for Music Mixing based on Smart Tangibles and Multitouch + url: http://www.nime.org/proceedings/2013/nime2013_206.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Martin2013 - abstract: 'This paper describes the development of an Apple iPhone based mobile - computersystem for vibraphone and its use in a series of the author''s performanceprojects - in 2011 and 2012.This artistic research was motivated by a desire to develop an - alternative tolaptop computers for the author''s existing percussion and computer - performancepractice. The aims were to develop a light, compact and flexible system - usingmobile devices that would allow computer music to infiltrate solo and ensembleperformance - situations where it is difficult to use a laptop computer.The project began with - a system that brought computer elements to NordligVinter, a suite of percussion - duos, using an iPhone, RjDj, Pure Data and ahome-made pickup system. This process - was documented with video recordings andanalysed using ethnographic methods.The - mobile computer music setup proved to be elegant and convenient inperformance - situations with very little time and space to set up, as well as inperformance - classes and workshops. The simple mobile system encouragedexperimentation and - the platforms used enabled sharing with a wider audience.' + ID: Tang2013 + abstract: 'CalliMusic, is a system developed for users to generate traditional Chinesemusic + by writing Chinese ink brush calligraphy, turning the long-believedstrong linkage + between the two art forms with rich histories into reality. Inaddition to traditional + calligraphy writing instruments (brush, ink and paper),a camera is the only addition + needed to convert the motion of the ink brushinto musical notes through a variety + of mappings such as human-inspired,statistical and a hybrid. 
The design of the + system, including details of eachmapping and research issues encountered are discussed. + A user study of systemperformance suggests that the result is quite encouraging. + The technique is,obviously, applicable to other related art forms with a wide + range ofapplications.' address: 'Daejeon, Republic of Korea' - author: Charles Martin - bibtex: "@inproceedings{Martin2013,\n abstract = {This paper describes the development\ - \ of an Apple iPhone based mobile computersystem for vibraphone and its use in\ - \ a series of the author's performanceprojects in 2011 and 2012.This artistic\ - \ research was motivated by a desire to develop an alternative tolaptop computers\ - \ for the author's existing percussion and computer performancepractice. The aims\ - \ were to develop a light, compact and flexible system usingmobile devices that\ - \ would allow computer music to infiltrate solo and ensembleperformance situations\ - \ where it is difficult to use a laptop computer.The project began with a system\ - \ that brought computer elements to NordligVinter, a suite of percussion duos,\ - \ using an iPhone, RjDj, Pure Data and ahome-made pickup system. This process\ - \ was documented with video recordings andanalysed using ethnographic methods.The\ - \ mobile computer music setup proved to be elegant and convenient inperformance\ - \ situations with very little time and space to set up, as well as inperformance\ - \ classes and workshops. 
The simple mobile system encouragedexperimentation and\ - \ the platforms used enabled sharing with a wider audience.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Charles Martin},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178602},\n issn = {2220-4806},\n keywords = {percussion,\ - \ mobile computer music, Apple iOS, collaborative performance practice, ethnography,\ - \ artistic research},\n month = {May},\n pages = {377--380},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Performing with a Mobile Computer\ - \ System for Vibraphone},\n url = {http://www.nime.org/proceedings/2013/nime2013_121.pdf},\n\ + author: Will W. W. Tang and Stephen Chan and Grace Ngai and Hong-va Leong + bibtex: "@inproceedings{Tang2013,\n abstract = {CalliMusic, is a system developed\ + \ for users to generate traditional Chinesemusic by writing Chinese ink brush\ + \ calligraphy, turning the long-believedstrong linkage between the two art forms\ + \ with rich histories into reality. Inaddition to traditional calligraphy writing\ + \ instruments (brush, ink and paper),a camera is the only addition needed to convert\ + \ the motion of the ink brushinto musical notes through a variety of mappings\ + \ such as human-inspired,statistical and a hybrid. The design of the system, including\ + \ details of eachmapping and research issues encountered are discussed. A user\ + \ study of systemperformance suggests that the result is quite encouraging. The\ + \ technique is,obviously, applicable to other related art forms with a wide range\ + \ ofapplications.},\n address = {Daejeon, Republic of Korea},\n author = {Will\ + \ W. W. 
Tang and Stephen Chan and Grace Ngai and Hong-va Leong},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178668},\n issn = {2220-4806},\n keywords = {Chinese\ + \ Calligraphy, Chinese Music, Assisted Music Generation},\n month = {May},\n pages\ + \ = {84--89},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Computer Assisted Melo-rhythmic Generation of Traditional Chinese Music\ + \ from Ink Brush Calligraphy},\n url = {http://www.nime.org/proceedings/2013/nime2013_208.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178602 + doi: 10.5281/zenodo.1178668 issn: 2220-4806 - keywords: 'percussion, mobile computer music, Apple iOS, collaborative performance - practice, ethnography, artistic research' + keywords: 'Chinese Calligraphy, Chinese Music, Assisted Music Generation' month: May - pages: 377--380 + pages: 84--89 publisher: 'Graduate School of Culture Technology, KAIST' - title: Performing with a Mobile Computer System for Vibraphone - url: http://www.nime.org/proceedings/2013/nime2013_121.pdf + title: Computer Assisted Melo-rhythmic Generation of Traditional Chinese Music from + Ink Brush Calligraphy + url: http://www.nime.org/proceedings/2013/nime2013_208.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: McLean2013 - abstract: 'The Human vocal tract is considered for its sonorous qualities incarrying - prosodic information, which implicates vision in theperceptual processes of speech. 
- These considerations are put in thecontext of previous work in NIME, forming background - for theintroduction of two sound installations; ``Microphone'''', which uses acamera - and computer vision to translate mouth shapes to sounds, and``Microphone II'''', - a work-in-progress, which adds physical modellingsynthesis as a sound source, - and visualisation of mouth movements.' + ID: Kaneko2013 + abstract: 'In this paper, a function-oriented musical interface, named the sound + wheel_x0011_,is presented. This interface is designed to manipulate musical functions + likepitch class sets, tonal centers and scale degrees, rather than the _x0010_musicalsurface_x0011_, + i.e. the individual notes with concrete note heights. The sound wheelhas an interface + summarizing harmony theory, and the playing actions haveexplicit correspondencewith + musical functions. Easy usability is realized by semi-automatizing theconversion + process from musical functions into the musical surface. Thus, theplayer can use + this interface with concentration on the harmonic structure,without having his + attention caught by manipulating the musical surface.Subjective evaluation indicated + the e_x001B_ffectiveness of this interface as a toolhelpful for understanding + the music theory. Because of such features, thisinterface can be used for education + and interactive training of tonal musictheory.' address: 'Daejeon, Republic of Korea' - author: Alex McLean and EunJoo Shin and Kia Ng - bibtex: "@inproceedings{McLean2013,\n abstract = {The Human vocal tract is considered\ - \ for its sonorous qualities incarrying prosodic information, which implicates\ - \ vision in theperceptual processes of speech. 
These considerations are put in\ - \ thecontext of previous work in NIME, forming background for theintroduction\ - \ of two sound installations; ``Microphone'', which uses acamera and computer\ - \ vision to translate mouth shapes to sounds, and``Microphone II'', a work-in-progress,\ - \ which adds physical modellingsynthesis as a sound source, and visualisation\ - \ of mouth movements.},\n address = {Daejeon, Republic of Korea},\n author = {Alex\ - \ McLean and EunJoo Shin and Kia Ng},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178608},\n\ - \ issn = {2220-4806},\n keywords = {face tracking, computer vision, installation,\ - \ microphone},\n month = {May},\n pages = {381--384},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Paralinguistic Microphone},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_122.pdf},\n year = {2013}\n\ - }\n" + author: Shoken Kaneko + bibtex: "@inproceedings{Kaneko2013,\n abstract = {In this paper, a function-oriented\ + \ musical interface, named the sound wheel_x0011_,is presented. This interface\ + \ is designed to manipulate musical functions likepitch class sets, tonal centers\ + \ and scale degrees, rather than the _x0010_musicalsurface_x0011_, i.e. the individual\ + \ notes with concrete note heights. The sound wheelhas an interface summarizing\ + \ harmony theory, and the playing actions haveexplicit correspondencewith musical\ + \ functions. Easy usability is realized by semi-automatizing theconversion process\ + \ from musical functions into the musical surface. Thus, theplayer can use this\ + \ interface with concentration on the harmonic structure,without having his attention\ + \ caught by manipulating the musical surface.Subjective evaluation indicated the\ + \ e_x001B_ffectiveness of this interface as a toolhelpful for understanding the\ + \ music theory. 
Because of such features, thisinterface can be used for education\ + \ and interactive training of tonal musictheory.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Shoken Kaneko},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178574},\n\ + \ issn = {2220-4806},\n keywords = {Music education, Interactive tonal music generation},\n\ + \ month = {May},\n pages = {202--205},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {A Function-Oriented Interface for Music Education\ + \ and Musical Expressions: ``the Sound Wheel''},\n url = {http://www.nime.org/proceedings/2013/nime2013_21.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178608 + doi: 10.5281/zenodo.1178574 issn: 2220-4806 - keywords: 'face tracking, computer vision, installation, microphone' + keywords: 'Music education, Interactive tonal music generation' month: May - pages: 381--384 + pages: 202--205 publisher: 'Graduate School of Culture Technology, KAIST' - title: Paralinguistic Microphone - url: http://www.nime.org/proceedings/2013/nime2013_122.pdf + title: 'A Function-Oriented Interface for Music Education and Musical Expressions: + ``the Sound Wheel''''' + url: http://www.nime.org/proceedings/2013/nime2013_21.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Bisig2013 - abstract: 'This paper presents a proof of concept implementation of an interface - entitledCoral. The interface serves as a physical and haptic extension of a simulatedcomplex - system, which will be employed as an intermediate mechanism for thecreation of - generative music and imagery. The paper discusses the motivationand conceptual - context that underly the implementation, describes its technicalrealisation and - presents some first interaction experiments. 
The paper focuseson the following - two aspects: the interrelation between the physical andvirtual behaviours and - properties of the interface and simulation, and thecapability of the interface - to enable an intuitive and tangible exploration ofthis hybrid dynamical system.' + ID: Andersson2013 + abstract: 'Our voice and body are important parts of our self-experience, andcommunication + and relational possibilities. They gradually become moreimportant for Interaction + Design, due to increased development of tangibleinteraction and mobile communication. + In this paper we present and discuss ourwork with voice and tangible interaction + in our ongoing research project XXXXX.The goal is to improve health for families, + adults and children withdisabilities through use of collaborative, musical, tangible + media. We build onuse of voice in Music Therapy and on a humanistic health approach. + Ourchallenge is to design vocal and tangible interactive media that through usereduce + isolation and passivity and increase empowerment for the users. We usesound recognition, + generative sound synthesis, vibrations and cross-mediatechniques, to create rhythms, + melodies and harmonic chords to stimulatebody-voice connections, positive emotions + and structures for actions.' address: 'Daejeon, Republic of Korea' - author: Daniel Bisig and Sébastien Schiesser - bibtex: "@inproceedings{Bisig2013,\n abstract = {This paper presents a proof of\ - \ concept implementation of an interface entitledCoral. The interface serves as\ - \ a physical and haptic extension of a simulatedcomplex system, which will be\ - \ employed as an intermediate mechanism for thecreation of generative music and\ - \ imagery. The paper discusses the motivationand conceptual context that underly\ - \ the implementation, describes its technicalrealisation and presents some first\ - \ interaction experiments. 
The paper focuseson the following two aspects: the\ - \ interrelation between the physical andvirtual behaviours and properties of the\ - \ interface and simulation, and thecapability of the interface to enable an intuitive\ - \ and tangible exploration ofthis hybrid dynamical system.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Daniel Bisig and S{\\'e}bastien Schiesser},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178482},\n issn = {2220-4806},\n\ - \ keywords = {haptic interface, swarm simulation, generative art},\n month = {May},\n\ - \ pages = {385--388},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Coral -- a Physical and Haptic Extension of a Swarm Simulation},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_126.pdf},\n year = {2013}\n\ - }\n" + author: Anders-Petter Andersson and Birgitta Cappelen + bibtex: "@inproceedings{Andersson2013,\n abstract = {Our voice and body are important\ + \ parts of our self-experience, andcommunication and relational possibilities.\ + \ They gradually become moreimportant for Interaction Design, due to increased\ + \ development of tangibleinteraction and mobile communication. In this paper we\ + \ present and discuss ourwork with voice and tangible interaction in our ongoing\ + \ research project XXXXX.The goal is to improve health for families, adults and\ + \ children withdisabilities through use of collaborative, musical, tangible media.\ + \ We build onuse of voice in Music Therapy and on a humanistic health approach.\ + \ Ourchallenge is to design vocal and tangible interactive media that through\ + \ usereduce isolation and passivity and increase empowerment for the users. 
We\ + \ usesound recognition, generative sound synthesis, vibrations and cross-mediatechniques,\ + \ to create rhythms, melodies and harmonic chords to stimulatebody-voice connections,\ + \ positive emotions and structures for actions.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Anders-Petter Andersson and Birgitta Cappelen},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178465},\n issn = {2220-4806},\n keywords\ + \ = {Vocal Interaction, Tangible Interaction, Music & Health, Voice, Empowerment,\ + \ Music Therapy, Resource-Oriented},\n month = {May},\n pages = {406--412},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Designing\ + \ Empowering Vocal and Tangible Interaction},\n url = {http://www.nime.org/proceedings/2013/nime2013_210.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178482 + doi: 10.5281/zenodo.1178465 issn: 2220-4806 - keywords: 'haptic interface, swarm simulation, generative art' + keywords: 'Vocal Interaction, Tangible Interaction, Music & Health, Voice, Empowerment, + Music Therapy, Resource-Oriented' month: May - pages: 385--388 + pages: 406--412 publisher: 'Graduate School of Culture Technology, KAIST' - title: Coral -- a Physical and Haptic Extension of a Swarm Simulation - url: http://www.nime.org/proceedings/2013/nime2013_126.pdf + title: Designing Empowering Vocal and Tangible Interaction + url: http://www.nime.org/proceedings/2013/nime2013_210.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Schacher2013 - abstract: 'This article documents a class that teaches gestural interaction and - juxtaposestraditional instrumental skills with digital musical instrument concepts. 
- Inorder to show the principles and reflections that informed the choices made - indeveloping this syllabus, fundamental elements of an instrument-bodyrelationship - and the perceptual import of sensori-motor integration areinvestigated. The methods - used to let participants learn in practicalexperimental settings are discussed, - showing a way to conceptualise andexperience the entire workflow from instrumental - sound to electronictransformations by blending gestural interaction with digital - musicalinstrument techniques and traditional instrumental playing skills. Thetechnical - interfaces and software that were deployed are explained, focussingof the interactive - potential offered by each solution. In an attempt tosummarise and evaluate the - impact of this course, a number of insights relatingto this specific pedagogical - situation are put forward. Finally, concreteexamples of interactive situations - that were developed by the participants areshown in order to demonstrate the validity - of this approach.' + ID: Astrinaki2013 + abstract: 'This paper describes the recent progress in our approach to generateperformative + and controllable speech. The goal of the performative HMM-basedspeech and singing + synthesis library, called Mage, is to have the ability togenerate natural sounding + speech with arbitrary speaker''s voicecharacteristics, speaking styles and expressions + and at the same time to haveaccurate reactive user control over all the available + production levels. Mageallows to arbitrarily change between voices, control speaking + style or vocalidentity, manipulate voice characteristics or alter the targeted + contexton-the-fly and also maintain the naturalness and intelligibility of the + output.To achieve these controls, it was essential to redesign and improve the + initiallibrary. 
This paper focuses on the improvements of the architectural design,the + additional user controls and provides an overview of a prototype, where aguitar + is used to reactively control the generation of a synthetic voice invarious levels.' address: 'Daejeon, Republic of Korea' - author: Jan C. Schacher - bibtex: "@inproceedings{Schacher2013,\n abstract = {This article documents a class\ - \ that teaches gestural interaction and juxtaposestraditional instrumental skills\ - \ with digital musical instrument concepts. Inorder to show the principles and\ - \ reflections that informed the choices made indeveloping this syllabus, fundamental\ - \ elements of an instrument-bodyrelationship and the perceptual import of sensori-motor\ - \ integration areinvestigated. The methods used to let participants learn in practicalexperimental\ - \ settings are discussed, showing a way to conceptualise andexperience the entire\ - \ workflow from instrumental sound to electronictransformations by blending gestural\ - \ interaction with digital musicalinstrument techniques and traditional instrumental\ - \ playing skills. Thetechnical interfaces and software that were deployed are\ - \ explained, focussingof the interactive potential offered by each solution. In\ - \ an attempt tosummarise and evaluate the impact of this course, a number of insights\ - \ relatingto this specific pedagogical situation are put forward. Finally, concreteexamples\ - \ of interactive situations that were developed by the participants areshown in\ - \ order to demonstrate the validity of this approach.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Jan C. 
Schacher},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178656},\n issn = {2220-4806},\n keywords = {gestural\ - \ interaction, digital musical instruments, pedagogy, mapping, enactive approach},\n\ - \ month = {May},\n pages = {55--60},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Hybrid Musicianship --- Teaching Gestural Interaction\ - \ with Traditional and Digital Instruments},\n url = {http://www.nime.org/proceedings/2013/nime2013_127.pdf},\n\ + author: Maria Astrinaki and Nicolas d'Alessandro and Loïc Reboursière and Alexis + Moinet and Thierry Dutoit + bibtex: "@inproceedings{Astrinaki2013,\n abstract = {This paper describes the recent\ + \ progress in our approach to generateperformative and controllable speech. The\ + \ goal of the performative HMM-basedspeech and singing synthesis library, called\ + \ Mage, is to have the ability togenerate natural sounding speech with arbitrary\ + \ speaker's voicecharacteristics, speaking styles and expressions and at the same\ + \ time to haveaccurate reactive user control over all the available production\ + \ levels. 
Mageallows to arbitrarily change between voices, control speaking style\ + \ or vocalidentity, manipulate voice characteristics or alter the targeted contexton-the-fly\ + \ and also maintain the naturalness and intelligibility of the output.To achieve\ + \ these controls, it was essential to redesign and improve the initiallibrary.\ + \ This paper focuses on the improvements of the architectural design,the additional\ + \ user controls and provides an overview of a prototype, where aguitar is used\ + \ to reactively control the generation of a synthetic voice invarious levels.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Maria Astrinaki and Nicolas\ + \ d'Alessandro and Lo{\\\"i}c Reboursi{\\`e}re and Alexis Moinet and Thierry Dutoit},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178467},\n issn = {2220-4806},\n\ + \ keywords = {speech synthesis, augmented guitar, hexaphonic guitar},\n month\ + \ = {May},\n pages = {547--550},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {MAGE 2.0: New Features and its Application in the Development\ + \ of a Talking Guitar},\n url = {http://www.nime.org/proceedings/2013/nime2013_214.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178656 + doi: 10.5281/zenodo.1178467 issn: 2220-4806 - keywords: 'gestural interaction, digital musical instruments, pedagogy, mapping, - enactive approach' + keywords: 'speech synthesis, augmented guitar, hexaphonic guitar' month: May - pages: 55--60 + pages: 547--550 publisher: 'Graduate School of Culture Technology, KAIST' - title: Hybrid Musicianship --- Teaching Gestural Interaction with Traditional and - Digital Instruments - url: http://www.nime.org/proceedings/2013/nime2013_127.pdf + title: 'MAGE 2.0: New Features and its Application in the Development of a 
Talking + Guitar' + url: http://www.nime.org/proceedings/2013/nime2013_214.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Jackie2013 - abstract: 'SoloTouch is a guitar inspired pocket sized controller system that consists - ofa capacitive touch trigger and a lick-based note selector. The touch triggerallows - an intuitive way to play both velocity sensitive notes and vibratoexpressively - using only one finger. The lick-based note selector is an originalconcept that - provides the player an easy way to play expressive melodic linesby combining pre-programmed - ``licks'''' without the need to learn the actualnotes. The two-part controller - is primarily used as a basic MIDI controller forplaying MIDI controlled virtual - instruments, normally played by keyboardcontrollers. The controller is targeted - towards novice musicians, playerswithout prior musical training could play musical - and expressive solos,suitable for improvised jamming along modern popular music.' + ID: Lee2013a + abstract: 'We introduce a form of networked music performance where a performer + plays amobile music instrument while it is being implemented on the fly by a livecoder. + This setup poses a set of challenges in performing a music instrumentwhich changes + over time and we suggest design guidelines such as making asmooth transition, + varying adoption of change, and sharing information betweenthe pair of two performers. + A proof-of-concept instrument is implemented on amobile device using UrMus, applying + the suggested guidelines. We wish that thismodel would expand the scope of live + coding to the distributed interactivesystem, drawing existing performance ideas + of NIMEs.' 
address: 'Daejeon, Republic of Korea' - author: Jackie and Yi Tang Chui and Mubarak Marafa and Samson and Ka Fai Young - bibtex: "@inproceedings{Jackie2013,\n abstract = {SoloTouch is a guitar inspired\ - \ pocket sized controller system that consists ofa capacitive touch trigger and\ - \ a lick-based note selector. The touch triggerallows an intuitive way to play\ - \ both velocity sensitive notes and vibratoexpressively using only one finger.\ - \ The lick-based note selector is an originalconcept that provides the player\ - \ an easy way to play expressive melodic linesby combining pre-programmed ``licks''\ - \ without the need to learn the actualnotes. The two-part controller is primarily\ - \ used as a basic MIDI controller forplaying MIDI controlled virtual instruments,\ - \ normally played by keyboardcontrollers. The controller is targeted towards novice\ - \ musicians, playerswithout prior musical training could play musical and expressive\ - \ solos,suitable for improvised jamming along modern popular music.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {Jackie and Yi Tang Chui and Mubarak\ - \ Marafa and Samson and Ka Fai Young},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178560},\n\ - \ issn = {2220-4806},\n keywords = {Capacitive touch controller, automated note\ - \ selector, virtual instrument MIDI controller, novice musicians.},\n month =\ - \ {May},\n pages = {389--393},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {SoloTouch: A Capacitive Touch Controller with Lick-based\ - \ Note Selector},\n url = {http://www.nime.org/proceedings/2013/nime2013_130.pdf},\n\ - \ year = {2013}\n}\n" + author: Sang Won Lee and Georg Essl + bibtex: "@inproceedings{Lee2013a,\n abstract = {We introduce a form of networked\ + \ music performance where a performer plays amobile music instrument while it\ + \ is being implemented on the fly 
by a livecoder. This setup poses a set of challenges\ + \ in performing a music instrumentwhich changes over time and we suggest design\ + \ guidelines such as making asmooth transition, varying adoption of change, and\ + \ sharing information betweenthe pair of two performers. A proof-of-concept instrument\ + \ is implemented on amobile device using UrMus, applying the suggested guidelines.\ + \ We wish that thismodel would expand the scope of live coding to the distributed\ + \ interactivesystem, drawing existing performance ideas of NIMEs.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {Sang Won Lee and Georg Essl},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178592},\n issn = {2220-4806},\n\ + \ keywords = {live coding, network music, on-the-fly instrument, mobile music},\n\ + \ month = {May},\n pages = {493--498},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Live Coding The Mobile Music Instrument},\n url\ + \ = {http://www.nime.org/proceedings/2013/nime2013_216.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178560 + doi: 10.5281/zenodo.1178592 issn: 2220-4806 - keywords: 'Capacitive touch controller, automated note selector, virtual instrument - MIDI controller, novice musicians.' 
+ keywords: 'live coding, network music, on-the-fly instrument, mobile music' month: May - pages: 389--393 + pages: 493--498 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'SoloTouch: A Capacitive Touch Controller with Lick-based Note Selector' - url: http://www.nime.org/proceedings/2013/nime2013_130.pdf + title: Live Coding The Mobile Music Instrument + url: http://www.nime.org/proceedings/2013/nime2013_216.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Mital2013 - abstract: 'We present an interactive content-based MIR environment specifically - designedto aid in the exploration of databases of experimental electronic music,particularly - in cases where little or no metadata exist. In recent years,several rare archives - of early experimental electronic music have becomeavailable. The Daphne Oram Collection - contains one such archive, consisting ofapproximately 120 hours of 1/4 inch tape - recordings and representing a perioddating from circa 1957. This collection is - recognized as an importantmusicological resource, representing aspects of the - evolution of electronicmusic practices, including early tape editing methods, - experimental synthesistechniques and composition. However, it is extremely challenging - to derivemeaningful information from this dataset, primarily for three reasons. - First,the dataset is very large. Second, there is limited metadata --- some titles,track - lists, and occasional handwritten notes exist, but where this is true,the reliability - of the annotations are unknown. Finally, and mostsignificantly, as this is a collection - of early experimental electronic music,the sonic characteristics of the material - are often not consistent withtraditional musical information. In other words, - there is no score, no knowninstrumentation, and often no recognizable acoustic - source. 
We present amethod for the construction of a frequency component dictionary - derived fromthe collection via Probabilistic Latent Component Analysis (PLCA), - anddemonstrate how an interactive 3D visualization of the relationships betweenthe - PLCA-derived dictionary and the archive is facilitating researcher''sunderstanding - of the data.' + ID: You2013 + abstract: 'Remix_Dance Music 3 is a four-channel quasi-fixed media piece that can + beimprovised by a single player operating the Max/MSP-based controller on atablet + such as iPad. Within the fixed time limit of six minutes, the performercan freely + (de)activate and displace the eighty seven precomposed audio filesthat are simultaneously + running, generating a sonic structure to one''s likingout of the given network + of musical possibilities. The interface is designed toinvite an integral musical + structuring particularly in the dimensions ofperformatively underexplored (but + still sonically viable) parameters that arelargely based on MPEG-7 audio descriptors.' address: 'Daejeon, Republic of Korea' - author: Parag Kumar Mital and Mick Grierson - bibtex: "@inproceedings{Mital2013,\n abstract = {We present an interactive content-based\ - \ MIR environment specifically designedto aid in the exploration of databases\ - \ of experimental electronic music,particularly in cases where little or no metadata\ - \ exist. In recent years,several rare archives of early experimental electronic\ - \ music have becomeavailable. The Daphne Oram Collection contains one such archive,\ - \ consisting ofapproximately 120 hours of 1/4 inch tape recordings and representing\ - \ a perioddating from circa 1957. 
This collection is recognized as an importantmusicological\ - \ resource, representing aspects of the evolution of electronicmusic practices,\ - \ including early tape editing methods, experimental synthesistechniques and composition.\ - \ However, it is extremely challenging to derivemeaningful information from this\ - \ dataset, primarily for three reasons. First,the dataset is very large. Second,\ - \ there is limited metadata --- some titles,track lists, and occasional handwritten\ - \ notes exist, but where this is true,the reliability of the annotations are unknown.\ - \ Finally, and mostsignificantly, as this is a collection of early experimental\ - \ electronic music,the sonic characteristics of the material are often not consistent\ - \ withtraditional musical information. In other words, there is no score, no knowninstrumentation,\ - \ and often no recognizable acoustic source. We present amethod for the construction\ - \ of a frequency component dictionary derived fromthe collection via Probabilistic\ - \ Latent Component Analysis (PLCA), anddemonstrate how an interactive 3D visualization\ - \ of the relationships betweenthe PLCA-derived dictionary and the archive is facilitating\ - \ researcher'sunderstanding of the data.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Parag Kumar Mital and Mick Grierson},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178614},\n issn = {2220-4806},\n keywords = {mir, plca, mfcc,\ - \ 3d browser, daphne oram, content-based information retrieval, interactive visualization},\n\ - \ month = {May},\n pages = {227--232},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Mining Unlabeled Electronic Music Databases through\ - \ {3D} Interactive Visualization of Latent Component Relationships},\n url = {http://www.nime.org/proceedings/2013/nime2013_132.pdf},\n\ + author: Jaeseong You and Red 
Wierenga + bibtex: "@inproceedings{You2013,\n abstract = {Remix_Dance Music 3 is a four-channel\ + \ quasi-fixed media piece that can beimprovised by a single player operating the\ + \ Max/MSP-based controller on atablet such as iPad. Within the fixed time limit\ + \ of six minutes, the performercan freely (de)activate and displace the eighty\ + \ seven precomposed audio filesthat are simultaneously running, generating a sonic\ + \ structure to one's likingout of the given network of musical possibilities.\ + \ The interface is designed toinvite an integral musical structuring particularly\ + \ in the dimensions ofperformatively underexplored (but still sonically viable)\ + \ parameters that arelargely based on MPEG-7 audio descriptors.},\n address =\ + \ {Daejeon, Republic of Korea},\n author = {Jaeseong You and Red Wierenga},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178696},\n issn = {2220-4806},\n\ + \ keywords = {Novel controllers, interface for musical expression, musical mapping\ + \ strategy, music cognition, music perception, MPEG-7},\n month = {May},\n pages\ + \ = {124--127},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Remix_Dance 3: Improvisatory Sound Displacing on Touch Screen-Based\ + \ Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_219.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178614 + doi: 10.5281/zenodo.1178696 issn: 2220-4806 - keywords: 'mir, plca, mfcc, 3d browser, daphne oram, content-based information retrieval, - interactive visualization' + keywords: 'Novel controllers, interface for musical expression, musical mapping + strategy, music cognition, music perception, MPEG-7' month: May - pages: 227--232 + pages: 124--127 publisher: 'Graduate School of Culture Technology, KAIST' - title: 
Mining Unlabeled Electronic Music Databases through 3D Interactive Visualization - of Latent Component Relationships - url: http://www.nime.org/proceedings/2013/nime2013_132.pdf + title: 'Remix_Dance 3: Improvisatory Sound Displacing on Touch Screen-Based Interface' + url: http://www.nime.org/proceedings/2013/nime2013_219.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Hong2013 - abstract: "Laptap is a laptop-based, real-time sound synthesis/control system for\ - \ musicand multimedia performance. The system produces unique sounds by positive\ - \ audiofeedback between the on-board microphone and the speaker of a laptop com-puter.\ - \ Users can make a variety of sounds by touching the laptop computer inseveral\ - \ different ways, and control their timbre with the gestures of the otherhand\ - \ above the mi,\ncrophone and the speaker to manipulate the characteristicsof\ - \ the acoustic feedback path. We introduce the basic con,\ncept of this audiofeedback\ - \ system, describe its features for sound generation and manipulation,and discuss\ - \ the result of an experimental performance. Finally we suggest somerelevant research\ - \ topics that might follow in the future." + ID: Barbosa2013 + abstract: 'This paper presents an innovative digital musical instrument, the Illusio, + based on an augmented multi-touch interface that combines a traditional multi-touch + surface and a device similar to a guitar pedal. Illusio allows users to perform + by drawing and by associating the sketches with live loops. These loops are manipulated + based on a concept called hierarchical live looping, which extends traditional + live looping through the use of a musical tree, in which any music operation applied + to a given node affects all its children nodes. Finally, we evaluate the instrument + considering the performer and the audience, which are two of the most important + stakeholders involved in the use, conception, and perception of a musical device. 
+ The results achieved are encouraging and led to useful insights about how to improve + instrument features, performance and usability.' address: 'Daejeon, Republic of Korea' - author: Dae Ryong Hong and Woon Seung Yeo - bibtex: "@inproceedings{Hong2013,\n abstract = {Laptap is a laptop-based, real-time\ - \ sound synthesis/control system for musicand multimedia performance. The system\ - \ produces unique sounds by positive audiofeedback between the on-board microphone\ - \ and the speaker of a laptop com-puter. Users can make a variety of sounds by\ - \ touching the laptop computer inseveral different ways, and control their timbre\ - \ with the gestures of the otherhand above the mi,\ncrophone and the speaker to\ - \ manipulate the characteristicsof the acoustic feedback path. We introduce the\ - \ basic con,\ncept of this audiofeedback system, describe its features for sound\ - \ generation and manipulation,and discuss the result of an experimental performance.\ - \ Finally we suggest somerelevant research topics that might follow in the future.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Dae Ryong Hong and Woon\ - \ Seung Yeo},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178554},\n issn\ - \ = {2220-4806},\n keywords = {Laptop music, laptop computer, audio feedback,\ - \ hand gesture, gestural control, musical mapping, audio visualization, musical\ - \ notation},\n month = {May},\n pages = {233--236},\n publisher = {Graduate School\ - \ of Culture Technology, KAIST},\n title = {Laptap: Laptop Computer as a Musical\ - \ Instrument using Audio Feedback},\n url = {http://www.nime.org/proceedings/2013/nime2013_137.pdf},\n\ + author: 'Jerônimo Barbosa, Filipe Calegario, Veronica Teichrieb, Geber Ramalho and + Giordano Cabral' + bibtex: "@inproceedings{Barbosa2013,\n abstract = {This paper presents an innovative\ + \ digital musical instrument, the Illusio, 
based on an augmented multi-touch interface\ + \ that combines a traditional multi-touch surface and a device similar to a guitar\ + \ pedal. Illusio allows users to perform by drawing and by associating the sketches\ + \ with live loops. These loops are manipulated based on a concept called hierarchical\ + \ live looping, which extends traditional live looping through the use of a musical\ + \ tree, in which any music operation applied to a given node affects all its children\ + \ nodes. Finally, we evaluate the instrument considering the performer and the\ + \ audience, which are two of the most important stakeholders involved in the use,\ + \ conception, and perception of a musical device. The results achieved are encouraging\ + \ and led to useful insights about how to improve instrument features, performance\ + \ and usability.},\n address = {Daejeon, Republic of Korea},\n author = {Jer{\\\ + ^o}nimo Barbosa, Filipe Calegario, Veronica Teichrieb, Geber Ramalho and Giordano\ + \ Cabral},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178566},\n issn = {2220-4806},\n\ + \ keywords = {Digital musical instruments, augmented multi-touch, hierarchical\ + \ live looping, interaction techniques, evaluation methodology},\n month = {May},\n\ + \ pages = {499--502},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {A Drawing-Based Digital Music Instrument},\n url = {http://www.nime.org/proceedings/2013/nime2013_220.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178554 + doi: 10.5281/zenodo.1178566 issn: 2220-4806 - keywords: 'Laptop music, laptop computer, audio feedback, hand gesture, gestural - control, musical mapping, audio visualization, musical notation' + keywords: 'Digital musical instruments, augmented multi-touch, hierarchical live + looping, 
interaction techniques, evaluation methodology' month: May - pages: 233--236 + pages: 499--502 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Laptap: Laptop Computer as a Musical Instrument using Audio Feedback' - url: http://www.nime.org/proceedings/2013/nime2013_137.pdf + title: A Drawing-Based Digital Music Instrument + url: http://www.nime.org/proceedings/2013/nime2013_220.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Bragg2013 - abstract: 'This paper presents a graph-theoretic model that supports the design - andanalysis of data flow within digital musical instruments (DMIs). The state - ofthe art in DMI design fails to provide any standards for the scheduling ofcomputations - within a DMI''s data flow. It does not provide a theoreticalframework within which - we can analyze different scheduling protocols and theirimpact on the DMI''s performance. - Indeed, the mapping between the DMI''s sensoryinputs and sonic outputs is classically - treated as a black box. DMI designersand builders are forced to design and schedule - the flow of data through thisblack box on their own. Improper design of the data - flow can produceundesirable results, ranging from overflowing buffers that cause - system crashesto misaligned sensory data that result in strange or disordered - sonic events.In this paper, we attempt to remedy this problem by providing a framework - forthe design and analysis of the DMI data flow. We also provide a schedulingalgorithm - built upon that framework that guarantees desirable properties forthe resulting - DMI.' + ID: Sarwate2013 + abstract: 'The Variator is a compositional assistance tool that allows users to + quicklyproduce and experiment with variations on musical objects, such as chords,melodies, + and chord progressions. 
The transformations performed by the Variatorcan range + from standard counterpoint transformations (inversion, retrograde,transposition) + to more complicated custom transformations, and the system isbuilt to encourage + the writing of custom transformations.This paper explores the design decisions + involved in creating a compositionalassistance tool, describes the Variator interface + and a preliminary set ofimplemented transformation functions, analyzes the results + of the evaluationsof a prototype system, and lays out future plans for expanding + upon thatsystem, both as a stand-alone application and as the basis for an opensource/collaborative + community where users can implement and share their owntransformation functions.' address: 'Daejeon, Republic of Korea' - author: Danielle Bragg - bibtex: "@inproceedings{Bragg2013,\n abstract = {This paper presents a graph-theoretic\ - \ model that supports the design andanalysis of data flow within digital musical\ - \ instruments (DMIs). The state ofthe art in DMI design fails to provide any standards\ - \ for the scheduling ofcomputations within a DMI's data flow. It does not provide\ - \ a theoreticalframework within which we can analyze different scheduling protocols\ - \ and theirimpact on the DMI's performance. Indeed, the mapping between the DMI's\ - \ sensoryinputs and sonic outputs is classically treated as a black box. DMI designersand\ - \ builders are forced to design and schedule the flow of data through thisblack\ - \ box on their own. Improper design of the data flow can produceundesirable results,\ - \ ranging from overflowing buffers that cause system crashesto misaligned sensory\ - \ data that result in strange or disordered sonic events.In this paper, we attempt\ - \ to remedy this problem by providing a framework forthe design and analysis of\ - \ the DMI data flow. 
We also provide a schedulingalgorithm built upon that framework\ - \ that guarantees desirable properties forthe resulting DMI.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Danielle Bragg},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178486},\n issn = {2220-4806},\n keywords = {DMI design,\ - \ data flow, mapping function},\n month = {May},\n pages = {237--242},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {Synchronous Data\ - \ Flow Modeling for {DMI}s},\n url = {http://www.nime.org/proceedings/2013/nime2013_139.pdf},\n\ - \ year = {2013}\n}\n" + author: Avneesh Sarwate and Rebecca Fiebrink + bibtex: "@inproceedings{Sarwate2013,\n abstract = {The Variator is a compositional\ + \ assistance tool that allows users to quicklyproduce and experiment with variations\ + \ on musical objects, such as chords,melodies, and chord progressions. The transformations\ + \ performed by the Variatorcan range from standard counterpoint transformations\ + \ (inversion, retrograde,transposition) to more complicated custom transformations,\ + \ and the system isbuilt to encourage the writing of custom transformations.This\ + \ paper explores the design decisions involved in creating a compositionalassistance\ + \ tool, describes the Variator interface and a preliminary set ofimplemented transformation\ + \ functions, analyzes the results of the evaluationsof a prototype system, and\ + \ lays out future plans for expanding upon thatsystem, both as a stand-alone application\ + \ and as the basis for an opensource/collaborative community where users can implement\ + \ and share their owntransformation functions.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Avneesh Sarwate and Rebecca Fiebrink},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = 
{10.5281/zenodo.1178654},\n issn = {2220-4806},\n keywords = {Composition\ + \ assistance tool, computer-aided composition, social composition},\n month =\ + \ {May},\n pages = {279--282},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {Variator: A Creativity Support Tool for Music Composition},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_224.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178486 + doi: 10.5281/zenodo.1178654 issn: 2220-4806 - keywords: 'DMI design, data flow, mapping function' + keywords: 'Composition assistance tool, computer-aided composition, social composition' month: May - pages: 237--242 + pages: 279--282 publisher: 'Graduate School of Culture Technology, KAIST' - title: Synchronous Data Flow Modeling for DMIs - url: http://www.nime.org/proceedings/2013/nime2013_139.pdf + title: 'Variator: A Creativity Support Tool for Music Composition' + url: http://www.nime.org/proceedings/2013/nime2013_224.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Feugere2013 - abstract: 'Digitartic, a system for bi-manual gestural control of Vowel-Consonant-Vowelperformative - singing synthesis is presented. This system is an extension of areal-time gesture-controlled - vowel singing instrument developed in the Max/MSPlanguage. In addition to pitch, - vowels and voice strength control, Digitarticis designed for gestural control - of articulation parameters for a wide set onconsonant, including various places - and manners of articulation. The phases ofarticulation between two phonemes are - continuously controlled and can bedriven in real time without noticeable delay, - at any stage of the syntheticphoneme production. Thus, as in natural singing, - very accurate rhythmicpatterns are produced and adapted while playing with other - musicians. 
Theinstrument features two (augmented) pen tablets for controlling - voiceproduction: one is dealing with the glottal source and vowels, the second - oneis dealing with consonant/vowel articulation. The results show very naturalconsonant - and vowel synthesis. Virtual choral practice confirms theeffectiveness of Digitartic - as an expressive musical instrument.' + ID: Grierson2013 + abstract: 'NoiseBear is a wireless malleable controller designed for, and in participationwith, + physically and cognitively disabled children. The aim of the project wasto produce + a musical controller that was robust, and flexible enough to be usedin a wide + range of interactive scenarios in participatory design workshops. NoiseBear demonstrates + an open ended system for designing wireless malleablecontrollers in different + shapes. It uses pressure sensitive material made fromconductive thread and polyester + cushion stuffing, to give the feel of a softtoy. The sensor networks with other + devices using the Bluetooth Low Energyprotocol, running on a BlueGiga BLE112 chip. + This contains an embedded 8051processor which manages the sensor. NoiseBear has + undergone an initialformative evaluation in a workshop session with four autistic + children, andcontinues to evolve in series of participatory design workshops. + The evaluationshowed that controller could be engaging for the children to use, + andhighlighted some technical limitations of the design. Solutions to theselimitations + are discussed, along with plans for future design iterations.' address: 'Daejeon, Republic of Korea' - author: Lionel Feugère and Christophe d'Alessandro - bibtex: "@inproceedings{Feugere2013,\n abstract = {Digitartic, a system for bi-manual\ - \ gestural control of Vowel-Consonant-Vowelperformative singing synthesis is presented.\ - \ This system is an extension of areal-time gesture-controlled vowel singing instrument\ - \ developed in the Max/MSPlanguage. 
In addition to pitch, vowels and voice strength\ - \ control, Digitarticis designed for gestural control of articulation parameters\ - \ for a wide set onconsonant, including various places and manners of articulation.\ - \ The phases ofarticulation between two phonemes are continuously controlled and\ - \ can bedriven in real time without noticeable delay, at any stage of the syntheticphoneme\ - \ production. Thus, as in natural singing, very accurate rhythmicpatterns are\ - \ produced and adapted while playing with other musicians. Theinstrument features\ - \ two (augmented) pen tablets for controlling voiceproduction: one is dealing\ - \ with the glottal source and vowels, the second oneis dealing with consonant/vowel\ - \ articulation. The results show very naturalconsonant and vowel synthesis. Virtual\ - \ choral practice confirms theeffectiveness of Digitartic as an expressive musical\ - \ instrument.},\n address = {Daejeon, Republic of Korea},\n author = {Lionel Feug{\\\ - `e}re and Christophe d'Alessandro},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178520},\n\ - \ issn = {2220-4806},\n keywords = {singing voice synthesis, gestural control,\ - \ syllabic synthesis, articulation, formants synthesis},\n month = {May},\n pages\ - \ = {331--336},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Digitartic: bi-manual gestural control of articulation in performative\ - \ singing synthesis},\n url = {http://www.nime.org/proceedings/2013/nime2013_143.pdf},\n\ + author: Mick Grierson and Chris Kiefer + bibtex: "@inproceedings{Grierson2013,\n abstract = {NoiseBear is a wireless malleable\ + \ controller designed for, and in participationwith, physically and cognitively\ + \ disabled children. 
The aim of the project wasto produce a musical controller\ + \ that was robust, and flexible enough to be usedin a wide range of interactive\ + \ scenarios in participatory design workshops. NoiseBear demonstrates an open\ + \ ended system for designing wireless malleablecontrollers in different shapes.\ + \ It uses pressure sensitive material made fromconductive thread and polyester\ + \ cushion stuffing, to give the feel of a softtoy. The sensor networks with other\ + \ devices using the Bluetooth Low Energyprotocol, running on a BlueGiga BLE112\ + \ chip. This contains an embedded 8051processor which manages the sensor. NoiseBear\ + \ has undergone an initialformative evaluation in a workshop session with four\ + \ autistic children, andcontinues to evolve in series of participatory design\ + \ workshops. The evaluationshowed that controller could be engaging for the children\ + \ to use, andhighlighted some technical limitations of the design. Solutions to\ + \ theselimitations are discussed, along with plans for future design iterations.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Mick Grierson and Chris\ + \ Kiefer},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178536},\n issn = {2220-4806},\n\ + \ keywords = {malleable controllers, assistive technology, multiparametric mapping},\n\ + \ month = {May},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {NoiseBear: A Malleable Wireless Controller Designed In Participation\ + \ with Disabled Children},\n url = {http://www.nime.org/proceedings/2013/nime2013_227.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178520 + doi: 10.5281/zenodo.1178536 issn: 2220-4806 - keywords: 'singing voice synthesis, gestural control, syllabic synthesis, articulation, - formants synthesis' + keywords: 
'malleable controllers, assistive technology, multiparametric mapping' month: May - pages: 331--336 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Digitartic: bi-manual gestural control of articulation in performative singing - synthesis' - url: http://www.nime.org/proceedings/2013/nime2013_143.pdf + title: 'NoiseBear: A Malleable Wireless Controller Designed In Participation with + Disabled Children' + url: http://www.nime.org/proceedings/2013/nime2013_227.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Schacher2013a - abstract: 'This article describes the motivations and reflections that led to thedevelopment - of a gestural sensor instrument called the Quarterstaff. In aniterative design - and fabrication process, several versions of this interfacewere build, tested - and evaluated in performances. A detailed explanation of thedesign choices concerning - the shape but also the sensing capabilities of theinstrument illustrates the emphasis - on establishing an `enactive''instrumental relationship. A musical practice for - this type of instrument isshown by discussing the methods used in the exploration - of the gesturalpotential of the interface and the strategies deployed for the - development ofmappings and compositions. Finally, to gain more information about - how thisinstrument compares with similar designs, two dimension-space analyses - are madethat show a clear positioning in relation to instruments that precede - theQuarterstaff.' + ID: jo2013 + abstract: 'In this paper, we present a method to produce analog records with standardvector + graphics software (i.e. Adobe Illustrator) and two different types ofcutting machines: + laser cutter, and paper cutter. The method enables us toengrave wave forms on + a surface of diverse materials such as paper, wood,acrylic, and leather without + or with prior acoustic information (i.e. digitalaudio data). The results could + be played with standard record players. 
Wepresent the method with its technical + specification and explain our initialtrials with two performances and a workshop. + The work examines the role ofmusical reproduction in the age of personal fabrication. + ---p.s. If it''s possible, we also would like to submit the work for performanceand + workshop.A video of performance < it contains information on the authorshttp://www.youtube.com/watch?v=vbCLe06P7j0' address: 'Daejeon, Republic of Korea' - author: Jan C. Schacher - bibtex: "@inproceedings{Schacher2013a,\n abstract = {This article describes the\ - \ motivations and reflections that led to thedevelopment of a gestural sensor\ - \ instrument called the Quarterstaff. In aniterative design and fabrication process,\ - \ several versions of this interfacewere build, tested and evaluated in performances.\ - \ A detailed explanation of thedesign choices concerning the shape but also the\ - \ sensing capabilities of theinstrument illustrates the emphasis on establishing\ - \ an `enactive'instrumental relationship. A musical practice for this type of\ - \ instrument isshown by discussing the methods used in the exploration of the\ - \ gesturalpotential of the interface and the strategies deployed for the development\ - \ ofmappings and compositions. Finally, to gain more information about how thisinstrument\ - \ compares with similar designs, two dimension-space analyses are madethat show\ - \ a clear positioning in relation to instruments that precede theQuarterstaff.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Jan C. Schacher},\n booktitle\ + author: kazuhiro jo + bibtex: "@inproceedings{jo2013,\n abstract = {In this paper, we present a method\ + \ to produce analog records with standardvector graphics software (i.e. Adobe\ + \ Illustrator) and two different types ofcutting machines: laser cutter, and paper\ + \ cutter. 
The method enables us toengrave wave forms on a surface of diverse materials\ + \ such as paper, wood,acrylic, and leather without or with prior acoustic information\ + \ (i.e. digitalaudio data). The results could be played with standard record players.\ + \ Wepresent the method with its technical specification and explain our initialtrials\ + \ with two performances and a workshop. The work examines the role ofmusical reproduction\ + \ in the age of personal fabrication. ---p.s. If it's possible, we also would\ + \ like to submit the work for performanceand workshop.A video of performance <\ + \ it contains information on the authorshttp://www.youtube.com/watch?v=vbCLe06P7j0},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {kazuhiro jo},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178658},\n issn = {2220-4806},\n keywords\ - \ = {Gestural sensor interface, instrument design, body-object relation, composition\ - \ and performance practice, dimension space analysis},\n month = {May},\n pages\ - \ = {535--540},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {The Quarterstaff, a Gestural Sensor Instrument},\n url = {http://www.nime.org/proceedings/2013/nime2013_144.pdf},\n\ - \ year = {2013}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178658 - issn: 2220-4806 - keywords: 'Gestural sensor interface, instrument design, body-object relation, composition - and performance practice, dimension space analysis' - month: May - pages: 535--540 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'The Quarterstaff, a Gestural Sensor Instrument' - url: http://www.nime.org/proceedings/2013/nime2013_144.pdf - year: 2013 - - -- ENTRYTYPE: inproceedings - ID: Altavilla2013 - abstract: 'We present a study that explores the affordance evoked by sound 
andsound-gesture - mappings. In order to do this, we make use of a sensor systemwith minimal form - factor in a user study that minimizes cultural associationThe present study focuses - on understanding how participants describe sounds andgestures produced while playing - designed sonic interaction mappings. Thisapproach seeks to move from object-centric - affordance towards investigatingembodied gestural sonic affordances.' - address: 'Daejeon, Republic of Korea' - author: Alessandro Altavilla and Baptiste Caramiaux and Atau Tanaka - bibtex: "@inproceedings{Altavilla2013,\n abstract = {We present a study that explores\ - \ the affordance evoked by sound andsound-gesture mappings. In order to do this,\ - \ we make use of a sensor systemwith minimal form factor in a user study that\ - \ minimizes cultural associationThe present study focuses on understanding how\ - \ participants describe sounds andgestures produced while playing designed sonic\ - \ interaction mappings. Thisapproach seeks to move from object-centric affordance\ - \ towards investigatingembodied gestural sonic affordances.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Alessandro Altavilla and Baptiste Caramiaux\ - \ and Atau Tanaka},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178463},\n\ - \ issn = {2220-4806},\n keywords = {Gestural embodiment of sound, Affordances,\ - \ Mapping},\n month = {May},\n pages = {61--64},\n publisher = {Graduate School\ - \ of Culture Technology, KAIST},\n title = {Towards Gestural Sonic Affordances},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_145.pdf},\n year = {2013}\n\ + \ Expression},\n doi = {10.5281/zenodo.1178578},\n issn = {2220-4806},\n keywords\ + \ = {Analog Record, Personal Fabrication, Media Archaeology},\n month = {May},\n\ + \ pages = {283--286},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {cutting 
record --- a record without (or with) prior acoustic information},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_228.pdf},\n year = {2013}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178463 + doi: 10.5281/zenodo.1178578 issn: 2220-4806 - keywords: 'Gestural embodiment of sound, Affordances, Mapping' + keywords: 'Analog Record, Personal Fabrication, Media Archaeology' month: May - pages: 61--64 + pages: 283--286 publisher: 'Graduate School of Culture Technology, KAIST' - title: Towards Gestural Sonic Affordances - url: http://www.nime.org/proceedings/2013/nime2013_145.pdf + title: cutting record --- a record without (or with) prior acoustic information + url: http://www.nime.org/proceedings/2013/nime2013_228.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Cerqueira2013 - abstract: 'SoundCraft is a framework that enables real-time data gathering from - aStarCraft 2 game to external software applications, allowing for musicalinterpretation - of the game''s internal structure and strategies in novel ways.While players battle - each other for victory within the game world, a customStarCraft 2 map collects - and writes out data about players'' decision-making,performance, and current focus - on the map. This data is parsed and transmittedover Open Sound Control (OSC) in - real-time, becoming the source for thesoundscape that accompanies the player''s - game. Using SoundCraft, we havecomposed a musical work for two em StarCraft 2 - players, entitled GG Music. Thispaper details the technical and aesthetic development - of SoundCraft, includingdata collection and sonic mapping. Please see the attached - video file for a performance of GG Music using theSoundCraft framework.' + ID: Klugel2013 + abstract: 'Controlling the timbre generated by an audio synthesizerin a goal-oriented + way requires a profound understandingof the synthesizer''s manifold structural + parameters. 
Especially shapingtimbre expressively to communicate emotional affect + requires expertise.Therefore, novices in particular may not be able to adequately + control timbrein viewof articulating the wealth of affects musically. In this + context, the focus ofthis paper is the development of a model that can represent + a relationshipbetween timbre and an expected emotional affect . The results of + the evaluationof the presented model are encouraging which supports its use in + steering oraugmenting the control of the audio synthesis. We explicitly envision + thispaper as a contribution to the field of Synthesis by Analysis in the broadersense, + albeit being potentially suitable to other related domains.' address: 'Daejeon, Republic of Korea' - author: Mark Cerqueira and Spencer Salazar and Ge Wang - bibtex: "@inproceedings{Cerqueira2013,\n abstract = {SoundCraft is a framework that\ - \ enables real-time data gathering from aStarCraft 2 game to external software\ - \ applications, allowing for musicalinterpretation of the game's internal structure\ - \ and strategies in novel ways.While players battle each other for victory within\ - \ the game world, a customStarCraft 2 map collects and writes out data about players'\ - \ decision-making,performance, and current focus on the map. This data is parsed\ - \ and transmittedover Open Sound Control (OSC) in real-time, becoming the source\ - \ for thesoundscape that accompanies the player's game. Using SoundCraft, we havecomposed\ - \ a musical work for two em StarCraft 2 players, entitled GG Music. Thispaper\ - \ details the technical and aesthetic development of SoundCraft, includingdata\ - \ collection and sonic mapping. 
Please see the attached video file for a performance\ - \ of GG Music using theSoundCraft framework.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Mark Cerqueira and Spencer Salazar and Ge Wang},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178492},\n issn = {2220-4806},\n keywords\ - \ = {interactive sonification, interactive game music, StarCraft 2},\n month =\ - \ {May},\n pages = {243--247},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {SoundCraft: Transducing StarCraft 2},\n url = {http://www.nime.org/proceedings/2013/nime2013_146.pdf},\n\ + author: Niklas Klügel and Georg Groh + bibtex: "@inproceedings{Klugel2013,\n abstract = {Controlling the timbre generated\ + \ by an audio synthesizerin a goal-oriented way requires a profound understandingof\ + \ the synthesizer's manifold structural parameters. Especially shapingtimbre expressively\ + \ to communicate emotional affect requires expertise.Therefore, novices in particular\ + \ may not be able to adequately control timbrein viewof articulating the wealth\ + \ of affects musically. In this context, the focus ofthis paper is the development\ + \ of a model that can represent a relationshipbetween timbre and an expected emotional\ + \ affect . 
The results of the evaluationof the presented model are encouraging\ + \ which supports its use in steering oraugmenting the control of the audio synthesis.\ + \ We explicitly envision thispaper as a contribution to the field of Synthesis\ + \ by Analysis in the broadersense, albeit being potentially suitable to other\ + \ related domains.},\n address = {Daejeon, Republic of Korea},\n author = {Niklas\ + \ Kl{\\\"u}gel and Georg Groh},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178586},\n\ + \ issn = {2220-4806},\n keywords = {Emotional affect,Timbre, Machine Learning,\ + \ Deep Belief Networks, Analysis by Synthesis},\n month = {May},\n pages = {525--530},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Towards\ + \ Mapping Timbre to Emotional Affect},\n url = {http://www.nime.org/proceedings/2013/nime2013_23.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178492 + doi: 10.5281/zenodo.1178586 issn: 2220-4806 - keywords: 'interactive sonification, interactive game music, StarCraft 2' + keywords: 'Emotional affect,Timbre, Machine Learning, Deep Belief Networks, Analysis + by Synthesis' month: May - pages: 243--247 + pages: 525--530 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'SoundCraft: Transducing StarCraft 2' - url: http://www.nime.org/proceedings/2013/nime2013_146.pdf + title: Towards Mapping Timbre to Emotional Affect + url: http://www.nime.org/proceedings/2013/nime2013_23.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Fan2013 - abstract: 'We show how body-centric sensing can be integrated in musical interface - toenable more flexible gestural control. We present a barehanded body-centricinteraction - paradigm where users are able to interact in a spontaneous waythroughperforming - gestures. 
The paradigm employs a wearable camera and see-throughdisplay to enable - flexible interaction in the 3D space. We designed andimplemented a prototype called - Air Violin, a virtual musical instrument usingdepth camera, to demonstrate the - proposed interaction paradigm. We describedthe design and implementation details.' + ID: Greenlee2013 + abstract: 'In the design of recent systems, I have advanced techniques that positiongraphic + synthesis methods in the context of solo, improvisational performance.Here, the + primary interfaces for musical action are prepared works on paper,scanned by digital + video cameras which in turn pass image data on to softwarefor analysis and interpretation + as sound synthesis and signal processingprocedures. The focus of this paper is + on one of these techniques, a process Idescribe as graphic waveshaping. A discussion + of graphic waveshaping in basicform and as utilized in my performance work, (title + omitted), is offered. Inthe latter case, the performer''s objective is to guide + the interpretation ofimages as sound, constantly tuning and retuning the conversion + while selectingand scanning images from a large catalog. Due to the erratic nature + of thesystem and the precondition that image to sound relationships are unfixed, + theperformance situation is replete with the discovery of new sounds and thecircumstances + that bring them into play. Graphic waveshaping may be understood as non-linear + distortion synthesis withtime-varying transfer functions stemming from visual + scan lines. As a form ofgraphic synthesis, visual images function as motivations + for sound generation.There is a strategy applied for creating one out of the other. 
+ However, counterto compositionally oriented forms of graphic synthesis where one + may assignimage characteristics to musical parameters such as pitches, durations,dynamics, + etc., graphic waveshaping is foremost a processing technique, as itdistorts incoming + signals according to graphically derived transfer functions.As such, it may also + be understood as an audio effect; one that in myimplementations is particularly + feedback dependent, oriented towards shapingthe erratic behavior of synthesis + patches written in Max/MSP/Jitter. Used inthis manner, graphic waveshaping elicits + an emergent system behaviorconditioned by visual features.' address: 'Daejeon, Republic of Korea' - author: Xin Fan and Georg Essl - bibtex: "@inproceedings{Fan2013,\n abstract = {We show how body-centric sensing\ - \ can be integrated in musical interface toenable more flexible gestural control.\ - \ We present a barehanded body-centricinteraction paradigm where users are able\ - \ to interact in a spontaneous waythroughperforming gestures. The paradigm employs\ - \ a wearable camera and see-throughdisplay to enable flexible interaction in the\ - \ 3D space. We designed andimplemented a prototype called Air Violin, a virtual\ - \ musical instrument usingdepth camera, to demonstrate the proposed interaction\ - \ paradigm. 
We describedthe design and implementation details.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Xin Fan and Georg Essl},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178512},\n issn = {2220-4806},\n keywords = {NIME, musical\ - \ instrument, interaction, gesture, Kinect},\n month = {May},\n pages = {122--123},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Air Violin:\ - \ A Body-centric Style Musical Instrument},\n url = {http://www.nime.org/proceedings/2013/nime2013_149.pdf},\n\ + author: Shawn Greenlee + bibtex: "@inproceedings{Greenlee2013,\n abstract = {In the design of recent systems,\ + \ I have advanced techniques that positiongraphic synthesis methods in the context\ + \ of solo, improvisational performance.Here, the primary interfaces for musical\ + \ action are prepared works on paper,scanned by digital video cameras which in\ + \ turn pass image data on to softwarefor analysis and interpretation as sound\ + \ synthesis and signal processingprocedures. The focus of this paper is on one\ + \ of these techniques, a process Idescribe as graphic waveshaping. A discussion\ + \ of graphic waveshaping in basicform and as utilized in my performance work,\ + \ (title omitted), is offered. Inthe latter case, the performer's objective is\ + \ to guide the interpretation ofimages as sound, constantly tuning and retuning\ + \ the conversion while selectingand scanning images from a large catalog. Due\ + \ to the erratic nature of thesystem and the precondition that image to sound\ + \ relationships are unfixed, theperformance situation is replete with the discovery\ + \ of new sounds and thecircumstances that bring them into play. Graphic waveshaping\ + \ may be understood as non-linear distortion synthesis withtime-varying transfer\ + \ functions stemming from visual scan lines. 
As a form ofgraphic synthesis, visual\ + \ images function as motivations for sound generation.There is a strategy applied\ + \ for creating one out of the other. However, counterto compositionally oriented\ + \ forms of graphic synthesis where one may assignimage characteristics to musical\ + \ parameters such as pitches, durations,dynamics, etc., graphic waveshaping is\ + \ foremost a processing technique, as itdistorts incoming signals according to\ + \ graphically derived transfer functions.As such, it may also be understood as\ + \ an audio effect; one that in myimplementations is particularly feedback dependent,\ + \ oriented towards shapingthe erratic behavior of synthesis patches written in\ + \ Max/MSP/Jitter. Used inthis manner, graphic waveshaping elicits an emergent\ + \ system behaviorconditioned by visual features.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Shawn Greenlee},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178534},\n\ + \ issn = {2220-4806},\n keywords = {Graphic waveshaping, graphic synthesis, waveshaping\ + \ synthesis, graphic sound, drawn sound},\n month = {May},\n pages = {287--290},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Graphic\ + \ Waveshaping},\n url = {http://www.nime.org/proceedings/2013/nime2013_232.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178512 + doi: 10.5281/zenodo.1178534 issn: 2220-4806 - keywords: 'NIME, musical instrument, interaction, gesture, Kinect' + keywords: 'Graphic waveshaping, graphic synthesis, waveshaping synthesis, graphic + sound, drawn sound' month: May - pages: 122--123 + pages: 287--290 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Air Violin: A Body-centric Style Musical Instrument' - url: 
http://www.nime.org/proceedings/2013/nime2013_149.pdf + title: Graphic Waveshaping + url: http://www.nime.org/proceedings/2013/nime2013_232.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Wang2013 - abstract: 'By building a wired passive stylus we have added pressure sensitivity - toexisting capacitive touch screen devices for less than ' + ID: Park2013c + abstract: 'In this paper we present a highly expressive, robust, and easy-to-build + systemthat provides force-feedback interaction for mobile computing devices (MCD).Our + system, which we call Fortissimo (ff), utilizes standard built-inaccelerometer + measurements in conjunction with generic foam padding that can beeasily placed + under a device to render an expressive force-feedback performancesetup. Fortissimo + allows for musically expressive user-interaction with addedforce-feedback which + is integral for any musical controller --a feature that isabsent for touchscreen-centric + MCDs. This paper details ff core concepts,hardware and software designs, and expressivity + of musical features.' address: 'Daejeon, Republic of Korea' - author: Johnty Wang and Nicolas d'Alessandro and Aura Pon and Sidney Fels - bibtex: "@inproceedings{Wang2013,\n abstract = {By building a wired passive stylus\ - \ we have added pressure sensitivity toexisting capacitive touch screen devices\ - \ for less than },\n address = {Daejeon, Republic of Korea},\n author = {Johnty\ - \ Wang and Nicolas d'Alessandro and Aura Pon and Sidney Fels},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178686},\n issn = {2220-4806},\n keywords = {10 in materials,\ - \ about1/10th the cost of existing solutions. 
The stylus makes use of the built\ - \ inaudio interface that is available on most smartphones and tablets on the markettoday.\ - \ Limitations of the device include the physical constraint of wires, theoccupation\ - \ of one audio input and output channel, and increased latency equalto the period\ - \ of at least one audio buffer duration. The stylus has beendemonstrated in two\ - \ cases thus far: a visual musical score drawing and asinging synthesis application.},\n\ - \ month = {May},\n pages = {input interfaces, touch screens, tablets, pressure-sensitive,\ - \ low-cost},\n publisher = {Graduate School of Culture Technology, KAIST},\n title\ - \ = {PENny: An Extremely Low-Cost Pressure-Sensitive Stylus for Existing Capacitive\ - \ Touchscreens},\n url = {http://www.nime.org/proceedings/2013/nime2013_150.pdf},\n\ + author: Tae Hong Park and Oriol Nieto + bibtex: "@inproceedings{Park2013c,\n abstract = {In this paper we present a highly\ + \ expressive, robust, and easy-to-build systemthat provides force-feedback interaction\ + \ for mobile computing devices (MCD).Our system, which we call Fortissimo (ff),\ + \ utilizes standard built-inaccelerometer measurements in conjunction with generic\ + \ foam padding that can beeasily placed under a device to render an expressive\ + \ force-feedback performancesetup. Fortissimo allows for musically expressive\ + \ user-interaction with addedforce-feedback which is integral for any musical\ + \ controller --a feature that isabsent for touchscreen-centric MCDs. 
This paper\ + \ details ff core concepts,hardware and software designs, and expressivity of\ + \ musical features.},\n address = {Daejeon, Republic of Korea},\n author = {Tae\ + \ Hong Park and Oriol Nieto},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178638},\n\ + \ issn = {2220-4806},\n keywords = {force-feedback, expression, mobile computing\ + \ devices, mobile music},\n month = {May},\n pages = {291--294},\n publisher =\ + \ {Graduate School of Culture Technology, KAIST},\n title = {Fortissimo: Force-Feedback\ + \ for Mobile Devices},\n url = {http://www.nime.org/proceedings/2013/nime2013_233.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178686 + doi: 10.5281/zenodo.1178638 issn: 2220-4806 - keywords: '10 in materials, about1/10th the cost of existing solutions. The stylus - makes use of the built inaudio interface that is available on most smartphones - and tablets on the markettoday. Limitations of the device include the physical - constraint of wires, theoccupation of one audio input and output channel, and - increased latency equalto the period of at least one audio buffer duration. The - stylus has beendemonstrated in two cases thus far: a visual musical score drawing - and asinging synthesis application.' 
+ keywords: 'force-feedback, expression, mobile computing devices, mobile music' month: May - pages: 'input interfaces, touch screens, tablets, pressure-sensitive, low-cost' + pages: 291--294 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'PENny: An Extremely Low-Cost Pressure-Sensitive Stylus for Existing Capacitive - Touchscreens' - url: http://www.nime.org/proceedings/2013/nime2013_150.pdf + title: 'Fortissimo: Force-Feedback for Mobile Devices' + url: http://www.nime.org/proceedings/2013/nime2013_233.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Johnston2013 - abstract: 'This paper describes an audio-visual performance system based on real-timefluid - simulation. The aim is to provide a rich environment for works whichblur the boundaries - between dance and instrumental performance -- and sound andvisuals -- while maintaining - transparency for audiences and new performers. The system uses infra-red motion - tracking to allow performers to manipulate areal-time fluid simulation, which - in turn provides control data forcomputer-generated audio and visuals. It also - provides a control andconfiguration system which allows the behaviour of the interactive - system to bechanged over time, enabling the structure within which interactions - take placeto be `composed''.' + ID: Scott2013 + abstract: 'Digital music technology has transformed the listener experience and + creatednew avenues for creative interaction and expression within the musical + domain.The barrier to music creation, distribution and collaboration has been + reduced,leading to entirely new ecosystems of musical experience. Software editingtools + such as digital audio workstations (DAW) allow nearly limitlessmanipulation of + source audio into new sonic elements and textures and havepromoted a culture of + recycling and repurposing of content via mashups andremixes. 
We present a multi-touch + application that allows a user to customizetheir listening experience by blending + various versions of a song in real time.' address: 'Daejeon, Republic of Korea' - author: Andrew Johnston - bibtex: "@inproceedings{Johnston2013,\n abstract = {This paper describes an audio-visual\ - \ performance system based on real-timefluid simulation. The aim is to provide\ - \ a rich environment for works whichblur the boundaries between dance and instrumental\ - \ performance -- and sound andvisuals -- while maintaining transparency for audiences\ - \ and new performers. The system uses infra-red motion tracking to allow performers\ - \ to manipulate areal-time fluid simulation, which in turn provides control data\ - \ forcomputer-generated audio and visuals. It also provides a control andconfiguration\ - \ system which allows the behaviour of the interactive system to bechanged over\ - \ time, enabling the structure within which interactions take placeto be `composed'.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Andrew Johnston},\n booktitle\ + author: Jeffrey Scott and Mickey Moorhead and Justin Chapman and Ryan Schwabe and + Youngmoo E. Kim + bibtex: "@inproceedings{Scott2013,\n abstract = {Digital music technology has transformed\ + \ the listener experience and creatednew avenues for creative interaction and\ + \ expression within the musical domain.The barrier to music creation, distribution\ + \ and collaboration has been reduced,leading to entirely new ecosystems of musical\ + \ experience. Software editingtools such as digital audio workstations (DAW) allow\ + \ nearly limitlessmanipulation of source audio into new sonic elements and textures\ + \ and havepromoted a culture of recycling and repurposing of content via mashups\ + \ andremixes. 
We present a multi-touch application that allows a user to customizetheir\ + \ listening experience by blending various versions of a song in real time.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Jeffrey Scott and Mickey\ + \ Moorhead and Justin Chapman and Ryan Schwabe and Youngmoo E. Kim},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178572},\n issn = {2220-4806},\n keywords\ - \ = {performance, dance, fluid simulation, composition},\n month = {May},\n pages\ - \ = {132--135},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Fluid Simulation as Full Body Audio-Visual Instrument},\n url = {http://www.nime.org/proceedings/2013/nime2013_151.pdf},\n\ - \ year = {2013}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1178660},\n issn = {2220-4806},\n keywords\ + \ = {Multi-track, Multi-touch, Mobile devices, Interactive media},\n month = {May},\n\ + \ pages = {417--420},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Personalized Song Interaction Using a Multi Touch Interface},\n url\ + \ = {http://www.nime.org/proceedings/2013/nime2013_234.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178572 + doi: 10.5281/zenodo.1178660 issn: 2220-4806 - keywords: 'performance, dance, fluid simulation, composition' + keywords: 'Multi-track, Multi-touch, Mobile devices, Interactive media' month: May - pages: 132--135 + pages: 417--420 publisher: 'Graduate School of Culture Technology, KAIST' - title: Fluid Simulation as Full Body Audio-Visual Instrument - url: http://www.nime.org/proceedings/2013/nime2013_151.pdf + title: Personalized Song Interaction Using a Multi Touch Interface + url: http://www.nime.org/proceedings/2013/nime2013_234.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Fan2013a - abstract: 'The BioSync 
interface presented in this paper merges the heart-rate basedparadigm - with the brain-wave based paradigm into one mobile unit which isscalable for large - audience real-time applications. The goal of BioSync is toprovide a hybrid interface, - which uses audience biometric responses foraudience participation techniques. - To provide an affordable and scalablesolution, BioSync collects the user''s heart - rate via mobile phone pulseoximetry and the EEG data via Bluetooth communication - with the off-the-shelfMindWave Mobile hardware. Various interfaces have been designed - and implementedin the development of audience participation techniques and systems. - In thedesign and concept of BioSync, we first summarize recent interface research - foraudience participation within the NIME-related context, followed by the outlineof - the BioSync methodology and interface design. We then present a techniquefor dynamic - tempo control based on the audience biometric responses and anearly prototype - of a mobile dual-channel pulse oximetry and EEG bi-directionalinterface for iOS - device (BioSync). Finally, we present discussions and ideasfor future applications, - as well as plans for a series of experiments, whichinvestigate if temporal parameters - of an audience''s physiological metricsencourage crowd synchronization during - a live event or performance, acharacteristic, which we see as having great potential - in the creation offuture live musical and audiovisual performance applications.' + ID: Batula2013 + abstract: "We present a system which allows an adult-sized humanoid to determine\ + \ whetheror not it is correctly playing a pitched percussive instrument to produce\ + \ adesired sound. As hu,\nman musicians utilize sensory feedback to determine\ + \ ifthey are successfully using their instruments to generate certain pitches,robot\ + \ performers should be capable of the same feat. 
We present a noteclassification\ + \ algorithm that uses auditory and haptic feedback to decide if anote was well-\ + \ or poorly-struck. This system is demonstrated using Hubo, anadult-sized humanoid,\ + \ which has been enabled to actu,\nate pitched pipes usingmallets. We show that,\ + \ with this system, Hubo is able to determine whether ornot a note was played\ + \ correctly." address: 'Daejeon, Republic of Korea' - author: Yuan-Yi Fan and Myles Sciotto - bibtex: "@inproceedings{Fan2013a,\n abstract = {The BioSync interface presented\ - \ in this paper merges the heart-rate basedparadigm with the brain-wave based\ - \ paradigm into one mobile unit which isscalable for large audience real-time\ - \ applications. The goal of BioSync is toprovide a hybrid interface, which uses\ - \ audience biometric responses foraudience participation techniques. To provide\ - \ an affordable and scalablesolution, BioSync collects the user's heart rate via\ - \ mobile phone pulseoximetry and the EEG data via Bluetooth communication with\ - \ the off-the-shelfMindWave Mobile hardware. Various interfaces have been designed\ - \ and implementedin the development of audience participation techniques and systems.\ - \ In thedesign and concept of BioSync, we first summarize recent interface research\ - \ foraudience participation within the NIME-related context, followed by the outlineof\ - \ the BioSync methodology and interface design. We then present a techniquefor\ - \ dynamic tempo control based on the audience biometric responses and anearly\ - \ prototype of a mobile dual-channel pulse oximetry and EEG bi-directionalinterface\ - \ for iOS device (BioSync). 
Finally, we present discussions and ideasfor future\ - \ applications, as well as plans for a series of experiments, whichinvestigate\ - \ if temporal parameters of an audience's physiological metricsencourage crowd\ - \ synchronization during a live event or performance, acharacteristic, which we\ - \ see as having great potential in the creation offuture live musical and audiovisual\ - \ performance applications.},\n address = {Daejeon, Republic of Korea},\n author\ - \ = {Yuan-Yi Fan and Myles Sciotto},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178514},\n\ - \ issn = {2220-4806},\n keywords = {Mobile, Biometrics, Synchronous Interaction,\ - \ Social, Audience, Experience},\n month = {May},\n pages = {248--251},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {BioSync: An Informed\ - \ Participatory Interface for Audience Dynamics and Audiovisual Content Co-creation\ - \ using Mobile PPG and {EEG}},\n url = {http://www.nime.org/proceedings/2013/nime2013_152.pdf},\n\ + author: Alyssa Batula and Manu Colacot and David Grunberg and Youngmoo Kim + bibtex: "@inproceedings{Batula2013,\n abstract = {We present a system which allows\ + \ an adult-sized humanoid to determine whetheror not it is correctly playing a\ + \ pitched percussive instrument to produce adesired sound. As hu,\nman musicians\ + \ utilize sensory feedback to determine ifthey are successfully using their instruments\ + \ to generate certain pitches,robot performers should be capable of the same feat.\ + \ We present a noteclassification algorithm that uses auditory and haptic feedback\ + \ to decide if anote was well- or poorly-struck. This system is demonstrated using\ + \ Hubo, anadult-sized humanoid, which has been enabled to actu,\nate pitched pipes\ + \ usingmallets. 
We show that, with this system, Hubo is able to determine whether\ + \ ornot a note was played correctly.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Alyssa Batula and Manu Colacot and David Grunberg and Youngmoo Kim},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178472},\n issn = {2220-4806},\n\ + \ keywords = {Musical robots, humanoids, auditory feedback, haptic feedback},\n\ + \ month = {May},\n pages = {295--300},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Using Audio and Haptic Feedback to Improve Pitched\ + \ Percussive Instrument Performance in Humanoids},\n url = {http://www.nime.org/proceedings/2013/nime2013_235.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178514 + doi: 10.5281/zenodo.1178472 issn: 2220-4806 - keywords: 'Mobile, Biometrics, Synchronous Interaction, Social, Audience, Experience' + keywords: 'Musical robots, humanoids, auditory feedback, haptic feedback' month: May - pages: 248--251 + pages: 295--300 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'BioSync: An Informed Participatory Interface for Audience Dynamics and Audiovisual - Content Co-creation using Mobile PPG and EEG' - url: http://www.nime.org/proceedings/2013/nime2013_152.pdf + title: Using Audio and Haptic Feedback to Improve Pitched Percussive Instrument + Performance in Humanoids + url: http://www.nime.org/proceedings/2013/nime2013_235.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Yang2013 - abstract: 'What is the function of visuals in the design of an augmented keyboardperformance - device with projection? We address this question by thinkingthrough the impact - of choices made in three examples on notions of locus ofattention, visual anticipation - and causal gestalt to articulate a space ofdesign choices. 
Visuals can emphasize - and deemphasize aspects of performanceand help clarify the role input has to the - performance. We suggest that thisprocess might help thinking through visual feedback - design in NIMEs withrespect to the performer or the audience.' + ID: Torresen2013 + abstract: 'A custom designed WLAN (Wireless Local Area Network) based sensor interface + ispresented in this paper. It is aimed at wirelessly interfacing a large varietyof + sensors to supplement built-in sensors in smart phones and media players.The target + application area is collection of human related motions andcondition to be applied + in musical applications. The interface is based oncommercially available units + and allows for up to nine sensors. The benefit ofusing WLAN based communication + is high data rate with low latency. Ourexperiments show that the average transmission + time is less than 2ms for asingle sensor. Further, it is operational for a whole + day without batteryrecharging.' address: 'Daejeon, Republic of Korea' - author: Qi Yang and Georg Essl - bibtex: "@inproceedings{Yang2013,\n abstract = {What is the function of visuals\ - \ in the design of an augmented keyboardperformance device with projection? We\ - \ address this question by thinkingthrough the impact of choices made in three\ - \ examples on notions of locus ofattention, visual anticipation and causal gestalt\ - \ to articulate a space ofdesign choices. 
Visuals can emphasize and deemphasize\ - \ aspects of performanceand help clarify the role input has to the performance.\ - \ We suggest that thisprocess might help thinking through visual feedback design\ - \ in NIMEs withrespect to the performer or the audience.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Qi Yang and Georg Essl},\n booktitle = {Proceedings\ + author: Jim Torresen and Yngve Hafting and Kristian Nymoen + bibtex: "@inproceedings{Torresen2013,\n abstract = {A custom designed WLAN (Wireless\ + \ Local Area Network) based sensor interface ispresented in this paper. It is\ + \ aimed at wirelessly interfacing a large varietyof sensors to supplement built-in\ + \ sensors in smart phones and media players.The target application area is collection\ + \ of human related motions andcondition to be applied in musical applications.\ + \ The interface is based oncommercially available units and allows for up to nine\ + \ sensors. The benefit ofusing WLAN based communication is high data rate with\ + \ low latency. Ourexperiments show that the average transmission time is less\ + \ than 2ms for asingle sensor. 
Further, it is operational for a whole day without\ + \ batteryrecharging.},\n address = {Daejeon, Republic of Korea},\n author = {Jim\ + \ Torresen and Yngve Hafting and Kristian Nymoen},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178694},\n issn = {2220-4806},\n keywords = {Visual feedback,\ - \ interaction, NIME, musical instrument, interaction, augmented keyboard, gesture,\ - \ Kinect},\n month = {May},\n pages = {252--255},\n publisher = {Graduate School\ - \ of Culture Technology, KAIST},\n title = {Visual Associations in Augmented Keyboard\ - \ Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_156.pdf},\n\ - \ year = {2013}\n}\n" + \ doi = {10.5281/zenodo.1178680},\n issn = {2220-4806},\n keywords = {wireless\ + \ communication, sensor data collection, WLAN, Arduino},\n month = {May},\n pages\ + \ = {337--340},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {A New Wi-Fi based Platform for Wireless Sensor Data Collection},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_236.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178694 + doi: 10.5281/zenodo.1178680 issn: 2220-4806 - keywords: 'Visual feedback, interaction, NIME, musical instrument, interaction, - augmented keyboard, gesture, Kinect' + keywords: 'wireless communication, sensor data collection, WLAN, Arduino' month: May - pages: 252--255 + pages: 337--340 publisher: 'Graduate School of Culture Technology, KAIST' - title: Visual Associations in Augmented Keyboard Performance - url: http://www.nime.org/proceedings/2013/nime2013_156.pdf + title: A New Wi-Fi based Platform for Wireless Sensor Data Collection + url: http://www.nime.org/proceedings/2013/nime2013_236.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Thorogood2013 - abstract: 'Soundscape 
composition in improvisation and performance contexts involves - manyprocesses that can become overwhelming for a performer, impacting on thequality - of the composition. One important task is evaluating the mood of acomposition - for evoking accurate associations and memories of a soundscape. Anew system that - uses supervised machine learning is presented for theacquisition and realtime - feedback of soundscape affect. A model of sound-scape mood is created by users - entering evaluations of audio environmentsusing a mobile device. The same device - then provides feedback to the user ofthe predicted mood of other audio environments. - We used a features vector ofTotal Loudness and MFCC extracted from an audio signal - to build a multipleregression models. The evaluation of the system shows the tool - is effective inpredicting soundscape affect.' + ID: Skogstad2013 + abstract: 'In this paper we present some custom designed filters for real-time motioncapture + applications. Our target application is so-called motion controllers,i.e. systems + that interpret hand motion for musical interaction. In earlierresearch we found + effective methods to design nearly optimal filters forreal-time applications. + However, to be able to design suitable filters for ourtarget application, it is + necessary to establish the typical frequency contentof the motion capture data + we want to filter. This will again allow us todetermine a reasonable cutoff frequency + for the filters. We have thereforeconducted an experiment in which we recorded + the hand motion of 20 subjects.The frequency spectra of these data together with + a method similar to theresidual analysis method were then used to determine reasonable + cutofffrequencies. Based on this experiment, we propose three cutoff frequencies + fordifferent scenarios and filtering needs: 5, 10 and 15 Hz, which corresponds + toheavy, medium and light filtering respectively. 
Finally, we propose a range + ofreal-time filters applicable to motion controllers. In particular, low-passfilters + and low-pass differentiators of degrees one and two, which in ourexperience are + the most useful filters for our target application.' address: 'Daejeon, Republic of Korea' - author: Miles Thorogood and Philippe Pasquier - bibtex: "@inproceedings{Thorogood2013,\n abstract = {Soundscape composition in improvisation\ - \ and performance contexts involves manyprocesses that can become overwhelming\ - \ for a performer, impacting on thequality of the composition. One important task\ - \ is evaluating the mood of acomposition for evoking accurate associations and\ - \ memories of a soundscape. Anew system that uses supervised machine learning\ - \ is presented for theacquisition and realtime feedback of soundscape affect.\ - \ A model of sound-scape mood is created by users entering evaluations of audio\ - \ environmentsusing a mobile device. The same device then provides feedback to\ - \ the user ofthe predicted mood of other audio environments. We used a features\ - \ vector ofTotal Loudness and MFCC extracted from an audio signal to build a multipleregression\ - \ models. 
The evaluation of the system shows the tool is effective inpredicting\ - \ soundscape affect.},\n address = {Daejeon, Republic of Korea},\n author = {Miles\ - \ Thorogood and Philippe Pasquier},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178674},\n\ - \ issn = {2220-4806},\n keywords = {soundscape, performance, machine learning,\ - \ audio features, affect grid},\n month = {May},\n pages = {256--260},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {Impress: A Machine\ - \ Learning Approach to Soundscape Affect Classification for a Music Performance\ - \ Environment},\n url = {http://www.nime.org/proceedings/2013/nime2013_157.pdf},\n\ - \ year = {2013}\n}\n" + author: Ståle A. Skogstad + bibtex: "@inproceedings{Skogstad2013,\n abstract = {In this paper we present some\ + \ custom designed filters for real-time motioncapture applications. Our target\ + \ application is so-called motion controllers,i.e. systems that interpret hand\ + \ motion for musical interaction. In earlierresearch we found effective methods\ + \ to design nearly optimal filters forreal-time applications. However, to be able\ + \ to design suitable filters for ourtarget application, it is necessary to establish\ + \ the typical frequency contentof the motion capture data we want to filter. This\ + \ will again allow us todetermine a reasonable cutoff frequency for the filters.\ + \ We have thereforeconducted an experiment in which we recorded the hand motion\ + \ of 20 subjects.The frequency spectra of these data together with a method similar\ + \ to theresidual analysis method were then used to determine reasonable cutofffrequencies.\ + \ Based on this experiment, we propose three cutoff frequencies fordifferent scenarios\ + \ and filtering needs: 5, 10 and 15 Hz, which corresponds toheavy, medium and\ + \ light filtering respectively. 
Finally, we propose a range ofreal-time filters\ + \ applicable to motion controllers. In particular, low-passfilters and low-pass\ + \ differentiators of degrees one and two, which in ourexperience are the most\ + \ useful filters for our target application.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {St{\\aa}le A. Skogstad},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178662},\n issn = {2220-4806},\n month = {May},\n pages =\ + \ {142--147},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Filtering Motion Capture Data for Real-Time Applications},\n url =\ + \ {http://www.nime.org/proceedings/2013/nime2013_238.pdf},\n year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178674 + doi: 10.5281/zenodo.1178662 issn: 2220-4806 - keywords: 'soundscape, performance, machine learning, audio features, affect grid' month: May - pages: 256--260 + pages: 142--147 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Impress: A Machine Learning Approach to Soundscape Affect Classification - for a Music Performance Environment' - url: http://www.nime.org/proceedings/2013/nime2013_157.pdf + title: Filtering Motion Capture Data for Real-Time Applications + url: http://www.nime.org/proceedings/2013/nime2013_238.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Park2013a - abstract: 'In this paper, we designed a sound effect device, which was applicable - forspray paint art process. For the applicability research of the device, wedesigned - a prototype which had a form not far off the traditional spray cans,using Arduino - and various sensors. Through the test process of the prototype,we verified the - elements that would be necessary to apply our newly designeddevice to real spray - paint art activities. 
Thus we checked the possibility ofvarious musical expressions - by expanding the functions of the designed device.' + ID: Kleinberger2013 + abstract: 'PAMDI is an electromechanical music controller based on an expansion + of the common metal music boxes. Our system enables an augmentation of the musical + properties by adding different musical channels triggered and parameterized by + natural gestures during the ``performance''''. All the channels are generated + form the original melody recorded once at the start. To capture and treat the + different expressive parameters both natural and intentional, our platform is + composed of a metallic structure supporting sensors. The measured values are processed + by an arduino system that finallysends the results by serial communication to + a Max/MSP patch for signaltreatment and modification. We will explain how our + embedded instrument aims to bring a certain awareness to the player of the mapping + and the potential musical freedom of the very specific -- and not that much automatic + --- instrument that is a music box. We will also address how our design tackles + the different questions of mapping, ergonomics and expressiveness while choosing + the controller modalities and the parameters to be sensed.' address: 'Daejeon, Republic of Korea' - author: Gibeom Park and Kyogu Lee - bibtex: "@inproceedings{Park2013a,\n abstract = {In this paper, we designed a sound\ - \ effect device, which was applicable forspray paint art process. For the applicability\ - \ research of the device, wedesigned a prototype which had a form not far off\ - \ the traditional spray cans,using Arduino and various sensors. Through the test\ - \ process of the prototype,we verified the elements that would be necessary to\ - \ apply our newly designeddevice to real spray paint art activities. 
Thus we checked\ - \ the possibility ofvarious musical expressions by expanding the functions of\ - \ the designed device.},\n address = {Daejeon, Republic of Korea},\n author =\ - \ {Gibeom Park and Kyogu Lee},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178634},\n\ - \ issn = {2220-4806},\n keywords = {Sound effect device, Spray paint art, Arduino,\ - \ Pure Data},\n month = {May},\n pages = {65--68},\n publisher = {Graduate School\ - \ of Culture Technology, KAIST},\n title = {Sound Spray --- can-shaped sound effect\ - \ device},\n url = {http://www.nime.org/proceedings/2013/nime2013_158.pdf},\n\ + author: Rebecca Kleinberger + bibtex: "@inproceedings{Kleinberger2013,\n abstract = {PAMDI is an electromechanical\ + \ music controller based on an expansion of the common metal music boxes. Our\ + \ system enables an augmentation of the musical properties by adding different\ + \ musical channels triggered and parameterized by natural gestures during the\ + \ ``performance''. All the channels are generated form the original melody recorded\ + \ once at the start. To capture and treat the different expressive parameters\ + \ both natural and intentional, our platform is composed of a metallic structure\ + \ supporting sensors. The measured values are processed by an arduino system that\ + \ finallysends the results by serial communication to a Max/MSP patch for signaltreatment\ + \ and modification. We will explain how our embedded instrument aims to bring\ + \ a certain awareness to the player of the mapping and the potential musical freedom\ + \ of the very specific -- and not that much automatic --- instrument that is a\ + \ music box. 
We will also address how our design tackles the different questions\ + \ of mapping, ergonomics and expressiveness while choosing the controller modalities\ + \ and the parameters to be sensed.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Rebecca Kleinberger},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178588},\n\ + \ issn = {2220-4806},\n keywords = {Tangible interface, musical controller, music\ + \ box, mechanical and electronic coupling, mapping.},\n month = {May},\n pages\ + \ = {19--20},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {{PAM}DI Music Box: Primarily Analogico-Mechanical, Digitally Iterated\ + \ Music Box},\n url = {http://www.nime.org/proceedings/2013/nime2013_24.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178634 + doi: 10.5281/zenodo.1178588 issn: 2220-4806 - keywords: 'Sound effect device, Spray paint art, Arduino, Pure Data' + keywords: 'Tangible interface, musical controller, music box, mechanical and electronic + coupling, mapping.' month: May - pages: 65--68 + pages: 19--20 publisher: 'Graduate School of Culture Technology, KAIST' - title: Sound Spray --- can-shaped sound effect device - url: http://www.nime.org/proceedings/2013/nime2013_158.pdf + title: 'PAMDI Music Box: Primarily Analogico-Mechanical, Digitally Iterated Music + Box' + url: http://www.nime.org/proceedings/2013/nime2013_24.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Tobise2013 - abstract: 'In guitar performance, fingering is an important factor, and complicated. - In particular, the fingering of the left hand comprises various relationshipsbetween - the finger and the string, such as a finger touching the strings, afinger pressing - the strings, and a finger releasing the strings. 
The recognition of the precise - fingering of the left hand is applied to aself-learning support system, which - is able to detect strings being muted by afinger, and which transcribes music - automatically, including the details offingering techniques. Therefore, the goal - of our study is the construction of a system forrecognizing the touch of strings - for the guitar. We propose a method for recognizing the touch of strings based - on theconductive characteristics of strings and frets. We develop a prototype - system, and evaluate its effectiveness.Furthermore, we propose an application - which utilizes our system.' + ID: McPherson2013 + abstract: 'This paper presents a portable optical measurement system for capturingcontinuous + key motion on any piano. Very few concert venues have MIDI-enabledpianos, and + many performers depend on the versatile but discontinued MoogPianoBar to provide + MIDI from a conventional acoustic instrument. The scannerhardware presented in + this paper addresses the growing need for alternativesolutions while surpassing + existing systems in the level of detail measured.Continuous key position on both + black and white keys is gathered at 1kHz samplerate. Software extracts traditional + and novel features of keyboard touch fromeach note, which can be flexibly mapped + to sound using MIDI or Open SoundControl. RGB LEDs provide rich visual feedback + to assist the performer ininteracting with more complex sound mapping arrangements. + An application ispresented to the magnetic resonator piano, an electromagnetically-augmentedacoustic + grand piano which is performed using continuous key positionmeasurements.' address: 'Daejeon, Republic of Korea' - author: Hayami Tobise and Yoshinari Takegawa and Tsutomu Terada and Masahiko Tsukamoto - bibtex: "@inproceedings{Tobise2013,\n abstract = {In guitar performance, fingering\ - \ is an important factor, and complicated. 
In particular, the fingering of the\ - \ left hand comprises various relationshipsbetween the finger and the string,\ - \ such as a finger touching the strings, afinger pressing the strings, and a finger\ - \ releasing the strings. The recognition of the precise fingering of the left\ - \ hand is applied to aself-learning support system, which is able to detect strings\ - \ being muted by afinger, and which transcribes music automatically, including\ - \ the details offingering techniques. Therefore, the goal of our study is the\ - \ construction of a system forrecognizing the touch of strings for the guitar.\ - \ We propose a method for recognizing the touch of strings based on theconductive\ - \ characteristics of strings and frets. We develop a prototype system, and evaluate\ - \ its effectiveness.Furthermore, we propose an application which utilizes our\ - \ system.},\n address = {Daejeon, Republic of Korea},\n author = {Hayami Tobise\ - \ and Yoshinari Takegawa and Tsutomu Terada and Masahiko Tsukamoto},\n booktitle\ + author: Andrew McPherson + bibtex: "@inproceedings{McPherson2013,\n abstract = {This paper presents a portable\ + \ optical measurement system for capturingcontinuous key motion on any piano.\ + \ Very few concert venues have MIDI-enabledpianos, and many performers depend\ + \ on the versatile but discontinued MoogPianoBar to provide MIDI from a conventional\ + \ acoustic instrument. The scannerhardware presented in this paper addresses the\ + \ growing need for alternativesolutions while surpassing existing systems in the\ + \ level of detail measured.Continuous key position on both black and white keys\ + \ is gathered at 1kHz samplerate. Software extracts traditional and novel features\ + \ of keyboard touch fromeach note, which can be flexibly mapped to sound using\ + \ MIDI or Open SoundControl. RGB LEDs provide rich visual feedback to assist the\ + \ performer ininteracting with more complex sound mapping arrangements. 
An application\ + \ ispresented to the magnetic resonator piano, an electromagnetically-augmentedacoustic\ + \ grand piano which is performed using continuous key positionmeasurements.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Andrew McPherson},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178676},\n issn = {2220-4806},\n keywords\ - \ = {Guitar, Touched strings, Fingering recognition},\n month = {May},\n pages\ - \ = {261--266},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Construction of a System for Recognizing Touch of Strings for Guitar},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_159.pdf},\n year = {2013}\n\ - }\n" + \ Expression},\n doi = {10.5281/zenodo.1178610},\n issn = {2220-4806},\n keywords\ + \ = {Piano, keyboard, optical sensing, gesture sensing, visual feedback, mapping,\ + \ magnetic resonator piano},\n month = {May},\n pages = {152--157},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {Portable Measurement\ + \ and Mapping of Continuous Piano Gesture},\n url = {http://www.nime.org/proceedings/2013/nime2013_240.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178676 + doi: 10.5281/zenodo.1178610 issn: 2220-4806 - keywords: 'Guitar, Touched strings, Fingering recognition' + keywords: 'Piano, keyboard, optical sensing, gesture sensing, visual feedback, mapping, + magnetic resonator piano' month: May - pages: 261--266 + pages: 152--157 publisher: 'Graduate School of Culture Technology, KAIST' - title: Construction of a System for Recognizing Touch of Strings for Guitar - url: http://www.nime.org/proceedings/2013/nime2013_159.pdf + title: Portable Measurement and Mapping of Continuous Piano Gesture + url: http://www.nime.org/proceedings/2013/nime2013_240.pdf year: 
2013 - ENTRYTYPE: inproceedings - ID: Tokunaga2013 - abstract: 'We are exploring the design and implementation of artificial expressions,kinetic - audio-visual representations of real-time physiological data whichreflect emotional - and cognitive state. In this work we demonstrate a prototype,the Enactive Mandala, - which maps real-time EEG signals to modulate ambientmusic and animated visual - music. The design draws inspiration from the visualmusic of the Whitney brothers - as well as traditional meditative practices.Transparent real-time audio-visual - feedback ofbrainwave qualities supports intuitive insight into the connection - betweenthoughts and physiological states. Our method is constructive: by linkingphysiology - with an dynamic a/v display, and embedding the human-machine systemin the social - contexts that arise in real-time play, we hope to seed new, andas yet unknown - forms, of non-verbal communication, or ``artificialexpressions''''.' + ID: Tarakajian2013 + abstract: 'Mira is an iPad app for controlling Max patchers in real time with minimalconfiguration. + This submission includes a paper describing Mira''s design andimplementation, + as well as a demo showing how Mira works with Max.The Mira iPad app discovers + open Max patchers automatically using the Bonjourprotocol, connects to them over + WiFi and negotiates a description of the Maxpatcher. As objects change position + and appearance, Mira makes sure that theinterface on the iPad is kept up to date. + Mira eliminates the need for anexplicit mapping step between the interface and + the system being controlled.The user is never asked to input an IP address, nor + to configure the mappingbetween interface objects on the iPad and those in the + Max patcher. So theprototyping composer is free to rapidly configure and reconfigure + theinterface.' address: 'Daejeon, Republic of Korea' - author: Tomohiro Tokunaga and Michael J. 
Lyons - bibtex: "@inproceedings{Tokunaga2013,\n abstract = {We are exploring the design\ - \ and implementation of artificial expressions,kinetic audio-visual representations\ - \ of real-time physiological data whichreflect emotional and cognitive state.\ - \ In this work we demonstrate a prototype,the Enactive Mandala, which maps real-time\ - \ EEG signals to modulate ambientmusic and animated visual music. The design draws\ - \ inspiration from the visualmusic of the Whitney brothers as well as traditional\ - \ meditative practices.Transparent real-time audio-visual feedback ofbrainwave\ - \ qualities supports intuitive insight into the connection betweenthoughts and\ - \ physiological states. Our method is constructive: by linkingphysiology with\ - \ an dynamic a/v display, and embedding the human-machine systemin the social\ - \ contexts that arise in real-time play, we hope to seed new, andas yet unknown\ - \ forms, of non-verbal communication, or ``artificialexpressions''.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {Tomohiro Tokunaga and Michael J.\ - \ Lyons},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178678},\n issn = {2220-4806},\n\ - \ keywords = {Brain-computer Interfaces, BCI, EEG, Sonification, Visualization,\ - \ Artificial Expressions, NIME, Visual Music},\n month = {May},\n pages = {118--119},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Enactive\ - \ Mandala: Audio-visualizing Brain Waves},\n url = {http://www.nime.org/proceedings/2013/nime2013_16.pdf},\n\ + author: Sam Tarakajian and David Zicarelli and Joshua Clayton + bibtex: "@inproceedings{Tarakajian2013,\n abstract = {Mira is an iPad app for controlling\ + \ Max patchers in real time with minimalconfiguration. 
This submission includes\ + \ a paper describing Mira's design andimplementation, as well as a demo showing\ + \ how Mira works with Max.The Mira iPad app discovers open Max patchers automatically\ + \ using the Bonjourprotocol, connects to them over WiFi and negotiates a description\ + \ of the Maxpatcher. As objects change position and appearance, Mira makes sure\ + \ that theinterface on the iPad is kept up to date. Mira eliminates the need for\ + \ anexplicit mapping step between the interface and the system being controlled.The\ + \ user is never asked to input an IP address, nor to configure the mappingbetween\ + \ interface objects on the iPad and those in the Max patcher. So theprototyping\ + \ composer is free to rapidly configure and reconfigure theinterface.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {Sam Tarakajian and David Zicarelli\ + \ and Joshua Clayton},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178670},\n\ + \ issn = {2220-4806},\n keywords = {NIME, Max/MSP/Jitter, Mira, ipad, osc, bonjour,\ + \ zeroconf},\n month = {May},\n pages = {421--426},\n publisher = {Graduate School\ + \ of Culture Technology, KAIST},\n title = {Mira: Liveness in iPad Controllers\ + \ for Max/MSP},\n url = {http://www.nime.org/proceedings/2013/nime2013_241.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178678 + doi: 10.5281/zenodo.1178670 issn: 2220-4806 - keywords: 'Brain-computer Interfaces, BCI, EEG, Sonification, Visualization, Artificial - Expressions, NIME, Visual Music' + keywords: 'NIME, Max/MSP/Jitter, Mira, ipad, osc, bonjour, zeroconf' month: May - pages: 118--119 + pages: 421--426 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Enactive Mandala: Audio-visualizing Brain Waves' - url: 
http://www.nime.org/proceedings/2013/nime2013_16.pdf + title: 'Mira: Liveness in iPad Controllers for Max/MSP' + url: http://www.nime.org/proceedings/2013/nime2013_241.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Dobda2013 - abstract: 'Breaking musical and creative expression into elements, layers, and formulas, - we explore how live listeners create unique sonic experiences from a palette of - these elements and their interactions. Bringing us to present-day creative applications, - a social and historical overview of silent disco is presented. The advantages - of this active listening interface are outlined by the author''s expressions requiring - discrete elements, such as binaural beats, 3D audio effects, and multiple live - music acts in the same space. Events and prototypes as well as hardware and software - proposals for live multi-listener manipulation of multielemental sound and music - are presented. Examples in audio production, sound healing, music composition, - tempo phasing, and spatial audio illustrate the applications.' + ID: Kim2013 + abstract: 'We discuss how to model "gestures" in music performance with statistical + latent-states models. A music performance can be described with compositional + and expressive properties varying over time. In those property changes we often + observe particular patterns, and such a pattern can be understood as a "gesture", + since it serves as a medium transferring specific emotions. Assuming a finite + number of latent states on each property value changes, we can describe those + gestures with statistical latent-states models, and train them by unsupervised + learning algorithms. In addition, model entropy provides us a measure for different + effects of each properties on the gesture implementation. Test result on some + of real performances indicates that the trained models could capture the structure + of gestures observed in the given performances, and detect their boundaries. 
The + entropy-based measure was informative to understand the effectiveness of each + property on the gesture implementation. Test result on large corpora indicates + that our model has potentials for afurther model improvement.' address: 'Daejeon, Republic of Korea' - author: Russell Eric Dobda - bibtex: "@inproceedings{Dobda2013,\n abstract = {Breaking musical and creative expression\ - \ into elements, layers, and formulas, we explore how live listeners create unique\ - \ sonic experiences from a palette of these elements and their interactions. Bringing\ - \ us to present-day creative applications, a social and historical overview of\ - \ silent disco is presented. The advantages of this active listening interface\ - \ are outlined by the author's expressions requiring discrete elements, such as\ - \ binaural beats, 3D audio effects, and multiple live music acts in the same space.\ - \ Events and prototypes as well as hardware and software proposals for live multi-listener\ - \ manipulation of multielemental sound and music are presented. 
Examples in audio\ - \ production, sound healing, music composition, tempo phasing, and spatial audio\ - \ illustrate the applications.},\n address = {Daejeon, Republic of Korea},\n author\ - \ = {Russell Eric Dobda},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178502},\n\ - \ issn = {2220-4806},\n keywords = {wireless headphones, music production, silent\ - \ disco, headphone concert, binaural beats, multi-track audio, active music listening,\ - \ sound healing, mobile clubbing, smart-phone apps},\n month = {May},\n pages\ - \ = {69--72},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Applied and Proposed Installations with Silent Disco Headphones for\ - \ Multi-Elemental Creative Expression},\n url = {http://www.nime.org/proceedings/2013/nime2013_161.pdf},\n\ + author: Taehun Kim and Stefan Weinzierl + bibtex: "@inproceedings{Kim2013,\n abstract = {We discuss how to model \"gestures\"\ + \ in music performance with statistical latent-states models. A music performance\ + \ can be described with compositional and expressive properties varying over time.\ + \ In those property changes we often observe particular patterns, and such a pattern\ + \ can be understood as a \"gesture\", since it serves as a medium transferring\ + \ specific emotions. Assuming a finite number of latent states on each property\ + \ value changes, we can describe those gestures with statistical latent-states\ + \ models, and train them by unsupervised learning algorithms. In addition, model\ + \ entropy provides us a measure for different effects of each properties on the\ + \ gesture implementation. Test result on some of real performances indicates that\ + \ the trained models could capture the structure of gestures observed in the given\ + \ performances, and detect their boundaries. 
The entropy-based measure was informative\ + \ to understand the effectiveness of each property on the gesture implementation.\ + \ Test result on large corpora indicates that our model has potentials for afurther\ + \ model improvement.},\n address = {Daejeon, Republic of Korea},\n author = {Taehun\ + \ Kim and Stefan Weinzierl},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178582},\n\ + \ issn = {2220-4806},\n keywords = {Musical gestures, performance analysis, unsupervised\ + \ machine learning},\n month = {May},\n pages = {427--430},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Modelling Gestures in Music\ + \ Performance with Statistical Latent-State Models},\n url = {http://www.nime.org/proceedings/2013/nime2013_244.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178502 + doi: 10.5281/zenodo.1178582 issn: 2220-4806 - keywords: 'wireless headphones, music production, silent disco, headphone concert, - binaural beats, multi-track audio, active music listening, sound healing, mobile - clubbing, smart-phone apps' + keywords: 'Musical gestures, performance analysis, unsupervised machine learning' month: May - pages: 69--72 + pages: 427--430 publisher: 'Graduate School of Culture Technology, KAIST' - title: Applied and Proposed Installations with Silent Disco Headphones for Multi-Elemental - Creative Expression - url: http://www.nime.org/proceedings/2013/nime2013_161.pdf + title: Modelling Gestures in Music Performance with Statistical Latent-State Models + url: http://www.nime.org/proceedings/2013/nime2013_244.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Christopher2013 - abstract: 'This paper describes Kontrol, a new hand interface that extends the intuitivecontrol - of electronic music to traditional instrumentalist and dancers. 
Thegoal of the - authors has been to provide users with a device that is capable ofdetecting the - highly intricate and expressive gestures of the master performer,in order for - that information to be interpreted and used for control ofelectronic music. This - paper discusses related devices, the architecture ofKontrol, it''s potential as - a gesture recognition device, and severalperformance applications.' + ID: Pardue2013 + abstract: 'The Hand Controller is a new interface designed to enable a performer + toachieve detailed control of audio and visual parameters through a tangibleinterface + combined with motion tracking of the hands to capture large scalephysical movement. + Such movement empowers an expressive dynamic for bothperformer and audience. However + tracking movements in free space isnotoriously difficult for virtuosic performance. + The lack of tactile feedbackleads to difficulty learning the repeated muscle movements + required for precisecontrol. In comparison, the hands have shown an impressive + ability to mastercomplex motor tasks through feel. The hand controller uses both + modes ofinteraction. Electro-magnetic field tracking enables 6D hand motion trackingwhile + two options provide tactile interaction- a set of tracks that providelinear positioning + and applied finger pressure, or a set of trumpet like sliderkeys that provide + continuous data describing key depth. Thumbs actuateadditional pressure sensitive + buttons. The two haptic interfaces are mountedto a comfortable hand grip that + allows a significant range of reach, andpressure to be applied without restricting + hand movement highly desirable inexpressive motion.' 
address: 'Daejeon, Republic of Korea' - author: Kameron Christopher and Jingyin He and Raakhi Kapur and Ajay Kapur - bibtex: "@inproceedings{Christopher2013,\n abstract = {This paper describes Kontrol,\ - \ a new hand interface that extends the intuitivecontrol of electronic music to\ - \ traditional instrumentalist and dancers. Thegoal of the authors has been to\ - \ provide users with a device that is capable ofdetecting the highly intricate\ - \ and expressive gestures of the master performer,in order for that information\ - \ to be interpreted and used for control ofelectronic music. This paper discusses\ - \ related devices, the architecture ofKontrol, it's potential as a gesture recognition\ - \ device, and severalperformance applications.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Kameron Christopher and Jingyin He and Raakhi Kapur and\ - \ Ajay Kapur},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178496},\n issn\ - \ = {2220-4806},\n keywords = {Hand controller, computational ethnomusicology,\ - \ dance interface, conducting interface, Wekinator, wearable sensors},\n month\ - \ = {May},\n pages = {267--270},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {Kontrol: Hand Gesture Recognition for Music and Dance Interaction},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_164.pdf},\n year = {2013}\n\ - }\n" + author: Laurel Pardue and William Sebastian + bibtex: "@inproceedings{Pardue2013,\n abstract = {The Hand Controller is a new interface\ + \ designed to enable a performer toachieve detailed control of audio and visual\ + \ parameters through a tangibleinterface combined with motion tracking of the\ + \ hands to capture large scalephysical movement. Such movement empowers an expressive\ + \ dynamic for bothperformer and audience. 
However tracking movements in free space\ + \ isnotoriously difficult for virtuosic performance. The lack of tactile feedbackleads\ + \ to difficulty learning the repeated muscle movements required for precisecontrol.\ + \ In comparison, the hands have shown an impressive ability to mastercomplex motor\ + \ tasks through feel. The hand controller uses both modes ofinteraction. Electro-magnetic\ + \ field tracking enables 6D hand motion trackingwhile two options provide tactile\ + \ interaction- a set of tracks that providelinear positioning and applied finger\ + \ pressure, or a set of trumpet like sliderkeys that provide continuous data describing\ + \ key depth. Thumbs actuateadditional pressure sensitive buttons. The two haptic\ + \ interfaces are mountedto a comfortable hand grip that allows a significant range\ + \ of reach, andpressure to be applied without restricting hand movement highly\ + \ desirable inexpressive motion.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Laurel Pardue and William Sebastian},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178630},\n issn = {2220-4806},\n keywords = {hand, interface,\ + \ free gesture, force sensing resistor, new musical instrument, tactile feedback,\ + \ position tracking},\n month = {May},\n pages = {90--93},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Hand-Controller for Combined\ + \ Tactile Control and Motion Tracking},\n url = {http://www.nime.org/proceedings/2013/nime2013_245.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178496 + doi: 10.5281/zenodo.1178630 issn: 2220-4806 - keywords: 'Hand controller, computational ethnomusicology, dance interface, conducting - interface, Wekinator, wearable sensors' + keywords: 'hand, interface, free gesture, force sensing 
resistor, new musical instrument, + tactile feedback, position tracking' month: May - pages: 267--270 + pages: 90--93 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Kontrol: Hand Gesture Recognition for Music and Dance Interaction' - url: http://www.nime.org/proceedings/2013/nime2013_164.pdf + title: Hand-Controller for Combined Tactile Control and Motion Tracking + url: http://www.nime.org/proceedings/2013/nime2013_245.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Han2013a - abstract: 'This paper presents a framework that transforms fingerprint patterns - intoaudio. We describe Digiti Sonus, an interactive installation performingfingerprint - sonification and visualization, including novel techniques forrepresenting user-intended - fingerprint expression as audio parameters. In orderto enable personalized sonification - and broaden timbre of sound, theinstallation employs sound synthesis based on - various visual feature analysissuch as minutiae extraction, area, angle, and push - pressure of fingerprints.The sonification results are discussed and the diverse - timbres of soundretrieved from different fingerprints are compared.' + ID: Wiriadjaja2013 + abstract: 'The Gamelan Sampul is a laptop sleeve with embedded circuitry that allows + usersto practice playing Javanese gamelan instruments without a full set ofinstruments. + It is part of a larger project that aims to develop a set ofportable and mobile + tools for learning, recording and performing classicalJavanese gamelan music.The + accessibility of a portable Javanese gamelan set introduces the musicalgenre to + audiences who have never experienced this traditional music before,passing down + long established customs to future generations. But it also raisesthe question + of what is and what isn''t appropriate to the musical tradition.The Gamelan Sampul + attempts to introduce new technology to traditional folkmusic while staying sensitive + to cultural needs.' 
address: 'Daejeon, Republic of Korea' - author: Yoon Chung Han and Byeong-jun Han and Matthew Wright - bibtex: "@inproceedings{Han2013a,\n abstract = {This paper presents a framework\ - \ that transforms fingerprint patterns intoaudio. We describe Digiti Sonus, an\ - \ interactive installation performingfingerprint sonification and visualization,\ - \ including novel techniques forrepresenting user-intended fingerprint expression\ - \ as audio parameters. In orderto enable personalized sonification and broaden\ - \ timbre of sound, theinstallation employs sound synthesis based on various visual\ - \ feature analysissuch as minutiae extraction, area, angle, and push pressure\ - \ of fingerprints.The sonification results are discussed and the diverse timbres\ - \ of soundretrieved from different fingerprints are compared.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Yoon Chung Han and Byeong-jun Han and Matthew\ - \ Wright},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178548},\n issn = {2220-4806},\n\ - \ keywords = {Fingerprint, Fingerprint sonification, interactive sonification,\ - \ sound synthesis, biometric data},\n month = {May},\n pages = {136--141},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {Digiti Sonus: Advanced\ - \ Interactive Fingerprint Sonification Using Visual Feature Analysis},\n url =\ - \ {http://www.nime.org/proceedings/2013/nime2013_170.pdf},\n year = {2013}\n}\n" + author: Antonius Wiriadjaja + bibtex: "@inproceedings{Wiriadjaja2013,\n abstract = {The Gamelan Sampul is a laptop\ + \ sleeve with embedded circuitry that allows usersto practice playing Javanese\ + \ gamelan instruments without a full set ofinstruments. 
It is part of a larger\ + \ project that aims to develop a set ofportable and mobile tools for learning,\ + \ recording and performing classicalJavanese gamelan music.The accessibility of\ + \ a portable Javanese gamelan set introduces the musicalgenre to audiences who\ + \ have never experienced this traditional music before,passing down long established\ + \ customs to future generations. But it also raisesthe question of what is and\ + \ what isn't appropriate to the musical tradition.The Gamelan Sampul attempts\ + \ to introduce new technology to traditional folkmusic while staying sensitive\ + \ to cultural needs.},\n address = {Daejeon, Republic of Korea},\n author = {Antonius\ + \ Wiriadjaja},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178688},\n issn\ + \ = {2220-4806},\n keywords = {Physical computing, product design, traditional\ + \ folk arts, gamelan},\n month = {May},\n pages = {469--470},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Gamelan Sampul: Laptop Sleeve\ + \ Gamelan},\n url = {http://www.nime.org/proceedings/2013/nime2013_246.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178548 + doi: 10.5281/zenodo.1178688 issn: 2220-4806 - keywords: 'Fingerprint, Fingerprint sonification, interactive sonification, sound - synthesis, biometric data' + keywords: 'Physical computing, product design, traditional folk arts, gamelan' month: May - pages: 136--141 + pages: 469--470 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Digiti Sonus: Advanced Interactive Fingerprint Sonification Using Visual - Feature Analysis' - url: http://www.nime.org/proceedings/2013/nime2013_170.pdf + title: 'Gamelan Sampul: Laptop Sleeve Gamelan' + url: http://www.nime.org/proceedings/2013/nime2013_246.pdf year: 2013 - ENTRYTYPE: 
inproceedings - ID: Perrotin2013 - abstract: 'Touch user interfaces such as touchpad or pen tablet are often used forcontinuous - pitch control in synthesis devices. Usually, pitch is set at thecontact point - on the interface, thus introducing possible pitch inaccuracies atthe note onset. - This paper proposes a new algorithm, based on an adaptiveattraction mapping, for - improving initial pitch accuracy with touch userinterfaces with continuous control. - At each new contact on the interface, thealgorithm adjusts the mapping to produce - the most likely targeted note of thescale in the vicinity of the contact point. - Then, pitch remains continuouslyadjustable as long as the contact is maintained, - allowing for vibrato,portamento and other subtle melodic control. The results - of experimentscomparing the users'' pitch accuracy with and without the help of - the algorithmshow that such a correction enables to play sharply in tune at the - contact withthe interface, regardless the musical background of the player. Therefore, - thedynamic mapping algorithm allows for a clean and accurate attack when playing - touch user interfaces for controlling continuous pitch instruments like voicesynthesizers.' + ID: Pardue2013a + abstract: 'This paper explores the potential of near-field optical reflective sensing + formusical instrument gesture capture. Near-field optical sensors are inexpensive,portable + and non-intrusive, and their high spatial and temporal resolutionmakes them ideal + for tracking the finer motions of instrumental performance.The paper discusses + general optical sensor performance with detailedinvestigations of three sensor + models. An application is presented to violinbow position tracking using reflective + sensors mounted on the stick. Bowtracking remains a difficult task, and many existing + solutions are expensive,bulky, or offer limited temporal resolution. 
Initial results + indicate that bowposition and pressure can be derived from optical measurements + of thehair-string distance, and that similar techniques may be used to measure + bowtilt.' address: 'Daejeon, Republic of Korea' - author: Olivier Perrotin and Christophe d'Alessandro - bibtex: "@inproceedings{Perrotin2013,\n abstract = {Touch user interfaces such as\ - \ touchpad or pen tablet are often used forcontinuous pitch control in synthesis\ - \ devices. Usually, pitch is set at thecontact point on the interface, thus introducing\ - \ possible pitch inaccuracies atthe note onset. This paper proposes a new algorithm,\ - \ based on an adaptiveattraction mapping, for improving initial pitch accuracy\ - \ with touch userinterfaces with continuous control. At each new contact on the\ - \ interface, thealgorithm adjusts the mapping to produce the most likely targeted\ - \ note of thescale in the vicinity of the contact point. Then, pitch remains continuouslyadjustable\ - \ as long as the contact is maintained, allowing for vibrato,portamento and other\ - \ subtle melodic control. The results of experimentscomparing the users' pitch\ - \ accuracy with and without the help of the algorithmshow that such a correction\ - \ enables to play sharply in tune at the contact withthe interface, regardless\ - \ the musical background of the player. 
Therefore, thedynamic mapping algorithm\ - \ allows for a clean and accurate attack when playing touch user interfaces for\ - \ controlling continuous pitch instruments like voicesynthesizers.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {Olivier Perrotin and Christophe\ - \ d'Alessandro},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178640},\n\ - \ issn = {2220-4806},\n keywords = {Sound synthesis control, touch user interfaces,\ - \ pen tablet, automatic correction, accuracy, precision},\n month = {May},\n pages\ - \ = {186--189},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Adaptive mapping for improved pitch accuracy on touch user interfaces},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_178.pdf},\n year = {2013}\n\ - }\n" + author: Laurel Pardue and Andrew McPherson + bibtex: "@inproceedings{Pardue2013a,\n abstract = {This paper explores the potential\ + \ of near-field optical reflective sensing formusical instrument gesture capture.\ + \ Near-field optical sensors are inexpensive,portable and non-intrusive, and their\ + \ high spatial and temporal resolutionmakes them ideal for tracking the finer\ + \ motions of instrumental performance.The paper discusses general optical sensor\ + \ performance with detailedinvestigations of three sensor models. An application\ + \ is presented to violinbow position tracking using reflective sensors mounted\ + \ on the stick. Bowtracking remains a difficult task, and many existing solutions\ + \ are expensive,bulky, or offer limited temporal resolution. 
Initial results indicate\ + \ that bowposition and pressure can be derived from optical measurements of thehair-string\ + \ distance, and that similar techniques may be used to measure bowtilt.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {Laurel Pardue and Andrew McPherson},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178628},\n issn = {2220-4806},\n\ + \ keywords = {optical sensor, reflectance, LED, photodiode, phototransistor, violin,\ + \ bow tracking, gesture, near-field sensing},\n month = {May},\n pages = {363--368},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Near-Field\ + \ Optical Reflective Sensing for Bow Tracking},\n url = {http://www.nime.org/proceedings/2013/nime2013_247.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178640 + doi: 10.5281/zenodo.1178628 issn: 2220-4806 - keywords: 'Sound synthesis control, touch user interfaces, pen tablet, automatic - correction, accuracy, precision' + keywords: 'optical sensor, reflectance, LED, photodiode, phototransistor, violin, + bow tracking, gesture, near-field sensing' month: May - pages: 186--189 + pages: 363--368 publisher: 'Graduate School of Culture Technology, KAIST' - title: Adaptive mapping for improved pitch accuracy on touch user interfaces - url: http://www.nime.org/proceedings/2013/nime2013_178.pdf + title: Near-Field Optical Reflective Sensing for Bow Tracking + url: http://www.nime.org/proceedings/2013/nime2013_247.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Kikukawa2013 - abstract: 'So far, there are few studies of string instruments with bows because - there aremany parameters to acquire skills and it is difficult to measure theseparameters. 
- Therefore, the aim of this paper is to propose a design of alearning environment - for a novice learner to acquire an accurate fingerposition skill. For achieving - the aim, we developed a learning environmentwhich can diagnose learner''s finger - position and give the learner advice byusing magnetic position sensors. The system - shows three windows; a fingerposition window for visualization of finger position, - a score window fordiagnosing finger position along the score and command prompt - window forshowing states of system and advices. Finally, we evaluated the system - by anexperiment. The experimental group improved accuracy values about fingerpositions - and also improved accuracy of pitches of sounds compared withcontrol group. These - results shows significant differences.' + ID: Liu2013 + abstract: 'Cloud Bridge is an immersive interactive audiovisual software interface + forboth data exploration and artistic creation. It explores how information can + besonified and visualized to facilitate findings, and eventually becomeinteractive + musical compositions. Cloud Bridge functions as a multi-user,multimodal instrument. + The data represents the history of items checked out bypatrons of the Seattle + Public Library. A single user or agroup of users functioning as a performance + ensemble participate in the pieceby interactively querying the database using + iOS devices. Each device isassociated with aunique timbre and color for contributing + to the piece, whichappears on large shared screens and a surround-sound system + for allparticipants and observers. Cloud Bridge leads to a new media interactiveinterface + utilizing audio synthesis, visualization and real-time interaction.' 
address: 'Daejeon, Republic of Korea' - author: Fumitaka Kikukawa and Sojiro Ishihara and Masato Soga and Hirokazu Taki - bibtex: "@inproceedings{Kikukawa2013,\n abstract = {So far, there are few studies\ - \ of string instruments with bows because there aremany parameters to acquire\ - \ skills and it is difficult to measure theseparameters. Therefore, the aim of\ - \ this paper is to propose a design of alearning environment for a novice learner\ - \ to acquire an accurate fingerposition skill. For achieving the aim, we developed\ - \ a learning environmentwhich can diagnose learner's finger position and give\ - \ the learner advice byusing magnetic position sensors. The system shows three\ - \ windows; a fingerposition window for visualization of finger position, a score\ - \ window fordiagnosing finger position along the score and command prompt window\ - \ forshowing states of system and advices. Finally, we evaluated the system by\ - \ anexperiment. The experimental group improved accuracy values about fingerpositions\ - \ and also improved accuracy of pitches of sounds compared withcontrol group.\ - \ These results shows significant differences.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Fumitaka Kikukawa and Sojiro Ishihara and Masato Soga\ - \ and Hirokazu Taki},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178580},\n\ - \ issn = {2220-4806},\n keywords = {Magnetic Position Sensors, String Instruments,\ - \ Skill, Learning Environment, Finger Position},\n month = {May},\n pages = {271--276},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Development\ - \ of A Learning Environment for Playing Erhu by Diagnosis and Advice regarding\ - \ Finger Position on Strings},\n url = {http://www.nime.org/proceedings/2013/nime2013_181.pdf},\n\ - \ year = {2013}\n}\n" + author: Qian Liu and Yoon Chung Han and JoAnn Kuchera-Morin and 
Matthew Wright + bibtex: "@inproceedings{Liu2013,\n abstract = {Cloud Bridge is an immersive interactive\ + \ audiovisual software interface forboth data exploration and artistic creation.\ + \ It explores how information can besonified and visualized to facilitate findings,\ + \ and eventually becomeinteractive musical compositions. Cloud Bridge functions\ + \ as a multi-user,multimodal instrument. The data represents the history of items\ + \ checked out bypatrons of the Seattle Public Library. A single user or agroup\ + \ of users functioning as a performance ensemble participate in the pieceby interactively\ + \ querying the database using iOS devices. Each device isassociated with aunique\ + \ timbre and color for contributing to the piece, whichappears on large shared\ + \ screens and a surround-sound system for allparticipants and observers. Cloud\ + \ Bridge leads to a new media interactiveinterface utilizing audio synthesis,\ + \ visualization and real-time interaction.},\n address = {Daejeon, Republic of\ + \ Korea},\n author = {Qian Liu and Yoon Chung Han and JoAnn Kuchera-Morin and\ + \ Matthew Wright},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178596},\n\ + \ issn = {2220-4806},\n keywords = {Data Sonification, Data Visualization, Sonification,\ + \ User Interface, Sonic Interaction Design, Open Sound Control},\n month = {May},\n\ + \ pages = {431--436},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Cloud Bridge: a Data-driven Immersive Audio-Visual Software Interface},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_250.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178580 + doi: 10.5281/zenodo.1178596 issn: 2220-4806 - keywords: 'Magnetic Position Sensors, String Instruments, Skill, Learning Environment, - Finger 
Position' + keywords: 'Data Sonification, Data Visualization, Sonification, User Interface, + Sonic Interaction Design, Open Sound Control' month: May - pages: 271--276 + pages: 431--436 publisher: 'Graduate School of Culture Technology, KAIST' - title: Development of A Learning Environment for Playing Erhu by Diagnosis and Advice - regarding Finger Position on Strings - url: http://www.nime.org/proceedings/2013/nime2013_181.pdf + title: 'Cloud Bridge: a Data-driven Immersive Audio-Visual Software Interface' + url: http://www.nime.org/proceedings/2013/nime2013_250.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Bortz2013 - abstract: 'Mountains and Valleys (an anonymous name for confidentiality) is a communal,site-specific - installation that takes shape as a spatially-responsiveaudio-visual field. The - public participates in the creation of theinstallation, resulting in shared ownership - of the work between both theartists and participants. Furthermore, the installation - takes new shape in eachrealization, both to incorporate the constraints and affordances - of eachspecific site, as well as to address the lessons learned from the previousiteration. - This paper describes the development and execution of Mountains andValleys over - its most recent version, with an eye toward the next iteration ata prestigious - art museum during a national festival in Washington, D.C.' + ID: Everman2013 + abstract: 'Few formal methods exist for evaluating digital musical instruments (DMIs) + .This paper proposes a novel method of DMI evaluation using crowd-sourcedtagging. + One of the challenges in devising such methods is that the evaluationof a musical + instrument is an inherently qualitative task. While previouslyproposed methods + have focused on quantitative methods and largely ignored thequalitative aspects + of the task, tagging is well-suited to this and is alreadyused to classify things + such as websites and musical genres. 
These, like DMIs,do not lend themselves to + simple categorization or parameterization. Using the social tagging method, participating + individuals assign descriptivelabels, or tags, to a DMI. A DMI can then be evaluated + by analyzing the tagsassociated with it. Metrics can be generated from the tags + assigned to theinstrument, and comparisons made to other instruments. This can + give thedesigner valuable insight into the where the strengths of the design lie + andwhere improvements may be needed. A prototype system for testing the method + is proposed in the paper and iscurrently being implemented as part of an ongoing + DMI evaluation project. It isexpected that results from the prototype will be + available to report by thetime of the conference in May.' address: 'Daejeon, Republic of Korea' - author: Brennon Bortz and Aki Ishida and Ivica Ico Bukvic and R. Benjamin Knapp - bibtex: "@inproceedings{Bortz2013,\n abstract = {Mountains and Valleys (an anonymous\ - \ name for confidentiality) is a communal,site-specific installation that takes\ - \ shape as a spatially-responsiveaudio-visual field. The public participates in\ - \ the creation of theinstallation, resulting in shared ownership of the work between\ - \ both theartists and participants. Furthermore, the installation takes new shape\ - \ in eachrealization, both to incorporate the constraints and affordances of eachspecific\ - \ site, as well as to address the lessons learned from the previousiteration.\ - \ This paper describes the development and execution of Mountains andValleys over\ - \ its most recent version, with an eye toward the next iteration ata prestigious\ - \ art museum during a national festival in Washington, D.C.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Brennon Bortz and Aki Ishida and Ivica Ico Bukvic\ - \ and R. 
Benjamin Knapp},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178484},\n\ - \ issn = {2220-4806},\n keywords = {Participatory creation, communal interaction,\ - \ fields, interactive installation, Japanese lanterns},\n month = {May},\n pages\ - \ = {73--78},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Lantern Field: Exploring Participatory Design of a Communal, Spatially\ - \ Responsive Installation},\n url = {http://www.nime.org/proceedings/2013/nime2013_192.pdf},\n\ + author: Michael Everman and Colby Leider + bibtex: "@inproceedings{Everman2013,\n abstract = {Few formal methods exist for\ + \ evaluating digital musical instruments (DMIs) .This paper proposes a novel method\ + \ of DMI evaluation using crowd-sourcedtagging. One of the challenges in devising\ + \ such methods is that the evaluationof a musical instrument is an inherently\ + \ qualitative task. While previouslyproposed methods have focused on quantitative\ + \ methods and largely ignored thequalitative aspects of the task, tagging is well-suited\ + \ to this and is alreadyused to classify things such as websites and musical genres.\ + \ These, like DMIs,do not lend themselves to simple categorization or parameterization.\ + \ Using the social tagging method, participating individuals assign descriptivelabels,\ + \ or tags, to a DMI. A DMI can then be evaluated by analyzing the tagsassociated\ + \ with it. Metrics can be generated from the tags assigned to theinstrument, and\ + \ comparisons made to other instruments. This can give thedesigner valuable insight\ + \ into the where the strengths of the design lie andwhere improvements may be\ + \ needed. 
A prototype system for testing the method is proposed in the paper and\ + \ iscurrently being implemented as part of an ongoing DMI evaluation project.\ + \ It isexpected that results from the prototype will be available to report by\ + \ thetime of the conference in May.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Michael Everman and Colby Leider},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178510},\n issn = {2220-4806},\n keywords = {Evaluation, tagging,\ + \ digital musical instrument},\n month = {May},\n pages = {437--440},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {Toward {DMI} Evaluation\ + \ Using Crowd-Sourced Tagging Techniques},\n url = {http://www.nime.org/proceedings/2013/nime2013_251.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178484 + doi: 10.5281/zenodo.1178510 issn: 2220-4806 - keywords: 'Participatory creation, communal interaction, fields, interactive installation, - Japanese lanterns' + keywords: 'Evaluation, tagging, digital musical instrument' month: May - pages: 73--78 + pages: 437--440 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Lantern Field: Exploring Participatory Design of a Communal, Spatially Responsive - Installation' - url: http://www.nime.org/proceedings/2013/nime2013_192.pdf + title: Toward DMI Evaluation Using Crowd-Sourced Tagging Techniques + url: http://www.nime.org/proceedings/2013/nime2013_251.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Soria2013 - abstract: 'This work presents a general framework method for cre-ating spatialization - systems focused on electroacoustic andacousmatic music performance and creation. - Although weused the logistic equation as orbit generator, any dynami-cal system - could be suitable. 
The main idea lies on generating vectors of Rn with entriesfrom - data series of di_x000B_erent orbits from an speci_x000C_c dynami-cal system. - Such vectors will be called system vectors. Ourproposal is to create ordered paths - between those pointsor system vectors using the Splines Quark library by Felix,1which - allow us to generate smooth curves joining the points.Finally, interpolating that - result with a _x000C_xed sample value,we are able to obtain speci_x000C_c and - independent multidimen-sional panning trajectories for each speaker array and - forany number of sound sources.Our contribution is intended to be at the very - root of the compositionalprocess giving to the creator a method for exploring - new ways for spatialsound placement over time for a wide range of speakers ar-rangements. - The advantage of using controlled chaotic dy-namical systems like the logistic - equation, lies on the factthat the composer can freely and consciously choose - be-tween stable or irregular behaviour for the orbits that willgenerate his/her - panning trajectories. Besides, with the useof isometries, it is possible to generate - di_x000B_erent related or-bits with one single evaluation of the system. The use - ofthe spline method in SuperCollider allows the possibilityof joining and relating - those values from orbits into a wellde_x000C_ned and coherent general system. - Further research willinclude controlling synthesis parameters in the same waywe - created panning trajectories.' - address: 'Daejeon, Republic of Korea' - author: Edmar Soria and Roberto Morales-Manzanares - bibtex: "@inproceedings{Soria2013,\n abstract = {This work presents a general framework\ - \ method for cre-ating spatialization systems focused on electroacoustic andacousmatic\ - \ music performance and creation. Although weused the logistic equation as orbit\ - \ generator, any dynami-cal system could be suitable. 
The main idea lies on generating\ - \ vectors of Rn with entriesfrom data series of di_x000B_erent orbits from an\ - \ speci_x000C_c dynami-cal system. Such vectors will be called system vectors.\ - \ Ourproposal is to create ordered paths between those pointsor system vectors\ - \ using the Splines Quark library by Felix,1which allow us to generate smooth\ - \ curves joining the points.Finally, interpolating that result with a _x000C_xed\ - \ sample value,we are able to obtain speci_x000C_c and independent multidimen-sional\ - \ panning trajectories for each speaker array and forany number of sound sources.Our\ - \ contribution is intended to be at the very root of the compositionalprocess\ - \ giving to the creator a method for exploring new ways for spatialsound placement\ - \ over time for a wide range of speakers ar-rangements. The advantage of using\ - \ controlled chaotic dy-namical systems like the logistic equation, lies on the\ - \ factthat the composer can freely and consciously choose be-tween stable or irregular\ - \ behaviour for the orbits that willgenerate his/her panning trajectories. Besides,\ - \ with the useof isometries, it is possible to generate di_x000B_erent related\ - \ or-bits with one single evaluation of the system. The use ofthe spline method\ - \ in SuperCollider allows the possibilityof joining and relating those values\ - \ from orbits into a wellde_x000C_ned and coherent general system. 
Further research\ - \ willinclude controlling synthesis parameters in the same waywe created panning\ - \ trajectories.},\n address = {Daejeon, Republic of Korea},\n author = {Edmar\ - \ Soria and Roberto Morales-Manzanares},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178664},\n\ - \ issn = {2220-4806},\n keywords = {NIME, spatialization, dynamical systems, chaos},\n\ - \ month = {May},\n pages = {79--83},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Multidimensional sound spatialization by means\ - \ of chaotic dynamical systems},\n url = {http://www.nime.org/proceedings/2013/nime2013_195.pdf},\n\ - \ year = {2013}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178664 - issn: 2220-4806 - keywords: 'NIME, spatialization, dynamical systems, chaos' - month: May - pages: 79--83 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Multidimensional sound spatialization by means of chaotic dynamical systems - url: http://www.nime.org/proceedings/2013/nime2013_195.pdf - year: 2013 - - -- ENTRYTYPE: inproceedings - ID: Rosselet2013 - abstract: 'This paper presents the musical interactions aspects of the design anddevelopment - of a web-based interactive music collaboration system called JamOn. Following - a design science approach, this system is being built accordingto principles taken - from usability engineering and human computer interaction(HCI). The goal of the - system is to allow people with no to little musicalbackground to play a song collaboratively. - The musicians control the musicalcontent and structure of the song thanks to an - interface relying on the freeinking metaphor. One contribution of this interface - is that it displays musicalpatterns of different lengths in the same space. 
The - design of Jam On is basedon a list of performance criteria aimed at ensuring the - musicality of theperformance and the interactivity of the technical system. The - paper comparestwo alternative interfaces used for the system and explores the - various stagesof the design process aimed at making the system as musical and - interactive aspossible.' + ID: Nam2013 + abstract: 'This paper describes the Musical Poi (mPoi), a unique sensor-based musicalinstrument + rooted in the ancient art of poi spinning. The trajectory ofcircular motion drawn + by the performance and the momentum of the mPoiinstrument are converted to the + energetic and vibrant sound, which makesspiritual and meditative soundscape that + opens everyone up the aura and clearsthe thought forms away. The mPoi project + and its concepts will be introducedfirst and then its interaction with a performer + will be discussed.The mPoi project seeks to develop a prototype for a set of mobile + musicalinstrument based on electronic motion sensors and circuit boards. Thistechnology + is installed in egg-shaped structure and allows communicationbetween a performer + and the mPoi instrument. The principal motivation for themPoi project has been + a desire to develop an extensible interface that willsupport the Poi performance, + which is a style of performance art originatedwith the Maori people of New Zealand + involving swinging tethered weightsthrough a variety of rhythmical and geometric + patterns. As an extension of the body and the expansion of the movement, the mPoiutilizes + the creative performance of Poi to make spatial and spiritual soundand music. + The aims of the mPoi project are:to create a prototype of mPoi instrument that + includes circuit board thatconnects the instrument to a sensor.to develop a software, + which includes programming of the circuit board and forthe sound generation.to + make a new artistic expression to refine the captured sound into artisticmusical + notes. 
The creative part of the project is to design a unique method to translate + theperformer''s gesture into sound. A unique algorithm was developed to extractfeatures + of the swing motion and translate them into various patterns of sound.' address: 'Daejeon, Republic of Korea' - author: Ulysse Rosselet and Alain Renaud - bibtex: "@inproceedings{Rosselet2013,\n abstract = {This paper presents the musical\ - \ interactions aspects of the design anddevelopment of a web-based interactive\ - \ music collaboration system called JamOn. Following a design science approach,\ - \ this system is being built accordingto principles taken from usability engineering\ - \ and human computer interaction(HCI). The goal of the system is to allow people\ - \ with no to little musicalbackground to play a song collaboratively. The musicians\ - \ control the musicalcontent and structure of the song thanks to an interface\ - \ relying on the freeinking metaphor. One contribution of this interface is that\ - \ it displays musicalpatterns of different lengths in the same space. The design\ - \ of Jam On is basedon a list of performance criteria aimed at ensuring the musicality\ - \ of theperformance and the interactivity of the technical system. 
The paper comparestwo\ - \ alternative interfaces used for the system and explores the various stagesof\ - \ the design process aimed at making the system as musical and interactive aspossible.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Ulysse Rosselet and Alain\ - \ Renaud},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178650},\n issn = {2220-4806},\n\ - \ keywords = {Networked performance, interface design, mapping, web-based music\ - \ application},\n month = {May},\n pages = {394--399},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Jam On: a new interface for\ - \ web-based collective music performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_196.pdf},\n\ + author: Sangbong Nam + bibtex: "@inproceedings{Nam2013,\n abstract = {This paper describes the Musical\ + \ Poi (mPoi), a unique sensor-based musicalinstrument rooted in the ancient art\ + \ of poi spinning. The trajectory ofcircular motion drawn by the performance and\ + \ the momentum of the mPoiinstrument are converted to the energetic and vibrant\ + \ sound, which makesspiritual and meditative soundscape that opens everyone up\ + \ the aura and clearsthe thought forms away. The mPoi project and its concepts\ + \ will be introducedfirst and then its interaction with a performer will be discussed.The\ + \ mPoi project seeks to develop a prototype for a set of mobile musicalinstrument\ + \ based on electronic motion sensors and circuit boards. Thistechnology is installed\ + \ in egg-shaped structure and allows communicationbetween a performer and the\ + \ mPoi instrument. 
The principal motivation for themPoi project has been a desire\ + \ to develop an extensible interface that willsupport the Poi performance, which\ + \ is a style of performance art originatedwith the Maori people of New Zealand\ + \ involving swinging tethered weightsthrough a variety of rhythmical and geometric\ + \ patterns. As an extension of the body and the expansion of the movement, the\ + \ mPoiutilizes the creative performance of Poi to make spatial and spiritual soundand\ + \ music. The aims of the mPoi project are:to create a prototype of mPoi instrument\ + \ that includes circuit board thatconnects the instrument to a sensor.to develop\ + \ a software, which includes programming of the circuit board and forthe sound\ + \ generation.to make a new artistic expression to refine the captured sound into\ + \ artisticmusical notes. The creative part of the project is to design a unique\ + \ method to translate theperformer's gesture into sound. A unique algorithm was\ + \ developed to extractfeatures of the swing motion and translate them into various\ + \ patterns of sound.},\n address = {Daejeon, Republic of Korea},\n author = {Sangbong\ + \ Nam},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178622},\n issn = {2220-4806},\n\ + \ keywords = {mPoi, Musical Poi, Jwibulnori, Poi, sensor-based musical instrument},\n\ + \ month = {May},\n pages = {148--151},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Musical Poi (mPoi)},\n url = {http://www.nime.org/proceedings/2013/nime2013_254.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178650 + doi: 10.5281/zenodo.1178622 issn: 2220-4806 - keywords: 'Networked performance, interface design, mapping, web-based music application' + keywords: 'mPoi, Musical Poi, Jwibulnori, Poi, sensor-based musical 
instrument' month: May - pages: 394--399 + pages: 148--151 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Jam On: a new interface for web-based collective music performance' - url: http://www.nime.org/proceedings/2013/nime2013_196.pdf + title: Musical Poi (mPoi) + url: http://www.nime.org/proceedings/2013/nime2013_254.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Lai2013 - abstract: 'This paper presents observations from investigating audience experience - of apractice-based research in live sound performance with electronics. In seekingto - understand the communication flow and the engagement between performer andaudience - in this particular performance context, we designed an experiment thatinvolved - the following steps: (a) performing WOSAWIP at a new media festival,(b) conducting - a qualitative research study with audience members and (c)analyzing the data for - new insights.' + ID: Oda2013 + abstract: 'The Internet allows musicians and other artists to collaborate remotely.However, + network latency presents a fundamental challenge for remotecollaborators who need + to coordinate and respond to each other''s performancein real time. In this paper, + we investigate the viability of predictingpercussion hits before they have occurred, + so that information about thepredicted drum hit can be sent over a network, and + the sound can be synthesizedat a receiver''s location at approximately the same + moment the hit occurs atthe sender''s location. Such a system would allow two + percussionists to playin perfect synchrony despite the delays caused by computer + networks. Toinvestigate the feasibility of such an approach, we record vibraphone + malletstrikes with a high-speed camera and track the mallet head position. We + showthat 30 ms before the strike occurs, it is possible to predict strike time + andvelocity with acceptable accuracy. 
Our method fits a second-order polynomial + tothe data to produce a strike time prediction that is within the bounds ofperceptual + synchrony, and a velocity estimate that will enable the soundpressure level of + the synthesized strike to be accurate within 3 dB.' address: 'Daejeon, Republic of Korea' - author: Chi-Hsia Lai and Till Bovermann - bibtex: "@inproceedings{Lai2013,\n abstract = {This paper presents observations\ - \ from investigating audience experience of apractice-based research in live sound\ - \ performance with electronics. In seekingto understand the communication flow\ - \ and the engagement between performer andaudience in this particular performance\ - \ context, we designed an experiment thatinvolved the following steps: (a) performing\ - \ WOSAWIP at a new media festival,(b) conducting a qualitative research study\ - \ with audience members and (c)analyzing the data for new insights.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {Chi-Hsia Lai and Till Bovermann},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178590},\n issn = {2220-4806},\n\ - \ keywords = {Audience Experience Study, Live Performance, Evaluation, Research\ - \ Methods},\n month = {May},\n pages = {170--173},\n publisher = {Graduate School\ - \ of Culture Technology, KAIST},\n title = {Audience Experience in Sound Performance},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_197.pdf},\n year = {2013}\n\ - }\n" + author: Reid Oda and Adam Finkelstein and Rebecca Fiebrink + bibtex: "@inproceedings{Oda2013,\n abstract = {The Internet allows musicians and\ + \ other artists to collaborate remotely.However, network latency presents a fundamental\ + \ challenge for remotecollaborators who need to coordinate and respond to each\ + \ other's performancein real time. 
In this paper, we investigate the viability\ + \ of predictingpercussion hits before they have occurred, so that information\ + \ about thepredicted drum hit can be sent over a network, and the sound can be\ + \ synthesizedat a receiver's location at approximately the same moment the hit\ + \ occurs atthe sender's location. Such a system would allow two percussionists\ + \ to playin perfect synchrony despite the delays caused by computer networks.\ + \ Toinvestigate the feasibility of such an approach, we record vibraphone malletstrikes\ + \ with a high-speed camera and track the mallet head position. We showthat 30\ + \ ms before the strike occurs, it is possible to predict strike time andvelocity\ + \ with acceptable accuracy. Our method fits a second-order polynomial tothe data\ + \ to produce a strike time prediction that is within the bounds ofperceptual synchrony,\ + \ and a velocity estimate that will enable the soundpressure level of the synthesized\ + \ strike to be accurate within 3 dB.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Reid Oda and Adam Finkelstein and Rebecca Fiebrink},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178624},\n issn = {2220-4806},\n keywords = {Networked\ + \ performance, prediction, computer vision},\n month = {May},\n pages = {94--97},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Towards\ + \ Note-Level Prediction for Networked Music Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_258.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178590 + doi: 10.5281/zenodo.1178624 issn: 2220-4806 - keywords: 'Audience Experience Study, Live Performance, Evaluation, Research Methods' + keywords: 'Networked performance, prediction, computer vision' month: May - pages: 170--173 + 
pages: 94--97 publisher: 'Graduate School of Culture Technology, KAIST' - title: Audience Experience in Sound Performance - url: http://www.nime.org/proceedings/2013/nime2013_197.pdf + title: Towards Note-Level Prediction for Networked Music Performance + url: http://www.nime.org/proceedings/2013/nime2013_258.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Everett2013 - abstract: 'This presentation-demonstration discusses the creation of FIRST LIFE, - a75-minute mixed media performance for string quartet, live audio processing,live - motion capture video, and audience participation utilizing stochasticmodels of - chemical data provided by Martha Grover''s Research Group at theSchool of Chemical - and Biomolecular Engineering at Georgia Institute ofTechnology. Each section of - this work is constructed from contingent outcomesdrawn from biochemical research - exploring possible early Earth formations oforganic compounds. Audio-video excerpts - of the composition will be played during the presentation.Max patches for sonification - and for generating stochastic processes will bedemonstrated as well.' + ID: Jenkins2013 + abstract: 'This paper presents a minimally-invasive, wireless optical sensorsystem + for use with any conventional piston valve acoustic trumpet. Itis designed to + be easy to install and remove by any trumpeter. Ourgoal is to offer the extended + control afforded by hyperinstrumentswithout the hard to reverse or irreversible + invasive modificationsthat are typically used for adding digital sensing capabilities. + Weutilize optical sensors to track the continuous position displacementvalues + of the three trumpet valves. These values are trasmittedwirelessly and can be + used by an external controller. The hardware hasbeen designed to be reconfigurable + by having the housing 3D printed sothat the dimensions can be adjusted for any + particular trumpetmodel. 
The result is a low cost, low power, easily replicable + sensorsolution that offers any trumpeter the ability to augment their ownexisting + trumpet without compromising the instrument''s structure orplaying technique. + The extended digital control afforded by our systemis interweaved with the natural + playing gestures of an acoustictrumpet. We believe that this seemless integration + is critical forenabling effective and musical human computer interaction.Keywords: + hyperinstrument, trumpet, minimally-invasive, gesture sensing,wireless, I2C' address: 'Daejeon, Republic of Korea' - author: Steve Everett - bibtex: "@inproceedings{Everett2013,\n abstract = {This presentation-demonstration\ - \ discusses the creation of FIRST LIFE, a75-minute mixed media performance for\ - \ string quartet, live audio processing,live motion capture video, and audience\ - \ participation utilizing stochasticmodels of chemical data provided by Martha\ - \ Grover's Research Group at theSchool of Chemical and Biomolecular Engineering\ - \ at Georgia Institute ofTechnology. Each section of this work is constructed\ - \ from contingent outcomesdrawn from biochemical research exploring possible early\ - \ Earth formations oforganic compounds. 
Audio-video excerpts of the composition\ - \ will be played during the presentation.Max patches for sonification and for\ - \ generating stochastic processes will bedemonstrated as well.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Steve Everett},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178508},\n issn = {2220-4806},\n keywords = {Data-driven\ - \ composition, sonification, live electronics-video},\n month = {May},\n pages\ - \ = {277--278},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Sonifying Chemical Evolution},\n url = {http://www.nime.org/proceedings/2013/nime2013_198.pdf},\n\ + author: Leonardo Jenkins and Shawn Trail and George Tzanetakis and Peter Driessen + and Wyatt Page + bibtex: "@inproceedings{Jenkins2013,\n abstract = {This paper presents a minimally-invasive,\ + \ wireless optical sensorsystem for use with any conventional piston valve acoustic\ + \ trumpet. Itis designed to be easy to install and remove by any trumpeter. Ourgoal\ + \ is to offer the extended control afforded by hyperinstrumentswithout the hard\ + \ to reverse or irreversible invasive modificationsthat are typically used for\ + \ adding digital sensing capabilities. Weutilize optical sensors to track the\ + \ continuous position displacementvalues of the three trumpet valves. These values\ + \ are trasmittedwirelessly and can be used by an external controller. The hardware\ + \ hasbeen designed to be reconfigurable by having the housing 3D printed sothat\ + \ the dimensions can be adjusted for any particular trumpetmodel. The result is\ + \ a low cost, low power, easily replicable sensorsolution that offers any trumpeter\ + \ the ability to augment their ownexisting trumpet without compromising the instrument's\ + \ structure orplaying technique. 
The extended digital control afforded by our\ + \ systemis interweaved with the natural playing gestures of an acoustictrumpet.\ + \ We believe that this seemless integration is critical forenabling effective\ + \ and musical human computer interaction.Keywords: hyperinstrument, trumpet, minimally-invasive,\ + \ gesture sensing,wireless, I2C},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Leonardo Jenkins and Shawn Trail and George Tzanetakis and Peter Driessen\ + \ and Wyatt Page},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178562},\n\ + \ issn = {2220-4806},\n keywords = {hyperinstrument, trumpet, minimally-invasive,\ + \ gesture sensing, wireless, I2C},\n month = {May},\n pages = {352--357},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {An Easily Removable,\ + \ wireless Optical Sensing System (EROSS) for the Trumpet},\n url = {http://www.nime.org/proceedings/2013/nime2013_261.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178508 + doi: 10.5281/zenodo.1178562 issn: 2220-4806 - keywords: 'Data-driven composition, sonification, live electronics-video' + keywords: 'hyperinstrument, trumpet, minimally-invasive, gesture sensing, wireless, + I2C' month: May - pages: 277--278 + pages: 352--357 publisher: 'Graduate School of Culture Technology, KAIST' - title: Sonifying Chemical Evolution - url: http://www.nime.org/proceedings/2013/nime2013_198.pdf + title: 'An Easily Removable, wireless Optical Sensing System (EROSS) for the Trumpet' + url: http://www.nime.org/proceedings/2013/nime2013_261.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: McKinney2013 - abstract: 'In this paper we present Shoggoth, a 3D graphics based program for performingnetwork - music. 
In Shoggoth, users utilize video game style controls to navigateand manipulate - a grid of malleable height maps. Sequences can be created bydefining paths through - the maps which trigger and modulate audio playback. Withrespect to a context of - computer music performance, and specific problems innetwork music, design goals - and technical challenges are outlined. The systemis evaluated through established - taxonomies for describing interfaces, followedby an enumeration of the merits - of 3D graphics in networked performance. Indiscussing proposed improvements to - Shoggoth, design suggestions for otherdevelopers and network musicians are drawn - out.' + ID: Freed2013a + abstract: This paper positively addresses the problem that most NIME devices are + ephemeralasting long enough to signal academic and technical prowess but rarely + longerthan a few musical performances. We offer a case study that shows thatlongevity + of use depends on stabilizing the interface and innovating theimplementation to + maintain the required performance of the controller for theplayer. address: 'Daejeon, Republic of Korea' - author: Chad McKinney and Nick Collins - bibtex: "@inproceedings{McKinney2013,\n abstract = {In this paper we present Shoggoth,\ - \ a 3D graphics based program for performingnetwork music. In Shoggoth, users\ - \ utilize video game style controls to navigateand manipulate a grid of malleable\ - \ height maps. Sequences can be created bydefining paths through the maps which\ - \ trigger and modulate audio playback. Withrespect to a context of computer music\ - \ performance, and specific problems innetwork music, design goals and technical\ - \ challenges are outlined. The systemis evaluated through established taxonomies\ - \ for describing interfaces, followedby an enumeration of the merits of 3D graphics\ - \ in networked performance. 
Indiscussing proposed improvements to Shoggoth, design\ - \ suggestions for otherdevelopers and network musicians are drawn out.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {Chad McKinney and Nick Collins},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178606},\n issn = {2220-4806},\n\ - \ keywords = {3D, Generative, Network, Environment},\n month = {May},\n pages\ - \ = {400--405},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {An Interactive {3D} Network Music Space},\n url = {http://www.nime.org/proceedings/2013/nime2013_199.pdf},\n\ + author: Adrian Freed and John MacCallum and Sam Mansfield + bibtex: "@inproceedings{Freed2013a,\n abstract = {This paper positively addresses\ + \ the problem that most NIME devices are ephemeralasting long enough to signal\ + \ academic and technical prowess but rarely longerthan a few musical performances.\ + \ We offer a case study that shows thatlongevity of use depends on stabilizing\ + \ the interface and innovating theimplementation to maintain the required performance\ + \ of the controller for theplayer.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Adrian Freed and John MacCallum and Sam Mansfield},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178524},\n issn = {2220-4806},\n keywords = {Fingerboard\ + \ controller, Best practices, Recrudescence, Organology, Unobtainium},\n month\ + \ = {May},\n pages = {441--445},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {``Old'' is the New ``New'': a Fingerboard Case Study in Recrudescence\ + \ as a NIME Development Strategy},\n url = {http://www.nime.org/proceedings/2013/nime2013_265.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression 
- doi: 10.5281/zenodo.1178606 + doi: 10.5281/zenodo.1178524 issn: 2220-4806 - keywords: '3D, Generative, Network, Environment' + keywords: 'Fingerboard controller, Best practices, Recrudescence, Organology, Unobtainium' month: May - pages: 400--405 + pages: 441--445 publisher: 'Graduate School of Culture Technology, KAIST' - title: An Interactive 3D Network Music Space - url: http://www.nime.org/proceedings/2013/nime2013_199.pdf + title: '``Old'''' is the New ``New'''': a Fingerboard Case Study in Recrudescence + as a NIME Development Strategy' + url: http://www.nime.org/proceedings/2013/nime2013_265.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Ferguson2013 - abstract: 'Feedback created by guitars and amplifiers is difficult to use in musicalsettings - -- parameters such as pitch and loudness are hard to specify preciselyby fretting - a string or by holding the guitar near an amplifier. This researchinvestigates - methods for controlling the level and pitch of the feedbackproduced by a guitar - and amplifier, which are based on incorporatingcorpus-based control into the system. - Two parameters are used to define thecontrol parameter space -- a simple automatic - gain control system to controlthe output level, and a band-pass filter frequency - for controlling the pitch ofthe feedback. This control parameter space is mapped - to a corpus of soundscreated by these parameters and recorded, and these sounds - are analysed usingsoftware created for concatenative synthesis. Following this - process, thedescriptors taken from the analysis can be used to select control - parametersfrom the feedback system.' + ID: Freed2013 + abstract: 'We describe ``o.expr'''' an expression language for dynamic, object- + and agent-oriented computation of gesture signal processing workflows using OSC + bundles. 
We illustrate the use of o.expr for a range of gesture processingtasks + showing how stateless programming and homoiconicity simplify applications development + and provide support for heterogeneous computational networks.' address: 'Daejeon, Republic of Korea' - author: Sam Ferguson and Aengus Martin and Andrew Johnston - bibtex: "@inproceedings{Ferguson2013,\n abstract = {Feedback created by guitars\ - \ and amplifiers is difficult to use in musicalsettings -- parameters such as\ - \ pitch and loudness are hard to specify preciselyby fretting a string or by holding\ - \ the guitar near an amplifier. This researchinvestigates methods for controlling\ - \ the level and pitch of the feedbackproduced by a guitar and amplifier, which\ - \ are based on incorporatingcorpus-based control into the system. Two parameters\ - \ are used to define thecontrol parameter space -- a simple automatic gain control\ - \ system to controlthe output level, and a band-pass filter frequency for controlling\ - \ the pitch ofthe feedback. This control parameter space is mapped to a corpus\ - \ of soundscreated by these parameters and recorded, and these sounds are analysed\ - \ usingsoftware created for concatenative synthesis. 
Following this process, thedescriptors\ - \ taken from the analysis can be used to select control parametersfrom the feedback\ - \ system.},\n address = {Daejeon, Republic of Korea},\n author = {Sam Ferguson\ - \ and Aengus Martin and Andrew Johnston},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178518},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {541--546},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {A corpus-based method for controlling\ - \ guitar feedback},\n url = {http://www.nime.org/proceedings/2013/nime2013_200.pdf},\n\ - \ year = {2013}\n}\n" + author: Adrian Freed and John MacCallum and David Wessel + bibtex: "@inproceedings{Freed2013,\n abstract = {We describe ``o.expr'' an expression\ + \ language for dynamic, object- and agent-oriented computation of gesture signal\ + \ processing workflows using OSC bundles. We illustrate the use of o.expr for\ + \ a range of gesture processingtasks showing how stateless programming and homoiconicity\ + \ simplify applications development and provide support for heterogeneous computational\ + \ networks.},\n address = {Daejeon, Republic of Korea},\n author = {Adrian Freed\ + \ and John MacCallum and David Wessel},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178526},\n\ + \ issn = {2220-4806},\n keywords = {Gesture Signal Processing, Open Sound Control,\ + \ Functional Programming, Homoiconicity, Process Migration.},\n month = {May},\n\ + \ pages = {347--351},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Agile Interface Development using OSC Expressions and Process Migration},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_266.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - 
doi: 10.5281/zenodo.1178518 + doi: 10.5281/zenodo.1178526 issn: 2220-4806 + keywords: 'Gesture Signal Processing, Open Sound Control, Functional Programming, + Homoiconicity, Process Migration.' month: May - pages: 541--546 + pages: 347--351 publisher: 'Graduate School of Culture Technology, KAIST' - title: A corpus-based method for controlling guitar feedback - url: http://www.nime.org/proceedings/2013/nime2013_200.pdf + title: Agile Interface Development using OSC Expressions and Process Migration + url: http://www.nime.org/proceedings/2013/nime2013_266.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: KITA2013 - abstract: 'When people learn using Web-based educational resources such as an LMS(Learning - Management System) or other e-learning related systems, they aresitting in front - of their own computer at home and are often physicallyisolated from other online - learners. In some courses they are typically gettingin touch online with each - others for doing some particular group workassignments, but most of the time they - must do their own learning tasks alone.In other courses simply the individual - assignments and quizzes are provided, sothe learners are alone all the time from - the beginning until the end of thecourse.In order to keep the learners'' motivation, - it helps to feel other learnersdoing the same learning activities and belonging - to the same course.Communicating formally or informally with other learners via - Social NetworkingServices or something is one way for learners to get such a feeling, - though ina way it might sometimes disturb their learning. Sonification of the - access logof the e-learning system could be another indirect way to provide such - afeeling.' 
+ ID: Hamilton2013 + abstract: 'With a nod towards digital puppetry and game-based film genres such asmachinima, + recent additions to UDKOSC of- fer an Open Sound Control (OSC)control layer for + external control over both third-person ''''pawn'''' entitiesand camera controllers + in fully rendered game-space. Real-time OSC input,driven by algorithmic process + or parsed from a human-readable timed scriptingsyntax allows users to shape choreographies + of gesture, in this case actormotion and action, as well as an audiences view + into the game-spaceenvironment. As UDKOSC outputs real-time coordinate and action + data generatedby UDK pawns and players with OSC, individual as well as aggregate + virtualactor gesture and motion can be leveraged as a driver for both creative + andprocedural/adaptive gaming music and audio concerns.' address: 'Daejeon, Republic of Korea' - author: Toshihiro KITA and Naotoshi Osaka - bibtex: "@inproceedings{KITA2013,\n abstract = {When people learn using Web-based\ - \ educational resources such as an LMS(Learning Management System) or other e-learning\ - \ related systems, they aresitting in front of their own computer at home and\ - \ are often physicallyisolated from other online learners. In some courses they\ - \ are typically gettingin touch online with each others for doing some particular\ - \ group workassignments, but most of the time they must do their own learning\ - \ tasks alone.In other courses simply the individual assignments and quizzes are\ - \ provided, sothe learners are alone all the time from the beginning until the\ - \ end of thecourse.In order to keep the learners' motivation, it helps to feel\ - \ other learnersdoing the same learning activities and belonging to the same course.Communicating\ - \ formally or informally with other learners via Social NetworkingServices or\ - \ something is one way for learners to get such a feeling, though ina way it might\ - \ sometimes disturb their learning. 
Sonification of the access logof the e-learning\ - \ system could be another indirect way to provide such afeeling.},\n address =\ - \ {Daejeon, Republic of Korea},\n author = {Toshihiro KITA and Naotoshi Osaka},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178584},\n issn = {2220-4806},\n\ - \ keywords = {e-learning, online learners, Moodle, Csound, realtime sonification,\ - \ OSC (Open Sound Control)},\n month = {May},\n pages = {198--199},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {Providing a feeling\ - \ of other remote learners' presence in an online learning environment via realtime\ - \ sonification of Moodle access log},\n url = {http://www.nime.org/proceedings/2013/nime2013_203.pdf},\n\ + author: Rob Hamilton + bibtex: "@inproceedings{Hamilton2013,\n abstract = {With a nod towards digital puppetry\ + \ and game-based film genres such asmachinima, recent additions to UDKOSC of-\ + \ fer an Open Sound Control (OSC)control layer for external control over both\ + \ third-person ''pawn'' entitiesand camera controllers in fully rendered game-space.\ + \ Real-time OSC input,driven by algorithmic process or parsed from a human-readable\ + \ timed scriptingsyntax allows users to shape choreographies of gesture, in this\ + \ case actormotion and action, as well as an audiences view into the game-spaceenvironment.\ + \ As UDKOSC outputs real-time coordinate and action data generatedby UDK pawns\ + \ and players with OSC, individual as well as aggregate virtualactor gesture and\ + \ motion can be leveraged as a driver for both creative andprocedural/adaptive\ + \ gaming music and audio concerns.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Rob Hamilton},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178544},\n\ + \ issn = {2220-4806},\n 
keywords = {procedural music, procedural audio, interactive\ + \ sonification, game music, Open Sound Control},\n month = {May},\n pages = {446--449},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Sonifying\ + \ Game-Space Choreographies With UDKOSC},\n url = {http://www.nime.org/proceedings/2013/nime2013_268.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178584 + doi: 10.5281/zenodo.1178544 issn: 2220-4806 - keywords: 'e-learning, online learners, Moodle, Csound, realtime sonification, OSC - (Open Sound Control)' + keywords: 'procedural music, procedural audio, interactive sonification, game music, + Open Sound Control' month: May - pages: 198--199 + pages: 446--449 publisher: 'Graduate School of Culture Technology, KAIST' - title: Providing a feeling of other remote learners' presence in an online learning - environment via realtime sonification of Moodle access log - url: http://www.nime.org/proceedings/2013/nime2013_203.pdf + title: Sonifying Game-Space Choreographies With UDKOSC + url: http://www.nime.org/proceedings/2013/nime2013_268.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Gelineck2013 - abstract: 'This paper presents the continuous work towards the development of an - interface for music mixing targeted towards expert sound technicians and producers. - The mixing interface uses a stage metaphor mapping scheme where audio channels - arerepresented as digital widgets on a 2D surface. These can be controlled bymulti - touch or by smart tangibles, which are tangible blocks with embedded sensors. - The smart tangibles developed for this interface are able to sense howthey are - grasped by the user. 
The paper presents the design of the mixing interface including - the smart tangible as well as a preliminary user study involving a hands-on focus - group session where 5 different control technologiesare contrasted and discussed. - Preliminary findings suggest that smart tangibles were preferred, but that an - optimal interface would include a combination of touch, smart tangibles and an - extra function control tangible for extending the functionality of the smart tangibles. - Finally, the interface should incorporate both an edit and mix mode---the latter - displaying very limited visual feedback in order to force users to focus their - attention to listening instead of the interface.' - address: 'Daejeon, Republic of Korea' - author: Steven Gelineck and Dan Overholt and Morten Büchert and Jesper Andersen - bibtex: "@inproceedings{Gelineck2013,\n abstract = {This paper presents the continuous\ - \ work towards the development of an interface for music mixing targeted towards\ - \ expert sound technicians and producers. The mixing interface uses a stage metaphor\ - \ mapping scheme where audio channels arerepresented as digital widgets on a 2D\ - \ surface. These can be controlled bymulti touch or by smart tangibles, which\ - \ are tangible blocks with embedded sensors. The smart tangibles developed for\ - \ this interface are able to sense howthey are grasped by the user. The paper\ - \ presents the design of the mixing interface including the smart tangible as\ - \ well as a preliminary user study involving a hands-on focus group session where\ - \ 5 different control technologiesare contrasted and discussed. Preliminary findings\ - \ suggest that smart tangibles were preferred, but that an optimal interface would\ - \ include a combination of touch, smart tangibles and an extra function control\ - \ tangible for extending the functionality of the smart tangibles. 
Finally, the\ - \ interface should incorporate both an edit and mix mode---the latter displaying\ - \ very limited visual feedback in order to force users to focus their attention\ - \ to listening instead of the interface.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Steven Gelineck and Dan Overholt and Morten B{\\''u}chert and Jesper\ - \ Andersen},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178532},\n issn\ - \ = {2220-4806},\n keywords = {music mixing, tangibles, smart objects, multi-touch,\ - \ control surface, graspables, physical-digital interface, tangible user interface,\ - \ wireless sensing, sketching in hardware},\n month = {May},\n pages = {180--185},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Towards\ - \ an Interface for Music Mixing based on Smart Tangibles and Multitouch},\n url\ - \ = {http://www.nime.org/proceedings/2013/nime2013_206.pdf},\n year = {2013}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178532 - issn: 2220-4806 - keywords: 'music mixing, tangibles, smart objects, multi-touch, control surface, - graspables, physical-digital interface, tangible user interface, wireless sensing, - sketching in hardware' - month: May - pages: 180--185 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Towards an Interface for Music Mixing based on Smart Tangibles and Multitouch - url: http://www.nime.org/proceedings/2013/nime2013_206.pdf - year: 2013 - - -- ENTRYTYPE: inproceedings - ID: Tang2013 - abstract: 'CalliMusic, is a system developed for users to generate traditional Chinesemusic - by writing Chinese ink brush calligraphy, turning the long-believedstrong linkage - between the two art forms with rich histories into reality. 
Inaddition to traditional - calligraphy writing instruments (brush, ink and paper),a camera is the only addition - needed to convert the motion of the ink brushinto musical notes through a variety - of mappings such as human-inspired,statistical and a hybrid. The design of the - system, including details of eachmapping and research issues encountered are discussed. - A user study of systemperformance suggests that the result is quite encouraging. - The technique is,obviously, applicable to other related art forms with a wide - range ofapplications.' + ID: John2013 + abstract: 'This paper reviews the mobile music projects that have been presented + at NIMEin the past ten years in order to assess whether the changes in technology + haveaffected the activities of mobile music research. An overview of mobile musicprojects + is presented using the categories that describe the main activities:projects that + explore the influence and make use of location; applications thatshare audio or + promote collaborative composition; interaction using wearabledevices; the use + of mobile phones as performance devices; projects that exploreHCI design issues. + The relative activity between different types of activity isassessed in order + to identify trends. The classification according totechnological, social or geographic + showed an overwhelming bias to thetechnological, followed by social investigations. + An alternative classificationof survey product, or artifact reveals an increase + in the number of productsdescribed with a corresponding decline in the number + of surveys and artisticprojects. The increase in technical papers appears to be + due to an enthusiasmto make use of increased capability of mobile phones, although + there are signsthat the initial interest has already peaked, and researchers are + againinterested to explore technologies and artistic expression beyond existingmobile + phones.' address: 'Daejeon, Republic of Korea' - author: Will W. W. 
Tang and Stephen Chan and Grace Ngai and Hong-va Leong - bibtex: "@inproceedings{Tang2013,\n abstract = {CalliMusic, is a system developed\ - \ for users to generate traditional Chinesemusic by writing Chinese ink brush\ - \ calligraphy, turning the long-believedstrong linkage between the two art forms\ - \ with rich histories into reality. Inaddition to traditional calligraphy writing\ - \ instruments (brush, ink and paper),a camera is the only addition needed to convert\ - \ the motion of the ink brushinto musical notes through a variety of mappings\ - \ such as human-inspired,statistical and a hybrid. The design of the system, including\ - \ details of eachmapping and research issues encountered are discussed. A user\ - \ study of systemperformance suggests that the result is quite encouraging. The\ - \ technique is,obviously, applicable to other related art forms with a wide range\ - \ ofapplications.},\n address = {Daejeon, Republic of Korea},\n author = {Will\ - \ W. W. Tang and Stephen Chan and Grace Ngai and Hong-va Leong},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178668},\n issn = {2220-4806},\n keywords = {Chinese\ - \ Calligraphy, Chinese Music, Assisted Music Generation},\n month = {May},\n pages\ - \ = {84--89},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Computer Assisted Melo-rhythmic Generation of Traditional Chinese Music\ - \ from Ink Brush Calligraphy},\n url = {http://www.nime.org/proceedings/2013/nime2013_208.pdf},\n\ + author: David John + bibtex: "@inproceedings{John2013,\n abstract = {This paper reviews the mobile music\ + \ projects that have been presented at NIMEin the past ten years in order to assess\ + \ whether the changes in technology haveaffected the activities of mobile music\ + \ research. 
An overview of mobile musicprojects is presented using the categories\ + \ that describe the main activities:projects that explore the influence and make\ + \ use of location; applications thatshare audio or promote collaborative composition;\ + \ interaction using wearabledevices; the use of mobile phones as performance devices;\ + \ projects that exploreHCI design issues. The relative activity between different\ + \ types of activity isassessed in order to identify trends. The classification\ + \ according totechnological, social or geographic showed an overwhelming bias\ + \ to thetechnological, followed by social investigations. An alternative classificationof\ + \ survey product, or artifact reveals an increase in the number of productsdescribed\ + \ with a corresponding decline in the number of surveys and artisticprojects.\ + \ The increase in technical papers appears to be due to an enthusiasmto make use\ + \ of increased capability of mobile phones, although there are signsthat the initial\ + \ interest has already peaked, and researchers are againinterested to explore\ + \ technologies and artistic expression beyond existingmobile phones.},\n address\ + \ = {Daejeon, Republic of Korea},\n author = {David John},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178568},\n issn = {2220-4806},\n keywords = {Mobile Music,\ + \ interactive music, proximity sensing, wearable devices, mobile phone performance,\ + \ interaction design},\n month = {May},\n pages = {301--306},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Updating the Classifications\ + \ of Mobile Music Projects},\n url = {http://www.nime.org/proceedings/2013/nime2013_273.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178668 + doi: 10.5281/zenodo.1178568 issn: 2220-4806 - 
keywords: 'Chinese Calligraphy, Chinese Music, Assisted Music Generation' + keywords: 'Mobile Music, interactive music, proximity sensing, wearable devices, + mobile phone performance, interaction design' month: May - pages: 84--89 + pages: 301--306 publisher: 'Graduate School of Culture Technology, KAIST' - title: Computer Assisted Melo-rhythmic Generation of Traditional Chinese Music from - Ink Brush Calligraphy - url: http://www.nime.org/proceedings/2013/nime2013_208.pdf + title: Updating the Classifications of Mobile Music Projects + url: http://www.nime.org/proceedings/2013/nime2013_273.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Kaneko2013 - abstract: 'In this paper, a function-oriented musical interface, named the sound - wheel_x0011_,is presented. This interface is designed to manipulate musical functions - likepitch class sets, tonal centers and scale degrees, rather than the _x0010_musicalsurface_x0011_, - i.e. the individual notes with concrete note heights. The sound wheelhas an interface - summarizing harmony theory, and the playing actions haveexplicit correspondencewith - musical functions. Easy usability is realized by semi-automatizing theconversion - process from musical functions into the musical surface. Thus, theplayer can use - this interface with concentration on the harmonic structure,without having his - attention caught by manipulating the musical surface.Subjective evaluation indicated - the e_x001B_ffectiveness of this interface as a toolhelpful for understanding - the music theory. Because of such features, thisinterface can be used for education - and interactive training of tonal musictheory.' + ID: Walther2013 + abstract: 'Although multi-touch user interfaces have become a widespread form of + humancomputer interaction in many technical areas, they haven''t found their way + intolive performances of musicians and keyboarders yet. 
In this paper, we present + anovel multi-touch interface method aimed at professional keyboard players. Themethod, + which is inspired by computer trackpads, allows controlling up to tencontinuous + parameters of a keyboard with one hand, without requiring the userto look at the + touch area --- a significant improvement over traditional keyboardinput controls. + We discuss optimizations needed to make our interface reliable,and show in an + evaluation with four keyboarders of different skill level thatthis method is both + intuitive and powerful, and allows users to more quicklyalter the sound of their + keyboard than they could with current input solutions.' address: 'Daejeon, Republic of Korea' - author: Shoken Kaneko - bibtex: "@inproceedings{Kaneko2013,\n abstract = {In this paper, a function-oriented\ - \ musical interface, named the sound wheel_x0011_,is presented. This interface\ - \ is designed to manipulate musical functions likepitch class sets, tonal centers\ - \ and scale degrees, rather than the _x0010_musicalsurface_x0011_, i.e. the individual\ - \ notes with concrete note heights. The sound wheelhas an interface summarizing\ - \ harmony theory, and the playing actions haveexplicit correspondencewith musical\ - \ functions. Easy usability is realized by semi-automatizing theconversion process\ - \ from musical functions into the musical surface. Thus, theplayer can use this\ - \ interface with concentration on the harmonic structure,without having his attention\ - \ caught by manipulating the musical surface.Subjective evaluation indicated the\ - \ e_x001B_ffectiveness of this interface as a toolhelpful for understanding the\ - \ music theory. 
Because of such features, thisinterface can be used for education\ - \ and interactive training of tonal musictheory.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Shoken Kaneko},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178574},\n\ - \ issn = {2220-4806},\n keywords = {Music education, Interactive tonal music generation},\n\ - \ month = {May},\n pages = {202--205},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {A Function-Oriented Interface for Music Education\ - \ and Musical Expressions: ``the Sound Wheel''},\n url = {http://www.nime.org/proceedings/2013/nime2013_21.pdf},\n\ + author: Thomas Walther and Damir Ismailović and Bernd Brügge + bibtex: "@inproceedings{Walther2013,\n abstract = {Although multi-touch user interfaces\ + \ have become a widespread form of humancomputer interaction in many technical\ + \ areas, they haven't found their way intolive performances of musicians and keyboarders\ + \ yet. In this paper, we present anovel multi-touch interface method aimed at\ + \ professional keyboard players. Themethod, which is inspired by computer trackpads,\ + \ allows controlling up to tencontinuous parameters of a keyboard with one hand,\ + \ without requiring the userto look at the touch area --- a significant improvement\ + \ over traditional keyboardinput controls. 
We discuss optimizations needed to\ + \ make our interface reliable,and show in an evaluation with four keyboarders\ + \ of different skill level thatthis method is both intuitive and powerful, and\ + \ allows users to more quicklyalter the sound of their keyboard than they could\ + \ with current input solutions.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Thomas Walther and Damir Ismailovi{\\'c} and Bernd Br{\\''u}gge},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178684},\n issn = {2220-4806},\n\ + \ keywords = {multi-touch, mobile, keyboard, interface},\n month = {May},\n pages\ + \ = {98--101},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Rocking the Keys with a Multi-Touch Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_275.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178574 + doi: 10.5281/zenodo.1178684 issn: 2220-4806 - keywords: 'Music education, Interactive tonal music generation' + keywords: 'multi-touch, mobile, keyboard, interface' month: May - pages: 202--205 + pages: 98--101 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'A Function-Oriented Interface for Music Education and Musical Expressions: - ``the Sound Wheel''''' - url: http://www.nime.org/proceedings/2013/nime2013_21.pdf + title: Rocking the Keys with a Multi-Touch Interface + url: http://www.nime.org/proceedings/2013/nime2013_275.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Andersson2013 - abstract: 'Our voice and body are important parts of our self-experience, andcommunication - and relational possibilities. They gradually become moreimportant for Interaction - Design, due to increased development of tangibleinteraction and mobile communication. 
- In this paper we present and discuss ourwork with voice and tangible interaction - in our ongoing research project XXXXX.The goal is to improve health for families, - adults and children withdisabilities through use of collaborative, musical, tangible - media. We build onuse of voice in Music Therapy and on a humanistic health approach. - Ourchallenge is to design vocal and tangible interactive media that through usereduce - isolation and passivity and increase empowerment for the users. We usesound recognition, - generative sound synthesis, vibrations and cross-mediatechniques, to create rhythms, - melodies and harmonic chords to stimulatebody-voice connections, positive emotions - and structures for actions.' + ID: Berdahl2013 + abstract: 'Satellite CCRMA is a platform for making embedded musical instruments + andembedded installations. The project aims to help prototypes live longer byproviding + a complete prototyping platform in a single, small, and stand-aloneembedded form + factor. A set of scripts makes it easier for artists andbeginning technical students + to access powerful features, while advanced usersenjoy the flexibility of the + open-source software and open-source hardwareplatform.This paper focuses primarily + on networking capabilities of Satellite CCRMA andnew software for enabling interactive + control of the hardware-acceleratedgraphical output. In addition, some results + are presented from robustness testsalongside specific advice and software support + for increasing the lifespan ofthe flash memory.' address: 'Daejeon, Republic of Korea' - author: Anders-Petter Andersson and Birgitta Cappelen - bibtex: "@inproceedings{Andersson2013,\n abstract = {Our voice and body are important\ - \ parts of our self-experience, andcommunication and relational possibilities.\ - \ They gradually become moreimportant for Interaction Design, due to increased\ - \ development of tangibleinteraction and mobile communication. 
In this paper we\ - \ present and discuss ourwork with voice and tangible interaction in our ongoing\ - \ research project XXXXX.The goal is to improve health for families, adults and\ - \ children withdisabilities through use of collaborative, musical, tangible media.\ - \ We build onuse of voice in Music Therapy and on a humanistic health approach.\ - \ Ourchallenge is to design vocal and tangible interactive media that through\ - \ usereduce isolation and passivity and increase empowerment for the users. We\ - \ usesound recognition, generative sound synthesis, vibrations and cross-mediatechniques,\ - \ to create rhythms, melodies and harmonic chords to stimulatebody-voice connections,\ - \ positive emotions and structures for actions.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Anders-Petter Andersson and Birgitta Cappelen},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178465},\n issn = {2220-4806},\n keywords\ - \ = {Vocal Interaction, Tangible Interaction, Music & Health, Voice, Empowerment,\ - \ Music Therapy, Resource-Oriented},\n month = {May},\n pages = {406--412},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Designing\ - \ Empowering Vocal and Tangible Interaction},\n url = {http://www.nime.org/proceedings/2013/nime2013_210.pdf},\n\ + author: Edgar Berdahl and Spencer Salazar and Myles Borins + bibtex: "@inproceedings{Berdahl2013,\n abstract = {Satellite CCRMA is a platform\ + \ for making embedded musical instruments andembedded installations. The project\ + \ aims to help prototypes live longer byproviding a complete prototyping platform\ + \ in a single, small, and stand-aloneembedded form factor. 
A set of scripts makes\ + \ it easier for artists andbeginning technical students to access powerful features,\ + \ while advanced usersenjoy the flexibility of the open-source software and open-source\ + \ hardwareplatform.This paper focuses primarily on networking capabilities of\ + \ Satellite CCRMA andnew software for enabling interactive control of the hardware-acceleratedgraphical\ + \ output. In addition, some results are presented from robustness testsalongside\ + \ specific advice and software support for increasing the lifespan ofthe flash\ + \ memory.},\n address = {Daejeon, Republic of Korea},\n author = {Edgar Berdahl\ + \ and Spencer Salazar and Myles Borins},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178476},\n\ + \ issn = {2220-4806},\n keywords = {Satellite CCRMA, embedded musical instruments,\ + \ embedded installations, Node.js, Interface.js, hardware-accelerated graphics,\ + \ OpenGLES, SimpleGraphicsOSC, union file system, write endurance},\n month =\ + \ {May},\n pages = {325--330},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {Embedded Networking and Hardware-Accelerated Graphics with\ + \ Satellite CCRMA},\n url = {http://www.nime.org/proceedings/2013/nime2013_277.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178465 + doi: 10.5281/zenodo.1178476 issn: 2220-4806 - keywords: 'Vocal Interaction, Tangible Interaction, Music & Health, Voice, Empowerment, - Music Therapy, Resource-Oriented' + keywords: 'Satellite CCRMA, embedded musical instruments, embedded installations, + Node.js, Interface.js, hardware-accelerated graphics, OpenGLES, SimpleGraphicsOSC, + union file system, write endurance' month: May - pages: 406--412 + pages: 325--330 publisher: 'Graduate School of Culture Technology, KAIST' - title: Designing Empowering 
Vocal and Tangible Interaction - url: http://www.nime.org/proceedings/2013/nime2013_210.pdf + title: Embedded Networking and Hardware-Accelerated Graphics with Satellite CCRMA + url: http://www.nime.org/proceedings/2013/nime2013_277.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Astrinaki2013 - abstract: 'This paper describes the recent progress in our approach to generateperformative - and controllable speech. The goal of the performative HMM-basedspeech and singing - synthesis library, called Mage, is to have the ability togenerate natural sounding - speech with arbitrary speaker''s voicecharacteristics, speaking styles and expressions - and at the same time to haveaccurate reactive user control over all the available - production levels. Mageallows to arbitrarily change between voices, control speaking - style or vocalidentity, manipulate voice characteristics or alter the targeted - contexton-the-fly and also maintain the naturalness and intelligibility of the - output.To achieve these controls, it was essential to redesign and improve the - initiallibrary. This paper focuses on the improvements of the architectural design,the - additional user controls and provides an overview of a prototype, where aguitar - is used to reactively control the generation of a synthetic voice invarious levels.' + ID: Xiao2013 + abstract: 'The body channels rich layers of information when playing music, from + intricatemanipulations of the instrument to vivid personifications of expression. + Butwhen music is captured and replayed across distance and time, the performer''sbody + is too often trapped behind a small screen or absent entirely.This paper introduces + an interface to conjure the recorded performer bycombining the moving keys of + a player piano with life-sized projection of thepianist''s hands and upper body. 
+ Inspired by reflections on a lacquered grandpiano, our interface evokes the sense + that the virtual pianist is playing thephysically moving keys.Through our interface, + we explore the question of how to viscerally simulate aperformer''s presence to + create immersive experiences. We discuss designchoices, outline a space of usage + scenarios and report reactions from users.' address: 'Daejeon, Republic of Korea' - author: Maria Astrinaki and Nicolas d'Alessandro and Loïc Reboursière and Alexis - Moinet and Thierry Dutoit - bibtex: "@inproceedings{Astrinaki2013,\n abstract = {This paper describes the recent\ - \ progress in our approach to generateperformative and controllable speech. The\ - \ goal of the performative HMM-basedspeech and singing synthesis library, called\ - \ Mage, is to have the ability togenerate natural sounding speech with arbitrary\ - \ speaker's voicecharacteristics, speaking styles and expressions and at the same\ - \ time to haveaccurate reactive user control over all the available production\ - \ levels. 
Mageallows to arbitrarily change between voices, control speaking style\ - \ or vocalidentity, manipulate voice characteristics or alter the targeted contexton-the-fly\ - \ and also maintain the naturalness and intelligibility of the output.To achieve\ - \ these controls, it was essential to redesign and improve the initiallibrary.\ - \ This paper focuses on the improvements of the architectural design,the additional\ - \ user controls and provides an overview of a prototype, where aguitar is used\ - \ to reactively control the generation of a synthetic voice invarious levels.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Maria Astrinaki and Nicolas\ - \ d'Alessandro and Lo{\\\"i}c Reboursi{\\`e}re and Alexis Moinet and Thierry Dutoit},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178467},\n issn = {2220-4806},\n\ - \ keywords = {speech synthesis, augmented guitar, hexaphonic guitar},\n month\ - \ = {May},\n pages = {547--550},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {MAGE 2.0: New Features and its Application in the Development\ - \ of a Talking Guitar},\n url = {http://www.nime.org/proceedings/2013/nime2013_214.pdf},\n\ + author: Xiao Xiao and Anna Pereira and Hiroshi Ishii + bibtex: "@inproceedings{Xiao2013,\n abstract = {The body channels rich layers of\ + \ information when playing music, from intricatemanipulations of the instrument\ + \ to vivid personifications of expression. Butwhen music is captured and replayed\ + \ across distance and time, the performer'sbody is too often trapped behind a\ + \ small screen or absent entirely.This paper introduces an interface to conjure\ + \ the recorded performer bycombining the moving keys of a player piano with life-sized\ + \ projection of thepianist's hands and upper body. 
Inspired by reflections on\ + \ a lacquered grandpiano, our interface evokes the sense that the virtual pianist\ + \ is playing thephysically moving keys.Through our interface, we explore the question\ + \ of how to viscerally simulate aperformer's presence to create immersive experiences.\ + \ We discuss designchoices, outline a space of usage scenarios and report reactions\ + \ from users.},\n address = {Daejeon, Republic of Korea},\n author = {Xiao Xiao\ + \ and Anna Pereira and Hiroshi Ishii},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178692},\n\ + \ issn = {2220-4806},\n keywords = {piano performance, musical expressivity, body\ + \ language, recorded music, player piano, augmented reality, embodiment},\n month\ + \ = {May},\n pages = {7--12},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {Conjuring the Recorded Pianist: A New Medium to Experience\ + \ Musical Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_28.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178467 + doi: 10.5281/zenodo.1178692 issn: 2220-4806 - keywords: 'speech synthesis, augmented guitar, hexaphonic guitar' + keywords: 'piano performance, musical expressivity, body language, recorded music, + player piano, augmented reality, embodiment' month: May - pages: 547--550 + pages: 7--12 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'MAGE 2.0: New Features and its Application in the Development of a Talking - Guitar' - url: http://www.nime.org/proceedings/2013/nime2013_214.pdf + title: 'Conjuring the Recorded Pianist: A New Medium to Experience Musical Performance' + url: http://www.nime.org/proceedings/2013/nime2013_28.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Lee2013a - abstract: 'We introduce a form of networked music performance 
where a performer - plays amobile music instrument while it is being implemented on the fly by a livecoder. - This setup poses a set of challenges in performing a music instrumentwhich changes - over time and we suggest design guidelines such as making asmooth transition, - varying adoption of change, and sharing information betweenthe pair of two performers. - A proof-of-concept instrument is implemented on amobile device using UrMus, applying - the suggested guidelines. We wish that thismodel would expand the scope of live - coding to the distributed interactivesystem, drawing existing performance ideas - of NIMEs.' + ID: Taylor2013 + abstract: 'What is the place for Internet Art within the paradigm of remote musicperformance? + In this paper, we discuss techniques for live audiovisualstorytelling through + the Web browsers of remote viewers. We focus on theincorporation of socket technology + to create a real-time link between performerand audience, enabling manipulation + of Web media directly within the eachaudience member''s browser. Finally, we describe + Plum Street, an onlinemultimedia performance, and suggest that by involving remote + performance,appropriating Web media such as Google Maps, social media, and Web + Audio intothe work, we can tell stories in a way that more accurately addresses + modernlife and holistically fulfills the Web browser''s capabilities as a contemporaryperformance + instrument.' address: 'Daejeon, Republic of Korea' - author: Sang Won Lee and Georg Essl - bibtex: "@inproceedings{Lee2013a,\n abstract = {We introduce a form of networked\ - \ music performance where a performer plays amobile music instrument while it\ - \ is being implemented on the fly by a livecoder. This setup poses a set of challenges\ - \ in performing a music instrumentwhich changes over time and we suggest design\ - \ guidelines such as making asmooth transition, varying adoption of change, and\ - \ sharing information betweenthe pair of two performers. 
A proof-of-concept instrument\ - \ is implemented on amobile device using UrMus, applying the suggested guidelines.\ - \ We wish that thismodel would expand the scope of live coding to the distributed\ - \ interactivesystem, drawing existing performance ideas of NIMEs.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {Sang Won Lee and Georg Essl},\n\ + author: Ben Taylor and Jesse Allison + bibtex: "@inproceedings{Taylor2013,\n abstract = {What is the place for Internet\ + \ Art within the paradigm of remote musicperformance? In this paper, we discuss\ + \ techniques for live audiovisualstorytelling through the Web browsers of remote\ + \ viewers. We focus on theincorporation of socket technology to create a real-time\ + \ link between performerand audience, enabling manipulation of Web media directly\ + \ within the eachaudience member's browser. Finally, we describe Plum Street,\ + \ an onlinemultimedia performance, and suggest that by involving remote performance,appropriating\ + \ Web media such as Google Maps, social media, and Web Audio intothe work, we\ + \ can tell stories in a way that more accurately addresses modernlife and holistically\ + \ fulfills the Web browser's capabilities as a contemporaryperformance instrument.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Ben Taylor and Jesse Allison},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178592},\n issn = {2220-4806},\n\ - \ keywords = {live coding, network music, on-the-fly instrument, mobile music},\n\ - \ month = {May},\n pages = {493--498},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Live Coding The Mobile Music Instrument},\n url\ - \ = {http://www.nime.org/proceedings/2013/nime2013_216.pdf},\n year = {2013}\n\ - }\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1178672},\n issn = {2220-4806},\n\ + \ keywords = {Remote Performance, Network 
Music, Internet Art, Storytelling},\n\ + \ month = {May},\n pages = {477--478},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Plum St: Live Digital Storytelling with Remote\ + \ Browsers},\n url = {http://www.nime.org/proceedings/2013/nime2013_281.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178592 + doi: 10.5281/zenodo.1178672 issn: 2220-4806 - keywords: 'live coding, network music, on-the-fly instrument, mobile music' + keywords: 'Remote Performance, Network Music, Internet Art, Storytelling' month: May - pages: 493--498 + pages: 477--478 publisher: 'Graduate School of Culture Technology, KAIST' - title: Live Coding The Mobile Music Instrument - url: http://www.nime.org/proceedings/2013/nime2013_216.pdf + title: 'Plum St: Live Digital Storytelling with Remote Browsers' + url: http://www.nime.org/proceedings/2013/nime2013_281.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: You2013 - abstract: 'Remix_Dance Music 3 is a four-channel quasi-fixed media piece that can - beimprovised by a single player operating the Max/MSP-based controller on atablet - such as iPad. Within the fixed time limit of six minutes, the performercan freely - (de)activate and displace the eighty seven precomposed audio filesthat are simultaneously - running, generating a sonic structure to one''s likingout of the given network - of musical possibilities. The interface is designed toinvite an integral musical - structuring particularly in the dimensions ofperformatively underexplored (but - still sonically viable) parameters that arelargely based on MPEG-7 audio descriptors.' + ID: Roberts2013a + abstract: 'Web technologies provide an incredible opportunity to present new musicalinterfaces + to new audiences. 
Applications written in JavaScript and designed torun in the + browser offer remarkable performance, mobile/desktop portability andlongevity + due to standardization. Our research examines the use and potentialof native web + technologies for musical expression. We introduce two librariestowards this end: + Gibberish.js, a heavily optimized audio DSP library, andInterface.js, a GUI toolkit + that works with mouse, touch and motion events.Together these libraries provide + a complete system for defining musicalinstruments that can be used in both desktop + and mobile browsers. Interface.jsalso enables control of remote synthesis applications + by including anapplication that translates the socket protocol used by browsers + into both MIDIand OSC messages.' address: 'Daejeon, Republic of Korea' - author: Jaeseong You and Red Wierenga - bibtex: "@inproceedings{You2013,\n abstract = {Remix_Dance Music 3 is a four-channel\ - \ quasi-fixed media piece that can beimprovised by a single player operating the\ - \ Max/MSP-based controller on atablet such as iPad. 
Within the fixed time limit\ - \ of six minutes, the performercan freely (de)activate and displace the eighty\ - \ seven precomposed audio filesthat are simultaneously running, generating a sonic\ - \ structure to one's likingout of the given network of musical possibilities.\ - \ The interface is designed toinvite an integral musical structuring particularly\ - \ in the dimensions ofperformatively underexplored (but still sonically viable)\ - \ parameters that arelargely based on MPEG-7 audio descriptors.},\n address =\ - \ {Daejeon, Republic of Korea},\n author = {Jaeseong You and Red Wierenga},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178696},\n issn = {2220-4806},\n\ - \ keywords = {Novel controllers, interface for musical expression, musical mapping\ - \ strategy, music cognition, music perception, MPEG-7},\n month = {May},\n pages\ - \ = {124--127},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Remix_Dance 3: Improvisatory Sound Displacing on Touch Screen-Based\ - \ Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_219.pdf},\n\ + author: Charles Roberts and Graham Wakefield and Matthew Wright + bibtex: "@inproceedings{Roberts2013a,\n abstract = {Web technologies provide an\ + \ incredible opportunity to present new musicalinterfaces to new audiences. Applications\ + \ written in JavaScript and designed torun in the browser offer remarkable performance,\ + \ mobile/desktop portability andlongevity due to standardization. 
Our research\ + \ examines the use and potentialof native web technologies for musical expression.\ + \ We introduce two librariestowards this end: Gibberish.js, a heavily optimized\ + \ audio DSP library, andInterface.js, a GUI toolkit that works with mouse, touch\ + \ and motion events.Together these libraries provide a complete system for defining\ + \ musicalinstruments that can be used in both desktop and mobile browsers. Interface.jsalso\ + \ enables control of remote synthesis applications by including anapplication\ + \ that translates the socket protocol used by browsers into both MIDIand OSC messages.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Charles Roberts and Graham\ + \ Wakefield and Matthew Wright},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178648},\n\ + \ issn = {2220-4806},\n keywords = {mobile devices, javascript, browser-based\ + \ NIMEs, web audio, websockets},\n month = {May},\n pages = {313--318},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {The Web Browser\ + \ As Synthesizer And Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_282.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178696 + doi: 10.5281/zenodo.1178648 issn: 2220-4806 - keywords: 'Novel controllers, interface for musical expression, musical mapping - strategy, music cognition, music perception, MPEG-7' + keywords: 'mobile devices, javascript, browser-based NIMEs, web audio, websockets' month: May - pages: 124--127 + pages: 313--318 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Remix_Dance 3: Improvisatory Sound Displacing on Touch Screen-Based Interface' - url: http://www.nime.org/proceedings/2013/nime2013_219.pdf + title: The Web Browser As Synthesizer And Interface + url: 
http://www.nime.org/proceedings/2013/nime2013_282.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Barbosa2013 - abstract: 'This paper presents an innovative digital musical instrument, the Illusio, - based on an augmented multi-touch interface that combines a traditional multi-touch - surface and a device similar to a guitar pedal. Illusio allows users to perform - by drawing and by associating the sketches with live loops. These loops are manipulated - based on a concept called hierarchical live looping, which extends traditional - live looping through the use of a musical tree, in which any music operation applied - to a given node affects all its children nodes. Finally, we evaluate the instrument - considering the performer and the audience, which are two of the most important - stakeholders involved in the use, conception, and perception of a musical device. - The results achieved are encouraging and led to useful insights about how to improve - instrument features, performance and usability.' + ID: Grosshauser2013 + abstract: 'Several new technologies to capture motion, gesture and forces for musical + instrument players'' analyses have been developed in the last years. In research + and for augmented instruments one parameter is underrepresented so far. It is + finger position and pressure measurement, applied by the musician while playing + the musical instrument. In this paper we show a flexible linear-potentiometer + and forcesensitive-resistor (FSR) based solution for position, pressure and force + sensing between the contact point of the fingers and the musical instrument. A + flexible matrix printed circuit board (PCB) is fixed on a piano key. We further + introduce linear potentiometer based left hand finger position sensing for string + instruments, integrated into a violin and a guitar finger board. Several calibration + and measurement scenarios are shown. 
The violin sensor was evaluated with 13 music + students regarding playability and robustness of the system. Main focus was a + the integration of the sensors into these two traditional musical instruments + as unobtrusively as possible to keep natural haptic playing sensation. The musicians + playing the violin in different performance situations stated good playability + and no differences in the haptic sensation while playing. The piano sensor is + rated, due to interviews after testing it in a conventional keyboard quite unobtrusive, + too, but still evokes a different haptic sensation.' address: 'Daejeon, Republic of Korea' - author: 'Jerônimo Barbosa, Filipe Calegario, Veronica Teichrieb, Geber Ramalho and - Giordano Cabral' - bibtex: "@inproceedings{Barbosa2013,\n abstract = {This paper presents an innovative\ - \ digital musical instrument, the Illusio, based on an augmented multi-touch interface\ - \ that combines a traditional multi-touch surface and a device similar to a guitar\ - \ pedal. Illusio allows users to perform by drawing and by associating the sketches\ - \ with live loops. These loops are manipulated based on a concept called hierarchical\ - \ live looping, which extends traditional live looping through the use of a musical\ - \ tree, in which any music operation applied to a given node affects all its children\ - \ nodes. Finally, we evaluate the instrument considering the performer and the\ - \ audience, which are two of the most important stakeholders involved in the use,\ - \ conception, and perception of a musical device. 
The results achieved are encouraging\ - \ and led to useful insights about how to improve instrument features, performance\ - \ and usability.},\n address = {Daejeon, Republic of Korea},\n author = {Jer{\\\ - ^o}nimo Barbosa, Filipe Calegario, Veronica Teichrieb, Geber Ramalho and Giordano\ - \ Cabral},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178566},\n issn = {2220-4806},\n\ - \ keywords = {Digital musical instruments, augmented multi-touch, hierarchical\ - \ live looping, interaction techniques, evaluation methodology},\n month = {May},\n\ - \ pages = {499--502},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {A Drawing-Based Digital Music Instrument},\n url = {http://www.nime.org/proceedings/2013/nime2013_220.pdf},\n\ + author: Tobias Grosshauser and Gerhard Tröster + bibtex: "@inproceedings{Grosshauser2013,\n abstract = {Several new technologies\ + \ to capture motion, gesture and forces for musical instrument players' analyses\ + \ have been developed in the last years. In research and for augmented instruments\ + \ one parameter is underrepresented so far. It is finger position and pressure\ + \ measurement, applied by the musician while playing the musical instrument. In\ + \ this paper we show a flexible linear-potentiometer and forcesensitive-resistor\ + \ (FSR) based solution for position, pressure and force sensing between the contact\ + \ point of the fingers and the musical instrument. A flexible matrix printed circuit\ + \ board (PCB) is fixed on a piano key. We further introduce linear potentiometer\ + \ based left hand finger position sensing for string instruments, integrated into\ + \ a violin and a guitar finger board. Several calibration and measurement scenarios\ + \ are shown. The violin sensor was evaluated with 13 music students regarding\ + \ playability and robustness of the system. 
Main focus was a the integration of\ + \ the sensors into these two traditional musical instruments as unobtrusively\ + \ as possible to keep natural haptic playing sensation. The musicians playing\ + \ the violin in different performance situations stated good playability and no\ + \ differences in the haptic sensation while playing. The piano sensor is rated,\ + \ due to interviews after testing it in a conventional keyboard quite unobtrusive,\ + \ too, but still evokes a different haptic sensation.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Tobias Grosshauser and Gerhard Tr{\\''o}ster},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178538},\n issn = {2220-4806},\n\ + \ keywords = {Sensor, Piano, Violin, Guitar, Position, Pressure, Keyboard},\n\ + \ month = {May},\n pages = {479--484},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Finger Position and Pressure Sensing Techniques\ + \ for String and Keyboard Instruments},\n url = {http://www.nime.org/proceedings/2013/nime2013_286.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178566 + doi: 10.5281/zenodo.1178538 issn: 2220-4806 - keywords: 'Digital musical instruments, augmented multi-touch, hierarchical live - looping, interaction techniques, evaluation methodology' + keywords: 'Sensor, Piano, Violin, Guitar, Position, Pressure, Keyboard' month: May - pages: 499--502 + pages: 479--484 publisher: 'Graduate School of Culture Technology, KAIST' - title: A Drawing-Based Digital Music Instrument - url: http://www.nime.org/proceedings/2013/nime2013_220.pdf + title: Finger Position and Pressure Sensing Techniques for String and Keyboard Instruments + url: http://www.nime.org/proceedings/2013/nime2013_286.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Sarwate2013 - abstract: 'The 
Variator is a compositional assistance tool that allows users to - quicklyproduce and experiment with variations on musical objects, such as chords,melodies, - and chord progressions. The transformations performed by the Variatorcan range - from standard counterpoint transformations (inversion, retrograde,transposition) - to more complicated custom transformations, and the system isbuilt to encourage - the writing of custom transformations.This paper explores the design decisions - involved in creating a compositionalassistance tool, describes the Variator interface - and a preliminary set ofimplemented transformation functions, analyzes the results - of the evaluationsof a prototype system, and lays out future plans for expanding - upon thatsystem, both as a stand-alone application and as the basis for an opensource/collaborative - community where users can implement and share their owntransformation functions.' + ID: Allison2013 + abstract: 'Distributed performance systems present many challenges to the artist + inmanaging performance information, distribution and coordination of interface + tomany users, and cross platform support to provide a reasonable level ofinteraction + to the widest possible user base.Now that many features of HTML 5 are implemented, + powerful browser basedinterfaces can be utilized for distribution across a variety + of static andmobile devices. The author proposes leveraging the power of a web + applicationto handle distribution of user interfaces and passing interactions + via OSC toand from realtime audio/video processing software. Interfaces developed + in thisfashion can reach potential performers by distributing a unique user interfaceto + any device with a browser anywhere in the world.' 
address: 'Daejeon, Republic of Korea' - author: Avneesh Sarwate and Rebecca Fiebrink - bibtex: "@inproceedings{Sarwate2013,\n abstract = {The Variator is a compositional\ - \ assistance tool that allows users to quicklyproduce and experiment with variations\ - \ on musical objects, such as chords,melodies, and chord progressions. The transformations\ - \ performed by the Variatorcan range from standard counterpoint transformations\ - \ (inversion, retrograde,transposition) to more complicated custom transformations,\ - \ and the system isbuilt to encourage the writing of custom transformations.This\ - \ paper explores the design decisions involved in creating a compositionalassistance\ - \ tool, describes the Variator interface and a preliminary set ofimplemented transformation\ - \ functions, analyzes the results of the evaluationsof a prototype system, and\ - \ lays out future plans for expanding upon thatsystem, both as a stand-alone application\ - \ and as the basis for an opensource/collaborative community where users can implement\ - \ and share their owntransformation functions.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Avneesh Sarwate and Rebecca Fiebrink},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178654},\n issn = {2220-4806},\n keywords = {Composition\ - \ assistance tool, computer-aided composition, social composition},\n month =\ - \ {May},\n pages = {279--282},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {Variator: A Creativity Support Tool for Music Composition},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_224.pdf},\n year = {2013}\n\ - }\n" + author: Jesse Allison and Yemin Oh and Benjamin Taylor + bibtex: "@inproceedings{Allison2013,\n abstract = {Distributed performance systems\ + \ present many challenges to the artist inmanaging performance information, distribution\ + \ and 
coordination of interface tomany users, and cross platform support to provide\ + \ a reasonable level ofinteraction to the widest possible user base.Now that many\ + \ features of HTML 5 are implemented, powerful browser basedinterfaces can be\ + \ utilized for distribution across a variety of static andmobile devices. The\ + \ author proposes leveraging the power of a web applicationto handle distribution\ + \ of user interfaces and passing interactions via OSC toand from realtime audio/video\ + \ processing software. Interfaces developed in thisfashion can reach potential\ + \ performers by distributing a unique user interfaceto any device with a browser\ + \ anywhere in the world.},\n address = {Daejeon, Republic of Korea},\n author\ + \ = {Jesse Allison and Yemin Oh and Benjamin Taylor},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178461},\n issn = {2220-4806},\n keywords = {NIME, distributed\ + \ performance systems, Ruby on Rails, collaborative performance, distributed instruments,\ + \ distributed interface, HTML5, browser based interface},\n month = {May},\n pages\ + \ = {1--6},\n publisher = {Graduate School of Culture Technology, KAIST},\n title\ + \ = {NEXUS: Collaborative Performance for the Masses, Handling Instrument Interface\ + \ Distribution through the Web},\n url = {http://www.nime.org/proceedings/2013/nime2013_287.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178654 + doi: 10.5281/zenodo.1178461 issn: 2220-4806 - keywords: 'Composition assistance tool, computer-aided composition, social composition' + keywords: 'NIME, distributed performance systems, Ruby on Rails, collaborative performance, + distributed instruments, distributed interface, HTML5, browser based interface' month: May - pages: 279--282 + pages: 1--6 publisher: 'Graduate School of 
Culture Technology, KAIST' - title: 'Variator: A Creativity Support Tool for Music Composition' - url: http://www.nime.org/proceedings/2013/nime2013_224.pdf + title: 'NEXUS: Collaborative Performance for the Masses, Handling Instrument Interface + Distribution through the Web' + url: http://www.nime.org/proceedings/2013/nime2013_287.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Grierson2013 - abstract: 'NoiseBear is a wireless malleable controller designed for, and in participationwith, - physically and cognitively disabled children. The aim of the project wasto produce - a musical controller that was robust, and flexible enough to be usedin a wide - range of interactive scenarios in participatory design workshops. NoiseBear demonstrates - an open ended system for designing wireless malleablecontrollers in different - shapes. It uses pressure sensitive material made fromconductive thread and polyester - cushion stuffing, to give the feel of a softtoy. The sensor networks with other - devices using the Bluetooth Low Energyprotocol, running on a BlueGiga BLE112 chip. - This contains an embedded 8051processor which manages the sensor. NoiseBear has - undergone an initialformative evaluation in a workshop session with four autistic - children, andcontinues to evolve in series of participatory design workshops. - The evaluationshowed that controller could be engaging for the children to use, - andhighlighted some technical limitations of the design. Solutions to theselimitations - are discussed, along with plans for future design iterations.' + ID: Baldan2013 + abstract: 'This paper presents an audio-based tennis simulation game for mobile + devices, which uses motion input and non-verbal audio feedback as exclusive means + of interaction. Players have to listen carefully to the provided auditory clues, + like racquet hits and ball bounces, rhythmically synchronizing their movements + in order to keep the ball into play. 
The device can be swung freely and act as + a full-fledged motionbased controller, as the game does not rely at all on visual + feedback and the device display can thus be ignored. The game aims to be entertaining + but also effective for educational purposes, such as ear training or improvement + of the sense of timing, and enjoyable both by visually-impaired and sighted users.' address: 'Daejeon, Republic of Korea' - author: Mick Grierson and Chris Kiefer - bibtex: "@inproceedings{Grierson2013,\n abstract = {NoiseBear is a wireless malleable\ - \ controller designed for, and in participationwith, physically and cognitively\ - \ disabled children. The aim of the project wasto produce a musical controller\ - \ that was robust, and flexible enough to be usedin a wide range of interactive\ - \ scenarios in participatory design workshops. NoiseBear demonstrates an open\ - \ ended system for designing wireless malleablecontrollers in different shapes.\ - \ It uses pressure sensitive material made fromconductive thread and polyester\ - \ cushion stuffing, to give the feel of a softtoy. The sensor networks with other\ - \ devices using the Bluetooth Low Energyprotocol, running on a BlueGiga BLE112\ - \ chip. This contains an embedded 8051processor which manages the sensor. NoiseBear\ - \ has undergone an initialformative evaluation in a workshop session with four\ - \ autistic children, andcontinues to evolve in series of participatory design\ - \ workshops. The evaluationshowed that controller could be engaging for the children\ - \ to use, andhighlighted some technical limitations of the design. 
Solutions to\ - \ theselimitations are discussed, along with plans for future design iterations.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Mick Grierson and Chris\ - \ Kiefer},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178536},\n issn = {2220-4806},\n\ - \ keywords = {malleable controllers, assistive technology, multiparametric mapping},\n\ - \ month = {May},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {NoiseBear: A Malleable Wireless Controller Designed In Participation\ - \ with Disabled Children},\n url = {http://www.nime.org/proceedings/2013/nime2013_227.pdf},\n\ + author: Stefano Baldan and Amalia De Götzen and Stefania Serafin + bibtex: "@inproceedings{Baldan2013,\n abstract = {This paper presents an audio-based\ + \ tennis simulation game for mobile devices, which uses motion input and non-verbal\ + \ audio feedback as exclusive means of interaction. Players have to listen carefully\ + \ to the provided auditory clues, like racquet hits and ball bounces, rhythmically\ + \ synchronizing their movements in order to keep the ball into play. The device\ + \ can be swung freely and act as a full-fledged motionbased controller, as the\ + \ game does not rely at all on visual feedback and the device display can thus\ + \ be ignored. 
The game aims to be entertaining but also effective for educational\ + \ purposes, such as ear training or improvement of the sense of timing, and enjoyable\ + \ both by visually-impaired and sighted users.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Stefano Baldan and Amalia De G{\\''o}tzen and Stefania\ + \ Serafin},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178470},\n issn\ + \ = {2220-4806},\n keywords = {Audio game, mobile devices, sonic interaction design,\ + \ rhythmic interaction, motion-based},\n month = {May},\n pages = {200--201},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Sonic\ + \ Tennis: a rhythmic interaction game for mobile devices},\n url = {http://www.nime.org/proceedings/2013/nime2013_288.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178536 + doi: 10.5281/zenodo.1178470 issn: 2220-4806 - keywords: 'malleable controllers, assistive technology, multiparametric mapping' + keywords: 'Audio game, mobile devices, sonic interaction design, rhythmic interaction, + motion-based' month: May + pages: 200--201 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'NoiseBear: A Malleable Wireless Controller Designed In Participation with - Disabled Children' - url: http://www.nime.org/proceedings/2013/nime2013_227.pdf + title: 'Sonic Tennis: a rhythmic interaction game for mobile devices' + url: http://www.nime.org/proceedings/2013/nime2013_288.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: jo2013 - abstract: 'In this paper, we present a method to produce analog records with standardvector - graphics software (i.e. Adobe Illustrator) and two different types ofcutting machines: - laser cutter, and paper cutter. 
The method enables us toengrave wave forms on - a surface of diverse materials such as paper, wood,acrylic, and leather without - or with prior acoustic information (i.e. digitalaudio data). The results could - be played with standard record players. Wepresent the method with its technical - specification and explain our initialtrials with two performances and a workshop. - The work examines the role ofmusical reproduction in the age of personal fabrication. - ---p.s. If it''s possible, we also would like to submit the work for performanceand - workshop.A video of performance < it contains information on the authorshttp://www.youtube.com/watch?v=vbCLe06P7j0' + ID: Lee2013 + abstract: 'This work aims at a music piece for large-scale audience participation + usingmobile phones as musical instruments at a music performance. Utilizing theubiquity + of smart phones, we attempted to accomplish audience engagement bycrafting an + accessible musical instrument with which audience can be a part ofthe performance. + Drawing lessons learnt from the creative works of mobilemusic, audience participation, + and the networked instrument a mobile musicalinstrument application is developed + so that audience can download the app atthe concert, play the instrument instantly, + interact with other audiencemembers, and contribute to the music by sound generated + from their mobilephones. The post-survey results indicate that the instrument + was easy to use,and the audience felt connected to the music and other musicians.' address: 'Daejeon, Republic of Korea' - author: kazuhiro jo - bibtex: "@inproceedings{jo2013,\n abstract = {In this paper, we present a method\ - \ to produce analog records with standardvector graphics software (i.e. Adobe\ - \ Illustrator) and two different types ofcutting machines: laser cutter, and paper\ - \ cutter. 
The method enables us toengrave wave forms on a surface of diverse materials\ - \ such as paper, wood,acrylic, and leather without or with prior acoustic information\ - \ (i.e. digitalaudio data). The results could be played with standard record players.\ - \ Wepresent the method with its technical specification and explain our initialtrials\ - \ with two performances and a workshop. The work examines the role ofmusical reproduction\ - \ in the age of personal fabrication. ---p.s. If it's possible, we also would\ - \ like to submit the work for performanceand workshop.A video of performance <\ - \ it contains information on the authorshttp://www.youtube.com/watch?v=vbCLe06P7j0},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {kazuhiro jo},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178578},\n issn = {2220-4806},\n keywords\ - \ = {Analog Record, Personal Fabrication, Media Archaeology},\n month = {May},\n\ - \ pages = {283--286},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {cutting record --- a record without (or with) prior acoustic information},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_228.pdf},\n year = {2013}\n\ + author: Sang Won Lee and Jason Freeman + bibtex: "@inproceedings{Lee2013,\n abstract = {This work aims at a music piece for\ + \ large-scale audience participation usingmobile phones as musical instruments\ + \ at a music performance. Utilizing theubiquity of smart phones, we attempted\ + \ to accomplish audience engagement bycrafting an accessible musical instrument\ + \ with which audience can be a part ofthe performance. 
Drawing lessons learnt\ + \ from the creative works of mobilemusic, audience participation, and the networked\ + \ instrument a mobile musicalinstrument application is developed so that audience\ + \ can download the app atthe concert, play the instrument instantly, interact\ + \ with other audiencemembers, and contribute to the music by sound generated from\ + \ their mobilephones. The post-survey results indicate that the instrument was\ + \ easy to use,and the audience felt connected to the music and other musicians.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Sang Won Lee and Jason Freeman},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178594},\n issn = {2220-4806},\n\ + \ keywords = {mobile music, audience participation, networked instrument},\n month\ + \ = {May},\n pages = {450--455},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {echobo : Audience Participation Using The Mobile Music Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_291.pdf},\n year = {2013}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178578 + doi: 10.5281/zenodo.1178594 issn: 2220-4806 - keywords: 'Analog Record, Personal Fabrication, Media Archaeology' + keywords: 'mobile music, audience participation, networked instrument' month: May - pages: 283--286 + pages: 450--455 publisher: 'Graduate School of Culture Technology, KAIST' - title: cutting record --- a record without (or with) prior acoustic information - url: http://www.nime.org/proceedings/2013/nime2013_228.pdf + title: 'echobo : Audience Participation Using The Mobile Music Instrument' + url: http://www.nime.org/proceedings/2013/nime2013_291.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Klugel2013 - abstract: 'Controlling the timbre generated by an audio synthesizerin a goal-oriented - 
way requires a profound understandingof the synthesizer''s manifold structural - parameters. Especially shapingtimbre expressively to communicate emotional affect - requires expertise.Therefore, novices in particular may not be able to adequately - control timbrein viewof articulating the wealth of affects musically. In this - context, the focus ofthis paper is the development of a model that can represent - a relationshipbetween timbre and an expected emotional affect . The results of - the evaluationof the presented model are encouraging which supports its use in - steering oraugmenting the control of the audio synthesis. We explicitly envision - thispaper as a contribution to the field of Synthesis by Analysis in the broadersense, - albeit being potentially suitable to other related domains.' + ID: Trento2013 + abstract: 'This paper describes the development of a prototype of a sonic toy forpre-scholar + kids. The device, which is a modified version of a footballratchet, is based on + the spinning gesture and it allows to experience fourdifferent types of auditory + feedback. These algorithms let a kid play withmusic rhythm, generate a continuous + sound feedback and control the pitch of apiece of music. An evaluation test of + the device has been performed withfourteen kids in a kindergarten. Results and + observations showed that kidspreferred the algorithms based on the exploration + of the music rhythm and onpitch shifting.' address: 'Daejeon, Republic of Korea' - author: Niklas Klügel and Georg Groh - bibtex: "@inproceedings{Klugel2013,\n abstract = {Controlling the timbre generated\ - \ by an audio synthesizerin a goal-oriented way requires a profound understandingof\ - \ the synthesizer's manifold structural parameters. Especially shapingtimbre expressively\ - \ to communicate emotional affect requires expertise.Therefore, novices in particular\ - \ may not be able to adequately control timbrein viewof articulating the wealth\ - \ of affects musically. 
In this context, the focus ofthis paper is the development\ - \ of a model that can represent a relationshipbetween timbre and an expected emotional\ - \ affect . The results of the evaluationof the presented model are encouraging\ - \ which supports its use in steering oraugmenting the control of the audio synthesis.\ - \ We explicitly envision thispaper as a contribution to the field of Synthesis\ - \ by Analysis in the broadersense, albeit being potentially suitable to other\ - \ related domains.},\n address = {Daejeon, Republic of Korea},\n author = {Niklas\ - \ Kl{\\\"u}gel and Georg Groh},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178586},\n\ - \ issn = {2220-4806},\n keywords = {Emotional affect,Timbre, Machine Learning,\ - \ Deep Belief Networks, Analysis by Synthesis},\n month = {May},\n pages = {525--530},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Towards\ - \ Mapping Timbre to Emotional Affect},\n url = {http://www.nime.org/proceedings/2013/nime2013_23.pdf},\n\ + author: Stefano Trento and Stefania Serafin + bibtex: "@inproceedings{Trento2013,\n abstract = {This paper describes the development\ + \ of a prototype of a sonic toy forpre-scholar kids. The device, which is a modified\ + \ version of a footballratchet, is based on the spinning gesture and it allows\ + \ to experience fourdifferent types of auditory feedback. These algorithms let\ + \ a kid play withmusic rhythm, generate a continuous sound feedback and control\ + \ the pitch of apiece of music. An evaluation test of the device has been performed\ + \ withfourteen kids in a kindergarten. 
Results and observations showed that kidspreferred\ + \ the algorithms based on the exploration of the music rhythm and onpitch shifting.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Stefano Trento and Stefania\ + \ Serafin},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178682},\n issn\ + \ = {2220-4806},\n keywords = {Sonic toy, children, auditory feedback.},\n month\ + \ = {May},\n pages = {456--459},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {Flag beat: a novel interface for rhythmic musical expression\ + \ for kids},\n url = {http://www.nime.org/proceedings/2013/nime2013_295.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178586 + doi: 10.5281/zenodo.1178682 issn: 2220-4806 - keywords: 'Emotional affect,Timbre, Machine Learning, Deep Belief Networks, Analysis - by Synthesis' + keywords: 'Sonic toy, children, auditory feedback.' month: May - pages: 525--530 + pages: 456--459 publisher: 'Graduate School of Culture Technology, KAIST' - title: Towards Mapping Timbre to Emotional Affect - url: http://www.nime.org/proceedings/2013/nime2013_23.pdf + title: 'Flag beat: a novel interface for rhythmic musical expression for kids' + url: http://www.nime.org/proceedings/2013/nime2013_295.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Greenlee2013 - abstract: 'In the design of recent systems, I have advanced techniques that positiongraphic - synthesis methods in the context of solo, improvisational performance.Here, the - primary interfaces for musical action are prepared works on paper,scanned by digital - video cameras which in turn pass image data on to softwarefor analysis and interpretation - as sound synthesis and signal processingprocedures. 
The focus of this paper is - on one of these techniques, a process Idescribe as graphic waveshaping. A discussion - of graphic waveshaping in basicform and as utilized in my performance work, (title - omitted), is offered. Inthe latter case, the performer''s objective is to guide - the interpretation ofimages as sound, constantly tuning and retuning the conversion - while selectingand scanning images from a large catalog. Due to the erratic nature - of thesystem and the precondition that image to sound relationships are unfixed, - theperformance situation is replete with the discovery of new sounds and thecircumstances - that bring them into play. Graphic waveshaping may be understood as non-linear - distortion synthesis withtime-varying transfer functions stemming from visual - scan lines. As a form ofgraphic synthesis, visual images function as motivations - for sound generation.There is a strategy applied for creating one out of the other. - However, counterto compositionally oriented forms of graphic synthesis where one - may assignimage characteristics to musical parameters such as pitches, durations,dynamics, - etc., graphic waveshaping is foremost a processing technique, as itdistorts incoming - signals according to graphically derived transfer functions.As such, it may also - be understood as an audio effect; one that in myimplementations is particularly - feedback dependent, oriented towards shapingthe erratic behavior of synthesis - patches written in Max/MSP/Jitter. Used inthis manner, graphic waveshaping elicits - an emergent system behaviorconditioned by visual features.' + ID: Place2013 + abstract: 'The AlphaSphere is an electronic musical instrument featuring a series + oftactile, pressure sensitive touch pads arranged in a spherical form. It isdesigned + to offer a new playing style, while allowing for the expressivereal-time modulation + of sound available in electronic-based music. 
It is alsodesigned to be programmable, + enabling the flexibility to map a series ofdifferent notational arrangements to + the pad-based interface. The AlphaSphere functions as an HID, MIDI and OSC device, + which connects to acomputer and/or independent MIDI device, and its control messages + can be mappedthrough the AlphaLive software. Our primary motivations for creating + theAlphaSphere are to design an expressive music interface which can exploit thesound + palate of synthesizers in a design which allows for the mapping ofnotational arrangements.' address: 'Daejeon, Republic of Korea' - author: Shawn Greenlee - bibtex: "@inproceedings{Greenlee2013,\n abstract = {In the design of recent systems,\ - \ I have advanced techniques that positiongraphic synthesis methods in the context\ - \ of solo, improvisational performance.Here, the primary interfaces for musical\ - \ action are prepared works on paper,scanned by digital video cameras which in\ - \ turn pass image data on to softwarefor analysis and interpretation as sound\ - \ synthesis and signal processingprocedures. The focus of this paper is on one\ - \ of these techniques, a process Idescribe as graphic waveshaping. A discussion\ - \ of graphic waveshaping in basicform and as utilized in my performance work,\ - \ (title omitted), is offered. Inthe latter case, the performer's objective is\ - \ to guide the interpretation ofimages as sound, constantly tuning and retuning\ - \ the conversion while selectingand scanning images from a large catalog. Due\ - \ to the erratic nature of thesystem and the precondition that image to sound\ - \ relationships are unfixed, theperformance situation is replete with the discovery\ - \ of new sounds and thecircumstances that bring them into play. Graphic waveshaping\ - \ may be understood as non-linear distortion synthesis withtime-varying transfer\ - \ functions stemming from visual scan lines. 
As a form ofgraphic synthesis, visual\ - \ images function as motivations for sound generation.There is a strategy applied\ - \ for creating one out of the other. However, counterto compositionally oriented\ - \ forms of graphic synthesis where one may assignimage characteristics to musical\ - \ parameters such as pitches, durations,dynamics, etc., graphic waveshaping is\ - \ foremost a processing technique, as itdistorts incoming signals according to\ - \ graphically derived transfer functions.As such, it may also be understood as\ - \ an audio effect; one that in myimplementations is particularly feedback dependent,\ - \ oriented towards shapingthe erratic behavior of synthesis patches written in\ - \ Max/MSP/Jitter. Used inthis manner, graphic waveshaping elicits an emergent\ - \ system behaviorconditioned by visual features.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Shawn Greenlee},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178534},\n\ - \ issn = {2220-4806},\n keywords = {Graphic waveshaping, graphic synthesis, waveshaping\ - \ synthesis, graphic sound, drawn sound},\n month = {May},\n pages = {287--290},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Graphic\ - \ Waveshaping},\n url = {http://www.nime.org/proceedings/2013/nime2013_232.pdf},\n\ + author: Adam Place and Liam Lacey and Thomas Mitchell + bibtex: "@inproceedings{Place2013,\n abstract = {The AlphaSphere is an electronic\ + \ musical instrument featuring a series oftactile, pressure sensitive touch pads\ + \ arranged in a spherical form. It isdesigned to offer a new playing style, while\ + \ allowing for the expressivereal-time modulation of sound available in electronic-based\ + \ music. It is alsodesigned to be programmable, enabling the flexibility to map\ + \ a series ofdifferent notational arrangements to the pad-based interface. 
The\ + \ AlphaSphere functions as an HID, MIDI and OSC device, which connects to acomputer\ + \ and/or independent MIDI device, and its control messages can be mappedthrough\ + \ the AlphaLive software. Our primary motivations for creating theAlphaSphere\ + \ are to design an expressive music interface which can exploit thesound palate\ + \ of synthesizers in a design which allows for the mapping ofnotational arrangements.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Adam Place and Liam Lacey\ + \ and Thomas Mitchell},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178642},\n\ + \ issn = {2220-4806},\n keywords = {AlphaSphere, MIDI, HID, polyphonic aftertouch,\ + \ open source},\n month = {May},\n pages = {491--492},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {AlphaSphere},\n url = {http://www.nime.org/proceedings/2013/nime2013_300.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178534 + doi: 10.5281/zenodo.1178642 issn: 2220-4806 - keywords: 'Graphic waveshaping, graphic synthesis, waveshaping synthesis, graphic - sound, drawn sound' + keywords: 'AlphaSphere, MIDI, HID, polyphonic aftertouch, open source' month: May - pages: 287--290 + pages: 491--492 publisher: 'Graduate School of Culture Technology, KAIST' - title: Graphic Waveshaping - url: http://www.nime.org/proceedings/2013/nime2013_232.pdf + title: AlphaSphere + url: http://www.nime.org/proceedings/2013/nime2013_300.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Park2013c - abstract: 'In this paper we present a highly expressive, robust, and easy-to-build - systemthat provides force-feedback interaction for mobile computing devices (MCD).Our - system, which we call Fortissimo (ff), utilizes standard built-inaccelerometer - measurements in conjunction with generic foam 
padding that can beeasily placed - under a device to render an expressive force-feedback performancesetup. Fortissimo - allows for musically expressive user-interaction with addedforce-feedback which - is integral for any musical controller --a feature that isabsent for touchscreen-centric - MCDs. This paper details ff core concepts,hardware and software designs, and expressivity - of musical features.' + ID: Roberts2013 + abstract: 'We present research that extends the scope of the mobile application + Control, aprototyping environment for defining multimodal interfaces that controlreal-time + artistic and musical performances. Control allows users to rapidlycreate interfaces + employing a variety of modalities, including: speechrecognition, computer vision, + musical feature extraction, touchscreen widgets,and inertial sensor data. Information + from these modalities can be transmittedwirelessly to remote applications. Interfaces + are declared using JSON and canbe extended with JavaScript to add complex behaviors, + including the concurrentfusion of multimodal signals. By simplifying the creation + of interfaces viathese simple markup files, Control allows musicians and artists + to make novelapplications that use and combine both discrete and continuous data + from thewide range of sensors available on commodity mobile devices.' address: 'Daejeon, Republic of Korea' - author: Tae Hong Park and Oriol Nieto - bibtex: "@inproceedings{Park2013c,\n abstract = {In this paper we present a highly\ - \ expressive, robust, and easy-to-build systemthat provides force-feedback interaction\ - \ for mobile computing devices (MCD).Our system, which we call Fortissimo (ff),\ - \ utilizes standard built-inaccelerometer measurements in conjunction with generic\ - \ foam padding that can beeasily placed under a device to render an expressive\ - \ force-feedback performancesetup. 
Fortissimo allows for musically expressive\ - \ user-interaction with addedforce-feedback which is integral for any musical\ - \ controller --a feature that isabsent for touchscreen-centric MCDs. This paper\ - \ details ff core concepts,hardware and software designs, and expressivity of\ - \ musical features.},\n address = {Daejeon, Republic of Korea},\n author = {Tae\ - \ Hong Park and Oriol Nieto},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178638},\n\ - \ issn = {2220-4806},\n keywords = {force-feedback, expression, mobile computing\ - \ devices, mobile music},\n month = {May},\n pages = {291--294},\n publisher =\ - \ {Graduate School of Culture Technology, KAIST},\n title = {Fortissimo: Force-Feedback\ - \ for Mobile Devices},\n url = {http://www.nime.org/proceedings/2013/nime2013_233.pdf},\n\ - \ year = {2013}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178638 - issn: 2220-4806 - keywords: 'force-feedback, expression, mobile computing devices, mobile music' - month: May - pages: 291--294 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Fortissimo: Force-Feedback for Mobile Devices' - url: http://www.nime.org/proceedings/2013/nime2013_233.pdf - year: 2013 - - -- ENTRYTYPE: inproceedings - ID: Scott2013 - abstract: 'Digital music technology has transformed the listener experience and - creatednew avenues for creative interaction and expression within the musical - domain.The barrier to music creation, distribution and collaboration has been - reduced,leading to entirely new ecosystems of musical experience. Software editingtools - such as digital audio workstations (DAW) allow nearly limitlessmanipulation of - source audio into new sonic elements and textures and havepromoted a culture of - recycling and repurposing of content via mashups andremixes. 
We present a multi-touch - application that allows a user to customizetheir listening experience by blending - various versions of a song in real time.' - address: 'Daejeon, Republic of Korea' - author: Jeffrey Scott and Mickey Moorhead and Justin Chapman and Ryan Schwabe and - Youngmoo E. Kim - bibtex: "@inproceedings{Scott2013,\n abstract = {Digital music technology has transformed\ - \ the listener experience and creatednew avenues for creative interaction and\ - \ expression within the musical domain.The barrier to music creation, distribution\ - \ and collaboration has been reduced,leading to entirely new ecosystems of musical\ - \ experience. Software editingtools such as digital audio workstations (DAW) allow\ - \ nearly limitlessmanipulation of source audio into new sonic elements and textures\ - \ and havepromoted a culture of recycling and repurposing of content via mashups\ - \ andremixes. We present a multi-touch application that allows a user to customizetheir\ - \ listening experience by blending various versions of a song in real time.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Jeffrey Scott and Mickey\ - \ Moorhead and Justin Chapman and Ryan Schwabe and Youngmoo E. 
Kim},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178660},\n issn = {2220-4806},\n keywords\ - \ = {Multi-track, Multi-touch, Mobile devices, Interactive media},\n month = {May},\n\ - \ pages = {417--420},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Personalized Song Interaction Using a Multi Touch Interface},\n url\ - \ = {http://www.nime.org/proceedings/2013/nime2013_234.pdf},\n year = {2013}\n\ + author: Charles Roberts and Angus Forbes and Tobias Höllerer + bibtex: "@inproceedings{Roberts2013,\n abstract = {We present research that extends\ + \ the scope of the mobile application Control, aprototyping environment for defining\ + \ multimodal interfaces that controlreal-time artistic and musical performances.\ + \ Control allows users to rapidlycreate interfaces employing a variety of modalities,\ + \ including: speechrecognition, computer vision, musical feature extraction, touchscreen\ + \ widgets,and inertial sensor data. Information from these modalities can be transmittedwirelessly\ + \ to remote applications. Interfaces are declared using JSON and canbe extended\ + \ with JavaScript to add complex behaviors, including the concurrentfusion of\ + \ multimodal signals. 
By simplifying the creation of interfaces viathese simple\ + \ markup files, Control allows musicians and artists to make novelapplications\ + \ that use and combine both discrete and continuous data from thewide range of\ + \ sensors available on commodity mobile devices.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Charles Roberts and Angus Forbes and Tobias H{\\''o}llerer},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178646},\n issn = {2220-4806},\n\ + \ keywords = {Music, mobile, multimodal, interaction},\n month = {May},\n pages\ + \ = {102--105},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Enabling Multimodal Mobile Interfaces for Musical Performance},\n url\ + \ = {http://www.nime.org/proceedings/2013/nime2013_303.pdf},\n year = {2013}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178660 + doi: 10.5281/zenodo.1178646 issn: 2220-4806 - keywords: 'Multi-track, Multi-touch, Mobile devices, Interactive media' + keywords: 'Music, mobile, multimodal, interaction' month: May - pages: 417--420 + pages: 102--105 publisher: 'Graduate School of Culture Technology, KAIST' - title: Personalized Song Interaction Using a Multi Touch Interface - url: http://www.nime.org/proceedings/2013/nime2013_234.pdf + title: Enabling Multimodal Mobile Interfaces for Musical Performance + url: http://www.nime.org/proceedings/2013/nime2013_303.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Batula2013 - abstract: "We present a system which allows an adult-sized humanoid to determine\ - \ whetheror not it is correctly playing a pitched percussive instrument to produce\ - \ adesired sound. 
As hu,\nman musicians utilize sensory feedback to determine\ - \ ifthey are successfully using their instruments to generate certain pitches,robot\ - \ performers should be capable of the same feat. We present a noteclassification\ - \ algorithm that uses auditory and haptic feedback to decide if anote was well-\ - \ or poorly-struck. This system is demonstrated using Hubo, anadult-sized humanoid,\ - \ which has been enabled to actu,\nate pitched pipes usingmallets. We show that,\ - \ with this system, Hubo is able to determine whether ornot a note was played\ - \ correctly." + ID: Hadjakos2013 + abstract: Music ensembles have to synchronize themselves with a very high precision + inorder to achieve the desired musical results. For that purpose the musicians + donot only rely on their auditory perception but also perceive and interpret themovements + and gestures of their ensemble colleges. In this paper we present aKinect-based + method to analyze ensemble play based on head tracking. We discussfirst experimental + results with a violin duo performance. address: 'Daejeon, Republic of Korea' - author: Alyssa Batula and Manu Colacot and David Grunberg and Youngmoo Kim - bibtex: "@inproceedings{Batula2013,\n abstract = {We present a system which allows\ - \ an adult-sized humanoid to determine whetheror not it is correctly playing a\ - \ pitched percussive instrument to produce adesired sound. As hu,\nman musicians\ - \ utilize sensory feedback to determine ifthey are successfully using their instruments\ - \ to generate certain pitches,robot performers should be capable of the same feat.\ - \ We present a noteclassification algorithm that uses auditory and haptic feedback\ - \ to decide if anote was well- or poorly-struck. This system is demonstrated using\ - \ Hubo, anadult-sized humanoid, which has been enabled to actu,\nate pitched pipes\ - \ usingmallets. 
We show that, with this system, Hubo is able to determine whether\ - \ ornot a note was played correctly.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Alyssa Batula and Manu Colacot and David Grunberg and Youngmoo Kim},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178472},\n issn = {2220-4806},\n\ - \ keywords = {Musical robots, humanoids, auditory feedback, haptic feedback},\n\ - \ month = {May},\n pages = {295--300},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Using Audio and Haptic Feedback to Improve Pitched\ - \ Percussive Instrument Performance in Humanoids},\n url = {http://www.nime.org/proceedings/2013/nime2013_235.pdf},\n\ - \ year = {2013}\n}\n" + author: Aristotelis Hadjakos and Tobias Grosshauser + bibtex: "@inproceedings{Hadjakos2013,\n abstract = {Music ensembles have to synchronize\ + \ themselves with a very high precision inorder to achieve the desired musical\ + \ results. For that purpose the musicians donot only rely on their auditory perception\ + \ but also perceive and interpret themovements and gestures of their ensemble\ + \ colleges. In this paper we present aKinect-based method to analyze ensemble\ + \ play based on head tracking. 
We discussfirst experimental results with a violin\ + \ duo performance.},\n address = {Daejeon, Republic of Korea},\n author = {Aristotelis\ + \ Hadjakos and Tobias Grosshauser},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178540},\n\ + \ issn = {2220-4806},\n keywords = {Kinect, Ensemble, Synchronization, Strings,\ + \ Functional Data Analysis, Cross-Correlogram},\n month = {May},\n pages = {106--110},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Motion\ + \ and Synchronization Analysis of Musical Ensembles with the Kinect},\n url =\ + \ {http://www.nime.org/proceedings/2013/nime2013_304.pdf},\n year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178472 + doi: 10.5281/zenodo.1178540 issn: 2220-4806 - keywords: 'Musical robots, humanoids, auditory feedback, haptic feedback' + keywords: 'Kinect, Ensemble, Synchronization, Strings, Functional Data Analysis, + Cross-Correlogram' month: May - pages: 295--300 + pages: 106--110 publisher: 'Graduate School of Culture Technology, KAIST' - title: Using Audio and Haptic Feedback to Improve Pitched Percussive Instrument - Performance in Humanoids - url: http://www.nime.org/proceedings/2013/nime2013_235.pdf + title: Motion and Synchronization Analysis of Musical Ensembles with the Kinect + url: http://www.nime.org/proceedings/2013/nime2013_304.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Torresen2013 - abstract: 'A custom designed WLAN (Wireless Local Area Network) based sensor interface - ispresented in this paper. It is aimed at wirelessly interfacing a large varietyof - sensors to supplement built-in sensors in smart phones and media players.The target - application area is collection of human related motions andcondition to be applied - in musical applications. 
The interface is based oncommercially available units - and allows for up to nine sensors. The benefit ofusing WLAN based communication - is high data rate with low latency. Ourexperiments show that the average transmission - time is less than 2ms for asingle sensor. Further, it is operational for a whole - day without batteryrecharging.' + ID: Park2013b + abstract: 'SSN (Sound Surfing Network) is a performance system that provides a new + musicalexperience by incorporating mobile phone-based spatial sound control tocollaborative + music performance. SSN enables both the performer and theaudience to manipulate + the spatial distribution of sound using the smartphonesof the audience as distributed + speaker system. Proposing a new perspective tothe social aspect music appreciation, + SSN will provide a new possibility tomobile music performances in the context + of interactive audience collaborationas well as sound spatialization.' address: 'Daejeon, Republic of Korea' - author: Jim Torresen and Yngve Hafting and Kristian Nymoen - bibtex: "@inproceedings{Torresen2013,\n abstract = {A custom designed WLAN (Wireless\ - \ Local Area Network) based sensor interface ispresented in this paper. It is\ - \ aimed at wirelessly interfacing a large varietyof sensors to supplement built-in\ - \ sensors in smart phones and media players.The target application area is collection\ - \ of human related motions andcondition to be applied in musical applications.\ - \ The interface is based oncommercially available units and allows for up to nine\ - \ sensors. The benefit ofusing WLAN based communication is high data rate with\ - \ low latency. Ourexperiments show that the average transmission time is less\ - \ than 2ms for asingle sensor. 
Further, it is operational for a whole day without\ - \ batteryrecharging.},\n address = {Daejeon, Republic of Korea},\n author = {Jim\ - \ Torresen and Yngve Hafting and Kristian Nymoen},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178680},\n issn = {2220-4806},\n keywords = {wireless\ - \ communication, sensor data collection, WLAN, Arduino},\n month = {May},\n pages\ - \ = {337--340},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {A New Wi-Fi based Platform for Wireless Sensor Data Collection},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_236.pdf},\n year = {2013}\n\ - }\n" + author: Saebyul Park and Seonghoon Ban and Dae Ryong Hong and Woon Seung Yeo + bibtex: "@inproceedings{Park2013b,\n abstract = {SSN (Sound Surfing Network) is\ + \ a performance system that provides a new musicalexperience by incorporating\ + \ mobile phone-based spatial sound control tocollaborative music performance.\ + \ SSN enables both the performer and theaudience to manipulate the spatial distribution\ + \ of sound using the smartphonesof the audience as distributed speaker system.\ + \ Proposing a new perspective tothe social aspect music appreciation, SSN will\ + \ provide a new possibility tomobile music performances in the context of interactive\ + \ audience collaborationas well as sound spatialization.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Saebyul Park and Seonghoon Ban and Dae Ryong\ + \ Hong and Woon Seung Yeo},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178636},\n\ + \ issn = {2220-4806},\n keywords = {Mobile music, smartphone, audience participation,\ + \ spatial sound control, digital performance},\n month = {May},\n pages = {111--114},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Sound\ + \ 
Surfing Network (SSN): Mobile Phone-based Sound Spatialization with Audience\ + \ Collaboration},\n url = {http://www.nime.org/proceedings/2013/nime2013_305.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178680 + doi: 10.5281/zenodo.1178636 issn: 2220-4806 - keywords: 'wireless communication, sensor data collection, WLAN, Arduino' + keywords: 'Mobile music, smartphone, audience participation, spatial sound control, + digital performance' month: May - pages: 337--340 + pages: 111--114 publisher: 'Graduate School of Culture Technology, KAIST' - title: A New Wi-Fi based Platform for Wireless Sensor Data Collection - url: http://www.nime.org/proceedings/2013/nime2013_236.pdf + title: 'Sound Surfing Network (SSN): Mobile Phone-based Sound Spatialization with + Audience Collaboration' + url: http://www.nime.org/proceedings/2013/nime2013_305.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Skogstad2013 - abstract: 'In this paper we present some custom designed filters for real-time motioncapture - applications. Our target application is so-called motion controllers,i.e. systems - that interpret hand motion for musical interaction. In earlierresearch we found - effective methods to design nearly optimal filters forreal-time applications. - However, to be able to design suitable filters for ourtarget application, it is - necessary to establish the typical frequency contentof the motion capture data - we want to filter. This will again allow us todetermine a reasonable cutoff frequency - for the filters. We have thereforeconducted an experiment in which we recorded - the hand motion of 20 subjects.The frequency spectra of these data together with - a method similar to theresidual analysis method were then used to determine reasonable - cutofffrequencies. 
Based on this experiment, we propose three cutoff frequencies - fordifferent scenarios and filtering needs: 5, 10 and 15 Hz, which corresponds - toheavy, medium and light filtering respectively. Finally, we propose a range - ofreal-time filters applicable to motion controllers. In particular, low-passfilters - and low-pass differentiators of degrees one and two, which in ourexperience are - the most useful filters for our target application.' + ID: McGee2013 + abstract: 'VOSIS is an interactive image sonification interface that creates complexwavetables + by raster scanning greyscale image pixel data. Using a multi-touchscreen to play + image regions of unique frequency content rather than a linearscale of frequencies, + it becomes a unique performance tool for experimental andvisual music. A number + of image filters controlled by multi-touch gestures addvariation to the sound + palette. On a mobile device, parameters controlled bythe accelerometer add another + layer expressivity to the resulting audio-visualmontages.' address: 'Daejeon, Republic of Korea' - author: Ståle A. Skogstad - bibtex: "@inproceedings{Skogstad2013,\n abstract = {In this paper we present some\ - \ custom designed filters for real-time motioncapture applications. Our target\ - \ application is so-called motion controllers,i.e. systems that interpret hand\ - \ motion for musical interaction. In earlierresearch we found effective methods\ - \ to design nearly optimal filters forreal-time applications. However, to be able\ - \ to design suitable filters for ourtarget application, it is necessary to establish\ - \ the typical frequency contentof the motion capture data we want to filter. 
This\ - \ will again allow us todetermine a reasonable cutoff frequency for the filters.\ - \ We have thereforeconducted an experiment in which we recorded the hand motion\ - \ of 20 subjects.The frequency spectra of these data together with a method similar\ - \ to theresidual analysis method were then used to determine reasonable cutofffrequencies.\ - \ Based on this experiment, we propose three cutoff frequencies fordifferent scenarios\ - \ and filtering needs: 5, 10 and 15 Hz, which corresponds toheavy, medium and\ - \ light filtering respectively. Finally, we propose a range ofreal-time filters\ - \ applicable to motion controllers. In particular, low-passfilters and low-pass\ - \ differentiators of degrees one and two, which in ourexperience are the most\ - \ useful filters for our target application.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {St{\\aa}le A. Skogstad},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178662},\n issn = {2220-4806},\n month = {May},\n pages =\ - \ {142--147},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Filtering Motion Capture Data for Real-Time Applications},\n url =\ - \ {http://www.nime.org/proceedings/2013/nime2013_238.pdf},\n year = {2013}\n}\n" + author: Ryan McGee + bibtex: "@inproceedings{McGee2013,\n abstract = {VOSIS is an interactive image sonification\ + \ interface that creates complexwavetables by raster scanning greyscale image\ + \ pixel data. Using a multi-touchscreen to play image regions of unique frequency\ + \ content rather than a linearscale of frequencies, it becomes a unique performance\ + \ tool for experimental andvisual music. A number of image filters controlled\ + \ by multi-touch gestures addvariation to the sound palette. 
On a mobile device,\ + \ parameters controlled bythe accelerometer add another layer expressivity to\ + \ the resulting audio-visualmontages.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Ryan McGee},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178604},\n\ + \ issn = {2220-4806},\n keywords = {image sonification, multi-touch, visual music},\n\ + \ month = {May},\n pages = {460--463},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {VOSIS: a Multi-touch Image Sonification Interface},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_310.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178662 + doi: 10.5281/zenodo.1178604 issn: 2220-4806 + keywords: 'image sonification, multi-touch, visual music' month: May - pages: 142--147 + pages: 460--463 publisher: 'Graduate School of Culture Technology, KAIST' - title: Filtering Motion Capture Data for Real-Time Applications - url: http://www.nime.org/proceedings/2013/nime2013_238.pdf + title: 'VOSIS: a Multi-touch Image Sonification Interface' + url: http://www.nime.org/proceedings/2013/nime2013_310.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Kleinberger2013 - abstract: 'PAMDI is an electromechanical music controller based on an expansion - of the common metal music boxes. Our system enables an augmentation of the musical - properties by adding different musical channels triggered and parameterized by - natural gestures during the ``performance''''. All the channels are generated - form the original melody recorded once at the start. To capture and treat the - different expressive parameters both natural and intentional, our platform is - composed of a metallic structure supporting sensors. 
The measured values are processed - by an arduino system that finallysends the results by serial communication to - a Max/MSP patch for signaltreatment and modification. We will explain how our - embedded instrument aims to bring a certain awareness to the player of the mapping - and the potential musical freedom of the very specific -- and not that much automatic - --- instrument that is a music box. We will also address how our design tackles - the different questions of mapping, ergonomics and expressiveness while choosing - the controller modalities and the parameters to be sensed.' + ID: Hoste2013 + abstract: 'Nowadays many music artists rely on visualisations and light shows to + enhanceand augment their live performances. However, the visualisation and triggeringof + lights is normally scripted in advance and synchronised with the concert,severely + limiting the artist''s freedom for improvisation, expression and ad-hocadaptation + of their show. These scripts result in performances where thetechnology enforces + the artist and their music to stay in synchronisation withthe pre-programmed environment. + We argue that these limitations can be overcomebased on emerging non-invasive + tracking technologies in combination with anadvanced gesture recognition engine.We + present a solution that uses explicit gestures and implicit dance moves tocontrol + the visual augmentation of a live music performance. We furtherillustrate how + our framework overcomes existing limitations of gestureclassification systems + by delivering a precise recognition solution based on asingle gesture sample in + combination with expert knowledge. The presentedsolution enables a more dynamic + and spontaneous performance and, when combinedwith indirect augmented reality, + results in a more intense interaction betweenthe artist and their audience.' 
address: 'Daejeon, Republic of Korea' - author: Rebecca Kleinberger - bibtex: "@inproceedings{Kleinberger2013,\n abstract = {PAMDI is an electromechanical\ - \ music controller based on an expansion of the common metal music boxes. Our\ - \ system enables an augmentation of the musical properties by adding different\ - \ musical channels triggered and parameterized by natural gestures during the\ - \ ``performance''. All the channels are generated form the original melody recorded\ - \ once at the start. To capture and treat the different expressive parameters\ - \ both natural and intentional, our platform is composed of a metallic structure\ - \ supporting sensors. The measured values are processed by an arduino system that\ - \ finallysends the results by serial communication to a Max/MSP patch for signaltreatment\ - \ and modification. We will explain how our embedded instrument aims to bring\ - \ a certain awareness to the player of the mapping and the potential musical freedom\ - \ of the very specific -- and not that much automatic --- instrument that is a\ - \ music box. 
We will also address how our design tackles the different questions\ - \ of mapping, ergonomics and expressiveness while choosing the controller modalities\ - \ and the parameters to be sensed.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Rebecca Kleinberger},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178588},\n\ - \ issn = {2220-4806},\n keywords = {Tangible interface, musical controller, music\ - \ box, mechanical and electronic coupling, mapping.},\n month = {May},\n pages\ - \ = {19--20},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {{PAM}DI Music Box: Primarily Analogico-Mechanical, Digitally Iterated\ - \ Music Box},\n url = {http://www.nime.org/proceedings/2013/nime2013_24.pdf},\n\ + author: Lode Hoste and Beat Signer + bibtex: "@inproceedings{Hoste2013,\n abstract = {Nowadays many music artists rely\ + \ on visualisations and light shows to enhanceand augment their live performances.\ + \ However, the visualisation and triggeringof lights is normally scripted in advance\ + \ and synchronised with the concert,severely limiting the artist's freedom for\ + \ improvisation, expression and ad-hocadaptation of their show. These scripts\ + \ result in performances where thetechnology enforces the artist and their music\ + \ to stay in synchronisation withthe pre-programmed environment. We argue that\ + \ these limitations can be overcomebased on emerging non-invasive tracking technologies\ + \ in combination with anadvanced gesture recognition engine.We present a solution\ + \ that uses explicit gestures and implicit dance moves tocontrol the visual augmentation\ + \ of a live music performance. We furtherillustrate how our framework overcomes\ + \ existing limitations of gestureclassification systems by delivering a precise\ + \ recognition solution based on asingle gesture sample in combination with expert\ + \ knowledge. 
The presentedsolution enables a more dynamic and spontaneous performance\ + \ and, when combinedwith indirect augmented reality, results in a more intense\ + \ interaction betweenthe artist and their audience.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Lode Hoste and Beat Signer},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178558},\n issn = {2220-4806},\n keywords = {Expressive\ + \ control, augmented reality, live music performance, 3D gesture recognition,\ + \ Kinect, declarative language},\n month = {May},\n pages = {13--18},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {Expressive Control\ + \ of Indirect Augmented Reality During Live Music Performances},\n url = {http://www.nime.org/proceedings/2013/nime2013_32.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178588 + doi: 10.5281/zenodo.1178558 issn: 2220-4806 - keywords: 'Tangible interface, musical controller, music box, mechanical and electronic - coupling, mapping.' + keywords: 'Expressive control, augmented reality, live music performance, 3D gesture + recognition, Kinect, declarative language' month: May - pages: 19--20 + pages: 13--18 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'PAMDI Music Box: Primarily Analogico-Mechanical, Digitally Iterated Music - Box' - url: http://www.nime.org/proceedings/2013/nime2013_24.pdf + title: Expressive Control of Indirect Augmented Reality During Live Music Performances + url: http://www.nime.org/proceedings/2013/nime2013_32.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: McPherson2013 - abstract: 'This paper presents a portable optical measurement system for capturingcontinuous - key motion on any piano. 
Very few concert venues have MIDI-enabledpianos, and - many performers depend on the versatile but discontinued MoogPianoBar to provide - MIDI from a conventional acoustic instrument. The scannerhardware presented in - this paper addresses the growing need for alternativesolutions while surpassing - existing systems in the level of detail measured.Continuous key position on both - black and white keys is gathered at 1kHz samplerate. Software extracts traditional - and novel features of keyboard touch fromeach note, which can be flexibly mapped - to sound using MIDI or Open SoundControl. RGB LEDs provide rich visual feedback - to assist the performer ininteracting with more complex sound mapping arrangements. - An application ispresented to the magnetic resonator piano, an electromagnetically-augmentedacoustic - grand piano which is performed using continuous key positionmeasurements.' + ID: Murphy2013 + abstract: 'This paper provides a history of robotic guitars and bass guitars as + well as adiscussion of the design, construction, and evaluation of two new roboticinstruments. + Throughout the paper, a focus is made on different techniques toextend the expressivity + of robotic guitars. Swivel and MechBass, two newrobots, are built and discussed. + Construction techniques of likely interest toother musical roboticists are included. + These robots use a variety oftechniques, both new and inspired by prior work, + to afford composers andperformers with the ability to precisely control pitch + and plucking parameters.Both new robots are evaluated to test their precision, + repeatability, andspeed. The paper closes with a discussion of the compositional + and performativeimplications of such levels of control, and how it might affect + humans who wishto interface with the systems.' 
address: 'Daejeon, Republic of Korea' - author: Andrew McPherson - bibtex: "@inproceedings{McPherson2013,\n abstract = {This paper presents a portable\ - \ optical measurement system for capturingcontinuous key motion on any piano.\ - \ Very few concert venues have MIDI-enabledpianos, and many performers depend\ - \ on the versatile but discontinued MoogPianoBar to provide MIDI from a conventional\ - \ acoustic instrument. The scannerhardware presented in this paper addresses the\ - \ growing need for alternativesolutions while surpassing existing systems in the\ - \ level of detail measured.Continuous key position on both black and white keys\ - \ is gathered at 1kHz samplerate. Software extracts traditional and novel features\ - \ of keyboard touch fromeach note, which can be flexibly mapped to sound using\ - \ MIDI or Open SoundControl. RGB LEDs provide rich visual feedback to assist the\ - \ performer ininteracting with more complex sound mapping arrangements. An application\ - \ ispresented to the magnetic resonator piano, an electromagnetically-augmentedacoustic\ - \ grand piano which is performed using continuous key positionmeasurements.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Andrew McPherson},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178610},\n issn = {2220-4806},\n keywords\ - \ = {Piano, keyboard, optical sensing, gesture sensing, visual feedback, mapping,\ - \ magnetic resonator piano},\n month = {May},\n pages = {152--157},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {Portable Measurement\ - \ and Mapping of Continuous Piano Gesture},\n url = {http://www.nime.org/proceedings/2013/nime2013_240.pdf},\n\ + author: Jim Murphy and James McVay and Ajay Kapur and Dale Carnegie + bibtex: "@inproceedings{Murphy2013,\n abstract = {This paper provides a history\ + \ of robotic guitars and bass guitars as well as 
adiscussion of the design, construction,\ + \ and evaluation of two new roboticinstruments. Throughout the paper, a focus\ + \ is made on different techniques toextend the expressivity of robotic guitars.\ + \ Swivel and MechBass, two newrobots, are built and discussed. Construction techniques\ + \ of likely interest toother musical roboticists are included. These robots use\ + \ a variety oftechniques, both new and inspired by prior work, to afford composers\ + \ andperformers with the ability to precisely control pitch and plucking parameters.Both\ + \ new robots are evaluated to test their precision, repeatability, andspeed. The\ + \ paper closes with a discussion of the compositional and performativeimplications\ + \ of such levels of control, and how it might affect humans who wishto interface\ + \ with the systems.},\n address = {Daejeon, Republic of Korea},\n author = {Jim\ + \ Murphy and James McVay and Ajay Kapur and Dale Carnegie},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178618},\n issn = {2220-4806},\n keywords = {musical\ + \ robotics, kinetic sculpture, mechatronics},\n month = {May},\n pages = {557--562},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Designing\ + \ and Building Expressive Robotic Guitars},\n url = {http://www.nime.org/proceedings/2013/nime2013_36.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178610 + doi: 10.5281/zenodo.1178618 issn: 2220-4806 - keywords: 'Piano, keyboard, optical sensing, gesture sensing, visual feedback, mapping, - magnetic resonator piano' + keywords: 'musical robotics, kinetic sculpture, mechatronics' month: May - pages: 152--157 + pages: 557--562 publisher: 'Graduate School of Culture Technology, KAIST' - title: Portable Measurement and Mapping of Continuous Piano Gesture - url: 
http://www.nime.org/proceedings/2013/nime2013_240.pdf + title: Designing and Building Expressive Robotic Guitars + url: http://www.nime.org/proceedings/2013/nime2013_36.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Tarakajian2013 - abstract: 'Mira is an iPad app for controlling Max patchers in real time with minimalconfiguration. - This submission includes a paper describing Mira''s design andimplementation, - as well as a demo showing how Mira works with Max.The Mira iPad app discovers - open Max patchers automatically using the Bonjourprotocol, connects to them over - WiFi and negotiates a description of the Maxpatcher. As objects change position - and appearance, Mira makes sure that theinterface on the iPad is kept up to date. - Mira eliminates the need for anexplicit mapping step between the interface and - the system being controlled.The user is never asked to input an IP address, nor - to configure the mappingbetween interface objects on the iPad and those in the - Max patcher. So theprototyping composer is free to rapidly configure and reconfigure - theinterface.' + ID: Dezfouli2013 + abstract: Notesaaz is both a new physical interface meant for musical performance + and aproposal for a three-stage process where the controller is used to navigatewithin + a graphical score that on its turn controls the sound generation. It canbe seen + as a dynamic and understandable way of using dynamic mapping betweenthe sensor + input and the sound generation. Furthermore by presenting thegraphical score to + both the performer and the audience a new engagement of theaudience can be established. address: 'Daejeon, Republic of Korea' - author: Sam Tarakajian and David Zicarelli and Joshua Clayton - bibtex: "@inproceedings{Tarakajian2013,\n abstract = {Mira is an iPad app for controlling\ - \ Max patchers in real time with minimalconfiguration. 
This submission includes\ - \ a paper describing Mira's design andimplementation, as well as a demo showing\ - \ how Mira works with Max.The Mira iPad app discovers open Max patchers automatically\ - \ using the Bonjourprotocol, connects to them over WiFi and negotiates a description\ - \ of the Maxpatcher. As objects change position and appearance, Mira makes sure\ - \ that theinterface on the iPad is kept up to date. Mira eliminates the need for\ - \ anexplicit mapping step between the interface and the system being controlled.The\ - \ user is never asked to input an IP address, nor to configure the mappingbetween\ - \ interface objects on the iPad and those in the Max patcher. So theprototyping\ - \ composer is free to rapidly configure and reconfigure theinterface.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {Sam Tarakajian and David Zicarelli\ - \ and Joshua Clayton},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178670},\n\ - \ issn = {2220-4806},\n keywords = {NIME, Max/MSP/Jitter, Mira, ipad, osc, bonjour,\ - \ zeroconf},\n month = {May},\n pages = {421--426},\n publisher = {Graduate School\ - \ of Culture Technology, KAIST},\n title = {Mira: Liveness in iPad Controllers\ - \ for Max/MSP},\n url = {http://www.nime.org/proceedings/2013/nime2013_241.pdf},\n\ - \ year = {2013}\n}\n" + author: Erfan Abdi Dezfouli and Edwin van der Heide + bibtex: "@inproceedings{Dezfouli2013,\n abstract = {Notesaaz is both a new physical\ + \ interface meant for musical performance and aproposal for a three-stage process\ + \ where the controller is used to navigatewithin a graphical score that on its\ + \ turn controls the sound generation. 
It canbe seen as a dynamic and understandable\ + \ way of using dynamic mapping betweenthe sensor input and the sound generation.\ + \ Furthermore by presenting thegraphical score to both the performer and the audience\ + \ a new engagement of theaudience can be established.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Erfan Abdi Dezfouli and Edwin van der Heide},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178498},\n issn = {2220-4806},\n\ + \ keywords = {musical instrument, custom controller, gestural input, dynamic score},\n\ + \ month = {May},\n pages = {115--117},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Notesaaz: a new controller and performance idiom},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_4.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178670 + doi: 10.5281/zenodo.1178498 issn: 2220-4806 - keywords: 'NIME, Max/MSP/Jitter, Mira, ipad, osc, bonjour, zeroconf' + keywords: 'musical instrument, custom controller, gestural input, dynamic score' month: May - pages: 421--426 + pages: 115--117 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Mira: Liveness in iPad Controllers for Max/MSP' - url: http://www.nime.org/proceedings/2013/nime2013_241.pdf + title: 'Notesaaz: a new controller and performance idiom' + url: http://www.nime.org/proceedings/2013/nime2013_4.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Kim2013 - abstract: 'We discuss how to model "gestures" in music performance with statistical - latent-states models. A music performance can be described with compositional - and expressive properties varying over time. 
In those property changes we often - observe particular patterns, and such a pattern can be understood as a "gesture", - since it serves as a medium transferring specific emotions. Assuming a finite - number of latent states on each property value changes, we can describe those - gestures with statistical latent-states models, and train them by unsupervised - learning algorithms. In addition, model entropy provides us a measure for different - effects of each properties on the gesture implementation. Test result on some - of real performances indicates that the trained models could capture the structure - of gestures observed in the given performances, and detect their boundaries. The - entropy-based measure was informative to understand the effectiveness of each - property on the gesture implementation. Test result on large corpora indicates - that our model has potentials for afurther model improvement.' + ID: Fuhrmann2013 + abstract: This paper demonstrates how to use multiple Kinect(TM) sensors to map + aperformers motion to music. We merge skeleton data streams from multiplesensors + to compensate for occlusions of the performer. The skeleton jointpositions drive + the performance via open sound control data. We discuss how toregister the different + sensors to each other and how to smoothly merge theresulting data streams and + how to map position data in a general framework tothe live electronics applied + to a chamber music ensemble. address: 'Daejeon, Republic of Korea' - author: Taehun Kim and Stefan Weinzierl - bibtex: "@inproceedings{Kim2013,\n abstract = {We discuss how to model \"gestures\"\ - \ in music performance with statistical latent-states models. A music performance\ - \ can be described with compositional and expressive properties varying over time.\ - \ In those property changes we often observe particular patterns, and such a pattern\ - \ can be understood as a \"gesture\", since it serves as a medium transferring\ - \ specific emotions. 
Assuming a finite number of latent states on each property\ - \ value changes, we can describe those gestures with statistical latent-states\ - \ models, and train them by unsupervised learning algorithms. In addition, model\ - \ entropy provides us a measure for different effects of each properties on the\ - \ gesture implementation. Test result on some of real performances indicates that\ - \ the trained models could capture the structure of gestures observed in the given\ - \ performances, and detect their boundaries. The entropy-based measure was informative\ - \ to understand the effectiveness of each property on the gesture implementation.\ - \ Test result on large corpora indicates that our model has potentials for afurther\ - \ model improvement.},\n address = {Daejeon, Republic of Korea},\n author = {Taehun\ - \ Kim and Stefan Weinzierl},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178582},\n\ - \ issn = {2220-4806},\n keywords = {Musical gestures, performance analysis, unsupervised\ - \ machine learning},\n month = {May},\n pages = {427--430},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Modelling Gestures in Music\ - \ Performance with Statistical Latent-State Models},\n url = {http://www.nime.org/proceedings/2013/nime2013_244.pdf},\n\ - \ year = {2013}\n}\n" + author: Anton Fuhrmann and Johannes Kretz and Peter Burwik + bibtex: "@inproceedings{Fuhrmann2013,\n abstract = {This paper demonstrates how\ + \ to use multiple Kinect(TM) sensors to map aperformers motion to music. We merge\ + \ skeleton data streams from multiplesensors to compensate for occlusions of the\ + \ performer. The skeleton jointpositions drive the performance via open sound\ + \ control data. 
We discuss how toregister the different sensors to each other\ + \ and how to smoothly merge theresulting data streams and how to map position\ + \ data in a general framework tothe live electronics applied to a chamber music\ + \ ensemble.},\n address = {Daejeon, Republic of Korea},\n author = {Anton Fuhrmann\ + \ and Johannes Kretz and Peter Burwik},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178530},\n\ + \ issn = {2220-4806},\n keywords = {kinect, multi sensor, sensor fusion, open\ + \ sound control, motion tracking, parameter mapping, live electronics},\n month\ + \ = {May},\n pages = {358--362},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {Multi Sensor Tracking for Live Sound Transformation},\n url\ + \ = {http://www.nime.org/proceedings/2013/nime2013_44.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178582 + doi: 10.5281/zenodo.1178530 issn: 2220-4806 - keywords: 'Musical gestures, performance analysis, unsupervised machine learning' + keywords: 'kinect, multi sensor, sensor fusion, open sound control, motion tracking, + parameter mapping, live electronics' month: May - pages: 427--430 + pages: 358--362 publisher: 'Graduate School of Culture Technology, KAIST' - title: Modelling Gestures in Music Performance with Statistical Latent-State Models - url: http://www.nime.org/proceedings/2013/nime2013_244.pdf + title: Multi Sensor Tracking for Live Sound Transformation + url: http://www.nime.org/proceedings/2013/nime2013_44.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Pardue2013 - abstract: 'The Hand Controller is a new interface designed to enable a performer - toachieve detailed control of audio and visual parameters through a tangibleinterface - combined with motion tracking of the hands to capture large scalephysical movement. 
- Such movement empowers an expressive dynamic for bothperformer and audience. However - tracking movements in free space isnotoriously difficult for virtuosic performance. - The lack of tactile feedbackleads to difficulty learning the repeated muscle movements - required for precisecontrol. In comparison, the hands have shown an impressive - ability to mastercomplex motor tasks through feel. The hand controller uses both - modes ofinteraction. Electro-magnetic field tracking enables 6D hand motion trackingwhile - two options provide tactile interaction- a set of tracks that providelinear positioning - and applied finger pressure, or a set of trumpet like sliderkeys that provide - continuous data describing key depth. Thumbs actuateadditional pressure sensitive - buttons. The two haptic interfaces are mountedto a comfortable hand grip that - allows a significant range of reach, andpressure to be applied without restricting - hand movement highly desirable inexpressive motion.' + ID: Mudd2013 + abstract: 'This paper presents a system for exploring different dimensions of a + soundthrough the use of haptic feedback. The Novint Falcon force feedback interfaceis + used to scan through soundfiles as a subject moves their hand horizontallyfrom + left to right, and to relay information about volume, frequency content,noisiness, + or potentially any analysable parameter back to the subject throughforces acting + on their hand. General practicalities of mapping sonic elements to physical forces + areconsidered, such as the problem of representing detailed data through vaguephysical + sensation, approaches to applying forces to the hand that do notinterfering with + the smooth operation of the device, and the relative merits ofdiscreet and continuous + mappings. 
Three approaches to generating the forcevector are discussed: 1) the + use of simulated detents to identify areas of anaudio parameter over a certain + threshold, 2) applying friction proportional tothe level of the audio parameter + along the axis of movement, and 3) creatingforces perpendicular to the subject''s + hand movements.Presentation of audio information in this manner could be beneficial + for`pre-feeling'' as a method for selecting material to play during a liveperformance, + assisting visually impaired audio engineers, and as a generalaugmentation of standard + audio editing environments.' address: 'Daejeon, Republic of Korea' - author: Laurel Pardue and William Sebastian - bibtex: "@inproceedings{Pardue2013,\n abstract = {The Hand Controller is a new interface\ - \ designed to enable a performer toachieve detailed control of audio and visual\ - \ parameters through a tangibleinterface combined with motion tracking of the\ - \ hands to capture large scalephysical movement. Such movement empowers an expressive\ - \ dynamic for bothperformer and audience. However tracking movements in free space\ - \ isnotoriously difficult for virtuosic performance. The lack of tactile feedbackleads\ - \ to difficulty learning the repeated muscle movements required for precisecontrol.\ - \ In comparison, the hands have shown an impressive ability to mastercomplex motor\ - \ tasks through feel. The hand controller uses both modes ofinteraction. Electro-magnetic\ - \ field tracking enables 6D hand motion trackingwhile two options provide tactile\ - \ interaction- a set of tracks that providelinear positioning and applied finger\ - \ pressure, or a set of trumpet like sliderkeys that provide continuous data describing\ - \ key depth. Thumbs actuateadditional pressure sensitive buttons. 
The two haptic\ - \ interfaces are mountedto a comfortable hand grip that allows a significant range\ - \ of reach, andpressure to be applied without restricting hand movement highly\ - \ desirable inexpressive motion.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Laurel Pardue and William Sebastian},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178630},\n issn = {2220-4806},\n keywords = {hand, interface,\ - \ free gesture, force sensing resistor, new musical instrument, tactile feedback,\ - \ position tracking},\n month = {May},\n pages = {90--93},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Hand-Controller for Combined\ - \ Tactile Control and Motion Tracking},\n url = {http://www.nime.org/proceedings/2013/nime2013_245.pdf},\n\ + author: Tom Mudd + bibtex: "@inproceedings{Mudd2013,\n abstract = {This paper presents a system for\ + \ exploring different dimensions of a soundthrough the use of haptic feedback.\ + \ The Novint Falcon force feedback interfaceis used to scan through soundfiles\ + \ as a subject moves their hand horizontallyfrom left to right, and to relay information\ + \ about volume, frequency content,noisiness, or potentially any analysable parameter\ + \ back to the subject throughforces acting on their hand. General practicalities\ + \ of mapping sonic elements to physical forces areconsidered, such as the problem\ + \ of representing detailed data through vaguephysical sensation, approaches to\ + \ applying forces to the hand that do notinterfering with the smooth operation\ + \ of the device, and the relative merits ofdiscreet and continuous mappings. 
Three\ + \ approaches to generating the forcevector are discussed: 1) the use of simulated\ + \ detents to identify areas of anaudio parameter over a certain threshold, 2)\ + \ applying friction proportional tothe level of the audio parameter along the\ + \ axis of movement, and 3) creatingforces perpendicular to the subject's hand\ + \ movements.Presentation of audio information in this manner could be beneficial\ + \ for`pre-feeling' as a method for selecting material to play during a liveperformance,\ + \ assisting visually impaired audio engineers, and as a generalaugmentation of\ + \ standard audio editing environments.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Tom Mudd},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1293003},\n\ + \ issn = {2220-4806},\n keywords = {Haptics, force feedback, mapping, human-computer\ + \ interaction},\n month = {May},\n pages = {369--372},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Feeling for Sound: Mapping\ + \ Sonic Data to Haptic Perceptions},\n url = {http://www.nime.org/proceedings/2013/nime2013_46.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178630 + doi: 10.5281/zenodo.1293003 issn: 2220-4806 - keywords: 'hand, interface, free gesture, force sensing resistor, new musical instrument, - tactile feedback, position tracking' + keywords: 'Haptics, force feedback, mapping, human-computer interaction' month: May - pages: 90--93 + pages: 369--372 publisher: 'Graduate School of Culture Technology, KAIST' - title: Hand-Controller for Combined Tactile Control and Motion Tracking - url: http://www.nime.org/proceedings/2013/nime2013_245.pdf + title: 'Feeling for Sound: Mapping Sonic Data to Haptic Perceptions' + url: http://www.nime.org/proceedings/2013/nime2013_46.pdf year: 2013 - ENTRYTYPE: 
inproceedings - ID: Wiriadjaja2013 - abstract: 'The Gamelan Sampul is a laptop sleeve with embedded circuitry that allows - usersto practice playing Javanese gamelan instruments without a full set ofinstruments. - It is part of a larger project that aims to develop a set ofportable and mobile - tools for learning, recording and performing classicalJavanese gamelan music.The - accessibility of a portable Javanese gamelan set introduces the musicalgenre to - audiences who have never experienced this traditional music before,passing down - long established customs to future generations. But it also raisesthe question - of what is and what isn''t appropriate to the musical tradition.The Gamelan Sampul - attempts to introduce new technology to traditional folkmusic while staying sensitive - to cultural needs.' + ID: BenAsher2013 + abstract: 'A system is presented for detecting common gestures, musical intentions + andemotions of pianists in real-time using only kinesthetic data retrieved bywireless + motion sensors. The algorithm can detect common Western musicalstructures such + as chords, arpeggios, scales, and trills as well as musicallyintended emotions: + cheerful, mournful, vigorous, dreamy, lyrical, and humorouscompletely and solely + based on low-sample-rate motion sensor data. Thealgorithm can be trained per performer + in real-time or can work based onprevious training sets. The system maps the emotions + to a color set andpresents them as a flowing emotional spectrum on the background + of a pianoroll. This acts as a feedback mechanism for emotional expression as + well as aninteractive display of the music. The system was trained and tested + on a numberof pianists and it classified structures and emotions with promising + results ofup to 92% accuracy.' 
address: 'Daejeon, Republic of Korea' - author: Antonius Wiriadjaja - bibtex: "@inproceedings{Wiriadjaja2013,\n abstract = {The Gamelan Sampul is a laptop\ - \ sleeve with embedded circuitry that allows usersto practice playing Javanese\ - \ gamelan instruments without a full set ofinstruments. It is part of a larger\ - \ project that aims to develop a set ofportable and mobile tools for learning,\ - \ recording and performing classicalJavanese gamelan music.The accessibility of\ - \ a portable Javanese gamelan set introduces the musicalgenre to audiences who\ - \ have never experienced this traditional music before,passing down long established\ - \ customs to future generations. But it also raisesthe question of what is and\ - \ what isn't appropriate to the musical tradition.The Gamelan Sampul attempts\ - \ to introduce new technology to traditional folkmusic while staying sensitive\ - \ to cultural needs.},\n address = {Daejeon, Republic of Korea},\n author = {Antonius\ - \ Wiriadjaja},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178688},\n issn\ - \ = {2220-4806},\n keywords = {Physical computing, product design, traditional\ - \ folk arts, gamelan},\n month = {May},\n pages = {469--470},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Gamelan Sampul: Laptop Sleeve\ - \ Gamelan},\n url = {http://www.nime.org/proceedings/2013/nime2013_246.pdf},\n\ + author: Matan Ben-Asher and Colby Leider + bibtex: "@inproceedings{BenAsher2013,\n abstract = {A system is presented for detecting\ + \ common gestures, musical intentions andemotions of pianists in real-time using\ + \ only kinesthetic data retrieved bywireless motion sensors. 
The algorithm can\ + \ detect common Western musicalstructures such as chords, arpeggios, scales, and\ + \ trills as well as musicallyintended emotions: cheerful, mournful, vigorous,\ + \ dreamy, lyrical, and humorouscompletely and solely based on low-sample-rate\ + \ motion sensor data. Thealgorithm can be trained per performer in real-time or\ + \ can work based onprevious training sets. The system maps the emotions to a color\ + \ set andpresents them as a flowing emotional spectrum on the background of a\ + \ pianoroll. This acts as a feedback mechanism for emotional expression as well\ + \ as aninteractive display of the music. The system was trained and tested on\ + \ a numberof pianists and it classified structures and emotions with promising\ + \ results ofup to 92\\% accuracy.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Matan Ben-Asher and Colby Leider},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178474},\n issn = {2220-4806},\n keywords = {Motion Sensors,\ + \ IMUs, Expressive Piano Performance, Machine Learning, Computer Music, Music\ + \ and Emotion},\n month = {May},\n pages = {21--24},\n publisher = {Graduate School\ + \ of Culture Technology, KAIST},\n title = {Toward an Emotionally Intelligent\ + \ Piano: Real-Time Emotion Detection and Performer Feedback via Kinesthetic Sensing\ + \ in Piano Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_48.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178688 + doi: 10.5281/zenodo.1178474 issn: 2220-4806 - keywords: 'Physical computing, product design, traditional folk arts, gamelan' + keywords: 'Motion Sensors, IMUs, Expressive Piano Performance, Machine Learning, + Computer Music, Music and Emotion' month: May - pages: 469--470 + pages: 21--24 publisher: 'Graduate School of 
Culture Technology, KAIST' - title: 'Gamelan Sampul: Laptop Sleeve Gamelan' - url: http://www.nime.org/proceedings/2013/nime2013_246.pdf + title: 'Toward an Emotionally Intelligent Piano: Real-Time Emotion Detection and + Performer Feedback via Kinesthetic Sensing in Piano Performance' + url: http://www.nime.org/proceedings/2013/nime2013_48.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Pardue2013a - abstract: 'This paper explores the potential of near-field optical reflective sensing - formusical instrument gesture capture. Near-field optical sensors are inexpensive,portable - and non-intrusive, and their high spatial and temporal resolutionmakes them ideal - for tracking the finer motions of instrumental performance.The paper discusses - general optical sensor performance with detailedinvestigations of three sensor - models. An application is presented to violinbow position tracking using reflective - sensors mounted on the stick. Bowtracking remains a difficult task, and many existing - solutions are expensive,bulky, or offer limited temporal resolution. Initial results - indicate that bowposition and pressure can be derived from optical measurements - of thehair-string distance, and that similar techniques may be used to measure - bowtilt.' + ID: Diakopoulos2013 + abstract: 'Netpixl is a new micro-toolkit built to network devices within interactiveinstallations + and environments. Using a familiar client-server model, Netpixlcentrally wraps + an important aspect of ubiquitous computing: real-timemessaging. In the context + of sound and music computing, the role of Netpixl isto fluidly integrate endpoints + like OSC and MIDI within a larger multi-usersystem. This paper considers useful + design principles that may be applied totoolkits like Netpixl while also emphasizing + recent approaches to applicationdevelopment via HTML5 and Javascript, highlighting + an evolution in networkedcreative computing.' 
address: 'Daejeon, Republic of Korea' - author: Laurel Pardue and Andrew McPherson - bibtex: "@inproceedings{Pardue2013a,\n abstract = {This paper explores the potential\ - \ of near-field optical reflective sensing formusical instrument gesture capture.\ - \ Near-field optical sensors are inexpensive,portable and non-intrusive, and their\ - \ high spatial and temporal resolutionmakes them ideal for tracking the finer\ - \ motions of instrumental performance.The paper discusses general optical sensor\ - \ performance with detailedinvestigations of three sensor models. An application\ - \ is presented to violinbow position tracking using reflective sensors mounted\ - \ on the stick. Bowtracking remains a difficult task, and many existing solutions\ - \ are expensive,bulky, or offer limited temporal resolution. Initial results indicate\ - \ that bowposition and pressure can be derived from optical measurements of thehair-string\ - \ distance, and that similar techniques may be used to measure bowtilt.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {Laurel Pardue and Andrew McPherson},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178628},\n issn = {2220-4806},\n\ - \ keywords = {optical sensor, reflectance, LED, photodiode, phototransistor, violin,\ - \ bow tracking, gesture, near-field sensing},\n month = {May},\n pages = {363--368},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Near-Field\ - \ Optical Reflective Sensing for Bow Tracking},\n url = {http://www.nime.org/proceedings/2013/nime2013_247.pdf},\n\ + author: Dimitri Diakopoulos and Ajay Kapur + bibtex: "@inproceedings{Diakopoulos2013,\n abstract = {Netpixl is a new micro-toolkit\ + \ built to network devices within interactiveinstallations and environments. 
Using\ + \ a familiar client-server model, Netpixlcentrally wraps an important aspect of\ + \ ubiquitous computing: real-timemessaging. In the context of sound and music\ + \ computing, the role of Netpixl isto fluidly integrate endpoints like OSC and\ + \ MIDI within a larger multi-usersystem. This paper considers useful design principles\ + \ that may be applied totoolkits like Netpixl while also emphasizing recent approaches\ + \ to applicationdevelopment via HTML5 and Javascript, highlighting an evolution\ + \ in networkedcreative computing.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Dimitri Diakopoulos and Ajay Kapur},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178500},\n issn = {2220-4806},\n keywords = {networking,\ + \ ubiquitious computing, toolkits, html5},\n month = {May},\n pages = {206--209},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Netpixl:\ + \ Towards a New Paradigm for Networked Application Development},\n url = {http://www.nime.org/proceedings/2013/nime2013_49.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178628 + doi: 10.5281/zenodo.1178500 issn: 2220-4806 - keywords: 'optical sensor, reflectance, LED, photodiode, phototransistor, violin, - bow tracking, gesture, near-field sensing' + keywords: 'networking, ubiquitious computing, toolkits, html5' month: May - pages: 363--368 + pages: 206--209 publisher: 'Graduate School of Culture Technology, KAIST' - title: Near-Field Optical Reflective Sensing for Bow Tracking - url: http://www.nime.org/proceedings/2013/nime2013_247.pdf + title: 'Netpixl: Towards a New Paradigm for Networked Application Development' + url: http://www.nime.org/proceedings/2013/nime2013_49.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Liu2013 - abstract: 'Cloud Bridge is an 
immersive interactive audiovisual software interface - forboth data exploration and artistic creation. It explores how information can - besonified and visualized to facilitate findings, and eventually becomeinteractive - musical compositions. Cloud Bridge functions as a multi-user,multimodal instrument. - The data represents the history of items checked out bypatrons of the Seattle - Public Library. A single user or agroup of users functioning as a performance - ensemble participate in the pieceby interactively querying the database using - iOS devices. Each device isassociated with aunique timbre and color for contributing - to the piece, whichappears on large shared screens and a surround-sound system - for allparticipants and observers. Cloud Bridge leads to a new media interactiveinterface - utilizing audio synthesis, visualization and real-time interaction.' + ID: Fasciani2013 + abstract: 'Mapping gestures to digital musical instrument parameters is not trivial + when the dimensionality of the sensor-captured data is high and the model relating + the gesture to sensor data is unknown. In these cases, a front-end processing + system for extracting gestural information embedded in the sensor data is essential. + In this paper we propose an unsupervised offline method that learns how to reduce + and map the gestural data to a generic instrument parameter control space. We + make an unconventional use of the Self-Organizing Maps to obtain only a geometrical + transformation of the gestural data, while dimensionality reduction is handled + separately. We introduce a novel training procedure to overcome two main Self-Organizing + Maps limitations which otherwise corrupt the interface usability. As evaluation, + we apply this method to our existing Voice-Controlled Interface for musical instruments, + obtaining sensible usability improvements.' 
address: 'Daejeon, Republic of Korea' - author: Qian Liu and Yoon Chung Han and JoAnn Kuchera-Morin and Matthew Wright - bibtex: "@inproceedings{Liu2013,\n abstract = {Cloud Bridge is an immersive interactive\ - \ audiovisual software interface forboth data exploration and artistic creation.\ - \ It explores how information can besonified and visualized to facilitate findings,\ - \ and eventually becomeinteractive musical compositions. Cloud Bridge functions\ - \ as a multi-user,multimodal instrument. The data represents the history of items\ - \ checked out bypatrons of the Seattle Public Library. A single user or agroup\ - \ of users functioning as a performance ensemble participate in the pieceby interactively\ - \ querying the database using iOS devices. Each device isassociated with aunique\ - \ timbre and color for contributing to the piece, whichappears on large shared\ - \ screens and a surround-sound system for allparticipants and observers. Cloud\ - \ Bridge leads to a new media interactiveinterface utilizing audio synthesis,\ - \ visualization and real-time interaction.},\n address = {Daejeon, Republic of\ - \ Korea},\n author = {Qian Liu and Yoon Chung Han and JoAnn Kuchera-Morin and\ - \ Matthew Wright},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178596},\n\ - \ issn = {2220-4806},\n keywords = {Data Sonification, Data Visualization, Sonification,\ - \ User Interface, Sonic Interaction Design, Open Sound Control},\n month = {May},\n\ - \ pages = {431--436},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Cloud Bridge: a Data-driven Immersive Audio-Visual Software Interface},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_250.pdf},\n year = {2013}\n\ - }\n" + author: Stefano Fasciani and Lonce Wyse + bibtex: "@inproceedings{Fasciani2013,\n abstract = {Mapping gestures to digital\ + \ musical instrument parameters is not 
trivial when the dimensionality of the\ + \ sensor-captured data is high and the model relating the gesture to sensor data\ + \ is unknown. In these cases, a front-end processing system for extracting gestural\ + \ information embedded in the sensor data is essential. In this paper we propose\ + \ an unsupervised offline method that learns how to reduce and map the gestural\ + \ data to a generic instrument parameter control space. We make an unconventional\ + \ use of the Self-Organizing Maps to obtain only a geometrical transformation\ + \ of the gestural data, while dimensionality reduction is handled separately.\ + \ We introduce a novel training procedure to overcome two main Self-Organizing\ + \ Maps limitations which otherwise corrupt the interface usability. As evaluation,\ + \ we apply this method to our existing Voice-Controlled Interface for musical\ + \ instruments, obtaining sensible usability improvements.},\n address = {Daejeon,\ + \ Republic of Korea},\n author = {Stefano Fasciani and Lonce Wyse},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4582292},\n issn = {2220-4806},\n keywords\ + \ = {Self-Organizing Maps, Gestural Controller, Multi Dimensional Control, Unsupervised\ + \ Gesture Mapping, Voice Control},\n month = {May},\n pages = {507--512},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {A Self-Organizing\ + \ Gesture Map for a Voice-Controlled Instrument Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_50.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178596 + doi: 10.5281/zenodo.4582292 issn: 2220-4806 - keywords: 'Data Sonification, Data Visualization, Sonification, User Interface, - Sonic Interaction Design, Open Sound Control' + keywords: 'Self-Organizing Maps, Gestural Controller, Multi 
Dimensional Control, + Unsupervised Gesture Mapping, Voice Control' month: May - pages: 431--436 + pages: 507--512 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Cloud Bridge: a Data-driven Immersive Audio-Visual Software Interface' - url: http://www.nime.org/proceedings/2013/nime2013_250.pdf + title: A Self-Organizing Gesture Map for a Voice-Controlled Instrument Interface + url: http://www.nime.org/proceedings/2013/nime2013_50.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Everman2013 - abstract: 'Few formal methods exist for evaluating digital musical instruments (DMIs) - .This paper proposes a novel method of DMI evaluation using crowd-sourcedtagging. - One of the challenges in devising such methods is that the evaluationof a musical - instrument is an inherently qualitative task. While previouslyproposed methods - have focused on quantitative methods and largely ignored thequalitative aspects - of the task, tagging is well-suited to this and is alreadyused to classify things - such as websites and musical genres. These, like DMIs,do not lend themselves to - simple categorization or parameterization. Using the social tagging method, participating - individuals assign descriptivelabels, or tags, to a DMI. A DMI can then be evaluated - by analyzing the tagsassociated with it. Metrics can be generated from the tags - assigned to theinstrument, and comparisons made to other instruments. This can - give thedesigner valuable insight into the where the strengths of the design lie - andwhere improvements may be needed. A prototype system for testing the method - is proposed in the paper and iscurrently being implemented as part of an ongoing - DMI evaluation project. It isexpected that results from the prototype will be - available to report by thetime of the conference in May.' 
+ ID: Berthaut2013 + abstract: 'Digital musical instruments bring new possibilities for musical performance.They + are also more complex for the audience to understand, due to the diversityof their + components and the magical aspect of the musicians'' actions whencompared to acoustic + instruments. This complexity results in a loss of livenessand possibly a poor + experience for the audience. Our approach, called Rouages,is based on a mixed-reality + display system and a 3D visualization application.It reveals the mechanisms of + digital musical instruments by amplifyingmusicians'' gestures with virtual extensions + of the sensors, by representingthe sound components with 3D shapes and specific + behaviors and by showing theimpact ofmusicians gestures on these components. We + believe that Rouages opens up newperspectives to help instrument makers and musicians + improve audienceexperience with their digital musical instruments.' address: 'Daejeon, Republic of Korea' - author: Michael Everman and Colby Leider - bibtex: "@inproceedings{Everman2013,\n abstract = {Few formal methods exist for\ - \ evaluating digital musical instruments (DMIs) .This paper proposes a novel method\ - \ of DMI evaluation using crowd-sourcedtagging. One of the challenges in devising\ - \ such methods is that the evaluationof a musical instrument is an inherently\ - \ qualitative task. While previouslyproposed methods have focused on quantitative\ - \ methods and largely ignored thequalitative aspects of the task, tagging is well-suited\ - \ to this and is alreadyused to classify things such as websites and musical genres.\ - \ These, like DMIs,do not lend themselves to simple categorization or parameterization.\ - \ Using the social tagging method, participating individuals assign descriptivelabels,\ - \ or tags, to a DMI. A DMI can then be evaluated by analyzing the tagsassociated\ - \ with it. 
Metrics can be generated from the tags assigned to theinstrument, and\ - \ comparisons made to other instruments. This can give thedesigner valuable insight\ - \ into the where the strengths of the design lie andwhere improvements may be\ - \ needed. A prototype system for testing the method is proposed in the paper and\ - \ iscurrently being implemented as part of an ongoing DMI evaluation project.\ - \ It isexpected that results from the prototype will be available to report by\ - \ thetime of the conference in May.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Michael Everman and Colby Leider},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178510},\n issn = {2220-4806},\n keywords = {Evaluation, tagging,\ - \ digital musical instrument},\n month = {May},\n pages = {437--440},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {Toward {DMI} Evaluation\ - \ Using Crowd-Sourced Tagging Techniques},\n url = {http://www.nime.org/proceedings/2013/nime2013_251.pdf},\n\ + author: Florent Berthaut and Mark T. Marshall and Sriram Subramanian and Martin + Hachet + bibtex: "@inproceedings{Berthaut2013,\n abstract = {Digital musical instruments\ + \ bring new possibilities for musical performance.They are also more complex for\ + \ the audience to understand, due to the diversityof their components and the\ + \ magical aspect of the musicians' actions whencompared to acoustic instruments.\ + \ This complexity results in a loss of livenessand possibly a poor experience\ + \ for the audience. 
Our approach, called Rouages,is based on a mixed-reality display\ + \ system and a 3D visualization application.It reveals the mechanisms of digital\ + \ musical instruments by amplifyingmusicians' gestures with virtual extensions\ + \ of the sensors, by representingthe sound components with 3D shapes and specific\ + \ behaviors and by showing theimpact ofmusicians gestures on these components.\ + \ We believe that Rouages opens up newperspectives to help instrument makers and\ + \ musicians improve audienceexperience with their digital musical instruments.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Florent Berthaut and Mark\ + \ T. Marshall and Sriram Subramanian and Martin Hachet},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178478},\n issn = {2220-4806},\n keywords = {rouages,\ + \ digital musical instruments, mappings, 3D interface, mixed-reality,},\n month\ + \ = {May},\n pages = {164--169},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {Rouages: Revealing the Mechanisms of Digital Musical Instruments\ + \ to the Audience},\n url = {http://www.nime.org/proceedings/2013/nime2013_51.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178510 + doi: 10.5281/zenodo.1178478 issn: 2220-4806 - keywords: 'Evaluation, tagging, digital musical instrument' + keywords: 'rouages, digital musical instruments, mappings, 3D interface, mixed-reality,' month: May - pages: 437--440 + pages: 164--169 publisher: 'Graduate School of Culture Technology, KAIST' - title: Toward DMI Evaluation Using Crowd-Sourced Tagging Techniques - url: http://www.nime.org/proceedings/2013/nime2013_251.pdf + title: 'Rouages: Revealing the Mechanisms of Digital Musical Instruments to the + Audience' + url: http://www.nime.org/proceedings/2013/nime2013_51.pdf year: 
2013 - ENTRYTYPE: inproceedings - ID: Nam2013 - abstract: 'This paper describes the Musical Poi (mPoi), a unique sensor-based musicalinstrument - rooted in the ancient art of poi spinning. The trajectory ofcircular motion drawn - by the performance and the momentum of the mPoiinstrument are converted to the - energetic and vibrant sound, which makesspiritual and meditative soundscape that - opens everyone up the aura and clearsthe thought forms away. The mPoi project - and its concepts will be introducedfirst and then its interaction with a performer - will be discussed.The mPoi project seeks to develop a prototype for a set of mobile - musicalinstrument based on electronic motion sensors and circuit boards. Thistechnology - is installed in egg-shaped structure and allows communicationbetween a performer - and the mPoi instrument. The principal motivation for themPoi project has been - a desire to develop an extensible interface that willsupport the Poi performance, - which is a style of performance art originatedwith the Maori people of New Zealand - involving swinging tethered weightsthrough a variety of rhythmical and geometric - patterns. As an extension of the body and the expansion of the movement, the mPoiutilizes - the creative performance of Poi to make spatial and spiritual soundand music. - The aims of the mPoi project are:to create a prototype of mPoi instrument that - includes circuit board thatconnects the instrument to a sensor.to develop a software, - which includes programming of the circuit board and forthe sound generation.to - make a new artistic expression to refine the captured sound into artisticmusical - notes. The creative part of the project is to design a unique method to translate - theperformer''s gesture into sound. A unique algorithm was developed to extractfeatures - of the swing motion and translate them into various patterns of sound.' 
+ ID: Resch2013 + abstract: 'note~ for Max consists of four objects for the Software Max/MSP which + allow sequencing in floating point resolution and provide a Graphical User Interface + and a Scripting Interface for generating events within a timeline. Due to the + complete integration into Max/MSP it is possible to control almost every type + of client like another software, audio and video or extern hardware by note~ or + control note~ itself by other software and hardware.' address: 'Daejeon, Republic of Korea' - author: Sangbong Nam - bibtex: "@inproceedings{Nam2013,\n abstract = {This paper describes the Musical\ - \ Poi (mPoi), a unique sensor-based musicalinstrument rooted in the ancient art\ - \ of poi spinning. The trajectory ofcircular motion drawn by the performance and\ - \ the momentum of the mPoiinstrument are converted to the energetic and vibrant\ - \ sound, which makesspiritual and meditative soundscape that opens everyone up\ - \ the aura and clearsthe thought forms away. The mPoi project and its concepts\ - \ will be introducedfirst and then its interaction with a performer will be discussed.The\ - \ mPoi project seeks to develop a prototype for a set of mobile musicalinstrument\ - \ based on electronic motion sensors and circuit boards. Thistechnology is installed\ - \ in egg-shaped structure and allows communicationbetween a performer and the\ - \ mPoi instrument. The principal motivation for themPoi project has been a desire\ - \ to develop an extensible interface that willsupport the Poi performance, which\ - \ is a style of performance art originatedwith the Maori people of New Zealand\ - \ involving swinging tethered weightsthrough a variety of rhythmical and geometric\ - \ patterns. As an extension of the body and the expansion of the movement, the\ - \ mPoiutilizes the creative performance of Poi to make spatial and spiritual soundand\ - \ music. 
The aims of the mPoi project are:to create a prototype of mPoi instrument\ - \ that includes circuit board thatconnects the instrument to a sensor.to develop\ - \ a software, which includes programming of the circuit board and forthe sound\ - \ generation.to make a new artistic expression to refine the captured sound into\ - \ artisticmusical notes. The creative part of the project is to design a unique\ - \ method to translate theperformer's gesture into sound. A unique algorithm was\ - \ developed to extractfeatures of the swing motion and translate them into various\ - \ patterns of sound.},\n address = {Daejeon, Republic of Korea},\n author = {Sangbong\ - \ Nam},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178622},\n issn = {2220-4806},\n\ - \ keywords = {mPoi, Musical Poi, Jwibulnori, Poi, sensor-based musical instrument},\n\ - \ month = {May},\n pages = {148--151},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Musical Poi (mPoi)},\n url = {http://www.nime.org/proceedings/2013/nime2013_254.pdf},\n\ + author: Thomas Resch + bibtex: "@inproceedings{Resch2013,\n abstract = {note~ for Max consists of four\ + \ objects for the Software Max/MSP which allow sequencing in floating point resolution\ + \ and provide a Graphical User Interface and a Scripting Interface for generating\ + \ events within a timeline. 
Due to the complete integration into Max/MSP it is\ + \ possible to control almost every type of client like another software, audio\ + \ and video or extern hardware by note~ or control note~ itself by other software\ + \ and hardware.},\n address = {Daejeon, Republic of Korea},\n author = {Thomas\ + \ Resch},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178644},\n issn = {2220-4806},\n\ + \ keywords = {Max/MSP, composing, timeline, GUI, sequencing, score, notation.},\n\ + \ month = {May},\n pages = {210--212},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {note~ for Max --- An extension for Max/MSP for\ + \ Media Arts \\& music},\n url = {http://www.nime.org/proceedings/2013/nime2013_57.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178622 + doi: 10.5281/zenodo.1178644 issn: 2220-4806 - keywords: 'mPoi, Musical Poi, Jwibulnori, Poi, sensor-based musical instrument' + keywords: 'Max/MSP, composing, timeline, GUI, sequencing, score, notation.' month: May - pages: 148--151 + pages: 210--212 publisher: 'Graduate School of Culture Technology, KAIST' - title: Musical Poi (mPoi) - url: http://www.nime.org/proceedings/2013/nime2013_254.pdf + title: note~ for Max --- An extension for Max/MSP for Media Arts & music + url: http://www.nime.org/proceedings/2013/nime2013_57.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Oda2013 - abstract: 'The Internet allows musicians and other artists to collaborate remotely.However, - network latency presents a fundamental challenge for remotecollaborators who need - to coordinate and respond to each other''s performancein real time. 
In this paper, - we investigate the viability of predictingpercussion hits before they have occurred, - so that information about thepredicted drum hit can be sent over a network, and - the sound can be synthesizedat a receiver''s location at approximately the same - moment the hit occurs atthe sender''s location. Such a system would allow two - percussionists to playin perfect synchrony despite the delays caused by computer - networks. Toinvestigate the feasibility of such an approach, we record vibraphone - malletstrikes with a high-speed camera and track the mallet head position. We - showthat 30 ms before the strike occurs, it is possible to predict strike time - andvelocity with acceptable accuracy. Our method fits a second-order polynomial - tothe data to produce a strike time prediction that is within the bounds ofperceptual - synchrony, and a velocity estimate that will enable the soundpressure level of - the synthesized strike to be accurate within 3 dB.' + ID: Han2013 + abstract: 'This paper proposes a musical performance feedback system based on real-time + audio-score alignment for musical instrument education of beginner musicians. + In the proposed system, we do not make use of symbolic data such as MIDI, but + acquire a real-time audio input from on-board microphone of smartphone. Then, + the system finds onset and pitch of the note from the signal, to align this information + with the ground truth musical score. Real-time alignment allows the system to + evaluate whether the user played the correct note or not, regardless of its timing, + which enables user to play at their own speed, as playing same tempo with original + musical score is problematic for beginners. As an output of evaluation, the system + notifies the user about which part they are currently performing, and which note + were played incorrectly.' 
address: 'Daejeon, Republic of Korea' - author: Reid Oda and Adam Finkelstein and Rebecca Fiebrink - bibtex: "@inproceedings{Oda2013,\n abstract = {The Internet allows musicians and\ - \ other artists to collaborate remotely.However, network latency presents a fundamental\ - \ challenge for remotecollaborators who need to coordinate and respond to each\ - \ other's performancein real time. In this paper, we investigate the viability\ - \ of predictingpercussion hits before they have occurred, so that information\ - \ about thepredicted drum hit can be sent over a network, and the sound can be\ - \ synthesizedat a receiver's location at approximately the same moment the hit\ - \ occurs atthe sender's location. Such a system would allow two percussionists\ - \ to playin perfect synchrony despite the delays caused by computer networks.\ - \ Toinvestigate the feasibility of such an approach, we record vibraphone malletstrikes\ - \ with a high-speed camera and track the mallet head position. We showthat 30\ - \ ms before the strike occurs, it is possible to predict strike time andvelocity\ - \ with acceptable accuracy. 
Our method fits a second-order polynomial tothe data\ - \ to produce a strike time prediction that is within the bounds ofperceptual synchrony,\ - \ and a velocity estimate that will enable the soundpressure level of the synthesized\ - \ strike to be accurate within 3 dB.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Reid Oda and Adam Finkelstein and Rebecca Fiebrink},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178624},\n issn = {2220-4806},\n keywords = {Networked\ - \ performance, prediction, computer vision},\n month = {May},\n pages = {94--97},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Towards\ - \ Note-Level Prediction for Networked Music Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_258.pdf},\n\ - \ year = {2013}\n}\n" + author: Yoonchang Han and Sejun Kwon and Kibeom Lee and Kyogu Lee + bibtex: "@inproceedings{Han2013,\n abstract = {This paper proposes a musical performance\ + \ feedback system based on real-time audio-score alignment for musical instrument\ + \ education of beginner musicians. In the proposed system, we do not make use\ + \ of symbolic data such as MIDI, but acquire a real-time audio input from on-board\ + \ microphone of smartphone. Then, the system finds onset and pitch of the note\ + \ from the signal, to align this information with the ground truth musical score.\ + \ Real-time alignment allows the system to evaluate whether the user played the\ + \ correct note or not, regardless of its timing, which enables user to play at\ + \ their own speed, as playing same tempo with original musical score is problematic\ + \ for beginners. 
As an output of evaluation, the system notifies the user about\ + \ which part they are currently performing, and which note were played incorrectly.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Yoonchang Han and Sejun\ + \ Kwon and Kibeom Lee and Kyogu Lee},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178546},\n\ + \ issn = {2220-4806},\n keywords = {Music performance analysis, Music education,\ + \ Real-time score following},\n month = {May},\n pages = {120--121},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {A Musical Performance\ + \ Evaluation System for Beginner Musician based on Real-time Score Following},\n\ + \ url = {http://www.nime.org/proceedings/2013/nime2013_60.pdf},\n year = {2013}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178624 + doi: 10.5281/zenodo.1178546 issn: 2220-4806 - keywords: 'Networked performance, prediction, computer vision' + keywords: 'Music performance analysis, Music education, Real-time score following' month: May - pages: 94--97 + pages: 120--121 publisher: 'Graduate School of Culture Technology, KAIST' - title: Towards Note-Level Prediction for Networked Music Performance - url: http://www.nime.org/proceedings/2013/nime2013_258.pdf + title: A Musical Performance Evaluation System for Beginner Musician based on Real-time + Score Following + url: http://www.nime.org/proceedings/2013/nime2013_60.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Jenkins2013 - abstract: 'This paper presents a minimally-invasive, wireless optical sensorsystem - for use with any conventional piston valve acoustic trumpet. Itis designed to - be easy to install and remove by any trumpeter. 
Ourgoal is to offer the extended - control afforded by hyperinstrumentswithout the hard to reverse or irreversible - invasive modificationsthat are typically used for adding digital sensing capabilities. - Weutilize optical sensors to track the continuous position displacementvalues - of the three trumpet valves. These values are trasmittedwirelessly and can be - used by an external controller. The hardware hasbeen designed to be reconfigurable - by having the housing 3D printed sothat the dimensions can be adjusted for any - particular trumpetmodel. The result is a low cost, low power, easily replicable - sensorsolution that offers any trumpeter the ability to augment their ownexisting - trumpet without compromising the instrument''s structure orplaying technique. - The extended digital control afforded by our systemis interweaved with the natural - playing gestures of an acoustictrumpet. We believe that this seemless integration - is critical forenabling effective and musical human computer interaction.Keywords: - hyperinstrument, trumpet, minimally-invasive, gesture sensing,wireless, I2C' + ID: Hindle2013 + abstract: 'Audience participation in computer music has long been limited byresources + such as sensor technology or the material goods necessary toshare such an instrument. + A recent paradigm is to take advantageof the incredible popularity of the smart-phone, + a pocket sizedcomputer, and other mobile devices, to provide the audience aninterface + into a computer music instrument. In this paper we discuss amethod of sharing + a computer music instrument''s interface with anaudience to allow them to interact + via their smartphone. We propose amethod that is relatively cross-platform and + device-agnostic, yetstill allows for a rich user-interactive experience. By emulating + acaptive-portal or hotspot we reduce the adoptability issues and configurationproblems + facing performers and their audience. 
We share ourexperiences with this system, + as well as an implementation of thesystem itself.' address: 'Daejeon, Republic of Korea' - author: Leonardo Jenkins and Shawn Trail and George Tzanetakis and Peter Driessen - and Wyatt Page - bibtex: "@inproceedings{Jenkins2013,\n abstract = {This paper presents a minimally-invasive,\ - \ wireless optical sensorsystem for use with any conventional piston valve acoustic\ - \ trumpet. Itis designed to be easy to install and remove by any trumpeter. Ourgoal\ - \ is to offer the extended control afforded by hyperinstrumentswithout the hard\ - \ to reverse or irreversible invasive modificationsthat are typically used for\ - \ adding digital sensing capabilities. Weutilize optical sensors to track the\ - \ continuous position displacementvalues of the three trumpet valves. These values\ - \ are trasmittedwirelessly and can be used by an external controller. The hardware\ - \ hasbeen designed to be reconfigurable by having the housing 3D printed sothat\ - \ the dimensions can be adjusted for any particular trumpetmodel. The result is\ - \ a low cost, low power, easily replicable sensorsolution that offers any trumpeter\ - \ the ability to augment their ownexisting trumpet without compromising the instrument's\ - \ structure orplaying technique. 
The extended digital control afforded by our\ - \ systemis interweaved with the natural playing gestures of an acoustictrumpet.\ - \ We believe that this seemless integration is critical forenabling effective\ - \ and musical human computer interaction.Keywords: hyperinstrument, trumpet, minimally-invasive,\ - \ gesture sensing,wireless, I2C},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Leonardo Jenkins and Shawn Trail and George Tzanetakis and Peter Driessen\ - \ and Wyatt Page},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178562},\n\ - \ issn = {2220-4806},\n keywords = {hyperinstrument, trumpet, minimally-invasive,\ - \ gesture sensing, wireless, I2C},\n month = {May},\n pages = {352--357},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {An Easily Removable,\ - \ wireless Optical Sensing System (EROSS) for the Trumpet},\n url = {http://www.nime.org/proceedings/2013/nime2013_261.pdf},\n\ + author: Abram Hindle + bibtex: "@inproceedings{Hindle2013,\n abstract = {Audience participation in computer\ + \ music has long been limited byresources such as sensor technology or the material\ + \ goods necessary toshare such an instrument. A recent paradigm is to take advantageof\ + \ the incredible popularity of the smart-phone, a pocket sizedcomputer, and other\ + \ mobile devices, to provide the audience aninterface into a computer music instrument.\ + \ In this paper we discuss amethod of sharing a computer music instrument's interface\ + \ with anaudience to allow them to interact via their smartphone. We propose amethod\ + \ that is relatively cross-platform and device-agnostic, yetstill allows for a\ + \ rich user-interactive experience. By emulating acaptive-portal or hotspot we\ + \ reduce the adoptability issues and configurationproblems facing performers and\ + \ their audience. 
We share ourexperiences with this system, as well as an implementation\ + \ of thesystem itself.},\n address = {Daejeon, Republic of Korea},\n author =\ + \ {Abram Hindle},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178550},\n\ + \ issn = {2220-4806},\n keywords = {Wifi, Smartphone, Audience Interaction, Adoption,\ + \ Captive Portal, Multi-User, Hotspot},\n month = {May},\n pages = {174--179},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {{SW}ARMED:\ + \ Captive Portals, Mobile Devices, and Audience Participation in Multi-User Music\ + \ Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_62.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178562 + doi: 10.5281/zenodo.1178550 issn: 2220-4806 - keywords: 'hyperinstrument, trumpet, minimally-invasive, gesture sensing, wireless, - I2C' + keywords: 'Wifi, Smartphone, Audience Interaction, Adoption, Captive Portal, Multi-User, + Hotspot' month: May - pages: 352--357 + pages: 174--179 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'An Easily Removable, wireless Optical Sensing System (EROSS) for the Trumpet' - url: http://www.nime.org/proceedings/2013/nime2013_261.pdf + title: 'SWARMED: Captive Portals, Mobile Devices, and Audience Participation in + Multi-User Music Performance' + url: http://www.nime.org/proceedings/2013/nime2013_62.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Freed2013a - abstract: This paper positively addresses the problem that most NIME devices are - ephemeralasting long enough to signal academic and technical prowess but rarely - longerthan a few musical performances. 
We offer a case study that shows thatlongevity - of use depends on stabilizing the interface and innovating theimplementation to - maintain the required performance of the controller for theplayer. + ID: Park2013 + abstract: 'Since Euler''s development of the Tonnetz in 1739, musicians, composers + and instrument designers have been fascinated with the concept of musicalisomorphism, + the idea that by arranging tones by their harmonic relationships rather than by + their physical properties, the common shapes of musical constructs will appear, + facilitating learning and new ways of exploring harmonic spaces. The construction + of isomorphic instruments, beyond limited square isomorphisms present in many + stringed instruments, has been a challenge in the past for two reasons: The first + problem, that of re-arranging note actuators from their sounding elements, has + been solved by digital instrument design. The second, more conceptual problem, + is that only a single isomorphism can be designed for any one instrument, requiring + the instrument designer (as well as composer and performer) to "lock in" to a + single isomorphism, or to have a different instrument for each isomorphism in + order to experiment. Musix (an iOS application) and Rainboard (a physical device) + are two new musical instruments built to overcome this and other limitations of + existing isomorphic instruments. Musix was developed to allow experimentation + with a wide variety of different isomorphic layouts, to assess the advantages + and disadvantages of each. The Rainboard consists of a hexagonal array of arcade + buttons embedded with RGB-LEDs, which are used to indicate characteristics of + the isomorphism currently in use on the Rainboard. The creation of these two instruments/experimentation + platforms allows for isomorphic layouts to be explored in waysthat are not possible + with existing instruments.' 
address: 'Daejeon, Republic of Korea' - author: Adrian Freed and John MacCallum and Sam Mansfield - bibtex: "@inproceedings{Freed2013a,\n abstract = {This paper positively addresses\ - \ the problem that most NIME devices are ephemeralasting long enough to signal\ - \ academic and technical prowess but rarely longerthan a few musical performances.\ - \ We offer a case study that shows thatlongevity of use depends on stabilizing\ - \ the interface and innovating theimplementation to maintain the required performance\ - \ of the controller for theplayer.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Adrian Freed and John MacCallum and Sam Mansfield},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178524},\n issn = {2220-4806},\n keywords = {Fingerboard\ - \ controller, Best practices, Recrudescence, Organology, Unobtainium},\n month\ - \ = {May},\n pages = {441--445},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {``Old'' is the New ``New'': a Fingerboard Case Study in Recrudescence\ - \ as a NIME Development Strategy},\n url = {http://www.nime.org/proceedings/2013/nime2013_265.pdf},\n\ + author: Brett Park and David Gerhard + bibtex: "@inproceedings{Park2013,\n abstract = {Since Euler's development of the\ + \ Tonnetz in 1739, musicians, composers and instrument designers have been fascinated\ + \ with the concept of musicalisomorphism, the idea that by arranging tones by\ + \ their harmonic relationships rather than by their physical properties, the common\ + \ shapes of musical constructs will appear, facilitating learning and new ways\ + \ of exploring harmonic spaces. 
The construction of isomorphic instruments, beyond\ + \ limited square isomorphisms present in many stringed instruments, has been a\ + \ challenge in the past for two reasons: The first problem, that of re-arranging\ + \ note actuators from their sounding elements, has been solved by digital instrument\ + \ design. The second, more conceptual problem, is that only a single isomorphism\ + \ can be designed for any one instrument, requiring the instrument designer (as\ + \ well as composer and performer) to \"lock in\" to a single isomorphism, or to\ + \ have a different instrument for each isomorphism in order to experiment. Musix\ + \ (an iOS application) and Rainboard (a physical device) are two new musical instruments\ + \ built to overcome this and other limitations of existing isomorphic instruments.\ + \ Musix was developed to allow experimentation with a wide variety of different\ + \ isomorphic layouts, to assess the advantages and disadvantages of each. The\ + \ Rainboard consists of a hexagonal array of arcade buttons embedded with RGB-LEDs,\ + \ which are used to indicate characteristics of the isomorphism currently in use\ + \ on the Rainboard. 
The creation of these two instruments/experimentation platforms\ + \ allows for isomorphic layouts to be explored in waysthat are not possible with\ + \ existing instruments.},\n address = {Daejeon, Republic of Korea},\n author =\ + \ {Brett Park and David Gerhard},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178632},\n\ + \ issn = {2220-4806},\n keywords = {isomorphic, mobile application, hexagon, keyboard},\n\ + \ month = {May},\n pages = {319--324},\n publisher = {Graduate School of Culture\ + \ Technology, KAIST},\n title = {Rainboard and Musix: Building dynamic isomorphic\ + \ interfaces},\n url = {http://www.nime.org/proceedings/2013/nime2013_65.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178524 + doi: 10.5281/zenodo.1178632 issn: 2220-4806 - keywords: 'Fingerboard controller, Best practices, Recrudescence, Organology, Unobtainium' + keywords: 'isomorphic, mobile application, hexagon, keyboard' month: May - pages: 441--445 + pages: 319--324 publisher: 'Graduate School of Culture Technology, KAIST' - title: '``Old'''' is the New ``New'''': a Fingerboard Case Study in Recrudescence - as a NIME Development Strategy' - url: http://www.nime.org/proceedings/2013/nime2013_265.pdf + title: 'Rainboard and Musix: Building dynamic isomorphic interfaces' + url: http://www.nime.org/proceedings/2013/nime2013_65.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Freed2013 - abstract: 'We describe ``o.expr'''' an expression language for dynamic, object- - and agent-oriented computation of gesture signal processing workflows using OSC - bundles. We illustrate the use of o.expr for a range of gesture processingtasks - showing how stateless programming and homoiconicity simplify applications development - and provide support for heterogeneous computational networks.' 
+ ID: ElShimy2013 + abstract: 'For a number of years, musicians in different locations have been able + toperform with one another over a network as though present on the same stage.However, + rather than attempt to re-create an environment for Network MusicPerformance (NMP) + that mimics co-present performance as closely as possible, wepropose focusing + on providing musicians with additional controls that can helpincrease the level + of interaction between them. To this end, we have developeda reactive environment + for distributed performance that provides participantsdynamic, real-time control + over several aspects of their performance, enablingthem to change volume levels + and experience exaggerated stereo panning. Inaddition, our reactive environment + reinforces a feeling of a ``shared space'''' between musicians. It differs most + notably from standard ventures into thedesign of novel musical interfaces and + installations in its reliance onuser-centric methodologies borrowed from the field + of Human-ComputerInteraction (HCI). Not only does this research enable us to closely + examine thecommunicative aspects of performance, it also allows us to explore + newinterpretations of the network as a performance space. This paper describes + themotivation and background behind our project, the work that has been undertakentowards + its realization and the future steps that have yet to be explored.' address: 'Daejeon, Republic of Korea' - author: Adrian Freed and John MacCallum and David Wessel - bibtex: "@inproceedings{Freed2013,\n abstract = {We describe ``o.expr'' an expression\ - \ language for dynamic, object- and agent-oriented computation of gesture signal\ - \ processing workflows using OSC bundles. 
We illustrate the use of o.expr for\ - \ a range of gesture processingtasks showing how stateless programming and homoiconicity\ - \ simplify applications development and provide support for heterogeneous computational\ - \ networks.},\n address = {Daejeon, Republic of Korea},\n author = {Adrian Freed\ - \ and John MacCallum and David Wessel},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178526},\n\ - \ issn = {2220-4806},\n keywords = {Gesture Signal Processing, Open Sound Control,\ - \ Functional Programming, Homoiconicity, Process Migration.},\n month = {May},\n\ - \ pages = {347--351},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Agile Interface Development using OSC Expressions and Process Migration},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_266.pdf},\n year = {2013}\n\ + author: Dalia El-Shimy and Jeremy R. Cooperstock + bibtex: "@inproceedings{ElShimy2013,\n abstract = {For a number of years, musicians\ + \ in different locations have been able toperform with one another over a network\ + \ as though present on the same stage.However, rather than attempt to re-create\ + \ an environment for Network MusicPerformance (NMP) that mimics co-present performance\ + \ as closely as possible, wepropose focusing on providing musicians with additional\ + \ controls that can helpincrease the level of interaction between them. To this\ + \ end, we have developeda reactive environment for distributed performance that\ + \ provides participantsdynamic, real-time control over several aspects of their\ + \ performance, enablingthem to change volume levels and experience exaggerated\ + \ stereo panning. Inaddition, our reactive environment reinforces a feeling of\ + \ a ``shared space'' between musicians. 
It differs most notably from standard\ + \ ventures into thedesign of novel musical interfaces and installations in its\ + \ reliance onuser-centric methodologies borrowed from the field of Human-ComputerInteraction\ + \ (HCI). Not only does this research enable us to closely examine thecommunicative\ + \ aspects of performance, it also allows us to explore newinterpretations of the\ + \ network as a performance space. This paper describes themotivation and background\ + \ behind our project, the work that has been undertakentowards its realization\ + \ and the future steps that have yet to be explored.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Dalia El-Shimy and Jeremy R. Cooperstock},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178506},\n issn = {2220-4806},\n month\ + \ = {May},\n pages = {158--163},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {Reactive Environment for Network Music Performance},\n url\ + \ = {http://www.nime.org/proceedings/2013/nime2013_66.pdf},\n year = {2013}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178526 + doi: 10.5281/zenodo.1178506 issn: 2220-4806 - keywords: 'Gesture Signal Processing, Open Sound Control, Functional Programming, - Homoiconicity, Process Migration.' 
month: May - pages: 347--351 + pages: 158--163 publisher: 'Graduate School of Culture Technology, KAIST' - title: Agile Interface Development using OSC Expressions and Process Migration - url: http://www.nime.org/proceedings/2013/nime2013_266.pdf + title: Reactive Environment for Network Music Performance + url: http://www.nime.org/proceedings/2013/nime2013_66.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Hamilton2013 - abstract: 'With a nod towards digital puppetry and game-based film genres such asmachinima, - recent additions to UDKOSC of- fer an Open Sound Control (OSC)control layer for - external control over both third-person ''''pawn'''' entitiesand camera controllers - in fully rendered game-space. Real-time OSC input,driven by algorithmic process - or parsed from a human-readable timed scriptingsyntax allows users to shape choreographies - of gesture, in this case actormotion and action, as well as an audiences view - into the game-spaceenvironment. As UDKOSC outputs real-time coordinate and action - data generatedby UDK pawns and players with OSC, individual as well as aggregate - virtualactor gesture and motion can be leveraged as a driver for both creative - andprocedural/adaptive gaming music and audio concerns.' + ID: Johnson2013 + abstract: 'This paper presents a new technique for interface-driven diffusion performance. + Details outlining the development of a new tabletop surface-based performance + interface, named tactile.space, are discussed. User interface and amplitude panning + processes employed in the creation of tactile.space are focused upon,and are followed + by a user study-based evaluation of the interface. It is hoped that the techniques + described in this paper afford performers and composers an enhanced level of creative + expression in the diffusion performance practice. This paper introduces and evaluates + tactile.space, a multi-touch performance interface for diffusion built on the + BrickTable. 
It describes how tactile.space implements Vector Base Amplitude Panning + to achieve real-time source positioning. The final section of this paper presents + the findings of a userstudy that was conducted by those who performed with the + interface, evaluating the interface as a performance tool with a focus on the + increased creative expression the interface affords, and directly comparing it + to the traditional diffusion user interface.' address: 'Daejeon, Republic of Korea' - author: Rob Hamilton - bibtex: "@inproceedings{Hamilton2013,\n abstract = {With a nod towards digital puppetry\ - \ and game-based film genres such asmachinima, recent additions to UDKOSC of-\ - \ fer an Open Sound Control (OSC)control layer for external control over both\ - \ third-person ''pawn'' entitiesand camera controllers in fully rendered game-space.\ - \ Real-time OSC input,driven by algorithmic process or parsed from a human-readable\ - \ timed scriptingsyntax allows users to shape choreographies of gesture, in this\ - \ case actormotion and action, as well as an audiences view into the game-spaceenvironment.\ - \ As UDKOSC outputs real-time coordinate and action data generatedby UDK pawns\ - \ and players with OSC, individual as well as aggregate virtualactor gesture and\ - \ motion can be leveraged as a driver for both creative andprocedural/adaptive\ - \ gaming music and audio concerns.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Rob Hamilton},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178544},\n\ - \ issn = {2220-4806},\n keywords = {procedural music, procedural audio, interactive\ - \ sonification, game music, Open Sound Control},\n month = {May},\n pages = {446--449},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Sonifying\ - \ Game-Space Choreographies With UDKOSC},\n url = {http://www.nime.org/proceedings/2013/nime2013_268.pdf},\n\ + 
author: Bridget Johnson and Ajay Kapur + bibtex: "@inproceedings{Johnson2013,\n abstract = {This paper presents a new technique\ + \ for interface-driven diffusion performance. Details outlining the development\ + \ of a new tabletop surface-based performance interface, named tactile.space,\ + \ are discussed. User interface and amplitude panning processes employed in the\ + \ creation of tactile.space are focused upon,and are followed by a user study-based\ + \ evaluation of the interface. It is hoped that the techniques described in this\ + \ paper afford performers and composers an enhanced level of creative expression\ + \ in the diffusion performance practice. This paper introduces and evaluates tactile.space,\ + \ a multi-touch performance interface for diffusion built on the BrickTable. It\ + \ describes how tactile.space implements Vector Base Amplitude Panning to achieve\ + \ real-time source positioning. The final section of this paper presents the findings\ + \ of a userstudy that was conducted by those who performed with the interface,\ + \ evaluating the interface as a performance tool with a focus on the increased\ + \ creative expression the interface affords, and directly comparing it to the\ + \ traditional diffusion user interface.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Bridget Johnson and Ajay Kapur},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178570},\n issn = {2220-4806},\n keywords = {Multi touch, diffusion,\ + \ VBAP, tabletop surface},\n month = {May},\n pages = {213--216},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {MULTI-TOUCH INTERFACES\ + \ FOR PHANTOM SOURCE POSITIONING IN LIVE SOUND DIFFUSION},\n url = {http://www.nime.org/proceedings/2013/nime2013_75.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 
10.5281/zenodo.1178544 + doi: 10.5281/zenodo.1178570 issn: 2220-4806 - keywords: 'procedural music, procedural audio, interactive sonification, game music, - Open Sound Control' + keywords: 'Multi touch, diffusion, VBAP, tabletop surface' month: May - pages: 446--449 + pages: 213--216 publisher: 'Graduate School of Culture Technology, KAIST' - title: Sonifying Game-Space Choreographies With UDKOSC - url: http://www.nime.org/proceedings/2013/nime2013_268.pdf + title: MULTI-TOUCH INTERFACES FOR PHANTOM SOURCE POSITIONING IN LIVE SOUND DIFFUSION + url: http://www.nime.org/proceedings/2013/nime2013_75.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: John2013 - abstract: 'This paper reviews the mobile music projects that have been presented - at NIMEin the past ten years in order to assess whether the changes in technology - haveaffected the activities of mobile music research. An overview of mobile musicprojects - is presented using the categories that describe the main activities:projects that - explore the influence and make use of location; applications thatshare audio or - promote collaborative composition; interaction using wearabledevices; the use - of mobile phones as performance devices; projects that exploreHCI design issues. - The relative activity between different types of activity isassessed in order - to identify trends. The classification according totechnological, social or geographic - showed an overwhelming bias to thetechnological, followed by social investigations. - An alternative classificationof survey product, or artifact reveals an increase - in the number of productsdescribed with a corresponding decline in the number - of surveys and artisticprojects. 
The increase in technical papers appears to be - due to an enthusiasmto make use of increased capability of mobile phones, although - there are signsthat the initial interest has already peaked, and researchers are - againinterested to explore technologies and artistic expression beyond existingmobile - phones.' + ID: Lui2013 + abstract: 'Music is expressive and hard to be described by words. Learning music + istherefore not a straightforward task especially for vocal music such as humanbeatboxing. + People usually learn beatboxing in the traditional way of imitatingaudio sample + without steps and instructions. Spectrogram contains a lot ofinformation about + audio, but it is too complicated to be understood inreal-time. Reinforcement learning + is a psychological method, which makes use ofreward and/or punishment as stimulus + to train the decision-making process ofhuman. We propose a novel music learning + approach based on the reinforcementlearning method, which makes use of compact + and easy-to-read spectruminformation as visual clue to assist human beatboxing + learning on smartphone.Experimental result shows that the visual information is + easy to understand inreal-time, which improves the effectiveness of beatboxing + self-learning.' address: 'Daejeon, Republic of Korea' - author: David John - bibtex: "@inproceedings{John2013,\n abstract = {This paper reviews the mobile music\ - \ projects that have been presented at NIMEin the past ten years in order to assess\ - \ whether the changes in technology haveaffected the activities of mobile music\ - \ research. An overview of mobile musicprojects is presented using the categories\ - \ that describe the main activities:projects that explore the influence and make\ - \ use of location; applications thatshare audio or promote collaborative composition;\ - \ interaction using wearabledevices; the use of mobile phones as performance devices;\ - \ projects that exploreHCI design issues. 
The relative activity between different\ - \ types of activity isassessed in order to identify trends. The classification\ - \ according totechnological, social or geographic showed an overwhelming bias\ - \ to thetechnological, followed by social investigations. An alternative classificationof\ - \ survey product, or artifact reveals an increase in the number of productsdescribed\ - \ with a corresponding decline in the number of surveys and artisticprojects.\ - \ The increase in technical papers appears to be due to an enthusiasmto make use\ - \ of increased capability of mobile phones, although there are signsthat the initial\ - \ interest has already peaked, and researchers are againinterested to explore\ - \ technologies and artistic expression beyond existingmobile phones.},\n address\ - \ = {Daejeon, Republic of Korea},\n author = {David John},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178568},\n issn = {2220-4806},\n keywords = {Mobile Music,\ - \ interactive music, proximity sensing, wearable devices, mobile phone performance,\ - \ interaction design},\n month = {May},\n pages = {301--306},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Updating the Classifications\ - \ of Mobile Music Projects},\n url = {http://www.nime.org/proceedings/2013/nime2013_273.pdf},\n\ - \ year = {2013}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178568 - issn: 2220-4806 - keywords: 'Mobile Music, interactive music, proximity sensing, wearable devices, - mobile phone performance, interaction design' - month: May - pages: 301--306 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Updating the Classifications of Mobile Music Projects - url: http://www.nime.org/proceedings/2013/nime2013_273.pdf - year: 2013 - - -- ENTRYTYPE: inproceedings - ID: Walther2013 
- abstract: 'Although multi-touch user interfaces have become a widespread form of - humancomputer interaction in many technical areas, they haven''t found their way - intolive performances of musicians and keyboarders yet. In this paper, we present - anovel multi-touch interface method aimed at professional keyboard players. Themethod, - which is inspired by computer trackpads, allows controlling up to tencontinuous - parameters of a keyboard with one hand, without requiring the userto look at the - touch area --- a significant improvement over traditional keyboardinput controls. - We discuss optimizations needed to make our interface reliable,and show in an - evaluation with four keyboarders of different skill level thatthis method is both - intuitive and powerful, and allows users to more quicklyalter the sound of their - keyboard than they could with current input solutions.' - address: 'Daejeon, Republic of Korea' - author: Thomas Walther and Damir Ismailović and Bernd Brügge - bibtex: "@inproceedings{Walther2013,\n abstract = {Although multi-touch user interfaces\ - \ have become a widespread form of humancomputer interaction in many technical\ - \ areas, they haven't found their way intolive performances of musicians and keyboarders\ - \ yet. In this paper, we present anovel multi-touch interface method aimed at\ - \ professional keyboard players. Themethod, which is inspired by computer trackpads,\ - \ allows controlling up to tencontinuous parameters of a keyboard with one hand,\ - \ without requiring the userto look at the touch area --- a significant improvement\ - \ over traditional keyboardinput controls. 
We discuss optimizations needed to\ - \ make our interface reliable,and show in an evaluation with four keyboarders\ - \ of different skill level thatthis method is both intuitive and powerful, and\ - \ allows users to more quicklyalter the sound of their keyboard than they could\ - \ with current input solutions.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Thomas Walther and Damir Ismailovi{\\'c} and Bernd Br{\\''u}gge},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178684},\n issn = {2220-4806},\n\ - \ keywords = {multi-touch, mobile, keyboard, interface},\n month = {May},\n pages\ - \ = {98--101},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Rocking the Keys with a Multi-Touch Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_275.pdf},\n\ + author: Simon Lui + bibtex: "@inproceedings{Lui2013,\n abstract = {Music is expressive and hard to be\ + \ described by words. Learning music istherefore not a straightforward task especially\ + \ for vocal music such as humanbeatboxing. People usually learn beatboxing in\ + \ the traditional way of imitatingaudio sample without steps and instructions.\ + \ Spectrogram contains a lot ofinformation about audio, but it is too complicated\ + \ to be understood inreal-time. Reinforcement learning is a psychological method,\ + \ which makes use ofreward and/or punishment as stimulus to train the decision-making\ + \ process ofhuman. 
We propose a novel music learning approach based on the reinforcementlearning\ + \ method, which makes use of compact and easy-to-read spectruminformation as visual\ + \ clue to assist human beatboxing learning on smartphone.Experimental result shows\ + \ that the visual information is easy to understand inreal-time, which improves\ + \ the effectiveness of beatboxing self-learning.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Simon Lui},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178600},\n\ + \ issn = {2220-4806},\n keywords = {Audio analysis, music learning tool, reinforcement\ + \ learning, smartphone app, audio information retrieval.},\n month = {May},\n\ + \ pages = {25--28},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {A Compact Spectrum-Assisted Human Beatboxing Reinforcement Learning\ + \ Tool On Smartphone},\n url = {http://www.nime.org/proceedings/2013/nime2013_79.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178684 + doi: 10.5281/zenodo.1178600 issn: 2220-4806 - keywords: 'multi-touch, mobile, keyboard, interface' + keywords: 'Audio analysis, music learning tool, reinforcement learning, smartphone + app, audio information retrieval.' month: May - pages: 98--101 + pages: 25--28 publisher: 'Graduate School of Culture Technology, KAIST' - title: Rocking the Keys with a Multi-Touch Interface - url: http://www.nime.org/proceedings/2013/nime2013_275.pdf + title: A Compact Spectrum-Assisted Human Beatboxing Reinforcement Learning Tool + On Smartphone + url: http://www.nime.org/proceedings/2013/nime2013_79.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Berdahl2013 - abstract: 'Satellite CCRMA is a platform for making embedded musical instruments - andembedded installations. 
The project aims to help prototypes live longer byproviding - a complete prototyping platform in a single, small, and stand-aloneembedded form - factor. A set of scripts makes it easier for artists andbeginning technical students - to access powerful features, while advanced usersenjoy the flexibility of the - open-source software and open-source hardwareplatform.This paper focuses primarily - on networking capabilities of Satellite CCRMA andnew software for enabling interactive - control of the hardware-acceleratedgraphical output. In addition, some results - are presented from robustness testsalongside specific advice and software support - for increasing the lifespan ofthe flash memory.' + ID: Lo2013 + abstract: 'Mobile DJ is a music-listening system that allows multiple users to interactand + collaboratively contribute to a single song over a social network. Activelistening + through a tangible interface facilitates users to manipulate musicaleffects, such + as incorporating chords or ``scratching'''' the record. Acommunication and interaction + server further enables multiple users to connectover the Internet and collaborate + and interact through their music. User testsindicate that the device is successful + at facilitating user immersion into theactive listening experience, and that users + enjoy the added sensory input aswell as the novel way of interacting with the + music and each other.' address: 'Daejeon, Republic of Korea' - author: Edgar Berdahl and Spencer Salazar and Myles Borins - bibtex: "@inproceedings{Berdahl2013,\n abstract = {Satellite CCRMA is a platform\ - \ for making embedded musical instruments andembedded installations. The project\ - \ aims to help prototypes live longer byproviding a complete prototyping platform\ - \ in a single, small, and stand-aloneembedded form factor. 
A set of scripts makes\ - \ it easier for artists andbeginning technical students to access powerful features,\ - \ while advanced usersenjoy the flexibility of the open-source software and open-source\ - \ hardwareplatform.This paper focuses primarily on networking capabilities of\ - \ Satellite CCRMA andnew software for enabling interactive control of the hardware-acceleratedgraphical\ - \ output. In addition, some results are presented from robustness testsalongside\ - \ specific advice and software support for increasing the lifespan ofthe flash\ - \ memory.},\n address = {Daejeon, Republic of Korea},\n author = {Edgar Berdahl\ - \ and Spencer Salazar and Myles Borins},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178476},\n\ - \ issn = {2220-4806},\n keywords = {Satellite CCRMA, embedded musical instruments,\ - \ embedded installations, Node.js, Interface.js, hardware-accelerated graphics,\ - \ OpenGLES, SimpleGraphicsOSC, union file system, write endurance},\n month =\ - \ {May},\n pages = {325--330},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {Embedded Networking and Hardware-Accelerated Graphics with\ - \ Satellite CCRMA},\n url = {http://www.nime.org/proceedings/2013/nime2013_277.pdf},\n\ + author: Kenneth W.K. Lo and Chi Kin Lau and Michael Xuelin Huang and Wai Wa Tang + and Grace Ngai and Stephen C.F. Chan + bibtex: "@inproceedings{Lo2013,\n abstract = {Mobile DJ is a music-listening system\ + \ that allows multiple users to interactand collaboratively contribute to a single\ + \ song over a social network. Activelistening through a tangible interface facilitates\ + \ users to manipulate musicaleffects, such as incorporating chords or ``scratching''\ + \ the record. 
Acommunication and interaction server further enables multiple users\ + \ to connectover the Internet and collaborate and interact through their music.\ + \ User testsindicate that the device is successful at facilitating user immersion\ + \ into theactive listening experience, and that users enjoy the added sensory\ + \ input aswell as the novel way of interacting with the music and each other.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Kenneth W.K. Lo and Chi\ + \ Kin Lau and Michael Xuelin Huang and Wai Wa Tang and Grace Ngai and Stephen\ + \ C.F. Chan},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178598},\n issn\ + \ = {2220-4806},\n keywords = {Mobile, music, interaction design, tangible user\ + \ interface},\n month = {May},\n pages = {217--222},\n publisher = {Graduate School\ + \ of Culture Technology, KAIST},\n title = {Mobile DJ: a Tangible, Mobile Platform\ + \ for Active and Collaborative Music Listening},\n url = {http://www.nime.org/proceedings/2013/nime2013_81.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178476 + doi: 10.5281/zenodo.1178598 issn: 2220-4806 - keywords: 'Satellite CCRMA, embedded musical instruments, embedded installations, - Node.js, Interface.js, hardware-accelerated graphics, OpenGLES, SimpleGraphicsOSC, - union file system, write endurance' + keywords: 'Mobile, music, interaction design, tangible user interface' month: May - pages: 325--330 + pages: 217--222 publisher: 'Graduate School of Culture Technology, KAIST' - title: Embedded Networking and Hardware-Accelerated Graphics with Satellite CCRMA - url: http://www.nime.org/proceedings/2013/nime2013_277.pdf + title: 'Mobile DJ: a Tangible, Mobile Platform for Active and Collaborative Music + Listening' + url: http://www.nime.org/proceedings/2013/nime2013_81.pdf year: 2013 - 
ENTRYTYPE: inproceedings - ID: Xiao2013 - abstract: 'The body channels rich layers of information when playing music, from - intricatemanipulations of the instrument to vivid personifications of expression. - Butwhen music is captured and replayed across distance and time, the performer''sbody - is too often trapped behind a small screen or absent entirely.This paper introduces - an interface to conjure the recorded performer bycombining the moving keys of - a player piano with life-sized projection of thepianist''s hands and upper body. - Inspired by reflections on a lacquered grandpiano, our interface evokes the sense - that the virtual pianist is playing thephysically moving keys.Through our interface, - we explore the question of how to viscerally simulate aperformer''s presence to - create immersive experiences. We discuss designchoices, outline a space of usage - scenarios and report reactions from users.' + ID: Caramiaux2013 + abstract: 'We present an overview of machine learning (ML) techniques and theirapplication + in interactive music and new digital instruments design. We firstgive to the non-specialist + reader an introduction to two ML tasks,classification and regression, that are + particularly relevant for gesturalinteraction. We then present a review of the + literature in current NIMEresearch that uses ML in musical gesture analysis and + gestural sound control.We describe the ways in which machine learning is useful + for creatingexpressive musical interaction, and in turn why live music performance + presentsa pertinent and challenging use case for machine learning.' address: 'Daejeon, Republic of Korea' - author: Xiao Xiao and Anna Pereira and Hiroshi Ishii - bibtex: "@inproceedings{Xiao2013,\n abstract = {The body channels rich layers of\ - \ information when playing music, from intricatemanipulations of the instrument\ - \ to vivid personifications of expression. 
Butwhen music is captured and replayed\ - \ across distance and time, the performer'sbody is too often trapped behind a\ - \ small screen or absent entirely.This paper introduces an interface to conjure\ - \ the recorded performer bycombining the moving keys of a player piano with life-sized\ - \ projection of thepianist's hands and upper body. Inspired by reflections on\ - \ a lacquered grandpiano, our interface evokes the sense that the virtual pianist\ - \ is playing thephysically moving keys.Through our interface, we explore the question\ - \ of how to viscerally simulate aperformer's presence to create immersive experiences.\ - \ We discuss designchoices, outline a space of usage scenarios and report reactions\ - \ from users.},\n address = {Daejeon, Republic of Korea},\n author = {Xiao Xiao\ - \ and Anna Pereira and Hiroshi Ishii},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178692},\n\ - \ issn = {2220-4806},\n keywords = {piano performance, musical expressivity, body\ - \ language, recorded music, player piano, augmented reality, embodiment},\n month\ - \ = {May},\n pages = {7--12},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {Conjuring the Recorded Pianist: A New Medium to Experience\ - \ Musical Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_28.pdf},\n\ + author: Baptiste Caramiaux and Atau Tanaka + bibtex: "@inproceedings{Caramiaux2013,\n abstract = {We present an overview of machine\ + \ learning (ML) techniques and theirapplication in interactive music and new digital\ + \ instruments design. We firstgive to the non-specialist reader an introduction\ + \ to two ML tasks,classification and regression, that are particularly relevant\ + \ for gesturalinteraction. 
We then present a review of the literature in current\ + \ NIMEresearch that uses ML in musical gesture analysis and gestural sound control.We\ + \ describe the ways in which machine learning is useful for creatingexpressive\ + \ musical interaction, and in turn why live music performance presentsa pertinent\ + \ and challenging use case for machine learning.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Baptiste Caramiaux and Atau Tanaka},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178490},\n issn = {2220-4806},\n keywords = {Machine\ + \ Learning, Data mining, Musical Expression, Musical Gestures, Analysis, Control,\ + \ Gesture, Sound},\n month = {May},\n pages = {513--518},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {Machine Learning of Musical\ + \ Gestures},\n url = {http://www.nime.org/proceedings/2013/nime2013_84.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178692 + doi: 10.5281/zenodo.1178490 issn: 2220-4806 - keywords: 'piano performance, musical expressivity, body language, recorded music, - player piano, augmented reality, embodiment' + keywords: 'Machine Learning, Data mining, Musical Expression, Musical Gestures, + Analysis, Control, Gesture, Sound' month: May - pages: 7--12 + pages: 513--518 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Conjuring the Recorded Pianist: A New Medium to Experience Musical Performance' - url: http://www.nime.org/proceedings/2013/nime2013_28.pdf + title: Machine Learning of Musical Gestures + url: http://www.nime.org/proceedings/2013/nime2013_84.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Taylor2013 - abstract: 'What is the place for Internet Art within the paradigm of remote musicperformance? 
- In this paper, we discuss techniques for live audiovisualstorytelling through - the Web browsers of remote viewers. We focus on theincorporation of socket technology - to create a real-time link between performerand audience, enabling manipulation - of Web media directly within the eachaudience member''s browser. Finally, we describe - Plum Street, an onlinemultimedia performance, and suggest that by involving remote - performance,appropriating Web media such as Google Maps, social media, and Web - Audio intothe work, we can tell stories in a way that more accurately addresses - modernlife and holistically fulfills the Web browser''s capabilities as a contemporaryperformance - instrument.' + ID: Oh2013 + abstract: 'Significant progress in the domains of speech- and singing-synthesis + has enhanced communicative potential of machines. To make computers more vocallyexpressive, + however, we need a deeper understanding of how nonlinguistic social signals are + patterned and perceived. In this paper, we focus on laughter expressions: how + a phrase of vocalized notes that we call ''''laughter'''' may bemodeled and performed + to implicate nuanced meaning imbued in the acousticsignal. In designing our model, + we emphasize (1) using high-level descriptors as control parameters, (2) enabling + real-time performable laughter, and (3) prioritizing expressiveness over realism. + We present an interactive systemimplemented in ChucK that allows users to systematically + play with the musicalingredients of laughter. A crowd sourced study on the perception + of synthesized laughter showed that our model is capable of generating a range + of laughter types, suggesting an exciting potential for expressive laughter synthesis.' address: 'Daejeon, Republic of Korea' - author: Ben Taylor and Jesse Allison - bibtex: "@inproceedings{Taylor2013,\n abstract = {What is the place for Internet\ - \ Art within the paradigm of remote musicperformance? 
In this paper, we discuss\ - \ techniques for live audiovisualstorytelling through the Web browsers of remote\ - \ viewers. We focus on theincorporation of socket technology to create a real-time\ - \ link between performerand audience, enabling manipulation of Web media directly\ - \ within the eachaudience member's browser. Finally, we describe Plum Street,\ - \ an onlinemultimedia performance, and suggest that by involving remote performance,appropriating\ - \ Web media such as Google Maps, social media, and Web Audio intothe work, we\ - \ can tell stories in a way that more accurately addresses modernlife and holistically\ - \ fulfills the Web browser's capabilities as a contemporaryperformance instrument.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Ben Taylor and Jesse Allison},\n\ + author: Jieun Oh and Ge Wang + bibtex: "@inproceedings{Oh2013,\n abstract = {Significant progress in the domains\ + \ of speech- and singing-synthesis has enhanced communicative potential of machines.\ + \ To make computers more vocallyexpressive, however, we need a deeper understanding\ + \ of how nonlinguistic social signals are patterned and perceived. In this paper,\ + \ we focus on laughter expressions: how a phrase of vocalized notes that we call\ + \ ''laughter'' may bemodeled and performed to implicate nuanced meaning imbued\ + \ in the acousticsignal. In designing our model, we emphasize (1) using high-level\ + \ descriptors as control parameters, (2) enabling real-time performable laughter,\ + \ and (3) prioritizing expressiveness over realism. We present an interactive\ + \ systemimplemented in ChucK that allows users to systematically play with the\ + \ musicalingredients of laughter. 
A crowd sourced study on the perception of synthesized\ + \ laughter showed that our model is capable of generating a range of laughter\ + \ types, suggesting an exciting potential for expressive laughter synthesis.},\n\ + \ address = {Daejeon, Republic of Korea},\n author = {Jieun Oh and Ge Wang},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178672},\n issn = {2220-4806},\n\ - \ keywords = {Remote Performance, Network Music, Internet Art, Storytelling},\n\ - \ month = {May},\n pages = {477--478},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Plum St: Live Digital Storytelling with Remote\ - \ Browsers},\n url = {http://www.nime.org/proceedings/2013/nime2013_281.pdf},\n\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178626},\n issn = {2220-4806},\n\ + \ keywords = {laughter, vocalization, synthesis model, real-time controller, interface\ + \ for musical expression},\n month = {May},\n pages = {190--195},\n publisher\ + \ = {Graduate School of Culture Technology, KAIST},\n title = {LOLOL: Laugh Out\ + \ Loud On Laptop},\n url = {http://www.nime.org/proceedings/2013/nime2013_86.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178672 + doi: 10.5281/zenodo.1178626 issn: 2220-4806 - keywords: 'Remote Performance, Network Music, Internet Art, Storytelling' + keywords: 'laughter, vocalization, synthesis model, real-time controller, interface + for musical expression' month: May - pages: 477--478 + pages: 190--195 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Plum St: Live Digital Storytelling with Remote Browsers' - url: http://www.nime.org/proceedings/2013/nime2013_281.pdf + title: 'LOLOL: Laugh Out Loud On Laptop' + url: http://www.nime.org/proceedings/2013/nime2013_86.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: 
Roberts2013a - abstract: 'Web technologies provide an incredible opportunity to present new musicalinterfaces - to new audiences. Applications written in JavaScript and designed torun in the - browser offer remarkable performance, mobile/desktop portability andlongevity - due to standardization. Our research examines the use and potentialof native web - technologies for musical expression. We introduce two librariestowards this end: - Gibberish.js, a heavily optimized audio DSP library, andInterface.js, a GUI toolkit - that works with mouse, touch and motion events.Together these libraries provide - a complete system for defining musicalinstruments that can be used in both desktop - and mobile browsers. Interface.jsalso enables control of remote synthesis applications - by including anapplication that translates the socket protocol used by browsers - into both MIDIand OSC messages.' + ID: Donnarumma2013 + abstract: 'We present the first combined use of the electromyogram (EMG) andmechanomyogram + (MMG), two biosignals that result from muscular activity, forinteractive music + applications. We exploit differences between these twosignals, as reported in + the biomedical literature, to create bi-modalsonification and sound synthesis + mappings that allow performers to distinguishthe two components in a single complex + arm gesture. We study non-expertplayers'' ability to articulate the different + modalities. Results show thatpurposely designed gestures and mapping techniques + enable novices to rapidlylearn to independently control the two biosignals.' address: 'Daejeon, Republic of Korea' - author: Charles Roberts and Graham Wakefield and Matthew Wright - bibtex: "@inproceedings{Roberts2013a,\n abstract = {Web technologies provide an\ - \ incredible opportunity to present new musicalinterfaces to new audiences. 
Applications\ - \ written in JavaScript and designed torun in the browser offer remarkable performance,\ - \ mobile/desktop portability andlongevity due to standardization. Our research\ - \ examines the use and potentialof native web technologies for musical expression.\ - \ We introduce two librariestowards this end: Gibberish.js, a heavily optimized\ - \ audio DSP library, andInterface.js, a GUI toolkit that works with mouse, touch\ - \ and motion events.Together these libraries provide a complete system for defining\ - \ musicalinstruments that can be used in both desktop and mobile browsers. Interface.jsalso\ - \ enables control of remote synthesis applications by including anapplication\ - \ that translates the socket protocol used by browsers into both MIDIand OSC messages.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Charles Roberts and Graham\ - \ Wakefield and Matthew Wright},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178648},\n\ - \ issn = {2220-4806},\n keywords = {mobile devices, javascript, browser-based\ - \ NIMEs, web audio, websockets},\n month = {May},\n pages = {313--318},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {The Web Browser\ - \ As Synthesizer And Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_282.pdf},\n\ + author: Marco Donnarumma and Baptiste Caramiaux and Atau Tanaka + bibtex: "@inproceedings{Donnarumma2013,\n abstract = {We present the first combined\ + \ use of the electromyogram (EMG) andmechanomyogram (MMG), two biosignals that\ + \ result from muscular activity, forinteractive music applications. We exploit\ + \ differences between these twosignals, as reported in the biomedical literature,\ + \ to create bi-modalsonification and sound synthesis mappings that allow performers\ + \ to distinguishthe two components in a single complex arm gesture. 
We study non-expertplayers'\ + \ ability to articulate the different modalities. Results show thatpurposely designed\ + \ gestures and mapping techniques enable novices to rapidlylearn to independently\ + \ control the two biosignals.},\n address = {Daejeon, Republic of Korea},\n author\ + \ = {Marco Donnarumma and Baptiste Caramiaux and Atau Tanaka},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178504},\n issn = {2220-4806},\n keywords = {NIME, sensorimotor\ + \ system, EMG, MMG, biosignal, multimodal, mapping},\n month = {May},\n pages\ + \ = {128--131},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ + \ title = {Muscular Interactions. Combining {EMG} and MMG sensing for musical\ + \ practice},\n url = {http://www.nime.org/proceedings/2013/nime2013_90.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178648 + doi: 10.5281/zenodo.1178504 issn: 2220-4806 - keywords: 'mobile devices, javascript, browser-based NIMEs, web audio, websockets' + keywords: 'NIME, sensorimotor system, EMG, MMG, biosignal, multimodal, mapping' month: May - pages: 313--318 + pages: 128--131 publisher: 'Graduate School of Culture Technology, KAIST' - title: The Web Browser As Synthesizer And Interface - url: http://www.nime.org/proceedings/2013/nime2013_282.pdf + title: Muscular Interactions. Combining EMG and MMG sensing for musical practice + url: http://www.nime.org/proceedings/2013/nime2013_90.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Grosshauser2013 - abstract: 'Several new technologies to capture motion, gesture and forces for musical - instrument players'' analyses have been developed in the last years. In research - and for augmented instruments one parameter is underrepresented so far. 
It is - finger position and pressure measurement, applied by the musician while playing - the musical instrument. In this paper we show a flexible linear-potentiometer - and forcesensitive-resistor (FSR) based solution for position, pressure and force - sensing between the contact point of the fingers and the musical instrument. A - flexible matrix printed circuit board (PCB) is fixed on a piano key. We further - introduce linear potentiometer based left hand finger position sensing for string - instruments, integrated into a violin and a guitar finger board. Several calibration - and measurement scenarios are shown. The violin sensor was evaluated with 13 music - students regarding playability and robustness of the system. Main focus was a - the integration of the sensors into these two traditional musical instruments - as unobtrusively as possible to keep natural haptic playing sensation. The musicians - playing the violin in different performance situations stated good playability - and no differences in the haptic sensation while playing. The piano sensor is - rated, due to interviews after testing it in a conventional keyboard quite unobtrusive, - too, but still evokes a different haptic sensation.' + ID: Honigman2013 + abstract: 'This paper describes a new framework for music creation using 3D audio + andvisual techniques. It describes the Third Room, which uses a Kinect to placeusers + in a virtual environment to interact with new instruments for musicalexpression. + Users can also interact with smart objects, including the Ember(modified mbira + digital interface) and the Fluid (a wireless six degrees offreedom and touch controller). + This project also includes new techniques for 3Daudio connected to a 3D virtual + space using multi-channel speakers anddistributed robotic instruments.' 
address: 'Daejeon, Republic of Korea' - author: Tobias Grosshauser and Gerhard Tröster - bibtex: "@inproceedings{Grosshauser2013,\n abstract = {Several new technologies\ - \ to capture motion, gesture and forces for musical instrument players' analyses\ - \ have been developed in the last years. In research and for augmented instruments\ - \ one parameter is underrepresented so far. It is finger position and pressure\ - \ measurement, applied by the musician while playing the musical instrument. In\ - \ this paper we show a flexible linear-potentiometer and forcesensitive-resistor\ - \ (FSR) based solution for position, pressure and force sensing between the contact\ - \ point of the fingers and the musical instrument. A flexible matrix printed circuit\ - \ board (PCB) is fixed on a piano key. We further introduce linear potentiometer\ - \ based left hand finger position sensing for string instruments, integrated into\ - \ a violin and a guitar finger board. Several calibration and measurement scenarios\ - \ are shown. The violin sensor was evaluated with 13 music students regarding\ - \ playability and robustness of the system. Main focus was a the integration of\ - \ the sensors into these two traditional musical instruments as unobtrusively\ - \ as possible to keep natural haptic playing sensation. The musicians playing\ - \ the violin in different performance situations stated good playability and no\ - \ differences in the haptic sensation while playing. 
The piano sensor is rated,\ - \ due to interviews after testing it in a conventional keyboard quite unobtrusive,\ - \ too, but still evokes a different haptic sensation.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Tobias Grosshauser and Gerhard Tr{\\''o}ster},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178538},\n issn = {2220-4806},\n\ - \ keywords = {Sensor, Piano, Violin, Guitar, Position, Pressure, Keyboard},\n\ - \ month = {May},\n pages = {479--484},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Finger Position and Pressure Sensing Techniques\ - \ for String and Keyboard Instruments},\n url = {http://www.nime.org/proceedings/2013/nime2013_286.pdf},\n\ + author: Colin Honigman and Andrew Walton and Ajay Kapur + bibtex: "@inproceedings{Honigman2013,\n abstract = {This paper describes a new framework\ + \ for music creation using 3D audio andvisual techniques. It describes the Third\ + \ Room, which uses a Kinect to placeusers in a virtual environment to interact\ + \ with new instruments for musicalexpression. Users can also interact with smart\ + \ objects, including the Ember(modified mbira digital interface) and the Fluid\ + \ (a wireless six degrees offreedom and touch controller). 
This project also includes\ + \ new techniques for 3Daudio connected to a 3D virtual space using multi-channel\ + \ speakers anddistributed robotic instruments.},\n address = {Daejeon, Republic\ + \ of Korea},\n author = {Colin Honigman and Andrew Walton and Ajay Kapur},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178556},\n issn = {2220-4806},\n keywords\ + \ = {Kinect Camera, Third Space, Interface, Virtual Reality, Natural Interaction,\ + \ Robotics, Arduino},\n month = {May},\n pages = {29--34},\n publisher = {Graduate\ + \ School of Culture Technology, KAIST},\n title = {The Third Room: A {3D} Virtual\ + \ Music Framework},\n url = {http://www.nime.org/proceedings/2013/nime2013_92.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178538 + doi: 10.5281/zenodo.1178556 issn: 2220-4806 - keywords: 'Sensor, Piano, Violin, Guitar, Position, Pressure, Keyboard' + keywords: 'Kinect Camera, Third Space, Interface, Virtual Reality, Natural Interaction, + Robotics, Arduino' month: May - pages: 479--484 + pages: 29--34 publisher: 'Graduate School of Culture Technology, KAIST' - title: Finger Position and Pressure Sensing Techniques for String and Keyboard Instruments - url: http://www.nime.org/proceedings/2013/nime2013_286.pdf + title: 'The Third Room: A 3D Virtual Music Framework' + url: http://www.nime.org/proceedings/2013/nime2013_92.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Allison2013 - abstract: 'Distributed performance systems present many challenges to the artist - inmanaging performance information, distribution and coordination of interface - tomany users, and cross platform support to provide a reasonable level ofinteraction - to the widest possible user base.Now that many features of HTML 5 are implemented, - powerful browser basedinterfaces can be utilized for 
distribution across a variety - of static andmobile devices. The author proposes leveraging the power of a web - applicationto handle distribution of user interfaces and passing interactions - via OSC toand from realtime audio/video processing software. Interfaces developed - in thisfashion can reach potential performers by distributing a unique user interfaceto - any device with a browser anywhere in the world.' + ID: Wolf2013 + abstract: 'As any computer user employs the Internet to accomplish everyday activities, + a flow of data packets moves across the network, forming their own patterns in + response to his or her actions. Artists and sound designers who are interested + in accessing that data to make music must currently possess low-level knowledge + of Internet protocols and spend signifi-cant effort working with low-level networking + code. We have created SonNet, a new software tool that lowers these practical + barriers to experimenting and composing with network data. SonNet executes packet-sniffng + and network connection state analysis automatically, and it includes an easy-touse + ChucK object that can be instantiated, customized, and queried from a user''s + own code. In this paper, we present the design and implementation of the SonNet + system, and we discuss a pilot evaluation of the system with computer music composers. + We also discuss compositional applications of SonNet and illustrate the use of + the system in an example composition.' 
address: 'Daejeon, Republic of Korea' - author: Jesse Allison and Yemin Oh and Benjamin Taylor - bibtex: "@inproceedings{Allison2013,\n abstract = {Distributed performance systems\ - \ present many challenges to the artist inmanaging performance information, distribution\ - \ and coordination of interface tomany users, and cross platform support to provide\ - \ a reasonable level ofinteraction to the widest possible user base.Now that many\ - \ features of HTML 5 are implemented, powerful browser basedinterfaces can be\ - \ utilized for distribution across a variety of static andmobile devices. The\ - \ author proposes leveraging the power of a web applicationto handle distribution\ - \ of user interfaces and passing interactions via OSC toand from realtime audio/video\ - \ processing software. Interfaces developed in thisfashion can reach potential\ - \ performers by distributing a unique user interfaceto any device with a browser\ - \ anywhere in the world.},\n address = {Daejeon, Republic of Korea},\n author\ - \ = {Jesse Allison and Yemin Oh and Benjamin Taylor},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178461},\n issn = {2220-4806},\n keywords = {NIME, distributed\ - \ performance systems, Ruby on Rails, collaborative performance, distributed instruments,\ - \ distributed interface, HTML5, browser based interface},\n month = {May},\n pages\ - \ = {1--6},\n publisher = {Graduate School of Culture Technology, KAIST},\n title\ - \ = {NEXUS: Collaborative Performance for the Masses, Handling Instrument Interface\ - \ Distribution through the Web},\n url = {http://www.nime.org/proceedings/2013/nime2013_287.pdf},\n\ + author: KatieAnna E Wolf and Rebecca Fiebrink + bibtex: "@inproceedings{Wolf2013,\n abstract = {As any computer user employs the\ + \ Internet to accomplish everyday activities, a flow of data packets moves across\ + \ the network, forming their own patterns 
in response to his or her actions. Artists\ + \ and sound designers who are interested in accessing that data to make music\ + \ must currently possess low-level knowledge of Internet protocols and spend signifi-cant\ + \ effort working with low-level networking code. We have created SonNet, a new\ + \ software tool that lowers these practical barriers to experimenting and composing\ + \ with network data. SonNet executes packet-sniffng and network connection state\ + \ analysis automatically, and it includes an easy-touse ChucK object that can\ + \ be instantiated, customized, and queried from a user's own code. In this paper,\ + \ we present the design and implementation of the SonNet system, and we discuss\ + \ a pilot evaluation of the system with computer music composers. We also discuss\ + \ compositional applications of SonNet and illustrate the use of the system in\ + \ an example composition.},\n address = {Daejeon, Republic of Korea},\n author\ + \ = {KatieAnna E Wolf and Rebecca Fiebrink},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178690},\n issn = {2220-4806},\n keywords = {Sonification,\ + \ network data, compositional tools},\n month = {May},\n pages = {503--506},\n\ + \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {SonNet:\ + \ A Code Interface for Sonifying Computer Network Data},\n url = {http://www.nime.org/proceedings/2013/nime2013_94.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178461 + doi: 10.5281/zenodo.1178690 issn: 2220-4806 - keywords: 'NIME, distributed performance systems, Ruby on Rails, collaborative performance, - distributed instruments, distributed interface, HTML5, browser based interface' + keywords: 'Sonification, network data, compositional tools' month: May - pages: 1--6 + pages: 503--506 publisher: 
'Graduate School of Culture Technology, KAIST' - title: 'NEXUS: Collaborative Performance for the Masses, Handling Instrument Interface - Distribution through the Web' - url: http://www.nime.org/proceedings/2013/nime2013_287.pdf + title: 'SonNet: A Code Interface for Sonifying Computer Network Data' + url: http://www.nime.org/proceedings/2013/nime2013_94.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Baldan2013 - abstract: 'This paper presents an audio-based tennis simulation game for mobile - devices, which uses motion input and non-verbal audio feedback as exclusive means - of interaction. Players have to listen carefully to the provided auditory clues, - like racquet hits and ball bounces, rhythmically synchronizing their movements - in order to keep the ball into play. The device can be swung freely and act as - a full-fledged motionbased controller, as the game does not rely at all on visual - feedback and the device display can thus be ignored. The game aims to be entertaining - but also effective for educational purposes, such as ear training or improvement - of the sense of timing, and enjoyable both by visually-impaired and sighted users.' + ID: Tahiroglu2013 + abstract: 'This paper introduces a novel collaborative environment (PESI) in whichperformers + are not only free to move and interact with each other but wheretheir social interactions + contribute to the sonic outcome. PESI system isdesigned for co-located collaboration + and provides embodied and spatialopportunities for musical exploration. To evaluate + PESI with skilled musicians,a user-test jam session was conducted. Musicians'' + comments indicate that thesystem facilitates group interaction finely to bring + up further intentions tomusical ideas. 
Results from our user-test jam session + indicate that, through some modificationof the ''in-space'' response to the improvisation, + and through more intuitiveinteractions with the ''on-body'' mobile instruments, + we could make thecollaborative music activity a more engaging and active experience. + Despitebeing only user-tested once with musicians, the group interview has raisedfruitful + discussions on the precise details of the system components.Furthermore, the paradigms + of musical interaction and social actions in groupactivities need to be questioned + when we seek design requirements for such acollaborative environment. We introduced + a system that we believe can open upnew ways of musical exploration in group music + activity with a number ofmusicians. The system brings up the affordances of accessible + technologieswhile creating opportunities for novel design applications to be explored. + Ourresearch proposes further development of the system, focusing on movementbehavior + in long-term interaction between performers. We plan to implement thisversion + and evaluate design and implementation with distinct skilled musicians.' address: 'Daejeon, Republic of Korea' - author: Stefano Baldan and Amalia De Götzen and Stefania Serafin - bibtex: "@inproceedings{Baldan2013,\n abstract = {This paper presents an audio-based\ - \ tennis simulation game for mobile devices, which uses motion input and non-verbal\ - \ audio feedback as exclusive means of interaction. Players have to listen carefully\ - \ to the provided auditory clues, like racquet hits and ball bounces, rhythmically\ - \ synchronizing their movements in order to keep the ball into play. The device\ - \ can be swung freely and act as a full-fledged motionbased controller, as the\ - \ game does not rely at all on visual feedback and the device display can thus\ - \ be ignored. 
The game aims to be entertaining but also effective for educational\ - \ purposes, such as ear training or improvement of the sense of timing, and enjoyable\ - \ both by visually-impaired and sighted users.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Stefano Baldan and Amalia De G{\\''o}tzen and Stefania\ - \ Serafin},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178470},\n issn\ - \ = {2220-4806},\n keywords = {Audio game, mobile devices, sonic interaction design,\ - \ rhythmic interaction, motion-based},\n month = {May},\n pages = {200--201},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Sonic\ - \ Tennis: a rhythmic interaction game for mobile devices},\n url = {http://www.nime.org/proceedings/2013/nime2013_288.pdf},\n\ + author: Koray Tahiroğlu and Nuno N. Correia and Miguel Espada + bibtex: "@inproceedings{Tahiroglu2013,\n abstract = {This paper introduces a novel\ + \ collaborative environment (PESI) in whichperformers are not only free to move\ + \ and interact with each other but wheretheir social interactions contribute to\ + \ the sonic outcome. PESI system isdesigned for co-located collaboration and provides\ + \ embodied and spatialopportunities for musical exploration. To evaluate PESI\ + \ with skilled musicians,a user-test jam session was conducted. Musicians' comments\ + \ indicate that thesystem facilitates group interaction finely to bring up further\ + \ intentions tomusical ideas. 
Results from our user-test jam session indicate\ + \ that, through some modificationof the 'in-space' response to the improvisation,\ + \ and through more intuitiveinteractions with the 'on-body' mobile instruments,\ + \ we could make thecollaborative music activity a more engaging and active experience.\ + \ Despitebeing only user-tested once with musicians, the group interview has raisedfruitful\ + \ discussions on the precise details of the system components.Furthermore, the\ + \ paradigms of musical interaction and social actions in groupactivities need\ + \ to be questioned when we seek design requirements for such acollaborative environment.\ + \ We introduced a system that we believe can open upnew ways of musical exploration\ + \ in group music activity with a number ofmusicians. The system brings up the\ + \ affordances of accessible technologieswhile creating opportunities for novel\ + \ design applications to be explored. Ourresearch proposes further development\ + \ of the system, focusing on movementbehavior in long-term interaction between\ + \ performers. We plan to implement thisversion and evaluate design and implementation\ + \ with distinct skilled musicians.},\n address = {Daejeon, Republic of Korea},\n\ + \ author = {Koray Tahiro{\\u{g}}lu and Nuno N. 
Correia and Miguel Espada},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178666},\n issn = {2220-4806},\n keywords\ + \ = {Affordances, collaboration, social interaction, mobile music, extended system,\ + \ NIME},\n month = {May},\n pages = {35--40},\n publisher = {Graduate School of\ + \ Culture Technology, KAIST},\n title = {PESI Extended System: In Space, On Body,\ + \ with 3 Musicians},\n url = {http://www.nime.org/proceedings/2013/nime2013_97.pdf},\n\ \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178470 + doi: 10.5281/zenodo.1178666 issn: 2220-4806 - keywords: 'Audio game, mobile devices, sonic interaction design, rhythmic interaction, - motion-based' + keywords: 'Affordances, collaboration, social interaction, mobile music, extended + system, NIME' month: May - pages: 200--201 + pages: 35--40 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Sonic Tennis: a rhythmic interaction game for mobile devices' - url: http://www.nime.org/proceedings/2013/nime2013_288.pdf + title: 'PESI Extended System: In Space, On Body, with 3 Musicians' + url: http://www.nime.org/proceedings/2013/nime2013_97.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Lee2013 - abstract: 'This work aims at a music piece for large-scale audience participation - usingmobile phones as musical instruments at a music performance. Utilizing theubiquity - of smart phones, we attempted to accomplish audience engagement bycrafting an - accessible musical instrument with which audience can be a part ofthe performance. 
- Drawing lessons learnt from the creative works of mobilemusic, audience participation, - and the networked instrument a mobile musicalinstrument application is developed - so that audience can download the app atthe concert, play the instrument instantly, - interact with other audiencemembers, and contribute to the music by sound generated - from their mobilephones. The post-survey results indicate that the instrument - was easy to use,and the audience felt connected to the music and other musicians.' + ID: Sanganeria2013 + abstract: 'GrainProc is a touchscreen interface for real-time granular synthesis + designedfor live performance. The user provides a real-time audio input (electricguitar, + for example) as a granularization source and controls various synthesisparameters + with their fingers or toes. The control parameters are designed togive the user + access to intuitive and expressive live granular manipulations.' address: 'Daejeon, Republic of Korea' - author: Sang Won Lee and Jason Freeman - bibtex: "@inproceedings{Lee2013,\n abstract = {This work aims at a music piece for\ - \ large-scale audience participation usingmobile phones as musical instruments\ - \ at a music performance. Utilizing theubiquity of smart phones, we attempted\ - \ to accomplish audience engagement bycrafting an accessible musical instrument\ - \ with which audience can be a part ofthe performance. Drawing lessons learnt\ - \ from the creative works of mobilemusic, audience participation, and the networked\ - \ instrument a mobile musicalinstrument application is developed so that audience\ - \ can download the app atthe concert, play the instrument instantly, interact\ - \ with other audiencemembers, and contribute to the music by sound generated from\ - \ their mobilephones. 
The post-survey results indicate that the instrument was\ - \ easy to use,and the audience felt connected to the music and other musicians.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Sang Won Lee and Jason Freeman},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178594},\n issn = {2220-4806},\n\ - \ keywords = {mobile music, audience participation, networked instrument},\n month\ - \ = {May},\n pages = {450--455},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {echobo : Audience Participation Using The Mobile Music Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_291.pdf},\n year = {2013}\n\ - }\n" + author: Mayank Sanganeria and Kurt Werner + bibtex: "@inproceedings{Sanganeria2013,\n abstract = {GrainProc is a touchscreen\ + \ interface for real-time granular synthesis designedfor live performance. The\ + \ user provides a real-time audio input (electricguitar, for example) as a granularization\ + \ source and controls various synthesisparameters with their fingers or toes.\ + \ The control parameters are designed togive the user access to intuitive and\ + \ expressive live granular manipulations.},\n address = {Daejeon, Republic of\ + \ Korea},\n author = {Mayank Sanganeria and Kurt Werner},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178652},\n issn = {2220-4806},\n keywords = {Granular\ + \ synthesis, touch screen interface, toe control, real-time, CCRMA},\n month =\ + \ {May},\n pages = {223--226},\n publisher = {Graduate School of Culture Technology,\ + \ KAIST},\n title = {GrainProc: a real-time granular synthesis interface for live\ + \ performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_99.pdf},\n\ + \ year = {2013}\n}\n" booktitle: Proceedings of the International Conference on New 
Interfaces for Musical Expression - doi: 10.5281/zenodo.1178594 + doi: 10.5281/zenodo.1178652 issn: 2220-4806 - keywords: 'mobile music, audience participation, networked instrument' + keywords: 'Granular synthesis, touch screen interface, toe control, real-time, CCRMA' month: May - pages: 450--455 + pages: 223--226 publisher: 'Graduate School of Culture Technology, KAIST' - title: 'echobo : Audience Participation Using The Mobile Music Instrument' - url: http://www.nime.org/proceedings/2013/nime2013_291.pdf + title: 'GrainProc: a real-time granular synthesis interface for live performance' + url: http://www.nime.org/proceedings/2013/nime2013_99.pdf year: 2013 - ENTRYTYPE: inproceedings - ID: Trento2013 - abstract: 'This paper describes the development of a prototype of a sonic toy forpre-scholar - kids. The device, which is a modified version of a footballratchet, is based on - the spinning gesture and it allows to experience fourdifferent types of auditory - feedback. These algorithms let a kid play withmusic rhythm, generate a continuous - sound feedback and control the pitch of apiece of music. An evaluation test of - the device has been performed withfourteen kids in a kindergarten. Results and - observations showed that kidspreferred the algorithms based on the exploration - of the music rhythm and onpitch shifting.' - address: 'Daejeon, Republic of Korea' - author: Stefano Trento and Stefania Serafin - bibtex: "@inproceedings{Trento2013,\n abstract = {This paper describes the development\ - \ of a prototype of a sonic toy forpre-scholar kids. The device, which is a modified\ - \ version of a footballratchet, is based on the spinning gesture and it allows\ - \ to experience fourdifferent types of auditory feedback. These algorithms let\ - \ a kid play withmusic rhythm, generate a continuous sound feedback and control\ - \ the pitch of apiece of music. An evaluation test of the device has been performed\ - \ withfourteen kids in a kindergarten. 
Results and observations showed that kidspreferred\ - \ the algorithms based on the exploration of the music rhythm and onpitch shifting.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Stefano Trento and Stefania\ - \ Serafin},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178682},\n issn\ - \ = {2220-4806},\n keywords = {Sonic toy, children, auditory feedback.},\n month\ - \ = {May},\n pages = {456--459},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {Flag beat: a novel interface for rhythmic musical expression\ - \ for kids},\n url = {http://www.nime.org/proceedings/2013/nime2013_295.pdf},\n\ - \ year = {2013}\n}\n" + ID: Gaye2006 + address: 'Paris, France' + author: 'Gaye, Lalya and Holmquist, Lars E. and Behrendt, Frauke and Tanaka, Atau' + bibtex: "@inproceedings{Gaye2006,\n address = {Paris, France},\n author = {Gaye,\ + \ Lalya and Holmquist, Lars E. and Behrendt, Frauke and Tanaka, Atau},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176909},\n issn = {2220-4806},\n pages\ + \ = {22--25},\n title = {Mobile Music Technology: Report on an Emerging Community},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_022.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178682 + doi: 10.5281/zenodo.1176909 issn: 2220-4806 - keywords: 'Sonic toy, children, auditory feedback.' 
- month: May - pages: 456--459 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Flag beat: a novel interface for rhythmic musical expression for kids' - url: http://www.nime.org/proceedings/2013/nime2013_295.pdf - year: 2013 + pages: 22--25 + title: 'Mobile Music Technology: Report on an Emerging Community' + url: http://www.nime.org/proceedings/2006/nime2006_022.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Place2013 - abstract: 'The AlphaSphere is an electronic musical instrument featuring a series - oftactile, pressure sensitive touch pads arranged in a spherical form. It isdesigned - to offer a new playing style, while allowing for the expressivereal-time modulation - of sound available in electronic-based music. It is alsodesigned to be programmable, - enabling the flexibility to map a series ofdifferent notational arrangements to - the pad-based interface. The AlphaSphere functions as an HID, MIDI and OSC device, - which connects to acomputer and/or independent MIDI device, and its control messages - can be mappedthrough the AlphaLive software. Our primary motivations for creating - theAlphaSphere are to design an expressive music interface which can exploit thesound - palate of synthesizers in a design which allows for the mapping ofnotational arrangements.' - address: 'Daejeon, Republic of Korea' - author: Adam Place and Liam Lacey and Thomas Mitchell - bibtex: "@inproceedings{Place2013,\n abstract = {The AlphaSphere is an electronic\ - \ musical instrument featuring a series oftactile, pressure sensitive touch pads\ - \ arranged in a spherical form. It isdesigned to offer a new playing style, while\ - \ allowing for the expressivereal-time modulation of sound available in electronic-based\ - \ music. It is alsodesigned to be programmable, enabling the flexibility to map\ - \ a series ofdifferent notational arrangements to the pad-based interface. 
The\ - \ AlphaSphere functions as an HID, MIDI and OSC device, which connects to acomputer\ - \ and/or independent MIDI device, and its control messages can be mappedthrough\ - \ the AlphaLive software. Our primary motivations for creating theAlphaSphere\ - \ are to design an expressive music interface which can exploit thesound palate\ - \ of synthesizers in a design which allows for the mapping ofnotational arrangements.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Adam Place and Liam Lacey\ - \ and Thomas Mitchell},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178642},\n\ - \ issn = {2220-4806},\n keywords = {AlphaSphere, MIDI, HID, polyphonic aftertouch,\ - \ open source},\n month = {May},\n pages = {491--492},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {AlphaSphere},\n url = {http://www.nime.org/proceedings/2013/nime2013_300.pdf},\n\ - \ year = {2013}\n}\n" + ID: Tanaka2006 + abstract: 'This paper presents the concepts and techniques used in afamily of location + based multimedia works. The paper hasthree main sections: 1.) to describe the + architecture of anaudio-visual hardware/software framework we havedeveloped for + the realization of a series of locative mediaartworks, 2.) to discuss the theoretical + and conceptualunderpinnings motivating the design of the technicalframework, and + 3.) to elicit from this, fundamental issuesand questions that can be generalized + and applicable to thegrowing practice of locative media.' + address: 'Paris, France' + author: 'Tanaka, Atau and Gemeinboeck, Petra' + bibtex: "@inproceedings{Tanaka2006,\n abstract = {This paper presents the concepts\ + \ and techniques used in afamily of location based multimedia works. The paper\ + \ hasthree main sections: 1.) 
to describe the architecture of anaudio-visual hardware/software\ + \ framework we havedeveloped for the realization of a series of locative mediaartworks,\ + \ 2.) to discuss the theoretical and conceptualunderpinnings motivating the design\ + \ of the technicalframework, and 3.) to elicit from this, fundamental issuesand\ + \ questions that can be generalized and applicable to thegrowing practice of locative\ + \ media.},\n address = {Paris, France},\n author = {Tanaka, Atau and Gemeinboeck,\ + \ Petra},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177013},\n issn = {2220-4806},\n\ + \ keywords = {Mobile music, urban fiction, locative media. },\n pages = {26--30},\n\ + \ title = {A Framework for Spatial Interaction in Locative Media},\n url = {http://www.nime.org/proceedings/2006/nime2006_026.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178642 + doi: 10.5281/zenodo.1177013 issn: 2220-4806 - keywords: 'AlphaSphere, MIDI, HID, polyphonic aftertouch, open source' - month: May - pages: 491--492 - publisher: 'Graduate School of Culture Technology, KAIST' - title: AlphaSphere - url: http://www.nime.org/proceedings/2013/nime2013_300.pdf - year: 2013 + keywords: 'Mobile music, urban fiction, locative media. ' + pages: 26--30 + title: A Framework for Spatial Interaction in Locative Media + url: http://www.nime.org/proceedings/2006/nime2006_026.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Roberts2013 - abstract: 'We present research that extends the scope of the mobile application - Control, aprototyping environment for defining multimodal interfaces that controlreal-time - artistic and musical performances. 
Control allows users to rapidlycreate interfaces - employing a variety of modalities, including: speechrecognition, computer vision, - musical feature extraction, touchscreen widgets,and inertial sensor data. Information - from these modalities can be transmittedwirelessly to remote applications. Interfaces - are declared using JSON and canbe extended with JavaScript to add complex behaviors, - including the concurrentfusion of multimodal signals. By simplifying the creation - of interfaces viathese simple markup files, Control allows musicians and artists - to make novelapplications that use and combine both discrete and continuous data - from thewide range of sensors available on commodity mobile devices.' - address: 'Daejeon, Republic of Korea' - author: Charles Roberts and Angus Forbes and Tobias Höllerer - bibtex: "@inproceedings{Roberts2013,\n abstract = {We present research that extends\ - \ the scope of the mobile application Control, aprototyping environment for defining\ - \ multimodal interfaces that controlreal-time artistic and musical performances.\ - \ Control allows users to rapidlycreate interfaces employing a variety of modalities,\ - \ including: speechrecognition, computer vision, musical feature extraction, touchscreen\ - \ widgets,and inertial sensor data. Information from these modalities can be transmittedwirelessly\ - \ to remote applications. Interfaces are declared using JSON and canbe extended\ - \ with JavaScript to add complex behaviors, including the concurrentfusion of\ - \ multimodal signals. 
By simplifying the creation of interfaces viathese simple\ - \ markup files, Control allows musicians and artists to make novelapplications\ - \ that use and combine both discrete and continuous data from thewide range of\ - \ sensors available on commodity mobile devices.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Charles Roberts and Angus Forbes and Tobias H{\\''o}llerer},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178646},\n issn = {2220-4806},\n\ - \ keywords = {Music, mobile, multimodal, interaction},\n month = {May},\n pages\ - \ = {102--105},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Enabling Multimodal Mobile Interfaces for Musical Performance},\n url\ - \ = {http://www.nime.org/proceedings/2013/nime2013_303.pdf},\n year = {2013}\n\ + ID: Rohs2006 + address: 'Paris, France' + author: 'Rohs, Michael and Essl, Georg and Roth, Martin' + bibtex: "@inproceedings{Rohs2006,\n address = {Paris, France},\n author = {Rohs,\ + \ Michael and Essl, Georg and Roth, Martin},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176997},\n issn = {2220-4806},\n pages = {31--36},\n title\ + \ = {CaMus: Live Music Performance using Camera Phones and Visual Grid Tracking},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_031.pdf},\n year = {2006}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178646 + doi: 10.5281/zenodo.1176997 issn: 2220-4806 - keywords: 'Music, mobile, multimodal, interaction' - month: May - pages: 102--105 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Enabling Multimodal Mobile Interfaces for Musical Performance - url: http://www.nime.org/proceedings/2013/nime2013_303.pdf - year: 2013 + pages: 31--36 + title: 'CaMus: 
Live Music Performance using Camera Phones and Visual Grid Tracking' + url: http://www.nime.org/proceedings/2006/nime2006_031.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Hadjakos2013 - abstract: Music ensembles have to synchronize themselves with a very high precision - inorder to achieve the desired musical results. For that purpose the musicians - donot only rely on their auditory perception but also perceive and interpret themovements - and gestures of their ensemble colleges. In this paper we present aKinect-based - method to analyze ensemble play based on head tracking. We discussfirst experimental - results with a violin duo performance. - address: 'Daejeon, Republic of Korea' - author: Aristotelis Hadjakos and Tobias Grosshauser - bibtex: "@inproceedings{Hadjakos2013,\n abstract = {Music ensembles have to synchronize\ - \ themselves with a very high precision inorder to achieve the desired musical\ - \ results. For that purpose the musicians donot only rely on their auditory perception\ - \ but also perceive and interpret themovements and gestures of their ensemble\ - \ colleges. In this paper we present aKinect-based method to analyze ensemble\ - \ play based on head tracking. 
We discussfirst experimental results with a violin\ - \ duo performance.},\n address = {Daejeon, Republic of Korea},\n author = {Aristotelis\ - \ Hadjakos and Tobias Grosshauser},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178540},\n\ - \ issn = {2220-4806},\n keywords = {Kinect, Ensemble, Synchronization, Strings,\ - \ Functional Data Analysis, Cross-Correlogram},\n month = {May},\n pages = {106--110},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Motion\ - \ and Synchronization Analysis of Musical Ensembles with the Kinect},\n url =\ - \ {http://www.nime.org/proceedings/2013/nime2013_304.pdf},\n year = {2013}\n}\n" + ID: Schiemer2006 + abstract: 'This paper describes two new live performance scenarios for performing + music using bluetooth-enabled mobile phones. Interaction between mobile phones + via wireless link is a key feature of the performance interface for each scenario. + Both scenarios are discussed in the context of two publicly performed works for + an ensemble of players in which mobile phone handsets are used both as sound sources + and as hand-held controllers. In both works mobile phones are mounted in a specially + devised pouch attached to a cord and physically swung to produce audio chorusing. + During performance some players swing phones while others operate phones as hand-held + controllers. Wireless connectivity enables interaction between flying and hand-held + phones. Each work features different bluetooth implementations. In one a dedicated + mobile phone acts as a server that interconnects multiple clients, while in the + other point to point communication takes place between clients on an ad hoc basis. 
+ The paper summarises bluetooth tools designed for live performance realisation + and concludes with a comparative evaluation of both scenarios for future implementation + of performance by large ensembles of nonexpert players performing microtonal music + using ubiquitous technology. ' + address: 'Paris, France' + author: 'Schiemer, Greg and Havryliv, Mark' + bibtex: "@inproceedings{Schiemer2006,\n abstract = {This paper describes two new\ + \ live performance scenarios for performing music using bluetooth-enabled mobile\ + \ phones. Interaction between mobile phones via wireless link is a key feature\ + \ of the performance interface for each scenario. Both scenarios are discussed\ + \ in the context of two publicly performed works for an ensemble of players in\ + \ which mobile phone handsets are used both as sound sources and as hand-held\ + \ controllers. In both works mobile phones are mounted in a specially devised\ + \ pouch attached to a cord and physically swung to produce audio chorusing. During\ + \ performance some players swing phones while others operate phones as hand-held\ + \ controllers. Wireless connectivity enables interaction between flying and hand-held\ + \ phones. Each work features different bluetooth implementations. In one a dedicated\ + \ mobile phone acts as a server that interconnects multiple clients, while in\ + \ the other point to point communication takes place between clients on an ad\ + \ hoc basis. The paper summarises bluetooth tools designed for live performance\ + \ realisation and concludes with a comparative evaluation of both scenarios for\ + \ future implementation of performance by large ensembles of nonexpert players\ + \ performing microtonal music using ubiquitous technology. 
},\n address = {Paris,\ + \ France},\n author = {Schiemer, Greg and Havryliv, Mark},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176999},\n issn = {2220-4806},\n keywords = {Java 2 Micro\ + \ Edition; j2me; Pure Data; PD; Real-Time Media Performance; Just Intonation.\ + \ },\n pages = {37--42},\n title = {Pocket Gamelan: Tuneable Trajectories for\ + \ Flying Sources in Mandala 3 and Mandala 4},\n url = {http://www.nime.org/proceedings/2006/nime2006_037.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178540 + doi: 10.5281/zenodo.1176999 issn: 2220-4806 - keywords: 'Kinect, Ensemble, Synchronization, Strings, Functional Data Analysis, - Cross-Correlogram' - month: May - pages: 106--110 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Motion and Synchronization Analysis of Musical Ensembles with the Kinect - url: http://www.nime.org/proceedings/2013/nime2013_304.pdf - year: 2013 + keywords: 'Java 2 Micro Edition; j2me; Pure Data; PD; Real-Time Media Performance; + Just Intonation. ' + pages: 37--42 + title: 'Pocket Gamelan: Tuneable Trajectories for Flying Sources in Mandala 3 and + Mandala 4' + url: http://www.nime.org/proceedings/2006/nime2006_037.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Park2013b - abstract: 'SSN (Sound Surfing Network) is a performance system that provides a new - musicalexperience by incorporating mobile phone-based spatial sound control tocollaborative - music performance. SSN enables both the performer and theaudience to manipulate - the spatial distribution of sound using the smartphonesof the audience as distributed - speaker system. 
Proposing a new perspective tothe social aspect music appreciation, - SSN will provide a new possibility tomobile music performances in the context - of interactive audience collaborationas well as sound spatialization.' - address: 'Daejeon, Republic of Korea' - author: Saebyul Park and Seonghoon Ban and Dae Ryong Hong and Woon Seung Yeo - bibtex: "@inproceedings{Park2013b,\n abstract = {SSN (Sound Surfing Network) is\ - \ a performance system that provides a new musicalexperience by incorporating\ - \ mobile phone-based spatial sound control tocollaborative music performance.\ - \ SSN enables both the performer and theaudience to manipulate the spatial distribution\ - \ of sound using the smartphonesof the audience as distributed speaker system.\ - \ Proposing a new perspective tothe social aspect music appreciation, SSN will\ - \ provide a new possibility tomobile music performances in the context of interactive\ - \ audience collaborationas well as sound spatialization.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Saebyul Park and Seonghoon Ban and Dae Ryong\ - \ Hong and Woon Seung Yeo},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178636},\n\ - \ issn = {2220-4806},\n keywords = {Mobile music, smartphone, audience participation,\ - \ spatial sound control, digital performance},\n month = {May},\n pages = {111--114},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Sound\ - \ Surfing Network (SSN): Mobile Phone-based Sound Spatialization with Audience\ - \ Collaboration},\n url = {http://www.nime.org/proceedings/2013/nime2013_305.pdf},\n\ - \ year = {2013}\n}\n" + ID: Birchfield2006 + abstract: 'Physically situated public art poses significant challenges for the design + and realization of interactive, electronic sound works. 
Consideration of diverse + audiences, environmental sensitivity, exhibition conditions, and logistics must + guide the artwork. We describe our work in this area, using a recently installed + public piece, Transition Soundings, as a case study that reveals a specialized + interface and open-ended approach to interactive music making. This case study + serves as a vehicle for examination of the real world challenges posed by public + art and its outcomes. ' + address: 'Paris, France' + author: 'Birchfield, David and Phillips, Kelly and Kidané, Assegid and Lorig, David' + bibtex: "@inproceedings{Birchfield2006,\n abstract = {Physically situated public\ + \ art poses significant challenges for the design and realization of interactive,\ + \ electronic sound works. Consideration of diverse audiences, environmental sensitivity,\ + \ exhibition conditions, and logistics must guide the artwork. We describe our\ + \ work in this area, using a recently installed public piece, Transition Soundings,\ + \ as a case study that reveals a specialized interface and open-ended approach\ + \ to interactive music making. This case study serves as a vehicle for examination\ + \ of the real world challenges posed by public art and its outcomes. },\n address\ + \ = {Paris, France},\n author = {Birchfield, David and Phillips, Kelly and Kidan\\\ + '{e}, Assegid and Lorig, David},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176873},\n\ + \ issn = {2220-4806},\n keywords = {Music, Sound, Interactivity, Arts, Public\ + \ Art, Network Systems, Sculpture, Installation Art, Embedded Electronics. 
},\n\ + \ pages = {43--48},\n title = {Interactive Public Sound Art: a case study},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_043.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178636 + doi: 10.5281/zenodo.1176873 issn: 2220-4806 - keywords: 'Mobile music, smartphone, audience participation, spatial sound control, - digital performance' - month: May - pages: 111--114 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Sound Surfing Network (SSN): Mobile Phone-based Sound Spatialization with - Audience Collaboration' - url: http://www.nime.org/proceedings/2013/nime2013_305.pdf - year: 2013 + keywords: 'Music, Sound, Interactivity, Arts, Public Art, Network Systems, Sculpture, + Installation Art, Embedded Electronics. ' + pages: 43--48 + title: 'Interactive Public Sound Art: a case study' + url: http://www.nime.org/proceedings/2006/nime2006_043.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: McGee2013 - abstract: 'VOSIS is an interactive image sonification interface that creates complexwavetables - by raster scanning greyscale image pixel data. Using a multi-touchscreen to play - image regions of unique frequency content rather than a linearscale of frequencies, - it becomes a unique performance tool for experimental andvisual music. A number - of image filters controlled by multi-touch gestures addvariation to the sound - palette. On a mobile device, parameters controlled bythe accelerometer add another - layer expressivity to the resulting audio-visualmontages.' - address: 'Daejeon, Republic of Korea' - author: Ryan McGee - bibtex: "@inproceedings{McGee2013,\n abstract = {VOSIS is an interactive image sonification\ - \ interface that creates complexwavetables by raster scanning greyscale image\ - \ pixel data. 
Using a multi-touchscreen to play image regions of unique frequency\ - \ content rather than a linearscale of frequencies, it becomes a unique performance\ - \ tool for experimental andvisual music. A number of image filters controlled\ - \ by multi-touch gestures addvariation to the sound palette. On a mobile device,\ - \ parameters controlled bythe accelerometer add another layer expressivity to\ - \ the resulting audio-visualmontages.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Ryan McGee},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178604},\n\ - \ issn = {2220-4806},\n keywords = {image sonification, multi-touch, visual music},\n\ - \ month = {May},\n pages = {460--463},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {VOSIS: a Multi-touch Image Sonification Interface},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_310.pdf},\n year = {2013}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178604 - issn: 2220-4806 - keywords: 'image sonification, multi-touch, visual music' - month: May - pages: 460--463 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'VOSIS: a Multi-touch Image Sonification Interface' - url: http://www.nime.org/proceedings/2013/nime2013_310.pdf - year: 2013 + ID: Wang2006 + address: 'Paris, France' + author: 'Wang, Ge and Misra, Ananya and Cook, Perry R.' + bibtex: "@inproceedings{Wang2006,\n address = {Paris, France},\n author = {Wang,\ + \ Ge and Misra, Ananya and Cook, Perry R.},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177017},\n issn = {2220-4806},\n keywords = {Graphical interfaces,\ + \ collaborative performance, networking, computer music ensemble, emergence, visualization,\ + \ education. 
},\n pages = {49--52},\n title = {Building Collaborative Graphical\ + \ interFaces in the Audicle},\n url = {http://www.nime.org/proceedings/2006/nime2006_049.pdf},\n\ + \ year = {2006}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177017 + issn: 2220-4806 + keywords: 'Graphical interfaces, collaborative performance, networking, computer + music ensemble, emergence, visualization, education. ' + pages: 49--52 + title: Building Collaborative Graphical interFaces in the Audicle + url: http://www.nime.org/proceedings/2006/nime2006_049.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Hoste2013 - abstract: 'Nowadays many music artists rely on visualisations and light shows to - enhanceand augment their live performances. However, the visualisation and triggeringof - lights is normally scripted in advance and synchronised with the concert,severely - limiting the artist''s freedom for improvisation, expression and ad-hocadaptation - of their show. These scripts result in performances where thetechnology enforces - the artist and their music to stay in synchronisation withthe pre-programmed environment. - We argue that these limitations can be overcomebased on emerging non-invasive - tracking technologies in combination with anadvanced gesture recognition engine.We - present a solution that uses explicit gestures and implicit dance moves tocontrol - the visual augmentation of a live music performance. We furtherillustrate how - our framework overcomes existing limitations of gestureclassification systems - by delivering a precise recognition solution based on asingle gesture sample in - combination with expert knowledge. The presentedsolution enables a more dynamic - and spontaneous performance and, when combinedwith indirect augmented reality, - results in a more intense interaction betweenthe artist and their audience.' 
- address: 'Daejeon, Republic of Korea' - author: Lode Hoste and Beat Signer - bibtex: "@inproceedings{Hoste2013,\n abstract = {Nowadays many music artists rely\ - \ on visualisations and light shows to enhanceand augment their live performances.\ - \ However, the visualisation and triggeringof lights is normally scripted in advance\ - \ and synchronised with the concert,severely limiting the artist's freedom for\ - \ improvisation, expression and ad-hocadaptation of their show. These scripts\ - \ result in performances where thetechnology enforces the artist and their music\ - \ to stay in synchronisation withthe pre-programmed environment. We argue that\ - \ these limitations can be overcomebased on emerging non-invasive tracking technologies\ - \ in combination with anadvanced gesture recognition engine.We present a solution\ - \ that uses explicit gestures and implicit dance moves tocontrol the visual augmentation\ - \ of a live music performance. We furtherillustrate how our framework overcomes\ - \ existing limitations of gestureclassification systems by delivering a precise\ - \ recognition solution based on asingle gesture sample in combination with expert\ - \ knowledge. 
The presentedsolution enables a more dynamic and spontaneous performance\ - \ and, when combinedwith indirect augmented reality, results in a more intense\ - \ interaction betweenthe artist and their audience.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Lode Hoste and Beat Signer},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178558},\n issn = {2220-4806},\n keywords = {Expressive\ - \ control, augmented reality, live music performance, 3D gesture recognition,\ - \ Kinect, declarative language},\n month = {May},\n pages = {13--18},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {Expressive Control\ - \ of Indirect Augmented Reality During Live Music Performances},\n url = {http://www.nime.org/proceedings/2013/nime2013_32.pdf},\n\ - \ year = {2013}\n}\n" + ID: Rebelo2006 + abstract: 'The culture of laptop improvisation has grown tremendously in recent + years. The development of personalized software instruments presents interesting + issues in the context of improvised group performances. This paper examines an + approach that is aimed at increasing the modes of interactivity between laptop + performers and at the same time suggests ways in which audiences can better discern + and identify the sonic characteristics of each laptop performer. We refer to software + implementation that was developed for the BLISS networked laptop ensemble with + view to designing a shared format for the exchange of messages within local and + internet based networks. ' + address: 'Paris, France' + author: 'Rebelo, Pedro and Renaud, Alain B.' + bibtex: "@inproceedings{Rebelo2006,\n abstract = {The culture of laptop improvisation\ + \ has grown tremendously in recent years. 
The development of personalized software\ + \ instruments presents interesting issues in the context of improvised group performances.\ + \ This paper examines an approach that is aimed at increasing the modes of interactivity\ + \ between laptop performers and at the same time suggests ways in which audiences\ + \ can better discern and identify the sonic characteristics of each laptop performer.\ + \ We refer to software implementation that was developed for the BLISS networked\ + \ laptop ensemble with view to designing a shared format for the exchange of messages\ + \ within local and internet based networks. },\n address = {Paris, France},\n\ + \ author = {Rebelo, Pedro and Renaud, Alain B.},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176993},\n issn = {2220-4806},\n keywords = {Networked audio\ + \ technologies, laptop ensemble, centralized audio server, improvisation },\n\ + \ pages = {53--56},\n title = {The Frequencyliator -- Distributing Structures\ + \ for Networked Laptop Improvisation},\n url = {http://www.nime.org/proceedings/2006/nime2006_053.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178558 + doi: 10.5281/zenodo.1176993 issn: 2220-4806 - keywords: 'Expressive control, augmented reality, live music performance, 3D gesture - recognition, Kinect, declarative language' - month: May - pages: 13--18 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Expressive Control of Indirect Augmented Reality During Live Music Performances - url: http://www.nime.org/proceedings/2013/nime2013_32.pdf - year: 2013 + keywords: 'Networked audio technologies, laptop ensemble, centralized audio server, + improvisation ' + pages: 53--56 + title: The Frequencyliator -- Distributing Structures for Networked Laptop Improvisation + url: 
http://www.nime.org/proceedings/2006/nime2006_053.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Murphy2013 - abstract: 'This paper provides a history of robotic guitars and bass guitars as - well as adiscussion of the design, construction, and evaluation of two new roboticinstruments. - Throughout the paper, a focus is made on different techniques toextend the expressivity - of robotic guitars. Swivel and MechBass, two newrobots, are built and discussed. - Construction techniques of likely interest toother musical roboticists are included. - These robots use a variety oftechniques, both new and inspired by prior work, - to afford composers andperformers with the ability to precisely control pitch - and plucking parameters.Both new robots are evaluated to test their precision, - repeatability, andspeed. The paper closes with a discussion of the compositional - and performativeimplications of such levels of control, and how it might affect - humans who wishto interface with the systems.' - address: 'Daejeon, Republic of Korea' - author: Jim Murphy and James McVay and Ajay Kapur and Dale Carnegie - bibtex: "@inproceedings{Murphy2013,\n abstract = {This paper provides a history\ - \ of robotic guitars and bass guitars as well as adiscussion of the design, construction,\ - \ and evaluation of two new roboticinstruments. Throughout the paper, a focus\ - \ is made on different techniques toextend the expressivity of robotic guitars.\ - \ Swivel and MechBass, two newrobots, are built and discussed. Construction techniques\ - \ of likely interest toother musical roboticists are included. These robots use\ - \ a variety oftechniques, both new and inspired by prior work, to afford composers\ - \ andperformers with the ability to precisely control pitch and plucking parameters.Both\ - \ new robots are evaluated to test their precision, repeatability, andspeed. 
The\ - \ paper closes with a discussion of the compositional and performativeimplications\ - \ of such levels of control, and how it might affect humans who wishto interface\ - \ with the systems.},\n address = {Daejeon, Republic of Korea},\n author = {Jim\ - \ Murphy and James McVay and Ajay Kapur and Dale Carnegie},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178618},\n issn = {2220-4806},\n keywords = {musical\ - \ robotics, kinetic sculpture, mechatronics},\n month = {May},\n pages = {557--562},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Designing\ - \ and Building Expressive Robotic Guitars},\n url = {http://www.nime.org/proceedings/2013/nime2013_36.pdf},\n\ - \ year = {2013}\n}\n" + ID: Naef2006 + address: 'Paris, France' + author: 'Naef, Martin and Collicott, Daniel' + bibtex: "@inproceedings{Naef2006,\n address = {Paris, France},\n author = {Naef,\ + \ Martin and Collicott, Daniel},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176975},\n\ + \ issn = {2220-4806},\n pages = {57--60},\n title = {A VR Interface for Collaborative\ + \ {3D} Audio Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_057.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178618 + doi: 10.5281/zenodo.1176975 issn: 2220-4806 - keywords: 'musical robotics, kinetic sculpture, mechatronics' - month: May - pages: 557--562 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Designing and Building Expressive Robotic Guitars - url: http://www.nime.org/proceedings/2013/nime2013_36.pdf - year: 2013 + pages: 57--60 + title: 'A VR Interface for Collaborative {3D} Audio Performance' + url: http://www.nime.org/proceedings/2006/nime2006_057.pdf + year: 
2006 - ENTRYTYPE: inproceedings - ID: Dezfouli2013 - abstract: Notesaaz is both a new physical interface meant for musical performance - and aproposal for a three-stage process where the controller is used to navigatewithin - a graphical score that on its turn controls the sound generation. It canbe seen - as a dynamic and understandable way of using dynamic mapping betweenthe sensor - input and the sound generation. Furthermore by presenting thegraphical score to - both the performer and the audience a new engagement of theaudience can be established. - address: 'Daejeon, Republic of Korea' - author: Erfan Abdi Dezfouli and Edwin van der Heide - bibtex: "@inproceedings{Dezfouli2013,\n abstract = {Notesaaz is both a new physical\ - \ interface meant for musical performance and aproposal for a three-stage process\ - \ where the controller is used to navigatewithin a graphical score that on its\ - \ turn controls the sound generation. It canbe seen as a dynamic and understandable\ - \ way of using dynamic mapping betweenthe sensor input and the sound generation.\ - \ Furthermore by presenting thegraphical score to both the performer and the audience\ - \ a new engagement of theaudience can be established.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Erfan Abdi Dezfouli and Edwin van der Heide},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178498},\n issn = {2220-4806},\n\ - \ keywords = {musical instrument, custom controller, gestural input, dynamic score},\n\ - \ month = {May},\n pages = {115--117},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Notesaaz: a new controller and performance idiom},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_4.pdf},\n year = {2013}\n\ - }\n" + ID: Geiger2006 + address: 'Paris, France' + author: 'Geiger, G\''''{u}nter' + bibtex: "@inproceedings{Geiger2006,\n address = {Paris, 
France},\n author = {Geiger,\ + \ G\\''{u}nter},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176911},\n\ + \ issn = {2220-4806},\n keywords = {touch screen, PDA, Pure Data, controller,\ + \ mobile musical instrument, human computer interaction },\n pages = {61--64},\n\ + \ title = {Using the Touch Screen as a Controller for Portable Computer Music\ + \ Instruments},\n url = {http://www.nime.org/proceedings/2006/nime2006_061.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178498 + doi: 10.5281/zenodo.1176911 issn: 2220-4806 - keywords: 'musical instrument, custom controller, gestural input, dynamic score' - month: May - pages: 115--117 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Notesaaz: a new controller and performance idiom' - url: http://www.nime.org/proceedings/2013/nime2013_4.pdf - year: 2013 + keywords: 'touch screen, PDA, Pure Data, controller, mobile musical instrument, + human computer interaction ' + pages: 61--64 + title: Using the Touch Screen as a Controller for Portable Computer Music Instruments + url: http://www.nime.org/proceedings/2006/nime2006_061.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Fuhrmann2013 - abstract: This paper demonstrates how to use multiple Kinect(TM) sensors to map - aperformers motion to music. We merge skeleton data streams from multiplesensors - to compensate for occlusions of the performer. The skeleton jointpositions drive - the performance via open sound control data. We discuss how toregister the different - sensors to each other and how to smoothly merge theresulting data streams and - how to map position data in a general framework tothe live electronics applied - to a chamber music ensemble. 
- address: 'Daejeon, Republic of Korea' - author: Anton Fuhrmann and Johannes Kretz and Peter Burwik - bibtex: "@inproceedings{Fuhrmann2013,\n abstract = {This paper demonstrates how\ - \ to use multiple Kinect(TM) sensors to map aperformers motion to music. We merge\ - \ skeleton data streams from multiplesensors to compensate for occlusions of the\ - \ performer. The skeleton jointpositions drive the performance via open sound\ - \ control data. We discuss how toregister the different sensors to each other\ - \ and how to smoothly merge theresulting data streams and how to map position\ - \ data in a general framework tothe live electronics applied to a chamber music\ - \ ensemble.},\n address = {Daejeon, Republic of Korea},\n author = {Anton Fuhrmann\ - \ and Johannes Kretz and Peter Burwik},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178530},\n\ - \ issn = {2220-4806},\n keywords = {kinect, multi sensor, sensor fusion, open\ - \ sound control, motion tracking, parameter mapping, live electronics},\n month\ - \ = {May},\n pages = {358--362},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {Multi Sensor Tracking for Live Sound Transformation},\n url\ - \ = {http://www.nime.org/proceedings/2013/nime2013_44.pdf},\n year = {2013}\n\ - }\n" + ID: Holm2006 + abstract: 'This paper discusses the concept of using background music to control + video game parameters and thus actions on the screen. Each song selected by the + player makes the game look different and behave variedly. The concept is explored + by modifying an existing video game and playtesting it with different kinds of + MIDI music. Several examples of mapping MIDI parameters to game events are presented. + As mobile phones'' MIDI players do not usually have a dedicated callback API, + a real-time MIDI analysis software for Symbian OS was implemented. 
Future developments + including real-time group performance as a way to control game content are also + considered. ' + address: 'Paris, France' + author: 'Holm, Jukka and Arrasvuori, Juha and Havukainen, Kai' + bibtex: "@inproceedings{Holm2006,\n abstract = {This paper discusses the concept\ + \ of using background music to control video game parameters and thus actions\ + \ on the screen. Each song selected by the player makes the game look different\ + \ and behave variedly. The concept is explored by modifying an existing video\ + \ game and playtesting it with different kinds of MIDI music. Several examples\ + \ of mapping MIDI parameters to game events are presented. As mobile phones' MIDI\ + \ players do not usually have a dedicated callback API, a real-time MIDI analysis\ + \ software for Symbian OS was implemented. Future developments including real-time\ + \ group performance as a way to control game content are also considered. },\n\ + \ address = {Paris, France},\n author = {Holm, Jukka and Arrasvuori, Juha and\ + \ Havukainen, Kai},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176925},\n\ + \ issn = {2220-4806},\n keywords = {Games, MIDI, music, rhythm games, background\ + \ music reactive games, musically controlled games, MIDI-controlled games, Virtual\ + \ Sequencer. 
},\n pages = {65--70},\n title = {Using {MIDI} to Modify Video Game\ + \ Content},\n url = {http://www.nime.org/proceedings/2006/nime2006_065.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178530 + doi: 10.5281/zenodo.1176925 issn: 2220-4806 - keywords: 'kinect, multi sensor, sensor fusion, open sound control, motion tracking, - parameter mapping, live electronics' - month: May - pages: 358--362 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Multi Sensor Tracking for Live Sound Transformation - url: http://www.nime.org/proceedings/2013/nime2013_44.pdf - year: 2013 + keywords: 'Games, MIDI, music, rhythm games, background music reactive games, musically + controlled games, MIDI-controlled games, Virtual Sequencer. ' + pages: 65--70 + title: Using MIDI to Modify Video Game Content + url: http://www.nime.org/proceedings/2006/nime2006_065.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Mudd2013 - abstract: 'This paper presents a system for exploring different dimensions of a - soundthrough the use of haptic feedback. The Novint Falcon force feedback interfaceis - used to scan through soundfiles as a subject moves their hand horizontallyfrom - left to right, and to relay information about volume, frequency content,noisiness, - or potentially any analysable parameter back to the subject throughforces acting - on their hand. General practicalities of mapping sonic elements to physical forces - areconsidered, such as the problem of representing detailed data through vaguephysical - sensation, approaches to applying forces to the hand that do notinterfering with - the smooth operation of the device, and the relative merits ofdiscreet and continuous - mappings. 
Three approaches to generating the forcevector are discussed: 1) the - use of simulated detents to identify areas of anaudio parameter over a certain - threshold, 2) applying friction proportional tothe level of the audio parameter - along the axis of movement, and 3) creatingforces perpendicular to the subject''s - hand movements.Presentation of audio information in this manner could be beneficial - for`pre-feeling'' as a method for selecting material to play during a liveperformance, - assisting visually impaired audio engineers, and as a generalaugmentation of standard - audio editing environments.' - address: 'Daejeon, Republic of Korea' - author: Tom Mudd - bibtex: "@inproceedings{Mudd2013,\n abstract = {This paper presents a system for\ - \ exploring different dimensions of a soundthrough the use of haptic feedback.\ - \ The Novint Falcon force feedback interfaceis used to scan through soundfiles\ - \ as a subject moves their hand horizontallyfrom left to right, and to relay information\ - \ about volume, frequency content,noisiness, or potentially any analysable parameter\ - \ back to the subject throughforces acting on their hand. General practicalities\ - \ of mapping sonic elements to physical forces areconsidered, such as the problem\ - \ of representing detailed data through vaguephysical sensation, approaches to\ - \ applying forces to the hand that do notinterfering with the smooth operation\ - \ of the device, and the relative merits ofdiscreet and continuous mappings. 
Three\ - \ approaches to generating the forcevector are discussed: 1) the use of simulated\ - \ detents to identify areas of anaudio parameter over a certain threshold, 2)\ - \ applying friction proportional tothe level of the audio parameter along the\ - \ axis of movement, and 3) creatingforces perpendicular to the subject's hand\ - \ movements.Presentation of audio information in this manner could be beneficial\ - \ for`pre-feeling' as a method for selecting material to play during a liveperformance,\ - \ assisting visually impaired audio engineers, and as a generalaugmentation of\ - \ standard audio editing environments.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Tom Mudd},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1293003},\n\ - \ issn = {2220-4806},\n keywords = {Haptics, force feedback, mapping, human-computer\ - \ interaction},\n month = {May},\n pages = {369--372},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Feeling for Sound: Mapping\ - \ Sonic Data to Haptic Perceptions},\n url = {http://www.nime.org/proceedings/2013/nime2013_46.pdf},\n\ - \ year = {2013}\n}\n" + ID: Lippit2006 + abstract: "Turntable musicians have yet to explore new expressions with digital\ + \ technology. New higher-level development tools open possibilities for these\ + \ artists to build their own instruments that can achieve artistic goals commercial\ + \ products cannot. This paper will present a rough overview on the practice and\ + \ recent development of turntable music, followed by descriptions of two projects\ + \ by the ,\n,\nauthor. " + address: 'Paris, France' + author: 'Lippit, Takuro M.' + bibtex: "@inproceedings{Lippit2006,\n abstract = {Turntable musicians have yet to\ + \ explore new expressions with digital technology. 
New higher-level development\ + \ tools open possibilities for these artists to build their own instruments that\ + \ can achieve artistic goals commercial products cannot. This paper will present\ + \ a rough overview on the practice and recent development of turntable music,\ + \ followed by descriptions of two projects by the ,\n,\nauthor. },\n address =\ + \ {Paris, France},\n author = {Lippit, Takuro M.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176965},\n issn = {2220-4806},\n keywords = {Turntable\ + \ music, DJ, turntablist, improvisation, Max/MSP, PIC Microcontroller, Physical\ + \ Computing },\n pages = {71--74},\n title = {Turntable Music in the Digital Era:\ + \ Designing Alternative Tools for New Turntable Expression},\n url = {http://www.nime.org/proceedings/2006/nime2006_071.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1293003 + doi: 10.5281/zenodo.1176965 issn: 2220-4806 - keywords: 'Haptics, force feedback, mapping, human-computer interaction' - month: May - pages: 369--372 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Feeling for Sound: Mapping Sonic Data to Haptic Perceptions' - url: http://www.nime.org/proceedings/2013/nime2013_46.pdf - year: 2013 + keywords: 'Turntable music, DJ, turntablist, improvisation, Max/MSP, PIC Microcontroller, + Physical Computing ' + pages: 71--74 + title: 'Turntable Music in the Digital Era: Designing Alternative Tools for New + Turntable Expression' + url: http://www.nime.org/proceedings/2006/nime2006_071.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: BenAsher2013 - abstract: 'A system is presented for detecting common gestures, musical intentions - andemotions of pianists in real-time using only kinesthetic data retrieved bywireless - motion sensors. 
The algorithm can detect common Western musicalstructures such - as chords, arpeggios, scales, and trills as well as musicallyintended emotions: - cheerful, mournful, vigorous, dreamy, lyrical, and humorouscompletely and solely - based on low-sample-rate motion sensor data. Thealgorithm can be trained per performer - in real-time or can work based onprevious training sets. The system maps the emotions - to a color set andpresents them as a flowing emotional spectrum on the background - of a pianoroll. This acts as a feedback mechanism for emotional expression as - well as aninteractive display of the music. The system was trained and tested - on a numberof pianists and it classified structures and emotions with promising - results ofup to 92% accuracy.' - address: 'Daejeon, Republic of Korea' - author: Matan Ben-Asher and Colby Leider - bibtex: "@inproceedings{BenAsher2013,\n abstract = {A system is presented for detecting\ - \ common gestures, musical intentions andemotions of pianists in real-time using\ - \ only kinesthetic data retrieved bywireless motion sensors. The algorithm can\ - \ detect common Western musicalstructures such as chords, arpeggios, scales, and\ - \ trills as well as musicallyintended emotions: cheerful, mournful, vigorous,\ - \ dreamy, lyrical, and humorouscompletely and solely based on low-sample-rate\ - \ motion sensor data. Thealgorithm can be trained per performer in real-time or\ - \ can work based onprevious training sets. The system maps the emotions to a color\ - \ set andpresents them as a flowing emotional spectrum on the background of a\ - \ pianoroll. This acts as a feedback mechanism for emotional expression as well\ - \ as aninteractive display of the music. 
The system was trained and tested on\ - \ a numberof pianists and it classified structures and emotions with promising\ - \ results ofup to 92\\% accuracy.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Matan Ben-Asher and Colby Leider},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178474},\n issn = {2220-4806},\n keywords = {Motion Sensors,\ - \ IMUs, Expressive Piano Performance, Machine Learning, Computer Music, Music\ - \ and Emotion},\n month = {May},\n pages = {21--24},\n publisher = {Graduate School\ - \ of Culture Technology, KAIST},\n title = {Toward an Emotionally Intelligent\ - \ Piano: Real-Time Emotion Detection and Performer Feedback via Kinesthetic Sensing\ - \ in Piano Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_48.pdf},\n\ - \ year = {2013}\n}\n" + ID: Kiser2006 + abstract: 'This report presents an interface for musical performance called the + spinCycle. spinCycle enables performers to make visual patterns with brightly + colored objects on a spinning turntable platter that get translated into musical + arrangements in realtime. I will briefly describe the hardware implementation + and the sound generation logic used, as well as provide a historical background + for the project.' + address: 'Paris, France' + author: 'Kiser, Spencer' + bibtex: "@inproceedings{Kiser2006,\n abstract = {This report presents an interface\ + \ for musical performance called the spinCycle. spinCycle enables performers to\ + \ make visual patterns with brightly colored objects on a spinning turntable platter\ + \ that get translated into musical arrangements in realtime. 
I will briefly describe\ + \ the hardware implementation and the sound generation logic used, as well as\ + \ provide a historical background for the project.},\n address = {Paris, France},\n\ + \ author = {Kiser, Spencer},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176941},\n\ + \ issn = {2220-4806},\n keywords = {Color-tracking, turntable, visualization,\ + \ interactivity, synesthesia },\n pages = {75--76},\n title = {spinCycle: a Color-Tracking\ + \ Turntable Sequencer},\n url = {http://www.nime.org/proceedings/2006/nime2006_075.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178474 + doi: 10.5281/zenodo.1176941 issn: 2220-4806 - keywords: 'Motion Sensors, IMUs, Expressive Piano Performance, Machine Learning, - Computer Music, Music and Emotion' - month: May - pages: 21--24 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Toward an Emotionally Intelligent Piano: Real-Time Emotion Detection and - Performer Feedback via Kinesthetic Sensing in Piano Performance' - url: http://www.nime.org/proceedings/2013/nime2013_48.pdf - year: 2013 + keywords: 'Color-tracking, turntable, visualization, interactivity, synesthesia ' + pages: 75--76 + title: 'spinCycle: a Color-Tracking Turntable Sequencer' + url: http://www.nime.org/proceedings/2006/nime2006_075.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Diakopoulos2013 - abstract: 'Netpixl is a new micro-toolkit built to network devices within interactiveinstallations - and environments. Using a familiar client-server model, Netpixlcentrally wraps - an important aspect of ubiquitous computing: real-timemessaging. In the context - of sound and music computing, the role of Netpixl isto fluidly integrate endpoints - like OSC and MIDI within a larger multi-usersystem. 
This paper considers useful - design principles that may be applied totoolkits like Netpixl while also emphasizing - recent approaches to applicationdevelopment via HTML5 and Javascript, highlighting - an evolution in networkedcreative computing.' - address: 'Daejeon, Republic of Korea' - author: Dimitri Diakopoulos and Ajay Kapur - bibtex: "@inproceedings{Diakopoulos2013,\n abstract = {Netpixl is a new micro-toolkit\ - \ built to network devices within interactiveinstallations and environments. Using\ - \ a familiar client-server model, Netpixlcentrally wraps an important aspect of\ - \ ubiquitous computing: real-timemessaging. In the context of sound and music\ - \ computing, the role of Netpixl isto fluidly integrate endpoints like OSC and\ - \ MIDI within a larger multi-usersystem. This paper considers useful design principles\ - \ that may be applied totoolkits like Netpixl while also emphasizing recent approaches\ - \ to applicationdevelopment via HTML5 and Javascript, highlighting an evolution\ - \ in networkedcreative computing.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Dimitri Diakopoulos and Ajay Kapur},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178500},\n issn = {2220-4806},\n keywords = {networking,\ - \ ubiquitious computing, toolkits, html5},\n month = {May},\n pages = {206--209},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {Netpixl:\ - \ Towards a New Paradigm for Networked Application Development},\n url = {http://www.nime.org/proceedings/2013/nime2013_49.pdf},\n\ - \ year = {2013}\n}\n" + ID: Lee2006a + address: 'Paris, France' + author: 'Lee, Jason' + bibtex: "@inproceedings{Lee2006a,\n address = {Paris, France},\n author = {Lee,\ + \ Jason},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176959},\n issn = 
{2220-4806},\n\ + \ pages = {77--78},\n title = {The Chopping Board: Real-time Sample Editor},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_077.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178500 + doi: 10.5281/zenodo.1176959 issn: 2220-4806 - keywords: 'networking, ubiquitious computing, toolkits, html5' - month: May - pages: 206--209 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Netpixl: Towards a New Paradigm for Networked Application Development' - url: http://www.nime.org/proceedings/2013/nime2013_49.pdf - year: 2013 + pages: 77--78 + title: 'The Chopping Board: Real-time Sample Editor' + url: http://www.nime.org/proceedings/2006/nime2006_077.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Fasciani2013 - abstract: 'Mapping gestures to digital musical instrument parameters is not trivial - when the dimensionality of the sensor-captured data is high and the model relating - the gesture to sensor data is unknown. In these cases, a front-end processing - system for extracting gestural information embedded in the sensor data is essential. - In this paper we propose an unsupervised offline method that learns how to reduce - and map the gestural data to a generic instrument parameter control space. We - make an unconventional use of the Self-Organizing Maps to obtain only a geometrical - transformation of the gestural data, while dimensionality reduction is handled - separately. We introduce a novel training procedure to overcome two main Self-Organizing - Maps limitations which otherwise corrupt the interface usability. As evaluation, - we apply this method to our existing Voice-Controlled Interface for musical instruments, - obtaining sensible usability improvements.' 
- address: 'Daejeon, Republic of Korea' - author: Stefano Fasciani and Lonce Wyse - bibtex: "@inproceedings{Fasciani2013,\n abstract = {Mapping gestures to digital\ - \ musical instrument parameters is not trivial when the dimensionality of the\ - \ sensor-captured data is high and the model relating the gesture to sensor data\ - \ is unknown. In these cases, a front-end processing system for extracting gestural\ - \ information embedded in the sensor data is essential. In this paper we propose\ - \ an unsupervised offline method that learns how to reduce and map the gestural\ - \ data to a generic instrument parameter control space. We make an unconventional\ - \ use of the Self-Organizing Maps to obtain only a geometrical transformation\ - \ of the gestural data, while dimensionality reduction is handled separately.\ - \ We introduce a novel training procedure to overcome two main Self-Organizing\ - \ Maps limitations which otherwise corrupt the interface usability. As evaluation,\ - \ we apply this method to our existing Voice-Controlled Interface for musical\ - \ instruments, obtaining sensible usability improvements.},\n address = {Daejeon,\ - \ Republic of Korea},\n author = {Stefano Fasciani and Lonce Wyse},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.4582292},\n issn = {2220-4806},\n keywords\ - \ = {Self-Organizing Maps, Gestural Controller, Multi Dimensional Control, Unsupervised\ - \ Gesture Mapping, Voice Control},\n month = {May},\n pages = {507--512},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {A Self-Organizing\ - \ Gesture Map for a Voice-Controlled Instrument Interface},\n url = {http://www.nime.org/proceedings/2013/nime2013_50.pdf},\n\ - \ year = {2013}\n}\n" + ID: DeJong2006 + address: 'Paris, France' + author: 'de Jong, Staas' + bibtex: "@inproceedings{DeJong2006,\n address = {Paris, France},\n author = {de\ + \ Jong, 
Staas},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176935},\n\ + \ issn = {2220-4806},\n pages = {79--80},\n title = {A Tactile Closed-Loop Device\ + \ for Musical Interaction},\n url = {http://www.nime.org/proceedings/2006/nime2006_079.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.4582292 + doi: 10.5281/zenodo.1176935 issn: 2220-4806 - keywords: 'Self-Organizing Maps, Gestural Controller, Multi Dimensional Control, - Unsupervised Gesture Mapping, Voice Control' - month: May - pages: 507--512 - publisher: 'Graduate School of Culture Technology, KAIST' - title: A Self-Organizing Gesture Map for a Voice-Controlled Instrument Interface - url: http://www.nime.org/proceedings/2013/nime2013_50.pdf - year: 2013 + pages: 79--80 + title: A Tactile Closed-Loop Device for Musical Interaction + url: http://www.nime.org/proceedings/2006/nime2006_079.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Berthaut2013 - abstract: 'Digital musical instruments bring new possibilities for musical performance.They - are also more complex for the audience to understand, due to the diversityof their - components and the magical aspect of the musicians'' actions whencompared to acoustic - instruments. This complexity results in a loss of livenessand possibly a poor - experience for the audience. Our approach, called Rouages,is based on a mixed-reality - display system and a 3D visualization application.It reveals the mechanisms of - digital musical instruments by amplifyingmusicians'' gestures with virtual extensions - of the sensors, by representingthe sound components with 3D shapes and specific - behaviors and by showing theimpact ofmusicians gestures on these components. 
We - believe that Rouages opens up newperspectives to help instrument makers and musicians - improve audienceexperience with their digital musical instruments.' - address: 'Daejeon, Republic of Korea' - author: Florent Berthaut and Mark T. Marshall and Sriram Subramanian and Martin - Hachet - bibtex: "@inproceedings{Berthaut2013,\n abstract = {Digital musical instruments\ - \ bring new possibilities for musical performance.They are also more complex for\ - \ the audience to understand, due to the diversityof their components and the\ - \ magical aspect of the musicians' actions whencompared to acoustic instruments.\ - \ This complexity results in a loss of livenessand possibly a poor experience\ - \ for the audience. Our approach, called Rouages,is based on a mixed-reality display\ - \ system and a 3D visualization application.It reveals the mechanisms of digital\ - \ musical instruments by amplifyingmusicians' gestures with virtual extensions\ - \ of the sensors, by representingthe sound components with 3D shapes and specific\ - \ behaviors and by showing theimpact ofmusicians gestures on these components.\ - \ We believe that Rouages opens up newperspectives to help instrument makers and\ - \ musicians improve audienceexperience with their digital musical instruments.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Florent Berthaut and Mark\ - \ T. 
Marshall and Sriram Subramanian and Martin Hachet},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178478},\n issn = {2220-4806},\n keywords = {rouages,\ - \ digital musical instruments, mappings, 3D interface, mixed-reality,},\n month\ - \ = {May},\n pages = {164--169},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {Rouages: Revealing the Mechanisms of Digital Musical Instruments\ - \ to the Audience},\n url = {http://www.nime.org/proceedings/2013/nime2013_51.pdf},\n\ - \ year = {2013}\n}\n" + ID: Bennett2006 + abstract: 'The PETECUBE project consists of a series of musical interfaces designed + to explore multi-modal feedback. This paper will briefly describe the definition + of multimodal feedback, the aim of the project, the development of the first PETECUBE + and proposed further work. ' + address: 'Paris, France' + author: 'Bennett, Peter' + bibtex: "@inproceedings{Bennett2006,\n abstract = {The PETECUBE project consists\ + \ of a series of musical interfaces designed to explore multi-modal feedback.\ + \ This paper will briefly describe the definition of multimodal feedback, the\ + \ aim of the project, the development of the first PETECUBE and proposed further\ + \ work. },\n address = {Paris, France},\n author = {Bennett, Peter},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176869},\n issn = {2220-4806},\n keywords\ + \ = {Multi-modal Feedback. Haptics. Musical Instrument. 
},\n pages = {81--84},\n\ + \ title = {{PET}ECUBE: a Multimodal Feedback Interface},\n url = {http://www.nime.org/proceedings/2006/nime2006_081.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178478 + doi: 10.5281/zenodo.1176869 issn: 2220-4806 - keywords: 'rouages, digital musical instruments, mappings, 3D interface, mixed-reality,' - month: May - pages: 164--169 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Rouages: Revealing the Mechanisms of Digital Musical Instruments to the - Audience' - url: http://www.nime.org/proceedings/2013/nime2013_51.pdf - year: 2013 + keywords: 'Multi-modal Feedback. Haptics. Musical Instrument. ' + pages: 81--84 + title: 'PETECUBE: a Multimodal Feedback Interface' + url: http://www.nime.org/proceedings/2006/nime2006_081.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Resch2013 - abstract: 'note~ for Max consists of four objects for the Software Max/MSP which - allow sequencing in floating point resolution and provide a Graphical User Interface - and a Scripting Interface for generating events within a timeline. Due to the - complete integration into Max/MSP it is possible to control almost every type - of client like another software, audio and video or extern hardware by note~ or - control note~ itself by other software and hardware.' - address: 'Daejeon, Republic of Korea' - author: Thomas Resch - bibtex: "@inproceedings{Resch2013,\n abstract = {note~ for Max consists of four\ - \ objects for the Software Max/MSP which allow sequencing in floating point resolution\ - \ and provide a Graphical User Interface and a Scripting Interface for generating\ - \ events within a timeline. 
Due to the complete integration into Max/MSP it is\ - \ possible to control almost every type of client like another software, audio\ - \ and video or extern hardware by note~ or control note~ itself by other software\ - \ and hardware.},\n address = {Daejeon, Republic of Korea},\n author = {Thomas\ - \ Resch},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178644},\n issn = {2220-4806},\n\ - \ keywords = {Max/MSP, composing, timeline, GUI, sequencing, score, notation.},\n\ - \ month = {May},\n pages = {210--212},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {note~ for Max --- An extension for Max/MSP for\ - \ Media Arts \\& music},\n url = {http://www.nime.org/proceedings/2013/nime2013_57.pdf},\n\ - \ year = {2013}\n}\n" + ID: Lebel2006 + address: 'Paris, France' + author: 'Lebel, Denis and Malloch, Joseph' + bibtex: "@inproceedings{Lebel2006,\n address = {Paris, France},\n author = {Lebel,\ + \ Denis and Malloch, Joseph},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176955},\n\ + \ issn = {2220-4806},\n keywords = {Digital musical instrument, kinesthetic feedback\ + \ },\n pages = {85--88},\n title = {The G-Spring Controller},\n url = {http://www.nime.org/proceedings/2006/nime2006_085.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178644 + doi: 10.5281/zenodo.1176955 issn: 2220-4806 - keywords: 'Max/MSP, composing, timeline, GUI, sequencing, score, notation.' 
- month: May - pages: 210--212 - publisher: 'Graduate School of Culture Technology, KAIST' - title: note~ for Max --- An extension for Max/MSP for Media Arts & music - url: http://www.nime.org/proceedings/2013/nime2013_57.pdf - year: 2013 + keywords: 'Digital musical instrument, kinesthetic feedback ' + pages: 85--88 + title: The G-Spring Controller + url: http://www.nime.org/proceedings/2006/nime2006_085.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Han2013 - abstract: 'This paper proposes a musical performance feedback system based on real-time - audio-score alignment for musical instrument education of beginner musicians. - In the proposed system, we do not make use of symbolic data such as MIDI, but - acquire a real-time audio input from on-board microphone of smartphone. Then, - the system finds onset and pitch of the note from the signal, to align this information - with the ground truth musical score. Real-time alignment allows the system to - evaluate whether the user played the correct note or not, regardless of its timing, - which enables user to play at their own speed, as playing same tempo with original - musical score is problematic for beginners. As an output of evaluation, the system - notifies the user about which part they are currently performing, and which note - were played incorrectly.' - address: 'Daejeon, Republic of Korea' - author: Yoonchang Han and Sejun Kwon and Kibeom Lee and Kyogu Lee - bibtex: "@inproceedings{Han2013,\n abstract = {This paper proposes a musical performance\ - \ feedback system based on real-time audio-score alignment for musical instrument\ - \ education of beginner musicians. In the proposed system, we do not make use\ - \ of symbolic data such as MIDI, but acquire a real-time audio input from on-board\ - \ microphone of smartphone. 
Then, the system finds onset and pitch of the note\ - \ from the signal, to align this information with the ground truth musical score.\ - \ Real-time alignment allows the system to evaluate whether the user played the\ - \ correct note or not, regardless of its timing, which enables user to play at\ - \ their own speed, as playing same tempo with original musical score is problematic\ - \ for beginners. As an output of evaluation, the system notifies the user about\ - \ which part they are currently performing, and which note were played incorrectly.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Yoonchang Han and Sejun\ - \ Kwon and Kibeom Lee and Kyogu Lee},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178546},\n\ - \ issn = {2220-4806},\n keywords = {Music performance analysis, Music education,\ - \ Real-time score following},\n month = {May},\n pages = {120--121},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {A Musical Performance\ - \ Evaluation System for Beginner Musician based on Real-time Score Following},\n\ - \ url = {http://www.nime.org/proceedings/2013/nime2013_60.pdf},\n year = {2013}\n\ + ID: Lock2006 + abstract: 'The Orbophone is a new interface that radiates rather thanprojects sound + and image. It provides a cohesive platformfor audio and visual presentation in + situations where bothmedia are transmitted from the same location andlocalization + in both media is perceptually correlated. Thispaper discusses the advantages of + radiation overconventional sound and image projection for certain kindsof interactive + public multimedia exhibits and describes theartistic motivation for its development + against a historicalbackdrop of sound systems used in public spaces. 
Oneexhibit + using the Orbophone is described in detail togetherwith description and critique + of the prototype, discussingaspects of its design and construction. The paper + concludeswith an outline of the Orbophone version 2.' + address: 'Paris, France' + author: 'Lock, Damien and Schiemer, Greg' + bibtex: "@inproceedings{Lock2006,\n abstract = {The Orbophone is a new interface\ + \ that radiates rather thanprojects sound and image. It provides a cohesive platformfor\ + \ audio and visual presentation in situations where bothmedia are transmitted\ + \ from the same location andlocalization in both media is perceptually correlated.\ + \ Thispaper discusses the advantages of radiation overconventional sound and image\ + \ projection for certain kindsof interactive public multimedia exhibits and describes\ + \ theartistic motivation for its development against a historicalbackdrop of sound\ + \ systems used in public spaces. Oneexhibit using the Orbophone is described in\ + \ detail togetherwith description and critique of the prototype, discussingaspects\ + \ of its design and construction. The paper concludeswith an outline of the Orbophone\ + \ version 2.},\n address = {Paris, France},\n author = {Lock, Damien and Schiemer,\ + \ Greg},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176967},\n issn = {2220-4806},\n\ + \ keywords = {Immersive Sound; Multi-channel Sound; Loud-speaker Array; Multimedia;\ + \ Streaming Media; Real-Time Media Performance; Sound Installation. 
},\n pages\ + \ = {89--92},\n title = {Orbophone: a New Interface for Radiating Sound and Image},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_089.pdf},\n year = {2006}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178546 + doi: 10.5281/zenodo.1176967 issn: 2220-4806 - keywords: 'Music performance analysis, Music education, Real-time score following' - month: May - pages: 120--121 - publisher: 'Graduate School of Culture Technology, KAIST' - title: A Musical Performance Evaluation System for Beginner Musician based on Real-time - Score Following - url: http://www.nime.org/proceedings/2013/nime2013_60.pdf - year: 2013 + keywords: 'Immersive Sound; Multi-channel Sound; Loud-speaker Array; Multimedia; + Streaming Media; Real-Time Media Performance; Sound Installation. ' + pages: 89--92 + title: 'Orbophone: a New Interface for Radiating Sound and Image' + url: http://www.nime.org/proceedings/2006/nime2006_089.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Hindle2013 - abstract: 'Audience participation in computer music has long been limited byresources - such as sensor technology or the material goods necessary toshare such an instrument. - A recent paradigm is to take advantageof the incredible popularity of the smart-phone, - a pocket sizedcomputer, and other mobile devices, to provide the audience aninterface - into a computer music instrument. In this paper we discuss amethod of sharing - a computer music instrument''s interface with anaudience to allow them to interact - via their smartphone. We propose amethod that is relatively cross-platform and - device-agnostic, yetstill allows for a rich user-interactive experience. By emulating - acaptive-portal or hotspot we reduce the adoptability issues and configurationproblems - facing performers and their audience. We share ourexperiences with this system, - as well as an implementation of thesystem itself.' 
- address: 'Daejeon, Republic of Korea' - author: Abram Hindle - bibtex: "@inproceedings{Hindle2013,\n abstract = {Audience participation in computer\ - \ music has long been limited byresources such as sensor technology or the material\ - \ goods necessary toshare such an instrument. A recent paradigm is to take advantageof\ - \ the incredible popularity of the smart-phone, a pocket sizedcomputer, and other\ - \ mobile devices, to provide the audience aninterface into a computer music instrument.\ - \ In this paper we discuss amethod of sharing a computer music instrument's interface\ - \ with anaudience to allow them to interact via their smartphone. We propose amethod\ - \ that is relatively cross-platform and device-agnostic, yetstill allows for a\ - \ rich user-interactive experience. By emulating acaptive-portal or hotspot we\ - \ reduce the adoptability issues and configurationproblems facing performers and\ - \ their audience. We share ourexperiences with this system, as well as an implementation\ - \ of thesystem itself.},\n address = {Daejeon, Republic of Korea},\n author =\ - \ {Abram Hindle},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178550},\n\ - \ issn = {2220-4806},\n keywords = {Wifi, Smartphone, Audience Interaction, Adoption,\ - \ Captive Portal, Multi-User, Hotspot},\n month = {May},\n pages = {174--179},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {{SW}ARMED:\ - \ Captive Portals, Mobile Devices, and Audience Participation in Multi-User Music\ - \ Performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_62.pdf},\n\ - \ year = {2013}\n}\n" + ID: Kartadinata2006 + abstract: 'The gluion is a sensor interface that was designed to overcomesome of + the limitations of more traditional designs based onmicrocontrollers, which only + provide a small, fixed number ofdigital modules such as counters and serial interfaces. 
+ These areoften required to handle sensors where the physical parametercannot easily + be converted into a voltage. Other sensors arepacked into modules that include + converters and communicatevia SPI or I2C. Finallly, many designs require outputcapabilities + beyond simple on/off.The gluion approaches these challenges thru its FPGA-baseddesign + which allows for a large number of digital I/O modules.It also provides superior + flexibility regarding theirconfiguration, resolution, and functionality. In addition, + theFPGA enables a software implementation of the host link --- inthe case of the + gluion the OSC protocol as well as theunderlying Ethernet layers.' + address: 'Paris, France' + author: 'Kartadinata, Sukandar' + bibtex: "@inproceedings{Kartadinata2006,\n abstract = {The gluion is a sensor interface\ + \ that was designed to overcomesome of the limitations of more traditional designs\ + \ based onmicrocontrollers, which only provide a small, fixed number ofdigital\ + \ modules such as counters and serial interfaces. These areoften required to handle\ + \ sensors where the physical parametercannot easily be converted into a voltage.\ + \ Other sensors arepacked into modules that include converters and communicatevia\ + \ SPI or I2C. Finallly, many designs require outputcapabilities beyond simple\ + \ on/off.The gluion approaches these challenges thru its FPGA-baseddesign which\ + \ allows for a large number of digital I/O modules.It also provides superior flexibility\ + \ regarding theirconfiguration, resolution, and functionality. 
In addition, theFPGA\ + \ enables a software implementation of the host link --- inthe case of the gluion\ + \ the OSC protocol as well as theunderlying Ethernet layers.},\n address = {Paris,\ + \ France},\n author = {Kartadinata, Sukandar},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176937},\n issn = {2220-4806},\n keywords = {actuators,digital\ + \ sensors,fpga,osc,sensor interfaces},\n pages = {93--96},\n title = {The Gluion\ + \ Advantages of an {FPGA}-based Sensor Interface},\n url = {http://www.nime.org/proceedings/2006/nime2006_093.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178550 + doi: 10.5281/zenodo.1176937 issn: 2220-4806 - keywords: 'Wifi, Smartphone, Audience Interaction, Adoption, Captive Portal, Multi-User, - Hotspot' - month: May - pages: 174--179 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'SWARMED: Captive Portals, Mobile Devices, and Audience Participation in - Multi-User Music Performance' - url: http://www.nime.org/proceedings/2013/nime2013_62.pdf - year: 2013 + keywords: 'actuators,digital sensors,fpga,osc,sensor interfaces' + pages: 93--96 + title: The Gluion Advantages of an FPGA-based Sensor Interface + url: http://www.nime.org/proceedings/2006/nime2006_093.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Park2013 - abstract: 'Since Euler''s development of the Tonnetz in 1739, musicians, composers - and instrument designers have been fascinated with the concept of musicalisomorphism, - the idea that by arranging tones by their harmonic relationships rather than by - their physical properties, the common shapes of musical constructs will appear, - facilitating learning and new ways of exploring harmonic spaces. 
The construction - of isomorphic instruments, beyond limited square isomorphisms present in many - stringed instruments, has been a challenge in the past for two reasons: The first - problem, that of re-arranging note actuators from their sounding elements, has - been solved by digital instrument design. The second, more conceptual problem, - is that only a single isomorphism can be designed for any one instrument, requiring - the instrument designer (as well as composer and performer) to "lock in" to a - single isomorphism, or to have a different instrument for each isomorphism in - order to experiment. Musix (an iOS application) and Rainboard (a physical device) - are two new musical instruments built to overcome this and other limitations of - existing isomorphic instruments. Musix was developed to allow experimentation - with a wide variety of different isomorphic layouts, to assess the advantages - and disadvantages of each. The Rainboard consists of a hexagonal array of arcade - buttons embedded with RGB-LEDs, which are used to indicate characteristics of - the isomorphism currently in use on the Rainboard. The creation of these two instruments/experimentation - platforms allows for isomorphic layouts to be explored in waysthat are not possible - with existing instruments.' - address: 'Daejeon, Republic of Korea' - author: Brett Park and David Gerhard - bibtex: "@inproceedings{Park2013,\n abstract = {Since Euler's development of the\ - \ Tonnetz in 1739, musicians, composers and instrument designers have been fascinated\ - \ with the concept of musicalisomorphism, the idea that by arranging tones by\ - \ their harmonic relationships rather than by their physical properties, the common\ - \ shapes of musical constructs will appear, facilitating learning and new ways\ - \ of exploring harmonic spaces. 
The construction of isomorphic instruments, beyond\ - \ limited square isomorphisms present in many stringed instruments, has been a\ - \ challenge in the past for two reasons: The first problem, that of re-arranging\ - \ note actuators from their sounding elements, has been solved by digital instrument\ - \ design. The second, more conceptual problem, is that only a single isomorphism\ - \ can be designed for any one instrument, requiring the instrument designer (as\ - \ well as composer and performer) to \"lock in\" to a single isomorphism, or to\ - \ have a different instrument for each isomorphism in order to experiment. Musix\ - \ (an iOS application) and Rainboard (a physical device) are two new musical instruments\ - \ built to overcome this and other limitations of existing isomorphic instruments.\ - \ Musix was developed to allow experimentation with a wide variety of different\ - \ isomorphic layouts, to assess the advantages and disadvantages of each. The\ - \ Rainboard consists of a hexagonal array of arcade buttons embedded with RGB-LEDs,\ - \ which are used to indicate characteristics of the isomorphism currently in use\ - \ on the Rainboard. 
The creation of these two instruments/experimentation platforms\ - \ allows for isomorphic layouts to be explored in waysthat are not possible with\ - \ existing instruments.},\n address = {Daejeon, Republic of Korea},\n author =\ - \ {Brett Park and David Gerhard},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178632},\n\ - \ issn = {2220-4806},\n keywords = {isomorphic, mobile application, hexagon, keyboard},\n\ - \ month = {May},\n pages = {319--324},\n publisher = {Graduate School of Culture\ - \ Technology, KAIST},\n title = {Rainboard and Musix: Building dynamic isomorphic\ - \ interfaces},\n url = {http://www.nime.org/proceedings/2013/nime2013_65.pdf},\n\ - \ year = {2013}\n}\n" + ID: Freed2006 + abstract: A new sensor integration system and its first incarnation i sdescribed. + As well as supporting existing analog sensorarrays a new architecture allows for + easy integration of thenew generation of low-cost digital sensors used in computermusic + performance instruments and installation art. + address: 'Paris, France' + author: 'Freed, Adrian and Avizienis, Rimas and Wright, Matthew' + bibtex: "@inproceedings{Freed2006,\n abstract = {A new sensor integration system\ + \ and its first incarnation i sdescribed. 
As well as supporting existing analog\ + \ sensorarrays a new architecture allows for easy integration of thenew generation\ + \ of low-cost digital sensors used in computermusic performance instruments and\ + \ installation art.},\n address = {Paris, France},\n author = {Freed, Adrian and\ + \ Avizienis, Rimas and Wright, Matthew},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176903},\n\ + \ issn = {2220-4806},\n keywords = {Gesture, sensor, MEMS, FPGA, network, OSC,\ + \ configurability },\n pages = {97--100},\n title = {Beyond 0-5{V}: Expanding\ + \ Sensor Integration Architectures},\n url = {http://www.nime.org/proceedings/2006/nime2006_097.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178632 + doi: 10.5281/zenodo.1176903 issn: 2220-4806 - keywords: 'isomorphic, mobile application, hexagon, keyboard' - month: May - pages: 319--324 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Rainboard and Musix: Building dynamic isomorphic interfaces' - url: http://www.nime.org/proceedings/2013/nime2013_65.pdf - year: 2013 + keywords: 'Gesture, sensor, MEMS, FPGA, network, OSC, configurability ' + pages: 97--100 + title: 'Beyond 0-5V: Expanding Sensor Integration Architectures' + url: http://www.nime.org/proceedings/2006/nime2006_097.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: ElShimy2013 - abstract: 'For a number of years, musicians in different locations have been able - toperform with one another over a network as though present on the same stage.However, - rather than attempt to re-create an environment for Network MusicPerformance (NMP) - that mimics co-present performance as closely as possible, wepropose focusing - on providing musicians with additional controls that can helpincrease the level - of interaction between them. 
To this end, we have developeda reactive environment - for distributed performance that provides participantsdynamic, real-time control - over several aspects of their performance, enablingthem to change volume levels - and experience exaggerated stereo panning. Inaddition, our reactive environment - reinforces a feeling of a ``shared space'''' between musicians. It differs most - notably from standard ventures into thedesign of novel musical interfaces and - installations in its reliance onuser-centric methodologies borrowed from the field - of Human-ComputerInteraction (HCI). Not only does this research enable us to closely - examine thecommunicative aspects of performance, it also allows us to explore - newinterpretations of the network as a performance space. This paper describes - themotivation and background behind our project, the work that has been undertakentowards - its realization and the future steps that have yet to be explored.' - address: 'Daejeon, Republic of Korea' - author: Dalia El-Shimy and Jeremy R. Cooperstock - bibtex: "@inproceedings{ElShimy2013,\n abstract = {For a number of years, musicians\ - \ in different locations have been able toperform with one another over a network\ - \ as though present on the same stage.However, rather than attempt to re-create\ - \ an environment for Network MusicPerformance (NMP) that mimics co-present performance\ - \ as closely as possible, wepropose focusing on providing musicians with additional\ - \ controls that can helpincrease the level of interaction between them. To this\ - \ end, we have developeda reactive environment for distributed performance that\ - \ provides participantsdynamic, real-time control over several aspects of their\ - \ performance, enablingthem to change volume levels and experience exaggerated\ - \ stereo panning. Inaddition, our reactive environment reinforces a feeling of\ - \ a ``shared space'' between musicians. 
It differs most notably from standard\ - \ ventures into thedesign of novel musical interfaces and installations in its\ - \ reliance onuser-centric methodologies borrowed from the field of Human-ComputerInteraction\ - \ (HCI). Not only does this research enable us to closely examine thecommunicative\ - \ aspects of performance, it also allows us to explore newinterpretations of the\ - \ network as a performance space. This paper describes themotivation and background\ - \ behind our project, the work that has been undertakentowards its realization\ - \ and the future steps that have yet to be explored.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Dalia El-Shimy and Jeremy R. Cooperstock},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178506},\n issn = {2220-4806},\n month\ - \ = {May},\n pages = {158--163},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {Reactive Environment for Network Music Performance},\n url\ - \ = {http://www.nime.org/proceedings/2013/nime2013_66.pdf},\n year = {2013}\n\ + ID: Johnson2006 + abstract: 'How can we provide interfaces to synthesis algorithms thatwill allow + us to manipulate timbre directly, using the sametimbre-words that are used by + human musicians to communicate about timbre? This paper describes ongoingwork + that uses machine learning methods (principally genetic algorithms and neural + networks) to learn (1) to recognise timbral characteristics of sound and (2) to + adjust timbral characteristics of existing synthesized sounds.' + address: 'Paris, France' + author: 'Johnson, Colin G. 
and Gounaropoulos, Alex' + bibtex: "@inproceedings{Johnson2006,\n abstract = {How can we provide interfaces\ + \ to synthesis algorithms thatwill allow us to manipulate timbre directly, using\ + \ the sametimbre-words that are used by human musicians to communicate about timbre?\ + \ This paper describes ongoingwork that uses machine learning methods (principally\ + \ genetic algorithms and neural networks) to learn (1) to recognise timbral characteristics\ + \ of sound and (2) to adjust timbral characteristics of existing synthesized sounds.},\n\ + \ address = {Paris, France},\n author = {Johnson, Colin G. and Gounaropoulos,\ + \ Alex},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176933},\n issn = {2220-4806},\n\ + \ keywords = {timbre; natural language; neural networks },\n pages = {101--102},\n\ + \ title = {Timbre Interfaces using Adjectives and Adverbs},\n url = {http://www.nime.org/proceedings/2006/nime2006_101.pdf},\n\ + \ year = {2006}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176933 + issn: 2220-4806 + keywords: 'timbre; natural language; neural networks ' + pages: 101--102 + title: Timbre Interfaces using Adjectives and Adverbs + url: http://www.nime.org/proceedings/2006/nime2006_101.pdf + year: 2006 + + +- ENTRYTYPE: inproceedings + ID: Stewart2006 + address: 'Paris, France' + author: 'Stewart, D. Andrew' + bibtex: "@inproceedings{Stewart2006,\n address = {Paris, France},\n author = {Stewart,\ + \ D. 
Andrew},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177011},\n issn\ + \ = {2220-4806},\n keywords = {composition, process, materials, gesture, controller,\ + \ cross- modal interaction },\n pages = {103--105},\n title = {SonicJumper Composer},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_103.pdf},\n year = {2006}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178506 + doi: 10.5281/zenodo.1177011 issn: 2220-4806 - month: May - pages: 158--163 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Reactive Environment for Network Music Performance - url: http://www.nime.org/proceedings/2013/nime2013_66.pdf - year: 2013 + keywords: 'composition, process, materials, gesture, controller, cross- modal interaction ' + pages: 103--105 + title: SonicJumper Composer + url: http://www.nime.org/proceedings/2006/nime2006_103.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Johnson2013 - abstract: 'This paper presents a new technique for interface-driven diffusion performance. - Details outlining the development of a new tabletop surface-based performance - interface, named tactile.space, are discussed. User interface and amplitude panning - processes employed in the creation of tactile.space are focused upon,and are followed - by a user study-based evaluation of the interface. It is hoped that the techniques - described in this paper afford performers and composers an enhanced level of creative - expression in the diffusion performance practice. This paper introduces and evaluates - tactile.space, a multi-touch performance interface for diffusion built on the - BrickTable. It describes how tactile.space implements Vector Base Amplitude Panning - to achieve real-time source positioning. 
The final section of this paper presents - the findings of a userstudy that was conducted by those who performed with the - interface, evaluating the interface as a performance tool with a focus on the - increased creative expression the interface affords, and directly comparing it - to the traditional diffusion user interface.' - address: 'Daejeon, Republic of Korea' - author: Bridget Johnson and Ajay Kapur - bibtex: "@inproceedings{Johnson2013,\n abstract = {This paper presents a new technique\ - \ for interface-driven diffusion performance. Details outlining the development\ - \ of a new tabletop surface-based performance interface, named tactile.space,\ - \ are discussed. User interface and amplitude panning processes employed in the\ - \ creation of tactile.space are focused upon,and are followed by a user study-based\ - \ evaluation of the interface. It is hoped that the techniques described in this\ - \ paper afford performers and composers an enhanced level of creative expression\ - \ in the diffusion performance practice. This paper introduces and evaluates tactile.space,\ - \ a multi-touch performance interface for diffusion built on the BrickTable. It\ - \ describes how tactile.space implements Vector Base Amplitude Panning to achieve\ - \ real-time source positioning. 
The final section of this paper presents the findings\ - \ of a userstudy that was conducted by those who performed with the interface,\ - \ evaluating the interface as a performance tool with a focus on the increased\ - \ creative expression the interface affords, and directly comparing it to the\ - \ traditional diffusion user interface.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Bridget Johnson and Ajay Kapur},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178570},\n issn = {2220-4806},\n keywords = {Multi touch, diffusion,\ - \ VBAP, tabletop surface},\n month = {May},\n pages = {213--216},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {MULTI-TOUCH INTERFACES\ - \ FOR PHANTOM SOURCE POSITIONING IN LIVE SOUND DIFFUSION},\n url = {http://www.nime.org/proceedings/2013/nime2013_75.pdf},\n\ - \ year = {2013}\n}\n" + ID: Steiner2006 + address: 'Paris, France' + author: 'Steiner, Hans-Christoph' + bibtex: "@inproceedings{Steiner2006,\n address = {Paris, France},\n author = {Steiner,\ + \ Hans-Christoph},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177009},\n\ + \ issn = {2220-4806},\n pages = {106--109},\n title = {Towards a Catalog and Software\ + \ Library of Mapping Methods},\n url = {http://www.nime.org/proceedings/2006/nime2006_106.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178570 + doi: 10.5281/zenodo.1177009 issn: 2220-4806 - keywords: 'Multi touch, diffusion, VBAP, tabletop surface' - month: May - pages: 213--216 - publisher: 'Graduate School of Culture Technology, KAIST' - title: MULTI-TOUCH INTERFACES FOR PHANTOM SOURCE POSITIONING IN LIVE SOUND DIFFUSION - url: http://www.nime.org/proceedings/2013/nime2013_75.pdf - year: 2013 
+ pages: 106--109 + title: Towards a Catalog and Software Library of Mapping Methods + url: http://www.nime.org/proceedings/2006/nime2006_106.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Lui2013 - abstract: 'Music is expressive and hard to be described by words. Learning music - istherefore not a straightforward task especially for vocal music such as humanbeatboxing. - People usually learn beatboxing in the traditional way of imitatingaudio sample - without steps and instructions. Spectrogram contains a lot ofinformation about - audio, but it is too complicated to be understood inreal-time. Reinforcement learning - is a psychological method, which makes use ofreward and/or punishment as stimulus - to train the decision-making process ofhuman. We propose a novel music learning - approach based on the reinforcementlearning method, which makes use of compact - and easy-to-read spectruminformation as visual clue to assist human beatboxing - learning on smartphone.Experimental result shows that the visual information is - easy to understand inreal-time, which improves the effectiveness of beatboxing - self-learning.' - address: 'Daejeon, Republic of Korea' - author: Simon Lui - bibtex: "@inproceedings{Lui2013,\n abstract = {Music is expressive and hard to be\ - \ described by words. Learning music istherefore not a straightforward task especially\ - \ for vocal music such as humanbeatboxing. People usually learn beatboxing in\ - \ the traditional way of imitatingaudio sample without steps and instructions.\ - \ Spectrogram contains a lot ofinformation about audio, but it is too complicated\ - \ to be understood inreal-time. Reinforcement learning is a psychological method,\ - \ which makes use ofreward and/or punishment as stimulus to train the decision-making\ - \ process ofhuman. 
We propose a novel music learning approach based on the reinforcementlearning\ - \ method, which makes use of compact and easy-to-read spectruminformation as visual\ - \ clue to assist human beatboxing learning on smartphone.Experimental result shows\ - \ that the visual information is easy to understand inreal-time, which improves\ - \ the effectiveness of beatboxing self-learning.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Simon Lui},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178600},\n\ - \ issn = {2220-4806},\n keywords = {Audio analysis, music learning tool, reinforcement\ - \ learning, smartphone app, audio information retrieval.},\n month = {May},\n\ - \ pages = {25--28},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {A Compact Spectrum-Assisted Human Beatboxing Reinforcement Learning\ - \ Tool On Smartphone},\n url = {http://www.nime.org/proceedings/2013/nime2013_79.pdf},\n\ - \ year = {2013}\n}\n" + ID: Kobori2006 + address: 'Paris, France' + author: 'Kobori, Daisuke and Kagawa, Kojiro and Iida, Makoto and Arakawa, Chuichi' + bibtex: "@inproceedings{Kobori2006,\n address = {Paris, France},\n author = {Kobori,\ + \ Daisuke and Kagawa, Kojiro and Iida, Makoto and Arakawa, Chuichi},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176947},\n issn = {2220-4806},\n pages\ + \ = {110--113},\n title = {LINE: Interactive Sound and Light Installation},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_110.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178600 + doi: 10.5281/zenodo.1176947 issn: 2220-4806 - keywords: 'Audio analysis, music learning tool, reinforcement learning, smartphone - app, audio information retrieval.' 
- month: May - pages: 25--28 - publisher: 'Graduate School of Culture Technology, KAIST' - title: A Compact Spectrum-Assisted Human Beatboxing Reinforcement Learning Tool - On Smartphone - url: http://www.nime.org/proceedings/2013/nime2013_79.pdf - year: 2013 + pages: 110--113 + title: 'LINE: Interactive Sound and Light Installation' + url: http://www.nime.org/proceedings/2006/nime2006_110.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Lo2013 - abstract: 'Mobile DJ is a music-listening system that allows multiple users to interactand - collaboratively contribute to a single song over a social network. Activelistening - through a tangible interface facilitates users to manipulate musicaleffects, such - as incorporating chords or ``scratching'''' the record. Acommunication and interaction - server further enables multiple users to connectover the Internet and collaborate - and interact through their music. User testsindicate that the device is successful - at facilitating user immersion into theactive listening experience, and that users - enjoy the added sensory input aswell as the novel way of interacting with the - music and each other.' - address: 'Daejeon, Republic of Korea' - author: Kenneth W.K. Lo and Chi Kin Lau and Michael Xuelin Huang and Wai Wa Tang - and Grace Ngai and Stephen C.F. Chan - bibtex: "@inproceedings{Lo2013,\n abstract = {Mobile DJ is a music-listening system\ - \ that allows multiple users to interactand collaboratively contribute to a single\ - \ song over a social network. Activelistening through a tangible interface facilitates\ - \ users to manipulate musicaleffects, such as incorporating chords or ``scratching''\ - \ the record. 
Acommunication and interaction server further enables multiple users\ - \ to connectover the Internet and collaborate and interact through their music.\ - \ User testsindicate that the device is successful at facilitating user immersion\ - \ into theactive listening experience, and that users enjoy the added sensory\ - \ input aswell as the novel way of interacting with the music and each other.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Kenneth W.K. Lo and Chi\ - \ Kin Lau and Michael Xuelin Huang and Wai Wa Tang and Grace Ngai and Stephen\ - \ C.F. Chan},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178598},\n issn\ - \ = {2220-4806},\n keywords = {Mobile, music, interaction design, tangible user\ - \ interface},\n month = {May},\n pages = {217--222},\n publisher = {Graduate School\ - \ of Culture Technology, KAIST},\n title = {Mobile DJ: a Tangible, Mobile Platform\ - \ for Active and Collaborative Music Listening},\n url = {http://www.nime.org/proceedings/2013/nime2013_81.pdf},\n\ - \ year = {2013}\n}\n" + ID: BryanKinns2006 + abstract: 'This paper reports on ongoing studies of the design and use ofsupport + for remote group music making. In this paper weoutline the initial findings of + a recent study focusing on thefunction of decay of contributions in collaborative + musicmaking. Findings indicate that persistent contributions lendthemselves to + individual musical composition and learningnovel interfaces, whilst contributions + that quickly decayengender a more focused musical interaction in experiencedparticipants.' + address: 'Paris, France' + author: 'Bryan-Kinns, Nick and Healey, Patrick G.' + bibtex: "@inproceedings{BryanKinns2006,\n abstract = {This paper reports on ongoing\ + \ studies of the design and use ofsupport for remote group music making. 
In this\ + \ paper weoutline the initial findings of a recent study focusing on thefunction\ + \ of decay of contributions in collaborative musicmaking. Findings indicate that\ + \ persistent contributions lendthemselves to individual musical composition and\ + \ learningnovel interfaces, whilst contributions that quickly decayengender a\ + \ more focused musical interaction in experiencedparticipants.},\n address = {Paris,\ + \ France},\n author = {Bryan-Kinns, Nick and Healey, Patrick G.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176885},\n issn = {2220-4806},\n keywords\ + \ = {creativity,design,group interaction,music improvisation},\n pages = {114--117},\n\ + \ title = {Decay in Collaborative Music Making},\n url = {http://www.nime.org/proceedings/2006/nime2006_114.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178598 + doi: 10.5281/zenodo.1176885 issn: 2220-4806 - keywords: 'Mobile, music, interaction design, tangible user interface' - month: May - pages: 217--222 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'Mobile DJ: a Tangible, Mobile Platform for Active and Collaborative Music - Listening' - url: http://www.nime.org/proceedings/2013/nime2013_81.pdf - year: 2013 + keywords: 'creativity,design,group interaction,music improvisation' + pages: 114--117 + title: Decay in Collaborative Music Making + url: http://www.nime.org/proceedings/2006/nime2006_114.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Caramiaux2013 - abstract: 'We present an overview of machine learning (ML) techniques and theirapplication - in interactive music and new digital instruments design. We firstgive to the non-specialist - reader an introduction to two ML tasks,classification and regression, that are - particularly relevant for gesturalinteraction. 
We then present a review of the - literature in current NIMEresearch that uses ML in musical gesture analysis and - gestural sound control.We describe the ways in which machine learning is useful - for creatingexpressive musical interaction, and in turn why live music performance - presentsa pertinent and challenging use case for machine learning.' - address: 'Daejeon, Republic of Korea' - author: Baptiste Caramiaux and Atau Tanaka - bibtex: "@inproceedings{Caramiaux2013,\n abstract = {We present an overview of machine\ - \ learning (ML) techniques and theirapplication in interactive music and new digital\ - \ instruments design. We firstgive to the non-specialist reader an introduction\ - \ to two ML tasks,classification and regression, that are particularly relevant\ - \ for gesturalinteraction. We then present a review of the literature in current\ - \ NIMEresearch that uses ML in musical gesture analysis and gestural sound control.We\ - \ describe the ways in which machine learning is useful for creatingexpressive\ - \ musical interaction, and in turn why live music performance presentsa pertinent\ - \ and challenging use case for machine learning.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Baptiste Caramiaux and Atau Tanaka},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178490},\n issn = {2220-4806},\n keywords = {Machine\ - \ Learning, Data mining, Musical Expression, Musical Gestures, Analysis, Control,\ - \ Gesture, Sound},\n month = {May},\n pages = {513--518},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {Machine Learning of Musical\ - \ Gestures},\n url = {http://www.nime.org/proceedings/2013/nime2013_84.pdf},\n\ - \ year = {2013}\n}\n" + ID: Gurevich2006 + address: 'Paris, France' + author: 'Gurevich, Michael' + bibtex: "@inproceedings{Gurevich2006,\n address = {Paris, France},\n author = {Gurevich,\ + \ 
Michael},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176915},\n issn\ + \ = {2220-4806},\n keywords = {Collaborative interface, remote jamming, network\ + \ music, interaction design, novice, media space INTRODUCTION Most would agree\ + \ that music is an inherently social ac- tivity [30], but since the },\n pages\ + \ = {118--123},\n title = {JamSpace: Designing A Collaborative Networked Music\ + \ Space for Novices},\n url = {http://www.nime.org/proceedings/2006/nime2006_118.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178490 + doi: 10.5281/zenodo.1176915 issn: 2220-4806 - keywords: 'Machine Learning, Data mining, Musical Expression, Musical Gestures, - Analysis, Control, Gesture, Sound' - month: May - pages: 513--518 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Machine Learning of Musical Gestures - url: http://www.nime.org/proceedings/2013/nime2013_84.pdf - year: 2013 + keywords: 'Collaborative interface, remote jamming, network music, interaction design, + novice, media space INTRODUCTION Most would agree that music is an inherently + social ac- tivity [30], but since the ' + pages: 118--123 + title: 'JamSpace: Designing A Collaborative Networked Music Space for Novices' + url: http://www.nime.org/proceedings/2006/nime2006_118.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Oh2013 - abstract: 'Significant progress in the domains of speech- and singing-synthesis - has enhanced communicative potential of machines. To make computers more vocallyexpressive, - however, we need a deeper understanding of how nonlinguistic social signals are - patterned and perceived. 
In this paper, we focus on laughter expressions: how - a phrase of vocalized notes that we call ''''laughter'''' may bemodeled and performed - to implicate nuanced meaning imbued in the acousticsignal. In designing our model, - we emphasize (1) using high-level descriptors as control parameters, (2) enabling - real-time performable laughter, and (3) prioritizing expressiveness over realism. - We present an interactive systemimplemented in ChucK that allows users to systematically - play with the musicalingredients of laughter. A crowd sourced study on the perception - of synthesized laughter showed that our model is capable of generating a range - of laughter types, suggesting an exciting potential for expressive laughter synthesis.' - address: 'Daejeon, Republic of Korea' - author: Jieun Oh and Ge Wang - bibtex: "@inproceedings{Oh2013,\n abstract = {Significant progress in the domains\ - \ of speech- and singing-synthesis has enhanced communicative potential of machines.\ - \ To make computers more vocallyexpressive, however, we need a deeper understanding\ - \ of how nonlinguistic social signals are patterned and perceived. In this paper,\ - \ we focus on laughter expressions: how a phrase of vocalized notes that we call\ - \ ''laughter'' may bemodeled and performed to implicate nuanced meaning imbued\ - \ in the acousticsignal. In designing our model, we emphasize (1) using high-level\ - \ descriptors as control parameters, (2) enabling real-time performable laughter,\ - \ and (3) prioritizing expressiveness over realism. We present an interactive\ - \ systemimplemented in ChucK that allows users to systematically play with the\ - \ musicalingredients of laughter. 
A crowd sourced study on the perception of synthesized\ - \ laughter showed that our model is capable of generating a range of laughter\ - \ types, suggesting an exciting potential for expressive laughter synthesis.},\n\ - \ address = {Daejeon, Republic of Korea},\n author = {Jieun Oh and Ge Wang},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178626},\n issn = {2220-4806},\n\ - \ keywords = {laughter, vocalization, synthesis model, real-time controller, interface\ - \ for musical expression},\n month = {May},\n pages = {190--195},\n publisher\ - \ = {Graduate School of Culture Technology, KAIST},\n title = {LOLOL: Laugh Out\ - \ Loud On Laptop},\n url = {http://www.nime.org/proceedings/2013/nime2013_86.pdf},\n\ - \ year = {2013}\n}\n" + ID: Knapp2006 + abstract: "In this paper, we describe the networking of multiple Integral Music\ + \ Controllers (IMCs) to enable an entirely new method for creating music by tapping\ + \ into the composite gestures and emotions of not just one, but many performers.\ + \ The concept and operation of an IMC is reviewed as well as its use in a network\ + \ of IMC controllers. We then introduce a new technique of Integral Music Control\ + \ by assessing the composite gesture(s) and emotion(s) of a group of performers\ + \ through the use of a wireless mesh network. The Telemuse, an IMC designed precisely\ + \ for this kind of performance, is described and its use in a new musical performance\ + \ project under development by the ,\n,\nauthors is discussed. " + address: 'Paris, France' + author: 'Knapp, Benjamin and Cook, Perry R.' + bibtex: "@inproceedings{Knapp2006,\n abstract = {In this paper, we describe the\ + \ networking of multiple Integral Music Controllers (IMCs) to enable an entirely\ + \ new method for creating music by tapping into the composite gestures and emotions\ + \ of not just one, but many performers. 
The concept and operation of an IMC is\ + \ reviewed as well as its use in a network of IMC controllers. We then introduce\ + \ a new technique of Integral Music Control by assessing the composite gesture(s)\ + \ and emotion(s) of a group of performers through the use of a wireless mesh network.\ + \ The Telemuse, an IMC designed precisely for this kind of performance, is described\ + \ and its use in a new musical performance project under development by the ,\n\ + ,\nauthors is discussed. },\n address = {Paris, France},\n author = {Knapp, Benjamin\ + \ and Cook, Perry R.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176943},\n\ + \ issn = {2220-4806},\n keywords = {Community-Institutional Relations,Health Services\ + \ Accessibility,Medically Uninsured,Organizational Case Studies,Primary Health\ + \ Care,Public-Private Sector Partnerships,San Francisco},\n pages = {124--128},\n\ + \ title = {Creating a Network of Integral Music Controllers},\n url = {http://www.nime.org/proceedings/2006/nime2006_124.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178626 + doi: 10.5281/zenodo.1176943 issn: 2220-4806 - keywords: 'laughter, vocalization, synthesis model, real-time controller, interface - for musical expression' - month: May - pages: 190--195 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'LOLOL: Laugh Out Loud On Laptop' - url: http://www.nime.org/proceedings/2013/nime2013_86.pdf - year: 2013 + keywords: 'Community-Institutional Relations,Health Services Accessibility,Medically + Uninsured,Organizational Case Studies,Primary Health Care,Public-Private Sector + Partnerships,San Francisco' + pages: 124--128 + title: Creating a Network of Integral Music Controllers + url: http://www.nime.org/proceedings/2006/nime2006_124.pdf + year: 2006 - ENTRYTYPE: 
inproceedings - ID: Donnarumma2013 - abstract: 'We present the first combined use of the electromyogram (EMG) andmechanomyogram - (MMG), two biosignals that result from muscular activity, forinteractive music - applications. We exploit differences between these twosignals, as reported in - the biomedical literature, to create bi-modalsonification and sound synthesis - mappings that allow performers to distinguishthe two components in a single complex - arm gesture. We study non-expertplayers'' ability to articulate the different - modalities. Results show thatpurposely designed gestures and mapping techniques - enable novices to rapidlylearn to independently control the two biosignals.' - address: 'Daejeon, Republic of Korea' - author: Marco Donnarumma and Baptiste Caramiaux and Atau Tanaka - bibtex: "@inproceedings{Donnarumma2013,\n abstract = {We present the first combined\ - \ use of the electromyogram (EMG) andmechanomyogram (MMG), two biosignals that\ - \ result from muscular activity, forinteractive music applications. We exploit\ - \ differences between these twosignals, as reported in the biomedical literature,\ - \ to create bi-modalsonification and sound synthesis mappings that allow performers\ - \ to distinguishthe two components in a single complex arm gesture. We study non-expertplayers'\ - \ ability to articulate the different modalities. 
Results show thatpurposely designed\ - \ gestures and mapping techniques enable novices to rapidlylearn to independently\ - \ control the two biosignals.},\n address = {Daejeon, Republic of Korea},\n author\ - \ = {Marco Donnarumma and Baptiste Caramiaux and Atau Tanaka},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178504},\n issn = {2220-4806},\n keywords = {NIME, sensorimotor\ - \ system, EMG, MMG, biosignal, multimodal, mapping},\n month = {May},\n pages\ - \ = {128--131},\n publisher = {Graduate School of Culture Technology, KAIST},\n\ - \ title = {Muscular Interactions. Combining {EMG} and MMG sensing for musical\ - \ practice},\n url = {http://www.nime.org/proceedings/2013/nime2013_90.pdf},\n\ - \ year = {2013}\n}\n" + ID: Burtner2006 + abstract: "This paper explores the use of perturbation in designing multiperformer\ + \ or multi-agent interactive musical interfaces. A problem with the multi-performer\ + \ approach is how to cohesively organize the independent data inputs into useable\ + \ control information for synthesis engines. Perturbation has proven useful for\ + \ navigating multi-agent NIMEs. The ,\n,\nauthor's Windtree is discussed as an\ + \ example multi-performer instrument in which perturbation is used for multichannel\ + \ ecological modeling. The Windtree uses a physical system turbulence model controlled\ + \ in real time by four performers. " + address: 'Paris, France' + author: 'Burtner, Matthew' + bibtex: "@inproceedings{Burtner2006,\n abstract = {This paper explores the use of\ + \ perturbation in designing multiperformer or multi-agent interactive musical\ + \ interfaces. A problem with the multi-performer approach is how to cohesively\ + \ organize the independent data inputs into useable control information for synthesis\ + \ engines. Perturbation has proven useful for navigating multi-agent NIMEs. 
The\ + \ ,\n,\nauthor's Windtree is discussed as an example multi-performer instrument\ + \ in which perturbation is used for multichannel ecological modeling. The Windtree\ + \ uses a physical system turbulence model controlled in real time by four performers.\ + \ },\n address = {Paris, France},\n author = {Burtner, Matthew},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176887},\n issn = {2220-4806},\n keywords = {interface,mapping,movement,multi-agent,multi-performer,music\ + \ composition,perturbation},\n pages = {129--133},\n title = {Perturbation Techniques\ + \ for Multi-Performer or Multi- Agent Interactive Musical Interfaces},\n url =\ + \ {http://www.nime.org/proceedings/2006/nime2006_129.pdf},\n year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178504 + doi: 10.5281/zenodo.1176887 issn: 2220-4806 - keywords: 'NIME, sensorimotor system, EMG, MMG, biosignal, multimodal, mapping' - month: May - pages: 128--131 - publisher: 'Graduate School of Culture Technology, KAIST' - title: Muscular Interactions. Combining EMG and MMG sensing for musical practice - url: http://www.nime.org/proceedings/2013/nime2013_90.pdf - year: 2013 + keywords: 'interface,mapping,movement,multi-agent,multi-performer,music composition,perturbation' + pages: 129--133 + title: Perturbation Techniques for Multi-Performer or Multi- Agent Interactive Musical + Interfaces + url: http://www.nime.org/proceedings/2006/nime2006_129.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Honigman2013 - abstract: 'This paper describes a new framework for music creation using 3D audio - andvisual techniques. It describes the Third Room, which uses a Kinect to placeusers - in a virtual environment to interact with new instruments for musicalexpression. 
- Users can also interact with smart objects, including the Ember(modified mbira - digital interface) and the Fluid (a wireless six degrees offreedom and touch controller). - This project also includes new techniques for 3Daudio connected to a 3D virtual - space using multi-channel speakers anddistributed robotic instruments.' - address: 'Daejeon, Republic of Korea' - author: Colin Honigman and Andrew Walton and Ajay Kapur - bibtex: "@inproceedings{Honigman2013,\n abstract = {This paper describes a new framework\ - \ for music creation using 3D audio andvisual techniques. It describes the Third\ - \ Room, which uses a Kinect to placeusers in a virtual environment to interact\ - \ with new instruments for musicalexpression. Users can also interact with smart\ - \ objects, including the Ember(modified mbira digital interface) and the Fluid\ - \ (a wireless six degrees offreedom and touch controller). This project also includes\ - \ new techniques for 3Daudio connected to a 3D virtual space using multi-channel\ - \ speakers anddistributed robotic instruments.},\n address = {Daejeon, Republic\ - \ of Korea},\n author = {Colin Honigman and Andrew Walton and Ajay Kapur},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178556},\n issn = {2220-4806},\n keywords\ - \ = {Kinect Camera, Third Space, Interface, Virtual Reality, Natural Interaction,\ - \ Robotics, Arduino},\n month = {May},\n pages = {29--34},\n publisher = {Graduate\ - \ School of Culture Technology, KAIST},\n title = {The Third Room: A {3D} Virtual\ - \ Music Framework},\n url = {http://www.nime.org/proceedings/2013/nime2013_92.pdf},\n\ - \ year = {2013}\n}\n" + ID: Aylward2006 + abstract: 'We describe the design of a system of compact, wireless sensor modules + meant to capture expressive motion whenworn at the wrists and ankles of a dancer. 
+ The sensors form ahigh-speed RF network geared toward real-time dataacquisition + from multiple devices simultaneously, enabling asmall dance ensemble to become + a collective interface formusic control. Each sensor node includes a 6-axis inertialmeasurement + unit (IMU) comprised of three orthogonalgyroscopes and accelerometers in order + to capture localdynamics, as well as a capacitive sensor to measure closerange + node-to-node proximity. The nodes may also beaugmented with other digital or analog + sensors. This paperdescribes application goals, presents the prototype hardwaredesign, + introduces concepts for feature extraction andinterpretation, and discusses early + test results.' + address: 'Paris, France' + author: 'Aylward, Ryan and Paradiso, Joseph A.' + bibtex: "@inproceedings{Aylward2006,\n abstract = {We describe the design of a system\ + \ of compact, wireless sensor modules meant to capture expressive motion whenworn\ + \ at the wrists and ankles of a dancer. The sensors form ahigh-speed RF network\ + \ geared toward real-time dataacquisition from multiple devices simultaneously,\ + \ enabling asmall dance ensemble to become a collective interface formusic control.\ + \ Each sensor node includes a 6-axis inertialmeasurement unit (IMU) comprised\ + \ of three orthogonalgyroscopes and accelerometers in order to capture localdynamics,\ + \ as well as a capacitive sensor to measure closerange node-to-node proximity.\ + \ The nodes may also beaugmented with other digital or analog sensors. 
This paperdescribes\ + \ application goals, presents the prototype hardwaredesign, introduces concepts\ + \ for feature extraction andinterpretation, and discusses early test results.},\n\ + \ address = {Paris, France},\n author = {Aylward, Ryan and Paradiso, Joseph A.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176865},\n issn = {2220-4806},\n\ + \ keywords = {Interactive dance, wearable sensor networks, inertial gesture tracking,\ + \ collective motion analysis, multi-user interface },\n pages = {134--139},\n\ + \ title = {Sensemble: A Wireless, Compact, Multi-User Sensor System for Interactive\ + \ Dance},\n url = {http://www.nime.org/proceedings/2006/nime2006_134.pdf},\n year\ + \ = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178556 + doi: 10.5281/zenodo.1176865 issn: 2220-4806 - keywords: 'Kinect Camera, Third Space, Interface, Virtual Reality, Natural Interaction, - Robotics, Arduino' - month: May - pages: 29--34 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'The Third Room: A 3D Virtual Music Framework' - url: http://www.nime.org/proceedings/2013/nime2013_92.pdf - year: 2013 + keywords: 'Interactive dance, wearable sensor networks, inertial gesture tracking, + collective motion analysis, multi-user interface ' + pages: 134--139 + title: 'Sensemble: A Wireless, Compact, Multi-User Sensor System for Interactive + Dance' + url: http://www.nime.org/proceedings/2006/nime2006_134.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Wolf2013 - abstract: 'As any computer user employs the Internet to accomplish everyday activities, - a flow of data packets moves across the network, forming their own patterns in - response to his or her actions. 
Artists and sound designers who are interested - in accessing that data to make music must currently possess low-level knowledge - of Internet protocols and spend signifi-cant effort working with low-level networking - code. We have created SonNet, a new software tool that lowers these practical - barriers to experimenting and composing with network data. SonNet executes packet-sniffng - and network connection state analysis automatically, and it includes an easy-touse - ChucK object that can be instantiated, customized, and queried from a user''s - own code. In this paper, we present the design and implementation of the SonNet - system, and we discuss a pilot evaluation of the system with computer music composers. - We also discuss compositional applications of SonNet and illustrate the use of - the system in an example composition.' - address: 'Daejeon, Republic of Korea' - author: KatieAnna E Wolf and Rebecca Fiebrink - bibtex: "@inproceedings{Wolf2013,\n abstract = {As any computer user employs the\ - \ Internet to accomplish everyday activities, a flow of data packets moves across\ - \ the network, forming their own patterns in response to his or her actions. Artists\ - \ and sound designers who are interested in accessing that data to make music\ - \ must currently possess low-level knowledge of Internet protocols and spend signifi-cant\ - \ effort working with low-level networking code. We have created SonNet, a new\ - \ software tool that lowers these practical barriers to experimenting and composing\ - \ with network data. SonNet executes packet-sniffng and network connection state\ - \ analysis automatically, and it includes an easy-touse ChucK object that can\ - \ be instantiated, customized, and queried from a user's own code. In this paper,\ - \ we present the design and implementation of the SonNet system, and we discuss\ - \ a pilot evaluation of the system with computer music composers. 
We also discuss\ - \ compositional applications of SonNet and illustrate the use of the system in\ - \ an example composition.},\n address = {Daejeon, Republic of Korea},\n author\ - \ = {KatieAnna E Wolf and Rebecca Fiebrink},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178690},\n issn = {2220-4806},\n keywords = {Sonification,\ - \ network data, compositional tools},\n month = {May},\n pages = {503--506},\n\ - \ publisher = {Graduate School of Culture Technology, KAIST},\n title = {SonNet:\ - \ A Code Interface for Sonifying Computer Network Data},\n url = {http://www.nime.org/proceedings/2013/nime2013_94.pdf},\n\ - \ year = {2013}\n}\n" + ID: Ramakrishnan2006 + address: 'Paris, France' + author: 'Ramakrishnan, Chandrasekhar and Go\ss man, Joachim and Br\''''{u}mmer, + Ludger' + bibtex: "@inproceedings{Ramakrishnan2006,\n address = {Paris, France},\n author\ + \ = {Ramakrishnan, Chandrasekhar and Go\\ss man, Joachim and Br\\''{u}mmer, Ludger},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176991},\n issn = {2220-4806},\n\ + \ keywords = {Sound Spatialization, Ambisonics, Vector Based Additive Panning\ + \ (VBAP), Wave Field Synthesis, Acousmatic Music },\n pages = {140--143},\n title\ + \ = {The ZKM Klangdom},\n url = {http://www.nime.org/proceedings/2006/nime2006_140.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178690 + doi: 10.5281/zenodo.1176991 issn: 2220-4806 - keywords: 'Sonification, network data, compositional tools' - month: May - pages: 503--506 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'SonNet: A Code Interface for Sonifying Computer Network Data' - url: http://www.nime.org/proceedings/2013/nime2013_94.pdf - year: 2013 + keywords: 'Sound 
Spatialization, Ambisonics, Vector Based Additive Panning (VBAP), + Wave Field Synthesis, Acousmatic Music ' + pages: 140--143 + title: The ZKM Klangdom + url: http://www.nime.org/proceedings/2006/nime2006_140.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Tahiroglu2013 - abstract: 'This paper introduces a novel collaborative environment (PESI) in whichperformers - are not only free to move and interact with each other but wheretheir social interactions - contribute to the sonic outcome. PESI system isdesigned for co-located collaboration - and provides embodied and spatialopportunities for musical exploration. To evaluate - PESI with skilled musicians,a user-test jam session was conducted. Musicians'' - comments indicate that thesystem facilitates group interaction finely to bring - up further intentions tomusical ideas. Results from our user-test jam session - indicate that, through some modificationof the ''in-space'' response to the improvisation, - and through more intuitiveinteractions with the ''on-body'' mobile instruments, - we could make thecollaborative music activity a more engaging and active experience. - Despitebeing only user-tested once with musicians, the group interview has raisedfruitful - discussions on the precise details of the system components.Furthermore, the paradigms - of musical interaction and social actions in groupactivities need to be questioned - when we seek design requirements for such acollaborative environment. We introduced - a system that we believe can open upnew ways of musical exploration in group music - activity with a number ofmusicians. The system brings up the affordances of accessible - technologieswhile creating opportunities for novel design applications to be explored. - Ourresearch proposes further development of the system, focusing on movementbehavior - in long-term interaction between performers. We plan to implement thisversion - and evaluate design and implementation with distinct skilled musicians.' 
- address: 'Daejeon, Republic of Korea' - author: Koray Tahiroğlu and Nuno N. Correia and Miguel Espada - bibtex: "@inproceedings{Tahiroglu2013,\n abstract = {This paper introduces a novel\ - \ collaborative environment (PESI) in whichperformers are not only free to move\ - \ and interact with each other but wheretheir social interactions contribute to\ - \ the sonic outcome. PESI system isdesigned for co-located collaboration and provides\ - \ embodied and spatialopportunities for musical exploration. To evaluate PESI\ - \ with skilled musicians,a user-test jam session was conducted. Musicians' comments\ - \ indicate that thesystem facilitates group interaction finely to bring up further\ - \ intentions tomusical ideas. Results from our user-test jam session indicate\ - \ that, through some modificationof the 'in-space' response to the improvisation,\ - \ and through more intuitiveinteractions with the 'on-body' mobile instruments,\ - \ we could make thecollaborative music activity a more engaging and active experience.\ - \ Despitebeing only user-tested once with musicians, the group interview has raisedfruitful\ - \ discussions on the precise details of the system components.Furthermore, the\ - \ paradigms of musical interaction and social actions in groupactivities need\ - \ to be questioned when we seek design requirements for such acollaborative environment.\ - \ We introduced a system that we believe can open upnew ways of musical exploration\ - \ in group music activity with a number ofmusicians. The system brings up the\ - \ affordances of accessible technologieswhile creating opportunities for novel\ - \ design applications to be explored. Ourresearch proposes further development\ - \ of the system, focusing on movementbehavior in long-term interaction between\ - \ performers. 
We plan to implement thisversion and evaluate design and implementation\ - \ with distinct skilled musicians.},\n address = {Daejeon, Republic of Korea},\n\ - \ author = {Koray Tahiro{\\u{g}}lu and Nuno N. Correia and Miguel Espada},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178666},\n issn = {2220-4806},\n keywords\ - \ = {Affordances, collaboration, social interaction, mobile music, extended system,\ - \ NIME},\n month = {May},\n pages = {35--40},\n publisher = {Graduate School of\ - \ Culture Technology, KAIST},\n title = {PESI Extended System: In Space, On Body,\ - \ with 3 Musicians},\n url = {http://www.nime.org/proceedings/2013/nime2013_97.pdf},\n\ - \ year = {2013}\n}\n" + ID: Wozniewski2006 + abstract: 'Traditional uses of virtual audio environments tend to focus onperceptually + accurate acoustic representations. Though spatialization of sound sources is important, + it is necessary to leveragecontrol of the sonic representation when considering + musical applications. The proposed framework allows for the creation ofperceptually + immersive scenes that function as musical instruments. Loudspeakers and microphones + are modeled within thescene along with the listener/performer, creating a navigable + 3Dsonic space where sound sources and sinks process audio according to user-defined + spatial mappings.' + address: 'Paris, France' + author: 'Wozniewski, Mike and Settel, Zack and Cooperstock, Jeremy R.' + bibtex: "@inproceedings{Wozniewski2006,\n abstract = {Traditional uses of virtual\ + \ audio environments tend to focus onperceptually accurate acoustic representations.\ + \ Though spatialization of sound sources is important, it is necessary to leveragecontrol\ + \ of the sonic representation when considering musical applications. The proposed\ + \ framework allows for the creation ofperceptually immersive scenes that function\ + \ as musical instruments. 
Loudspeakers and microphones are modeled within thescene\ + \ along with the listener/performer, creating a navigable 3Dsonic space where\ + \ sound sources and sinks process audio according to user-defined spatial mappings.},\n\ + \ address = {Paris, France},\n author = {Wozniewski, Mike and Settel, Zack and\ + \ Cooperstock, Jeremy R.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177021},\n\ + \ issn = {2220-4806},\n keywords = {Control paradigms, 3D audio, spatialization,\ + \ immersive audio environments, auditory display, acoustic modeling, spatial inter-\ + \ faces, virtual instrument design },\n pages = {144--149},\n title = {A Framework\ + \ for Immersive Spatial Audio Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_144.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178666 + doi: 10.5281/zenodo.1177021 issn: 2220-4806 - keywords: 'Affordances, collaboration, social interaction, mobile music, extended - system, NIME' - month: May - pages: 35--40 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'PESI Extended System: In Space, On Body, with 3 Musicians' - url: http://www.nime.org/proceedings/2013/nime2013_97.pdf - year: 2013 + keywords: 'Control paradigms, 3D audio, spatialization, immersive audio environments, + auditory display, acoustic modeling, spatial inter- faces, virtual instrument + design ' + pages: 144--149 + title: A Framework for Immersive Spatial Audio Performance + url: http://www.nime.org/proceedings/2006/nime2006_144.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: Sanganeria2013 - abstract: 'GrainProc is a touchscreen interface for real-time granular synthesis - designedfor live performance. 
The user provides a real-time audio input (electricguitar, - for example) as a granularization source and controls various synthesisparameters - with their fingers or toes. The control parameters are designed togive the user - access to intuitive and expressive live granular manipulations.' - address: 'Daejeon, Republic of Korea' - author: Mayank Sanganeria and Kurt Werner - bibtex: "@inproceedings{Sanganeria2013,\n abstract = {GrainProc is a touchscreen\ - \ interface for real-time granular synthesis designedfor live performance. The\ - \ user provides a real-time audio input (electricguitar, for example) as a granularization\ - \ source and controls various synthesisparameters with their fingers or toes.\ - \ The control parameters are designed togive the user access to intuitive and\ - \ expressive live granular manipulations.},\n address = {Daejeon, Republic of\ - \ Korea},\n author = {Mayank Sanganeria and Kurt Werner},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178652},\n issn = {2220-4806},\n keywords = {Granular\ - \ synthesis, touch screen interface, toe control, real-time, CCRMA},\n month =\ - \ {May},\n pages = {223--226},\n publisher = {Graduate School of Culture Technology,\ - \ KAIST},\n title = {GrainProc: a real-time granular synthesis interface for live\ - \ performance},\n url = {http://www.nime.org/proceedings/2013/nime2013_99.pdf},\n\ - \ year = {2013}\n}\n" + ID: Francois2006 + address: 'Paris, France' + author: 'Francois, Alexander R. and Chew, Elaine' + bibtex: "@inproceedings{Francois2006,\n address = {Paris, France},\n author = {Francois,\ + \ Alexander R. 
and Chew, Elaine},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176901},\n\ + \ issn = {2220-4806},\n keywords = {Software Architecture, Interactive Systems,\ + \ Music soft- ware },\n pages = {150--155},\n title = {An Architectural Framework\ + \ for Interactive Music Systems},\n url = {http://www.nime.org/proceedings/2006/nime2006_150.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178652 + doi: 10.5281/zenodo.1176901 issn: 2220-4806 - keywords: 'Granular synthesis, touch screen interface, toe control, real-time, CCRMA' - month: May - pages: 223--226 - publisher: 'Graduate School of Culture Technology, KAIST' - title: 'GrainProc: a real-time granular synthesis interface for live performance' - url: http://www.nime.org/proceedings/2013/nime2013_99.pdf - year: 2013 + keywords: 'Software Architecture, Interactive Systems, Music soft- ware ' + pages: 150--155 + title: An Architectural Framework for Interactive Music Systems + url: http://www.nime.org/proceedings/2006/nime2006_150.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_1 - abstract: 'This paper discusses a quantitative method to evaluate whether an expert - player is able to execute skilled actions on an unfamiliar interface while keeping - the focus of their performance on the musical outcome rather than on the technology - itself. In our study, twelve professional electric guitar players used an augmented - plectrum to replicate prerecorded timbre variations in a set of musical excerpts. - The task was undertaken in two experimental conditions: a reference condition, - and a subtle gradual change in the sensitivity of the augmented plectrum which - is designed to affect the guitarist’s performance without making them consciously - aware of its effect. 
We propose that players’ subconscious response to the disruption - of changing the sensitivity, as well as their overall ability to replicate the - stimuli, may indicate the strength of the relationship they developed with the - new interface. The case study presented in this paper highlights the strengths - and limitations of this method.' - address: 'The University of Auckland, New Zealand' - articleno: 1 - author: 'Guidi, Andrea and McPherson, Andrew' - bibtex: "@inproceedings{NIME22_1,\n abstract = {This paper discusses a quantitative\ - \ method to evaluate whether an expert player is able to execute skilled actions\ - \ on an unfamiliar interface while keeping the focus of their performance on the\ - \ musical outcome rather than on the technology itself. In our study, twelve professional\ - \ electric guitar players used an augmented plectrum to replicate prerecorded\ - \ timbre variations in a set of musical excerpts. The task was undertaken in two\ - \ experimental conditions: a reference condition, and a subtle gradual change\ - \ in the sensitivity of the augmented plectrum which is designed to affect the\ - \ guitarist’s performance without making them consciously aware of its effect.\ - \ We propose that players’ subconscious response to the disruption of changing\ - \ the sensitivity, as well as their overall ability to replicate the stimuli,\ - \ may indicate the strength of the relationship they developed with the new interface.\ - \ The case study presented in this paper highlights the strengths and limitations\ - \ of this method.},\n address = {The University of Auckland, New Zealand},\n articleno\ - \ = {1},\n author = {Guidi, Andrea and McPherson, Andrew},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.79d0b38f},\n issn = {2220-4806},\n month = {jun},\n\ - \ pdf = {101.pdf},\n presentation-video = {https://youtu.be/J4981qsq_7c},\n title\ - \ = 
{Quantitative evaluation of aspects of embodiment in new digital musical instruments},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.79d0b38f},\n year = {2022}\n}\n" + ID: Jacquemin2006 + address: 'Paris, France' + author: 'Jacquemin, Christian and de Laubier, Serge' + bibtex: "@inproceedings{Jacquemin2006,\n address = {Paris, France},\n author = {Jacquemin,\ + \ Christian and de Laubier, Serge},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176929},\n\ + \ issn = {2220-4806},\n keywords = {audio-visual composition,feedback,transmodality},\n\ + \ pages = {156--161},\n title = {Transmodal Feedback as a New Perspective for\ + \ Audio-visual Effects},\n url = {http://www.nime.org/proceedings/2006/nime2006_156.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.79d0b38f + doi: 10.5281/zenodo.1176929 issn: 2220-4806 - month: jun - pdf: 101.pdf - presentation-video: https://youtu.be/J4981qsq_7c - title: Quantitative evaluation of aspects of embodiment in new digital musical instruments - url: https://doi.org/10.21428%2F92fbeb44.79d0b38f - year: 2022 + keywords: 'audio-visual composition,feedback,transmodality' + pages: 156--161 + title: Transmodal Feedback as a New Perspective for Audio-visual Effects + url: http://www.nime.org/proceedings/2006/nime2006_156.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_2 - abstract: 'This article explores two in-depth interviews with distinguished Chinese - NIMEers, across generations, from the late 1970s to the present. Tian Jinqin and - Meng Qi represent role models in the Chinese NIME community. 
From the innovative - NIME designers’ historical technological innovation of the 1970s’ analog ribbon - control string synthesizer Xian Kong Qin to the 2020’s Wing Pinger evolving harmony - synthesizer, the author shines a light from different angles on the Chinese NIME - community.' - address: 'The University of Auckland, New Zealand' - articleno: 2 - author: 'Wu, Jiayue Cecilia' - bibtex: "@inproceedings{NIME22_2,\n abstract = {This article explores two in-depth\ - \ interviews with distinguished Chinese NIMEers, across generations, from the\ - \ late 1970s to the present. Tian Jinqin and Meng Qi represent role models in\ - \ the Chinese NIME community. From the innovative NIME designers’ historical technological\ - \ innovation of the 1970s’ analog ribbon control string synthesizer Xian Kong\ - \ Qin to the 2020’s Wing Pinger evolving harmony synthesizer, the author shines\ - \ a light from different angles on the Chinese NIME community.},\n address = {The\ - \ University of Auckland, New Zealand},\n articleno = {2},\n author = {Wu, Jiayue\ - \ Cecilia},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.57e41c54},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {102.pdf},\n presentation-video\ - \ = {https://www.youtube.com/watch?v=4PMmDnUNgRk},\n title = {Today and Yesterday:\ - \ Two Case Studies of China{\\textquotesingle}s {NIME} Community},\n url = {https://doi.org/10.21428%2F92fbeb44.57e41c54},\n\ - \ year = {2022}\n}\n" + ID: Magnusson2006 + abstract: 'The ixi software project started in 2000 with the intention to explore + new interactive patterns and virtual interfaces in computer music software. The + aim of this paper is not to describe these programs, as they have been described + elsewhere [14][15], but rather explicate the theoretical background that underlies + the design of these screen-based instruments. 
After an analysis of the similarities + and differences in the design of acoustic and screen-based instruments, the paper + describes how the creation of an interface is essentially the creation of a semiotic + system that affects and influences the musician and the composer. Finally the + terminology of this semiotics is explained as an interaction model. ' + address: 'Paris, France' + author: 'Magnusson, Thor' + bibtex: "@inproceedings{Magnusson2006,\n abstract = {The ixi software project started\ + \ in 2000 with the intention to explore new interactive patterns and virtual interfaces\ + \ in computer music software. The aim of this paper is not to describe these programs,\ + \ as they have been described elsewhere [14][15], but rather explicate the theoretical\ + \ background that underlies the design of these screen-based instruments. After\ + \ an analysis of the similarities and differences in the design of acoustic and\ + \ screen-based instruments, the paper describes how the creation of an interface\ + \ is essentially the creation of a semiotic system that affects and influences\ + \ the musician and the composer. Finally the terminology of this semiotics is\ + \ explained as an interaction model. },\n address = {Paris, France},\n author\ + \ = {Magnusson, Thor},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176969},\n\ + \ issn = {2220-4806},\n keywords = {Interfaces, interaction design, HCI, semiotics,\ + \ actors, OSC, mapping, interaction models, creative tools. 
},\n pages = {162--167},\n\ + \ title = {Screen-Based Musical Interfaces as Semiotic Machines},\n url = {http://www.nime.org/proceedings/2006/nime2006_162.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.57e41c54 + doi: 10.5281/zenodo.1176969 issn: 2220-4806 - month: jun - pdf: 102.pdf - presentation-video: https://www.youtube.com/watch?v=4PMmDnUNgRk - title: 'Today and Yesterday: Two Case Studies of China’s NIME Community' - url: https://doi.org/10.21428%2F92fbeb44.57e41c54 - year: 2022 + keywords: 'Interfaces, interaction design, HCI, semiotics, actors, OSC, mapping, + interaction models, creative tools. ' + pages: 162--167 + title: Screen-Based Musical Interfaces as Semiotic Machines + url: http://www.nime.org/proceedings/2006/nime2006_162.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_3 - abstract: 'One of the consequences of the pandemic has been the potential to embrace - hybrid support for different human group activities, including music performance, - resulting in accommodating a wider range of situations. We believe that we are - barely at the tip of the iceberg and that we can explore further the possibilities - of the medium by promoting a more active role of the audience during telematic - performance. In this paper, we present personic, a mobile web app designed for - distributed audiences to constitute a digital musical instrument. This has the - twofold purpose of letting the audience contribute to the performance with a non-intrusive - and easy-to-use approach, as well as providing audiovisual feedback that is helpful - for both the performers and the audience alike. The challenges and possibilities - of this approach are discussed from pilot testing the app using a practice-based - approach. We conclude by pointing to new directions of telematic performance, - which is a promising direction for network music and digital performance.' 
- address: 'The University of Auckland, New Zealand' - articleno: 3 - author: 'Xambó, Anna and Goudarzi, Visda' - bibtex: "@inproceedings{NIME22_3,\n abstract = {One of the consequences of the pandemic\ - \ has been the potential to embrace hybrid support for different human group activities,\ - \ including music performance, resulting in accommodating a wider range of situations.\ - \ We believe that we are barely at the tip of the iceberg and that we can explore\ - \ further the possibilities of the medium by promoting a more active role of the\ - \ audience during telematic performance. In this paper, we present personic, a\ - \ mobile web app designed for distributed audiences to constitute a digital musical\ - \ instrument. This has the twofold purpose of letting the audience contribute\ - \ to the performance with a non-intrusive and easy-to-use approach, as well as\ - \ providing audiovisual feedback that is helpful for both the performers and the\ - \ audience alike. The challenges and possibilities of this approach are discussed\ - \ from pilot testing the app using a practice-based approach. 
We conclude by pointing\ - \ to new directions of telematic performance, which is a promising direction for\ - \ network music and digital performance.},\n address = {The University of Auckland,\ - \ New Zealand},\n articleno = {3},\n author = {Xamb{\\'{o}}, Anna and Goudarzi,\ - \ Visda},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.706b549e},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {107.pdf},\n presentation-video = {https://youtu.be/xu5ySfbqYs8},\n\ - \ title = {The Mobile Audience as a Digital Musical Persona in Telematic Performance},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.706b549e},\n year = {2022}\n}\n" + ID: Zadel2006 + address: 'Paris, France' + author: 'Zadel, Mark and Scavone, Gary' + bibtex: "@inproceedings{Zadel2006,\n address = {Paris, France},\n author = {Zadel,\ + \ Mark and Scavone, Gary},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177025},\n\ + \ issn = {2220-4806},\n keywords = {Software control of computer music, laptop\ + \ performance, graphical interfaces, freehand input, dynamic simulation },\n pages\ + \ = {168--171},\n title = {Different Strokes: a Prototype Software System for\ + \ Laptop Performance and Improvisation},\n url = {http://www.nime.org/proceedings/2006/nime2006_168.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.706b549e + doi: 10.5281/zenodo.1177025 issn: 2220-4806 - month: jun - pdf: 107.pdf - presentation-video: https://youtu.be/xu5ySfbqYs8 - title: The Mobile Audience as a Digital Musical Persona in Telematic Performance - url: https://doi.org/10.21428%2F92fbeb44.706b549e - year: 2022 + keywords: 'Software control of computer music, laptop performance, graphical interfaces, + freehand input, dynamic simulation ' + pages: 
168--171 + title: 'Different Strokes: a Prototype Software System for Laptop Performance and + Improvisation' + url: http://www.nime.org/proceedings/2006/nime2006_168.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_4 - abstract: 'Physical modelling sound synthesis methods generate vast and intricate - sound spaces that are navigated using meaningful parameters. Numerical based physical - modelling synthesis methods provide authentic representations of the physics they - model. Unfortunately, the application of these physical models are often limited - because of their considerable computational requirements. In previous studies, - the CPU has been shown to reliably support two-dimensional linear finite-difference - models in real-time with resolutions up to 64x64. However, the near-ubiquitous - parallel processing units known as GPUs have previously been used to process considerably - larger resolutions, as high as 512x512 in real-time. GPU programming requires - a low-level understanding of the architecture, which often imposes a barrier for - entry for inexperienced practitioners. Therefore, this paper proposes HyperModels, - a framework for automating the mapping of linear finite-difference based physical - modelling synthesis into an optimised parallel form suitable for the GPU. An implementation - of the design is then used to evaluate the objective performance of the framework - by comparing the automated solution to manually developed equivalents. For the - majority of the extensive performance profiling tests, the auto-generated programs - were observed to perform only 60% slower but in the worst-case scenario it was - 50% slower. The initial results suggests that, in most circumstances, the automation - provided by the framework avoids the lowlevel expertise required to manually optimise - the GPU, with only a small reduction in performance. However, there is still scope - to improve the auto-generated optimisations. 
When comparing the performance of - CPU to GPU equivalents, the parallel CPU version supports resolutions of up to - 128x128 whilst the GPU continues to support higher resolutions up to 512x512. - To conclude the paper, two instruments are developed using HyperModels based on - established physical model designs.' - address: 'The University of Auckland, New Zealand' - articleno: 4 - author: 'Renney, Harri and Willemsen, Silvin and Gaster, Benedict and Mitchell, - Tom' - bibtex: "@inproceedings{NIME22_4,\n abstract = {Physical modelling sound synthesis\ - \ methods generate vast and intricate sound spaces that are navigated using meaningful\ - \ parameters. Numerical based physical modelling synthesis methods provide authentic\ - \ representations of the physics they model. Unfortunately, the application of\ - \ these physical models are often limited because of their considerable computational\ - \ requirements. In previous studies, the CPU has been shown to reliably support\ - \ two-dimensional linear finite-difference models in real-time with resolutions\ - \ up to 64x64. However, the near-ubiquitous parallel processing units known as\ - \ GPUs have previously been used to process considerably larger resolutions, as\ - \ high as 512x512 in real-time. GPU programming requires a low-level understanding\ - \ of the architecture, which often imposes a barrier for entry for inexperienced\ - \ practitioners. Therefore, this paper proposes HyperModels, a framework for automating\ - \ the mapping of linear finite-difference based physical modelling synthesis into\ - \ an optimised parallel form suitable for the GPU. An implementation of the design\ - \ is then used to evaluate the objective performance of the framework by comparing\ - \ the automated solution to manually developed equivalents. 
For the majority of\ - \ the extensive performance profiling tests, the auto-generated programs were\ - \ observed to perform only 60% slower but in the worst-case scenario it was 50%\ - \ slower. The initial results suggests that, in most circumstances, the automation\ - \ provided by the framework avoids the lowlevel expertise required to manually\ - \ optimise the GPU, with only a small reduction in performance. However, there\ - \ is still scope to improve the auto-generated optimisations. When comparing the\ - \ performance of CPU to GPU equivalents, the parallel CPU version supports resolutions\ - \ of up to 128x128 whilst the GPU continues to support higher resolutions up to\ - \ 512x512. To conclude the paper, two instruments are developed using HyperModels\ - \ based on established physical model designs.},\n address = {The University of\ - \ Auckland, New Zealand},\n articleno = {4},\n author = {Renney, Harri and Willemsen,\ - \ Silvin and Gaster, Benedict and Mitchell, Tom},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.21428/92fbeb44.98a4210a},\n issn = {2220-4806},\n month = {jun},\n pdf\ - \ = {109.pdf},\n presentation-video = {https://youtu.be/Pb4pAr2v4yU},\n title\ - \ = {{HyperModels} - A Framework for {GPU} Accelerated Physical Modelling Sound\ - \ Synthesis},\n url = {https://doi.org/10.21428%2F92fbeb44.98a4210a},\n year =\ - \ {2022}\n}\n" + ID: Nishibori2006 + abstract: 'Development of a musical interface which allows people to play music + intuitively and create music visibly. ' + address: 'Paris, France' + author: 'Nishibori, Yu and Iwai, Toshio' + bibtex: "@inproceedings{Nishibori2006,\n abstract = {Development of a musical interface\ + \ which allows people to play music intuitively and create music visibly. 
},\n\ + \ address = {Paris, France},\n author = {Nishibori, Yu and Iwai, Toshio},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176979},\n issn = {2220-4806},\n pages\ + \ = {172--175},\n title = {TENORI-ON},\n url = {http://www.nime.org/proceedings/2006/nime2006_172.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.98a4210a + doi: 10.5281/zenodo.1176979 issn: 2220-4806 - month: jun - pdf: 109.pdf - presentation-video: https://youtu.be/Pb4pAr2v4yU - title: HyperModels - A Framework for GPU Accelerated Physical Modelling Sound Synthesis - url: https://doi.org/10.21428%2F92fbeb44.98a4210a - year: 2022 + pages: 172--175 + title: TENORI-ON + url: http://www.nime.org/proceedings/2006/nime2006_172.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_5 - abstract: 'In this paper, we propose a set of reflections to actively incorporate - environmental sustainability instances in the practice of circuit bending. This - proposal combines circuit bending-related concepts with literature from the domain - of sustainable Human-Computer Interaction (HCI). We commence by presenting an - overview of the critical discourse within the New Interfaces for Musical Expression - (NIME) community, and of circuit bending itself—exposing the linkages this practice - has with themes directly related to this research, such as environmental sustainability - and philosophy. Afterwards, we look at how the topic of environmental sustainability - has been discussed, concerning circuit bending, within the NIME literature. We - conclude by developing a list of recommendations for a sustainable circuit bending - practice.' 
- address: 'The University of Auckland, New Zealand' - articleno: 5 - author: 'Dorigatti, Enrico and Masu, Raul' - bibtex: "@inproceedings{NIME22_5,\n abstract = {In this paper, we propose a set\ - \ of reflections to actively incorporate environmental sustainability instances\ - \ in the practice of circuit bending. This proposal combines circuit bending-related\ - \ concepts with literature from the domain of sustainable Human-Computer Interaction\ - \ (HCI). We commence by presenting an overview of the critical discourse within\ - \ the New Interfaces for Musical Expression (NIME) community, and of circuit bending\ - \ itself—exposing the linkages this practice has with themes directly related\ - \ to this research, such as environmental sustainability and philosophy. Afterwards,\ - \ we look at how the topic of environmental sustainability has been discussed,\ - \ concerning circuit bending, within the NIME literature. We conclude by developing\ - \ a list of recommendations for a sustainable circuit bending practice.},\n address\ - \ = {The University of Auckland, New Zealand},\n articleno = {5},\n author = {Dorigatti,\ - \ Enrico and Masu, Raul},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.18502d1d},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {11.pdf},\n presentation-video\ - \ = {https://youtu.be/n3GcaaHkats},\n title = {Circuit Bending and Environmental\ - \ Sustainability: Current Situation and Steps Forward},\n url = {https://doi.org/10.21428%2F92fbeb44.18502d1d},\n\ - \ year = {2022}\n}\n" + ID: Jensenius2006a + abstract: 'This paper presents our need for a Gesture Description Interchange Format + (GDIF) for storing, retrieving and sharing information about music-related gestures. 
+ Ideally, it should be possible to store all sorts of data from various commercial + and custom made controllers, motion capture and computer vision systems, as well + as results from different types of gesture analysis, in a coherent and consistent + way. This would make it possible to use the information with different software, + platforms and devices, and also allow for sharing data between research institutions. + We present some of the data types that should be included, and discuss issues + which need to be resolved.' + address: 'Paris, France' + author: 'Jensenius, Alexander Refsum and Kvifte, Tellef and Godøy, Rolf Inge' + bibtex: "@inproceedings{Jensenius2006a,\n abstract = {This paper presents our need\ + \ for a Gesture Description Interchange Format (GDIF) for storing, retrieving\ + \ and sharing information about music-related gestures. Ideally, it should be\ + \ possible to store all sorts of data from various commercial and custom made\ + \ controllers, motion capture and computer vision systems, as well as results\ + \ from different types of gesture analysis, in a coherent and consistent way.\ + \ This would make it possible to use the information with different software,\ + \ platforms and devices, and also allow for sharing data between research institutions.\ + \ We present some of the data types that should be included, and discuss issues\ + \ which need to be resolved.},\n address = {Paris, France},\n author = {Jensenius,\ + \ Alexander Refsum and Kvifte, Tellef and Godøy, Rolf Inge},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176931},\n issn = {2220-4806},\n keywords = {Gesture\ + \ description, gesture analysis, standards },\n pages = {176--179},\n title =\ + \ {Towards a Gesture Description Interchange Format},\n url = {http://www.nime.org/proceedings/2006/nime2006_176.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International 
Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.18502d1d + doi: 10.5281/zenodo.1176931 issn: 2220-4806 - month: jun - pdf: 11.pdf - presentation-video: https://youtu.be/n3GcaaHkats - title: 'Circuit Bending and Environmental Sustainability: Current Situation and - Steps Forward' - url: https://doi.org/10.21428%2F92fbeb44.18502d1d - year: 2022 + keywords: 'Gesture description, gesture analysis, standards ' + pages: 176--179 + title: Towards a Gesture Description Interchange Format + url: http://www.nime.org/proceedings/2006/nime2006_176.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_6 - abstract: 'This paper explores a minimalist approach to live coding using a single - input parameter to manipulate the graph structure of a finite state machine through - a stream of bits. This constitutes an example of bottom-up live coding, which - operates on a low level language to generate a high level structure output. Here - we examine systematically how to apply mappings of continuous gestural interactions - to develop a bottom-up system for predicting programming behaviours. We conducted - a statistical analysis based on a controlled data generation procedure. The findings - concur with the subjective experience of the behavior of the system when the user - modulates the sampling frequency of a variable clock using a knob as an input - device. This suggests that a sequential predictive model may be applied towards - the development of a tactically predictive system according to Tanimoto’s hierarchy - of liveness. The code is provided in a git repository.' 
- address: 'The University of Auckland, New Zealand' - articleno: 6 - author: 'Diapoulis, Georgios and Zannos, Iannis and Tatar, Kivanç and Dahlstedt, - Palle' - bibtex: "@inproceedings{NIME22_6,\n abstract = {This paper explores a minimalist\ - \ approach to live coding using a single input parameter to manipulate the graph\ - \ structure of a finite state machine through a stream of bits. This constitutes\ - \ an example of bottom-up live coding, which operates on a low level language\ - \ to generate a high level structure output. Here we examine systematically how\ - \ to apply mappings of continuous gestural interactions to develop a bottom-up\ - \ system for predicting programming behaviours. We conducted a statistical analysis\ - \ based on a controlled data generation procedure. The findings concur with the\ - \ subjective experience of the behavior of the system when the user modulates\ - \ the sampling frequency of a variable clock using a knob as an input device.\ - \ This suggests that a sequential predictive model may be applied towards the\ - \ development of a tactically predictive system according to Tanimoto’s hierarchy\ - \ of liveness. The code is provided in a git repository.},\n address = {The University\ - \ of Auckland, New Zealand},\n articleno = {6},\n author = {Diapoulis, Georgios\ - \ and Zannos, Iannis and Tatar, Kivan{\\c{c}} and Dahlstedt, Palle},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.51fecaab},\n issn = {2220-4806},\n month\ - \ = {jun},\n pdf = {110.pdf},\n presentation-video = {https://youtu.be/L_v5P7jGK8Y},\n\ - \ title = {Bottom-up live coding: Analysis of continuous interactions towards\ - \ predicting programming behaviours},\n url = {https://doi.org/10.21428%2F92fbeb44.51fecaab},\n\ - \ year = {2022}\n}\n" + ID: Wanderley2006 + address: 'Paris, France' + author: 'Wanderley, Marcelo M. 
and Birnbaum, David and Malloch, Joseph and Sinyor, + Elliot and Boissinot, Julien' + bibtex: "@inproceedings{Wanderley2006,\n address = {Paris, France},\n author = {Wanderley,\ + \ Marcelo M. and Birnbaum, David and Malloch, Joseph and Sinyor, Elliot and Boissinot,\ + \ Julien},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177015},\n issn = {2220-4806},\n\ + \ keywords = {sensors, Wiki, collaborative website, open content },\n pages =\ + \ {180--183},\n title = {SensorWiki.org: A Collaborative Resource for Researchers\ + \ and Interface Designers},\n url = {http://www.nime.org/proceedings/2006/nime2006_180.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.51fecaab + doi: 10.5281/zenodo.1177015 issn: 2220-4806 - month: jun - pdf: 110.pdf - presentation-video: https://youtu.be/L_v5P7jGK8Y - title: 'Bottom-up live coding: Analysis of continuous interactions towards predicting - programming behaviours' - url: https://doi.org/10.21428%2F92fbeb44.51fecaab - year: 2022 + keywords: 'sensors, Wiki, collaborative website, open content ' + pages: 180--183 + title: 'SensorWiki.org: A Collaborative Resource for Researchers and Interface Designers' + url: http://www.nime.org/proceedings/2006/nime2006_180.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_7 - abstract: 'Deformable interfaces are an emerging area of Human- Computer Interaction - (HCI) research that offers nuanced and responsive physical interaction with digital - technologies. They are well suited to creative and expressive forms of HCI such - as Digital Musical Interfaces (DMIs). However, research on the design of deformable - DMIs is limited. This paper explores the role that deformable interfaces might - play in DMI design. 
We conducted an online study with 23 DMI designers in which - they were invited to create non-functional deformable DMIs together. Our results - suggest forms of gestural input and sound mappings that deformable interfaces - intuitively lend themselves to for DMI design. From our results, we highlight - four styles of DMI that deformable interfaces might be most suited to, and suggest - the kinds of experience that deformable DMIs might be most compelling for musicians - and audiences. We discuss how DMI designers explore deformable materials and gestures - input and the role of unexpected affordances in the design process.' - address: 'The University of Auckland, New Zealand' - articleno: 7 - author: 'Zheng, Jianing and Bryan-Kinns, Nick' - bibtex: "@inproceedings{NIME22_7,\n abstract = {Deformable interfaces are an emerging\ - \ area of Human- Computer Interaction (HCI) research that offers nuanced and responsive\ - \ physical interaction with digital technologies. They are well suited to creative\ - \ and expressive forms of HCI such as Digital Musical Interfaces (DMIs). However,\ - \ research on the design of deformable DMIs is limited. This paper explores the\ - \ role that deformable interfaces might play in DMI design. We conducted an online\ - \ study with 23 DMI designers in which they were invited to create non-functional\ - \ deformable DMIs together. Our results suggest forms of gestural input and sound\ - \ mappings that deformable interfaces intuitively lend themselves to for DMI design.\ - \ From our results, we highlight four styles of DMI that deformable interfaces\ - \ might be most suited to, and suggest the kinds of experience that deformable\ - \ DMIs might be most compelling for musicians and audiences. 
We discuss how DMI\ - \ designers explore deformable materials and gestures input and the role of unexpected\ - \ affordances in the design process.},\n address = {The University of Auckland,\ - \ New Zealand},\n articleno = {7},\n author = {Zheng, Jianing and Bryan-Kinns,\ - \ Nick},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.41da9da5},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {111.pdf},\n presentation-video = {https://youtu.be/KHqfxL4F7Bg},\n\ - \ title = {Squeeze, Twist, Stretch: Exploring Deformable Digital Musical Interfaces\ - \ Design Through Non-Functional Prototypes},\n url = {https://doi.org/10.21428%2F92fbeb44.41da9da5},\n\ - \ year = {2022}\n}\n" + ID: Dimitrov2006 + address: 'Paris, France' + author: 'Dimitrov, Smilen and Serafin, Stefania' + bibtex: "@inproceedings{Dimitrov2006,\n address = {Paris, France},\n author = {Dimitrov,\ + \ Smilen and Serafin, Stefania},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176891},\n\ + \ issn = {2220-4806},\n pages = {184--187},\n title = {A Simple Practical Approach\ + \ to a Wireless Data Acquisition Board},\n url = {http://www.nime.org/proceedings/2006/nime2006_184.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.41da9da5 + doi: 10.5281/zenodo.1176891 issn: 2220-4806 - month: jun - pdf: 111.pdf - presentation-video: https://youtu.be/KHqfxL4F7Bg - title: 'Squeeze, Twist, Stretch: Exploring Deformable Digital Musical Interfaces - Design Through Non-Functional Prototypes' - url: https://doi.org/10.21428%2F92fbeb44.41da9da5 - year: 2022 + pages: 184--187 + title: A Simple Practical Approach to a Wireless Data Acquisition Board + url: http://www.nime.org/proceedings/2006/nime2006_184.pdf + year: 2006 - ENTRYTYPE: inproceedings - 
ID: NIME22_8 - abstract: 'We present a systematic review of voice-centered NIME publications from - the past two decades. Musical expression has been a key driver of innovation in - voicebased technologies, from traditional architectures that amplify singing to - cutting-edge research in vocal synthesis. NIME conference has emerged as a prime - venue for innovative vocal interfaces. However, there hasn’t been a systematic - analysis of all voice-related work or an effort to characterize their features. - Analyzing trends in Vocal NIMEs can help the community better understand common - interests, identify uncharted territories, and explore directions for future research. - We identified a corpus of 98 papers about Vocal NIMEs from 2001 to 2021, which - we analyzed in 3 ways. First, we automatically extracted latent themes and possible - categories using natural language processing. Taking inspiration from concepts - surfaced through this process, we then defined several core dimensions with associated - descriptors of Vocal NIMEs and assigned each paper relevant descriptors under - each dimension. Finally, we defined a classification system, which we then used - to uniquely and more precisely situate each paper on a map, taking into account - the overall goals of each work. Based on our analyses, we present trends and challenges, - including questions of gender and diversity in our community, and reflect on opportunities - for future work.' - address: 'The University of Auckland, New Zealand' - articleno: 8 - author: 'Kleinberger, Rébecca and Singh, Nikhil and Xiao, Xiao and Troyer, Akito - van' - bibtex: "@inproceedings{NIME22_8,\n abstract = {We present a systematic review of\ - \ voice-centered NIME publications from the past two decades. 
Musical expression\ - \ has been a key driver of innovation in voicebased technologies, from traditional\ - \ architectures that amplify singing to cutting-edge research in vocal synthesis.\ - \ NIME conference has emerged as a prime venue for innovative vocal interfaces.\ - \ However, there hasn’t been a systematic analysis of all voice-related work or\ - \ an effort to characterize their features. Analyzing trends in Vocal NIMEs can\ - \ help the community better understand common interests, identify uncharted territories,\ - \ and explore directions for future research. We identified a corpus of 98 papers\ - \ about Vocal NIMEs from 2001 to 2021, which we analyzed in 3 ways. First, we\ - \ automatically extracted latent themes and possible categories using natural\ - \ language processing. Taking inspiration from concepts surfaced through this\ - \ process, we then defined several core dimensions with associated descriptors\ - \ of Vocal NIMEs and assigned each paper relevant descriptors under each dimension.\ - \ Finally, we defined a classification system, which we then used to uniquely\ - \ and more precisely situate each paper on a map, taking into account the overall\ - \ goals of each work. 
Based on our analyses, we present trends and challenges,\ - \ including questions of gender and diversity in our community, and reflect on\ - \ opportunities for future work.},\n address = {The University of Auckland, New\ - \ Zealand},\n articleno = {8},\n author = {Kleinberger, R{\\'{e}}becca and Singh,\ - \ Nikhil and Xiao, Xiao and Troyer, Akito van},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.21428/92fbeb44.4308fb94},\n issn = {2220-4806},\n month = {jun},\n pdf\ - \ = {112.pdf},\n presentation-video = {https://youtu.be/PUlGjAblfPM},\n title\ - \ = {Voice at {NIME}: a Taxonomy of New Interfaces for Vocal Musical Expression},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.4308fb94},\n year = {2022}\n}\n" + ID: Hansen2006 + address: 'Paris, France' + author: 'Hansen, Kjetil F. and Bresin, Roberto' + bibtex: "@inproceedings{Hansen2006,\n address = {Paris, France},\n author = {Hansen,\ + \ Kjetil F. and Bresin, Roberto},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176921},\n\ + \ issn = {2220-4806},\n keywords = {controllers,dj,instrument mapping,scratching,virtual},\n\ + \ pages = {188--191},\n title = {Mapping Strategies in DJ Scratching},\n url =\ + \ {http://www.nime.org/proceedings/2006/nime2006_188.pdf},\n year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.4308fb94 + doi: 10.5281/zenodo.1176921 issn: 2220-4806 - month: jun - pdf: 112.pdf - presentation-video: https://youtu.be/PUlGjAblfPM - title: 'Voice at NIME: a Taxonomy of New Interfaces for Vocal Musical Expression' - url: https://doi.org/10.21428%2F92fbeb44.4308fb94 - year: 2022 + keywords: 'controllers,dj,instrument mapping,scratching,virtual' + pages: 188--191 + title: Mapping Strategies in DJ Scratching + url: 
http://www.nime.org/proceedings/2006/nime2006_188.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_9 - abstract: 'Digital musical instruments (DMIs) built to be used in performance settings - need to go beyond the prototypical stage of design to become robust, reliable, - and responsive devices for extensive usage. This paper presents the Tapbox and - the Slapbox, two generations of a standalone DMI built for percussion practice. - After summarizing the requirements for performance DMIs from previous surveys, - we introduce the Tapbox and comment on its strong and weak points. We then focus - on the design process of the Slapbox, an improved version that captures a broader - range of percussive gestures. Design tasks are reflected upon, including enclosure - design, sensor evaluations, gesture extraction algorithms, and sound synthesis - methods and mappings. Practical exploration of the Slapbox by two professional - percussionists is performed and their insights summarized, providing directions - for future work.' - address: 'The University of Auckland, New Zealand' - articleno: 9 - author: 'Boettcher, Brady and Sullivan, John and Wanderley, Marcelo M.' - bibtex: "@inproceedings{NIME22_9,\n abstract = {Digital musical instruments (DMIs)\ - \ built to be used in performance settings need to go beyond the prototypical\ - \ stage of design to become robust, reliable, and responsive devices for extensive\ - \ usage. This paper presents the Tapbox and the Slapbox, two generations of a\ - \ standalone DMI built for percussion practice. After summarizing the requirements\ - \ for performance DMIs from previous surveys, we introduce the Tapbox and comment\ - \ on its strong and weak points. We then focus on the design process of the Slapbox,\ - \ an improved version that captures a broader range of percussive gestures. 
Design\ - \ tasks are reflected upon, including enclosure design, sensor evaluations, gesture\ - \ extraction algorithms, and sound synthesis methods and mappings. Practical exploration\ - \ of the Slapbox by two professional percussionists is performed and their insights\ - \ summarized, providing directions for future work.},\n address = {The University\ - \ of Auckland, New Zealand},\n articleno = {9},\n author = {Boettcher, Brady and\ - \ Sullivan, John and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.78fd89cc},\n issn = {2220-4806},\n month = {jun},\n pdf =\ - \ {114.pdf},\n presentation-video = {https://youtu.be/NkYGAp4rmj8},\n title =\ - \ {Slapbox: Redesign of a Digital Musical Instrument Towards Reliable Long-Term\ - \ Practice},\n url = {https://doi.org/10.21428%2F92fbeb44.78fd89cc},\n year =\ - \ {2022}\n}\n" + ID: Kessous2006 + abstract: 'In this paper we describe a new guitar-like musical controller. The ''GXtar'' + is an instrument which takes as a starting point a guitar but his role is to bring + different and new musical possibilities while preserving the spirit and techniques + of guitar. Therefore, it was conceived and carried out starting from the body + of an electric guitar. The fingerboard of this guitar was equipped with two lines + of sensors: linear position sensors, and tactile pressure sensors. These two lines + of sensors are used as two virtual strings. Their two ends are the bridge and + the nut of the guitar. The design of the instrument is made in a way that the + position of a finger, on one of these virtual strings, corresponds to the note, + which would have been played on a real and vibrating string. On the soundboard + of the guitar, a controller, with 3 degrees of freedom, allows to drive other + synthesis parameters. 
We then describe how this interface is integrated in a musical + audio system and serves as a musical instrument. ' + address: 'Paris, France' + author: 'Kessous, Loïc and Castet, Julien and Arfib, Daniel' + bibtex: "@inproceedings{Kessous2006,\n abstract = {In this paper we describe a new\ + \ guitar-like musical controller. The 'GXtar' is an instrument which takes as\ + \ a starting point a guitar but his role is to bring different and new musical\ + \ possibilities while preserving the spirit and techniques of guitar. Therefore,\ + \ it was conceived and carried out starting from the body of an electric guitar.\ + \ The fingerboard of this guitar was equipped with two lines of sensors: linear\ + \ position sensors, and tactile pressure sensors. These two lines of sensors are\ + \ used as two virtual strings. Their two ends are the bridge and the nut of the\ + \ guitar. The design of the instrument is made in a way that the position of a\ + \ finger, on one of these virtual strings, corresponds to the note, which would\ + \ have been played on a real and vibrating string. On the soundboard of the guitar,\ + \ a controller, with 3 degrees of freedom, allows to drive other synthesis parameters.\ + \ We then describe how this interface is integrated in a musical audio system\ + \ and serves as a musical instrument. },\n address = {Paris, France},\n author\ + \ = {Kessous, Lo\\\"{i}c and Castet, Julien and Arfib, Daniel},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176857},\n issn = {2220-4806},\n keywords = {Guitar,\ + \ alternate controller, sensors, synthesizer, multidimensional control. 
},\n pages\ + \ = {192--195},\n title = {'GXtar', an Interface Using Guitar Techniques},\n url\ + \ = {http://www.nime.org/proceedings/2006/nime2006_192.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.78fd89cc + doi: 10.5281/zenodo.1176857 issn: 2220-4806 - month: jun - pdf: 114.pdf - presentation-video: https://youtu.be/NkYGAp4rmj8 - title: 'Slapbox: Redesign of a Digital Musical Instrument Towards Reliable Long-Term - Practice' - url: https://doi.org/10.21428%2F92fbeb44.78fd89cc - year: 2022 + keywords: 'Guitar, alternate controller, sensors, synthesizer, multidimensional + control. ' + pages: 192--195 + title: '''GXtar'', an Interface Using Guitar Techniques' + url: http://www.nime.org/proceedings/2006/nime2006_192.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_10 - abstract: 'This paper presents Mapper4Live, a software plugin made for the popular - digital audio workstation software Ableton Live. Mapper4Live exposes Ableton’s - synthesis and effect parameters on the distributed libmapper signal mapping network, - providing new opportunities for interaction between software and hardware synths, - audio effects, and controllers. The plugin’s uses and relevance in research, music - production and musical performance settings are explored, detailing the development - journey and ideas for future work on the project.' - address: 'The University of Auckland, New Zealand' - articleno: 10 - author: 'Boettcher, Brady and Malloch, Joseph and Wang, Johnty and Wanderley, Marcelo - M.' - bibtex: "@inproceedings{NIME22_10,\n abstract = {This paper presents Mapper4Live,\ - \ a software plugin made for the popular digital audio workstation software Ableton\ - \ Live. 
Mapper4Live exposes Ableton’s synthesis and effect parameters on the distributed\ - \ libmapper signal mapping network, providing new opportunities for interaction\ - \ between software and hardware synths, audio effects, and controllers. The plugin’s\ - \ uses and relevance in research, music production and musical performance settings\ - \ are explored, detailing the development journey and ideas for future work on\ - \ the project.},\n address = {The University of Auckland, New Zealand},\n articleno\ - \ = {10},\n author = {Boettcher, Brady and Malloch, Joseph and Wang, Johnty and\ - \ Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.625fbdbf},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {115.pdf},\n presentation-video\ - \ = {https://youtu.be/Sv3v3Jmemp0},\n title = {Mapper4Live: Using Control Structures\ - \ to Embed Complex Mapping Tools into Ableton Live},\n url = {https://doi.org/10.21428%2F92fbeb44.625fbdbf},\n\ - \ year = {2022}\n}\n" + ID: Burns2006 + address: 'Paris, France' + author: 'Burns, Anne-Marie and Wanderley, Marcelo M.' 
+ bibtex: "@inproceedings{Burns2006,\n address = {Paris, France},\n author = {Burns,\ + \ Anne-Marie and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176850},\n\ + \ issn = {2220-4806},\n keywords = {finger-tracking,gesture,guitar fingering,hough\ + \ transform},\n pages = {196--199},\n title = {Visual Methods for the Retrieval\ + \ of Guitarist Fingering},\n url = {http://www.nime.org/proceedings/2006/nime2006_196.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.625fbdbf + doi: 10.5281/zenodo.1176850 issn: 2220-4806 - month: jun - pdf: 115.pdf - presentation-video: https://youtu.be/Sv3v3Jmemp0 - title: 'Mapper4Live: Using Control Structures to Embed Complex Mapping Tools into - Ableton Live' - url: https://doi.org/10.21428%2F92fbeb44.625fbdbf - year: 2022 + keywords: 'finger-tracking,gesture,guitar fingering,hough transform' + pages: 196--199 + title: Visual Methods for the Retrieval of Guitarist Fingering + url: http://www.nime.org/proceedings/2006/nime2006_196.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_11 - abstract: 'Music technology ensembles—often consisting of multiple laptops as the - performers’ primary instrument— provide collaborative artistic experiences for - electronic musicians. In an effort to remove the significant technical and financial - barriers that laptops can present to performers looking to start their own group, - this paper proposes a solution in the form of the Norns Shield, a computer music - instrument (CMI) that requires minimal set-up and promotes immediate music-making - to performers of all skill levels. 
Prior research centered on using alternative - CMIs to supplant laptops in ensemble settings is discussed, and the benefits of - adopting the Norns Shield in service of democratizing and diversifying the music - technology ensemble are demonstrated in a discussion centered on the University - of Texas Rio Grande Valley New Music Ensemble’s adoption of the instrument. A - description of two software packages developed by the author showcases an extension - of the instrument’s abilities to share collaborative control data between internet-enabled - CMIs and to remotely manage script launching and parameter configuration across - a group of Norns Shields, providing resources for ensembles interested in incorporating - the device into their ranks.' - address: 'The University of Auckland, New Zealand' - articleno: 11 - author: 'Marasco, Anthony T.' - bibtex: "@inproceedings{NIME22_11,\n abstract = {Music technology ensembles—often\ - \ consisting of multiple laptops as the performers’ primary instrument— provide\ - \ collaborative artistic experiences for electronic musicians. In an effort to\ - \ remove the significant technical and financial barriers that laptops can present\ - \ to performers looking to start their own group, this paper proposes a solution\ - \ in the form of the Norns Shield, a computer music instrument (CMI) that requires\ - \ minimal set-up and promotes immediate music-making to performers of all skill\ - \ levels. Prior research centered on using alternative CMIs to supplant laptops\ - \ in ensemble settings is discussed, and the benefits of adopting the Norns Shield\ - \ in service of democratizing and diversifying the music technology ensemble are\ - \ demonstrated in a discussion centered on the University of Texas Rio Grande\ - \ Valley New Music Ensemble’s adoption of the instrument. 
A description of two\ - \ software packages developed by the author showcases an extension of the instrument’s\ - \ abilities to share collaborative control data between internet-enabled CMIs\ - \ and to remotely manage script launching and parameter configuration across a\ - \ group of Norns Shields, providing resources for ensembles interested in incorporating\ - \ the device into their ranks.},\n address = {The University of Auckland, New\ - \ Zealand},\n articleno = {11},\n author = {Marasco, Anthony T.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.89003700},\n issn = {2220-4806},\n month\ - \ = {jun},\n pdf = {120.pdf},\n presentation-video = {https://www.youtube.com/watch?v=2XixSYrgRuQ},\n\ - \ title = {Approaching the Norns Shield as a Laptop Alternative for Democratizing\ - \ Music Technology Ensembles},\n url = {https://doi.org/10.21428%2F92fbeb44.89003700},\n\ - \ year = {2022}\n}\n" + ID: Schoonderwaldt2006 + abstract: 'A cost-effective method was developed for the estimation of the bow velocity + in violin playing, using an accelerometer on the bow in combination with point + tracking using a standard video camera. The video data are used to detect the + moments of bow direction changes. This information is used for piece-wise integration + of the accelerometer signal, resulting in a drift-free reconstructed velocity + signal with a high temporal resolution. The method was evaluated using a 3D motion + capturing system, providing a reliable reference of the actual bow velocity. The + method showed good results when the accelerometer and video stream are synchronized. + Additional latency and jitter of the camera stream can importantly decrease the + performance of the method, depending on the bow stroke type. 
' + address: 'Paris, France' + author: 'Schoonderwaldt, Erwin and Rasamimanana, Nicolas and Bevilacqua, Frédéric' + bibtex: "@inproceedings{Schoonderwaldt2006,\n abstract = {A cost-effective method\ + \ was developed for the estimation of the bow velocity in violin playing, using\ + \ an accelerometer on the bow in combination with point tracking using a standard\ + \ video camera. The video data are used to detect the moments of bow direction\ + \ changes. This information is used for piece-wise integration of the accelerometer\ + \ signal, resulting in a drift-free reconstructed velocity signal with a high\ + \ temporal resolution. The method was evaluated using a 3D motion capturing system,\ + \ providing a reliable reference of the actual bow velocity. The method showed\ + \ good results when the accelerometer and video stream are synchronized. Additional\ + \ latency and jitter of the camera stream can importantly decrease the performance\ + \ of the method, depending on the bow stroke type. },\n address = {Paris, France},\n\ + \ author = {Schoonderwaldt, Erwin and Rasamimanana, Nicolas and Bevilacqua, Fr\\\ + '{e}d\\'{e}ric},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177003},\n\ + \ issn = {2220-4806},\n keywords = {Bowing gestures, bowed string, violin, bow\ + \ velocity, accelerometer, video tracking. 
},\n pages = {200--203},\n title =\ + \ {Combining Accelerometer and Video Camera: Reconstruction of Bow Velocity Profiles},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_200.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.89003700 + doi: 10.5281/zenodo.1177003 issn: 2220-4806 - month: jun - pdf: 120.pdf - presentation-video: https://www.youtube.com/watch?v=2XixSYrgRuQ - title: Approaching the Norns Shield as a Laptop Alternative for Democratizing Music - Technology Ensembles - url: https://doi.org/10.21428%2F92fbeb44.89003700 - year: 2022 + keywords: 'Bowing gestures, bowed string, violin, bow velocity, accelerometer, video + tracking. ' + pages: 200--203 + title: 'Combining Accelerometer and Video Camera: Reconstruction of Bow Velocity + Profiles' + url: http://www.nime.org/proceedings/2006/nime2006_200.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_12 - abstract: 'In this article we present Bandoneon 2.0, an interdisciplinary project - whose main objective is to produce electronic bandoneons in Argentina. The current - prices of bandoneons and the scarcity of manufacturers are endangering the possibility - of access for the new generations to one of the most emblematic instruments of - the culture of this country. Therefore, we aim to create an expressive and accessible - electronic bandoneon that can be used in recreational, academic and professional - contexts, providing an inclusive response to the current sociocultural demand. - The project also involves research on instrument acoustics and the development - of specialized software and hardware tools.' 
- address: 'The University of Auckland, New Zealand' - articleno: 12 - author: 'Ramos, Juan and Calcagno, Esteban and Vergara, Ramiro Oscar and Riera, - Pablo and Rizza, Joaquín' - bibtex: "@inproceedings{NIME22_12,\n abstract = {In this article we present Bandoneon\ - \ 2.0, an interdisciplinary project whose main objective is to produce electronic\ - \ bandoneons in Argentina. The current prices of bandoneons and the scarcity of\ - \ manufacturers are endangering the possibility of access for the new generations\ - \ to one of the most emblematic instruments of the culture of this country. Therefore,\ - \ we aim to create an expressive and accessible electronic bandoneon that can\ - \ be used in recreational, academic and professional contexts, providing an inclusive\ - \ response to the current sociocultural demand. The project also involves research\ - \ on instrument acoustics and the development of specialized software and hardware\ - \ tools.},\n address = {The University of Auckland, New Zealand},\n articleno\ - \ = {12},\n author = {Ramos, Juan and Calcagno, Esteban and Vergara, Ramiro Oscar\ - \ and Riera, Pablo and Rizza, Joaqu{\\'{\\i}}n},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.21428/92fbeb44.c38bfb86},\n issn = {2220-4806},\n month = {jun},\n pdf\ - \ = {123.pdf},\n presentation-video = {https://www.youtube.com/watch?v=5y4BbQWVNGQ},\n\ - \ title = {Bandoneon 2.0: an interdisciplinary project for research and development\ - \ of electronic bandoneons in Argentina},\n url = {https://doi.org/10.21428%2F92fbeb44.c38bfb86},\n\ - \ year = {2022}\n}\n" + ID: Leroy2006 + address: 'Paris, France' + author: 'Leroy, Nicolas and Fl\''{e}ty, Emmanuel and Bevilacqua, Fr\''{e}d\''{e}ric' + bibtex: "@inproceedings{Leroy2006,\n address = {Paris, France},\n author = {Leroy,\ + \ Nicolas and Fl\\'{e}ty, Emmanuel and Bevilacqua, Fr\\'{e}d\\'{e}ric},\n booktitle\ + \ = {Proceedings of the 
International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176859},\n issn = {2220-4806},\n pages\ + \ = {204--207},\n title = {Reflective Optical Pickup For Violin},\n url = {http://www.nime.org/proceedings/2006/nime2006_204.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.c38bfb86 + doi: 10.5281/zenodo.1176859 issn: 2220-4806 - month: jun - pdf: 123.pdf - presentation-video: https://www.youtube.com/watch?v=5y4BbQWVNGQ - title: 'Bandoneon 2.0: an interdisciplinary project for research and development - of electronic bandoneons in Argentina' - url: https://doi.org/10.21428%2F92fbeb44.c38bfb86 - year: 2022 + pages: 204--207 + title: Reflective Optical Pickup For Violin + url: http://www.nime.org/proceedings/2006/nime2006_204.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_13 - abstract: 'The On Board Call is a bespoke musical interface designed to engage the - general public’s interest in wildlife sounds—such as bird, frog or animal calls—through - imitation and interaction. The device is a handheld, battery-operated, microprocessor-based - machine that synthesizes sounds using frequency modulation synthesis methods. - It includes a small amplifier and loudspeaker for playback and employs an accelerometer - and force sensor that register gestural motions that control sound parameters - in real time. The device is handmade from off-the-shelf components onto a specially - designed PCB and laser cut wooden boards. Development versions of the device have - been tested in wildlife listening contexts and in location-based ensemble performance. - The device is simple to use, compact and inexpensive to facilitate use in community-based - active listening workshops intended to enhance user’s appreciation of the eco - acoustic richness of natural environments. 
Unlike most of the previous work in - wildlife call imitation, the Call does not simply play back recorded wildlife - sounds, it is designed for performative interaction by a user to bring synthesized - sounds to life and imbue them with expression.' - address: 'The University of Auckland, New Zealand' - articleno: 13 - author: 'Brown, Andrew R.' - bibtex: "@inproceedings{NIME22_13,\n abstract = {The On Board Call is a bespoke\ - \ musical interface designed to engage the general public’s interest in wildlife\ - \ sounds—such as bird, frog or animal calls—through imitation and interaction.\ - \ The device is a handheld, battery-operated, microprocessor-based machine that\ - \ synthesizes sounds using frequency modulation synthesis methods. It includes\ - \ a small amplifier and loudspeaker for playback and employs an accelerometer\ - \ and force sensor that register gestural motions that control sound parameters\ - \ in real time. The device is handmade from off-the-shelf components onto a specially\ - \ designed PCB and laser cut wooden boards. Development versions of the device\ - \ have been tested in wildlife listening contexts and in location-based ensemble\ - \ performance. The device is simple to use, compact and inexpensive to facilitate\ - \ use in community-based active listening workshops intended to enhance user’s\ - \ appreciation of the eco acoustic richness of natural environments. 
Unlike most\ - \ of the previous work in wildlife call imitation, the Call does not simply play\ - \ back recorded wildlife sounds, it is designed for performative interaction by\ - \ a user to bring synthesized sounds to life and imbue them with expression.},\n\ - \ address = {The University of Auckland, New Zealand},\n articleno = {13},\n author\ - \ = {Brown, Andrew R.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.71a5a0ba},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {125.pdf},\n presentation-video\ - \ = {https://www.youtube.com/watch?v=iBTBPpaSGi8},\n title = {On Board Call: A\ - \ Gestural Wildlife Imitation Machine},\n url = {https://doi.org/10.21428%2F92fbeb44.71a5a0ba},\n\ - \ year = {2022}\n}\n" + ID: Jorda2006 + abstract: 'This paper introduces the scoreTable*, a tangible interactive music score + editor which started as a simple application for demoing "traditional" approaches + to music creation, using the reacTable* technology, and which has evolved into + an independent research project on its own. After a brief discussion on the role + of pitch in music, we present a brief overview of related tangible music editors, + and discuss several paradigms in computer music creation, contrasting synchronous + with asynchronous approaches. The final part of the paper describes the current + state of the scoreTable* as well as its future lines of research.' + address: 'Paris, France' + author: 'Jordà, Sergi and Alonso, Marcos' + bibtex: "@inproceedings{Jorda2006,\n abstract = {This paper introduces the scoreTable*,\ + \ a tangible interactive music score editor which started as a simple application\ + \ for demoing \"traditional\" approaches to music creation, using the reacTable*\ + \ technology, and which has evolved into an independent research project on its\ + \ own. 
After a brief discussion on the role of pitch in music, we present a brief\ + \ overview of related tangible music editors, and discuss several paradigms in\ + \ computer music creation, contrasting synchronous with asynchronous approaches.\ + \ The final part of the paper describes the current state of the scoreTable* as\ + \ well as its future lines of research.},\n address = {Paris, France},\n author\ + \ = {Jord\\`{a}, Sergi and Alonso, Marcos},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176855},\n issn = {2220-4806},\n keywords = {Musical instrument,\ + \ Collaborative Music, Computer Supported Collaborative Work, Tangible User Interface,\ + \ Music Theory. },\n pages = {208--211},\n title = {Mary Had a Little scoreTable*\ + \ or the reacTable* Goes Melodic},\n url = {http://www.nime.org/proceedings/2006/nime2006_208.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.71a5a0ba + doi: 10.5281/zenodo.1176855 issn: 2220-4806 - month: jun - pdf: 125.pdf - presentation-video: https://www.youtube.com/watch?v=iBTBPpaSGi8 - title: 'On Board Call: A Gestural Wildlife Imitation Machine' - url: https://doi.org/10.21428%2F92fbeb44.71a5a0ba - year: 2022 + keywords: 'Musical instrument, Collaborative Music, Computer Supported Collaborative + Work, Tangible User Interface, Music Theory. ' + pages: 208--211 + title: Mary Had a Little scoreTable* or the reacTable* Goes Melodic + url: http://www.nime.org/proceedings/2006/nime2006_208.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_14 - abstract: 'In a search for a symbiotic relationship between the digital and physical - worlds, I am developing a hybrid, digital-acoustic wind instrument - the Post-Digital - Sax. 
As the name implies, the instrument combines the advantages and flexibility - of digital control with a hands-on physical interface and a non-orthodox means - of sound production, in which the airflow, supplied by the player’s lungs, is - the actual sound source. The pitch, however, is controlled digitally, allowing - a wide range of musical material manipulation, bringing the possibilities of a - digitally augmented performance into the realm of acoustic sound.' - address: 'The University of Auckland, New Zealand' - articleno: 14 - author: 'Cybulski, Krzysztof' - bibtex: "@inproceedings{NIME22_14,\n abstract = {In a search for a symbiotic relationship\ - \ between the digital and physical worlds, I am developing a hybrid, digital-acoustic\ - \ wind instrument - the Post-Digital Sax. As the name implies, the instrument\ - \ combines the advantages and flexibility of digital control with a hands-on physical\ - \ interface and a non-orthodox means of sound production, in which the airflow,\ - \ supplied by the player’s lungs, is the actual sound source. 
The pitch, however,\ - \ is controlled digitally, allowing a wide range of musical material manipulation,\ - \ bringing the possibilities of a digitally augmented performance into the realm\ - \ of acoustic sound.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {14},\n author = {Cybulski, Krzysztof},\n booktitle = {Proceedings\ + ID: Crevoisier2006 + address: 'Paris, France' + author: 'Crevoisier, Alain and Bornand, C\''{e}dric and Guichard, Arnaud and Matsumura, + Seiichiro and Arakawa, Chuichi' + bibtex: "@inproceedings{Crevoisier2006,\n address = {Paris, France},\n author =\ + \ {Crevoisier, Alain and Bornand, C\\'{e}dric and Guichard, Arnaud and Matsumura,\ + \ Seiichiro and Arakawa, Chuichi},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176853},\n\ + \ issn = {2220-4806},\n pages = {212--215},\n title = {Sound Rose: Creating Music\ + \ and Images with a Touch Table},\n url = {http://www.nime.org/proceedings/2006/nime2006_212.pdf},\n\ + \ year = {2006}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176853 + issn: 2220-4806 + pages: 212--215 + title: 'Sound Rose: Creating Music and Images with a Touch Table' + url: http://www.nime.org/proceedings/2006/nime2006_212.pdf + year: 2006 + + +- ENTRYTYPE: inproceedings + ID: Davidson2006 + abstract: 'In this paper, we describe our experience in musical interface design + for a large scale, high-resolution, multi-touch display surface. We provide an + overview of historical and presentday context in multi-touch audio interaction, + and describe our approach to analysis of tracked multi-finger, multi-hand data + for controlling live audio synthesis.' + address: 'Paris, France' + author: 'Davidson, Philip L. and Han, Jefferson Y.' 
+ bibtex: "@inproceedings{Davidson2006,\n abstract = {In this paper, we describe our\ + \ experience in musical interface design for a large scale, high-resolution, multi-touch\ + \ display surface. We provide an overview of historical and presentday context\ + \ in multi-touch audio interaction, and describe our approach to analysis of tracked\ + \ multi-finger, multi-hand data for controlling live audio synthesis.},\n address\ + \ = {Paris, France},\n author = {Davidson, Philip L. and Han, Jefferson Y.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176889},\n issn = {2220-4806},\n\ + \ keywords = {multi-touch, touch, tactile, bi-manual, multi-user, synthesis, dynamic\ + \ patching },\n pages = {216--219},\n title = {Synthesis and Control on Large\ + \ Scale Multi-Touch Sensing Displays},\n url = {http://www.nime.org/proceedings/2006/nime2006_216.pdf},\n\ + \ year = {2006}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176889 + issn: 2220-4806 + keywords: 'multi-touch, touch, tactile, bi-manual, multi-user, synthesis, dynamic + patching ' + pages: 216--219 + title: Synthesis and Control on Large Scale Multi-Touch Sensing Displays + url: http://www.nime.org/proceedings/2006/nime2006_216.pdf + year: 2006 + + +- ENTRYTYPE: inproceedings + ID: Kvifte2006 + abstract: 'This paper discusses the need for a framework for describing musical + instruments and their design, and discusses some possible elements in such a framework. + The framework is meant as an aid in the development of a coherent terminology + for describing, comparing and discussing different musical instruments and musical + instrument designs. Three different perspectives are presented; that of the listener, + the performer, and the constructor, and various levels of descriptions are introduced.' 
+ address: 'Paris, France' + author: 'Kvifte, Tellef and Jensenius, Alexander Refsum' + bibtex: "@inproceedings{Kvifte2006,\n abstract = {This paper discusses the need\ + \ for a framework for describing musical instruments and their design, and discusses\ + \ some possible elements in such a framework. The framework is meant as an aid\ + \ in the development of a coherent terminology for describing, comparing and discussing\ + \ different musical instruments and musical instrument designs. Three different\ + \ perspectives are presented; that of the listener, the performer, and the constructor,\ + \ and various levels of descriptions are introduced.},\n address = {Paris, France},\n\ + \ author = {Kvifte, Tellef and Jensenius, Alexander Refsum},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.756616d4},\n issn = {2220-4806},\n month = {jun},\n\ - \ pdf = {126.pdf},\n presentation-video = {https://youtu.be/RnuEvjMdEj4},\n title\ - \ = {Post-digital sax - a digitally controlled acoustic single-reed woodwind instrument},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.756616d4},\n year = {2022}\n}\n" + \ doi = {10.5281/zenodo.1176951},\n issn = {2220-4806},\n keywords = {Musical\ + \ instrument design, mapping, gestures, organology. 
},\n pages = {220--225},\n\ + \ title = {Towards a Coherent Terminology and Model of Instrument Description\ + \ and Design},\n url = {http://www.nime.org/proceedings/2006/nime2006_220.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.756616d4 + doi: 10.5281/zenodo.1176951 issn: 2220-4806 - month: jun - pdf: 126.pdf - presentation-video: https://youtu.be/RnuEvjMdEj4 - title: Post-digital sax - a digitally controlled acoustic single-reed woodwind instrument - url: https://doi.org/10.21428%2F92fbeb44.756616d4 - year: 2022 + keywords: 'Musical instrument design, mapping, gestures, organology. ' + pages: 220--225 + title: Towards a Coherent Terminology and Model of Instrument Description and Design + url: http://www.nime.org/proceedings/2006/nime2006_220.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_15 - abstract: 'An emerging approach to building new musical instruments is based on - training neural networks to generate audio conditioned upon parametric input. - We use the term "generative models" rather than "musical instruments" for the - trained networks because it reflects the statistical way the instruments are trained - to "model" the association between parameters and the distribution of audio data, - and because "musical" carries historical baggage as a reference to a restricted - domain of sound. Generative models are musical instruments in that they produce - a prescribed range of sound playable through the expressive manipulation of an - interface. To learn the mapping from interface to audio, generative models require - large amounts of parametrically labeled audio data. This paper introduces the - Synthetic Audio Textures (Syn- Tex1) collection of data set generators. 
SynTex - is a database of parameterized audio textures and a suite of tools for creating - and labeling datasets designed for training and testing generative neural networks - for parametrically conditioned sound synthesis. While there are many existing - labeled speech and traditional musical instrument databases available for training - generative models, most datasets of general (e.g. environmental) audio are oriented - and labeled for the purpose of classification rather than expressive musical generation. - SynTex is designed to provide an open shareable reference set of audio for creating - generative sound models including their interfaces. SynTex sound sets are synthetically - generated. This facilitates dense and accurate labeling necessary for conditionally - training generative networks conditionally dependent on input parameter values. - SynTex has several characteristics designed to support a data-centric approach - to developing, exploring, training, and testing generative models.' - address: 'The University of Auckland, New Zealand' - articleno: 15 - author: 'Wyse, Lonce and Ravikumar, Prashanth Thattai' - bibtex: "@inproceedings{NIME22_15,\n abstract = {An emerging approach to building\ - \ new musical instruments is based on training neural networks to generate audio\ - \ conditioned upon parametric input. We use the term \"generative models\" rather\ - \ than \"musical instruments\" for the trained networks because it reflects the\ - \ statistical way the instruments are trained to \"model\" the association between\ - \ parameters and the distribution of audio data, and because \"musical\" carries\ - \ historical baggage as a reference to a restricted domain of sound. Generative\ - \ models are musical instruments in that they produce a prescribed range of sound\ - \ playable through the expressive manipulation of an interface. 
To learn the mapping\ - \ from interface to audio, generative models require large amounts of parametrically\ - \ labeled audio data. This paper introduces the Synthetic Audio Textures (Syn-\ - \ Tex1) collection of data set generators. SynTex is a database of parameterized\ - \ audio textures and a suite of tools for creating and labeling datasets designed\ - \ for training and testing generative neural networks for parametrically conditioned\ - \ sound synthesis. While there are many existing labeled speech and traditional\ - \ musical instrument databases available for training generative models, most\ - \ datasets of general (e.g. environmental) audio are oriented and labeled for\ - \ the purpose of classification rather than expressive musical generation. SynTex\ - \ is designed to provide an open shareable reference set of audio for creating\ - \ generative sound models including their interfaces. SynTex sound sets are synthetically\ - \ generated. This facilitates dense and accurate labeling necessary for conditionally\ - \ training generative networks conditionally dependent on input parameter values.\ - \ SynTex has several characteristics designed to support a data-centric approach\ - \ to developing, exploring, training, and testing generative models.},\n address\ - \ = {The University of Auckland, New Zealand},\n articleno = {15},\n author =\ - \ {Wyse, Lonce and Ravikumar, Prashanth Thattai},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.21428/92fbeb44.0fe70450},\n issn = {2220-4806},\n month = {jun},\n pdf\ - \ = {128.pdf},\n presentation-video = {https://youtu.be/KZHXck9c75s},\n title\ - \ = {Syntex: parametric audio texture datasets for conditional training of instrumental\ - \ interfaces.},\n url = {https://doi.org/10.21428%2F92fbeb44.0fe70450},\n year\ - \ = {2022}\n}\n" + ID: Marshall2006 + address: 'Paris, France' + author: 'Marshall, Mark T. and Wanderley, Marcelo M.' 
+ bibtex: "@inproceedings{Marshall2006,\n address = {Paris, France},\n author = {Marshall,\ + \ Mark T. and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176973},\n\ + \ issn = {2220-4806},\n keywords = {digital musical instruments,tactile feedback,vibro-tactile},\n\ + \ pages = {226--229},\n title = {Vibrotactile Feedback in Digital Musical Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_226.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.0fe70450 + doi: 10.5281/zenodo.1176973 issn: 2220-4806 - month: jun - pdf: 128.pdf - presentation-video: https://youtu.be/KZHXck9c75s - title: 'Syntex: parametric audio texture datasets for conditional training of instrumental - interfaces.' - url: https://doi.org/10.21428%2F92fbeb44.0fe70450 - year: 2022 + keywords: 'digital musical instruments,tactile feedback,vibro-tactile' + pages: 226--229 + title: Vibrotactile Feedback in Digital Musical Instruments + url: http://www.nime.org/proceedings/2006/nime2006_226.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_16 - abstract: 'This paper describes a toolkit for analyzing the NIME proceedings archive, - which facilitates the bibliometric study of the conference papers and the identification - of trends and patterns. The toolkit is implemented as a collection of Python methods - that aggregate, scrape and retrieve various meta-data from published papers. Extracted - data is stored in a large numeric table as well as plain text files. Analytical - functions within the toolkit can be easily extended or modified. The text mining - script that can be highly customized without the need for programming. 
The toolkit - uses only publicly available information organized in standard formats, and is - available as open-source software to promote continuous development in step with - the NIME archive.' - address: 'The University of Auckland, New Zealand' - articleno: 16 - author: 'Goode, Jackson and Fasciani, Stefano' - bibtex: "@inproceedings{NIME22_16,\n abstract = {This paper describes a toolkit\ - \ for analyzing the NIME proceedings archive, which facilitates the bibliometric\ - \ study of the conference papers and the identification of trends and patterns.\ - \ The toolkit is implemented as a collection of Python methods that aggregate,\ - \ scrape and retrieve various meta-data from published papers. Extracted data\ - \ is stored in a large numeric table as well as plain text files. Analytical functions\ - \ within the toolkit can be easily extended or modified. The text mining script\ - \ that can be highly customized without the need for programming. The toolkit\ - \ uses only publicly available information organized in standard formats, and\ - \ is available as open-source software to promote continuous development in step\ - \ with the NIME archive.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {16},\n author = {Goode, Jackson and Fasciani, Stefano},\n booktitle\ + ID: Koehly2006 + abstract: 'This paper presents the development of novel "home-made" touch sensors + using conductive pigments and various substrate materials. We show that it is + possible to build one''s own position, pressure and bend sensors with various + electrical characteristics, sizes and shapes, and this for a very competitive + price. We give examples and provide results from experimental tests of such developments. ' + address: 'Paris, France' + author: 'Koehly, Rodolphe and Curtil, Denis and Wanderley, Marcelo M.' 
+ bibtex: "@inproceedings{Koehly2006,\n abstract = {This paper presents the development\ + \ of novel \"home-made\" touch sensors using conductive pigments and various substrate\ + \ materials. We show that it is possible to build one's own position, pressure\ + \ and bend sensors with various electrical characteristics, sizes and shapes,\ + \ and this for a very competitive price. We give examples and provide results\ + \ from experimental tests of such developments. },\n address = {Paris, France},\n\ + \ author = {Koehly, Rodolphe and Curtil, Denis and Wanderley, Marcelo M.},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.58efca21},\n issn = {2220-4806},\n month\ - \ = {jun},\n pdf = {13.pdf},\n presentation-video = {https://youtu.be/Awp5-oxL-NM},\n\ - \ title = {A Toolkit for the Analysis of the {NIME} Proceedings Archive},\n url\ - \ = {https://doi.org/10.21428%2F92fbeb44.58efca21},\n year = {2022}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1176949},\n issn = {2220-4806},\n keywords\ + \ = {Touch sensors, piezoresistive technology, conductive pigments, sensitive\ + \ materials, interface design },\n pages = {230--233},\n title = {Paper FSRs and\ + \ Latex/Fabric Traction Sensors: Methods for the Development of Home-Made Touch\ + \ Sensors},\n url = {http://www.nime.org/proceedings/2006/nime2006_230.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.58efca21 + doi: 10.5281/zenodo.1176949 issn: 2220-4806 - month: jun - pdf: 13.pdf - presentation-video: https://youtu.be/Awp5-oxL-NM - title: A Toolkit for the Analysis of the NIME Proceedings Archive - url: https://doi.org/10.21428%2F92fbeb44.58efca21 - year: 2022 + keywords: 'Touch sensors, piezoresistive technology, conductive pigments, sensitive + materials, interface design ' + pages: 230--233 + title: 'Paper FSRs and 
Latex/Fabric Traction Sensors: Methods for the Development + of Home-Made Touch Sensors' + url: http://www.nime.org/proceedings/2006/nime2006_230.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_17 - abstract: 'This paper focuses on the redundancy and physicality of magnetic recording - media as a defining factor in the design of a lo-fi audio device, the Concentric - Sampler. A modified floppy disk drive (FDD) and additional circuitry enables the - FDD to record to and playback audio from a 3.5” floppy disk. The Concentric Sampler - is designed as an instrument for live performance and a tool for sonic manipulation, - resulting in primitive looping and time-based granular synthesis. This paper explains - the motivation and background of the Concentric Sampler, related applications - and approaches, its technical realisation, and its musical possibilities. To conclude, - the Concentric Sampler’s potential as an instrument and compositional tool is - discussed alongside the future possibilities for development.' - address: 'The University of Auckland, New Zealand' - articleno: 17 - author: 'Tate, Timothy' - bibtex: "@inproceedings{NIME22_17,\n abstract = {This paper focuses on the redundancy\ - \ and physicality of magnetic recording media as a defining factor in the design\ - \ of a lo-fi audio device, the Concentric Sampler. A modified floppy disk drive\ - \ (FDD) and additional circuitry enables the FDD to record to and playback audio\ - \ from a 3.5” floppy disk. The Concentric Sampler is designed as an instrument\ - \ for live performance and a tool for sonic manipulation, resulting in primitive\ - \ looping and time-based granular synthesis. This paper explains the motivation\ - \ and background of the Concentric Sampler, related applications and approaches,\ - \ its technical realisation, and its musical possibilities. 
To conclude, the Concentric\ - \ Sampler’s potential as an instrument and compositional tool is discussed alongside\ - \ the future possibilities for development.},\n address = {The University of Auckland,\ - \ New Zealand},\n articleno = {17},\n author = {Tate, Timothy},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.324729a3},\n issn = {2220-4806},\n month = {jun},\n\ - \ pdf = {131.pdf},\n presentation-video = {https://youtu.be/7Myu1W7tbts},\n title\ - \ = {The Concentric Sampler: A musical instrument from a repurposed floppy disk\ - \ drive},\n url = {https://doi.org/10.21428%2F92fbeb44.324729a3},\n year = {2022}\n\ - }\n" + ID: Bowers2006 + address: 'Paris, France' + author: 'Bowers, John and Villar, Nicolas' + bibtex: "@inproceedings{Bowers2006,\n address = {Paris, France},\n author = {Bowers,\ + \ John and Villar, Nicolas},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176881},\n\ + \ issn = {2220-4806},\n keywords = {Ad hoc instruments, Pin&Play, physical interfaces,\ + \ music performance, new interfaces for musical expression. },\n pages = {234--239},\n\ + \ title = {Creating Ad Hoc Instruments with Pin\\&Play\\&Perform},\n url = {http://www.nime.org/proceedings/2006/nime2006_234.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.324729a3 + doi: 10.5281/zenodo.1176881 issn: 2220-4806 - month: jun - pdf: 131.pdf - presentation-video: https://youtu.be/7Myu1W7tbts - title: 'The Concentric Sampler: A musical instrument from a repurposed floppy disk - drive' - url: https://doi.org/10.21428%2F92fbeb44.324729a3 - year: 2022 + keywords: 'Ad hoc instruments, Pin&Play, physical interfaces, music performance, + new interfaces for musical expression. 
' + pages: 234--239 + title: Creating Ad Hoc Instruments with Pin\&Play\&Perform + url: http://www.nime.org/proceedings/2006/nime2006_234.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_18 - abstract: 'Ghost Play is a violin-playing robot that aims to realize bowing and - fingering similar to human players. Existing violin-playing machines have faced - various problems concerning performance techniques owing to constraints imposed - by their design. Bowing and fingering that require accurate and high-acceleration - movement (e.g., a spiccato, tremolo, and glissando) are essential but challenging. - To overcome this problem, Ghost Play is equipped with seven electromagnetic linear - actuators, three for controlling the bow (i.e., the right hand), and the other - four for controlling the pitch on each string (i.e., the left hand). The violin-playing - robot is mounted with an unmodified violin bow. A sensor is attached to the bow - to measure bow pressure. The control software receives a time series of performance - data and manipulates the actuators accordingly. The performance data consists - of the bow direction, bow speed, bow pressure, pitch, vibrato interval, vibrato - width, and string to be drawn. We also developed an authoring tool for the performance - data using a graphic user interface. Finally, we demonstrated Ghost Play performing - bowing and fingering techniques such as a spiccato, tremolo, and glissando, as - well as a piece of classical music.' - address: 'The University of Auckland, New Zealand' - articleno: 18 - author: 'Kamatani, Takahiro and Sato, Yoshinao and Fujino, Masato' - bibtex: "@inproceedings{NIME22_18,\n abstract = {Ghost Play is a violin-playing\ - \ robot that aims to realize bowing and fingering similar to human players. Existing\ - \ violin-playing machines have faced various problems concerning performance techniques\ - \ owing to constraints imposed by their design. 
Bowing and fingering that require\ - \ accurate and high-acceleration movement (e.g., a spiccato, tremolo, and glissando)\ - \ are essential but challenging. To overcome this problem, Ghost Play is equipped\ - \ with seven electromagnetic linear actuators, three for controlling the bow (i.e.,\ - \ the right hand), and the other four for controlling the pitch on each string\ - \ (i.e., the left hand). The violin-playing robot is mounted with an unmodified\ - \ violin bow. A sensor is attached to the bow to measure bow pressure. The control\ - \ software receives a time series of performance data and manipulates the actuators\ - \ accordingly. The performance data consists of the bow direction, bow speed,\ - \ bow pressure, pitch, vibrato interval, vibrato width, and string to be drawn.\ - \ We also developed an authoring tool for the performance data using a graphic\ - \ user interface. Finally, we demonstrated Ghost Play performing bowing and fingering\ - \ techniques such as a spiccato, tremolo, and glissando, as well as a piece of\ - \ classical music.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {18},\n author = {Kamatani, Takahiro and Sato, Yoshinao and Fujino,\ - \ Masato},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.754a50b5},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {136.pdf},\n presentation-video = {https://youtu.be/FOivgYXk1_g},\n\ - \ title = {Ghost Play - A Violin-Playing Robot using Electromagnetic Linear Actuators},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.754a50b5},\n year = {2022}\n}\n" + ID: Serafin2006 + abstract: 'In this paper we introduce the Croaker, a novel input deviceinspired + by Russolo''s Intonarumori. We describe the components of the controller and the + sound synthesis engine whichallows to reproduce several everyday sounds.' 
+ address: 'Paris, France' + author: 'Serafin, Stefania and de Götzen, Amalia and Böttcher, Niels and Gelineck, + Steven' + bibtex: "@inproceedings{Serafin2006,\n abstract = {In this paper we introduce the\ + \ Croaker, a novel input deviceinspired by Russolo's Intonarumori. We describe\ + \ the components of the controller and the sound synthesis engine whichallows\ + \ to reproduce several everyday sounds.},\n address = {Paris, France},\n author\ + \ = {Serafin, Stefania and de G\\''{o}tzen, Amalia and B\\''{o}ttcher, Niels and\ + \ Gelineck, Steven},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177005},\n\ + \ issn = {2220-4806},\n keywords = {Noise machines, everyday sounds, physical\ + \ models. },\n pages = {240--245},\n title = {Synthesis and Control of Everyday\ + \ Sounds Reconstructing Russolo's Intonarumori},\n url = {http://www.nime.org/proceedings/2006/nime2006_240.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.754a50b5 + doi: 10.5281/zenodo.1177005 issn: 2220-4806 - month: jun - pdf: 136.pdf - presentation-video: https://youtu.be/FOivgYXk1_g - title: Ghost Play - A Violin-Playing Robot using Electromagnetic Linear Actuators - url: https://doi.org/10.21428%2F92fbeb44.754a50b5 - year: 2022 + keywords: 'Noise machines, everyday sounds, physical models. ' + pages: 240--245 + title: Synthesis and Control of Everyday Sounds Reconstructing Russolo's Intonarumori + url: http://www.nime.org/proceedings/2006/nime2006_240.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_19 - abstract: 'Feedback is a technique that has been used in musical performance since - the advent of electricity. 
From the early cybernetic explorations of Bebe and - Louis Barron, through the screaming sound of Hendrix’s guitar, to the systems - design of David Tudor or Nic Collins, we find the origins of feedback in music - being technologically and aesthetically diverse. Through interviews with participants - in a recent Feedback Musicianship Network symposium, this paper seeks to investigate - the contemporary use of this technique and explore how key protagonists discuss - the nature of their practice. We see common concepts emerging in these conversations: - agency, complexity, coupling, play, design and posthumanism. The paper presents - a terminological and ideological framework as manifested at this point in time, - and makes a theoretical contribution to the understanding of the rationale and - potential of this technological and compositional approach.' - address: 'The University of Auckland, New Zealand' - articleno: 19 - author: 'Magnusson, Thor and Kiefer, Chris and Ulfarsson, Halldor' - bibtex: "@inproceedings{NIME22_19,\n abstract = {Feedback is a technique that has\ - \ been used in musical performance since the advent of electricity. From the early\ - \ cybernetic explorations of Bebe and Louis Barron, through the screaming sound\ - \ of Hendrix’s guitar, to the systems design of David Tudor or Nic Collins, we\ - \ find the origins of feedback in music being technologically and aesthetically\ - \ diverse. Through interviews with participants in a recent Feedback Musicianship\ - \ Network symposium, this paper seeks to investigate the contemporary use of this\ - \ technique and explore how key protagonists discuss the nature of their practice.\ - \ We see common concepts emerging in these conversations: agency, complexity,\ - \ coupling, play, design and posthumanism. 
The paper presents a terminological\ - \ and ideological framework as manifested at this point in time, and makes a theoretical\ - \ contribution to the understanding of the rationale and potential of this technological\ - \ and compositional approach.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {19},\n author = {Magnusson, Thor and Kiefer, Chris and Ulfarsson,\ - \ Halldor},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.aa7de712},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {151.pdf},\n presentation-video\ - \ = {https://www.youtube.com/watch?v=ouwIA_aVmEM},\n title = {Reflexions upon\ - \ Feedback},\n url = {https://doi.org/10.21428%2F92fbeb44.aa7de712},\n year =\ - \ {2022}\n}\n" + ID: Weinberg2006 + address: 'Paris, France' + author: 'Weinberg, Gil and Thatcher, Travis' + bibtex: "@inproceedings{Weinberg2006,\n address = {Paris, France},\n author = {Weinberg,\ + \ Gil and Thatcher, Travis},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177019},\n\ + \ issn = {2220-4806},\n keywords = {1,background and motivations,biological research,interactive\ + \ auditory display,neural patterns,scholars are,sonification,with new developments\ + \ in},\n pages = {246--249},\n title = {Interactive Sonification of Neural Activity},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_246.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.aa7de712 + doi: 10.5281/zenodo.1177019 issn: 2220-4806 - month: jun - pdf: 151.pdf - presentation-video: https://www.youtube.com/watch?v=ouwIA_aVmEM - title: Reflexions upon Feedback - url: https://doi.org/10.21428%2F92fbeb44.aa7de712 - year: 2022 + keywords: '1,background and motivations,biological research,interactive 
auditory + display,neural patterns,scholars are,sonification,with new developments in' + pages: 246--249 + title: Interactive Sonification of Neural Activity + url: http://www.nime.org/proceedings/2006/nime2006_246.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_20 - abstract: 'Beginning, amateur, and professional violinists alike make use of a shoulder - rest with a typical form factor for ergonomic support. Numerous commercial devices - are available. We saturate these inert devices with electronics and actuators - to open a new design space for “active shoulder rests” (ASRs), a pathway for violinists - to adopt inexpensive and transparent electroacoustic interfaces. We present a - dual-mode ASR that features a built-in microphone pickup and parametric control - of mixing between sound diffusion and actuation modes for experiments with active - acoustics and feedback. We document a modular approach to signal processing allowing - quick adaptation and differentiation of control signals, and demonstrate rich - sound processing techniques that create lively improvisation environments. By - fostering participation and convergence among digital media practices and diverse - musical cultures, we envision ASRs broadly rekindling creative practice for the - violin, long a tool of improvisation before the triumph of classical works. ASRs - decolonize the violin by activating new flows and connectivities, freeing up habitual - relations, and refreshing the musical affordances of this otherwise quintessentially - western and canonical instrument.' - address: 'The University of Auckland, New Zealand' - articleno: 20 - author: 'Thorn, Seth and Lahey, Byron' - bibtex: "@inproceedings{NIME22_20,\n abstract = {Beginning, amateur, and professional\ - \ violinists alike make use of a shoulder rest with a typical form factor for\ - \ ergonomic support. Numerous commercial devices are available. 
We saturate these\ - \ inert devices with electronics and actuators to open a new design space for\ - \ “active shoulder rests” (ASRs), a pathway for violinists to adopt inexpensive\ - \ and transparent electroacoustic interfaces. We present a dual-mode ASR that\ - \ features a built-in microphone pickup and parametric control of mixing between\ - \ sound diffusion and actuation modes for experiments with active acoustics and\ - \ feedback. We document a modular approach to signal processing allowing quick\ - \ adaptation and differentiation of control signals, and demonstrate rich sound\ - \ processing techniques that create lively improvisation environments. By fostering\ - \ participation and convergence among digital media practices and diverse musical\ - \ cultures, we envision ASRs broadly rekindling creative practice for the violin,\ - \ long a tool of improvisation before the triumph of classical works. ASRs decolonize\ - \ the violin by activating new flows and connectivities, freeing up habitual relations,\ - \ and refreshing the musical affordances of this otherwise quintessentially western\ - \ and canonical instrument.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {20},\n author = {Thorn, Seth and Lahey, Byron},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.91f87875},\n issn = {2220-4806},\n month = {jun},\n\ - \ pdf = {16.pdf},\n presentation-video = {https://youtu.be/7qNTa4QplC4},\n title\ - \ = {Decolonizing the Violin with Active Shoulder Rests ({ASRs})},\n url = {https://doi.org/10.21428%2F92fbeb44.91f87875},\n\ - \ year = {2022}\n}\n" + ID: Remus2006 + address: 'Paris, France' + author: 'R\''{e}mus, Jacques' + bibtex: "@inproceedings{Remus2006,\n address = {Paris, France},\n author = {R\\\ + '{e}mus, Jacques},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = 
{10.5281/zenodo.1176989},\n\ + \ issn = {2220-4806},\n keywords = {camera musicale,interface,jacques r\\'{e}mus,machines,musical\ + \ camera,musical hand,non haptic instrument,s mappings,sculptures and mechanical\ + \ musical,sound},\n pages = {250--253},\n title = {Non Haptic Control of Music\ + \ by Video Analysis of Hand Movements: 14 Years of Experience with the `Cam\\\ + '{e}ra Musicale'},\n url = {http://www.nime.org/proceedings/2006/nime2006_250.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.91f87875 + doi: 10.5281/zenodo.1176989 issn: 2220-4806 - month: jun - pdf: 16.pdf - presentation-video: https://youtu.be/7qNTa4QplC4 - title: Decolonizing the Violin with Active Shoulder Rests (ASRs) - url: https://doi.org/10.21428%2F92fbeb44.91f87875 - year: 2022 + keywords: 'camera musicale,interface,jacques r\''{e}mus,machines,musical camera,musical + hand,non haptic instrument,s mappings,sculptures and mechanical musical,sound' + pages: 250--253 + title: 'Non Haptic Control of Music by Video Analysis of Hand Movements: 14 Years + of Experience with the `Cam\''{e}ra Musicale''' + url: http://www.nime.org/proceedings/2006/nime2006_250.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_21 - abstract: 'Augmented reality (AR) is increasingly being envisaged as a process of - perceptual mediation or modulation, not only as a system that combines aligned - and interactive virtual objects with a real environment. Within artistic practice, - this reconceptualisation has led to a medium that emphasises this multisensory - integration of virtual processes, leading to expressive, narrative-driven, and - thought-provoking AR experiences. This paper outlines the development and evaluation - of the polaris~ experience. 
polaris~ is built using a set of open-source hardware - and software components that can be used to create privacy-respecting and cost-effective - audiovisual AR experiences. Its wearable component is comprised of the open-source - Project North Star AR headset and a pair of bone conduction headphones, providing - simultaneous real and virtual visual and auditory elements. These elements are - spatially aligned using Unity and PureData to the real space that they appear - in and can be gesturally interacted with in a way that fosters artistic and musical - expression. In order to evaluate the polaris~, 10 participants were recruited, - who spent approximately 30 minutes each in the AR scene and were interviewed about - their experience. Using grounded theory, the author extracted coded remarks from - the transcriptions of these studies, that were then sorted into the categories - of Sentiment, Learning, Adoption, Expression, and Immersion. In evaluating polaris~ - it was found that the experience engaged participants fruitfully, with many noting - their ability to express themselves audiovisually in creative ways. The experience - and the framework the author used to create it is available in a Github respository.' - address: 'The University of Auckland, New Zealand' - articleno: 21 - author: 'Bilbow, Sam' - bibtex: "@inproceedings{NIME22_21,\n abstract = {Augmented reality (AR) is increasingly\ - \ being envisaged as a process of perceptual mediation or modulation, not only\ - \ as a system that combines aligned and interactive virtual objects with a real\ - \ environment. Within artistic practice, this reconceptualisation has led to a\ - \ medium that emphasises this multisensory integration of virtual processes, leading\ - \ to expressive, narrative-driven, and thought-provoking AR experiences. This\ - \ paper outlines the development and evaluation of the polaris~ experience. 
polaris~\ - \ is built using a set of open-source hardware and software components that can\ - \ be used to create privacy-respecting and cost-effective audiovisual AR experiences.\ - \ Its wearable component is comprised of the open-source Project North Star AR\ - \ headset and a pair of bone conduction headphones, providing simultaneous real\ - \ and virtual visual and auditory elements. These elements are spatially aligned\ - \ using Unity and PureData to the real space that they appear in and can be gesturally\ - \ interacted with in a way that fosters artistic and musical expression. In order\ - \ to evaluate the polaris~, 10 participants were recruited, who spent approximately\ - \ 30 minutes each in the AR scene and were interviewed about their experience.\ - \ Using grounded theory, the author extracted coded remarks from the transcriptions\ - \ of these studies, that were then sorted into the categories of Sentiment, Learning,\ - \ Adoption, Expression, and Immersion. In evaluating polaris~ it was found that\ - \ the experience engaged participants fruitfully, with many noting their ability\ - \ to express themselves audiovisually in creative ways. 
The experience and the\ - \ framework the author used to create it is available in a Github respository.},\n\ - \ address = {The University of Auckland, New Zealand},\n articleno = {21},\n author\ - \ = {Bilbow, Sam},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.8abb9ce6},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {162.pdf},\n presentation-video\ - \ = {https://www.youtube.com/watch?v=eCdQku5hFOE},\n title = {Evaluating polaris{\\\ - textasciitilde} - An Audiovisual Augmented Reality Experience Built on Open-Source\ - \ Hardware and Software},\n url = {https://doi.org/10.21428%2F92fbeb44.8abb9ce6},\n\ - \ year = {2022}\n}\n" + ID: Borchers2006 + abstract: 'The MICON is an electronic music stand extending Maestro!, the latest + in a series of interactive conducting exhibits that use real orchestral audio + and video recordings. The MICON uses OpenGL-based rendering to display and animate + score pages with a high degree of realism. It offers three different score display + formats to match the user''s level of expertise. A realtime animated visual cueing + system helps users with their conducting. The MICON has been evaluated with music + students. ' + address: 'Paris, France' + author: 'Borchers, Jan and Hadjakos, Aristotelis and Mühlhäuser, Max' + bibtex: "@inproceedings{Borchers2006,\n abstract = {The MICON is an electronic music\ + \ stand extending Maestro!, the latest in a series of interactive conducting exhibits\ + \ that use real orchestral audio and video recordings. The MICON uses OpenGL-based\ + \ rendering to display and animate score pages with a high degree of realism.\ + \ It offers three different score display formats to match the user's level of\ + \ expertise. A realtime animated visual cueing system helps users with their conducting.\ + \ The MICON has been evaluated with music students. 
},\n address = {Paris, France},\n\ + \ author = {Borchers, Jan and Hadjakos, Aristotelis and M\\''{u}hlh\\''{a}user,\ + \ Max},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176877},\n issn = {2220-4806},\n\ + \ keywords = {Music stand, score display, exhibit, conducting. },\n pages = {254--259},\n\ + \ title = {MICON A Music Stand for Interactive Conducting},\n url = {http://www.nime.org/proceedings/2006/nime2006_254.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.8abb9ce6 + doi: 10.5281/zenodo.1176877 issn: 2220-4806 - month: jun - pdf: 162.pdf - presentation-video: https://www.youtube.com/watch?v=eCdQku5hFOE - title: Evaluating polaris~ - An Audiovisual Augmented Reality Experience Built on - Open-Source Hardware and Software - url: https://doi.org/10.21428%2F92fbeb44.8abb9ce6 - year: 2022 + keywords: 'Music stand, score display, exhibit, conducting. ' + pages: 254--259 + title: MICON A Music Stand for Interactive Conducting + url: http://www.nime.org/proceedings/2006/nime2006_254.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_22 - abstract: 'This paper presents the MappEMG pipeline. The goal of this pipeline is - to augment the traditional classical concert experience by giving listeners access, - through the sense of touch, to an intimate and non-visible dimension of the musicians’ - bodily experience while performing. The live-stream pipeline produces vibrations - based on muscle activity captured through surface electromyography (EMG). Therefore, - MappEMG allows the audience to experience the performer’s muscle effort, an essential - component of music performance which is typically unavailable to direct visual - observation. The paper is divided in four sections. First, we overview related - works on EMG, music performance, and vibrotactile feedback. 
We then present conceptual - and methodological issues of capturing musicians’ muscle effort related to their - expressive intentions. We further explain the different components of the live-stream - data pipeline: a python software named Biosiglive for data acquisition and processing, - a Max/MSP patch for data post-processing and mapping, and a mobile application - named hAPPtiks for real-time control of smartphones’ vibration. Finally, we address - the application of the pipeline in an actual music performance. Thanks to their - modular structure, the tools presented could be used in different creative and - biomedical contexts involving gestural control of haptic stimuli.' - address: 'The University of Auckland, New Zealand' - articleno: 22 - author: 'Verdugo, Felipe and Ceglia, Amedeo and Frisson, Christian and Burton, Alexandre - and Begon, Mickael and Gibet, Sylvie and Wanderley, Marcelo M.' - bibtex: "@inproceedings{NIME22_22,\n abstract = {This paper presents the MappEMG\ - \ pipeline. The goal of this pipeline is to augment the traditional classical\ - \ concert experience by giving listeners access, through the sense of touch, to\ - \ an intimate and non-visible dimension of the musicians’ bodily experience while\ - \ performing. The live-stream pipeline produces vibrations based on muscle activity\ - \ captured through surface electromyography (EMG). Therefore, MappEMG allows the\ - \ audience to experience the performer’s muscle effort, an essential component\ - \ of music performance which is typically unavailable to direct visual observation.\ - \ The paper is divided in four sections. First, we overview related works on EMG,\ - \ music performance, and vibrotactile feedback. We then present conceptual and\ - \ methodological issues of capturing musicians’ muscle effort related to their\ - \ expressive intentions. 
We further explain the different components of the live-stream\ - \ data pipeline: a python software named Biosiglive for data acquisition and processing,\ - \ a Max/MSP patch for data post-processing and mapping, and a mobile application\ - \ named hAPPtiks for real-time control of smartphones’ vibration. Finally, we\ - \ address the application of the pipeline in an actual music performance. Thanks\ - \ to their modular structure, the tools presented could be used in different creative\ - \ and biomedical contexts involving gestural control of haptic stimuli.},\n address\ - \ = {The University of Auckland, New Zealand},\n articleno = {22},\n author =\ - \ {Verdugo, Felipe and Ceglia, Amedeo and Frisson, Christian and Burton, Alexandre\ - \ and Begon, Mickael and Gibet, Sylvie and Wanderley, Marcelo M.},\n booktitle\ + ID: Lee2006 + abstract: 'Designing a conducting gesture analysis system for public spacesposes + unique challenges. We present conga, a software framework that enables automatic + recognition and interpretation ofconducting gestures. conga is able to recognize + multiple types ofgestures with varying levels of difficulty for the user to perform,from + a standard four-beat pattern, to simplified up-down conducting movements, to no + pattern at all. conga provides an extendablelibrary of feature detectors linked + together into a directed acyclicgraph; these graphs represent the various conducting + patterns asgesture profiles. At run-time, conga searches for the best profileto + match a user''s gestures in real-time, and uses a beat prediction algorithm to + provide results at the sub-beat level, in additionto output values such as tempo, + gesture size, and the gesture''sgeometric center. Unlike some previous approaches, + conga doesnot need to be trained with sample data before use. Our preliminary + user tests show that conga has a beat recognition rate ofover 90%. 
conga is deployed + as the gesture recognition systemfor Maestro!, an interactive conducting exhibit + that opened in theBetty Brinn Children''s Museum in Milwaukee, USA in March2006.' + address: 'Paris, France' + author: 'Lee, Eric and Grüll, Ingo and Keil, Henning and Borchers, Jan' + bibtex: "@inproceedings{Lee2006,\n abstract = {Designing a conducting gesture analysis\ + \ system for public spacesposes unique challenges. We present conga, a software\ + \ framework that enables automatic recognition and interpretation ofconducting\ + \ gestures. conga is able to recognize multiple types ofgestures with varying\ + \ levels of difficulty for the user to perform,from a standard four-beat pattern,\ + \ to simplified up-down conducting movements, to no pattern at all. conga provides\ + \ an extendablelibrary of feature detectors linked together into a directed acyclicgraph;\ + \ these graphs represent the various conducting patterns asgesture profiles. At\ + \ run-time, conga searches for the best profileto match a user's gestures in real-time,\ + \ and uses a beat prediction algorithm to provide results at the sub-beat level,\ + \ in additionto output values such as tempo, gesture size, and the gesture'sgeometric\ + \ center. Unlike some previous approaches, conga doesnot need to be trained with\ + \ sample data before use. Our preliminary user tests show that conga has a beat\ + \ recognition rate ofover 90%. 
conga is deployed as the gesture recognition systemfor\ + \ Maestro!, an interactive conducting exhibit that opened in theBetty Brinn Children's\ + \ Museum in Milwaukee, USA in March2006.},\n address = {Paris, France},\n author\ + \ = {Lee, Eric and Gr\\''{u}ll, Ingo and Keil, Henning and Borchers, Jan},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.3ce22588},\n issn = {2220-4806},\n month\ - \ = {jun},\n pdf = {165.pdf},\n presentation-video = {https://youtu.be/gKM0lGs9rxw},\n\ - \ title = {Feeling the Effort of Classical Musicians - A Pipeline from Electromyography\ - \ to Smartphone Vibration for Live Music Performance},\n url = {https://doi.org/10.21428%2F92fbeb44.3ce22588},\n\ - \ year = {2022}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1176957},\n issn = {2220-4806},\n keywords\ + \ = {gesture recognition, conducting, software gesture frameworks },\n pages =\ + \ {260--265},\n title = {conga: A Framework for Adaptive Conducting Gesture Analysis},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_260.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.3ce22588 + doi: 10.5281/zenodo.1176957 issn: 2220-4806 - month: jun - pdf: 165.pdf - presentation-video: https://youtu.be/gKM0lGs9rxw - title: Feeling the Effort of Classical Musicians - A Pipeline from Electromyography - to Smartphone Vibration for Live Music Performance - url: https://doi.org/10.21428%2F92fbeb44.3ce22588 - year: 2022 + keywords: 'gesture recognition, conducting, software gesture frameworks ' + pages: 260--265 + title: 'conga: A Framework for Adaptive Conducting Gesture Analysis' + url: http://www.nime.org/proceedings/2006/nime2006_260.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_23 - abstract: 'ForceHost is an opensource toolchain for generating firmware that hosts - authoring 
and rendering of forcefeedback and audio signals and that communicates - through I2C with guest motor and sensor boards. With ForceHost, the stability - of audio and haptic loops is no longer delegated to and dependent on operating - systems and drivers, and devices remain discoverable beyond planned obsolescence. - We modified Faust, a highlevel language and compiler for real-time audio digital - signal processing, to support haptics. Our toolchain compiles audio-haptic firmware - applications with Faust and embeds web-based UIs exposing their parameters. We - validate our toolchain by example applications and modifications of integrated - development environments: script-based programming examples of haptic firmware - applications with our haptic1D Faust library, visual programming by mapping input - and output signals between audio and haptic devices in Webmapper, visual programming - with physically-inspired mass-interaction models in Synth-a-Modeler Designer. - We distribute the documentation and source code of ForceHost and all of its components - and forks.' - address: 'The University of Auckland, New Zealand' - articleno: 23 - author: 'Frisson, Christian and Kirkegaard, Mathias and Pietrzak, Thomas and Wanderley, - Marcelo M.' - bibtex: "@inproceedings{NIME22_23,\n abstract = {ForceHost is an opensource toolchain\ - \ for generating firmware that hosts authoring and rendering of forcefeedback\ - \ and audio signals and that communicates through I2C with guest motor and sensor\ - \ boards. With ForceHost, the stability of audio and haptic loops is no longer\ - \ delegated to and dependent on operating systems and drivers, and devices remain\ - \ discoverable beyond planned obsolescence. We modified Faust, a highlevel language\ - \ and compiler for real-time audio digital signal processing, to support haptics.\ - \ Our toolchain compiles audio-haptic firmware applications with Faust and embeds\ - \ web-based UIs exposing their parameters. 
We validate our toolchain by example\ - \ applications and modifications of integrated development environments: script-based\ - \ programming examples of haptic firmware applications with our haptic1D Faust\ - \ library, visual programming by mapping input and output signals between audio\ - \ and haptic devices in Webmapper, visual programming with physically-inspired\ - \ mass-interaction models in Synth-a-Modeler Designer. We distribute the documentation\ - \ and source code of ForceHost and all of its components and forks.},\n address\ - \ = {The University of Auckland, New Zealand},\n articleno = {23},\n author =\ - \ {Frisson, Christian and Kirkegaard, Mathias and Pietrzak, Thomas and Wanderley,\ - \ Marcelo M.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.76cfc96e},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {172.pdf},\n presentation-video\ - \ = {https://youtu.be/smFpkdw-J2w},\n title = {{ForceHost}: an open-source toolchain\ - \ for generating firmware embedding the authoring and rendering of audio and force-feedback\ - \ haptics},\n url = {https://doi.org/10.21428%2F92fbeb44.76cfc96e},\n year = {2022}\n\ - }\n" + ID: dAlessandro2006 + address: 'Paris, France' + author: 'd''Alessandro, Nicolas and d''Alessandro, Christophe and Le Beux, Sylvain + and Doval, Boris' + bibtex: "@inproceedings{dAlessandro2006,\n address = {Paris, France},\n author =\ + \ {d'Alessandro, Nicolas and d'Alessandro, Christophe and Le Beux, Sylvain and\ + \ Doval, Boris},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176863},\n\ + \ issn = {2220-4806},\n keywords = {Singing synthesis, voice source, voice quality,\ + \ spectral model, formant synthesis, instrument, gestural control. 
},\n pages\ + \ = {266--271},\n title = {Real-time CALM Synthesizer: New Approaches in Hands-Controlled\ + \ Voice Synthesis},\n url = {http://www.nime.org/proceedings/2006/nime2006_266.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.76cfc96e + doi: 10.5281/zenodo.1176863 issn: 2220-4806 - month: jun - pdf: 172.pdf - presentation-video: https://youtu.be/smFpkdw-J2w - title: 'ForceHost: an open-source toolchain for generating firmware embedding the - authoring and rendering of audio and force-feedback haptics' - url: https://doi.org/10.21428%2F92fbeb44.76cfc96e - year: 2022 + keywords: 'Singing synthesis, voice source, voice quality, spectral model, formant + synthesis, instrument, gestural control. ' + pages: 266--271 + title: 'Real-time CALM Synthesizer: New Approaches in Hands-Controlled Voice Synthesis' + url: http://www.nime.org/proceedings/2006/nime2006_266.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_24 - abstract: 'Physical metaphor provides a visceral and universal logical framework - for composing musical gestures. Physical simulations can aid composers in creating - musical gestures based in complex physical metaphors. CHON (Coupled Harmonic Oscillator - Network) is a new crossplatform application for composing musical gestures based - in Newtonian physics. It simulates a network of particles connected by springs - and sonifies the motion of individual particles. CHON is an interactive instrument - that can provide complex yet tangible and physically grounded control data for - synthesis, sound processing, and musical score generation. Composers often deploy - dozens of independent LFOs to control various parameters in a DAW or synthesizer. - By coupling numerous control signals together using physical principles, CHON - represents an innovation on the traditional LFO model of musical control. 
Unlike - independent LFOs, CHON’s signals push and pull on each other, creating a tangible - causality in the resulting gestures. In this paper, I briefly describe the design - of CHON and discuss its use in composition through examples in my own works.' - address: 'The University of Auckland, New Zealand' - articleno: 24 - author: 'DuPlessis, Rodney' - bibtex: "@inproceedings{NIME22_24,\n abstract = {Physical metaphor provides a visceral\ - \ and universal logical framework for composing musical gestures. Physical simulations\ - \ can aid composers in creating musical gestures based in complex physical metaphors.\ - \ CHON (Coupled Harmonic Oscillator Network) is a new crossplatform application\ - \ for composing musical gestures based in Newtonian physics. It simulates a network\ - \ of particles connected by springs and sonifies the motion of individual particles.\ - \ CHON is an interactive instrument that can provide complex yet tangible and\ - \ physically grounded control data for synthesis, sound processing, and musical\ - \ score generation. Composers often deploy dozens of independent LFOs to control\ - \ various parameters in a DAW or synthesizer. By coupling numerous control signals\ - \ together using physical principles, CHON represents an innovation on the traditional\ - \ LFO model of musical control. 
Unlike independent LFOs, CHON’s signals push and\ - \ pull on each other, creating a tangible causality in the resulting gestures.\ - \ In this paper, I briefly describe the design of CHON and discuss its use in\ - \ composition through examples in my own works.},\n address = {The University\ - \ of Auckland, New Zealand},\n articleno = {24},\n author = {DuPlessis, Rodney},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.18aeca0e},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {173.pdf},\n presentation-video = {https://youtu.be/yXr1m6dW5jo},\n\ - \ title = {A virtual instrument for physics-based musical gesture: {CHON}},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.18aeca0e},\n year = {2022}\n}\n" + ID: Pritchard2006 + abstract: 'We describe the implementation of an environment for Gesturally-Realized + Audio, Speech and Song Performance (GRASSP), which includes a glove-based interface, + a mapping/training interface, and a collection of Max/MSP/Jitter bpatchers that + allow the user to improvise speech, song, sound synthesis, sound processing, sound + localization, and video processing. The mapping/training interface provides a + framework for performers to specify by example the mapping between gesture and + sound or video controls. We demonstrate the effectiveness of the GRASSP environment + for gestural control of musical expression by creating a gesture-to-voice system + that is currently being used by performers. ' + address: 'Paris, France' + author: 'Pritchard, Bob and Fels, Sidney S.' 
+ bibtex: "@inproceedings{Pritchard2006,\n abstract = {We describe the implementation\ + \ of an environment for Gesturally-Realized Audio, Speech and Song Performance\ + \ (GRASSP), which includes a glove-based interface, a mapping/training interface,\ + \ and a collection of Max/MSP/Jitter bpatchers that allow the user to improvise\ + \ speech, song, sound synthesis, sound processing, sound localization, and video\ + \ processing. The mapping/training interface provides a framework for performers\ + \ to specify by example the mapping between gesture and sound or video controls.\ + \ We demonstrate the effectiveness of the GRASSP environment for gestural control\ + \ of musical expression by creating a gesture-to-voice system that is currently\ + \ being used by performers. },\n address = {Paris, France},\n author = {Pritchard,\ + \ Bob and Fels, Sidney S.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176987},\n\ + \ issn = {2220-4806},\n keywords = {Speech synthesis, parallel formant speech\ + \ synthesizer, gesture control, Max/MSP, Jitter, Cyberglove, Polhemus, sound diffusion,\ + \ UBC Toolbox, Glove-Talk, },\n pages = {272--276},\n title = {GRASSP: Gesturally-Realized\ + \ Audio, Speech and Song Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_272.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.18aeca0e + doi: 10.5281/zenodo.1176987 issn: 2220-4806 - month: jun - pdf: 173.pdf - presentation-video: https://youtu.be/yXr1m6dW5jo - title: 'A virtual instrument for physics-based musical gesture: CHON' - url: https://doi.org/10.21428%2F92fbeb44.18aeca0e - year: 2022 + keywords: 'Speech synthesis, parallel formant speech synthesizer, gesture control, + Max/MSP, Jitter, Cyberglove, Polhemus, sound diffusion, UBC Toolbox, Glove-Talk, ' + pages: 272--276 + 
title: 'GRASSP: Gesturally-Realized Audio, Speech and Song Performance' + url: http://www.nime.org/proceedings/2006/nime2006_272.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_25 - abstract: 'This paper describes the development of CAVI, a coadaptive audiovisual - instrument for collaborative humanmachine improvisation. We created this agent-based - live processing system to explore how a machine can interact musically based on - a human performer’s bodily actions. CAVI utilized a generative deep learning model - that monitored muscle and motion data streamed from a Myo armband worn on the - performer’s forearm. The generated control signals automated layered time-based - effects modules and animated a virtual body representing the artificial agent. - In the final performance, two expert musicians (a guitarist and a drummer) performed - with CAVI. We discuss the outcome of our artistic exploration, present the scientific - methods it was based on, and reflect on developing an interactive system that - is as much an audiovisual composition as an interactive musical instrument.' - address: 'The University of Auckland, New Zealand' - articleno: 25 - author: 'Erdem, Cagri and Wallace, Benedikte and Refsum Jensenius, Alexander' - bibtex: "@inproceedings{NIME22_25,\n abstract = {This paper describes the development\ - \ of CAVI, a coadaptive audiovisual instrument for collaborative humanmachine\ - \ improvisation. We created this agent-based live processing system to explore\ - \ how a machine can interact musically based on a human performer’s bodily actions.\ - \ CAVI utilized a generative deep learning model that monitored muscle and motion\ - \ data streamed from a Myo armband worn on the performer’s forearm. The generated\ - \ control signals automated layered time-based effects modules and animated a\ - \ virtual body representing the artificial agent. In the final performance, two\ - \ expert musicians (a guitarist and a drummer) performed with CAVI. 
We discuss\ - \ the outcome of our artistic exploration, present the scientific methods it was\ - \ based on, and reflect on developing an interactive system that is as much an\ - \ audiovisual composition as an interactive musical instrument.},\n address =\ - \ {The University of Auckland, New Zealand},\n articleno = {25},\n author = {Erdem,\ - \ Cagri and Wallace, Benedikte and Refsum Jensenius, Alexander},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.803c24dd},\n issn = {2220-4806},\n month = {jun},\n\ - \ pdf = {176.pdf},\n presentation-video = {https://youtu.be/WO766vmghcQ},\n title\ - \ = {{CAVI}: A Coadaptive Audiovisual Instrument{\\textendash}Composition},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.803c24dd},\n year = {2022}\n}\n" + ID: Dobrian2006 + abstract: 'Is there a distinction between New Interfaces for MusicalExpression and + New Interfaces for Controlling Sound? Thisarticle begins with a brief overview + of expression in musicalperformance, and examines some of the characteristics + ofeffective "expressive" computer music instruments. Itbecomes apparent that sophisticated + musical expressionrequires not only a good control interface but also virtuosicmastery + of the instrument it controls. By studying effectiveacoustic instruments, choosing + intuitive but complexgesture-sound mappings that take advantage of establishedinstrumental + skills, designing intelligent characterizationsof performance gestures, and promoting + long-term dedicatedpractice on a new interface, computer music instrumentdesigners + can enhance the expressive quality of computermusic performance.' 
+ address: 'Paris, France' + author: 'Dobrian, Christopher and Koppelman, Daniel' + bibtex: "@inproceedings{Dobrian2006,\n abstract = {Is there a distinction between\ + \ New Interfaces for MusicalExpression and New Interfaces for Controlling Sound?\ + \ Thisarticle begins with a brief overview of expression in musicalperformance,\ + \ and examines some of the characteristics ofeffective \"expressive\" computer\ + \ music instruments. Itbecomes apparent that sophisticated musical expressionrequires\ + \ not only a good control interface but also virtuosicmastery of the instrument\ + \ it controls. By studying effectiveacoustic instruments, choosing intuitive but\ + \ complexgesture-sound mappings that take advantage of establishedinstrumental\ + \ skills, designing intelligent characterizationsof performance gestures, and\ + \ promoting long-term dedicatedpractice on a new interface, computer music instrumentdesigners\ + \ can enhance the expressive quality of computermusic performance.},\n address\ + \ = {Paris, France},\n author = {Dobrian, Christopher and Koppelman, Daniel},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176893},\n issn = {2220-4806},\n\ + \ keywords = {Expression, instrument design, performance, virtuosity. },\n pages\ + \ = {277--282},\n title = {The E in NIME: Musical Expression with New Computer\ + \ Interfaces},\n url = {http://www.nime.org/proceedings/2006/nime2006_277.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.803c24dd + doi: 10.5281/zenodo.1176893 issn: 2220-4806 - month: jun - pdf: 176.pdf - presentation-video: https://youtu.be/WO766vmghcQ - title: 'CAVI: A Coadaptive Audiovisual Instrument–Composition' - url: https://doi.org/10.21428%2F92fbeb44.803c24dd - year: 2022 + keywords: 'Expression, instrument design, performance, virtuosity. 
' + pages: 277--282 + title: 'The E in NIME: Musical Expression with New Computer Interfaces' + url: http://www.nime.org/proceedings/2006/nime2006_277.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_26 - abstract: 'The authors introduce and document how to build the t-Tree, a digital - musical instrument (DMI), interactive music system (IMS), hub, and docking station - that embeds several t-Sticks. The t-Tree’s potential for collaborative performance - as well as an installation is discussed. Specific design choices and inspiration - for the t-Tree are explored. Finally, a prototype is developed and showcased that - attempts to meet the authors’ goals of creating a novel musical experience for - musicians and non-musicians alike, expanding on the premise of the original t-Stick, - and mitigating technical obsolescence of DMIs.' - address: 'The University of Auckland, New Zealand' - articleno: 26 - author: 'Kirby, Linnea and Buser, Paul and Wanderley, Marcelo M.' - bibtex: "@inproceedings{NIME22_26,\n abstract = {The authors introduce and document\ - \ how to build the t-Tree, a digital musical instrument (DMI), interactive music\ - \ system (IMS), hub, and docking station that embeds several t-Sticks. The t-Tree’s\ - \ potential for collaborative performance as well as an installation is discussed.\ - \ Specific design choices and inspiration for the t-Tree are explored. 
Finally,\ - \ a prototype is developed and showcased that attempts to meet the authors’ goals\ - \ of creating a novel musical experience for musicians and non-musicians alike,\ - \ expanding on the premise of the original t-Stick, and mitigating technical obsolescence\ - \ of DMIs.},\n address = {The University of Auckland, New Zealand},\n articleno\ - \ = {26},\n author = {Kirby, Linnea and Buser, Paul and Wanderley, Marcelo M.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.2d00f04f},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {179.pdf},\n presentation-video = {https://youtu.be/gS87Tpg3h_I},\n\ - \ title = {Introducing the t-Tree: Using Multiple t-Sticks for Performance and\ - \ Installation},\n url = {https://doi.org/10.21428%2F92fbeb44.2d00f04f},\n year\ - \ = {2022}\n}\n" + ID: Richards2006 + abstract: 'Why is a seemingly mundane issue such as airline baggageallowance of + great significance in regards to the performancepractice of electronic music? + This paper discusses how aperformance practice has evolved that seeks to question + thebinary and corporate digital world. New ''instruments'' andapproaches have + emerged that explore ''dirty electronics'' and''punktronics'': DIY electronic + instruments made from junk.These instruments are not instruments in the traditionalsense, + defined by physical dimensions or by a set number ofparameters, but modular systems, + constantly evolving, nevercomplete, infinitely variable and designed to be portable. + Acombination of lo- and hi-fi, analogue and digital,synchronous and asynchronous + devices offer new modes ofexpression. The development of these new interfaces + formusical expression run side-by-side with an emerging postdigital aesthetic.' 
+ address: 'Paris, France' + author: 'Richards, John' + bibtex: "@inproceedings{Richards2006,\n abstract = {Why is a seemingly mundane issue\ + \ such as airline baggageallowance of great significance in regards to the performancepractice\ + \ of electronic music? This paper discusses how aperformance practice has evolved\ + \ that seeks to question thebinary and corporate digital world. New 'instruments'\ + \ andapproaches have emerged that explore 'dirty electronics' and'punktronics':\ + \ DIY electronic instruments made from junk.These instruments are not instruments\ + \ in the traditionalsense, defined by physical dimensions or by a set number ofparameters,\ + \ but modular systems, constantly evolving, nevercomplete, infinitely variable\ + \ and designed to be portable. Acombination of lo- and hi-fi, analogue and digital,synchronous\ + \ and asynchronous devices offer new modes ofexpression. The development of these\ + \ new interfaces formusical expression run side-by-side with an emerging postdigital\ + \ aesthetic.},\n address = {Paris, France},\n author = {Richards, John},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176995},\n issn = {2220-4806},\n keywords\ + \ = {bastardisation,dirty electronics,diy,ebay,live,modular,performance,portability,post-digital,punktronics},\n\ + \ pages = {283--287},\n title = {32kg: Performance Systems for a Post-Digital\ + \ Age},\n url = {http://www.nime.org/proceedings/2006/nime2006_283.pdf},\n year\ + \ = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.2d00f04f + doi: 10.5281/zenodo.1176995 issn: 2220-4806 - month: jun - pdf: 179.pdf - presentation-video: https://youtu.be/gS87Tpg3h_I - title: 'Introducing the t-Tree: Using Multiple t-Sticks for Performance and Installation' - url: https://doi.org/10.21428%2F92fbeb44.2d00f04f - year: 2022 + 
keywords: 'bastardisation,dirty electronics,diy,ebay,live,modular,performance,portability,post-digital,punktronics' + pages: 283--287 + title: '32kg: Performance Systems for a Post-Digital Age' + url: http://www.nime.org/proceedings/2006/nime2006_283.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_27 - abstract: 'We present an empirical study of designing a NIME for the head-mounted - augmented reality (HMAR) environment. In the NIME community, various sonic applications - have incorporated augmented reality (AR) for sonic experience and audio production. - With this novel digital form, new opportunities for musical expression and interface - are presented. Yet few works consider whether and how the design of the NIME will - be affected given the technology’s affordance. In this paper, we take an autobiographical - design approach to design a NIME in HMAR, exploring what is a genuine application - of AR in a NIMEs and how AR mediates between the performer and sound as a creative - expression. Three interface prototypes are created for a frequency modulation - synthesis system. We report on their design process and our learning and experiences - through self-usage and improvisation. Our designs explore free-hand and embodied - interaction in our interfaces, and we reflect on how these unique qualities of - HMAR contribute to an expressive medium for sonic creation.' - address: 'The University of Auckland, New Zealand' - articleno: 27 - author: 'Wang, Yichen and Martin, Charles' - bibtex: "@inproceedings{NIME22_27,\n abstract = {We present an empirical study of\ - \ designing a NIME for the head-mounted augmented reality (HMAR) environment.\ - \ In the NIME community, various sonic applications have incorporated augmented\ - \ reality (AR) for sonic experience and audio production. 
With this novel digital\ - \ form, new opportunities for musical expression and interface are presented.\ - \ Yet few works consider whether and how the design of the NIME will be affected\ - \ given the technology’s affordance. In this paper, we take an autobiographical\ - \ design approach to design a NIME in HMAR, exploring what is a genuine application\ - \ of AR in a NIMEs and how AR mediates between the performer and sound as a creative\ - \ expression. Three interface prototypes are created for a frequency modulation\ - \ synthesis system. We report on their design process and our learning and experiences\ - \ through self-usage and improvisation. Our designs explore free-hand and embodied\ - \ interaction in our interfaces, and we reflect on how these unique qualities\ - \ of HMAR contribute to an expressive medium for sonic creation.},\n address =\ - \ {The University of Auckland, New Zealand},\n articleno = {27},\n author = {Wang,\ - \ Yichen and Martin, Charles},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.b540aa59},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {183.pdf},\n presentation-video\ - \ = {https://youtu.be/iOuZqwIwinU},\n title = {Cubing Sound: Designing a {NIME}\ - \ for Head-mounted Augmented Reality},\n url = {https://doi.org/10.21428%2F92fbeb44.b540aa59},\n\ - \ year = {2022}\n}\n" + ID: DeLaubier2006 + address: 'Paris, France' + author: 'de Laubier, Serge and Goudard, Vincent' + bibtex: "@inproceedings{DeLaubier2006,\n address = {Paris, France},\n author = {de\ + \ Laubier, Serge and Goudard, Vincent},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176953},\n\ + \ issn = {2220-4806},\n keywords = {1,audio-graphic portable instrument,ethernet,from\ + \ 1983 to 1988,genesis of the project,on,puce muse studios,r\\'{e}pertoire,we\ + \ worked at the,wifi},\n pages 
= {288--291},\n title = {Meta-Instrument 3: a Look\ + \ over 17 Years of Practice},\n url = {http://www.nime.org/proceedings/2006/nime2006_288.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.b540aa59 + doi: 10.5281/zenodo.1176953 issn: 2220-4806 - month: jun - pdf: 183.pdf - presentation-video: https://youtu.be/iOuZqwIwinU - title: 'Cubing Sound: Designing a NIME for Head-mounted Augmented Reality' - url: https://doi.org/10.21428%2F92fbeb44.b540aa59 - year: 2022 + keywords: '1,audio-graphic portable instrument,ethernet,from 1983 to 1988,genesis + of the project,on,puce muse studios,r\''{e}pertoire,we worked at the,wifi' + pages: 288--291 + title: 'Meta-Instrument 3: a Look over 17 Years of Practice' + url: http://www.nime.org/proceedings/2006/nime2006_288.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_28 - abstract: 'We describe a new set of affordances for networked live coding performances - in the browser-based environment Gibber, and discuss their implications in the - context of three different performances by three different ensembles at three - universities. Each ensemble possessed differing levels of programming and musical - expertise, leading to different challenges and subsequent extensions to Gibber - to address them. We describe these and additional extensions that came about after - shared reflection on our experiences. While our chosen design contains computational - inefficiencies that pose challenges for larger ensembles, our experiences suggest - that this is a reasonable tradeoff for the low barrier-to-entry that browser-based - environments provide, and that the design in general supports a variety of educational - goals and compositional strategies.' 
- address: 'The University of Auckland, New Zealand' - articleno: 28 - author: 'Roberts, Charlie and Hattwick, Ian and Sheffield, Eric and Smith, Gillian' - bibtex: "@inproceedings{NIME22_28,\n abstract = {We describe a new set of affordances\ - \ for networked live coding performances in the browser-based environment Gibber,\ - \ and discuss their implications in the context of three different performances\ - \ by three different ensembles at three universities. Each ensemble possessed\ - \ differing levels of programming and musical expertise, leading to different\ - \ challenges and subsequent extensions to Gibber to address them. We describe\ - \ these and additional extensions that came about after shared reflection on our\ - \ experiences. While our chosen design contains computational inefficiencies that\ - \ pose challenges for larger ensembles, our experiences suggest that this is a\ - \ reasonable tradeoff for the low barrier-to-entry that browser-based environments\ - \ provide, and that the design in general supports a variety of educational goals\ - \ and compositional strategies.},\n address = {The University of Auckland, New\ - \ Zealand},\n articleno = {28},\n author = {Roberts, Charlie and Hattwick, Ian\ - \ and Sheffield, Eric and Smith, Gillian},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.38cb7745},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {191.pdf},\n presentation-video\ - \ = {https://youtu.be/BKlHkEAqUOo},\n title = {Rethinking networked collaboration\ - \ in the live coding environment Gibber},\n url = {https://doi.org/10.21428%2F92fbeb44.38cb7745},\n\ - \ year = {2022}\n}\n" + ID: Goto2006 + abstract: 'This paper is intended to introduce the system, which combines "BodySuit" + and "RoboticMusic", as well as its possibilities and its uses in an artistic application. + "BodySuit" refers to a gesture controller in a Data Suit type. 
"RoboticMusic" + refers to percussion robots, which are appliedto a humanoid robot type. In this + paper, I will discuss their aesthetics and the concept, as well as the idea of + the "Extended Body".' + address: 'Paris, France' + author: 'Goto, Suguru' + bibtex: "@inproceedings{Goto2006,\n abstract = {This paper is intended to introduce\ + \ the system, which combines \"BodySuit\" and \"RoboticMusic\", as well as its\ + \ possibilities and its uses in an artistic application. \"BodySuit\" refers to\ + \ a gesture controller in a Data Suit type. \"RoboticMusic\" refers to percussion\ + \ robots, which are appliedto a humanoid robot type. In this paper, I will discuss\ + \ their aesthetics and the concept, as well as the idea of the \"Extended Body\"\ + .},\n address = {Paris, France},\n author = {Goto, Suguru},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176913},\n issn = {2220-4806},\n keywords = {Robot, Gesture\ + \ Controller, Humanoid Robot, Artificial Intelligence, Interaction },\n pages\ + \ = {292--295},\n title = {The Case Study of An Application of The System, `BodySuit'\ + \ and `RoboticMusic': Its Introduction and Aesthetics},\n url = {http://www.nime.org/proceedings/2006/nime2006_292.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.38cb7745 + doi: 10.5281/zenodo.1176913 issn: 2220-4806 - month: jun - pdf: 191.pdf - presentation-video: https://youtu.be/BKlHkEAqUOo - title: Rethinking networked collaboration in the live coding environment Gibber - url: https://doi.org/10.21428%2F92fbeb44.38cb7745 - year: 2022 + keywords: 'Robot, Gesture Controller, Humanoid Robot, Artificial Intelligence, Interaction ' + pages: 292--295 + title: 'The Case Study of An Application of The System, `BodySuit'' and `RoboticMusic'': + Its Introduction and Aesthetics' + url: 
http://www.nime.org/proceedings/2006/nime2006_292.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_29 - abstract: 'In the context of immersive sonic interaction, Virtual Reality Musical - Instruments have had the relative majority of attention thus far, fueled by the - increasing availability of affordable technology. Recent advances in Mixed Reality - (MR) experiences have provided the means for a new wave of research that goes - beyond Virtual Reality. In this paper, we explore the taxonomy of Extended Reality - systems, establishing our own notion of MR. From this, we propose a new classification - of Virtual Musical Instrument, known as a Mixed Reality Musical Instrument (MRMI). - We define this system as an embodied interface for expressive musical performance, - characterized by the relationships between the performer, the virtual, and the - physical environment. After a review of existing literature concerning the evaluation - of immersive musical instruments and the affordances of MR systems, we offer a - new framework based on three dimensions to support the design and analysis of - MRMIs. We illustrate its use with application to existing works.' - address: 'The University of Auckland, New Zealand' - articleno: 29 - author: 'Zellerbach, Karitta Christina and Roberts, Charlie' - bibtex: "@inproceedings{NIME22_29,\n abstract = {In the context of immersive sonic\ - \ interaction, Virtual Reality Musical Instruments have had the relative majority\ - \ of attention thus far, fueled by the increasing availability of affordable technology.\ - \ Recent advances in Mixed Reality (MR) experiences have provided the means for\ - \ a new wave of research that goes beyond Virtual Reality. In this paper, we explore\ - \ the taxonomy of Extended Reality systems, establishing our own notion of MR.\ - \ From this, we propose a new classification of Virtual Musical Instrument, known\ - \ as a Mixed Reality Musical Instrument (MRMI). 
We define this system as an embodied\ - \ interface for expressive musical performance, characterized by the relationships\ - \ between the performer, the virtual, and the physical environment. After a review\ - \ of existing literature concerning the evaluation of immersive musical instruments\ - \ and the affordances of MR systems, we offer a new framework based on three dimensions\ - \ to support the design and analysis of MRMIs. We illustrate its use with application\ - \ to existing works.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {29},\n author = {Zellerbach, Karitta Christina and Roberts, Charlie},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.b2a44bc9},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {193.pdf},\n presentation-video = {https://youtu.be/Pb4pAr2v4yU},\n\ - \ title = {A Framework for the Design and Analysis of Mixed Reality Musical Instruments},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.b2a44bc9},\n year = {2022}\n}\n" + ID: Hindman2006 + address: 'Paris, France' + author: 'Hindman, David' + bibtex: "@inproceedings{Hindman2006,\n address = {Paris, France},\n author = {Hindman,\ + \ David},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176923},\n issn = {2220-4806},\n\ + \ pages = {296--299},\n title = {Modal Kombat: Competition and Choreography in\ + \ Synesthetic Musical Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_296.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.b2a44bc9 + doi: 10.5281/zenodo.1176923 issn: 2220-4806 - month: jun - pdf: 193.pdf - presentation-video: https://youtu.be/Pb4pAr2v4yU - title: A Framework for the Design and Analysis of Mixed Reality Musical Instruments - url: 
https://doi.org/10.21428%2F92fbeb44.b2a44bc9 - year: 2022 + pages: 296--299 + title: 'Modal Kombat: Competition and Choreography in Synesthetic Musical Performance' + url: http://www.nime.org/proceedings/2006/nime2006_296.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_30 - abstract: 'NIME has recently seen critique emerging around colonisation of music - technology, and the need to decolonise digital audio workstations and music software. - While commercial DAWs tend to sideline musical styles outside of western norms - (and even many inside too), viewing this problem through an historical lens of - imperialist legacies misses the influence of a more recent - and often invisible - - hegemony that bears significant direct responsibility: The culture of technological - development. In this paper we focus on the commercial technological development - culture that produces these softwares, to better understand the more latent reasons - why music production software ends up supporting some music practices while failing - others. By using this lens we can more meaningfully separate the influence of - historic cultural colonisation and music tech development culture, in order to - better advocate for and implement meaningful change. We will discuss why the meaning - of the term “decolonisation” should be carefully examined when addressing the - limitations of DAWs, because while larger imperialist legacies continue to have - significant impact on our understanding of culture, this can direct attention - away from the techno-cultural subset of this hegemony that is actively engaged - in making the decisions that shape the software we use. We discuss how the conventions - of this techno-cultural hegemony shape the affordances of major DAWs (and thereby - musical creativity). 
We also examine specific factors that impact decision making - in developing and evolving typical music software alongside latent social structures, - such as competing commercial demands, how standards are shaped, and the impact - of those standards. Lastly, we suggest that, while we must continue to discuss - the impact of imperialist legacies on the way we make music, understanding the - techno-cultural subset of the colonial hegemony and its motives can create a space - to advocate for conventions in music software that are more widely inclusive.' - address: 'The University of Auckland, New Zealand' - articleno: 30 - author: 'Pardue, Laurel and Bin, S. M. Astrid' - bibtex: "@inproceedings{NIME22_30,\n abstract = {NIME has recently seen critique\ - \ emerging around colonisation of music technology, and the need to decolonise\ - \ digital audio workstations and music software. While commercial DAWs tend to\ - \ sideline musical styles outside of western norms (and even many inside too),\ - \ viewing this problem through an historical lens of imperialist legacies misses\ - \ the influence of a more recent - and often invisible - hegemony that bears significant\ - \ direct responsibility: The culture of technological development. In this paper\ - \ we focus on the commercial technological development culture that produces these\ - \ softwares, to better understand the more latent reasons why music production\ - \ software ends up supporting some music practices while failing others. By using\ - \ this lens we can more meaningfully separate the influence of historic cultural\ - \ colonisation and music tech development culture, in order to better advocate\ - \ for and implement meaningful change. 
We will discuss why the meaning of the\ - \ term “decolonisation” should be carefully examined when addressing the limitations\ - \ of DAWs, because while larger imperialist legacies continue to have significant\ - \ impact on our understanding of culture, this can direct attention away from\ - \ the techno-cultural subset of this hegemony that is actively engaged in making\ - \ the decisions that shape the software we use. We discuss how the conventions\ - \ of this techno-cultural hegemony shape the affordances of major DAWs (and thereby\ - \ musical creativity). We also examine specific factors that impact decision making\ - \ in developing and evolving typical music software alongside latent social structures,\ - \ such as competing commercial demands, how standards are shaped, and the impact\ - \ of those standards. Lastly, we suggest that, while we must continue to discuss\ - \ the impact of imperialist legacies on the way we make music, understanding the\ - \ techno-cultural subset of the colonial hegemony and its motives can create a\ - \ space to advocate for conventions in music software that are more widely inclusive.},\n\ - \ address = {The University of Auckland, New Zealand},\n articleno = {30},\n author\ - \ = {Pardue, Laurel and Bin, S. M. Astrid},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.0cc78aeb},\n issn = {2220-4806},\n month = {jun},\n pdf =\ - \ {201.pdf},\n presentation-video = {https://www.youtube.com/watch?v=a53vwOUDh0M},\n\ - \ title = {The Other Hegemony: Effects of software development culture on music\ - \ software, and what we can do about it},\n url = {https://doi.org/10.21428%2F92fbeb44.0cc78aeb},\n\ - \ year = {2022}\n}\n" + ID: Lehrman2006 + address: 'Paris, France' + author: 'Lehrman, Paul D. and Singer, Eric' + bibtex: "@inproceedings{Lehrman2006,\n address = {Paris, France},\n author = {Lehrman,\ + \ Paul D. 
and Singer, Eric},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176961},\n\ + \ issn = {2220-4806},\n keywords = {Robotics, computer control, MIDI, player pianos,\ + \ mechanical music, percussion, sound effects, Dadaism. },\n pages = {300--303},\n\ + \ title = {A \"Ballet M\\'{e}canique\" for the 21{s}t Century: Performing George\ + \ Antheil's Dadaist Masterpiece with Robots},\n url = {http://www.nime.org/proceedings/2006/nime2006_300.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.0cc78aeb + doi: 10.5281/zenodo.1176961 issn: 2220-4806 - month: jun - pdf: 201.pdf - presentation-video: https://www.youtube.com/watch?v=a53vwOUDh0M - title: 'The Other Hegemony: Effects of software development culture on music software, - and what we can do about it' - url: https://doi.org/10.21428%2F92fbeb44.0cc78aeb - year: 2022 + keywords: 'Robotics, computer control, MIDI, player pianos, mechanical music, percussion, + sound effects, Dadaism. ' + pages: 300--303 + title: 'A "Ballet M\''{e}canique" for the 21{s}t Century: Performing George Antheil''s + Dadaist Masterpiece with Robots' + url: http://www.nime.org/proceedings/2006/nime2006_300.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_31 - abstract: "Latin American (LATAM) contributions to Music Technology date back to\ - \ the early 1940’s. However, as evidenced in historical analyses of NIME, the\ - \ input from LATAM institutions to its proceedings is considerably low, even when\ - \ the conference was recently held in Porto Alegre, Brazil. 
Reflecting on this\ - \ visible disparity and joining efforts as a group of LATAM researchers, we conducted\ - \ a workshop and distributed a survey with members of the LATAM community with\ - \ the aim of\nsounding out their perspectives on NIME-related practices and the\ - \ prospect of establishing a LATAM NIME Network. Based on our findings we provide\ - \ a contemporary contextual overview of the activities happening in\nLATAM and\ - \ the particular challenges that practitioners face emerging from their socio-political\ - \ reality. We also offer LATAM perspectives on critical epistemological issues\ - \ that affect the NIME community as a whole, contributing to a pluriversal view\ - \ on these matters, and to the embracement of multiple realities and ways of doing\ - \ things." - address: 'The University of Auckland, New Zealand' - articleno: 31 - author: 'Martinez Avila, Juan Pablo and Tragtenberg, Joāo and Calegario, Filipe - and Alarcon, Ximena and Cadavid Hinojosa, Laddy Patricia and Corintha, Isabela - and Dannemann, Teodoro and Jaimovich, Javier and Marquez-Borbon, Adnan and Lerner, - Martin Matus and Ortiz, Miguel and Ramos, Juan and Solís García, Hugo' - bibtex: "@inproceedings{NIME22_31,\n abstract = {Latin American (LATAM) contributions\ - \ to Music Technology date back to the early 1940’s. However, as evidenced in\ - \ historical analyses of NIME, the input from LATAM institutions to its proceedings\ - \ is considerably low, even when the conference was recently held in Porto Alegre,\ - \ Brazil. 
Reflecting on this visible disparity and joining efforts as a group\ - \ of LATAM researchers, we conducted a workshop and distributed a survey with\ - \ members of the LATAM community with the aim of\nsounding out their perspectives\ - \ on NIME-related practices and the prospect of establishing a LATAM NIME Network.\ - \ Based on our findings we provide a contemporary contextual overview of the activities\ - \ happening in\nLATAM and the particular challenges that practitioners face emerging\ - \ from their socio-political reality. We also offer LATAM perspectives on critical\ - \ epistemological issues that affect the NIME community as a whole, contributing\ - \ to a pluriversal view on these matters, and to the embracement of multiple realities\ - \ and ways of doing things.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {31},\n author = {Martinez Avila, Juan Pablo and Tragtenberg, Jo{\\\ - =a}o and Calegario, Filipe and Alarcon, Ximena and Cadavid Hinojosa, Laddy Patricia\ - \ and Corintha, Isabela and Dannemann, Teodoro and Jaimovich, Javier and Marquez-Borbon,\ - \ Adnan and Lerner, Martin Matus and Ortiz, Miguel and Ramos, Juan and Sol{\\\ - '{i}}s Garc{\\'{i}}a, Hugo},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.b7a7ba4f},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {21.pdf},\n presentation-video\ - \ = {https://youtu.be/dCxkrqrbM-M},\n title = {Being (A)part of NIME: Embracing\ - \ Latin American Perspectives},\n url = {https://doi.org/10.21428/92fbeb44.b7a7ba4f},\n\ - \ year = {2022}\n}\n" + ID: Lemouton2006 + abstract: This paper deals with the first musical usage of anexperimental system + dedicated to the optical detection ofthe position of a trombone's slide. 
+ address: 'Paris, France' + author: 'Lemouton, Serge and Stroppa, Marco and Sluchin, Benny' + bibtex: "@inproceedings{Lemouton2006,\n abstract = {This paper deals with the first\ + \ musical usage of anexperimental system dedicated to the optical detection ofthe\ + \ position of a trombone's slide.},\n address = {Paris, France},\n author = {Lemouton,\ + \ Serge and Stroppa, Marco and Sluchin, Benny},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176963},\n issn = {2220-4806},\n keywords = {augmented instrument,chamber\ + \ electronics,computer,interaction,musical motivation,performer,trombone},\n pages\ + \ = {304--307},\n title = {Using the Augmented Trombone in \"I will not kiss your\ + \ f.ing flag\"},\n url = {http://www.nime.org/proceedings/2006/nime2006_304.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.b7a7ba4f + doi: 10.5281/zenodo.1176963 issn: 2220-4806 - month: jun - pdf: 21.pdf - presentation-video: https://youtu.be/dCxkrqrbM-M - title: 'Being (A)part of NIME: Embracing Latin American Perspectives' - url: https://doi.org/10.21428/92fbeb44.b7a7ba4f - year: 2022 + keywords: 'augmented instrument,chamber electronics,computer,interaction,musical + motivation,performer,trombone' + pages: 304--307 + title: Using the Augmented Trombone in "I will not kiss your f.ing flag" + url: http://www.nime.org/proceedings/2006/nime2006_304.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_32 - abstract: 'This article provides a lens for viewing technology as land, transformed - through resource extraction, manufacturing, distribution, disassembly and waste. - This lens is applied to processes of artistic creation with technology, exploring - ways of fostering personal and informed relationships with that technology. 
The - goal of these explorations will be to inspire a greater awareness of the colonial - and capitalist processes that shape the technology we use and the land and people - it is in relationship with. Beyond simply identifying the influence of these colonial - and capitalist processes, the article will also provide creative responses (alterations - to a creative process with technology) which seek to address these colonial processes - in a sensitive and critical way. This will be done not to answer the broad question - of ‘how do we decolonise art making with technology?’, but to break that question - apart into prompts or potential pathways for decolonising.' - address: 'The University of Auckland, New Zealand' - articleno: 32 - author: 'Argabrite, Zak and Murphy, Jim and Norman, Sally Jane and Carnegie, Dale' - bibtex: "@inproceedings{NIME22_32,\n abstract = {This article provides a lens for\ - \ viewing technology as land, transformed through resource extraction, manufacturing,\ - \ distribution, disassembly and waste. This lens is applied to processes of artistic\ - \ creation with technology, exploring ways of fostering personal and informed\ - \ relationships with that technology. The goal of these explorations will be to\ - \ inspire a greater awareness of the colonial and capitalist processes that shape\ - \ the technology we use and the land and people it is in relationship with. Beyond\ - \ simply identifying the influence of these colonial and capitalist processes,\ - \ the article will also provide creative responses (alterations to a creative\ - \ process with technology) which seek to address these colonial processes in a\ - \ sensitive and critical way. 
This will be done not to answer the broad question\ - \ of ‘how do we decolonise art making with technology?’, but to break that question\ - \ apart into prompts or potential pathways for decolonising.},\n address = {The\ - \ University of Auckland, New Zealand},\n articleno = {32},\n author = {Argabrite,\ - \ Zak and Murphy, Jim and Norman, Sally Jane and Carnegie, Dale},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.68f7c268},\n issn = {2220-4806},\n month\ - \ = {jun},\n pdf = {222.pdf},\n presentation-video = {https://youtu.be/JZTmiIByYN4},\n\ - \ title = {Technology is Land: Strategies towards decolonisation of technology\ - \ in artmaking},\n url = {https://doi.org/10.21428%2F92fbeb44.68f7c268},\n year\ - \ = {2022}\n}\n" + ID: Schiesser2006 + address: 'Paris, France' + author: 'Schiesser, S\''{e}bastien and Traube, Caroline' + bibtex: "@inproceedings{Schiesser2006,\n address = {Paris, France},\n author = {Schiesser,\ + \ S\\'{e}bastien and Traube, Caroline},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177001},\n\ + \ issn = {2220-4806},\n keywords = {saxophone, augmented instrument, live electronics,\ + \ perfor- mance, gestural control },\n pages = {308--313},\n title = {On Making\ + \ and Playing an Electronically-augmented Saxophone},\n url = {http://www.nime.org/proceedings/2006/nime2006_308.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.68f7c268 + doi: 10.5281/zenodo.1177001 issn: 2220-4806 - month: jun - pdf: 222.pdf - presentation-video: https://youtu.be/JZTmiIByYN4 - title: 'Technology is Land: Strategies towards decolonisation of technology in artmaking' - url: https://doi.org/10.21428%2F92fbeb44.68f7c268 - year: 2022 + keywords: 'saxophone, augmented instrument, 
live electronics, perfor- mance, gestural + control ' + pages: 308--313 + title: On Making and Playing an Electronically-augmented Saxophone + url: http://www.nime.org/proceedings/2006/nime2006_308.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_33 - abstract: 'The following paper presents L2Ork Tweeter, a new control-data-driven - free and open source crowdsourced telematic musicking platform and a new interface - for musical expression that deterministically addresses three of the greatest - challenges associated with the telematic music medium, that of latency, sync, - and bandwidth. Motivated by the COVID-19 pandemic, Tweeter’s introduction in April - 2020 has ensured uninterrupted operation of Virginia Tech’s Linux Laptop Orchestra - (L2Ork), resulting in 6 international performances over the past 18 months. In - addition to enabling tightly-timed sync between clients, it also uniquely supports - all stages of NIME-centric telematic musicking, from collaborative instrument - design and instruction, to improvisation, composition, rehearsal, and performance, - including audience participation. Tweeter is also envisioned as a prototype for - the crowdsourced approach to telematic musicking. Below, the paper delves deeper - into motivation, constraints, design and implementation, and the observed impact - as an applied instance of a proposed paradigmshift in telematic musicking and - its newfound identity fueled by the live crowdsourced telematic music genre.' - address: 'The University of Auckland, New Zealand' - articleno: 33 - author: 'Bukvic, Ivica' - bibtex: "@inproceedings{NIME22_33,\n abstract = {The following paper presents L2Ork\ - \ Tweeter, a new control-data-driven free and open source crowdsourced telematic\ - \ musicking platform and a new interface for musical expression that deterministically\ - \ addresses three of the greatest challenges associated with the telematic music\ - \ medium, that of latency, sync, and bandwidth. 
Motivated by the COVID-19 pandemic,\ - \ Tweeter’s introduction in April 2020 has ensured uninterrupted operation of\ - \ Virginia Tech’s Linux Laptop Orchestra (L2Ork), resulting in 6 international\ - \ performances over the past 18 months. In addition to enabling tightly-timed\ - \ sync between clients, it also uniquely supports all stages of NIME-centric telematic\ - \ musicking, from collaborative instrument design and instruction, to improvisation,\ - \ composition, rehearsal, and performance, including audience participation. Tweeter\ - \ is also envisioned as a prototype for the crowdsourced approach to telematic\ - \ musicking. Below, the paper delves deeper into motivation, constraints, design\ - \ and implementation, and the observed impact as an applied instance of a proposed\ - \ paradigmshift in telematic musicking and its newfound identity fueled by the\ - \ live crowdsourced telematic music genre.},\n address = {The University of Auckland,\ - \ New Zealand},\n articleno = {33},\n author = {Bukvic, Ivica},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.a0a8d914},\n issn = {2220-4806},\n month = {jun},\n\ - \ pdf = {26.pdf},\n presentation-video = {https://youtu.be/5pawphncSmg},\n title\ - \ = {Latency-, Sync-, and Bandwidth-Agnostic Tightly-Timed Telematic and Crowdsourced\ - \ Musicking Made Possible Using L2Ork Tweeter},\n url = {https://doi.org/10.21428%2F92fbeb44.a0a8d914},\n\ - \ year = {2022}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.21428/92fbeb44.a0a8d914 - issn: 2220-4806 - month: jun - pdf: 26.pdf - presentation-video: https://youtu.be/5pawphncSmg - title: 'Latency-, Sync-, and Bandwidth-Agnostic Tightly-Timed Telematic and Crowdsourced - Musicking Made Possible Using L2Ork Tweeter' - url: https://doi.org/10.21428%2F92fbeb44.a0a8d914 - year: 2022 + ID: Smyth2006 + address: 
'Paris, France' + author: 'Smyth, Tamara' + bibtex: "@inproceedings{Smyth2006,\n address = {Paris, France},\n author = {Smyth,\ + \ Tamara},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177007},\n issn = {2220-4806},\n\ + \ keywords = {khaen, sound synthesis control, mapping, musical acoustics },\n\ + \ pages = {314--317},\n title = {Handheld Acoustic Filter Bank for Musical Control},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_314.pdf},\n year = {2006}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177007 + issn: 2220-4806 + keywords: 'khaen, sound synthesis control, mapping, musical acoustics ' + pages: 314--317 + title: Handheld Acoustic Filter Bank for Musical Control + url: http://www.nime.org/proceedings/2006/nime2006_314.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_34 - abstract: 'In this paper we propose a Spatial Augmented Reality interface for actuated - acoustic instruments with active vibration control. We adopt a performance-led - research approach to design augmentations throughout multiple residences. The - resulting system enables two musicians to improvise with four augmented instruments - through virtual shapes distributed in their peripheral space: two 12-string guitars - and 1 drum kit actuated with surface speakers and a trumpet attached to an air - compressor. Using ethnographic methods, we document the evolution of the augmentations - and conduct a thematic analysis to shine a light on the collaborative and iterative - design process. In particular, we provide insights on the opportunities brought - by Spatial AR and on the role of improvisation.' 
- address: 'The University of Auckland, New Zealand' - articleno: 34 - author: 'Arslan, Cagan and Berthaut, Florent and Beuchey, Anthony and Cambourian, - Paul and Paté, Arthur' - bibtex: "@inproceedings{NIME22_34,\n abstract = {In this paper we propose a Spatial\ - \ Augmented Reality interface for actuated acoustic instruments with active vibration\ - \ control. We adopt a performance-led research approach to design augmentations\ - \ throughout multiple residences. The resulting system enables two musicians to\ - \ improvise with four augmented instruments through virtual shapes distributed\ - \ in their peripheral space: two 12-string guitars and 1 drum kit actuated with\ - \ surface speakers and a trumpet attached to an air compressor. Using ethnographic\ - \ methods, we document the evolution of the augmentations and conduct a thematic\ - \ analysis to shine a light on the collaborative and iterative design process.\ - \ In particular, we provide insights on the opportunities brought by Spatial AR\ - \ and on the role of improvisation.},\n address = {The University of Auckland,\ - \ New Zealand},\n articleno = {34},\n author = {Arslan, Cagan and Berthaut, Florent\ - \ and Beuchey, Anthony and Cambourian, Paul and Pat{\\'{e}}, Arthur},\n booktitle\ + ID: Nixdorf2006 + abstract: 'In this paper we will report on the use of real-time soundspatialization + in Challenging Bodies, a trans-disciplinaryperformance project at the University + of Regina. Usingwell-understood spatialization techniques mapped to a custom interface, + a computer system was built that allowedlive spatial control of ten sound signals + from on-stage performers. This spatial control added a unique dynamic element + to an already ultramodern performance. The systemis described in detail, including + the main advantages overexisting spatialization systems: simplicity, usability, + customization and scalability' + address: 'Paris, France' + author: 'Nixdorf, Joshua J. 
and Gerhard, David' + bibtex: "@inproceedings{Nixdorf2006,\n abstract = {In this paper we will report\ + \ on the use of real-time soundspatialization in Challenging Bodies, a trans-disciplinaryperformance\ + \ project at the University of Regina. Usingwell-understood spatialization techniques\ + \ mapped to a custom interface, a computer system was built that allowedlive spatial\ + \ control of ten sound signals from on-stage performers. This spatial control\ + \ added a unique dynamic element to an already ultramodern performance. The systemis\ + \ described in detail, including the main advantages overexisting spatialization\ + \ systems: simplicity, usability, customization and scalability},\n address =\ + \ {Paris, France},\n author = {Nixdorf, Joshua J. and Gerhard, David},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.c28dd323},\n issn = {2220-4806},\n month\ - \ = {jun},\n pdf = {30.pdf},\n presentation-video = {https://youtu.be/oxMrv3R6jK0},\n\ - \ title = {Vibrating shapes : Design and evolution of a spatial augmented reality\ - \ interface for actuated instruments},\n url = {https://doi.org/10.21428%2F92fbeb44.c28dd323},\n\ - \ year = {2022}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1176981},\n issn = {2220-4806},\n keywords\ + \ = {gem,live systems,pd,performance sys-,real-time systems,sound architecture,sound\ + \ localization,sound spatialization,surround sound,tems},\n pages = {318--321},\n\ + \ title = {Real-time Sound Source Spatialization as Used in Challenging Bodies:\ + \ Implementation and Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_318.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.c28dd323 + doi: 10.5281/zenodo.1176981 issn: 2220-4806 - month: jun - pdf: 30.pdf - presentation-video: https://youtu.be/oxMrv3R6jK0 - 
title: 'Vibrating shapes : Design and evolution of a spatial augmented reality interface - for actuated instruments' - url: https://doi.org/10.21428%2F92fbeb44.c28dd323 - year: 2022 + keywords: 'gem,live systems,pd,performance sys-,real-time systems,sound architecture,sound + localization,sound spatialization,surround sound,tems' + pages: 318--321 + title: 'Real-time Sound Source Spatialization as Used in Challenging Bodies: Implementation + and Performance' + url: http://www.nime.org/proceedings/2006/nime2006_318.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_35 - abstract: 'Digital Musical Instruments (DMIs) offer new opportunities for collaboration, - such as exchanging sounds or sharing controls between musicians. However, in the - context of spontaneous and heterogeneous orchestras, such as jam sessions, collective - music-making may become challenging due to the diversity and complexity of the - DMIs and the musicians’ unfamiliarity with the others’ instruments. In particular, - the potential lack of visibility into each musician’s respective contribution - to the sound they hear, i.e. who is playing what, might impede their capacity - to play together. In this paper, we propose to augment each instrument in a digital - orchestra with visual feedback extracted in real-time from the instrument’s activity, - in order to increase this awareness. We present the results of a user study in - which we investigate the influence of visualisation level and situational visibility - during short improvisations by groups of three musicians. Our results suggest - that internal visualisations of all instruments displayed close to each musician’s - instrument provide the best awareness.' 
- address: 'The University of Auckland, New Zealand' - articleno: 35 - author: 'Berthaut, Florent and Dahl, Luke' - bibtex: "@inproceedings{NIME22_35,\n abstract = {Digital Musical Instruments (DMIs)\ - \ offer new opportunities for collaboration, such as exchanging sounds or sharing\ - \ controls between musicians. However, in the context of spontaneous and heterogeneous\ - \ orchestras, such as jam sessions, collective music-making may become challenging\ - \ due to the diversity and complexity of the DMIs and the musicians’ unfamiliarity\ - \ with the others’ instruments. In particular, the potential lack of visibility\ - \ into each musician’s respective contribution to the sound they hear, i.e. who\ - \ is playing what, might impede their capacity to play together. In this paper,\ - \ we propose to augment each instrument in a digital orchestra with visual feedback\ - \ extracted in real-time from the instrument’s activity, in order to increase\ - \ this awareness. We present the results of a user study in which we investigate\ - \ the influence of visualisation level and situational visibility during short\ - \ improvisations by groups of three musicians. 
Our results suggest that internal\ - \ visualisations of all instruments displayed close to each musician’s instrument\ - \ provide the best awareness.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {35},\n author = {Berthaut, Florent and Dahl, Luke},\n booktitle\ + ID: Bottoni2006 + address: 'Paris, France' + author: 'Bottoni, Paolo and Faralli, Stefano and Labella, Anna and Pierro, Mario' + bibtex: "@inproceedings{Bottoni2006,\n address = {Paris, France},\n author = {Bottoni,\ + \ Paolo and Faralli, Stefano and Labella, Anna and Pierro, Mario},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.9d974714},\n issn = {2220-4806},\n month\ - \ = {jun},\n pdf = {31.pdf},\n presentation-video = {https://www.youtube.com/watch?v=903cs_oFfwo},\n\ - \ title = {The Effect of Visualisation Level and Situational Visibility in Co-located\ - \ Digital Musical Ensembles},\n url = {https://doi.org/10.21428%2F92fbeb44.9d974714},\n\ - \ year = {2022}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1176879},\n issn = {2220-4806},\n keywords\ + \ = {mapping, planning, agent, Max/MSP },\n pages = {322--325},\n title = {Mapping\ + \ with Planning Agents in the Max/MSP Environment: the GO/Max Language},\n url\ + \ = {http://www.nime.org/proceedings/2006/nime2006_322.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.9d974714 + doi: 10.5281/zenodo.1176879 issn: 2220-4806 - month: jun - pdf: 31.pdf - presentation-video: https://www.youtube.com/watch?v=903cs_oFfwo - title: The Effect of Visualisation Level and Situational Visibility in Co-located - Digital Musical Ensembles - url: https://doi.org/10.21428%2F92fbeb44.9d974714 - year: 2022 + keywords: 'mapping, planning, agent, Max/MSP ' + pages: 322--325 + title: 'Mapping with Planning Agents in the Max/MSP Environment: the 
GO/Max Language' + url: http://www.nime.org/proceedings/2006/nime2006_322.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_36 - abstract: 'The management of the musical structures and the awareness of the performer’s - processes during a performance are two important aspects of live coding improvisations. - To support these aspects, we developed and evaluated two systems, Time_X and Time_Z, - for visualizing the musical form during live coding. Time_X allows visualizing - an entire performance, while Time_Z provides a detailed overview of the last improvised - musical events. Following an autobiographical approach, the two systems have been - used in five sessions by the first author of this paper, who created a diary about - the experience. These diaries have been analyzed to understand the two systems - individually and compare them. We finally discuss the main benefits related to - the practical use of these systems, and possible use scenarios.' - address: 'The University of Auckland, New Zealand' - articleno: 36 - author: 'Rì, Francesco Ardan Dal and Masu, Raul' - bibtex: "@inproceedings{NIME22_36,\n abstract = {The management of the musical structures\ - \ and the awareness of the performer’s processes during a performance are two\ - \ important aspects of live coding improvisations. To support these aspects, we\ - \ developed and evaluated two systems, Time_X and Time_Z, for visualizing the\ - \ musical form during live coding. Time_X allows visualizing an entire performance,\ - \ while Time_Z provides a detailed overview of the last improvised musical events.\ - \ Following an autobiographical approach, the two systems have been used in five\ - \ sessions by the first author of this paper, who created a diary about the experience.\ - \ These diaries have been analyzed to understand the two systems individually\ - \ and compare them. 
We finally discuss the main benefits related to the practical\ - \ use of these systems, and possible use scenarios.},\n address = {The University\ - \ of Auckland, New Zealand},\n articleno = {36},\n author = {R{\\`i}, Francesco\ - \ Ardan Dal and Masu, Raul},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.828b6114},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {32.pdf},\n presentation-video\ - \ = {https://www.youtube.com/watch?v=r-cxEXjnDzg},\n title = {Exploring Musical\ - \ Form: Digital Scores to Support Live Coding Practice},\n url = {https://doi.org/10.21428%2F92fbeb44.828b6114},\n\ - \ year = {2022}\n}\n" + ID: Bonardi2006 + abstract: 'In this article, we present the first step of our research work todesign + a Virtual Assistant for Performers and Stage Directors,able to give a feedback + from performances. We use amethodology to automatically construct fuzzy rules + in a FuzzyRule-Based System that detects contextual emotions from anactor''s performance + during a show. We collect video data from a lot of performances of the sameshow + from which it should be possible to visualize all the emotions and intents or + more precisely "intent graphs". To perform this, the collected data defining low-level + descriptors are aggregated and converted into high-level characterizations. Then, + depending on the retrieved data and on their distributionon the axis, we partition + the universes into classes. The last stepis the building of the fuzzy rules that + are obtained from the classes and that permit to give conclusions to label the + detected emotions.' + address: 'Paris, France' + author: 'Bonardi, Alain and Truck, Isis and Akdag, Herman' + bibtex: "@inproceedings{Bonardi2006,\n abstract = {In this article, we present the\ + \ first step of our research work todesign a Virtual Assistant for Performers\ + \ and Stage Directors,able to give a feedback from performances. 
We use amethodology\ + \ to automatically construct fuzzy rules in a FuzzyRule-Based System that detects\ + \ contextual emotions from anactor's performance during a show. We collect video\ + \ data from a lot of performances of the sameshow from which it should be possible\ + \ to visualize all the emotions and intents or more precisely \"intent graphs\"\ + . To perform this, the collected data defining low-level descriptors are aggregated\ + \ and converted into high-level characterizations. Then, depending on the retrieved\ + \ data and on their distributionon the axis, we partition the universes into classes.\ + \ The last stepis the building of the fuzzy rules that are obtained from the classes\ + \ and that permit to give conclusions to label the detected emotions.},\n address\ + \ = {Paris, France},\n author = {Bonardi, Alain and Truck, Isis and Akdag, Herman},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176875},\n issn = {2220-4806},\n\ + \ keywords = {Virtual Assistant, Intents, Emotion detector, Fuzzy Classes, Stage\ + \ Director, Performance. },\n pages = {326--329},\n title = {Towards a Virtual\ + \ Assistant for Performers and Stage Directors},\n url = {http://www.nime.org/proceedings/2006/nime2006_326.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.828b6114 + doi: 10.5281/zenodo.1176875 issn: 2220-4806 - month: jun - pdf: 32.pdf - presentation-video: https://www.youtube.com/watch?v=r-cxEXjnDzg - title: 'Exploring Musical Form: Digital Scores to Support Live Coding Practice' - url: https://doi.org/10.21428%2F92fbeb44.828b6114 - year: 2022 + keywords: 'Virtual Assistant, Intents, Emotion detector, Fuzzy Classes, Stage Director, + Performance. 
' + pages: 326--329 + title: Towards a Virtual Assistant for Performers and Stage Directors + url: http://www.nime.org/proceedings/2006/nime2006_326.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_37 - abstract: 'In this paper, we discuss our ongoing work to leverage virtual reality - and digital fabrication to investigate sensory mappings across the visual, auditory, - and haptic modalities in VR, and how such mappings can affect musical expression - in this medium. Specifically, we introduce a custom adapter for the Oculus Touch - controller that allows it to be augmented with physical parts that can be tracked, - visualized, and sonified in VR. This way, a VR instrument can be made to have - a physical manifestation that facilitates additional forms of tactile feedback - besides those offered by the Touch controller, enabling new forms of musical interaction. - We then discuss a case study, where we use the adapter to implement a new VR instrument - that integrates the repelling force between neodymium magnets into the controllers. - This allows us to imbue the virtual instrument, which is inherently devoid of - tactility, with haptic feedback—-an essential affordance of many musical instruments.' - address: 'The University of Auckland, New Zealand' - articleno: 37 - author: 'Çamci, Anil and Granzow, John' - bibtex: "@inproceedings{NIME22_37,\n abstract = {In this paper, we discuss our ongoing\ - \ work to leverage virtual reality and digital fabrication to investigate sensory\ - \ mappings across the visual, auditory, and haptic modalities in VR, and how such\ - \ mappings can affect musical expression in this medium. Specifically, we introduce\ - \ a custom adapter for the Oculus Touch controller that allows it to be augmented\ - \ with physical parts that can be tracked, visualized, and sonified in VR. 
This\ - \ way, a VR instrument can be made to have a physical manifestation that facilitates\ - \ additional forms of tactile feedback besides those offered by the Touch controller,\ - \ enabling new forms of musical interaction. We then discuss a case study, where\ - \ we use the adapter to implement a new VR instrument that integrates the repelling\ - \ force between neodymium magnets into the controllers. This allows us to imbue\ - \ the virtual instrument, which is inherently devoid of tactility, with haptic\ - \ feedback—-an essential affordance of many musical instruments.},\n address =\ - \ {The University of Auckland, New Zealand},\n articleno = {37},\n author = {{\\\ - c{C}}amci, Anil and Granzow, John},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.a26a4014},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {33.pdf},\n presentation-video\ - \ = {https://youtu.be/fnoQOO4rz4M},\n title = {Augmented Touch: A Mounting Adapter\ - \ for Oculus Touch Controllers that Enables New Hyperreal Instruments},\n url\ - \ = {https://doi.org/10.21428%2F92fbeb44.a26a4014},\n year = {2022}\n}\n" + ID: Nagashima2006 + abstract: 'This is a studio report of researches and projects in SUAC(Shizuoka University + of Art and Culture). SUAC was foundedin April 2000, and organized NIME04 as you + know. SUAC has "Faculty of Design" and "Department of Art and Science" and all + students study interactive systems and media arts.SUAC has organized Media Art + Festival (MAF) from 2001 to2005. Domestic/overseas artists participated in SUAC + MAF,and SUAC students'' projects also joined and exhibited theirworks in MAF. + I will introduce the production cases withinteractive media-installations by SUAC + students'' projectsfrom the aspect experiences with novel interfaces ineducation + and entertainment and reports on students projectsin the framework of NIME related + courses.' 
+ address: 'Paris, France' + author: 'Nagashima, Yoichi' + bibtex: "@inproceedings{Nagashima2006,\n abstract = {This is a studio report of\ + \ researches and projects in SUAC(Shizuoka University of Art and Culture). SUAC\ + \ was foundedin April 2000, and organized NIME04 as you know. SUAC has \"Faculty\ + \ of Design\" and \"Department of Art and Science\" and all students study interactive\ + \ systems and media arts.SUAC has organized Media Art Festival (MAF) from 2001\ + \ to2005. Domestic/overseas artists participated in SUAC MAF,and SUAC students'\ + \ projects also joined and exhibited theirworks in MAF. I will introduce the production\ + \ cases withinteractive media-installations by SUAC students' projectsfrom the\ + \ aspect experiences with novel interfaces ineducation and entertainment and reports\ + \ on students projectsin the framework of NIME related courses.},\n address =\ + \ {Paris, France},\n author = {Nagashima, Yoichi},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176977},\n issn = {2220-4806},\n keywords = {Interactive\ + \ Installation, Sensors, Media Arts, Studio Reports },\n pages = {330--333},\n\ + \ title = {Students' Projects of Interactive Media-installations in SUAC},\n url\ + \ = {http://www.nime.org/proceedings/2006/nime2006_330.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.a26a4014 + doi: 10.5281/zenodo.1176977 issn: 2220-4806 - month: jun - pdf: 33.pdf - presentation-video: https://youtu.be/fnoQOO4rz4M - title: 'Augmented Touch: A Mounting Adapter for Oculus Touch Controllers that Enables - New Hyperreal Instruments' - url: https://doi.org/10.21428%2F92fbeb44.a26a4014 - year: 2022 + keywords: 'Interactive Installation, Sensors, Media Arts, Studio Reports ' + pages: 330--333 + title: Students' Projects of Interactive 
Media-installations in SUAC + url: http://www.nime.org/proceedings/2006/nime2006_330.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_38 - abstract: 'Automated processes in musical instruments can serve to free a performer - from the physical and mental constraints of music performance, allowing them to - expressively control more aspects of music simultaneously. Modular synthesis has - been a prominent platform for exploring automation through the use of sequencers - and has therefore fostered a tradition of user interface design utilizing increasingly - complex abstraction methods. We investigate the history of sequencer design from - this perspective and introduce machine learning as a potential source for a new - type of intelligent abstraction. We then offer a case study based on this approach - and present Latent Drummer, which is a prototype system dedicated to integrating - machine learning-based interface abstractions into the tradition of sequencers - for modular synthesis.' - address: 'The University of Auckland, New Zealand' - articleno: 38 - author: 'Warren, Nick and Çamci, Anil' - bibtex: "@inproceedings{NIME22_38,\n abstract = {Automated processes in musical\ - \ instruments can serve to free a performer from the physical and mental constraints\ - \ of music performance, allowing them to expressively control more aspects of\ - \ music simultaneously. 
Modular synthesis has been a prominent platform for exploring\ - \ automation through the use of sequencers and has therefore fostered a tradition\ - \ of user interface design utilizing increasingly complex abstraction methods.\ - \ We investigate the history of sequencer design from this perspective and introduce\ - \ machine learning as a potential source for a new type of intelligent abstraction.\ - \ We then offer a case study based on this approach and present Latent Drummer,\ - \ which is a prototype system dedicated to integrating machine learning-based\ - \ interface abstractions into the tradition of sequencers for modular synthesis.},\n\ - \ address = {The University of Auckland, New Zealand},\n articleno = {38},\n author\ - \ = {Warren, Nick and {\\c{C}}amci, Anil},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.ed873363},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {34.pdf},\n presentation-video\ - \ = {https://www.youtube.com/watch?v=Hr6B5dIhMVo},\n title = {Latent Drummer:\ - \ A New Abstraction for Modular Sequencers},\n url = {https://doi.org/10.21428%2F92fbeb44.ed873363},\n\ - \ year = {2022}\n}\n" + ID: Breinbjerg2006 + abstract: 'In this paper we describe the intentions, the design and functionality + of an Acousmatic Composition Environment that allows children or musical novices + to educate their auditory curiosity by recording, manipulating and mixing sounds + of everyday life. The environment consists of three stands: A stand for sound + recording with a soundproof box that ensure good recording facilities in a noisy + environment; a stand for sound manipulation with five simple, tangible interfaces; + a stand for sound mixing with a graphical computer interface presented on two + touch screens. 
' + address: 'Paris, France' + author: 'Breinbjerg, Morten and Caprani, Ole and Lunding, Rasmus and Kramhoft, Line' + bibtex: "@inproceedings{Breinbjerg2006,\n abstract = {In this paper we describe\ + \ the intentions, the design and functionality of an Acousmatic Composition Environment\ + \ that allows children or musical novices to educate their auditory curiosity\ + \ by recording, manipulating and mixing sounds of everyday life. The environment\ + \ consists of three stands: A stand for sound recording with a soundproof box\ + \ that ensure good recording facilities in a noisy environment; a stand for sound\ + \ manipulation with five simple, tangible interfaces; a stand for sound mixing\ + \ with a graphical computer interface presented on two touch screens. },\n address\ + \ = {Paris, France},\n author = {Breinbjerg, Morten and Caprani, Ole and Lunding,\ + \ Rasmus and Kramhoft, Line},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176883},\n\ + \ issn = {2220-4806},\n keywords = {Acousmatic listening, aesthetics, tangible\ + \ interfaces. },\n pages = {334--337},\n title = {An Acousmatic Composition Environment},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_334.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.ed873363 + doi: 10.5281/zenodo.1176883 issn: 2220-4806 - month: jun - pdf: 34.pdf - presentation-video: https://www.youtube.com/watch?v=Hr6B5dIhMVo - title: 'Latent Drummer: A New Abstraction for Modular Sequencers' - url: https://doi.org/10.21428%2F92fbeb44.ed873363 - year: 2022 + keywords: 'Acousmatic listening, aesthetics, tangible interfaces. 
' + pages: 334--337 + title: An Acousmatic Composition Environment + url: http://www.nime.org/proceedings/2006/nime2006_334.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_39 - abstract: 'We present an AI-empowered music tutor with a systematic curriculum design. - The tutoring system fully utilizes the interactivity space in the auditory, visual, - and haptic modalities, supporting seven haptic feedback modes and four visual - feedback modes. The combinations of those modes form different cross-modal tasks - of varying difficulties, allowing the curriculum to apply the “scaffolding then - fading” educational technique to foster active learning and amortize cognitive - load. We study the effect of multimodal instructions, guidance, and feedback using - a qualitative pilot study with two subjects over ~11 hours of training with our - tutoring system. The study reveals valuable insights about the music learning - process and points towards new features and learning modes for the next prototype.' - address: 'The University of Auckland, New Zealand' - articleno: 39 - author: 'Chin, Daniel and Xia, Gus' - bibtex: "@inproceedings{NIME22_39,\n abstract = {We present an AI-empowered music\ - \ tutor with a systematic curriculum design. The tutoring system fully utilizes\ - \ the interactivity space in the auditory, visual, and haptic modalities, supporting\ - \ seven haptic feedback modes and four visual feedback modes. The combinations\ - \ of those modes form different cross-modal tasks of varying difficulties, allowing\ - \ the curriculum to apply the “scaffolding then fading” educational technique\ - \ to foster active learning and amortize cognitive load. We study the effect of\ - \ multimodal instructions, guidance, and feedback using a qualitative pilot study\ - \ with two subjects over ~11 hours of training with our tutoring system. 
The study\ - \ reveals valuable insights about the music learning process and points towards\ - \ new features and learning modes for the next prototype.},\n address = {The University\ - \ of Auckland, New Zealand},\n articleno = {39},\n author = {Chin, Daniel and\ - \ Xia, Gus},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.c6910363},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {39.pdf},\n presentation-video\ - \ = {https://youtu.be/DifOKvH1ErQ},\n title = {A Computer-aided Multimodal Music\ - \ Learning System with Curriculum: A Pilot Study},\n url = {https://doi.org/10.21428%2F92fbeb44.c6910363},\n\ - \ year = {2022}\n}\n" + ID: Hamilton2006 + address: 'Paris, France' + author: 'Hamilton, Robert' + bibtex: "@inproceedings{Hamilton2006,\n address = {Paris, France},\n author = {Hamilton,\ + \ Robert},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176919},\n issn = {2220-4806},\n\ + \ keywords = {Bioinformatics, composition, real-time score generation. },\n pages\ + \ = {338--341},\n title = {Bioinformatic Feedback: Performer Bio-data as a Driver\ + \ for Real-time Composition},\n url = {http://www.nime.org/proceedings/2006/nime2006_338.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.c6910363 + doi: 10.5281/zenodo.1176919 issn: 2220-4806 - month: jun - pdf: 39.pdf - presentation-video: https://youtu.be/DifOKvH1ErQ - title: 'A Computer-aided Multimodal Music Learning System with Curriculum: A Pilot - Study' - url: https://doi.org/10.21428%2F92fbeb44.c6910363 - year: 2022 + keywords: 'Bioinformatics, composition, real-time score generation. 
' + pages: 338--341 + title: 'Bioinformatic Feedback: Performer Bio-data as a Driver for Real-time Composition' + url: http://www.nime.org/proceedings/2006/nime2006_338.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_40 - abstract: 'Movement-sound interactive systems are at the interface of different - artistic and educational practices. Within this multiplicity of uses, we examine - common denominators in terms of learning, appropriation and relationship to technological - systems. While these topics have been previously reported at NIME, we wanted to - investigate how practitioners, coming from different perspectives, relate to these - questions. We conducted interviews with 6 artists who are engaged in movement-sound - interactions: 1 performer, 1 performer/composer, 1 composer, 1 teacher/composer, - 1 dancer/teacher, 1 dancer. Through a thematic analysis of the transcripts we - identified three main themes related to (1) the mediating role of technological - tools (2) usability and normativity, and (3) learning and practice. These results - provide ground for discussion about the design and study of movementsound interactive - systems.' - address: 'The University of Auckland, New Zealand' - articleno: 40 - author: 'Paredes, Victor and Françoise, Jules and Bevilacqua, Frederic' - bibtex: "@inproceedings{NIME22_40,\n abstract = {Movement-sound interactive systems\ - \ are at the interface of different artistic and educational practices. Within\ - \ this multiplicity of uses, we examine common denominators in terms of learning,\ - \ appropriation and relationship to technological systems. While these topics\ - \ have been previously reported at NIME, we wanted to investigate how practitioners,\ - \ coming from different perspectives, relate to these questions. We conducted\ - \ interviews with 6 artists who are engaged in movement-sound interactions: 1\ - \ performer, 1 performer/composer, 1 composer, 1 teacher/composer, 1 dancer/teacher,\ - \ 1 dancer. 
Through a thematic analysis of the transcripts we identified three\ - \ main themes related to (1) the mediating role of technological tools (2) usability\ - \ and normativity, and (3) learning and practice. These results provide ground\ - \ for discussion about the design and study of movementsound interactive systems.},\n\ - \ address = {The University of Auckland, New Zealand},\n articleno = {40},\n author\ - \ = {Paredes, Victor and Fran{\\c{c}}oise, Jules and Bevilacqua, Frederic},\n\ + ID: Pak2006 + address: 'Paris, France' + author: 'Pak, Jonathan' + bibtex: "@inproceedings{Pak2006,\n address = {Paris, France},\n author = {Pak, Jonathan},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.5b9ac5ba},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {42.pdf},\n presentation-video = {https://youtu.be/n6DZE7TdEeI},\n\ - \ title = {Entangling Practice with Artistic and Educational Aims: Interviews\ - \ on Technology-based Movement-Sound Interactions},\n url = {https://doi.org/10.21428%2F92fbeb44.5b9ac5ba},\n\ - \ year = {2022}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1176983},\n issn = {2220-4806},\n\ + \ pages = {342--345},\n title = {The Light Matrix: An Interface for Musical Expression\ + \ and Performance},\n url = {http://www.nime.org/proceedings/2006/nime2006_342.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.5b9ac5ba + doi: 10.5281/zenodo.1176983 issn: 2220-4806 - month: jun - pdf: 42.pdf - presentation-video: https://youtu.be/n6DZE7TdEeI - title: 'Entangling Practice with Artistic and Educational Aims: Interviews on Technology-based - Movement-Sound Interactions' - url: https://doi.org/10.21428%2F92fbeb44.5b9ac5ba - year: 2022 + pages: 342--345 + title: 'The Light Matrix: An Interface for Musical Expression and Performance' + url: 
http://www.nime.org/proceedings/2006/nime2006_342.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_41 - abstract: 'The Web MIDI API allows the Web browser to interact with hardware and - software MIDI devices detected at the operating system level. This ability for - the browser to interface with most electronic instruments made in the past 30 - years offers significant opportunities to preserve, enhance or re-discover a rich - musical and technical heritage. By including MIDI in the broaderWeb ecosystem, - this API also opens endless possibilities to create music in a networked and socially - engaging way. However, the Web MIDI API specification only offers low-level access - to MIDI devices and messages. For instance, it does not provide semantics on top - of the raw numerical messages exchanged between devices. This is likely to deter - novice programmers and significantly slow down experienced programmers. After - reviewing the usability of the bare Web MIDI API, the WEBMIDI. js JavaScript library - was created to alleviate this situation. By decoding raw MIDI messages, encapsulating - complicated processes and providing semantically significant objects, properties, - methods and events, the library makes it easier to interface with MIDI devices - from compatible browsers. This paper first looks at the context in which the specification - was created and then discusses the usability improvements layered on top of the - API by the opensource WEBMIDI.js library.' - address: 'The University of Auckland, New Zealand' - articleno: 41 - author: 'Côté, Jean-Philippe' - bibtex: "@inproceedings{NIME22_41,\n abstract = {The Web MIDI API allows the Web\ - \ browser to interact with hardware and software MIDI devices detected at the\ - \ operating system level. 
This ability for the browser to interface with most\ - \ electronic instruments made in the past 30 years offers significant opportunities\ - \ to preserve, enhance or re-discover a rich musical and technical heritage. By\ - \ including MIDI in the broaderWeb ecosystem, this API also opens endless possibilities\ - \ to create music in a networked and socially engaging way. However, the Web MIDI\ - \ API specification only offers low-level access to MIDI devices and messages.\ - \ For instance, it does not provide semantics on top of the raw numerical messages\ - \ exchanged between devices. This is likely to deter novice programmers and significantly\ - \ slow down experienced programmers. After reviewing the usability of the bare\ - \ Web MIDI API, the WEBMIDI. js JavaScript library was created to alleviate this\ - \ situation. By decoding raw MIDI messages, encapsulating complicated processes\ - \ and providing semantically significant objects, properties, methods and events,\ - \ the library makes it easier to interface with MIDI devices from compatible browsers.\ - \ This paper first looks at the context in which the specification was created\ - \ and then discusses the usability improvements layered on top of the API by the\ - \ opensource WEBMIDI.js library.},\n address = {The University of Auckland, New\ - \ Zealand},\n articleno = {41},\n author = {C{\\^o}t{\\'e}, Jean-Philippe},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.388e4764},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {43.pdf},\n presentation-video = {https://youtu.be/jMzjpUJO860},\n\ - \ title = {User-Friendly {MIDI} in the Web Browser},\n url = {https://doi.org/10.21428%2F92fbeb44.388e4764},\n\ - \ year = {2022}\n}\n" + ID: Kobayashi2006 + address: 'Paris, France' + author: 'Kobayashi, Shigeru and Endo, Takanori and Harada, Katsuhiko and Oishi, + Shosei' + bibtex: 
"@inproceedings{Kobayashi2006,\n address = {Paris, France},\n author = {Kobayashi,\ + \ Shigeru and Endo, Takanori and Harada, Katsuhiko and Oishi, Shosei},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176945},\n issn = {2220-4806},\n keywords\ + \ = {learning,rapid prototyping,reconfigurable,sensor interface},\n pages = {346--351},\n\ + \ title = {GAINER: A Reconfigurable {I/O} Module and Software Libraries for Education},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_346.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.388e4764 + doi: 10.5281/zenodo.1176945 issn: 2220-4806 - month: jun - pdf: 43.pdf - presentation-video: https://youtu.be/jMzjpUJO860 - title: User-Friendly MIDI in the Web Browser - url: https://doi.org/10.21428%2F92fbeb44.388e4764 - year: 2022 + keywords: 'learning,rapid prototyping,reconfigurable,sensor interface' + pages: 346--351 + title: 'GAINER: A Reconfigurable {I/O} Module and Software Libraries for Education' + url: http://www.nime.org/proceedings/2006/nime2006_346.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_42 - abstract: 'In the search for better designs, one tool is to specify the design problem - such that globally optimal solutions can be found. I present a design process - using this approach, its strengths and limitations, and its results in the form - of four pitch fingering systems that are ergonomic, simple, and symmetric. In - hindsight, I emphasize the subjectivity of the design process, despite its reliance - on objective quantitative assessment.' 
- address: 'The University of Auckland, New Zealand' - articleno: 42 - author: 'West, Travis' - bibtex: "@inproceedings{NIME22_42,\n abstract = {In the search for better designs,\ - \ one tool is to specify the design problem such that globally optimal solutions\ - \ can be found. I present a design process using this approach, its strengths\ - \ and limitations, and its results in the form of four pitch fingering systems\ - \ that are ergonomic, simple, and symmetric. In hindsight, I emphasize the subjectivity\ - \ of the design process, despite its reliance on objective quantitative assessment.},\n\ - \ address = {The University of Auckland, New Zealand},\n articleno = {42},\n author\ - \ = {West, Travis},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.d6c9dcae},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {53.pdf},\n presentation-video\ - \ = {https://youtu.be/4QB3sNRmK1E},\n title = {Pitch Fingering Systems and the\ - \ Search for Perfection},\n url = {https://doi.org/10.21428%2F92fbeb44.d6c9dcae},\n\ - \ year = {2022}\n}\n" + ID: Beilharz2006 + abstract: 'Hyper-shaku (Border-Crossing) is an interactive sensor environment that + uses motion sensors to trigger immediate responses and generative processes augmenting + the Japanese bamboo shakuhachi in both the auditory and visual domain. The latter + differentiates this process from many hyper-instruments by building a performance + of visual design as well as electronic music on top of the acoustic performance. + It utilizes a combination of computer vision and wireless sensing technologies + conflated from preceding works. This paper outlines the use of gesture in these + preparatory sound and audio-visual performative, installation and sonification + works, leading to a description of the Hyper-shaku environment integrating sonification + and generative elements. 
' + address: 'Paris, France' + author: 'Beilharz, Kirsty and Jakovich, Joanne and Ferguson, Sam' + bibtex: "@inproceedings{Beilharz2006,\n abstract = {Hyper-shaku (Border-Crossing)\ + \ is an interactive sensor environment that uses motion sensors to trigger immediate\ + \ responses and generative processes augmenting the Japanese bamboo shakuhachi\ + \ in both the auditory and visual domain. The latter differentiates this process\ + \ from many hyper-instruments by building a performance of visual design as well\ + \ as electronic music on top of the acoustic performance. It utilizes a combination\ + \ of computer vision and wireless sensing technologies conflated from preceding\ + \ works. This paper outlines the use of gesture in these preparatory sound and\ + \ audio-visual performative, installation and sonification works, leading to a\ + \ description of the Hyper-shaku environment integrating sonification and generative\ + \ elements. },\n address = {Paris, France},\n author = {Beilharz, Kirsty and Jakovich,\ + \ Joanne and Ferguson, Sam},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176867},\n\ + \ issn = {2220-4806},\n keywords = {Gesture-controllers, sonification, hyper-instrument\ + \ },\n pages = {352--357},\n title = {Hyper-shaku (Border-crossing): Towards the\ + \ Multi-modal Gesture-controlled Hyper-Instrument},\n url = {http://www.nime.org/proceedings/2006/nime2006_352.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.d6c9dcae + doi: 10.5281/zenodo.1176867 issn: 2220-4806 - month: jun - pdf: 53.pdf - presentation-video: https://youtu.be/4QB3sNRmK1E - title: Pitch Fingering Systems and the Search for Perfection - url: https://doi.org/10.21428%2F92fbeb44.d6c9dcae - year: 2022 + keywords: 'Gesture-controllers, sonification, hyper-instrument ' + pages: 352--357 + 
title: 'Hyper-shaku (Border-crossing): Towards the Multi-modal Gesture-controlled + Hyper-Instrument' + url: http://www.nime.org/proceedings/2006/nime2006_352.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_43 - abstract: 'The mubone (lowercase “m”) is a family of instruments descended from - the trombone family, a conceptual design space for trombone augmentations, and - a growing musical practice rooted in this design space and the artistic affordances - that emerge from it. We present the design of the mubone and discuss our initial - implementations. We then reflect on the beginnings of an artistic practice: playing - mubone, as well as exploring how the instrument adapts to diverse creative contexts. - We discuss mappings, musical exercises, and the development of Garcia, a sound-and-movement - composition for mubone.' - address: 'The University of Auckland, New Zealand' - articleno: 43 - author: 'West, Travis and Leung, Kalun' - bibtex: "@inproceedings{NIME22_43,\n abstract = {The mubone (lowercase “m”) is a\ - \ family of instruments descended from the trombone family, a conceptual design\ - \ space for trombone augmentations, and a growing musical practice rooted in this\ - \ design space and the artistic affordances that emerge from it. We present the\ - \ design of the mubone and discuss our initial implementations. We then reflect\ - \ on the beginnings of an artistic practice: playing mubone, as well as exploring\ - \ how the instrument adapts to diverse creative contexts. 
We discuss mappings,\ - \ musical exercises, and the development of Garcia, a sound-and-movement composition\ - \ for mubone.},\n address = {The University of Auckland, New Zealand},\n articleno\ - \ = {43},\n author = {West, Travis and Leung, Kalun},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.e56a93c9},\n issn = {2220-4806},\n month = {jun},\n\ - \ pdf = {54.pdf},\n presentation-video = {https://youtu.be/B51eofO4f4Y},\n title\ - \ = {early prototypes and artistic practice with the mubone},\n url = {https://doi.org/10.21428%2F92fbeb44.e56a93c9},\n\ - \ year = {2022}\n}\n" + ID: Farwell2006 + abstract: 'Three electro-acoustic systems were devised for a newtrombone work, Rouse. + This paper presents the technicalsystems and outlines their musical context and + motivation. TheuSlide measures trombone slide-extension by a minimalhardware ultrasonic + technique. An easy calibration proceduremaps linear extension to the slide "positions" + of the player. TheeMouth is a driver that replaces the mouthpiece, with softwareemulation + of trombone tone and algorithmic musical lines,allowing the trombone to appear + to play itself. The eMute isbuilt around a loudspeaker unit, driven so that it + affects stronglythe player''s embouchure, allowing fine control of complex beatpatterns. + eMouth and eMute, under control of the uSlide, set upimprovisatory worlds that + are part of the composed architectureof Rouse.' + address: 'Paris, France' + author: 'Farwell, Neal' + bibtex: "@inproceedings{Farwell2006,\n abstract = {Three electro-acoustic systems\ + \ were devised for a newtrombone work, Rouse. This paper presents the technicalsystems\ + \ and outlines their musical context and motivation. TheuSlide measures trombone\ + \ slide-extension by a minimalhardware ultrasonic technique. An easy calibration\ + \ proceduremaps linear extension to the slide \"positions\" of the player. 
TheeMouth\ + \ is a driver that replaces the mouthpiece, with softwareemulation of trombone\ + \ tone and algorithmic musical lines,allowing the trombone to appear to play itself.\ + \ The eMute isbuilt around a loudspeaker unit, driven so that it affects stronglythe\ + \ player's embouchure, allowing fine control of complex beatpatterns. eMouth and\ + \ eMute, under control of the uSlide, set upimprovisatory worlds that are part\ + \ of the composed architectureof Rouse.},\n address = {Paris, France},\n author\ + \ = {Farwell, Neal},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176895},\n\ + \ issn = {2220-4806},\n keywords = {composition,electro-acoustic adaptation,emulation,illusion,improvisation,mapping,mute,trombone,ultrasonic},\n\ + \ pages = {358--363},\n title = {Adapting the Trombone: a Suite of Electro-acoustic\ + \ Interventions for the Piece},\n url = {http://www.nime.org/proceedings/2006/nime2006_358.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.e56a93c9 + doi: 10.5281/zenodo.1176895 issn: 2220-4806 - month: jun - pdf: 54.pdf - presentation-video: https://youtu.be/B51eofO4f4Y - title: early prototypes and artistic practice with the mubone - url: https://doi.org/10.21428%2F92fbeb44.e56a93c9 - year: 2022 + keywords: 'composition,electro-acoustic adaptation,emulation,illusion,improvisation,mapping,mute,trombone,ultrasonic' + pages: 358--363 + title: 'Adapting the Trombone: a Suite of Electro-acoustic Interventions for the + Piece' + url: http://www.nime.org/proceedings/2006/nime2006_358.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_44 - abstract: 'The study of extended reality musical instruments is a burgeoning topic - in the field of new interfaces for musical expression. 
We developed a mixed reality - musical interface (MRMI) as a technology probe to inspire design for experienced - musicians. We namely explore (i) the ergonomics of the interface in relation to - musical expression and (ii) user-adaptive hand pose recognition as gestural control. - The MRMI probe was experienced by 10 musician participants (mean age: 25.6 years - [SD=3.0], 6 females, 4 males). We conducted a user evaluation comprising three - stages. After an experimentation period, participants were asked to accompany - a pre-recorded piece of music. In a post-task stage, participants took part in - semi-structured interviews, which were subjected to thematic analysis. Prevalent - themes included reducing the size of the interface, issues with the field of view - of the device and physical strain from playing. Participants were largely in favour - of hand poses as expressive control, although this depended on customisation and - temporal dynamics; the use of interactive machine learning (IML) for user-adaptive - hand pose recognition was well received by participants.' - address: 'The University of Auckland, New Zealand' - articleno: 44 - author: 'Graf, Max and Barthet, Mathieu' - bibtex: "@inproceedings{NIME22_44,\n abstract = {The study of extended reality musical\ - \ instruments is a burgeoning topic in the field of new interfaces for musical\ - \ expression. We developed a mixed reality musical interface (MRMI) as a technology\ - \ probe to inspire design for experienced musicians. We namely explore (i) the\ - \ ergonomics of the interface in relation to musical expression and (ii) user-adaptive\ - \ hand pose recognition as gestural control. The MRMI probe was experienced by\ - \ 10 musician participants (mean age: 25.6 years [SD=3.0], 6 females, 4 males).\ - \ We conducted a user evaluation comprising three stages. 
After an experimentation\ - \ period, participants were asked to accompany a pre-recorded piece of music.\ - \ In a post-task stage, participants took part in semi-structured interviews,\ - \ which were subjected to thematic analysis. Prevalent themes included reducing\ - \ the size of the interface, issues with the field of view of the device and physical\ - \ strain from playing. Participants were largely in favour of hand poses as expressive\ - \ control, although this depended on customisation and temporal dynamics; the\ - \ use of interactive machine learning (IML) for user-adaptive hand pose recognition\ - \ was well received by participants.},\n address = {The University of Auckland,\ - \ New Zealand},\n articleno = {44},\n author = {Graf, Max and Barthet, Mathieu},\n\ + ID: MakiPatola2006 + address: 'Paris, France' + author: 'Maki-Patola, Teemu and H\''''{a}m\''''{a}l\''''{a}inen, Perttu and Kanerva, + Aki' + bibtex: "@inproceedings{MakiPatola2006,\n address = {Paris, France},\n author =\ + \ {Maki-Patola, Teemu and H\\''{a}m\\''{a}l\\''{a}inen, Perttu and Kanerva, Aki},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.56ba9b93},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {59.pdf},\n presentation-video = {https://youtu.be/qhE5X3rAWgg},\n\ - \ title = {Mixed Reality Musical Interface: Exploring Ergonomics and Adaptive\ - \ Hand Pose Recognition for Gestural Control},\n url = {https://doi.org/10.21428%2F92fbeb44.56ba9b93},\n\ - \ year = {2022}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1176971},\n issn = {2220-4806},\n\ + \ keywords = {1,2,2 9,3897,39,425,43,7,8,9},\n pages = {364--369},\n title = {The\ + \ Augmented Djembe Drum --- Sculpting Rhythms},\n url = {http://www.nime.org/proceedings/2006/nime2006_364.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 
10.21428/92fbeb44.56ba9b93 + doi: 10.5281/zenodo.1176971 issn: 2220-4806 - month: jun - pdf: 59.pdf - presentation-video: https://youtu.be/qhE5X3rAWgg - title: 'Mixed Reality Musical Interface: Exploring Ergonomics and Adaptive Hand - Pose Recognition for Gestural Control' - url: https://doi.org/10.21428%2F92fbeb44.56ba9b93 - year: 2022 + keywords: '1,2,2 9,3897,39,425,43,7,8,9' + pages: 364--369 + title: The Augmented Djembe Drum --- Sculpting Rhythms + url: http://www.nime.org/proceedings/2006/nime2006_364.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_45 - abstract: 'Active participation of Deaf individuals in the design and performance - of artistic practice benefits increasing collaboration potentials between Deaf - and hearing individuals. In this research, we present co-design sessions with - a Deaf dancer and a hearing musician to explore how they can influence each other’s - expressive explorations. We also study vibrotactile wearable interface designs - to better support the Deaf dancer’s perception of sound and music. We report our - findings and observations on the co-design process over four workshops and one - performance and public demonstration session. We detail the design and implementation - of the wearable vibrotactile listening garment and participants’ selfreported - experiences. This interface provides participants with more embodied listening - opportunities and felt experiences of sound and music. All participants reported - that the listening experience highlighted their first-person experience, focusing - on their bodies, "regardless of an observer". These findings show how we can improve - both such an internal experience of the listener and the collaboration potential - between performers for increased inclusion. 
Overall, this paper addresses two - different modalities of haptic feedback, the participation of Deaf users in wearable - haptics design as well as music-movement performance practice, and artistic co-creation - beyond technology development.' - address: 'The University of Auckland, New Zealand' - articleno: 45 - author: 'Cavdir, Doga' - bibtex: "@inproceedings{NIME22_45,\n abstract = {Active participation of Deaf individuals\ - \ in the design and performance of artistic practice benefits increasing collaboration\ - \ potentials between Deaf and hearing individuals. In this research, we present\ - \ co-design sessions with a Deaf dancer and a hearing musician to explore how\ - \ they can influence each other’s expressive explorations. We also study vibrotactile\ - \ wearable interface designs to better support the Deaf dancer’s perception of\ - \ sound and music. We report our findings and observations on the co-design process\ - \ over four workshops and one performance and public demonstration session. We\ - \ detail the design and implementation of the wearable vibrotactile listening\ - \ garment and participants’ selfreported experiences. This interface provides\ - \ participants with more embodied listening opportunities and felt experiences\ - \ of sound and music. All participants reported that the listening experience\ - \ highlighted their first-person experience, focusing on their bodies, \"regardless\ - \ of an observer\". These findings show how we can improve both such an internal\ - \ experience of the listener and the collaboration potential between performers\ - \ for increased inclusion. 
Overall, this paper addresses two different modalities\ - \ of haptic feedback, the participation of Deaf users in wearable haptics design\ - \ as well as music-movement performance practice, and artistic co-creation beyond\ - \ technology development.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {45},\n author = {Cavdir, Doga},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.b24043e8},\n issn = {2220-4806},\n month = {jun},\n pdf =\ - \ {64.pdf},\n presentation-video = {https://youtu.be/tuSo2Sq7jy4},\n title = {Touch,\ - \ Listen, (Re)Act: Co-designing Vibrotactile Wearable Instruments for Deaf and\ - \ Hard of Hearing},\n url = {https://doi.org/10.21428%2F92fbeb44.b24043e8},\n\ - \ year = {2022}\n}\n" + ID: Favilla2006 + address: 'Paris, France' + author: 'Favilla, Stuart and Cannon, Joanne' + bibtex: "@inproceedings{Favilla2006,\n address = {Paris, France},\n author = {Favilla,\ + \ Stuart and Cannon, Joanne},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176897},\n\ + \ issn = {2220-4806},\n pages = {370--375},\n title = {Children of Grainger: Leather\ + \ Instruments for Free Music},\n url = {http://www.nime.org/proceedings/2006/nime2006_370.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.b24043e8 + doi: 10.5281/zenodo.1176897 issn: 2220-4806 - month: jun - pdf: 64.pdf - presentation-video: https://youtu.be/tuSo2Sq7jy4 - title: 'Touch, Listen, (Re)Act: Co-designing Vibrotactile Wearable Instruments for - Deaf and Hard of Hearing' - url: https://doi.org/10.21428%2F92fbeb44.b24043e8 - year: 2022 + pages: 370--375 + title: 'Children of Grainger: Leather Instruments for Free Music' + url: http://www.nime.org/proceedings/2006/nime2006_370.pdf + year: 2006 - 
ENTRYTYPE: inproceedings - ID: NIME22_46 - abstract: 'While the value of new digital musical instruments lies to a large extent - in their music-making capacity, analyses of new instruments in the research literature - often focus on analyses of gesture or performer experience rather than the content - of the music made with the instrument. In this paper we present a motivic analysis - of music made with new instruments. In the context of music, a motive is a small, - analysable musical fragment or phrase that is important in or characteristic of - a composition. We outline our method for identifying and analysing motives in - music made with new instruments, and display its use in a case study in which - 10 musicians created performances with a new large-scale digital musical instrument - that we designed. This research illustrates the value of a musicological approach - to NIME research, suggesting the need for a broader conversation about a musicology - of NIME performances, as distinct from its instruments.' - address: 'The University of Auckland, New Zealand' - articleno: 46 - author: 'Mice, Lia and McPherson, Andrew' - bibtex: "@inproceedings{NIME22_46,\n abstract = {While the value of new digital\ - \ musical instruments lies to a large extent in their music-making capacity, analyses\ - \ of new instruments in the research literature often focus on analyses of gesture\ - \ or performer experience rather than the content of the music made with the instrument.\ - \ In this paper we present a motivic analysis of music made with new instruments.\ - \ In the context of music, a motive is a small, analysable musical fragment or\ - \ phrase that is important in or characteristic of a composition. We outline our\ - \ method for identifying and analysing motives in music made with new instruments,\ - \ and display its use in a case study in which 10 musicians created performances\ - \ with a new large-scale digital musical instrument that we designed. 
This research\ - \ illustrates the value of a musicological approach to NIME research, suggesting\ - \ the need for a broader conversation about a musicology of NIME performances,\ - \ as distinct from its instruments.},\n address = {The University of Auckland,\ - \ New Zealand},\n articleno = {46},\n author = {Mice, Lia and McPherson, Andrew},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.21428/92fbeb44.8c1c9817},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {65.pdf},\n presentation-video = {https://youtu.be/nXrRJGt11J4},\n\ - \ title = {The M in {NIME}: Motivic analysis and the case for a musicology of\ - \ {NIME} performances},\n url = {https://doi.org/10.21428%2F92fbeb44.8c1c9817},\n\ - \ year = {2022}\n}\n" + ID: Hsu2006 + abstract: 'This paper describes recent enhancements in an interactive system designed + to improvise with saxophonist John Butcher [1]. In addition to musical parameters + such as pitch and loudness, our system is able to analyze timbral characteristics + of the saxophone tone in real-time, and use timbral information to guide the generation + of response material. We capture each saxophone gesture on the fly, extract a + set of gestural and timbral contours, and store them in a repository. Improvising + agents can consult the repository when generating responses. The gestural or timbral + progression of a saxophone phrase can be remapped or transformed; this enables + a variety of response material that also references audible contours of the original + saxophone gestures. A single simple framework is used to manage gestural and timbral + information extracted from analysis, and for expressive control of virtual instruments + in a free improvisation context. 
' + address: 'Paris, France' + author: 'Hsu, William' + bibtex: "@inproceedings{Hsu2006,\n abstract = {This paper describes recent enhancements\ + \ in an interactive system designed to improvise with saxophonist John Butcher\ + \ [1]. In addition to musical parameters such as pitch and loudness, our system\ + \ is able to analyze timbral characteristics of the saxophone tone in real-time,\ + \ and use timbral information to guide the generation of response material. We\ + \ capture each saxophone gesture on the fly, extract a set of gestural and timbral\ + \ contours, and store them in a repository. Improvising agents can consult the\ + \ repository when generating responses. The gestural or timbral progression of\ + \ a saxophone phrase can be remapped or transformed; this enables a variety of\ + \ response material that also references audible contours of the original saxophone\ + \ gestures. A single simple framework is used to manage gestural and timbral information\ + \ extracted from analysis, and for expressive control of virtual instruments in\ + \ a free improvisation context. },\n address = {Paris, France},\n author = {Hsu,\ + \ William},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176927},\n issn\ + \ = {2220-4806},\n keywords = {Interactive music systems, timbre analysis, instrument\ + \ control. 
},\n pages = {376--379},\n title = {Managing Gesture and Timbre for\ + \ Analysis and Instrument Control in an Interactive Environment},\n url = {http://www.nime.org/proceedings/2006/nime2006_376.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.8c1c9817 + doi: 10.5281/zenodo.1176927 issn: 2220-4806 - month: jun - pdf: 65.pdf - presentation-video: https://youtu.be/nXrRJGt11J4 - title: 'The M in NIME: Motivic analysis and the case for a musicology of NIME performances' - url: https://doi.org/10.21428%2F92fbeb44.8c1c9817 - year: 2022 + keywords: 'Interactive music systems, timbre analysis, instrument control. ' + pages: 376--379 + title: Managing Gesture and Timbre for Analysis and Instrument Control in an Interactive + Environment + url: http://www.nime.org/proceedings/2006/nime2006_376.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_47 - abstract: 'While it is accepted that accessible digital musical instruments (ADMIs) - should be created with the involvement of targeted communities, participatory - design (PD) is an unsettled practice that gets defined variously, loosely or not - at all. In this paper, we explore the concept of dialogic design and provide a - case study of how it can be used in the design of an ADMI. While a future publication - will give detail of the design of this instrument and provide an analysis of the - data from this study, in this paper we set out how the conversations between researcher - and participant have prepared us to build an instrument that responds to the lived - experience of the participant.' 
- address: 'The University of Auckland, New Zealand' - articleno: 47 - author: 'Zayas-Garin, Eevee and McPherson, Andrew' - bibtex: "@inproceedings{NIME22_47,\n abstract = {While it is accepted that accessible\ - \ digital musical instruments (ADMIs) should be created with the involvement of\ - \ targeted communities, participatory design (PD) is an unsettled practice that\ - \ gets defined variously, loosely or not at all. In this paper, we explore the\ - \ concept of dialogic design and provide a case study of how it can be used in\ - \ the design of an ADMI. While a future publication will give detail of the design\ - \ of this instrument and provide an analysis of the data from this study, in this\ - \ paper we set out how the conversations between researcher and participant have\ - \ prepared us to build an instrument that responds to the lived experience of\ - \ the participant.},\n address = {The University of Auckland, New Zealand},\n\ - \ articleno = {47},\n author = {Zayas-Garin, Eevee and McPherson, Andrew},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.2b8ce9a4},\n issn = {2220-4806},\n month\ - \ = {jun},\n pdf = {66.pdf},\n presentation-video = {https://www.youtube.com/watch?v=8l1N3G0BdKw},\n\ - \ title = {Dialogic Design of Accessible Digital Musical Instruments: Investigating\ - \ Performer Experience},\n url = {https://doi.org/10.21428%2F92fbeb44.2b8ce9a4},\n\ - \ year = {2022}\n}\n" + ID: Hamel2006 + address: 'Paris, France' + author: 'Hamel, Keith' + bibtex: "@inproceedings{Hamel2006,\n address = {Paris, France},\n author = {Hamel,\ + \ Keith},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176917},\n issn = {2220-4806},\n\ + \ pages = {380--383},\n title = {Integrated Interactive Music Performance Environment},\n\ + \ url = 
{http://www.nime.org/proceedings/2006/nime2006_380.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.2b8ce9a4 + doi: 10.5281/zenodo.1176917 issn: 2220-4806 - month: jun - pdf: 66.pdf - presentation-video: https://www.youtube.com/watch?v=8l1N3G0BdKw - title: 'Dialogic Design of Accessible Digital Musical Instruments: Investigating - Performer Experience' - url: https://doi.org/10.21428%2F92fbeb44.2b8ce9a4 - year: 2022 + pages: 380--383 + title: Integrated Interactive Music Performance Environment + url: http://www.nime.org/proceedings/2006/nime2006_380.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_48 - abstract: 'To the naked ear, the installation Being With The Waves appears silent, - but a hidden composition of voices, instrumental tones, and maritime sounds is - revealed through wearing modified headphones. The installation consists of an - array of tweeters emitting a multi-channel ultrasonic composition that sounds - physically in the space. Ultrasonic phenomena present at the listener’s ears are - captured by microphones embedded on the outside of headphone earcups, shifted - into audibility, and output to the headphones. The amplitude demodulation of ultrasonic - material results in exaggerated Doppler effects and listeners hear the music bend - and shift precisely with their movement. There are no movement sensors, mappings, - or feedback loops, yet the installation is perceived as interactive due to the - close entanglement of the listener with sound phenomena. The dynamic quality of - interaction emerges solely through the listening faculties of the visitor, as - an embodied sensory experience determined by their orientation to sounds, physical - movement, and perceptual behaviour. This paper describes key influences on the - installation, its ultrasonic technology, the design of modified headphones, and - the compositional approach.' 
- address: 'The University of Auckland, New Zealand' - articleno: 48 - author: 'Robson, Nicole and McPherson, Andrew and Bryan-Kinns, Nick' - bibtex: "@inproceedings{NIME22_48,\n abstract = {To the naked ear, the installation\ - \ Being With The Waves appears silent, but a hidden composition of voices, instrumental\ - \ tones, and maritime sounds is revealed through wearing modified headphones.\ - \ The installation consists of an array of tweeters emitting a multi-channel ultrasonic\ - \ composition that sounds physically in the space. Ultrasonic phenomena present\ - \ at the listener’s ears are captured by microphones embedded on the outside of\ - \ headphone earcups, shifted into audibility, and output to the headphones. The\ - \ amplitude demodulation of ultrasonic material results in exaggerated Doppler\ - \ effects and listeners hear the music bend and shift precisely with their movement.\ - \ There are no movement sensors, mappings, or feedback loops, yet the installation\ - \ is perceived as interactive due to the close entanglement of the listener with\ - \ sound phenomena. 
The dynamic quality of interaction emerges solely through the\ - \ listening faculties of the visitor, as an embodied sensory experience determined\ - \ by their orientation to sounds, physical movement, and perceptual behaviour.\ - \ This paper describes key influences on the installation, its ultrasonic technology,\ - \ the design of modified headphones, and the compositional approach.},\n address\ - \ = {The University of Auckland, New Zealand},\n articleno = {48},\n author =\ - \ {Robson, Nicole and McPherson, Andrew and Bryan-Kinns, Nick},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.376bc758},\n issn = {2220-4806},\n month = {jun},\n\ - \ pdf = {68.pdf},\n presentation-video = {https://www.youtube.com/watch?v=3D5S5moUvUA},\n\ - \ title = {Being With The Waves: An Ultrasonic Art Installation Enabling Rich\ - \ Interaction Without Sensors},\n url = {https://doi.org/10.21428%2F92fbeb44.376bc758},\n\ - \ year = {2022}\n}\n" + ID: Ferguson2006 + address: 'Paris, France' + author: 'Ferguson, Sam' + bibtex: "@inproceedings{Ferguson2006,\n address = {Paris, France},\n author = {Ferguson,\ + \ Sam},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176899},\n issn = {2220-4806},\n\ + \ keywords = {interactive sonification,music,sonification,sound visualization},\n\ + \ pages = {384--389},\n title = {Learning Musical Instrument Skills Through Interactive\ + \ Sonification},\n url = {http://www.nime.org/proceedings/2006/nime2006_384.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.376bc758 + doi: 10.5281/zenodo.1176899 issn: 2220-4806 - month: jun - pdf: 68.pdf - presentation-video: https://www.youtube.com/watch?v=3D5S5moUvUA - title: 'Being With The Waves: An Ultrasonic Art Installation 
Enabling Rich Interaction - Without Sensors' - url: https://doi.org/10.21428%2F92fbeb44.376bc758 - year: 2022 + keywords: 'interactive sonification,music,sonification,sound visualization' + pages: 384--389 + title: Learning Musical Instrument Skills Through Interactive Sonification + url: http://www.nime.org/proceedings/2006/nime2006_384.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_49 - abstract: 'This paper introduces micro-phenomenology, a research discipline for - exploring and uncovering the structures of lived experience, as a beneficial methodology - for studying and evaluating interactions with digital musical instruments. Compared - to other subjective methods, micro-phenomenology evokes and returns one to the - moment of experience, allowing access to dimensions and observations which may - not be recalled in reflection alone. We present a case study of five microphenomenological - interviews conducted with musicians about their experiences with existing digital - musical instruments. The interviews reveal deep, clear descriptions of different - modalities of synchronic moments in interaction, especially in tactile connections - and bodily sensations. We highlight the elements of interaction captured in these - interviews which would not have been revealed otherwise and the importance of - these elements in researching perception, understanding, interaction, and performance - with digital musical instruments.' - address: 'The University of Auckland, New Zealand' - articleno: 49 - author: 'Reed, Courtney N. 
and Nordmoen, Charlotte and Martelloni, Andrea and Lepri, - Giacomo and Robson, Nicole and Zayas-Garin, Eevee and Cotton, Kelsey and Mice, - Lia and McPherson, Andrew' - bibtex: "@inproceedings{NIME22_49,\n abstract = {This paper introduces micro-phenomenology,\ - \ a research discipline for exploring and uncovering the structures of lived experience,\ - \ as a beneficial methodology for studying and evaluating interactions with digital\ - \ musical instruments. Compared to other subjective methods, micro-phenomenology\ - \ evokes and returns one to the moment of experience, allowing access to dimensions\ - \ and observations which may not be recalled in reflection alone. We present a\ - \ case study of five microphenomenological interviews conducted with musicians\ - \ about their experiences with existing digital musical instruments. The interviews\ - \ reveal deep, clear descriptions of different modalities of synchronic moments\ - \ in interaction, especially in tactile connections and bodily sensations. We\ - \ highlight the elements of interaction captured in these interviews which would\ - \ not have been revealed otherwise and the importance of these elements in researching\ - \ perception, understanding, interaction, and performance with digital musical\ - \ instruments.},\n address = {The University of Auckland, New Zealand},\n articleno\ - \ = {49},\n author = {Reed, Courtney N. 
and Nordmoen, Charlotte and Martelloni,\ - \ Andrea and Lepri, Giacomo and Robson, Nicole and Zayas-Garin, Eevee and Cotton,\ - \ Kelsey and Mice, Lia and McPherson, Andrew},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.21428/92fbeb44.b304e4b1},\n issn = {2220-4806},\n month = {jun},\n pdf =\ - \ {69.pdf},\n presentation-video = {https://youtu.be/-Ket6l90S8I},\n title = {Exploring\ - \ Experiences with New Musical Instruments through Micro-phenomenology},\n url\ - \ = {https://doi.org/10.21428%2F92fbeb44.b304e4b1},\n year = {2022}\n}\n" + ID: Poepel2006 + abstract: 'In this paper, some of the more recent developments in musical instruments + related to the violin family are described, and analyzed according to several + criteria adapted from other publications. While it is impossible to cover all + such developments, we have tried to sample a variety of instruments from the last + decade or so, with a greater focus on those published in the computer music literature. + Experiences in the field of string players focusing on such developments are presented. + Conclusions are drawn in which further research into violin-related digital instruments + for string players may benefit from the presented criteria as well as the experiences. ' + address: 'Paris, France' + author: 'Poepel, Cornelius and Overholt, Dan' + bibtex: "@inproceedings{Poepel2006,\n abstract = {In this paper, some of the more\ + \ recent developments in musical instruments related to the violin family are\ + \ described, and analyzed according to several criteria adapted from other publications.\ + \ While it is impossible to cover all such developments, we have tried to sample\ + \ a variety of instruments from the last decade or so, with a greater focus on\ + \ those published in the computer music literature. Experiences in the field of\ + \ string players focusing on such developments are presented. 
Conclusions are\ + \ drawn in which further research into violin-related digital instruments for\ + \ string players may benefit from the presented criteria as well as the experiences.\ + \ },\n address = {Paris, France},\n author = {Poepel, Cornelius and Overholt,\ + \ Dan},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176985},\n issn = {2220-4806},\n\ + \ keywords = {Violin, viola, cello, bass, digital, electronic, synthesis, controller.\ + \ },\n pages = {390--395},\n title = {Recent Developments in Violin-related Digital\ + \ Musical Instruments: Where Are We and Where Are We Going?},\n url = {http://www.nime.org/proceedings/2006/nime2006_390.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.b304e4b1 + doi: 10.5281/zenodo.1176985 issn: 2220-4806 - month: jun - pdf: 69.pdf - presentation-video: https://youtu.be/-Ket6l90S8I - title: Exploring Experiences with New Musical Instruments through Micro-phenomenology - url: https://doi.org/10.21428%2F92fbeb44.b304e4b1 - year: 2022 + keywords: 'Violin, viola, cello, bass, digital, electronic, synthesis, controller. ' + pages: 390--395 + title: 'Recent Developments in Violin-related Digital Musical Instruments: Where + Are We and Where Are We Going?' + url: http://www.nime.org/proceedings/2006/nime2006_390.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_50 - abstract: 'This paper describes the 10,000 Instruments workshop, a collaborative - online event conceived to generate interface ideas and speculate on music technology - through open-ended artefacts and playful design explorations. We first present - the activity, setting its research and artistic scope. We then report on a selection - of outcomes created by workshop attendees, and examine the critical design statements - they convey. 
The paper concludes with reflections on the make-believe, whimsical - and troublemaking approach to instrument design adopted in the workshop. In particular, - we consider the ways this activity can support individuals’ creativity, unlock - shared musical visions and reveal unconventional perspectives on music technology - development.' - address: 'The University of Auckland, New Zealand' - articleno: 50 - author: 'Lepri, Giacomo and Bowers, John and Topley, Samantha and Stapleton, Paul - and Bennett, Peter and Andersen, Kristina and McPherson, Andrew' - bibtex: "@inproceedings{NIME22_50,\n abstract = {This paper describes the 10,000\ - \ Instruments workshop, a collaborative online event conceived to generate interface\ - \ ideas and speculate on music technology through open-ended artefacts and playful\ - \ design explorations. We first present the activity, setting its research and\ - \ artistic scope. We then report on a selection of outcomes created by workshop\ - \ attendees, and examine the critical design statements they convey. The paper\ - \ concludes with reflections on the make-believe, whimsical and troublemaking\ - \ approach to instrument design adopted in the workshop. 
In particular, we consider\ - \ the ways this activity can support individuals’ creativity, unlock shared musical\ - \ visions and reveal unconventional perspectives on music technology development.},\n\ - \ address = {The University of Auckland, New Zealand},\n articleno = {50},\n author\ - \ = {Lepri, Giacomo and Bowers, John and Topley, Samantha and Stapleton, Paul\ - \ and Bennett, Peter and Andersen, Kristina and McPherson, Andrew},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.21428/92fbeb44.9e7c9ba3},\n issn = {2220-4806},\n month\ - \ = {jun},\n pdf = {70.pdf},\n presentation-video = {https://youtu.be/dif8K23TR1Y},\n\ - \ title = {The 10,000 Instruments Workshop - (Im)practical Research for Critical\ - \ Speculation},\n url = {https://doi.org/10.21428%2F92fbeb44.9e7c9ba3},\n year\ - \ = {2022}\n}\n" + ID: Young2006 + abstract: 'In this paper we present progress of an ongoingcollaboration between + researchers at the MIT MediaLaboratory and the Royal Academy of Music (RAM). The + aimof this project is to further explore the expressive musicalpotential of the + Hyperbow, a custom music controller firstdesigned for use in violin performance. + Through the creationof new repertoire, we hope to stimulate the evolution of thisinterface, + advancing its usability and refining itscapabilities. In preparation for this + work, the Hyperbowsystem has been adapted for cello (acoustic and electric)performance. + The structure of our collaboration is described,and two of the pieces currently + in progress are presented.Feedback from the performers is also discussed, as well + asfuture plans.' + address: 'Paris, France' + author: 'Young, Diana and Nunn, Patrick and Vassiliev, Artem' + bibtex: "@inproceedings{Young2006,\n abstract = {In this paper we present progress\ + \ of an ongoingcollaboration between researchers at the MIT MediaLaboratory and\ + \ the Royal Academy of Music (RAM). 
The aimof this project is to further explore\ + \ the expressive musicalpotential of the Hyperbow, a custom music controller firstdesigned\ + \ for use in violin performance. Through the creationof new repertoire, we hope\ + \ to stimulate the evolution of thisinterface, advancing its usability and refining\ + \ itscapabilities. In preparation for this work, the Hyperbowsystem has been adapted\ + \ for cello (acoustic and electric)performance. The structure of our collaboration\ + \ is described,and two of the pieces currently in progress are presented.Feedback\ + \ from the performers is also discussed, as well asfuture plans.},\n address =\ + \ {Paris, France},\n author = {Young, Diana and Nunn, Patrick and Vassiliev, Artem},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177023},\n issn = {2220-4806},\n\ + \ keywords = {Cello, bow, controller, electroacoustic music, composition. },\n\ + \ pages = {396--401},\n title = {Composing for Hyperbow: A Collaboration Between\ + \ {MIT} and the Royal Academy of Music},\n url = {http://www.nime.org/proceedings/2006/nime2006_396.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.9e7c9ba3 + doi: 10.5281/zenodo.1177023 issn: 2220-4806 - month: jun - pdf: 70.pdf - presentation-video: https://youtu.be/dif8K23TR1Y - title: 'The 10,000 Instruments Workshop - (Im)practical Research for Critical Speculation' - url: https://doi.org/10.21428%2F92fbeb44.9e7c9ba3 - year: 2022 + keywords: 'Cello, bow, controller, electroacoustic music, composition. 
' + pages: 396--401 + title: 'Composing for Hyperbow: A Collaboration Between MIT and the Royal Academy + of Music' + url: http://www.nime.org/proceedings/2006/nime2006_396.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_51 - abstract: 'In this paper we present the development of a new gestural musical instrument, - the AirSticks 2.0. The AirSticks 2.0 combines the latest advances in sensor fusion - of Inertial Measurement Units (IMU) and low latency wireless data transmission - over Bluetooth Low Energy (BLE), to give an expressive wireless instrument capable - of triggering and manipulating discrete and continuous sound events in real-time. - We outline the design criteria for this new instrument that has evolved from previous - prototypes, give a technical overview of the custom hardware and software developed, - and present short videos of three distinct mappings that intuitively translate - movement into musical sounds.' - address: 'The University of Auckland, New Zealand' - articleno: 51 - author: 'Trolland, Sam and Ilsar, Alon and Frame, Ciaran and McCormack, Jon and - Wilson, Elliott' - bibtex: "@inproceedings{NIME22_51,\n abstract = {In this paper we present the development\ - \ of a new gestural musical instrument, the AirSticks 2.0. The AirSticks 2.0 combines\ - \ the latest advances in sensor fusion of Inertial Measurement Units (IMU) and\ - \ low latency wireless data transmission over Bluetooth Low Energy (BLE), to give\ - \ an expressive wireless instrument capable of triggering and manipulating discrete\ - \ and continuous sound events in real-time. 
We outline the design criteria for\ - \ this new instrument that has evolved from previous prototypes, give a technical\ - \ overview of the custom hardware and software developed, and present short videos\ - \ of three distinct mappings that intuitively translate movement into musical\ - \ sounds.},\n address = {The University of Auckland, New Zealand},\n articleno\ - \ = {51},\n author = {Trolland, Sam and Ilsar, Alon and Frame, Ciaran and McCormack,\ - \ Jon and Wilson, Elliott},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.c400bdc2},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {77.pdf},\n presentation-video\ - \ = {https://youtu.be/TnEzwGshr48},\n title = {{AirSticks} 2.0: Instrument Design\ - \ for Expressive Gestural Interaction},\n url = {https://doi.org/10.21428%2F92fbeb44.c400bdc2},\n\ - \ year = {2022}\n}\n" + ID: Bevilacqua2006 + address: 'Paris, France' + author: 'Bevilacqua, Fr\''{e}d\''{e}ric and Rasamimanana, Nicolas and Fl\''{e}ty, + Emmanuel and Lemouton, Serge and Baschet, Florence' + bibtex: "@inproceedings{Bevilacqua2006,\n address = {Paris, France},\n author =\ + \ {Bevilacqua, Fr\\'{e}d\\'{e}ric and Rasamimanana, Nicolas and Fl\\'{e}ty, Emmanuel\ + \ and Lemouton, Serge and Baschet, Florence},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176871},\n issn = {2220-4806},\n pages = {402--406},\n title\ + \ = {The Augmented Violin Project: Research, Composition and Performance Report},\n\ + \ url = {http://www.nime.org/proceedings/2006/nime2006_402.pdf},\n year = {2006}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.c400bdc2 + doi: 10.5281/zenodo.1176871 issn: 2220-4806 - month: jun - pdf: 77.pdf - presentation-video: https://youtu.be/TnEzwGshr48 - title: 'AirSticks 2.0: 
Instrument Design for Expressive Gestural Interaction' - url: https://doi.org/10.21428%2F92fbeb44.c400bdc2 - year: 2022 + pages: 402--406 + title: 'The Augmented Violin Project: Research, Composition and Performance Report' + url: http://www.nime.org/proceedings/2006/nime2006_402.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_52 - abstract: 'Musical grid interfaces are becoming an industry standard for interfaces - that allow interaction with music software, electronics, or instruments. However, - there are no clearly defined design standards or guidelines, resulting in grid - interfaces being a multitude of interfaces with competing design approaches, making - these already abstract UIs even more challenging. In this paper, we compare the - co-existing design approaches of UIs for grid interfaces used by commercial and - non-commercial developers and designers, and present the results of three experiments - that tested the benefits of co-existing design approaches to mitigate some of - the inherent design challenges.' - address: 'The University of Auckland, New Zealand' - articleno: 52 - author: 'Rossmy, Beat and Rauh, Maximilian and Wiethoff, Alexander' - bibtex: "@inproceedings{NIME22_52,\n abstract = {Musical grid interfaces are becoming\ - \ an industry standard for interfaces that allow interaction with music software,\ - \ electronics, or instruments. However, there are no clearly defined design standards\ - \ or guidelines, resulting in grid interfaces being a multitude of interfaces\ - \ with competing design approaches, making these already abstract UIs even more\ - \ challenging. 
In this paper, we compare the co-existing design approaches of\ - \ UIs for grid interfaces used by commercial and non-commercial developers and\ - \ designers, and present the results of three experiments that tested the benefits\ - \ of co-existing design approaches to mitigate some of the inherent design challenges.},\n\ - \ address = {The University of Auckland, New Zealand},\n articleno = {52},\n author\ - \ = {Rossmy, Beat and Rauh, Maximilian and Wiethoff, Alexander},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.21428/92fbeb44.db84ecd0},\n issn = {2220-4806},\n month = {jun},\n\ - \ pdf = {86.pdf},\n presentation-video = {https://www.youtube.com/watch?v=JF514EWYiQ8},\n\ - \ title = {Towards User Interface Guidelines for Musical Grid Interfaces},\n url\ - \ = {https://doi.org/10.21428%2F92fbeb44.db84ecd0},\n year = {2022}\n}\n" + ID: Kimura2006 + abstract: 'This is a description of a demonstration, regarding theuse of auditory + illusions and psycho-acoustic phenomenonused in the interactive work of Jean-Claude + Risset, writtenfor violinist Mari Kimura.' + address: 'Paris, France' + author: 'Kimura, Mari and Risset, Jean-Claude' + bibtex: "@inproceedings{Kimura2006,\n abstract = {This is a description of a demonstration,\ + \ regarding theuse of auditory illusions and psycho-acoustic phenomenonused in\ + \ the interactive work of Jean-Claude Risset, writtenfor violinist Mari Kimura.},\n\ + \ address = {Paris, France},\n author = {Kimura, Mari and Risset, Jean-Claude},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176939},\n issn = {2220-4806},\n\ + \ keywords = {Violin, psycho-acoustic phenomena, auditory illusions, sig- nal\ + \ processing, subharmonics, Risset, Kimura. 
},\n pages = {407--408},\n title =\ + \ {Auditory Illusion and Violin: Demonstration of a Work by Jean-Claude Risset\ + \ Written for Mari Kimura},\n url = {http://www.nime.org/proceedings/2006/nime2006_407.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.db84ecd0 + doi: 10.5281/zenodo.1176939 issn: 2220-4806 - month: jun - pdf: 86.pdf - presentation-video: https://www.youtube.com/watch?v=JF514EWYiQ8 - title: Towards User Interface Guidelines for Musical Grid Interfaces - url: https://doi.org/10.21428%2F92fbeb44.db84ecd0 - year: 2022 + keywords: 'Violin, psycho-acoustic phenomena, auditory illusions, sig- nal processing, + subharmonics, Risset, Kimura. ' + pages: 407--408 + title: 'Auditory Illusion and Violin: Demonstration of a Work by Jean-Claude Risset + Written for Mari Kimura' + url: http://www.nime.org/proceedings/2006/nime2006_407.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_53 - abstract: 'Applications for musical grid interfaces are designed without any established - guidelines or defined design rules. However, within applications of different - manufacturers, musicians, and designers, common patterns and conventions can be - observed which might be developing towards unofficial standards. In this survey - we analyzed 40 applications, instruments, or controllers and collected 18 types - of recurring UI elements, which are clustered, described, and interactively presented - in this survey. We further postulate 3 theses which standard UI elements should - meet and propose novel UI elements deduced from WIMP standards.' - address: 'The University of Auckland, New Zealand' - articleno: 53 - author: 'Rossmy, Beat' - bibtex: "@inproceedings{NIME22_53,\n abstract = {Applications for musical grid interfaces\ - \ are designed without any established guidelines or defined design rules. 
However,\ - \ within applications of different manufacturers, musicians, and designers, common\ - \ patterns and conventions can be observed which might be developing towards unofficial\ - \ standards. In this survey we analyzed 40 applications, instruments, or controllers\ - \ and collected 18 types of recurring UI elements, which are clustered, described,\ - \ and interactively presented in this survey. We further postulate 3 theses which\ - \ standard UI elements should meet and propose novel UI elements deduced from\ - \ WIMP standards.},\n address = {The University of Auckland, New Zealand},\n articleno\ - \ = {53},\n author = {Rossmy, Beat},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.563bfea9},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {87.pdf},\n presentation-video\ - \ = {https://www.youtube.com/watch?v=CPHY4_G_LR0},\n title = {Buttons, Sliders,\ - \ and Keys {\\textendash} A Survey on Musical Grid Interface Standards},\n url\ - \ = {https://doi.org/10.21428%2F92fbeb44.563bfea9},\n year = {2022}\n}\n" + ID: Freed2006a + abstract: 'Software and hardware enhancements to an electric 6-stringcello are described + with a focus on a new mechanical tuningdevice, a novel rotary sensor for bow interaction + and controlstrategies to leverage a suite of polyphonic soundprocessing effects.' + address: 'Paris, France' + author: 'Freed, Adrian and Wessel, David and Zbyszynski, Michael and Uitti, Frances + M.' 
+ bibtex: "@inproceedings{Freed2006a,\n abstract = {Software and hardware enhancements\ + \ to an electric 6-stringcello are described with a focus on a new mechanical\ + \ tuningdevice, a novel rotary sensor for bow interaction and controlstrategies\ + \ to leverage a suite of polyphonic soundprocessing effects.},\n address = {Paris,\ + \ France},\n author = {Freed, Adrian and Wessel, David and Zbyszynski, Michael\ + \ and Uitti, Frances M.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176905},\n\ + \ issn = {2220-4806},\n keywords = {Cello, chordophone, FSR, Rotary Absolute Position\ + \ Encoder, Double Bowing, triple stops, double stops, convolution. },\n pages\ + \ = {409--413},\n title = {Augmenting the Cello},\n url = {http://www.nime.org/proceedings/2006/nime2006_409.pdf},\n\ + \ year = {2006}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.563bfea9 + doi: 10.5281/zenodo.1176905 issn: 2220-4806 - month: jun - pdf: 87.pdf - presentation-video: https://www.youtube.com/watch?v=CPHY4_G_LR0 - title: 'Buttons, Sliders, and Keys – A Survey on Musical Grid Interface Standards' - url: https://doi.org/10.21428%2F92fbeb44.563bfea9 - year: 2022 + keywords: 'Cello, chordophone, FSR, Rotary Absolute Position Encoder, Double Bowing, + triple stops, double stops, convolution. ' + pages: 409--413 + title: Augmenting the Cello + url: http://www.nime.org/proceedings/2006/nime2006_409.pdf + year: 2006 - ENTRYTYPE: inproceedings - ID: NIME22_54 - abstract: 'Historically marginalised instruments witness and bear vital stories - that can deeply affect identity and galvanise communities when revitalised. 
We - present the protolangspil as a contemporary interpretation of the langspil, an - Icelandic monochord-like folk instrument, and describe its agential and performative - contributions to the first Icelandic NIME research lab. This paper describes how - the proto-langspil has served as an instrument in establishing the research methodology - of our new lab and concretised the research agenda via a series of encounters - with music performers and composers, luthiers, anthropologists, musicologists, - designers and philosophers. These encounters have informed and challenged our - research practices, mapped our surroundings, and embedded us in the local social - fabric. We share our proto-langspil for replication, and reflect on encounters - as a methodology framing mechanism that eschews the more traditional empirical - approaches in HCI. We conclude with a final provocation for NIME researchers to - embrace AI research with an open mind.' - address: 'The University of Auckland, New Zealand' - articleno: 54 - author: 'Armitage, Jack and Magnusson, Thor and Shepardson, Victor and Ulfarsson, - Halldor' - bibtex: "@inproceedings{NIME22_54,\n abstract = {Historically marginalised instruments\ - \ witness and bear vital stories that can deeply affect identity and galvanise\ - \ communities when revitalised. We present the protolangspil as a contemporary\ - \ interpretation of the langspil, an Icelandic monochord-like folk instrument,\ - \ and describe its agential and performative contributions to the first Icelandic\ - \ NIME research lab. This paper describes how the proto-langspil has served as\ - \ an instrument in establishing the research methodology of our new lab and concretised\ - \ the research agenda via a series of encounters with music performers and composers,\ - \ luthiers, anthropologists, musicologists, designers and philosophers. 
These\ - \ encounters have informed and challenged our research practices, mapped our surroundings,\ - \ and embedded us in the local social fabric. We share our proto-langspil for\ - \ replication, and reflect on encounters as a methodology framing mechanism that\ - \ eschews the more traditional empirical approaches in HCI. We conclude with a\ - \ final provocation for NIME researchers to embrace AI research with an open mind.},\n\ - \ address = {The University of Auckland, New Zealand},\n articleno = {54},\n author\ - \ = {Armitage, Jack and Magnusson, Thor and Shepardson, Victor and Ulfarsson,\ - \ Halldor},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.6178f575},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {88.pdf},\n presentation-video\ - \ = {https://youtu.be/8tRTF1lB6Hg},\n title = {The Proto-Langspil: Launching an\ - \ Icelandic {NIME} Research Lab with the Help of a Marginalised Instrument},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.6178f575},\n year = {2022}\n}\n" + ID: Arfib2002 + abstract: In this paper we describe the digital emulation of a optical photosonic + instrument. First we briefly describe theoptical instrument which is the basis + of this emulation.Then we give a musical description of the instrument implementation + and its musical use and we concludewith the "duo" possibility of such an emulation. + address: 'Dublin, Ireland' + author: 'Arfib, Daniel and Dudon, Jacques' + bibtex: "@inproceedings{Arfib2002,\n abstract = {In this paper we describe the digital\ + \ emulation of a optical photosonic instrument. 
First we briefly describe theoptical\ + \ instrument which is the basis of this emulation.Then we give a musical description\ + \ of the instrument implementation and its musical use and we concludewith the\ + \ \"duo\" possibility of such an emulation.},\n address = {Dublin, Ireland},\n\ + \ author = {Arfib, Daniel and Dudon, Jacques},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n date =\ + \ {24-26 May, 2002},\n doi = {10.5281/zenodo.1176388},\n issn = {2220-4806},\n\ + \ keywords = {Photosonic synthesis, digital emulation, Max-Msp, gestural devices.},\n\ + \ pages = {1--4},\n title = {A Digital Emulator of the Photosonic Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_001.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.6178f575 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176388 issn: 2220-4806 - month: jun - pdf: 88.pdf - presentation-video: https://youtu.be/8tRTF1lB6Hg - title: 'The Proto-Langspil: Launching an Icelandic NIME Research Lab with the Help - of a Marginalised Instrument' - url: https://doi.org/10.21428%2F92fbeb44.6178f575 - year: 2022 + keywords: 'Photosonic synthesis, digital emulation, Max-Msp, gestural devices.' + pages: 1--4 + title: A Digital Emulator of the Photosonic Instrument + url: http://www.nime.org/proceedings/2002/nime2002_001.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: NIME22_55 - abstract: 'The lived body, or soma, is the designation for the phenomenological - experience of being a body, rather than simply a corporeal entity. Bodily knowledge, - which evolves through bodily awareness, carries the lived body’s reflectivity. - In this paper, such considerations are put in the context of previous work at - NIME, specifically that revolving around with the vocal tract or the voice, due - to its singular relation with embodiment. 
We understand that focusing on somaesthetics - allows for novel ways of engaging with technology as well as highlighting biases - that might go unnoticed otherwise. We present an inexpensive application of a - respiration sensor that emerges from the aforementioned conceptualisations. Lastly, - we reflect on how to better frame the role of bodily awareness in NIME.' - address: 'The University of Auckland, New Zealand' - articleno: 55 - author: 'Tapparo, Carla Sophie and Zappi, Victor' - bibtex: "@inproceedings{NIME22_55,\n abstract = {The lived body, or soma, is the\ - \ designation for the phenomenological experience of being a body, rather than\ - \ simply a corporeal entity. Bodily knowledge, which evolves through bodily awareness,\ - \ carries the lived body’s reflectivity. In this paper, such considerations are\ - \ put in the context of previous work at NIME, specifically that revolving around\ - \ with the vocal tract or the voice, due to its singular relation with embodiment.\ - \ We understand that focusing on somaesthetics allows for novel ways of engaging\ - \ with technology as well as highlighting biases that might go unnoticed otherwise.\ - \ We present an inexpensive application of a respiration sensor that emerges from\ - \ the aforementioned conceptualisations. 
Lastly, we reflect on how to better frame\ - \ the role of bodily awareness in NIME.},\n address = {The University of Auckland,\ - \ New Zealand},\n articleno = {55},\n author = {Tapparo, Carla Sophie and Zappi,\ - \ Victor},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.21428/92fbeb44.7e04cfc8},\n issn = {2220-4806},\n\ - \ month = {jun},\n pdf = {99.pdf},\n presentation-video = {https://youtu.be/GEndgifZmkI},\n\ - \ title = {Bodily Awareness Through {NIMEs}: Deautomatising Music Making Processes},\n\ - \ url = {https://doi.org/10.21428%2F92fbeb44.7e04cfc8},\n year = {2022}\n}\n" + ID: Baumann2002 + abstract: 'In this paper we will have a short overview of some of the systems we + have been developing as an independent company over the last years. We will focus + especially on our latest experiments in developing wireless gestural systems using + the camera as an interactive tool to generate 2D and 3D visuals and music. ' + address: 'Dublin, Ireland' + author: 'Baumann, Alain and Sánchez, Rosa' + bibtex: "@inproceedings{Baumann2002,\n abstract = {In this paper we will have a\ + \ short overview of some of the systems we have been developing as an independent\ + \ company over the last years. We will focus especially on our latest experiments\ + \ in developing wireless gestural systems using the camera as an interactive tool\ + \ to generate 2D and 3D visuals and music. 
},\n address = {Dublin, Ireland},\n\ + \ author = {Baumann, Alain and S\\'{a}nchez, Rosa},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176390},\n issn = {2220-4806},\n\ + \ keywords = {interdisciplinary applications of new instruments, mixed media instruments},\n\ + \ pages = {5--9},\n title = {Interdisciplinary Applications of New Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_005.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.21428/92fbeb44.7e04cfc8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176390 issn: 2220-4806 - month: jun - pdf: 99.pdf - presentation-video: https://youtu.be/GEndgifZmkI - title: 'Bodily Awareness Through NIMEs: Deautomatising Music Making Processes' - url: https://doi.org/10.21428%2F92fbeb44.7e04cfc8 - year: 2022 + keywords: 'interdisciplinary applications of new instruments, mixed media instruments' + pages: 5--9 + title: Interdisciplinary Applications of New Instruments + url: http://www.nime.org/proceedings/2002/nime2002_005.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: NIME22_56 - abstract: 'The Kanchay_Yupana// is an open-source NIME for the generation of rhythms, - inspired by the Andean yupana: a tangible board similar to an abacus of different - sizes and materials with a system of carved geometric boxes into which seeds or - pebbles were disposed to perform arithmetic calculations, used since pre-colonial - times. As in the traditional artifact, the interaction of this new electronic - yupana is based on the arrangement of seeds on a specially designed board with - boxes, holes, and photoresistors. The shadow detected by the seeds’ positioning - sends real-time motion data in MIDI messages to Pure Data in a drum machine patch. 
- As a result, percussion samples of Andean instruments fill pulses in a four-quarter - beat, generating patterns that can be transformed live into different rhythms. - This interface complements the Electronic_Khipu_ (a previous NIME based on an - Andean khipu) by producing the rhythmic component. This experience unites ancestral - and contemporary technologies in experimental sound performance following the - theoretical-practical research on the vindication of the memory in ancestral Andean - technological interfaces made invisible by colonization, reusing them from a decolonial - perspective in NIMEs.' - address: 'The University of Auckland, New Zealand' - articleno: 56 - author: 'Cadavid Hinojosa, Laddy Patricia' - bibtex: "@inproceedings{NIME22_56,\n abstract = {The Kanchay_Yupana// is an open-source\ - \ NIME for the generation of rhythms, inspired by the Andean yupana: a tangible\ - \ board similar to an abacus of different sizes and materials with a system of\ - \ carved geometric boxes into which seeds or pebbles were disposed to perform\ - \ arithmetic calculations, used since pre-colonial times. As in the traditional\ - \ artifact, the interaction of this new electronic yupana is based on the arrangement\ - \ of seeds on a specially designed board with boxes, holes, and photoresistors.\ - \ The shadow detected by the seeds’ positioning sends real-time motion data in\ - \ MIDI messages to Pure Data in a drum machine patch. As a result, percussion\ - \ samples of Andean instruments fill pulses in a four-quarter beat, generating\ - \ patterns that can be transformed live into different rhythms. This interface\ - \ complements the Electronic_Khipu_ (a previous NIME based on an Andean khipu)\ - \ by producing the rhythmic component. 
This experience unites ancestral and contemporary\ - \ technologies in experimental sound performance following the theoretical-practical\ - \ research on the vindication of the memory in ancestral Andean technological\ - \ interfaces made invisible by colonization, reusing them from a decolonial perspective\ - \ in NIMEs.},\n address = {The University of Auckland, New Zealand},\n articleno\ - \ = {56},\n author = {Cadavid Hinojosa, Laddy Patricia},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ copyright = {Creative Commons Attribution 4.0 International},\n doi = {10.21428/92fbeb44.61d01269},\n\ - \ issn = {2220-4806},\n month = {jun},\n pdf = {49.pdf},\n presentation-video\ - \ = {https://youtu.be/MpMFL6R14kQ},\n title = {Kanchay_Yupana{\\slash \\slash}:\ - \ Tangible rhythm sequencer inspired by ancestral Andean technologies},\n url\ - \ = {https://doi.org/10.21428/92fbeb44.61d01269},\n year = {2022}\n}\n" + ID: Bernard2002 + abstract: 'This paper describes the design and development of several musical instruments + and MIDI controllers built byDavid Bernard (as part of The Sound Surgery project:www.thesoundsurgery.co.uk) + and used in club performances around Glasgow during 1995-2002. It argues that + changing technologies and copyright are shifting ourunderstanding of music from + "live art" to "recorded medium" whilst blurring the boundaries between sound and + visual production.' + address: 'Dublin, Ireland' + author: 'Bernard, David' + bibtex: "@inproceedings{Bernard2002,\n abstract = {This paper describes the design\ + \ and development of several musical instruments and MIDI controllers built byDavid\ + \ Bernard (as part of The Sound Surgery project:www.thesoundsurgery.co.uk) and\ + \ used in club performances around Glasgow during 1995-2002. 
It argues that changing\ + \ technologies and copyright are shifting ourunderstanding of music from \"live\ + \ art\" to \"recorded medium\" whilst blurring the boundaries between sound and\ + \ visual production.},\n address = {Dublin, Ireland},\n author = {Bernard, David},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176392},\n\ + \ issn = {2220-4806},\n keywords = {Live electronic music, experimental instruments,\ + \ MIDI controllers, audio-visual synchronisation, copyright, SKINS digital hand\ + \ drum.},\n pages = {10--11},\n title = {Experimental Controllers for Live Electronic\ + \ Music Performance (vs. Copyright).},\n url = {http://www.nime.org/proceedings/2002/nime2002_010.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - copyright: Creative Commons Attribution 4.0 International - doi: 10.21428/92fbeb44.61d01269 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176392 issn: 2220-4806 - month: jun - pdf: 49.pdf - presentation-video: https://youtu.be/MpMFL6R14kQ - title: 'Kanchay_Yupana/ /: Tangible rhythm sequencer inspired by ancestral Andean - technologies' - url: https://doi.org/10.21428/92fbeb44.61d01269 - year: 2022 + keywords: 'Live electronic music, experimental instruments, MIDI controllers, audio-visual + synchronisation, copyright, SKINS digital hand drum.' + pages: 10--11 + title: Experimental Controllers for Live Electronic Music Performance (vs. Copyright). + url: http://www.nime.org/proceedings/2002/nime2002_010.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Brandtsegg2018 - abstract: 'This paper explores working methods and instrument design for musical - performance sessions (studio and live) where cross-adaptive techniques for audio - processing are utilized. 
Cross-adaptive processing uses feature extraction methods - and digital processing to allow the actions of one acoustic instrument to influence - the timbre of another. Even though the physical interface for the musician is - the familiar acoustic instrument, the musical dimensions controlled with the actions - on the instrument have been expanded radically. For this reason, and when used - in live performance, the cross-adaptive methods constitute new interfaces for - musical expression. Not only do the musician control his or her own instrumental - expression, but the instrumental actions directly influence the timbre of another - instrument in the ensemble, while their own instrument''s sound is modified by - the actions of other musicians. In the present paper we illustrate and discuss - some design issues relating to the configuration and composition of such tools - for different musical situations. Such configurations include among other things - the mapping of modulators, the choice of applied effects and processing methods.' - address: 'Blacksburg, Virginia, USA' - author: Oeyvind Brandtsegg and Trond Engum and Bernt Isak Wærstad - bibtex: "@inproceedings{Brandtsegg2018,\n abstract = {This paper explores working\ - \ methods and instrument design for musical performance sessions (studio and live)\ - \ where cross-adaptive techniques for audio processing are utilized. Cross-adaptive\ - \ processing uses feature extraction methods and digital processing to allow the\ - \ actions of one acoustic instrument to influence the timbre of another. Even\ - \ though the physical interface for the musician is the familiar acoustic instrument,\ - \ the musical dimensions controlled with the actions on the instrument have been\ - \ expanded radically. For this reason, and when used in live performance, the\ - \ cross-adaptive methods constitute new interfaces for musical expression. 
Not\ - \ only do the musician control his or her own instrumental expression, but the\ - \ instrumental actions directly influence the timbre of another instrument in\ - \ the ensemble, while their own instrument's sound is modified by the actions\ - \ of other musicians. In the present paper we illustrate and discuss some design\ - \ issues relating to the configuration and composition of such tools for different\ - \ musical situations. Such configurations include among other things the mapping\ - \ of modulators, the choice of applied effects and processing methods.},\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {Oeyvind Brandtsegg and Trond Engum\ - \ and Bernt Isak Wærstad},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302649},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {1--6},\n publisher = {Virginia\ - \ Tech},\n title = {Working methods and instrument design for cross-adaptive sessions},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0001.pdf},\n year\ - \ = {2018}\n}\n" + ID: Blaine2002 + abstract: 'This paper discusses the Jam-O-Drum multi-player musical controller and + its adaptation into a gaming controller interface known as the Jam-O-Whirl. The + Jam-O-World project positioned these two controller devices in a dedicated projection + environment that enabled novice players to participate in immersive musical gaming + experiences. Players'' actions, detected via embedded sensors in an integrated + tabletop surface, control game play, real-time computer graphics and musical interaction. + Jam-O-World requires physical and social interaction as well as collaboration + among players. 
' + address: 'Dublin, Ireland' + author: 'Blaine, Tina and Forlines, Clifton' + bibtex: "@inproceedings{Blaine2002,\n abstract = {This paper discusses the Jam-O-Drum\ + \ multi-player musical controller and its adaptation into a gaming controller\ + \ interface known as the Jam-O-Whirl. The Jam-O-World project positioned these\ + \ two controller devices in a dedicated projection environment that enabled novice\ + \ players to participate in immersive musical gaming experiences. Players' actions,\ + \ detected via embedded sensors in an integrated tabletop surface, control game\ + \ play, real-time computer graphics and musical interaction. Jam-O-World requires\ + \ physical and social interaction as well as collaboration among players. },\n\ + \ address = {Dublin, Ireland},\n author = {Blaine, Tina and Forlines, Clifton},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176394},\n\ + \ issn = {2220-4806},\n keywords = {Collaboration, computer graphics, embedded\ + \ sensors, gaming controller, immersive musical gaming experiences, musical controller,\ + \ multi-player, novice, social interaction.},\n pages = {12--17},\n title = {JAM-O-WORLD:\ + \ Evolution of the Jam-O-Drum Multi-player Musical Controller into the Jam-O-Whirl\ + \ Gaming Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_012.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302649 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176394 issn: 2220-4806 - month: June - pages: 1--6 - publisher: Virginia Tech - title: Working methods and instrument design for cross-adaptive sessions - url: http://www.nime.org/proceedings/2018/nime2018_paper0001.pdf - year: 2018 + keywords: 'Collaboration, 
computer graphics, embedded sensors, gaming controller, + immersive musical gaming experiences, musical controller, multi-player, novice, + social interaction.' + pages: 12--17 + title: 'JAM-O-WORLD: Evolution of the Jam-O-Drum Multi-player Musical Controller + into the Jam-O-Whirl Gaming Interface' + url: http://www.nime.org/proceedings/2002/nime2002_012.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Egozy2018 - abstract: "*12* is chamber music work composed with the goal of letting audience\ - \ members have an engaging, individualized, and influential role in live music\ - \ performance using their mobile phones as custom tailored musical instruments.\ - \ The goals of direct music making, meaningful communication, intuitive interfaces,\ - \ and technical transparency led to a design that purposefully limits the number\ - \ of participating audience members, balances the tradeoffs between interface\ - \ simplicity and control, and prioritizes the role of a graphics and animation\ - \ display system that is both functional and aesthetically integrated. Survey\ - \ results from the audience and stage musicians show a successful and engaging\ - \ experience, and also illuminate the path towards future improvements." - address: 'Blacksburg, Virginia, USA' - author: Eran Egozy and Eun Young Lee - bibtex: "@inproceedings{Egozy2018,\n abstract = {*12* is chamber music work composed\ - \ with the goal of letting audience members have an engaging, individualized,\ - \ and influential role in live music performance using their mobile phones as\ - \ custom tailored musical instruments. 
The goals of direct music making, meaningful\ - \ communication, intuitive interfaces, and technical transparency led to a design\ - \ that purposefully limits the number of participating audience members, balances\ - \ the tradeoffs between interface simplicity and control, and prioritizes the\ - \ role of a graphics and animation display system that is both functional and\ - \ aesthetically integrated. Survey results from the audience and stage musicians\ - \ show a successful and engaging experience, and also illuminate the path towards\ - \ future improvements.},\n address = {Blacksburg, Virginia, USA},\n author = {Eran\ - \ Egozy and Eun Young Lee},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302655},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {7--12},\n publisher = {Virginia\ - \ Tech},\n title = {*12*: Mobile Phone-Based Audience Participation in a Chamber\ - \ Music Performance},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0002.pdf},\n\ - \ year = {2018}\n}\n" + ID: Bongers2002 + abstract: 'The Video-Organ is an instrument for the live performance of audio-visual + material. To design an interface we apply a modular approach, in an attempt to + split up the complex task of finding physical interfaces and mappings to control + sound and video as generated by the computer. Generally, most modules, or instrumentlets + as they are called, consist of a human interface element mapped to a certain effect. + To describe the instrumentlets a design space is used consisting of the parameters + degrees of freedom, range and precision. 
This paper is addressing the notion that + traditional approaches to composition are challenged and changed in this situation, + where the material is both audio and visual, and where the design and development + of an instrument becomes involved in the process of performing and composing.' + address: 'Dublin, Ireland' + author: 'Bongers, Bert and Harris, Yolande' + bibtex: "@inproceedings{Bongers2002,\n abstract = {The Video-Organ is an instrument\ + \ for the live performance of audio-visual material. To design an interface we\ + \ apply a modular approach, in an attempt to split up the complex task of finding\ + \ physical interfaces and mappings to control sound and video as generated by\ + \ the computer. Generally, most modules, or instrumentlets as they are called,\ + \ consist of a human interface element mapped to a certain effect. To describe\ + \ the instrumentlets a design space is used consisting of the parameters degrees\ + \ of freedom, range and precision. This paper is addressing the notion that traditional\ + \ approaches to composition are challenged and changed in this situation, where\ + \ the material is both audio and visual, and where the design and development\ + \ of an instrument becomes involved in the process of performing and composing.},\n\ + \ address = {Dublin, Ireland},\n author = {Bongers, Bert and Harris, Yolande},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176396},\n\ + \ issn = {2220-4806},\n pages = {18--23},\n title = {A Structured Instrument Design\ + \ Approach: The Video-Organ},\n url = {http://www.nime.org/proceedings/2002/nime2002_018.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302655 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 
10.5281/zenodo.1176396 issn: 2220-4806 - month: June - pages: 7--12 - publisher: Virginia Tech - title: "*12*: Mobile Phone-Based Audience Participation in a Chamber Music Performance" - url: http://www.nime.org/proceedings/2018/nime2018_paper0002.pdf - year: 2018 + pages: 18--23 + title: 'A Structured Instrument Design Approach: The Video-Organ' + url: http://www.nime.org/proceedings/2002/nime2002_018.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Lind2018 - abstract: 'The Max Maestro – an animated music notation system was developed to - enable the exploration of artistic possibilities for composition and performance - practices within the field of contemporary art music, more specifically, to enable - a large crowd of non-professional performers regardless of their musical background - to perform a fixed music compositions written in multiple individual parts. Furthermore, - the Max Maestro was developed to facilitate concert hall performances where non-professional - performers could be synchronised with an electronic music part. This paper presents - the background, the content and the artistic ideas with the Max Maestro system - and gives two examples of live concert hall performances where the Max Maestro - was used. An artistic research approach with an auto ethnographic method was adopted - for the study. This paper contributes with new knowledge to the field of animated - music notation.' - address: 'Blacksburg, Virginia, USA' - author: Anders Lind - bibtex: "@inproceedings{Lind2018,\n abstract = {The Max Maestro – an animated music\ - \ notation system was developed to enable the exploration of artistic possibilities\ - \ for composition and performance practices within the field of contemporary art\ - \ music, more specifically, to enable a large crowd of non-professional performers\ - \ regardless of their musical background to perform a fixed music compositions\ - \ written in multiple individual parts. 
Furthermore, the Max Maestro was developed\ - \ to facilitate concert hall performances where non-professional performers could\ - \ be synchronised with an electronic music part. This paper presents the background,\ - \ the content and the artistic ideas with the Max Maestro system and gives two\ - \ examples of live concert hall performances where the Max Maestro was used. An\ - \ artistic research approach with an auto ethnographic method was adopted for\ - \ the study. This paper contributes with new knowledge to the field of animated\ - \ music notation.},\n address = {Blacksburg, Virginia, USA},\n author = {Anders\ - \ Lind},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1302657},\n editor = {Luke\ - \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ - \ {2220-4806},\n month = {June},\n pages = {13--18},\n publisher = {Virginia Tech},\n\ - \ title = {Animated Notation in Multiple Parts for Crowd of Non-professional Performers},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0003.pdf},\n year\ - \ = {2018}\n}\n" + ID: Burtner2002 + abstract: 'Noisegate 67 was the first fully interactive composition written for + the Computer Metasaxophone, a new computer controller interface for electroacoustic + music. The Metasaxophone is an acoustic tenor saxophone retrofitted with an onboard + computer microprocessor and an array of sensors that convert performance data + into MIDI control messages. While maintaining full acoustic functionality the + Metasaxophone is a versatile MIDI controller. This paper discusses the compositionally + driven technical and aesthetic concerns that went into building the Metasaxophone, + and the resulting aesthetic implementations in Noisegate 67. 
By juxtaposing the + compositional approach to the saxophone before and after the electronic enhancements + an attempt is made to expose working paradigms of composition for metainstruments.' + address: 'Dublin, Ireland' + author: 'Burtner, Matthew' + bibtex: "@inproceedings{Burtner2002,\n abstract = {Noisegate 67 was the first fully\ + \ interactive composition written for the Computer Metasaxophone, a new computer\ + \ controller interface for electroacoustic music. The Metasaxophone is an acoustic\ + \ tenor saxophone retrofitted with an onboard computer microprocessor and an array\ + \ of sensors that convert performance data into MIDI control messages. While maintaining\ + \ full acoustic functionality the Metasaxophone is a versatile MIDI controller.\ + \ This paper discusses the compositionally driven technical and aesthetic concerns\ + \ that went into building the Metasaxophone, and the resulting aesthetic implementations\ + \ in Noisegate 67. By juxtaposing the compositional approach to the saxophone\ + \ before and after the electronic enhancements an attempt is made to expose working\ + \ paradigms of composition for metainstruments.},\n address = {Dublin, Ireland},\n\ + \ author = {Burtner, Matthew},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ + \ doi = {10.5281/zenodo.1176398},\n issn = {2220-4806},\n pages = {24--29},\n\ + \ title = {Noisegate 67 for Metasaxophone: Composition and Performance Considerations\ + \ of a New Computer Music Controller},\n url = {http://www.nime.org/proceedings/2002/nime2002_024.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302657 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176398 issn: 2220-4806 - month: June - pages: 13--18 - publisher: 
Virginia Tech - title: Animated Notation in Multiple Parts for Crowd of Non-professional Performers - url: http://www.nime.org/proceedings/2018/nime2018_paper0003.pdf - year: 2018 + pages: 24--29 + title: 'Noisegate 67 for Metasaxophone: Composition and Performance Considerations + of a New Computer Music Controller' + url: http://www.nime.org/proceedings/2002/nime2002_024.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Brown2018 - abstract: 'Musebots are autonomous musical agents that interact with other musebots - to produce music. Inaugurated in 2015, musebots are now an established practice - in the field of musical metacreation, which aims to automate aspects of creative - practice. Originally musebot development focused on software-only ensembles of - musical agents, coded by a community of developers. More recent experiments have - explored humans interfacing with musebot ensembles in various ways: including - through electronic interfaces in which parametric control of high-level musebot - parameters are used; message-based interfaces which allow human users to communicate - with musebots in their own language; and interfaces through which musebots have - jammed with human musicians. Here we report on the recent developments of human - interaction with musebot ensembles and reflect on some of the implications of - these developments for the design of metacreative music systems.' - address: 'Blacksburg, Virginia, USA' - author: Andrew R. Brown and Matthew Horrigan and Arne Eigenfeldt and Toby Gifford - and Daniel Field and Jon McCormack - bibtex: "@inproceedings{Brown2018,\n abstract = {Musebots are autonomous musical\ - \ agents that interact with other musebots to produce music. Inaugurated in 2015,\ - \ musebots are now an established practice in the field of musical metacreation,\ - \ which aims to automate aspects of creative practice. 
Originally musebot development\ - \ focused on software-only ensembles of musical agents, coded by a community of\ - \ developers. More recent experiments have explored humans interfacing with musebot\ - \ ensembles in various ways: including through electronic interfaces in which\ - \ parametric control of high-level musebot parameters are used; message-based\ - \ interfaces which allow human users to communicate with musebots in their own\ - \ language; and interfaces through which musebots have jammed with human musicians.\ - \ Here we report on the recent developments of human interaction with musebot\ - \ ensembles and reflect on some of the implications of these developments for\ - \ the design of metacreative music systems.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Andrew R. Brown and Matthew Horrigan and Arne Eigenfeldt and\ - \ Toby Gifford and Daniel Field and Jon McCormack},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302659},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {19--24},\n publisher = {Virginia Tech},\n title = {Interacting with\ - \ Musebots},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0004.pdf},\n\ - \ year = {2018}\n}\n" + ID: Camurri2002 + abstract: 'This paper presents some our recent research on computational models + and algorithms for real-time analysis of full-body human movement. The focus here + is on techniques to extract in real-time expressive cues relevant to KANSEI and + emotional content in human expressive gesture, e.g., in dance and music performances. + Expressive gesture can contribute to new perspectives for the design of interactive + systems. The EyesWeb open software platform is a main concrete result from our + research work. 
EyesWeb is used in interactive applications, including music and + other artistic productions, museum interactive exhibits, therapy and rehabilitation, + based on the paradigm of expressive gesture. EyesWeb is freely available from + www.eyesweb.org.' + address: 'Dublin, Ireland' + author: 'Camurri, Antonio and Trocca, Riccardo and Volpe, Gualtiero' + bibtex: "@inproceedings{Camurri2002,\n abstract = {This paper presents some our\ + \ recent research on computational models and algorithms for real-time analysis\ + \ of full-body human movement. The focus here is on techniques to extract in real-time\ + \ expressive cues relevant to KANSEI and emotional content in human expressive\ + \ gesture, e.g., in dance and music performances. Expressive gesture can contribute\ + \ to new perspectives for the design of interactive systems. The EyesWeb open\ + \ software platform is a main concrete result from our research work. EyesWeb\ + \ is used in interactive applications, including music and other artistic productions,\ + \ museum interactive exhibits, therapy and rehabilitation, based on the paradigm\ + \ of expressive gesture. 
EyesWeb is freely available from www.eyesweb.org.},\n\ + \ address = {Dublin, Ireland},\n author = {Camurri, Antonio and Trocca, Riccardo\ + \ and Volpe, Gualtiero},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ + \ = {10.5281/zenodo.1176400},\n issn = {2220-4806},\n pages = {30--37},\n title\ + \ = {Interactive Systems Design: A KANSEI-based Approach},\n url = {http://www.nime.org/proceedings/2002/nime2002_030.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302659 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176400 issn: 2220-4806 - month: June - pages: 19--24 - publisher: Virginia Tech - title: Interacting with Musebots - url: http://www.nime.org/proceedings/2018/nime2018_paper0004.pdf - year: 2018 + pages: 30--37 + title: 'Interactive Systems Design: A KANSEI-based Approach' + url: http://www.nime.org/proceedings/2002/nime2002_030.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Kiefer2018 - abstract: 'We investigate how audio augmented reality can engender new collective - modes of musical expression in the context of a sound art installation, ''Listening - Mirrors'', exploring the creation of interactive sound environments for musicians - and non-musicians alike. ''Listening Mirrors'' is designed to incorporate physical - objects and computational systems for altering the acoustic environment, to enhance - collective listening and challenge traditional musician-instrument performance. - At a formative stage in exploring audio AR technology, we conducted an audience - experience study investigating questions around the potential of audio AR in creating - sound installation environments for collective musical expression. 
We collected - interview evidence about the participants'' experience and analysed the data with - using a grounded theory approach. The results demonstrated that the technology - has the potential to create immersive spaces where an audience can feel safe to - experiment musically, and showed how AR can intervene in sound perception to instrumentalise - an environment. The results also revealed caveats about the use of audio AR, - mainly centred on social inhibition and seamlessness of experience, and finding - a balance between mediated worlds so that there is space for interplay between - the two.' - address: 'Blacksburg, Virginia, USA' - author: Chris Kiefer and Cecile Chevalier - bibtex: "@inproceedings{Kiefer2018,\n abstract = {We investigate how audio augmented\ - \ reality can engender new collective modes of musical expression in the context\ - \ of a sound art installation, 'Listening Mirrors', exploring the creation of\ - \ interactive sound environments for musicians and non-musicians alike. 'Listening\ - \ Mirrors' is designed to incorporate physical objects and computational systems\ - \ for altering the acoustic environment, to enhance collective listening and challenge\ - \ traditional musician-instrument performance. At a formative stage in exploring\ - \ audio AR technology, we conducted an audience experience study investigating\ - \ questions around the potential of audio AR in creating sound installation environments\ - \ for collective musical expression. We collected interview evidence about the\ - \ participants' experience and analysed the data with using a grounded theory\ - \ approach. 
The results demonstrated that the technology has the potential to\ - \ create immersive spaces where an audience can feel safe to experiment musically,\ - \ and showed how AR can intervene in sound perception to instrumentalise an environment.\ - \ The results also revealed caveats about the use of audio AR, mainly centred\ - \ on social inhibition and seamlessness of experience, and finding a balance between\ - \ mediated worlds so that there is space for interplay between the two.},\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {Chris Kiefer and Cecile Chevalier},\n\ + ID: Chadabe2002 + abstract: 'Mapping, which describes the way a performer''s controls are connected + to sound variables, is a useful concept when applied to the structure of electronic + instruments modelled after traditional acoustic instruments. But mapping is a + less useful concept when applied to the structure of complex and interactive instruments + in which algorithms generate control information. This paper relates the functioning + and benefits of different types of electronic instruments to the structural principles + on which they are based. Structural models of various instruments will be discussed + and musical examples played. ' + address: 'Dublin, Ireland' + author: 'Chadabe, Joel' + bibtex: "@inproceedings{Chadabe2002,\n abstract = {Mapping, which describes the\ + \ way a performer's controls are connected to sound variables, is a useful concept\ + \ when applied to the structure of electronic instruments modelled after traditional\ + \ acoustic instruments. But mapping is a less useful concept when applied to the\ + \ structure of complex and interactive instruments in which algorithms generate\ + \ control information. This paper relates the functioning and benefits of different\ + \ types of electronic instruments to the structural principles on which they are\ + \ based. Structural models of various instruments will be discussed and musical\ + \ examples played. 
},\n address = {Dublin, Ireland},\n author = {Chadabe, Joel},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302661},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {25--28},\n publisher = {Virginia Tech},\n title =\ - \ {Towards New Modes of Collective Musical Expression through Audio Augmented\ - \ Reality},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0005.pdf},\n\ - \ year = {2018}\n}\n" + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176402},\n\ + \ issn = {2220-4806},\n keywords = {mapping fly-by-wire algorithmic network interactivity\ + \ instrument deterministic indeterministic},\n pages = {38--42},\n title = {The\ + \ Limitations of Mapping as a Structural Descriptive in Electronic Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_038.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302661 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176402 issn: 2220-4806 - month: June - pages: 25--28 - publisher: Virginia Tech - title: Towards New Modes of Collective Musical Expression through Audio Augmented - Reality - url: http://www.nime.org/proceedings/2018/nime2018_paper0005.pdf - year: 2018 + keywords: mapping fly-by-wire algorithmic network interactivity instrument deterministic + indeterministic + pages: 38--42 + title: The Limitations of Mapping as a Structural Descriptive in Electronic Instruments + url: http://www.nime.org/proceedings/2002/nime2002_038.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Matsuura2018 - abstract: 'Aphysical Unmodeling Instrument is the title of a sound installation - that re-physicalizes the 
Whirlwind meta-wind-instrument physical model. We re-implemented - the Whirlwind by using real-world physical objects to comprise a sound installation. - The sound propagation between a speaker and microphone was used as the delay, - and a paper cylinder was employed as the resonator. This paper explains the concept - and implementation of this work at the 2017 HANARART exhibition. We examine the - characteristics of the work, address its limitations, and discuss the possibility - of its interpretation by means of a “re-physicalization.”' - address: 'Blacksburg, Virginia, USA' - author: Tomoya Matsuura and kazuhiro jo - bibtex: "@inproceedings{Matsuura2018,\n abstract = {Aphysical Unmodeling Instrument\ - \ is the title of a sound installation that re-physicalizes the Whirlwind meta-wind-instrument\ - \ physical model. We re-implemented the Whirlwind by using real-world physical\ - \ objects to comprise a sound installation. The sound propagation between a speaker\ - \ and microphone was used as the delay, and a paper cylinder was employed as the\ - \ resonator. This paper explains the concept and implementation of this work at\ - \ the 2017 HANARART exhibition. 
We examine the characteristics of the work, address\ - \ its limitations, and discuss the possibility of its interpretation by means\ - \ of a “re-physicalization.”},\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {Tomoya Matsuura and kazuhiro jo},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302663},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {29--30},\n publisher = {Virginia\ - \ Tech},\n title = {Aphysical Unmodeling Instrument: Sound Installation that Re-Physicalizes\ - \ a Meta-Wind-Instrument Physical Model, Whirlwind},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0006.pdf},\n\ - \ year = {2018}\n}\n" + ID: Couturier2002 + abstract: 'This paper describes a virtual musical instrument based on the scanned + synthesis technique and implemented in Max-Msp. The device is composed of a computer + and three gesture sensors. The timbre of the produced sound is rich and changing. + The instrument proposes an intuitive and expressive control of the sound thanks + to a complex mapping between gesture and sound. ' + address: 'Dublin, Ireland' + author: 'Couturier, Jean-Michel' + bibtex: "@inproceedings{Couturier2002,\n abstract = {This paper describes a virtual\ + \ musical instrument based on the scanned synthesis technique and implemented\ + \ in Max-Msp. The device is composed of a computer and three gesture sensors.\ + \ The timbre of the produced sound is rich and changing. The instrument proposes\ + \ an intuitive and expressive control of the sound thanks to a complex mapping\ + \ between gesture and sound. 
},\n address = {Dublin, Ireland},\n author = {Couturier,\ + \ Jean-Michel},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi =\ + \ {10.5281/zenodo.1176404},\n issn = {2220-4806},\n keywords = {graphics tablet,\ + \ meta-parameters, multi-touch tactile surface, scanned synthesis},\n pages =\ + \ {43--45},\n title = {A Scanned Synthesis Virtual Instrument},\n url = {http://www.nime.org/proceedings/2002/nime2002_043.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302663 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176404 issn: 2220-4806 - month: June - pages: 29--30 - publisher: Virginia Tech - title: 'Aphysical Unmodeling Instrument: Sound Installation that Re-Physicalizes - a Meta-Wind-Instrument Physical Model, Whirlwind' - url: http://www.nime.org/proceedings/2018/nime2018_paper0006.pdf - year: 2018 + keywords: 'graphics tablet, meta-parameters, multi-touch tactile surface, scanned + synthesis' + pages: 43--45 + title: A Scanned Synthesis Virtual Instrument + url: http://www.nime.org/proceedings/2002/nime2002_043.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Holbrook2018 - abstract: Many common and popular sound spatialisation techniques and methods rely - on listeners being positioned in a "sweet-spot" for an optimal listening position - in a circle of speakers. This paper discusses a stochastic spatialisation method - and its first iteration as implemented for the exhibition Hot Pocket at The Museum - of Contemporary Art in Oslo in 2017. This method is implemented in Max and offers - a matrix-based amplitude panning methodology which can provide a flexible means - for the spatialialisation of sounds. - address: 'Blacksburg, Virginia, USA' - author: Ulf A. S. 
Holbrook - bibtex: "@inproceedings{Holbrook2018,\n abstract = {Many common and popular sound\ - \ spatialisation techniques and methods rely on listeners being positioned in\ - \ a \"sweet-spot\" for an optimal listening position in a circle of speakers.\ - \ This paper discusses a stochastic spatialisation method and its first iteration\ - \ as implemented for the exhibition Hot Pocket at The Museum of Contemporary Art\ - \ in Oslo in 2017. This method is implemented in Max and offers a matrix-based\ - \ amplitude panning methodology which can provide a flexible means for the spatialialisation\ - \ of sounds.},\n address = {Blacksburg, Virginia, USA},\n author = {Ulf A. S.\ - \ Holbrook},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302665},\n editor\ - \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {31--32},\n publisher = {Virginia\ - \ Tech},\n title = {An approach to stochastic spatialization --- A case of Hot\ - \ Pocket},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0007.pdf},\n\ - \ year = {2018}\n}\n" + ID: DArcangelo2002 + abstract: 'This paper presents the approaches and expectations of a recently launched + course at New York University (NYU) in the design and development of musical controllers. + The framework for the course, which is also entitled "New Interfaces for Musical + Expression," is largely based on the proceedings of the first NIME workshop held + in Seattle, WA in April 2001.' + address: 'Dublin, Ireland' + author: 'D''Arcangelo, Gideon' + bibtex: "@inproceedings{DArcangelo2002,\n abstract = {This paper presents the approaches\ + \ and expectations of a recently launched course at New York University (NYU)\ + \ in the design and development of musical controllers. 
The framework for the\ + \ course, which is also entitled \"New Interfaces for Musical Expression,\" is\ + \ largely based on the proceedings of the first NIME workshop held in Seattle,\ + \ WA in April 2001.},\n address = {Dublin, Ireland},\n author = {D'Arcangelo,\ + \ Gideon},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176406},\n\ + \ issn = {2220-4806},\n keywords = {creative expression, input devices, musical\ + \ controllers},\n pages = {46--49},\n title = {Creating a Context for Musical\ + \ Innovation: A NIME Curriculum},\n url = {http://www.nime.org/proceedings/2002/nime2002_046.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302665 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176406 issn: 2220-4806 - month: June - pages: 31--32 - publisher: Virginia Tech - title: An approach to stochastic spatialization --- A case of Hot Pocket - url: http://www.nime.org/proceedings/2018/nime2018_paper0007.pdf - year: 2018 + keywords: 'creative expression, input devices, musical controllers' + pages: 46--49 + title: 'Creating a Context for Musical Innovation: A NIME Curriculum' + url: http://www.nime.org/proceedings/2002/nime2002_046.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Champion2018 - abstract: 'AM MODE is a custom-designed software interface for electronic augmentation - of the acoustic drum set. The software is used in the development a series of - recordings, similarly titled as AM MODE. Programmed in Max/MSP, the software uses - live audio input from individual instruments within the drum set as control parameters - for modulation synthesis. 
By using a combination of microphones and MIDI triggers, - audio signal features such as the velocity of the strike of the drum, or the frequency - at which the drum resonates, are tracked, interpolated, and scaled to user specifications. - The resulting series of recordings is comprised of the digitally generated output - of the modulation engine, in addition to both raw and modulated signals from the - acoustic drum set. In this way, this project explores drum set augmentation not - only at the input and from a performative angle, but also at the output, where - the acoustic and the synthesized elements are merged into each other, forming - a sonic hybrid. ' - address: 'Blacksburg, Virginia, USA' - author: Cory Champion and Mo H Zareei - bibtex: "@inproceedings{Champion2018,\n abstract = {AM MODE is a custom-designed\ - \ software interface for electronic augmentation of the acoustic drum set. The\ - \ software is used in the development a series of recordings, similarly titled\ - \ as AM MODE. Programmed in Max/MSP, the software uses live audio input from individual\ - \ instruments within the drum set as control parameters for modulation synthesis.\ - \ By using a combination of microphones and MIDI triggers, audio signal features\ - \ such as the velocity of the strike of the drum, or the frequency at which the\ - \ drum resonates, are tracked, interpolated, and scaled to user specifications.\ - \ The resulting series of recordings is comprised of the digitally generated output\ - \ of the modulation engine, in addition to both raw and modulated signals from\ - \ the acoustic drum set. In this way, this project explores drum set augmentation\ - \ not only at the input and from a performative angle, but also at the output,\ - \ where the acoustic and the synthesized elements are merged into each other,\ - \ forming a sonic hybrid. 
},\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {Cory Champion and Mo H Zareei},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302667},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {33--34},\n publisher = {Virginia\ - \ Tech},\n title = {AM MODE: Using AM and FM Synthesis for Acoustic Drum Set Augmentation},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0008.pdf},\n year\ - \ = {2018}\n}\n" + ID: Fels2002 + abstract: 'In this paper we describe three new music controllers, each designed + to be played by two players. As the intimacy between two people increases so does + their ability to anticipate and predict the other''s actions. We hypothesize that + this intimacy between two people can be used as a basis for new controllers for + musical expression. Looking at ways people communicate non-verbally, we are developing + three new instruments based on different communication channels. The Tooka is + a hollow tube with a pressure sensor and buttons for each player. Players place + opposite ends in their mouths and modulate the pressure in the tube with their + tongues and lungs, controlling sound. Coordinated button presses control the music + as well. The Pushka, yet to be built, is a semirigid rod with strain gauges and + position sensors to track the rod''s position. Each player holds opposite ends + of the rod and manipulates it together. Bend, end point position, velocity and + acceleration and torque are mapped to musical parameters. The Pullka, yet to be + built, is simply a string attached at both ends with two bridges. Tension is measured + with strain gauges. Players manipulate the string tension at each end together + to modulate sound. We are looking at different musical mappings appropriate for + two players.' 
+ address: 'Dublin, Ireland' + author: 'Fels, Sidney S. and Vogt, Florian' + bibtex: "@inproceedings{Fels2002,\n abstract = {In this paper we describe three\ + \ new music controllers, each designed to be played by two players. As the intimacy\ + \ between two people increases so does their ability to anticipate and predict\ + \ the other's actions. We hypothesize that this intimacy between two people can\ + \ be used as a basis for new controllers for musical expression. Looking at ways\ + \ people communicate non-verbally, we are developing three new instruments based\ + \ on different communication channels. The Tooka is a hollow tube with a pressure\ + \ sensor and buttons for each player. Players place opposite ends in their mouths\ + \ and modulate the pressure in the tube with their tongues and lungs, controlling\ + \ sound. Coordinated button presses control the music as well. The Pushka, yet\ + \ to be built, is a semirigid rod with strain gauges and position sensors to track\ + \ the rod's position. Each player holds opposite ends of the rod and manipulates\ + \ it together. Bend, end point position, velocity and acceleration and torque\ + \ are mapped to musical parameters. The Pullka, yet to be built, is simply a string\ + \ attached at both ends with two bridges. Tension is measured with strain gauges.\ + \ Players manipulate the string tension at each end together to modulate sound.\ + \ We are looking at different musical mappings appropriate for two players.},\n\ + \ address = {Dublin, Ireland},\n author = {Fels, Sidney S. 
and Vogt, Florian},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176408},\n\ + \ issn = {2220-4806},\n keywords = {Two person musical instruments, intimacy,\ + \ human-human communication, cooperative music, passive haptic interface},\n pages\ + \ = {50--55},\n title = {Tooka: Explorations of Two Person Instruments},\n url\ + \ = {http://www.nime.org/proceedings/2002/nime2002_050.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302667 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176408 issn: 2220-4806 - month: June - pages: 33--34 - publisher: Virginia Tech - title: 'AM MODE: Using AM and FM Synthesis for Acoustic Drum Set Augmentation' - url: http://www.nime.org/proceedings/2018/nime2018_paper0008.pdf - year: 2018 + keywords: 'Two person musical instruments, intimacy, human-human communication, + cooperative music, passive haptic interface' + pages: 50--55 + title: 'Tooka: Explorations of Two Person Instruments' + url: http://www.nime.org/proceedings/2002/nime2002_050.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Haddad2018 - abstract: 'This paper introduces the Kinesynth, a hybrid kinesthetic synthesizer - that uses the human body as both an analog mixer and as a modulator using a combination - of capacitive sensing in "transmit" mode and skin conductance. This is achieved - when the body, through the skin, relays signals from control & audio sources to - the inputs of the instrument. These signals can be harnessed from the environment, - from within the Kinesynth''s internal synthesizer, or from external instrument, - making the Kinesynth a mediator between the body and the environment.' 
- address: 'Blacksburg, Virginia, USA' - author: Don Derek Haddad and Joe Paradiso - bibtex: "@inproceedings{Haddad2018,\n abstract = {This paper introduces the Kinesynth,\ - \ a hybrid kinesthetic synthesizer that uses the human body as both an analog\ - \ mixer and as a modulator using a combination of capacitive sensing in \"transmit\"\ - \ mode and skin conductance. This is achieved when the body, through the skin,\ - \ relays signals from control & audio sources to the inputs of the instrument.\ - \ These signals can be harnessed from the environment, from within the Kinesynth's\ - \ internal synthesizer, or from external instrument, making the Kinesynth a mediator\ - \ between the body and the environment.},\n address = {Blacksburg, Virginia, USA},\n\ - \ author = {Don Derek Haddad and Joe Paradiso},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1302669},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {35--36},\n publisher = {Virginia Tech},\n title = {Kinesynth: Patching, Modulating,\ - \ and Mixing a Hybrid Kinesthetic Synthesizer.},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0009.pdf},\n\ - \ year = {2018}\n}\n" + ID: Ferris2002 + abstract: 'The Cardboard Box Garden (CBG) originated from a dissatisfaction with + current computer technology as it is presented to children. This paper shall briefly + review the process involved in the creation of this installation, from motivation + through to design and subsequent implementation and user experience with the CBG. + Through the augmentation of an everyday artefact, namely the standard cardboard + box, a simple yet powerful interactive environment was created that has achieved + its goal of stirring childrens imagination judging from the experience of our + users. 
' + address: 'Dublin, Ireland' + author: 'Ferris, Kieran and Bannon, Liam' + bibtex: "@inproceedings{Ferris2002,\n abstract = {The Cardboard Box Garden (CBG)\ + \ originated from a dissatisfaction with current computer technology as it is\ + \ presented to children. This paper shall briefly review the process involved\ + \ in the creation of this installation, from motivation through to design and\ + \ subsequent implementation and user experience with the CBG. Through the augmentation\ + \ of an everyday artefact, namely the standard cardboard box, a simple yet powerful\ + \ interactive environment was created that has achieved its goal of stirring childrens\ + \ imagination judging from the experience of our users. },\n address = {Dublin,\ + \ Ireland},\n author = {Ferris, Kieran and Bannon, Liam},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176410},\n issn = {2220-4806},\n\ + \ keywords = {Education, play, augmented reality, pervasive computing, disappearing\ + \ computer, assembly, cardboard box},\n pages = {56--58},\n title = {The Musical\ + \ Box Garden},\n url = {http://www.nime.org/proceedings/2002/nime2002_056.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302669 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176410 issn: 2220-4806 - month: June - pages: 35--36 - publisher: Virginia Tech - title: 'Kinesynth: Patching, Modulating, and Mixing a Hybrid Kinesthetic Synthesizer.' 
- url: http://www.nime.org/proceedings/2018/nime2018_paper0009.pdf - year: 2018 + keywords: 'Education, play, augmented reality, pervasive computing, disappearing + computer, assembly, cardboard box' + pages: 56--58 + title: The Musical Box Garden + url: http://www.nime.org/proceedings/2002/nime2002_056.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Marogna2018 - abstract: 'CABOTO is an interactive system for live performance and composition. - A graphic score sketched on paper is read by a computer vision system. The graphic - elements are scanned following a symbolic-raw hybrid approach, that is, they are - recognised and classified according to their shapes but also scanned as waveforms - and optical signals. All this information is mapped into the synthesis engine, - which implements different kind of synthesis techniques for different shapes. - In CABOTO the score is viewed as a cartographic map explored by some navigators. - These navigators traverse the score in a semi-autonomous way, scanning the graphic - elements found along their paths. The system tries to challenge the boundaries - between the concepts of composition, score, performance, instrument, since the - musical result will depend both on the composed score and the way the navigators - will traverse it during the live performance. ' - address: 'Blacksburg, Virginia, USA' - author: Riccardo Marogna - bibtex: "@inproceedings{Marogna2018,\n abstract = {CABOTO is an interactive system\ - \ for live performance and composition. A graphic score sketched on paper is read\ - \ by a computer vision system. The graphic elements are scanned following a symbolic-raw\ - \ hybrid approach, that is, they are recognised and classified according to their\ - \ shapes but also scanned as waveforms and optical signals. All this information\ - \ is mapped into the synthesis engine, which implements different kind of synthesis\ - \ techniques for different shapes. 
In CABOTO the score is viewed as a cartographic\ - \ map explored by some navigators. These navigators traverse the score in a semi-autonomous\ - \ way, scanning the graphic elements found along their paths. The system tries\ - \ to challenge the boundaries between the concepts of composition, score, performance,\ - \ instrument, since the musical result will depend both on the composed score\ - \ and the way the navigators will traverse it during the live performance. },\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Riccardo Marogna},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302671},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {37--42},\n publisher = {Virginia Tech},\n title =\ - \ {CABOTO: A Graphic-Based Interactive System for Composing and Performing Electronic\ - \ Music},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0010.pdf},\n\ - \ year = {2018}\n}\n" + ID: Flety2002 + abstract: 'Research and musical creation with gestural-oriented interfaces have + recently seen a renewal of interest and activity at Ircam [1][2]. In the course + of several musical projects, undertaken by young composers attending the one-year + Course in Composition and Computer Music or by guests artists, Ircam Education + and Creation departments have proposed various solutions for gesture-controlled + sound synthesis and processing. 
In this article, we describe the technical aspects + of AtoMIC Pro, an Analog to MIDI converter proposed as a re-usable solution for + digitizing several sensors in different contexts such as interactive sound installation + or virtual instruments.The main direction of our researches, and of this one in + particular, is to create tools that can be fully integrated into an artistic project + as a real part of the composition and performance processes.' + address: 'Dublin, Ireland' + author: 'Fléty, Emmanuel' + bibtex: "@inproceedings{Flety2002,\n abstract = {Research and musical creation with\ + \ gestural-oriented interfaces have recently seen a renewal of interest and activity\ + \ at Ircam [1][2]. In the course of several musical projects, undertaken by young\ + \ composers attending the one-year Course in Composition and Computer Music or\ + \ by guests artists, Ircam Education and Creation departments have proposed various\ + \ solutions for gesture-controlled sound synthesis and processing. In this article,\ + \ we describe the technical aspects of AtoMIC Pro, an Analog to MIDI converter\ + \ proposed as a re-usable solution for digitizing several sensors in different\ + \ contexts such as interactive sound installation or virtual instruments.The main\ + \ direction of our researches, and of this one in particular, is to create tools\ + \ that can be fully integrated into an artistic project as a real part of the\ + \ composition and performance processes.},\n address = {Dublin, Ireland},\n author\ + \ = {Fl\\'{e}ty, Emmanuel},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ + \ = {10.5281/zenodo.1176412},\n issn = {2220-4806},\n keywords = {Gestural controller,\ + \ Sensor, MIDI, Music. 
Solution for Multi-sensor Acquisition},\n pages = {59--64},\n\ + \ title = {AtoMIC Pro: a Multiple Sensor Acquisition Device},\n url = {http://www.nime.org/proceedings/2002/nime2002_059.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302671 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176412 issn: 2220-4806 - month: June - pages: 37--42 - publisher: Virginia Tech - title: 'CABOTO: A Graphic-Based Interactive System for Composing and Performing - Electronic Music' - url: http://www.nime.org/proceedings/2018/nime2018_paper0010.pdf - year: 2018 + keywords: 'Gestural controller, Sensor, MIDI, Music. Solution for Multi-sensor Acquisition' + pages: 59--64 + title: 'AtoMIC Pro: a Multiple Sensor Acquisition Device' + url: http://www.nime.org/proceedings/2002/nime2002_059.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Oliveira2018 - abstract: 'This paper describes the concept, design, and realization of two iterations - of a new controller called the XT Synth. The development of the instrument came - from the desire to maintain the expressivity and familiarity of string instruments, - while adding the flexibility and power usually found in keyboard controllers. - There are different examples of instruments that bring the physicality and expressiveness - of acoustic instruments into electronic music, from “Do it yourself” (DIY) products - to commercially available ones. This paper discusses the process and the challenges - faced when creating a DIY musical instrument and then subsequently transforming - the instrument into a product suitable for commercialization.' 
- address: 'Blacksburg, Virginia, USA' - author: 'Oliveira da Silveira, Gustavo' - bibtex: "@inproceedings{Oliveira2018,\n abstract = {This paper describes the concept,\ - \ design, and realization of two iterations of a new controller called the XT\ - \ Synth. The development of the instrument came from the desire to maintain the\ - \ expressivity and familiarity of string instruments, while adding the flexibility\ - \ and power usually found in keyboard controllers. There are different examples\ - \ of instruments that bring the physicality and expressiveness of acoustic instruments\ - \ into electronic music, from “Do it yourself” (DIY) products to commercially\ - \ available ones. This paper discusses the process and the challenges faced when\ - \ creating a DIY musical instrument and then subsequently transforming the instrument\ - \ into a product suitable for commercialization.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Oliveira da Silveira, Gustavo},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302673},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {43--44},\n publisher = {Virginia Tech},\n title = {The XT Synth: A\ - \ New Controller for String Players},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0011.pdf},\n\ - \ year = {2018}\n}\n" + ID: Gadd2002 + abstract: 'We explore the role that metaphor plays in developing expressive devices + by examining the MetaMuse system. MetaMuse is a prop-based system that uses the + metaphor of rainfall to make the process of granular synthesis understandable. + We discuss MetaMuse within a framework we call ''''transparency'''' that can be + used as a predictor of the expressivity of musical devices. 
Metaphor depends on + a literature,or cultural basis, which forms the basis for making transparent device + mappings. In this context we evaluate the effect of metaphor in the MetaMuse system.' + address: 'Dublin, Ireland' + author: 'Gadd, Ashley and Fels, Sidney S.' + bibtex: "@inproceedings{Gadd2002,\n abstract = {We explore the role that metaphor\ + \ plays in developing expressive devices by examining the MetaMuse system. MetaMuse\ + \ is a prop-based system that uses the metaphor of rainfall to make the process\ + \ of granular synthesis understandable. We discuss MetaMuse within a framework\ + \ we call ''transparency'' that can be used as a predictor of the expressivity\ + \ of musical devices. Metaphor depends on a literature,or cultural basis, which\ + \ forms the basis for making transparent device mappings. In this context we evaluate\ + \ the effect of metaphor in the MetaMuse system.},\n address = {Dublin, Ireland},\n\ + \ author = {Gadd, Ashley and Fels, Sidney S.},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n date =\ + \ {24-26 May, 2002},\n doi = {10.5281/zenodo.1176414},\n issn = {2220-4806},\n\ + \ keywords = {Expressive interface, transparency, metaphor, prop-based controller,\ + \ granular synthesis.},\n pages = {65--70},\n title = {MetaMuse: Metaphors for\ + \ Expressive Instruments},\n url = {http://www.nime.org/proceedings/2002/nime2002_065.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302673 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176414 issn: 2220-4806 - month: June - pages: 43--44 - publisher: Virginia Tech - title: 'The XT Synth: A New Controller for String Players' - url: http://www.nime.org/proceedings/2018/nime2018_paper0011.pdf - year: 2018 + keywords: 'Expressive interface, 
transparency, metaphor, prop-based controller, + granular synthesis.' + pages: 65--70 + title: 'MetaMuse: Metaphors for Expressive Instruments' + url: http://www.nime.org/proceedings/2002/nime2002_065.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Bin2018 - abstract: 'This paper presents a study examining the effects of disfluent design - on audience perception of digital musical instrument (DMI) performance. Disfluency, - defined as a barrier to effortless cognitive processing, has been shown to generate - better results in some contexts as it engages higher levels of cognition. We were - motivated to determine if disfluent design in a DMI would result in a risk state - that audiences would be able to perceive, and if this would have any effect on - their evaluation of the performance. A DMI was produced that incorporated a disfluent - characteristic: It would turn itself off if not constantly moved. Six physically - identical instruments were produced, each in one of three versions: Control (no - disfluent characteristics), mild disfluency (turned itself off slowly), and heightened - disfluency (turned itself off more quickly). 6 percussionists each performed on - one instrument for a live audience (N=31), and data was collected in the form - of real-time feedback (via a mobile phone app), and post-hoc surveys. Though there - was little difference in ratings of enjoyment between the versions of the instrument, - the real-time and qualitative data suggest that disfluent behaviour in a DMI may - be a way for audiences to perceive and appreciate performer skill.' - address: 'Blacksburg, Virginia, USA' - author: S. M. Astrid Bin and Nick Bryan-Kinns and Andrew P. McPherson - bibtex: "@inproceedings{Bin2018,\n abstract = {This paper presents a study examining\ - \ the effects of disfluent design on audience perception of digital musical instrument\ - \ (DMI) performance. 
Disfluency, defined as a barrier to effortless cognitive\ - \ processing, has been shown to generate better results in some contexts as it\ - \ engages higher levels of cognition. We were motivated to determine if disfluent\ - \ design in a DMI would result in a risk state that audiences would be able to\ - \ perceive, and if this would have any effect on their evaluation of the performance.\ - \ A DMI was produced that incorporated a disfluent characteristic: It would turn\ - \ itself off if not constantly moved. Six physically identical instruments were\ - \ produced, each in one of three versions: Control (no disfluent characteristics),\ - \ mild disfluency (turned itself off slowly), and heightened disfluency (turned\ - \ itself off more quickly). 6 percussionists each performed on one instrument\ - \ for a live audience (N=31), and data was collected in the form of real-time\ - \ feedback (via a mobile phone app), and post-hoc surveys. Though there was little\ - \ difference in ratings of enjoyment between the versions of the instrument, the\ - \ real-time and qualitative data suggest that disfluent behaviour in a DMI may\ - \ be a way for audiences to perceive and appreciate performer skill.},\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {S. M. Astrid Bin and Nick Bryan-Kinns\ - \ and Andrew P. 
McPherson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302675},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {45--50},\n publisher = {Virginia\ - \ Tech},\n title = {Risky business: Disfluency as a design strategy},\n url =\ - \ {http://www.nime.org/proceedings/2018/nime2018_paper0012.pdf},\n year = {2018}\n\ - }\n" + ID: Griffith2002 + abstract: 'The use of free gesture in making music has usually been confined to + instruments that use direct mappings between movement and sound space. Here we + demonstrate the use of categories of gesture as the basis of musical learning + and performance collaboration. These are used in a system that reinterprets the + approach to learning through performance that is found in many musical cultures + and discussed here through the example of Kpelle music. ' + address: 'Dublin, Ireland' + author: 'Griffith, Niall J. and O''Leary, Sean and O''Shea, Donagh and Hammond, + Ed and O''Modhrain, Sile' + bibtex: "@inproceedings{Griffith2002,\n abstract = {The use of free gesture in making\ + \ music has usually been confined to instruments that use direct mappings between\ + \ movement and sound space. Here we demonstrate the use of categories of gesture\ + \ as the basis of musical learning and performance collaboration. These are used\ + \ in a system that reinterprets the approach to learning through performance that\ + \ is found in many musical cultures and discussed here through the example of\ + \ Kpelle music. },\n address = {Dublin, Ireland},\n author = {Griffith, Niall\ + \ J. 
and O'Leary, Sean and O'Shea, Donagh and Hammond, Ed and O'Modhrain, Sile},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176416},\n\ + \ issn = {2220-4806},\n keywords = {Collaboration, Performance, Metaphor, Gesture},\n\ + \ pages = {71--72},\n title = {Circles and Seeds: Adapting Kpelle Ideas about\ + \ Music Performance for Collaborative Digital Music performance},\n url = {http://www.nime.org/proceedings/2002/nime2002_071.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302675 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176416 issn: 2220-4806 - month: June - pages: 45--50 - publisher: Virginia Tech - title: 'Risky business: Disfluency as a design strategy' - url: http://www.nime.org/proceedings/2018/nime2018_paper0012.pdf - year: 2018 - - -- ENTRYTYPE: inproceedings - ID: Gibson2018 - abstract: 'The voice of the theremin is more than just a simple sine wave. Its unique - sound is made through two radio frequency oscillators that, when operating at - almost identical frequencies, gravitate towards each other. Ultimately, this pull - alters the sine wave, creating the signature sound of the theremin. The Theremin - Textural Expander (TTE) explores other textures the theremin can produce when - its sound is processed and manipulated through a Max/MSP patch and controlled - via a MIDI pedalboard. The TTE extends the theremin''s ability, enabling it to - produce five distinct new textures beyond the original. It also features a looping - system that the performer can use to layer textures created with the traditional - theremin sound. 
Ultimately, this interface introduces a new way to play and experience - the theremin; it extends its expressivity, affording a greater range of compositional - possibilities and greater flexibility in free improvisation contexts. ' - address: 'Blacksburg, Virginia, USA' - author: Rachel Gibson - bibtex: "@inproceedings{Gibson2018,\n abstract = {The voice of the theremin is more\ - \ than just a simple sine wave. Its unique sound is made through two radio frequency\ - \ oscillators that, when operating at almost identical frequencies, gravitate\ - \ towards each other. Ultimately, this pull alters the sine wave, creating the\ - \ signature sound of the theremin. The Theremin Textural Expander (TTE) explores\ - \ other textures the theremin can produce when its sound is processed and manipulated\ - \ through a Max/MSP patch and controlled via a MIDI pedalboard. The TTE extends\ - \ the theremin's ability, enabling it to produce five distinct new textures beyond\ - \ the original. It also features a looping system that the performer can use\ - \ to layer textures created with the traditional theremin sound. Ultimately, this\ - \ interface introduces a new way to play and experience the theremin; it extends\ - \ its expressivity, affording a greater range of compositional possibilities and\ - \ greater flexibility in free improvisation contexts. 
},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Rachel Gibson},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1302527},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {51--52},\n publisher = {Virginia Tech},\n title = {The Theremin Textural\ - \ Expander},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0013.pdf},\n\ - \ year = {2018}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1302527 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 - issn: 2220-4806 - month: June - pages: 51--52 - publisher: Virginia Tech - title: The Theremin Textural Expander - url: http://www.nime.org/proceedings/2018/nime2018_paper0013.pdf - year: 2018 + keywords: 'Collaboration, Performance, Metaphor, Gesture' + pages: 71--72 + title: 'Circles and Seeds: Adapting Kpelle Ideas about Music Performance for Collaborative + Digital Music performance' + url: http://www.nime.org/proceedings/2002/nime2002_071.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Toka2018 - abstract: 'This paper introduces Siren, a hybrid system for algorithmic composition - and live-coding performances. Its hierarchical structure allows small modifications - to propagate and aggregate on lower levels for dramatic changes in the musical - output. It uses functional programming language TidalCycles as the core pattern - creation environment due to its inherent ability to create complex pattern relations - with minimal syntax. Borrowing the best from TidalCycles, Siren augments the pattern - creation process by introducing various interface level features: a multi-channel - sequencer, local and global parameters, mathematical expressions, and pattern - history. 
It presents new opportunities for recording, refining, and reusing the - playback information with the pattern roll component. Subsequently, the paper - concludes with a preliminary evaluation of Siren in the context of user interface - design principles, which originates from the cognitive dimensions framework for - musical notation design.' - address: 'Blacksburg, Virginia, USA' - author: Mert Toka and Can Ince and Mehmet Aydin Baytas - bibtex: "@inproceedings{Toka2018,\n abstract = {This paper introduces Siren, a hybrid\ - \ system for algorithmic composition and live-coding performances. Its hierarchical\ - \ structure allows small modifications to propagate and aggregate on lower levels\ - \ for dramatic changes in the musical output. It uses functional programming language\ - \ TidalCycles as the core pattern creation environment due to its inherent ability\ - \ to create complex pattern relations with minimal syntax. Borrowing the best\ - \ from TidalCycles, Siren augments the pattern creation process by introducing\ - \ various interface level features: a multi-channel sequencer, local and global\ - \ parameters, mathematical expressions, and pattern history. It presents new opportunities\ - \ for recording, refining, and reusing the playback information with the pattern\ - \ roll component. 
Subsequently, the paper concludes with a preliminary evaluation\ - \ of Siren in the context of user interface design principles, which originates\ - \ from the cognitive dimensions framework for musical notation design.},\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {Mert Toka and Can Ince and Mehmet\ - \ Aydin Baytas},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302677},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {53--58},\n publisher = {Virginia\ - \ Tech},\n title = {Siren: Interface for Pattern Languages},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0014.pdf},\n\ - \ year = {2018}\n}\n" + ID: Gunther2002 + abstract: 'This paper presents a novel coupling of haptics technology and music, + introducing the notion of tactile composition or aesthetic composition for the + sense of touch. A system that facilitates the composition and perception of intricate, + musically structured spatio-temporal patterns of vibration on the surface of the + body is described. An initial test of the system in a performance context is discussed. + The fundamental building blocks of a compositional language for touch are considered. ' + address: 'Dublin, Ireland' + author: 'Gunther, Eric and Davenport, Glorianna and O''Modhrain, Sile' + bibtex: "@inproceedings{Gunther2002,\n abstract = {This paper presents a novel coupling\ + \ of haptics technology and music, introducing the notion of tactile composition\ + \ or aesthetic composition for the sense of touch. A system that facilitates the\ + \ composition and perception of intricate, musically structured spatio-temporal\ + \ patterns of vibration on the surface of the body is described. An initial test\ + \ of the system in a performance context is discussed. 
The fundamental building\ + \ blocks of a compositional language for touch are considered. },\n address =\ + \ {Dublin, Ireland},\n author = {Gunther, Eric and Davenport, Glorianna and O'Modhrain,\ + \ Sile},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176418},\n\ + \ issn = {2220-4806},\n keywords = {multi-modal,music,tactile composition,vibrotactile},\n\ + \ pages = {73--79},\n title = {Cutaneous Grooves: Composing for the Sense of Touch},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_073.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302677 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176418 issn: 2220-4806 - month: June - pages: 53--58 - publisher: Virginia Tech - title: 'Siren: Interface for Pattern Languages' - url: http://www.nime.org/proceedings/2018/nime2018_paper0014.pdf - year: 2018 + keywords: 'multi-modal,music,tactile composition,vibrotactile' + pages: 73--79 + title: 'Cutaneous Grooves: Composing for the Sense of Touch' + url: http://www.nime.org/proceedings/2002/nime2002_073.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Salazar2018 - abstract: 'This paper documents an extensive and varied series of performances by - the authors over the past year using mobile technology, primarily iPad tablets - running the Auraglyph musical sketchpad software. These include both solo and - group performances, the latter under the auspices of the Mobile Ensemble of CalArts - (MECA), a group created to perform music with mobile technology devices. 
As a - whole, this diverse mobile technology-based performance practice leverages Auraglyph''s - versatility to explore a number of topical issues in electronic music performance, - including the use of physical and acoustical space, audience participation, and - interaction design of musical instruments.' - address: 'Blacksburg, Virginia, USA' - author: Spencer Salazar and Andrew Piepenbrink and Sarah Reid - bibtex: "@inproceedings{Salazar2018,\n abstract = {This paper documents an extensive\ - \ and varied series of performances by the authors over the past year using mobile\ - \ technology, primarily iPad tablets running the Auraglyph musical sketchpad software.\ - \ These include both solo and group performances, the latter under the auspices\ - \ of the Mobile Ensemble of CalArts (MECA), a group created to perform music with\ - \ mobile technology devices. As a whole, this diverse mobile technology-based\ - \ performance practice leverages Auraglyph's versatility to explore a number of\ - \ topical issues in electronic music performance, including the use of physical\ - \ and acoustical space, audience participation, and interaction design of musical\ - \ instruments.},\n address = {Blacksburg, Virginia, USA},\n author = {Spencer\ - \ Salazar and Andrew Piepenbrink and Sarah Reid},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1302679},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {59--64},\n publisher = {Virginia Tech},\n title = {Developing a Performance\ - \ Practice for Mobile Music Technology},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0015.pdf},\n\ - \ year = {2018}\n}\n" + ID: Hankins2002 + abstract: 'The Circular Optical Object Locator is a collaborative and cooperative + music-making device. 
It uses an inexpensive digital video camera to observe a + rotating platter. Opaque objects placed on the platter are detected by the camera + during rotation. The locations of the objects passing under the camera are used + to generate music. ' + address: 'Dublin, Ireland' + author: 'Hankins, Tim and Merrill, David and Robert, Jocelyn' + bibtex: "@inproceedings{Hankins2002,\n abstract = {The Circular Optical Object Locator\ + \ is a collaborative and cooperative music-making device. It uses an inexpensive\ + \ digital video camera to observe a rotating platter. Opaque objects placed on\ + \ the platter are detected by the camera during rotation. The locations of the\ + \ objects passing under the camera are used to generate music. },\n address =\ + \ {Dublin, Ireland},\n author = {Hankins, Tim and Merrill, David and Robert, Jocelyn},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176420},\n\ + \ issn = {2220-4806},\n keywords = {Input devices, music controllers, collaborative,\ + \ real-time score manipulation.},\n pages = {80--81},\n title = {Circular Optical\ + \ Object Locator},\n url = {http://www.nime.org/proceedings/2002/nime2002_080.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302679 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176420 issn: 2220-4806 - month: June - pages: 59--64 - publisher: Virginia Tech - title: Developing a Performance Practice for Mobile Music Technology - url: http://www.nime.org/proceedings/2018/nime2018_paper0015.pdf - year: 2018 + keywords: 'Input devices, music controllers, collaborative, real-time score manipulation.' 
+ pages: 80--81 + title: Circular Optical Object Locator + url: http://www.nime.org/proceedings/2002/nime2002_080.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Momeni2018 - abstract: 'This paper provides an overview of the design, prototyping, deployment - and evaluation of a multi-agent interactive sound instrument named MOM (Mobile - Object for Music). MOM combines a real-time signal processing engine implemented - with Pure Data on an embedded Linux platform, with gestural interaction implemented - via a variety of analog and digital sensors. Power, sound-input and sound-diffusion - subsystems make the instrument autonomous and mobile. This instrument was designed - in coordination with the development of an evening-length dance/music performance - in which the performing musician is engaged in choreographed movements with the - mobile instruments. The design methodology relied on a participatory process - that engaged an interdisciplinary team made up of technologists, musicians, composers, - choreographers, and dancers. The prototyping process relied on a mix of in-house - and out-sourced digital fabrication processes intended to make the open source - hardware and software design of the system accessible and affordable for other - creators. ' - address: 'Blacksburg, Virginia, USA' - author: Ali Momeni and Daniel McNamara and Jesse Stiles - bibtex: "@inproceedings{Momeni2018,\n abstract = {This paper provides an overview\ - \ of the design, prototyping, deployment and evaluation of a multi-agent interactive\ - \ sound instrument named MOM (Mobile Object for Music). MOM combines a real-time\ - \ signal processing engine implemented with Pure Data on an embedded Linux platform,\ - \ with gestural interaction implemented via a variety of analog and digital sensors.\ - \ Power, sound-input and sound-diffusion subsystems make the instrument autonomous\ - \ and mobile. 
This instrument was designed in coordination with the development\ - \ of an evening-length dance/music performance in which the performing musician\ - \ is engaged in choreographed movements with the mobile instruments. The design\ - \ methodology relied on a participatory process that engaged an interdisciplinary\ - \ team made up of technologists, musicians, composers, choreographers, and dancers.\ - \ The prototyping process relied on a mix of in-house and out-sourced digital\ - \ fabrication processes intended to make the open source hardware and software\ - \ design of the system accessible and affordable for other creators. },\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {Ali Momeni and Daniel McNamara and\ - \ Jesse Stiles},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302681},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {65--71},\n publisher = {Virginia\ - \ Tech},\n title = {MOM: an Extensible Platform for Rapid Prototyping and Design\ - \ of Electroacoustic Instruments},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0016.pdf},\n\ - \ year = {2018}\n}\n" + ID: Hasan2002 + abstract: 'We have created a new electronic musical instrument, referred to as the + Termenova (Russian for "daughter of Theremin") that combines a free-gesture capacitive + sensing device with an optical sensing system that detects the reflection of a + hand when it intersects a beam of an array of red lasers. The laser beams, which + are made visible by a thin layer of theatrical mist, provide visual feedback and + guidance to the performer to alleviate the difficulties of using a non-contact + interface as well as adding an interesting component for the audience to observe. 
+ The system uses capacitive sensing to detect the proximity of the player''s hands; + this distance is mapped to pitch, volume, or other continuous effect. The laser + guide positions are calibrated before play with position controlled servo motors + interfaced to a main controller board; the location of each beam corresponds to + the position where the performer should move his or her hand to achieve a pre-specified + pitch and/or effect. The optical system senses the distance of the player''s hands + from the source of each laser beam, providing an additional dimension of musical + control. ' + address: 'Dublin, Ireland' + author: 'Hasan, Leila and Yu, Nicholas and Paradiso, Joseph A.' + bibtex: "@inproceedings{Hasan2002,\n abstract = {We have created a new electronic\ + \ musical instrument, referred to as the Termenova (Russian for \"daughter of\ + \ Theremin\") that combines a free-gesture capacitive sensing device with an optical\ + \ sensing system that detects the reflection of a hand when it intersects a beam\ + \ of an array of red lasers. The laser beams, which are made visible by a thin\ + \ layer of theatrical mist, provide visual feedback and guidance to the performer\ + \ to alleviate the difficulties of using a non-contact interface as well as adding\ + \ an interesting component for the audience to observe. The system uses capacitive\ + \ sensing to detect the proximity of the player's hands; this distance is mapped\ + \ to pitch, volume, or other continuous effect. The laser guide positions are\ + \ calibrated before play with position controlled servo motors interfaced to a\ + \ main controller board; the location of each beam corresponds to the position\ + \ where the performer should move his or her hand to achieve a pre-specified pitch\ + \ and/or effect. The optical system senses the distance of the player's hands\ + \ from the source of each laser beam, providing an additional dimension of musical\ + \ control. 
},\n address = {Dublin, Ireland},\n author = {Hasan, Leila and Yu,\ + \ Nicholas and Paradiso, Joseph A.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ + \ doi = {10.5281/zenodo.1176422},\n issn = {2220-4806},\n keywords = {Theremin,\ + \ gesture interface, capacitive sensing, laser harp, optical proximity sensing,\ + \ servo control, musical controller},\n pages = {82--87},\n title = {The Termenova\ + \ : A Hybrid Free-Gesture Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_082.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302681 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176422 issn: 2220-4806 - month: June - pages: 65--71 - publisher: Virginia Tech - title: 'MOM: an Extensible Platform for Rapid Prototyping and Design of Electroacoustic - Instruments' - url: http://www.nime.org/proceedings/2018/nime2018_paper0016.pdf - year: 2018 + keywords: 'Theremin, gesture interface, capacitive sensing, laser harp, optical + proximity sensing, servo control, musical controller' + pages: 82--87 + title: 'The Termenova : A Hybrid Free-Gesture Interface' + url: http://www.nime.org/proceedings/2002/nime2002_082.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Robertson2018 - abstract: 'The Harmonic Wand is a transducer-based instrument that combines physical - excitation, synthesis, and gestural control. Our objective was to design a device - that affords exploratory modes of interaction with the performer''s surroundings, - as well as precise control over microtonal pitch content and other concomitant - parameters. The instrument is comprised of a hand-held wand, containing two piezo-electric - transducers affixed to a pair of metal probes. 
The performer uses the wand to - physically excite surfaces in the environment and capture resultant signals. Input - materials are then processed using a novel application of Karplus-Strong synthesis, - in which these impulses are imbued with discrete resonances. We achieved gestural - control over synthesis parameters using a secondary tactile interface, consisting - of four force-sensitive resistors (FSR), a fader, and momentary switch. As a - unique feature of our instrument, we modeled pitch organization and associated - parametric controls according to theoretical principles outlined in Harry Partch''s - “monophonic fabric” of Just Intonation—specifically his conception of odentities, - udentities, and a variable numerary nexus. This system classifies pitch content - based upon intervallic structures found in both the overtone and undertone series. Our - paper details the procedural challenges in designing the Harmonic Wand.' - address: 'Blacksburg, Virginia, USA' - author: Ben Luca Robertson and Luke Dahl - bibtex: "@inproceedings{Robertson2018,\n abstract = {The Harmonic Wand is a transducer-based\ - \ instrument that combines physical excitation, synthesis, and gestural control.\ - \ Our objective was to design a device that affords exploratory modes of interaction\ - \ with the performer's surroundings, as well as precise control over microtonal\ - \ pitch content and other concomitant parameters. The instrument is comprised\ - \ of a hand-held wand, containing two piezo-electric transducers affixed to a\ - \ pair of metal probes. The performer uses the wand to physically excite surfaces\ - \ in the environment and capture resultant signals. Input materials are then\ - \ processed using a novel application of Karplus-Strong synthesis, in which these\ - \ impulses are imbued with discrete resonances. 
We achieved gestural control\ - \ over synthesis parameters using a secondary tactile interface, consisting of\ - \ four force-sensitive resistors (FSR), a fader, and momentary switch. As a unique\ - \ feature of our instrument, we modeled pitch organization and associated parametric\ - \ controls according to theoretical principles outlined in Harry Partch's “monophonic\ - \ fabric” of Just Intonation—specifically his conception of odentities, udentities,\ - \ and a variable numerary nexus. This system classifies pitch content based upon\ - \ intervallic structures found in both the overtone and undertone series. Our\ - \ paper details the procedural challenges in designing the Harmonic Wand.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Ben Luca Robertson and Luke\ - \ Dahl},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1302683},\n editor = {Luke\ - \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ - \ {2220-4806},\n month = {June},\n pages = {72--77},\n publisher = {Virginia Tech},\n\ - \ title = {Harmonic Wand: An Instrument for Microtonal Control and Gestural Excitation},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0017.pdf},\n year\ - \ = {2018}\n}\n" + ID: Hunt2002 + abstract: 'In this paper we challenge the assumption that an electronic instrument + consists solely of an interface and a sound generator. We emphasise the importance + of the mapping between input parameters and system parameters, and claim that + this can define the very essence of an instrument.' + address: 'Dublin, Ireland' + author: 'Hunt, Andy D. and Wanderley, Marcelo M. 
and Paradis, Matthew' + bibtex: "@inproceedings{Hunt2002,\n abstract = {In this paper we challenge the assumption\ + \ that an electronic instrument consists solely of an interface and a sound generator.\ + \ We emphasise the importance of the mapping between input parameters and system\ + \ parameters, and claim that this can define the very essence of an instrument.},\n\ + \ address = {Dublin, Ireland},\n author = {Hunt, Andy D. and Wanderley, Marcelo\ + \ M. and Paradis, Matthew},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ + \ = {10.5281/zenodo.1176424},\n issn = {2220-4806},\n keywords = {electronic musical\ + \ instruments,human-computer interaction,mapping strategies},\n pages = {88--93},\n\ + \ title = {The importance of Parameter Mapping in Electronic Instrument Design},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_088.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302683 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176424 issn: 2220-4806 - month: June - pages: 72--77 - publisher: Virginia Tech - title: 'Harmonic Wand: An Instrument for Microtonal Control and Gestural Excitation' - url: http://www.nime.org/proceedings/2018/nime2018_paper0017.pdf - year: 2018 + keywords: 'electronic musical instruments,human-computer interaction,mapping strategies' + pages: 88--93 + title: The importance of Parameter Mapping in Electronic Instrument Design + url: http://www.nime.org/proceedings/2002/nime2002_088.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Macionis2018 - abstract: 'Sansa is an extended sansula, a hyper-instrument that is similar in design - and functionality to a kalimba or thumb piano. 
At the heart of this interface - is a series of sensors that are used to augment the tone and expand the performance - capabilities of the instrument. The sensor data is further exploited using the - machine learning program Wekinator, which gives users the ability to interact - and perform with the instrument using several different modes of operation. In - this way, Sansa is capable of both solo acoustic performances as well as complex - productions that require interactions between multiple technological mediums. - Sansa expands the current community of hyper-instruments by demonstrating the - ways that hardware and software can extend an acoustic instrument''s functionality - and playability in a live performance or studio setting.' - address: 'Blacksburg, Virginia, USA' - author: McLean J Macionis and Ajay Kapur - bibtex: "@inproceedings{Macionis2018,\n abstract = {Sansa is an extended sansula,\ - \ a hyper-instrument that is similar in design and functionality to a kalimba\ - \ or thumb piano. At the heart of this interface is a series of sensors that are\ - \ used to augment the tone and expand the performance capabilities of the instrument.\ - \ The sensor data is further exploited using the machine learning program Wekinator,\ - \ which gives users the ability to interact and perform with the instrument using\ - \ several different modes of operation. In this way, Sansa is capable of both\ - \ solo acoustic performances as well as complex productions that require interactions\ - \ between multiple technological mediums. 
Sansa expands the current community\ - \ of hyper-instruments by demonstrating the ways that hardware and software can\ - \ extend an acoustic instrument's functionality and playability in a live performance\ - \ or studio setting.},\n address = {Blacksburg, Virginia, USA},\n author = {McLean\ - \ J Macionis and Ajay Kapur},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302685},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {78--81},\n publisher = {Virginia\ - \ Tech},\n title = {Sansa: A Modified Sansula for Extended Compositional Techniques\ - \ Using Machine Learning},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0018.pdf},\n\ - \ year = {2018}\n}\n" + ID: Huott2002 + abstract: 'This paper is a design report on a prototype musical controller based + on fiberoptic sensing pads from Tactex Controls [8]. It will discuss elements + of form factor, technical design, and tuning/sound generation systems tested while + building the device I have dubbed ''the Ski''. The goal is the creation of a fine + musical instrument with which a skilled performer can play music from standard + repertoire as well as break sonic ground in modern forms.' + address: 'Dublin, Ireland' + author: 'Huott, Robert' + bibtex: "@inproceedings{Huott2002,\n abstract = {This paper is a design report on\ + \ a prototype musical controller based on fiberoptic sensing pads from Tactex\ + \ Controls [8]. It will discuss elements of form factor, technical design, and\ + \ tuning/sound generation systems tested while building the device I have dubbed\ + \ 'the Ski'. 
The goal is the creation of a fine musical instrument with which\ + \ a skilled performer can play music from standard repertoire as well as break\ + \ sonic ground in modern forms.},\n address = {Dublin, Ireland},\n author = {Huott,\ + \ Robert},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176428},\n\ + \ issn = {2220-4806},\n keywords = {musical controller, Tactex, tactile interface,\ + \ tuning systems},\n pages = {94--98},\n title = {An Interface for Precise Musical\ + \ Control},\n url = {http://www.nime.org/proceedings/2002/nime2002_094.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302685 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176428 issn: 2220-4806 - month: June - pages: 78--81 - publisher: Virginia Tech - title: 'Sansa: A Modified Sansula for Extended Compositional Techniques Using Machine - Learning' - url: http://www.nime.org/proceedings/2018/nime2018_paper0018.pdf - year: 2018 + keywords: 'musical controller, Tactex, tactile interface, tuning systems' + pages: 94--98 + title: An Interface for Precise Musical Control + url: http://www.nime.org/proceedings/2002/nime2002_094.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Turchet2018 - abstract: 'This demo will showcase technologically mediated interactions between - a performer playing a smart musical instrument (SMIs) and audience members using - Musical Haptic Wearables (MHWs). Smart Instruments are a family of musical instruments - characterized by embedded computational intelligence, wireless connectivity, an - embedded sound delivery system, and an onboard system for feedback to the player. 
- They offer direct point-to-point communication between each other and other portable - sensor-enabled devices connected to local networks and to the Internet. MHWs are - wearable devices for audience members, which encompass haptic stimulation, gesture - tracking, and wireless connectivity features. This demo will present an architecture - enabling the multidirectional creative communication between a performer playing - a Smart Mandolin and audience members using armband-based MHWs.' - address: 'Blacksburg, Virginia, USA' - author: Luca Turchet and Mathieu Barthet - bibtex: "@inproceedings{Turchet2018,\n abstract = {This demo will showcase technologically\ - \ mediated interactions between a performer playing a smart musical instrument\ - \ (SMIs) and audience members using Musical Haptic Wearables (MHWs). Smart Instruments\ - \ are a family of musical instruments characterized by embedded computational\ - \ intelligence, wireless connectivity, an embedded sound delivery system, and\ - \ an onboard system for feedback to the player. They offer direct point-to-point\ - \ communication between each other and other portable sensor-enabled devices connected\ - \ to local networks and to the Internet. MHWs are wearable devices for audience\ - \ members, which encompass haptic stimulation, gesture tracking, and wireless\ - \ connectivity features. 
This demo will present an architecture enabling the multidirectional\ - \ creative communication between a performer playing a Smart Mandolin and audience\ - \ members using armband-based MHWs.},\n address = {Blacksburg, Virginia, USA},\n\ - \ author = {Luca Turchet and Mathieu Barthet},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1302687},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {82--83},\n publisher = {Virginia Tech},\n title = {Demo of interactions between\ - \ a performer playing a Smart Mandolin and audience members using Musical Haptic\ - \ Wearables},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0019.pdf},\n\ - \ year = {2018}\n}\n" + ID: Magnusson2002 + abstract: 'We are interested in exhibiting our programs at your demo section at + the conference. We believe that the subject of your conference is precisely what + we are experimenting with in our musical software. ' + address: 'Dublin, Ireland' + author: 'Magnusson, Thor' + bibtex: "@inproceedings{Magnusson2002,\n abstract = {We are interested in exhibiting\ + \ our programs at your demo section at the conference. We believe that the subject\ + \ of your conference is precisely what we are experimenting with in our musical\ + \ software. 
},\n address = {Dublin, Ireland},\n author = {Magnusson, Thor},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176384},\n\ + \ issn = {2220-4806},\n keywords = {Further info on our website http//www.ixi-software.net.},\n\ + \ pages = {101--101},\n title = {IXI software},\n url = {http://www.nime.org/proceedings/2002/nime2002_101.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302687 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176384 issn: 2220-4806 - month: June - pages: 82--83 - publisher: Virginia Tech - title: Demo of interactions between a performer playing a Smart Mandolin and audience - members using Musical Haptic Wearables - url: http://www.nime.org/proceedings/2018/nime2018_paper0019.pdf - year: 2018 + keywords: Further info on our website http//www.ixi-software.net. + pages: 101--101 + title: IXI software + url: http://www.nime.org/proceedings/2002/nime2002_101.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Kemper2018 - abstract: 'Robotic instrument designers tend to focus on the number of sound control - parameters and their resolution when trying to develop expressivity in their instruments. - These parameters afford greater sonic nuance related to elements of music that - are traditionally associated with expressive human performances including articulation, - timbre, dynamics, and phrasing. Equating the capacity for sonic nuance and musical - expression stems from the “transitive” perspective that musical expression is - an act of emotional communication from performer to listener. However, this perspective - is problematic in the case of robotic instruments since we do not typically consider - machines to be capable of expressing emotion. 
Contemporary theories of musical - expression focus on an “intransitive” perspective, where musical meaning is generated - as an embodied experience. Understanding expressivity from this perspective allows - listeners to interpret performances by robotic instruments as possessing their - own expressive meaning, even though the performer is a machine. It also enables - musicians working with robotic instruments to develop their own unique vocabulary - of expressive gestures unique to mechanical instruments. This paper explores these - issues of musical expression, introducing the concept of mechatronic expression - as a compositional and design strategy that highlights the musical and performative - capabilities unique to robotic instruments.' - address: 'Blacksburg, Virginia, USA' - author: Steven Kemper and Scott Barton - bibtex: "@inproceedings{Kemper2018,\n abstract = {Robotic instrument designers tend\ - \ to focus on the number of sound control parameters and their resolution when\ - \ trying to develop expressivity in their instruments. These parameters afford\ - \ greater sonic nuance related to elements of music that are traditionally associated\ - \ with expressive human performances including articulation, timbre, dynamics,\ - \ and phrasing. Equating the capacity for sonic nuance and musical expression\ - \ stems from the “transitive” perspective that musical expression is an act of\ - \ emotional communication from performer to listener. However, this perspective\ - \ is problematic in the case of robotic instruments since we do not typically\ - \ consider machines to be capable of expressing emotion. Contemporary theories\ - \ of musical expression focus on an “intransitive” perspective, where musical\ - \ meaning is generated as an embodied experience. 
Understanding expressivity from\ - \ this perspective allows listeners to interpret performances by robotic instruments\ - \ as possessing their own expressive meaning, even though the performer is a machine.\ - \ It also enables musicians working with robotic instruments to develop their\ - \ own unique vocabulary of expressive gestures unique to mechanical instruments.\ - \ This paper explores these issues of musical expression, introducing the concept\ - \ of mechatronic expression as a compositional and design strategy that highlights\ - \ the musical and performative capabilities unique to robotic instruments.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Steven Kemper and Scott Barton},\n\ + ID: Jorda2002 + abstract: 'In this paper we present Afasia, an interactive multimedia performance + based in Homer''s Odyssey [2]. Afasia is a one-man digital theater play in which + a lone performer fitted with a sensor-suit conducts, like Homer, the whole show + by himself, controlling 2D animations, DVD video and conducting the music mechanically + performed by a robot quartet. After contextualizing the piece, all of its technical + elements, starting with the hardware input and output components, are described. + A special emphasis is given to the interactivity strategies and the subsequent + software design. Since its first version premiered in Barcelona in 1998, Afasia + has been performed in many European and American countries and has received several + international awards. ' + address: 'Dublin, Ireland' + author: 'Jordà, Sergi' + bibtex: "@inproceedings{Jorda2002,\n abstract = {In this paper we present Afasia,\ + \ an interactive multimedia performance based in Homer's Odyssey [2]. 
Afasia is\ + \ a one-man digital theater play in which a lone performer fitted with a sensor-suit\ + \ conducts, like Homer, the whole show by himself, controlling 2D animations,\ + \ DVD video and conducting the music mechanically performed by a robot quartet.\ + \ After contextualizing the piece, all of its technical elements, starting with\ + \ the hardware input and output components, are described. A special emphasis\ + \ is given to the interactivity strategies and the subsequent software design.\ + \ Since its first version premiered in Barcelona in 1998, Afasia has been performed\ + \ in many European and American countries and has received several international\ + \ awards. },\n address = {Dublin, Ireland},\n author = {Jord\\`{a}, Sergi},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302689},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {84--87},\n publisher = {Virginia Tech},\n title =\ - \ {Mechatronic Expression: Reconsidering Expressivity in Music for Robotic Instruments\ - \ },\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0020.pdf},\n\ - \ year = {2018}\n}\n" + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176432},\n\ + \ issn = {2220-4806},\n keywords = {Multimedia interaction, musical robots, real-time\ + \ musical systems.},\n pages = {102--107},\n title = {Afasia: the Ultimate Homeric\ + \ One-man-multimedia-band},\n url = {http://www.nime.org/proceedings/2002/nime2002_102.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302689 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176432 issn: 2220-4806 - month: June - pages: 84--87 - 
publisher: Virginia Tech - title: 'Mechatronic Expression: Reconsidering Expressivity in Music for Robotic - Instruments ' - url: http://www.nime.org/proceedings/2018/nime2018_paper0020.pdf - year: 2018 + keywords: 'Multimedia interaction, musical robots, real-time musical systems.' + pages: 102--107 + title: 'Afasia: the Ultimate Homeric One-man-multimedia-band' + url: http://www.nime.org/proceedings/2002/nime2002_102.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Brownb2018 - abstract: 'Musical participation has brought individuals together in on-going communities - throughout human history, aiding in the kinds of social integration essential - for wellbeing. The design of Digital Musical Instruments (DMIs), however, has - generally been driven by idiosyncratic artistic concerns, Western art music and - dance traditions of expert performance, and short-lived interactive art installations - engaging a broader public of musical novices. These DMIs rarely engage with the - problems of on-going use in musical communities with existing performance idioms, - repertoire, and social codes with participants representing the full learning - curve of musical skill, such as social dance. Our project, Interactive Tango Milonga, - an interactive Argentine tango dance system for social dance addresses these challenges - in order to innovate connection, the feeling of intense relation between dance - partners, music, and the larger tango community. ' - address: 'Blacksburg, Virginia, USA' - author: Courtney Brown - bibtex: "@inproceedings{Brownb2018,\n abstract = {Musical participation has brought\ - \ individuals together in on-going communities throughout human history, aiding\ - \ in the kinds of social integration essential for wellbeing. 
The design of Digital\ - \ Musical Instruments (DMIs), however, has generally been driven by idiosyncratic\ - \ artistic concerns, Western art music and dance traditions of expert performance,\ - \ and short-lived interactive art installations engaging a broader public of musical\ - \ novices. These DMIs rarely engage with the problems of on-going use in musical\ - \ communities with existing performance idioms, repertoire, and social codes with\ - \ participants representing the full learning curve of musical skill, such as\ - \ social dance. Our project, Interactive Tango Milonga, an interactive Argentine\ - \ tango dance system for social dance addresses these challenges in order to innovate\ - \ connection, the feeling of intense relation between dance partners, music, and\ - \ the larger tango community. },\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {Courtney Brown},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302693},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {88--91},\n publisher = {Virginia\ - \ Tech},\n title = {Interactive Tango Milonga: Designing {DMI}s for the Social\ - \ Dance Context },\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0021.pdf},\n\ - \ year = {2018}\n}\n" + ID: Kapur2002 + abstract: 'This paper describes the design of an electronic Tabla controller. The + E-Tabla controls both sound and graphics simultaneously. It allows for a variety + of traditional Tabla strokes and new performance techniques. Graphical feedback + allows for artistical display and pedagogical feedback. ' + address: 'Dublin, Ireland' + author: 'Kapur, Ajay and Essl, Georg and Davidson, Philip L. and Cook, Perry R.' + bibtex: "@inproceedings{Kapur2002,\n abstract = {This paper describes the design\ + \ of an electronic Tabla controller. 
The E-Tabla controls both sound and graphics\ + \ simultaneously. It allows for a variety of traditional Tabla strokes and new\ + \ performance techniques. Graphical feedback allows for artistical display and\ + \ pedagogical feedback. },\n address = {Dublin, Ireland},\n author = {Kapur, Ajay\ + \ and Essl, Georg and Davidson, Philip L. and Cook, Perry R.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176434},\n issn = {2220-4806},\n\ + \ keywords = {Electronic Tabla, Indian Drum Controller, Physical Models, Graphical\ + \ Feedback},\n pages = {108--112},\n title = {The Electronic Tabla Controller},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_108.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302693 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176434 issn: 2220-4806 - month: June - pages: 88--91 - publisher: Virginia Tech - title: 'Interactive Tango Milonga: Designing DMIs for the Social Dance Context ' - url: http://www.nime.org/proceedings/2018/nime2018_paper0021.pdf - year: 2018 + keywords: 'Electronic Tabla, Indian Drum Controller, Physical Models, Graphical + Feedback' + pages: 108--112 + title: The Electronic Tabla Controller + url: http://www.nime.org/proceedings/2002/nime2002_108.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Kleinberger2018 - abstract: 'This paper presents an experiment to investigate how new types of vocal - practices can affect psychophysiological activity. We know that health can influence - the voice, but can a certain use of the voice influence health through modification - of mental and physical state? This study took place in the setting of the Vocal - Vibrations installation. 
For the experiment, participants engage in a multi sensory - vocal exercise with a limited set of guidance to obtain a wide spectrum of vocal - performances across participants. We compare characteristics of those vocal practices - to the participant''s heart rate, breathing rate, electrodermal activity and mental - states. We obtained significant results suggesting that we can correlate psychophysiological - states with characteristics of the vocal practice if we also take into account - biographical information, and in particular mea- surement of how much people “like” - their own voice.' - address: 'Blacksburg, Virginia, USA' - author: Rebecca Kleinberger - bibtex: "@inproceedings{Kleinberger2018,\n abstract = {This paper presents an experiment\ - \ to investigate how new types of vocal practices can affect psychophysiological\ - \ activity. We know that health can influence the voice, but can a certain use\ - \ of the voice influence health through modification of mental and physical state?\ - \ This study took place in the setting of the Vocal Vibrations installation. For\ - \ the experiment, participants engage in a multi sensory vocal exercise with a\ - \ limited set of guidance to obtain a wide spectrum of vocal performances across\ - \ participants. We compare characteristics of those vocal practices to the participant's\ - \ heart rate, breathing rate, electrodermal activity and mental states. 
We obtained\ - \ significant results suggesting that we can correlate psychophysiological states\ - \ with characteristics of the vocal practice if we also take into account biographical\ - \ information, and in particular mea- surement of how much people “like” their\ - \ own voice.},\n address = {Blacksburg, Virginia, USA},\n author = {Rebecca Kleinberger},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302693},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {92--95},\n publisher = {Virginia Tech},\n title =\ - \ {Vocal Musical Expression with a Tactile Resonating Device and its Psychophysiological\ - \ Effects},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0022.pdf},\n\ - \ year = {2018}\n}\n" + ID: Kessous2002 + abstract: 'In this paper, we describe a computer-based solo musical instrument for + live performance. We have adapted a Wacom graphic tablet equipped with a stylus + transducer and a game joystick to use them as a solo expressive instrument. We + have used a formant-synthesis model that can produce a vowel-like singing voice. + This instrument allows multidimensional expressive fundamental frequency control + and vowel articulation. The fundamental frequency angular control used here allows + different mapping adjustments that correspond to different melodic styles. ' + address: 'Dublin, Ireland' + author: 'Kessous, Loïc' + bibtex: "@inproceedings{Kessous2002,\n abstract = {In this paper, we describe a\ + \ computer-based solo musical instrument for live performance. We have adapted\ + \ a Wacom graphic tablet equipped with a stylus transducer and a game joystick\ + \ to use them as a solo expressive instrument. We have used a formant-synthesis\ + \ model that can produce a vowel-like singing voice. 
This instrument allows multidimensional\ + \ expressive fundamental frequency control and vowel articulation. The fundamental\ + \ frequency angular control used here allows different mapping adjustments that\ + \ correspond to different melodic styles. },\n address = {Dublin, Ireland},\n\ + \ author = {Kessous, Lo\\\"{i}c},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ + \ doi = {10.5281/zenodo.1176436},\n issn = {2220-4806},\n keywords = {Bi-manual,\ + \ off-the-shelf input devices, fundamental frequency control, sound color navigation,\ + \ mapping.},\n pages = {113--114},\n title = {Bi-manual Mapping Experimentation,\ + \ with Angular Fundamental Frequency Control and Sound Color Navigation},\n url\ + \ = {http://www.nime.org/proceedings/2002/nime2002_113.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302693 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176436 issn: 2220-4806 - month: June - pages: 92--95 - publisher: Virginia Tech - title: Vocal Musical Expression with a Tactile Resonating Device and its Psychophysiological - Effects - url: http://www.nime.org/proceedings/2018/nime2018_paper0022.pdf - year: 2018 + keywords: 'Bi-manual, off-the-shelf input devices, fundamental frequency control, + sound color navigation, mapping.' 
+ pages: 113--114 + title: 'Bi-manual Mapping Experimentation, with Angular Fundamental Frequency Control + and Sound Color Navigation' + url: http://www.nime.org/proceedings/2002/nime2002_113.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Palsbröker2018 - abstract: 'In order to facilitate access to playing music spontaneously, the prototype - of an instrument which allows a more natural learning approach was developed as - part of the research project Drum-Dance-Music-Machine. The result was a modular - system consisting of several VST plug-ins, which on the one hand provides a drum - interface to create sounds and tones and on the other hand generates or manipulates - music through dance movement, in order to simplify the understanding of more abstract - characteristics of music. This paper describes the development of a new software - concept for the prototype, which since then has been further developed and evaluated - several times. This will improve the maintainability and extensibility of the - system and eliminate design weaknesses. To do so, the existing system first will - be analyzed and requirements for a new framework, which is based on the concepts - of event driven architecture and dependency injection, will be defined. The components - are then transferred to the new system and their performance is assessed. The - approach chosen in this case study and the lessons learned are intended to provide - a viable solution for solving similar problems in the development of modular VST-based - NIMEs.' - address: 'Blacksburg, Virginia, USA' - author: Patrick Palsbröker and Christine Steinmeier and Dominic Becking - bibtex: "@inproceedings{Palsbröker2018,\n abstract = {In order to facilitate access\ - \ to playing music spontaneously, the prototype of an instrument which allows\ - \ a more natural learning approach was developed as part of the research project\ - \ Drum-Dance-Music-Machine. 
The result was a modular system consisting of several\ - \ VST plug-ins, which on the one hand provides a drum interface to create sounds\ - \ and tones and on the other hand generates or manipulates music through dance\ - \ movement, in order to simplify the understanding of more abstract characteristics\ - \ of music. This paper describes the development of a new software concept for\ - \ the prototype, which since then has been further developed and evaluated several\ - \ times. This will improve the maintainability and extensibility of the system\ - \ and eliminate design weaknesses. To do so, the existing system first will be\ - \ analyzed and requirements for a new framework, which is based on the concepts\ - \ of event driven architecture and dependency injection, will be defined. The\ - \ components are then transferred to the new system and their performance is assessed.\ - \ The approach chosen in this case study and the lessons learned are intended\ - \ to provide a viable solution for solving similar problems in the development\ - \ of modular VST-based NIMEs.},\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {Patrick Palsbröker and Christine Steinmeier and Dominic Becking},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302653},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {96--101},\n publisher = {Virginia Tech},\n title\ - \ = {A Framework for Modular VST-based NIMEs Using EDA and Dependency Injection},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0023.pdf},\n year\ - \ = {2018}\n}\n" + ID: Machover2002 + abstract: 'It is astonishing to think that a mere twenty years ago, real-time music + production and performance was not only in a fledgling state with only primitive + (such as the IRCAM 4X machine) or limited (like the Synclavier) 
capabilities, + but was also the subject of very heated debate. At IRCAM in the early 1980''s, + for instance, some (such as Luciano Berio) questioned whether any digital technology + could ever be truly "instrumental", while others (such as Jean-Claude Risset) + doubted whether real-time activity of any sort would ever acquire the richness + and introspection of composition.' + address: 'Dublin, Ireland' + author: 'Machover, Tod' + bibtex: "@inproceedings{Machover2002,\n abstract = {It is astonishing to think that\ + \ a mere twenty years ago, real-time music production and performance was not\ + \ only in a fledgling state with only primitive (such as the IRCAM 4X machine)\ + \ or limited (like the Synclavier) capabilities, but was also the subject of very\ + \ heated debate. At IRCAM in the early 1980's, for instance, some (such as Luciano\ + \ Berio) questioned whether any digital technology could ever be truly \"instrumental\"\ + , while others (such as Jean-Claude Risset) doubted whether real-time activity\ + \ of any sort would ever acquire the richness and introspection of composition.},\n\ + \ address = {Dublin, Ireland},\n author = {Machover, Tod},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176438},\n issn = {2220-4806},\n\ + \ pages = {115--115},\n title = {Instruments, Interactivity, and Inevitability},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_115.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302653 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176438 issn: 2220-4806 - month: June - pages: 96--101 - publisher: Virginia Tech - title: A Framework for Modular VST-based NIMEs Using EDA and Dependency Injection - url: 
http://www.nime.org/proceedings/2018/nime2018_paper0023.pdf - year: 2018 + pages: 115--115 + title: 'Instruments, Interactivity, and Inevitability' + url: http://www.nime.org/proceedings/2002/nime2002_115.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Atherton2018 - abstract: 'Chunity is a programming environment for the design of interactive audiovisual - games, instruments, and experiences. It embodies an audio-driven, sound-first - approach that integrates audio programming and graphics programming in the same - workflow, taking advantage of strongly-timed audio programming features of the - ChucK programming language and the state-of-the-art real-time graphics engine - found in Unity. We describe both the system and its intended workflow for the - creation of expressive audiovisual works. Chunity was evaluated as the primary - software platform in a computer music and design course, where students created - a diverse assortment of interactive audiovisual software. We present results from - the evaluation and discuss Chunity''s usability, utility, and aesthetics as a - way of working. Through these, we argue for Chunity as a unique and useful way - to program sound, graphics, and interaction in tandem, giving users the flexibility - to use a game engine to do much more than "just" make games.' - address: 'Blacksburg, Virginia, USA' - author: Jack Atherton and Ge Wang - bibtex: "@inproceedings{Atherton2018,\n abstract = {Chunity is a programming environment\ - \ for the design of interactive audiovisual games, instruments, and experiences.\ - \ It embodies an audio-driven, sound-first approach that integrates audio programming\ - \ and graphics programming in the same workflow, taking advantage of strongly-timed\ - \ audio programming features of the ChucK programming language and the state-of-the-art\ - \ real-time graphics engine found in Unity. We describe both the system and its\ - \ intended workflow for the creation of expressive audiovisual works. 
Chunity\ - \ was evaluated as the primary software platform in a computer music and design\ - \ course, where students created a diverse assortment of interactive audiovisual\ - \ software. We present results from the evaluation and discuss Chunity's usability,\ - \ utility, and aesthetics as a way of working. Through these, we argue for Chunity\ - \ as a unique and useful way to program sound, graphics, and interaction in tandem,\ - \ giving users the flexibility to use a game engine to do much more than \"just\"\ - \ make games.},\n address = {Blacksburg, Virginia, USA},\n author = {Jack Atherton\ - \ and Ge Wang},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302695},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {102--107},\n publisher = {Virginia\ - \ Tech},\n title = {Chunity: Integrated Audiovisual Programming in Unity},\n url\ - \ = {http://www.nime.org/proceedings/2018/nime2018_paper0024.pdf},\n year = {2018}\n\ - }\n" + ID: Mandelis2002 + abstract: 'This paper describes the Genophone [2], a hyperinstrument developed for + Sound-Performance-Design using the evolutionary paradigm of selective breeding + as the driving process. Sound design, and control assignments (performance mappings), + on most current systems rely heavily on an intimate knowledge of the Sound Synthesis + Techniques (SSTs) employed by the sound generator (hardware or software based). + This intimate knowledge can only be achieved by investing long periods of time + playing around with sounds and experimenting with how parameters change the nature + of the sounds produced. This experience is also needed when control mappings are + defined for performance purposes, so external stimuli can effect changes in SST + parameters. 
Often such experience can be gained after years of interaction with + one particular SST. The system presented here attempts to aid the user in designing + performance sounds and mappings without the necessity for deep knowledge of the + SSTs involved. This is achieved by a selective breeding process on populations + of individual sounds and their mapping. The initial populations are made up of + individuals of existing hand-coded sounds and their mapping. Initial populations + never have randomly derived individuals (this is not an issue as man''s best friend + was also not selectively bred from protozoa). The user previews the population + then expresses how much individuals are liked by their relative repositioning + on the screen (fitness). Some individuals are selected as parents to create a + new population of offspring, through variable mutation and genetic recombination. + These operators use the fitness as a bias for their function, and they were also + successfully used in MutaSynth [1]. The offspring are then evaluated (as their + parents were) and selected for breeding. This cycle continues until satisfactory + sounds and their mapping are reached. Individuals can also be saved to disk for + future "strain" development. The aim of the system is to encourage the creation + of novel performance mappings and sounds with emphasis on exploration, rather + than designs that satisfy specific a priori criteria.' + address: 'Dublin, Ireland' + author: 'Mandelis, James' + bibtex: "@inproceedings{Mandelis2002,\n abstract = {This paper describes the Genophone\ + \ [2], a hyperinstrument developed for Sound-Performance-Design using the evolutionary\ + \ paradigm of selective breeding as the driving process. Sound design, and control\ + \ assignments (performance mappings), on most current systems rely heavily on\ + \ an intimate knowledge of the Sound Synthesis Techniques (SSTs) employed by the\ + \ sound generator (hardware or software based). 
This intimate knowledge can only\ + \ be achieved by investing long periods of time playing around with sounds and\ + \ experimenting with how parameters change the nature of the sounds produced.\ + \ This experience is also needed when control mappings are defined for performance\ + \ purposes, so external stimuli can effect changes in SST parameters. Often such\ + \ experience can be gained after years of interaction with one particular SST.\ + \ The system presented here attempts to aid the user in designing performance\ + \ sounds and mappings without the necessity for deep knowledge of the SSTs involved.\ + \ This is achieved by a selective breeding process on populations of individual\ + \ sounds and their mapping. The initial populations are made up of individuals\ + \ of existing hand-coded sounds and their mapping. Initial populations never have\ + \ randomly derived individuals (this is not an issue as man's best friend was\ + \ also not selectively bred from protozoa). The user previews the population then\ + \ expresses how much individuals are liked by their relative repositioning on\ + \ the screen (fitness). Some individuals are selected as parents to create a new\ + \ population of offspring, through variable mutation and genetic recombination.\ + \ These operators use the fitness as a bias for their function, and they were\ + \ also successfully used in MutaSynth [1]. The offspring are then evaluated (as\ + \ their parents were) and selected for breeding. This cycle continues until satisfactory\ + \ sounds and their mapping are reached. Individuals can also be saved to disk\ + \ for future \"strain\" development. 
The aim of the system is to encourage the\ + \ creation of novel performance mappings and sounds with emphasis on exploration,\ + \ rather than designs that satisfy specific a priori criteria.},\n address = {Dublin,\ + \ Ireland},\n author = {Mandelis, James},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ + \ doi = {10.5281/zenodo.1176440},\n issn = {2220-4806},\n keywords = {adaptive\ + \ interfaces, artificial life,expressivity, hyperinstruments, live performance,\ + \ motion-to-sound mapping, selective breeding, sound meta-synthesis},\n pages\ + \ = {116--117},\n title = {Adaptive Hyperinstruments: Applying Evolutionary Techniques\ + \ to Sound Synthesis and Performance},\n url = {http://www.nime.org/proceedings/2002/nime2002_116.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302695 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176440 issn: 2220-4806 - month: June - pages: 102--107 - publisher: Virginia Tech - title: 'Chunity: Integrated Audiovisual Programming in Unity' - url: http://www.nime.org/proceedings/2018/nime2018_paper0024.pdf - year: 2018 + keywords: 'adaptive interfaces, artificial life,expressivity, hyperinstruments, + live performance, motion-to-sound mapping, selective breeding, sound meta-synthesis' + pages: 116--117 + title: 'Adaptive Hyperinstruments: Applying Evolutionary Techniques to Sound Synthesis + and Performance' + url: http://www.nime.org/proceedings/2002/nime2002_116.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Ianigro2018 - abstract: 'In this paper we expand on prior research into the use of Continuous - Time Recurrent Neural Networks (CTRNNs) as evolvable generators of musical structures - such as audio waveforms. 
This type of neural network has a compact structure and - is capable of producing a large range of temporal dynamics. Due to these properties, - we believe that CTRNNs combined with evolutionary algorithms (EA) could offer - musicians many creative possibilities for the exploration of sound. In prior work, - we have explored the use of interactive and target-based EA designs to tap into - the creative possibilities of CTRNNs. Our results have shown promise for the use - of CTRNNs in the audio domain. However, we feel neither EA designs allow both - open-ended discovery and effective navigation of the CTRNN audio search space - by musicians. Within this paper, we explore the possibility of using novelty search - as an alternative algorithm that facilitates both open-ended and rapid discovery - of the CTRNN creative search space.' - address: 'Blacksburg, Virginia, USA' - author: Steffan Carlos Ianigro and Oliver Bown - bibtex: "@inproceedings{Ianigro2018,\n abstract = {In this paper we expand on prior\ - \ research into the use of Continuous Time Recurrent Neural Networks (CTRNNs)\ - \ as evolvable generators of musical structures such as audio waveforms. This\ - \ type of neural network has a compact structure and is capable of producing a\ - \ large range of temporal dynamics. Due to these properties, we believe that CTRNNs\ - \ combined with evolutionary algorithms (EA) could offer musicians many creative\ - \ possibilities for the exploration of sound. In prior work, we have explored\ - \ the use of interactive and target-based EA designs to tap into the creative\ - \ possibilities of CTRNNs. Our results have shown promise for the use of CTRNNs\ - \ in the audio domain. 
However, we feel neither EA designs allow both open-ended\ - \ discovery and effective navigation of the CTRNN audio search space by musicians.\ - \ Within this paper, we explore the possibility of using novelty search as an\ - \ alternative algorithm that facilitates both open-ended and rapid discovery of\ - \ the CTRNN creative search space.},\n address = {Blacksburg, Virginia, USA},\n\ - \ author = {Steffan Carlos Ianigro and Oliver Bown},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302697},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {108--113},\n publisher = {Virginia Tech},\n title = {Exploring Continuous\ - \ Time Recurrent Neural Networks through Novelty Search},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0025.pdf},\n\ - \ year = {2018}\n}\n" + ID: Marshall2002 + abstract: 'This paper introduces a subtle interface, which evolved from the design + of an alternative gestural controller in the development of a performance interface. + The conceptual idea used is based on that of the traditional Bodhran instrument, + an Irish frame drum. The design process was user-centered and involved professional + Bodhran players and through prototyping and user testing the resulting Vodhran + emerged. ' + address: 'Dublin, Ireland' + author: 'Marshall, Mark T. and Rath, Matthias and Moynihan, Breege' + bibtex: "@inproceedings{Marshall2002,\n abstract = {This paper introduces a subtle\ + \ interface, which evolved from the design of an alternative gestural controller\ + \ in the development of a performance interface. The conceptual idea used is based\ + \ on that of the traditional Bodhran instrument, an Irish frame drum. 
The design\ + \ process was user-centered and involved professional Bodhran players and through\ + \ prototyping and user testing the resulting Vodhran emerged. },\n address = {Dublin,\ + \ Ireland},\n author = {Marshall, Mark T. and Rath, Matthias and Moynihan, Breege},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176442},\n\ + \ issn = {2220-4806},\n keywords = {Virtual instrument, sound modeling, gesture,\ + \ user-centered design},\n pages = {118--119},\n title = {The Virtual Bodhran\ + \ -- The Vodhran},\n url = {http://www.nime.org/proceedings/2002/nime2002_118.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302697 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176442 issn: 2220-4806 - month: June - pages: 108--113 - publisher: Virginia Tech - title: Exploring Continuous Time Recurrent Neural Networks through Novelty Search - url: http://www.nime.org/proceedings/2018/nime2018_paper0025.pdf - year: 2018 - - -- ENTRYTYPE: inproceedings - ID: Bowers2018 - abstract: 'Research into machine listening has intensified in recent years creating - a variety of techniques for recognising musical features suitable, for example, - in musicological analysis or commercial application in song recognition. Within - NIME, several projects exist seeking to make these techniques useful in real-time - music making. However, we debate whether the functionally-oriented approaches - inherited from engineering domains that much machine listening research manifests - is fully suited to the exploratory, divergent, boundary-stretching, uncertainty-seeking, - playful and irreverent orientations of many artists. 
To explore this, we engaged - in a concerted collaborative design exercise in which many different listening - algorithms were implemented and presented with input which challenged their customary - range of application and the implicit norms of musicality which research can take - for granted. An immersive 3D spatialised multichannel environment was created - in which the algorithms could be explored in a hybrid installation/performance/lecture - form of research presentation. The paper closes with reflections on the creative - value of ‘hijacking'' formal approaches into deviant contexts, the typically undocumented - practical know-how required to make algorithms work, the productivity of a playfully - irreverent relationship between engineering and artistic approaches to NIME, and - a sketch of a sonocybernetic aesthetics for our work.' - address: 'Blacksburg, Virginia, USA' - author: John Bowers and Owen Green - bibtex: "@inproceedings{Bowers2018,\n abstract = {Research into machine listening\ - \ has intensified in recent years creating a variety of techniques for recognising\ - \ musical features suitable, for example, in musicological analysis or commercial\ - \ application in song recognition. Within NIME, several projects exist seeking\ - \ to make these techniques useful in real-time music making. However, we debate\ - \ whether the functionally-oriented approaches inherited from engineering domains\ - \ that much machine listening research manifests is fully suited to the exploratory,\ - \ divergent, boundary-stretching, uncertainty-seeking, playful and irreverent\ - \ orientations of many artists. To explore this, we engaged in a concerted collaborative\ - \ design exercise in which many different listening algorithms were implemented\ - \ and presented with input which challenged their customary range of application\ - \ and the implicit norms of musicality which research can take for granted. 
An\ - \ immersive 3D spatialised multichannel environment was created in which the algorithms\ - \ could be explored in a hybrid installation/performance/lecture form of research\ - \ presentation. The paper closes with reflections on the creative value of ‘hijacking'\ - \ formal approaches into deviant contexts, the typically undocumented practical\ - \ know-how required to make algorithms work, the productivity of a playfully irreverent\ - \ relationship between engineering and artistic approaches to NIME, and a sketch\ - \ of a sonocybernetic aesthetics for our work.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {John Bowers and Owen Green},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1302699},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {114--119},\n publisher = {Virginia Tech},\n title = {All the Noises: Hijacking\ - \ Listening Machines for Performative Research},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0026.pdf},\n\ - \ year = {2018}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1302699 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 - issn: 2220-4806 - month: June - pages: 114--119 - publisher: Virginia Tech - title: 'All the Noises: Hijacking Listening Machines for Performative Research' - url: http://www.nime.org/proceedings/2018/nime2018_paper0026.pdf - year: 2018 + keywords: 'Virtual instrument, sound modeling, gesture, user-centered design' + pages: 118--119 + title: The Virtual Bodhran -- The Vodhran + url: http://www.nime.org/proceedings/2002/nime2002_118.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Schramm2018 - abstract: 'This paper presents a system for easily augmenting polyphonic pitched - instruments. 
The entire system is designed to run on a low-cost embedded computer, - suitable for live performance and easy to customise for different use cases. The - core of the system implements real-time spectrum factorisation, decomposing polyphonic - audio input signals into music note activations. New instruments can be easily - added to the system with the help of custom spectral template dictionaries. Instrument - augmentation is achieved by replacing or mixing the instrument''s original sounds - with a large variety of synthetic or sampled sounds, which follow the polyphonic - pitch activations.' - address: 'Blacksburg, Virginia, USA' - author: Rodrigo Schramm and Federico Visi and André Brasil and Marcelo O Johann - bibtex: "@inproceedings{Schramm2018,\n abstract = {This paper presents a system\ - \ for easily augmenting polyphonic pitched instruments. The entire system is designed\ - \ to run on a low-cost embedded computer, suitable for live performance and easy\ - \ to customise for different use cases. The core of the system implements real-time\ - \ spectrum factorisation, decomposing polyphonic audio input signals into music\ - \ note activations. New instruments can be easily added to the system with the\ - \ help of custom spectral template dictionaries. 
Instrument augmentation is achieved\ - \ by replacing or mixing the instrument's original sounds with a large variety\ - \ of synthetic or sampled sounds, which follow the polyphonic pitch activations.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Rodrigo Schramm and Federico\ - \ Visi and André Brasil and Marcelo O Johann},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1302650},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {120--125},\n publisher = {Virginia Tech},\n title = {A polyphonic pitch tracking\ - \ embedded system for rapid instrument augmentation},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0027.pdf},\n\ - \ year = {2018}\n}\n" + ID: Mccaig2002 + abstract: 'Here we present 2Hearts, a music system controlled bythe heartbeats of + two people. As the players speak and touch, 2Hearts extracts meaningful variables + from their heartbeat signals. These variables are mapped to musical parameters, + conveying the changing patterns of tension and relaxation in the players'' relationship. + We describe the motivation for creating 2Hearts, observations from the prototypes + that have been built, and principles learnt in the ongoing development process.' + address: 'Dublin, Ireland' + author: 'Mccaig, Graeme and Fels, Sidney S.' + bibtex: "@inproceedings{Mccaig2002,\n abstract = {Here we present 2Hearts, a music\ + \ system controlled bythe heartbeats of two people. As the players speak and touch,\ + \ 2Hearts extracts meaningful variables from their heartbeat signals. These variables\ + \ are mapped to musical parameters, conveying the changing patterns of tension\ + \ and relaxation in the players' relationship. 
We describe the motivation for\ + \ creating 2Hearts, observations from the prototypes that have been built, and\ + \ principles learnt in the ongoing development process.},\n address = {Dublin,\ + \ Ireland},\n author = {Mccaig, Graeme and Fels, Sidney S.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176444},\n issn = {2220-4806},\n\ + \ keywords = {Heart Rate, Biosensor, Interactive Music, Non-Verbal Communication,\ + \ Affective Computing, Ambient Display},\n pages = {120--125},\n title = {Playing\ + \ on Heart-Strings: Experiences with the 2{H}earts System},\n url = {http://www.nime.org/proceedings/2002/nime2002_120.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302650 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176444 issn: 2220-4806 - month: June + keywords: 'Heart Rate, Biosensor, Interactive Music, Non-Verbal Communication, Affective + Computing, Ambient Display' pages: 120--125 - publisher: Virginia Tech - title: A polyphonic pitch tracking embedded system for rapid instrument augmentation - url: http://www.nime.org/proceedings/2018/nime2018_paper0027.pdf - year: 2018 + title: 'Playing on Heart-Strings: Experiences with the 2Hearts System' + url: http://www.nime.org/proceedings/2002/nime2002_120.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Tahiroglu2018 - abstract: 'This paper introduces various ways that idiomatic gestures emerge in - performance practice with new musical instruments. It demonstrates that idiomatic - gestures can play an important role in the development of personalized performance - practices that can be the basis for the development of style and expression. 
Three - detailed examples -- biocontrollers, accordion-inspired instruments, and a networked - intelligent controller -- illustrate how a complex suite of factors throughout - the design, composition and performance processes can influence the development - of idiomatic gestures. We argue that the explicit consideration of idiomatic gestures - throughout the life cycle of new instruments can facilitate the emergence of style - and give rise to performances that can develop rich layers of meaning.' - address: 'Blacksburg, Virginia, USA' - author: Koray Tahiroglu and Michael Gurevich and R. Benjamin Knapp - bibtex: "@inproceedings{Tahiroglu2018,\n abstract = {This paper introduces various\ - \ ways that idiomatic gestures emerge in performance practice with new musical\ - \ instruments. It demonstrates that idiomatic gestures can play an important role\ - \ in the development of personalized performance practices that can be the basis\ - \ for the development of style and expression. Three detailed examples -- biocontrollers,\ - \ accordion-inspired instruments, and a networked intelligent controller -- illustrate\ - \ how a complex suite of factors throughout the design, composition and performance\ - \ processes can influence the development of idiomatic gestures. We argue that\ - \ the explicit consideration of idiomatic gestures throughout the life cycle of\ - \ new instruments can facilitate the emergence of style and give rise to performances\ - \ that can develop rich layers of meaning.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Koray Tahiroglu and Michael Gurevich and R. 
Benjamin Knapp},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302701},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {126--131},\n publisher = {Virginia Tech},\n title\ - \ = {Contextualising Idiomatic Gestures in Musical Interactions with NIMEs},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0028.pdf},\n year\ - \ = {2018}\n}\n" + ID: McElligott2002 + abstract: 'In this paper we discuss the possibility of augmenting existing musical + performance by using a novel sensing device termed ''PegLeg''. This device interprets + the movements and motions of a musician during play by allowing the musician to + manipulate a sensor in three dimensions. A force sensitive surface allows us to + detect, interpret and interface the subtle but integral element of physical "effort" + in music playing. This device is designed to extend the musicians control over + any given instrument, granting an additional means of ''playing'' that would previously + have been impossible - granting an additional limb to extend their playing potential + - a PegLeg...' + address: 'Dublin, Ireland' + author: 'McElligott, Lisa and Dixon, Edward and Dillon, Michelle' + bibtex: "@inproceedings{McElligott2002,\n abstract = {In this paper we discuss the\ + \ possibility of augmenting existing musical performance by using a novel sensing\ + \ device termed 'PegLeg'. This device interprets the movements and motions of\ + \ a musician during play by allowing the musician to manipulate a sensor in three\ + \ dimensions. A force sensitive surface allows us to detect, interpret and interface\ + \ the subtle but integral element of physical \"effort\" in music playing. 
This\ + \ device is designed to extend the musicians control over any given instrument,\ + \ granting an additional means of 'playing' that would previously have been impossible\ + \ - granting an additional limb to extend their playing potential - a PegLeg...},\n\ + \ address = {Dublin, Ireland},\n author = {McElligott, Lisa and Dixon, Edward\ + \ and Dillon, Michelle},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ + \ = {10.5281/zenodo.1176446},\n issn = {2220-4806},\n keywords = {Gesture, weight\ + \ distribution, effort, expression, intent, movement, 3D sensing pressure, force,\ + \ sensor, resolution, control device, sound, music, input.},\n pages = {126--130},\n\ + \ title = {`PegLegs in Music' Processing the Effort Generated by Levels of Expressive\ + \ Gesturing in Music},\n url = {http://www.nime.org/proceedings/2002/nime2002_126.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302701 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176446 issn: 2220-4806 - month: June - pages: 126--131 - publisher: Virginia Tech - title: Contextualising Idiomatic Gestures in Musical Interactions with NIMEs - url: http://www.nime.org/proceedings/2018/nime2018_paper0028.pdf - year: 2018 + keywords: 'Gesture, weight distribution, effort, expression, intent, movement, 3D + sensing pressure, force, sensor, resolution, control device, sound, music, input.' 
+ pages: 126--130 + title: '`PegLegs in Music'' Processing the Effort Generated by Levels of Expressive + Gesturing in Music' + url: http://www.nime.org/proceedings/2002/nime2002_126.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Hantrakul2018 - abstract: 'Machine learning and deep learning has recently made a large impact in - the artistic community. In many of these applications however, the model is often - used to render the high dimensional output directly e.g. every individual pixel - in the final image. Humans arguably operate in much lower dimensional spaces during - the creative process e.g. the broad movements of a brush. In this paper, we design - a neural gesture system for music generation based around this concept. Instead - of directly generating audio, we train a Long Short Term Memory (LSTM) recurrent - neural network to generate instantaneous position and pressure on the Roli Lightpad - instrument. These generated coordinates in turn, give rise to the sonic output - defined in the synth engine. The system relies on learning these movements from - a musician who has already developed a palette of musical gestures idiomatic to - the Lightpad. Unlike many deep learning systems that render high dimensional output, - our low-dimensional system can be run in real-time, enabling the first real time - gestural duet of its kind between a player and a recurrent neural network on the - Lightpad instrument.' - address: 'Blacksburg, Virginia, USA' - author: Lamtharn Hantrakul - bibtex: "@inproceedings{Hantrakul2018,\n abstract = {Machine learning and deep learning\ - \ has recently made a large impact in the artistic community. In many of these\ - \ applications however, the model is often used to render the high dimensional\ - \ output directly e.g. every individual pixel in the final image. Humans arguably\ - \ operate in much lower dimensional spaces during the creative process e.g. the\ - \ broad movements of a brush. 
In this paper, we design a neural gesture system\ - \ for music generation based around this concept. Instead of directly generating\ - \ audio, we train a Long Short Term Memory (LSTM) recurrent neural network to\ - \ generate instantaneous position and pressure on the Roli Lightpad instrument.\ - \ These generated coordinates in turn, give rise to the sonic output defined in\ - \ the synth engine. The system relies on learning these movements from a musician\ - \ who has already developed a palette of musical gestures idiomatic to the Lightpad.\ - \ Unlike many deep learning systems that render high dimensional output, our low-dimensional\ - \ system can be run in real-time, enabling the first real time gestural duet of\ - \ its kind between a player and a recurrent neural network on the Lightpad instrument.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Lamtharn Hantrakul},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302703},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {132--137},\n publisher = {Virginia Tech},\n title\ - \ = {GestureRNN: A neural gesture system for the Roli Lightpad Block},\n url\ - \ = {http://www.nime.org/proceedings/2018/nime2018_paper0029.pdf},\n year = {2018}\n\ - }\n" + ID: Ng2002 + abstract: 'This paper briefly describes a number of performance interfaces under + the broad theme of Interactive Gesture Music (IGM). With a short introduction, + this paper discusses the main components of a Trans-Domain Mapping (TDM) framework, + and presents various prototypes developed under this framework, to translate meaningful + activities from one creative domain onto another, to provide real-time control + of musical events with physical movements. 
' + address: 'Dublin, Ireland' + author: 'Ng, Kia' + bibtex: "@inproceedings{Ng2002,\n abstract = {This paper briefly describes a number\ + \ of performance interfaces under the broad theme of Interactive Gesture Music\ + \ (IGM). With a short introduction, this paper discusses the main components of\ + \ a Trans-Domain Mapping (TDM) framework, and presents various prototypes developed\ + \ under this framework, to translate meaningful activities from one creative domain\ + \ onto another, to provide real-time control of musical events with physical movements.\ + \ },\n address = {Dublin, Ireland},\n author = {Ng, Kia},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176448},\n issn = {2220-4806},\n\ + \ keywords = {Gesture, Motion, Interactive, Performance, Music.},\n pages = {131--132},\n\ + \ title = {Interactive Gesture Music Performance Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_131.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302703 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176448 issn: 2220-4806 - month: June - pages: 132--137 - publisher: Virginia Tech - title: 'GestureRNN: A neural gesture system for the Roli Lightpad Block' - url: http://www.nime.org/proceedings/2018/nime2018_paper0029.pdf - year: 2018 + keywords: 'Gesture, Motion, Interactive, Performance, Music.' + pages: 131--132 + title: Interactive Gesture Music Performance Interface + url: http://www.nime.org/proceedings/2002/nime2002_131.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: DiDonato2018 - abstract: 'Myo Mapper is a free and open source cross-platform application to map - data from the gestural device Myo armband into Open Sound Control (OSC) messages. 
- It represents a `quick and easy'' solution for exploring the Myo''s potential - for realising new interfaces for musical expression. Together with details of - the software, this paper reports some applications in which Myo Mapper has been - successfully used and a qualitative evaluation. We then proposed guidelines for - using Myo data in interactive artworks based on insight gained from the works - described and the evaluation. Findings show that Myo Mapper empowers artists and - non-skilled developers to easily take advantage of Myo data high-level features - for realising interactive artistic works. It also facilitates the recognition - of poses and gestures beyond those included with the product by using third-party - interactive machine learning software.' - address: 'Blacksburg, Virginia, USA' - author: 'Di Donato, Balandino and Jamie Bullock and Atau Tanaka' - bibtex: "@inproceedings{DiDonato2018,\n abstract = {Myo Mapper is a free and open\ - \ source cross-platform application to map data from the gestural device Myo armband\ - \ into Open Sound Control (OSC) messages. It represents a `quick and easy' solution\ - \ for exploring the Myo's potential for realising new interfaces for musical expression.\ - \ Together with details of the software, this paper reports some applications\ - \ in which Myo Mapper has been successfully used and a qualitative evaluation.\ - \ We then proposed guidelines for using Myo data in interactive artworks based\ - \ on insight gained from the works described and the evaluation. Findings show\ - \ that Myo Mapper empowers artists and non-skilled developers to easily take advantage\ - \ of Myo data high-level features for realising interactive artistic works. 
It\ - \ also facilitates the recognition of poses and gestures beyond those included\ - \ with the product by using third-party interactive machine learning software.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Di Donato, Balandino and\ - \ Jamie Bullock and Atau Tanaka},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302705},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {138--143},\n publisher = {Virginia\ - \ Tech},\n title = {Myo Mapper: a Myo armband to OSC mapper},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0030.pdf},\n\ - \ year = {2018}\n}\n" + ID: Nichols2002 + abstract: 'This paper describes the development of a virtual violin bow haptic human-computer + interface, which senses bow position with encoders, to drive bowed-string physical + model synthesis, while engaging servomotors, to simulate the haptic feedback of + a violin bow on a string. Construction of the hardware and programming of the + software are discussed, as well as the motivation for building the instrument, + and its planned uses.' + address: 'Dublin, Ireland' + author: 'Nichols, Charles' + bibtex: "@inproceedings{Nichols2002,\n abstract = {This paper describes the development\ + \ of a virtual violin bow haptic human-computer interface, which senses bow position\ + \ with encoders, to drive bowed-string physical model synthesis, while engaging\ + \ servomotors, to simulate the haptic feedback of a violin bow on a string. 
Construction\ + \ of the hardware and programming of the software are discussed, as well as the\ + \ motivation for building the instrument, and its planned uses.},\n address =\ + \ {Dublin, Ireland},\n author = {Nichols, Charles},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176450},\n issn = {2220-4806},\n\ + \ keywords = {bow, controller, haptic, hci, interface, violin},\n pages = {133--136},\n\ + \ title = {The vBow: Development of a Virtual Violin Bow Haptic Human-Computer\ + \ Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_133.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302705 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176450 issn: 2220-4806 - month: June - pages: 138--143 - publisher: Virginia Tech - title: 'Myo Mapper: a Myo armband to OSC mapper' - url: http://www.nime.org/proceedings/2018/nime2018_paper0030.pdf - year: 2018 + keywords: 'bow, controller, haptic, hci, interface, violin' + pages: 133--136 + title: 'The vBow: Development of a Virtual Violin Bow Haptic Human-Computer Interface' + url: http://www.nime.org/proceedings/2002/nime2002_133.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Visi2018 - abstract: 'We present modosc, a set of Max abstractions designed for computing motion - descriptors from raw motion capture data in real time. The library contains methods - for extracting descriptors useful for expressive movement analysis and sonic interaction - design. modosc is designed to address the data handling and synchronization issues - that often arise when working with complex marker sets. 
This is achieved by adopting - a multiparadigm approach facilitated by odot and Open Sound Control to overcome - some of the limitations of conventional Max programming, and structure incoming - and outgoing data streams in a meaningful and easily accessible manner. After - describing the contents of the library and how data streams are structured and - processed, we report on a sonic interaction design use case involving motion feature - extraction and machine learning.' - address: 'Blacksburg, Virginia, USA' - author: Federico Visi and Luke Dahl - bibtex: "@inproceedings{Visi2018,\n abstract = {We present modosc, a set of Max\ - \ abstractions designed for computing motion descriptors from raw motion capture\ - \ data in real time. The library contains methods for extracting descriptors useful\ - \ for expressive movement analysis and sonic interaction design. modosc is designed\ - \ to address the data handling and synchronization issues that often arise when\ - \ working with complex marker sets. This is achieved by adopting a multiparadigm\ - \ approach facilitated by odot and Open Sound Control to overcome some of the\ - \ limitations of conventional Max programming, and structure incoming and outgoing\ - \ data streams in a meaningful and easily accessible manner. 
After describing\ - \ the contents of the library and how data streams are structured and processed,\ - \ we report on a sonic interaction design use case involving motion feature extraction\ - \ and machine learning.},\n address = {Blacksburg, Virginia, USA},\n author =\ - \ {Federico Visi and Luke Dahl},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302707},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {144--147},\n publisher = {Virginia\ - \ Tech},\n title = {Real-Time Motion Capture Analysis and Music Interaction with\ - \ the Modosc Descriptor Library},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0031.pdf},\n\ - \ year = {2018}\n}\n" + ID: Oboe2002 + abstract: 'The design of a virtual keyboard, capable of reproducing the tactile + feedback of several musical instruments is reported. The key is driven by a direct + drive motor, which allows friction free operations. The force to be generated + by the motor is calculated in real time by a dynamic simulator, which contains + the model of mechanisms'' components and constraints. Each model is tuned on the + basis of measurements performed on the real system. So far, grand piano action, + harpsichord and Hammond organ have been implemented successfully on the system + presented here. ' + address: 'Dublin, Ireland' + author: 'Oboe, Roberto and De Poli, Giovanni' + bibtex: "@inproceedings{Oboe2002,\n abstract = {The design of a virtual keyboard,\ + \ capable of reproducing the tactile feedback of several musical instruments is\ + \ reported. The key is driven by a direct drive motor, which allows friction free\ + \ operations. The force to be generated by the motor is calculated in real time\ + \ by a dynamic simulator, which contains the model of mechanisms' components and\ + \ constraints. 
Each model is tuned on the basis of measurements performed on the\ + \ real system. So far, grand piano action, harpsichord and Hammond organ have\ + \ been implemented successfully on the system presented here. },\n address = {Dublin,\ + \ Ireland},\n author = {Oboe, Roberto and De Poli, Giovanni},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176452},\n issn = {2220-4806},\n\ + \ keywords = {Virtual mechanisms, dynamic simulation},\n pages = {137--142},\n\ + \ title = {Multi-instrument Virtual Keyboard -- The MIKEY Project},\n url = {http://www.nime.org/proceedings/2002/nime2002_137.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302707 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176452 issn: 2220-4806 - month: June - pages: 144--147 - publisher: Virginia Tech - title: Real-Time Motion Capture Analysis and Music Interaction with the Modosc Descriptor - Library - url: http://www.nime.org/proceedings/2018/nime2018_paper0031.pdf - year: 2018 + keywords: 'Virtual mechanisms, dynamic simulation' + pages: 137--142 + title: Multi-instrument Virtual Keyboard -- The MIKEY Project + url: http://www.nime.org/proceedings/2002/nime2002_137.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Arslan2018 - abstract: 'Mobile devices have been a promising platform for musical performance - thanks to the various sensors readily available on board. In particular, mobile - cameras can provide rich input as they can capture a wide variety of user gestures - or environment dynamics. However, this raw camera input only provides continuous - parameters and requires expensive computation. 
In this paper, we propose to combine - motion/gesture input with the touch input, in order to filter movement information - both temporally and spatially, thus increasing expressiveness while reducing computation - time. We present a design space which demonstrates the diversity of interactions - that our technique enables. We also report the results of a user study in which - we observe how musicians appropriate the interaction space with an example instrument.' - address: 'Blacksburg, Virginia, USA' - author: Cagan Arslan and Florent Berthaut and Jean Martinet and Ioan Marius Bilasco - and Laurent Grisoni - bibtex: "@inproceedings{Arslan2018,\n abstract = {Mobile devices have been a promising\ - \ platform for musical performance thanks to the various sensors readily available\ - \ on board. In particular, mobile cameras can provide rich input as they can capture\ - \ a wide variety of user gestures or environment dynamics. However, this raw camera\ - \ input only provides continuous parameters and requires expensive computation.\ - \ In this paper, we propose to combine motion/gesture input with the touch input,\ - \ in order to filter movement information both temporally and spatially, thus\ - \ increasing expressiveness while reducing computation time. 
We present a design\ - \ space which demonstrates the diversity of interactions that our technique enables.\ - \ We also report the results of a user study in which we observe how musicians\ - \ appropriate the interaction space with an example instrument.},\n address =\ - \ {Blacksburg, Virginia, USA},\n author = {Cagan Arslan and Florent Berthaut and\ - \ Jean Martinet and Ioan Marius Bilasco and Laurent Grisoni},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302709},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {148--151},\n publisher = {Virginia Tech},\n title = {The Phone with\ - \ the Flow: Combining Touch + Optical Flow in Mobile Instruments},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0032.pdf},\n\ - \ year = {2018}\n}\n" + ID: Paine2002 + abstract: 'Interactivity has become a major consideration in the development of + a contemporary art practice that engages with the proliferation of computer based + technologies. Keywords ' + address: 'Dublin, Ireland' + author: 'Paine, Garth' + bibtex: "@inproceedings{Paine2002,\n abstract = {Interactivity has become a major\ + \ consideration in the development of a contemporary art practice that engages\ + \ with the proliferation of computer based technologies. 
Keywords },\n address\ + \ = {Dublin, Ireland},\n author = {Paine, Garth},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n date\ + \ = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176454},\n issn = {2220-4806},\n\ + \ keywords = {are your choice.},\n pages = {143--144},\n title = {GESTATION},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_143.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302709 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176454 issn: 2220-4806 - month: June - pages: 148--151 - publisher: Virginia Tech - title: 'The Phone with the Flow: Combining Touch + Optical Flow in Mobile Instruments' - url: http://www.nime.org/proceedings/2018/nime2018_paper0032.pdf - year: 2018 + keywords: are your choice. + pages: 143--144 + title: GESTATION + url: http://www.nime.org/proceedings/2002/nime2002_143.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Engeln2018 - abstract: 'Many digital interfaces for audio effects still resemble racks and cases - of their hardware counterparts. For instance, DSP-algorithms are often adjusted - via direct value input, sliders, or knobs. While recent research has started to - experiment with the capabilities offered by modern interfaces, there are no examples - for productive applications such as audio-morphing. Audio-morphing as a special - field of DSP has a high complexity for the morph itself and for the parametrization - of the transition between two sources. We propose a multi-touch enhanced interface - for visual audiomorphing. This interface visualizes the internal processing and - allows direct manipulation of the morphing parameters in the visualization. 
Using - multi-touch gestures to manipulate audio-morphing in a visual way, sound design - and music production becomes more unrestricted and creative.' - address: 'Blacksburg, Virginia, USA' - author: Lars Engeln and Dietrich Kammer and Leon Brandt and Rainer Groh - bibtex: "@inproceedings{Engeln2018,\n abstract = {Many digital interfaces for audio\ - \ effects still resemble racks and cases of their hardware counterparts. For instance,\ - \ DSP-algorithms are often adjusted via direct value input, sliders, or knobs.\ - \ While recent research has started to experiment with the capabilities offered\ - \ by modern interfaces, there are no examples for productive applications such\ - \ as audio-morphing. Audio-morphing as a special field of DSP has a high complexity\ - \ for the morph itself and for the parametrization of the transition between two\ - \ sources. We propose a multi-touch enhanced interface for visual audiomorphing.\ - \ This interface visualizes the internal processing and allows direct manipulation\ - \ of the morphing parameters in the visualization. Using multi-touch gestures\ - \ to manipulate audio-morphing in a visual way, sound design and music production\ - \ becomes more unrestricted and creative.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Lars Engeln and Dietrich Kammer and Leon Brandt and Rainer\ - \ Groh},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1302711},\n editor = {Luke\ - \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ - \ {2220-4806},\n month = {June},\n pages = {152--155},\n publisher = {Virginia\ - \ Tech},\n title = {Multi-Touch Enhanced Visual Audio-Morphing},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0033.pdf},\n\ - \ year = {2018}\n}\n" + ID: Pardue2002 + abstract: 'Passive RF Tagging can provide an attractive medium for development of + free-gesture musical interfaces. 
This was initially explored in our Musical Trinkets + installation, which used magnetically-coupled resonant LC circuits to identify + and track the position of multiple objects in real-time. Manipulation of these + objects in free space over a read coil triggered simple musical interactions. + Musical Navigatrics builds upon this success with new more sensitive and stable + sensing, multi-dimensional response, and vastly more intricate musical mappings + that enable full musical exploration of free space through the dynamic use and + control of arpeggiatiation and effects. The addition of basic sequencing abilities + also allows for the building of complex, layered musical interactions in a uniquely + easy and intuitive manner. ' + address: 'Dublin, Ireland' + author: 'Pardue, Laurel S. and Paradiso, Joseph A.' + bibtex: "@inproceedings{Pardue2002,\n abstract = {Passive RF Tagging can provide\ + \ an attractive medium for development of free-gesture musical interfaces. This\ + \ was initially explored in our Musical Trinkets installation, which used magnetically-coupled\ + \ resonant LC circuits to identify and track the position of multiple objects\ + \ in real-time. Manipulation of these objects in free space over a read coil triggered\ + \ simple musical interactions. Musical Navigatrics builds upon this success with\ + \ new more sensitive and stable sensing, multi-dimensional response, and vastly\ + \ more intricate musical mappings that enable full musical exploration of free\ + \ space through the dynamic use and control of arpeggiatiation and effects. The\ + \ addition of basic sequencing abilities also allows for the building of complex,\ + \ layered musical interactions in a uniquely easy and intuitive manner. },\n address\ + \ = {Dublin, Ireland},\n author = {Pardue, Laurel S. 
and Paradiso, Joseph A.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176456},\n\ + \ issn = {2220-4806},\n keywords = {passive tag, position tracking, music sequencer\ + \ interface},\n pages = {145--147},\n title = {Musical Navigatrics: New Musical\ + \ Interactions with Passive Magnetic Tags},\n url = {http://www.nime.org/proceedings/2002/nime2002_145.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302711 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176456 issn: 2220-4806 - month: June - pages: 152--155 - publisher: Virginia Tech - title: Multi-Touch Enhanced Visual Audio-Morphing - url: http://www.nime.org/proceedings/2018/nime2018_paper0033.pdf - year: 2018 + keywords: 'passive tag, position tracking, music sequencer interface' + pages: 145--147 + title: 'Musical Navigatrics: New Musical Interactions with Passive Magnetic Tags' + url: http://www.nime.org/proceedings/2002/nime2002_145.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Çamcı2018 - abstract: 'We describe an innovative multi-touch performance tool for real-time - granular synthesis based on hand-drawn waveform paths. GrainTrain is a cross-platform - web application that can run on both desktop and mobile computers, including tablets - and phones. In this paper, we first offer an analysis of existing granular synthesis - tools from an interaction stand-point, and outline a taxonomy of common interaction - paradigms used in their designs. We then delineate the implementation of GrainTrain, - and its unique approach to controlling real-time granular synthesis. We describe - practical scenarios in which GrainTrain enables new performance possibilities. 
- Finally, we discuss the results of a user study, and provide reports from expert - users who evaluated GrainTrain.' - address: 'Blacksburg, Virginia, USA' - author: Anıl Çamcı - bibtex: "@inproceedings{Çamcı2018,\n abstract = {We describe an innovative multi-touch\ - \ performance tool for real-time granular synthesis based on hand-drawn waveform\ - \ paths. GrainTrain is a cross-platform web application that can run on both desktop\ - \ and mobile computers, including tablets and phones. In this paper, we first\ - \ offer an analysis of existing granular synthesis tools from an interaction stand-point,\ - \ and outline a taxonomy of common interaction paradigms used in their designs.\ - \ We then delineate the implementation of GrainTrain, and its unique approach\ - \ to controlling real-time granular synthesis. We describe practical scenarios\ - \ in which GrainTrain enables new performance possibilities. Finally, we discuss\ - \ the results of a user study, and provide reports from expert users who evaluated\ - \ GrainTrain.},\n address = {Blacksburg, Virginia, USA},\n author = {Anıl Çamcı},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302529},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {156--161},\n publisher = {Virginia Tech},\n title\ - \ = {GrainTrain: A Hand-drawn Multi-touch Interface for Granular Synthesis},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0034.pdf},\n year\ - \ = {2018}\n}\n" + ID: Patten2002 + abstract: 'We present Audiopad, an interface for musical performance that aims to + combine the modularity of knob based controllers with the expressive character + of multidimensional tracking interfaces. The performer''s manipulations of physical + pucks on a tabletop control a real-time synthesis process. 
The pucks are embedded + with LC tags that the system tracks in two dimensions with a series of specially + shaped antennae. The system projects graphical information on and around the pucks + to give the performer sophisticated control over the synthesis process.' + address: 'Dublin, Ireland' + author: 'Patten, James and Recht, Ben and Ishii, Hiroshi' + bibtex: "@inproceedings{Patten2002,\n abstract = {We present Audiopad, an interface\ + \ for musical performance that aims to combine the modularity of knob based controllers\ + \ with the expressive character of multidimensional tracking interfaces. The performer's\ + \ manipulations of physical pucks on a tabletop control a real-time synthesis\ + \ process. The pucks are embedded with LC tags that the system tracks in two dimensions\ + \ with a series of specially shaped antennae. The system projects graphical information\ + \ on and around the pucks to give the performer sophisticated control over the\ + \ synthesis process.},\n address = {Dublin, Ireland},\n author = {Patten, James\ + \ and Recht, Ben and Ishii, Hiroshi},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ + \ doi = {10.5281/zenodo.1176458},\n issn = {2220-4806},\n keywords = {RF tagging,\ + \ MIDI, tangible interfaces, musical controllers, object tracking},\n pages =\ + \ {148--153},\n title = {Audiopad: A Tag-based Interface for Musical Performance},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_148.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302529 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176458 issn: 2220-4806 - month: June - pages: 156--161 - publisher: Virginia Tech - title: 'GrainTrain: A Hand-drawn Multi-touch Interface for Granular Synthesis' - 
url: http://www.nime.org/proceedings/2018/nime2018_paper0034.pdf - year: 2018 + keywords: 'RF tagging, MIDI, tangible interfaces, musical controllers, object tracking' + pages: 148--153 + title: 'Audiopad: A Tag-based Interface for Musical Performance' + url: http://www.nime.org/proceedings/2002/nime2002_148.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: xia2018 - abstract: 'Traditional instrument learning procedure is time-consuming; it begins - with learning music notations and necessitates layers of sophistication and abstraction. - Haptic interfaces open another door to the music world for the vast majority of - talentless beginners when traditional training methods are not effective. However, - the existing haptic interfaces can only be used to learn specially designed pieces - with great restrictions on duration and pitch range due to the fact that it is - only feasible to guide a part of performance motion haptically for most instruments. - Our study breaks such restrictions using a semi-haptic guidance method. For the - first time, the pitch range of the haptically learned pieces go beyond an octave - (with the fingering motion covers most of the possible choices) and the duration - of learned pieces cover a whole phrase. This significant change leads to a more - realistic instrument learning process. Experiments show that semi-haptic interface - is effective as long as learners are not “tone deaf”. Using our prototype device, - the learning rate is about 30% faster compared with learning from videos.' - address: 'Blacksburg, Virginia, USA' - author: gus xia and Roger B. Dannenberg - bibtex: "@inproceedings{xia2018,\n abstract = {Traditional instrument learning procedure\ - \ is time-consuming; it begins with learning music notations and necessitates\ - \ layers of sophistication and abstraction. 
Haptic interfaces open another door\ - \ to the music world for the vast majority of talentless beginners when traditional\ - \ training methods are not effective. However, the existing haptic interfaces\ - \ can only be used to learn specially designed pieces with great restrictions\ - \ on duration and pitch range due to the fact that it is only feasible to guide\ - \ a part of performance motion haptically for most instruments. Our study breaks\ - \ such restrictions using a semi-haptic guidance method. For the first time, the\ - \ pitch range of the haptically learned pieces go beyond an octave (with the fingering\ - \ motion covers most of the possible choices) and the duration of learned pieces\ - \ cover a whole phrase. This significant change leads to a more realistic instrument\ - \ learning process. Experiments show that semi-haptic interface is effective as\ - \ long as learners are not “tone deaf”. Using our prototype device, the learning\ - \ rate is about 30% faster compared with learning from videos.},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {gus xia and Roger B. Dannenberg},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302531},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {162--167},\n publisher = {Virginia Tech},\n title = {ShIFT: A Semi-haptic\ - \ Interface for Flute Tutoring},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0035.pdf},\n\ - \ year = {2018}\n}\n" + ID: Wynnychuk2002 + abstract: 'The demo sutoolz 1.0 alpha is a 3D software interface for music performance. 
+ By navigating through a 3D virtual architecture the musician uses a set of 3D + tools to interact with the virtual environment: gameplay zones, speaker volumes, + speaker volume membranes, speaker navigation volumes and 3D multi-band FFT visualization + systems.' + address: 'Dublin, Ireland' + author: 'Wynnychuk, Jordan and Porcher, Richard and Brajovic, Lucas and Brajovic, + Marko and Platas, Nacho' + bibtex: "@inproceedings{Wynnychuk2002,\n abstract = {The demo sutoolz 1.0 alpha\ + \ is a 3D software interface for music performance. By navigating through a 3D\ + \ virtual architecture the musician uses a set of 3D tools to interact with the\ + \ virtual environment: gameplay zones, speaker volumes, speaker volume membranes,\ + \ speaker navigation volumes and 3D multi-band FFT visualization systems.},\n\ + \ address = {Dublin, Ireland},\n author = {Wynnychuk, Jordan and Porcher, Richard\ + \ and Brajovic, Lucas and Brajovic, Marko and Platas, Nacho},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176478},\n issn = {2220-4806},\n\ + \ keywords = {3D music interface, 3D sound, analogue input controllers, audio\ + \ localization, audio visualization, digital architecture, hybrid environments,\ + \ video game navigation},\n pages = {154--155},\n title = {sutoolz 1.0 alpha :\ + \ {3D} Software Music Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_154.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302531 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176478 issn: 2220-4806 - month: June - pages: 162--167 - publisher: Virginia Tech - title: 'ShIFT: A Semi-haptic Interface for Flute Tutoring' - url: 
http://www.nime.org/proceedings/2018/nime2018_paper0035.pdf - year: 2018 + keywords: '3D music interface, 3D sound, analogue input controllers, audio localization, + audio visualization, digital architecture, hybrid environments, video game navigation' + pages: 154--155 + title: 'sutoolz 1.0 alpha : 3D Software Music Interface' + url: http://www.nime.org/proceedings/2002/nime2002_154.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Morreale2018 - abstract: 'The term `NIME'' --- New Interfaces for Musical Expression --- has come - to signify both technical and cultural characteristics. Not all new musical instruments - are NIMEs, and not all NIMEs are defined as such for the sole ephemeral condition - of being new. So, what are the typical characteristics of NIMEs and what are their - roles in performers'' practice? Is there a typical NIME repertoire? This paper - aims to address these questions with a bottom up approach. We reflect on the answers - of 78 NIME performers to an online questionnaire discussing their performance - experience with NIMEs. The results of our investigation explore the role of NIMEs - in the performers'' practice and identify the values that are common among performers. - We find that most NIMEs are viewed as exploratory tools created by and for performers, - and that they are constantly in development and almost in no occasions in a finite - state. The findings of our survey also reflect upon virtuosity with NIMEs, whose - peculiar performance practice results in learning trajectories that often do not - lead to the development of virtuosity as it is commonly understood in traditional - performance.' - address: 'Blacksburg, Virginia, USA' - author: Fabio Morreale and Andrew P. 
McPherson and Marcelo Wanderley - bibtex: "@inproceedings{Morreale2018,\n abstract = {The term `NIME' --- New Interfaces\ - \ for Musical Expression --- has come to signify both technical and cultural characteristics.\ - \ Not all new musical instruments are NIMEs, and not all NIMEs are defined as\ - \ such for the sole ephemeral condition of being new. So, what are the typical\ - \ characteristics of NIMEs and what are their roles in performers' practice? Is\ - \ there a typical NIME repertoire? This paper aims to address these questions\ - \ with a bottom up approach. We reflect on the answers of 78 NIME performers to\ - \ an online questionnaire discussing their performance experience with NIMEs.\ - \ The results of our investigation explore the role of NIMEs in the performers'\ - \ practice and identify the values that are common among performers. We find that\ - \ most NIMEs are viewed as exploratory tools created by and for performers, and\ - \ that they are constantly in development and almost in no occasions in a finite\ - \ state. The findings of our survey also reflect upon virtuosity with NIMEs, whose\ - \ peculiar performance practice results in learning trajectories that often do\ - \ not lead to the development of virtuosity as it is commonly understood in traditional\ - \ performance.},\n address = {Blacksburg, Virginia, USA},\n author = {Fabio Morreale\ - \ and Andrew P. 
McPherson and Marcelo Wanderley},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1302533},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {168--173},\n publisher = {Virginia Tech},\n title = {NIME Identity from the\ - \ Performer's Perspective},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0036.pdf},\n\ - \ year = {2018}\n}\n" + ID: Schnell2002 + abstract: 'In this paper, we develop the concept of "composed instruments". We will + look at this idea from two perspectives: the design of computer systems in the + context of live performed music and musicological considerations. A historical + context is developed. Examples will be drawn from recent compositions. Finally + basic concepts from computer science will be examined for their relation ship + to this concept. ' + address: 'Dublin, Ireland' + author: 'Schnell, Norbert and Battier, Marc' + bibtex: "@inproceedings{Schnell2002,\n abstract = {In this paper, we develop the\ + \ concept of \"composed instruments\". We will look at this idea from two perspectives:\ + \ the design of computer systems in the context of live performed music and musicological\ + \ considerations. A historical context is developed. Examples will be drawn from\ + \ recent compositions. Finally basic concepts from computer science will be examined\ + \ for their relation ship to this concept. 
},\n address = {Dublin, Ireland},\n\ + \ author = {Schnell, Norbert and Battier, Marc},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n date\ + \ = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176460},\n issn = {2220-4806},\n\ + \ keywords = {Instruments, musicology, composed instrument, Theremin, Martenot,\ + \ interaction, streams, MAX.},\n pages = {156--160},\n title = {Introducing Composed\ + \ Instruments, Technical and Musicological Implications},\n url = {http://www.nime.org/proceedings/2002/nime2002_156.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302533 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176460 issn: 2220-4806 - month: June - pages: 168--173 - publisher: Virginia Tech - title: NIME Identity from the Performer's Perspective - url: http://www.nime.org/proceedings/2018/nime2018_paper0036.pdf - year: 2018 + keywords: 'Instruments, musicology, composed instrument, Theremin, Martenot, interaction, + streams, MAX.' + pages: 156--160 + title: 'Introducing Composed Instruments, Technical and Musicological Implications' + url: http://www.nime.org/proceedings/2002/nime2002_156.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Xambó2018 - abstract: 'In recent years, there has been an increase in awareness of the underrepresentation - of women in the sound and music computing fields. The New Interfaces for Musical - Expression (NIME) conference is not an exception, with a number of open questions - remaining around the issue. In the present paper, we study the presence and evolution - over time of women authors in NIME since the beginning of the conference in 2001 - until 2017. 
We discuss the results of such a gender imbalance and potential solutions - by summarizing the actions taken by a number of worldwide initiatives that have - put an effort into making women''s work visible in our field, with a particular - emphasis on Women in Music Tech (WiMT), a student-led organization that aims to - encourage more women to join music technology, as a case study. We conclude with - a hope for an improvement in the representation of women in NIME by presenting - WiNIME, a public online database that details who are the women authors in NIME.' - address: 'Blacksburg, Virginia, USA' - author: Anna Xambó - bibtex: "@inproceedings{Xambó2018,\n abstract = {In recent years, there has been\ - \ an increase in awareness of the underrepresentation of women in the sound and\ - \ music computing fields. The New Interfaces for Musical Expression (NIME) conference\ - \ is not an exception, with a number of open questions remaining around the issue.\ - \ In the present paper, we study the presence and evolution over time of women\ - \ authors in NIME since the beginning of the conference in 2001 until 2017. We\ - \ discuss the results of such a gender imbalance and potential solutions by summarizing\ - \ the actions taken by a number of worldwide initiatives that have put an effort\ - \ into making women's work visible in our field, with a particular emphasis on\ - \ Women in Music Tech (WiMT), a student-led organization that aims to encourage\ - \ more women to join music technology, as a case study. 
We conclude with a hope\ - \ for an improvement in the representation of women in NIME by presenting WiNIME,\ - \ a public online database that details who are the women authors in NIME.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Anna Xambó},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302535},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {174--177},\n publisher = {Virginia Tech},\n title\ - \ = {Who Are the Women Authors in NIME?–Improving Gender Balance in NIME Research},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0037.pdf},\n year\ - \ = {2018}\n}\n" + ID: Smyth2002 + abstract: 'The cicada uses a rapid sequence of buckling ribs to initiate and sustain + vibrations in its tymbal plate (the primary mechanical resonator in the cicada''s + sound production system). The tymbalimba, a music controller based on this same + mechanism, has a row of 4 convex aluminum ribs (ason the cicada''s tymbal) arranged + much like the keys on a calimba. Each rib is spring loaded and capable of snapping + down into a V-shape (a motion referred to as buckling), under the downward force + of the user''s finger. This energy generated by the buckling motion is measured + by an accelerometer located under each rib and used as the input to a physical + model.' + address: 'Dublin, Ireland' + author: 'Smyth, Tamara and Smith, Julius O.' + bibtex: "@inproceedings{Smyth2002,\n abstract = {The cicada uses a rapid sequence\ + \ of buckling ribs to initiate and sustain vibrations in its tymbal plate (the\ + \ primary mechanical resonator in the cicada's sound production system). The tymbalimba,\ + \ a music controller based on this same mechanism, has a row of 4 convex aluminum\ + \ ribs (ason the cicada's tymbal) arranged much like the keys on a calimba. 
Each\ + \ rib is spring loaded and capable of snapping down into a V-shape (a motion referred\ + \ to as buckling), under the downward force of the user's finger. This energy\ + \ generated by the buckling motion is measured by an accelerometer located under\ + \ each rib and used as the input to a physical model.},\n address = {Dublin, Ireland},\n\ + \ author = {Smyth, Tamara and Smith, Julius O.},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n date\ + \ = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176462},\n issn = {2220-4806},\n\ + \ keywords = {Bioacoustics, Physical Modeling, Controllers, Cicada, Buckling mechanism.},\n\ + \ pages = {24--27},\n title = {Creating Sustained Tones with the Cicada's Rapid\ + \ Sequential Buckling Mechanism},\n url = {http://www.nime.org/proceedings/2002/nime2002_161.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302535 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176462 issn: 2220-4806 - month: June - pages: 174--177 - publisher: Virginia Tech - title: 'Who Are the Women Authors in NIME?–Improving Gender Balance in NIME Research' - url: http://www.nime.org/proceedings/2018/nime2018_paper0037.pdf - year: 2018 + keywords: 'Bioacoustics, Physical Modeling, Controllers, Cicada, Buckling mechanism.' + pages: 24--27 + title: Creating Sustained Tones with the Cicada's Rapid Sequential Buckling Mechanism + url: http://www.nime.org/proceedings/2002/nime2002_161.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Reid2018 - abstract: 'This paper presents a collection of hardware-based technologies for live - performance developed by women over the last few decades. The field of music technology - and interface design has a significant gender imbalance, with men greatly outnumbering - women. 
The purpose of this paper is to promote the visibility and representation - of women in this field, and to encourage discussion on the importance of mentorship - and role models for young women and girls in music technology.' - address: 'Blacksburg, Virginia, USA' - author: Sarah Reid and Sara Sithi-Amnuai and Ajay Kapur - bibtex: "@inproceedings{Reid2018,\n abstract = {This paper presents a collection\ - \ of hardware-based technologies for live performance developed by women over\ - \ the last few decades. The field of music technology and interface design has\ - \ a significant gender imbalance, with men greatly outnumbering women. The purpose\ - \ of this paper is to promote the visibility and representation of women in this\ - \ field, and to encourage discussion on the importance of mentorship and role\ - \ models for young women and girls in music technology.},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Sarah Reid and Sara Sithi-Amnuai and Ajay Kapur},\n\ + ID: Stanza2002 + abstract: 'Amorphoscapes by Stanza are interactive, generative, audio visual, digital + paintings and drawings created specifically for the internet. This is interactive + art on the Internet, incorporating generative sounds and 3D imaging.' + address: 'Dublin, Ireland' + author: Stanza + bibtex: "@inproceedings{Stanza2002,\n abstract = {Amorphoscapes by Stanza are interactive,\ + \ generative, audio visual, digital paintings and drawings created specifically\ + \ for the internet. 
This is interactive art on the Internet, incorporating generative\ + \ sounds and 3D imaging.},\n address = {Dublin, Ireland},\n author = {Stanza},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302537},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {178--183},\n publisher = {Virginia Tech},\n title\ - \ = {Women Who Build Things: Gestural Controllers, Augmented Instruments, and\ - \ Musical Mechatronics},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0038.pdf},\n\ - \ year = {2018}\n}\n" + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176386},\n\ + \ issn = {2220-4806},\n pages = {165--166},\n title = {Amorphoscapes \\& Soundtoys},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_165.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302537 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176386 issn: 2220-4806 - month: June - pages: 178--183 - publisher: Virginia Tech - title: 'Women Who Build Things: Gestural Controllers, Augmented Instruments, and - Musical Mechatronics' - url: http://www.nime.org/proceedings/2018/nime2018_paper0038.pdf - year: 2018 + pages: 165--166 + title: Amorphoscapes & Soundtoys + url: http://www.nime.org/proceedings/2002/nime2002_165.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Jack2018 - abstract: 'An oft-cited aspiration of digital musical instrument (DMI) design is - to create instruments, in the words of Wessel and Wright, with a ‘low entry fee - and no ceiling on virtuosity''. 
This is a difficult task to achieve: many new - instruments are aimed at either the expert or amateur musician, with few instruments - catering for both. There is often a balance between learning curve and the nuance - of musical control in DMIs. In this paper we present a study conducted with non-musicians - and guitarists playing guitar-derivative DMIs with variable levels of control - intimacy: how the richness and nuance of a performer''s movement translates into - the musical output of an instrument. Findings suggest a significant difference - in preference for levels of control intimacy between the guitarists and the non-musicians. - In particular, the guitarists unanimously preferred the richest of the two settings - whereas the non-musicians generally preferred the setting with lower richness. - This difference is notable because it is often taken as a given that increasing - richness is a way to make instruments more enjoyable to play, however, this result - only seems to be true for expert players.' - address: 'Blacksburg, Virginia, USA' - author: Robert H Jack and Jacob Harrison and Fabio Morreale and Andrew P. McPherson - bibtex: "@inproceedings{Jack2018,\n abstract = {An oft-cited aspiration of digital\ - \ musical instrument (DMI) design is to create instruments, in the words of Wessel\ - \ and Wright, with a ‘low entry fee and no ceiling on virtuosity'. This is a difficult\ - \ task to achieve: many new instruments are aimed at either the expert or amateur\ - \ musician, with few instruments catering for both. There is often a balance between\ - \ learning curve and the nuance of musical control in DMIs. 
In this paper we present\ - \ a study conducted with non-musicians and guitarists playing guitar-derivative\ - \ DMIs with variable levels of control intimacy: how the richness and nuance of\ - \ a performer's movement translates into the musical output of an instrument.\ - \ Findings suggest a significant difference in preference for levels of control\ - \ intimacy between the guitarists and the non-musicians. In particular, the guitarists\ - \ unanimously preferred the richest of the two settings whereas the non-musicians\ - \ generally preferred the setting with lower richness. This difference is notable\ - \ because it is often taken as a given that increasing richness is a way to make\ - \ instruments more enjoyable to play, however, this result only seems to be true\ - \ for expert players.},\n address = {Blacksburg, Virginia, USA},\n author = {Robert\ - \ H Jack and Jacob Harrison and Fabio Morreale and Andrew P. McPherson},\n booktitle\ + ID: Johannes2002 + abstract: 'This paper describes the hardware and the software of a computer-based + doppler-sonar system for movement detection. The design is focused on simplicity + and lowcost do-it-yourself construction. 
' + address: 'Dublin, Ireland' + author: 'Johannes, Taelman' + bibtex: "@inproceedings{Johannes2002,\n abstract = {This paper describes the hardware\ + \ and the software of a computer-based doppler-sonar system for movement detection.\ + \ The design is focused on simplicity and lowcost do-it-yourself construction.\ + \ },\n address = {Dublin, Ireland},\n author = {Johannes, Taelman},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302539},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {184--189},\n publisher = {Virginia Tech},\n title\ - \ = {Democratising {DMI}s: the relationship of expertise and control intimacy},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0039.pdf},\n year\ - \ = {2018}\n}\n" + \ Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176430},\n\ + \ issn = {2220-4806},\n keywords = {sonar},\n pages = {167--170},\n title = {A\ + \ Low-cost Sonar for Unobtrusive Man-machine Interfacing},\n url = {http://www.nime.org/proceedings/2002/nime2002_167.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302539 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176430 issn: 2220-4806 - month: June - pages: 184--189 - publisher: Virginia Tech - title: 'Democratising DMIs: the relationship of expertise and control intimacy' - url: http://www.nime.org/proceedings/2018/nime2018_paper0039.pdf - year: 2018 + keywords: sonar + pages: 167--170 + title: A Low-cost Sonar for Unobtrusive Man-machine Interfacing + url: http://www.nime.org/proceedings/2002/nime2002_167.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: MarquezBorbon2018 - abstract: 'This paper addresses the 
prevailing longevity problem of digital musical - instruments (DMIs) in NIME research and design by proposing a holistic system - design approach. Despite recent efforts to examine the main contributing factors - of DMI falling into obsolescence, such attempts to remedy this issue largely place - focus on the artifacts establishing themselves, their design processes and technologies. - However, few existing studies have attempted to proactively build a community - around technological platforms for DMIs, whilst bearing in mind the social dynamics - and activities necessary for a budding community. We observe that such attempts - while important in their undertaking, are limited in their scope. In this paper - we will discuss that achieving some sort of longevity must be addressed beyond - the device itself and must tackle broader ecosystemic factors. We hypothesize, - that a longevous DMI design must not only take into account a target community - but it may also require a non-traditional pedagogical system that sustains artistic - practice.' - address: 'Blacksburg, Virginia, USA' - author: Adnan Marquez-Borbon and Juan Pablo Martinez-Avila - bibtex: "@inproceedings{MarquezBorbon2018,\n abstract = {This paper addresses the\ - \ prevailing longevity problem of digital musical instruments (DMIs) in NIME research\ - \ and design by proposing a holistic system design approach. Despite recent efforts\ - \ to examine the main contributing factors of DMI falling into obsolescence, such\ - \ attempts to remedy this issue largely place focus on the artifacts establishing\ - \ themselves, their design processes and technologies. However, few existing studies\ - \ have attempted to proactively build a community around technological platforms\ - \ for DMIs, whilst bearing in mind the social dynamics and activities necessary\ - \ for a budding community. We observe that such attempts while important in their\ - \ undertaking, are limited in their scope. 
In this paper we will discuss that\ - \ achieving some sort of longevity must be addressed beyond the device itself\ - \ and must tackle broader ecosystemic factors. We hypothesize, that a longevous\ - \ DMI design must not only take into account a target community but it may also\ - \ require a non-traditional pedagogical system that sustains artistic practice.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Adnan Marquez-Borbon and\ - \ Juan Pablo Martinez-Avila},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302541},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {190--195},\n publisher = {Virginia\ - \ Tech},\n title = {The Problem of DMI Adoption and Longevity: Envisioning a NIME\ - \ Performance Pedagogy},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0040.pdf},\n\ - \ year = {2018}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1302541 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 - issn: 2220-4806 - month: June - pages: 190--195 - publisher: Virginia Tech - title: 'The Problem of DMI Adoption and Longevity: Envisioning a NIME Performance - Pedagogy' - url: http://www.nime.org/proceedings/2018/nime2018_paper0040.pdf - year: 2018 - - -- ENTRYTYPE: inproceedings - ID: Martin2018 - abstract: This paper describes the process of developing a standstill performance - work using the Myo gesture control armband and the Bela embedded computing platform. - The combination of Myo and Bela allows a portable and extensible version of the - standstill performance concept while introducing muscle tension as an additional - control parameter. 
We describe the technical details of our setup and introduce - Myo-to-Bela and Myo-to-OSC software bridges that assist with prototyping compositions - using the Myo controller. - address: 'Blacksburg, Virginia, USA' - author: 'Martin, Charles Patrick and Jensenius, Alexander Refsum and Jim Torresen' - bibtex: "@inproceedings{Martin2018,\n abstract = {This paper describes the process\ - \ of developing a standstill performance work using the Myo gesture control armband\ - \ and the Bela embedded computing platform. The combination of Myo and Bela allows\ - \ a portable and extensible version of the standstill performance concept while\ - \ introducing muscle tension as an additional control parameter. We describe\ - \ the technical details of our setup and introduce Myo-to-Bela and Myo-to-OSC\ - \ software bridges that assist with prototyping compositions using the Myo controller.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Martin, Charles Patrick \ - \ and Jensenius, Alexander Refsum and Jim Torresen},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302543},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {196--197},\n publisher = {Virginia Tech},\n title = {Composing an Ensemble\ - \ Standstill Work for Myo and Bela},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0041.pdf},\n\ - \ year = {2018}\n}\n" + ID: Tanaka2002 + abstract: 'This paper describes a technique of multimodal, multichannel control + of electronic musical devices using two control methodologies, the Electromyogram + (EMG) and relative position sensing. Requirements for the application of multimodal + interaction theory in the musical domain are discussed. 
We introduce the concept + of bidirectional complementarity to characterize the relationship between the + component sensing technologies. Each control can be used independently, but together + they are mutually complementary. This reveals a fundamental difference from orthogonal + systems. The creation of a concert piece based on this system is given as example. ' + address: 'Dublin, Ireland' + author: 'Tanaka, Atau and Knapp, Benjamin' + bibtex: "@inproceedings{Tanaka2002,\n abstract = {This paper describes a technique\ + \ of multimodal, multichannel control of electronic musical devices using two\ + \ control methodologies, the Electromyogram (EMG) and relative position sensing.\ + \ Requirements for the application of multimodal interaction theory in the musical\ + \ domain are discussed. We introduce the concept of bidirectional complementarity\ + \ to characterize the relationship between the component sensing technologies.\ + \ Each control can be used independently, but together they are mutually complementary.\ + \ This reveals a fundamental difference from orthogonal systems. The creation\ + \ of a concert piece based on this system is given as example. 
},\n address =\ + \ {Dublin, Ireland},\n author = {Tanaka, Atau and Knapp, Benjamin},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176464},\n\ + \ issn = {2220-4806},\n keywords = {Human Computer Interaction, Musical Controllers,\ + \ Electromyogram, Position Sensing, Sensor Instruments},\n pages = {171--176},\n\ + \ title = {Multimodal Interaction in Music Using the Electromyogram and Relative\ + \ Position Sensing},\n url = {http://www.nime.org/proceedings/2002/nime2002_171.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302543 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176464 issn: 2220-4806 - month: June - pages: 196--197 - publisher: Virginia Tech - title: Composing an Ensemble Standstill Work for Myo and Bela - url: http://www.nime.org/proceedings/2018/nime2018_paper0041.pdf - year: 2018 + keywords: 'Human Computer Interaction, Musical Controllers, Electromyogram, Position + Sensing, Sensor Instruments' + pages: 171--176 + title: Multimodal Interaction in Music Using the Electromyogram and Relative Position + Sensing + url: http://www.nime.org/proceedings/2002/nime2002_171.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Nieva2018 - abstract: 'This paper presents the work to maintain several copies of the digital - musical instrument (DMI) called the T-Stick in the hopes of extending their useful - lifetime. The T-Sticks were originally conceived in 2006 and 20 copies have been - built over the last 12 years. While they all preserve the original design concept, - their evolution resulted in variations in choice of microcontrollers, and sensors. 
- We worked with eight copies of the second and fourth generation T-Sticks to - overcome issues related to the aging of components, changes in external software, - lack of documentation, and in general, the problem of technical maintenance.' - address: 'Blacksburg, Virginia, USA' - author: Alex Nieva and Johnty Wang and Joseph Malloch and Marcelo Wanderley - bibtex: "@inproceedings{Nieva2018,\n abstract = {This paper presents the work to\ - \ maintain several copies of the digital musical instrument (DMI) called the T-Stick\ - \ in the hopes of extending their useful lifetime. The T-Sticks were originally\ - \ conceived in 2006 and 20 copies have been built over the last 12 years. While\ - \ they all preserve the original design concept, their evolution resulted in variations\ - \ in choice of microcontrollers, and sensors. We worked with eight copies\ - \ of the second and fourth generation T-Sticks to overcome issues related to the\ - \ aging of components, changes in external software, lack of documentation, and\ - \ in general, the problem of technical maintenance.},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Alex Nieva and Johnty Wang and Joseph Malloch and\ - \ Marcelo Wanderley},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302545},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {198--199},\n publisher = {Virginia\ - \ Tech},\n title = {The T-Stick: Maintaining a 12 year-old Digital Musical Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0042.pdf},\n year\ - \ = {2018}\n}\n" + ID: Verplank2002 + abstract: 'Active force-feedback holds the potential for precise and rapid controls. + A high performance device can be built from a surplus disk drive and controlled + from an inexpensive microcontroller. 
Our new design,The Plank has only one axis + of force-feedback with limited range of motion. It is being used to explore methods + of feeling and directly manipulating sound waves and spectra suitable for live + performance of computer music.' + address: 'Dublin, Ireland' + author: 'Verplank, Bill and Gurevich, Michael and Mathews, Max' + bibtex: "@inproceedings{Verplank2002,\n abstract = {Active force-feedback holds\ + \ the potential for precise and rapid controls. A high performance device can\ + \ be built from a surplus disk drive and controlled from an inexpensive microcontroller.\ + \ Our new design,The Plank has only one axis of force-feedback with limited range\ + \ of motion. It is being used to explore methods of feeling and directly manipulating\ + \ sound waves and spectra suitable for live performance of computer music.},\n\ + \ address = {Dublin, Ireland},\n author = {Verplank, Bill and Gurevich, Michael\ + \ and Mathews, Max},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ + \ = {10.5281/zenodo.1176466},\n issn = {2220-4806},\n keywords = {Haptics, music\ + \ controllers, scanned synthesis.},\n pages = {177--180},\n title = {THE PLANK:\ + \ Designing a Simple Haptic Controller.},\n url = {http://www.nime.org/proceedings/2002/nime2002_177.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302545 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176466 issn: 2220-4806 - month: June - pages: 198--199 - publisher: Virginia Tech - title: 'The T-Stick: Maintaining a 12 year-old Digital Musical Instrument' - url: http://www.nime.org/proceedings/2018/nime2018_paper0042.pdf - year: 2018 + keywords: 'Haptics, music controllers, scanned synthesis.' 
+ pages: 177--180 + title: 'THE PLANK: Designing a Simple Haptic Controller.' + url: http://www.nime.org/proceedings/2002/nime2002_177.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Dewey2018 - abstract: This paper explores the use of the ubiquitous MIDI keyboard to control - a DJ performance system. The prototype system uses a two octave keyboard with - each octave controlling one audio track. Each audio track has four two-bar loops - which play in synchronisation switchable by its respective octave's first four - black keys. The top key of the keyboard toggles between frequency filter mode - and time slicer mode. In frequency filter mode the white keys provide seven bands - of latched frequency filtering. In time slicer mode the white keys plus black - B flat key provide latched on/off control of eight time slices of the loop. The - system was informally evaluated by nine subjects. The frequency filter mode combined - with loop switching worked well with the MIDI keyboard interface. All subjects - agreed that all tools had creative performance potential that could be developed - by further practice. - address: 'Blacksburg, Virginia, USA' - author: Christopher Dewey and Jonathan P. Wakefield - bibtex: "@inproceedings{Dewey2018,\n abstract = {This paper explores the use of\ - \ the ubiquitous MIDI keyboard to control a DJ performance system. The prototype\ - \ system uses a two octave keyboard with each octave controlling one audio track.\ - \ Each audio track has four two-bar loops which play in synchronisation switchable\ - \ by its respective octave's first four black keys. The top key of the keyboard\ - \ toggles between frequency filter mode and time slicer mode. In frequency filter\ - \ mode the white keys provide seven bands of latched frequency filtering. In time\ - \ slicer mode the white keys plus black B flat key provide latched on/off control\ - \ of eight time slices of the loop. The system was informally evaluated by nine\ - \ subjects. 
The frequency filter mode combined with loop switching worked well\ - \ with the MIDI keyboard interface. All subjects agreed that all tools had creative\ - \ performance potential that could be developed by further practice.},\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {Christopher Dewey and Jonathan P.\ - \ Wakefield},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302547},\n editor\ - \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {200--201},\n publisher = {Virginia\ - \ Tech},\n title = {{MIDI} Keyboard Defined DJ Performance System},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0043.pdf},\n\ - \ year = {2018}\n}\n" + ID: Vogt2002 + abstract: 'Here we propose a novel musical controller which acquires imaging data + of the tongue with a two-dimensional medical ultrasound scanner. A computer vision + algorithm extracts from the image a discrete tongue shape to control, in realtime, + a musical synthesizer and musical effects. We evaluate the mapping space between + tongue shape and controller parameters and its expressive characteristics.' + address: 'Dublin, Ireland' + author: 'Vogt, Florian and Mccaig, Graeme and Ali, Mir A. and Fels, Sidney S.' + bibtex: "@inproceedings{Vogt2002,\n abstract = {Here we propose a novel musical\ + \ controller which acquires imaging data of the tongue with a two-dimensional\ + \ medical ultrasound scanner. A computer vision algorithm extracts from the image\ + \ a discrete tongue shape to control, in realtime, a musical synthesizer and musical\ + \ effects. We evaluate the mapping space between tongue shape and controller parameters\ + \ and its expressive characteristics.},\n address = {Dublin, Ireland},\n author\ + \ = {Vogt, Florian and Mccaig, Graeme and Ali, Mir A. 
and Fels, Sidney S.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176468},\n\ + \ issn = {2220-4806},\n keywords = {Tongue model, ultrasound, real-time, music\ + \ synthesis, speech interface},\n pages = {181--185},\n title = {Tongue `n' Groove:\ + \ An Ultrasound based Music Controller},\n url = {http://www.nime.org/proceedings/2002/nime2002_181.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302547 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176468 issn: 2220-4806 - month: June - pages: 200--201 - publisher: Virginia Tech - title: MIDI Keyboard Defined DJ Performance System - url: http://www.nime.org/proceedings/2018/nime2018_paper0043.pdf - year: 2018 + keywords: 'Tongue model, ultrasound, real-time, music synthesis, speech interface' + pages: 181--185 + title: 'Tongue `n'' Groove: An Ultrasound based Music Controller' + url: http://www.nime.org/proceedings/2002/nime2002_181.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Engum2018 - abstract: This paper describes an ongoing research project which address challenges - and opportunities when collaborating interactively in real time in a "virtual" - sound studio with several partners in different locations. "Virtual" in this context - referring to an interconnected and inter-domain studio environment consisting - of several local production systems connected to public and private networks. - This paper reports experiences and challenges related to two different production - scenarios conducted in 2017. 
- address: 'Blacksburg, Virginia, USA' - author: Trond Engum and Otto Jonassen Wittner - bibtex: "@inproceedings{Engum2018,\n abstract = {This paper describes an ongoing\ - \ research project which address challenges and opportunities when collaborating\ - \ interactively in real time in a \"virtual\" sound studio with several partners\ - \ in different locations. \"Virtual\" in this context referring to an interconnected\ - \ and inter-domain studio environment consisting of several local production systems\ - \ connected to public and private networks. This paper reports experiences and\ - \ challenges related to two different production scenarios conducted in 2017.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Trond Engum and Otto Jonassen\ - \ Wittner},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302549},\n editor\ - \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {202--203},\n publisher = {Virginia\ - \ Tech},\n title = {Democratizing Interactive Music Production over the Internet},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0044.pdf},\n year\ - \ = {2018}\n}\n" + ID: Weinberg2002 + abstract: 'The Beatbugs are hand-held percussive instruments that allow the creation, + manipulation, and sharing of rhythmic motifs through a simple interface. When + multiple Beatbugs are connected in a network, players can form large-scale collaborative + compositions by interdependently sharing and developing each other''s motifs. + Each Beatbug player can enter a motif that is then sent through a stochastic computerized + "Nerve Center" to other players in the network. 
Receiving players can decide whether + to develop the motif further (by continuously manipulating pitch, timbre, and + rhythmic elements using two bend sensor antennae) or to keep it in their personal + instrument (by entering and sending their own new motifs to the group.) The tension + between the system''s stochastic routing scheme and the players'' improvised real-time + decisions leads to an interdependent, dynamic, and constantly evolving musical + experience. A musical composition entitled "Nerve" was written for the system + by author Gil Weinberg. It was premiered on February 2002 as part of Tod Machover''s + Toy Symphony [1] in a concert with the Deutsches Symphonie Orchester Berlin, conducted + by Kent Nagano. The paper concludes with a short evaluative discussion of the + concert and the weeklong workshops that led to it. ' + address: 'Dublin, Ireland' + author: 'Weinberg, Gil and Aimi, Roberto and Jennings, Kevin' + bibtex: "@inproceedings{Weinberg2002,\n abstract = {The Beatbugs are hand-held percussive\ + \ instruments that allow the creation, manipulation, and sharing of rhythmic motifs\ + \ through a simple interface. When multiple Beatbugs are connected in a network,\ + \ players can form large-scale collaborative compositions by interdependently\ + \ sharing and developing each other's motifs. Each Beatbug player can enter a\ + \ motif that is then sent through a stochastic computerized \"Nerve Center\" to\ + \ other players in the network. Receiving players can decide whether to develop\ + \ the motif further (by continuously manipulating pitch, timbre, and rhythmic\ + \ elements using two bend sensor antennae) or to keep it in their personal instrument\ + \ (by entering and sending their own new motifs to the group.) The tension between\ + \ the system's stochastic routing scheme and the players' improvised real-time\ + \ decisions leads to an interdependent, dynamic, and constantly evolving musical\ + \ experience. 
A musical composition entitled \"Nerve\" was written for the system\ + \ by author Gil Weinberg. It was premiered on February 2002 as part of Tod Machover's\ + \ Toy Symphony [1] in a concert with the Deutsches Symphonie Orchester Berlin,\ + \ conducted by Kent Nagano. The paper concludes with a short evaluative discussion\ + \ of the concert and the weeklong workshops that led to it. },\n address = {Dublin,\ + \ Ireland},\n author = {Weinberg, Gil and Aimi, Roberto and Jennings, Kevin},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176470},\n\ + \ issn = {2220-4806},\n keywords = {Interdependent Musical Networks, group playing,\ + \ percussive controllers.},\n pages = {186--191},\n title = {The Beatbug Network\ + \ -A Rhythmic System for Interdependent Group Collaboration},\n url = {http://www.nime.org/proceedings/2002/nime2002_186.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302549 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176470 issn: 2220-4806 - month: June - pages: 202--203 - publisher: Virginia Tech - title: Democratizing Interactive Music Production over the Internet - url: http://www.nime.org/proceedings/2018/nime2018_paper0044.pdf - year: 2018 + keywords: 'Interdependent Musical Networks, group playing, percussive controllers.' 
+ pages: 186--191 + title: The Beatbug Network -A Rhythmic System for Interdependent Group Collaboration + url: http://www.nime.org/proceedings/2002/nime2002_186.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Charles2018 - abstract: 'This paper describes how the Axoloti platform is well suited to teach - a beginners'' course about new elecro-acoustic musical instruments and how it - fits the needs of artists who want to work with an embedded sound processing platform - and get creative at the crossroads of acoustics and electronics. First, we present - the criteria used to choose a platform for the course titled "Creating New Musical - Instruments" given at the University of Iowa in the Fall of 2017. Then, we explain - why we chose the Axoloti board and development environment.' - address: 'Blacksburg, Virginia, USA' - author: 'Jean-Francois Charles and Cotallo Solares, Carlos and Toro Tobon, Carlos - and Andrew Willette' - bibtex: "@inproceedings{Charles2018,\n abstract = {This paper describes how the\ - \ Axoloti platform is well suited to teach a beginners' course about new elecro-acoustic\ - \ musical instruments and how it fits the needs of artists who want to work with\ - \ an embedded sound processing platform and get creative at the crossroads of\ - \ acoustics and electronics. First, we present the criteria used to choose a platform\ - \ for the course titled \"Creating New Musical Instruments\" given at the University\ - \ of Iowa in the Fall of 2017. 
Then, we explain why we chose the Axoloti board\ - \ and development environment.},\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {Jean-Francois Charles and Cotallo Solares, Carlos and Toro Tobon, Carlos\ - \ and Andrew Willette},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302551},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {204--205},\n publisher = {Virginia\ - \ Tech},\n title = {Using the Axoloti Embedded Sound Processing Platform to Foster\ - \ Experimentation and Creativity},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0045.pdf},\n\ - \ year = {2018}\n}\n" + ID: Wessel2002 + abstract: 'In this demonstration we will show a variety of computer-based musical + instruments designed for live performance. Our design criteria include initial + ease of use coupled with a long term potential for virtuosity, minimal and low + variance latency, and clear and simple strategies for programming the relationship + between gesture and musical result. We present custom controllers and unique adaptations + of standard gestural interfaces, a programmable connectivity processor, a communications + protocol called Open Sound Control (OSC), and a variety of metaphors for musical + control. ' + address: 'Dublin, Ireland' + author: 'Wessel, David and Wright, Matthew and Schott, John' + bibtex: "@inproceedings{Wessel2002,\n abstract = {In this demonstration we will\ + \ show a variety of computer-based musical instruments designed for live performance.\ + \ Our design criteria include initial ease of use coupled with a long term potential\ + \ for virtuosity, minimal and low variance latency, and clear and simple strategies\ + \ for programming the relationship between gesture and musical result. 
We present\ + \ custom controllers and unique adaptations of standard gestural interfaces, a\ + \ programmable connectivity processor, a communications protocol called Open Sound\ + \ Control (OSC), and a variety of metaphors for musical control. },\n address\ + \ = {Dublin, Ireland},\n author = {Wessel, David and Wright, Matthew and Schott,\ + \ John},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176472},\n\ + \ issn = {2220-4806},\n keywords = {Expressive control, mapping gestures to acoustic\ + \ results, metaphors for musical control, Tactex, Buchla Thunder, digitizing tablets.},\n\ + \ pages = {192--194},\n title = {Intimate Musical Control of Computers with a\ + \ Variety of Controllers and Gesture Mapping Metaphors},\n url = {http://www.nime.org/proceedings/2002/nime2002_192.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302551 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176472 issn: 2220-4806 - month: June - pages: 204--205 - publisher: Virginia Tech - title: Using the Axoloti Embedded Sound Processing Platform to Foster Experimentation - and Creativity - url: http://www.nime.org/proceedings/2018/nime2018_paper0045.pdf - year: 2018 + keywords: 'Expressive control, mapping gestures to acoustic results, metaphors for + musical control, Tactex, Buchla Thunder, digitizing tablets.' 
+ pages: 192--194 + title: Intimate Musical Control of Computers with a Variety of Controllers and Gesture + Mapping Metaphors + url: http://www.nime.org/proceedings/2002/nime2002_192.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Tsoukalas2018 - abstract: 'The following paper introduces a new mechatronic NIME kit that uses new - additions to the Pd-L2Ork visual programing environment and its K-12 learning - module. It is designed to facilitate the creation of simple mechatronics systems - for physical sound production in K-12 and production scenarios. The new set of - objects builds on the existing support for the Raspberry Pi platform to also include - the use of electric actuators via the microcomputer''s GPIO system. Moreover, - we discuss implications of the newly introduced kit in the creative and K-12 education - scenarios by sharing observations from a series of pilot workshops, with particular - focus on using mechatronic NIMEs as a catalyst for the development of programing - skills.' - address: 'Blacksburg, Virginia, USA' - author: Kyriakos Tsoukalas and Ivica Ico Bukvic - bibtex: "@inproceedings{Tsoukalas2018,\n abstract = {The following paper introduces\ - \ a new mechatronic NIME kit that uses new additions to the Pd-L2Ork visual programing\ - \ environment and its K-12 learning module. It is designed to facilitate the creation\ - \ of simple mechatronics systems for physical sound production in K-12 and production\ - \ scenarios. The new set of objects builds on the existing support for the Raspberry\ - \ Pi platform to also include the use of electric actuators via the microcomputer's\ - \ GPIO system. 
Moreover, we discuss implications of the newly introduced kit in\ - \ the creative and K-12 education scenarios by sharing observations from a series\ - \ of pilot workshops, with particular focus on using mechatronic NIMEs as a catalyst\ - \ for the development of programing skills.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Kyriakos Tsoukalas and Ivica Ico Bukvic},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302553},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {206--209},\n publisher = {Virginia Tech},\n title = {Introducing a\ - \ K-12 Mechatronic NIME Kit},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0046.pdf},\n\ - \ year = {2018}\n}\n" + ID: Wilkerson2002 + abstract: 'The Mutha Rubboard is a musical controller based on the rubboard, washboard + or frottoir metaphor commonly used in the Zydeco music genre of South Louisiana. + It is not onlya metamorphosis of a traditional instrument, but a modern bridge + of exploration into a rich musical heritage. It uses capacitive and piezo sensing + technology to output MIDI and raw audio data.This new controller reads the key + placement in two parallel planes by using radio capacitive sensing circuitry expanding + greatly on the standard corrugated metal playing surface. The percussive output + normally associated with the rubboard is captured through piezo contact sensors + mounted directly on the keys (the playing implements). Additionally,mode functionality + is controlled by discrete switching on the keys.This new instrument is meant to + be easily played by both experienced players and those new to the rubboard. 
It + lends itself to an expressive freedom by placing the control surface on the chest + and allowing the hands to move uninhibited about it or by playing it in the usual + way, preserving its musical heritage.' + address: 'Dublin, Ireland' + author: 'Wilkerson, Carr and Serafin, Stefania and Ng, Carmen' + bibtex: "@inproceedings{Wilkerson2002,\n abstract = {The Mutha Rubboard is a musical\ + \ controller based on the rubboard, washboard or frottoir metaphor commonly used\ + \ in the Zydeco music genre of South Louisiana. It is not onlya metamorphosis\ + \ of a traditional instrument, but a modern bridge of exploration into a rich\ + \ musical heritage. It uses capacitive and piezo sensing technology to output\ + \ MIDI and raw audio data.This new controller reads the key placement in two parallel\ + \ planes by using radio capacitive sensing circuitry expanding greatly on the\ + \ standard corrugated metal playing surface. The percussive output normally associated\ + \ with the rubboard is captured through piezo contact sensors mounted directly\ + \ on the keys (the playing implements). Additionally,mode functionality is controlled\ + \ by discrete switching on the keys.This new instrument is meant to be easily\ + \ played by both experienced players and those new to the rubboard. 
It lends itself\ + \ to an expressive freedom by placing the control surface on the chest and allowing\ + \ the hands to move uninhibited about it or by playing it in the usual way, preserving\ + \ its musical heritage.},\n address = {Dublin, Ireland},\n author = {Wilkerson,\ + \ Carr and Serafin, Stefania and Ng, Carmen},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n date =\ + \ {24-26 May, 2002},\n doi = {10.5281/zenodo.1176474},\n issn = {2220-4806},\n\ + \ keywords = {MIDI controllers, computer music, Zydeco music, interactive music,\ + \ electronic musical instrument, human computer interface, Louisiana heritage,\ + \ physical modeling, bowl resonators.},\n pages = {195--198},\n title = {The Mutha\ + \ Rubboard Controller},\n url = {http://www.nime.org/proceedings/2002/nime2002_195.pdf},\n\ + \ year = {2002}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302553 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176474 issn: 2220-4806 - month: June - pages: 206--209 - publisher: Virginia Tech - title: Introducing a K-12 Mechatronic NIME Kit - url: http://www.nime.org/proceedings/2018/nime2018_paper0046.pdf - year: 2018 + keywords: 'MIDI controllers, computer music, Zydeco music, interactive music, electronic + musical instrument, human computer interface, Louisiana heritage, physical modeling, + bowl resonators.' + pages: 195--198 + title: The Mutha Rubboard Controller + url: http://www.nime.org/proceedings/2002/nime2002_195.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Bennett2018 - abstract: 'We describe the development of Neurythmic: an interactive system for - the creation and performance of fluid, expressive musical rhythms using Central - Pattern Generators (CPGs). CPGs are neural networks which generate adaptive rhythmic - signals. 
They simulate structures in animals which underly behaviours such as - heartbeat, gut peristalsis and complex motor control. Neurythmic is the first - such system to use CPGs for interactive rhythm creation. We discuss how Neurythmic - uses the entrainment behaviour of these networks to support the creation of rhythms - while avoiding the rigidity of grid quantisation approaches. As well as discussing - the development, design and evaluation of Neurythmic, we discuss relevant properties - of the CPG networks used (Matsuoka''s Neural Oscillator), and describe methods - for their control. Evaluation with expert and professional musicians shows that - Neurythmic is a versatile tool, adapting well to a range of quite different musical - approaches.' - address: 'Blacksburg, Virginia, USA' - author: Daniel Bennett and Peter Bennett and Anne Roudaut - bibtex: "@inproceedings{Bennett2018,\n abstract = {We describe the development of\ - \ Neurythmic: an interactive system for the creation and performance of fluid,\ - \ expressive musical rhythms using Central Pattern Generators (CPGs). CPGs are\ - \ neural networks which generate adaptive rhythmic signals. They simulate structures\ - \ in animals which underly behaviours such as heartbeat, gut peristalsis and complex\ - \ motor control. Neurythmic is the first such system to use CPGs for interactive\ - \ rhythm creation. We discuss how Neurythmic uses the entrainment behaviour of\ - \ these networks to support the creation of rhythms while avoiding the rigidity\ - \ of grid quantisation approaches. 
As well as discussing the development, design\ - \ and evaluation of Neurythmic, we discuss relevant properties of the CPG networks\ - \ used (Matsuoka's Neural Oscillator), and describe methods for their control.\ - \ Evaluation with expert and professional musicians shows that Neurythmic is a\ - \ versatile tool, adapting well to a range of quite different musical approaches.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Daniel Bennett and Peter\ - \ Bennett and Anne Roudaut},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302555},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {210--215},\n publisher = {Virginia\ - \ Tech},\n title = {Neurythmic: A Rhythm Creation Tool Based on Central Pattern\ - \ Generators},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0047.pdf},\n\ - \ year = {2018}\n}\n" + ID: Winkler2002 + abstract: 'Falling Up is an evening-length performance incorporating dance and theatre + with movement-controlled audio/video playback and processing. The solo show is + a collaboration between Cindy Cummings (performance) and Todd Winkler(sound, video), + first performed at the Dublin Fringe Festival,2001. Each thematic section of the + work shows a different typeof interactive relationship between movement, video + and sound. This demonstration explains the various technical configurations and + aesthetic thinking behind aspects of the work.' + address: 'Dublin, Ireland' + author: 'Winkler, Todd' + bibtex: "@inproceedings{Winkler2002,\n abstract = {Falling Up is an evening-length\ + \ performance incorporating dance and theatre with movement-controlled audio/video\ + \ playback and processing. 
The solo show is a collaboration between Cindy Cummings\ + \ (performance) and Todd Winkler(sound, video), first performed at the Dublin\ + \ Fringe Festival,2001. Each thematic section of the work shows a different typeof\ + \ interactive relationship between movement, video and sound. This demonstration\ + \ explains the various technical configurations and aesthetic thinking behind\ + \ aspects of the work.},\n address = {Dublin, Ireland},\n author = {Winkler, Todd},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176476},\n\ + \ issn = {2220-4806},\n keywords = {Dance, Video processing, Movement sensor,\ + \ VNS, Very Nervous System},\n pages = {199--200},\n title = {Fusing Movement,\ + \ Sound, and Video in Falling Up, an Interactive Dance/Theatre Production},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_199.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302555 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176476 issn: 2220-4806 - month: June - pages: 210--215 - publisher: Virginia Tech - title: 'Neurythmic: A Rhythm Creation Tool Based on Central Pattern Generators' - url: http://www.nime.org/proceedings/2018/nime2018_paper0047.pdf - year: 2018 + keywords: 'Dance, Video processing, Movement sensor, VNS, Very Nervous System' + pages: 199--200 + title: 'Fusing Movement, Sound, and Video in Falling Up, an Interactive Dance/Theatre + Production' + url: http://www.nime.org/proceedings/2002/nime2002_199.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Granger2018 - abstract: 'Composing music typically requires years of music theory experience and - knowledge that includes but is not limited to chord progression, melody composition - theory, and an 
understanding of whole-step/half-step passing tones among others. - For that reason, certain songwriters such as singers may find a necessity to hire - experienced pianists to help compose their music. In order to facilitate the process - for beginner and aspiring musicians, we have developed Lumanote, a music composition - tool that aids songwriters by presenting real-time suggestions on appropriate - melody notes and chord progression. While a preliminary evaluation yielded favorable - results for beginners, many commented on the difficulty of having to map the note - suggestions displayed on the on-screen interface to the physical keyboard they - were playing on. This paper presents the resulting solution: an LED-based feedback - system that is designed to be directly attached to any standard MIDI keyboard. - This peripheral aims to help map note suggestions directly to the physical keys - of a musical keyboard. A study consisting of 22 individuals was conducted to compare - the effectiveness of the new LED-based system with the existing computer interface, - finding that the vast majority of users preferred the LED system. Three experienced - musicians also judged and ranked the compositions, noting significant improvement - in song quality when using either system, and citing comparable quality between - compositions that used either interface.' - address: 'Blacksburg, Virginia, USA' - author: James Granger and Mateo Aviles and Joshua Kirby and Austin Griffin and Johnny - Yoon and Raniero A. Lara-Garduno and Tracy Hammond - bibtex: "@inproceedings{Granger2018,\n abstract = {Composing music typically requires\ - \ years of music theory experience and knowledge that includes but is not limited\ - \ to chord progression, melody composition theory, and an understanding of whole-step/half-step\ - \ passing tones among others. 
For that reason, certain songwriters such as singers\ - \ may find a necessity to hire experienced pianists to help compose their music.\ - \ In order to facilitate the process for beginner and aspiring musicians, we have\ - \ developed Lumanote, a music composition tool that aids songwriters by presenting\ - \ real-time suggestions on appropriate melody notes and chord progression. While\ - \ a preliminary evaluation yielded favorable results for beginners, many commented\ - \ on the difficulty of having to map the note suggestions displayed on the on-screen\ - \ interface to the physical keyboard they were playing on. This paper presents\ - \ the resulting solution: an LED-based feedback system that is designed to be\ - \ directly attached to any standard MIDI keyboard. This peripheral aims to help\ - \ map note suggestions directly to the physical keys of a musical keyboard. A\ - \ study consisting of 22 individuals was conducted to compare the effectiveness\ - \ of the new LED-based system with the existing computer interface, finding that\ - \ the vast majority of users preferred the LED system. Three experienced musicians\ - \ also judged and ranked the compositions, noting significant improvement in song\ - \ quality when using either system, and citing comparable quality between compositions\ - \ that used either interface.},\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {James Granger and Mateo Aviles and Joshua Kirby and Austin Griffin and Johnny\ - \ Yoon and Raniero A. 
Lara-Garduno and Tracy Hammond},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302557},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {216--221},\n publisher = {Virginia Tech},\n title = {Evaluating LED-based\ - \ interface for Lumanote composition creation tool},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0048.pdf},\n\ - \ year = {2018}\n}\n" + ID: Young2002 + abstract: 'In this paper, the design and construction of a new violin interface, + the Hyperbow, is discussed. The motivation driving the research of this instrument + was the desire to create a violin bow capable of measuring the most intricate + aspects of violin techniquethe subtle elements of physical gesture that immediately + and directly impact the sound of the instrument while playing. In order to provide + this insight into the subtleties of bow articulation, a sensing system has been + integrated into a commercial carbon fiber bow to measure changes in position, + acceleration, and the downward and lateral strains of the bow stick. The sensors + were fashioned using an electromagnetic field sensing technique, commercial MEMS + accelerometers, and foil strain gauges. The measurement techniques used in this + work were found to be quite sensitive and yielded sensors that were easily controllable + by a player using traditional right hand bowing technique.' + address: 'Dublin, Ireland' + author: 'Young, Diana' + bibtex: "@inproceedings{Young2002,\n abstract = {In this paper, the design and construction\ + \ of a new violin interface, the Hyperbow, is discussed. 
The motivation driving\ + \ the research of this instrument was the desire to create a violin bow capable\ + \ of measuring the most intricate aspects of violin techniquethe subtle elements\ + \ of physical gesture that immediately and directly impact the sound of the instrument\ + \ while playing. In order to provide this insight into the subtleties of bow articulation,\ + \ a sensing system has been integrated into a commercial carbon fiber bow to measure\ + \ changes in position, acceleration, and the downward and lateral strains of the\ + \ bow stick. The sensors were fashioned using an electromagnetic field sensing\ + \ technique, commercial MEMS accelerometers, and foil strain gauges. The measurement\ + \ techniques used in this work were found to be quite sensitive and yielded sensors\ + \ that were easily controllable by a player using traditional right hand bowing\ + \ technique.},\n address = {Dublin, Ireland},\n author = {Young, Diana},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176480},\n\ + \ issn = {2220-4806},\n keywords = {Hyperbow, Hyperviolin, Hyperinstrument, violin,\ + \ bow, position sensor, accelerometer, strain sensor},\n pages = {201--206},\n\ + \ title = {The Hyperbow Controller: Real-Time Dynamics Measurement of Violin Performance},\n\ + \ url = {http://www.nime.org/proceedings/2002/nime2002_201.pdf},\n year = {2002}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302557 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '24-26 May, 2002' + doi: 10.5281/zenodo.1176480 issn: 2220-4806 - month: June - pages: 216--221 - publisher: Virginia Tech - title: Evaluating LED-based interface for Lumanote composition creation tool - url: http://www.nime.org/proceedings/2018/nime2018_paper0048.pdf - year: 2018 + 
keywords: 'Hyperbow, Hyperviolin, Hyperinstrument, violin, bow, position sensor, + accelerometer, strain sensor' + pages: 201--206 + title: 'The Hyperbow Controller: Real-Time Dynamics Measurement of Violin Performance' + url: http://www.nime.org/proceedings/2002/nime2002_201.pdf + year: 2002 - ENTRYTYPE: inproceedings - ID: Meneses2018 - abstract: 'This paper describes two augmented nylon-string guitar projects developed - in different institutions. GuitarAMI uses sensors to modify the classical guitars - constraints while GuiaRT uses digital signal processing to create virtual guitarists - that interact with the performer in real-time. After a bibliographic review of - Augmented Musical Instruments (AMIs) based on guitars, we present the details - of the two projects and compare them using an adapted dimensional space representation. - Highlighting the complementarity and cross-influences between the projects, we - propose avenues for future collaborative work.' - address: 'Blacksburg, Virginia, USA' - author: Eduardo Meneses and Sergio Freire and Marcelo Wanderley - bibtex: "@inproceedings{Meneses2018,\n abstract = {This paper describes two augmented\ - \ nylon-string guitar projects developed in different institutions. GuitarAMI\ - \ uses sensors to modify the classical guitars constraints while GuiaRT uses digital\ - \ signal processing to create virtual guitarists that interact with the performer\ - \ in real-time. After a bibliographic review of Augmented Musical Instruments\ - \ (AMIs) based on guitars, we present the details of the two projects and compare\ - \ them using an adapted dimensional space representation. 
Highlighting the complementarity\ - \ and cross-influences between the projects, we propose avenues for future collaborative\ - \ work.},\n address = {Blacksburg, Virginia, USA},\n author = {Eduardo Meneses\ - \ and Sergio Freire and Marcelo Wanderley},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1302559},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {222--227},\n publisher = {Virginia Tech},\n title = {GuitarAMI and GuiaRT:\ - \ two independent yet complementary projects on augmented nylon guitars},\n url\ - \ = {http://www.nime.org/proceedings/2018/nime2018_paper0049.pdf},\n year = {2018}\n\ - }\n" + ID: Cannon2003 + abstract: 'In this paper we present a design for the EpipE, a newexpressive electronic + music controller based on the IrishUilleann Pipes, a 7-note polyphonic reeded + woodwind. Thecore of this proposed controller design is a continuouselectronic + tonehole-sensing arrangement, equally applicableto other woodwind interfaces like + those of the flute, recorder orJapanese shakuhachi. The controller will initially + be used todrive a physically-based synthesis model, with the eventualgoal being + the development of a mapping layer allowing theEpipE interface to operate as a + MIDI-like controller of arbitrarysynthesis models.' + address: 'Montreal, Canada' + author: 'Cannon, Cormac and Hughes, Stephen and O''Modhrain, Sile' + bibtex: "@inproceedings{Cannon2003,\n abstract = {In this paper we present a design\ + \ for the EpipE, a newexpressive electronic music controller based on the IrishUilleann\ + \ Pipes, a 7-note polyphonic reeded woodwind. 
Thecore of this proposed controller\ + \ design is a continuouselectronic tonehole-sensing arrangement, equally applicableto\ + \ other woodwind interfaces like those of the flute, recorder orJapanese shakuhachi.\ + \ The controller will initially be used todrive a physically-based synthesis model,\ + \ with the eventualgoal being the development of a mapping layer allowing theEpipE\ + \ interface to operate as a MIDI-like controller of arbitrarysynthesis models.},\n\ + \ address = {Montreal, Canada},\n author = {Cannon, Cormac and Hughes, Stephen\ + \ and O'Modhrain, Sile},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ + \ = {10.5281/zenodo.1176497},\n issn = {2220-4806},\n keywords = {Controllers,\ + \ continuous woodwind tonehole sensor, uilleann pipes, Irish bagpipe, physical\ + \ modelling, double reed, conical bore, tonehole. },\n pages = {3--8},\n title\ + \ = {EpipE: Exploration of the Uilleann Pipes as a Potential Controller for Computer-based\ + \ Music},\n url = {http://www.nime.org/proceedings/2003/nime2003_003.pdf},\n year\ + \ = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302559 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176497 issn: 2220-4806 - month: June - pages: 222--227 - publisher: Virginia Tech - title: 'GuitarAMI and GuiaRT: two independent yet complementary projects on augmented - nylon guitars' - url: http://www.nime.org/proceedings/2018/nime2018_paper0049.pdf - year: 2018 + keywords: 'Controllers, continuous woodwind tonehole sensor, uilleann pipes, Irish + bagpipe, physical modelling, double reed, conical bore, tonehole. 
' + pages: 3--8 + title: 'EpipE: Exploration of the Uilleann Pipes as a Potential Controller for Computer-based + Music' + url: http://www.nime.org/proceedings/2003/nime2003_003.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Stolfi2018 - abstract: 'Playsound.space is a web-based tool to search for and play Creative Commons - licensed-sounds which can be applied to free improvisation, experimental music - production and soundscape composition. It provides a fast access to about 400k - non-musical and musical sounds provided by Freesound, and allows users to play/loop - single or multiple sounds retrieved through text based search. Sound discovery - is facilitated by use of semantic searches and sound visual representations (spectrograms). - Guided by the motivation to create an intuitive tool to support music practice - that could suit both novice and trained musicians, we developed and improved the - system in a continuous process, gathering frequent feedback from a range of users - with various skills. We assessed the prototype with 18 non musician and musician - participants during free music improvisation sessions. Results indicate that the - system was found easy to use and supports creative collaboration and expressiveness - irrespective of musical ability. We identified further design challenges linked - to creative identification, control and content quality.' - address: 'Blacksburg, Virginia, USA' - author: Ariane de Souza Stolfi and Miguel Ceriani and Luca Turchet and Mathieu Barthet - bibtex: "@inproceedings{Stolfi2018,\n abstract = {Playsound.space is a web-based\ - \ tool to search for and play Creative Commons licensed-sounds which can be applied\ - \ to free improvisation, experimental music production and soundscape composition.\ - \ It provides a fast access to about 400k non-musical and musical sounds provided\ - \ by Freesound, and allows users to play/loop single or multiple sounds retrieved\ - \ through text based search. 
Sound discovery is facilitated by use of semantic\ - \ searches and sound visual representations (spectrograms). Guided by the motivation\ - \ to create an intuitive tool to support music practice that could suit both novice\ - \ and trained musicians, we developed and improved the system in a continuous\ - \ process, gathering frequent feedback from a range of users with various skills.\ - \ We assessed the prototype with 18 non musician and musician participants during\ - \ free music improvisation sessions. Results indicate that the system was found\ - \ easy to use and supports creative collaboration and expressiveness irrespective\ - \ of musical ability. We identified further design challenges linked to creative\ - \ identification, control and content quality.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Ariane de Souza Stolfi and Miguel Ceriani and Luca Turchet\ - \ and Mathieu Barthet},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302561},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {228--233},\n publisher = {Virginia\ - \ Tech},\n title = {Playsound.space: Inclusive Free Music Improvisations Using\ - \ Audio Commons},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0050.pdf},\n\ - \ year = {2018}\n}\n" + ID: Young2003 + abstract: HyperPuja is a novel controller that closely mimicks the behavior of a + Tibetan Singing Bowl rubbed with a "puja" stick. Our design hides the electronics + from the performer to maintain the original look and feel of the instrument and + the performance. This is achieved by using wireless technology to keep the stick + un-tethered as well as burying the electronics inside the the core of the stick. 
+ The measured parameters closely resemble the input parameters of a related physical + synthesis model allowing for convenient mapping of sensor parameters to synthesis + input. The new controller allows for flexible choice of sound synthesis while + fully maintaining the characteristics of the physical interaction of the original + instrument. + address: 'Montreal, Canada' + author: 'Young, Diana and Essl, Georg' + bibtex: "@inproceedings{Young2003,\n abstract = {HyperPuja is a novel controller\ + \ that closely mimicks the behavior of a Tibetan Singing Bowl rubbed with a \"\ + puja\" stick. Our design hides the electronics from the performer to maintain\ + \ the original look and feel of the instrument and the performance. This is achieved\ + \ by using wireless technology to keep the stick un-tethered as well as burying\ + \ the electronics inside the the core of the stick. The measured parameters closely\ + \ resemble the input parameters of a related physical synthesis model allowing\ + \ for convenient mapping of sensor parameters to synthesis input. 
The new controller\ + \ allows for flexible choice of sound synthesis while fully maintaining the characteristics\ + \ of the physical interaction of the original instrument.},\n address = {Montreal,\ + \ Canada},\n author = {Young, Diana and Essl, Georg},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176577},\n issn = {2220-4806},\n\ + \ pages = {9--14},\n title = {HyperPuja: A Tibetan Singing Bowl Controller},\n\ + \ url = {http://www.nime.org/proceedings/2003/nime2003_009.pdf},\n year = {2003}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302561 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176577 issn: 2220-4806 - month: June - pages: 228--233 - publisher: Virginia Tech - title: 'Playsound.space: Inclusive Free Music Improvisations Using Audio Commons' - url: http://www.nime.org/proceedings/2018/nime2018_paper0050.pdf - year: 2018 + pages: 9--14 + title: 'HyperPuja: A Tibetan Singing Bowl Controller' + url: http://www.nime.org/proceedings/2003/nime2003_009.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Harding2018 - abstract: 'This paper provides a new interface for the production and distribution - of high resolution analog control signals, particularly aimed toward the control - of analog modular synthesisers. Control Voltage/Gate interfaces generate Control - Voltage (CV) and Gate Voltage (Gate) as a means of controlling note pitch and - length respectively, and have been with us since 1986 [2]. 
The authors provide - a unique custom CV/Gate interface and dedicated communication protocol which leverages - standard USB Serial functionality and enables connectivity over a plethora of - computing devices, including embedded devices such as the Raspberry Pi and ARM - based devices including widely available ‘Android TV Boxes''. We provide a general - overview of the unique hardware and communication protocol developments followed - by usage case examples toward tuning and embedded platforms, leveraging softwares - ranging from Pure Data (Pd), Max, and Max for Live (M4L).' - address: 'Blacksburg, Virginia, USA' - author: John Harding and Richard Graham and Edwin Park - bibtex: "@inproceedings{Harding2018,\n abstract = {This paper provides a new interface\ - \ for the production and distribution of high resolution analog control signals,\ - \ particularly aimed toward the control of analog modular synthesisers. Control\ - \ Voltage/Gate interfaces generate Control Voltage (CV) and Gate Voltage (Gate)\ - \ as a means of controlling note pitch and length respectively, and have been\ - \ with us since 1986 [2]. The authors provide a unique custom CV/Gate interface\ - \ and dedicated communication protocol which leverages standard USB Serial functionality\ - \ and enables connectivity over a plethora of computing devices, including embedded\ - \ devices such as the Raspberry Pi and ARM based devices including widely available\ - \ ‘Android TV Boxes'. 
We provide a general overview of the unique hardware and\ - \ communication protocol developments followed by usage case examples toward tuning\ - \ and embedded platforms, leveraging softwares ranging from Pure Data (Pd), Max,\ - \ and Max for Live (M4L).},\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {John Harding and Richard Graham and Edwin Park},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302563},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {234--237},\n publisher = {Virginia Tech},\n title = {CTRL: A Flexible,\ - \ Precision Interface for Analog Synthesis},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0051.pdf},\n\ - \ year = {2018}\n}\n" + ID: Scavone2003 + abstract: 'The Pipe is an experimental, general purpose music input device designed + and built in the form of a compact MIDI wind controller. The development of this + device was motivated in part by an interest in exploring breath pressure as a + control input. The Pipe provides a variety of common sensor types, including force + sensing resistors, momentary switches, accelerometers, potentiometers, and an + air pressure transducer, which allow maximum flexibility in the design of a sensor + mapping scheme. The Pipe uses a programmable BASIC Stamp 2sx microprocessor which + outputs control messages via a standard MIDI jack.' + address: 'Montreal, Canada' + author: 'Scavone, Gary' + bibtex: "@inproceedings{Scavone2003,\n abstract = {The Pipe is an experimental,\ + \ general purpose music input device designed and built in the form of a compact\ + \ MIDI wind controller. The development of this device was motivated in part by\ + \ an interest in exploring breath pressure as a control input. 
The Pipe provides\ + \ a variety of common sensor types, including force sensing resistors, momentary\ + \ switches, accelerometers, potentiometers, and an air pressure transducer, which\ + \ allow maximum flexibility in the design of a sensor mapping scheme. The Pipe\ + \ uses a programmable BASIC Stamp 2sx microprocessor which outputs control messages\ + \ via a standard MIDI jack.},\n address = {Montreal, Canada},\n author = {Scavone,\ + \ Gary},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176557},\n\ + \ issn = {2220-4806},\n keywords = {MIDI Controller, Wind Controller, Breath Control,\ + \ Human Computer Interaction. },\n pages = {15--18},\n title = {THE PIPE: Explorations\ + \ with Breath Control},\n url = {http://www.nime.org/proceedings/2003/nime2003_015.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302563 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176557 issn: 2220-4806 - month: June - pages: 234--237 - publisher: Virginia Tech - title: 'CTRL: A Flexible, Precision Interface for Analog Synthesis' - url: http://www.nime.org/proceedings/2018/nime2018_paper0051.pdf - year: 2018 + keywords: 'MIDI Controller, Wind Controller, Breath Control, Human Computer Interaction. ' + pages: 15--18 + title: 'THE PIPE: Explorations with Breath Control' + url: http://www.nime.org/proceedings/2003/nime2003_015.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Beyls2018 - abstract: 'This paper describes a machine learning approach in the context of non-idiomatic - human-machine improvisation. 
In an attempt to avoid explicit mapping of user actions - to machine responses, an experimental machine learning strategy is suggested where - rewards are derived from the implied motivation of the human interactor – two - motivations are at work: integration (aiming to connect with machine generated - material) and expression (independent activity). By tracking consecutive changes - in musical distance (i.e. melodic similarity) between human and machine, such - motivations can be inferred. A variation of Q-learning is used featuring a self-optimizing - variable length state-action-reward list. The system (called Pock) is tunable - into particular behavioral niches by means of a limited number of parameters. - Pock is designed as a recursive structure and behaves as a complex dynamical system. - When tracking systems variables over time, emergent non-trivial patterns reveal - experimental evidence of attractors demonstrating successful adaptation.' - address: 'Blacksburg, Virginia, USA' - author: Peter Beyls - bibtex: "@inproceedings{Beyls2018,\n abstract = {This paper describes a machine\ - \ learning approach in the context of non-idiomatic human-machine improvisation.\ - \ In an attempt to avoid explicit mapping of user actions to machine responses,\ - \ an experimental machine learning strategy is suggested where rewards are derived\ - \ from the implied motivation of the human interactor – two motivations are at\ - \ work: integration (aiming to connect with machine generated material) and expression\ - \ (independent activity). By tracking consecutive changes in musical distance\ - \ (i.e. melodic similarity) between human and machine, such motivations can be\ - \ inferred. A variation of Q-learning is used featuring a self-optimizing variable\ - \ length state-action-reward list. The system (called Pock) is tunable into particular\ - \ behavioral niches by means of a limited number of parameters. 
Pock is designed\ - \ as a recursive structure and behaves as a complex dynamical system. When tracking\ - \ systems variables over time, emergent non-trivial patterns reveal experimental\ - \ evidence of attractors demonstrating successful adaptation.},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Peter Beyls},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1302565},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {238--243},\n publisher = {Virginia Tech},\n title = {Motivated Learning in\ - \ Human-Machine Improvisation},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0052.pdf},\n\ - \ year = {2018}\n}\n" + ID: Baalman2003 + abstract: The STRIMIDILATOR is an instrument that uses the deviation and the vibration + of strings as MIDI-controllers. Thismethod of control gives the user direct tactile + force feedbackand allows for subtle control. The development of the instrument + and its different functions are described. + address: 'Montreal, Canada' + author: 'Baalman, Marije A.' + bibtex: "@inproceedings{Baalman2003,\n abstract = {The STRIMIDILATOR is an instrument\ + \ that uses the deviation and the vibration of strings as MIDI-controllers. Thismethod\ + \ of control gives the user direct tactile force feedbackand allows for subtle\ + \ control. The development of the instrument and its different functions are described.},\n\ + \ address = {Montreal, Canada},\n author = {Baalman, Marije A.},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176486},\n issn = {2220-4806},\n\ + \ keywords = {MIDI controllers, tactile force feedback, strings. 
Figure The STRIMIDILATOR\ + \ },\n pages = {19--23},\n title = {The {STRIMIDILATOR}: a String Controlled {MIDI}-Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2003/nime2003_019.pdf},\n year = {2003}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302565 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176486 issn: 2220-4806 - month: June - pages: 238--243 - publisher: Virginia Tech - title: Motivated Learning in Human-Machine Improvisation - url: http://www.nime.org/proceedings/2018/nime2018_paper0052.pdf - year: 2018 + keywords: 'MIDI controllers, tactile force feedback, strings. Figure The STRIMIDILATOR ' + pages: 19--23 + title: 'The STRIMIDILATOR: a String Controlled MIDI-Instrument' + url: http://www.nime.org/proceedings/2003/nime2003_019.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Chandran2018 - abstract: 'InterFACE is an interactive system for musical creation, mediated primarily - through the user''s facial expressions and movements. It aims to take advantage - of the expressive capabilities of the human face to create music in a way that - is both expressive and whimsical. This paper introduces the designs of three virtual - instruments in the InterFACE system: namely, FACEdrum (a drum machine), GrannyFACE - (a granular synthesis sampler), and FACEorgan (a laptop mouth organ using both - face tracking and audio analysis). We present the design behind these instruments - and consider what it means to be able to create music with one''s face. Finally, - we discuss the usability and aesthetic criteria for evaluating such a system, - taking into account our initial design goals as well as the resulting experience - for the performer and audience.' 
- address: 'Blacksburg, Virginia, USA' - author: Deepak Chandran and Ge Wang - bibtex: "@inproceedings{Chandran2018,\n abstract = {InterFACE is an interactive\ - \ system for musical creation, mediated primarily through the user's facial expressions\ - \ and movements. It aims to take advantage of the expressive capabilities of the\ - \ human face to create music in a way that is both expressive and whimsical. This\ - \ paper introduces the designs of three virtual instruments in the InterFACE system:\ - \ namely, FACEdrum (a drum machine), GrannyFACE (a granular synthesis sampler),\ - \ and FACEorgan (a laptop mouth organ using both face tracking and audio analysis).\ - \ We present the design behind these instruments and consider what it means to\ - \ be able to create music with one's face. Finally, we discuss the usability and\ - \ aesthetic criteria for evaluating such a system, taking into account our initial\ - \ design goals as well as the resulting experience for the performer and audience.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Deepak Chandran and Ge Wang},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302569},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {244--248},\n publisher = {Virginia Tech},\n title\ - \ = {InterFACE: new faces for musical expression},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0053.pdf},\n\ - \ year = {2018}\n}\n" + ID: Wilson2003 + abstract: 'Over the past year the instructors of the Human ComputerInteraction courses + at CCRMA have undertaken a technology shift to a much more powerful teaching platform. + Wedescribe the technical features of the new Atmel AVR basedplatform, contrasting + it with the Parallax BASIC Stampplatform used in the past. 
The successes and failures + ofthe new platform are considered, and some student projectsuccess stories described.' + address: 'Montreal, Canada' + author: 'Wilson, Scott and Gurevich, Michael and Verplank, Bill and Stang, Pascal' + bibtex: "@inproceedings{Wilson2003,\n abstract = {Over the past year the instructors\ + \ of the Human ComputerInteraction courses at CCRMA have undertaken a technology\ + \ shift to a much more powerful teaching platform. Wedescribe the technical features\ + \ of the new Atmel AVR basedplatform, contrasting it with the Parallax BASIC Stampplatform\ + \ used in the past. The successes and failures ofthe new platform are considered,\ + \ and some student projectsuccess stories described.},\n address = {Montreal,\ + \ Canada},\n author = {Wilson, Scott and Gurevich, Michael and Verplank, Bill\ + \ and Stang, Pascal},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ + \ = {10.5281/zenodo.1176571},\n issn = {2220-4806},\n keywords = {Microcontrollers,\ + \ Music Controllers, Pedagogy, Atmel AVR, BASIC Stamp.},\n pages = {24--29},\n\ + \ title = {Microcontrollers in Music HCI Instruction: Reflections on our Switch\ + \ to the Atmel AVR Platform},\n url = {http://www.nime.org/proceedings/2003/nime2003_024.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302569 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176571 issn: 2220-4806 - month: June - pages: 244--248 - publisher: Virginia Tech - title: 'InterFACE: new faces for musical expression' - url: http://www.nime.org/proceedings/2018/nime2018_paper0053.pdf - year: 2018 + keywords: 'Microcontrollers, Music Controllers, Pedagogy, Atmel AVR, BASIC Stamp.' 
+ pages: 24--29 + title: 'Microcontrollers in Music HCI Instruction: Reflections on our Switch to + the Atmel AVR Platform' + url: http://www.nime.org/proceedings/2003/nime2003_024.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Polfreman2018 - abstract: 'Hands are important anatomical structures for musical performance, and - recent developments in input device technology have allowed rather detailed capture - of hand gestures using consumer-level products. While in some musical contexts, - detailed hand and finger movements are required, in others it is sufficient to - communicate discrete hand postures to indicate selection or other state changes. - This research compared three approaches to capturing hand gestures where the shape - of the hand, i.e. the relative positions and angles of finger joints, are an important - part of the gesture. A number of sensor types can be used to capture information - about hand posture, each of which has various practical advantages and disadvantages - for music applications. This study compared three approaches, using optical, inertial - and muscular information, with three sets of 5 hand postures (i.e. static gestures) - and gesture recognition algorithms applied to the device data, aiming to determine - which methods are most effective.' - address: 'Blacksburg, Virginia, USA' - author: Richard Polfreman - bibtex: "@inproceedings{Polfreman2018,\n abstract = {Hands are important anatomical\ - \ structures for musical performance, and recent developments in input device\ - \ technology have allowed rather detailed capture of hand gestures using consumer-level\ - \ products. While in some musical contexts, detailed hand and finger movements\ - \ are required, in others it is sufficient to communicate discrete hand postures\ - \ to indicate selection or other state changes. This research compared three approaches\ - \ to capturing hand gestures where the shape of the hand, i.e. 
the relative positions\ - \ and angles of finger joints, are an important part of the gesture. A number\ - \ of sensor types can be used to capture information about hand posture, each\ - \ of which has various practical advantages and disadvantages for music applications.\ - \ This study compared three approaches, using optical, inertial and muscular information,\ - \ with three sets of 5 hand postures (i.e. static gestures) and gesture recognition\ - \ algorithms applied to the device data, aiming to determine which methods are\ - \ most effective.},\n address = {Blacksburg, Virginia, USA},\n author = {Richard\ - \ Polfreman},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302571},\n editor\ - \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {249--254},\n publisher = {Virginia\ - \ Tech},\n title = {Hand Posture Recognition: IR, IMU and sEMG},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0054.pdf},\n\ - \ year = {2018}\n}\n" + ID: Andersen2003 + abstract: 'The Disc Jockey (DJ) software system Mixxx is presented.Mixxx makes it + possible to conduct studies of new interaction techniques in connection with the + DJ situation, by itsopen design and easy integration of new software modulesand + MIDI connection to external controllers. To gain a better understanding of working + practices, and to aid the designprocess of new interfaces, interviews with two + contemporarymusicians and DJ''s are presented. In contact with thesemusicians + development of several novel prototypes for DJinteraction have been made. Finally + implementation detailsof Mixxx are described.' + address: 'Montreal, Canada' + author: 'Andersen, Tue H.' 
+ bibtex: "@inproceedings{Andersen2003,\n abstract = {The Disc Jockey (DJ) software\ + \ system Mixxx is presented.Mixxx makes it possible to conduct studies of new\ + \ interaction techniques in connection with the DJ situation, by itsopen design\ + \ and easy integration of new software modulesand MIDI connection to external\ + \ controllers. To gain a better understanding of working practices, and to aid\ + \ the designprocess of new interfaces, interviews with two contemporarymusicians\ + \ and DJ's are presented. In contact with thesemusicians development of several\ + \ novel prototypes for DJinteraction have been made. Finally implementation detailsof\ + \ Mixxx are described.},\n address = {Montreal, Canada},\n author = {Andersen,\ + \ Tue H.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176484},\n\ + \ issn = {2220-4806},\n keywords = {DJ, software, interaction, visualization,\ + \ controllers, augmented reality.},\n pages = {30--35},\n title = {Mixxx : Towards\ + \ Novel DJ Interfaces},\n url = {http://www.nime.org/proceedings/2003/nime2003_030.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302571 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176484 issn: 2220-4806 - month: June - pages: 249--254 - publisher: Virginia Tech - title: 'Hand Posture Recognition: IR, IMU and sEMG' - url: http://www.nime.org/proceedings/2018/nime2018_paper0054.pdf - year: 2018 + keywords: 'DJ, software, interaction, visualization, controllers, augmented reality.' 
+ pages: 30--35 + title: 'Mixxx : Towards Novel DJ Interfaces' + url: http://www.nime.org/proceedings/2003/nime2003_030.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Malloch2018 - abstract: 'The Digital Orchestra Toolbox for Max is an open-source collection of - small modular software tools for aiding the development of Digital Musical Instruments. - Each tool takes the form of an "abstraction" for the visual programming environment - Max, meaning it can be opened and understood by users within the Max environment, - as well as copied, modified, and appropriated as desired. This paper describes - the origins of the Toolbox and our motivations for creating it, broadly outlines - the types of tools included, and follows the development of the project over the - last twelve years. We also present examples of several digital musical instruments - built using the Toolbox.' - address: 'Blacksburg, Virginia, USA' - author: Joseph Malloch and Marlon Mario Schumacher and Stephen Sinclair and Marcelo - Wanderley - bibtex: "@inproceedings{Malloch2018,\n abstract = {The Digital Orchestra Toolbox\ - \ for Max is an open-source collection of small modular software tools for aiding\ - \ the development of Digital Musical Instruments. Each tool takes the form of\ - \ an \"abstraction\" for the visual programming environment Max, meaning it can\ - \ be opened and understood by users within the Max environment, as well as copied,\ - \ modified, and appropriated as desired. 
This paper describes the origins of the\ - \ Toolbox and our motivations for creating it, broadly outlines the types of tools\ - \ included, and follows the development of the project over the last twelve years.\ - \ We also present examples of several digital musical instruments built using\ - \ the Toolbox.},\n address = {Blacksburg, Virginia, USA},\n author = {Joseph Malloch\ - \ and Marlon Mario Schumacher and Stephen Sinclair and Marcelo Wanderley},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302573},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {255--258},\n publisher = {Virginia Tech},\n title\ - \ = {The Digital Orchestra Toolbox for Max},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0055.pdf},\n\ - \ year = {2018}\n}\n" + ID: Orio2003 + abstract: 'Score following is the synchronisation of a computer with a performer + playing a known musical score. It now has a history of about twenty years as a + research and musical topic, and is an ongoing project at Ircam. We present an + overview of existing and historical score following systems, followed by fundamental + definitions and terminology, and considerations about score formats, evaluation + of score followers, and training. The score follower that we developed at Ircam + is based on a Hidden Markov Model and on the modeling of the expected signal received + from the performer. The model has been implemented in an audio and a Midi version, + and is now being used in production. We report here our first experiences and + our first steps towards a complete evaluation of system performances. Finally, + we indicate directions how score following can go beyond the artistic applications + known today.' 
+ address: 'Montreal, Canada' + author: 'Orio, Nicola and Lemouton, Serge and Schwarz, Diemo' + bibtex: "@inproceedings{Orio2003,\n abstract = {Score following is the synchronisation\ + \ of a computer with a performer playing a known musical score. It now has a history\ + \ of about twenty years as a research and musical topic, and is an ongoing project\ + \ at Ircam. We present an overview of existing and historical score following\ + \ systems, followed by fundamental definitions and terminology, and considerations\ + \ about score formats, evaluation of score followers, and training. The score\ + \ follower that we developed at Ircam is based on a Hidden Markov Model and on\ + \ the modeling of the expected signal received from the performer. The model has\ + \ been implemented in an audio and a Midi version, and is now being used in production.\ + \ We report here our first experiences and our first steps towards a complete\ + \ evaluation of system performances. Finally, we indicate directions how score\ + \ following can go beyond the artistic applications known today.},\n address =\ + \ {Montreal, Canada},\n author = {Orio, Nicola and Lemouton, Serge and Schwarz,\ + \ Diemo},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176547},\n\ + \ issn = {2220-4806},\n keywords = {Score following, score recognition, real time\ + \ audio alignment, virtual accompaniment.},\n pages = {36--41},\n title = {Score\ + \ Following: State of the Art and New Developments},\n url = {http://www.nime.org/proceedings/2003/nime2003_036.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302573 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176547 issn: 2220-4806 - month: June - pages: 255--258 
- publisher: Virginia Tech - title: The Digital Orchestra Toolbox for Max - url: http://www.nime.org/proceedings/2018/nime2018_paper0055.pdf - year: 2018 + keywords: 'Score following, score recognition, real time audio alignment, virtual + accompaniment.' + pages: 36--41 + title: 'Score Following: State of the Art and New Developments' + url: http://www.nime.org/proceedings/2003/nime2003_036.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Manaris2018 - abstract: 'JythonMusic is a software environment for developing interactive musical - experiences and systems. It is based on jMusic, a software environment for computer-assisted - composition, which was extended within the last decade into a more comprehensive - framework providing composers and software developers with libraries for music - making, image manipulation, building graphical user interfaces, and interacting - with external devices via MIDI and OSC, among others. This environment is free - and open source. It is based on Python, therefore it provides more economical - syntax relative to Java- and C/C++-like languages. JythonMusic rests on top of - Java, so it provides access to the complete Java API and external Java-based libraries - as needed. Also, it works seamlessly with other software, such as PureData, Max/MSP, - and Processing. The paper provides an overview of important JythonMusic libraries - related to constructing interactive musical experiences. It demonstrates their - scope and utility by summarizing several projects developed using JythonMusic, - including interactive sound art installations, new interfaces for sound manipulation - and spatialization, as well as various explorations on mapping among motion, gesture - and music.' - address: 'Blacksburg, Virginia, USA' - author: Bill Manaris and Pangur Brougham-Cook and Dana Hughes and Andrew R. 
Brown - bibtex: "@inproceedings{Manaris2018,\n abstract = {JythonMusic is a software environment\ - \ for developing interactive musical experiences and systems. It is based on\ - \ jMusic, a software environment for computer-assisted composition, which was\ - \ extended within the last decade into a more comprehensive framework providing\ - \ composers and software developers with libraries for music making, image manipulation,\ - \ building graphical user interfaces, and interacting with external devices via\ - \ MIDI and OSC, among others. This environment is free and open source. It is\ - \ based on Python, therefore it provides more economical syntax relative to Java-\ - \ and C/C++-like languages. JythonMusic rests on top of Java, so it provides\ - \ access to the complete Java API and external Java-based libraries as needed.\ - \ Also, it works seamlessly with other software, such as PureData, Max/MSP, and\ - \ Processing. The paper provides an overview of important JythonMusic libraries\ - \ related to constructing interactive musical experiences. It demonstrates their\ - \ scope and utility by summarizing several projects developed using JythonMusic,\ - \ including interactive sound art installations, new interfaces for sound manipulation\ - \ and spatialization, as well as various explorations on mapping among motion,\ - \ gesture and music.},\n address = {Blacksburg, Virginia, USA},\n author = {Bill\ - \ Manaris and Pangur Brougham-Cook and Dana Hughes and Andrew R. 
Brown},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302575},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {259--262},\n publisher = {Virginia Tech},\n title\ - \ = {JythonMusic: An Environment for Developing Interactive Music Systems},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0056.pdf},\n year\ - \ = {2018}\n}\n" + ID: Traube2003 + abstract: 'In this paper, we describe a multi-level approach for the extraction + of instrumental gesture parameters taken from the characteristics of the signal + captured by a microphone and based on the knowledge of physical mechanisms taking + place on the instrument. We also explore the relationships between some features + of timbre and gesture parameters, taking as a starting point for the exploration + the timbre descriptors commonly used by professional musicians when they verbally + describe the sounds they produce with their instrument. Finally, we present how + this multi-level approach can be applied to the study of the timbre space of the + classical guitar.' + address: 'Montreal, Canada' + author: 'Traube, Caroline and Depalle, Philippe and Wanderley, Marcelo M.' + bibtex: "@inproceedings{Traube2003,\n abstract = {In this paper, we describe a multi-level\ + \ approach for the extraction of instrumental gesture parameters taken from the\ + \ characteristics of the signal captured by a microphone and based on the knowledge\ + \ of physical mechanisms taking place on the instrument. 
We also explore the relationships\ + \ between some features of timbre and gesture parameters, taking as a starting\ + \ point for the exploration the timbre descriptors commonly used by professional\ + \ musicians when they verbally describe the sounds they produce with their instrument.\ + \ Finally, we present how this multi-level approach can be applied to the study\ + \ of the timbre space of the classical guitar.},\n address = {Montreal, Canada},\n\ + \ author = {Traube, Caroline and Depalle, Philippe and Wanderley, Marcelo M.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176567},\n\ + \ issn = {2220-4806},\n keywords = {Signal analysis, indirect acquisition of instrumental\ + \ gesture, guitar},\n pages = {42--47},\n title = {Indirect Acquisition of Instrumental\ + \ Gesture Based on Signal , Physical and Perceptual Information},\n url = {http://www.nime.org/proceedings/2003/nime2003_042.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302575 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176567 issn: 2220-4806 - month: June - pages: 259--262 - publisher: Virginia Tech - title: 'JythonMusic: An Environment for Developing Interactive Music Systems' - url: http://www.nime.org/proceedings/2018/nime2018_paper0056.pdf - year: 2018 + keywords: 'Signal analysis, indirect acquisition of instrumental gesture, guitar' + pages: 42--47 + title: 'Indirect Acquisition of Instrumental Gesture Based on Signal , Physical + and Perceptual Information' + url: http://www.nime.org/proceedings/2003/nime2003_042.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Leib2018 - abstract: 'We introduce the Triplexer, a novel foot controller that gives the performer - 3 degrees of freedom 
over the control of various effects parameters. With the - Triplexer, we aim to expand the performer''s control space by augmenting the capabilities - of the common expression pedal that is found in most effects rigs. Using industrial-grade - weight-detection sensors and widely-adopted communication protocols, the Triplexer - offers a flexible platform that can be integrated into various performance setups - and situations. In this paper, we detail the design of the Triplexer by describing - its hardware, embedded signal processing, and mapping software implementations. - We also offer the results of a user study, which we conducted to evaluate the - usability of our controller.' - address: 'Blacksburg, Virginia, USA' - author: Steven Leib and Anıl Çamcı - bibtex: "@inproceedings{Leib2018,\n abstract = {We introduce the Triplexer, a novel\ - \ foot controller that gives the performer 3 degrees of freedom over the control\ - \ of various effects parameters. With the Triplexer, we aim to expand the performer's\ - \ control space by augmenting the capabilities of the common expression pedal\ - \ that is found in most effects rigs. Using industrial-grade weight-detection\ - \ sensors and widely-adopted communication protocols, the Triplexer offers a flexible\ - \ platform that can be integrated into various performance setups and situations.\ - \ In this paper, we detail the design of the Triplexer by describing its hardware,\ - \ embedded signal processing, and mapping software implementations. 
We also offer\ - \ the results of a user study, which we conducted to evaluate the usability of\ - \ our controller.},\n address = {Blacksburg, Virginia, USA},\n author = {Steven\ - \ Leib and Anıl Çamcı},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302577},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {263--268},\n publisher = {Virginia\ - \ Tech},\n title = {Triplexer: An Expression Pedal with New Degrees of Freedom},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0057.pdf},\n year\ - \ = {2018}\n}\n" + ID: Nagashima2003 + abstract: 'This is a report of research and some experimental applications of human-computer + interaction in multi-media performing arts. The human performer and the computer + systems perform computer graphic and computer music interactively in real-time. + In general, many sensors are used for the interactive communication as interfaces, + and the performer receives the output of the system via graphics, sounds and physical + reactions of interfaces like musical instruments. I have produced many types of + interfaces, not only with physical/electrical sensors but also with biological/physiological + sensors. This paper is intended as an investigation of some special approaches: + (1) 16-channel electromyogram sensor called “MiniBioMuse-III” and its application + work called “BioCosmicStorm-II” performed in Paris, Kassel and Hamburg in 2001, + (2) sensing/reacting with “breathing” in performing arts, (3) 8-channel electric-feedback + system and its experiments of “body-hearing sounds” and “body-listening to music”.' 
+ address: 'Montreal, Canada' + author: 'Nagashima, Yoichi' + bibtex: "@inproceedings{Nagashima2003,\n abstract = {This is a report of research\ + \ and some experimental applications of human-computer interaction in multi-media\ + \ performing arts. The human performer and the computer systems perform computer\ + \ graphic and computer music interactively in real-time. In general, many sensors\ + \ are used for the interactive communication as interfaces, and the performer\ + \ receives the output of the system via graphics, sounds and physical reactions\ + \ of interfaces like musical instruments. I have produced many types of interfaces,\ + \ not only with physical/electrical sensors but also with biological/physiological\ + \ sensors. This paper is intended as an investigation of some special approaches:\ + \ (1) 16-channel electromyogram sensor called “MiniBioMuse-III” and its application\ + \ work called “BioCosmicStorm-II” performed in Paris, Kassel and Hamburg in 2001,\ + \ (2) sensing/reacting with “breathing” in performing arts, (3) 8-channel electric-feedback\ + \ system and its experiments of “body-hearing sounds” and “body-listening to music”.},\n\ + \ address = {Montreal, Canada},\n author = {Nagashima, Yoichi},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176539},\n issn = {2220-4806},\n\ + \ pages = {48--53},\n title = {Bio-Sensing Systems and Bio-Feedback Systems for\ + \ Interactive Media Arts},\n url = {http://www.nime.org/proceedings/2003/nime2003_048.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302577 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176539 issn: 2220-4806 - month: June - pages: 263--268 - publisher: Virginia Tech - title: 
'Triplexer: An Expression Pedal with New Degrees of Freedom' - url: http://www.nime.org/proceedings/2018/nime2018_paper0057.pdf - year: 2018 + pages: 48--53 + title: Bio-Sensing Systems and Bio-Feedback Systems for Interactive Media Arts + url: http://www.nime.org/proceedings/2003/nime2003_048.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Úlfarsson2018 - abstract: "This paper reports upon the process of innovation of a new instrument.\ - \ The author has developed the halldorophone a new electroacoustic string instrument\ - \ which makes use of positive feedback as a key element in generating its sound.\n\ - An important objective of the project has been to encourage its use by practicing\ - \ musicians. After ten years of use, the halldorophone has a growing repertoire\ - \ of works by prominent composers and performers. During the development of the\ - \ instrument, the question has been asked: “why do musicians want to use this\ - \ instrument?” and answers have been found through on-going (informal) user studies\ - \ and feedback. As the project progresses, a picture emerges of what qualities\ - \ have led to a culture of acceptance and use around this new instrument.\nThis\ - \ paper describes the halldorophone and presents the rationale for its major design\ - \ features and ergonomic choices, as they relate to the overarching objective\ - \ of nurturing a culture of use and connects it to wider trends." - address: 'Blacksburg, Virginia, USA' - author: Halldór Úlfarsson - bibtex: "@inproceedings{Úlfarsson2018,\n abstract = {This paper reports upon the\ - \ process of innovation of a new instrument. The author has developed the halldorophone\ - \ a new electroacoustic string instrument which makes use of positive feedback\ - \ as a key element in generating its sound.\nAn important objective of the project\ - \ has been to encourage its use by practicing musicians. 
After ten years of use,\ - \ the halldorophone has a growing repertoire of works by prominent composers and\ - \ performers. During the development of the instrument, the question has been\ - \ asked: “why do musicians want to use this instrument?” and answers have been\ - \ found through on-going (informal) user studies and feedback. As the project\ - \ progresses, a picture emerges of what qualities have led to a culture of acceptance\ - \ and use around this new instrument.\nThis paper describes the halldorophone\ - \ and presents the rationale for its major design features and ergonomic choices,\ - \ as they relate to the overarching objective of nurturing a culture of use and\ - \ connects it to wider trends.},\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {Halldór Úlfarsson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302579},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {269--274},\n publisher = {Virginia\ - \ Tech},\n title = {The halldorophone: The ongoing innovation of a cello-like\ - \ drone instrument},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0058.pdf},\n\ - \ year = {2018}\n}\n" + ID: Momeni2003 + abstract: 'In this paper, we examine the use of spatial layouts of musicalmaterial + for live performance control. Emphasis is given tosoftware tools that provide + for the simple and intuitivegeometric organization of sound material, sound processingparameters, + and higher-level musical structures.' + address: 'Montreal, Canada' + author: 'Momeni, Ali and Wessel, David' + bibtex: "@inproceedings{Momeni2003,\n abstract = {In this paper, we examine the\ + \ use of spatial layouts of musicalmaterial for live performance control. 
Emphasis\ + \ is given tosoftware tools that provide for the simple and intuitivegeometric\ + \ organization of sound material, sound processingparameters, and higher-level\ + \ musical structures.},\n address = {Montreal, Canada},\n author = {Momeni, Ali\ + \ and Wessel, David},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ + \ = {10.5281/zenodo.1176535},\n issn = {2220-4806},\n keywords = {Perceptual Spaces,\ + \ Graphical Models, Real-time Instruments, Dimensionality Reduction, Multidimensional\ + \ Scaling, Live Performance, Gestural Controllers, Live Interaction, High-level\ + \ Control.},\n pages = {54--62},\n title = {Characterizing and Controlling Musical\ + \ Material Intuitively with Geometric Models},\n url = {http://www.nime.org/proceedings/2003/nime2003_054.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302579 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176535 issn: 2220-4806 - month: June - pages: 269--274 - publisher: Virginia Tech - title: 'The halldorophone: The ongoing innovation of a cello-like drone instrument' - url: http://www.nime.org/proceedings/2018/nime2018_paper0058.pdf - year: 2018 + keywords: 'Perceptual Spaces, Graphical Models, Real-time Instruments, Dimensionality + Reduction, Multidimensional Scaling, Live Performance, Gestural Controllers, Live + Interaction, High-level Control.' + pages: 54--62 + title: Characterizing and Controlling Musical Material Intuitively with Geometric + Models + url: http://www.nime.org/proceedings/2003/nime2003_054.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Tsoukalasb2018 - abstract: 'Laptop orchestras create music, although digitally produced, in a collaborative - live performance not unlike a traditional orchestra. 
The recent increase in interest - and investment in this style of music creation has paved the way for novel methods - for musicians to create and interact with music. To this end, a number of nontraditional - instruments have been constructed that enable musicians to control sound production - beyond pitch and volume, integrating filtering, musical effects, etc. Wii Remotes - (WiiMotes) have seen heavy use in maker communities, including laptop orchestras, - for their robust sensor array and low cost. The placement of sensors and the form - factor of the device itself are suited for video games, not necessarily live music - creation. In this paper, the authors present a new controller design, based on - the WiiMote hardware platform, to address usability in gesture-centric music performance. - Based on the pilot-study data, the new controller offers unrestricted two-hand - gesture production, smaller footprint, and lower muscle strain.' - address: 'Blacksburg, Virginia, USA' - author: Kyriakos Tsoukalas and Joseph Kubalak and Ivica Ico Bukvic - bibtex: "@inproceedings{Tsoukalasb2018,\n abstract = {Laptop orchestras create music,\ - \ although digitally produced, in a collaborative live performance not unlike\ - \ a traditional orchestra. The recent increase in interest and investment in this\ - \ style of music creation has paved the way for novel methods for musicians to\ - \ create and interact with music. To this end, a number of nontraditional instruments\ - \ have been constructed that enable musicians to control sound production beyond\ - \ pitch and volume, integrating filtering, musical effects, etc. Wii Remotes (WiiMotes)\ - \ have seen heavy use in maker communities, including laptop orchestras, for their\ - \ robust sensor array and low cost. The placement of sensors and the form factor\ - \ of the device itself are suited for video games, not necessarily live music\ - \ creation. 
In this paper, the authors present a new controller design, based\ - \ on the WiiMote hardware platform, to address usability in gesture-centric music\ - \ performance. Based on the pilot-study data, the new controller offers unrestricted\ - \ two-hand gesture production, smaller footprint, and lower muscle strain.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Kyriakos Tsoukalas and Joseph\ - \ Kubalak and Ivica Ico Bukvic},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302581},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {275--280},\n publisher = {Virginia\ - \ Tech},\n title = {L2OrkMote: Reimagining a Low-Cost Wearable Controller for\ - \ a Live Gesture-Centric Music Performance},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0059.pdf},\n\ - \ year = {2018}\n}\n" + ID: Burtner2003 + abstract: This paper explores compositional and notational approaches for working + with controllers. The notational systems devised for the composition (dis)Appearances + are discussed in depth in an attempt to formulate a new approach to composition + using ensembles that navigates a performative space between reality and virtuality. + address: 'Montreal, Canada' + author: 'Burtner, Matthew' + bibtex: "@inproceedings{Burtner2003,\n abstract = {This paper explores compositional\ + \ and notational approaches for working with controllers. 
The notational systems\ + \ devised for the composition (dis)Appearances are discussed in depth in an attempt\ + \ to formulate a new approach to composition using ensembles that navigates a\ + \ performative space between reality and virtuality.},\n address = {Montreal,\ + \ Canada},\n author = {Burtner, Matthew},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176492},\n issn = {2220-4806},\n keywords = {Composition,\ + \ notation systems, virtual reality, controllers, physical modeling, string, violin.},\n\ + \ pages = {63--69},\n title = {Composing for the (dis)Embodied Ensemble : Notational\ + \ Systems in (dis)Appearances},\n url = {http://www.nime.org/proceedings/2003/nime2003_063.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302581 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176492 issn: 2220-4806 - month: June - pages: 275--280 - publisher: Virginia Tech - title: 'L2OrkMote: Reimagining a Low-Cost Wearable Controller for a Live Gesture-Centric - Music Performance' - url: http://www.nime.org/proceedings/2018/nime2018_paper0059.pdf - year: 2018 + keywords: 'Composition, notation systems, virtual reality, controllers, physical + modeling, string, violin.' + pages: 63--69 + title: 'Composing for the (dis)Embodied Ensemble : Notational Systems in (dis)Appearances' + url: http://www.nime.org/proceedings/2003/nime2003_063.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Armitage2018 - abstract: 'In digital musical instrument design, different tools and methods offer - a variety of approaches for constraining the exploration of musical gestures and - sounds. 
Toolkits made of modular components usefully constrain exploration towards - simple, quick and functional combinations, and methods such as sketching and model-making - alternatively allow imagination and narrative to guide exploration. In this work - we sought to investigate a context where these approaches to exploration were - combined. We designed a craft workshop for 20 musical instrument designers, where - groups were given the same partly-finished instrument to craft for one hour with - raw materials, and though the task was open ended, they were prompted to focus - on subtle details that might distinguish their instruments. Despite the prompt - the groups diverged dramatically in intent and style, and generated gestural language - rapidly and flexibly. By the end, each group had developed a distinctive approach - to constraint, exploratory style, collaboration and interpretation of the instrument - and workshop materials. We reflect on this outcome to discuss advantages and disadvantages - to integrating digital musical instrument design tools and methods, and how to - further investigate and extend this approach.' - address: 'Blacksburg, Virginia, USA' - author: Jack Armitage and Andrew P. McPherson - bibtex: "@inproceedings{Armitage2018,\n abstract = {In digital musical instrument\ - \ design, different tools and methods offer a variety of approaches for constraining\ - \ the exploration of musical gestures and sounds. Toolkits made of modular components\ - \ usefully constrain exploration towards simple, quick and functional combinations,\ - \ and methods such as sketching and model-making alternatively allow imagination\ - \ and narrative to guide exploration. In this work we sought to investigate a\ - \ context where these approaches to exploration were combined. 
We designed a craft\ - \ workshop for 20 musical instrument designers, where groups were given the same\ - \ partly-finished instrument to craft for one hour with raw materials, and though\ - \ the task was open ended, they were prompted to focus on subtle details that\ - \ might distinguish their instruments. Despite the prompt the groups diverged\ - \ dramatically in intent and style, and generated gestural language rapidly and\ - \ flexibly. By the end, each group had developed a distinctive approach to constraint,\ - \ exploratory style, collaboration and interpretation of the instrument and workshop\ - \ materials. We reflect on this outcome to discuss advantages and disadvantages\ - \ to integrating digital musical instrument design tools and methods, and how\ - \ to further investigate and extend this approach.},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Jack Armitage and Andrew P. McPherson},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302583},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {281--286},\n publisher = {Virginia Tech},\n title\ - \ = {Crafting Digital Musical Instruments: An Exploratory Workshop Study},\n url\ - \ = {http://www.nime.org/proceedings/2018/nime2018_paper0060.pdf},\n year = {2018}\n\ - }\n" + ID: Jorda2003 + abstract: 'This paper first introduces two previous software-based musicinstruments + designed by the author, and analyses the crucialimportance of the visual feedback + introduced by theirinterfaces. A quick taxonomy and analysis of the visualcomponents + in current trends of interactive music software isthen proposed, before introducing + the reacTable*, a newproject that is currently under development. 
The reacTable* + isa collaborative music instrument, aimed both at novices andadvanced musicians, + which employs computer vision andtangible interfaces technologies, and pushes + further the visualfeedback interface ideas and techniques aforementioned.' + address: 'Montreal, Canada' + author: 'Jordà, Sergi' + bibtex: "@inproceedings{Jorda2003,\n abstract = {This paper first introduces two\ + \ previous software-based musicinstruments designed by the author, and analyses\ + \ the crucialimportance of the visual feedback introduced by theirinterfaces.\ + \ A quick taxonomy and analysis of the visualcomponents in current trends of interactive\ + \ music software isthen proposed, before introducing the reacTable*, a newproject\ + \ that is currently under development. The reacTable* isa collaborative music\ + \ instrument, aimed both at novices andadvanced musicians, which employs computer\ + \ vision andtangible interfaces technologies, and pushes further the visualfeedback\ + \ interface ideas and techniques aforementioned.},\n address = {Montreal, Canada},\n\ + \ author = {Jord\\`{a}, Sergi},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176519},\n issn = {2220-4806},\n keywords = {Interactive\ + \ music instruments, audio visualization, visual interfaces, visual feedback,\ + \ tangible interfaces, computer vision, augmented reality, music instruments for\ + \ novices, collaborative music.},\n pages = {70--76},\n title = {Sonigraphical\ + \ Instruments: From {FM}OL to the reacTable*},\n url = {http://www.nime.org/proceedings/2003/nime2003_070.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302583 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176519 issn: 2220-4806 - 
month: June - pages: 281--286 - publisher: Virginia Tech - title: 'Crafting Digital Musical Instruments: An Exploratory Workshop Study' - url: http://www.nime.org/proceedings/2018/nime2018_paper0060.pdf - year: 2018 + keywords: 'Interactive music instruments, audio visualization, visual interfaces, + visual feedback, tangible interfaces, computer vision, augmented reality, music + instruments for novices, collaborative music.' + pages: 70--76 + title: 'Sonigraphical Instruments: From FMOL to the reacTable*' + url: http://www.nime.org/proceedings/2003/nime2003_070.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Kalo2018 - abstract: 'Incremental robotic sheet forming is used to fabricate a novel cymbal - shape based on models of geometric chaos for stadium shaped boundaries. This provides - a proof-of-concept that this robotic fabrication technique might be a candidate - method for creating novel metallic ideophones that are based on sheet deformations. - Given that the technique does not require molding, it is well suited for both - rapid and iterative prototyping and the fabrication of individual pieces. With - advances in miniaturization, this approach may also be suitable for personal fabrication. - In this paper we discuss this technique as well as aspects of the geometry of - stadium cymbals and their impact on the resulting instrument.' - address: 'Blacksburg, Virginia, USA' - author: Ammar Kalo and Georg Essl - bibtex: "@inproceedings{Kalo2018,\n abstract = {Incremental robotic sheet forming\ - \ is used to fabricate a novel cymbal shape based on models of geometric chaos\ - \ for stadium shaped boundaries. This provides a proof-of-concept that this robotic\ - \ fabrication technique might be a candidate method for creating novel metallic\ - \ ideophones that are based on sheet deformations. Given that the technique does\ - \ not require molding, it is well suited for both rapid and iterative prototyping\ - \ and the fabrication of individual pieces. 
With advances in miniaturization,\ - \ this approach may also be suitable for personal fabrication. In this paper we\ - \ discuss this technique as well as aspects of the geometry of stadium cymbals\ - \ and their impact on the resulting instrument.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Ammar Kalo and Georg Essl},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1302585},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {287--292},\n publisher = {Virginia Tech},\n title = {Individual Fabrication\ - \ of Cymbals using Incremental Robotic Sheet Forming},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0061.pdf},\n\ - \ year = {2018}\n}\n" + ID: Hatanaka2003 + abstract: 'A handheld electronic musical instrument, named the BentoBox, was developed. + The motivation was to develop aninstrument which one can easily carry around and + play inmoments of free time, for example when riding public transportation or + during short breaks at work. The device wasdesigned to enable quick learning by + having various scalesprogrammed for different styles of music, and also beexpressive + by having hand controlled timbral effects whichcan be manipulated while playing. + Design analysis anditeration lead to a compact and ergonomic device. This paperfocuses + on the ergonomic design process of the hardware.' + address: 'Montreal, Canada' + author: 'Hatanaka, Motohide' + bibtex: "@inproceedings{Hatanaka2003,\n abstract = {A handheld electronic musical\ + \ instrument, named the BentoBox, was developed. 
The motivation was to develop\ + \ aninstrument which one can easily carry around and play inmoments of free time,\ + \ for example when riding public transportation or during short breaks at work.\ + \ The device wasdesigned to enable quick learning by having various scalesprogrammed\ + \ for different styles of music, and also beexpressive by having hand controlled\ + \ timbral effects whichcan be manipulated while playing. Design analysis anditeration\ + \ lead to a compact and ergonomic device. This paperfocuses on the ergonomic design\ + \ process of the hardware.},\n address = {Montreal, Canada},\n author = {Hatanaka,\ + \ Motohide},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176509},\n\ + \ issn = {2220-4806},\n keywords = {MIDI controller, electronic musical instrument,\ + \ musical instrument design, ergonomics, playability, human computer interface.\ + \ },\n pages = {77--82},\n title = {Ergonomic Design of A Portable Musical Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2003/nime2003_077.pdf},\n year = {2003}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302585 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176509 issn: 2220-4806 - month: June - pages: 287--292 - publisher: Virginia Tech - title: Individual Fabrication of Cymbals using Incremental Robotic Sheet Forming - url: http://www.nime.org/proceedings/2018/nime2018_paper0061.pdf - year: 2018 + keywords: 'MIDI controller, electronic musical instrument, musical instrument design, + ergonomics, playability, human computer interface. 
' + pages: 77--82 + title: Ergonomic Design of A Portable Musical Instrument + url: http://www.nime.org/proceedings/2003/nime2003_077.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: McDowell2018 - abstract: 'This paper reports the development of a ‘haptic-listening'' system which - presents the listener with a representation of the vibrotactile feedback perceived - by a classical guitarist during performance through the use of haptic feedback - technology. The paper describes the design of the haptic-listening system which - is in two prototypes: the “DIY Haptic Guitar” and a more robust haptic-listening - Trial prototype using a Reckhorn BS-200 shaker. Through two experiments, the perceptual - significance and overall musical contribution of the addition of haptic feedback - in a listening context was evaluated. Subjects preferred listening to the classical - guitar presentation with the addition of haptic feedback and the addition of haptic - feedback contributed to listeners'' engagement with a performance. The results - of the experiments and their implications are discussed in this paper.' - address: 'Blacksburg, Virginia, USA' - author: John McDowell - bibtex: "@inproceedings{McDowell2018,\n abstract = {This paper reports the development\ - \ of a ‘haptic-listening' system which presents the listener with a representation\ - \ of the vibrotactile feedback perceived by a classical guitarist during performance\ - \ through the use of haptic feedback technology. The paper describes the design\ - \ of the haptic-listening system which is in two prototypes: the “DIY Haptic Guitar”\ - \ and a more robust haptic-listening Trial prototype using a Reckhorn BS-200 shaker.\ - \ Through two experiments, the perceptual significance and overall musical contribution\ - \ of the addition of haptic feedback in a listening context was evaluated. 
Subjects\ - \ preferred listening to the classical guitar presentation with the addition of\ - \ haptic feedback and the addition of haptic feedback contributed to listeners'\ - \ engagement with a performance. The results of the experiments and their implications\ - \ are discussed in this paper.},\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {John McDowell},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302587},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {293--298},\n publisher = {Virginia\ - \ Tech},\n title = {Haptic-Listening and the Classical Guitar},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0062.pdf},\n\ - \ year = {2018}\n}\n" + ID: Shiraiwa2003 + abstract: 'This paper presents a novel use of a chemical experiments’ framework + as a control layer and sound source in a con- cert situation. Signal fluctuations + from electrolytic batteries made out of household chemicals, and acoustic samples + obtained from an acid/base reaction are used for musical purposes beyond the standard + data sonification role. The batteries are controlled in handy ways such as warming, + stirring and pouring that are also visually engaging. Audio mappings include synthetic + and sampled sounds completing a recipe that concocts a live performance of computer + music.' + address: 'Montreal, Canada' + author: 'Shiraiwa, Hiroko and Segnini, Rodrigo and Woo, Vivian' + bibtex: "@inproceedings{Shiraiwa2003,\n abstract = {This paper presents a novel\ + \ use of a chemical experiments’ framework as a control layer and sound source\ + \ in a con- cert situation. 
Signal fluctuations from electrolytic batteries made\ + \ out of household chemicals, and acoustic samples obtained from an acid/base\ + \ reaction are used for musical purposes beyond the standard data sonification\ + \ role. The batteries are controlled in handy ways such as warming, stirring and\ + \ pouring that are also visually engaging. Audio mappings include synthetic and\ + \ sampled sounds completing a recipe that concocts a live performance of computer\ + \ music.},\n address = {Montreal, Canada},\n author = {Shiraiwa, Hiroko and Segnini,\ + \ Rodrigo and Woo, Vivian},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ + \ = {10.5281/zenodo.1176561},\n issn = {2220-4806},\n keywords = {Chemical music,\ + \ Applied chemistry, Battery Controller.},\n pages = {83--86},\n title = {Sound\ + \ Kitchen: Designing a Chemically Controlled Musical Performance},\n url = {http://www.nime.org/proceedings/2003/nime2003_083.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302587 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176561 issn: 2220-4806 - month: June - pages: 293--298 - publisher: Virginia Tech - title: Haptic-Listening and the Classical Guitar - url: http://www.nime.org/proceedings/2018/nime2018_paper0062.pdf - year: 2018 + keywords: 'Chemical music, Applied chemistry, Battery Controller.' + pages: 83--86 + title: 'Sound Kitchen: Designing a Chemically Controlled Musical Performance' + url: http://www.nime.org/proceedings/2003/nime2003_083.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Harrison2018 - abstract: 'The design of traditional musical instruments is a process of incremental - refinement over many centuries of innovation. 
Conversely, digital musical instruments - (DMIs), being unconstrained by requirements of efficient acoustic sound production - and ergonomics, can take on forms which are more abstract in their relation to - the mechanism of control and sound production. In this paper we consider the case - of designing DMIs for use in existing musical cultures, and pose questions around - the social and technical acceptability of certain design choices relating to global - physical form and input modality (sensing strategy and the input gestures that - it affords). We designed four guitar-derivative DMIs designed to be suitable to - perform a strummed harmonic accompaniment to a folk tune. Each instrument possessed - varying degrees of `guitar-likeness'', based either on the form and aesthetics - of the guitar or the specific mode of interaction. We conducted a study where - both non-musicians and guitarists played two versions of the instruments and completed - musical tasks with each instrument. The results of this study highlight the complex - interaction between global form and input modality when designing for existing - musical cultures.' - address: 'Blacksburg, Virginia, USA' - author: Jacob Harrison and Robert H Jack and Fabio Morreale and Andrew P. 
McPherson - bibtex: "@inproceedings{Harrison2018,\n abstract = {The design of traditional musical\ - \ instruments is a process of incremental refinement over many centuries of innovation.\ - \ Conversely, digital musical instruments (DMIs), being unconstrained by requirements\ - \ of efficient acoustic sound production and ergonomics, can take on forms which\ - \ are more abstract in their relation to the mechanism of control and sound production.\ - \ In this paper we consider the case of designing DMIs for use in existing musical\ - \ cultures, and pose questions around the social and technical acceptability of\ - \ certain design choices relating to global physical form and input modality (sensing\ - \ strategy and the input gestures that it affords). We designed four guitar-derivative\ - \ DMIs designed to be suitable to perform a strummed harmonic accompaniment to\ - \ a folk tune. Each instrument possessed varying degrees of `guitar-likeness',\ - \ based either on the form and aesthetics of the guitar or the specific mode of\ - \ interaction. We conducted a study where both non-musicians and guitarists played\ - \ two versions of the instruments and completed musical tasks with each instrument.\ - \ The results of this study highlight the complex interaction between global form\ - \ and input modality when designing for existing musical cultures.},\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {Jacob Harrison and Robert H Jack\ - \ and Fabio Morreale and Andrew P. McPherson},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1302589},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {299--304},\n publisher = {Virginia Tech},\n title = {When is a Guitar not\ - \ a Guitar? 
Cultural Form, Input Modality and Expertise},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0063.pdf},\n\ - \ year = {2018}\n}\n" + ID: Ryan2003 + abstract: 'This report details work on the interdisciplinary mediaproject TGarden. + The authors discuss the challengesencountered while developing a responsive musicalenvironment + for the general public involving wearable,sensor-integrated clothing as the central + interface and input device. The project''s dramaturgical andtechnical/implementation + background are detailed toprovide a framework for the creation of a responsive + hardwareand software system that reinforces a tangible relationshipbetween the + participant''s improvised movement and musicalresponse. Finally, the authors take + into consideration testingscenarios gathered from public prototypes in two Europeanlocales + in 2001 to evaluate user experience of the system.' + address: 'Montreal, Canada' + author: 'Ryan, Joel and Salter, Christopher L.' + bibtex: "@inproceedings{Ryan2003,\n abstract = {This report details work on the\ + \ interdisciplinary mediaproject TGarden. The authors discuss the challengesencountered\ + \ while developing a responsive musicalenvironment for the general public involving\ + \ wearable,sensor-integrated clothing as the central interface and input device.\ + \ The project's dramaturgical andtechnical/implementation background are detailed\ + \ toprovide a framework for the creation of a responsive hardwareand software\ + \ system that reinforces a tangible relationshipbetween the participant's improvised\ + \ movement and musicalresponse. 
Finally, the authors take into consideration testingscenarios\ + \ gathered from public prototypes in two Europeanlocales in 2001 to evaluate user\ + \ experience of the system.},\n address = {Montreal, Canada},\n author = {Ryan,\ + \ Joel and Salter, Christopher L.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176555},\n issn = {2220-4806},\n keywords = {Gesture,\ + \ interaction, embodied action, enaction, physical model, responsive environment,\ + \ interactive musical systems, affordance, interface, phenomenology, energy, kinetics,\ + \ time constant, induced ballistics, wearable computing, accelerometer, audience\ + \ participation, dynamical system, dynamic compliance, effort, wearable instrument,\ + \ augmented physicality. },\n pages = {87--90},\n title = {TGarden: Wearable Instruments\ + \ and Augmented Physicality},\n url = {http://www.nime.org/proceedings/2003/nime2003_087.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302589 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176555 issn: 2220-4806 - month: June - pages: 299--304 - publisher: Virginia Tech - title: 'When is a Guitar not a Guitar? Cultural Form, Input Modality and Expertise' - url: http://www.nime.org/proceedings/2018/nime2018_paper0063.pdf - year: 2018 + keywords: 'Gesture, interaction, embodied action, enaction, physical model, responsive + environment, interactive musical systems, affordance, interface, phenomenology, + energy, kinetics, time constant, induced ballistics, wearable computing, accelerometer, + audience participation, dynamical system, dynamic compliance, effort, wearable + instrument, augmented physicality. 
' + pages: 87--90 + title: 'TGarden: Wearable Instruments and Augmented Physicality' + url: http://www.nime.org/proceedings/2003/nime2003_087.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Larsen2018 - abstract: 'Common emotional effects following a stroke include depression, apathy - and lack of motivation. We conducted a longitudinal case study to investigate - if enabling a post-stroke former guitarist re-learn to play guitar would help - increase motivation for self rehabilitation and quality of life after suffering - a stroke. The intervention lasted three weeks during which the participant had - a fully functional electrical guitar fitted with a strumming device controlled - by a foot pedal at his free disposal. The device replaced right strumming of the - strings, and the study showed that the participant, who was highly motivated, - played 20 sessions despite system latency and reduced musical expression. He incorporated - his own literature and equipment into his playing routine and improved greatly - as the study progressed. He was able to play alone and keep a steady rhythm in - time with backing tracks that went as fast as 120bpm. During the study he was - able to lower his error rate to 33%, while his average flutter also decreased.' - address: 'Blacksburg, Virginia, USA' - author: Jeppe Larsen and Hendrik Knoche and Dan Overholt - bibtex: "@inproceedings{Larsen2018,\n abstract = {Common emotional effects following\ - \ a stroke include depression, apathy and lack of motivation. We conducted a longitudinal\ - \ case study to investigate if enabling a post-stroke former guitarist re-learn\ - \ to play guitar would help increase motivation for self rehabilitation and quality\ - \ of life after suffering a stroke. The intervention lasted three weeks during\ - \ which the participant had a fully functional electrical guitar fitted with a\ - \ strumming device controlled by a foot pedal at his free disposal. 
The device\ - \ replaced right strumming of the strings, and the study showed that the participant,\ - \ who was highly motivated, played 20 sessions despite system latency and reduced\ - \ musical expression. He incorporated his own literature and equipment into his\ - \ playing routine and improved greatly as the study progressed. He was able to\ - \ play alone and keep a steady rhythm in time with backing tracks that went as\ - \ fast as 120bpm. During the study he was able to lower his error rate to 33%,\ - \ while his average flutter also decreased.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Jeppe Larsen and Hendrik Knoche and Dan Overholt},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302591},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {305--310},\n publisher = {Virginia Tech},\n title\ - \ = {A Longitudinal Field Trial with a Hemiplegic Guitarist Using The Actuated\ - \ Guitar},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0064.pdf},\n\ - \ year = {2018}\n}\n" + ID: Ventura2003 + abstract: 'We present a sensor-doll interface as a musical outlet forpersonal expression. + A doll serves the dual role of being bothan expressive agent and a playmate by + allowing solo andaccompanied performance. An internal computer and sensorsystem + allow the doll to receive input from the user and itssurroundings, and then respond + accordingly with musicalfeedback. Sets of musical timbres and melodies may bechanged + by presenting the doll with a series of themed clothhats, each suggesting a different + style of play. The doll mayperform by itself and play a number of melodies, or + it maycollaborate with the user when its limbs are squeezed or bent.Shared play + is further encouraged by a basic set of aural tonesmimicking conversation.' 
+ address: 'Montreal, Canada' + author: 'Ventura, David and Mase, Kenji' + bibtex: "@inproceedings{Ventura2003,\n abstract = {We present a sensor-doll interface\ + \ as a musical outlet forpersonal expression. A doll serves the dual role of being\ + \ bothan expressive agent and a playmate by allowing solo andaccompanied performance.\ + \ An internal computer and sensorsystem allow the doll to receive input from the\ + \ user and itssurroundings, and then respond accordingly with musicalfeedback.\ + \ Sets of musical timbres and melodies may bechanged by presenting the doll with\ + \ a series of themed clothhats, each suggesting a different style of play. The\ + \ doll mayperform by itself and play a number of melodies, or it maycollaborate\ + \ with the user when its limbs are squeezed or bent.Shared play is further encouraged\ + \ by a basic set of aural tonesmimicking conversation.},\n address = {Montreal,\ + \ Canada},\n author = {Ventura, David and Mase, Kenji},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176569},\n issn = {2220-4806},\n\ + \ keywords = {Musical improvisation, toy interface agent, sensor doll, context\ + \ awareness. 
},\n pages = {91--94},\n title = {Duet Musical Companion: Improvisational\ + \ Interfaces for Children},\n url = {http://www.nime.org/proceedings/2003/nime2003_091.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302591 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176569 issn: 2220-4806 - month: June - pages: 305--310 - publisher: Virginia Tech - title: A Longitudinal Field Trial with a Hemiplegic Guitarist Using The Actuated - Guitar - url: http://www.nime.org/proceedings/2018/nime2018_paper0064.pdf - year: 2018 + keywords: 'Musical improvisation, toy interface agent, sensor doll, context awareness. ' + pages: 91--94 + title: 'Duet Musical Companion: Improvisational Interfaces for Children' + url: http://www.nime.org/proceedings/2003/nime2003_091.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Stapleton2018 - abstract: 'In this paper we report preliminary observations from an ongoing study - into how musicians explore and adapt to the parameter space of a virtual-acoustic - string bridge plate instrument. These observations inform (and are informed by) - a wider approach to understanding the development of skill and style in interactions - between musicians and musical instruments. We discuss a performance-driven ecosystemic - approach to studying musical relationships, drawing on arguments from the literature - which emphasise the need to go beyond simplistic notions of control and usability - when assessing exploratory and performatory musical interactions. Lastly, we focus - on processes of perceptual learning and co-tuning between musician and instrument, - and how these activities may contribute to the emergence of personal style as - a hallmark of skilful music-making.' 
- address: 'Blacksburg, Virginia, USA' - author: Paul Stapleton and Maarten van Walstijn and Sandor Mehes - bibtex: "@inproceedings{Stapleton2018,\n abstract = {In this paper we report preliminary\ - \ observations from an ongoing study into how musicians explore and adapt to the\ - \ parameter space of a virtual-acoustic string bridge plate instrument. These\ - \ observations inform (and are informed by) a wider approach to understanding\ - \ the development of skill and style in interactions between musicians and musical\ - \ instruments. We discuss a performance-driven ecosystemic approach to studying\ - \ musical relationships, drawing on arguments from the literature which emphasise\ - \ the need to go beyond simplistic notions of control and usability when assessing\ - \ exploratory and performatory musical interactions. Lastly, we focus on processes\ - \ of perceptual learning and co-tuning between musician and instrument, and how\ - \ these activities may contribute to the emergence of personal style as a hallmark\ - \ of skilful music-making.},\n address = {Blacksburg, Virginia, USA},\n author\ - \ = {Paul Stapleton and Maarten van Walstijn and Sandor Mehes},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302593},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {311--314},\n publisher = {Virginia Tech},\n title = {Co-Tuning Virtual-Acoustic\ - \ Performance Ecosystems: observations on the development of skill and style in\ - \ the study of musician-instrument relationships},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0065.pdf},\n\ - \ year = {2018}\n}\n" + ID: Howard2003 + abstract: 'A physical modelling music synthesis system known as ‘Cymatic’ is described + that enables ‘virtual instruments’ to be controlled in real-time via a force-feedback + joystick 
and a force-feedback mouse. These serve to provide the user with gestural + controllers whilst in addition giving tactile feedback to the user. Cymatic virtual + instruments are set up via a graphical user interface in a manner that is highly + intuitive. Users design and play these virtual instruments by interacting directly + with their physical shape and structure in terms of the physical properties of + basic objects such as strings, membranes and solids which can be interconnected + to form complex structures. The virtual instrument can be excited at any point + mass by the following: bowing, plucking, striking, sine/square/sawtooth/random + waveform, or an external sound source. Virtual microphones can be placed at any + point masses to deliver the acoustic output. This paper describes the underlying + structure and principles upon which Cymatic is based, and illustrates its acoustic + output.' + address: 'Montreal, Canada' + author: 'Howard, David M. and Rimell, Stuart and Hunt, Andy D.' + bibtex: "@inproceedings{Howard2003,\n abstract = {A physical modelling music synthesis\ + \ system known as ‘Cymatic’ is described that enables ‘virtual instruments’ to\ + \ be controlled in real-time via a force-feedback joystick and a force-feedback\ + \ mouse. These serve to provide the user with gestural controllers whilst in addition\ + \ giving tactile feedback to the user. Cymatic virtual instruments are set up\ + \ via a graphical user interface in a manner that is highly intuitive. Users design\ + \ and play these virtual instruments by interacting directly with their physical\ + \ shape and structure in terms of the physical properties of basic objects such\ + \ as strings, membranes and solids which can be interconnected to form complex\ + \ structures. The virtual instrument can be excited at any point mass by the following:\ + \ bowing, plucking, striking, sine/square/sawtooth/random waveform, or an external\ + \ sound source. 
Virtual microphones can be placed at any point masses to deliver\ + \ the acoustic output. This paper describes the underlying structure and principles\ + \ upon which Cymatic is based, and illustrates its acoustic output.},\n address\ + \ = {Montreal, Canada},\n author = {Howard, David M. and Rimell, Stuart and Hunt,\ + \ Andy D.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176515},\n\ + \ issn = {2220-4806},\n keywords = {Physical modeling, haptic controllers, gesture\ + \ control, force feedback.},\n pages = {95--98},\n title = {Force Feedback Gesture\ + \ Controlled Physical Modelling Synthesis},\n url = {http://www.nime.org/proceedings/2003/nime2003_095.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302593 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176515 issn: 2220-4806 - month: June - pages: 311--314 - publisher: Virginia Tech - title: 'Co-Tuning Virtual-Acoustic Performance Ecosystems: observations on the development - of skill and style in the study of musician-instrument relationships' - url: http://www.nime.org/proceedings/2018/nime2018_paper0065.pdf - year: 2018 - - -- ENTRYTYPE: inproceedings - ID: Fish2018 - abstract: 'The environment of zero gravity affords a unique medium for new modalities - of musical performance, both in the design of instruments, and human interactions - with said instruments. To explore this medium, we have created and flown Telemetron, - the first musical instrument specifically designed for and tested in the zero - gravity environment. 
The resultant instrument (leveraging gyroscopes and wireless - telemetry transmission) and recorded performance represent an initial exploration - of compositions that are unique to the physics and dynamics of outer space. We - describe the motivations for this instrument, and the unique constraints involved - in designing for this environment. This initial design suggests possibilities - for further experiments in musical instrument design for outer space.' - address: 'Blacksburg, Virginia, USA' - author: 'Fish II, Sands A. and Nicole L''Huillier' - bibtex: "@inproceedings{Fish2018,\n abstract = {The environment of zero gravity\ - \ affords a unique medium for new modalities of musical performance, both in the\ - \ design of instruments, and human interactions with said instruments. To explore\ - \ this medium, we have created and flown Telemetron, the first musical instrument\ - \ specifically designed for and tested in the zero gravity environment. The resultant\ - \ instrument (leveraging gyroscopes and wireless telemetry transmission) and recorded\ - \ performance represent an initial exploration of compositions that are unique\ - \ to the physics and dynamics of outer space. We describe the motivations for\ - \ this instrument, and the unique constraints involved in designing for this environment.\ - \ This initial design suggests possibilities for further experiments in musical\ - \ instrument design for outer space.},\n address = {Blacksburg, Virginia, USA},\n\ - \ author = {Fish II, Sands A. 
and Nicole L'Huillier},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302595},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {315--317},\n publisher = {Virginia Tech},\n title = {Telemetron: A\ - \ Musical Instrument for Performance in Zero Gravity},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0066.pdf},\n\ - \ year = {2018}\n}\n" + keywords: 'Physical modeling, haptic controllers, gesture control, force feedback.' + pages: 95--98 + title: Force Feedback Gesture Controlled Physical Modelling Synthesis + url: http://www.nime.org/proceedings/2003/nime2003_095.pdf + year: 2003 + + +- ENTRYTYPE: inproceedings + ID: Hoskinson2003 + abstract: 'We describe the design and implementation of an adaptive system to map + control parameters to modal audio synthesis parameters in real-time. The modal + parameters describe the linear response of a virtual vibrating solid, which is + played as a musical instrument by a separate interface. The system uses a three + layer feedforward backpropagation neural network which is trained by a discrete + set of input-output examples. After training, the network extends the training + set, which functions as the specification by example of the controller, to a continuous + mapping allowing the real-time morphing of synthetic sound models. We have implemented + a prototype application using a controller which collects data from a hand-drawn + digital picture. The virtual instrument consists of a bank of modal resonators + whose frequencies, dampings, and gains are the parameters we control. We train + the system by providing pictorial representations of physical objects such as + a bell or a lamp, and associate high quality modal models obtained from measurements + on real objects with these inputs. 
After training, the user can draw pictures + interactively and “play” modal models which provide interesting (though unrealistic) + interpolations of the models from the training set in real-time.' + address: 'Montreal, Canada' + author: 'Hoskinson, Reynald and van den Doel, Kees and Fels, Sidney S.' + bibtex: "@inproceedings{Hoskinson2003,\n abstract = {We describe the design and\ + \ implementation of an adaptive system to map control parameters to modal audio\ + \ synthesis parameters in real-time. The modal parameters describe the linear\ + \ response of a virtual vibrating solid, which is played as a musical instrument\ + \ by a separate interface. The system uses a three layer feedforward backpropagation\ + \ neural network which is trained by a discrete set of input-output examples.\ + \ After training, the network extends the training set, which functions as the\ + \ specification by example of the controller, to a continuous mapping allowing\ + \ the real-time morphing of synthetic sound models. We have implemented a prototype\ + \ application using a controller which collects data from a hand-drawn digital\ + \ picture. The virtual instrument consists of a bank of modal resonators whose\ + \ frequencies, dampings, and gains are the parameters we control. We train the\ + \ system by providing pictorial representations of physical objects such as a\ + \ bell or a lamp, and associate high quality modal models obtained from measurements\ + \ on real objects with these inputs. 
After training, the user can draw pictures\ + \ interactively and “play” modal models which provide interesting (though unrealistic)\ + \ interpolations of the models from the training set in real-time.},\n address\ + \ = {Montreal, Canada},\n author = {Hoskinson, Reynald and van den Doel, Kees\ + \ and Fels, Sidney S.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ + \ = {10.5281/zenodo.1176513},\n issn = {2220-4806},\n pages = {99--103},\n title\ + \ = {Real-time Adaptive Control of Modal Synthesis},\n url = {http://www.nime.org/proceedings/2003/nime2003_099.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302595 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176513 issn: 2220-4806 - month: June - pages: 315--317 - publisher: Virginia Tech - title: 'Telemetron: A Musical Instrument for Performance in Zero Gravity' - url: http://www.nime.org/proceedings/2018/nime2018_paper0066.pdf - year: 2018 + pages: 99--103 + title: Real-time Adaptive Control of Modal Synthesis + url: http://www.nime.org/proceedings/2003/nime2003_099.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Wilcox2018 - abstract: 'This paper covers the technical and aesthetic development of robotcowboy, - the author''s ongoing human-computer wearable performance project. Conceived as - an idiosyncratic manifesto on the embodiment of computational sound, the original - robotcowboy system was built in 2006-2007 using a belt-mounted industrial wearable - computer running GNU/Linux and Pure Data, external USB audio/MIDI interfaces, - HID gamepads, and guitar. Influenced by roadworthy analog gear, chief system requirements - were mobility, plug-and-play, reliability, and low cost. 
From 2007 to 2011, this - first iteration "Cabled Madness" melded rock music with realtime algorithmic composition - and revolved around cyborg human/system tension, aspects of improvisation, audience - feedback, and an inherent capability of failure. The second iteration "Onward - to Mars" explored storytelling from 2012-2015 through the one-way journey of the - first human on Mars with the computing system adapted into a self-contained spacesuit - backpack. Now 10 years on, a new robotcowboy 2.0 system powers a third iteration - with only an iPhone and PdParty, the author''s open-source iOS application which - runs Pure Data patches and provides full duplex stereo audio, MIDI, HID game controller - support, and Open Sound Control communication. The future is bright, do you have - room to wiggle?' - address: 'Blacksburg, Virginia, USA' - author: Dan Wilcox - bibtex: "@inproceedings{Wilcox2018,\n abstract = {This paper covers the technical\ - \ and aesthetic development of robotcowboy, the author's ongoing human-computer\ - \ wearable performance project. Conceived as an idiosyncratic manifesto on the\ - \ embodiment of computational sound, the original robotcowboy system was built\ - \ in 2006-2007 using a belt-mounted industrial wearable computer running GNU/Linux\ - \ and Pure Data, external USB audio/MIDI interfaces, HID gamepads, and guitar.\ - \ Influenced by roadworthy analog gear, chief system requirements were mobility,\ - \ plug-and-play, reliability, and low cost. From 2007 to 2011, this first iteration\ - \ \"Cabled Madness\" melded rock music with realtime algorithmic composition and\ - \ revolved around cyborg human/system tension, aspects of improvisation, audience\ - \ feedback, and an inherent capability of failure. The second iteration \"Onward\ - \ to Mars\" explored storytelling from 2012-2015 through the one-way journey of\ - \ the first human on Mars with the computing system adapted into a self-contained\ - \ spacesuit backpack. 
Now 10 years on, a new {robotcowboy 2.0} system powers a\ - \ third iteration with only an iPhone and PdParty, the author's open-source iOS\ - \ application which runs Pure Data patches and provides full duplex stereo audio,\ - \ MIDI, HID game controller support, and Open Sound Control communication. The\ - \ future is bright, do you have room to wiggle?},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Dan Wilcox},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302597},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {318--323},\n publisher = {Virginia\ - \ Tech},\n title = {robotcowboy: 10 Years of Wearable Computer Rock},\n url =\ - \ {http://www.nime.org/proceedings/2018/nime2018_paper0067.pdf},\n year = {2018}\n\ + ID: Young2003a + abstract: 'Driving a bowed string physical model using a bow controller, we explore + the potentials of using the real gestures of a violinist to simulate violin sound + using a virtual instrument. After a description of the software and hardware developed, + preliminary results and future work are discussed.' + address: 'Montreal, Canada' + author: 'Young, Diana and Serafin, Stefania' + bibtex: "@inproceedings{Young2003a,\n abstract = {Driving a bowed string physical\ + \ model using a bow controller, we explore the potentials of using the real gestures\ + \ of a violinist to simulate violin sound using a virtual instrument. 
After a\ + \ description of the software and hardware developed, preliminary results and\ + \ future work are discussed.},\n address = {Montreal, Canada},\n author = {Young,\ + \ Diana and Serafin, Stefania},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176579},\n issn = {2220-4806},\n pages = {104--108},\n\ + \ title = {Playability Evaluation of a Virtual Bowed String Instrument},\n url\ + \ = {http://www.nime.org/proceedings/2003/nime2003_104.pdf},\n year = {2003}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302597 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176579 issn: 2220-4806 - month: June - pages: 318--323 - publisher: Virginia Tech - title: 'robotcowboy: 10 Years of Wearable Computer Rock' - url: http://www.nime.org/proceedings/2018/nime2018_paper0067.pdf - year: 2018 + pages: 104--108 + title: Playability Evaluation of a Virtual Bowed String Instrument + url: http://www.nime.org/proceedings/2003/nime2003_104.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Gonzalez2018 - abstract: 'This article describes the design and construction of a collection of - digitally-controlled augmented acoustic guitars, and the use of these guitars - in the installation Sverm-Resonans. The installation was built around the idea - of exploring `inverse'' sonic microinteraction, that is, controlling sounds by - the micromotion observed when attempting to stand still. It consisted of six acoustic - guitars, each equipped with a Bela embedded computer for sound processing (in - Pure Data), an infrared distance sensor to detect the presence of users, and an - actuator attached to the guitar body to produce sound. 
With an attached battery - pack, the result was a set of completely autonomous instruments that were easy - to hang in a gallery space. The installation encouraged explorations on the boundary - between the tactile and the kinesthetic, the body and the mind, and between motion - and sound. The use of guitars, albeit with an untraditional `performance'' technique, - made the experience both familiar and unfamiliar at the same time. Many users - reported heightened sensations of stillness, sound, and vibration, and that the - `inverse'' control of the instrument was both challenging and pleasant.' - address: 'Blacksburg, Virginia, USA' - author: 'Gonzalez Sanchez, Victor Evaristo and Martin, Charles Patrick and Agata - Zelechowska and Bjerkestrand, Kari Anne Vadstensvik and Victoria Johnson and - Jensenius, Alexander Refsum ' - bibtex: "@inproceedings{Gonzalez2018,\n abstract = {This article describes the design\ - \ and construction of a collection of digitally-controlled augmented acoustic\ - \ guitars, and the use of these guitars in the installation Sverm-Resonans. The\ - \ installation was built around the idea of exploring `inverse' sonic microinteraction,\ - \ that is, controlling sounds by the micromotion observed when attempting to stand\ - \ still. It consisted of six acoustic guitars, each equipped with a Bela embedded\ - \ computer for sound processing (in Pure Data), an infrared distance sensor to\ - \ detect the presence of users, and an actuator attached to the guitar body to\ - \ produce sound. With an attached battery pack, the result was a set of completely\ - \ autonomous instruments that were easy to hang in a gallery space. The installation\ - \ encouraged explorations on the boundary between the tactile and the kinesthetic,\ - \ the body and the mind, and between motion and sound. The use of guitars, albeit\ - \ with an untraditional `performance' technique, made the experience both familiar\ - \ and unfamiliar at the same time. 
Many users reported heightened sensations of\ - \ stillness, sound, and vibration, and that the `inverse' control of the instrument\ - \ was both challenging and pleasant.},\n address = {Blacksburg, Virginia, USA},\n\ - \ author = {Gonzalez Sanchez, Victor Evaristo and Martin, Charles Patrick and\ - \ Agata Zelechowska and Bjerkestrand, Kari Anne Vadstensvik and Victoria Johnson\ - \ and Jensenius, Alexander Refsum },\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302599},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {324--327},\n publisher = {Virginia\ - \ Tech},\n title = {Bela-Based Augmented Acoustic Guitars for Sonic Microinteraction},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0068.pdf},\n year\ - \ = {2018}\n}\n" + ID: Gaye2003 + abstract: 'In the project Sonic City, we have developed a system thatenables users + to create electronic music in real time by walkingthrough and interacting with + the urban environment. Weexplore the use of public space and everyday behaviours + forcreative purposes, in particular the city as an interface andmobility as an + interaction model for electronic music making.A multi-disciplinary design process + resulted in theimplementation of a wearable, context-aware prototype. Thesystem + produces music by retrieving information aboutcontext and user action and mapping + it to real-time processingof urban sounds. Potentials, constraints, and implications + ofthis type of music creation are discussed.' + address: 'Montreal, Canada' + author: 'Gaye, Lalya and Mazé, Ramia and Holmquist, Lars E.' + bibtex: "@inproceedings{Gaye2003,\n abstract = {In the project Sonic City, we have\ + \ developed a system thatenables users to create electronic music in real time\ + \ by walkingthrough and interacting with the urban environment. 
Weexplore the\ + \ use of public space and everyday behaviours forcreative purposes, in particular\ + \ the city as an interface andmobility as an interaction model for electronic\ + \ music making.A multi-disciplinary design process resulted in theimplementation\ + \ of a wearable, context-aware prototype. Thesystem produces music by retrieving\ + \ information aboutcontext and user action and mapping it to real-time processingof\ + \ urban sounds. Potentials, constraints, and implications ofthis type of music\ + \ creation are discussed.},\n address = {Montreal, Canada},\n author = {Gaye,\ + \ Lalya and Maz\\'{e}, Ramia and Holmquist, Lars E.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176507},\n issn = {2220-4806},\n\ + \ keywords = {Interactive music, interaction design, urban environment, wearable\ + \ computing, context-awareness, mobility},\n pages = {109--115},\n title = {Sonic\ + \ City: The Urban Environment as a Musical Interface},\n url = {http://www.nime.org/proceedings/2003/nime2003_109.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302599 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176507 issn: 2220-4806 - month: June - pages: 324--327 - publisher: Virginia Tech - title: Bela-Based Augmented Acoustic Guitars for Sonic Microinteraction - url: http://www.nime.org/proceedings/2018/nime2018_paper0068.pdf - year: 2018 + keywords: 'Interactive music, interaction design, urban environment, wearable computing, + context-awareness, mobility' + pages: 109--115 + title: 'Sonic City: The Urban Environment as a Musical Interface' + url: http://www.nime.org/proceedings/2003/nime2003_109.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Lepri2018 - abstract: 
"Obsolete and old technologies are often used in interactive art and music\ - \ performance. DIY practices such as hardware hacking and circuit bending provide\n\ - effective methods to the integration of old machines into new artistic inventions.\ - \ This paper presents the Cembalo Scrivano .1, an interactive audio-visual installation\ - \ based on an augmented typewriter. Borrowing concepts from media archaeology\ - \ studies, tangible interaction design and digital lutherie, we discuss how investigations\ - \ into the historical and cultural evolution of a technology can suggest directions\ - \ for the regeneration of obsolete objects. The design approach outlined focuses\ - \ on the remediation of an old device and aims to evoke cultural and physical\ - \ properties associated to the source object." - address: 'Blacksburg, Virginia, USA' - author: Giacomo Lepri and Andrew P. McPherson - bibtex: "@inproceedings{Lepri2018,\n abstract = {Obsolete and old technologies are\ - \ often used in interactive art and music performance. DIY practices such as hardware\ - \ hacking and circuit bending provide\neffective methods to the integration of\ - \ old machines into new artistic inventions. This paper presents the Cembalo Scrivano\ - \ .1, an interactive audio-visual installation based on an augmented typewriter.\ - \ Borrowing concepts from media archaeology studies, tangible interaction design\ - \ and digital lutherie, we discuss how investigations into the historical and\ - \ cultural evolution of a technology can suggest directions for the regeneration\ - \ of obsolete objects. The design approach outlined focuses on the remediation\ - \ of an old device and aims to evoke cultural and physical properties associated\ - \ to the source object.},\n address = {Blacksburg, Virginia, USA},\n author =\ - \ {Giacomo Lepri and Andrew P. 
McPherson},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302601},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {328--333},\n publisher = {Virginia\ - \ Tech},\n title = {Mirroring the past, from typewriting to interactive art: an\ - \ approach to the re-design of a vintage technology},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0069.pdf},\n\ - \ year = {2018}\n}\n" + ID: Lyons2003 + abstract: 'The role of the face and mouth in speech production as well asnon-verbal + communication suggests the use of facial action tocontrol musical sound. Here + we document work on theMouthesizer, a system which uses a headworn miniaturecamera + and computer vision algorithm to extract shapeparameters from the mouth opening + and output these as MIDIcontrol changes. We report our experience with variousgesture-to-sound + mappings and musical applications, anddescribe a live performance which used the + Mouthesizerinterface.' + address: 'Montreal, Canada' + author: 'Lyons, Michael J. and Haehnel, Michael and Tetsutani, Nobuji' + bibtex: "@inproceedings{Lyons2003,\n abstract = {The role of the face and mouth\ + \ in speech production as well asnon-verbal communication suggests the use of\ + \ facial action tocontrol musical sound. Here we document work on theMouthesizer,\ + \ a system which uses a headworn miniaturecamera and computer vision algorithm\ + \ to extract shapeparameters from the mouth opening and output these as MIDIcontrol\ + \ changes. We report our experience with variousgesture-to-sound mappings and\ + \ musical applications, anddescribe a live performance which used the Mouthesizerinterface.},\n\ + \ address = {Montreal, Canada},\n author = {Lyons, Michael J. 
and Haehnel, Michael\ + \ and Tetsutani, Nobuji},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ + \ = {10.5281/zenodo.1176529},\n issn = {2220-4806},\n keywords = {Video-based\ + \ interface; mouth controller; alternative input devices. },\n pages = {116--121},\n\ + \ title = {Designing, Playing, and Performing with a Vision-based Mouth Interface},\n\ + \ url = {http://www.nime.org/proceedings/2003/nime2003_116.pdf},\n year = {2003}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302601 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176529 issn: 2220-4806 - month: June - pages: 328--333 - publisher: Virginia Tech - title: 'Mirroring the past, from typewriting to interactive art: an approach to - the re-design of a vintage technology' - url: http://www.nime.org/proceedings/2018/nime2018_paper0069.pdf - year: 2018 + keywords: 'Video-based interface; mouth controller; alternative input devices. ' + pages: 116--121 + title: 'Designing, Playing, and Performing with a Vision-based Mouth Interface' + url: http://www.nime.org/proceedings/2003/nime2003_116.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Thorn2018 - abstract: 'This paper describes a performer-centric approach to the design, sensor - selection, data interpretation, and mapping schema of a sensor-embedded glove - called the “alto.glove” that the author uses to extend his performance abilities - on violin. The alto.glove is a response to the limitations—both creative and technical—perceived - in feature extraction processes that rely on classification. 
The hardware answers - one problem: how to extend violin playing in a minimal yet powerful way; the software - answers another: how to create a rich, evolving response that enhances expression - in improvisation. The author approaches this problem from the various roles of - violinist, hardware technician, programmer, sound designer, composer, and improviser. - Importantly, the alto.glove is designed to be cost-effective and relatively easy - to build.' - address: 'Blacksburg, Virginia, USA' - author: Seth Dominicus Thorn - bibtex: "@inproceedings{Thorn2018,\n abstract = {This paper describes a performer-centric\ - \ approach to the design, sensor selection, data interpretation, and mapping schema\ - \ of a sensor-embedded glove called the “alto.glove” that the author uses to extend\ - \ his performance abilities on violin. The alto.glove is a response to the limitations—both\ - \ creative and technical—perceived in feature extraction processes that rely on\ - \ classification. The hardware answers one problem: how to extend violin playing\ - \ in a minimal yet powerful way; the software answers another: how to create a\ - \ rich, evolving response that enhances expression in improvisation. The author\ - \ approaches this problem from the various roles of violinist, hardware technician,\ - \ programmer, sound designer, composer, and improviser. 
Importantly, the alto.glove\ - \ is designed to be cost-effective and relatively easy to build.},\n address =\ - \ {Blacksburg, Virginia, USA},\n author = {Seth Dominicus Thorn},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302603},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {334--339},\n publisher = {Virginia Tech},\n title\ - \ = {Alto.Glove: New Techniques for Augmented Violin},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0070.pdf},\n\ - \ year = {2018}\n}\n" + ID: Hewitt2003 + abstract: 'This paper describes work in progress for the development of a gestural + controller interface for contemporary vocal performance and electronic processing. + The paper includes a preliminary investigation of the gestures and movements of + vocalists who use microphones and microphone stands. This repertoire of gestures + forms the foundation of a well-practiced ‘language’ and social code for communication + between performers and audiences and serves as a basis for alternate controller + design principles. A prototype design, based on a modified microphone stand, is + presented along with a discussion of possible controller mapping strategies and + identification of directions for future research.' + address: 'Montreal, Canada' + author: 'Hewitt, Donna and Stevenson, Ian' + bibtex: "@inproceedings{Hewitt2003,\n abstract = {This paper describes work in progress\ + \ for the development of a gestural controller interface for contemporary vocal\ + \ performance and electronic processing. The paper includes a preliminary investigation\ + \ of the gestures and movements of vocalists who use microphones and microphone\ + \ stands. 
This repertoire of gestures forms the foundation of a well-practiced\ + \ ‘language’ and social code for communication between performers and audiences\ + \ and serves as a basis for alternate controller design principles. A prototype\ + \ design, based on a modified microphone stand, is presented along with a discussion\ + \ of possible controller mapping strategies and identification of directions for\ + \ future research.},\n address = {Montreal, Canada},\n author = {Hewitt, Donna\ + \ and Stevenson, Ian},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ + \ = {10.5281/zenodo.1176511},\n issn = {2220-4806},\n keywords = {Alternate controller,\ + \ gesture, microphone technique, vocal performance, performance interface, electronic\ + \ music. },\n pages = {122--128},\n title = {E-mic: Extended Mic-stand Interface\ + \ Controller},\n url = {http://www.nime.org/proceedings/2003/nime2003_122.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302603 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176511 issn: 2220-4806 - month: June - pages: 334--339 - publisher: Virginia Tech - title: 'Alto.Glove: New Techniques for Augmented Violin' - url: http://www.nime.org/proceedings/2018/nime2018_paper0070.pdf - year: 2018 + keywords: 'Alternate controller, gesture, microphone technique, vocal performance, + performance interface, electronic music. ' + pages: 122--128 + title: 'E-mic: Extended Mic-stand Interface Controller' + url: http://www.nime.org/proceedings/2003/nime2003_122.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Liontiris2018 - abstract: 'This paper illustrates the development of a Feedback Resonating Double - Bass. 
The instrument is essentially the augmentation of an acoustic double bass - using positive feedback. The research aimed to reply the question of how to augment - and convert a double bass into a feedback resonating one without following an - invasive method. The conversion process illustrated here is applicable and adaptable - to double basses of any size, without making irreversible alterations to the instruments. ' - address: 'Blacksburg, Virginia, USA' - author: 'Liontiris, Thanos Polymeneas' - bibtex: "@inproceedings{Liontiris2018,\n abstract = {This paper illustrates the\ - \ development of a Feedback Resonating Double Bass. The instrument is essentially\ - \ the augmentation of an acoustic double bass using positive feedback. The research\ - \ aimed to reply the question of how to augment and convert a double bass into\ - \ a feedback resonating one without following an invasive method. The conversion\ - \ process illustrated here is applicable and adaptable to double basses of any\ - \ size, without making irreversible alterations to the instruments. },\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {Liontiris, Thanos Polymeneas},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302605},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {340--341},\n publisher = {Virginia Tech},\n title\ - \ = {Low Frequency Feedback Drones: A non-invasive augmentation of the double\ - \ bass},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0071.pdf},\n\ - \ year = {2018}\n}\n" + ID: Blaine2003 + abstract: 'We explore a variety of design criteria applicable to thecreation of + collaborative interfaces for musical experience. 
Themain factor common to the + design of most collaborativeinterfaces for novices is that musical control is + highlyrestricted, which makes it possible to easily learn andparticipate in the + collective experience. Balancing this tradeoff is a key concern for designers, + as this happens at theexpense of providing an upward path to virtuosity with theinterface. + We attempt to identify design considerationsexemplified by a sampling of recent + collaborative devicesprimarily oriented toward novice interplay. It is our intentionto + provide a non-technical overview of design issues inherentin configuring multiplayer + experiences, particularly for entrylevel players.' + address: 'Montreal, Canada' + author: 'Blaine, Tina and Fels, Sidney S.' + bibtex: "@inproceedings{Blaine2003,\n abstract = {We explore a variety of design\ + \ criteria applicable to thecreation of collaborative interfaces for musical experience.\ + \ Themain factor common to the design of most collaborativeinterfaces for novices\ + \ is that musical control is highlyrestricted, which makes it possible to easily\ + \ learn andparticipate in the collective experience. Balancing this tradeoff is\ + \ a key concern for designers, as this happens at theexpense of providing an upward\ + \ path to virtuosity with theinterface. We attempt to identify design considerationsexemplified\ + \ by a sampling of recent collaborative devicesprimarily oriented toward novice\ + \ interplay. 
It is our intentionto provide a non-technical overview of design\ + \ issues inherentin configuring multiplayer experiences, particularly for entrylevel\ + \ players.},\n address = {Montreal, Canada},\n author = {Blaine, Tina and Fels,\ + \ Sidney S.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176490},\n\ + \ issn = {2220-4806},\n keywords = {Design, collaborative interface, musical experience,\ + \ multiplayer, novice, musical control. },\n pages = {129--134},\n title = {Contexts\ + \ of Collaborative Musical Experiences},\n url = {http://www.nime.org/proceedings/2003/nime2003_129.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302605 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176490 issn: 2220-4806 - month: June - pages: 340--341 - publisher: Virginia Tech - title: 'Low Frequency Feedback Drones: A non-invasive augmentation of the double - bass' - url: http://www.nime.org/proceedings/2018/nime2018_paper0071.pdf - year: 2018 + keywords: 'Design, collaborative interface, musical experience, multiplayer, novice, + musical control. ' + pages: 129--134 + title: Contexts of Collaborative Musical Experiences + url: http://www.nime.org/proceedings/2003/nime2003_129.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Formo2018 - abstract: 'The Orchestra of Speech is a performance concept resulting from a recent - artistic research project exploring the relationship between music and speech, - in particular improvised music and everyday conversation. As a tool in this exploration, - a digital musical instrument system has been developed for “orchestrating” musical - features of speech into music, in real time. 
Through artistic practice, this system - has evolved into a personal electroacoustic performance concept.' - address: 'Blacksburg, Virginia, USA' - author: Daniel Formo - bibtex: "@inproceedings{Formo2018,\n abstract = {The Orchestra of Speech is a performance\ - \ concept resulting from a recent artistic research project exploring the relationship\ - \ between music and speech, in particular improvised music and everyday conversation.\ - \ As a tool in this exploration, a digital musical instrument system has been\ - \ developed for “orchestrating” musical features of speech into music, in real\ - \ time. Through artistic practice, this system has evolved into a personal electroacoustic\ - \ performance concept.},\n address = {Blacksburg, Virginia, USA},\n author = {Daniel\ - \ Formo},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1302607},\n editor = {Luke\ - \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ - \ {2220-4806},\n month = {June},\n pages = {342--343},\n publisher = {Virginia\ - \ Tech},\n title = {The Orchestra of Speech: a speech-based instrument system},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0072.pdf},\n year\ - \ = {2018}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1302607 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 - issn: 2220-4806 - month: June - pages: 342--343 - publisher: Virginia Tech - title: 'The Orchestra of Speech: a speech-based instrument system' - url: http://www.nime.org/proceedings/2018/nime2018_paper0072.pdf - year: 2018 - - -- ENTRYTYPE: inproceedings - ID: Weisling2018 - abstract: 'This paper presents a brief overview of an online survey conducted with - the objective of gaining insight into compositional and performance practices - of contemporary audiovisual 
practitioners. The survey gathered information regarding - how practitioners relate aural and visual media in their work, and how compositional - and performance practices involving multiple modalities might differ from other - practices. Discussed here are three themes: compositional approaches, transparency - and audience knowledge, and error and risk, which emerged from participants'' - responses. We believe these themes contribute to a discussion within the NIME - community regarding unique challenges and objectives presented when working with - multiple media.' - address: 'Blacksburg, Virginia, USA' - author: Anna Weisling and Anna Xambó and ireti olowe and Mathieu Barthet - bibtex: "@inproceedings{Weisling2018,\n abstract = {This paper presents a brief\ - \ overview of an online survey conducted with the objective of gaining insight\ - \ into compositional and performance practices of contemporary audiovisual practitioners.\ - \ The survey gathered information regarding how practitioners relate aural and\ - \ visual media in their work, and how compositional and performance practices\ - \ involving multiple modalities might differ from other practices. Discussed here\ - \ are three themes: compositional approaches, transparency and audience knowledge,\ - \ and error and risk, which emerged from participants' responses. 
We believe these\ - \ themes contribute to a discussion within the NIME community regarding unique\ - \ challenges and objectives presented when working with multiple media.},\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {Anna Weisling and Anna Xambó and\ - \ ireti olowe and Mathieu Barthet},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302609},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {344--345},\n publisher = {Virginia\ - \ Tech},\n title = {Surveying the Compositional and Performance Practices of Audiovisual\ - \ Practitioners},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0073.pdf},\n\ - \ year = {2018}\n}\n" + ID: Hunt2003 + abstract: 'MidiGrid is a computer-based musical instrument, primarilycontrolled + with the computer mouse, which allows liveperformance of MIDI-based musical material + by mapping 2dimensional position onto musical events. Since itsinvention in 1987, + it has gained a small, but enthusiastic,band of users, and has become the primary + instrument forseveral people with physical disabilities. This paper reviewsits + development, uses and user interface issues, and highlightsthe work currently + in progress for its transformation intoMediaGrid.' + address: 'Montreal, Canada' + author: 'Hunt, Andy D. and Kirk, Ross' + bibtex: "@inproceedings{Hunt2003,\n abstract = {MidiGrid is a computer-based musical\ + \ instrument, primarilycontrolled with the computer mouse, which allows liveperformance\ + \ of MIDI-based musical material by mapping 2dimensional position onto musical\ + \ events. Since itsinvention in 1987, it has gained a small, but enthusiastic,band\ + \ of users, and has become the primary instrument forseveral people with physical\ + \ disabilities. 
This paper reviewsits development, uses and user interface issues,\ + \ and highlightsthe work currently in progress for its transformation intoMediaGrid.},\n\ + \ address = {Montreal, Canada},\n author = {Hunt, Andy D. and Kirk, Ross},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176517},\n\ + \ issn = {2220-4806},\n keywords = {Live performance, Computer-based musical instruments,\ + \ Human Computer Interaction for Music},\n pages = {135--139},\n title = {MidiGrid:\ + \ Past, Present and Future},\n url = {http://www.nime.org/proceedings/2003/nime2003_135.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302609 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176517 issn: 2220-4806 - month: June - pages: 344--345 - publisher: Virginia Tech - title: Surveying the Compositional and Performance Practices of Audiovisual Practitioners - url: http://www.nime.org/proceedings/2018/nime2018_paper0073.pdf - year: 2018 + keywords: 'Live performance, Computer-based musical instruments, Human Computer + Interaction for Music' + pages: 135--139 + title: 'MidiGrid: Past, Present and Future' + url: http://www.nime.org/proceedings/2003/nime2003_135.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Marasco2018 - abstract: 'The author presents Sound Opinions, a custom software tool that uses - sentiment analysis to create sound art installations and music compositions. The - software runs inside the NodeRed.js programming environment. It scrapes text from - web pages, pre-processes it, performs sentiment analysis via a remote API, and - parses the resulting data for use in external digital audio programs. The sentiment - analysis itself is handled by IBM''s Watson Tone Analyzer. 
The author has used - this tool to create an interactive multimedia installation, titled Critique. Sources - of criticism of a chosen musical work are analyzed and the negative or positive - statements about that composition work to warp and change it. This allows the - audience to only hear the work through the lens of its critics, and not in the - original form that its creator intended.' - address: 'Blacksburg, Virginia, USA' - author: Anthony T. Marasco - bibtex: "@inproceedings{Marasco2018,\n abstract = {The author presents Sound Opinions,\ - \ a custom software tool that uses sentiment analysis to create sound art installations\ - \ and music compositions. The software runs inside the NodeRed.js programming\ - \ environment. It scrapes text from web pages, pre-processes it, performs sentiment\ - \ analysis via a remote API, and parses the resulting data for use in external\ - \ digital audio programs. The sentiment analysis itself is handled by IBM's Watson\ - \ Tone Analyzer. The author has used this tool to create an interactive multimedia\ - \ installation, titled Critique. Sources of criticism of a chosen musical work\ - \ are analyzed and the negative or positive statements about that composition\ - \ work to warp and change it. 
This allows the audience to only hear the work\ - \ through the lens of its critics, and not in the original form that its creator\ - \ intended.},\n address = {Blacksburg, Virginia, USA},\n author = {Anthony T.\ - \ Marasco},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302611},\n editor\ - \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {346--347},\n publisher = {Virginia\ - \ Tech},\n title = {Sound Opinions: Creating a Virtual Tool for Sound Art Installations\ - \ through Sentiment Analysis of Critical Reviews},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0074.pdf},\n\ - \ year = {2018}\n}\n" + ID: Kessous2003 + abstract: 'This paper presents a study of bimanual control applied tosound synthesis. + This study deals with coordination,cooperation, and abilities of our hands in + musical context. Wedescribe examples of instruments made using subtractivesynthesis, + scanned synthesis in Max/MSP and commercialstand-alone software synthesizers via + MIDI communicationprotocol. These instruments have been designed according to + amulti-layer-mapping model, which provides modular design.They have been used + in concerts and performanceconsiderations are discussed too.' + address: 'Montreal, Canada' + author: 'Kessous, Loïc and Arfib, Daniel' + bibtex: "@inproceedings{Kessous2003,\n abstract = {This paper presents a study of\ + \ bimanual control applied tosound synthesis. This study deals with coordination,cooperation,\ + \ and abilities of our hands in musical context. Wedescribe examples of instruments\ + \ made using subtractivesynthesis, scanned synthesis in Max/MSP and commercialstand-alone\ + \ software synthesizers via MIDI communicationprotocol. 
These instruments have\ + \ been designed according to amulti-layer-mapping model, which provides modular\ + \ design.They have been used in concerts and performanceconsiderations are discussed\ + \ too.},\n address = {Montreal, Canada},\n author = {Kessous, Lo\\\"{i}c and Arfib,\ + \ Daniel},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176523},\n\ + \ issn = {2220-4806},\n keywords = {Gesture control, mapping, alternate controllers,\ + \ musical instruments. },\n pages = {140--145},\n title = {Bimanuality in Alternate\ + \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2003/nime2003_140.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302611 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176523 issn: 2220-4806 - month: June - pages: 346--347 - publisher: Virginia Tech - title: 'Sound Opinions: Creating a Virtual Tool for Sound Art Installations through - Sentiment Analysis of Critical Reviews' - url: http://www.nime.org/proceedings/2018/nime2018_paper0074.pdf - year: 2018 + keywords: 'Gesture control, mapping, alternate controllers, musical instruments. ' + pages: 140--145 + title: Bimanuality in Alternate Musical Instruments + url: http://www.nime.org/proceedings/2003/nime2003_140.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Kritsis2018 - abstract: 'We present our work in progress on the development of a web-based system - for music performance with virtual instruments in a virtual 3D environment, which - provides three means of interaction (i.e physical, gestural and mixed), using - tracking data from a Leap Motion sensor. 
Moreover, our system is integrated as - a creative tool within the context of a STEAM education platform that promotes - science learning through musical activities. The presented system models string - and percussion instruments, with realistic sonic feedback based on Modalys, a - physical model-based sound synthesis engine. Our proposal meets the performance - requirements of real-time interactive systems and is implemented strictly with - web technologies.' - address: 'Blacksburg, Virginia, USA' - author: Kosmas Kritsis and Aggelos Gkiokas and Carlos Árpád Acosta and Quentin Lamerand - and Robert Piéchaud and Maximos Kaliakatsos-Papakostas and Vassilis Katsouros - bibtex: "@inproceedings{Kritsis2018,\n abstract = {We present our work in progress\ - \ on the development of a web-based system for music performance with virtual\ - \ instruments in a virtual 3D environment, which provides three means of interaction\ - \ (i.e physical, gestural and mixed), using tracking data from a Leap Motion sensor.\ - \ Moreover, our system is integrated as a creative tool within the context of\ - \ a STEAM education platform that promotes science learning through musical activities.\ - \ The presented system models string and percussion instruments, with realistic\ - \ sonic feedback based on Modalys, a physical model-based sound synthesis engine.\ - \ Our proposal meets the performance requirements of real-time interactive systems\ - \ and is implemented strictly with web technologies.},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Kosmas Kritsis and Aggelos Gkiokas and Carlos Árpád\ - \ Acosta and Quentin Lamerand and Robert Piéchaud and Maximos Kaliakatsos-Papakostas\ - \ and Vassilis Katsouros},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302613},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = 
{June},\n pages = {348--349},\n publisher = {Virginia\ - \ Tech},\n title = {A web-based 3D environment for gestural interaction with virtual\ - \ music instruments as a STEAM education tool},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0075.pdf},\n\ - \ year = {2018}\n}\n" + ID: Modler2003 + abstract: 'This paper describes the implementation of Time Delay NeuralNetworks + (TDNN) to recognize gestures from video images.Video sources are used because + they are non-invasive and do notinhibit performer''s physical movement or require + specialistdevices to be attached to the performer which experience hasshown to + be a significant problem that impacts musiciansperformance and can focus musical + rehearsals and performancesupon technical rather than musical concerns (Myatt + 2003).We describe a set of hand gestures learned by an artificial neuralnetwork + to control musical parameters expressively in real time.The set is made up of + different types of gestures in order toinvestigate:-aspects of the recognition + process-expressive musical control-schemes of parameter mapping-generalization + issues for an extended set for musicalcontrolThe learning procedure of the Neural + Network is describedwhich is based on variations by affine transformations of + imagesequences of the hand gestures.The whole application including the gesture + capturing isimplemented in jMax to achieve real time conditions and easyintegration + into a musical environment to realize differentmappings and routings of the control + stream.The system represents a practice-based research using actualmusic models + like compositions and processes of compositionwhich will follow the work described + in the paper.' 
+ address: 'Montreal, Canada' + author: 'Modler, Paul and Myatt, Tony and Saup, Michael' + bibtex: "@inproceedings{Modler2003,\n abstract = {This paper describes the implementation\ + \ of Time Delay NeuralNetworks (TDNN) to recognize gestures from video images.Video\ + \ sources are used because they are non-invasive and do notinhibit performer's\ + \ physical movement or require specialistdevices to be attached to the performer\ + \ which experience hasshown to be a significant problem that impacts musiciansperformance\ + \ and can focus musical rehearsals and performancesupon technical rather than\ + \ musical concerns (Myatt 2003).We describe a set of hand gestures learned by\ + \ an artificial neuralnetwork to control musical parameters expressively in real\ + \ time.The set is made up of different types of gestures in order toinvestigate:-aspects\ + \ of the recognition process-expressive musical control-schemes of parameter mapping-generalization\ + \ issues for an extended set for musicalcontrolThe learning procedure of the Neural\ + \ Network is describedwhich is based on variations by affine transformations of\ + \ imagesequences of the hand gestures.The whole application including the gesture\ + \ capturing isimplemented in jMax to achieve real time conditions and easyintegration\ + \ into a musical environment to realize differentmappings and routings of the\ + \ control stream.The system represents a practice-based research using actualmusic\ + \ models like compositions and processes of compositionwhich will follow the work\ + \ described in the paper.},\n address = {Montreal, Canada},\n author = {Modler,\ + \ Paul and Myatt, Tony and Saup, Michael},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176533},\n issn = {2220-4806},\n keywords = {Gesture\ + \ Recognition, Artificial Neural Network, Expressive Control, Real-time Interaction\ 
+ \ },\n pages = {146--150},\n title = {An Experimental Set of Hand Gestures for\ + \ Expressive Control of Musical Parameters in Realtime},\n url = {http://www.nime.org/proceedings/2003/nime2003_146.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302613 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176533 issn: 2220-4806 - month: June - pages: 348--349 - publisher: Virginia Tech - title: A web-based 3D environment for gestural interaction with virtual music instruments - as a STEAM education tool - url: http://www.nime.org/proceedings/2018/nime2018_paper0075.pdf - year: 2018 + keywords: 'Gesture Recognition, Artificial Neural Network, Expressive Control, Real-time + Interaction ' + pages: 146--150 + title: An Experimental Set of Hand Gestures for Expressive Control of Musical Parameters + in Realtime + url: http://www.nime.org/proceedings/2003/nime2003_146.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Mannone2018 - abstract: 'We developed a new musical interface, CubeHarmonic, with the magnetic - tracking system, IM3D, created at Tohoku University. IM3D system precisely tracks - positions of tiny, wireless, battery-less, and identifiable LC coils in real time. - The CubeHarmonic is a musical application of the Rubik''s cube, with notes on - each little piece. Scrambling the cube, we get different chords and chord sequences. - Positions of the pieces which contain LC coils are detected through IM3D, and - transmitted to the computer, that plays sounds. The central position of the cube - is also computed from the LC coils located into the corners of Rubik''s cube, - and, depending on the computed central position, we can manipulate overall loudness - and pitch changes, as in theremin playing. 
This new instrument, whose first idea - comes from mathematical theory of music, can be used as a teaching tool both for - math (group theory) and music (music theory, mathematical music theory), as well - as a composition device, a new instrument for avant-garde performances, and a - recreational tool.' - address: 'Blacksburg, Virginia, USA' - author: Maria C. Mannone and Eri Kitamura and Jiawei Huang and Ryo Sugawara and - Yoshifumi Kitamura - bibtex: "@inproceedings{Mannone2018,\n abstract = {We developed a new musical interface,\ - \ CubeHarmonic, with the magnetic tracking system, IM3D, created at Tohoku University.\ - \ IM3D system precisely tracks positions of tiny, wireless, battery-less, and\ - \ identifiable LC coils in real time. The CubeHarmonic is a musical application\ - \ of the Rubik's cube, with notes on each little piece. Scrambling the cube, we\ - \ get different chords and chord sequences. Positions of the pieces which contain\ - \ LC coils are detected through IM3D, and transmitted to the computer, that plays\ - \ sounds. The central position of the cube is also computed from the LC coils\ - \ located into the corners of Rubik's cube, and, depending on the computed central\ - \ position, we can manipulate overall loudness and pitch changes, as in theremin\ - \ playing. This new instrument, whose first idea comes from mathematical theory\ - \ of music, can be used as a teaching tool both for math (group theory) and music\ - \ (music theory, mathematical music theory), as well as a composition device,\ - \ a new instrument for avant-garde performances, and a recreational tool.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Maria C. 
Mannone and Eri\ - \ Kitamura and Jiawei Huang and Ryo Sugawara and Yoshifumi Kitamura},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302615},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {350--351},\n publisher = {Virginia Tech},\n title\ - \ = {CubeHarmonic: A New Interface from a Magnetic 3D Motion Tracking System to\ - \ Music Performance},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0076.pdf},\n\ - \ year = {2018}\n}\n" + ID: Nakra2003 + abstract: 'This paper describes the artistic projects undertaken at ImmersionMusic, + Inc. (www.immersionmusic.org) during its three-yearexistence. We detail work in + interactive performance systems,computer-based training systems, and concert production.' + address: 'Montreal, Canada' + author: 'Nakra, Teresa M.' + bibtex: "@inproceedings{Nakra2003,\n abstract = {This paper describes the artistic\ + \ projects undertaken at ImmersionMusic, Inc. (www.immersionmusic.org) during\ + \ its three-yearexistence. 
We detail work in interactive performance systems,computer-based\ + \ training systems, and concert production.},\n address = {Montreal, Canada},\n\ + \ author = {Nakra, Teresa M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176541},\n issn = {2220-4806},\n keywords = {Interactive\ + \ computer music systems, gestural interaction, Conductor's Jacket, Digital Baton\ + \ },\n pages = {151--152},\n title = {Immersion Music: a Progress Report},\n url\ + \ = {http://www.nime.org/proceedings/2003/nime2003_151.pdf},\n year = {2003}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302615 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176541 issn: 2220-4806 - month: June - pages: 350--351 - publisher: Virginia Tech - title: 'CubeHarmonic: A New Interface from a Magnetic 3D Motion Tracking System - to Music Performance' - url: http://www.nime.org/proceedings/2018/nime2018_paper0076.pdf - year: 2018 + keywords: 'Interactive computer music systems, gestural interaction, Conductor''s + Jacket, Digital Baton ' + pages: 151--152 + title: 'Immersion Music: a Progress Report' + url: http://www.nime.org/proceedings/2003/nime2003_151.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Kristoffersen2018 - abstract: 'In this paper we present a novel digital effects controller for electric - guitar based upon the whammy bar as a user interface. The goal with the project - is to give guitarists a way to interact with dynamic effects control that feels - familiar to their instrument and playing style. A 3D-printed prototype has been - made. It replaces the whammy bar of a traditional Fender vibrato system with a - sensor-equipped whammy bar. 
The functionality of the present prototype includes - separate readings of force applied towards and from the guitar body, as well as - an end knob for variable control. Further functionality includes a hinged system - allowing for digital effect control either with or without the mechanical manipulation - of string tension. By incorporating digital sensors to the idiomatic whammy bar - interface, one would potentially bring guitarists a high level of control intimacy - with the device, and thus lead to a closer interaction with effects.' - address: 'Blacksburg, Virginia, USA' - author: Martin M Kristoffersen and Trond Engum - bibtex: "@inproceedings{Kristoffersen2018,\n abstract = {In this paper we present\ - \ a novel digital effects controller for electric guitar based upon the whammy\ - \ bar as a user interface. The goal with the project is to give guitarists a way\ - \ to interact with dynamic effects control that feels familiar to their instrument\ - \ and playing style. A 3D-printed prototype has been made. It replaces the whammy\ - \ bar of a traditional Fender vibrato system with a sensor-equipped whammy bar.\ - \ The functionality of the present prototype includes separate readings of force\ - \ applied towards and from the guitar body, as well as an end knob for variable\ - \ control. Further functionality includes a hinged system allowing for digital\ - \ effect control either with or without the mechanical manipulation of string\ - \ tension. 
By incorporating digital sensors to the idiomatic whammy bar interface,\ - \ one would potentially bring guitarists a high level of control intimacy with\ - \ the device, and thus lead to a closer interaction with effects.},\n address\ - \ = {Blacksburg, Virginia, USA},\n author = {Martin M Kristoffersen and Trond\ - \ Engum},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1302617},\n editor = {Luke\ - \ Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn =\ - \ {2220-4806},\n month = {June},\n pages = {352--355},\n publisher = {Virginia\ - \ Tech},\n title = {The Whammy Bar as a Digital Effect Controller},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0077.pdf},\n\ - \ year = {2018}\n}\n" + ID: Wright2003 + abstract: 'OpenSound Control (“OSC”) is a protocol for communication among computers, + sound synthesizers, and other multimedia devices that is optimized for modern + networking technology. OSC has achieved wide use in the field of computer-based + new interfaces for musical expression for wide-area and local-area networked distributed + music systems, inter-process communication, and even within a single application.' + address: 'Montreal, Canada' + author: 'Wright, Matthew and Freed, Adrian and Momeni, Ali' + bibtex: "@inproceedings{Wright2003,\n abstract = {OpenSound Control (“OSC”) is a\ + \ protocol for communication among computers, sound synthesizers, and other multimedia\ + \ devices that is optimized for modern networking technology. 
OSC has achieved\ + \ wide use in the field of computer-based new interfaces for musical expression\ + \ for wide-area and local-area networked distributed music systems, inter-process\ + \ communication, and even within a single application.},\n address = {Montreal,\ + \ Canada},\n author = {Wright, Matthew and Freed, Adrian and Momeni, Ali},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176575},\n\ + \ issn = {2220-4806},\n keywords = {OpenSound Control, Networking, client/server\ + \ communication},\n pages = {153--159},\n title = {OpenSound Control: State of\ + \ the Art 2003},\n url = {http://www.nime.org/proceedings/2003/nime2003_153.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302617 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176575 issn: 2220-4806 - month: June - pages: 352--355 - publisher: Virginia Tech - title: The Whammy Bar as a Digital Effect Controller - url: http://www.nime.org/proceedings/2018/nime2018_paper0077.pdf - year: 2018 + keywords: 'OpenSound Control, Networking, client/server communication' + pages: 153--159 + title: 'OpenSound Control: State of the Art 2003' + url: http://www.nime.org/proceedings/2003/nime2003_153.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Pond2018 - abstract: 'The process of learning to play a string instrument is a notoriously - difficult task. A new student to the instrument is faced with mastering multiple, - interconnected physical movements in order to become a skillful player. In their - development, one measure of a players quality is their tone, which is the result - of the combination of the physical characteristics of the instrument and their - technique in playing it. 
This paper describes preliminary research into creating - an intuitive, real-time device for evaluating the quality of tone generation on - the cello: a ``timbre-tuner'''' to aid cellists evaluate their tone quality. Data - for the study was collected from six post-secondary music students, consisting - of recordings of scales covering the entire range of the cello. Comprehensive - spectral audio analysis was performed on the data set in order to evaluate features - suitable to describe tone quality. An inverse relationship was found between the - harmonic centroid and pitch played, which became more pronounced when restricted - to the A string. In addition, a model for predicting the harmonic centroid at - different pitches on the A string was created. Results from informal listening - tests support the use of the harmonic centroid as an appropriate measure for tone - quality.' - address: 'Blacksburg, Virginia, USA' - author: Robert Pond and Alexander Klassen and Kirk McNally - bibtex: "@inproceedings{Pond2018,\n abstract = {The process of learning to play\ - \ a string instrument is a notoriously difficult task. A new student to the instrument\ - \ is faced with mastering multiple, interconnected physical movements in order\ - \ to become a skillful player. In their development, one measure of a players\ - \ quality is their tone, which is the result of the combination of the physical\ - \ characteristics of the instrument and their technique in playing it. This paper\ - \ describes preliminary research into creating an intuitive, real-time device\ - \ for evaluating the quality of tone generation on the cello: a ``timbre-tuner''\ - \ to aid cellists evaluate their tone quality. Data for the study was collected\ - \ from six post-secondary music students, consisting of recordings of scales covering\ - \ the entire range of the cello. 
Comprehensive spectral audio analysis was performed\ - \ on the data set in order to evaluate features suitable to describe tone quality.\ - \ An inverse relationship was found between the harmonic centroid and pitch played,\ - \ which became more pronounced when restricted to the A string. In addition, a\ - \ model for predicting the harmonic centroid at different pitches on the A string\ - \ was created. Results from informal listening tests support the use of the harmonic\ - \ centroid as an appropriate measure for tone quality.},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Robert Pond and Alexander Klassen and Kirk McNally},\n\ + ID: Dobrian2003 + abstract: 'This article reports on a project that uses unfettered gestural motion + for expressive musical purposes. The project involves the development of, and + experimentation with, software to receive data from a Vicon motion capture system, + and to translate and map that data into data for the control of music and other + media such as lighting. In addition to the commercially standard MIDI-which allows + direct control of external synthesizers, processors, and other devices-other mappings + are used for direct software control of digital audio and video. This report describes + the design and implementation of the software, discusses specific experiments + performed with it, and evaluates its application in terms of aesthetic pros and + cons.' + address: 'Montreal, Canada' + author: 'Dobrian, Christopher and Bevilacqua, Frédéric' + bibtex: "@inproceedings{Dobrian2003,\n abstract = {This article reports on a project\ + \ that uses unfettered gestural motion for expressive musical purposes. The project\ + \ involves the development of, and experimentation with, software to receive data\ + \ from a Vicon motion capture system, and to translate and map that data into\ + \ data for the control of music and other media such as lighting. 
In addition\ + \ to the commercially standard MIDI-which allows direct control of external synthesizers,\ + \ processors, and other devices-other mappings are used for direct software control\ + \ of digital audio and video. This report describes the design and implementation\ + \ of the software, discusses specific experiments performed with it, and evaluates\ + \ its application in terms of aesthetic pros and cons.},\n address = {Montreal,\ + \ Canada},\n author = {Dobrian, Christopher and Bevilacqua, Fr\\'{e}d\\'{e}ric},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302619},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {356--359},\n publisher = {Virginia Tech},\n title\ - \ = {Timbre Tuning: Variation in Cello Sprectrum Across Pitches and Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0078.pdf},\n year\ - \ = {2018}\n}\n" + \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176503},\n\ + \ issn = {2220-4806},\n keywords = {Motion capture, gestural control, mapping.\ + \ },\n pages = {161--163},\n title = {Gestural Control of Music Using the Vicon\ + \ 8 Motion Capture System},\n url = {http://www.nime.org/proceedings/2003/nime2003_161.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302619 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176503 issn: 2220-4806 - month: June - pages: 356--359 - publisher: Virginia Tech - title: 'Timbre Tuning: Variation in Cello Sprectrum Across Pitches and Instruments' - url: http://www.nime.org/proceedings/2018/nime2018_paper0078.pdf - year: 2018 + keywords: 'Motion capture, gestural control, mapping. 
' + pages: 161--163 + title: Gestural Control of Music Using the Vicon 8 Motion Capture System + url: http://www.nime.org/proceedings/2003/nime2003_161.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Mosher2018 - abstract: 'This demonstration paper describes the concepts behind Tributaries of - Our Distant Palpability, an interactive sonified sculpture. It takes form as - a swelling sea anemone, while the sounds it produces recall the quagmire of a - digital ocean. The sculpture responds to changing light conditions with a dynamic - mix of audio tracks, mapping volume to light level. People passing by the sculpture, - or directly engaging it by creating light and shadows with their smart phone flashlights, - will trigger the audio. At the same time, it automatically adapts to gradual - environment light changes, such as the rise and fall of the sun. The piece was - inspired by the searching gestures people make, and emotions they have while, - idly browsing content on their smart devices. It was created through an interdisciplinary - collaboration between a musician, an interaction designer, and a ceramicist.' - address: 'Blacksburg, Virginia, USA' - author: Matthew Mosher and Danielle Wood and Tony Obr - bibtex: "@inproceedings{Mosher2018,\n abstract = {This demonstration paper describes\ - \ the concepts behind Tributaries of Our Distant Palpability, an interactive sonified\ - \ sculpture. It takes form as a swelling sea anemone, while the sounds it produces\ - \ recall the quagmire of a digital ocean. The sculpture responds to changing\ - \ light conditions with a dynamic mix of audio tracks, mapping volume to light\ - \ level. People passing by the sculpture, or directly engaging it by creating\ - \ light and shadows with their smart phone flashlights, will trigger the audio.\ - \ At the same time, it automatically adapts to gradual environment light changes,\ - \ such as the rise and fall of the sun. 
The piece was inspired by the searching\ - \ gestures people make, and emotions they have while, idly browsing content on\ - \ their smart devices. It was created through an interdisciplinary collaboration\ - \ between a musician, an interaction designer, and a ceramicist.},\n address =\ - \ {Blacksburg, Virginia, USA},\n author = {Matthew Mosher and Danielle Wood and\ - \ Tony Obr},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302621},\n editor\ - \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {360--361},\n publisher = {Virginia\ - \ Tech},\n title = {Tributaries of Our Lost Palpability},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0079.pdf},\n\ - \ year = {2018}\n}\n" + ID: Nishimoto2003 + abstract: 'In this paper, we discuss a design principle for the musical instruments + that are useful for both novices and professional musicians and that facilitate + musically rich expression. We believe that the versatility of conventional musical + instruments causes difficulty in performance. By dynamically specializing a musical + instrument for performing a specific (genre of) piece, the musical instrument + could become more useful for performing the piece and facilitates expressive performance. + Based on this idea, we developed two new types of musical instruments, i.e., a + "given-melody-based musical instrument" and a "harmonic-function-based musical + instrument". From the experimental results using two prototypes, we demonstrate + the efficiency of the design principle.' 
+ address: 'Montreal, Canada' + author: 'Nishimoto, Kazushi and Oshima, Chika and Miyagawa, Yohei' + bibtex: "@inproceedings{Nishimoto2003,\n abstract = {In this paper, we discuss a\ + \ design principle for the musical instruments that are useful for both novices\ + \ and professional musicians and that facilitate musically rich expression. We\ + \ believe that the versatility of conventional musical instruments causes difficulty\ + \ in performance. By dynamically specializing a musical instrument for performing\ + \ a specific (genre of) piece, the musical instrument could become more useful\ + \ for performing the piece and facilitates expressive performance. Based on this\ + \ idea, we developed two new types of musical instruments, i.e., a \"given-melody-based\ + \ musical instrument\" and a \"harmonic-function-based musical instrument\". From\ + \ the experimental results using two prototypes, we demonstrate the efficiency\ + \ of the design principle.},\n address = {Montreal, Canada},\n author = {Nishimoto,\ + \ Kazushi and Oshima, Chika and Miyagawa, Yohei},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n date\ + \ = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176545},\n issn = {2220-4806},\n\ + \ keywords = {Musical instruments, expression, design principle, degree of freedom,\ + \ dynamic specialization},\n pages = {164--169},\n title = {Why Always Versatile?\ + \ Dynamically Customizable Musical Instruments Facilitate Expressive Performances},\n\ + \ url = {http://www.nime.org/proceedings/2003/nime2003_164.pdf},\n year = {2003}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302621 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176545 issn: 2220-4806 - month: June - pages: 360--361 - publisher: Virginia Tech - title: Tributaries of 
Our Lost Palpability - url: http://www.nime.org/proceedings/2018/nime2018_paper0079.pdf - year: 2018 + keywords: 'Musical instruments, expression, design principle, degree of freedom, + dynamic specialization' + pages: 164--169 + title: 'Why Always Versatile? Dynamically Customizable Musical Instruments Facilitate + Expressive Performances' + url: http://www.nime.org/proceedings/2003/nime2003_164.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Piepenbrink2018 - abstract: 'We present a flexible, compact, and affordable embedded physical modeling - synthesizer which functions as a digital shaker. The instrument is self-contained, - battery-powered, wireless, and synthesizes various shakers, rattles, and other - handheld shaken percussion. Beyond modeling existing shakers, the instrument affords - new sonic interactions including hand mutes on its loudspeakers and self-sustaining - feedback. Both low-cost and high-performance versions of the instrument are discussed.' - address: 'Blacksburg, Virginia, USA' - author: Andrew Piepenbrink - bibtex: "@inproceedings{Piepenbrink2018,\n abstract = {We present a flexible, compact,\ - \ and affordable embedded physical modeling synthesizer which functions as a digital\ - \ shaker. The instrument is self-contained, battery-powered, wireless, and synthesizes\ - \ various shakers, rattles, and other handheld shaken percussion. Beyond modeling\ - \ existing shakers, the instrument affords new sonic interactions including hand\ - \ mutes on its loudspeakers and self-sustaining feedback. 
Both low-cost and high-performance\ - \ versions of the instrument are discussed.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Andrew Piepenbrink},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302623},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {362--363},\n publisher = {Virginia\ - \ Tech},\n title = {Embedded Digital Shakers: Handheld Physical Modeling Synthesizers},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0080.pdf},\n year\ - \ = {2018}\n}\n" + ID: NewtonDunn2003 + abstract: 'In this paper, we introduce Block Jam, a Tangible UserInterface that + controls a dynamic polyrhythmic sequencerusing 26 physical artifacts. These physical + artifacts, that wecall blocks, are a new type of input device for manipulatingan + interactive music system. The blocks'' functional andtopological statuses are + tightly coupled to an ad hocsequencer, interpreting the user''s arrangement of + the blocksas meaningful musical phrases and structures.We demonstrate that we + have created both a tangible andvisual language that enables both the novice and + musicallytrained users by taking advantage of both their explorativeand intuitive + abilities. The tangible nature of the blocks andthe intuitive interface promotes + face-to-face collaborationand social interaction within a single system. The principleof + collaboration is further extended by linking two BlockJam systems together to + create a network.We discuss our project vision, design rational, relatedworks, + and the implementation of Block Jam prototypes.Figure 1. 
A cluster of blocks, + note the mother block on thebottom right' + address: 'Montreal, Canada' + author: 'Newton-Dunn, Henry and Nakano, Hiroaki and Gibson, James' + bibtex: "@inproceedings{NewtonDunn2003,\n abstract = {In this paper, we introduce\ + \ Block Jam, a Tangible UserInterface that controls a dynamic polyrhythmic sequencerusing\ + \ 26 physical artifacts. These physical artifacts, that wecall blocks, are a new\ + \ type of input device for manipulatingan interactive music system. The blocks'\ + \ functional andtopological statuses are tightly coupled to an ad hocsequencer,\ + \ interpreting the user's arrangement of the blocksas meaningful musical phrases\ + \ and structures.We demonstrate that we have created both a tangible andvisual\ + \ language that enables both the novice and musicallytrained users by taking advantage\ + \ of both their explorativeand intuitive abilities. The tangible nature of the\ + \ blocks andthe intuitive interface promotes face-to-face collaborationand social\ + \ interaction within a single system. The principleof collaboration is further\ + \ extended by linking two BlockJam systems together to create a network.We discuss\ + \ our project vision, design rational, relatedworks, and the implementation of\ + \ Block Jam prototypes.Figure 1. A cluster of blocks, note the mother block on\ + \ thebottom right},\n address = {Montreal, Canada},\n author = {Newton-Dunn, Henry\ + \ and Nakano, Hiroaki and Gibson, James},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176543},\n issn = {2220-4806},\n keywords = {Tangible\ + \ interface, modular system, polyrhythmic sequencer. VISION We believe in a future\ + \ where music will no longer be considered a linear composition, but a dynamic\ + \ structure, and musical composition will extend to interaction. 
We also believe\ + \ that through the },\n pages = {170--177},\n title = {Block Jam: A Tangible Interface\ + \ for Interactive Music},\n url = {http://www.nime.org/proceedings/2003/nime2003_170.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302623 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176543 issn: 2220-4806 - month: June - pages: 362--363 - publisher: Virginia Tech - title: 'Embedded Digital Shakers: Handheld Physical Modeling Synthesizers' - url: http://www.nime.org/proceedings/2018/nime2018_paper0080.pdf - year: 2018 + keywords: 'Tangible interface, modular system, polyrhythmic sequencer. VISION We + believe in a future where music will no longer be considered a linear composition, + but a dynamic structure, and musical composition will extend to interaction. We + also believe that through the ' + pages: 170--177 + title: 'Block Jam: A Tangible Interface for Interactive Music' + url: http://www.nime.org/proceedings/2003/nime2003_170.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Xambób2018 - abstract: 'The recent increase in the accessibility and size of personal and crowdsourced - digital sound collections brought about a valuable resource for music creation. - Finding and retrieving relevant sounds in performance leads to challenges that - can be approached using music information retrieval (MIR). In this paper, we explore - the use of MIR to retrieve and repurpose sounds in musical live coding. We present - a live coding system built on SuperCollider enabling the use of audio content - from online Creative Commons (CC) sound databases such as Freesound or personal - sound databases. The novelty of our approach lies in exploiting high-level MIR - methods (e.g., query by pitch or rhythmic cues) using live coding techniques applied - to sounds. 
We demonstrate its potential through the reflection of an illustrative - case study and the feedback from four expert users. The users tried the system - with either a personal database or a crowdsourced database and reported its potential - in facilitating tailorability of the tool to their own creative workflows.' - address: 'Blacksburg, Virginia, USA' - author: Anna Xambó and Gerard Roma and Alexander Lerch and Mathieu Barthet and György - Fazekas - bibtex: "@inproceedings{Xambób2018,\n abstract = {The recent increase in the accessibility\ - \ and size of personal and crowdsourced digital sound collections brought about\ - \ a valuable resource for music creation. Finding and retrieving relevant sounds\ - \ in performance leads to challenges that can be approached using music information\ - \ retrieval (MIR). In this paper, we explore the use of MIR to retrieve and repurpose\ - \ sounds in musical live coding. We present a live coding system built on SuperCollider\ - \ enabling the use of audio content from online Creative Commons (CC) sound databases\ - \ such as Freesound or personal sound databases. The novelty of our approach lies\ - \ in exploiting high-level MIR methods (e.g., query by pitch or rhythmic cues)\ - \ using live coding techniques applied to sounds. We demonstrate its potential\ - \ through the reflection of an illustrative case study and the feedback from four\ - \ expert users. 
The users tried the system with either a personal database or\ - \ a crowdsourced database and reported its potential in facilitating tailorability\ - \ of the tool to their own creative workflows.},\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Anna Xambó and Gerard Roma and Alexander Lerch and Mathieu\ - \ Barthet and György Fazekas},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302625},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {364--369},\n publisher = {Virginia\ - \ Tech},\n title = {Live Repurposing of Sounds: MIR Explorations with Personal\ - \ and Crowdsourced Databases},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0081.pdf},\n\ - \ year = {2018}\n}\n" + ID: Kartadinata2003 + abstract: 'In this paper I present the gluiph, a single-board computer thatwas conceived + as a platform for integrated electronic musicalinstruments. It aims to provide + new instruments as well asexisting ones with a stronger identity by untethering + themfrom the often lab-like stage setups built around general purpose computers. + The key additions to its core are a flexiblesensor subsystem and multi-channel + audio I/O. In contrast toother stand-alone approaches it retains a higher degree + offlexibility by supporting popular music programming languages, with Miller Puckette''s + pd [1] being the current focus.' + address: 'Montreal, Canada' + author: 'Kartadinata, Sukandar' + bibtex: "@inproceedings{Kartadinata2003,\n abstract = {In this paper I present the\ + \ gluiph, a single-board computer thatwas conceived as a platform for integrated\ + \ electronic musicalinstruments. It aims to provide new instruments as well asexisting\ + \ ones with a stronger identity by untethering themfrom the often lab-like stage\ + \ setups built around general purpose computers. 
The key additions to its core\ + \ are a flexiblesensor subsystem and multi-channel audio I/O. In contrast toother\ + \ stand-alone approaches it retains a higher degree offlexibility by supporting\ + \ popular music programming languages, with Miller Puckette's pd [1] being the\ + \ current focus.},\n address = {Montreal, Canada},\n author = {Kartadinata, Sukandar},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176521},\n\ + \ issn = {2220-4806},\n keywords = {Musical instrument, integration, single-board\ + \ computer (SBC), embedded system, stand-alone system, pd, DSP, sensor, latency,\ + \ flexibility, coherency.},\n pages = {180--183},\n title = {The Gluiph: a Nucleus\ + \ for Integrated Instruments},\n url = {http://www.nime.org/proceedings/2003/nime2003_180.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302625 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176521 issn: 2220-4806 - month: June - pages: 364--369 - publisher: Virginia Tech - title: 'Live Repurposing of Sounds: MIR Explorations with Personal and Crowdsourced - Databases' - url: http://www.nime.org/proceedings/2018/nime2018_paper0081.pdf - year: 2018 + keywords: 'Musical instrument, integration, single-board computer (SBC), embedded + system, stand-alone system, pd, DSP, sensor, latency, flexibility, coherency.' 
+ pages: 180--183 + title: 'The Gluiph: a Nucleus for Integrated Instruments' + url: http://www.nime.org/proceedings/2003/nime2003_180.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Sarwate2018 - abstract: 'This paper explores the question of how live coding musicians can perform - with musicians who are not using code (such as acoustic instrumentalists or those - using graphical and tangible electronic interfaces). This paper investigates performance - systems that facilitate improvisation where the musicians can interact not just - by listening to each other and changing their own output, but also by manipulating - the data stream of the other performer(s). In a course of performance-led research - four prototypes were built and analyzed them using concepts from NIME and creative - collaboration literature. Based on this analysis it was found that the systems - should 1) provide a commonly modifiable visual representation of musical data - for both coder and non-coder, and 2) provide some independent means of sound production - for each user, giving the non-coder the ability to slow down and make non-realtime - decisions for greater performance flexibility. ' - address: 'Blacksburg, Virginia, USA' - author: Avneesh Sarwate and Ryan Taylor Rose and Jason Freeman and Jack Armitage - bibtex: "@inproceedings{Sarwate2018,\n abstract = {This paper explores the question\ - \ of how live coding musicians can perform with musicians who are not using code\ - \ (such as acoustic instrumentalists or those using graphical and tangible electronic\ - \ interfaces). This paper investigates performance systems that facilitate improvisation\ - \ where the musicians can interact not just by listening to each other and changing\ - \ their own output, but also by manipulating the data stream of the other performer(s).\ - \ In a course of performance-led research four prototypes were built and analyzed\ - \ them using concepts from NIME and creative collaboration literature. 
Based on\ - \ this analysis it was found that the systems should 1) provide a commonly modifiable\ - \ visual representation of musical data for both coder and non-coder, and 2) provide\ - \ some independent means of sound production for each user, giving the non-coder\ - \ the ability to slow down and make non-realtime decisions for greater performance\ - \ flexibility. },\n address = {Blacksburg, Virginia, USA},\n author = {Avneesh\ - \ Sarwate and Ryan Taylor Rose and Jason Freeman and Jack Armitage},\n booktitle\ + ID: Couturier2003 + abstract: 'In this paper, we describe a new interface for musicalperformance, using + the interaction with a graphical userinterface in a powerful manner: the user + directly touches ascreen where graphical objects are displayed and can useseveral + fingers simultaneously to interact with the objects. Theconcept of this interface + is based on the superposition of thegesture spatial place and the visual feedback + spatial place; i tgives the impression that the graphical objects are real. Thisconcept + enables a huge freedom in designing interfaces. Thegesture device we have created + gives the position of fourfingertips using 3D sensors and the data is performed + in theMax/MSP environment. We have realized two practicalexamples of musical use + of such a device, using PhotosonicSynthesis and Scanned Synthesis.' + address: 'Montreal, Canada' + author: 'Couturier, Jean-Michel and Arfib, Daniel' + bibtex: "@inproceedings{Couturier2003,\n abstract = {In this paper, we describe\ + \ a new interface for musicalperformance, using the interaction with a graphical\ + \ userinterface in a powerful manner: the user directly touches ascreen where\ + \ graphical objects are displayed and can useseveral fingers simultaneously to\ + \ interact with the objects. 
Theconcept of this interface is based on the superposition\ + \ of thegesture spatial place and the visual feedback spatial place; i tgives\ + \ the impression that the graphical objects are real. Thisconcept enables a huge\ + \ freedom in designing interfaces. Thegesture device we have created gives the\ + \ position of fourfingertips using 3D sensors and the data is performed in theMax/MSP\ + \ environment. We have realized two practicalexamples of musical use of such a\ + \ device, using PhotosonicSynthesis and Scanned Synthesis.},\n address = {Montreal,\ + \ Canada},\n author = {Couturier, Jean-Michel and Arfib, Daniel},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302627},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {370--373},\n publisher = {Virginia Tech},\n title\ - \ = {Performance Systems for Live Coders and Non Coders},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0082.pdf},\n\ - \ year = {2018}\n}\n" + \ Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176501},\n\ + \ issn = {2220-4806},\n keywords = {HCI, touch screen, multimodality, mapping,\ + \ direct interaction, gesture devices, bimanual interaction, two-handed, Max/MSP.\ + \ },\n pages = {184--187},\n title = {Pointing Fingers: Using Multiple Direct\ + \ Interactions with Visual Objects to Perform Music},\n url = {http://www.nime.org/proceedings/2003/nime2003_184.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302627 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176501 issn: 2220-4806 - month: June - pages: 370--373 - publisher: Virginia Tech - title: Performance Systems for Live Coders and Non 
Coders - url: http://www.nime.org/proceedings/2018/nime2018_paper0082.pdf - year: 2018 + keywords: 'HCI, touch screen, multimodality, mapping, direct interaction, gesture + devices, bimanual interaction, two-handed, Max/MSP. ' + pages: 184--187 + title: 'Pointing Fingers: Using Multiple Direct Interactions with Visual Objects + to Perform Music' + url: http://www.nime.org/proceedings/2003/nime2003_184.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Snyder2018 - abstract: 'This paper presents research on control of electronic signal feedback - in brass instruments through the development of a new augmented musical instrument, - the Feedback Trombone. The Feedback Trombone (FBT) extends the traditional acoustic - trombone interface with a speaker, microphone, and custom analog and digital hardware. ' - address: 'Blacksburg, Virginia, USA' - author: Jeff Snyder and Michael R Mulshine and Rajeev S Erramilli - bibtex: "@inproceedings{Snyder2018,\n abstract = {This paper presents research on\ - \ control of electronic signal feedback in brass instruments through the development\ - \ of a new augmented musical instrument, the Feedback Trombone. The Feedback Trombone\ - \ (FBT) extends the traditional acoustic trombone interface with a speaker, microphone,\ - \ and custom analog and digital hardware. },\n address = {Blacksburg, Virginia,\ - \ USA},\n author = {Jeff Snyder and Michael R Mulshine and Rajeev S Erramilli},\n\ + ID: Singer2003a + abstract: 'This paper describes the LEMUR GuitarBot, a robotic musical instrument + composed of four independent MIDI controllable single-stringed movable bridge + units. Design methodology, development and fabrication process, control specification + and results are discussed.' 
+ address: 'Montreal, Canada' + author: 'Singer, Eric and Larke, Kevin and Bianciardi, David' + bibtex: "@inproceedings{Singer2003a,\n abstract = {This paper describes the LEMUR\ + \ GuitarBot, a robotic musical instrument composed of four independent MIDI controllable\ + \ single-stringed movable bridge units. Design methodology, development and fabrication\ + \ process, control specification and results are discussed.},\n address = {Montreal,\ + \ Canada},\n author = {Singer, Eric and Larke, Kevin and Bianciardi, David},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1302629},\n editor = {Luke Dahl,\ - \ Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {374--379},\n publisher = {Virginia Tech},\n title\ - \ = {The Feedback Trombone: Controlling Feedback in Brass Instruments},\n url\ - \ = {http://www.nime.org/proceedings/2018/nime2018_paper0083.pdf},\n year = {2018}\n\ - }\n" + \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176565},\n\ + \ issn = {2220-4806},\n keywords = {Robotics, interactive, performance, MIDI,\ + \ string instrument.},\n pages = {188--191},\n title = {{LEMUR} GuitarBot: {MIDI}\ + \ Robotic String Instrument},\n url = {http://www.nime.org/proceedings/2003/nime2003_188.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302629 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176565 issn: 2220-4806 - month: June - pages: 374--379 - publisher: Virginia Tech - title: 'The Feedback Trombone: Controlling Feedback in Brass Instruments' - url: http://www.nime.org/proceedings/2018/nime2018_paper0083.pdf - year: 2018 + keywords: 'Robotics, interactive, performance, MIDI, string instrument.' 
+ pages: 188--191 + title: 'LEMUR GuitarBot: MIDI Robotic String Instrument' + url: http://www.nime.org/proceedings/2003/nime2003_188.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Sheffield2018 - abstract: The use of mechatronic components (e.g. DC motors and solenoids) as both - electronic sound source and locus of interaction is explored in a form of embedded - acoustic instruments called mechanoise instruments. Micro-controllers and embedded - computing devices provide a platform for live control of motor speeds and additional - sound processing by a human performer. Digital fabrication and use of salvaged - and found materials are emphasized. - address: 'Blacksburg, Virginia, USA' - author: Eric Sheffield - bibtex: "@inproceedings{Sheffield2018,\n abstract = {The use of mechatronic components\ - \ (e.g. DC motors and solenoids) as both electronic sound source and locus of\ - \ interaction is explored in a form of embedded acoustic instruments called mechanoise\ - \ instruments. Micro-controllers and embedded computing devices provide a platform\ - \ for live control of motor speeds and additional sound processing by a human\ - \ performer. Digital fabrication and use of salvaged and found materials are emphasized.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Eric Sheffield},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1302631},\n editor = {Luke Dahl, Douglas\ - \ Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n\ - \ month = {June},\n pages = {380--381},\n publisher = {Virginia Tech},\n title\ - \ = {Mechanoise: Mechatronic Sound and Interaction in Embedded Acoustic Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0084.pdf},\n year\ - \ = {2018}\n}\n" + ID: Peiper2003 + abstract: We introduce a software system for real-time classification of violin + bow strokes (articulations). 
The system uses an electromagnetic motion tracking + system to capture raw gesture data. The data is analyzed to extract stroke features. + These features are provided to a decision tree for training and classification. + Feedback from feature and classification data is presented visually in an immersive + graphic environment. + address: 'Montreal, Canada' + author: 'Peiper, Chad and Warden, David and Garnett, Guy' + bibtex: "@inproceedings{Peiper2003,\n abstract = {We introduce a software system\ + \ for real-time classification of violin bow strokes (articulations). The system\ + \ uses an electromagnetic motion tracking system to capture raw gesture data.\ + \ The data is analyzed to extract stroke features. These features are provided\ + \ to a decision tree for training and classification. Feedback from feature and\ + \ classification data is presented visually in an immersive graphic environment.},\n\ + \ address = {Montreal, Canada},\n author = {Peiper, Chad and Warden, David and\ + \ Garnett, Guy},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi =\ + \ {10.5281/zenodo.1176553},\n issn = {2220-4806},\n pages = {192--196},\n title\ + \ = {An Interface for Real-time Classification of Articulations Produced by Violin\ + \ Bowing},\n url = {http://www.nime.org/proceedings/2003/nime2003_192.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302631 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176553 issn: 2220-4806 - month: June - pages: 380--381 - publisher: Virginia Tech - title: 'Mechanoise: Mechatronic Sound and Interaction in Embedded Acoustic Instruments' - url: http://www.nime.org/proceedings/2018/nime2018_paper0084.pdf - year: 2018 + pages: 192--196 + title: An Interface for Real-time 
Classification of Articulations Produced by Violin + Bowing + url: http://www.nime.org/proceedings/2003/nime2003_192.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Pigrem2018 - abstract: 'This paper explores the role of materiality in Digital Musical Instruments - and questions the influence of tacit understandings of sensor technology. Existing - research investigates the use of gesture, physical interaction and subsequent - parameter mapping. We suggest that a tacit knowledge of the ‘sensor layer'' brings - with it definitions, understandings and expectations that forge and guide our - approach to interaction. We argue that the influence of technology starts before - a sound is made, and comes from not only intuition of material properties, but - also received notions of what technology can and should do. On encountering an - instrument with obvious sensors, a potential performer will attempt to predict - what the sensors do and what the designer intends for them to do, becoming influenced - by a machine centered understanding of interaction and not a solely material centred - one. The paper presents an observational study of interaction using non-functional - prototype instruments designed to explore fundamental ideas and understandings - of instrumental interaction in the digital realm. We will show that this understanding - influences both gestural language and ability to characterise an expected sonic/musical - response. ' - address: 'Blacksburg, Virginia, USA' - author: 'Jon Pigrem and McPherson, Andrew P.' - bibtex: "@inproceedings{Pigrem2018,\n abstract = {This paper explores the role of\ - \ materiality in Digital Musical Instruments and questions the influence of tacit\ - \ understandings of sensor technology. Existing research investigates the use\ - \ of gesture, physical interaction and subsequent parameter mapping. 
We suggest\ - \ that a tacit knowledge of the ‘sensor layer' brings with it definitions, understandings\ - \ and expectations that forge and guide our approach to interaction. We argue\ - \ that the influence of technology starts before a sound is made, and comes from\ - \ not only intuition of material properties, but also received notions of what\ - \ technology can and should do. On encountering an instrument with obvious sensors,\ - \ a potential performer will attempt to predict what the sensors do and what the\ - \ designer intends for them to do, becoming influenced by a machine centered understanding\ - \ of interaction and not a solely material centred one. The paper presents an\ - \ observational study of interaction using non-functional prototype instruments\ - \ designed to explore fundamental ideas and understandings of instrumental interaction\ - \ in the digital realm. We will show that this understanding influences both gestural\ - \ language and ability to characterise an expected sonic/musical response. },\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Jon Pigrem and McPherson,\ - \ Andrew P.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302633},\n editor\ - \ = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {382--385},\n publisher = {Virginia\ - \ Tech},\n title = {Do We Speak Sensor? Cultural Constraints of Embodied Interaction\ - \ },\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0085.pdf},\n\ - \ year = {2018}\n}\n" + ID: Settel2003 + abstract: 'The subject of instrument design is quite broad. Much work has been done + at Ircam, MIT, CNMAT, Stanford and elsewhere in the area. In this paper we will + present our own developed approach to designing and using instruments in composition + and performance for the authors’ “Convolution Brothers” pieces. 
The presentation + of this paper is accompanied by a live Convolution Brothers demonstration.' + address: 'Montreal, Canada' + author: 'Settel, Zack and Lippe, Cort' + bibtex: "@inproceedings{Settel2003,\n abstract = {The subject of instrument design\ + \ is quite broad. Much work has been done at Ircam, MIT, CNMAT, Stanford and elsewhere\ + \ in the area. In this paper we will present our own developed approach to designing\ + \ and using instruments in composition and performance for the authors’ “Convolution\ + \ Brothers” pieces. The presentation of this paper is accompanied by a live Convolution\ + \ Brothers demonstration.},\n address = {Montreal, Canada},\n author = {Settel,\ + \ Zack and Lippe, Cort},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n doi\ + \ = {10.5281/zenodo.1176559},\n issn = {2220-4806},\n pages = {197--200},\n title\ + \ = {Convolution Brother's Instrument Design},\n url = {http://www.nime.org/proceedings/2003/nime2003_197.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302633 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176559 issn: 2220-4806 - month: June - pages: 382--385 - publisher: Virginia Tech - title: 'Do We Speak Sensor? Cultural Constraints of Embodied Interaction ' - url: http://www.nime.org/proceedings/2018/nime2018_paper0085.pdf - year: 2018 + pages: 197--200 + title: Convolution Brother's Instrument Design + url: http://www.nime.org/proceedings/2003/nime2003_197.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Salazarb2018 - abstract: 'At first glance, the practice of musical live coding seems distanced - from the gestures and sense of embodiment common in musical performance, electronic - or otherwise. 
This workshop seeks to explore the extent to which this assertion - is justified, to re-examine notions of gesture and embodiment in the context of - musical live coding performance, to consider historical approaches to synthesizing - musical programming and gesture, and to look to the future for new ways of doing - so. The workshop will consist firstly of a critical discussion of these issues - and related literature. This will be followed by applied practical experiments - involving ideas generated during these discussions. The workshop will conclude - with a recapitulation and examination of these experiments in the context of previous - research and proposed future directions. ' - address: 'Blacksburg, Virginia, USA' - author: Spencer Salazar and Jack Armitage - bibtex: "@inproceedings{Salazarb2018,\n abstract = {At first glance, the practice\ - \ of musical live coding seems distanced from the gestures and sense of embodiment\ - \ common in musical performance, electronic or otherwise. This workshop seeks\ - \ to explore the extent to which this assertion is justified, to re-examine notions\ - \ of gesture and embodiment in the context of musical live coding performance,\ - \ to consider historical approaches to synthesizing musical programming and gesture,\ - \ and to look to the future for new ways of doing so. The workshop will consist\ - \ firstly of a critical discussion of these issues and related literature. This\ - \ will be followed by applied practical experiments involving ideas generated\ - \ during these discussions. The workshop will conclude with a recapitulation and\ - \ examination of these experiments in the context of previous research and proposed\ - \ future directions. 
},\n address = {Blacksburg, Virginia, USA},\n author = {Spencer\ - \ Salazar and Jack Armitage},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302635},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {386--389},\n publisher = {Virginia\ - \ Tech},\n title = {Re-engaging the Body and Gesture in Musical Live Coding},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0086.pdf},\n year\ - \ = {2018}\n}\n" + ID: Choi2003 + abstract: 'This paper suggests that there is a need for formalizing acomponent model + of gestural primitive throughput in musicinstrument design. The purpose of this + model is to construct acoherent and meaningful interaction between performer andinstrument. + Such a model has been implicit in previous researchfor interactive performance + systems. The model presented heredistinguishes gestural primitives from units + of measure ofgestures. The throughput model identifies symmetry betweenperformance + gestures and musical gestures, and indicates a rolefor gestural primitives when + a performer navigates regions ofstable oscillations in a musical instrument. The + use of a highdimensional interface tool is proposed for instrument design, forfine-tuning + the mapping between movement sensor data andsound synthesis control data.' + address: 'Montreal, Canada' + author: 'Choi, Insook' + bibtex: "@inproceedings{Choi2003,\n abstract = {This paper suggests that there is\ + \ a need for formalizing acomponent model of gestural primitive throughput in\ + \ musicinstrument design. The purpose of this model is to construct acoherent\ + \ and meaningful interaction between performer andinstrument. Such a model has\ + \ been implicit in previous researchfor interactive performance systems. 
The model\ + \ presented heredistinguishes gestural primitives from units of measure ofgestures.\ + \ The throughput model identifies symmetry betweenperformance gestures and musical\ + \ gestures, and indicates a rolefor gestural primitives when a performer navigates\ + \ regions ofstable oscillations in a musical instrument. The use of a highdimensional\ + \ interface tool is proposed for instrument design, forfine-tuning the mapping\ + \ between movement sensor data andsound synthesis control data.},\n address =\ + \ {Montreal, Canada},\n author = {Choi, Insook},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n date\ + \ = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176499},\n issn = {2220-4806},\n\ + \ keywords = {Performance gestures, musical gestures, instrument design, mapping,\ + \ tuning, affordances, stability. },\n pages = {201--204},\n title = {A Component\ + \ Model of Gestural Primitive Throughput},\n url = {http://www.nime.org/proceedings/2003/nime2003_201.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302635 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176499 issn: 2220-4806 - month: June - pages: 386--389 - publisher: Virginia Tech - title: Re-engaging the Body and Gesture in Musical Live Coding - url: http://www.nime.org/proceedings/2018/nime2018_paper0086.pdf - year: 2018 + keywords: 'Performance gestures, musical gestures, instrument design, mapping, tuning, + affordances, stability. 
' + pages: 201--204 + title: A Component Model of Gestural Primitive Throughput + url: http://www.nime.org/proceedings/2003/nime2003_201.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Berdahl2018 - abstract: 'For the purpose of creating new musical instruments, chaotic dynamical - systems can be simulated in real time to synthesize complex sounds. This work - investigates a series of discrete-time chaotic maps, which have the potential - to generate intriguing sounds when they are adjusted to be on the edge of chaos. With - these chaotic maps as studied historically, the edge of chaos tends to be razor-thin, - which can make it difficult to employ them for making new musical instruments. The - authors therefore suggest connecting chaotic maps with digital waveguides, which - (1) make it easier to synthesize harmonic tones and (2) make it harder to fall - off of the edge of chaos while playing a musical instrument. The authors argue - therefore that this technique widens the razor-thin edge of chaos into a musical - highway.' - address: 'Blacksburg, Virginia, USA' - author: Edgar Berdahl and Eric Sheffield and Andrew Pfalz and Anthony T. Marasco - bibtex: "@inproceedings{Berdahl2018,\n abstract = {For the purpose of creating new\ - \ musical instruments, chaotic dynamical systems can be simulated in real time\ - \ to synthesize complex sounds. This work investigates a series of discrete-time\ - \ chaotic maps, which have the potential to generate intriguing sounds when they\ - \ are adjusted to be on the edge of chaos. With these chaotic maps as studied\ - \ historically, the edge of chaos tends to be razor-thin, which can make it difficult\ - \ to employ them for making new musical instruments. The authors therefore suggest\ - \ connecting chaotic maps with digital waveguides, which (1) make it easier to\ - \ synthesize harmonic tones and (2) make it harder to fall off of the edge of\ - \ chaos while playing a musical instrument. 
The authors argue therefore that\ - \ this technique widens the razor-thin edge of chaos into a musical highway.},\n\ - \ address = {Blacksburg, Virginia, USA},\n author = {Edgar Berdahl and Eric Sheffield\ - \ and Andrew Pfalz and Anthony T. Marasco},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1302637},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {390--393},\n publisher = {Virginia Tech},\n title = {Widening the Razor-Thin\ - \ Edge of Chaos Into a Musical Highway: Connecting Chaotic Maps to Digital Waveguides},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0087.pdf},\n year\ - \ = {2018}\n}\n" + ID: PalacioQuintin2003 + abstract: 'The Hyper-Flute is a standard Boehm flute (the model used is a Powell + 2100, made in Boston) extended via electronic sensors that link it to a computer, + enabling control of digital sound processing parameters while performing. The + instrument’s electronic extensions are described in some detail, and performance + applications are briefly discussed.' + address: 'Montreal, Canada' + author: 'Palacio-Quintin, Cléo' + bibtex: "@inproceedings{PalacioQuintin2003,\n abstract = {The Hyper-Flute is a standard\ + \ Boehm flute (the model used is a Powell 2100, made in Boston) extended via electronic\ + \ sensors that link it to a computer, enabling control of digital sound processing\ + \ parameters while performing. 
The instrument’s electronic extensions are described\ + \ in some detail, and performance applications are briefly discussed.},\n address\ + \ = {Montreal, Canada},\n author = {Palacio-Quintin, Cl\\'{e}o},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176549},\n issn = {2220-4806},\n\ + \ keywords = {Digital sound processing, flute, hyper-instrument, interactive music,\ + \ live electronics, performance, sensors.},\n pages = {206--207},\n title = {The\ + \ Hyper-Flute},\n url = {http://www.nime.org/proceedings/2003/nime2003_206.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302637 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176549 issn: 2220-4806 - month: June - pages: 390--393 - publisher: Virginia Tech - title: 'Widening the Razor-Thin Edge of Chaos Into a Musical Highway: Connecting - Chaotic Maps to Digital Waveguides' - url: http://www.nime.org/proceedings/2018/nime2018_paper0087.pdf - year: 2018 + keywords: 'Digital sound processing, flute, hyper-instrument, interactive music, + live electronics, performance, sensors.' + pages: 206--207 + title: The Hyper-Flute + url: http://www.nime.org/proceedings/2003/nime2003_206.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Snyderb2018 - abstract: 'This paper describes a project to create a software instrument using - a biological model of neuron behavior for audio synthesis. The translation of - the model to a usable audio synthesis process is described, and a piece for laptop - orchestra created using the instrument is discussed.' 
- address: 'Blacksburg, Virginia, USA' - author: Jeff Snyder and Aatish Bhatia and Michael R Mulshine - bibtex: "@inproceedings{Snyderb2018,\n abstract = {This paper describes a project\ - \ to create a software instrument using a biological model of neuron behavior\ - \ for audio synthesis. The translation of the model to a usable audio synthesis\ - \ process is described, and a piece for laptop orchestra created using the instrument\ - \ is discussed.},\n address = {Blacksburg, Virginia, USA},\n author = {Jeff Snyder\ - \ and Aatish Bhatia and Michael R Mulshine},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1302639},\n editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n\ - \ isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n pages\ - \ = {394--397},\n publisher = {Virginia Tech},\n title = {Neuron-modeled Audio\ - \ Synthesis: Nonlinear Sound and Control},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0088.pdf},\n\ - \ year = {2018}\n}\n" + ID: Allison2003 + abstract: 'SensorBox is a low cost, low latency, high-resolutioninterface for obtaining + gestural data from sensors for use inrealtime with a computer-based interactive + system. Wediscuss its implementation, benefits, current limitations, andcompare + it with several popular interfaces for gestural dataacquisition.' + address: 'Montreal, Canada' + author: 'Allison, Jesse T. and Place, Timothy' + bibtex: "@inproceedings{Allison2003,\n abstract = {SensorBox is a low cost, low\ + \ latency, high-resolutioninterface for obtaining gestural data from sensors for\ + \ use inrealtime with a computer-based interactive system. Wediscuss its implementation,\ + \ benefits, current limitations, andcompare it with several popular interfaces\ + \ for gestural dataacquisition.},\n address = {Montreal, Canada},\n author = {Allison,\ + \ Jesse T. 
and Place, Timothy},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176482},\n issn = {2220-4806},\n keywords = {Sensors,\ + \ gestural acquisition, audio interface, interactive music, SensorBox. },\n pages\ + \ = {208--210},\n title = {SensorBox: Practical Audio Interface for Gestural Performance},\n\ + \ url = {http://www.nime.org/proceedings/2003/nime2003_208.pdf},\n year = {2003}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302639 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176482 issn: 2220-4806 - month: June - pages: 394--397 - publisher: Virginia Tech - title: 'Neuron-modeled Audio Synthesis: Nonlinear Sound and Control' - url: http://www.nime.org/proceedings/2018/nime2018_paper0088.pdf - year: 2018 + keywords: 'Sensors, gestural acquisition, audio interface, interactive music, SensorBox. ' + pages: 208--210 + title: 'SensorBox: Practical Audio Interface for Gestural Performance' + url: http://www.nime.org/proceedings/2003/nime2003_208.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Cádiz2018 - abstract: 'In computer or electroacoustic music, it is often the case that the compositional - act and the parametric control of the underlying synthesis algorithms or hardware - are not separable from each other. In these situations, composition and control - of the synthesis parameters are not easy to distinguish. One possible solution - is by means of fuzzy logic. This approach provides a simple, intuitive but powerful - control of the compositional process usually in interesting non-linear ways. 
Compositional - control in this context is achieved by the fuzzification of the relevant internal - synthesis parameters and the parallel computation of common sense fuzzy rules - of inference specified by the composer. This approach has been implemented computationally - as a software package entitled FLCTK (Fuzzy Logic Control Tool Kit) in the form - of external objects for the widely used real-time compositional environments Max/MSP - and Pd. In this article, we present an updated version of this tool. As a demonstration - of the wide range of situations in which this approach could be used, we provide - two examples of parametric fuzzy control: first, the fuzzy control of a water - tank simulation and second a particle-based sound synthesis technique by a fuzzy - approach. ' - address: 'Blacksburg, Virginia, USA' - author: Rodrigo F. Cádiz and Marie Gonzalez-Inostroza - bibtex: "@inproceedings{Cádiz2018,\n abstract = {In computer or electroacoustic\ - \ music, it is often the case that the compositional act and the parametric control\ - \ of the underlying synthesis algorithms or hardware are not separable from each\ - \ other. In these situations, composition and control of the synthesis parameters\ - \ are not easy to distinguish. One possible solution is by means of fuzzy logic.\ - \ This approach provides a simple, intuitive but powerful control of the compositional\ - \ process usually in interesting non-linear ways. Compositional control in this\ - \ context is achieved by the fuzzification of the relevant internal synthesis\ - \ parameters and the parallel computation of common sense fuzzy rules of inference\ - \ specified by the composer. This approach has been implemented computationally\ - \ as a software package entitled FLCTK (Fuzzy Logic Control Tool Kit) in the form\ - \ of external objects for the widely used real-time compositional environments\ - \ Max/MSP and Pd. 
In this article, we present an updated version of this tool.\ - \ As a demonstration of the wide range of situations in which this approach could\ - \ be used, we provide two examples of parametric fuzzy control: first, the fuzzy\ - \ control of a water tank simulation and second a particle-based sound synthesis\ - \ technique by a fuzzy approach. },\n address = {Blacksburg, Virginia, USA},\n\ - \ author = {Rodrigo F. Cádiz and Marie Gonzalez-Inostroza},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302641},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {398--402},\n publisher = {Virginia Tech},\n title = {Fuzzy Logic Control\ - \ Toolkit 2.0: composing and synthesis by fuzzyfication},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0089.pdf},\n\ - \ year = {2018}\n}\n" + ID: Baird2003 + abstract: 'This software tool, developed in Max/MSP, presentsperformers with image + files consisting of traditional notationas well as conducting in the form of video + playback. Theimpetus for this work was the desire to allow the musicalmaterial + for each performer of a given piece to differ withregard to content and tempo.' + address: 'Montreal, Canada' + author: 'Baird, Kevin C.' + bibtex: "@inproceedings{Baird2003,\n abstract = {This software tool, developed in\ + \ Max/MSP, presentsperformers with image files consisting of traditional notationas\ + \ well as conducting in the form of video playback. 
Theimpetus for this work was\ + \ the desire to allow the musicalmaterial for each performer of a given piece\ + \ to differ withregard to content and tempo.},\n address = {Montreal, Canada},\n\ + \ author = {Baird, Kevin C.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176488},\n issn = {2220-4806},\n keywords = {Open form,\ + \ notation, polymeter, polytempi, Max/MSP. },\n pages = {211--212},\n title =\ + \ {Multi-Conductor: An Onscreen Polymetrical Conducting and Notation Display System},\n\ + \ url = {http://www.nime.org/proceedings/2003/nime2003_211.pdf},\n year = {2003}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302641 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176488 issn: 2220-4806 - month: June - pages: 398--402 - publisher: Virginia Tech - title: 'Fuzzy Logic Control Toolkit 2.0: composing and synthesis by fuzzyfication' - url: http://www.nime.org/proceedings/2018/nime2018_paper0089.pdf - year: 2018 + keywords: 'Open form, notation, polymeter, polytempi, Max/MSP. ' + pages: 211--212 + title: 'Multi-Conductor: An Onscreen Polymetrical Conducting and Notation Display + System' + url: http://www.nime.org/proceedings/2003/nime2003_211.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Leigh2018 - abstract: 'Playing musical instruments involves producing gradually more challenging - body movements and transitions, where the kinematic constraints of the body play - a crucial role in structuring the resulting music. We seek to make a bridge between - currently accessible motor patterns, and musical possibilities beyond those --- - afforded through the use of a robotic augmentation. 
Guitar Machine is a robotic - device that presses on guitar strings and assists a musician by fretting alongside - her on the same guitar. This paper discusses the design of the system, strategies - for using the system to create novel musical patterns, and a user study that looks - at the effects of the temporary acquisition of enhanced physical ability. Our - results indicate that the proposed human-robot interaction would equip users to - explore new musical avenues on the guitar, as well as provide an enhanced understanding - of the task at hand on the basis of the robotically acquired ability. ' - address: 'Blacksburg, Virginia, USA' - author: Sang-won Leigh and Pattie Maes - bibtex: "@inproceedings{Leigh2018,\n abstract = {Playing musical instruments involves\ - \ producing gradually more challenging body movements and transitions, where the\ - \ kinematic constraints of the body play a crucial role in structuring the resulting\ - \ music. We seek to make a bridge between currently accessible motor patterns,\ - \ and musical possibilities beyond those --- afforded through the use of a robotic\ - \ augmentation. Guitar Machine is a robotic device that presses on guitar strings\ - \ and assists a musician by fretting alongside her on the same guitar. This paper\ - \ discusses the design of the system, strategies for using the system to create\ - \ novel musical patterns, and a user study that looks at the effects of the temporary\ - \ acquisition of enhanced physical ability. Our results indicate that the proposed\ - \ human-robot interaction would equip users to explore new musical avenues on\ - \ the guitar, as well as provide an enhanced understanding of the task at hand\ - \ on the basis of the robotically acquired ability. 
},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Sang-won Leigh and Pattie Maes},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302643},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {403--408},\n publisher = {Virginia Tech},\n title = {Guitar Machine:\ - \ Robotic Fretting Augmentation for Hybrid Human-Machine Guitar Play},\n url =\ - \ {http://www.nime.org/proceedings/2018/nime2018_paper0090.pdf},\n year = {2018}\n\ - }\n" + ID: Kleinsasser2003 + abstract: 'This document describes modular software supporting livesignal processing + and sound file playback within theMax/MSP environment. Dsp.rack integrates signalprocessing, + memory buffer recording, and pre-recordedmulti-channel file playback using an + interconnected,programmable signal flow matrix, and an eight-channel i/oformat.' + address: 'Montreal, Canada' + author: 'Kleinsasser, William' + bibtex: "@inproceedings{Kleinsasser2003,\n abstract = {This document describes modular\ + \ software supporting livesignal processing and sound file playback within theMax/MSP\ + \ environment. Dsp.rack integrates signalprocessing, memory buffer recording,\ + \ and pre-recordedmulti-channel file playback using an interconnected,programmable\ + \ signal flow matrix, and an eight-channel i/oformat.},\n address = {Montreal,\ + \ Canada},\n author = {Kleinsasser, William},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n date =\ + \ {22-24 May, 2003},\n doi = {10.5281/zenodo.1176525},\n issn = {2220-4806},\n\ + \ keywords = {Digital signal processing, Max/MSP, computer music performance,\ + \ matrix routing, live performance processing. 
},\n pages = {213--215},\n title\ + \ = {Dsp.rack: Laptop-based Modular, Programmable Digital Signal Processing and\ + \ Mixing for Live Performance},\n url = {http://www.nime.org/proceedings/2003/nime2003_213.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302643 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176525 issn: 2220-4806 - month: June - pages: 403--408 - publisher: Virginia Tech - title: 'Guitar Machine: Robotic Fretting Augmentation for Hybrid Human-Machine Guitar - Play' - url: http://www.nime.org/proceedings/2018/nime2018_paper0090.pdf - year: 2018 + keywords: 'Digital signal processing, Max/MSP, computer music performance, matrix + routing, live performance processing. ' + pages: 213--215 + title: 'Dsp.rack: Laptop-based Modular, Programmable Digital Signal Processing and + Mixing for Live Performance' + url: http://www.nime.org/proceedings/2003/nime2003_213.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Barton2018 - abstract: 'Percussive aerophones are configurable, modular, scalable, and can be - constructed from commonly found materials. They can produce rich timbres, a wide - range of pitches and complex polyphony. Their use by humans, perhaps most famously - by the Blue Man Group, inspired us to build an electromechanically-actuated version - of the instrument in order to explore expressive possibilities enabled by machines. - The Music, Perception, and Robotics Lab at WPI has iteratively designed, built - and composed for a robotic percussive aerophone since 2015, which has both taught - lessons in actuation and revealed promising musical capabilities of the instrument. 
' - address: 'Blacksburg, Virginia, USA' - author: Scott Barton and Karl Sundberg and Andrew Walter and Linda Sara Baker and - Tanuj Sane and Alexander O'Brien - bibtex: "@inproceedings{Barton2018,\n abstract = {Percussive aerophones are configurable,\ - \ modular, scalable, and can be constructed from commonly found materials. They\ - \ can produce rich timbres, a wide range of pitches and complex polyphony. Their\ - \ use by humans, perhaps most famously by the Blue Man Group, inspired us to build\ - \ an electromechanically-actuated version of the instrument in order to explore\ - \ expressive possibilities enabled by machines. The Music, Perception, and Robotics\ - \ Lab at WPI has iteratively designed, built and composed for a robotic percussive\ - \ aerophone since 2015, which has both taught lessons in actuation and revealed\ - \ promising musical capabilities of the instrument. },\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Scott Barton and Karl Sundberg and Andrew Walter\ - \ and Linda Sara Baker and Tanuj Sane and Alexander O'Brien},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1302645},\n editor = {Luke Dahl, Douglas Bowman, Thomas\ - \ Martin},\n isbn = {978-1-949373-99-8},\n issn = {2220-4806},\n month = {June},\n\ - \ pages = {409--412},\n publisher = {Virginia Tech},\n title = {Robotic Percussive\ - \ Aerophone},\n url = {http://www.nime.org/proceedings/2018/nime2018_paper0091.pdf},\n\ - \ year = {2018}\n}\n" + ID: Laibowitz2003 + abstract: 'This paper is a demo proposal for a new musical interfacebased on a DNA-like + double-helix and concepts in charactergeneration. It contains a description of + the interface,motivations behind developing such an interface, variousmappings + of the interface to musical applications, and therequirements to demo the interface.' 
+ address: 'Montreal, Canada' + author: 'Laibowitz, Mat' + bibtex: "@inproceedings{Laibowitz2003,\n abstract = {This paper is a demo proposal\ + \ for a new musical interfacebased on a DNA-like double-helix and concepts in\ + \ charactergeneration. It contains a description of the interface,motivations\ + \ behind developing such an interface, variousmappings of the interface to musical\ + \ applications, and therequirements to demo the interface.},\n address = {Montreal,\ + \ Canada},\n author = {Laibowitz, Mat},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176527},\n issn = {2220-4806},\n keywords = {Performance,\ + \ Design, Experimentation, DNA, Big Five. },\n pages = {216--217},\n title = {BASIS:\ + \ A Genesis in Musical Interfaces},\n url = {http://www.nime.org/proceedings/2003/nime2003_216.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302645 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176527 issn: 2220-4806 - month: June - pages: 409--412 - publisher: Virginia Tech - title: Robotic Percussive Aerophone - url: http://www.nime.org/proceedings/2018/nime2018_paper0091.pdf - year: 2018 + keywords: 'Performance, Design, Experimentation, DNA, Big Five. ' + pages: 216--217 + title: 'BASIS: A Genesis in Musical Interfaces' + url: http://www.nime.org/proceedings/2003/nime2003_216.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: VillicañaShaw2018 - abstract: 'This paper introduces seven mechatronic compositions performed over three - years at the xxxxx (xxxx). Each composition is discussed in regard to how it addresses - the performative elements of mechatronic music concerts. 
The compositions are - grouped into four classifications according to the types of interactions between - human and robotic performers they afford: Non-Interactive, Mechatronic Instruments - Played by Humans, Mechatronic Instruments Playing with Humans, and Social Interaction - as Performance. The orchestration of each composition is described along with - an overview of the piece''s compositional philosophy. Observations on how specific - extra-musical compositional techniques can be incorporated into future mechatronic - performances by human-robot performance ensembles are addressed.' - address: 'Blacksburg, Virginia, USA' - author: Nathan Daniel Villicaña-Shaw and Spencer Salazar and Ajay Kapur - bibtex: "@inproceedings{VillicañaShaw2018,\n abstract = {This paper introduces seven\ - \ mechatronic compositions performed over three years at the xxxxx (xxxx). Each\ - \ composition is discussed in regard to how it addresses the performative elements\ - \ of mechatronic music concerts. The compositions are grouped into four classifications\ - \ according to the types of interactions between human and robotic performers\ - \ they afford: Non-Interactive, Mechatronic Instruments Played by Humans, Mechatronic\ - \ Instruments Playing with Humans, and Social Interaction as Performance. The\ - \ orchestration of each composition is described along with an overview of the\ - \ piece's compositional philosophy. 
Observations on how specific extra-musical\ - \ compositional techniques can be incorporated into future mechatronic performances\ - \ by human-robot performance ensembles are addressed.},\n address = {Blacksburg,\ - \ Virginia, USA},\n author = {Nathan Daniel Villicaña-Shaw and Spencer Salazar\ - \ and Ajay Kapur},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1302647},\n\ - \ editor = {Luke Dahl, Douglas Bowman, Thomas Martin},\n isbn = {978-1-949373-99-8},\n\ - \ issn = {2220-4806},\n month = {June},\n pages = {413--418},\n publisher = {Virginia\ - \ Tech},\n title = {Mechatronic Performance in Computer Music Compositions},\n\ - \ url = {http://www.nime.org/proceedings/2018/nime2018_paper0092.pdf},\n year\ - \ = {2018}\n}\n" + ID: Merrill2003 + abstract: This paper describes a system which uses the output fromhead-tracking + and gesture recognition software to drive aparameterized guitar effects synthesizer + in real-time. + address: 'Montreal, Canada' + author: 'Merrill, David' + bibtex: "@inproceedings{Merrill2003,\n abstract = {This paper describes a system\ + \ which uses the output fromhead-tracking and gesture recognition software to\ + \ drive aparameterized guitar effects synthesizer in real-time.},\n address =\ + \ {Montreal, Canada},\n author = {Merrill, David},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176531},\n issn = {2220-4806},\n\ + \ keywords = {Head-tracking, gestural control, continuous control, parameterized\ + \ effects processor. 
},\n pages = {218--219},\n title = {Head-Tracking for Gestural\ + \ and Continuous Control of Parameterized Audio Effects},\n url = {http://www.nime.org/proceedings/2003/nime2003_218.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1302647 - editor: 'Luke Dahl, Douglas Bowman, Thomas Martin' - isbn: 978-1-949373-99-8 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176531 issn: 2220-4806 - month: June - pages: 413--418 - publisher: Virginia Tech - title: Mechatronic Performance in Computer Music Compositions - url: http://www.nime.org/proceedings/2018/nime2018_paper0092.pdf - year: 2018 + keywords: 'Head-tracking, gestural control, continuous control, parameterized effects + processor. ' + pages: 218--219 + title: Head-Tracking for Gestural and Continuous Control of Parameterized Audio + Effects + url: http://www.nime.org/proceedings/2003/nime2003_218.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Buchla2005 - address: 'Vancouver, BC, Canada' - author: 'Buchla, Don' - bibtex: "@inproceedings{Buchla2005,\n address = {Vancouver, BC, Canada},\n author\ - \ = {Buchla, Don},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176715},\n\ - \ issn = {2220-4806},\n pages = {1--1},\n title = {A History of Buchla's Musical\ - \ Instruments},\n url = {http://www.nime.org/proceedings/2005/nime2005_001.pdf},\n\ - \ year = {2005}\n}\n" + ID: Singer2003 + abstract: 'This paper describes the Sonic Banana, a bend-sensor based alternative + MIDI controller.' 
+ address: 'Montreal, Canada' + author: 'Singer, Eric' + bibtex: "@inproceedings{Singer2003,\n abstract = {This paper describes the Sonic\ + \ Banana, a bend-sensor based alternative MIDI controller.},\n address = {Montreal,\ + \ Canada},\n author = {Singer, Eric},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176563},\n issn = {2220-4806},\n keywords = {Interactive,\ + \ controller, bend, sensors, performance, MIDI.},\n pages = {220--221},\n title\ + \ = {Sonic Banana: A Novel Bend-Sensor-Based {MIDI} Controller},\n url = {http://www.nime.org/proceedings/2003/nime2003_220.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176715 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176563 issn: 2220-4806 - pages: 1--1 - title: A History of Buchla's Musical Instruments - url: http://www.nime.org/proceedings/2005/nime2005_001.pdf - year: 2005 - - -- ENTRYTYPE: inproceedings - ID: Levin2005 - address: 'Vancouver, BC, Canada' - author: 'Levin, Golan' - bibtex: "@inproceedings{Levin2005,\n address = {Vancouver, BC, Canada},\n author\ - \ = {Levin, Golan},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176770},\n\ - \ issn = {2220-4806},\n pages = {2--3},\n title = {A Personal Chronology of Audiovisual\ - \ Systems Research},\n url = {http://www.nime.org/proceedings/2005/nime2005_002.pdf},\n\ - \ year = {2005}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176770 - issn: 2220-4806 - pages: 2--3 - title: A Personal Chronology of Audiovisual Systems Research - url: http://www.nime.org/proceedings/2005/nime2005_002.pdf - year: 2005 + keywords: 'Interactive, controller, bend, sensors, performance, 
MIDI.' + pages: 220--221 + title: 'Sonic Banana: A Novel Bend-Sensor-Based MIDI Controller' + url: http://www.nime.org/proceedings/2003/nime2003_220.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Buxton2005 - address: 'Vancouver, BC, Canada' - author: 'Buxton, Bill' - bibtex: "@inproceedings{Buxton2005,\n address = {Vancouver, BC, Canada},\n author\ - \ = {Buxton, Bill},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176717},\n\ - \ issn = {2220-4806},\n pages = {4--4},\n title = {Causality and Striking the\ - \ Right Note},\n url = {http://www.nime.org/proceedings/2005/nime2005_004.pdf},\n\ - \ year = {2005}\n}\n" + ID: Muth2003 + abstract: 'Sodaconductor is a musical interface for generating OSCcontrol data based + on the dynamic physical simulation toolSodaconstructor as it can be seen and heard + onhttp://www.sodaplay.com.' + address: 'Montreal, Canada' + author: 'Muth, David and Burton, Ed' + bibtex: "@inproceedings{Muth2003,\n abstract = {Sodaconductor is a musical interface\ + \ for generating OSCcontrol data based on the dynamic physical simulation toolSodaconstructor\ + \ as it can be seen and heard onhttp://www.sodaplay.com.},\n address = {Montreal,\ + \ Canada},\n author = {Muth, David and Burton, Ed},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176537},\n issn = {2220-4806},\n\ + \ keywords = {Sodaconstrucor, Soda, Open Sound Control, Networked Performance,\ + \ Physical Simulation, Generative Composition, Java Application, Non-Linear Sequencing.},\n\ + \ pages = {222--224},\n title = {Sodaconductor},\n url = {http://www.nime.org/proceedings/2003/nime2003_222.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176717 + date: '22-24 May, 2003' + doi: 
10.5281/zenodo.1176537 issn: 2220-4806 - pages: 4--4 - title: Causality and Striking the Right Note - url: http://www.nime.org/proceedings/2005/nime2005_004.pdf - year: 2005 + keywords: 'Sodaconstrucor, Soda, Open Sound Control, Networked Performance, Physical + Simulation, Generative Composition, Java Application, Non-Linear Sequencing.' + pages: 222--224 + title: Sodaconductor + url: http://www.nime.org/proceedings/2003/nime2003_222.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Bowers2005 - abstract: 'As a response to a number of notable contemporary aesthetic tendencies, - this paper introduces the notion of an infra-instrument as a kind of ‘new interface - for musical expression’ worthy of study and systematic design. In contrast to - hyper-, meta- and virtual instruments, we propose infra-instruments as devices - of restricted interactive potential, with little sensor enhancement, which engender - simple musics with scarce opportunity for conventional virtuosity. After presenting - numerous examples from our work, we argue that it is precisely such interactionally - and sonically challenged designs that leave requisite space for computer-generated - augmentations in hybrid, multi-device performance settings.' - address: 'Vancouver, BC, Canada' - author: 'Bowers, John and Archer, Phil' - bibtex: "@inproceedings{Bowers2005,\n abstract = {As a response to a number of notable\ - \ contemporary aesthetic tendencies, this paper introduces the notion of an infra-instrument\ - \ as a kind of ‘new interface for musical expression’ worthy of study and systematic\ - \ design. 
In contrast to hyper-, meta- and virtual instruments, we propose infra-instruments\ - \ as devices of restricted interactive potential, with little sensor enhancement,\ - \ which engender simple musics with scarce opportunity for conventional virtuosity.\ - \ After presenting numerous examples from our work, we argue that it is precisely\ - \ such interactionally and sonically challenged designs that leave requisite space\ - \ for computer-generated augmentations in hybrid, multi-device performance settings.},\n\ - \ address = {Vancouver, BC, Canada},\n author = {Bowers, John and Archer, Phil},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176713},\n issn = {2220-4806},\n\ - \ keywords = {Infra-instruments, hyperinstruments, meta-instruments, virtual instruments,\ - \ design concepts and principles. },\n pages = {5--10},\n title = {Not Hyper,\ - \ Not Meta, Not Cyber but Infra-Instruments},\n url = {http://www.nime.org/proceedings/2005/nime2005_005.pdf},\n\ - \ year = {2005}\n}\n" + ID: Flety2003 + abstract: 'Ircam has been deeply involved into gesture analysis and sensingfor about + four years now, as several artistic projects demonstrate.Ircam has often been + solicited for sharing software and hardwaretools for gesture sensing, especially + devices for the acquisition andconversion of sensor data, such as the AtoMIC Pro + [1][2]. 
Thisdemo-paper describes the recent design of a new sensor to MIDIinterface + called EoBody1' + address: 'Montreal, Canada' + author: 'Fléty, Emmanuel and Sirguy, Marc' + bibtex: "@inproceedings{Flety2003,\n abstract = {Ircam has been deeply involved\ + \ into gesture analysis and sensingfor about four years now, as several artistic\ + \ projects demonstrate.Ircam has often been solicited for sharing software and\ + \ hardwaretools for gesture sensing, especially devices for the acquisition andconversion\ + \ of sensor data, such as the AtoMIC Pro [1][2]. Thisdemo-paper describes the\ + \ recent design of a new sensor to MIDIinterface called EoBody1},\n address =\ + \ {Montreal, Canada},\n author = {Fl\\'{e}ty, Emmanuel and Sirguy, Marc},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176505},\n\ + \ issn = {2220-4806},\n keywords = {Gestural controller, Sensor, MIDI, Computer\ + \ Music. },\n pages = {225--226},\n title = {EoBody : a Follow-up to AtoMIC Pro's\ + \ Technology},\n url = {http://www.nime.org/proceedings/2003/nime2003_225.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176713 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176505 issn: 2220-4806 - keywords: 'Infra-instruments, hyperinstruments, meta-instruments, virtual instruments, - design concepts and principles. ' - pages: 5--10 - title: 'Not Hyper, Not Meta, Not Cyber but Infra-Instruments' - url: http://www.nime.org/proceedings/2005/nime2005_005.pdf - year: 2005 + keywords: 'Gestural controller, Sensor, MIDI, Computer Music. 
' + pages: 225--226 + title: 'EoBody : a Follow-up to AtoMIC Pro''s Technology' + url: http://www.nime.org/proceedings/2003/nime2003_225.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Makipatola2005 - abstract: 'In this paper, we introduce and analyze four gesture-controlled musical - instruments. We briefly discuss the test platform designed to allow for rapid - experimentation of new interfaces and control mappings. We describe our design - experiences and discuss the effects of system features such as latency, resolution - and lack of tactile feedback. The instruments use virtual reality hardware and - computer vision for user input, and three-dimensional stereo vision as well as - simple desktop displays for providing visual feedback. The instrument sounds are - synthesized in real-time using physical sound modeling. ' - address: 'Vancouver, BC, Canada' - author: 'Mäki-patola, Teemu and Laitinen, Juha and Kanerva, Aki and Takala, Tapio' - bibtex: "@inproceedings{Makipatola2005,\n abstract = {In this paper, we introduce\ - \ and analyze four gesture-controlled musical instruments. We briefly discuss\ - \ the test platform designed to allow for rapid experimentation of new interfaces\ - \ and control mappings. We describe our design experiences and discuss the effects\ - \ of system features such as latency, resolution and lack of tactile feedback.\ - \ The instruments use virtual reality hardware and computer vision for user input,\ - \ and three-dimensional stereo vision as well as simple desktop displays for providing\ - \ visual feedback. The instrument sounds are synthesized in real-time using physical\ - \ sound modeling. 
},\n address = {Vancouver, BC, Canada},\n author = {M\\''{a}ki-patola,\ - \ Teemu and Laitinen, Juha and Kanerva, Aki and Takala, Tapio},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176780},\n issn = {2220-4806},\n keywords = {Musical\ - \ instrument design, virtual instrument, gesture, widgets, physical sound modeling,\ - \ control mapping.},\n pages = {11--16},\n title = {Experiments with Virtual Reality\ - \ Instruments},\n url = {http://www.nime.org/proceedings/2005/nime2005_011.pdf},\n\ - \ year = {2005}\n}\n" + ID: Paradiso2003 + abstract: 'Several well-known alternative musical controllers were inspired by sensor + systems developed in other fields, often coming to their musical application via + surprising routes. Correspondingly, work on electronic music controllers has relevance + to other applications and broader research themes. In this article, I give a tour + though several controller systems that I have been involved with over the past + decade and outline their connections with other areas of inquiry.' + address: 'Montreal, Canada' + author: 'Paradiso, Joseph A.' + bibtex: "@inproceedings{Paradiso2003,\n abstract = {Several well-known alternative\ + \ musical controllers were inspired by sensor systems developed in other fields,\ + \ often coming to their musical application via surprising routes. Correspondingly,\ + \ work on electronic music controllers has relevance to other applications and\ + \ broader research themes. 
In this article, I give a tour though several controller\ + \ systems that I have been involved with over the past decade and outline their\ + \ connections with other areas of inquiry.},\n address = {Montreal, Canada},\n\ + \ author = {Paradiso, Joseph A.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n date = {22-24 May, 2003},\n\ + \ doi = {10.5281/zenodo.1176551},\n issn = {2220-4806},\n pages = {228--234},\n\ + \ title = {Dual-Use Technologies for Electronic Music Controllers: A Personal\ + \ Perspective},\n url = {http://www.nime.org/proceedings/2003/nime2003_228.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176780 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176551 issn: 2220-4806 - keywords: 'Musical instrument design, virtual instrument, gesture, widgets, physical - sound modeling, control mapping.' - pages: 11--16 - title: Experiments with Virtual Reality Instruments - url: http://www.nime.org/proceedings/2005/nime2005_011.pdf - year: 2005 + pages: 228--234 + title: 'Dual-Use Technologies for Electronic Music Controllers: A Personal Perspective' + url: http://www.nime.org/proceedings/2003/nime2003_228.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Weinberg2005 - abstract: 'The iltur system features a novel method of interaction between expert - and novice musicians through a set of musical controllers called Beatbugs. Beatbug - players can record live musical input from MIDI and acoustic instruments and respond - by transforming the recorded material in real-time, creating motif-and-variation - call-and-response routines on the fly. A central computer system analyzes MIDI - and audio played by expert players and allows novice Beatbug players to personalize - the analyzed material using a variety of transformation algorithms. 
This paper - presents the motivation for developing the iltur system, followed by a brief survey - of pervious and related work that guided the definition of the project’s goals. - We then present the hardware and software approaches that were taken to address - these goals, as well as a couple of compositions that were written for the system. - The paper ends with a discussion based on observations of players using the iltur - system and a number of suggestions for future work.' - address: 'Vancouver, BC, Canada' - author: 'Weinberg, Gil and Driscoll, Scott' - bibtex: "@inproceedings{Weinberg2005,\n abstract = {The iltur system features a\ - \ novel method of interaction between expert and novice musicians through a set\ - \ of musical controllers called Beatbugs. Beatbug players can record live musical\ - \ input from MIDI and acoustic instruments and respond by transforming the recorded\ - \ material in real-time, creating motif-and-variation call-and-response routines\ - \ on the fly. A central computer system analyzes MIDI and audio played by expert\ - \ players and allows novice Beatbug players to personalize the analyzed material\ - \ using a variety of transformation algorithms. This paper presents the motivation\ - \ for developing the iltur system, followed by a brief survey of pervious and\ - \ related work that guided the definition of the project’s goals. We then present\ - \ the hardware and software approaches that were taken to address these goals,\ - \ as well as a couple of compositions that were written for the system. 
The paper\ - \ ends with a discussion based on observations of players using the iltur system\ - \ and a number of suggestions for future work.},\n address = {Vancouver, BC, Canada},\n\ - \ author = {Weinberg, Gil and Driscoll, Scott},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176840},\n issn = {2220-4806},\n keywords = {Collaboration,\ - \ improvisation, gestrual handheld controllers, novices, mapping},\n pages = {17--22},\n\ - \ title = {iltur -- Connecting Novices and Experts Through Collaborative Improvisation},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_017.pdf},\n year = {2005}\n\ - }\n" + ID: Cadoz2003 + address: 'Montreal, Canada' + author: 'Cadoz, Claude and Luciani, Annie and Florens, Jean-Loup and Castagn\''{e}, + Nicolas' + bibtex: "@inproceedings{Cadoz2003,\n address = {Montreal, Canada},\n author = {Cadoz,\ + \ Claude and Luciani, Annie and Florens, Jean-Loup and Castagn\\'{e}, Nicolas},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n date = {22-24 May, 2003},\n doi = {10.5281/zenodo.1176494},\n\ + \ issn = {2220-4806},\n pages = {235--246},\n title = {{AC}ROE --- {ICA} Artistic\ + \ Creation and Computer Interactive Multisensory Simulation Force Feedback Gesture\ + \ Transducers},\n url = {http://www.nime.org/proceedings/2003/nime2003_235.pdf},\n\ + \ year = {2003}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176840 + date: '22-24 May, 2003' + doi: 10.5281/zenodo.1176494 issn: 2220-4806 - keywords: 'Collaboration, improvisation, gestrual handheld controllers, novices, - mapping' - pages: 17--22 - title: iltur -- Connecting Novices and Experts Through Collaborative Improvisation - url: http://www.nime.org/proceedings/2005/nime2005_017.pdf - year: 2005 + pages: 235--246 + title: '{AC}ROE --- {ICA} 
Artistic Creation and Computer Interactive Multisensory + Simulation Force Feedback Gesture Transducers' + url: http://www.nime.org/proceedings/2003/nime2003_235.pdf + year: 2003 - ENTRYTYPE: inproceedings - ID: Jorda2005 - abstract: 'In this paper we study the potential and the challenges posed by multi-user - instruments, as tools that can facilitate interaction and responsiveness not only - between performers and their instrument but also between performers as well. Several - previous studies and taxonomies are mentioned, after what different paradigms - exposed with examples based on traditional mechanical acoustic instruments. In - the final part, several existing systems and implementations, now in the digital - domain, are described and identified according to the models and paradigms previously - introduced. ' - address: 'Vancouver, BC, Canada' - author: 'Jordà, Sergi' - bibtex: "@inproceedings{Jorda2005,\n abstract = {In this paper we study the potential\ - \ and the challenges posed by multi-user instruments, as tools that can facilitate\ - \ interaction and responsiveness not only between performers and their instrument\ - \ but also between performers as well. Several previous studies and taxonomies\ - \ are mentioned, after what different paradigms exposed with examples based on\ - \ traditional mechanical acoustic instruments. In the final part, several existing\ - \ systems and implementations, now in the digital domain, are described and identified\ - \ according to the models and paradigms previously introduced. },\n address =\ - \ {Vancouver, BC, Canada},\n author = {Jord\\`{a}, Sergi},\n booktitle = {Proceedings\ + ID: NIME22_1 + abstract: 'This paper discusses a quantitative method to evaluate whether an expert + player is able to execute skilled actions on an unfamiliar interface while keeping + the focus of their performance on the musical outcome rather than on the technology + itself. 
In our study, twelve professional electric guitar players used an augmented + plectrum to replicate prerecorded timbre variations in a set of musical excerpts. + The task was undertaken in two experimental conditions: a reference condition, + and a subtle gradual change in the sensitivity of the augmented plectrum which + is designed to affect the guitarist’s performance without making them consciously + aware of its effect. We propose that players’ subconscious response to the disruption + of changing the sensitivity, as well as their overall ability to replicate the + stimuli, may indicate the strength of the relationship they developed with the + new interface. The case study presented in this paper highlights the strengths + and limitations of this method.' + address: 'The University of Auckland, New Zealand' + articleno: 1 + author: 'Guidi, Andrea and McPherson, Andrew' + bibtex: "@inproceedings{NIME22_1,\n abstract = {This paper discusses a quantitative\ + \ method to evaluate whether an expert player is able to execute skilled actions\ + \ on an unfamiliar interface while keeping the focus of their performance on the\ + \ musical outcome rather than on the technology itself. In our study, twelve professional\ + \ electric guitar players used an augmented plectrum to replicate prerecorded\ + \ timbre variations in a set of musical excerpts. 
The task was undertaken in two\ + \ experimental conditions: a reference condition, and a subtle gradual change\ + \ in the sensitivity of the augmented plectrum which is designed to affect the\ + \ guitarist’s performance without making them consciously aware of its effect.\ + \ We propose that players’ subconscious response to the disruption of changing\ + \ the sensitivity, as well as their overall ability to replicate the stimuli,\ + \ may indicate the strength of the relationship they developed with the new interface.\ + \ The case study presented in this paper highlights the strengths and limitations\ + \ of this method.},\n address = {The University of Auckland, New Zealand},\n articleno\ + \ = {1},\n author = {Guidi, Andrea and McPherson, Andrew},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176760},\n issn = {2220-4806},\n keywords = {Multi-user\ - \ instruments, collaborative music, new instruments design guidelines. },\n pages\ - \ = {23--26},\n title = {Multi-user Instruments: Models, Examples and Promises},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_023.pdf},\n year = {2005}\n\ - }\n" + \ doi = {10.21428/92fbeb44.79d0b38f},\n issn = {2220-4806},\n month = {jun},\n\ + \ pdf = {101.pdf},\n presentation-video = {https://youtu.be/J4981qsq_7c},\n title\ + \ = {Quantitative evaluation of aspects of embodiment in new digital musical instruments},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.79d0b38f},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176760 + doi: 10.21428/92fbeb44.79d0b38f issn: 2220-4806 - keywords: 'Multi-user instruments, collaborative music, new instruments design guidelines. 
' - pages: 23--26 - title: 'Multi-user Instruments: Models, Examples and Promises' - url: http://www.nime.org/proceedings/2005/nime2005_023.pdf - year: 2005 + month: jun + pdf: 101.pdf + presentation-video: https://youtu.be/J4981qsq_7c + title: Quantitative evaluation of aspects of embodiment in new digital musical instruments + url: https://doi.org/10.21428%2F92fbeb44.79d0b38f + year: 2022 - ENTRYTYPE: inproceedings - ID: Blaine2005 - abstract: 'This paper will investigate a variety of alternate controllers that are - making an impact in interactive entertainment, particularly in the video game - industry. Since the late 1990''s, the surging popularity of rhythmic and musical - performance games in Japanese arcades has led to the development of new interfaces - and alternate controllers for the consumer market worldwide. Rhythm action games - such as Dance Dance Revolution, Taiko No Tatsujin (Taiko: Drum Master), and Donkey - Konga are stimulating collaborative gameplay and exposing consumers to custom - controllers designed specifically for musical and physical interaction. We are - witnessing the emergence and acceptance of these breakthrough controllers and - models for gameplay as an international cultural phenomenon penetrating the video - game and toy markets in record numbers. Therefore, it is worth considering the - potential benefits to developers of musical interfaces, electronic devices and - alternate controllers in light of these new and emerging opportunities, particularly - in the realm of video gaming, toy development, arcades, and other interactive - entertainment experiences. ' - address: 'Vancouver, BC, Canada' - author: 'Blaine, Tina' - bibtex: "@inproceedings{Blaine2005,\n abstract = {This paper will investigate a\ - \ variety of alternate controllers that are making an impact in interactive entertainment,\ - \ particularly in the video game industry. 
Since the late 1990's, the surging\ - \ popularity of rhythmic and musical performance games in Japanese arcades has\ - \ led to the development of new interfaces and alternate controllers for the consumer\ - \ market worldwide. Rhythm action games such as Dance Dance Revolution, Taiko\ - \ No Tatsujin (Taiko: Drum Master), and Donkey Konga are stimulating collaborative\ - \ gameplay and exposing consumers to custom controllers designed specifically\ - \ for musical and physical interaction. We are witnessing the emergence and acceptance\ - \ of these breakthrough controllers and models for gameplay as an international\ - \ cultural phenomenon penetrating the video game and toy markets in record numbers.\ - \ Therefore, it is worth considering the potential benefits to developers of musical\ - \ interfaces, electronic devices and alternate controllers in light of these new\ - \ and emerging opportunities, particularly in the realm of video gaming, toy development,\ - \ arcades, and other interactive entertainment experiences. },\n address = {Vancouver,\ - \ BC, Canada},\n author = {Blaine, Tina},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176709},\n\ - \ issn = {2220-4806},\n keywords = {Alternate controllers, musical interaction,\ - \ interactive entertainment, video game industry, arcades, rhythm action, collaborative\ - \ gameplay, musical performance games},\n pages = {27--33},\n title = {The Convergence\ - \ of Alternate Controllers and Musical Interfaces in Interactive Entertainment},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_027.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_2 + abstract: 'This article explores two in-depth interviews with distinguished Chinese + NIMEers, across generations, from the late 1970s to the present. Tian Jinqin and + Meng Qi represent role models in the Chinese NIME community. 
From the innovative + NIME designers’ historical technological innovation of the 1970s’ analog ribbon + control string synthesizer Xian Kong Qin to the 2020’s Wing Pinger evolving harmony + synthesizer, the author shines a light from different angles on the Chinese NIME + community.' + address: 'The University of Auckland, New Zealand' + articleno: 2 + author: 'Wu, Jiayue Cecilia' + bibtex: "@inproceedings{NIME22_2,\n abstract = {This article explores two in-depth\ + \ interviews with distinguished Chinese NIMEers, across generations, from the\ + \ late 1970s to the present. Tian Jinqin and Meng Qi represent role models in\ + \ the Chinese NIME community. From the innovative NIME designers’ historical technological\ + \ innovation of the 1970s’ analog ribbon control string synthesizer Xian Kong\ + \ Qin to the 2020’s Wing Pinger evolving harmony synthesizer, the author shines\ + \ a light from different angles on the Chinese NIME community.},\n address = {The\ + \ University of Auckland, New Zealand},\n articleno = {2},\n author = {Wu, Jiayue\ + \ Cecilia},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.57e41c54},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {102.pdf},\n presentation-video\ + \ = {https://www.youtube.com/watch?v=4PMmDnUNgRk},\n title = {Today and Yesterday:\ + \ Two Case Studies of China{\\textquotesingle}s {NIME} Community},\n url = {https://doi.org/10.21428%2F92fbeb44.57e41c54},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176709 + doi: 10.21428/92fbeb44.57e41c54 issn: 2220-4806 - keywords: 'Alternate controllers, musical interaction, interactive entertainment, - video game industry, arcades, rhythm action, collaborative gameplay, musical performance - games' - pages: 27--33 - title: The Convergence of Alternate Controllers and Musical Interfaces in 
Interactive - Entertainment - url: http://www.nime.org/proceedings/2005/nime2005_027.pdf - year: 2005 + month: jun + pdf: 102.pdf + presentation-video: https://www.youtube.com/watch?v=4PMmDnUNgRk + title: 'Today and Yesterday: Two Case Studies of China’s NIME Community' + url: https://doi.org/10.21428%2F92fbeb44.57e41c54 + year: 2022 - ENTRYTYPE: inproceedings - ID: Overholt2005 - address: 'Vancouver, BC, Canada' - author: 'Overholt, Dan' - bibtex: "@inproceedings{Overholt2005,\n address = {Vancouver, BC, Canada},\n author\ - \ = {Overholt, Dan},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176796},\n\ - \ issn = {2220-4806},\n pages = {34--37},\n title = {The Overtone Violin},\n url\ - \ = {http://www.nime.org/proceedings/2005/nime2005_034.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_3 + abstract: 'One of the consequences of the pandemic has been the potential to embrace + hybrid support for different human group activities, including music performance, + resulting in accommodating a wider range of situations. We believe that we are + barely at the tip of the iceberg and that we can explore further the possibilities + of the medium by promoting a more active role of the audience during telematic + performance. In this paper, we present personic, a mobile web app designed for + distributed audiences to constitute a digital musical instrument. This has the + twofold purpose of letting the audience contribute to the performance with a non-intrusive + and easy-to-use approach, as well as providing audiovisual feedback that is helpful + for both the performers and the audience alike. The challenges and possibilities + of this approach are discussed from pilot testing the app using a practice-based + approach. We conclude by pointing to new directions of telematic performance, + which is a promising direction for network music and digital performance.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 3 + author: 'Xambó, Anna and Goudarzi, Visda' + bibtex: "@inproceedings{NIME22_3,\n abstract = {One of the consequences of the pandemic\ + \ has been the potential to embrace hybrid support for different human group activities,\ + \ including music performance, resulting in accommodating a wider range of situations.\ + \ We believe that we are barely at the tip of the iceberg and that we can explore\ + \ further the possibilities of the medium by promoting a more active role of the\ + \ audience during telematic performance. In this paper, we present personic, a\ + \ mobile web app designed for distributed audiences to constitute a digital musical\ + \ instrument. This has the twofold purpose of letting the audience contribute\ + \ to the performance with a non-intrusive and easy-to-use approach, as well as\ + \ providing audiovisual feedback that is helpful for both the performers and the\ + \ audience alike. The challenges and possibilities of this approach are discussed\ + \ from pilot testing the app using a practice-based approach. 
We conclude by pointing\ + \ to new directions of telematic performance, which is a promising direction for\ + \ network music and digital performance.},\n address = {The University of Auckland,\ + \ New Zealand},\n articleno = {3},\n author = {Xamb{\\'{o}}, Anna and Goudarzi,\ + \ Visda},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.706b549e},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {107.pdf},\n presentation-video = {https://youtu.be/xu5ySfbqYs8},\n\ + \ title = {The Mobile Audience as a Digital Musical Persona in Telematic Performance},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.706b549e},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176796 + doi: 10.21428/92fbeb44.706b549e issn: 2220-4806 - pages: 34--37 - title: The Overtone Violin - url: http://www.nime.org/proceedings/2005/nime2005_034.pdf - year: 2005 + month: jun + pdf: 107.pdf + presentation-video: https://youtu.be/xu5ySfbqYs8 + title: The Mobile Audience as a Digital Musical Persona in Telematic Performance + url: https://doi.org/10.21428%2F92fbeb44.706b549e + year: 2022 - ENTRYTYPE: inproceedings - ID: Caceres2005 - abstract: 'The Self-Contained Unified Bass Augmenter (SCUBA) is a new augmentative - OSC (Open Sound Control) [5] controller for the tuba. SCUBA adds new expressive - possibilities to the existing tuba interface through onboard sensors. These sensors - provide continuous and discrete user-controlled parametric data to be mapped at - will to signal processing parameters, virtual instrument control parameters, sound - playback, and various other functions. In its current manifestation, control data - is mapped to change the processing of the instrument''s natural sound in Pd (Pure - Data) [3]. 
SCUBA preserves the unity of the solo instrument interface by acoustically - mixing direct and processed sound in the instrument''s bell via mounted satellite - speakers, which are driven by a subwoofer below the performer''s chair. The end - result augments the existing interface while preserving its original unity and - functionality. ' - address: 'Vancouver, BC, Canada' - author: 'Cáceres, Juan Pablo and Mysore, Gautham J. and Treviño, Jeffrey' - bibtex: "@inproceedings{Caceres2005,\n abstract = {The Self-Contained Unified Bass\ - \ Augmenter (SCUBA) is a new augmentative OSC (Open Sound Control) [5] controller\ - \ for the tuba. SCUBA adds new expressive possibilities to the existing tuba interface\ - \ through onboard sensors. These sensors provide continuous and discrete user-controlled\ - \ parametric data to be mapped at will to signal processing parameters, virtual\ - \ instrument control parameters, sound playback, and various other functions.\ - \ In its current manifestation, control data is mapped to change the processing\ - \ of the instrument's natural sound in Pd (Pure Data) [3]. SCUBA preserves the\ - \ unity of the solo instrument interface by acoustically mixing direct and processed\ - \ sound in the instrument's bell via mounted satellite speakers, which are driven\ - \ by a subwoofer below the performer's chair. The end result augments the existing\ - \ interface while preserving its original unity and functionality. },\n address\ - \ = {Vancouver, BC, Canada},\n author = {C\\'{a}ceres, Juan Pablo and Mysore,\ - \ Gautham J. 
and Trevi\\~{n}o, Jeffrey},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176719},\n\ - \ issn = {2220-4806},\n keywords = {Interactive music, electro-acoustic musical\ - \ instruments, musical instrument design, human computer interface, signal processing,\ - \ Open Sound Control (OSC) },\n pages = {38--41},\n title = {{SC}UBA: The Self-Contained\ - \ Unified Bass Augmenter},\n url = {http://www.nime.org/proceedings/2005/nime2005_038.pdf},\n\ - \ year = {2005}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176719 - issn: 2220-4806 - keywords: 'Interactive music, electro-acoustic musical instruments, musical instrument - design, human computer interface, signal processing, Open Sound Control (OSC) ' - pages: 38--41 - title: 'SCUBA: The Self-Contained Unified Bass Augmenter' - url: http://www.nime.org/proceedings/2005/nime2005_038.pdf - year: 2005 - - -- ENTRYTYPE: inproceedings - ID: Sinyor2005 - abstract: 'This paper presents a novel controller built to exploit thephysical behaviour - of a simple dynamical system, namely aspinning wheel. The phenomenon of gyroscopic - precessioncauses the instrument to slowly oscillate when it is spunquickly, providing - the performer with proprioceptive feedback. Also, due to the mass of the wheel - and tire and theresulting rotational inertia, it maintains a relatively constant - angular velocity once it is set in motion. Various sensors were used to measure - continuous and discrete quantitiessuch as the the angular frequency of the wheel, - its spatialorientation, and the performer''s finger pressure. In addition, optical - and hall-effect sensors detect the passing of aspoke-mounted photodiode and two - magnets. 
A base software layer was developed in Max/MSP and various patcheswere - written with the goal of mapping the dynamic behaviorof the wheel to varied musical - processes.' - address: 'Vancouver, BC, Canada' - author: 'Sinyor, Elliot and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Sinyor2005,\n abstract = {This paper presents a novel controller\ - \ built to exploit thephysical behaviour of a simple dynamical system, namely\ - \ aspinning wheel. The phenomenon of gyroscopic precessioncauses the instrument\ - \ to slowly oscillate when it is spunquickly, providing the performer with proprioceptive\ - \ feedback. Also, due to the mass of the wheel and tire and theresulting rotational\ - \ inertia, it maintains a relatively constant angular velocity once it is set\ - \ in motion. Various sensors were used to measure continuous and discrete quantitiessuch\ - \ as the the angular frequency of the wheel, its spatialorientation, and the performer's\ - \ finger pressure. In addition, optical and hall-effect sensors detect the passing\ - \ of aspoke-mounted photodiode and two magnets. 
A base software layer was developed\ - \ in Max/MSP and various patcheswere written with the goal of mapping the dynamic\ - \ behaviorof the wheel to varied musical processes.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Sinyor, Elliot and Wanderley, Marcelo M.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176820},\n issn = {2220-4806},\n keywords\ - \ = {HCI, Digital Musical Instruments, Gyroscopic Precession, Rotational Inertia,\ - \ Open Sound Control },\n pages = {42--45},\n title = {Gyrotyre : A dynamic hand-held\ - \ computer-music controller based on a spinning wheel},\n url = {http://www.nime.org/proceedings/2005/nime2005_042.pdf},\n\ - \ year = {2005}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176820 - issn: 2220-4806 - keywords: 'HCI, Digital Musical Instruments, Gyroscopic Precession, Rotational Inertia, - Open Sound Control ' - pages: 42--45 - title: 'Gyrotyre : A dynamic hand-held computer-music controller based on a spinning - wheel' - url: http://www.nime.org/proceedings/2005/nime2005_042.pdf - year: 2005 - - -- ENTRYTYPE: inproceedings - ID: Fraietta2005a - abstract: 'The Smart Controller is a portable hardware device that responds to input - control voltage, OSC, and MIDI messages; producing output control voltage, OSC, - and MIDI messages (depending upon the loaded custom patch). The Smart Controller - is a stand alone device; a powerful, reliable, and compact instrument capable - of reducing the number of electronic modules required in a live performance or - installation, particularly the requirement of a laptop computer. More powerful, - however, is the Smart Controller Workbench, a complete interactive development - environment. 
In addition to enabling the composer to create and debug their patches, - the Smart Controller Workbench accurately simulates the behaviour of the hardware, - and functions as an incircuit debugger that enables the performer to remotely - monitor, modify, and tune patches running in an installation without the requirement - of stopping or interrupting the live performance. ' - address: 'Vancouver, BC, Canada' - author: 'Fraietta, Angelo' - bibtex: "@inproceedings{Fraietta2005a,\n abstract = {The Smart Controller is a portable\ - \ hardware device that responds to input control voltage, OSC, and MIDI messages;\ - \ producing output control voltage, OSC, and MIDI messages (depending upon the\ - \ loaded custom patch). The Smart Controller is a stand alone device; a powerful,\ - \ reliable, and compact instrument capable of reducing the number of electronic\ - \ modules required in a live performance or installation, particularly the requirement\ - \ of a laptop computer. More powerful, however, is the Smart Controller Workbench,\ - \ a complete interactive development environment. In addition to enabling the\ - \ composer to create and debug their patches, the Smart Controller Workbench accurately\ - \ simulates the behaviour of the hardware, and functions as an incircuit debugger\ - \ that enables the performer to remotely monitor, modify, and tune patches running\ - \ in an installation without the requirement of stopping or interrupting the live\ - \ performance. },\n address = {Vancouver, BC, Canada},\n author = {Fraietta, Angelo},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176745},\n issn = {2220-4806},\n\ - \ keywords = {Control Voltage, Open Sound Control, Algorithmic Composition, MIDI,\ - \ Sound Installations, programmable logic control, synthesizers, electronic music,\ - \ Sensors, Actuators, Interaction. 
},\n pages = {46--49},\n title = {The Smart\ - \ Controller Workbench},\n url = {http://www.nime.org/proceedings/2005/nime2005_046.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_4 + abstract: 'Physical modelling sound synthesis methods generate vast and intricate + sound spaces that are navigated using meaningful parameters. Numerical based physical + modelling synthesis methods provide authentic representations of the physics they + model. Unfortunately, the application of these physical models are often limited + because of their considerable computational requirements. In previous studies, + the CPU has been shown to reliably support two-dimensional linear finite-difference + models in real-time with resolutions up to 64x64. However, the near-ubiquitous + parallel processing units known as GPUs have previously been used to process considerably + larger resolutions, as high as 512x512 in real-time. GPU programming requires + a low-level understanding of the architecture, which often imposes a barrier for + entry for inexperienced practitioners. Therefore, this paper proposes HyperModels, + a framework for automating the mapping of linear finite-difference based physical + modelling synthesis into an optimised parallel form suitable for the GPU. An implementation + of the design is then used to evaluate the objective performance of the framework + by comparing the automated solution to manually developed equivalents. For the + majority of the extensive performance profiling tests, the auto-generated programs + were observed to perform only 60% slower but in the worst-case scenario it was + 50% slower. The initial results suggests that, in most circumstances, the automation + provided by the framework avoids the lowlevel expertise required to manually optimise + the GPU, with only a small reduction in performance. However, there is still scope + to improve the auto-generated optimisations. 
When comparing the performance of + CPU to GPU equivalents, the parallel CPU version supports resolutions of up to + 128x128 whilst the GPU continues to support higher resolutions up to 512x512. + To conclude the paper, two instruments are developed using HyperModels based on + established physical model designs.' + address: 'The University of Auckland, New Zealand' + articleno: 4 + author: 'Renney, Harri and Willemsen, Silvin and Gaster, Benedict and Mitchell, + Tom' + bibtex: "@inproceedings{NIME22_4,\n abstract = {Physical modelling sound synthesis\ + \ methods generate vast and intricate sound spaces that are navigated using meaningful\ + \ parameters. Numerical based physical modelling synthesis methods provide authentic\ + \ representations of the physics they model. Unfortunately, the application of\ + \ these physical models are often limited because of their considerable computational\ + \ requirements. In previous studies, the CPU has been shown to reliably support\ + \ two-dimensional linear finite-difference models in real-time with resolutions\ + \ up to 64x64. However, the near-ubiquitous parallel processing units known as\ + \ GPUs have previously been used to process considerably larger resolutions, as\ + \ high as 512x512 in real-time. GPU programming requires a low-level understanding\ + \ of the architecture, which often imposes a barrier for entry for inexperienced\ + \ practitioners. Therefore, this paper proposes HyperModels, a framework for automating\ + \ the mapping of linear finite-difference based physical modelling synthesis into\ + \ an optimised parallel form suitable for the GPU. An implementation of the design\ + \ is then used to evaluate the objective performance of the framework by comparing\ + \ the automated solution to manually developed equivalents. 
For the majority of\ + \ the extensive performance profiling tests, the auto-generated programs were\ + \ observed to perform only 60% slower but in the worst-case scenario it was 50%\ + \ slower. The initial results suggests that, in most circumstances, the automation\ + \ provided by the framework avoids the lowlevel expertise required to manually\ + \ optimise the GPU, with only a small reduction in performance. However, there\ + \ is still scope to improve the auto-generated optimisations. When comparing the\ + \ performance of CPU to GPU equivalents, the parallel CPU version supports resolutions\ + \ of up to 128x128 whilst the GPU continues to support higher resolutions up to\ + \ 512x512. To conclude the paper, two instruments are developed using HyperModels\ + \ based on established physical model designs.},\n address = {The University of\ + \ Auckland, New Zealand},\n articleno = {4},\n author = {Renney, Harri and Willemsen,\ + \ Silvin and Gaster, Benedict and Mitchell, Tom},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.21428/92fbeb44.98a4210a},\n issn = {2220-4806},\n month = {jun},\n pdf\ + \ = {109.pdf},\n presentation-video = {https://youtu.be/Pb4pAr2v4yU},\n title\ + \ = {{HyperModels} - A Framework for {GPU} Accelerated Physical Modelling Sound\ + \ Synthesis},\n url = {https://doi.org/10.21428%2F92fbeb44.98a4210a},\n year =\ + \ {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176745 + doi: 10.21428/92fbeb44.98a4210a issn: 2220-4806 - keywords: 'Control Voltage, Open Sound Control, Algorithmic Composition, MIDI, Sound - Installations, programmable logic control, synthesizers, electronic music, Sensors, - Actuators, Interaction. 
' - pages: 46--49 - title: The Smart Controller Workbench - url: http://www.nime.org/proceedings/2005/nime2005_046.pdf - year: 2005 + month: jun + pdf: 109.pdf + presentation-video: https://youtu.be/Pb4pAr2v4yU + title: HyperModels - A Framework for GPU Accelerated Physical Modelling Sound Synthesis + url: https://doi.org/10.21428%2F92fbeb44.98a4210a + year: 2022 - ENTRYTYPE: inproceedings - ID: Singer2005 - abstract: 'This paper describes an installation created by LEMUR(League of Electronic - Musical Urban Robots) in January, 2005.The installation included over 30 robotic - musical instrumentsand a multi-projector real-time video projection and wascontrollable - and programmable over a MIDI network. Theinstallation was also controllable remotely - via the Internet andcould be heard and viewed via room mics and a robotic webcam - connected to a streaming server.' - address: 'Vancouver, BC, Canada' - author: 'Singer, Eric and Feddersen, Jeff and Bowen, Bil' - bibtex: "@inproceedings{Singer2005,\n abstract = {This paper describes an installation\ - \ created by LEMUR(League of Electronic Musical Urban Robots) in January, 2005.The\ - \ installation included over 30 robotic musical instrumentsand a multi-projector\ - \ real-time video projection and wascontrollable and programmable over a MIDI\ - \ network. 
Theinstallation was also controllable remotely via the Internet andcould\ - \ be heard and viewed via room mics and a robotic webcam connected to a streaming\ - \ server.},\n address = {Vancouver, BC, Canada},\n author = {Singer, Eric and\ - \ Feddersen, Jeff and Bowen, Bil},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176818},\n\ - \ issn = {2220-4806},\n keywords = {Robotics, music, instruments, MIDI, video,\ - \ interactive, networked, streaming.},\n pages = {50--55},\n title = {A Large-Scale\ - \ Networked Robotic Musical Instrument Installation},\n url = {http://www.nime.org/proceedings/2005/nime2005_050.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_5 + abstract: 'In this paper, we propose a set of reflections to actively incorporate + environmental sustainability instances in the practice of circuit bending. This + proposal combines circuit bending-related concepts with literature from the domain + of sustainable Human-Computer Interaction (HCI). We commence by presenting an + overview of the critical discourse within the New Interfaces for Musical Expression + (NIME) community, and of circuit bending itself—exposing the linkages this practice + has with themes directly related to this research, such as environmental sustainability + and philosophy. Afterwards, we look at how the topic of environmental sustainability + has been discussed, concerning circuit bending, within the NIME literature. We + conclude by developing a list of recommendations for a sustainable circuit bending + practice.' + address: 'The University of Auckland, New Zealand' + articleno: 5 + author: 'Dorigatti, Enrico and Masu, Raul' + bibtex: "@inproceedings{NIME22_5,\n abstract = {In this paper, we propose a set\ + \ of reflections to actively incorporate environmental sustainability instances\ + \ in the practice of circuit bending. 
This proposal combines circuit bending-related\ + \ concepts with literature from the domain of sustainable Human-Computer Interaction\ + \ (HCI). We commence by presenting an overview of the critical discourse within\ + \ the New Interfaces for Musical Expression (NIME) community, and of circuit bending\ + \ itself—exposing the linkages this practice has with themes directly related\ + \ to this research, such as environmental sustainability and philosophy. Afterwards,\ + \ we look at how the topic of environmental sustainability has been discussed,\ + \ concerning circuit bending, within the NIME literature. We conclude by developing\ + \ a list of recommendations for a sustainable circuit bending practice.},\n address\ + \ = {The University of Auckland, New Zealand},\n articleno = {5},\n author = {Dorigatti,\ + \ Enrico and Masu, Raul},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.18502d1d},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {11.pdf},\n presentation-video\ + \ = {https://youtu.be/n3GcaaHkats},\n title = {Circuit Bending and Environmental\ + \ Sustainability: Current Situation and Steps Forward},\n url = {https://doi.org/10.21428%2F92fbeb44.18502d1d},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176818 + doi: 10.21428/92fbeb44.18502d1d issn: 2220-4806 - keywords: 'Robotics, music, instruments, MIDI, video, interactive, networked, streaming.' 
- pages: 50--55 - title: A Large-Scale Networked Robotic Musical Instrument Installation - url: http://www.nime.org/proceedings/2005/nime2005_050.pdf - year: 2005 + month: jun + pdf: 11.pdf + presentation-video: https://youtu.be/n3GcaaHkats + title: 'Circuit Bending and Environmental Sustainability: Current Situation and + Steps Forward' + url: https://doi.org/10.21428%2F92fbeb44.18502d1d + year: 2022 - ENTRYTYPE: inproceedings - ID: Allison2005 - abstract: 'Artists have long sought after alternative controllers, sensors, and - other means for controlling computer-based musical performance in real-time. Traditional - techniques for transmitting the data generated by such devices typically employ - the use of MIDI as the transport protocol. Recently, several devices have been - developed using alternatives to MIDI, including Ethernet-based and USB-based sensor - interfaces. We have designed and produced a system that uses S/PDIF as the transport - mechanism for a sensor interface. This provides robust performance, together with - extremely low latency and high resolution. In our system, data from all sensors - is multiplexed onto the digital audio line and demultiplexed in software on the - computer using standard techniques. We have written demultiplexer objects and - plugins for Max/MSP and Jade, as well as a MIDI Conversion program for interapplicaton - uses, while others are in the works for PD, SuperCollider, and AudioUnits.' - address: 'Vancouver, BC, Canada' - author: 'Allison, Jesse T. and Place, Timothy' - bibtex: "@inproceedings{Allison2005,\n abstract = {Artists have long sought after\ - \ alternative controllers, sensors, and other means for controlling computer-based\ - \ musical performance in real-time. Traditional techniques for transmitting the\ - \ data generated by such devices typically employ the use of MIDI as the transport\ - \ protocol. 
Recently, several devices have been developed using alternatives to\ - \ MIDI, including Ethernet-based and USB-based sensor interfaces. We have designed\ - \ and produced a system that uses S/PDIF as the transport mechanism for a sensor\ - \ interface. This provides robust performance, together with extremely low latency\ - \ and high resolution. In our system, data from all sensors is multiplexed onto\ - \ the digital audio line and demultiplexed in software on the computer using standard\ - \ techniques. We have written demultiplexer objects and plugins for Max/MSP and\ - \ Jade, as well as a MIDI Conversion program for interapplicaton uses, while others\ - \ are in the works for PD, SuperCollider, and AudioUnits.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Allison, Jesse T. and Place, Timothy},\n booktitle\ + ID: NIME22_6 + abstract: 'This paper explores a minimalist approach to live coding using a single + input parameter to manipulate the graph structure of a finite state machine through + a stream of bits. This constitutes an example of bottom-up live coding, which + operates on a low level language to generate a high level structure output. Here + we examine systematically how to apply mappings of continuous gestural interactions + to develop a bottom-up system for predicting programming behaviours. We conducted + a statistical analysis based on a controlled data generation procedure. The findings + concur with the subjective experience of the behavior of the system when the user + modulates the sampling frequency of a variable clock using a knob as an input + device. This suggests that a sequential predictive model may be applied towards + the development of a tactically predictive system according to Tanimoto’s hierarchy + of liveness. The code is provided in a git repository.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 6 + author: 'Diapoulis, Georgios and Zannos, Iannis and Tatar, Kivanç and Dahlstedt, + Palle' + bibtex: "@inproceedings{NIME22_6,\n abstract = {This paper explores a minimalist\ + \ approach to live coding using a single input parameter to manipulate the graph\ + \ structure of a finite state machine through a stream of bits. This constitutes\ + \ an example of bottom-up live coding, which operates on a low level language\ + \ to generate a high level structure output. Here we examine systematically how\ + \ to apply mappings of continuous gestural interactions to develop a bottom-up\ + \ system for predicting programming behaviours. We conducted a statistical analysis\ + \ based on a controlled data generation procedure. The findings concur with the\ + \ subjective experience of the behavior of the system when the user modulates\ + \ the sampling frequency of a variable clock using a knob as an input device.\ + \ This suggests that a sequential predictive model may be applied towards the\ + \ development of a tactically predictive system according to Tanimoto’s hierarchy\ + \ of liveness. 
The code is provided in a git repository.},\n address = {The University\ + \ of Auckland, New Zealand},\n articleno = {6},\n author = {Diapoulis, Georgios\ + \ and Zannos, Iannis and Tatar, Kivan{\\c{c}} and Dahlstedt, Palle},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176693},\n issn = {2220-4806},\n keywords\ - \ = {Teabox, Electrotap, Sensor Interface, High Speed, High Resolution, Sensors,\ - \ S/PDIF},\n pages = {56--59},\n title = {Teabox: A Sensor Data Interface System},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_056.pdf},\n year = {2005}\n\ - }\n" + \ Expression},\n doi = {10.21428/92fbeb44.51fecaab},\n issn = {2220-4806},\n month\ + \ = {jun},\n pdf = {110.pdf},\n presentation-video = {https://youtu.be/L_v5P7jGK8Y},\n\ + \ title = {Bottom-up live coding: Analysis of continuous interactions towards\ + \ predicting programming behaviours},\n url = {https://doi.org/10.21428%2F92fbeb44.51fecaab},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176693 + doi: 10.21428/92fbeb44.51fecaab issn: 2220-4806 - keywords: 'Teabox, Electrotap, Sensor Interface, High Speed, High Resolution, Sensors, - S/PDIF' - pages: 56--59 - title: 'Teabox: A Sensor Data Interface System' - url: http://www.nime.org/proceedings/2005/nime2005_056.pdf - year: 2005 + month: jun + pdf: 110.pdf + presentation-video: https://youtu.be/L_v5P7jGK8Y + title: 'Bottom-up live coding: Analysis of continuous interactions towards predicting + programming behaviours' + url: https://doi.org/10.21428%2F92fbeb44.51fecaab + year: 2022 - ENTRYTYPE: inproceedings - ID: Oore2005 - abstract: 'When learning a classical instrument, people often either take lessons - in which an existing body of “technique” is de- livered, evolved over generations - of performers, or in some cases people will “teach themselves” 
by watching people - play and listening to existing recordings. What does one do with a complex new - digital instrument? In this paper I address this question drawing on my expe- - rience in learning several very different types of sophisticated instruments: - the Glove Talk II real-time gesture-to-speech interface, the Digital Marionette - controller for virtual 3D puppets, and pianos and keyboards. As the primary user - of the first two systems, I have spent hundreds of hours with Digital Marionette - and Glove-Talk II, and thousands of hours with pianos and keyboards (I continue - to work as a professional musician). I will identify some of the under- lying - principles and approaches that I have observed during my learning and playing - experience common to these instru- ments. While typical accounts of users learning - new inter- faces generally focus on reporting beginner’s experiences, for various - practical reasons, this is fundamentally different by focusing on the expert’s - learning experience.' - address: 'Vancouver, BC, Canada' - author: 'Oore, Sageev' - bibtex: "@inproceedings{Oore2005,\n abstract = {When learning a classical instrument,\ - \ people often either take lessons in which an existing body of “technique” is\ - \ de- livered, evolved over generations of performers, or in some cases people\ - \ will “teach themselves” by watching people play and listening to existing recordings.\ - \ What does one do with a complex new digital instrument? In this paper I address\ - \ this question drawing on my expe- rience in learning several very different\ - \ types of sophisticated instruments: the Glove Talk II real-time gesture-to-speech\ - \ interface, the Digital Marionette controller for virtual 3D puppets, and pianos\ - \ and keyboards. 
As the primary user of the first two systems, I have spent hundreds\ - \ of hours with Digital Marionette and Glove-Talk II, and thousands of hours with\ - \ pianos and keyboards (I continue to work as a professional musician). I will\ - \ identify some of the under- lying principles and approaches that I have observed\ - \ during my learning and playing experience common to these instru- ments. While\ - \ typical accounts of users learning new inter- faces generally focus on reporting\ - \ beginner’s experiences, for various practical reasons, this is fundamentally\ - \ different by focusing on the expert’s learning experience.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Oore, Sageev},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176794},\n\ - \ issn = {2220-4806},\n keywords = {performance, learning new instruments },\n\ - \ pages = {60--64},\n title = {Learning Advanced Skills on New Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_060.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_7 + abstract: 'Deformable interfaces are an emerging area of Human- Computer Interaction + (HCI) research that offers nuanced and responsive physical interaction with digital + technologies. They are well suited to creative and expressive forms of HCI such + as Digital Musical Interfaces (DMIs). However, research on the design of deformable + DMIs is limited. This paper explores the role that deformable interfaces might + play in DMI design. We conducted an online study with 23 DMI designers in which + they were invited to create non-functional deformable DMIs together. Our results + suggest forms of gestural input and sound mappings that deformable interfaces + intuitively lend themselves to for DMI design. 
From our results, we highlight + four styles of DMI that deformable interfaces might be most suited to, and suggest + the kinds of experience that deformable DMIs might be most compelling for musicians + and audiences. We discuss how DMI designers explore deformable materials and gestures + input and the role of unexpected affordances in the design process.' + address: 'The University of Auckland, New Zealand' + articleno: 7 + author: 'Zheng, Jianing and Bryan-Kinns, Nick' + bibtex: "@inproceedings{NIME22_7,\n abstract = {Deformable interfaces are an emerging\ + \ area of Human- Computer Interaction (HCI) research that offers nuanced and responsive\ + \ physical interaction with digital technologies. They are well suited to creative\ + \ and expressive forms of HCI such as Digital Musical Interfaces (DMIs). However,\ + \ research on the design of deformable DMIs is limited. This paper explores the\ + \ role that deformable interfaces might play in DMI design. We conducted an online\ + \ study with 23 DMI designers in which they were invited to create non-functional\ + \ deformable DMIs together. Our results suggest forms of gestural input and sound\ + \ mappings that deformable interfaces intuitively lend themselves to for DMI design.\ + \ From our results, we highlight four styles of DMI that deformable interfaces\ + \ might be most suited to, and suggest the kinds of experience that deformable\ + \ DMIs might be most compelling for musicians and audiences. 
We discuss how DMI\ + \ designers explore deformable materials and gestures input and the role of unexpected\ + \ affordances in the design process.},\n address = {The University of Auckland,\ + \ New Zealand},\n articleno = {7},\n author = {Zheng, Jianing and Bryan-Kinns,\ + \ Nick},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.41da9da5},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {111.pdf},\n presentation-video = {https://youtu.be/KHqfxL4F7Bg},\n\ + \ title = {Squeeze, Twist, Stretch: Exploring Deformable Digital Musical Interfaces\ + \ Design Through Non-Functional Prototypes},\n url = {https://doi.org/10.21428%2F92fbeb44.41da9da5},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176794 + doi: 10.21428/92fbeb44.41da9da5 issn: 2220-4806 - keywords: 'performance, learning new instruments ' - pages: 60--64 - title: Learning Advanced Skills on New Instruments - url: http://www.nime.org/proceedings/2005/nime2005_060.pdf - year: 2005 + month: jun + pdf: 111.pdf + presentation-video: https://youtu.be/KHqfxL4F7Bg + title: 'Squeeze, Twist, Stretch: Exploring Deformable Digital Musical Interfaces + Design Through Non-Functional Prototypes' + url: https://doi.org/10.21428%2F92fbeb44.41da9da5 + year: 2022 - ENTRYTYPE: inproceedings - ID: Livingstone2005 - abstract: 'Haptic and Gestural interfaces offer new and novel ways of interacting - with and creating new musical forms. Increasingly it is the integration of these - interfaces with more complex adaptive systems or dynamically variable social contexts - that provide significant opportunities for socially mediated composition through - conscious and subconscious interaction. 
This paper includes a brief comparative - survey of related works and articulates the design process and interaction modes - or ‘play states’ for the Orb3 interface – 3 wireless mobile globes that collect - and share environmental data and user interactions to synthesize and diffuse sound - material in real time, a ‘social’ group of composer and listener objects. The - physical interfaces are integrated into a portable 8 channel auditory sphere for - collaborative interaction but can also be integrated with large-scale social environments, - such as atria and other public spaces with embedded sound systems.' - address: 'Vancouver, BC, Canada' - author: 'Livingstone, Dan and Miranda, Eduardo' - bibtex: "@inproceedings{Livingstone2005,\n abstract = {Haptic and Gestural interfaces\ - \ offer new and novel ways of interacting with and creating new musical forms.\ - \ Increasingly it is the integration of these interfaces with more complex adaptive\ - \ systems or dynamically variable social contexts that provide significant opportunities\ - \ for socially mediated composition through conscious and subconscious interaction.\ - \ This paper includes a brief comparative survey of related works and articulates\ - \ the design process and interaction modes or ‘play states’ for the Orb3 interface\ - \ – 3 wireless mobile globes that collect and share environmental data and user\ - \ interactions to synthesize and diffuse sound material in real time, a ‘social’\ - \ group of composer and listener objects. 
The physical interfaces are integrated\ - \ into a portable 8 channel auditory sphere for collaborative interaction but\ - \ can also be integrated with large-scale social environments, such as atria and\ - \ other public spaces with embedded sound systems.},\n address = {Vancouver, BC,\ - \ Canada},\n author = {Livingstone, Dan and Miranda, Eduardo},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176774},\n issn = {2220-4806},\n keywords = {Adaptive\ - \ System, Sound Installation, Smart Interfaces, Music Robots, Spatial Music, Conscious\ - \ Subconscious Interaction.},\n pages = {65--69},\n title = {Orb3 -- Adaptive\ - \ Interface Design for Real time Sound Synthesis \\& Diffusion within Socially\ - \ Mediated Spaces},\n url = {http://www.nime.org/proceedings/2005/nime2005_065.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_8 + abstract: 'We present a systematic review of voice-centered NIME publications from + the past two decades. Musical expression has been a key driver of innovation in + voicebased technologies, from traditional architectures that amplify singing to + cutting-edge research in vocal synthesis. NIME conference has emerged as a prime + venue for innovative vocal interfaces. However, there hasn’t been a systematic + analysis of all voice-related work or an effort to characterize their features. + Analyzing trends in Vocal NIMEs can help the community better understand common + interests, identify uncharted territories, and explore directions for future research. + We identified a corpus of 98 papers about Vocal NIMEs from 2001 to 2021, which + we analyzed in 3 ways. First, we automatically extracted latent themes and possible + categories using natural language processing. 
Taking inspiration from concepts + surfaced through this process, we then defined several core dimensions with associated + descriptors of Vocal NIMEs and assigned each paper relevant descriptors under + each dimension. Finally, we defined a classification system, which we then used + to uniquely and more precisely situate each paper on a map, taking into account + the overall goals of each work. Based on our analyses, we present trends and challenges, + including questions of gender and diversity in our community, and reflect on opportunities + for future work.' + address: 'The University of Auckland, New Zealand' + articleno: 8 + author: 'Kleinberger, Rébecca and Singh, Nikhil and Xiao, Xiao and Troyer, Akito + van' + bibtex: "@inproceedings{NIME22_8,\n abstract = {We present a systematic review of\ + \ voice-centered NIME publications from the past two decades. Musical expression\ + \ has been a key driver of innovation in voicebased technologies, from traditional\ + \ architectures that amplify singing to cutting-edge research in vocal synthesis.\ + \ NIME conference has emerged as a prime venue for innovative vocal interfaces.\ + \ However, there hasn’t been a systematic analysis of all voice-related work or\ + \ an effort to characterize their features. Analyzing trends in Vocal NIMEs can\ + \ help the community better understand common interests, identify uncharted territories,\ + \ and explore directions for future research. We identified a corpus of 98 papers\ + \ about Vocal NIMEs from 2001 to 2021, which we analyzed in 3 ways. First, we\ + \ automatically extracted latent themes and possible categories using natural\ + \ language processing. 
Taking inspiration from concepts surfaced through this\ + \ process, we then defined several core dimensions with associated descriptors\ + \ of Vocal NIMEs and assigned each paper relevant descriptors under each dimension.\ + \ Finally, we defined a classification system, which we then used to uniquely\ + \ and more precisely situate each paper on a map, taking into account the overall\ + \ goals of each work. Based on our analyses, we present trends and challenges,\ + \ including questions of gender and diversity in our community, and reflect on\ + \ opportunities for future work.},\n address = {The University of Auckland, New\ + \ Zealand},\n articleno = {8},\n author = {Kleinberger, R{\\'{e}}becca and Singh,\ + \ Nikhil and Xiao, Xiao and Troyer, Akito van},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.21428/92fbeb44.4308fb94},\n issn = {2220-4806},\n month = {jun},\n pdf\ + \ = {112.pdf},\n presentation-video = {https://youtu.be/PUlGjAblfPM},\n title\ + \ = {Voice at {NIME}: a Taxonomy of New Interfaces for Vocal Musical Expression},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.4308fb94},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176774 + doi: 10.21428/92fbeb44.4308fb94 issn: 2220-4806 - keywords: 'Adaptive System, Sound Installation, Smart Interfaces, Music Robots, - Spatial Music, Conscious Subconscious Interaction.' 
- pages: 65--69 - title: Orb3 -- Adaptive Interface Design for Real time Sound Synthesis & Diffusion - within Socially Mediated Spaces - url: http://www.nime.org/proceedings/2005/nime2005_065.pdf - year: 2005 + month: jun + pdf: 112.pdf + presentation-video: https://youtu.be/PUlGjAblfPM + title: 'Voice at NIME: a Taxonomy of New Interfaces for Vocal Musical Expression' + url: https://doi.org/10.21428%2F92fbeb44.4308fb94 + year: 2022 - ENTRYTYPE: inproceedings - ID: Essl2005 - abstract: 'The Scrubber is a general controller for friction-induced sound. Allowing - the user to engage in familiar gestures and feel- ing actual friction, the synthesized - sound gains an evocative nature for the performer and a meaningful relationship - between gesture and sound for the audience. It can control a variety of sound - synthesis algorithms of which we demonstrate examples based on granular synthesis, - wave-table synthesis and physically informed modeling.' - address: 'Vancouver, BC, Canada' - author: 'Essl, Georg and O''Modhrain, Sile' - bibtex: "@inproceedings{Essl2005,\n abstract = {The Scrubber is a general controller\ - \ for friction-induced sound. Allowing the user to engage in familiar gestures\ - \ and feel- ing actual friction, the synthesized sound gains an evocative nature\ - \ for the performer and a meaningful relationship between gesture and sound for\ - \ the audience. 
It can control a variety of sound synthesis algorithms of which\ - \ we demonstrate examples based on granular synthesis, wave-table synthesis and\ - \ physically informed modeling.},\n address = {Vancouver, BC, Canada},\n author\ - \ = {Essl, Georg and O'Modhrain, Sile},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176737},\n\ - \ issn = {2220-4806},\n pages = {70--75},\n title = {Scrubber: An Interface for\ - \ Friction-induced Sounds},\n url = {http://www.nime.org/proceedings/2005/nime2005_070.pdf},\n\ - \ year = {2005}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176737 - issn: 2220-4806 - pages: 70--75 - title: 'Scrubber: An Interface for Friction-induced Sounds' - url: http://www.nime.org/proceedings/2005/nime2005_070.pdf - year: 2005 - - -- ENTRYTYPE: inproceedings - ID: Topper2005 - abstract: 'WISEAR (Wireless Sensor Array) is a Linux based Embeddedx86 TS-5600 SBC - (Single Board Computer) specifically configured for use with music, dance and - video performance technologies. The device offers a general purpose solution to - many sensor and gestural controller problems. Much like the general purpose CPU, - which resolved many issues of its predecessor (ie., the special purpose DSP chip), - the WISEAR box attempts to move beyond custom made BASIC stamp projects that are - often created on a per-performance basis and rely heavily on MIDI. WISEAR is both - lightweight and wireless. Unlike several commercial alternatives, it is also a - completely open source project. PAIR (Partnering Analysis in Real Time) exploits - the power of WISEAR and revisits the potential of hardware-based systems for real-time - measurement of bodily movement. Our goal was to create a robust yet adaptable - system that could attend to both general and precise aspects of performer interaction. 
- Though certain commonalities with existing hardware systems exist, our PAIR system - takes a fundamentally different approach by focusing specifically on the interaction - of two or more dancers.' - address: 'Vancouver, BC, Canada' - author: 'Topper, David and Swendsen, Peter V.' - bibtex: "@inproceedings{Topper2005,\n abstract = {WISEAR (Wireless Sensor Array)\ - \ is a Linux based Embeddedx86 TS-5600 SBC (Single Board Computer) specifically\ - \ configured for use with music, dance and video performance technologies. The\ - \ device offers a general purpose solution to many sensor and gestural controller\ - \ problems. Much like the general purpose CPU, which resolved many issues of its\ - \ predecessor (ie., the special purpose DSP chip), the WISEAR box attempts to\ - \ move beyond custom made BASIC stamp projects that are often created on a per-performance\ - \ basis and rely heavily on MIDI. WISEAR is both lightweight and wireless. Unlike\ - \ several commercial alternatives, it is also a completely open source project.\ - \ PAIR (Partnering Analysis in Real Time) exploits the power of WISEAR and revisits\ - \ the potential of hardware-based systems for real-time measurement of bodily\ - \ movement. Our goal was to create a robust yet adaptable system that could attend\ - \ to both general and precise aspects of performer interaction. 
Though certain\ - \ commonalities with existing hardware systems exist, our PAIR system takes a\ - \ fundamentally different approach by focusing specifically on the interaction\ - \ of two or more dancers.},\n address = {Vancouver, BC, Canada},\n author = {Topper,\ - \ David and Swendsen, Peter V.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176830},\n\ - \ issn = {2220-4806},\n pages = {76--79},\n title = {Wireless Dance Control :\ - \ PAIR and WISEAR},\n url = {http://www.nime.org/proceedings/2005/nime2005_076.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_9 + abstract: 'Digital musical instruments (DMIs) built to be used in performance settings + need to go beyond the prototypical stage of design to become robust, reliable, + and responsive devices for extensive usage. This paper presents the Tapbox and + the Slapbox, two generations of a standalone DMI built for percussion practice. + After summarizing the requirements for performance DMIs from previous surveys, + we introduce the Tapbox and comment on its strong and weak points. We then focus + on the design process of the Slapbox, an improved version that captures a broader + range of percussive gestures. Design tasks are reflected upon, including enclosure + design, sensor evaluations, gesture extraction algorithms, and sound synthesis + methods and mappings. Practical exploration of the Slapbox by two professional + percussionists is performed and their insights summarized, providing directions + for future work.' + address: 'The University of Auckland, New Zealand' + articleno: 9 + author: 'Boettcher, Brady and Sullivan, John and Wanderley, Marcelo M.' + bibtex: "@inproceedings{NIME22_9,\n abstract = {Digital musical instruments (DMIs)\ + \ built to be used in performance settings need to go beyond the prototypical\ + \ stage of design to become robust, reliable, and responsive devices for extensive\ + \ usage. 
This paper presents the Tapbox and the Slapbox, two generations of a\ + \ standalone DMI built for percussion practice. After summarizing the requirements\ + \ for performance DMIs from previous surveys, we introduce the Tapbox and comment\ + \ on its strong and weak points. We then focus on the design process of the Slapbox,\ + \ an improved version that captures a broader range of percussive gestures. Design\ + \ tasks are reflected upon, including enclosure design, sensor evaluations, gesture\ + \ extraction algorithms, and sound synthesis methods and mappings. Practical exploration\ + \ of the Slapbox by two professional percussionists is performed and their insights\ + \ summarized, providing directions for future work.},\n address = {The University\ + \ of Auckland, New Zealand},\n articleno = {9},\n author = {Boettcher, Brady and\ + \ Sullivan, John and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.78fd89cc},\n issn = {2220-4806},\n month = {jun},\n pdf =\ + \ {114.pdf},\n presentation-video = {https://youtu.be/NkYGAp4rmj8},\n title =\ + \ {Slapbox: Redesign of a Digital Musical Instrument Towards Reliable Long-Term\ + \ Practice},\n url = {https://doi.org/10.21428%2F92fbeb44.78fd89cc},\n year =\ + \ {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176830 + doi: 10.21428/92fbeb44.78fd89cc issn: 2220-4806 - pages: 76--79 - title: 'Wireless Dance Control : PAIR and WISEAR' - url: http://www.nime.org/proceedings/2005/nime2005_076.pdf - year: 2005 + month: jun + pdf: 114.pdf + presentation-video: https://youtu.be/NkYGAp4rmj8 + title: 'Slapbox: Redesign of a Digital Musical Instrument Towards Reliable Long-Term + Practice' + url: https://doi.org/10.21428%2F92fbeb44.78fd89cc + year: 2022 - ENTRYTYPE: inproceedings - ID: Dannenberg2005 - abstract: 'McBlare is a robotic 
bagpipe player developed by the Robotics Institute - at Carnegie Mellon University. McBlare plays a standard set of bagpipes, using - a custom air compressor to supply air and electromechanical ``fingers'''' to control - the chanter. McBlare is MIDI controlled, allowing for simple interfacing to a - keyboard, computer, or hardware sequencer. The control mechanism exceeds the measured - speed of expert human performers. On the other hand, human performers surpass - McBlare in their ability to compensate for limitations and imperfections in reeds, - and we discuss future enhancements to address these problems. McBlare has been - used to perform traditional bagpipe music as well as experimental computer generated - music. ' - address: 'Vancouver, BC, Canada' - author: 'Dannenberg, Roger B. and Brown, Ben and Zeglin, Garth and Lupish, Ron' - bibtex: "@inproceedings{Dannenberg2005,\n abstract = {McBlare is a robotic bagpipe\ - \ player developed by the Robotics Institute at Carnegie Mellon University. McBlare\ - \ plays a standard set of bagpipes, using a custom air compressor to supply air\ - \ and electromechanical ``fingers'' to control the chanter. McBlare is MIDI controlled,\ - \ allowing for simple interfacing to a keyboard, computer, or hardware sequencer.\ - \ The control mechanism exceeds the measured speed of expert human performers.\ - \ On the other hand, human performers surpass McBlare in their ability to compensate\ - \ for limitations and imperfections in reeds, and we discuss future enhancements\ - \ to address these problems. McBlare has been used to perform traditional bagpipe\ - \ music as well as experimental computer generated music. },\n address = {Vancouver,\ - \ BC, Canada},\n author = {Dannenberg, Roger B. 
and Brown, Ben and Zeglin, Garth\ - \ and Lupish, Ron},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176729},\n\ - \ issn = {2220-4806},\n keywords = {bagpipes, robot, music, instrument, MIDI },\n\ - \ pages = {80--84},\n title = {McBlare: A Robotic Bagpipe Player},\n url = {http://www.nime.org/proceedings/2005/nime2005_080.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_10 + abstract: 'This paper presents Mapper4Live, a software plugin made for the popular + digital audio workstation software Ableton Live. Mapper4Live exposes Ableton’s + synthesis and effect parameters on the distributed libmapper signal mapping network, + providing new opportunities for interaction between software and hardware synths, + audio effects, and controllers. The plugin’s uses and relevance in research, music + production and musical performance settings are explored, detailing the development + journey and ideas for future work on the project.' + address: 'The University of Auckland, New Zealand' + articleno: 10 + author: 'Boettcher, Brady and Malloch, Joseph and Wang, Johnty and Wanderley, Marcelo + M.' + bibtex: "@inproceedings{NIME22_10,\n abstract = {This paper presents Mapper4Live,\ + \ a software plugin made for the popular digital audio workstation software Ableton\ + \ Live. Mapper4Live exposes Ableton’s synthesis and effect parameters on the distributed\ + \ libmapper signal mapping network, providing new opportunities for interaction\ + \ between software and hardware synths, audio effects, and controllers. 
The plugin’s\ + \ uses and relevance in research, music production and musical performance settings\ + \ are explored, detailing the development journey and ideas for future work on\ + \ the project.},\n address = {The University of Auckland, New Zealand},\n articleno\ + \ = {10},\n author = {Boettcher, Brady and Malloch, Joseph and Wang, Johnty and\ + \ Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.625fbdbf},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {115.pdf},\n presentation-video\ + \ = {https://youtu.be/Sv3v3Jmemp0},\n title = {Mapper4Live: Using Control Structures\ + \ to Embed Complex Mapping Tools into Ableton Live},\n url = {https://doi.org/10.21428%2F92fbeb44.625fbdbf},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176729 + doi: 10.21428/92fbeb44.625fbdbf issn: 2220-4806 - keywords: 'bagpipes, robot, music, instrument, MIDI ' - pages: 80--84 - title: 'McBlare: A Robotic Bagpipe Player' - url: http://www.nime.org/proceedings/2005/nime2005_080.pdf - year: 2005 + month: jun + pdf: 115.pdf + presentation-video: https://youtu.be/Sv3v3Jmemp0 + title: 'Mapper4Live: Using Control Structures to Embed Complex Mapping Tools into + Ableton Live' + url: https://doi.org/10.21428%2F92fbeb44.625fbdbf + year: 2022 - ENTRYTYPE: inproceedings - ID: Bevilacqua2005 - abstract: 'In this report, we describe our development on the Max/MSPtoolbox MnM - dedicated to mapping between gesture andsound, and more generally to statistical - and machine learningmethods. This library is built on top of the FTM library, - whichenables the efficient use of matrices and other data structuresin Max/MSP. - Mapping examples are described based onvarious matrix manipulations such as Single - ValueDecomposition. The FTM and MnM libraries are freelyavailable.' 
- address: 'Vancouver, BC, Canada' - author: 'Bevilacqua, Frédéric and Müller, Rémy and Schnell, Norbert' - bibtex: "@inproceedings{Bevilacqua2005,\n abstract = {In this report, we describe\ - \ our development on the Max/MSPtoolbox MnM dedicated to mapping between gesture\ - \ andsound, and more generally to statistical and machine learningmethods. This\ - \ library is built on top of the FTM library, whichenables the efficient use of\ - \ matrices and other data structuresin Max/MSP. Mapping examples are described\ - \ based onvarious matrix manipulations such as Single ValueDecomposition. The\ - \ FTM and MnM libraries are freelyavailable.},\n address = {Vancouver, BC, Canada},\n\ - \ author = {Bevilacqua, Fr\\'{e}d\\'{e}ric and M\\''{u}ller, R\\'{e}my and Schnell,\ - \ Norbert},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176703},\n issn\ - \ = {2220-4806},\n keywords = {Mapping, interface design, matrix, Max/MSP. },\n\ - \ pages = {85--88},\n title = {MnM: a Max/MSP mapping toolbox},\n url = {http://www.nime.org/proceedings/2005/nime2005_085.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_11 + abstract: 'Music technology ensembles—often consisting of multiple laptops as the + performers’ primary instrument— provide collaborative artistic experiences for + electronic musicians. In an effort to remove the significant technical and financial + barriers that laptops can present to performers looking to start their own group, + this paper proposes a solution in the form of the Norns Shield, a computer music + instrument (CMI) that requires minimal set-up and promotes immediate music-making + to performers of all skill levels. 
Prior research centered on using alternative + CMIs to supplant laptops in ensemble settings is discussed, and the benefits of + adopting the Norns Shield in service of democratizing and diversifying the music + technology ensemble are demonstrated in a discussion centered on the University + of Texas Rio Grande Valley New Music Ensemble’s adoption of the instrument. A + description of two software packages developed by the author showcases an extension + of the instrument’s abilities to share collaborative control data between internet-enabled + CMIs and to remotely manage script launching and parameter configuration across + a group of Norns Shields, providing resources for ensembles interested in incorporating + the device into their ranks.' + address: 'The University of Auckland, New Zealand' + articleno: 11 + author: 'Marasco, Anthony T.' + bibtex: "@inproceedings{NIME22_11,\n abstract = {Music technology ensembles—often\ + \ consisting of multiple laptops as the performers’ primary instrument— provide\ + \ collaborative artistic experiences for electronic musicians. In an effort to\ + \ remove the significant technical and financial barriers that laptops can present\ + \ to performers looking to start their own group, this paper proposes a solution\ + \ in the form of the Norns Shield, a computer music instrument (CMI) that requires\ + \ minimal set-up and promotes immediate music-making to performers of all skill\ + \ levels. Prior research centered on using alternative CMIs to supplant laptops\ + \ in ensemble settings is discussed, and the benefits of adopting the Norns Shield\ + \ in service of democratizing and diversifying the music technology ensemble are\ + \ demonstrated in a discussion centered on the University of Texas Rio Grande\ + \ Valley New Music Ensemble’s adoption of the instrument. 
A description of two\ + \ software packages developed by the author showcases an extension of the instrument’s\ + \ abilities to share collaborative control data between internet-enabled CMIs\ + \ and to remotely manage script launching and parameter configuration across a\ + \ group of Norns Shields, providing resources for ensembles interested in incorporating\ + \ the device into their ranks.},\n address = {The University of Auckland, New\ + \ Zealand},\n articleno = {11},\n author = {Marasco, Anthony T.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.89003700},\n issn = {2220-4806},\n month\ + \ = {jun},\n pdf = {120.pdf},\n presentation-video = {https://www.youtube.com/watch?v=2XixSYrgRuQ},\n\ + \ title = {Approaching the Norns Shield as a Laptop Alternative for Democratizing\ + \ Music Technology Ensembles},\n url = {https://doi.org/10.21428%2F92fbeb44.89003700},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176703 + doi: 10.21428/92fbeb44.89003700 issn: 2220-4806 - keywords: 'Mapping, interface design, matrix, Max/MSP. ' - pages: 85--88 - title: 'MnM: a Max/MSP mapping toolbox' - url: http://www.nime.org/proceedings/2005/nime2005_085.pdf - year: 2005 + month: jun + pdf: 120.pdf + presentation-video: https://www.youtube.com/watch?v=2XixSYrgRuQ + title: Approaching the Norns Shield as a Laptop Alternative for Democratizing Music + Technology Ensembles + url: https://doi.org/10.21428%2F92fbeb44.89003700 + year: 2022 - ENTRYTYPE: inproceedings - ID: Pelletier2005 - abstract: 'This paper describes DspMap, a graphical user interface (GUI)designed - to assist the dynamic routing of signal generators andmodifiers currently being - developed at the International Academy of Media Arts & Sciences. 
Instead of relying - on traditional boxand-line approaches, DspMap proposes a design paradigm whereconnections - are determined by the relative positions of the variouselements in a single virtual - space.' - address: 'Vancouver, BC, Canada' - author: 'Pelletier, Jean-Marc' - bibtex: "@inproceedings{Pelletier2005,\n abstract = {This paper describes DspMap,\ - \ a graphical user interface (GUI)designed to assist the dynamic routing of signal\ - \ generators andmodifiers currently being developed at the International Academy\ - \ of Media Arts \\& Sciences. Instead of relying on traditional boxand-line approaches,\ - \ DspMap proposes a design paradigm whereconnections are determined by the relative\ - \ positions of the variouselements in a single virtual space.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Pelletier, Jean-Marc},\n booktitle = {Proceedings of\ + ID: NIME22_12 + abstract: 'In this article we present Bandoneon 2.0, an interdisciplinary project + whose main objective is to produce electronic bandoneons in Argentina. The current + prices of bandoneons and the scarcity of manufacturers are endangering the possibility + of access for the new generations to one of the most emblematic instruments of + the culture of this country. Therefore, we aim to create an expressive and accessible + electronic bandoneon that can be used in recreational, academic and professional + contexts, providing an inclusive response to the current sociocultural demand. + The project also involves research on instrument acoustics and the development + of specialized software and hardware tools.' + address: 'The University of Auckland, New Zealand' + articleno: 12 + author: 'Ramos, Juan and Calcagno, Esteban and Vergara, Ramiro Oscar and Riera, + Pablo and Rizza, Joaquín' + bibtex: "@inproceedings{NIME22_12,\n abstract = {In this article we present Bandoneon\ + \ 2.0, an interdisciplinary project whose main objective is to produce electronic\ + \ bandoneons in Argentina. 
The current prices of bandoneons and the scarcity of\ + \ manufacturers are endangering the possibility of access for the new generations\ + \ to one of the most emblematic instruments of the culture of this country. Therefore,\ + \ we aim to create an expressive and accessible electronic bandoneon that can\ + \ be used in recreational, academic and professional contexts, providing an inclusive\ + \ response to the current sociocultural demand. The project also involves research\ + \ on instrument acoustics and the development of specialized software and hardware\ + \ tools.},\n address = {The University of Auckland, New Zealand},\n articleno\ + \ = {12},\n author = {Ramos, Juan and Calcagno, Esteban and Vergara, Ramiro Oscar\ + \ and Riera, Pablo and Rizza, Joaqu{\\'{\\i}}n},\n booktitle = {Proceedings of\ \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176800},\n issn = {2220-4806},\n keywords = {Graphical user\ - \ interface, real-time performance, map, dynamic routing },\n pages = {89--92},\n\ - \ title = {A Graphical Interface for Real-Time Signal Routing},\n url = {http://www.nime.org/proceedings/2005/nime2005_089.pdf},\n\ - \ year = {2005}\n}\n" + \ = {10.21428/92fbeb44.c38bfb86},\n issn = {2220-4806},\n month = {jun},\n pdf\ + \ = {123.pdf},\n presentation-video = {https://www.youtube.com/watch?v=5y4BbQWVNGQ},\n\ + \ title = {Bandoneon 2.0: an interdisciplinary project for research and development\ + \ of electronic bandoneons in Argentina},\n url = {https://doi.org/10.21428%2F92fbeb44.c38bfb86},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176800 + doi: 10.21428/92fbeb44.c38bfb86 issn: 2220-4806 - keywords: 'Graphical user interface, real-time performance, map, dynamic routing ' - pages: 89--92 - title: A Graphical Interface for Real-Time Signal Routing - url: 
http://www.nime.org/proceedings/2005/nime2005_089.pdf - year: 2005 + month: jun + pdf: 123.pdf + presentation-video: https://www.youtube.com/watch?v=5y4BbQWVNGQ + title: 'Bandoneon 2.0: an interdisciplinary project for research and development + of electronic bandoneons in Argentina' + url: https://doi.org/10.21428%2F92fbeb44.c38bfb86 + year: 2022 - ENTRYTYPE: inproceedings - ID: Scavone2005 - abstract: 'The breath pressure signal applied to wind music instruments is generally - considered to be a slowly varying function of time. In a context of music control, - this assumptionimplies that a relatively low digital sample rate (100-200Hz) is - sufficient to capture and/or reproduce this signal.We tested this assumption by - evaluating the frequency content in breath pressure, particularly during the use - of extended performance techniques such as growling, humming,and flutter tonguing. - Our results indicate frequency contentin a breath pressure signal up to about - 10 kHz, with especially significant energy within the first 1000 Hz. We furtherinvestigated - the frequency response of several commerciallyavailable pressure sensors to assess - their responsiveness tohigher frequency breath signals. Though results were mixed,some - devices were found capable of sensing frequencies upto at least 1.5 kHz. Finally, - similar measurements were conducted with Yamaha WX11 and WX5 wind controllers - andresults suggest that their breath pressure outputs are sampled at about 320 - Hz and 280 Hz, respectively.' - address: 'Vancouver, BC, Canada' - author: 'Scavone, Gary and Silva, Andrey R.' - bibtex: "@inproceedings{Scavone2005,\n abstract = {The breath pressure signal applied\ - \ to wind music instruments is generally considered to be a slowly varying function\ - \ of time. 
In a context of music control, this assumptionimplies that a relatively\ - \ low digital sample rate (100-200Hz) is sufficient to capture and/or reproduce\ - \ this signal.We tested this assumption by evaluating the frequency content in\ - \ breath pressure, particularly during the use of extended performance techniques\ - \ such as growling, humming,and flutter tonguing. Our results indicate frequency\ - \ contentin a breath pressure signal up to about 10 kHz, with especially significant\ - \ energy within the first 1000 Hz. We furtherinvestigated the frequency response\ - \ of several commerciallyavailable pressure sensors to assess their responsiveness\ - \ tohigher frequency breath signals. Though results were mixed,some devices were\ - \ found capable of sensing frequencies upto at least 1.5 kHz. Finally, similar\ - \ measurements were conducted with Yamaha WX11 and WX5 wind controllers andresults\ - \ suggest that their breath pressure outputs are sampled at about 320 Hz and 280\ - \ Hz, respectively.},\n address = {Vancouver, BC, Canada},\n author = {Scavone,\ - \ Gary and Silva, Andrey R.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176810},\n\ - \ issn = {2220-4806},\n keywords = {Breath Control, Wind Controller, Breath Sensors\ - \ },\n pages = {93--96},\n title = {Frequency Content of Breath Pressure and Implications\ - \ for Use in Control},\n url = {http://www.nime.org/proceedings/2005/nime2005_093.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_13 + abstract: 'The On Board Call is a bespoke musical interface designed to engage the + general public’s interest in wildlife sounds—such as bird, frog or animal calls—through + imitation and interaction. The device is a handheld, battery-operated, microprocessor-based + machine that synthesizes sounds using frequency modulation synthesis methods. 
+ It includes a small amplifier and loudspeaker for playback and employs an accelerometer + and force sensor that register gestural motions that control sound parameters + in real time. The device is handmade from off-the-shelf components onto a specially + designed PCB and laser cut wooden boards. Development versions of the device have + been tested in wildlife listening contexts and in location-based ensemble performance. + The device is simple to use, compact and inexpensive to facilitate use in community-based + active listening workshops intended to enhance user’s appreciation of the eco + acoustic richness of natural environments. Unlike most of the previous work in + wildlife call imitation, the Call does not simply play back recorded wildlife + sounds, it is designed for performative interaction by a user to bring synthesized + sounds to life and imbue them with expression.' + address: 'The University of Auckland, New Zealand' + articleno: 13 + author: 'Brown, Andrew R.' + bibtex: "@inproceedings{NIME22_13,\n abstract = {The On Board Call is a bespoke\ + \ musical interface designed to engage the general public’s interest in wildlife\ + \ sounds—such as bird, frog or animal calls—through imitation and interaction.\ + \ The device is a handheld, battery-operated, microprocessor-based machine that\ + \ synthesizes sounds using frequency modulation synthesis methods. It includes\ + \ a small amplifier and loudspeaker for playback and employs an accelerometer\ + \ and force sensor that register gestural motions that control sound parameters\ + \ in real time. The device is handmade from off-the-shelf components onto a specially\ + \ designed PCB and laser cut wooden boards. Development versions of the device\ + \ have been tested in wildlife listening contexts and in location-based ensemble\ + \ performance. 
The device is simple to use, compact and inexpensive to facilitate\ + \ use in community-based active listening workshops intended to enhance user’s\ + \ appreciation of the eco acoustic richness of natural environments. Unlike most\ + \ of the previous work in wildlife call imitation, the Call does not simply play\ + \ back recorded wildlife sounds, it is designed for performative interaction by\ + \ a user to bring synthesized sounds to life and imbue them with expression.},\n\ + \ address = {The University of Auckland, New Zealand},\n articleno = {13},\n author\ + \ = {Brown, Andrew R.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.71a5a0ba},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {125.pdf},\n presentation-video\ + \ = {https://www.youtube.com/watch?v=iBTBPpaSGi8},\n title = {On Board Call: A\ + \ Gestural Wildlife Imitation Machine},\n url = {https://doi.org/10.21428%2F92fbeb44.71a5a0ba},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176810 + doi: 10.21428/92fbeb44.71a5a0ba issn: 2220-4806 - keywords: 'Breath Control, Wind Controller, Breath Sensors ' - pages: 93--96 - title: Frequency Content of Breath Pressure and Implications for Use in Control - url: http://www.nime.org/proceedings/2005/nime2005_093.pdf - year: 2005 + month: jun + pdf: 125.pdf + presentation-video: https://www.youtube.com/watch?v=iBTBPpaSGi8 + title: 'On Board Call: A Gestural Wildlife Imitation Machine' + url: https://doi.org/10.21428%2F92fbeb44.71a5a0ba + year: 2022 - ENTRYTYPE: inproceedings - ID: Crevoisier2005 - abstract: 'Tangible Acoustic Interfaces (TAI) rely on various acousticsensing technologies, - such as sound source location and acoustic imaging, to detect the position of - contact of users interacting with the surface of solid materials. 
With their ability - to transform almost any physical objects, flat or curved surfaces and walls into - interactive interfaces, acoustic sensing technologies show a promising way to - bring the sense of touch into the realm of computer interaction. Because music - making has been closely related to this sense during centuries, an application - of particular interest is the use of TAI''s for the design of new musical instruments - that matches the physicality and expressiveness of classical instruments. This - paper gives an overview of the various acoustic-sensing technologies involved - in the realisation of TAI''s and develops on the motivation underlying their use - for the design of new musical instruments. ' - address: 'Vancouver, BC, Canada' - author: 'Crevoisier, Alain and Polotti, Pietro' - bibtex: "@inproceedings{Crevoisier2005,\n abstract = {Tangible Acoustic Interfaces\ - \ (TAI) rely on various acousticsensing technologies, such as sound source location\ - \ and acoustic imaging, to detect the position of contact of users interacting\ - \ with the surface of solid materials. With their ability to transform almost\ - \ any physical objects, flat or curved surfaces and walls into interactive interfaces,\ - \ acoustic sensing technologies show a promising way to bring the sense of touch\ - \ into the realm of computer interaction. Because music making has been closely\ - \ related to this sense during centuries, an application of particular interest\ - \ is the use of TAI's for the design of new musical instruments that matches the\ - \ physicality and expressiveness of classical instruments. This paper gives an\ - \ overview of the various acoustic-sensing technologies involved in the realisation\ - \ of TAI's and develops on the motivation underlying their use for the design\ - \ of new musical instruments. 
},\n address = {Vancouver, BC, Canada},\n author\ - \ = {Crevoisier, Alain and Polotti, Pietro},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176727},\n issn = {2220-4806},\n keywords = {Tangible interfaces,\ - \ new musical instruments design. },\n pages = {97--100},\n title = {Tangible\ - \ Acoustic Interfaces and their Applications for the Design of New Musical Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_097.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_14 + abstract: 'In a search for a symbiotic relationship between the digital and physical + worlds, I am developing a hybrid, digital-acoustic wind instrument - the Post-Digital + Sax. As the name implies, the instrument combines the advantages and flexibility + of digital control with a hands-on physical interface and a non-orthodox means + of sound production, in which the airflow, supplied by the player’s lungs, is + the actual sound source. The pitch, however, is controlled digitally, allowing + a wide range of musical material manipulation, bringing the possibilities of a + digitally augmented performance into the realm of acoustic sound.' + address: 'The University of Auckland, New Zealand' + articleno: 14 + author: 'Cybulski, Krzysztof' + bibtex: "@inproceedings{NIME22_14,\n abstract = {In a search for a symbiotic relationship\ + \ between the digital and physical worlds, I am developing a hybrid, digital-acoustic\ + \ wind instrument - the Post-Digital Sax. As the name implies, the instrument\ + \ combines the advantages and flexibility of digital control with a hands-on physical\ + \ interface and a non-orthodox means of sound production, in which the airflow,\ + \ supplied by the player’s lungs, is the actual sound source. 
The pitch, however,\ + \ is controlled digitally, allowing a wide range of musical material manipulation,\ + \ bringing the possibilities of a digitally augmented performance into the realm\ + \ of acoustic sound.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {14},\n author = {Cybulski, Krzysztof},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.756616d4},\n issn = {2220-4806},\n month = {jun},\n\ + \ pdf = {126.pdf},\n presentation-video = {https://youtu.be/RnuEvjMdEj4},\n title\ + \ = {Post-digital sax - a digitally controlled acoustic single-reed woodwind instrument},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.756616d4},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176727 + doi: 10.21428/92fbeb44.756616d4 issn: 2220-4806 - keywords: 'Tangible interfaces, new musical instruments design. ' - pages: 97--100 - title: Tangible Acoustic Interfaces and their Applications for the Design of New - Musical Instruments - url: http://www.nime.org/proceedings/2005/nime2005_097.pdf - year: 2005 + month: jun + pdf: 126.pdf + presentation-video: https://youtu.be/RnuEvjMdEj4 + title: Post-digital sax - a digitally controlled acoustic single-reed woodwind instrument + url: https://doi.org/10.21428%2F92fbeb44.756616d4 + year: 2022 - ENTRYTYPE: inproceedings - ID: Bencina2005 - abstract: 'This report describes The Metasurface – a mapping interface supporting - interactive design of two-to-many mappings through the placement and interpolation - of parameter snapshots on a plane. The Metasurface employs natural neighbour interpolation, - a local interpolation method based on Voronoi tessellation, to interpolate between - parameter snapshots. 
Compared to global field based methods, natural neighbour - interpolation offers increased predictability and the ability to represent multi-scale - surfaces. An implementation of the Metasurface in the AudioMulch software environment - is presented and key architectural features of AudioMulch which facilitate this - implementation are discussed.' - address: 'Vancouver, BC, Canada' - author: 'Bencina, Ross' - bibtex: "@inproceedings{Bencina2005,\n abstract = {This report describes The Metasurface\ - \ – a mapping interface supporting interactive design of two-to-many mappings\ - \ through the placement and interpolation of parameter snapshots on a plane. The\ - \ Metasurface employs natural neighbour interpolation, a local interpolation method\ - \ based on Voronoi tessellation, to interpolate between parameter snapshots. Compared\ - \ to global field based methods, natural neighbour interpolation offers increased\ - \ predictability and the ability to represent multi-scale surfaces. An implementation\ - \ of the Metasurface in the AudioMulch software environment is presented and key\ - \ architectural features of AudioMulch which facilitate this implementation are\ - \ discussed.},\n address = {Vancouver, BC, Canada},\n author = {Bencina, Ross},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176701},\n issn = {2220-4806},\n\ - \ keywords = {computational geometry,design,design support,high-level control,interpolation,mapping,of\ - \ interpo-,this section reviews related,user interface,work in the field},\n pages\ - \ = {101--104},\n title = {The Metasurface -- Applying Natural Neighbour Interpolation\ - \ to Two-to-Many Mapping},\n url = {http://www.nime.org/proceedings/2005/nime2005_101.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_15 + abstract: 'An emerging approach to building new musical instruments is based on + training neural networks to generate audio conditioned 
upon parametric input. + We use the term "generative models" rather than "musical instruments" for the + trained networks because it reflects the statistical way the instruments are trained + to "model" the association between parameters and the distribution of audio data, + and because "musical" carries historical baggage as a reference to a restricted + domain of sound. Generative models are musical instruments in that they produce + a prescribed range of sound playable through the expressive manipulation of an + interface. To learn the mapping from interface to audio, generative models require + large amounts of parametrically labeled audio data. This paper introduces the + Synthetic Audio Textures (Syn- Tex1) collection of data set generators. SynTex + is a database of parameterized audio textures and a suite of tools for creating + and labeling datasets designed for training and testing generative neural networks + for parametrically conditioned sound synthesis. While there are many existing + labeled speech and traditional musical instrument databases available for training + generative models, most datasets of general (e.g. environmental) audio are oriented + and labeled for the purpose of classification rather than expressive musical generation. + SynTex is designed to provide an open shareable reference set of audio for creating + generative sound models including their interfaces. SynTex sound sets are synthetically + generated. This facilitates dense and accurate labeling necessary for conditionally + training generative networks conditionally dependent on input parameter values. + SynTex has several characteristics designed to support a data-centric approach + to developing, exploring, training, and testing generative models.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 15 + author: 'Wyse, Lonce and Ravikumar, Prashanth Thattai' + bibtex: "@inproceedings{NIME22_15,\n abstract = {An emerging approach to building\ + \ new musical instruments is based on training neural networks to generate audio\ + \ conditioned upon parametric input. We use the term \"generative models\" rather\ + \ than \"musical instruments\" for the trained networks because it reflects the\ + \ statistical way the instruments are trained to \"model\" the association between\ + \ parameters and the distribution of audio data, and because \"musical\" carries\ + \ historical baggage as a reference to a restricted domain of sound. Generative\ + \ models are musical instruments in that they produce a prescribed range of sound\ + \ playable through the expressive manipulation of an interface. To learn the mapping\ + \ from interface to audio, generative models require large amounts of parametrically\ + \ labeled audio data. This paper introduces the Synthetic Audio Textures (Syn-\ + \ Tex1) collection of data set generators. SynTex is a database of parameterized\ + \ audio textures and a suite of tools for creating and labeling datasets designed\ + \ for training and testing generative neural networks for parametrically conditioned\ + \ sound synthesis. While there are many existing labeled speech and traditional\ + \ musical instrument databases available for training generative models, most\ + \ datasets of general (e.g. environmental) audio are oriented and labeled for\ + \ the purpose of classification rather than expressive musical generation. SynTex\ + \ is designed to provide an open shareable reference set of audio for creating\ + \ generative sound models including their interfaces. SynTex sound sets are synthetically\ + \ generated. 
This facilitates dense and accurate labeling necessary for conditionally\ + \ training generative networks conditionally dependent on input parameter values.\ + \ SynTex has several characteristics designed to support a data-centric approach\ + \ to developing, exploring, training, and testing generative models.},\n address\ + \ = {The University of Auckland, New Zealand},\n articleno = {15},\n author =\ + \ {Wyse, Lonce and Ravikumar, Prashanth Thattai},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.21428/92fbeb44.0fe70450},\n issn = {2220-4806},\n month = {jun},\n pdf\ + \ = {128.pdf},\n presentation-video = {https://youtu.be/KZHXck9c75s},\n title\ + \ = {Syntex: parametric audio texture datasets for conditional training of instrumental\ + \ interfaces.},\n url = {https://doi.org/10.21428%2F92fbeb44.0fe70450},\n year\ + \ = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176701 + doi: 10.21428/92fbeb44.0fe70450 issn: 2220-4806 - keywords: 'computational geometry,design,design support,high-level control,interpolation,mapping,of - interpo-,this section reviews related,user interface,work in the field' - pages: 101--104 - title: The Metasurface -- Applying Natural Neighbour Interpolation to Two-to-Many - Mapping - url: http://www.nime.org/proceedings/2005/nime2005_101.pdf - year: 2005 + month: jun + pdf: 128.pdf + presentation-video: https://youtu.be/KZHXck9c75s + title: 'Syntex: parametric audio texture datasets for conditional training of instrumental + interfaces.' + url: https://doi.org/10.21428%2F92fbeb44.0fe70450 + year: 2022 - ENTRYTYPE: inproceedings - ID: Silva2005 - abstract: 'This paper aims to present some perspectives on mappingembouchure gestures - of flute players and their use as controlvariables. 
For this purpose, we have - analyzed several typesof sensors, in terms of sensitivity, dimension, accuracy - andprice, which can be used to implement a system capable ofmapping embouchure - parameters such as air jet velocity andair jet direction. Finally, we describe - the implementationof a sensor system used to map embouchure gestures of aclassical - Boehm flute.' - address: 'Vancouver, BC, Canada' - author: 'Silva, Andrey R. and Wanderley, Marcelo M. and Scavone, Gary' - bibtex: "@inproceedings{Silva2005,\n abstract = {This paper aims to present some\ - \ perspectives on mappingembouchure gestures of flute players and their use as\ - \ controlvariables. For this purpose, we have analyzed several typesof sensors,\ - \ in terms of sensitivity, dimension, accuracy andprice, which can be used to\ - \ implement a system capable ofmapping embouchure parameters such as air jet velocity\ - \ andair jet direction. Finally, we describe the implementationof a sensor system\ - \ used to map embouchure gestures of aclassical Boehm flute.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Silva, Andrey R. and Wanderley, Marcelo M. and Scavone,\ - \ Gary},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176814},\n issn = {2220-4806},\n\ - \ keywords = {Embouchure, air pressure sensors, hot wires, mapping, augmented\ - \ flute. },\n pages = {105--108},\n title = {On the Use of Flute Air Jet as A\ - \ Musical Control Variable},\n url = {http://www.nime.org/proceedings/2005/nime2005_105.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_16 + abstract: 'This paper describes a toolkit for analyzing the NIME proceedings archive, + which facilitates the bibliometric study of the conference papers and the identification + of trends and patterns. The toolkit is implemented as a collection of Python methods + that aggregate, scrape and retrieve various meta-data from published papers. 
Extracted + data is stored in a large numeric table as well as plain text files. Analytical + functions within the toolkit can be easily extended or modified. The text mining + script that can be highly customized without the need for programming. The toolkit + uses only publicly available information organized in standard formats, and is + available as open-source software to promote continuous development in step with + the NIME archive.' + address: 'The University of Auckland, New Zealand' + articleno: 16 + author: 'Goode, Jackson and Fasciani, Stefano' + bibtex: "@inproceedings{NIME22_16,\n abstract = {This paper describes a toolkit\ + \ for analyzing the NIME proceedings archive, which facilitates the bibliometric\ + \ study of the conference papers and the identification of trends and patterns.\ + \ The toolkit is implemented as a collection of Python methods that aggregate,\ + \ scrape and retrieve various meta-data from published papers. Extracted data\ + \ is stored in a large numeric table as well as plain text files. Analytical functions\ + \ within the toolkit can be easily extended or modified. The text mining script\ + \ that can be highly customized without the need for programming. 
The toolkit\ + \ uses only publicly available information organized in standard formats, and\ + \ is available as open-source software to promote continuous development in step\ + \ with the NIME archive.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {16},\n author = {Goode, Jackson and Fasciani, Stefano},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.58efca21},\n issn = {2220-4806},\n month\ + \ = {jun},\n pdf = {13.pdf},\n presentation-video = {https://youtu.be/Awp5-oxL-NM},\n\ + \ title = {A Toolkit for the Analysis of the {NIME} Proceedings Archive},\n url\ + \ = {https://doi.org/10.21428%2F92fbeb44.58efca21},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176814 + doi: 10.21428/92fbeb44.58efca21 issn: 2220-4806 - keywords: 'Embouchure, air pressure sensors, hot wires, mapping, augmented flute. ' - pages: 105--108 - title: On the Use of Flute Air Jet as A Musical Control Variable - url: http://www.nime.org/proceedings/2005/nime2005_105.pdf - year: 2005 + month: jun + pdf: 13.pdf + presentation-video: https://youtu.be/Awp5-oxL-NM + title: A Toolkit for the Analysis of the NIME Proceedings Archive + url: https://doi.org/10.21428%2F92fbeb44.58efca21 + year: 2022 - ENTRYTYPE: inproceedings - ID: Rodet2005 - abstract: 'The PHASE project is a research project devoted to the study and the - realization of systems of multi-modal interaction for generation, handling and - control of sound and music. Supported by the network RIAM (Recherche et Innovation - en Audiovisuel et Multimédia), it was carried out by the CEA-LIST for haptic research, - Haption for the realization of the haptic device, Ondim for integration and visual - realization and Ircam for research and realization about sound, music and the - metaphors for interaction. 
The integration of the three modalities offers completely - innovative capacities for interaction. The objectives are scientific, cultural - and educational. Finally, an additional objective was to test such a prototype - system, including its haptic arm, in real conditions for general public and over - a long duration in order to measure its solidity, its reliability and its interest - for users. Thus, during the last three months of the project, a demonstrator was - presented and evaluated in a museum in Paris, in the form of an interactive installation - offering the public a musical game. Different from a video game, the aim is not - to animate the pixels on the screen but to play music and to incite musical awareness.' - address: 'Vancouver, BC, Canada' - author: 'Rodet, Xavier and Lambert, Jean-Philippe and Cahen, Roland and Gaudy, Thomas - and Guedy, Fabrice and Gosselin, Florian and Mobuchon, Pascal' - bibtex: "@inproceedings{Rodet2005,\n abstract = {The PHASE project is a research\ - \ project devoted to the study and the realization of systems of multi-modal interaction\ - \ for generation, handling and control of sound and music. Supported by the network\ - \ RIAM (Recherche et Innovation en Audiovisuel et Multim\\'{e}dia), it was carried\ - \ out by the CEA-LIST for haptic research, Haption for the realization of the\ - \ haptic device, Ondim for integration and visual realization and Ircam for research\ - \ and realization about sound, music and the metaphors for interaction. The integration\ - \ of the three modalities offers completely innovative capacities for interaction.\ - \ The objectives are scientific, cultural and educational. Finally, an additional\ - \ objective was to test such a prototype system, including its haptic arm, in\ - \ real conditions for general public and over a long duration in order to measure\ - \ its solidity, its reliability and its interest for users. 
Thus, during the last\ - \ three months of the project, a demonstrator was presented and evaluated in a\ - \ museum in Paris, in the form of an interactive installation offering the public\ - \ a musical game. Different from a video game, the aim is not to animate the pixels\ - \ on the screen but to play music and to incite musical awareness.},\n address\ - \ = {Vancouver, BC, Canada},\n author = {Rodet, Xavier and Lambert, Jean-Philippe\ - \ and Cahen, Roland and Gaudy, Thomas and Guedy, Fabrice and Gosselin, Florian\ - \ and Mobuchon, Pascal},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176804},\n\ - \ issn = {2220-4806},\n keywords = {Haptic, interaction, sound, music, control,\ - \ installation. },\n pages = {109--114},\n title = {Study of haptic and visual\ - \ interaction for sound and music control in the Phase project},\n url = {http://www.nime.org/proceedings/2005/nime2005_109.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_17 + abstract: 'This paper focuses on the redundancy and physicality of magnetic recording + media as a defining factor in the design of a lo-fi audio device, the Concentric + Sampler. A modified floppy disk drive (FDD) and additional circuitry enables the + FDD to record to and playback audio from a 3.5” floppy disk. The Concentric Sampler + is designed as an instrument for live performance and a tool for sonic manipulation, + resulting in primitive looping and time-based granular synthesis. This paper explains + the motivation and background of the Concentric Sampler, related applications + and approaches, its technical realisation, and its musical possibilities. To conclude, + the Concentric Sampler’s potential as an instrument and compositional tool is + discussed alongside the future possibilities for development.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 17 + author: 'Tate, Timothy' + bibtex: "@inproceedings{NIME22_17,\n abstract = {This paper focuses on the redundancy\ + \ and physicality of magnetic recording media as a defining factor in the design\ + \ of a lo-fi audio device, the Concentric Sampler. A modified floppy disk drive\ + \ (FDD) and additional circuitry enables the FDD to record to and playback audio\ + \ from a 3.5” floppy disk. The Concentric Sampler is designed as an instrument\ + \ for live performance and a tool for sonic manipulation, resulting in primitive\ + \ looping and time-based granular synthesis. This paper explains the motivation\ + \ and background of the Concentric Sampler, related applications and approaches,\ + \ its technical realisation, and its musical possibilities. To conclude, the Concentric\ + \ Sampler’s potential as an instrument and compositional tool is discussed alongside\ + \ the future possibilities for development.},\n address = {The University of Auckland,\ + \ New Zealand},\n articleno = {17},\n author = {Tate, Timothy},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.324729a3},\n issn = {2220-4806},\n month = {jun},\n\ + \ pdf = {131.pdf},\n presentation-video = {https://youtu.be/7Myu1W7tbts},\n title\ + \ = {The Concentric Sampler: A musical instrument from a repurposed floppy disk\ + \ drive},\n url = {https://doi.org/10.21428%2F92fbeb44.324729a3},\n year = {2022}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176804 + doi: 10.21428/92fbeb44.324729a3 issn: 2220-4806 - keywords: 'Haptic, interaction, sound, music, control, installation. 
' - pages: 109--114 - title: Study of haptic and visual interaction for sound and music control in the - Phase project - url: http://www.nime.org/proceedings/2005/nime2005_109.pdf - year: 2005 + month: jun + pdf: 131.pdf + presentation-video: https://youtu.be/7Myu1W7tbts + title: 'The Concentric Sampler: A musical instrument from a repurposed floppy disk + drive' + url: https://doi.org/10.21428%2F92fbeb44.324729a3 + year: 2022 - ENTRYTYPE: inproceedings - ID: Levin2005a - abstract: 'We report on The Manual Input Sessions, a series of audiovisual vignettes - which probe the expressive possibilities of free-form hand gestures. Performed - on a hybrid projection system which combines a traditional analog overhead projector - and a digital PC video projector, our vision-based software instruments generate - dynamic sounds and graphics solely in response to the forms and movements of the - silhouette contours of the user''s hands. Interactions and audiovisual mappings - which make use of both positive (exterior) and negative (interior) contours are - discussed. ' - address: 'Vancouver, BC, Canada' - author: 'Levin, Golan and Lieberman, Zachary' - bibtex: "@inproceedings{Levin2005a,\n abstract = {We report on The Manual Input\ - \ Sessions, a series of audiovisual vignettes which probe the expressive possibilities\ - \ of free-form hand gestures. Performed on a hybrid projection system which combines\ - \ a traditional analog overhead projector and a digital PC video projector, our\ - \ vision-based software instruments generate dynamic sounds and graphics solely\ - \ in response to the forms and movements of the silhouette contours of the user's\ - \ hands. Interactions and audiovisual mappings which make use of both positive\ - \ (exterior) and negative (interior) contours are discussed. 
},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Levin, Golan and Lieberman, Zachary},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176772},\n issn = {2220-4806},\n keywords = {Audiovisual\ - \ performance, hand silhouettes, computer vision, contour analysis, sound-image\ - \ relationships, augmented reality. },\n pages = {115--120},\n title = {Sounds\ - \ from Shapes: Audiovisual Performance with Hand Silhouette Contours in The Manual\ - \ Input Sessions},\n url = {http://www.nime.org/proceedings/2005/nime2005_115.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_18 + abstract: 'Ghost Play is a violin-playing robot that aims to realize bowing and + fingering similar to human players. Existing violin-playing machines have faced + various problems concerning performance techniques owing to constraints imposed + by their design. Bowing and fingering that require accurate and high-acceleration + movement (e.g., a spiccato, tremolo, and glissando) are essential but challenging. + To overcome this problem, Ghost Play is equipped with seven electromagnetic linear + actuators, three for controlling the bow (i.e., the right hand), and the other + four for controlling the pitch on each string (i.e., the left hand). The violin-playing + robot is mounted with an unmodified violin bow. A sensor is attached to the bow + to measure bow pressure. The control software receives a time series of performance + data and manipulates the actuators accordingly. The performance data consists + of the bow direction, bow speed, bow pressure, pitch, vibrato interval, vibrato + width, and string to be drawn. We also developed an authoring tool for the performance + data using a graphic user interface. Finally, we demonstrated Ghost Play performing + bowing and fingering techniques such as a spiccato, tremolo, and glissando, as + well as a piece of classical music.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 18 + author: 'Kamatani, Takahiro and Sato, Yoshinao and Fujino, Masato' + bibtex: "@inproceedings{NIME22_18,\n abstract = {Ghost Play is a violin-playing\ + \ robot that aims to realize bowing and fingering similar to human players. Existing\ + \ violin-playing machines have faced various problems concerning performance techniques\ + \ owing to constraints imposed by their design. Bowing and fingering that require\ + \ accurate and high-acceleration movement (e.g., a spiccato, tremolo, and glissando)\ + \ are essential but challenging. To overcome this problem, Ghost Play is equipped\ + \ with seven electromagnetic linear actuators, three for controlling the bow (i.e.,\ + \ the right hand), and the other four for controlling the pitch on each string\ + \ (i.e., the left hand). The violin-playing robot is mounted with an unmodified\ + \ violin bow. A sensor is attached to the bow to measure bow pressure. The control\ + \ software receives a time series of performance data and manipulates the actuators\ + \ accordingly. The performance data consists of the bow direction, bow speed,\ + \ bow pressure, pitch, vibrato interval, vibrato width, and string to be drawn.\ + \ We also developed an authoring tool for the performance data using a graphic\ + \ user interface. 
Finally, we demonstrated Ghost Play performing bowing and fingering\ + \ techniques such as a spiccato, tremolo, and glissando, as well as a piece of\ + \ classical music.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {18},\n author = {Kamatani, Takahiro and Sato, Yoshinao and Fujino,\ + \ Masato},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.754a50b5},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {136.pdf},\n presentation-video = {https://youtu.be/FOivgYXk1_g},\n\ + \ title = {Ghost Play - A Violin-Playing Robot using Electromagnetic Linear Actuators},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.754a50b5},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176772 + doi: 10.21428/92fbeb44.754a50b5 issn: 2220-4806 - keywords: 'Audiovisual performance, hand silhouettes, computer vision, contour analysis, - sound-image relationships, augmented reality. ' - pages: 115--120 - title: 'Sounds from Shapes: Audiovisual Performance with Hand Silhouette Contours - in The Manual Input Sessions' - url: http://www.nime.org/proceedings/2005/nime2005_115.pdf - year: 2005 + month: jun + pdf: 136.pdf + presentation-video: https://youtu.be/FOivgYXk1_g + title: Ghost Play - A Violin-Playing Robot using Electromagnetic Linear Actuators + url: https://doi.org/10.21428%2F92fbeb44.754a50b5 + year: 2022 - ENTRYTYPE: inproceedings - ID: Yonezawa2005 - abstract: 'The HandySinger system is a personified tool developed to naturally express - a singing voice controlled by the gestures of a hand puppet. Assuming that a singing - voice is a kind of musical expression, natural expressions of the singing voice - are important for personification. 
We adopt a singing voice morphing algorithm - that effectively smoothes out the strength of expressions delivered with a singing - voice. The system’s hand puppet consists of a glove with seven bend sensors and - two pressure sensors. It sensitively captures the user’s motion as a personified - puppet’s gesture. To synthesize the different expressional strengths of a singing - voice, the “normal” (without expression) voice of a particular singer is used - as the base of morphing, and three different expressions, “dark,” “whisper” and - “wet,” are used as the target. This configuration provides musically expressed - controls that are intuitive to users. In the experiment, we evaluate whether 1) - the morphing algorithm interpolates expressional strength in a perceptual sense, - 2) the handpuppet interface provides gesture data at sufficient resolution, and - 3) the gestural mapping of the current system works as planned.' - address: 'Vancouver, BC, Canada' - author: 'Yonezawa, Tomoko and Suzuki, Takahiko and Mase, Kenji and Kogure, Kiyoshi' - bibtex: "@inproceedings{Yonezawa2005,\n abstract = {The HandySinger system is a\ - \ personified tool developed to naturally express a singing voice controlled by\ - \ the gestures of a hand puppet. Assuming that a singing voice is a kind of musical\ - \ expression, natural expressions of the singing voice are important for personification.\ - \ We adopt a singing voice morphing algorithm that effectively smoothes out the\ - \ strength of expressions delivered with a singing voice. The system’s hand puppet\ - \ consists of a glove with seven bend sensors and two pressure sensors. It sensitively\ - \ captures the user’s motion as a personified puppet’s gesture. 
To synthesize\ - \ the different expressional strengths of a singing voice, the “normal” (without\ - \ expression) voice of a particular singer is used as the base of morphing, and\ - \ three different expressions, “dark,” “whisper” and “wet,” are used as the target.\ - \ This configuration provides musically expressed controls that are intuitive\ - \ to users. In the experiment, we evaluate whether 1) the morphing algorithm interpolates\ - \ expressional strength in a perceptual sense, 2) the handpuppet interface provides\ - \ gesture data at sufficient resolution, and 3) the gestural mapping of the current\ - \ system works as planned.},\n address = {Vancouver, BC, Canada},\n author = {Yonezawa,\ - \ Tomoko and Suzuki, Takahiko and Mase, Kenji and Kogure, Kiyoshi},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176844},\n issn = {2220-4806},\n keywords\ - \ = {Personified Expression, Singing Voice Morphing, Voice Ex- pressivity, Hand-puppet\ - \ Interface },\n pages = {121--126},\n title = {HandySinger : Expressive Singing\ - \ Voice Morphing using Personified Hand-puppet Interface},\n url = {http://www.nime.org/proceedings/2005/nime2005_121.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_19 + abstract: 'Feedback is a technique that has been used in musical performance since + the advent of electricity. From the early cybernetic explorations of Bebe and + Louis Barron, through the screaming sound of Hendrix’s guitar, to the systems + design of David Tudor or Nic Collins, we find the origins of feedback in music + being technologically and aesthetically diverse. Through interviews with participants + in a recent Feedback Musicianship Network symposium, this paper seeks to investigate + the contemporary use of this technique and explore how key protagonists discuss + the nature of their practice. 
We see common concepts emerging in these conversations: + agency, complexity, coupling, play, design and posthumanism. The paper presents + a terminological and ideological framework as manifested at this point in time, + and makes a theoretical contribution to the understanding of the rationale and + potential of this technological and compositional approach.' + address: 'The University of Auckland, New Zealand' + articleno: 19 + author: 'Magnusson, Thor and Kiefer, Chris and Ulfarsson, Halldor' + bibtex: "@inproceedings{NIME22_19,\n abstract = {Feedback is a technique that has\ + \ been used in musical performance since the advent of electricity. From the early\ + \ cybernetic explorations of Bebe and Louis Barron, through the screaming sound\ + \ of Hendrix’s guitar, to the systems design of David Tudor or Nic Collins, we\ + \ find the origins of feedback in music being technologically and aesthetically\ + \ diverse. Through interviews with participants in a recent Feedback Musicianship\ + \ Network symposium, this paper seeks to investigate the contemporary use of this\ + \ technique and explore how key protagonists discuss the nature of their practice.\ + \ We see common concepts emerging in these conversations: agency, complexity,\ + \ coupling, play, design and posthumanism. 
The paper presents a terminological\ + \ and ideological framework as manifested at this point in time, and makes a theoretical\ + \ contribution to the understanding of the rationale and potential of this technological\ + \ and compositional approach.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {19},\n author = {Magnusson, Thor and Kiefer, Chris and Ulfarsson,\ + \ Halldor},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.aa7de712},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {151.pdf},\n presentation-video\ + \ = {https://www.youtube.com/watch?v=ouwIA_aVmEM},\n title = {Reflexions upon\ + \ Feedback},\n url = {https://doi.org/10.21428%2F92fbeb44.aa7de712},\n year =\ + \ {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176844 + doi: 10.21428/92fbeb44.aa7de712 issn: 2220-4806 - keywords: 'Personified Expression, Singing Voice Morphing, Voice Ex- pressivity, - Hand-puppet Interface ' - pages: 121--126 - title: 'HandySinger : Expressive Singing Voice Morphing using Personified Hand-puppet - Interface' - url: http://www.nime.org/proceedings/2005/nime2005_121.pdf - year: 2005 + month: jun + pdf: 151.pdf + presentation-video: https://www.youtube.com/watch?v=ouwIA_aVmEM + title: Reflexions upon Feedback + url: https://doi.org/10.21428%2F92fbeb44.aa7de712 + year: 2022 - ENTRYTYPE: inproceedings - ID: Funk2005 - abstract: 'The central role of the face in social interaction and non-verbal communication - suggest we explore facial action as a means of musical expression. This paper - presents the design, implementation, and preliminary studies of a novel system - utilizing face detection and optic flow algorithms to associate facial movements - with sound synthesis in a topographically specific fashion. 
We report on our experience - with various gesture-to-sound mappings and applications, and describe our preliminary - experiments at musical performance using the system. ' - address: 'Vancouver, BC, Canada' - author: 'Funk, Mathias and Kuwabara, Kazuhiro and Lyons, Michael J.' - bibtex: "@inproceedings{Funk2005,\n abstract = {The central role of the face in\ - \ social interaction and non-verbal communication suggest we explore facial action\ - \ as a means of musical expression. This paper presents the design, implementation,\ - \ and preliminary studies of a novel system utilizing face detection and optic\ - \ flow algorithms to associate facial movements with sound synthesis in a topographically\ - \ specific fashion. We report on our experience with various gesture-to-sound\ - \ mappings and applications, and describe our preliminary experiments at musical\ - \ performance using the system. },\n address = {Vancouver, BC, Canada},\n author\ - \ = {Funk, Mathias and Kuwabara, Kazuhiro and Lyons, Michael J.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176750},\n issn = {2220-4806},\n keywords\ - \ = {Video-based musical interface; gesture-based interaction; facial expression;\ - \ facial therapy interface. },\n pages = {127--131},\n title = {Sonification of\ - \ Facial Actions for Musical Expression},\n url = {http://www.nime.org/proceedings/2005/nime2005_127.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_20 + abstract: 'Beginning, amateur, and professional violinists alike make use of a shoulder + rest with a typical form factor for ergonomic support. Numerous commercial devices + are available. We saturate these inert devices with electronics and actuators + to open a new design space for “active shoulder rests” (ASRs), a pathway for violinists + to adopt inexpensive and transparent electroacoustic interfaces. 
We present a + dual-mode ASR that features a built-in microphone pickup and parametric control + of mixing between sound diffusion and actuation modes for experiments with active + acoustics and feedback. We document a modular approach to signal processing allowing + quick adaptation and differentiation of control signals, and demonstrate rich + sound processing techniques that create lively improvisation environments. By + fostering participation and convergence among digital media practices and diverse + musical cultures, we envision ASRs broadly rekindling creative practice for the + violin, long a tool of improvisation before the triumph of classical works. ASRs + decolonize the violin by activating new flows and connectivities, freeing up habitual + relations, and refreshing the musical affordances of this otherwise quintessentially + western and canonical instrument.' + address: 'The University of Auckland, New Zealand' + articleno: 20 + author: 'Thorn, Seth and Lahey, Byron' + bibtex: "@inproceedings{NIME22_20,\n abstract = {Beginning, amateur, and professional\ + \ violinists alike make use of a shoulder rest with a typical form factor for\ + \ ergonomic support. Numerous commercial devices are available. We saturate these\ + \ inert devices with electronics and actuators to open a new design space for\ + \ “active shoulder rests” (ASRs), a pathway for violinists to adopt inexpensive\ + \ and transparent electroacoustic interfaces. We present a dual-mode ASR that\ + \ features a built-in microphone pickup and parametric control of mixing between\ + \ sound diffusion and actuation modes for experiments with active acoustics and\ + \ feedback. We document a modular approach to signal processing allowing quick\ + \ adaptation and differentiation of control signals, and demonstrate rich sound\ + \ processing techniques that create lively improvisation environments. 
By fostering\ + \ participation and convergence among digital media practices and diverse musical\ + \ cultures, we envision ASRs broadly rekindling creative practice for the violin,\ + \ long a tool of improvisation before the triumph of classical works. ASRs decolonize\ + \ the violin by activating new flows and connectivities, freeing up habitual relations,\ + \ and refreshing the musical affordances of this otherwise quintessentially western\ + \ and canonical instrument.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {20},\n author = {Thorn, Seth and Lahey, Byron},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.91f87875},\n issn = {2220-4806},\n month = {jun},\n\ + \ pdf = {16.pdf},\n presentation-video = {https://youtu.be/7qNTa4QplC4},\n title\ + \ = {Decolonizing the Violin with Active Shoulder Rests ({ASRs})},\n url = {https://doi.org/10.21428%2F92fbeb44.91f87875},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176750 + doi: 10.21428/92fbeb44.91f87875 issn: 2220-4806 - keywords: 'Video-based musical interface; gesture-based interaction; facial expression; - facial therapy interface. ' - pages: 127--131 - title: Sonification of Facial Actions for Musical Expression - url: http://www.nime.org/proceedings/2005/nime2005_127.pdf - year: 2005 + month: jun + pdf: 16.pdf + presentation-video: https://youtu.be/7qNTa4QplC4 + title: Decolonizing the Violin with Active Shoulder Rests (ASRs) + url: https://doi.org/10.21428%2F92fbeb44.91f87875 + year: 2022 - ENTRYTYPE: inproceedings - ID: Janer2005 - abstract: 'In this paper we present an example of the use of the singingvoice as - a controller for digital music synthesis. 
The analysis of the voice with spectral - processing techniques, derivedfrom the Short-Time Fourier Transform, provides - ways ofdetermining a performer''s vocal intentions. We demonstratea prototype, - in which the extracted vocal features drive thesynthesis of a plucked bass guitar. - The sound synthesis stageincludes two different synthesis techniques, Physical - Modelsand Spectral Morph.' - address: 'Vancouver, BC, Canada' - author: 'Janer, Jordi' - bibtex: "@inproceedings{Janer2005,\n abstract = {In this paper we present an example\ - \ of the use of the singingvoice as a controller for digital music synthesis.\ - \ The analysis of the voice with spectral processing techniques, derivedfrom the\ - \ Short-Time Fourier Transform, provides ways ofdetermining a performer's vocal\ - \ intentions. We demonstratea prototype, in which the extracted vocal features\ - \ drive thesynthesis of a plucked bass guitar. The sound synthesis stageincludes\ - \ two different synthesis techniques, Physical Modelsand Spectral Morph.},\n address\ - \ = {Vancouver, BC, Canada},\n author = {Janer, Jordi},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176758},\n issn = {2220-4806},\n keywords = {Singing\ - \ voice, musical controller, sound synthesis, spectral processing. },\n pages\ - \ = {132--135},\n title = {Voice-controlled plucked bass guitar through two synthesis\ - \ techniques},\n url = {http://www.nime.org/proceedings/2005/nime2005_132.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_21 + abstract: 'Augmented reality (AR) is increasingly being envisaged as a process of + perceptual mediation or modulation, not only as a system that combines aligned + and interactive virtual objects with a real environment. 
Within artistic practice, + this reconceptualisation has led to a medium that emphasises this multisensory + integration of virtual processes, leading to expressive, narrative-driven, and + thought-provoking AR experiences. This paper outlines the development and evaluation + of the polaris~ experience. polaris~ is built using a set of open-source hardware + and software components that can be used to create privacy-respecting and cost-effective + audiovisual AR experiences. Its wearable component is comprised of the open-source + Project North Star AR headset and a pair of bone conduction headphones, providing + simultaneous real and virtual visual and auditory elements. These elements are + spatially aligned using Unity and PureData to the real space that they appear + in and can be gesturally interacted with in a way that fosters artistic and musical + expression. In order to evaluate the polaris~, 10 participants were recruited, + who spent approximately 30 minutes each in the AR scene and were interviewed about + their experience. Using grounded theory, the author extracted coded remarks from + the transcriptions of these studies, that were then sorted into the categories + of Sentiment, Learning, Adoption, Expression, and Immersion. In evaluating polaris~ + it was found that the experience engaged participants fruitfully, with many noting + their ability to express themselves audiovisually in creative ways. The experience + and the framework the author used to create it is available in a Github respository.' + address: 'The University of Auckland, New Zealand' + articleno: 21 + author: 'Bilbow, Sam' + bibtex: "@inproceedings{NIME22_21,\n abstract = {Augmented reality (AR) is increasingly\ + \ being envisaged as a process of perceptual mediation or modulation, not only\ + \ as a system that combines aligned and interactive virtual objects with a real\ + \ environment. 
Within artistic practice, this reconceptualisation has led to a\ + \ medium that emphasises this multisensory integration of virtual processes, leading\ + \ to expressive, narrative-driven, and thought-provoking AR experiences. This\ + \ paper outlines the development and evaluation of the polaris~ experience. polaris~\ + \ is built using a set of open-source hardware and software components that can\ + \ be used to create privacy-respecting and cost-effective audiovisual AR experiences.\ + \ Its wearable component is comprised of the open-source Project North Star AR\ + \ headset and a pair of bone conduction headphones, providing simultaneous real\ + \ and virtual visual and auditory elements. These elements are spatially aligned\ + \ using Unity and PureData to the real space that they appear in and can be gesturally\ + \ interacted with in a way that fosters artistic and musical expression. In order\ + \ to evaluate the polaris~, 10 participants were recruited, who spent approximately\ + \ 30 minutes each in the AR scene and were interviewed about their experience.\ + \ Using grounded theory, the author extracted coded remarks from the transcriptions\ + \ of these studies, that were then sorted into the categories of Sentiment, Learning,\ + \ Adoption, Expression, and Immersion. In evaluating polaris~ it was found that\ + \ the experience engaged participants fruitfully, with many noting their ability\ + \ to express themselves audiovisually in creative ways. 
The experience and the\ + \ framework the author used to create it is available in a Github respository.},\n\ + \ address = {The University of Auckland, New Zealand},\n articleno = {21},\n author\ + \ = {Bilbow, Sam},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.8abb9ce6},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {162.pdf},\n presentation-video\ + \ = {https://www.youtube.com/watch?v=eCdQku5hFOE},\n title = {Evaluating polaris{\\\ + textasciitilde} - An Audiovisual Augmented Reality Experience Built on Open-Source\ + \ Hardware and Software},\n url = {https://doi.org/10.21428%2F92fbeb44.8abb9ce6},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176758 + doi: 10.21428/92fbeb44.8abb9ce6 issn: 2220-4806 - keywords: 'Singing voice, musical controller, sound synthesis, spectral processing. ' - pages: 132--135 - title: Voice-controlled plucked bass guitar through two synthesis techniques - url: http://www.nime.org/proceedings/2005/nime2005_132.pdf - year: 2005 + month: jun + pdf: 162.pdf + presentation-video: https://www.youtube.com/watch?v=eCdQku5hFOE + title: Evaluating polaris~ - An Audiovisual Augmented Reality Experience Built on + Open-Source Hardware and Software + url: https://doi.org/10.21428%2F92fbeb44.8abb9ce6 + year: 2022 - ENTRYTYPE: inproceedings - ID: Lehrman2005 - abstract: 'Electronic Musical Instrument Design is an excellent vehiclefor bringing - students from multiple disciplines together towork on projects, and help bridge - the perennial gap betweenthe arts and the sciences. 
This paper describes how at - TuftsUniversity, a school with no music technology program,students from the engineering - (electrical, mechanical, andcomputer), music, performing arts, and visual arts - areas usetheir complementary skills, and teach each other, to developnew devices - and systems for music performance and control.' - address: 'Vancouver, BC, Canada' - author: 'Lehrman, Paul D. and Ryan, Todd M.' - bibtex: "@inproceedings{Lehrman2005,\n abstract = {Electronic Musical Instrument\ - \ Design is an excellent vehiclefor bringing students from multiple disciplines\ - \ together towork on projects, and help bridge the perennial gap betweenthe arts\ - \ and the sciences. This paper describes how at TuftsUniversity, a school with\ - \ no music technology program,students from the engineering (electrical, mechanical,\ - \ andcomputer), music, performing arts, and visual arts areas usetheir complementary\ - \ skills, and teach each other, to developnew devices and systems for music performance\ - \ and control.},\n address = {Vancouver, BC, Canada},\n author = {Lehrman, Paul\ - \ D. and Ryan, Todd M.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176768},\n\ - \ issn = {2220-4806},\n keywords = {Science education, music education, engineering,\ - \ electronic music, gesture controllers, MIDI. },\n pages = {136--139},\n title\ - \ = {Bridging the Gap Between Art and Science Education Through Teaching Electronic\ - \ Musical Instrument Design},\n url = {http://www.nime.org/proceedings/2005/nime2005_136.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_22 + abstract: 'This paper presents the MappEMG pipeline. The goal of this pipeline is + to augment the traditional classical concert experience by giving listeners access, + through the sense of touch, to an intimate and non-visible dimension of the musicians’ + bodily experience while performing. 
The live-stream pipeline produces vibrations + based on muscle activity captured through surface electromyography (EMG). Therefore, + MappEMG allows the audience to experience the performer’s muscle effort, an essential + component of music performance which is typically unavailable to direct visual + observation. The paper is divided in four sections. First, we overview related + works on EMG, music performance, and vibrotactile feedback. We then present conceptual + and methodological issues of capturing musicians’ muscle effort related to their + expressive intentions. We further explain the different components of the live-stream + data pipeline: a python software named Biosiglive for data acquisition and processing, + a Max/MSP patch for data post-processing and mapping, and a mobile application + named hAPPtiks for real-time control of smartphones’ vibration. Finally, we address + the application of the pipeline in an actual music performance. Thanks to their + modular structure, the tools presented could be used in different creative and + biomedical contexts involving gestural control of haptic stimuli.' + address: 'The University of Auckland, New Zealand' + articleno: 22 + author: 'Verdugo, Felipe and Ceglia, Amedeo and Frisson, Christian and Burton, Alexandre + and Begon, Mickael and Gibet, Sylvie and Wanderley, Marcelo M.' + bibtex: "@inproceedings{NIME22_22,\n abstract = {This paper presents the MappEMG\ + \ pipeline. The goal of this pipeline is to augment the traditional classical\ + \ concert experience by giving listeners access, through the sense of touch, to\ + \ an intimate and non-visible dimension of the musicians’ bodily experience while\ + \ performing. The live-stream pipeline produces vibrations based on muscle activity\ + \ captured through surface electromyography (EMG). 
Therefore, MappEMG allows the\ + \ audience to experience the performer’s muscle effort, an essential component\ + \ of music performance which is typically unavailable to direct visual observation.\ + \ The paper is divided in four sections. First, we overview related works on EMG,\ + \ music performance, and vibrotactile feedback. We then present conceptual and\ + \ methodological issues of capturing musicians’ muscle effort related to their\ + \ expressive intentions. We further explain the different components of the live-stream\ + \ data pipeline: a python software named Biosiglive for data acquisition and processing,\ + \ a Max/MSP patch for data post-processing and mapping, and a mobile application\ + \ named hAPPtiks for real-time control of smartphones’ vibration. Finally, we\ + \ address the application of the pipeline in an actual music performance. Thanks\ + \ to their modular structure, the tools presented could be used in different creative\ + \ and biomedical contexts involving gestural control of haptic stimuli.},\n address\ + \ = {The University of Auckland, New Zealand},\n articleno = {22},\n author =\ + \ {Verdugo, Felipe and Ceglia, Amedeo and Frisson, Christian and Burton, Alexandre\ + \ and Begon, Mickael and Gibet, Sylvie and Wanderley, Marcelo M.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.3ce22588},\n issn = {2220-4806},\n month\ + \ = {jun},\n pdf = {165.pdf},\n presentation-video = {https://youtu.be/gKM0lGs9rxw},\n\ + \ title = {Feeling the Effort of Classical Musicians - A Pipeline from Electromyography\ + \ to Smartphone Vibration for Live Music Performance},\n url = {https://doi.org/10.21428%2F92fbeb44.3ce22588},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176768 + doi: 10.21428/92fbeb44.3ce22588 issn: 2220-4806 - keywords: 'Science 
education, music education, engineering, electronic music, gesture - controllers, MIDI. ' - pages: 136--139 - title: Bridging the Gap Between Art and Science Education Through Teaching Electronic - Musical Instrument Design - url: http://www.nime.org/proceedings/2005/nime2005_136.pdf - year: 2005 + month: jun + pdf: 165.pdf + presentation-video: https://youtu.be/gKM0lGs9rxw + title: Feeling the Effort of Classical Musicians - A Pipeline from Electromyography + to Smartphone Vibration for Live Music Performance + url: https://doi.org/10.21428%2F92fbeb44.3ce22588 + year: 2022 - ENTRYTYPE: inproceedings - ID: Steiner2005 - abstract: 'The [hid] toolkit is a set of software objects for designingcomputer-based - gestural instruments. All too frequently,computer-based performers are tied to - the keyboard-mousemonitor model, narrowly constraining the range of possiblegestures. - A multitude of gestural input devices are readilyavailable, making it easy to - utilize a broader range of gestures. Human Interface Devices (HIDs) such as joysticks,tablets, - and gamepads are cheap and can be good musicalcontrollers. Some even provide haptic - feedback. The [hid]toolkit provides a unified, consistent framework for gettinggestural - data from these devices, controlling the feedback,and mapping this data to the - desired output. The [hid]toolkit is built in Pd, which provides an ideal platform - forthis work, combining the ability to synthesize and controlaudio and video. - The addition of easy access to gesturaldata allows for rapid prototypes. A usable - environmentalso makes computer music instrument design accessible tonovices.' - address: 'Vancouver, BC, Canada' - author: 'Steiner, Hans-christoph' - bibtex: "@inproceedings{Steiner2005,\n abstract = {The [hid] toolkit is a set of\ - \ software objects for designingcomputer-based gestural instruments. 
All too frequently,computer-based\ - \ performers are tied to the keyboard-mousemonitor model, narrowly constraining\ - \ the range of possiblegestures. A multitude of gestural input devices are readilyavailable,\ - \ making it easy to utilize a broader range of gestures. Human Interface Devices\ - \ (HIDs) such as joysticks,tablets, and gamepads are cheap and can be good musicalcontrollers.\ - \ Some even provide haptic feedback. The [hid]toolkit provides a unified, consistent\ - \ framework for gettinggestural data from these devices, controlling the feedback,and\ - \ mapping this data to the desired output. The [hid]toolkit is built in Pd, which\ - \ provides an ideal platform forthis work, combining the ability to synthesize\ - \ and controlaudio and video. The addition of easy access to gesturaldata allows\ - \ for rapid prototypes. A usable environmentalso makes computer music instrument\ - \ design accessible tonovices.},\n address = {Vancouver, BC, Canada},\n author\ - \ = {Steiner, Hans-christoph},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176824},\n\ - \ issn = {2220-4806},\n keywords = {Instrument design, haptic feedback, gestural\ - \ control, HID },\n pages = {140--143},\n title = {[hid] toolkit: a Unified Framework\ - \ for Instrument Design},\n url = {http://www.nime.org/proceedings/2005/nime2005_140.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_23 + abstract: 'ForceHost is an opensource toolchain for generating firmware that hosts + authoring and rendering of forcefeedback and audio signals and that communicates + through I2C with guest motor and sensor boards. With ForceHost, the stability + of audio and haptic loops is no longer delegated to and dependent on operating + systems and drivers, and devices remain discoverable beyond planned obsolescence. 
+ We modified Faust, a highlevel language and compiler for real-time audio digital + signal processing, to support haptics. Our toolchain compiles audio-haptic firmware + applications with Faust and embeds web-based UIs exposing their parameters. We + validate our toolchain by example applications and modifications of integrated + development environments: script-based programming examples of haptic firmware + applications with our haptic1D Faust library, visual programming by mapping input + and output signals between audio and haptic devices in Webmapper, visual programming + with physically-inspired mass-interaction models in Synth-a-Modeler Designer. + We distribute the documentation and source code of ForceHost and all of its components + and forks.' + address: 'The University of Auckland, New Zealand' + articleno: 23 + author: 'Frisson, Christian and Kirkegaard, Mathias and Pietrzak, Thomas and Wanderley, + Marcelo M.' + bibtex: "@inproceedings{NIME22_23,\n abstract = {ForceHost is an opensource toolchain\ + \ for generating firmware that hosts authoring and rendering of forcefeedback\ + \ and audio signals and that communicates through I2C with guest motor and sensor\ + \ boards. With ForceHost, the stability of audio and haptic loops is no longer\ + \ delegated to and dependent on operating systems and drivers, and devices remain\ + \ discoverable beyond planned obsolescence. We modified Faust, a highlevel language\ + \ and compiler for real-time audio digital signal processing, to support haptics.\ + \ Our toolchain compiles audio-haptic firmware applications with Faust and embeds\ + \ web-based UIs exposing their parameters. 
We validate our toolchain by example\ + \ applications and modifications of integrated development environments: script-based\ + \ programming examples of haptic firmware applications with our haptic1D Faust\ + \ library, visual programming by mapping input and output signals between audio\ + \ and haptic devices in Webmapper, visual programming with physically-inspired\ + \ mass-interaction models in Synth-a-Modeler Designer. We distribute the documentation\ + \ and source code of ForceHost and all of its components and forks.},\n address\ + \ = {The University of Auckland, New Zealand},\n articleno = {23},\n author =\ + \ {Frisson, Christian and Kirkegaard, Mathias and Pietrzak, Thomas and Wanderley,\ + \ Marcelo M.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.76cfc96e},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {172.pdf},\n presentation-video\ + \ = {https://youtu.be/smFpkdw-J2w},\n title = {{ForceHost}: an open-source toolchain\ + \ for generating firmware embedding the authoring and rendering of audio and force-feedback\ + \ haptics},\n url = {https://doi.org/10.21428%2F92fbeb44.76cfc96e},\n year = {2022}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176824 + doi: 10.21428/92fbeb44.76cfc96e issn: 2220-4806 - keywords: 'Instrument design, haptic feedback, gestural control, HID ' - pages: 140--143 - title: "[hid] toolkit: a Unified Framework for Instrument Design" - url: http://www.nime.org/proceedings/2005/nime2005_140.pdf - year: 2005 + month: jun + pdf: 172.pdf + presentation-video: https://youtu.be/smFpkdw-J2w + title: 'ForceHost: an open-source toolchain for generating firmware embedding the + authoring and rendering of audio and force-feedback haptics' + url: https://doi.org/10.21428%2F92fbeb44.76cfc96e + year: 2022 - ENTRYTYPE: inproceedings - ID: Makipatola2005b - abstract: 
'An experimental study comparing different user interfaces for a virtual - drum is reported. Virtual here means that the drum is not a physical object. 16 - subjects played the drum on five different interfaces and two metronome patterns - trying to match their hits to the metronome clicks. Temporal accuracy of the playing - was evaluated. The subjects also rated the interfaces subjectively. The results - show that hitting the drum alternately from both sides with motion going through - the drum plate was less accurate than the traditional one sided hitting. A physical - stick was more accurate than a virtual computer graphic stick. Visual feedback - of the drum slightly increased accuracy compared to receiving only auditory feedback. - Most subjects evaluated the physical stick to offer a better feeling and to be - more pleasant than the virtual stick. ' - address: 'Vancouver, BC, Canada' - author: 'Maki-patola, Teemu' - bibtex: "@inproceedings{Makipatola2005b,\n abstract = {An experimental study comparing\ - \ different user interfaces for a virtual drum is reported. Virtual here means\ - \ that the drum is not a physical object. 16 subjects played the drum on five\ - \ different interfaces and two metronome patterns trying to match their hits to\ - \ the metronome clicks. Temporal accuracy of the playing was evaluated. The subjects\ - \ also rated the interfaces subjectively. The results show that hitting the drum\ - \ alternately from both sides with motion going through the drum plate was less\ - \ accurate than the traditional one sided hitting. A physical stick was more accurate\ - \ than a virtual computer graphic stick. Visual feedback of the drum slightly\ - \ increased accuracy compared to receiving only auditory feedback. Most subjects\ - \ evaluated the physical stick to offer a better feeling and to be more pleasant\ - \ than the virtual stick. 
},\n address = {Vancouver, BC, Canada},\n author = {Maki-patola,\ - \ Teemu},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176784},\n issn = {2220-4806},\n\ - \ keywords = {Virtual drum, user interface, feedback, musical instrument design,\ - \ virtual reality, sound control, percussion instrument. },\n pages = {144--147},\n\ - \ title = {User Interface Comparison for Virtual Drums},\n url = {http://www.nime.org/proceedings/2005/nime2005_144.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_24 + abstract: 'Physical metaphor provides a visceral and universal logical framework + for composing musical gestures. Physical simulations can aid composers in creating + musical gestures based in complex physical metaphors. CHON (Coupled Harmonic Oscillator + Network) is a new crossplatform application for composing musical gestures based + in Newtonian physics. It simulates a network of particles connected by springs + and sonifies the motion of individual particles. CHON is an interactive instrument + that can provide complex yet tangible and physically grounded control data for + synthesis, sound processing, and musical score generation. Composers often deploy + dozens of independent LFOs to control various parameters in a DAW or synthesizer. + By coupling numerous control signals together using physical principles, CHON + represents an innovation on the traditional LFO model of musical control. Unlike + independent LFOs, CHON’s signals push and pull on each other, creating a tangible + causality in the resulting gestures. In this paper, I briefly describe the design + of CHON and discuss its use in composition through examples in my own works.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 24 + author: 'DuPlessis, Rodney' + bibtex: "@inproceedings{NIME22_24,\n abstract = {Physical metaphor provides a visceral\ + \ and universal logical framework for composing musical gestures. Physical simulations\ + \ can aid composers in creating musical gestures based in complex physical metaphors.\ + \ CHON (Coupled Harmonic Oscillator Network) is a new crossplatform application\ + \ for composing musical gestures based in Newtonian physics. It simulates a network\ + \ of particles connected by springs and sonifies the motion of individual particles.\ + \ CHON is an interactive instrument that can provide complex yet tangible and\ + \ physically grounded control data for synthesis, sound processing, and musical\ + \ score generation. Composers often deploy dozens of independent LFOs to control\ + \ various parameters in a DAW or synthesizer. By coupling numerous control signals\ + \ together using physical principles, CHON represents an innovation on the traditional\ + \ LFO model of musical control. 
Unlike independent LFOs, CHON’s signals push and\ + \ pull on each other, creating a tangible causality in the resulting gestures.\ + \ In this paper, I briefly describe the design of CHON and discuss its use in\ + \ composition through examples in my own works.},\n address = {The University\ + \ of Auckland, New Zealand},\n articleno = {24},\n author = {DuPlessis, Rodney},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.18aeca0e},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {173.pdf},\n presentation-video = {https://youtu.be/yXr1m6dW5jo},\n\ + \ title = {A virtual instrument for physics-based musical gesture: {CHON}},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.18aeca0e},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176784 + doi: 10.21428/92fbeb44.18aeca0e issn: 2220-4806 - keywords: 'Virtual drum, user interface, feedback, musical instrument design, virtual - reality, sound control, percussion instrument. ' - pages: 144--147 - title: User Interface Comparison for Virtual Drums - url: http://www.nime.org/proceedings/2005/nime2005_144.pdf - year: 2005 + month: jun + pdf: 173.pdf + presentation-video: https://youtu.be/yXr1m6dW5jo + title: 'A virtual instrument for physics-based musical gesture: CHON' + url: https://doi.org/10.21428%2F92fbeb44.18aeca0e + year: 2022 - ENTRYTYPE: inproceedings - ID: Gutknecht2005 - abstract: 'This paper takes the reader through various elements of the GoingPublik - sound artwork for distributive ensemble and introduces the Realtime Score Synthesis - tool (RSS) used as a controller in the work. The collaboration between artists - and scientists, details concerning the experimental hardware and software, and - new theories of sound art are briefly explained and illustrated. 
The scope of - this project is too broad to be fully covered in this paper, therefore the selection - of topics made attempts to draw attention to the work itself and balance theory - with practice. ' - address: 'Vancouver, BC, Canada' - author: 'Gutknecht, Jürg and Clay, Art and Frey, Thomas' - bibtex: "@inproceedings{Gutknecht2005,\n abstract = {This paper takes the reader\ - \ through various elements of the GoingPublik sound artwork for distributive ensemble\ - \ and introduces the Realtime Score Synthesis tool (RSS) used as a controller\ - \ in the work. The collaboration between artists and scientists, details concerning\ - \ the experimental hardware and software, and new theories of sound art are briefly\ - \ explained and illustrated. The scope of this project is too broad to be fully\ - \ covered in this paper, therefore the selection of topics made attempts to draw\ - \ attention to the work itself and balance theory with practice. },\n address\ - \ = {Vancouver, BC, Canada},\n author = {Gutknecht, J{\\''u}rg and Clay, Art and\ - \ Frey, Thomas},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176754},\n\ - \ issn = {2220-4806},\n keywords = {Mobile Multimedia, Wearable Computers, Score\ - \ Synthesis, Sound Art, System Research, HCIs },\n pages = {148--151},\n title\ - \ = {GoingPublik: Using Realtime Global Score Synthesis},\n url = {http://www.nime.org/proceedings/2005/nime2005_148.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_25 + abstract: 'This paper describes the development of CAVI, a coadaptive audiovisual + instrument for collaborative humanmachine improvisation. We created this agent-based + live processing system to explore how a machine can interact musically based on + a human performer’s bodily actions. CAVI utilized a generative deep learning model + that monitored muscle and motion data streamed from a Myo armband worn on the + performer’s forearm. 
The generated control signals automated layered time-based + effects modules and animated a virtual body representing the artificial agent. + In the final performance, two expert musicians (a guitarist and a drummer) performed + with CAVI. We discuss the outcome of our artistic exploration, present the scientific + methods it was based on, and reflect on developing an interactive system that + is as much an audiovisual composition as an interactive musical instrument.' + address: 'The University of Auckland, New Zealand' + articleno: 25 + author: 'Erdem, Cagri and Wallace, Benedikte and Refsum Jensenius, Alexander' + bibtex: "@inproceedings{NIME22_25,\n abstract = {This paper describes the development\ + \ of CAVI, a coadaptive audiovisual instrument for collaborative humanmachine\ + \ improvisation. We created this agent-based live processing system to explore\ + \ how a machine can interact musically based on a human performer’s bodily actions.\ + \ CAVI utilized a generative deep learning model that monitored muscle and motion\ + \ data streamed from a Myo armband worn on the performer’s forearm. The generated\ + \ control signals automated layered time-based effects modules and animated a\ + \ virtual body representing the artificial agent. In the final performance, two\ + \ expert musicians (a guitarist and a drummer) performed with CAVI. 
We discuss\ + \ the outcome of our artistic exploration, present the scientific methods it was\ + \ based on, and reflect on developing an interactive system that is as much an\ + \ audiovisual composition as an interactive musical instrument.},\n address =\ + \ {The University of Auckland, New Zealand},\n articleno = {25},\n author = {Erdem,\ + \ Cagri and Wallace, Benedikte and Refsum Jensenius, Alexander},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.803c24dd},\n issn = {2220-4806},\n month = {jun},\n\ + \ pdf = {176.pdf},\n presentation-video = {https://youtu.be/WO766vmghcQ},\n title\ + \ = {{CAVI}: A Coadaptive Audiovisual Instrument{\\textendash}Composition},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.803c24dd},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176754 + doi: 10.21428/92fbeb44.803c24dd issn: 2220-4806 - keywords: 'Mobile Multimedia, Wearable Computers, Score Synthesis, Sound Art, System - Research, HCIs ' - pages: 148--151 - title: 'GoingPublik: Using Realtime Global Score Synthesis' - url: http://www.nime.org/proceedings/2005/nime2005_148.pdf - year: 2005 + month: jun + pdf: 176.pdf + presentation-video: https://youtu.be/WO766vmghcQ + title: 'CAVI: A Coadaptive Audiovisual Instrument–Composition' + url: https://doi.org/10.21428%2F92fbeb44.803c24dd + year: 2022 - ENTRYTYPE: inproceedings - ID: Pellarin2005 - abstract: 'In this paper we describe a virtual instrument or a performance space, - placed at Høje Tåstrup train station in Denmark, which is meant to establish communicative - connections between strangers, by letting users of the system create soundscapes - together across the rails. We discuss mapping strategies and complexity and suggest - a possible solution for a final instance of our interactive musical performance - system.' 
- address: 'Vancouver, BC, Canada' - author: 'Pellarin, Lars and Böttcher, Niels and Olsen, Jakob M. and Gregersen, Ole - and Serafin, Stefania and Guglielmi, Michel' - bibtex: "@inproceedings{Pellarin2005,\n abstract = {In this paper we describe a\ - \ virtual instrument or a performance space, placed at H{\\o}je T{\\aa}strup train\ - \ station in Denmark, which is meant to establish communicative connections between\ - \ strangers, by letting users of the system create soundscapes together across\ - \ the rails. We discuss mapping strategies and complexity and suggest a possible\ - \ solution for a final instance of our interactive musical performance system.},\n\ - \ address = {Vancouver, BC, Canada},\n author = {Pellarin, Lars and B\\\"{o}ttcher,\ - \ Niels and Olsen, Jakob M. and Gregersen, Ole and Serafin, Stefania and Guglielmi,\ - \ Michel},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176798},\n issn = {2220-4806},\n\ - \ keywords = {Motion tracking, mapping strategies, public installation, multiple\ - \ participants music interfaces. },\n pages = {152--155},\n title = {Connecting\ - \ Strangers at a Train Station},\n url = {http://www.nime.org/proceedings/2005/nime2005_152.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_26 + abstract: 'The authors introduce and document how to build the t-Tree, a digital + musical instrument (DMI), interactive music system (IMS), hub, and docking station + that embeds several t-Sticks. The t-Tree’s potential for collaborative performance + as well as an installation is discussed. Specific design choices and inspiration + for the t-Tree are explored. Finally, a prototype is developed and showcased that + attempts to meet the authors’ goals of creating a novel musical experience for + musicians and non-musicians alike, expanding on the premise of the original t-Stick, + and mitigating technical obsolescence of DMIs.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 26 + author: 'Kirby, Linnea and Buser, Paul and Wanderley, Marcelo M.' + bibtex: "@inproceedings{NIME22_26,\n abstract = {The authors introduce and document\ + \ how to build the t-Tree, a digital musical instrument (DMI), interactive music\ + \ system (IMS), hub, and docking station that embeds several t-Sticks. The t-Tree’s\ + \ potential for collaborative performance as well as an installation is discussed.\ + \ Specific design choices and inspiration for the t-Tree are explored. Finally,\ + \ a prototype is developed and showcased that attempts to meet the authors’ goals\ + \ of creating a novel musical experience for musicians and non-musicians alike,\ + \ expanding on the premise of the original t-Stick, and mitigating technical obsolescence\ + \ of DMIs.},\n address = {The University of Auckland, New Zealand},\n articleno\ + \ = {26},\n author = {Kirby, Linnea and Buser, Paul and Wanderley, Marcelo M.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.2d00f04f},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {179.pdf},\n presentation-video = {https://youtu.be/gS87Tpg3h_I},\n\ + \ title = {Introducing the t-Tree: Using Multiple t-Sticks for Performance and\ + \ Installation},\n url = {https://doi.org/10.21428%2F92fbeb44.2d00f04f},\n year\ + \ = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176798 + doi: 10.21428/92fbeb44.2d00f04f issn: 2220-4806 - keywords: 'Motion tracking, mapping strategies, public installation, multiple participants - music interfaces. 
' - pages: 152--155 - title: Connecting Strangers at a Train Station - url: http://www.nime.org/proceedings/2005/nime2005_152.pdf - year: 2005 + month: jun + pdf: 179.pdf + presentation-video: https://youtu.be/gS87Tpg3h_I + title: 'Introducing the t-Tree: Using Multiple t-Sticks for Performance and Installation' + url: https://doi.org/10.21428%2F92fbeb44.2d00f04f + year: 2022 - ENTRYTYPE: inproceedings - ID: Schiemer2005 - abstract: 'This paper describes software tools used to create java applications - for performing music using mobile phones. The tools provide a means for composers - working in the Pure Data composition environment to design and audition performances - using ensembles of mobile phones. These tools were developed as part of a larger - project motivated by the desire to allow large groups of non-expert players to - perform music based on just intonation using ubiquitous technology. The paper - discusses the process that replicates a Pure Data patch so that it will operate - within the hardware and software constraints of the Java 2 Micro Edition. It also - describes development of objects that will enable mobile phone performances to - be simulated accurately in PD and to audition microtonal tuning implemented using - MIDI in the j2me environment. These tools eliminate the need for composers to - compose for mobile phones by writing java code. In a single desktop application, - they offer the composer the flexibility to write music for multiple phones. 
' - address: 'Vancouver, BC, Canada' - author: 'Schiemer, Greg and Havryliv, Mark' - bibtex: "@inproceedings{Schiemer2005,\n abstract = {This paper describes software\ - \ tools used to create java applications for performing music using mobile phones.\ - \ The tools provide a means for composers working in the Pure Data composition\ - \ environment to design and audition performances using ensembles of mobile phones.\ - \ These tools were developed as part of a larger project motivated by the desire\ - \ to allow large groups of non-expert players to perform music based on just intonation\ - \ using ubiquitous technology. The paper discusses the process that replicates\ - \ a Pure Data patch so that it will operate within the hardware and software constraints\ - \ of the Java 2 Micro Edition. It also describes development of objects that will\ - \ enable mobile phone performances to be simulated accurately in PD and to audition\ - \ microtonal tuning implemented using MIDI in the j2me environment. These tools\ - \ eliminate the need for composers to compose for mobile phones by writing java\ - \ code. In a single desktop application, they offer the composer the flexibility\ - \ to write music for multiple phones. },\n address = {Vancouver, BC, Canada},\n\ - \ author = {Schiemer, Greg and Havryliv, Mark},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176812},\n issn = {2220-4806},\n keywords = {Java 2 Micro\ - \ Edition; j2me; Pure Data; PD; Real-Time Media Performance; Just Intonation.\ - \ },\n pages = {156--159},\n title = {Pocket Gamelan: a Pure Data interface for\ - \ mobile phones},\n url = {http://www.nime.org/proceedings/2005/nime2005_156.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_27 + abstract: 'We present an empirical study of designing a NIME for the head-mounted + augmented reality (HMAR) environment. 
In the NIME community, various sonic applications + have incorporated augmented reality (AR) for sonic experience and audio production. + With this novel digital form, new opportunities for musical expression and interface + are presented. Yet few works consider whether and how the design of the NIME will + be affected given the technology’s affordance. In this paper, we take an autobiographical + design approach to design a NIME in HMAR, exploring what is a genuine application + of AR in a NIMEs and how AR mediates between the performer and sound as a creative + expression. Three interface prototypes are created for a frequency modulation + synthesis system. We report on their design process and our learning and experiences + through self-usage and improvisation. Our designs explore free-hand and embodied + interaction in our interfaces, and we reflect on how these unique qualities of + HMAR contribute to an expressive medium for sonic creation.' + address: 'The University of Auckland, New Zealand' + articleno: 27 + author: 'Wang, Yichen and Martin, Charles' + bibtex: "@inproceedings{NIME22_27,\n abstract = {We present an empirical study of\ + \ designing a NIME for the head-mounted augmented reality (HMAR) environment.\ + \ In the NIME community, various sonic applications have incorporated augmented\ + \ reality (AR) for sonic experience and audio production. With this novel digital\ + \ form, new opportunities for musical expression and interface are presented.\ + \ Yet few works consider whether and how the design of the NIME will be affected\ + \ given the technology’s affordance. In this paper, we take an autobiographical\ + \ design approach to design a NIME in HMAR, exploring what is a genuine application\ + \ of AR in a NIMEs and how AR mediates between the performer and sound as a creative\ + \ expression. Three interface prototypes are created for a frequency modulation\ + \ synthesis system. 
We report on their design process and our learning and experiences\ + \ through self-usage and improvisation. Our designs explore free-hand and embodied\ + \ interaction in our interfaces, and we reflect on how these unique qualities\ + \ of HMAR contribute to an expressive medium for sonic creation.},\n address =\ + \ {The University of Auckland, New Zealand},\n articleno = {27},\n author = {Wang,\ + \ Yichen and Martin, Charles},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.b540aa59},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {183.pdf},\n presentation-video\ + \ = {https://youtu.be/iOuZqwIwinU},\n title = {Cubing Sound: Designing a {NIME}\ + \ for Head-mounted Augmented Reality},\n url = {https://doi.org/10.21428%2F92fbeb44.b540aa59},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176812 + doi: 10.21428/92fbeb44.b540aa59 issn: 2220-4806 - keywords: 'Java 2 Micro Edition; j2me; Pure Data; PD; Real-Time Media Performance; - Just Intonation. ' - pages: 156--159 - title: 'Pocket Gamelan: a Pure Data interface for mobile phones' - url: http://www.nime.org/proceedings/2005/nime2005_156.pdf - year: 2005 + month: jun + pdf: 183.pdf + presentation-video: https://youtu.be/iOuZqwIwinU + title: 'Cubing Sound: Designing a NIME for Head-mounted Augmented Reality' + url: https://doi.org/10.21428%2F92fbeb44.b540aa59 + year: 2022 - ENTRYTYPE: inproceedings - ID: Birchfield2005 - abstract: 'This paper details the motivations, design, and realization of Sustainable, - a dynamic, robotic sound installation that employs a generative algorithm for - music and sound creation. The piece is comprised of seven autonomous water gong - nodes that are networked together by water tubes to distribute water throughout - the system. 
A water resource allocation algorithm guides this distribution process - and produces an ever-evolving sonic and visual texture. A simple set of behaviors - govern the individual gongs, and the system as a whole exhibits emergent properties - that yield local and large scale forms in sound and light. ' - address: 'Vancouver, BC, Canada' - author: 'Birchfield, David and Lorig, David and Phillips, Kelly' - bibtex: "@inproceedings{Birchfield2005,\n abstract = {This paper details the motivations,\ - \ design, and realization of Sustainable, a dynamic, robotic sound installation\ - \ that employs a generative algorithm for music and sound creation. The piece\ - \ is comprised of seven autonomous water gong nodes that are networked together\ - \ by water tubes to distribute water throughout the system. A water resource allocation\ - \ algorithm guides this distribution process and produces an ever-evolving sonic\ - \ and visual texture. A simple set of behaviors govern the individual gongs, and\ - \ the system as a whole exhibits emergent properties that yield local and large\ - \ scale forms in sound and light. },\n address = {Vancouver, BC, Canada},\n author\ - \ = {Birchfield, David and Lorig, David and Phillips, Kelly},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176705},\n issn = {2220-4806},\n keywords = {computing,dynamic\ - \ systems,evolutionary,generative arts,installation art,music,robotics,sculpture,sound},\n\ - \ pages = {160--163},\n title = {Sustainable: a dynamic, robotic, sound installation},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_160.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_28 + abstract: 'We describe a new set of affordances for networked live coding performances + in the browser-based environment Gibber, and discuss their implications in the + context of three different performances by three different ensembles at three + universities. 
Each ensemble possessed differing levels of programming and musical + expertise, leading to different challenges and subsequent extensions to Gibber + to address them. We describe these and additional extensions that came about after + shared reflection on our experiences. While our chosen design contains computational + inefficiencies that pose challenges for larger ensembles, our experiences suggest + that this is a reasonable tradeoff for the low barrier-to-entry that browser-based + environments provide, and that the design in general supports a variety of educational + goals and compositional strategies.' + address: 'The University of Auckland, New Zealand' + articleno: 28 + author: 'Roberts, Charlie and Hattwick, Ian and Sheffield, Eric and Smith, Gillian' + bibtex: "@inproceedings{NIME22_28,\n abstract = {We describe a new set of affordances\ + \ for networked live coding performances in the browser-based environment Gibber,\ + \ and discuss their implications in the context of three different performances\ + \ by three different ensembles at three universities. Each ensemble possessed\ + \ differing levels of programming and musical expertise, leading to different\ + \ challenges and subsequent extensions to Gibber to address them. We describe\ + \ these and additional extensions that came about after shared reflection on our\ + \ experiences. 
While our chosen design contains computational inefficiencies that\ + \ pose challenges for larger ensembles, our experiences suggest that this is a\ + \ reasonable tradeoff for the low barrier-to-entry that browser-based environments\ + \ provide, and that the design in general supports a variety of educational goals\ + \ and compositional strategies.},\n address = {The University of Auckland, New\ + \ Zealand},\n articleno = {28},\n author = {Roberts, Charlie and Hattwick, Ian\ + \ and Sheffield, Eric and Smith, Gillian},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.38cb7745},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {191.pdf},\n presentation-video\ + \ = {https://youtu.be/BKlHkEAqUOo},\n title = {Rethinking networked collaboration\ + \ in the live coding environment Gibber},\n url = {https://doi.org/10.21428%2F92fbeb44.38cb7745},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176705 + doi: 10.21428/92fbeb44.38cb7745 issn: 2220-4806 - keywords: 'computing,dynamic systems,evolutionary,generative arts,installation art,music,robotics,sculpture,sound' - pages: 160--163 - title: 'Sustainable: a dynamic, robotic, sound installation' - url: http://www.nime.org/proceedings/2005/nime2005_160.pdf - year: 2005 + month: jun + pdf: 191.pdf + presentation-video: https://youtu.be/BKlHkEAqUOo + title: Rethinking networked collaboration in the live coding environment Gibber + url: https://doi.org/10.21428%2F92fbeb44.38cb7745 + year: 2022 - ENTRYTYPE: inproceedings - ID: Rodrigues2005 - abstract: 'We present our work in the development of an interface for an actor/singer - and its use in performing. Our work combines aspects of theatrical music with - technology. 
Our interface has allowed the development of a new vocabulary for - musical and theatrical expression and the possibility for merging classical and - experimental music. It gave rise to a strong, strange, unpredictable, yet coherent, - "character" and opens up the possibility for a full performance that will explore - aspects of voice, theatrical music and, in the future, image projection. ' - address: 'Vancouver, BC, Canada' - author: 'Rodrigues, Paulo Maria and Girão, Luis Miguel and Gehlhaar, Rolf' - bibtex: "@inproceedings{Rodrigues2005,\n abstract = {We present our work in the\ - \ development of an interface for an actor/singer and its use in performing. Our\ - \ work combines aspects of theatrical music with technology. Our interface has\ - \ allowed the development of a new vocabulary for musical and theatrical expression\ - \ and the possibility for merging classical and experimental music. It gave rise\ - \ to a strong, strange, unpredictable, yet coherent, \"character\" and opens up\ - \ the possibility for a full performance that will explore aspects of voice, theatrical\ - \ music and, in the future, image projection. },\n address = {Vancouver, BC, Canada},\n\ - \ author = {Rodrigues, Paulo Maria and Gir\\~{a}o, Luis Miguel and Gehlhaar, Rolf},\n\ + ID: NIME22_29 + abstract: 'In the context of immersive sonic interaction, Virtual Reality Musical + Instruments have had the relative majority of attention thus far, fueled by the + increasing availability of affordable technology. Recent advances in Mixed Reality + (MR) experiences have provided the means for a new wave of research that goes + beyond Virtual Reality. In this paper, we explore the taxonomy of Extended Reality + systems, establishing our own notion of MR. From this, we propose a new classification + of Virtual Musical Instrument, known as a Mixed Reality Musical Instrument (MRMI). 
+ We define this system as an embodied interface for expressive musical performance, + characterized by the relationships between the performer, the virtual, and the + physical environment. After a review of existing literature concerning the evaluation + of immersive musical instruments and the affordances of MR systems, we offer a + new framework based on three dimensions to support the design and analysis of + MRMIs. We illustrate its use with application to existing works.' + address: 'The University of Auckland, New Zealand' + articleno: 29 + author: 'Zellerbach, Karitta Christina and Roberts, Charlie' + bibtex: "@inproceedings{NIME22_29,\n abstract = {In the context of immersive sonic\ + \ interaction, Virtual Reality Musical Instruments have had the relative majority\ + \ of attention thus far, fueled by the increasing availability of affordable technology.\ + \ Recent advances in Mixed Reality (MR) experiences have provided the means for\ + \ a new wave of research that goes beyond Virtual Reality. In this paper, we explore\ + \ the taxonomy of Extended Reality systems, establishing our own notion of MR.\ + \ From this, we propose a new classification of Virtual Musical Instrument, known\ + \ as a Mixed Reality Musical Instrument (MRMI). We define this system as an embodied\ + \ interface for expressive musical performance, characterized by the relationships\ + \ between the performer, the virtual, and the physical environment. After a review\ + \ of existing literature concerning the evaluation of immersive musical instruments\ + \ and the affordances of MR systems, we offer a new framework based on three dimensions\ + \ to support the design and analysis of MRMIs. 
We illustrate its use with application\ + \ to existing works.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {29},\n author = {Zellerbach, Karitta Christina and Roberts, Charlie},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176808},\n issn = {2220-4806},\n\ - \ keywords = {Theatrical music, computer interaction, voice, gestural control.\ - \ },\n pages = {164--167},\n title = {CyberSong},\n url = {http://www.nime.org/proceedings/2005/nime2005_164.pdf},\n\ - \ year = {2005}\n}\n" + \ Musical Expression},\n doi = {10.21428/92fbeb44.b2a44bc9},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {193.pdf},\n presentation-video = {https://youtu.be/Pb4pAr2v4yU},\n\ + \ title = {A Framework for the Design and Analysis of Mixed Reality Musical Instruments},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.b2a44bc9},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176808 + doi: 10.21428/92fbeb44.b2a44bc9 issn: 2220-4806 - keywords: 'Theatrical music, computer interaction, voice, gestural control. ' - pages: 164--167 - title: CyberSong - url: http://www.nime.org/proceedings/2005/nime2005_164.pdf - year: 2005 + month: jun + pdf: 193.pdf + presentation-video: https://youtu.be/Pb4pAr2v4yU + title: A Framework for the Design and Analysis of Mixed Reality Musical Instruments + url: https://doi.org/10.21428%2F92fbeb44.b2a44bc9 + year: 2022 - ENTRYTYPE: inproceedings - ID: Allen2005 - abstract: 'This paper describes the development, function andperformance contexts - of a digital musical instrument called "boomBox". The instrument is a wireless, - orientation-awarelow-frequency, high-amplitude human motion controller forlive - and sampled sound. The instrument has been used inperformance and sound installation - contexts. 
I describe someof what I have learned from the project herein.' - address: 'Vancouver, BC, Canada' - author: 'Allen, Jamie' - bibtex: "@inproceedings{Allen2005,\n abstract = {This paper describes the development,\ - \ function andperformance contexts of a digital musical instrument called \"boomBox\"\ - . The instrument is a wireless, orientation-awarelow-frequency, high-amplitude\ - \ human motion controller forlive and sampled sound. The instrument has been used\ - \ inperformance and sound installation contexts. I describe someof what I have\ - \ learned from the project herein.},\n address = {Vancouver, BC, Canada},\n author\ - \ = {Allen, Jamie},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176691},\n\ - \ issn = {2220-4806},\n keywords = {Visceral control, sample manipulation, Bluetooth®,\ - \ metaphor, remutualizing instrument, Human Computer Interaction.},\n pages =\ - \ {168--171},\n title = {boomBox},\n url = {http://www.nime.org/proceedings/2005/nime2005_168.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_30 + abstract: 'NIME has recently seen critique emerging around colonisation of music + technology, and the need to decolonise digital audio workstations and music software. + While commercial DAWs tend to sideline musical styles outside of western norms + (and even many inside too), viewing this problem through an historical lens of + imperialist legacies misses the influence of a more recent - and often invisible + - hegemony that bears significant direct responsibility: The culture of technological + development. In this paper we focus on the commercial technological development + culture that produces these softwares, to better understand the more latent reasons + why music production software ends up supporting some music practices while failing + others. 
By using this lens we can more meaningfully separate the influence of + historic cultural colonisation and music tech development culture, in order to + better advocate for and implement meaningful change. We will discuss why the meaning + of the term “decolonisation” should be carefully examined when addressing the + limitations of DAWs, because while larger imperialist legacies continue to have + significant impact on our understanding of culture, this can direct attention + away from the techno-cultural subset of this hegemony that is actively engaged + in making the decisions that shape the software we use. We discuss how the conventions + of this techno-cultural hegemony shape the affordances of major DAWs (and thereby + musical creativity). We also examine specific factors that impact decision making + in developing and evolving typical music software alongside latent social structures, + such as competing commercial demands, how standards are shaped, and the impact + of those standards. Lastly, we suggest that, while we must continue to discuss + the impact of imperialist legacies on the way we make music, understanding the + techno-cultural subset of the colonial hegemony and its motives can create a space + to advocate for conventions in music software that are more widely inclusive.' + address: 'The University of Auckland, New Zealand' + articleno: 30 + author: 'Pardue, Laurel and Bin, S. M. Astrid' + bibtex: "@inproceedings{NIME22_30,\n abstract = {NIME has recently seen critique\ + \ emerging around colonisation of music technology, and the need to decolonise\ + \ digital audio workstations and music software. 
While commercial DAWs tend to\ + \ sideline musical styles outside of western norms (and even many inside too),\ + \ viewing this problem through an historical lens of imperialist legacies misses\ + \ the influence of a more recent - and often invisible - hegemony that bears significant\ + \ direct responsibility: The culture of technological development. In this paper\ + \ we focus on the commercial technological development culture that produces these\ + \ softwares, to better understand the more latent reasons why music production\ + \ software ends up supporting some music practices while failing others. By using\ + \ this lens we can more meaningfully separate the influence of historic cultural\ + \ colonisation and music tech development culture, in order to better advocate\ + \ for and implement meaningful change. We will discuss why the meaning of the\ + \ term “decolonisation” should be carefully examined when addressing the limitations\ + \ of DAWs, because while larger imperialist legacies continue to have significant\ + \ impact on our understanding of culture, this can direct attention away from\ + \ the techno-cultural subset of this hegemony that is actively engaged in making\ + \ the decisions that shape the software we use. We discuss how the conventions\ + \ of this techno-cultural hegemony shape the affordances of major DAWs (and thereby\ + \ musical creativity). We also examine specific factors that impact decision making\ + \ in developing and evolving typical music software alongside latent social structures,\ + \ such as competing commercial demands, how standards are shaped, and the impact\ + \ of those standards. 
Lastly, we suggest that, while we must continue to discuss\ + \ the impact of imperialist legacies on the way we make music, understanding the\ + \ techno-cultural subset of the colonial hegemony and its motives can create a\ + \ space to advocate for conventions in music software that are more widely inclusive.},\n\ + \ address = {The University of Auckland, New Zealand},\n articleno = {30},\n author\ + \ = {Pardue, Laurel and Bin, S. M. Astrid},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.0cc78aeb},\n issn = {2220-4806},\n month = {jun},\n pdf =\ + \ {201.pdf},\n presentation-video = {https://www.youtube.com/watch?v=a53vwOUDh0M},\n\ + \ title = {The Other Hegemony: Effects of software development culture on music\ + \ software, and what we can do about it},\n url = {https://doi.org/10.21428%2F92fbeb44.0cc78aeb},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176691 + doi: 10.21428/92fbeb44.0cc78aeb issn: 2220-4806 - keywords: 'Visceral control, sample manipulation, Bluetooth®, metaphor, remutualizing - instrument, Human Computer Interaction.' - pages: 168--171 - title: boomBox - url: http://www.nime.org/proceedings/2005/nime2005_168.pdf - year: 2005 + month: jun + pdf: 201.pdf + presentation-video: https://www.youtube.com/watch?v=a53vwOUDh0M + title: 'The Other Hegemony: Effects of software development culture on music software, + and what we can do about it' + url: https://doi.org/10.21428%2F92fbeb44.0cc78aeb + year: 2022 - ENTRYTYPE: inproceedings - ID: Loscos2005 - abstract: 'Using a wah-wah pedal guitar is something guitar players have to learn. - Recently, more intuitive ways to control such effect have been proposed. 
In this - direction, the Wahwactor system controls a wah-wah transformation in real-time - using the guitar player’s voice, more precisely, using the performer [wa-wa] utterances. - To come up with this system, different vocal features derived from spectral analysis - have been studied as candidates for being used as control parameters. This paper - details the results of the study and presents the implementation of the whole - system.' - address: 'Vancouver, BC, Canada' - author: 'Loscos, Alex and Aussenac, Thomas' - bibtex: "@inproceedings{Loscos2005,\n abstract = {Using a wah-wah pedal guitar is\ - \ something guitar players have to learn. Recently, more intuitive ways to control\ - \ such effect have been proposed. In this direction, the Wahwactor system controls\ - \ a wah-wah transformation in real-time using the guitar player’s voice, more\ - \ precisely, using the performer [wa-wa] utterances. To come up with this system,\ - \ different vocal features derived from spectral analysis have been studied as\ - \ candidates for being used as control parameters. This paper details the results\ - \ of the study and presents the implementation of the whole system.},\n address\ - \ = {Vancouver, BC, Canada},\n author = {Loscos, Alex and Aussenac, Thomas},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176776},\n issn = {2220-4806},\n\ - \ pages = {172--175},\n title = {The wahwactor: a voice controlled wah-wah pedal},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_172.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_31 + abstract: "Latin American (LATAM) contributions to Music Technology date back to\ + \ the early 1940’s. However, as evidenced in historical analyses of NIME, the\ + \ input from LATAM institutions to its proceedings is considerably low, even when\ + \ the conference was recently held in Porto Alegre, Brazil. 
Reflecting on this\ + \ visible disparity and joining efforts as a group of LATAM researchers, we conducted\ + \ a workshop and distributed a survey with members of the LATAM community with\ + \ the aim of\nsounding out their perspectives on NIME-related practices and the\ + \ prospect of establishing a LATAM NIME Network. Based on our findings we provide\ + \ a contemporary contextual overview of the activities happening in\nLATAM and\ + \ the particular challenges that practitioners face emerging from their socio-political\ + \ reality. We also offer LATAM perspectives on critical epistemological issues\ + \ that affect the NIME community as a whole, contributing to a pluriversal view\ + \ on these matters, and to the embracement of multiple realities and ways of doing\ + \ things." + address: 'The University of Auckland, New Zealand' + articleno: 31 + author: 'Martinez Avila, Juan Pablo and Tragtenberg, Joāo and Calegario, Filipe + and Alarcon, Ximena and Cadavid Hinojosa, Laddy Patricia and Corintha, Isabela + and Dannemann, Teodoro and Jaimovich, Javier and Marquez-Borbon, Adnan and Lerner, + Martin Matus and Ortiz, Miguel and Ramos, Juan and Solís García, Hugo' + bibtex: "@inproceedings{NIME22_31,\n abstract = {Latin American (LATAM) contributions\ + \ to Music Technology date back to the early 1940’s. However, as evidenced in\ + \ historical analyses of NIME, the input from LATAM institutions to its proceedings\ + \ is considerably low, even when the conference was recently held in Porto Alegre,\ + \ Brazil. 
Reflecting on this visible disparity and joining efforts as a group\ + \ of LATAM researchers, we conducted a workshop and distributed a survey with\ + \ members of the LATAM community with the aim of\nsounding out their perspectives\ + \ on NIME-related practices and the prospect of establishing a LATAM NIME Network.\ + \ Based on our findings we provide a contemporary contextual overview of the activities\ + \ happening in\nLATAM and the particular challenges that practitioners face emerging\ + \ from their socio-political reality. We also offer LATAM perspectives on critical\ + \ epistemological issues that affect the NIME community as a whole, contributing\ + \ to a pluriversal view on these matters, and to the embracement of multiple realities\ + \ and ways of doing things.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {31},\n author = {Martinez Avila, Juan Pablo and Tragtenberg, Jo{\\\ + =a}o and Calegario, Filipe and Alarcon, Ximena and Cadavid Hinojosa, Laddy Patricia\ + \ and Corintha, Isabela and Dannemann, Teodoro and Jaimovich, Javier and Marquez-Borbon,\ + \ Adnan and Lerner, Martin Matus and Ortiz, Miguel and Ramos, Juan and Sol{\\\ + '{i}}s Garc{\\'{i}}a, Hugo},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.b7a7ba4f},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {21.pdf},\n presentation-video\ + \ = {https://youtu.be/dCxkrqrbM-M},\n title = {Being (A)part of NIME: Embracing\ + \ Latin American Perspectives},\n url = {https://doi.org/10.21428/92fbeb44.b7a7ba4f},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176776 + doi: 10.21428/92fbeb44.b7a7ba4f issn: 2220-4806 - pages: 172--175 - title: 'The wahwactor: a voice controlled wah-wah pedal' - url: http://www.nime.org/proceedings/2005/nime2005_172.pdf - year: 2005 + month: jun + pdf: 
21.pdf + presentation-video: https://youtu.be/dCxkrqrbM-M + title: 'Being (A)part of NIME: Embracing Latin American Perspectives' + url: https://doi.org/10.21428/92fbeb44.b7a7ba4f + year: 2022 - ENTRYTYPE: inproceedings - ID: Carter2005 - abstract: 'In this paper, we describe a course of research investigating thepotential - for new types of music made possible by locationtracking and wireless technologies. - Listeners walk arounddowntown Culver City, California and explore a new type ofmusical - album by mixing together songs and stories based ontheir movement. By using mobile - devices as an interface, wecan create new types of musical experiences that allowlisteners - to take a more interactive approach to an album.' - address: 'Vancouver, BC, Canada' - author: 'Carter, William and Liu, Leslie S.' - bibtex: "@inproceedings{Carter2005,\n abstract = {In this paper, we describe a course\ - \ of research investigating thepotential for new types of music made possible\ - \ by locationtracking and wireless technologies. Listeners walk arounddowntown\ - \ Culver City, California and explore a new type ofmusical album by mixing together\ - \ songs and stories based ontheir movement. 
By using mobile devices as an interface,\ - \ wecan create new types of musical experiences that allowlisteners to take a\ - \ more interactive approach to an album.},\n address = {Vancouver, BC, Canada},\n\ - \ author = {Carter, William and Liu, Leslie S.},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176723},\n issn = {2220-4806},\n keywords = {Mobile Music,\ - \ Digital Soundscape, Location-Based Entertainment, Mobility, Interactive Music,\ - \ Augmented Reality },\n pages = {176--179},\n title = {Location33: A Mobile Musical},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_176.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_32 + abstract: 'This article provides a lens for viewing technology as land, transformed + through resource extraction, manufacturing, distribution, disassembly and waste. + This lens is applied to processes of artistic creation with technology, exploring + ways of fostering personal and informed relationships with that technology. The + goal of these explorations will be to inspire a greater awareness of the colonial + and capitalist processes that shape the technology we use and the land and people + it is in relationship with. Beyond simply identifying the influence of these colonial + and capitalist processes, the article will also provide creative responses (alterations + to a creative process with technology) which seek to address these colonial processes + in a sensitive and critical way. This will be done not to answer the broad question + of ‘how do we decolonise art making with technology?’, but to break that question + apart into prompts or potential pathways for decolonising.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 32 + author: 'Argabrite, Zak and Murphy, Jim and Norman, Sally Jane and Carnegie, Dale' + bibtex: "@inproceedings{NIME22_32,\n abstract = {This article provides a lens for\ + \ viewing technology as land, transformed through resource extraction, manufacturing,\ + \ distribution, disassembly and waste. This lens is applied to processes of artistic\ + \ creation with technology, exploring ways of fostering personal and informed\ + \ relationships with that technology. The goal of these explorations will be to\ + \ inspire a greater awareness of the colonial and capitalist processes that shape\ + \ the technology we use and the land and people it is in relationship with. Beyond\ + \ simply identifying the influence of these colonial and capitalist processes,\ + \ the article will also provide creative responses (alterations to a creative\ + \ process with technology) which seek to address these colonial processes in a\ + \ sensitive and critical way. 
This will be done not to answer the broad question\ + \ of ‘how do we decolonise art making with technology?’, but to break that question\ + \ apart into prompts or potential pathways for decolonising.},\n address = {The\ + \ University of Auckland, New Zealand},\n articleno = {32},\n author = {Argabrite,\ + \ Zak and Murphy, Jim and Norman, Sally Jane and Carnegie, Dale},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.68f7c268},\n issn = {2220-4806},\n month\ + \ = {jun},\n pdf = {222.pdf},\n presentation-video = {https://youtu.be/JZTmiIByYN4},\n\ + \ title = {Technology is Land: Strategies towards decolonisation of technology\ + \ in artmaking},\n url = {https://doi.org/10.21428%2F92fbeb44.68f7c268},\n year\ + \ = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176723 + doi: 10.21428/92fbeb44.68f7c268 issn: 2220-4806 - keywords: 'Mobile Music, Digital Soundscape, Location-Based Entertainment, Mobility, - Interactive Music, Augmented Reality ' - pages: 176--179 - title: 'Location33: A Mobile Musical' - url: http://www.nime.org/proceedings/2005/nime2005_176.pdf - year: 2005 + month: jun + pdf: 222.pdf + presentation-video: https://youtu.be/JZTmiIByYN4 + title: 'Technology is Land: Strategies towards decolonisation of technology in artmaking' + url: https://doi.org/10.21428%2F92fbeb44.68f7c268 + year: 2022 - ENTRYTYPE: inproceedings - ID: Bardos2005 - abstract: 'Bangarama is a music controller using headbanging as the primary interaction - metaphor. It consists of a head-mounted tilt sensor and aguitar-shaped controller - that does not require complex finger positions. 
We discuss the specific challenges - of designing and building this controller to create a simple, yet responsive and - playable instrument, and show how ordinary materials such as plywood, tinfoil, - and copper wire can be turned into a device that enables a fun, collaborative - music-making experience.' - address: 'Vancouver, BC, Canada' - author: 'Bardos, Laszlo and Korinek, Stefan and Lee, Eric and Borchers, Jan' - bibtex: "@inproceedings{Bardos2005,\n abstract = {Bangarama is a music controller\ - \ using headbanging as the primary interaction metaphor. It consists of a head-mounted\ - \ tilt sensor and aguitar-shaped controller that does not require complex finger\ - \ positions. We discuss the specific challenges of designing and building this\ - \ controller to create a simple, yet responsive and playable instrument, and show\ - \ how ordinary materials such as plywood, tinfoil, and copper wire can be turned\ - \ into a device that enables a fun, collaborative music-making experience.},\n\ - \ address = {Vancouver, BC, Canada},\n author = {Bardos, Laszlo and Korinek, Stefan\ - \ and Lee, Eric and Borchers, Jan},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176699},\n\ - \ issn = {2220-4806},\n keywords = {head movements, music controllers, interface\ - \ design, input devices },\n pages = {180--183},\n title = {Bangarama: Creating\ - \ Music With Headbanging},\n url = {http://www.nime.org/proceedings/2005/nime2005_180.pdf},\n\ - \ year = {2005}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176699 - issn: 2220-4806 - keywords: 'head movements, music controllers, interface design, input devices ' - pages: 180--183 - title: 'Bangarama: Creating Music With Headbanging' - url: http://www.nime.org/proceedings/2005/nime2005_180.pdf - year: 2005 - - -- ENTRYTYPE: inproceedings - ID: Barbosa2005 - 
abstract: 'In recent years Computer Network-Music has increasingly captured the - attention of the Computer Music Community. With the advent of Internet communication, - geographical displacement amongst the participants of a computer mediated music - performance achieved world wide extension. However, when established over long - distance networks, this form of musical communication has a fundamental problem: - network latency (or net-delay) is an impediment for real-time collaboration. From - a recent study, carried out by the authors, a relation between network latency - tolerance and Music Tempo was established. This result emerged from an experiment, - in which simulated network latency conditions were applied to the performance - of different musicians playing jazz standard tunes. The Public Sound Objects (PSOs) - project is web-based shared musical space, which has been an experimental framework - to implement and test different approaches for on-line music communication. This - paper describe features implemented in the latest version of the PSOs system, - including the notion of a network-music instrument incorporating latency as a - software function, by dynamically adapting its tempo to the communication delay - measured in real-time.' - address: 'Vancouver, BC, Canada' - author: 'Barbosa, Alvaro and Cardoso, Jorge and Geiger, Günter' - bibtex: "@inproceedings{Barbosa2005,\n abstract = {In recent years Computer Network-Music\ - \ has increasingly captured the attention of the Computer Music Community. With\ - \ the advent of Internet communication, geographical displacement amongst the\ - \ participants of a computer mediated music performance achieved world wide extension.\ - \ However, when established over long distance networks, this form of musical\ - \ communication has a fundamental problem: network latency (or net-delay) is an\ - \ impediment for real-time collaboration. 
From a recent study, carried out by\ - \ the authors, a relation between network latency tolerance and Music Tempo was\ - \ established. This result emerged from an experiment, in which simulated network\ - \ latency conditions were applied to the performance of different musicians playing\ - \ jazz standard tunes. The Public Sound Objects (PSOs) project is web-based shared\ - \ musical space, which has been an experimental framework to implement and test\ - \ different approaches for on-line music communication. This paper describe features\ - \ implemented in the latest version of the PSOs system, including the notion of\ - \ a network-music instrument incorporating latency as a software function, by\ - \ dynamically adapting its tempo to the communication delay measured in real-time.},\n\ - \ address = {Vancouver, BC, Canada},\n author = {Barbosa, Alvaro and Cardoso,\ - \ Jorge and Geiger, G\\''{u}nter},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176697},\n\ - \ issn = {2220-4806},\n keywords = {Network Music Instruments; Latency in Real-Time\ - \ Performance; Interface-Decoupled Electronic Musical Instruments; Behavioral\ - \ Driven Interfaces; Collaborative Remote Music Performance; },\n pages = {184--187},\n\ - \ title = {Network Latency Adaptive Tempo in the Public Sound Objects System},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_184.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_33 + abstract: 'The following paper presents L2Ork Tweeter, a new control-data-driven + free and open source crowdsourced telematic musicking platform and a new interface + for musical expression that deterministically addresses three of the greatest + challenges associated with the telematic music medium, that of latency, sync, + and bandwidth. 
Motivated by the COVID-19 pandemic, Tweeter’s introduction in April + 2020 has ensured uninterrupted operation of Virginia Tech’s Linux Laptop Orchestra + (L2Ork), resulting in 6 international performances over the past 18 months. In + addition to enabling tightly-timed sync between clients, it also uniquely supports + all stages of NIME-centric telematic musicking, from collaborative instrument + design and instruction, to improvisation, composition, rehearsal, and performance, + including audience participation. Tweeter is also envisioned as a prototype for + the crowdsourced approach to telematic musicking. Below, the paper delves deeper + into motivation, constraints, design and implementation, and the observed impact + as an applied instance of a proposed paradigmshift in telematic musicking and + its newfound identity fueled by the live crowdsourced telematic music genre.' + address: 'The University of Auckland, New Zealand' + articleno: 33 + author: 'Bukvic, Ivica' + bibtex: "@inproceedings{NIME22_33,\n abstract = {The following paper presents L2Ork\ + \ Tweeter, a new control-data-driven free and open source crowdsourced telematic\ + \ musicking platform and a new interface for musical expression that deterministically\ + \ addresses three of the greatest challenges associated with the telematic music\ + \ medium, that of latency, sync, and bandwidth. Motivated by the COVID-19 pandemic,\ + \ Tweeter’s introduction in April 2020 has ensured uninterrupted operation of\ + \ Virginia Tech’s Linux Laptop Orchestra (L2Ork), resulting in 6 international\ + \ performances over the past 18 months. In addition to enabling tightly-timed\ + \ sync between clients, it also uniquely supports all stages of NIME-centric telematic\ + \ musicking, from collaborative instrument design and instruction, to improvisation,\ + \ composition, rehearsal, and performance, including audience participation. 
Tweeter\ + \ is also envisioned as a prototype for the crowdsourced approach to telematic\ + \ musicking. Below, the paper delves deeper into motivation, constraints, design\ + \ and implementation, and the observed impact as an applied instance of a proposed\ + \ paradigmshift in telematic musicking and its newfound identity fueled by the\ + \ live crowdsourced telematic music genre.},\n address = {The University of Auckland,\ + \ New Zealand},\n articleno = {33},\n author = {Bukvic, Ivica},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.a0a8d914},\n issn = {2220-4806},\n month = {jun},\n\ + \ pdf = {26.pdf},\n presentation-video = {https://youtu.be/5pawphncSmg},\n title\ + \ = {Latency-, Sync-, and Bandwidth-Agnostic Tightly-Timed Telematic and Crowdsourced\ + \ Musicking Made Possible Using L2Ork Tweeter},\n url = {https://doi.org/10.21428%2F92fbeb44.a0a8d914},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176697 + doi: 10.21428/92fbeb44.a0a8d914 issn: 2220-4806 - keywords: 'Network Music Instruments; Latency in Real-Time Performance; Interface-Decoupled - Electronic Musical Instruments; Behavioral Driven Interfaces; Collaborative Remote - Music Performance; ' - pages: 184--187 - title: Network Latency Adaptive Tempo in the Public Sound Objects System - url: http://www.nime.org/proceedings/2005/nime2005_184.pdf - year: 2005 + month: jun + pdf: 26.pdf + presentation-video: https://youtu.be/5pawphncSmg + title: 'Latency-, Sync-, and Bandwidth-Agnostic Tightly-Timed Telematic and Crowdsourced + Musicking Made Possible Using L2Ork Tweeter' + url: https://doi.org/10.21428%2F92fbeb44.a0a8d914 + year: 2022 - ENTRYTYPE: inproceedings - ID: Villar2005 - abstract: 'We present the Pin&Play&Perform system: an interface inthe form of a - tablet on which a number of physical controlscan 
be added, removed and arranged - on the fly. These controls can easily be mapped to existing music sofware usingthe - MIDI protocol. The interface provides a mechanism fordirect manipulation of application - parameters and eventsthrough a set of familiar controls, while also encouraging - ahigh degree of customisation through the ability to arrange,rearrange and annotate - the spatial layout of the interfacecomponents on the surface of the tablet.The - paper describes how we have realized this concept using the Pin&Play technology. - As an application example, wedescribe our experiences in using our interface in - conjunction with Propellerheads'' Reason, a popular piece of musicsynthesis software.' - address: 'Vancouver, BC, Canada' - author: 'Villar, Nicolas and Lindsay, Adam T. and Gellersen, Hans' - bibtex: "@inproceedings{Villar2005,\n abstract = {We present the Pin\\&Play\\&Perform\ - \ system: an interface inthe form of a tablet on which a number of physical controlscan\ - \ be added, removed and arranged on the fly. These controls can easily be mapped\ - \ to existing music sofware usingthe MIDI protocol. The interface provides a mechanism\ - \ fordirect manipulation of application parameters and eventsthrough a set of\ - \ familiar controls, while also encouraging ahigh degree of customisation through\ - \ the ability to arrange,rearrange and annotate the spatial layout of the interfacecomponents\ - \ on the surface of the tablet.The paper describes how we have realized this concept\ - \ using the Pin\\&Play technology. As an application example, wedescribe our experiences\ - \ in using our interface in conjunction with Propellerheads' Reason, a popular\ - \ piece of musicsynthesis software.},\n address = {Vancouver, BC, Canada},\n author\ - \ = {Villar, Nicolas and Lindsay, Adam T. 
and Gellersen, Hans},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176834},\n issn = {2220-4806},\n keywords = {tangible\ - \ interface, rearrangeable interface, midi controllers },\n pages = {188--191},\n\ - \ title = {Pin \\& Play \\& Perform: A rearrangeable interface for musical composition\ - \ and performance},\n url = {http://www.nime.org/proceedings/2005/nime2005_188.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_34 + abstract: 'In this paper we propose a Spatial Augmented Reality interface for actuated + acoustic instruments with active vibration control. We adopt a performance-led + research approach to design augmentations throughout multiple residences. The + resulting system enables two musicians to improvise with four augmented instruments + through virtual shapes distributed in their peripheral space: two 12-string guitars + and 1 drum kit actuated with surface speakers and a trumpet attached to an air + compressor. Using ethnographic methods, we document the evolution of the augmentations + and conduct a thematic analysis to shine a light on the collaborative and iterative + design process. In particular, we provide insights on the opportunities brought + by Spatial AR and on the role of improvisation.' + address: 'The University of Auckland, New Zealand' + articleno: 34 + author: 'Arslan, Cagan and Berthaut, Florent and Beuchey, Anthony and Cambourian, + Paul and Paté, Arthur' + bibtex: "@inproceedings{NIME22_34,\n abstract = {In this paper we propose a Spatial\ + \ Augmented Reality interface for actuated acoustic instruments with active vibration\ + \ control. We adopt a performance-led research approach to design augmentations\ + \ throughout multiple residences. 
The resulting system enables two musicians to\ + \ improvise with four augmented instruments through virtual shapes distributed\ + \ in their peripheral space: two 12-string guitars and 1 drum kit actuated with\ + \ surface speakers and a trumpet attached to an air compressor. Using ethnographic\ + \ methods, we document the evolution of the augmentations and conduct a thematic\ + \ analysis to shine a light on the collaborative and iterative design process.\ + \ In particular, we provide insights on the opportunities brought by Spatial AR\ + \ and on the role of improvisation.},\n address = {The University of Auckland,\ + \ New Zealand},\n articleno = {34},\n author = {Arslan, Cagan and Berthaut, Florent\ + \ and Beuchey, Anthony and Cambourian, Paul and Pat{\\'{e}}, Arthur},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.c28dd323},\n issn = {2220-4806},\n month\ + \ = {jun},\n pdf = {30.pdf},\n presentation-video = {https://youtu.be/oxMrv3R6jK0},\n\ + \ title = {Vibrating shapes : Design and evolution of a spatial augmented reality\ + \ interface for actuated instruments},\n url = {https://doi.org/10.21428%2F92fbeb44.c28dd323},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176834 + doi: 10.21428/92fbeb44.c28dd323 issn: 2220-4806 - keywords: 'tangible interface, rearrangeable interface, midi controllers ' - pages: 188--191 - title: 'Pin & Play & Perform: A rearrangeable interface for musical composition - and performance' - url: http://www.nime.org/proceedings/2005/nime2005_188.pdf - year: 2005 + month: jun + pdf: 30.pdf + presentation-video: https://youtu.be/oxMrv3R6jK0 + title: 'Vibrating shapes : Design and evolution of a spatial augmented reality interface + for actuated instruments' + url: https://doi.org/10.21428%2F92fbeb44.c28dd323 + year: 2022 - ENTRYTYPE: 
inproceedings - ID: Birnbaum2005 - abstract: 'While several researchers have grappled with the problem of comparing - musical devices across performance, installation, and related contexts, no methodology - yet exists for producing holistic, informative visualizations for these devices. - Drawing on existing research in performance interaction, human-computer interaction, - and design space analysis, the authors propose a dimension space representation - that can be adapted for visually displaying musical devices. This paper illustrates - one possible application of the dimension space to existing performance and interaction - systems, revealing its usefulness both in exposing patterns across existing musical - devices and aiding in the design of new ones.' - address: 'Vancouver, BC, Canada' - author: 'Birnbaum, David and Fiebrink, Rebecca and Malloch, Joseph and Wanderley, - Marcelo M.' - bibtex: "@inproceedings{Birnbaum2005,\n abstract = {While several researchers have\ - \ grappled with the problem of comparing musical devices across performance, installation,\ - \ and related contexts, no methodology yet exists for producing holistic, informative\ - \ visualizations for these devices. Drawing on existing research in performance\ - \ interaction, human-computer interaction, and design space analysis, the authors\ - \ propose a dimension space representation that can be adapted for visually displaying\ - \ musical devices. 
This paper illustrates one possible application of the dimension\ - \ space to existing performance and interaction systems, revealing its usefulness\ - \ both in exposing patterns across existing musical devices and aiding in the\ - \ design of new ones.},\n address = {Vancouver, BC, Canada},\n author = {Birnbaum,\ - \ David and Fiebrink, Rebecca and Malloch, Joseph and Wanderley, Marcelo M.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176707},\n issn = {2220-4806},\n\ - \ keywords = {design space analysis,human-computer interaction,interfaces for\ - \ musical expression,new},\n pages = {192--195},\n title = {Towards a Dimension\ - \ Space for Musical Devices},\n url = {http://www.nime.org/proceedings/2005/nime2005_192.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_35 + abstract: 'Digital Musical Instruments (DMIs) offer new opportunities for collaboration, + such as exchanging sounds or sharing controls between musicians. However, in the + context of spontaneous and heterogeneous orchestras, such as jam sessions, collective + music-making may become challenging due to the diversity and complexity of the + DMIs and the musicians’ unfamiliarity with the others’ instruments. In particular, + the potential lack of visibility into each musician’s respective contribution + to the sound they hear, i.e. who is playing what, might impede their capacity + to play together. In this paper, we propose to augment each instrument in a digital + orchestra with visual feedback extracted in real-time from the instrument’s activity, + in order to increase this awareness. We present the results of a user study in + which we investigate the influence of visualisation level and situational visibility + during short improvisations by groups of three musicians. 
Our results suggest + that internal visualisations of all instruments displayed close to each musician’s + instrument provide the best awareness.' + address: 'The University of Auckland, New Zealand' + articleno: 35 + author: 'Berthaut, Florent and Dahl, Luke' + bibtex: "@inproceedings{NIME22_35,\n abstract = {Digital Musical Instruments (DMIs)\ + \ offer new opportunities for collaboration, such as exchanging sounds or sharing\ + \ controls between musicians. However, in the context of spontaneous and heterogeneous\ + \ orchestras, such as jam sessions, collective music-making may become challenging\ + \ due to the diversity and complexity of the DMIs and the musicians’ unfamiliarity\ + \ with the others’ instruments. In particular, the potential lack of visibility\ + \ into each musician’s respective contribution to the sound they hear, i.e. who\ + \ is playing what, might impede their capacity to play together. In this paper,\ + \ we propose to augment each instrument in a digital orchestra with visual feedback\ + \ extracted in real-time from the instrument’s activity, in order to increase\ + \ this awareness. We present the results of a user study in which we investigate\ + \ the influence of visualisation level and situational visibility during short\ + \ improvisations by groups of three musicians. 
Our results suggest that internal\ + \ visualisations of all instruments displayed close to each musician’s instrument\ + \ provide the best awareness.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {35},\n author = {Berthaut, Florent and Dahl, Luke},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.9d974714},\n issn = {2220-4806},\n month\ + \ = {jun},\n pdf = {31.pdf},\n presentation-video = {https://www.youtube.com/watch?v=903cs_oFfwo},\n\ + \ title = {The Effect of Visualisation Level and Situational Visibility in Co-located\ + \ Digital Musical Ensembles},\n url = {https://doi.org/10.21428%2F92fbeb44.9d974714},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176707 + doi: 10.21428/92fbeb44.9d974714 issn: 2220-4806 - keywords: 'design space analysis,human-computer interaction,interfaces for musical - expression,new' - pages: 192--195 - title: Towards a Dimension Space for Musical Devices - url: http://www.nime.org/proceedings/2005/nime2005_192.pdf - year: 2005 + month: jun + pdf: 31.pdf + presentation-video: https://www.youtube.com/watch?v=903cs_oFfwo + title: The Effect of Visualisation Level and Situational Visibility in Co-located + Digital Musical Ensembles + url: https://doi.org/10.21428%2F92fbeb44.9d974714 + year: 2022 - ENTRYTYPE: inproceedings - ID: Wang2005a - abstract: 'ChucK is a programming language for real-time sound synthesis. It provides - generalized audio abstractions and precise control over timing and concurrency - --- combining the rapid-prototyping advantages of high-level programming tools, - such as Pure Data, with the flexibility and controllability of lower-level, text-based - languages like C/C++. In this paper, we present a new time-based paradigm for - programming controllers with ChucK. 
In addition to real-time control over sound - synthesis, we show how features such as dynamic patching, on-the-fly controller - mapping, multiple control rates, and precisely-timed recording and playback of - sensors can be employed under the ChucK programming model. Using this framework, - composers, programmers, and performers can quickly write (and read/debug) complex - controller/synthesis programs, and experiment with controller mapping on-the-fly. ' - address: 'Vancouver, BC, Canada' - author: 'Wang, Ge and Misra, Ananya and Cook, Perry R. and Kapur' - bibtex: "@inproceedings{Wang2005a,\n abstract = {ChucK is a programming language\ - \ for real-time sound synthesis. It provides generalized audio abstractions and\ - \ precise control over timing and concurrency --- combining the rapid-prototyping\ - \ advantages of high-level programming tools, such as Pure Data, with the flexibility\ - \ and controllability of lower-level, text-based languages like C/C++. In this\ - \ paper, we present a new time-based paradigm for programming controllers with\ - \ ChucK. In addition to real-time control over sound synthesis, we show how features\ - \ such as dynamic patching, on-the-fly controller mapping, multiple control rates,\ - \ and precisely-timed recording and playback of sensors can be employed under\ - \ the ChucK programming model. Using this framework, composers, programmers, and\ - \ performers can quickly write (and read/debug) complex controller/synthesis programs,\ - \ and experiment with controller mapping on-the-fly. },\n address = {Vancouver,\ - \ BC, Canada},\n author = {Wang, Ge and Misra, Ananya and Cook, Perry R. and Kapur},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176838},\n issn = {2220-4806},\n\ - \ keywords = {Controller mapping, programming language, on-the-fly programming,\ - \ real-time interaction, concurrency. 
},\n pages = {196--199},\n title = {Yeah,\ - \ ChucK It! = > Dynamic , Controllable Interface Mapping},\n url = {http://www.nime.org/proceedings/2005/nime2005_196.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_36 + abstract: 'The management of the musical structures and the awareness of the performer’s + processes during a performance are two important aspects of live coding improvisations. + To support these aspects, we developed and evaluated two systems, Time_X and Time_Z, + for visualizing the musical form during live coding. Time_X allows visualizing + an entire performance, while Time_Z provides a detailed overview of the last improvised + musical events. Following an autobiographical approach, the two systems have been + used in five sessions by the first author of this paper, who created a diary about + the experience. These diaries have been analyzed to understand the two systems + individually and compare them. We finally discuss the main benefits related to + the practical use of these systems, and possible use scenarios.' + address: 'The University of Auckland, New Zealand' + articleno: 36 + author: 'Rì, Francesco Ardan Dal and Masu, Raul' + bibtex: "@inproceedings{NIME22_36,\n abstract = {The management of the musical structures\ + \ and the awareness of the performer’s processes during a performance are two\ + \ important aspects of live coding improvisations. To support these aspects, we\ + \ developed and evaluated two systems, Time_X and Time_Z, for visualizing the\ + \ musical form during live coding. Time_X allows visualizing an entire performance,\ + \ while Time_Z provides a detailed overview of the last improvised musical events.\ + \ Following an autobiographical approach, the two systems have been used in five\ + \ sessions by the first author of this paper, who created a diary about the experience.\ + \ These diaries have been analyzed to understand the two systems individually\ + \ and compare them. 
We finally discuss the main benefits related to the practical\ + \ use of these systems, and possible use scenarios.},\n address = {The University\ + \ of Auckland, New Zealand},\n articleno = {36},\n author = {R{\\`i}, Francesco\ + \ Ardan Dal and Masu, Raul},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.828b6114},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {32.pdf},\n presentation-video\ + \ = {https://www.youtube.com/watch?v=r-cxEXjnDzg},\n title = {Exploring Musical\ + \ Form: Digital Scores to Support Live Coding Practice},\n url = {https://doi.org/10.21428%2F92fbeb44.828b6114},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176838 + doi: 10.21428/92fbeb44.828b6114 issn: 2220-4806 - keywords: 'Controller mapping, programming language, on-the-fly programming, real-time - interaction, concurrency. ' - pages: 196--199 - title: 'Yeah, ChucK It! = > Dynamic , Controllable Interface Mapping' - url: http://www.nime.org/proceedings/2005/nime2005_196.pdf - year: 2005 + month: jun + pdf: 32.pdf + presentation-video: https://www.youtube.com/watch?v=r-cxEXjnDzg + title: 'Exploring Musical Form: Digital Scores to Support Live Coding Practice' + url: https://doi.org/10.21428%2F92fbeb44.828b6114 + year: 2022 - ENTRYTYPE: inproceedings - ID: Tindale2005 - abstract: Drum controllers designed by researchers and commercialcompanies use a - variety of techniques for capturing percussive gestures. It is challenging to - obtain both quick responsetimes and low-level data (such as position) that contain - expressive information. This research is a comprehensive studyof current methods - to evaluate the available strategies andtechnologies. 
This study aims to demonstrate - the benefitsand detriments of the current state of percussion controllersas well - as yield tools for those who would wish to conductthis type of study in the future. - address: 'Vancouver, BC, Canada' - author: 'Tindale, Adam R. and Kapur, Ajay and Tzanetakis, George and Driessen, Peter - and Schloss, Andrew' - bibtex: "@inproceedings{Tindale2005,\n abstract = {Drum controllers designed by\ - \ researchers and commercialcompanies use a variety of techniques for capturing\ - \ percussive gestures. It is challenging to obtain both quick responsetimes and\ - \ low-level data (such as position) that contain expressive information. This\ - \ research is a comprehensive studyof current methods to evaluate the available\ - \ strategies andtechnologies. This study aims to demonstrate the benefitsand detriments\ - \ of the current state of percussion controllersas well as yield tools for those\ - \ who would wish to conductthis type of study in the future.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Tindale, Adam R. and Kapur, Ajay and Tzanetakis, George\ - \ and Driessen, Peter and Schloss, Andrew},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176828},\n issn = {2220-4806},\n keywords = {Percussion Controllers,\ - \ Timbre-recognition based instruments, Electronic Percussion, Sensors for Interface\ - \ Design },\n pages = {200--203},\n title = {A Comparison of Sensor Strategies\ - \ for Capturing Percussive Gestures},\n url = {http://www.nime.org/proceedings/2005/nime2005_200.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_37 + abstract: 'In this paper, we discuss our ongoing work to leverage virtual reality + and digital fabrication to investigate sensory mappings across the visual, auditory, + and haptic modalities in VR, and how such mappings can affect musical expression + in this medium. 
Specifically, we introduce a custom adapter for the Oculus Touch + controller that allows it to be augmented with physical parts that can be tracked, + visualized, and sonified in VR. This way, a VR instrument can be made to have + a physical manifestation that facilitates additional forms of tactile feedback + besides those offered by the Touch controller, enabling new forms of musical interaction. + We then discuss a case study, where we use the adapter to implement a new VR instrument + that integrates the repelling force between neodymium magnets into the controllers. + This allows us to imbue the virtual instrument, which is inherently devoid of + tactility, with haptic feedback—-an essential affordance of many musical instruments.' + address: 'The University of Auckland, New Zealand' + articleno: 37 + author: 'Çamci, Anil and Granzow, John' + bibtex: "@inproceedings{NIME22_37,\n abstract = {In this paper, we discuss our ongoing\ + \ work to leverage virtual reality and digital fabrication to investigate sensory\ + \ mappings across the visual, auditory, and haptic modalities in VR, and how such\ + \ mappings can affect musical expression in this medium. Specifically, we introduce\ + \ a custom adapter for the Oculus Touch controller that allows it to be augmented\ + \ with physical parts that can be tracked, visualized, and sonified in VR. This\ + \ way, a VR instrument can be made to have a physical manifestation that facilitates\ + \ additional forms of tactile feedback besides those offered by the Touch controller,\ + \ enabling new forms of musical interaction. We then discuss a case study, where\ + \ we use the adapter to implement a new VR instrument that integrates the repelling\ + \ force between neodymium magnets into the controllers. 
This allows us to imbue\ + \ the virtual instrument, which is inherently devoid of tactility, with haptic\ + \ feedback—-an essential affordance of many musical instruments.},\n address =\ + \ {The University of Auckland, New Zealand},\n articleno = {37},\n author = {{\\\ + c{C}}amci, Anil and Granzow, John},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.a26a4014},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {33.pdf},\n presentation-video\ + \ = {https://youtu.be/fnoQOO4rz4M},\n title = {Augmented Touch: A Mounting Adapter\ + \ for Oculus Touch Controllers that Enables New Hyperreal Instruments},\n url\ + \ = {https://doi.org/10.21428%2F92fbeb44.a26a4014},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176828 + doi: 10.21428/92fbeb44.a26a4014 issn: 2220-4806 - keywords: 'Percussion Controllers, Timbre-recognition based instruments, Electronic - Percussion, Sensors for Interface Design ' - pages: 200--203 - title: A Comparison of Sensor Strategies for Capturing Percussive Gestures - url: http://www.nime.org/proceedings/2005/nime2005_200.pdf - year: 2005 + month: jun + pdf: 33.pdf + presentation-video: https://youtu.be/fnoQOO4rz4M + title: 'Augmented Touch: A Mounting Adapter for Oculus Touch Controllers that Enables + New Hyperreal Instruments' + url: https://doi.org/10.21428%2F92fbeb44.a26a4014 + year: 2022 - ENTRYTYPE: inproceedings - ID: Lee2005 - abstract: 'Discussion of time in interactive computer music systems engineering - has been largely limited to data acquisition rates and latency.Since music is - an inherently time-based medium, we believe thattime plays a more important role - in both the usability and implementation of these systems. 
In this paper, we present - a time designspace, which we use to expose some of the challenges of developing - computer music systems with time-based interaction. Wedescribe and analyze the - time-related issues we encountered whilstdesigning and building a series of interactive - music exhibits thatfall into this design space. These issues often occur because - ofthe varying and sometimes conflicting conceptual models of timein the three - domains of user, application (music), and engineering.We present some of our latest - work in conducting gesture interpretation and frameworks for digital audio, which - attempt to analyzeand address these conflicts in temporal conceptual models.' - address: 'Vancouver, BC, Canada' - author: 'Lee, Eric and Borchers, Jan' - bibtex: "@inproceedings{Lee2005,\n abstract = {Discussion of time in interactive\ - \ computer music systems engineering has been largely limited to data acquisition\ - \ rates and latency.Since music is an inherently time-based medium, we believe\ - \ thattime plays a more important role in both the usability and implementation\ - \ of these systems. In this paper, we present a time designspace, which we use\ - \ to expose some of the challenges of developing computer music systems with time-based\ - \ interaction. Wedescribe and analyze the time-related issues we encountered whilstdesigning\ - \ and building a series of interactive music exhibits thatfall into this design\ - \ space. 
These issues often occur because ofthe varying and sometimes conflicting\ - \ conceptual models of timein the three domains of user, application (music),\ - \ and engineering.We present some of our latest work in conducting gesture interpretation\ - \ and frameworks for digital audio, which attempt to analyzeand address these\ - \ conflicts in temporal conceptual models.},\n address = {Vancouver, BC, Canada},\n\ - \ author = {Lee, Eric and Borchers, Jan},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176766},\n\ - \ issn = {2220-4806},\n keywords = {time design, conceptual models of time, design\ - \ spaces, interactive music exhibits, engineering music systems},\n pages = {204--207},\n\ - \ title = {The Role of Time in Engineering Computer Music Systems},\n url = {http://www.nime.org/proceedings/2005/nime2005_204.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_38 + abstract: 'Automated processes in musical instruments can serve to free a performer + from the physical and mental constraints of music performance, allowing them to + expressively control more aspects of music simultaneously. Modular synthesis has + been a prominent platform for exploring automation through the use of sequencers + and has therefore fostered a tradition of user interface design utilizing increasingly + complex abstraction methods. We investigate the history of sequencer design from + this perspective and introduce machine learning as a potential source for a new + type of intelligent abstraction. We then offer a case study based on this approach + and present Latent Drummer, which is a prototype system dedicated to integrating + machine learning-based interface abstractions into the tradition of sequencers + for modular synthesis.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 38 + author: 'Warren, Nick and Çamci, Anil' + bibtex: "@inproceedings{NIME22_38,\n abstract = {Automated processes in musical\ + \ instruments can serve to free a performer from the physical and mental constraints\ + \ of music performance, allowing them to expressively control more aspects of\ + \ music simultaneously. Modular synthesis has been a prominent platform for exploring\ + \ automation through the use of sequencers and has therefore fostered a tradition\ + \ of user interface design utilizing increasingly complex abstraction methods.\ + \ We investigate the history of sequencer design from this perspective and introduce\ + \ machine learning as a potential source for a new type of intelligent abstraction.\ + \ We then offer a case study based on this approach and present Latent Drummer,\ + \ which is a prototype system dedicated to integrating machine learning-based\ + \ interface abstractions into the tradition of sequencers for modular synthesis.},\n\ + \ address = {The University of Auckland, New Zealand},\n articleno = {38},\n author\ + \ = {Warren, Nick and {\\c{C}}amci, Anil},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.ed873363},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {34.pdf},\n presentation-video\ + \ = {https://www.youtube.com/watch?v=Hr6B5dIhMVo},\n title = {Latent Drummer:\ + \ A New Abstraction for Modular Sequencers},\n url = {https://doi.org/10.21428%2F92fbeb44.ed873363},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176766 + doi: 10.21428/92fbeb44.ed873363 issn: 2220-4806 - keywords: 'time design, conceptual models of time, design spaces, interactive music - exhibits, engineering music systems' - pages: 204--207 - title: The Role of Time in Engineering Computer Music Systems 
- url: http://www.nime.org/proceedings/2005/nime2005_204.pdf - year: 2005 + month: jun + pdf: 34.pdf + presentation-video: https://www.youtube.com/watch?v=Hr6B5dIhMVo + title: 'Latent Drummer: A New Abstraction for Modular Sequencers' + url: https://doi.org/10.21428%2F92fbeb44.ed873363 + year: 2022 - ENTRYTYPE: inproceedings - ID: Kobayashi2005 - abstract: 'This paper reports our recent development on a reconfigurable user interface. - We created a system that consists of a dial type controller ‘Spinner’, and the - GUI (Graphical User Interface) objects for the Max/MSP environment[1]. One physical - controller corresponds to one GUI controller on a PC’s display device, and a user - can freely change the connection on the fly (i.e. associate the physical controller - to another GUI controller). Since the user interface on the PC side is running - on the Max/MSP environment that has high flexibility, a user can freely reconfigure - the layout of GUI controllers. A single ‘Spinner’ control device consists of a - rotary encoder with a push button to count rotations and a photo IC to detect - specific patterns from the GUI objects to identify. Since ‘Spinner’ features a - simple identification method, it is capable of being used with normal display - devices like LCD (Liquid Crystal Display) or a CRT (Cathode Ray Tube) and so on. - A user can access multiple ‘Spinner’ devices simultaneously. By using this system, - a user can build a reconfigurable user interface.' - address: 'Vancouver, BC, Canada' - author: 'Kobayashi, Shigeru and Masayuki, Akamasu' - bibtex: "@inproceedings{Kobayashi2005,\n abstract = {This paper reports our recent\ - \ development on a reconfigurable user interface. We created a system that consists\ - \ of a dial type controller ‘Spinner’, and the GUI (Graphical User Interface)\ - \ objects for the Max/MSP environment[1]. 
One physical controller corresponds\ - \ to one GUI controller on a PC’s display device, and a user can freely change\ - \ the connection on the fly (i.e. associate the physical controller to another\ - \ GUI controller). Since the user interface on the PC side is running on the Max/MSP\ - \ environment that has high flexibility, a user can freely reconfigure the layout\ - \ of GUI controllers. A single ‘Spinner’ control device consists of a rotary encoder\ - \ with a push button to count rotations and a photo IC to detect specific patterns\ - \ from the GUI objects to identify. Since ‘Spinner’ features a simple identification\ - \ method, it is capable of being used with normal display devices like LCD (Liquid\ - \ Crystal Display) or a CRT (Cathode Ray Tube) and so on. A user can access multiple\ - \ ‘Spinner’ devices simultaneously. By using this system, a user can build a reconfigurable\ - \ user interface.},\n address = {Vancouver, BC, Canada},\n author = {Kobayashi,\ - \ Shigeru and Masayuki, Akamasu},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176764},\n\ - \ issn = {2220-4806},\n keywords = {Reconfigurable, Sensors, Computer Music },\n\ - \ pages = {208--211},\n title = {Spinner: A Simple Approach to Reconfigurable\ - \ User Interfaces},\n url = {http://www.nime.org/proceedings/2005/nime2005_208.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_39 + abstract: 'We present an AI-empowered music tutor with a systematic curriculum design. + The tutoring system fully utilizes the interactivity space in the auditory, visual, + and haptic modalities, supporting seven haptic feedback modes and four visual + feedback modes. The combinations of those modes form different cross-modal tasks + of varying difficulties, allowing the curriculum to apply the “scaffolding then + fading” educational technique to foster active learning and amortize cognitive + load. 
We study the effect of multimodal instructions, guidance, and feedback using + a qualitative pilot study with two subjects over ~11 hours of training with our + tutoring system. The study reveals valuable insights about the music learning + process and points towards new features and learning modes for the next prototype.' + address: 'The University of Auckland, New Zealand' + articleno: 39 + author: 'Chin, Daniel and Xia, Gus' + bibtex: "@inproceedings{NIME22_39,\n abstract = {We present an AI-empowered music\ + \ tutor with a systematic curriculum design. The tutoring system fully utilizes\ + \ the interactivity space in the auditory, visual, and haptic modalities, supporting\ + \ seven haptic feedback modes and four visual feedback modes. The combinations\ + \ of those modes form different cross-modal tasks of varying difficulties, allowing\ + \ the curriculum to apply the “scaffolding then fading” educational technique\ + \ to foster active learning and amortize cognitive load. We study the effect of\ + \ multimodal instructions, guidance, and feedback using a qualitative pilot study\ + \ with two subjects over ~11 hours of training with our tutoring system. 
The study\ + \ reveals valuable insights about the music learning process and points towards\ + \ new features and learning modes for the next prototype.},\n address = {The University\ + \ of Auckland, New Zealand},\n articleno = {39},\n author = {Chin, Daniel and\ + \ Xia, Gus},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.c6910363},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {39.pdf},\n presentation-video\ + \ = {https://youtu.be/DifOKvH1ErQ},\n title = {A Computer-aided Multimodal Music\ + \ Learning System with Curriculum: A Pilot Study},\n url = {https://doi.org/10.21428%2F92fbeb44.c6910363},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176764 + doi: 10.21428/92fbeb44.c6910363 issn: 2220-4806 - keywords: 'Reconfigurable, Sensors, Computer Music ' - pages: 208--211 - title: 'Spinner: A Simple Approach to Reconfigurable User Interfaces' - url: http://www.nime.org/proceedings/2005/nime2005_208.pdf - year: 2005 + month: jun + pdf: 39.pdf + presentation-video: https://youtu.be/DifOKvH1ErQ + title: 'A Computer-aided Multimodal Music Learning System with Curriculum: A Pilot + Study' + url: https://doi.org/10.21428%2F92fbeb44.c6910363 + year: 2022 - ENTRYTYPE: inproceedings - ID: Magnusson2005 - abstract: 'This paper describes the audio human computer interface experiments of - ixi in the past and outlines the current platform for future research. ixi software - [5] was founded by Thor Magnusson and Enrike Hurtado Mendieta in year 2000 and - since then we''ve been working on building prototypes in the form of screen-based - graphical user interfaces for musical performance, researching human computer - interaction in the field of music and creating environments which other people - can use to do similar work and for us to use in our workshops. 
Our initial starting - point was that computer music software and the way their interfaces are built - need not necessarily be limited to copying the acoustic musical instruments and - studio technology that we already have, but additionally we can create unique - languages and work processes for the virtual world. The computer is a vast creative - space with specific qualities that can and should be explored. ' - address: 'Vancouver, BC, Canada' - author: 'Magnusson, Thor' - bibtex: "@inproceedings{Magnusson2005,\n abstract = {This paper describes the audio\ - \ human computer interface experiments of ixi in the past and outlines the current\ - \ platform for future research. ixi software [5] was founded by Thor Magnusson\ - \ and Enrike Hurtado Mendieta in year 2000 and since then we've been working on\ - \ building prototypes in the form of screen-based graphical user interfaces for\ - \ musical performance, researching human computer interaction in the field of\ - \ music and creating environments which other people can use to do similar work\ - \ and for us to use in our workshops. Our initial starting point was that computer\ - \ music software and the way their interfaces are built need not necessarily be\ - \ limited to copying the acoustic musical instruments and studio technology that\ - \ we already have, but additionally we can create unique languages and work processes\ - \ for the virtual world. The computer is a vast creative space with specific qualities\ - \ that can and should be explored. 
},\n address = {Vancouver, BC, Canada},\n author\ - \ = {Magnusson, Thor},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176782},\n\ - \ issn = {2220-4806},\n keywords = {Graphical user interfaces, abstract graphical\ - \ interfaces, hypercontrol, intelligent instruments, live performance, machine\ - \ learning, catalyst software, OSC, interfacing code, open source, Pure Data,\ - \ SuperCollider. },\n pages = {212--215},\n title = {ixi software: The Interface\ - \ as Instrument},\n url = {http://www.nime.org/proceedings/2005/nime2005_212.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_40 + abstract: 'Movement-sound interactive systems are at the interface of different + artistic and educational practices. Within this multiplicity of uses, we examine + common denominators in terms of learning, appropriation and relationship to technological + systems. While these topics have been previously reported at NIME, we wanted to + investigate how practitioners, coming from different perspectives, relate to these + questions. We conducted interviews with 6 artists who are engaged in movement-sound + interactions: 1 performer, 1 performer/composer, 1 composer, 1 teacher/composer, + 1 dancer/teacher, 1 dancer. Through a thematic analysis of the transcripts we + identified three main themes related to (1) the mediating role of technological + tools (2) usability and normativity, and (3) learning and practice. These results + provide ground for discussion about the design and study of movementsound interactive + systems.' + address: 'The University of Auckland, New Zealand' + articleno: 40 + author: 'Paredes, Victor and Françoise, Jules and Bevilacqua, Frederic' + bibtex: "@inproceedings{NIME22_40,\n abstract = {Movement-sound interactive systems\ + \ are at the interface of different artistic and educational practices. 
Within\ + \ this multiplicity of uses, we examine common denominators in terms of learning,\ + \ appropriation and relationship to technological systems. While these topics\ + \ have been previously reported at NIME, we wanted to investigate how practitioners,\ + \ coming from different perspectives, relate to these questions. We conducted\ + \ interviews with 6 artists who are engaged in movement-sound interactions: 1\ + \ performer, 1 performer/composer, 1 composer, 1 teacher/composer, 1 dancer/teacher,\ + \ 1 dancer. Through a thematic analysis of the transcripts we identified three\ + \ main themes related to (1) the mediating role of technological tools (2) usability\ + \ and normativity, and (3) learning and practice. These results provide ground\ + \ for discussion about the design and study of movementsound interactive systems.},\n\ + \ address = {The University of Auckland, New Zealand},\n articleno = {40},\n author\ + \ = {Paredes, Victor and Fran{\\c{c}}oise, Jules and Bevilacqua, Frederic},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.5b9ac5ba},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {42.pdf},\n presentation-video = {https://youtu.be/n6DZE7TdEeI},\n\ + \ title = {Entangling Practice with Artistic and Educational Aims: Interviews\ + \ on Technology-based Movement-Sound Interactions},\n url = {https://doi.org/10.21428%2F92fbeb44.5b9ac5ba},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176782 + doi: 10.21428/92fbeb44.5b9ac5ba issn: 2220-4806 - keywords: 'Graphical user interfaces, abstract graphical interfaces, hypercontrol, - intelligent instruments, live performance, machine learning, catalyst software, - OSC, interfacing code, open source, Pure Data, SuperCollider. 
' - pages: 212--215 - title: 'ixi software: The Interface as Instrument' - url: http://www.nime.org/proceedings/2005/nime2005_212.pdf - year: 2005 + month: jun + pdf: 42.pdf + presentation-video: https://youtu.be/n6DZE7TdEeI + title: 'Entangling Practice with Artistic and Educational Aims: Interviews on Technology-based + Movement-Sound Interactions' + url: https://doi.org/10.21428%2F92fbeb44.5b9ac5ba + year: 2022 - ENTRYTYPE: inproceedings - ID: Miranda2005 - abstract: ' Musicians and composers have been using brainwaves as generative sources - in music for at least 40 years and the possibility of a brain-computer interface - for direct communication and control was first seriously investigated in the early - 1970s. Work has been done by many artists and technologists in the intervening - years to attempt to control music systems with brainwaves and --- indeed --- many - other biological signals. Despite the richness of EEG, fMRI and other data which - can be read from the human brain, there has up to now been only limited success - in translating the complex encephalographic data into satisfactory musical results. - We are currently pursuing research which we believe will lead to the possibility - of direct brain-computer interfaces for rich and expressive musical control. This - report will outline the directions of our current research and results. ' - address: 'Vancouver, BC, Canada' - author: 'Miranda, Eduardo and Brouse, Andrew' - bibtex: "@inproceedings{Miranda2005,\n abstract = { Musicians and composers have\ - \ been using brainwaves as generative sources in music for at least 40 years and\ - \ the possibility of a brain-computer interface for direct communication and control\ - \ was first seriously investigated in the early 1970s. Work has been done by many\ - \ artists and technologists in the intervening years to attempt to control music\ - \ systems with brainwaves and --- indeed --- many other biological signals. 
Despite\ - \ the richness of EEG, fMRI and other data which can be read from the human brain,\ - \ there has up to now been only limited success in translating the complex encephalographic\ - \ data into satisfactory musical results. We are currently pursuing research which\ - \ we believe will lead to the possibility of direct brain-computer interfaces\ - \ for rich and expressive musical control. This report will outline the directions\ - \ of our current research and results. },\n address = {Vancouver, BC, Canada},\n\ - \ author = {Miranda, Eduardo and Brouse, Andrew},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176792},\n issn = {2220-4806},\n keywords = {Brain-Computer\ - \ Interface, BCI, Electroencephalogram, EEG, brainwaves, music and the brain,\ - \ interactive music systems.},\n pages = {216--219},\n title = {Toward Direct\ - \ Brain-Computer Musical Interfaces},\n url = {http://www.nime.org/proceedings/2005/nime2005_216.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_41 + abstract: 'The Web MIDI API allows the Web browser to interact with hardware and + software MIDI devices detected at the operating system level. This ability for + the browser to interface with most electronic instruments made in the past 30 + years offers significant opportunities to preserve, enhance or re-discover a rich + musical and technical heritage. By including MIDI in the broaderWeb ecosystem, + this API also opens endless possibilities to create music in a networked and socially + engaging way. However, the Web MIDI API specification only offers low-level access + to MIDI devices and messages. For instance, it does not provide semantics on top + of the raw numerical messages exchanged between devices. This is likely to deter + novice programmers and significantly slow down experienced programmers. After + reviewing the usability of the bare Web MIDI API, the WEBMIDI. 
js JavaScript library + was created to alleviate this situation. By decoding raw MIDI messages, encapsulating + complicated processes and providing semantically significant objects, properties, + methods and events, the library makes it easier to interface with MIDI devices + from compatible browsers. This paper first looks at the context in which the specification + was created and then discusses the usability improvements layered on top of the + API by the opensource WEBMIDI.js library.' + address: 'The University of Auckland, New Zealand' + articleno: 41 + author: 'Côté, Jean-Philippe' + bibtex: "@inproceedings{NIME22_41,\n abstract = {The Web MIDI API allows the Web\ + \ browser to interact with hardware and software MIDI devices detected at the\ + \ operating system level. This ability for the browser to interface with most\ + \ electronic instruments made in the past 30 years offers significant opportunities\ + \ to preserve, enhance or re-discover a rich musical and technical heritage. By\ + \ including MIDI in the broaderWeb ecosystem, this API also opens endless possibilities\ + \ to create music in a networked and socially engaging way. However, the Web MIDI\ + \ API specification only offers low-level access to MIDI devices and messages.\ + \ For instance, it does not provide semantics on top of the raw numerical messages\ + \ exchanged between devices. This is likely to deter novice programmers and significantly\ + \ slow down experienced programmers. After reviewing the usability of the bare\ + \ Web MIDI API, the WEBMIDI. js JavaScript library was created to alleviate this\ + \ situation. 
By decoding raw MIDI messages, encapsulating complicated processes\ + \ and providing semantically significant objects, properties, methods and events,\ + \ the library makes it easier to interface with MIDI devices from compatible browsers.\ + \ This paper first looks at the context in which the specification was created\ + \ and then discusses the usability improvements layered on top of the API by the\ + \ opensource WEBMIDI.js library.},\n address = {The University of Auckland, New\ + \ Zealand},\n articleno = {41},\n author = {C{\\^o}t{\\'e}, Jean-Philippe},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.388e4764},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {43.pdf},\n presentation-video = {https://youtu.be/jMzjpUJO860},\n\ + \ title = {User-Friendly {MIDI} in the Web Browser},\n url = {https://doi.org/10.21428%2F92fbeb44.388e4764},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176792 + doi: 10.21428/92fbeb44.388e4764 issn: 2220-4806 - keywords: 'Brain-Computer Interface, BCI, Electroencephalogram, EEG, brainwaves, - music and the brain, interactive music systems.' - pages: 216--219 - title: Toward Direct Brain-Computer Musical Interfaces - url: http://www.nime.org/proceedings/2005/nime2005_216.pdf - year: 2005 + month: jun + pdf: 43.pdf + presentation-video: https://youtu.be/jMzjpUJO860 + title: User-Friendly MIDI in the Web Browser + url: https://doi.org/10.21428%2F92fbeb44.388e4764 + year: 2022 - ENTRYTYPE: inproceedings - ID: Taylor2005 - abstract: 'We present a real-time system which allows musicians tointeract with - synthetic virtual characters as they perform.Using Max/MSP to parameterize keyboard - and vocal input, meaningful features (pitch, amplitude, chord information, and - vocal timbre) are extracted from live performancein real-time. 
These extracted - musical features are thenmapped to character behaviour in such a way that the - musician''s performance elicits a response from the virtual character. The system - uses the ANIMUS framework to generatebelievable character expressions. Experimental - results arepresented for simple characters.' - address: 'Vancouver, BC, Canada' - author: 'Taylor, Robyn and Torres, Daniel and Boulanger, Pierre' - bibtex: "@inproceedings{Taylor2005,\n abstract = {We present a real-time system\ - \ which allows musicians tointeract with synthetic virtual characters as they\ - \ perform.Using Max/MSP to parameterize keyboard and vocal input, meaningful features\ - \ (pitch, amplitude, chord information, and vocal timbre) are extracted from live\ - \ performancein real-time. These extracted musical features are thenmapped to\ - \ character behaviour in such a way that the musician's performance elicits a\ - \ response from the virtual character. The system uses the ANIMUS framework to\ - \ generatebelievable character expressions. Experimental results arepresented\ - \ for simple characters.},\n address = {Vancouver, BC, Canada},\n author = {Taylor,\ - \ Robyn and Torres, Daniel and Boulanger, Pierre},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176826},\n issn = {2220-4806},\n keywords = {Music, synthetic\ - \ characters, advanced man-machine interfaces, virtual reality, behavioural systems,\ - \ interaction techniques, visualization, immersive entertainment, artistic in-\ - \ stallations },\n pages = {220--223},\n title = {Using Music to Interact with\ - \ a Virtual Character},\n url = {http://www.nime.org/proceedings/2005/nime2005_220.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_42 + abstract: 'In the search for better designs, one tool is to specify the design problem + such that globally optimal solutions can be found. 
I present a design process + using this approach, its strengths and limitations, and its results in the form + of four pitch fingering systems that are ergonomic, simple, and symmetric. In + hindsight, I emphasize the subjectivity of the design process, despite its reliance + on objective quantitative assessment.' + address: 'The University of Auckland, New Zealand' + articleno: 42 + author: 'West, Travis' + bibtex: "@inproceedings{NIME22_42,\n abstract = {In the search for better designs,\ + \ one tool is to specify the design problem such that globally optimal solutions\ + \ can be found. I present a design process using this approach, its strengths\ + \ and limitations, and its results in the form of four pitch fingering systems\ + \ that are ergonomic, simple, and symmetric. In hindsight, I emphasize the subjectivity\ + \ of the design process, despite its reliance on objective quantitative assessment.},\n\ + \ address = {The University of Auckland, New Zealand},\n articleno = {42},\n author\ + \ = {West, Travis},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.d6c9dcae},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {53.pdf},\n presentation-video\ + \ = {https://youtu.be/4QB3sNRmK1E},\n title = {Pitch Fingering Systems and the\ + \ Search for Perfection},\n url = {https://doi.org/10.21428%2F92fbeb44.d6c9dcae},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176826 + doi: 10.21428/92fbeb44.d6c9dcae issn: 2220-4806 - keywords: 'Music, synthetic characters, advanced man-machine interfaces, virtual - reality, behavioural systems, interaction techniques, visualization, immersive - entertainment, artistic in- stallations ' - pages: 220--223 - title: Using Music to Interact with a Virtual Character - url: http://www.nime.org/proceedings/2005/nime2005_220.pdf - year: 2005 + month: 
jun + pdf: 53.pdf + presentation-video: https://youtu.be/4QB3sNRmK1E + title: Pitch Fingering Systems and the Search for Perfection + url: https://doi.org/10.21428%2F92fbeb44.d6c9dcae + year: 2022 - ENTRYTYPE: inproceedings - ID: Chew2005 - abstract: 'In the Expression Synthesis Project (ESP), we propose adriving interface - for expression synthesis. ESP aims toprovide a compelling metaphor for expressive - performance soas to make high-level expressive decisions accessible to nonexperts. - In ESP, the user drives a car on a virtual road thatrepresents the music with - its twists and turns; and makesdecisions on how to traverse each part of the road. - The driver''sdecisions affect in real-time the rendering of the piece. Thepedals - and wheel provide a tactile interface for controlling thecar dynamics and musical - expression, while the displayportrays a first person view of the road and dashboard - from thedriver''s seat. This game-like interface allows non-experts tocreate expressive - renderings of existing music without havingto master an instrument, and allows - expert musicians toexperiment with expressive choice without having to firstmaster - the notes of the piece. The prototype system has beentested and refined in numerous - demonstrations. This paperpresents the concepts underlying the ESP system and - thearchitectural design and implementation of a prototype.' - address: 'Vancouver, BC, Canada' - author: 'Chew, Elaine and Francois, Alexander R. and Liu, Jie and Yang, Aaron' - bibtex: "@inproceedings{Chew2005,\n abstract = {In the Expression Synthesis Project\ - \ (ESP), we propose adriving interface for expression synthesis. ESP aims toprovide\ - \ a compelling metaphor for expressive performance soas to make high-level expressive\ - \ decisions accessible to nonexperts. In ESP, the user drives a car on a virtual\ - \ road thatrepresents the music with its twists and turns; and makesdecisions\ - \ on how to traverse each part of the road. 
The driver'sdecisions affect in real-time\ - \ the rendering of the piece. Thepedals and wheel provide a tactile interface\ - \ for controlling thecar dynamics and musical expression, while the displayportrays\ - \ a first person view of the road and dashboard from thedriver's seat. This game-like\ - \ interface allows non-experts tocreate expressive renderings of existing music\ - \ without havingto master an instrument, and allows expert musicians toexperiment\ - \ with expressive choice without having to firstmaster the notes of the piece.\ - \ The prototype system has beentested and refined in numerous demonstrations.\ - \ This paperpresents the concepts underlying the ESP system and thearchitectural\ - \ design and implementation of a prototype.},\n address = {Vancouver, BC, Canada},\n\ - \ author = {Chew, Elaine and Francois, Alexander R. and Liu, Jie and Yang, Aaron},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176725},\n issn = {2220-4806},\n\ - \ keywords = {Music expression synthesis system, driving interface. },\n pages\ - \ = {224--227},\n title = {ESP: A Driving Interface for Expression Synthesis},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_224.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_43 + abstract: 'The mubone (lowercase “m”) is a family of instruments descended from + the trombone family, a conceptual design space for trombone augmentations, and + a growing musical practice rooted in this design space and the artistic affordances + that emerge from it. We present the design of the mubone and discuss our initial + implementations. We then reflect on the beginnings of an artistic practice: playing + mubone, as well as exploring how the instrument adapts to diverse creative contexts. + We discuss mappings, musical exercises, and the development of Garcia, a sound-and-movement + composition for mubone.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 43 + author: 'West, Travis and Leung, Kalun' + bibtex: "@inproceedings{NIME22_43,\n abstract = {The mubone (lowercase “m”) is a\ + \ family of instruments descended from the trombone family, a conceptual design\ + \ space for trombone augmentations, and a growing musical practice rooted in this\ + \ design space and the artistic affordances that emerge from it. We present the\ + \ design of the mubone and discuss our initial implementations. We then reflect\ + \ on the beginnings of an artistic practice: playing mubone, as well as exploring\ + \ how the instrument adapts to diverse creative contexts. We discuss mappings,\ + \ musical exercises, and the development of Garcia, a sound-and-movement composition\ + \ for mubone.},\n address = {The University of Auckland, New Zealand},\n articleno\ + \ = {43},\n author = {West, Travis and Leung, Kalun},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.e56a93c9},\n issn = {2220-4806},\n month = {jun},\n\ + \ pdf = {54.pdf},\n presentation-video = {https://youtu.be/B51eofO4f4Y},\n title\ + \ = {early prototypes and artistic practice with the mubone},\n url = {https://doi.org/10.21428%2F92fbeb44.e56a93c9},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176725 + doi: 10.21428/92fbeb44.e56a93c9 issn: 2220-4806 - keywords: 'Music expression synthesis system, driving interface. 
' - pages: 224--227 - title: 'ESP: A Driving Interface for Expression Synthesis' - url: http://www.nime.org/proceedings/2005/nime2005_224.pdf - year: 2005 + month: jun + pdf: 54.pdf + presentation-video: https://youtu.be/B51eofO4f4Y + title: early prototypes and artistic practice with the mubone + url: https://doi.org/10.21428%2F92fbeb44.e56a93c9 + year: 2022 - ENTRYTYPE: inproceedings - ID: Poepel2005 - abstract: 'While many new interfaces for musical expression have been presented - in the past, methods to evaluate these interfaces are rare.This paper presents - a method and a study comparing the potentialfor musical expression of different - string-instrument based musicalinterfaces. Cues for musical expression are defined - based on results of research in musical expression and on methods for musicaleducation - in instrumental pedagogy. Interfaces are evaluated according to how well they - are estimated to allow players making useof their existing technique for the creation - of expressive music.' - address: 'Vancouver, BC, Canada' - author: 'Poepel, Cornelius' - bibtex: "@inproceedings{Poepel2005,\n abstract = {While many new interfaces for\ - \ musical expression have been presented in the past, methods to evaluate these\ - \ interfaces are rare.This paper presents a method and a study comparing the potentialfor\ - \ musical expression of different string-instrument based musicalinterfaces. Cues\ - \ for musical expression are defined based on results of research in musical expression\ - \ and on methods for musicaleducation in instrumental pedagogy. 
Interfaces are\ - \ evaluated according to how well they are estimated to allow players making useof\ - \ their existing technique for the creation of expressive music.},\n address =\ - \ {Vancouver, BC, Canada},\n author = {Poepel, Cornelius},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176802},\n issn = {2220-4806},\n keywords = {Musical\ - \ Expression, electronic bowed string instrument, evaluation of musical input\ - \ devices, audio signal driven sound synthesis },\n pages = {228--231},\n title\ - \ = {On Interface Expressivity: A Player-Based Study},\n url = {http://www.nime.org/proceedings/2005/nime2005_228.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_44 + abstract: 'The study of extended reality musical instruments is a burgeoning topic + in the field of new interfaces for musical expression. We developed a mixed reality + musical interface (MRMI) as a technology probe to inspire design for experienced + musicians. We namely explore (i) the ergonomics of the interface in relation to + musical expression and (ii) user-adaptive hand pose recognition as gestural control. + The MRMI probe was experienced by 10 musician participants (mean age: 25.6 years + [SD=3.0], 6 females, 4 males). We conducted a user evaluation comprising three + stages. After an experimentation period, participants were asked to accompany + a pre-recorded piece of music. In a post-task stage, participants took part in + semi-structured interviews, which were subjected to thematic analysis. Prevalent + themes included reducing the size of the interface, issues with the field of view + of the device and physical strain from playing. Participants were largely in favour + of hand poses as expressive control, although this depended on customisation and + temporal dynamics; the use of interactive machine learning (IML) for user-adaptive + hand pose recognition was well received by participants.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 44 + author: 'Graf, Max and Barthet, Mathieu' + bibtex: "@inproceedings{NIME22_44,\n abstract = {The study of extended reality musical\ + \ instruments is a burgeoning topic in the field of new interfaces for musical\ + \ expression. We developed a mixed reality musical interface (MRMI) as a technology\ + \ probe to inspire design for experienced musicians. We namely explore (i) the\ + \ ergonomics of the interface in relation to musical expression and (ii) user-adaptive\ + \ hand pose recognition as gestural control. The MRMI probe was experienced by\ + \ 10 musician participants (mean age: 25.6 years [SD=3.0], 6 females, 4 males).\ + \ We conducted a user evaluation comprising three stages. After an experimentation\ + \ period, participants were asked to accompany a pre-recorded piece of music.\ + \ In a post-task stage, participants took part in semi-structured interviews,\ + \ which were subjected to thematic analysis. Prevalent themes included reducing\ + \ the size of the interface, issues with the field of view of the device and physical\ + \ strain from playing. 
Participants were largely in favour of hand poses as expressive\ + \ control, although this depended on customisation and temporal dynamics; the\ + \ use of interactive machine learning (IML) for user-adaptive hand pose recognition\ + \ was well received by participants.},\n address = {The University of Auckland,\ + \ New Zealand},\n articleno = {44},\n author = {Graf, Max and Barthet, Mathieu},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.56ba9b93},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {59.pdf},\n presentation-video = {https://youtu.be/qhE5X3rAWgg},\n\ + \ title = {Mixed Reality Musical Interface: Exploring Ergonomics and Adaptive\ + \ Hand Pose Recognition for Gestural Control},\n url = {https://doi.org/10.21428%2F92fbeb44.56ba9b93},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176802 + doi: 10.21428/92fbeb44.56ba9b93 issn: 2220-4806 - keywords: 'Musical Expression, electronic bowed string instrument, evaluation of - musical input devices, audio signal driven sound synthesis ' - pages: 228--231 - title: 'On Interface Expressivity: A Player-Based Study' - url: http://www.nime.org/proceedings/2005/nime2005_228.pdf - year: 2005 + month: jun + pdf: 59.pdf + presentation-video: https://youtu.be/qhE5X3rAWgg + title: 'Mixed Reality Musical Interface: Exploring Ergonomics and Adaptive Hand + Pose Recognition for Gestural Control' + url: https://doi.org/10.21428%2F92fbeb44.56ba9b93 + year: 2022 - ENTRYTYPE: inproceedings - ID: Wingstedt2005 - abstract: 'A typical experiment design within the field of music psychology is playing - music to a test subject who listens and reacts – most often by verbal means. One - limitation of this kind of test is the inherent difficulty of measuring an emotional - reaction in a laboratory setting. 
This paper describes the design, functions and - possible uses of the software tool REMUPP (Relations between musical parameters - and perceived properties), designed for investigating various aspects of musical - experience. REMUPP allows for non-verbal examination of selected musical parameters - (such as tonality, tempo, timbre, articulation, volume, register etc.) in a musical - context. The musical control is put into the hands of the subject, introducing - an element of creativity and enhancing the sense of immersion. Information acquired - with REMUPP can be output as numerical data for statistical analysis, but the - tool is also suited for the use with more qualitatively oriented methods.' - address: 'Vancouver, BC, Canada' - author: 'Wingstedt, Johnny and Liljedahl, Mats and Lindberg, Stefan and Berg, Jan' - bibtex: "@inproceedings{Wingstedt2005,\n abstract = {A typical experiment design\ - \ within the field of music psychology is playing music to a test subject who\ - \ listens and reacts – most often by verbal means. One limitation of this kind\ - \ of test is the inherent difficulty of measuring an emotional reaction in a laboratory\ - \ setting. This paper describes the design, functions and possible uses of the\ - \ software tool REMUPP (Relations between musical parameters and perceived properties),\ - \ designed for investigating various aspects of musical experience. REMUPP allows\ - \ for non-verbal examination of selected musical parameters (such as tonality,\ - \ tempo, timbre, articulation, volume, register etc.) in a musical context. The\ - \ musical control is put into the hands of the subject, introducing an element\ - \ of creativity and enhancing the sense of immersion. 
Information acquired with\ - \ REMUPP can be output as numerical data for statistical analysis, but the tool\ - \ is also suited for the use with more qualitatively oriented methods.},\n address\ - \ = {Vancouver, BC, Canada},\n author = {Wingstedt, Johnny and Liljedahl, Mats\ - \ and Lindberg, Stefan and Berg, Jan},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176842},\n\ - \ issn = {2220-4806},\n keywords = {Musical experience, non-verbal test techniques,\ - \ musical parameters.},\n pages = {232--235},\n title = {REMUPP -- An Interactive\ - \ Tool for Investigating Musical Properties and Relations},\n url = {http://www.nime.org/proceedings/2005/nime2005_232.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_45 + abstract: 'Active participation of Deaf individuals in the design and performance + of artistic practice benefits increasing collaboration potentials between Deaf + and hearing individuals. In this research, we present co-design sessions with + a Deaf dancer and a hearing musician to explore how they can influence each other’s + expressive explorations. We also study vibrotactile wearable interface designs + to better support the Deaf dancer’s perception of sound and music. We report our + findings and observations on the co-design process over four workshops and one + performance and public demonstration session. We detail the design and implementation + of the wearable vibrotactile listening garment and participants’ selfreported + experiences. This interface provides participants with more embodied listening + opportunities and felt experiences of sound and music. All participants reported + that the listening experience highlighted their first-person experience, focusing + on their bodies, "regardless of an observer". 
These findings show how we can improve + both such an internal experience of the listener and the collaboration potential + between performers for increased inclusion. Overall, this paper addresses two + different modalities of haptic feedback, the participation of Deaf users in wearable + haptics design as well as music-movement performance practice, and artistic co-creation + beyond technology development.' + address: 'The University of Auckland, New Zealand' + articleno: 45 + author: 'Cavdir, Doga' + bibtex: "@inproceedings{NIME22_45,\n abstract = {Active participation of Deaf individuals\ + \ in the design and performance of artistic practice benefits increasing collaboration\ + \ potentials between Deaf and hearing individuals. In this research, we present\ + \ co-design sessions with a Deaf dancer and a hearing musician to explore how\ + \ they can influence each other’s expressive explorations. We also study vibrotactile\ + \ wearable interface designs to better support the Deaf dancer’s perception of\ + \ sound and music. We report our findings and observations on the co-design process\ + \ over four workshops and one performance and public demonstration session. We\ + \ detail the design and implementation of the wearable vibrotactile listening\ + \ garment and participants’ selfreported experiences. This interface provides\ + \ participants with more embodied listening opportunities and felt experiences\ + \ of sound and music. All participants reported that the listening experience\ + \ highlighted their first-person experience, focusing on their bodies, \"regardless\ + \ of an observer\". These findings show how we can improve both such an internal\ + \ experience of the listener and the collaboration potential between performers\ + \ for increased inclusion. 
Overall, this paper addresses two different modalities\ + \ of haptic feedback, the participation of Deaf users in wearable haptics design\ + \ as well as music-movement performance practice, and artistic co-creation beyond\ + \ technology development.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {45},\n author = {Cavdir, Doga},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.b24043e8},\n issn = {2220-4806},\n month = {jun},\n pdf =\ + \ {64.pdf},\n presentation-video = {https://youtu.be/tuSo2Sq7jy4},\n title = {Touch,\ + \ Listen, (Re)Act: Co-designing Vibrotactile Wearable Instruments for Deaf and\ + \ Hard of Hearing},\n url = {https://doi.org/10.21428%2F92fbeb44.b24043e8},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176842 + doi: 10.21428/92fbeb44.b24043e8 issn: 2220-4806 - keywords: 'Musical experience, non-verbal test techniques, musical parameters.' - pages: 232--235 - title: REMUPP -- An Interactive Tool for Investigating Musical Properties and Relations - url: http://www.nime.org/proceedings/2005/nime2005_232.pdf - year: 2005 + month: jun + pdf: 64.pdf + presentation-video: https://youtu.be/tuSo2Sq7jy4 + title: 'Touch, Listen, (Re)Act: Co-designing Vibrotactile Wearable Instruments for + Deaf and Hard of Hearing' + url: https://doi.org/10.21428%2F92fbeb44.b24043e8 + year: 2022 - ENTRYTYPE: inproceedings - ID: Cook2005 - abstract: 'A wide variety of singing synthesis models and methods exist,but there - are remarkably few real-time controllers for thesemodels. This paper describes - a variety of devices developedover the last few years for controlling singing - synthesismodels implemented in the Synthesis Toolkit in C++ (STK),Max/MSP, and - ChucK. 
All of the controllers share somecommon features, such as air-pressure - sensing for breathingand/or loudness control, means to control pitch, and methodsfor - selecting and blending phonemes, diphones, and words.However, the form factors, - sensors, mappings, and algorithmsvary greatly between the different controllers.' - address: 'Vancouver, BC, Canada' - author: 'Cook, Perry R.' - bibtex: "@inproceedings{Cook2005,\n abstract = {A wide variety of singing synthesis\ - \ models and methods exist,but there are remarkably few real-time controllers\ - \ for thesemodels. This paper describes a variety of devices developedover the\ - \ last few years for controlling singing synthesismodels implemented in the Synthesis\ - \ Toolkit in C++ (STK),Max/MSP, and ChucK. All of the controllers share somecommon\ - \ features, such as air-pressure sensing for breathingand/or loudness control,\ - \ means to control pitch, and methodsfor selecting and blending phonemes, diphones,\ - \ and words.However, the form factors, sensors, mappings, and algorithmsvary greatly\ - \ between the different controllers.},\n address = {Vancouver, BC, Canada},\n\ - \ author = {Cook, Perry R.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176846},\n\ - \ issn = {2220-4806},\n keywords = {Singing synthesis, real-time singing synthesis\ - \ control. },\n pages = {236--237},\n title = {Real-Time Performance Controllers\ - \ for Synthesized Singing},\n url = {http://www.nime.org/proceedings/2005/nime2005_236.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_46 + abstract: 'While the value of new digital musical instruments lies to a large extent + in their music-making capacity, analyses of new instruments in the research literature + often focus on analyses of gesture or performer experience rather than the content + of the music made with the instrument. 
In this paper we present a motivic analysis + of music made with new instruments. In the context of music, a motive is a small, + analysable musical fragment or phrase that is important in or characteristic of + a composition. We outline our method for identifying and analysing motives in + music made with new instruments, and display its use in a case study in which + 10 musicians created performances with a new large-scale digital musical instrument + that we designed. This research illustrates the value of a musicological approach + to NIME research, suggesting the need for a broader conversation about a musicology + of NIME performances, as distinct from its instruments.' + address: 'The University of Auckland, New Zealand' + articleno: 46 + author: 'Mice, Lia and McPherson, Andrew' + bibtex: "@inproceedings{NIME22_46,\n abstract = {While the value of new digital\ + \ musical instruments lies to a large extent in their music-making capacity, analyses\ + \ of new instruments in the research literature often focus on analyses of gesture\ + \ or performer experience rather than the content of the music made with the instrument.\ + \ In this paper we present a motivic analysis of music made with new instruments.\ + \ In the context of music, a motive is a small, analysable musical fragment or\ + \ phrase that is important in or characteristic of a composition. We outline our\ + \ method for identifying and analysing motives in music made with new instruments,\ + \ and display its use in a case study in which 10 musicians created performances\ + \ with a new large-scale digital musical instrument that we designed. 
This research\ + \ illustrates the value of a musicological approach to NIME research, suggesting\ + \ the need for a broader conversation about a musicology of NIME performances,\ + \ as distinct from its instruments.},\n address = {The University of Auckland,\ + \ New Zealand},\n articleno = {46},\n author = {Mice, Lia and McPherson, Andrew},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.21428/92fbeb44.8c1c9817},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {65.pdf},\n presentation-video = {https://youtu.be/nXrRJGt11J4},\n\ + \ title = {The M in {NIME}: Motivic analysis and the case for a musicology of\ + \ {NIME} performances},\n url = {https://doi.org/10.21428%2F92fbeb44.8c1c9817},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176846 + doi: 10.21428/92fbeb44.8c1c9817 issn: 2220-4806 - keywords: 'Singing synthesis, real-time singing synthesis control. ' - pages: 236--237 - title: Real-Time Performance Controllers for Synthesized Singing - url: http://www.nime.org/proceedings/2005/nime2005_236.pdf - year: 2005 + month: jun + pdf: 65.pdf + presentation-video: https://youtu.be/nXrRJGt11J4 + title: 'The M in NIME: Motivic analysis and the case for a musicology of NIME performances' + url: https://doi.org/10.21428%2F92fbeb44.8c1c9817 + year: 2022 - ENTRYTYPE: inproceedings - ID: KimBoyle2005 - abstract: 'The author describes a recent composition for piano and computer in which - the score performed by the pianist, read from a computer monitor, is generated - in real-time from a vocabulary of predetermined scanned score excerpts. The author - outlines the algorithm used to choose and display a particular excerpt and describes - some of the musical difficulties faced by the pianist in a performance of the - work.' 
- address: 'Vancouver, BC, Canada' - author: 'Kim-Boyle, David' - bibtex: "@inproceedings{KimBoyle2005,\n abstract = {The author describes a recent\ - \ composition for piano and computer in which the score performed by the pianist,\ - \ read from a computer monitor, is generated in real-time from a vocabulary of\ - \ predetermined scanned score excerpts. The author outlines the algorithm used\ - \ to choose and display a particular excerpt and describes some of the musical\ - \ difficulties faced by the pianist in a performance of the work.},\n address\ - \ = {Vancouver, BC, Canada},\n author = {Kim-Boyle, David},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176762},\n issn = {2220-4806},\n keywords = {Score generation,\ - \ Jitter. },\n pages = {238--239},\n title = {Musical Score Generation in Valses\ - \ and Etudes},\n url = {http://www.nime.org/proceedings/2005/nime2005_238.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_47 + abstract: 'While it is accepted that accessible digital musical instruments (ADMIs) + should be created with the involvement of targeted communities, participatory + design (PD) is an unsettled practice that gets defined variously, loosely or not + at all. In this paper, we explore the concept of dialogic design and provide a + case study of how it can be used in the design of an ADMI. While a future publication + will give detail of the design of this instrument and provide an analysis of the + data from this study, in this paper we set out how the conversations between researcher + and participant have prepared us to build an instrument that responds to the lived + experience of the participant.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 47 + author: 'Zayas-Garin, Eevee and McPherson, Andrew' + bibtex: "@inproceedings{NIME22_47,\n abstract = {While it is accepted that accessible\ + \ digital musical instruments (ADMIs) should be created with the involvement of\ + \ targeted communities, participatory design (PD) is an unsettled practice that\ + \ gets defined variously, loosely or not at all. In this paper, we explore the\ + \ concept of dialogic design and provide a case study of how it can be used in\ + \ the design of an ADMI. While a future publication will give detail of the design\ + \ of this instrument and provide an analysis of the data from this study, in this\ + \ paper we set out how the conversations between researcher and participant have\ + \ prepared us to build an instrument that responds to the lived experience of\ + \ the participant.},\n address = {The University of Auckland, New Zealand},\n\ + \ articleno = {47},\n author = {Zayas-Garin, Eevee and McPherson, Andrew},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.2b8ce9a4},\n issn = {2220-4806},\n month\ + \ = {jun},\n pdf = {66.pdf},\n presentation-video = {https://www.youtube.com/watch?v=8l1N3G0BdKw},\n\ + \ title = {Dialogic Design of Accessible Digital Musical Instruments: Investigating\ + \ Performer Experience},\n url = {https://doi.org/10.21428%2F92fbeb44.2b8ce9a4},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176762 + doi: 10.21428/92fbeb44.2b8ce9a4 issn: 2220-4806 - keywords: 'Score generation, Jitter. 
' - pages: 238--239 - title: Musical Score Generation in Valses and Etudes - url: http://www.nime.org/proceedings/2005/nime2005_238.pdf - year: 2005 + month: jun + pdf: 66.pdf + presentation-video: https://www.youtube.com/watch?v=8l1N3G0BdKw + title: 'Dialogic Design of Accessible Digital Musical Instruments: Investigating + Performer Experience' + url: https://doi.org/10.21428%2F92fbeb44.2b8ce9a4 + year: 2022 - ENTRYTYPE: inproceedings - ID: Baird2005 - abstract: 'No Clergy is an interactive music performance/installation inwhich the - audience is able to shape the ongoing music. In it,members of a small acoustic - ensemble read music notation fromcomputer screens. As each page refreshes, the - notation is alteredand shaped by both stochastic transformations of earlier musicwith - the same performance and audience feedback, collected viastandard CGI forms. ' - address: 'Vancouver, BC, Canada' - author: 'Baird, Kevin C.' - bibtex: "@inproceedings{Baird2005,\n abstract = {No Clergy is an interactive music\ - \ performance/installation inwhich the audience is able to shape the ongoing music.\ - \ In it,members of a small acoustic ensemble read music notation fromcomputer\ - \ screens. As each page refreshes, the notation is alteredand shaped by both stochastic\ - \ transformations of earlier musicwith the same performance and audience feedback,\ - \ collected viastandard CGI forms. 
},\n address = {Vancouver, BC, Canada},\n author\ - \ = {Baird, Kevin C.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176695},\n\ - \ issn = {2220-4806},\n keywords = {notation, stochastic, interactive, audience,\ - \ Python, Lilypond },\n pages = {240--241},\n title = {Real-Time Generation of\ - \ Music Notation via Audience Interaction Using Python and {GNU} Lilypond},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_240.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_48 + abstract: 'To the naked ear, the installation Being With The Waves appears silent, + but a hidden composition of voices, instrumental tones, and maritime sounds is + revealed through wearing modified headphones. The installation consists of an + array of tweeters emitting a multi-channel ultrasonic composition that sounds + physically in the space. Ultrasonic phenomena present at the listener’s ears are + captured by microphones embedded on the outside of headphone earcups, shifted + into audibility, and output to the headphones. The amplitude demodulation of ultrasonic + material results in exaggerated Doppler effects and listeners hear the music bend + and shift precisely with their movement. There are no movement sensors, mappings, + or feedback loops, yet the installation is perceived as interactive due to the + close entanglement of the listener with sound phenomena. The dynamic quality of + interaction emerges solely through the listening faculties of the visitor, as + an embodied sensory experience determined by their orientation to sounds, physical + movement, and perceptual behaviour. This paper describes key influences on the + installation, its ultrasonic technology, the design of modified headphones, and + the compositional approach.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 48 + author: 'Robson, Nicole and McPherson, Andrew and Bryan-Kinns, Nick' + bibtex: "@inproceedings{NIME22_48,\n abstract = {To the naked ear, the installation\ + \ Being With The Waves appears silent, but a hidden composition of voices, instrumental\ + \ tones, and maritime sounds is revealed through wearing modified headphones.\ + \ The installation consists of an array of tweeters emitting a multi-channel ultrasonic\ + \ composition that sounds physically in the space. Ultrasonic phenomena present\ + \ at the listener’s ears are captured by microphones embedded on the outside of\ + \ headphone earcups, shifted into audibility, and output to the headphones. The\ + \ amplitude demodulation of ultrasonic material results in exaggerated Doppler\ + \ effects and listeners hear the music bend and shift precisely with their movement.\ + \ There are no movement sensors, mappings, or feedback loops, yet the installation\ + \ is perceived as interactive due to the close entanglement of the listener with\ + \ sound phenomena. 
The dynamic quality of interaction emerges solely through the\ + \ listening faculties of the visitor, as an embodied sensory experience determined\ + \ by their orientation to sounds, physical movement, and perceptual behaviour.\ + \ This paper describes key influences on the installation, its ultrasonic technology,\ + \ the design of modified headphones, and the compositional approach.},\n address\ + \ = {The University of Auckland, New Zealand},\n articleno = {48},\n author =\ + \ {Robson, Nicole and McPherson, Andrew and Bryan-Kinns, Nick},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.376bc758},\n issn = {2220-4806},\n month = {jun},\n\ + \ pdf = {68.pdf},\n presentation-video = {https://www.youtube.com/watch?v=3D5S5moUvUA},\n\ + \ title = {Being With The Waves: An Ultrasonic Art Installation Enabling Rich\ + \ Interaction Without Sensors},\n url = {https://doi.org/10.21428%2F92fbeb44.376bc758},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176695 + doi: 10.21428/92fbeb44.376bc758 issn: 2220-4806 - keywords: 'notation, stochastic, interactive, audience, Python, Lilypond ' - pages: 240--241 - title: Real-Time Generation of Music Notation via Audience Interaction Using Python - and GNU Lilypond - url: http://www.nime.org/proceedings/2005/nime2005_240.pdf - year: 2005 + month: jun + pdf: 68.pdf + presentation-video: https://www.youtube.com/watch?v=3D5S5moUvUA + title: 'Being With The Waves: An Ultrasonic Art Installation Enabling Rich Interaction + Without Sensors' + url: https://doi.org/10.21428%2F92fbeb44.376bc758 + year: 2022 - ENTRYTYPE: inproceedings - ID: Fox2005 - abstract: 'This paper describes the design of SoniMime, a system forthe sonification - of hand movement for real-time timbre shaping. 
We explore the application of the - tristimulus timbremodel for the sonification of gestural data, working towardthe - goals of musical expressivity and physical responsiveness. SoniMime uses two 3-D - accelerometers connected toan Atmel microprocessor which outputs OSC control messages. - Data filtering, parameter mapping, and sound synthesis take place in Pd running - on a Linux computer.' - address: 'Vancouver, BC, Canada' - author: 'Fox, Jesse and Carlile, Jennifer' - bibtex: "@inproceedings{Fox2005,\n abstract = {This paper describes the design of\ - \ SoniMime, a system forthe sonification of hand movement for real-time timbre\ - \ shaping. We explore the application of the tristimulus timbremodel for the sonification\ - \ of gestural data, working towardthe goals of musical expressivity and physical\ - \ responsiveness. SoniMime uses two 3-D accelerometers connected toan Atmel microprocessor\ - \ which outputs OSC control messages. Data filtering, parameter mapping, and sound\ - \ synthesis take place in Pd running on a Linux computer.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Fox, Jesse and Carlile, Jennifer},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176741},\n issn = {2220-4806},\n keywords = {Sonification,\ - \ Musical Controller, Human Computer Interaction },\n pages = {242--243},\n title\ - \ = {SoniMime: Movement Sonification for Real-Time Timbre Shaping},\n url = {http://www.nime.org/proceedings/2005/nime2005_242.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_49 + abstract: 'This paper introduces micro-phenomenology, a research discipline for + exploring and uncovering the structures of lived experience, as a beneficial methodology + for studying and evaluating interactions with digital musical instruments. 
Compared + to other subjective methods, micro-phenomenology evokes and returns one to the + moment of experience, allowing access to dimensions and observations which may + not be recalled in reflection alone. We present a case study of five microphenomenological + interviews conducted with musicians about their experiences with existing digital + musical instruments. The interviews reveal deep, clear descriptions of different + modalities of synchronic moments in interaction, especially in tactile connections + and bodily sensations. We highlight the elements of interaction captured in these + interviews which would not have been revealed otherwise and the importance of + these elements in researching perception, understanding, interaction, and performance + with digital musical instruments.' + address: 'The University of Auckland, New Zealand' + articleno: 49 + author: 'Reed, Courtney N. and Nordmoen, Charlotte and Martelloni, Andrea and Lepri, + Giacomo and Robson, Nicole and Zayas-Garin, Eevee and Cotton, Kelsey and Mice, + Lia and McPherson, Andrew' + bibtex: "@inproceedings{NIME22_49,\n abstract = {This paper introduces micro-phenomenology,\ + \ a research discipline for exploring and uncovering the structures of lived experience,\ + \ as a beneficial methodology for studying and evaluating interactions with digital\ + \ musical instruments. Compared to other subjective methods, micro-phenomenology\ + \ evokes and returns one to the moment of experience, allowing access to dimensions\ + \ and observations which may not be recalled in reflection alone. We present a\ + \ case study of five microphenomenological interviews conducted with musicians\ + \ about their experiences with existing digital musical instruments. The interviews\ + \ reveal deep, clear descriptions of different modalities of synchronic moments\ + \ in interaction, especially in tactile connections and bodily sensations. 
We\ + \ highlight the elements of interaction captured in these interviews which would\ + \ not have been revealed otherwise and the importance of these elements in researching\ + \ perception, understanding, interaction, and performance with digital musical\ + \ instruments.},\n address = {The University of Auckland, New Zealand},\n articleno\ + \ = {49},\n author = {Reed, Courtney N. and Nordmoen, Charlotte and Martelloni,\ + \ Andrea and Lepri, Giacomo and Robson, Nicole and Zayas-Garin, Eevee and Cotton,\ + \ Kelsey and Mice, Lia and McPherson, Andrew},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.21428/92fbeb44.b304e4b1},\n issn = {2220-4806},\n month = {jun},\n pdf =\ + \ {69.pdf},\n presentation-video = {https://youtu.be/-Ket6l90S8I},\n title = {Exploring\ + \ Experiences with New Musical Instruments through Micro-phenomenology},\n url\ + \ = {https://doi.org/10.21428%2F92fbeb44.b304e4b1},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176741 + doi: 10.21428/92fbeb44.b304e4b1 issn: 2220-4806 - keywords: 'Sonification, Musical Controller, Human Computer Interaction ' - pages: 242--243 - title: 'SoniMime: Movement Sonification for Real-Time Timbre Shaping' - url: http://www.nime.org/proceedings/2005/nime2005_242.pdf - year: 2005 + month: jun + pdf: 69.pdf + presentation-video: https://youtu.be/-Ket6l90S8I + title: Exploring Experiences with New Musical Instruments through Micro-phenomenology + url: https://doi.org/10.21428%2F92fbeb44.b304e4b1 + year: 2022 - ENTRYTYPE: inproceedings - ID: Huott2005 - abstract: 'This paper presents the ‘Bean’, a novel controller employing a multi-touch - sensate surface in a compound curve shape. The design goals, construction, and - mapping system are discussed, along with a retrospective from a previous, similar - design.' 
- address: 'Vancouver, BC, Canada' - author: 'Huott, Robert' - bibtex: "@inproceedings{Huott2005,\n abstract = {This paper presents the ‘Bean’,\ - \ a novel controller employing a multi-touch sensate surface in a compound curve\ - \ shape. The design goals, construction, and mapping system are discussed, along\ - \ with a retrospective from a previous, similar design.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Huott, Robert},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176848},\n\ - \ issn = {2220-4806},\n keywords = {Musical controller, sensate surface, mapping\ - \ system },\n pages = {244--245},\n title = {Precise Control on Compound Curves},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_244.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_50 + abstract: 'This paper describes the 10,000 Instruments workshop, a collaborative + online event conceived to generate interface ideas and speculate on music technology + through open-ended artefacts and playful design explorations. We first present + the activity, setting its research and artistic scope. We then report on a selection + of outcomes created by workshop attendees, and examine the critical design statements + they convey. The paper concludes with reflections on the make-believe, whimsical + and troublemaking approach to instrument design adopted in the workshop. In particular, + we consider the ways this activity can support individuals’ creativity, unlock + shared musical visions and reveal unconventional perspectives on music technology + development.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 50 + author: 'Lepri, Giacomo and Bowers, John and Topley, Samantha and Stapleton, Paul + and Bennett, Peter and Andersen, Kristina and McPherson, Andrew' + bibtex: "@inproceedings{NIME22_50,\n abstract = {This paper describes the 10,000\ + \ Instruments workshop, a collaborative online event conceived to generate interface\ + \ ideas and speculate on music technology through open-ended artefacts and playful\ + \ design explorations. We first present the activity, setting its research and\ + \ artistic scope. We then report on a selection of outcomes created by workshop\ + \ attendees, and examine the critical design statements they convey. The paper\ + \ concludes with reflections on the make-believe, whimsical and troublemaking\ + \ approach to instrument design adopted in the workshop. In particular, we consider\ + \ the ways this activity can support individuals’ creativity, unlock shared musical\ + \ visions and reveal unconventional perspectives on music technology development.},\n\ + \ address = {The University of Auckland, New Zealand},\n articleno = {50},\n author\ + \ = {Lepri, Giacomo and Bowers, John and Topley, Samantha and Stapleton, Paul\ + \ and Bennett, Peter and Andersen, Kristina and McPherson, Andrew},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.21428/92fbeb44.9e7c9ba3},\n issn = {2220-4806},\n month\ + \ = {jun},\n pdf = {70.pdf},\n presentation-video = {https://youtu.be/dif8K23TR1Y},\n\ + \ title = {The 10,000 Instruments Workshop - (Im)practical Research for Critical\ + \ Speculation},\n url = {https://doi.org/10.21428%2F92fbeb44.9e7c9ba3},\n year\ + \ = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176848 + doi: 10.21428/92fbeb44.9e7c9ba3 issn: 2220-4806 - keywords: 'Musical controller, sensate surface, mapping 
system ' - pages: 244--245 - title: Precise Control on Compound Curves - url: http://www.nime.org/proceedings/2005/nime2005_244.pdf - year: 2005 + month: jun + pdf: 70.pdf + presentation-video: https://youtu.be/dif8K23TR1Y + title: 'The 10,000 Instruments Workshop - (Im)practical Research for Critical Speculation' + url: https://doi.org/10.21428%2F92fbeb44.9e7c9ba3 + year: 2022 - ENTRYTYPE: inproceedings - ID: Lugo2005 - abstract: 'This paper describes the design and implementation of BeatBoxing, a percussive - gestural interface for the liveperformance of electronic music and control of - computerbased games and musical activities.' - address: 'Vancouver, BC, Canada' - author: 'Lugo, Robert and Damondrick, Jack' - bibtex: "@inproceedings{Lugo2005,\n abstract = {This paper describes the design\ - \ and implementation of BeatBoxing, a percussive gestural interface for the liveperformance\ - \ of electronic music and control of computerbased games and musical activities.},\n\ - \ address = {Vancouver, BC, Canada},\n author = {Lugo, Robert and Damondrick,\ - \ Jack},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176778},\n issn = {2220-4806},\n\ - \ keywords = {Performance, Gestural Mapping, Music Controller, Human-Computer\ - \ Interaction, PureData (Pd), OSC },\n pages = {246--247},\n title = {Beat Boxing\ - \ : Expressive Control for Electronic Music Performance and Musical Applications},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_246.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_51 + abstract: 'In this paper we present the development of a new gestural musical instrument, + the AirSticks 2.0. 
The AirSticks 2.0 combines the latest advances in sensor fusion + of Inertial Measurement Units (IMU) and low latency wireless data transmission + over Bluetooth Low Energy (BLE), to give an expressive wireless instrument capable + of triggering and manipulating discrete and continuous sound events in real-time. + We outline the design criteria for this new instrument that has evolved from previous + prototypes, give a technical overview of the custom hardware and software developed, + and present short videos of three distinct mappings that intuitively translate + movement into musical sounds.' + address: 'The University of Auckland, New Zealand' + articleno: 51 + author: 'Trolland, Sam and Ilsar, Alon and Frame, Ciaran and McCormack, Jon and + Wilson, Elliott' + bibtex: "@inproceedings{NIME22_51,\n abstract = {In this paper we present the development\ + \ of a new gestural musical instrument, the AirSticks 2.0. The AirSticks 2.0 combines\ + \ the latest advances in sensor fusion of Inertial Measurement Units (IMU) and\ + \ low latency wireless data transmission over Bluetooth Low Energy (BLE), to give\ + \ an expressive wireless instrument capable of triggering and manipulating discrete\ + \ and continuous sound events in real-time. 
We outline the design criteria for\ + \ this new instrument that has evolved from previous prototypes, give a technical\ + \ overview of the custom hardware and software developed, and present short videos\ + \ of three distinct mappings that intuitively translate movement into musical\ + \ sounds.},\n address = {The University of Auckland, New Zealand},\n articleno\ + \ = {51},\n author = {Trolland, Sam and Ilsar, Alon and Frame, Ciaran and McCormack,\ + \ Jon and Wilson, Elliott},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.c400bdc2},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {77.pdf},\n presentation-video\ + \ = {https://youtu.be/TnEzwGshr48},\n title = {{AirSticks} 2.0: Instrument Design\ + \ for Expressive Gestural Interaction},\n url = {https://doi.org/10.21428%2F92fbeb44.c400bdc2},\n\ + \ year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176778 + doi: 10.21428/92fbeb44.c400bdc2 issn: 2220-4806 - keywords: 'Performance, Gestural Mapping, Music Controller, Human-Computer Interaction, - PureData (Pd), OSC ' - pages: 246--247 - title: 'Beat Boxing : Expressive Control for Electronic Music Performance and Musical - Applications' - url: http://www.nime.org/proceedings/2005/nime2005_246.pdf - year: 2005 + month: jun + pdf: 77.pdf + presentation-video: https://youtu.be/TnEzwGshr48 + title: 'AirSticks 2.0: Instrument Design for Expressive Gestural Interaction' + url: https://doi.org/10.21428%2F92fbeb44.c400bdc2 + year: 2022 - ENTRYTYPE: inproceedings - ID: Franco2005 - abstract: 'This paper describes the development of AirStick, an interface for musical - expression. AirStick is played in the air, in a Theremin style. It is composed - of an array of infrared proximity sensors, which allow the mapping of the position - of any interfering obstacle inside a bi-dimensional zone. 
This controller sends - both x and y control data to various real-time synthesis algorithms. ' - address: 'Vancouver, BC, Canada' - author: 'Franco, Ivan' - bibtex: "@inproceedings{Franco2005,\n abstract = {This paper describes the development\ - \ of AirStick, an interface for musical expression. AirStick is played {in the\ - \ air}, in a Theremin style. It is composed of an array of infrared proximity\ - \ sensors, which allow the mapping of the position of any interfering obstacle\ - \ inside a bi-dimensional zone. This controller sends both x and y control data\ - \ to various real-time synthesis algorithms. },\n address = {Vancouver, BC, Canada},\n\ - \ author = {Franco, Ivan},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176747},\n\ - \ issn = {2220-4806},\n keywords = {Music Controller, Infrared Sensing, Computer\ - \ Music. },\n pages = {248--249},\n title = {The Airstick: A Free-Gesture Controller\ - \ Using Infrared Sensing},\n url = {http://www.nime.org/proceedings/2005/nime2005_248.pdf},\n\ - \ year = {2005}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176747 - issn: 2220-4806 - keywords: 'Music Controller, Infrared Sensing, Computer Music. ' - pages: 248--249 - title: 'The Airstick: A Free-Gesture Controller Using Infrared Sensing' - url: http://www.nime.org/proceedings/2005/nime2005_248.pdf - year: 2005 - - -- ENTRYTYPE: inproceedings - ID: Carlile2005 - abstract: 'OROBORO is a novel collaborative controller which focuses on musical - performance as social experience by exploring synchronized actions of two musicians - operating a single instrument. Each performer uses two paddle mechanisms – one - for hand orientation sensing and one for servo-motor actuated feedback. 
We introduce - a haptic mirror in which the movement of one performer’s sensed hand is used to - induce movement of the partner’s actuated hand and vice versa. We describe theoretical - motivation, and hardware/software implementation.' - address: 'Vancouver, BC, Canada' - author: 'Carlile, Jennifer and Hartmann, Björn' - bibtex: "@inproceedings{Carlile2005,\n abstract = {OROBORO is a novel collaborative\ - \ controller which focuses on musical performance as social experience by exploring\ - \ synchronized actions of two musicians operating a single instrument. Each performer\ - \ uses two paddle mechanisms – one for hand orientation sensing and one for servo-motor\ - \ actuated feedback. We introduce a haptic mirror in which the movement of one\ - \ performer’s sensed hand is used to induce movement of the partner’s actuated\ - \ hand and vice versa. We describe theoretical motivation, and hardware/software\ - \ implementation.},\n address = {Vancouver, BC, Canada},\n author = {Carlile,\ - \ Jennifer and Hartmann, Bj{\\''{o}}rn},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176721},\n\ - \ issn = {2220-4806},\n keywords = {Musical Controller, Collaborative Control,\ - \ Haptic Interfaces },\n pages = {250--251},\n title = {{OR}OBORO: A Collaborative\ - \ Controller with Interpersonal Haptic Feedback},\n url = {http://www.nime.org/proceedings/2005/nime2005_250.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_52 + abstract: 'Musical grid interfaces are becoming an industry standard for interfaces + that allow interaction with music software, electronics, or instruments. However, + there are no clearly defined design standards or guidelines, resulting in grid + interfaces being a multitude of interfaces with competing design approaches, making + these already abstract UIs even more challenging. 
In this paper, we compare the + co-existing design approaches of UIs for grid interfaces used by commercial and + non-commercial developers and designers, and present the results of three experiments + that tested the benefits of co-existing design approaches to mitigate some of + the inherent design challenges.' + address: 'The University of Auckland, New Zealand' + articleno: 52 + author: 'Rossmy, Beat and Rauh, Maximilian and Wiethoff, Alexander' + bibtex: "@inproceedings{NIME22_52,\n abstract = {Musical grid interfaces are becoming\ + \ an industry standard for interfaces that allow interaction with music software,\ + \ electronics, or instruments. However, there are no clearly defined design standards\ + \ or guidelines, resulting in grid interfaces being a multitude of interfaces\ + \ with competing design approaches, making these already abstract UIs even more\ + \ challenging. In this paper, we compare the co-existing design approaches of\ + \ UIs for grid interfaces used by commercial and non-commercial developers and\ + \ designers, and present the results of three experiments that tested the benefits\ + \ of co-existing design approaches to mitigate some of the inherent design challenges.},\n\ + \ address = {The University of Auckland, New Zealand},\n articleno = {52},\n author\ + \ = {Rossmy, Beat and Rauh, Maximilian and Wiethoff, Alexander},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.21428/92fbeb44.db84ecd0},\n issn = {2220-4806},\n month = {jun},\n\ + \ pdf = {86.pdf},\n presentation-video = {https://www.youtube.com/watch?v=JF514EWYiQ8},\n\ + \ title = {Towards User Interface Guidelines for Musical Grid Interfaces},\n url\ + \ = {https://doi.org/10.21428%2F92fbeb44.db84ecd0},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176721 + doi: 10.21428/92fbeb44.db84ecd0 issn: 2220-4806 
- keywords: 'Musical Controller, Collaborative Control, Haptic Interfaces ' - pages: 250--251 - title: 'OROBORO: A Collaborative Controller with Interpersonal Haptic Feedback' - url: http://www.nime.org/proceedings/2005/nime2005_250.pdf - year: 2005 + month: jun + pdf: 86.pdf + presentation-video: https://www.youtube.com/watch?v=JF514EWYiQ8 + title: Towards User Interface Guidelines for Musical Grid Interfaces + url: https://doi.org/10.21428%2F92fbeb44.db84ecd0 + year: 2022 - ENTRYTYPE: inproceedings - ID: Rodriguez2005 - abstract: 'We present a Virtual Interface to Feel Emotions called VIFE _alpha v.01 - (Virtual Interface to Feel Emotions). The work investigates the idea of Synaesthesia - and her enormous possibilities creating new realities, sensations and zones where - the user can find new points of interaction. This interface allows the user to - create sonorous and visual compositions in real time. 6 three-dimensional sonorous - forms are modified according to the movements of the user. These forms represent - sonorous objects that respond to this by means of sensorial stimuli. Multiple - combinations of colors and sound effects superpose to an a the others to give - rise to a unique experience.' - address: 'Vancouver, BC, Canada' - author: 'Rodríguez, David and Rodríguez, Iván' - bibtex: "@inproceedings{Rodriguez2005,\n abstract = {We present a Virtual Interface\ - \ to Feel Emotions called VIFE {\\_}alpha v.01 (Virtual Interface to Feel Emotions).\ - \ The work investigates the idea of Synaesthesia and her enormous possibilities\ - \ creating new realities, sensations and zones where the user can find new points\ - \ of interaction. This interface allows the user to create sonorous and visual\ - \ compositions in real time. 6 three-dimensional sonorous forms are modified according\ - \ to the movements of the user. These forms represent sonorous objects that respond\ - \ to this by means of sensorial stimuli. 
Multiple combinations of colors and sound\ - \ effects superpose to an a the others to give rise to a unique experience.},\n\ - \ address = {Vancouver, BC, Canada},\n author = {Rodr\\'{\\i}guez, David and Rodr\\\ - '{\\i}guez, Iv\\'{a}n},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176806},\n\ - \ issn = {2220-4806},\n keywords = {Synaesthesia, 3D render, new reality, virtual\ - \ interface, creative interaction, sensors. },\n pages = {252--253},\n title =\ - \ {VIFE \\_alpha v.01 Real-time Visual Sound Installation performed by Glove-Gesture},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_252.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_53 + abstract: 'Applications for musical grid interfaces are designed without any established + guidelines or defined design rules. However, within applications of different + manufacturers, musicians, and designers, common patterns and conventions can be + observed which might be developing towards unofficial standards. In this survey + we analyzed 40 applications, instruments, or controllers and collected 18 types + of recurring UI elements, which are clustered, described, and interactively presented + in this survey. We further postulate 3 theses which standard UI elements should + meet and propose novel UI elements deduced from WIMP standards.' + address: 'The University of Auckland, New Zealand' + articleno: 53 + author: 'Rossmy, Beat' + bibtex: "@inproceedings{NIME22_53,\n abstract = {Applications for musical grid interfaces\ + \ are designed without any established guidelines or defined design rules. However,\ + \ within applications of different manufacturers, musicians, and designers, common\ + \ patterns and conventions can be observed which might be developing towards unofficial\ + \ standards. 
In this survey we analyzed 40 applications, instruments, or controllers\ + \ and collected 18 types of recurring UI elements, which are clustered, described,\ + \ and interactively presented in this survey. We further postulate 3 theses which\ + \ standard UI elements should meet and propose novel UI elements deduced from\ + \ WIMP standards.},\n address = {The University of Auckland, New Zealand},\n articleno\ + \ = {53},\n author = {Rossmy, Beat},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.563bfea9},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {87.pdf},\n presentation-video\ + \ = {https://www.youtube.com/watch?v=CPHY4_G_LR0},\n title = {Buttons, Sliders,\ + \ and Keys {\\textendash} A Survey on Musical Grid Interface Standards},\n url\ + \ = {https://doi.org/10.21428%2F92fbeb44.563bfea9},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176806 + doi: 10.21428/92fbeb44.563bfea9 issn: 2220-4806 - keywords: 'Synaesthesia, 3D render, new reality, virtual interface, creative interaction, - sensors. ' - pages: 252--253 - title: VIFE _alpha v.01 Real-time Visual Sound Installation performed by Glove-Gesture - url: http://www.nime.org/proceedings/2005/nime2005_252.pdf - year: 2005 + month: jun + pdf: 87.pdf + presentation-video: https://www.youtube.com/watch?v=CPHY4_G_LR0 + title: 'Buttons, Sliders, and Keys – A Survey on Musical Grid Interface Standards' + url: https://doi.org/10.21428%2F92fbeb44.563bfea9 + year: 2022 - ENTRYTYPE: inproceedings - ID: Hindman2005 - abstract: 'The Sonictroller was originally conceived as a means ofintroducing competition - into an improvisatory musicalperformance. 
By reverse-engineering a popular video - gameconsole, we were able to map sound information (volume,pitch, and pitch sequences) - to any continuous or momentaryaction of a video game sprite.' - address: 'Vancouver, BC, Canada' - author: 'Hindman, David and Kiser, Spencer' - bibtex: "@inproceedings{Hindman2005,\n abstract = {The Sonictroller was originally\ - \ conceived as a means ofintroducing competition into an improvisatory musicalperformance.\ - \ By reverse-engineering a popular video gameconsole, we were able to map sound\ - \ information (volume,pitch, and pitch sequences) to any continuous or momentaryaction\ - \ of a video game sprite.},\n address = {Vancouver, BC, Canada},\n author = {Hindman,\ - \ David and Kiser, Spencer},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176756},\n\ - \ issn = {2220-4806},\n keywords = {video game, Nintendo, music, sound, controller,\ - \ Mortal Kombat, trumpet, guitar, voice },\n pages = {254--255},\n title = {Sonictroller},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_254.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_54 + abstract: 'Historically marginalised instruments witness and bear vital stories + that can deeply affect identity and galvanise communities when revitalised. We + present the protolangspil as a contemporary interpretation of the langspil, an + Icelandic monochord-like folk instrument, and describe its agential and performative + contributions to the first Icelandic NIME research lab. This paper describes how + the proto-langspil has served as an instrument in establishing the research methodology + of our new lab and concretised the research agenda via a series of encounters + with music performers and composers, luthiers, anthropologists, musicologists, + designers and philosophers. 
These encounters have informed and challenged our + research practices, mapped our surroundings, and embedded us in the local social + fabric. We share our proto-langspil for replication, and reflect on encounters + as a methodology framing mechanism that eschews the more traditional empirical + approaches in HCI. We conclude with a final provocation for NIME researchers to + embrace AI research with an open mind.' + address: 'The University of Auckland, New Zealand' + articleno: 54 + author: 'Armitage, Jack and Magnusson, Thor and Shepardson, Victor and Ulfarsson, + Halldor' + bibtex: "@inproceedings{NIME22_54,\n abstract = {Historically marginalised instruments\ + \ witness and bear vital stories that can deeply affect identity and galvanise\ + \ communities when revitalised. We present the protolangspil as a contemporary\ + \ interpretation of the langspil, an Icelandic monochord-like folk instrument,\ + \ and describe its agential and performative contributions to the first Icelandic\ + \ NIME research lab. This paper describes how the proto-langspil has served as\ + \ an instrument in establishing the research methodology of our new lab and concretised\ + \ the research agenda via a series of encounters with music performers and composers,\ + \ luthiers, anthropologists, musicologists, designers and philosophers. These\ + \ encounters have informed and challenged our research practices, mapped our surroundings,\ + \ and embedded us in the local social fabric. We share our proto-langspil for\ + \ replication, and reflect on encounters as a methodology framing mechanism that\ + \ eschews the more traditional empirical approaches in HCI. 
We conclude with a\ + \ final provocation for NIME researchers to embrace AI research with an open mind.},\n\ + \ address = {The University of Auckland, New Zealand},\n articleno = {54},\n author\ + \ = {Armitage, Jack and Magnusson, Thor and Shepardson, Victor and Ulfarsson,\ + \ Halldor},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.21428/92fbeb44.6178f575},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {88.pdf},\n presentation-video\ + \ = {https://youtu.be/8tRTF1lB6Hg},\n title = {The Proto-Langspil: Launching an\ + \ Icelandic {NIME} Research Lab with the Help of a Marginalised Instrument},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.6178f575},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176756 + doi: 10.21428/92fbeb44.6178f575 issn: 2220-4806 - keywords: 'video game, Nintendo, music, sound, controller, Mortal Kombat, trumpet, - guitar, voice ' - pages: 254--255 - title: Sonictroller - url: http://www.nime.org/proceedings/2005/nime2005_254.pdf - year: 2005 + month: jun + pdf: 88.pdf + presentation-video: https://youtu.be/8tRTF1lB6Hg + title: 'The Proto-Langspil: Launching an Icelandic NIME Research Lab with the Help + of a Marginalised Instrument' + url: https://doi.org/10.21428%2F92fbeb44.6178f575 + year: 2022 - ENTRYTYPE: inproceedings - ID: Verplank2005 - abstract: 'Pluck, ring, rub, bang, strike, and squeeze are all simple gestures used - in controlling music. A single motor/encoder plus a force-sensor has proved to - be a useful platform for experimenting with haptic feedback in controlling computer - music. The surprise is that the “best” haptics (precise, stable) may not be the - most “musical”.' 
- address: 'Vancouver, BC, Canada' - author: 'Verplank, William' - bibtex: "@inproceedings{Verplank2005,\n abstract = {Pluck, ring, rub, bang, strike,\ - \ and squeeze are all simple gestures used in controlling music. A single motor/encoder\ - \ plus a force-sensor has proved to be a useful platform for experimenting with\ - \ haptic feedback in controlling computer music. The surprise is that the “best”\ - \ haptics (precise, stable) may not be the most “musical”.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Verplank, William},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176832},\n issn = {2220-4806},\n keywords = {Music control,\ - \ haptic feedback, physical interaction design, Input/output devices, interactive\ - \ systems, haptic I/O},\n pages = {256--257},\n title = {Haptic Music Exercises},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_256.pdf},\n year = {2005}\n\ - }\n" + ID: NIME22_55 + abstract: 'The lived body, or soma, is the designation for the phenomenological + experience of being a body, rather than simply a corporeal entity. Bodily knowledge, + which evolves through bodily awareness, carries the lived body’s reflectivity. + In this paper, such considerations are put in the context of previous work at + NIME, specifically that revolving around with the vocal tract or the voice, due + to its singular relation with embodiment. We understand that focusing on somaesthetics + allows for novel ways of engaging with technology as well as highlighting biases + that might go unnoticed otherwise. We present an inexpensive application of a + respiration sensor that emerges from the aforementioned conceptualisations. Lastly, + we reflect on how to better frame the role of bodily awareness in NIME.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 55 + author: 'Tapparo, Carla Sophie and Zappi, Victor' + bibtex: "@inproceedings{NIME22_55,\n abstract = {The lived body, or soma, is the\ + \ designation for the phenomenological experience of being a body, rather than\ + \ simply a corporeal entity. Bodily knowledge, which evolves through bodily awareness,\ + \ carries the lived body’s reflectivity. In this paper, such considerations are\ + \ put in the context of previous work at NIME, specifically that revolving around\ + \ with the vocal tract or the voice, due to its singular relation with embodiment.\ + \ We understand that focusing on somaesthetics allows for novel ways of engaging\ + \ with technology as well as highlighting biases that might go unnoticed otherwise.\ + \ We present an inexpensive application of a respiration sensor that emerges from\ + \ the aforementioned conceptualisations. Lastly, we reflect on how to better frame\ + \ the role of bodily awareness in NIME.},\n address = {The University of Auckland,\ + \ New Zealand},\n articleno = {55},\n author = {Tapparo, Carla Sophie and Zappi,\ + \ Victor},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.21428/92fbeb44.7e04cfc8},\n issn = {2220-4806},\n\ + \ month = {jun},\n pdf = {99.pdf},\n presentation-video = {https://youtu.be/GEndgifZmkI},\n\ + \ title = {Bodily Awareness Through {NIMEs}: Deautomatising Music Making Processes},\n\ + \ url = {https://doi.org/10.21428%2F92fbeb44.7e04cfc8},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176832 + doi: 10.21428/92fbeb44.7e04cfc8 issn: 2220-4806 - keywords: 'Music control, haptic feedback, physical interaction design, Input/output - devices, interactive systems, haptic I/O' - pages: 256--257 - title: Haptic Music Exercises - url: 
http://www.nime.org/proceedings/2005/nime2005_256.pdf - year: 2005 + month: jun + pdf: 99.pdf + presentation-video: https://youtu.be/GEndgifZmkI + title: 'Bodily Awareness Through NIMEs: Deautomatising Music Making Processes' + url: https://doi.org/10.21428%2F92fbeb44.7e04cfc8 + year: 2022 - ENTRYTYPE: inproceedings - ID: Eaton2005 - abstract: 'In this presentation, we discuss and demonstrate a multiple touch sensitive - (MTS) keyboard developed by Robert Moog for John Eaton. Each key of the keyboard - is equipped with sensors that detect the three-dimensional position of the performer''s - finger. The presentation includes some of Eaton''s performances for certain earlier - prototypes as well as this keyboard. ' - address: 'Vancouver, BC, Canada' - author: 'Eaton, John and Moog, Robert' - bibtex: "@inproceedings{Eaton2005,\n abstract = {In this presentation, we discuss\ - \ and demonstrate a multiple touch sensitive (MTS) keyboard developed by Robert\ - \ Moog for John Eaton. Each key of the keyboard is equipped with sensors that\ - \ detect the three-dimensional position of the performer's finger. The presentation\ - \ includes some of Eaton's performances for certain earlier prototypes as well\ - \ as this keyboard. 
},\n address = {Vancouver, BC, Canada},\n author = {Eaton,\ - \ John and Moog, Robert},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176735},\n\ - \ issn = {2220-4806},\n keywords = {Multiple touch sensitive, MTS, keyboard, key\ - \ sensor design, upgrading to present-day computers },\n pages = {258--259},\n\ - \ title = {Multiple-Touch-Sensitive Keyboard},\n url = {http://www.nime.org/proceedings/2005/nime2005_258.pdf},\n\ - \ year = {2005}\n}\n" + ID: NIME22_56 + abstract: 'The Kanchay_Yupana// is an open-source NIME for the generation of rhythms, + inspired by the Andean yupana: a tangible board similar to an abacus of different + sizes and materials with a system of carved geometric boxes into which seeds or + pebbles were disposed to perform arithmetic calculations, used since pre-colonial + times. As in the traditional artifact, the interaction of this new electronic + yupana is based on the arrangement of seeds on a specially designed board with + boxes, holes, and photoresistors. The shadow detected by the seeds’ positioning + sends real-time motion data in MIDI messages to Pure Data in a drum machine patch. + As a result, percussion samples of Andean instruments fill pulses in a four-quarter + beat, generating patterns that can be transformed live into different rhythms. + This interface complements the Electronic_Khipu_ (a previous NIME based on an + Andean khipu) by producing the rhythmic component. This experience unites ancestral + and contemporary technologies in experimental sound performance following the + theoretical-practical research on the vindication of the memory in ancestral Andean + technological interfaces made invisible by colonization, reusing them from a decolonial + perspective in NIMEs.' 
+ address: 'The University of Auckland, New Zealand' + articleno: 56 + author: 'Cadavid Hinojosa, Laddy Patricia' + bibtex: "@inproceedings{NIME22_56,\n abstract = {The Kanchay_Yupana// is an open-source\ + \ NIME for the generation of rhythms, inspired by the Andean yupana: a tangible\ + \ board similar to an abacus of different sizes and materials with a system of\ + \ carved geometric boxes into which seeds or pebbles were disposed to perform\ + \ arithmetic calculations, used since pre-colonial times. As in the traditional\ + \ artifact, the interaction of this new electronic yupana is based on the arrangement\ + \ of seeds on a specially designed board with boxes, holes, and photoresistors.\ + \ The shadow detected by the seeds’ positioning sends real-time motion data in\ + \ MIDI messages to Pure Data in a drum machine patch. As a result, percussion\ + \ samples of Andean instruments fill pulses in a four-quarter beat, generating\ + \ patterns that can be transformed live into different rhythms. This interface\ + \ complements the Electronic_Khipu_ (a previous NIME based on an Andean khipu)\ + \ by producing the rhythmic component. 
This experience unites ancestral and contemporary\ + \ technologies in experimental sound performance following the theoretical-practical\ + \ research on the vindication of the memory in ancestral Andean technological\ + \ interfaces made invisible by colonization, reusing them from a decolonial perspective\ + \ in NIMEs.},\n address = {The University of Auckland, New Zealand},\n articleno\ + \ = {56},\n author = {Cadavid Hinojosa, Laddy Patricia},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ copyright = {Creative Commons Attribution 4.0 International},\n doi = {10.21428/92fbeb44.61d01269},\n\ + \ issn = {2220-4806},\n month = {jun},\n pdf = {49.pdf},\n presentation-video\ + \ = {https://youtu.be/MpMFL6R14kQ},\n title = {Kanchay_Yupana{\\slash \\slash}:\ + \ Tangible rhythm sequencer inspired by ancestral Andean technologies},\n url\ + \ = {https://doi.org/10.21428/92fbeb44.61d01269},\n year = {2022}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176735 + copyright: Creative Commons Attribution 4.0 International + doi: 10.21428/92fbeb44.61d01269 issn: 2220-4806 - keywords: 'Multiple touch sensitive, MTS, keyboard, key sensor design, upgrading - to present-day computers ' - pages: 258--259 - title: Multiple-Touch-Sensitive Keyboard - url: http://www.nime.org/proceedings/2005/nime2005_258.pdf - year: 2005 + month: jun + pdf: 49.pdf + presentation-video: https://youtu.be/MpMFL6R14kQ + title: 'Kanchay_Yupana/ /: Tangible rhythm sequencer inspired by ancestral Andean + technologies' + url: https://doi.org/10.21428/92fbeb44.61d01269 + year: 2022 - ENTRYTYPE: inproceedings - ID: Fraietta2005 - abstract: 'This paper will demonstrate the use of the Smart Controller workbench - in the Interactive Bell Garden. 
' - address: 'Vancouver, BC, Canada' - author: 'Fraietta, Angelo' - bibtex: "@inproceedings{Fraietta2005,\n abstract = {This paper will demonstrate\ - \ the use of the Smart Controller workbench in the Interactive Bell Garden. },\n\ - \ address = {Vancouver, BC, Canada},\n author = {Fraietta, Angelo},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176743},\n issn = {2220-4806},\n keywords\ - \ = {Control Voltage, Open Sound Control, Algorithmic Composition, MIDI, Sound\ - \ Installations, Programmable Logic Control, Synthesizers. },\n pages = {260--261},\n\ - \ title = {Smart Controller / Bell Garden Demo},\n url = {http://www.nime.org/proceedings/2005/nime2005_260.pdf},\n\ - \ year = {2005}\n}\n" + ID: lpereira2014 + abstract: 'The Well--Sequenced Synthesizer is a series of sequencers that create + music in dialog with the user. Through the sequencers'' physical interfaces, users + can control music theory-based generative algorithms. This series --a work-in-progress-is + composed by three sequencers at this time. The first one, called The Counterpointer, + takes a melody input from the user and responds by generating voices based on + the rules of eighteenth--century counterpoint. The second one is based on a recent + treatise on harmony and counterpoint by music theorist Dmitri Tymoczco: El Ordenador + lets users explore a set of features of tonality by constraining randomly generated + music according to one or more of them. El Ordenador gives the user less control + than The Counterpointer, but more than La Mecánica, the third sequencer in the + series. La Mecánica plays back the sequences generated by El Ordenador using a + punch-card reading music box mechanism. It makes the digital patterns visible + and tactile, and links them back to the physical world.' 
+ address: 'London, United Kingdom' + author: Luisa Pereira Hors + bibtex: "@inproceedings{lpereira2014,\n abstract = {The Well--Sequenced Synthesizer\ + \ is a series of sequencers that create music in dialog with the user. Through\ + \ the sequencers' physical interfaces, users can control music theory-based generative\ + \ algorithms. This series --a work-in-progress-is composed by three sequencers\ + \ at this time. The first one, called The Counterpointer, takes a melody input\ + \ from the user and responds by generating voices based on the rules of eighteenth--century\ + \ counterpoint. The second one is based on a recent treatise on harmony and counterpoint\ + \ by music theorist Dmitri Tymoczco: El Ordenador lets users explore a set of\ + \ features of tonality by constraining randomly generated music according to one\ + \ or more of them. El Ordenador gives the user less control than The Counterpointer,\ + \ but more than La Mec{\\'a}nica, the third sequencer in the series. La Mec{\\\ + 'a}nica plays back the sequences generated by El Ordenador using a punch-card\ + \ reading music box mechanism. 
It makes the digital patterns visible and tactile,\ + \ and links them back to the physical world.},\n address = {London, United Kingdom},\n\ + \ author = {Luisa Pereira Hors},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178806},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {88--89},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {The Well-Sequenced Synthesizer},\n url = {http://www.nime.org/proceedings/2014/nime2014_2.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176743 + doi: 10.5281/zenodo.1178806 issn: 2220-4806 - keywords: 'Control Voltage, Open Sound Control, Algorithmic Composition, MIDI, Sound - Installations, Programmable Logic Control, Synthesizers. ' - pages: 260--261 - title: Smart Controller / Bell Garden Demo - url: http://www.nime.org/proceedings/2005/nime2005_260.pdf - year: 2005 + month: June + pages: 88--89 + publisher: 'Goldsmiths, University of London' + title: The Well-Sequenced Synthesizer + url: http://www.nime.org/proceedings/2014/nime2014_2.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Melo2005 - abstract: 'The Swayway is an audio/MIDI device inspired by the simpleconcept of - the wind chime.This interactive sculpture translates its swaying motion,triggered - by the user, into sound and light. Additionally, themotion of the reeds contributes - to the visual aspect of thepiece, converting the whole into a sensory and engagingexperience.' - address: 'Vancouver, BC, Canada' - author: 'Melo, Mauricio and Fan, Doria' - bibtex: "@inproceedings{Melo2005,\n abstract = {The Swayway is an audio/MIDI device\ - \ inspired by the simpleconcept of the wind chime.This interactive sculpture translates\ - \ its swaying motion,triggered by the user, into sound and light. 
Additionally,\ - \ themotion of the reeds contributes to the visual aspect of thepiece, converting\ - \ the whole into a sensory and engagingexperience.},\n address = {Vancouver, BC,\ - \ Canada},\n author = {Melo, Mauricio and Fan, Doria},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176790},\n issn = {2220-4806},\n keywords = {Interactive\ - \ sound sculpture, flex sensors, midi chimes, LEDs, sound installation. },\n pages\ - \ = {262--263},\n title = {Swayway --- Midi Chimes},\n url = {http://www.nime.org/proceedings/2005/nime2005_262.pdf},\n\ - \ year = {2005}\n}\n" + ID: ptimothy2014 + abstract: 'This paper describes the technologies, collaborative processes, and artistic + intents of the musical composition Engravings for Prepared Snare Drum, iPad, and + Computer, which was composed by Timothy Polashek for percussionist Brad Meyer + using a jointly created electroacoustic and interactive musical instrument. During + performance, the percussionist equally manipulates and expresses through two surfaces, + an iPad displaying an interactive touch screen and a snare drum augmented with + various foreign objects, including a contact microphone adhered to the drumhead''s + surface. A computer program created for this composition runs on a laptop computer + in front of the percussionist. The software captures sound from the contact microphone + and transforms this sound through audio signal processing controlled by the performer''s + gestures on the iPad. The computer screen displays an animated graphic score, + as well as the current states of iPad controls and audio signal processing, for + the performer. Many compositional and technological approaches used in this project + pay tribute to composer John Cage, since the premiere performance of Engravings + for Prepared Snare Drum, iPad, and Computer took place in 2012, the centennial + celebration of Cage''s birth year.' 
+ address: 'London, United Kingdom' + author: Timothy Polashek and Brad Meyer + bibtex: "@inproceedings{ptimothy2014,\n abstract = {This paper describes the technologies,\ + \ collaborative processes, and artistic intents of the musical composition Engravings\ + \ for Prepared Snare Drum, iPad, and Computer, which was composed by Timothy Polashek\ + \ for percussionist Brad Meyer using a jointly created electroacoustic and interactive\ + \ musical instrument. During performance, the percussionist equally manipulates\ + \ and expresses through two surfaces, an iPad displaying an interactive touch\ + \ screen and a snare drum augmented with various foreign objects, including a\ + \ contact microphone adhered to the drumhead's surface. A computer program created\ + \ for this composition runs on a laptop computer in front of the percussionist.\ + \ The software captures sound from the contact microphone and transforms this\ + \ sound through audio signal processing controlled by the performer's gestures\ + \ on the iPad. The computer screen displays an animated graphic score, as well\ + \ as the current states of iPad controls and audio signal processing, for the\ + \ performer. 
Many compositional and technological approaches used in this project\ + \ pay tribute to composer John Cage, since the premiere performance of Engravings\ + \ for Prepared Snare Drum, iPad, and Computer took place in 2012, the centennial\ + \ celebration of Cage's birth year.},\n address = {London, United Kingdom},\n\ + \ author = {Timothy Polashek and Brad Meyer},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178907},\n issn = {2220-4806},\n month = {June},\n pages =\ + \ {82--83},\n publisher = {Goldsmiths, University of London},\n title = {Engravings\ + \ for Prepared Snare Drum, iPad, and Computer},\n url = {http://www.nime.org/proceedings/2014/nime2014_254.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176790 + doi: 10.5281/zenodo.1178907 issn: 2220-4806 - keywords: 'Interactive sound sculpture, flex sensors, midi chimes, LEDs, sound installation. ' - pages: 262--263 - title: Swayway --- Midi Chimes - url: http://www.nime.org/proceedings/2005/nime2005_262.pdf - year: 2005 + month: June + pages: 82--83 + publisher: 'Goldsmiths, University of London' + title: 'Engravings for Prepared Snare Drum, iPad, and Computer' + url: http://www.nime.org/proceedings/2014/nime2014_254.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Wang2005 - abstract: 'This paper describes the transformation of an everyday object into a - digital musical instrument. By tracking hand movements and tilt on one of two - axes, the Bubbaboard, a transformed handheld washboard, allows a user to play - scales at different octaves while simultaneously offering the ability to use its - inherent acoustic percussive qualities. Processed sound is fed to the Mommaspeaker, - which creates physically generated vibrato at a speed determined by tilting the - Bubbaboard on its second axis. 
' - address: 'Vancouver, BC, Canada' - author: 'Wang, Derek' - bibtex: "@inproceedings{Wang2005,\n abstract = {This paper describes the transformation\ - \ of an everyday object into a digital musical instrument. By tracking hand movements\ - \ and tilt on one of two axes, the Bubbaboard, a transformed handheld washboard,\ - \ allows a user to play scales at different octaves while simultaneously offering\ - \ the ability to use its inherent acoustic percussive qualities. Processed sound\ - \ is fed to the Mommaspeaker, which creates physically generated vibrato at a\ - \ speed determined by tilting the Bubbaboard on its second axis. },\n address\ - \ = {Vancouver, BC, Canada},\n author = {Wang, Derek},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176836},\n issn = {2220-4806},\n keywords = {Gesture\ - \ based controllers, Musical Performance, MIDI, Accelerometer, Microcontroller,\ - \ Contact Microphone },\n pages = {264--265},\n title = {Bubbaboard and Mommaspeaker:\ - \ Creating Digital Tonal Sounds from an Acoustic Percussive Instrument},\n url\ - \ = {http://www.nime.org/proceedings/2005/nime2005_264.pdf},\n year = {2005}\n\ + ID: mzareei2014 + abstract: 'Over the past few decades, there has been an increasing number of musical + instruments and works of sound art that incorporate robotics and mechatronics. + This paper proposes a new approach in classification of such works and focuses + on those whose ideological roots can be sought in Luigi Russolo''s noise-intoners + (intonarumori). It presents a discussion on works in which mechatronics is used + to investigate new and traditionally perceived as ``extra-musical'''' sonic territories, + and introduces Rasper: a new mechatronic noise-intoner that features an electromechanical + apparatus to create noise physically, while regulating it rhythmically and timbrally.' 
+ address: 'London, United Kingdom' + author: Mo Zareei and Ajay Kapur and Dale A. Carnegie + bibtex: "@inproceedings{mzareei2014,\n abstract = {Over the past few decades, there\ + \ has been an increasing number of musical instruments and works of sound art\ + \ that incorporate robotics and mechatronics. This paper proposes a new approach\ + \ in classification of such works and focuses on those whose ideological roots\ + \ can be sought in Luigi Russolo's noise-intoners (intonarumori). It presents\ + \ a discussion on works in which mechatronics is used to investigate new and traditionally\ + \ perceived as ``extra-musical'' sonic territories, and introduces Rasper: a new\ + \ mechatronic noise-intoner that features an electromechanical apparatus to create\ + \ noise physically, while regulating it rhythmically and timbrally.},\n address\ + \ = {London, United Kingdom},\n author = {Mo Zareei and Ajay Kapur and Dale A.\ + \ Carnegie},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178995},\n issn\ + \ = {2220-4806},\n month = {June},\n pages = {473--478},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Rasper: a Mechatronic Noise-Intoner},\n url\ + \ = {http://www.nime.org/proceedings/2014/nime2014_268.pdf},\n year = {2014}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176836 - issn: 2220-4806 - keywords: 'Gesture based controllers, Musical Performance, MIDI, Accelerometer, - Microcontroller, Contact Microphone ' - pages: 264--265 - title: 'Bubbaboard and Mommaspeaker: Creating Digital Tonal Sounds from an Acoustic - Percussive Instrument' - url: http://www.nime.org/proceedings/2005/nime2005_264.pdf - year: 2005 - - -- ENTRYTYPE: inproceedings - ID: Flety2005 - abstract: 'The Wise Box is a new wireless digitizing interface for sensors and controllers. 
- An increasing demand for this kind of hardware, especially in the field of dance - and computer performance lead us to design a wireless digitizer that allows for - multiple users, with high bandwidth and accuracy. The interface design was initiated - in early 2004 and shortly described in reference [1]. Our recent effort was directed - to make this device available for the community on the form of a manufactured - product, similarly to our previous interfaces such as AtoMIC Pro, Eobody or Ethersense - [1][2][3]. We describe here the principles we used for the design of the device - as well as its technical specifications. The demo will show several devices running - at once and used in real-time with a various set of sensors. ' - address: 'Vancouver, BC, Canada' - author: 'Fléty, Emmanuel' - bibtex: "@inproceedings{Flety2005,\n abstract = {The Wise Box is a new wireless\ - \ digitizing interface for sensors and controllers. An increasing demand for this\ - \ kind of hardware, especially in the field of dance and computer performance\ - \ lead us to design a wireless digitizer that allows for multiple users, with\ - \ high bandwidth and accuracy. The interface design was initiated in early 2004\ - \ and shortly described in reference [1]. Our recent effort was directed to make\ - \ this device available for the community on the form of a manufactured product,\ - \ similarly to our previous interfaces such as AtoMIC Pro, Eobody or Ethersense\ - \ [1][2][3]. We describe here the principles we used for the design of the device\ - \ as well as its technical specifications. The demo will show several devices\ - \ running at once and used in real-time with a various set of sensors. 
},\n address\ - \ = {Vancouver, BC, Canada},\n author = {Fl\\'{e}ty, Emmanuel},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176739},\n issn = {2220-4806},\n keywords = {Gesture,\ - \ Sensors, WiFi, 802.11, OpenSoundControl. },\n pages = {266--267},\n title =\ - \ {The WiSe Box: a Multi-performer Wireless Sensor Interface using {WiFi} and\ - \ OSC},\n url = {http://www.nime.org/proceedings/2005/nime2005_266.pdf},\n year\ - \ = {2005}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176739 + doi: 10.5281/zenodo.1178995 issn: 2220-4806 - keywords: 'Gesture, Sensors, WiFi, 802.11, OpenSoundControl. ' - pages: 266--267 - title: 'The WiSe Box: a Multi-performer Wireless Sensor Interface using WiFi and - OSC' - url: http://www.nime.org/proceedings/2005/nime2005_266.pdf - year: 2005 + month: June + pages: 473--478 + publisher: 'Goldsmiths, University of London' + title: 'Rasper: a Mechatronic Noise-Intoner' + url: http://www.nime.org/proceedings/2014/nime2014_268.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Bowen2005 - abstract: 'Soundstone is a small wireless music controller that tracks movement - and gestures, and maps these signals to characteristics of various synthesized - and sampled sounds. It is intended to become a general-purpose platform for exploring - the sonification of movement, with an emphasis on tactile (haptic) feedback. ' - address: 'Vancouver, BC, Canada' - author: 'Bowen, Adam' - bibtex: "@inproceedings{Bowen2005,\n abstract = {Soundstone is a small wireless\ - \ music controller that tracks movement and gestures, and maps these signals to\ - \ characteristics of various synthesized and sampled sounds. It is intended to\ - \ become a general-purpose platform for exploring the sonification of movement,\ - \ with an emphasis on tactile (haptic) feedback. 
},\n address = {Vancouver, BC,\ - \ Canada},\n author = {Bowen, Adam},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176711},\n\ - \ issn = {2220-4806},\n keywords = {Gesture recognition, haptics, human factors,\ - \ force, acceleration, tactile feedback, general purpose controller, wireless.\ - \ },\n pages = {268--269},\n title = {Soundstone: A {3-D} Wireless Music Controller},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_268.pdf},\n year = {2005}\n\ - }\n" + ID: cudell2014 + abstract: 'In our current era, where smartphones are commonplace and buzzwords like + ``the internet of things,'''' ``wearable tech,'''' and ``augmented reality'''' + are ubiquitous, translating performance gestures into data and intuitively mapping + it to control musical/visual parameters in the realm of computing should be trivial; + but it isn''t. Technical barriers still persist that limit this activity to exclusive + groups capable of learning skillsets far removed from one''s musical craft. These + skills include programming, soldering, microprocessors, wireless protocols, and + circuit design. Those of us whose creative activity is centered in NIME have to + become polyglots of many disciplines to achieve our work. In the NIME community, + it''s unclear that we should even draw distinctions between ''artist'' and ''technician'', + because these skillsets have become integral to our creative practice. However, + what about the vast communities of musicians, composers, and artists who want + to leverage sensing to take their craft into new territory with no background + in circuits, soldering, embedded programming, and sensor function? eMersion, a + plug-and-play, modular, wireless alternative solution for creating NIMEs will + be presented. It enables one to bypass the technical hurdles listed above in favor + of immediate experimentation with musical practice and wireless sensing. 
A unique + software architecture will also be unveiled that enables one to quickly and intuitively + process and map unpredictable numbers and types of wireless data streams, the + Digital Data Workstation.' + address: 'London, United Kingdom' + author: Chet Udell and James Paul Sain + bibtex: "@inproceedings{cudell2014,\n abstract = {In our current era, where smartphones\ + \ are commonplace and buzzwords like ``the internet of things,'' ``wearable tech,''\ + \ and ``augmented reality'' are ubiquitous, translating performance gestures into\ + \ data and intuitively mapping it to control musical/visual parameters in the\ + \ realm of computing should be trivial; but it isn't. Technical barriers still\ + \ persist that limit this activity to exclusive groups capable of learning skillsets\ + \ far removed from one's musical craft. These skills include programming, soldering,\ + \ microprocessors, wireless protocols, and circuit design. Those of us whose creative\ + \ activity is centered in NIME have to become polyglots of many disciplines to\ + \ achieve our work. In the NIME community, it's unclear that we should even draw\ + \ distinctions between 'artist' and 'technician', because these skillsets have\ + \ become integral to our creative practice. However, what about the vast communities\ + \ of musicians, composers, and artists who want to leverage sensing to take their\ + \ craft into new territory with no background in circuits, soldering, embedded\ + \ programming, and sensor function? eMersion, a plug-and-play, modular, wireless\ + \ alternative solution for creating NIMEs will be presented. It enables one to\ + \ bypass the technical hurdles listed above in favor of immediate experimentation\ + \ with musical practice and wireless sensing. 
A unique software architecture will\ + \ also be unveiled that enables one to quickly and intuitively process and map\ + \ unpredictable numbers and types of wireless data streams, the Digital Data Workstation.},\n\ + \ address = {London, United Kingdom},\n author = {Chet Udell and James Paul Sain},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178971},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {130--133},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {eMersion | Sensor-controlled Electronic Music Modules\ + \ \\& Digital Data Workstation},\n url = {http://www.nime.org/proceedings/2014/nime2014_272.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176711 + doi: 10.5281/zenodo.1178971 issn: 2220-4806 - keywords: 'Gesture recognition, haptics, human factors, force, acceleration, tactile - feedback, general purpose controller, wireless. ' - pages: 268--269 - title: 'Soundstone: A 3-D Wireless Music Controller' - url: http://www.nime.org/proceedings/2005/nime2005_268.pdf - year: 2005 + month: June + pages: 130--133 + publisher: 'Goldsmiths, University of London' + title: eMersion | Sensor-controlled Electronic Music Modules & Digital Data Workstation + url: http://www.nime.org/proceedings/2014/nime2014_272.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Guisan2005 - abstract: 'INTRIUM is an interactive sound installation exploring the inside vibration - of the atrium. A certain number of architectural elements are fitted with acoustic - sensors in order to capture the vibration they produce when they are manipulated - or touched by hands. This raw sound is further processed in real-time, allowing - the participants to create a sonic landscape in the atrium, as the result of a - collaborative and collective work between them.' 
- address: 'Vancouver, BC, Canada' - author: 'Guisan, Alain C.' - bibtex: "@inproceedings{Guisan2005,\n abstract = {INTRIUM is an interactive sound\ - \ installation exploring the inside vibration of the atrium. A certain number\ - \ of architectural elements are fitted with acoustic sensors in order to capture\ - \ the vibration they produce when they are manipulated or touched by hands. This\ - \ raw sound is further processed in real-time, allowing the participants to create\ - \ a sonic landscape in the atrium, as the result of a collaborative and collective\ - \ work between them.},\n address = {Vancouver, BC, Canada},\n author = {Guisan,\ - \ Alain C.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176752},\n issn\ - \ = {2220-4806},\n keywords = {Interactive sound installation, collaborative work,\ - \ sound processing, acoustic source localization.},\n pages = {270--270},\n title\ - \ = {Interactive Sound Installation: INTRIUM},\n url = {http://www.nime.org/proceedings/2005/nime2005_270.pdf},\n\ - \ year = {2005}\n}\n" + ID: tmurraybrowne2014 + abstract: 'We introduce Harmonic Motion, a free open source toolkit for artists, + musicians and designers working with gestural data. Extracting musically useful + features from captured gesture data can be challenging, with projects often requiring + bespoke processing techniques developed through iterations of tweaking equations + involving a number of constant values -sometimes referred to as `magic numbers''. + Harmonic Motion provides a robust interface for rapid prototyping of patches to + process gestural data and a framework through which approaches may be encapsulated, + reused and shared with others. In addition, we describe our design process in + which both personal experience and a survey of potential users informed a set + of specific goals for the software.' 
+ address: 'London, United Kingdom' + author: Tim Murray-Browne and Mark Plumbley + bibtex: "@inproceedings{tmurraybrowne2014,\n abstract = {We introduce Harmonic Motion,\ + \ a free open source toolkit for artists, musicians and designers working with\ + \ gestural data. Extracting musically useful features from captured gesture data\ + \ can be challenging, with projects often requiring bespoke processing techniques\ + \ developed through iterations of tweaking equations involving a number of constant\ + \ values -sometimes referred to as `magic numbers'. Harmonic Motion provides a\ + \ robust interface for rapid prototyping of patches to process gestural data and\ + \ a framework through which approaches may be encapsulated, reused and shared\ + \ with others. In addition, we describe our design process in which both personal\ + \ experience and a survey of potential users informed a set of specific goals\ + \ for the software.},\n address = {London, United Kingdom},\n author = {Tim Murray-Browne\ + \ and Mark Plumbley},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178887},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {213--216},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Harmonic Motion: A Toolkit for Processing\ + \ Gestural Data for Interactive Sound},\n url = {http://www.nime.org/proceedings/2014/nime2014_273.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176752 + doi: 10.5281/zenodo.1178887 issn: 2220-4806 - keywords: 'Interactive sound installation, collaborative work, sound processing, - acoustic source localization.' 
- pages: 270--270 - title: 'Interactive Sound Installation: INTRIUM' - url: http://www.nime.org/proceedings/2005/nime2005_270.pdf - year: 2005 + month: June + pages: 213--216 + publisher: 'Goldsmiths, University of London' + title: 'Harmonic Motion: A Toolkit for Processing Gestural Data for Interactive + Sound' + url: http://www.nime.org/proceedings/2014/nime2014_273.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Socolofsky2005 - abstract: 'Contemplace is a spatial personality that redesigns itselfdynamically - according to its conversations with its visitors.Sometimes welcoming, sometimes - shy, and sometimeshostile, Contemplace''s mood is apparent through a display ofprojected - graphics, spatial sound, and physical motion.Contemplace is an environment in - which inhabitationbecomes a two-way dialogue.' - address: 'Vancouver, BC, Canada' - author: 'Socolofsky, Eric' - bibtex: "@inproceedings{Socolofsky2005,\n abstract = {Contemplace is a spatial personality\ - \ that redesigns itselfdynamically according to its conversations with its visitors.Sometimes\ - \ welcoming, sometimes shy, and sometimeshostile, Contemplace's mood is apparent\ - \ through a display ofprojected graphics, spatial sound, and physical motion.Contemplace\ - \ is an environment in which inhabitationbecomes a two-way dialogue.},\n address\ - \ = {Vancouver, BC, Canada},\n author = {Socolofsky, Eric},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176822},\n issn = {2220-4806},\n keywords = {Interactive\ - \ space, spatial installation, graphic and aural display, motion tracking, Processing,\ - \ Flosc },\n pages = {271--271},\n title = {Contemplace},\n url = {http://www.nime.org/proceedings/2005/nime2005_271.pdf},\n\ - \ year = {2005}\n}\n" + ID: slui2014 + abstract: 'Pop music jamming on the keyboard requires massive music knowledge. 
Musician + needs to understand and memorize the behavior of each chord in different keys. + However, most simple pop music follows a common chord progression pattern. This + pattern applies to most simple pop music on all the 12 keys. We designed an app + that can reduce the difficulty of music jamming on the keyboard by using this + pattern. The app displays the current chord in the Roman numeral and suggests + the expected next chord in an easy to understand way on a smartphone. This work + investigates into the human computer interaction perspective of music performance. + We use a smartphone app as a bridge, which assists musician to react faster in + music jamming by transforming the complex music knowledge into a simple, unified + and easy to understand format. Experiment result shows that this app can help + the non-keyboardist musician to learn pop music jamming. It also shows that the + app is useful to assist keyboardist in making key transpose and playing music + in the key with many sharps and flats. We will use the same interface design to + guide user on playing other chord progressions such as the jazz chord progression.' + address: 'London, United Kingdom' + author: Simon Lui + bibtex: "@inproceedings{slui2014,\n abstract = {Pop music jamming on the keyboard\ + \ requires massive music knowledge. Musician needs to understand and memorize\ + \ the behavior of each chord in different keys. However, most simple pop music\ + \ follows a common chord progression pattern. This pattern applies to most simple\ + \ pop music on all the 12 keys. We designed an app that can reduce the difficulty\ + \ of music jamming on the keyboard by using this pattern. The app displays the\ + \ current chord in the Roman numeral and suggests the expected next chord in an\ + \ easy to understand way on a smartphone. This work investigates into the human\ + \ computer interaction perspective of music performance. 
We use a smartphone app\ + \ as a bridge, which assists musician to react faster in music jamming by transforming\ + \ the complex music knowledge into a simple, unified and easy to understand format.\ + \ Experiment result shows that this app can help the non-keyboardist musician\ + \ to learn pop music jamming. It also shows that the app is useful to assist keyboardist\ + \ in making key transpose and playing music in the key with many sharps and flats.\ + \ We will use the same interface design to guide user on playing other chord progressions\ + \ such as the jazz chord progression.},\n address = {London, United Kingdom},\n\ + \ author = {Simon Lui},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178855},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {98--101},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {A Real Time Common Chord Progression Guide\ + \ on the Smartphone for Jamming Pop Song on the Music Keyboard},\n url = {http://www.nime.org/proceedings/2014/nime2014_275.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176822 + doi: 10.5281/zenodo.1178855 issn: 2220-4806 - keywords: 'Interactive space, spatial installation, graphic and aural display, motion - tracking, Processing, Flosc ' - pages: 271--271 - title: Contemplace - url: http://www.nime.org/proceedings/2005/nime2005_271.pdf - year: 2005 + month: June + pages: 98--101 + publisher: 'Goldsmiths, University of London' + title: A Real Time Common Chord Progression Guide on the Smartphone for Jamming + Pop Song on the Music Keyboard + url: http://www.nime.org/proceedings/2014/nime2014_275.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Marinelli2005 - abstract: 'Mocean is an immersive environment that creates sensoryrelationships - between natural media, particularly exploringthe 
potential of water as an emotive - interface.' - address: 'Vancouver, BC, Canada' - author: 'Marinelli, Maia and Lamenzo, Jared and Borissov, Liubo' - bibtex: "@inproceedings{Marinelli2005,\n abstract = {Mocean is an immersive environment\ - \ that creates sensoryrelationships between natural media, particularly exploringthe\ - \ potential of water as an emotive interface.},\n address = {Vancouver, BC, Canada},\n\ - \ author = {Marinelli, Maia and Lamenzo, Jared and Borissov, Liubo},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176786},\n issn = {2220-4806},\n keywords\ - \ = {New interface, water, pipe organ, natural media, PIC microcontroller, wind\ - \ instrument, human computer interface. },\n pages = {272--272},\n title = {Mocean},\n\ - \ url = {http://www.nime.org/proceedings/2005/nime2005_272.pdf},\n year = {2005}\n\ - }\n" + ID: tmagnusson2014 + abstract: 'Live coding emphasises improvisation. It is an art practice that merges + the act of musical composition and performance into a public act of projected + writing. This paper introduces the Threnoscope system, which includes a live coding + micro-language for drone-based microtonal composition. The paper discusses the + aims and objectives of the system, elucidates the design decisions, and introduces + in particular the code score feature present in the Threnoscope. The code score + is a novel element in the design of live coding systems allowing for improvisation + through a graphic score, rendering a visual representation of past and future + events in a real-time performance. The paper demonstrates how the system''s methods + can be mapped ad hoc to GUIor hardware-based control.' 
+ address: 'London, United Kingdom' + author: Thor Magnusson + bibtex: "@inproceedings{tmagnusson2014,\n abstract = {Live coding emphasises improvisation.\ + \ It is an art practice that merges the act of musical composition and performance\ + \ into a public act of projected writing. This paper introduces the Threnoscope\ + \ system, which includes a live coding micro-language for drone-based microtonal\ + \ composition. The paper discusses the aims and objectives of the system, elucidates\ + \ the design decisions, and introduces in particular the code score feature present\ + \ in the Threnoscope. The code score is a novel element in the design of live\ + \ coding systems allowing for improvisation through a graphic score, rendering\ + \ a visual representation of past and future events in a real-time performance.\ + \ The paper demonstrates how the system's methods can be mapped ad hoc to GUIor\ + \ hardware-based control.},\n address = {London, United Kingdom},\n author = {Thor\ + \ Magnusson},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178857},\n issn\ + \ = {2220-4806},\n month = {June},\n pages = {19--22},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Improvising with the Threnoscope: Integrating\ + \ Code, Hardware, GUI, Network, and Graphic Scores},\n url = {http://www.nime.org/proceedings/2014/nime2014_276.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176786 + doi: 10.5281/zenodo.1178857 issn: 2220-4806 - keywords: 'New interface, water, pipe organ, natural media, PIC microcontroller, - wind instrument, human computer interface. 
' - pages: 272--272 - title: Mocean - url: http://www.nime.org/proceedings/2005/nime2005_272.pdf - year: 2005 + month: June + pages: 19--22 + publisher: 'Goldsmiths, University of London' + title: 'Improvising with the Threnoscope: Integrating Code, Hardware, GUI, Network, + and Graphic Scores' + url: http://www.nime.org/proceedings/2014/nime2014_276.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Matsumura2005 - abstract: '''Hop Step Junk'' is an interactive sound installation that creates audio - and visual representations of the audience''s footsteps. The sound of a footstep - is very expressive. Depending on one''s weight, clothing and gate, a footstep - can sound quite different. The period between steps defines one''s personal rhythm. - The sound output of ''Hop Step Junk'' is wholly derived from the audience''s footsteps. - ''Hop Step Junk'' creates a multi-generational playground, an instrument that - an audience can easily play.' - address: 'Vancouver, BC, Canada' - author: 'Matsumura, Seiichiro and Arakawa, Chuichi' - bibtex: "@inproceedings{Matsumura2005,\n abstract = {'Hop Step Junk' is an interactive\ - \ sound installation that creates audio and visual representations of the audience's\ - \ footsteps. The sound of a footstep is very expressive. Depending on one's weight,\ - \ clothing and gate, a footstep can sound quite different. The period between\ - \ steps defines one's personal rhythm. The sound output of 'Hop Step Junk' is\ - \ wholly derived from the audience's footsteps. 
'Hop Step Junk' creates a multi-generational\ - \ playground, an instrument that an audience can easily play.},\n address = {Vancouver,\ - \ BC, Canada},\n author = {Matsumura, Seiichiro and Arakawa, Chuichi},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176788},\n issn = {2220-4806},\n keywords\ - \ = {Footsteps, body action, interactive, visualization, simple and reliable interface,\ - \ contact microphone, sound playground},\n pages = {273--273},\n title = {Hop\ - \ Step Junk: Sonic Visualization using Footsteps},\n url = {http://www.nime.org/proceedings/2005/nime2005_273.pdf},\n\ - \ year = {2005}\n}\n" + ID: strump2014 + abstract: 'This paper describes the concept and design of Orphion, a new digital + musical instrument based on the Apple iPad. We begin by outlining primary challenges + associated with DMI design, focussing on the specific problems Orphion seeks to + address such as requirements for haptic feedback from the device. Orphion achieves + this by incorporating an interaction model based on tonally tuned virtual ``pads'''' + in user-configurable layouts, where the pitch and timbre associated with each + pad depends on the initial point of touch, touch point size and size variation, + and position after the initial touch. These parameters control a physical model + for sound generation with visual feedback provided via the iPad display. We present + findings from the research and development process including design revisions + made in response to user testing. Finally, conclusions are made about the effectiveness + of the instrument based on large-scale user feedback.' 
+ address: 'London, United Kingdom' + author: Sebastian Trump and Jamie Bullock + bibtex: "@inproceedings{strump2014,\n abstract = {This paper describes the concept\ + \ and design of Orphion, a new digital musical instrument based on the Apple iPad.\ + \ We begin by outlining primary challenges associated with DMI design, focussing\ + \ on the specific problems Orphion seeks to address such as requirements for haptic\ + \ feedback from the device. Orphion achieves this by incorporating an interaction\ + \ model based on tonally tuned virtual ``pads'' in user-configurable layouts,\ + \ where the pitch and timbre associated with each pad depends on the initial point\ + \ of touch, touch point size and size variation, and position after the initial\ + \ touch. These parameters control a physical model for sound generation with visual\ + \ feedback provided via the iPad display. We present findings from the research\ + \ and development process including design revisions made in response to user\ + \ testing. 
Finally, conclusions are made about the effectiveness of the instrument\ + \ based on large-scale user feedback.},\n address = {London, United Kingdom},\n\ + \ author = {Sebastian Trump and Jamie Bullock},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178963},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {159--162},\n publisher = {Goldsmiths, University of London},\n title = {Orphion:\ + \ A Gestural Multi-Touch Instrument for the iPad},\n url = {http://www.nime.org/proceedings/2014/nime2014_277.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176788 + doi: 10.5281/zenodo.1178963 issn: 2220-4806 - keywords: 'Footsteps, body action, interactive, visualization, simple and reliable - interface, contact microphone, sound playground' - pages: 273--273 - title: 'Hop Step Junk: Sonic Visualization using Footsteps' - url: http://www.nime.org/proceedings/2005/nime2005_273.pdf - year: 2005 + month: June + pages: 159--162 + publisher: 'Goldsmiths, University of London' + title: 'Orphion: A Gestural Multi-Touch Instrument for the iPad' + url: http://www.nime.org/proceedings/2014/nime2014_277.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Deutscher2005 - address: 'Vancouver, BC, Canada' - author: 'Deutscher, Meghan and Fels, Sidney S. and Hoskinson, Reynald and Takahashi, - Sachiyo' - bibtex: "@inproceedings{Deutscher2005,\n address = {Vancouver, BC, Canada},\n author\ - \ = {Deutscher, Meghan and Fels, Sidney S. 
and Hoskinson, Reynald and Takahashi,\ - \ Sachiyo},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176733},\n issn\ - \ = {2220-4806},\n keywords = {Mediascape, sound spatialization, interactive art,\ - \ Beluga whale},\n pages = {274--274},\n title = {Echology},\n url = {http://www.nime.org/proceedings/2005/nime2005_274.pdf},\n\ - \ year = {2005}\n}\n" + ID: mkrzyzaniak2014 + abstract: 'This paper describes the implementation of a digital audio / visual feedback + system for an extemporaneous dance performance. The system was designed to automatically + synchronize aesthetically with the dancers. The performance was premiered at the + Slingshot festival in Athens Georgia on March 9, 2013.' + address: 'London, United Kingdom' + author: Michael Krzyzaniak and Julie Akerly and Matthew Mosher and Muharrem Yildirim + and Garth Paine + bibtex: "@inproceedings{mkrzyzaniak2014,\n abstract = {This paper describes the\ + \ implementation of a digital audio / visual feedback system for an extemporaneous\ + \ dance performance. The system was designed to automatically synchronize aesthetically\ + \ with the dancers. The performance was premiered at the Slingshot festival in\ + \ Athens Georgia on March 9, 2013.},\n address = {London, United Kingdom},\n author\ + \ = {Michael Krzyzaniak and Julie Akerly and Matthew Mosher and Muharrem Yildirim\ + \ and Garth Paine},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178841},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {303--306},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Separation: Short Range Repulsion. 
Implementation\ + \ of an Automated Aesthetic Synchronization System for a Dance Performance.},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_279.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176733 + doi: 10.5281/zenodo.1178841 issn: 2220-4806 - keywords: 'Mediascape, sound spatialization, interactive art, Beluga whale' - pages: 274--274 - title: Echology - url: http://www.nime.org/proceedings/2005/nime2005_274.pdf - year: 2005 + month: June + pages: 303--306 + publisher: 'Goldsmiths, University of London' + title: 'Separation: Short Range Repulsion. Implementation of an Automated Aesthetic + Synchronization System for a Dance Performance.' + url: http://www.nime.org/proceedings/2014/nime2014_279.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Fels2004 - abstract: 'The Tooka was created as an exploration of two personinstruments. We - have worked with two Tooka performers toenhance the original experimental device - to make a musicalinstrument played and enjoyed by them. The main additions tothe - device include: an additional button that behaves as amusic capture button, a - bend sensor, an additional thumbactuated pressure sensor for vibrato, additional - musicalmapping strategies, and new interfacing hardware. Thesedevelopments a rose - through exper iences andrecommendations from the musicians playing it. In addition - tothe changes to the Tooka, this paper describes the learningprocess and experiences - of the musicians performing with theTooka.' - address: 'Hamamatsu, Japan' - author: 'Fels, Sidney S. and Kaastra, Linda and Takahashi, Sachiyo and Mccaig, Graeme' - bibtex: "@inproceedings{Fels2004,\n abstract = {The Tooka was created as an exploration\ - \ of two personinstruments. We have worked with two Tooka performers toenhance\ - \ the original experimental device to make a musicalinstrument played and enjoyed\ - \ by them. 
The main additions tothe device include: an additional button that\ - \ behaves as amusic capture button, a bend sensor, an additional thumbactuated\ - \ pressure sensor for vibrato, additional musicalmapping strategies, and new interfacing\ - \ hardware. Thesedevelopments a rose through exper iences andrecommendations from\ - \ the musicians playing it. In addition tothe changes to the Tooka, this paper\ - \ describes the learningprocess and experiences of the musicians performing with\ - \ theTooka.},\n address = {Hamamatsu, Japan},\n author = {Fels, Sidney S. and\ - \ Kaastra, Linda and Takahashi, Sachiyo and Mccaig, Graeme},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176595},\n issn = {2220-4806},\n keywords = {Musician-centred\ - \ design, two-person musical instrument.},\n pages = {1--6},\n title = {Evolving\ - \ Tooka: from Experiment to Instrument},\n url = {http://www.nime.org/proceedings/2004/nime2004_001.pdf},\n\ - \ year = {2004}\n}\n" + ID: ylim2014 + abstract: 'Smartphone-based music conducting is a convenient and effective approach + to conducting practice that aims to overcome the practical limitations of traditional + conducting practice and provide enhanced user experience compared to those of + previous virtual conducting examples. This work introduces the v-Maestro, a smartphone + application for music conducting. Powered by the Gyroscope of the device, the + v-Maestro analyzes conducting motions that allows the user to not only control + the tempo but also simulate ``cueing'''' for different instruments. Results from + user tests show that, in spite of certain ergonomic problems, new conducting practice + with the v-Maestro is more satisfactory than traditional methods and has a strong + potential as a conducting practice tool.' 
+ address: 'London, United Kingdom' + author: Yang Kyu Lim and Woon Seung Yeo + bibtex: "@inproceedings{ylim2014,\n abstract = {Smartphone-based music conducting\ + \ is a convenient and effective approach to conducting practice that aims to overcome\ + \ the practical limitations of traditional conducting practice and provide enhanced\ + \ user experience compared to those of previous virtual conducting examples. This\ + \ work introduces the v-Maestro, a smartphone application for music conducting.\ + \ Powered by the Gyroscope of the device, the v-Maestro analyzes conducting motions\ + \ that allows the user to not only control the tempo but also simulate ``cueing''\ + \ for different instruments. Results from user tests show that, in spite of certain\ + \ ergonomic problems, new conducting practice with the v-Maestro is more satisfactory\ + \ than traditional methods and has a strong potential as a conducting practice\ + \ tool.},\n address = {London, United Kingdom},\n author = {Yang Kyu Lim and Woon\ + \ Seung Yeo},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178851},\n issn\ + \ = {2220-4806},\n month = {June},\n pages = {573--576},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Smartphone-based Music Conducting},\n url\ + \ = {http://www.nime.org/proceedings/2014/nime2014_281.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176595 + doi: 10.5281/zenodo.1178851 issn: 2220-4806 - keywords: 'Musician-centred design, two-person musical instrument.' 
- pages: 1--6 - title: 'Evolving Tooka: from Experiment to Instrument' - url: http://www.nime.org/proceedings/2004/nime2004_001.pdf - year: 2004 + month: June + pages: 573--576 + publisher: 'Goldsmiths, University of London' + title: Smartphone-based Music Conducting + url: http://www.nime.org/proceedings/2014/nime2014_281.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Kapur2004 - abstract: 'This paper describes the design of an Electronic Sitar controller, adigitally - modified version of Saraswati''s (the Hindu Goddess ofMusic) 19-stringed, pumpkin - shelled, traditional North Indianinstrument. The ESitar uses sensor technology - to extract gesturalinformation from a performer, deducing music information suchas - pitch, pluck timing, thumb pressure, and 3-axes of head tilt totrigger real-time - sounds and graphics. It allows for a variety oftraditional sitar technique as - well as new performance methods.Graphical feedback allows for artistic display - and pedagogicalfeedback. The ESitar uses a programmable Atmel microprocessorwhich - outputs control messages via a standard MIDI jack.' - address: 'Hamamatsu, Japan' - author: 'Kapur, Ajay and Lazier, Ariel J. and Davidson, Philip L. and Wilson, Scott - and Cook, Perry R.' - bibtex: "@inproceedings{Kapur2004,\n abstract = {This paper describes the design\ - \ of an Electronic Sitar controller, adigitally modified version of Saraswati's\ - \ (the Hindu Goddess ofMusic) 19-stringed, pumpkin shelled, traditional North\ - \ Indianinstrument. The ESitar uses sensor technology to extract gesturalinformation\ - \ from a performer, deducing music information suchas pitch, pluck timing, thumb\ - \ pressure, and 3-axes of head tilt totrigger real-time sounds and graphics. 
It\ - \ allows for a variety oftraditional sitar technique as well as new performance\ - \ methods.Graphical feedback allows for artistic display and pedagogicalfeedback.\ - \ The ESitar uses a programmable Atmel microprocessorwhich outputs control messages\ - \ via a standard MIDI jack.},\n address = {Hamamatsu, Japan},\n author = {Kapur,\ - \ Ajay and Lazier, Ariel J. and Davidson, Philip L. and Wilson, Scott and Cook,\ - \ Perry R.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176623},\n issn\ - \ = {2220-4806},\n keywords = {atmel microcontroller,controller,electronic sitar,esitar,human\ - \ computer interface,indian string controller,instrument graphical feedback,midi,veldt},\n\ - \ pages = {7--12},\n title = {The Electronic Sitar Controller},\n url = {http://www.nime.org/proceedings/2004/nime2004_007.pdf},\n\ - \ year = {2004}\n}\n" + ID: jdeng2014 + abstract: 'Music jamming is an extremely difficult task for musical novices. Trying + to extend this meaningful activity, which can be highly enjoyable, to a larger + recipient group, we present WIJAM, a mobile application for an ad-hoc group of + musical novices to perform improvisation along with a music master. In this ``master-players'''' + paradigm, the master offers a music backing, orchestrates the musical flow, and + gives feedbacks to the players; the players improvise by tapping and sketching + on their smartphones. We believe that this paradigm can be a significant contribution + to the possibility of music playing by a group of novices with no instrumental + training leading to decent musical results.' + address: 'London, United Kingdom' + author: Jun-qi Deng and Francis Chi Moon Lau and Ho-Cheung Ng and Yu-Kwong Kwok + and Hung-Kwan Chen and Yu-heng Liu + bibtex: "@inproceedings{jdeng2014,\n abstract = {Music jamming is an extremely difficult\ + \ task for musical novices. 
Trying to extend this meaningful activity, which can\ + \ be highly enjoyable, to a larger recipient group, we present WIJAM, a mobile\ + \ application for an ad-hoc group of musical novices to perform improvisation\ + \ along with a music master. In this ``master-players'' paradigm, the master offers\ + \ a music backing, orchestrates the musical flow, and gives feedbacks to the players;\ + \ the players improvise by tapping and sketching on their smartphones. We believe\ + \ that this paradigm can be a significant contribution to the possibility of music\ + \ playing by a group of novices with no instrumental training leading to decent\ + \ musical results.},\n address = {London, United Kingdom},\n author = {Jun-qi\ + \ Deng and Francis Chi Moon Lau and Ho-Cheung Ng and Yu-Kwong Kwok and Hung-Kwan\ + \ Chen and Yu-heng Liu},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178746},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {407--410},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {WIJAM: A Mobile Collaborative Improvisation\ + \ Platform under Master-players Paradigm},\n url = {http://www.nime.org/proceedings/2014/nime2014_284.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176623 + doi: 10.5281/zenodo.1178746 issn: 2220-4806 - keywords: 'atmel microcontroller,controller,electronic sitar,esitar,human computer - interface,indian string controller,instrument graphical feedback,midi,veldt' - pages: 7--12 - title: The Electronic Sitar Controller - url: http://www.nime.org/proceedings/2004/nime2004_007.pdf - year: 2004 + month: June + pages: 407--410 + publisher: 'Goldsmiths, University of London' + title: 'WIJAM: A Mobile Collaborative Improvisation Platform under Master-players + Paradigm' + url: http://www.nime.org/proceedings/2014/nime2014_284.pdf + 
year: 2014 - ENTRYTYPE: inproceedings - ID: Takahata2004 - abstract: 'We have developed new sound feedback for powerful karate training with - pleasure, which enables to extract player''s movement, understand player''s activities, - and generate them to sounds. We have designed a karate training environment which - consists of a multimodal room with cameras, microphones, video displays and loud - speakers, and wearable devices with a sensor and a sound generator. Experiments - have been conducted on ten Karate beginnners for ten months to examine the effectiveness - to learn appropriate body action and sharpness in basic punch called TSUKI. The - experimental results suggest the proposed sound feedback and the training environments - enable beginners to achieve enjoyable Karate.' - address: 'Hamamatsu, Japan' - author: 'Takahata, Masami and Shiraki, Kensuke and Sakane, Yutaka and Takebayashi, - Yoichi' - bibtex: "@inproceedings{Takahata2004,\n abstract = {We have developed new sound\ - \ feedback for powerful karate training with pleasure, which enables to extract\ - \ player's movement, understand player's activities, and generate them to sounds.\ - \ We have designed a karate training environment which consists of a multimodal\ - \ room with cameras, microphones, video displays and loud speakers, and wearable\ - \ devices with a sensor and a sound generator. Experiments have been conducted\ - \ on ten Karate beginnners for ten months to examine the effectiveness to learn\ - \ appropriate body action and sharpness in basic punch called TSUKI. 
The experimental\ - \ results suggest the proposed sound feedback and the training environments enable\ - \ beginners to achieve enjoyable Karate.},\n address = {Hamamatsu, Japan},\n author\ - \ = {Takahata, Masami and Shiraki, Kensuke and Sakane, Yutaka and Takebayashi,\ - \ Yoichi},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176673},\n issn = {2220-4806},\n\ - \ keywords = {Sound feedback, Karate, Learning environment, Wearable device},\n\ - \ pages = {13--18},\n title = {Sound Feedback for Powerful Karate Training},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_013.pdf},\n year = {2004}\n\ - }\n" + ID: tmurraybrowne12014 + abstract: 'The Cave of Sounds is an interactive sound installation made up of new + musical instruments. Exploring what it means to create instruments together within + the context of NIME and the maker scene, each instrument was created by an individual + but with the aim of forming a part of this new ensemble over ten months, with + the final installation debuting at the Barbican in London in August 2013. In this + paper, we describe how ideas of prehistoric collective music making inspired and + guided this participatory musical work, both in terms of how it was created and + the audience experience of musical collaboration we aimed to create in the final + installation. Following a detailed description of the installation itself, we + reflect on the successes, lessons and future challenges of encouraging creative + musical collaboration among members of an audience.' + address: 'London, United Kingdom' + author: Tim Murray-Browne and Dom Aversano and Susanna Garcia and Wallace Hobbes + and Daniel Lopez and Tadeo Sendon and Panagiotis Tigas and Kacper Ziemianin and + Duncan Chapman + bibtex: "@inproceedings{tmurraybrowne12014,\n abstract = {The Cave of Sounds is\ + \ an interactive sound installation made up of new musical instruments. 
Exploring\ + \ what it means to create instruments together within the context of NIME and\ + \ the maker scene, each instrument was created by an individual but with the aim\ + \ of forming a part of this new ensemble over ten months, with the final installation\ + \ debuting at the Barbican in London in August 2013. In this paper, we describe\ + \ how ideas of prehistoric collective music making inspired and guided this participatory\ + \ musical work, both in terms of how it was created and the audience experience\ + \ of musical collaboration we aimed to create in the final installation. Following\ + \ a detailed description of the installation itself, we reflect on the successes,\ + \ lessons and future challenges of encouraging creative musical collaboration\ + \ among members of an audience.},\n address = {London, United Kingdom},\n author\ + \ = {Tim Murray-Browne and Dom Aversano and Susanna Garcia and Wallace Hobbes\ + \ and Daniel Lopez and Tadeo Sendon and Panagiotis Tigas and Kacper Ziemianin\ + \ and Duncan Chapman},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178885},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {307--310},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {The Cave of Sounds: An Interactive Installation\ + \ Exploring How We Create Music Together},\n url = {http://www.nime.org/proceedings/2014/nime2014_288.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176673 + doi: 10.5281/zenodo.1178885 issn: 2220-4806 - keywords: 'Sound feedback, Karate, Learning environment, Wearable device' - pages: 13--18 - title: Sound Feedback for Powerful Karate Training - url: http://www.nime.org/proceedings/2004/nime2004_013.pdf - year: 2004 + month: June + pages: 307--310 + publisher: 'Goldsmiths, University of London' + title: 'The Cave of 
Sounds: An Interactive Installation Exploring How We Create + Music Together' + url: http://www.nime.org/proceedings/2014/nime2014_288.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Kaltenbrunner2004 - abstract: 'This article reflects the current state of the reacTable* project,an - electronic music instrument with a tangible table-basedinterface, which is currently - under development at theAudiovisual Institute at the Universitat Pompeu Fabra. - In thispaper we are focussing on the issue of Dynamic Patching,which is a particular - and unique aspect of the sound synthesisand control paradigms of the reacTable*. - Unlike commonvisual programming languages for sound synthesis, whichconceptually - separate the patch building process from theactual musical performance, the reacTable* - combines theconstruction and playing of the instrument in a unique way.The tangible - interface allows direct manipulation control overany of the used building blocks, - which physically representthe whole synthesizer function.' - address: 'Hamamatsu, Japan' - author: 'Kaltenbrunner, Martin and Geiger, Günter and Jordà, Sergi' - bibtex: "@inproceedings{Kaltenbrunner2004,\n abstract = {This article reflects the\ - \ current state of the reacTable* project,an electronic music instrument with\ - \ a tangible table-basedinterface, which is currently under development at theAudiovisual\ - \ Institute at the Universitat Pompeu Fabra. In thispaper we are focussing on\ - \ the issue of Dynamic Patching,which is a particular and unique aspect of the\ - \ sound synthesisand control paradigms of the reacTable*. 
Unlike commonvisual\ - \ programming languages for sound synthesis, whichconceptually separate the patch\ - \ building process from theactual musical performance, the reacTable* combines\ - \ theconstruction and playing of the instrument in a unique way.The tangible interface\ - \ allows direct manipulation control overany of the used building blocks, which\ - \ physically representthe whole synthesizer function.},\n address = {Hamamatsu,\ - \ Japan},\n author = {Kaltenbrunner, Martin and Geiger, G\\''{u}nter and Jord\\\ - `{a}, Sergi},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176621},\n issn\ - \ = {2220-4806},\n keywords = {dynamic patching,musical instrument,sound synthesis,tangible\ - \ interfaces,visual programming},\n pages = {19--22},\n title = {Dynamic Patches\ - \ for Live Musical Performance},\n url = {http://www.nime.org/proceedings/2004/nime2004_019.pdf},\n\ - \ year = {2004}\n}\n" + ID: knymoen12014 + abstract: 'We present Funky Sole Music, a musical interface employing a sole embedded + with three force sensitive resistors in combination with a novel algorithm for + continuous movement classification. A heuristics-based music engine has been implemented, + allowing users to control high-level parameters of the musical output. This provides + a greater degree of control to users without musical expertise compared to what + they get with traditional media playes. By using the movement classification result + not as a direct control action in itself, but as a way to change mapping spaces + and musical sections, the control possibilities offered by the simple interface + are greatly increased.' 
+ address: 'London, United Kingdom' + author: Kristian Nymoen and Sichao Song and Yngve Hafting and Jim Torresen + bibtex: "@inproceedings{knymoen12014,\n abstract = {We present Funky Sole Music,\ + \ a musical interface employing a sole embedded with three force sensitive resistors\ + \ in combination with a novel algorithm for continuous movement classification.\ + \ A heuristics-based music engine has been implemented, allowing users to control\ + \ high-level parameters of the musical output. This provides a greater degree\ + \ of control to users without musical expertise compared to what they get with\ + \ traditional media playes. By using the movement classification result not as\ + \ a direct control action in itself, but as a way to change mapping spaces and\ + \ musical sections, the control possibilities offered by the simple interface\ + \ are greatly increased.},\n address = {London, United Kingdom},\n author = {Kristian\ + \ Nymoen and Sichao Song and Yngve Hafting and Jim Torresen},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178895},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {299--302},\n publisher = {Goldsmiths, University of London},\n title = {Funky\ + \ Sole Music: Gait Recognition and Adaptive Mapping},\n url = {http://www.nime.org/proceedings/2014/nime2014_289.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176621 + doi: 10.5281/zenodo.1178895 issn: 2220-4806 - keywords: 'dynamic patching,musical instrument,sound synthesis,tangible interfaces,visual - programming' - pages: 19--22 - title: Dynamic Patches for Live Musical Performance - url: http://www.nime.org/proceedings/2004/nime2004_019.pdf - year: 2004 + month: June + pages: 299--302 + publisher: 'Goldsmiths, University of London' + title: 'Funky Sole Music: Gait Recognition and 
Adaptive Mapping' + url: http://www.nime.org/proceedings/2014/nime2014_289.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Young2004 - abstract: 'We present a prototype of a new musical interface for Japanese drumming - techniques and styles. Our design used in the Aobachi drumming sticks provides - 5 gesture parameters (3 axes of acceleration, and 2 axes of angular velocity) - for each of the two sticks and transmits this data wirelessly using Bluetooth® - technology. This system utilizes minimal hardware embedded in the two drumming - sticks, allowing for gesture tracking of drum strokes by an interface of traditional - form, appearance, and feel. Aobachi is portable, versatile, and robust, and may - be used for a variety of musical applications, as well as analytical studies.' - address: 'Hamamatsu, Japan' - author: 'Young, Diana and Fujinaga, Ichiro' - bibtex: "@inproceedings{Young2004,\n abstract = {We present a prototype of a new\ - \ musical interface for Japanese drumming techniques and styles. Our design used\ - \ in the Aobachi drumming sticks provides 5 gesture parameters (3 axes of acceleration,\ - \ and 2 axes of angular velocity) for each of the two sticks and transmits this\ - \ data wirelessly using Bluetooth® technology. This system utilizes minimal hardware\ - \ embedded in the two drumming sticks, allowing for gesture tracking of drum strokes\ - \ by an interface of traditional form, appearance, and feel. 
Aobachi is portable,\ - \ versatile, and robust, and may be used for a variety of musical applications,\ - \ as well as analytical studies.},\n address = {Hamamatsu, Japan},\n author =\ - \ {Young, Diana and Fujinaga, Ichiro},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176687},\n\ - \ issn = {2220-4806},\n keywords = {bluetooth,drum stick,japanese drum,taiko,wireless},\n\ - \ pages = {23--26},\n title = {AoBachi: A New Interface for {Japan}ese Drumming},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_023.pdf},\n year = {2004}\n\ - }\n" + ID: fheller2014 + abstract: 'Although an analog technology, many DJs still value the turntable as + an irreplaceable performance tool. Digital vinyl systems combine the distinct + haptic nature of the analog turntable with the advantages of digital media. They + use special records containing a digital timecode which is then processed by a + computer and mapped to properties like playback speed and direction. These records, + however, are generic and, in contrast to traditional vinyl, do not provide visual + cues representing the structure of the track. We present a system that augments + the timecode record with a visualization of song information such as artist, title, + and track length, but also with a waveform that allows to visually navigate to + a certain beat. We conducted a survey examining the acceptance of such tools in + the DJ community and conducted a user study with professional DJs. The system + was widely accepted as a tool in the DJ community and received very positive feedback + during observational mixing sessions with four professional DJs.' + address: 'London, United Kingdom' + author: Florian Heller and Jan Borchers + bibtex: "@inproceedings{fheller2014,\n abstract = {Although an analog technology,\ + \ many DJs still value the turntable as an irreplaceable performance tool. 
Digital\ + \ vinyl systems combine the distinct haptic nature of the analog turntable with\ + \ the advantages of digital media. They use special records containing a digital\ + \ timecode which is then processed by a computer and mapped to properties like\ + \ playback speed and direction. These records, however, are generic and, in contrast\ + \ to traditional vinyl, do not provide visual cues representing the structure\ + \ of the track. We present a system that augments the timecode record with a visualization\ + \ of song information such as artist, title, and track length, but also with a\ + \ waveform that allows to visually navigate to a certain beat. We conducted a\ + \ survey examining the acceptance of such tools in the DJ community and conducted\ + \ a user study with professional DJs. The system was widely accepted as a tool\ + \ in the DJ community and received very positive feedback during observational\ + \ mixing sessions with four professional DJs.},\n address = {London, United Kingdom},\n\ + \ author = {Florian Heller and Jan Borchers},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178796},\n issn = {2220-4806},\n month = {June},\n pages =\ + \ {66--69},\n publisher = {Goldsmiths, University of London},\n title = {Visualizing\ + \ Song Structure on Timecode Vinyls},\n url = {http://www.nime.org/proceedings/2014/nime2014_290.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176687 + doi: 10.5281/zenodo.1178796 issn: 2220-4806 - keywords: 'bluetooth,drum stick,japanese drum,taiko,wireless' - pages: 23--26 - title: 'AoBachi: A New Interface for Japanese Drumming' - url: http://www.nime.org/proceedings/2004/nime2004_023.pdf - year: 2004 + month: June + pages: 66--69 + publisher: 'Goldsmiths, University of London' + title: Visualizing Song Structure on Timecode 
Vinyls + url: http://www.nime.org/proceedings/2014/nime2014_290.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: BryanKinns2004 - abstract: We have seen many new and exciting developments in new interfaces for - musical expression. In this paper we present the design of an interface for remote - group music improvisation and composition - Daisyphone. The approach relies on - players creating and editing short shared loops of music which are semi-synchronously - updated. The interface emphasizes the looping nature of the music and is designed - to be engaging and deployable on a wide range of interaction devices. Observations - of the use of the tool with different levels of persistence of contribution are - reported and discussed. Future developments centre around ways to string loops - together into larger pieces (composition) and investigating suitable rates of - decay to encourage more group improvisation. - address: 'Hamamatsu, Japan' - author: 'Bryan-Kinns, Nick and Healey, Patrick G.' - bibtex: "@inproceedings{BryanKinns2004,\n abstract = {We have seen many new and\ - \ exciting developments in new interfaces for musical expression. In this paper\ - \ we present the design of an interface for remote group music improvisation and\ - \ composition - Daisyphone. The approach relies on players creating and editing\ - \ short shared loops of music which are semi-synchronously updated. The interface\ - \ emphasizes the looping nature of the music and is designed to be engaging and\ - \ deployable on a wide range of interaction devices. Observations of the use of\ - \ the tool with different levels of persistence of contribution are reported and\ - \ discussed. 
Future developments centre around ways to string loops together into\ - \ larger pieces (composition) and investigating suitable rates of decay to encourage\ - \ more group improvisation.},\n address = {Hamamatsu, Japan},\n author = {Bryan-Kinns,\ - \ Nick and Healey, Patrick G.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176583},\n\ - \ issn = {2220-4806},\n keywords = {collaboration,composition,improvisation,music},\n\ - \ pages = {27--30},\n title = {Daisyphone: Support for Remote Music Collaboration},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_027.pdf},\n year = {2004}\n\ - }\n" + ID: mmarier2014 + abstract: 'The development of the cushion-like musical interface called the sponge + started about seven years ago. Since then, it was extensively used to perform + in various settings. The sponge itself is described, but the main focus is on + the evolution of the mapping strategies that are used. The author reviews the + guidelines proposed by other researchers and explains how they were concretely + applied with the sponge. He concludes that no single strategy constitutes a solution + to the issue of mapping and that musical compositions are complex entities that + require the use of a multitude of mapping strategies in parallel. It is hoped + that the mappings described combined with new strategies will eventually lead + to the emergence of a musical language that is idiomatic to the sponge.' + address: 'London, United Kingdom' + author: Martin Marier + bibtex: "@inproceedings{mmarier2014,\n abstract = {The development of the cushion-like\ + \ musical interface called the sponge started about seven years ago. Since then,\ + \ it was extensively used to perform in various settings. The sponge itself is\ + \ described, but the main focus is on the evolution of the mapping strategies\ + \ that are used. 
The author reviews the guidelines proposed by other researchers\ + \ and explains how they were concretely applied with the sponge. He concludes\ + \ that no single strategy constitutes a solution to the issue of mapping and that\ + \ musical compositions are complex entities that require the use of a multitude\ + \ of mapping strategies in parallel. It is hoped that the mappings described combined\ + \ with new strategies will eventually lead to the emergence of a musical language\ + \ that is idiomatic to the sponge.},\n address = {London, United Kingdom},\n author\ + \ = {Martin Marier},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178863},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {525--528},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Designing Mappings for the Sponge: Towards\ + \ Spongistic Music},\n url = {http://www.nime.org/proceedings/2014/nime2014_292.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176583 + doi: 10.5281/zenodo.1178863 issn: 2220-4806 - keywords: collaboration,composition,improvisation,music - pages: 27--30 - title: 'Daisyphone: Support for Remote Music Collaboration' - url: http://www.nime.org/proceedings/2004/nime2004_027.pdf - year: 2004 + month: June + pages: 525--528 + publisher: 'Goldsmiths, University of London' + title: 'Designing Mappings for the Sponge: Towards Spongistic Music' + url: http://www.nime.org/proceedings/2014/nime2014_292.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Havel2004 - abstract: 'This paper presents a project involving a percussionist playing on a - virtual percussion. Both artistic and technical aspects of the project are developed. - Especially, a method forstrike recognition using the Flock of Birds is presented, - aswell as its use for artistic purpose.' 
- address: 'Hamamatsu, Japan' - author: 'Havel, Christophe and Desainte-Catherine, Myriam' - bibtex: "@inproceedings{Havel2004,\n abstract = {This paper presents a project involving\ - \ a percussionist playing on a virtual percussion. Both artistic and technical\ - \ aspects of the project are developed. Especially, a method forstrike recognition\ - \ using the Flock of Birds is presented, aswell as its use for artistic purpose.},\n\ - \ address = {Hamamatsu, Japan},\n author = {Havel, Christophe and Desainte-Catherine,\ - \ Myriam},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176609},\n issn = {2220-4806},\n\ - \ keywords = {Gesture analysis, virtual percussion, strike recognition.},\n pages\ - \ = {31--34},\n title = {Modeling an Air Percussion for Composition and Performance},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_031.pdf},\n year = {2004}\n\ - }\n" + ID: mneupert2014 + abstract: 'We present an instrument for audio-visual performance that allows to + recombine sounds from a collection of sampled media through concatenative synthesis. + A three-dimensional distribution derived from feature-analysis becomes accessible + through a theremin-inspired interface, allowing the player to shift from exploration + and intuitive navigation toward embodied performance on a granular level. In our + example we illustrate this concept by using the audiovisual recording of an instrumental + performance as a source. Our system provides an alternative interface to the musical + instrument''s audiovisual corpus: as the instrument''s sound and behavior is accessed + in ways that are not possible on the instrument itself, the resulting non-linear + playback of the grains generates an instant remix in a cut-up aesthetic. 
The presented + instrument is a human-computer interface that employs the structural outcome of + machine analysis accessing audiovisual corpora in the context of a musical performance.' + address: 'London, United Kingdom' + author: Joachim Goßmann and Max Neupert + bibtex: "@inproceedings{mneupert2014,\n abstract = {We present an instrument for\ + \ audio-visual performance that allows to recombine sounds from a collection of\ + \ sampled media through concatenative synthesis. A three-dimensional distribution\ + \ derived from feature-analysis becomes accessible through a theremin-inspired\ + \ interface, allowing the player to shift from exploration and intuitive navigation\ + \ toward embodied performance on a granular level. In our example we illustrate\ + \ this concept by using the audiovisual recording of an instrumental performance\ + \ as a source. Our system provides an alternative interface to the musical instrument's\ + \ audiovisual corpus: as the instrument's sound and behavior is accessed in ways\ + \ that are not possible on the instrument itself, the resulting non-linear playback\ + \ of the grains generates an instant remix in a cut-up aesthetic. 
The presented\ + \ instrument is a human-computer interface that employs the structural outcome\ + \ of machine analysis accessing audiovisual corpora in the context of a musical\ + \ performance.},\n address = {London, United Kingdom},\n author = {Joachim Go{\\\ + ss}mann and Max Neupert},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178772},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {151--154},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Musical Interface to Audiovisual Corpora of\ + \ Arbitrary Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_296.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176609 + doi: 10.5281/zenodo.1178772 issn: 2220-4806 - keywords: 'Gesture analysis, virtual percussion, strike recognition.' - pages: 31--34 - title: Modeling an Air Percussion for Composition and Performance - url: http://www.nime.org/proceedings/2004/nime2004_031.pdf - year: 2004 + month: June + pages: 151--154 + publisher: 'Goldsmiths, University of London' + title: Musical Interface to Audiovisual Corpora of Arbitrary Instruments + url: http://www.nime.org/proceedings/2014/nime2014_296.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Nelson2004 - abstract: 'Although MIDI is often used for computer-based interactive music applications, - its real-time performance is rarely quantified, despite concerns about whether - it is capable of adequate performance in realistic settings. We extend existing - proposals for MIDI performance benchmarking so they are useful in realistic interactive - scenarios, including those with heavy MIDI traffic and CPU load. 
We have produced - a cross-platform freely-available testing suite that is easy to use, and have - used it to survey the interactive performance of several commonly-used computer/MIDI - setups. We describe the suite, summarize the results of our performance survey, - and detail the benefits of this testing methodology.' - address: 'Hamamatsu, Japan' - author: 'Nelson, Mark and Thom, Belinda' - bibtex: "@inproceedings{Nelson2004,\n abstract = {Although MIDI is often used for\ - \ computer-based interactive music applications, its real-time performance is\ - \ rarely quantified, despite concerns about whether it is capable of adequate\ - \ performance in realistic settings. We extend existing proposals for MIDI performance\ - \ benchmarking so they are useful in realistic interactive scenarios, including\ - \ those with heavy MIDI traffic and CPU load. We have produced a cross-platform\ - \ freely-available testing suite that is easy to use, and have used it to survey\ - \ the interactive performance of several commonly-used computer/MIDI setups. We\ - \ describe the suite, summarize the results of our performance survey, and detail\ - \ the benefits of this testing methodology.},\n address = {Hamamatsu, Japan},\n\ - \ author = {Nelson, Mark and Thom, Belinda},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176643},\n issn = {2220-4806},\n pages = {35--38},\n title\ - \ = {A Survey of Real-Time {MIDI} Performance},\n url = {http://www.nime.org/proceedings/2004/nime2004_035.pdf},\n\ - \ year = {2004}\n}\n" + ID: ibergstrom2014 + abstract: 'We introduce two complementary OSC schemata for two contexts of use. + The first is for the complete description of an OSC namespace: detailing the full + set of messages each OSC-enabled system can receive or send, alongside choice + metadata we deem necessary to make full use of each system''s description. 
The + second context of use is a snapshot (partial or full) of the system''s state. + We also relate our proposed schemata to the current state of the art, and how + using these resolves issues that were left pending with previous research.' + address: 'London, United Kingdom' + author: Ilias Bergstrom and Joan Llobera + bibtex: "@inproceedings{ibergstrom2014,\n abstract = {We introduce two complementary\ + \ OSC schemata for two contexts of use. The first is for the complete description\ + \ of an OSC namespace: detailing the full set of messages each OSC-enabled system\ + \ can receive or send, alongside choice metadata we deem necessary to make full\ + \ use of each system's description. The second context of use is a snapshot (partial\ + \ or full) of the system's state. We also relate our proposed schemata to the\ + \ current state of the art, and how using these resolves issues that were left\ + \ pending with previous research.},\n address = {London, United Kingdom},\n author\ + \ = {Ilias Bergstrom and Joan Llobera},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178712},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {311--314},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {OSC-Namespace and OSC-State: Schemata for\ + \ Describing the Namespace and State of OSC-Enabled Systems},\n url = {http://www.nime.org/proceedings/2014/nime2014_300.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176643 + doi: 10.5281/zenodo.1178712 issn: 2220-4806 - pages: 35--38 - title: A Survey of Real-Time MIDI Performance - url: http://www.nime.org/proceedings/2004/nime2004_035.pdf - year: 2004 + month: June + pages: 311--314 + publisher: 'Goldsmiths, University of London' + title: 'OSC-Namespace and OSC-State: Schemata for Describing the Namespace and State + of 
OSC-Enabled Systems' + url: http://www.nime.org/proceedings/2014/nime2014_300.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Cont2004 - abstract: 'In this paper, we describe an adaptive approach to gesture mapping for - musical applications which serves as a mapping system for music instrument design. - A neural network approach is chosen for this goal and all the required interfaces - and abstractions are developed and demonstrated in the Pure Data environment. - In this paper, we will focus on neural network representation and implementation - in a real-time musical environment. This adaptive mapping is evaluated in different - static and dynamic situations by a network of sensors sampled at a rate of 200Hz - in real-time. Finally, some remarks are given on the network design and future - works. ' - address: 'Hamamatsu, Japan' - author: 'Cont, Arshia and Coduys, Thierry and Henry, Cyrille' - bibtex: "@inproceedings{Cont2004,\n abstract = {In this paper, we describe an adaptive\ - \ approach to gesture mapping for musical applications which serves as a mapping\ - \ system for music instrument design. A neural network approach is chosen for\ - \ this goal and all the required interfaces and abstractions are developed and\ - \ demonstrated in the Pure Data environment. In this paper, we will focus on neural\ - \ network representation and implementation in a real-time musical environment.\ - \ This adaptive mapping is evaluated in different static and dynamic situations\ - \ by a network of sensors sampled at a rate of 200Hz in real-time. Finally, some\ - \ remarks are given on the network design and future works. 
},\n address = {Hamamatsu,\ - \ Japan},\n author = {Cont, Arshia and Coduys, Thierry and Henry, Cyrille},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176589},\n issn = {2220-4806},\n\ - \ keywords = {Real-time gesture control, adaptive interfaces, Sensor and actuator\ - \ technologies for musical applications, Musical mapping algorithms and intelligent\ - \ controllers, Pure Data.},\n pages = {39--42},\n title = {Real-time Gesture Mapping\ - \ in Pd Environment using Neural Networks},\n url = {http://www.nime.org/proceedings/2004/nime2004_039.pdf},\n\ - \ year = {2004}\n}\n" + ID: ebertelli2014 + abstract: 'Through examining the decisions and sequences of presenting a multi-media + instrument fabrication program to students, this paper seeks to uncover practical + elements of best practice and possible improvements in science and music education. + The Conductive Music program incorporates public engagement principles, open-source + hardware, DIY ethos, contemporary composition techniques, and educational activities + for creative and analytical thinking. These activities impart positive skills + through multi-media content delivery for all learning types. The program is designed + to test practices for engaging at-risk young people from urban areas in the construction + and performance of new electronic instruments. The goal is to open up the world + of electronic music performance to a new generation of young digital artists and + to replace negative social behaviours with creative outlets for expression through + technology and performance. This paper highlights the key elements designed to + deliver the program''s agenda and examines the ways in which these aims were realised + or tested in the classroom.' 
+ address: 'London, United Kingdom' + author: Emily Robertson and Enrico Bertelli + bibtex: "@inproceedings{ebertelli2014,\n abstract = {Through examining the decisions\ + \ and sequences of presenting a multi-media instrument fabrication program to\ + \ students, this paper seeks to uncover practical elements of best practice and\ + \ possible improvements in science and music education. The Conductive Music program\ + \ incorporates public engagement principles, open-source hardware, DIY ethos,\ + \ contemporary composition techniques, and educational activities for creative\ + \ and analytical thinking. These activities impart positive skills through multi-media\ + \ content delivery for all learning types. The program is designed to test practices\ + \ for engaging at-risk young people from urban areas in the construction and performance\ + \ of new electronic instruments. The goal is to open up the world of electronic\ + \ music performance to a new generation of young digital artists and to replace\ + \ negative social behaviours with creative outlets for expression through technology\ + \ and performance. 
This paper highlights the key elements designed to deliver\ + \ the program's agenda and examines the ways in which these aims were realised\ + \ or tested in the classroom.},\n address = {London, United Kingdom},\n author\ + \ = {Emily Robertson and Enrico Bertelli},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178921},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {517--520},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Conductive Music: Teaching Innovative Interface\ + \ Design and Composition Techniques with Open-Source Hardware},\n url = {http://www.nime.org/proceedings/2014/nime2014_301.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176589 + doi: 10.5281/zenodo.1178921 issn: 2220-4806 - keywords: 'Real-time gesture control, adaptive interfaces, Sensor and actuator technologies - for musical applications, Musical mapping algorithms and intelligent controllers, - Pure Data.' 
- pages: 39--42 - title: Real-time Gesture Mapping in Pd Environment using Neural Networks - url: http://www.nime.org/proceedings/2004/nime2004_039.pdf - year: 2004 + month: June + pages: 517--520 + publisher: 'Goldsmiths, University of London' + title: 'Conductive Music: Teaching Innovative Interface Design and Composition Techniques + with Open-Source Hardware' + url: http://www.nime.org/proceedings/2014/nime2014_301.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Talmudi2004 - abstract: 'This paper presents computer experiments concerning the decentralized - pianola, a hypothetical mechanical music instrument, whose large-scale musical - behavior is the result of local physical interactions between simple elements.Traditional - mechanical music instruments like the pianola and the music box rely for their - operation on the separation between a sequential memory unit and an execution - unit. In a decentralized mechanical instrument, musical memory is an emergent - global property of the system, undistinguishable from the execution process. Such - a machine is botha score andan instrument. The paper starts by discussing the - difference between sequential memory systems and systems exhibiting emergent decentralized - musical behavior. Next, the use of particle system simulation for exploring virtual - decentralized instruments is demonstrated, and the architecture for a simple decentralized - instrument is outlined. The paper continues by describing the use of a genetic - algorithm for evolving decentralized instruments that reproduce a given musical - behavior.' - address: 'Hamamatsu, Japan' - author: 'Talmudi, Assaf K.' 
- bibtex: "@inproceedings{Talmudi2004,\n abstract = {This paper presents computer\ - \ experiments concerning the decentralized pianola, a hypothetical mechanical\ - \ music instrument, whose large-scale musical behavior is the result of local\ - \ physical interactions between simple elements.Traditional mechanical music instruments\ - \ like the pianola and the music box rely for their operation on the separation\ - \ between a sequential memory unit and an execution unit. In a decentralized mechanical\ - \ instrument, musical memory is an emergent global property of the system, undistinguishable\ - \ from the execution process. Such a machine is botha score andan instrument.\ - \ The paper starts by discussing the difference between sequential memory systems\ - \ and systems exhibiting emergent decentralized musical behavior. Next, the use\ - \ of particle system simulation for exploring virtual decentralized instruments\ - \ is demonstrated, and the architecture for a simple decentralized instrument\ - \ is outlined. The paper continues by describing the use of a genetic algorithm\ - \ for evolving decentralized instruments that reproduce a given musical behavior.},\n\ - \ address = {Hamamatsu, Japan},\n author = {Talmudi, Assaf K.},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176675},\n issn = {2220-4806},\n pages = {43--46},\n\ - \ title = {The Decentralized Pianola: Evolving Mechanical Music Instruments using\ - \ a Genetic Algorithm},\n url = {http://www.nime.org/proceedings/2004/nime2004_043.pdf},\n\ - \ year = {2004}\n}\n" + ID: tmudd2014 + abstract: 'This paper examines electronic instruments that are based on dynamical + systems, where the behaviour of the instrument depends not only upon the immediate + input to the instrument, but also on the past input. 
Five instruments are presented + as case studies: Michel Waisvisz'' Cracklebox, Dylan Menzies'' Spiro, no-input + mixing desk, the author''s Feedback Joypad, and microphone-loudspeaker feedback. + Links are suggested between the sonic affordances of each instrument and the dynamical + mechanisms embedded in them. This is discussed in the context of contemporary, + materialoriented approaches to composition and particularly to free improvisation + where elements such as unpredictability and instability are often of interest, + and the process of exploration and discovery is an important part of the practice. + Links are also made with the use of dynamical interactions in computer games to + produce situations in which slight variations in the timing and ordering of inputs + can lead to very different outcomes, encouraging similarly explorative approaches.' + address: 'London, United Kingdom' + author: Tom Mudd and Simon Holland and Paul Mulholland and Nick Dalton + bibtex: "@inproceedings{tmudd2014,\n abstract = {This paper examines electronic\ + \ instruments that are based on dynamical systems, where the behaviour of the\ + \ instrument depends not only upon the immediate input to the instrument, but\ + \ also on the past input. Five instruments are presented as case studies: Michel\ + \ Waisvisz' Cracklebox, Dylan Menzies' Spiro, no-input mixing desk, the author's\ + \ Feedback Joypad, and microphone-loudspeaker feedback. Links are suggested between\ + \ the sonic affordances of each instrument and the dynamical mechanisms embedded\ + \ in them. This is discussed in the context of contemporary, materialoriented\ + \ approaches to composition and particularly to free improvisation where elements\ + \ such as unpredictability and instability are often of interest, and the process\ + \ of exploration and discovery is an important part of the practice. 
Links are\ + \ also made with the use of dynamical interactions in computer games to produce\ + \ situations in which slight variations in the timing and ordering of inputs can\ + \ lead to very different outcomes, encouraging similarly explorative approaches.},\n\ + \ address = {London, United Kingdom},\n author = {Tom Mudd and Simon Holland and\ + \ Paul Mulholland and Nick Dalton},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178881},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {126--129},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Dynamical Interactions with Electronic Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_302.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176675 + doi: 10.5281/zenodo.1178881 issn: 2220-4806 - pages: 43--46 - title: 'The Decentralized Pianola: Evolving Mechanical Music Instruments using a - Genetic Algorithm' - url: http://www.nime.org/proceedings/2004/nime2004_043.pdf - year: 2004 + month: June + pages: 126--129 + publisher: 'Goldsmiths, University of London' + title: Dynamical Interactions with Electronic Instruments + url: http://www.nime.org/proceedings/2014/nime2014_302.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Mandelis2004 - abstract: 'This paper describes the use of evolutionary and artificial life techniques - in sound design and the development of performance mapping to facilitate the real-time - manipulation of such sounds through some input device controlled by the performer. - A concrete example of such a system is described which allows musicians without - detailed knowledge and experience of sound synthesis techniques to interactively - develop new sounds and performance manipulation mappings according to their own - aesthetic judgements. 
Experiences with the system are discussed. ' - address: 'Hamamatsu, Japan' - author: 'Mandelis, James and Husbands, Phil' - bibtex: "@inproceedings{Mandelis2004,\n abstract = {This paper describes the use\ - \ of evolutionary and artificial life techniques in sound design and the development\ - \ of performance mapping to facilitate the real-time manipulation of such sounds\ - \ through some input device controlled by the performer. A concrete example of\ - \ such a system is described which allows musicians without detailed knowledge\ - \ and experience of sound synthesis techniques to interactively develop new sounds\ - \ and performance manipulation mappings according to their own aesthetic judgements.\ - \ Experiences with the system are discussed. },\n address = {Hamamatsu, Japan},\n\ - \ author = {Mandelis, James and Husbands, Phil},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176635},\n issn = {2220-4806},\n keywords = {musical interaction,performance\ - \ mapping,sound synthesis},\n pages = {47--50},\n title = {Don't Just Play it,\ - \ Grow it! : Breeding Sound Synthesis and Performance Mappings},\n url = {http://www.nime.org/proceedings/2004/nime2004_047.pdf},\n\ - \ year = {2004}\n}\n" + ID: mbretan2014 + abstract: 'As robots become more pervasive in the world we think about how this + might influence the way in which people experience music. We introduce the concept + of a "robotic musical companion" (RMC) in the form of Shimi, a smart-phone enabled + five degree-of-freedom (DoF) robotic platform. We discuss experiences individuals + tend to have with music as consumers and performers and explore how these experiences + can be modified, aided, or improved by the inherent synergies between a human + and robot. An overview of several applications developed for Shimi is provided. 
+ These applications place Shimi in various roles and enable human-robotic interactions + (HRIs) that are highlighted by more personable social communications using natural + language and other forms of communication.' + address: 'London, United Kingdom' + author: Mason Bretan and Gil Weinberg + bibtex: "@inproceedings{mbretan2014,\n abstract = {As robots become more pervasive\ + \ in the world we think about how this might influence the way in which people\ + \ experience music. We introduce the concept of a \"robotic musical companion\"\ + \ (RMC) in the form of Shimi, a smart-phone enabled five degree-of-freedom (DoF)\ + \ robotic platform. We discuss experiences individuals tend to have with music\ + \ as consumers and performers and explore how these experiences can be modified,\ + \ aided, or improved by the inherent synergies between a human and robot. An overview\ + \ of several applications developed for Shimi is provided. These applications\ + \ place Shimi in various roles and enable human-robotic interactions (HRIs) that\ + \ are highlighted by more personable social communications using natural language\ + \ and other forms of communication.},\n address = {London, United Kingdom},\n\ + \ author = {Mason Bretan and Gil Weinberg},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178724},\n issn = {2220-4806},\n month = {June},\n pages =\ + \ {315--318},\n publisher = {Goldsmiths, University of London},\n title = {Chronicles\ + \ of a Robotic Musical Companion},\n url = {http://www.nime.org/proceedings/2014/nime2014_303.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176635 + doi: 10.5281/zenodo.1178724 issn: 2220-4806 - keywords: 'musical interaction,performance mapping,sound synthesis' - pages: 47--50 - title: 'Don''t Just Play it, Grow it! 
: Breeding Sound Synthesis and Performance - Mappings' - url: http://www.nime.org/proceedings/2004/nime2004_047.pdf - year: 2004 + month: June + pages: 315--318 + publisher: 'Goldsmiths, University of London' + title: Chronicles of a Robotic Musical Companion + url: http://www.nime.org/proceedings/2014/nime2014_303.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Shatin2004 - abstract: 'In this report, we discuss Tree Music, an interactive computer music - installation created using GAIA (Graphical Audio Interface Application), a new - open-source interface for controlling the RTcmix synthesis and effects processing - engine. Tree Music, commissioned by the University of Virginia Art Museum, used - a wireless camera with a wide-angle lens to capture motion and occlusion data - from exhibit visitors. We show how GAIA was used to structure and navigate the - compositional space, and how this program supports both graphical and text-based - programming in the same application. GAIA provides a GUI which combines two open-source - applications: RTcmix and Perl.' - address: 'Hamamatsu, Japan' - author: 'Shatin, Judith and Topper, David' - bibtex: "@inproceedings{Shatin2004,\n abstract = {In this report, we discuss Tree\ - \ Music, an interactive computer music installation created using GAIA (Graphical\ - \ Audio Interface Application), a new open-source interface for controlling the\ - \ RTcmix synthesis and effects processing engine. Tree Music, commissioned by\ - \ the University of Virginia Art Museum, used a wireless camera with a wide-angle\ - \ lens to capture motion and occlusion data from exhibit visitors. 
We show how\ - \ GAIA was used to structure and navigate the compositional space, and how this\ - \ program supports both graphical and text-based programming in the same application.\ - \ GAIA provides a GUI which combines two open-source applications: RTcmix and\ - \ Perl.},\n address = {Hamamatsu, Japan},\n author = {Shatin, Judith and Topper,\ - \ David},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176663},\n issn = {2220-4806},\n\ - \ keywords = {Composition, new interfaces, interactive systems, open source, Real\ - \ time audio, GUI controllers, video tracking},\n pages = {51--54},\n title =\ - \ {Tree Music: Composing with GAIA},\n url = {http://www.nime.org/proceedings/2004/nime2004_051.pdf},\n\ - \ year = {2004}\n}\n" + ID: sserafin2014 + abstract: 'In this paper we propose an empirical method to develop mapping strategies + between a gestural based interface (the Gloves) and physically based sound synthesis + models. An experiment was performed in order to investigate which kind of gestures + listeners associate to synthesised sounds produced using physical models, corresponding + to three categories of sound: sustained, iterative and impulsive. The results + of the experiment show that listeners perform similar gestures when controlling + sounds from the different categories. We used such gestures in order to create + the mapping strategy between the Gloves and the physically based synthesis engine.' + address: 'London, United Kingdom' + author: Stefania Serafin and Stefano Trento and Francesco Grani and Hannah Perner-Wilson + and Seb Madgwick and Tom Mitchell + bibtex: "@inproceedings{sserafin2014,\n abstract = {In this paper we propose an\ + \ empirical method to develop mapping strategies between a gestural based interface\ + \ (the Gloves) and physically based sound synthesis models. 
An experiment was\ + \ performed in order to investigate which kind of gestures listeners associate\ + \ to synthesised sounds produced using physical models, corresponding to three\ + \ categories of sound: sustained, iterative and impulsive. The results of the\ + \ experiment show that listeners perform similar gestures when controlling sounds\ + \ from the different categories. We used such gestures in order to create the\ + \ mapping strategy between the Gloves and the physically based synthesis engine.},\n\ + \ address = {London, United Kingdom},\n author = {Stefania Serafin and Stefano\ + \ Trento and Francesco Grani and Hannah Perner-Wilson and Seb Madgwick and Tom\ + \ Mitchell},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178937},\n issn\ + \ = {2220-4806},\n month = {June},\n pages = {521--524},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Controlling Physically Based Virtual Musical\ + \ Instruments Using The Gloves},\n url = {http://www.nime.org/proceedings/2014/nime2014_307.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176663 + doi: 10.5281/zenodo.1178937 issn: 2220-4806 - keywords: 'Composition, new interfaces, interactive systems, open source, Real time - audio, GUI controllers, video tracking' - pages: 51--54 - title: 'Tree Music: Composing with GAIA' - url: http://www.nime.org/proceedings/2004/nime2004_051.pdf - year: 2004 + month: June + pages: 521--524 + publisher: 'Goldsmiths, University of London' + title: Controlling Physically Based Virtual Musical Instruments Using The Gloves + url: http://www.nime.org/proceedings/2014/nime2014_307.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: DArcangelo2004 - abstract: 'This essay outlines a framework for understanding newmusical compositions - and performances that utilizepre-existing sound 
recordings. In attempting toarticulate - why musicians are increasingly using soundrecordings in their creative work, the - author calls fornew performance tools that enable the dynamic use ofpre-recorded - music. ' - address: 'Hamamatsu, Japan' - author: 'D''Arcangelo, Gideon' - bibtex: "@inproceedings{DArcangelo2004,\n abstract = {This essay outlines a framework\ - \ for understanding newmusical compositions and performances that utilizepre-existing\ - \ sound recordings. In attempting toarticulate why musicians are increasingly\ - \ using soundrecordings in their creative work, the author calls fornew performance\ - \ tools that enable the dynamic use ofpre-recorded music. },\n address = {Hamamatsu,\ - \ Japan},\n author = {D'Arcangelo, Gideon},\n booktitle = {Proceedings of the\ + ID: cgnegy12014 + abstract: 'CollideFx is a real-time audio effects processor that integrates the + physics of real objects into the parameter space of the signal chain. Much like + a traditional signal chain, the user can choose a series of effects and offer + realtime control to their various parameters. In this work, we introduce a means + of creating tree-like signal graphs that dynamically change their routing in response + to changes in the location of the unit generators in a virtual space. Signals + are rerouted using a crossfading scheme that avoids the harsh clicks and pops + associated with amplitude discontinuities. The unit generators are easily controllable + using a click and drag interface that responds using familiar physics. CollideFx + brings the interactivity of a video game together with the purpose of creating + interesting and complex audio effects. With little difficulty, users can craft + custom effects, or alternatively, can fling a unit generator into a cluster of + several others to obtain more surprising results, letting the physics engine do + the decision making.' 
+ address: 'London, United Kingdom' + author: Chet Gnegy + bibtex: "@inproceedings{cgnegy12014,\n abstract = {CollideFx is a real-time audio\ + \ effects processor that integrates the physics of real objects into the parameter\ + \ space of the signal chain. Much like a traditional signal chain, the user can\ + \ choose a series of effects and offer realtime control to their various parameters.\ + \ In this work, we introduce a means of creating tree-like signal graphs that\ + \ dynamically change their routing in response to changes in the location of the\ + \ unit generators in a virtual space. Signals are rerouted using a crossfading\ + \ scheme that avoids the harsh clicks and pops associated with amplitude discontinuities.\ + \ The unit generators are easily controllable using a click and drag interface\ + \ that responds using familiar physics. CollideFx brings the interactivity of\ + \ a video game together with the purpose of creating interesting and complex audio\ + \ effects. With little difficulty, users can craft custom effects, or alternatively,\ + \ can fling a unit generator into a cluster of several others to obtain more surprising\ + \ results, letting the physics engine do the decision making.},\n address = {London,\ + \ United Kingdom},\n author = {Chet Gnegy},\n booktitle = {Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176591},\n issn = {2220-4806},\n keywords = {Call and response,\ - \ turntablism, DJ tools, oral culture},\n pages = {55--58},\n title = {Recycling\ - \ Music, Answering Back: Toward an Oral Tradition of Electronic Music},\n url\ - \ = {http://www.nime.org/proceedings/2004/nime2004_055.pdf},\n year = {2004}\n\ - }\n" + \ {10.5281/zenodo.1178770},\n issn = {2220-4806},\n month = {June},\n pages =\ + \ {427--430},\n publisher = {Goldsmiths, University of London},\n title = {CollideFx:\ + \ A Physics-Based Audio Effects Processor},\n url = 
{http://www.nime.org/proceedings/2014/nime2014_308.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176591 + doi: 10.5281/zenodo.1178770 issn: 2220-4806 - keywords: 'Call and response, turntablism, DJ tools, oral culture' - pages: 55--58 - title: 'Recycling Music, Answering Back: Toward an Oral Tradition of Electronic - Music' - url: http://www.nime.org/proceedings/2004/nime2004_055.pdf - year: 2004 + month: June + pages: 427--430 + publisher: 'Goldsmiths, University of London' + title: 'CollideFx: A Physics-Based Audio Effects Processor' + url: http://www.nime.org/proceedings/2014/nime2014_308.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Jorda2004 - abstract: 'When envisaging new digital instruments, designers do not have to limit - themselves to their sonic capabilities (which can be absolutely any), not even - to their algorithmic power; they must be also especially careful about the instruments'' - conceptual capabilities, to the ways instruments impose or suggest to their players - new ways of thinking, new ways of establishing relations, new ways of interacting, - new ways of organizing time and textures; new ways, in short, of playing new musics. - This article explores the dynamic relation that builds between the player and - the instrument, introducing concepts such as efficiency, apprenticeship and learning - curve It aims at constructing a framework in which the possibilities and the diversity - of music instruments as well as the possibilities and the expressive freedom of - human music performers could start being evaluated. 
' - address: 'Hamamatsu, Japan' - author: 'Jordà, Sergi' - bibtex: "@inproceedings{Jorda2004,\n abstract = {When envisaging new digital instruments,\ - \ designers do not have to limit themselves to their sonic capabilities (which\ - \ can be absolutely any), not even to their algorithmic power; they must be also\ - \ especially careful about the instruments' conceptual capabilities, to the ways\ - \ instruments impose or suggest to their players new ways of thinking, new ways\ - \ of establishing relations, new ways of interacting, new ways of organizing time\ - \ and textures; new ways, in short, of playing new musics. This article explores\ - \ the dynamic relation that builds between the player and the instrument, introducing\ - \ concepts such as efficiency, apprenticeship and learning curve It aims at constructing\ - \ a framework in which the possibilities and the diversity of music instruments\ - \ as well as the possibilities and the expressive freedom of human music performers\ - \ could start being evaluated. },\n address = {Hamamatsu, Japan},\n author = {Jord\\\ - `{a}, Sergi},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176619},\n issn\ - \ = {2220-4806},\n keywords = {Musical instruments design, learning curve, apprenticeship,\ - \ musical efficiency.},\n pages = {59--63},\n title = {Digital Instruments and\ - \ Players: Part I -- Efficiency and Apprenticeship},\n url = {http://www.nime.org/proceedings/2004/nime2004_059.pdf},\n\ - \ year = {2004}\n}\n" + ID: tbarraclough2014 + abstract: 'This paper describes the Modulome System, a new hardware interface set + for group-based electronic music performance and installation. Taking influence + from a variety of established interfaces, the Modulome is a modular controller + with application dependant use cases.' 
+ address: 'London, United Kingdom' + author: Timothy J Barraclough and Jim Murphy and Ajay Kapur + bibtex: "@inproceedings{tbarraclough2014,\n abstract = {This paper describes the\ + \ Modulome System, a new hardware interface set for group-based electronic music\ + \ performance and installation. Taking influence from a variety of established\ + \ interfaces, the Modulome is a modular controller with application dependant\ + \ use cases.},\n address = {London, United Kingdom},\n author = {Timothy J Barraclough\ + \ and Jim Murphy and Ajay Kapur},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178708},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {155--158},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {New Open-Source Interfaces for Group Based\ + \ Participatory Performance of Live Electronic Music},\n url = {http://www.nime.org/proceedings/2014/nime2014_309.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176619 + doi: 10.5281/zenodo.1178708 issn: 2220-4806 - keywords: 'Musical instruments design, learning curve, apprenticeship, musical efficiency.' - pages: 59--63 - title: 'Digital Instruments and Players: Part I -- Efficiency and Apprenticeship' - url: http://www.nime.org/proceedings/2004/nime2004_059.pdf - year: 2004 + month: June + pages: 155--158 + publisher: 'Goldsmiths, University of London' + title: New Open-Source Interfaces for Group Based Participatory Performance of Live + Electronic Music + url: http://www.nime.org/proceedings/2014/nime2014_309.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Pashenkov2004 - abstract: 'This report presents a novel interface for musical performance which - utilizes a record-player turntable augmented with a computation engine and a high-density - optical sensing array. 
The turntable functions as a standalone step sequencer - for MIDI events transmitted to a computer or another device and it is programmed - in real-time using visual disks. The program instructions are represented on printed - paper disks directly as characters of English alphabet that could be read by human - as effectively as they are picked up by the machine''s optical cartridge. The - result is a tangible interface that allows the user to manipulate pre-arranged - musical material by hand, by adding together instrumental tracks to form a dynamic - mix. A functional implementation of this interface is discussed in view of historical - background and other examples of electronic instruments for music creation and - performance incorporating optical turntable as a central element.' - address: 'Hamamatsu, Japan' - author: 'Pashenkov, Nikita' - bibtex: "@inproceedings{Pashenkov2004,\n abstract = {This report presents a novel\ - \ interface for musical performance which utilizes a record-player turntable augmented\ - \ with a computation engine and a high-density optical sensing array. The turntable\ - \ functions as a standalone step sequencer for MIDI events transmitted to a computer\ - \ or another device and it is programmed in real-time using visual disks. The\ - \ program instructions are represented on printed paper disks directly as characters\ - \ of English alphabet that could be read by human as effectively as they are picked\ - \ up by the machine's optical cartridge. The result is a tangible interface that\ - \ allows the user to manipulate pre-arranged musical material by hand, by adding\ - \ together instrumental tracks to form a dynamic mix. 
A functional implementation\ - \ of this interface is discussed in view of historical background and other examples\ - \ of electronic instruments for music creation and performance incorporating optical\ - \ turntable as a central element.},\n address = {Hamamatsu, Japan},\n author =\ - \ {Pashenkov, Nikita},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176651},\n\ - \ issn = {2220-4806},\n keywords = {Interaction, visualization, tangible interface,\ - \ controllers, optical turntable, performance.},\n pages = {64--67},\n title =\ - \ {A New Mix of Forgotten Technology: Sound Generation, Sequencing and Performance\ - \ Using an Optical Turntable},\n url = {http://www.nime.org/proceedings/2004/nime2004_064.pdf},\n\ - \ year = {2004}\n}\n" + ID: olahdeoja2014 + abstract: 'This paper provides a report of a research effort to transform architectural + and scenographic surfaces into sound sources and use them in artistic creation. + Structure-borne sound drivers are employed to induce sound into the solid surfaces, + making them vibrate and emit sound. The sound waves can be perceived both via + the aural (airborne diffusion) as well as the tactile (structure-borne diffusion) + senses. The paper describes the main challenges encountered in the use of structure-borne + sound technology, as well as the current results in overcoming them. Two completed + artistic projects are presented in order to illustrate the creative possibilities + enabled by the research.' + address: 'London, United Kingdom' + author: Otso Lähdeoja + bibtex: "@inproceedings{olahdeoja2014,\n abstract = {This paper provides a report\ + \ of a research effort to transform architectural and scenographic surfaces into\ + \ sound sources and use them in artistic creation. Structure-borne sound drivers\ + \ are employed to induce sound into the solid surfaces, making them vibrate and\ + \ emit sound. 
The sound waves can be perceived both via the aural (airborne diffusion)\ + \ as well as the tactile (structure-borne diffusion) senses. The paper describes\ + \ the main challenges encountered in the use of structure-borne sound technology,\ + \ as well as the current results in overcoming them. Two completed artistic projects\ + \ are presented in order to illustrate the creative possibilities enabled by the\ + \ research.},\n address = {London, United Kingdom},\n author = {Otso L\\''ahdeoja},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178843},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {319--322},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Structure-Borne Sound and Aurally Active Spaces},\n url\ + \ = {http://www.nime.org/proceedings/2014/nime2014_310.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176651 + doi: 10.5281/zenodo.1178843 issn: 2220-4806 - keywords: 'Interaction, visualization, tangible interface, controllers, optical - turntable, performance.' - pages: 64--67 - title: 'A New Mix of Forgotten Technology: Sound Generation, Sequencing and Performance - Using an Optical Turntable' - url: http://www.nime.org/proceedings/2004/nime2004_064.pdf - year: 2004 + month: June + pages: 319--322 + publisher: 'Goldsmiths, University of London' + title: Structure-Borne Sound and Aurally Active Spaces + url: http://www.nime.org/proceedings/2014/nime2014_310.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Lee2004 - abstract: 'This paper describes the first system designed to allow children to conduct - an audio and video recording of an orchestra. 
No prior music experience is required - to control the orchestra, and the system uses an advanced algorithm to time stretch - the audio in real-time at high quality and without altering the pitch. We will - discuss the requirements and challenges of designing an interface to target our - particular user group (children), followed by some system implementation details. - An overview of the algorithm used for audio time stretching will also be presented. - We are currently using this technology to study and compare professional and non-professional - conducting behavior, and its implications when designing new interfaces for multimedia. - You''re the Conductor is currently a successful exhibit at the Children''s Museum - in Boston, USA.' - address: 'Hamamatsu, Japan' - author: 'Lee, Eric and Nakra, Teresa M. and Borchers, Jan' - bibtex: "@inproceedings{Lee2004,\n abstract = {This paper describes the first system\ - \ designed to allow children to conduct an audio and video recording of an orchestra.\ - \ No prior music experience is required to control the orchestra, and the system\ - \ uses an advanced algorithm to time stretch the audio in real-time at high quality\ - \ and without altering the pitch. We will discuss the requirements and challenges\ - \ of designing an interface to target our particular user group (children), followed\ - \ by some system implementation details. An overview of the algorithm used for\ - \ audio time stretching will also be presented. We are currently using this technology\ - \ to study and compare professional and non-professional conducting behavior,\ - \ and its implications when designing new interfaces for multimedia. You're the\ - \ Conductor is currently a successful exhibit at the Children's Museum in Boston,\ - \ USA.},\n address = {Hamamatsu, Japan},\n author = {Lee, Eric and Nakra, Teresa\ - \ M. 
and Borchers, Jan},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176629},\n\ - \ issn = {2220-4806},\n keywords = {conducting systems,design patterns,gesture\ - \ recogni-,interactive exhibits,real-time audio stretching,tion},\n pages = {68--73},\n\ - \ title = {You're The Conductor: A Realistic Interactive Conducting System for\ - \ Children},\n url = {http://www.nime.org/proceedings/2004/nime2004_068.pdf},\n\ - \ year = {2004}\n}\n" + ID: dwikstrom2014 + abstract: 'In this paper an emotionally justified approach for controlling sound + with physiology is presented. Measurements of listeners'' physiology, while they + are listening to recorded music of their own choosing, are used to create a regression + model that predicts features extracted from music with the help of the listeners'' + physiological response patterns. This information can be used as a control signal + to drive musical composition and synthesis of new sounds an approach involving + concatenative sound synthesis is suggested. An evaluation study was conducted + to test the feasibility of the model. A multiple linear regression model and an + artificial neural network model were evaluated against a constant regressor, or + dummy model. The dummy model outperformed the other models in prediction accuracy, + but the artificial neural network model achieved significant correlations between + predictions and target values for many acoustic features.' + address: 'London, United Kingdom' + author: D. J. Valtteri Wikström + bibtex: "@inproceedings{dwikstrom2014,\n abstract = {In this paper an emotionally\ + \ justified approach for controlling sound with physiology is presented. 
Measurements\ + \ of listeners' physiology, while they are listening to recorded music of their\ + \ own choosing, are used to create a regression model that predicts features extracted\ + \ from music with the help of the listeners' physiological response patterns.\ + \ This information can be used as a control signal to drive musical composition\ + \ and synthesis of new sounds an approach involving concatenative sound synthesis\ + \ is suggested. An evaluation study was conducted to test the feasibility of the\ + \ model. A multiple linear regression model and an artificial neural network model\ + \ were evaluated against a constant regressor, or dummy model. The dummy model\ + \ outperformed the other models in prediction accuracy, but the artificial neural\ + \ network model achieved significant correlations between predictions and target\ + \ values for many acoustic features.},\n address = {London, United Kingdom},\n\ + \ author = {D. J. Valtteri Wikstr\\''om},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178981},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {549--552},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Musical Composition by Regressional Mapping\ + \ of Physiological Responses to Acoustic Features},\n url = {http://www.nime.org/proceedings/2014/nime2014_311.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176629 + doi: 10.5281/zenodo.1178981 issn: 2220-4806 - keywords: 'conducting systems,design patterns,gesture recogni-,interactive exhibits,real-time - audio stretching,tion' - pages: 68--73 - title: 'You''re The Conductor: A Realistic Interactive Conducting System for Children' - url: http://www.nime.org/proceedings/2004/nime2004_068.pdf - year: 2004 + month: June + pages: 549--552 + publisher: 'Goldsmiths, University of London' 
+ title: Musical Composition by Regressional Mapping of Physiological Responses to + Acoustic Features + url: http://www.nime.org/proceedings/2014/nime2014_311.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: OModhrain2004 - abstract: 'The PebbleBox and the CrumbleBag are examples of a granular interaction - paradigm, in which the manipulation ofphysical grains of arbitrary material becomes - the basis forinteracting with granular sound synthesis models. The soundsmade - by the grains as they are manipulated are analysed,and parameters such as grain - rate, grain amplitude andgrain density are extracted. These parameters are then - usedto control the granulation of arbitrary sound samples in realtime. In this - way, a direct link is made between the haptic sensation of interacting with grains - and the control ofgranular sounds.' - address: 'Hamamatsu, Japan' - author: 'O''Modhrain, Sile and Essl, Georg' - bibtex: "@inproceedings{OModhrain2004,\n abstract = {The PebbleBox and the CrumbleBag\ - \ are examples of a granular interaction paradigm, in which the manipulation ofphysical\ - \ grains of arbitrary material becomes the basis forinteracting with granular\ - \ sound synthesis models. The soundsmade by the grains as they are manipulated\ - \ are analysed,and parameters such as grain rate, grain amplitude andgrain density\ - \ are extracted. These parameters are then usedto control the granulation of arbitrary\ - \ sound samples in realtime. 
In this way, a direct link is made between the haptic\ - \ sensation of interacting with grains and the control ofgranular sounds.},\n\ - \ address = {Hamamatsu, Japan},\n author = {O'Modhrain, Sile and Essl, Georg},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176647},\n issn = {2220-4806},\n\ - \ keywords = {Musical instrument, granular synthesis, haptic},\n pages = {74--79},\n\ - \ title = {PebbleBox and CrumbleBag: Tactile Interfaces for Granular Synthesis},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_074.pdf},\n year = {2004}\n\ - }\n" + ID: jlong2014 + abstract: 'This paper describes the Robotic Taishogoto, a new robotic musical instrument + for performance, musical installations, and educational purposes. The primary + goals of its creation is to provide an easy to use, cost effective, compact and + integrated acoustic instrument which is fully automated and controllable via standard + MIDI commands. This paper describes the technical details of its design and implementation + including the mechanics, electronics and firmware. It also outlines various control + methodologies and use cases for the instrument.' + address: 'London, United Kingdom' + author: Jason Long + bibtex: "@inproceedings{jlong2014,\n abstract = {This paper describes the Robotic\ + \ Taishogoto, a new robotic musical instrument for performance, musical installations,\ + \ and educational purposes. The primary goals of its creation is to provide an\ + \ easy to use, cost effective, compact and integrated acoustic instrument which\ + \ is fully automated and controllable via standard MIDI commands. This paper describes\ + \ the technical details of its design and implementation including the mechanics,\ + \ electronics and firmware. 
It also outlines various control methodologies and\ + \ use cases for the instrument.},\n address = {London, United Kingdom},\n author\ + \ = {Jason Long},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178853},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {479--482},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {The Robotic Taishogoto: A New Plug 'n Play\ + \ Desktop Performance Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_313.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176647 + doi: 10.5281/zenodo.1178853 issn: 2220-4806 - keywords: 'Musical instrument, granular synthesis, haptic' - pages: 74--79 - title: 'PebbleBox and CrumbleBag: Tactile Interfaces for Granular Synthesis' - url: http://www.nime.org/proceedings/2004/nime2004_074.pdf - year: 2004 + month: June + pages: 479--482 + publisher: 'Goldsmiths, University of London' + title: 'The Robotic Taishogoto: A New Plug ''n Play Desktop Performance Instrument' + url: http://www.nime.org/proceedings/2014/nime2014_313.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Paine2004 - abstract: 'New Interfaces for Musical Expression must speak to the nature of ''instrument'', - that is, it must always be understood that the interface binds to a complex musical - phenomenon. This paper explores the nature of engagement, the point of performance - that occurs when a human being engages with a computer based instrument. It asks - questions about the nature of the instrument in computer music and offers some - conceptual models for the mapping of gesture to sonic outcomes.' 
- address: 'Hamamatsu, Japan' - author: 'Paine, Garth' - bibtex: "@inproceedings{Paine2004,\n abstract = {New Interfaces for Musical Expression\ - \ must speak to the nature of 'instrument', that is, it must always be understood\ - \ that the interface binds to a complex musical phenomenon. This paper explores\ - \ the nature of engagement, the point of performance that occurs when a human\ - \ being engages with a computer based instrument. It asks questions about the\ - \ nature of the instrument in computer music and offers some conceptual models\ - \ for the mapping of gesture to sonic outcomes.},\n address = {Hamamatsu, Japan},\n\ - \ author = {Paine, Garth},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176649},\n\ - \ issn = {2220-4806},\n keywords = {dynamic,dynamic morphology,gesture,interaction,mapping,mind,music,orchestration,spectral\ - \ morphology},\n pages = {80--86},\n title = {Gesture and Musical Interaction\ - \ : Interactive Engagement Through Dynamic Morphology},\n url = {http://www.nime.org/proceedings/2004/nime2004_080.pdf},\n\ - \ year = {2004}\n}\n" + ID: pmathews2014 + abstract: 'Networked musical performance using networks of computers for live performance + of electronic music has evolved over a number of decades but has tended to rely + upon customized and highly specialized software designed specifically for particular + artistic goals. This paper presents Tangle, a flexible software framework designed + to provide a basis for performance on any number of distinct instruments. The + network includes features to simplify the control of robotic instruments, such + as automated latency compensation and self-testing, while being simple to extend + in order to implement device-specific logic and failsafes. Tangle has been tested + on two diverse systems incorporating a number of unique and complex mechatronic + instruments.' 
+ address: 'London, United Kingdom' + author: Paul Mathews and Ness Morris and Jim Murphy and Ajay Kapur and Dale Carnegie + bibtex: "@inproceedings{pmathews2014,\n abstract = {Networked musical performance\ + \ using networks of computers for live performance of electronic music has evolved\ + \ over a number of decades but has tended to rely upon customized and highly specialized\ + \ software designed specifically for particular artistic goals. This paper presents\ + \ Tangle, a flexible software framework designed to provide a basis for performance\ + \ on any number of distinct instruments. The network includes features to simplify\ + \ the control of robotic instruments, such as automated latency compensation and\ + \ self-testing, while being simple to extend in order to implement device-specific\ + \ logic and failsafes. Tangle has been tested on two diverse systems incorporating\ + \ a number of unique and complex mechatronic instruments.},\n address = {London,\ + \ United Kingdom},\n author = {Paul Mathews and Ness Morris and Jim Murphy and\ + \ Ajay Kapur and Dale Carnegie},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178867},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {187--190},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Tangle: a Flexible Framework for Performance\ + \ with Advanced Robotic Musical Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_314.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176649 + doi: 10.5281/zenodo.1178867 issn: 2220-4806 - keywords: 'dynamic,dynamic morphology,gesture,interaction,mapping,mind,music,orchestration,spectral - morphology' - pages: 80--86 - title: 'Gesture and Musical Interaction : Interactive Engagement Through Dynamic - Morphology' - url: 
http://www.nime.org/proceedings/2004/nime2004_080.pdf - year: 2004 + month: June + pages: 187--190 + publisher: 'Goldsmiths, University of London' + title: 'Tangle: a Flexible Framework for Performance with Advanced Robotic Musical + Instruments' + url: http://www.nime.org/proceedings/2014/nime2014_314.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: VanNort2004 - abstract: 'The choice of mapping strategies to effectively map controller variables - to sound synthesis algorithms is examined.Specifically, we look at continuous - mappings that have ageometric representation. Drawing from underlying mathematical - theory, this paper presents a way to compare mapping strategies, with the goal - of achieving an appropriatematch between mapping and musical performance context.This - method of comparison is applied to existing techniques,while a suggestion is offered - on how to integrate and extendthis work through a new implementation.' - address: 'Hamamatsu, Japan' - author: 'Van Nort, Doug and Wanderley, Marcelo M. and Depalle, Philippe' - bibtex: "@inproceedings{VanNort2004,\n abstract = {The choice of mapping strategies\ - \ to effectively map controller variables to sound synthesis algorithms is examined.Specifically,\ - \ we look at continuous mappings that have ageometric representation. Drawing\ - \ from underlying mathematical theory, this paper presents a way to compare mapping\ - \ strategies, with the goal of achieving an appropriatematch between mapping and\ - \ musical performance context.This method of comparison is applied to existing\ - \ techniques,while a suggestion is offered on how to integrate and extendthis\ - \ work through a new implementation.},\n address = {Hamamatsu, Japan},\n author\ - \ = {Van Nort, Doug and Wanderley, Marcelo M. 
and Depalle, Philippe},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176681},\n issn = {2220-4806},\n keywords\ - \ = {Mapping, Interface Design, Interpolation, Computational Geometry},\n pages\ - \ = {87--91},\n title = {On the Choice of Mappings Based on Geometric Properties},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_087.pdf},\n year = {2004}\n\ + ID: ofried2014 + abstract: 'The modern musician enjoys access to a staggering number of audio samples. + Composition software can ship with many gigabytes of data, and there are many + more to be found online. However, conventional methods for navigating these libraries + are still quite rudimentary, and often involve scrolling through alphabetical + lists. We present a system for sample exploration that allows audio clips to be + sorted according to user taste, and arranged in any desired 2D formation such + that similar samples are located near each other. Our method relies on two advances + in machine learning. First, metric learning allows the user to shape the audio + feature space to match their own preferences. Second, kernelized sorting finds + an optimal arrangement for the samples in 2D. We demonstrate our system with two + new interfaces for exploring audio samples, and evaluate the technology qualitatively + and quantitatively via a pair of user studies.' + address: 'London, United Kingdom' + author: Ohad Fried and Zeyu Jin and Reid Oda and Adam Finkelstein + bibtex: "@inproceedings{ofried2014,\n abstract = {The modern musician enjoys access\ + \ to a staggering number of audio samples. Composition software can ship with\ + \ many gigabytes of data, and there are many more to be found online. However,\ + \ conventional methods for navigating these libraries are still quite rudimentary,\ + \ and often involve scrolling through alphabetical lists. 
We present a system\ + \ for sample exploration that allows audio clips to be sorted according to user\ + \ taste, and arranged in any desired 2D formation such that similar samples are\ + \ located near each other. Our method relies on two advances in machine learning.\ + \ First, metric learning allows the user to shape the audio feature space to match\ + \ their own preferences. Second, kernelized sorting finds an optimal arrangement\ + \ for the samples in 2D. We demonstrate our system with two new interfaces for\ + \ exploring audio samples, and evaluate the technology qualitatively and quantitatively\ + \ via a pair of user studies.},\n address = {London, United Kingdom},\n author\ + \ = {Ohad Fried and Zeyu Jin and Reid Oda and Adam Finkelstein},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178766},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {281--286},\n publisher = {Goldsmiths, University of London},\n title = {AudioQuilt:\ + \ {2D} Arrangements of Audio Samples using Metric Learning and Kernelized Sorting},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_315.pdf},\n year = {2014}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176681 + doi: 10.5281/zenodo.1178766 issn: 2220-4806 - keywords: 'Mapping, Interface Design, Interpolation, Computational Geometry' - pages: 87--91 - title: On the Choice of Mappings Based on Geometric Properties - url: http://www.nime.org/proceedings/2004/nime2004_087.pdf - year: 2004 + month: June + pages: 281--286 + publisher: 'Goldsmiths, University of London' + title: 'AudioQuilt: 2D Arrangements of Audio Samples using Metric Learning and Kernelized + Sorting' + url: http://www.nime.org/proceedings/2014/nime2014_315.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Sheehan2004 - abstract: 'This paper discusses some of the issues 
pertaining to the design of digital - musical instruments that are to effectively fill the role of traditional instruments - (i.e. those based on physical sound production mechanisms). The design and implementation - of a musical instrument that addresses some of these issues, using scanned synthesis - coupled to a "smart" physical system, is described.' - address: 'Hamamatsu, Japan' - author: 'Sheehan, Brian' - bibtex: "@inproceedings{Sheehan2004,\n abstract = {This paper discusses some of\ - \ the issues pertaining to the design of digital musical instruments that are\ - \ to effectively fill the role of traditional instruments (i.e. those based on\ - \ physical sound production mechanisms). The design and implementation of a musical\ - \ instrument that addresses some of these issues, using scanned synthesis coupled\ - \ to a \"smart\" physical system, is described.},\n address = {Hamamatsu, Japan},\n\ - \ author = {Sheehan, Brian},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176665},\n\ - \ issn = {2220-4806},\n keywords = {Digital musical instruments, real-time performance,\ - \ scanned synthesis, pd, tactile interfaces, sensors, Shapetape, mapping.},\n\ - \ pages = {92--95},\n title = {The Squiggle: A Digital Musical Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_092.pdf},\n year = {2004}\n\ - }\n" + ID: dgabanaarellano2014 + abstract: 'This paper presents a new circular tangible interface where one or multiple + users can collaborate and interact in real time by placing and moving passive + wooden pucks on a transparent tabletop in order to create music. The design encourages + physical intuition and visual feedback on the music being created. An arm with + six optical sensors rotates beneath a transparent surface, triggering sounds based + on the objects placed above. 
The interface''s simplicity and tangibility make + it easy to learn and suitable for a broad range of users.' + address: 'London, United Kingdom' + author: Daniel Gábana Arellano and Andrew McPherson + bibtex: "@inproceedings{dgabanaarellano2014,\n abstract = {This paper presents a\ + \ new circular tangible interface where one or multiple users can collaborate\ + \ and interact in real time by placing and moving passive wooden pucks on a transparent\ + \ tabletop in order to create music. The design encourages physical intuition\ + \ and visual feedback on the music being created. An arm with six optical sensors\ + \ rotates beneath a transparent surface, triggering sounds based on the objects\ + \ placed above. The interface's simplicity and tangibility make it easy to learn\ + \ and suitable for a broad range of users.},\n address = {London, United Kingdom},\n\ + \ author = {Daniel G\\'abana Arellano and Andrew McPherson},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178704},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {84--85},\n publisher = {Goldsmiths, University of London},\n title = {Radear:\ + \ A Tangible Spinning Music Sequencer},\n url = {http://www.nime.org/proceedings/2014/nime2014_324.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176665 + doi: 10.5281/zenodo.1178704 issn: 2220-4806 - keywords: 'Digital musical instruments, real-time performance, scanned synthesis, - pd, tactile interfaces, sensors, Shapetape, mapping.' 
- pages: 92--95 - title: 'The Squiggle: A Digital Musical Instrument' - url: http://www.nime.org/proceedings/2004/nime2004_092.pdf - year: 2004 + month: June + pages: 84--85 + publisher: 'Goldsmiths, University of London' + title: 'Radear: A Tangible Spinning Music Sequencer' + url: http://www.nime.org/proceedings/2014/nime2014_324.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Gerhard2004 - abstract: 'This paper describes an approach to match visual and acoustic parameters - to produce an animated musical expression.Music may be generated to correspond - to animation, asdescribed here; imagery may be created to correspond tomusic; - or both may be developed simultaneously. This approach is intended to provide - new tools to facilitate bothcollaboration between visual artists and musicians - and examination of perceptual issues between visual and acousticmedia. As a proof-of-concept, - a complete example is developed with linear fractals as a basis for the animation, - andarranged rhythmic loops for the music. Since both visualand acoustic elements - in the example are generated fromconcise specifications, the potential of this - approach to create new works through parameter space exploration is accentuated, - however, there are opportunities for applicationto a wide variety of source material. - These additional applications are also discussed, along with issues encounteredin - development of the example.' - address: 'Hamamatsu, Japan' - author: 'Gerhard, David and Hepting, Daryl and Mckague, Matthew' - bibtex: "@inproceedings{Gerhard2004,\n abstract = {This paper describes an approach\ - \ to match visual and acoustic parameters to produce an animated musical expression.Music\ - \ may be generated to correspond to animation, asdescribed here; imagery may be\ - \ created to correspond tomusic; or both may be developed simultaneously. 
This\ - \ approach is intended to provide new tools to facilitate bothcollaboration between\ - \ visual artists and musicians and examination of perceptual issues between visual\ - \ and acousticmedia. As a proof-of-concept, a complete example is developed with\ - \ linear fractals as a basis for the animation, andarranged rhythmic loops for\ - \ the music. Since both visualand acoustic elements in the example are generated\ - \ fromconcise specifications, the potential of this approach to create new works\ - \ through parameter space exploration is accentuated, however, there are opportunities\ - \ for applicationto a wide variety of source material. These additional applications\ - \ are also discussed, along with issues encounteredin development of the example.},\n\ - \ address = {Hamamatsu, Japan},\n author = {Gerhard, David and Hepting, Daryl\ - \ and Mckague, Matthew},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176603},\n\ - \ issn = {2220-4806},\n keywords = {Multimedia creation and interaction, parameter\ - \ space, visualization, sonification.},\n pages = {96--99},\n title = {Exploration\ - \ of the Correspondence between Visual and Acoustic Parameter Spaces},\n url =\ - \ {http://www.nime.org/proceedings/2004/nime2004_096.pdf},\n year = {2004}\n}\n" + ID: aberndt2014 + abstract: 'We present the digital musical instrument TouchNoise that is based on + multitouch interaction with a particle system. It implements a novel interface + concept for modulating noise spectra. Each particle represents a sine oscillator + that moves through the two-dimensional frequency and stereo panning domain via + Brownian motion. Its behavior can be affected by multitouch gestures allowing + the shaping of the resulting sound in many different ways. Particles can be dragged, + attracted, repelled, accentuated, and their autonomous behavior can be manipulated. 
+ In this paper we introduce the concepts behind this instrument, describe its implementation + and discuss the sonic design space emerging from it.' + address: 'London, United Kingdom' + author: Axel Berndt and Nadia Al-Kassab and Raimund Dachselt + bibtex: "@inproceedings{aberndt2014,\n abstract = {We present the digital musical\ + \ instrument TouchNoise that is based on multitouch interaction with a particle\ + \ system. It implements a novel interface concept for modulating noise spectra.\ + \ Each particle represents a sine oscillator that moves through the two-dimensional\ + \ frequency and stereo panning domain via Brownian motion. Its behavior can be\ + \ affected by multitouch gestures allowing the shaping of the resulting sound\ + \ in many different ways. Particles can be dragged, attracted, repelled, accentuated,\ + \ and their autonomous behavior can be manipulated. In this paper we introduce\ + \ the concepts behind this instrument, describe its implementation and discuss\ + \ the sonic design space emerging from it.},\n address = {London, United Kingdom},\n\ + \ author = {Axel Berndt and Nadia Al-Kassab and Raimund Dachselt},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178714},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {323--326},\n publisher = {Goldsmiths, University of London},\n\ + \ title = {TouchNoise: A Particle-based Multitouch Noise Modulation Interface},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_325.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176603 + doi: 10.5281/zenodo.1178714 issn: 2220-4806 - keywords: 'Multimedia creation and interaction, parameter space, visualization, - sonification.' 
- pages: 96--99 - title: Exploration of the Correspondence between Visual and Acoustic Parameter Spaces - url: http://www.nime.org/proceedings/2004/nime2004_096.pdf - year: 2004 + month: June + pages: 323--326 + publisher: 'Goldsmiths, University of London' + title: 'TouchNoise: A Particle-based Multitouch Noise Modulation Interface' + url: http://www.nime.org/proceedings/2014/nime2014_325.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Ramakrishnan2004 - abstract: 'Auracle is a "group instrument," controlled by the voice, for real-time, - interactive, distributed music making over the Internet. It is implemented in - the Java™ programming language using a combination of publicly available libraries - (JSyn and TransJam) and custom-built components. This paper describes how the - various pieces --- the voice analysis, network communication, and sound synthesis - --- are individually built and how they are combined to form Auracle.' - address: 'Hamamatsu, Japan' - author: 'Ramakrishnan, Chandrasekhar and Freeman, Jason and Varnik, Kristjan' - bibtex: "@inproceedings{Ramakrishnan2004,\n abstract = {Auracle is a \"group instrument,\"\ - \ controlled by the voice, for real-time, interactive, distributed music making\ - \ over the Internet. It is implemented in the Java™ programming language using\ - \ a combination of publicly available libraries (JSyn and TransJam) and custom-built\ - \ components. 
This paper describes how the various pieces --- the voice analysis,\ - \ network communication, and sound synthesis --- are individually built and how\ - \ they are combined to form Auracle.},\n address = {Hamamatsu, Japan},\n author\ - \ = {Ramakrishnan, Chandrasekhar and Freeman, Jason and Varnik, Kristjan},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176657},\n issn = {2220-4806},\n keywords\ - \ = {Interactive Music Systems, Networking and Control, Voice and Speech Analysis,\ - \ Auracle, JSyn, TransJam, Linear Prediction, Neural Networks, Voice Interface,\ - \ Open Sound Control},\n pages = {100--103},\n title = {The Architecture of Auracle:\ - \ a Real-Time, Distributed, Collaborative Instrument},\n url = {http://www.nime.org/proceedings/2004/nime2004_100.pdf},\n\ - \ year = {2004}\n}\n" + ID: ynakanishi2014 + abstract: 'In this paper, the authors introduce a stand-alone synthesizer, ``B.O.M.B. + -Beat Of Magic Box --'''' for electronic music sessions and live performances. + ``B.O.M.B.'''' has a wireless communication system that synchronizes musical scale + and tempo (BPM) between multiple devices. In addition, participants can change + master/slave role between performers immediately. Our primary motivation is to + provide musicians and nonmusicians with opportunities to experience a collaborative + electronic music performance. Here, the hardware and interaction design of the + device is presented. To date, numerous collaborative musical instruments have + been developed in electronic music field [1][2][3]. The authors are interested + in formations of musical sessions using stand-alone devices and leader/follower + relationship in musical sessions. The authors specify three important requirements + of instrument design for musical session. 
They are as follows: (1) Simple Interface: + Interface that enables performers to control three sound elements (pitch, timbre, + and amplitude) with simple interaction. (2) Portable Stand-alone System: System + that runs standalone (with sound generators, speakers, and butteries). Because + musical sessions can be improvised at any place and time, the authors consider + that portability is essential in designing musical instruments for sessions. (3) + Wireless Synchronization: System that supports ensembles by automatically synchronizing + tempo (BPM) and tonality between multiple devices by air because of portability. + In addition, performers can switch master/slave roles smoothly such as leader/follower + relationship during a musical session. The authors gave ten live performances + using this device at domestic and international events. In these events, the authors + confirmed that our proposed wireless synchronization system worked stable. It + is suggested that our system demonstrate the practicality of wireless synchronization. + In future, the authors will evaluate the device in terms of its stability in multi-performer + musical sessions.' + address: 'London, United Kingdom' + author: Yoshihito Nakanishi and Seiichiro Matsumura and Chuichi Arakawa + bibtex: "@inproceedings{ynakanishi2014,\n abstract = {In this paper, the authors\ + \ introduce a stand-alone synthesizer, ``B.O.M.B. -Beat Of Magic Box --'' for\ + \ electronic music sessions and live performances. ``B.O.M.B.'' has a wireless\ + \ communication system that synchronizes musical scale and tempo (BPM) between\ + \ multiple devices. In addition, participants can change master/slave role between\ + \ performers immediately. Our primary motivation is to provide musicians and nonmusicians\ + \ with opportunities to experience a collaborative electronic music performance.\ + \ Here, the hardware and interaction design of the device is presented. 
To date,\ + \ numerous collaborative musical instruments have been developed in electronic\ + \ music field [1][2][3]. The authors are interested in formations of musical sessions\ + \ using stand-alone devices and leader/follower relationship in musical sessions.\ + \ The authors specify three important requirements of instrument design for musical\ + \ session. They are as follows: (1) Simple Interface: Interface that enables performers\ + \ to control three sound elements (pitch, timbre, and amplitude) with simple interaction.\ + \ (2) Portable Stand-alone System: System that runs standalone (with sound generators,\ + \ speakers, and butteries). Because musical sessions can be improvised at any\ + \ place and time, the authors consider that portability is essential in designing\ + \ musical instruments for sessions. (3) Wireless Synchronization: System that\ + \ supports ensembles by automatically synchronizing tempo (BPM) and tonality between\ + \ multiple devices by air because of portability. In addition, performers can\ + \ switch master/slave roles smoothly such as leader/follower relationship during\ + \ a musical session. The authors gave ten live performances using this device\ + \ at domestic and international events. In these events, the authors confirmed\ + \ that our proposed wireless synchronization system worked stable. It is suggested\ + \ that our system demonstrate the practicality of wireless synchronization. 
In\ + \ future, the authors will evaluate the device in terms of its stability in multi-performer\ + \ musical sessions.},\n address = {London, United Kingdom},\n author = {Yoshihito\ + \ Nakanishi and Seiichiro Matsumura and Chuichi Arakawa},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178889},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {80--81},\n publisher = {Goldsmiths, University of London},\n title = {B.O.M.B.\ + \ -Beat Of Magic Box -: Stand-Alone Synthesizer Using Wireless Synchronization\ + \ System For Musical Session and Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_327.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176657 + doi: 10.5281/zenodo.1178889 issn: 2220-4806 - keywords: 'Interactive Music Systems, Networking and Control, Voice and Speech Analysis, - Auracle, JSyn, TransJam, Linear Prediction, Neural Networks, Voice Interface, - Open Sound Control' - pages: 100--103 - title: 'The Architecture of Auracle: a Real-Time, Distributed, Collaborative Instrument' - url: http://www.nime.org/proceedings/2004/nime2004_100.pdf - year: 2004 + month: June + pages: 80--81 + publisher: 'Goldsmiths, University of London' + title: 'B.O.M.B. -Beat Of Magic Box -: Stand-Alone Synthesizer Using Wireless Synchronization + System For Musical Session and Performance' + url: http://www.nime.org/proceedings/2014/nime2014_327.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Miyashita2004 - abstract: 'In this paper, we propose Thermoscore, a musical score form-that dynamically - alters the temperature of the instrument/player interface. 
We developed the first - version of theThermoscore display by lining Peltier devices on piano keys.The - system is controlled by MIDI notes-on messages from anMIDI sequencer, so that - a composer can design songs that aresequences of temperature for each piano key. - We also discussmethodologies for composing with this system, and suggesttwo approaches. - The first is to make desirable keys (or otherkeys) hot. The second one uses chroma-profile, - that is, a radarchart representation of the frequency of pitch notations in the-piece. - By making keys of the same chroma hot in reverse proportion to the value of the - chroma-profile, it is possible to-constrain the performer''s improvisation and - to bring the tonality space close to a certain piece.' - address: 'Hamamatsu, Japan' - author: 'Miyashita, Homei and Nishimoto, Kazushi' - bibtex: "@inproceedings{Miyashita2004,\n abstract = {In this paper, we propose Thermoscore,\ - \ a musical score form-that dynamically alters the temperature of the instrument/player\ - \ interface. We developed the first version of theThermoscore display by lining\ - \ Peltier devices on piano keys.The system is controlled by MIDI notes-on messages\ - \ from anMIDI sequencer, so that a composer can design songs that aresequences\ - \ of temperature for each piano key. We also discussmethodologies for composing\ - \ with this system, and suggesttwo approaches. The first is to make desirable\ - \ keys (or otherkeys) hot. The second one uses chroma-profile, that is, a radarchart\ - \ representation of the frequency of pitch notations in the-piece. 
By making keys\ - \ of the same chroma hot in reverse proportion to the value of the chroma-profile,\ - \ it is possible to-constrain the performer's improvisation and to bring the tonality\ - \ space close to a certain piece.},\n address = {Hamamatsu, Japan},\n author =\ - \ {Miyashita, Homei and Nishimoto, Kazushi},\n booktitle = {Proceedings of the\ + ID: gwakefield2014 + abstract: 'We discuss live coding audio-visual worlds for large-scale virtual reality + environments. We describe Alive, an instrument allowing multiple users to develop + sonic and visual behaviors of agents in a virtual world, through a browserbased + collaborative code interface, accessible while being immersed through spatialized + audio and stereoscopic display. The interface adds terse syntax for query-based + precise or stochastic selections and declarative agent manipulations, lazily-evaluated + expressions for synthesis and behavior, event handling, and flexible scheduling.' + address: 'London, United Kingdom' + author: Graham Wakefield and Charlie Roberts and Matthew Wright and Timothy Wood + and Karl Yerkes + bibtex: "@inproceedings{gwakefield2014,\n abstract = {We discuss live coding audio-visual\ + \ worlds for large-scale virtual reality environments. We describe Alive, an instrument\ + \ allowing multiple users to develop sonic and visual behaviors of agents in a\ + \ virtual world, through a browserbased collaborative code interface, accessible\ + \ while being immersed through spatialized audio and stereoscopic display. 
The\ + \ interface adds terse syntax for query-based precise or stochastic selections\ + \ and declarative agent manipulations, lazily-evaluated expressions for synthesis\ + \ and behavior, event handling, and flexible scheduling.},\n address = {London,\ + \ United Kingdom},\n author = {Graham Wakefield and Charlie Roberts and Matthew\ + \ Wright and Timothy Wood and Karl Yerkes},\n booktitle = {Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176637},\n issn = {2220-4806},\n keywords = {musical score,\ - \ improvisation, peltier device, chroma profile},\n pages = {104--107},\n title\ - \ = {Thermoscore: A New-type Musical Score with Temperature Sensation},\n url\ - \ = {http://www.nime.org/proceedings/2004/nime2004_104.pdf},\n year = {2004}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176637 - issn: 2220-4806 - keywords: 'musical score, improvisation, peltier device, chroma profile' - pages: 104--107 - title: 'Thermoscore: A New-type Musical Score with Temperature Sensation' - url: http://www.nime.org/proceedings/2004/nime2004_104.pdf - year: 2004 - - -- ENTRYTYPE: inproceedings - ID: Serafin2004 - abstract: 'We present case studies of unusual instruments that share the same excitation - mechanism as that of the bowed string. The musical saw, Tibetan singing bow, glass - harmonica, and bowed cymbal all produce sound by rubbing a hard object on the - surface of the instrument. For each, we discuss the design of its physical model - and present a means for expressively controlling it. Finally, we propose a new - kind of generalized friction controller to be used in all these examples.' 
- address: 'Hamamatsu, Japan' - author: 'Serafin, Stefania and Young, Diana' - bibtex: "@inproceedings{Serafin2004,\n abstract = {We present case studies of unusual\ - \ instruments that share the same excitation mechanism as that of the bowed string.\ - \ The musical saw, Tibetan singing bow, glass harmonica, and bowed cymbal all\ - \ produce sound by rubbing a hard object on the surface of the instrument. For\ - \ each, we discuss the design of its physical model and present a means for expressively\ - \ controlling it. Finally, we propose a new kind of generalized friction controller\ - \ to be used in all these examples.},\n address = {Hamamatsu, Japan},\n author\ - \ = {Serafin, Stefania and Young, Diana},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176659},\n\ - \ issn = {2220-4806},\n pages = {108--111},\n title = {Toward a Generalized Friction\ - \ Controller: from the Bowed String to Unusual Musical Instruments},\n url = {http://www.nime.org/proceedings/2004/nime2004_108.pdf},\n\ - \ year = {2004}\n}\n" + \ {10.5281/zenodo.1178975},\n issn = {2220-4806},\n month = {June},\n pages =\ + \ {505--508},\n publisher = {Goldsmiths, University of London},\n title = {Collaborative\ + \ Live-Coding with an Immersive Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_328.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176659 + doi: 10.5281/zenodo.1178975 issn: 2220-4806 - pages: 108--111 - title: 'Toward a Generalized Friction Controller: from the Bowed String to Unusual - Musical Instruments' - url: http://www.nime.org/proceedings/2004/nime2004_108.pdf - year: 2004 + month: June + pages: 505--508 + publisher: 'Goldsmiths, University of London' + title: Collaborative Live-Coding with an Immersive Instrument + url: 
http://www.nime.org/proceedings/2014/nime2014_328.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Zaborowski2004 - abstract: 'This paper describes ThumbTEC, a novel general purpose input device for - the thumb or finger that is useful in a wide variety of applications from music - to text entry. The device is made up of three switches in a row and one miniature - joystick on top of the middle switch. The combination of joystick direction and - switch(es) controls what note or alphanumeric character is selected by the finger. - Several applications are detailed.' - address: 'Hamamatsu, Japan' - author: 'Zaborowski, Philippe S.' - bibtex: "@inproceedings{Zaborowski2004,\n abstract = {This paper describes ThumbTEC,\ - \ a novel general purpose input device for the thumb or finger that is useful\ - \ in a wide variety of applications from music to text entry. The device is made\ - \ up of three switches in a row and one miniature joystick on top of the middle\ - \ switch. The combination of joystick direction and switch(es) controls what note\ - \ or alphanumeric character is selected by the finger. Several applications are\ - \ detailed.},\n address = {Hamamatsu, Japan},\n author = {Zaborowski, Philippe\ - \ S.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176689},\n issn = {2220-4806},\n\ - \ keywords = {One-Thumb Input Device, HCI, Isometric Joystick, Mobile Computing,\ - \ Handheld Devices, Musical Instrument.},\n pages = {112--115},\n title = {ThumbTec:\ - \ A New Handheld Input Device},\n url = {http://www.nime.org/proceedings/2004/nime2004_112.pdf},\n\ - \ year = {2004}\n}\n" + ID: ssuh2014 + abstract: 'For the electric guitar, which takes a large proportion in modern pop + music, effects unit (or effector) is no longer optional. Many guitarists already + `play'' their effects with their instrument. 
However, it is not easy to control + these effects during the play, so lots of new controllers and interfaces have + been devised; one example is a pedal type effects that helps players to control + effects with a foot while their hands are busy. Some players put a controller + on their guitars. However, our instruments are so precious to drill a hole, and + the stage is too big for the player who is just kneeling behind the pedals and + turning the knobs. In this paper, we designed a new control system for electric + guitar and bass. This paper is about a gesture-based sound control system that + controls the electric guitar effects (like delay time, reverberation or pitch) + with the player''s hand gesture. This system utilizes TAPIR signal to trace player''s + hand motion. TAPIR signal is an acoustic signal that can rarely be received by + most people, because its frequency exists between 18 kHz to 22 kHz [TAPIR article]. + This system consists of a signal generator, an electric guitar and a sound processor. + From the generator that is attached on the player''s hand, the TAPIR signal transfers + to the magnetic pickup equipped on the electric guitar. Player''s gesture is captured + as a Doppler shift and the processor calculates the value as the sound effect + parameter. In this paper, we focused on the demonstration of the signal transfer + on aforementioned system.' + address: 'London, United Kingdom' + author: Sangwon Suh and Jeong-seob Lee and Woon Seung Yeo + bibtex: "@inproceedings{ssuh2014,\n abstract = {For the electric guitar, which takes\ + \ a large proportion in modern pop music, effects unit (or effector) is no longer\ + \ optional. 
Many guitarists already `play' their effects with their instrument.\ + \ However, it is not easy to control these effects during the play, so lots of\ + \ new controllers and interfaces have been devised; one example is a pedal type\ + \ effects that helps players to control effects with a foot while their hands\ + \ are busy. Some players put a controller on their guitars. However, our instruments\ + \ are so precious to drill a hole, and the stage is too big for the player who\ + \ is just kneeling behind the pedals and turning the knobs. In this paper, we\ + \ designed a new control system for electric guitar and bass. This paper is about\ + \ a gesture-based sound control system that controls the electric guitar effects\ + \ (like delay time, reverberation or pitch) with the player's hand gesture. This\ + \ system utilizes TAPIR signal to trace player's hand motion. TAPIR signal is\ + \ an acoustic signal that can rarely be received by most people, because its frequency\ + \ exists between 18 kHz to 22 kHz [TAPIR article]. This system consists of a signal\ + \ generator, an electric guitar and a sound processor. From the generator that\ + \ is attached on the player's hand, the TAPIR signal transfers to the magnetic\ + \ pickup equipped on the electric guitar. 
Player's gesture is captured as a Doppler\ + \ shift and the processor calculates the value as the sound effect parameter.\ + \ In this paper, we focused on the demonstration of the signal transfer on aforementioned\ + \ system.},\n address = {London, United Kingdom},\n author = {Sangwon Suh and\ + \ Jeong-seob Lee and Woon Seung Yeo},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178949},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {90--93},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {A Gesture Detection with Guitar Pickup and\ + \ Earphones},\n url = {http://www.nime.org/proceedings/2014/nime2014_333.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176689 + doi: 10.5281/zenodo.1178949 issn: 2220-4806 - keywords: 'One-Thumb Input Device, HCI, Isometric Joystick, Mobile Computing, Handheld - Devices, Musical Instrument.' - pages: 112--115 - title: 'ThumbTec: A New Handheld Input Device' - url: http://www.nime.org/proceedings/2004/nime2004_112.pdf - year: 2004 + month: June + pages: 90--93 + publisher: 'Goldsmiths, University of London' + title: A Gesture Detection with Guitar Pickup and Earphones + url: http://www.nime.org/proceedings/2014/nime2014_333.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Torchia2004 - abstract: 'The authors have developed several methods for spatially distributing - spectral material in real-time using frequency-domain processing. Applying spectral - spatialization techniques to more than two channels introduces a few obstacles, - particularly with controllers, visualization and the manipulation of large amounts - of control data. Various interfaces are presented which address these issues. - We also discuss 3D “cube” controllers and visualizations, which go a long way - in aiding usability. 
A range of implementations were realized, each with its own - interface, automation, and output characteristics. We also explore a number of - novel techniques. For example, a sound’s spectral components can be mapped in - space based on its own components’ energy, or the energy of another signal’s components - (a kind of spatial cross-synthesis). Finally, we address aesthetic concerns, such - as perceptual and sonic coherency, which arise when sounds have been spectrally - dissected and scattered across a multi-channel spatial field in 64, 128 or more - spectral bands.' - address: 'Hamamatsu, Japan' - author: 'Torchia, Ryan H. and Lippe, Cort' - bibtex: "@inproceedings{Torchia2004,\n abstract = {The authors have developed several\ - \ methods for spatially distributing spectral material in real-time using frequency-domain\ - \ processing. Applying spectral spatialization techniques to more than two channels\ - \ introduces a few obstacles, particularly with controllers, visualization and\ - \ the manipulation of large amounts of control data. Various interfaces are presented\ - \ which address these issues. We also discuss 3D “cube” controllers and visualizations,\ - \ which go a long way in aiding usability. A range of implementations were realized,\ - \ each with its own interface, automation, and output characteristics. We also\ - \ explore a number of novel techniques. For example, a sound’s spectral components\ - \ can be mapped in space based on its own components’ energy, or the energy of\ - \ another signal’s components (a kind of spatial cross-synthesis). Finally, we\ - \ address aesthetic concerns, such as perceptual and sonic coherency, which arise\ - \ when sounds have been spectrally dissected and scattered across a multi-channel\ - \ spatial field in 64, 128 or more spectral bands.},\n address = {Hamamatsu, Japan},\n\ - \ author = {Torchia, Ryan H. 
and Lippe, Cort},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176679},\n issn = {2220-4806},\n pages = {116--119},\n title\ - \ = {Techniques for Multi-Channel Real-Time Spatial Distribution Using Frequency-Domain\ - \ Processing},\n url = {http://www.nime.org/proceedings/2004/nime2004_116.pdf},\n\ - \ year = {2004}\n}\n" + ID: fberthaut2014 + abstract: 'This paper presents a collaborative digital musical instrument that uses + the ephemeral and physical properties of soap bubbles to explore the complexity + layers and oscillating parameters of electronic (bass) music. This instrument, + called Wubbles, aims at encouraging both individual and collaborative musical + manipulations.' + address: 'London, United Kingdom' + author: Florent Berthaut and Jarrod Knibbe + bibtex: "@inproceedings{fberthaut2014,\n abstract = {This paper presents a collaborative\ + \ digital musical instrument that uses the ephemeral and physical properties of\ + \ soap bubbles to explore the complexity layers and oscillating parameters of\ + \ electronic (bass) music. 
This instrument, called Wubbles, aims at encouraging\ + \ both individual and collaborative musical manipulations.},\n address = {London,\ + \ United Kingdom},\n author = {Florent Berthaut and Jarrod Knibbe},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178716},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {499--500},\n publisher = {Goldsmiths, University of London},\n\ + \ title = {Wubbles: A Collaborative Ephemeral Musical Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_334.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176679 + doi: 10.5281/zenodo.1178716 issn: 2220-4806 - pages: 116--119 - title: Techniques for Multi-Channel Real-Time Spatial Distribution Using Frequency-Domain - Processing - url: http://www.nime.org/proceedings/2004/nime2004_116.pdf - year: 2004 + month: June + pages: 499--500 + publisher: 'Goldsmiths, University of London' + title: 'Wubbles: A Collaborative Ephemeral Musical Instrument' + url: http://www.nime.org/proceedings/2014/nime2014_334.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Hiraga2004 - abstract: 'Rencon is an annual international event that started in 2002. It has - roles of (1) pursuing evaluation methods for systems whose output includes subjective - issues, and (2) providing a forum for researches of several fields related to - musical expression. In the past. Rencon was held as a workshop associated with - a musical contest that provided a forum for presenting and discussing the latest - research in automatic performance rendering. This year we introduce new evaluation - methods of performance expression to Rencon: a Turing Test and a Gnirut Test, - which is a reverse Turing Test, for performance expression. 
We have opened a section - of the contests to any instruments and genre of music, including synthesized human - voices.' - address: 'Hamamatsu, Japan' - author: 'Hiraga, Rumi and Bresin, Roberto and Hirata, Keiji and Katayose, Haruhiro' - bibtex: "@inproceedings{Hiraga2004,\n abstract = {Rencon is an annual international\ - \ event that started in 2002. It has roles of (1) pursuing evaluation methods\ - \ for systems whose output includes subjective issues, and (2) providing a forum\ - \ for researches of several fields related to musical expression. In the past.\ - \ Rencon was held as a workshop associated with a musical contest that provided\ - \ a forum for presenting and discussing the latest research in automatic performance\ - \ rendering. This year we introduce new evaluation methods of performance expression\ - \ to Rencon: a Turing Test and a Gnirut Test, which is a reverse Turing Test,\ - \ for performance expression. We have opened a section of the contests to any\ - \ instruments and genre of music, including synthesized human voices.},\n address\ - \ = {Hamamatsu, Japan},\n author = {Hiraga, Rumi and Bresin, Roberto and Hirata,\ - \ Keiji and Katayose, Haruhiro},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176611},\n\ - \ issn = {2220-4806},\n keywords = {Rencon, Turing Test, Musical Expression, Performance\ - \ Rendering},\n pages = {120--123},\n title = {Rencon 2004: Turing Test for Musical\ - \ Expression},\n url = {http://www.nime.org/proceedings/2004/nime2004_120.pdf},\n\ - \ year = {2004}\n}\n" + ID: lpardue2014 + abstract: 'This paper presents a multi-modal approach to musical instrument pitch + tracking combining audio and position sensor data. Finger location on a violin + fingerboard is measured using resistive sensors, allowing rapid detection of approximate + pitch. 
The initial pitch estimate is then used to restrict the search space of + an audio pitch tracking algorithm. Most audio-only pitch tracking algorithms face + a fundamental tradeoff between accuracy and latency, with longer analysis windows + producing better pitch estimates at the cost of noticeable lag in a live performance + environment. Conversely, sensor-only strategies struggle to achieve the fine pitch + accuracy a human listener would expect. By combining the two approaches, high + accuracy and low latency can be simultaneously achieved.' + address: 'London, United Kingdom' + author: Laurel Pardue and Dongjuan Nian and Christopher Harte and Andrew McPherson + bibtex: "@inproceedings{lpardue2014,\n abstract = {This paper presents a multi-modal\ + \ approach to musical instrument pitch tracking combining audio and position sensor\ + \ data. Finger location on a violin fingerboard is measured using resistive sensors,\ + \ allowing rapid detection of approximate pitch. The initial pitch estimate is\ + \ then used to restrict the search space of an audio pitch tracking algorithm.\ + \ Most audio-only pitch tracking algorithms face a fundamental tradeoff between\ + \ accuracy and latency, with longer analysis windows producing better pitch estimates\ + \ at the cost of noticeable lag in a live performance environment. Conversely,\ + \ sensor-only strategies struggle to achieve the fine pitch accuracy a human listener\ + \ would expect. 
By combining the two approaches, high accuracy and low latency\ + \ can be simultaneously achieved.},\n address = {London, United Kingdom},\n author\ + \ = {Laurel Pardue and Dongjuan Nian and Christopher Harte and Andrew McPherson},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178899},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {54--59},\n publisher = {Goldsmiths, University of\ + \ London},\n title = {Low-Latency Audio Pitch Tracking: A Multi-Modal Sensor-Assisted\ + \ Approach},\n url = {http://www.nime.org/proceedings/2014/nime2014_336.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176611 + doi: 10.5281/zenodo.1178899 issn: 2220-4806 - keywords: 'Rencon, Turing Test, Musical Expression, Performance Rendering' - pages: 120--123 - title: 'Rencon 2004: Turing Test for Musical Expression' - url: http://www.nime.org/proceedings/2004/nime2004_120.pdf - year: 2004 + month: June + pages: 54--59 + publisher: 'Goldsmiths, University of London' + title: 'Low-Latency Audio Pitch Tracking: A Multi-Modal Sensor-Assisted Approach' + url: http://www.nime.org/proceedings/2014/nime2014_336.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Katayose2004 - abstract: 'This paper describes an approach for playing expressivemusic, as it refers - to a pianist''s expressiveness, with atapping-style interface. MIDI-formatted - expressiveperformances played by pianists were first analyzed andtransformed into - performance templates, in which thedeviations from a canonical description was - separatelydescribed for each event. Using one of the templates as askill complement, - a player can play music expressivelyover and under the beat level. This paper - presents ascheduler that allows a player to mix her/his own intensionand the expressiveness - in the performance template. 
Theresults of a forty-subject user study suggest - that using theexpression template contributes the subject''s joy of playingmusic - with the tapping-style performance interface. Thisresult is also supported by - a brain activation study that wasdone using a near-infrared spectroscopy (NIRS).Categories - and Subject DescriptorsH.5.5 [Information Interfaces and Presentation]: Sound - andMusic Computing methodologies and techniques.' - address: 'Hamamatsu, Japan' - author: 'Katayose, Haruhiro and Okudaira, Keita' - bibtex: "@inproceedings{Katayose2004,\n abstract = {This paper describes an approach\ - \ for playing expressivemusic, as it refers to a pianist's expressiveness, with\ - \ atapping-style interface. MIDI-formatted expressiveperformances played by pianists\ - \ were first analyzed andtransformed into performance templates, in which thedeviations\ - \ from a canonical description was separatelydescribed for each event. Using one\ - \ of the templates as askill complement, a player can play music expressivelyover\ - \ and under the beat level. This paper presents ascheduler that allows a player\ - \ to mix her/his own intensionand the expressiveness in the performance template.\ - \ Theresults of a forty-subject user study suggest that using theexpression template\ - \ contributes the subject's joy of playingmusic with the tapping-style performance\ - \ interface. 
Thisresult is also supported by a brain activation study that wasdone\ - \ using a near-infrared spectroscopy (NIRS).Categories and Subject DescriptorsH.5.5\ - \ [Information Interfaces and Presentation]: Sound andMusic Computing methodologies\ - \ and techniques.},\n address = {Hamamatsu, Japan},\n author = {Katayose, Haruhiro\ - \ and Okudaira, Keita},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176625},\n\ - \ issn = {2220-4806},\n keywords = {Rencon, interfaces for musical expression,\ - \ visualization},\n pages = {124--129},\n title = {Using an Expressive Performance\ - \ Template in a Music Conducting Interface},\n url = {http://www.nime.org/proceedings/2004/nime2004_124.pdf},\n\ - \ year = {2004}\n}\n" + ID: nklugel12014 + abstract: 'In this contribution, we will discuss a prototype that allows a group + of users to design sound collaboratively in real time using a multi-touch tabletop. + We make use of a machine learning method to generate a mapping from perceptual + audio features to synthesis parameters. This mapping is then used for visualization + and interaction. Finally, we discuss the results of a comparative evaluation study.' + address: 'London, United Kingdom' + author: Niklas Klügel and Timo Becker and Georg Groh + bibtex: "@inproceedings{nklugel12014,\n abstract = {In this contribution, we will\ + \ discuss a prototype that allows a group of users to design sound collaboratively\ + \ in real time using a multi-touch tabletop. We make use of a machine learning\ + \ method to generate a mapping from perceptual audio features to synthesis parameters.\ + \ This mapping is then used for visualization and interaction. 
Finally, we discuss\ + \ the results of a comparative evaluation study.},\n address = {London, United\ + \ Kingdom},\n author = {Niklas Kl\\''ugel and Timo Becker and Georg Groh},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178833},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {327--330},\n publisher = {Goldsmiths, University of London},\n\ + \ title = {Designing Sound Collaboratively Perceptually Motivated Audio Synthesis},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_339.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176625 + doi: 10.5281/zenodo.1178833 issn: 2220-4806 - keywords: 'Rencon, interfaces for musical expression, visualization' - pages: 124--129 - title: Using an Expressive Performance Template in a Music Conducting Interface - url: http://www.nime.org/proceedings/2004/nime2004_124.pdf - year: 2004 + month: June + pages: 327--330 + publisher: 'Goldsmiths, University of London' + title: Designing Sound Collaboratively Perceptually Motivated Audio Synthesis + url: http://www.nime.org/proceedings/2014/nime2014_339.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Kawahara2004 - abstract: 'A series of demonstrations of synthesized acappella songsbased on an - auditory morphing using STRAIGHT [5] willbe presented. Singing voice data for - morphing were extracted from the RWCmusic database of musical instrument sound. - Discussions on a new extension of the morphing procedure to deal with vibrato - will be introduced basedon the statistical analysis of the database and its effect - onsynthesized acappella will also be demonstrated.' 
- address: 'Hamamatsu, Japan' - author: 'Kawahara, Hideki and Banno, Hideki and Morise, Masanori' - bibtex: "@inproceedings{Kawahara2004,\n abstract = {A series of demonstrations of\ - \ synthesized acappella songsbased on an auditory morphing using STRAIGHT [5]\ - \ willbe presented. Singing voice data for morphing were extracted from the RWCmusic\ - \ database of musical instrument sound. Discussions on a new extension of the\ - \ morphing procedure to deal with vibrato will be introduced basedon the statistical\ - \ analysis of the database and its effect onsynthesized acappella will also be\ - \ demonstrated.},\n address = {Hamamatsu, Japan},\n author = {Kawahara, Hideki\ - \ and Banno, Hideki and Morise, Masanori},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176627},\n\ - \ issn = {2220-4806},\n keywords = {Rencon, Acappella, RWCdatabase, STRAIGHT,\ - \ morphing},\n pages = {130--131},\n title = {Acappella Synthesis Demonstrations\ - \ using RWC Music Database},\n url = {http://www.nime.org/proceedings/2004/nime2004_130.pdf},\n\ - \ year = {2004}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176627 - issn: 2220-4806 - keywords: 'Rencon, Acappella, RWCdatabase, STRAIGHT, morphing' - pages: 130--131 - title: Acappella Synthesis Demonstrations using RWC Music Database - url: http://www.nime.org/proceedings/2004/nime2004_130.pdf - year: 2004 - - -- ENTRYTYPE: inproceedings - ID: Dannenberg2004 - abstract: 'Real-time interactive software can be difficult to construct and debug. - Aura is a software platform to facilitate highly interactive systems that combine - audio signal processing, sophisticated control, sensors, computer animation, video - processing, and graphical user interfaces. 
Moreover, Aura is open-ended, allowing - diverse software components to be interconnected in a real-time framework. A recent - assessment of Aura has motivated a redesign of the communication system to support - remote procedure call. In addition, the audio signal processing framework has - been altered to reduce programming errors. The motivation behind these changes - is discussed, and measurements of run-time performance offer some general insights - for system designers.' - address: 'Hamamatsu, Japan' - author: 'Dannenberg, Roger B.' - bibtex: "@inproceedings{Dannenberg2004,\n abstract = {Real-time interactive software\ - \ can be difficult to construct and debug. Aura is a software platform to facilitate\ - \ highly interactive systems that combine audio signal processing, sophisticated\ - \ control, sensors, computer animation, video processing, and graphical user interfaces.\ - \ Moreover, Aura is open-ended, allowing diverse software components to be interconnected\ - \ in a real-time framework. A recent assessment of Aura has motivated a redesign\ - \ of the communication system to support remote procedure call. 
In addition, the\ - \ audio signal processing framework has been altered to reduce programming errors.\ - \ The motivation behind these changes is discussed, and measurements of run-time\ - \ performance offer some general insights for system designers.},\n address =\ - \ {Hamamatsu, Japan},\n author = {Dannenberg, Roger B.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176593},\n issn = {2220-4806},\n pages = {132--137},\n\ - \ title = {Aura II: Making Real-Time Systems Safe for Music},\n url = {http://www.nime.org/proceedings/2004/nime2004_132.pdf},\n\ - \ year = {2004}\n}\n" + ID: seloul2014 + abstract: 'We use Max/MSP framework to create a reliable but flexible approach for + managing live performances of music bands who rely on live playing with digital + music. This approach utilizes Max/MSP to allow any player an easy and low cost + way to apply and experiment innovative music interfaces for live performance, + without losing the professionalism required on stage. In that approach, every + 1-3 players is plugged to a unit consisting of a standard sound-card and laptop. + This unit is controlled by an interface that schedules and manages all the digital + sounds made by each player (VST effects, VST instruments and ''home-made'' interactive + interfaces). All the player''s units are then remotely controlled by a conductor + patch which is in charge of the synchronization of all the players and background + samples in real time, as well as providing sensitive metronome and scheduling + visual enhancement. Moreover, and not less important, we can take the advantage + of using virtual instruments and virtual effects in Max environment to manage + the mix, and routing the audio. This providing monitors and metronome to the players + ears, and virtual mixing via Max/MSP patch. 
This privilege almost eliminates the + dependency in the venue''s equipment, and in that way, the sound quality and music + ideas can be taken out directly from the studio to the stage.' + address: 'London, United Kingdom' + author: Yehiel Amo and Gil Zissu and Shaltiel Eloul and Eran Shlomi and Dima Schukin + and Almog Kalifa + bibtex: "@inproceedings{seloul2014,\n abstract = {We use Max/MSP framework to create\ + \ a reliable but flexible approach for managing live performances of music bands\ + \ who rely on live playing with digital music. This approach utilizes Max/MSP\ + \ to allow any player an easy and low cost way to apply and experiment innovative\ + \ music interfaces for live performance, without losing the professionalism required\ + \ on stage. In that approach, every 1-3 players is plugged to a unit consisting\ + \ of a standard sound-card and laptop. This unit is controlled by an interface\ + \ that schedules and manages all the digital sounds made by each player (VST effects,\ + \ VST instruments and 'home-made' interactive interfaces). All the player's units\ + \ are then remotely controlled by a conductor patch which is in charge of the\ + \ synchronization of all the players and background samples in real time, as well\ + \ as providing sensitive metronome and scheduling visual enhancement. Moreover,\ + \ and not less important, we can take the advantage of using virtual instruments\ + \ and virtual effects in Max environment to manage the mix, and routing the audio.\ + \ This providing monitors and metronome to the players ears, and virtual mixing\ + \ via Max/MSP patch. 
This privilege almost eliminates the dependency in the venue's\ + \ equipment, and in that way, the sound quality and music ideas can be taken out\ + \ directly from the studio to the stage.},\n address = {London, United Kingdom},\n\ + \ author = {Yehiel Amo and Gil Zissu and Shaltiel Eloul and Eran Shlomi and Dima\ + \ Schukin and Almog Kalifa},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178700},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {94--97},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {A Max/MSP Approach for Incorporating Digital\ + \ Music via Laptops in Live Performances of Music Bands},\n url = {http://www.nime.org/proceedings/2014/nime2014_340.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176593 + doi: 10.5281/zenodo.1178700 issn: 2220-4806 - pages: 132--137 - title: 'Aura II: Making Real-Time Systems Safe for Music' - url: http://www.nime.org/proceedings/2004/nime2004_132.pdf - year: 2004 + month: June + pages: 94--97 + publisher: 'Goldsmiths, University of London' + title: A Max/MSP Approach for Incorporating Digital Music via Laptops in Live Performances + of Music Bands + url: http://www.nime.org/proceedings/2014/nime2014_340.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Wang2004 - abstract: 'On-the-fly programming is a style of programming in which the programmer/performer/composer - augments and modifies the program while it is running, without stopping or restarting, - in order to assert expressive, programmable control at runtime. Because of the - fundamental powers of programming languages, we believe the technical and aesthetic - aspects of on-the-fly programming are worth exploring. 
In this paper, we present - a formalized framework for on-the-fly programming, based on the ChucK synthesis - language, which supports a truly concurrent audio programming model with sample-synchronous - timing, and a highly on-the-fly style of programming. We first provide a well-defined - notion of on-thefly programming. We then address four fundamental issues that - confront the on-the-fly programmer: timing, modularity, conciseness, and flexibility. - Using the features and properties of ChucK, we show how it solves many of these - issues. In this new model, we show that (1) concurrency provides natural modularity - for on-the-fly programming, (2) the timing mechanism in ChucK guarantees on-the-fly - precision and consistency, (3) the Chuck syntax improves conciseness, and (4) - the overall system is a useful framework for exploring on-the-fly programming. - Finally, we discuss the aesthetics of on-the-fly performance. ' - address: 'Hamamatsu, Japan' - author: 'Wang, Ge and Cook, Perry R.' - bibtex: "@inproceedings{Wang2004,\n abstract = {On-the-fly programming is a style\ - \ of programming in which the programmer/performer/composer augments and modifies\ - \ the program while it is running, without stopping or restarting, in order to\ - \ assert expressive, programmable control at runtime. Because of the fundamental\ - \ powers of programming languages, we believe the technical and aesthetic aspects\ - \ of on-the-fly programming are worth exploring. In this paper, we present a formalized\ - \ framework for on-the-fly programming, based on the ChucK synthesis language,\ - \ which supports a truly concurrent audio programming model with sample-synchronous\ - \ timing, and a highly on-the-fly style of programming. We first provide a well-defined\ - \ notion of on-thefly programming. 
We then address four fundamental issues that\ - \ confront the on-the-fly programmer: timing, modularity, conciseness, and flexibility.\ - \ Using the features and properties of ChucK, we show how it solves many of these\ - \ issues. In this new model, we show that (1) concurrency provides natural modularity\ - \ for on-the-fly programming, (2) the timing mechanism in ChucK guarantees on-the-fly\ - \ precision and consistency, (3) the Chuck syntax improves conciseness, and (4)\ - \ the overall system is a useful framework for exploring on-the-fly programming.\ - \ Finally, we discuss the aesthetics of on-the-fly performance. },\n address =\ - \ {Hamamatsu, Japan},\n author = {Wang, Ge and Cook, Perry R.},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176683},\n issn = {2220-4806},\n keywords = {code as\ - \ interface,compiler,concurrency,concurrent audio programming,on-the-fly programming,real-,synchronization,synthesis,time,timing,virtual\ - \ machine},\n pages = {138--143},\n title = {On-the-fly Programming: Using Code\ - \ as an Expressive Musical Instrument},\n url = {http://www.nime.org/proceedings/2004/nime2004_138.pdf},\n\ - \ year = {2004}\n}\n" + ID: asa2014 + abstract: 'The text exposes a perceptual approach to instrument design and composition, + and it introduces an instrument that outputs acoustic sound, digital sound, and + digital image. We explore disparities between human perception and digital analysis + as creative material. Because the instrument repurposes software intended to create + video games, we establish a distinction between the notion of ``flow'''' in music + and gaming, questioning how it may substantiate in interaction design. Furthermore, + we extrapolate from cognition/attention research to describe how the projected + image creates a reactive stage scene without deviating attention from the music.' 
+ address: 'London, United Kingdom' + author: Adriana Sa + bibtex: "@inproceedings{asa2014,\n abstract = {The text exposes a perceptual approach\ + \ to instrument design and composition, and it introduces an instrument that outputs\ + \ acoustic sound, digital sound, and digital image. We explore disparities between\ + \ human perception and digital analysis as creative material. Because the instrument\ + \ repurposes software intended to create video games, we establish a distinction\ + \ between the notion of ``flow'' in music and gaming, questioning how it may substantiate\ + \ in interaction design. Furthermore, we extrapolate from cognition/attention\ + \ research to describe how the projected image creates a reactive stage scene\ + \ without deviating attention from the music.},\n address = {London, United Kingdom},\n\ + \ author = {Adriana Sa},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178925},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {331--334},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Repurposing Video Game Software for Musical\ + \ Expression: A Perceptual Approach},\n url = {http://www.nime.org/proceedings/2014/nime2014_343.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176683 + doi: 10.5281/zenodo.1178925 issn: 2220-4806 - keywords: 'code as interface,compiler,concurrency,concurrent audio programming,on-the-fly - programming,real-,synchronization,synthesis,time,timing,virtual machine' - pages: 138--143 - title: 'On-the-fly Programming: Using Code as an Expressive Musical Instrument' - url: http://www.nime.org/proceedings/2004/nime2004_138.pdf - year: 2004 + month: June + pages: 331--334 + publisher: 'Goldsmiths, University of London' + title: 'Repurposing Video Game Software for Musical Expression: A Perceptual Approach' + 
url: http://www.nime.org/proceedings/2014/nime2014_343.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Lew2004 - abstract: 'This paper describes the design of an expressive tangible interface for - cinema editing as a live performance. A short survey of live video practices is - provided. The Live Cinema instrument is a cross between a musical instrument and - a film editing tool, tailored for improvisational control as well as performance - presence. Design specifications for the instrument evolved based on several types - of observations including: our own performances in which we used a prototype based - on available tools; an analysis of performative aspects of contemporary DJ equipment; - and an evaluation of organizational aspects of several generations of film editing - tools. Our instrument presents the performer with a large canvas where projected - images can be grabbed and moved around with both hands simultaneously; the performer - also has access to two video drums featuring haptic display to manipulate the - shots and cut between streams. The paper ends with a discussion of issues related - to the tensions between narrative structure and hands-on control, live and recorded - arts and the scoring of improvised films. ' - address: 'Hamamatsu, Japan' - author: 'Lew, Michael' - bibtex: "@inproceedings{Lew2004,\n abstract = {This paper describes the design of\ - \ an expressive tangible interface for cinema editing as a live performance. A\ - \ short survey of live video practices is provided. The Live Cinema instrument\ - \ is a cross between a musical instrument and a film editing tool, tailored for\ - \ improvisational control as well as performance presence. 
Design specifications\ - \ for the instrument evolved based on several types of observations including:\ - \ our own performances in which we used a prototype based on available tools;\ - \ an analysis of performative aspects of contemporary DJ equipment; and an evaluation\ - \ of organizational aspects of several generations of film editing tools. Our\ - \ instrument presents the performer with a large canvas where projected images\ - \ can be grabbed and moved around with both hands simultaneously; the performer\ - \ also has access to two video drums featuring haptic display to manipulate the\ - \ shots and cut between streams. The paper ends with a discussion of issues related\ - \ to the tensions between narrative structure and hands-on control, live and recorded\ - \ arts and the scoring of improvised films. },\n address = {Hamamatsu, Japan},\n\ - \ author = {Lew, Michael},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176631},\n\ - \ issn = {2220-4806},\n keywords = {live cinema, video controller, visual music,\ - \ DJ, VJ, film editing, tactile interface, two-hand interaction, improvisation,\ - \ performance, narrative structure.},\n pages = {144--149},\n title = {Live Cinema:\ - \ Designing an Instrument for Cinema Editing as a Live Performance},\n url = {http://www.nime.org/proceedings/2004/nime2004_144.pdf},\n\ - \ year = {2004}\n}\n" + ID: jmurphy2014 + abstract: 'This paper presents a method for a self-tuning procedure for musical + robots capable of continuous pitch-shifting. Such a technique is useful for robots + consisting of many strings: the ability to self-tune allows for long-term installation + without human intervention as well as on-the-fly tuning scheme changes. The presented + method consists of comparing a detuned string''s pitch at runtime to a pre-compiled + table of string responses at varying tensions. 
The behavior of the current detuned + string is interpolated from the two nearest pre-characterized neighbors, and the + desired virtual fret positions are added to the interpolated model. This method + allows for rapid tuning at runtime, requiring only a single string actuation to + determine the pitch. After a detailed description of the self-tuning technique + and implementation, the results will be evaluated on the new Swivel 2 robotic + slide guitar. The paper concludes with a discussion of performance applications + and ideas for subsequent work on self-tuning musical robotic systems.' + address: 'London, United Kingdom' + author: Jim Murphy and Paul Mathews and Ajay Kapur and Dale Carnegie + bibtex: "@inproceedings{jmurphy2014,\n abstract = {This paper presents a method\ + \ for a self-tuning procedure for musical robots capable of continuous pitch-shifting.\ + \ Such a technique is useful for robots consisting of many strings: the ability\ + \ to self-tune allows for long-term installation without human intervention as\ + \ well as on-the-fly tuning scheme changes. The presented method consists of comparing\ + \ a detuned string's pitch at runtime to a pre-compiled table of string responses\ + \ at varying tensions. The behavior of the current detuned string is interpolated\ + \ from the two nearest pre-characterized neighbors, and the desired virtual fret\ + \ positions are added to the interpolated model. This method allows for rapid\ + \ tuning at runtime, requiring only a single string actuation to determine the\ + \ pitch. After a detailed description of the self-tuning technique and implementation,\ + \ the results will be evaluated on the new Swivel 2 robotic slide guitar. 
The\ + \ paper concludes with a discussion of performance applications and ideas for\ + \ subsequent work on self-tuning musical robotic systems.},\n address = {London,\ + \ United Kingdom},\n author = {Jim Murphy and Paul Mathews and Ajay Kapur and\ + \ Dale Carnegie},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178883},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {565--568},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Robot: Tune Yourself! Automatic Tuning for\ + \ Musical Robotics},\n url = {http://www.nime.org/proceedings/2014/nime2014_345.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176631 + doi: 10.5281/zenodo.1178883 issn: 2220-4806 - keywords: 'live cinema, video controller, visual music, DJ, VJ, film editing, tactile - interface, two-hand interaction, improvisation, performance, narrative structure.' - pages: 144--149 - title: 'Live Cinema: Designing an Instrument for Cinema Editing as a Live Performance' - url: http://www.nime.org/proceedings/2004/nime2004_144.pdf - year: 2004 + month: June + pages: 565--568 + publisher: 'Goldsmiths, University of London' + title: 'Robot: Tune Yourself! Automatic Tuning for Musical Robotics' + url: http://www.nime.org/proceedings/2014/nime2014_345.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Poepel2004 - abstract: 'A system is introduced that allows a string player to control a synthesis - engine with the gestural skills he is used to. The implemented system is based - on an electric viola and a synthesis engine that is directly controlled by the - unanalysed audio signal of the instrument and indirectly by control parameters - mapped to the synthesis engine. 
This method offers a highly string-specific playability, - as it is sensitive to the kinds of musical articulation produced by traditional - playing techniques. Nuances of sound variation applied by the player will be present - in the output signal even if those nuances are beyond traditionally measurable - parameters like pitch, amplitude or brightness. The relatively minimal hardware - requirements make the instrument accessible with little expenditure.' - address: 'Hamamatsu, Japan' - author: 'Poepel, Cornelius' - bibtex: "@inproceedings{Poepel2004,\n abstract = {A system is introduced that allows\ - \ a string player to control a synthesis engine with the gestural skills he is\ - \ used to. The implemented system is based on an electric viola and a synthesis\ - \ engine that is directly controlled by the unanalysed audio signal of the instrument\ - \ and indirectly by control parameters mapped to the synthesis engine. This method\ - \ offers a highly string-specific playability, as it is sensitive to the kinds\ - \ of musical articulation produced by traditional playing techniques. Nuances\ - \ of sound variation applied by the player will be present in the output signal\ - \ even if those nuances are beyond traditionally measurable parameters like pitch,\ - \ amplitude or brightness. 
The relatively minimal hardware requirements make the\ - \ instrument accessible with little expenditure.},\n address = {Hamamatsu, Japan},\n\ - \ author = {Poepel, Cornelius},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176655},\n\ - \ issn = {2220-4806},\n keywords = {Electronic bowed string instrument, playability,\ - \ musical instrument design, human computer interface, oscillation controlled\ - \ sound synthesis},\n pages = {150--153},\n title = {Synthesized Strings for String\ - \ Players},\n url = {http://www.nime.org/proceedings/2004/nime2004_150.pdf},\n\ - \ year = {2004}\n}\n" + ID: astark2014 + abstract: 'Real-time audio analysis has great potential for being used to create + musically responsive applications in live performances. There have been many examples + of such use, including sound-responsive visualisations, adaptive audio effects + and machine musicianship. However, at present, using audio analysis algorithms + in live performance requires either some detailed knowledge about the algorithms + themselves, or programming or both. Those wishing to use audio analysis in live + performances may not have either of these as their strengths. Rather, they may + instead wish to focus upon systems that respond to audio analysis data, such as + visual projections or sound generators. In response, this paper introduces the + Sound Analyser an audio plug-in allowing users to a) select a custom set of audio + analyses to be performed in real-time and b) send that information via OSC so + that it can easily be used by other systems to develop responsive applications + for live performances and installations. A description of the system architecture + and audio analysis algorithms implemented in the plug-in is presented before moving + on to two case studies where the plug-in has been used in the field with artists.' 
+ address: 'London, United Kingdom' + author: Adam Stark + bibtex: "@inproceedings{astark2014,\n abstract = {Real-time audio analysis has great\ + \ potential for being used to create musically responsive applications in live\ + \ performances. There have been many examples of such use, including sound-responsive\ + \ visualisations, adaptive audio effects and machine musicianship. However, at\ + \ present, using audio analysis algorithms in live performance requires either\ + \ some detailed knowledge about the algorithms themselves, or programming or both.\ + \ Those wishing to use audio analysis in live performances may not have either\ + \ of these as their strengths. Rather, they may instead wish to focus upon systems\ + \ that respond to audio analysis data, such as visual projections or sound generators.\ + \ In response, this paper introduces the Sound Analyser an audio plug-in allowing\ + \ users to a) select a custom set of audio analyses to be performed in real-time\ + \ and b) send that information via OSC so that it can easily be used by other\ + \ systems to develop responsive applications for live performances and installations.\ + \ A description of the system architecture and audio analysis algorithms implemented\ + \ in the plug-in is presented before moving on to two case studies where the plug-in\ + \ has been used in the field with artists.},\n address = {London, United Kingdom},\n\ + \ author = {Adam Stark},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178945},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {183--186},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Sound Analyser: A Plug-In for Real-Time Audio\ + \ Analysis in Live Performances and Installations},\n url = {http://www.nime.org/proceedings/2014/nime2014_348.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for 
Musical Expression - doi: 10.5281/zenodo.1176655 + doi: 10.5281/zenodo.1178945 issn: 2220-4806 - keywords: 'Electronic bowed string instrument, playability, musical instrument design, - human computer interface, oscillation controlled sound synthesis' - pages: 150--153 - title: Synthesized Strings for String Players - url: http://www.nime.org/proceedings/2004/nime2004_150.pdf - year: 2004 + month: June + pages: 183--186 + publisher: 'Goldsmiths, University of London' + title: 'Sound Analyser: A Plug-In for Real-Time Audio Analysis in Live Performances + and Installations' + url: http://www.nime.org/proceedings/2014/nime2014_348.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Tanaka2004 - abstract: 'We present a system for collaborative musical creation onmobile wireless - networks. The work extends on simple peerto-peer file sharing systems towards - ad-hoc mobility andstreaming. It extends upon music listening from a passiveact - to a proactive, participative activity. The system consistsof a network based - interactive music engine and a portablerendering player. It serves as a platform - for experiments onstudying the sense of agency in collaborative creativeprocess, - and requirements for fostering musical satisfactionin remote collaboration. ' - address: 'Hamamatsu, Japan' - author: 'Tanaka, Atau' - bibtex: "@inproceedings{Tanaka2004,\n abstract = {We present a system for collaborative\ - \ musical creation onmobile wireless networks. The work extends on simple peerto-peer\ - \ file sharing systems towards ad-hoc mobility andstreaming. It extends upon music\ - \ listening from a passiveact to a proactive, participative activity. The system\ - \ consistsof a network based interactive music engine and a portablerendering\ - \ player. It serves as a platform for experiments onstudying the sense of agency\ - \ in collaborative creativeprocess, and requirements for fostering musical satisfactionin\ - \ remote collaboration. 
},\n address = {Hamamatsu, Japan},\n author = {Tanaka,\ - \ Atau},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176677},\n issn = {2220-4806},\n\ - \ keywords = {mobile music,peer-to-peer,wireless ad-hoc networks},\n pages = {154--156},\n\ - \ title = {Mobile Music Making},\n url = {http://www.nime.org/proceedings/2004/nime2004_154.pdf},\n\ - \ year = {2004}\n}\n" + ID: bjohnson2014 + abstract: 'This paper introduces recent developments in the Chronus series, a family + of custom controllers that afford a performer gestural interaction with surround + sound systems that can be easily integrated into their personal performance systems. + The controllers are built with the goal of encouraging more electronic musicians + to include the creation of dynamic pantophonic fields in performance. The paper + focuses on technical advances of the Chronus 2.0 prototype that extend the interface + to control both radial and angular positional data, and the controllers'' ease + of integration into electronic performance configurations, both for diffusion + and for performance from the wider electronic music community.' + address: 'London, United Kingdom' + author: Bridget Johnson and Michael Norris and Ajay Kapur + bibtex: "@inproceedings{bjohnson2014,\n abstract = {This paper introduces recent\ + \ developments in the Chronus series, a family of custom controllers that afford\ + \ a performer gestural interaction with surround sound systems that can be easily\ + \ integrated into their personal performance systems. The controllers are built\ + \ with the goal of encouraging more electronic musicians to include the creation\ + \ of dynamic pantophonic fields in performance. 
The paper focuses on technical\ + \ advances of the Chronus 2.0 prototype that extend the interface to control both\ + \ radial and angular positional data, and the controllers' ease of integration\ + \ into electronic performance configurations, both for diffusion and for performance\ + \ from the wider electronic music community.},\n address = {London, United Kingdom},\n\ + \ author = {Bridget Johnson and Michael Norris and Ajay Kapur},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178820},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {335--338},\n publisher = {Goldsmiths, University of London},\n title = {The\ + \ Development Of Physical Spatial Controllers},\n url = {http://www.nime.org/proceedings/2014/nime2014_349.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176677 + doi: 10.5281/zenodo.1178820 issn: 2220-4806 - keywords: 'mobile music,peer-to-peer,wireless ad-hoc networks' - pages: 154--156 - title: Mobile Music Making - url: http://www.nime.org/proceedings/2004/nime2004_154.pdf - year: 2004 + month: June + pages: 335--338 + publisher: 'Goldsmiths, University of London' + title: The Development Of Physical Spatial Controllers + url: http://www.nime.org/proceedings/2014/nime2014_349.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Flety2004 - abstract: 'This paper reports our recent developments on sensor acquisition systems, - taking advantage of computer network technology. We present a versatile hardware - system which can be connected to wireless modules, Analog to Digital Converters, - and enables Ethernet communication. We are planning to make freely available the - design of this architecture. We describe also several approaches we tested for - wireless communication. 
Such technology developments are currently used in our - newly formed Performance Arts Technology Group.' - address: 'Hamamatsu, Japan' - author: 'Fléty, Emmanuel and Leroy, Nicolas and Ravarini, Jean-Christophe and Bevilacqua, - Frédéric' - bibtex: "@inproceedings{Flety2004,\n abstract = {This paper reports our recent developments\ - \ on sensor acquisition systems, taking advantage of computer network technology.\ - \ We present a versatile hardware system which can be connected to wireless modules,\ - \ Analog to Digital Converters, and enables Ethernet communication. We are planning\ - \ to make freely available the design of this architecture. We describe also several\ - \ approaches we tested for wireless communication. Such technology developments\ - \ are currently used in our newly formed Performance Arts Technology Group.},\n\ - \ address = {Hamamatsu, Japan},\n author = {Fl\\'{e}ty, Emmanuel and Leroy, Nicolas\ - \ and Ravarini, Jean-Christophe and Bevilacqua, Fr\\'{e}d\\'{e}ric},\n booktitle\ + ID: ajensenius2014 + abstract: 'The term ''gesture'' has represented a buzzword in the NIME community + since the beginning of its conference series. But how often is it actually used, + what is it used to describe, and how does its usage here differ from its usage + in other fields of study? This paper presents a linguistic analysis of the motion-related + terminology used in all of the papers published in the NIME conference proceedings + to date (2001-2013). The results show that ''gesture'' is in fact used in 62 % + of all NIME papers, which is a significantly higher percentage than in other music + conferences (ICMC and SMC), and much more frequently than it is used in the HCI + and biomechanics communities. The results from a collocation analysis support + the claim that ''gesture'' is used broadly in the NIME community, and indicate + that it ranges from the description of concrete human motion and system control + to quite metaphorical applications.' 
+ address: 'London, United Kingdom' + author: Alexander Refsum Jensenius + bibtex: "@inproceedings{ajensenius2014,\n abstract = {The term 'gesture' has represented\ + \ a buzzword in the NIME community since the beginning of its conference series.\ + \ But how often is it actually used, what is it used to describe, and how does\ + \ its usage here differ from its usage in other fields of study? This paper presents\ + \ a linguistic analysis of the motion-related terminology used in all of the papers\ + \ published in the NIME conference proceedings to date (2001-2013). The results\ + \ show that 'gesture' is in fact used in 62 % of all NIME papers, which is a significantly\ + \ higher percentage than in other music conferences (ICMC and SMC), and much more\ + \ frequently than it is used in the HCI and biomechanics communities. The results\ + \ from a collocation analysis support the claim that 'gesture' is used broadly\ + \ in the NIME community, and indicate that it ranges from the description of concrete\ + \ human motion and system control to quite metaphorical applications.},\n address\ + \ = {London, United Kingdom},\n author = {Alexander Refsum Jensenius},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176597},\n issn = {2220-4806},\n keywords\ - \ = {Gesture, Sensors, Ethernet, 802.11, Computer Music.},\n pages = {157--160},\n\ - \ title = {Versatile Sensor Acquisition System Utilizing Network Technology},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_157.pdf},\n year = {2004}\n\ - }\n" + \ Expression},\n doi = {10.5281/zenodo.1178816},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {217--220},\n publisher = {Goldsmiths, University of London},\n\ + \ title = {To gesture or Not? 
{A}n Analysis of Terminology in {NIME} Proceedings\ + \ 2001--2013},\n url = {http://www.nime.org/proceedings/2014/nime2014_351.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176597 + doi: 10.5281/zenodo.1178816 issn: 2220-4806 - keywords: 'Gesture, Sensors, Ethernet, 802.11, Computer Music.' - pages: 157--160 - title: Versatile Sensor Acquisition System Utilizing Network Technology - url: http://www.nime.org/proceedings/2004/nime2004_157.pdf - year: 2004 + month: June + pages: 217--220 + publisher: 'Goldsmiths, University of London' + title: 'To gesture or Not? An Analysis of Terminology in NIME Proceedings 2001--2013' + url: http://www.nime.org/proceedings/2014/nime2014_351.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Gaye2004 - abstract: 'Sonic City is a wearable system enabling the use of the urban environment - as an interface for real-time electronic music making, when walking through and - interacting with a city. The device senses everyday interactions and surrounding - contexts, and maps this information in real time to the sound processing of urban - sounds. We conducted a short-term study with various participants using our prototype - in everyday settings. This paper describes the course of the study and preliminary - results in terms of how the participants used and experienced the system. These - results showed that the city was perceived as the main performer but that the - user improvised different tactics and ad hoc interventions to actively influence - and participate in how the music was created. ' - address: 'Hamamatsu, Japan' - author: 'Gaye, Lalya and Holmquist, Lars E.' - bibtex: "@inproceedings{Gaye2004,\n abstract = {Sonic City is a wearable system\ - \ enabling the use of the urban environment as an interface for real-time electronic\ - \ music making, when walking through and interacting with a city. 
The device senses\ - \ everyday interactions and surrounding contexts, and maps this information in\ - \ real time to the sound processing of urban sounds. We conducted a short-term\ - \ study with various participants using our prototype in everyday settings. This\ - \ paper describes the course of the study and preliminary results in terms of\ - \ how the participants used and experienced the system. These results showed that\ - \ the city was perceived as the main performer but that the user improvised different\ - \ tactics and ad hoc interventions to actively influence and participate in how\ - \ the music was created. },\n address = {Hamamatsu, Japan},\n author = {Gaye,\ - \ Lalya and Holmquist, Lars E.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176601},\n\ - \ issn = {2220-4806},\n keywords = {User study, new interface for musical expression,\ - \ interactive music, wearable computing, mobility, context-awareness.},\n pages\ - \ = {161--164},\n title = {In Duet with Everyday Urban Settings: A User Study\ - \ of Sonic City},\n url = {http://www.nime.org/proceedings/2004/nime2004_161.pdf},\n\ - \ year = {2004}\n}\n" + ID: etomas12014 + abstract: 'Tangible Scores are a new paradigm for musical instrument design with + a physical configuration inspired by graphic scores. In this paper we will focus + on the design aspects of this new interface as well as on some of the related + technical details. Creating an intuitive, modular and expressive instrument for + textural music was the primary driving force. Following these criteria, we literally + incorporated a musical score onto the surface of the instrument as a way of continuously + controlling several parameters of the sound synthesis. Tangible Scores are played + with both hands and they can adopt multiple physical forms. 
Complex and expressive + sound textures can be easily played over a variety of timbres, enabling precise + control in a natural manner.' + address: 'London, United Kingdom' + author: Enrique Tomás and Martin Kaltenbrunner + bibtex: "@inproceedings{etomas12014,\n abstract = {Tangible Scores are a new paradigm\ + \ for musical instrument design with a physical configuration inspired by graphic\ + \ scores. In this paper we will focus on the design aspects of this new interface\ + \ as well as on some of the related technical details. Creating an intuitive,\ + \ modular and expressive instrument for textural music was the primary driving\ + \ force. Following these criteria, we literally incorporated a musical score onto\ + \ the surface of the instrument as a way of continuously controlling several parameters\ + \ of the sound synthesis. Tangible Scores are played with both hands and they\ + \ can adopt multiple physical forms. Complex and expressive sound textures can\ + \ be easily played over a variety of timbres, enabling precise control in a natural\ + \ manner.},\n address = {London, United Kingdom},\n author = {Enrique Tom\\'as\ + \ and Martin Kaltenbrunner},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178953},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {609--614},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Tangible Scores: Shaping the Inherent Instrument\ + \ Score},\n url = {http://www.nime.org/proceedings/2014/nime2014_352.pdf},\n year\ + \ = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176601 + doi: 10.5281/zenodo.1178953 issn: 2220-4806 - keywords: 'User study, new interface for musical expression, interactive music, - wearable computing, mobility, context-awareness.' 
- pages: 161--164 - title: 'In Duet with Everyday Urban Settings: A User Study of Sonic City' - url: http://www.nime.org/proceedings/2004/nime2004_161.pdf - year: 2004 + month: June + pages: 609--614 + publisher: 'Goldsmiths, University of London' + title: 'Tangible Scores: Shaping the Inherent Instrument Score' + url: http://www.nime.org/proceedings/2014/nime2014_352.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Franco2004 - abstract: 'This paper begins by evaluating various systems in terms of factors for - building interactive audiovisual environments. The main issues for flexibility - and expressiveness in the generation of dynamic sounds and images are then isolated. - The design and development of an audiovisual system prototype is described at - the end. ' - address: 'Hamamatsu, Japan' - author: 'Franco, Enrique and Griffith, Niall J. and Fernström, Mikael' - bibtex: "@inproceedings{Franco2004,\n abstract = {This paper begins by evaluating\ - \ various systems in terms of factors for building interactive audiovisual environments.\ - \ The main issues for flexibility and expressiveness in the generation of dynamic\ - \ sounds and images are then isolated. The design and development of an audiovisual\ - \ system prototype is described at the end. },\n address = {Hamamatsu, Japan},\n\ - \ author = {Franco, Enrique and Griffith, Niall J. 
and Fernstr\\''{o}m, Mikael},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176599},\n issn = {2220-4806},\n\ - \ keywords = {Audiovisual, composition, performance, gesture, image, representation,\ - \ mapping, expressiveness.},\n pages = {165--168},\n title = {Issues for Designing\ - \ a Flexible Expressive Audiovisual System for Real-time Performance \\& Composition},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_165.pdf},\n year = {2004}\n\ + ID: emorgan2014 + abstract: 'New technologies have led to the design of exciting interfaces for collaborative + music making. However we still have very little understanding of the underlying + affective and communicative processes which occur during such interactions. To + address this issue, we carried out a pilot study where we collected continuous + behavioural, physiological, and performance related measures from pairs of improvising + drummers. This paper presents preliminary findings, which could be useful for + the evaluation and design of user-centred collaborative interfaces for musical + creativity and expression.' + address: 'London, United Kingdom' + author: Evan Morgan and Hatice Gunes and Nick Bryan-Kinns + bibtex: "@inproceedings{emorgan2014,\n abstract = {New technologies have led to\ + \ the design of exciting interfaces for collaborative music making. However we\ + \ still have very little understanding of the underlying affective and communicative\ + \ processes which occur during such interactions. To address this issue, we carried\ + \ out a pilot study where we collected continuous behavioural, physiological,\ + \ and performance related measures from pairs of improvising drummers. 
This paper\ + \ presents preliminary findings, which could be useful for the evaluation and\ + \ design of user-centred collaborative interfaces for musical creativity and expression.},\n\ + \ address = {London, United Kingdom},\n author = {Evan Morgan and Hatice Gunes\ + \ and Nick Bryan-Kinns},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178877},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {23--28},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Instrumenting the Interaction: Affective and\ + \ Psychophysiological Features of Live Collaborative Musical Improvisation},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_353.pdf},\n year = {2014}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176599 + doi: 10.5281/zenodo.1178877 issn: 2220-4806 - keywords: 'Audiovisual, composition, performance, gesture, image, representation, - mapping, expressiveness.' - pages: 165--168 - title: Issues for Designing a Flexible Expressive Audiovisual System for Real-time - Performance & Composition - url: http://www.nime.org/proceedings/2004/nime2004_165.pdf - year: 2004 + month: June + pages: 23--28 + publisher: 'Goldsmiths, University of London' + title: 'Instrumenting the Interaction: Affective and Psychophysiological Features + of Live Collaborative Musical Improvisation' + url: http://www.nime.org/proceedings/2014/nime2014_353.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Silva2004 - abstract: 'We describe a simple, computationally light, real-time system for tracking - the lower face and extracting informationabout the shape of the open mouth from - a video sequence.The system allows unencumbered control of audio synthesismodules - by action of the mouth. 
We report work in progressto use the mouth controller - to interact with a physical modelof sound production by the avian syrinx.' - address: 'Hamamatsu, Japan' - author: 'de Silva, Gamhewage C. and Smyth, Tamara and Lyons, Michael J.' - bibtex: "@inproceedings{Silva2004,\n abstract = {We describe a simple, computationally\ - \ light, real-time system for tracking the lower face and extracting informationabout\ - \ the shape of the open mouth from a video sequence.The system allows unencumbered\ - \ control of audio synthesismodules by action of the mouth. We report work in\ - \ progressto use the mouth controller to interact with a physical modelof sound\ - \ production by the avian syrinx.},\n address = {Hamamatsu, Japan},\n author =\ - \ {de Silva, Gamhewage C. and Smyth, Tamara and Lyons, Michael J.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176667},\n issn = {2220-4806},\n keywords\ - \ = {Mouth Controller, Face Tracking, Bioacoustics},\n pages = {169--172},\n title\ - \ = {A Novel Face-tracking Mouth Controller and its Application to Interacting\ - \ with Bioacoustic Models},\n url = {http://www.nime.org/proceedings/2004/nime2004_169.pdf},\n\ - \ year = {2004}\n}\n" + ID: bjohnston2014 + abstract: 'This paper details the creation, design, implementation and uses of a + series of new mechanically bowed string instruments. These instruments have been + designed with the objective of allowing for multiple parameters of musical expressivity, + as well as including the physical and spatial features of the instruments to be + integral aspects of their perception as instruments and sonic objects. This paper + focuses on the hardware design, software implementation, and present musical uses + of the ensemble.' 
+ address: 'London, United Kingdom' + author: Blake Johnston and Henry Dengate Thrush and Ajay Kapur and Jim Murphy and + Tane Moleta + bibtex: "@inproceedings{bjohnston2014,\n abstract = {This paper details the creation,\ + \ design, implementation and uses of a series of new mechanically bowed string\ + \ instruments. These instruments have been designed with the objective of allowing\ + \ for multiple parameters of musical expressivity, as well as including the physical\ + \ and spatial features of the instruments to be integral aspects of their perception\ + \ as instruments and sonic objects. This paper focuses on the hardware design,\ + \ software implementation, and present musical uses of the ensemble.},\n address\ + \ = {London, United Kingdom},\n author = {Blake Johnston and Henry Dengate Thrush\ + \ and Ajay Kapur and Jim Murphy and Tane Moleta},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178822},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {557--560},\n publisher = {Goldsmiths, University of London},\n title = {Polus:\ + \ The Design and Development of a New, Mechanically Bowed String Instrument Ensemble},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_355.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176667 + doi: 10.5281/zenodo.1178822 issn: 2220-4806 - keywords: 'Mouth Controller, Face Tracking, Bioacoustics' - pages: 169--172 - title: A Novel Face-tracking Mouth Controller and its Application to Interacting - with Bioacoustic Models - url: http://www.nime.org/proceedings/2004/nime2004_169.pdf - year: 2004 + month: June + pages: 557--560 + publisher: 'Goldsmiths, University of London' + title: 'Polus: The Design and Development of a New, Mechanically Bowed String Instrument + Ensemble' + url: 
http://www.nime.org/proceedings/2014/nime2014_355.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Nagashima2004 - abstract: 'In this paper, I would like to introduce my experimental study of multimedia - psychology. My initial focus of investigation is the interaction between perceptions - of auditory and visual beats. When the musical and graphical beats are completely - synchronized with each other, as in a music video for promotional purposes, the - audience feels that they are natural and comforting. My initial experiment has - proved that the actual tempos of music and images are a little different. If a - slight timelag exists between the musical and pictorial beats, the audience tries - to keep them in synchronization by unconsciously changing the interpretation of - the time-based beat points. As the lag increases over time, the audience seems - to perceive that the beat synchronization has changed from being more downbeat - to more upbeat, and continues enjoying it. I have developed an experiment system - that can generateand control out-of-phase visual and auditory beats in real time, - and have tested many subjects with it. This paper describes the measurement of - time lags generated in the experiment system, as part of my psychological experiment.' - address: 'Hamamatsu, Japan' - author: 'Nagashima, Yoichi' - bibtex: "@inproceedings{Nagashima2004,\n abstract = {In this paper, I would like\ - \ to introduce my experimental study of multimedia psychology. My initial focus\ - \ of investigation is the interaction between perceptions of auditory and visual\ - \ beats. When the musical and graphical beats are completely synchronized with\ - \ each other, as in a music video for promotional purposes, the audience feels\ - \ that they are natural and comforting. My initial experiment has proved that\ - \ the actual tempos of music and images are a little different. 
If a slight timelag\ - \ exists between the musical and pictorial beats, the audience tries to keep them\ - \ in synchronization by unconsciously changing the interpretation of the time-based\ - \ beat points. As the lag increases over time, the audience seems to perceive\ - \ that the beat synchronization has changed from being more downbeat to more upbeat,\ - \ and continues enjoying it. I have developed an experiment system that can generateand\ - \ control out-of-phase visual and auditory beats in real time, and have tested\ - \ many subjects with it. This paper describes the measurement of time lags generated\ - \ in the experiment system, as part of my psychological experiment.},\n address\ - \ = {Hamamatsu, Japan},\n author = {Nagashima, Yoichi},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176641},\n issn = {2220-4806},\n pages = {173--176},\n\ - \ title = {Measurement of Latency in Interactive Multimedia Art},\n url = {http://www.nime.org/proceedings/2004/nime2004_173.pdf},\n\ - \ year = {2004}\n}\n" + ID: oizmirli2014 + abstract: 'We present a framework for imitation of percussion performances with + parameter-based learning for accurate reproduction. We constructed a robotic setup + involving pull-solenoids attached to drum sticks which communicate with a computer + through an Arduino microcontroller. The imitation framework allows for parameter + adaptation to different mechanical constructions by learning the capabilities + of the overall system being used. For the rhythmic vocabulary, we have considered + regular stroke, flam and drag styles. A learning and calibration system was developed + to efficiently perform grace notes for the drag rudiment as well as the single + stroke and the flam rudiment. A second pre-performance process is introduced to + minimize the latency difference between individual drum sticks in our mechanical + setup. 
We also developed an off-line onset detection method to reliably recognize + onsets from the microphone input. Once these pre-performance steps are taken, + our setup will then listen to a human drummer''s performance pattern, analyze + for onsets, loudness, and rudiment pattern, and then play back using the learned + parameters for the particular system. We conducted three different evaluations + of our constructed system.' + address: 'London, United Kingdom' + author: Ozgur Izmirli and Jake Faris + bibtex: "@inproceedings{oizmirli2014,\n abstract = {We present a framework for imitation\ + \ of percussion performances with parameter-based learning for accurate reproduction.\ + \ We constructed a robotic setup involving pull-solenoids attached to drum sticks\ + \ which communicate with a computer through an Arduino microcontroller. The imitation\ + \ framework allows for parameter adaptation to different mechanical constructions\ + \ by learning the capabilities of the overall system being used. For the rhythmic\ + \ vocabulary, we have considered regular stroke, flam and drag styles. A learning\ + \ and calibration system was developed to efficiently perform grace notes for\ + \ the drag rudiment as well as the single stroke and the flam rudiment. A second\ + \ pre-performance process is introduced to minimize the latency difference between\ + \ individual drum sticks in our mechanical setup. We also developed an off-line\ + \ onset detection method to reliably recognize onsets from the microphone input.\ + \ Once these pre-performance steps are taken, our setup will then listen to a\ + \ human drummer's performance pattern, analyze for onsets, loudness, and rudiment\ + \ pattern, and then play back using the learned parameters for the particular\ + \ system. 
We conducted three different evaluations of our constructed system.},\n\ + \ address = {London, United Kingdom},\n author = {Ozgur Izmirli and Jake Faris},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178814},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {483--486},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Imitation Framework for Percussion},\n url = {http://www.nime.org/proceedings/2014/nime2014_360.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176641 + doi: 10.5281/zenodo.1178814 issn: 2220-4806 - pages: 173--176 - title: Measurement of Latency in Interactive Multimedia Art - url: http://www.nime.org/proceedings/2004/nime2004_173.pdf - year: 2004 + month: June + pages: 483--486 + publisher: 'Goldsmiths, University of London' + title: Imitation Framework for Percussion + url: http://www.nime.org/proceedings/2014/nime2014_360.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Ishida2004 - abstract: 'In this paper, we describe a novel improvisation supporting system based - on correcting musically unnatural melodies. Since improvisation is the musical - performance style that involves creating melodies while playing, it is not easy - even for the people who can play musical instruments. However, previous studies - have not dealt with improvisation support for the people who can play musical - instruments but cannot improvise. In this study, to support such players'' improvisation, - we propose a novel improvisation supporting system called ism, which corrects - musically unnatural melodies automatically. The main issue in realizing this system - is how to detect notes to be corrected (i.e., musically unnatural or inappropriate). - We propose a method for detecting notes to be corrected based on the N-gram model. 
- This method first calculates N-gram probabilities of played notes, and then judges - notes with low N-gram probabilities to be corrected. Experimental results show - that the N-gram-based melody correction and the proposed system are useful for - supporting improvisation.' - address: 'Hamamatsu, Japan' - author: 'Ishida, Katsuhisa and Kitahara, Tetsuro and Takeda, Masayuki' - bibtex: "@inproceedings{Ishida2004,\n abstract = {In this paper, we describe a novel\ - \ improvisation supporting system based on correcting musically unnatural melodies.\ - \ Since improvisation is the musical performance style that involves creating\ - \ melodies while playing, it is not easy even for the people who can play musical\ - \ instruments. However, previous studies have not dealt with improvisation support\ - \ for the people who can play musical instruments but cannot improvise. In this\ - \ study, to support such players' improvisation, we propose a novel improvisation\ - \ supporting system called ism, which corrects musically unnatural melodies automatically.\ - \ The main issue in realizing this system is how to detect notes to be corrected\ - \ (i.e., musically unnatural or inappropriate). We propose a method for detecting\ - \ notes to be corrected based on the N-gram model. This method first calculates\ - \ N-gram probabilities of played notes, and then judges notes with low N-gram\ - \ probabilities to be corrected. 
Experimental results show that the N-gram-based\ - \ melody correction and the proposed system are useful for supporting improvisation.},\n\ - \ address = {Hamamatsu, Japan},\n author = {Ishida, Katsuhisa and Kitahara, Tetsuro\ - \ and Takeda, Masayuki},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176617},\n\ - \ issn = {2220-4806},\n keywords = {Improvisation support, jam session, melody\ - \ correction, N-gram model, melody modeling, musical instrument},\n pages = {177--180},\n\ - \ title = {ism: Improvisation Supporting System based on Melody Correction},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_177.pdf},\n year = {2004}\n\ - }\n" + ID: ailsar2014 + abstract: 'This paper discusses one particular mapping for a new gestural instrument + called the AirSticks. This mapping was designed to be used for improvised or rehearsed + duos and restricts the performer to only utilising the sound source of one other + musician playing an acoustic instrument. Several pieces with different musicians + were performed and documented, musicians were observed and interviews with these + musicians were transcribed. In this paper we will examine the thoughts of these + musicians to gather a better understanding of how to design effective ensemble + instruments of this type.' + address: 'London, United Kingdom' + author: Alon Ilsar and Mark Havryliv and Andrew Johnston + bibtex: "@inproceedings{ailsar2014,\n abstract = {This paper discusses one particular\ + \ mapping for a new gestural instrument called the AirSticks. This mapping was\ + \ designed to be used for improvised or rehearsed duos and restricts the performer\ + \ to only utilising the sound source of one other musician playing an acoustic\ + \ instrument. 
Several pieces with different musicians were performed and documented,\ + \ musicians were observed and interviews with these musicians were transcribed.\ + \ In this paper we will examine the thoughts of these musicians to gather a better\ + \ understanding of how to design effective ensemble instruments of this type.},\n\ + \ address = {London, United Kingdom},\n author = {Alon Ilsar and Mark Havryliv\ + \ and Andrew Johnston},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178812},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {339--342},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Evaluating the Performance of a New Gestural\ + \ Instrument Within an Ensemble},\n url = {http://www.nime.org/proceedings/2014/nime2014_363.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176617 + doi: 10.5281/zenodo.1178812 issn: 2220-4806 - keywords: 'Improvisation support, jam session, melody correction, N-gram model, - melody modeling, musical instrument' - pages: 177--180 - title: 'ism: Improvisation Supporting System based on Melody Correction' - url: http://www.nime.org/proceedings/2004/nime2004_177.pdf - year: 2004 + month: June + pages: 339--342 + publisher: 'Goldsmiths, University of London' + title: Evaluating the Performance of a New Gestural Instrument Within an Ensemble + url: http://www.nime.org/proceedings/2014/nime2014_363.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Singer2004 - abstract: 'This paper describes new work and creations of LEMUR, agroup of artists - and technologists creating robotic musicalinstruments.' 
- address: 'Hamamatsu, Japan' - author: 'Singer, Eric and Feddersen, Jeff and Redmon, Chad and Bowen, Bil' - bibtex: "@inproceedings{Singer2004,\n abstract = {This paper describes new work\ - \ and creations of LEMUR, agroup of artists and technologists creating robotic\ - \ musicalinstruments.},\n address = {Hamamatsu, Japan},\n author = {Singer, Eric\ - \ and Feddersen, Jeff and Redmon, Chad and Bowen, Bil},\n booktitle = {Proceedings\ + ID: abarenca2014 + abstract: The Manipuller is a musical interface based on strings and multi-dimensional + force sensing. This paper presents a new architectural approach to the original + interface design which has been materialized with the implementation of the Manipuller + II system prototype. Besides the short paper we would like to do a poster presentation + plus a demo of the new prototype where the public will be invited to play with + the new musical interface. + address: 'London, United Kingdom' + author: Adrián Barenca and Milos Corak + bibtex: "@inproceedings{abarenca2014,\n abstract = {The Manipuller is a musical\ + \ interface based on strings and multi-dimensional force sensing. 
This paper presents\ + \ a new architectural approach to the original interface design which has been\ + \ materialized with the implementation of the Manipuller II system prototype.\ + \ Besides the short paper we would like to do a poster presentation plus a demo\ + \ of the new prototype where the public will be invited to play with the new musical\ + \ interface.},\n address = {London, United Kingdom},\n author = {Adri{\\'a}n Barenca\ + \ and Milos Corak},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178706},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {589--592},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {The Manipuller II: Strings within a Force\ + \ Sensing Ring},\n url = {http://www.nime.org/proceedings/2014/nime2014_364.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178706 + issn: 2220-4806 + month: June + pages: 589--592 + publisher: 'Goldsmiths, University of London' + title: 'The Manipuller II: Strings within a Force Sensing Ring' + url: http://www.nime.org/proceedings/2014/nime2014_364.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: osarier2014 + abstract: 'In the recent years many touch screen interfaces have been designed and + used for musical control. When compared with their physical counterparts, current + control paradigms employed in touch screen musical interfaces do not require the + same level of physical labor and this negatively affects the user experience in + terms of expressivity, engagement and enjoyment. This lack of physicality can + be remedied by using interaction elements, which are designed for the exertion + of the user. 
Employing intentionally difficult and inefficient interaction design + can enhance the user experience by allowing greater bodily expression, kinesthetic + feedback, more apparent skill acquisition, and performer satisfaction. Rub Synth + is a touch screen musical instrument with an exertion interface. It was made for + creating and testing exertion strategies that are possible by only using 2d touch + coordinates as input and evaluating the outcomes of implementing intentional difficulty. + This paper discusses the strategies that can be employed to model effort on touch + screens, the benefits of having physical difficulty, Rub Synth''s interaction + design, and user experience results of using such an interface.' + address: 'London, United Kingdom' + author: Ozan Sarier + bibtex: "@inproceedings{osarier2014,\n abstract = {In the recent years many touch\ + \ screen interfaces have been designed and used for musical control. When compared\ + \ with their physical counterparts, current control paradigms employed in touch\ + \ screen musical interfaces do not require the same level of physical labor and\ + \ this negatively affects the user experience in terms of expressivity, engagement\ + \ and enjoyment. This lack of physicality can be remedied by using interaction\ + \ elements, which are designed for the exertion of the user. Employing intentionally\ + \ difficult and inefficient interaction design can enhance the user experience\ + \ by allowing greater bodily expression, kinesthetic feedback, more apparent skill\ + \ acquisition, and performer satisfaction. Rub Synth is a touch screen musical\ + \ instrument with an exertion interface. It was made for creating and testing\ + \ exertion strategies that are possible by only using 2d touch coordinates as\ + \ input and evaluating the outcomes of implementing intentional difficulty. 
This\ + \ paper discusses the strategies that can be employed to model effort on touch\ + \ screens, the benefits of having physical difficulty, Rub Synth's interaction\ + \ design, and user experience results of using such an interface.},\n address\ + \ = {London, United Kingdom},\n author = {Ozan Sarier},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176669},\n issn = {2220-4806},\n keywords = {additional\ - \ computer or special,commands allows,familiar tools with no,improvisations,the\ - \ musician or composer,to control the instrument,use of standard midi,using},\n\ - \ pages = {181--184},\n title = {LEMUR's Musical Robots},\n url = {http://www.nime.org/proceedings/2004/nime2004_181.pdf},\n\ - \ year = {2004}\n}\n" + \ doi = {10.5281/zenodo.1178931},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {179--182},\n publisher = {Goldsmiths, University of London},\n title = {Rub\ + \ Synth : A Study of Implementing Intentional Physical Difficulty Into Touch Screen\ + \ Music Controllers},\n url = {http://www.nime.org/proceedings/2014/nime2014_367.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176669 + doi: 10.5281/zenodo.1178931 issn: 2220-4806 - keywords: 'additional computer or special,commands allows,familiar tools with no,improvisations,the - musician or composer,to control the instrument,use of standard midi,using' - pages: 181--184 - title: LEMUR's Musical Robots - url: http://www.nime.org/proceedings/2004/nime2004_181.pdf - year: 2004 + month: June + pages: 179--182 + publisher: 'Goldsmiths, University of London' + title: 'Rub Synth : A Study of Implementing Intentional Physical Difficulty Into + Touch Screen Music Controllers' + url: http://www.nime.org/proceedings/2014/nime2014_367.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Hornof2004 - abstract: 
'Though musical performers routinely use eye movements to communicate - with each other during musical performances, very few performers or composers - have used eye tracking devices to direct musical compositions and performances. - EyeMusic is a system that uses eye movements as an input to electronic music compositions. - The eye movements can directly control the music, or the music can respond to - the eyes moving around a visual scene. EyeMusic is implemented so that any composer - using established composition software can incorporate prerecorded eye movement - data into their musical compositions.' - address: 'Hamamatsu, Japan' - author: 'Hornof, Anthony J. and Sato, Linda' - bibtex: "@inproceedings{Hornof2004,\n abstract = {Though musical performers routinely\ - \ use eye movements to communicate with each other during musical performances,\ - \ very few performers or composers have used eye tracking devices to direct musical\ - \ compositions and performances. EyeMusic is a system that uses eye movements\ - \ as an input to electronic music compositions. The eye movements can directly\ - \ control the music, or the music can respond to the eyes moving around a visual\ - \ scene. EyeMusic is implemented so that any composer using established composition\ - \ software can incorporate prerecorded eye movement data into their musical compositions.},\n\ - \ address = {Hamamatsu, Japan},\n author = {Hornof, Anthony J. and Sato, Linda},\n\ + ID: lfyfe2014 + abstract: 'The Nexus Data Exchange Format (NDEF) is an Open Sound Control (OSC) + namespace specification designed to make connection and message management tasks + easier for OSC-based networked performance systems. New extensions to the NDEF + namespace improve both connection and message management between OSC client and + server nodes. Connection management between nodes now features human-readable + labels for connections and a new message exchange for pinging connections to determine + their status. 
Message management now has improved namespace synchronization via + a message count exchange and by the ability to add, remove, and replace messages + on connected nodes.' + address: 'London, United Kingdom' + author: Lawrence Fyfe and Adam Tindale and Sheelagh Carpendale + bibtex: "@inproceedings{lfyfe2014,\n abstract = {The Nexus Data Exchange Format\ + \ (NDEF) is an Open Sound Control (OSC) namespace specification designed to make\ + \ connection and message management tasks easier for OSC-based networked performance\ + \ systems. New extensions to the NDEF namespace improve both connection and message\ + \ management between OSC client and server nodes. Connection management between\ + \ nodes now features human-readable labels for connections and a new message exchange\ + \ for pinging connections to determine their status. Message management now has\ + \ improved namespace synchronization via a message count exchange and by the ability\ + \ to add, remove, and replace messages on connected nodes.},\n address = {London,\ + \ United Kingdom},\n author = {Lawrence Fyfe and Adam Tindale and Sheelagh Carpendale},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176613},\n issn = {2220-4806},\n\ - \ keywords = {Electronic music composition, eye movements, eye tracking, human-computer\ - \ interaction, Max/MSP.},\n pages = {185--188},\n title = {EyeMusic: Making Music\ - \ with the Eyes},\n url = {http://www.nime.org/proceedings/2004/nime2004_185.pdf},\n\ - \ year = {2004}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1178768},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {343--346},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Extending the Nexus Data Exchange Format (NDEF) Specification},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_368.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on 
New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176613 + doi: 10.5281/zenodo.1178768 issn: 2220-4806 - keywords: 'Electronic music composition, eye movements, eye tracking, human-computer - interaction, Max/MSP.' - pages: 185--188 - title: 'EyeMusic: Making Music with the Eyes' - url: http://www.nime.org/proceedings/2004/nime2004_185.pdf - year: 2004 + month: June + pages: 343--346 + publisher: 'Goldsmiths, University of London' + title: Extending the Nexus Data Exchange Format (NDEF) Specification + url: http://www.nime.org/proceedings/2014/nime2014_368.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Argo2004 - abstract: 'When working with sample-based media, a performer is managing timelines, - loop points, sample parameters and effects parameters. The Slidepipe is a performance - controller that gives the artist a visually simple way to work with their material. - Its design is modular and lightweight, so it can be easily transported and quickly - assembled. Also, its large stature magnifies the gestures associated with its - play, providing a more convincing performance. In this paper, I will describe - what the controller is, how this new controller interface has affected my live - performance, and how it can be used in different performance scenarios. ' - address: 'Hamamatsu, Japan' - author: 'Argo, Mark' - bibtex: "@inproceedings{Argo2004,\n abstract = {When working with sample-based media,\ - \ a performer is managing timelines, loop points, sample parameters and effects\ - \ parameters. The Slidepipe is a performance controller that gives the artist\ - \ a visually simple way to work with their material. Its design is modular and\ - \ lightweight, so it can be easily transported and quickly assembled. Also, its\ - \ large stature magnifies the gestures associated with its play, providing a more\ - \ convincing performance. 
In this paper, I will describe what the controller is,\ - \ how this new controller interface has affected my live performance, and how\ - \ it can be used in different performance scenarios. },\n address = {Hamamatsu,\ - \ Japan},\n author = {Argo, Mark},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176581},\n\ - \ issn = {2220-4806},\n keywords = {Controller, Sample Manipulation, Live Performance,\ - \ Open Sound Control, Human Computer Interaction},\n pages = {189--192},\n title\ - \ = {The Slidepipe: A Timeline-Based Controller for Real-Time Sample Manipulation},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_189.pdf},\n year = {2004}\n\ - }\n" + ID: ihattwick12014 + abstract: This paper presents results from the development of a digital musical + instrument which uses audio feature extraction for the control of sound synthesis. + Our implementation utilizes multi-band audio analysis to generate control signals. + This technique is well-suited to instruments for which the gestural interface + is intentionally weakly defined. We present a percussion instrument utilizing + this technique in which the timbral characteristics of found objects are the primary + source of audio for analysis. + address: 'London, United Kingdom' + author: Ian Hattwick and Preston Beebe and Zachary Hale and Marcelo Wanderley and + Philippe Leroux and Fabrice Marandola + bibtex: "@inproceedings{ihattwick12014,\n abstract = {This paper presents results\ + \ from the development of a digital musical instrument which uses audio feature\ + \ extraction for the control of sound synthesis. Our implementation utilizes multi-band\ + \ audio analysis to generate control signals. 
This technique is well-suited to\ + \ instruments for which the gestural interface is intentionally weakly defined.\ + \ We present a percussion instrument utilizing this technique in which the timbral\ + \ characteristics of found objects are the primary source of audio for analysis.},\n\ + \ address = {London, United Kingdom},\n author = {Ian Hattwick and Preston Beebe\ + \ and Zachary Hale and Marcelo Wanderley and Philippe Leroux and Fabrice Marandola},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178790},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {597--600},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Unsounding Objects: Audio Feature Extraction for the\ + \ Control of Sound Synthesis},\n url = {http://www.nime.org/proceedings/2014/nime2014_369.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176581 + doi: 10.5281/zenodo.1178790 issn: 2220-4806 - keywords: 'Controller, Sample Manipulation, Live Performance, Open Sound Control, - Human Computer Interaction' - pages: 189--192 - title: 'The Slidepipe: A Timeline-Based Controller for Real-Time Sample Manipulation' - url: http://www.nime.org/proceedings/2004/nime2004_189.pdf - year: 2004 + month: June + pages: 597--600 + publisher: 'Goldsmiths, University of London' + title: 'Unsounding Objects: Audio Feature Extraction for the Control of Sound Synthesis' + url: http://www.nime.org/proceedings/2014/nime2014_369.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Burtner2004 - abstract: This paper describes a theory for modulated objects based on observations - of recent musical interface design trends. The theory implies extensions to an - object-based approach to controller design. Combining NIME research with ethnographic - study of shamanic traditions. 
The author discusses the creation of new controllers - based on the shamanic use of ritual objects. - address: 'Hamamatsu, Japan' - author: 'Burtner, Matthew' - bibtex: "@inproceedings{Burtner2004,\n abstract = {This paper describes a theory\ - \ for modulated objects based on observations of recent musical interface design\ - \ trends. The theory implies extensions to an object-based approach to controller\ - \ design. Combining NIME research with ethnographic study of shamanic traditions.\ - \ The author discusses the creation of new controllers based on the shamanic use\ - \ of ritual objects.},\n address = {Hamamatsu, Japan},\n author = {Burtner, Matthew},\n\ + ID: ihattwick2014 + abstract: 'Moving new DMIs from the research lab to professional artistic contexts + places new demands on both their design and manufacturing. Through a discussion + of the Prosthetic Instruments, a family of digital musical instruments we designed + for use in an interactive dance performance, we discuss four different approaches + to manufacturing -artisanal, building block, rapid prototyping, and industrial. + We discuss our use of these different approaches as we strove to reconcile the + many conflicting constraints placed upon the instruments'' design due to their + use as hypothetical prosthetic extensions to dancers'' bodies, as aesthetic objects, + and as instruments used in a professional touring context. Experiences and lessons + learned during the design and manufacturing process are discussed in relation + both to these manufacturing approaches as well as to Bill Buxton''s concept of + artist-spec design.' + address: 'London, United Kingdom' + author: Ian Hattwick and Joseph Malloch and Marcelo Wanderley + bibtex: "@inproceedings{ihattwick2014,\n abstract = {Moving new DMIs from the research\ + \ lab to professional artistic contexts places new demands on both their design\ + \ and manufacturing. 
Through a discussion of the Prosthetic Instruments, a family\ + \ of digital musical instruments we designed for use in an interactive dance performance,\ + \ we discuss four different approaches to manufacturing -artisanal, building block,\ + \ rapid prototyping, and industrial. We discuss our use of these different approaches\ + \ as we strove to reconcile the many conflicting constraints placed upon the instruments'\ + \ design due to their use as hypothetical prosthetic extensions to dancers' bodies,\ + \ as aesthetic objects, and as instruments used in a professional touring context.\ + \ Experiences and lessons learned during the design and manufacturing process\ + \ are discussed in relation both to these manufacturing approaches as well as\ + \ to Bill Buxton's concept of artist-spec design.},\n address = {London, United\ + \ Kingdom},\n author = {Ian Hattwick and Joseph Malloch and Marcelo Wanderley},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176585},\n issn = {2220-4806},\n\ - \ keywords = {Music and Video Controllers, New Interface Design, Music Composition,\ - \ Multimedia, Mythology, Shamanism, Ecoacoustics},\n pages = {193--196},\n title\ - \ = {A Theory of Modulated Objects for New Shamanic Controller Design},\n url\ - \ = {http://www.nime.org/proceedings/2004/nime2004_193.pdf},\n year = {2004}\n\ - }\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1178792},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {443--448},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Forming Shapes to Bodies: Design for Manufacturing in\ + \ the Prosthetic Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_370.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176585 + doi: 10.5281/zenodo.1178792 issn: 2220-4806 - keywords: 'Music 
and Video Controllers, New Interface Design, Music Composition, - Multimedia, Mythology, Shamanism, Ecoacoustics' - pages: 193--196 - title: A Theory of Modulated Objects for New Shamanic Controller Design - url: http://www.nime.org/proceedings/2004/nime2004_193.pdf - year: 2004 + month: June + pages: 443--448 + publisher: 'Goldsmiths, University of London' + title: 'Forming Shapes to Bodies: Design for Manufacturing in the Prosthetic Instruments' + url: http://www.nime.org/proceedings/2014/nime2014_370.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Pelletier2004 - abstract: 'In this paper, I will describe a computer vision-based musical performance - system that uses morphological assessments to provide control data. Using shape - analysis allows the system to provide qualitative descriptors of the scene being - captured while ensuring its use in a wide variety of different settings. This - system was implemented under Max/MSP/Jitter, augmented with a number of external - objects. (1)' - address: 'Hamamatsu, Japan' - author: 'Pelletier, Jean-Marc' - bibtex: "@inproceedings{Pelletier2004,\n abstract = {In this paper, I will describe\ - \ a computer vision-based musical performance system that uses morphological assessments\ - \ to provide control data. Using shape analysis allows the system to provide qualitative\ - \ descriptors of the scene being captured while ensuring its use in a wide variety\ - \ of different settings. This system was implemented under Max/MSP/Jitter, augmented\ - \ with a number of external objects. 
(1)},\n address = {Hamamatsu, Japan},\n author\ - \ = {Pelletier, Jean-Marc},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176653},\n\ - \ issn = {2220-4806},\n keywords = {computer vision,image analysis,maxmsp,morphology,musical},\n\ - \ pages = {197--198},\n title = {A Shape-Based Approach to Computer Vision Musical\ - \ Performance Systems},\n url = {http://www.nime.org/proceedings/2004/nime2004_197.pdf},\n\ - \ year = {2004}\n}\n" + ID: cnash2014 + abstract: 'This paper explores the concept of end-user programming languages in + music composition, and introduces the Manhattan system, which integrates formulas + with a grid-based style of music sequencer. Following the paradigm of spreadsheets, + an established model of end-user programming, Manhattan is designed to bridge + the gap between traditional music editing methods (such as MIDI sequencing and + typesetting) and generative and algorithmic music -seeking both to reduce the + learning threshold of programming and support flexible integration of static and + dynamic musical elements in a single work. Interaction draws on rudimentary knowledge + of mathematics and spreadsheets to augment the sequencer notation with programming + concepts such as expressions, built-in functions, variables, pointers and arrays, + iteration (for loops), branching (goto), and conditional statements (if-then-else). + In contrast to other programming tools, formulas emphasise the visibility of musical + data (e.g. notes), rather than code, but also allow composers to interact with + notated music from a more abstract perspective of musical processes. To illustrate + the function and use cases of the system, several examples of traditional and + generative music are provided, the latter drawing on minimalism (process-based + music) as an accessible introduction to algorithmic composition. 
Throughout, the + system and approach are evaluated using the cognitive dimensions of notations + framework, together with early feedback for use by artists.' + address: 'London, United Kingdom' + author: Chris Nash + bibtex: "@inproceedings{cnash2014,\n abstract = {This paper explores the concept\ + \ of end-user programming languages in music composition, and introduces the Manhattan\ + \ system, which integrates formulas with a grid-based style of music sequencer.\ + \ Following the paradigm of spreadsheets, an established model of end-user programming,\ + \ Manhattan is designed to bridge the gap between traditional music editing methods\ + \ (such as MIDI sequencing and typesetting) and generative and algorithmic music\ + \ -seeking both to reduce the learning threshold of programming and support flexible\ + \ integration of static and dynamic musical elements in a single work. Interaction\ + \ draws on rudimentary knowledge of mathematics and spreadsheets to augment the\ + \ sequencer notation with programming concepts such as expressions, built-in functions,\ + \ variables, pointers and arrays, iteration (for loops), branching (goto), and\ + \ conditional statements (if-then-else). In contrast to other programming tools,\ + \ formulas emphasise the visibility of musical data (e.g. notes), rather than\ + \ code, but also allow composers to interact with notated music from a more abstract\ + \ perspective of musical processes. To illustrate the function and use cases of\ + \ the system, several examples of traditional and generative music are provided,\ + \ the latter drawing on minimalism (process-based music) as an accessible introduction\ + \ to algorithmic composition. 
Throughout, the system and approach are evaluated\ + \ using the cognitive dimensions of notations framework, together with early feedback\ + \ for use by artists.},\n address = {London, United Kingdom},\n author = {Chris\ + \ Nash},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178891},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {221--226},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Manhattan: End-User Programming for Music},\n url = {http://www.nime.org/proceedings/2014/nime2014_371.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176653 + doi: 10.5281/zenodo.1178891 issn: 2220-4806 - keywords: 'computer vision,image analysis,maxmsp,morphology,musical' - pages: 197--198 - title: A Shape-Based Approach to Computer Vision Musical Performance Systems - url: http://www.nime.org/proceedings/2004/nime2004_197.pdf - year: 2004 + month: June + pages: 221--226 + publisher: 'Goldsmiths, University of London' + title: 'Manhattan: End-User Programming for Music' + url: http://www.nime.org/proceedings/2014/nime2014_371.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Hughes2004 - abstract: 'The Epipe is a novel electronic woodwind controller with continuous tonehole - coverage sensing, an initial design for which was introduced at NIME ''03. Since - then, we have successfully completed two fully operational prototypes. This short - paper describes some of the issues encountered during the design and construction - of this controller. It also details our own early experiences and impressions - of the interface as well as its technical specifications. 
' - address: 'Hamamatsu, Japan' - author: 'Hughes, Stephen and Cannon, Cormac and O''Modhrain, Sile' - bibtex: "@inproceedings{Hughes2004,\n abstract = {The Epipe is a novel electronic\ - \ woodwind controller with continuous tonehole coverage sensing, an initial design\ - \ for which was introduced at NIME '03. Since then, we have successfully completed\ - \ two fully operational prototypes. This short paper describes some of the issues\ - \ encountered during the design and construction of this controller. It also details\ - \ our own early experiences and impressions of the interface as well as its technical\ - \ specifications. },\n address = {Hamamatsu, Japan},\n author = {Hughes, Stephen\ - \ and Cannon, Cormac and O'Modhrain, Sile},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176615},\n issn = {2220-4806},\n keywords = {woodwind controller,\ - \ variable tonehole control, MIDI, capacitive sensing},\n pages = {199--200},\n\ - \ title = {Epipe : A Novel Electronic Woodwind Controller},\n url = {http://www.nime.org/proceedings/2004/nime2004_199.pdf},\n\ - \ year = {2004}\n}\n" + ID: croberts2014 + abstract: 'We describe research enabling the rapid creation of digital musical instruments + and their publication to the Internet. This research comprises both high-level + abstractions for making continuous mappings between audio, interactive, and graphical + elements, as well as a centralized database for storing and accessing instruments. + Published instruments run in most devices capable of running a modern web browser. + Notation of instrument design is optimized for readability and expressivity.' 
+ address: 'London, United Kingdom' + author: Charlie Roberts and Matthew Wright and JoAnn Kuchera-Morin and Tobias Höllerer + bibtex: "@inproceedings{croberts2014,\n abstract = {We describe research enabling\ + \ the rapid creation of digital musical instruments and their publication to the\ + \ Internet. This research comprises both high-level abstractions for making continuous\ + \ mappings between audio, interactive, and graphical elements, as well as a centralized\ + \ database for storing and accessing instruments. Published instruments run in\ + \ most devices capable of running a modern web browser. Notation of instrument\ + \ design is optimized for readability and expressivity.},\n address = {London,\ + \ United Kingdom},\n author = {Charlie Roberts and Matthew Wright and JoAnn Kuchera-Morin\ + \ and Tobias H{\\''o}llerer},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178919},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {239--242},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Rapid Creation and Publication of Digital\ + \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_373.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176615 + doi: 10.5281/zenodo.1178919 issn: 2220-4806 - keywords: 'woodwind controller, variable tonehole control, MIDI, capacitive sensing' - pages: 199--200 - title: 'Epipe : A Novel Electronic Woodwind Controller' - url: http://www.nime.org/proceedings/2004/nime2004_199.pdf - year: 2004 + month: June + pages: 239--242 + publisher: 'Goldsmiths, University of London' + title: Rapid Creation and Publication of Digital Musical Instruments + url: http://www.nime.org/proceedings/2014/nime2014_373.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Morris2004 - abstract: 'This paper describes the 
SillyTone Squish Factory, a haptically engaging - musical interface. It contains the motivation behind the device''s development, - a description of the interface, various mappings of the interface to musical applications, - details of its construction, and the requirements to demo the interface. ' - address: 'Hamamatsu, Japan' - author: 'Morris, Geoffrey C. and Leitman, Sasha and Kassianidou, Marina' - bibtex: "@inproceedings{Morris2004,\n abstract = {This paper describes the SillyTone\ - \ Squish Factory, a haptically engaging musical interface. It contains the motivation\ - \ behind the device's development, a description of the interface, various mappings\ - \ of the interface to musical applications, details of its construction, and the\ - \ requirements to demo the interface. },\n address = {Hamamatsu, Japan},\n author\ - \ = {Morris, Geoffrey C. and Leitman, Sasha and Kassianidou, Marina},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176639},\n issn = {2220-4806},\n pages\ - \ = {201--202},\n title = {SillyTone Squish Factory},\n url = {http://www.nime.org/proceedings/2004/nime2004_201.pdf},\n\ - \ year = {2004}\n}\n" + ID: ibukvic2014 + abstract: 'The following paper showcases new integrated Pd-L2Ork system and its + K12 educational counterpart running on Raspberry Pi hardware. A collection of + new externals and abstractions in conjunction with the Modern Device LOP shield + transforms Raspberry Pi into a cost-efficient sensing hub providing Arduino-like + connectivity with 10 digital I/O pins (including both software and hardware implementations + of pulse width modulation) and 8 analog inputs, while offering a number of integrated + features, including audio I/O, USB and Ethernet connectivity and video output.' 
+ address: 'London, United Kingdom' + author: Ivica Bukvic + bibtex: "@inproceedings{ibukvic2014,\n abstract = {The following paper showcases\ + \ new integrated Pd-L2Ork system and its K12 educational counterpart running on\ + \ Raspberry Pi hardware. A collection of new externals and abstractions in conjunction\ + \ with the Modern Device LOP shield transforms Raspberry Pi into a cost-efficient\ + \ sensing hub providing Arduino-like connectivity with 10 digital I/O pins (including\ + \ both software and hardware implementations of pulse width modulation) and 8\ + \ analog inputs, while offering a number of integrated features, including audio\ + \ I/O, USB and Ethernet connectivity and video output.},\n address = {London,\ + \ United Kingdom},\n author = {Ivica Bukvic},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178726},\n issn = {2220-4806},\n month = {June},\n pages =\ + \ {163--166},\n publisher = {Goldsmiths, University of London},\n title = {Pd-L2Ork\ + \ Raspberry Pi Toolkit as a Comprehensive Arduino Alternative in K-12 and Production\ + \ Scenarios},\n url = {http://www.nime.org/proceedings/2014/nime2014_377.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176639 + doi: 10.5281/zenodo.1178726 issn: 2220-4806 - pages: 201--202 - title: SillyTone Squish Factory - url: http://www.nime.org/proceedings/2004/nime2004_201.pdf - year: 2004 + month: June + pages: 163--166 + publisher: 'Goldsmiths, University of London' + title: Pd-L2Ork Raspberry Pi Toolkit as a Comprehensive Arduino Alternative in K-12 + and Production Scenarios + url: http://www.nime.org/proceedings/2014/nime2014_377.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Steiner2004 - abstract: 'StickMusic is an instrument comprised of two haptic devices, a joystick - and a mouse, which control a phase 
vocoder in real time. The purpose is to experiment - with ideas of how to apply haptic feedback when controlling synthesis algorithms - that have no direct analogy to methods of generating sound in the physical world. ' - address: 'Hamamatsu, Japan' - author: 'Steiner, Hans-Christoph' - bibtex: "@inproceedings{Steiner2004,\n abstract = {StickMusic is an instrument comprised\ - \ of two haptic devices, a joystick and a mouse, which control a phase vocoder\ - \ in real time. The purpose is to experiment with ideas of how to apply haptic\ - \ feedback when controlling synthesis algorithms that have no direct analogy to\ - \ methods of generating sound in the physical world. },\n address = {Hamamatsu,\ - \ Japan},\n author = {Steiner, Hans-Christoph},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176671},\n issn = {2220-4806},\n keywords = {haptic feedback,\ - \ gestural control, performance, joystick, mouse},\n pages = {203--204},\n title\ - \ = {StickMusic: Using Haptic Feedback with a Phase Vocoder},\n url = {http://www.nime.org/proceedings/2004/nime2004_203.pdf},\n\ - \ year = {2004}\n}\n" + ID: rkleinberger2014 + abstract: 'Vocal Vibrations is a new project by the Opera of the Future group at + the MIT Media Lab that seeks to engage the public in thoughtful singing and vocalizing, + while exploring the relationship between human physiology and the resonant vibrations + of the voice. This paper describes the motivations, the technical implementation, + and the experience design of the Vocal Vibrations public installation. This installation + consists of a space for reflective listening to a vocal composition (the Chapel) + and an interactive space for personal vocal exploration (the Cocoon). 
In the interactive + experience, the participant also experiences a tangible exteriorization of his + voice by holding the ORB, a handheld device that translates his voice and singing + into tactile vibrations. This installation encourages visitors to explore the + physicality and expressivity of their voices in a rich musical context.' + address: 'London, United Kingdom' + author: Charles Holbrow and Elena Jessop and Rebecca Kleinberger + bibtex: "@inproceedings{rkleinberger2014,\n abstract = {Vocal Vibrations is a new\ + \ project by the Opera of the Future group at the MIT Media Lab that seeks to\ + \ engage the public in thoughtful singing and vocalizing, while exploring the\ + \ relationship between human physiology and the resonant vibrations of the voice.\ + \ This paper describes the motivations, the technical implementation, and the\ + \ experience design of the Vocal Vibrations public installation. This installation\ + \ consists of a space for reflective listening to a vocal composition (the Chapel)\ + \ and an interactive space for personal vocal exploration (the Cocoon). In the\ + \ interactive experience, the participant also experiences a tangible exteriorization\ + \ of his voice by holding the ORB, a handheld device that translates his voice\ + \ and singing into tactile vibrations. 
This installation encourages visitors to\ + \ explore the physicality and expressivity of their voices in a rich musical context.},\n\ + \ address = {London, United Kingdom},\n author = {Charles Holbrow and Elena Jessop\ + \ and Rebecca Kleinberger},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178800},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {431--434},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Vocal Vibrations: A Multisensory Experience\ + \ of the Voice},\n url = {http://www.nime.org/proceedings/2014/nime2014_378.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176671 + doi: 10.5281/zenodo.1178800 issn: 2220-4806 - keywords: 'haptic feedback, gestural control, performance, joystick, mouse' - pages: 203--204 - title: 'StickMusic: Using Haptic Feedback with a Phase Vocoder' - url: http://www.nime.org/proceedings/2004/nime2004_203.pdf - year: 2004 + month: June + pages: 431--434 + publisher: 'Goldsmiths, University of London' + title: 'Vocal Vibrations: A Multisensory Experience of the Voice' + url: http://www.nime.org/proceedings/2014/nime2014_378.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Coduys2004 - abstract: 'High capacity of transmission lines (Ethernet in particular) is much - higher than what imposed by MIDI today. So it is possible to use capturing interfaces - with high-speed and high-resolution, thanks to the OSC protocol, for musical synthesis - (either in realtime or non real-time). These new interfaces offer many advantages, - not only in the area of musical composition with use of sensors but also in live - and interactive performances. In this manner, the processes of calibration and - signal processing are delocalized on a personal computer and augments possibilities - of processing. 
In this demo, we present two hardware interfaces developed in La - kitchen with corresponding processing to achieve a high-resolution, high-speed - sensor processing for musical applications. ' - address: 'Hamamatsu, Japan' - author: 'Coduys, Thierry and Henry, Cyrille and Cont, Arshia' - bibtex: "@inproceedings{Coduys2004,\n abstract = {High capacity of transmission\ - \ lines (Ethernet in particular) is much higher than what imposed by MIDI today.\ - \ So it is possible to use capturing interfaces with high-speed and high-resolution,\ - \ thanks to the OSC protocol, for musical synthesis (either in realtime or non\ - \ real-time). These new interfaces offer many advantages, not only in the area\ - \ of musical composition with use of sensors but also in live and interactive\ - \ performances. In this manner, the processes of calibration and signal processing\ - \ are delocalized on a personal computer and augments possibilities of processing.\ - \ In this demo, we present two hardware interfaces developed in La kitchen with\ - \ corresponding processing to achieve a high-resolution, high-speed sensor processing\ - \ for musical applications. },\n address = {Hamamatsu, Japan},\n author = {Coduys,\ - \ Thierry and Henry, Cyrille and Cont, Arshia},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176587},\n issn = {2220-4806},\n keywords = {Interface, Sensors,\ - \ Calibration, Precision, OSC, Pure Data, Max/MSP.},\n pages = {205--206},\n title\ - \ = {TOASTER and KROONDE: High-Resolution and High- Speed Real-time Sensor Interfaces},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_205.pdf},\n year = {2004}\n\ + ID: gdublon2014 + abstract: 'We present the FingerSynth, a wearable musical instrument made up of + a bracelet and set of rings that enable its player to produce sound by touching + nearly any surface in their environment. 
Each ring contains a small, independently + controlled exciter transducer commonly used for auditory bone conduction. The + rings sound loudly when they touch a hard object, and are practically silent otherwise. + When a wearer touches their own (or someone else''s) head, the contacted person + hears the sound through bone conduction, inaudible to others. The bracelet contains + a microcontroller, a set of FET transistors, an accelerometer, and a battery. + The microcontroller generates a separate audio signal for each ring, switched + through the FETs, and can take user input through the accelerometer in the form + of taps, flicks, and other gestures. The player controls the envelope and timbre + of the sound by varying the physical pressure and the angle of their finger on + the surface, or by touching differently resonant surfaces. Because its sound is + shaped by direct, physical contact with objects and people, the FingerSynth encourages + players to experiment with the materials around them and with one another, making + music with everything they touch.' + address: 'London, United Kingdom' + author: Gershon Dublon and Joseph A. Paradiso + bibtex: "@inproceedings{gdublon2014,\n abstract = {We present the FingerSynth, a\ + \ wearable musical instrument made up of a bracelet and set of rings that enable\ + \ its player to produce sound by touching nearly any surface in their environment.\ + \ Each ring contains a small, independently controlled exciter transducer commonly\ + \ used for auditory bone conduction. The rings sound loudly when they touch a\ + \ hard object, and are practically silent otherwise. When a wearer touches their\ + \ own (or someone else's) head, the contacted person hears the sound through bone\ + \ conduction, inaudible to others. The bracelet contains a microcontroller, a\ + \ set of FET transistors, an accelerometer, and a battery. 
The microcontroller\ + \ generates a separate audio signal for each ring, switched through the FETs,\ + \ and can take user input through the accelerometer in the form of taps, flicks,\ + \ and other gestures. The player controls the envelope and timbre of the sound\ + \ by varying the physical pressure and the angle of their finger on the surface,\ + \ or by touching differently resonant surfaces. Because its sound is shaped by\ + \ direct, physical contact with objects and people, the FingerSynth encourages\ + \ players to experiment with the materials around them and with one another, making\ + \ music with everything they touch.},\n address = {London, United Kingdom},\n\ + \ author = {Gershon Dublon and Joseph A. Paradiso},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178754},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {134--135},\n publisher = {Goldsmiths, University of London},\n title = {FingerSynth:\ + \ Wearable Transducers for Exploring the Environment and Playing Music Everywhere},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_379.pdf},\n year = {2014}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176587 - issn: 2220-4806 - keywords: 'Interface, Sensors, Calibration, Precision, OSC, Pure Data, Max/MSP.' - pages: 205--206 - title: 'TOASTER and KROONDE: High-Resolution and High- Speed Real-time Sensor Interfaces' - url: http://www.nime.org/proceedings/2004/nime2004_205.pdf - year: 2004 - - -- ENTRYTYPE: inproceedings - ID: Goto2004 - abstract: 'We will discuss the case study of application of the Virtual Musical - Instrument and Sound Synthesis. Doing this application, the main subject is advanced - Mapping Interface in order to connect these. 
For this experiment, our discussion - also refers to Neural Network, as well as a brief introduction of the Virtual - Musical Instrument "Le SuperPolm" and Gesture Controller "BodySuit".' - address: 'Hamamatsu, Japan' - author: 'Goto, Suguru and Suzuki, Takahiko' - bibtex: "@inproceedings{Goto2004,\n abstract = {We will discuss the case study of\ - \ application of the Virtual Musical Instrument and Sound Synthesis. Doing this\ - \ application, the main subject is advanced Mapping Interface in order to connect\ - \ these. For this experiment, our discussion also refers to Neural Network, as\ - \ well as a brief introduction of the Virtual Musical Instrument \"Le SuperPolm\"\ - \ and Gesture Controller \"BodySuit\".},\n address = {Hamamatsu, Japan},\n author\ - \ = {Goto, Suguru and Suzuki, Takahiko},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176605},\n\ - \ issn = {2220-4806},\n keywords = {Virtual Musical Instrument, Gesture Controller,\ - \ Mapping Interface},\n pages = {207--208},\n title = {The Case Study of Application\ - \ of Advanced Gesture Interface and Mapping Interface, Virtual Musical Instrument\ - \ \"Le SuperPolm\" and Gesture Controller \"BodySuit\"},\n url = {http://www.nime.org/proceedings/2004/nime2004_207.pdf},\n\ - \ year = {2004}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176605 + doi: 10.5281/zenodo.1178754 issn: 2220-4806 - keywords: 'Virtual Musical Instrument, Gesture Controller, Mapping Interface' - pages: 207--208 - title: 'The Case Study of Application of Advanced Gesture Interface and Mapping - Interface, Virtual Musical Instrument "Le SuperPolm" and Gesture Controller "BodySuit"' - url: http://www.nime.org/proceedings/2004/nime2004_207.pdf - year: 2004 + month: June + pages: 134--135 + publisher: 'Goldsmiths, University of London' + title: 'FingerSynth: Wearable 
Transducers for Exploring the Environment and Playing + Music Everywhere' + url: http://www.nime.org/proceedings/2014/nime2014_379.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Won2004 - abstract: 'In this paper, we describe a new MIDI controller, the Light Pipes. The - Light Pipes are a series of pipes that respond to incident light. The paper will - discuss the design of the instrument, and the prototype we built. A piece was - composed for the instrument using algorithms designed in Pure Data.' - address: 'Hamamatsu, Japan' - author: 'Won, Sook Y. and Chan, Humane and Liu, Jeremy' - bibtex: "@inproceedings{Won2004,\n abstract = {In this paper, we describe a new\ - \ MIDI controller, the Light Pipes. The Light Pipes are a series of pipes that\ - \ respond to incident light. The paper will discuss the design of the instrument,\ - \ and the prototype we built. A piece was composed for the instrument using algorithms\ - \ designed in Pure Data.},\n address = {Hamamatsu, Japan},\n author = {Won, Sook\ - \ Y. and Chan, Humane and Liu, Jeremy},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176685},\n\ - \ issn = {2220-4806},\n keywords = {Controllers, MIDI, light sensors, Pure Data.},\n\ - \ pages = {209--210},\n title = {Light Pipes: A Light Controlled {MIDI} Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_209.pdf},\n year = {2004}\n\ - }\n" + ID: fhashimoto2014 + abstract: 'In recent years, there has been an increase in the number of artists + who make use of automated music performances in their music and live concerts. + Automated music performance is a form of music production using programmed musical + notes. Some artists who introduce automated music performance operate parameters + of the sound in their performance for production of their music. 
In this paper, + we focus on the music production aspects and describe a method that realizes operation + of the sound parameters via computer. Further, in this study, the probability + distribution of the action (i.e., variation of parameters) is obtained within + the music, using Bayesian filters. The probability distribution of each piece + of music is transformed by passing through a Markov model. After the probability + distribution is obtained, sound parameters can be automatically controlled. We + have developed a system to reproduce the musical expressions of humans and confirmed + the possibilities of our method.' + address: 'London, United Kingdom' + author: Fumito Hashimoto and Motoki Miura + bibtex: "@inproceedings{fhashimoto2014,\n abstract = {In recent years, there has\ + \ been an increase in the number of artists who make use of automated music performances\ + \ in their music and live concerts. Automated music performance is a form of music\ + \ production using programmed musical notes. Some artists who introduce automated\ + \ music performance operate parameters of the sound in their performance for production\ + \ of their music. In this paper, we focus on the music production aspects and\ + \ describe a method that realizes operation of the sound parameters via computer.\ + \ Further, in this study, the probability distribution of the action (i.e., variation\ + \ of parameters) is obtained within the music, using Bayesian filters. The probability\ + \ distribution of each piece of music is transformed by passing through a Markov\ + \ model. After the probability distribution is obtained, sound parameters can\ + \ be automatically controlled. 
We have developed a system to reproduce the musical\ + \ expressions of humans and confirmed the possibilities of our method.},\n address\ + \ = {London, United Kingdom},\n author = {Fumito Hashimoto and Motoki Miura},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178788},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {347--350},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Operating Sound Parameters Using {Markov} Model and {Bayes}ian\ + \ Filters in Automated Music Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_380.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176685 + doi: 10.5281/zenodo.1178788 issn: 2220-4806 - keywords: 'Controllers, MIDI, light sensors, Pure Data.' - pages: 209--210 - title: 'Light Pipes: A Light Controlled MIDI Instrument' - url: http://www.nime.org/proceedings/2004/nime2004_209.pdf - year: 2004 + month: June + pages: 347--350 + publisher: 'Goldsmiths, University of London' + title: Operating Sound Parameters Using Markov Model and Bayesian Filters in Automated + Music Performance + url: http://www.nime.org/proceedings/2014/nime2014_380.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Lippit2004 - abstract: 'In this paper, I describe a realtime sampling system for theturntablist, - and the hardware and software design of the secondprototype, 16padjoystickcontroller.' - address: 'Hamamatsu, Japan' - author: 'Lippit, Takuro M.' 
- bibtex: "@inproceedings{Lippit2004,\n abstract = {In this paper, I describe a realtime\ - \ sampling system for theturntablist, and the hardware and software design of\ - \ the secondprototype, 16padjoystickcontroller.},\n address = {Hamamatsu, Japan},\n\ - \ author = {Lippit, Takuro M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176633},\n\ - \ issn = {2220-4806},\n keywords = {DJ, Turntablism, Realtime Sampling, MAX/MSP,\ - \ Microchip PIC microcontroller, MIDI},\n pages = {211--212},\n title = {Realtime\ - \ Sampling System for the Turntablist, Version 2: 16padjoystickcontroller},\n\ - \ url = {http://www.nime.org/proceedings/2004/nime2004_211.pdf},\n year = {2004}\n\ - }\n" + ID: rgupfinger2014 + abstract: 'Street art opens a new, broad research field in the context of urban + communication and sound aesthetics in public space. The primary focus of this + article is the relevance and effects of using sound technologies and audio devices + to shape urban landscape and soundscape. This paper examines the process of developing + an alternative type of street art that uses sound as its medium. It represents + multiple audio device prototypes, which encourage new chances for street artists + and activists to contribute their messages and signs in public spaces. Furthermore, + it documents different approaches to establishing this alternative urban practice + within the street art and new media art field. The findings also expose a research + space for sound and technical interventions in the context of street art.' + address: 'London, United Kingdom' + author: Reinhard Gupfinger and Martin Kaltenbrunner + bibtex: "@inproceedings{rgupfinger2014,\n abstract = {Street art opens a new, broad\ + \ research field in the context of urban communication and sound aesthetics in\ + \ public space. 
The primary focus of this article is the relevance and effects\ + \ of using sound technologies and audio devices to shape urban landscape and soundscape.\ + \ This paper examines the process of developing an alternative type of street\ + \ art that uses sound as its medium. It represents multiple audio device prototypes,\ + \ which encourage new chances for street artists and activists to contribute their\ + \ messages and signs in public spaces. Furthermore, it documents different approaches\ + \ to establishing this alternative urban practice within the street art and new\ + \ media art field. The findings also expose a research space for sound and technical\ + \ interventions in the context of street art.},\n address = {London, United Kingdom},\n\ + \ author = {Reinhard Gupfinger and Martin Kaltenbrunner},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178778},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {577--580},\n publisher = {Goldsmiths, University of London},\n title = {SOUND\ + \ TOSSING Audio Devices in the Context of Street Art},\n url = {http://www.nime.org/proceedings/2014/nime2014_385.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176633 + doi: 10.5281/zenodo.1178778 issn: 2220-4806 - keywords: 'DJ, Turntablism, Realtime Sampling, MAX/MSP, Microchip PIC microcontroller, - MIDI' - pages: 211--212 - title: 'Realtime Sampling System for the Turntablist, Version 2: 16padjoystickcontroller' - url: http://www.nime.org/proceedings/2004/nime2004_211.pdf - year: 2004 + month: June + pages: 577--580 + publisher: 'Goldsmiths, University of London' + title: SOUND TOSSING Audio Devices in the Context of Street Art + url: http://www.nime.org/proceedings/2014/nime2014_385.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Sharon2004 - abstract: This paper 
describes the design and on-going development of an expressive - gestural MIDI interface and how this couldenhance live performance of electronic - music. - address: 'Hamamatsu, Japan' - author: 'Sharon, Michael E.' - bibtex: "@inproceedings{Sharon2004,\n abstract = {This paper describes the design\ - \ and on-going development of an expressive gestural MIDI interface and how this\ - \ couldenhance live performance of electronic music.},\n address = {Hamamatsu,\ - \ Japan},\n author = {Sharon, Michael E.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176661},\n\ - \ issn = {2220-4806},\n keywords = {gestural control, mapping, Pure Data (pd),\ - \ accelerometers, MIDI, microcontrollers, synthesis, musical instruments},\n pages\ - \ = {213--214},\n title = {The Stranglophone: Enhancing Expressiveness In Live\ - \ Electronic Music},\n url = {http://www.nime.org/proceedings/2004/nime2004_213.pdf},\n\ - \ year = {2004}\n}\n" + ID: tmitchell2014 + abstract: 'Wireless technology is growing increasingly prevalent in the development + of new interfaces for live music performance. However, with a number of different + wireless technologies operating in the 2.4 GHz band, there is a high risk of interference + and congestion, which has the potential to severely disrupt live performances. + With its high transmission power, channel bandwidth and throughput, Wi-Fi (IEEE + 802.11) presents an opportunity for highly robust wireless communications. This + paper presents our preliminary work optimising the components of a Wi-Fi system + for live performance scenarios. We summarise the manufacture and testing of a + prototype directional antenna that is designed to maximise sensitivity to a performer''s + signal while suppressing interference from elsewhere. We also propose a set of + recommended Wi-Fi configurations to reduce latency and increase throughput. 
Practical + investigations utilising these arrangements demonstrate a single x-OSC device + achieving a latency of <3 ms and a distributed network of 15 devices achieving + a net throughput of ~4800 packets per second (~320 per device); where each packet + is a 104-byte OSC message containing 16 analogue input channels acquired by the + device.' + address: 'London, United Kingdom' + author: Thomas Mitchell and Sebastian Madgwick and Simon Rankine and Geoffrey Hilton + and Adrian Freed and Andrew Nix + bibtex: "@inproceedings{tmitchell2014,\n abstract = {Wireless technology is growing\ + \ increasingly prevalent in the development of new interfaces for live music performance.\ + \ However, with a number of different wireless technologies operating in the 2.4\ + \ GHz band, there is a high risk of interference and congestion, which has the\ + \ potential to severely disrupt live performances. With its high transmission\ + \ power, channel bandwidth and throughput, Wi-Fi (IEEE 802.11) presents an opportunity\ + \ for highly robust wireless communications. This paper presents our preliminary\ + \ work optimising the components of a Wi-Fi system for live performance scenarios.\ + \ We summarise the manufacture and testing of a prototype directional antenna\ + \ that is designed to maximise sensitivity to a performer's signal while suppressing\ + \ interference from elsewhere. We also propose a set of recommended Wi-Fi configurations\ + \ to reduce latency and increase throughput. 
Practical investigations utilising\ + \ these arrangements demonstrate a single x-OSC device achieving a latency of\ + \ <3 ms and a distributed network of 15 devices achieving a net throughput of\ + \ ~4800 packets per second (~320 per device); where each packet is a 104-byte\ + \ OSC message containing 16 analogue input channels acquired by the device.},\n\ + \ address = {London, United Kingdom},\n author = {Thomas Mitchell and Sebastian\ + \ Madgwick and Simon Rankine and Geoffrey Hilton and Adrian Freed and Andrew Nix},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178875},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {251--256},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Making the Most of Wi-Fi: Optimisations for Robust Wireless\ + \ Live Music Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_386.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176661 + doi: 10.5281/zenodo.1178875 issn: 2220-4806 - keywords: 'gestural control, mapping, Pure Data (pd), accelerometers, MIDI, microcontrollers, - synthesis, musical instruments' - pages: 213--214 - title: 'The Stranglophone: Enhancing Expressiveness In Live Electronic Music' - url: http://www.nime.org/proceedings/2004/nime2004_213.pdf - year: 2004 + month: June + pages: 251--256 + publisher: 'Goldsmiths, University of London' + title: 'Making the Most of Wi-Fi: Optimisations for Robust Wireless Live Music Performance' + url: http://www.nime.org/proceedings/2014/nime2014_386.pdf + year: 2014 - ENTRYTYPE: inproceedings - ID: Hashida2004 - abstract: 'This paper proposes an interface for improvisational ensemble plays which - synthesizes musical sounds and graphical images on the floor from people''s act - of "walking". 
The aim of this paper is to develop such a system that enables nonprofessional - people in our public spaces to play good contrapuntal music without any knowledge - of music theory. The people are just walking. This system is based on the i-trace - system [1] which can capture the people''s behavior and give some visual feedback. ' - address: 'Hamamatsu, Japan' - author: 'Hashida, Tomoko and Kakehi, Yasuaki and Naemura, Takeshi' - bibtex: "@inproceedings{Hashida2004,\n abstract = {This paper proposes an interface\ - \ for improvisational ensemble plays which synthesizes musical sounds and graphical\ - \ images on the floor from people's act of \"walking\". The aim of this paper\ - \ is to develop such a system that enables nonprofessional people in our public\ - \ spaces to play good contrapuntal music without any knowledge of music theory.\ - \ The people are just walking. This system is based on the i-trace system [1]\ - \ which can capture the people's behavior and give some visual feedback. },\n\ - \ address = {Hamamatsu, Japan},\n author = {Hashida, Tomoko and Kakehi, Yasuaki\ - \ and Naemura, Takeshi},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176607},\n\ - \ issn = {2220-4806},\n keywords = {Improvisational Ensemble Play, Contrapuntal\ - \ Music, Human Tracking, Traces, Spatially Augmented Reality},\n pages = {215--216},\n\ - \ title = {Ensemble System with i-trace},\n url = {http://www.nime.org/proceedings/2004/nime2004_215.pdf},\n\ - \ year = {2004}\n}\n" + ID: mmainsbridge2014 + abstract: 'This paper explores the challenge of achieving nuanced control and physical + engagement with gestural interfaces in performance. Performances with a prototype + gestural performance system, Gestate, provide the basis for insights into the + application of gestural systems in live contexts. 
These reflections stem from + a performer''s perspective, outlining the experience of prototyping and performing + with augmented instruments that extend vocal or instrumental technique through + ancillary gestures. Successful implementation of rapidly evolving gestural technologies + in real-time performance calls for new approaches to performing and musicianship, + centred around a growing understanding of the body''s physical and creative potential. + For musicians hoping to incorporate gestural control seamlessly into their performance + practice a balance of technical mastery and kinaesthetic awareness is needed to + adapt existing systems to their own purposes. Within non-tactile systems, visual + feedback mechanisms can support this process by providing explicit visual cues + that compensate for the absence of haptic or tangible feedback. Experience gained + through prototyping and performance can yield a deeper understanding of the broader + nature of gestural control and the way in which performers inhabit their own bodies.' + address: 'London, United Kingdom' + author: Mary Mainsbridge and Kirsty Beilharz + bibtex: "@inproceedings{mmainsbridge2014,\n abstract = {This paper explores the\ + \ challenge of achieving nuanced control and physical engagement with gestural\ + \ interfaces in performance. Performances with a prototype gestural performance\ + \ system, Gestate, provide the basis for insights into the application of gestural\ + \ systems in live contexts. These reflections stem from a performer's perspective,\ + \ outlining the experience of prototyping and performing with augmented instruments\ + \ that extend vocal or instrumental technique through ancillary gestures. Successful\ + \ implementation of rapidly evolving gestural technologies in real-time performance\ + \ calls for new approaches to performing and musicianship, centred around a growing\ + \ understanding of the body's physical and creative potential. 
For musicians hoping\ + \ to incorporate gestural control seamlessly into their performance practice a\ + \ balance of technical mastery and kinaesthetic awareness is needed to adapt existing\ + \ systems to their own purposes. Within non-tactile systems, visual feedback mechanisms\ + \ can support this process by providing explicit visual cues that compensate for\ + \ the absence of haptic or tangible feedback. Experience gained through prototyping\ + \ and performance can yield a deeper understanding of the broader nature of gestural\ + \ control and the way in which performers inhabit their own bodies.},\n address\ + \ = {London, United Kingdom},\n author = {Mary Mainsbridge and Kirsty Beilharz},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178859},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {110--113},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Body As Instrument: Performing with Gestural Interfaces},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_393.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176607 + doi: 10.5281/zenodo.1178859 issn: 2220-4806 - keywords: 'Improvisational Ensemble Play, Contrapuntal Music, Human Tracking, Traces, - Spatially Augmented Reality' - pages: 215--216 - title: Ensemble System with i-trace - url: http://www.nime.org/proceedings/2004/nime2004_215.pdf - year: 2004 + month: June + pages: 110--113 + publisher: 'Goldsmiths, University of London' + title: 'Body As Instrument: Performing with Gestural Interfaces' + url: http://www.nime.org/proceedings/2014/nime2014_393.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_1 - abstract: 'Subtlety and detail are fundamental to what makes musical instruments - special, but accounts of their development in digital lutherie have been constrained - to 
ethnographies, in-the-wild studies, and personal reflections. Though insightful, - these accounts are imprecise, incomparable, and inefficient for understanding - how fluency with the subtle details of digital musical instruments (DMIs) develops. - We have been designing DMI design probes and activities for closed and constrained - observation of subtle and detailed DMI design, but in two previous studies these - failed to motivate subtle and detailed responses. In this paper we report on our - third attempt, where we designed a tuned percussion DMI and a hybrid handcraft - tool for sculpting its sound using clay, and a one hour activity. Among 26 study - participants were digital luthiers, violin luthiers and musicians, who all engaged - with what we define as micro scale DMI design. We observed technical desires and - needs for experiencing and comparing subtle details systematically, and also widely - varying, subjective emotional and artistic relationships with detail in participants'' - own practices. We reflect on the contexts that motivate subtle and detailed digital - lutherie, and discuss the implications for DMI design researchers and technologists - for studying and supporting this aspect of DMI design and craft practice in future.' - address: 'Mexico City, Mexico' - articleno: 1 - author: Jack Armitage and Thor Magnusson and Andrew McPherson - bibtex: "@article{nime2023_1,\n abstract = {Subtlety and detail are fundamental\ - \ to what makes musical instruments special, but accounts of their development\ - \ in digital lutherie have been constrained to ethnographies, in-the-wild studies,\ - \ and personal reflections. Though insightful, these accounts are imprecise, incomparable,\ - \ and inefficient for understanding how fluency with the subtle details of digital\ - \ musical instruments (DMIs) develops. 
We have been designing DMI design probes\ - \ and activities for closed and constrained observation of subtle and detailed\ - \ DMI design, but in two previous studies these failed to motivate subtle and\ - \ detailed responses. In this paper we report on our third attempt, where we designed\ - \ a tuned percussion DMI and a hybrid handcraft tool for sculpting its sound using\ - \ clay, and a one hour activity. Among 26 study participants were digital luthiers,\ - \ violin luthiers and musicians, who all engaged with what we define as micro\ - \ scale DMI design. We observed technical desires and needs for experiencing and\ - \ comparing subtle details systematically, and also widely varying, subjective\ - \ emotional and artistic relationships with detail in participants' own practices.\ - \ We reflect on the contexts that motivate subtle and detailed digital lutherie,\ - \ and discuss the implications for DMI design researchers and technologists for\ - \ studying and supporting this aspect of DMI design and craft practice in future.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {1},\n author = {Jack Armitage\ - \ and Thor Magnusson and Andrew McPherson},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {9},\n pages = {1--9},\n title = {Studying Subtle and Detailed Digital\ - \ Lutherie: Motivational Contexts and Technical Needs},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_1.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: hportner2014 + abstract: 'The Chimaera is a touch-less, expressive, polyphonic and electronic music + controller based on magnetic field sensing. An array of hall-effect sensors and + their vicinity make up a continuous 2D interaction space. The sensors are excited + with Neodymium magnets worn on fingers. 
The device continuously tracks position + and vicinity of multiple present magnets along the sensor array to produce event + signals accordingly. Apart from the two positional signals, an event also carries + the magnetic field polarization, a unique identifier and group association. We + like to think of it as a mixed analog/digital offspring of theremin and trautonium. + These general-purpose event signals are transmitted and eventually translated + into musical events according to custom mappings on a host system. With its touch-less + control (no friction), high update rates (2-4kHz), its quasi-continuous spatial + resolution and its low-latency (<1 ms), the Chimaera can react to most subtle + motions instantaneously and allows for a highly dynamic and expressive play. Its + open source design additionally gives the user all possibilities to further tune + hardware and firmware to his or her needs. The Chimaera is network-oriented and + configured with and communicated by OSC (Open Sound Control), which makes it straight-forward + to integrate into any setup.' + address: 'London, United Kingdom' + author: Hanspeter Portner + bibtex: "@inproceedings{hportner2014,\n abstract = {The Chimaera is a touch-less,\ + \ expressive, polyphonic and electronic music controller based on magnetic field\ + \ sensing. An array of hall-effect sensors and their vicinity make up a continuous\ + \ 2D interaction space. The sensors are excited with Neodymium magnets worn on\ + \ fingers. The device continuously tracks position and vicinity of multiple present\ + \ magnets along the sensor array to produce event signals accordingly. Apart from\ + \ the two positional signals, an event also carries the magnetic field polarization,\ + \ a unique identifier and group association. We like to think of it as a mixed\ + \ analog/digital offspring of theremin and trautonium. 
These general-purpose event\ + \ signals are transmitted and eventually translated into musical events according\ + \ to custom mappings on a host system. With its touch-less control (no friction),\ + \ high update rates (2-4kHz), its quasi-continuous spatial resolution and its\ + \ low-latency (<1 ms), the Chimaera can react to most subtle motions instantaneously\ + \ and allows for a highly dynamic and expressive play. Its open source design\ + \ additionally gives the user all possibilities to further tune hardware and firmware\ + \ to his or her needs. The Chimaera is network-oriented and configured with and\ + \ communicated by OSC (Open Sound Control), which makes it straight-forward to\ + \ integrate into any setup.},\n address = {London, United Kingdom},\n author =\ + \ {Hanspeter Portner},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178909},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {501--504},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {CHIMAERA The Poly-Magneto-Phonic Theremin\ + \ An Expressive Touch-Less Hall-Effect Sensor Array},\n url = {http://www.nime.org/proceedings/2014/nime2014_397.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178909 issn: 2220-4806 - month: May - numpages: 9 - pages: 1--9 - title: 'Studying Subtle and Detailed Digital Lutherie: Motivational Contexts and - Technical Needs' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_1.pdf - year: 2023 + month: June + pages: 501--504 + publisher: 'Goldsmiths, University of London' + title: CHIMAERA The Poly-Magneto-Phonic Theremin An Expressive Touch-Less Hall-Effect + Sensor Array + url: http://www.nime.org/proceedings/2014/nime2014_397.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_2 - abstract: 
'This paper proposes a human-machine interactive music system for live - performances based on autonomous agents, implemented through immersive extended - reality. The interaction between humans and agents is grounded in concepts related - to Swarm Intelligence and Multi-Agent systems, which are reflected in a technological - platform that involves a 3D physical-virtual solution. This approach requires - visual, auditory, haptic, and proprioceptive modalities, making it necessary to - integrate technologies capable of providing such a multimodal environment. The - prototype of the proposed system is implemented by combining Motion Capture, Spatial - Audio, and Mixed Reality technologies. The system is evaluated in terms of objective - measurements and tested with users through music improvisation sessions. The results - demonstrate that the system is used as intended with respect to multimodal interaction - for musical agents. Furthermore, the results validate the novel design and integration - of the required technologies presented in this paper.' - address: 'Mexico City, Mexico' - articleno: 2 - author: Pedro P Lucas and Stefano Fasciani - bibtex: "@article{nime2023_2,\n abstract = {This paper proposes a human-machine\ - \ interactive music system for live performances based on autonomous agents, implemented\ - \ through immersive extended reality. The interaction between humans and agents\ - \ is grounded in concepts related to Swarm Intelligence and Multi-Agent systems,\ - \ which are reflected in a technological platform that involves a 3D physical-virtual\ - \ solution. This approach requires visual, auditory, haptic, and proprioceptive\ - \ modalities, making it necessary to integrate technologies capable of providing\ - \ such a multimodal environment. 
The prototype of the proposed system is implemented\ - \ by combining Motion Capture, Spatial Audio, and Mixed Reality technologies.\ - \ The system is evaluated in terms of objective measurements and tested with users\ - \ through music improvisation sessions. The results demonstrate that the system\ - \ is used as intended with respect to multimodal interaction for musical agents.\ - \ Furthermore, the results validate the novel design and integration of the required\ - \ technologies presented in this paper.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {2},\n author = {Pedro P Lucas and Stefano Fasciani},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {11},\n pages = {10--20},\n title = {A Human-Agents\ - \ Music Performance System in an Extended Reality Environment},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_2.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: twebster12014 + abstract: 'This paper introduces the OWL stage effects pedal and aims to present + the device within the context of Human Computer Interaction (HCI) research. The + OWL is a dedicated, programmable audio device designed to provide an alternative + to the use of laptop computers for bespoke audio processing on stage for music + performance. By creating a software framework that allows the user to program + their own code for the hardware in C++, the OWL project makes it possible to use + homemade audio processing on stage without the need for a laptop running a computer + music environment such as Pure Data or Supercollider. 
Moving away from the general-purpose + computer to a dedicated audio device means that some of the potential problems + and technical complexity of performing with a laptop computer onstage can be avoided, + allowing the user to focus more of their attention on the musical performance. + Within the format of a traditional guitar ''stomp box'', the OWL aims to integrate + seamlessly into a guitarist''s existing pedal board setup, and in this way presents + as an example of a ubiquitous and tangible computing device -a programmable computer + designed to fit into an existing mode of musical performance whilst being transparent + in use.' + address: 'London, United Kingdom' + author: Thomas Webster and Guillaume LeNost and Martin Klang + bibtex: "@inproceedings{twebster12014,\n abstract = {This paper introduces the OWL\ + \ stage effects pedal and aims to present the device within the context of Human\ + \ Computer Interaction (HCI) research. The OWL is a dedicated, programmable audio\ + \ device designed to provide an alternative to the use of laptop computers for\ + \ bespoke audio processing on stage for music performance. By creating a software\ + \ framework that allows the user to program their own code for the hardware in\ + \ C++, the OWL project makes it possible to use homemade audio processing on stage\ + \ without the need for a laptop running a computer music environment such as Pure\ + \ Data or Supercollider. Moving away from the general-purpose computer to a dedicated\ + \ audio device means that some of the potential problems and technical complexity\ + \ of performing with a laptop computer onstage can be avoided, allowing the user\ + \ to focus more of their attention on the musical performance. 
Within the format\ + \ of a traditional guitar 'stomp box', the OWL aims to integrate seamlessly into\ + \ a guitarist's existing pedal board setup, and in this way presents as an example\ + \ of a ubiquitous and tangible computing device -a programmable computer designed\ + \ to fit into an existing mode of musical performance whilst being transparent\ + \ in use.},\n address = {London, United Kingdom},\n author = {Thomas Webster and\ + \ Guillaume LeNost and Martin Klang},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178979},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {621--624},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {The OWL programmable stage effects pedal:\ + \ Revising the concept of the on-stage computer for live music performance},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_399.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178979 issn: 2220-4806 - month: May - numpages: 11 - pages: 10--20 - title: A Human-Agents Music Performance System in an Extended Reality Environment - track: Papers - url: http://nime.org/proceedings/2023/nime2023_2.pdf - year: 2023 + month: June + pages: 621--624 + publisher: 'Goldsmiths, University of London' + title: 'The OWL programmable stage effects pedal: Revising the concept of the on-stage + computer for live music performance' + url: http://www.nime.org/proceedings/2014/nime2014_399.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_3 - abstract: "The Explorator genus is a set of hardware and firmware systems, artistic\ - \ motivations, and physical construction methods designed to support the creation\ - \ of transportable environmentally-responsive mechatronic sound objects for exhibition\ - \ outdoors. 
In order to enable the realization of installation scenarios with\ - \ varied cochlear needs, we developed a generalized hardware and firmware system\ - \ that can be reused between projects and which supports the development of purpose-built\ - \ feedback mechanisms.\nWe introduce five distinct hardware instances that serve\ - \ as test cases for the Explorator genus. The hardware instances are introduced\ - \ as Explorator “species”. Each species shares core hardware and firmware systems\ - \ but uses distinct soundscape augmentation feedback mechanisms to support unique\ - \ installation scenarios. Initial subjective and objective observations, findings,\ - \ and data are provided from fieldwork conducted in four American states. These\ - \ initial test installations highlight the Explorator genus as a modular, transportable,\ - \ environmentally reactive, environmentally protected, self-powered system for\ - \ creating novel mechatronic sound objects for outdoor sonic installation art." - address: 'Mexico City, Mexico' - articleno: 3 - author: Nathan D Villicana-Shaw and Dale Carnegie and Jim Murphy and Mo Zareei - bibtex: "@article{nime2023_3,\n abstract = {The Explorator genus is a set of hardware\ - \ and firmware systems, artistic motivations, and physical construction methods\ - \ designed to support the creation of transportable environmentally-responsive\ - \ mechatronic sound objects for exhibition outdoors. In order to enable the realization\ - \ of installation scenarios with varied cochlear needs, we developed a generalized\ - \ hardware and firmware system that can be reused between projects and which supports\ - \ the development of purpose-built feedback mechanisms.\nWe introduce five distinct\ - \ hardware instances that serve as test cases for the Explorator genus. The hardware\ - \ instances are introduced as Explorator “species”. 
Each species shares core hardware\ - \ and firmware systems but uses distinct soundscape augmentation feedback mechanisms\ - \ to support unique installation scenarios. Initial subjective and objective observations,\ - \ findings, and data are provided from fieldwork conducted in four American states.\ - \ These initial test installations highlight the Explorator genus as a modular,\ - \ transportable, environmentally reactive, environmentally protected, self-powered\ - \ system for creating novel mechatronic sound objects for outdoor sonic installation\ - \ art.},\n address = {Mexico City, Mexico},\n articleno = {3},\n author = {Nathan\ - \ D Villicana-Shaw and Dale Carnegie and Jim Murphy and Mo Zareei},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {9},\n pages = {21--29},\n title = {Explorator Genus:\ - \ Designing Transportable Mechatronic Sound Objects for Outdoor Installation Art},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_3.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: gtorre2014 + abstract: 'In this paper we describe the application of a movement-based design + process for digital musical instruments which led to the development of a prototype + DMI named the Twister. The development is described in two parts. Firstly, we + consider the design of the interface or physical controller. Following this we + describe the development of a specific sonic character, mapping approach and performance. + In both these parts an explicit consideration of the type of movement we would + like the device to engender in performance drove the design choices. 
By considering + these two parts separately we draw attention to two different levels at which + movement might be considered in the design of DMIs; at a general level of ranges + of movement in the creation of the controller and a more specific, but still quite + open, level in the creation of the final instrument and a particular performance. + In light of the results of this process the limitations of existing representations + of movement within the DMI design discourse is discussed. Further, the utility + of a movement focused design approach is discussed.' + address: 'London, United Kingdom' + author: Nicholas Ward and Giuseppe Torre + bibtex: "@inproceedings{gtorre2014,\n abstract = {In this paper we describe the\ + \ application of a movement-based design process for digital musical instruments\ + \ which led to the development of a prototype DMI named the Twister. The development\ + \ is described in two parts. Firstly, we consider the design of the interface\ + \ or physical controller. Following this we describe the development of a specific\ + \ sonic character, mapping approach and performance. In both these parts an explicit\ + \ consideration of the type of movement we would like the device to engender in\ + \ performance drove the design choices. By considering these two parts separately\ + \ we draw attention to two different levels at which movement might be considered\ + \ in the design of DMIs; at a general level of ranges of movement in the creation\ + \ of the controller and a more specific, but still quite open, level in the creation\ + \ of the final instrument and a particular performance. In light of the results\ + \ of this process the limitations of existing representations of movement within\ + \ the DMI design discourse is discussed. 
Further, the utility of a movement focused\ + \ design approach is discussed.},\n address = {London, United Kingdom},\n author\ + \ = {Nicholas Ward and Giuseppe Torre},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178977},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {449--454},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Constraining Movement as a Basis for {DMI}\ + \ Design and Performance.},\n url = {http://www.nime.org/proceedings/2014/nime2014_404.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178977 issn: 2220-4806 - month: May - numpages: 9 - pages: 21--29 - title: 'Explorator Genus: Designing Transportable Mechatronic Sound Objects for - Outdoor Installation Art' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_3.pdf - year: 2023 + month: June + pages: 449--454 + publisher: 'Goldsmiths, University of London' + title: Constraining Movement as a Basis for DMI Design and Performance. + url: http://www.nime.org/proceedings/2014/nime2014_404.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_4 - abstract: "LoopBoxes is an accessible digital musical instrument designed to create\ - \ an intuitive access to loop based music making for children with special educational\ - \ needs (SEN). This paper describes the evaluation of the instrument in the form\ - \ of a pilot study during a music festival in Berlin, Germany, as well as a case\ - \ study with children and music teachers in a SEN school setting. We created a\ - \ modular system composed of three modules that afford single user as well as\ - \ collaborative music making. 
The pilot study was evaluated using informal observation\ - \ and questionnaires (n = 39), and indicated that the instrument affords music\ - \ making for people with and without prior musical knowledge across all age groups\ - \ and fosters collaborative musical processes. The case study was based on observation\ - \ and a qualitative interview. It confirmed that the instrument meets the needs\ - \ of the school settings and indicated how future versions could expand access\ - \ to all students.\nespecially those experiencing complex disabilities. In addition,\ - \ out-of-the-box functionality seems to be crucial for the long-term implementation\ - \ of the instrument in a school setting." - address: 'Mexico City, Mexico' - articleno: 4 - author: Andreas Förster and Alarith Uhde and Mathias Komesker and Christina Komesker - and Irina Schmidt - bibtex: "@article{nime2023_4,\n abstract = {LoopBoxes is an accessible digital musical\ - \ instrument designed to create an intuitive access to loop based music making\ - \ for children with special educational needs (SEN). This paper describes the\ - \ evaluation of the instrument in the form of a pilot study during a music festival\ - \ in Berlin, Germany, as well as a case study with children and music teachers\ - \ in a SEN school setting. We created a modular system composed of three modules\ - \ that afford single user as well as collaborative music making. The pilot study\ - \ was evaluated using informal observation and questionnaires (n = 39), and indicated\ - \ that the instrument affords music making for people with and without prior musical\ - \ knowledge across all age groups and fosters collaborative musical processes.\ - \ The case study was based on observation and a qualitative interview. It confirmed\ - \ that the instrument meets the needs of the school settings and indicated how\ - \ future versions could expand access to all students.\nespecially those experiencing\ - \ complex disabilities. 
In addition, out-of-the-box functionality seems to be\ - \ crucial for the long-term implementation of the instrument in a school setting.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {4},\n author = {Andreas Förster\ - \ and Alarith Uhde and Mathias Komesker and Christina Komesker and Irina Schmidt},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {10},\n pages = {30--39},\n title\ - \ = {LoopBoxes - Evaluation of a Collaborative Accessible Digital Musical Instrument},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_4.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: mdavies2014 + abstract: 'In this paper we present Improvasher a real-time musical accompaniment + system which creates an automatic mashup to accompany live musical input. Improvasher + is built around two music processing modules, the first, a performance following + technique, makes beat-synchronous predictions of chroma features from a live musical + input. The second, a music mashup system, determines the compatibility between + beat-synchronous chromagrams from different pieces of music. Through the combination + of these two techniques, a real-time time predict mashup can be generated towards + a new form of automatic accompaniment for interactive musical performance.' + address: 'London, United Kingdom' + author: Matthew Davies and Adam Stark and Fabien Gouyon and Masataka Goto + bibtex: "@inproceedings{mdavies2014,\n abstract = {In this paper we present Improvasher\ + \ a real-time musical accompaniment system which creates an automatic mashup to\ + \ accompany live musical input. 
Improvasher is built around two music processing\ + \ modules, the first, a performance following technique, makes beat-synchronous\ + \ predictions of chroma features from a live musical input. The second, a music\ + \ mashup system, determines the compatibility between beat-synchronous chromagrams\ + \ from different pieces of music. Through the combination of these two techniques,\ + \ a real-time time predict mashup can be generated towards a new form of automatic\ + \ accompaniment for interactive musical performance.},\n address = {London, United\ + \ Kingdom},\n author = {Matthew Davies and Adam Stark and Fabien Gouyon and Masataka\ + \ Goto},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178744},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {541--544},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Improvasher: A Real-Time Mashup System for Live Musical\ + \ Input},\n url = {http://www.nime.org/proceedings/2014/nime2014_405.pdf},\n year\ + \ = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178744 issn: 2220-4806 - month: May - numpages: 10 - pages: 30--39 - title: LoopBoxes - Evaluation of a Collaborative Accessible Digital Musical Instrument - track: Papers - url: http://nime.org/proceedings/2023/nime2023_4.pdf - year: 2023 + month: June + pages: 541--544 + publisher: 'Goldsmiths, University of London' + title: 'Improvasher: A Real-Time Mashup System for Live Musical Input' + url: http://www.nime.org/proceedings/2014/nime2014_405.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_5 - abstract: "Legatus is a three-legged audio and environmentally-reactive soundscape\ - \ augmentation artifact created for outdoor exhibitions in locations without access\ - \ to mains electricity. 
Legatus has an approximate ingress protection rating of\ - \ IP54, is self-powered, and is easy to transport weighing approximately a kilogram\ - \ while fitting within a 185 mm tall by 110 mm diameter cylinder. With LED-based\ - \ visual feedback and a cochlear loudspeaker-based vocalization system, Legatus\ - \ seeks to capture attention and redirect it to the in-situ sonic environment.\n\ - Informed by related historical and contemporary outdoor sonic installation artworks,\ - \ we conceptualized and tested four installation scenarios in 2021. Installations\ - \ were presented following a soundscape-specific pop-up exhibition strategy, where\ - \ the exhibition venue and artifact placement are determined by in-situ sonic\ - \ conditions. Legatus artifacts use high-level audio features and real-time environmental\ - \ conditions including ambient temperature, humidity, and brightness levels to\ - \ influence the timing and parameters of sample playback routines, audio synthesis,\ - \ and audio recording.\nHaving developed and tested for nine months, Legatus has\ - \ emerged as a portable, rugged, affordable, adaptable, lightweight, and simple\ - \ tool for augmenting natural sonic environments that can provide last-mile distributions\ - \ of sonic installation art experiences to places and communities where these\ - \ works are rarely exhibited." - address: 'Mexico City, Mexico' - articleno: 5 - author: Nathan D Villicana-Shaw and Dale Carnegie and Jim Murphy and Mo Zareei - bibtex: "@article{nime2023_5,\n abstract = {Legatus is a three-legged audio and\ - \ environmentally-reactive soundscape augmentation artifact created for outdoor\ - \ exhibitions in locations without access to mains electricity. Legatus has an\ - \ approximate ingress protection rating of IP54, is self-powered, and is easy\ - \ to transport weighing approximately a kilogram while fitting within a 185 mm\ - \ tall by 110 mm diameter cylinder. 
With LED-based visual feedback and a cochlear\ - \ loudspeaker-based vocalization system, Legatus seeks to capture attention and\ - \ redirect it to the in-situ sonic environment.\nInformed by related historical\ - \ and contemporary outdoor sonic installation artworks, we conceptualized and\ - \ tested four installation scenarios in 2021. Installations were presented following\ - \ a soundscape-specific pop-up exhibition strategy, where the exhibition venue\ - \ and artifact placement are determined by in-situ sonic conditions. Legatus artifacts\ - \ use high-level audio features and real-time environmental conditions including\ - \ ambient temperature, humidity, and brightness levels to influence the timing\ - \ and parameters of sample playback routines, audio synthesis, and audio recording.\n\ - Having developed and tested for nine months, Legatus has emerged as a portable,\ - \ rugged, affordable, adaptable, lightweight, and simple tool for augmenting natural\ - \ sonic environments that can provide last-mile distributions of sonic installation\ - \ art experiences to places and communities where these works are rarely exhibited.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {5},\n author = {Nathan D Villicana-Shaw\ - \ and Dale Carnegie and Jim Murphy and Mo Zareei},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {8},\n pages = {40--47},\n title = {Legatus: Design and\ - \ Exhibition of Loudspeaker-Based, Environmentally-Reactive, Soundscape Augmentation\ - \ Artifacts in Outdoor Natural Environments},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_5.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: operrotin2014 + abstract: 'Conceiving digital musical instruments might be challenging in terms + of spectator accessibility. 
Depending on the interface and the complexity of the + software used as a transition between the controller and sound, a musician performance + can be totally opaque for the audience and loose its interest. This paper examines + the possibility of adding a visual feedback to help the public understanding, + and add expressivity to the performance. It explores the various mapping organizations + between controller and sound, giving different spaces of representation for the + visual feedback. It can be either an amplification of the controller parameters, + or a representation of the related musical parameters. Different examples of visualization + are presented and evaluated, taking the Cantor Digitalis as a support. It appears + the representation of musical parameters, little used compared to the representation + of controllers, received a good opinion from the audience, highlighting the musical + intention of the performers.' + address: 'London, United Kingdom' + author: Olivier Perrotin and Christophe d'Alessandro + bibtex: "@inproceedings{operrotin2014,\n abstract = {Conceiving digital musical\ + \ instruments might be challenging in terms of spectator accessibility. Depending\ + \ on the interface and the complexity of the software used as a transition between\ + \ the controller and sound, a musician performance can be totally opaque for the\ + \ audience and loose its interest. This paper examines the possibility of adding\ + \ a visual feedback to help the public understanding, and add expressivity to\ + \ the performance. It explores the various mapping organizations between controller\ + \ and sound, giving different spaces of representation for the visual feedback.\ + \ It can be either an amplification of the controller parameters, or a representation\ + \ of the related musical parameters. Different examples of visualization are presented\ + \ and evaluated, taking the Cantor Digitalis as a support. 
It appears the representation\ + \ of musical parameters, little used compared to the representation of controllers,\ + \ received a good opinion from the audience, highlighting the musical intention\ + \ of the performers.},\n address = {London, United Kingdom},\n author = {Olivier\ + \ Perrotin and Christophe d'Alessandro},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178901},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {605--608},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Visualizing Gestures in the Control of a Digital\ + \ Musical Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_406.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178901 issn: 2220-4806 - month: May - numpages: 8 - pages: 40--47 - title: 'Legatus: Design and Exhibition of Loudspeaker-Based, Environmentally-Reactive, - Soundscape Augmentation Artifacts in Outdoor Natural Environments' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_5.pdf - year: 2023 + month: June + pages: 605--608 + publisher: 'Goldsmiths, University of London' + title: Visualizing Gestures in the Control of a Digital Musical Instrument + url: http://www.nime.org/proceedings/2014/nime2014_406.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_6 - abstract: 'Since 1966 Star Trek has been exploring imaginary and futuristic worlds - in which humanity comes in contact with alien cultures. Music has always been - a method through which alien cultures are made relatable to humans, and musical - instruments become props through which we learn about an alien culture that is - totally different to that of humans. 
These musical instruments were not designed - with musical use in mind; rather they are designed as storytelling devices, and - never intended to work or make sound. After discovering one of these instruments - I realised that recreating it in the way it was imagined and making it functional - would require consideration of the instrument''s storytelling function above all - else, including the technology. In this paper I describe the process of re-creating - an instrument from Star Trek as a functional DMI, a process in which design decisions - were guided by what the storytelling intentions were for this imagined instrument, - and what I found out by having to make technical choices that supported them (not - the other way around). As well as reporting the design and implementation process - I summarise the important design lesson that came from having to emphasise the - intended mood and presence of an instrument, instead of the design being steered - by technical affordances.' - address: 'Mexico City, Mexico' - articleno: 6 - author: S. M. Astrid Bin - bibtex: "@article{nime2023_6,\n abstract = {Since 1966 Star Trek has been exploring\ - \ imaginary and futuristic worlds in which humanity comes in contact with alien\ - \ cultures. Music has always been a method through which alien cultures are made\ - \ relatable to humans, and musical instruments become props through which we learn\ - \ about an alien culture that is totally different to that of humans. These musical\ - \ instruments were not designed with musical use in mind; rather they are designed\ - \ as storytelling devices, and never intended to work or make sound. After discovering\ - \ one of these instruments I realised that recreating it in the way it was imagined\ - \ and making it functional would require consideration of the instrument's storytelling\ - \ function above all else, including the technology. 
In this paper I describe\ - \ the process of re-creating an instrument from Star Trek as a functional DMI,\ - \ a process in which design decisions were guided by what the storytelling intentions\ - \ were for this imagined instrument, and what I found out by having to make technical\ - \ choices that supported them (not the other way around). As well as reporting\ - \ the design and implementation process I summarise the important design lesson\ - \ that came from having to emphasise the intended mood and presence of an instrument,\ - \ instead of the design being steered by technical affordances.},\n address =\ - \ {Mexico City, Mexico},\n articleno = {6},\n author = {S. M. Astrid Bin},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {6},\n pages = {48--53},\n title = {Where Few NIMEs\ - \ Have Gone Before: Lessons in instrument design from Star Trek},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_6.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: ldonovan2014 + abstract: 'This paper presents the Talking Guitar, an electric guitar augmented + with a system which tracks the position of the headstock in real time and uses + that data to control the parameters of a formant-filtering effect which impresses + upon the guitar sound a sense of speech. A user study is conducted with the device + to establish an indication of the practicality of using headstock tracking to + control effect parameters and to suggest natural and useful mapping strategies. + Individual movements and gestures are evaluated in order to guide further development + of the system.' 
+ address: 'London, United Kingdom' + author: Liam Donovan and Andrew McPherson + bibtex: "@inproceedings{ldonovan2014,\n abstract = {This paper presents the Talking\ + \ Guitar, an electric guitar augmented with a system which tracks the position\ + \ of the headstock in real time and uses that data to control the parameters of\ + \ a formant-filtering effect which impresses upon the guitar sound a sense of\ + \ speech. A user study is conducted with the device to establish an indication\ + \ of the practicality of using headstock tracking to control effect parameters\ + \ and to suggest natural and useful mapping strategies. Individual movements and\ + \ gestures are evaluated in order to guide further development of the system.},\n\ + \ address = {London, United Kingdom},\n author = {Liam Donovan and Andrew McPherson},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178752},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {351--354},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {The Talking Guitar: Headstock Tracking and Mapping Strategies},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_407.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178752 issn: 2220-4806 - month: May - numpages: 6 - pages: 48--53 - title: 'Where Few NIMEs Have Gone Before: Lessons in instrument design from Star - Trek' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_6.pdf - year: 2023 + month: June + pages: 351--354 + publisher: 'Goldsmiths, University of London' + title: 'The Talking Guitar: Headstock Tracking and Mapping Strategies' + url: http://www.nime.org/proceedings/2014/nime2014_407.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_7 - abstract: 'Spatial audio systems are expensive, 
mostly because they usually imply - the use of a wide range of speakers and hence audio outputs. Some techniques such - as Wave Field Synthesis (WFS) are especially demanding in that regard making them - out of reach to many individuals or even institutions. In this paper, we propose - to leverage recent progress made using Field-Programmable Gate Arrays (FPGA) in - the context of real-time audio signal processing to implement frugal spatial audio - systems. We focus on the case of WFS and we demonstrate how to build a 32 speakers - system that can manage multiple sources in parallel for less than 800 USD (including - speakers). We believe that this approach contributes to making advanced spatial - audio techniques more accessible.' - address: 'Mexico City, Mexico' - articleno: 7 - author: Romain Michon and Joseph Bizien and Maxime Popoff and Tanguy Risset - bibtex: "@article{nime2023_7,\n abstract = {Spatial audio systems are expensive,\ - \ mostly because they usually imply the use of a wide range of speakers and hence\ - \ audio outputs. Some techniques such as Wave Field Synthesis (WFS) are especially\ - \ demanding in that regard making them out of reach to many individuals or even\ - \ institutions. In this paper, we propose to leverage recent progress made using\ - \ Field-Programmable Gate Arrays (FPGA) in the context of real-time audio signal\ - \ processing to implement frugal spatial audio systems. We focus on the case of\ - \ WFS and we demonstrate how to build a 32 speakers system that can manage multiple\ - \ sources in parallel for less than 800 USD (including speakers). 
We believe that\ - \ this approach contributes to making advanced spatial audio techniques more accessible.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {7},\n author = {Romain Michon\ - \ and Joseph Bizien and Maxime Popoff and Tanguy Risset},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {6},\n pages = {54--59},\n title = {Making Frugal Spatial\ - \ Audio Systems Using Field-Programmable Gate Arrays},\n track = {Papers},\n url\ - \ = {http://nime.org/proceedings/2023/nime2023_7.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: vzappi2014 + abstract: 'This paper investigates the process of appropriation in digital musical + instrument performance, examining the effect of instrument complexity on the emergence + of personal playing styles. Ten musicians of varying background were given a deliberately + constrained musical instrument, a wooden cube containing a touch/force sensor, + speaker and embedded computer. Each cube was identical in construction, but half + the instruments were configured for two degrees of freedom while the other half + allowed only a single degree. Each musician practiced at home and presented two + performances, in which their techniques and reactions were assessed through video, + sensor data logs, questionnaires and interviews. Results show that the addition + of a second degree of freedom had the counterintuitive effect of reducing the + exploration of the instrument''s affordances; this suggested the presence of a + dominant constraint in one of the two configurations which strongly differentiated + the process of appropriation across the two groups of participants.' 
+ address: 'London, United Kingdom' + author: Victor Zappi and Andrew McPherson + bibtex: "@inproceedings{vzappi2014,\n abstract = {This paper investigates the process\ + \ of appropriation in digital musical instrument performance, examining the effect\ + \ of instrument complexity on the emergence of personal playing styles. Ten musicians\ + \ of varying background were given a deliberately constrained musical instrument,\ + \ a wooden cube containing a touch/force sensor, speaker and embedded computer.\ + \ Each cube was identical in construction, but half the instruments were configured\ + \ for two degrees of freedom while the other half allowed only a single degree.\ + \ Each musician practiced at home and presented two performances, in which their\ + \ techniques and reactions were assessed through video, sensor data logs, questionnaires\ + \ and interviews. Results show that the addition of a second degree of freedom\ + \ had the counterintuitive effect of reducing the exploration of the instrument's\ + \ affordances; this suggested the presence of a dominant constraint in one of\ + \ the two configurations which strongly differentiated the process of appropriation\ + \ across the two groups of participants.},\n address = {London, United Kingdom},\n\ + \ author = {Victor Zappi and Andrew McPherson},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178993},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {455--460},\n publisher = {Goldsmiths, University of London},\n title = {Dimensionality\ + \ and Appropriation in Digital Musical Instrument Design},\n url = {http://www.nime.org/proceedings/2014/nime2014_409.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178993 issn: 2220-4806 - month: May - numpages: 6 - pages: 
54--59 - title: Making Frugal Spatial Audio Systems Using Field-Programmable Gate Arrays - track: Papers - url: http://nime.org/proceedings/2023/nime2023_7.pdf - year: 2023 + month: June + pages: 455--460 + publisher: 'Goldsmiths, University of London' + title: Dimensionality and Appropriation in Digital Musical Instrument Design + url: http://www.nime.org/proceedings/2014/nime2014_409.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_8 - abstract: 'The history of the New Interfaces for Musical Expression (NIME) conference - starts with the first workshop on NIME during the ACM Conference on Human Factors - in Computing Systems in 2001. But research on musical interfaces has a rich ”prehistoric” - phase with a substantial amount of relevant research material published before - 2001. This paper highlights the variety and importance of musical interface-related - research between the mid-1970s and 2000 published in two major computer music - research venues: the International Computer Music Conference and the Computer - Music Journal. It discusses some early examples of research on musical interfaces - published in these venues, then reviews five other sources of related literature - that pre-date the original NIME CHI workshop. It then presents a series of implications - of this research and introduces a collaborative website that compiles many of - these references in one place. This work is meant as a step into a more inclusive - approach to interface design by facilitating the integration of as many relevant - references as possible into future NIME research.' - address: 'Mexico City, Mexico' - articleno: 8 - author: Marcelo Wanderley - bibtex: "@article{nime2023_8,\n abstract = {The history of the New Interfaces for\ - \ Musical Expression (NIME) conference starts with the first workshop on NIME\ - \ during the ACM Conference on Human Factors in Computing Systems in 2001. 
But\ - \ research on musical interfaces has a rich ”prehistoric” phase with a substantial\ - \ amount of relevant research material published before 2001. This paper highlights\ - \ the variety and importance of musical interface-related research between the\ - \ mid-1970s and 2000 published in two major computer music research venues: the\ - \ International Computer Music Conference and the Computer Music Journal. It discusses\ - \ some early examples of research on musical interfaces published in these venues,\ - \ then reviews five other sources of related literature that pre-date the original\ - \ NIME CHI workshop. It then presents a series of implications of this research\ - \ and introduces a collaborative website that compiles many of these references\ - \ in one place. This work is meant as a step into a more inclusive approach to\ - \ interface design by facilitating the integration of as many relevant references\ - \ as possible into future NIME research.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {8},\n author = {Marcelo Wanderley},\n booktitle = {Proceedings\ +- ENTRYTYPE: inproceedings + ID: mrodrigues2014 + abstract: 'Digital Musical Instruments (DMIs) have difficulties establishing themselves + after their creation. A huge number of DMIs is presented every year and few of + them actually remain in use. Several causes could explain this reality, among + them the lack of a proper instrumental technique, inadequacy of the traditional + musical notation and the non-existence of a repertoire dedicated to the instrument. + In this paper we present Entoa, the first written music for Intonaspacio, a DMI + we designed in our research project. We propose some strategies for mapping data + from sensors to sound processing, in order to accomplish an expressive performance. + Entoa is divided in five different sections that corresponds to five movements. 
+ For each, a different mapping is designed, introducing subtle alterations that + progressively explore the ensemble of features of the instrument. The performer + is then required to adapt his repertoire of gestures along the piece. Indications + are expressed through a gestural notation, where freedom is give to performer + to control certain parameters at specific moments in the music.' + address: 'London, United Kingdom' + author: Clayton Mamedes and Mailis Rodrigues and Marcelo M. Wanderley and Jônatas + Manzolli and Denise H. L. Garcia and Paulo Ferreira-Lopes + bibtex: "@inproceedings{mrodrigues2014,\n abstract = {Digital Musical Instruments\ + \ (DMIs) have difficulties establishing themselves after their creation. A huge\ + \ number of DMIs is presented every year and few of them actually remain in use.\ + \ Several causes could explain this reality, among them the lack of a proper instrumental\ + \ technique, inadequacy of the traditional musical notation and the non-existence\ + \ of a repertoire dedicated to the instrument. In this paper we present Entoa,\ + \ the first written music for Intonaspacio, a DMI we designed in our research\ + \ project. We propose some strategies for mapping data from sensors to sound processing,\ + \ in order to accomplish an expressive performance. Entoa is divided in five different\ + \ sections that corresponds to five movements. For each, a different mapping is\ + \ designed, introducing subtle alterations that progressively explore the ensemble\ + \ of features of the instrument. The performer is then required to adapt his repertoire\ + \ of gestures along the piece. Indications are expressed through a gestural notation,\ + \ where freedom is give to performer to control certain parameters at specific\ + \ moments in the music.},\n address = {London, United Kingdom},\n author = {Clayton\ + \ Mamedes and Mailis Rodrigues and Marcelo M. Wanderley and J{\\^o}natas Manzolli\ + \ and Denise H. L. 
Garcia and Paulo Ferreira-Lopes},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {10},\n pages = {60--69},\n title = {Prehistoric NIME:\ - \ Revisiting Research on New Musical Interfaces in the Computer Music Community\ - \ before NIME},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_8.pdf},\n\ - \ year = {2023}\n}\n" + \ doi = {10.5281/zenodo.1178861},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {509--512},\n publisher = {Goldsmiths, University of London},\n title = {Composing\ + \ for {DMI}s Entoa, a Dedicate Piece for Intonaspacio},\n url = {http://www.nime.org/proceedings/2014/nime2014_411.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178861 issn: 2220-4806 - month: May - numpages: 10 - pages: 60--69 - title: 'Prehistoric NIME: Revisiting Research on New Musical Interfaces in the Computer - Music Community before NIME' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_8.pdf - year: 2023 + month: June + pages: 509--512 + publisher: 'Goldsmiths, University of London' + title: 'Composing for DMIs Entoa, a Dedicate Piece for Intonaspacio' + url: http://www.nime.org/proceedings/2014/nime2014_411.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_9 - abstract: "Within the music improvisation and jazz scenes, playing a wrong note\ - \ may be seen as a source of creativity and novelty, where an initially undesired\ - \ factor (the mistaken note) invites the musician to leverage their skills to\ - \ transform it into new musical material. How does this idea, however, translate\ - \ into more experimental scenes like NIME, where control and virtuosity are not\ - \ necessarily the performance's aim? 
\nMoreover, within NIME communities the addition\ - \ of randomness or constraints to musical instruments is often an intended aesthetic\ - \ decision rather than a source of mistakes. To explore this contrast, we invited\ - \ four NIME practitioners to participate in the Self-Sabotage Workshop, where\ - \ each practitioner had to build their own sabotaging elements for their musical\ - \ instruments and to give a short demonstration with them. We gathered participants'\ - \ impressions of self-sabotating in a focus group, inquiring about control and\ - \ musicality, and also the strategies they developed for coping with the self-sabotaged\ - \ instruments. We discuss the emergent ideas of planned and unplanned sabotaging,\ - \ and we propose a starting point towards the idea of self-sabotaging as a continuous\ - \ design and musical process where designers/musicians try to overcome barriers\ - \ that they impose upon themselves." - address: 'Mexico City, Mexico' - articleno: 9 - author: Teodoro Dannemann and Nick Bryan-Kinns and Andrew McPherson - bibtex: "@article{nime2023_9,\n abstract = {Within the music improvisation and jazz\ - \ scenes, playing a wrong note may be seen as a source of creativity and novelty,\ - \ where an initially undesired factor (the mistaken note) invites the musician\ - \ to leverage their skills to transform it into new musical material. How does\ - \ this idea, however, translate into more experimental scenes like NIME, where\ - \ control and virtuosity are not necessarily the performance's aim? \nMoreover,\ - \ within NIME communities the addition of randomness or constraints to musical\ - \ instruments is often an intended aesthetic decision rather than a source of\ - \ mistakes. 
To explore this contrast, we invited four NIME practitioners to participate\ - \ in the Self-Sabotage Workshop, where each practitioner had to build their own\ - \ sabotaging elements for their musical instruments and to give a short demonstration\ - \ with them. We gathered participants' impressions of self-sabotating in a focus\ - \ group, inquiring about control and musicality, and also the strategies they\ - \ developed for coping with the self-sabotaged instruments. We discuss the emergent\ - \ ideas of planned and unplanned sabotaging, and we propose a starting point towards\ - \ the idea of self-sabotaging as a continuous design and musical process where\ - \ designers/musicians try to overcome barriers that they impose upon themselves.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {9},\n author = {Teodoro Dannemann\ - \ and Nick Bryan-Kinns and Andrew McPherson},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {9},\n pages = {70--78},\n title = {Self-Sabotage Workshop: a starting\ - \ point to unravel sabotaging of instruments as a design practice},\n track =\ - \ {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_9.pdf},\n year\ - \ = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: dmazzanti2014 + abstract: 'Designing a collaborative performance requires the use of paradigms and + technologies which can deeply influence the whole piece experience. In this paper + we define a set of six variables, and use them to describe and evaluate a number + of platforms for participatory performances. Based on this evaluation, the Augmented + Stage is introduced. Such concept describes how Augmented Reality techniques can + be used to superimpose a performance stage with a virtual environment, populated + with interactive elements. 
The manipulation of these objects allows spectators + to contribute to the visual and sonic outcome of the performance through their + mobile devices, while keeping their freedom to focus on the stage. An interactive + acoustic rock performance based on this concept was staged. Questionnaires distributed + to the audience and performers'' comments have been analyzed, contributing to + an evaluation of the presented concept and platform done through the defined variables.' + address: 'London, United Kingdom' + author: Dario Mazzanti and Victor Zappi and Darwin Caldwell and Andrea Brogni + bibtex: "@inproceedings{dmazzanti2014,\n abstract = {Designing a collaborative performance\ + \ requires the use of paradigms and technologies which can deeply influence the\ + \ whole piece experience. In this paper we define a set of six variables, and\ + \ use them to describe and evaluate a number of platforms for participatory performances.\ + \ Based on this evaluation, the Augmented Stage is introduced. Such concept describes\ + \ how Augmented Reality techniques can be used to superimpose a performance stage\ + \ with a virtual environment, populated with interactive elements. The manipulation\ + \ of these objects allows spectators to contribute to the visual and sonic outcome\ + \ of the performance through their mobile devices, while keeping their freedom\ + \ to focus on the stage. An interactive acoustic rock performance based on this\ + \ concept was staged. 
Questionnaires distributed to the audience and performers'\ + \ comments have been analyzed, contributing to an evaluation of the presented\ + \ concept and platform done through the defined variables.},\n address = {London,\ + \ United Kingdom},\n author = {Dario Mazzanti and Victor Zappi and Darwin Caldwell\ + \ and Andrea Brogni},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178871},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {29--34},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Augmented Stage for Participatory Performances},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_413.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178871 issn: 2220-4806 - month: May - numpages: 9 - pages: 70--78 - title: 'Self-Sabotage Workshop: a starting point to unravel sabotaging of instruments - as a design practice' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_9.pdf - year: 2023 + month: June + pages: 29--34 + publisher: 'Goldsmiths, University of London' + title: Augmented Stage for Participatory Performances + url: http://www.nime.org/proceedings/2014/nime2014_413.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_10 - abstract: 'We propose a method that uses music jamming as a tool for the design - of musical instruments. Both designers and musicians collaborate in the music - making process for the subsequent development of individual “music performer’s - profiles” which account for four dimensions: (i) movements and embodiment, (ii) - musical preferences, (iii) difficulties, and (iv) capabilities. These profiles - converge into proposed prototypes that transform into final designs after experts - and performers'' examination and feedback. 
We ground this method in the context - of physically disabled musicians, and we show that the method provides a decolonial - view to disability, as its purpose moves from the classical view of technology - as an aid for allowing disabled communities to access well-established instruments, - towards a new paradigm where technologies are used for the augmentation of expressive - capabilities, the strengthening of social engagement, and the empowerment of music - makers.' - address: 'Mexico City, Mexico' - articleno: 10 - author: Teodoro Dannemann - bibtex: "@article{nime2023_10,\n abstract = {We propose a method that uses music\ - \ jamming as a tool for the design of musical instruments. Both designers and\ - \ musicians collaborate in the music making process for the subsequent development\ - \ of individual “music performer’s profiles” which account for four dimensions:\ - \ (i) movements and embodiment, (ii) musical preferences, (iii) difficulties,\ - \ and (iv) capabilities. These profiles converge into proposed prototypes that\ - \ transform into final designs after experts and performers' examination and feedback.\ - \ We ground this method in the context of physically disabled musicians, and we\ - \ show that the method provides a decolonial view to disability, as its purpose\ - \ moves from the classical view of technology as an aid for allowing disabled\ - \ communities to access well-established instruments, towards a new paradigm where\ - \ technologies are used for the augmentation of expressive capabilities, the strengthening\ - \ of social engagement, and the empowerment of music makers.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {10},\n author = {Teodoro Dannemann},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {7},\n pages = {79--85},\n title = {Music jamming\ - \ 
as a participatory design method. A case study with disabled musicians},\n track\ - \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_10.pdf},\n year\ - \ = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: rtubb2014 + abstract: 'This paper outlines a theoretical framework for creative technology based + on two contrasting processes: divergent exploration and convergent optimisation. + We claim that these two cases require different gesture-to-parameter mapping properties. + Results are presented from a user experiment that motivates this theory. The experiment + was conducted using a publicly available iPad app: ``Sonic Zoom''''. Participants + were encouraged to conduct an open ended exploration of synthesis timbre using + a combination of two different interfaces. The first was a standard interface + with ten sliders, hypothesised to be suited to the ``convergent'''' stage of creation. + The second was a mapping of the entire 10-D combinatorial space to a 2-D surface + using a space filling curve. This novel interface was intended to support the + ``divergent'''' aspect of creativity. The paths of around 250 users through both + 2-D and 10-D space were logged and analysed. Both the interaction data and questionnaire + results show that the different interfaces tended to be used for different aspects + of sound creation, and a combination of these two navigation styles was deemed + to be more useful than either individually. The study indicates that the predictable, + separate parameters found in most music technology are more appropriate for convergent + tasks.' + address: 'London, United Kingdom' + author: Robert Tubb and Simon Dixon + bibtex: "@inproceedings{rtubb2014,\n abstract = {This paper outlines a theoretical\ + \ framework for creative technology based on two contrasting processes: divergent\ + \ exploration and convergent optimisation. We claim that these two cases require\ + \ different gesture-to-parameter mapping properties. 
Results are presented from\ + \ a user experiment that motivates this theory. The experiment was conducted using\ + \ a publicly available iPad app: ``Sonic Zoom''. Participants were encouraged\ + \ to conduct an open ended exploration of synthesis timbre using a combination\ + \ of two different interfaces. The first was a standard interface with ten sliders,\ + \ hypothesised to be suited to the ``convergent'' stage of creation. The second\ + \ was a mapping of the entire 10-D combinatorial space to a 2-D surface using\ + \ a space filling curve. This novel interface was intended to support the ``divergent''\ + \ aspect of creativity. The paths of around 250 users through both 2-D and 10-D\ + \ space were logged and analysed. Both the interaction data and questionnaire\ + \ results show that the different interfaces tended to be used for different aspects\ + \ of sound creation, and a combination of these two navigation styles was deemed\ + \ to be more useful than either individually. The study indicates that the predictable,\ + \ separate parameters found in most music technology are more appropriate for\ + \ convergent tasks.},\n address = {London, United Kingdom},\n author = {Robert\ + \ Tubb and Simon Dixon},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178967},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {227--232},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {The Divergent Interface: Supporting Creative\ + \ Exploration of Parameter Spaces},\n url = {http://www.nime.org/proceedings/2014/nime2014_415.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178967 issn: 2220-4806 - month: May - numpages: 7 - pages: 79--85 - title: Music jamming as a participatory design method. 
A case study with disabled - musicians - track: Papers - url: http://nime.org/proceedings/2023/nime2023_10.pdf - year: 2023 + month: June + pages: 227--232 + publisher: 'Goldsmiths, University of London' + title: 'The Divergent Interface: Supporting Creative Exploration of Parameter Spaces' + url: http://www.nime.org/proceedings/2014/nime2014_415.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_11 - abstract: 'This paper presents Puara, a framework created to tackle problems commonly - associated with instrument design, immersive environments, and prototyping. We - discuss how exploring Digital Musical Instruments (DMIs) in a collaborative environment - led to generalizing procedures that constitute a starting point to solve technical - challenges when building, maintaining, and performing with instruments. These - challenges guided the framework organization and focus on maintainability, integrability, - and modularity. Puara was employed in self-contained systems using 3 % hard-to-implement DMI - building blocks (network manager, gestural descriptors, Media Processing Unit) - and supporting 3 established DMIs (GuitarAMI, T-Stick, Probatio) and one new instrument - (AMIWrist). We validated Puara with two use cases where parts of the framework - were used. Finally, we accessed the influence of frameworks when exploring predefined - NIMEs without concern about the inner workings, or shifting composition paradigms - between event-based and gesture-based approaches.' - address: 'Mexico City, Mexico' - articleno: 11 - author: Eduardo A. L. Meneses and Thomas Piquet and Jason Noble and Marcelo Wanderley - bibtex: "@article{nime2023_11,\n abstract = {This paper presents Puara, a framework\ - \ created to tackle problems commonly associated with instrument design, immersive\ - \ environments, and prototyping. 
We discuss how exploring Digital Musical Instruments\ - \ (DMIs) in a collaborative environment led to generalizing procedures that constitute\ - \ a starting point to solve technical challenges when building, maintaining, and\ - \ performing with instruments. These challenges guided the framework organization\ - \ and focus on maintainability, integrability, and modularity. Puara was employed\ - \ in self-contained systems using 3 % hard-to-implement DMI building blocks (network\ - \ manager, gestural descriptors, Media Processing Unit) and supporting 3 established\ - \ DMIs (GuitarAMI, T-Stick, Probatio) and one new instrument (AMIWrist). We validated\ - \ Puara with two use cases where parts of the framework were used. Finally, we\ - \ accessed the influence of frameworks when exploring predefined NIMEs without\ - \ concern about the inner workings, or shifting composition paradigms between\ - \ event-based and gesture-based approaches.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {11},\n author = {Eduardo A. L. Meneses and Thomas Piquet and Jason\ - \ Noble and Marcelo Wanderley},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {8},\n pages = {86--93},\n title = {The Puara Framework: Hiding complexity\ - \ and modularity for reproducibility and usability in NIMEs},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_11.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: sfavilla2014 + abstract: 'This paper presents new touch-screen collaborative music interaction + for people with dementia. The authors argue that dementia technology has yet to + focus on collaborative multi-user group musical interactions. The project aims + to contribute to dementia care while addressing a significant gap in current literature. 
+ Two trials explore contrasting musical scenarios: the performance of abstract + electronic music and the distributed performance of J.S. Bach''s Goldberg Variations. + Findings presented in this paper; demonstrate that people with dementia can successfully + perform and engage in collaborative music performance activities with little or + no scaffolded instruction. Further findings suggest that people with dementia + can develop and retain musical performance skill over time. This paper proposes + a number of guidelines and design solutions.' + address: 'London, United Kingdom' + author: Stu Favilla and Sonja Pedell + bibtex: "@inproceedings{sfavilla2014,\n abstract = {This paper presents new touch-screen\ + \ collaborative music interaction for people with dementia. The authors argue\ + \ that dementia technology has yet to focus on collaborative multi-user group\ + \ musical interactions. The project aims to contribute to dementia care while\ + \ addressing a significant gap in current literature. Two trials explore contrasting\ + \ musical scenarios: the performance of abstract electronic music and the distributed\ + \ performance of J.S. Bach's Goldberg Variations. Findings presented in this paper;\ + \ demonstrate that people with dementia can successfully perform and engage in\ + \ collaborative music performance activities with little or no scaffolded instruction.\ + \ Further findings suggest that people with dementia can develop and retain musical\ + \ performance skill over time. 
This paper proposes a number of guidelines and\ + \ design solutions.},\n address = {London, United Kingdom},\n author = {Stu Favilla\ + \ and Sonja Pedell},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178760},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {35--39},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Touch Screen Collaborative Music: Designing\ + \ NIME for Older People with Dementia},\n url = {http://www.nime.org/proceedings/2014/nime2014_417.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178760 issn: 2220-4806 - month: May - numpages: 8 - pages: 86--93 - title: 'The Puara Framework: Hiding complexity and modularity for reproducibility - and usability in NIMEs' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_11.pdf - year: 2023 + month: June + pages: 35--39 + publisher: 'Goldsmiths, University of London' + title: 'Touch Screen Collaborative Music: Designing NIME for Older People with Dementia' + url: http://www.nime.org/proceedings/2014/nime2014_417.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_12 - abstract: 'Joakinator is a wearable interactive interface that allows users to activate - different media materials, such as sound, music, and video, through body gestures. - The device, designed in the context of music and performing arts, integrates surface - electromyogram, force sensors, and machine learning algorithms with tailored-made - software for sonifying muscle-tone and force. This allows the body to reflect - expressively the content of the media and the architecture of the device. 
Recently, - we have started to investigate the potential of Joakinator to alter body perception - in the context of Joakinator, a European Research Council Project focused on the - transformations of body perception through the use of interactive sound/haptics - technology. At NIME-2023, we will showcase Joakinator and invite visitors to experience - the device firsthand. Visitors will have the opportunity to try on the device, - observe others using it, and reflect on its capabilities to transform body movement - and perception through the sonification of muscle-tone and force. Overall, Joakinator - is a technology that pushes the boundaries of body-computer interaction and opens - new possibilities for human-computer interaction and expression.' - address: 'Mexico City, Mexico' - articleno: 12 - author: Joaquín R. Díaz Durán and Laia Turmo Vidal and Ana Tajadura-Jiménez - bibtex: "@article{nime2023_12,\n abstract = {Joakinator is a wearable interactive\ - \ interface that allows users to activate different media materials, such as sound,\ - \ music, and video, through body gestures. The device, designed in the context\ - \ of music and performing arts, integrates surface electromyogram, force sensors,\ - \ and machine learning algorithms with tailored-made software for sonifying muscle-tone\ - \ and force. This allows the body to reflect expressively the content of the media\ - \ and the architecture of the device. Recently, we have started to investigate\ - \ the potential of Joakinator to alter body perception in the context of Joakinator,\ - \ a European Research Council Project focused on the transformations of body perception\ - \ through the use of interactive sound/haptics technology. 
At NIME-2023, we will\ - \ showcase Joakinator and invite visitors to experience the device firsthand.\ - \ Visitors will have the opportunity to try on the device, observe others using\ - \ it, and reflect on its capabilities to transform body movement and perception\ - \ through the sonification of muscle-tone and force. Overall, Joakinator is a\ - \ technology that pushes the boundaries of body-computer interaction and opens\ - \ new possibilities for human-computer interaction and expression.},\n address\ - \ = {Mexico City, Mexico},\n articleno = {12},\n author = {Joaquín R. Díaz Durán\ - \ and Laia Turmo Vidal and Ana Tajadura-Jiménez},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {4},\n pages = {94--97},\n title = {Joakinator: An Interface for\ - \ Transforming Body Movement and Perception through Machine Learning and Sonification\ - \ of Muscle-Tone and Force.},\n track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_12.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: jeaton2014 + abstract: 'The Space Between Us is a live performance piece for vocals, piano and + live electronics using a Brain-Computer Music Interface system for emotional control + of the score. The system not only aims to reflect emotional states but to direct + and induce emotional states through the real-time generation of the score, highlighting + the potential of direct neural-emotional manipulation in live performance. The + EEG of the vocalist and one audience member is measured throughout the performance + and the system generates a real-time score based on mapping the emotional features + within the EEG. We measure the two emotional descriptors, valence and arousal, + within EEG and map the two-dimensional correlate of averaged windows to musical + phrases. 
These pre-composed phrases contain associated emotional content based + on the KTH Performance Rules System (Director Musices). The piece is in three + movements, the first two are led by the emotions of each subject respectively, + whilst the third movement interpolates the combined response of the performer + and audience member. The system not only aims to reflect the individuals'' emotional + states but also attempts to induce a shared emotional experience by drawing the + two responses together. This work highlights the potential available in affecting + neural-emotional manipulation within live performance and demonstrates a new approach + to real-time, affectively-driven composition.' + address: 'London, United Kingdom' + author: Joel Eaton and Weiwei Jin and Eduardo Miranda + bibtex: "@inproceedings{jeaton2014,\n abstract = {The Space Between Us is a live\ + \ performance piece for vocals, piano and live electronics using a Brain-Computer\ + \ Music Interface system for emotional control of the score. The system not only\ + \ aims to reflect emotional states but to direct and induce emotional states through\ + \ the real-time generation of the score, highlighting the potential of direct\ + \ neural-emotional manipulation in live performance. The EEG of the vocalist and\ + \ one audience member is measured throughout the performance and the system generates\ + \ a real-time score based on mapping the emotional features within the EEG. We\ + \ measure the two emotional descriptors, valence and arousal, within EEG and map\ + \ the two-dimensional correlate of averaged windows to musical phrases. These\ + \ pre-composed phrases contain associated emotional content based on the KTH Performance\ + \ Rules System (Director Musices). The piece is in three movements, the first\ + \ two are led by the emotions of each subject respectively, whilst the third movement\ + \ interpolates the combined response of the performer and audience member. 
The\ + \ system not only aims to reflect the individuals' emotional states but also attempts\ + \ to induce a shared emotional experience by drawing the two responses together.\ + \ This work highlights the potential available in affecting neural-emotional manipulation\ + \ within live performance and demonstrates a new approach to real-time, affectively-driven\ + \ composition.},\n address = {London, United Kingdom},\n author = {Joel Eaton\ + \ and Weiwei Jin and Eduardo Miranda},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178756},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {593--596},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {The Space Between Us. A Live Performance with\ + \ Musical Score Generated via Emotional Levels Measured in {EEG} of One Performer\ + \ and an Audience Member},\n url = {http://www.nime.org/proceedings/2014/nime2014_418.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178756 issn: 2220-4806 - month: May - numpages: 4 - pages: 94--97 - title: 'Joakinator: An Interface for Transforming Body Movement and Perception through - Machine Learning and Sonification of Muscle-Tone and Force.' - track: Demos - url: http://nime.org/proceedings/2023/nime2023_12.pdf - year: 2023 + month: June + pages: 593--596 + publisher: 'Goldsmiths, University of London' + title: The Space Between Us. 
A Live Performance with Musical Score Generated via + Emotional Levels Measured in EEG of One Performer and an Audience Member + url: http://www.nime.org/proceedings/2014/nime2014_418.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_13 - abstract: 'The novelty and usefulness of the distributed signal mapping framework - libmapper has been demonstrated in many projects and publications, yet its technical - entry and operation requirements are often too high to be feasible as a mapping - option for less-technical users. This paper focuses on completing key development - tasks to overcome these barriers including improvements to software distribution - and mapping session management. The impact of these changes was evaluated by asking - several artists to design an interactive audiovisual installation using libmapper. - Observations and feedback from the artists throughout their projects let us assess - the impact of the developments on the usability of the framework, suggesting key - development principles for related tools created in research contexts.' - address: 'Mexico City, Mexico' - articleno: 13 - author: Brady Boettcher and Eduardo A. L. Meneses and Christian Frisson and Marcelo - Wanderley and Joseph Malloch - bibtex: "@article{nime2023_13,\n abstract = {The novelty and usefulness of the distributed\ - \ signal mapping framework libmapper has been demonstrated in many projects and\ - \ publications, yet its technical entry and operation requirements are often too\ - \ high to be feasible as a mapping option for less-technical users. This paper\ - \ focuses on completing key development tasks to overcome these barriers including\ - \ improvements to software distribution and mapping session management. The impact\ - \ of these changes was evaluated by asking several artists to design an interactive\ - \ audiovisual installation using libmapper. 
Observations and feedback from the\ - \ artists throughout their projects let us assess the impact of the developments\ - \ on the usability of the framework, suggesting key development principles for\ - \ related tools created in research contexts.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {13},\n author = {Brady Boettcher and Eduardo A. L. Meneses and\ - \ Christian Frisson and Marcelo Wanderley and Joseph Malloch},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {8},\n pages = {98--105},\n title = {Addressing Barriers\ - \ for Entry and Operation of a Distributed Signal Mapping Framework},\n track\ - \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_13.pdf},\n year\ - \ = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: jmathew2014 + abstract: 'Recent technological improvements in audio reproduction systems increased + the possibilities to spatialize sources in a listening environment. The spatialization + of reproduced audio is however highly dependent on the recording technique, the + rendering method, and the loudspeaker configuration. While object-based audio + production has proven to reduce the dependency on loudspeaker configurations, + authoring tools are still considered to be difficult to interact with in current + production environments. In this paper, we investigate the issues of spatialization + techniques for object-based audio production and introduce the Spatial Audio Design + Spaces (SpADS) framework, that provides insights into the spatial manipulation + of object-based audio. 
Based on interviews with professional sound engineers, + this morphological analysis clarifies the relationships between recording and + rendering techniques that define audio-objects for 3D speaker configurations, + allowing the analysis and the design of advanced object-based controllers as well.' + address: 'London, United Kingdom' + author: Justin Mathew and Stéphane Huot and Alan Blum + bibtex: "@inproceedings{jmathew2014,\n abstract = {Recent technological improvements\ + \ in audio reproduction systems increased the possibilities to spatialize sources\ + \ in a listening environment. The spatialization of reproduced audio is however\ + \ highly dependent on the recording technique, the rendering method, and the loudspeaker\ + \ configuration. While object-based audio production has proven to reduce the\ + \ dependency on loudspeaker configurations, authoring tools are still considered\ + \ to be difficult to interact with in current production environments. In this\ + \ paper, we investigate the issues of spatialization techniques for object-based\ + \ audio production and introduce the Spatial Audio Design Spaces (SpADS) framework,\ + \ that provides insights into the spatial manipulation of object-based audio.\ + \ Based on interviews with professional sound engineers, this morphological analysis\ + \ clarifies the relationships between recording and rendering techniques that\ + \ define audio-objects for 3D speaker configurations, allowing the analysis and\ + \ the design of advanced object-based controllers as well.},\n address = {London,\ + \ United Kingdom},\n author = {Justin Mathew and St{\\'e}phane Huot and Alan Blum},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178865},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {415--420},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {A Morphological Analysis of Audio-Objects and their 
Control\ + \ Methods for {3D} Audio},\n url = {http://www.nime.org/proceedings/2014/nime2014_420.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178865 issn: 2220-4806 - month: May - numpages: 8 - pages: 98--105 - title: Addressing Barriers for Entry and Operation of a Distributed Signal Mapping - Framework - track: Papers - url: http://nime.org/proceedings/2023/nime2023_13.pdf - year: 2023 + month: June + pages: 415--420 + publisher: 'Goldsmiths, University of London' + title: A Morphological Analysis of Audio-Objects and their Control Methods for 3D + Audio + url: http://www.nime.org/proceedings/2014/nime2014_420.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_14 - abstract: 'In this paper, we reflect on the focus of “newness” in NIME research - and practice and argue that there is a missing O (for “Old”) in framing our academic - discourse. A systematic review of the last year’s conference proceedings reveals - that most papers do, indeed, present new instruments, interfaces, or pieces of - technology. Comparably few papers focus on the prolongation of existing NIMEs. - Our meta-analysis identifies four main categories from these papers: (1) reuse, - (2) update, (3) complement, and (4) long-term engagement. We discuss how focusing - more on these four types of NIME development and engagement can be seen as an - approach to increase sustainability.' - address: 'Mexico City, Mexico' - articleno: 14 - author: Raul Masu and Fabio Morreale and Alexander Refsum Jensenius - bibtex: "@article{nime2023_14,\n abstract = {In this paper, we reflect on the focus\ - \ of “newness” in NIME research and practice and argue that there is a missing\ - \ O (for “Old”) in framing our academic discourse. 
A systematic review of the\ - \ last year’s conference proceedings reveals that most papers do, indeed, present\ - \ new instruments, interfaces, or pieces of technology. Comparably few papers\ - \ focus on the prolongation of existing NIMEs. Our meta-analysis identifies four\ - \ main categories from these papers: (1) reuse, (2) update, (3) complement, and\ - \ (4) long-term engagement. We discuss how focusing more on these four types of\ - \ NIME development and engagement can be seen as an approach to increase sustainability.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {14},\n author = {Raul Masu and\ - \ Fabio Morreale and Alexander Refsum Jensenius},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {10},\n pages = {106--115},\n title = {The O in NIME: Reflecting\ - \ on the Importance of Reusing and Repurposing Old Musical Instruments},\n track\ - \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_14.pdf},\n year\ - \ = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: rcanning2014 + abstract: 'This paper describes the Parallaxis Score System, part of the authors + ongoing research into to the development of technological tools that foster creative + interactions between improvising musicians and predefined instructional texts. + The Parallaxis platform places these texts within a networked, interactive environment + with a generalised set of controls in order to explore and devise ontologies of + network performance. As an interactive tool involved in music production the score + system itself undergoes a functional transformation and becomes a distributed + meta-instrument in its own right, independent from, yet intrinsically connected + to those instruments held by the performers.' 
+ address: 'London, United Kingdom' + author: Rob Canning + bibtex: "@inproceedings{rcanning2014,\n abstract = {This paper describes the Parallaxis\ + \ Score System, part of the authors ongoing research into to the development of\ + \ technological tools that foster creative interactions between improvising musicians\ + \ and predefined instructional texts. The Parallaxis platform places these texts\ + \ within a networked, interactive environment with a generalised set of controls\ + \ in order to explore and devise ontologies of network performance. As an interactive\ + \ tool involved in music production the score system itself undergoes a functional\ + \ transformation and becomes a distributed meta-instrument in its own right, independent\ + \ from, yet intrinsically connected to those instruments held by the performers.},\n\ + \ address = {London, United Kingdom},\n author = {Rob Canning},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178728},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {144--146},\n publisher = {Goldsmiths, University of London},\n title = {Interactive\ + \ Parallax Scrolling Score Interface for Composed Networked Improvisation},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_421.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178728 issn: 2220-4806 - month: May - numpages: 10 - pages: 106--115 - title: 'The O in NIME: Reflecting on the Importance of Reusing and Repurposing Old - Musical Instruments' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_14.pdf - year: 2023 + month: June + pages: 144--146 + publisher: 'Goldsmiths, University of London' + title: Interactive Parallax Scrolling Score Interface for Composed Networked Improvisation + url: 
http://www.nime.org/proceedings/2014/nime2014_421.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_15 - abstract: 'This paper introduces Stringesthesia, an interactive and improvised performance - paradigm. Stringesthesia was designed to explore the connection between performer - and audience by using real-time neuroimaging technology that gave the audience - direct access to the performer''s internal mental state and determined the extent - of how the audience could participate with the performer throughout the performance. - Functional near-infrared spectroscopy (fNIRS) technology was used to assess metabolic - activity in a network of brain areas collectively associated with a metric we - call “trust”. The real-time measurement of the performer’s level of trust was - visualized behind the performer and used to dynamically restrict or promote audience - participation: e.g., as the performer’s trust in the audience grew, more participatory - stations for playing drums and selecting the performer’s chords were activated. - Throughout the paper we discuss prior work that heavily influenced our design, - conceptual and methodological issues with using fNIRS technology, and our system - architecture. We then describe an employment of this paradigm with a solo guitar - player.' - address: 'Mexico City, Mexico' - articleno: 15 - author: Torin Hopkins and Emily Doherty and Netta Ofer and Suibi Che-Chuan Weng - and Peter Gyory and Chad Tobin and Leanne Hirshfield and Ellen Yi-Luen Do - bibtex: "@article{nime2023_15,\n abstract = {This paper introduces Stringesthesia,\ - \ an interactive and improvised performance paradigm. Stringesthesia was designed\ - \ to explore the connection between performer and audience by using real-time\ - \ neuroimaging technology that gave the audience direct access to the performer's\ - \ internal mental state and determined the extent of how the audience could participate\ - \ with the performer throughout the performance. 
Functional near-infrared spectroscopy\ - \ (fNIRS) technology was used to assess metabolic activity in a network of brain\ - \ areas collectively associated with a metric we call “trust”. The real-time measurement\ - \ of the performer’s level of trust was visualized behind the performer and used\ - \ to dynamically restrict or promote audience participation: e.g., as the performer’s\ - \ trust in the audience grew, more participatory stations for playing drums and\ - \ selecting the performer’s chords were activated. Throughout the paper we discuss\ - \ prior work that heavily influenced our design, conceptual and methodological\ - \ issues with using fNIRS technology, and our system architecture. We then describe\ - \ an employment of this paradigm with a solo guitar player.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {15},\n author = {Torin Hopkins and Emily Doherty\ - \ and Netta Ofer and Suibi Che-Chuan Weng and Peter Gyory and Chad Tobin and Leanne\ - \ Hirshfield and Ellen Yi-Luen Do},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {7},\n pages = {116--122},\n title = {Stringesthesia: Dynamically Shifting\ - \ Musical Agency Between Audience and Performer Based on Trust in an Interactive\ - \ and Improvised Performance},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_15.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: arenaud2014 + abstract: 'This paper provides an overview of a proposed demonstration of 3DinMotion, + a system using real time motion capture of one or several subjects, which can + be used in interactive audiovisual pieces and network performances. 
The skeleton + of a subject is analyzed in real time and displayed as an abstract avatar as well + as sonified based on mappings and rules to make the interplay experience lively + and rewarding. A series of musical pieces have been composed for the interface + following cueing strategies. In addition a second display, ``the prompter'''' + guides the users through the piece. 3DinMotion has been developed from scratch + and natively, leading to a system with a very low latency, making it suitable + for real time music interactions. In addition, 3DinMotion is fully compatible + with the OpenSoundControl (OSC) protocol, allowing expansion to commonly used + musical and sound design applications.' + address: 'London, United Kingdom' + author: Alain Renaud and Caecilia Charbonnier and Sylvain Chagué + bibtex: "@inproceedings{arenaud2014,\n abstract = {This paper provides an overview\ + \ of a proposed demonstration of 3DinMotion, a system using real time motion capture\ + \ of one or several subjects, which can be used in interactive audiovisual pieces\ + \ and network performances. The skeleton of a subject is analyzed in real time\ + \ and displayed as an abstract avatar as well as sonified based on mappings and\ + \ rules to make the interplay experience lively and rewarding. A series of musical\ + \ pieces have been composed for the interface following cueing strategies. 
In\ + \ addition a second display, ``the prompter'' guides the users through the piece.\ + \ 3DinMotion has been developed from scratch and natively, leading to a system\ + \ with a very low latency, making it suitable for real time music interactions.\ + \ In addition, 3DinMotion is fully compatible with the OpenSoundControl (OSC)\ + \ protocol, allowing expansion to commonly used musical and sound design applications.},\n\ + \ address = {London, United Kingdom},\n author = {Alain Renaud and Caecilia Charbonnier\ + \ and Sylvain Chagu\\'e},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178915},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {495--496},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {{3D}inMotion A Mocap Based Interface for Real\ + \ Time Visualisation and Sonification of Multi-User Interactions},\n url = {http://www.nime.org/proceedings/2014/nime2014_423.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178915 issn: 2220-4806 - month: May - numpages: 7 - pages: 116--122 - title: 'Stringesthesia: Dynamically Shifting Musical Agency Between Audience and - Performer Based on Trust in an Interactive and Improvised Performance' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_15.pdf - year: 2023 + month: June + pages: 495--496 + publisher: 'Goldsmiths, University of London' + title: 3DinMotion A Mocap Based Interface for Real Time Visualisation and Sonification + of Multi-User Interactions + url: http://www.nime.org/proceedings/2014/nime2014_423.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_16 - abstract: 'The Serge Modular Archive Instrument (SMAI) is a sample-based computer - emulation of selected patches on the vintage Serge Modular instrument that is - housed at 
(redacted). Hours of recorded audio created by specified parameter combinations - have been analyzed using audio descriptors and machine learning algorithms in - the FluCoMa toolkit. Sound is controlled via (1) a machine learning dimensionality - reduction plot showing all the recorded samples and/or (2) a skeuomorphic graphical - user interface of the patches used to record the sounds. Flexible MIDI and OSC - control of the software enables custom modulation and performance of this archive - from outside the software. Differing from many software synthesis-based emulations, - the SMAI aims to capture and archive the idiosyncrasies of vintage hardware as - digital audio samples; compare and contrast skeuomorphic and machine learning - enabled modes of exploring vintage sounds; and create a flexible instrument for - creatively performing this archive.' - address: 'Mexico City, Mexico' - articleno: 16 - author: Ted Moore and Jean Brazeau - bibtex: "@article{nime2023_16,\n abstract = {The Serge Modular Archive Instrument\ - \ (SMAI) is a sample-based computer emulation of selected patches on the vintage\ - \ Serge Modular instrument that is housed at (redacted). Hours of recorded audio\ - \ created by specified parameter combinations have been analyzed using audio descriptors\ - \ and machine learning algorithms in the FluCoMa toolkit. Sound is controlled\ - \ via (1) a machine learning dimensionality reduction plot showing all the recorded\ - \ samples and/or (2) a skeuomorphic graphical user interface of the patches used\ - \ to record the sounds. 
Flexible MIDI and OSC control of the software enables\ - \ custom modulation and performance of this archive from outside the software.\ - \ Differing from many software synthesis-based emulations, the SMAI aims to capture\ - \ and archive the idiosyncrasies of vintage hardware as digital audio samples;\ - \ compare and contrast skeuomorphic and machine learning enabled modes of exploring\ - \ vintage sounds; and create a flexible instrument for creatively performing this\ - \ archive.},\n address = {Mexico City, Mexico},\n articleno = {16},\n author =\ - \ {Ted Moore and Jean Brazeau},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {5},\n pages = {123--127},\n title = {Serge Modular Archive Instrument (SMAI):\ - \ Bridging Skeuomorphic & Machine Learning Enabled Interfaces},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_16.pdf},\n year = {2023}\n\ +- ENTRYTYPE: inproceedings + ID: tkelkar2014 + abstract: 'We propose a new musical interface, TrAP (TRace-A-Phrase) for generating + phrases of Hindustani Classical Music (HCM). In this system the user traces melodic + phrases on a tablet interface to create phrases in a raga. We begin by analyzing + tracings drawn by 28 participants, and train a classifier to categorize them into + one of four melodic categories from the theory of Hindustani Music. Then we create + a model based on note transitions from the raga grammar for the notes used in + the singable octaves in HCM. Upon being given a new tracing, the system segments + the tracing and computes a final phrase that best approximates the tracing.' 
+ address: 'London, United Kingdom' + author: Udit Roy and Tejaswinee Kelkar and Bipin Indurkhya + bibtex: "@inproceedings{tkelkar2014,\n abstract = {We propose a new musical interface,\ + \ TrAP (TRace-A-Phrase) for generating phrases of Hindustani Classical Music (HCM).\ + \ In this system the user traces melodic phrases on a tablet interface to create\ + \ phrases in a raga. We begin by analyzing tracings drawn by 28 participants,\ + \ and train a classifier to categorize them into one of four melodic categories\ + \ from the theory of Hindustani Music. Then we create a model based on note transitions\ + \ from the raga grammar for the notes used in the singable octaves in HCM. Upon\ + \ being given a new tracing, the system segments the tracing and computes a final\ + \ phrase that best approximates the tracing.},\n address = {London, United Kingdom},\n\ + \ author = {Udit Roy and Tejaswinee Kelkar and Bipin Indurkhya},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178923},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {243--246},\n publisher = {Goldsmiths, University of London},\n title = {TrAP:\ + \ An Interactive System to Generate Valid Raga Phrases from Sound-Tracings},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_424.pdf},\n year = {2014}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178923 issn: 2220-4806 - month: May - numpages: 5 - pages: 123--127 - title: 'Serge Modular Archive Instrument (SMAI): Bridging Skeuomorphic & Machine - Learning Enabled Interfaces' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_16.pdf - year: 2023 + month: June + pages: 243--246 + publisher: 'Goldsmiths, University of London' + title: 'TrAP: An Interactive System to Generate Valid Raga Phrases from Sound-Tracings' + 
url: http://www.nime.org/proceedings/2014/nime2014_424.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_17 - abstract: 'We present a study of a freehand musical system to investigate musicians'' - experiences related to performance in augmented reality (AR). Head-mounted mixed - reality computers present opportunities for natural gestural control in three - dimensions, particularly when using hand-tracking in a creative interface. Existing - musical interfaces with head-mounted displays use dedicated input devices that - are not designed specifically for musical gestures and may not support appropriate - interactions. We are yet to see widespread adoption of head-mounted AR musical - instruments. We conducted an empirical study to evaluate musicians'' (N=20) experience - of performing with a freehand musical interface. The results suggest that the - design of freehand musical interaction in the AR space is highly learnable and - explorable, and that such systems can leverage unique aspects of mobility, space - and sound to deliver an engaging and playful musical experience. The mobile musical - experience with a spatial interface design allowed performers to be more bodily - engaged and facilitated gestural exploration for musical creativity. This work - contributes to a more developed understanding of potentials and challenges in - AR-based interface design for musical creativity.' - address: 'Mexico City, Mexico' - articleno: 17 - author: Yichen Wang and Mingze Xi and Matt Adcock and Charles Patrick Martin - bibtex: "@article{nime2023_17,\n abstract = {We present a study of a freehand musical\ - \ system to investigate musicians' experiences related to performance in augmented\ - \ reality (AR). Head-mounted mixed reality computers present opportunities for\ - \ natural gestural control in three dimensions, particularly when using hand-tracking\ - \ in a creative interface. 
Existing musical interfaces with head-mounted displays\ - \ use dedicated input devices that are not designed specifically for musical gestures\ - \ and may not support appropriate interactions. We are yet to see widespread adoption\ - \ of head-mounted AR musical instruments. We conducted an empirical study to evaluate\ - \ musicians' (N=20) experience of performing with a freehand musical interface.\ - \ The results suggest that the design of freehand musical interaction in the AR\ - \ space is highly learnable and explorable, and that such systems can leverage\ - \ unique aspects of mobility, space and sound to deliver an engaging and playful\ - \ musical experience. The mobile musical experience with a spatial interface design\ - \ allowed performers to be more bodily engaged and facilitated gestural exploration\ - \ for musical creativity. This work contributes to a more developed understanding\ - \ of potentials and challenges in AR-based interface design for musical creativity.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {17},\n author = {Yichen Wang\ - \ and Mingze Xi and Matt Adcock and Charles Patrick Martin},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {6},\n pages = {128--133},\n title = {Mobility, Space\ - \ and Sound Activate Expressive Musical Experience in Augmented Reality},\n track\ - \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_17.pdf},\n year\ - \ = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: ncollins2014 + abstract: 'The algorave movement has received reasonable international exposure + in the last two years, including a series of concerts in Europe and beyond, and + press coverage in a number of media. 
This paper seeks to illuminate some of the + historical precedents to the scene, its primary aesthetic goals, and the divergent + technological and musical approaches of representative participants. We keep in + mind the novel possibilities in musical expression explored by algoravers. The + scene is by no means homogeneous, and the very lack of uniformity of technique, + from new live coding languages through code DJing to plug-in combination, with + or without visual extension, is indicative of the flexibility of computers themselves + as general information processors.' + address: 'London, United Kingdom' + author: Nick Collins and Alex McLean + bibtex: "@inproceedings{ncollins2014,\n abstract = {The algorave movement has received\ + \ reasonable international exposure in the last two years, including a series\ + \ of concerts in Europe and beyond, and press coverage in a number of media. This\ + \ paper seeks to illuminate some of the historical precedents to the scene, its\ + \ primary aesthetic goals, and the divergent technological and musical approaches\ + \ of representative participants. We keep in mind the novel possibilities in musical\ + \ expression explored by algoravers. 
The scene is by no means homogeneous, and\ + \ the very lack of uniformity of technique, from new live coding languages through\ + \ code DJing to plug-in combination, with or without visual extension, is indicative\ + \ of the flexibility of computers themselves as general information processors.},\n\ + \ address = {London, United Kingdom},\n author = {Nick Collins and Alex McLean},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178734},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {355--358},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Algorave: Live Performance of Algorithmic Electronic\ + \ Dance Music},\n url = {http://www.nime.org/proceedings/2014/nime2014_426.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178734 issn: 2220-4806 - month: May - numpages: 6 - pages: 128--133 - title: 'Mobility, Space and Sound Activate Expressive Musical Experience in Augmented - Reality' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_17.pdf - year: 2023 + month: June + pages: 355--358 + publisher: 'Goldsmiths, University of London' + title: 'Algorave: Live Performance of Algorithmic Electronic Dance Music' + url: http://www.nime.org/proceedings/2014/nime2014_426.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_18 - abstract: "Engineering communities that feed the current proliferation of artificial\ - \ intelligence (AI) have historically been slow to recognise the spectrum of societal\ - \ impacts of their work. 
Frequent controversies around AI applications in creative\ - \ domains demonstrate insufficient consideration of ethical predicaments, but\ - \ the abstract principles of current AI and data ethics documents provide little\ - \ practical guidance.\nPragmatic methods are urgently needed to support developers\ - \ in ethical reflection of their work on creative-AI tools. \n\nIn the wider context\ - \ of value sensitive, people-oriented design, we present an analytical method\ - \ that implements an ethically informed and power-sensitive stakeholder identification\ - \ and mapping: Ethically Aligned Stakeholder Elicitation (EASE). As a case study,\ - \ we test our method in workshops with six research groups that develop AI in\ - \ musical contexts. Our results demonstrate that EASE supports\ncritical self-reflection\ - \ of the research and outreach practices among developers, discloses power relations\ - \ and value tensions in the development processes, and foregrounds opportunities\ - \ for stakeholder engagement. This can guide developers and the wider NIME community\ - \ towards ethically aligned research and development of creative-AI." - address: 'Mexico City, Mexico' - articleno: 18 - author: Anna-Kaisa Kaila and Petra Jääskeläinen and Andre Holzapfel - bibtex: "@article{nime2023_18,\n abstract = {Engineering communities that feed the\ - \ current proliferation of artificial intelligence (AI) have historically been\ - \ slow to recognise the spectrum of societal impacts of their work. Frequent controversies\ - \ around AI applications in creative domains demonstrate insufficient consideration\ - \ of ethical predicaments, but the abstract principles of current AI and data\ - \ ethics documents provide little practical guidance.\nPragmatic methods are urgently\ - \ needed to support developers in ethical reflection of their work on creative-AI\ - \ tools. 
\n\nIn the wider context of value sensitive, people-oriented design,\ - \ we present an analytical method that implements an ethically informed and power-sensitive\ - \ stakeholder identification and mapping: Ethically Aligned Stakeholder Elicitation\ - \ (EASE). As a case study, we test our method in workshops with six research groups\ - \ that develop AI in musical contexts. Our results demonstrate that EASE supports\n\ - critical self-reflection of the research and outreach practices among developers,\ - \ discloses power relations and value tensions in the development processes, and\ - \ foregrounds opportunities for stakeholder engagement. This can guide developers\ - \ and the wider NIME community towards ethically aligned research and development\ - \ of creative-AI.},\n address = {Mexico City, Mexico},\n articleno = {18},\n author\ - \ = {Anna-Kaisa Kaila and Petra Jääskeläinen and Andre Holzapfel},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {8},\n pages = {134--141},\n title = {Ethically\ - \ Aligned Stakeholder Elicitation (EASE): Case Study in Music-AI},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_18.pdf},\n year = {2023}\n\ +- ENTRYTYPE: inproceedings + ID: jbowers12014 + abstract: 'In this paper we describe an artistic response to a collection of natural + history museum artefacts, developed as part of a residency organised around a + public participatory workshop. Drawing on a critical literature in studies of + material culture, the work incorporated data sonification, image audification, + field recordings and created a number of instruments for exploring geological + artefacts and meterological data as aesthetic material. The residency culminated + in an exhibition presented as a ''sensorium'' for the sensory exploration of museum + objects. 
In describing the methods and thinking behind the project this paper + presents an alternative approach to engaging artists and audiences with local + heritage and museum archives, which draws on research in NIME and allied literatures, + and which is devoted to enlivening collections as occasions for varied interpretation, + appropriation and aesthetic response.' + address: 'London, United Kingdom' + author: John Bowers and Tim Shaw + bibtex: "@inproceedings{jbowers12014,\n abstract = {In this paper we describe an\ + \ artistic response to a collection of natural history museum artefacts, developed\ + \ as part of a residency organised around a public participatory workshop. Drawing\ + \ on a critical literature in studies of material culture, the work incorporated\ + \ data sonification, image audification, field recordings and created a number\ + \ of instruments for exploring geological artefacts and meterological data as\ + \ aesthetic material. The residency culminated in an exhibition presented as a\ + \ 'sensorium' for the sensory exploration of museum objects. 
In describing the\ + \ methods and thinking behind the project this paper presents an alternative approach\ + \ to engaging artists and audiences with local heritage and museum archives, which\ + \ draws on research in NIME and allied literatures, and which is devoted to enlivening\ + \ collections as occasions for varied interpretation, appropriation and aesthetic\ + \ response.},\n address = {London, United Kingdom},\n author = {John Bowers and\ + \ Tim Shaw},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178720},\n issn\ + \ = {2220-4806},\n month = {June},\n pages = {175--178},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Reappropriating Museum Collections: Performing\ + \ Geology Specimens and Meterology Data as New Instruments for Musical Expression},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_429.pdf},\n year = {2014}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178720 issn: 2220-4806 - month: May - numpages: 8 - pages: 134--141 - title: 'Ethically Aligned Stakeholder Elicitation (EASE): Case Study in Music-AI' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_18.pdf - year: 2023 + month: June + pages: 175--178 + publisher: 'Goldsmiths, University of London' + title: 'Reappropriating Museum Collections: Performing Geology Specimens and Meterology + Data as New Instruments for Musical Expression' + url: http://www.nime.org/proceedings/2014/nime2014_429.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_19 - abstract: 'The state-of-the-art recognition of continuous gestures for control of - musical sound by means of machine learning has two notable constraints. 
The first - is that the system needs to be trained with individual example gestures, the starting - and ending points of which need to be well defined. The second constraint is time - required for the system to recognise that a gesture has occurred, which may prevent - the quick action that musical performance typically requires. This article describes - how a method for unsupervised segmentation of gestures, may be used for delayed - gestural control of a musical system. The system allows a user to perform without - explicitly indicating the starting and ending of gestures in order to train the - machine learning algorithm. To demonstrate the feasibility of the system, an apparatus - for control of musical sound was devised incorporating the time required by the - process into the interaction paradigm. The unsupervised automatic segmentation - method and the concept of delayed control are further proposed to be exploited - in the design and implementation of systems that facilitate seamless human-machine - musical interaction without the need for quick response time, for example when - using broad motion of the human body.' - address: 'Mexico City, Mexico' - articleno: 19 - author: Juan Ignacio Mendoza Garay - bibtex: "@article{nime2023_19,\n abstract = {The state-of-the-art recognition of\ - \ continuous gestures for control of musical sound by means of machine learning\ - \ has two notable constraints. The first is that the system needs to be trained\ - \ with individual example gestures, the starting and ending points of which need\ - \ to be well defined. The second constraint is time required for the system to\ - \ recognise that a gesture has occurred, which may prevent the quick action that\ - \ musical performance typically requires. This article describes how a method\ - \ for unsupervised segmentation of gestures, may be used for delayed gestural\ - \ control of a musical system. 
The system allows a user to perform without explicitly\ - \ indicating the starting and ending of gestures in order to train the machine\ - \ learning algorithm. To demonstrate the feasibility of the system, an apparatus\ - \ for control of musical sound was devised incorporating the time required by\ - \ the process into the interaction paradigm. The unsupervised automatic segmentation\ - \ method and the concept of delayed control are further proposed to be exploited\ - \ in the design and implementation of systems that facilitate seamless human-machine\ - \ musical interaction without the need for quick response time, for example when\ - \ using broad motion of the human body.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {19},\n author = {Juan Ignacio Mendoza Garay},\n booktitle = {Proceedings\ +- ENTRYTYPE: inproceedings + ID: ahadjakos12014 + abstract: 'Physical computing platforms such as the Arduino have significantly simplified + developing physical musical interfaces. However, those platforms typically target + everyday programmers rather than composers and media artists. On the other hand, + tangible user interface (TUI) toolkits, which provide an integrated, easy-to-use + solution have not gained momentum in modern music creation. We propose a concept + that hybridizes physical computing and TUI toolkit approaches. This helps to tackle + typical TUI toolkit weaknesses, namely quick sensor obsolescence and limited choices. + We developed a physical realization based on the idea of "universal pins", which + can be configured to perform a variety of duties, making it possible to connect + different sensor breakouts and modules. We evaluated our prototype by making performance + measurements and conducting a user study demonstrating the feasibility of our + approach.' 
+ address: 'London, United Kingdom' + author: Aristotelis Hadjakos and Simon Waloschek + bibtex: "@inproceedings{ahadjakos12014,\n abstract = {Physical computing platforms\ + \ such as the Arduino have significantly simplified developing physical musical\ + \ interfaces. However, those platforms typically target everyday programmers rather\ + \ than composers and media artists. On the other hand, tangible user interface\ + \ (TUI) toolkits, which provide an integrated, easy-to-use solution have not gained\ + \ momentum in modern music creation. We propose a concept that hybridizes physical\ + \ computing and TUI toolkit approaches. This helps to tackle typical TUI toolkit\ + \ weaknesses, namely quick sensor obsolescence and limited choices. We developed\ + \ a physical realization based on the idea of \"universal pins\", which can be\ + \ configured to perform a variety of duties, making it possible to connect different\ + \ sensor breakouts and modules. We evaluated our prototype by making performance\ + \ measurements and conducting a user study demonstrating the feasibility of our\ + \ approach.},\n address = {London, United Kingdom},\n author = {Aristotelis Hadjakos\ + \ and Simon Waloschek},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178782},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {625--628},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {SPINE: A TUI Toolkit and Physical Computing\ + \ Hybrid},\n url = {http://www.nime.org/proceedings/2014/nime2014_430.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178782 + issn: 2220-4806 + month: June + pages: 625--628 + publisher: 'Goldsmiths, University of London' + title: 'SPINE: A TUI Toolkit and Physical Computing Hybrid' + url: http://www.nime.org/proceedings/2014/nime2014_430.pdf + year: 
2014 + + +- ENTRYTYPE: inproceedings + ID: ogreen2014 + abstract: 'To engage with questions of musicality is to invite into consideration + a complex network of topics beyond the mechanics of soundful interaction with + our interfaces. Drawing on the work of Born, I sketch an outline of the reach + of these topics. I suggest that practice-led methods, by dint of focussing on + the lived experience where many of these topics converge, may be able to serve + as a useful methodological `glue'' for NIME by helping stimulate useful agonistic + discussion on our objects of study, and map the untidy contours of contemporary + practices. I contextualise this discussion by presenting two recently developed + improvisation systems and drawing from these some starting suggestions for how + attention to the grain of lived practice could usefully contribute to considerations + for designers in terms of the pursuit of musicality and the care required in considering + performances in evaluation.' + address: 'London, United Kingdom' + author: Owen Green + bibtex: "@inproceedings{ogreen2014,\n abstract = {To engage with questions of musicality\ + \ is to invite into consideration a complex network of topics beyond the mechanics\ + \ of soundful interaction with our interfaces. Drawing on the work of Born, I\ + \ sketch an outline of the reach of these topics. I suggest that practice-led\ + \ methods, by dint of focussing on the lived experience where many of these topics\ + \ converge, may be able to serve as a useful methodological `glue' for NIME by\ + \ helping stimulate useful agonistic discussion on our objects of study, and map\ + \ the untidy contours of contemporary practices. 
I contextualise this discussion\ + \ by presenting two recently developed improvisation systems and drawing from\ + \ these some starting suggestions for how attention to the grain of lived practice\ + \ could usefully contribute to considerations for designers in terms of the pursuit\ + \ of musicality and the care required in considering performances in evaluation.},\n\ + \ address = {London, United Kingdom},\n author = {Owen Green},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {5},\n pages = {142--146},\n title = {The Rearranger Ball:\ - \ Delayed Gestural Control of Musical Sound using Online Unsupervised Temporal\ - \ Segmentation},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_19.pdf},\n\ - \ year = {2023}\n}\n" + \ doi = {10.5281/zenodo.1178776},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {1--6},\n publisher = {Goldsmiths, University of London},\n title = {NIME,\ + \ Musicality and Practice-led Methods},\n url = {http://www.nime.org/proceedings/2014/nime2014_434.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178776 issn: 2220-4806 - month: May - numpages: 5 - pages: 142--146 - title: 'The Rearranger Ball: Delayed Gestural Control of Musical Sound using Online - Unsupervised Temporal Segmentation' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_19.pdf - year: 2023 + month: June + pages: 1--6 + publisher: 'Goldsmiths, University of London' + title: 'NIME, Musicality and Practice-led Methods' + url: http://www.nime.org/proceedings/2014/nime2014_434.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_20 - abstract: 'This paper introduces a performative ecosystem created with the aim of 
- promoting a joint expression between a live coder and an instrumentalist. The - live coding environment is based on TidalCycles, controlling a sample machine - implemented in SuperCollider. The instrumentalist can record short samples of - his/her playing in different buffers, which the live coder can then process. The - ecosystem was intensively used by the first and the second author of this paper - (respectively live coder and violinist) to develop a performance. At the end of - this paper, we provide a number of reflections on the entanglement of the different - roles and agencies that emerged during the rehearsals.' - address: 'Mexico City, Mexico' - articleno: 20 - author: Francesco Dal Rì and Francesca Zanghellini and Raul Masu - bibtex: "@article{nime2023_20,\n abstract = {This paper introduces a performative\ - \ ecosystem created with the aim of promoting a joint expression between a live\ - \ coder and an instrumentalist. The live coding environment is based on TidalCycles,\ - \ controlling a sample machine implemented in SuperCollider. The instrumentalist\ - \ can record short samples of his/her playing in different buffers, which the\ - \ live coder can then process. The ecosystem was intensively used by the first\ - \ and the second author of this paper (respectively live coder and violinist)\ - \ to develop a performance. 
At the end of this paper, we provide a number of reflections\ - \ on the entanglement of the different roles and agencies that emerged during\ - \ the rehearsals.},\n address = {Mexico City, Mexico},\n articleno = {20},\n author\ - \ = {Francesco Dal Rì and Francesca Zanghellini and Raul Masu},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {8},\n pages = {147--154},\n title = {Sharing the Same\ - \ Sound: Reflecting on Interactions between a Live Coder and a Violinist},\n track\ - \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_20.pdf},\n year\ - \ = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: jsokolovskis2014 + abstract: 'This paper presents a method for locating the position of a strike on + an acoustic drumhead. Near-field optical sensors were installed underneath the + drumhead of a commercially available snare drum. By implementing time difference + of arrival (TDOA) algorithm accuracy within 2cm was achieved in approximating + the location of strikes. The system can be used for drum performance analysis, + timbre analysis and can form a basis for an augmented drum performance system.' + address: 'London, United Kingdom' + author: Janis Sokolovskis and Andrew McPherson + bibtex: "@inproceedings{jsokolovskis2014,\n abstract = {This paper presents a method\ + \ for locating the position of a strike on an acoustic drumhead. Near-field optical\ + \ sensors were installed underneath the drumhead of a commercially available snare\ + \ drum. By implementing time difference of arrival (TDOA) algorithm accuracy within\ + \ 2cm was achieved in approximating the location of strikes. 
The system can be\ + \ used for drum performance analysis, timbre analysis and can form a basis for\ + \ an augmented drum performance system.},\n address = {London, United Kingdom},\n\ + \ author = {Janis Sokolovskis and Andrew McPherson},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178943},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {70--73},\n publisher = {Goldsmiths, University of London},\n title = {Optical\ + \ Measurement of Acoustic Drum Strike Locations},\n url = {http://www.nime.org/proceedings/2014/nime2014_436.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178943 issn: 2220-4806 - month: May - numpages: 8 - pages: 147--154 - title: 'Sharing the Same Sound: Reflecting on Interactions between a Live Coder - and a Violinist' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_20.pdf - year: 2023 + month: June + pages: 70--73 + publisher: 'Goldsmiths, University of London' + title: Optical Measurement of Acoustic Drum Strike Locations + url: http://www.nime.org/proceedings/2014/nime2014_436.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_21 - abstract: 'In this paper, the authors describe working with and on the T-Tree, a - device that integrates multiple instances of a gestural controller known as the - T-Stick. The T-Tree is used in two public performance contexts; the results of - those performances are summarized, potential improvements to the design of the - hardware and software are introduced, and issues are identified. Improvements - in the T-Tree from the first version are also discussed. Finally, the authors - present future design improvements for the T-Tree 2.0.' 
- address: 'Mexico City, Mexico' - articleno: 21 - author: Paul Buser and Kasey LV Pocius and Linnea Kirby and Marcelo Wanderley - bibtex: "@article{nime2023_21,\n abstract = {In this paper, the authors describe\ - \ working with and on the T-Tree, a device that integrates multiple instances\ - \ of a gestural controller known as the T-Stick. The T-Tree is used in two public\ - \ performance contexts; the results of those performances are summarized, potential\ - \ improvements to the design of the hardware and software are introduced, and\ - \ issues are identified. Improvements in the T-Tree from the first version are\ - \ also discussed. Finally, the authors present future design improvements for\ - \ the T-Tree 2.0.},\n address = {Mexico City, Mexico},\n articleno = {21},\n author\ - \ = {Paul Buser and Kasey LV Pocius and Linnea Kirby and Marcelo Wanderley},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {5},\n pages = {155--159},\n title\ - \ = {Towards the T-Tree 2.0: Lessons Learned From Performance With a Novel DMI\ - \ and Instrument Hub},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_21.pdf},\n\ - \ year = {2023}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon - issn: 2220-4806 - month: May - numpages: 5 - pages: 155--159 - title: 'Towards the T-Tree 2.0: Lessons Learned From Performance With a Novel DMI - and Instrument Hub' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_21.pdf - year: 2023 - - -- ENTRYTYPE: article - ID: nime2023_22 - abstract: "Deploying deep learning models on embedded devices is an arduous task:\ - \ oftentimes, there exist no platform-specific instructions, and compilation times\ - \ can be considerably large due 
to the limited computational resources available\ - \ on-device. Moreover, many music-making applications demand real-time inference.\ - \ Embedded hardware platforms for audio, such as Bela, offer an entry point for\ - \ beginners into physical audio computing; however, the need for cross-compilation\ - \ environments and low-level software development tools for deploying embedded\ - \ deep learning models imposes high entry barriers on non-expert users.\n\nWe\ - \ present a pipeline for deploying neural networks in the Bela embedded hardware\ - \ platform. In our pipeline, we include a tool to record a multichannel dataset\ - \ of sensor signals. Additionally, we provide a dockerised cross-compilation environment\ - \ for faster compilation. With this pipeline, we aim to provide a template for\ - \ programmers and makers to prototype and experiment with neural networks for\ - \ real-time embedded musical applications." - address: 'Mexico City, Mexico' - articleno: 22 - author: Teresa Pelinski and Rodrigo Diaz and Adan L. Benito Temprano and Andrew - McPherson - bibtex: "@article{nime2023_22,\n abstract = {Deploying deep learning models on embedded\ - \ devices is an arduous task: oftentimes, there exist no platform-specific instructions,\ - \ and compilation times can be considerably large due to the limited computational\ - \ resources available on-device. Moreover, many music-making applications demand\ - \ real-time inference. Embedded hardware platforms for audio, such as Bela, offer\ - \ an entry point for beginners into physical audio computing; however, the need\ - \ for cross-compilation environments and low-level software development tools\ - \ for deploying embedded deep learning models imposes high entry barriers on non-expert\ - \ users.\n\nWe present a pipeline for deploying neural networks in the Bela embedded\ - \ hardware platform. In our pipeline, we include a tool to record a multichannel\ - \ dataset of sensor signals. 
Additionally, we provide a dockerised cross-compilation\ - \ environment for faster compilation. With this pipeline, we aim to provide a\ - \ template for programmers and makers to prototype and experiment with neural\ - \ networks for real-time embedded musical applications.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {22},\n author = {Teresa Pelinski and Rodrigo Diaz\ - \ and Adan L. Benito Temprano and Andrew McPherson},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {7},\n pages = {160--166},\n title = {Pipeline for recording\ - \ datasets and running neural networks on the Bela embedded hardware platform},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_22.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: fmorreale2014 + abstract: 'This paper presents MINUET, a framework for musical interface design + grounded in the experience of the player. MINUET aims to provide new perspectives + on the design of musical interfaces, referred to as a general term that comprises + digital musical instruments and interactive installations. The ultimate purpose + is to reduce the complexity of the design space emphasizing the experience of + the player. MINUET is structured as a design process consisting of two stages: + goal and specifications. The reliability of MINUET is tested through a systematic + comparison with the related work and through a case study. To this end, we present + the design and prototyping of Hexagon, a new musical interface with learning purposes.' 
+ address: 'London, United Kingdom' + author: Fabio Morreale and Antonella De Angeli and Sile O'Modhrain + bibtex: "@inproceedings{fmorreale2014,\n abstract = {This paper presents MINUET,\ + \ a framework for musical interface design grounded in the experience of the player.\ + \ MINUET aims to provide new perspectives on the design of musical interfaces,\ + \ referred to as a general term that comprises digital musical instruments and\ + \ interactive installations. The ultimate purpose is to reduce the complexity\ + \ of the design space emphasizing the experience of the player. MINUET is structured\ + \ as a design process consisting of two stages: goal and specifications. The reliability\ + \ of MINUET is tested through a systematic comparison with the related work and\ + \ through a case study. To this end, we present the design and prototyping of\ + \ Hexagon, a new musical interface with learning purposes.},\n address = {London,\ + \ United Kingdom},\n author = {Fabio Morreale and Antonella De Angeli and Sile\ + \ O'Modhrain},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178879},\n issn\ + \ = {2220-4806},\n month = {June},\n pages = {467--472},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Musical Interface Design: An Experience-oriented\ + \ Framework},\n url = {http://www.nime.org/proceedings/2014/nime2014_437.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178879 issn: 2220-4806 - month: May - numpages: 7 - pages: 160--166 - title: Pipeline for recording datasets and running neural networks on the Bela embedded - hardware platform - track: Papers - url: http://nime.org/proceedings/2023/nime2023_22.pdf - year: 2023 + month: June + pages: 467--472 + publisher: 'Goldsmiths, University of London' + 
title: 'Musical Interface Design: An Experience-oriented Framework' + url: http://www.nime.org/proceedings/2014/nime2014_437.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_23 - abstract: 'The bandoneon is a free-reed instrument of great cultural value that - is currently struggling to ensure its conservation as heritage, mainly due to - its complex constitution, the lack of sufficient manufacturers to satisfy the - demand, and the high sales prices that this entails. Our research group has been - working on the task of revitalizing the instrument from a modern perspective, - carrying out musical and scientific research for the creation of an accessible - electronic bandoneon. As the next step in this endeavor, we present a method for - synthesizing the bandoneon sound using multiple wavetable interpolation, and parameter - mappings based on acoustic measurements. We discuss a method for capturing and - selecting the wavetables, the implementation on an embedded platform (Bela Mini), - and the trade-offs between realistic sound and computational efficiency. The synthesizer - runs in real-time and has a polyphony of approximately 12 voices, allowing for - an autonomously sounding electronic instrument.' - address: 'Mexico City, Mexico' - articleno: 23 - author: Juan M Ramos and Pablo Riera and Esteban Calcagno - bibtex: "@article{nime2023_23,\n abstract = {The bandoneon is a free-reed instrument\ - \ of great cultural value that is currently struggling to ensure its conservation\ - \ as heritage, mainly due to its complex constitution, the lack of sufficient\ - \ manufacturers to satisfy the demand, and the high sales prices that this entails.\ - \ Our research group has been working on the task of revitalizing the instrument\ - \ from a modern perspective, carrying out musical and scientific research for\ - \ the creation of an accessible electronic bandoneon. 
As the next step in this\ - \ endeavor, we present a method for synthesizing the bandoneon sound using multiple\ - \ wavetable interpolation, and parameter mappings based on acoustic measurements.\ - \ We discuss a method for capturing and selecting the wavetables, the implementation\ - \ on an embedded platform (Bela Mini), and the trade-offs between realistic sound\ - \ and computational efficiency. The synthesizer runs in real-time and has a polyphony\ - \ of approximately 12 voices, allowing for an autonomously sounding electronic\ - \ instrument.},\n address = {Mexico City, Mexico},\n articleno = {23},\n author\ - \ = {Juan M Ramos and Pablo Riera and Esteban Calcagno},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {7},\n pages = {167--173},\n title = {An embedded wavetable\ - \ synthesizer for the electronic bandoneon with parameter mappings based on acoustical\ - \ measurements},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_23.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: jbowers2014 + abstract: 'This paper outlines a concept of hybrid resonant assemblages, combinations + of varied materials excited by sound transducers, feeding back to themselves via + digital signal processing. We ground our concept as an extension of work by David + Tudor, Nicolas Collins and Bowers and Archer [NIME 2005] and draw on a variety + of critical perspectives in the social sciences and philosophy to explore such + assemblages as an alternative to more familiar ideas of instruments and interfaces. + We lay out a conceptual framework for the exploration of hybrid resonant assemblages + and describe how we have approached implementing them. Our performance experience + is presented and implications for work are discussed. 
In the light of our work, + we urge a reconsideration of the implicit norms of performance which underlie + much research in NIME. In particular, drawing on the philosophical work of Jean-Luc + Nancy, we commend a wider notion of touch that also recognises the performative + value of withholding contact.' + address: 'London, United Kingdom' + author: John Bowers and Annika Haas + bibtex: "@inproceedings{jbowers2014,\n abstract = {This paper outlines a concept\ + \ of hybrid resonant assemblages, combinations of varied materials excited by\ + \ sound transducers, feeding back to themselves via digital signal processing.\ + \ We ground our concept as an extension of work by David Tudor, Nicolas Collins\ + \ and Bowers and Archer [NIME 2005] and draw on a variety of critical perspectives\ + \ in the social sciences and philosophy to explore such assemblages as an alternative\ + \ to more familiar ideas of instruments and interfaces. We lay out a conceptual\ + \ framework for the exploration of hybrid resonant assemblages and describe how\ + \ we have approached implementing them. Our performance experience is presented\ + \ and implications for work are discussed. In the light of our work, we urge a\ + \ reconsideration of the implicit norms of performance which underlie much research\ + \ in NIME. 
In particular, drawing on the philosophical work of Jean-Luc Nancy,\ + \ we commend a wider notion of touch that also recognises the performative value\ + \ of withholding contact.},\n address = {London, United Kingdom},\n author = {John\ + \ Bowers and Annika Haas},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178718},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {7--12},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Hybrid Resonant Assemblages: Rethinking Instruments,\ + \ Touch and Performance in New Interfaces for Musical Expression},\n url = {http://www.nime.org/proceedings/2014/nime2014_438.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178718 issn: 2220-4806 - month: May - numpages: 7 - pages: 167--173 - title: An embedded wavetable synthesizer for the electronic bandoneon with parameter - mappings based on acoustical measurements - track: Papers - url: http://nime.org/proceedings/2023/nime2023_23.pdf - year: 2023 + month: June + pages: 7--12 + publisher: 'Goldsmiths, University of London' + title: 'Hybrid Resonant Assemblages: Rethinking Instruments, Touch and Performance + in New Interfaces for Musical Expression' + url: http://www.nime.org/proceedings/2014/nime2014_438.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_24 - abstract: 'This paper explores the concept of co-creativity in a performance for - feedback-augmented bass clarinet. The bass clarinet is augmented using a loudspeaker - placed on the bell and a supercardiod microphone placed inside the instrument''s - body, allowing for the generation of feedback that is subsequently processed by - a computational system to create new sound material. 
This feedback loop creates - a symbiotic relationship between the performer and the electronics, resulting - in the co-creation of the final piece, with the performer and the electronics - influencing each other. The result is a unique and ever-evolving musical experience - that poses interesting challenges to the traditional instrument--electronics and - composer--opera relationship. This paper reports on both the hardware and software - augmentation of the bass clarinet, and presents "WYPYM - Were you a part of your - mother?", a piece written especially for this augmented instrument and its feedback - system.' - address: 'Mexico City, Mexico' - articleno: 24 - author: Claudio Panariello and Chiara Percivati - bibtex: "@article{nime2023_24,\n abstract = {This paper explores the concept of\ - \ co-creativity in a performance for feedback-augmented bass clarinet. The bass\ - \ clarinet is augmented using a loudspeaker placed on the bell and a supercardiod\ - \ microphone placed inside the instrument's body, allowing for the generation\ - \ of feedback that is subsequently processed by a computational system to create\ - \ new sound material. This feedback loop creates a symbiotic relationship between\ - \ the performer and the electronics, resulting in the co-creation of the final\ - \ piece, with the performer and the electronics influencing each other. 
The result\ - \ is a unique and ever-evolving musical experience that poses interesting challenges\ - \ to the traditional instrument--electronics and composer--opera relationship.\ - \ This paper reports on both the hardware and software augmentation of the bass\ - \ clarinet, and presents \"WYPYM - Were you a part of your mother?\", a piece\ - \ written especially for this augmented instrument and its feedback system.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {24},\n author = {Claudio Panariello\ - \ and Chiara Percivati},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {6},\n\ - \ pages = {174--179},\n title = {“WYPYM”: A Study for Feedback-Augmented Bass\ - \ Clarinet},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_24.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: cgeiger2014 + abstract: 'We present a system that allows users to experience singing without singing + using gesture-based interaction techniques. We designed a set of body-related + interaction and multi-modal feedback techniques and developed a singing voice + synthesizer system that is controlled by the user''s mouth shapes and arm gestures. + Based on the adaption of a number of digital media-related techniques such as + face and body tracking, 3D rendering, singing voice synthesis and physical computing, + we developed a media installation that allows users to perform an aria without + real singing and provide the look and feel from a 20th century performance of + an opera singer. We evaluated this system preliminarily with users.' 
+ address: 'London, United Kingdom' + author: Cornelius Pöpel and Jochen Feitsch and Marco Strobel and Christian Geiger + bibtex: "@inproceedings{cgeiger2014,\n abstract = {We present a system that allows\ + \ users to experience singing without singing using gesture-based interaction\ + \ techniques. We designed a set of body-related interaction and multi-modal feedback\ + \ techniques and developed a singing voice synthesizer system that is controlled\ + \ by the user's mouth shapes and arm gestures. Based on the adaption of a number\ + \ of digital media-related techniques such as face and body tracking, 3D rendering,\ + \ singing voice synthesis and physical computing, we developed a media installation\ + \ that allows users to perform an aria without real singing and provide the look\ + \ and feel from a 20th century performance of an opera singer. We evaluated this\ + \ system preliminarily with users.},\n address = {London, United Kingdom},\n author\ + \ = {Cornelius Pöpel and Jochen Feitsch and Marco Strobel and Christian Geiger},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178905},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {359--362},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Design and Evaluation of a Gesture Controlled Singing\ + \ Voice Installation},\n url = {http://www.nime.org/proceedings/2014/nime2014_439.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178905 issn: 2220-4806 - month: May - numpages: 6 - pages: 174--179 - title: '“WYPYM”: A Study for Feedback-Augmented Bass Clarinet' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_24.pdf - year: 2023 + month: June + pages: 359--362 + publisher: 'Goldsmiths, University of London' + title: Design and 
Evaluation of a Gesture Controlled Singing Voice Installation + url: http://www.nime.org/proceedings/2014/nime2014_439.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_25 - abstract: 'This paper presents the design of the Brushing Interface, which aims - to transform brushing gestures into a genuine and expressive musical/sonic performance. - To achieve this, a hardware system consisting of a grid of 216 self-made force - sensitive resistor(FSR) sensors and 8 piezo microphones was implemented, which - enables high-fidelity gesture tracking and sound production closely tied with - brushing gestures. The hardware system, including the sensor itself, was made - in a DIY approach, which provides an economical and high-quality design strategy - for implementing a multi-touch interface. Moreover, it is combined with a unique - gesture mapping strategy that integrates multi-dimensional parameter mapping and - continuous gesture tracking, enabling an expressive performance that is highly - flexible to configure in various settings.' - address: 'Mexico City, Mexico' - articleno: 25 - author: Jaehoon Choi - bibtex: "@article{nime2023_25,\n abstract = {This paper presents the design of the\ - \ Brushing Interface, which aims to transform brushing gestures into a genuine\ - \ and expressive musical/sonic performance. To achieve this, a hardware system\ - \ consisting of a grid of 216 self-made force sensitive resistor(FSR) sensors\ - \ and 8 piezo microphones was implemented, which enables high-fidelity gesture\ - \ tracking and sound production closely tied with brushing gestures. The hardware\ - \ system, including the sensor itself, was made in a DIY approach, which provides\ - \ an economical and high-quality design strategy for implementing a multi-touch\ - \ interface. 
Moreover, it is combined with a unique gesture mapping strategy that\ - \ integrates multi-dimensional parameter mapping and continuous gesture tracking,\ - \ enabling an expressive performance that is highly flexible to configure in various\ - \ settings.},\n address = {Mexico City, Mexico},\n articleno = {25},\n author\ - \ = {Jaehoon Choi},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {6},\n\ - \ pages = {180--185},\n title = {Brushing Interface - DIY multi-touch interface\ - \ for expressive gestural performance},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_25.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: dwilliams2014 + abstract: 'This paper presents an implementation of a near real-time timbre morphing + signal processing system, designed to facilitate an element of `liveness'' and + unpredictability in a musical installation. The timbre morpher is a hybrid analysis + and synthesis technique based on Spectral Modeling Synthesis (an additive and + subtractive modeling technique). The musical installation forms an interactive + soundtrack in response to the series of Rosso Luana marble sculptures Shapes in + the Clouds, I, II, III, IV & V by artist Peter Randall-Page, exhibited at the + Peninsula Arts Gallery in Devon, UK, from 1 February to 29 March 2014. The timbre + morphing system is used to transform live input captured at each sculpture with + a discrete microphone array, by morphing towards noisy source signals that have + been associated with each sculpture as part of a pre-determined musical structure. + The resulting morphed audio is then fed-back to the gallery via a five-channel + speaker array. 
Visitors are encouraged to walk freely through the installation + and interact with the sound world, creating unique audio morphs based on their + own movements, voices, and incidental sounds.' + address: 'London, United Kingdom' + author: Duncan Williams and Peter Randall-Page and Eduardo Miranda + bibtex: "@inproceedings{dwilliams2014,\n abstract = {This paper presents an implementation\ + \ of a near real-time timbre morphing signal processing system, designed to facilitate\ + \ an element of `liveness' and unpredictability in a musical installation. The\ + \ timbre morpher is a hybrid analysis and synthesis technique based on Spectral\ + \ Modeling Synthesis (an additive and subtractive modeling technique). The musical\ + \ installation forms an interactive soundtrack in response to the series of Rosso\ + \ Luana marble sculptures Shapes in the Clouds, I, II, III, IV & V by artist Peter\ + \ Randall-Page, exhibited at the Peninsula Arts Gallery in Devon, UK, from 1 February\ + \ to 29 March 2014. The timbre morphing system is used to transform live input\ + \ captured at each sculpture with a discrete microphone array, by morphing towards\ + \ noisy source signals that have been associated with each sculpture as part of\ + \ a pre-determined musical structure. The resulting morphed audio is then fed-back\ + \ to the gallery via a five-channel speaker array. 
Visitors are encouraged to\ + \ walk freely through the installation and interact with the sound world, creating\ + \ unique audio morphs based on their own movements, voices, and incidental sounds.},\n\ + \ address = {London, United Kingdom},\n author = {Duncan Williams and Peter Randall-Page\ + \ and Eduardo Miranda},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178983},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {435--438},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Timbre morphing: near real-time hybrid synthesis\ + \ in a musical installation},\n url = {http://www.nime.org/proceedings/2014/nime2014_440.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178983 issn: 2220-4806 - month: May - numpages: 6 - pages: 180--185 - title: Brushing Interface - DIY multi-touch interface for expressive gestural performance - track: Papers - url: http://nime.org/proceedings/2023/nime2023_25.pdf - year: 2023 + month: June + pages: 435--438 + publisher: 'Goldsmiths, University of London' + title: 'Timbre morphing: near real-time hybrid synthesis in a musical installation' + url: http://www.nime.org/proceedings/2014/nime2014_440.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_26 - abstract: "This paper presents the Digitl, a digital processing system using a reduced\ - \ electric guitar as control input. The physical design is based on three fundamental\ - \ elements: String, Body and Electromagnetic Pickup. The main characteristic of\ - \ the instrument lies is the linear matrix x-y configuration of the strings and\ - \ frets. 
The purpose of the instrument is the application of individual signal\ - \ processing at each X-Y position.\nIt is described the technical aspects of the\ - \ Digitl, including the design of the matrix configuration and the digital signal\ - \ processing algorithms. Specifically, a set of Max/MSP patches that routes the\ - \ signals from the strings to the processing engine. \nThe experimental results\ - \ confirm the importance of the design and configuration of musical instruments\ - \ in the context of expressive performance." - address: 'Mexico City, Mexico' - articleno: 26 - author: Suso Romaris - bibtex: "@article{nime2023_26,\n abstract = {This paper presents the Digitl, a digital\ - \ processing system using a reduced electric guitar as control input. The physical\ - \ design is based on three fundamental elements: String, Body and Electromagnetic\ - \ Pickup. The main characteristic of the instrument lies is the linear matrix\ - \ x-y configuration of the strings and frets. The purpose of the instrument is\ - \ the application of individual signal processing at each X-Y position.\nIt is\ - \ described the technical aspects of the Digitl, including the design of the matrix\ - \ configuration and the digital signal processing algorithms. Specifically, a\ - \ set of Max/MSP patches that routes the signals from the strings to the processing\ - \ engine. 
\nThe experimental results confirm the importance of the design and\ - \ configuration of musical instruments in the context of expressive performance.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {26},\n author = {Suso Romaris},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {5},\n pages = {186--190},\n title\ - \ = {DIGITL A Reduction of Guitar},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_26.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: pvandertorren12014 + abstract: 'The Striso is a new expressive music instrument with an acoustic feel, + which is designed to be intuitive to play and playable everywhere. The sound of + every note can be precisely controlled using the direction and pressure sensitive + buttons, combined with instrument motion like tilting or shaking. It works standalone, + with an internal speaker and battery, and is meant as a self contained instrument + with its own distinct sound, but can also be connected to a computer to control + other synthesizers. The notes are arranged in an easy and systematic way, according + to the new DCompose note layout that is also presented in this paper. The DCompose + note layout is designed to be compact, ergonomic, easy to learn, and closely bound + to the harmonic properties of the notes.' + address: 'London, United Kingdom' + author: Piers Titus van der Torren + bibtex: "@inproceedings{pvandertorren12014,\n abstract = {The Striso is a new expressive\ + \ music instrument with an acoustic feel, which is designed to be intuitive to\ + \ play and playable everywhere. The sound of every note can be precisely controlled\ + \ using the direction and pressure sensitive buttons, combined with instrument\ + \ motion like tilting or shaking. 
It works standalone, with an internal speaker\ + \ and battery, and is meant as a self contained instrument with its own distinct\ + \ sound, but can also be connected to a computer to control other synthesizers.\ + \ The notes are arranged in an easy and systematic way, according to the new DCompose\ + \ note layout that is also presented in this paper. The DCompose note layout is\ + \ designed to be compact, ergonomic, easy to learn, and closely bound to the harmonic\ + \ properties of the notes.},\n address = {London, United Kingdom},\n author =\ + \ {Piers Titus van der Torren},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178957},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {615--620},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Striso, a Compact Expressive Instrument Based\ + \ on a New Isomorphic Note Layout},\n url = {http://www.nime.org/proceedings/2014/nime2014_442.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178957 issn: 2220-4806 - month: May - numpages: 5 - pages: 186--190 - title: DIGITL A Reduction of Guitar - track: Papers - url: http://nime.org/proceedings/2023/nime2023_26.pdf - year: 2023 + month: June + pages: 615--620 + publisher: 'Goldsmiths, University of London' + title: 'Striso, a Compact Expressive Instrument Based on a New Isomorphic Note Layout' + url: http://www.nime.org/proceedings/2014/nime2014_442.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_27 - abstract: 'The sustainability of Digital Musical Instruments (DMIs) is a crucial - concern within the NIME community, not only in the design of the instruments but - also in terms of sustaining the instrument over a prolonged period, promoting - longevity, and minimizing obsolescence. 
The risk of designing advanced instruments - becoming debris quickly is real if longevity is not actively considered. In this - paper, we present the process of redesigning a crafted DMI to fit a small-scale - production process while considering strategies that render the final design more - sustainable and maximize the object''s lifespan. We present the results of a critical - analysis of this process through a sustainability lens. From this analysis, we - distilled a number of reflections that could help similar design processes or - NIME crafting activities. The most innovative reflections are related to inscribing - sustainability into the practice of using the instruments. From this perspective, - we suggest considering the future user as a designer capable of fixing, adjusting, - redesigning, or hacking the DMI and actively provide possible solutions that can - significantly extend the lifespan of a DMI and, consequently, its sustainability.' - address: 'Mexico City, Mexico' - articleno: 27 - author: Nicolo Merendino and Giacomo Lepri and Antonio Rodà and Raul Masu - bibtex: "@article{nime2023_27,\n abstract = {The sustainability of Digital Musical\ - \ Instruments (DMIs) is a crucial concern within the NIME community, not only\ - \ in the design of the instruments but also in terms of sustaining the instrument\ - \ over a prolonged period, promoting longevity, and minimizing obsolescence. The\ - \ risk of designing advanced instruments becoming debris quickly is real if longevity\ - \ is not actively considered. In this paper, we present the process of redesigning\ - \ a crafted DMI to fit a small-scale production process while considering strategies\ - \ that render the final design more sustainable and maximize the object's lifespan.\ - \ We present the results of a critical analysis of this process through a sustainability\ - \ lens. 
From this analysis, we distilled a number of reflections that could help\ - \ similar design processes or NIME crafting activities. The most innovative reflections\ - \ are related to inscribing sustainability into the practice of using the instruments.\ - \ From this perspective, we suggest considering the future user as a designer\ - \ capable of fixing, adjusting, redesigning, or hacking the DMI and actively provide\ - \ possible solutions that can significantly extend the lifespan of a DMI and,\ - \ consequently, its sustainability.},\n address = {Mexico City, Mexico},\n articleno\ - \ = {27},\n author = {Nicolo Merendino and Giacomo Lepri and Antonio Rodà and\ - \ Raul Masu},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {9},\n pages = {191--199},\n\ - \ title = {Redesigning the Chowndolo: a Reflection-on-action Analysis to Identify\ - \ Sustainable Strategies for NIMEs Design},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_27.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: mbugge2014 + abstract: 'Reunion2012 is a work for electronically modified chessboard, chess players + and electronic instruments. The work is based on---but also departs from---John + Cage''s Reunion, which premiered at the Sightsoundsystems Festival, Toronto, 1968. + In the original performance, Cage and Marcel Duchamp played chess on an electronic + board constructed by Lowell Cross. The board `conducted'' various electronic sound + sources played by Cross, Gordon Mumma, David Tudor, and David Behrman, using photoresistors + fitted under the squares [1]. Reunion2012, on the other hand, utilises magnet + sensors via an Arduino. 
Like in Cage''s Variations V, this resulted in a musical + situation where the improvising musicians had full control over their own sound, + but no control regarding when their sound may be heard. In addition to a concert + version, this paper also describes an interactive installation based on the same + hardware.' + address: 'London, United Kingdom' + author: Anders Tveit and Hans Wilmers and Notto Thelle and Magnus Bugge and Thom + Johansen and Eskil Muan Sæther + bibtex: "@inproceedings{mbugge2014,\n abstract = {Reunion2012 is a work for electronically\ + \ modified chessboard, chess players and electronic instruments. The work is based\ + \ on---but also departs from---John Cage's Reunion, which premiered at the Sightsoundsystems\ + \ Festival, Toronto, 1968. In the original performance, Cage and Marcel Duchamp\ + \ played chess on an electronic board constructed by Lowell Cross. The board `conducted'\ + \ various electronic sound sources played by Cross, Gordon Mumma, David Tudor,\ + \ and David Behrman, using photoresistors fitted under the squares [1]. Reunion2012,\ + \ on the other hand, utilises magnet sensors via an Arduino. Like in Cage's Variations\ + \ V, this resulted in a musical situation where the improvising musicians had\ + \ full control over their own sound, but no control regarding when their sound\ + \ may be heard. 
In addition to a concert version, this paper also describes an\ + \ interactive installation based on the same hardware.},\n address = {London,\ + \ United Kingdom},\n author = {Anders Tveit and Hans Wilmers and Notto Thelle\ + \ and Magnus Bugge and Thom Johansen and Eskil Muan S{\\ae}ther},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178969},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {561--564},\n publisher = {Goldsmiths, University of London},\n\ + \ title = {{Reunion}2012: A Novel Interface for Sound Producing Actions Through\ + \ the Game of Chess},\n url = {http://www.nime.org/proceedings/2014/nime2014_443.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178969 issn: 2220-4806 - month: May - numpages: 9 - pages: 191--199 - title: 'Redesigning the Chowndolo: a Reflection-on-action Analysis to Identify Sustainable - Strategies for NIMEs Design' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_27.pdf - year: 2023 + month: June + pages: 561--564 + publisher: 'Goldsmiths, University of London' + title: 'Reunion2012: A Novel Interface for Sound Producing Actions Through the Game + of Chess' + url: http://www.nime.org/proceedings/2014/nime2014_443.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_28 - abstract: 'WebChucK is ChucK—a strongly-timed computer music programming language—running - on the web. Recent advancements in browser technology (including WebAssembly and - the Web Audio API’s AudioWorklet interface) have enabled languages written in - C/C++ (like ChucK) to run in web browsers with nearly native-code performance. 
- Early adopters have explored the many practical and creative possibilities that - WebChucK enables, ranging from a WebChucK integrated development environment to - interactive browser-based audiovisual experiences. WebChucK has also been adopted - as the programming platform in an introductory computer music course at Stanford - University. Importantly, by running in any browser, WebChucK broadens and simplifies - access to computer music programming, opening the door for new users and creative - workflows. In this paper, we discuss WebChucK and its applications to date, explain - how the tool was designed and implemented, and evaluate the unique affordances - of combining computer music programming with a web development workflow.' - address: 'Mexico City, Mexico' - articleno: 28 - author: Michael Mulshine and Ge Wang and Chris Chafe and Jack Atherton and terry - feng and Celeste Betancur - bibtex: "@article{nime2023_28,\n abstract = {WebChucK is ChucK—a strongly-timed\ - \ computer music programming language—running on the web. Recent advancements\ - \ in browser technology (including WebAssembly and the Web Audio API’s AudioWorklet\ - \ interface) have enabled languages written in C/C++ (like ChucK) to run in web\ - \ browsers with nearly native-code performance. Early adopters have explored the\ - \ many practical and creative possibilities that WebChucK enables, ranging from\ - \ a WebChucK integrated development environment to interactive browser-based audiovisual\ - \ experiences. WebChucK has also been adopted as the programming platform in an\ - \ introductory computer music course at Stanford University. Importantly, by running\ - \ in any browser, WebChucK broadens and simplifies access to computer music programming,\ - \ opening the door for new users and creative workflows. 
In this paper, we discuss\ - \ WebChucK and its applications to date, explain how the tool was designed and\ - \ implemented, and evaluate the unique affordances of combining computer music\ - \ programming with a web development workflow.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {28},\n author = {Michael Mulshine and Ge Wang and Chris Chafe and\ - \ Jack Atherton and terry feng and Celeste Betancur},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {6},\n pages = {200--205},\n title = {WebChucK: Computer\ - \ Music Programming on the Web},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_28.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: avantroyer2014 + abstract: 'Acoustic feedback controllers (AFCs) are typically applied to solve feedback + problems evident in applications such as public address (PA) systems, hearing + aids, and speech applications. Applying the techniques of AFCs to different contexts, + such as musical performance, sound installations, and product design, presents + a unique insight into the research of embodied sonic interfaces and environments. + This paper presents techniques that use digital acoustic feedback control algorithms + to augment the sonic properties of environments and discusses approaches to the + design of sonically playful experiences that apply such techniques. Three experimental + prototypes are described to illustrate how the techniques can be applied to versatile + environments and continuous coupling of users'' audible actions with sonically + augmented environments. The knowledge obtained from these prototypes has led to + Acoustic Feedback Ecology System (AFES) design patterns. 
The paper concludes with + some future research directions based on the prototypes and proposes several other + potentially useful applications ranging from musical performance to everyday contexts.' + address: 'London, United Kingdom' + author: Akito van Troyer + bibtex: "@inproceedings{avantroyer2014,\n abstract = {Acoustic feedback controllers\ + \ (AFCs) are typically applied to solve feedback problems evident in applications\ + \ such as public address (PA) systems, hearing aids, and speech applications.\ + \ Applying the techniques of AFCs to different contexts, such as musical performance,\ + \ sound installations, and product design, presents a unique insight into the\ + \ research of embodied sonic interfaces and environments. This paper presents\ + \ techniques that use digital acoustic feedback control algorithms to augment\ + \ the sonic properties of environments and discusses approaches to the design\ + \ of sonically playful experiences that apply such techniques. Three experimental\ + \ prototypes are described to illustrate how the techniques can be applied to\ + \ versatile environments and continuous coupling of users' audible actions with\ + \ sonically augmented environments. The knowledge obtained from these prototypes\ + \ has led to Acoustic Feedback Ecology System (AFES) design patterns. 
The paper\ + \ concludes with some future research directions based on the prototypes and proposes\ + \ several other potentially useful applications ranging from musical performance\ + \ to everyday contexts.},\n address = {London, United Kingdom},\n author = {Akito\ + \ van Troyer},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178961},\n issn\ + \ = {2220-4806},\n month = {June},\n pages = {118--121},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Composing Embodied Sonic Play Experiences:\ + \ Towards Acoustic Feedback Ecology},\n url = {http://www.nime.org/proceedings/2014/nime2014_444.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178961 issn: 2220-4806 - month: May - numpages: 6 - pages: 200--205 - title: 'WebChucK: Computer Music Programming on the Web' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_28.pdf - year: 2023 + month: June + pages: 118--121 + publisher: 'Goldsmiths, University of London' + title: 'Composing Embodied Sonic Play Experiences: Towards Acoustic Feedback Ecology' + url: http://www.nime.org/proceedings/2014/nime2014_444.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_29 - abstract: 'We present an affordable, lightweight, and highly portable multichannel - audio solution for surround sound applications and installations. The system was - developed for the “Sound in Space” course, taught by on of the authors at CCRMA - in the winter quarter of 2021, when education was fully remote. Students in the - course were able to listen to and create surround sound compositions from their - homes or dorm rooms. 
Beyond the course, artists have demonstrated the versatility - and creative affordances of this cheap, lightweight, and highly portable setup - in sound installations and other custom speaker arrays. Such an affordable and - versatile system has the potential to provide more students and artists access - to spatialized sound production and multichannel audio in their work, enabling - deeper technical education and creative applications ranging from Ambisonics to - sound installations. Importantly, the transportability and ease of assembling - this system enables multichannel audio work to be developed outside of the physical - confines of academic institutions, including in spaces like apartments, garages, - the outdoors, and more. This paper steps through the process of creating such - a system, detailing the challenges faced and reflecting on the affordances in - educational and creative usage.' - address: 'Mexico City, Mexico' - articleno: 29 - author: Fernando Lopez-Lezcano and Michael Mulshine - bibtex: "@article{nime2023_29,\n abstract = {We present an affordable, lightweight,\ - \ and highly portable multichannel audio solution for surround sound applications\ - \ and installations. The system was developed for the “Sound in Space” course,\ - \ taught by on of the authors at CCRMA in the winter quarter of 2021, when education\ - \ was fully remote. Students in the course were able to listen to and create surround\ - \ sound compositions from their homes or dorm rooms. 
Beyond the course, artists\ - \ have demonstrated the versatility and creative affordances of this cheap, lightweight,\ - \ and highly portable setup in sound installations and other custom speaker arrays.\ - \ Such an affordable and versatile system has the potential to provide more students\ - \ and artists access to spatialized sound production and multichannel audio in\ - \ their work, enabling deeper technical education and creative applications ranging\ - \ from Ambisonics to sound installations. Importantly, the transportability and\ - \ ease of assembling this system enables multichannel audio work to be developed\ - \ outside of the physical confines of academic institutions, including in spaces\ - \ like apartments, garages, the outdoors, and more. This paper steps through the\ - \ process of creating such a system, detailing the challenges faced and reflecting\ - \ on the affordances in educational and creative usage.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {29},\n author = {Fernando Lopez-Lezcano and Michael\ - \ Mulshine},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {206--211},\n\ - \ title = {Affordable Speaker Arrays for Education and Artists},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_29.pdf},\n year = {2023}\n\ +- ENTRYTYPE: inproceedings + ID: mcartwright2014 + abstract: 'Programming an audio synthesizer can be a difficult task for many. However, + if a user has a general idea of the sound they are trying to program, they may + be able to imitate it with their voice. This paper presents SynthAssist, a system + for interactively searching the synthesis space of an audio synthesizer. In this + work, we present how to use the system for querying a database of audio synthesizer + patches (i.e. 
settings/parameters) by vocal imitation and user feedback. To account + for the limitations of the human voice, it uses both absolute and relative time + series representations of features and relevance feedback on both the feature + weights and time series to refine the query. The method presented in this paper + can be used to search through large databases of previously existing ``factory + presets'''' or program a synthesizer using the data-driven approach to automatic + synthesizer programming.' + address: 'London, United Kingdom' + author: Mark Cartwright and Bryan Pardo + bibtex: "@inproceedings{mcartwright2014,\n abstract = {Programming an audio synthesizer\ + \ can be a difficult task for many. However, if a user has a general idea of the\ + \ sound they are trying to program, they may be able to imitate it with their\ + \ voice. This paper presents SynthAssist, a system for interactively searching\ + \ the synthesis space of an audio synthesizer. In this work, we present how to\ + \ use the system for querying a database of audio synthesizer patches (i.e. settings/parameters)\ + \ by vocal imitation and user feedback. To account for the limitations of the\ + \ human voice, it uses both absolute and relative time series representations\ + \ of features and relevance feedback on both the feature weights and time series\ + \ to refine the query. 
The method presented in this paper can be used to search\ + \ through large databases of previously existing ``factory presets'' or program\ + \ a synthesizer using the data-driven approach to automatic synthesizer programming.},\n\ + \ address = {London, United Kingdom},\n author = {Mark Cartwright and Bryan Pardo},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178730},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {363--366},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {SynthAssist: Querying an Audio Synthesizer by Vocal Imitation},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_446.pdf},\n year = {2014}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178730 issn: 2220-4806 - month: May - numpages: 6 - pages: 206--211 - title: Affordable Speaker Arrays for Education and Artists - track: Papers - url: http://nime.org/proceedings/2023/nime2023_29.pdf - year: 2023 + month: June + pages: 363--366 + publisher: 'Goldsmiths, University of London' + title: 'SynthAssist: Querying an Audio Synthesizer by Vocal Imitation' + url: http://www.nime.org/proceedings/2014/nime2014_446.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_30 - abstract: 'There are multiple barriers to the long term use of digital musical instruments. - Among several issues related to instrument accessibility, many DMIs remain as - prototypes in research labs never becoming a robust and stable instrument. Technical - support is an important part of the long term use of a DMI. Though all musical - instruments can eventually break, managing how they are going to be fixed and - built within a research organisation can help with the continued usage of the - instrument. 
We apply reliability analysis techniques to estimate the reliability, - availability and maintainability characteristics of the T-Stick. Using these characteristics - we estimate the amount of spare parts needed to maintain a 99% availability target - for the T-Stick. This analysis provides insights on expected maintenance time, - costs, and personnel needed when supporting and building DMIs.' - address: 'Mexico City, Mexico' - articleno: 30 - author: Albert-Ngabo Niyonsenga and Marcelo Wanderley - bibtex: "@article{nime2023_30,\n abstract = {There are multiple barriers to the\ - \ long term use of digital musical instruments. Among several issues related to\ - \ instrument accessibility, many DMIs remain as prototypes in research labs never\ - \ becoming a robust and stable instrument. Technical support is an important part\ - \ of the long term use of a DMI. Though all musical instruments can eventually\ - \ break, managing how they are going to be fixed and built within a research organisation\ - \ can help with the continued usage of the instrument. We apply reliability analysis\ - \ techniques to estimate the reliability, availability and maintainability characteristics\ - \ of the T-Stick. Using these characteristics we estimate the amount of spare\ - \ parts needed to maintain a 99% availability target for the T-Stick. 
This analysis\ - \ provides insights on expected maintenance time, costs, and personnel needed\ - \ when supporting and building DMIs.},\n address = {Mexico City, Mexico},\n articleno\ - \ = {30},\n author = {Albert-Ngabo Niyonsenga and Marcelo Wanderley},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {7},\n pages = {212--218},\n title = {Tools and\ - \ Techniques for the Maintenance and Support of Digital Musical Instruments},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_30.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: chutchins2014 + abstract: 'Government spying on internet traffic has seemingly become ubiquitous. + Not to be left out, the private sector tracks our online footprint via our ISP + or with a little help from facebook. Web services, such as advertisement servers + and Google track our progress as we surf the net and click on links. The Mozilla + plugin, Lightbeam (formerly Collusion), shows the user a visual map of every site + a surfer sends data to. A interconnected web of advertisers and other (otherwise) + invisible data-gatherers quickly builds during normal usage. We propose modifying + this plugin so that as the graph builds, its state is broadcast visa OSC. Members + of BiLE will receive and interpret those OSC messages in SuperCollider and PD. + We will act as a translational object in a process of live-sonification. The collected + data is the material with which we will develop a set of music tracks based on + patterns we may discover. The findings of our data collection and the developed + music will be presented in the form of an audiovisual live performance. 
Snippets + of collected text and URLs will both form the basis of our audio interpretation, + but also be projected on to a screen, so an audience can voyeuristically experience + the actions taken on their behalf by governments and advertisers. After the concert, + all of the scripts and documentation related to the data collection and sharing + in the piece will be posted to github under a GPL license.' + address: 'London, United Kingdom' + author: Charles Hutchins and Holger Ballweg and Shelly Knotts and Jonas Hummel and + Antonio Roberts + bibtex: "@inproceedings{chutchins2014,\n abstract = {Government spying on internet\ + \ traffic has seemingly become ubiquitous. Not to be left out, the private sector\ + \ tracks our online footprint via our ISP or with a little help from facebook.\ + \ Web services, such as advertisement servers and Google track our progress as\ + \ we surf the net and click on links. The Mozilla plugin, Lightbeam (formerly\ + \ Collusion), shows the user a visual map of every site a surfer sends data to.\ + \ A interconnected web of advertisers and other (otherwise) invisible data-gatherers\ + \ quickly builds during normal usage. We propose modifying this plugin so that\ + \ as the graph builds, its state is broadcast visa OSC. Members of BiLE will receive\ + \ and interpret those OSC messages in SuperCollider and PD. We will act as a translational\ + \ object in a process of live-sonification. The collected data is the material\ + \ with which we will develop a set of music tracks based on patterns we may discover.\ + \ The findings of our data collection and the developed music will be presented\ + \ in the form of an audiovisual live performance. Snippets of collected text and\ + \ URLs will both form the basis of our audio interpretation, but also be projected\ + \ on to a screen, so an audience can voyeuristically experience the actions taken\ + \ on their behalf by governments and advertisers. 
After the concert, all of the\ + \ scripts and documentation related to the data collection and sharing in the\ + \ piece will be posted to github under a GPL license.},\n address = {London, United\ + \ Kingdom},\n author = {Charles Hutchins and Holger Ballweg and Shelly Knotts\ + \ and Jonas Hummel and Antonio Roberts},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178810},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {497--498},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Soundbeam: A Platform for Sonyfing Web Tracking},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_447.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178810 issn: 2220-4806 - month: May - numpages: 7 - pages: 212--218 - title: Tools and Techniques for the Maintenance and Support of Digital Musical Instruments - track: Papers - url: http://nime.org/proceedings/2023/nime2023_30.pdf - year: 2023 + month: June + pages: 497--498 + publisher: 'Goldsmiths, University of London' + title: 'Soundbeam: A Platform for Sonyfing Web Tracking' + url: http://www.nime.org/proceedings/2014/nime2014_447.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_31 - abstract: 'While digital music technologies are rapidly growing, music communities - using traditional acoustic instruments are sometimes unable to take full advantage - of all of the digital processing techniques available to electronic musicians. - One way to include them in the latest developments is to develop interfaces connecting - non-electronic instruments to the digital world. This paper presents HarMIDI, - a sensor system to convert keystrokes on an Indian harmonium to MIDI. The paper - presents the sensor assembly, calibration methods, and the algorithm to output - MIDI. 
The calibration methods calibrate the notes and temporally synchronize the - MIDI stream with audio. The system has been evaluated for time synchronization - of onsets and offsets. The sensor setup is affordable, portable and can be used - with any existing harmonium.' - address: 'Mexico City, Mexico' - articleno: 31 - author: Suraj Jaiswal and Vipul Arora - bibtex: "@article{nime2023_31,\n abstract = {While digital music technologies are\ - \ rapidly growing, music communities using traditional acoustic instruments are\ - \ sometimes unable to take full advantage of all of the digital processing techniques\ - \ available to electronic musicians. One way to include them in the latest developments\ - \ is to develop interfaces connecting non-electronic instruments to the digital\ - \ world. This paper presents HarMIDI, a sensor system to convert keystrokes on\ - \ an Indian harmonium to MIDI. The paper presents the sensor assembly, calibration\ - \ methods, and the algorithm to output MIDI. The calibration methods calibrate\ - \ the notes and temporally synchronize the MIDI stream with audio. The system\ - \ has been evaluated for time synchronization of onsets and offsets. 
The sensor\ - \ setup is affordable, portable and can be used with any existing harmonium.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {31},\n author = {Suraj Jaiswal\ - \ and Vipul Arora},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {5},\n\ - \ pages = {219--223},\n title = {HarMIDI: Sensor System To Read MIDI from Indian\ - \ Harmoniums},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_31.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: jcomajuncosas2014 + abstract: 'According to the tradition, music ensembles are usually lead by a conductor + who is the responsible to coordinate and guide the group under a specific musical + criteria. Similarly, computer ensembles resort to a conductor to keep the synchronization + and structural coordination of the performance, often with the assistance of software. + Achieving integration and coherence in a networked performance, however, can be + challenging in certain scenarios. This is the case for configurations with a high + degree of mutual interdependence and shared control. This paper focuses on the + design strategies for developing a software based conductor assistant for collective + instruments. We propose a novel conductor dimension space representation for collective + instruments, which takes into account both its social and structural features. + We present a case study of a collective instrument implementing a software conductor. + Finally, we discuss the implications of human and machine conduction schemes in + the context of the proposed dimension space.' 
+ address: 'London, United Kingdom' + author: Josep Comajuncosas and Enric Guaus + bibtex: "@inproceedings{jcomajuncosas2014,\n abstract = {According to the tradition,\ + \ music ensembles are usually lead by a conductor who is the responsible to coordinate\ + \ and guide the group under a specific musical criteria. Similarly, computer ensembles\ + \ resort to a conductor to keep the synchronization and structural coordination\ + \ of the performance, often with the assistance of software. Achieving integration\ + \ and coherence in a networked performance, however, can be challenging in certain\ + \ scenarios. This is the case for configurations with a high degree of mutual\ + \ interdependence and shared control. This paper focuses on the design strategies\ + \ for developing a software based conductor assistant for collective instruments.\ + \ We propose a novel conductor dimension space representation for collective instruments,\ + \ which takes into account both its social and structural features. We present\ + \ a case study of a collective instrument implementing a software conductor. 
Finally,\ + \ we discuss the implications of human and machine conduction schemes in the context\ + \ of the proposed dimension space.},\n address = {London, United Kingdom},\n author\ + \ = {Josep Comajuncosas and Enric Guaus},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178736},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {513--516},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Conducting Collective Instruments : A Case\ + \ Study},\n url = {http://www.nime.org/proceedings/2014/nime2014_448.pdf},\n year\ + \ = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178736 issn: 2220-4806 - month: May - numpages: 5 - pages: 219--223 - title: 'HarMIDI: Sensor System To Read MIDI from Indian Harmoniums' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_31.pdf - year: 2023 + month: June + pages: 513--516 + publisher: 'Goldsmiths, University of London' + title: 'Conducting Collective Instruments : A Case Study' + url: http://www.nime.org/proceedings/2014/nime2014_448.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_32 - abstract: 'We describe the Living Looper, a real-time software system for prediction - and continuation of audio signals in the format of a looping pedal. Each of several - channels is activated by a footswitch and repeats or continues incoming audio - using neural synthesis. The live looping pedal format is familiar to electric - guitarists and electronic musicians, which helps the instrument to serve as a - boundary object for musicians and technologists of different backgrounds. Each - Living Loop channel learns in the context of what the other channels are doing, - including those which are momentarily controlled by human players. 
This leads - to shifting networks of agency and control between players and Living Loops. In - this paper we present the ongoing design of the Living Looper as well as preliminary - encounters with musicians in a workshop and concert setting.' - address: 'Mexico City, Mexico' - articleno: 32 - author: Victor Shepardson and Thor Magnusson - bibtex: "@article{nime2023_32,\n abstract = {We describe the Living Looper, a real-time\ - \ software system for prediction and continuation of audio signals in the format\ - \ of a looping pedal. Each of several channels is activated by a footswitch and\ - \ repeats or continues incoming audio using neural synthesis. The live looping\ - \ pedal format is familiar to electric guitarists and electronic musicians, which\ - \ helps the instrument to serve as a boundary object for musicians and technologists\ - \ of different backgrounds. Each Living Loop channel learns in the context of\ - \ what the other channels are doing, including those which are momentarily controlled\ - \ by human players. This leads to shifting networks of agency and control between\ - \ players and Living Loops. 
In this paper we present the ongoing design of the\ - \ Living Looper as well as preliminary encounters with musicians in a workshop\ - \ and concert setting.},\n address = {Mexico City, Mexico},\n articleno = {32},\n\ - \ author = {Victor Shepardson and Thor Magnusson},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {8},\n pages = {224--231},\n title = {The Living Looper:\ - \ Rethinking the Musical Loop as a Machine Action-Perception Loop},\n track =\ - \ {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_32.pdf},\n year\ - \ = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: mgurevich12014 + abstract: 'Drawing on concepts from systemics, cybernetics, and musical automata, + this paper proposes a mechatronic, electroacoustic instrument that allows for + shared control between programmed, mechanized motion and a human interactor. We + suggest that such an instrument, situated somewhere between a robotic musical + instrument and a passive controller, will foster the emergence of new, complex, + and meaningful modes of musical interaction. In line with the methodological principles + of practice as research, we describe the development and design of one such instrument-Stringtrees. + The design process also reflects the notion of ambiguity as a resource in design: + The instrument was endowed with a collection of sensors, controls, and actuators + without a highly specific or prescriptive model for how a musician would interact + with it.' 
+ address: 'London, United Kingdom' + author: Michael Gurevich + bibtex: "@inproceedings{mgurevich12014,\n abstract = {Drawing on concepts from systemics,\ + \ cybernetics, and musical automata, this paper proposes a mechatronic, electroacoustic\ + \ instrument that allows for shared control between programmed, mechanized motion\ + \ and a human interactor. We suggest that such an instrument, situated somewhere\ + \ between a robotic musical instrument and a passive controller, will foster the\ + \ emergence of new, complex, and meaningful modes of musical interaction. In line\ + \ with the methodological principles of practice as research, we describe the\ + \ development and design of one such instrument-Stringtrees. The design process\ + \ also reflects the notion of ambiguity as a resource in design: The instrument\ + \ was endowed with a collection of sensors, controls, and actuators without a\ + \ highly specific or prescriptive model for how a musician would interact with\ + \ it.},\n address = {London, United Kingdom},\n author = {Michael Gurevich},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178780},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {487--490},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Distributed Control in a Mechatronic Musical Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_449.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178780 issn: 2220-4806 - month: May - numpages: 8 - pages: 224--231 - title: 'The Living Looper: Rethinking the Musical Loop as a Machine Action-Perception - Loop' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_32.pdf - year: 2023 + month: June + pages: 487--490 + publisher: 'Goldsmiths, University of 
London' + title: Distributed Control in a Mechatronic Musical Instrument + url: http://www.nime.org/proceedings/2014/nime2014_449.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_33 - abstract: 'This paper relates an early art-research collaboration between two practitioners - in machine learning and virtual worlds toward new embodied musical experiences - of Artificial Intelligence (AI). Instead of a digital music instrument or a music-generating - agent, we propose to craft a soundwalk experience where a human person moves through - a three-dimensional virtual world to explore a latent sound space generated by - deep learning. We report on the diffractive prototyping and iterative crafting - of three such soundwalks through/out deep latent spaces, using nn~ and New Atlantis - as computational platforms for AI audio processing and virtual world experimentation. - We share critical perspectives emerging from our latent soundwalking practice, - with the hope that they contribute to ongoing community-wide reflections toward - new AI for musical expression.' - address: 'Mexico City, Mexico' - articleno: 33 - author: Hugo Scurto and Ludmila Postel - bibtex: "@article{nime2023_33,\n abstract = {This paper relates an early art-research\ - \ collaboration between two practitioners in machine learning and virtual worlds\ - \ toward new embodied musical experiences of Artificial Intelligence (AI). Instead\ - \ of a digital music instrument or a music-generating agent, we propose to craft\ - \ a soundwalk experience where a human person moves through a three-dimensional\ - \ virtual world to explore a latent sound space generated by deep learning. We\ - \ report on the diffractive prototyping and iterative crafting of three such soundwalks\ - \ through/out deep latent spaces, using nn~ and New Atlantis as computational\ - \ platforms for AI audio processing and virtual world experimentation. 
We share\ - \ critical perspectives emerging from our latent soundwalking practice, with the\ - \ hope that they contribute to ongoing community-wide reflections toward new AI\ - \ for musical expression.},\n address = {Mexico City, Mexico},\n articleno = {33},\n\ - \ author = {Hugo Scurto and Ludmila Postel},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {4},\n pages = {232--235},\n title = {Soundwalking Deep Latent Spaces},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_33.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: dschwarz12014 + abstract: 'We propose ways of enriching the timbral potential of gestural sonic + material captured via piezo or contact microphones, through latency-free convolution + of the microphone signal with grains from a sound corpus. This creates a new way + to combine the sonic richness of large sound corpora, easily accessible via navigation + through a timbral descriptor space, with the intuitive gestural interaction with + a surface, captured by any contact microphone. We use convolution to excite the + grains from the corpus via the microphone input, capturing the contact interaction + sounds, which allows articulation of the corpus by hitting, scratching, or strumming + a surface with various parts of the hands or objects. We also show how changes + of grains have to be carefully handled, how one can smoothly interpolate between + neighbouring grains, and finally evaluate the system against previous attempts.' 
+ address: 'London, United Kingdom' + author: Diemo Schwarz and Pierre Alexandre Tremblay and Alex Harker + bibtex: "@inproceedings{dschwarz12014,\n abstract = {We propose ways of enriching\ + \ the timbral potential of gestural sonic material captured via piezo or contact\ + \ microphones, through latency-free convolution of the microphone signal with\ + \ grains from a sound corpus. This creates a new way to combine the sonic richness\ + \ of large sound corpora, easily accessible via navigation through a timbral descriptor\ + \ space, with the intuitive gestural interaction with a surface, captured by any\ + \ contact microphone. We use convolution to excite the grains from the corpus\ + \ via the microphone input, capturing the contact interaction sounds, which allows\ + \ articulation of the corpus by hitting, scratching, or strumming a surface with\ + \ various parts of the hands or objects. We also show how changes of grains have\ + \ to be carefully handled, how one can smoothly interpolate between neighbouring\ + \ grains, and finally evaluate the system against previous attempts.},\n address\ + \ = {London, United Kingdom},\n author = {Diemo Schwarz and Pierre Alexandre Tremblay\ + \ and Alex Harker},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178935},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {247--250},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Rich Contacts: Corpus-Based Convolution of\ + \ Contact Interaction Sound for Enhanced Musical Expression},\n url = {http://www.nime.org/proceedings/2014/nime2014_451.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178935 issn: 2220-4806 - month: May - numpages: 4 - pages: 232--235 - title: Soundwalking Deep Latent Spaces - track: Papers - 
url: http://nime.org/proceedings/2023/nime2023_33.pdf - year: 2023 + month: June + pages: 247--250 + publisher: 'Goldsmiths, University of London' + title: 'Rich Contacts: Corpus-Based Convolution of Contact Interaction Sound for + Enhanced Musical Expression' + url: http://www.nime.org/proceedings/2014/nime2014_451.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_34 - abstract: 'Sampled drums can be used as an affordable way of creating human-like - drum tracks, or perhaps more interestingly, can be used as a mean of experimentation - with rhythm and groove. Similarly, AI-based drum generation tools can focus on - creating human-like drum patterns, or alternatively, focus on providing producers/musicians - with means of experimentation with rhythm. In this work, we aimed to explore the - latter approach. To this end, we present a suite of Transformer-based models aimed - at completing audio drum loops with stylistically consistent symbolic drum events. - Our proposed models rely on a reduced spectral representation of the drum loop, - striking a balance between a raw audio recording and an exact symbolic transcription. - Using a number of objective evaluations, we explore the validity of our approach - and identify several challenges that need to be further studied in future iterations - of this work. Lastly, we provide a real-time VST plugin that allows musicians/producers - to utilize the models in real-time production settings.' - address: 'Mexico City, Mexico' - articleno: 34 - author: Behzad Haki and Teresa Pelinski and Marina Nieto Giménez and Sergi Jordà - bibtex: "@article{nime2023_34,\n abstract = {Sampled drums can be used as an affordable\ - \ way of creating human-like drum tracks, or perhaps more interestingly, can be\ - \ used as a mean of experimentation with rhythm and groove. 
Similarly, AI-based\ - \ drum generation tools can focus on creating human-like drum patterns, or alternatively,\ - \ focus on providing producers/musicians with means of experimentation with rhythm.\ - \ In this work, we aimed to explore the latter approach. To this end, we present\ - \ a suite of Transformer-based models aimed at completing audio drum loops with\ - \ stylistically consistent symbolic drum events. Our proposed models rely on a\ - \ reduced spectral representation of the drum loop, striking a balance between\ - \ a raw audio recording and an exact symbolic transcription. Using a number of\ - \ objective evaluations, we explore the validity of our approach and identify\ - \ several challenges that need to be further studied in future iterations of this\ - \ work. Lastly, we provide a real-time VST plugin that allows musicians/producers\ - \ to utilize the models in real-time production settings.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {34},\n author = {Behzad Haki and Teresa Pelinski\ - \ and Marina Nieto Giménez and Sergi Jordà},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {8},\n pages = {236--243},\n title = {Completing Audio Drum Loops\ - \ with Symbolic Drum Suggestions},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_34.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: fvisi2014 + abstract: 'This work describes a new approach to gesture mapping in a performance + with a traditional musical instrument and live electronics based upon theories + of embodied music cognition (EMC) and musical gestures. Considerations on EMC + and how gestures affect the experience of music inform different mapping strategies. 
+ Our intent is to enhance the expressiveness and the liveness of performance by + tracking gestures via a multimodal motion capture system and to use motion data + to control several features of the music. After a review of recent research in + the field, a proposed application of such theories to a performance with electric + guitar and live electronics will follow, focusing both on aspects of meaning formation + and motion capturing.' + address: 'London, United Kingdom' + author: Federico Visi and Rodrigo Schramm and Eduardo Miranda + bibtex: "@inproceedings{fvisi2014,\n abstract = {This work describes a new approach\ + \ to gesture mapping in a performance with a traditional musical instrument and\ + \ live electronics based upon theories of embodied music cognition (EMC) and musical\ + \ gestures. Considerations on EMC and how gestures affect the experience of music\ + \ inform different mapping strategies. Our intent is to enhance the expressiveness\ + \ and the liveness of performance by tracking gestures via a multimodal motion\ + \ capture system and to use motion data to control several features of the music.\ + \ After a review of recent research in the field, a proposed application of such\ + \ theories to a performance with electric guitar and live electronics will follow,\ + \ focusing both on aspects of meaning formation and motion capturing.},\n address\ + \ = {London, United Kingdom},\n author = {Federico Visi and Rodrigo Schramm and\ + \ Eduardo Miranda},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178973},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {601--604},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Use of Body Motion to Enhance Traditional\ + \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_460.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New 
Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178973 issn: 2220-4806 - month: May - numpages: 8 - pages: 236--243 - title: Completing Audio Drum Loops with Symbolic Drum Suggestions - track: Papers - url: http://nime.org/proceedings/2023/nime2023_34.pdf - year: 2023 + month: June + pages: 601--604 + publisher: 'Goldsmiths, University of London' + title: Use of Body Motion to Enhance Traditional Musical Instruments + url: http://www.nime.org/proceedings/2014/nime2014_460.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_35 - abstract: "People have always used new technology to experiment with new forms of\ - \ music creation. However, the latest devel- opments in artificial intelligence\ - \ (AI) suggest that machines are on the verge of becoming more than mere tools—they\ - \ can also be co-creators. In this article, we follow four mu- sicians in the\ - \ project Co-Creative Spaces through a six- month long collaborative process,\ - \ where they created music through improvising with each other and with computer-\ - \ based imitations of themselves. These musical agents were trained through machine\ - \ learning to generate output in the style of the musicians. What happens to musical\ - \ co-creation when AI is included in the creative cycle? The musicians are from\ - \ Norway and Kenya—two countries with fundamen- tally different musical traditions.\ - \ How is the collaboration affected by cultural biases inherent in the technology,\ - \ and in the musicians themselves?\nThese questions were examined through focus\ - \ groups as part of two five-day workshops. An analysis shows how the musicians\ - \ moved between an understanding of machine as tool and machine as co-creator,\ - \ and between the idea of music as object and music as process. 
These different\ - \ interpretative repertoires were used interchangeably and paint a complex picture\ - \ of what it is like being in the intersection between different musical and cultural\ - \ paradigms." - address: 'Mexico City, Mexico' - articleno: 35 - author: Notto J. W. Thelle and Bernt Isak Wærstad - bibtex: "@article{nime2023_35,\n abstract = {People have always used new technology\ - \ to experiment with new forms of music creation. However, the latest devel- opments\ - \ in artificial intelligence (AI) suggest that machines are on the verge of becoming\ - \ more than mere tools—they can also be co-creators. In this article, we follow\ - \ four mu- sicians in the project Co-Creative Spaces through a six- month long\ - \ collaborative process, where they created music through improvising with each\ - \ other and with computer- based imitations of themselves. These musical agents\ - \ were trained through machine learning to generate output in the style of the\ - \ musicians. What happens to musical co-creation when AI is included in the creative\ - \ cycle? The musicians are from Norway and Kenya—two countries with fundamen-\ - \ tally different musical traditions. How is the collaboration affected by cultural\ - \ biases inherent in the technology, and in the musicians themselves?\nThese questions\ - \ were examined through focus groups as part of two five-day workshops. An analysis\ - \ shows how the musicians moved between an understanding of machine as tool and\ - \ machine as co-creator, and between the idea of music as object and music as\ - \ process. These different interpretative repertoires were used interchangeably\ - \ and paint a complex picture of what it is like being in the intersection between\ - \ different musical and cultural paradigms.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {35},\n author = {Notto J. W. 
Thelle and Bernt Isak Wærstad},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {7},\n pages = {244--250},\n title\ - \ = {Co-Creatives Spaces: The machine as a collaborator},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_35.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: jjeon2014 + abstract: 'This paper presents an enhanced sonic data communication method using + TAPIR (Theoretically Audible, but Practically Inaudible Range: frequencies above + 18kHz) sound and a software toolkit as its implementation. Using inaudible sound + as a data medium, a digital data network among the audience and performer can + be easily built with microphones and speakers, without requiring any additional + hardware. ``TAPIR Sound Tag'''' is a smart device framework for inaudible data + communication that can be easily embedded in audience participatory performances + and interactive arts. With a bandwidth of 900 Hz, a high transmission rate of + 200 bps can be achieved, enabling peer-to-peer or broadcasting real-time data + communication among smart devices. This system can be used without any advanced + knowledge in signal processing and communication system theory; simply specifying + carrier frequency and bandwidth with a few lines of code can start data communication. + Several usage scenarios of the system are also presented, such as participating + in an interactive performance by adding and controlling sound, and collaborative + completion of an artist''s work by audience. We expect this framework to provide + a new way of audience interaction to artists, as well as further promoting audience + participation by simplifying the process: using personal smart devices as a medium + and not requiring additional hardware or complex settings.' 
+ address: 'London, United Kingdom' + author: Jimin Jeon and Gunho Chae and Edward Jangwon Lee and Woon Seung Yeo + bibtex: "@inproceedings{jjeon2014,\n abstract = {This paper presents an enhanced\ + \ sonic data communication method using TAPIR (Theoretically Audible, but Practically\ + \ Inaudible Range: frequencies above 18kHz) sound and a software toolkit as its\ + \ implementation. Using inaudible sound as a data medium, a digital data network\ + \ among the audience and performer can be easily built with microphones and speakers,\ + \ without requiring any additional hardware. ``TAPIR Sound Tag'' is a smart device\ + \ framework for inaudible data communication that can be easily embedded in audience\ + \ participatory performances and interactive arts. With a bandwidth of 900 Hz,\ + \ a high transmission rate of 200 bps can be achieved, enabling peer-to-peer or\ + \ broadcasting real-time data communication among smart devices. This system can\ + \ be used without any advanced knowledge in signal processing and communication\ + \ system theory; simply specifying carrier frequency and bandwidth with a few\ + \ lines of code can start data communication. 
Several usage scenarios of the system\ + \ are also presented, such as participating in an interactive performance by adding\ + \ and controlling sound, and collaborative completion of an artist's work by audience.\ + \ We expect this framework to provide a new way of audience interaction to artists,\ + \ as well as further promoting audience participation by simplifying the process:\ + \ using personal smart devices as a medium and not requiring additional hardware\ + \ or complex settings.},\n address = {London, United Kingdom},\n author = {Jimin\ + \ Jeon and Gunho Chae and Edward Jangwon Lee and Woon Seung Yeo},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178818},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {367--370},\n publisher = {Goldsmiths, University of London},\n\ + \ title = {TAPIR Sound Tag: An Enhanced Sonic Communication Framework for Audience\ + \ Participatory Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_461.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178818 issn: 2220-4806 - month: May - numpages: 7 - pages: 244--250 - title: 'Co-Creatives Spaces: The machine as a collaborator' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_35.pdf - year: 2023 + month: June + pages: 367--370 + publisher: 'Goldsmiths, University of London' + title: 'TAPIR Sound Tag: An Enhanced Sonic Communication Framework for Audience + Participatory Performance' + url: http://www.nime.org/proceedings/2014/nime2014_461.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_36 - abstract: 'Most musical instrument augmentations aim to only fit one specific instrument - and depend on an external sound system to work as intended. 
In a more acoustic - concert setting this often alienates the electronic sound component. The FLAPIBox - is an integrated solution that fits most acoustic instruments and use its own - resonance for playing electronic sound in a more organic way—through the instrument - itself. Reviewing related works and exploring different hardware and software - components, a modular prototype has been built. The results of this preliminary - study make the body of planning and building the first integrated breadboard prototype. - Because of its flexible design, the FLAPIBox can use several different microphone, - and loudspeaker technologies. Using inexpensive components and developing open-source - software, the FLAPIBox is both affordable and accessible. The development of the - FLAPIBox aim to result in a stable and predictable platform, yet open and versatile - enough for further development.' - address: 'Mexico City, Mexico' - articleno: 36 - author: Erik Stifjell - bibtex: "@article{nime2023_36,\n abstract = {Most musical instrument augmentations\ - \ aim to only fit one specific instrument and depend on an external sound system\ - \ to work as intended. In a more acoustic concert setting this often alienates\ - \ the electronic sound component. The FLAPIBox is an integrated solution that\ - \ fits most acoustic instruments and use its own resonance for playing electronic\ - \ sound in a more organic way—through the instrument itself. Reviewing related\ - \ works and exploring different hardware and software components, a modular prototype\ - \ has been built. The results of this preliminary study make the body of planning\ - \ and building the first integrated breadboard prototype. Because of its flexible\ - \ design, the FLAPIBox can use several different microphone, and loudspeaker technologies.\ - \ Using inexpensive components and developing open-source software, the FLAPIBox\ - \ is both affordable and accessible. 
The development of the FLAPIBox aim to result\ - \ in a stable and predictable platform, yet open and versatile enough for further\ - \ development.},\n address = {Mexico City, Mexico},\n articleno = {36},\n author\ - \ = {Erik Stifjell},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {5},\n\ - \ pages = {251--255},\n title = {A FLexible musical instrument Augmentation that\ - \ is Programmable, Integrated in a Box (FLAPIBox)},\n track = {Papers},\n url\ - \ = {http://nime.org/proceedings/2023/nime2023_36.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: asarasua2014 + abstract: 'Many musical interfaces have used the musical conductor metaphor, allowing + users to control the expressive aspects of a performance by imitating the gestures + of conductors. In most of them, the rules to control these expressive aspects + are predefined and users have to adapt to them. Other works have studied conductors'' + gestures in relation to the performance of the orchestra. The goal of this study + is to analyze, following the path initiated by this latter kind of works, how + simple motion capture descriptors can explain the relationship between the loudness + of a given performance and the way in which different subjects move when asked + to impersonate the conductor of that performance. Twenty-five subjects were asked + to impersonate the conductor of three classical music fragments while listening + to them. The results of different linear regression models with motion capture + descriptors as explanatory variables show that, by studying how descriptors correlate + to loudness differently among subjects, different tendencies can be found and + exploited to design models that better adjust to their expectations.' 
+ address: 'London, United Kingdom' + author: Alvaro Sarasúa and Enric Guaus + bibtex: "@inproceedings{asarasua2014,\n abstract = {Many musical interfaces have\ + \ used the musical conductor metaphor, allowing users to control the expressive\ + \ aspects of a performance by imitating the gestures of conductors. In most of\ + \ them, the rules to control these expressive aspects are predefined and users\ + \ have to adapt to them. Other works have studied conductors' gestures in relation\ + \ to the performance of the orchestra. The goal of this study is to analyze, following\ + \ the path initiated by this latter kind of works, how simple motion capture descriptors\ + \ can explain the relationship between the loudness of a given performance and\ + \ the way in which different subjects move when asked to impersonate the conductor\ + \ of that performance. Twenty-five subjects were asked to impersonate the conductor\ + \ of three classical music fragments while listening to them. The results of different\ + \ linear regression models with motion capture descriptors as explanatory variables\ + \ show that, by studying how descriptors correlate to loudness differently among\ + \ subjects, different tendencies can be found and exploited to design models that\ + \ better adjust to their expectations.},\n address = {London, United Kingdom},\n\ + \ author = {Alvaro Saras\\'ua and Enric Guaus},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178929},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {195--200},\n publisher = {Goldsmiths, University of London},\n title = {Dynamics\ + \ in Music Conducting: A Computational Comparative Study Among Subjects},\n url\ + \ = {http://www.nime.org/proceedings/2014/nime2014_464.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan 
Marquez-Borbon + doi: 10.5281/zenodo.1178929 issn: 2220-4806 - month: May - numpages: 5 - pages: 251--255 - title: 'A FLexible musical instrument Augmentation that is Programmable, Integrated - in a Box (FLAPIBox)' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_36.pdf - year: 2023 + month: June + pages: 195--200 + publisher: 'Goldsmiths, University of London' + title: 'Dynamics in Music Conducting: A Computational Comparative Study Among Subjects' + url: http://www.nime.org/proceedings/2014/nime2014_464.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_37 - abstract: "LiveLily is an open-source system for live sequencing and live scoring\ - \ through live coding in a subset of the Lilypond language. It is written in openFrameworks\ - \ and consists of four distinct parts, the text editor, the language parser, the\ - \ sequencer, and the music score. It supports the MIDI and OSC protocols to communicate\ - \ the sequencer with other software or hardware, as LiveLily does not produce\ - \ any sound. It can be combined with audio synthesis software that supports OSC,\ - \ like Pure Data, SuperCollider, and others, or hardware synthesizers that support\ - \ MIDI. This way, the users can create their sounds in another, audio-complete\ - \ framework or device, and use LiveLily to control their music.\nLiveLily can\ - \ also be used as a live scoring system to write music scores for acoustic instruments\ - \ live. This feature can be combined with its live sequencing capabilities, so\ - \ acoustic instruments can be combined with live electronics. Both live scoring\ - \ and live sequencing in LiveLily provide expressiveness to a great extent, as\ - \ many musical gestures can be included either in the score or the sequencer.\ - \ Such gestures include dynamics, articulation, and arbitrary text that can be\ - \ interpreted in any desired way, much like the way Western-music notation scores\ - \ are written." 
- address: 'Mexico City, Mexico' - articleno: 37 - author: Alexandros Drymonitis - bibtex: "@article{nime2023_37,\n abstract = {LiveLily is an open-source system for\ - \ live sequencing and live scoring through live coding in a subset of the Lilypond\ - \ language. It is written in openFrameworks and consists of four distinct parts,\ - \ the text editor, the language parser, the sequencer, and the music score. It\ - \ supports the MIDI and OSC protocols to communicate the sequencer with other\ - \ software or hardware, as LiveLily does not produce any sound. It can be combined\ - \ with audio synthesis software that supports OSC, like Pure Data, SuperCollider,\ - \ and others, or hardware synthesizers that support MIDI. This way, the users\ - \ can create their sounds in another, audio-complete framework or device, and\ - \ use LiveLily to control their music.\nLiveLily can also be used as a live scoring\ - \ system to write music scores for acoustic instruments live. This feature can\ - \ be combined with its live sequencing capabilities, so acoustic instruments can\ - \ be combined with live electronics. Both live scoring and live sequencing in\ - \ LiveLily provide expressiveness to a great extent, as many musical gestures\ - \ can be included either in the score or the sequencer. 
Such gestures include\ - \ dynamics, articulation, and arbitrary text that can be interpreted in any desired\ - \ way, much like the way Western-music notation scores are written.},\n address\ - \ = {Mexico City, Mexico},\n articleno = {37},\n author = {Alexandros Drymonitis},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {256--261},\n title\ - \ = {LiveLily: An Expressive Live Sequencing and Live Scoring System Through Live\ - \ Coding With the Lilypond Language},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_37.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: kkeatch2014 + abstract: 'Sounds of Solitaire is a novel interface for musical expression based + on an extended peg solitaire board as a generator of live musical composition. + The classic puzzle game, for one person, is extended by mapping the moves of the + game through a self contained system using Arduino and Raspberry Pi, triggering + both analogue and digital sound. The solitaire board, as instrument, is presented + as a wood and Perspex box with the hardware inside. Ball bearings function as + both solitaire pegs and switches, while a purpose built solenoid controlled monochord + and ball bearing run provide the analogue sound source, which is digitally manipulated + in real-time, according to the sequences of game moves. The creative intention + of Sounds of Solitaire is that the playful approach to participation in a musical + experience, provided by the material for music making in real-time, demonstrates + an integrated approach to concepts of composing, performing and listening.' 
+ address: 'London, United Kingdom' + author: Kirsty Keatch + bibtex: "@inproceedings{kkeatch2014,\n abstract = {Sounds of Solitaire is a novel\ + \ interface for musical expression based on an extended peg solitaire board as\ + \ a generator of live musical composition. The classic puzzle game, for one person,\ + \ is extended by mapping the moves of the game through a self contained system\ + \ using Arduino and Raspberry Pi, triggering both analogue and digital sound.\ + \ The solitaire board, as instrument, is presented as a wood and Perspex box with\ + \ the hardware inside. Ball bearings function as both solitaire pegs and switches,\ + \ while a purpose built solenoid controlled monochord and ball bearing run provide\ + \ the analogue sound source, which is digitally manipulated in real-time, according\ + \ to the sequences of game moves. The creative intention of Sounds of Solitaire\ + \ is that the playful approach to participation in a musical experience, provided\ + \ by the material for music making in real-time, demonstrates an integrated approach\ + \ to concepts of composing, performing and listening.},\n address = {London, United\ + \ Kingdom},\n author = {Kirsty Keatch},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178827},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {102--105},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {An Exploration of Peg Solitaire as a Compositional\ + \ Tool},\n url = {http://www.nime.org/proceedings/2014/nime2014_466.pdf},\n year\ + \ = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178827 issn: 2220-4806 - month: May - numpages: 6 - pages: 256--261 - title: 'LiveLily: An Expressive Live Sequencing and Live Scoring System Through - Live Coding With the Lilypond Language' - 
track: Papers - url: http://nime.org/proceedings/2023/nime2023_37.pdf - year: 2023 + month: June + pages: 102--105 + publisher: 'Goldsmiths, University of London' + title: An Exploration of Peg Solitaire as a Compositional Tool + url: http://www.nime.org/proceedings/2014/nime2014_466.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_38 - abstract: 'ZRob is a robotic system designed for playing a snare drum. The robot - is constructed with a passive flexible spring-based joint inspired by the human - hand. This paper describes a study exploring rhythmic patterns by exploiting the - chaotic dynamics of two ZRobs. In the experiment, we explored the control configurations - of each arm by trying to create unpredictable patterns. Over 200 samples have - been recorded and analyzed. We show how the chaotic dynamics of ZRob can be used - for creating new drumming patterns.' - address: 'Mexico City, Mexico' - articleno: 38 - author: Seyed Mojtaba Karbasi and Alexander Refsum Jensenius and Rolf Inge Godøy - and Jim Torresen - bibtex: "@article{nime2023_38,\n abstract = {ZRob is a robotic system designed for\ - \ playing a snare drum. The robot is constructed with a passive flexible spring-based\ - \ joint inspired by the human hand. This paper describes a study exploring rhythmic\ - \ patterns by exploiting the chaotic dynamics of two ZRobs. In the experiment,\ - \ we explored the control configurations of each arm by trying to create unpredictable\ - \ patterns. Over 200 samples have been recorded and analyzed. 
We show how the\ - \ chaotic dynamics of ZRob can be used for creating new drumming patterns.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {38},\n author = {Seyed Mojtaba\ - \ Karbasi and Alexander Refsum Jensenius and Rolf Inge Godøy and Jim Torresen},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {262--267},\n title\ - \ = {Exploring Emerging Drumming Patterns in a Chaotic Dynamical System using\ - \ ZRob},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_38.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: xxiao2014 + abstract: 'We present Andante, a representation of music as animated characters + walking along the piano keyboard that appear to play the physical keys with each + step. Based on a view of music pedagogy that emphasizes expressive, full-body + communication early in the learning process, Andante promotes an understanding + of the music rooted in the body, taking advantage of walking as one of the most + fundamental human rhythms. We describe three example visualizations on a preliminary + prototype as well as applications extending our examples for practice feedback, + improvisation and composition. Through our project, we reflect on some high level + considerations for the NIME community.' + address: 'London, United Kingdom' + author: Xiao Xiao and Basheer Tome and Hiroshi Ishii + bibtex: "@inproceedings{xxiao2014,\n abstract = {We present Andante, a representation\ + \ of music as animated characters walking along the piano keyboard that appear\ + \ to play the physical keys with each step. 
Based on a view of music pedagogy\ + \ that emphasizes expressive, full-body communication early in the learning process,\ + \ Andante promotes an understanding of the music rooted in the body, taking advantage\ + \ of walking as one of the most fundamental human rhythms. We describe three example\ + \ visualizations on a preliminary prototype as well as applications extending\ + \ our examples for practice feedback, improvisation and composition. Through our\ + \ project, we reflect on some high level considerations for the NIME community.},\n\ + \ address = {London, United Kingdom},\n author = {Xiao Xiao and Basheer Tome and\ + \ Hiroshi Ishii},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178987},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {629--632},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Andante: Walking Figures on the Piano Keyboard\ + \ to Visualize Musical Motion},\n url = {http://www.nime.org/proceedings/2014/nime2014_467.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178987 issn: 2220-4806 - month: May - numpages: 6 - pages: 262--267 - title: Exploring Emerging Drumming Patterns in a Chaotic Dynamical System using - ZRob - track: Papers - url: http://nime.org/proceedings/2023/nime2023_38.pdf - year: 2023 + month: June + pages: 629--632 + publisher: 'Goldsmiths, University of London' + title: 'Andante: Walking Figures on the Piano Keyboard to Visualize Musical Motion' + url: http://www.nime.org/proceedings/2014/nime2014_467.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_39 - abstract: 'The eTud,be framework adapts existing improvising musical agents (MA) - for performance with an augmented instrument called the eTube. 
This instrument - has been developed with deliberate musical and technological limitations including - a simple two-button controller and restricted pitch capacity. We will present - case studies which outline our research-creation framework for mapping the eTube - controller, developing corpora for the MAs, and testing interactive and machine - listening settings which will also be demonstrated by performance examples. A - general summary of the MAs will be followed by specific descriptions of the features - we have utilised in our work, and finally a comparison of the MAs based on these - features. Few papers discuss the process for learning to work with and adapt existing - MAs and we will finish by describing challenges experienced as other users with - these technologies.' - address: 'Mexico City, Mexico' - articleno: 39 - author: Tommy Davis and Kasey LV Pocius and Vincent Cusson and Marcelo Wanderley - and Philippe Pasquier - bibtex: "@article{nime2023_39,\n abstract = {The eTu{d,b}e framework adapts existing\ - \ improvising musical agents (MA) for performance with an augmented instrument\ - \ called the eTube. This instrument has been developed with deliberate musical\ - \ and technological limitations including a simple two-button controller and restricted\ - \ pitch capacity. We will present case studies which outline our research-creation\ - \ framework for mapping the eTube controller, developing corpora for the MAs,\ - \ and testing interactive and machine listening settings which will also be demonstrated\ - \ by performance examples. A general summary of the MAs will be followed by specific\ - \ descriptions of the features we have utilised in our work, and finally a comparison\ - \ of the MAs based on these features. 
Few papers discuss the process for learning\ - \ to work with and adapt existing MAs and we will finish by describing challenges\ - \ experienced as other users with these technologies.},\n address = {Mexico City,\ - \ Mexico},\n articleno = {39},\n author = {Tommy Davis and Kasey LV Pocius and\ - \ Vincent Cusson and Marcelo Wanderley and Philippe Pasquier},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {9},\n pages = {268--276},\n title = {eTu{d,b}e: case\ - \ studies in playing with musical agents},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_39.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: avantklooster2014 + abstract: Emotion is a complex topic much studied in music and arguably equally + central to the visual arts where this is usually referred to with the overarching + label of aesthetics. This paper explores how music and the arts have incorporated + the study of emotion. We then introduce the development of a live audio visual + interface entitled In A State that detects emotion from live audio (in this case + a piano performance) and generates visuals and electro acoustic music in response. + address: 'London, United Kingdom' + author: Adinda van 't Klooster and Nick Collins + bibtex: "@inproceedings{avantklooster2014,\n abstract = {Emotion is a complex topic\ + \ much studied in music and arguably equally central to the visual arts where\ + \ this is usually referred to with the overarching label of aesthetics. This paper\ + \ explores how music and the arts have incorporated the study of emotion. 
We then\ + \ introduce the development of a live audio visual interface entitled In A State\ + \ that detects emotion from live audio (in this case a piano performance) and\ + \ generates visuals and electro acoustic music in response.},\n address = {London,\ + \ United Kingdom},\n author = {Adinda van 't Klooster and Nick Collins},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178837},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {545--548},\n publisher = {Goldsmiths, University of London},\n\ + \ title = {In A State: Live Emotion Detection and Visualisation for Music Performance},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_469.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178837 issn: 2220-4806 - month: May - numpages: 9 - pages: 268--276 - title: 'eTud,be: case studies in playing with musical agents' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_39.pdf - year: 2023 + month: June + pages: 545--548 + publisher: 'Goldsmiths, University of London' + title: 'In A State: Live Emotion Detection and Visualisation for Music Performance' + url: http://www.nime.org/proceedings/2014/nime2014_469.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_40 - abstract: "This paper presents an affordable and accessible wearable technology\ - \ for wind musicians which provides real-time biofeedback on their breathing.\ - \ We developed the abdominal thoracic expansion measurement prototype wearable\ - \ technology (ATEM-P), to measure a wind musician’s breathing-induced expansion\ - \ and contraction while they are playing.\nOur first study validates the ATEM-P\ - \ with the gold standard of medical grade respiratory exertion measurement devices,\ - \ the respiratory plethysmography inductance 
system (RIP). The results show that\ - \ the ATEM-P has a strong correlation to the RIP system.\nOur second study provides\ - \ quantitative and qualitative data about the correlation between a musician’s\ - \ breathing technique and the quality of their performance. We expected the results\ - \ to show a correlation between the ATEM-P peak amplitudes and the quality of\ - \ performance, i.e. better breathing-induced expansion leads to better quality\ - \ of performance, however this was not the case. The results did show that there\ - \ is a correlation between a musician’s quality of performance and breath period.\n\ - Results from the studies show that the ATEM-P has potential as an affordable and\ - \ accessible wearable technology for wind musicians: a performance enhancement\ - \ tool and an educational tool." - address: 'Mexico City, Mexico' - articleno: 40 - author: Lucie F Jones and Jeffrey Boyd and Jeremy Brown and Hua Shen - bibtex: "@article{nime2023_40,\n abstract = {This paper presents an affordable and\ - \ accessible wearable technology for wind musicians which provides real-time biofeedback\ - \ on their breathing. We developed the abdominal thoracic expansion measurement\ - \ prototype wearable technology (ATEM-P), to measure a wind musician’s breathing-induced\ - \ expansion and contraction while they are playing.\nOur first study validates\ - \ the ATEM-P with the gold standard of medical grade respiratory exertion measurement\ - \ devices, the respiratory plethysmography inductance system (RIP). The results\ - \ show that the ATEM-P has a strong correlation to the RIP system.\nOur second\ - \ study provides quantitative and qualitative data about the correlation between\ - \ a musician’s breathing technique and the quality of their performance. We expected\ - \ the results to show a correlation between the ATEM-P peak amplitudes and the\ - \ quality of performance, i.e. 
better breathing-induced expansion leads to better\ - \ quality of performance, however this was not the case. The results did show\ - \ that there is a correlation between a musician’s quality of performance and\ - \ breath period.\nResults from the studies show that the ATEM-P has potential\ - \ as an affordable and accessible wearable technology for wind musicians: a performance\ - \ enhancement tool and an educational tool.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {40},\n author = {Lucie F Jones and Jeffrey Boyd and Jeremy Brown\ - \ and Hua Shen},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {11},\n pages = {277--287},\n\ - \ title = {A Wearable Technology For Wind Musicians: Does It Matter How You Breathe?},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_40.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: doverholt2014 + abstract: 'We introduce and describe the initial evaluation of a new low-cost augmented + violin prototype, with research focused on the user experience when playing such + hybrid physical-digital instruments, and the exploration of novel interactive + performance techniques. Another goal of this work is wider platform accessibility + for players, via a simple `do-it-yourself'' approach described by the design herein. + While the hardware and software elements are open source, the build process can + nonetheless require non-insignificant investments of time and money, as well as + basic electronics construction skills. These have been kept to a minimum wherever + possible. Our initial prototype is based upon an inexpensive electric violin that + is widely available online for approximately $200 USD. 
This serves as the starting + point for construction, to which the design adds local Digital Signal Processing + (DSP), gestural sensing, and sound output. Real-time DSP algorithms are running + on a mobile device, which also incorporates orientation/gesture sensors for parameter + mapping, with the resulting sound amplified and rendered via small loudspeakers + mounted on the instrument. The platform combines all necessary elements for digitally-mediated + interactive performance; the need for a traditional computer only arises when + developing new DSP algorithms for the platform. An initial exploratory evaluation + with users is presented, in which performers explore different possibilities with + the proposed platform (various DSP implementations, mapping schemes, physical + setups, etc.) in order to better establish the needs of the performing artist. + Based on these results, future work is outlined leading towards the development + of a complete quartet of instruments.' + address: 'London, United Kingdom' + author: Dan Overholt and Steven Gelineck + bibtex: "@inproceedings{doverholt2014,\n abstract = {We introduce and describe the\ + \ initial evaluation of a new low-cost augmented violin prototype, with research\ + \ focused on the user experience when playing such hybrid physical-digital instruments,\ + \ and the exploration of novel interactive performance techniques. Another goal\ + \ of this work is wider platform accessibility for players, via a simple `do-it-yourself'\ + \ approach described by the design herein. While the hardware and software elements\ + \ are open source, the build process can nonetheless require non-insignificant\ + \ investments of time and money, as well as basic electronics construction skills.\ + \ These have been kept to a minimum wherever possible. Our initial prototype is\ + \ based upon an inexpensive electric violin that is widely available online for\ + \ approximately $200 USD. 
This serves as the starting point for construction,\ + \ to which the design adds local Digital Signal Processing (DSP), gestural sensing,\ + \ and sound output. Real-time DSP algorithms are running on a mobile device, which\ + \ also incorporates orientation/gesture sensors for parameter mapping, with the\ + \ resulting sound amplified and rendered via small loudspeakers mounted on the\ + \ instrument. The platform combines all necessary elements for digitally-mediated\ + \ interactive performance; the need for a traditional computer only arises when\ + \ developing new DSP algorithms for the platform. An initial exploratory evaluation\ + \ with users is presented, in which performers explore different possibilities\ + \ with the proposed platform (various DSP implementations, mapping schemes, physical\ + \ setups, etc.) in order to better establish the needs of the performing artist.\ + \ Based on these results, future work is outlined leading towards the development\ + \ of a complete quartet of instruments.},\n address = {London, United Kingdom},\n\ + \ author = {Dan Overholt and Steven Gelineck},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178897},\n issn = {2220-4806},\n month = {June},\n pages =\ + \ {122--125},\n publisher = {Goldsmiths, University of London},\n title = {Design\ + \ \\& Evaluation of an Accessible Hybrid Violin Platform},\n url = {http://www.nime.org/proceedings/2014/nime2014_470.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178897 + issn: 2220-4806 + month: June + pages: 122--125 + publisher: 'Goldsmiths, University of London' + title: Design & Evaluation of an Accessible Hybrid Violin Platform + url: http://www.nime.org/proceedings/2014/nime2014_470.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: axambo2014 + abstract: 'Co-located 
tabletop tangible user interfaces (TUIs) for music performance + are known for promoting multi-player collaboration with a shared interface, yet + it is still unclear how to best support the awareness of the workspace in terms + of understanding individual actions and the other group members actions, in parallel. + In this paper, we investigate the effects of providing auditory feedback using + ambisonics spatialisation, aimed at informing users about the location of the + tangibles on the tabletop surface, with groups of mixed musical backgrounds. Participants + were asked to improvise music on SoundXY4: The Art of Noise, a tabletop system + that includes sound samples inspired by Russolo''s taxonomy of noises. We compared + spatialisation vs. no-spatialisation conditions, and findings suggest that, when + using spatialisation, there was a clearer workspace awareness, and a greater engagement + in the musical activity as an immersive experience.' + address: 'London, United Kingdom' + author: Anna Xambó and Gerard Roma and Robin Laney and Chris Dobbyn and Sergi Jordà + bibtex: "@inproceedings{axambo2014,\n abstract = {Co-located tabletop tangible user\ + \ interfaces (TUIs) for music performance are known for promoting multi-player\ + \ collaboration with a shared interface, yet it is still unclear how to best support\ + \ the awareness of the workspace in terms of understanding individual actions\ + \ and the other group members actions, in parallel. In this paper, we investigate\ + \ the effects of providing auditory feedback using ambisonics spatialisation,\ + \ aimed at informing users about the location of the tangibles on the tabletop\ + \ surface, with groups of mixed musical backgrounds. Participants were asked to\ + \ improvise music on {SoundXY4: The Art of Noise}, a tabletop system that includes\ + \ sound samples inspired by Russolo's taxonomy of noises. We compared spatialisation\ + \ vs. 
no-spatialisation conditions, and findings suggest that, when using spatialisation,\ + \ there was a clearer workspace awareness, and a greater engagement in the musical\ + \ activity as an immersive experience.},\n address = {London, United Kingdom},\n\ + \ author = {Anna Xamb\\'o and Gerard Roma and Robin Laney and Chris Dobbyn and\ + \ Sergi Jord\\`a},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178985},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {40--45},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {SoundXY4: Supporting Tabletop Collaboration\ + \ and Awareness with Ambisonics Spatialisation},\n url = {http://www.nime.org/proceedings/2014/nime2014_471.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178985 + issn: 2220-4806 + month: June + pages: 40--45 + publisher: 'Goldsmiths, University of London' + title: 'SoundXY4: Supporting Tabletop Collaboration and Awareness with Ambisonics + Spatialisation' + url: http://www.nime.org/proceedings/2014/nime2014_471.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: smealla2014 + abstract: 'The maturation process of the NIME field has brought a growing interest + in teaching the design and implementation of Digital Music Instruments (DMI) as + well as in finding objective evaluation methods to assess the suitability of these + outcomes. In this paper we propose a methodology for teaching NIME design and + a set of tools meant to inform the design process. This approach has been applied + in a master course focused on the exploration of expressiveness and on the role + of the mapping component in the NIME creation chain, through hands-on and self-reflective + approach based on a restrictive setup consisting of smart-phones and the Pd programming + language. 
Working Groups were formed, and a 2-step DMI design process was applied, + including 2 performance stages. The evaluation tools assessed both System and + Performance aspects of each project, according to Listeners'' impressions after + each performance. Listeners'' previous music knowledge was also considered. Through + this methodology, students with different backgrounds were able to effectively + engage in the NIME design processes, developing working DMI prototypes according + to the demanded requirements; the assessment tools proved to be consistent for + evaluating NIMEs systems and performances, and the fact of informing the design + processes with the outcome of the evaluation, showed a traceable progress in the + students outcomes.' + address: 'London, United Kingdom' + author: Sergi Jordà and Sebastian Mealla + bibtex: "@inproceedings{smealla2014,\n abstract = {The maturation process of the\ + \ NIME field has brought a growing interest in teaching the design and implementation\ + \ of Digital Music Instruments (DMI) as well as in finding objective evaluation\ + \ methods to assess the suitability of these outcomes. In this paper we propose\ + \ a methodology for teaching NIME design and a set of tools meant to inform the\ + \ design process. This approach has been applied in a master course focused on\ + \ the exploration of expressiveness and on the role of the mapping component in\ + \ the NIME creation chain, through hands-on and self-reflective approach based\ + \ on a restrictive setup consisting of smart-phones and the Pd programming language.\ + \ Working Groups were formed, and a 2-step DMI design process was applied, including\ + \ 2 performance stages. The evaluation tools assessed both System and Performance\ + \ aspects of each project, according to Listeners' impressions after each performance.\ + \ Listeners' previous music knowledge was also considered. 
Through this methodology,\ + \ students with different backgrounds were able to effectively engage in the NIME\ + \ design processes, developing working DMI prototypes according to the demanded\ + \ requirements; the assessment tools proved to be consistent for evaluating NIMEs\ + \ systems and performances, and the fact of informing the design processes with\ + \ the outcome of the evaluation, showed a traceable progress in the students outcomes.},\n\ + \ address = {London, United Kingdom},\n author = {Sergi Jord\\`a and Sebastian\ + \ Mealla},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178824},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {233--238},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {A Methodological Framework for Teaching, Evaluating and\ + \ Informing NIME Design with a Focus on Mapping and Expressiveness},\n url = {http://www.nime.org/proceedings/2014/nime2014_472.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178824 + issn: 2220-4806 + month: June + pages: 233--238 + publisher: 'Goldsmiths, University of London' + title: 'A Methodological Framework for Teaching, Evaluating and Informing NIME Design + with a Focus on Mapping and Expressiveness' + url: http://www.nime.org/proceedings/2014/nime2014_472.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: btaylor2014 + abstract: 'Developing for mobile and multimodal platforms is more important now + than ever, as smartphones and tablets proliferate and mobile device orchestras + become commonplace. We detail NexusUI, a JavaScript framework that enables rapid + prototyping and development of expressive multitouch electronic instrument interfaces + within a web browser. Extensions of this project assist in easily creating dynamic + user interfaces. 
NexusUI contains several novel encapsulations of creative interface + objects, each accessible with one line of code. NexusUp assists in one-button + duplication of Max interfaces into mobile-friendly web pages that transmit to + Max automatically via Open Sound Control. NexusDrop enables drag-and-drop interface + building and saves interfaces to a central Nexus database. Finally, we provide + an overview of several projects made with NexusUI, including mobile instruments, + art installations, sound diffusion tools, and iOS games, and describe Nexus'' + possibilities as an architecture for our future Mobile App Orchestra.' + address: 'London, United Kingdom' + author: Benjamin Taylor and Jesse Allison and William Conlin and Yemin Oh and Daniel + Holmes + bibtex: "@inproceedings{btaylor2014,\n abstract = {Developing for mobile and multimodal\ + \ platforms is more important now than ever, as smartphones and tablets proliferate\ + \ and mobile device orchestras become commonplace. We detail NexusUI, a JavaScript\ + \ framework that enables rapid prototyping and development of expressive multitouch\ + \ electronic instrument interfaces within a web browser. Extensions of this project\ + \ assist in easily creating dynamic user interfaces. NexusUI contains several\ + \ novel encapsulations of creative interface objects, each accessible with one\ + \ line of code. NexusUp assists in one-button duplication of Max interfaces into\ + \ mobile-friendly web pages that transmit to Max automatically via Open Sound\ + \ Control. NexusDrop enables drag-and-drop interface building and saves interfaces\ + \ to a central Nexus database. 
Finally, we provide an overview of several projects\ + \ made with NexusUI, including mobile instruments, art installations, sound diffusion\ + \ tools, and iOS games, and describe Nexus' possibilities as an architecture for\ + \ our future Mobile App Orchestra.},\n address = {London, United Kingdom},\n author\ + \ = {Benjamin Taylor and Jesse Allison and William Conlin and Yemin Oh and Daniel\ + \ Holmes},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178951},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {257--262},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Simplified Expressive Mobile Development with NexusUI,\ + \ NexusUp, and NexusDrop},\n url = {http://www.nime.org/proceedings/2014/nime2014_480.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178951 + issn: 2220-4806 + month: June + pages: 257--262 + publisher: 'Goldsmiths, University of London' + title: 'Simplified Expressive Mobile Development with NexusUI, NexusUp, and NexusDrop' + url: http://www.nime.org/proceedings/2014/nime2014_480.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: mfunk2014 + abstract: 'When looking at modern music club settings, especially in the area of + electronic music, music is consumed in a unidirectional way -from DJ or producer + to the audience -with little direct means to influence and participate. In this + paper we challenge this phenomenon and aim for a new bond between the audience + and the DJ through the creation of an interactive dance concept: Experio. Experio + allows for multiple audience participants influencing the musical performance + through dance, facilitated by a musical moderator using a tailored interface. 
+ This co-creation of electronic music on both novice and expert levels is a new + participatory live performance approach, which is evaluated on the basis of thousands + of visitors who interacted with Experio during several international exhibitions.' + address: 'London, United Kingdom' + author: Bastiaan van Hout and Luca Giacolini and Bart Hengeveld and Mathias Funk + and Joep Frens + bibtex: "@inproceedings{mfunk2014,\n abstract = {When looking at modern music club\ + \ settings, especially in the area of electronic music, music is consumed in a\ + \ unidirectional way -from DJ or producer to the audience -with little direct\ + \ means to influence and participate. In this paper we challenge this phenomenon\ + \ and aim for a new bond between the audience and the DJ through the creation\ + \ of an interactive dance concept: Experio. Experio allows for multiple audience\ + \ participants influencing the musical performance through dance, facilitated\ + \ by a musical moderator using a tailored interface. 
This co-creation of electronic\ + \ music on both novice and expert levels is a new participatory live performance\ + \ approach, which is evaluated on the basis of thousands of visitors who interacted\ + \ with Experio during several international exhibitions.},\n address = {London,\ + \ United Kingdom},\n author = {Bastiaan van Hout and Luca Giacolini and Bart Hengeveld\ + \ and Mathias Funk and Joep Frens},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178808},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {46--49},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Experio: a Design for Novel Audience Participation\ + \ in Club Settings},\n url = {http://www.nime.org/proceedings/2014/nime2014_481.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178808 + issn: 2220-4806 + month: June + pages: 46--49 + publisher: 'Goldsmiths, University of London' + title: 'Experio: a Design for Novel Audience Participation in Club Settings' + url: http://www.nime.org/proceedings/2014/nime2014_481.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: jfrancoise12014 + abstract: We present a set of probabilistic models that support the design of movement + and sound relationships in interactive sonic systems. We focus on a mapping--by--demonstration + approach in which the relationships between motion and sound are defined by a + machine learning model that learns from a set of user examples. We describe four + probabilistic models with complementary characteristics in terms of multimodality + and temporality. We illustrate the practical use of each of the four models with + a prototype application for sound control built using our Max implementation. 
+ address: 'London, United Kingdom' + author: Jules Françoise and Norbert Schnell and Riccardo Borghesi and Frédéric Bevilacqua + bibtex: "@inproceedings{jfrancoise12014,\n abstract = {We present a set of probabilistic\ + \ models that support the design of movement and sound relationships in interactive\ + \ sonic systems. We focus on a mapping--by--demonstration approach in which the\ + \ relationships between motion and sound are defined by a machine learning model\ + \ that learns from a set of user examples. We describe four probabilistic models\ + \ with complementary characteristics in terms of multimodality and temporality.\ + \ We illustrate the practical use of each of the four models with a prototype\ + \ application for sound control built using our Max implementation.},\n address\ + \ = {London, United Kingdom},\n author = {Jules Fran\\c{c}oise and Norbert Schnell\ + \ and Riccardo Borghesi and Fr\\'ed\\'eric Bevilacqua},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178764},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {287--292},\n publisher = {Goldsmiths, University of London},\n title = {Probabilistic\ + \ Models for Designing Motion and Sound Relationships},\n url = {http://www.nime.org/proceedings/2014/nime2014_482.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178764 + issn: 2220-4806 + month: June + pages: 287--292 + publisher: 'Goldsmiths, University of London' + title: Probabilistic Models for Designing Motion and Sound Relationships + url: http://www.nime.org/proceedings/2014/nime2014_482.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: atsiros12014 + abstract: 'This paper presents the findings of two exploratory studies. In these + studies participants performed a series of image-sound association tasks. 
The + aim of the studies was to investigate the perceived similarity and the efficacy + of two multidimensional mappings each consisting of three audio-visual associations. + The purpose of the mappings is to enable visual control of corpus-based concatenative + synthesis. More specifically the stimuli in the first study was designed to test + the perceived similarity of six audio-visual associations, between the two mappings + using three corpora resulting in 18 audio-visual stimuli. The corpora differ in + terms of two sound characteristics: harmonic contain and continuity. Data analysis + revealed no significant differences in the participant''s responses between the + three corpora, or between the two mappings. However highly significant differences + were revealed between the individual audio-visual association pairs. The second + study investigates the affects of the mapping and the corpus in the ability of + the participants to detect which image out of three similar images was used to + generate six audio stimuli. The data analysis revealed significant differences + in the ability of the participants'' to detect the correct image depending on + which corpus was used. Less significant was the effect of the mapping in the success + rate of the participant responses.' + address: 'London, United Kingdom' + author: Augoustinos Tsiros + bibtex: "@inproceedings{atsiros12014,\n abstract = {This paper presents the findings\ + \ of two exploratory studies. In these studies participants performed a series\ + \ of image-sound association tasks. The aim of the studies was to investigate\ + \ the perceived similarity and the efficacy of two multidimensional mappings each\ + \ consisting of three audio-visual associations. The purpose of the mappings is\ + \ to enable visual control of corpus-based concatenative synthesis. 
More specifically\ + \ the stimuli in the first study was designed to test the perceived similarity\ + \ of six audio-visual associations, between the two mappings using three corpora\ + \ resulting in 18 audio-visual stimuli. The corpora differ in terms of two sound\ + \ characteristics: harmonic contain and continuity. Data analysis revealed no\ + \ significant differences in the participant's responses between the three corpora,\ + \ or between the two mappings. However highly significant differences were revealed\ + \ between the individual audio-visual association pairs. The second study investigates\ + \ the affects of the mapping and the corpus in the ability of the participants\ + \ to detect which image out of three similar images was used to generate six audio\ + \ stimuli. The data analysis revealed significant differences in the ability of\ + \ the participants' to detect the correct image depending on which corpus was\ + \ used. Less significant was the effect of the mapping in the success rate of\ + \ the participant responses.},\n address = {London, United Kingdom},\n author\ + \ = {Augoustinos Tsiros},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178965},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {421--426},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Evaluating the Perceived Similarity Between\ + \ Audio-Visual Features Using Corpus-Based Concatenative Synthesis},\n url = {http://www.nime.org/proceedings/2014/nime2014_484.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178965 + issn: 2220-4806 + month: June + pages: 421--426 + publisher: 'Goldsmiths, University of London' + title: Evaluating the Perceived Similarity Between Audio-Visual Features Using Corpus-Based + Concatenative Synthesis + url: 
http://www.nime.org/proceedings/2014/nime2014_484.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: ngold2014 + abstract: 'The Leap Motion(TM) sensor offers fine-grained gesture-recognition and + hand tracking. Since its release, there have been several uses of the device for + instrument design, musical interaction and expression control, documented through + online video. However, there has been little formal documented investigation of + the potential and challenges of the platform in this context. This paper presents + lessons learned from work-in-progress on the development of musical instruments + and control applications using the Leap Motion(TM) sensor. Two instruments are + presented: Air-Keys and Air-Pads and the potential for augmentation of a traditional + keyboard is explored. The results show that the platform is promising in this + context but requires various challenges, both physical and logical, to be overcome.' + address: 'London, United Kingdom' + author: Jihyun Han and Nicolas Gold + bibtex: "@inproceedings{ngold2014,\n abstract = {The Leap Motion(TM) sensor offers\ + \ fine-grained gesture-recognition and hand tracking. Since its release, there\ + \ have been several uses of the device for instrument design, musical interaction\ + \ and expression control, documented through online video. However, there has\ + \ been little formal documented investigation of the potential and challenges\ + \ of the platform in this context. This paper presents lessons learned from work-in-progress\ + \ on the development of musical instruments and control applications using the\ + \ Leap Motion(TM) sensor. Two instruments are presented: Air-Keys and Air-Pads\ + \ and the potential for augmentation of a traditional keyboard is explored. 
The\ + \ results show that the platform is promising in this context but requires various\ + \ challenges, both physical and logical, to be overcome.},\n address = {London,\ + \ United Kingdom},\n author = {Jihyun Han and Nicolas Gold},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178784},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {371--374},\n publisher = {Goldsmiths, University of London},\n title = {Lessons\ + \ Learned in Exploring the Leap Motion(TM) Sensor for Gesture-based Instrument\ + \ Design},\n url = {http://www.nime.org/proceedings/2014/nime2014_485.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178784 + issn: 2220-4806 + month: June + pages: 371--374 + publisher: 'Goldsmiths, University of London' + title: Lessons Learned in Exploring the Leap Motion(TM) Sensor for Gesture-based + Instrument Design + url: http://www.nime.org/proceedings/2014/nime2014_485.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: jlarsen2014 + abstract: 'People with a physical handicap are often not able to engage and embrace + the world of music on the same terms as normal functioning people. Musical instruments + have been refined the last centuries which makes them highly specialized instruments + that nearly all requires at least two functioning hands. In this study we try + to enable people with hemiplegia to play a real electrical guitar by modifying + it in a way that make people with hemiplegia able to actually play the guitar. + We developed the guitar platform to utilize sensors to capture the rhythmic motion + of alternative fully functioning limbs, such as a foot, knee or the head to activate + a motorized fader moving a pick back and forth across the strings. 
The approach + employs the flexibility of a programmable digital system which allows us to scale + and map different ranges of data from various sensors to the motion of the actuator + and thereby making it easier adapt to individual users. To validate and test the + instrument platform we collaborated with the Helena Elsass Center during their + 2013 Summer Camp to see if we actually succeeded in creating an electrical guitar + that children with hemiplegia could actually play. The initial user studies showed + that children with hemiplegia were able to play the actuated guitar by producing + rhythmical movement across the strings that enables them to enter a world of music + they so often see as closed.' + address: 'London, United Kingdom' + author: Jeppe Larsen and Dan Overholt and Thomas Moeslund + bibtex: "@inproceedings{jlarsen2014,\n abstract = {People with a physical handicap\ + \ are often not able to engage and embrace the world of music on the same terms\ + \ as normal functioning people. Musical instruments have been refined the last\ + \ centuries which makes them highly specialized instruments that nearly all requires\ + \ at least two functioning hands. In this study we try to enable people with hemiplegia\ + \ to play a real electrical guitar by modifying it in a way that make people with\ + \ hemiplegia able to actually play the guitar. We developed the guitar platform\ + \ to utilize sensors to capture the rhythmic motion of alternative fully functioning\ + \ limbs, such as a foot, knee or the head to activate a motorized fader moving\ + \ a pick back and forth across the strings. The approach employs the flexibility\ + \ of a programmable digital system which allows us to scale and map different\ + \ ranges of data from various sensors to the motion of the actuator and thereby\ + \ making it easier adapt to individual users. 
To validate and test the instrument\ + \ platform we collaborated with the Helena Elsass Center during their 2013 Summer\ + \ Camp to see if we actually succeeded in creating an electrical guitar that children\ + \ with hemiplegia could actually play. The initial user studies showed that children\ + \ with hemiplegia were able to play the actuated guitar by producing rhythmical\ + \ movement across the strings that enables them to enter a world of music they\ + \ so often see as closed.},\n address = {London, United Kingdom},\n author = {Jeppe\ + \ Larsen and Dan Overholt and Thomas Moeslund},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178845},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {60--65},\n publisher = {Goldsmiths, University of London},\n title = {The\ + \ Actuated guitar: Implementation and user test on children with Hemiplegia},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_486.pdf},\n year = {2014}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178845 + issn: 2220-4806 + month: June + pages: 60--65 + publisher: 'Goldsmiths, University of London' + title: 'The Actuated guitar: Implementation and user test on children with Hemiplegia' + url: http://www.nime.org/proceedings/2014/nime2014_486.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: tresch2014 + abstract: This paper proposes a simple architecture for creating (indoor) audio + walks by using a server running Max/MSP together with the external object fhnw.audiowalk.state + and smartphone clients running either under Android or iOS using LibPd. Server + and smartphone clients communicate over WLAN by exchanging OSC messages. Server + and client have been designed in a way that allows artists with only little programming + skills to create position-based audio walks. 
+ address: 'London, United Kingdom' + author: Thomas Resch and Matthias Krebs + bibtex: "@inproceedings{tresch2014,\n abstract = {This paper proposes a simple architecture\ + \ for creating (indoor) audio walks by using a server running Max/MSP together\ + \ with the external object fhnw.audiowalk.state and smartphone clients running\ + \ either under Android or iOS using LibPd. Server and smartphone clients communicate\ + \ over WLAN by exchanging OSC messages. Server and client have been designed in\ + \ a way that allows artists with only little programming skills to create position-based\ + \ audio walks.},\n address = {London, United Kingdom},\n author = {Thomas Resch\ + \ and Matthias Krebs},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178917},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {269--272},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {A Simple Architecture for Server-based (Indoor)\ + \ Audio Walks},\n url = {http://www.nime.org/proceedings/2014/nime2014_491.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178917 + issn: 2220-4806 + month: June + pages: 269--272 + publisher: 'Goldsmiths, University of London' + title: A Simple Architecture for Server-based (Indoor) Audio Walks + url: http://www.nime.org/proceedings/2014/nime2014_491.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: strail2014 + abstract: 'The El-Lamellophone (El-La) is a Lamellophone hyperinstrument incorporating + electronic sensors and integrated DSP. Initial investigations have been made into + digitallycontrolled physical actuation of the acoustic tines. An embedded Linux + micro-computer supplants the laptop. 
A piezoelectric pickup is mounted to the + underside of the body of the instrument for direct audio acquisition providing + a robust signal with little interference. The signal is used for electric sound-reinforcement, + creative signal processing and audio analysis developed in Puredata (Pd). This + signal inputs and outputs the micro computer via stereo 1/8th inch phono jacks. + Sensors provide gesture recognition affording the performer a broader, more dynamic + range of musical human computer interaction (MHCI) over specific DSP functions. + Work has been done toward electromagnetic actuation of the tines, aiming to allow + performer control and sensation via both traditional Lamellophone techniques, + as well as extended playing techniques that incorporate shared human/computer + control of the resulting sound. The goal is to achieve this without compromising + the traditional sound production methods of the acoustic instrument while leveraging + inherent performance gestures with embedded continuous controller values essential + to MHCI. The result is an intuitive, performer designed, hybrid electro-acoustic + instrument, idiomatic computer interface, and robotic acoustic instrument in one + framework.' + address: 'London, United Kingdom' + author: Shawn Trail and Duncan MacConnell and Leo Jenkins and Jeff Snyder and George + Tzanetakis and Peter Driessen + bibtex: "@inproceedings{strail2014,\n abstract = {The El-Lamellophone (El-La) is\ + \ a Lamellophone hyperinstrument incorporating electronic sensors and integrated\ + \ DSP. Initial investigations have been made into digitallycontrolled physical\ + \ actuation of the acoustic tines. An embedded Linux micro-computer supplants\ + \ the laptop. A piezoelectric pickup is mounted to the underside of the body of\ + \ the instrument for direct audio acquisition providing a robust signal with little\ + \ interference. 
The signal is used for electric sound-reinforcement, creative\ + \ signal processing and audio analysis developed in Puredata (Pd). This signal\ + \ inputs and outputs the micro computer via stereo 1/8th inch phono jacks. Sensors\ + \ provide gesture recognition affording the performer a broader, more dynamic\ + \ range of musical human computer interaction (MHCI) over specific DSP functions.\ + \ Work has been done toward electromagnetic actuation of the tines, aiming to\ + \ allow performer control and sensation via both traditional Lamellophone techniques,\ + \ as well as extended playing techniques that incorporate shared human/computer\ + \ control of the resulting sound. The goal is to achieve this without compromising\ + \ the traditional sound production methods of the acoustic instrument while leveraging\ + \ inherent performance gestures with embedded continuous controller values essential\ + \ to MHCI. The result is an intuitive, performer designed, hybrid electro-acoustic\ + \ instrument, idiomatic computer interface, and robotic acoustic instrument in\ + \ one framework.},\n address = {London, United Kingdom},\n author = {Shawn Trail\ + \ and Duncan MacConnell and Leo Jenkins and Jeff Snyder and George Tzanetakis\ + \ and Peter Driessen},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178959},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {537--540},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {El-Lamellophone A Low-cost, DIY, Open Framework\ + \ for Acoustic Lemellophone Based Hyperinstruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_492.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178959 + issn: 2220-4806 + month: June + pages: 537--540 + publisher: 'Goldsmiths, University of London' + title: 'El-Lamellophone A 
Low-cost, DIY, Open Framework for Acoustic Lemellophone + Based Hyperinstruments' + url: http://www.nime.org/proceedings/2014/nime2014_492.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: nklugel2014 + abstract: In this contribution we will show three prototypical applications that + allow users to collaboratively create rhythmic structures with successively more + degrees of freedom to generate rhythmic complexity. By means of a user study we + analyze the impact of this on the users' satisfaction and further compare it to + data logged during the experiments that allow us to measure the rhythmic complexity + created. + address: 'London, United Kingdom' + author: Niklas Klügel and Gerhard Hagerer and Georg Groh + bibtex: "@inproceedings{nklugel2014,\n abstract = {In this contribution we will\ + \ show three prototypical applications that allow users to collaboratively create\ + \ rhythmic structures with successively more degrees of freedom to generate rhythmic\ + \ complexity. By means of a user study we analyze the impact of this on the users'\ + \ satisfaction and further compare it to data logged during the experiments that\ + \ allow us to measure the rhythmic complexity created.},\n address = {London,\ + \ United Kingdom},\n author = {Niklas Kl\\''ugel and Gerhard Hagerer and Georg\ + \ Groh},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178835},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {50--53},\n publisher = {Goldsmiths, University of\ + \ London},\n title = {TreeQuencer: Collaborative Rhythm Sequencing A Comparative\ + \ Study},\n url = {http://www.nime.org/proceedings/2014/nime2014_498.pdf},\n year\ + \ = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178835 + issn: 2220-4806 + month: June + pages: 50--53 + publisher: 'Goldsmiths, University of London' + title: 
'TreeQuencer: Collaborative Rhythm Sequencing A Comparative Study' + url: http://www.nime.org/proceedings/2014/nime2014_498.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: dschlienger2014 + abstract: 'This paper provides a rationale for choosing acoustic localisation techniques + as an alternative to other principles to provide spatial positions in interactive + locative audio applications (ILAA). By comparing positioning technology in existing + ILAAs to the expected performance of acoustic positioning systems (APS), we can + evaluate if APS would perform equivalently in a particular application. In this + paper, the titles of NIME conference proceedings from 2001 to 2013 were searched + for presentations on ILAA using positioning technology. Over 80 relevant articles + were found. For each of the systems we evaluated if and why APS would be a contender + or not. The results showed that for over 73 percent of the reviewed applications, + APS could possibly provide competitive alternatives and at very low cost.' + address: 'London, United Kingdom' + author: Dominik Schlienger and Sakari Tervo + bibtex: "@inproceedings{dschlienger2014,\n abstract = {This paper provides a rationale\ + \ for choosing acoustic localisation techniques as an alternative to other principles\ + \ to provide spatial positions in interactive locative audio applications (ILAA).\ + \ By comparing positioning technology in existing ILAAs to the expected performance\ + \ of acoustic positioning systems (APS), we can evaluate if APS would perform\ + \ equivalently in a particular application. In this paper, the titles of NIME\ + \ conference proceedings from 2001 to 2013 were searched for presentations on\ + \ ILAA using positioning technology. Over 80 relevant articles were found. 
For\ + \ each of the systems we evaluated if and why APS would be a contender or not.\ + \ The results showed that for over 73 percent of the reviewed applications, APS\ + \ could possibly provide competitive alternatives and at very low cost.},\n address\ + \ = {London, United Kingdom},\n author = {Dominik Schlienger and Sakari Tervo},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178933},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {439--442},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Acoustic Localisation as an Alternative to Positioning\ + \ Principles in Applications presented at NIME 2001-2013},\n url = {http://www.nime.org/proceedings/2014/nime2014_501.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178933 + issn: 2220-4806 + month: June + pages: 439--442 + publisher: 'Goldsmiths, University of London' + title: Acoustic Localisation as an Alternative to Positioning Principles in Applications + presented at NIME 2001-2013 + url: http://www.nime.org/proceedings/2014/nime2014_501.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: cfaubel12014 + abstract: 'In the paper I present a robotic device that offers new ways of interaction + for producing rhythmic patterns. The apparatus is placed on an overhead projector + and a visual presentation of these rhythmic patterns is delivered as a shadow + play. The rhythmic patterns can be manipulated by modifying the environment of + the robot, through direct physical interaction with the robot, by rewiring the + internal connectivity, and by adjusting internal parameters. The theory of embodied + cognition provides the theoretical basis of this device. 
The core postulate of + embodied cognition is that biological behavior can only be understood through + an understanding of the real-time interactions of an organism''s nervous system, + the organism''s body and the environment. One the one hand the device illustrates + this theory because the patterns that are created equally depend on the real-time + interactions of the electronics, the physical structure of the device and the + environment. On the other hand the device presents a synthesis of these ideas + and it is effectively possible to play with it at all the three levels, the electronics, + the physical configuration of the robot and the environment.' + address: 'London, United Kingdom' + author: Christian Faubel + bibtex: "@inproceedings{cfaubel12014,\n abstract = {In the paper I present a robotic\ + \ device that offers new ways of interaction for producing rhythmic patterns.\ + \ The apparatus is placed on an overhead projector and a visual presentation of\ + \ these rhythmic patterns is delivered as a shadow play. The rhythmic patterns\ + \ can be manipulated by modifying the environment of the robot, through direct\ + \ physical interaction with the robot, by rewiring the internal connectivity,\ + \ and by adjusting internal parameters. The theory of embodied cognition provides\ + \ the theoretical basis of this device. The core postulate of embodied cognition\ + \ is that biological behavior can only be understood through an understanding\ + \ of the real-time interactions of an organism's nervous system, the organism's\ + \ body and the environment. 
One the one hand the device illustrates this theory\ + \ because the patterns that are created equally depend on the real-time interactions\ + \ of the electronics, the physical structure of the device and the environment.\ + \ On the other hand the device presents a synthesis of these ideas and it is effectively\ + \ possible to play with it at all the three levels, the electronics, the physical\ + \ configuration of the robot and the environment.},\n address = {London, United\ + \ Kingdom},\n author = {Christian Faubel},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180950},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {491--494},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Rhythm Apparatus on Overhead},\n url = {http://www.nime.org/proceedings/2014/nime2014_503.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1180950 + issn: 2220-4806 + month: June + pages: 491--494 + publisher: 'Goldsmiths, University of London' + title: Rhythm Apparatus on Overhead + url: http://www.nime.org/proceedings/2014/nime2014_503.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: ahazzard2014 + abstract: 'Music plays a vital role in accompanying all manner of our experiences. + Soundtracks within films, video games and ceremonies possess a unique ability + to enhance a narrative, suggest emotional content and mark key transitions. Moreover, + soundtracks often achieve all of this without being the primary focus, on the + contrary they typically assume a supporting role. The proliferation of mobile + devices increasingly leads us to listen to music while on the move and musicians + are seizing on locative technologies as a tool for creating new kinds of music + that directly respond to people''s movements through space. 
In light of these + trends, we consider the interesting question of how composers might set about + creating musical soundtracks to accompany mobile experiences. What we have in + mind are experiences such as guided walks, tours and even pervasive games. The + novelty of our research here is in the music serving as an accompaniment to enhance + a location specific activity, much as a soundtrack does for a film. This calls + for composers to take into account the key features of the experience, and its + setting, to gently complement them through the music. We examine this process + from a composer''s perspective by presenting `from the field'' an account of how + they address the multifaceted challenges of designing a soundtrack for public + sculpture park. We chart a composer''s rationale as they developed a soundtrack + for this site over multiple iterations of design, testing and refinement. We expose + key relationships between the raw materials of music (melody, harmony, timbre, + rhythm and dynamics) and those of the physical setting, that enable the composer + to gracefully mesh the music into the fabric of the space. The result is to propose + a set of recommendations to inform the composition of mobile soundtracks that + we intend to guide future practice and research.' + address: 'London, United Kingdom' + author: Adrian Hazzard and Steve Benford and Gary Burnett + bibtex: "@inproceedings{ahazzard2014,\n abstract = {Music plays a vital role in\ + \ accompanying all manner of our experiences. Soundtracks within films, video\ + \ games and ceremonies possess a unique ability to enhance a narrative, suggest\ + \ emotional content and mark key transitions. Moreover, soundtracks often achieve\ + \ all of this without being the primary focus, on the contrary they typically\ + \ assume a supporting role. 
The proliferation of mobile devices increasingly leads\ + \ us to listen to music while on the move and musicians are seizing on locative\ + \ technologies as a tool for creating new kinds of music that directly respond\ + \ to people's movements through space. In light of these trends, we consider the\ + \ interesting question of how composers might set about creating musical soundtracks\ + \ to accompany mobile experiences. What we have in mind are experiences such as\ + \ guided walks, tours and even pervasive games. The novelty of our research here\ + \ is in the music serving as an accompaniment to enhance a location specific activity,\ + \ much as a soundtrack does for a film. This calls for composers to take into\ + \ account the key features of the experience, and its setting, to gently complement\ + \ them through the music. We examine this process from a composer's perspective\ + \ by presenting `from the field' an account of how they address the multifaceted\ + \ challenges of designing a soundtrack for public sculpture park. We chart a composer's\ + \ rationale as they developed a soundtrack for this site over multiple iterations\ + \ of design, testing and refinement. We expose key relationships between the raw\ + \ materials of music (melody, harmony, timbre, rhythm and dynamics) and those\ + \ of the physical setting, that enable the composer to gracefully mesh the music\ + \ into the fabric of the space. 
The result is to propose a set of recommendations\ + \ to inform the composition of mobile soundtracks that we intend to guide future\ + \ practice and research.},\n address = {London, United Kingdom},\n author = {Adrian\ + \ Hazzard and Steve Benford and Gary Burnett},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178794},\n issn = {2220-4806},\n month = {June},\n pages =\ + \ {411--414},\n publisher = {Goldsmiths, University of London},\n title = {You'll\ + \ Never Walk Alone: Composing Location-Based Soundtracks},\n url = {http://www.nime.org/proceedings/2014/nime2014_506.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178794 + issn: 2220-4806 + month: June + pages: 411--414 + publisher: 'Goldsmiths, University of London' + title: 'You''ll Never Walk Alone: Composing Location-Based Soundtracks' + url: http://www.nime.org/proceedings/2014/nime2014_506.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: kyerkes2014 + abstract: 'Twkyr is a new interface for musical expression that emphasizes realtime + manipulation, audification, and visualization of waveforms with a multitouch surface, + offering different interactivity at different time scales, within the same waveform. + The interactive audiovisual design of Tweakyr is motivated by the need for increased + parsimony and transparency in electronic musical instruments and draws from the + work of Curtis Roads on time scales as qualitative musical parameters, and Edward + Tufte''s ``data-ink'''' principles for the improvement of data graphics.' 
+ address: 'London, United Kingdom' + author: Karl Yerkes and Matthew Wright + bibtex: "@inproceedings{kyerkes2014,\n abstract = {Twkyr is a new interface for\ + \ musical expression that emphasizes realtime manipulation, audification, and\ + \ visualization of waveforms with a multitouch surface, offering different interactivity\ + \ at different time scales, within the same waveform. The interactive audiovisual\ + \ design of Tweakyr is motivated by the need for increased parsimony and transparency\ + \ in electronic musical instruments and draws from the work of Curtis Roads on\ + \ time scales as qualitative musical parameters, and Edward Tufte's ``data-ink''\ + \ principles for the improvement of data graphics.},\n address = {London, United\ + \ Kingdom},\n author = {Karl Yerkes and Matthew Wright},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178989},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {375--378},\n publisher = {Goldsmiths, University of London},\n title = {Twkyr:\ + \ a Multitouch Waveform Looper},\n url = {http://www.nime.org/proceedings/2014/nime2014_508.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178989 + issn: 2220-4806 + month: June + pages: 375--378 + publisher: 'Goldsmiths, University of London' + title: 'Twkyr: a Multitouch Waveform Looper' + url: http://www.nime.org/proceedings/2014/nime2014_508.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: tmays2014 + abstract: 'In this paper we expose the need to go beyond the composer/performer + model of electronic instrument design and programming to encourage the transmission + of compositions and the creation of a repertory via notation of repeatable performance + practice. 
Drawing on 4 years of practice using the Karlax controller (Da Fact) + as a base for new digital musical instruments, we present our notation system + in detail and cite some mapping strategies and examples from to pieces in a growing + repertory of chamber music compositions for electronic and acoustic instruments' + address: 'London, United Kingdom' + author: Tom Mays and Francis Faber + bibtex: "@inproceedings{tmays2014,\n abstract = {In this paper we expose the need\ + \ to go beyond the composer/performer model of electronic instrument design and\ + \ programming to encourage the transmission of compositions and the creation of\ + \ a repertory via notation of repeatable performance practice. Drawing on 4 years\ + \ of practice using the Karlax controller (Da Fact) as a base for new digital\ + \ musical instruments, we present our notation system in detail and cite some\ + \ mapping strategies and examples from to pieces in a growing repertory of chamber\ + \ music compositions for electronic and acoustic instruments},\n address = {London,\ + \ United Kingdom},\n author = {Tom Mays and Francis Faber},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178869},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {553--556},\n publisher = {Goldsmiths, University of London},\n title = {A\ + \ Notation System for the Karlax Controller},\n url = {http://www.nime.org/proceedings/2014/nime2014_509.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178869 + issn: 2220-4806 + month: June + pages: 553--556 + publisher: 'Goldsmiths, University of London' + title: A Notation System for the Karlax Controller + url: http://www.nime.org/proceedings/2014/nime2014_509.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: avanzandt2014 + abstract: 'When performing a piece, a pianist''s 
interpretation is communicated + both through the sound produced and through body gestures. We present PiaF (Piano + Follower), a prototype for augmenting piano performance by measuring gesture variations. + We survey other augmented piano projects, several of which focus on gestural recognition, + and present our prototype which uses machine learning techniques for gesture classification + and estimation of gesture variations in real-time. Our implementation uses the + Kinect depth sensor to track body motion in space, which is used as input data. + During an initial learning phase, the system is taught a set of reference gestures, + or templates. During performance, the live gesture is classified in real-time, + and variations with respect to the recognized template are computed. These values + can then be mapped to audio processing parameters, to control digital effects + which are applied to the acoustic output of the piano in real-time. We discuss + initial tests using PiaF with a pianist, as well as potential applications beyond + live performance, including pedagogy and embodiment of recorded performance.' + address: 'London, United Kingdom' + author: Alejandro Van Zandt-Escobar and Baptiste Caramiaux and Atau Tanaka + bibtex: "@inproceedings{avanzandt2014,\n abstract = {When performing a piece, a\ + \ pianist's interpretation is communicated both through the sound produced and\ + \ through body gestures. We present PiaF (Piano Follower), a prototype for augmenting\ + \ piano performance by measuring gesture variations. We survey other augmented\ + \ piano projects, several of which focus on gestural recognition, and present\ + \ our prototype which uses machine learning techniques for gesture classification\ + \ and estimation of gesture variations in real-time. 
Our implementation uses the\ + \ Kinect depth sensor to track body motion in space, which is used as input data.\ + \ During an initial learning phase, the system is taught a set of reference gestures,\ + \ or templates. During performance, the live gesture is classified in real-time,\ + \ and variations with respect to the recognized template are computed. These values\ + \ can then be mapped to audio processing parameters, to control digital effects\ + \ which are applied to the acoustic output of the piano in real-time. We discuss\ + \ initial tests using PiaF with a pianist, as well as potential applications beyond\ + \ live performance, including pedagogy and embodiment of recorded performance.},\n\ + \ address = {London, United Kingdom},\n author = {Alejandro Van Zandt-Escobar\ + \ and Baptiste Caramiaux and Atau Tanaka},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178991},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {167--170},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {PiaF: A Tool for Augmented Piano Performance\ + \ Using Gesture Variation Following},\n url = {http://www.nime.org/proceedings/2014/nime2014_511.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178991 + issn: 2220-4806 + month: June + pages: 167--170 + publisher: 'Goldsmiths, University of London' + title: 'PiaF: A Tool for Augmented Piano Performance Using Gesture Variation Following' + url: http://www.nime.org/proceedings/2014/nime2014_511.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: pdahlstedt12014 + abstract: 'The idea behind the YouHero was two-fold. First, to make an expressive + instrument out of the computer game toy guitar controller from the famous game + GuitarHero. With its limited amount of control parameters, this was a challenge. 
+ Second, through this instrument we wanted to provide an alternative to the view + that you become a hero by perfect imitation of your idols. Instead, play yourself. + You are the hero. In this paper, we describe the design of the instrument, including + its novel mapping approach based on switched timbre vectors scaled by accellerometer + data, unconventional sound engines and the sound and mapping editing features, + including manual editing of individual vectors. The instrument is evaluated through + its practical applications during the whole project, with workshops with teenagers, + a set of state-funded commissions from professional composers, and the development + of considerable skill by the key performers. We have also submitted a performance + proposal for this project.' + address: 'London, United Kingdom' + author: Palle Dahlstedt and Patrik Karlsson and Katarina Widell and Tony Blomdahl + bibtex: "@inproceedings{pdahlstedt12014,\n abstract = {The idea behind the YouHero\ + \ was two-fold. First, to make an expressive instrument out of the computer game\ + \ toy guitar controller from the famous game GuitarHero. With its limited amount\ + \ of control parameters, this was a challenge. Second, through this instrument\ + \ we wanted to provide an alternative to the view that you become a hero by perfect\ + \ imitation of your idols. Instead, play yourself. You are the hero. In this paper,\ + \ we describe the design of the instrument, including its novel mapping approach\ + \ based on switched timbre vectors scaled by accellerometer data, unconventional\ + \ sound engines and the sound and mapping editing features, including manual editing\ + \ of individual vectors. The instrument is evaluated through its practical applications\ + \ during the whole project, with workshops with teenagers, a set of state-funded\ + \ commissions from professional composers, and the development of considerable\ + \ skill by the key performers. 
We have also submitted a performance proposal for\ + \ this project.},\n address = {London, United Kingdom},\n author = {Palle Dahlstedt\ + \ and Patrik Karlsson and Katarina Widell and Tony Blomdahl},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178742},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {403--406},\n publisher = {Goldsmiths, University of London},\n title = {YouHero\ + \ Making an Expressive Concert Instrument from the GuitarHero Controller},\n url\ + \ = {http://www.nime.org/proceedings/2014/nime2014_513.pdf},\n year = {2014}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178742 + issn: 2220-4806 + month: June + pages: 403--406 + publisher: 'Goldsmiths, University of London' + title: YouHero Making an Expressive Concert Instrument from the GuitarHero Controller + url: http://www.nime.org/proceedings/2014/nime2014_513.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: ldahl2014 + abstract: 'Motion sensing technologies enable musical interfaces where a performer + moves their body "in the air" without manipulating or contacting a physical object. + These interfaces work well when the movement and sound are smooth and continuous, + but it has proven difficult to design a system which triggers discrete sounds + with precision that allows for complex rhythmic performance. We conducted a study + where participants perform ``air-drumming'''' gestures in time to rhythmic sounds. + These movements are recorded, and the timing of various movement features with + respect to the onset of audio events is analyzed. A novel algorithm for detecting + sudden changes in direction is used to find the end of the strike gesture. We + find that these occur on average after the audio onset and that this timing varies + with the tempo of the movement. 
Sharp peaks in magnitude acceleration occur before + the audio onset and do not vary with tempo. These results suggest that detecting + peaks in acceleration will lead to more naturally responsive air gesture instruments.' + address: 'London, United Kingdom' + author: Luke Dahl + bibtex: "@inproceedings{ldahl2014,\n abstract = {Motion sensing technologies enable\ + \ musical interfaces where a performer moves their body \"in the air\" without\ + \ manipulating or contacting a physical object. These interfaces work well when\ + \ the movement and sound are smooth and continuous, but it has proven difficult\ + \ to design a system which triggers discrete sounds with precision that allows\ + \ for complex rhythmic performance. We conducted a study where participants perform\ + \ ``air-drumming'' gestures in time to rhythmic sounds. These movements are recorded,\ + \ and the timing of various movement features with respect to the onset of audio\ + \ events is analyzed. A novel algorithm for detecting sudden changes in direction\ + \ is used to find the end of the strike gesture. We find that these occur on average\ + \ after the audio onset and that this timing varies with the tempo of the movement.\ + \ Sharp peaks in magnitude acceleration occur before the audio onset and do not\ + \ vary with tempo. 
These results suggest that detecting peaks in acceleration\ + \ will lead to more naturally responsive air gesture instruments.},\n address\ + \ = {London, United Kingdom},\n author = {Luke Dahl},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178738},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {201--206},\n publisher = {Goldsmiths, University of London},\n title = {Triggering\ + \ Sounds from Discrete Air Gestures: What Movement Feature Has the Best Timing?},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_514.pdf},\n year = {2014}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178738 + issn: 2220-4806 + month: June + pages: 201--206 + publisher: 'Goldsmiths, University of London' + title: 'Triggering Sounds from Discrete Air Gestures: What Movement Feature Has + the Best Timing?' + url: http://www.nime.org/proceedings/2014/nime2014_514.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: chonigman2014 + abstract: 'This paper introduces a new technique for creating Swept Frequency Capacitive + Sensing with open source technology for use in creating richer and more complex + musical gestures. This new style of capacitive touch sensing is extremely robust + compared to older versions and will allow greater implementation of gesture recognition + and touch control in the development of NIMEs. Inspired by the Touché project, + this paper discusses how to implement this technique using the community standard + hardware Arduino instead of custom designed electronics. The technique requires + only passive components and can be used to enhance the touch sensitivity of many + everyday objects and even biological materials and substances such as plants, + which this paper will focus on as a case study through the project known as Cultivating + Frequencies. 
This paper will discuss different techniques of filtering data captured + by this system, different methods for creating gesture recognition unique to the + object being used, and the implications of this technology as it pertains to the + goal of ubiquitous sensing. Furthermore, this paper will introduce a new Arduino + Library, SweepingCapSense, which simplifies the coding required to implement this + technique.' + address: 'London, United Kingdom' + author: Colin Honigman and Jordan Hochenbaum and Ajay Kapur + bibtex: "@inproceedings{chonigman2014,\n abstract = {This paper introduces a new\ + \ technique for creating Swept Frequency Capacitive Sensing with open source technology\ + \ for use in creating richer and more complex musical gestures. This new style\ + \ of capacitive touch sensing is extremely robust compared to older versions and\ + \ will allow greater implementation of gesture recognition and touch control in\ + \ the development of NIMEs. Inspired by the Touch{\\'e} project, this paper discusses\ + \ how to implement this technique using the community standard hardware Arduino\ + \ instead of custom designed electronics. The technique requires only passive\ + \ components and can be used to enhance the touch sensitivity of many everyday\ + \ objects and even biological materials and substances such as plants, which this\ + \ paper will focus on as a case study through the project known as Cultivating\ + \ Frequencies. This paper will discuss different techniques of filtering data\ + \ captured by this system, different methods for creating gesture recognition\ + \ unique to the object being used, and the implications of this technology as\ + \ it pertains to the goal of ubiquitous sensing. 
Furthermore, this paper will\ + \ introduce a new Arduino Library, SweepingCapSense, which simplifies the coding\ + \ required to implement this technique.},\n address = {London, United Kingdom},\n\ + \ author = {Colin Honigman and Jordan Hochenbaum and Ajay Kapur},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178802},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {74--77},\n publisher = {Goldsmiths, University of London},\n\ + \ title = {Techniques in Swept Frequency Capacitive Sensing: An Open Source Approach},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_515.pdf},\n year = {2014}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178802 + issn: 2220-4806 + month: June + pages: 74--77 + publisher: 'Goldsmiths, University of London' + title: 'Techniques in Swept Frequency Capacitive Sensing: An Open Source Approach' + url: http://www.nime.org/proceedings/2014/nime2014_515.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: hdiao2014 + abstract: 'Sketching is a natural way for one person to convey their thoughts and + intentions to another. With the recent rise of tablet-based computing, the use + of sketching as a control and interaction paradigm is one that deserves exploration. + In this paper we present an interactive sketch-based music composition and performance + system called Drawchestra. The aim of the system is to give users an intuitive + way to convey their musical ideas to a computer system with the minimum of technical + training thus enabling them to focus on the creative tasks of composition and + performance. The system provides the user with a canvas upon which they may create + their own instruments by sketching shapes on the tablet screen. The system recognises + a certain set of shapes which it treats as virtual instruments or effects. 
Once + recognised, these virtual instruments can then be played by the user in real time. + The size of a sketched instrument shape is used to control certain parameters + of the sound so the user can build complex orchestras containing many different + shapes of different sizes. The sketched shapes may also be moved and resized as + desired making it possible to customise and edit the virtual orchestra as the + user goes along. The system has been implemented in Python and user tests conducted + using an iPad as the control surface. We report the results of the user study + at the end of the paper before briefly discussing the outcome and outlining the + next steps for the system design.' + address: 'London, United Kingdom' + author: Haojing Diao and Yanchao Zhou and Christopher Andrew Harte and Nick Bryan-Kinns + bibtex: "@inproceedings{hdiao2014,\n abstract = {Sketching is a natural way for\ + \ one person to convey their thoughts and intentions to another. With the recent\ + \ rise of tablet-based computing, the use of sketching as a control and interaction\ + \ paradigm is one that deserves exploration. In this paper we present an interactive\ + \ sketch-based music composition and performance system called Drawchestra. The\ + \ aim of the system is to give users an intuitive way to convey their musical\ + \ ideas to a computer system with the minimum of technical training thus enabling\ + \ them to focus on the creative tasks of composition and performance. The system\ + \ provides the user with a canvas upon which they may create their own instruments\ + \ by sketching shapes on the tablet screen. The system recognises a certain set\ + \ of shapes which it treats as virtual instruments or effects. Once recognised,\ + \ these virtual instruments can then be played by the user in real time. 
The size\ + \ of a sketched instrument shape is used to control certain parameters of the\ + \ sound so the user can build complex orchestras containing many different shapes\ + \ of different sizes. The sketched shapes may also be moved and resized as desired\ + \ making it possible to customise and edit the virtual orchestra as the user goes\ + \ along. The system has been implemented in Python and user tests conducted using\ + \ an iPad as the control surface. We report the results of the user study at the\ + \ end of the paper before briefly discussing the outcome and outlining the next\ + \ steps for the system design.},\n address = {London, United Kingdom},\n author\ + \ = {Haojing Diao and Yanchao Zhou and Christopher Andrew Harte and Nick Bryan-Kinns},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178748},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {569--572},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Sketch-Based Musical Composition and Performance},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_517.pdf},\n year = {2014}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178748 + issn: 2220-4806 + month: June + pages: 569--572 + publisher: 'Goldsmiths, University of London' + title: Sketch-Based Musical Composition and Performance + url: http://www.nime.org/proceedings/2014/nime2014_517.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: jratcliffe2014 + abstract: 'This paper presents a control surface interface for music mixing using + real time computer vision. Two input sensors are considered: the Leap Motion and + the Microsoft Kinect. 
The author presents significant design considerations, including + improving of the user''s sense of depth and panorama, maintaining broad accessibility + by integrating the system with Digital Audio Workstation (DAW) software, and implementing + a system that is portable and affordable. To provide the user with a heightened + sense of sound spatialization over the traditional channel strip, the concept + of depth is addressed directly using the stage metaphor. Sound sources are represented + as colored spheres in a graphical user interface to provide the user with visual + feedback. Moving sources back and forward controls volume, while left to right + controls panning. To provide broader accessibility, the interface is configured + to control mixing within the Ableton Live DAW. The author also discusses future + plans to expand functionality and evaluate the system.' + address: 'London, United Kingdom' + author: Jarrod Ratcliffe + bibtex: "@inproceedings{jratcliffe2014,\n abstract = {This paper presents a control\ + \ surface interface for music mixing using real time computer vision. Two input\ + \ sensors are considered: the Leap Motion and the Microsoft Kinect. The author\ + \ presents significant design considerations, including improving of the user's\ + \ sense of depth and panorama, maintaining broad accessibility by integrating\ + \ the system with Digital Audio Workstation (DAW) software, and implementing a\ + \ system that is portable and affordable. To provide the user with a heightened\ + \ sense of sound spatialization over the traditional channel strip, the concept\ + \ of depth is addressed directly using the stage metaphor. Sound sources are represented\ + \ as colored spheres in a graphical user interface to provide the user with visual\ + \ feedback. Moving sources back and forward controls volume, while left to right\ + \ controls panning. 
To provide broader accessibility, the interface is configured\ + \ to control mixing within the Ableton Live DAW. The author also discusses future\ + \ plans to expand functionality and evaluate the system.},\n address = {London,\ + \ United Kingdom},\n author = {Jarrod Ratcliffe},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178911},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {136--139},\n publisher = {Goldsmiths, University of London},\n title = {Hand\ + \ and Finger Motion-Controlled Audio Mixing Interface},\n url = {http://www.nime.org/proceedings/2014/nime2014_518.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178911 issn: 2220-4806 - month: May - numpages: 11 - pages: 277--287 - title: 'A Wearable Technology For Wind Musicians: Does It Matter How You Breathe?' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_40.pdf - year: 2023 + month: June + pages: 136--139 + publisher: 'Goldsmiths, University of London' + title: Hand and Finger Motion-Controlled Audio Mixing Interface + url: http://www.nime.org/proceedings/2014/nime2014_518.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_41 - abstract: 'In this work we introduce LDSP, a novel technology capable of turning - any Android phone into a high-performance embedded platform for digital musical - instrument (DMI) design. Embedded platforms are powerful technologies that changed - the way we design and even think of DMIs. Their widespread adoption has popularized - low-level audio programming, enabling engineers and artists alike to create highly - responsive, self-contained digital musical instruments that have direct access - to hardware resources. 
However, if we shift our focus away from the wealthy countries - of the `Global North'', embedded platforms become a commodity that only a few - can afford. DMI researchers, artists and students from Latin America have discussed - at great lengths the effects that the lack of access to these otherwise common - resources have on their practices. And while some solutions have been proposed, - a large gap can still be perceived. By means of appropriating possibly the most - widespread and accessible technology in the world (Android) and turn it into an - embedded platform, LDSP creates an effective opportunity to close this gap. Throughout - the paper, we provide technical details of the full LDSP environment, along with - insights on the surprising performances of the first DMIs that have been designed - with it.' - address: 'Mexico City, Mexico' - articleno: 41 - author: Carla Tapparo and Brooke Chalmers and Victor Zappi - bibtex: "@article{nime2023_41,\n abstract = {In this work we introduce LDSP, a novel\ - \ technology capable of turning any Android phone into a high-performance embedded\ - \ platform for digital musical instrument (DMI) design. Embedded platforms are\ - \ powerful technologies that changed the way we design and even think of DMIs.\ - \ Their widespread adoption has popularized low-level audio programming, enabling\ - \ engineers and artists alike to create highly responsive, self-contained digital\ - \ musical instruments that have direct access to hardware resources. However,\ - \ if we shift our focus away from the wealthy countries of the `Global North',\ - \ embedded platforms become a commodity that only a few can afford. DMI researchers,\ - \ artists and students from Latin America have discussed at great lengths the\ - \ effects that the lack of access to these otherwise common resources have on\ - \ their practices. And while some solutions have been proposed, a large gap can\ - \ still be perceived. 
By means of appropriating possibly the most widespread and\ - \ accessible technology in the world (Android) and turn it into an embedded platform,\ - \ LDSP creates an effective opportunity to close this gap. Throughout the paper,\ - \ we provide technical details of the full LDSP environment, along with insights\ - \ on the surprising performances of the first DMIs that have been designed with\ - \ it.},\n address = {Mexico City, Mexico},\n articleno = {41},\n author = {Carla\ - \ Tapparo and Brooke Chalmers and Victor Zappi},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {7},\n pages = {288--294},\n title = {Leveraging Android Phones to\ - \ Democratize Low-level Audio Programming},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_41.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: cmckinney2014 + abstract: 'With the growing adoption of internet connectivity across the world, + online collaboration is still a difficult and slow endeavor. Many amazing languages + and tools such as SuperCollider, ChucK, and Max/MSP all facilitate networking + and collaboration, however these languages and tools were not created explicitly + to make group performances simple and intuitive. New web standards such as Web + Audio and Web GL introduce the capability for web browsers to duplicate many of + the features in computer music tools. This paper introduces Lich.js, an effort + to bring musicians together over the internet with minimal effort by leveraging + web technologies.' + address: 'London, United Kingdom' + author: Chad McKinney + bibtex: "@inproceedings{cmckinney2014,\n abstract = {With the growing adoption of\ + \ internet connectivity across the world, online collaboration is still a difficult\ + \ and slow endeavor. 
Many amazing languages and tools such as SuperCollider, ChucK,\ + \ and Max/MSP all facilitate networking and collaboration, however these languages\ + \ and tools were not created explicitly to make group performances simple and\ + \ intuitive. New web standards such as Web Audio and Web GL introduce the capability\ + \ for web browsers to duplicate many of the features in computer music tools.\ + \ This paper introduces Lich.js, an effort to bring musicians together over the\ + \ internet with minimal effort by leveraging web technologies.},\n address = {London,\ + \ United Kingdom},\n author = {Chad McKinney},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178873},\n issn = {2220-4806},\n month = {June},\n pages =\ + \ {379--382},\n publisher = {Goldsmiths, University of London},\n title = {Quick\ + \ Live Coding Collaboration In The Web Browser},\n url = {http://www.nime.org/proceedings/2014/nime2014_519.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178873 issn: 2220-4806 - month: May - numpages: 7 - pages: 288--294 - title: Leveraging Android Phones to Democratize Low-level Audio Programming - track: Papers - url: http://nime.org/proceedings/2023/nime2023_41.pdf - year: 2023 + month: June + pages: 379--382 + publisher: 'Goldsmiths, University of London' + title: Quick Live Coding Collaboration In The Web Browser + url: http://www.nime.org/proceedings/2014/nime2014_519.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_42 - abstract: '“Reembodied sound” refers to the electroacoustic practice of projecting - sound into resonating objects, thereby turning these objects into a kind of speaker. 
- The practice, which typically uses an audio transducer attached to the surface - of the object being resonated, lies in a middle-ground between loudspeaker-based - music and augmented/actuated instruments, allowing practitioners to draw upon - and fuse multiple paradigms of new and emerging technologies. This article examines - Refraction Interlude, an interactive environment for solo performer and transducer-actuated - metal percussion instruments. Building on a decade of reembodied sound research, - the work combines augmented and actuated instruments, physical modeling, pre-recorded - performer input, interactivity, and sound spatialization in a manner that facilitates - adaptability to performer creativity and to the acoustic properties of the actuated - instruments. The computational processes were minimized, designed to forefront - the interaction and integration between these multiple domains.' - address: 'Mexico City, Mexico' - articleno: 42 - author: Matthew Goodheart - bibtex: "@article{nime2023_42,\n abstract = {“Reembodied sound” refers to the electroacoustic\ - \ practice of projecting sound into resonating objects, thereby turning these\ - \ objects into a kind of speaker. The practice, which typically uses an audio\ - \ transducer attached to the surface of the object being resonated, lies in a\ - \ middle-ground between loudspeaker-based music and augmented/actuated instruments,\ - \ allowing practitioners to draw upon and fuse multiple paradigms of new and emerging\ - \ technologies. This article examines Refraction Interlude, an interactive environment\ - \ for solo performer and transducer-actuated metal percussion instruments. 
Building\ - \ on a decade of reembodied sound research, the work combines augmented and actuated\ - \ instruments, physical modeling, pre-recorded performer input, interactivity,\ - \ and sound spatialization in a manner that facilitates adaptability to performer\ - \ creativity and to the acoustic properties of the actuated instruments. The computational\ - \ processes were minimized, designed to forefront the interaction and integration\ - \ between these multiple domains.},\n address = {Mexico City, Mexico},\n articleno\ - \ = {42},\n author = {Matthew Goodheart},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {6},\n pages = {295--300},\n title = {Reembodied Sound and Transducer-actuated\ - \ Instruments in Refraction Interlude},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_42.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: sknotts2014 + abstract: 'This paper reports the results of an online survey of 160 laptop ensembles + and the relative democracy of their organisational and social structures. For + the purposes of this research a laptop ensemble is defined as a performing group + of three or more musicians for whom the laptop is the main sound generating source + and who typically perform together in the same room. The concept of democracy + (i.e. governance by members of the group) has been used as a starting point to + assess firstly what types of organisational structures are currently used in laptop + ensembles and secondarily to what extent laptop ensembles consider the implications + of organisational and social structure on their musical output. To assess this + I recorded a number of data points including ensemble size, whether the group + has a director or conductor, use of homogenous vs. 
heterogenous hardware and software, + whether they perform composed pieces or mainly improvise, the level of network + interaction and whether or not the ensemble has an academic affiliation. The survey + allowed me to define a scale of democracy in laptop ensembles and typical features + of the most and least democratic groups. Some examples are given of democratic + and autocratic activity in existing laptop ensembles. This work is part of a larger + scale project investigating the effect of social structures on the musical output + of laptop ensembles.' + address: 'London, United Kingdom' + author: Shelly Knotts and Nick Collins + bibtex: "@inproceedings{sknotts2014,\n abstract = {This paper reports the results\ + \ of an online survey of 160 laptop ensembles and the relative democracy of their\ + \ organisational and social structures. For the purposes of this research a laptop\ + \ ensemble is defined as a performing group of three or more musicians for whom\ + \ the laptop is the main sound generating source and who typically perform together\ + \ in the same room. The concept of democracy (i.e. governance by members of the\ + \ group) has been used as a starting point to assess firstly what types of organisational\ + \ structures are currently used in laptop ensembles and secondarily to what extent\ + \ laptop ensembles consider the implications of organisational and social structure\ + \ on their musical output. To assess this I recorded a number of data points including\ + \ ensemble size, whether the group has a director or conductor, use of homogenous\ + \ vs. heterogenous hardware and software, whether they perform composed pieces\ + \ or mainly improvise, the level of network interaction and whether or not the\ + \ ensemble has an academic affiliation. The survey allowed me to define a scale\ + \ of democracy in laptop ensembles and typical features of the most and least\ + \ democratic groups. 
Some examples are given of democratic and autocratic activity\ + \ in existing laptop ensembles. This work is part of a larger scale project investigating\ + \ the effect of social structures on the musical output of laptop ensembles.},\n\ + \ address = {London, United Kingdom},\n author = {Shelly Knotts and Nick Collins},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178839},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {191--194},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {The Politics of Laptop Ensembles: A Survey of 160 Laptop\ + \ Ensembles and their Organisational Structures},\n url = {http://www.nime.org/proceedings/2014/nime2014_521.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178839 issn: 2220-4806 - month: May - numpages: 6 - pages: 295--300 - title: Reembodied Sound and Transducer-actuated Instruments in Refraction Interlude - track: Papers - url: http://nime.org/proceedings/2023/nime2023_42.pdf - year: 2023 + month: June + pages: 191--194 + publisher: 'Goldsmiths, University of London' + title: 'The Politics of Laptop Ensembles: A Survey of 160 Laptop Ensembles and their + Organisational Structures' + url: http://www.nime.org/proceedings/2014/nime2014_521.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_43 - abstract: 'In recent decades, with the innovation in sensor technology, the trend - towards smaller digital controllers for instruments has expanded. New generations - of performance styles are growing that rely on compact instruments that can travel - easily and are thus versatile. This article cites two interactive performance - practices to illustrate how larger instruments change the nature of interaction - and sonic outcomes of performance. 
Pressure-sensitive Floors, a wooden set of - platforms for performing electronic music, are compared with a practice on the - Renaissance violone with electronics. Large instruments offer unique additions - to performance and music making that are not accessible in small instruments. - They have their own specific affordances and limitations that affect the musical - decisions of the performer and therefore contribute unique ways of conceptualising - performance. The instruments in this paper have been chosen as the authors have - a ''sustained relationship’ with them and these practices merely act as examples - of the embodied knowledge gained through staying committed to a particular large - instrument. We demonstrate how with such a practice, the performance is recentered - around human presence. This offers a deeper communication between performer and - audience. It creates new avenues for the performance of contemporary music where - the entire body is engaged in movement and sounding. We argue that overlooking - large instruments in favour of their smaller counterparts would result in the - loss of a unique aesthetic as well as conceptual and performance approaches.' - address: 'Mexico City, Mexico' - articleno: 43 - author: iran sanadzadeh and Chloë Sobek - bibtex: "@article{nime2023_43,\n abstract = {In recent decades, with the innovation\ - \ in sensor technology, the trend towards smaller digital controllers for instruments\ - \ has expanded. New generations of performance styles are growing that rely on\ - \ compact instruments that can travel easily and are thus versatile. This article\ - \ cites two interactive performance practices to illustrate how larger instruments\ - \ change the nature of interaction and sonic outcomes of performance. Pressure-sensitive\ - \ Floors, a wooden set of platforms for performing electronic music, are compared\ - \ with a practice on the Renaissance violone with electronics. 
Large instruments\ - \ offer unique additions to performance and music making that are not accessible\ - \ in small instruments. They have their own specific affordances and limitations\ - \ that affect the musical decisions of the performer and therefore contribute\ - \ unique ways of conceptualising performance. The instruments in this paper have\ - \ been chosen as the authors have a 'sustained relationship’ with them and these\ - \ practices merely act as examples of the embodied knowledge gained through staying\ - \ committed to a particular large instrument. We demonstrate how with such a practice,\ - \ the performance is recentered around human presence. This offers a deeper communication\ - \ between performer and audience. It creates new avenues for the performance of\ - \ contemporary music where the entire body is engaged in movement and sounding.\ - \ We argue that overlooking large instruments in favour of their smaller counterparts\ - \ would result in the loss of a unique aesthetic as well as conceptual and performance\ - \ approaches.},\n address = {Mexico City, Mexico},\n articleno = {43},\n author\ - \ = {iran sanadzadeh and Chloë Sobek},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {6},\n pages = {301--306},\n title = {A sustained relationship with large\ - \ instruments - a case against the convenient interface},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_43.pdf},\n year = {2023}\n\ +- ENTRYTYPE: inproceedings + ID: lfeugere2014 + abstract: 'In this demonstration, the mapping and the gestural control strategy + developed in the Digitartic are presented. Digitartic is a musical instrument + able to control sung syllables. 
Performative rule-based synthesis allows for controlling + semi-consonants, plosive, fricative and nasal consonants with a same gesture, + despite the structural differences in natural production of such vocal segments. + A graphic pen tablet is used for capturing the gesture with a high sampling rate + and resolution. This system alows for both performing various manners of articulation + and having a continuous control over the articulation.' + address: 'London, United Kingdom' + author: Lionel Feugère and Christophe d'Alessandro + bibtex: "@inproceedings{lfeugere2014,\n abstract = {In this demonstration, the mapping\ + \ and the gestural control strategy developed in the Digitartic are presented.\ + \ Digitartic is a musical instrument able to control sung syllables. Performative\ + \ rule-based synthesis allows for controlling semi-consonants, plosive, fricative\ + \ and nasal consonants with a same gesture, despite the structural differences\ + \ in natural production of such vocal segments. A graphic pen tablet is used for\ + \ capturing the gesture with a high sampling rate and resolution. 
This system\ + \ alows for both performing various manners of articulation and having a continuous\ + \ control over the articulation.},\n address = {London, United Kingdom},\n author\ + \ = {Lionel Feug\\`ere and Christophe d'Alessandro},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178762},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {86--87},\n publisher = {Goldsmiths, University of London},\n title = {Rule-Based\ + \ Performative Synthesis of Sung Syllables},\n url = {http://www.nime.org/proceedings/2014/nime2014_522.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178762 + issn: 2220-4806 + month: June + pages: 86--87 + publisher: 'Goldsmiths, University of London' + title: Rule-Based Performative Synthesis of Sung Syllables + url: http://www.nime.org/proceedings/2014/nime2014_522.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: jharriman2014 + abstract: 'What do new possibilities for music and art making look like in a world + in which the biological and mechanical are increasingly entangled? Can a contrived + environment envelope the senses to the point that one feel fully immersed in it? + It was with these questions in mind that the interactive mechanical sound art + installation endo/exo came into being. Through the use of networked technology + the system becomes more like a self-aware organism, passing messages from node + to node as cells communicate through chemical signals with their neighbors. In + an artistic context, the communication network resembles, but differs from, other + mechanical systems. Issues such as latency are often considered negative factors, + yet they can contribute a touch of personality in this context. 
This paper is + a reflection on these and other considerations gained from the experience of designing + and constructing endo/exo as well as future implications for the Honeycomb platform + as a tool for creating musical interactions within a new paradigm which allows + for emergent behavior across vast physical spaces. The use of swarming and self-organization, + as well as playful interaction, creates an ``aliveness'''' in the mechanism, and + renders its exploration pleasurable, intriguing and uncanny.' + address: 'London, United Kingdom' + author: Jiffer Harriman and Michael Theodore and Nikolaus Correll and Hunter Ewen + bibtex: "@inproceedings{jharriman2014,\n abstract = {What do new possibilities for\ + \ music and art making look like in a world in which the biological and mechanical\ + \ are increasingly entangled? Can a contrived environment envelope the senses\ + \ to the point that one feel fully immersed in it? It was with these questions\ + \ in mind that the interactive mechanical sound art installation endo/exo came\ + \ into being. Through the use of networked technology the system becomes more\ + \ like a self-aware organism, passing messages from node to node as cells communicate\ + \ through chemical signals with their neighbors. In an artistic context, the communication\ + \ network resembles, but differs from, other mechanical systems. Issues such as\ + \ latency are often considered negative factors, yet they can contribute a touch\ + \ of personality in this context. This paper is a reflection on these and other\ + \ considerations gained from the experience of designing and constructing endo/exo\ + \ as well as future implications for the Honeycomb platform as a tool for creating\ + \ musical interactions within a new paradigm which allows for emergent behavior\ + \ across vast physical spaces. 
The use of swarming and self-organization, as well\ + \ as playful interaction, creates an ``aliveness'' in the mechanism, and renders\ + \ its exploration pleasurable, intriguing and uncanny.},\n address = {London,\ + \ United Kingdom},\n author = {Jiffer Harriman and Michael Theodore and Nikolaus\ + \ Correll and Hunter Ewen},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178786},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {383--386},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {endo/exo Making Art and Music with Distributed\ + \ Computing},\n url = {http://www.nime.org/proceedings/2014/nime2014_523.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178786 + issn: 2220-4806 + month: June + pages: 383--386 + publisher: 'Goldsmiths, University of London' + title: endo/exo Making Art and Music with Distributed Computing + url: http://www.nime.org/proceedings/2014/nime2014_523.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: rgraham2014 + abstract: 'This paper describes the theoretical underpinnings, design, and development + of a hyper--instrumental performance system driven by gestural data obtained from + an electric guitar. The system combines a multichannel audio feed from the guitar + (which is parsed for its pitch, spectral content and note inter--onset time data + to provide abstractions of sounded performance gestures) with motion tracking + of the performer''s larger--scale bodily movements using a Microsoft Xbox Kinect + sensor. These gestural materials are used to provide the basis for the structures + of relational mappings, informed by the embodied image schema structures of Lakoff + and Johnson. 
These theoretical perspectives are refined via larger-scale ecological-embodied + structural relationships in electroacoustic music outlined in Smalley''s theory + of spectromorphology, alongside the incorporation of an additional active-agential + response structure through the use of the boids flocking algorithm by Reynolds + to control the spatialization of outputs and other textural processes. The paper + aims to advance a broadly-applicable ''performance gesture ecology'', providing + a shared spatial-relational mapping (a ''basic gestural space'') which allows + for creative (but still coherent) mappings from the performance gestures to the + control of textural and spatial structures.' + address: 'London, United Kingdom' + author: Ricky Graham and Brian Bridges + bibtex: "@inproceedings{rgraham2014,\n abstract = {This paper describes the theoretical\ + \ underpinnings, design, and development of a hyper--instrumental performance\ + \ system driven by gestural data obtained from an electric guitar. The system\ + \ combines a multichannel audio feed from the guitar (which is parsed for its\ + \ pitch, spectral content and note inter--onset time data to provide abstractions\ + \ of sounded performance gestures) with motion tracking of the performer's larger--scale\ + \ bodily movements using a Microsoft Xbox Kinect sensor. These gestural materials\ + \ are used to provide the basis for the structures of relational mappings, informed\ + \ by the embodied image schema structures of Lakoff and Johnson. These theoretical\ + \ perspectives are refined via larger-scale ecological-embodied structural relationships\ + \ in electroacoustic music outlined in Smalley's theory of spectromorphology,\ + \ alongside the incorporation of an additional active-agential response structure\ + \ through the use of the boids flocking algorithm by Reynolds to control the spatialization\ + \ of outputs and other textural processes. 
The paper aims to advance a broadly-applicable\ + \ 'performance gesture ecology', providing a shared spatial-relational mapping\ + \ (a 'basic gestural space') which allows for creative (but still coherent) mappings\ + \ from the performance gestures to the control of textural and spatial structures.},\n\ + \ address = {London, United Kingdom},\n author = {Ricky Graham and Brian Bridges},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178774},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {581--584},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Gesture and Embodied Metaphor in Spatial Music Performance\ + \ Systems Design.},\n url = {http://www.nime.org/proceedings/2014/nime2014_526.pdf},\n\ + \ year = {2014}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178774 + issn: 2220-4806 + month: June + pages: 581--584 + publisher: 'Goldsmiths, University of London' + title: Gesture and Embodied Metaphor in Spatial Music Performance Systems Design. + url: http://www.nime.org/proceedings/2014/nime2014_526.pdf + year: 2014 + + +- ENTRYTYPE: inproceedings + ID: ckiefer2014 + abstract: 'Echo State Networks (ESNs), a form of recurrent neural network developed + in the field of Reservoir Computing, show significant potential for use as a tool + in the design of mappings for digital musical instruments. They have, however, + seldom been used in this area, so this paper explores their possible uses. This + project contributes a new open source library, which was developed to allow ESNs + to run in the Pure Data dataflow environment. Several use cases were explored, + focusing on addressing current issues in mapping research. 
ESNs were found to + work successfully in scenarios of pattern classification, multiparametric control, + explorative mapping and the design of nonlinearities and uncontrol. Un-trained + behaviours are proposed, as augmentations to the conventional reservoir system + that allow the player to introduce potentially interesting non-linearities and + uncontrol into the reservoir. Interactive evolution style controls are proposed + as strategies to help design these behaviours, which are otherwise dependent on + arbitrary parameters. A study on sound classification shows that ESNs can reliably + differentiate between two drum sounds, and also generalise to other similar input. + Following evaluation of the use cases, heuristics are proposed to aid the use + of ESNs in computer music scenarios.' + address: 'London, United Kingdom' + author: Chris Kiefer + bibtex: "@inproceedings{ckiefer2014,\n abstract = {Echo State Networks (ESNs), a\ + \ form of recurrent neural network developed in the field of Reservoir Computing,\ + \ show significant potential for use as a tool in the design of mappings for digital\ + \ musical instruments. They have, however, seldom been used in this area, so this\ + \ paper explores their possible uses. This project contributes a new open source\ + \ library, which was developed to allow ESNs to run in the Pure Data dataflow\ + \ environment. Several use cases were explored, focusing on addressing current\ + \ issues in mapping research. ESNs were found to work successfully in scenarios\ + \ of pattern classification, multiparametric control, explorative mapping and\ + \ the design of nonlinearities and uncontrol. Un-trained behaviours are proposed,\ + \ as augmentations to the conventional reservoir system that allow the player\ + \ to introduce potentially interesting non-linearities and uncontrol into the\ + \ reservoir. 
Interactive evolution style controls are proposed as strategies to\ + \ help design these behaviours, which are otherwise dependent on arbitrary parameters.\ + \ A study on sound classification shows that ESNs can reliably differentiate between\ + \ two drum sounds, and also generalise to other similar input. Following evaluation\ + \ of the use cases, heuristics are proposed to aid the use of ESNs in computer\ + \ music scenarios.},\n address = {London, United Kingdom},\n author = {Chris Kiefer},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178829},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {293--298},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {Musical Instrument Mapping Design with Echo State Networks},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_530.pdf},\n year = {2014}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178829 issn: 2220-4806 - month: May - numpages: 6 - pages: 301--306 - title: A sustained relationship with large instruments - a case against the convenient - interface - track: Papers - url: http://nime.org/proceedings/2023/nime2023_43.pdf - year: 2023 + month: June + pages: 293--298 + publisher: 'Goldsmiths, University of London' + title: Musical Instrument Mapping Design with Echo State Networks + url: http://www.nime.org/proceedings/2014/nime2014_530.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_44 - abstract: 'The audiovisual installation Oscillations, turns irons and ironing boards - into electronic instruments, in an attempt to deconstruct stereotypical ideas - of gender and its assigned roles. 
The project aims to investigate the relationships - we have with domestic objects, and ponder their structures and significance through - the design and performance of an interactive ecosystem. The project uses a sonic - cyberfeminisms lens to critically explore aesthetic and relational hierarchies - at the intersection of sound, gender and technology. Three irons and ironing boards - have been hacked and retrofitted with embedded electronic instruments that together - create a complex feedback network. While the audience is invited to physically - interact with the irons instruments and manipulate samples, the sonic state of - the installation also changes based on the audio information detected in the environment.' - address: 'Mexico City, Mexico' - articleno: 44 - author: Patty J Preece and Melania Jack and Giacomo Lepri - bibtex: "@article{nime2023_44,\n abstract = {The audiovisual installation Oscillations,\ - \ turns irons and ironing boards into electronic instruments, in an attempt to\ - \ deconstruct stereotypical ideas of gender and its assigned roles. The project\ - \ aims to investigate the relationships we have with domestic objects, and ponder\ - \ their structures and significance through the design and performance of an interactive\ - \ ecosystem. The project uses a sonic cyberfeminisms lens to critically explore\ - \ aesthetic and relational hierarchies at the intersection of sound, gender and\ - \ technology. Three irons and ironing boards have been hacked and retrofitted\ - \ with embedded electronic instruments that together create a complex feedback\ - \ network. 
While the audience is invited to physically interact with the irons\ - \ instruments and manipulate samples, the sonic state of the installation also\ - \ changes based on the audio information detected in the environment.},\n address\ - \ = {Mexico City, Mexico},\n articleno = {44},\n author = {Patty J Preece and\ - \ Melania Jack and Giacomo Lepri},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {7},\n pages = {307--313},\n title = {Oscillations: Composing a Performance\ - \ Ecosystem through a Sonic Cyberfeminist Lens },\n track = {Papers},\n url =\ - \ {http://nime.org/proceedings/2023/nime2023_44.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: jallison2014 + abstract: 'Mobile music applications are typically quite limiting to musicians, + as they either attempt to mimic non-touch screen interfaces or do not offer enough + control. Pitch Canvas is a musical interface that was built specifically for the + touchscreen. Pitches are laid out in a hexagonal pattern that allow for easy scale, + chord, and arpeggiation patterns. Notes are played by touch, but are sustained + through continuous movement. Pitch bends can be achieved by passing through the + space between the notes. Its current implementation runs only on Apple iPad tablet + computers using a libPd to convert user interaction into audio. An iPad overlay + offers physical feedback for the circles as well as the pitch bend area between + the circles. A performable version of the application has been built, though several + active developments allow alternative sonic interpretation of the gestures, enhanced + visual response to user interaction, and the ability to control the instrument + with multiple devices.' 
+ address: 'London, United Kingdom' + author: Bradley Strylowski and Jesse Allison and Jesse Guessford + bibtex: "@inproceedings{jallison2014,\n abstract = {Mobile music applications are\ + \ typically quite limiting to musicians, as they either attempt to mimic non-touch\ + \ screen interfaces or do not offer enough control. Pitch Canvas is a musical\ + \ interface that was built specifically for the touchscreen. Pitches are laid\ + \ out in a hexagonal pattern that allow for easy scale, chord, and arpeggiation\ + \ patterns. Notes are played by touch, but are sustained through continuous movement.\ + \ Pitch bends can be achieved by passing through the space between the notes.\ + \ Its current implementation runs only on Apple iPad tablet computers using a\ + \ libPd to convert user interaction into audio. An iPad overlay offers physical\ + \ feedback for the circles as well as the pitch bend area between the circles.\ + \ A performable version of the application has been built, though several active\ + \ developments allow alternative sonic interpretation of the gestures, enhanced\ + \ visual response to user interaction, and the ability to control the instrument\ + \ with multiple devices.},\n address = {London, United Kingdom},\n author = {Bradley\ + \ Strylowski and Jesse Allison and Jesse Guessford},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178947},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {171--174},\n publisher = {Goldsmiths, University of London},\n title = {Pitch\ + \ Canvas: Touchscreen Based Mobile Music Instrument},\n url = {http://www.nime.org/proceedings/2014/nime2014_533.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178947 issn: 2220-4806 - month: May - numpages: 7 - pages: 307--313 - 
title: 'Oscillations: Composing a Performance Ecosystem through a Sonic Cyberfeminist - Lens ' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_44.pdf - year: 2023 + month: June + pages: 171--174 + publisher: 'Goldsmiths, University of London' + title: 'Pitch Canvas: Touchscreen Based Mobile Music Instrument' + url: http://www.nime.org/proceedings/2014/nime2014_533.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_45 - abstract: 'In this article, we explore practical and artistic considerations of - instrument design and the creation of an instrument ensemble control system for - The Furies: A LaptOpera, an opera for laptop orchestra and live vocalists based - on the Greek tragedy Electra. We outline the artistic principles that guided - the creation of the rope instrument and, specifically, our use of instrument design - to forge direct and visceral connections between the music, the narrative, and - the relationship between characters. This discussion is followed by an overview - of the practical considerations that inspired the creation of an instrument ensemble - control system for the opera and the principles that guided this system''s design. - Through a detailed description of the development of the rope instrument, the - growth of this instrument through the course of the opera, and the design of the - instrument ensemble control system, this paper offers tools and reflections on - the potential of instrument design to invigorate an embodied connection to opera - and useful design strategies to support rehearsal and performance of evening-length - multimedia works.' 
- address: 'Mexico City, Mexico' - articleno: 45 - author: Anne K Hege and Curtis Ullerich - bibtex: "@article{nime2023_45,\n abstract = {In this article, we explore practical\ - \ and artistic considerations of instrument design and the creation of an instrument\ - \ ensemble control system for The Furies: A LaptOpera, an opera for laptop orchestra\ - \ and live vocalists based on the Greek tragedy Electra. We outline the artistic\ - \ principles that guided the creation of the rope instrument and, specifically,\ - \ our use of instrument design to forge direct and visceral connections between\ - \ the music, the narrative, and the relationship between characters. This discussion\ - \ is followed by an overview of the practical considerations that inspired the\ - \ creation of an instrument ensemble control system for the opera and the principles\ - \ that guided this system's design. Through a detailed description of the development\ - \ of the rope instrument, the growth of this instrument through the course of\ - \ the opera, and the design of the instrument ensemble control system, this paper\ - \ offers tools and reflections on the potential of instrument design to invigorate\ - \ an embodied connection to opera and useful design strategies to support rehearsal\ - \ and performance of evening-length multimedia works.},\n address = {Mexico City,\ - \ Mexico},\n articleno = {45},\n author = {Anne K Hege and Curtis Ullerich},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {5},\n pages = {314--318},\n title\ - \ = {Principles of Instrument and System Design for LaptOperas},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_45.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: pdahlstedt2014 + abstract: 'Two related versions of an unstable live 
algorithm for the Disklavier + player piano are presented. The underlying generative feedback system consists + of four virtual musicians, listening to each other in a circular configuration. + There is no temporal form, and all parameters of the system are controlled by + the performer through an intricate but direct mapping, in an attempt to combine + the experienced musician''s physical control of gesture and phrasing, with the + structural complexities and richness of generative music. In the first version, + Circle Squared, the interface is an array of pressure sensors, and the performer + performs on the system without participating directly, like a puppet master. In + the second version, control parameters are derived directly from playing on the + same piano that performs the output of the system. Here, the performer both plays + with and on the system in an intricate dance with the unpredictable output of + the unstable virtual ensemble. The underlying mapping strategies are presented, + together with the structure of the generative system. Experiences from a series + of performances are discussed, primarily from the perspective of the improvising + musician.' + address: 'London, United Kingdom' + author: Palle Dahlstedt + bibtex: "@inproceedings{pdahlstedt2014,\n abstract = {Two related versions of an\ + \ unstable live algorithm for the Disklavier player piano are presented. The underlying\ + \ generative feedback system consists of four virtual musicians, listening to\ + \ each other in a circular configuration. There is no temporal form, and all parameters\ + \ of the system are controlled by the performer through an intricate but direct\ + \ mapping, in an attempt to combine the experienced musician's physical control\ + \ of gesture and phrasing, with the structural complexities and richness of generative\ + \ music. 
In the first version, Circle Squared, the interface is an array of pressure\ + \ sensors, and the performer performs on the system without participating directly,\ + \ like a puppet master. In the second version, control parameters are derived\ + \ directly from playing on the same piano that performs the output of the system.\ + \ Here, the performer both plays with and on the system in an intricate dance\ + \ with the unpredictable output of the unstable virtual ensemble. The underlying\ + \ mapping strategies are presented, together with the structure of the generative\ + \ system. Experiences from a series of performances are discussed, primarily from\ + \ the perspective of the improvising musician.},\n address = {London, United Kingdom},\n\ + \ author = {Palle Dahlstedt},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178740},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {114--117},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Circle Squared and Circle Keys Performing\ + \ on and with an Unstable Live Algorithm for the Disklavier},\n url = {http://www.nime.org/proceedings/2014/nime2014_534.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178740 issn: 2220-4806 - month: May - numpages: 5 - pages: 314--318 - title: Principles of Instrument and System Design for LaptOperas - track: Papers - url: http://nime.org/proceedings/2023/nime2023_45.pdf - year: 2023 + month: June + pages: 114--117 + publisher: 'Goldsmiths, University of London' + title: Circle Squared and Circle Keys Performing on and with an Unstable Live Algorithm + for the Disklavier + url: http://www.nime.org/proceedings/2014/nime2014_534.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_46 - abstract: 'For several decades NIME 
community has always been appropriating machine - learning (ML) to apply for various tasks such as gesture-sound mapping or sound - synthesis for digital musical instruments. Recently, the use of ML methods seems - to have increased and the objectives have diversified. Despite its increasing - use, few contributions have studied what constitutes the culture of learning technologies - for this specific practice. This paper presents an analysis of 69 contributions - selected from a systematic review of the NIME conference over the last 10 years. - This paper aims at analysing the practices involving ML in terms of the techniques - and the task used and the ways to interact this technology. It thus contributes - to a deeper understanding of the specific goals and motivation in using ML for - musical expression. This study allows us to propose new perspectives in the practice - of these techniques.' - address: 'Mexico City, Mexico' - articleno: 46 - author: Théo Jourdan and Baptiste Caramiaux - bibtex: "@article{nime2023_46,\n abstract = {For several decades NIME community\ - \ has always been appropriating machine learning (ML) to apply for various tasks\ - \ such as gesture-sound mapping or sound synthesis for digital musical instruments.\ - \ Recently, the use of ML methods seems to have increased and the objectives have\ - \ diversified. Despite its increasing use, few contributions have studied what\ - \ constitutes the culture of learning technologies for this specific practice.\ - \ This paper presents an analysis of 69 contributions selected from a systematic\ - \ review of the NIME conference over the last 10 years. This paper aims at analysing\ - \ the practices involving ML in terms of the techniques and the task used and\ - \ the ways to interact this technology. It thus contributes to a deeper understanding\ - \ of the specific goals and motivation in using ML for musical expression. 
This\ - \ study allows us to propose new perspectives in the practice of these techniques.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {46},\n author = {Théo Jourdan\ - \ and Baptiste Caramiaux},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {13},\n\ - \ pages = {319--331},\n title = {Machine Learning for Musical Expression: A Systematic\ - \ Literature Review},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_46.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: fthalmann2014 + abstract: 'This paper introduces an extension of the Rubato Composer software''s + BigBang rubette module for gestural composition. The extension enables composers + and improvisers to operate BigBang using the Leap Motion controller, which uses + two cameras to detect hand motions in three-dimensional space. The low latency + and high precision of the device make it a good fit for BigBang''s functionality, + which is based on immediate visual and auditive feedback. With the new extensions, + users can define an infinite variety of musical objects, such as oscillators, + pitches, chord progressions, or frequency modulators, in real-time and transform + them in order to generate more complex musical structures on any level of abstraction.' + address: 'London, United Kingdom' + author: Daniel Tormoen and Florian Thalmann and Guerino Mazzola + bibtex: "@inproceedings{fthalmann2014,\n abstract = {This paper introduces an extension\ + \ of the Rubato Composer software's BigBang rubette module for gestural composition.\ + \ The extension enables composers and improvisers to operate BigBang using the\ + \ Leap Motion controller, which uses two cameras to detect hand motions in three-dimensional\ + \ space. 
The low latency and high precision of the device make it a good fit for\ + \ BigBang's functionality, which is based on immediate visual and auditive feedback.\ + \ With the new extensions, users can define an infinite variety of musical objects,\ + \ such as oscillators, pitches, chord progressions, or frequency modulators, in\ + \ real-time and transform them in order to generate more complex musical structures\ + \ on any level of abstraction.},\n address = {London, United Kingdom},\n author\ + \ = {Daniel Tormoen and Florian Thalmann and Guerino Mazzola},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178955},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {207--212},\n publisher = {Goldsmiths, University of London},\n title = {The\ + \ Composing Hand: Musical Creation with Leap Motion and the BigBang Rubette},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_536.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178955 issn: 2220-4806 - month: May - numpages: 13 - pages: 319--331 - title: 'Machine Learning for Musical Expression: A Systematic Literature Review' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_46.pdf - year: 2023 + month: June + pages: 207--212 + publisher: 'Goldsmiths, University of London' + title: 'The Composing Hand: Musical Creation with Leap Motion and the BigBang Rubette' + url: http://www.nime.org/proceedings/2014/nime2014_536.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_47 - abstract: 'For several years, the various practices around ML techniques have been - increasingly present and diversified. However, the literature associated with - these techniques rarely reveals the cultural and political sides of these practices. 
- In order to explore how practitioners in the NIME community engage with ML techniques, - we conducted interviews with seven researchers in the NIME community and analysed - them through a thematic analysis. Firstly, we propose findings at the level of - the individual, resisting technological determinism and redefining sense making - in interactive ML. Secondly, we propose findings at the level of the community, - revealing mitigated adoption with respect to ML. This paper aims to provide the - community with some reflections on the use of ML in order to initiate a discussion - about cultural, political and ethical issues surrounding these techniques as their - use grows within the community.' - address: 'Mexico City, Mexico' - articleno: 47 - author: Théo Jourdan and Baptiste Caramiaux - bibtex: "@article{nime2023_47,\n abstract = {For several years, the various practices\ - \ around ML techniques have been increasingly present and diversified. However,\ - \ the literature associated with these techniques rarely reveals the cultural\ - \ and political sides of these practices. In order to explore how practitioners\ - \ in the NIME community engage with ML techniques, we conducted interviews with\ - \ seven researchers in the NIME community and analysed them through a thematic\ - \ analysis. Firstly, we propose findings at the level of the individual, resisting\ - \ technological determinism and redefining sense making in interactive ML. Secondly,\ - \ we propose findings at the level of the community, revealing mitigated adoption\ - \ with respect to ML. 
This paper aims to provide the community with some reflections\ - \ on the use of ML in order to initiate a discussion about cultural, political\ - \ and ethical issues surrounding these techniques as their use grows within the\ - \ community.},\n address = {Mexico City, Mexico},\n articleno = {47},\n author\ - \ = {Théo Jourdan and Baptiste Caramiaux},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {7},\n pages = {332--338},\n title = {Culture and Politics of Machine Learning\ - \ in NIME: A Preliminary Qualitative Inquiry},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_47.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: obown2014 + abstract: 'Audiences of live laptop music frequently express dismay at the opacity + of performer activity and question how ``live'''' performances actually are. Yet + motionless laptop performers endure as musical spectacles from clubs to concert + halls, suggesting that for many this is a non-issue. Understanding these perceptions + might help performers better achieve their intentions, inform interface design + within the NIME field and help develop theories of liveness and performance. To + this end, a study of listeners'' perception of liveness and performer control + in laptop performance was carried out, in which listeners examined several short + audio-only excerpts of laptop performances and answered questions about their + perception of the performance: what they thought was happening and its sense of + liveness. Our results suggest that audiences are likely to associate liveness + with perceived performer activity such as improvisation and the audibility of + gestures, whereas perceptions of generative material, backing tracks, or other + preconceived material do not appear to inhibit perceptions of liveness.' 
+ address: 'London, United Kingdom' + author: Oliver Bown and Renick Bell and Adam Parkinson + bibtex: "@inproceedings{obown2014,\n abstract = {Audiences of live laptop music\ + \ frequently express dismay at the opacity of performer activity and question\ + \ how ``live'' performances actually are. Yet motionless laptop performers endure\ + \ as musical spectacles from clubs to concert halls, suggesting that for many\ + \ this is a non-issue. Understanding these perceptions might help performers better\ + \ achieve their intentions, inform interface design within the NIME field and\ + \ help develop theories of liveness and performance. To this end, a study of listeners'\ + \ perception of liveness and performer control in laptop performance was carried\ + \ out, in which listeners examined several short audio-only excerpts of laptop\ + \ performances and answered questions about their perception of the performance:\ + \ what they thought was happening and its sense of liveness. Our results suggest\ + \ that audiences are likely to associate liveness with perceived performer activity\ + \ such as improvisation and the audibility of gestures, whereas perceptions of\ + \ generative material, backing tracks, or other preconceived material do not appear\ + \ to inhibit perceptions of liveness.},\n address = {London, United Kingdom},\n\ + \ author = {Oliver Bown and Renick Bell and Adam Parkinson},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178722},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {13--18},\n publisher = {Goldsmiths, University of London},\n title = {Examining\ + \ the Perception of Liveness and Activity in Laptop Music: Listeners' Inference\ + \ about what the Performer is Doing from the Audio Alone},\n url = {http://www.nime.org/proceedings/2014/nime2014_538.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New 
Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178722 issn: 2220-4806 - month: May - numpages: 7 - pages: 332--338 - title: 'Culture and Politics of Machine Learning in NIME: A Preliminary Qualitative - Inquiry' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_47.pdf - year: 2023 + month: June + pages: 13--18 + publisher: 'Goldsmiths, University of London' + title: 'Examining the Perception of Liveness and Activity in Laptop Music: Listeners'' + Inference about what the Performer is Doing from the Audio Alone' + url: http://www.nime.org/proceedings/2014/nime2014_538.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_48 - abstract: 'Subtlety and detail are fundamental to what makes musical instruments - special, and worth dedicating a life''s practice to, for designer, maker, player - and listener alike. However, research into digital musical instrument (DMI) design - tools and processes have so far mainly focused on high-level conceptual concerns - and low-level technical abstractions, leaving subtlety and detail underexplored - and undervalued. These nuances, and the processes they result from, cannot be - fully articulated in words alone, yet they largely define an instrument''s quality, - and it is therefore important to understand how they come to be. We introduce - a scale-based ontology that divides design details into three levels - macro, - meso and micro - and we present a literature review of DMI design from the perspective - of this ontology. Finally we extrapolate the ontology to consider its utility - in broader contexts, and consider future directions.' 
- address: 'Mexico City, Mexico' - articleno: 48 - author: Jack Armitage and Thor Magnusson and Andrew McPherson - bibtex: "@article{nime2023_48,\n abstract = {Subtlety and detail are fundamental\ - \ to what makes musical instruments special, and worth dedicating a life's practice\ - \ to, for designer, maker, player and listener alike. However, research into digital\ - \ musical instrument (DMI) design tools and processes have so far mainly focused\ - \ on high-level conceptual concerns and low-level technical abstractions, leaving\ - \ subtlety and detail underexplored and undervalued. These nuances, and the processes\ - \ they result from, cannot be fully articulated in words alone, yet they largely\ - \ define an instrument's quality, and it is therefore important to understand\ - \ how they come to be. We introduce a scale-based ontology that divides design\ - \ details into three levels - macro, meso and micro - and we present a literature\ - \ review of DMI design from the perspective of this ontology. Finally we extrapolate\ - \ the ontology to consider its utility in broader contexts, and consider future\ - \ directions.},\n address = {Mexico City, Mexico},\n articleno = {48},\n author\ - \ = {Jack Armitage and Thor Magnusson and Andrew McPherson},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {11},\n pages = {339--349},\n title = {A Scale-Based Ontology\ - \ of Musical Instrument Design},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_48.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: jsnyder12014 + abstract: 'This paper discusses the Birl, an electronic wind instrument developed + by the authors. It uses artificial neural nets to apply machine learning to the + mapping of fingering systems and embouchure position. 
The design features of the + instrument are described, and the machine learning mapping strategy is discussed.' + address: 'London, United Kingdom' + author: Jeff Snyder and Danny Ryan + bibtex: "@inproceedings{jsnyder12014,\n abstract = {This paper discusses the Birl,\ + \ an electronic wind instrument developed by the authors. It uses artificial neural\ + \ nets to apply machine learning to the mapping of fingering systems and embouchure\ + \ position. The design features of the instrument are described, and the machine\ + \ learning mapping strategy is discussed.},\n address = {London, United Kingdom},\n\ + \ author = {Jeff Snyder and Danny Ryan},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178939},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {585--588},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {The Birl: An Electronic Wind Instrument Based\ + \ on an Artificial Neural Network Parameter Mapping Structure},\n url = {http://www.nime.org/proceedings/2014/nime2014_540.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178939 issn: 2220-4806 - month: May - numpages: 11 - pages: 339--349 - title: A Scale-Based Ontology of Musical Instrument Design - track: Papers - url: http://nime.org/proceedings/2023/nime2023_48.pdf - year: 2023 + month: June + pages: 585--588 + publisher: 'Goldsmiths, University of London' + title: 'The Birl: An Electronic Wind Instrument Based on an Artificial Neural Network + Parameter Mapping Structure' + url: http://www.nime.org/proceedings/2014/nime2014_540.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_49 - abstract: 'As the field around computer-mediated musical interaction drives attention - to its sociotechnical, political and epistemological exigencies, it becomes 
important - to be guided by disability studies, and for researchers and designers of accessible - digital musical instruments (ADMIs) to foreground the lived experience of disabled - musicians. This resonates with the movement to promote disability justice in HCI. - In this paper, we introduce a case study of the design of a string-less guitar, - which was developed in collaboration with a guitarist who lost his ability to - play due to impairment. We present this work as an exploration of the Rashomon - effect, a term that refers to the phenomenon of multiple witnesses describing - the same event from their own perspective. We argue that the Rashomon effect is - a useful way to explore how digital musical instrument (DMI) designers respond - to NIME''s interdisciplinarity, and to reflect on how we produce and transmit - knowledge within our field.' - address: 'Mexico City, Mexico' - articleno: 49 - author: Eevee Zayas-Garin and Charlotte Nordmoen and Andrew McPherson - bibtex: "@article{nime2023_49,\n abstract = {As the field around computer-mediated\ - \ musical interaction drives attention to its sociotechnical, political and epistemological\ - \ exigencies, it becomes important to be guided by disability studies, and for\ - \ researchers and designers of accessible digital musical instruments (ADMIs)\ - \ to foreground the lived experience of disabled musicians. This resonates with\ - \ the movement to promote disability justice in HCI. In this paper, we introduce\ - \ a case study of the design of a string-less guitar, which was developed in collaboration\ - \ with a guitarist who lost his ability to play due to impairment. 
We present\ - \ this work as an exploration of the Rashomon effect, a term that refers to the\ - \ phenomenon of multiple witnesses describing the same event from their own perspective.\ - \ We argue that the Rashomon effect is a useful way to explore how digital musical\ - \ instrument (DMI) designers respond to NIME's interdisciplinarity, and to reflect\ - \ on how we produce and transmit knowledge within our field.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {49},\n author = {Eevee Zayas-Garin and Charlotte\ - \ Nordmoen and Andrew McPherson},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {8},\n pages = {350--357},\n title = {Transmitting Digital Lutherie Knowledge:\ - \ The Rashomon Effect for DMI Designers},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_49.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: ahindle12014 + abstract: 'One problem with live computer music performance is the transport of + computers to a venue and the following setup of the computers used in playing + and rendering music. The more computers involved the longer the setup and tear-down + of a performance. Each computer adds power and cabling requirements that the venue + must accommodate. Cloud computing can change of all this by simplifying the setup + of many (10s, 100s) of machines at the click of a button. But there''s a catch, + the cloud is not physically near you, you cannot run an audio cable to the cloud. + The audio from a computer music instrument in the cloud needs to streamed back + to the performer and listeners. There are many solutions for streaming audio over + networks and the internet, most of them suffer from high latency, heavy buffering, + or proprietary/non-portable clients. 
In this paper we propose a portable cloud-friendly + method of streaming, almost a cloud soundcard, whereby performers can use mobile + devices (Android, iOS, laptops) to stream audio from the cloud with far lower + latency than technologies like icecast. This technology enables near-realtime + control over power computer music networks enabling performers to travel light + and perform live with more computers than ever before.' + address: 'London, United Kingdom' + author: Abram Hindle + bibtex: "@inproceedings{ahindle12014,\n abstract = {One problem with live computer\ + \ music performance is the transport of computers to a venue and the following\ + \ setup of the computers used in playing and rendering music. The more computers\ + \ involved the longer the setup and tear-down of a performance. Each computer\ + \ adds power and cabling requirements that the venue must accommodate. Cloud computing\ + \ can change of all this by simplifying the setup of many (10s, 100s) of machines\ + \ at the click of a button. But there's a catch, the cloud is not physically near\ + \ you, you cannot run an audio cable to the cloud. The audio from a computer music\ + \ instrument in the cloud needs to streamed back to the performer and listeners.\ + \ There are many solutions for streaming audio over networks and the internet,\ + \ most of them suffer from high latency, heavy buffering, or proprietary/non-portable\ + \ clients. In this paper we propose a portable cloud-friendly method of streaming,\ + \ almost a cloud soundcard, whereby performers can use mobile devices (Android,\ + \ iOS, laptops) to stream audio from the cloud with far lower latency than technologies\ + \ like icecast. 
This technology enables near-realtime control over power computer\ + \ music networks enabling performers to travel light and perform live with more\ + \ computers than ever before.},\n address = {London, United Kingdom},\n author\ + \ = {Abram Hindle},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178798},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {277--280},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {CloudOrch: A Portable SoundCard in the Cloud},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_541.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178798 issn: 2220-4806 - month: May - numpages: 8 - pages: 350--357 - title: 'Transmitting Digital Lutherie Knowledge: The Rashomon Effect for DMI Designers' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_49.pdf - year: 2023 + month: June + pages: 277--280 + publisher: 'Goldsmiths, University of London' + title: 'CloudOrch: A Portable SoundCard in the Cloud' + url: http://www.nime.org/proceedings/2014/nime2014_541.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_50 - abstract: 'Learning to play a digital musical instrument (DMI) may be affected by - the acoustic behaviour of that instrument, in addition to its physical characteristics - and form. However, how the timbral properties of an instrument affect learning - has received little systematic empirical research. In an exploratory study, we - assessed whether timbral feedback from a physical model based percussive DMI influences - beginner players’ performance in a musical learning task. 
We contrasted the timbral - richness of a metallic plate physical model with an amplitude modulated pink-noise - signal that was comparable in response to input controls but with relatively reduced - timbral features. Two groups of participants practiced three sets of simple beats - using their respective version of the instrument (physical model or pink noise), - over the course of an hour. Their performance was recorded throughout and assessed - in the form of rhythmic timing accuracy. Results showed that participants’ performance - in both sound groups significantly improved throughout the task. Timing accuracy - was significantly better in the physical model group for one out of three sets - of beats. We argue that the timbral feedback of a musical instrument may influence - beginner’s playing experience, encouraging further research into how this could - benefit DMI design.' - address: 'Mexico City, Mexico' - articleno: 50 - author: Olivia B Smith and Matthew Rodger and Maarten van Walstijn and Miguel Ortiz - bibtex: "@article{nime2023_50,\n abstract = {Learning to play a digital musical\ - \ instrument (DMI) may be affected by the acoustic behaviour of that instrument,\ - \ in addition to its physical characteristics and form. However, how the timbral\ - \ properties of an instrument affect learning has received little systematic empirical\ - \ research. In an exploratory study, we assessed whether timbral feedback from\ - \ a physical model based percussive DMI influences beginner players’ performance\ - \ in a musical learning task. We contrasted the timbral richness of a metallic\ - \ plate physical model with an amplitude modulated pink-noise signal that was\ - \ comparable in response to input controls but with relatively reduced timbral\ - \ features. Two groups of participants practiced three sets of simple beats using\ - \ their respective version of the instrument (physical model or pink noise), over\ - \ the course of an hour. 
Their performance was recorded throughout and assessed\ - \ in the form of rhythmic timing accuracy. Results showed that participants’ performance\ - \ in both sound groups significantly improved throughout the task. Timing accuracy\ - \ was significantly better in the physical model group for one out of three sets\ - \ of beats. We argue that the timbral feedback of a musical instrument may influence\ - \ beginner’s playing experience, encouraging further research into how this could\ - \ benefit DMI design.},\n address = {Mexico City, Mexico},\n articleno = {50},\n\ - \ author = {Olivia B Smith and Matthew Rodger and Maarten van Walstijn and Miguel\ - \ Ortiz},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {358--363},\n\ - \ title = {Sound guiding action: the effect of timbre on learning a new percussive\ - \ DMI for beginner musicians},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_50.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: jsnyder2014 + abstract: 'In this paper, we present the ``Mobile Marching Band'''' (MMB) as a new + mode of musical performance with mobile computing devices. We define an MMB to + be, at its most general, any ensemble utilizing mobile computation that can travel + as it performs, with the performance being independent of its location. We will + discuss the affordances and limitations of mobile-based instrument design and + performance, specifically within the context of a ``moving'''' ensemble. We will + also discuss the use of a Mobile Marching Band as an educational tool. Finally, + we will explore our implementation of a Mobile Parade, a digital Brazilian samba + ensemble.' 
+ address: 'London, United Kingdom' + author: Jeff Snyder and Avneesh Sarwate + bibtex: "@inproceedings{jsnyder2014,\n abstract = {In this paper, we present the\ + \ ``Mobile Marching Band'' (MMB) as a new mode of musical performance with mobile\ + \ computing devices. We define an MMB to be, at its most general, any ensemble\ + \ utilizing mobile computation that can travel as it performs, with the performance\ + \ being independent of its location. We will discuss the affordances and limitations\ + \ of mobile-based instrument design and performance, specifically within the context\ + \ of a ``moving'' ensemble. We will also discuss the use of a Mobile Marching\ + \ Band as an educational tool. Finally, we will explore our implementation of\ + \ a Mobile Parade, a digital Brazilian samba ensemble.},\n address = {London,\ + \ United Kingdom},\n author = {Jeff Snyder and Avneesh Sarwate},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178941},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {147--150},\n publisher = {Goldsmiths, University of London},\n title = {Mobile\ + \ Device Percussion Parade},\n url = {http://www.nime.org/proceedings/2014/nime2014_542.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178941 issn: 2220-4806 - month: May - numpages: 6 - pages: 358--363 - title: 'Sound guiding action: the effect of timbre on learning a new percussive - DMI for beginner musicians' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_50.pdf - year: 2023 + month: June + pages: 147--150 + publisher: 'Goldsmiths, University of London' + title: Mobile Device Percussion Parade + url: http://www.nime.org/proceedings/2014/nime2014_542.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_51 - abstract: 'This paper 
describes the latest iteration of signal path routing and - mixing control for the halldorophone, an experimental electro-acoustic string - instrument intended for music making with string feedback and describes the design - thinking behind the work which is informed by long term contact with dedicated - users. Specifically, here we discuss the intended “feel” or ergodynamic design - of how the affordances of the instrument are presented and the delicate task of - reducing cognitive load for early use while not limiting options for expert users.' - address: 'Mexico City, Mexico' - articleno: 51 - author: Halldor Ulfarsson - bibtex: "@article{nime2023_51,\n abstract = {This paper describes the latest iteration\ - \ of signal path routing and mixing control for the halldorophone, an experimental\ - \ electro-acoustic string instrument intended for music making with string feedback\ - \ and describes the design thinking behind the work which is informed by long\ - \ term contact with dedicated users. 
Specifically, here we discuss the intended\ - \ “feel” or ergodynamic design of how the affordances of the instrument are presented\ - \ and the delicate task of reducing cognitive load for early use while not limiting\ - \ options for expert users.},\n address = {Mexico City, Mexico},\n articleno =\ - \ {51},\n author = {Halldor Ulfarsson},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {7},\n pages = {364--370},\n title = {Ergodynamics of String Feedback},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_51.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: dvannort2014 + abstract: 'This paper discusses an approach to instrument conception that is based + on a careful consideration of the coupling of tactile and sonic gestural action + both into and out of the performance system. To this end we propose a design approach + that not only considers the materiality of the instrument, but that leverages + it as a central part of the conception of the sonic quality, the control structuring + and what generally falls under the umbrella of "mapping" design. As we will discuss, + this extended computational matter-centric view is of benefit towards holistically + understanding an ``instrument'''' gestural engagement, as it is realized through + physical material, sonic gestural matter and felt human engagement. We present + instrumental systems that have arisen as a result of this approach to instrument + design.' 
+ address: 'London, United Kingdom' + author: Navid Navab and Doug Van Nort and Sha Xin Wei + bibtex: "@inproceedings{dvannort2014,\n abstract = {This paper discusses an approach\ + \ to instrument conception that is based on a careful consideration of the coupling\ + \ of tactile and sonic gestural action both into and out of the performance system.\ + \ To this end we propose a design approach that not only considers the materiality\ + \ of the instrument, but that leverages it as a central part of the conception\ + \ of the sonic quality, the control structuring and what generally falls under\ + \ the umbrella of \"mapping\" design. As we will discuss, this extended computational\ + \ matter-centric view is of benefit towards holistically understanding an ``instrument''\ + \ gestural engagement, as it is realized through physical material, sonic gestural\ + \ matter and felt human engagement. We present instrumental systems that have\ + \ arisen as a result of this approach to instrument design.},\n address = {London,\ + \ United Kingdom},\n author = {Navid Navab and Doug Van Nort and Sha Xin Wei},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178893},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {387--390},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {A Material Computation Perspective on Audio Mosaicing\ + \ and Gestural Conditioning},\n url = {http://www.nime.org/proceedings/2014/nime2014_544.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178893 issn: 2220-4806 - month: May - numpages: 7 - pages: 364--370 - title: Ergodynamics of String Feedback - track: Papers - url: http://nime.org/proceedings/2023/nime2023_51.pdf - year: 2023 + month: June + pages: 387--390 + publisher: 'Goldsmiths, 
University of London' + title: A Material Computation Perspective on Audio Mosaicing and Gestural Conditioning + url: http://www.nime.org/proceedings/2014/nime2014_544.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_52 - abstract: 'This paper presents a new interface – Laser Phase Synthesis — designed - for audiovisual performance expression. The instrument is informed by the historical - Audio/Video/Laser system developed by Lowell Cross and Carson Jeffries for use - by David Tudor and Experiments in Arts and Technology (E.A.T.) at the 1970 Japan - World Exposition in Osaka, Japan. The current work employs digital audio synthesis, - modern laser display technology, and close collaboration be- tween sound and image - composition to illustrate the har- monic progression of a musical work. The authors - present a micro-history of audiovisual laser displays, a brief introduction to - the process of drawing visual figures with sound, a description of the Pure Data - software and laser display hardware systems used for the Laser Phase Synthesis - instrument, and a discussion of how this instrument shaped the composition process - of one audiovisual performance of electroacoustic music. The paper concludes with - speculations on how the system can be further developed with other kinds of live - performers, specifically vocalists.' - address: 'Mexico City, Mexico' - articleno: 52 - author: Derek Holzer and Luka Aron and Andre Holzapfel - bibtex: "@article{nime2023_52,\n abstract = {This paper presents a new interface\ - \ – Laser Phase Synthesis — designed for audiovisual performance expression. The\ - \ instrument is informed by the historical Audio/Video/Laser system developed\ - \ by Lowell Cross and Carson Jeffries for use by David Tudor and Experiments in\ - \ Arts and Technology (E.A.T.) 
at the 1970 Japan World Exposition in Osaka, Japan.\ - \ The current work employs digital audio synthesis, modern laser display technology,\ - \ and close collaboration be- tween sound and image composition to illustrate\ - \ the har- monic progression of a musical work. The authors present a micro-history\ - \ of audiovisual laser displays, a brief introduction to the process of drawing\ - \ visual figures with sound, a description of the Pure Data software and laser\ - \ display hardware systems used for the Laser Phase Synthesis instrument, and\ - \ a discussion of how this instrument shaped the composition process of one audiovisual\ - \ performance of electroacoustic music. The paper concludes with speculations\ - \ on how the system can be further developed with other kinds of live performers,\ - \ specifically vocalists.},\n address = {Mexico City, Mexico},\n articleno = {52},\n\ - \ author = {Derek Holzer and Luka Aron and Andre Holzapfel},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {8},\n pages = {371--378},\n title = {Laser Phase Synthesis},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_52.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: slee12014 + abstract: 'This work introduces a way to distribute mobile applications using mobile + ad-hoc network in the context of audience participation. The goal is to minimize + user configuration so that the process is highly accessible for casual smartphone + users. The prototype mobile applications utilize WiFiDirect and Service Discovery + Protocol to distribute code. With the aid of these two technologies, the prototype + system requires no infrastructure and minimum user configuration.' + address: 'London, United Kingdom' + author: Sang Won Lee and Georg Essl and Z. 
Morley Mao + bibtex: "@inproceedings{slee12014,\n abstract = {This work introduces a way to distribute\ + \ mobile applications using mobile ad-hoc network in the context of audience participation.\ + \ The goal is to minimize user configuration so that the process is highly accessible\ + \ for casual smartphone users. The prototype mobile applications utilize WiFiDirect\ + \ and Service Discovery Protocol to distribute code. With the aid of these two\ + \ technologies, the prototype system requires no infrastructure and minimum user\ + \ configuration.},\n address = {London, United Kingdom},\n author = {Sang Won\ + \ Lee and Georg Essl and Z. Morley Mao},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178849},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {533--536},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Distributing Mobile Music Applications for\ + \ Audience Participation Using Mobile Ad-hoc Network ({MANET})},\n url = {http://www.nime.org/proceedings/2014/nime2014_546.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178849 issn: 2220-4806 - month: May - numpages: 8 - pages: 371--378 - title: Laser Phase Synthesis - track: Papers - url: http://nime.org/proceedings/2023/nime2023_52.pdf - year: 2023 + month: June + pages: 533--536 + publisher: 'Goldsmiths, University of London' + title: Distributing Mobile Music Applications for Audience Participation Using Mobile + Ad-hoc Network (MANET) + url: http://www.nime.org/proceedings/2014/nime2014_546.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_53 - abstract: 'This article presents Jacdac-for-Max: a cross-platform, open-source set - of node.js scripts and custom Cycling ’74 Max objects which enable the use of - Jacdac, an open, modular 
plug-and-play hardware prototyping platform, with Max - visual programming language frequently used for audio-visual applications. We - discuss the design and implementation of Jacdac-for-Max, and explore a number - of example applications. Through this we show how Jacdac-for-Max can be used to - rapidly prototype digital musical interfaces based on a range of input devices. - Additionally, we discuss these qualities within the context of established principles - for designing musical hardware, and the emerging concepts of long-tail hardware - and frugal innovation. We believe that through Jacdac-for-Max, Jacdac provides - a compelling approach to prototyping musical interfaces while supporting the evolution - beyond a prototype with more robust and scalable solutions.' - address: 'Mexico City, Mexico' - articleno: 53 - author: Kobi Hartley and Steve Hodges and Joe Finney - bibtex: "@article{nime2023_53,\n abstract = {This article presents Jacdac-for-Max:\ - \ a cross-platform, open-source set of node.js scripts and custom Cycling ’74\ - \ Max objects which enable the use of Jacdac, an open, modular plug-and-play hardware\ - \ prototyping platform, with Max visual programming language frequently used for\ - \ audio-visual applications. We discuss the design and implementation of Jacdac-for-Max,\ - \ and explore a number of example applications. Through this we show how Jacdac-for-Max\ - \ can be used to rapidly prototype digital musical interfaces based on a range\ - \ of input devices. Additionally, we discuss these qualities within the context\ - \ of established principles for designing musical hardware, and the emerging concepts\ - \ of long-tail hardware and frugal innovation. 
We believe that through Jacdac-for-Max,\ - \ Jacdac provides a compelling approach to prototyping musical interfaces while\ - \ supporting the evolution beyond a prototype with more robust and scalable solutions.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {53},\n author = {Kobi Hartley\ - \ and Steve Hodges and Joe Finney},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {8},\n pages = {379--386},\n title = {Jacdac-for-Max: Plug-and-Play Physical\ - \ Prototyping of Musical Interfaces},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_53.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: jherrera2014 + abstract: 'A recently developed system that uses pitched sounds to discover relative + 3D positions of a group of devices located in the same physical space is described. + The measurements are coordinated over an IP network in a decentralized manner, + while the actual measurements are carried out measuring the time-of-flight of + the notes played by different devices. Approaches to sonify the discovery process + are discussed. A specific instantiation of the system is described in detail. + The melody is specified in the form of a score, available to every device in the + network. The system performs the melody by playing different notes consecutively + on different devices, keeping a consistent timing, while carrying out the inter-device + measurements necessary to discover the geometrical configuration of the devices + in the physical space.' 
+ address: 'London, United Kingdom' + author: Hyung Suk Kim and Jorge Herrera and Ge Wang + bibtex: "@inproceedings{jherrera2014,\n abstract = {A recently developed system\ + \ that uses pitched sounds to discover relative 3D positions of a group of devices\ + \ located in the same physical space is described. The measurements are coordinated\ + \ over an IP network in a decentralized manner, while the actual measurements\ + \ are carried out measuring the time-of-flight of the notes played by different\ + \ devices. Approaches to sonify the discovery process are discussed. A specific\ + \ instantiation of the system is described in detail. The melody is specified\ + \ in the form of a score, available to every device in the network. The system\ + \ performs the melody by playing different notes consecutively on different devices,\ + \ keeping a consistent timing, while carrying out the inter-device measurements\ + \ necessary to discover the geometrical configuration of the devices in the physical\ + \ space.},\n address = {London, United Kingdom},\n author = {Hyung Suk Kim and\ + \ Jorge Herrera and Ge Wang},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178831},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {273--276},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Ping-Pong: Musically Discovering Locations},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_550.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178831 issn: 2220-4806 - month: May - numpages: 8 - pages: 379--386 - title: 'Jacdac-for-Max: Plug-and-Play Physical Prototyping of Musical Interfaces' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_53.pdf - year: 2023 + month: June + pages: 273--276 + publisher: 
'Goldsmiths, University of London' + title: 'Ping-Pong: Musically Discovering Locations' + url: http://www.nime.org/proceedings/2014/nime2014_550.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_54 - abstract: 'Thales is a composed instrument consisting of two hand-held magnetic - controllers whose interactions with each other and with other magnets produce - the somatosensory manifestation of a tangible interface that the musician generates - and shapes in the act of performing. In this paper we provide a background for - the development of Thales by describing the application of permanent magnets in - HCI and musical interfaces. We also introduce the instrument’s sound generation - based on a neural synthesis model and contextualise the system in relation with - the concept of magnetic scores. We report on our preliminary user study and discuss - the somatosensory response that characterise Thales, observing the interaction - between the opposing magnetic field of the controllers as a tangible magnetic - interface. Finally, we investigate its nature from the perspective of performative - posthumanist ontologies.' - address: 'Mexico City, Mexico' - articleno: 54 - author: Nicola Privato and Thor Magnusson and Einar Torfi Einarsson - bibtex: "@article{nime2023_54,\n abstract = {Thales is a composed instrument consisting\ - \ of two hand-held magnetic controllers whose interactions with each other and\ - \ with other magnets produce the somatosensory manifestation of a tangible interface\ - \ that the musician generates and shapes in the act of performing. In this paper\ - \ we provide a background for the development of Thales by describing the application\ - \ of permanent magnets in HCI and musical interfaces. We also introduce the instrument’s\ - \ sound generation based on a neural synthesis model and contextualise the system\ - \ in relation with the concept of magnetic scores. 
We report on our preliminary\ - \ user study and discuss the somatosensory response that characterise Thales,\ - \ observing the interaction between the opposing magnetic field of the controllers\ - \ as a tangible magnetic interface. Finally, we investigate its nature from the\ - \ perspective of performative posthumanist ontologies.},\n address = {Mexico City,\ - \ Mexico},\n articleno = {54},\n author = {Nicola Privato and Thor Magnusson and\ - \ Einar Torfi Einarsson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {7},\n\ - \ pages = {387--393},\n title = {Magnetic Interactions as a Somatosensory Interface},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_54.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: eberdahl2014 + abstract: 'An embedded acoustic instrument is an embedded musical instrument that + provides a direct acoustic output. This paper describes how to make embedded acoustic + instruments using laser cutting for digital fabrication. Several tips are given + for improving the acoustic quality including: employing maximally stiff material, + placing loudspeaker drivers in the corners of enclosure faces, increasing the + stiffness of ``loudspeaker'''' faces by doubling their thickness, choosing side-lengths + with non-integer ratios, and incorporating bracing. Various versions of an open + design of the ``LapBox'''' are provided to help community members replicate and + extend the work. A procedure is suggested for testing and optimizing the acoustic + quality.' + address: 'London, United Kingdom' + author: Edgar Berdahl + bibtex: "@inproceedings{eberdahl2014,\n abstract = {An embedded acoustic instrument\ + \ is an embedded musical instrument that provides a direct acoustic output. 
This\ + \ paper describes how to make embedded acoustic instruments using laser cutting\ + \ for digital fabrication. Several tips are given for improving the acoustic quality\ + \ including: employing maximally stiff material, placing loudspeaker drivers in\ + \ the corners of enclosure faces, increasing the stiffness of ``loudspeaker''\ + \ faces by doubling their thickness, choosing side-lengths with non-integer ratios,\ + \ and incorporating bracing. Various versions of an open design of the ``LapBox''\ + \ are provided to help community members replicate and extend the work. A procedure\ + \ is suggested for testing and optimizing the acoustic quality.},\n address =\ + \ {London, United Kingdom},\n author = {Edgar Berdahl},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178710},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {140--143},\n publisher = {Goldsmiths, University of London},\n title = {How\ + \ to Make Embedded Acoustic Instruments},\n url = {http://www.nime.org/proceedings/2014/nime2014_551.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178710 issn: 2220-4806 - month: May - numpages: 7 - pages: 387--393 - title: Magnetic Interactions as a Somatosensory Interface - track: Papers - url: http://nime.org/proceedings/2023/nime2023_54.pdf - year: 2023 + month: June + pages: 140--143 + publisher: 'Goldsmiths, University of London' + title: How to Make Embedded Acoustic Instruments + url: http://www.nime.org/proceedings/2014/nime2014_551.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_55 - abstract: "Audio–visual corpus-based synthesis extends the principle of concatenative\ - \ sound synthesis to the visual domain, where, in addition to the sound corpus\ - \ (i.e. 
a collection of segments of recorded sound with a perceptual description\ - \ of their sound character), the artist uses a corpus of still images with visual\ - \ perceptual description (colour, texture, brightness, entropy), in order to create\ - \ an audio–visual musical performance by navigating in real-time through these\ - \ descriptor spaces, i.e. through the collection of sound grains in a space of\ - \ perceptual audio descriptors, and at the same time through the visual descriptor\ - \ space, i.e. selecting images from the visual corpus for rendering, and thus\ - \ navigate in parallel through both corpora interactively with gestural control\ - \ via movement sensors.\nThe artistic–scientific question that is explored here\ - \ is how to control at the same time the navigation through the audio and the\ - \ image descriptor spaces with gesture sensors, in other words, how to link the\ - \ gesture sensing to both the image descriptors and the sound descriptors in order\ - \ to create a symbiotic multi-modal embodied audio–visual experience." - address: 'Mexico City, Mexico' - articleno: 55 - author: Diemo Schwarz - bibtex: "@article{nime2023_55,\n abstract = {Audio–visual corpus-based synthesis\ - \ extends the principle of concatenative sound synthesis to the visual domain,\ - \ where, in addition to the sound corpus (i.e. a collection of segments of recorded\ - \ sound with a perceptual description of their sound character), the artist uses\ - \ a corpus of still images with visual perceptual description (colour, texture,\ - \ brightness, entropy), in order to create an audio–visual musical performance\ - \ by navigating in real-time through these descriptor spaces, i.e. through the\ - \ collection of sound grains in a space of perceptual audio descriptors, and at\ - \ the same time through the visual descriptor space, i.e. 
selecting images from\ - \ the visual corpus for rendering, and thus navigate in parallel through both\ - \ corpora interactively with gestural control via movement sensors.\nThe artistic–scientific\ - \ question that is explored here is how to control at the same time the navigation\ - \ through the audio and the image descriptor spaces with gesture sensors, in other\ - \ words, how to link the gesture sensing to both the image descriptors and the\ - \ sound descriptors in order to create a symbiotic multi-modal embodied audio–visual\ - \ experience.},\n address = {Mexico City, Mexico},\n articleno = {55},\n author\ - \ = {Diemo Schwarz},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {8},\n\ - \ pages = {394--401},\n title = {Touch Interaction for Corpus-based Audio–Visual\ - \ Synthesis},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_55.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: cdominguez2014 + abstract: 'This paper presents a project that discusses a brief history of artistic + systems that use photoresistors (light-dependent resistors) and results in the + construction of an interface and performance controller. The controller combines + an Arduino microcontroller with a grid of photoresistors set into a slab of wood + covered with a thin acrylic sheet. A brief background on past uses of these components + for music and film composition and instrument-building introduces a few different + implementations and performance contexts for the controller. Topics such as implementation, + construction, and performance possibilities (including electroacoustic and audio-visual + performance) of the controller are also discussed.' 
+ address: 'London, United Kingdom' + author: Carlos Dominguez + bibtex: "@inproceedings{cdominguez2014,\n abstract = {This paper presents a project\ + \ that discusses a brief history of artistic systems that use photoresistors (light-dependent\ + \ resistors) and results in the construction of an interface and performance controller.\ + \ The controller combines an Arduino microcontroller with a grid of photoresistors\ + \ set into a slab of wood covered with a thin acrylic sheet. A brief background\ + \ on past uses of these components for music and film composition and instrument-building\ + \ introduces a few different implementations and performance contexts for the\ + \ controller. Topics such as implementation, construction, and performance possibilities\ + \ (including electroacoustic and audio-visual performance) of the controller are\ + \ also discussed.},\n address = {London, United Kingdom},\n author = {Carlos Dominguez},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178750},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {78--79},\n publisher = {Goldsmiths, University of\ + \ London},\n title = {16-{CdS}: A Surface Controller for the Simultaneous Manipulation\ + \ of Multiple Analog Components},\n url = {http://www.nime.org/proceedings/2014/nime2014_552.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178750 issn: 2220-4806 - month: May - numpages: 8 - pages: 394--401 - title: Touch Interaction for Corpus-based Audio–Visual Synthesis - track: Papers - url: http://nime.org/proceedings/2023/nime2023_55.pdf - year: 2023 + month: June + pages: 78--79 + publisher: 'Goldsmiths, University of London' + title: '16-CdS: A Surface Controller for the Simultaneous Manipulation of Multiple + Analog Components' + url: 
http://www.nime.org/proceedings/2014/nime2014_552.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_56 - abstract: "This paper examines the use of the no-input mixing desk—or feedback mixer—across\ - \ a range of musical practices. The research draws on twenty two artist interviews\ - \ conducted by the authors, and on magazine and forum archives. We focus particularly\ - \ on how the properties of the no-input mixer connect with the musical, aesthetic\ - \ and practical concerns of these practices. The affordability, accessibility,\ - \ and non-hierarchical nature of the instrument are examined as factors that help\ - \ the idea spread, and that can be important political dimensions for artists.\n\ - The material, social and cultural aspects are brought together to provide a detailed\ - \ picture of the instrument that goes beyond technical description. This provides\ - \ a useful case study for NIME in thinking through these intercon- nections, particularly\ - \ in looking outwards to how musical instruments and associated musical ideas\ - \ travel, and how they can effect change and be changed themselves in their encounters\ - \ with real-world musical contexts." - address: 'Mexico City, Mexico' - articleno: 56 - author: Tom Mudd and Akira Brown - bibtex: "@article{nime2023_56,\n abstract = {This paper examines the use of the\ - \ no-input mixing desk—or feedback mixer—across a range of musical practices.\ - \ The research draws on twenty two artist interviews conducted by the authors,\ - \ and on magazine and forum archives. We focus particularly on how the properties\ - \ of the no-input mixer connect with the musical, aesthetic and practical concerns\ - \ of these practices. 
The affordability, accessibility, and non-hierarchical nature\ - \ of the instrument are examined as factors that help the idea spread, and that\ - \ can be important political dimensions for artists.\nThe material, social and\ - \ cultural aspects are brought together to provide a detailed picture of the instrument\ - \ that goes beyond technical description. This provides a useful case study for\ - \ NIME in thinking through these intercon- nections, particularly in looking outwards\ - \ to how musical instruments and associated musical ideas travel, and how they\ - \ can effect change and be changed themselves in their encounters with real-world\ - \ musical contexts.},\n address = {Mexico City, Mexico},\n articleno = {56},\n\ - \ author = {Tom Mudd and Akira Brown},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {7},\n pages = {402--408},\n title = {Contrasting approaches to the no-input\ - \ mixer},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_56.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: slee2014 + abstract: 'In the setting of collaborative live coding, a number of issues emerge: + (1) need for communication, (2) issues of conflicts in sharing program state space, + and (3) remote control of code execution. In this paper, we propose solutions + to these problems. In the recent extension of UrMus, a programming environment + for mobile music application development, we introduce a paradigm of shared and + individual namespaces safeguard against conflicts in parallel coding activities. + We also develop live variable view that communicates live changes in state among + live coders, networked performers, and the audience. 
Lastly, we integrate collaborative + aspects of programming execution into built-in live chat, which enables not only + communication with others, but also distributed execution of code.' + address: 'London, United Kingdom' + author: Sang Won Lee and Georg Essl + bibtex: "@inproceedings{slee2014,\n abstract = {In the setting of collaborative\ + \ live coding, a number of issues emerge: (1) need for communication, (2) issues\ + \ of conflicts in sharing program state space, and (3) remote control of code\ + \ execution. In this paper, we propose solutions to these problems. In the recent\ + \ extension of UrMus, a programming environment for mobile music application development,\ + \ we introduce a paradigm of shared and individual namespaces safeguard against\ + \ conflicts in parallel coding activities. We also develop live variable view\ + \ that communicates live changes in state among live coders, networked performers,\ + \ and the audience. Lastly, we integrate collaborative aspects of programming\ + \ execution into built-in live chat, which enables not only communication with\ + \ others, but also distributed execution of code.},\n address = {London, United\ + \ Kingdom},\n author = {Sang Won Lee and Georg Essl},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178847},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {263--268},\n publisher = {Goldsmiths, University of London},\n title = {Communication,\ + \ Control, and State Sharing in Collaborative Live Coding},\n url = {http://www.nime.org/proceedings/2014/nime2014_554.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178847 issn: 2220-4806 - month: May - numpages: 7 - pages: 402--408 - title: Contrasting approaches to the no-input mixer - track: Papers - url: 
http://nime.org/proceedings/2023/nime2023_56.pdf - year: 2023 + month: June + pages: 263--268 + publisher: 'Goldsmiths, University of London' + title: 'Communication, Control, and State Sharing in Collaborative Live Coding' + url: http://www.nime.org/proceedings/2014/nime2014_554.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_57 - abstract: 'The Electrosteel is a new electronic instrument inspired by the user - interface of the pedal steel guitar (PSG). The Electrosteel uses the interface - concepts of the PSG (a bar in the left hand, plucked strings for the right hand, - foot pedals, knee levers, etc) as a control paradigm for digital synthesis. The - instrument allows performers with skill on the PSG to expand their sonic range, - and creates a powerful new multi-dimensional way to control synthesis. This paper - describes the development of the instrument and its custom embedded synthesis - engine, with a focus on the design challenges posed by mapping an existing performer - interface to a new instrument.' - address: 'Mexico City, Mexico' - articleno: 57 - author: Jeffrey Snyder and Davis Polito and Matthew Wang - bibtex: "@article{nime2023_57,\n abstract = {The Electrosteel is a new electronic\ - \ instrument inspired by the user interface of the pedal steel guitar (PSG). The\ - \ Electrosteel uses the interface concepts of the PSG (a bar in the left hand,\ - \ plucked strings for the right hand, foot pedals, knee levers, etc) as a control\ - \ paradigm for digital synthesis. The instrument allows performers with skill\ - \ on the PSG to expand their sonic range, and creates a powerful new multi-dimensional\ - \ way to control synthesis. 
This paper describes the development of the instrument\ - \ and its custom embedded synthesis engine, with a focus on the design challenges\ - \ posed by mapping an existing performer interface to a new instrument.},\n address\ - \ = {Mexico City, Mexico},\n articleno = {57},\n author = {Jeffrey Snyder and\ - \ Davis Polito and Matthew Wang},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {8},\n pages = {409--416},\n title = {The Electrosteel: An Electronic Instrument\ - \ Inspired by the Pedal Steel Guitar},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_57.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: rcollecchia2014 + abstract: 'Sirens evoke images of alarm, public service, war, and forthcoming air + raid. Outside of the music of Edgard Varese, sirens have rarely been framed as + musical instruments. By connecting air hoses to spinning disks with evenly-spaced + perforations, the siren timbre is translated musically. Polyphony gives our instrument + an organ-like personality: keys are mapped to different frequencies and the pressure + applied to them determines volume. The siren organ can produce a large range of + sounds both timbrally and dynamically. In addition to a siren timbre, the instrument + produces similar sounds to a harmonica. Portability, robustness, and electronic + stability are all areas for improvement.' + address: 'London, United Kingdom' + author: Regina Collecchia and Dan Somen and Kevin McElroy + bibtex: "@inproceedings{rcollecchia2014,\n abstract = {Sirens evoke images of alarm,\ + \ public service, war, and forthcoming air raid. Outside of the music of Edgard\ + \ Varese, sirens have rarely been framed as musical instruments. 
By connecting\ + \ air hoses to spinning disks with evenly-spaced perforations, the siren timbre\ + \ is translated musically. Polyphony gives our instrument an organ-like personality:\ + \ keys are mapped to different frequencies and the pressure applied to them determines\ + \ volume. The siren organ can produce a large range of sounds both timbrally and\ + \ dynamically. In addition to a siren timbre, the instrument produces similar\ + \ sounds to a harmonica. Portability, robustness, and electronic stability are\ + \ all areas for improvement.},\n address = {London, United Kingdom},\n author\ + \ = {Regina Collecchia and Dan Somen and Kevin McElroy},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178732},\n issn = {2220-4806},\n month = {June},\n pages\ + \ = {391--394},\n publisher = {Goldsmiths, University of London},\n title = {The\ + \ Siren Organ},\n url = {http://www.nime.org/proceedings/2014/nime2014_558.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178732 issn: 2220-4806 - month: May - numpages: 8 - pages: 409--416 - title: 'The Electrosteel: An Electronic Instrument Inspired by the Pedal Steel Guitar' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_57.pdf - year: 2023 + month: June + pages: 391--394 + publisher: 'Goldsmiths, University of London' + title: The Siren Organ + url: http://www.nime.org/proceedings/2014/nime2014_558.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_58 - abstract: 'This paper describes an extended intercontinental collaboration between - multiple artists, institutions, and their publics, to develop an integrated musical - practice which combines experimental making, performance, and pedagogy. 
We build - on contributions to NIME which work with art and design-led methods to explore - alternatives to, for example, more engineering-oriented approaches, without loss - of practical utility and theoretical potential. We describe two week-long workshop-residencies - and three performance-installations done under the provocative title Raw Data, - Rough Mix which was intended to encourage exploration of basic processes in physical, - mechanical, electrical, electronic and computational domains to develop musical - artefacts that were frugal in their resource-demands but enabled the interrogation - of human/non-human relationships, performativity, musical ecologies, aesthetics, - and other matters. We close by elaborating our contribution to NIME as offering - an integrated practice combining making, playing and learning, which is critically - informed and practically productive.' - address: 'Mexico City, Mexico' - articleno: 58 - author: John M Bowers and John Richards and Tim Shaw and Robin Foster and AKIHIRO - KUBOTA - bibtex: "@article{nime2023_58,\n abstract = {This paper describes an extended intercontinental\ - \ collaboration between multiple artists, institutions, and their publics, to\ - \ develop an integrated musical practice which combines experimental making, performance,\ - \ and pedagogy. We build on contributions to NIME which work with art and design-led\ - \ methods to explore alternatives to, for example, more engineering-oriented approaches,\ - \ without loss of practical utility and theoretical potential. 
We describe two\ - \ week-long workshop-residencies and three performance-installations done under\ - \ the provocative title Raw Data, Rough Mix which was intended to encourage exploration\ - \ of basic processes in physical, mechanical, electrical, electronic and computational\ - \ domains to develop musical artefacts that were frugal in their resource-demands\ - \ but enabled the interrogation of human/non-human relationships, performativity,\ - \ musical ecologies, aesthetics, and other matters. We close by elaborating our\ - \ contribution to NIME as offering an integrated practice combining making, playing\ - \ and learning, which is critically informed and practically productive.},\n address\ - \ = {Mexico City, Mexico},\n articleno = {58},\n author = {John M Bowers and John\ - \ Richards and Tim Shaw and Robin Foster and AKIHIRO KUBOTA},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {11},\n pages = {417--427},\n title = {Raw Data, Rough\ - \ Mix: Towards an Integrated Practice of Making, Performance and Pedagogy},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_58.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: drector2014 + abstract: 'Actuated instruments is a growing area of activity for research and composition, + yet there has been little focus on membrane-based instruments. This paper describes + a novel design for an internally actuated drum based on the mechanical principles + of a loudspeaker. Implementation is described in detail; in particular, two modes + of actuation, a moving-coil electromagnet and a moving-magnet design, are described. + We evaluate the drum using a synthesized frequency sweep, and find that the instrument + has a broad frequency response and exhibits qualities of both a drum and speaker.' 
+ address: 'London, United Kingdom' + author: David Rector and Spencer Topel + bibtex: "@inproceedings{drector2014,\n abstract = {Actuated instruments is a growing\ + \ area of activity for research and composition, yet there has been little focus\ + \ on membrane-based instruments. This paper describes a novel design for an internally\ + \ actuated drum based on the mechanical principles of a loudspeaker. Implementation\ + \ is described in detail; in particular, two modes of actuation, a moving-coil\ + \ electromagnet and a moving-magnet design, are described. We evaluate the drum\ + \ using a synthesized frequency sweep, and find that the instrument has a broad\ + \ frequency response and exhibits qualities of both a drum and speaker.},\n address\ + \ = {London, United Kingdom},\n author = {David Rector and Spencer Topel},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178913},\n issn = {2220-4806},\n month\ + \ = {June},\n pages = {395--398},\n publisher = {Goldsmiths, University of London},\n\ + \ title = {Internally Actuated Drums for Expressive Performance},\n url = {http://www.nime.org/proceedings/2014/nime2014_559.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178913 issn: 2220-4806 - month: May - numpages: 11 - pages: 417--427 - title: 'Raw Data, Rough Mix: Towards an Integrated Practice of Making, Performance - and Pedagogy' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_58.pdf - year: 2023 + month: June + pages: 395--398 + publisher: 'Goldsmiths, University of London' + title: Internally Actuated Drums for Expressive Performance + url: http://www.nime.org/proceedings/2014/nime2014_559.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_59 - abstract: 'The steelpan is a pitched percussion 
instrument that although generally - known by listeners is typically not included in music instrument audio datasets. This - means that it is usually underrepresented in existing data-driven deep learning - models for fundamental frequency estimation. Furthermore, the steelpan has complex - acoustic properties that make fundamental frequency estimation challenging when - using deep learning models for general fundamental frequency estimation for any - music instrument. Fundamental frequency estimation or pitch detection is a fundamental - task in music information retrieval and it is interesting to explore methods that - are tailored to specific instruments and whether they can outperform general - methods. To address this, we present SASS, the Steelpan Analysis Sample Set that - can be used to train steel-pan specific pitch detection algorithms as well as - propose a custom-trained deep learning model for steelpan fundamental frequency - estimation. This model outperforms general state-of-the-art methods such as pYin - and CREPE on steelpan audio - even while having significantly fewer parameters - and operating on a shorter analysis window. This reduces minimum system latency, - allowing for deployment to a real-time system that can be used in live music contexts.' - address: 'Mexico City, Mexico' - articleno: 59 - author: Colin Malloy and George Tzanetakis - bibtex: "@article{nime2023_59,\n abstract = {The steelpan is a pitched percussion\ - \ instrument that although generally known by listeners is typically not included\ - \ in music instrument audio datasets. This means that it is usually underrepresented\ - \ in existing data-driven deep learning models for fundamental frequency estimation.\ - \ Furthermore, the steelpan has complex acoustic properties that make fundamental\ - \ frequency estimation challenging when using deep learning models for general\ - \ fundamental frequency estimation for any music instrument. 
Fundamental frequency\ - \ estimation or pitch detection is a fundamental task in music information retrieval\ - \ and it is interesting to explore methods that are tailored to specific instruments\ - \ and whether they can outperform general methods. To address this, we present\ - \ SASS, the Steelpan Analysis Sample Set that can be used to train steel-pan specific\ - \ pitch detection algorithms as well as propose a custom-trained deep learning\ - \ model for steelpan fundamental frequency estimation. This model outperforms\ - \ general state-of-the-art methods such as pYin and CREPE on steelpan audio -\ - \ even while having significantly fewer parameters and operating on a shorter\ - \ analysis window. This reduces minimum system latency, allowing for deployment\ - \ to a real-time system that can be used in live music contexts.},\n address =\ - \ {Mexico City, Mexico},\n articleno = {59},\n author = {Colin Malloy and George\ - \ Tzanetakis},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {8},\n pages = {428--435},\n\ - \ title = {Steelpan-specific pitch detection: a dataset and deep learning model},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_59.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: ssalazar2014 + abstract: 'Effective software interaction design must consider all of the capabilities + and limitations of the platform for which it is developed. To this end, we propose + a new model for computer music system design on touchscreen devices, combining + both pen/stylus input and multitouch gestures. Such a model surpasses the barrier + of touchscreen-based keyboard input, preserving the primary interaction of touch + and direct manipulation throughout the development of a complex musical program. 
+ We have implemented an iPad software application utilizing these principles, called + ``Auraglyph.'''' Auraglyph offers a number of fundamental audio processing and + control operators, as well as facilities for structured input and output. All + of these software objects are created, parameterized, and interconnected via stylus + and touch input. Underlying this application is an advanced handwriting recognition + framework, LipiTk, which can be trained to recognize both alphanumeric characters + and arbitrary figures, shapes, and patterns.' + address: 'London, United Kingdom' + author: Spencer Salazar and Ge Wang + bibtex: "@inproceedings{ssalazar2014,\n abstract = {Effective software interaction\ + \ design must consider all of the capabilities and limitations of the platform\ + \ for which it is developed. To this end, we propose a new model for computer\ + \ music system design on touchscreen devices, combining both pen/stylus input\ + \ and multitouch gestures. Such a model surpasses the barrier of touchscreen-based\ + \ keyboard input, preserving the primary interaction of touch and direct manipulation\ + \ throughout the development of a complex musical program. We have implemented\ + \ an iPad software application utilizing these principles, called ``Auraglyph.''\ + \ Auraglyph offers a number of fundamental audio processing and control operators,\ + \ as well as facilities for structured input and output. All of these software\ + \ objects are created, parameterized, and interconnected via stylus and touch\ + \ input. 
Underlying this application is an advanced handwriting recognition framework,\ + \ LipiTk, which can be trained to recognize both alphanumeric characters and arbitrary\ + \ figures, shapes, and patterns.},\n address = {London, United Kingdom},\n author\ + \ = {Spencer Salazar and Ge Wang},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178927},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {106--109},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Auraglyph: Handwritten Computer Music Composition\ + \ and Design},\n url = {http://www.nime.org/proceedings/2014/nime2014_560.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178927 issn: 2220-4806 - month: May - numpages: 8 - pages: 428--435 - title: 'Steelpan-specific pitch detection: a dataset and deep learning model' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_59.pdf - year: 2023 + month: June + pages: 106--109 + publisher: 'Goldsmiths, University of London' + title: 'Auraglyph: Handwritten Computer Music Composition and Design' + url: http://www.nime.org/proceedings/2014/nime2014_560.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_60 - abstract: 'This paper describes AbletonOSC, an Open Sound Control API whose objective - is to expose the complete Ableton Live Object Model via OSC. Embedded within Live - by harnessing its internal Python scripting interface, AbletonOSC allows external - processes to exert real-time control over any element of a Live set, ranging from - generating new melodic sequences to modulating deeply-nested synthesis parameters. 
- We describe the motivations and historical precedents behind AbletonOSC, provide - an overview of its OSC namespace and the classes of functionality that are exposed - by the API, and look at a series of applied case studies that demonstrate the - new types of musical interface that AbletonOSC enables.' - address: 'Mexico City, Mexico' - articleno: 60 - author: Daniel Jones - bibtex: "@article{nime2023_60,\n abstract = {This paper describes AbletonOSC, an\ - \ Open Sound Control API whose objective is to expose the complete Ableton Live\ - \ Object Model via OSC. Embedded within Live by harnessing its internal Python\ - \ scripting interface, AbletonOSC allows external processes to exert real-time\ - \ control over any element of a Live set, ranging from generating new melodic\ - \ sequences to modulating deeply-nested synthesis parameters. We describe the\ - \ motivations and historical precedents behind AbletonOSC, provide an overview\ - \ of its OSC namespace and the classes of functionality that are exposed by the\ - \ API, and look at a series of applied case studies that demonstrate the new types\ - \ of musical interface that AbletonOSC enables.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {60},\n author = {Daniel Jones},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {5},\n pages = {436--440},\n title = {AbletonOSC: A unified control\ - \ API for Ableton Live},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_60.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: ahornof2014 + abstract: 'Although new sensor devices and data streams are increasingly used for + musical expression, and although eye-tracking devices have become increasingly + cost-effective and prevalent in research and as a means of communication for people + with severe 
motor impairments, eye-controlled musical expression nonetheless remains + somewhat elusive and minimally explored. This paper (a) identifies a number of + fundamental human eye movement capabilities and constraints which determine in + part what can and cannot be musically expressed with eye movements, (b) reviews + prior work on eye-controlled musical expression, and (c) analyzes and provides + a taxonomy of what has been done, and what will need to be addressed in future + eye-controlled musical instruments. The fundamental human constraints and processes + that govern eye movements create a challenge for eye-controlled music in that + the instrument needs to be designed to motivate or at least permit specific unique + visual goals, each of which when accomplished must then be mapped, using the eye + tracker and some sort of sound generator, to different musical outcomes. The control + of the musical instrument is less direct than if it were played with muscles that + can be controlled in a more direct manner, such as the muscles in the hands.' + address: 'London, United Kingdom' + author: Anthony Hornof + bibtex: "@inproceedings{ahornof2014,\n abstract = {Although new sensor devices and\ + \ data streams are increasingly used for musical expression, and although eye-tracking\ + \ devices have become increasingly cost-effective and prevalent in research and\ + \ as a means of communication for people with severe motor impairments, eye-controlled\ + \ musical expression nonetheless remains somewhat elusive and minimally explored.\ + \ This paper (a) identifies a number of fundamental human eye movement capabilities\ + \ and constraints which determine in part what can and cannot be musically expressed\ + \ with eye movements, (b) reviews prior work on eye-controlled musical expression,\ + \ and (c) analyzes and provides a taxonomy of what has been done, and what will\ + \ need to be addressed in future eye-controlled musical instruments. 
The fundamental\ + \ human constraints and processes that govern eye movements create a challenge\ + \ for eye-controlled music in that the instrument needs to be designed to motivate\ + \ or at least permit specific unique visual goals, each of which when accomplished\ + \ must then be mapped, using the eye tracker and some sort of sound generator,\ + \ to different musical outcomes. The control of the musical instrument is less\ + \ direct than if it were played with muscles that can be controlled in a more\ + \ direct manner, such as the muscles in the hands.},\n address = {London, United\ + \ Kingdom},\n author = {Anthony Hornof},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178804},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {461--466},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {The Prospects For Eye-Controlled Musical Performance},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_562.pdf},\n year = {2014}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178804 issn: 2220-4806 - month: May - numpages: 5 - pages: 436--440 - title: 'AbletonOSC: A unified control API for Ableton Live' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_60.pdf - year: 2023 + month: June + pages: 461--466 + publisher: 'Goldsmiths, University of London' + title: The Prospects For Eye-Controlled Musical Performance + url: http://www.nime.org/proceedings/2014/nime2014_562.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_61 - abstract: "Some of the performer’s gestures, despite corresponding to different\ - \ physical interactions, might produce a similar sonic output. This is the case\ - \ of upward and downward string bends on the guitar where stretching the string\ - \ shifts the pitch upwards. 
Bending represents \nan expressive resource that\ - \ extends across many different styles of guitar playing. \nIn this study, we\ - \ presented performers with an augmented electric guitar on which the gesture-to-sound\ - \ relationship of downward bending gestures is changed depending on how the instrument\ - \ is configured. Participants were asked to explore and perform a short improvisation\ - \ under three different conditions, two augmentations that correspond to different\ - \ auditory imagery and a constrained scenario. The different sessions of the experiment\ - \ were recorded to conduct thematic analysis as an examination of how gestural\ - \ disambiguation can be exploited in the design of augmentations that focus on\ - \ reusing performer's expertise and how the gesture-to-sound entanglement of the\ - \ different modalities supports or encumbers the performer's embodied relationship\ - \ with the instrument." - address: 'Mexico City, Mexico' - articleno: 61 - author: Adan L. Benito Temprano and Teodoro Dannemann and Andrew McPherson - bibtex: "@article{nime2023_61,\n abstract = {Some of the performer’s gestures, despite\ - \ corresponding to different physical interactions, might produce a similar sonic\ - \ output. This is the case of upward and downward string bends on the guitar where\ - \ stretching the string shifts the pitch upwards. Bending represents \nan expressive\ - \ resource that extends across many different styles of guitar playing. \nIn this\ - \ study, we presented performers with an augmented electric guitar on which the\ - \ gesture-to-sound relationship of downward bending gestures is changed depending\ - \ on how the instrument is configured. Participants were asked to explore and\ - \ perform a short improvisation under three different conditions, two augmentations\ - \ that correspond to different auditory imagery and a constrained scenario. 
The\ - \ different sessions of the experiment were recorded to conduct thematic analysis\ - \ as an examination of how gestural disambiguation can be exploited in the design\ - \ of augmentations that focus on reusing performer's expertise and how the gesture-to-sound\ - \ entanglement of the different modalities supports or encumbers the performer's\ - \ embodied relationship with the instrument.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {61},\n author = {Adan L. Benito Temprano and Teodoro Dannemann\ - \ and Andrew McPherson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {10},\n\ - \ pages = {441--450},\n title = {Exploring the (un)ambiguous guitar: A Qualitative\ - \ Study on the use of Gesture Disambiguation in Augmented Instrument Design},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_61.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: aplace2014 + abstract: 'This paper explores the design process of the AlphaSphere, an experimental + new musical instrument that has transitioned into scale production and international + distribution. Initially, the design intentions and engineering processes are covered. + The paper continues by briefly evaluating the user testing process and outlining + the ergonomics, communication protocol and software of the device. The paper closes + by questioning what it takes to evaluate success as a musical instrument.' + address: 'London, United Kingdom' + author: Adam Place and Liam Lacey and Thomas Mitchell + bibtex: "@inproceedings{aplace2014,\n abstract = {This paper explores the design\ + \ process of the AlphaSphere, an experimental new musical instrument that has\ + \ transitioned into scale production and international distribution. 
Initially,\ + \ the design intentions and engineering processes are covered. The paper continues\ + \ by briefly evaluating the user testing process and outlining the ergonomics,\ + \ communication protocol and software of the device. The paper closes by questioning\ + \ what it takes to evaluate success as a musical instrument.},\n address = {London,\ + \ United Kingdom},\n author = {Adam Place and Liam Lacey and Thomas Mitchell},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178903},\n issn = {2220-4806},\n\ + \ month = {June},\n pages = {399--402},\n publisher = {Goldsmiths, University\ + \ of London},\n title = {AlphaSphere from Prototype to Product},\n url = {http://www.nime.org/proceedings/2014/nime2014_568.pdf},\n\ + \ year = {2014}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178903 issn: 2220-4806 - month: May - numpages: 10 - pages: 441--450 - title: 'Exploring the (un)ambiguous guitar: A Qualitative Study on the use of Gesture - Disambiguation in Augmented Instrument Design' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_61.pdf - year: 2023 + month: June + pages: 399--402 + publisher: 'Goldsmiths, University of London' + title: AlphaSphere from Prototype to Product + url: http://www.nime.org/proceedings/2014/nime2014_568.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_62 - abstract: "The concept of sound sculpture can embrace a rich variety of artistic\ - \ manifestations and disciplines since it contains music, plastic arts, and performance,\ - \ to say the least. 
Even the conceptual and space design or the skills and crafts\ - \ necessary to transform physical materials demonstrates its interdisciplinary\ - \ potential.\nSensattice is an emerging sound sculpture proposal, which takes\ - \ advantage of organic raw materials considered waste to convert them into biopolymers\ - \ and explores their acoustic and haptic potential taking \"skin and bone\" as\ - \ conceptual premises to synthesize two fundamental materials. Such materials\ - \ were obtained by applying biomaterial engineering and 3D modeling and printing\ - \ as parallel processes.\nSensattice seems to be an emerging system since it is\ - \ not reduced to mere materials but involves people and situated epistemic approaches\ - \ that literally shape a sculptural lattice through the sensory and symbolic perception\ - \ of skin and bones that can be sounded before, during and after the sculptural\ - \ construction." - address: 'Mexico City, Mexico' - articleno: 62 - author: Jonathan Diaz - bibtex: "@article{nime2023_62,\n abstract = {The concept of sound sculpture can\ - \ embrace a rich variety of artistic manifestations and disciplines since it contains\ - \ music, plastic arts, and performance, to say the least. Even the conceptual\ - \ and space design or the skills and crafts necessary to transform physical materials\ - \ demonstrates its interdisciplinary potential.\nSensattice is an emerging sound\ - \ sculpture proposal, which takes advantage of organic raw materials considered\ - \ waste to convert them into biopolymers and explores their acoustic and haptic\ - \ potential taking \"skin and bone\" as conceptual premises to synthesize two\ - \ fundamental materials. 
Such materials were obtained by applying biomaterial\ - \ engineering and 3D modeling and printing as parallel processes.\nSensattice\ - \ seems to be an emerging system since it is not reduced to mere materials but\ - \ involves people and situated epistemic approaches that literally shape a sculptural\ - \ lattice through the sensory and symbolic perception of skin and bones that can\ - \ be sounded before, during and after the sculptural construction.},\n address\ - \ = {Mexico City, Mexico},\n articleno = {62},\n author = {Jonathan Diaz},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {6},\n pages = {451--456},\n title = {Sensattice:\ - \ An emerging collaborative and modular sound sculpture},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_62.pdf},\n year = {2023}\n\ +- ENTRYTYPE: inproceedings + ID: aandersson2014 + abstract: 'In this paper we explore how we compose sound for an interactive tangible + and mobile interface; where the goal is to improve health and well-being for families + with children with disabilities. We describe the composition process from how + we decompose a linear beat-based and vocal sound material; recompose it with real-time + audio synthesis and composition rules into interactive Scenes. Scenes that make + it possible for the user to select, explore and recreate different ``sound worlds'''' + with the tangible interface as an instrument; create and play with it as a friend; + improvise and create; or relax with it as an ambient sounding furniture. We continue + discussing a user story, how the Scenes are recreated by amateur users, persons + with severe disabilities and family members; improvising with the mobile tangibles. + We discuss composition techniques for mixing sound, tangible-physical and lighting + elements in the Scenes. 
Based on observations we explore how a diverse audience + in the family and at school can recreate and improvise their own sound experience + and play together with others. We conclude by discussing the possible impact of + our findings for the NIME-community; how the techniques of decomposing, recomposing + and recreating sound, based on a relational perspective, could contribute to the + design of new instruments for musical expression.' + address: 'London, United Kingdom' + author: Anders-Petter Andersson and Birgitta Cappelen and Fredrik Olofsson + bibtex: "@inproceedings{aandersson2014,\n abstract = {In this paper we explore how\ + \ we compose sound for an interactive tangible and mobile interface; where the\ + \ goal is to improve health and well-being for families with children with disabilities.\ + \ We describe the composition process from how we decompose a linear beat-based\ + \ and vocal sound material; recompose it with real-time audio synthesis and composition\ + \ rules into interactive Scenes. Scenes that make it possible for the user to\ + \ select, explore and recreate different ``sound worlds'' with the tangible interface\ + \ as an instrument; create and play with it as a friend; improvise and create;\ + \ or relax with it as an ambient sounding furniture. We continue discussing a\ + \ user story, how the Scenes are recreated by amateur users, persons with severe\ + \ disabilities and family members; improvising with the mobile tangibles. We discuss\ + \ composition techniques for mixing sound, tangible-physical and lighting elements\ + \ in the Scenes. Based on observations we explore how a diverse audience in the\ + \ family and at school can recreate and improvise their own sound experience and\ + \ play together with others. 
We conclude by discussing the possible impact of\ + \ our findings for the NIME-community; how the techniques of decomposing, recomposing\ + \ and recreating sound, based on a relational perspective, could contribute to\ + \ the design of new instruments for musical expression.},\n address = {London,\ + \ United Kingdom},\n author = {Anders-Petter Andersson and Birgitta Cappelen and\ + \ Fredrik Olofsson},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178702},\n\ + \ issn = {2220-4806},\n month = {June},\n pages = {529--532},\n publisher = {Goldsmiths,\ + \ University of London},\n title = {Designing Sound for Recreation and Well-Being},\n\ + \ url = {http://www.nime.org/proceedings/2014/nime2014_572.pdf},\n year = {2014}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1178702 issn: 2220-4806 - month: May - numpages: 6 - pages: 451--456 - title: 'Sensattice: An emerging collaborative and modular sound sculpture' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_62.pdf - year: 2023 + month: June + pages: 529--532 + publisher: 'Goldsmiths, University of London' + title: Designing Sound for Recreation and Well-Being + url: http://www.nime.org/proceedings/2014/nime2014_572.pdf + year: 2014 -- ENTRYTYPE: article - ID: nime2023_63 - abstract: "In this paper, the NIME “shard-speakers” is situated within the cultural\ - \ context of the typical uses of crystal singing bowls, specifically acknowledging\ - \ the origins of crystal bowls as re-purposed by-products of the silicon chip\ - \ manufaturing process, and their subsequent adoption into the toolkits of New\ - \ Age sound healing practitioners. 
Following this discussion is a first-person\ - \ anecdotal account of the author/composer’s sonic explorations using crystal\ - \ singing bowls in combination with the shards of broken bowls and custom electronics\ - \ to create a body of recorded, acoustic, and electroacoustic musical works named\ - \ Crushed Matrices #1-7. The last section of this paper explains how the extended\ - \ musical techniques unearthed through the Crushed Matrices investigations informed\ - \ the creation of the shard-speakers, and the electronically-generated musical\ - \ content that was composed for them in the form of a sound artwork, Ode on Crushed\ - \ Matrices. This recording was fed into the shard-speakers via tactile transducers\ - \ on resonating bodies for the 2022 inaugural installation of the work, which\ - \ at the time of writing is the only installation of the work to date. The paper’s\ - \ conclusion addresses the relationship of this body of work to the NIME 2023\ - \ conference’s theme of “Frugal Music Innovation,” correlating or otherwise characterizing\ - \ its relationship to several of the core competencies set forth by the Frugal\ - \ Innovation Hub: adaptability, lightness of weight, mobile design, affordability,\ - \ local material sourcing, and ruggedness." - address: 'Mexico City, Mexico' - articleno: 63 - author: Anastasia Clarke and Anastasia Clarke - bibtex: "@article{nime2023_63,\n abstract = {In this paper, the NIME “shard-speakers”\ - \ is situated within the cultural context of the typical uses of crystal singing\ - \ bowls, specifically acknowledging the origins of crystal bowls as re-purposed\ - \ by-products of the silicon chip manufaturing process, and their subsequent adoption\ - \ into the toolkits of New Age sound healing practitioners. 
Following this discussion\ - \ is a first-person anecdotal account of the author/composer’s sonic explorations\ - \ using crystal singing bowls in combination with the shards of broken bowls and\ - \ custom electronics to create a body of recorded, acoustic, and electroacoustic\ - \ musical works named Crushed Matrices #1-7. The last section of this paper explains\ - \ how the extended musical techniques unearthed through the Crushed Matrices investigations\ - \ informed the creation of the shard-speakers, and the electronically-generated\ - \ musical content that was composed for them in the form of a sound artwork, Ode\ - \ on Crushed Matrices. This recording was fed into the shard-speakers via tactile\ - \ transducers on resonating bodies for the 2022 inaugural installation of the\ - \ work, which at the time of writing is the only installation of the work to date.\ - \ The paper’s conclusion addresses the relationship of this body of work to the\ - \ NIME 2023 conference’s theme of “Frugal Music Innovation,” correlating or otherwise\ - \ characterizing its relationship to several of the core competencies set forth\ - \ by the Frugal Innovation Hub: adaptability, lightness of weight, mobile design,\ - \ affordability, local material sourcing, and ruggedness.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {63},\n author = {Anastasia Clarke and Anastasia\ - \ Clarke},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {457--462},\n\ - \ title = {Shard-Speakers: An Inquiry into the History, Sonic Properties, and\ - \ Musical Possibilities of Quartz Crystal Singing Bowls},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_63.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: Buchla2005 + address: 'Vancouver, BC, Canada' + author: 'Buchla, 
Don' + bibtex: "@inproceedings{Buchla2005,\n address = {Vancouver, BC, Canada},\n author\ + \ = {Buchla, Don},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176715},\n\ + \ issn = {2220-4806},\n pages = {1--1},\n title = {A History of Buchla's Musical\ + \ Instruments},\n url = {http://www.nime.org/proceedings/2005/nime2005_001.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176715 issn: 2220-4806 - month: May - numpages: 6 - pages: 457--462 - title: 'Shard-Speakers: An Inquiry into the History, Sonic Properties, and Musical - Possibilities of Quartz Crystal Singing Bowls' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_63.pdf - year: 2023 + pages: 1--1 + title: A History of Buchla's Musical Instruments + url: http://www.nime.org/proceedings/2005/nime2005_001.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_64 - abstract: 'In music and computer science classrooms, Blind and Visually Impaired - (BVI) learners are often not given alternatives to visual technologies and materials. - FiLOrk, an ensemble at the Filomen M. D''Agostino Greenberg Music School, is made - up of five BVI high school learners who studied and performed computer music using - the live coding language Tidal Cycles over the course of a semester. To make FiLOrk - approachable and accessible we wrote a new curriculum featuring audio/tactile - learning materials, and we designed a collaborative web editor for use with learners'' - assistive technologies, including screen readers and braille displays. In this - article, we describe findings from classroom observations and interviews. 
We highlight - how learners wrestled with persistent accessibility challenges, connected pre-existing - music knowledge with Tidal Cycles concepts, created a culture of respect and support, - and made suggestions for improving FiLOrk. We conclude by discussing opportunities - to make live coding ensembles accessible to both BVI people and high school learners.' - address: 'Mexico City, Mexico' - articleno: 64 - author: William C Payne and Matthew Kaney and Yuhua Cao and Eric Xu and Xinran Shen - and Katrina Lee and Amy Hurst - bibtex: "@article{nime2023_64,\n abstract = {In music and computer science classrooms,\ - \ Blind and Visually Impaired (BVI) learners are often not given alternatives\ - \ to visual technologies and materials. FiLOrk, an ensemble at the Filomen M.\ - \ D'Agostino Greenberg Music School, is made up of five BVI high school learners\ - \ who studied and performed computer music using the live coding language Tidal\ - \ Cycles over the course of a semester. To make FiLOrk approachable and accessible\ - \ we wrote a new curriculum featuring audio/tactile learning materials, and we\ - \ designed a collaborative web editor for use with learners' assistive technologies,\ - \ including screen readers and braille displays. In this article, we describe\ - \ findings from classroom observations and interviews. We highlight how learners\ - \ wrestled with persistent accessibility challenges, connected pre-existing music\ - \ knowledge with Tidal Cycles concepts, created a culture of respect and support,\ - \ and made suggestions for improving FiLOrk. 
We conclude by discussing opportunities\ - \ to make live coding ensembles accessible to both BVI people and high school\ - \ learners.},\n address = {Mexico City, Mexico},\n articleno = {64},\n author\ - \ = {William C Payne and Matthew Kaney and Yuhua Cao and Eric Xu and Xinran Shen\ - \ and Katrina Lee and Amy Hurst},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {9},\n pages = {463--471},\n title = {Live Coding Ensemble as Accessible Classroom},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_64.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Levin2005 + address: 'Vancouver, BC, Canada' + author: 'Levin, Golan' + bibtex: "@inproceedings{Levin2005,\n address = {Vancouver, BC, Canada},\n author\ + \ = {Levin, Golan},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176770},\n\ + \ issn = {2220-4806},\n pages = {2--3},\n title = {A Personal Chronology of Audiovisual\ + \ Systems Research},\n url = {http://www.nime.org/proceedings/2005/nime2005_002.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176770 issn: 2220-4806 - month: May - numpages: 9 - pages: 463--471 - title: Live Coding Ensemble as Accessible Classroom - track: Papers - url: http://nime.org/proceedings/2023/nime2023_64.pdf - year: 2023 + pages: 2--3 + title: A Personal Chronology of Audiovisual Systems Research + url: http://www.nime.org/proceedings/2005/nime2005_002.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_65 - abstract: 'In this article, we describe the challenges of an artistic residency - that included: a distributed improvisation in VR, 
performances using Digital Musical - Instruments (DMIs), and Open Source software as much as possible. For this residency, - we were constrained to using Mozilla’s Hubs as the Metaverse platform. We describe - the shortcomings of the platform as a performance space in light of our experience, - musical cultures, and the social aspects of a musical performance. We also address - select technical issues pertaining to the context of a hybrid musical performance - (simultaneously in Virtual Reality (VR) and in-real-life (IRL)) using this particular - technology stack. Furthermore, we describe the challenges and surprises that occurred - with Faust (Function Audio Stream), which was our choice of synthesis engine for - the project. We conclude this paper by identifying some possible avenues for future - research, exploration, and performances of a similar nature. We wish to clarify - that although we will be talking a lot about Hubs, which was the Virtual Reality - (VR) platform used for the residency, we were not endorsed by Mozilla.' - address: 'Mexico City, Mexico' - articleno: 65 - author: Michał Seta and Dirk J Stromberg and D STEWART - bibtex: "@article{nime2023_65,\n abstract = {In this article, we describe the challenges\ - \ of an artistic residency that included: a distributed improvisation in VR, performances\ - \ using Digital Musical Instruments (DMIs), and Open Source software as much as\ - \ possible. For this residency, we were constrained to using Mozilla’s Hubs as\ - \ the Metaverse platform. We describe the shortcomings of the platform as a performance\ - \ space in light of our experience, musical cultures, and the social aspects of\ - \ a musical performance. We also address select technical issues pertaining to\ - \ the context of a hybrid musical performance (simultaneously in Virtual Reality\ - \ (VR) and in-real-life (IRL)) using this particular technology stack. 
Furthermore,\ - \ we describe the challenges and surprises that occurred with Faust (Function\ - \ Audio Stream), which was our choice of synthesis engine for the project. We\ - \ conclude this paper by identifying some possible avenues for future research,\ - \ exploration, and performances of a similar nature. We wish to clarify that although\ - \ we will be talking a lot about Hubs, which was the Virtual Reality (VR) platform\ - \ used for the residency, we were not endorsed by Mozilla.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {65},\n author = {Michał Seta and Dirk J Stromberg\ - \ and D STEWART},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {7},\n pages = {472--478},\n\ - \ title = {Building hybrid performances with DMIs, Hubs and Faust},\n track =\ - \ {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_65.pdf},\n year\ - \ = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Buxton2005 + address: 'Vancouver, BC, Canada' + author: 'Buxton, Bill' + bibtex: "@inproceedings{Buxton2005,\n address = {Vancouver, BC, Canada},\n author\ + \ = {Buxton, Bill},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176717},\n\ + \ issn = {2220-4806},\n pages = {4--4},\n title = {Causality and Striking the\ + \ Right Note},\n url = {http://www.nime.org/proceedings/2005/nime2005_004.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176717 issn: 2220-4806 - month: May - numpages: 7 - pages: 472--478 - title: 'Building hybrid performances with DMIs, Hubs and Faust' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_65.pdf - year: 2023 + pages: 4--4 + 
title: Causality and Striking the Right Note + url: http://www.nime.org/proceedings/2005/nime2005_004.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_66 - abstract: 'This paper presents the BioSynth, an affective biofeedback device for - generating electronic music developed over a decade as part of my research-creation - practice. The BioSynth has facilitated the creation of work involving performers - from a variety of ages and professional experiences, contributing to knowledge - regarding emotional performance, exposing the differences between perceived and - felt emotion within biofeedback art, extending emotional quantification techniques - to notions of emotional performance technique, emotional labor, and what feminist - Alva Gotby calls emotional reproduction. The design of the BioSynth privileges - relational and real-world interactions as well as feminist thought regarding gendered - hierarchies between body, mind, musical notation, social context, emotion and - reason, and the division between performers and composers. This feminist inquiry - has led to the development of alternatives to traditional frameworks for biofeedback - music that rely on metaphors of musical instrumentation. After an introduction - presenting two lived scenarios, this article is divided into three sections: hardware, - software, and wetware. The hardware section describes the BioSynth through its - design, which privileges ease-of-use for non-expert users. The software section - describes mapping considerations based on feminist principles of measuring the - emotional subject only against itself. Finally, in the wetware section I describe - a feminist-inspired approach to emotional performance that embraces artificiality, - irony, play, pleasure, and performance in biofeedback art, implying novel models - for composer-instrument-performer relations.' 
- address: 'Mexico City, Mexico' - articleno: 66 - author: Erin M Gee - bibtex: "@article{nime2023_66,\n abstract = {This paper presents the BioSynth, an\ - \ affective biofeedback device for generating electronic music developed over\ - \ a decade as part of my research-creation practice. The BioSynth has facilitated\ - \ the creation of work involving performers from a variety of ages and professional\ - \ experiences, contributing to knowledge regarding emotional performance, exposing\ - \ the differences between perceived and felt emotion within biofeedback art, extending\ - \ emotional quantification techniques to notions of emotional performance technique,\ - \ emotional labor, and what feminist Alva Gotby calls emotional reproduction.\ - \ The design of the BioSynth privileges relational and real-world interactions\ - \ as well as feminist thought regarding gendered hierarchies between body, mind,\ - \ musical notation, social context, emotion and reason, and the division between\ - \ performers and composers. This feminist inquiry has led to the development of\ - \ alternatives to traditional frameworks for biofeedback music that rely on metaphors\ - \ of musical instrumentation. After an introduction presenting two lived scenarios,\ - \ this article is divided into three sections: hardware, software, and wetware.\ - \ The hardware section describes the BioSynth through its design, which privileges\ - \ ease-of-use for non-expert users. The software section describes mapping considerations\ - \ based on feminist principles of measuring the emotional subject only against\ - \ itself. 
Finally, in the wetware section I describe a feminist-inspired approach\ - \ to emotional performance that embraces artificiality, irony, play, pleasure,\ - \ and performance in biofeedback art, implying novel models for composer-instrument-performer\ - \ relations.},\n address = {Mexico City, Mexico},\n articleno = {66},\n author\ - \ = {Erin M Gee},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {7},\n pages = {479--485},\n\ - \ title = {The BioSynth—an affective biofeedback device grounded in feminist thought},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_66.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Bowers2005 + abstract: 'As a response to a number of notable contemporary aesthetic tendencies, + this paper introduces the notion of an infra-instrument as a kind of ‘new interface + for musical expression’ worthy of study and systematic design. In contrast to + hyper-, meta- and virtual instruments, we propose infra-instruments as devices + of restricted interactive potential, with little sensor enhancement, which engender + simple musics with scarce opportunity for conventional virtuosity. After presenting + numerous examples from our work, we argue that it is precisely such interactionally + and sonically challenged designs that leave requisite space for computer-generated + augmentations in hybrid, multi-device performance settings.' + address: 'Vancouver, BC, Canada' + author: 'Bowers, John and Archer, Phil' + bibtex: "@inproceedings{Bowers2005,\n abstract = {As a response to a number of notable\ + \ contemporary aesthetic tendencies, this paper introduces the notion of an infra-instrument\ + \ as a kind of ‘new interface for musical expression’ worthy of study and systematic\ + \ design. 
In contrast to hyper-, meta- and virtual instruments, we propose infra-instruments\ + \ as devices of restricted interactive potential, with little sensor enhancement,\ + \ which engender simple musics with scarce opportunity for conventional virtuosity.\ + \ After presenting numerous examples from our work, we argue that it is precisely\ + \ such interactionally and sonically challenged designs that leave requisite space\ + \ for computer-generated augmentations in hybrid, multi-device performance settings.},\n\ + \ address = {Vancouver, BC, Canada},\n author = {Bowers, John and Archer, Phil},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176713},\n issn = {2220-4806},\n\ + \ keywords = {Infra-instruments, hyperinstruments, meta-instruments, virtual instruments,\ + \ design concepts and principles. },\n pages = {5--10},\n title = {Not Hyper,\ + \ Not Meta, Not Cyber but Infra-Instruments},\n url = {http://www.nime.org/proceedings/2005/nime2005_005.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176713 issn: 2220-4806 - month: May - numpages: 7 - pages: 479--485 - title: The BioSynth—an affective biofeedback device grounded in feminist thought - track: Papers - url: http://nime.org/proceedings/2023/nime2023_66.pdf - year: 2023 + keywords: 'Infra-instruments, hyperinstruments, meta-instruments, virtual instruments, + design concepts and principles. ' + pages: 5--10 + title: 'Not Hyper, Not Meta, Not Cyber but Infra-Instruments' + url: http://www.nime.org/proceedings/2005/nime2005_005.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_67 - abstract: "This article presents new tools developed in FAUST language to create\ - \ musical interactions using electrophysiological signals as input. 
The developed\ - \ tools are centered around signal processing and simulation of electrophysiological\ - \ signals. These techniques are used to clean and process the electrophysiological\ - \ signals and subsequently provide real-time interactions to feed the control\ - \ of sound processes. The system provides modules that are highly musically expressive\ - \ especially in the domain of spatial sound.\nThese tools also allow to set up\ - \ a testing environment by replacing the need of electrophysiological capturing\ - \ devices.\nThe findings of this exploration provide a better understanding of\ - \ how the FAUST language can be used in conjunction with electrophysiological\ - \ signals and exposes interesting opportunities to explore further possibilities\ - \ in music creation in an open source environment with the possibility of multitarget\ - \ compilation, allowing our modules to be used either in such softwares as Max\ - \ or embedded on microcontrollers." - address: 'Mexico City, Mexico' - articleno: 67 - author: David Fierro and Alain Bonardi and Atau Tanaka - bibtex: "@article{nime2023_67,\n abstract = {This article presents new tools developed\ - \ in FAUST language to create musical interactions using electrophysiological\ - \ signals as input. The developed tools are centered around signal processing\ - \ and simulation of electrophysiological signals. These techniques are used to\ - \ clean and process the electrophysiological signals and subsequently provide\ - \ real-time interactions to feed the control of sound processes. 
The system provides\ - \ modules that are highly musically expressive especially in the domain of spatial\ - \ sound.\nThese tools also allow to set up a testing environment by replacing\ - \ the need of electrophysiological capturing devices.\nThe findings of this exploration\ - \ provide a better understanding of how the FAUST language can be used in conjunction\ - \ with electrophysiological signals and exposes interesting opportunities to explore\ - \ further possibilities in music creation in an open source environment with the\ - \ possibility of multitarget compilation, allowing our modules to be used either\ - \ in such softwares as Max or embedded on microcontrollers.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {67},\n author = {David Fierro and Alain Bonardi\ - \ and Atau Tanaka},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {8},\n\ - \ pages = {486--493},\n title = {FAUST Multiplatform toolbox for Body Brain Digital\ - \ Musical Instruments},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_67.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Makipatola2005 + abstract: 'In this paper, we introduce and analyze four gesture-controlled musical + instruments. We briefly discuss the test platform designed to allow for rapid + experimentation of new interfaces and control mappings. We describe our design + experiences and discuss the effects of system features such as latency, resolution + and lack of tactile feedback. The instruments use virtual reality hardware and + computer vision for user input, and three-dimensional stereo vision as well as + simple desktop displays for providing visual feedback. The instrument sounds are + synthesized in real-time using physical sound modeling. 
' + address: 'Vancouver, BC, Canada' + author: 'Mäki-patola, Teemu and Laitinen, Juha and Kanerva, Aki and Takala, Tapio' + bibtex: "@inproceedings{Makipatola2005,\n abstract = {In this paper, we introduce\ + \ and analyze four gesture-controlled musical instruments. We briefly discuss\ + \ the test platform designed to allow for rapid experimentation of new interfaces\ + \ and control mappings. We describe our design experiences and discuss the effects\ + \ of system features such as latency, resolution and lack of tactile feedback.\ + \ The instruments use virtual reality hardware and computer vision for user input,\ + \ and three-dimensional stereo vision as well as simple desktop displays for providing\ + \ visual feedback. The instrument sounds are synthesized in real-time using physical\ + \ sound modeling. },\n address = {Vancouver, BC, Canada},\n author = {M\\''{a}ki-patola,\ + \ Teemu and Laitinen, Juha and Kanerva, Aki and Takala, Tapio},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176780},\n issn = {2220-4806},\n keywords = {Musical\ + \ instrument design, virtual instrument, gesture, widgets, physical sound modeling,\ + \ control mapping.},\n pages = {11--16},\n title = {Experiments with Virtual Reality\ + \ Instruments},\n url = {http://www.nime.org/proceedings/2005/nime2005_011.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176780 issn: 2220-4806 - month: May - numpages: 8 - pages: 486--493 - title: FAUST Multiplatform toolbox for Body Brain Digital Musical Instruments - track: Papers - url: http://nime.org/proceedings/2023/nime2023_67.pdf - year: 2023 + keywords: 'Musical instrument design, virtual instrument, gesture, widgets, physical + sound modeling, control mapping.' 
+ pages: 11--16 + title: Experiments with Virtual Reality Instruments + url: http://www.nime.org/proceedings/2005/nime2005_011.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_68 - abstract: 'This paper presents the Feedback Mop Cello, a feedback instrument integrating - acoustic feedback loops generated through a microphone and loudspeaker combination - with a control interface inspired by the cello. Current paradigms of interaction - with feedback instruments are based around ideas of negotiation with autonomous - systems rather than control. We explore the possibility of integration of negotiated - and controlled elements through a design focused on isolating the acoustic feedback - loop signal path from the signal path to which sound processing is applied. We - focus on three musical parameters of timbre, pitch, and dynamics. We present timbre - as a parameter to mainly be negotiated within the feedback loop, while pitch and - dynamics are parameters that can be explicitly controlled through the interface. - An approach is taken to minimize components within the feedback loop in order - to foreground the choice of loudspeaker as an integral part of the instrument’s - sound. A preliminary user study is carried out involving five semi-professional - musicians, focusing on their reflection regarding their interaction with the acoustic - feedback loop.' - address: 'Mexico City, Mexico' - articleno: 68 - author: Hugh A von Arnim and Stefano Fasciani and Çağrı Erdem - bibtex: "@article{nime2023_68,\n abstract = {This paper presents the Feedback Mop\ - \ Cello, a feedback instrument integrating acoustic feedback loops generated through\ - \ a microphone and loudspeaker combination with a control interface inspired by\ - \ the cello. Current paradigms of interaction with feedback instruments are based\ - \ around ideas of negotiation with autonomous systems rather than control. 
We\ - \ explore the possibility of integration of negotiated and controlled elements\ - \ through a design focused on isolating the acoustic feedback loop signal path\ - \ from the signal path to which sound processing is applied. We focus on three\ - \ musical parameters of timbre, pitch, and dynamics. We present timbre as a parameter\ - \ to mainly be negotiated within the feedback loop, while pitch and dynamics are\ - \ parameters that can be explicitly controlled through the interface. An approach\ - \ is taken to minimize components within the feedback loop in order to foreground\ - \ the choice of loudspeaker as an integral part of the instrument’s sound. A preliminary\ - \ user study is carried out involving five semi-professional musicians, focusing\ - \ on their reflection regarding their interaction with the acoustic feedback loop.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {68},\n author = {Hugh A von\ - \ Arnim and Stefano Fasciani and Çağrı Erdem},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {6},\n pages = {494--499},\n title = {The Feedback Mop Cello: An\ - \ Instrument for Interacting with Acoustic Feedback Loops},\n track = {Papers},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_68.pdf},\n year = {2023}\n\ +- ENTRYTYPE: inproceedings + ID: Weinberg2005 + abstract: 'The iltur system features a novel method of interaction between expert + and novice musicians through a set of musical controllers called Beatbugs. Beatbug + players can record live musical input from MIDI and acoustic instruments and respond + by transforming the recorded material in real-time, creating motif-and-variation + call-and-response routines on the fly. 
A central computer system analyzes MIDI + and audio played by expert players and allows novice Beatbug players to personalize + the analyzed material using a variety of transformation algorithms. This paper + presents the motivation for developing the iltur system, followed by a brief survey + of pervious and related work that guided the definition of the project’s goals. + We then present the hardware and software approaches that were taken to address + these goals, as well as a couple of compositions that were written for the system. + The paper ends with a discussion based on observations of players using the iltur + system and a number of suggestions for future work.' + address: 'Vancouver, BC, Canada' + author: 'Weinberg, Gil and Driscoll, Scott' + bibtex: "@inproceedings{Weinberg2005,\n abstract = {The iltur system features a\ + \ novel method of interaction between expert and novice musicians through a set\ + \ of musical controllers called Beatbugs. Beatbug players can record live musical\ + \ input from MIDI and acoustic instruments and respond by transforming the recorded\ + \ material in real-time, creating motif-and-variation call-and-response routines\ + \ on the fly. A central computer system analyzes MIDI and audio played by expert\ + \ players and allows novice Beatbug players to personalize the analyzed material\ + \ using a variety of transformation algorithms. This paper presents the motivation\ + \ for developing the iltur system, followed by a brief survey of pervious and\ + \ related work that guided the definition of the project’s goals. We then present\ + \ the hardware and software approaches that were taken to address these goals,\ + \ as well as a couple of compositions that were written for the system. 
The paper\ + \ ends with a discussion based on observations of players using the iltur system\ + \ and a number of suggestions for future work.},\n address = {Vancouver, BC, Canada},\n\ + \ author = {Weinberg, Gil and Driscoll, Scott},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176840},\n issn = {2220-4806},\n keywords = {Collaboration,\ + \ improvisation, gestrual handheld controllers, novices, mapping},\n pages = {17--22},\n\ + \ title = {iltur -- Connecting Novices and Experts Through Collaborative Improvisation},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_017.pdf},\n year = {2005}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176840 issn: 2220-4806 - month: May - numpages: 6 - pages: 494--499 - title: 'The Feedback Mop Cello: An Instrument for Interacting with Acoustic Feedback - Loops' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_68.pdf - year: 2023 + keywords: 'Collaboration, improvisation, gestrual handheld controllers, novices, + mapping' + pages: 17--22 + title: iltur -- Connecting Novices and Experts Through Collaborative Improvisation + url: http://www.nime.org/proceedings/2005/nime2005_017.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_69 - abstract: 'Mixboard is a web / iOS application that allows music lovers to create - and share personalized musical mashups. The app allows users to choose and organize - up to four songs within four different lanes. The system automatically separates - the songs'' sources into corresponding stems, calculates an appropriate tempo - and key for the mashup, and chooses song segments according to users'' visual - creation. 
Unlike other professional applications used for mashups, Mixboard does - not require experience with Digital Audio Workstations (DAWs) or waveform editing - and supports unlimited library of usable songs. In a co-creative fashion, users - can explore their creativity while the system contributes its own creative input - utilizing Music Information Retrieval (MIR), Digital Signal Processing (DSP), - and compositional templates. User studies were conducted to evaluate Mixboard''s - success in achieving an effective balance between system automation and user control. - Results indicate strong metrics for user creative expression, engagement, and - ownership, as well as high satisfaction with the final musical outcome. Results - also suggest a number of modifications to the balance between user control and - system automation, which will be addressed in future work.' - address: 'Mexico City, Mexico' - articleno: 69 - author: Raghavasimhan Sankaranarayanan and Nitin Hugar and Qinying Lei and Thomas - Ottolin and Hardik Goel and Gil Weinberg - bibtex: "@article{nime2023_69,\n abstract = {Mixboard is a web / iOS application\ - \ that allows music lovers to create and share personalized musical mashups. The app\ - \ allows users to choose and organize up to four songs within four different lanes.\ - \ The system automatically separates the songs' sources into corresponding stems,\ - \ calculates an appropriate tempo and key for the mashup, and chooses song segments\ - \ according to users' visual creation. Unlike other professional applications\ - \ used for mashups, Mixboard does not require experience with Digital Audio Workstations\ - \ (DAWs) or waveform editing and supports unlimited library of usable songs. In\ - \ a co-creative fashion, users can explore their creativity while the system contributes\ - \ its own creative input utilizing Music Information Retrieval (MIR), Digital\ - \ Signal Processing (DSP), and compositional templates. 
User studies were conducted\ - \ to evaluate Mixboard's success in achieving an effective balance between system\ - \ automation and user control. Results indicate strong metrics for user creative\ - \ expression, engagement, and ownership, as well as high satisfaction with the\ - \ final musical outcome. Results also suggest a number of modifications to the\ - \ balance between user control and system automation, which will be addressed\ - \ in future work.},\n address = {Mexico City, Mexico},\n articleno = {69},\n author\ - \ = {Raghavasimhan Sankaranarayanan and Nitin Hugar and Qinying Lei and Thomas\ - \ Ottolin and Hardik Goel and Gil Weinberg},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {6},\n pages = {500--505},\n title = {Mixboard - A Co-Creative Mashup\ - \ Application for Novices},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_69.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Jorda2005 + abstract: 'In this paper we study the potential and the challenges posed by multi-user + instruments, as tools that can facilitate interaction and responsiveness not only + between performers and their instrument but also between performers as well. Several + previous studies and taxonomies are mentioned, after what different paradigms + exposed with examples based on traditional mechanical acoustic instruments. In + the final part, several existing systems and implementations, now in the digital + domain, are described and identified according to the models and paradigms previously + introduced. 
' + address: 'Vancouver, BC, Canada' + author: 'Jordà, Sergi' + bibtex: "@inproceedings{Jorda2005,\n abstract = {In this paper we study the potential\ + \ and the challenges posed by multi-user instruments, as tools that can facilitate\ + \ interaction and responsiveness not only between performers and their instrument\ + \ but also between performers as well. Several previous studies and taxonomies\ + \ are mentioned, after what different paradigms exposed with examples based on\ + \ traditional mechanical acoustic instruments. In the final part, several existing\ + \ systems and implementations, now in the digital domain, are described and identified\ + \ according to the models and paradigms previously introduced. },\n address =\ + \ {Vancouver, BC, Canada},\n author = {Jord\\`{a}, Sergi},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176760},\n issn = {2220-4806},\n keywords = {Multi-user\ + \ instruments, collaborative music, new instruments design guidelines. },\n pages\ + \ = {23--26},\n title = {Multi-user Instruments: Models, Examples and Promises},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_023.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176760 issn: 2220-4806 - month: May - numpages: 6 - pages: 500--505 - title: Mixboard - A Co-Creative Mashup Application for Novices - track: Papers - url: http://nime.org/proceedings/2023/nime2023_69.pdf - year: 2023 + keywords: 'Multi-user instruments, collaborative music, new instruments design guidelines. 
' + pages: 23--26 + title: 'Multi-user Instruments: Models, Examples and Promises' + url: http://www.nime.org/proceedings/2005/nime2005_023.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_70 - abstract: 'This paper presents complex mapping strategies that offer flexibility - for improvising with elaborate digital environments by allowing for more human - control with less physical input. The intention is not to reduce human physicality, - but instead actions are further extended and altered through complex envelopes. - This software was originally designed for the augmented guitar, to address the - issue of a lack of spare bandwidth (Cook, 2001) that is inherent to guitar playing. - This makes it challenging to simultaneously control digital interfaces without - compromising guitar technique. The Slider MultiMap software discussed in this - paper helps to overcome this dilemma by enabling a guitarist to control multiple - audio effects with a single gesture while individually customising how each parameter - is controlled prior to the performance. At the same time, it explores the delegation - of tasks to the computer in situations where indirect control is more desirable.' - address: 'Mexico City, Mexico' - articleno: 70 - author: Nicholas Canny - bibtex: "@article{nime2023_70,\n abstract = {This paper presents complex mapping\ - \ strategies that offer flexibility for improvising with elaborate digital environments\ - \ by allowing for more human control with less physical input. The intention is\ - \ not to reduce human physicality, but instead actions are further extended and\ - \ altered through complex envelopes. This software was originally designed for\ - \ the augmented guitar, to address the issue of a lack of spare bandwidth (Cook,\ - \ 2001) that is inherent to guitar playing. This makes it challenging to simultaneously\ - \ control digital interfaces without compromising guitar technique. 
The Slider\ - \ MultiMap software discussed in this paper helps to overcome this dilemma by\ - \ enabling a guitarist to control multiple audio effects with a single gesture\ - \ while individually customising how each parameter is controlled prior to the\ - \ performance. At the same time, it explores the delegation of tasks to the computer\ - \ in situations where indirect control is more desirable.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {70},\n author = {Nicholas Canny},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {5},\n pages = {506--510},\n title = {The implementation\ - \ of envelope based complex mapping strategies to extend and augment human control},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_70.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Blaine2005 + abstract: 'This paper will investigate a variety of alternate controllers that are + making an impact in interactive entertainment, particularly in the video game + industry. Since the late 1990''s, the surging popularity of rhythmic and musical + performance games in Japanese arcades has led to the development of new interfaces + and alternate controllers for the consumer market worldwide. Rhythm action games + such as Dance Dance Revolution, Taiko No Tatsujin (Taiko: Drum Master), and Donkey + Konga are stimulating collaborative gameplay and exposing consumers to custom + controllers designed specifically for musical and physical interaction. We are + witnessing the emergence and acceptance of these breakthrough controllers and + models for gameplay as an international cultural phenomenon penetrating the video + game and toy markets in record numbers. 
Therefore, it is worth considering the + potential benefits to developers of musical interfaces, electronic devices and + alternate controllers in light of these new and emerging opportunities, particularly + in the realm of video gaming, toy development, arcades, and other interactive + entertainment experiences. ' + address: 'Vancouver, BC, Canada' + author: 'Blaine, Tina' + bibtex: "@inproceedings{Blaine2005,\n abstract = {This paper will investigate a\ + \ variety of alternate controllers that are making an impact in interactive entertainment,\ + \ particularly in the video game industry. Since the late 1990's, the surging\ + \ popularity of rhythmic and musical performance games in Japanese arcades has\ + \ led to the development of new interfaces and alternate controllers for the consumer\ + \ market worldwide. Rhythm action games such as Dance Dance Revolution, Taiko\ + \ No Tatsujin (Taiko: Drum Master), and Donkey Konga are stimulating collaborative\ + \ gameplay and exposing consumers to custom controllers designed specifically\ + \ for musical and physical interaction. We are witnessing the emergence and acceptance\ + \ of these breakthrough controllers and models for gameplay as an international\ + \ cultural phenomenon penetrating the video game and toy markets in record numbers.\ + \ Therefore, it is worth considering the potential benefits to developers of musical\ + \ interfaces, electronic devices and alternate controllers in light of these new\ + \ and emerging opportunities, particularly in the realm of video gaming, toy development,\ + \ arcades, and other interactive entertainment experiences. 
},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Blaine, Tina},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176709},\n\ + \ issn = {2220-4806},\n keywords = {Alternate controllers, musical interaction,\ + \ interactive entertainment, video game industry, arcades, rhythm action, collaborative\ + \ gameplay, musical performance games},\n pages = {27--33},\n title = {The Convergence\ + \ of Alternate Controllers and Musical Interfaces in Interactive Entertainment},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_027.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176709 issn: 2220-4806 - month: May - numpages: 5 - pages: 506--510 - title: The implementation of envelope based complex mapping strategies to extend - and augment human control - track: Papers - url: http://nime.org/proceedings/2023/nime2023_70.pdf - year: 2023 + keywords: 'Alternate controllers, musical interaction, interactive entertainment, + video game industry, arcades, rhythm action, collaborative gameplay, musical performance + games' + pages: 27--33 + title: The Convergence of Alternate Controllers and Musical Interfaces in Interactive + Entertainment + url: http://www.nime.org/proceedings/2005/nime2005_027.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_71 - abstract: 'This paper presents a room feedback system which the author has been - developing and performing with for nearly three years. The design emerged from - an artistic research process which emphasises multiple explorations coexisting - around a research topic while having a sensitivity to the practicalities of a - customary gig (short set-up time, unpredictable acoustics). 
Typically enabled - by a stereo room-mic and a pair of speakers, many algorithms have been explored - in the loop with some being tributes to historical feedback works. An overall - design is offered where all feedback pathways are simultaneously available and - mutually interfere via the room. Each algorithm is designed to have one significant - performable parameter but how this is mapped to sensors or widgets is itself performable - with various behaviours available, including some explorations of self-programming - and ‘intra-active’ ideas. Concert experience in solo and small ensemble formats - is discussed and a number of contributions are identified in how the work: extends - room feedback research to explore multiple parallel processes of varied spectro-morphological - character, offers connections to historical work in a pedagogically interesting - fashion, demonstrates several novel algorithms, while exemplifying a characteristic - artistic research method. The paper closes with a speculative ‘feedback aesthetics’ - to help configure future work.' - address: 'Mexico City, Mexico' - articleno: 71 - author: John M Bowers - bibtex: "@article{nime2023_71,\n abstract = {This paper presents a room feedback\ - \ system which the author has been developing and performing with for nearly three\ - \ years. The design emerged from an artistic research process which emphasises\ - \ multiple explorations coexisting around a research topic while having a sensitivity\ - \ to the practicalities of a customary gig (short set-up time, unpredictable acoustics).\ - \ Typically enabled by a stereo room-mic and a pair of speakers, many algorithms\ - \ have been explored in the loop with some being tributes to historical feedback\ - \ works. An overall design is offered where all feedback pathways are simultaneously\ - \ available and mutually interfere via the room. 
Each algorithm is designed to\ - \ have one significant performable parameter but how this is mapped to sensors\ - \ or widgets is itself performable with various behaviours available, including\ - \ some explorations of self-programming and ‘intra-active’ ideas. Concert experience\ - \ in solo and small ensemble formats is discussed and a number of contributions\ - \ are identified in how the work: extends room feedback research to explore multiple\ - \ parallel processes of varied spectro-morphological character, offers connections\ - \ to historical work in a pedagogically interesting fashion, demonstrates several\ - \ novel algorithms, while exemplifying a characteristic artistic research method.\ - \ The paper closes with a speculative ‘feedback aesthetics’ to help configure\ - \ future work.},\n address = {Mexico City, Mexico},\n articleno = {71},\n author\ - \ = {John M Bowers},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {10},\n\ - \ pages = {511--520},\n title = {A Hapless But Entertaining Roar’: Developing\ - \ a Room Feedback System through Artistic Research and Aesthetic Reflection},\n\ - \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_71.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Overholt2005 + address: 'Vancouver, BC, Canada' + author: 'Overholt, Dan' + bibtex: "@inproceedings{Overholt2005,\n address = {Vancouver, BC, Canada},\n author\ + \ = {Overholt, Dan},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176796},\n\ + \ issn = {2220-4806},\n pages = {34--37},\n title = {The Overtone Violin},\n url\ + \ = {http://www.nime.org/proceedings/2005/nime2005_034.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New 
Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176796 issn: 2220-4806 - month: May - numpages: 10 - pages: 511--520 - title: 'A Hapless But Entertaining Roar’: Developing a Room Feedback System through - Artistic Research and Aesthetic Reflection' - track: Papers - url: http://nime.org/proceedings/2023/nime2023_71.pdf - year: 2023 + pages: 34--37 + title: The Overtone Violin + url: http://www.nime.org/proceedings/2005/nime2005_034.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_72 - abstract: 'How do live coders simultaneously develop new creations and master previous - ones? Using conclusions drawn from previous studies about exploratory programming - and our experience practicing live coding, we identified a need to support creation - and mastery in the live coding space—specifically in the realm of live coding - pertaining to musical creations. We developed a tool, SHARP, which attempted to - empower live coders in both their exploration and performances. SHARP is a code - editor extension that visualizes the history of each instrument that the live - coder creates; the visualization can then be used to revisit the previous states - of the instrument and create new ones. We believe that this extension will support - live coders’ exploration in practice as well as enable novel musical aesthetics - in performance contexts. We did an initial evaluation of SHARP using an autoethnographic - approach where one researcher used the tool over multiple sessions to compose - a piece. From the autoethnography, we saw that SHARP supported composition by - making it easier to explore different musical ideas and to revisit past states. - Our analysis also hints at new possible features, such as being able to combine - multiple previous states together using SHARP.' 
- address: 'Mexico City, Mexico' - articleno: 72 - author: Douglas A Bowman Jr and Daniel Manesh and Sang Won Lee - bibtex: "@article{nime2023_72,\n abstract = {How do live coders simultaneously develop\ - \ new creations and master previous ones? Using conclusions drawn from previous\ - \ studies about exploratory programming and our experience practicing live coding,\ - \ we identified a need to support creation and mastery in the live coding space—specifically\ - \ in the realm of live coding pertaining to musical creations. We developed a\ - \ tool, SHARP, which attempted to empower live coders in both their exploration\ - \ and performances. SHARP is a code editor extension that visualizes the history\ - \ of each instrument that the live coder creates; the visualization can then be\ - \ used to revisit the previous states of the instrument and create new ones. We\ - \ believe that this extension will support live coders’ exploration in practice\ - \ as well as enable novel musical aesthetics in performance contexts. We did an\ - \ initial evaluation of SHARP using an autoethnographic approach where one researcher\ - \ used the tool over multiple sessions to compose a piece. From the autoethnography,\ - \ we saw that SHARP supported composition by making it easier to explore different\ - \ musical ideas and to revisit past states. 
Our analysis also hints at new possible\ - \ features, such as being able to combine multiple previous states together using\ - \ SHARP.},\n address = {Mexico City, Mexico},\n articleno = {72},\n author = {Douglas\ - \ A Bowman Jr and Daniel Manesh and Sang Won Lee},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {4},\n pages = {521--524},\n title = {SHARP: Supporting\ - \ Exploration and Rapid State Navigation in Live Coding Music},\n track = {Work\ - \ in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_72.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Caceres2005 + abstract: 'The Self-Contained Unified Bass Augmenter (SCUBA) is a new augmentative + OSC (Open Sound Control) [5] controller for the tuba. SCUBA adds new expressive + possibilities to the existing tuba interface through onboard sensors. These sensors + provide continuous and discrete user-controlled parametric data to be mapped at + will to signal processing parameters, virtual instrument control parameters, sound + playback, and various other functions. In its current manifestation, control data + is mapped to change the processing of the instrument''s natural sound in Pd (Pure + Data) [3]. SCUBA preserves the unity of the solo instrument interface by acoustically + mixing direct and processed sound in the instrument''s bell via mounted satellite + speakers, which are driven by a subwoofer below the performer''s chair. The end + result augments the existing interface while preserving its original unity and + functionality. ' + address: 'Vancouver, BC, Canada' + author: 'Cáceres, Juan Pablo and Mysore, Gautham J. 
and Treviño, Jeffrey' + bibtex: "@inproceedings{Caceres2005,\n abstract = {The Self-Contained Unified Bass\ + \ Augmenter (SCUBA) is a new augmentative OSC (Open Sound Control) [5] controller\ + \ for the tuba. SCUBA adds new expressive possibilities to the existing tuba interface\ + \ through onboard sensors. These sensors provide continuous and discrete user-controlled\ + \ parametric data to be mapped at will to signal processing parameters, virtual\ + \ instrument control parameters, sound playback, and various other functions.\ + \ In its current manifestation, control data is mapped to change the processing\ + \ of the instrument's natural sound in Pd (Pure Data) [3]. SCUBA preserves the\ + \ unity of the solo instrument interface by acoustically mixing direct and processed\ + \ sound in the instrument's bell via mounted satellite speakers, which are driven\ + \ by a subwoofer below the performer's chair. The end result augments the existing\ + \ interface while preserving its original unity and functionality. },\n address\ + \ = {Vancouver, BC, Canada},\n author = {C\\'{a}ceres, Juan Pablo and Mysore,\ + \ Gautham J. 
and Trevi\\~{n}o, Jeffrey},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176719},\n\ + \ issn = {2220-4806},\n keywords = {Interactive music, electro-acoustic musical\ + \ instruments, musical instrument design, human computer interface, signal processing,\ + \ Open Sound Control (OSC) },\n pages = {38--41},\n title = {{SC}UBA: The Self-Contained\ + \ Unified Bass Augmenter},\n url = {http://www.nime.org/proceedings/2005/nime2005_038.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176719 issn: 2220-4806 - month: May - numpages: 4 - pages: 521--524 - title: 'SHARP: Supporting Exploration and Rapid State Navigation in Live Coding - Music' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_72.pdf - year: 2023 + keywords: 'Interactive music, electro-acoustic musical instruments, musical instrument + design, human computer interface, signal processing, Open Sound Control (OSC) ' + pages: 38--41 + title: 'SCUBA: The Self-Contained Unified Bass Augmenter' + url: http://www.nime.org/proceedings/2005/nime2005_038.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_73 - abstract: 'Lambeosaurine hadrosaurs are duck-billed dinosaurs. Scientists hypothesize - that their large, bony crests which encapsulate complicated, hollow nasal passages - function as resonators for vocal calls. This paper discusses the work-in-process, - Dinosaur Choir, which recreates these vocal capabilities as musical skull instruments. - The skull and nasal passages are fabricated based on Computed Topology (CT) scans - of hadrosaur skulls, and larynx design is informed by scientific research. 
Musicians - and participants voice the instruments by blowing into a mouthpiece or microphone, - and a larynx mechanism creates the sound in response, which is then resonated - through the nasal passages. The instruments are intended both for interactive - exhibition and for on-going musical performance practice. Dinosaur Choir aims - to give life to the voices of dinosaurs, allowing an embodied experience with - extinct animals long lost to the past. This paper focuses on the development of - the first musical instrument in the series, based on an adult Corythosaurus skull. - We consider how scientific research as well as musical and practical concerns - impact the design process and what trade-offs must be contemplated and made in - order to achieve our aims of dinosaurian embodied sound.' - address: 'Mexico City, Mexico' - articleno: 73 - author: Courtney D Brown and Thomas Dudgeon and Cezary Gajewski - bibtex: "@article{nime2023_73,\n abstract = {Lambeosaurine hadrosaurs are duck-billed\ - \ dinosaurs. Scientists hypothesize that their large, bony crests which encapsulate\ - \ complicated, hollow nasal passages function as resonators for vocal calls. This\ - \ paper discusses the work-in-process, Dinosaur Choir, which recreates these vocal\ - \ capabilities as musical skull instruments. The skull and nasal passages are\ - \ fabricated based on Computed Topology (CT) scans of hadrosaur skulls, and larynx\ - \ design is informed by scientific research. Musicians and participants voice\ - \ the instruments by blowing into a mouthpiece or microphone, and a larynx mechanism\ - \ creates the sound in response, which is then resonated through the nasal passages.\ - \ The instruments are intended both for interactive exhibition and for on-going\ - \ musical performance practice. Dinosaur Choir aims to give life to the voices\ - \ of dinosaurs, allowing an embodied experience with extinct animals long lost\ - \ to the past. 
This paper focuses on the development of the first musical instrument\ - \ in the series, based on an adult Corythosaurus skull. We consider how scientific\ - \ research as well as musical and practical concerns impact the design process\ - \ and what trade-offs must be contemplated and made in order to achieve our aims\ - \ of dinosaurian embodied sound.},\n address = {Mexico City, Mexico},\n articleno\ - \ = {73},\n author = {Courtney D Brown and Thomas Dudgeon and Cezary Gajewski},\n\ +- ENTRYTYPE: inproceedings + ID: Sinyor2005 + abstract: 'This paper presents a novel controller built to exploit thephysical behaviour + of a simple dynamical system, namely aspinning wheel. The phenomenon of gyroscopic + precessioncauses the instrument to slowly oscillate when it is spunquickly, providing + the performer with proprioceptive feedback. Also, due to the mass of the wheel + and tire and theresulting rotational inertia, it maintains a relatively constant + angular velocity once it is set in motion. Various sensors were used to measure + continuous and discrete quantitiessuch as the the angular frequency of the wheel, + its spatialorientation, and the performer''s finger pressure. In addition, optical + and hall-effect sensors detect the passing of aspoke-mounted photodiode and two + magnets. A base software layer was developed in Max/MSP and various patcheswere + written with the goal of mapping the dynamic behaviorof the wheel to varied musical + processes.' + address: 'Vancouver, BC, Canada' + author: 'Sinyor, Elliot and Wanderley, Marcelo M.' + bibtex: "@inproceedings{Sinyor2005,\n abstract = {This paper presents a novel controller\ + \ built to exploit thephysical behaviour of a simple dynamical system, namely\ + \ aspinning wheel. The phenomenon of gyroscopic precessioncauses the instrument\ + \ to slowly oscillate when it is spunquickly, providing the performer with proprioceptive\ + \ feedback. 
Also, due to the mass of the wheel and tire and theresulting rotational\ + \ inertia, it maintains a relatively constant angular velocity once it is set\ + \ in motion. Various sensors were used to measure continuous and discrete quantitiessuch\ + \ as the the angular frequency of the wheel, its spatialorientation, and the performer's\ + \ finger pressure. In addition, optical and hall-effect sensors detect the passing\ + \ of aspoke-mounted photodiode and two magnets. A base software layer was developed\ + \ in Max/MSP and various patcheswere written with the goal of mapping the dynamic\ + \ behaviorof the wheel to varied musical processes.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Sinyor, Elliot and Wanderley, Marcelo M.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176820},\n issn = {2220-4806},\n keywords\ + \ = {HCI, Digital Musical Instruments, Gyroscopic Precession, Rotational Inertia,\ + \ Open Sound Control },\n pages = {42--45},\n title = {Gyrotyre : A dynamic hand-held\ + \ computer-music controller based on a spinning wheel},\n url = {http://www.nime.org/proceedings/2005/nime2005_042.pdf},\n\ + \ year = {2005}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176820 + issn: 2220-4806 + keywords: 'HCI, Digital Musical Instruments, Gyroscopic Precession, Rotational Inertia, + Open Sound Control ' + pages: 42--45 + title: 'Gyrotyre : A dynamic hand-held computer-music controller based on a spinning + wheel' + url: http://www.nime.org/proceedings/2005/nime2005_042.pdf + year: 2005 + + +- ENTRYTYPE: inproceedings + ID: Fraietta2005a + abstract: 'The Smart Controller is a portable hardware device that responds to input + control voltage, OSC, and MIDI messages; producing output control voltage, OSC, + and MIDI messages (depending upon the loaded custom patch). 
The Smart Controller + is a stand alone device; a powerful, reliable, and compact instrument capable + of reducing the number of electronic modules required in a live performance or + installation, particularly the requirement of a laptop computer. More powerful, + however, is the Smart Controller Workbench, a complete interactive development + environment. In addition to enabling the composer to create and debug their patches, + the Smart Controller Workbench accurately simulates the behaviour of the hardware, + and functions as an incircuit debugger that enables the performer to remotely + monitor, modify, and tune patches running in an installation without the requirement + of stopping or interrupting the live performance. ' + address: 'Vancouver, BC, Canada' + author: 'Fraietta, Angelo' + bibtex: "@inproceedings{Fraietta2005a,\n abstract = {The Smart Controller is a portable\ + \ hardware device that responds to input control voltage, OSC, and MIDI messages;\ + \ producing output control voltage, OSC, and MIDI messages (depending upon the\ + \ loaded custom patch). The Smart Controller is a stand alone device; a powerful,\ + \ reliable, and compact instrument capable of reducing the number of electronic\ + \ modules required in a live performance or installation, particularly the requirement\ + \ of a laptop computer. More powerful, however, is the Smart Controller Workbench,\ + \ a complete interactive development environment. In addition to enabling the\ + \ composer to create and debug their patches, the Smart Controller Workbench accurately\ + \ simulates the behaviour of the hardware, and functions as an incircuit debugger\ + \ that enables the performer to remotely monitor, modify, and tune patches running\ + \ in an installation without the requirement of stopping or interrupting the live\ + \ performance. 
},\n address = {Vancouver, BC, Canada},\n author = {Fraietta, Angelo},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {525--530},\n title\ - \ = {Dinosaur Choir: Designing for Scientific Exploration, Outreach, and Experimental\ - \ Music},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_73.pdf},\n\ - \ year = {2023}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1176745},\n issn = {2220-4806},\n\ + \ keywords = {Control Voltage, Open Sound Control, Algorithmic Composition, MIDI,\ + \ Sound Installations, programmable logic control, synthesizers, electronic music,\ + \ Sensors, Actuators, Interaction. },\n pages = {46--49},\n title = {The Smart\ + \ Controller Workbench},\n url = {http://www.nime.org/proceedings/2005/nime2005_046.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176745 issn: 2220-4806 - month: May - numpages: 6 - pages: 525--530 - title: 'Dinosaur Choir: Designing for Scientific Exploration, Outreach, and Experimental - Music' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_73.pdf - year: 2023 + keywords: 'Control Voltage, Open Sound Control, Algorithmic Composition, MIDI, Sound + Installations, programmable logic control, synthesizers, electronic music, Sensors, + Actuators, Interaction. ' + pages: 46--49 + title: The Smart Controller Workbench + url: http://www.nime.org/proceedings/2005/nime2005_046.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_74 - abstract: 'This paper introduces the T-Patch, a software application that streamlines - the use of T-Stick Digital Musical Instruments (DMIs). 
It offers a user-friendly - interface for gesture extraction, mapping, signal conditioning, sound synthesis, - and sequencing with cues, enabling composers to create music without programming. - Our main contribution is two-fold: (1) providing a versatile software solution - to address the current lack of music-making support for T-Stick DMIs, and (2) - highlighting the importance of demonstration content, such as a video, to showcase - the instrument’s capabilities and inspire new users. The T-Patch reduces the barrier - to entry for using the T-Stick DMI and offers a shared software solution for various - music-making scenarios.' - address: 'Mexico City, Mexico' - articleno: 74 - author: Takuto Fukuda and Marcelo Wanderley - bibtex: "@article{nime2023_74,\n abstract = {This paper introduces the T-Patch,\ - \ a software application that streamlines the use of T-Stick Digital Musical Instruments\ - \ (DMIs). It offers a user-friendly interface for gesture extraction, mapping,\ - \ signal conditioning, sound synthesis, and sequencing with cues, enabling composers\ - \ to create music without programming. Our main contribution is two-fold: (1)\ - \ providing a versatile software solution to address the current lack of music-making\ - \ support for T-Stick DMIs, and (2) highlighting the importance of demonstration\ - \ content, such as a video, to showcase the instrument’s capabilities and inspire\ - \ new users. 
The T-Patch reduces the barrier to entry for using the T-Stick DMI\ - \ and offers a shared software solution for various music-making scenarios.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {74},\n author = {Takuto Fukuda\ - \ and Marcelo Wanderley},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {5},\n\ - \ pages = {531--535},\n title = {T-Patch: a software application for T-Stick Digital\ - \ Musical Instruments},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_74.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Singer2005 + abstract: 'This paper describes an installation created by LEMUR(League of Electronic + Musical Urban Robots) in January, 2005.The installation included over 30 robotic + musical instrumentsand a multi-projector real-time video projection and wascontrollable + and programmable over a MIDI network. Theinstallation was also controllable remotely + via the Internet andcould be heard and viewed via room mics and a robotic webcam + connected to a streaming server.' + address: 'Vancouver, BC, Canada' + author: 'Singer, Eric and Feddersen, Jeff and Bowen, Bil' + bibtex: "@inproceedings{Singer2005,\n abstract = {This paper describes an installation\ + \ created by LEMUR(League of Electronic Musical Urban Robots) in January, 2005.The\ + \ installation included over 30 robotic musical instrumentsand a multi-projector\ + \ real-time video projection and wascontrollable and programmable over a MIDI\ + \ network. 
Theinstallation was also controllable remotely via the Internet andcould\ + \ be heard and viewed via room mics and a robotic webcam connected to a streaming\ + \ server.},\n address = {Vancouver, BC, Canada},\n author = {Singer, Eric and\ + \ Feddersen, Jeff and Bowen, Bil},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176818},\n\ + \ issn = {2220-4806},\n keywords = {Robotics, music, instruments, MIDI, video,\ + \ interactive, networked, streaming.},\n pages = {50--55},\n title = {A Large-Scale\ + \ Networked Robotic Musical Instrument Installation},\n url = {http://www.nime.org/proceedings/2005/nime2005_050.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176818 issn: 2220-4806 - month: May - numpages: 5 - pages: 531--535 - title: 'T-Patch: a software application for T-Stick Digital Musical Instruments' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_74.pdf - year: 2023 + keywords: 'Robotics, music, instruments, MIDI, video, interactive, networked, streaming.' + pages: 50--55 + title: A Large-Scale Networked Robotic Musical Instrument Installation + url: http://www.nime.org/proceedings/2005/nime2005_050.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_75 - abstract: 'Currently, most of the digital musical instruments cannot leave the use - of dedicated hardware devices, making them limited in terms of user popularity - and resource conservation. In this paper, we propose a new computer vision-based - interactive multi-functional musical instrument, called MuGeVI, which requires - no additional hardware circuits or sensors, and allows users to create or play - music through different hand gestures and positions. 
It firstly uses deep neural - network models for hand key point detection to obtain gesture information, secondly - maps it to pitch, chord or other information based on the current mode, then passes - it to Max/MSP via the OSC protocol, and finally implements the generation and - processing of MIDI or audio. MuGeVI is now available in four modes: performance - mode, accompaniment mode, control mode, and audio effects mode, and can be conveniently - used with just a personal computer with a camera. Designed to be human-centric, - MuGeVI is feature-rich, simple to use, affordable, scalable and programmable, - and is certainly a frugal musical innovation. All the material about this work - can be found in https://yewlife.github.io/MuGeVI/.' - address: 'Mexico City, Mexico' - articleno: 75 - author: Yue Yang and Zhaowen Wang and ZIJIN LI - bibtex: "@article{nime2023_75,\n abstract = {Currently, most of the digital musical\ - \ instruments cannot leave the use of dedicated hardware devices, making them\ - \ limited in terms of user popularity and resource conservation. In this paper,\ - \ we propose a new computer vision-based interactive multi-functional musical\ - \ instrument, called MuGeVI, which requires no additional hardware circuits or\ - \ sensors, and allows users to create or play music through different hand gestures\ - \ and positions. It firstly uses deep neural network models for hand key point\ - \ detection to obtain gesture information, secondly maps it to pitch, chord or\ - \ other information based on the current mode, then passes it to Max/MSP via the\ - \ OSC protocol, and finally implements the generation and processing of MIDI or\ - \ audio. MuGeVI is now available in four modes: performance mode, accompaniment\ - \ mode, control mode, and audio effects mode, and can be conveniently used with\ - \ just a personal computer with a camera. 
Designed to be human-centric, MuGeVI\ - \ is feature-rich, simple to use, affordable, scalable and programmable, and is\ - \ certainly a frugal musical innovation. All the material about this work can\ - \ be found in https://yewlife.github.io/MuGeVI/.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {75},\n author = {Yue Yang and Zhaowen Wang and ZIJIN LI},\n booktitle\ +- ENTRYTYPE: inproceedings + ID: Allison2005 + abstract: 'Artists have long sought after alternative controllers, sensors, and + other means for controlling computer-based musical performance in real-time. Traditional + techniques for transmitting the data generated by such devices typically employ + the use of MIDI as the transport protocol. Recently, several devices have been + developed using alternatives to MIDI, including Ethernet-based and USB-based sensor + interfaces. We have designed and produced a system that uses S/PDIF as the transport + mechanism for a sensor interface. This provides robust performance, together with + extremely low latency and high resolution. In our system, data from all sensors + is multiplexed onto the digital audio line and demultiplexed in software on the + computer using standard techniques. We have written demultiplexer objects and + plugins for Max/MSP and Jade, as well as a MIDI Conversion program for interapplicaton + uses, while others are in the works for PD, SuperCollider, and AudioUnits.' + address: 'Vancouver, BC, Canada' + author: 'Allison, Jesse T. and Place, Timothy' + bibtex: "@inproceedings{Allison2005,\n abstract = {Artists have long sought after\ + \ alternative controllers, sensors, and other means for controlling computer-based\ + \ musical performance in real-time. Traditional techniques for transmitting the\ + \ data generated by such devices typically employ the use of MIDI as the transport\ + \ protocol. 
Recently, several devices have been developed using alternatives to\ + \ MIDI, including Ethernet-based and USB-based sensor interfaces. We have designed\ + \ and produced a system that uses S/PDIF as the transport mechanism for a sensor\ + \ interface. This provides robust performance, together with extremely low latency\ + \ and high resolution. In our system, data from all sensors is multiplexed onto\ + \ the digital audio line and demultiplexed in software on the computer using standard\ + \ techniques. We have written demultiplexer objects and plugins for Max/MSP and\ + \ Jade, as well as a MIDI Conversion program for interapplicaton uses, while others\ + \ are in the works for PD, SuperCollider, and AudioUnits.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Allison, Jesse T. and Place, Timothy},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {6},\n pages = {536--541},\n title = {MuGeVI: A\ - \ Multi-Functional Gesture-Controlled Virtual Instrument},\n track = {Work in\ - \ Progress},\n url = {http://nime.org/proceedings/2023/nime2023_75.pdf},\n year\ - \ = {2023}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1176693},\n issn = {2220-4806},\n keywords\ + \ = {Teabox, Electrotap, Sensor Interface, High Speed, High Resolution, Sensors,\ + \ S/PDIF},\n pages = {56--59},\n title = {Teabox: A Sensor Data Interface System},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_056.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176693 issn: 2220-4806 - month: May - numpages: 6 - pages: 536--541 - title: 'MuGeVI: A Multi-Functional Gesture-Controlled Virtual Instrument' - track: Work in Progress - url: 
http://nime.org/proceedings/2023/nime2023_75.pdf - year: 2023 + keywords: 'Teabox, Electrotap, Sensor Interface, High Speed, High Resolution, Sensors, + S/PDIF' + pages: 56--59 + title: 'Teabox: A Sensor Data Interface System' + url: http://www.nime.org/proceedings/2005/nime2005_056.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_76 - abstract: 'This paper is an exploration and creative inquiry into the equilibrium - of audio-visual feedback. Following a Research-Through Design approach, we actualized - this inquiry by designing an ad-hoc audio-visual instrument: TAILSPIN. In this - instrument, a closed audio-visual and physical loop is created between a microphone - and its speaker, and a camera and its display, which are controlled by a performer. - The tenets of feedback are then understood through the contextual research of - cycles and loops in our natural environment. In this paper, we present the technical - details of the instrument and offer novel insights into the audio-visual equilibrium - within the context and intricacies of our own natural environment and organic - feedback systems.' - address: 'Mexico City, Mexico' - articleno: 76 - author: Costa K Colachis Glass and Fabio Morreale - bibtex: "@article{nime2023_76,\n abstract = {This paper is an exploration and creative\ - \ inquiry into the equilibrium of audio-visual feedback. Following a Research-Through\ - \ Design approach, we actualized this inquiry by designing an ad-hoc audio-visual\ - \ instrument: TAILSPIN. In this instrument, a closed audio-visual and physical\ - \ loop is created between a microphone and its speaker, and a camera and its display,\ - \ which are controlled by a performer. 
The tenets of feedback are then understood\ - \ through the contextual research of cycles and loops in our natural environment.\ - \ In this paper, we present the technical details of the instrument and offer\ - \ novel insights into the audio-visual equilibrium within the context and intricacies\ - \ of our own natural environment and organic feedback systems.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {76},\n author = {Costa K Colachis Glass and Fabio\ - \ Morreale},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {7},\n pages = {542--548},\n\ - \ title = {TAILSPIN: AN INQUIRY INTO THE EQUILIBRIUM OF AUDIO-VISUAL FEEDBACK},\n\ - \ track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_76.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Oore2005 + abstract: 'When learning a classical instrument, people often either take lessons + in which an existing body of “technique” is de- livered, evolved over generations + of performers, or in some cases people will “teach themselves” by watching people + play and listening to existing recordings. What does one do with a complex new + digital instrument? In this paper I address this question drawing on my expe- + rience in learning several very different types of sophisticated instruments: + the Glove Talk II real-time gesture-to-speech interface, the Digital Marionette + controller for virtual 3D puppets, and pianos and keyboards. As the primary user + of the first two systems, I have spent hundreds of hours with Digital Marionette + and Glove-Talk II, and thousands of hours with pianos and keyboards (I continue + to work as a professional musician). 
I will identify some of the under- lying + principles and approaches that I have observed during my learning and playing + experience common to these instru- ments. While typical accounts of users learning + new inter- faces generally focus on reporting beginner’s experiences, for various + practical reasons, this is fundamentally different by focusing on the expert’s + learning experience.' + address: 'Vancouver, BC, Canada' + author: 'Oore, Sageev' + bibtex: "@inproceedings{Oore2005,\n abstract = {When learning a classical instrument,\ + \ people often either take lessons in which an existing body of “technique” is\ + \ de- livered, evolved over generations of performers, or in some cases people\ + \ will “teach themselves” by watching people play and listening to existing recordings.\ + \ What does one do with a complex new digital instrument? In this paper I address\ + \ this question drawing on my expe- rience in learning several very different\ + \ types of sophisticated instruments: the Glove Talk II real-time gesture-to-speech\ + \ interface, the Digital Marionette controller for virtual 3D puppets, and pianos\ + \ and keyboards. As the primary user of the first two systems, I have spent hundreds\ + \ of hours with Digital Marionette and Glove-Talk II, and thousands of hours with\ + \ pianos and keyboards (I continue to work as a professional musician). I will\ + \ identify some of the under- lying principles and approaches that I have observed\ + \ during my learning and playing experience common to these instru- ments. 
While\ + \ typical accounts of users learning new inter- faces generally focus on reporting\ + \ beginner’s experiences, for various practical reasons, this is fundamentally\ + \ different by focusing on the expert’s learning experience.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Oore, Sageev},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176794},\n\ + \ issn = {2220-4806},\n keywords = {performance, learning new instruments },\n\ + \ pages = {60--64},\n title = {Learning Advanced Skills on New Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_060.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176794 issn: 2220-4806 - month: May - numpages: 7 - pages: 542--548 - title: 'TAILSPIN: AN INQUIRY INTO THE EQUILIBRIUM OF AUDIO-VISUAL FEEDBACK' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_76.pdf - year: 2023 + keywords: 'performance, learning new instruments ' + pages: 60--64 + title: Learning Advanced Skills on New Instruments + url: http://www.nime.org/proceedings/2005/nime2005_060.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_77 - abstract: "In this paper, we present the Hapstrument: a bimanual haptic interface\ - \ for musical expression. This DMI uses two low-cost 2-DoF haptic force-feedback\ - \ devices, one for each hand. The left device controls pitch selection, while\ - \ the right device controls excitation by simulating the feeling of bowing or\ - \ plucking a string. A user study was run to evaluate the effectiveness of the\ - \ Hapstrument. This evaluation\nreceived a wide range of reviews, from excellent\ - \ to poor. Ultimately, the musical backgrounds of the participants greatly impacted\ - \ their experiences with the Hapstrument. 
For participants whose expectations\ - \ aligned with what the instrument could provide, it was an effective DMI that\ - \ uses force feedback to enhance musical expression." - address: 'Mexico City, Mexico' - articleno: 77 - author: Jonathan Lane-Smith and Derrek Chow and Sahand Ajami and Jeremy Cooperstock - bibtex: "@article{nime2023_77,\n abstract = {In this paper, we present the Hapstrument:\ - \ a bimanual haptic interface for musical expression. This DMI uses two low-cost\ - \ 2-DoF haptic force-feedback devices, one for each hand. The left device controls\ - \ pitch selection, while the right device controls excitation by simulating the\ - \ feeling of bowing or plucking a string. A user study was run to evaluate the\ - \ effectiveness of the Hapstrument. This evaluation\nreceived a wide range of\ - \ reviews, from excellent to poor. Ultimately, the musical backgrounds of the\ - \ participants greatly impacted their experiences with the Hapstrument. For participants\ - \ whose expectations aligned with what the instrument could provide, it was an\ - \ effective DMI that uses force feedback to enhance musical expression.},\n address\ - \ = {Mexico City, Mexico},\n articleno = {77},\n author = {Jonathan Lane-Smith\ - \ and Derrek Chow and Sahand Ajami and Jeremy Cooperstock},\n booktitle = {Proceedings\ +- ENTRYTYPE: inproceedings + ID: Livingstone2005 + abstract: 'Haptic and Gestural interfaces offer new and novel ways of interacting + with and creating new musical forms. Increasingly it is the integration of these + interfaces with more complex adaptive systems or dynamically variable social contexts + that provide significant opportunities for socially mediated composition through + conscious and subconscious interaction. 
This paper includes a brief comparative + survey of related works and articulates the design process and interaction modes + or ‘play states’ for the Orb3 interface – 3 wireless mobile globes that collect + and share environmental data and user interactions to synthesize and diffuse sound + material in real time, a ‘social’ group of composer and listener objects. The + physical interfaces are integrated into a portable 8 channel auditory sphere for + collaborative interaction but can also be integrated with large-scale social environments, + such as atria and other public spaces with embedded sound systems.' + address: 'Vancouver, BC, Canada' + author: 'Livingstone, Dan and Miranda, Eduardo' + bibtex: "@inproceedings{Livingstone2005,\n abstract = {Haptic and Gestural interfaces\ + \ offer new and novel ways of interacting with and creating new musical forms.\ + \ Increasingly it is the integration of these interfaces with more complex adaptive\ + \ systems or dynamically variable social contexts that provide significant opportunities\ + \ for socially mediated composition through conscious and subconscious interaction.\ + \ This paper includes a brief comparative survey of related works and articulates\ + \ the design process and interaction modes or ‘play states’ for the Orb3 interface\ + \ – 3 wireless mobile globes that collect and share environmental data and user\ + \ interactions to synthesize and diffuse sound material in real time, a ‘social’\ + \ group of composer and listener objects. 
The physical interfaces are integrated\ + \ into a portable 8 channel auditory sphere for collaborative interaction but\ + \ can also be integrated with large-scale social environments, such as atria and\ + \ other public spaces with embedded sound systems.},\n address = {Vancouver, BC,\ + \ Canada},\n author = {Livingstone, Dan and Miranda, Eduardo},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {5},\n pages = {549--553},\n title = {The Hapstrument:\ - \ A Bimanual Haptic Interface for Musical Expression},\n track = {Work in Progress},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_77.pdf},\n year = {2023}\n\ - }\n" + \ doi = {10.5281/zenodo.1176774},\n issn = {2220-4806},\n keywords = {Adaptive\ + \ System, Sound Installation, Smart Interfaces, Music Robots, Spatial Music, Conscious\ + \ Subconscious Interaction.},\n pages = {65--69},\n title = {Orb3 -- Adaptive\ + \ Interface Design for Real time Sound Synthesis \\& Diffusion within Socially\ + \ Mediated Spaces},\n url = {http://www.nime.org/proceedings/2005/nime2005_065.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176774 issn: 2220-4806 - month: May - numpages: 5 - pages: 549--553 - title: 'The Hapstrument: A Bimanual Haptic Interface for Musical Expression' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_77.pdf - year: 2023 + keywords: 'Adaptive System, Sound Installation, Smart Interfaces, Music Robots, + Spatial Music, Conscious Subconscious Interaction.' 
+ pages: 65--69 + title: Orb3 -- Adaptive Interface Design for Real time Sound Synthesis & Diffusion + within Socially Mediated Spaces + url: http://www.nime.org/proceedings/2005/nime2005_065.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_78 - abstract: "The recent advancements in digital fabrication has led to a wider access\ - \ to prototyping in all sorts of fields. Beginning of the last decade was marked\ - \ by the word “revolution” in relation to “maker’s culture” at least in some publications.\ - \ This has influenced the sphere of physical computing in arts and NIME sphere\ - \ as well. As currently there are more and more possibilities to create new instruments,\ - \ we think that it can be useful to think of approaches to conceptualize these\ - \ creations. This paper is an attempt to suggest methodology for NIME prototyping,\ - \ based on evolutionary metaphor.\nFirst we observe the application of evolutionary\ - \ concepts to the field of music technology, briefly discussing its appearance\ - \ in related publications. We then assemble our own operational concept, which\ - \ can be used for the direct prototyping of interfaces. Mainly by introducing\ - \ metaphorical “DNA”, inside which the “gene” of “interactive kinematic concept”\ - \ is of a particular interest, and also by applying the now obsolete but useful\ - \ “Meckel–Serres recapitulation hypothesis” (embryological parallelism) as a model\ - \ for rapid prototyping. \nUnderstanding the speculative nature of such an approach\ - \ we do not offer it as a scientific basis for classification, research or prediction,\ - \ but as a workable concept for development, which can lead to valuable results.\ - \ \nIn the end we describe two case studies of NIMEs, which were prototyped in\ - \ the discussed fashion, showing illustrations and reflecting on the practicalities." 
- address: 'Mexico City, Mexico' - articleno: 78 - author: sergey k kasich - bibtex: "@article{nime2023_78,\n abstract = {The recent advancements in digital\ - \ fabrication has led to a wider access to prototyping in all sorts of fields.\ - \ Beginning of the last decade was marked by the word “revolution” in relation\ - \ to “maker’s culture” at least in some publications. This has influenced the\ - \ sphere of physical computing in arts and NIME sphere as well. As currently there\ - \ are more and more possibilities to create new instruments, we think that it\ - \ can be useful to think of approaches to conceptualize these creations. This\ - \ paper is an attempt to suggest methodology for NIME prototyping, based on evolutionary\ - \ metaphor.\nFirst we observe the application of evolutionary concepts to the\ - \ field of music technology, briefly discussing its appearance in related publications.\ - \ We then assemble our own operational concept, which can be used for the direct\ - \ prototyping of interfaces. Mainly by introducing metaphorical “DNA”, inside\ - \ which the “gene” of “interactive kinematic concept” is of a particular interest,\ - \ and also by applying the now obsolete but useful “Meckel–Serres recapitulation\ - \ hypothesis” (embryological parallelism) as a model for rapid prototyping. \n\ - Understanding the speculative nature of such an approach we do not offer it as\ - \ a scientific basis for classification, research or prediction, but as a workable\ - \ concept for development, which can lead to valuable results. 
\nIn the end we\ - \ describe two case studies of NIMEs, which were prototyped in the discussed fashion,\ - \ showing illustrations and reflecting on the practicalities.},\n address = {Mexico\ - \ City, Mexico},\n articleno = {78},\n author = {sergey k kasich},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {6},\n pages = {554--559},\n title = {Morphological\ - \ evolution of musical interface: design case studies},\n track = {Work in Progress},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_78.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: Essl2005 + abstract: 'The Scrubber is a general controller for friction-induced sound. Allowing + the user to engage in familiar gestures and feel- ing actual friction, the synthesized + sound gains an evocative nature for the performer and a meaningful relationship + between gesture and sound for the audience. It can control a variety of sound + synthesis algorithms of which we demonstrate examples based on granular synthesis, + wave-table synthesis and physically informed modeling.' + address: 'Vancouver, BC, Canada' + author: 'Essl, Georg and O''Modhrain, Sile' + bibtex: "@inproceedings{Essl2005,\n abstract = {The Scrubber is a general controller\ + \ for friction-induced sound. Allowing the user to engage in familiar gestures\ + \ and feel- ing actual friction, the synthesized sound gains an evocative nature\ + \ for the performer and a meaningful relationship between gesture and sound for\ + \ the audience. 
It can control a variety of sound synthesis algorithms of which\ + \ we demonstrate examples based on granular synthesis, wave-table synthesis and\ + \ physically informed modeling.},\n address = {Vancouver, BC, Canada},\n author\ + \ = {Essl, Georg and O'Modhrain, Sile},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176737},\n\ + \ issn = {2220-4806},\n pages = {70--75},\n title = {Scrubber: An Interface for\ + \ Friction-induced Sounds},\n url = {http://www.nime.org/proceedings/2005/nime2005_070.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176737 issn: 2220-4806 - month: May - numpages: 6 - pages: 554--559 - title: 'Morphological evolution of musical interface: design case studies' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_78.pdf - year: 2023 + pages: 70--75 + title: 'Scrubber: An Interface for Friction-induced Sounds' + url: http://www.nime.org/proceedings/2005/nime2005_070.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_79 - abstract: "Just as the way a performer is moved differs even among audiences who\ - \ have the same impression of the performance, the sensations and experiences\ - \ felt by the performers themselves and the audiences' experiences also differ.\ - \ The purpose of this research is to create a new listening experience by analyzing\ - \ and extracting the performer's introspection of rests, groove, and rhythm, and\ - \ physically presenting it to the audience. 
Although these elements are important\ - \ in shaping music, they are not always directly expressed as auditory sounds.\n\ - Our hypothesis is that this introspection, such as a sense of rhythm and groove,\ - \ is latent and observable in physiological states such as breathing and heartbeat.\ - \ By sensing and presenting them to the audience, music appreciation that includes\ - \ introspection could become possible. In other words, by sensing and presenting\ - \ introspection to the audience, the music listening experience itself can be\ - \ redesigned to include a physicality that is closer to the performer's experience\ - \ of the music, rather than being passive in an auditory sense. In this study,\ - \ preliminary experiments were conducted on the extraction of the performer's\ - \ introspection, and a device was designed to present it to the audience." - address: 'Mexico City, Mexico' - articleno: 79 - author: Aoi Uyama and Danny Hynds and Dingding Zheng and George Chernyshov and Tatsuya - Saito and Kai Kunze and Kouta Minamizawa - bibtex: "@article{nime2023_79,\n abstract = {Just as the way a performer is moved\ - \ differs even among audiences who have the same impression of the performance,\ - \ the sensations and experiences felt by the performers themselves and the audiences'\ - \ experiences also differ. The purpose of this research is to create a new listening\ - \ experience by analyzing and extracting the performer's introspection of rests,\ - \ groove, and rhythm, and physically presenting it to the audience. Although these\ - \ elements are important in shaping music, they are not always directly expressed\ - \ as auditory sounds.\nOur hypothesis is that this introspection, such as a sense\ - \ of rhythm and groove, is latent and observable in physiological states such\ - \ as breathing and heartbeat. By sensing and presenting them to the audience,\ - \ music appreciation that includes introspection could become possible. 
In other\ - \ words, by sensing and presenting introspection to the audience, the music listening\ - \ experience itself can be redesigned to include a physicality that is closer\ - \ to the performer's experience of the music, rather than being passive in an\ - \ auditory sense. In this study, preliminary experiments were conducted on the\ - \ extraction of the performer's introspection, and a device was designed to present\ - \ it to the audience.},\n address = {Mexico City, Mexico},\n articleno = {79},\n\ - \ author = {Aoi Uyama and Danny Hynds and Dingding Zheng and George Chernyshov\ - \ and Tatsuya Saito and Kai Kunze and Kouta Minamizawa},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ - \ = {May},\n numpages = {6},\n pages = {560--565},\n title = {Feel What You Don't\ - \ Hear: A New Framework for Non-aural Music Experiences},\n track = {Work in Progress},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_79.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: Topper2005 + abstract: 'WISEAR (Wireless Sensor Array) is a Linux based Embeddedx86 TS-5600 SBC + (Single Board Computer) specifically configured for use with music, dance and + video performance technologies. The device offers a general purpose solution to + many sensor and gestural controller problems. Much like the general purpose CPU, + which resolved many issues of its predecessor (ie., the special purpose DSP chip), + the WISEAR box attempts to move beyond custom made BASIC stamp projects that are + often created on a per-performance basis and rely heavily on MIDI. WISEAR is both + lightweight and wireless. Unlike several commercial alternatives, it is also a + completely open source project. 
PAIR (Partnering Analysis in Real Time) exploits + the power of WISEAR and revisits the potential of hardware-based systems for real-time + measurement of bodily movement. Our goal was to create a robust yet adaptable + system that could attend to both general and precise aspects of performer interaction. + Though certain commonalities with existing hardware systems exist, our PAIR system + takes a fundamentally different approach by focusing specifically on the interaction + of two or more dancers.' + address: 'Vancouver, BC, Canada' + author: 'Topper, David and Swendsen, Peter V.' + bibtex: "@inproceedings{Topper2005,\n abstract = {WISEAR (Wireless Sensor Array)\ + \ is a Linux based Embeddedx86 TS-5600 SBC (Single Board Computer) specifically\ + \ configured for use with music, dance and video performance technologies. The\ + \ device offers a general purpose solution to many sensor and gestural controller\ + \ problems. Much like the general purpose CPU, which resolved many issues of its\ + \ predecessor (ie., the special purpose DSP chip), the WISEAR box attempts to\ + \ move beyond custom made BASIC stamp projects that are often created on a per-performance\ + \ basis and rely heavily on MIDI. WISEAR is both lightweight and wireless. Unlike\ + \ several commercial alternatives, it is also a completely open source project.\ + \ PAIR (Partnering Analysis in Real Time) exploits the power of WISEAR and revisits\ + \ the potential of hardware-based systems for real-time measurement of bodily\ + \ movement. Our goal was to create a robust yet adaptable system that could attend\ + \ to both general and precise aspects of performer interaction. 
Though certain\ + \ commonalities with existing hardware systems exist, our PAIR system takes a\ + \ fundamentally different approach by focusing specifically on the interaction\ + \ of two or more dancers.},\n address = {Vancouver, BC, Canada},\n author = {Topper,\ + \ David and Swendsen, Peter V.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176830},\n\ + \ issn = {2220-4806},\n pages = {76--79},\n title = {Wireless Dance Control :\ + \ PAIR and WISEAR},\n url = {http://www.nime.org/proceedings/2005/nime2005_076.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176830 issn: 2220-4806 - month: May - numpages: 6 - pages: 560--565 - title: 'Feel What You Don''t Hear: A New Framework for Non-aural Music Experiences' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_79.pdf - year: 2023 + pages: 76--79 + title: 'Wireless Dance Control : PAIR and WISEAR' + url: http://www.nime.org/proceedings/2005/nime2005_076.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_80 - abstract: "We present an update on the EAVI physiological interface, a wireless,\ - \ microcontroller based hardware design for the acquisition of bioelectrical signals.\ - \ The system has been updated to process electroencephalogram brain signals in\ - \ addition to muscle electromyogram. The hardware/firmware system interfaces with\ - \ host software carrying out feature extraction and signal processing.\nRecent\ - \ advances in electronics have made physiological computing applications practical\ - \ and feasible. However, there is a gap between high end biomedical equipment\ - \ and consumer DIY solutions. 
The hardware design we present here bridges this\ - \ gap, and combines a specialized biosignal acquisition chip mated with a general-purpose\ - \ microcontroller. It is based on the Texas Instruments ADS129x family a single\ - \ chip integrated solution for high quality biosignal amplification and digitization.\ - \ It serves as analogue front end via programmable gain amplifiers to a 24bit\ - \ delta-sigma analog-digital converter. The microcontroller is the STMicroelectronics\ - \ STM32F427, a Cortex-M4 family microcontroller with floating point unit . In\ - \ addition to EMG acquisition, the board includes a Kionix KX122 three-axis accelerometer\ - \ . The TI and Kionix sensing chipts communicate with the ST microcontroller over\ - \ an I2C digital serial bus. The board communicates with the host computer or\ - \ rest of the music system wirelessly over Bluetooth LE 4.2 using an ST SPBTLE-1S\ - \ transceiver. The board can also communicate over USB where it registers with\ - \ the host as a class compliant audio and MIDI device. Audio and physiological\ - \ signals are treated in the same signal processing chain using the OWL framework.\n\ - The demo will show multichannel EMG, and single channel EEG. We call this hybridization\ - \ “ExG”. We will present documentation of the EAVI board used in the lab and on\ - \ stage, in user studies with neuro-diverse musicians and trained instrumentalists,\ - \ as well as in performance with the experimental all-female band, Chicks on Speed." - address: 'Mexico City, Mexico' - articleno: 80 - author: Atau Tanaka - bibtex: "@article{nime2023_80,\n abstract = {We present an update on the EAVI physiological\ - \ interface, a wireless, microcontroller based hardware design for the acquisition\ - \ of bioelectrical signals. The system has been updated to process electroencephalogram\ - \ brain signals in addition to muscle electromyogram. 
The hardware/firmware system\ - \ interfaces with host software carrying out feature extraction and signal processing.\n\ - Recent advances in electronics have made physiological computing applications\ - \ practical and feasible. However, there is a gap between high end biomedical\ - \ equipment and consumer DIY solutions. The hardware design we present here bridges\ - \ this gap, and combines a specialized biosignal acquisition chip mated with a\ - \ general-purpose microcontroller. It is based on the Texas Instruments ADS129x\ - \ family a single chip integrated solution for high quality biosignal amplification\ - \ and digitization. It serves as analogue front end via programmable gain amplifiers\ - \ to a 24bit delta-sigma analog-digital converter. The microcontroller is the\ - \ STMicroelectronics STM32F427, a Cortex-M4 family microcontroller with floating\ - \ point unit . In addition to EMG acquisition, the board includes a Kionix KX122\ - \ three-axis accelerometer . The TI and Kionix sensing chipts communicate with\ - \ the ST microcontroller over an I2C digital serial bus. The board communicates\ - \ with the host computer or rest of the music system wirelessly over Bluetooth\ - \ LE 4.2 using an ST SPBTLE-1S transceiver. The board can also communicate over\ - \ USB where it registers with the host as a class compliant audio and MIDI device.\ - \ Audio and physiological signals are treated in the same signal processing chain\ - \ using the OWL framework.\nThe demo will show multichannel EMG, and single channel\ - \ EEG. We call this hybridization “ExG”. 
We will present documentation of the\ - \ EAVI board used in the lab and on stage, in user studies with neuro-diverse\ - \ musicians and trained instrumentalists, as well as in performance with the experimental\ - \ all-female band, Chicks on Speed.},\n address = {Mexico City, Mexico},\n articleno\ - \ = {80},\n author = {Atau Tanaka},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {3},\n pages = {566--568},\n title = {The EAVI EMG/EEG Board: Hybrid physiological\ - \ sensing},\n track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_80.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Dannenberg2005 + abstract: 'McBlare is a robotic bagpipe player developed by the Robotics Institute + at Carnegie Mellon University. McBlare plays a standard set of bagpipes, using + a custom air compressor to supply air and electromechanical ``fingers'''' to control + the chanter. McBlare is MIDI controlled, allowing for simple interfacing to a + keyboard, computer, or hardware sequencer. The control mechanism exceeds the measured + speed of expert human performers. On the other hand, human performers surpass + McBlare in their ability to compensate for limitations and imperfections in reeds, + and we discuss future enhancements to address these problems. McBlare has been + used to perform traditional bagpipe music as well as experimental computer generated + music. ' + address: 'Vancouver, BC, Canada' + author: 'Dannenberg, Roger B. and Brown, Ben and Zeglin, Garth and Lupish, Ron' + bibtex: "@inproceedings{Dannenberg2005,\n abstract = {McBlare is a robotic bagpipe\ + \ player developed by the Robotics Institute at Carnegie Mellon University. 
McBlare\ + \ plays a standard set of bagpipes, using a custom air compressor to supply air\ + \ and electromechanical ``fingers'' to control the chanter. McBlare is MIDI controlled,\ + \ allowing for simple interfacing to a keyboard, computer, or hardware sequencer.\ + \ The control mechanism exceeds the measured speed of expert human performers.\ + \ On the other hand, human performers surpass McBlare in their ability to compensate\ + \ for limitations and imperfections in reeds, and we discuss future enhancements\ + \ to address these problems. McBlare has been used to perform traditional bagpipe\ + \ music as well as experimental computer generated music. },\n address = {Vancouver,\ + \ BC, Canada},\n author = {Dannenberg, Roger B. and Brown, Ben and Zeglin, Garth\ + \ and Lupish, Ron},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176729},\n\ + \ issn = {2220-4806},\n keywords = {bagpipes, robot, music, instrument, MIDI },\n\ + \ pages = {80--84},\n title = {McBlare: A Robotic Bagpipe Player},\n url = {http://www.nime.org/proceedings/2005/nime2005_080.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176729 issn: 2220-4806 - month: May - numpages: 3 - pages: 566--568 - title: 'The EAVI EMG/EEG Board: Hybrid physiological sensing' - track: Demos - url: http://nime.org/proceedings/2023/nime2023_80.pdf - year: 2023 + keywords: 'bagpipes, robot, music, instrument, MIDI ' + pages: 80--84 + title: 'McBlare: A Robotic Bagpipe Player' + url: http://www.nime.org/proceedings/2005/nime2005_080.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_81 - abstract: 'In this work, we propose a method for the controllable synthesis of real-time - contact sounds using neural resonators. 
Previous works have used physically inspired - statistical methods and physical modelling for object materials and excitation - signals. Our method incorporates differentiable second-order resonators and estimates - their coefficients using a neural network that is conditioned on physical parameters. - This allows for interactive dynamic control and the generation of novel sounds - in an intuitive manner. We demonstrate the practical implementation of our method - and explore its potential creative applications.' - address: 'Mexico City, Mexico' - articleno: 81 - author: Rodrigo Diaz and Charalampos Saitis and Mark B Sandler - bibtex: "@article{nime2023_81,\n abstract = {In this work, we propose a method for\ - \ the controllable synthesis of real-time contact sounds using neural resonators.\ - \ Previous works have used physically inspired statistical methods and physical\ - \ modelling for object materials and excitation signals. Our method incorporates\ - \ differentiable second-order resonators and estimates their coefficients using\ - \ a neural network that is conditioned on physical parameters. This allows for\ - \ interactive dynamic control and the generation of novel sounds in an intuitive\ - \ manner. 
We demonstrate the practical implementation of our method and explore\ - \ its potential creative applications.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {81},\n author = {Rodrigo Diaz and Charalampos Saitis and Mark B\ - \ Sandler},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {5},\n pages = {569--573},\n\ - \ title = {Interactive Neural Resonators},\n track = {Work in Progress},\n url\ - \ = {http://nime.org/proceedings/2023/nime2023_81.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Bevilacqua2005 + abstract: 'In this report, we describe our development on the Max/MSPtoolbox MnM + dedicated to mapping between gesture andsound, and more generally to statistical + and machine learningmethods. This library is built on top of the FTM library, + whichenables the efficient use of matrices and other data structuresin Max/MSP. + Mapping examples are described based onvarious matrix manipulations such as Single + ValueDecomposition. The FTM and MnM libraries are freelyavailable.' + address: 'Vancouver, BC, Canada' + author: 'Bevilacqua, Frédéric and Müller, Rémy and Schnell, Norbert' + bibtex: "@inproceedings{Bevilacqua2005,\n abstract = {In this report, we describe\ + \ our development on the Max/MSPtoolbox MnM dedicated to mapping between gesture\ + \ andsound, and more generally to statistical and machine learningmethods. This\ + \ library is built on top of the FTM library, whichenables the efficient use of\ + \ matrices and other data structuresin Max/MSP. Mapping examples are described\ + \ based onvarious matrix manipulations such as Single ValueDecomposition. 
The\ + \ FTM and MnM libraries are freelyavailable.},\n address = {Vancouver, BC, Canada},\n\ + \ author = {Bevilacqua, Fr\\'{e}d\\'{e}ric and M\\''{u}ller, R\\'{e}my and Schnell,\ + \ Norbert},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176703},\n issn\ + \ = {2220-4806},\n keywords = {Mapping, interface design, matrix, Max/MSP. },\n\ + \ pages = {85--88},\n title = {MnM: a Max/MSP mapping toolbox},\n url = {http://www.nime.org/proceedings/2005/nime2005_085.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176703 issn: 2220-4806 - month: May - numpages: 5 - pages: 569--573 - title: Interactive Neural Resonators - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_81.pdf - year: 2023 + keywords: 'Mapping, interface design, matrix, Max/MSP. ' + pages: 85--88 + title: 'MnM: a Max/MSP mapping toolbox' + url: http://www.nime.org/proceedings/2005/nime2005_085.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_82 - abstract: "In this paper we present the Sabotaging Piano, a prepared electronic\ - \ piano that alters key-to-pitch correspondence by reassigning adjacent pitches\ - \ (i.e. one semi-tone higher or lower) to each key. Performers can control how\ - \ many keys to remap through an expression pedal. If the pedal is not pressed\ - \ the Sabotaging Piano works as a normal piano. When fully pressed, each key is\ - \ remapped one semi-tone up or down with equal probability. Each new performance\ - \ (i.e. when the piano is turned on) triggers a new and unknown remapping pattern,\ - \ but the specific pattern remains fixed throughout the whole performance. 
This\ - \ aims to provide a balance of uncertain but still explorable and learnable behaviour.\ - \ \nWe invited three professional piano improvisers to rehearse with our piano\ - \ in order to prepare a final improvisation concert. We aimed to explore how much\ - \ can be rehearsed or prepared with a piano that will behave somewhat differently\ - \ for each new performance. We asked pianists to document their rehearsal processes\ - \ to witness the appearing of strategies or techniques with the Sabotaging Piano.\ - \ \nThrough analysis of the rehearsals reports and the MIDI data collected in\ - \ the final concert, here we show that the three pianists not only developed different\ - \ techniques with the Sabotaging Piano, but they also leveraged the particularities\ - \ of it to use them as creative resources." - address: 'Mexico City, Mexico' - articleno: 82 - author: Teodoro Dannemann and Nick Bryan-Kinns - bibtex: "@article{nime2023_82,\n abstract = {In this paper we present the Sabotaging\ - \ Piano, a prepared electronic piano that alters key-to-pitch correspondence by\ - \ reassigning adjacent pitches (i.e. one semi-tone higher or lower) to each key.\ - \ Performers can control how many keys to remap through an expression pedal. If\ - \ the pedal is not pressed the Sabotaging Piano works as a normal piano. When\ - \ fully pressed, each key is remapped one semi-tone up or down with equal probability.\ - \ Each new performance (i.e. when the piano is turned on) triggers a new and unknown\ - \ remapping pattern, but the specific pattern remains fixed throughout the whole\ - \ performance. This aims to provide a balance of uncertain but still explorable\ - \ and learnable behaviour. \nWe invited three professional piano improvisers to\ - \ rehearse with our piano in order to prepare a final improvisation concert. We\ - \ aimed to explore how much can be rehearsed or prepared with a piano that will\ - \ behave somewhat differently for each new performance. 
We asked pianists to document\ - \ their rehearsal processes to witness the appearing of strategies or techniques\ - \ with the Sabotaging Piano. \nThrough analysis of the rehearsals reports and\ - \ the MIDI data collected in the final concert, here we show that the three pianists\ - \ not only developed different techniques with the Sabotaging Piano, but they\ - \ also leveraged the particularities of it to use them as creative resources.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {82},\n author = {Teodoro Dannemann\ - \ and Nick Bryan-Kinns},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {5},\n\ - \ pages = {574--578},\n title = {The Sabotaging Piano: key-to-pitch remapping\ - \ as a source of new techniques in piano improvisation},\n track = {Work in Progress},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_82.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: Pelletier2005 + abstract: 'This paper describes DspMap, a graphical user interface (GUI)designed + to assist the dynamic routing of signal generators andmodifiers currently being + developed at the International Academy of Media Arts & Sciences. Instead of relying + on traditional boxand-line approaches, DspMap proposes a design paradigm whereconnections + are determined by the relative positions of the variouselements in a single virtual + space.' + address: 'Vancouver, BC, Canada' + author: 'Pelletier, Jean-Marc' + bibtex: "@inproceedings{Pelletier2005,\n abstract = {This paper describes DspMap,\ + \ a graphical user interface (GUI)designed to assist the dynamic routing of signal\ + \ generators andmodifiers currently being developed at the International Academy\ + \ of Media Arts \\& Sciences. 
Instead of relying on traditional boxand-line approaches,\ + \ DspMap proposes a design paradigm whereconnections are determined by the relative\ + \ positions of the variouselements in a single virtual space.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Pelletier, Jean-Marc},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176800},\n issn = {2220-4806},\n keywords = {Graphical user\ + \ interface, real-time performance, map, dynamic routing },\n pages = {89--92},\n\ + \ title = {A Graphical Interface for Real-Time Signal Routing},\n url = {http://www.nime.org/proceedings/2005/nime2005_089.pdf},\n\ + \ year = {2005}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176800 + issn: 2220-4806 + keywords: 'Graphical user interface, real-time performance, map, dynamic routing ' + pages: 89--92 + title: A Graphical Interface for Real-Time Signal Routing + url: http://www.nime.org/proceedings/2005/nime2005_089.pdf + year: 2005 + + +- ENTRYTYPE: inproceedings + ID: Scavone2005 + abstract: 'The breath pressure signal applied to wind music instruments is generally + considered to be a slowly varying function of time. In a context of music control, + this assumptionimplies that a relatively low digital sample rate (100-200Hz) is + sufficient to capture and/or reproduce this signal.We tested this assumption by + evaluating the frequency content in breath pressure, particularly during the use + of extended performance techniques such as growling, humming,and flutter tonguing. + Our results indicate frequency contentin a breath pressure signal up to about + 10 kHz, with especially significant energy within the first 1000 Hz. We furtherinvestigated + the frequency response of several commerciallyavailable pressure sensors to assess + their responsiveness tohigher frequency breath signals. 
Though results were mixed,some + devices were found capable of sensing frequencies upto at least 1.5 kHz. Finally, + similar measurements were conducted with Yamaha WX11 and WX5 wind controllers + andresults suggest that their breath pressure outputs are sampled at about 320 + Hz and 280 Hz, respectively.' + address: 'Vancouver, BC, Canada' + author: 'Scavone, Gary and Silva, Andrey R.' + bibtex: "@inproceedings{Scavone2005,\n abstract = {The breath pressure signal applied\ + \ to wind music instruments is generally considered to be a slowly varying function\ + \ of time. In a context of music control, this assumptionimplies that a relatively\ + \ low digital sample rate (100-200Hz) is sufficient to capture and/or reproduce\ + \ this signal.We tested this assumption by evaluating the frequency content in\ + \ breath pressure, particularly during the use of extended performance techniques\ + \ such as growling, humming,and flutter tonguing. Our results indicate frequency\ + \ contentin a breath pressure signal up to about 10 kHz, with especially significant\ + \ energy within the first 1000 Hz. We furtherinvestigated the frequency response\ + \ of several commerciallyavailable pressure sensors to assess their responsiveness\ + \ tohigher frequency breath signals. Though results were mixed,some devices were\ + \ found capable of sensing frequencies upto at least 1.5 kHz. 
Finally, similar\ + \ measurements were conducted with Yamaha WX11 and WX5 wind controllers andresults\ + \ suggest that their breath pressure outputs are sampled at about 320 Hz and 280\ + \ Hz, respectively.},\n address = {Vancouver, BC, Canada},\n author = {Scavone,\ + \ Gary and Silva, Andrey R.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176810},\n\ + \ issn = {2220-4806},\n keywords = {Breath Control, Wind Controller, Breath Sensors\ + \ },\n pages = {93--96},\n title = {Frequency Content of Breath Pressure and Implications\ + \ for Use in Control},\n url = {http://www.nime.org/proceedings/2005/nime2005_093.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176810 issn: 2220-4806 - month: May - numpages: 5 - pages: 574--578 - title: 'The Sabotaging Piano: key-to-pitch remapping as a source of new techniques - in piano improvisation' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_82.pdf - year: 2023 + keywords: 'Breath Control, Wind Controller, Breath Sensors ' + pages: 93--96 + title: Frequency Content of Breath Pressure and Implications for Use in Control + url: http://www.nime.org/proceedings/2005/nime2005_093.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_83 - abstract: 'This paper explores the potential of AI text-to-image diffusion models - (e.g. DALLE-2 and Midjourney) to support the early phase design of new digital - musical instruments in collaboration with Disabled musicians. The paper presents - initial findings from two speculative design workshops attended by Disabled participants - who are affiliated with the London-based inclusive arts organisation Joy of Sound. 
- The workshops included activities enabling participants to co-create speculative - images of new instruments, drawing on their contributions. These included the - overall appearance of the instrument, constituent materials and other design characteristics. - The paper discusses the generated images and examines how diffusion models can - be a useful tool to support the conceptual co-design phase of bespoke accessible - instruments. The project findings indicate that diffusion models can be useful - as a facilitatory tool for idea generation in the initial stages of bespoke instrument - design.' - address: 'Mexico City, Mexico' - articleno: 83 - author: Hugh Aynsley )* and Tom Mitchell and Dave Meckin ) - bibtex: "@article{nime2023_83,\n abstract = {This paper explores the potential of\ - \ AI text-to-image diffusion models (e.g. DALLE-2 and Midjourney) to support the\ - \ early phase design of new digital musical instruments in collaboration with\ - \ Disabled musicians. The paper presents initial findings from two speculative\ - \ design workshops attended by Disabled participants who are affiliated with the\ - \ London-based inclusive arts organisation Joy of Sound. The workshops included\ - \ activities enabling participants to co-create speculative images of new instruments,\ - \ drawing on their contributions. These included the overall appearance of the\ - \ instrument, constituent materials and other design characteristics. 
The paper\ - \ discusses the generated images and examines how diffusion models can be a useful\ - \ tool to support the conceptual co-design phase of bespoke accessible instruments.\ - \ The project findings indicate that diffusion models can be useful as a facilitatory\ - \ tool for idea generation in the initial stages of bespoke instrument design.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {83},\n author = {Hugh Aynsley\ - \ )* and Tom Mitchell and Dave Meckin )},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {5},\n pages = {579--583},\n title = {Participatory Conceptual Design of Accessible\ - \ Digital Musical Instruments using Generative AI},\n track = {Work in Progress},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_83.pdf},\n year = {2023}\n\ +- ENTRYTYPE: inproceedings + ID: Crevoisier2005 + abstract: 'Tangible Acoustic Interfaces (TAI) rely on various acousticsensing technologies, + such as sound source location and acoustic imaging, to detect the position of + contact of users interacting with the surface of solid materials. With their ability + to transform almost any physical objects, flat or curved surfaces and walls into + interactive interfaces, acoustic sensing technologies show a promising way to + bring the sense of touch into the realm of computer interaction. Because music + making has been closely related to this sense during centuries, an application + of particular interest is the use of TAI''s for the design of new musical instruments + that matches the physicality and expressiveness of classical instruments. This + paper gives an overview of the various acoustic-sensing technologies involved + in the realisation of TAI''s and develops on the motivation underlying their use + for the design of new musical instruments. 
' + address: 'Vancouver, BC, Canada' + author: 'Crevoisier, Alain and Polotti, Pietro' + bibtex: "@inproceedings{Crevoisier2005,\n abstract = {Tangible Acoustic Interfaces\ + \ (TAI) rely on various acousticsensing technologies, such as sound source location\ + \ and acoustic imaging, to detect the position of contact of users interacting\ + \ with the surface of solid materials. With their ability to transform almost\ + \ any physical objects, flat or curved surfaces and walls into interactive interfaces,\ + \ acoustic sensing technologies show a promising way to bring the sense of touch\ + \ into the realm of computer interaction. Because music making has been closely\ + \ related to this sense during centuries, an application of particular interest\ + \ is the use of TAI's for the design of new musical instruments that matches the\ + \ physicality and expressiveness of classical instruments. This paper gives an\ + \ overview of the various acoustic-sensing technologies involved in the realisation\ + \ of TAI's and develops on the motivation underlying their use for the design\ + \ of new musical instruments. },\n address = {Vancouver, BC, Canada},\n author\ + \ = {Crevoisier, Alain and Polotti, Pietro},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176727},\n issn = {2220-4806},\n keywords = {Tangible interfaces,\ + \ new musical instruments design. 
},\n pages = {97--100},\n title = {Tangible\ + \ Acoustic Interfaces and their Applications for the Design of New Musical Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_097.pdf},\n year = {2005}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176727 issn: 2220-4806 - month: May - numpages: 5 - pages: 579--583 - title: Participatory Conceptual Design of Accessible Digital Musical Instruments - using Generative AI - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_83.pdf - year: 2023 + keywords: 'Tangible interfaces, new musical instruments design. ' + pages: 97--100 + title: Tangible Acoustic Interfaces and their Applications for the Design of New + Musical Instruments + url: http://www.nime.org/proceedings/2005/nime2005_097.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_84 - abstract: "The development of bespoke musical tools such as many accessible digital\ - \ musical instruments (ADMI) can necessitate specific design constraints. Within\ - \ a field which often promotes out of the box thinking and new interactions with\ - \ experimental technologies, how do we design for user groups where these notions\ - \ of interaction will be less familiar, and/or increasingly challenging due to\ - \ the progression of cognitive decline?\nThe relationship between age and the\ - \ use of technology is understood within the wider context of human computer interaction\ - \ (HCI), however, how this applies specifically to musical interaction or contributes\ - \ to a ‘dementia-friendly’ approach to digital musical instrument (DMI) design\ - \ is drastically underrepresented within the NIME community. 
Following a scoping\ - \ review of technology for arts activities designed for older adults with cognitive\ - \ decline, we ran a series of involvement activities with a range of stakeholders\ - \ living with, or caring for those living with dementia. Consolidating the knowledge\ - \ and experience shared at these events, we propose five considerations for designing\ - \ dementia-friendly digital musical instruments. We illustrate our approach with\ - \ a range of new instruments co-designed to enable increased interaction with\ - \ music for people living with dementia." - address: 'Mexico City, Mexico' - articleno: 84 - author: Jonathan M Pigrem and Jennifer MacRitchie and Andrew McPherson - bibtex: "@article{nime2023_84,\n abstract = {The development of bespoke musical\ - \ tools such as many accessible digital musical instruments (ADMI) can necessitate\ - \ specific design constraints. Within a field which often promotes out of the\ - \ box thinking and new interactions with experimental technologies, how do we\ - \ design for user groups where these notions of interaction will be less familiar,\ - \ and/or increasingly challenging due to the progression of cognitive decline?\n\ - The relationship between age and the use of technology is understood within the\ - \ wider context of human computer interaction (HCI), however, how this applies\ - \ specifically to musical interaction or contributes to a ‘dementia-friendly’\ - \ approach to digital musical instrument (DMI) design is drastically underrepresented\ - \ within the NIME community. Following a scoping review of technology for arts\ - \ activities designed for older adults with cognitive decline, we ran a series\ - \ of involvement activities with a range of stakeholders living with, or caring\ - \ for those living with dementia. Consolidating the knowledge and experience shared\ - \ at these events, we propose five considerations for designing dementia-friendly\ - \ digital musical instruments. 
We illustrate our approach with a range of new\ - \ instruments co-designed to enable increased interaction with music for people\ - \ living with dementia.},\n address = {Mexico City, Mexico},\n articleno = {84},\n\ - \ author = {Jonathan M Pigrem and Jennifer MacRitchie and Andrew McPherson},\n\ +- ENTRYTYPE: inproceedings + ID: Bencina2005 + abstract: 'This report describes The Metasurface – a mapping interface supporting + interactive design of two-to-many mappings through the placement and interpolation + of parameter snapshots on a plane. The Metasurface employs natural neighbour interpolation, + a local interpolation method based on Voronoi tessellation, to interpolate between + parameter snapshots. Compared to global field based methods, natural neighbour + interpolation offers increased predictability and the ability to represent multi-scale + surfaces. An implementation of the Metasurface in the AudioMulch software environment + is presented and key architectural features of AudioMulch which facilitate this + implementation are discussed.' + address: 'Vancouver, BC, Canada' + author: 'Bencina, Ross' + bibtex: "@inproceedings{Bencina2005,\n abstract = {This report describes The Metasurface\ + \ – a mapping interface supporting interactive design of two-to-many mappings\ + \ through the placement and interpolation of parameter snapshots on a plane. The\ + \ Metasurface employs natural neighbour interpolation, a local interpolation method\ + \ based on Voronoi tessellation, to interpolate between parameter snapshots. Compared\ + \ to global field based methods, natural neighbour interpolation offers increased\ + \ predictability and the ability to represent multi-scale surfaces. 
An implementation\ + \ of the Metasurface in the AudioMulch software environment is presented and key\ + \ architectural features of AudioMulch which facilitate this implementation are\ + \ discussed.},\n address = {Vancouver, BC, Canada},\n author = {Bencina, Ross},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {584--589},\n title\ - \ = {Instructions Not Included: Dementia-Friendly approaches to DMI Design},\n\ - \ track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_84.pdf},\n\ - \ year = {2023}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1176701},\n issn = {2220-4806},\n\ + \ keywords = {computational geometry,design,design support,high-level control,interpolation,mapping,of\ + \ interpo-,this section reviews related,user interface,work in the field},\n pages\ + \ = {101--104},\n title = {The Metasurface -- Applying Natural Neighbour Interpolation\ + \ to Two-to-Many Mapping},\n url = {http://www.nime.org/proceedings/2005/nime2005_101.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176701 issn: 2220-4806 - month: May - numpages: 6 - pages: 584--589 - title: 'Instructions Not Included: Dementia-Friendly approaches to DMI Design' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_84.pdf - year: 2023 + keywords: 'computational geometry,design,design support,high-level control,interpolation,mapping,of + interpo-,this section reviews related,user interface,work in the field' + pages: 101--104 + title: The Metasurface -- Applying Natural Neighbour Interpolation to Two-to-Many + Mapping + url: http://www.nime.org/proceedings/2005/nime2005_101.pdf + year: 2005 -- ENTRYTYPE: 
article - ID: nime2023_85 - abstract: "The utility of gestural technologies in broadening analytical- and expressive-interface\ - \ possibilities has been documented extensively; both within the sphere of NIME\ - \ and beyond. \n\nWearable gestural sensors have proved integral components of\ - \ many past NIMEs. Previous implementations have typically made use of specialist,\ - \ IMU and EMG based gestural technologies. Few have proved, singularly, as popular\ - \ as the Myo armband. An informal review of the NIME archives found that the Myo\ - \ has featured in 21 NIME publications, since an initial declaration of the Myo’s\ - \ promise as “a new standard controller in the NIME community” by Nyomen et al.\ - \ in 2015. Ten of those found were published after the Myo’s discontinuation in\ - \ 2018, including three as recently as 2022.\n\nThis paper details an assessment\ - \ of smartwatch-based IMU and audio logging as a ubiquitous, accessible alternative\ - \ to the IMU capabilities of the Myo armband. Six violinists were recorded performing\ - \ a number of exercises using VioLogger; a purpose-built application for the Apple\ - \ Watch. Participants were simultaneously recorded using a Myo armband and a freestanding\ - \ microphone. Initial testing upon this pilot dataset indicated promising results\ - \ for the purposes of audio-gestural analysis; both implementations demonstrated\ - \ similar efficacy for the purposes of MLP-based bow-stroke classification." - address: 'Mexico City, Mexico' - articleno: 85 - author: William Francis Wilson and Niccolo Granieri and Islah Ali-Maclachlan - bibtex: "@article{nime2023_85,\n abstract = {The utility of gestural technologies\ - \ in broadening analytical- and expressive-interface possibilities has been documented\ - \ extensively; both within the sphere of NIME and beyond. \n\nWearable gestural\ - \ sensors have proved integral components of many past NIMEs. 
Previous implementations\ - \ have typically made use of specialist, IMU and EMG based gestural technologies.\ - \ Few have proved, singularly, as popular as the Myo armband. An informal review\ - \ of the NIME archives found that the Myo has featured in 21 NIME publications,\ - \ since an initial declaration of the Myo’s promise as “a new standard controller\ - \ in the NIME community” by Nyomen et al. in 2015. Ten of those found were published\ - \ after the Myo’s discontinuation in 2018, including three as recently as 2022.\n\ - \nThis paper details an assessment of smartwatch-based IMU and audio logging as\ - \ a ubiquitous, accessible alternative to the IMU capabilities of the Myo armband.\ - \ Six violinists were recorded performing a number of exercises using VioLogger;\ - \ a purpose-built application for the Apple Watch. Participants were simultaneously\ - \ recorded using a Myo armband and a freestanding microphone. Initial testing\ - \ upon this pilot dataset indicated promising results for the purposes of audio-gestural\ - \ analysis; both implementations demonstrated similar efficacy for the purposes\ - \ of MLP-based bow-stroke classification.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {85},\n author = {William Francis Wilson and Niccolo Granieri and\ - \ Islah Ali-Maclachlan},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {4},\n\ - \ pages = {590--593},\n title = {Time's up for the Myo? The smartwatch as a ubiquitous\ - \ alternative for audio-gestural analyses.},\n track = {Work in Progress},\n url\ - \ = {http://nime.org/proceedings/2023/nime2023_85.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Silva2005 + abstract: 'This paper aims to present some perspectives on mappingembouchure gestures + of flute players and their use as controlvariables. 
For this purpose, we have + analyzed several typesof sensors, in terms of sensitivity, dimension, accuracy + andprice, which can be used to implement a system capable ofmapping embouchure + parameters such as air jet velocity andair jet direction. Finally, we describe + the implementationof a sensor system used to map embouchure gestures of aclassical + Boehm flute.' + address: 'Vancouver, BC, Canada' + author: 'Silva, Andrey R. and Wanderley, Marcelo M. and Scavone, Gary' + bibtex: "@inproceedings{Silva2005,\n abstract = {This paper aims to present some\ + \ perspectives on mappingembouchure gestures of flute players and their use as\ + \ controlvariables. For this purpose, we have analyzed several typesof sensors,\ + \ in terms of sensitivity, dimension, accuracy andprice, which can be used to\ + \ implement a system capable ofmapping embouchure parameters such as air jet velocity\ + \ andair jet direction. Finally, we describe the implementationof a sensor system\ + \ used to map embouchure gestures of aclassical Boehm flute.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Silva, Andrey R. and Wanderley, Marcelo M. and Scavone,\ + \ Gary},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176814},\n issn = {2220-4806},\n\ + \ keywords = {Embouchure, air pressure sensors, hot wires, mapping, augmented\ + \ flute. },\n pages = {105--108},\n title = {On the Use of Flute Air Jet as A\ + \ Musical Control Variable},\n url = {http://www.nime.org/proceedings/2005/nime2005_105.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176814 issn: 2220-4806 - month: May - numpages: 4 - pages: 590--593 - title: 'Time''s up for the Myo? The smartwatch as a ubiquitous alternative for audio-gestural - analyses.' 
- track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_85.pdf - year: 2023 + keywords: 'Embouchure, air pressure sensors, hot wires, mapping, augmented flute. ' + pages: 105--108 + title: On the Use of Flute Air Jet as A Musical Control Variable + url: http://www.nime.org/proceedings/2005/nime2005_105.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_86 - abstract: 'This paper presents the Hummellaphone, a highly-reconfigurable, open-source, - electromagnetically actuated instrument being developed for research in engineering - learning, haptics, and human-computer interaction (HCI). The reconfigurable performance - interface promotes experimentation with gestural control and mapping. Haptic feedback - reintroduces the tangible bilateral communication between performer and instrument - that is present in many acoustic and electro-acoustic instruments but missing - in most digital musical instruments. The overall aim of the project is to create - an open-source, accessible toolkit for facilitating the development of and research - with electromagnetically actuated musical instruments. This paper describes the - hardware and design of the musical instrument and control interface as well as - example research applications.' - address: 'Mexico City, Mexico' - articleno: 86 - author: Adam G Schmidt and Michael Gurevich - bibtex: "@article{nime2023_86,\n abstract = {This paper presents the Hummellaphone,\ - \ a highly-reconfigurable, open-source, electromagnetically actuated instrument\ - \ being developed for research in engineering learning, haptics, and human-computer\ - \ interaction (HCI). The reconfigurable performance interface promotes experimentation\ - \ with gestural control and mapping. Haptic feedback reintroduces the tangible\ - \ bilateral communication between performer and instrument that is present in\ - \ many acoustic and electro-acoustic instruments but missing in most digital musical\ - \ instruments. 
The overall aim of the project is to create an open-source, accessible\ - \ toolkit for facilitating the development of and research with electromagnetically\ - \ actuated musical instruments. This paper describes the hardware and design of\ - \ the musical instrument and control interface as well as example research applications.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {86},\n author = {Adam G Schmidt\ - \ and Michael Gurevich},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ - \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {6},\n\ - \ pages = {594--599},\n title = {The Hummellaphone: An Electromagnetically Actuated\ - \ Instrument and Open-Source Toolkit},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_86.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Rodet2005 + abstract: 'The PHASE project is a research project devoted to the study and the + realization of systems of multi-modal interaction for generation, handling and + control of sound and music. Supported by the network RIAM (Recherche et Innovation + en Audiovisuel et Multimédia), it was carried out by the CEA-LIST for haptic research, + Haption for the realization of the haptic device, Ondim for integration and visual + realization and Ircam for research and realization about sound, music and the + metaphors for interaction. The integration of the three modalities offers completely + innovative capacities for interaction. The objectives are scientific, cultural + and educational. Finally, an additional objective was to test such a prototype + system, including its haptic arm, in real conditions for general public and over + a long duration in order to measure its solidity, its reliability and its interest + for users. 
Thus, during the last three months of the project, a demonstrator was + presented and evaluated in a museum in Paris, in the form of an interactive installation + offering the public a musical game. Different from a video game, the aim is not + to animate the pixels on the screen but to play music and to incite musical awareness.' + address: 'Vancouver, BC, Canada' + author: 'Rodet, Xavier and Lambert, Jean-Philippe and Cahen, Roland and Gaudy, Thomas + and Guedy, Fabrice and Gosselin, Florian and Mobuchon, Pascal' + bibtex: "@inproceedings{Rodet2005,\n abstract = {The PHASE project is a research\ + \ project devoted to the study and the realization of systems of multi-modal interaction\ + \ for generation, handling and control of sound and music. Supported by the network\ + \ RIAM (Recherche et Innovation en Audiovisuel et Multim\\'{e}dia), it was carried\ + \ out by the CEA-LIST for haptic research, Haption for the realization of the\ + \ haptic device, Ondim for integration and visual realization and Ircam for research\ + \ and realization about sound, music and the metaphors for interaction. The integration\ + \ of the three modalities offers completely innovative capacities for interaction.\ + \ The objectives are scientific, cultural and educational. Finally, an additional\ + \ objective was to test such a prototype system, including its haptic arm, in\ + \ real conditions for general public and over a long duration in order to measure\ + \ its solidity, its reliability and its interest for users. Thus, during the last\ + \ three months of the project, a demonstrator was presented and evaluated in a\ + \ museum in Paris, in the form of an interactive installation offering the public\ + \ a musical game. 
Different from a video game, the aim is not to animate the pixels\ + \ on the screen but to play music and to incite musical awareness.},\n address\ + \ = {Vancouver, BC, Canada},\n author = {Rodet, Xavier and Lambert, Jean-Philippe\ + \ and Cahen, Roland and Gaudy, Thomas and Guedy, Fabrice and Gosselin, Florian\ + \ and Mobuchon, Pascal},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176804},\n\ + \ issn = {2220-4806},\n keywords = {Haptic, interaction, sound, music, control,\ + \ installation. },\n pages = {109--114},\n title = {Study of haptic and visual\ + \ interaction for sound and music control in the Phase project},\n url = {http://www.nime.org/proceedings/2005/nime2005_109.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176804 issn: 2220-4806 - month: May - numpages: 6 - pages: 594--599 - title: 'The Hummellaphone: An Electromagnetically Actuated Instrument and Open-Source - Toolkit' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_86.pdf - year: 2023 + keywords: 'Haptic, interaction, sound, music, control, installation. ' + pages: 109--114 + title: Study of haptic and visual interaction for sound and music control in the + Phase project + url: http://www.nime.org/proceedings/2005/nime2005_109.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_87 - abstract: 'This paper presents the development of a multichannel sound installation - about atmospheric processes. This instrument is an example of taking inspiration - from ancient cultures for NIME design, and of sensing weather to extend the perception - of the performer, who also then becomes a listener of atmospheric processes. 
The - interface channels dynamics found in the atmosphere: wind''s force and direction, - air quality, atmospheric pressure, and electromagnetism. These sources are translated - into sound by mapping sensor data into a multichannel sonification composition. - The paper outlines the artistic context and expands on its interaction overview.' - address: 'Mexico City, Mexico' - articleno: 87 - author: Juan C Duarte Regino - bibtex: "@article{nime2023_87,\n abstract = {This paper presents the development\ - \ of a multichannel sound installation about atmospheric processes. This instrument\ - \ is an example of taking inspiration from ancient cultures for NIME design, and\ - \ of sensing weather to extend the perception of the performer, who also then\ - \ becomes a listener of atmospheric processes. The interface channels dynamics\ - \ found in the atmosphere: wind's force and direction, air quality, atmospheric\ - \ pressure, and electromagnetism. These sources are translated into sound by mapping\ - \ sensor data into a multichannel sonification composition. The paper outlines\ - \ the artistic context and expands on its interaction overview.},\n address =\ - \ {Mexico City, Mexico},\n articleno = {87},\n author = {Juan C Duarte Regino},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {4},\n pages = {600--603},\n title\ - \ = {AUGURY : an interface for generating soundscapes inspired by ancient divination},\n\ - \ track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_87.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Levin2005a + abstract: 'We report on The Manual Input Sessions, a series of audiovisual vignettes + which probe the expressive possibilities of free-form hand gestures. 
Performed + on a hybrid projection system which combines a traditional analog overhead projector + and a digital PC video projector, our vision-based software instruments generate + dynamic sounds and graphics solely in response to the forms and movements of the + silhouette contours of the user''s hands. Interactions and audiovisual mappings + which make use of both positive (exterior) and negative (interior) contours are + discussed. ' + address: 'Vancouver, BC, Canada' + author: 'Levin, Golan and Lieberman, Zachary' + bibtex: "@inproceedings{Levin2005a,\n abstract = {We report on The Manual Input\ + \ Sessions, a series of audiovisual vignettes which probe the expressive possibilities\ + \ of free-form hand gestures. Performed on a hybrid projection system which combines\ + \ a traditional analog overhead projector and a digital PC video projector, our\ + \ vision-based software instruments generate dynamic sounds and graphics solely\ + \ in response to the forms and movements of the silhouette contours of the user's\ + \ hands. Interactions and audiovisual mappings which make use of both positive\ + \ (exterior) and negative (interior) contours are discussed. },\n address = {Vancouver,\ + \ BC, Canada},\n author = {Levin, Golan and Lieberman, Zachary},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176772},\n issn = {2220-4806},\n keywords = {Audiovisual\ + \ performance, hand silhouettes, computer vision, contour analysis, sound-image\ + \ relationships, augmented reality. 
},\n pages = {115--120},\n title = {Sounds\ + \ from Shapes: Audiovisual Performance with Hand Silhouette Contours in The Manual\ + \ Input Sessions},\n url = {http://www.nime.org/proceedings/2005/nime2005_115.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176772 issn: 2220-4806 - month: May - numpages: 4 - pages: 600--603 - title: 'AUGURY : an interface for generating soundscapes inspired by ancient divination' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_87.pdf - year: 2023 + keywords: 'Audiovisual performance, hand silhouettes, computer vision, contour analysis, + sound-image relationships, augmented reality. ' + pages: 115--120 + title: 'Sounds from Shapes: Audiovisual Performance with Hand Silhouette Contours + in The Manual Input Sessions' + url: http://www.nime.org/proceedings/2005/nime2005_115.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_88 - abstract: 'Interactive Machine Learning (IML) is an approach previously explored - in music discipline. However, its adaptation in sound synthesis as an algorithmic - method of creation has not been examined. This article presents the prototype - ASCIML, an Assistant for Sound Creation with Interactive Machine Learning, that - allows musicians to use IML to create personalized datasets and generate new sounds. - Additionally, a preliminary study is presented which aims to evaluate the potential - of ASCIML as a tool for sound synthesis and to gather feedback and suggestions - for future improvements. The prototype can be used in Google Colaboratory and - is divided into four main stages: Data Design, Training, Evaluation and Audio - Creation. 
Results from the study, which involved 27 musicians with no prior knowledge - of Machine Learning (ML), showed that most participants preferred using microphone - recording and synthesis to design their dataset and that the Envelopegram visualization - was found to be particularly meaningful to understand sound datasets. It was also - found that the majority of participants preferred to implement a pre-trained model - on their data and relied on hearing the audio reconstruction provided by the interface - to evaluate the model performance. Overall, the study demonstrates the potential - of ASCIML as a tool for hands-on neural audio sound synthesis and provides valuable - insights for future developments in the field.' - address: 'Mexico City, Mexico' - articleno: 88 - author: Gerardo Meza - bibtex: "@article{nime2023_88,\n abstract = {Interactive Machine Learning (IML)\ - \ is an approach previously explored in music discipline. However, its adaptation\ - \ in sound synthesis as an algorithmic method of creation has not been examined.\ - \ This article presents the prototype ASCIML, an Assistant for Sound Creation\ - \ with Interactive Machine Learning, that allows musicians to use IML to create\ - \ personalized datasets and generate new sounds. Additionally, a preliminary study\ - \ is presented which aims to evaluate the potential of ASCIML as a tool for sound\ - \ synthesis and to gather feedback and suggestions for future improvements. The\ - \ prototype can be used in Google Colaboratory and is divided into four main stages:\ - \ Data Design, Training, Evaluation and Audio Creation. Results from the study,\ - \ which involved 27 musicians with no prior knowledge of Machine Learning (ML),\ - \ showed that most participants preferred using microphone recording and synthesis\ - \ to design their dataset and that the Envelopegram visualization was found to\ - \ be particularly meaningful to understand sound datasets. 
It was also found that\ - \ the majority of participants preferred to implement a pre-trained model on their\ - \ data and relied on hearing the audio reconstruction provided by the interface\ - \ to evaluate the model performance. Overall, the study demonstrates the potential\ - \ of ASCIML as a tool for hands-on neural audio sound synthesis and provides valuable\ - \ insights for future developments in the field.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {88},\n author = {Gerardo Meza},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {4},\n pages = {604--607},\n title = {Exploring the potential of\ - \ interactive Machine Learning for Sound Generation: A preliminary study with\ - \ sound artists},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_88.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Yonezawa2005 + abstract: 'The HandySinger system is a personified tool developed to naturally express + a singing voice controlled by the gestures of a hand puppet. Assuming that a singing + voice is a kind of musical expression, natural expressions of the singing voice + are important for personification. We adopt a singing voice morphing algorithm + that effectively smoothes out the strength of expressions delivered with a singing + voice. The system’s hand puppet consists of a glove with seven bend sensors and + two pressure sensors. It sensitively captures the user’s motion as a personified + puppet’s gesture. To synthesize the different expressional strengths of a singing + voice, the “normal” (without expression) voice of a particular singer is used + as the base of morphing, and three different expressions, “dark,” “whisper” and + “wet,” are used as the target. 
This configuration provides musically expressed + controls that are intuitive to users. In the experiment, we evaluate whether 1) + the morphing algorithm interpolates expressional strength in a perceptual sense, + 2) the handpuppet interface provides gesture data at sufficient resolution, and + 3) the gestural mapping of the current system works as planned.' + address: 'Vancouver, BC, Canada' + author: 'Yonezawa, Tomoko and Suzuki, Takahiko and Mase, Kenji and Kogure, Kiyoshi' + bibtex: "@inproceedings{Yonezawa2005,\n abstract = {The HandySinger system is a\ + \ personified tool developed to naturally express a singing voice controlled by\ + \ the gestures of a hand puppet. Assuming that a singing voice is a kind of musical\ + \ expression, natural expressions of the singing voice are important for personification.\ + \ We adopt a singing voice morphing algorithm that effectively smoothes out the\ + \ strength of expressions delivered with a singing voice. The system’s hand puppet\ + \ consists of a glove with seven bend sensors and two pressure sensors. It sensitively\ + \ captures the user’s motion as a personified puppet’s gesture. To synthesize\ + \ the different expressional strengths of a singing voice, the “normal” (without\ + \ expression) voice of a particular singer is used as the base of morphing, and\ + \ three different expressions, “dark,” “whisper” and “wet,” are used as the target.\ + \ This configuration provides musically expressed controls that are intuitive\ + \ to users. 
In the experiment, we evaluate whether 1) the morphing algorithm interpolates\
    \ expressional strength in a perceptual sense, 2) the handpuppet interface provides\
    \ gesture data at sufficient resolution, and 3) the gestural mapping of the current\
    \ system works as planned.},\n address = {Vancouver, BC, Canada},\n author = {Yonezawa,\
    \ Tomoko and Suzuki, Takahiko and Mase, Kenji and Kogure, Kiyoshi},\n booktitle\
    \ = {Proceedings of the International Conference on New Interfaces for Musical\
    \ Expression},\n doi = {10.5281/zenodo.1176844},\n issn = {2220-4806},\n keywords\
    \ = {Personified Expression, Singing Voice Morphing, Voice Expressivity, Hand-puppet\
    \ Interface },\n pages = {121--126},\n title = {HandySinger : Expressive Singing\
    \ Voice Morphing using Personified Hand-puppet Interface},\n url = {http://www.nime.org/proceedings/2005/nime2005_121.pdf},\n\
    \ year = {2005}\n}\n"
  booktitle: Proceedings of the International Conference on New Interfaces for Musical
    Expression
  doi: 10.5281/zenodo.1176844
  issn: 2220-4806
  keywords: 'Personified Expression, Singing Voice Morphing, Voice Expressivity,
    Hand-puppet Interface '
  pages: 121--126
  title: 'HandySinger : Expressive Singing Voice Morphing using Personified Hand-puppet
    Interface'
  url: http://www.nime.org/proceedings/2005/nime2005_121.pdf
  year: 2005


- ENTRYTYPE: article
  ID: nime2023_89
  abstract: "CALM is a performance piece from a collection of works that explore trauma\
    \ through trauma-informed therapeutic models, such as bi-lateral coordination\
    \ drawing, yoga, and tapping, and existing movement practices, such as yoga, Pilates,\
    \ dance, and 
conducting, to control and manipulate sound in performance. This\ - \ work draws from yoga practice to control the volumes and audio effects on pre-composed\ - \ audio layers through use of datagloves (MiMu with their proprietary software\ - \ Glover (MI.MU Gloves Ltd, 2010), though this is not specific to the constraints\ - \ of the MiMu/Glover system) and Max/MSP (Cycling ’74, 2018). \n\nYoga is a movement\ - \ practice often recommended to manage symptoms of trauma and anxiety due to the\ - \ focus on one’s body and generally meditative nature or the practice. However,\ - \ in cases of sexual trauma, yoga may yield the opposite of the desired results\ - \ when not used in a trauma-sensitive context (Khoudari, 2021; Levine et al.,\ - \ 2010). This is because the individual tries to focus on the body in which they\ - \ do not feel safe and encounter unresolved trauma. Thus, instead of a grounding\ - \ effect, the individual hears the mental and physical pain that they have endured\ - \ repeating itself in the present. To reflect this, “stillness” audio material\ - \ is routed to scream-like and abrasive sounds, while “movement” audio quiets\ - \ the listener’s internal landscape. Movements used in the live piece were chosen\ - \ based on providing extramusical benefit to the composer-performer (and areas\ - \ that are typically carrying tension as a result of the trauma) without contributing\ - \ to any negative effects, for example, the pose “Happy Baby/Ananda Balasana”\ - \ was excluded and Malasana (a deep squat pose) was used in its place as it puts\ - \ the performer in a less vulnerable position by being on one’s feet. 
" - address: 'Mexico City, Mexico' - articleno: 89 - author: Sophie Rose - bibtex: "@article{nime2023_89,\n abstract = {CALM is a performance piece from a\ - \ collection of works that explore trauma through trauma-informed therapeutic\ - \ models, such as bi-lateral coordination drawing, yoga, and tapping, and existing\ - \ movement practices, such as yoga, Pilates, dance, and conducting, to control\ - \ and manipulate sound in performance. This work draws from yoga practice to control\ - \ the volumes and audio effects on pre-composed audio layers through use of datagloves\ - \ (MiMu with their proprietary software Glover (MI.MU Gloves Ltd, 2010), though\ - \ this is not specific to the constraints of the MiMu/Glover system) and Max/MSP\ - \ (Cycling ’74, 2018). \n\nYoga is a movement practice often recommended to manage\ - \ symptoms of trauma and anxiety due to the focus on one’s body and generally\ - \ meditative nature or the practice. However, in cases of sexual trauma, yoga\ - \ may yield the opposite of the desired results when not used in a trauma-sensitive\ - \ context (Khoudari, 2021; Levine et al., 2010). This is because the individual\ - \ tries to focus on the body in which they do not feel safe and encounter unresolved\ - \ trauma. Thus, instead of a grounding effect, the individual hears the mental\ - \ and physical pain that they have endured repeating itself in the present. To\ - \ reflect this, “stillness” audio material is routed to scream-like and abrasive\ - \ sounds, while “movement” audio quiets the listener’s internal landscape. 
Movements\ - \ used in the live piece were chosen based on providing extramusical benefit to\ - \ the composer-performer (and areas that are typically carrying tension as a result\ - \ of the trauma) without contributing to any negative effects, for example, the\ - \ pose “Happy Baby/Ananda Balasana” was excluded and Malasana (a deep squat pose)\ - \ was used in its place as it puts the performer in a less vulnerable position\ - \ by being on one’s feet. },\n address = {Mexico City, Mexico},\n articleno =\ - \ {89},\n author = {Sophie Rose},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {4},\n pages = {608--611},\n title = {CALM: Mapping yoga practice for gestural\ - \ control to externalise traumatic experiences},\n track = {Work in Progress},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_89.pdf},\n year = {2023}\n\ - }\n" +- ENTRYTYPE: inproceedings + ID: Funk2005 + abstract: 'The central role of the face in social interaction and non-verbal communication + suggest we explore facial action as a means of musical expression. This paper + presents the design, implementation, and preliminary studies of a novel system + utilizing face detection and optic flow algorithms to associate facial movements + with sound synthesis in a topographically specific fashion. We report on our experience + with various gesture-to-sound mappings and applications, and describe our preliminary + experiments at musical performance using the system. ' + address: 'Vancouver, BC, Canada' + author: 'Funk, Mathias and Kuwabara, Kazuhiro and Lyons, Michael J.' + bibtex: "@inproceedings{Funk2005,\n abstract = {The central role of the face in\ + \ social interaction and non-verbal communication suggest we explore facial action\ + \ as a means of musical expression. 
This paper presents the design, implementation,\ + \ and preliminary studies of a novel system utilizing face detection and optic\ + \ flow algorithms to associate facial movements with sound synthesis in a topographically\ + \ specific fashion. We report on our experience with various gesture-to-sound\ + \ mappings and applications, and describe our preliminary experiments at musical\ + \ performance using the system. },\n address = {Vancouver, BC, Canada},\n author\ + \ = {Funk, Mathias and Kuwabara, Kazuhiro and Lyons, Michael J.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176750},\n issn = {2220-4806},\n keywords\ + \ = {Video-based musical interface; gesture-based interaction; facial expression;\ + \ facial therapy interface. },\n pages = {127--131},\n title = {Sonification of\ + \ Facial Actions for Musical Expression},\n url = {http://www.nime.org/proceedings/2005/nime2005_127.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176750 issn: 2220-4806 - month: May - numpages: 4 - pages: 608--611 - title: 'CALM: Mapping yoga practice for gestural control to externalise traumatic - experiences' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_89.pdf - year: 2023 + keywords: 'Video-based musical interface; gesture-based interaction; facial expression; + facial therapy interface. 
' + pages: 127--131 + title: Sonification of Facial Actions for Musical Expression + url: http://www.nime.org/proceedings/2005/nime2005_127.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_90 - abstract: 'We present SnakeSynth, a web-based lightweight audio synthesizer that - combines audio generated by a deep generative model and real-time continuous two-dimensional - (2D) input to create and control variable-length generative sounds through 2D - interaction gestures. Interaction gestures are touch and mobile-compatible with - analogies to strummed, bowed, and plucked musical instrument controls. Point-and-click - and drag-and-drop gestures directly control audio playback length and we show - that sound length and intensity are modulated by interactions with a programmable - 2D coordinate grid. Leveraging the speed and ubiquity of browser-based audio and - hardware acceleration in Google''s "TensorFlow.js" we generate time-varying high-fidelity - sounds with real-time interactivity. SnakeSynth adaptively reproduces and interpolates - between sounds encountered during model training, notably without long training - times, and we briefly discuss possible futures for deep generative models as an - interactive paradigm for musical expression.' - address: 'Mexico City, Mexico' - articleno: 90 - author: Eric Easthope - bibtex: "@article{nime2023_90,\n abstract = {We present SnakeSynth, a web-based\ - \ lightweight audio synthesizer that combines audio generated by a deep generative\ - \ model and real-time continuous two-dimensional (2D) input to create and control\ - \ variable-length generative sounds through 2D interaction gestures. Interaction\ - \ gestures are touch and mobile-compatible with analogies to strummed, bowed,\ - \ and plucked musical instrument controls. 
Point-and-click and drag-and-drop gestures\
    \ directly control audio playback length and we show that sound length and intensity\
    \ are modulated by interactions with a programmable 2D coordinate grid. Leveraging\
    \ the speed and ubiquity of browser-based audio and hardware acceleration in Google's\
    \ \"TensorFlow.js\" we generate time-varying high-fidelity sounds with real-time\
    \ interactivity. SnakeSynth adaptively reproduces and interpolates between sounds\
    \ encountered during model training, notably without long training times, and\
    \ we briefly discuss possible futures for deep generative models as an interactive\
    \ paradigm for musical expression.},\n address = {Mexico City, Mexico},\n articleno\
    \ = {90},\n author = {Eric Easthope},\n booktitle = {Proceedings of the International\
    \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\
    \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\
    \ = {8},\n pages = {612--619},\n title = {SnakeSynth: New Interactions for Generative\
    \ Audio Synthesis},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_90.pdf},\n\
    \ year = {2023}\n}\n"
  booktitle: Proceedings of the International Conference on New Interfaces for Musical
    Expression
  editor: Miguel Ortiz and Adnan Marquez-Borbon
  issn: 2220-4806
  month: May
  numpages: 8
  pages: 612--619
  title: 'SnakeSynth: New Interactions for Generative Audio Synthesis'
  track: Work in Progress
  url: http://nime.org/proceedings/2023/nime2023_90.pdf
  year: 2023


- ENTRYTYPE: inproceedings
  ID: Janer2005
  abstract: 'In this paper we present an example of the use of the singing voice as
    a controller for digital music synthesis. The analysis of the voice with spectral
    processing techniques, derived from the Short-Time Fourier Transform, provides
    ways of determining a performer''s vocal intentions. We demonstrate a prototype,
    in which the extracted vocal features drive the synthesis of a plucked bass guitar.
    The sound synthesis stage includes two different synthesis techniques, Physical
    Models and Spectral Morph.'
  address: 'Vancouver, BC, Canada'
  author: 'Janer, Jordi'
  bibtex: "@inproceedings{Janer2005,\n abstract = {In this paper we present an example\
    \ of the use of the singing voice as a controller for digital music synthesis.\
    \ The analysis of the voice with spectral processing techniques, derived from the\
    \ Short-Time Fourier Transform, provides ways of determining a performer's vocal\
    \ intentions. We demonstrate a prototype, in which the extracted vocal features\
    \ drive the synthesis of a plucked bass guitar. The sound synthesis stage includes\
    \ two different synthesis techniques, Physical Models and Spectral Morph.},\n address\
    \ = {Vancouver, BC, Canada},\n author = {Janer, Jordi},\n booktitle = {Proceedings\
    \ of the International Conference on New Interfaces for Musical Expression},\n\
    \ doi = {10.5281/zenodo.1176758},\n issn = {2220-4806},\n keywords = {Singing\
    \ voice, musical controller, sound synthesis, spectral processing. },\n pages\
    \ = {132--135},\n title = {Voice-controlled plucked bass guitar through two synthesis\
    \ techniques},\n url = {http://www.nime.org/proceedings/2005/nime2005_132.pdf},\n\
    \ year = {2005}\n}\n"
  booktitle: Proceedings of the International Conference on New Interfaces for Musical
    Expression
  doi: 10.5281/zenodo.1176758
  issn: 2220-4806
  keywords: 'Singing voice, musical controller, sound synthesis, spectral processing. 
' + pages: 132--135 + title: Voice-controlled plucked bass guitar through two synthesis techniques + url: http://www.nime.org/proceedings/2005/nime2005_132.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_91 - abstract: 'We present a system for interactive co-creation of expressive performances - of notated music using speech and gestures. The system provides real-time or near-real-time - dialog-based control of performance rendering and interaction in multiple modalities. - It is accessible to people regardless of their musical background via smartphones. - The system is trained using sheet music and associated performances, in particular - using notated performance directions and user-system interaction data to ground - performance directions in performances. Users can listen to an autonomously generated - performance or actively engage in the performance process. A speech- and gesture-based - feedback loop and online learning from past user interactions improve the accuracy - of the performance rendering control. There are two important assumptions behind - our approach: a) that many people can express nuanced aspects of expressive performance - using natural human expressive faculties, such as speech, voice, and gesture, - and b) that by doing so and hearing the music follow their direction with low - latency, they can enjoy playing the music that would otherwise be inaccessible - to them. The ultimate goal of this work is to enable fulfilling and accessible - music making experiences for a large number of people who are not currently musically - active.' - address: 'Mexico City, Mexico' - articleno: 91 - author: Ilya Borovik and Vladimir Viro - bibtex: "@article{nime2023_91,\n abstract = {We present a system for interactive\ - \ co-creation of expressive performances of notated music using speech and gestures.\ - \ The system provides real-time or near-real-time dialog-based control of performance\ - \ rendering and interaction in multiple modalities. 
It is accessible to people\
    \ regardless of their musical background via smartphones. The system is trained\
    \ using sheet music and associated performances, in particular using notated performance\
    \ directions and user-system interaction data to ground performance directions\
    \ in performances. Users can listen to an autonomously generated performance or\
    \ actively engage in the performance process. A speech- and gesture-based feedback\
    \ loop and online learning from past user interactions improve the accuracy of\
    \ the performance rendering control. There are two important assumptions behind\
    \ our approach: a) that many people can express nuanced aspects of expressive\
    \ performance using natural human expressive faculties, such as speech, voice,\
    \ and gesture, and b) that by doing so and hearing the music follow their direction\
    \ with low latency, they can enjoy playing the music that would otherwise be inaccessible\
    \ to them. The ultimate goal of this work is to enable fulfilling and accessible\
    \ music making experiences for a large number of people who are not currently\
    \ musically active.},\n address = {Mexico City, Mexico},\n articleno = {91},\n\
    \ author = {Ilya Borovik and Vladimir Viro},\n booktitle = {Proceedings of the\
    \ International Conference on New Interfaces for Musical Expression},\n editor\
    \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\
    \ numpages = {6},\n pages = {620--625},\n title = {Real-Time Co-Creation of Expressive\
    \ Music Performances Using Speech and Gestures},\n track = {Work in Progress},\n\
    \ url = {http://nime.org/proceedings/2023/nime2023_91.pdf},\n year = {2023}\n\
    }\n"
  booktitle: Proceedings of the International Conference on New Interfaces for Musical
    Expression
  editor: Miguel Ortiz and Adnan Marquez-Borbon
  issn: 2220-4806
  month: May
  numpages: 6
  pages: 620--625
  title: Real-Time Co-Creation of Expressive Music Performances Using Speech and Gestures
  track: Work in Progress
  url: http://nime.org/proceedings/2023/nime2023_91.pdf
  year: 2023


- ENTRYTYPE: inproceedings
  ID: Lehrman2005
  abstract: 'Electronic Musical Instrument Design is an excellent vehicle for bringing
    students from multiple disciplines together to work on projects, and help bridge
    the perennial gap between the arts and the sciences. This paper describes how at
    Tufts University, a school with no music technology program, students from the
    engineering (electrical, mechanical, and computer), music, performing arts, and
    visual arts areas use their complementary skills, and teach each other, to develop
    new devices and systems for music performance and control.'
  address: 'Vancouver, BC, Canada'
  author: 'Lehrman, Paul D. and Ryan, Todd M.'
  bibtex: "@inproceedings{Lehrman2005,\n abstract = {Electronic Musical Instrument\
    \ Design is an excellent vehicle for bringing students from multiple disciplines\
    \ together to work on projects, and help bridge the perennial gap between the arts\
    \ and the sciences. This paper describes how at Tufts University, a school with\
    \ no music technology program, students from the engineering (electrical, mechanical,\
    \ and computer), music, performing arts, and visual arts areas use their complementary\
    \ skills, and teach each other, to develop new devices and systems for music performance\
    \ and control.},\n address = {Vancouver, BC, Canada},\n author = {Lehrman, Paul\
    \ D. and Ryan, Todd M.},\n booktitle = {Proceedings of the International Conference\
    \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176768},\n\
    \ issn = {2220-4806},\n keywords = {Science education, music education, engineering,\
    \ electronic music, gesture controllers, MIDI. 
},\n pages = {136--139},\n title\ + \ = {Bridging the Gap Between Art and Science Education Through Teaching Electronic\ + \ Musical Instrument Design},\n url = {http://www.nime.org/proceedings/2005/nime2005_136.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176768 issn: 2220-4806 - month: May - numpages: 6 - pages: 620--625 - title: Real-Time Co-Creation of Expressive Music Performances Using Speech and Gestures - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_91.pdf - year: 2023 + keywords: 'Science education, music education, engineering, electronic music, gesture + controllers, MIDI. ' + pages: 136--139 + title: Bridging the Gap Between Art and Science Education Through Teaching Electronic + Musical Instrument Design + url: http://www.nime.org/proceedings/2005/nime2005_136.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_92 - abstract: 'This paper provides an entry into a decolonial approach to AI driven - music and sound arts by describing an ongoing artistic research project Dhvāni. - The project is a series of responsive, self-regulating, and autonomous installations - driven by Artificial Intelligence and Machine Learning and incorporating ritual - and sacred sounds from South Asia. Such mélange re-emphasizes and advocates for - the values of interconnectivity, codependence, network, and community with a decolonial - approach. By giving the AI an autonomous agency, the project aims to reimagine - the future of AI with an inter-subjective reciprocity in human-machine assemblages - transcending the technologically deterministic approach to AI-driven live art, - media arts and music. Through unpacking the project, this paper underscores the - necessity to dehegemonize the AI-driven music field towards a transcultural exchange, - thereby transcend the field’s Eurocentric bias.' 
- address: 'Mexico City, Mexico' - articleno: 92 - author: Budhaditya Chattopadhyay - bibtex: "@article{nime2023_92,\n abstract = {This paper provides an entry into a\ - \ decolonial approach to AI driven music and sound arts by describing an ongoing\ - \ artistic research project Dhvāni. The project is a series of responsive, self-regulating,\ - \ and autonomous installations driven by Artificial Intelligence and Machine Learning\ - \ and incorporating ritual and sacred sounds from South Asia. Such mélange re-emphasizes\ - \ and advocates for the values of interconnectivity, codependence, network, and\ - \ community with a decolonial approach. By giving the AI an autonomous agency,\ - \ the project aims to reimagine the future of AI with an inter-subjective reciprocity\ - \ in human-machine assemblages transcending the technologically deterministic\ - \ approach to AI-driven live art, media arts and music. Through unpacking the\ - \ project, this paper underscores the necessity to dehegemonize the AI-driven\ - \ music field towards a transcultural exchange, thereby transcend the field’s\ - \ Eurocentric bias.},\n address = {Mexico City, Mexico},\n articleno = {92},\n\ - \ author = {Budhaditya Chattopadhyay},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {3},\n pages = {626--628},\n title = {Dhvāni: Sacred Sounds and Decolonial\ - \ Machines},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_92.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Steiner2005 + abstract: 'The [hid] toolkit is a set of software objects for designingcomputer-based + gestural instruments. All too frequently,computer-based performers are tied to + the keyboard-mousemonitor model, narrowly constraining the range of possiblegestures. 
+ A multitude of gestural input devices are readilyavailable, making it easy to + utilize a broader range of gestures. Human Interface Devices (HIDs) such as joysticks,tablets, + and gamepads are cheap and can be good musicalcontrollers. Some even provide haptic + feedback. The [hid]toolkit provides a unified, consistent framework for gettinggestural + data from these devices, controlling the feedback,and mapping this data to the + desired output. The [hid]toolkit is built in Pd, which provides an ideal platform + forthis work, combining the ability to synthesize and controlaudio and video. + The addition of easy access to gesturaldata allows for rapid prototypes. A usable + environmentalso makes computer music instrument design accessible tonovices.' + address: 'Vancouver, BC, Canada' + author: 'Steiner, Hans-christoph' + bibtex: "@inproceedings{Steiner2005,\n abstract = {The [hid] toolkit is a set of\ + \ software objects for designingcomputer-based gestural instruments. All too frequently,computer-based\ + \ performers are tied to the keyboard-mousemonitor model, narrowly constraining\ + \ the range of possiblegestures. A multitude of gestural input devices are readilyavailable,\ + \ making it easy to utilize a broader range of gestures. Human Interface Devices\ + \ (HIDs) such as joysticks,tablets, and gamepads are cheap and can be good musicalcontrollers.\ + \ Some even provide haptic feedback. The [hid]toolkit provides a unified, consistent\ + \ framework for gettinggestural data from these devices, controlling the feedback,and\ + \ mapping this data to the desired output. The [hid]toolkit is built in Pd, which\ + \ provides an ideal platform forthis work, combining the ability to synthesize\ + \ and controlaudio and video. The addition of easy access to gesturaldata allows\ + \ for rapid prototypes. 
A usable environmentalso makes computer music instrument\ + \ design accessible tonovices.},\n address = {Vancouver, BC, Canada},\n author\ + \ = {Steiner, Hans-christoph},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176824},\n\ + \ issn = {2220-4806},\n keywords = {Instrument design, haptic feedback, gestural\ + \ control, HID },\n pages = {140--143},\n title = {[hid] toolkit: a Unified Framework\ + \ for Instrument Design},\n url = {http://www.nime.org/proceedings/2005/nime2005_140.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176824 issn: 2220-4806 - month: May - numpages: 3 - pages: 626--628 - title: 'Dhvāni: Sacred Sounds and Decolonial Machines' - track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_92.pdf - year: 2023 + keywords: 'Instrument design, haptic feedback, gestural control, HID ' + pages: 140--143 + title: "[hid] toolkit: a Unified Framework for Instrument Design" + url: http://www.nime.org/proceedings/2005/nime2005_140.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_93 - abstract: 'Spinal cord injury is one of the most serious causes of disability that - can affect people''s lives. In tetraplegia, the loss of mobility of the upper - limb has a devastating effect on the quality of life and independence of these - patients, so their rehabilitation is considered a crucial objective. We present - a tool for functional motor rehabilitation of the upper limb in patients with - spinal cord injury, based on the use of bio-sensors and the sonification of EMG - activity during the repetitive execution of a specific gesture. 
During the hospital - stay, the patient has a wide range of therapies available to improve motor function - or compensate for loss of mobility, including execution of different maneuvers. - The repetitive and continuous performance of these tasks is a key element in motor - recovery. However, in many cases, these tasks do not include sufficient feedback - mechanisms to help the patient or to motivate him/her during execution. Through - the sonification of movement and the design of adapted interaction strategies, - our research aims to offer a new therapeutic tool that musically transforms the - gesture and expands the patient''s mechanisms of expression, proprioception and - cognition, in order to optimize, correct and motivate movement.' - address: 'Mexico City, Mexico' - articleno: 93 - author: Jose M Corredera - bibtex: "@article{nime2023_93,\n abstract = {Spinal cord injury is one of the most\ - \ serious causes of disability that can affect people's lives. In tetraplegia,\ - \ the loss of mobility of the upper limb has a devastating effect on the quality\ - \ of life and independence of these patients, so their rehabilitation is considered\ - \ a crucial objective. We present a tool for functional motor rehabilitation of\ - \ the upper limb in patients with spinal cord injury, based on the use of bio-sensors\ - \ and the sonification of EMG activity during the repetitive execution of a specific\ - \ gesture. During the hospital stay, the patient has a wide range of therapies\ - \ available to improve motor function or compensate for loss of mobility, including\ - \ execution of different maneuvers. The repetitive and continuous performance\ - \ of these tasks is a key element in motor recovery. However, in many cases, these\ - \ tasks do not include sufficient feedback mechanisms to help the patient or to\ - \ motivate him/her during execution. 
Through the sonification of movement and\ - \ the design of adapted interaction strategies, our research aims to offer a new\ - \ therapeutic tool that musically transforms the gesture and expands the patient's\ - \ mechanisms of expression, proprioception and cognition, in order to optimize,\ - \ correct and motivate movement.},\n address = {Mexico City, Mexico},\n articleno\ - \ = {93},\n author = {Jose M Corredera},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {4},\n pages = {629--632},\n title = {EMG Sonification as a Tool for Functional\ - \ Rehabilitation of Spinal-Cord Injury.},\n track = {Work in Progress},\n url\ - \ = {http://nime.org/proceedings/2023/nime2023_93.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Makipatola2005b + abstract: 'An experimental study comparing different user interfaces for a virtual + drum is reported. Virtual here means that the drum is not a physical object. 16 + subjects played the drum on five different interfaces and two metronome patterns + trying to match their hits to the metronome clicks. Temporal accuracy of the playing + was evaluated. The subjects also rated the interfaces subjectively. The results + show that hitting the drum alternately from both sides with motion going through + the drum plate was less accurate than the traditional one sided hitting. A physical + stick was more accurate than a virtual computer graphic stick. Visual feedback + of the drum slightly increased accuracy compared to receiving only auditory feedback. + Most subjects evaluated the physical stick to offer a better feeling and to be + more pleasant than the virtual stick. 
' + address: 'Vancouver, BC, Canada' + author: 'Maki-patola, Teemu' + bibtex: "@inproceedings{Makipatola2005b,\n abstract = {An experimental study comparing\ + \ different user interfaces for a virtual drum is reported. Virtual here means\ + \ that the drum is not a physical object. 16 subjects played the drum on five\ + \ different interfaces and two metronome patterns trying to match their hits to\ + \ the metronome clicks. Temporal accuracy of the playing was evaluated. The subjects\ + \ also rated the interfaces subjectively. The results show that hitting the drum\ + \ alternately from both sides with motion going through the drum plate was less\ + \ accurate than the traditional one sided hitting. A physical stick was more accurate\ + \ than a virtual computer graphic stick. Visual feedback of the drum slightly\ + \ increased accuracy compared to receiving only auditory feedback. Most subjects\ + \ evaluated the physical stick to offer a better feeling and to be more pleasant\ + \ than the virtual stick. },\n address = {Vancouver, BC, Canada},\n author = {Maki-patola,\ + \ Teemu},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176784},\n issn = {2220-4806},\n\ + \ keywords = {Virtual drum, user interface, feedback, musical instrument design,\ + \ virtual reality, sound control, percussion instrument. },\n pages = {144--147},\n\ + \ title = {User Interface Comparison for Virtual Drums},\n url = {http://www.nime.org/proceedings/2005/nime2005_144.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176784 issn: 2220-4806 - month: May - numpages: 4 - pages: 629--632 - title: EMG Sonification as a Tool for Functional Rehabilitation of Spinal-Cord Injury. 
- track: Work in Progress - url: http://nime.org/proceedings/2023/nime2023_93.pdf - year: 2023 + keywords: 'Virtual drum, user interface, feedback, musical instrument design, virtual + reality, sound control, percussion instrument. ' + pages: 144--147 + title: User Interface Comparison for Virtual Drums + url: http://www.nime.org/proceedings/2005/nime2005_144.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_94 - abstract: 'This paper describes Improvise+=Chain, an audio-visual installation artwork - of autonomous musical performance using artificial intelligence technology. The - work is designed to provide the audience with an experience exploring the differences - between human and AI-based virtual musicians. Using a transformer decoder, we - developed a four-track (melody, bass, chords and accompaniment, and drums) symbolic - music generation model. The model generates each track in real time to create - an endless chain of phrases, and 3D visuals and LED lights represent the attention - information between four tracks, i.e., four virtual musicians, calculated within - the model. This work aims to highlight the differences for viewers to consider - between humans and artificial intelligence in music jams by visualizing the only - information virtual musicians can communicate with while humans interact in multiple - modals during the performance.' - address: 'Mexico City, Mexico' - articleno: 94 - author: Atsuya Kobayashi and Ryo Nishikado and Nao Tokui - bibtex: "@article{nime2023_94,\n abstract = {This paper describes Improvise+=Chain,\ - \ an audio-visual installation artwork of autonomous musical performance using\ - \ artificial intelligence technology. The work is designed to provide the audience\ - \ with an experience exploring the differences between human and AI-based virtual\ - \ musicians. Using a transformer decoder, we developed a four-track (melody, bass,\ - \ chords and accompaniment, and drums) symbolic music generation model. 
The model\ - \ generates each track in real time to create an endless chain of phrases, and\ - \ 3D visuals and LED lights represent the attention information between four tracks,\ - \ i.e., four virtual musicians, calculated within the model. This work aims to\ - \ highlight the differences for viewers to consider between humans and artificial\ - \ intelligence in music jams by visualizing the only information virtual musicians\ - \ can communicate with while humans interact in multiple modals during the performance.},\n\ - \ address = {Mexico City, Mexico},\n articleno = {94},\n author = {Atsuya Kobayashi\ - \ and Ryo Nishikado and Nao Tokui},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {4},\n pages = {633--636},\n title = {Improvise+=Chain: Listening to the Ensemble\ - \ Improvisation of an Autoregressive Generative Model},\n track = {Demos},\n url\ - \ = {http://nime.org/proceedings/2023/nime2023_94.pdf},\n year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Gutknecht2005 + abstract: 'This paper takes the reader through various elements of the GoingPublik + sound artwork for distributive ensemble and introduces the Realtime Score Synthesis + tool (RSS) used as a controller in the work. The collaboration between artists + and scientists, details concerning the experimental hardware and software, and + new theories of sound art are briefly explained and illustrated. The scope of + this project is too broad to be fully covered in this paper, therefore the selection + of topics made attempts to draw attention to the work itself and balance theory + with practice. 
' + address: 'Vancouver, BC, Canada' + author: 'Gutknecht, Jürg and Clay, Art and Frey, Thomas' + bibtex: "@inproceedings{Gutknecht2005,\n abstract = {This paper takes the reader\ + \ through various elements of the GoingPublik sound artwork for distributive ensemble\ + \ and introduces the Realtime Score Synthesis tool (RSS) used as a controller\ + \ in the work. The collaboration between artists and scientists, details concerning\ + \ the experimental hardware and software, and new theories of sound art are briefly\ + \ explained and illustrated. The scope of this project is too broad to be fully\ + \ covered in this paper, therefore the selection of topics made attempts to draw\ + \ attention to the work itself and balance theory with practice. },\n address\ + \ = {Vancouver, BC, Canada},\n author = {Gutknecht, J{\\''u}rg and Clay, Art and\ + \ Frey, Thomas},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176754},\n\ + \ issn = {2220-4806},\n keywords = {Mobile Multimedia, Wearable Computers, Score\ + \ Synthesis, Sound Art, System Research, HCIs },\n pages = {148--151},\n title\ + \ = {GoingPublik: Using Realtime Global Score Synthesis},\n url = {http://www.nime.org/proceedings/2005/nime2005_148.pdf},\n\ + \ year = {2005}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176754 + issn: 2220-4806 + keywords: 'Mobile Multimedia, Wearable Computers, Score Synthesis, Sound Art, System + Research, HCIs ' + pages: 148--151 + title: 'GoingPublik: Using Realtime Global Score Synthesis' + url: http://www.nime.org/proceedings/2005/nime2005_148.pdf + year: 2005 + + +- ENTRYTYPE: inproceedings + ID: Pellarin2005 + abstract: 'In this paper we describe a virtual instrument or a performance space, + placed at Høje Tåstrup train station in Denmark, which is meant to establish communicative + connections between 
strangers, by letting users of the system create soundscapes + together across the rails. We discuss mapping strategies and complexity and suggest + a possible solution for a final instance of our interactive musical performance + system.' + address: 'Vancouver, BC, Canada' + author: 'Pellarin, Lars and Böttcher, Niels and Olsen, Jakob M. and Gregersen, Ole + and Serafin, Stefania and Guglielmi, Michel' + bibtex: "@inproceedings{Pellarin2005,\n abstract = {In this paper we describe a\ + \ virtual instrument or a performance space, placed at H{\\o}je T{\\aa}strup train\ + \ station in Denmark, which is meant to establish communicative connections between\ + \ strangers, by letting users of the system create soundscapes together across\ + \ the rails. We discuss mapping strategies and complexity and suggest a possible\ + \ solution for a final instance of our interactive musical performance system.},\n\ + \ address = {Vancouver, BC, Canada},\n author = {Pellarin, Lars and B\\\"{o}ttcher,\ + \ Niels and Olsen, Jakob M. and Gregersen, Ole and Serafin, Stefania and Guglielmi,\ + \ Michel},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176798},\n issn = {2220-4806},\n\ + \ keywords = {Motion tracking, mapping strategies, public installation, multiple\ + \ participants music interfaces. 
},\n pages = {152--155},\n title = {Connecting\ + \ Strangers at a Train Station},\n url = {http://www.nime.org/proceedings/2005/nime2005_152.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176798 issn: 2220-4806 - month: May - numpages: 4 - pages: 633--636 - title: 'Improvise+=Chain: Listening to the Ensemble Improvisation of an Autoregressive - Generative Model' - track: Demos - url: http://nime.org/proceedings/2023/nime2023_94.pdf - year: 2023 + keywords: 'Motion tracking, mapping strategies, public installation, multiple participants + music interfaces. ' + pages: 152--155 + title: Connecting Strangers at a Train Station + url: http://www.nime.org/proceedings/2005/nime2005_152.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_95 - abstract: 'SketchSynth is an interface that allows users to create mappings between - synthesised sound and a graphical sketch input based on human cross-modal perception. - The project is rooted in the authors'' research which collected 2692 sound-sketches - from 178 participants representing their associations with various sounds. The - interface extracts sketch features in real-time that were shown to correlate with - sound characteristics and can be mapped to synthesis and audio effect parameters - via Open Sound Control (OSC). This modular approach allows for an easy integration - into an existing workflow and can be tailored to individual preferences. The interface - can be accessed online through a web-browser on a computer, laptop, smartphone - or tablet and does not require specialised hard- or software. We demonstrate SketchSynth - with an iPad for sketch input to control synthesis and audio effect parameters - in the Ableton Live digital audio workstation (DAW). A MIDI controller is used - to play notes and trigger pre-recorded accompaniment. 
This work serves as an example - of how perceptual research can help create strong, meaningful gesture-to-sound - mappings.' - address: 'Mexico City, Mexico' - articleno: 95 - author: Sebastian Lobbers and George Fazekas - bibtex: "@article{nime2023_95,\n abstract = {SketchSynth is an interface that allows\ - \ users to create mappings between synthesised sound and a graphical sketch input\ - \ based on human cross-modal perception. The project is rooted in the authors'\ - \ research which collected 2692 sound-sketches from 178 participants representing\ - \ their associations with various sounds. The interface extracts sketch features\ - \ in real-time that were shown to correlate with sound characteristics and can\ - \ be mapped to synthesis and audio effect parameters via Open Sound Control (OSC).\ - \ This modular approach allows for an easy integration into an existing workflow\ - \ and can be tailored to individual preferences. The interface can be accessed\ - \ online through a web-browser on a computer, laptop, smartphone or tablet and\ - \ does not require specialised hard- or software. We demonstrate SketchSynth with\ - \ an iPad for sketch input to control synthesis and audio effect parameters in\ - \ the Ableton Live digital audio workstation (DAW). A MIDI controller is used\ - \ to play notes and trigger pre-recorded accompaniment. 
This work serves as an\ - \ example of how perceptual research can help create strong, meaningful gesture-to-sound\ - \ mappings.},\n address = {Mexico City, Mexico},\n articleno = {95},\n author\ - \ = {Sebastian Lobbers and George Fazekas},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n editor\ - \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ - \ numpages = {5},\n pages = {637--641},\n title = {SketchSynth: a browser-based\ - \ sketching interface for sound control},\n track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_95.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Schiemer2005 + abstract: 'This paper describes software tools used to create java applications + for performing music using mobile phones. The tools provide a means for composers + working in the Pure Data composition environment to design and audition performances + using ensembles of mobile phones. These tools were developed as part of a larger + project motivated by the desire to allow large groups of non-expert players to + perform music based on just intonation using ubiquitous technology. The paper + discusses the process that replicates a Pure Data patch so that it will operate + within the hardware and software constraints of the Java 2 Micro Edition. It also + describes development of objects that will enable mobile phone performances to + be simulated accurately in PD and to audition microtonal tuning implemented using + MIDI in the j2me environment. These tools eliminate the need for composers to + compose for mobile phones by writing java code. In a single desktop application, + they offer the composer the flexibility to write music for multiple phones. 
' + address: 'Vancouver, BC, Canada' + author: 'Schiemer, Greg and Havryliv, Mark' + bibtex: "@inproceedings{Schiemer2005,\n abstract = {This paper describes software\ + \ tools used to create java applications for performing music using mobile phones.\ + \ The tools provide a means for composers working in the Pure Data composition\ + \ environment to design and audition performances using ensembles of mobile phones.\ + \ These tools were developed as part of a larger project motivated by the desire\ + \ to allow large groups of non-expert players to perform music based on just intonation\ + \ using ubiquitous technology. The paper discusses the process that replicates\ + \ a Pure Data patch so that it will operate within the hardware and software constraints\ + \ of the Java 2 Micro Edition. It also describes development of objects that will\ + \ enable mobile phone performances to be simulated accurately in PD and to audition\ + \ microtonal tuning implemented using MIDI in the j2me environment. These tools\ + \ eliminate the need for composers to compose for mobile phones by writing java\ + \ code. In a single desktop application, they offer the composer the flexibility\ + \ to write music for multiple phones. 
},\n address = {Vancouver, BC, Canada},\n\ + \ author = {Schiemer, Greg and Havryliv, Mark},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176812},\n issn = {2220-4806},\n keywords = {Java 2 Micro\ + \ Edition; j2me; Pure Data; PD; Real-Time Media Performance; Just Intonation.\ + \ },\n pages = {156--159},\n title = {Pocket Gamelan: a Pure Data interface for\ + \ mobile phones},\n url = {http://www.nime.org/proceedings/2005/nime2005_156.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176812 issn: 2220-4806 - month: May - numpages: 5 - pages: 637--641 - title: 'SketchSynth: a browser-based sketching interface for sound control' - track: Demos - url: http://nime.org/proceedings/2023/nime2023_95.pdf - year: 2023 + keywords: 'Java 2 Micro Edition; j2me; Pure Data; PD; Real-Time Media Performance; + Just Intonation. ' + pages: 156--159 + title: 'Pocket Gamelan: a Pure Data interface for mobile phones' + url: http://www.nime.org/proceedings/2005/nime2005_156.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_96 - abstract: 'This paper describes the Harvester, a DIY sampler and synthesizer. The - Harvester provides users with a low-cost, accessible platform for making music - with everyday sounds via open-source hardware and software tools that anyone can - use or modify. This paper goes over the motivation, methodology, features, and - use cases of the Harvester instrument, with the intention of the instrument being - demonstrated for people to play with and use at NIME 2023.' - address: 'Mexico City, Mexico' - articleno: 96 - author: Johann Diedrick - bibtex: "@article{nime2023_96,\n abstract = {This paper describes the Harvester,\ - \ a DIY sampler and synthesizer. 
The Harvester provides users with a low-cost,\ - \ accessible platform for making music with everyday sounds via open-source hardware\ - \ and software tools that anyone can use or modify. This paper goes over the motivation,\ - \ methodology, features, and use cases of the Harvester instrument, with the intention\ - \ of the instrument being demonstrated for people to play with and use at NIME\ - \ 2023.},\n address = {Mexico City, Mexico},\n articleno = {96},\n author = {Johann\ - \ Diedrick},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ - \ issn = {2220-4806},\n month = {May},\n numpages = {2},\n pages = {642--643},\n\ - \ title = {The Harvester: A DIY Sampler and Synthesizer - Demo},\n track = {Demos},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_96.pdf},\n year = {2023}\n\ +- ENTRYTYPE: inproceedings + ID: Birchfield2005 + abstract: 'This paper details the motivations, design, and realization of Sustainable, + a dynamic, robotic sound installation that employs a generative algorithm for + music and sound creation. The piece is comprised of seven autonomous water gong + nodes that are networked together by water tubes to distribute water throughout + the system. A water resource allocation algorithm guides this distribution process + and produces an ever-evolving sonic and visual texture. A simple set of behaviors + govern the individual gongs, and the system as a whole exhibits emergent properties + that yield local and large scale forms in sound and light. ' + address: 'Vancouver, BC, Canada' + author: 'Birchfield, David and Lorig, David and Phillips, Kelly' + bibtex: "@inproceedings{Birchfield2005,\n abstract = {This paper details the motivations,\ + \ design, and realization of Sustainable, a dynamic, robotic sound installation\ + \ that employs a generative algorithm for music and sound creation. 
The piece\ + \ is comprised of seven autonomous water gong nodes that are networked together\ + \ by water tubes to distribute water throughout the system. A water resource allocation\ + \ algorithm guides this distribution process and produces an ever-evolving sonic\ + \ and visual texture. A simple set of behaviors govern the individual gongs, and\ + \ the system as a whole exhibits emergent properties that yield local and large\ + \ scale forms in sound and light. },\n address = {Vancouver, BC, Canada},\n author\ + \ = {Birchfield, David and Lorig, David and Phillips, Kelly},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176705},\n issn = {2220-4806},\n keywords = {computing,dynamic\ + \ systems,evolutionary,generative arts,installation art,music,robotics,sculpture,sound},\n\ + \ pages = {160--163},\n title = {Sustainable: a dynamic, robotic, sound installation},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_160.pdf},\n year = {2005}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176705 issn: 2220-4806 - month: May - numpages: 2 - pages: 642--643 - title: 'The Harvester: A DIY Sampler and Synthesizer - Demo' - track: Demos - url: http://nime.org/proceedings/2023/nime2023_96.pdf - year: 2023 + keywords: 'computing,dynamic systems,evolutionary,generative arts,installation art,music,robotics,sculpture,sound' + pages: 160--163 + title: 'Sustainable: a dynamic, robotic, sound installation' + url: http://www.nime.org/proceedings/2005/nime2005_160.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_97 - abstract: 'The Kraakavera (a portmanteau of Kraakdoos—aka the Crackle box, and “calavera”—i.e., - skull in Spanish) is an instrument that honours Michel Waisvisz’s memory by tributing - one of his classic instruments—an exemplary of 
circuit bending that originated - from STEIM in the 1960s. Inspired by the original design which used six metal - contacts as inputs, I have used conductive paint to paint six pads on a ceramic - skull which interact with the Kraakdoos circuit (using a uA709 IC). The skull - depicts a sugar skull which is a traditional Mexican sweet that is often seen - in altars to honour diseased relatives and loved ones during the Day of the Dead, - but that is also consumed as a treat by children during these festivities. In - this case, I have constructed an altar for Waisvisz, which doubles as an instrument, - where the sugar skull—the centrepiece of the altar (below a picture of Waisvisz) - serves both as traditional decoration but also the main point of contact with - the instrument. Hence, the altar invites the musician to pay their respects by - playing the instrument through the sugar skull. The Kraakavera also features a - second mode which can be accessed by patching the skull’s inputs to another circuit - which features a Trill Craft capacitive sensing board and a Bela board, which - processes a secondary sound output consisting of a sample of a ceramic whistle - running through a granular synthesizer patched in Pure Data (corresponding to - the six pads on the skull). Lastly, the Kraakavera presents a syncretism of Mexican - folklore and circuit bending traditions and a juxtaposition of classic and upcoming - DMIs.' - address: 'Mexico City, Mexico' - articleno: 97 - author: Juan P Martinez Avila - bibtex: "@article{nime2023_97,\n abstract = {The Kraakavera (a portmanteau of Kraakdoos—aka\ - \ the Crackle box, and “calavera”—i.e., skull in Spanish) is an instrument that\ - \ honours Michel Waisvisz’s memory by tributing one of his classic instruments—an\ - \ exemplary of circuit bending that originated from STEIM in the 1960s. 
Inspired\ - \ by the original design which used six metal contacts as inputs, I have used\ - \ conductive paint to paint six pads on a ceramic skull which interact with the\ - \ Kraakdoos circuit (using a uA709 IC). The skull depicts a sugar skull which\ - \ is a traditional Mexican sweet that is often seen in altars to honour diseased\ - \ relatives and loved ones during the Day of the Dead, but that is also consumed\ - \ as a treat by children during these festivities. In this case, I have constructed\ - \ an altar for Waisvisz, which doubles as an instrument, where the sugar skull—the\ - \ centrepiece of the altar (below a picture of Waisvisz) serves both as traditional\ - \ decoration but also the main point of contact with the instrument. Hence, the\ - \ altar invites the musician to pay their respects by playing the instrument through\ - \ the sugar skull. The Kraakavera also features a second mode which can be accessed\ - \ by patching the skull’s inputs to another circuit which features a Trill Craft\ - \ capacitive sensing board and a Bela board, which processes a secondary sound\ - \ output consisting of a sample of a ceramic whistle running through a granular\ - \ synthesizer patched in Pure Data (corresponding to the six pads on the skull).\ - \ Lastly, the Kraakavera presents a syncretism of Mexican folklore and circuit\ - \ bending traditions and a juxtaposition of classic and upcoming DMIs.},\n address\ - \ = {Mexico City, Mexico},\n articleno = {97},\n author = {Juan P Martinez Avila},\n\ +- ENTRYTYPE: inproceedings + ID: Rodrigues2005 + abstract: 'We present our work in the development of an interface for an actor/singer + and its use in performing. Our work combines aspects of theatrical music with + technology. Our interface has allowed the development of a new vocabulary for + musical and theatrical expression and the possibility for merging classical and + experimental music. 
It gave rise to a strong, strange, unpredictable, yet coherent, + "character" and opens up the possibility for a full performance that will explore + aspects of voice, theatrical music and, in the future, image projection. ' + address: 'Vancouver, BC, Canada' + author: 'Rodrigues, Paulo Maria and Girão, Luis Miguel and Gehlhaar, Rolf' + bibtex: "@inproceedings{Rodrigues2005,\n abstract = {We present our work in the\ + \ development of an interface for an actor/singer and its use in performing. Our\ + \ work combines aspects of theatrical music with technology. Our interface has\ + \ allowed the development of a new vocabulary for musical and theatrical expression\ + \ and the possibility for merging classical and experimental music. It gave rise\ + \ to a strong, strange, unpredictable, yet coherent, \"character\" and opens up\ + \ the possibility for a full performance that will explore aspects of voice, theatrical\ + \ music and, in the future, image projection. },\n address = {Vancouver, BC, Canada},\n\ + \ author = {Rodrigues, Paulo Maria and Gir\\~{a}o, Luis Miguel and Gehlhaar, Rolf},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ - \ = {2220-4806},\n month = {May},\n numpages = {2},\n pages = {644--645},\n title\ - \ = {Kraakavera: A Tribute to Michel Waisvisz},\n track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_97.pdf},\n\ - \ year = {2023}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1176808},\n issn = {2220-4806},\n\ + \ keywords = {Theatrical music, computer interaction, voice, gestural control.\ + \ },\n pages = {164--167},\n title = {CyberSong},\n url = {http://www.nime.org/proceedings/2005/nime2005_164.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 
10.5281/zenodo.1176808 issn: 2220-4806 - month: May - numpages: 2 - pages: 644--645 - title: 'Kraakavera: A Tribute to Michel Waisvisz' - track: Demos - url: http://nime.org/proceedings/2023/nime2023_97.pdf - year: 2023 + keywords: 'Theatrical music, computer interaction, voice, gestural control. ' + pages: 164--167 + title: CyberSong + url: http://www.nime.org/proceedings/2005/nime2005_164.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_98 - abstract: 'MaxMSP is a visual programming language for creating interactive audiovisual - media that has found great success as a flexible and accessible option for computer - music. However, the visual interface requires manual object placement and connection, - which can be inefficient. Automated patch editing is possible either by visual - programming with the [thispatcher] object or text-based programming with the [js] - object. However, these objects cannot automatically create and save new patches, - and they operate at run-time only, requiring live input to trigger patch construction. There - is no solution for automated creation of multiple patches at \textitcompile-time, - such that the constructed patches do not contain their own constructors. To this - end, we present MaxPy, an open-source Python package for programmatic construction - and manipulation of MaxMSP patches. MaxPy replaces the manual actions of placing - objects, connecting patchcords, and saving patch files with text-based Python - functions, thus enabling dynamic, procedural, high-volume patch generation at - compile-time. MaxPy also includes the ability to import existing patches, allowing - users to move freely between text-based Python programming and visual programming - with the Max GUI. MaxPy enables composers, programmers, and creators to explore - expanded possibilities for complex, dynamic, and algorithmic patch construction - through text-based Python programming of MaxMSP.' 
- address: 'Mexico City, Mexico' - articleno: 98 - author: Ranger Y Liu and Satchel Peterson and Richard T Lee and Mark Santolucito - bibtex: "@article{nime2023_98,\n abstract = {MaxMSP is a visual programming language\ - \ for creating interactive audiovisual media that has found great success as a\ - \ flexible and accessible option for computer music. However, the visual interface\ - \ requires manual object placement and connection, which can be inefficient. Automated\ - \ patch editing is possible either by visual programming with the [thispatcher]\ - \ object or text-based programming with the [js] object. However, these objects\ - \ cannot automatically create and save new patches, and they operate at run-time\ - \ only, requiring live input to trigger patch construction. There is no solution\ - \ for automated creation of multiple patches at \\textit{compile-time}, such that\ - \ the constructed patches do not contain their own constructors. To this end,\ - \ we present MaxPy, an open-source Python package for programmatic construction\ - \ and manipulation of MaxMSP patches. MaxPy replaces the manual actions of placing\ - \ objects, connecting patchcords, and saving patch files with text-based Python\ - \ functions, thus enabling dynamic, procedural, high-volume patch generation at\ - \ compile-time. MaxPy also includes the ability to import existing patches, allowing\ - \ users to move freely between text-based Python programming and visual programming\ - \ with the Max GUI. 
MaxPy enables composers, programmers, and creators to explore\ - \ expanded possibilities for complex, dynamic, and algorithmic patch construction\ - \ through text-based Python programming of MaxMSP.},\n address = {Mexico City,\ - \ Mexico},\n articleno = {98},\n author = {Ranger Y Liu and Satchel Peterson and\ - \ Richard T Lee and Mark Santolucito},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ - \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ - \ = {4},\n pages = {646--649},\n title = {MaxPy: An open-source Python package\ - \ for text-based generation of MaxMSP patches},\n track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_98.pdf},\n\ - \ year = {2023}\n}\n" +- ENTRYTYPE: inproceedings + ID: Allen2005 + abstract: 'This paper describes the development, function andperformance contexts + of a digital musical instrument called "boomBox". The instrument is a wireless, + orientation-awarelow-frequency, high-amplitude human motion controller forlive + and sampled sound. The instrument has been used inperformance and sound installation + contexts. I describe someof what I have learned from the project herein.' + address: 'Vancouver, BC, Canada' + author: 'Allen, Jamie' + bibtex: "@inproceedings{Allen2005,\n abstract = {This paper describes the development,\ + \ function andperformance contexts of a digital musical instrument called \"boomBox\"\ + . The instrument is a wireless, orientation-awarelow-frequency, high-amplitude\ + \ human motion controller forlive and sampled sound. The instrument has been used\ + \ inperformance and sound installation contexts. 
I describe someof what I have\ + \ learned from the project herein.},\n address = {Vancouver, BC, Canada},\n author\ + \ = {Allen, Jamie},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176691},\n\ + \ issn = {2220-4806},\n keywords = {Visceral control, sample manipulation, Bluetooth®,\ + \ metaphor, remutualizing instrument, Human Computer Interaction.},\n pages =\ + \ {168--171},\n title = {boomBox},\n url = {http://www.nime.org/proceedings/2005/nime2005_168.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176691 issn: 2220-4806 - month: May - numpages: 4 - pages: 646--649 - title: 'MaxPy: An open-source Python package for text-based generation of MaxMSP - patches' - track: Demos - url: http://nime.org/proceedings/2023/nime2023_98.pdf - year: 2023 + keywords: 'Visceral control, sample manipulation, Bluetooth®, metaphor, remutualizing + instrument, Human Computer Interaction.' + pages: 168--171 + title: boomBox + url: http://www.nime.org/proceedings/2005/nime2005_168.pdf + year: 2005 -- ENTRYTYPE: article - ID: nime2023_99 - abstract: 'In this demo we present an interactive object called Barahúnda Boba, - that was developed through the exploration of Quito’s identity (a city placed - in the mountains in Latin América). The product is an audio container system that - reproduces environmental sounds of the city to preserve the memory of Quito. It - was built after studying the Baroque’s concepts, as a period, extrapolated to - the Baroque’s culture. The program that plays and stores the audio is written - originally in JavaScript under the p5.js’s library. The object is a decorative - product, handcrafted in pine wood. The components are assembled in an Arduino - controller and they are embedded in the product. 
Although the object has a user - interface, the product (just like the noise of the city) can not be fully controlled.' - address: 'Mexico City, Mexico' - articleno: 99 - author: Xavier Barriga-Abril and Andres Basantes - bibtex: "@article{nime2023_99,\n abstract = {In this demo we present an interactive\ - \ object called Barahúnda Boba, that was developed through the exploration of\ - \ Quito’s identity (a city placed in the mountains in Latin América). The product\ - \ is an audio container system that reproduces environmental sounds of the city\ - \ to preserve the memory of Quito. It was built after studying the Baroque’s concepts,\ - \ as a period, extrapolated to the Baroque’s culture. The program that plays and\ - \ stores the audio is written originally in JavaScript under the p5.js’s library.\ - \ The object is a decorative product, handcrafted in pine wood. The components\ - \ are assembled in an Arduino controller and they are embedded in the product.\ - \ Although the object has a user interface, the product (just like the noise of\ - \ the city) can not be fully controlled.},\n address = {Mexico City, Mexico},\n\ - \ articleno = {99},\n author = {Xavier Barriga-Abril and Andres Basantes},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ - \ month = {May},\n numpages = {3},\n pages = {650--652},\n title = {Missing the\ - \ hubbub: Memory and Identity in the Interactive Audio},\n track = {Demos},\n\ - \ url = {http://nime.org/proceedings/2023/nime2023_99.pdf},\n year = {2023}\n\ +- ENTRYTYPE: inproceedings + ID: Loscos2005 + abstract: 'Using a wah-wah pedal guitar is something guitar players have to learn. + Recently, more intuitive ways to control such effect have been proposed. 
In this + direction, the Wahwactor system controls a wah-wah transformation in real-time + using the guitar player’s voice, more precisely, using the performer [wa-wa] utterances. + To come up with this system, different vocal features derived from spectral analysis + have been studied as candidates for being used as control parameters. This paper + details the results of the study and presents the implementation of the whole + system.' + address: 'Vancouver, BC, Canada' + author: 'Loscos, Alex and Aussenac, Thomas' + bibtex: "@inproceedings{Loscos2005,\n abstract = {Using a wah-wah pedal guitar is\ + \ something guitar players have to learn. Recently, more intuitive ways to control\ + \ such effect have been proposed. In this direction, the Wahwactor system controls\ + \ a wah-wah transformation in real-time using the guitar player’s voice, more\ + \ precisely, using the performer [wa-wa] utterances. To come up with this system,\ + \ different vocal features derived from spectral analysis have been studied as\ + \ candidates for being used as control parameters. 
This paper details the results\ + \ of the study and presents the implementation of the whole system.},\n address\ + \ = {Vancouver, BC, Canada},\n author = {Loscos, Alex and Aussenac, Thomas},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176776},\n issn = {2220-4806},\n\ + \ pages = {172--175},\n title = {The wahwactor: a voice controlled wah-wah pedal},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_172.pdf},\n year = {2005}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - editor: Miguel Ortiz and Adnan Marquez-Borbon + doi: 10.5281/zenodo.1176776 issn: 2220-4806 - month: May - numpages: 3 - pages: 650--652 - title: 'Missing the hubbub: Memory and Identity in the Interactive Audio' - track: Demos - url: http://nime.org/proceedings/2023/nime2023_99.pdf - year: 2023 + pages: 172--175 + title: 'The wahwactor: a voice controlled wah-wah pedal' + url: http://www.nime.org/proceedings/2005/nime2005_172.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: rrooyen2017 - abstract: 'Percussion robots have successfully used a variety of actuator technologies - to activate a wide array of striking mechanisms. Popular types of actuators include - solenoids and DC motors. However, the use of industrial strength voice coil actuators - provides a compelling alternative given a desirable set of heterogeneous features - and requirements that span traditional devices. Their characteristics such as - high acceleration and accurate positioning enable the exploration of rendering - highly accurate and expressive percussion performances.' - address: 'Copenhagen, Denmark' - author: Robert Van Rooyen and Andrew Schloss and George Tzanetakis - bibtex: "@inproceedings{rrooyen2017,\n abstract = {Percussion robots have successfully\ - \ used a variety of actuator technologies to activate a wide array of striking\ - \ mechanisms. 
Popular types of actuators include solenoids and DC motors. However,\ - \ the use of industrial strength voice coil actuators provides a compelling alternative\ - \ given a desirable set of heterogeneous features and requirements that span traditional\ - \ devices. Their characteristics such as high acceleration and accurate positioning\ - \ enable the exploration of rendering highly accurate and expressive percussion\ - \ performances.},\n address = {Copenhagen, Denmark},\n author = {Robert Van Rooyen\ - \ and Andrew Schloss and George Tzanetakis},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176149},\n issn = {2220-4806},\n pages = {1--6},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {Voice Coil Actuators for Percussion\ - \ Robotics},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0001.pdf},\n\ - \ year = {2017}\n}\n" + ID: Carter2005 + abstract: 'In this paper, we describe a course of research investigating thepotential + for new types of music made possible by locationtracking and wireless technologies. + Listeners walk arounddowntown Culver City, California and explore a new type ofmusical + album by mixing together songs and stories based ontheir movement. By using mobile + devices as an interface, wecan create new types of musical experiences that allowlisteners + to take a more interactive approach to an album.' + address: 'Vancouver, BC, Canada' + author: 'Carter, William and Liu, Leslie S.' + bibtex: "@inproceedings{Carter2005,\n abstract = {In this paper, we describe a course\ + \ of research investigating thepotential for new types of music made possible\ + \ by locationtracking and wireless technologies. Listeners walk arounddowntown\ + \ Culver City, California and explore a new type ofmusical album by mixing together\ + \ songs and stories based ontheir movement. 
By using mobile devices as an interface,\ + \ wecan create new types of musical experiences that allowlisteners to take a\ + \ more interactive approach to an album.},\n address = {Vancouver, BC, Canada},\n\ + \ author = {Carter, William and Liu, Leslie S.},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176723},\n issn = {2220-4806},\n keywords = {Mobile Music,\ + \ Digital Soundscape, Location-Based Entertainment, Mobility, Interactive Music,\ + \ Augmented Reality },\n pages = {176--179},\n title = {Location33: A Mobile Musical},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_176.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176149 + doi: 10.5281/zenodo.1176723 issn: 2220-4806 - pages: 1--6 - publisher: Aalborg University Copenhagen - title: Voice Coil Actuators for Percussion Robotics - url: http://www.nime.org/proceedings/2017/nime2017_paper0001.pdf - year: 2017 + keywords: 'Mobile Music, Digital Soundscape, Location-Based Entertainment, Mobility, + Interactive Music, Augmented Reality ' + pages: 176--179 + title: 'Location33: A Mobile Musical' + url: http://www.nime.org/proceedings/2005/nime2005_176.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: mdonneaud2017 - abstract: 'We present a textile pressure sensor matrix, designed to be used as a - musical multi-touch input device. An evaluation of our design demonstrated that - the sensors pressure response profile fits a logarithmic curve (R = 0.98). The - input delay of the sensor is 2.1ms. The average absolute error in one direction - of the sensor was measured to be less than 10% of one of the matrix''s strips - (M = 1.8mm, SD = 1.37mm). 
We intend this technology to be easy to use and implement - by experts and novices alike: We ensure the ease of use by providing a host application - that tracks touch points and passes these on as OSC or MIDI messages. We make - our design easy to implement by providing open source software and hardware and - by choosing evaluation methods that use accessible tools, making quantitative - comparisons between different branches of the design easy. We chose to work with - textile to take advantage of its tactile properties and its malleability of form - and to pay tribute to textile''s rich cultural heritage. ' - address: 'Copenhagen, Denmark' - author: Maurin Donneaud and Cedric Honnet and Paul Strohmeier - bibtex: "@inproceedings{mdonneaud2017,\n abstract = {We present a textile pressure\ - \ sensor matrix, designed to be used as a musical multi-touch input device. An\ - \ evaluation of our design demonstrated that the sensors pressure response profile\ - \ fits a logarithmic curve (R = 0.98). The input delay of the sensor is 2.1ms.\ - \ The average absolute error in one direction of the sensor was measured to be\ - \ less than 10% of one of the matrix's strips (M = 1.8mm, SD = 1.37mm). We intend\ - \ this technology to be easy to use and implement by experts and novices alike:\ - \ We ensure the ease of use by providing a host application that tracks touch\ - \ points and passes these on as OSC or MIDI messages. We make our design easy\ - \ to implement by providing open source software and hardware and by choosing\ - \ evaluation methods that use accessible tools, making quantitative comparisons\ - \ between different branches of the design easy. We chose to work with textile\ - \ to take advantage of its tactile properties and its malleability of form and\ - \ to pay tribute to textile's rich cultural heritage. 
},\n address = {Copenhagen,\ - \ Denmark},\n author = {Maurin Donneaud and Cedric Honnet and Paul Strohmeier},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176151},\n issn = {2220-4806},\n\ - \ pages = {7--12},\n publisher = {Aalborg University Copenhagen},\n title = {Designing\ - \ a Multi-Touch eTextile for Music Performances},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0002.pdf},\n\ - \ year = {2017}\n}\n" + ID: Bardos2005 + abstract: 'Bangarama is a music controller using headbanging as the primary interaction + metaphor. It consists of a head-mounted tilt sensor and aguitar-shaped controller + that does not require complex finger positions. We discuss the specific challenges + of designing and building this controller to create a simple, yet responsive and + playable instrument, and show how ordinary materials such as plywood, tinfoil, + and copper wire can be turned into a device that enables a fun, collaborative + music-making experience.' + address: 'Vancouver, BC, Canada' + author: 'Bardos, Laszlo and Korinek, Stefan and Lee, Eric and Borchers, Jan' + bibtex: "@inproceedings{Bardos2005,\n abstract = {Bangarama is a music controller\ + \ using headbanging as the primary interaction metaphor. It consists of a head-mounted\ + \ tilt sensor and aguitar-shaped controller that does not require complex finger\ + \ positions. 
We discuss the specific challenges of designing and building this\ + \ controller to create a simple, yet responsive and playable instrument, and show\ + \ how ordinary materials such as plywood, tinfoil, and copper wire can be turned\ + \ into a device that enables a fun, collaborative music-making experience.},\n\ + \ address = {Vancouver, BC, Canada},\n author = {Bardos, Laszlo and Korinek, Stefan\ + \ and Lee, Eric and Borchers, Jan},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176699},\n\ + \ issn = {2220-4806},\n keywords = {head movements, music controllers, interface\ + \ design, input devices },\n pages = {180--183},\n title = {Bangarama: Creating\ + \ Music With Headbanging},\n url = {http://www.nime.org/proceedings/2005/nime2005_180.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176151 + doi: 10.5281/zenodo.1176699 issn: 2220-4806 - pages: 7--12 - publisher: Aalborg University Copenhagen - title: Designing a Multi-Touch eTextile for Music Performances - url: http://www.nime.org/proceedings/2017/nime2017_paper0002.pdf - year: 2017 + keywords: 'head movements, music controllers, interface design, input devices ' + pages: 180--183 + title: 'Bangarama: Creating Music With Headbanging' + url: http://www.nime.org/proceedings/2005/nime2005_180.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: pwilliams2017 - abstract: "While there are a great variety of digital musical interfaces available\ - \ to the working musician, few o\ner the level of immediate, nuanced and instinctive\ - \ control that one\nnds in an acoustic shaker. bEADS is a prototype of a digital\ - \ musical instrument that utilises the gestural vocabulary associated with shaken\ - \ idiophones and expands on the techniques and sonic possibilities associated\ - \ with them. 
By using a bespoke physically informed synthesis engine, in conjunction\ - \ with accelerometer and pressure sensor data, an actuated handheld instrument\ - \ has been built that allows for quickly switching between widely di\nering percussive\ - \ sound textures. The prototype has been evaluated by three experts with di\n\ - erent levels of involvement in professional music making." - address: 'Copenhagen, Denmark' - author: Peter Williams and Daniel Overholt - bibtex: "@inproceedings{pwilliams2017,\n abstract = {While there are a great variety\ - \ of digital musical interfaces available to the working musician, few o\ner the\ - \ level of immediate, nuanced and instinctive control that one\nnds in an acoustic\ - \ shaker. bEADS is a prototype of a digital musical instrument that utilises\ - \ the gestural vocabulary associated with shaken idiophones and expands on the\ - \ techniques and sonic possibilities associated with them. By using a bespoke\ - \ physically informed synthesis engine, in conjunction with accelerometer and\ - \ pressure sensor data, an actuated handheld instrument has been built that allows\ - \ for quickly switching between widely di\nering percussive sound textures. The\ - \ prototype has been evaluated by three experts with di\nerent levels of involvement\ - \ in professional music making.},\n address = {Copenhagen, Denmark},\n author\ - \ = {Peter Williams and Daniel Overholt},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176153},\n\ - \ issn = {2220-4806},\n pages = {13--18},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {bEADS Extended Actuated Digital Shaker},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0003.pdf},\n\ - \ year = {2017}\n}\n" + ID: Barbosa2005 + abstract: 'In recent years Computer Network-Music has increasingly captured the + attention of the Computer Music Community. 
With the advent of Internet communication, + geographical displacement amongst the participants of a computer mediated music + performance achieved world wide extension. However, when established over long + distance networks, this form of musical communication has a fundamental problem: + network latency (or net-delay) is an impediment for real-time collaboration. From + a recent study, carried out by the authors, a relation between network latency + tolerance and Music Tempo was established. This result emerged from an experiment, + in which simulated network latency conditions were applied to the performance + of different musicians playing jazz standard tunes. The Public Sound Objects (PSOs) + project is web-based shared musical space, which has been an experimental framework + to implement and test different approaches for on-line music communication. This + paper describe features implemented in the latest version of the PSOs system, + including the notion of a network-music instrument incorporating latency as a + software function, by dynamically adapting its tempo to the communication delay + measured in real-time.' + address: 'Vancouver, BC, Canada' + author: 'Barbosa, Alvaro and Cardoso, Jorge and Geiger, Günter' + bibtex: "@inproceedings{Barbosa2005,\n abstract = {In recent years Computer Network-Music\ + \ has increasingly captured the attention of the Computer Music Community. With\ + \ the advent of Internet communication, geographical displacement amongst the\ + \ participants of a computer mediated music performance achieved world wide extension.\ + \ However, when established over long distance networks, this form of musical\ + \ communication has a fundamental problem: network latency (or net-delay) is an\ + \ impediment for real-time collaboration. From a recent study, carried out by\ + \ the authors, a relation between network latency tolerance and Music Tempo was\ + \ established. 
This result emerged from an experiment, in which simulated network\ + \ latency conditions were applied to the performance of different musicians playing\ + \ jazz standard tunes. The Public Sound Objects (PSOs) project is web-based shared\ + \ musical space, which has been an experimental framework to implement and test\ + \ different approaches for on-line music communication. This paper describe features\ + \ implemented in the latest version of the PSOs system, including the notion of\ + \ a network-music instrument incorporating latency as a software function, by\ + \ dynamically adapting its tempo to the communication delay measured in real-time.},\n\ + \ address = {Vancouver, BC, Canada},\n author = {Barbosa, Alvaro and Cardoso,\ + \ Jorge and Geiger, G\\''{u}nter},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176697},\n\ + \ issn = {2220-4806},\n keywords = {Network Music Instruments; Latency in Real-Time\ + \ Performance; Interface-Decoupled Electronic Musical Instruments; Behavioral\ + \ Driven Interfaces; Collaborative Remote Music Performance; },\n pages = {184--187},\n\ + \ title = {Network Latency Adaptive Tempo in the Public Sound Objects System},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_184.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176153 + doi: 10.5281/zenodo.1176697 issn: 2220-4806 - pages: 13--18 - publisher: Aalborg University Copenhagen - title: bEADS Extended Actuated Digital Shaker - url: http://www.nime.org/proceedings/2017/nime2017_paper0003.pdf - year: 2017 + keywords: 'Network Music Instruments; Latency in Real-Time Performance; Interface-Decoupled + Electronic Musical Instruments; Behavioral Driven Interfaces; Collaborative Remote + Music Performance; ' + pages: 184--187 + title: Network Latency Adaptive Tempo in the Public 
Sound Objects System + url: http://www.nime.org/proceedings/2005/nime2005_184.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: rmichon2017 - abstract: 'Mobile devices constitute a generic platform to make standalone musical - instruments for live performance. However, they were not designed for such use - and have multiple limitations when compared to other types of instruments. We - introduce a framework to quickly design and prototype passive mobile device augmentations - to leverage existing features of the device for the end goal of mobile musical - instruments. An extended list of examples is provided and the results of a workshop, - organized partly to evaluate our framework, are provided.' - address: 'Copenhagen, Denmark' - author: Romain Michon and Julius O. Smith and Matthew Wright and Chris Chafe and - John Granzow and Ge Wang - bibtex: "@inproceedings{rmichon2017,\n abstract = {Mobile devices constitute a generic\ - \ platform to make standalone musical instruments for live performance. However,\ - \ they were not designed for such use and have multiple limitations when compared\ - \ to other types of instruments. We introduce a framework to quickly design and\ - \ prototype passive mobile device augmentations to leverage existing features\ - \ of the device for the end goal of mobile musical instruments. An extended list\ - \ of examples is provided and the results of a workshop, organized partly to evaluate\ - \ our framework, are provided.},\n address = {Copenhagen, Denmark},\n author =\ - \ {Romain Michon and Julius O. 
Smith and Matthew Wright and Chris Chafe and John\ - \ Granzow and Ge Wang},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176155},\n\ - \ issn = {2220-4806},\n pages = {19--24},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Passively Augmenting Mobile Devices Towards Hybrid Musical Instrument\ - \ Design},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0004.pdf},\n\ - \ year = {2017}\n}\n" + ID: Villar2005 + abstract: 'We present the Pin&Play&Perform system: an interface inthe form of a + tablet on which a number of physical controlscan be added, removed and arranged + on the fly. These controls can easily be mapped to existing music sofware usingthe + MIDI protocol. The interface provides a mechanism fordirect manipulation of application + parameters and eventsthrough a set of familiar controls, while also encouraging + ahigh degree of customisation through the ability to arrange,rearrange and annotate + the spatial layout of the interfacecomponents on the surface of the tablet.The + paper describes how we have realized this concept using the Pin&Play technology. + As an application example, wedescribe our experiences in using our interface in + conjunction with Propellerheads'' Reason, a popular piece of musicsynthesis software.' + address: 'Vancouver, BC, Canada' + author: 'Villar, Nicolas and Lindsay, Adam T. and Gellersen, Hans' + bibtex: "@inproceedings{Villar2005,\n abstract = {We present the Pin\\&Play\\&Perform\ + \ system: an interface inthe form of a tablet on which a number of physical controlscan\ + \ be added, removed and arranged on the fly. These controls can easily be mapped\ + \ to existing music sofware usingthe MIDI protocol. 
The interface provides a mechanism\ + \ fordirect manipulation of application parameters and eventsthrough a set of\ + \ familiar controls, while also encouraging ahigh degree of customisation through\ + \ the ability to arrange,rearrange and annotate the spatial layout of the interfacecomponents\ + \ on the surface of the tablet.The paper describes how we have realized this concept\ + \ using the Pin\\&Play technology. As an application example, wedescribe our experiences\ + \ in using our interface in conjunction with Propellerheads' Reason, a popular\ + \ piece of musicsynthesis software.},\n address = {Vancouver, BC, Canada},\n author\ + \ = {Villar, Nicolas and Lindsay, Adam T. and Gellersen, Hans},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176834},\n issn = {2220-4806},\n keywords = {tangible\ + \ interface, rearrangeable interface, midi controllers },\n pages = {188--191},\n\ + \ title = {Pin \\& Play \\& Perform: A rearrangeable interface for musical composition\ + \ and performance},\n url = {http://www.nime.org/proceedings/2005/nime2005_188.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176155 + doi: 10.5281/zenodo.1176834 issn: 2220-4806 - pages: 19--24 - publisher: Aalborg University Copenhagen - title: Passively Augmenting Mobile Devices Towards Hybrid Musical Instrument Design - url: http://www.nime.org/proceedings/2017/nime2017_paper0004.pdf - year: 2017 + keywords: 'tangible interface, rearrangeable interface, midi controllers ' + pages: 188--191 + title: 'Pin & Play & Perform: A rearrangeable interface for musical composition + and performance' + url: http://www.nime.org/proceedings/2005/nime2005_188.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: aeldridge2017 - abstract: 'The Feedback Cello is a new electroacoustic actuated instrument in which - 
feedback can be induced independently on each string. Built from retro-fitted - acoustic cellos, the signals from electromagnetic pickups sitting under each string - are passed to a speaker built into the back of the instrument and to transducers - clamped in varying places across the instrument body. Placement of acoustic and - mechanical actuators on the resonant body of the cello mean that this simple analogue - feedback system is capable of a wide range of complex self-resonating behaviours. - This paper describes the motivations for building these instruments as both a - physical extension to live coding practice and an electroacoustic augmentation - of cello. The design and physical construction is outlined, and modes of performance - described with reference to the first six months of performances and installations. - Future developments and planned investigations are outlined.' - address: 'Copenhagen, Denmark' - author: Alice Eldridge and Chris Kiefer - bibtex: "@inproceedings{aeldridge2017,\n abstract = {The Feedback Cello is a new\ - \ electroacoustic actuated instrument in which feedback can be induced independently\ - \ on each string. Built from retro-fitted acoustic cellos, the signals from electromagnetic\ - \ pickups sitting under each string are passed to a speaker built into the back\ - \ of the instrument and to transducers clamped in varying places across the instrument\ - \ body. Placement of acoustic and mechanical actuators on the resonant body of\ - \ the cello mean that this simple analogue feedback system is capable of a wide\ - \ range of complex self-resonating behaviours. This paper describes the motivations\ - \ for building these instruments as both a physical extension to live coding practice\ - \ and an electroacoustic augmentation of cello. The design and physical construction\ - \ is outlined, and modes of performance described with reference to the first\ - \ six months of performances and installations. 
Future developments and planned\ - \ investigations are outlined.},\n address = {Copenhagen, Denmark},\n author =\ - \ {Alice Eldridge and Chris Kiefer},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176157},\n\ - \ issn = {2220-4806},\n pages = {25--29},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Self-resonating Feedback Cello: Interfacing gestural and generative\ - \ processes in improvised performance},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0005.pdf},\n\ - \ year = {2017}\n}\n" + ID: Birnbaum2005 + abstract: 'While several researchers have grappled with the problem of comparing + musical devices across performance, installation, and related contexts, no methodology + yet exists for producing holistic, informative visualizations for these devices. + Drawing on existing research in performance interaction, human-computer interaction, + and design space analysis, the authors propose a dimension space representation + that can be adapted for visually displaying musical devices. This paper illustrates + one possible application of the dimension space to existing performance and interaction + systems, revealing its usefulness both in exposing patterns across existing musical + devices and aiding in the design of new ones.' + address: 'Vancouver, BC, Canada' + author: 'Birnbaum, David and Fiebrink, Rebecca and Malloch, Joseph and Wanderley, + Marcelo M.' + bibtex: "@inproceedings{Birnbaum2005,\n abstract = {While several researchers have\ + \ grappled with the problem of comparing musical devices across performance, installation,\ + \ and related contexts, no methodology yet exists for producing holistic, informative\ + \ visualizations for these devices. 
Drawing on existing research in performance\ + \ interaction, human-computer interaction, and design space analysis, the authors\ + \ propose a dimension space representation that can be adapted for visually displaying\ + \ musical devices. This paper illustrates one possible application of the dimension\ + \ space to existing performance and interaction systems, revealing its usefulness\ + \ both in exposing patterns across existing musical devices and aiding in the\ + \ design of new ones.},\n address = {Vancouver, BC, Canada},\n author = {Birnbaum,\ + \ David and Fiebrink, Rebecca and Malloch, Joseph and Wanderley, Marcelo M.},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176707},\n issn = {2220-4806},\n\ + \ keywords = {design space analysis,human-computer interaction,interfaces for\ + \ musical expression,new},\n pages = {192--195},\n title = {Towards a Dimension\ + \ Space for Musical Devices},\n url = {http://www.nime.org/proceedings/2005/nime2005_192.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176157 + doi: 10.5281/zenodo.1176707 issn: 2220-4806 - pages: 25--29 - publisher: Aalborg University Copenhagen - title: 'Self-resonating Feedback Cello: Interfacing gestural and generative processes - in improvised performance' - url: http://www.nime.org/proceedings/2017/nime2017_paper0005.pdf - year: 2017 + keywords: 'design space analysis,human-computer interaction,interfaces for musical + expression,new' + pages: 192--195 + title: Towards a Dimension Space for Musical Devices + url: http://www.nime.org/proceedings/2005/nime2005_192.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: dhaddad2017 - abstract: 'We introduce a family of fragile electronic musical instruments designed - to be "played" through the act of destruction. 
Each Fragile Instrument consists - of an analog synthesizing circuit with embedded sensors that detect the destruction - of an outer shell, which is destroyed and replaced for each performance. Destruction - plays an integral role in both the spectacle and the generated sounds. This paper - presents several variations of Fragile Instruments we have created, discussing - their circuit design as well as choices of material for the outer shell and tools - of destruction. We conclude by considering other approaches to create intentionally - destructible electronic musical instruments. ' - address: 'Copenhagen, Denmark' - author: Don Derek Haddad and Xiao Xiao and Tod Machover and Joseph Paradiso - bibtex: "@inproceedings{dhaddad2017,\n abstract = {We introduce a family of fragile\ - \ electronic musical instruments designed to be \"played\" through the act of\ - \ destruction. Each Fragile Instrument consists of an analog synthesizing circuit\ - \ with embedded sensors that detect the destruction of an outer shell, which is\ - \ destroyed and replaced for each performance. Destruction plays an integral role\ - \ in both the spectacle and the generated sounds. This paper presents several\ - \ variations of Fragile Instruments we have created, discussing their circuit\ - \ design as well as choices of material for the outer shell and tools of destruction.\ - \ We conclude by considering other approaches to create intentionally destructible\ - \ electronic musical instruments. 
},\n address = {Copenhagen, Denmark},\n author\ - \ = {Don Derek Haddad and Xiao Xiao and Tod Machover and Joseph Paradiso},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176159},\n issn = {2220-4806},\n pages\ - \ = {30--33},\n publisher = {Aalborg University Copenhagen},\n title = {Fragile\ - \ Instruments: Constructing Destructible Musical Interfaces},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0006.pdf},\n\ - \ year = {2017}\n}\n" + ID: Wang2005a + abstract: 'ChucK is a programming language for real-time sound synthesis. It provides + generalized audio abstractions and precise control over timing and concurrency + --- combining the rapid-prototyping advantages of high-level programming tools, + such as Pure Data, with the flexibility and controllability of lower-level, text-based + languages like C/C++. In this paper, we present a new time-based paradigm for + programming controllers with ChucK. In addition to real-time control over sound + synthesis, we show how features such as dynamic patching, on-the-fly controller + mapping, multiple control rates, and precisely-timed recording and playback of + sensors can be employed under the ChucK programming model. Using this framework, + composers, programmers, and performers can quickly write (and read/debug) complex + controller/synthesis programs, and experiment with controller mapping on-the-fly. ' + address: 'Vancouver, BC, Canada' + author: 'Wang, Ge and Misra, Ananya and Cook, Perry R. and Kapur' + bibtex: "@inproceedings{Wang2005a,\n abstract = {ChucK is a programming language\ + \ for real-time sound synthesis. It provides generalized audio abstractions and\ + \ precise control over timing and concurrency --- combining the rapid-prototyping\ + \ advantages of high-level programming tools, such as Pure Data, with the flexibility\ + \ and controllability of lower-level, text-based languages like C/C++. 
In this\ + \ paper, we present a new time-based paradigm for programming controllers with\ + \ ChucK. In addition to real-time control over sound synthesis, we show how features\ + \ such as dynamic patching, on-the-fly controller mapping, multiple control rates,\ + \ and precisely-timed recording and playback of sensors can be employed under\ + \ the ChucK programming model. Using this framework, composers, programmers, and\ + \ performers can quickly write (and read/debug) complex controller/synthesis programs,\ + \ and experiment with controller mapping on-the-fly. },\n address = {Vancouver,\ + \ BC, Canada},\n author = {Wang, Ge and Misra, Ananya and Cook, Perry R. and Kapur},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176838},\n issn = {2220-4806},\n\ + \ keywords = {Controller mapping, programming language, on-the-fly programming,\ + \ real-time interaction, concurrency. },\n pages = {196--199},\n title = {Yeah,\ + \ ChucK It! = > Dynamic , Controllable Interface Mapping},\n url = {http://www.nime.org/proceedings/2005/nime2005_196.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176159 + doi: 10.5281/zenodo.1176838 issn: 2220-4806 - pages: 30--33 - publisher: Aalborg University Copenhagen - title: 'Fragile Instruments: Constructing Destructible Musical Interfaces' - url: http://www.nime.org/proceedings/2017/nime2017_paper0006.pdf - year: 2017 + keywords: 'Controller mapping, programming language, on-the-fly programming, real-time + interaction, concurrency. ' + pages: 196--199 + title: 'Yeah, ChucK It! = > Dynamic , Controllable Interface Mapping' + url: http://www.nime.org/proceedings/2005/nime2005_196.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: fheller2017 - abstract: 'Learning to play the transverse flute is not an easy task, at least not - for everyone. 
Since the flute does not have a reed to resonate, the player must - provide a steady, focused stream of air that will cause the flute to resonate - and thereby produce sound. In order to achieve this, the player has to be aware - of the embouchure position to generate an adequate air jet. For a beginner, this - can be a difficult task due to the lack of visual cues or indicators of the air - jet and lips position. This paper attempts to address this problem by presenting - an augmented flute that can make the gestures related to the embouchure visible - and measurable. The augmented flute shows information about the area covered - by the lower lip, estimates the lip hole shape based on noise analysis, and it - shows graphically the air jet direction. Additionally, the augmented flute provides - directional and continuous feedback in real time, based on data acquired by experienced - flutists.' - address: 'Copenhagen, Denmark' - author: Florian Heller and Irene Meying Cheung Ruiz and Jan Borchers - bibtex: "@inproceedings{fheller2017,\n abstract = {Learning to play the transverse\ - \ flute is not an easy task, at least not for everyone. Since the flute does\ - \ not have a reed to resonate, the player must provide a steady, focused stream\ - \ of air that will cause the flute to resonate and thereby produce sound. In\ - \ order to achieve this, the player has to be aware of the embouchure position\ - \ to generate an adequate air jet. For a beginner, this can be a difficult task\ - \ due to the lack of visual cues or indicators of the air jet and lips position.\ - \ This paper attempts to address this problem by presenting an augmented flute\ - \ that can make the gestures related to the embouchure visible and measurable.\ - \ The augmented flute shows information about the area covered by the lower lip,\ - \ estimates the lip hole shape based on noise analysis, and it shows graphically\ - \ the air jet direction. 
Additionally, the augmented flute provides directional\ - \ and continuous feedback in real time, based on data acquired by experienced\ - \ flutists.},\n address = {Copenhagen, Denmark},\n author = {Florian Heller and\ - \ Irene Meying Cheung Ruiz and Jan Borchers},\n booktitle = {Proceedings of the\ + ID: Tindale2005 + abstract: Drum controllers designed by researchers and commercialcompanies use a + variety of techniques for capturing percussive gestures. It is challenging to + obtain both quick responsetimes and low-level data (such as position) that contain + expressive information. This research is a comprehensive studyof current methods + to evaluate the available strategies andtechnologies. This study aims to demonstrate + the benefitsand detriments of the current state of percussion controllersas well + as yield tools for those who would wish to conductthis type of study in the future. + address: 'Vancouver, BC, Canada' + author: 'Tindale, Adam R. and Kapur, Ajay and Tzanetakis, George and Driessen, Peter + and Schloss, Andrew' + bibtex: "@inproceedings{Tindale2005,\n abstract = {Drum controllers designed by\ + \ researchers and commercialcompanies use a variety of techniques for capturing\ + \ percussive gestures. It is challenging to obtain both quick responsetimes and\ + \ low-level data (such as position) that contain expressive information. This\ + \ research is a comprehensive studyof current methods to evaluate the available\ + \ strategies andtechnologies. This study aims to demonstrate the benefitsand detriments\ + \ of the current state of percussion controllersas well as yield tools for those\ + \ who would wish to conductthis type of study in the future.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Tindale, Adam R. 
and Kapur, Ajay and Tzanetakis, George\ + \ and Driessen, Peter and Schloss, Andrew},\n booktitle = {Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176161},\n issn = {2220-4806},\n pages = {34--37},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {An Augmented Flute for Beginners},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0007.pdf},\n year\ - \ = {2017}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1176161 - issn: 2220-4806 - pages: 34--37 - publisher: Aalborg University Copenhagen - title: An Augmented Flute for Beginners - url: http://www.nime.org/proceedings/2017/nime2017_paper0007.pdf - year: 2017 - - -- ENTRYTYPE: inproceedings - ID: gisaac2017 - abstract: 'This paper explores the idea of using virtual textural terrains as a - means of generating haptic profiles for force-feedback controllers. This approach - breaks from the paradigm established within audio-haptic research over the last - few decades where physical models within virtual environments are designed to - transduce gesture into sonic output. We outline a method for generating multimodal - terrains using basis functions, which are rendered into monochromatic visual representations - for inspection. This visual terrain is traversed using a haptic controller, the - NovInt Falcon, which in turn receives force information based on the grayscale - value of its location in this virtual space. As the image is traversed by a performer - the levels of resistance vary, and the image is realized as a physical terrain. - We discuss the potential of this approach to afford engaging musical experiences - for both the performer and the audience as iterated through numerous performances.' 
- address: 'Copenhagen, Denmark' - author: Gabriella Isaac and Lauren Hayes and Todd Ingalls - bibtex: "@inproceedings{gisaac2017,\n abstract = {This paper explores the idea of\ - \ using virtual textural terrains as a means of generating haptic profiles for\ - \ force-feedback controllers. This approach breaks from the paradigm established\ - \ within audio-haptic research over the last few decades where physical models\ - \ within virtual environments are designed to transduce gesture into sonic output.\ - \ We outline a method for generating multimodal terrains using basis functions,\ - \ which are rendered into monochromatic visual representations for inspection.\ - \ This visual terrain is traversed using a haptic controller, the NovInt Falcon,\ - \ which in turn receives force information based on the grayscale value of its\ - \ location in this virtual space. As the image is traversed by a performer the\ - \ levels of resistance vary, and the image is realized as a physical terrain.\ - \ We discuss the potential of this approach to afford engaging musical experiences\ - \ for both the performer and the audience as iterated through numerous performances.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Gabriella Isaac and Lauren Hayes\ - \ and Todd Ingalls},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176163},\n\ - \ issn = {2220-4806},\n pages = {38--41},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Cross-Modal Terrains: Navigating Sonic Space through Haptic Feedback},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0008.pdf},\n year\ - \ = {2017}\n}\n" + \ {10.5281/zenodo.1176828},\n issn = {2220-4806},\n keywords = {Percussion Controllers,\ + \ Timbre-recognition based instruments, Electronic Percussion, Sensors for Interface\ + \ Design },\n pages = {200--203},\n title = {A Comparison of Sensor Strategies\ + \ for Capturing 
Percussive Gestures},\n url = {http://www.nime.org/proceedings/2005/nime2005_200.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176163 + doi: 10.5281/zenodo.1176828 issn: 2220-4806 - pages: 38--41 - publisher: Aalborg University Copenhagen - title: 'Cross-Modal Terrains: Navigating Sonic Space through Haptic Feedback' - url: http://www.nime.org/proceedings/2017/nime2017_paper0008.pdf - year: 2017 + keywords: 'Percussion Controllers, Timbre-recognition based instruments, Electronic + Percussion, Sensors for Interface Design ' + pages: 200--203 + title: A Comparison of Sensor Strategies for Capturing Percussive Gestures + url: http://www.nime.org/proceedings/2005/nime2005_200.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: jwu2017 - abstract: 'This paper presents solutions to improve reliability and to work around - challenges of using a Leap Motion; sensor as a gestural control and input device - in digital music instrument (DMI) design. We implement supervised learning algorithms - (k-nearest neighbors, support vector machine, binary decision tree, and artificial - neural network) to estimate hand motion data, which is not typically captured - by the sensor. Two problems are addressed: 1) the sensor cannot detect overlapping - hands 2) The sensor''s limited detection range. Training examples included 7 kinds - of overlapping hand gestures as well as hand trajectories where a hand goes out - of the sensor''s range. The overlapping gestures were treated as a classification - problem and the best performing model was k-nearest neighbors with 62% accuracy. - The out-of-range problem was treated first as a clustering problem to group the - training examples into a small number of trajectory types, then as a classification - problem to predict trajectory type based on the hand''s motion before going out - of range. 
The best performing model was k-nearest neighbors with an accuracy of - 30%. The prediction models were implemented in an ongoing multimedia electroacoustic - vocal performance and an educational project named Embodied Sonic Meditation (ESM). ' - address: 'Copenhagen, Denmark' - author: Jiayue Wu and Mark Rau and Yun Zhang and Yijun Zhou and Matt Wright - bibtex: "@inproceedings{jwu2017,\n abstract = {This paper presents solutions to\ - \ improve reliability and to work around challenges of using a Leap Motion; sensor\ - \ as a gestural control and input device in digital music instrument (DMI) design.\ - \ We implement supervised learning algorithms (k-nearest neighbors, support vector\ - \ machine, binary decision tree, and artificial neural network) to estimate hand\ - \ motion data, which is not typically captured by the sensor. Two problems are\ - \ addressed: 1) the sensor cannot detect overlapping hands 2) The sensor's limited\ - \ detection range. Training examples included 7 kinds of overlapping hand gestures\ - \ as well as hand trajectories where a hand goes out of the sensor's range. The\ - \ overlapping gestures were treated as a classification problem and the best performing\ - \ model was k-nearest neighbors with 62% accuracy. The out-of-range problem was\ - \ treated first as a clustering problem to group the training examples into a\ - \ small number of trajectory types, then as a classification problem to predict\ - \ trajectory type based on the hand's motion before going out of range. The best\ - \ performing model was k-nearest neighbors with an accuracy of 30%. The prediction\ - \ models were implemented in an ongoing multimedia electroacoustic vocal performance\ - \ and an educational project named Embodied Sonic Meditation (ESM). 
},\n address\ - \ = {Copenhagen, Denmark},\n author = {Jiayue Wu and Mark Rau and Yun Zhang and\ - \ Yijun Zhou and Matt Wright},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176165},\n\ - \ issn = {2220-4806},\n pages = {42--47},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Towards Robust Tracking with an Unreliable Motion Sensor Using Machine\ - \ Learning},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0009.pdf},\n\ - \ year = {2017}\n}\n" + ID: Lee2005 + abstract: 'Discussion of time in interactive computer music systems engineering + has been largely limited to data acquisition rates and latency.Since music is + an inherently time-based medium, we believe thattime plays a more important role + in both the usability and implementation of these systems. In this paper, we present + a time designspace, which we use to expose some of the challenges of developing + computer music systems with time-based interaction. Wedescribe and analyze the + time-related issues we encountered whilstdesigning and building a series of interactive + music exhibits thatfall into this design space. These issues often occur because + ofthe varying and sometimes conflicting conceptual models of timein the three + domains of user, application (music), and engineering.We present some of our latest + work in conducting gesture interpretation and frameworks for digital audio, which + attempt to analyzeand address these conflicts in temporal conceptual models.' 
+ address: 'Vancouver, BC, Canada' + author: 'Lee, Eric and Borchers, Jan' + bibtex: "@inproceedings{Lee2005,\n abstract = {Discussion of time in interactive\ + \ computer music systems engineering has been largely limited to data acquisition\ + \ rates and latency.Since music is an inherently time-based medium, we believe\ + \ thattime plays a more important role in both the usability and implementation\ + \ of these systems. In this paper, we present a time designspace, which we use\ + \ to expose some of the challenges of developing computer music systems with time-based\ + \ interaction. Wedescribe and analyze the time-related issues we encountered whilstdesigning\ + \ and building a series of interactive music exhibits thatfall into this design\ + \ space. These issues often occur because ofthe varying and sometimes conflicting\ + \ conceptual models of timein the three domains of user, application (music),\ + \ and engineering.We present some of our latest work in conducting gesture interpretation\ + \ and frameworks for digital audio, which attempt to analyzeand address these\ + \ conflicts in temporal conceptual models.},\n address = {Vancouver, BC, Canada},\n\ + \ author = {Lee, Eric and Borchers, Jan},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176766},\n\ + \ issn = {2220-4806},\n keywords = {time design, conceptual models of time, design\ + \ spaces, interactive music exhibits, engineering music systems},\n pages = {204--207},\n\ + \ title = {The Role of Time in Engineering Computer Music Systems},\n url = {http://www.nime.org/proceedings/2005/nime2005_204.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176165 + doi: 10.5281/zenodo.1176766 issn: 2220-4806 - pages: 42--47 - publisher: Aalborg University Copenhagen - title: Towards Robust Tracking with an Unreliable 
Motion Sensor Using Machine Learning - url: http://www.nime.org/proceedings/2017/nime2017_paper0009.pdf - year: 2017 + keywords: 'time design, conceptual models of time, design spaces, interactive music + exhibits, engineering music systems' + pages: 204--207 + title: The Role of Time in Engineering Computer Music Systems + url: http://www.nime.org/proceedings/2005/nime2005_204.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: abarbosa2017 - abstract: 'Sounding Architecture, is the first collaborative teaching development - between Department of Architecture and Department of Music at the University of - Hong Kong (HKU), introduced in Fall 2016. In this paper we present critical observations - about the studio after a final public presentation of all projects. The Review - was conducted with demonstrations by groups of students supervised by different - Lecturer, in each case focusing on a different strategy to create a connection - between Sound, Music, Acoustics, Space and Architectural Design. There was an - assumption that the core working process would have to include the design of a - new musical instrument, which in some cases became the final deliverable of the - Studio and in other cases a step in a process that leads to a different outcome - (such as an architectural Design, a performance or social experiment). One other - relevant aspect was that Digital technology was used in the design and fabrication - of the physical instruments'' prototypes, but in very few cases, it was used in - the actual generation or enhancement of sound, with the instruments relying almost - exclusively in acoustic and mechanical sound. ' - address: 'Copenhagen, Denmark' - author: Álvaro Barbosa and Thomas Tsang - bibtex: "@inproceedings{abarbosa2017,\n abstract = {Sounding Architecture, is the\ - \ first collaborative teaching development between Department of Architecture\ - \ and Department of Music at the University of Hong Kong (HKU), introduced in\ - \ Fall 2016. 
In this paper we present critical observations about the studio after\ - \ a final public presentation of all projects. The Review was conducted with demonstrations\ - \ by groups of students supervised by different Lecturer, in each case focusing\ - \ on a different strategy to create a connection between Sound, Music, Acoustics,\ - \ Space and Architectural Design. There was an assumption that the core working\ - \ process would have to include the design of a new musical instrument, which\ - \ in some cases became the final deliverable of the Studio and in other cases\ - \ a step in a process that leads to a different outcome (such as an architectural\ - \ Design, a performance or social experiment). One other relevant aspect was that\ - \ Digital technology was used in the design and fabrication of the physical instruments'\ - \ prototypes, but in very few cases, it was used in the actual generation or enhancement\ - \ of sound, with the instruments relying almost exclusively in acoustic and mechanical\ - \ sound. },\n address = {Copenhagen, Denmark},\n author = {Álvaro Barbosa and\ - \ Thomas Tsang},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176167},\n\ - \ issn = {2220-4806},\n pages = {48--51},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Sounding Architecture: Inter-Disciplinary Studio at HKU},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0010.pdf},\n\ - \ year = {2017}\n}\n" + ID: Kobayashi2005 + abstract: 'This paper reports our recent development on a reconfigurable user interface. + We created a system that consists of a dial type controller ‘Spinner’, and the + GUI (Graphical User Interface) objects for the Max/MSP environment[1]. One physical + controller corresponds to one GUI controller on a PC’s display device, and a user + can freely change the connection on the fly (i.e. 
associate the physical controller + to another GUI controller). Since the user interface on the PC side is running + on the Max/MSP environment that has high flexibility, a user can freely reconfigure + the layout of GUI controllers. A single ‘Spinner’ control device consists of a + rotary encoder with a push button to count rotations and a photo IC to detect + specific patterns from the GUI objects to identify. Since ‘Spinner’ features a + simple identification method, it is capable of being used with normal display + devices like LCD (Liquid Crystal Display) or a CRT (Cathode Ray Tube) and so on. + A user can access multiple ‘Spinner’ devices simultaneously. By using this system, + a user can build a reconfigurable user interface.' + address: 'Vancouver, BC, Canada' + author: 'Kobayashi, Shigeru and Masayuki, Akamasu' + bibtex: "@inproceedings{Kobayashi2005,\n abstract = {This paper reports our recent\ + \ development on a reconfigurable user interface. We created a system that consists\ + \ of a dial type controller ‘Spinner’, and the GUI (Graphical User Interface)\ + \ objects for the Max/MSP environment[1]. One physical controller corresponds\ + \ to one GUI controller on a PC’s display device, and a user can freely change\ + \ the connection on the fly (i.e. associate the physical controller to another\ + \ GUI controller). Since the user interface on the PC side is running on the Max/MSP\ + \ environment that has high flexibility, a user can freely reconfigure the layout\ + \ of GUI controllers. A single ‘Spinner’ control device consists of a rotary encoder\ + \ with a push button to count rotations and a photo IC to detect specific patterns\ + \ from the GUI objects to identify. Since ‘Spinner’ features a simple identification\ + \ method, it is capable of being used with normal display devices like LCD (Liquid\ + \ Crystal Display) or a CRT (Cathode Ray Tube) and so on. A user can access multiple\ + \ ‘Spinner’ devices simultaneously. 
By using this system, a user can build a reconfigurable\ + \ user interface.},\n address = {Vancouver, BC, Canada},\n author = {Kobayashi,\ + \ Shigeru and Masayuki, Akamasu},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176764},\n\ + \ issn = {2220-4806},\n keywords = {Reconfigurable, Sensors, Computer Music },\n\ + \ pages = {208--211},\n title = {Spinner: A Simple Approach to Reconfigurable\ + \ User Interfaces},\n url = {http://www.nime.org/proceedings/2005/nime2005_208.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176167 + doi: 10.5281/zenodo.1176764 issn: 2220-4806 - pages: 48--51 - publisher: Aalborg University Copenhagen - title: 'Sounding Architecture: Inter-Disciplinary Studio at HKU' - url: http://www.nime.org/proceedings/2017/nime2017_paper0010.pdf - year: 2017 + keywords: 'Reconfigurable, Sensors, Computer Music ' + pages: 208--211 + title: 'Spinner: A Simple Approach to Reconfigurable User Interfaces' + url: http://www.nime.org/proceedings/2005/nime2005_208.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: mlerner2017 - abstract: 'This paper describes the process of creation of a new digital musical - instrument: Osiris. This device is based on the circulation of liquids for the - generation of musical notes. Besides the system of liquid distribution, a module - that generates MIDI events was designed and built based on the Arduino platform; - such module is employed together with a Proteus 2000 sound generator. The programming - of the control module as well as the choice of sound-generating module had as - their main objective that the instrument should provide an ample variety of sound - and musical possibilities, controllable in real time.' 
- address: 'Copenhagen, Denmark' - author: 'Matus Lerner, Martín' - bibtex: "@inproceedings{mlerner2017,\n abstract = {This paper describes the process\ - \ of creation of a new digital musical instrument: Osiris. This device is based\ - \ on the circulation of liquids for the generation of musical notes. Besides the\ - \ system of liquid distribution, a module that generates MIDI events was designed\ - \ and built based on the Arduino platform; such module is employed together with\ - \ a Proteus 2000 sound generator. The programming of the control module as well\ - \ as the choice of sound-generating module had as their main objective that the\ - \ instrument should provide an ample variety of sound and musical possibilities,\ - \ controllable in real time.},\n address = {Copenhagen, Denmark},\n author = {Matus\ - \ Lerner, Martín},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176169},\n\ - \ issn = {2220-4806},\n pages = {52--55},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Osiris: a liquid based digital musical instrument},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0011.pdf},\n\ - \ year = {2017}\n}\n" + ID: Magnusson2005 + abstract: 'This paper describes the audio human computer interface experiments of + ixi in the past and outlines the current platform for future research. ixi software + [5] was founded by Thor Magnusson and Enrike Hurtado Mendieta in year 2000 and + since then we''ve been working on building prototypes in the form of screen-based + graphical user interfaces for musical performance, researching human computer + interaction in the field of music and creating environments which other people + can use to do similar work and for us to use in our workshops. 
Our initial starting + point was that computer music software and the way their interfaces are built + need not necessarily be limited to copying the acoustic musical instruments and + studio technology that we already have, but additionally we can create unique + languages and work processes for the virtual world. The computer is a vast creative + space with specific qualities that can and should be explored. ' + address: 'Vancouver, BC, Canada' + author: 'Magnusson, Thor' + bibtex: "@inproceedings{Magnusson2005,\n abstract = {This paper describes the audio\ + \ human computer interface experiments of ixi in the past and outlines the current\ + \ platform for future research. ixi software [5] was founded by Thor Magnusson\ + \ and Enrike Hurtado Mendieta in year 2000 and since then we've been working on\ + \ building prototypes in the form of screen-based graphical user interfaces for\ + \ musical performance, researching human computer interaction in the field of\ + \ music and creating environments which other people can use to do similar work\ + \ and for us to use in our workshops. Our initial starting point was that computer\ + \ music software and the way their interfaces are built need not necessarily be\ + \ limited to copying the acoustic musical instruments and studio technology that\ + \ we already have, but additionally we can create unique languages and work processes\ + \ for the virtual world. The computer is a vast creative space with specific qualities\ + \ that can and should be explored. 
},\n address = {Vancouver, BC, Canada},\n author\ + \ = {Magnusson, Thor},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176782},\n\ + \ issn = {2220-4806},\n keywords = {Graphical user interfaces, abstract graphical\ + \ interfaces, hypercontrol, intelligent instruments, live performance, machine\ + \ learning, catalyst software, OSC, interfacing code, open source, Pure Data,\ + \ SuperCollider. },\n pages = {212--215},\n title = {ixi software: The Interface\ + \ as Instrument},\n url = {http://www.nime.org/proceedings/2005/nime2005_212.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176169 + doi: 10.5281/zenodo.1176782 issn: 2220-4806 - pages: 52--55 - publisher: Aalborg University Copenhagen - title: 'Osiris: a liquid based digital musical instrument' - url: http://www.nime.org/proceedings/2017/nime2017_paper0011.pdf - year: 2017 + keywords: 'Graphical user interfaces, abstract graphical interfaces, hypercontrol, + intelligent instruments, live performance, machine learning, catalyst software, + OSC, interfacing code, open source, Pure Data, SuperCollider. ' + pages: 212--215 + title: 'ixi software: The Interface as Instrument' + url: http://www.nime.org/proceedings/2005/nime2005_212.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: sstasis2017 - abstract: 'Musicians, audio engineers and producers often make use of common timbral - adjectives to describe musical signals and transformations. However, the subjective - nature of these terms, and the variability with respect to musical context often - leads to inconsistencies in their definition. In this study, a model is proposed - for controlling an equaliser by navigating clusters of datapoints, which represent - grouped parameter settings with the same timbral description. 
The interface allows - users to identify the nearest cluster to their current parameter setting and recommends - changes based on its relationship to a cluster centroid. To do this, we apply - dimensionality reduction to a dataset of equaliser curves described as warm and - bright using a stacked autoencoder, then group the entries using an agglomerative - clustering algorithm with a coherence based distance criterion. To test the efficacy - of the system, we implement listening tests and show that subjects are able to - match datapoints to their respective sub-representations with 93.75% mean accuracy.' - address: 'Copenhagen, Denmark' - author: Spyridon Stasis and Jason Hockman and Ryan Stables - bibtex: "@inproceedings{sstasis2017,\n abstract = {Musicians, audio engineers and\ - \ producers often make use of common timbral adjectives to describe musical signals\ - \ and transformations. However, the subjective nature of these terms, and the\ - \ variability with respect to musical context often leads to inconsistencies in\ - \ their definition. In this study, a model is proposed for controlling an equaliser\ - \ by navigating clusters of datapoints, which represent grouped parameter settings\ - \ with the same timbral description. The interface allows users to identify the\ - \ nearest cluster to their current parameter setting and recommends changes based\ - \ on its relationship to a cluster centroid. To do this, we apply dimensionality\ - \ reduction to a dataset of equaliser curves described as warm and bright using\ - \ a stacked autoencoder, then group the entries using an agglomerative clustering\ - \ algorithm with a coherence based distance criterion. 
To test the efficacy of\ - \ the system, we implement listening tests and show that subjects are able to\ - \ match datapoints to their respective sub-representations with 93.75% mean accuracy.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Spyridon Stasis and Jason Hockman\ - \ and Ryan Stables},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176171},\n\ - \ issn = {2220-4806},\n pages = {56--61},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Navigating Descriptive Sub-Representations of Musical Timbre},\n url\ - \ = {http://www.nime.org/proceedings/2017/nime2017_paper0012.pdf},\n year = {2017}\n\ - }\n" + ID: Miranda2005 + abstract: ' Musicians and composers have been using brainwaves as generative sources + in music for at least 40 years and the possibility of a brain-computer interface + for direct communication and control was first seriously investigated in the early + 1970s. Work has been done by many artists and technologists in the intervening + years to attempt to control music systems with brainwaves and --- indeed --- many + other biological signals. Despite the richness of EEG, fMRI and other data which + can be read from the human brain, there has up to now been only limited success + in translating the complex encephalographic data into satisfactory musical results. + We are currently pursuing research which we believe will lead to the possibility + of direct brain-computer interfaces for rich and expressive musical control. This + report will outline the directions of our current research and results. 
' + address: 'Vancouver, BC, Canada' + author: 'Miranda, Eduardo and Brouse, Andrew' + bibtex: "@inproceedings{Miranda2005,\n abstract = { Musicians and composers have\ + \ been using brainwaves as generative sources in music for at least 40 years and\ + \ the possibility of a brain-computer interface for direct communication and control\ + \ was first seriously investigated in the early 1970s. Work has been done by many\ + \ artists and technologists in the intervening years to attempt to control music\ + \ systems with brainwaves and --- indeed --- many other biological signals. Despite\ + \ the richness of EEG, fMRI and other data which can be read from the human brain,\ + \ there has up to now been only limited success in translating the complex encephalographic\ + \ data into satisfactory musical results. We are currently pursuing research which\ + \ we believe will lead to the possibility of direct brain-computer interfaces\ + \ for rich and expressive musical control. This report will outline the directions\ + \ of our current research and results. 
},\n address = {Vancouver, BC, Canada},\n\ + \ author = {Miranda, Eduardo and Brouse, Andrew},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176792},\n issn = {2220-4806},\n keywords = {Brain-Computer\ + \ Interface, BCI, Electroencephalogram, EEG, brainwaves, music and the brain,\ + \ interactive music systems.},\n pages = {216--219},\n title = {Toward Direct\ + \ Brain-Computer Musical Interfaces},\n url = {http://www.nime.org/proceedings/2005/nime2005_216.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176171 + doi: 10.5281/zenodo.1176792 issn: 2220-4806 - pages: 56--61 - publisher: Aalborg University Copenhagen - title: Navigating Descriptive Sub-Representations of Musical Timbre - url: http://www.nime.org/proceedings/2017/nime2017_paper0012.pdf - year: 2017 + keywords: 'Brain-Computer Interface, BCI, Electroencephalogram, EEG, brainwaves, + music and the brain, interactive music systems.' + pages: 216--219 + title: Toward Direct Brain-Computer Musical Interfaces + url: http://www.nime.org/proceedings/2005/nime2005_216.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: pwilliams2017a - abstract: 'Pitch Fork is a prototype of an alternate, actuated digital musical instrument - (DMI). It uses 5 infra-red and 4 piezoelectric sensors to control an additive - synthesis engine. Iron bars are used as the physical point of contact in interaction - with the aim of using material computation to control aspects of the digitally - produced sound. This choice of material was also chosen to affect player experience. Sensor - readings are relayed to a Macbook via an Arduino Mega. Mappings and audio output - signal is carried out with Pure Data Extended.' 
- address: 'Copenhagen, Denmark' - author: Peter Williams and Daniel Overholt - bibtex: "@inproceedings{pwilliams2017a,\n abstract = {Pitch Fork is a prototype\ - \ of an alternate, actuated digital musical instrument (DMI). It uses 5 infra-red\ - \ and 4 piezoelectric sensors to control an additive synthesis engine. Iron bars\ - \ are used as the physical point of contact in interaction with the aim of using\ - \ material computation to control aspects of the digitally produced sound. This\ - \ choice of material was also chosen to affect player experience. Sensor readings\ - \ are relayed to a Macbook via an Arduino Mega. Mappings and audio output signal\ - \ is carried out with Pure Data Extended.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Peter Williams and Daniel Overholt},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176173},\n issn = {2220-4806},\n pages = {62--64},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {Pitch Fork: A Novel tactile Digital\ - \ Musical Instrument},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0013.pdf},\n\ - \ year = {2017}\n}\n" + ID: Taylor2005 + abstract: 'We present a real-time system which allows musicians tointeract with + synthetic virtual characters as they perform.Using Max/MSP to parameterize keyboard + and vocal input, meaningful features (pitch, amplitude, chord information, and + vocal timbre) are extracted from live performancein real-time. These extracted + musical features are thenmapped to character behaviour in such a way that the + musician''s performance elicits a response from the virtual character. The system + uses the ANIMUS framework to generatebelievable character expressions. Experimental + results arepresented for simple characters.' 
+ address: 'Vancouver, BC, Canada' + author: 'Taylor, Robyn and Torres, Daniel and Boulanger, Pierre' + bibtex: "@inproceedings{Taylor2005,\n abstract = {We present a real-time system\ + \ which allows musicians tointeract with synthetic virtual characters as they\ + \ perform.Using Max/MSP to parameterize keyboard and vocal input, meaningful features\ + \ (pitch, amplitude, chord information, and vocal timbre) are extracted from live\ + \ performancein real-time. These extracted musical features are thenmapped to\ + \ character behaviour in such a way that the musician's performance elicits a\ + \ response from the virtual character. The system uses the ANIMUS framework to\ + \ generatebelievable character expressions. Experimental results arepresented\ + \ for simple characters.},\n address = {Vancouver, BC, Canada},\n author = {Taylor,\ + \ Robyn and Torres, Daniel and Boulanger, Pierre},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176826},\n issn = {2220-4806},\n keywords = {Music, synthetic\ + \ characters, advanced man-machine interfaces, virtual reality, behavioural systems,\ + \ interaction techniques, visualization, immersive entertainment, artistic in-\ + \ stallations },\n pages = {220--223},\n title = {Using Music to Interact with\ + \ a Virtual Character},\n url = {http://www.nime.org/proceedings/2005/nime2005_220.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176173 + doi: 10.5281/zenodo.1176826 issn: 2220-4806 - pages: 62--64 - publisher: Aalborg University Copenhagen - title: 'Pitch Fork: A Novel tactile Digital Musical Instrument' - url: http://www.nime.org/proceedings/2017/nime2017_paper0013.pdf - year: 2017 + keywords: 'Music, synthetic characters, advanced man-machine interfaces, virtual + reality, behavioural systems, interaction techniques, 
visualization, immersive + entertainment, artistic in- stallations ' + pages: 220--223 + title: Using Music to Interact with a Virtual Character + url: http://www.nime.org/proceedings/2005/nime2005_220.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: cerdem2017 - abstract: 'Biostomp is a new musical interface that relies on the use mechanomyography - (MMG) as a biocontrol mechanism in live performance situations. Designed in the - form of a stomp box, Biostomp translates a performer''s muscle movements into - control signals. A custom MMG sensor captures the acoustic output of muscle tissue - oscillations resulting from contractions. An analog circuit amplifies and filters - these signals, and a micro-controller translates the processed signals into pulses. - These pulses are used to activate a stepper motor mechanism, which is designed - to be mounted on parameter knobs on effects pedals. The primary goal in designing - Biostomp is to offer a robust, inexpensive, and easy-to-operate platform for integrating - biological signals into both traditional and contemporary music performance practices - without requiring an intermediary computer software. In this paper, we discuss - the design, implementation and evaluation of Biostomp. Following an overview of - related work on the use of biological signals in artistic projects, we offer a - discussion of our approach to conceptualizing and fabricating a biocontrol mechanism - as a new musical interface. We then discuss the results of an evaluation study - conducted with 21 professional musicians. A video abstract for Biostomp can be - viewed at vimeo.com/biostomp/video.' - address: 'Copenhagen, Denmark' - author: Cagri Erdem and Anil Camci and Angus Forbes - bibtex: "@inproceedings{cerdem2017,\n abstract = {Biostomp is a new musical interface\ - \ that relies on the use mechanomyography (MMG) as a biocontrol mechanism in live\ - \ performance situations. 
Designed in the form of a stomp box, Biostomp translates\ - \ a performer's muscle movements into control signals. A custom MMG sensor captures\ - \ the acoustic output of muscle tissue oscillations resulting from contractions.\ - \ An analog circuit amplifies and filters these signals, and a micro-controller\ - \ translates the processed signals into pulses. These pulses are used to activate\ - \ a stepper motor mechanism, which is designed to be mounted on parameter knobs\ - \ on effects pedals. The primary goal in designing Biostomp is to offer a robust,\ - \ inexpensive, and easy-to-operate platform for integrating biological signals\ - \ into both traditional and contemporary music performance practices without requiring\ - \ an intermediary computer software. In this paper, we discuss the design, implementation\ - \ and evaluation of Biostomp. Following an overview of related work on the use\ - \ of biological signals in artistic projects, we offer a discussion of our approach\ - \ to conceptualizing and fabricating a biocontrol mechanism as a new musical interface.\ - \ We then discuss the results of an evaluation study conducted with 21 professional\ - \ musicians. A video abstract for Biostomp can be viewed at vimeo.com/biostomp/video.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Cagri Erdem and Anil Camci and\ - \ Angus Forbes},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176175},\n\ - \ issn = {2220-4806},\n pages = {65--70},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Biostomp: A Biocontrol System for Embodied Performance Using Mechanomyography},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0014.pdf},\n year\ - \ = {2017}\n}\n" + ID: Chew2005 + abstract: 'In the Expression Synthesis Project (ESP), we propose adriving interface + for expression synthesis. 
ESP aims toprovide a compelling metaphor for expressive + performance soas to make high-level expressive decisions accessible to nonexperts. + In ESP, the user drives a car on a virtual road thatrepresents the music with + its twists and turns; and makesdecisions on how to traverse each part of the road. + The driver''sdecisions affect in real-time the rendering of the piece. Thepedals + and wheel provide a tactile interface for controlling thecar dynamics and musical + expression, while the displayportrays a first person view of the road and dashboard + from thedriver''s seat. This game-like interface allows non-experts tocreate expressive + renderings of existing music without havingto master an instrument, and allows + expert musicians toexperiment with expressive choice without having to firstmaster + the notes of the piece. The prototype system has beentested and refined in numerous + demonstrations. This paperpresents the concepts underlying the ESP system and + thearchitectural design and implementation of a prototype.' + address: 'Vancouver, BC, Canada' + author: 'Chew, Elaine and Francois, Alexander R. and Liu, Jie and Yang, Aaron' + bibtex: "@inproceedings{Chew2005,\n abstract = {In the Expression Synthesis Project\ + \ (ESP), we propose adriving interface for expression synthesis. ESP aims toprovide\ + \ a compelling metaphor for expressive performance soas to make high-level expressive\ + \ decisions accessible to nonexperts. In ESP, the user drives a car on a virtual\ + \ road thatrepresents the music with its twists and turns; and makesdecisions\ + \ on how to traverse each part of the road. The driver'sdecisions affect in real-time\ + \ the rendering of the piece. Thepedals and wheel provide a tactile interface\ + \ for controlling thecar dynamics and musical expression, while the displayportrays\ + \ a first person view of the road and dashboard from thedriver's seat. 
This game-like\ + \ interface allows non-experts tocreate expressive renderings of existing music\ + \ without havingto master an instrument, and allows expert musicians toexperiment\ + \ with expressive choice without having to firstmaster the notes of the piece.\ + \ The prototype system has beentested and refined in numerous demonstrations.\ + \ This paperpresents the concepts underlying the ESP system and thearchitectural\ + \ design and implementation of a prototype.},\n address = {Vancouver, BC, Canada},\n\ + \ author = {Chew, Elaine and Francois, Alexander R. and Liu, Jie and Yang, Aaron},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176725},\n issn = {2220-4806},\n\ + \ keywords = {Music expression synthesis system, driving interface. },\n pages\ + \ = {224--227},\n title = {ESP: A Driving Interface for Expression Synthesis},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_224.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176175 + doi: 10.5281/zenodo.1176725 issn: 2220-4806 - pages: 65--70 - publisher: Aalborg University Copenhagen - title: 'Biostomp: A Biocontrol System for Embodied Performance Using Mechanomyography' - url: http://www.nime.org/proceedings/2017/nime2017_paper0014.pdf - year: 2017 + keywords: 'Music expression synthesis system, driving interface. ' + pages: 224--227 + title: 'ESP: A Driving Interface for Expression Synthesis' + url: http://www.nime.org/proceedings/2005/nime2005_224.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: eknudsen2017 - abstract: 'An application for ballet training is presented that monitors the posture - position (straightness of the spine and rotation of the pelvis) deviation from - the ideal position in real-time. The human skeletal data is acquired through a - Microsoft Kinect v2. 
The movement of the student is mirrored through an abstract - skeletal figure and instructions are provided through a virtual teacher. Posture - deviation is measured in the following way: Torso misalignment is calculated by - comparing hip center joint, shoulder center joint and neck joint position with - an ideal posture position retrieved in an initial calibration procedure. Pelvis - deviation is expressed as the xz-rotation of the hip-center joint. The posture - deviation is sonified via a varying cut-off frequency of a high-pass filter applied - to floating water sound. The posture deviation is visualized via a curve and a - rigged skeleton in which the misaligned torso parts are color-coded. In an experiment - with 9-12 year-old dance students from a ballet school, comparing the audio-visual - feedback modality with no feedback leads to an increase in posture accuracy (p - < 0.001, Cohen''s d = 1.047). Reaction card feedback and expert interviews - indicate that the feedback is considered fun and useful for training independently - from the teacher.' - address: 'Copenhagen, Denmark' - author: Esben W. Knudsen and Malte L. Hølledig and Mads Juel Nielsen and Rikke K. - Petersen and Sebastian Bach-Nielsen and Bogdan-Constantin Zanescu and Daniel Overholt - and Hendrik Purwins and Kim Helweg - bibtex: "@inproceedings{eknudsen2017,\n abstract = {An application for ballet training\ - \ is presented that monitors the posture position (straightness of the spine and\ - \ rotation of the pelvis) deviation from the ideal position in real-time. The\ - \ human skeletal data is acquired through a Microsoft Kinect v2. The movement\ - \ of the student is mirrored through an abstract skeletal figure and instructions\ - \ are provided through a virtual teacher. 
Posture deviation is measured in the\ - \ following way: Torso misalignment is calculated by comparing hip center joint,\ - \ shoulder center joint and neck joint position with an ideal posture position\ - \ retrieved in an initial calibration procedure. Pelvis deviation is expressed\ - \ as the xz-rotation of the hip-center joint. The posture deviation is sonified\ - \ via a varying cut-off frequency of a high-pass filter applied to floating water\ - \ sound. The posture deviation is visualized via a curve and a rigged skeleton\ - \ in which the misaligned torso parts are color-coded. In an experiment with 9-12\ - \ year-old dance students from a ballet school, comparing the audio-visual feedback\ - \ modality with no feedback leads to an increase in posture accuracy (p < 0.001,\ - \ Cohen's d = 1.047). Reaction card feedback and expert interviews indicate that\ - \ the feedback is considered fun and useful for training independently from the\ - \ teacher.},\n address = {Copenhagen, Denmark},\n author = {Esben W. Knudsen and\ - \ Malte L. Hølledig and Mads Juel Nielsen and Rikke K. Petersen and Sebastian\ - \ Bach-Nielsen and Bogdan-Constantin Zanescu and Daniel Overholt and Hendrik Purwins\ - \ and Kim Helweg},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1181422},\n\ - \ issn = {2220-4806},\n pages = {71--76},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Audio-Visual Feedback for Self-monitoring Posture in Ballet Training},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0015.pdf},\n year\ - \ = {2017}\n}\n" + ID: Poepel2005 + abstract: 'While many new interfaces for musical expression have been presented + in the past, methods to evaluate these interfaces are rare.This paper presents + a method and a study comparing the potentialfor musical expression of different + string-instrument based musicalinterfaces. 
Cues for musical expression are defined + based on results of research in musical expression and on methods for musicaleducation + in instrumental pedagogy. Interfaces are evaluated according to how well they + are estimated to allow players making useof their existing technique for the creation + of expressive music.' + address: 'Vancouver, BC, Canada' + author: 'Poepel, Cornelius' + bibtex: "@inproceedings{Poepel2005,\n abstract = {While many new interfaces for\ + \ musical expression have been presented in the past, methods to evaluate these\ + \ interfaces are rare.This paper presents a method and a study comparing the potentialfor\ + \ musical expression of different string-instrument based musicalinterfaces. Cues\ + \ for musical expression are defined based on results of research in musical expression\ + \ and on methods for musicaleducation in instrumental pedagogy. Interfaces are\ + \ evaluated according to how well they are estimated to allow players making useof\ + \ their existing technique for the creation of expressive music.},\n address =\ + \ {Vancouver, BC, Canada},\n author = {Poepel, Cornelius},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176802},\n issn = {2220-4806},\n keywords = {Musical\ + \ Expression, electronic bowed string instrument, evaluation of musical input\ + \ devices, audio signal driven sound synthesis },\n pages = {228--231},\n title\ + \ = {On Interface Expressivity: A Player-Based Study},\n url = {http://www.nime.org/proceedings/2005/nime2005_228.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1181422 + doi: 10.5281/zenodo.1176802 issn: 2220-4806 - pages: 71--76 - publisher: Aalborg University Copenhagen - title: Audio-Visual Feedback for Self-monitoring Posture in Ballet Training - url: 
http://www.nime.org/proceedings/2017/nime2017_paper0015.pdf - year: 2017 + keywords: 'Musical Expression, electronic bowed string instrument, evaluation of + musical input devices, audio signal driven sound synthesis ' + pages: 228--231 + title: 'On Interface Expressivity: A Player-Based Study' + url: http://www.nime.org/proceedings/2005/nime2005_228.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: rlindell2017 - abstract: "We explore the phenomenology of embodiment based on research through\ - \ design and reflection on the design of artefacts for augmenting embodied performance.\ - \ We present three designs for highly trained musicians; the designs rely on the\ - \ musicians' mastery acquired from years of practice. Through the knowledge of\ - \ the living body their instruments – saxophone, cello, and flute –\ - \ are extensions of themselves; thus, we can explore technology with rich nuances\ - \ and precision in corporeal schemas. With the help of Merleau-Ponty's phenomenology\ - \ of embodiment we present three hypotheses for augmented embodied performance:\ - \ the extended artistic room, the interactively enacted teacher, and the humanisation\ - \ of technology. " - address: 'Copenhagen, Denmark' - author: Rikard Lindell and Tomas Kumlin - bibtex: "@inproceedings{rlindell2017,\n abstract = {We explore the phenomenology\ - \ of embodiment based on research through design and reflection on the design\ - \ of artefacts for augmenting embodied performance. We present three designs for\ - \ highly trained musicians; the designs rely on the musicians' mastery acquired\ - \ from years of practice. 
Through the knowledge of the living body their instruments\ - \ – saxophone, cello, and flute – are extensions of themselves; thus,\ - \ we can explore technology with rich nuances and precision in corporeal schemas.\ - \ With the help of Merleau-Ponty's phenomenology of embodiment we present three\ - \ hypotheses for augmented embodied performance: the extended artistic room, the\ - \ interactively enacted teacher, and the humanisation of technology. },\n address\ - \ = {Copenhagen, Denmark},\n author = {Rikard Lindell and Tomas Kumlin},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176177},\n issn = {2220-4806},\n pages\ - \ = {77--82},\n publisher = {Aalborg University Copenhagen},\n title = {Augmented\ - \ Embodied Performance – Extended Artistic Room, Enacted Teacher, and Humanisation\ - \ of Technology},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0016.pdf},\n\ - \ year = {2017}\n}\n" + ID: Wingstedt2005 + abstract: 'A typical experiment design within the field of music psychology is playing + music to a test subject who listens and reacts – most often by verbal means. One + limitation of this kind of test is the inherent difficulty of measuring an emotional + reaction in a laboratory setting. This paper describes the design, functions and + possible uses of the software tool REMUPP (Relations between musical parameters + and perceived properties), designed for investigating various aspects of musical + experience. REMUPP allows for non-verbal examination of selected musical parameters + (such as tonality, tempo, timbre, articulation, volume, register etc.) in a musical + context. The musical control is put into the hands of the subject, introducing + an element of creativity and enhancing the sense of immersion. 
Information acquired + with REMUPP can be output as numerical data for statistical analysis, but the + tool is also suited for the use with more qualitatively oriented methods.' + address: 'Vancouver, BC, Canada' + author: 'Wingstedt, Johnny and Liljedahl, Mats and Lindberg, Stefan and Berg, Jan' + bibtex: "@inproceedings{Wingstedt2005,\n abstract = {A typical experiment design\ + \ within the field of music psychology is playing music to a test subject who\ + \ listens and reacts – most often by verbal means. One limitation of this kind\ + \ of test is the inherent difficulty of measuring an emotional reaction in a laboratory\ + \ setting. This paper describes the design, functions and possible uses of the\ + \ software tool REMUPP (Relations between musical parameters and perceived properties),\ + \ designed for investigating various aspects of musical experience. REMUPP allows\ + \ for non-verbal examination of selected musical parameters (such as tonality,\ + \ tempo, timbre, articulation, volume, register etc.) in a musical context. The\ + \ musical control is put into the hands of the subject, introducing an element\ + \ of creativity and enhancing the sense of immersion. 
Information acquired with\ + \ REMUPP can be output as numerical data for statistical analysis, but the tool\ + \ is also suited for the use with more qualitatively oriented methods.},\n address\ + \ = {Vancouver, BC, Canada},\n author = {Wingstedt, Johnny and Liljedahl, Mats\ + \ and Lindberg, Stefan and Berg, Jan},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176842},\n\ + \ issn = {2220-4806},\n keywords = {Musical experience, non-verbal test techniques,\ + \ musical parameters.},\n pages = {232--235},\n title = {REMUPP -- An Interactive\ + \ Tool for Investigating Musical Properties and Relations},\n url = {http://www.nime.org/proceedings/2005/nime2005_232.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176177 + doi: 10.5281/zenodo.1176842 issn: 2220-4806 - pages: 77--82 - publisher: Aalborg University Copenhagen - title: 'Augmented Embodied Performance – Extended Artistic Room, Enacted Teacher, - and Humanisation of Technology' - url: http://www.nime.org/proceedings/2017/nime2017_paper0016.pdf - year: 2017 + keywords: 'Musical experience, non-verbal test techniques, musical parameters.' + pages: 232--235 + title: REMUPP -- An Interactive Tool for Investigating Musical Properties and Relations + url: http://www.nime.org/proceedings/2005/nime2005_232.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: jvetter2017 - abstract: 'In this paper we discuss a modular instrument system for musical expression - consisting of multiple devices using string detection, sound synthesis and wireless - communication. The design of the system allows for different physical arrangements, - which we define as topologies. 
In particular we will explain our concept and - requirements, the system architecture including custom magnetic string sensors - and our network communication and discuss its use in the performance HOMO RESTIS.' - address: 'Copenhagen, Denmark' - author: Jens Vetter and Sarah Leimcke - bibtex: "@inproceedings{jvetter2017,\n abstract = {In this paper we discuss a modular\ - \ instrument system for musical expression consisting of multiple devices using\ - \ string detection, sound synthesis and wireless communication. The design of\ - \ the system allows for different physical arrangements, which we define as topologies.\ - \ In particular we will explain our concept and requirements, the system architecture\ - \ including custom magnetic string sensors and our network communication and discuss\ - \ its use in the performance HOMO RESTIS.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Jens Vetter and Sarah Leimcke},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176179},\n issn = {2220-4806},\n pages = {83--86},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {Homo Restis --- Constructive Control\ - \ Through Modular String Topologies},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0017.pdf},\n\ - \ year = {2017}\n}\n" + ID: Cook2005 + abstract: 'A wide variety of singing synthesis models and methods exist,but there + are remarkably few real-time controllers for thesemodels. This paper describes + a variety of devices developedover the last few years for controlling singing + synthesismodels implemented in the Synthesis Toolkit in C++ (STK),Max/MSP, and + ChucK. 
All of the controllers share somecommon features, such as air-pressure + sensing for breathingand/or loudness control, means to control pitch, and methodsfor + selecting and blending phonemes, diphones, and words.However, the form factors, + sensors, mappings, and algorithmsvary greatly between the different controllers.' + address: 'Vancouver, BC, Canada' + author: 'Cook, Perry R.' + bibtex: "@inproceedings{Cook2005,\n abstract = {A wide variety of singing synthesis\ + \ models and methods exist,but there are remarkably few real-time controllers\ + \ for thesemodels. This paper describes a variety of devices developedover the\ + \ last few years for controlling singing synthesismodels implemented in the Synthesis\ + \ Toolkit in C++ (STK),Max/MSP, and ChucK. All of the controllers share somecommon\ + \ features, such as air-pressure sensing for breathingand/or loudness control,\ + \ means to control pitch, and methodsfor selecting and blending phonemes, diphones,\ + \ and words.However, the form factors, sensors, mappings, and algorithmsvary greatly\ + \ between the different controllers.},\n address = {Vancouver, BC, Canada},\n\ + \ author = {Cook, Perry R.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176846},\n\ + \ issn = {2220-4806},\n keywords = {Singing synthesis, real-time singing synthesis\ + \ control. 
},\n pages = {236--237},\n title = {Real-Time Performance Controllers\ + \ for Synthesized Singing},\n url = {http://www.nime.org/proceedings/2005/nime2005_236.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176179 + doi: 10.5281/zenodo.1176846 issn: 2220-4806 - pages: 83--86 - publisher: Aalborg University Copenhagen - title: Homo Restis --- Constructive Control Through Modular String Topologies - url: http://www.nime.org/proceedings/2017/nime2017_paper0017.pdf - year: 2017 + keywords: 'Singing synthesis, real-time singing synthesis control. ' + pages: 236--237 + title: Real-Time Performance Controllers for Synthesized Singing + url: http://www.nime.org/proceedings/2005/nime2005_236.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: jbarbosa2017 - abstract: "Play and playfulness compose an essential part of our lives as human\ - \ beings. From childhood to adultness, playfulness is often associated with remarkable\ - \ positive experiences related to fun, pleasure, intimate social activities, imagination,\ - \ and creativity. Perhaps not surprisingly, playfulness has been recurrently used\ - \ in NIME designs as a strategy to engage people, often non-expert, in short term\ - \ musical activities. Yet, designing for playfulness remains a challenging task,\ - \ as little knowledge is available for designers to support their decisions. \ - \ To address this issue, we follow a design rationale approach using the context\ - \ of Live Looping (LL) as a case study. We start by surveying 101 LL tools, summarizing\ - \ our analysis into a new design space. We then use this design space to discuss\ - \ potential guidelines to address playfulness in a design process. These guidelines\ - \ are implemented and discussed in a new LL tool—called the \"Voice Reaping\ - \ Machine\". Finally, we contrast our guidelines with previous works in the literature." 
- address: 'Copenhagen, Denmark' - author: Jeronimo Barbosa and Marcelo M. Wanderley and Stéphane Huot - bibtex: "@inproceedings{jbarbosa2017,\n abstract = {Play and playfulness compose\ - \ an essential part of our lives as human beings. From childhood to adultness,\ - \ playfulness is often associated with remarkable positive experiences related\ - \ to fun, pleasure, intimate social activities, imagination, and creativity. Perhaps\ - \ not surprisingly, playfulness has been recurrently used in NIME designs as a\ - \ strategy to engage people, often non-expert, in short term musical activities.\ - \ Yet, designing for playfulness remains a challenging task, as little knowledge\ - \ is available for designers to support their decisions. To address this issue,\ - \ we follow a design rationale approach using the context of Live Looping (LL)\ - \ as a case study. We start by surveying 101 LL tools, summarizing our analysis\ - \ into a new design space. We then use this design space to discuss potential\ - \ guidelines to address playfulness in a design process. These guidelines are\ - \ implemented and discussed in a new LL tool—called the \"Voice Reaping\ - \ Machine\". 
Finally, we contrast our guidelines with previous works in the literature.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Jeronimo Barbosa and Marcelo M.\ - \ Wanderley and Stéphane Huot},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176181},\n\ - \ issn = {2220-4806},\n pages = {87--92},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Exploring Playfulness in NIME Design: The Case of Live Looping Tools},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0018.pdf},\n year\ - \ = {2017}\n}\n" + ID: KimBoyle2005 + abstract: 'The author describes a recent composition for piano and computer in which + the score performed by the pianist, read from a computer monitor, is generated + in real-time from a vocabulary of predetermined scanned score excerpts. The author + outlines the algorithm used to choose and display a particular excerpt and describes + some of the musical difficulties faced by the pianist in a performance of the + work.' + address: 'Vancouver, BC, Canada' + author: 'Kim-Boyle, David' + bibtex: "@inproceedings{KimBoyle2005,\n abstract = {The author describes a recent\ + \ composition for piano and computer in which the score performed by the pianist,\ + \ read from a computer monitor, is generated in real-time from a vocabulary of\ + \ predetermined scanned score excerpts. The author outlines the algorithm used\ + \ to choose and display a particular excerpt and describes some of the musical\ + \ difficulties faced by the pianist in a performance of the work.},\n address\ + \ = {Vancouver, BC, Canada},\n author = {Kim-Boyle, David},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176762},\n issn = {2220-4806},\n keywords = {Score generation,\ + \ Jitter. 
},\n pages = {238--239},\n title = {Musical Score Generation in Valses\ + \ and Etudes},\n url = {http://www.nime.org/proceedings/2005/nime2005_238.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176181 + doi: 10.5281/zenodo.1176762 issn: 2220-4806 - pages: 87--92 - publisher: Aalborg University Copenhagen - title: 'Exploring Playfulness in NIME Design: The Case of Live Looping Tools' - url: http://www.nime.org/proceedings/2017/nime2017_paper0018.pdf - year: 2017 + keywords: 'Score generation, Jitter. ' + pages: 238--239 + title: Musical Score Generation in Valses and Etudes + url: http://www.nime.org/proceedings/2005/nime2005_238.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: dmanesh2017 - abstract: 'Exquisite Score is a web application which allows users to collaborate - on short musical compositions using the paradigm of the parlor game exquisite - corpse. Through a MIDI-sequencer interface, composers each contribute a section - to a piece of music, only seeing the very end of the preceding section. Exquisite - Score is both a fun and accessible compositional game as well as a system for - encouraging highly novel musical compositions. Exquisite Score was tested by several - students and musicians. Several short pieces were created and a brief discussion - and analysis of these pieces is included.' - address: 'Copenhagen, Denmark' - author: Daniel Manesh and Eran Egozy - bibtex: "@inproceedings{dmanesh2017,\n abstract = {Exquisite Score is a web application\ - \ which allows users to collaborate on short musical compositions using the paradigm\ - \ of the parlor game exquisite corpse. Through a MIDI-sequencer interface, composers\ - \ each contribute a section to a piece of music, only seeing the very end of the\ - \ preceding section. 
Exquisite Score is both a fun and accessible compositional\ - \ game as well as a system for encouraging highly novel musical compositions.\ - \ Exquisite Score was tested by several students and musicians. Several short\ - \ pieces were created and a brief discussion and analysis of these pieces is included.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Daniel Manesh and Eran Egozy},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176183},\n issn = {2220-4806},\n\ - \ pages = {93--98},\n publisher = {Aalborg University Copenhagen},\n title = {Exquisite\ - \ Score: A System for Collaborative Musical Composition},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0019.pdf},\n\ - \ year = {2017}\n}\n" + ID: Baird2005 + abstract: 'No Clergy is an interactive music performance/installation inwhich the + audience is able to shape the ongoing music. In it,members of a small acoustic + ensemble read music notation fromcomputer screens. As each page refreshes, the + notation is alteredand shaped by both stochastic transformations of earlier musicwith + the same performance and audience feedback, collected viastandard CGI forms. ' + address: 'Vancouver, BC, Canada' + author: 'Baird, Kevin C.' + bibtex: "@inproceedings{Baird2005,\n abstract = {No Clergy is an interactive music\ + \ performance/installation inwhich the audience is able to shape the ongoing music.\ + \ In it,members of a small acoustic ensemble read music notation fromcomputer\ + \ screens. As each page refreshes, the notation is alteredand shaped by both stochastic\ + \ transformations of earlier musicwith the same performance and audience feedback,\ + \ collected viastandard CGI forms. 
},\n address = {Vancouver, BC, Canada},\n author\ + \ = {Baird, Kevin C.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176695},\n\ + \ issn = {2220-4806},\n keywords = {notation, stochastic, interactive, audience,\ + \ Python, Lilypond },\n pages = {240--241},\n title = {Real-Time Generation of\ + \ Music Notation via Audience Interaction Using Python and {GNU} Lilypond},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_240.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176183 + doi: 10.5281/zenodo.1176695 issn: 2220-4806 - pages: 93--98 - publisher: Aalborg University Copenhagen - title: 'Exquisite Score: A System for Collaborative Musical Composition' - url: http://www.nime.org/proceedings/2017/nime2017_paper0019.pdf - year: 2017 + keywords: 'notation, stochastic, interactive, audience, Python, Lilypond ' + pages: 240--241 + title: Real-Time Generation of Music Notation via Audience Interaction Using Python + and GNU Lilypond + url: http://www.nime.org/proceedings/2005/nime2005_240.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: sstenslie2017 - abstract: 'This paper presents a new spherical shaped capacitive sensor device for - creating interactive compositions and embodied user experiences inside of a periphonic, - 3D sound space. The Somatic Sound project is here presented as a) technological - innovative musical instrument, and b) an experiential art installation. One of - the main research foci is to explore embodied experiences through moving, interactive - and somatic sound. The term somatic is here understood and used as in relating - to the body in a physical, holistic and immersive manner.' 
- address: 'Copenhagen, Denmark' - author: Stahl Stenslie and Kjell Tore Innervik and Ivar Frounberg and Thom Johansen - bibtex: "@inproceedings{sstenslie2017,\n abstract = {This paper presents a new spherical\ - \ shaped capacitive sensor device for creating interactive compositions and embodied\ - \ user experiences inside of a periphonic, 3D sound space. The Somatic Sound project\ - \ is here presented as a) technological innovative musical instrument, and b)\ - \ an experiential art installation. One of the main research foci is to explore\ - \ embodied experiences through moving, interactive and somatic sound. The term\ - \ somatic is here understood and used as in relating to the body in a physical,\ - \ holistic and immersive manner.},\n address = {Copenhagen, Denmark},\n author\ - \ = {Stahl Stenslie and Kjell Tore Innervik and Ivar Frounberg and Thom Johansen},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176185},\n issn = {2220-4806},\n\ - \ pages = {99--103},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Somatic Sound in Performative Contexts},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0020.pdf},\n\ - \ year = {2017}\n}\n" + ID: Fox2005 + abstract: 'This paper describes the design of SoniMime, a system forthe sonification + of hand movement for real-time timbre shaping. We explore the application of the + tristimulus timbremodel for the sonification of gestural data, working towardthe + goals of musical expressivity and physical responsiveness. SoniMime uses two 3-D + accelerometers connected toan Atmel microprocessor which outputs OSC control messages. + Data filtering, parameter mapping, and sound synthesis take place in Pd running + on a Linux computer.' 
+ address: 'Vancouver, BC, Canada' + author: 'Fox, Jesse and Carlile, Jennifer' + bibtex: "@inproceedings{Fox2005,\n abstract = {This paper describes the design of\ + \ SoniMime, a system forthe sonification of hand movement for real-time timbre\ + \ shaping. We explore the application of the tristimulus timbremodel for the sonification\ + \ of gestural data, working towardthe goals of musical expressivity and physical\ + \ responsiveness. SoniMime uses two 3-D accelerometers connected toan Atmel microprocessor\ + \ which outputs OSC control messages. Data filtering, parameter mapping, and sound\ + \ synthesis take place in Pd running on a Linux computer.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Fox, Jesse and Carlile, Jennifer},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176741},\n issn = {2220-4806},\n keywords = {Sonification,\ + \ Musical Controller, Human Computer Interaction },\n pages = {242--243},\n title\ + \ = {SoniMime: Movement Sonification for Real-Time Timbre Shaping},\n url = {http://www.nime.org/proceedings/2005/nime2005_242.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176185 + doi: 10.5281/zenodo.1176741 issn: 2220-4806 - pages: 99--103 - publisher: Aalborg University Copenhagen - title: Somatic Sound in Performative Contexts - url: http://www.nime.org/proceedings/2017/nime2017_paper0020.pdf - year: 2017 + keywords: 'Sonification, Musical Controller, Human Computer Interaction ' + pages: 242--243 + title: 'SoniMime: Movement Sonification for Real-Time Timbre Shaping' + url: http://www.nime.org/proceedings/2005/nime2005_242.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: jlarsen2017 - abstract: 'Musical instruments and musical user interfaces provide rich input and - feedback through mostly tangible interactions, resulting in complex 
behavior. However, - publications of novel interfaces often lack the required detail due to the complexity - or the focus on a specific part of the interfaces and absence of a specific template - or structure to describe these interactions. Drawing on and synthesizing models - from interaction design and music making we propose a way for modeling musical - interfaces by providing a scheme and visual language to describe, design, analyze, - and compare interfaces for music making. To illustrate its capabilities we apply - the proposed model to a range of assistive musical instruments, which often draw - on multi-modal in- and output, resulting in complex designs and descriptions thereof.' - address: 'Copenhagen, Denmark' - author: Jeppe Veirum Larsen and Hendrik Knoche - bibtex: "@inproceedings{jlarsen2017,\n abstract = {Musical instruments and musical\ - \ user interfaces provide rich input and feedback through mostly tangible interactions,\ - \ resulting in complex behavior. However, publications of novel interfaces often\ - \ lack the required detail due to the complexity or the focus on a specific part\ - \ of the interfaces and absence of a specific template or structure to describe\ - \ these interactions. Drawing on and synthesizing models from interaction design\ - \ and music making we propose a way for modeling musical interfaces by providing\ - \ a scheme and visual language to describe, design, analyze, and compare interfaces\ - \ for music making. 
To illustrate its capabilities we apply the proposed model\ - \ to a range of assistive musical instruments, which often draw on multi-modal\ - \ in- and output, resulting in complex designs and descriptions thereof.},\n address\ - \ = {Copenhagen, Denmark},\n author = {Jeppe Veirum Larsen and Hendrik Knoche},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176187},\n issn = {2220-4806},\n\ - \ pages = {104--109},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {States and Sound: Modelling Interactions with Musical User Interfaces},\n url\ - \ = {http://www.nime.org/proceedings/2017/nime2017_paper0021.pdf},\n year = {2017}\n\ + ID: Huott2005 + abstract: 'This paper presents the ‘Bean’, a novel controller employing a multi-touch + sensate surface in a compound curve shape. The design goals, construction, and + mapping system are discussed, along with a retrospective from a previous, similar + design.' + address: 'Vancouver, BC, Canada' + author: 'Huott, Robert' + bibtex: "@inproceedings{Huott2005,\n abstract = {This paper presents the ‘Bean’,\ + \ a novel controller employing a multi-touch sensate surface in a compound curve\ + \ shape. 
The design goals, construction, and mapping system are discussed, along\ + \ with a retrospective from a previous, similar design.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Huott, Robert},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176848},\n\ + \ issn = {2220-4806},\n keywords = {Musical controller, sensate surface, mapping\ + \ system },\n pages = {244--245},\n title = {Precise Control on Compound Curves},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_244.pdf},\n year = {2005}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176187 + doi: 10.5281/zenodo.1176848 issn: 2220-4806 - pages: 104--109 - publisher: Aalborg University Copenhagen - title: 'States and Sound: Modelling Interactions with Musical User Interfaces' - url: http://www.nime.org/proceedings/2017/nime2017_paper0021.pdf - year: 2017 + keywords: 'Musical controller, sensate surface, mapping system ' + pages: 244--245 + title: Precise Control on Compound Curves + url: http://www.nime.org/proceedings/2005/nime2005_244.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: gxia2017 - abstract: 'The interaction between music improvisers is studied in the context of - piano duets, where one improviser embellishes a melody, and the other plays a - chordal accompaniment with great freedom. We created an automated accompaniment - player that learns to play from example performances. Accompaniments are constructed - by selecting and concatenating one-measure score units from actual performances. - An important innovation is the ability to learn how the improvised accompaniment - should respond to variations in the melody performance, using tempo and embellishment - complexity as features, resulting in a truly interactive performance within a - conventional musical framework. 
We conducted both objective and subjective evaluations, - showing that the learned improviser performs more interactive, musical, and human-like - accompaniment compared with the less responsive, rule-based baseline algorithm.' - address: 'Copenhagen, Denmark' - author: Guangyu Xia and Roger Dannenberg - bibtex: "@inproceedings{gxia2017,\n abstract = {The interaction between music improvisers\ - \ is studied in the context of piano duets, where one improviser embellishes a\ - \ melody, and the other plays a chordal accompaniment with great freedom. We created\ - \ an automated accompaniment player that learns to play from example performances.\ - \ Accompaniments are constructed by selecting and concatenating one-measure score\ - \ units from actual performances. An important innovation is the ability to learn\ - \ how the improvised accompaniment should respond to variations in the melody\ - \ performance, using tempo and embellishment complexity as features, resulting\ - \ in a truly interactive performance within a conventional musical framework.\ - \ We conducted both objective and subjective evaluations, showing that the learned\ - \ improviser performs more interactive, musical, and human-like accompaniment\ - \ compared with the less responsive, rule-based baseline algorithm.},\n address\ - \ = {Copenhagen, Denmark},\n author = {Guangyu Xia and Roger Dannenberg},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176189},\n issn = {2220-4806},\n pages\ - \ = {110--114},\n publisher = {Aalborg University Copenhagen},\n title = {Improvised\ - \ Duet Interaction: Learning Improvisation Techniques for Automatic Accompaniment},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0022.pdf},\n year\ - \ = {2017}\n}\n" + ID: Lugo2005 + abstract: 'This paper describes the design and implementation of BeatBoxing, a percussive + gestural interface for the liveperformance 
of electronic music and control of + computerbased games and musical activities.' + address: 'Vancouver, BC, Canada' + author: 'Lugo, Robert and Damondrick, Jack' + bibtex: "@inproceedings{Lugo2005,\n abstract = {This paper describes the design\ + \ and implementation of BeatBoxing, a percussive gestural interface for the liveperformance\ + \ of electronic music and control of computerbased games and musical activities.},\n\ + \ address = {Vancouver, BC, Canada},\n author = {Lugo, Robert and Damondrick,\ + \ Jack},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176778},\n issn = {2220-4806},\n\ + \ keywords = {Performance, Gestural Mapping, Music Controller, Human-Computer\ + \ Interaction, PureData (Pd), OSC },\n pages = {246--247},\n title = {Beat Boxing\ + \ : Expressive Control for Electronic Music Performance and Musical Applications},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_246.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176189 + doi: 10.5281/zenodo.1176778 issn: 2220-4806 - pages: 110--114 - publisher: Aalborg University Copenhagen - title: 'Improvised Duet Interaction: Learning Improvisation Techniques for Automatic - Accompaniment' - url: http://www.nime.org/proceedings/2017/nime2017_paper0022.pdf - year: 2017 + keywords: 'Performance, Gestural Mapping, Music Controller, Human-Computer Interaction, + PureData (Pd), OSC ' + pages: 246--247 + title: 'Beat Boxing : Expressive Control for Electronic Music Performance and Musical + Applications' + url: http://www.nime.org/proceedings/2005/nime2005_246.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: pdahlstedt2017 - abstract: 'A new hybrid approach to digital keyboard playing is presented, where - the actual acoustic sounds from a digital keyboard are captured with contact microphones - and applied as 
excitation signals to a digital model of a prepared piano, i.e., - an extended wave-guide model of strings with the possibility of stopping and muting - the strings at arbitrary positions. The parameters of the string model are controlled - through TouchKeys multitouch sensors on each key, combined with MIDI data and - acoustic signals from the digital keyboard frame, using a novel mapping. The - instrument is evaluated from a performing musician''s perspective, and emerging - playing techniques are discussed. Since the instrument is a hybrid acoustic-digital - system with several feedback paths between the domains, it provides for expressive - and dynamic playing, with qualities approaching that of an acoustic instrument, - yet with new kinds of control. The contributions are two-fold. First, the use - of acoustic sounds from a physical keyboard for excitations and resonances results - in a novel hybrid keyboard instrument in itself. Second, the digital model of - "inside piano" playing, using multitouch keyboard data, allows for performance - techniques going far beyond conventional keyboard playing.' - address: 'Copenhagen, Denmark' - author: Palle Dahlstedt - bibtex: "@inproceedings{pdahlstedt2017,\n abstract = {A new hybrid approach to digital\ - \ keyboard playing is presented, where the actual acoustic sounds from a digital\ - \ keyboard are captured with contact microphones and applied as excitation signals\ - \ to a digital model of a prepared piano, i.e., an extended wave-guide model of\ - \ strings with the possibility of stopping and muting the strings at arbitrary\ - \ positions. The parameters of the string model are controlled through TouchKeys\ - \ multitouch sensors on each key, combined with MIDI data and acoustic signals\ - \ from the digital keyboard frame, using a novel mapping. The instrument is evaluated\ - \ from a performing musician's perspective, and emerging playing techniques are\ - \ discussed. 
Since the instrument is a hybrid acoustic-digital system with several\ - \ feedback paths between the domains, it provides for expressive and dynamic playing,\ - \ with qualities approaching that of an acoustic instrument, yet with new kinds\ - \ of control. The contributions are two-fold. First, the use of acoustic sounds\ - \ from a physical keyboard for excitations and resonances results in a novel hybrid\ - \ keyboard instrument in itself. Second, the digital model of \"inside piano\"\ - \ playing, using multitouch keyboard data, allows for performance techniques going\ - \ far beyond conventional keyboard playing.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Palle Dahlstedt},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176191},\n\ - \ issn = {2220-4806},\n pages = {115--120},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Physical Interactions with Digital Strings --- A hybrid\ - \ approach to a digital keyboard instrument},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0023.pdf},\n\ - \ year = {2017}\n}\n" + ID: Franco2005 + abstract: 'This paper describes the development of AirStick, an interface for musical + expression. AirStick is played in the air, in a Theremin style. It is composed + of an array of infrared proximity sensors, which allow the mapping of the position + of any interfering obstacle inside a bi-dimensional zone. This controller sends + both x and y control data to various real-time synthesis algorithms. ' + address: 'Vancouver, BC, Canada' + author: 'Franco, Ivan' + bibtex: "@inproceedings{Franco2005,\n abstract = {This paper describes the development\ + \ of AirStick, an interface for musical expression. AirStick is played {in the\ + \ air}, in a Theremin style. 
It is composed of an array of infrared proximity\ + \ sensors, which allow the mapping of the position of any interfering obstacle\ + \ inside a bi-dimensional zone. This controller sends both x and y control data\ + \ to various real-time synthesis algorithms. },\n address = {Vancouver, BC, Canada},\n\ + \ author = {Franco, Ivan},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176747},\n\ + \ issn = {2220-4806},\n keywords = {Music Controller, Infrared Sensing, Computer\ + \ Music. },\n pages = {248--249},\n title = {The Airstick: A Free-Gesture Controller\ + \ Using Infrared Sensing},\n url = {http://www.nime.org/proceedings/2005/nime2005_248.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176191 + doi: 10.5281/zenodo.1176747 issn: 2220-4806 - pages: 115--120 - publisher: Aalborg University Copenhagen - title: Physical Interactions with Digital Strings --- A hybrid approach to a digital - keyboard instrument - url: http://www.nime.org/proceedings/2017/nime2017_paper0023.pdf - year: 2017 + keywords: 'Music Controller, Infrared Sensing, Computer Music. ' + pages: 248--249 + title: 'The Airstick: A Free-Gesture Controller Using Infrared Sensing' + url: http://www.nime.org/proceedings/2005/nime2005_248.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: croberts2017 - abstract: 'We describe two new versions of the gibberwocky live-coding system. One - integrates with Max/MSP while the second targets MIDI output and runs entirely - in the browser. We discuss commonalities and differences between the three environments, - and how they fit into the live-coding landscape. We also describe lessons learned - while performing with the original version of gibberwocky, both from our perspective - and the perspective of others. 
These lessons informed the addition of animated - sparkline visualizations depicting modulations to performers and audiences in - all three versions.' - address: 'Copenhagen, Denmark' - author: Charles Roberts and Graham Wakefield - bibtex: "@inproceedings{croberts2017,\n abstract = {We describe two new versions\ - \ of the gibberwocky live-coding system. One integrates with Max/MSP while the\ - \ second targets MIDI output and runs entirely in the browser. We discuss commonalities\ - \ and differences between the three environments, and how they fit into the live-coding\ - \ landscape. We also describe lessons learned while performing with the original\ - \ version of gibberwocky, both from our perspective and the perspective of others.\ - \ These lessons informed the addition of animated sparkline visualizations depicting\ - \ modulations to performers and audiences in all three versions.},\n address =\ - \ {Copenhagen, Denmark},\n author = {Charles Roberts and Graham Wakefield},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176193},\n issn = {2220-4806},\n\ - \ pages = {121--126},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {gibberwocky: New Live-Coding Instruments for Musical Performance},\n url =\ - \ {http://www.nime.org/proceedings/2017/nime2017_paper0024.pdf},\n year = {2017}\n\ + ID: Carlile2005 + abstract: 'OROBORO is a novel collaborative controller which focuses on musical + performance as social experience by exploring synchronized actions of two musicians + operating a single instrument. Each performer uses two paddle mechanisms – one + for hand orientation sensing and one for servo-motor actuated feedback. We introduce + a haptic mirror in which the movement of one performer’s sensed hand is used to + induce movement of the partner’s actuated hand and vice versa. We describe theoretical + motivation, and hardware/software implementation.' 
+ address: 'Vancouver, BC, Canada' + author: 'Carlile, Jennifer and Hartmann, Björn' + bibtex: "@inproceedings{Carlile2005,\n abstract = {OROBORO is a novel collaborative\ + \ controller which focuses on musical performance as social experience by exploring\ + \ synchronized actions of two musicians operating a single instrument. Each performer\ + \ uses two paddle mechanisms – one for hand orientation sensing and one for servo-motor\ + \ actuated feedback. We introduce a haptic mirror in which the movement of one\ + \ performer’s sensed hand is used to induce movement of the partner’s actuated\ + \ hand and vice versa. We describe theoretical motivation, and hardware/software\ + \ implementation.},\n address = {Vancouver, BC, Canada},\n author = {Carlile,\ + \ Jennifer and Hartmann, Bj{\\''{o}}rn},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176721},\n\ + \ issn = {2220-4806},\n keywords = {Musical Controller, Collaborative Control,\ + \ Haptic Interfaces },\n pages = {250--251},\n title = {{OR}OBORO: A Collaborative\ + \ Controller with Interpersonal Haptic Feedback},\n url = {http://www.nime.org/proceedings/2005/nime2005_250.pdf},\n\ + \ year = {2005}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176721 + issn: 2220-4806 + keywords: 'Musical Controller, Collaborative Control, Haptic Interfaces ' + pages: 250--251 + title: 'OROBORO: A Collaborative Controller with Interpersonal Haptic Feedback' + url: http://www.nime.org/proceedings/2005/nime2005_250.pdf + year: 2005 + + +- ENTRYTYPE: inproceedings + ID: Rodriguez2005 + abstract: 'We present a Virtual Interface to Feel Emotions called VIFE _alpha v.01 + (Virtual Interface to Feel Emotions). 
The work investigates the idea of Synaesthesia + and her enormous possibilities creating new realities, sensations and zones where + the user can find new points of interaction. This interface allows the user to + create sonorous and visual compositions in real time. 6 three-dimensional sonorous + forms are modified according to the movements of the user. These forms represent + sonorous objects that respond to this by means of sensorial stimuli. Multiple + combinations of colors and sound effects superpose to an a the others to give + rise to a unique experience.' + address: 'Vancouver, BC, Canada' + author: 'Rodríguez, David and Rodríguez, Iván' + bibtex: "@inproceedings{Rodriguez2005,\n abstract = {We present a Virtual Interface\ + \ to Feel Emotions called VIFE {\\_}alpha v.01 (Virtual Interface to Feel Emotions).\ + \ The work investigates the idea of Synaesthesia and her enormous possibilities\ + \ creating new realities, sensations and zones where the user can find new points\ + \ of interaction. This interface allows the user to create sonorous and visual\ + \ compositions in real time. 6 three-dimensional sonorous forms are modified according\ + \ to the movements of the user. These forms represent sonorous objects that respond\ + \ to this by means of sensorial stimuli. Multiple combinations of colors and sound\ + \ effects superpose to an a the others to give rise to a unique experience.},\n\ + \ address = {Vancouver, BC, Canada},\n author = {Rodr\\'{\\i}guez, David and Rodr\\\ + '{\\i}guez, Iv\\'{a}n},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176806},\n\ + \ issn = {2220-4806},\n keywords = {Synaesthesia, 3D render, new reality, virtual\ + \ interface, creative interaction, sensors. 
},\n pages = {252--253},\n title =\ + \ {VIFE \\_alpha v.01 Real-time Visual Sound Installation performed by Glove-Gesture},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_252.pdf},\n year = {2005}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176193 + doi: 10.5281/zenodo.1176806 issn: 2220-4806 - pages: 121--126 - publisher: Aalborg University Copenhagen - title: 'gibberwocky: New Live-Coding Instruments for Musical Performance' - url: http://www.nime.org/proceedings/2017/nime2017_paper0024.pdf - year: 2017 + keywords: 'Synaesthesia, 3D render, new reality, virtual interface, creative interaction, + sensors. ' + pages: 252--253 + title: VIFE _alpha v.01 Real-time Visual Sound Installation performed by Glove-Gesture + url: http://www.nime.org/proceedings/2005/nime2005_252.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: sleitman2017 - abstract: This paper is an overview of the current state of a course on New Interfaces - for Musical Expression taught at Stanford University. It gives an overview of - the various technologies and methodologies used to teach the interdisciplinary - work of new musical interfaces. - address: 'Copenhagen, Denmark' - author: Sasha Leitman - bibtex: "@inproceedings{sleitman2017,\n abstract = {This paper is an overview of\ - \ the current state of a course on New Interfaces for Musical Expression taught\ - \ at Stanford University. 
It gives an overview of the various technologies and\ - \ methodologies used to teach the interdisciplinary work of new musical interfaces.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Sasha Leitman},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176197},\n issn = {2220-4806},\n pages = {127--132},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Current Iteration of\ - \ a Course on Physical Interaction Design for Music},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0025.pdf},\n\ - \ year = {2017}\n}\n" + ID: Hindman2005 + abstract: 'The Sonictroller was originally conceived as a means ofintroducing competition + into an improvisatory musicalperformance. By reverse-engineering a popular video + gameconsole, we were able to map sound information (volume,pitch, and pitch sequences) + to any continuous or momentaryaction of a video game sprite.' + address: 'Vancouver, BC, Canada' + author: 'Hindman, David and Kiser, Spencer' + bibtex: "@inproceedings{Hindman2005,\n abstract = {The Sonictroller was originally\ + \ conceived as a means ofintroducing competition into an improvisatory musicalperformance.\ + \ By reverse-engineering a popular video gameconsole, we were able to map sound\ + \ information (volume,pitch, and pitch sequences) to any continuous or momentaryaction\ + \ of a video game sprite.},\n address = {Vancouver, BC, Canada},\n author = {Hindman,\ + \ David and Kiser, Spencer},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176756},\n\ + \ issn = {2220-4806},\n keywords = {video game, Nintendo, music, sound, controller,\ + \ Mortal Kombat, trumpet, guitar, voice },\n pages = {254--255},\n title = {Sonictroller},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_254.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the 
International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176197 + doi: 10.5281/zenodo.1176756 issn: 2220-4806 - pages: 127--132 - publisher: Aalborg University Copenhagen - title: Current Iteration of a Course on Physical Interaction Design for Music - url: http://www.nime.org/proceedings/2017/nime2017_paper0025.pdf - year: 2017 + keywords: 'video game, Nintendo, music, sound, controller, Mortal Kombat, trumpet, + guitar, voice ' + pages: 254--255 + title: Sonictroller + url: http://www.nime.org/proceedings/2005/nime2005_254.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: ahofmann2017 - abstract: 'To build electronic musical instruments, a mapping between the real-time - audio processing software and the physical controllers is required. Different - strategies of mapping were developed and discussed within the NIME community to - improve musical expression in live performances. This paper discusses an interface - focussed instrument design approach, which starts from the physical controller - and its functionality. From this definition, the required, underlying software - instrument is derived. A proof of concept is implemented as a framework for effect - instruments. This framework comprises a library of real-time effects for Csound, - a proposition for a JSON-based mapping format, and a mapping-to-instrument converter - that outputs Csound instrument files. Advantages, limitations and possible future - extensions are discussed.' - address: 'Copenhagen, Denmark' - author: Alex Hofmann and Bernt Isak Waerstad and Saranya Balasubramanian and Kristoffer - E. Koch - bibtex: "@inproceedings{ahofmann2017,\n abstract = {To build electronic musical\ - \ instruments, a mapping between the real-time audio processing software and the\ - \ physical controllers is required. Different strategies of mapping were developed\ - \ and discussed within the NIME community to improve musical expression in live\ - \ performances. 
This paper discusses an interface focussed instrument design approach,\ - \ which starts from the physical controller and its functionality. From this definition,\ - \ the required, underlying software instrument is derived. A proof of concept\ - \ is implemented as a framework for effect instruments. This framework comprises\ - \ a library of real-time effects for Csound, a proposition for a JSON-based mapping\ - \ format, and a mapping-to-instrument converter that outputs Csound instrument\ - \ files. Advantages, limitations and possible future extensions are discussed.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Alex Hofmann and Bernt Isak Waerstad\ - \ and Saranya Balasubramanian and Kristoffer E. Koch},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176199},\n issn = {2220-4806},\n pages = {133--138},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {From interface design\ - \ to the software instrument --- Mapping as an approach to FX-instrument building},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0026.pdf},\n year\ - \ = {2017}\n}\n" + ID: Verplank2005 + abstract: 'Pluck, ring, rub, bang, strike, and squeeze are all simple gestures used + in controlling music. A single motor/encoder plus a force-sensor has proved to + be a useful platform for experimenting with haptic feedback in controlling computer + music. The surprise is that the “best” haptics (precise, stable) may not be the + most “musical”.' + address: 'Vancouver, BC, Canada' + author: 'Verplank, William' + bibtex: "@inproceedings{Verplank2005,\n abstract = {Pluck, ring, rub, bang, strike,\ + \ and squeeze are all simple gestures used in controlling music. A single motor/encoder\ + \ plus a force-sensor has proved to be a useful platform for experimenting with\ + \ haptic feedback in controlling computer music. 
The surprise is that the “best”\ + \ haptics (precise, stable) may not be the most “musical”.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Verplank, William},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176832},\n issn = {2220-4806},\n keywords = {Music control,\ + \ haptic feedback, physical interaction design, Input/output devices, interactive\ + \ systems, haptic I/O},\n pages = {256--257},\n title = {Haptic Music Exercises},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_256.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176199 + doi: 10.5281/zenodo.1176832 issn: 2220-4806 - pages: 133--138 - publisher: Aalborg University Copenhagen - title: From interface design to the software instrument --- Mapping as an approach - to FX-instrument building - url: http://www.nime.org/proceedings/2017/nime2017_paper0026.pdf - year: 2017 + keywords: 'Music control, haptic feedback, physical interaction design, Input/output + devices, interactive systems, haptic I/O' + pages: 256--257 + title: Haptic Music Exercises + url: http://www.nime.org/proceedings/2005/nime2005_256.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: mmarchini2017 - abstract: 'Reflexive Looper (RL) is a live-looping system which allows a solo musician - to incarnate the different roles of a whole rhythm section by looping rhythms, - chord progressions, bassline and more. The loop pedal, is still the most used - device for those types of performances, accounting for many of the cover songs - performances on youtube, but not all kinds of song apply. Unlike a common loop - pedal, each layer of sound in RL is produced by an intelligent looping-agent which - adapts to the musician and respects given constraints, using constrained optimization. 
In - its original form, RL worked well for jazz guitar improvisation but was unsuited - to structured music such as pop songs. In order to bring the system on pop stage, - we revisited the system interaction, following the guidelines of professional - users who tested it extensively. We describe the revisited system which can accommodate - both pop and jazz. Thanks to intuitive pedal interaction and structure-constraints, - the new RL deals with pop music and has been already used in several in live concert - situations.' - address: 'Copenhagen, Denmark' - author: Marco Marchini and François Pachet and Benoît Carré - bibtex: "@inproceedings{mmarchini2017,\n abstract = {Reflexive Looper (RL) is a\ - \ live-looping system which allows a solo musician to incarnate the different\ - \ roles of a whole rhythm section by looping rhythms, chord progressions, bassline\ - \ and more. The loop pedal, is still the most used device for those types of performances,\ - \ accounting for many of the cover songs performances on youtube, but not all\ - \ kinds of song apply. Unlike a common loop pedal, each layer of sound in RL\ - \ is produced by an intelligent looping-agent which adapts to the musician and\ - \ respects given constraints, using constrained optimization. In its original\ - \ form, RL worked well for jazz guitar improvisation but was unsuited to structured\ - \ music such as pop songs. In order to bring the system on pop stage, we revisited\ - \ the system interaction, following the guidelines of professional users who tested\ - \ it extensively. We describe the revisited system which can accommodate both\ - \ pop and jazz. 
Thanks to intuitive pedal interaction and structure-constraints,\ - \ the new RL deals with pop music and has been already used in several in live\ - \ concert situations.},\n address = {Copenhagen, Denmark},\n author = {Marco Marchini\ - \ and François Pachet and Benoît Carré},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176201},\n\ - \ issn = {2220-4806},\n pages = {139--144},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Rethinking Reflexive Looper for structured pop music},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0027.pdf},\n year\ - \ = {2017}\n}\n" + ID: Eaton2005 + abstract: 'In this presentation, we discuss and demonstrate a multiple touch sensitive + (MTS) keyboard developed by Robert Moog for John Eaton. Each key of the keyboard + is equipped with sensors that detect the three-dimensional position of the performer''s + finger. The presentation includes some of Eaton''s performances for certain earlier + prototypes as well as this keyboard. ' + address: 'Vancouver, BC, Canada' + author: 'Eaton, John and Moog, Robert' + bibtex: "@inproceedings{Eaton2005,\n abstract = {In this presentation, we discuss\ + \ and demonstrate a multiple touch sensitive (MTS) keyboard developed by Robert\ + \ Moog for John Eaton. Each key of the keyboard is equipped with sensors that\ + \ detect the three-dimensional position of the performer's finger. The presentation\ + \ includes some of Eaton's performances for certain earlier prototypes as well\ + \ as this keyboard. 
},\n address = {Vancouver, BC, Canada},\n author = {Eaton,\ + \ John and Moog, Robert},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176735},\n\ + \ issn = {2220-4806},\n keywords = {Multiple touch sensitive, MTS, keyboard, key\ + \ sensor design, upgrading to present-day computers },\n pages = {258--259},\n\ + \ title = {Multiple-Touch-Sensitive Keyboard},\n url = {http://www.nime.org/proceedings/2005/nime2005_258.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176201 + doi: 10.5281/zenodo.1176735 issn: 2220-4806 - pages: 139--144 - publisher: Aalborg University Copenhagen - title: Rethinking Reflexive Looper for structured pop music - url: http://www.nime.org/proceedings/2017/nime2017_paper0027.pdf - year: 2017 + keywords: 'Multiple touch sensitive, MTS, keyboard, key sensor design, upgrading + to present-day computers ' + pages: 258--259 + title: Multiple-Touch-Sensitive Keyboard + url: http://www.nime.org/proceedings/2005/nime2005_258.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: vzappi2017 - abstract: 'Physical modelling is a sophisticated synthesis technique, often used - in the design of Digital Musical Instruments (DMIs). Some of the most precise - physical simulations of sound propagation are based on Finite-Difference Time-Domain - (FDTD) methods, which are stable, highly parameterizable but characterized by - an extremely heavy computational load. This drawback hinders the spread of FDTD - from the domain of off-line simulations to the one of DMIs. With this paper, we - present a novel approach to real-time physical modelling synthesis, which implements - a 2D FDTD solver as a shader program running on the GPU directly within the graphics - pipeline. 
The result is a system capable of running fully interactive, massively - sized simulation domains, suitable for novel DMI design. With the help of diagrams - and code snippets, we provide the implementation details of a first interactive - application, a drum head simulator whose source code is available online. Finally, - we evaluate the proposed system, showing how this new approach can work as a valuable - alternative to classic GPGPU modelling.' - address: 'Copenhagen, Denmark' - author: Victor Zappi and Andrew Allen and Sidney Fels - bibtex: "@inproceedings{vzappi2017,\n abstract = {Physical modelling is a sophisticated\ - \ synthesis technique, often used in the design of Digital Musical Instruments\ - \ (DMIs). Some of the most precise physical simulations of sound propagation are\ - \ based on Finite-Difference Time-Domain (FDTD) methods, which are stable, highly\ - \ parameterizable but characterized by an extremely heavy computational load.\ - \ This drawback hinders the spread of FDTD from the domain of off-line simulations\ - \ to the one of DMIs. With this paper, we present a novel approach to real-time\ - \ physical modelling synthesis, which implements a 2D FDTD solver as a shader\ - \ program running on the GPU directly within the graphics pipeline. The result\ - \ is a system capable of running fully interactive, massively sized simulation\ - \ domains, suitable for novel DMI design. With the help of diagrams and code snippets,\ - \ we provide the implementation details of a first interactive application, a\ - \ drum head simulator whose source code is available online. 
Finally, we evaluate\ - \ the proposed system, showing how this new approach can work as a valuable alternative\ - \ to classic GPGPU modelling.},\n address = {Copenhagen, Denmark},\n author =\ - \ {Victor Zappi and Andrew Allen and Sidney Fels},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176203},\n issn = {2220-4806},\n pages = {145--150},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Shader-based Physical\ - \ Modelling for the Design of Massive Digital Musical Instruments},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0028.pdf},\n\ - \ year = {2017}\n}\n" + ID: Fraietta2005 + abstract: 'This paper will demonstrate the use of the Smart Controller workbench + in the Interactive Bell Garden. ' + address: 'Vancouver, BC, Canada' + author: 'Fraietta, Angelo' + bibtex: "@inproceedings{Fraietta2005,\n abstract = {This paper will demonstrate\ + \ the use of the Smart Controller workbench in the Interactive Bell Garden. },\n\ + \ address = {Vancouver, BC, Canada},\n author = {Fraietta, Angelo},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176743},\n issn = {2220-4806},\n keywords\ + \ = {Control Voltage, Open Sound Control, Algorithmic Composition, MIDI, Sound\ + \ Installations, Programmable Logic Control, Synthesizers. 
},\n pages = {260--261},\n\ + \ title = {Smart Controller / Bell Garden Demo},\n url = {http://www.nime.org/proceedings/2005/nime2005_260.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176203 + doi: 10.5281/zenodo.1176743 issn: 2220-4806 - pages: 145--150 - publisher: Aalborg University Copenhagen - title: Shader-based Physical Modelling for the Design of Massive Digital Musical - Instruments - url: http://www.nime.org/proceedings/2017/nime2017_paper0028.pdf - year: 2017 + keywords: 'Control Voltage, Open Sound Control, Algorithmic Composition, MIDI, Sound + Installations, Programmable Logic Control, Synthesizers. ' + pages: 260--261 + title: Smart Controller / Bell Garden Demo + url: http://www.nime.org/proceedings/2005/nime2005_260.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: djohnson2017 - abstract: 'The recent resurgence of Virtual Reality (VR) technologies provide new - platforms for augmenting traditional music instruments. Instrument augmentation - is a common approach for designing new interfaces for musical expression, as shown - through hyperinstrument research. New visual affordances present in VR give designers - new methods for augmenting instruments to extend not only their expressivity, - but also their capabilities for computer assisted tutoring. In this work, we present - VRMin, a mobile Mixed Reality (MR) application for augmenting a physical theremin, - with an immersive virtual environment (VE), for real time computer assisted tutoring. - We augment a physical theremin with 3D visual cues to indicate correct hand positioning - for performing given notes and volumes. The physical theremin acts as a domain - specific controller for the resulting MR environment. The initial effectiveness - of this approach is measured by analyzing a performer''s hand position while training - with and without the VRMin. 
We also evaluate the usability of the interface using - heuristic evaluation based on a newly proposed set of guidelines designed for - VR musical environments.' - address: 'Copenhagen, Denmark' - author: David Johnson and George Tzanetakis - bibtex: "@inproceedings{djohnson2017,\n abstract = {The recent resurgence of Virtual\ - \ Reality (VR) technologies provide new platforms for augmenting traditional music\ - \ instruments. Instrument augmentation is a common approach for designing new\ - \ interfaces for musical expression, as shown through hyperinstrument research.\ - \ New visual affordances present in VR give designers new methods for augmenting\ - \ instruments to extend not only their expressivity, but also their capabilities\ - \ for computer assisted tutoring. In this work, we present VRMin, a mobile Mixed\ - \ Reality (MR) application for augmenting a physical theremin, with an immersive\ - \ virtual environment (VE), for real time computer assisted tutoring. We augment\ - \ a physical theremin with 3D visual cues to indicate correct hand positioning\ - \ for performing given notes and volumes. The physical theremin acts as a domain\ - \ specific controller for the resulting MR environment. The initial effectiveness\ - \ of this approach is measured by analyzing a performer's hand position while\ - \ training with and without the VRMin. 
We also evaluate the usability of the interface\ - \ using heuristic evaluation based on a newly proposed set of guidelines designed\ - \ for VR musical environments.},\n address = {Copenhagen, Denmark},\n author =\ - \ {David Johnson and George Tzanetakis},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176205},\n\ - \ issn = {2220-4806},\n pages = {151--156},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {VRMin: Using Mixed Reality to Augment the Theremin for\ - \ Musical Tutoring},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0029.pdf},\n\ - \ year = {2017}\n}\n" + ID: Melo2005 + abstract: 'The Swayway is an audio/MIDI device inspired by the simpleconcept of + the wind chime.This interactive sculpture translates its swaying motion,triggered + by the user, into sound and light. Additionally, themotion of the reeds contributes + to the visual aspect of thepiece, converting the whole into a sensory and engagingexperience.' + address: 'Vancouver, BC, Canada' + author: 'Melo, Mauricio and Fan, Doria' + bibtex: "@inproceedings{Melo2005,\n abstract = {The Swayway is an audio/MIDI device\ + \ inspired by the simpleconcept of the wind chime.This interactive sculpture translates\ + \ its swaying motion,triggered by the user, into sound and light. Additionally,\ + \ themotion of the reeds contributes to the visual aspect of thepiece, converting\ + \ the whole into a sensory and engagingexperience.},\n address = {Vancouver, BC,\ + \ Canada},\n author = {Melo, Mauricio and Fan, Doria},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176790},\n issn = {2220-4806},\n keywords = {Interactive\ + \ sound sculpture, flex sensors, midi chimes, LEDs, sound installation. 
},\n pages\ + \ = {262--263},\n title = {Swayway --- Midi Chimes},\n url = {http://www.nime.org/proceedings/2005/nime2005_262.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176205 + doi: 10.5281/zenodo.1176790 issn: 2220-4806 - pages: 151--156 - publisher: Aalborg University Copenhagen - title: 'VRMin: Using Mixed Reality to Augment the Theremin for Musical Tutoring' - url: http://www.nime.org/proceedings/2017/nime2017_paper0029.pdf - year: 2017 + keywords: 'Interactive sound sculpture, flex sensors, midi chimes, LEDs, sound installation. ' + pages: 262--263 + title: Swayway --- Midi Chimes + url: http://www.nime.org/proceedings/2005/nime2005_262.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: rgraham2017 - abstract: 'Our paper builds on an ongoing collaboration between theorists and practitioners - within the computer music community, with a specific focus on three-dimensional - environments as an incubator for performance systems design. In particular, we - are concerned with how to provide accessible means of controlling spatialization - and timbral shaping in an integrated manner through the collection of performance - data from various modalities from an electric guitar with a multichannel audio - output. This paper will focus specifically on the combination of pitch data treated - within tonal models and the detection of physical performance gestures using timbral - feature extraction algorithms. We discuss how these tracked gestures may be connected - to concepts and dynamic relationships from embodied cognition, expanding on performative - models for pitch and timbre spaces. Finally, we explore how these ideas support - connections between sonic, formal and performative dimensions. This includes instrumental - technique detection scenes and mapping strategies aimed at bridging music performance - gestures across physical and conceptual planes. 
' - address: 'Copenhagen, Denmark' - author: Richard Graham and Brian Bridges and Christopher Manzione and William Brent - bibtex: "@inproceedings{rgraham2017,\n abstract = {Our paper builds on an ongoing\ - \ collaboration between theorists and practitioners within the computer music\ - \ community, with a specific focus on three-dimensional environments as an incubator\ - \ for performance systems design. In particular, we are concerned with how to\ - \ provide accessible means of controlling spatialization and timbral shaping in\ - \ an integrated manner through the collection of performance data from various\ - \ modalities from an electric guitar with a multichannel audio output. This paper\ - \ will focus specifically on the combination of pitch data treated within tonal\ - \ models and the detection of physical performance gestures using timbral feature\ - \ extraction algorithms. We discuss how these tracked gestures may be connected\ - \ to concepts and dynamic relationships from embodied cognition, expanding on\ - \ performative models for pitch and timbre spaces. Finally, we explore how these\ - \ ideas support connections between sonic, formal and performative dimensions.\ - \ This includes instrumental technique detection scenes and mapping strategies\ - \ aimed at bridging music performance gestures across physical and conceptual\ - \ planes. },\n address = {Copenhagen, Denmark},\n author = {Richard Graham and\ - \ Brian Bridges and Christopher Manzione and William Brent},\n booktitle = {Proceedings\ + ID: Wang2005 + abstract: 'This paper describes the transformation of an everyday object into a + digital musical instrument. By tracking hand movements and tilt on one of two + axes, the Bubbaboard, a transformed handheld washboard, allows a user to play + scales at different octaves while simultaneously offering the ability to use its + inherent acoustic percussive qualities. 
Processed sound is fed to the Mommaspeaker, + which creates physically generated vibrato at a speed determined by tilting the + Bubbaboard on its second axis. ' + address: 'Vancouver, BC, Canada' + author: 'Wang, Derek' + bibtex: "@inproceedings{Wang2005,\n abstract = {This paper describes the transformation\ + \ of an everyday object into a digital musical instrument. By tracking hand movements\ + \ and tilt on one of two axes, the Bubbaboard, a transformed handheld washboard,\ + \ allows a user to play scales at different octaves while simultaneously offering\ + \ the ability to use its inherent acoustic percussive qualities. Processed sound\ + \ is fed to the Mommaspeaker, which creates physically generated vibrato at a\ + \ speed determined by tilting the Bubbaboard on its second axis. },\n address\ + \ = {Vancouver, BC, Canada},\n author = {Wang, Derek},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176207},\n issn = {2220-4806},\n pages = {157--162},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Exploring Pitch and\ - \ Timbre through 3D Spaces: Embodied Models in Virtual Reality as a Basis for\ - \ Performance Systems Design},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0030.pdf},\n\ - \ year = {2017}\n}\n" + \ doi = {10.5281/zenodo.1176836},\n issn = {2220-4806},\n keywords = {Gesture\ + \ based controllers, Musical Performance, MIDI, Accelerometer, Microcontroller,\ + \ Contact Microphone },\n pages = {264--265},\n title = {Bubbaboard and Mommaspeaker:\ + \ Creating Digital Tonal Sounds from an Acoustic Percussive Instrument},\n url\ + \ = {http://www.nime.org/proceedings/2005/nime2005_264.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176207 + doi: 10.5281/zenodo.1176836 issn: 2220-4806 - pages: 157--162 - publisher: Aalborg 
University Copenhagen - title: 'Exploring Pitch and Timbre through 3D Spaces: Embodied Models in Virtual - Reality as a Basis for Performance Systems Design' - url: http://www.nime.org/proceedings/2017/nime2017_paper0030.pdf - year: 2017 + keywords: 'Gesture based controllers, Musical Performance, MIDI, Accelerometer, + Microcontroller, Contact Microphone ' + pages: 264--265 + title: 'Bubbaboard and Mommaspeaker: Creating Digital Tonal Sounds from an Acoustic + Percussive Instrument' + url: http://www.nime.org/proceedings/2005/nime2005_264.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: mgurevich2017 - abstract: "This paper situates NIME practice with respect to models of social interaction\ - \ among human agents. It argues that the conventional model of composer-performer-listener,\ - \ and the underlying mid-20th century metaphor of music as communication upon\ - \ which it relies, cannot reflect the richness of interaction and possibility\ - \ afforded by interactive digital technologies. Building on Paul Lansky's vision\ - \ of an expanded and dynamic social network, an alternative, ecological view of\ - \ music-making is presented, in which meaning emerges not from \"messages\" communicated\ - \ between individuals, but instead from the \"noise\" that arises through the\ - \ uncertainty in their interactions. However, in our tendency in NIME to collapse\ - \ the various roles in this network into a single individual, we place the increased\ - \ potential afforded by digital systems at risk. Using examples from the author's\ - \ NIME practices, the paper uses a practice-based methodology to describe approaches\ - \ to designing instruments that respond to the technologies that form the interfaces\ - \ of the network, which can include scores and stylistic conventions. 
In doing\ - \ so, the paper demonstrates that a repertoire—a seemingly anachronistic\ - \ concept—and a corresponding repertoire-driven approach to creating NIMEs\ - \ can in fact be a catalyst for invention and creativity." - address: 'Copenhagen, Denmark' - author: Michael Gurevich - bibtex: "@inproceedings{mgurevich2017,\n abstract = {This paper situates NIME practice\ - \ with respect to models of social interaction among human agents. It argues that\ - \ the conventional model of composer-performer-listener, and the underlying mid-20th\ - \ century metaphor of music as communication upon which it relies, cannot reflect\ - \ the richness of interaction and possibility afforded by interactive digital\ - \ technologies. Building on Paul Lansky's vision of an expanded and dynamic social\ - \ network, an alternative, ecological view of music-making is presented, in which\ - \ meaning emerges not from \"messages\" communicated between individuals, but\ - \ instead from the \"noise\" that arises through the uncertainty in their interactions.\ - \ However, in our tendency in NIME to collapse the various roles in this network\ - \ into a single individual, we place the increased potential afforded by digital\ - \ systems at risk. Using examples from the author's NIME practices, the paper\ - \ uses a practice-based methodology to describe approaches to designing instruments\ - \ that respond to the technologies that form the interfaces of the network, which\ - \ can include scores and stylistic conventions. 
In doing so, the paper demonstrates\ - \ that a repertoire—a seemingly anachronistic concept—and a corresponding\ - \ repertoire-driven approach to creating NIMEs can in fact be a catalyst for invention\ - \ and creativity.},\n address = {Copenhagen, Denmark},\n author = {Michael Gurevich},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176209},\n issn = {2220-4806},\n\ - \ pages = {163--168},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Discovering Instruments in Scores: A Repertoire-Driven Approach to Designing\ - \ New Interfaces for Musical Expression},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0031.pdf},\n\ - \ year = {2017}\n}\n" + ID: Flety2005 + abstract: 'The Wise Box is a new wireless digitizing interface for sensors and controllers. + An increasing demand for this kind of hardware, especially in the field of dance + and computer performance lead us to design a wireless digitizer that allows for + multiple users, with high bandwidth and accuracy. The interface design was initiated + in early 2004 and shortly described in reference [1]. Our recent effort was directed + to make this device available for the community on the form of a manufactured + product, similarly to our previous interfaces such as AtoMIC Pro, Eobody or Ethersense + [1][2][3]. We describe here the principles we used for the design of the device + as well as its technical specifications. The demo will show several devices running + at once and used in real-time with a various set of sensors. ' + address: 'Vancouver, BC, Canada' + author: 'Fléty, Emmanuel' + bibtex: "@inproceedings{Flety2005,\n abstract = {The Wise Box is a new wireless\ + \ digitizing interface for sensors and controllers. 
An increasing demand for this\ + \ kind of hardware, especially in the field of dance and computer performance\ + \ lead us to design a wireless digitizer that allows for multiple users, with\ + \ high bandwidth and accuracy. The interface design was initiated in early 2004\ + \ and shortly described in reference [1]. Our recent effort was directed to make\ + \ this device available for the community on the form of a manufactured product,\ + \ similarly to our previous interfaces such as AtoMIC Pro, Eobody or Ethersense\ + \ [1][2][3]. We describe here the principles we used for the design of the device\ + \ as well as its technical specifications. The demo will show several devices\ + \ running at once and used in real-time with a various set of sensors. },\n address\ + \ = {Vancouver, BC, Canada},\n author = {Fl\\'{e}ty, Emmanuel},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176739},\n issn = {2220-4806},\n keywords = {Gesture,\ + \ Sensors, WiFi, 802.11, OpenSoundControl. },\n pages = {266--267},\n title =\ + \ {The WiSe Box: a Multi-performer Wireless Sensor Interface using {WiFi} and\ + \ OSC},\n url = {http://www.nime.org/proceedings/2005/nime2005_266.pdf},\n year\ + \ = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176209 + doi: 10.5281/zenodo.1176739 issn: 2220-4806 - pages: 163--168 - publisher: Aalborg University Copenhagen - title: 'Discovering Instruments in Scores: A Repertoire-Driven Approach to Designing - New Interfaces for Musical Expression' - url: http://www.nime.org/proceedings/2017/nime2017_paper0031.pdf - year: 2017 + keywords: 'Gesture, Sensors, WiFi, 802.11, OpenSoundControl. 
' + pages: 266--267 + title: 'The WiSe Box: a Multi-performer Wireless Sensor Interface using WiFi and + OSC' + url: http://www.nime.org/proceedings/2005/nime2005_266.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: jcantrell2017 - abstract: 'The ideation, conception and implementation of new musical interfaces - and instruments provide more than the mere construction of digital objects. As - physical and digital assemblages, interfaces also act as traces of the authoring - entities that created them. Their intentions, likes, dislikes, and ultimate determinations - of what is creatively useful all get embedded into the available choices of the - interface. In this light, the self-perception of the musical HCI and instrument - designer can be seen as occupying a primary importance in the instruments and - interfaces that eventually come to be created. The work of a designer who self-identifies - as an artist may result in a vastly different outcome than one who considers him - or herself to be an entrepreneur, or a scientist, for example. These differing - definitions of self as well as their HCI outcomes require their own means of critique, - understanding and expectations. All too often, these definitions are unclear, - or the considerations of overlapping means of critique remain unexamined.' - address: 'Copenhagen, Denmark' - author: Joe Cantrell - bibtex: "@inproceedings{jcantrell2017,\n abstract = {The ideation, conception and\ - \ implementation of new musical interfaces and instruments provide more than the\ - \ mere construction of digital objects. As physical and digital assemblages, interfaces\ - \ also act as traces of the authoring entities that created them. Their intentions,\ - \ likes, dislikes, and ultimate determinations of what is creatively useful all\ - \ get embedded into the available choices of the interface. 
In this light, the\ - \ self-perception of the musical HCI and instrument designer can be seen as occupying\ - \ a primary importance in the instruments and interfaces that eventually come\ - \ to be created. The work of a designer who self-identifies as an artist may result\ - \ in a vastly different outcome than one who considers him or herself to be an\ - \ entrepreneur, or a scientist, for example. These differing definitions of self\ - \ as well as their HCI outcomes require their own means of critique, understanding\ - \ and expectations. All too often, these definitions are unclear, or the considerations\ - \ of overlapping means of critique remain unexamined.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Joe Cantrell},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176211},\n\ - \ issn = {2220-4806},\n pages = {169--173},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Designing Intent: Defining Critical Meaning for NIME\ - \ Practitioners},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0032.pdf},\n\ - \ year = {2017}\n}\n" + ID: Bowen2005 + abstract: 'Soundstone is a small wireless music controller that tracks movement + and gestures, and maps these signals to characteristics of various synthesized + and sampled sounds. It is intended to become a general-purpose platform for exploring + the sonification of movement, with an emphasis on tactile (haptic) feedback. ' + address: 'Vancouver, BC, Canada' + author: 'Bowen, Adam' + bibtex: "@inproceedings{Bowen2005,\n abstract = {Soundstone is a small wireless\ + \ music controller that tracks movement and gestures, and maps these signals to\ + \ characteristics of various synthesized and sampled sounds. It is intended to\ + \ become a general-purpose platform for exploring the sonification of movement,\ + \ with an emphasis on tactile (haptic) feedback. 
},\n address = {Vancouver, BC,\ + \ Canada},\n author = {Bowen, Adam},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176711},\n\ + \ issn = {2220-4806},\n keywords = {Gesture recognition, haptics, human factors,\ + \ force, acceleration, tactile feedback, general purpose controller, wireless.\ + \ },\n pages = {268--269},\n title = {Soundstone: A {3-D} Wireless Music Controller},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_268.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176211 + doi: 10.5281/zenodo.1176711 issn: 2220-4806 - pages: 169--173 - publisher: Aalborg University Copenhagen - title: 'Designing Intent: Defining Critical Meaning for NIME Practitioners' - url: http://www.nime.org/proceedings/2017/nime2017_paper0032.pdf - year: 2017 + keywords: 'Gesture recognition, haptics, human factors, force, acceleration, tactile + feedback, general purpose controller, wireless. ' + pages: 268--269 + title: 'Soundstone: A 3-D Wireless Music Controller' + url: http://www.nime.org/proceedings/2005/nime2005_268.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: jvasquez2017 - abstract: 'One of the reasons of why some musical instruments more successfully - continue their evolution and actively take part in the history of music is partially - attributed to the existing compositions made specifically for them, pieces that - remain and are still played over a long period of time. This is something we know, - performing these compositions keeps the characteristics of the instruments alive - and able to survive. This paper presents our contribution to this discussion with - a context and historical background for idiomatic compositions. 
Looking beyond - the classical era, we discuss how the concept of idiomatic music has influenced - research and composition practices in the NIME community; drawing more attention - in the way current idiomatic composition practices considered specific NIME affordances - for sonic, social and spatial interaction. We present particular projects that - establish idiomatic writing as a part of a new repertoire for new musical instruments. - The idiomatic writing approach to composing music for NIME can shift the unique - characteristics of new instruments to a more established musical identity, providing - a shared understanding and a common literature to the community.' - address: 'Copenhagen, Denmark' - author: Juan Vasquez and Koray Tahiroğlu and Johan Kildal - bibtex: "@inproceedings{jvasquez2017,\n abstract = {One of the reasons of why some\ - \ musical instruments more successfully continue their evolution and actively\ - \ take part in the history of music is partially attributed to the existing compositions\ - \ made specifically for them, pieces that remain and are still played over a long\ - \ period of time. This is something we know, performing these compositions keeps\ - \ the characteristics of the instruments alive and able to survive. This paper\ - \ presents our contribution to this discussion with a context and historical background\ - \ for idiomatic compositions. Looking beyond the classical era, we discuss how\ - \ the concept of idiomatic music has influenced research and composition practices\ - \ in the NIME community; drawing more attention in the way current idiomatic composition\ - \ practices considered specific NIME affordances for sonic, social and spatial\ - \ interaction. We present particular projects that establish idiomatic writing\ - \ as a part of a new repertoire for new musical instruments. 
The idiomatic writing\ - \ approach to composing music for NIME can shift the unique characteristics of\ - \ new instruments to a more established musical identity, providing a shared understanding\ - \ and a common literature to the community.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Juan Vasquez and Koray Tahiroğlu and Johan Kildal},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1181424},\n issn = {2220-4806},\n pages = {174--179},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Idiomatic Composition\ - \ Practices for New Musical Instruments: Context, Background and Current Applications},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0033.pdf},\n year\ - \ = {2017}\n}\n" + ID: Guisan2005 + abstract: 'INTRIUM is an interactive sound installation exploring the inside vibration + of the atrium. A certain number of architectural elements are fitted with acoustic + sensors in order to capture the vibration they produce when they are manipulated + or touched by hands. This raw sound is further processed in real-time, allowing + the participants to create a sonic landscape in the atrium, as the result of a + collaborative and collective work between them.' + address: 'Vancouver, BC, Canada' + author: 'Guisan, Alain C.' + bibtex: "@inproceedings{Guisan2005,\n abstract = {INTRIUM is an interactive sound\ + \ installation exploring the inside vibration of the atrium. A certain number\ + \ of architectural elements are fitted with acoustic sensors in order to capture\ + \ the vibration they produce when they are manipulated or touched by hands. 
This\ + \ raw sound is further processed in real-time, allowing the participants to create\ + \ a sonic landscape in the atrium, as the result of a collaborative and collective\ + \ work between them.},\n address = {Vancouver, BC, Canada},\n author = {Guisan,\ + \ Alain C.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176752},\n issn\ + \ = {2220-4806},\n keywords = {Interactive sound installation, collaborative work,\ + \ sound processing, acoustic source localization.},\n pages = {270--270},\n title\ + \ = {Interactive Sound Installation: INTRIUM},\n url = {http://www.nime.org/proceedings/2005/nime2005_270.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1181424 + doi: 10.5281/zenodo.1176752 issn: 2220-4806 - pages: 174--179 - publisher: Aalborg University Copenhagen - title: 'Idiomatic Composition Practices for New Musical Instruments: Context, Background - and Current Applications' - url: http://www.nime.org/proceedings/2017/nime2017_paper0033.pdf - year: 2017 + keywords: 'Interactive sound installation, collaborative work, sound processing, + acoustic source localization.' + pages: 270--270 + title: 'Interactive Sound Installation: INTRIUM' + url: http://www.nime.org/proceedings/2005/nime2005_270.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: fberthaut2017 - abstract: 'Gestural interfaces, which make use of physiological signals, hand / - body postures or movements, have become widespread for musical expression. While - they may increase the transparency and expressiveness of instruments, they may - also result in limited agency, for musicians as well as for spectators. This problem - becomes especially true when the implemented mappings between gesture and music - are subtle or complex. 
These instruments may also restrict the appropriation possibilities - of controls, by comparison to physical interfaces. Most existing solutions to - these issues are based on distant and/or limited visual feedback (LEDs, small - screens). Our approach is to augment the gestures themselves with revealed virtual - objects. Our contributions are, first a novel approach of visual feedback that - allow for additional expressiveness, second a software pipeline for pixel-level - feedback and control that ensures tight coupling between sound and visuals, and - third, a design space for extending gestural control using revealed interfaces. - We also demonstrate and evaluate our approach with the augmentation of three existing - gestural musical instruments.' - address: 'Copenhagen, Denmark' - author: Florent Berthaut and Cagan Arslan and Laurent Grisoni - bibtex: "@inproceedings{fberthaut2017,\n abstract = {Gestural interfaces, which\ - \ make use of physiological signals, hand / body postures or movements, have become\ - \ widespread for musical expression. While they may increase the transparency\ - \ and expressiveness of instruments, they may also result in limited agency, for\ - \ musicians as well as for spectators. This problem becomes especially true when\ - \ the implemented mappings between gesture and music are subtle or complex. These\ - \ instruments may also restrict the appropriation possibilities of controls, by\ - \ comparison to physical interfaces. Most existing solutions to these issues\ - \ are based on distant and/or limited visual feedback (LEDs, small screens). 
\ - \ Our approach is to augment the gestures themselves with revealed virtual objects.\ - \ Our contributions are, first a novel approach of visual feedback that allow\ - \ for additional expressiveness, second a software pipeline for pixel-level feedback\ - \ and control that ensures tight coupling between sound and visuals, and third,\ - \ a design space for extending gestural control using revealed interfaces. We\ - \ also demonstrate and evaluate our approach with the augmentation of three existing\ - \ gestural musical instruments.},\n address = {Copenhagen, Denmark},\n author\ - \ = {Florent Berthaut and Cagan Arslan and Laurent Grisoni},\n booktitle = {Proceedings\ + ID: Socolofsky2005 + abstract: 'Contemplace is a spatial personality that redesigns itselfdynamically + according to its conversations with its visitors.Sometimes welcoming, sometimes + shy, and sometimeshostile, Contemplace''s mood is apparent through a display ofprojected + graphics, spatial sound, and physical motion.Contemplace is an environment in + which inhabitationbecomes a two-way dialogue.' 
+ address: 'Vancouver, BC, Canada' + author: 'Socolofsky, Eric' + bibtex: "@inproceedings{Socolofsky2005,\n abstract = {Contemplace is a spatial personality\ + \ that redesigns itselfdynamically according to its conversations with its visitors.Sometimes\ + \ welcoming, sometimes shy, and sometimeshostile, Contemplace's mood is apparent\ + \ through a display ofprojected graphics, spatial sound, and physical motion.Contemplace\ + \ is an environment in which inhabitationbecomes a two-way dialogue.},\n address\ + \ = {Vancouver, BC, Canada},\n author = {Socolofsky, Eric},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176213},\n issn = {2220-4806},\n pages = {180--185},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Revgest: Augmenting\ - \ Gestural Musical Instruments with Revealed Virtual Objects},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0034.pdf},\n\ - \ year = {2017}\n}\n" + \ doi = {10.5281/zenodo.1176822},\n issn = {2220-4806},\n keywords = {Interactive\ + \ space, spatial installation, graphic and aural display, motion tracking, Processing,\ + \ Flosc },\n pages = {271--271},\n title = {Contemplace},\n url = {http://www.nime.org/proceedings/2005/nime2005_271.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176213 + doi: 10.5281/zenodo.1176822 issn: 2220-4806 - pages: 180--185 - publisher: Aalborg University Copenhagen - title: 'Revgest: Augmenting Gestural Musical Instruments with Revealed Virtual Objects' - url: http://www.nime.org/proceedings/2017/nime2017_paper0034.pdf - year: 2017 + keywords: 'Interactive space, spatial installation, graphic and aural display, motion + tracking, Processing, Flosc ' + pages: 271--271 + title: Contemplace + url: http://www.nime.org/proceedings/2005/nime2005_271.pdf + year: 2005 - ENTRYTYPE: 
inproceedings - ID: atroyer2017 - abstract: 'MM-RT (material and magnet --- rhythm and timbre) is a tabletop musical - instrument equipped with electromagnetic actuators to offer a new paradigm of - musical expression and exploration. After expanding on prior work with electromagnetic - instrument actuation and tabletop musical interfaces, the paper explains why and - how MM-RT, through its physicality and ergonomics, has been designed specifically - for musical wonderers: people who want to know more about music in installation, - concert, and everyday contexts. Those wonderers aspire to interpret and explore - music rather than focussing on a technically correct realization of music. Informed - by this vision, we then describe the design and technical implementation of this - tabletop musical instrument. The paper concludes with discussions about future - works and how to trigger musical wonderers'' sonic curiosity to encounter, explore, - invent, and organize sounds for music creation using a musical instrument like - MM-RT.' - address: 'Copenhagen, Denmark' - author: Akito van Troyer - bibtex: "@inproceedings{atroyer2017,\n abstract = {MM-RT (material and magnet ---\ - \ rhythm and timbre) is a tabletop musical instrument equipped with electromagnetic\ - \ actuators to offer a new paradigm of musical expression and exploration. After\ - \ expanding on prior work with electromagnetic instrument actuation and tabletop\ - \ musical interfaces, the paper explains why and how MM-RT, through its physicality\ - \ and ergonomics, has been designed specifically for musical wonderers: people\ - \ who want to know more about music in installation, concert, and everyday contexts.\ - \ Those wonderers aspire to interpret and explore music rather than focussing\ - \ on a technically correct realization of music. 
Informed by this vision, we then\ - \ describe the design and technical implementation of this tabletop musical instrument.\ - \ The paper concludes with discussions about future works and how to trigger musical\ - \ wonderers' sonic curiosity to encounter, explore, invent, and organize sounds\ - \ for music creation using a musical instrument like MM-RT.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Akito van Troyer},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176215},\n\ - \ issn = {2220-4806},\n pages = {186--191},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {MM-RT: A Tabletop Musical Instrument for Musical Wonderers},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0035.pdf},\n year\ - \ = {2017}\n}\n" + ID: Marinelli2005 + abstract: 'Mocean is an immersive environment that creates sensoryrelationships + between natural media, particularly exploringthe potential of water as an emotive + interface.' + address: 'Vancouver, BC, Canada' + author: 'Marinelli, Maia and Lamenzo, Jared and Borissov, Liubo' + bibtex: "@inproceedings{Marinelli2005,\n abstract = {Mocean is an immersive environment\ + \ that creates sensoryrelationships between natural media, particularly exploringthe\ + \ potential of water as an emotive interface.},\n address = {Vancouver, BC, Canada},\n\ + \ author = {Marinelli, Maia and Lamenzo, Jared and Borissov, Liubo},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176786},\n issn = {2220-4806},\n keywords\ + \ = {New interface, water, pipe organ, natural media, PIC microcontroller, wind\ + \ instrument, human computer interface. 
},\n pages = {272--272},\n title = {Mocean},\n\ + \ url = {http://www.nime.org/proceedings/2005/nime2005_272.pdf},\n year = {2005}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176215 + doi: 10.5281/zenodo.1176786 issn: 2220-4806 - pages: 186--191 - publisher: Aalborg University Copenhagen - title: 'MM-RT: A Tabletop Musical Instrument for Musical Wonderers' - url: http://www.nime.org/proceedings/2017/nime2017_paper0035.pdf - year: 2017 + keywords: 'New interface, water, pipe organ, natural media, PIC microcontroller, + wind instrument, human computer interface. ' + pages: 272--272 + title: Mocean + url: http://www.nime.org/proceedings/2005/nime2005_272.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: fmorreale2017 - abstract: 'Every new edition of NIME brings dozens of new DMIs and the feeling that - only a few of them will eventually break through. Previous work tried to address - this issue with a deductive approach by formulating design frameworks; we addressed - this issue with a inductive approach by elaborating on successes and failures - of previous DMIs. We contacted 97 DMI makers that presented a new instrument at - five successive editions of NIME (2010-2014); 70 answered. They were asked to - indicate the original motivation for designing the DMI and to present information - about its uptake. Results confirmed that most of the instruments have difficulties - establishing themselves. Also, they were asked to reflect on the specific factors - that facilitated and those that hindered instrument longevity. By grounding these - reflections on existing reserach on NIME and HCI, we propose a series of design - considerations for future DMIs. 
' - address: 'Copenhagen, Denmark' - author: Fabio Morreale and Andrew McPherson - bibtex: "@inproceedings{fmorreale2017,\n abstract = {Every new edition of NIME brings\ - \ dozens of new DMIs and the feeling that only a few of them will eventually break\ - \ through. Previous work tried to address this issue with a deductive approach\ - \ by formulating design frameworks; we addressed this issue with a inductive approach\ - \ by elaborating on successes and failures of previous DMIs. We contacted 97 DMI\ - \ makers that presented a new instrument at five successive editions of NIME (2010-2014);\ - \ 70 answered. They were asked to indicate the original motivation for designing\ - \ the DMI and to present information about its uptake. Results confirmed that\ - \ most of the instruments have difficulties establishing themselves. Also, they\ - \ were asked to reflect on the specific factors that facilitated and those that\ - \ hindered instrument longevity. By grounding these reflections on existing reserach\ - \ on NIME and HCI, we propose a series of design considerations for future DMIs.\ - \ },\n address = {Copenhagen, Denmark},\n author = {Fabio Morreale and Andrew\ - \ McPherson},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176218},\n issn\ - \ = {2220-4806},\n pages = {192--197},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Design for Longevity: Ongoing Use of Instruments from NIME 2010-14},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0036.pdf},\n year\ - \ = {2017}\n}\n" + ID: Matsumura2005 + abstract: '''Hop Step Junk'' is an interactive sound installation that creates audio + and visual representations of the audience''s footsteps. The sound of a footstep + is very expressive. Depending on one''s weight, clothing and gate, a footstep + can sound quite different. The period between steps defines one''s personal rhythm. 
+ The sound output of ''Hop Step Junk'' is wholly derived from the audience''s footsteps. + ''Hop Step Junk'' creates a multi-generational playground, an instrument that + an audience can easily play.' + address: 'Vancouver, BC, Canada' + author: 'Matsumura, Seiichiro and Arakawa, Chuichi' + bibtex: "@inproceedings{Matsumura2005,\n abstract = {'Hop Step Junk' is an interactive\ + \ sound installation that creates audio and visual representations of the audience's\ + \ footsteps. The sound of a footstep is very expressive. Depending on one's weight,\ + \ clothing and gate, a footstep can sound quite different. The period between\ + \ steps defines one's personal rhythm. The sound output of 'Hop Step Junk' is\ + \ wholly derived from the audience's footsteps. 'Hop Step Junk' creates a multi-generational\ + \ playground, an instrument that an audience can easily play.},\n address = {Vancouver,\ + \ BC, Canada},\n author = {Matsumura, Seiichiro and Arakawa, Chuichi},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176788},\n issn = {2220-4806},\n keywords\ + \ = {Footsteps, body action, interactive, visualization, simple and reliable interface,\ + \ contact microphone, sound playground},\n pages = {273--273},\n title = {Hop\ + \ Step Junk: Sonic Visualization using Footsteps},\n url = {http://www.nime.org/proceedings/2005/nime2005_273.pdf},\n\ + \ year = {2005}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176218 + doi: 10.5281/zenodo.1176788 issn: 2220-4806 - pages: 192--197 - publisher: Aalborg University Copenhagen - title: 'Design for Longevity: Ongoing Use of Instruments from NIME 2010-14' - url: http://www.nime.org/proceedings/2017/nime2017_paper0036.pdf - year: 2017 + keywords: 'Footsteps, body action, interactive, visualization, simple and reliable + interface, contact microphone, sound 
playground' + pages: 273--273 + title: 'Hop Step Junk: Sonic Visualization using Footsteps' + url: http://www.nime.org/proceedings/2005/nime2005_273.pdf + year: 2005 - ENTRYTYPE: inproceedings - ID: sdelalez2017 - abstract: 'Performative control of voice is the process of real-time speech synthesis - or modification by the means of hands or feet gestures. Vokinesis, a system for - real-time rhythm and pitch modification and control of singing is presented. Pitch - and vocal effort are controlled by a stylus on a graphic tablet. The concept of - Syllabic Control Points (SCP) is introduced for timing and rhythm control. A - chain of phonetic syllables have two types of temporal phases : the steady phases, - which correspond to the vocalic nuclei, and the transient phases, which correspond - to the attacks and/or codas. Thus, syllabic rhythm control methods need transient - and steady phases control points, corresponding to the ancient concept of the - arsis and thesis is prosodic theory. SCP allow for accurate control of articulation, - using hand or feet. In the tap mode, SCP are triggered by pressing and releasing - a control button. In the fader mode, continuous variation of the SCP sequencing - rate is controlled with expression pedals. Vokinesis has been tested successfully - in musical performances, using both syllabic rhythm control modes. This system - opens new musical possibilities, and can be extended to other types of sounds - beyond voice. ' - address: 'Copenhagen, Denmark' - author: Samuel Delalez and Christophe d'Alessandro - bibtex: "@inproceedings{sdelalez2017,\n abstract = {Performative control of voice\ - \ is the process of real-time speech synthesis or modification by the means of\ - \ hands or feet gestures. Vokinesis, a system for real-time rhythm and pitch modification\ - \ and control of singing is presented. Pitch and vocal effort are controlled\ - \ by a stylus on a graphic tablet. 
The concept of Syllabic Control Points (SCP)\ - \ is introduced for timing and rhythm control. A chain of phonetic syllables\ - \ have two types of temporal phases : the steady phases, which correspond to the\ - \ vocalic nuclei, and the transient phases, which correspond to the attacks and/or\ - \ codas. Thus, syllabic rhythm control methods need transient and steady phases\ - \ control points, corresponding to the ancient concept of the arsis and thesis\ - \ is prosodic theory. SCP allow for accurate control of articulation, using hand\ - \ or feet. In the tap mode, SCP are triggered by pressing and releasing a control\ - \ button. In the fader mode, continuous variation of the SCP sequencing rate is\ - \ controlled with expression pedals. Vokinesis has been tested successfully in\ - \ musical performances, using both syllabic rhythm control modes. This system\ - \ opens new musical possibilities, and can be extended to other types of sounds\ - \ beyond voice. },\n address = {Copenhagen, Denmark},\n author = {Samuel Delalez\ - \ and Christophe d'Alessandro},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176220},\n\ - \ issn = {2220-4806},\n pages = {198--203},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Vokinesis: Syllabic Control Points for Performative\ - \ Singing Synthesis},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0037.pdf},\n\ + ID: Deutscher2005 + address: 'Vancouver, BC, Canada' + author: 'Deutscher, Meghan and Fels, Sidney S. and Hoskinson, Reynald and Takahashi, + Sachiyo' + bibtex: "@inproceedings{Deutscher2005,\n address = {Vancouver, BC, Canada},\n author\ + \ = {Deutscher, Meghan and Fels, Sidney S. 
and Hoskinson, Reynald and Takahashi,\ + \ Sachiyo},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176733},\n issn\ + \ = {2220-4806},\n keywords = {Mediascape, sound spatialization, interactive art,\ + \ Beluga whale},\n pages = {274--274},\n title = {Echology},\n url = {http://www.nime.org/proceedings/2005/nime2005_274.pdf},\n\ + \ year = {2005}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176733 + issn: 2220-4806 + keywords: 'Mediascape, sound spatialization, interactive art, Beluga whale' + pages: 274--274 + title: Echology + url: http://www.nime.org/proceedings/2005/nime2005_274.pdf + year: 2005 + + +- ENTRYTYPE: inproceedings + ID: rrooyen2017 + abstract: 'Percussion robots have successfully used a variety of actuator technologies + to activate a wide array of striking mechanisms. Popular types of actuators include + solenoids and DC motors. However, the use of industrial strength voice coil actuators + provides a compelling alternative given a desirable set of heterogeneous features + and requirements that span traditional devices. Their characteristics such as + high acceleration and accurate positioning enable the exploration of rendering + highly accurate and expressive percussion performances.' + address: 'Copenhagen, Denmark' + author: Robert Van Rooyen and Andrew Schloss and George Tzanetakis + bibtex: "@inproceedings{rrooyen2017,\n abstract = {Percussion robots have successfully\ + \ used a variety of actuator technologies to activate a wide array of striking\ + \ mechanisms. Popular types of actuators include solenoids and DC motors. However,\ + \ the use of industrial strength voice coil actuators provides a compelling alternative\ + \ given a desirable set of heterogeneous features and requirements that span traditional\ + \ devices. 
Their characteristics such as high acceleration and accurate positioning\ + \ enable the exploration of rendering highly accurate and expressive percussion\ + \ performances.},\n address = {Copenhagen, Denmark},\n author = {Robert Van Rooyen\ + \ and Andrew Schloss and George Tzanetakis},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176149},\n issn = {2220-4806},\n pages = {1--6},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {Voice Coil Actuators for Percussion\ + \ Robotics},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0001.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176220 + doi: 10.5281/zenodo.1176149 issn: 2220-4806 - pages: 198--203 + pages: 1--6 publisher: Aalborg University Copenhagen - title: 'Vokinesis: Syllabic Control Points for Performative Singing Synthesis' - url: http://www.nime.org/proceedings/2017/nime2017_paper0037.pdf + title: Voice Coil Actuators for Percussion Robotics + url: http://www.nime.org/proceedings/2017/nime2017_paper0001.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: gyoung2017 - abstract: 'We present the findings of a pilot-study that analysed the role of haptic - feedback in a musical context. To examine the role of haptics in Digital Musical - Instrument (DMI) design an experiment was formulated to measure the users'' perception - of device usability across four separate feedback stages: fully haptic (force - and tactile combined), constant force only, vibrotactile only, and no feedback. - The study was piloted over extended periods with the intention of exploring the - application and integration of DMIs in real-world musical contexts. 
Applying a - music orientated analysis of this type enabled the investigative process to not - only take place over a comprehensive period, but allowed for the exploration of - DMI integration in everyday compositional practices. As with any investigation - that involves creativity, it was important that the participants did not feel - rushed or restricted. That is, they were given sufficient time to explore and - assess the different feedback types without constraint. This provided an accurate - and representational set of qualitative data for validating the participants'' - experience with the different feedback types they were presented with.' + ID: mdonneaud2017 + abstract: 'We present a textile pressure sensor matrix, designed to be used as a + musical multi-touch input device. An evaluation of our design demonstrated that + the sensors pressure response profile fits a logarithmic curve (R = 0.98). The + input delay of the sensor is 2.1ms. The average absolute error in one direction + of the sensor was measured to be less than 10% of one of the matrix''s strips + (M = 1.8mm, SD = 1.37mm). We intend this technology to be easy to use and implement + by experts and novices alike: We ensure the ease of use by providing a host application + that tracks touch points and passes these on as OSC or MIDI messages. We make + our design easy to implement by providing open source software and hardware and + by choosing evaluation methods that use accessible tools, making quantitative + comparisons between different branches of the design easy. We chose to work with + textile to take advantage of its tactile properties and its malleability of form + and to pay tribute to textile''s rich cultural heritage. ' address: 'Copenhagen, Denmark' - author: Gareth Young and Dave Murphy and Jeffrey Weeter - bibtex: "@inproceedings{gyoung2017,\n abstract = {We present the findings of a pilot-study\ - \ that analysed the role of haptic feedback in a musical context. 
To examine the\ - \ role of haptics in Digital Musical Instrument (DMI) design an experiment was\ - \ formulated to measure the users' perception of device usability across four\ - \ separate feedback stages: fully haptic (force and tactile combined), constant\ - \ force only, vibrotactile only, and no feedback. The study was piloted over extended\ - \ periods with the intention of exploring the application and integration of DMIs\ - \ in real-world musical contexts. Applying a music orientated analysis of this\ - \ type enabled the investigative process to not only take place over a comprehensive\ - \ period, but allowed for the exploration of DMI integration in everyday compositional\ - \ practices. As with any investigation that involves creativity, it was important\ - \ that the participants did not feel rushed or restricted. That is, they were\ - \ given sufficient time to explore and assess the different feedback types without\ - \ constraint. This provided an accurate and representational set of qualitative\ - \ data for validating the participants' experience with the different feedback\ - \ types they were presented with.},\n address = {Copenhagen, Denmark},\n author\ - \ = {Gareth Young and Dave Murphy and Jeffrey Weeter},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176222},\n issn = {2220-4806},\n pages = {204--209},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {A Qualitative Analysis\ - \ of Haptic Feedback in Music Focused Exercises},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0038.pdf},\n\ + author: Maurin Donneaud and Cedric Honnet and Paul Strohmeier + bibtex: "@inproceedings{mdonneaud2017,\n abstract = {We present a textile pressure\ + \ sensor matrix, designed to be used as a musical multi-touch input device. 
An\ + \ evaluation of our design demonstrated that the sensors pressure response profile\ + \ fits a logarithmic curve (R = 0.98). The input delay of the sensor is 2.1ms.\ + \ The average absolute error in one direction of the sensor was measured to be\ + \ less than 10% of one of the matrix's strips (M = 1.8mm, SD = 1.37mm). We intend\ + \ this technology to be easy to use and implement by experts and novices alike:\ + \ We ensure the ease of use by providing a host application that tracks touch\ + \ points and passes these on as OSC or MIDI messages. We make our design easy\ + \ to implement by providing open source software and hardware and by choosing\ + \ evaluation methods that use accessible tools, making quantitative comparisons\ + \ between different branches of the design easy. We chose to work with textile\ + \ to take advantage of its tactile properties and its malleability of form and\ + \ to pay tribute to textile's rich cultural heritage. },\n address = {Copenhagen,\ + \ Denmark},\n author = {Maurin Donneaud and Cedric Honnet and Paul Strohmeier},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176151},\n issn = {2220-4806},\n\ + \ pages = {7--12},\n publisher = {Aalborg University Copenhagen},\n title = {Designing\ + \ a Multi-Touch eTextile for Music Performances},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0002.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176222 + doi: 10.5281/zenodo.1176151 issn: 2220-4806 - pages: 204--209 + pages: 7--12 publisher: Aalborg University Copenhagen - title: A Qualitative Analysis of Haptic Feedback in Music Focused Exercises - url: http://www.nime.org/proceedings/2017/nime2017_paper0038.pdf + title: Designing a Multi-Touch eTextile for Music Performances + url: 
http://www.nime.org/proceedings/2017/nime2017_paper0002.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: jhe2017 - abstract: 'In the recent years, mechatronic musical instruments (MMI) have become - increasingly parametrically rich. Researchers have developed different interaction - strategies to negotiate the challenge of interfacing with each of the MMI''s high-resolution - parameters in real time. While mapping strategies hold an important aspect of - the musical interaction paradigm for MMI, attention on dedicated input devices - to perform these instruments live should not be neglected. This paper presents - the findings of a user study conducted with participants possessing specialized - musicianship skills for MMI music performance and composition. Study participants - are given three musical tasks to complete using a mechatronic chordophone with - high dimensionality of control via different musical input interfaces (one input - device at a time). This representative user study reveals the features of related-dedicated - input controllers, how they compare against the typical MIDI keyboard/sequencer - paradigm in human-MMI interaction, and provide an indication of the musical function - that expert users prefer for each input interface.' + ID: pwilliams2017 + abstract: "While there are a great variety of digital musical interfaces available\ + \ to the working musician, few o\ner the level of immediate, nuanced and instinctive\ + \ control that one\nnds in an acoustic shaker. bEADS is a prototype of a digital\ + \ musical instrument that utilises the gestural vocabulary associated with shaken\ + \ idiophones and expands on the techniques and sonic possibilities associated\ + \ with them. By using a bespoke physically informed synthesis engine, in conjunction\ + \ with accelerometer and pressure sensor data, an actuated handheld instrument\ + \ has been built that allows for quickly switching between widely di\nering percussive\ + \ sound textures. 
The prototype has been evaluated by three experts with di\n\ + erent levels of involvement in professional music making." address: 'Copenhagen, Denmark' - author: Jingyin He and Jim Murphy and Dale A. Carnegie and Ajay Kapur - bibtex: "@inproceedings{jhe2017,\n abstract = {In the recent years, mechatronic\ - \ musical instruments (MMI) have become increasingly parametrically rich. Researchers\ - \ have developed different interaction strategies to negotiate the challenge of\ - \ interfacing with each of the MMI's high-resolution parameters in real time.\ - \ While mapping strategies hold an important aspect of the musical interaction\ - \ paradigm for MMI, attention on dedicated input devices to perform these instruments\ - \ live should not be neglected. This paper presents the findings of a user study\ - \ conducted with participants possessing specialized musicianship skills for MMI\ - \ music performance and composition. Study participants are given three musical\ - \ tasks to complete using a mechatronic chordophone with high dimensionality of\ - \ control via different musical input interfaces (one input device at a time).\ - \ This representative user study reveals the features of related-dedicated input\ - \ controllers, how they compare against the typical MIDI keyboard/sequencer paradigm\ - \ in human-MMI interaction, and provide an indication of the musical function\ - \ that expert users prefer for each input interface.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Jingyin He and Jim Murphy and Dale A. 
Carnegie and Ajay\ - \ Kapur},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176224},\n issn = {2220-4806},\n\ - \ pages = {210--215},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Towards Related-Dedicated Input Devices for Parametrically Rich Mechatronic\ - \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0039.pdf},\n\ + author: Peter Williams and Daniel Overholt + bibtex: "@inproceedings{pwilliams2017,\n abstract = {While there are a great variety\ + \ of digital musical interfaces available to the working musician, few o\ner the\ + \ level of immediate, nuanced and instinctive control that one\nnds in an acoustic\ + \ shaker. bEADS is a prototype of a digital musical instrument that utilises\ + \ the gestural vocabulary associated with shaken idiophones and expands on the\ + \ techniques and sonic possibilities associated with them. By using a bespoke\ + \ physically informed synthesis engine, in conjunction with accelerometer and\ + \ pressure sensor data, an actuated handheld instrument has been built that allows\ + \ for quickly switching between widely di\nering percussive sound textures. 
The\ + \ prototype has been evaluated by three experts with di\nerent levels of involvement\ + \ in professional music making.},\n address = {Copenhagen, Denmark},\n author\ + \ = {Peter Williams and Daniel Overholt},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176153},\n\ + \ issn = {2220-4806},\n pages = {13--18},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {bEADS Extended Actuated Digital Shaker},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0003.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176224 + doi: 10.5281/zenodo.1176153 issn: 2220-4806 - pages: 210--215 + pages: 13--18 publisher: Aalborg University Copenhagen - title: Towards Related-Dedicated Input Devices for Parametrically Rich Mechatronic - Musical Instruments - url: http://www.nime.org/proceedings/2017/nime2017_paper0039.pdf + title: bEADS Extended Actuated Digital Shaker + url: http://www.nime.org/proceedings/2017/nime2017_paper0003.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: ablatherwick2017 - abstract: 'Music technology can provide unique opportunities to allow access to - music making for those with complex needs in special educational needs (SEN) settings. - Whilst there is a growing trend of research in this area, technology has been - shown to face a variety of issues leading to underuse in this context. This paper - reviews issues raised in literature and in practice for the use of music technology - in SEN settings. The paper then reviews existing principles and frameworks for - designing digital musical instruments (DMIs.) The reviews of literature and current - frameworks are then used to inform a set of design considerations for instruments - for users with complex needs, and in SEN settings. 
18 design considerations are - presented with connections to literature and practice. An implementation example - including future work is presented, and finally a conclusion is then offered. ' + ID: rmichon2017 + abstract: 'Mobile devices constitute a generic platform to make standalone musical + instruments for live performance. However, they were not designed for such use + and have multiple limitations when compared to other types of instruments. We + introduce a framework to quickly design and prototype passive mobile device augmentations + to leverage existing features of the device for the end goal of mobile musical + instruments. An extended list of examples is provided and the results of a workshop, + organized partly to evaluate our framework, are provided.' address: 'Copenhagen, Denmark' - author: Asha Blatherwick and Luke Woodbury and Tom Davis - bibtex: "@inproceedings{ablatherwick2017,\n abstract = {Music technology can provide\ - \ unique opportunities to allow access to music making for those with complex\ - \ needs in special educational needs (SEN) settings. Whilst there is a growing\ - \ trend of research in this area, technology has been shown to face a variety\ - \ of issues leading to underuse in this context. This paper reviews issues raised\ - \ in literature and in practice for the use of music technology in SEN settings.\ - \ The paper then reviews existing principles and frameworks for designing digital\ - \ musical instruments (DMIs.) The reviews of literature and current frameworks\ - \ are then used to inform a set of design considerations for instruments for users\ - \ with complex needs, and in SEN settings. 18 design considerations are presented\ - \ with connections to literature and practice. An implementation example including\ - \ future work is presented, and finally a conclusion is then offered. 
},\n address\ - \ = {Copenhagen, Denmark},\n author = {Asha Blatherwick and Luke Woodbury and\ - \ Tom Davis},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176226},\n issn\ - \ = {2220-4806},\n pages = {216--221},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Design Considerations for Instruments for Users with Complex Needs\ - \ in SEN Settings},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0040.pdf},\n\ + author: Romain Michon and Julius O. Smith and Matthew Wright and Chris Chafe and + John Granzow and Ge Wang + bibtex: "@inproceedings{rmichon2017,\n abstract = {Mobile devices constitute a generic\ + \ platform to make standalone musical instruments for live performance. However,\ + \ they were not designed for such use and have multiple limitations when compared\ + \ to other types of instruments. We introduce a framework to quickly design and\ + \ prototype passive mobile device augmentations to leverage existing features\ + \ of the device for the end goal of mobile musical instruments. An extended list\ + \ of examples is provided and the results of a workshop, organized partly to evaluate\ + \ our framework, are provided.},\n address = {Copenhagen, Denmark},\n author =\ + \ {Romain Michon and Julius O. 
Smith and Matthew Wright and Chris Chafe and John\ + \ Granzow and Ge Wang},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176155},\n\ + \ issn = {2220-4806},\n pages = {19--24},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Passively Augmenting Mobile Devices Towards Hybrid Musical Instrument\ + \ Design},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0004.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176226 + doi: 10.5281/zenodo.1176155 issn: 2220-4806 - pages: 216--221 + pages: 19--24 publisher: Aalborg University Copenhagen - title: Design Considerations for Instruments for Users with Complex Needs in SEN - Settings - url: http://www.nime.org/proceedings/2017/nime2017_paper0040.pdf + title: Passively Augmenting Mobile Devices Towards Hybrid Musical Instrument Design + url: http://www.nime.org/proceedings/2017/nime2017_paper0004.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: ahindle2017 - abstract: 'The didgeridoo is a wind instrument composed of a single large tube often - used as drone instrument for backing up the mids and lows of an ensemble. A didgeridoo - is played by buzzing the lips and blowing air into the didgeridoo. To play a didgeridoo - continously one can employ circular breathing but the volume of air required poses - a real challenge to novice players. In this paper we replace the expense of circular - breathing and lip buzzing with electronic excitation, thus creating an electro-acoustic - didgeridoo or electronic didgeridoo. Thus we describe the didgeridoo excitation - signal, how to replicate it, and the hardware necessary to make an electro-acoustic - didgeridoo driven by speakers and controllable from a computer. 
To properly drive - the didgeridoo we rely upon 4th-order ported bandpass speaker boxes to help guide - our excitation signals into an attached acoustic didgeridoo. The results somewhat - replicate human didgeridoo playing, enabling a new kind of mid to low electro-acoustic - accompaniment without the need for circular breathing. ' + ID: aeldridge2017 + abstract: 'The Feedback Cello is a new electroacoustic actuated instrument in which + feedback can be induced independently on each string. Built from retro-fitted + acoustic cellos, the signals from electromagnetic pickups sitting under each string + are passed to a speaker built into the back of the instrument and to transducers + clamped in varying places across the instrument body. Placement of acoustic and + mechanical actuators on the resonant body of the cello mean that this simple analogue + feedback system is capable of a wide range of complex self-resonating behaviours. + This paper describes the motivations for building these instruments as both a + physical extension to live coding practice and an electroacoustic augmentation + of cello. The design and physical construction is outlined, and modes of performance + described with reference to the first six months of performances and installations. + Future developments and planned investigations are outlined.' address: 'Copenhagen, Denmark' - author: Abram Hindle and Daryl Posnett - bibtex: "@inproceedings{ahindle2017,\n abstract = {The didgeridoo is a wind instrument\ - \ composed of a single large tube often used as drone instrument for backing up\ - \ the mids and lows of an ensemble. A didgeridoo is played by buzzing the lips\ - \ and blowing air into the didgeridoo. To play a didgeridoo continously one can\ - \ employ circular breathing but the volume of air required poses a real challenge\ - \ to novice players. 
In this paper we replace the expense of circular breathing\ - \ and lip buzzing with electronic excitation, thus creating an electro-acoustic\ - \ didgeridoo or electronic didgeridoo. Thus we describe the didgeridoo excitation\ - \ signal, how to replicate it, and the hardware necessary to make an electro-acoustic\ - \ didgeridoo driven by speakers and controllable from a computer. To properly\ - \ drive the didgeridoo we rely upon 4th-order ported bandpass speaker boxes to\ - \ help guide our excitation signals into an attached acoustic didgeridoo. The\ - \ results somewhat replicate human didgeridoo playing, enabling a new kind of\ - \ mid to low electro-acoustic accompaniment without the need for circular breathing.\ - \ },\n address = {Copenhagen, Denmark},\n author = {Abram Hindle and Daryl Posnett},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176228},\n issn = {2220-4806},\n\ - \ pages = {222--226},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Performance with an Electronically Excited Didgeridoo},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0041.pdf},\n\ + author: Alice Eldridge and Chris Kiefer + bibtex: "@inproceedings{aeldridge2017,\n abstract = {The Feedback Cello is a new\ + \ electroacoustic actuated instrument in which feedback can be induced independently\ + \ on each string. Built from retro-fitted acoustic cellos, the signals from electromagnetic\ + \ pickups sitting under each string are passed to a speaker built into the back\ + \ of the instrument and to transducers clamped in varying places across the instrument\ + \ body. Placement of acoustic and mechanical actuators on the resonant body of\ + \ the cello mean that this simple analogue feedback system is capable of a wide\ + \ range of complex self-resonating behaviours. 
This paper describes the motivations\ + \ for building these instruments as both a physical extension to live coding practice\ + \ and an electroacoustic augmentation of cello. The design and physical construction\ + \ is outlined, and modes of performance described with reference to the first\ + \ six months of performances and installations. Future developments and planned\ + \ investigations are outlined.},\n address = {Copenhagen, Denmark},\n author =\ + \ {Alice Eldridge and Chris Kiefer},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176157},\n\ + \ issn = {2220-4806},\n pages = {25--29},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Self-resonating Feedback Cello: Interfacing gestural and generative\ + \ processes in improvised performance},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0005.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176228 + doi: 10.5281/zenodo.1176157 issn: 2220-4806 - pages: 222--226 + pages: 25--29 publisher: Aalborg University Copenhagen - title: Performance with an Electronically Excited Didgeridoo - url: http://www.nime.org/proceedings/2017/nime2017_paper0041.pdf + title: 'Self-resonating Feedback Cello: Interfacing gestural and generative processes + in improvised performance' + url: http://www.nime.org/proceedings/2017/nime2017_paper0005.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: mzbyszynski2017 - abstract: 'Our research examines the use of CodeCircle, an online, collaborative - HTML, CSS, and JavaScript editor, as a rapid prototyping environment for musically - expressive instruments. In CodeCircle, we use two primary libraries: MaxiLib and - RapidLib. 
MaxiLib is a synthesis and sample processing library, ported from the - C++ library Maximillian, which interfaces with the Web Audio API for sound generation - in the browser. RapidLib is a product of the Rapid-Mix project, and allows users - to implement interactive machine learning, using "programming by demonstration" - to design new expressive interactions.' + ID: dhaddad2017 + abstract: 'We introduce a family of fragile electronic musical instruments designed + to be "played" through the act of destruction. Each Fragile Instrument consists + of an analog synthesizing circuit with embedded sensors that detect the destruction + of an outer shell, which is destroyed and replaced for each performance. Destruction + plays an integral role in both the spectacle and the generated sounds. This paper + presents several variations of Fragile Instruments we have created, discussing + their circuit design as well as choices of material for the outer shell and tools + of destruction. We conclude by considering other approaches to create intentionally + destructible electronic musical instruments. ' address: 'Copenhagen, Denmark' - author: Michael Zbyszyński and Mick Grierson and Matthew Yee-King - bibtex: "@inproceedings{mzbyszynski2017,\n abstract = {Our research examines the\ - \ use of CodeCircle, an online, collaborative HTML, CSS, and JavaScript editor,\ - \ as a rapid prototyping environment for musically expressive instruments. In\ - \ CodeCircle, we use two primary libraries: MaxiLib and RapidLib. 
MaxiLib is a\ - \ synthesis and sample processing library, ported from the C++ library Maximillian,\ - \ which interfaces with the Web Audio API for sound generation in the browser.\ - \ RapidLib is a product of the Rapid-Mix project, and allows users to implement\ - \ interactive machine learning, using \"programming by demonstration\" to design\ - \ new expressive interactions.},\n address = {Copenhagen, Denmark},\n author =\ - \ {Michael Zbyszyński and Mick Grierson and Matthew Yee-King},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1181420},\n issn = {2220-4806},\n pages = {227--230},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Rapid Prototyping of\ - \ New Instruments with CodeCircle},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0042.pdf},\n\ + author: Don Derek Haddad and Xiao Xiao and Tod Machover and Joseph Paradiso + bibtex: "@inproceedings{dhaddad2017,\n abstract = {We introduce a family of fragile\ + \ electronic musical instruments designed to be \"played\" through the act of\ + \ destruction. Each Fragile Instrument consists of an analog synthesizing circuit\ + \ with embedded sensors that detect the destruction of an outer shell, which is\ + \ destroyed and replaced for each performance. Destruction plays an integral role\ + \ in both the spectacle and the generated sounds. This paper presents several\ + \ variations of Fragile Instruments we have created, discussing their circuit\ + \ design as well as choices of material for the outer shell and tools of destruction.\ + \ We conclude by considering other approaches to create intentionally destructible\ + \ electronic musical instruments. 
},\n address = {Copenhagen, Denmark},\n author\ + \ = {Don Derek Haddad and Xiao Xiao and Tod Machover and Joseph Paradiso},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176159},\n issn = {2220-4806},\n pages\ + \ = {30--33},\n publisher = {Aalborg University Copenhagen},\n title = {Fragile\ + \ Instruments: Constructing Destructible Musical Interfaces},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0006.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1181420 + doi: 10.5281/zenodo.1176159 issn: 2220-4806 - pages: 227--230 + pages: 30--33 publisher: Aalborg University Copenhagen - title: Rapid Prototyping of New Instruments with CodeCircle - url: http://www.nime.org/proceedings/2017/nime2017_paper0042.pdf + title: 'Fragile Instruments: Constructing Destructible Musical Interfaces' + url: http://www.nime.org/proceedings/2017/nime2017_paper0006.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: fvisi2017 - abstract: 'This paper presents a knowledge-based, data-driven method for using data - describing action-sound couplings collected from a group of people to generate - multiple complex mappings between the performance movements of a musician and - sound synthesis. This is done by using a database of multimodal motion data collected - from multiple subjects coupled with sound synthesis parameters. A series of sound - stimuli is synthesised using the sound engine that will be used in performance. - Multimodal motion data is collected by asking each participant to listen to each - sound stimulus and move as if they were producing the sound using a musical instrument - they are given. Multimodal data is recorded during each performance, and paired - with the synthesis parameters used for generating the sound stimulus. 
The dataset - created using this method is then used to build a topological representation of - the performance movements of the subjects. This representation is then used to - interactively generate training data for machine learning algorithms, and define - mappings for real-time performance. To better illustrate each step of the procedure, - we describe an implementation involving clarinet, motion capture, wearable sensor - armbands, and waveguide synthesis.' + ID: fheller2017 + abstract: 'Learning to play the transverse flute is not an easy task, at least not + for everyone. Since the flute does not have a reed to resonate, the player must + provide a steady, focused stream of air that will cause the flute to resonate + and thereby produce sound. In order to achieve this, the player has to be aware + of the embouchure position to generate an adequate air jet. For a beginner, this + can be a difficult task due to the lack of visual cues or indicators of the air + jet and lips position. This paper attempts to address this problem by presenting + an augmented flute that can make the gestures related to the embouchure visible + and measurable. The augmented flute shows information about the area covered + by the lower lip, estimates the lip hole shape based on noise analysis, and it + shows graphically the air jet direction. Additionally, the augmented flute provides + directional and continuous feedback in real time, based on data acquired by experienced + flutists.' address: 'Copenhagen, Denmark' - author: Federico Visi and Baptiste Caramiaux and Michael Mcloughlin and Eduardo - Miranda - bibtex: "@inproceedings{fvisi2017,\n abstract = {This paper presents a knowledge-based,\ - \ data-driven method for using data describing action-sound couplings collected\ - \ from a group of people to generate multiple complex mappings between the performance\ - \ movements of a musician and sound synthesis. 
This is done by using a database\ - \ of multimodal motion data collected from multiple subjects coupled with sound\ - \ synthesis parameters. A series of sound stimuli is synthesised using the sound\ - \ engine that will be used in performance. Multimodal motion data is collected\ - \ by asking each participant to listen to each sound stimulus and move as if they\ - \ were producing the sound using a musical instrument they are given. Multimodal\ - \ data is recorded during each performance, and paired with the synthesis parameters\ - \ used for generating the sound stimulus. The dataset created using this method\ - \ is then used to build a topological representation of the performance movements\ - \ of the subjects. This representation is then used to interactively generate\ - \ training data for machine learning algorithms, and define mappings for real-time\ - \ performance. To better illustrate each step of the procedure, we describe an\ - \ implementation involving clarinet, motion capture, wearable sensor armbands,\ - \ and waveguide synthesis.},\n address = {Copenhagen, Denmark},\n author = {Federico\ - \ Visi and Baptiste Caramiaux and Michael Mcloughlin and Eduardo Miranda},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176230},\n issn = {2220-4806},\n pages\ - \ = {231--236},\n publisher = {Aalborg University Copenhagen},\n title = {A Knowledge-based,\ - \ Data-driven Method for Action-sound Mapping},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0043.pdf},\n\ - \ year = {2017}\n}\n" + author: Florian Heller and Irene Meying Cheung Ruiz and Jan Borchers + bibtex: "@inproceedings{fheller2017,\n abstract = {Learning to play the transverse\ + \ flute is not an easy task, at least not for everyone. 
Since the flute does\ + \ not have a reed to resonate, the player must provide a steady, focused stream\ + \ of air that will cause the flute to resonate and thereby produce sound. In\ + \ order to achieve this, the player has to be aware of the embouchure position\ + \ to generate an adequate air jet. For a beginner, this can be a difficult task\ + \ due to the lack of visual cues or indicators of the air jet and lips position.\ + \ This paper attempts to address this problem by presenting an augmented flute\ + \ that can make the gestures related to the embouchure visible and measurable.\ + \ The augmented flute shows information about the area covered by the lower lip,\ + \ estimates the lip hole shape based on noise analysis, and it shows graphically\ + \ the air jet direction. Additionally, the augmented flute provides directional\ + \ and continuous feedback in real time, based on data acquired by experienced\ + \ flutists.},\n address = {Copenhagen, Denmark},\n author = {Florian Heller and\ + \ Irene Meying Cheung Ruiz and Jan Borchers},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176161},\n issn = {2220-4806},\n pages = {34--37},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {An Augmented Flute for Beginners},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0007.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176230 + doi: 10.5281/zenodo.1176161 issn: 2220-4806 - pages: 231--236 + pages: 34--37 publisher: Aalborg University Copenhagen - title: 'A Knowledge-based, Data-driven Method for Action-sound Mapping' - url: http://www.nime.org/proceedings/2017/nime2017_paper0043.pdf + title: An Augmented Flute for Beginners + url: http://www.nime.org/proceedings/2017/nime2017_paper0007.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: 
ssalazar2017 - abstract: 'ChuckPad is a network-based platform for sharing code, modules, patches, - and even entire musical works written on the ChucK programming language and other - music programming platforms. ChuckPad provides a single repository and record - of musical code from supported musical programming systems, an interface for organizing, - browsing, and searching this body of code, and a readily accessible means of evaluating - the musical output of code in the repository. ChuckPad consists of an open-source - modular backend service to be run on a network server or cloud infrastructure - and a client library to facilitate integrating end-user applications with the - platform. While ChuckPad has been initially developed for sharing ChucK source - code, its design can accommodate any type of music programming system oriented - around small text- or binary-format documents. To this end, ChuckPad has also - been extended to the Auraglyph handwriting-based graphical music programming system.' - address: 'Copenhagen, Denmark' - author: Spencer Salazar and Mark Cerqueira - bibtex: "@inproceedings{ssalazar2017,\n abstract = {ChuckPad is a network-based\ - \ platform for sharing code, modules, patches, and even entire musical works written\ - \ on the ChucK programming language and other music programming platforms. ChuckPad\ - \ provides a single repository and record of musical code from supported musical\ - \ programming systems, an interface for organizing, browsing, and searching this\ - \ body of code, and a readily accessible means of evaluating the musical output\ - \ of code in the repository. ChuckPad consists of an open-source modular backend\ - \ service to be run on a network server or cloud infrastructure and a client library\ - \ to facilitate integrating end-user applications with the platform. 
While ChuckPad\ - \ has been initially developed for sharing ChucK source code, its design can accommodate\ - \ any type of music programming system oriented around small text- or binary-format\ - \ documents. To this end, ChuckPad has also been extended to the Auraglyph handwriting-based\ - \ graphical music programming system.},\n address = {Copenhagen, Denmark},\n author\ - \ = {Spencer Salazar and Mark Cerqueira},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176232},\n\ - \ issn = {2220-4806},\n pages = {237--240},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {ChuckPad: Social Coding for Computer Music},\n url =\ - \ {http://www.nime.org/proceedings/2017/nime2017_paper0044.pdf},\n year = {2017}\n\ - }\n" + ID: gisaac2017 + abstract: 'This paper explores the idea of using virtual textural terrains as a + means of generating haptic profiles for force-feedback controllers. This approach + breaks from the paradigm established within audio-haptic research over the last + few decades where physical models within virtual environments are designed to + transduce gesture into sonic output. We outline a method for generating multimodal + terrains using basis functions, which are rendered into monochromatic visual representations + for inspection. This visual terrain is traversed using a haptic controller, the + NovInt Falcon, which in turn receives force information based on the grayscale + value of its location in this virtual space. As the image is traversed by a performer + the levels of resistance vary, and the image is realized as a physical terrain. + We discuss the potential of this approach to afford engaging musical experiences + for both the performer and the audience as iterated through numerous performances.' 
+ address: 'Copenhagen, Denmark' + author: Gabriella Isaac and Lauren Hayes and Todd Ingalls + bibtex: "@inproceedings{gisaac2017,\n abstract = {This paper explores the idea of\ + \ using virtual textural terrains as a means of generating haptic profiles for\ + \ force-feedback controllers. This approach breaks from the paradigm established\ + \ within audio-haptic research over the last few decades where physical models\ + \ within virtual environments are designed to transduce gesture into sonic output.\ + \ We outline a method for generating multimodal terrains using basis functions,\ + \ which are rendered into monochromatic visual representations for inspection.\ + \ This visual terrain is traversed using a haptic controller, the NovInt Falcon,\ + \ which in turn receives force information based on the grayscale value of its\ + \ location in this virtual space. As the image is traversed by a performer the\ + \ levels of resistance vary, and the image is realized as a physical terrain.\ + \ We discuss the potential of this approach to afford engaging musical experiences\ + \ for both the performer and the audience as iterated through numerous performances.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Gabriella Isaac and Lauren Hayes\ + \ and Todd Ingalls},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176163},\n\ + \ issn = {2220-4806},\n pages = {38--41},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Cross-Modal Terrains: Navigating Sonic Space through Haptic Feedback},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0008.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176232 + doi: 10.5281/zenodo.1176163 issn: 2220-4806 - pages: 237--240 + pages: 38--41 publisher: Aalborg University Copenhagen - title: 'ChuckPad: Social Coding for 
Computer Music' - url: http://www.nime.org/proceedings/2017/nime2017_paper0044.pdf + title: 'Cross-Modal Terrains: Navigating Sonic Space through Haptic Feedback' + url: http://www.nime.org/proceedings/2017/nime2017_paper0008.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: aberndt2017 - abstract: 'Tabletop role-playing games are a collaborative narrative experience. - Throughout gaming sessions, Ambient music and noises are frequently used to enrich - and facilitate the narration. With AmbiDice we introduce a tangible interface - and music generator specially devised for this application scenario. We detail - the technical implementation of the device, the software architecture of the music - system (AmbientMusicBox) and the scripting language to compose Ambient music and - soundscapes. AmbiDice was presented to experienced players and gained positive - feedback and constructive suggestions for further development.' + ID: jwu2017 + abstract: 'This paper presents solutions to improve reliability and to work around + challenges of using a Leap Motion; sensor as a gestural control and input device + in digital music instrument (DMI) design. We implement supervised learning algorithms + (k-nearest neighbors, support vector machine, binary decision tree, and artificial + neural network) to estimate hand motion data, which is not typically captured + by the sensor. Two problems are addressed: 1) the sensor cannot detect overlapping + hands 2) The sensor''s limited detection range. Training examples included 7 kinds + of overlapping hand gestures as well as hand trajectories where a hand goes out + of the sensor''s range. The overlapping gestures were treated as a classification + problem and the best performing model was k-nearest neighbors with 62% accuracy. 
+ The out-of-range problem was treated first as a clustering problem to group the + training examples into a small number of trajectory types, then as a classification + problem to predict trajectory type based on the hand''s motion before going out + of range. The best performing model was k-nearest neighbors with an accuracy of + 30%. The prediction models were implemented in an ongoing multimedia electroacoustic + vocal performance and an educational project named Embodied Sonic Meditation (ESM). ' address: 'Copenhagen, Denmark' - author: Axel Berndt and Simon Waloschek and Aristotelis Hadjakos and Alexander Leemhuis - bibtex: "@inproceedings{aberndt2017,\n abstract = {Tabletop role-playing games are\ - \ a collaborative narrative experience. Throughout gaming sessions, Ambient music\ - \ and noises are frequently used to enrich and facilitate the narration. With\ - \ AmbiDice we introduce a tangible interface and music generator specially devised\ - \ for this application scenario. We detail the technical implementation of the\ - \ device, the software architecture of the music system (AmbientMusicBox) and\ - \ the scripting language to compose Ambient music and soundscapes. 
AmbiDice was\ - \ presented to experienced players and gained positive feedback and constructive\ - \ suggestions for further development.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Axel Berndt and Simon Waloschek and Aristotelis Hadjakos and Alexander\ - \ Leemhuis},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176234},\n issn\ - \ = {2220-4806},\n pages = {241--244},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {AmbiDice: An Ambient Music Interface for Tabletop Role-Playing Games},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0045.pdf},\n year\ - \ = {2017}\n}\n" + author: Jiayue Wu and Mark Rau and Yun Zhang and Yijun Zhou and Matt Wright + bibtex: "@inproceedings{jwu2017,\n abstract = {This paper presents solutions to\ + \ improve reliability and to work around challenges of using a Leap Motion; sensor\ + \ as a gestural control and input device in digital music instrument (DMI) design.\ + \ We implement supervised learning algorithms (k-nearest neighbors, support vector\ + \ machine, binary decision tree, and artificial neural network) to estimate hand\ + \ motion data, which is not typically captured by the sensor. Two problems are\ + \ addressed: 1) the sensor cannot detect overlapping hands 2) The sensor's limited\ + \ detection range. Training examples included 7 kinds of overlapping hand gestures\ + \ as well as hand trajectories where a hand goes out of the sensor's range. The\ + \ overlapping gestures were treated as a classification problem and the best performing\ + \ model was k-nearest neighbors with 62% accuracy. The out-of-range problem was\ + \ treated first as a clustering problem to group the training examples into a\ + \ small number of trajectory types, then as a classification problem to predict\ + \ trajectory type based on the hand's motion before going out of range. 
The best\ + \ performing model was k-nearest neighbors with an accuracy of 30%. The prediction\ + \ models were implemented in an ongoing multimedia electroacoustic vocal performance\ + \ and an educational project named Embodied Sonic Meditation (ESM). },\n address\ + \ = {Copenhagen, Denmark},\n author = {Jiayue Wu and Mark Rau and Yun Zhang and\ + \ Yijun Zhou and Matt Wright},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176165},\n\ + \ issn = {2220-4806},\n pages = {42--47},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Towards Robust Tracking with an Unreliable Motion Sensor Using Machine\ + \ Learning},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0009.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176234 + doi: 10.5281/zenodo.1176165 issn: 2220-4806 - pages: 241--244 + pages: 42--47 publisher: Aalborg University Copenhagen - title: 'AmbiDice: An Ambient Music Interface for Tabletop Role-Playing Games' - url: http://www.nime.org/proceedings/2017/nime2017_paper0045.pdf + title: Towards Robust Tracking with an Unreliable Motion Sensor Using Machine Learning + url: http://www.nime.org/proceedings/2017/nime2017_paper0009.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: sferguson2017 - abstract: 'This paper describes the sound design for Bloom, a light and sound installation - made up of 1000 distributed independent audio-visual pixel devices, each with - RGB LEDs, Wifi, Accelerometer, GPS sensor, and sound hardware. These types of - systems have been explored previously, but only a few systems have exceeded 30-50 - devices and very few have included sound capability, and therefore the sound design - possibilities for large systems of distributed audio devices are not yet well - understood. 
In this article we describe the hardware and software implementation - of sound synthesis for this system, and the implications for design of media for - this context.' + ID: abarbosa2017 + abstract: 'Sounding Architecture, is the first collaborative teaching development + between Department of Architecture and Department of Music at the University of + Hong Kong (HKU), introduced in Fall 2016. In this paper we present critical observations + about the studio after a final public presentation of all projects. The Review + was conducted with demonstrations by groups of students supervised by different + Lecturer, in each case focusing on a different strategy to create a connection + between Sound, Music, Acoustics, Space and Architectural Design. There was an + assumption that the core working process would have to include the design of a + new musical instrument, which in some cases became the final deliverable of the + Studio and in other cases a step in a process that leads to a different outcome + (such as an architectural Design, a performance or social experiment). One other + relevant aspect was that Digital technology was used in the design and fabrication + of the physical instruments'' prototypes, but in very few cases, it was used in + the actual generation or enhancement of sound, with the instruments relying almost + exclusively in acoustic and mechanical sound. ' address: 'Copenhagen, Denmark' - author: Sam Ferguson and Anthony Rowe and Oliver Bown and Liam Birtles and Chris - Bennewith - bibtex: "@inproceedings{sferguson2017,\n abstract = {This paper describes the sound\ - \ design for Bloom, a light and sound installation made up of 1000 distributed\ - \ independent audio-visual pixel devices, each with RGB LEDs, Wifi, Accelerometer,\ - \ GPS sensor, and sound hardware. 
These types of systems have been explored previously,\ - \ but only a few systems have exceeded 30-50 devices and very few have included\ - \ sound capability, and therefore the sound design possibilities for large systems\ - \ of distributed audio devices are not yet well understood. In this article we\ - \ describe the hardware and software implementation of sound synthesis for this\ - \ system, and the implications for design of media for this context.},\n address\ - \ = {Copenhagen, Denmark},\n author = {Sam Ferguson and Anthony Rowe and Oliver\ - \ Bown and Liam Birtles and Chris Bennewith},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176236},\n issn = {2220-4806},\n pages = {245--250},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {Sound Design for a System of 1000\ - \ Distributed Independent Audio-Visual Devices},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0046.pdf},\n\ + author: Álvaro Barbosa and Thomas Tsang + bibtex: "@inproceedings{abarbosa2017,\n abstract = {Sounding Architecture, is the\ + \ first collaborative teaching development between Department of Architecture\ + \ and Department of Music at the University of Hong Kong (HKU), introduced in\ + \ Fall 2016. In this paper we present critical observations about the studio after\ + \ a final public presentation of all projects. The Review was conducted with demonstrations\ + \ by groups of students supervised by different Lecturer, in each case focusing\ + \ on a different strategy to create a connection between Sound, Music, Acoustics,\ + \ Space and Architectural Design. 
There was an assumption that the core working\ + \ process would have to include the design of a new musical instrument, which\ + \ in some cases became the final deliverable of the Studio and in other cases\ + \ a step in a process that leads to a different outcome (such as an architectural\ + \ Design, a performance or social experiment). One other relevant aspect was that\ + \ Digital technology was used in the design and fabrication of the physical instruments'\ + \ prototypes, but in very few cases, it was used in the actual generation or enhancement\ + \ of sound, with the instruments relying almost exclusively in acoustic and mechanical\ + \ sound. },\n address = {Copenhagen, Denmark},\n author = {Álvaro Barbosa and\ + \ Thomas Tsang},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176167},\n\ + \ issn = {2220-4806},\n pages = {48--51},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Sounding Architecture: Inter-Disciplinary Studio at HKU},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0010.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176236 + doi: 10.5281/zenodo.1176167 issn: 2220-4806 - pages: 245--250 + pages: 48--51 publisher: Aalborg University Copenhagen - title: Sound Design for a System of 1000 Distributed Independent Audio-Visual Devices - url: http://www.nime.org/proceedings/2017/nime2017_paper0046.pdf + title: 'Sounding Architecture: Inter-Disciplinary Studio at HKU' + url: http://www.nime.org/proceedings/2017/nime2017_paper0010.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: rvogl2017 - abstract: 'An important part of electronic dance music (EDM) is the so-called beat. It - is defined by the drum track of the piece and is a style defining element. 
While - producing EDM, creating the drum track tends to be delicate, yet labor intensive - work. In this work we present a touch-interface-based prototype with the goal - to simplify this task. The prototype aims at supporting musicians to create rhythmic - patterns in the context of EDM production and live performances. Starting with - a seed pattern which is provided by the user, a list of variations with varying - degree of deviation from the seed pattern is generated. The interface provides - simple ways to enter, edit, visualize and browse through the patterns. Variations - are generated by means of an artificial neural network which is trained on a database - of drum rhythm patterns extracted from a commercial drum loop library. To evaluate - the user interface and pattern generation quality a user study with experts in - EDM production was conducted. It was found that participants responded positively - to the user interface and the quality of the generated patterns. Furthermore, - the experts consider the prototype helpful for both studio production situations - and live performances.' + ID: mlerner2017 + abstract: 'This paper describes the process of creation of a new digital musical + instrument: Osiris. This device is based on the circulation of liquids for the + generation of musical notes. Besides the system of liquid distribution, a module + that generates MIDI events was designed and built based on the Arduino platform; + such module is employed together with a Proteus 2000 sound generator. The programming + of the control module as well as the choice of sound-generating module had as + their main objective that the instrument should provide an ample variety of sound + and musical possibilities, controllable in real time.' address: 'Copenhagen, Denmark' - author: Richard Vogl and Peter Knees - bibtex: "@inproceedings{rvogl2017,\n abstract = {An important part of electronic\ - \ dance music (EDM) is the so-called beat. 
It is defined by the drum track of\ - \ the piece and is a style defining element. While producing EDM, creating the\ - \ drum track tends to be delicate, yet labor intensive work. In this work we\ - \ present a touch-interface-based prototype with the goal to simplify this task.\ - \ The prototype aims at supporting musicians to create rhythmic patterns in the\ - \ context of EDM production and live performances. Starting with a seed pattern\ - \ which is provided by the user, a list of variations with varying degree of deviation\ - \ from the seed pattern is generated. The interface provides simple ways to enter,\ - \ edit, visualize and browse through the patterns. Variations are generated by\ - \ means of an artificial neural network which is trained on a database of drum\ - \ rhythm patterns extracted from a commercial drum loop library. To evaluate\ - \ the user interface and pattern generation quality a user study with experts\ - \ in EDM production was conducted. It was found that participants responded positively\ - \ to the user interface and the quality of the generated patterns. Furthermore,\ - \ the experts consider the prototype helpful for both studio production situations\ - \ and live performances.},\n address = {Copenhagen, Denmark},\n author = {Richard\ - \ Vogl and Peter Knees},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176238},\n\ - \ issn = {2220-4806},\n pages = {251--256},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {An Intelligent Drum Machine for Electronic Dance Music\ - \ Production and Performance},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0047.pdf},\n\ + author: 'Matus Lerner, Martín' + bibtex: "@inproceedings{mlerner2017,\n abstract = {This paper describes the process\ + \ of creation of a new digital musical instrument: Osiris. 
This device is based\ + \ on the circulation of liquids for the generation of musical notes. Besides the\ + \ system of liquid distribution, a module that generates MIDI events was designed\ + \ and built based on the Arduino platform; such module is employed together with\ + \ a Proteus 2000 sound generator. The programming of the control module as well\ + \ as the choice of sound-generating module had as their main objective that the\ + \ instrument should provide an ample variety of sound and musical possibilities,\ + \ controllable in real time.},\n address = {Copenhagen, Denmark},\n author = {Matus\ + \ Lerner, Martín},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176169},\n\ + \ issn = {2220-4806},\n pages = {52--55},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Osiris: a liquid based digital musical instrument},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0011.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176238 + doi: 10.5281/zenodo.1176169 issn: 2220-4806 - pages: 251--256 + pages: 52--55 publisher: Aalborg University Copenhagen - title: An Intelligent Drum Machine for Electronic Dance Music Production and Performance - url: http://www.nime.org/proceedings/2017/nime2017_paper0047.pdf + title: 'Osiris: a liquid based digital musical instrument' + url: http://www.nime.org/proceedings/2017/nime2017_paper0011.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: mjensen2017 - abstract: 'Neuroimaging is a powerful tool to explore how and why humans engage - in music. Magnetic resonance imaging (MRI) has allowed us to identify brain networks - and regions implicated in a range of cognitive tasks including music perception - and performance. 
However, MRI-scanners are noisy and cramped, presenting a challenging - environment for playing an instrument. Here, we present an MRI-compatible polyphonic - keyboard with a materials cost of 850 USD, designed and tested for safe use in - 3T (three Tesla) MRI-scanners. We describe design considerations, and prior work - in the field. In addition, we provide recommendations for future designs and comment - on the possibility of using the keyboard in magnetoencephalography (MEG) systems. - Preliminary results indicate a comfortable playing experience with no disturbance - of the imaging process.' + ID: sstasis2017 + abstract: 'Musicians, audio engineers and producers often make use of common timbral + adjectives to describe musical signals and transformations. However, the subjective + nature of these terms, and the variability with respect to musical context often + leads to inconsistencies in their definition. In this study, a model is proposed + for controlling an equaliser by navigating clusters of datapoints, which represent + grouped parameter settings with the same timbral description. The interface allows + users to identify the nearest cluster to their current parameter setting and recommends + changes based on its relationship to a cluster centroid. To do this, we apply + dimensionality reduction to a dataset of equaliser curves described as warm and + bright using a stacked autoencoder, then group the entries using an agglomerative + clustering algorithm with a coherence based distance criterion. To test the efficacy + of the system, we implement listening tests and show that subjects are able to + match datapoints to their respective sub-representations with 93.75% mean accuracy.' address: 'Copenhagen, Denmark' - author: Martin Snejbjerg Jensen and Ole Adrian Heggli and Patricia Alves Da Mota - and Peter Vuust - bibtex: "@inproceedings{mjensen2017,\n abstract = {Neuroimaging is a powerful tool\ - \ to explore how and why humans engage in music. 
Magnetic resonance imaging (MRI)\ - \ has allowed us to identify brain networks and regions implicated in a range\ - \ of cognitive tasks including music perception and performance. However, MRI-scanners\ - \ are noisy and cramped, presenting a challenging environment for playing an instrument.\ - \ Here, we present an MRI-compatible polyphonic keyboard with a materials cost\ - \ of 850 USD, designed and tested for safe use in 3T (three Tesla) MRI-scanners.\ - \ We describe design considerations, and prior work in the field. In addition,\ - \ we provide recommendations for future designs and comment on the possibility\ - \ of using the keyboard in magnetoencephalography (MEG) systems. Preliminary results\ - \ indicate a comfortable playing experience with no disturbance of the imaging\ - \ process.},\n address = {Copenhagen, Denmark},\n author = {Martin Snejbjerg Jensen\ - \ and Ole Adrian Heggli and Patricia Alves Da Mota and Peter Vuust},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176240},\n issn = {2220-4806},\n pages\ - \ = {257--260},\n publisher = {Aalborg University Copenhagen},\n title = {A low-cost\ - \ MRI compatible keyboard},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0048.pdf},\n\ - \ year = {2017}\n}\n" + author: Spyridon Stasis and Jason Hockman and Ryan Stables + bibtex: "@inproceedings{sstasis2017,\n abstract = {Musicians, audio engineers and\ + \ producers often make use of common timbral adjectives to describe musical signals\ + \ and transformations. However, the subjective nature of these terms, and the\ + \ variability with respect to musical context often leads to inconsistencies in\ + \ their definition. In this study, a model is proposed for controlling an equaliser\ + \ by navigating clusters of datapoints, which represent grouped parameter settings\ + \ with the same timbral description. 
The interface allows users to identify the\ + \ nearest cluster to their current parameter setting and recommends changes based\ + \ on its relationship to a cluster centroid. To do this, we apply dimensionality\ + \ reduction to a dataset of equaliser curves described as warm and bright using\ + \ a stacked autoencoder, then group the entries using an agglomerative clustering\ + \ algorithm with a coherence based distance criterion. To test the efficacy of\ + \ the system, we implement listening tests and show that subjects are able to\ + \ match datapoints to their respective sub-representations with 93.75% mean accuracy.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Spyridon Stasis and Jason Hockman\ + \ and Ryan Stables},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176171},\n\ + \ issn = {2220-4806},\n pages = {56--61},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Navigating Descriptive Sub-Representations of Musical Timbre},\n url\ + \ = {http://www.nime.org/proceedings/2017/nime2017_paper0012.pdf},\n year = {2017}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176240 + doi: 10.5281/zenodo.1176171 issn: 2220-4806 - pages: 257--260 + pages: 56--61 publisher: Aalborg University Copenhagen - title: A low-cost MRI compatible keyboard - url: http://www.nime.org/proceedings/2017/nime2017_paper0048.pdf + title: Navigating Descriptive Sub-Representations of Musical Timbre + url: http://www.nime.org/proceedings/2017/nime2017_paper0012.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: slee2017 - abstract: 'Music listening has changed greatly with the emergence of music streaming - services, such as Spotify or YouTube. In this paper, we discuss an artistic practice - that organizes streaming videos to perform a real-time improvisation via live - coding. 
A live coder uses any available video from YouTube, a video streaming - service, as source material to perform an improvised audiovisual piece. The challenge - is to manipulate the emerging media that are streamed from a networked service. - The musical gesture can be limited due to the provided functionalities of the - YouTube API. However, the potential sonic and visual space that a musician can - explore is practically infinite. The practice embraces the juxtaposition of manipulating - emerging media in old-fashioned ways similar to experimental musicians in the - 60''s physically manipulating tape loops or scratching vinyl records on a phonograph - while exploring the possibility of doing so by drawing on the gigantic repository - of all kinds of videos. In this paper, we discuss the challenges of using streaming - videos from the platform as musical materials in computer music and introduce - a live coding environment that we developed for real-time improvisation. ' + ID: pwilliams2017a + abstract: 'Pitch Fork is a prototype of an alternate, actuated digital musical instrument + (DMI). It uses 5 infra-red and 4 piezoelectric sensors to control an additive + synthesis engine. Iron bars are used as the physical point of contact in interaction + with the aim of using material computation to control aspects of the digitally + produced sound. This choice of material was also chosen to affect player experience. Sensor + readings are relayed to a Macbook via an Arduino Mega. Mappings and audio output + signal is carried out with Pure Data Extended.' address: 'Copenhagen, Denmark' - author: Sang Won Lee and Jungho Bang and Georg Essl - bibtex: "@inproceedings{slee2017,\n abstract = {Music listening has changed greatly\ - \ with the emergence of music streaming services, such as Spotify or YouTube.\ - \ In this paper, we discuss an artistic practice that organizes streaming videos\ - \ to perform a real-time improvisation via live coding. 
A live coder uses any\ - \ available video from YouTube, a video streaming service, as source material\ - \ to perform an improvised audiovisual piece. The challenge is to manipulate the\ - \ emerging media that are streamed from a networked service. The musical gesture\ - \ can be limited due to the provided functionalities of the YouTube API. However,\ - \ the potential sonic and visual space that a musician can explore is practically\ - \ infinite. The practice embraces the juxtaposition of manipulating emerging media\ - \ in old-fashioned ways similar to experimental musicians in the 60's physically\ - \ manipulating tape loops or scratching vinyl records on a phonograph while exploring\ - \ the possibility of doing so by drawing on the gigantic repository of all kinds\ - \ of videos. In this paper, we discuss the challenges of using streaming videos\ - \ from the platform as musical materials in computer music and introduce a live\ - \ coding environment that we developed for real-time improvisation. },\n address\ - \ = {Copenhagen, Denmark},\n author = {Sang Won Lee and Jungho Bang and Georg\ - \ Essl},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176242},\n issn = {2220-4806},\n\ - \ pages = {261--266},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Live Coding YouTube: Organizing Streaming Media for an Audiovisual Performance},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0049.pdf},\n year\ - \ = {2017}\n}\n" + author: Peter Williams and Daniel Overholt + bibtex: "@inproceedings{pwilliams2017a,\n abstract = {Pitch Fork is a prototype\ + \ of an alternate, actuated digital musical instrument (DMI). It uses 5 infra-red\ + \ and 4 piezoelectric sensors to control an additive synthesis engine. 
Iron bars\ + \ are used as the physical point of contact in interaction with the aim of using\ + \ material computation to control aspects of the digitally produced sound. This\ + \ choice of material was also chosen to affect player experience. Sensor readings\ + \ are relayed to a Macbook via an Arduino Mega. Mappings and audio output signal\ + \ is carried out with Pure Data Extended.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Peter Williams and Daniel Overholt},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176173},\n issn = {2220-4806},\n pages = {62--64},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {Pitch Fork: A Novel tactile Digital\ + \ Musical Instrument},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0013.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176242 + doi: 10.5281/zenodo.1176173 issn: 2220-4806 - pages: 261--266 + pages: 62--64 publisher: Aalborg University Copenhagen - title: 'Live Coding YouTube: Organizing Streaming Media for an Audiovisual Performance' - url: http://www.nime.org/proceedings/2017/nime2017_paper0049.pdf + title: 'Pitch Fork: A Novel tactile Digital Musical Instrument' + url: http://www.nime.org/proceedings/2017/nime2017_paper0013.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: skiratli2017 - abstract: 'In this paper we present HIVE, a parametrically designed interactive - sound sculpture with embedded multi-channel digital audio which explores the intersection - of sculptural form and musical instrument design. We examine sculpture as an integral - part of music composition and performance, expanding the definition of musical - instrument to include the gestalt of loudspeakers, architectural spaces, and material - form. 
After examining some related works, we frame HIVE as an interactive sculpture - for musical expression. We then describe our design and production process, which - hinges on the relationship between sound, space, and sculptural form. Finally, - we discuss the installation and its implications.' + ID: cerdem2017 + abstract: 'Biostomp is a new musical interface that relies on the use mechanomyography + (MMG) as a biocontrol mechanism in live performance situations. Designed in the + form of a stomp box, Biostomp translates a performer''s muscle movements into + control signals. A custom MMG sensor captures the acoustic output of muscle tissue + oscillations resulting from contractions. An analog circuit amplifies and filters + these signals, and a micro-controller translates the processed signals into pulses. + These pulses are used to activate a stepper motor mechanism, which is designed + to be mounted on parameter knobs on effects pedals. The primary goal in designing + Biostomp is to offer a robust, inexpensive, and easy-to-operate platform for integrating + biological signals into both traditional and contemporary music performance practices + without requiring an intermediary computer software. In this paper, we discuss + the design, implementation and evaluation of Biostomp. Following an overview of + related work on the use of biological signals in artistic projects, we offer a + discussion of our approach to conceptualizing and fabricating a biocontrol mechanism + as a new musical interface. We then discuss the results of an evaluation study + conducted with 21 professional musicians. A video abstract for Biostomp can be + viewed at vimeo.com/biostomp/video.' 
address: 'Copenhagen, Denmark' - author: Solen Kiratli and Akshay Cadambi and Yon Visell - bibtex: "@inproceedings{skiratli2017,\n abstract = {In this paper we present HIVE,\ - \ a parametrically designed interactive sound sculpture with embedded multi-channel\ - \ digital audio which explores the intersection of sculptural form and musical\ - \ instrument design. We examine sculpture as an integral part of music composition\ - \ and performance, expanding the definition of musical instrument to include the\ - \ gestalt of loudspeakers, architectural spaces, and material form. After examining\ - \ some related works, we frame HIVE as an interactive sculpture for musical expression.\ - \ We then describe our design and production process, which hinges on the relationship\ - \ between sound, space, and sculptural form. Finally, we discuss the installation\ - \ and its implications.},\n address = {Copenhagen, Denmark},\n author = {Solen\ - \ Kiratli and Akshay Cadambi and Yon Visell},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176244},\n issn = {2220-4806},\n pages = {267--270},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {HIVE: An Interactive Sculpture\ - \ for Musical Expression},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0050.pdf},\n\ - \ year = {2017}\n}\n" + author: Cagri Erdem and Anil Camci and Angus Forbes + bibtex: "@inproceedings{cerdem2017,\n abstract = {Biostomp is a new musical interface\ + \ that relies on the use mechanomyography (MMG) as a biocontrol mechanism in live\ + \ performance situations. Designed in the form of a stomp box, Biostomp translates\ + \ a performer's muscle movements into control signals. 
A custom MMG sensor captures\ + \ the acoustic output of muscle tissue oscillations resulting from contractions.\ + \ An analog circuit amplifies and filters these signals, and a micro-controller\ + \ translates the processed signals into pulses. These pulses are used to activate\ + \ a stepper motor mechanism, which is designed to be mounted on parameter knobs\ + \ on effects pedals. The primary goal in designing Biostomp is to offer a robust,\ + \ inexpensive, and easy-to-operate platform for integrating biological signals\ + \ into both traditional and contemporary music performance practices without requiring\ + \ an intermediary computer software. In this paper, we discuss the design, implementation\ + \ and evaluation of Biostomp. Following an overview of related work on the use\ + \ of biological signals in artistic projects, we offer a discussion of our approach\ + \ to conceptualizing and fabricating a biocontrol mechanism as a new musical interface.\ + \ We then discuss the results of an evaluation study conducted with 21 professional\ + \ musicians. 
A video abstract for Biostomp can be viewed at vimeo.com/biostomp/video.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Cagri Erdem and Anil Camci and\ + \ Angus Forbes},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176175},\n\ + \ issn = {2220-4806},\n pages = {65--70},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Biostomp: A Biocontrol System for Embodied Performance Using Mechanomyography},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0014.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176244 + doi: 10.5281/zenodo.1176175 issn: 2220-4806 - pages: 267--270 + pages: 65--70 publisher: Aalborg University Copenhagen - title: 'HIVE: An Interactive Sculpture for Musical Expression' - url: http://www.nime.org/proceedings/2017/nime2017_paper0050.pdf + title: 'Biostomp: A Biocontrol System for Embodied Performance Using Mechanomyography' + url: http://www.nime.org/proceedings/2017/nime2017_paper0014.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: mblessing2017 - abstract: 'The JoyStyx Quartet is a series of four embedded acoustic instruments. - Each of these instruments is a five-voice granular synthesizer which processes - a different sound source to give each a unique timbre and range. The performer - interacts with these voices individually with five joysticks positioned to lay - under the performer''s fingertips. The JoyStyx uses a custom-designed printed - circuit board. This board provides the joystick layout and connects them to an - Arduino Micro, which serializes the ten analog X/Y position values and the five - digital button presses. This data controls the granular and spatial parameters - of a Pure Data patch running on a Raspberry Pi 2. 
The nature of the JoyStyx construction - causes the frequency response to be coloured by the materials and their geometry, - leading to a unique timbre. This endows the instrument with a more ``analog'''' - or ``natural'''' sound, despite relying on computer-based algorithms. In concert, - the quartet performance with the JoyStyx may potentially be the first performance - ever with a quartet of Embedded Acoustic Instruments.' + ID: eknudsen2017 + abstract: 'An application for ballet training is presented that monitors the posture + position (straightness of the spine and rotation of the pelvis) deviation from + the ideal position in real-time. The human skeletal data is acquired through a + Microsoft Kinect v2. The movement of the student is mirrored through an abstract + skeletal figure and instructions are provided through a virtual teacher. Posture + deviation is measured in the following way: Torso misalignment is calculated by + comparing hip center joint, shoulder center joint and neck joint position with + an ideal posture position retrieved in an initial calibration procedure. Pelvis + deviation is expressed as the xz-rotation of the hip-center joint. The posture + deviation is sonified via a varying cut-off frequency of a high-pass filter applied + to floating water sound. The posture deviation is visualized via a curve and a + rigged skeleton in which the misaligned torso parts are color-coded. In an experiment + with 9-12 year-old dance students from a ballet school, comparing the audio-visual + feedback modality with no feedback leads to an increase in posture accuracy (p + < 0.001, Cohen''s d = 1.047). Reaction card feedback and expert interviews + indicate that the feedback is considered fun and useful for training independently + from the teacher.' 
address: 'Copenhagen, Denmark' - author: Matthew Blessing and Edgar Berdahl - bibtex: "@inproceedings{mblessing2017,\n abstract = {The JoyStyx Quartet is a series\ - \ of four embedded acoustic instruments. Each of these instruments is a five-voice\ - \ granular synthesizer which processes a different sound source to give each a\ - \ unique timbre and range. The performer interacts with these voices individually\ - \ with five joysticks positioned to lay under the performer's fingertips. The\ - \ JoyStyx uses a custom-designed printed circuit board. This board provides the\ - \ joystick layout and connects them to an Arduino Micro, which serializes the\ - \ ten analog X/Y position values and the five digital button presses. This data\ - \ controls the granular and spatial parameters of a Pure Data patch running on\ - \ a Raspberry Pi 2. The nature of the JoyStyx construction causes the frequency\ - \ response to be coloured by the materials and their geometry, leading to a unique\ - \ timbre. This endows the instrument with a more ``analog'' or ``natural'' sound,\ - \ despite relying on computer-based algorithms. In concert, the quartet performance\ - \ with the JoyStyx may potentially be the first performance ever with a quartet\ - \ of Embedded Acoustic Instruments.},\n address = {Copenhagen, Denmark},\n author\ - \ = {Matthew Blessing and Edgar Berdahl},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176246},\n\ - \ issn = {2220-4806},\n pages = {271--274},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {The JoyStyx: A Quartet of Embedded Acoustic Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0051.pdf},\n year\ + author: Esben W. Knudsen and Malte L. Hølledig and Mads Juel Nielsen and Rikke K. 
+ Petersen and Sebastian Bach-Nielsen and Bogdan-Constantin Zanescu and Daniel Overholt + and Hendrik Purwins and Kim Helweg + bibtex: "@inproceedings{eknudsen2017,\n abstract = {An application for ballet training\ + \ is presented that monitors the posture position (straightness of the spine and\ + \ rotation of the pelvis) deviation from the ideal position in real-time. The\ + \ human skeletal data is acquired through a Microsoft Kinect v2. The movement\ + \ of the student is mirrored through an abstract skeletal figure and instructions\ + \ are provided through a virtual teacher. Posture deviation is measured in the\ + \ following way: Torso misalignment is calculated by comparing hip center joint,\ + \ shoulder center joint and neck joint position with an ideal posture position\ + \ retrieved in an initial calibration procedure. Pelvis deviation is expressed\ + \ as the xz-rotation of the hip-center joint. The posture deviation is sonified\ + \ via a varying cut-off frequency of a high-pass filter applied to floating water\ + \ sound. The posture deviation is visualized via a curve and a rigged skeleton\ + \ in which the misaligned torso parts are color-coded. In an experiment with 9-12\ + \ year-old dance students from a ballet school, comparing the audio-visual feedback\ + \ modality with no feedback leads to an increase in posture accuracy (p < 0.001,\ + \ Cohen's d = 1.047). Reaction card feedback and expert interviews indicate that\ + \ the feedback is considered fun and useful for training independently from the\ + \ teacher.},\n address = {Copenhagen, Denmark},\n author = {Esben W. Knudsen and\ + \ Malte L. Hølledig and Mads Juel Nielsen and Rikke K. 
Petersen and Sebastian\ + \ Bach-Nielsen and Bogdan-Constantin Zanescu and Daniel Overholt and Hendrik Purwins\ + \ and Kim Helweg},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1181422},\n\ + \ issn = {2220-4806},\n pages = {71--76},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Audio-Visual Feedback for Self-monitoring Posture in Ballet Training},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0015.pdf},\n year\ \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176246 + doi: 10.5281/zenodo.1181422 issn: 2220-4806 - pages: 271--274 + pages: 71--76 publisher: Aalborg University Copenhagen - title: 'The JoyStyx: A Quartet of Embedded Acoustic Instruments' - url: http://www.nime.org/proceedings/2017/nime2017_paper0051.pdf + title: Audio-Visual Feedback for Self-monitoring Posture in Ballet Training + url: http://www.nime.org/proceedings/2017/nime2017_paper0015.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: gwakefield2017 - abstract: 'The growth of the live coding community has been coupled with a rich - development of experimentation in new domain-specific languages, sometimes idiosyncratic - to the interests of their performers. Nevertheless, programming language design - may seem foreboding to many, steeped in computer science that is distant from - the expertise of music performance. To broaden access to designing unique languages-as-instruments - we developed an online programming environment that offers liveness in the process - of language design as well as performance. The editor utilizes the Parsing Expression - Grammar formalism for language design, and a virtual machine featuring collaborative - multitasking for execution, in order to support a diversity of language concepts - and affordances. 
The editor is coupled with online tutorial documentation aimed - at the computer music community, with live examples embedded. This paper documents - the design and use of the editor and its underlying virtual machine.' + ID: rlindell2017 + abstract: "We explore the phenomenology of embodiment based on research through\ + \ design and reflection on the design of artefacts for augmenting embodied performance.\ + \ We present three designs for highly trained musicians; the designs rely on the\ + \ musicians' mastery acquired from years of practice. Through the knowledge of\ + \ the living body their instruments – saxophone, cello, and flute –\ + \ are extensions of themselves; thus, we can explore technology with rich nuances\ + \ and precision in corporeal schemas. With the help of Merleau-Ponty's phenomenology\ + \ of embodiment we present three hypotheses for augmented embodied performance:\ + \ the extended artistic room, the interactively enacted teacher, and the humanisation\ + \ of technology. " address: 'Copenhagen, Denmark' - author: Graham Wakefield and Charles Roberts - bibtex: "@inproceedings{gwakefield2017,\n abstract = {The growth of the live coding\ - \ community has been coupled with a rich development of experimentation in new\ - \ domain-specific languages, sometimes idiosyncratic to the interests of their\ - \ performers. Nevertheless, programming language design may seem foreboding to\ - \ many, steeped in computer science that is distant from the expertise of music\ - \ performance. To broaden access to designing unique languages-as-instruments\ - \ we developed an online programming environment that offers liveness in the process\ - \ of language design as well as performance. The editor utilizes the Parsing\ - \ Expression Grammar formalism for language design, and a virtual machine featuring\ - \ collaborative multitasking for execution, in order to support a diversity of\ - \ language concepts and affordances. 
The editor is coupled with online tutorial\ - \ documentation aimed at the computer music community, with live examples embedded.\ - \ This paper documents the design and use of the editor and its underlying virtual\ - \ machine.},\n address = {Copenhagen, Denmark},\n author = {Graham Wakefield and\ - \ Charles Roberts},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176248},\n\ - \ issn = {2220-4806},\n pages = {275--278},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {A Virtual Machine for Live Coding Language Design},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0052.pdf},\n year\ - \ = {2017}\n}\n" + author: Rikard Lindell and Tomas Kumlin + bibtex: "@inproceedings{rlindell2017,\n abstract = {We explore the phenomenology\ + \ of embodiment based on research through design and reflection on the design\ + \ of artefacts for augmenting embodied performance. We present three designs for\ + \ highly trained musicians; the designs rely on the musicians' mastery acquired\ + \ from years of practice. Through the knowledge of the living body their instruments\ + \ – saxophone, cello, and flute – are extensions of themselves; thus,\ + \ we can explore technology with rich nuances and precision in corporeal schemas.\ + \ With the help of Merleau-Ponty's phenomenology of embodiment we present three\ + \ hypotheses for augmented embodied performance: the extended artistic room, the\ + \ interactively enacted teacher, and the humanisation of technology. 
},\n address\ + \ = {Copenhagen, Denmark},\n author = {Rikard Lindell and Tomas Kumlin},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176177},\n issn = {2220-4806},\n pages\ + \ = {77--82},\n publisher = {Aalborg University Copenhagen},\n title = {Augmented\ + \ Embodied Performance – Extended Artistic Room, Enacted Teacher, and Humanisation\ + \ of Technology},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0016.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176248 + doi: 10.5281/zenodo.1176177 issn: 2220-4806 - pages: 275--278 + pages: 77--82 publisher: Aalborg University Copenhagen - title: A Virtual Machine for Live Coding Language Design - url: http://www.nime.org/proceedings/2017/nime2017_paper0052.pdf + title: 'Augmented Embodied Performance – Extended Artistic Room, Enacted Teacher, + and Humanisation of Technology' + url: http://www.nime.org/proceedings/2017/nime2017_paper0016.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: tdavis2017 - abstract: "There have been many NIME papers over the years on augmented or actuated\ - \ instruments [2][10][19][22]. Many of these papers have focused on the technical\ - \ description of how these instruments have been produced, or as in the case of\ - \ Machover's #8216;Hyperinstruments' [19], on producing instruments over which\ - \ performers have ‘absolute control' and emphasise ‘learnability.\ - \ perfectibility and repeatability' [19]. In contrast to this approach, this paper\ - \ outlines a philosophical position concerning the relationship between instruments\ - \ and performers in improvisational contexts that recognises the agency of the\ - \ instrument within the performance process. 
It builds on a post-phenomenological\ - \ understanding of the human/instrument relationship in which the human and the\ - \ instrument are understood as co-defining entities without fixed boundaries;\ - \ an approach that actively challenges notions of instrumental mastery and ‘absolute\ - \ control'. This paper then takes a practice-based approach to outline how such\ - \ philosophical concerns have fed into the design of an augmented, actuated cello\ - \ system, The Feral Cello, that has been designed to explicitly explore these\ - \ concerns through practice. " + ID: jvetter2017 + abstract: 'In this paper we discuss a modular instrument system for musical expression + consisting of multiple devices using string detection, sound synthesis and wireless + communication. The design of the system allows for different physical arrangements, + which we define as topologies. In particular we will explain our concept and + requirements, the system architecture including custom magnetic string sensors + and our network communication and discuss its use in the performance HOMO RESTIS.' address: 'Copenhagen, Denmark' - author: Tom Davis - bibtex: "@inproceedings{tdavis2017,\n abstract = {There have been many NIME papers\ - \ over the years on augmented or actuated instruments [2][10][19][22]. Many of\ - \ these papers have focused on the technical description of how these instruments\ - \ have been produced, or as in the case of Machover's #8216;Hyperinstruments'\ - \ [19], on producing instruments over which performers have ‘absolute control'\ - \ and emphasise ‘learnability. perfectibility and repeatability' [19]. 
In\ - \ contrast to this approach, this paper outlines a philosophical position concerning\ - \ the relationship between instruments and performers in improvisational contexts\ - \ that recognises the agency of the instrument within the performance process.\ - \ It builds on a post-phenomenological understanding of the human/instrument relationship\ - \ in which the human and the instrument are understood as co-defining entities\ - \ without fixed boundaries; an approach that actively challenges notions of instrumental\ - \ mastery and ‘absolute control'. This paper then takes a practice-based\ - \ approach to outline how such philosophical concerns have fed into the design\ - \ of an augmented, actuated cello system, The Feral Cello, that has been designed\ - \ to explicitly explore these concerns through practice. },\n address = {Copenhagen,\ - \ Denmark},\n author = {Tom Davis},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176250},\n\ - \ issn = {2220-4806},\n pages = {279--282},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {The Feral Cello: A Philosophically Informed Approach\ - \ to an Actuated Instrument},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0053.pdf},\n\ + author: Jens Vetter and Sarah Leimcke + bibtex: "@inproceedings{jvetter2017,\n abstract = {In this paper we discuss a modular\ + \ instrument system for musical expression consisting of multiple devices using\ + \ string detection, sound synthesis and wireless communication. 
The design of\ + \ the system allows for different physical arrangements, which we define as topologies.\ + \ In particular we will explain our concept and requirements, the system architecture\ + \ including custom magnetic string sensors and our network communication and discuss\ + \ its use in the performance HOMO RESTIS.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Jens Vetter and Sarah Leimcke},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176179},\n issn = {2220-4806},\n pages = {83--86},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {Homo Restis --- Constructive Control\ + \ Through Modular String Topologies},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0017.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176250 + doi: 10.5281/zenodo.1176179 issn: 2220-4806 - pages: 279--282 + pages: 83--86 publisher: Aalborg University Copenhagen - title: 'The Feral Cello: A Philosophically Informed Approach to an Actuated Instrument' - url: http://www.nime.org/proceedings/2017/nime2017_paper0053.pdf + title: Homo Restis --- Constructive Control Through Modular String Topologies + url: http://www.nime.org/proceedings/2017/nime2017_paper0017.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: fbernardo2017 - abstract: 'This paper describes an exploratory study of the potential for musical - interaction of Soli, a new radar-based sensing technology developed by Google''s - Advanced Technology and Projects Group (ATAP). We report on our hand-on experience - and outcomes within the Soli Alpha Developers program. We present early experiments - demonstrating the use of Soli for creativity in musical contexts. 
We discuss the - tools, workflow, the affordances of the prototypes for music making, and the potential - for design of future NIME projects that may integrate Soli.' + ID: jbarbosa2017 + abstract: "Play and playfulness compose an essential part of our lives as human\ + \ beings. From childhood to adultness, playfulness is often associated with remarkable\ + \ positive experiences related to fun, pleasure, intimate social activities, imagination,\ + \ and creativity. Perhaps not surprisingly, playfulness has been recurrently used\ + \ in NIME designs as a strategy to engage people, often non-expert, in short term\ + \ musical activities. Yet, designing for playfulness remains a challenging task,\ + \ as little knowledge is available for designers to support their decisions. \ + \ To address this issue, we follow a design rationale approach using the context\ + \ of Live Looping (LL) as a case study. We start by surveying 101 LL tools, summarizing\ + \ our analysis into a new design space. We then use this design space to discuss\ + \ potential guidelines to address playfulness in a design process. These guidelines\ + \ are implemented and discussed in a new LL tool—called the \"Voice Reaping\ + \ Machine\". Finally, we contrast our guidelines with previous works in the literature." address: 'Copenhagen, Denmark' - author: Francisco Bernardo and Nicholas Arner and Paul Batchelor - bibtex: "@inproceedings{fbernardo2017,\n abstract = {This paper describes an exploratory\ - \ study of the potential for musical interaction of Soli, a new radar-based sensing\ - \ technology developed by Google's Advanced Technology and Projects Group (ATAP).\ - \ We report on our hand-on experience and outcomes within the Soli Alpha Developers\ - \ program. We present early experiments demonstrating the use of Soli for creativity\ - \ in musical contexts. 
We discuss the tools, workflow, the affordances of the\ - \ prototypes for music making, and the potential for design of future NIME projects\ - \ that may integrate Soli.},\n address = {Copenhagen, Denmark},\n author = {Francisco\ - \ Bernardo and Nicholas Arner and Paul Batchelor},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176252},\n issn = {2220-4806},\n pages = {283--286},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {O Soli Mio: Exploring\ - \ Millimeter Wave Radar for Musical Interaction},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0054.pdf},\n\ - \ year = {2017}\n}\n" + author: Jeronimo Barbosa and Marcelo M. Wanderley and Stéphane Huot + bibtex: "@inproceedings{jbarbosa2017,\n abstract = {Play and playfulness compose\ + \ an essential part of our lives as human beings. From childhood to adultness,\ + \ playfulness is often associated with remarkable positive experiences related\ + \ to fun, pleasure, intimate social activities, imagination, and creativity. Perhaps\ + \ not surprisingly, playfulness has been recurrently used in NIME designs as a\ + \ strategy to engage people, often non-expert, in short term musical activities.\ + \ Yet, designing for playfulness remains a challenging task, as little knowledge\ + \ is available for designers to support their decisions. To address this issue,\ + \ we follow a design rationale approach using the context of Live Looping (LL)\ + \ as a case study. We start by surveying 101 LL tools, summarizing our analysis\ + \ into a new design space. We then use this design space to discuss potential\ + \ guidelines to address playfulness in a design process. These guidelines are\ + \ implemented and discussed in a new LL tool—called the \"Voice Reaping\ + \ Machine\". 
Finally, we contrast our guidelines with previous works in the literature.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Jeronimo Barbosa and Marcelo M.\ + \ Wanderley and Stéphane Huot},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176181},\n\ + \ issn = {2220-4806},\n pages = {87--92},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Exploring Playfulness in NIME Design: The Case of Live Looping Tools},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0018.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176252 + doi: 10.5281/zenodo.1176181 issn: 2220-4806 - pages: 283--286 + pages: 87--92 publisher: Aalborg University Copenhagen - title: 'O Soli Mio: Exploring Millimeter Wave Radar for Musical Interaction' - url: http://www.nime.org/proceedings/2017/nime2017_paper0054.pdf + title: 'Exploring Playfulness in NIME Design: The Case of Live Looping Tools' + url: http://www.nime.org/proceedings/2017/nime2017_paper0018.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: clevican2017 - abstract: 'Brain computer interfaces are being widely adopted for music creation - and interpretation, and they are becoming a truly new category of musical instruments. - Indeed, Miranda has coined the term Brain-computer Musical Interface (BCMI) to - refer to this category. There are no "plug-n-play" solutions for a BCMI, these - kinds of tools usually require the setup and implementation of particular software - configurations, customized for each EEG device. The Emotiv Insight is a low-cost - EEG apparatus that outputs several kinds of data, such as EEG rhythms or facial - expressions, from the user''s brain activity. We have developed a BCMI, in the - form of a freely available middle-ware, using the Emotiv Insight for EEG input - and signal processing. 
The obtained data, via blue-tooth is broad-casted over - the network formatted for the OSC protocol. Using this software, we tested the - device''s adequacy as a BCMI by using the provided data in order to control different - sound synthesis algorithms in MaxMSP. We conclude that the Emotiv Insight is an - interesting choice for a BCMI due to its low-cost and ease of use, but we also - question its reliability and robustness.' + ID: dmanesh2017 + abstract: 'Exquisite Score is a web application which allows users to collaborate + on short musical compositions using the paradigm of the parlor game exquisite + corpse. Through a MIDI-sequencer interface, composers each contribute a section + to a piece of music, only seeing the very end of the preceding section. Exquisite + Score is both a fun and accessible compositional game as well as a system for + encouraging highly novel musical compositions. Exquisite Score was tested by several + students and musicians. Several short pieces were created and a brief discussion + and analysis of these pieces is included.' address: 'Copenhagen, Denmark' - author: Constanza Levican and Andres Aparicio and Vernon Belaunde and Rodrigo Cadiz - bibtex: "@inproceedings{clevican2017,\n abstract = {Brain computer interfaces are\ - \ being widely adopted for music creation and interpretation, and they are becoming\ - \ a truly new category of musical instruments. Indeed, Miranda has coined the\ - \ term Brain-computer Musical Interface (BCMI) to refer to this category. There\ - \ are no \"plug-n-play\" solutions for a BCMI, these kinds of tools usually require\ - \ the setup and implementation of particular software configurations, customized\ - \ for each EEG device. The Emotiv Insight is a low-cost EEG apparatus that outputs\ - \ several kinds of data, such as EEG rhythms or facial expressions, from the user's\ - \ brain activity. 
We have developed a BCMI, in the form of a freely available\ - \ middle-ware, using the Emotiv Insight for EEG input and signal processing. The\ - \ obtained data, via blue-tooth is broad-casted over the network formatted for\ - \ the OSC protocol. Using this software, we tested the device's adequacy as a\ - \ BCMI by using the provided data in order to control different sound synthesis\ - \ algorithms in MaxMSP. We conclude that the Emotiv Insight is an interesting\ - \ choice for a BCMI due to its low-cost and ease of use, but we also question\ - \ its reliability and robustness.},\n address = {Copenhagen, Denmark},\n author\ - \ = {Constanza Levican and Andres Aparicio and Vernon Belaunde and Rodrigo Cadiz},\n\ + author: Daniel Manesh and Eran Egozy + bibtex: "@inproceedings{dmanesh2017,\n abstract = {Exquisite Score is a web application\ + \ which allows users to collaborate on short musical compositions using the paradigm\ + \ of the parlor game exquisite corpse. Through a MIDI-sequencer interface, composers\ + \ each contribute a section to a piece of music, only seeing the very end of the\ + \ preceding section. Exquisite Score is both a fun and accessible compositional\ + \ game as well as a system for encouraging highly novel musical compositions.\ + \ Exquisite Score was tested by several students and musicians. 
Several short\ + \ pieces were created and a brief discussion and analysis of these pieces is included.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Daniel Manesh and Eran Egozy},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176254},\n issn = {2220-4806},\n\ - \ pages = {287--290},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Insight2OSC: using the brain and the body as a musical instrument with the\ - \ Emotiv Insight},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0055.pdf},\n\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176183},\n issn = {2220-4806},\n\ + \ pages = {93--98},\n publisher = {Aalborg University Copenhagen},\n title = {Exquisite\ + \ Score: A System for Collaborative Musical Composition},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0019.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176254 + doi: 10.5281/zenodo.1176183 issn: 2220-4806 - pages: 287--290 + pages: 93--98 publisher: Aalborg University Copenhagen - title: 'Insight2OSC: using the brain and the body as a musical instrument with the - Emotiv Insight' - url: http://www.nime.org/proceedings/2017/nime2017_paper0055.pdf + title: 'Exquisite Score: A System for Collaborative Musical Composition' + url: http://www.nime.org/proceedings/2017/nime2017_paper0019.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: bsmith2017 - abstract: 'Interactive real-time spatialization of audio over large immersive speaker - arrays poses significant interface and control challenges for live performers. - Fluidly moving and mixing numerous sound objects over unique speaker configurations - requires specifically designed software interfaces and systems. 
Currently available - software solutions either impose configuration limitations, require extreme degrees - of expertise, or extensive configuration time to use. A new system design, focusing - on simplicity, ease of use, and live interactive spatialization is described. - Automation of array calibration and tuning is included to facilitate rapid deployment - and configuration. Comparisons with other solutions show favorability in terms - of complexity, depth of control, and required features. ' + ID: sstenslie2017 + abstract: 'This paper presents a new spherical shaped capacitive sensor device for + creating interactive compositions and embodied user experiences inside of a periphonic, + 3D sound space. The Somatic Sound project is here presented as a) technological + innovative musical instrument, and b) an experiential art installation. One of + the main research foci is to explore embodied experiences through moving, interactive + and somatic sound. The term somatic is here understood and used as in relating + to the body in a physical, holistic and immersive manner.' address: 'Copenhagen, Denmark' - author: Benjamin Smith and Neal Anderson - bibtex: "@inproceedings{bsmith2017,\n abstract = {Interactive real-time spatialization\ - \ of audio over large immersive speaker arrays poses significant interface and\ - \ control challenges for live performers. Fluidly moving and mixing numerous sound\ - \ objects over unique speaker configurations requires specifically designed software\ - \ interfaces and systems. Currently available software solutions either impose\ - \ configuration limitations, require extreme degrees of expertise, or extensive\ - \ configuration time to use. A new system design, focusing on simplicity, ease\ - \ of use, and live interactive spatialization is described. 
Automation of array\ - \ calibration and tuning is included to facilitate rapid deployment and configuration.\ - \ Comparisons with other solutions show favorability in terms of complexity, depth\ - \ of control, and required features. },\n address = {Copenhagen, Denmark},\n\ - \ author = {Benjamin Smith and Neal Anderson},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176256},\n issn = {2220-4806},\n pages = {291--295},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {ArraYnger: New Interface for Interactive\ - \ 360° Spatialization},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0056.pdf},\n\ + author: Stahl Stenslie and Kjell Tore Innervik and Ivar Frounberg and Thom Johansen + bibtex: "@inproceedings{sstenslie2017,\n abstract = {This paper presents a new spherical\ + \ shaped capacitive sensor device for creating interactive compositions and embodied\ + \ user experiences inside of a periphonic, 3D sound space. The Somatic Sound project\ + \ is here presented as a) technological innovative musical instrument, and b)\ + \ an experiential art installation. One of the main research foci is to explore\ + \ embodied experiences through moving, interactive and somatic sound. 
The term\ + \ somatic is here understood and used as in relating to the body in a physical,\ + \ holistic and immersive manner.},\n address = {Copenhagen, Denmark},\n author\ + \ = {Stahl Stenslie and Kjell Tore Innervik and Ivar Frounberg and Thom Johansen},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176185},\n issn = {2220-4806},\n\ + \ pages = {99--103},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Somatic Sound in Performative Contexts},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0020.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176256 + doi: 10.5281/zenodo.1176185 issn: 2220-4806 - pages: 291--295 + pages: 99--103 publisher: Aalborg University Copenhagen - title: 'ArraYnger: New Interface for Interactive 360° Spatialization' - url: http://www.nime.org/proceedings/2017/nime2017_paper0056.pdf + title: Somatic Sound in Performative Contexts + url: http://www.nime.org/proceedings/2017/nime2017_paper0020.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: aleslie2017 - abstract: "This paper describes a series of fashionable sounding shoe and foot based\ - \ appendages made between 2007-2017. The research attempts to demake the physical\ - \ high-heeled shoe through the iterative design and fabrication of new foot based\ - \ musical instruments. This process of demaking also changes the usual purpose\ - \ of shoes and associated stereotypes of high heeled shoe wear. Through turning\ - \ high heeled shoes into wearable musical instruments for theatrical audio visual\ - \ expressivity we question why so many musical instruments are made for the hands\ - \ and not the feet? 
With this creative work we explore ways to redress the imbalance\ - \ and consider what a genuinely “foot based” expressivity could be.\ - \ " + ID: jlarsen2017 + abstract: 'Musical instruments and musical user interfaces provide rich input and + feedback through mostly tangible interactions, resulting in complex behavior. However, + publications of novel interfaces often lack the required detail due to the complexity + or the focus on a specific part of the interfaces and absence of a specific template + or structure to describe these interactions. Drawing on and synthesizing models + from interaction design and music making we propose a way for modeling musical + interfaces by providing a scheme and visual language to describe, design, analyze, + and compare interfaces for music making. To illustrate its capabilities we apply + the proposed model to a range of assistive musical instruments, which often draw + on multi-modal in- and output, resulting in complex designs and descriptions thereof.' address: 'Copenhagen, Denmark' - author: Alexandra Murray-Leslie and Andrew Johnston - bibtex: "@inproceedings{aleslie2017,\n abstract = {This paper describes a series\ - \ of fashionable sounding shoe and foot based appendages made between 2007-2017.\ - \ The research attempts to demake the physical high-heeled shoe through the iterative\ - \ design and fabrication of new foot based musical instruments. This process of\ - \ demaking also changes the usual purpose of shoes and associated stereotypes\ - \ of high heeled shoe wear. Through turning high heeled shoes into wearable musical\ - \ instruments for theatrical audio visual expressivity we question why so many\ - \ musical instruments are made for the hands and not the feet? With this creative\ - \ work we explore ways to redress the imbalance and consider what a genuinely\ - \ “foot based” expressivity could be. 
},\n address = {Copenhagen,\ - \ Denmark},\n author = {Alexandra Murray-Leslie and Andrew Johnston},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176258},\n issn = {2220-4806},\n pages\ - \ = {296--301},\n publisher = {Aalborg University Copenhagen},\n title = {The\ - \ Liberation of the Feet: Demaking the High Heeled Shoe For Theatrical Audio-Visual\ - \ Expression},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0057.pdf},\n\ - \ year = {2017}\n}\n" + author: Jeppe Veirum Larsen and Hendrik Knoche + bibtex: "@inproceedings{jlarsen2017,\n abstract = {Musical instruments and musical\ + \ user interfaces provide rich input and feedback through mostly tangible interactions,\ + \ resulting in complex behavior. However, publications of novel interfaces often\ + \ lack the required detail due to the complexity or the focus on a specific part\ + \ of the interfaces and absence of a specific template or structure to describe\ + \ these interactions. Drawing on and synthesizing models from interaction design\ + \ and music making we propose a way for modeling musical interfaces by providing\ + \ a scheme and visual language to describe, design, analyze, and compare interfaces\ + \ for music making. 
To illustrate its capabilities we apply the proposed model\ + \ to a range of assistive musical instruments, which often draw on multi-modal\ + \ in- and output, resulting in complex designs and descriptions thereof.},\n address\ + \ = {Copenhagen, Denmark},\n author = {Jeppe Veirum Larsen and Hendrik Knoche},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176187},\n issn = {2220-4806},\n\ + \ pages = {104--109},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {States and Sound: Modelling Interactions with Musical User Interfaces},\n url\ + \ = {http://www.nime.org/proceedings/2017/nime2017_paper0021.pdf},\n year = {2017}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176258 + doi: 10.5281/zenodo.1176187 issn: 2220-4806 - pages: 296--301 + pages: 104--109 publisher: Aalborg University Copenhagen - title: 'The Liberation of the Feet: Demaking the High Heeled Shoe For Theatrical - Audio-Visual Expression' - url: http://www.nime.org/proceedings/2017/nime2017_paper0057.pdf + title: 'States and Sound: Modelling Interactions with Musical User Interfaces' + url: http://www.nime.org/proceedings/2017/nime2017_paper0021.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: crose2017 - abstract: 'Wearable sensor technology and aerial dance movement can be integrated - to provide a new performance practice and perspective on interactive kinesonic - composition. SALTO (Sonic Aerialist eLecTrOacoustic system), is a system which - allows for the creation of collaborative works between electroacoustic composer - and aerial choreographer. The system incorporates aerial dance trapeze movement, - sensors, digital synthesis, and electroacoustic composition. 
In SALTO, the Max - software programming environment employs parameters and mapping techniques for - translating the performer''s movement and internal experience into sound. Splinter - (2016), a work for aerial choreographer/performer and the SALTO system, highlights - the expressive qualities of the system in a performance setting.' + ID: gxia2017 + abstract: 'The interaction between music improvisers is studied in the context of + piano duets, where one improviser embellishes a melody, and the other plays a + chordal accompaniment with great freedom. We created an automated accompaniment + player that learns to play from example performances. Accompaniments are constructed + by selecting and concatenating one-measure score units from actual performances. + An important innovation is the ability to learn how the improvised accompaniment + should respond to variations in the melody performance, using tempo and embellishment + complexity as features, resulting in a truly interactive performance within a + conventional musical framework. We conducted both objective and subjective evaluations, + showing that the learned improviser performs more interactive, musical, and human-like + accompaniment compared with the less responsive, rule-based baseline algorithm.' address: 'Copenhagen, Denmark' - author: Christiana Rose - bibtex: "@inproceedings{crose2017,\n abstract = {Wearable sensor technology and\ - \ aerial dance movement can be integrated to provide a new performance practice\ - \ and perspective on interactive kinesonic composition. SALTO (Sonic Aerialist\ - \ eLecTrOacoustic system), is a system which allows for the creation of collaborative\ - \ works between electroacoustic composer and aerial choreographer. The system\ - \ incorporates aerial dance trapeze movement, sensors, digital synthesis, and\ - \ electroacoustic composition. 
In SALTO, the Max software programming environment\ - \ employs parameters and mapping techniques for translating the performer's movement\ - \ and internal experience into sound. Splinter (2016), a work for aerial choreographer/performer\ - \ and the SALTO system, highlights the expressive qualities of the system in a\ - \ performance setting.},\n address = {Copenhagen, Denmark},\n author = {Christiana\ - \ Rose},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176260},\n issn = {2220-4806},\n\ - \ pages = {302--306},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {SALTO: A System for Musical Expression in the Aerial Arts},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0058.pdf},\n\ - \ year = {2017}\n}\n" + author: Guangyu Xia and Roger Dannenberg + bibtex: "@inproceedings{gxia2017,\n abstract = {The interaction between music improvisers\ + \ is studied in the context of piano duets, where one improviser embellishes a\ + \ melody, and the other plays a chordal accompaniment with great freedom. We created\ + \ an automated accompaniment player that learns to play from example performances.\ + \ Accompaniments are constructed by selecting and concatenating one-measure score\ + \ units from actual performances. 
An important innovation is the ability to learn\ + \ how the improvised accompaniment should respond to variations in the melody\ + \ performance, using tempo and embellishment complexity as features, resulting\ + \ in a truly interactive performance within a conventional musical framework.\ + \ We conducted both objective and subjective evaluations, showing that the learned\ + \ improviser performs more interactive, musical, and human-like accompaniment\ + \ compared with the less responsive, rule-based baseline algorithm.},\n address\ + \ = {Copenhagen, Denmark},\n author = {Guangyu Xia and Roger Dannenberg},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176189},\n issn = {2220-4806},\n pages\ + \ = {110--114},\n publisher = {Aalborg University Copenhagen},\n title = {Improvised\ + \ Duet Interaction: Learning Improvisation Techniques for Automatic Accompaniment},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0022.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176260 + doi: 10.5281/zenodo.1176189 issn: 2220-4806 - pages: 302--306 + pages: 110--114 publisher: Aalborg University Copenhagen - title: 'SALTO: A System for Musical Expression in the Aerial Arts' - url: http://www.nime.org/proceedings/2017/nime2017_paper0058.pdf + title: 'Improvised Duet Interaction: Learning Improvisation Techniques for Automatic + Accompaniment' + url: http://www.nime.org/proceedings/2017/nime2017_paper0022.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: mbaalman2017 - abstract: 'Academic research projects focusing on wireless sensor networks rarely - live on after the funded research project has ended. In contrast, the Sense/Stage - project has evolved over the past 6 years outside of an academic context and has - been used in a multitude of artistic projects. 
This paper presents how the project - has developed, the diversity of the projects that have been made with the technology, - feedback from users on the system and an outline for the design of a successor - to the current system. ' + ID: pdahlstedt2017 + abstract: 'A new hybrid approach to digital keyboard playing is presented, where + the actual acoustic sounds from a digital keyboard are captured with contact microphones + and applied as excitation signals to a digital model of a prepared piano, i.e., + an extended wave-guide model of strings with the possibility of stopping and muting + the strings at arbitrary positions. The parameters of the string model are controlled + through TouchKeys multitouch sensors on each key, combined with MIDI data and + acoustic signals from the digital keyboard frame, using a novel mapping. The + instrument is evaluated from a performing musician''s perspective, and emerging + playing techniques are discussed. Since the instrument is a hybrid acoustic-digital + system with several feedback paths between the domains, it provides for expressive + and dynamic playing, with qualities approaching that of an acoustic instrument, + yet with new kinds of control. The contributions are two-fold. First, the use + of acoustic sounds from a physical keyboard for excitations and resonances results + in a novel hybrid keyboard instrument in itself. Second, the digital model of + "inside piano" playing, using multitouch keyboard data, allows for performance + techniques going far beyond conventional keyboard playing.' address: 'Copenhagen, Denmark' - author: Marije Baalman - bibtex: "@inproceedings{mbaalman2017,\n abstract = {Academic research projects focusing\ - \ on wireless sensor networks rarely live on after the funded research project\ - \ has ended. In contrast, the Sense/Stage project has evolved over the past 6\ - \ years outside of an academic context and has been used in a multitude of artistic\ - \ projects. 
This paper presents how the project has developed, the diversity of\ - \ the projects that have been made with the technology, feedback from users on\ - \ the system and an outline for the design of a successor to the current system.\ - \ },\n address = {Copenhagen, Denmark},\n author = {Marije Baalman},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176262},\n issn = {2220-4806},\n pages\ - \ = {307--312},\n publisher = {Aalborg University Copenhagen},\n title = {Wireless\ - \ Sensing for Artistic Applications, a Reflection on Sense/Stage to Motivate the\ - \ Design of the Next Stage},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0059.pdf},\n\ + author: Palle Dahlstedt + bibtex: "@inproceedings{pdahlstedt2017,\n abstract = {A new hybrid approach to digital\ + \ keyboard playing is presented, where the actual acoustic sounds from a digital\ + \ keyboard are captured with contact microphones and applied as excitation signals\ + \ to a digital model of a prepared piano, i.e., an extended wave-guide model of\ + \ strings with the possibility of stopping and muting the strings at arbitrary\ + \ positions. The parameters of the string model are controlled through TouchKeys\ + \ multitouch sensors on each key, combined with MIDI data and acoustic signals\ + \ from the digital keyboard frame, using a novel mapping. The instrument is evaluated\ + \ from a performing musician's perspective, and emerging playing techniques are\ + \ discussed. Since the instrument is a hybrid acoustic-digital system with several\ + \ feedback paths between the domains, it provides for expressive and dynamic playing,\ + \ with qualities approaching that of an acoustic instrument, yet with new kinds\ + \ of control. The contributions are two-fold. 
First, the use of acoustic sounds\ + \ from a physical keyboard for excitations and resonances results in a novel hybrid\ + \ keyboard instrument in itself. Second, the digital model of \"inside piano\"\ + \ playing, using multitouch keyboard data, allows for performance techniques going\ + \ far beyond conventional keyboard playing.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Palle Dahlstedt},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176191},\n\ + \ issn = {2220-4806},\n pages = {115--120},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Physical Interactions with Digital Strings --- A hybrid\ + \ approach to a digital keyboard instrument},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0023.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176262 + doi: 10.5281/zenodo.1176191 issn: 2220-4806 - pages: 307--312 + pages: 115--120 publisher: Aalborg University Copenhagen - title: 'Wireless Sensing for Artistic Applications, a Reflection on Sense/Stage - to Motivate the Design of the Next Stage' - url: http://www.nime.org/proceedings/2017/nime2017_paper0059.pdf + title: Physical Interactions with Digital Strings --- A hybrid approach to a digital + keyboard instrument + url: http://www.nime.org/proceedings/2017/nime2017_paper0023.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: ibukvic2017 - abstract: 'The following paper explores the Inconspicuous Head-Mounted Display within - the context of a live technology-mediated music performance. For this purpose - in 2014 the authors have developed Glasstra, an Android/Google Glass networked - display designed to project real-time orchestra status to the conductor, with - the primary goal of minimizing the on-stage technology footprint and with it audience''s - potential distraction with technology. 
In preparation for its deployment in a - real-world performance setting the team conducted a user study aimed to define - relevant constraints of the Google Glass display. Based on the observed data, - a conductor part from an existing laptop orchestra piece was retrofitted, thereby - replacing the laptop with a Google Glass running Glasstra and a similarly inconspicuous - forearm-mounted Wiimote controller. Below we present findings from the user study - that have informed the design of the visual display, as well as multi-perspective - observations from a series of real-world performances, including the designer, - user, and the audience. We use findings to offer a new hypothesis, an inverse - uncanny valley or what we refer to as uncanny mountain pertaining to audience''s - potential distraction with the technology within the context of a live technology-mediated - music performance as a function of minimizing on-stage technological footprint.' + ID: croberts2017 + abstract: 'We describe two new versions of the gibberwocky live-coding system. One + integrates with Max/MSP while the second targets MIDI output and runs entirely + in the browser. We discuss commonalities and differences between the three environments, + and how they fit into the live-coding landscape. We also describe lessons learned + while performing with the original version of gibberwocky, both from our perspective + and the perspective of others. These lessons informed the addition of animated + sparkline visualizations depicting modulations to performers and audiences in + all three versions.' address: 'Copenhagen, Denmark' - author: Ivica Bukvic and Spencer Lee - bibtex: "@inproceedings{ibukvic2017,\n abstract = {The following paper explores\ - \ the Inconspicuous Head-Mounted Display within the context of a live technology-mediated\ - \ music performance. 
For this purpose in 2014 the authors have developed Glasstra,\ - \ an Android/Google Glass networked display designed to project real-time orchestra\ - \ status to the conductor, with the primary goal of minimizing the on-stage technology\ - \ footprint and with it audience's potential distraction with technology. In preparation\ - \ for its deployment in a real-world performance setting the team conducted a\ - \ user study aimed to define relevant constraints of the Google Glass display.\ - \ Based on the observed data, a conductor part from an existing laptop orchestra\ - \ piece was retrofitted, thereby replacing the laptop with a Google Glass running\ - \ Glasstra and a similarly inconspicuous forearm-mounted Wiimote controller. Below\ - \ we present findings from the user study that have informed the design of the\ - \ visual display, as well as multi-perspective observations from a series of real-world\ - \ performances, including the designer, user, and the audience. We use findings\ - \ to offer a new hypothesis, an inverse uncanny valley or what we refer to as\ - \ uncanny mountain pertaining to audience's potential distraction with the technology\ - \ within the context of a live technology-mediated music performance as a function\ - \ of minimizing on-stage technological footprint.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Ivica Bukvic and Spencer Lee},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176264},\n\ - \ issn = {2220-4806},\n pages = {313--318},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Glasstra: Exploring the Use of an Inconspicuous Head\ - \ Mounted Display in a Live Technology-Mediated Music Performance},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0060.pdf},\n\ - \ year = {2017}\n}\n" + author: Charles Roberts and Graham Wakefield + bibtex: "@inproceedings{croberts2017,\n abstract = {We describe two new 
versions\ + \ of the gibberwocky live-coding system. One integrates with Max/MSP while the\ + \ second targets MIDI output and runs entirely in the browser. We discuss commonalities\ + \ and differences between the three environments, and how they fit into the live-coding\ + \ landscape. We also describe lessons learned while performing with the original\ + \ version of gibberwocky, both from our perspective and the perspective of others.\ + \ These lessons informed the addition of animated sparkline visualizations depicting\ + \ modulations to performers and audiences in all three versions.},\n address =\ + \ {Copenhagen, Denmark},\n author = {Charles Roberts and Graham Wakefield},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176193},\n issn = {2220-4806},\n\ + \ pages = {121--126},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {gibberwocky: New Live-Coding Instruments for Musical Performance},\n url =\ + \ {http://www.nime.org/proceedings/2017/nime2017_paper0024.pdf},\n year = {2017}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176264 + doi: 10.5281/zenodo.1176193 issn: 2220-4806 - pages: 313--318 + pages: 121--126 publisher: Aalborg University Copenhagen - title: 'Glasstra: Exploring the Use of an Inconspicuous Head Mounted Display in - a Live Technology-Mediated Music Performance' - url: http://www.nime.org/proceedings/2017/nime2017_paper0060.pdf + title: 'gibberwocky: New Live-Coding Instruments for Musical Performance' + url: http://www.nime.org/proceedings/2017/nime2017_paper0024.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: sbarton2017 - abstract: "Human-robot musical interaction typically consists of independent, physically-separated\ - \ agents. 
We developed Cyther --- a human-playable, self-tuning robotic zither\ - \ – to allow a human and a robot to interact cooperatively through the same\ - \ physical medium to generate music. The resultant co- dependence creates new\ - \ responsibilities, roles, and expressive possibilities for human musicians. We\ - \ describe some of these possibilities in the context of both technical features\ - \ and artistic implementations of the system." + ID: sleitman2017 + abstract: This paper is an overview of the current state of a course on New Interfaces + for Musical Expression taught at Stanford University. It gives an overview of + the various technologies and methodologies used to teach the interdisciplinary + work of new musical interfaces. address: 'Copenhagen, Denmark' - author: Scott Barton and Ethan Prihar and Paulo Carvalho - bibtex: "@inproceedings{sbarton2017,\n abstract = {Human-robot musical interaction\ - \ typically consists of independent, physically-separated agents. We developed\ - \ Cyther --- a human-playable, self-tuning robotic zither – to allow a human\ - \ and a robot to interact cooperatively through the same physical medium to generate\ - \ music. The resultant co- dependence creates new responsibilities, roles, and\ - \ expressive possibilities for human musicians. 
We describe some of these possibilities\ - \ in the context of both technical features and artistic implementations of the\ - \ system.},\n address = {Copenhagen, Denmark},\n author = {Scott Barton and Ethan\ - \ Prihar and Paulo Carvalho},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176266},\n\ - \ issn = {2220-4806},\n pages = {319--324},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Cyther: a Human-playable, Self-tuning Robotic Zither},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0061.pdf},\n year\ - \ = {2017}\n}\n" + author: Sasha Leitman + bibtex: "@inproceedings{sleitman2017,\n abstract = {This paper is an overview of\ + \ the current state of a course on New Interfaces for Musical Expression taught\ + \ at Stanford University. It gives an overview of the various technologies and\ + \ methodologies used to teach the interdisciplinary work of new musical interfaces.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Sasha Leitman},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176197},\n issn = {2220-4806},\n pages = {127--132},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Current Iteration of\ + \ a Course on Physical Interaction Design for Music},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0025.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176266 + doi: 10.5281/zenodo.1176197 issn: 2220-4806 - pages: 319--324 + pages: 127--132 publisher: Aalborg University Copenhagen - title: 'Cyther: a Human-playable, Self-tuning Robotic Zither' - url: http://www.nime.org/proceedings/2017/nime2017_paper0061.pdf + title: Current Iteration of a Course on Physical Interaction Design for Music + url: 
http://www.nime.org/proceedings/2017/nime2017_paper0025.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: bliang2017 - abstract: 'This paper presents the results of a study of piano pedalling techniques - on the sustain pedal using a newly designed measurement system named Piano Pedaller. - The system is comprised of an optical sensor mounted in the piano pedal bearing - block and an embedded platform for recording audio and sensor data. This enables - recording the pedalling gesture of real players and the piano sound under normal - playing conditions. Using the gesture data collected from the system, the task - of classifying these data by pedalling technique was undertaken using a Support - Vector Machine (SVM). Results can be visualised in an audio based score following - application to show pedalling together with the player''s position in the score.' + ID: ahofmann2017 + abstract: 'To build electronic musical instruments, a mapping between the real-time + audio processing software and the physical controllers is required. Different + strategies of mapping were developed and discussed within the NIME community to + improve musical expression in live performances. This paper discusses an interface + focussed instrument design approach, which starts from the physical controller + and its functionality. From this definition, the required, underlying software + instrument is derived. A proof of concept is implemented as a framework for effect + instruments. This framework comprises a library of real-time effects for Csound, + a proposition for a JSON-based mapping format, and a mapping-to-instrument converter + that outputs Csound instrument files. Advantages, limitations and possible future + extensions are discussed.' 
address: 'Copenhagen, Denmark' - author: Beici Liang and György Fazekas and Andrew McPherson and Mark Sandler - bibtex: "@inproceedings{bliang2017,\n abstract = {This paper presents the results\ - \ of a study of piano pedalling techniques on the sustain pedal using a newly\ - \ designed measurement system named Piano Pedaller. The system is comprised of\ - \ an optical sensor mounted in the piano pedal bearing block and an embedded platform\ - \ for recording audio and sensor data. This enables recording the pedalling gesture\ - \ of real players and the piano sound under normal playing conditions. Using the\ - \ gesture data collected from the system, the task of classifying these data by\ - \ pedalling technique was undertaken using a Support Vector Machine (SVM). Results\ - \ can be visualised in an audio based score following application to show pedalling\ - \ together with the player's position in the score.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Beici Liang and György Fazekas and Andrew McPherson and\ - \ Mark Sandler},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176268},\n\ - \ issn = {2220-4806},\n pages = {325--329},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Piano Pedaller: A Measurement System for Classification\ - \ and Visualisation of Piano Pedalling Techniques},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0062.pdf},\n\ - \ year = {2017}\n}\n" + author: Alex Hofmann and Bernt Isak Waerstad and Saranya Balasubramanian and Kristoffer + E. Koch + bibtex: "@inproceedings{ahofmann2017,\n abstract = {To build electronic musical\ + \ instruments, a mapping between the real-time audio processing software and the\ + \ physical controllers is required. Different strategies of mapping were developed\ + \ and discussed within the NIME community to improve musical expression in live\ + \ performances. 
This paper discusses an interface focussed instrument design approach,\ + \ which starts from the physical controller and its functionality. From this definition,\ + \ the required, underlying software instrument is derived. A proof of concept\ + \ is implemented as a framework for effect instruments. This framework comprises\ + \ a library of real-time effects for Csound, a proposition for a JSON-based mapping\ + \ format, and a mapping-to-instrument converter that outputs Csound instrument\ + \ files. Advantages, limitations and possible future extensions are discussed.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Alex Hofmann and Bernt Isak Waerstad\ + \ and Saranya Balasubramanian and Kristoffer E. Koch},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176199},\n issn = {2220-4806},\n pages = {133--138},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {From interface design\ + \ to the software instrument --- Mapping as an approach to FX-instrument building},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0026.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176268 + doi: 10.5281/zenodo.1176199 issn: 2220-4806 - pages: 325--329 + pages: 133--138 publisher: Aalborg University Copenhagen - title: 'Piano Pedaller: A Measurement System for Classification and Visualisation - of Piano Pedalling Techniques' - url: http://www.nime.org/proceedings/2017/nime2017_paper0062.pdf + title: From interface design to the software instrument --- Mapping as an approach + to FX-instrument building + url: http://www.nime.org/proceedings/2017/nime2017_paper0026.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: jlong2017 - abstract: 'While most musical robots that are capable of playing the drum kit utilise - a relatively simple striking motion, the hi-hat, 
with the additional degree of - motion provided by its pedal, requires more involved control strategies in order - to produce expressive performances on the instrument. A robotic hi-hat should - be able to control not only the striking timing and velocity to a high degree - of precision, but also dynamically control the position of the two cymbals in - a way that is consistent, reproducible and intuitive for composers and other musicians - to use. This paper describes the creation of a new, multifaceted hi-hat control - system that utilises a closed-loop distance sensing and calibration mechanism - in conjunction with an embedded musical information retrieval system to continuously - calibrate the hi-hat''s action both before and during a musical performance. This - is achieved by combining existing musical robotic devices with a newly created - linear actuation mechanism, custom amplification, acquisition and DSP hardware, - and embedded software algorithms. This new approach allows musicians to create - expressive and reproducible musical performances with the instrument using consistent - musical parameters, and the self-calibrating nature of the instrument lets users - focus on creating music instead of maintaining equipment. ' + ID: mmarchini2017 + abstract: 'Reflexive Looper (RL) is a live-looping system which allows a solo musician + to incarnate the different roles of a whole rhythm section by looping rhythms, + chord progressions, bassline and more. The loop pedal, is still the most used + device for those types of performances, accounting for many of the cover songs + performances on youtube, but not all kinds of song apply. Unlike a common loop + pedal, each layer of sound in RL is produced by an intelligent looping-agent which + adapts to the musician and respects given constraints, using constrained optimization. In + its original form, RL worked well for jazz guitar improvisation but was unsuited + to structured music such as pop songs. 
In order to bring the system on pop stage, + we revisited the system interaction, following the guidelines of professional + users who tested it extensively. We describe the revisited system which can accommodate + both pop and jazz. Thanks to intuitive pedal interaction and structure-constraints, + the new RL deals with pop music and has been already used in several in live concert + situations.' address: 'Copenhagen, Denmark' - author: Jason Long and Jim Murphy and Dale A. Carnegie and Ajay Kapur - bibtex: "@inproceedings{jlong2017,\n abstract = {While most musical robots that\ - \ are capable of playing the drum kit utilise a relatively simple striking motion,\ - \ the hi-hat, with the additional degree of motion provided by its pedal, requires\ - \ more involved control strategies in order to produce expressive performances\ - \ on the instrument. A robotic hi-hat should be able to control not only the striking\ - \ timing and velocity to a high degree of precision, but also dynamically control\ - \ the position of the two cymbals in a way that is consistent, reproducible and\ - \ intuitive for composers and other musicians to use. This paper describes the\ - \ creation of a new, multifaceted hi-hat control system that utilises a closed-loop\ - \ distance sensing and calibration mechanism in conjunction with an embedded musical\ - \ information retrieval system to continuously calibrate the hi-hat's action both\ - \ before and during a musical performance. This is achieved by combining existing\ - \ musical robotic devices with a newly created linear actuation mechanism, custom\ - \ amplification, acquisition and DSP hardware, and embedded software algorithms.\ - \ This new approach allows musicians to create expressive and reproducible musical\ - \ performances with the instrument using consistent musical parameters, and the\ - \ self-calibrating nature of the instrument lets users focus on creating music\ - \ instead of maintaining equipment. 
},\n address = {Copenhagen, Denmark},\n author\ - \ = {Jason Long and Jim Murphy and Dale A. Carnegie and Ajay Kapur},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176272},\n issn = {2220-4806},\n pages\ - \ = {330--335},\n publisher = {Aalborg University Copenhagen},\n title = {A Closed-Loop\ - \ Control System for Robotic Hi-hats},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0063.pdf},\n\ - \ year = {2017}\n}\n" + author: Marco Marchini and François Pachet and Benoît Carré + bibtex: "@inproceedings{mmarchini2017,\n abstract = {Reflexive Looper (RL) is a\ + \ live-looping system which allows a solo musician to incarnate the different\ + \ roles of a whole rhythm section by looping rhythms, chord progressions, bassline\ + \ and more. The loop pedal, is still the most used device for those types of performances,\ + \ accounting for many of the cover songs performances on youtube, but not all\ + \ kinds of song apply. Unlike a common loop pedal, each layer of sound in RL\ + \ is produced by an intelligent looping-agent which adapts to the musician and\ + \ respects given constraints, using constrained optimization. In its original\ + \ form, RL worked well for jazz guitar improvisation but was unsuited to structured\ + \ music such as pop songs. In order to bring the system on pop stage, we revisited\ + \ the system interaction, following the guidelines of professional users who tested\ + \ it extensively. We describe the revisited system which can accommodate both\ + \ pop and jazz. 
Thanks to intuitive pedal interaction and structure-constraints,\ + \ the new RL deals with pop music and has been already used in several in live\ + \ concert situations.},\n address = {Copenhagen, Denmark},\n author = {Marco Marchini\ + \ and François Pachet and Benoît Carré},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176201},\n\ + \ issn = {2220-4806},\n pages = {139--144},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Rethinking Reflexive Looper for structured pop music},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0027.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176272 + doi: 10.5281/zenodo.1176201 issn: 2220-4806 - pages: 330--335 + pages: 139--144 publisher: Aalborg University Copenhagen - title: A Closed-Loop Control System for Robotic Hi-hats - url: http://www.nime.org/proceedings/2017/nime2017_paper0063.pdf + title: Rethinking Reflexive Looper for structured pop music + url: http://www.nime.org/proceedings/2017/nime2017_paper0027.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: skountouras2017 - abstract: 'Tangible user interfaces empower artists, boost their creative expression - and enhance performing art. However, most of them are designed to work with a - set of rules, many of which require advanced skills and target users above a certain - age. Here we present a comparative and quantitative study of using TUIs as an - alternative teaching tool in experimenting with and creating soundscapes with - children. We describe an informal interactive workshop involving schoolchildren. - We focus on the development of playful uses of technology to help children empirically - understand audio feature extraction basic techniques. 
We promote tangible interaction - as an alternative learning method in the creation of synthetic soundscape based - on sounds recorded in a natural outdoor environment as main sources of sound. - We investigate how schoolchildren perceive natural sources of sound and explore - practices that reuse prerecorded material through a tangible interactive controller. - We discuss the potential benefits of using TUIs as an alternative empirical method - for tangible learning and interaction design, and its impact on encouraging and - motivating creativity in children. We summarize our findings and review children''s - biehavioural indicators of engagement and enjoyment in order to provide insight - to the design of TUIs based on user experience.' + ID: vzappi2017 + abstract: 'Physical modelling is a sophisticated synthesis technique, often used + in the design of Digital Musical Instruments (DMIs). Some of the most precise + physical simulations of sound propagation are based on Finite-Difference Time-Domain + (FDTD) methods, which are stable, highly parameterizable but characterized by + an extremely heavy computational load. This drawback hinders the spread of FDTD + from the domain of off-line simulations to the one of DMIs. With this paper, we + present a novel approach to real-time physical modelling synthesis, which implements + a 2D FDTD solver as a shader program running on the GPU directly within the graphics + pipeline. The result is a system capable of running fully interactive, massively + sized simulation domains, suitable for novel DMI design. With the help of diagrams + and code snippets, we provide the implementation details of a first interactive + application, a drum head simulator whose source code is available online. Finally, + we evaluate the proposed system, showing how this new approach can work as a valuable + alternative to classic GPGPU modelling.' 
address: 'Copenhagen, Denmark' - author: Stratos Kountouras and Ioannis Zannos - bibtex: "@inproceedings{skountouras2017,\n abstract = {Tangible user interfaces\ - \ empower artists, boost their creative expression and enhance performing art.\ - \ However, most of them are designed to work with a set of rules, many of which\ - \ require advanced skills and target users above a certain age. Here we present\ - \ a comparative and quantitative study of using TUIs as an alternative teaching\ - \ tool in experimenting with and creating soundscapes with children. We describe\ - \ an informal interactive workshop involving schoolchildren. We focus on the development\ - \ of playful uses of technology to help children empirically understand audio\ - \ feature extraction basic techniques. We promote tangible interaction as an alternative\ - \ learning method in the creation of synthetic soundscape based on sounds recorded\ - \ in a natural outdoor environment as main sources of sound. We investigate how\ - \ schoolchildren perceive natural sources of sound and explore practices that\ - \ reuse prerecorded material through a tangible interactive controller. We discuss\ - \ the potential benefits of using TUIs as an alternative empirical method for\ - \ tangible learning and interaction design, and its impact on encouraging and\ - \ motivating creativity in children. We summarize our findings and review children's\ - \ biehavioural indicators of engagement and enjoyment in order to provide insight\ - \ to the design of TUIs based on user experience.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Stratos Kountouras and Ioannis Zannos},\n booktitle = {Proceedings\ + author: Victor Zappi and Andrew Allen and Sidney Fels + bibtex: "@inproceedings{vzappi2017,\n abstract = {Physical modelling is a sophisticated\ + \ synthesis technique, often used in the design of Digital Musical Instruments\ + \ (DMIs). 
Some of the most precise physical simulations of sound propagation are\ + \ based on Finite-Difference Time-Domain (FDTD) methods, which are stable, highly\ + \ parameterizable but characterized by an extremely heavy computational load.\ + \ This drawback hinders the spread of FDTD from the domain of off-line simulations\ + \ to the one of DMIs. With this paper, we present a novel approach to real-time\ + \ physical modelling synthesis, which implements a 2D FDTD solver as a shader\ + \ program running on the GPU directly within the graphics pipeline. The result\ + \ is a system capable of running fully interactive, massively sized simulation\ + \ domains, suitable for novel DMI design. With the help of diagrams and code snippets,\ + \ we provide the implementation details of a first interactive application, a\ + \ drum head simulator whose source code is available online. Finally, we evaluate\ + \ the proposed system, showing how this new approach can work as a valuable alternative\ + \ to classic GPGPU modelling.},\n address = {Copenhagen, Denmark},\n author =\ + \ {Victor Zappi and Andrew Allen and Sidney Fels},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176274},\n issn = {2220-4806},\n pages = {336--341},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Gestus: Teaching Soundscape\ - \ Composition and Performance with a Tangible Interface},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0064.pdf},\n\ + \ doi = {10.5281/zenodo.1176203},\n issn = {2220-4806},\n pages = {145--150},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Shader-based Physical\ + \ Modelling for the Design of Massive Digital Musical Instruments},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0028.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 
10.5281/zenodo.1176274 + doi: 10.5281/zenodo.1176203 issn: 2220-4806 - pages: 336--341 + pages: 145--150 publisher: Aalborg University Copenhagen - title: 'Gestus: Teaching Soundscape Composition and Performance with a Tangible - Interface' - url: http://www.nime.org/proceedings/2017/nime2017_paper0064.pdf + title: Shader-based Physical Modelling for the Design of Massive Digital Musical + Instruments + url: http://www.nime.org/proceedings/2017/nime2017_paper0028.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: htez2017 - abstract: 'This research investigates how applying interaction constraints to digital - music instruments (DMIs) affects the way that experienced music performers collaborate - and find creative ways to make live improvised music on stage. The constraints - are applied in two forms: i) Physically implemented on the instruments themselves, - and ii) hidden rules that are defined on a network between the instruments and - triggered depending on the musical actions of the performers. Six experienced - musicians were recruited for a user study which involved rehearsal and performance. - Performers were given deliberately constrained instruments containing a touch - sensor, speaker, battery and an embedded computer. Results of the study show that - whilst constraints can lead to more structured improvisation, the resultant music - may not fit with performers'' true intentions. It was also found that when external - musical material is introduced to guide the performers into a collective convergence, - it is likely to be ignored because it was perceived by performers as being out - of context.' 
- address: 'Copenhagen, Denmark' - author: Hazar Emre Tez and Nick Bryan-Kinns - bibtex: "@inproceedings{htez2017,\n abstract = {This research investigates how applying\ - \ interaction constraints to digital music instruments (DMIs) affects the way\ - \ that experienced music performers collaborate and find creative ways to make\ - \ live improvised music on stage. The constraints are applied in two forms: i)\ - \ Physically implemented on the instruments themselves, and ii) hidden rules that\ - \ are defined on a network between the instruments and triggered depending on\ - \ the musical actions of the performers. Six experienced musicians were recruited\ - \ for a user study which involved rehearsal and performance. Performers were given\ - \ deliberately constrained instruments containing a touch sensor, speaker, battery\ - \ and an embedded computer. Results of the study show that whilst constraints\ - \ can lead to more structured improvisation, the resultant music may not fit with\ - \ performers' true intentions. It was also found that when external musical material\ - \ is introduced to guide the performers into a collective convergence, it is likely\ - \ to be ignored because it was perceived by performers as being out of context.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Hazar Emre Tez and Nick Bryan-Kinns},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176276},\n issn = {2220-4806},\n\ - \ pages = {342--347},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Exploring the Effect of Interface Constraints on Live Collaborative Music Improvisation},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0065.pdf},\n year\ - \ = {2017}\n}\n" + ID: djohnson2017 + abstract: 'The recent resurgence of Virtual Reality (VR) technologies provide new + platforms for augmenting traditional music instruments. 
Instrument augmentation + is a common approach for designing new interfaces for musical expression, as shown + through hyperinstrument research. New visual affordances present in VR give designers + new methods for augmenting instruments to extend not only their expressivity, + but also their capabilities for computer assisted tutoring. In this work, we present + VRMin, a mobile Mixed Reality (MR) application for augmenting a physical theremin, + with an immersive virtual environment (VE), for real time computer assisted tutoring. + We augment a physical theremin with 3D visual cues to indicate correct hand positioning + for performing given notes and volumes. The physical theremin acts as a domain + specific controller for the resulting MR environment. The initial effectiveness + of this approach is measured by analyzing a performer''s hand position while training + with and without the VRMin. We also evaluate the usability of the interface using + heuristic evaluation based on a newly proposed set of guidelines designed for + VR musical environments.' + address: 'Copenhagen, Denmark' + author: David Johnson and George Tzanetakis + bibtex: "@inproceedings{djohnson2017,\n abstract = {The recent resurgence of Virtual\ + \ Reality (VR) technologies provide new platforms for augmenting traditional music\ + \ instruments. Instrument augmentation is a common approach for designing new\ + \ interfaces for musical expression, as shown through hyperinstrument research.\ + \ New visual affordances present in VR give designers new methods for augmenting\ + \ instruments to extend not only their expressivity, but also their capabilities\ + \ for computer assisted tutoring. In this work, we present VRMin, a mobile Mixed\ + \ Reality (MR) application for augmenting a physical theremin, with an immersive\ + \ virtual environment (VE), for real time computer assisted tutoring. 
We augment\ + \ a physical theremin with 3D visual cues to indicate correct hand positioning\ + \ for performing given notes and volumes. The physical theremin acts as a domain\ + \ specific controller for the resulting MR environment. The initial effectiveness\ + \ of this approach is measured by analyzing a performer's hand position while\ + \ training with and without the VRMin. We also evaluate the usability of the interface\ + \ using heuristic evaluation based on a newly proposed set of guidelines designed\ + \ for VR musical environments.},\n address = {Copenhagen, Denmark},\n author =\ + \ {David Johnson and George Tzanetakis},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176205},\n\ + \ issn = {2220-4806},\n pages = {151--156},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {VRMin: Using Mixed Reality to Augment the Theremin for\ + \ Musical Tutoring},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0029.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176276 + doi: 10.5281/zenodo.1176205 issn: 2220-4806 - pages: 342--347 + pages: 151--156 publisher: Aalborg University Copenhagen - title: Exploring the Effect of Interface Constraints on Live Collaborative Music - Improvisation - url: http://www.nime.org/proceedings/2017/nime2017_paper0065.pdf + title: 'VRMin: Using Mixed Reality to Augment the Theremin for Musical Tutoring' + url: http://www.nime.org/proceedings/2017/nime2017_paper0029.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: iwicaksono2017 - abstract: 'This paper presents FabricKeyboard: a novel deformable keyboard interface - based on a multi-modal fabric sensate surface. Multi-layer fabric sensors that - detect touch, proximity, electric field, pressure, and stretch are machine-sewn - in a keyboard pattern on a stretchable substrate. 
The result is a fabric-based - musical controller that combines both the discrete controls of a keyboard and - various continuous controls from the embedded fabric sensors. This enables unique - tactile experiences and new interactions both with physical and non-contact gestures: - physical by pressing, pulling, stretching, and twisting the keys or the fabric - and non-contact by hovering and waving towards/against the keyboard and an electromagnetic - source. We have also developed additional fabric-based modular interfaces such - as a ribbon-controller and trackpad, allowing performers to add more expressive - and continuous controls. This paper will discuss implementation strategies for - our system-on-textile, fabric-based sensor developments, as well as sensor-computer - interfacing and musical mapping examples of this multi-modal and expressive fabric - keyboard. ' + ID: rgraham2017 + abstract: 'Our paper builds on an ongoing collaboration between theorists and practitioners + within the computer music community, with a specific focus on three-dimensional + environments as an incubator for performance systems design. In particular, we + are concerned with how to provide accessible means of controlling spatialization + and timbral shaping in an integrated manner through the collection of performance + data from various modalities from an electric guitar with a multichannel audio + output. This paper will focus specifically on the combination of pitch data treated + within tonal models and the detection of physical performance gestures using timbral + feature extraction algorithms. We discuss how these tracked gestures may be connected + to concepts and dynamic relationships from embodied cognition, expanding on performative + models for pitch and timbre spaces. Finally, we explore how these ideas support + connections between sonic, formal and performative dimensions. 
This includes instrumental + technique detection scenes and mapping strategies aimed at bridging music performance + gestures across physical and conceptual planes. ' address: 'Copenhagen, Denmark' - author: Irmandy Wicaksono and Joseph Paradiso - bibtex: "@inproceedings{iwicaksono2017,\n abstract = {This paper presents FabricKeyboard:\ - \ a novel deformable keyboard interface based on a multi-modal fabric sensate\ - \ surface. Multi-layer fabric sensors that detect touch, proximity, electric field,\ - \ pressure, and stretch are machine-sewn in a keyboard pattern on a stretchable\ - \ substrate. The result is a fabric-based musical controller that combines both\ - \ the discrete controls of a keyboard and various continuous controls from the\ - \ embedded fabric sensors. This enables unique tactile experiences and new interactions\ - \ both with physical and non-contact gestures: physical by pressing, pulling,\ - \ stretching, and twisting the keys or the fabric and non-contact by hovering\ - \ and waving towards/against the keyboard and an electromagnetic source. 
We have\ - \ also developed additional fabric-based modular interfaces such as a ribbon-controller\ - \ and trackpad, allowing performers to add more expressive and continuous controls.\ - \ This paper will discuss implementation strategies for our system-on-textile,\ - \ fabric-based sensor developments, as well as sensor-computer interfacing and\ - \ musical mapping examples of this multi-modal and expressive fabric keyboard.\ - \ },\n address = {Copenhagen, Denmark},\n author = {Irmandy Wicaksono and Joseph\ - \ Paradiso},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176278},\n issn\ - \ = {2220-4806},\n pages = {348--353},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {FabricKeyboard: Multimodal Textile Sensate Media as an Expressive and\ - \ Deformable Musical Interface},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0066.pdf},\n\ + author: Richard Graham and Brian Bridges and Christopher Manzione and William Brent + bibtex: "@inproceedings{rgraham2017,\n abstract = {Our paper builds on an ongoing\ + \ collaboration between theorists and practitioners within the computer music\ + \ community, with a specific focus on three-dimensional environments as an incubator\ + \ for performance systems design. In particular, we are concerned with how to\ + \ provide accessible means of controlling spatialization and timbral shaping in\ + \ an integrated manner through the collection of performance data from various\ + \ modalities from an electric guitar with a multichannel audio output. This paper\ + \ will focus specifically on the combination of pitch data treated within tonal\ + \ models and the detection of physical performance gestures using timbral feature\ + \ extraction algorithms. 
We discuss how these tracked gestures may be connected\ + \ to concepts and dynamic relationships from embodied cognition, expanding on\ + \ performative models for pitch and timbre spaces. Finally, we explore how these\ + \ ideas support connections between sonic, formal and performative dimensions.\ + \ This includes instrumental technique detection scenes and mapping strategies\ + \ aimed at bridging music performance gestures across physical and conceptual\ + \ planes. },\n address = {Copenhagen, Denmark},\n author = {Richard Graham and\ + \ Brian Bridges and Christopher Manzione and William Brent},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176207},\n issn = {2220-4806},\n pages = {157--162},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Exploring Pitch and\ + \ Timbre through 3D Spaces: Embodied Models in Virtual Reality as a Basis for\ + \ Performance Systems Design},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0030.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176278 + doi: 10.5281/zenodo.1176207 issn: 2220-4806 - pages: 348--353 + pages: 157--162 publisher: Aalborg University Copenhagen - title: 'FabricKeyboard: Multimodal Textile Sensate Media as an Expressive and Deformable - Musical Interface' - url: http://www.nime.org/proceedings/2017/nime2017_paper0066.pdf + title: 'Exploring Pitch and Timbre through 3D Spaces: Embodied Models in Virtual + Reality as a Basis for Performance Systems Design' + url: http://www.nime.org/proceedings/2017/nime2017_paper0030.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: kkonovalovs2017 - abstract: 'This paper explores a new interaction possibility for increasing performer - freedom via a foot-mounted wearable, and an instrument-mounted device that maintain - stomp-box styles of interactivity, 
but without the restrictions normally associated - with the original design of guitar effect pedals. The classic foot activated effect - pedals that are used to alter the sound of the instrument are stationary, forcing - the performer to return to the same location in order to interact with the pedals. - This paper presents a new design that enables the performer to interact with the - effect pedals anywhere on the stage. By designing a foot&instrument-mounted effect - controller, we kept the strongest part of the classical pedal design, while allowing - the activation of the effect at any location on the stage. The usability of the - device has been tested on thirty experienced guitar players. Their performance - has been recorded and compared, and their opinion has been investigated through - questionnaire and interview. The results of the experiment showed that, in theory, - foot&instrument-mounted effect controller can replace standard effect pedals and - at the same time provide more mobility on a stage. ' + ID: mgurevich2017 + abstract: "This paper situates NIME practice with respect to models of social interaction\ + \ among human agents. It argues that the conventional model of composer-performer-listener,\ + \ and the underlying mid-20th century metaphor of music as communication upon\ + \ which it relies, cannot reflect the richness of interaction and possibility\ + \ afforded by interactive digital technologies. Building on Paul Lansky's vision\ + \ of an expanded and dynamic social network, an alternative, ecological view of\ + \ music-making is presented, in which meaning emerges not from \"messages\" communicated\ + \ between individuals, but instead from the \"noise\" that arises through the\ + \ uncertainty in their interactions. However, in our tendency in NIME to collapse\ + \ the various roles in this network into a single individual, we place the increased\ + \ potential afforded by digital systems at risk. 
Using examples from the author's\ + \ NIME practices, the paper uses a practice-based methodology to describe approaches\ + \ to designing instruments that respond to the technologies that form the interfaces\ + \ of the network, which can include scores and stylistic conventions. In doing\ + \ so, the paper demonstrates that a repertoire—a seemingly anachronistic\ + \ concept—and a corresponding repertoire-driven approach to creating NIMEs\ + \ can in fact be a catalyst for invention and creativity." address: 'Copenhagen, Denmark' - author: Kristians Konovalovs and Jelizaveta Zovnercuka and Ali Adjorlu and Daniel - Overholt - bibtex: "@inproceedings{kkonovalovs2017,\n abstract = {This paper explores a new\ - \ interaction possibility for increasing performer freedom via a foot-mounted\ - \ wearable, and an instrument-mounted device that maintain stomp-box styles of\ - \ interactivity, but without the restrictions normally associated with the original\ - \ design of guitar effect pedals. The classic foot activated effect pedals that\ - \ are used to alter the sound of the instrument are stationary, forcing the performer\ - \ to return to the same location in order to interact with the pedals. This paper\ - \ presents a new design that enables the performer to interact with the effect\ - \ pedals anywhere on the stage. By designing a foot\\&instrument-mounted effect\ - \ controller, we kept the strongest part of the classical pedal design, while\ - \ allowing the activation of the effect at any location on the stage. The usability\ - \ of the device has been tested on thirty experienced guitar players. Their performance\ - \ has been recorded and compared, and their opinion has been investigated through\ - \ questionnaire and interview. The results of the experiment showed that, in theory,\ - \ foot\\&instrument-mounted effect controller can replace standard effect pedals\ - \ and at the same time provide more mobility on a stage. 
},\n address = {Copenhagen,\ - \ Denmark},\n author = {Kristians Konovalovs and Jelizaveta Zovnercuka and Ali\ - \ Adjorlu and Daniel Overholt},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176280},\n\ - \ issn = {2220-4806},\n pages = {354--357},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {A Wearable Foot-mounted / Instrument-mounted Effect\ - \ Controller: Design and Evaluation},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0067.pdf},\n\ + author: Michael Gurevich + bibtex: "@inproceedings{mgurevich2017,\n abstract = {This paper situates NIME practice\ + \ with respect to models of social interaction among human agents. It argues that\ + \ the conventional model of composer-performer-listener, and the underlying mid-20th\ + \ century metaphor of music as communication upon which it relies, cannot reflect\ + \ the richness of interaction and possibility afforded by interactive digital\ + \ technologies. Building on Paul Lansky's vision of an expanded and dynamic social\ + \ network, an alternative, ecological view of music-making is presented, in which\ + \ meaning emerges not from \"messages\" communicated between individuals, but\ + \ instead from the \"noise\" that arises through the uncertainty in their interactions.\ + \ However, in our tendency in NIME to collapse the various roles in this network\ + \ into a single individual, we place the increased potential afforded by digital\ + \ systems at risk. Using examples from the author's NIME practices, the paper\ + \ uses a practice-based methodology to describe approaches to designing instruments\ + \ that respond to the technologies that form the interfaces of the network, which\ + \ can include scores and stylistic conventions. 
In doing so, the paper demonstrates\ + \ that a repertoire—a seemingly anachronistic concept—and a corresponding\ + \ repertoire-driven approach to creating NIMEs can in fact be a catalyst for invention\ + \ and creativity.},\n address = {Copenhagen, Denmark},\n author = {Michael Gurevich},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176209},\n issn = {2220-4806},\n\ + \ pages = {163--168},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Discovering Instruments in Scores: A Repertoire-Driven Approach to Designing\ + \ New Interfaces for Musical Expression},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0031.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176280 + doi: 10.5281/zenodo.1176209 issn: 2220-4806 - pages: 354--357 + pages: 163--168 publisher: Aalborg University Copenhagen - title: 'A Wearable Foot-mounted / Instrument-mounted Effect Controller: Design and - Evaluation' - url: http://www.nime.org/proceedings/2017/nime2017_paper0067.pdf + title: 'Discovering Instruments in Scores: A Repertoire-Driven Approach to Designing + New Interfaces for Musical Expression' + url: http://www.nime.org/proceedings/2017/nime2017_paper0031.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: hchang2017 - abstract: 'This paper discusses nonlinear acoustic synthesis in augmented musical - instruments via acoustic transduction. Our work expands previous investigations - into acoustic amplitude modulation, offering new prototypes that produce intermodulation - in several instrumental contexts. 
Our results show nonlinear intermodulation distortion - can be generated and controlled in electromagnetically driven acoustic interfaces - that can be deployed in acoustic instruments through augmentation, thus extending - the nonlinear acoustic synthesis to a broader range of sonic applications.' + ID: jcantrell2017 + abstract: 'The ideation, conception and implementation of new musical interfaces + and instruments provide more than the mere construction of digital objects. As + physical and digital assemblages, interfaces also act as traces of the authoring + entities that created them. Their intentions, likes, dislikes, and ultimate determinations + of what is creatively useful all get embedded into the available choices of the + interface. In this light, the self-perception of the musical HCI and instrument + designer can be seen as occupying a primary importance in the instruments and + interfaces that eventually come to be created. The work of a designer who self-identifies + as an artist may result in a vastly different outcome than one who considers him + or herself to be an entrepreneur, or a scientist, for example. These differing + definitions of self as well as their HCI outcomes require their own means of critique, + understanding and expectations. All too often, these definitions are unclear, + or the considerations of overlapping means of critique remain unexamined.' address: 'Copenhagen, Denmark' - author: Herbert Ho-Chun Chang and Lloyd May and Spencer Topel - bibtex: "@inproceedings{hchang2017,\n abstract = {This paper discusses nonlinear\ - \ acoustic synthesis in augmented musical instruments via acoustic transduction.\ - \ Our work expands previous investigations into acoustic amplitude modulation,\ - \ offering new prototypes that produce intermodulation in several instrumental\ - \ contexts. 
Our results show nonlinear intermodulation distortion can be generated\ - \ and controlled in electromagnetically driven acoustic interfaces that can be\ - \ deployed in acoustic instruments through augmentation, thus extending the nonlinear\ - \ acoustic synthesis to a broader range of sonic applications.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Herbert Ho-Chun Chang and Lloyd May and Spencer Topel},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176282},\n issn = {2220-4806},\n\ - \ pages = {358--363},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Nonlinear Acoustic Synthesis in Augmented Musical Instruments},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0068.pdf},\n\ + author: Joe Cantrell + bibtex: "@inproceedings{jcantrell2017,\n abstract = {The ideation, conception and\ + \ implementation of new musical interfaces and instruments provide more than the\ + \ mere construction of digital objects. As physical and digital assemblages, interfaces\ + \ also act as traces of the authoring entities that created them. Their intentions,\ + \ likes, dislikes, and ultimate determinations of what is creatively useful all\ + \ get embedded into the available choices of the interface. In this light, the\ + \ self-perception of the musical HCI and instrument designer can be seen as occupying\ + \ a primary importance in the instruments and interfaces that eventually come\ + \ to be created. The work of a designer who self-identifies as an artist may result\ + \ in a vastly different outcome than one who considers him or herself to be an\ + \ entrepreneur, or a scientist, for example. These differing definitions of self\ + \ as well as their HCI outcomes require their own means of critique, understanding\ + \ and expectations. 
All too often, these definitions are unclear, or the considerations\ + \ of overlapping means of critique remain unexamined.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Joe Cantrell},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176211},\n\ + \ issn = {2220-4806},\n pages = {169--173},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Designing Intent: Defining Critical Meaning for NIME\ + \ Practitioners},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0032.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176282 + doi: 10.5281/zenodo.1176211 issn: 2220-4806 - pages: 358--363 + pages: 169--173 publisher: Aalborg University Copenhagen - title: Nonlinear Acoustic Synthesis in Augmented Musical Instruments - url: http://www.nime.org/proceedings/2017/nime2017_paper0068.pdf + title: 'Designing Intent: Defining Critical Meaning for NIME Practitioners' + url: http://www.nime.org/proceedings/2017/nime2017_paper0032.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: ghajdu2017 - abstract: 'This paper is a description of a pilot project conducted at the Hamburg - University of Music and Drama (HfMT) during the academic year 2015-16. In this - project we have addressed how interventions via interactive, generative music - systems may contribute to the improvement of the atmosphere and thus to the well-being - of patients in hospital waiting areas. The project was conducted by both the students - of the music therapy and multimedia composition programs and has thus offered - rare insights into the dynamic of such undertakings covering both the therapeutic - underpinnings, as well as the technical means required to achieve a particular - result. DJster, the engine we used for the generative processes is based on Clarence - Barlow''s probabilistic algorithms. 
Equipped with the proper periphery (sensors, - sound modules and spatializers), we looked at three different scenarios, each - requiring specific musical and technological solutions. The pilot was concluded - by a symposium in 2017 and the development of a prototype system. The symposium - yielded a diagram detailing the circular dynamic of the factors involved in this - particular project, while the prototype was demoed in 2016 at the HfMT facilities. - The system will be installed permanently at the University Medical Center Hamburg-Eppendorf - (UKE) in June 2017.' + ID: jvasquez2017 + abstract: 'One of the reasons of why some musical instruments more successfully + continue their evolution and actively take part in the history of music is partially + attributed to the existing compositions made specifically for them, pieces that + remain and are still played over a long period of time. This is something we know, + performing these compositions keeps the characteristics of the instruments alive + and able to survive. This paper presents our contribution to this discussion with + a context and historical background for idiomatic compositions. Looking beyond + the classical era, we discuss how the concept of idiomatic music has influenced + research and composition practices in the NIME community; drawing more attention + in the way current idiomatic composition practices considered specific NIME affordances + for sonic, social and spatial interaction. We present particular projects that + establish idiomatic writing as a part of a new repertoire for new musical instruments. + The idiomatic writing approach to composing music for NIME can shift the unique + characteristics of new instruments to a more established musical identity, providing + a shared understanding and a common literature to the community.' 
address: 'Copenhagen, Denmark' - author: Georg Hajdu and Benedict Carey and Goran Lazarevic and Eckhard Weymann - bibtex: "@inproceedings{ghajdu2017,\n abstract = {This paper is a description of\ - \ a pilot project conducted at the Hamburg University of Music and Drama (HfMT)\ - \ during the academic year 2015-16. In this project we have addressed how interventions\ - \ via interactive, generative music systems may contribute to the improvement\ - \ of the atmosphere and thus to the well-being of patients in hospital waiting\ - \ areas. The project was conducted by both the students of the music therapy and\ - \ multimedia composition programs and has thus offered rare insights into the\ - \ dynamic of such undertakings covering both the therapeutic underpinnings, as\ - \ well as the technical means required to achieve a particular result. DJster,\ - \ the engine we used for the generative processes is based on Clarence Barlow's\ - \ probabilistic algorithms. Equipped with the proper periphery (sensors, sound\ - \ modules and spatializers), we looked at three different scenarios, each requiring\ - \ specific musical and technological solutions. The pilot was concluded by a symposium\ - \ in 2017 and the development of a prototype system. The symposium yielded a diagram\ - \ detailing the circular dynamic of the factors involved in this particular project,\ - \ while the prototype was demoed in 2016 at the HfMT facilities. 
The system will\ - \ be installed permanently at the University Medical Center Hamburg-Eppendorf\ - \ (UKE) in June 2017.},\n address = {Copenhagen, Denmark},\n author = {Georg Hajdu\ - \ and Benedict Carey and Goran Lazarevic and Eckhard Weymann},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176284},\n issn = {2220-4806},\n pages = {364--369},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {From Atmosphere to Intervention:\ - \ The circular dynamic of installations in hospital waiting areas},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0069.pdf},\n\ - \ year = {2017}\n}\n" + author: Juan Vasquez and Koray Tahiroğlu and Johan Kildal + bibtex: "@inproceedings{jvasquez2017,\n abstract = {One of the reasons of why some\ + \ musical instruments more successfully continue their evolution and actively\ + \ take part in the history of music is partially attributed to the existing compositions\ + \ made specifically for them, pieces that remain and are still played over a long\ + \ period of time. This is something we know, performing these compositions keeps\ + \ the characteristics of the instruments alive and able to survive. This paper\ + \ presents our contribution to this discussion with a context and historical background\ + \ for idiomatic compositions. Looking beyond the classical era, we discuss how\ + \ the concept of idiomatic music has influenced research and composition practices\ + \ in the NIME community; drawing more attention in the way current idiomatic composition\ + \ practices considered specific NIME affordances for sonic, social and spatial\ + \ interaction. We present particular projects that establish idiomatic writing\ + \ as a part of a new repertoire for new musical instruments. 
The idiomatic writing\ + \ approach to composing music for NIME can shift the unique characteristics of\ + \ new instruments to a more established musical identity, providing a shared understanding\ + \ and a common literature to the community.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Juan Vasquez and Koray Tahiroğlu and Johan Kildal},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1181424},\n issn = {2220-4806},\n pages = {174--179},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Idiomatic Composition\ + \ Practices for New Musical Instruments: Context, Background and Current Applications},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0033.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176284 + doi: 10.5281/zenodo.1181424 issn: 2220-4806 - pages: 364--369 + pages: 174--179 publisher: Aalborg University Copenhagen - title: 'From Atmosphere to Intervention: The circular dynamic of installations in - hospital waiting areas' - url: http://www.nime.org/proceedings/2017/nime2017_paper0069.pdf + title: 'Idiomatic Composition Practices for New Musical Instruments: Context, Background + and Current Applications' + url: http://www.nime.org/proceedings/2017/nime2017_paper0033.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: dbrown2017 - abstract: 'The need for thorough evaluations is an emerging area of interest and - importance in music interaction research. 
As a large degree of DMI evaluation - is concerned with exploring the subjective experience: ergonomics, action-sound - mappings and control intimacy; User Experience (UX) methods are increasingly being - utilised to analyse an individual''s experience of new musical instruments, from - which we can extract meaningful, robust findings and subsequently generalised - and useful recommendations. However, many music interaction evaluations remain - informal. In this paper, we provide a meta-review of 132 papers from the 2014 - -- 2016 proceedings of the NIME, SMC and ICMC conferences to collate the aspects - of UX research that are already present in music interaction literature, and to - highlight methods from UX''s widening field of research that have not yet been - explored. Our findings show that usability and aesthetics are the primary focus - of evaluations in music interaction research, and other important components of - the user experience such as enchantment, motivation and frustration are frequently - if not always overlooked. We argue that these factors are prime areas for future - research in the field and their consideration in design and evaluation could lead - to a better understanding of NIMEs and other computer music technology.' + ID: fberthaut2017 + abstract: 'Gestural interfaces, which make use of physiological signals, hand / + body postures or movements, have become widespread for musical expression. While + they may increase the transparency and expressiveness of instruments, they may + also result in limited agency, for musicians as well as for spectators. This problem + becomes especially true when the implemented mappings between gesture and music + are subtle or complex. These instruments may also restrict the appropriation possibilities + of controls, by comparison to physical interfaces. Most existing solutions to + these issues are based on distant and/or limited visual feedback (LEDs, small + screens). 
Our approach is to augment the gestures themselves with revealed virtual + objects. Our contributions are, first a novel approach of visual feedback that + allow for additional expressiveness, second a software pipeline for pixel-level + feedback and control that ensures tight coupling between sound and visuals, and + third, a design space for extending gestural control using revealed interfaces. + We also demonstrate and evaluate our approach with the augmentation of three existing + gestural musical instruments.' address: 'Copenhagen, Denmark' - author: Dom Brown and Chris Nash and Tom Mitchell - bibtex: "@inproceedings{dbrown2017,\n abstract = {The need for thorough evaluations\ - \ is an emerging area of interest and importance in music interaction research.\ - \ As a large degree of DMI evaluation is concerned with exploring the subjective\ - \ experience: ergonomics, action-sound mappings and control intimacy; User Experience\ - \ (UX) methods are increasingly being utilised to analyse an individual's experience\ - \ of new musical instruments, from which we can extract meaningful, robust findings\ - \ and subsequently generalised and useful recommendations. However, many music\ - \ interaction evaluations remain informal. In this paper, we provide a meta-review\ - \ of 132 papers from the 2014 -- 2016 proceedings of the NIME, SMC and ICMC conferences\ - \ to collate the aspects of UX research that are already present in music interaction\ - \ literature, and to highlight methods from UX's widening field of research that\ - \ have not yet been explored. Our findings show that usability and aesthetics\ - \ are the primary focus of evaluations in music interaction research, and other\ - \ important components of the user experience such as enchantment, motivation\ - \ and frustration are frequently if not always overlooked. 
We argue that these\ - \ factors are prime areas for future research in the field and their consideration\ - \ in design and evaluation could lead to a better understanding of NIMEs and other\ - \ computer music technology.},\n address = {Copenhagen, Denmark},\n author = {Dom\ - \ Brown and Chris Nash and Tom Mitchell},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176286},\n\ - \ issn = {2220-4806},\n pages = {370--375},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {A User Experience Review of Music Interaction Evaluations},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0070.pdf},\n year\ - \ = {2017}\n}\n" + author: Florent Berthaut and Cagan Arslan and Laurent Grisoni + bibtex: "@inproceedings{fberthaut2017,\n abstract = {Gestural interfaces, which\ + \ make use of physiological signals, hand / body postures or movements, have become\ + \ widespread for musical expression. While they may increase the transparency\ + \ and expressiveness of instruments, they may also result in limited agency, for\ + \ musicians as well as for spectators. This problem becomes especially true when\ + \ the implemented mappings between gesture and music are subtle or complex. These\ + \ instruments may also restrict the appropriation possibilities of controls, by\ + \ comparison to physical interfaces. Most existing solutions to these issues\ + \ are based on distant and/or limited visual feedback (LEDs, small screens). \ + \ Our approach is to augment the gestures themselves with revealed virtual objects.\ + \ Our contributions are, first a novel approach of visual feedback that allow\ + \ for additional expressiveness, second a software pipeline for pixel-level feedback\ + \ and control that ensures tight coupling between sound and visuals, and third,\ + \ a design space for extending gestural control using revealed interfaces. 
We\ + \ also demonstrate and evaluate our approach with the augmentation of three existing\ + \ gestural musical instruments.},\n address = {Copenhagen, Denmark},\n author\ + \ = {Florent Berthaut and Cagan Arslan and Laurent Grisoni},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176213},\n issn = {2220-4806},\n pages = {180--185},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Revgest: Augmenting\ + \ Gestural Musical Instruments with Revealed Virtual Objects},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0034.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176286 + doi: 10.5281/zenodo.1176213 issn: 2220-4806 - pages: 370--375 + pages: 180--185 publisher: Aalborg University Copenhagen - title: A User Experience Review of Music Interaction Evaluations - url: http://www.nime.org/proceedings/2017/nime2017_paper0070.pdf + title: 'Revgest: Augmenting Gestural Musical Instruments with Revealed Virtual Objects' + url: http://www.nime.org/proceedings/2017/nime2017_paper0034.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: wsiegel2017 - abstract: 'This paper discusses control of multichannel sound diffusion by means - of motion-tracking hardware and software within the context of a live performance. - The idea developed from the author''s previous use of motion-tracking technology - in his own artistic practice as a composer and performer. Various motion tracking - systems were considered, experiments were conducted with three sound diffusion - setups at three venues and a new composition for solo performer and motion-tracking - system took form.' 
+ ID: atroyer2017 + abstract: 'MM-RT (material and magnet --- rhythm and timbre) is a tabletop musical + instrument equipped with electromagnetic actuators to offer a new paradigm of + musical expression and exploration. After expanding on prior work with electromagnetic + instrument actuation and tabletop musical interfaces, the paper explains why and + how MM-RT, through its physicality and ergonomics, has been designed specifically + for musical wonderers: people who want to know more about music in installation, + concert, and everyday contexts. Those wonderers aspire to interpret and explore + music rather than focussing on a technically correct realization of music. Informed + by this vision, we then describe the design and technical implementation of this + tabletop musical instrument. The paper concludes with discussions about future + works and how to trigger musical wonderers'' sonic curiosity to encounter, explore, + invent, and organize sounds for music creation using a musical instrument like + MM-RT.' address: 'Copenhagen, Denmark' - author: Wayne Siegel - bibtex: "@inproceedings{wsiegel2017,\n abstract = {This paper discusses control\ - \ of multichannel sound diffusion by means of motion-tracking hardware and software\ - \ within the context of a live performance. The idea developed from the author's\ - \ previous use of motion-tracking technology in his own artistic practice as a\ - \ composer and performer. 
Various motion tracking systems were considered, experiments\ - \ were conducted with three sound diffusion setups at three venues and a new composition\ - \ for solo performer and motion-tracking system took form.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Wayne Siegel},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176288},\n\ - \ issn = {2220-4806},\n pages = {376--380},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Conducting Sound in Space},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0071.pdf},\n\ - \ year = {2017}\n}\n" + author: Akito van Troyer + bibtex: "@inproceedings{atroyer2017,\n abstract = {MM-RT (material and magnet ---\ + \ rhythm and timbre) is a tabletop musical instrument equipped with electromagnetic\ + \ actuators to offer a new paradigm of musical expression and exploration. After\ + \ expanding on prior work with electromagnetic instrument actuation and tabletop\ + \ musical interfaces, the paper explains why and how MM-RT, through its physicality\ + \ and ergonomics, has been designed specifically for musical wonderers: people\ + \ who want to know more about music in installation, concert, and everyday contexts.\ + \ Those wonderers aspire to interpret and explore music rather than focussing\ + \ on a technically correct realization of music. 
Informed by this vision, we then\ + \ describe the design and technical implementation of this tabletop musical instrument.\ + \ The paper concludes with discussions about future works and how to trigger musical\ + \ wonderers' sonic curiosity to encounter, explore, invent, and organize sounds\ + \ for music creation using a musical instrument like MM-RT.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Akito van Troyer},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176215},\n\ + \ issn = {2220-4806},\n pages = {186--191},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {MM-RT: A Tabletop Musical Instrument for Musical Wonderers},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0035.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176288 + doi: 10.5281/zenodo.1176215 issn: 2220-4806 - pages: 376--380 + pages: 186--191 publisher: Aalborg University Copenhagen - title: Conducting Sound in Space - url: http://www.nime.org/proceedings/2017/nime2017_paper0071.pdf + title: 'MM-RT: A Tabletop Musical Instrument for Musical Wonderers' + url: http://www.nime.org/proceedings/2017/nime2017_paper0035.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: ssalazar2017a - abstract: 'The Fragment String is a new digital musical instrument designed to reinterpret - and reflect upon the sounds of the instruments it is performed in collaboration - with. At its core, it samples an input audio signal and allows the performer to - replay these samples through a granular resynthesizer. Normally the Fragment String - samples an acoustic instrument that accompanies it, but in the absence of this - input it will amplify the ambient environment and electronic noise of the input - audio path to audible levels and sample these. 
This ability to leverage both structural, - tonal sound and unstructured noise provide the instrument with multiple dimensions - of musical expressivity. The relative magnitude of the physical gestures required - to manipulate the instrument and control the sound also engage an audience in - its performance. This straightforward yet expressive design has lent the Fragment - String to a variety of performance techniques and settings. These are explored - through case studies in a five year history of Fragment String-based compositions - and performances, illustrating the strengths and limitations of these interactions - and their sonic output. ' + ID: fmorreale2017 + abstract: 'Every new edition of NIME brings dozens of new DMIs and the feeling that + only a few of them will eventually break through. Previous work tried to address + this issue with a deductive approach by formulating design frameworks; we addressed + this issue with a inductive approach by elaborating on successes and failures + of previous DMIs. We contacted 97 DMI makers that presented a new instrument at + five successive editions of NIME (2010-2014); 70 answered. They were asked to + indicate the original motivation for designing the DMI and to present information + about its uptake. Results confirmed that most of the instruments have difficulties + establishing themselves. Also, they were asked to reflect on the specific factors + that facilitated and those that hindered instrument longevity. By grounding these + reflections on existing reserach on NIME and HCI, we propose a series of design + considerations for future DMIs. ' address: 'Copenhagen, Denmark' - author: Spencer Salazar and Sarah Reid and Daniel McNamara - bibtex: "@inproceedings{ssalazar2017a,\n abstract = {The Fragment String is a new\ - \ digital musical instrument designed to reinterpret and reflect upon the sounds\ - \ of the instruments it is performed in collaboration with. 
At its core, it samples\ - \ an input audio signal and allows the performer to replay these samples through\ - \ a granular resynthesizer. Normally the Fragment String samples an acoustic instrument\ - \ that accompanies it, but in the absence of this input it will amplify the ambient\ - \ environment and electronic noise of the input audio path to audible levels and\ - \ sample these. This ability to leverage both structural, tonal sound and unstructured\ - \ noise provide the instrument with multiple dimensions of musical expressivity.\ - \ The relative magnitude of the physical gestures required to manipulate the instrument\ - \ and control the sound also engage an audience in its performance. This straightforward\ - \ yet expressive design has lent the Fragment String to a variety of performance\ - \ techniques and settings. These are explored through case studies in a five year\ - \ history of Fragment String-based compositions and performances, illustrating\ - \ the strengths and limitations of these interactions and their sonic output.\ - \ },\n address = {Copenhagen, Denmark},\n author = {Spencer Salazar and Sarah\ - \ Reid and Daniel McNamara},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176290},\n\ - \ issn = {2220-4806},\n pages = {381--386},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {The Fragment String},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0072.pdf},\n\ - \ year = {2017}\n}\n" + author: Fabio Morreale and Andrew McPherson + bibtex: "@inproceedings{fmorreale2017,\n abstract = {Every new edition of NIME brings\ + \ dozens of new DMIs and the feeling that only a few of them will eventually break\ + \ through. Previous work tried to address this issue with a deductive approach\ + \ by formulating design frameworks; we addressed this issue with a inductive approach\ + \ by elaborating on successes and failures of previous DMIs. 
We contacted 97 DMI\ + \ makers that presented a new instrument at five successive editions of NIME (2010-2014);\ + \ 70 answered. They were asked to indicate the original motivation for designing\ + \ the DMI and to present information about its uptake. Results confirmed that\ + \ most of the instruments have difficulties establishing themselves. Also, they\ + \ were asked to reflect on the specific factors that facilitated and those that\ + \ hindered instrument longevity. By grounding these reflections on existing reserach\ + \ on NIME and HCI, we propose a series of design considerations for future DMIs.\ + \ },\n address = {Copenhagen, Denmark},\n author = {Fabio Morreale and Andrew\ + \ McPherson},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176218},\n issn\ + \ = {2220-4806},\n pages = {192--197},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Design for Longevity: Ongoing Use of Instruments from NIME 2010-14},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0036.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176290 + doi: 10.5281/zenodo.1176218 issn: 2220-4806 - pages: 381--386 + pages: 192--197 publisher: Aalborg University Copenhagen - title: The Fragment String - url: http://www.nime.org/proceedings/2017/nime2017_paper0072.pdf + title: 'Design for Longevity: Ongoing Use of Instruments from NIME 2010-14' + url: http://www.nime.org/proceedings/2017/nime2017_paper0036.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: sjong2017 - abstract: "We present Ghostfinger, a technology for highly dynamic up/down fingertip\ - \ haptics and control. 
The overall user experience offered by the technology can\ - \ be described as that of tangibly and audibly interacting with a small hologram.\ - \   More specifically, Ghostfinger implements automatic visualization of\ - \ the dynamic instantiation/parametrization of algorithmic primitives that together\ - \ determine the current haptic conditions for fingertip action. Some aspects of\ - \ this visualization are visuospatial: A floating see-through cursor provides\ - \ real-time, to-scale display of the fingerpad transducer, as it is being moved\ - \ by the user. Simultaneously, each haptic primitive instance is represented\ - \ by a floating block shape, type-colored, variably transparent, and possibly\ - \ overlapping with other such block shapes. Further aspects of visualization are\ - \ symbolic: Each instance is also represented by a type symbol, lighting up within\ - \ a grid if the instance is providing output to the user.   We discuss the\ - \ system's user interface, programming interface, and potential applications.\ - \ This from a general perspective that articulates and emphasizes the uniquely\ - \ enabling role of the principle of computation in the implementation of new forms\ - \ of instrumental control of musical sound. Beyond the currently presented technology,\ - \ this also reflects more broadly on the role of Digital Musical Instruments (DMIs)\ - \ in NIME." + ID: sdelalez2017 + abstract: 'Performative control of voice is the process of real-time speech synthesis + or modification by the means of hands or feet gestures. Vokinesis, a system for + real-time rhythm and pitch modification and control of singing is presented. Pitch + and vocal effort are controlled by a stylus on a graphic tablet. The concept of + Syllabic Control Points (SCP) is introduced for timing and rhythm control. 
A + chain of phonetic syllables have two types of temporal phases : the steady phases, + which correspond to the vocalic nuclei, and the transient phases, which correspond + to the attacks and/or codas. Thus, syllabic rhythm control methods need transient + and steady phases control points, corresponding to the ancient concept of the + arsis and thesis is prosodic theory. SCP allow for accurate control of articulation, + using hand or feet. In the tap mode, SCP are triggered by pressing and releasing + a control button. In the fader mode, continuous variation of the SCP sequencing + rate is controlled with expression pedals. Vokinesis has been tested successfully + in musical performances, using both syllabic rhythm control modes. This system + opens new musical possibilities, and can be extended to other types of sounds + beyond voice. ' address: 'Copenhagen, Denmark' - author: Staas de Jong - bibtex: "@inproceedings{sjong2017,\n abstract = {We present Ghostfinger, a technology\ - \ for highly dynamic up/down fingertip haptics and control. The overall user experience\ - \ offered by the technology can be described as that of tangibly and audibly interacting\ - \ with a small hologram.   More specifically, Ghostfinger implements automatic\ - \ visualization of the dynamic instantiation/parametrization of algorithmic primitives\ - \ that together determine the current haptic conditions for fingertip action.\ - \ Some aspects of this visualization are visuospatial: A floating see-through\ - \ cursor provides real-time, to-scale display of the fingerpad transducer, as\ - \ it is being moved by the user. Simultaneously, each haptic primitive instance\ - \ is represented by a floating block shape, type-colored, variably transparent,\ - \ and possibly overlapping with other such block shapes. 
Further aspects of visualization\ - \ are symbolic: Each instance is also represented by a type symbol, lighting up\ - \ within a grid if the instance is providing output to the user.   We discuss\ - \ the system's user interface, programming interface, and potential applications.\ - \ This from a general perspective that articulates and emphasizes the uniquely\ - \ enabling role of the principle of computation in the implementation of new forms\ - \ of instrumental control of musical sound. Beyond the currently presented technology,\ - \ this also reflects more broadly on the role of Digital Musical Instruments (DMIs)\ - \ in NIME.},\n address = {Copenhagen, Denmark},\n author = {Staas de Jong},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176292},\n issn = {2220-4806},\n\ - \ pages = {387--392},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Ghostfinger: a novel platform for fully computational fingertip controllers},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0073.pdf},\n year\ - \ = {2017}\n}\n" + author: Samuel Delalez and Christophe d'Alessandro + bibtex: "@inproceedings{sdelalez2017,\n abstract = {Performative control of voice\ + \ is the process of real-time speech synthesis or modification by the means of\ + \ hands or feet gestures. Vokinesis, a system for real-time rhythm and pitch modification\ + \ and control of singing is presented. Pitch and vocal effort are controlled\ + \ by a stylus on a graphic tablet. The concept of Syllabic Control Points (SCP)\ + \ is introduced for timing and rhythm control. A chain of phonetic syllables\ + \ have two types of temporal phases : the steady phases, which correspond to the\ + \ vocalic nuclei, and the transient phases, which correspond to the attacks and/or\ + \ codas. 
Thus, syllabic rhythm control methods need transient and steady phases\ + \ control points, corresponding to the ancient concept of the arsis and thesis\ + \ is prosodic theory. SCP allow for accurate control of articulation, using hand\ + \ or feet. In the tap mode, SCP are triggered by pressing and releasing a control\ + \ button. In the fader mode, continuous variation of the SCP sequencing rate is\ + \ controlled with expression pedals. Vokinesis has been tested successfully in\ + \ musical performances, using both syllabic rhythm control modes. This system\ + \ opens new musical possibilities, and can be extended to other types of sounds\ + \ beyond voice. },\n address = {Copenhagen, Denmark},\n author = {Samuel Delalez\ + \ and Christophe d'Alessandro},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176220},\n\ + \ issn = {2220-4806},\n pages = {198--203},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Vokinesis: Syllabic Control Points for Performative\ + \ Singing Synthesis},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0037.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176292 + doi: 10.5281/zenodo.1176220 issn: 2220-4806 - pages: 387--392 + pages: 198--203 publisher: Aalborg University Copenhagen - title: 'Ghostfinger: a novel platform for fully computational fingertip controllers' - url: http://www.nime.org/proceedings/2017/nime2017_paper0073.pdf + title: 'Vokinesis: Syllabic Control Points for Performative Singing Synthesis' + url: http://www.nime.org/proceedings/2017/nime2017_paper0037.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: jarmitage2017 - abstract: 'Many digital musical instrument design frameworks have been proposed - that are well suited for analysis and comparison. 
However, not all provide applicable - design suggestions, especially where subtle but important details are concerned. - Using traditional lutherie as a model, we conducted a series of interviews to - explore how violin makers ``go beyond the obvious'''', and how players perceive - and describe subtle details of instrumental quality. We find that lutherie frameworks - provide clear design methods and have substantial empirical backing, but are not - enough to make a fine violin. Success comes after acquiring sufficient tacit knowledge, - which enables detailed craft through subjective, empirical methods. Testing instruments - for subtle qualities was suggested to be a different skill to playing. Whilst - players are able to identify some specific details about instrumental quality - by comparison, these are often not actionable, and important aspects of ``sound - and feeling'''' are much more difficult to describe. In the DMI domain, we introduce - NIMEcraft to describe subtle differences between otherwise identical instruments - and their underlying design processes, and consider how to improve the dissemination - of NIMEcraft.' + ID: gyoung2017 + abstract: 'We present the findings of a pilot-study that analysed the role of haptic + feedback in a musical context. To examine the role of haptics in Digital Musical + Instrument (DMI) design an experiment was formulated to measure the users'' perception + of device usability across four separate feedback stages: fully haptic (force + and tactile combined), constant force only, vibrotactile only, and no feedback. + The study was piloted over extended periods with the intention of exploring the + application and integration of DMIs in real-world musical contexts. Applying a + music orientated analysis of this type enabled the investigative process to not + only take place over a comprehensive period, but allowed for the exploration of + DMI integration in everyday compositional practices. 
As with any investigation + that involves creativity, it was important that the participants did not feel + rushed or restricted. That is, they were given sufficient time to explore and + assess the different feedback types without constraint. This provided an accurate + and representational set of qualitative data for validating the participants'' + experience with the different feedback types they were presented with.' address: 'Copenhagen, Denmark' - author: Jack Armitage and Fabio Morreale and Andrew McPherson - bibtex: "@inproceedings{jarmitage2017,\n abstract = {Many digital musical instrument\ - \ design frameworks have been proposed that are well suited for analysis and comparison.\ - \ However, not all provide applicable design suggestions, especially where subtle\ - \ but important details are concerned. Using traditional lutherie as a model,\ - \ we conducted a series of interviews to explore how violin makers ``go beyond\ - \ the obvious'', and how players perceive and describe subtle details of instrumental\ - \ quality. We find that lutherie frameworks provide clear design methods and have\ - \ substantial empirical backing, but are not enough to make a fine violin. Success\ - \ comes after acquiring sufficient tacit knowledge, which enables detailed craft\ - \ through subjective, empirical methods. Testing instruments for subtle qualities\ - \ was suggested to be a different skill to playing. Whilst players are able to\ - \ identify some specific details about instrumental quality by comparison, these\ - \ are often not actionable, and important aspects of ``sound and feeling'' are\ - \ much more difficult to describe. 
In the DMI domain, we introduce NIMEcraft to\ - \ describe subtle differences between otherwise identical instruments and their\ - \ underlying design processes, and consider how to improve the dissemination of\ - \ NIMEcraft.},\n address = {Copenhagen, Denmark},\n author = {Jack Armitage and\ - \ Fabio Morreale and Andrew McPherson},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176294},\n\ - \ issn = {2220-4806},\n pages = {393--398},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {The finer the musician, the smaller the details: NIMEcraft\ - \ under the microscope},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0074.pdf},\n\ + author: Gareth Young and Dave Murphy and Jeffrey Weeter + bibtex: "@inproceedings{gyoung2017,\n abstract = {We present the findings of a pilot-study\ + \ that analysed the role of haptic feedback in a musical context. To examine the\ + \ role of haptics in Digital Musical Instrument (DMI) design an experiment was\ + \ formulated to measure the users' perception of device usability across four\ + \ separate feedback stages: fully haptic (force and tactile combined), constant\ + \ force only, vibrotactile only, and no feedback. The study was piloted over extended\ + \ periods with the intention of exploring the application and integration of DMIs\ + \ in real-world musical contexts. Applying a music orientated analysis of this\ + \ type enabled the investigative process to not only take place over a comprehensive\ + \ period, but allowed for the exploration of DMI integration in everyday compositional\ + \ practices. As with any investigation that involves creativity, it was important\ + \ that the participants did not feel rushed or restricted. That is, they were\ + \ given sufficient time to explore and assess the different feedback types without\ + \ constraint. 
This provided an accurate and representational set of qualitative\ + \ data for validating the participants' experience with the different feedback\ + \ types they were presented with.},\n address = {Copenhagen, Denmark},\n author\ + \ = {Gareth Young and Dave Murphy and Jeffrey Weeter},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176222},\n issn = {2220-4806},\n pages = {204--209},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {A Qualitative Analysis\ + \ of Haptic Feedback in Music Focused Exercises},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0038.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176294 + doi: 10.5281/zenodo.1176222 issn: 2220-4806 - pages: 393--398 + pages: 204--209 publisher: Aalborg University Copenhagen - title: 'The finer the musician, the smaller the details: NIMEcraft under the microscope' - url: http://www.nime.org/proceedings/2017/nime2017_paper0074.pdf + title: A Qualitative Analysis of Haptic Feedback in Music Focused Exercises + url: http://www.nime.org/proceedings/2017/nime2017_paper0038.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: smehes2017 - abstract: 'Exploration is an intrinsic element of designing and engaging with acoustic - as well as digital musical instruments. This paper reports on the ongoing development - of a virtual-acoustic instrument based on a physical model of a string coupled - nonlinearly to a plate. The performer drives the model by tactile interaction - with a string-board controller fitted with piezo-electric sensors. The string-plate - model is formulated in a way that prioritises its parametric explorability. Where - the roles of creating performance gestures and designing instruments are traditionally - separated, such a design provides a continuum across these domains. 
The string-plate - model, its real-time implementation, and the control interface are described, - and the system is preliminarily evaluated through informal observations of how - musicians engage with the system.' + ID: jhe2017 + abstract: 'In the recent years, mechatronic musical instruments (MMI) have become + increasingly parametrically rich. Researchers have developed different interaction + strategies to negotiate the challenge of interfacing with each of the MMI''s high-resolution + parameters in real time. While mapping strategies hold an important aspect of + the musical interaction paradigm for MMI, attention on dedicated input devices + to perform these instruments live should not be neglected. This paper presents + the findings of a user study conducted with participants possessing specialized + musicianship skills for MMI music performance and composition. Study participants + are given three musical tasks to complete using a mechatronic chordophone with + high dimensionality of control via different musical input interfaces (one input + device at a time). This representative user study reveals the features of related-dedicated + input controllers, how they compare against the typical MIDI keyboard/sequencer + paradigm in human-MMI interaction, and provide an indication of the musical function + that expert users prefer for each input interface.' address: 'Copenhagen, Denmark' - author: Sandor Mehes and Maarten van Walstijn and Paul Stapleton - bibtex: "@inproceedings{smehes2017,\n abstract = {Exploration is an intrinsic element\ - \ of designing and engaging with acoustic as well as digital musical instruments.\ - \ This paper reports on the ongoing development of a virtual-acoustic instrument\ - \ based on a physical model of a string coupled nonlinearly to a plate. The performer\ - \ drives the model by tactile interaction with a string-board controller fitted\ - \ with piezo-electric sensors. 
The string-plate model is formulated in a way\ - \ that prioritises its parametric explorability. Where the roles of creating performance\ - \ gestures and designing instruments are traditionally separated, such a design\ - \ provides a continuum across these domains. The string-plate model, its real-time\ - \ implementation, and the control interface are described, and the system is preliminarily\ - \ evaluated through informal observations of how musicians engage with the system.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Sandor Mehes and Maarten van Walstijn\ - \ and Paul Stapleton},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176296},\n\ - \ issn = {2220-4806},\n pages = {399--403},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Virtual-Acoustic Instrument Design: Exploring the Parameter\ - \ Space of a String-Plate Model},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0075.pdf},\n\ + author: Jingyin He and Jim Murphy and Dale A. Carnegie and Ajay Kapur + bibtex: "@inproceedings{jhe2017,\n abstract = {In the recent years, mechatronic\ + \ musical instruments (MMI) have become increasingly parametrically rich. Researchers\ + \ have developed different interaction strategies to negotiate the challenge of\ + \ interfacing with each of the MMI's high-resolution parameters in real time.\ + \ While mapping strategies hold an important aspect of the musical interaction\ + \ paradigm for MMI, attention on dedicated input devices to perform these instruments\ + \ live should not be neglected. This paper presents the findings of a user study\ + \ conducted with participants possessing specialized musicianship skills for MMI\ + \ music performance and composition. 
Study participants are given three musical\ + \ tasks to complete using a mechatronic chordophone with high dimensionality of\ + \ control via different musical input interfaces (one input device at a time).\ + \ This representative user study reveals the features of related-dedicated input\ + \ controllers, how they compare against the typical MIDI keyboard/sequencer paradigm\ + \ in human-MMI interaction, and provide an indication of the musical function\ + \ that expert users prefer for each input interface.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Jingyin He and Jim Murphy and Dale A. Carnegie and Ajay\ + \ Kapur},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176224},\n issn = {2220-4806},\n\ + \ pages = {210--215},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Towards Related-Dedicated Input Devices for Parametrically Rich Mechatronic\ + \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0039.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176296 + doi: 10.5281/zenodo.1176224 issn: 2220-4806 - pages: 399--403 + pages: 210--215 publisher: Aalborg University Copenhagen - title: 'Virtual-Acoustic Instrument Design: Exploring the Parameter Space of a String-Plate - Model' - url: http://www.nime.org/proceedings/2017/nime2017_paper0075.pdf + title: Towards Related-Dedicated Input Devices for Parametrically Rich Mechatronic + Musical Instruments + url: http://www.nime.org/proceedings/2017/nime2017_paper0039.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: nbouillot2017 - abstract: 'Recent advances in computing offer the possibility to scale real-time - 3D virtual audio scenes to include hundreds of simultaneous sound sources, rendered - in realtime, for large numbers of audio outputs. 
Our Spatial Audio Toolkit for - Immersive Environments (SATIE), allows us to render these dense audio scenes to - large multi-channel (e.g. 32 or more) loudspeaker systems, in realtime and controlled - from external software such as 3D scenegraph software. As we describe here, SATIE - is designed for improved scalability: minimum dependency between nodes in the - audio DSP graph for parallel audio computation, controlling sound objects by groups - and load balancing computation of geometry that allow to reduce the number of - messages for controlling simultaneously a high number of sound sources. The paper - presents SATIE along with example use case scenarios. Our initial work demonstrates - SATIE''s flexibility, and has provided us with novel sonic sensations such as - ``audio depth of field'''' and real-time sound swarming.' + ID: ablatherwick2017 + abstract: 'Music technology can provide unique opportunities to allow access to + music making for those with complex needs in special educational needs (SEN) settings. + Whilst there is a growing trend of research in this area, technology has been + shown to face a variety of issues leading to underuse in this context. This paper + reviews issues raised in literature and in practice for the use of music technology + in SEN settings. The paper then reviews existing principles and frameworks for + designing digital musical instruments (DMIs.) The reviews of literature and current + frameworks are then used to inform a set of design considerations for instruments + for users with complex needs, and in SEN settings. 18 design considerations are + presented with connections to literature and practice. An implementation example + including future work is presented, and finally a conclusion is then offered. 
' address: 'Copenhagen, Denmark' - author: Nicolas Bouillot and Zack Settel and Michal Seta - bibtex: "@inproceedings{nbouillot2017,\n abstract = {Recent advances in computing\ - \ offer the possibility to scale real-time 3D virtual audio scenes to include\ - \ hundreds of simultaneous sound sources, rendered in realtime, for large numbers\ - \ of audio outputs. Our Spatial Audio Toolkit for Immersive Environments (SATIE),\ - \ allows us to render these dense audio scenes to large multi-channel (e.g. 32\ - \ or more) loudspeaker systems, in realtime and controlled from external software\ - \ such as 3D scenegraph software. As we describe here, SATIE is designed for\ - \ improved scalability: minimum dependency between nodes in the audio DSP graph\ - \ for parallel audio computation, controlling sound objects by groups and load\ - \ balancing computation of geometry that allow to reduce the number of messages\ - \ for controlling simultaneously a high number of sound sources. The paper presents\ - \ SATIE along with example use case scenarios. 
Our initial work demonstrates SATIE's\ - \ flexibility, and has provided us with novel sonic sensations such as ``audio\ - \ depth of field'' and real-time sound swarming.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Nicolas Bouillot and Zack Settel and Michal Seta},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176298},\n issn = {2220-4806},\n pages = {404--409},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {SATIE: a live and scalable\ - \ 3D audio scene rendering environment for large multi-channel loudspeaker configurations},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0076.pdf},\n year\ - \ = {2017}\n}\n" + author: Asha Blatherwick and Luke Woodbury and Tom Davis + bibtex: "@inproceedings{ablatherwick2017,\n abstract = {Music technology can provide\ + \ unique opportunities to allow access to music making for those with complex\ + \ needs in special educational needs (SEN) settings. Whilst there is a growing\ + \ trend of research in this area, technology has been shown to face a variety\ + \ of issues leading to underuse in this context. This paper reviews issues raised\ + \ in literature and in practice for the use of music technology in SEN settings.\ + \ The paper then reviews existing principles and frameworks for designing digital\ + \ musical instruments (DMIs.) The reviews of literature and current frameworks\ + \ are then used to inform a set of design considerations for instruments for users\ + \ with complex needs, and in SEN settings. 18 design considerations are presented\ + \ with connections to literature and practice. An implementation example including\ + \ future work is presented, and finally a conclusion is then offered. 
},\n address\ + \ = {Copenhagen, Denmark},\n author = {Asha Blatherwick and Luke Woodbury and\ + \ Tom Davis},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176226},\n issn\ + \ = {2220-4806},\n pages = {216--221},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Design Considerations for Instruments for Users with Complex Needs\ + \ in SEN Settings},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0040.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176298 + doi: 10.5281/zenodo.1176226 issn: 2220-4806 - pages: 404--409 + pages: 216--221 publisher: Aalborg University Copenhagen - title: 'SATIE: a live and scalable 3D audio scene rendering environment for large - multi-channel loudspeaker configurations' - url: http://www.nime.org/proceedings/2017/nime2017_paper0076.pdf + title: Design Considerations for Instruments for Users with Complex Needs in SEN + Settings + url: http://www.nime.org/proceedings/2017/nime2017_paper0040.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: hscurto2017 - abstract: 'Machine learning tools for designing motion-sound relationships often - rely on a two-phase iterative process, where users must alternate between designing - gestures and performing mappings. We present a first prototype of a user adaptable - tool that aims at merging these design and performance steps into one fully interactive - experience. It is based on an online learning implementation of a Gaussian Mixture - Model supporting real-time adaptation to user movement and generation of sound - parameters. To allow both fine-tune modification tasks and open-ended improvisational - practices, we designed two interaction modes that either let users shape, or guide - interactive motion-sound mappings. 
Considering an improvisational use case, we - propose two example musical applications to illustrate how our tool might support - various forms of corporeal engagement with sound, and inspire further perspectives - for machine learning-mediated embodied musical expression.' + ID: ahindle2017 + abstract: 'The didgeridoo is a wind instrument composed of a single large tube often + used as drone instrument for backing up the mids and lows of an ensemble. A didgeridoo + is played by buzzing the lips and blowing air into the didgeridoo. To play a didgeridoo + continously one can employ circular breathing but the volume of air required poses + a real challenge to novice players. In this paper we replace the expense of circular + breathing and lip buzzing with electronic excitation, thus creating an electro-acoustic + didgeridoo or electronic didgeridoo. Thus we describe the didgeridoo excitation + signal, how to replicate it, and the hardware necessary to make an electro-acoustic + didgeridoo driven by speakers and controllable from a computer. To properly drive + the didgeridoo we rely upon 4th-order ported bandpass speaker boxes to help guide + our excitation signals into an attached acoustic didgeridoo. The results somewhat + replicate human didgeridoo playing, enabling a new kind of mid to low electro-acoustic + accompaniment without the need for circular breathing. ' address: 'Copenhagen, Denmark' - author: Hugo Scurto and Frédéric Bevilacqua and Jules Françoise - bibtex: "@inproceedings{hscurto2017,\n abstract = {Machine learning tools for designing\ - \ motion-sound relationships often rely on a two-phase iterative process, where\ - \ users must alternate between designing gestures and performing mappings. We\ - \ present a first prototype of a user adaptable tool that aims at merging these\ - \ design and performance steps into one fully interactive experience. 
It is based\ - \ on an online learning implementation of a Gaussian Mixture Model supporting\ - \ real-time adaptation to user movement and generation of sound parameters. To\ - \ allow both fine-tune modification tasks and open-ended improvisational practices,\ - \ we designed two interaction modes that either let users shape, or guide interactive\ - \ motion-sound mappings. Considering an improvisational use case, we propose two\ - \ example musical applications to illustrate how our tool might support various\ - \ forms of corporeal engagement with sound, and inspire further perspectives for\ - \ machine learning-mediated embodied musical expression.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Hugo Scurto and Frédéric Bevilacqua and Jules Françoise},\n\ + author: Abram Hindle and Daryl Posnett + bibtex: "@inproceedings{ahindle2017,\n abstract = {The didgeridoo is a wind instrument\ + \ composed of a single large tube often used as drone instrument for backing up\ + \ the mids and lows of an ensemble. A didgeridoo is played by buzzing the lips\ + \ and blowing air into the didgeridoo. To play a didgeridoo continously one can\ + \ employ circular breathing but the volume of air required poses a real challenge\ + \ to novice players. In this paper we replace the expense of circular breathing\ + \ and lip buzzing with electronic excitation, thus creating an electro-acoustic\ + \ didgeridoo or electronic didgeridoo. Thus we describe the didgeridoo excitation\ + \ signal, how to replicate it, and the hardware necessary to make an electro-acoustic\ + \ didgeridoo driven by speakers and controllable from a computer. To properly\ + \ drive the didgeridoo we rely upon 4th-order ported bandpass speaker boxes to\ + \ help guide our excitation signals into an attached acoustic didgeridoo. 
The\ + \ results somewhat replicate human didgeridoo playing, enabling a new kind of\ + \ mid to low electro-acoustic accompaniment without the need for circular breathing.\ + \ },\n address = {Copenhagen, Denmark},\n author = {Abram Hindle and Daryl Posnett},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176270},\n issn = {2220-4806},\n\ - \ pages = {410--415},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Shaping and Exploring Interactive Motion-Sound Mappings Using Online Clustering\ - \ Techniques},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0077.pdf},\n\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176228},\n issn = {2220-4806},\n\ + \ pages = {222--226},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Performance with an Electronically Excited Didgeridoo},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0041.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176270 + doi: 10.5281/zenodo.1176228 issn: 2220-4806 - pages: 410--415 - publisher: Aalborg University Copenhagen - title: Shaping and Exploring Interactive Motion-Sound Mappings Using Online Clustering - Techniques - url: http://www.nime.org/proceedings/2017/nime2017_paper0077.pdf - year: 2017 - - -- ENTRYTYPE: inproceedings - ID: kbhumber2017 - abstract: 'We describe the Responsive User Body Suit (RUBS), a tactile instrument - worn by performers that allows the generation and manipulation of audio output - using touch triggers. The RUBS system is a responsive interface between organic - touch and electronic audio, intimately located on the performer''s body. This - system offers an entry point into a more intuitive method of music performance. 
- A short overview of body instrument philosophy and related work is followed by - the development and implementation process of the RUBS as both an interface and - performance instrument. Lastly, observations, design challenges and future goals - are discussed.' + pages: 222--226 + publisher: Aalborg University Copenhagen + title: Performance with an Electronically Excited Didgeridoo + url: http://www.nime.org/proceedings/2017/nime2017_paper0041.pdf + year: 2017 + + +- ENTRYTYPE: inproceedings + ID: mzbyszynski2017 + abstract: 'Our research examines the use of CodeCircle, an online, collaborative + HTML, CSS, and JavaScript editor, as a rapid prototyping environment for musically + expressive instruments. In CodeCircle, we use two primary libraries: MaxiLib and + RapidLib. MaxiLib is a synthesis and sample processing library, ported from the + C++ library Maximillian, which interfaces with the Web Audio API for sound generation + in the browser. RapidLib is a product of the Rapid-Mix project, and allows users + to implement interactive machine learning, using "programming by demonstration" + to design new expressive interactions.' address: 'Copenhagen, Denmark' - author: Kiran Bhumber and Bob Pritchard and Kitty Rodé - bibtex: "@inproceedings{kbhumber2017,\n abstract = {We describe the Responsive User\ - \ Body Suit (RUBS), a tactile instrument worn by performers that allows the generation\ - \ and manipulation of audio output using touch triggers. The RUBS system is a\ - \ responsive interface between organic touch and electronic audio, intimately\ - \ located on the performer's body. This system offers an entry point into a more\ - \ intuitive method of music performance. A short overview of body instrument philosophy\ - \ and related work is followed by the development and implementation process of\ - \ the RUBS as both an interface and performance instrument. 
Lastly, observations,\ - \ design challenges and future goals are discussed.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Kiran Bhumber and Bob Pritchard and Kitty Rodé},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176300},\n issn = {2220-4806},\n pages\ - \ = {416--419},\n publisher = {Aalborg University Copenhagen},\n title = {A Responsive\ - \ User Body Suit (RUBS)},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0078.pdf},\n\ + author: Michael Zbyszyński and Mick Grierson and Matthew Yee-King + bibtex: "@inproceedings{mzbyszynski2017,\n abstract = {Our research examines the\ + \ use of CodeCircle, an online, collaborative HTML, CSS, and JavaScript editor,\ + \ as a rapid prototyping environment for musically expressive instruments. In\ + \ CodeCircle, we use two primary libraries: MaxiLib and RapidLib. MaxiLib is a\ + \ synthesis and sample processing library, ported from the C++ library Maximillian,\ + \ which interfaces with the Web Audio API for sound generation in the browser.\ + \ RapidLib is a product of the Rapid-Mix project, and allows users to implement\ + \ interactive machine learning, using \"programming by demonstration\" to design\ + \ new expressive interactions.},\n address = {Copenhagen, Denmark},\n author =\ + \ {Michael Zbyszyński and Mick Grierson and Matthew Yee-King},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1181420},\n issn = {2220-4806},\n pages = {227--230},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Rapid Prototyping of\ + \ New Instruments with CodeCircle},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0042.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176300 + doi: 10.5281/zenodo.1181420 
issn: 2220-4806 - pages: 416--419 + pages: 227--230 publisher: Aalborg University Copenhagen - title: A Responsive User Body Suit (RUBS) - url: http://www.nime.org/proceedings/2017/nime2017_paper0078.pdf + title: Rapid Prototyping of New Instruments with CodeCircle + url: http://www.nime.org/proceedings/2017/nime2017_paper0042.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: mhojlund2017 - abstract: 'This paper describes the development of a loudness-based compressor for - live audio streams. The need for this device arose while developing the public - sound art project The Overheard, which involves mixing together several live audio - streams through a web based mixing interface. In order to preserve a natural sounding - dynamic image from the varying sound sources that can be played back under varying - conditions, an adaptation of the EBU R128 loudness measurement recommendation, - originally developed for levelling non-real-time broadcast material, has been - applied. The paper describes the Pure Data implementation and the necessary compromises - enforced by the live streaming condition. Lastly observations regarding design - challenges, related application areas and future goals are presented. ' + ID: fvisi2017 + abstract: 'This paper presents a knowledge-based, data-driven method for using data + describing action-sound couplings collected from a group of people to generate + multiple complex mappings between the performance movements of a musician and + sound synthesis. This is done by using a database of multimodal motion data collected + from multiple subjects coupled with sound synthesis parameters. A series of sound + stimuli is synthesised using the sound engine that will be used in performance. + Multimodal motion data is collected by asking each participant to listen to each + sound stimulus and move as if they were producing the sound using a musical instrument + they are given. 
Multimodal data is recorded during each performance, and paired + with the synthesis parameters used for generating the sound stimulus. The dataset + created using this method is then used to build a topological representation of + the performance movements of the subjects. This representation is then used to + interactively generate training data for machine learning algorithms, and define + mappings for real-time performance. To better illustrate each step of the procedure, + we describe an implementation involving clarinet, motion capture, wearable sensor + armbands, and waveguide synthesis.' address: 'Copenhagen, Denmark' - author: Marie Højlund and Morten Riis and Daniel Rothmann and Jonas Kirkegaard - bibtex: "@inproceedings{mhojlund2017,\n abstract = {This paper describes the development\ - \ of a loudness-based compressor for live audio streams. The need for this device\ - \ arose while developing the public sound art project The Overheard, which involves\ - \ mixing together several live audio streams through a web based mixing interface.\ - \ In order to preserve a natural sounding dynamic image from the varying sound\ - \ sources that can be played back under varying conditions, an adaptation of the\ - \ EBU R128 loudness measurement recommendation, originally developed for levelling\ - \ non-real-time broadcast material, has been applied. The paper describes the\ - \ Pure Data implementation and the necessary compromises enforced by the live\ - \ streaming condition. Lastly observations regarding design challenges, related\ - \ application areas and future goals are presented. 
},\n address = {Copenhagen,\ - \ Denmark},\n author = {Marie Højlund and Morten Riis and Daniel Rothmann and\ - \ Jonas Kirkegaard},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176354},\n\ - \ issn = {2220-4806},\n pages = {420--425},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Applying the EBU R128 Loudness Standard in live-streaming\ - \ sound sculptures},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0079.pdf},\n\ + author: Federico Visi and Baptiste Caramiaux and Michael Mcloughlin and Eduardo + Miranda + bibtex: "@inproceedings{fvisi2017,\n abstract = {This paper presents a knowledge-based,\ + \ data-driven method for using data describing action-sound couplings collected\ + \ from a group of people to generate multiple complex mappings between the performance\ + \ movements of a musician and sound synthesis. This is done by using a database\ + \ of multimodal motion data collected from multiple subjects coupled with sound\ + \ synthesis parameters. A series of sound stimuli is synthesised using the sound\ + \ engine that will be used in performance. Multimodal motion data is collected\ + \ by asking each participant to listen to each sound stimulus and move as if they\ + \ were producing the sound using a musical instrument they are given. Multimodal\ + \ data is recorded during each performance, and paired with the synthesis parameters\ + \ used for generating the sound stimulus. The dataset created using this method\ + \ is then used to build a topological representation of the performance movements\ + \ of the subjects. This representation is then used to interactively generate\ + \ training data for machine learning algorithms, and define mappings for real-time\ + \ performance. 
To better illustrate each step of the procedure, we describe an\ + \ implementation involving clarinet, motion capture, wearable sensor armbands,\ + \ and waveguide synthesis.},\n address = {Copenhagen, Denmark},\n author = {Federico\ + \ Visi and Baptiste Caramiaux and Michael Mcloughlin and Eduardo Miranda},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176230},\n issn = {2220-4806},\n pages\ + \ = {231--236},\n publisher = {Aalborg University Copenhagen},\n title = {A Knowledge-based,\ + \ Data-driven Method for Action-sound Mapping},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0043.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176354 + doi: 10.5281/zenodo.1176230 issn: 2220-4806 - pages: 420--425 + pages: 231--236 publisher: Aalborg University Copenhagen - title: Applying the EBU R128 Loudness Standard in live-streaming sound sculptures - url: http://www.nime.org/proceedings/2017/nime2017_paper0079.pdf + title: 'A Knowledge-based, Data-driven Method for Action-sound Mapping' + url: http://www.nime.org/proceedings/2017/nime2017_paper0043.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: eberdahl2017 - abstract: The concept of embedded acoustic systems for diffusing spatial audio is - considered. This paradigm is enabled by advancements in floating-point hardware - on inexpensive embedded Linux systems. Examples are presented using line array - configurations for electroacoustic music and for making interactive kiosk and - poster systems. + ID: ssalazar2017 + abstract: 'ChuckPad is a network-based platform for sharing code, modules, patches, + and even entire musical works written on the ChucK programming language and other + music programming platforms. 
ChuckPad provides a single repository and record + of musical code from supported musical programming systems, an interface for organizing, + browsing, and searching this body of code, and a readily accessible means of evaluating + the musical output of code in the repository. ChuckPad consists of an open-source + modular backend service to be run on a network server or cloud infrastructure + and a client library to facilitate integrating end-user applications with the + platform. While ChuckPad has been initially developed for sharing ChucK source + code, its design can accommodate any type of music programming system oriented + around small text- or binary-format documents. To this end, ChuckPad has also + been extended to the Auraglyph handwriting-based graphical music programming system.' address: 'Copenhagen, Denmark' - author: Edgar Berdahl and Matthew Blessing and Matthew Williams and Pacco Tan and - Brygg Ullmer and Jesse Allison - bibtex: "@inproceedings{eberdahl2017,\n abstract = {The concept of embedded acoustic\ - \ systems for diffusing spatial audio is considered. 
This paradigm is enabled\ - \ by advancements in floating-point hardware on inexpensive embedded Linux systems.\ - \ Examples are presented using line array configurations for electroacoustic music\ - \ and for making interactive kiosk and poster systems.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Edgar Berdahl and Matthew Blessing and Matthew Williams\ - \ and Pacco Tan and Brygg Ullmer and Jesse Allison},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176302},\n issn = {2220-4806},\n pages = {426--430},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Spatial Audio Approaches\ - \ for Embedded Sound Art Installations with Loudspeaker Line Arrays},\n url =\ - \ {http://www.nime.org/proceedings/2017/nime2017_paper0080.pdf},\n year = {2017}\n\ + author: Spencer Salazar and Mark Cerqueira + bibtex: "@inproceedings{ssalazar2017,\n abstract = {ChuckPad is a network-based\ + \ platform for sharing code, modules, patches, and even entire musical works written\ + \ on the ChucK programming language and other music programming platforms. ChuckPad\ + \ provides a single repository and record of musical code from supported musical\ + \ programming systems, an interface for organizing, browsing, and searching this\ + \ body of code, and a readily accessible means of evaluating the musical output\ + \ of code in the repository. ChuckPad consists of an open-source modular backend\ + \ service to be run on a network server or cloud infrastructure and a client library\ + \ to facilitate integrating end-user applications with the platform. While ChuckPad\ + \ has been initially developed for sharing ChucK source code, its design can accommodate\ + \ any type of music programming system oriented around small text- or binary-format\ + \ documents. 
To this end, ChuckPad has also been extended to the Auraglyph handwriting-based\ + \ graphical music programming system.},\n address = {Copenhagen, Denmark},\n author\ + \ = {Spencer Salazar and Mark Cerqueira},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176232},\n\ + \ issn = {2220-4806},\n pages = {237--240},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {ChuckPad: Social Coding for Computer Music},\n url =\ + \ {http://www.nime.org/proceedings/2017/nime2017_paper0044.pdf},\n year = {2017}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176302 + doi: 10.5281/zenodo.1176232 issn: 2220-4806 - pages: 426--430 + pages: 237--240 publisher: Aalborg University Copenhagen - title: Spatial Audio Approaches for Embedded Sound Art Installations with Loudspeaker - Line Arrays - url: http://www.nime.org/proceedings/2017/nime2017_paper0080.pdf + title: 'ChuckPad: Social Coding for Computer Music' + url: http://www.nime.org/proceedings/2017/nime2017_paper0044.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: fkeenan2017 - abstract: 'This paper presents the next stage of an investigation into the potential - of historical theatre sound effects as a resource for Sonic Interaction Design - (SID). An acoustic theatre wind machine was constructed, and a digital physical - modelling-based version of this specific machine was programmed using the Sound - Designer''s Toolkit (SDT) in Max/MSP. The acoustic wind machine was fitted with - 3D printed gearing to mechanically drive an optical encoder and control the digital - synthesis engine in real time. The design of this system was informed by an initial - comparison between the acoustic wind machine and the first iteration of its digital - counterpart. 
To explore the main acoustic parameters and the sonic range of the - acoustic and digital wind machines in operation, three simple and distinct rotational - gestures were performed, with the resulting sounds recorded simultaneously, facilitating - an analysis of the real-time performance of both sources. The results are reported, - with an outline of future work. ' + ID: aberndt2017 + abstract: 'Tabletop role-playing games are a collaborative narrative experience. + Throughout gaming sessions, Ambient music and noises are frequently used to enrich + and facilitate the narration. With AmbiDice we introduce a tangible interface + and music generator specially devised for this application scenario. We detail + the technical implementation of the device, the software architecture of the music + system (AmbientMusicBox) and the scripting language to compose Ambient music and + soundscapes. AmbiDice was presented to experienced players and gained positive + feedback and constructive suggestions for further development.' address: 'Copenhagen, Denmark' - author: Fiona Keenan and Sandra Pauletto - bibtex: "@inproceedings{fkeenan2017,\n abstract = {This paper presents the next\ - \ stage of an investigation into the potential of historical theatre sound effects\ - \ as a resource for Sonic Interaction Design (SID). An acoustic theatre wind machine\ - \ was constructed, and a digital physical modelling-based version of this specific\ - \ machine was programmed using the Sound Designer's Toolkit (SDT) in Max/MSP.\ - \ The acoustic wind machine was fitted with 3D printed gearing to mechanically\ - \ drive an optical encoder and control the digital synthesis engine in real time.\ - \ The design of this system was informed by an initial comparison between the\ - \ acoustic wind machine and the first iteration of its digital counterpart. 
To\ - \ explore the main acoustic parameters and the sonic range of the acoustic and\ - \ digital wind machines in operation, three simple and distinct rotational gestures\ - \ were performed, with the resulting sounds recorded simultaneously, facilitating\ - \ an analysis of the real-time performance of both sources. The results are reported,\ - \ with an outline of future work. },\n address = {Copenhagen, Denmark},\n author\ - \ = {Fiona Keenan and Sandra Pauletto},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176304},\n\ - \ issn = {2220-4806},\n pages = {431--435},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Design and Evaluation of a Digital Theatre Wind Machine},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0081.pdf},\n year\ + author: Axel Berndt and Simon Waloschek and Aristotelis Hadjakos and Alexander Leemhuis + bibtex: "@inproceedings{aberndt2017,\n abstract = {Tabletop role-playing games are\ + \ a collaborative narrative experience. Throughout gaming sessions, Ambient music\ + \ and noises are frequently used to enrich and facilitate the narration. With\ + \ AmbiDice we introduce a tangible interface and music generator specially devised\ + \ for this application scenario. We detail the technical implementation of the\ + \ device, the software architecture of the music system (AmbientMusicBox) and\ + \ the scripting language to compose Ambient music and soundscapes. 
AmbiDice was\ + \ presented to experienced players and gained positive feedback and constructive\ + \ suggestions for further development.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Axel Berndt and Simon Waloschek and Aristotelis Hadjakos and Alexander\ + \ Leemhuis},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176234},\n issn\ + \ = {2220-4806},\n pages = {241--244},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {AmbiDice: An Ambient Music Interface for Tabletop Role-Playing Games},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0045.pdf},\n year\ \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176304 + doi: 10.5281/zenodo.1176234 issn: 2220-4806 - pages: 431--435 + pages: 241--244 publisher: Aalborg University Copenhagen - title: Design and Evaluation of a Digital Theatre Wind Machine - url: http://www.nime.org/proceedings/2017/nime2017_paper0081.pdf + title: 'AmbiDice: An Ambient Music Interface for Tabletop Role-Playing Games' + url: http://www.nime.org/proceedings/2017/nime2017_paper0045.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: ihattwick2017 - abstract: 'In this paper we present a discussion of the development of hardware - systems in collaboration with professional artists, a context which presents both - challenges and opportunities for researchers interested in the uses of technology - in artistic practice. The establishment of design specifications within these - contexts can be challenging, especially as they are likely to change during the - development process. 
In order to assist in the consideration of the complete set - of design specifications, we identify seven aspects of hardware design relevant - to our applications: function, aesthetics, support for artistic creation, system - architecture, manufacturing, robustness, and reusability. Examples drawn from - our previous work are used to illustrate the characteristics of interdependency - and temporality, and form the basis of case studies investigating support for - artistic creation and reusability. We argue that the consideration of these design - aspects at appropriate times within the development process may facilitate the - ability of hardware systems to support continued use in professional applications.' + ID: sferguson2017 + abstract: 'This paper describes the sound design for Bloom, a light and sound installation + made up of 1000 distributed independent audio-visual pixel devices, each with + RGB LEDs, Wifi, Accelerometer, GPS sensor, and sound hardware. These types of + systems have been explored previously, but only a few systems have exceeded 30-50 + devices and very few have included sound capability, and therefore the sound design + possibilities for large systems of distributed audio devices are not yet well + understood. In this article we describe the hardware and software implementation + of sound synthesis for this system, and the implications for design of media for + this context.' address: 'Copenhagen, Denmark' - author: Ian Hattwick and Marcelo M. Wanderley - bibtex: "@inproceedings{ihattwick2017,\n abstract = {In this paper we present a\ - \ discussion of the development of hardware systems in collaboration with professional\ - \ artists, a context which presents both challenges and opportunities for researchers\ - \ interested in the uses of technology in artistic practice. The establishment\ - \ of design specifications within these contexts can be challenging, especially\ - \ as they are likely to change during the development process. 
In order to assist\ - \ in the consideration of the complete set of design specifications, we identify\ - \ seven aspects of hardware design relevant to our applications: function, aesthetics,\ - \ support for artistic creation, system architecture, manufacturing, robustness,\ - \ and reusability. Examples drawn from our previous work are used to illustrate\ - \ the characteristics of interdependency and temporality, and form the basis of\ - \ case studies investigating support for artistic creation and reusability. We\ - \ argue that the consideration of these design aspects at appropriate times within\ - \ the development process may facilitate the ability of hardware systems to support\ - \ continued use in professional applications.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Ian Hattwick and Marcelo M. Wanderley},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176306},\n issn = {2220-4806},\n pages = {436--441},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Design of Hardware Systems\ - \ for Professional Artistic Applications},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0082.pdf},\n\ + author: Sam Ferguson and Anthony Rowe and Oliver Bown and Liam Birtles and Chris + Bennewith + bibtex: "@inproceedings{sferguson2017,\n abstract = {This paper describes the sound\ + \ design for Bloom, a light and sound installation made up of 1000 distributed\ + \ independent audio-visual pixel devices, each with RGB LEDs, Wifi, Accelerometer,\ + \ GPS sensor, and sound hardware. These types of systems have been explored previously,\ + \ but only a few systems have exceeded 30-50 devices and very few have included\ + \ sound capability, and therefore the sound design possibilities for large systems\ + \ of distributed audio devices are not yet well understood. 
In this article we\ + \ describe the hardware and software implementation of sound synthesis for this\ + \ system, and the implications for design of media for this context.},\n address\ + \ = {Copenhagen, Denmark},\n author = {Sam Ferguson and Anthony Rowe and Oliver\ + \ Bown and Liam Birtles and Chris Bennewith},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176236},\n issn = {2220-4806},\n pages = {245--250},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {Sound Design for a System of 1000\ + \ Distributed Independent Audio-Visual Devices},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0046.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176306 + doi: 10.5281/zenodo.1176236 issn: 2220-4806 - pages: 436--441 + pages: 245--250 publisher: Aalborg University Copenhagen - title: Design of Hardware Systems for Professional Artistic Applications - url: http://www.nime.org/proceedings/2017/nime2017_paper0082.pdf + title: Sound Design for a System of 1000 Distributed Independent Audio-Visual Devices + url: http://www.nime.org/proceedings/2017/nime2017_paper0046.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: ajensenius2017 - abstract: 'This paper explores sonic microinteraction using muscle sensing through - the Myo armband. The first part presents results from a small series of experiments - aimed at finding the baseline micromotion and muscle activation data of people - being at rest or performing short/small actions. The second part presents the - prototype instrument MicroMyo, built around the concept of making sound with little - motion. The instrument plays with the convention that inputting more energy into - an instrument results in more sound. MicroMyo, on the other hand, is built so - that the less you move, the more it sounds. 
Our user study shows that while such - an "inverse instrument" may seem puzzling at first, it also opens a space for - interesting musical interactions. ' + ID: rvogl2017 + abstract: 'An important part of electronic dance music (EDM) is the so-called beat. It + is defined by the drum track of the piece and is a style defining element. While + producing EDM, creating the drum track tends to be delicate, yet labor intensive + work. In this work we present a touch-interface-based prototype with the goal + to simplify this task. The prototype aims at supporting musicians to create rhythmic + patterns in the context of EDM production and live performances. Starting with + a seed pattern which is provided by the user, a list of variations with varying + degree of deviation from the seed pattern is generated. The interface provides + simple ways to enter, edit, visualize and browse through the patterns. Variations + are generated by means of an artificial neural network which is trained on a database + of drum rhythm patterns extracted from a commercial drum loop library. To evaluate + the user interface and pattern generation quality a user study with experts in + EDM production was conducted. It was found that participants responded positively + to the user interface and the quality of the generated patterns. Furthermore, + the experts consider the prototype helpful for both studio production situations + and live performances.' address: 'Copenhagen, Denmark' - author: Alexander Refsum Jensenius and Victor Gonzalez Sanchez and Agata Zelechowska - and Kari Anne Vadstensvik Bjerkestrand - bibtex: "@inproceedings{ajensenius2017,\n abstract = {This paper explores sonic\ - \ microinteraction using muscle sensing through the Myo armband. The first part\ - \ presents results from a small series of experiments aimed at finding the baseline\ - \ micromotion and muscle activation data of people being at rest or performing\ - \ short/small actions. 
The second part presents the prototype instrument MicroMyo,\ - \ built around the concept of making sound with little motion. The instrument\ - \ plays with the convention that inputting more energy into an instrument results\ - \ in more sound. MicroMyo, on the other hand, is built so that the less you move,\ - \ the more it sounds. Our user study shows that while such an \"inverse instrument\"\ - \ may seem puzzling at first, it also opens a space for interesting musical interactions.\ - \ },\n address = {Copenhagen, Denmark},\n author = {Alexander Refsum Jensenius\ - \ and Victor Gonzalez Sanchez and Agata Zelechowska and Kari Anne Vadstensvik\ - \ Bjerkestrand},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176308},\n\ - \ issn = {2220-4806},\n pages = {442--445},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Exploring the Myo controller for sonic microinteraction},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0083.pdf},\n year\ - \ = {2017}\n}\n" + author: Richard Vogl and Peter Knees + bibtex: "@inproceedings{rvogl2017,\n abstract = {An important part of electronic\ + \ dance music (EDM) is the so-called beat. It is defined by the drum track of\ + \ the piece and is a style defining element. While producing EDM, creating the\ + \ drum track tends to be delicate, yet labor intensive work. In this work we\ + \ present a touch-interface-based prototype with the goal to simplify this task.\ + \ The prototype aims at supporting musicians to create rhythmic patterns in the\ + \ context of EDM production and live performances. Starting with a seed pattern\ + \ which is provided by the user, a list of variations with varying degree of deviation\ + \ from the seed pattern is generated. The interface provides simple ways to enter,\ + \ edit, visualize and browse through the patterns. 
Variations are generated by\ + \ means of an artificial neural network which is trained on a database of drum\ + \ rhythm patterns extracted from a commercial drum loop library. To evaluate\ + \ the user interface and pattern generation quality a user study with experts\ + \ in EDM production was conducted. It was found that participants responded positively\ + \ to the user interface and the quality of the generated patterns. Furthermore,\ + \ the experts consider the prototype helpful for both studio production situations\ + \ and live performances.},\n address = {Copenhagen, Denmark},\n author = {Richard\ + \ Vogl and Peter Knees},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176238},\n\ + \ issn = {2220-4806},\n pages = {251--256},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {An Intelligent Drum Machine for Electronic Dance Music\ + \ Production and Performance},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0047.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176308 + doi: 10.5281/zenodo.1176238 issn: 2220-4806 - pages: 442--445 + pages: 251--256 publisher: Aalborg University Copenhagen - title: Exploring the Myo controller for sonic microinteraction - url: http://www.nime.org/proceedings/2017/nime2017_paper0083.pdf + title: An Intelligent Drum Machine for Electronic Dance Music Production and Performance + url: http://www.nime.org/proceedings/2017/nime2017_paper0047.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: jtilbian2017 - abstract: 'Stride is a language tailored for designing new digital musical instruments - and interfaces. Stride enables designers to fine tune the sound and the interactivity - of the instruments they wish to create. Stride code provides a high-level description - of processes in a platform agnostic manner. 
The syntax used to define these processes - can also be used to define low-level signal processing algorithms. Unlike other - domain-specific languages for sound synthesis and audio processing, Stride can - generate optimized code that can run on any supported hardware platform. The generated - code can be compiled to run on a full featured operating system or bare metal - on embedded devices. Stride goes further and enables a designer to consolidate - various supported hardware and software platforms, define the communication between - them, and target them as a single heterogeneous system.' + ID: mjensen2017 + abstract: 'Neuroimaging is a powerful tool to explore how and why humans engage + in music. Magnetic resonance imaging (MRI) has allowed us to identify brain networks + and regions implicated in a range of cognitive tasks including music perception + and performance. However, MRI-scanners are noisy and cramped, presenting a challenging + environment for playing an instrument. Here, we present an MRI-compatible polyphonic + keyboard with a materials cost of 850 USD, designed and tested for safe use in + 3T (three Tesla) MRI-scanners. We describe design considerations, and prior work + in the field. In addition, we provide recommendations for future designs and comment + on the possibility of using the keyboard in magnetoencephalography (MEG) systems. + Preliminary results indicate a comfortable playing experience with no disturbance + of the imaging process.' address: 'Copenhagen, Denmark' - author: Joseph Tilbian and Andres Cabrera - bibtex: "@inproceedings{jtilbian2017,\n abstract = {Stride is a language tailored\ - \ for designing new digital musical instruments and interfaces. Stride enables\ - \ designers to fine tune the sound and the interactivity of the instruments they\ - \ wish to create. Stride code provides a high-level description of processes in\ - \ a platform agnostic manner. 
The syntax used to define these processes can also\ - \ be used to define low-level signal processing algorithms. Unlike other domain-specific\ - \ languages for sound synthesis and audio processing, Stride can generate optimized\ - \ code that can run on any supported hardware platform. The generated code can\ - \ be compiled to run on a full featured operating system or bare metal on embedded\ - \ devices. Stride goes further and enables a designer to consolidate various supported\ - \ hardware and software platforms, define the communication between them, and\ - \ target them as a single heterogeneous system.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Joseph Tilbian and Andres Cabrera},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176310},\n issn = {2220-4806},\n pages = {446--449},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {Stride for Interactive Musical\ - \ Instrument Design},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0084.pdf},\n\ + author: Martin Snejbjerg Jensen and Ole Adrian Heggli and Patricia Alves Da Mota + and Peter Vuust + bibtex: "@inproceedings{mjensen2017,\n abstract = {Neuroimaging is a powerful tool\ + \ to explore how and why humans engage in music. Magnetic resonance imaging (MRI)\ + \ has allowed us to identify brain networks and regions implicated in a range\ + \ of cognitive tasks including music perception and performance. However, MRI-scanners\ + \ are noisy and cramped, presenting a challenging environment for playing an instrument.\ + \ Here, we present an MRI-compatible polyphonic keyboard with a materials cost\ + \ of 850 USD, designed and tested for safe use in 3T (three Tesla) MRI-scanners.\ + \ We describe design considerations, and prior work in the field. 
In addition,\ + \ we provide recommendations for future designs and comment on the possibility\ + \ of using the keyboard in magnetoencephalography (MEG) systems. Preliminary results\ + \ indicate a comfortable playing experience with no disturbance of the imaging\ + \ process.},\n address = {Copenhagen, Denmark},\n author = {Martin Snejbjerg Jensen\ + \ and Ole Adrian Heggli and Patricia Alves Da Mota and Peter Vuust},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176240},\n issn = {2220-4806},\n pages\ + \ = {257--260},\n publisher = {Aalborg University Copenhagen},\n title = {A low-cost\ + \ MRI compatible keyboard},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0048.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176310 + doi: 10.5281/zenodo.1176240 issn: 2220-4806 - pages: 446--449 + pages: 257--260 publisher: Aalborg University Copenhagen - title: Stride for Interactive Musical Instrument Design - url: http://www.nime.org/proceedings/2017/nime2017_paper0084.pdf + title: A low-cost MRI compatible keyboard + url: http://www.nime.org/proceedings/2017/nime2017_paper0048.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: jfernandez2017 - abstract: 'We present here GeKiPe, a gestural interface for musical expression, - combining images and sounds, generated and controlled in real time by a performer. - GeKiPe is developed as part of a creation project, exploring the control of virtual - instruments through the analysis of gestures specific to instrumentalists, and - to percussionists in particular. GeKiPe was used for the creation of a collaborative - stage performance (Sculpt), in which the musician and their movements are captured - by different methods (infrared Kinect cameras and gesture-sensors on controller - gloves). 
The use of GeKiPe as an alternate sound and image controller allowed - us to combine body movement, musical gestures and audiovisual expressions to create - challenging collaborative performances.' - address: 'Copenhagen, Denmark' - author: José Miguel Fernandez and Thomas Köppel and Nina Verstraete and Grégoire - Lorieux and Alexander Vert and Philippe Spiesser - bibtex: "@inproceedings{jfernandez2017,\n abstract = {We present here GeKiPe, a\ - \ gestural interface for musical expression, combining images and sounds, generated\ - \ and controlled in real time by a performer. GeKiPe is developed as part of a\ - \ creation project, exploring the control of virtual instruments through the analysis\ - \ of gestures specific to instrumentalists, and to percussionists in particular.\ - \ GeKiPe was used for the creation of a collaborative stage performance (Sculpt),\ - \ in which the musician and their movements are captured by different methods\ - \ (infrared Kinect cameras and gesture-sensors on controller gloves). The use\ - \ of GeKiPe as an alternate sound and image controller allowed us to combine body\ - \ movement, musical gestures and audiovisual expressions to create challenging\ - \ collaborative performances.},\n address = {Copenhagen, Denmark},\n author =\ - \ {José Miguel Fernandez and Thomas Köppel and Nina Verstraete and Grégoire Lorieux\ - \ and Alexander Vert and Philippe Spiesser},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176312},\n issn = {2220-4806},\n pages = {450--455},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {GeKiPe, a gesture-based interface\ - \ for audiovisual performance},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0085.pdf},\n\ - \ year = {2017}\n}\n" + ID: slee2017 + abstract: 'Music listening has changed greatly with the emergence of music streaming + services, such as Spotify or YouTube. 
In this paper, we discuss an artistic practice + that organizes streaming videos to perform a real-time improvisation via live + coding. A live coder uses any available video from YouTube, a video streaming + service, as source material to perform an improvised audiovisual piece. The challenge + is to manipulate the emerging media that are streamed from a networked service. + The musical gesture can be limited due to the provided functionalities of the + YouTube API. However, the potential sonic and visual space that a musician can + explore is practically infinite. The practice embraces the juxtaposition of manipulating + emerging media in old-fashioned ways similar to experimental musicians in the + 60''s physically manipulating tape loops or scratching vinyl records on a phonograph + while exploring the possibility of doing so by drawing on the gigantic repository + of all kinds of videos. In this paper, we discuss the challenges of using streaming + videos from the platform as musical materials in computer music and introduce + a live coding environment that we developed for real-time improvisation. ' + address: 'Copenhagen, Denmark' + author: Sang Won Lee and Jungho Bang and Georg Essl + bibtex: "@inproceedings{slee2017,\n abstract = {Music listening has changed greatly\ + \ with the emergence of music streaming services, such as Spotify or YouTube.\ + \ In this paper, we discuss an artistic practice that organizes streaming videos\ + \ to perform a real-time improvisation via live coding. A live coder uses any\ + \ available video from YouTube, a video streaming service, as source material\ + \ to perform an improvised audiovisual piece. The challenge is to manipulate the\ + \ emerging media that are streamed from a networked service. The musical gesture\ + \ can be limited due to the provided functionalities of the YouTube API. However,\ + \ the potential sonic and visual space that a musician can explore is practically\ + \ infinite. 
The practice embraces the juxtaposition of manipulating emerging media\ + \ in old-fashioned ways similar to experimental musicians in the 60's physically\ + \ manipulating tape loops or scratching vinyl records on a phonograph while exploring\ + \ the possibility of doing so by drawing on the gigantic repository of all kinds\ + \ of videos. In this paper, we discuss the challenges of using streaming videos\ + \ from the platform as musical materials in computer music and introduce a live\ + \ coding environment that we developed for real-time improvisation. },\n address\ + \ = {Copenhagen, Denmark},\n author = {Sang Won Lee and Jungho Bang and Georg\ + \ Essl},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176242},\n issn = {2220-4806},\n\ + \ pages = {261--266},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Live Coding YouTube: Organizing Streaming Media for an Audiovisual Performance},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0049.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176312 + doi: 10.5281/zenodo.1176242 issn: 2220-4806 - pages: 450--455 + pages: 261--266 publisher: Aalborg University Copenhagen - title: 'GeKiPe, a gesture-based interface for audiovisual performance' - url: http://www.nime.org/proceedings/2017/nime2017_paper0085.pdf + title: 'Live Coding YouTube: Organizing Streaming Media for an Audiovisual Performance' + url: http://www.nime.org/proceedings/2017/nime2017_paper0049.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: jlarsen2017a - abstract: 'Many musical instruments exhibit an inherent latency or delayed auditory - feedback (DAF) between actuator activation and the occurrence of sound. 
We investigated - how DAF (73ms and 250ms) affects musically trained (MT) and non-musically trained - (NMT) people''s ability to synchronize the audible strum of an actuated guitar - to a metronome at 60bpm and 120bpm. The long DAF matched a subdivision of the - overall tempo. We compared their performance using two different input devices - with feedback before or on activation. While 250ms DAF hardly affected musically - trained participants, non-musically trained participants'' performance declined - substantially both in mean synchronization error and its spread. Neither tempo - nor input devices affected performance.' + ID: skiratli2017 + abstract: 'In this paper we present HIVE, a parametrically designed interactive + sound sculpture with embedded multi-channel digital audio which explores the intersection + of sculptural form and musical instrument design. We examine sculpture as an integral + part of music composition and performance, expanding the definition of musical + instrument to include the gestalt of loudspeakers, architectural spaces, and material + form. After examining some related works, we frame HIVE as an interactive sculpture + for musical expression. We then describe our design and production process, which + hinges on the relationship between sound, space, and sculptural form. Finally, + we discuss the installation and its implications.' address: 'Copenhagen, Denmark' - author: Jeppe Larsen and Hendrik Knoche - bibtex: "@inproceedings{jlarsen2017a,\n abstract = {Many musical instruments exhibit\ - \ an inherent latency or delayed auditory feedback (DAF) between actuator activation\ - \ and the occurrence of sound. We investigated how DAF (73ms and 250ms) affects\ - \ musically trained (MT) and non-musically trained (NMT) people's ability to synchronize\ - \ the audible strum of an actuated guitar to a metronome at 60bpm and 120bpm.\ - \ The long DAF matched a subdivision of the overall tempo. 
We compared their performance\ - \ using two different input devices with feedback before or on activation. While\ - \ 250ms DAF hardly affected musically trained participants, non-musically trained\ - \ participants' performance declined substantially both in mean synchronization\ - \ error and its spread. Neither tempo nor input devices affected performance.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Jeppe Larsen and Hendrik Knoche},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176314},\n issn = {2220-4806},\n\ - \ pages = {456--459},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Hear you later alligator: How delayed auditory feedback affects non-musically\ - \ trained people's strumming},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0086.pdf},\n\ + author: Solen Kiratli and Akshay Cadambi and Yon Visell + bibtex: "@inproceedings{skiratli2017,\n abstract = {In this paper we present HIVE,\ + \ a parametrically designed interactive sound sculpture with embedded multi-channel\ + \ digital audio which explores the intersection of sculptural form and musical\ + \ instrument design. We examine sculpture as an integral part of music composition\ + \ and performance, expanding the definition of musical instrument to include the\ + \ gestalt of loudspeakers, architectural spaces, and material form. After examining\ + \ some related works, we frame HIVE as an interactive sculpture for musical expression.\ + \ We then describe our design and production process, which hinges on the relationship\ + \ between sound, space, and sculptural form. 
Finally, we discuss the installation\ + \ and its implications.},\n address = {Copenhagen, Denmark},\n author = {Solen\ + \ Kiratli and Akshay Cadambi and Yon Visell},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176244},\n issn = {2220-4806},\n pages = {267--270},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {HIVE: An Interactive Sculpture\ + \ for Musical Expression},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0050.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176314 + doi: 10.5281/zenodo.1176244 issn: 2220-4806 - pages: 456--459 + pages: 267--270 publisher: Aalborg University Copenhagen - title: 'Hear you later alligator: How delayed auditory feedback affects non-musically - trained people''s strumming' - url: http://www.nime.org/proceedings/2017/nime2017_paper0086.pdf + title: 'HIVE: An Interactive Sculpture for Musical Expression' + url: http://www.nime.org/proceedings/2017/nime2017_paper0050.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: mmulshine2017 - abstract: 'This paper introduces an audio synthesis library written in C with "object - oriented" programming principles in mind. We call it OOPS: Object-Oriented Programming - Sound, or, "Oops, it''s not quite Object-Oriented Programming in C". The library - consists of several UGens (audio components) and a framework to manage these components. - The design emphases of the library are efficiency and organizational simplicity, - with particular attention to the needs of embedded systems audio development. ' + ID: mblessing2017 + abstract: 'The JoyStyx Quartet is a series of four embedded acoustic instruments. + Each of these instruments is a five-voice granular synthesizer which processes + a different sound source to give each a unique timbre and range. 
The performer + interacts with these voices individually with five joysticks positioned to lay + under the performer''s fingertips. The JoyStyx uses a custom-designed printed + circuit board. This board provides the joystick layout and connects them to an + Arduino Micro, which serializes the ten analog X/Y position values and the five + digital button presses. This data controls the granular and spatial parameters + of a Pure Data patch running on a Raspberry Pi 2. The nature of the JoyStyx construction + causes the frequency response to be coloured by the materials and their geometry, + leading to a unique timbre. This endows the instrument with a more ``analog'''' + or ``natural'''' sound, despite relying on computer-based algorithms. In concert, + the quartet performance with the JoyStyx may potentially be the first performance + ever with a quartet of Embedded Acoustic Instruments.' address: 'Copenhagen, Denmark' - author: Michael Mulshine and Jeff Snyder - bibtex: "@inproceedings{mmulshine2017,\n abstract = {This paper introduces an audio\ - \ synthesis library written in C with \"object oriented\" programming principles\ - \ in mind. We call it OOPS: Object-Oriented Programming Sound, or, \"Oops, it's\ - \ not quite Object-Oriented Programming in C\". The library consists of several\ - \ UGens (audio components) and a framework to manage these components. The design\ - \ emphases of the library are efficiency and organizational simplicity, with particular\ - \ attention to the needs of embedded systems audio development. 
},\n address\ - \ = {Copenhagen, Denmark},\n author = {Michael Mulshine and Jeff Snyder},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176316},\n issn = {2220-4806},\n pages\ - \ = {460--463},\n publisher = {Aalborg University Copenhagen},\n title = {OOPS:\ - \ An Audio Synthesis Library in C for Embedded (and Other) Applications},\n url\ - \ = {http://www.nime.org/proceedings/2017/nime2017_paper0087.pdf},\n year = {2017}\n\ - }\n" + author: Matthew Blessing and Edgar Berdahl + bibtex: "@inproceedings{mblessing2017,\n abstract = {The JoyStyx Quartet is a series\ + \ of four embedded acoustic instruments. Each of these instruments is a five-voice\ + \ granular synthesizer which processes a different sound source to give each a\ + \ unique timbre and range. The performer interacts with these voices individually\ + \ with five joysticks positioned to lay under the performer's fingertips. The\ + \ JoyStyx uses a custom-designed printed circuit board. This board provides the\ + \ joystick layout and connects them to an Arduino Micro, which serializes the\ + \ ten analog X/Y position values and the five digital button presses. This data\ + \ controls the granular and spatial parameters of a Pure Data patch running on\ + \ a Raspberry Pi 2. The nature of the JoyStyx construction causes the frequency\ + \ response to be coloured by the materials and their geometry, leading to a unique\ + \ timbre. This endows the instrument with a more ``analog'' or ``natural'' sound,\ + \ despite relying on computer-based algorithms. 
In concert, the quartet performance\ + \ with the JoyStyx may potentially be the first performance ever with a quartet\ + \ of Embedded Acoustic Instruments.},\n address = {Copenhagen, Denmark},\n author\ + \ = {Matthew Blessing and Edgar Berdahl},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176246},\n\ + \ issn = {2220-4806},\n pages = {271--274},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {The JoyStyx: A Quartet of Embedded Acoustic Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0051.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176316 + doi: 10.5281/zenodo.1176246 issn: 2220-4806 - pages: 460--463 + pages: 271--274 publisher: Aalborg University Copenhagen - title: 'OOPS: An Audio Synthesis Library in C for Embedded (and Other) Applications' - url: http://www.nime.org/proceedings/2017/nime2017_paper0087.pdf + title: 'The JoyStyx: A Quartet of Embedded Acoustic Instruments' + url: http://www.nime.org/proceedings/2017/nime2017_paper0051.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: mkallionpaa2017 - abstract: "“Climb!” is a musical composition that combines the ideas\ - \ of a classical virtuoso piece and a computer game. We present a case study of\ - \ the composition process and realization of “Climb!”, written for\ - \ Disklavier and a digital interactive engine, which was co-developed together\ - \ with the musical score. Specifically, the engine combines a system for recognising\ - \ and responding to musical trigger phrases along with a dynamic digital score\ - \ renderer. This tool chain allows for the composer's original scoring to include\ - \ notational elements such as trigger phrases to be automatically extracted to\ - \ auto-configure the engine for live performance. 
We reflect holistically on the\ - \ development process to date and highlight the emerging challenges and opportunities.\ - \ For example, this includes the potential for further developing the workflow\ - \ around the scoring process and the ways in which support for musical triggers\ - \ has shaped the compositional approach." + ID: gwakefield2017 + abstract: 'The growth of the live coding community has been coupled with a rich + development of experimentation in new domain-specific languages, sometimes idiosyncratic + to the interests of their performers. Nevertheless, programming language design + may seem foreboding to many, steeped in computer science that is distant from + the expertise of music performance. To broaden access to designing unique languages-as-instruments + we developed an online programming environment that offers liveness in the process + of language design as well as performance. The editor utilizes the Parsing Expression + Grammar formalism for language design, and a virtual machine featuring collaborative + multitasking for execution, in order to support a diversity of language concepts + and affordances. The editor is coupled with online tutorial documentation aimed + at the computer music community, with live examples embedded. This paper documents + the design and use of the editor and its underlying virtual machine.' address: 'Copenhagen, Denmark' - author: Maria Kallionpää and Chris Greenhalgh and Adrian Hazzard and David M. Weigl - and Kevin R. Page and Steve Benford - bibtex: "@inproceedings{mkallionpaa2017,\n abstract = {“Climb!” is a\ - \ musical composition that combines the ideas of a classical virtuoso piece and\ - \ a computer game. We present a case study of the composition process and realization\ - \ of “Climb!”, written for Disklavier and a digital interactive engine,\ - \ which was co-developed together with the musical score. 
Specifically, the engine\ - \ combines a system for recognising and responding to musical trigger phrases\ - \ along with a dynamic digital score renderer. This tool chain allows for the\ - \ composer's original scoring to include notational elements such as trigger phrases\ - \ to be automatically extracted to auto-configure the engine for live performance.\ - \ We reflect holistically on the development process to date and highlight the\ - \ emerging challenges and opportunities. For example, this includes the potential\ - \ for further developing the workflow around the scoring process and the ways\ - \ in which support for musical triggers has shaped the compositional approach.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Maria Kallionpää and Chris Greenhalgh\ - \ and Adrian Hazzard and David M. Weigl and Kevin R. Page and Steve Benford},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176318},\n issn = {2220-4806},\n\ - \ pages = {464--469},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Composing and Realising a Game-like Performance for Disklavier and Electronics},\n\ - \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0088.pdf},\n year\ + author: Graham Wakefield and Charles Roberts + bibtex: "@inproceedings{gwakefield2017,\n abstract = {The growth of the live coding\ + \ community has been coupled with a rich development of experimentation in new\ + \ domain-specific languages, sometimes idiosyncratic to the interests of their\ + \ performers. Nevertheless, programming language design may seem foreboding to\ + \ many, steeped in computer science that is distant from the expertise of music\ + \ performance. To broaden access to designing unique languages-as-instruments\ + \ we developed an online programming environment that offers liveness in the process\ + \ of language design as well as performance. 
The editor utilizes the Parsing\ + \ Expression Grammar formalism for language design, and a virtual machine featuring\ + \ collaborative multitasking for execution, in order to support a diversity of\ + \ language concepts and affordances. The editor is coupled with online tutorial\ + \ documentation aimed at the computer music community, with live examples embedded.\ + \ This paper documents the design and use of the editor and its underlying virtual\ + \ machine.},\n address = {Copenhagen, Denmark},\n author = {Graham Wakefield and\ + \ Charles Roberts},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176248},\n\ + \ issn = {2220-4806},\n pages = {275--278},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {A Virtual Machine for Live Coding Language Design},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0052.pdf},\n year\ \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176318 + doi: 10.5281/zenodo.1176248 issn: 2220-4806 - pages: 464--469 + pages: 275--278 publisher: Aalborg University Copenhagen - title: Composing and Realising a Game-like Performance for Disklavier and Electronics - url: http://www.nime.org/proceedings/2017/nime2017_paper0088.pdf + title: A Virtual Machine for Live Coding Language Design + url: http://www.nime.org/proceedings/2017/nime2017_paper0052.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: tmagnusson2017 - abstract: 'New digital musical instruments are difficult for organologists to deal - with, due to their heterogeneous origins, interdisciplinary science, and fluid, - open-ended nature. NIMEs are studied from a range of disciplines, such as musicology, - engineering, human-computer interaction, psychology, design, and performance studies. 
- Attempts to continue traditional organology classifications for electronic and - digital instruments have been made, but with unsatisfactory results. This paper - raises the problem of tree-like classifications of digital instruments, proposing - an alternative approach: musical organics . Musical organics is a philosophical - attempt to tackle the problems inherent in the organological classification of - digital instruments. Shifting the emphasis from hand-coded classification to information - retrieval supported search and clustering, an open and distributed system that - anyone can contribute to is proposed. In order to show how such a system could - incorporate third-party additions, the paper also presents an organological ontogenesis - of three innovative musical instruments: the saxophone, the Minimoog, and the - Reactable. This micro-analysis of innovation in the field of musical instruments - can help forming a framework for the study of how instruments are adopted in musical - culture.' + ID: tdavis2017 + abstract: "There have been many NIME papers over the years on augmented or actuated\ + \ instruments [2][10][19][22]. Many of these papers have focused on the technical\ + \ description of how these instruments have been produced, or as in the case of\ + \ Machover's #8216;Hyperinstruments' [19], on producing instruments over which\ + \ performers have ‘absolute control' and emphasise ‘learnability.\ + \ perfectibility and repeatability' [19]. In contrast to this approach, this paper\ + \ outlines a philosophical position concerning the relationship between instruments\ + \ and performers in improvisational contexts that recognises the agency of the\ + \ instrument within the performance process. 
It builds on a post-phenomenological\ + \ understanding of the human/instrument relationship in which the human and the\ + \ instrument are understood as co-defining entities without fixed boundaries;\ + \ an approach that actively challenges notions of instrumental mastery and ‘absolute\ + \ control'. This paper then takes a practice-based approach to outline how such\ + \ philosophical concerns have fed into the design of an augmented, actuated cello\ + \ system, The Feral Cello, that has been designed to explicitly explore these\ + \ concerns through practice. " address: 'Copenhagen, Denmark' - author: Thor Magnusson - bibtex: "@inproceedings{tmagnusson2017,\n abstract = {New digital musical instruments\ - \ are difficult for organologists to deal with, due to their heterogeneous origins,\ - \ interdisciplinary science, and fluid, open-ended nature. NIMEs are studied from\ - \ a range of disciplines, such as musicology, engineering, human-computer interaction,\ - \ psychology, design, and performance studies. Attempts to continue traditional\ - \ organology classifications for electronic and digital instruments have been\ - \ made, but with unsatisfactory results. This paper raises the problem of tree-like\ - \ classifications of digital instruments, proposing an alternative approach: musical\ - \ organics . Musical organics is a philosophical attempt to tackle the problems\ - \ inherent in the organological classification of digital instruments. Shifting\ - \ the emphasis from hand-coded classification to information retrieval supported\ - \ search and clustering, an open and distributed system that anyone can contribute\ - \ to is proposed. In order to show how such a system could incorporate third-party\ - \ additions, the paper also presents an organological ontogenesis of three innovative\ - \ musical instruments: the saxophone, the Minimoog, and the Reactable. 
This micro-analysis\ - \ of innovation in the field of musical instruments can help forming a framework\ - \ for the study of how instruments are adopted in musical culture.},\n address\ - \ = {Copenhagen, Denmark},\n author = {Thor Magnusson},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176320},\n issn = {2220-4806},\n pages = {470--475},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Contextualizing Musical\ - \ Organics: An Ad-hoc Organological Classification Approach},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0089.pdf},\n\ + author: Tom Davis + bibtex: "@inproceedings{tdavis2017,\n abstract = {There have been many NIME papers\ + \ over the years on augmented or actuated instruments [2][10][19][22]. Many of\ + \ these papers have focused on the technical description of how these instruments\ + \ have been produced, or as in the case of Machover's #8216;Hyperinstruments'\ + \ [19], on producing instruments over which performers have ‘absolute control'\ + \ and emphasise ‘learnability. perfectibility and repeatability' [19]. In\ + \ contrast to this approach, this paper outlines a philosophical position concerning\ + \ the relationship between instruments and performers in improvisational contexts\ + \ that recognises the agency of the instrument within the performance process.\ + \ It builds on a post-phenomenological understanding of the human/instrument relationship\ + \ in which the human and the instrument are understood as co-defining entities\ + \ without fixed boundaries; an approach that actively challenges notions of instrumental\ + \ mastery and ‘absolute control'. This paper then takes a practice-based\ + \ approach to outline how such philosophical concerns have fed into the design\ + \ of an augmented, actuated cello system, The Feral Cello, that has been designed\ + \ to explicitly explore these concerns through practice. 
},\n address = {Copenhagen,\ + \ Denmark},\n author = {Tom Davis},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176250},\n\ + \ issn = {2220-4806},\n pages = {279--282},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {The Feral Cello: A Philosophically Informed Approach\ + \ to an Actuated Instrument},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0053.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176320 + doi: 10.5281/zenodo.1176250 issn: 2220-4806 - pages: 470--475 + pages: 279--282 publisher: Aalborg University Copenhagen - title: 'Contextualizing Musical Organics: An Ad-hoc Organological Classification - Approach' - url: http://www.nime.org/proceedings/2017/nime2017_paper0089.pdf + title: 'The Feral Cello: A Philosophically Informed Approach to an Actuated Instrument' + url: http://www.nime.org/proceedings/2017/nime2017_paper0053.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: sfasciani2017 - abstract: 'We propose an approach to insert physical objects in audio digital signal - processing chains, filtering the sound with the acoustic impulse response of any - solid measured in real-time. We model physical objects as a linear time-invariant - system, which is used as an audio filter. By interacting with the object or with - the measuring hardware we can dynamically modify the characteristics of the filter. - The impulse response is obtained correlating a noise signal injected in the object - through an acoustic actuator with the signal received from an acoustic sensor - placed on the object. We also present an efficient multichannel implementation - of the system, which enables further creative applications beyond audio filtering, - including tangible signal patching and sound spatialization.' 
+ ID: fbernardo2017 + abstract: 'This paper describes an exploratory study of the potential for musical + interaction of Soli, a new radar-based sensing technology developed by Google''s + Advanced Technology and Projects Group (ATAP). We report on our hand-on experience + and outcomes within the Soli Alpha Developers program. We present early experiments + demonstrating the use of Soli for creativity in musical contexts. We discuss the + tools, workflow, the affordances of the prototypes for music making, and the potential + for design of future NIME projects that may integrate Soli.' address: 'Copenhagen, Denmark' - author: Stefano Fasciani - bibtex: "@inproceedings{sfasciani2017,\n abstract = {We propose an approach to insert\ - \ physical objects in audio digital signal processing chains, filtering the sound\ - \ with the acoustic impulse response of any solid measured in real-time. We model\ - \ physical objects as a linear time-invariant system, which is used as an audio\ - \ filter. By interacting with the object or with the measuring hardware we can\ - \ dynamically modify the characteristics of the filter. 
The impulse response is\ - \ obtained correlating a noise signal injected in the object through an acoustic\ - \ actuator with the signal received from an acoustic sensor placed on the object.\ - \ We also present an efficient multichannel implementation of the system, which\ - \ enables further creative applications beyond audio filtering, including tangible\ - \ signal patching and sound spatialization.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Stefano Fasciani},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176322},\n\ - \ issn = {2220-4806},\n pages = {476--480},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Physical Audio Digital Filters},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0090.pdf},\n\ + author: Francisco Bernardo and Nicholas Arner and Paul Batchelor + bibtex: "@inproceedings{fbernardo2017,\n abstract = {This paper describes an exploratory\ + \ study of the potential for musical interaction of Soli, a new radar-based sensing\ + \ technology developed by Google's Advanced Technology and Projects Group (ATAP).\ + \ We report on our hand-on experience and outcomes within the Soli Alpha Developers\ + \ program. We present early experiments demonstrating the use of Soli for creativity\ + \ in musical contexts. 
We discuss the tools, workflow, the affordances of the\ + \ prototypes for music making, and the potential for design of future NIME projects\ + \ that may integrate Soli.},\n address = {Copenhagen, Denmark},\n author = {Francisco\ + \ Bernardo and Nicholas Arner and Paul Batchelor},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176252},\n issn = {2220-4806},\n pages = {283--286},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {O Soli Mio: Exploring\ + \ Millimeter Wave Radar for Musical Interaction},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0054.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176322 + doi: 10.5281/zenodo.1176252 issn: 2220-4806 - pages: 476--480 + pages: 283--286 publisher: Aalborg University Copenhagen - title: Physical Audio Digital Filters - url: http://www.nime.org/proceedings/2017/nime2017_paper0090.pdf + title: 'O Soli Mio: Exploring Millimeter Wave Radar for Musical Interaction' + url: http://www.nime.org/proceedings/2017/nime2017_paper0054.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: btaylor2017 - abstract: 'Distributed music as a performance practice has seen significant growth - over the past decade. This paper surveys the development of the genre, documenting - important precedents, peripheral influences, and core works. We additionally discuss - common modes of implementation in the genre and contrast these approaches and - their motivations.' + ID: clevican2017 + abstract: 'Brain computer interfaces are being widely adopted for music creation + and interpretation, and they are becoming a truly new category of musical instruments. + Indeed, Miranda has coined the term Brain-computer Musical Interface (BCMI) to + refer to this category. 
There are no "plug-n-play" solutions for a BCMI, these + kinds of tools usually require the setup and implementation of particular software + configurations, customized for each EEG device. The Emotiv Insight is a low-cost + EEG apparatus that outputs several kinds of data, such as EEG rhythms or facial + expressions, from the user''s brain activity. We have developed a BCMI, in the + form of a freely available middle-ware, using the Emotiv Insight for EEG input + and signal processing. The obtained data, via blue-tooth is broad-casted over + the network formatted for the OSC protocol. Using this software, we tested the + device''s adequacy as a BCMI by using the provided data in order to control different + sound synthesis algorithms in MaxMSP. We conclude that the Emotiv Insight is an + interesting choice for a BCMI due to its low-cost and ease of use, but we also + question its reliability and robustness.' address: 'Copenhagen, Denmark' - author: Benjamin Taylor - bibtex: "@inproceedings{btaylor2017,\n abstract = {Distributed music as a performance\ - \ practice has seen significant growth over the past decade. This paper surveys\ - \ the development of the genre, documenting important precedents, peripheral influences,\ - \ and core works. 
We additionally discuss common modes of implementation in the\ - \ genre and contrast these approaches and their motivations.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Benjamin Taylor},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176324},\n\ - \ issn = {2220-4806},\n pages = {481--486},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {A History of the Audience as a Speaker Array},\n url\ - \ = {http://www.nime.org/proceedings/2017/nime2017_paper0091.pdf},\n year = {2017}\n\ - }\n" + author: Constanza Levican and Andres Aparicio and Vernon Belaunde and Rodrigo Cadiz + bibtex: "@inproceedings{clevican2017,\n abstract = {Brain computer interfaces are\ + \ being widely adopted for music creation and interpretation, and they are becoming\ + \ a truly new category of musical instruments. Indeed, Miranda has coined the\ + \ term Brain-computer Musical Interface (BCMI) to refer to this category. There\ + \ are no \"plug-n-play\" solutions for a BCMI, these kinds of tools usually require\ + \ the setup and implementation of particular software configurations, customized\ + \ for each EEG device. The Emotiv Insight is a low-cost EEG apparatus that outputs\ + \ several kinds of data, such as EEG rhythms or facial expressions, from the user's\ + \ brain activity. We have developed a BCMI, in the form of a freely available\ + \ middle-ware, using the Emotiv Insight for EEG input and signal processing. The\ + \ obtained data, via blue-tooth is broad-casted over the network formatted for\ + \ the OSC protocol. Using this software, we tested the device's adequacy as a\ + \ BCMI by using the provided data in order to control different sound synthesis\ + \ algorithms in MaxMSP. 
We conclude that the Emotiv Insight is an interesting\ + \ choice for a BCMI due to its low-cost and ease of use, but we also question\ + \ its reliability and robustness.},\n address = {Copenhagen, Denmark},\n author\ + \ = {Constanza Levican and Andres Aparicio and Vernon Belaunde and Rodrigo Cadiz},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176254},\n issn = {2220-4806},\n\ + \ pages = {287--290},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Insight2OSC: using the brain and the body as a musical instrument with the\ + \ Emotiv Insight},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0055.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176324 + doi: 10.5281/zenodo.1176254 issn: 2220-4806 - pages: 481--486 + pages: 287--290 publisher: Aalborg University Copenhagen - title: A History of the Audience as a Speaker Array - url: http://www.nime.org/proceedings/2017/nime2017_paper0091.pdf + title: 'Insight2OSC: using the brain and the body as a musical instrument with the + Emotiv Insight' + url: http://www.nime.org/proceedings/2017/nime2017_paper0055.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: togata2017 - abstract: 'This paper is about a novel robotic guitar that establishes shared control - between human performers and mechanical actuators. Unlike other mechatronic guitar - instruments that perform pre-programmed music automatically, this guitar allows - the human and actuators to produce sounds jointly; there exists a distributed - control between the human and robotic components. The interaction allows human - performers to have full control over the melodic, harmonic, and expressive elements - of the instrument while mechanical actuators excite and dampen the string with - a rhythmic pattern. 
Guitarists can still access the fretboard without the physical - interference of a mechatronic system, so they can play melodies and chords as - well as perform bends, slides, vibrato, and other expressive techniques. Leveraging - the capabilities of mechanical actuators, the mechanized hammers can output complex - rhythms and speeds not attainable by humans. Furthermore, the rhythmic patterns - can be algorithmically or stochastically generated by the hammer, which supports - real-time interactive improvising.' - address: 'Copenhagen, Denmark' - author: Takumi Ogata and Gil Weinberg - bibtex: "@inproceedings{togata2017,\n abstract = {This paper is about a novel robotic\ - \ guitar that establishes shared control between human performers and mechanical\ - \ actuators. Unlike other mechatronic guitar instruments that perform pre-programmed\ - \ music automatically, this guitar allows the human and actuators to produce sounds\ - \ jointly; there exists a distributed control between the human and robotic components.\ - \ The interaction allows human performers to have full control over the melodic,\ - \ harmonic, and expressive elements of the instrument while mechanical actuators\ - \ excite and dampen the string with a rhythmic pattern. Guitarists can still\ - \ access the fretboard without the physical interference of a mechatronic system,\ - \ so they can play melodies and chords as well as perform bends, slides, vibrato,\ - \ and other expressive techniques. Leveraging the capabilities of mechanical actuators,\ - \ the mechanized hammers can output complex rhythms and speeds not attainable\ - \ by humans. 
Furthermore, the rhythmic patterns can be algorithmically or stochastically\ - \ generated by the hammer, which supports real-time interactive improvising.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Takumi Ogata and Gil Weinberg},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176326},\n issn = {2220-4806},\n\ - \ pages = {487--488},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Robotically Augmented Electric Guitar for Shared Control},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0092.pdf},\n\ + ID: bsmith2017 + abstract: 'Interactive real-time spatialization of audio over large immersive speaker + arrays poses significant interface and control challenges for live performers. + Fluidly moving and mixing numerous sound objects over unique speaker configurations + requires specifically designed software interfaces and systems. Currently available + software solutions either impose configuration limitations, require extreme degrees + of expertise, or extensive configuration time to use. A new system design, focusing + on simplicity, ease of use, and live interactive spatialization is described. + Automation of array calibration and tuning is included to facilitate rapid deployment + and configuration. Comparisons with other solutions show favorability in terms + of complexity, depth of control, and required features. ' + address: 'Copenhagen, Denmark' + author: Benjamin Smith and Neal Anderson + bibtex: "@inproceedings{bsmith2017,\n abstract = {Interactive real-time spatialization\ + \ of audio over large immersive speaker arrays poses significant interface and\ + \ control challenges for live performers. Fluidly moving and mixing numerous sound\ + \ objects over unique speaker configurations requires specifically designed software\ + \ interfaces and systems. 
Currently available software solutions either impose\ + \ configuration limitations, require extreme degrees of expertise, or extensive\ + \ configuration time to use. A new system design, focusing on simplicity, ease\ + \ of use, and live interactive spatialization is described. Automation of array\ + \ calibration and tuning is included to facilitate rapid deployment and configuration.\ + \ Comparisons with other solutions show favorability in terms of complexity, depth\ + \ of control, and required features. },\n address = {Copenhagen, Denmark},\n\ + \ author = {Benjamin Smith and Neal Anderson},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176256},\n issn = {2220-4806},\n pages = {291--295},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {ArraYnger: New Interface for Interactive\ + \ 360° Spatialization},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0056.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176326 + doi: 10.5281/zenodo.1176256 issn: 2220-4806 - pages: 487--488 + pages: 291--295 publisher: Aalborg University Copenhagen - title: Robotically Augmented Electric Guitar for Shared Control - url: http://www.nime.org/proceedings/2017/nime2017_paper0092.pdf + title: 'ArraYnger: New Interface for Interactive 360° Spatialization' + url: http://www.nime.org/proceedings/2017/nime2017_paper0056.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: bneill2017 - abstract: 'Ben Neill will demonstrate the mutantrumpet, a hybrid electro-acoustic - instrument. The capabilities of the mutantrumpet are designed to erase the boundaries - between acoustic and electronic musical creation and performance. It is both an - expanded acoustic instrument and an electronic controller capable of interacting - with audio and video simultaneously. 
The demonstration will explore the multi-faceted - possibilities that are offered by the mutantrumpet in several brief, wide ranging - musical examples composed and improvised by Neill. Interactive video performance - techniques and collaborations will be integrated into the excerpts. The aesthetics - of live intermedia performance will be discussed along with a technical overview - of the interface and associated software applications Junxion and RoSa from STEIM, - Amsterdam. Reflections on the development of a virtuosic performance technique - with a hybrid instrument and influences from collaborators Robert Moog, David - Behrman, Ralph Abraham, DJ Spooky and others will be included in the presentation.' + ID: aleslie2017 + abstract: "This paper describes a series of fashionable sounding shoe and foot based\ + \ appendages made between 2007-2017. The research attempts to demake the physical\ + \ high-heeled shoe through the iterative design and fabrication of new foot based\ + \ musical instruments. This process of demaking also changes the usual purpose\ + \ of shoes and associated stereotypes of high heeled shoe wear. Through turning\ + \ high heeled shoes into wearable musical instruments for theatrical audio visual\ + \ expressivity we question why so many musical instruments are made for the hands\ + \ and not the feet? With this creative work we explore ways to redress the imbalance\ + \ and consider what a genuinely “foot based” expressivity could be.\ + \ " address: 'Copenhagen, Denmark' - author: Ben Neill - bibtex: "@inproceedings{bneill2017,\n abstract = {Ben Neill will demonstrate the\ - \ mutantrumpet, a hybrid electro-acoustic instrument. The capabilities of the\ - \ mutantrumpet are designed to erase the boundaries between acoustic and electronic\ - \ musical creation and performance. 
It is both an expanded acoustic instrument\ - \ and an electronic controller capable of interacting with audio and video simultaneously.\ - \ The demonstration will explore the multi-faceted possibilities that are offered\ - \ by the mutantrumpet in several brief, wide ranging musical examples composed\ - \ and improvised by Neill. Interactive video performance techniques and collaborations\ - \ will be integrated into the excerpts. The aesthetics of live intermedia performance\ - \ will be discussed along with a technical overview of the interface and associated\ - \ software applications Junxion and RoSa from STEIM, Amsterdam. Reflections on\ - \ the development of a virtuosic performance technique with a hybrid instrument\ - \ and influences from collaborators Robert Moog, David Behrman, Ralph Abraham,\ - \ DJ Spooky and others will be included in the presentation.},\n address = {Copenhagen,\ - \ Denmark},\n author = {Ben Neill},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176328},\n\ - \ issn = {2220-4806},\n pages = {489--490},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {The Mutantrumpet},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0093.pdf},\n\ + author: Alexandra Murray-Leslie and Andrew Johnston + bibtex: "@inproceedings{aleslie2017,\n abstract = {This paper describes a series\ + \ of fashionable sounding shoe and foot based appendages made between 2007-2017.\ + \ The research attempts to demake the physical high-heeled shoe through the iterative\ + \ design and fabrication of new foot based musical instruments. This process of\ + \ demaking also changes the usual purpose of shoes and associated stereotypes\ + \ of high heeled shoe wear. 
Through turning high heeled shoes into wearable musical\ + \ instruments for theatrical audio visual expressivity we question why so many\ + \ musical instruments are made for the hands and not the feet? With this creative\ + \ work we explore ways to redress the imbalance and consider what a genuinely\ + \ “foot based” expressivity could be. },\n address = {Copenhagen,\ + \ Denmark},\n author = {Alexandra Murray-Leslie and Andrew Johnston},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176258},\n issn = {2220-4806},\n pages\ + \ = {296--301},\n publisher = {Aalborg University Copenhagen},\n title = {The\ + \ Liberation of the Feet: Demaking the High Heeled Shoe For Theatrical Audio-Visual\ + \ Expression},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0057.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176328 + doi: 10.5281/zenodo.1176258 issn: 2220-4806 - pages: 489--490 + pages: 296--301 publisher: Aalborg University Copenhagen - title: The Mutantrumpet - url: http://www.nime.org/proceedings/2017/nime2017_paper0093.pdf + title: 'The Liberation of the Feet: Demaking the High Heeled Shoe For Theatrical + Audio-Visual Expression' + url: http://www.nime.org/proceedings/2017/nime2017_paper0057.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: ssmallwood2017 - abstract: 'This paper/poster describes the development of an experimental listening - game called Locus Sono; a 3D audio puzzle game where listening and exploration - are the key forms of interaction. The game was developed by a motivation to create - an interactive audio environment in which sound is the key to solving in-game - puzzles. 
This work is a prototype for a larger planned work and illustrates a - first step in a more complex audio gaming scenario, which will also be partially - described in this short paper' + ID: crose2017 + abstract: 'Wearable sensor technology and aerial dance movement can be integrated + to provide a new performance practice and perspective on interactive kinesonic + composition. SALTO (Sonic Aerialist eLecTrOacoustic system), is a system which + allows for the creation of collaborative works between electroacoustic composer + and aerial choreographer. The system incorporates aerial dance trapeze movement, + sensors, digital synthesis, and electroacoustic composition. In SALTO, the Max + software programming environment employs parameters and mapping techniques for + translating the performer''s movement and internal experience into sound. Splinter + (2016), a work for aerial choreographer/performer and the SALTO system, highlights + the expressive qualities of the system in a performance setting.' address: 'Copenhagen, Denmark' - author: Scott Smallwood - bibtex: "@inproceedings{ssmallwood2017,\n abstract = {This paper/poster describes\ - \ the development of an experimental listening game called Locus Sono; a 3D audio\ - \ puzzle game where listening and exploration are the key forms of interaction.\ - \ The game was developed by a motivation to create an interactive audio environment\ - \ in which sound is the key to solving in-game puzzles. 
This work is a prototype\ - \ for a larger planned work and illustrates a first step in a more complex audio\ - \ gaming scenario, which will also be partially described in this short paper},\n\ - \ address = {Copenhagen, Denmark},\n author = {Scott Smallwood},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176330},\n issn = {2220-4806},\n pages = {491--492},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Locus Sono: A Listening\ - \ Game for NIME},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0094.pdf},\n\ + author: Christiana Rose + bibtex: "@inproceedings{crose2017,\n abstract = {Wearable sensor technology and\ + \ aerial dance movement can be integrated to provide a new performance practice\ + \ and perspective on interactive kinesonic composition. SALTO (Sonic Aerialist\ + \ eLecTrOacoustic system), is a system which allows for the creation of collaborative\ + \ works between electroacoustic composer and aerial choreographer. The system\ + \ incorporates aerial dance trapeze movement, sensors, digital synthesis, and\ + \ electroacoustic composition. In SALTO, the Max software programming environment\ + \ employs parameters and mapping techniques for translating the performer's movement\ + \ and internal experience into sound. 
Splinter (2016), a work for aerial choreographer/performer\ + \ and the SALTO system, highlights the expressive qualities of the system in a\ + \ performance setting.},\n address = {Copenhagen, Denmark},\n author = {Christiana\ + \ Rose},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176260},\n issn = {2220-4806},\n\ + \ pages = {302--306},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {SALTO: A System for Musical Expression in the Aerial Arts},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0058.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176330 + doi: 10.5281/zenodo.1176260 issn: 2220-4806 - pages: 491--492 + pages: 302--306 publisher: Aalborg University Copenhagen - title: 'Locus Sono: A Listening Game for NIME' - url: http://www.nime.org/proceedings/2017/nime2017_paper0094.pdf + title: 'SALTO: A System for Musical Expression in the Aerial Arts' + url: http://www.nime.org/proceedings/2017/nime2017_paper0058.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: rpolfreman2017 - abstract: "2017 marks the 40th anniversary of the Rubik's Cube (under its original\ - \ name the Magic Cube). This paper-demonstration describes explorations of the\ - \ cube as a performance controller for music. The pattern of colors on a face\ - \ of the cube is detected via USB video camera and supplemented by EMG data from\ - \ the performer to model the performer's interaction with the cube. This system\ - \ was trialed in a variety of audio scenarios and deployed in the composition\ - \ “Rubik's Study No. 1”, a work based on solving the cube with audible\ - \ connections to 1980's pop culture. The cube was found to be an engaging musical\ - \ controller, with further potential to be explored." 
+ ID: mbaalman2017 + abstract: 'Academic research projects focusing on wireless sensor networks rarely + live on after the funded research project has ended. In contrast, the Sense/Stage + project has evolved over the past 6 years outside of an academic context and has + been used in a multitude of artistic projects. This paper presents how the project + has developed, the diversity of the projects that have been made with the technology, + feedback from users on the system and an outline for the design of a successor + to the current system. ' address: 'Copenhagen, Denmark' - author: Richard Polfreman and Benjamin Oliver - bibtex: "@inproceedings{rpolfreman2017,\n abstract = {2017 marks the 40th anniversary\ - \ of the Rubik's Cube (under its original name the Magic Cube). This paper-demonstration\ - \ describes explorations of the cube as a performance controller for music. The\ - \ pattern of colors on a face of the cube is detected via USB video camera and\ - \ supplemented by EMG data from the performer to model the performer's interaction\ - \ with the cube. This system was trialed in a variety of audio scenarios and deployed\ - \ in the composition “Rubik's Study No. 1”, a work based on solving\ - \ the cube with audible connections to 1980's pop culture. 
The cube was found\ - \ to be an engaging musical controller, with further potential to be explored.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Richard Polfreman and Benjamin\ - \ Oliver},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1176332},\n issn = {2220-4806},\n\ - \ pages = {493--494},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Rubik's Cube, Music's Cube},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0095.pdf},\n\ + author: Marije Baalman + bibtex: "@inproceedings{mbaalman2017,\n abstract = {Academic research projects focusing\ + \ on wireless sensor networks rarely live on after the funded research project\ + \ has ended. In contrast, the Sense/Stage project has evolved over the past 6\ + \ years outside of an academic context and has been used in a multitude of artistic\ + \ projects. This paper presents how the project has developed, the diversity of\ + \ the projects that have been made with the technology, feedback from users on\ + \ the system and an outline for the design of a successor to the current system.\ + \ },\n address = {Copenhagen, Denmark},\n author = {Marije Baalman},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176262},\n issn = {2220-4806},\n pages\ + \ = {307--312},\n publisher = {Aalborg University Copenhagen},\n title = {Wireless\ + \ Sensing for Artistic Applications, a Reflection on Sense/Stage to Motivate the\ + \ Design of the Next Stage},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0059.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176332 + doi: 10.5281/zenodo.1176262 issn: 2220-4806 - pages: 493--494 + pages: 307--312 publisher: Aalborg University Copenhagen - title: 'Rubik''s Cube, Music''s 
Cube' - url: http://www.nime.org/proceedings/2017/nime2017_paper0095.pdf + title: 'Wireless Sensing for Artistic Applications, a Reflection on Sense/Stage + to Motivate the Design of the Next Stage' + url: http://www.nime.org/proceedings/2017/nime2017_paper0059.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: cmartin2017 - abstract: 'MicroJam is a mobile app for sharing tiny touch-screen performances. - Mobile applications that streamline creativity and social interaction have enabled - a very broad audience to develop their own creative practices. While these apps - have been very successful in visual arts (particularly photography), the idea - of social music-making has not had such a broad impact. MicroJam includes several - novel performance concepts intended to engage the casual music maker and inspired - by current trends in social creativity support tools. Touch-screen performances - are limited to five seconds, instrument settings are posed as sonic ``filters'''', - and past performances are arranged as a timeline with replies and layers. These - features of MicroJam encourage users not only to perform music more frequently, - but to engage with others in impromptu ensemble music making.' + ID: ibukvic2017 + abstract: 'The following paper explores the Inconspicuous Head-Mounted Display within + the context of a live technology-mediated music performance. For this purpose + in 2014 the authors have developed Glasstra, an Android/Google Glass networked + display designed to project real-time orchestra status to the conductor, with + the primary goal of minimizing the on-stage technology footprint and with it audience''s + potential distraction with technology. In preparation for its deployment in a + real-world performance setting the team conducted a user study aimed to define + relevant constraints of the Google Glass display. 
Based on the observed data, + a conductor part from an existing laptop orchestra piece was retrofitted, thereby + replacing the laptop with a Google Glass running Glasstra and a similarly inconspicuous + forearm-mounted Wiimote controller. Below we present findings from the user study + that have informed the design of the visual display, as well as multi-perspective + observations from a series of real-world performances, including the designer, + user, and the audience. We use findings to offer a new hypothesis, an inverse + uncanny valley or what we refer to as uncanny mountain pertaining to audience''s + potential distraction with the technology within the context of a live technology-mediated + music performance as a function of minimizing on-stage technological footprint.' address: 'Copenhagen, Denmark' - author: Charles Martin and Jim Torresen - bibtex: "@inproceedings{cmartin2017,\n abstract = {MicroJam is a mobile app for\ - \ sharing tiny touch-screen performances. Mobile applications that streamline\ - \ creativity and social interaction have enabled a very broad audience to develop\ - \ their own creative practices. While these apps have been very successful in\ - \ visual arts (particularly photography), the idea of social music-making has\ - \ not had such a broad impact. MicroJam includes several novel performance concepts\ - \ intended to engage the casual music maker and inspired by current trends in\ - \ social creativity support tools. Touch-screen performances are limited to five\ - \ seconds, instrument settings are posed as sonic ``filters'', and past performances\ - \ are arranged as a timeline with replies and layers. 
These features of MicroJam\ - \ encourage users not only to perform music more frequently, but to engage with\ - \ others in impromptu ensemble music making.},\n address = {Copenhagen, Denmark},\n\ - \ author = {Charles Martin and Jim Torresen},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1176334},\n issn = {2220-4806},\n pages = {495--496},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {MicroJam: An App for Sharing Tiny\ - \ Touch-Screen Performances},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0096.pdf},\n\ + author: Ivica Bukvic and Spencer Lee + bibtex: "@inproceedings{ibukvic2017,\n abstract = {The following paper explores\ + \ the Inconspicuous Head-Mounted Display within the context of a live technology-mediated\ + \ music performance. For this purpose in 2014 the authors have developed Glasstra,\ + \ an Android/Google Glass networked display designed to project real-time orchestra\ + \ status to the conductor, with the primary goal of minimizing the on-stage technology\ + \ footprint and with it audience's potential distraction with technology. In preparation\ + \ for its deployment in a real-world performance setting the team conducted a\ + \ user study aimed to define relevant constraints of the Google Glass display.\ + \ Based on the observed data, a conductor part from an existing laptop orchestra\ + \ piece was retrofitted, thereby replacing the laptop with a Google Glass running\ + \ Glasstra and a similarly inconspicuous forearm-mounted Wiimote controller. Below\ + \ we present findings from the user study that have informed the design of the\ + \ visual display, as well as multi-perspective observations from a series of real-world\ + \ performances, including the designer, user, and the audience. 
We use findings\ + \ to offer a new hypothesis, an inverse uncanny valley or what we refer to as\ + \ uncanny mountain pertaining to audience's potential distraction with the technology\ + \ within the context of a live technology-mediated music performance as a function\ + \ of minimizing on-stage technological footprint.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Ivica Bukvic and Spencer Lee},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176264},\n\ + \ issn = {2220-4806},\n pages = {313--318},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Glasstra: Exploring the Use of an Inconspicuous Head\ + \ Mounted Display in a Live Technology-Mediated Music Performance},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0060.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176334 + doi: 10.5281/zenodo.1176264 issn: 2220-4806 - pages: 495--496 + pages: 313--318 publisher: Aalborg University Copenhagen - title: 'MicroJam: An App for Sharing Tiny Touch-Screen Performances' - url: http://www.nime.org/proceedings/2017/nime2017_paper0096.pdf + title: 'Glasstra: Exploring the Use of an Inconspicuous Head Mounted Display in + a Live Technology-Mediated Music Performance' + url: http://www.nime.org/proceedings/2017/nime2017_paper0060.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: rnakagawa2017 - abstract: "The AEVE provides for a brain-computer-interface (BCI) controlled audiovisual\ - \ experience, presented through a virtual reality head-mounted display (VRHMD).\ - \ We have developed an audiovisual art piece where progression through 3 sections\ - \ and 1 extra section occurs using an “Attention” value derived from\ - \ the Electroencephalography (EEG) data. The only interaction in this work is\ - \ perspective that is participant's view, and the EEG data. 
However, we believe\ - \ the simple interaction amplifies the participant's feeling of immersion. Through\ - \ the narrative of the work and the simple interaction, we attempt to connect\ - \ some concepts such as audiovisual experience, virtual reality (VR), BCI, grid,\ - \ consciousness, memory, universe, etc. in a minimal way." + ID: sbarton2017 + abstract: "Human-robot musical interaction typically consists of independent, physically-separated\ + \ agents. We developed Cyther --- a human-playable, self-tuning robotic zither\ + \ – to allow a human and a robot to interact cooperatively through the same\ + \ physical medium to generate music. The resultant co- dependence creates new\ + \ responsibilities, roles, and expressive possibilities for human musicians. We\ + \ describe some of these possibilities in the context of both technical features\ + \ and artistic implementations of the system." address: 'Copenhagen, Denmark' - author: Ryu Nakagawa and Shotaro Hirata - bibtex: "@inproceedings{rnakagawa2017,\n abstract = {The AEVE provides for a brain-computer-interface\ - \ (BCI) controlled audiovisual experience, presented through a virtual reality\ - \ head-mounted display (VRHMD). We have developed an audiovisual art piece where\ - \ progression through 3 sections and 1 extra section occurs using an “Attention”\ - \ value derived from the Electroencephalography (EEG) data. The only interaction\ - \ in this work is perspective that is participant's view, and the EEG data. However,\ - \ we believe the simple interaction amplifies the participant's feeling of immersion.\ - \ Through the narrative of the work and the simple interaction, we attempt to\ - \ connect some concepts such as audiovisual experience, virtual reality (VR),\ - \ BCI, grid, consciousness, memory, universe, etc. 
in a minimal way.},\n address\ - \ = {Copenhagen, Denmark},\n author = {Ryu Nakagawa and Shotaro Hirata},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1176336},\n issn = {2220-4806},\n pages\ - \ = {497--498},\n publisher = {Aalborg University Copenhagen},\n title = {AEVE:\ - \ An Audiovisual Experience Using VRHMD and EEG},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0097.pdf},\n\ - \ year = {2017}\n}\n" + author: Scott Barton and Ethan Prihar and Paulo Carvalho + bibtex: "@inproceedings{sbarton2017,\n abstract = {Human-robot musical interaction\ + \ typically consists of independent, physically-separated agents. We developed\ + \ Cyther --- a human-playable, self-tuning robotic zither – to allow a human\ + \ and a robot to interact cooperatively through the same physical medium to generate\ + \ music. The resultant co- dependence creates new responsibilities, roles, and\ + \ expressive possibilities for human musicians. 
We describe some of these possibilities\ + \ in the context of both technical features and artistic implementations of the\ + \ system.},\n address = {Copenhagen, Denmark},\n author = {Scott Barton and Ethan\ + \ Prihar and Paulo Carvalho},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176266},\n\ + \ issn = {2220-4806},\n pages = {319--324},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Cyther: a Human-playable, Self-tuning Robotic Zither},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0061.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176336 + doi: 10.5281/zenodo.1176266 issn: 2220-4806 - pages: 497--498 + pages: 319--324 publisher: Aalborg University Copenhagen - title: 'AEVE: An Audiovisual Experience Using VRHMD and EEG' - url: http://www.nime.org/proceedings/2017/nime2017_paper0097.pdf + title: 'Cyther: a Human-playable, Self-tuning Robotic Zither' + url: http://www.nime.org/proceedings/2017/nime2017_paper0061.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: rcadiz2017 - abstract: 'The Arcontinuo is an electronic musical instrument designed from a perspective - based in the study of their potential users and their interaction with existing - musical interfaces. Arcontinuo aims to change the way electronic music is performed, - as it is capable of incorporating natural and ergonomic human gestures, allowing - the musician to engage with the instrument and as a result, enhance the connection - with the audience. Arcontinuo challenges the notion of what a musical gesture - is and goes against traditional ways of performing music, by proposing a concept - that we call smart playing mapping, as a way of achieving a better and more meaningful - performance.' 
+ ID: bliang2017 + abstract: 'This paper presents the results of a study of piano pedalling techniques + on the sustain pedal using a newly designed measurement system named Piano Pedaller. + The system is comprised of an optical sensor mounted in the piano pedal bearing + block and an embedded platform for recording audio and sensor data. This enables + recording the pedalling gesture of real players and the piano sound under normal + playing conditions. Using the gesture data collected from the system, the task + of classifying these data by pedalling technique was undertaken using a Support + Vector Machine (SVM). Results can be visualised in an audio based score following + application to show pedalling together with the player''s position in the score.' address: 'Copenhagen, Denmark' - author: Rodrigo Cadiz and Alvaro Sylleros - bibtex: "@inproceedings{rcadiz2017,\n abstract = {The Arcontinuo is an electronic\ - \ musical instrument designed from a perspective based in the study of their potential\ - \ users and their interaction with existing musical interfaces. Arcontinuo aims\ - \ to change the way electronic music is performed, as it is capable of incorporating\ - \ natural and ergonomic human gestures, allowing the musician to engage with the\ - \ instrument and as a result, enhance the connection with the audience. 
Arcontinuo\ - \ challenges the notion of what a musical gesture is and goes against traditional\ - \ ways of performing music, by proposing a concept that we call smart playing\ - \ mapping, as a way of achieving a better and more meaningful performance.},\n\ - \ address = {Copenhagen, Denmark},\n author = {Rodrigo Cadiz and Alvaro Sylleros},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176338},\n issn = {2220-4806},\n\ - \ pages = {499--500},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Arcontinuo: the Instrument of Change},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0098.pdf},\n\ + author: Beici Liang and György Fazekas and Andrew McPherson and Mark Sandler + bibtex: "@inproceedings{bliang2017,\n abstract = {This paper presents the results\ + \ of a study of piano pedalling techniques on the sustain pedal using a newly\ + \ designed measurement system named Piano Pedaller. The system is comprised of\ + \ an optical sensor mounted in the piano pedal bearing block and an embedded platform\ + \ for recording audio and sensor data. This enables recording the pedalling gesture\ + \ of real players and the piano sound under normal playing conditions. Using the\ + \ gesture data collected from the system, the task of classifying these data by\ + \ pedalling technique was undertaken using a Support Vector Machine (SVM). 
Results\ + \ can be visualised in an audio based score following application to show pedalling\ + \ together with the player's position in the score.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Beici Liang and György Fazekas and Andrew McPherson and\ + \ Mark Sandler},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176268},\n\ + \ issn = {2220-4806},\n pages = {325--329},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Piano Pedaller: A Measurement System for Classification\ + \ and Visualisation of Piano Pedalling Techniques},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0062.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176338 + doi: 10.5281/zenodo.1176268 issn: 2220-4806 - pages: 499--500 + pages: 325--329 publisher: Aalborg University Copenhagen - title: 'Arcontinuo: the Instrument of Change' - url: http://www.nime.org/proceedings/2017/nime2017_paper0098.pdf + title: 'Piano Pedaller: A Measurement System for Classification and Visualisation + of Piano Pedalling Techniques' + url: http://www.nime.org/proceedings/2017/nime2017_paper0062.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: rblazey2017 - abstract: 'Kalimbo is an extended kalimba, built from repurposed materials and fitted - with sensors that enable it to function as a reductionist control interface through - physical gestures and capacitive sensing. The work demonstrates an attempt to - apply theories and techniques from visual collage art to the concept of musical - performance ecologies. The body of the instrument emerged from material-led making, - and the disparate elements of a particular musical performance ecology (acoustic - instrument, audio effects, samples, synthesis and controls) are juxtaposed and - unified into one coherent whole. 
As such, Kalimbo demonstrates how visual arts, - in particular collage, can inform the design and creation of new musical instruments, - interfaces and streamlined performance ecologies.' + ID: jlong2017 + abstract: 'While most musical robots that are capable of playing the drum kit utilise + a relatively simple striking motion, the hi-hat, with the additional degree of + motion provided by its pedal, requires more involved control strategies in order + to produce expressive performances on the instrument. A robotic hi-hat should + be able to control not only the striking timing and velocity to a high degree + of precision, but also dynamically control the position of the two cymbals in + a way that is consistent, reproducible and intuitive for composers and other musicians + to use. This paper describes the creation of a new, multifaceted hi-hat control + system that utilises a closed-loop distance sensing and calibration mechanism + in conjunction with an embedded musical information retrieval system to continuously + calibrate the hi-hat''s action both before and during a musical performance. This + is achieved by combining existing musical robotic devices with a newly created + linear actuation mechanism, custom amplification, acquisition and DSP hardware, + and embedded software algorithms. This new approach allows musicians to create + expressive and reproducible musical performances with the instrument using consistent + musical parameters, and the self-calibrating nature of the instrument lets users + focus on creating music instead of maintaining equipment. ' address: 'Copenhagen, Denmark' - author: Rob Blazey - bibtex: "@inproceedings{rblazey2017,\n abstract = {Kalimbo is an extended kalimba,\ - \ built from repurposed materials and fitted with sensors that enable it to function\ - \ as a reductionist control interface through physical gestures and capacitive\ - \ sensing. 
The work demonstrates an attempt to apply theories and techniques\ - \ from visual collage art to the concept of musical performance ecologies. The\ - \ body of the instrument emerged from material-led making, and the disparate elements\ - \ of a particular musical performance ecology (acoustic instrument, audio effects,\ - \ samples, synthesis and controls) are juxtaposed and unified into one coherent\ - \ whole. As such, Kalimbo demonstrates how visual arts, in particular collage,\ - \ can inform the design and creation of new musical instruments, interfaces and\ - \ streamlined performance ecologies.},\n address = {Copenhagen, Denmark},\n author\ - \ = {Rob Blazey},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176340},\n\ - \ issn = {2220-4806},\n pages = {501--502},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {Kalimbo: an Extended Thumb Piano and Minimal Control\ - \ Interface},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0099.pdf},\n\ + author: Jason Long and Jim Murphy and Dale A. Carnegie and Ajay Kapur + bibtex: "@inproceedings{jlong2017,\n abstract = {While most musical robots that\ + \ are capable of playing the drum kit utilise a relatively simple striking motion,\ + \ the hi-hat, with the additional degree of motion provided by its pedal, requires\ + \ more involved control strategies in order to produce expressive performances\ + \ on the instrument. A robotic hi-hat should be able to control not only the striking\ + \ timing and velocity to a high degree of precision, but also dynamically control\ + \ the position of the two cymbals in a way that is consistent, reproducible and\ + \ intuitive for composers and other musicians to use. 
This paper describes the\ + \ creation of a new, multifaceted hi-hat control system that utilises a closed-loop\ + \ distance sensing and calibration mechanism in conjunction with an embedded musical\ + \ information retrieval system to continuously calibrate the hi-hat's action both\ + \ before and during a musical performance. This is achieved by combining existing\ + \ musical robotic devices with a newly created linear actuation mechanism, custom\ + \ amplification, acquisition and DSP hardware, and embedded software algorithms.\ + \ This new approach allows musicians to create expressive and reproducible musical\ + \ performances with the instrument using consistent musical parameters, and the\ + \ self-calibrating nature of the instrument lets users focus on creating music\ + \ instead of maintaining equipment. },\n address = {Copenhagen, Denmark},\n author\ + \ = {Jason Long and Jim Murphy and Dale A. Carnegie and Ajay Kapur},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176272},\n issn = {2220-4806},\n pages\ + \ = {330--335},\n publisher = {Aalborg University Copenhagen},\n title = {A Closed-Loop\ + \ Control System for Robotic Hi-hats},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0063.pdf},\n\ + \ year = {2017}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176272 + issn: 2220-4806 + pages: 330--335 + publisher: Aalborg University Copenhagen + title: A Closed-Loop Control System for Robotic Hi-hats + url: http://www.nime.org/proceedings/2017/nime2017_paper0063.pdf + year: 2017 + + +- ENTRYTYPE: inproceedings + ID: skountouras2017 + abstract: 'Tangible user interfaces empower artists, boost their creative expression + and enhance performing art. 
However, most of them are designed to work with a + set of rules, many of which require advanced skills and target users above a certain + age. Here we present a comparative and quantitative study of using TUIs as an + alternative teaching tool in experimenting with and creating soundscapes with + children. We describe an informal interactive workshop involving schoolchildren. + We focus on the development of playful uses of technology to help children empirically + understand audio feature extraction basic techniques. We promote tangible interaction + as an alternative learning method in the creation of synthetic soundscape based + on sounds recorded in a natural outdoor environment as main sources of sound. + We investigate how schoolchildren perceive natural sources of sound and explore + practices that reuse prerecorded material through a tangible interactive controller. + We discuss the potential benefits of using TUIs as an alternative empirical method + for tangible learning and interaction design, and its impact on encouraging and + motivating creativity in children. We summarize our findings and review children''s + biehavioural indicators of engagement and enjoyment in order to provide insight + to the design of TUIs based on user experience.' + address: 'Copenhagen, Denmark' + author: Stratos Kountouras and Ioannis Zannos + bibtex: "@inproceedings{skountouras2017,\n abstract = {Tangible user interfaces\ + \ empower artists, boost their creative expression and enhance performing art.\ + \ However, most of them are designed to work with a set of rules, many of which\ + \ require advanced skills and target users above a certain age. Here we present\ + \ a comparative and quantitative study of using TUIs as an alternative teaching\ + \ tool in experimenting with and creating soundscapes with children. We describe\ + \ an informal interactive workshop involving schoolchildren. 
We focus on the development\ + \ of playful uses of technology to help children empirically understand audio\ + \ feature extraction basic techniques. We promote tangible interaction as an alternative\ + \ learning method in the creation of synthetic soundscape based on sounds recorded\ + \ in a natural outdoor environment as main sources of sound. We investigate how\ + \ schoolchildren perceive natural sources of sound and explore practices that\ + \ reuse prerecorded material through a tangible interactive controller. We discuss\ + \ the potential benefits of using TUIs as an alternative empirical method for\ + \ tangible learning and interaction design, and its impact on encouraging and\ + \ motivating creativity in children. We summarize our findings and review children's\ + \ biehavioural indicators of engagement and enjoyment in order to provide insight\ + \ to the design of TUIs based on user experience.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Stratos Kountouras and Ioannis Zannos},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176274},\n issn = {2220-4806},\n pages = {336--341},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Gestus: Teaching Soundscape\ + \ Composition and Performance with a Tangible Interface},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0064.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176340 + doi: 10.5281/zenodo.1176274 issn: 2220-4806 - pages: 501--502 + pages: 336--341 publisher: Aalborg University Copenhagen - title: 'Kalimbo: an Extended Thumb Piano and Minimal Control Interface' - url: http://www.nime.org/proceedings/2017/nime2017_paper0099.pdf + title: 'Gestus: Teaching Soundscape Composition and Performance with a Tangible + Interface' + url: 
http://www.nime.org/proceedings/2017/nime2017_paper0064.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: jtilbian2017a - abstract: 'This demonstration introduces the Stride programming language, the Stride - IDE, and the Saturn M7 embedded audio development board. Stride is a declarative - and reactive domain specific programming language for real-time sound synthesis, - processing, and interaction design. The Stride IDE is a cross-platform integrated - development environment for Stride. Saturn M7 is an embedded audio development - board by Okra Engineering, designed around an ARM Cortex-M7 processor based microcontroller. It - targets high-end multi-channel audio processing and synthesis with very low latency - and power consumption. The microcontroller has a rich set of audio and communication - peripherals, capable of performing complex real-time DSP tasks with double precision - floating point accuracy. This demonstration will showcase specific features of - the Stride language, which facilitates the design of new interactive musical instruments. - The Stride IDE will be used to compose Stride code and generate code for the Saturn - M7 board. The various hardware capabilities of the Saturn M7 board will also - be presented.' + ID: htez2017 + abstract: 'This research investigates how applying interaction constraints to digital + music instruments (DMIs) affects the way that experienced music performers collaborate + and find creative ways to make live improvised music on stage. The constraints + are applied in two forms: i) Physically implemented on the instruments themselves, + and ii) hidden rules that are defined on a network between the instruments and + triggered depending on the musical actions of the performers. Six experienced + musicians were recruited for a user study which involved rehearsal and performance. + Performers were given deliberately constrained instruments containing a touch + sensor, speaker, battery and an embedded computer. 
Results of the study show that + whilst constraints can lead to more structured improvisation, the resultant music + may not fit with performers'' true intentions. It was also found that when external + musical material is introduced to guide the performers into a collective convergence, + it is likely to be ignored because it was perceived by performers as being out + of context.' address: 'Copenhagen, Denmark' - author: Joseph Tilbian and Andres Cabrera and Steffen Martin and Lukasz Olczyk - bibtex: "@inproceedings{jtilbian2017a,\n abstract = {This demonstration introduces\ - \ the Stride programming language, the Stride IDE, and the Saturn M7 embedded\ - \ audio development board. Stride is a declarative and reactive domain specific\ - \ programming language for real-time sound synthesis, processing, and interaction\ - \ design. The Stride IDE is a cross-platform integrated development environment\ - \ for Stride. Saturn M7 is an embedded audio development board by Okra Engineering,\ - \ designed around an ARM Cortex-M7 processor based microcontroller. It targets\ - \ high-end multi-channel audio processing and synthesis with very low latency\ - \ and power consumption. The microcontroller has a rich set of audio and communication\ - \ peripherals, capable of performing complex real-time DSP tasks with double precision\ - \ floating point accuracy. This demonstration will showcase specific features\ - \ of the Stride language, which facilitates the design of new interactive musical\ - \ instruments. The Stride IDE will be used to compose Stride code and generate\ - \ code for the Saturn M7 board. 
The various hardware capabilities of the Saturn\ - \ M7 board will also be presented.},\n address = {Copenhagen, Denmark},\n author\ - \ = {Joseph Tilbian and Andres Cabrera and Steffen Martin and Lukasz Olczyk},\n\ + author: Hazar Emre Tez and Nick Bryan-Kinns + bibtex: "@inproceedings{htez2017,\n abstract = {This research investigates how applying\ + \ interaction constraints to digital music instruments (DMIs) affects the way\ + \ that experienced music performers collaborate and find creative ways to make\ + \ live improvised music on stage. The constraints are applied in two forms: i)\ + \ Physically implemented on the instruments themselves, and ii) hidden rules that\ + \ are defined on a network between the instruments and triggered depending on\ + \ the musical actions of the performers. Six experienced musicians were recruited\ + \ for a user study which involved rehearsal and performance. Performers were given\ + \ deliberately constrained instruments containing a touch sensor, speaker, battery\ + \ and an embedded computer. Results of the study show that whilst constraints\ + \ can lead to more structured improvisation, the resultant music may not fit with\ + \ performers' true intentions. 
It was also found that when external musical material\ + \ is introduced to guide the performers into a collective convergence, it is likely\ + \ to be ignored because it was perceived by performers as being out of context.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Hazar Emre Tez and Nick Bryan-Kinns},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176342},\n issn = {2220-4806},\n\ - \ pages = {503--504},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Stride on Saturn M7 for Interactive Musical Instrument Design},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0100.pdf},\n\ - \ year = {2017}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1176276},\n issn = {2220-4806},\n\ + \ pages = {342--347},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Exploring the Effect of Interface Constraints on Live Collaborative Music Improvisation},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0065.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176342 + doi: 10.5281/zenodo.1176276 issn: 2220-4806 - pages: 503--504 + pages: 342--347 publisher: Aalborg University Copenhagen - title: Stride on Saturn M7 for Interactive Musical Instrument Design - url: http://www.nime.org/proceedings/2017/nime2017_paper0100.pdf + title: Exploring the Effect of Interface Constraints on Live Collaborative Music + Improvisation + url: http://www.nime.org/proceedings/2017/nime2017_paper0065.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: tkitahara2017 - abstract: 'In this paper, we present JamSketch, a real-time improvisation support - system which automatically generates melodies according to melodic outlines drawn - by the users. 
The system generates the improvised melodies based on (1) an outline - sketched by the user using a mouse or a touch screen, (2) a genetic algorithm - based on a dataset of existing music pieces as well as musical knowledge, and - (3) an expressive performance model for timing and dynamic transformations. The - aim of the system is to allow people with no prior musical knowledge to be able - to enjoy playing music by improvising melodies in real time.' + ID: iwicaksono2017 + abstract: 'This paper presents FabricKeyboard: a novel deformable keyboard interface + based on a multi-modal fabric sensate surface. Multi-layer fabric sensors that + detect touch, proximity, electric field, pressure, and stretch are machine-sewn + in a keyboard pattern on a stretchable substrate. The result is a fabric-based + musical controller that combines both the discrete controls of a keyboard and + various continuous controls from the embedded fabric sensors. This enables unique + tactile experiences and new interactions both with physical and non-contact gestures: + physical by pressing, pulling, stretching, and twisting the keys or the fabric + and non-contact by hovering and waving towards/against the keyboard and an electromagnetic + source. We have also developed additional fabric-based modular interfaces such + as a ribbon-controller and trackpad, allowing performers to add more expressive + and continuous controls. This paper will discuss implementation strategies for + our system-on-textile, fabric-based sensor developments, as well as sensor-computer + interfacing and musical mapping examples of this multi-modal and expressive fabric + keyboard. ' address: 'Copenhagen, Denmark' - author: Tetsuro Kitahara and Sergio Giraldo and Rafael Ramírez - bibtex: "@inproceedings{tkitahara2017,\n abstract = {In this paper, we present JamSketch,\ - \ a real-time improvisation support system which automatically generates melodies\ - \ according to melodic outlines drawn by the users. 
The system generates the improvised\ - \ melodies based on (1) an outline sketched by the user using a mouse or a touch\ - \ screen, (2) a genetic algorithm based on a dataset of existing music pieces\ - \ as well as musical knowledge, and (3) an expressive performance model for timing\ - \ and dynamic transformations. The aim of the system is to allow people with no\ - \ prior musical knowledge to be able to enjoy playing music by improvising melodies\ - \ in real time.},\n address = {Copenhagen, Denmark},\n author = {Tetsuro Kitahara\ - \ and Sergio Giraldo and Rafael Ramírez},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176344},\n\ - \ issn = {2220-4806},\n pages = {505--506},\n publisher = {Aalborg University\ - \ Copenhagen},\n title = {JamSketch: A Drawing-based Real-time Evolutionary Improvisation\ - \ Support System},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0101.pdf},\n\ + author: Irmandy Wicaksono and Joseph Paradiso + bibtex: "@inproceedings{iwicaksono2017,\n abstract = {This paper presents FabricKeyboard:\ + \ a novel deformable keyboard interface based on a multi-modal fabric sensate\ + \ surface. Multi-layer fabric sensors that detect touch, proximity, electric field,\ + \ pressure, and stretch are machine-sewn in a keyboard pattern on a stretchable\ + \ substrate. The result is a fabric-based musical controller that combines both\ + \ the discrete controls of a keyboard and various continuous controls from the\ + \ embedded fabric sensors. This enables unique tactile experiences and new interactions\ + \ both with physical and non-contact gestures: physical by pressing, pulling,\ + \ stretching, and twisting the keys or the fabric and non-contact by hovering\ + \ and waving towards/against the keyboard and an electromagnetic source. 
We have\ + \ also developed additional fabric-based modular interfaces such as a ribbon-controller\ + \ and trackpad, allowing performers to add more expressive and continuous controls.\ + \ This paper will discuss implementation strategies for our system-on-textile,\ + \ fabric-based sensor developments, as well as sensor-computer interfacing and\ + \ musical mapping examples of this multi-modal and expressive fabric keyboard.\ + \ },\n address = {Copenhagen, Denmark},\n author = {Irmandy Wicaksono and Joseph\ + \ Paradiso},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176278},\n issn\ + \ = {2220-4806},\n pages = {348--353},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {FabricKeyboard: Multimodal Textile Sensate Media as an Expressive and\ + \ Deformable Musical Interface},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0066.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176344 + doi: 10.5281/zenodo.1176278 issn: 2220-4806 - pages: 505--506 + pages: 348--353 publisher: Aalborg University Copenhagen - title: 'JamSketch: A Drawing-based Real-time Evolutionary Improvisation Support - System' - url: http://www.nime.org/proceedings/2017/nime2017_paper0101.pdf + title: 'FabricKeyboard: Multimodal Textile Sensate Media as an Expressive and Deformable + Musical Interface' + url: http://www.nime.org/proceedings/2017/nime2017_paper0066.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: jharrison2017 - abstract: 'We present an attachment for the bass guitar which allows MIDI-controlled - actuated fretting. This adapted instrument is presented as a potential method - of augmenting the bass guitar for those with upper-limb disabilities. We conducted - an online survey of 48 bassists in order to highlight the most important aspects - of bass playing. 
We found that timbral and dynamic features related to the plucking - hand were most important to the survey respondents. We designed an actuated fretting - mechanism to replace the role of the fretting hand in order to preserve plucking - hand techniques. We then conducted a performance study in which experienced bassists - prepared and performed an accompaniment to a backing track with the adapted bass. - The performances highlighted ways in which adapting a fretted string instrument - in this way impacts plucking hand technique. ' + ID: kkonovalovs2017 + abstract: 'This paper explores a new interaction possibility for increasing performer + freedom via a foot-mounted wearable, and an instrument-mounted device that maintain + stomp-box styles of interactivity, but without the restrictions normally associated + with the original design of guitar effect pedals. The classic foot activated effect + pedals that are used to alter the sound of the instrument are stationary, forcing + the performer to return to the same location in order to interact with the pedals. + This paper presents a new design that enables the performer to interact with the + effect pedals anywhere on the stage. By designing a foot&instrument-mounted effect + controller, we kept the strongest part of the classical pedal design, while allowing + the activation of the effect at any location on the stage. The usability of the + device has been tested on thirty experienced guitar players. Their performance + has been recorded and compared, and their opinion has been investigated through + questionnaire and interview. The results of the experiment showed that, in theory, + foot&instrument-mounted effect controller can replace standard effect pedals and + at the same time provide more mobility on a stage. 
' address: 'Copenhagen, Denmark' - author: Jacob Harrison and Andrew McPherson - bibtex: "@inproceedings{jharrison2017,\n abstract = {We present an attachment for\ - \ the bass guitar which allows MIDI-controlled actuated fretting. This adapted\ - \ instrument is presented as a potential method of augmenting the bass guitar\ - \ for those with upper-limb disabilities. We conducted an online survey of 48\ - \ bassists in order to highlight the most important aspects of bass playing. We\ - \ found that timbral and dynamic features related to the plucking hand were most\ - \ important to the survey respondents. We designed an actuated fretting mechanism\ - \ to replace the role of the fretting hand in order to preserve plucking hand\ - \ techniques. We then conducted a performance study in which experienced bassists\ - \ prepared and performed an accompaniment to a backing track with the adapted\ - \ bass. The performances highlighted ways in which adapting a fretted string instrument\ - \ in this way impacts plucking hand technique. },\n address = {Copenhagen, Denmark},\n\ - \ author = {Jacob Harrison and Andrew McPherson},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1176346},\n issn = {2220-4806},\n pages = {507--508},\n publisher\ - \ = {Aalborg University Copenhagen},\n title = {An Adapted Bass Guitar for One-Handed\ - \ Playing},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0102.pdf},\n\ + author: Kristians Konovalovs and Jelizaveta Zovnercuka and Ali Adjorlu and Daniel + Overholt + bibtex: "@inproceedings{kkonovalovs2017,\n abstract = {This paper explores a new\ + \ interaction possibility for increasing performer freedom via a foot-mounted\ + \ wearable, and an instrument-mounted device that maintain stomp-box styles of\ + \ interactivity, but without the restrictions normally associated with the original\ + \ design of guitar effect pedals. 
The classic foot activated effect pedals that\ + \ are used to alter the sound of the instrument are stationary, forcing the performer\ + \ to return to the same location in order to interact with the pedals. This paper\ + \ presents a new design that enables the performer to interact with the effect\ + \ pedals anywhere on the stage. By designing a foot\\&instrument-mounted effect\ + \ controller, we kept the strongest part of the classical pedal design, while\ + \ allowing the activation of the effect at any location on the stage. The usability\ + \ of the device has been tested on thirty experienced guitar players. Their performance\ + \ has been recorded and compared, and their opinion has been investigated through\ + \ questionnaire and interview. The results of the experiment showed that, in theory,\ + \ foot\\&instrument-mounted effect controller can replace standard effect pedals\ + \ and at the same time provide more mobility on a stage. },\n address = {Copenhagen,\ + \ Denmark},\n author = {Kristians Konovalovs and Jelizaveta Zovnercuka and Ali\ + \ Adjorlu and Daniel Overholt},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176280},\n\ + \ issn = {2220-4806},\n pages = {354--357},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {A Wearable Foot-mounted / Instrument-mounted Effect\ + \ Controller: Design and Evaluation},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0067.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176346 + doi: 10.5281/zenodo.1176280 issn: 2220-4806 - pages: 507--508 + pages: 354--357 publisher: Aalborg University Copenhagen - title: An Adapted Bass Guitar for One-Handed Playing - url: http://www.nime.org/proceedings/2017/nime2017_paper0102.pdf + title: 'A Wearable Foot-mounted / Instrument-mounted Effect Controller: Design 
and + Evaluation' + url: http://www.nime.org/proceedings/2017/nime2017_paper0067.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: kcybulski2017 - abstract: "Feedboxes are interactive sound objects that generate rhythmic and harmonic\ - \ patterns. Their purpose is to create intuitive tools for live improvisation,\ - \ without the need for using computer with midi controller or fixed playback.\ - \ Their only means of communication is sound --- they \"listen\" with the microphone\ - \ and \"speak\" with the speaker, thus interaction with Feedboxes is very similar\ - \ to playing with real musicians. The boxes could be used together with any instrument,\ - \ or on their own – in this case they create a feedback loop by listening\ - \ and responding to each other, creating ever-changing rhythmic structures. Feedboxes\ - \ react to incoming sounds in simple, predefined manner. Yet, when used together,\ - \ their behaviour may become quite complex. Each of two boxes has its own sound\ - \ and set of simple rules." + ID: hchang2017 + abstract: 'This paper discusses nonlinear acoustic synthesis in augmented musical + instruments via acoustic transduction. Our work expands previous investigations + into acoustic amplitude modulation, offering new prototypes that produce intermodulation + in several instrumental contexts. Our results show nonlinear intermodulation distortion + can be generated and controlled in electromagnetically driven acoustic interfaces + that can be deployed in acoustic instruments through augmentation, thus extending + the nonlinear acoustic synthesis to a broader range of sonic applications.' address: 'Copenhagen, Denmark' - author: Krzysztof Cybulski - bibtex: "@inproceedings{kcybulski2017,\n abstract = {Feedboxes are interactive sound\ - \ objects that generate rhythmic and harmonic patterns. Their purpose is to create\ - \ intuitive tools for live improvisation, without the need for using computer\ - \ with midi controller or fixed playback. 
Their only means of communication is\ - \ sound --- they \"listen\" with the microphone and \"speak\" with the speaker,\ - \ thus interaction with Feedboxes is very similar to playing with real musicians.\ - \ The boxes could be used together with any instrument, or on their own –\ - \ in this case they create a feedback loop by listening and responding to each\ - \ other, creating ever-changing rhythmic structures. Feedboxes react to incoming\ - \ sounds in simple, predefined manner. Yet, when used together, their behaviour\ - \ may become quite complex. Each of two boxes has its own sound and set of simple\ - \ rules.},\n address = {Copenhagen, Denmark},\n author = {Krzysztof Cybulski},\n\ + author: Herbert Ho-Chun Chang and Lloyd May and Spencer Topel + bibtex: "@inproceedings{hchang2017,\n abstract = {This paper discusses nonlinear\ + \ acoustic synthesis in augmented musical instruments via acoustic transduction.\ + \ Our work expands previous investigations into acoustic amplitude modulation,\ + \ offering new prototypes that produce intermodulation in several instrumental\ + \ contexts. 
Our results show nonlinear intermodulation distortion can be generated\ + \ and controlled in electromagnetically driven acoustic interfaces that can be\ + \ deployed in acoustic instruments through augmentation, thus extending the nonlinear\ + \ acoustic synthesis to a broader range of sonic applications.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Herbert Ho-Chun Chang and Lloyd May and Spencer Topel},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1176348},\n issn = {2220-4806},\n\ - \ pages = {509--510},\n publisher = {Aalborg University Copenhagen},\n title =\ - \ {Feedboxes},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0103.pdf},\n\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176282},\n issn = {2220-4806},\n\ + \ pages = {358--363},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Nonlinear Acoustic Synthesis in Augmented Musical Instruments},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0068.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176348 + doi: 10.5281/zenodo.1176282 issn: 2220-4806 - pages: 509--510 + pages: 358--363 publisher: Aalborg University Copenhagen - title: Feedboxes - url: http://www.nime.org/proceedings/2017/nime2017_paper0103.pdf + title: Nonlinear Acoustic Synthesis in Augmented Musical Instruments + url: http://www.nime.org/proceedings/2017/nime2017_paper0068.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: sglickman2017 - abstract: "This paper describes the design and implementation of an augmented reality\ - \ (AR) piano learning tool that uses a Microsoft HoloLens and a MIDI-over-Bluetooth-enabled\ - \ electric piano. 
The tool presents a unique visual interface—a “mirror\ - \ key overlay” approach—fitted for the AR environment, and opens up\ - \ the possibility of on-instrument learning experiences. The curriculum focuses\ - \ on teaching improvisation in blues, rock, jazz and classical genres. Users at\ - \ the piano engage with interactive lessons, watch virtual hand demonstrations,\ - \ see and hear example improvisations, and play their own solos and accompaniment\ - \ along with AR-projected virtual musicians. The tool aims to be entertaining\ - \ yet also effective in teaching core musical concepts." + ID: ghajdu2017 + abstract: 'This paper is a description of a pilot project conducted at the Hamburg + University of Music and Drama (HfMT) during the academic year 2015-16. In this + project we have addressed how interventions via interactive, generative music + systems may contribute to the improvement of the atmosphere and thus to the well-being + of patients in hospital waiting areas. The project was conducted by both the students + of the music therapy and multimedia composition programs and has thus offered + rare insights into the dynamic of such undertakings covering both the therapeutic + underpinnings, as well as the technical means required to achieve a particular + result. DJster, the engine we used for the generative processes is based on Clarence + Barlow''s probabilistic algorithms. Equipped with the proper periphery (sensors, + sound modules and spatializers), we looked at three different scenarios, each + requiring specific musical and technological solutions. The pilot was concluded + by a symposium in 2017 and the development of a prototype system. The symposium + yielded a diagram detailing the circular dynamic of the factors involved in this + particular project, while the prototype was demoed in 2016 at the HfMT facilities. + The system will be installed permanently at the University Medical Center Hamburg-Eppendorf + (UKE) in June 2017.' 
address: 'Copenhagen, Denmark' - author: Seth Glickman and Byunghwan Lee and Fu Yen Hsiao and Shantanu Das - bibtex: "@inproceedings{sglickman2017,\n abstract = {This paper describes the design\ - \ and implementation of an augmented reality (AR) piano learning tool that uses\ - \ a Microsoft HoloLens and a MIDI-over-Bluetooth-enabled electric piano. The tool\ - \ presents a unique visual interface—a “mirror key overlay”\ - \ approach—fitted for the AR environment, and opens up the possibility of\ - \ on-instrument learning experiences. The curriculum focuses on teaching improvisation\ - \ in blues, rock, jazz and classical genres. Users at the piano engage with interactive\ - \ lessons, watch virtual hand demonstrations, see and hear example improvisations,\ - \ and play their own solos and accompaniment along with AR-projected virtual musicians.\ - \ The tool aims to be entertaining yet also effective in teaching core musical\ - \ concepts.},\n address = {Copenhagen, Denmark},\n author = {Seth Glickman and\ - \ Byunghwan Lee and Fu Yen Hsiao and Shantanu Das},\n booktitle = {Proceedings\ + author: Georg Hajdu and Benedict Carey and Goran Lazarevic and Eckhard Weymann + bibtex: "@inproceedings{ghajdu2017,\n abstract = {This paper is a description of\ + \ a pilot project conducted at the Hamburg University of Music and Drama (HfMT)\ + \ during the academic year 2015-16. In this project we have addressed how interventions\ + \ via interactive, generative music systems may contribute to the improvement\ + \ of the atmosphere and thus to the well-being of patients in hospital waiting\ + \ areas. The project was conducted by both the students of the music therapy and\ + \ multimedia composition programs and has thus offered rare insights into the\ + \ dynamic of such undertakings covering both the therapeutic underpinnings, as\ + \ well as the technical means required to achieve a particular result. 
DJster,\ + \ the engine we used for the generative processes is based on Clarence Barlow's\ + \ probabilistic algorithms. Equipped with the proper periphery (sensors, sound\ + \ modules and spatializers), we looked at three different scenarios, each requiring\ + \ specific musical and technological solutions. The pilot was concluded by a symposium\ + \ in 2017 and the development of a prototype system. The symposium yielded a diagram\ + \ detailing the circular dynamic of the factors involved in this particular project,\ + \ while the prototype was demoed in 2016 at the HfMT facilities. The system will\ + \ be installed permanently at the University Medical Center Hamburg-Eppendorf\ + \ (UKE) in June 2017.},\n address = {Copenhagen, Denmark},\n author = {Georg Hajdu\ + \ and Benedict Carey and Goran Lazarevic and Eckhard Weymann},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1176350},\n issn = {2220-4806},\n pages = {511--512},\n\ - \ publisher = {Aalborg University Copenhagen},\n title = {Music Everywhere ---\ - \ Augmented Reality Piano Improvisation Learning System},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0104.pdf},\n\ + \ doi = {10.5281/zenodo.1176284},\n issn = {2220-4806},\n pages = {364--369},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {From Atmosphere to Intervention:\ + \ The circular dynamic of installations in hospital waiting areas},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0069.pdf},\n\ \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176350 + doi: 10.5281/zenodo.1176284 issn: 2220-4806 - pages: 511--512 + pages: 364--369 publisher: Aalborg University Copenhagen - title: Music Everywhere --- Augmented Reality Piano Improvisation Learning System - url: http://www.nime.org/proceedings/2017/nime2017_paper0104.pdf + 
title: 'From Atmosphere to Intervention: The circular dynamic of installations in + hospital waiting areas' + url: http://www.nime.org/proceedings/2017/nime2017_paper0069.pdf year: 2017 - ENTRYTYPE: inproceedings - ID: jbender2017 - abstract: 'Song Kernel is a chord-and-note harmonizing musical input interface applicable - to electronic instruments in both hardware and software format. It enables to - play chords and melodies while visualizing harmonic functions of chords within - a scale of western music in one single static pattern. It provides amateur musicians, - as well as people with no experience in playing music, a graphic and intuitive - way to play songs, manage harmonic structures and identify composition patterns. ' + ID: dbrown2017 + abstract: 'The need for thorough evaluations is an emerging area of interest and + importance in music interaction research. As a large degree of DMI evaluation + is concerned with exploring the subjective experience: ergonomics, action-sound + mappings and control intimacy; User Experience (UX) methods are increasingly being + utilised to analyse an individual''s experience of new musical instruments, from + which we can extract meaningful, robust findings and subsequently generalised + and useful recommendations. However, many music interaction evaluations remain + informal. In this paper, we provide a meta-review of 132 papers from the 2014 + -- 2016 proceedings of the NIME, SMC and ICMC conferences to collate the aspects + of UX research that are already present in music interaction literature, and to + highlight methods from UX''s widening field of research that have not yet been + explored. Our findings show that usability and aesthetics are the primary focus + of evaluations in music interaction research, and other important components of + the user experience such as enchantment, motivation and frustration are frequently + if not always overlooked. 
We argue that these factors are prime areas for future + research in the field and their consideration in design and evaluation could lead + to a better understanding of NIMEs and other computer music technology.' address: 'Copenhagen, Denmark' - author: Juan Bender and Gabriel Lecup and Sergio Fernandez - bibtex: "@inproceedings{jbender2017,\n abstract = {Song Kernel is a chord-and-note\ - \ harmonizing musical input interface applicable to electronic instruments in\ - \ both hardware and software format. It enables to play chords and melodies while\ - \ visualizing harmonic functions of chords within a scale of western music in\ - \ one single static pattern. It provides amateur musicians, as well as people\ - \ with no experience in playing music, a graphic and intuitive way to play songs,\ - \ manage harmonic structures and identify composition patterns. },\n address\ - \ = {Copenhagen, Denmark},\n author = {Juan Bender and Gabriel Lecup and Sergio\ - \ Fernandez},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176352},\n issn\ - \ = {2220-4806},\n pages = {513--514},\n publisher = {Aalborg University Copenhagen},\n\ - \ title = {Song Kernel --- Explorations in Intuitive Use of Harmony},\n url =\ - \ {http://www.nime.org/proceedings/2017/nime2017_paper0105.pdf},\n year = {2017}\n\ - }\n" + author: Dom Brown and Chris Nash and Tom Mitchell + bibtex: "@inproceedings{dbrown2017,\n abstract = {The need for thorough evaluations\ + \ is an emerging area of interest and importance in music interaction research.\ + \ As a large degree of DMI evaluation is concerned with exploring the subjective\ + \ experience: ergonomics, action-sound mappings and control intimacy; User Experience\ + \ (UX) methods are increasingly being utilised to analyse an individual's experience\ + \ of new musical instruments, from which we can extract meaningful, robust findings\ + \ and subsequently 
generalised and useful recommendations. However, many music\ + \ interaction evaluations remain informal. In this paper, we provide a meta-review\ + \ of 132 papers from the 2014 -- 2016 proceedings of the NIME, SMC and ICMC conferences\ + \ to collate the aspects of UX research that are already present in music interaction\ + \ literature, and to highlight methods from UX's widening field of research that\ + \ have not yet been explored. Our findings show that usability and aesthetics\ + \ are the primary focus of evaluations in music interaction research, and other\ + \ important components of the user experience such as enchantment, motivation\ + \ and frustration are frequently if not always overlooked. We argue that these\ + \ factors are prime areas for future research in the field and their consideration\ + \ in design and evaluation could lead to a better understanding of NIMEs and other\ + \ computer music technology.},\n address = {Copenhagen, Denmark},\n author = {Dom\ + \ Brown and Chris Nash and Tom Mitchell},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176286},\n\ + \ issn = {2220-4806},\n pages = {370--375},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {A User Experience Review of Music Interaction Evaluations},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0070.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1176352 + doi: 10.5281/zenodo.1176286 issn: 2220-4806 - pages: 513--514 + pages: 370--375 publisher: Aalborg University Copenhagen - title: Song Kernel --- Explorations in Intuitive Use of Harmony - url: http://www.nime.org/proceedings/2017/nime2017_paper0105.pdf + title: A User Experience Review of Music Interaction Evaluations + url: http://www.nime.org/proceedings/2017/nime2017_paper0070.pdf year: 2017 - ENTRYTYPE: 
inproceedings - ID: ckorda2015 - abstract: 'Improvising to non-modal chord progressions such as those found in jazz - necessitates switching between the different scales implied by each chord. This - work attempted to simplify improvisation by delegating the process of switching - scales to a computer. An open-source software MIDI remapper called ChordEase was - developed that dynamically alters the pitch of notes, in order to fit them to - the chord scales of a predetermined song. ChordEase modifies the behavior of ordinary - MIDI instruments, giving them new interfaces that permit non-modal music to be - approached as if it were modal. Multiple instruments can be remapped simultaneously, - using a variety of mapping functions, each optimized for a particular musical - role. Harmonization and orchestration can also be automated. By facilitating the - selection of scale tones, ChordEase enables performers to focus on other aspects - of improvisation, and thus creates new possibilities for musical expression.' - address: 'Baton Rouge, Louisiana, USA' - author: Chris Korda - bibtex: "@inproceedings{ckorda2015,\n abstract = {Improvising to non-modal chord\ - \ progressions such as those found in jazz necessitates switching between the\ - \ different scales implied by each chord. This work attempted to simplify improvisation\ - \ by delegating the process of switching scales to a computer. An open-source\ - \ software MIDI remapper called ChordEase was developed that dynamically alters\ - \ the pitch of notes, in order to fit them to the chord scales of a predetermined\ - \ song. ChordEase modifies the behavior of ordinary MIDI instruments, giving them\ - \ new interfaces that permit non-modal music to be approached as if it were modal.\ - \ Multiple instruments can be remapped simultaneously, using a variety of mapping\ - \ functions, each optimized for a particular musical role. Harmonization and orchestration\ - \ can also be automated. 
By facilitating the selection of scale tones, ChordEase\ - \ enables performers to focus on other aspects of improvisation, and thus creates\ - \ new possibilities for musical expression.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Chris Korda},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179110},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {322--324},\n publisher = {Louisiana State University},\n title\ - \ = {ChordEase: A {MIDI} remapper for intuitive performance of non-modal music},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_103.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/103/0103-file1.avi},\n urlsuppl2 = {http://www.nime.org/proceedings/2015/103/0103-file2.avi},\n\ - \ year = {2015}\n}\n" + ID: wsiegel2017 + abstract: 'This paper discusses control of multichannel sound diffusion by means + of motion-tracking hardware and software within the context of a live performance. + The idea developed from the author''s previous use of motion-tracking technology + in his own artistic practice as a composer and performer. Various motion tracking + systems were considered, experiments were conducted with three sound diffusion + setups at three venues and a new composition for solo performer and motion-tracking + system took form.' + address: 'Copenhagen, Denmark' + author: Wayne Siegel + bibtex: "@inproceedings{wsiegel2017,\n abstract = {This paper discusses control\ + \ of multichannel sound diffusion by means of motion-tracking hardware and software\ + \ within the context of a live performance. The idea developed from the author's\ + \ previous use of motion-tracking technology in his own artistic practice as a\ + \ composer and performer. 
Various motion tracking systems were considered, experiments\ + \ were conducted with three sound diffusion setups at three venues and a new composition\ + \ for solo performer and motion-tracking system took form.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Wayne Siegel},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176288},\n\ + \ issn = {2220-4806},\n pages = {376--380},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Conducting Sound in Space},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0071.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179110 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176288 issn: 2220-4806 - month: May - pages: 322--324 - publisher: Louisiana State University - title: 'ChordEase: A MIDI remapper for intuitive performance of non-modal music' - url: http://www.nime.org/proceedings/2015/nime2015_103.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/103/0103-file1.avi - urlsuppl2: http://www.nime.org/proceedings/2015/103/0103-file2.avi - year: 2015 + pages: 376--380 + publisher: Aalborg University Copenhagen + title: Conducting Sound in Space + url: http://www.nime.org/proceedings/2017/nime2017_paper0071.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: makbari2015 - abstract: 'One important problem in Musical Information Retrieval is Automatic Music - Transcription, which is an automated conversion process from played music to a - symbolic notation such as sheet music. Since the accuracy of previous audio-based - transcription systems is not satisfactory, we propose an innovative visual-based - automatic music transcription system named claVision to perform piano music transcription. 
- Instead of processing the music audio, the system performs the transcription only - from the video performance captured by a camera mounted over the piano keyboard. - claVision can be used as a transcription tool, but it also has other applications - such as music education. The claVision software has a very high accuracy (over - 95%) and a very low latency in real-time music transcription, even under different - illumination conditions.' - address: 'Baton Rouge, Louisiana, USA' - author: Mohammad Akbari and Howard Cheng - bibtex: "@inproceedings{makbari2015,\n abstract = {One important problem in Musical\ - \ Information Retrieval is Automatic Music Transcription, which is an automated\ - \ conversion process from played music to a symbolic notation such as sheet music.\ - \ Since the accuracy of previous audio-based transcription systems is not satisfactory,\ - \ we propose an innovative visual-based automatic music transcription system named\ - \ claVision to perform piano music transcription. Instead of processing the music\ - \ audio, the system performs the transcription only from the video performance\ - \ captured by a camera mounted over the piano keyboard. 
claVision can be used\ - \ as a transcription tool, but it also has other applications such as music education.\ - \ The claVision software has a very high accuracy (over 95%) and a very low latency\ - \ in real-time music transcription, even under different illumination conditions.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Mohammad Akbari and Howard\ - \ Cheng},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179002},\n editor = {Edgar\ - \ Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages =\ - \ {313--314},\n publisher = {Louisiana State University},\n title = {claVision:\ - \ Visual Automatic Piano Music Transcription},\n url = {http://www.nime.org/proceedings/2015/nime2015_105.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/105/0105-file1.avi},\n year\ - \ = {2015}\n}\n" + ID: ssalazar2017a + abstract: 'The Fragment String is a new digital musical instrument designed to reinterpret + and reflect upon the sounds of the instruments it is performed in collaboration + with. At its core, it samples an input audio signal and allows the performer to + replay these samples through a granular resynthesizer. Normally the Fragment String + samples an acoustic instrument that accompanies it, but in the absence of this + input it will amplify the ambient environment and electronic noise of the input + audio path to audible levels and sample these. This ability to leverage both structural, + tonal sound and unstructured noise provide the instrument with multiple dimensions + of musical expressivity. The relative magnitude of the physical gestures required + to manipulate the instrument and control the sound also engage an audience in + its performance. This straightforward yet expressive design has lent the Fragment + String to a variety of performance techniques and settings. 
These are explored + through case studies in a five year history of Fragment String-based compositions + and performances, illustrating the strengths and limitations of these interactions + and their sonic output. ' + address: 'Copenhagen, Denmark' + author: Spencer Salazar and Sarah Reid and Daniel McNamara + bibtex: "@inproceedings{ssalazar2017a,\n abstract = {The Fragment String is a new\ + \ digital musical instrument designed to reinterpret and reflect upon the sounds\ + \ of the instruments it is performed in collaboration with. At its core, it samples\ + \ an input audio signal and allows the performer to replay these samples through\ + \ a granular resynthesizer. Normally the Fragment String samples an acoustic instrument\ + \ that accompanies it, but in the absence of this input it will amplify the ambient\ + \ environment and electronic noise of the input audio path to audible levels and\ + \ sample these. This ability to leverage both structural, tonal sound and unstructured\ + \ noise provide the instrument with multiple dimensions of musical expressivity.\ + \ The relative magnitude of the physical gestures required to manipulate the instrument\ + \ and control the sound also engage an audience in its performance. This straightforward\ + \ yet expressive design has lent the Fragment String to a variety of performance\ + \ techniques and settings. 
These are explored through case studies in a five year\ + \ history of Fragment String-based compositions and performances, illustrating\ + \ the strengths and limitations of these interactions and their sonic output.\ + \ },\n address = {Copenhagen, Denmark},\n author = {Spencer Salazar and Sarah\ + \ Reid and Daniel McNamara},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176290},\n\ + \ issn = {2220-4806},\n pages = {381--386},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {The Fragment String},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0072.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179002 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176290 issn: 2220-4806 - month: May - pages: 313--314 - publisher: Louisiana State University - title: 'claVision: Visual Automatic Piano Music Transcription' - url: http://www.nime.org/proceedings/2015/nime2015_105.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/105/0105-file1.avi - year: 2015 + pages: 381--386 + publisher: Aalborg University Copenhagen + title: The Fragment String + url: http://www.nime.org/proceedings/2017/nime2017_paper0072.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: jschacher2015 - abstract: 'When performing with gestural devices in combination with machine learning - techniques, a mode of high-level interaction can be achieved. The methods of machine - learning and pattern recognition can be re-appropriated to serve as a generative - principle. The goal is not classification but reaction from the system in an interactive - and autonomous manner. This investigation looks at how machine learning algorithms - fit generative purposes and what independent behaviours they enable. 
To this end - we describe artistic and technical developments made to leverage existing machine - learning algorithms as generative devices and discuss their relevance to the field - of gestural interaction.' - address: 'Baton Rouge, Louisiana, USA' - author: Jan C. Schacher and Chikashi Miyama and Daniel Bisig - bibtex: "@inproceedings{jschacher2015,\n abstract = {When performing with gestural\ - \ devices in combination with machine learning techniques, a mode of high-level\ - \ interaction can be achieved. The methods of machine learning and pattern recognition\ - \ can be re-appropriated to serve as a generative principle. The goal is not classification\ - \ but reaction from the system in an interactive and autonomous manner. This investigation\ - \ looks at how machine learning algorithms fit generative purposes and what independent\ - \ behaviours they enable. To this end we describe artistic and technical developments\ - \ made to leverage existing machine learning algorithms as generative devices\ - \ and discuss their relevance to the field of gestural interaction.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {{Jan C.} Schacher and Chikashi\ - \ Miyama and Daniel Bisig},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179172},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {347--350},\n publisher = {Louisiana State University},\n title\ - \ = {Gestural Electronic Music using Machine Learning as Generative Device},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_117.pdf},\n year = {2015}\n\ - }\n" + ID: sjong2017 + abstract: "We present Ghostfinger, a technology for highly dynamic up/down fingertip\ + \ haptics and control. 
The overall user experience offered by the technology can\ + \ be described as that of tangibly and audibly interacting with a small hologram.\ + \   More specifically, Ghostfinger implements automatic visualization of\ + \ the dynamic instantiation/parametrization of algorithmic primitives that together\ + \ determine the current haptic conditions for fingertip action. Some aspects of\ + \ this visualization are visuospatial: A floating see-through cursor provides\ + \ real-time, to-scale display of the fingerpad transducer, as it is being moved\ + \ by the user. Simultaneously, each haptic primitive instance is represented\ + \ by a floating block shape, type-colored, variably transparent, and possibly\ + \ overlapping with other such block shapes. Further aspects of visualization are\ + \ symbolic: Each instance is also represented by a type symbol, lighting up within\ + \ a grid if the instance is providing output to the user.   We discuss the\ + \ system's user interface, programming interface, and potential applications.\ + \ This from a general perspective that articulates and emphasizes the uniquely\ + \ enabling role of the principle of computation in the implementation of new forms\ + \ of instrumental control of musical sound. Beyond the currently presented technology,\ + \ this also reflects more broadly on the role of Digital Musical Instruments (DMIs)\ + \ in NIME." + address: 'Copenhagen, Denmark' + author: Staas de Jong + bibtex: "@inproceedings{sjong2017,\n abstract = {We present Ghostfinger, a technology\ + \ for highly dynamic up/down fingertip haptics and control. The overall user experience\ + \ offered by the technology can be described as that of tangibly and audibly interacting\ + \ with a small hologram.   
More specifically, Ghostfinger implements automatic\ + \ visualization of the dynamic instantiation/parametrization of algorithmic primitives\ + \ that together determine the current haptic conditions for fingertip action.\ + \ Some aspects of this visualization are visuospatial: A floating see-through\ + \ cursor provides real-time, to-scale display of the fingerpad transducer, as\ + \ it is being moved by the user. Simultaneously, each haptic primitive instance\ + \ is represented by a floating block shape, type-colored, variably transparent,\ + \ and possibly overlapping with other such block shapes. Further aspects of visualization\ + \ are symbolic: Each instance is also represented by a type symbol, lighting up\ + \ within a grid if the instance is providing output to the user.   We discuss\ + \ the system's user interface, programming interface, and potential applications.\ + \ This from a general perspective that articulates and emphasizes the uniquely\ + \ enabling role of the principle of computation in the implementation of new forms\ + \ of instrumental control of musical sound. 
Beyond the currently presented technology,\ + \ this also reflects more broadly on the role of Digital Musical Instruments (DMIs)\ + \ in NIME.},\n address = {Copenhagen, Denmark},\n author = {Staas de Jong},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176292},\n issn = {2220-4806},\n\ + \ pages = {387--392},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Ghostfinger: a novel platform for fully computational fingertip controllers},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0073.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179172 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176292 issn: 2220-4806 - month: May - pages: 347--350 - publisher: Louisiana State University - title: Gestural Electronic Music using Machine Learning as Generative Device - url: http://www.nime.org/proceedings/2015/nime2015_117.pdf - year: 2015 + pages: 387--392 + publisher: Aalborg University Copenhagen + title: 'Ghostfinger: a novel platform for fully computational fingertip controllers' + url: http://www.nime.org/proceedings/2017/nime2017_paper0073.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: spapetti2015 - abstract: 'This paper describes the design of a hardware/software system for rendering - multi-point, localized vibrotactile feedback in a multi-touch musical interface. - A prototype was developed, based on the Madrona Labs Soundplane, which was chosen - for it provides easy access to multi-touch data, including force, and its easily - expandable layered construction. The proposed solution makes use of several piezo - actuator discs, densely arranged in a honeycomb pattern on a thin PCB layer. 
Based - on off-the-shelf components, custom amplifying and routing electronics were designed - to drive each piezo element with standard audio signals. Features, as well as - electronic and mechanical issues of the current prototype are discussed.' - address: 'Baton Rouge, Louisiana, USA' - author: Stefano Papetti and Sébastien Schiesser and Martin Fröhlich - bibtex: "@inproceedings{spapetti2015,\n abstract = {This paper describes the design\ - \ of a hardware/software system for rendering multi-point, localized vibrotactile\ - \ feedback in a multi-touch musical interface. A prototype was developed, based\ - \ on the Madrona Labs Soundplane, which was chosen for it provides easy access\ - \ to multi-touch data, including force, and its easily expandable layered construction.\ - \ The proposed solution makes use of several piezo actuator discs, densely arranged\ - \ in a honeycomb pattern on a thin PCB layer. Based on off-the-shelf components,\ - \ custom amplifying and routing electronics were designed to drive each piezo\ - \ element with standard audio signals. Features, as well as electronic and mechanical\ - \ issues of the current prototype are discussed.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Stefano Papetti and S\\'ebastien Schiesser and Martin Fr\\\ - ''ohlich},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179152},\n editor = {Edgar\ - \ Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages =\ - \ {235--240},\n publisher = {Louisiana State University},\n title = {Multi-point\ - \ vibrotactile feedback for an expressive musical interface},\n url = {http://www.nime.org/proceedings/2015/nime2015_118.pdf},\n\ - \ year = {2015}\n}\n" + ID: jarmitage2017 + abstract: 'Many digital musical instrument design frameworks have been proposed + that are well suited for analysis and comparison. 
However, not all provide applicable + design suggestions, especially where subtle but important details are concerned. + Using traditional lutherie as a model, we conducted a series of interviews to + explore how violin makers ``go beyond the obvious'''', and how players perceive + and describe subtle details of instrumental quality. We find that lutherie frameworks + provide clear design methods and have substantial empirical backing, but are not + enough to make a fine violin. Success comes after acquiring sufficient tacit knowledge, + which enables detailed craft through subjective, empirical methods. Testing instruments + for subtle qualities was suggested to be a different skill to playing. Whilst + players are able to identify some specific details about instrumental quality + by comparison, these are often not actionable, and important aspects of ``sound + and feeling'''' are much more difficult to describe. In the DMI domain, we introduce + NIMEcraft to describe subtle differences between otherwise identical instruments + and their underlying design processes, and consider how to improve the dissemination + of NIMEcraft.' + address: 'Copenhagen, Denmark' + author: Jack Armitage and Fabio Morreale and Andrew McPherson + bibtex: "@inproceedings{jarmitage2017,\n abstract = {Many digital musical instrument\ + \ design frameworks have been proposed that are well suited for analysis and comparison.\ + \ However, not all provide applicable design suggestions, especially where subtle\ + \ but important details are concerned. Using traditional lutherie as a model,\ + \ we conducted a series of interviews to explore how violin makers ``go beyond\ + \ the obvious'', and how players perceive and describe subtle details of instrumental\ + \ quality. We find that lutherie frameworks provide clear design methods and have\ + \ substantial empirical backing, but are not enough to make a fine violin. 
Success\ + \ comes after acquiring sufficient tacit knowledge, which enables detailed craft\ + \ through subjective, empirical methods. Testing instruments for subtle qualities\ + \ was suggested to be a different skill to playing. Whilst players are able to\ + \ identify some specific details about instrumental quality by comparison, these\ + \ are often not actionable, and important aspects of ``sound and feeling'' are\ + \ much more difficult to describe. In the DMI domain, we introduce NIMEcraft to\ + \ describe subtle differences between otherwise identical instruments and their\ + \ underlying design processes, and consider how to improve the dissemination of\ + \ NIMEcraft.},\n address = {Copenhagen, Denmark},\n author = {Jack Armitage and\ + \ Fabio Morreale and Andrew McPherson},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176294},\n\ + \ issn = {2220-4806},\n pages = {393--398},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {The finer the musician, the smaller the details: NIMEcraft\ + \ under the microscope},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0074.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179152 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176294 issn: 2220-4806 - month: May - pages: 235--240 - publisher: Louisiana State University - title: Multi-point vibrotactile feedback for an expressive musical interface - url: http://www.nime.org/proceedings/2015/nime2015_118.pdf - year: 2015 + pages: 393--398 + publisher: Aalborg University Copenhagen + title: 'The finer the musician, the smaller the details: NIMEcraft under the microscope' + url: http://www.nime.org/proceedings/2017/nime2017_paper0074.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: dramsay2015 - abstract: 'GroupLoop is a browser-based, 
collaborative audio feedback control system - for musical performance. GroupLoop users send their microphone stream to other - participants while simultaneously controlling the mix of other users'' streams - played through their speakers. Collaborations among users can yield complex feedback - loops where feedback paths overlap and interact. Users are able to shape the feedback - sounds in real-time by adjusting delay, EQ, and gain, as well as manipulating - the acoustics of their portion of the audio feedback path. This paper outlines - the basic principles underlying Grouploop, describes its design and feature-set, - and discusses observations of GroupLoop in performances. It concludes with a look - at future research and refinement. ' - address: 'Baton Rouge, Louisiana, USA' - author: David Ramsay and Joseph Paradiso - bibtex: "@inproceedings{dramsay2015,\n abstract = {GroupLoop is a browser-based,\ - \ collaborative audio feedback control system for musical performance. GroupLoop\ - \ users send their microphone stream to other participants while simultaneously\ - \ controlling the mix of other users' streams played through their speakers. Collaborations\ - \ among users can yield complex feedback loops where feedback paths overlap and\ - \ interact. Users are able to shape the feedback sounds in real-time by adjusting\ - \ delay, EQ, and gain, as well as manipulating the acoustics of their portion\ - \ of the audio feedback path. This paper outlines the basic principles underlying\ - \ Grouploop, describes its design and feature-set, and discusses observations\ - \ of GroupLoop in performances. It concludes with a look at future research and\ - \ refinement. 
},\n address = {Baton Rouge, Louisiana, USA},\n author = {David\ - \ Ramsay and Joseph Paradiso},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179158},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {251--254},\n publisher = {Louisiana State University},\n title\ - \ = {GroupLoop: A Collaborative, Network-Enabled Audio Feedback Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_119.pdf},\n year = {2015}\n\ - }\n" + ID: smehes2017 + abstract: 'Exploration is an intrinsic element of designing and engaging with acoustic + as well as digital musical instruments. This paper reports on the ongoing development + of a virtual-acoustic instrument based on a physical model of a string coupled + nonlinearly to a plate. The performer drives the model by tactile interaction + with a string-board controller fitted with piezo-electric sensors. The string-plate + model is formulated in a way that prioritises its parametric explorability. Where + the roles of creating performance gestures and designing instruments are traditionally + separated, such a design provides a continuum across these domains. The string-plate + model, its real-time implementation, and the control interface are described, + and the system is preliminarily evaluated through informal observations of how + musicians engage with the system.' + address: 'Copenhagen, Denmark' + author: Sandor Mehes and Maarten van Walstijn and Paul Stapleton + bibtex: "@inproceedings{smehes2017,\n abstract = {Exploration is an intrinsic element\ + \ of designing and engaging with acoustic as well as digital musical instruments.\ + \ This paper reports on the ongoing development of a virtual-acoustic instrument\ + \ based on a physical model of a string coupled nonlinearly to a plate. 
The performer\ + \ drives the model by tactile interaction with a string-board controller fitted\ + \ with piezo-electric sensors. The string-plate model is formulated in a way\ + \ that prioritises its parametric explorability. Where the roles of creating performance\ + \ gestures and designing instruments are traditionally separated, such a design\ + \ provides a continuum across these domains. The string-plate model, its real-time\ + \ implementation, and the control interface are described, and the system is preliminarily\ + \ evaluated through informal observations of how musicians engage with the system.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Sandor Mehes and Maarten van Walstijn\ + \ and Paul Stapleton},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176296},\n\ + \ issn = {2220-4806},\n pages = {399--403},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Virtual-Acoustic Instrument Design: Exploring the Parameter\ + \ Space of a String-Plate Model},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0075.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179158 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176296 issn: 2220-4806 - month: May - pages: 251--254 - publisher: Louisiana State University - title: 'GroupLoop: A Collaborative, Network-Enabled Audio Feedback Instrument' - url: http://www.nime.org/proceedings/2015/nime2015_119.pdf - year: 2015 + pages: 399--403 + publisher: Aalborg University Copenhagen + title: 'Virtual-Acoustic Instrument Design: Exploring the Parameter Space of a String-Plate + Model' + url: http://www.nime.org/proceedings/2017/nime2017_paper0075.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: kyamamoto2015 - abstract: 'We propose a novel user interface that enables control of a singing 
voice - synthesizer at a live improvisational performance. The user first registers the - lyrics of a song with the system before performance, and the system builds a probabilistic - model that models the possible jumps within the lyrics. During performance, the - user simultaneously inputs the lyrics of a song with the left hand using a vowel - keyboard and the melodies with the right hand using a standard musical keyboard. - Our system searches for a portion of the registered lyrics whose vowel sequence - matches the current user input using the probabilistic model, and sends the matched - lyrics to the singing voice synthesizer. The vowel input keys are mapped onto - a standard musical keyboard, enabling experienced keyboard players to learn the - system from a standard musical score. We examine the feasibility of the system - through a series of evaluations and user studies. ' - address: 'Baton Rouge, Louisiana, USA' - author: Kazuhiko Yamamoto and Takeo Igarashi - bibtex: "@inproceedings{kyamamoto2015,\n abstract = {We propose a novel user interface\ - \ that enables control of a singing voice synthesizer at a live improvisational\ - \ performance. The user first registers the lyrics of a song with the system before\ - \ performance, and the system builds a probabilistic model that models the possible\ - \ jumps within the lyrics. During performance, the user simultaneously inputs\ - \ the lyrics of a song with the left hand using a vowel keyboard and the melodies\ - \ with the right hand using a standard musical keyboard. Our system searches for\ - \ a portion of the registered lyrics whose vowel sequence matches the current\ - \ user input using the probabilistic model, and sends the matched lyrics to the\ - \ singing voice synthesizer. The vowel input keys are mapped onto a standard musical\ - \ keyboard, enabling experienced keyboard players to learn the system from a standard\ - \ musical score. 
We examine the feasibility of the system through a series of\ - \ evaluations and user studies. },\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Kazuhiko Yamamoto and Takeo Igarashi},\n booktitle = {Proceedings\ + ID: nbouillot2017 + abstract: 'Recent advances in computing offer the possibility to scale real-time + 3D virtual audio scenes to include hundreds of simultaneous sound sources, rendered + in realtime, for large numbers of audio outputs. Our Spatial Audio Toolkit for + Immersive Environments (SATIE), allows us to render these dense audio scenes to + large multi-channel (e.g. 32 or more) loudspeaker systems, in realtime and controlled + from external software such as 3D scenegraph software. As we describe here, SATIE + is designed for improved scalability: minimum dependency between nodes in the + audio DSP graph for parallel audio computation, controlling sound objects by groups + and load balancing computation of geometry that allow to reduce the number of + messages for controlling simultaneously a high number of sound sources. The paper + presents SATIE along with example use case scenarios. Our initial work demonstrates + SATIE''s flexibility, and has provided us with novel sonic sensations such as + ``audio depth of field'''' and real-time sound swarming.' + address: 'Copenhagen, Denmark' + author: Nicolas Bouillot and Zack Settel and Michal Seta + bibtex: "@inproceedings{nbouillot2017,\n abstract = {Recent advances in computing\ + \ offer the possibility to scale real-time 3D virtual audio scenes to include\ + \ hundreds of simultaneous sound sources, rendered in realtime, for large numbers\ + \ of audio outputs. Our Spatial Audio Toolkit for Immersive Environments (SATIE),\ + \ allows us to render these dense audio scenes to large multi-channel (e.g. 32\ + \ or more) loudspeaker systems, in realtime and controlled from external software\ + \ such as 3D scenegraph software. 
As we describe here, SATIE is designed for\ + \ improved scalability: minimum dependency between nodes in the audio DSP graph\ + \ for parallel audio computation, controlling sound objects by groups and load\ + \ balancing computation of geometry that allow to reduce the number of messages\ + \ for controlling simultaneously a high number of sound sources. The paper presents\ + \ SATIE along with example use case scenarios. Our initial work demonstrates SATIE's\ + \ flexibility, and has provided us with novel sonic sensations such as ``audio\ + \ depth of field'' and real-time sound swarming.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Nicolas Bouillot and Zack Settel and Michal Seta},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1181414},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {205--208},\n publisher = {Louisiana\ - \ State University},\n title = {LiVo: Sing a Song with a Vowel Keyboard},\n url\ - \ = {http://www.nime.org/proceedings/2015/nime2015_120.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/120/0120-file1.mp4},\n\ - \ year = {2015}\n}\n" + \ doi = {10.5281/zenodo.1176298},\n issn = {2220-4806},\n pages = {404--409},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {SATIE: a live and scalable\ + \ 3D audio scene rendering environment for large multi-channel loudspeaker configurations},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0076.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1181414 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176298 issn: 2220-4806 - month: May - pages: 205--208 - publisher: Louisiana State University - title: 'LiVo: Sing a Song with a Vowel Keyboard' - url: http://www.nime.org/proceedings/2015/nime2015_120.pdf - 
urlsuppl1: http://www.nime.org/proceedings/2015/120/0120-file1.mp4 - year: 2015 + pages: 404--409 + publisher: Aalborg University Copenhagen + title: 'SATIE: a live and scalable 3D audio scene rendering environment for large + multi-channel loudspeaker configurations' + url: http://www.nime.org/proceedings/2017/nime2017_paper0076.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: ktahiroglu2015 - abstract: 'This paper presents our current research in which we study the notion - of performer engagement within the variance and diversities of the intentional - activities of the performer in musical interaction. We introduce a user-test study - with the aim to evaluate our system''s engagement prediction capability and to - understand in detail the system''s response behaviour. The quantitative results - indicate that our system recognises and monitors performer''s engagement successfully, - although we found that the system''s response to maintain and deepen the performer''s - engagement is perceived differently among participants. The results reported in - this paper can be used to inform the design of interactive systems that enhance - the quality of performer''s engagement in musical interaction with new interfaces.' 
- address: 'Baton Rouge, Louisiana, USA' - author: Koray Tahiroglu and Thomas Svedström and Valtteri Wikström - bibtex: "@inproceedings{ktahiroglu2015,\n abstract = {This paper presents our current\ - \ research in which we study the notion of performer engagement within the variance\ - \ and diversities of the intentional activities of the performer in musical interaction.\ - \ We introduce a user-test study with the aim to evaluate our system's engagement\ - \ prediction capability and to understand in detail the system's response behaviour.\ - \ The quantitative results indicate that our system recognises and monitors performer's\ - \ engagement successfully, although we found that the system's response to maintain\ - \ and deepen the performer's engagement is perceived differently among participants.\ - \ The results reported in this paper can be used to inform the design of interactive\ - \ systems that enhance the quality of performer's engagement in musical interaction\ - \ with new interfaces.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ - \ {Koray Tahiroglu and Thomas Svedstr\\''om and Valtteri Wikstr\\''om},\n booktitle\ + ID: hscurto2017 + abstract: 'Machine learning tools for designing motion-sound relationships often + rely on a two-phase iterative process, where users must alternate between designing + gestures and performing mappings. We present a first prototype of a user adaptable + tool that aims at merging these design and performance steps into one fully interactive + experience. It is based on an online learning implementation of a Gaussian Mixture + Model supporting real-time adaptation to user movement and generation of sound + parameters. To allow both fine-tune modification tasks and open-ended improvisational + practices, we designed two interaction modes that either let users shape, or guide + interactive motion-sound mappings. 
Considering an improvisational use case, we + propose two example musical applications to illustrate how our tool might support + various forms of corporeal engagement with sound, and inspire further perspectives + for machine learning-mediated embodied musical expression.' + address: 'Copenhagen, Denmark' + author: Hugo Scurto and Frédéric Bevilacqua and Jules Françoise + bibtex: "@inproceedings{hscurto2017,\n abstract = {Machine learning tools for designing\ + \ motion-sound relationships often rely on a two-phase iterative process, where\ + \ users must alternate between designing gestures and performing mappings. We\ + \ present a first prototype of a user adaptable tool that aims at merging these\ + \ design and performance steps into one fully interactive experience. It is based\ + \ on an online learning implementation of a Gaussian Mixture Model supporting\ + \ real-time adaptation to user movement and generation of sound parameters. To\ + \ allow both fine-tune modification tasks and open-ended improvisational practices,\ + \ we designed two interaction modes that either let users shape, or guide interactive\ + \ motion-sound mappings. 
Considering an improvisational use case, we propose two\ + \ example musical applications to illustrate how our tool might support various\ + \ forms of corporeal engagement with sound, and inspire further perspectives for\ + \ machine learning-mediated embodied musical expression.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Hugo Scurto and Frédéric Bevilacqua and Jules Françoise},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176270},\n issn = {2220-4806},\n\ + \ pages = {410--415},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Shaping and Exploring Interactive Motion-Sound Mappings Using Online Clustering\ + \ Techniques},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0077.pdf},\n\ + \ year = {2017}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176270 + issn: 2220-4806 + pages: 410--415 + publisher: Aalborg University Copenhagen + title: Shaping and Exploring Interactive Motion-Sound Mappings Using Online Clustering + Techniques + url: http://www.nime.org/proceedings/2017/nime2017_paper0077.pdf + year: 2017 + + +- ENTRYTYPE: inproceedings + ID: kbhumber2017 + abstract: 'We describe the Responsive User Body Suit (RUBS), a tactile instrument + worn by performers that allows the generation and manipulation of audio output + using touch triggers. The RUBS system is a responsive interface between organic + touch and electronic audio, intimately located on the performer''s body. This + system offers an entry point into a more intuitive method of music performance. + A short overview of body instrument philosophy and related work is followed by + the development and implementation process of the RUBS as both an interface and + performance instrument. Lastly, observations, design challenges and future goals + are discussed.' 
+ address: 'Copenhagen, Denmark' + author: Kiran Bhumber and Bob Pritchard and Kitty Rodé + bibtex: "@inproceedings{kbhumber2017,\n abstract = {We describe the Responsive User\ + \ Body Suit (RUBS), a tactile instrument worn by performers that allows the generation\ + \ and manipulation of audio output using touch triggers. The RUBS system is a\ + \ responsive interface between organic touch and electronic audio, intimately\ + \ located on the performer's body. This system offers an entry point into a more\ + \ intuitive method of music performance. A short overview of body instrument philosophy\ + \ and related work is followed by the development and implementation process of\ + \ the RUBS as both an interface and performance instrument. Lastly, observations,\ + \ design challenges and future goals are discussed.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Kiran Bhumber and Bob Pritchard and Kitty Rodé},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179182},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {132--135},\n\ - \ publisher = {Louisiana State University},\n title = {Musical Engagement that\ - \ is Predicated on Intentional Activity of the Performer with NOISA Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_121.pdf},\n year = {2015}\n\ - }\n" + \ Expression},\n doi = {10.5281/zenodo.1176300},\n issn = {2220-4806},\n pages\ + \ = {416--419},\n publisher = {Aalborg University Copenhagen},\n title = {A Responsive\ + \ User Body Suit (RUBS)},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0078.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179182 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176300 issn: 2220-4806 - month: May - pages: 132--135 - 
publisher: Louisiana State University - title: Musical Engagement that is Predicated on Intentional Activity of the Performer - with NOISA Instruments - url: http://www.nime.org/proceedings/2015/nime2015_121.pdf - year: 2015 + pages: 416--419 + publisher: Aalborg University Copenhagen + title: A Responsive User Body Suit (RUBS) + url: http://www.nime.org/proceedings/2017/nime2017_paper0078.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: jlong2015 - abstract: 'This paper presents a methodology for evaluating the performance of several - types of striking mechanism commonly utilized in musical robotic percussion systems. - The goal is to take steps towards standardizing methods of comparing the attributes - of a range of devices to inform their design and application in various musical - situations. A system for testing the latency, consistency, loudness and striking - speed of these mechanisms is described and the methods are demonstrated by subjecting - several new robotic percussion mechanisms to these tests. An analysis of the results - of the evaluation is also presented and the advantages and disadvantages of each - of the types of mechanism in various musical contexts is discussed.' - address: 'Baton Rouge, Louisiana, USA' - author: Jason Long and Jim Murphy and Ajay Kapur and Dale Carnegie - bibtex: "@inproceedings{jlong2015,\n abstract = {This paper presents a methodology\ - \ for evaluating the performance of several types of striking mechanism commonly\ - \ utilized in musical robotic percussion systems. The goal is to take steps towards\ - \ standardizing methods of comparing the attributes of a range of devices to inform\ - \ their design and application in various musical situations. A system for testing\ - \ the latency, consistency, loudness and striking speed of these mechanisms is\ - \ described and the methods are demonstrated by subjecting several new robotic\ - \ percussion mechanisms to these tests. 
An analysis of the results of the evaluation\ - \ is also presented and the advantages and disadvantages of each of the types\ - \ of mechanism in various musical contexts is discussed.},\n address = {Baton\ - \ Rouge, Louisiana, USA},\n author = {Jason Long and Jim Murphy and Ajay Kapur\ - \ and Dale Carnegie},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179120},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {404--407},\n publisher = {Louisiana State University},\n title\ - \ = {A Methodology for Evaluating Robotic Striking Mechanisms for Musical Contexts},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_130.pdf},\n year = {2015}\n\ - }\n" + ID: mhojlund2017 + abstract: 'This paper describes the development of a loudness-based compressor for + live audio streams. The need for this device arose while developing the public + sound art project The Overheard, which involves mixing together several live audio + streams through a web based mixing interface. In order to preserve a natural sounding + dynamic image from the varying sound sources that can be played back under varying + conditions, an adaptation of the EBU R128 loudness measurement recommendation, + originally developed for levelling non-real-time broadcast material, has been + applied. The paper describes the Pure Data implementation and the necessary compromises + enforced by the live streaming condition. Lastly observations regarding design + challenges, related application areas and future goals are presented. ' + address: 'Copenhagen, Denmark' + author: Marie Højlund and Morten Riis and Daniel Rothmann and Jonas Kirkegaard + bibtex: "@inproceedings{mhojlund2017,\n abstract = {This paper describes the development\ + \ of a loudness-based compressor for live audio streams. 
The need for this device\ + \ arose while developing the public sound art project The Overheard, which involves\ + \ mixing together several live audio streams through a web based mixing interface.\ + \ In order to preserve a natural sounding dynamic image from the varying sound\ + \ sources that can be played back under varying conditions, an adaptation of the\ + \ EBU R128 loudness measurement recommendation, originally developed for levelling\ + \ non-real-time broadcast material, has been applied. The paper describes the\ + \ Pure Data implementation and the necessary compromises enforced by the live\ + \ streaming condition. Lastly observations regarding design challenges, related\ + \ application areas and future goals are presented. },\n address = {Copenhagen,\ + \ Denmark},\n author = {Marie Højlund and Morten Riis and Daniel Rothmann and\ + \ Jonas Kirkegaard},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176354},\n\ + \ issn = {2220-4806},\n pages = {420--425},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Applying the EBU R128 Loudness Standard in live-streaming\ + \ sound sculptures},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0079.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179120 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176354 issn: 2220-4806 - month: May - pages: 404--407 - publisher: Louisiana State University - title: A Methodology for Evaluating Robotic Striking Mechanisms for Musical Contexts - url: http://www.nime.org/proceedings/2015/nime2015_130.pdf - year: 2015 + pages: 420--425 + publisher: Aalborg University Copenhagen + title: Applying the EBU R128 Loudness Standard in live-streaming sound sculptures + url: http://www.nime.org/proceedings/2017/nime2017_paper0079.pdf + year: 2017 - ENTRYTYPE: 
inproceedings - ID: skemper2015 - abstract: 'The Modular Electro-Acoustic Robotic Instrument System (MEARIS) represents - a new type of hybrid electroacoustic-electromechanical instrument model. Monochord-Aerophone - Robotic Instrument Ensemble (MARIE), the first realization of a MEARIS, is a set - of interconnected monochord and cylindrical aerophone robotic musical instruments - created by Expressive Machines Musical Instruments (EMMI). MARIE comprises one - or more matched pairs of Automatic Monochord Instruments (AMI) and Cylindrical - Aerophone Robotic Instruments (CARI). Each AMI and CARI is a self-contained, independently - operable robotic instrument with an acoustic element, a control system that enables - automated manipulation of this element, and an audio system that includes input - and output transducers coupled to the acoustic element. Each AMI-CARI pair can - also operate as an interconnected hybrid instrument, allowing for effects that - have heretofore been the domain of physical modeling technologies, such as a plucked - air column or blown string. Since its creation, MARIE has toured widely, performed - with dozens of human instrumentalists, and has been utilized by nine composers - in the realization of more than twenty new musical works.' - address: 'Baton Rouge, Louisiana, USA' - author: Troy Rogers and Steven Kemper and Scott Barton - bibtex: "@inproceedings{skemper2015,\n abstract = {The Modular Electro-Acoustic\ - \ Robotic Instrument System (MEARIS) represents a new type of hybrid electroacoustic-electromechanical\ - \ instrument model. Monochord-Aerophone Robotic Instrument Ensemble (MARIE), the\ - \ first realization of a MEARIS, is a set of interconnected monochord and cylindrical\ - \ aerophone robotic musical instruments created by Expressive Machines Musical\ - \ Instruments (EMMI). MARIE comprises one or more matched pairs of Automatic Monochord\ - \ Instruments (AMI) and Cylindrical Aerophone Robotic Instruments (CARI). 
Each\ - \ AMI and CARI is a self-contained, independently operable robotic instrument\ - \ with an acoustic element, a control system that enables automated manipulation\ - \ of this element, and an audio system that includes input and output transducers\ - \ coupled to the acoustic element. Each AMI-CARI pair can also operate as an interconnected\ - \ hybrid instrument, allowing for effects that have heretofore been the domain\ - \ of physical modeling technologies, such as a plucked air column or blown string.\ - \ Since its creation, MARIE has toured widely, performed with dozens of human\ - \ instrumentalists, and has been utilized by nine composers in the realization\ - \ of more than twenty new musical works.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Troy Rogers and Steven Kemper and Scott Barton},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179166},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {408--411},\n\ - \ publisher = {Louisiana State University},\n title = {MARIE: Monochord-Aerophone\ - \ Robotic Instrument Ensemble},\n url = {http://www.nime.org/proceedings/2015/nime2015_141.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/141/0141-file1.mov},\n year\ - \ = {2015}\n}\n" + ID: eberdahl2017 + abstract: The concept of embedded acoustic systems for diffusing spatial audio is + considered. This paradigm is enabled by advancements in floating-point hardware + on inexpensive embedded Linux systems. Examples are presented using line array + configurations for electroacoustic music and for making interactive kiosk and + poster systems. 
+ address: 'Copenhagen, Denmark' + author: Edgar Berdahl and Matthew Blessing and Matthew Williams and Pacco Tan and + Brygg Ullmer and Jesse Allison + bibtex: "@inproceedings{eberdahl2017,\n abstract = {The concept of embedded acoustic\ + \ systems for diffusing spatial audio is considered. This paradigm is enabled\ + \ by advancements in floating-point hardware on inexpensive embedded Linux systems.\ + \ Examples are presented using line array configurations for electroacoustic music\ + \ and for making interactive kiosk and poster systems.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Edgar Berdahl and Matthew Blessing and Matthew Williams\ + \ and Pacco Tan and Brygg Ullmer and Jesse Allison},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176302},\n issn = {2220-4806},\n pages = {426--430},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Spatial Audio Approaches\ + \ for Embedded Sound Art Installations with Loudspeaker Line Arrays},\n url =\ + \ {http://www.nime.org/proceedings/2017/nime2017_paper0080.pdf},\n year = {2017}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179166 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176302 issn: 2220-4806 - month: May - pages: 408--411 - publisher: Louisiana State University - title: 'MARIE: Monochord-Aerophone Robotic Instrument Ensemble' - url: http://www.nime.org/proceedings/2015/nime2015_141.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/141/0141-file1.mov - year: 2015 + pages: 426--430 + publisher: Aalborg University Copenhagen + title: Spatial Audio Approaches for Embedded Sound Art Installations with Loudspeaker + Line Arrays + url: http://www.nime.org/proceedings/2017/nime2017_paper0080.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: jharriman2015 - abstract: 'Music offers an intriguing context 
to engage children in electronics, - programming and more. Over the last year we been developing a hardware and software - toolkit for music called modular-muse. Here we describe the design and goals for - these tools and how they have been used in different settings to introduce children - to concepts of interaction design for music and sound design. Two exploratory - studies which used modular-muse are described here with different approaches; - a two day build your own instrument workshop where participants learned how to - use both hardware and software concurrently to control synthesized sounds and - trigger solenoids, and a middle school music classroom where the focus was only - on programming for sound synthesis using the modular-muse Pd library. During the - second study, a project called Pd Poems, a teaching progression emerged we call - Build-Play-Share-Focus which is also described. ' - address: 'Baton Rouge, Louisiana, USA' - author: Jiffer Harriman - bibtex: "@inproceedings{jharriman2015,\n abstract = {Music offers an intriguing\ - \ context to engage children in electronics, programming and more. Over the last\ - \ year we been developing a hardware and software toolkit for music called modular-muse.\ - \ Here we describe the design and goals for these tools and how they have been\ - \ used in different settings to introduce children to concepts of interaction\ - \ design for music and sound design. Two exploratory studies which used modular-muse\ - \ are described here with different approaches; a two day build your own instrument\ - \ workshop where participants learned how to use both hardware and software concurrently\ - \ to control synthesized sounds and trigger solenoids, and a middle school music\ - \ classroom where the focus was only on programming for sound synthesis using\ - \ the modular-muse Pd library. 
During the second study, a project called Pd Poems,\ - \ a teaching progression emerged we call Build-Play-Share-Focus which is also\ - \ described. },\n address = {Baton Rouge, Louisiana, USA},\n author = {Jiffer\ - \ Harriman},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179074},\n editor\ - \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ - \ pages = {331--334},\n publisher = {Louisiana State University},\n title = {Pd\ - \ Poems and Teaching Tools},\n url = {http://www.nime.org/proceedings/2015/nime2015_145.pdf},\n\ - \ year = {2015}\n}\n" + ID: fkeenan2017 + abstract: 'This paper presents the next stage of an investigation into the potential + of historical theatre sound effects as a resource for Sonic Interaction Design + (SID). An acoustic theatre wind machine was constructed, and a digital physical + modelling-based version of this specific machine was programmed using the Sound + Designer''s Toolkit (SDT) in Max/MSP. The acoustic wind machine was fitted with + 3D printed gearing to mechanically drive an optical encoder and control the digital + synthesis engine in real time. The design of this system was informed by an initial + comparison between the acoustic wind machine and the first iteration of its digital + counterpart. To explore the main acoustic parameters and the sonic range of the + acoustic and digital wind machines in operation, three simple and distinct rotational + gestures were performed, with the resulting sounds recorded simultaneously, facilitating + an analysis of the real-time performance of both sources. The results are reported, + with an outline of future work. 
' + address: 'Copenhagen, Denmark' + author: Fiona Keenan and Sandra Pauletto + bibtex: "@inproceedings{fkeenan2017,\n abstract = {This paper presents the next\ + \ stage of an investigation into the potential of historical theatre sound effects\ + \ as a resource for Sonic Interaction Design (SID). An acoustic theatre wind machine\ + \ was constructed, and a digital physical modelling-based version of this specific\ + \ machine was programmed using the Sound Designer's Toolkit (SDT) in Max/MSP.\ + \ The acoustic wind machine was fitted with 3D printed gearing to mechanically\ + \ drive an optical encoder and control the digital synthesis engine in real time.\ + \ The design of this system was informed by an initial comparison between the\ + \ acoustic wind machine and the first iteration of its digital counterpart. To\ + \ explore the main acoustic parameters and the sonic range of the acoustic and\ + \ digital wind machines in operation, three simple and distinct rotational gestures\ + \ were performed, with the resulting sounds recorded simultaneously, facilitating\ + \ an analysis of the real-time performance of both sources. The results are reported,\ + \ with an outline of future work. 
},\n address = {Copenhagen, Denmark},\n author\ + \ = {Fiona Keenan and Sandra Pauletto},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176304},\n\ + \ issn = {2220-4806},\n pages = {431--435},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Design and Evaluation of a Digital Theatre Wind Machine},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0081.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179074 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176304 issn: 2220-4806 - month: May - pages: 331--334 - publisher: Louisiana State University - title: Pd Poems and Teaching Tools - url: http://www.nime.org/proceedings/2015/nime2015_145.pdf - year: 2015 + pages: 431--435 + publisher: Aalborg University Copenhagen + title: Design and Evaluation of a Digital Theatre Wind Machine + url: http://www.nime.org/proceedings/2017/nime2017_paper0081.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: rhayward2015 - abstract: 'The Hayward Tuning Vine is a software interface for exploring the system - of microtonal tuning known as Just Intonation. Based ultimately on prime number - relationships, harmonic space in Just Intonation is inherently multidimensional, - with each prime number tracing a unique path in space. Taking this multidimensionality - as its point of departure, the Tuning Vine interface assigns a unique angle and - colour to each prime number, along with aligning melodic pitch height to vertical - height on the computer screen. These features allow direct and intuitive interaction - with Just Intonation. The inclusion of a transposition function along each prime - number axis also enables potentially unlimited exploration of harmonic space within - prime limit 23. 
Currently available as desktop software, a prototype for a hardware - version has also been constructed, and future tablet app and hardware versions - of the Tuning Vine are planned that will allow tangible as well as audiovisual - interaction with microtonal harmonic space.' - address: 'Baton Rouge, Louisiana, USA' - author: Robin Hayward - bibtex: "@inproceedings{rhayward2015,\n abstract = {The Hayward Tuning Vine is a\ - \ software interface for exploring the system of microtonal tuning known as Just\ - \ Intonation. Based ultimately on prime number relationships, harmonic space in\ - \ Just Intonation is inherently multidimensional, with each prime number tracing\ - \ a unique path in space. Taking this multidimensionality as its point of departure,\ - \ the Tuning Vine interface assigns a unique angle and colour to each prime number,\ - \ along with aligning melodic pitch height to vertical height on the computer\ - \ screen. These features allow direct and intuitive interaction with Just Intonation.\ - \ The inclusion of a transposition function along each prime number axis also\ - \ enables potentially unlimited exploration of harmonic space within prime limit\ - \ 23. 
Currently available as desktop software, a prototype for a hardware version\ - \ has also been constructed, and future tablet app and hardware versions of the\ - \ Tuning Vine are planned that will allow tangible as well as audiovisual interaction\ - \ with microtonal harmonic space.},\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Robin Hayward},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179084},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {209--214},\n publisher = {Louisiana State University},\n title\ - \ = {The Hayward Tuning Vine: an interface for Just Intonation},\n url = {http://www.nime.org/proceedings/2015/nime2015_146.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/146/0146-file1.mov},\n year\ - \ = {2015}\n}\n" + ID: ihattwick2017 + abstract: 'In this paper we present a discussion of the development of hardware + systems in collaboration with professional artists, a context which presents both + challenges and opportunities for researchers interested in the uses of technology + in artistic practice. The establishment of design specifications within these + contexts can be challenging, especially as they are likely to change during the + development process. In order to assist in the consideration of the complete set + of design specifications, we identify seven aspects of hardware design relevant + to our applications: function, aesthetics, support for artistic creation, system + architecture, manufacturing, robustness, and reusability. Examples drawn from + our previous work are used to illustrate the characteristics of interdependency + and temporality, and form the basis of case studies investigating support for + artistic creation and reusability. 
We argue that the consideration of these design + aspects at appropriate times within the development process may facilitate the + ability of hardware systems to support continued use in professional applications.' + address: 'Copenhagen, Denmark' + author: Ian Hattwick and Marcelo M. Wanderley + bibtex: "@inproceedings{ihattwick2017,\n abstract = {In this paper we present a\ + \ discussion of the development of hardware systems in collaboration with professional\ + \ artists, a context which presents both challenges and opportunities for researchers\ + \ interested in the uses of technology in artistic practice. The establishment\ + \ of design specifications within these contexts can be challenging, especially\ + \ as they are likely to change during the development process. In order to assist\ + \ in the consideration of the complete set of design specifications, we identify\ + \ seven aspects of hardware design relevant to our applications: function, aesthetics,\ + \ support for artistic creation, system architecture, manufacturing, robustness,\ + \ and reusability. Examples drawn from our previous work are used to illustrate\ + \ the characteristics of interdependency and temporality, and form the basis of\ + \ case studies investigating support for artistic creation and reusability. We\ + \ argue that the consideration of these design aspects at appropriate times within\ + \ the development process may facilitate the ability of hardware systems to support\ + \ continued use in professional applications.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Ian Hattwick and Marcelo M. 
Wanderley},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176306},\n issn = {2220-4806},\n pages = {436--441},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Design of Hardware Systems\ + \ for Professional Artistic Applications},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0082.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179084 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176306 issn: 2220-4806 - month: May - pages: 209--214 - publisher: Louisiana State University - title: 'The Hayward Tuning Vine: an interface for Just Intonation' - url: http://www.nime.org/proceedings/2015/nime2015_146.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/146/0146-file1.mov - year: 2015 + pages: 436--441 + publisher: Aalborg University Copenhagen + title: Design of Hardware Systems for Professional Artistic Applications + url: http://www.nime.org/proceedings/2017/nime2017_paper0082.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: mkrzyzaniak2015 - abstract: 'Herein is presented a method of classifying hand-drum strokes in real-time - by analyzing 50 milliseconds of audio signal as recorded by a contact-mic affixed - to the body of the instrument. The classifier performs with an average accuracy - of about 95% across several experiments on archetypical strokes, and 89% on uncontrived - playing. A complete ANSI C implementation for OSX and Linux is available on the - author''s website.' 
- address: 'Baton Rouge, Louisiana, USA' - author: Michael Krzyzaniak and Garth Paine - bibtex: "@inproceedings{mkrzyzaniak2015,\n abstract = {Herein is presented a method\ - \ of classifying hand-drum strokes in real-time by analyzing 50 milliseconds of\ - \ audio signal as recorded by a contact-mic affixed to the body of the instrument.\ - \ The classifier performs with an average accuracy of about 95% across several\ - \ experiments on archetypical strokes, and 89% on uncontrived playing. A complete\ - \ ANSI C implementation for OSX and Linux is available on the author's website.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Michael Krzyzaniak and\ - \ Garth Paine},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179112},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {400--403},\n publisher = {Louisiana State University},\n title\ - \ = {Realtime Classification of Hand-Drum Strokes},\n url = {http://www.nime.org/proceedings/2015/nime2015_147.pdf},\n\ - \ year = {2015}\n}\n" + ID: ajensenius2017 + abstract: 'This paper explores sonic microinteraction using muscle sensing through + the Myo armband. The first part presents results from a small series of experiments + aimed at finding the baseline micromotion and muscle activation data of people + being at rest or performing short/small actions. The second part presents the + prototype instrument MicroMyo, built around the concept of making sound with little + motion. The instrument plays with the convention that inputting more energy into + an instrument results in more sound. MicroMyo, on the other hand, is built so + that the less you move, the more it sounds. Our user study shows that while such + an "inverse instrument" may seem puzzling at first, it also opens a space for + interesting musical interactions. 
' + address: 'Copenhagen, Denmark' + author: Alexander Refsum Jensenius and Victor Gonzalez Sanchez and Agata Zelechowska + and Kari Anne Vadstensvik Bjerkestrand + bibtex: "@inproceedings{ajensenius2017,\n abstract = {This paper explores sonic\ + \ microinteraction using muscle sensing through the Myo armband. The first part\ + \ presents results from a small series of experiments aimed at finding the baseline\ + \ micromotion and muscle activation data of people being at rest or performing\ + \ short/small actions. The second part presents the prototype instrument MicroMyo,\ + \ built around the concept of making sound with little motion. The instrument\ + \ plays with the convention that inputting more energy into an instrument results\ + \ in more sound. MicroMyo, on the other hand, is built so that the less you move,\ + \ the more it sounds. Our user study shows that while such an \"inverse instrument\"\ + \ may seem puzzling at first, it also opens a space for interesting musical interactions.\ + \ },\n address = {Copenhagen, Denmark},\n author = {Alexander Refsum Jensenius\ + \ and Victor Gonzalez Sanchez and Agata Zelechowska and Kari Anne Vadstensvik\ + \ Bjerkestrand},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176308},\n\ + \ issn = {2220-4806},\n pages = {442--445},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Exploring the Myo controller for sonic microinteraction},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0083.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179112 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176308 issn: 2220-4806 - month: May - pages: 400--403 - publisher: Louisiana State University - title: Realtime Classification of Hand-Drum Strokes - url: 
http://www.nime.org/proceedings/2015/nime2015_147.pdf - year: 2015 + pages: 442--445 + publisher: Aalborg University Copenhagen + title: Exploring the Myo controller for sonic microinteraction + url: http://www.nime.org/proceedings/2017/nime2017_paper0083.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: rvanrooyen2015 - abstract: 'Comparative studies require a baseline reference and a documented process - to capture new subject data. This paper combined with its principal reference - [1] presents a definitive dataset in the context of snare drum performances along - with a procedure for data acquisition, and a methodology for quantitative analysis. - The dataset contains video, audio, and discrete two dimensional motion data for - forty standardized percussive rudiments.' - address: 'Baton Rouge, Louisiana, USA' - author: Robert Van Rooyen and Andrew Schloss and George Tzanetakis - bibtex: "@inproceedings{rvanrooyen2015,\n abstract = {Comparative studies require\ - \ a baseline reference and a documented process to capture new subject data. This\ - \ paper combined with its principal reference [1] presents a definitive dataset\ - \ in the context of snare drum performances along with a procedure for data acquisition,\ - \ and a methodology for quantitative analysis. 
The dataset contains video, audio,\ - \ and discrete two dimensional motion data for forty standardized percussive rudiments.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Robert Van Rooyen and Andrew\ - \ Schloss and George Tzanetakis},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179168},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {329--330},\n publisher = {Louisiana State University},\n title\ - \ = {Snare Drum Motion Capture Dataset},\n url = {http://www.nime.org/proceedings/2015/nime2015_148.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/148/0148-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: jtilbian2017 + abstract: 'Stride is a language tailored for designing new digital musical instruments + and interfaces. Stride enables designers to fine tune the sound and the interactivity + of the instruments they wish to create. Stride code provides a high-level description + of processes in a platform agnostic manner. The syntax used to define these processes + can also be used to define low-level signal processing algorithms. Unlike other + domain-specific languages for sound synthesis and audio processing, Stride can + generate optimized code that can run on any supported hardware platform. The generated + code can be compiled to run on a full featured operating system or bare metal + on embedded devices. Stride goes further and enables a designer to consolidate + various supported hardware and software platforms, define the communication between + them, and target them as a single heterogeneous system.' + address: 'Copenhagen, Denmark' + author: Joseph Tilbian and Andres Cabrera + bibtex: "@inproceedings{jtilbian2017,\n abstract = {Stride is a language tailored\ + \ for designing new digital musical instruments and interfaces. 
Stride enables\ + \ designers to fine tune the sound and the interactivity of the instruments they\ + \ wish to create. Stride code provides a high-level description of processes in\ + \ a platform agnostic manner. The syntax used to define these processes can also\ + \ be used to define low-level signal processing algorithms. Unlike other domain-specific\ + \ languages for sound synthesis and audio processing, Stride can generate optimized\ + \ code that can run on any supported hardware platform. The generated code can\ + \ be compiled to run on a full featured operating system or bare metal on embedded\ + \ devices. Stride goes further and enables a designer to consolidate various supported\ + \ hardware and software platforms, define the communication between them, and\ + \ target them as a single heterogeneous system.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Joseph Tilbian and Andres Cabrera},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176310},\n issn = {2220-4806},\n pages = {446--449},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {Stride for Interactive Musical\ + \ Instrument Design},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0084.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179168 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176310 issn: 2220-4806 - month: May - pages: 329--330 - publisher: Louisiana State University - title: Snare Drum Motion Capture Dataset - url: http://www.nime.org/proceedings/2015/nime2015_148.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/148/0148-file1.mp4 - year: 2015 + pages: 446--449 + publisher: Aalborg University Copenhagen + title: Stride for Interactive Musical Instrument Design + url: http://www.nime.org/proceedings/2017/nime2017_paper0084.pdf + year: 
2017 - ENTRYTYPE: inproceedings - ID: rbhandari2015 - abstract: 'Biofeedback tools generally use visualizations to display physiological - information to the user. As such, these tools are incompatible with visually demanding - tasks such as driving. While auditory or haptic biofeedback may be used in these - cases, the additional sensory channels can increase workload or act as a nuisance - to the user. A number of studies, however, have shown that music can improve mood - and concentration, while also reduce aggression and boredom. Here, we propose - an intervention that combines the benefits of biofeedback and music to help users - regulate their stress response while performing a visual task (driving a car simulator). - Our approach encourages slow breathing by adjusting the quality of the music in - response to the user''s breathing rate. We evaluate the intervention on a 2×2 - design with music and auditory biofeedback as independent variables. Our results - indicate that our music-biofeedback intervention leads to lower arousal (reduced - electrodermal activity and increased heart rate variability) than music alone, - auditory biofeedback alone and a control condition. ' - address: 'Baton Rouge, Louisiana, USA' - author: Rhushabh Bhandari and Avinash Parnandi and Eva Shipp and Beena Ahmed and - Ricardo Gutierrez-Osuna - bibtex: "@inproceedings{rbhandari2015,\n abstract = {Biofeedback tools generally\ - \ use visualizations to display physiological information to the user. As such,\ - \ these tools are incompatible with visually demanding tasks such as driving.\ - \ While auditory or haptic biofeedback may be used in these cases, the additional\ - \ sensory channels can increase workload or act as a nuisance to the user. A number\ - \ of studies, however, have shown that music can improve mood and concentration,\ - \ while also reduce aggression and boredom. 
Here, we propose an intervention that\ - \ combines the benefits of biofeedback and music to help users regulate their\ - \ stress response while performing a visual task (driving a car simulator). Our\ - \ approach encourages slow breathing by adjusting the quality of the music in\ - \ response to the user's breathing rate. We evaluate the intervention on a 2$\\\ - times$2 design with music and auditory biofeedback as independent variables. Our\ - \ results indicate that our music-biofeedback intervention leads to lower arousal\ - \ (reduced electrodermal activity and increased heart rate variability) than music\ - \ alone, auditory biofeedback alone and a control condition. },\n address = {Baton\ - \ Rouge, Louisiana, USA},\n author = {Rhushabh Bhandari and Avinash Parnandi and\ - \ Eva Shipp and Beena Ahmed and Ricardo Gutierrez-Osuna},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179030},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {78--82},\n publisher = {Louisiana\ - \ State University},\n title = {Music-based respiratory biofeedback in visually-demanding\ - \ tasks},\n url = {http://www.nime.org/proceedings/2015/nime2015_149.pdf},\n urlsuppl1\ - \ = {http://www.nime.org/proceedings/2015/149/0149-file1.mp4},\n year = {2015}\n\ - }\n" + ID: jfernandez2017 + abstract: 'We present here GeKiPe, a gestural interface for musical expression, + combining images and sounds, generated and controlled in real time by a performer. + GeKiPe is developed as part of a creation project, exploring the control of virtual + instruments through the analysis of gestures specific to instrumentalists, and + to percussionists in particular. 
GeKiPe was used for the creation of a collaborative + stage performance (Sculpt), in which the musician and their movements are captured + by different methods (infrared Kinect cameras and gesture-sensors on controller + gloves). The use of GeKiPe as an alternate sound and image controller allowed + us to combine body movement, musical gestures and audiovisual expressions to create + challenging collaborative performances.' + address: 'Copenhagen, Denmark' + author: José Miguel Fernandez and Thomas Köppel and Nina Verstraete and Grégoire + Lorieux and Alexander Vert and Philippe Spiesser + bibtex: "@inproceedings{jfernandez2017,\n abstract = {We present here GeKiPe, a\ + \ gestural interface for musical expression, combining images and sounds, generated\ + \ and controlled in real time by a performer. GeKiPe is developed as part of a\ + \ creation project, exploring the control of virtual instruments through the analysis\ + \ of gestures specific to instrumentalists, and to percussionists in particular.\ + \ GeKiPe was used for the creation of a collaborative stage performance (Sculpt),\ + \ in which the musician and their movements are captured by different methods\ + \ (infrared Kinect cameras and gesture-sensors on controller gloves). 
The use\ + \ of GeKiPe as an alternate sound and image controller allowed us to combine body\ + \ movement, musical gestures and audiovisual expressions to create challenging\ + \ collaborative performances.},\n address = {Copenhagen, Denmark},\n author =\ + \ {José Miguel Fernandez and Thomas Köppel and Nina Verstraete and Grégoire Lorieux\ + \ and Alexander Vert and Philippe Spiesser},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176312},\n issn = {2220-4806},\n pages = {450--455},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {GeKiPe, a gesture-based interface\ + \ for audiovisual performance},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0085.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179030 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176312 issn: 2220-4806 - month: May - pages: 78--82 - publisher: Louisiana State University - title: Music-based respiratory biofeedback in visually-demanding tasks - url: http://www.nime.org/proceedings/2015/nime2015_149.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/149/0149-file1.mp4 - year: 2015 + pages: 450--455 + publisher: Aalborg University Copenhagen + title: 'GeKiPe, a gesture-based interface for audiovisual performance' + url: http://www.nime.org/proceedings/2017/nime2017_paper0085.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: mmyllykoski2015 - abstract: 'This paper discusses perspectives for conceptualizing and developing - hand-based wearable musical interface. Previous implementations of such interfaces - have not been targeted for music pedagogical use. We propose principles for pedagogically - oriented `musical hand'' and outline its development through the process of prototyping, - which involves a variety of methods. 
The current functional prototype, a touch-based - musical glove, is presented. ' - address: 'Baton Rouge, Louisiana, USA' - author: Mikko Myllykoski and Kai Tuuri and Esa Viirret and Jukka Louhivuori and - Antti Peltomaa and Janne Kekäläinen - bibtex: "@inproceedings{mmyllykoski2015,\n abstract = {This paper discusses perspectives\ - \ for conceptualizing and developing hand-based wearable musical interface. Previous\ - \ implementations of such interfaces have not been targeted for music pedagogical\ - \ use. We propose principles for pedagogically oriented `musical hand' and outline\ - \ its development through the process of prototyping, which involves a variety\ - \ of methods. The current functional prototype, a touch-based musical glove, is\ - \ presented. },\n address = {Baton Rouge, Louisiana, USA},\n author = {Mikko Myllykoski\ - \ and Kai Tuuri and Esa Viirret and Jukka Louhivuori and Antti Peltomaa and Janne\ - \ Kek\\''al\\''ainen},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179144},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {182--183},\n publisher = {Louisiana State University},\n title\ - \ = {Prototyping hand-based wearable music education technology},\n url = {http://www.nime.org/proceedings/2015/nime2015_151.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/151/0151-file1.m4v},\n year\ - \ = {2015}\n}\n" + ID: jlarsen2017a + abstract: 'Many musical instruments exhibit an inherent latency or delayed auditory + feedback (DAF) between actuator activation and the occurrence of sound. We investigated + how DAF (73ms and 250ms) affects musically trained (MT) and non-musically trained + (NMT) people''s ability to synchronize the audible strum of an actuated guitar + to a metronome at 60bpm and 120bpm. The long DAF matched a subdivision of the + overall tempo. 
We compared their performance using two different input devices + with feedback before or on activation. While 250ms DAF hardly affected musically + trained participants, non-musically trained participants'' performance declined + substantially both in mean synchronization error and its spread. Neither tempo + nor input devices affected performance.' + address: 'Copenhagen, Denmark' + author: Jeppe Larsen and Hendrik Knoche + bibtex: "@inproceedings{jlarsen2017a,\n abstract = {Many musical instruments exhibit\ + \ an inherent latency or delayed auditory feedback (DAF) between actuator activation\ + \ and the occurrence of sound. We investigated how DAF (73ms and 250ms) affects\ + \ musically trained (MT) and non-musically trained (NMT) people's ability to synchronize\ + \ the audible strum of an actuated guitar to a metronome at 60bpm and 120bpm.\ + \ The long DAF matched a subdivision of the overall tempo. We compared their performance\ + \ using two different input devices with feedback before or on activation. While\ + \ 250ms DAF hardly affected musically trained participants, non-musically trained\ + \ participants' performance declined substantially both in mean synchronization\ + \ error and its spread. 
Neither tempo nor input devices affected performance.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Jeppe Larsen and Hendrik Knoche},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176314},\n issn = {2220-4806},\n\ + \ pages = {456--459},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Hear you later alligator: How delayed auditory feedback affects non-musically\ + \ trained people's strumming},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0086.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179144 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176314 issn: 2220-4806 - month: May - pages: 182--183 - publisher: Louisiana State University - title: Prototyping hand-based wearable music education technology - url: http://www.nime.org/proceedings/2015/nime2015_151.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/151/0151-file1.m4v - year: 2015 + pages: 456--459 + publisher: Aalborg University Copenhagen + title: 'Hear you later alligator: How delayed auditory feedback affects non-musically + trained people''s strumming' + url: http://www.nime.org/proceedings/2017/nime2017_paper0086.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: jharrimanb2015 - abstract: 'The Feedback Lap Steel is an actuated instrument which makes use of mechanical - vibration of the instruments bridge to excite the strings. A custom bridge mounted - directly to a tactile transducer enables the strings to be driven with any audio - signal from a standard audio amplifier. The instrument can be played as a traditional - lap steel guitar without any changes to playing technique as well as be used to - create new sounds which blur the line between acoustic and electronic through - a combination of acoustic and computer generated and controlled sounds. 
This introduces - a new approach to string actuation using commonly available parts. This demonstration - paper details the construction, uses and lessons learned in the making of the - Feedback Lap Steel guitar.' - address: 'Baton Rouge, Louisiana, USA' - author: Jiffer Harriman - bibtex: "@inproceedings{jharrimanb2015,\n abstract = {The Feedback Lap Steel is\ - \ an actuated instrument which makes use of mechanical vibration of the instruments\ - \ bridge to excite the strings. A custom bridge mounted directly to a tactile\ - \ transducer enables the strings to be driven with any audio signal from a standard\ - \ audio amplifier. The instrument can be played as a traditional lap steel guitar\ - \ without any changes to playing technique as well as be used to create new sounds\ - \ which blur the line between acoustic and electronic through a combination of\ - \ acoustic and computer generated and controlled sounds. This introduces a new\ - \ approach to string actuation using commonly available parts. This demonstration\ - \ paper details the construction, uses and lessons learned in the making of the\ - \ Feedback Lap Steel guitar.},\n address = {Baton Rouge, Louisiana, USA},\n author\ - \ = {Jiffer Harriman},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179076},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {178--179},\n publisher = {Louisiana State University},\n title\ - \ = {Feedback Lapsteel : Exploring Tactile Transducers As String Actuators},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_152.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/152/0152-file1.mp4},\n year = {2015}\n\ + ID: mmulshine2017 + abstract: 'This paper introduces an audio synthesis library written in C with "object + oriented" programming principles in mind. 
We call it OOPS: Object-Oriented Programming + Sound, or, "Oops, it''s not quite Object-Oriented Programming in C". The library + consists of several UGens (audio components) and a framework to manage these components. + The design emphases of the library are efficiency and organizational simplicity, + with particular attention to the needs of embedded systems audio development. ' + address: 'Copenhagen, Denmark' + author: Michael Mulshine and Jeff Snyder + bibtex: "@inproceedings{mmulshine2017,\n abstract = {This paper introduces an audio\ + \ synthesis library written in C with \"object oriented\" programming principles\ + \ in mind. We call it OOPS: Object-Oriented Programming Sound, or, \"Oops, it's\ + \ not quite Object-Oriented Programming in C\". The library consists of several\ + \ UGens (audio components) and a framework to manage these components. The design\ + \ emphases of the library are efficiency and organizational simplicity, with particular\ + \ attention to the needs of embedded systems audio development. 
},\n address\ + \ = {Copenhagen, Denmark},\n author = {Michael Mulshine and Jeff Snyder},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176316},\n issn = {2220-4806},\n pages\ + \ = {460--463},\n publisher = {Aalborg University Copenhagen},\n title = {OOPS:\ + \ An Audio Synthesis Library in C for Embedded (and Other) Applications},\n url\ + \ = {http://www.nime.org/proceedings/2017/nime2017_paper0087.pdf},\n year = {2017}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179076 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176316 issn: 2220-4806 - month: May - pages: 178--179 - publisher: Louisiana State University - title: 'Feedback Lapsteel : Exploring Tactile Transducers As String Actuators' - url: http://www.nime.org/proceedings/2015/nime2015_152.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/152/0152-file1.mp4 - year: 2015 + pages: 460--463 + publisher: Aalborg University Copenhagen + title: 'OOPS: An Audio Synthesis Library in C for Embedded (and Other) Applications' + url: http://www.nime.org/proceedings/2017/nime2017_paper0087.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: rmichon2015 - abstract: 'This work presents a series of tools to turn Faust code into various - elements ranging from fully functional applications to multi-platform libraries - for real time audio signal processing on iOS and Android. Technical details about - their use and function are provided along with audio latency and performance comparisons, - and examples of applications.' 
- address: 'Baton Rouge, Louisiana, USA' - author: Romain Michon and Julius Orion Smith III and Yann Orlarey - bibtex: "@inproceedings{rmichon2015,\n abstract = {This work presents a series of\ - \ tools to turn Faust code into various elements ranging from fully functional\ - \ applications to multi-platform libraries for real time audio signal processing\ - \ on iOS and Android. Technical details about their use and function are provided\ - \ along with audio latency and performance comparisons, and examples of applications.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Romain Michon and {Julius\ - \ Orion} {Smith III} and Yann Orlarey},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179140},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {396--399},\n publisher = {Louisiana State University},\n title\ - \ = {MobileFaust: a Set of Tools to Make Musical Mobile Applications with the\ - \ Faust Programming Language},\n url = {http://www.nime.org/proceedings/2015/nime2015_153.pdf},\n\ - \ year = {2015}\n}\n" + ID: mkallionpaa2017 + abstract: "“Climb!” is a musical composition that combines the ideas\ + \ of a classical virtuoso piece and a computer game. We present a case study of\ + \ the composition process and realization of “Climb!”, written for\ + \ Disklavier and a digital interactive engine, which was co-developed together\ + \ with the musical score. Specifically, the engine combines a system for recognising\ + \ and responding to musical trigger phrases along with a dynamic digital score\ + \ renderer. This tool chain allows for the composer's original scoring to include\ + \ notational elements such as trigger phrases to be automatically extracted to\ + \ auto-configure the engine for live performance. 
We reflect holistically on the\ + \ development process to date and highlight the emerging challenges and opportunities.\ + \ For example, this includes the potential for further developing the workflow\ + \ around the scoring process and the ways in which support for musical triggers\ + \ has shaped the compositional approach." + address: 'Copenhagen, Denmark' + author: Maria Kallionpää and Chris Greenhalgh and Adrian Hazzard and David M. Weigl + and Kevin R. Page and Steve Benford + bibtex: "@inproceedings{mkallionpaa2017,\n abstract = {“Climb!” is a\ + \ musical composition that combines the ideas of a classical virtuoso piece and\ + \ a computer game. We present a case study of the composition process and realization\ + \ of “Climb!”, written for Disklavier and a digital interactive engine,\ + \ which was co-developed together with the musical score. Specifically, the engine\ + \ combines a system for recognising and responding to musical trigger phrases\ + \ along with a dynamic digital score renderer. This tool chain allows for the\ + \ composer's original scoring to include notational elements such as trigger phrases\ + \ to be automatically extracted to auto-configure the engine for live performance.\ + \ We reflect holistically on the development process to date and highlight the\ + \ emerging challenges and opportunities. For example, this includes the potential\ + \ for further developing the workflow around the scoring process and the ways\ + \ in which support for musical triggers has shaped the compositional approach.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Maria Kallionpää and Chris Greenhalgh\ + \ and Adrian Hazzard and David M. Weigl and Kevin R. 
Page and Steve Benford},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176318},\n issn = {2220-4806},\n\ + \ pages = {464--469},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Composing and Realising a Game-like Performance for Disklavier and Electronics},\n\ + \ url = {http://www.nime.org/proceedings/2017/nime2017_paper0088.pdf},\n year\ + \ = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179140 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176318 issn: 2220-4806 - month: May - pages: 396--399 - publisher: Louisiana State University - title: 'MobileFaust: a Set of Tools to Make Musical Mobile Applications with the - Faust Programming Language' - url: http://www.nime.org/proceedings/2015/nime2015_153.pdf - year: 2015 + pages: 464--469 + publisher: Aalborg University Copenhagen + title: Composing and Realising a Game-like Performance for Disklavier and Electronics + url: http://www.nime.org/proceedings/2017/nime2017_paper0088.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: amercertaylor2015 - abstract: 'On a traditional keyboard, the actions required to play a consonant chord - progression must be quite precise; accidentally strike a neighboring key, and - a pleasant sonority is likely to become a jarring one. Inspired by the Tonnetz - (a tonal diagram), we present a new layout of pitches defined using low-level - harmonic notions. We demonstrate the potential of our system by mapping the random - movements of fish in an aquarium to this layout. Qualitatively, we find that this - captures the intuition behind mapping motion to sound. Similarly moving fish produce - consonant chords, while fish moving in non-unison produce dissonant chords. 
We - introduce an open source MATLAB library implementing these techniques, which can - be used for sonifying multimodal streaming data. ' - address: 'Baton Rouge, Louisiana, USA' - author: Andrew Mercer-Taylor and Jaan Altosaar - bibtex: "@inproceedings{amercertaylor2015,\n abstract = {On a traditional keyboard,\ - \ the actions required to play a consonant chord progression must be quite precise;\ - \ accidentally strike a neighboring key, and a pleasant sonority is likely to\ - \ become a jarring one. Inspired by the Tonnetz (a tonal diagram), we present\ - \ a new layout of pitches defined using low-level harmonic notions. We demonstrate\ - \ the potential of our system by mapping the random movements of fish in an aquarium\ - \ to this layout. Qualitatively, we find that this captures the intuition behind\ - \ mapping motion to sound. Similarly moving fish produce consonant chords, while\ - \ fish moving in non-unison produce dissonant chords. We introduce an open source\ - \ MATLAB library implementing these techniques, which can be used for sonifying\ - \ multimodal streaming data. },\n address = {Baton Rouge, Louisiana, USA},\n author\ - \ = {Andrew Mercer-Taylor and Jaan Altosaar},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179138},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ - \ = {2220-4806},\n month = {May},\n pages = {28--29},\n publisher = {Louisiana\ - \ State University},\n title = {Sonification of Fish Movement Using Pitch Mesh\ - \ Pairs},\n url = {http://www.nime.org/proceedings/2015/nime2015_155.pdf},\n urlsuppl1\ - \ = {http://www.nime.org/proceedings/2015/155/0155-file1.mp4},\n year = {2015}\n\ - }\n" + ID: tmagnusson2017 + abstract: 'New digital musical instruments are difficult for organologists to deal + with, due to their heterogeneous origins, interdisciplinary science, and fluid, + open-ended nature. 
NIMEs are studied from a range of disciplines, such as musicology, + engineering, human-computer interaction, psychology, design, and performance studies. + Attempts to continue traditional organology classifications for electronic and + digital instruments have been made, but with unsatisfactory results. This paper + raises the problem of tree-like classifications of digital instruments, proposing + an alternative approach: musical organics . Musical organics is a philosophical + attempt to tackle the problems inherent in the organological classification of + digital instruments. Shifting the emphasis from hand-coded classification to information + retrieval supported search and clustering, an open and distributed system that + anyone can contribute to is proposed. In order to show how such a system could + incorporate third-party additions, the paper also presents an organological ontogenesis + of three innovative musical instruments: the saxophone, the Minimoog, and the + Reactable. This micro-analysis of innovation in the field of musical instruments + can help forming a framework for the study of how instruments are adopted in musical + culture.' + address: 'Copenhagen, Denmark' + author: Thor Magnusson + bibtex: "@inproceedings{tmagnusson2017,\n abstract = {New digital musical instruments\ + \ are difficult for organologists to deal with, due to their heterogeneous origins,\ + \ interdisciplinary science, and fluid, open-ended nature. NIMEs are studied from\ + \ a range of disciplines, such as musicology, engineering, human-computer interaction,\ + \ psychology, design, and performance studies. Attempts to continue traditional\ + \ organology classifications for electronic and digital instruments have been\ + \ made, but with unsatisfactory results. This paper raises the problem of tree-like\ + \ classifications of digital instruments, proposing an alternative approach: musical\ + \ organics . 
Musical organics is a philosophical attempt to tackle the problems\ + \ inherent in the organological classification of digital instruments. Shifting\ + \ the emphasis from hand-coded classification to information retrieval supported\ + \ search and clustering, an open and distributed system that anyone can contribute\ + \ to is proposed. In order to show how such a system could incorporate third-party\ + \ additions, the paper also presents an organological ontogenesis of three innovative\ + \ musical instruments: the saxophone, the Minimoog, and the Reactable. This micro-analysis\ + \ of innovation in the field of musical instruments can help forming a framework\ + \ for the study of how instruments are adopted in musical culture.},\n address\ + \ = {Copenhagen, Denmark},\n author = {Thor Magnusson},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176320},\n issn = {2220-4806},\n pages = {470--475},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Contextualizing Musical\ + \ Organics: An Ad-hoc Organological Classification Approach},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0089.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179138 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176320 issn: 2220-4806 - month: May - pages: 28--29 - publisher: Louisiana State University - title: Sonification of Fish Movement Using Pitch Mesh Pairs - url: http://www.nime.org/proceedings/2015/nime2015_155.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/155/0155-file1.mp4 - year: 2015 + pages: 470--475 + publisher: Aalborg University Copenhagen + title: 'Contextualizing Musical Organics: An Ad-hoc Organological Classification + Approach' + url: http://www.nime.org/proceedings/2017/nime2017_paper0089.pdf + year: 2017 - ENTRYTYPE: 
inproceedings - ID: klin2015 - abstract: 'Many new melodic instruments use a touch sensitive surface with notes - arranged in a two-dimensional grid. Most of these arrange notes in chromatic half-steps - along the horizontal axis and in intervals of fourths along the vertical axis. - Although many alternatives exist, this arrangement, which resembles that of a - bass guitar, is quickly becoming the de facto standard. In this study we present - experimental evidence that grid based instruments are significantly easier to - play when we tune adjacent rows in Major thirds rather than fourths. We have developed - a grid-based instrument as an iPad app that has sold 8,000 units since 2012. To - test our proposed alternative tuning, we taught a group twenty new users to play - basic chords on our app, using both the standard tuning and our proposed alternative. - Our results show that the Major thirds tuning is much easier to learn, even for - users that have previous experience playing guitar.' - address: 'Baton Rouge, Louisiana, USA' - author: Hans Anderson and Kin Wah Edward Lin and Natalie Agus and Simon Lui - bibtex: "@inproceedings{klin2015,\n abstract = {Many new melodic instruments use\ - \ a touch sensitive surface with notes arranged in a two-dimensional grid. Most\ - \ of these arrange notes in chromatic half-steps along the horizontal axis and\ - \ in intervals of fourths along the vertical axis. Although many alternatives\ - \ exist, this arrangement, which resembles that of a bass guitar, is quickly becoming\ - \ the de facto standard. In this study we present experimental evidence that grid\ - \ based instruments are significantly easier to play when we tune adjacent rows\ - \ in Major thirds rather than fourths. We have developed a grid-based instrument\ - \ as an iPad app that has sold 8,000 units since 2012. 
To test our proposed alternative\ - \ tuning, we taught a group twenty new users to play basic chords on our app,\ - \ using both the standard tuning and our proposed alternative. Our results show\ - \ that the Major thirds tuning is much easier to learn, even for users that have\ - \ previous experience playing guitar.},\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Hans Anderson and Kin Wah Edward Lin and Natalie Agus and Simon Lui},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179006},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {365--368},\n\ - \ publisher = {Louisiana State University},\n title = {Major Thirds: A Better\ - \ Way to Tune Your iPad},\n url = {http://www.nime.org/proceedings/2015/nime2015_157.pdf},\n\ - \ year = {2015}\n}\n" + ID: sfasciani2017 + abstract: 'We propose an approach to insert physical objects in audio digital signal + processing chains, filtering the sound with the acoustic impulse response of any + solid measured in real-time. We model physical objects as a linear time-invariant + system, which is used as an audio filter. By interacting with the object or with + the measuring hardware we can dynamically modify the characteristics of the filter. + The impulse response is obtained correlating a noise signal injected in the object + through an acoustic actuator with the signal received from an acoustic sensor + placed on the object. We also present an efficient multichannel implementation + of the system, which enables further creative applications beyond audio filtering, + including tangible signal patching and sound spatialization.' 
+ address: 'Copenhagen, Denmark' + author: Stefano Fasciani + bibtex: "@inproceedings{sfasciani2017,\n abstract = {We propose an approach to insert\ + \ physical objects in audio digital signal processing chains, filtering the sound\ + \ with the acoustic impulse response of any solid measured in real-time. We model\ + \ physical objects as a linear time-invariant system, which is used as an audio\ + \ filter. By interacting with the object or with the measuring hardware we can\ + \ dynamically modify the characteristics of the filter. The impulse response is\ + \ obtained correlating a noise signal injected in the object through an acoustic\ + \ actuator with the signal received from an acoustic sensor placed on the object.\ + \ We also present an efficient multichannel implementation of the system, which\ + \ enables further creative applications beyond audio filtering, including tangible\ + \ signal patching and sound spatialization.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Stefano Fasciani},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176322},\n\ + \ issn = {2220-4806},\n pages = {476--480},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Physical Audio Digital Filters},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0090.pdf},\n\ + \ year = {2017}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176322 + issn: 2220-4806 + pages: 476--480 + publisher: Aalborg University Copenhagen + title: Physical Audio Digital Filters + url: http://www.nime.org/proceedings/2017/nime2017_paper0090.pdf + year: 2017 + + +- ENTRYTYPE: inproceedings + ID: btaylor2017 + abstract: 'Distributed music as a performance practice has seen significant growth + over the past decade. 
This paper surveys the development of the genre, documenting + important precedents, peripheral influences, and core works. We additionally discuss + common modes of implementation in the genre and contrast these approaches and + their motivations.' + address: 'Copenhagen, Denmark' + author: Benjamin Taylor + bibtex: "@inproceedings{btaylor2017,\n abstract = {Distributed music as a performance\ + \ practice has seen significant growth over the past decade. This paper surveys\ + \ the development of the genre, documenting important precedents, peripheral influences,\ + \ and core works. We additionally discuss common modes of implementation in the\ + \ genre and contrast these approaches and their motivations.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Benjamin Taylor},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176324},\n\ + \ issn = {2220-4806},\n pages = {481--486},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {A History of the Audience as a Speaker Array},\n url\ + \ = {http://www.nime.org/proceedings/2017/nime2017_paper0091.pdf},\n year = {2017}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179006 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176324 issn: 2220-4806 - month: May - pages: 365--368 - publisher: Louisiana State University - title: 'Major Thirds: A Better Way to Tune Your iPad' - url: http://www.nime.org/proceedings/2015/nime2015_157.pdf - year: 2015 + pages: 481--486 + publisher: Aalborg University Copenhagen + title: A History of the Audience as a Speaker Array + url: http://www.nime.org/proceedings/2017/nime2017_paper0091.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: jaltosaar2015 - abstract: 'Much of the challenge and appeal in remixing music comes from manipulating - samples. 
Typically, identifying distinct samples of a song requires expertise - in music production software. Additionally, it is difficult to visualize similarities - and differences between all samples of a song simultaneously and use this to select - samples. MusicMapper is a web application that allows nonexpert users to find - and visualize distinctive samples from a song without any manual intervention, - and enables remixing by having users play back clusterings of such samples. This - is accomplished by splitting audio from the Soundcloud API into appropriately-sized - spectrograms, and applying the t-SNE algorithm to visualize these spectrograms - in two dimensions. Next, we apply k-means to guide the user''s eye toward related - clusters and set $k=26$ to enable playback of the clusters by pressing keys on - an ordinary keyboard. We present the source code (https://github.com/fatsmcgee/MusicMappr) - and a demo video (http://youtu.be/mvD6e1uiO8k) of the MusicMapper web application - that can be run in most modern browsers.' - address: 'Baton Rouge, Louisiana, USA' - author: Ethan Benjamin and Jaan Altosaar - bibtex: "@inproceedings{jaltosaar2015,\n abstract = {Much of the challenge and appeal\ - \ in remixing music comes from manipulating samples. Typically, identifying distinct\ - \ samples of a song requires expertise in music production software. Additionally,\ - \ it is difficult to visualize similarities and differences between all samples\ - \ of a song simultaneously and use this to select samples. MusicMapper is a web\ - \ application that allows nonexpert users to find and visualize distinctive samples\ - \ from a song without any manual intervention, and enables remixing by having\ - \ users play back clusterings of such samples. This is accomplished by splitting\ - \ audio from the Soundcloud API into appropriately-sized spectrograms, and applying\ - \ the t-SNE algorithm to visualize these spectrograms in two dimensions. 
Next,\ - \ we apply k-means to guide the user's eye toward related clusters and set $k=26$\ - \ to enable playback of the clusters by pressing keys on an ordinary keyboard.\ - \ We present the source code (https://github.com/fatsmcgee/MusicMappr) and a demo\ - \ video (http://youtu.be/mvD6e1uiO8k) of the MusicMapper web application that\ - \ can be run in most modern browsers.},\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Ethan Benjamin and Jaan Altosaar},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179018},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ - \ = {2220-4806},\n month = {May},\n pages = {325--326},\n publisher = {Louisiana\ - \ State University},\n title = {MusicMapper: Interactive {2D} representations\ - \ of music samples for in-browser remixing and exploration},\n url = {http://www.nime.org/proceedings/2015/nime2015_161.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/161/0161-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: togata2017 + abstract: 'This paper is about a novel robotic guitar that establishes shared control + between human performers and mechanical actuators. Unlike other mechatronic guitar + instruments that perform pre-programmed music automatically, this guitar allows + the human and actuators to produce sounds jointly; there exists a distributed + control between the human and robotic components. The interaction allows human + performers to have full control over the melodic, harmonic, and expressive elements + of the instrument while mechanical actuators excite and dampen the string with + a rhythmic pattern. Guitarists can still access the fretboard without the physical + interference of a mechatronic system, so they can play melodies and chords as + well as perform bends, slides, vibrato, and other expressive techniques. 
Leveraging + the capabilities of mechanical actuators, the mechanized hammers can output complex + rhythms and speeds not attainable by humans. Furthermore, the rhythmic patterns + can be algorithmically or stochastically generated by the hammer, which supports + real-time interactive improvising.' + address: 'Copenhagen, Denmark' + author: Takumi Ogata and Gil Weinberg + bibtex: "@inproceedings{togata2017,\n abstract = {This paper is about a novel robotic\ + \ guitar that establishes shared control between human performers and mechanical\ + \ actuators. Unlike other mechatronic guitar instruments that perform pre-programmed\ + \ music automatically, this guitar allows the human and actuators to produce sounds\ + \ jointly; there exists a distributed control between the human and robotic components.\ + \ The interaction allows human performers to have full control over the melodic,\ + \ harmonic, and expressive elements of the instrument while mechanical actuators\ + \ excite and dampen the string with a rhythmic pattern. Guitarists can still\ + \ access the fretboard without the physical interference of a mechatronic system,\ + \ so they can play melodies and chords as well as perform bends, slides, vibrato,\ + \ and other expressive techniques. Leveraging the capabilities of mechanical actuators,\ + \ the mechanized hammers can output complex rhythms and speeds not attainable\ + \ by humans. 
Furthermore, the rhythmic patterns can be algorithmically or stochastically\ + \ generated by the hammer, which supports real-time interactive improvising.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Takumi Ogata and Gil Weinberg},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176326},\n issn = {2220-4806},\n\ + \ pages = {487--488},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Robotically Augmented Electric Guitar for Shared Control},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0092.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179018 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176326 issn: 2220-4806 - month: May - pages: 325--326 - publisher: Louisiana State University - title: 'MusicMapper: Interactive 2D representations of music samples for in-browser - remixing and exploration' - url: http://www.nime.org/proceedings/2015/nime2015_161.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/161/0161-file1.mp4 - year: 2015 + pages: 487--488 + publisher: Aalborg University Copenhagen + title: Robotically Augmented Electric Guitar for Shared Control + url: http://www.nime.org/proceedings/2017/nime2017_paper0092.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: jjaimovich2015 - abstract: 'Previously the design of algorithms and parameter calibration for biosignal - music performances has been based on testing with a small number of individuals - --- in fact usually the performer themselves. This paper uses the data collected - from over 4000 people to begin to create a truly robust set of algorithms for - heart rate and electrodermal activity measures, as well as the understanding of - how the calibration of these vary by individual.' - address: 'Baton Rouge, Louisiana, USA' - author: Javier Jaimovich and R. 
Benjamin Knapp - bibtex: "@inproceedings{jjaimovich2015,\n abstract = {Previously the design of algorithms\ - \ and parameter calibration for biosignal music performances has been based on\ - \ testing with a small number of individuals --- in fact usually the performer\ - \ themselves. This paper uses the data collected from over 4000 people to begin\ - \ to create a truly robust set of algorithms for heart rate and electrodermal\ - \ activity measures, as well as the understanding of how the calibration of these\ - \ vary by individual.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ - \ {Javier Jaimovich and {R. Benjamin} Knapp},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179096},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ - \ = {2220-4806},\n month = {May},\n pages = {1--4},\n publisher = {Louisiana State\ - \ University},\n title = {Creating Biosignal Algorithms for Musical Applications\ - \ from an Extensive Physiological Database},\n url = {http://www.nime.org/proceedings/2015/nime2015_163.pdf},\n\ - \ year = {2015}\n}\n" + ID: bneill2017 + abstract: 'Ben Neill will demonstrate the mutantrumpet, a hybrid electro-acoustic + instrument. The capabilities of the mutantrumpet are designed to erase the boundaries + between acoustic and electronic musical creation and performance. It is both an + expanded acoustic instrument and an electronic controller capable of interacting + with audio and video simultaneously. The demonstration will explore the multi-faceted + possibilities that are offered by the mutantrumpet in several brief, wide ranging + musical examples composed and improvised by Neill. Interactive video performance + techniques and collaborations will be integrated into the excerpts. 
The aesthetics + of live intermedia performance will be discussed along with a technical overview + of the interface and associated software applications Junxion and RoSa from STEIM, + Amsterdam. Reflections on the development of a virtuosic performance technique + with a hybrid instrument and influences from collaborators Robert Moog, David + Behrman, Ralph Abraham, DJ Spooky and others will be included in the presentation.' + address: 'Copenhagen, Denmark' + author: Ben Neill + bibtex: "@inproceedings{bneill2017,\n abstract = {Ben Neill will demonstrate the\ + \ mutantrumpet, a hybrid electro-acoustic instrument. The capabilities of the\ + \ mutantrumpet are designed to erase the boundaries between acoustic and electronic\ + \ musical creation and performance. It is both an expanded acoustic instrument\ + \ and an electronic controller capable of interacting with audio and video simultaneously.\ + \ The demonstration will explore the multi-faceted possibilities that are offered\ + \ by the mutantrumpet in several brief, wide ranging musical examples composed\ + \ and improvised by Neill. Interactive video performance techniques and collaborations\ + \ will be integrated into the excerpts. The aesthetics of live intermedia performance\ + \ will be discussed along with a technical overview of the interface and associated\ + \ software applications Junxion and RoSa from STEIM, Amsterdam. 
Reflections on\ + \ the development of a virtuosic performance technique with a hybrid instrument\ + \ and influences from collaborators Robert Moog, David Behrman, Ralph Abraham,\ + \ DJ Spooky and others will be included in the presentation.},\n address = {Copenhagen,\ + \ Denmark},\n author = {Ben Neill},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176328},\n\ + \ issn = {2220-4806},\n pages = {489--490},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {The Mutantrumpet},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0093.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179096 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176328 issn: 2220-4806 - month: May - pages: 1--4 - publisher: Louisiana State University - title: Creating Biosignal Algorithms for Musical Applications from an Extensive - Physiological Database - url: http://www.nime.org/proceedings/2015/nime2015_163.pdf - year: 2015 + pages: 489--490 + publisher: Aalborg University Copenhagen + title: The Mutantrumpet + url: http://www.nime.org/proceedings/2017/nime2017_paper0093.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: bknichel2015 - abstract: 'Resonate was a musical installation created with focus on interactivity - and collaboration. In this paper we will focus on the design-process and the different - steps involved. We describe and discuss the methods to create, synchronize and - combine the aspects of space, object, music and interaction for the development - of resonate. The realized space-filling tangible installation allowed visitors - to interact with different interaction objects and change therefore the musical - expression as well as the visual response and aesthetic. 
After a non-formal quality - evaluation of this installation we changed some aspects which resulted in a more - refined version which we will also discuss here. ' - address: 'Baton Rouge, Louisiana, USA' - author: Benjamin Knichel and Holger Reckter and Peter Kiefer - bibtex: "@inproceedings{bknichel2015,\n abstract = {Resonate was a musical installation\ - \ created with focus on interactivity and collaboration. In this paper we will\ - \ focus on the design-process and the different steps involved. We describe and\ - \ discuss the methods to create, synchronize and combine the aspects of space,\ - \ object, music and interaction for the development of resonate. The realized\ - \ space-filling tangible installation allowed visitors to interact with different\ - \ interaction objects and change therefore the musical expression as well as the\ - \ visual response and aesthetic. After a non-formal quality evaluation of this\ - \ installation we changed some aspects which resulted in a more refined version\ - \ which we will also discuss here. },\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Benjamin Knichel and Holger Reckter and Peter Kiefer},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179108},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {111--115},\n\ - \ publisher = {Louisiana State University},\n title = {resonate -- a social musical\ - \ installation which integrates tangible multiuser interaction},\n url = {http://www.nime.org/proceedings/2015/nime2015_164.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/164/0164-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: ssmallwood2017 + abstract: 'This paper/poster describes the development of an experimental listening + game called Locus Sono; a 3D audio puzzle game where listening and exploration + are the key forms of interaction. 
The game was developed by a motivation to create + an interactive audio environment in which sound is the key to solving in-game + puzzles. This work is a prototype for a larger planned work and illustrates a + first step in a more complex audio gaming scenario, which will also be partially + described in this short paper' + address: 'Copenhagen, Denmark' + author: Scott Smallwood + bibtex: "@inproceedings{ssmallwood2017,\n abstract = {This paper/poster describes\ + \ the development of an experimental listening game called Locus Sono; a 3D audio\ + \ puzzle game where listening and exploration are the key forms of interaction.\ + \ The game was developed by a motivation to create an interactive audio environment\ + \ in which sound is the key to solving in-game puzzles. This work is a prototype\ + \ for a larger planned work and illustrates a first step in a more complex audio\ + \ gaming scenario, which will also be partially described in this short paper},\n\ + \ address = {Copenhagen, Denmark},\n author = {Scott Smallwood},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176330},\n issn = {2220-4806},\n pages = {491--492},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Locus Sono: A Listening\ + \ Game for NIME},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0094.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179108 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176330 issn: 2220-4806 - month: May - pages: 111--115 - publisher: Louisiana State University - title: resonate -- a social musical installation which integrates tangible multiuser - interaction - url: http://www.nime.org/proceedings/2015/nime2015_164.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/164/0164-file1.mp4 - year: 2015 + pages: 491--492 + publisher: 
Aalborg University Copenhagen + title: 'Locus Sono: A Listening Game for NIME' + url: http://www.nime.org/proceedings/2017/nime2017_paper0094.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: gdublon2015 - abstract: 'The Electronic Fox Ears helmet is a listening device that changes its - wearer''s experience of hearing. A pair of head-mounted, independently articulated - parabolic microphones and built-in bone conduction transducers allow the wearer - to sharply direct their attention to faraway sound sources. Joysticks in each - hand control the orientations of the microphones, which are mounted on servo gimbals - for precise targeting. Paired with a mobile device, the helmet can function as - a specialized, wearable field recording platform. Field recording and ambient - sound have long been a part of electronic music; our device extends these practices - by drawing on a tradition of wearable technologies and prosthetic art that blur - the boundaries of human perception. ' - address: 'Baton Rouge, Louisiana, USA' - author: Rebecca Kleinberger and Gershon Dublon and Joseph A. Paradiso and Tod Machover - bibtex: "@inproceedings{gdublon2015,\n abstract = {The Electronic Fox Ears helmet\ - \ is a listening device that changes its wearer's experience of hearing. A pair\ - \ of head-mounted, independently articulated parabolic microphones and built-in\ - \ bone conduction transducers allow the wearer to sharply direct their attention\ - \ to faraway sound sources. Joysticks in each hand control the orientations of\ - \ the microphones, which are mounted on servo gimbals for precise targeting. Paired\ - \ with a mobile device, the helmet can function as a specialized, wearable field\ - \ recording platform. Field recording and ambient sound have long been a part\ - \ of electronic music; our device extends these practices by drawing on a tradition\ - \ of wearable technologies and prosthetic art that blur the boundaries of human\ - \ perception. 
},\n address = {Baton Rouge, Louisiana, USA},\n author = {Rebecca\ - \ Kleinberger and Gershon Dublon and {Joseph A.} Paradiso and Tod Machover},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179106},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {30--31},\n\ - \ publisher = {Louisiana State University},\n title = {PHOX Ears: A Parabolic,\ - \ Head-mounted, Orientable, eXtrasensory Listening Device},\n url = {http://www.nime.org/proceedings/2015/nime2015_165.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/165/0165-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: rpolfreman2017 + abstract: "2017 marks the 40th anniversary of the Rubik's Cube (under its original\ + \ name the Magic Cube). This paper-demonstration describes explorations of the\ + \ cube as a performance controller for music. The pattern of colors on a face\ + \ of the cube is detected via USB video camera and supplemented by EMG data from\ + \ the performer to model the performer's interaction with the cube. This system\ + \ was trialed in a variety of audio scenarios and deployed in the composition\ + \ “Rubik's Study No. 1”, a work based on solving the cube with audible\ + \ connections to 1980's pop culture. The cube was found to be an engaging musical\ + \ controller, with further potential to be explored." + address: 'Copenhagen, Denmark' + author: Richard Polfreman and Benjamin Oliver + bibtex: "@inproceedings{rpolfreman2017,\n abstract = {2017 marks the 40th anniversary\ + \ of the Rubik's Cube (under its original name the Magic Cube). This paper-demonstration\ + \ describes explorations of the cube as a performance controller for music. The\ + \ pattern of colors on a face of the cube is detected via USB video camera and\ + \ supplemented by EMG data from the performer to model the performer's interaction\ + \ with the cube. 
This system was trialed in a variety of audio scenarios and deployed\ + \ in the composition “Rubik's Study No. 1”, a work based on solving\ + \ the cube with audible connections to 1980's pop culture. The cube was found\ + \ to be an engaging musical controller, with further potential to be explored.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Richard Polfreman and Benjamin\ + \ Oliver},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176332},\n issn = {2220-4806},\n\ + \ pages = {493--494},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Rubik's Cube, Music's Cube},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0095.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179106 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176332 issn: 2220-4806 - month: May - pages: 30--31 - publisher: Louisiana State University - title: 'PHOX Ears: A Parabolic, Head-mounted, Orientable, eXtrasensory Listening - Device' - url: http://www.nime.org/proceedings/2015/nime2015_165.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/165/0165-file1.mp4 - year: 2015 + pages: 493--494 + publisher: Aalborg University Copenhagen + title: 'Rubik''s Cube, Music''s Cube' + url: http://www.nime.org/proceedings/2017/nime2017_paper0095.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: pdahlstedt2015 - abstract: 'Based on a combination of novel mapping techniques and carefully designed - sound engines, I present an augmented hybrid piano specifically designed for improvisation. - The mapping technique, originally developed for other control interfaces but here - adapted to the piano keyboard, is based on a dynamic vectorization of control - parameters, allowing both wild sonic exploration and minute intimate expression. 
- The original piano sound is used as the sole sound source, subjected to processing - techniques such as virtual resonance strings, dynamic buffer shuffling, and acoustic - and virtual feedback. Thanks to speaker and microphone placement, the acoustic - and processed sounds interact in both directions and blend into one new instrument. - This also allows for unorthodox playing (knocking, plucking, shouting). Processing - parameters are controlled from the keyboard playing alone, allowing intuitive - control of complex processing by ear, integrating expressive musical playing with - sonic exploration. The instrument is not random, but somewhat unpredictable. This - feeds into the improvisation, defining a particular idiomatics of the instruments. - Hence, the instrument itself is an essential part of the musical work. Performances - include concerts in UK, Japan, Singapore, Australia and Sweden, in solos and ensembles, - performed by several pianists. Variations of this hybrid instrument for digital - keyboards are also presented.' - address: 'Baton Rouge, Louisiana, USA' - author: Palle Dahlstedt - bibtex: "@inproceedings{pdahlstedt2015,\n abstract = {Based on a combination of\ - \ novel mapping techniques and carefully designed sound engines, I present an\ - \ augmented hybrid piano specifically designed for improvisation. The mapping\ - \ technique, originally developed for other control interfaces but here adapted\ - \ to the piano keyboard, is based on a dynamic vectorization of control parameters,\ - \ allowing both wild sonic exploration and minute intimate expression. The original\ - \ piano sound is used as the sole sound source, subjected to processing techniques\ - \ such as virtual resonance strings, dynamic buffer shuffling, and acoustic and\ - \ virtual feedback. 
Thanks to speaker and microphone placement, the acoustic and\ - \ processed sounds interact in both directions and blend into one new instrument.\ - \ This also allows for unorthodox playing (knocking, plucking, shouting). Processing\ - \ parameters are controlled from the keyboard playing alone, allowing intuitive\ - \ control of complex processing by ear, integrating expressive musical playing\ - \ with sonic exploration. The instrument is not random, but somewhat unpredictable.\ - \ This feeds into the improvisation, defining a particular idiomatics of the instruments.\ - \ Hence, the instrument itself is an essential part of the musical work. Performances\ - \ include concerts in UK, Japan, Singapore, Australia and Sweden, in solos and\ - \ ensembles, performed by several pianists. Variations of this hybrid instrument\ - \ for digital keyboards are also presented.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Palle Dahlstedt},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179046},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {271--276},\n publisher = {Louisiana State University},\n title\ - \ = {Mapping Strategies and Sound Engine Design for an Augmented Hybrid Piano},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_170.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/170/0170-file1.zip},\n year = {2015}\n\ - }\n" + ID: cmartin2017 + abstract: 'MicroJam is a mobile app for sharing tiny touch-screen performances. + Mobile applications that streamline creativity and social interaction have enabled + a very broad audience to develop their own creative practices. While these apps + have been very successful in visual arts (particularly photography), the idea + of social music-making has not had such a broad impact. 
MicroJam includes several + novel performance concepts intended to engage the casual music maker and inspired + by current trends in social creativity support tools. Touch-screen performances + are limited to five seconds, instrument settings are posed as sonic ``filters'''', + and past performances are arranged as a timeline with replies and layers. These + features of MicroJam encourage users not only to perform music more frequently, + but to engage with others in impromptu ensemble music making.' + address: 'Copenhagen, Denmark' + author: Charles Martin and Jim Torresen + bibtex: "@inproceedings{cmartin2017,\n abstract = {MicroJam is a mobile app for\ + \ sharing tiny touch-screen performances. Mobile applications that streamline\ + \ creativity and social interaction have enabled a very broad audience to develop\ + \ their own creative practices. While these apps have been very successful in\ + \ visual arts (particularly photography), the idea of social music-making has\ + \ not had such a broad impact. MicroJam includes several novel performance concepts\ + \ intended to engage the casual music maker and inspired by current trends in\ + \ social creativity support tools. Touch-screen performances are limited to five\ + \ seconds, instrument settings are posed as sonic ``filters'', and past performances\ + \ are arranged as a timeline with replies and layers. 
These features of MicroJam\ + \ encourage users not only to perform music more frequently, but to engage with\ + \ others in impromptu ensemble music making.},\n address = {Copenhagen, Denmark},\n\ + \ author = {Charles Martin and Jim Torresen},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176334},\n issn = {2220-4806},\n pages = {495--496},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {MicroJam: An App for Sharing Tiny\ + \ Touch-Screen Performances},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0096.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179046 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176334 issn: 2220-4806 - month: May - pages: 271--276 - publisher: Louisiana State University - title: Mapping Strategies and Sound Engine Design for an Augmented Hybrid Piano - url: http://www.nime.org/proceedings/2015/nime2015_170.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/170/0170-file1.zip - year: 2015 + pages: 495--496 + publisher: Aalborg University Copenhagen + title: 'MicroJam: An App for Sharing Tiny Touch-Screen Performances' + url: http://www.nime.org/proceedings/2017/nime2017_paper0096.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: pdahlstedtb2015 - abstract: 'The Bucket System is a new system for computer-mediated ensemble improvisation, - designed by improvisers for improvisers. Coming from a tradition of structured - free ensemble improvisation practices (comprovisation), influenced by post-WW2 - experimental music practices, it is a signaling system implemented with a set - of McMillen QuNeo controllers as input and output interfaces, powered by custom - software. It allows for a new kind of on-stage compositional/improvisation interaction.' 
- address: 'Baton Rouge, Louisiana, USA' - author: Palle Dahlstedt and Per Anders Nilsson and Gino Robair - bibtex: "@inproceedings{pdahlstedtb2015,\n abstract = {The Bucket System is a new\ - \ system for computer-mediated ensemble improvisation, designed by improvisers\ - \ for improvisers. Coming from a tradition of structured free ensemble improvisation\ - \ practices (comprovisation), influenced by post-WW2 experimental music practices,\ - \ it is a signaling system implemented with a set of McMillen QuNeo controllers\ - \ as input and output interfaces, powered by custom software. It allows for a\ - \ new kind of on-stage compositional/improvisation interaction.},\n address =\ - \ {Baton Rouge, Louisiana, USA},\n author = {Palle Dahlstedt and Per Anders Nilsson\ - \ and Gino Robair},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179048},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {317--318},\n publisher = {Louisiana State University},\n title\ - \ = {The Bucket System --- A computer mediated signalling system for group improvisation},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_171.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/171/0171-file1.mp4},\n year = {2015}\n\ - }\n" + ID: rnakagawa2017 + abstract: "The AEVE provides for a brain-computer-interface (BCI) controlled audiovisual\ + \ experience, presented through a virtual reality head-mounted display (VRHMD).\ + \ We have developed an audiovisual art piece where progression through 3 sections\ + \ and 1 extra section occurs using an “Attention” value derived from\ + \ the Electroencephalography (EEG) data. The only interaction in this work is\ + \ perspective that is participant's view, and the EEG data. However, we believe\ + \ the simple interaction amplifies the participant's feeling of immersion. 
Through\ + \ the narrative of the work and the simple interaction, we attempt to connect\ + \ some concepts such as audiovisual experience, virtual reality (VR), BCI, grid,\ + \ consciousness, memory, universe, etc. in a minimal way." + address: 'Copenhagen, Denmark' + author: Ryu Nakagawa and Shotaro Hirata + bibtex: "@inproceedings{rnakagawa2017,\n abstract = {The AEVE provides for a brain-computer-interface\ + \ (BCI) controlled audiovisual experience, presented through a virtual reality\ + \ head-mounted display (VRHMD). We have developed an audiovisual art piece where\ + \ progression through 3 sections and 1 extra section occurs using an “Attention”\ + \ value derived from the Electroencephalography (EEG) data. The only interaction\ + \ in this work is perspective that is participant's view, and the EEG data. However,\ + \ we believe the simple interaction amplifies the participant's feeling of immersion.\ + \ Through the narrative of the work and the simple interaction, we attempt to\ + \ connect some concepts such as audiovisual experience, virtual reality (VR),\ + \ BCI, grid, consciousness, memory, universe, etc. 
in a minimal way.},\n address\ + \ = {Copenhagen, Denmark},\n author = {Ryu Nakagawa and Shotaro Hirata},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176336},\n issn = {2220-4806},\n pages\ + \ = {497--498},\n publisher = {Aalborg University Copenhagen},\n title = {AEVE:\ + \ An Audiovisual Experience Using VRHMD and EEG},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0097.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179048 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176336 issn: 2220-4806 - month: May - pages: 317--318 - publisher: Louisiana State University - title: The Bucket System --- A computer mediated signalling system for group improvisation - url: http://www.nime.org/proceedings/2015/nime2015_171.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/171/0171-file1.mp4 - year: 2015 - + pages: 497--498 + publisher: Aalborg University Copenhagen + title: 'AEVE: An Audiovisual Experience Using VRHMD and EEG' + url: http://www.nime.org/proceedings/2017/nime2017_paper0097.pdf + year: 2017 -- ENTRYTYPE: inproceedings - ID: salexanderadams2015 - abstract: 'This paper outlines the development of a versatile platform for the performance - and composition of tangible graphic scores, providing technical details of the - hardware and software design. The system is conceived as a touch surface facilitating - modular textured plates, coupled with corresponding visual feedback.' 
- address: 'Baton Rouge, Louisiana, USA' - author: Simon Alexander-Adams and Michael Gurevich - bibtex: "@inproceedings{salexanderadams2015,\n abstract = {This paper outlines the\ - \ development of a versatile platform for the performance and composition of tangible\ - \ graphic scores, providing technical details of the hardware and software design.\ - \ The system is conceived as a touch surface facilitating modular textured plates,\ - \ coupled with corresponding visual feedback.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Simon Alexander-Adams and Michael Gurevich},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179004},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {174--175},\n publisher = {Louisiana\ - \ State University},\n title = {A Flexible Platform for Tangible Graphic Scores},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_172.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/172/0172-file1.mov},\n year = {2015}\n\ - }\n" + +- ENTRYTYPE: inproceedings + ID: rcadiz2017 + abstract: 'The Arcontinuo is an electronic musical instrument designed from a perspective + based in the study of their potential users and their interaction with existing + musical interfaces. Arcontinuo aims to change the way electronic music is performed, + as it is capable of incorporating natural and ergonomic human gestures, allowing + the musician to engage with the instrument and as a result, enhance the connection + with the audience. Arcontinuo challenges the notion of what a musical gesture + is and goes against traditional ways of performing music, by proposing a concept + that we call smart playing mapping, as a way of achieving a better and more meaningful + performance.' 
+ address: 'Copenhagen, Denmark' + author: Rodrigo Cadiz and Alvaro Sylleros + bibtex: "@inproceedings{rcadiz2017,\n abstract = {The Arcontinuo is an electronic\ + \ musical instrument designed from a perspective based in the study of their potential\ + \ users and their interaction with existing musical interfaces. Arcontinuo aims\ + \ to change the way electronic music is performed, as it is capable of incorporating\ + \ natural and ergonomic human gestures, allowing the musician to engage with the\ + \ instrument and as a result, enhance the connection with the audience. Arcontinuo\ + \ challenges the notion of what a musical gesture is and goes against traditional\ + \ ways of performing music, by proposing a concept that we call smart playing\ + \ mapping, as a way of achieving a better and more meaningful performance.},\n\ + \ address = {Copenhagen, Denmark},\n author = {Rodrigo Cadiz and Alvaro Sylleros},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176338},\n issn = {2220-4806},\n\ + \ pages = {499--500},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Arcontinuo: the Instrument of Change},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0098.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179004 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176338 issn: 2220-4806 - month: May - pages: 174--175 - publisher: Louisiana State University - title: A Flexible Platform for Tangible Graphic Scores - url: http://www.nime.org/proceedings/2015/nime2015_172.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/172/0172-file1.mov - year: 2015 + pages: 499--500 + publisher: Aalborg University Copenhagen + title: 'Arcontinuo: the Instrument of Change' + url: http://www.nime.org/proceedings/2017/nime2017_paper0098.pdf + year: 2017 - 
ENTRYTYPE: inproceedings - ID: rvanrooyenb2015 - abstract: 'The ability to acquire and analyze a percussion performance in an efficient, - affordable, and non-invasive manner has been made possible by a unique composite - of off-the-shelf products. Through various methods of calibration and analysis, - human motion as imparted on a striking implement can be tracked and correlated - with traditional audio data in order to compare performances. Ultimately, conclusions - can be drawn that drive pedagogical studies as well as advances in musical robots.' - address: 'Baton Rouge, Louisiana, USA' - author: Robert Van Rooyen and George Tzanetakis - bibtex: "@inproceedings{rvanrooyenb2015,\n abstract = {The ability to acquire and\ - \ analyze a percussion performance in an efficient, affordable, and non-invasive\ - \ manner has been made possible by a unique composite of off-the-shelf products.\ - \ Through various methods of calibration and analysis, human motion as imparted\ - \ on a striking implement can be tracked and correlated with traditional audio\ - \ data in order to compare performances. 
Ultimately, conclusions can be drawn\ - \ that drive pedagogical studies as well as advances in musical robots.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Robert {Van Rooyen} and George\ - \ Tzanetakis},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1181400},\n editor\ - \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ - \ pages = {339--342},\n publisher = {Louisiana State University},\n title = {Pragmatic\ - \ Drum Motion Capture System},\n url = {http://www.nime.org/proceedings/2015/nime2015_173.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/173/0173-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: rblazey2017 + abstract: 'Kalimbo is an extended kalimba, built from repurposed materials and fitted + with sensors that enable it to function as a reductionist control interface through + physical gestures and capacitive sensing. The work demonstrates an attempt to + apply theories and techniques from visual collage art to the concept of musical + performance ecologies. The body of the instrument emerged from material-led making, + and the disparate elements of a particular musical performance ecology (acoustic + instrument, audio effects, samples, synthesis and controls) are juxtaposed and + unified into one coherent whole. As such, Kalimbo demonstrates how visual arts, + in particular collage, can inform the design and creation of new musical instruments, + interfaces and streamlined performance ecologies.' + address: 'Copenhagen, Denmark' + author: Rob Blazey + bibtex: "@inproceedings{rblazey2017,\n abstract = {Kalimbo is an extended kalimba,\ + \ built from repurposed materials and fitted with sensors that enable it to function\ + \ as a reductionist control interface through physical gestures and capacitive\ + \ sensing. 
The work demonstrates an attempt to apply theories and techniques\ + \ from visual collage art to the concept of musical performance ecologies. The\ + \ body of the instrument emerged from material-led making, and the disparate elements\ + \ of a particular musical performance ecology (acoustic instrument, audio effects,\ + \ samples, synthesis and controls) are juxtaposed and unified into one coherent\ + \ whole. As such, Kalimbo demonstrates how visual arts, in particular collage,\ + \ can inform the design and creation of new musical instruments, interfaces and\ + \ streamlined performance ecologies.},\n address = {Copenhagen, Denmark},\n author\ + \ = {Rob Blazey},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176340},\n\ + \ issn = {2220-4806},\n pages = {501--502},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {Kalimbo: an Extended Thumb Piano and Minimal Control\ + \ Interface},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0099.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1181400 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176340 issn: 2220-4806 - month: May - pages: 339--342 - publisher: Louisiana State University - title: Pragmatic Drum Motion Capture System - url: http://www.nime.org/proceedings/2015/nime2015_173.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/173/0173-file1.mp4 - year: 2015 + pages: 501--502 + publisher: Aalborg University Copenhagen + title: 'Kalimbo: an Extended Thumb Piano and Minimal Control Interface' + url: http://www.nime.org/proceedings/2017/nime2017_paper0099.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: qyang2015 - abstract: 'Multi-touch mobile devices provide a fresh paradigm for interactions, - as well as a platform for building rich musical applications. 
This paper presents - a multi-touch mobile programming environment that supports the exploration of - different representations in visual programming for music and audio interfaces. - Using a common flow-based visual programming vocabulary, we implemented a system - based on the urMus platform that explores three types of touch-based interaction - representations: a text-based menu representation, a graphical icon-based representation, - and a novel multi-touch gesture-based representation. We illustrated their use - on interface design for musical controllers.' - address: 'Baton Rouge, Louisiana, USA' - author: Qi Yang and Georg Essl - bibtex: "@inproceedings{qyang2015,\n abstract = {Multi-touch mobile devices provide\ - \ a fresh paradigm for interactions, as well as a platform for building rich musical\ - \ applications. This paper presents a multi-touch mobile programming environment\ - \ that supports the exploration of different representations in visual programming\ - \ for music and audio interfaces. Using a common flow-based visual programming\ - \ vocabulary, we implemented a system based on the urMus platform that explores\ - \ three types of touch-based interaction representations: a text-based menu representation,\ - \ a graphical icon-based representation, and a novel multi-touch gesture-based\ - \ representation. We illustrated their use on interface design for musical controllers.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Qi Yang and Georg Essl},\n\ + ID: jtilbian2017a + abstract: 'This demonstration introduces the Stride programming language, the Stride + IDE, and the Saturn M7 embedded audio development board. Stride is a declarative + and reactive domain specific programming language for real-time sound synthesis, + processing, and interaction design. The Stride IDE is a cross-platform integrated + development environment for Stride. 
Saturn M7 is an embedded audio development + board by Okra Engineering, designed around an ARM Cortex-M7 processor based microcontroller. It + targets high-end multi-channel audio processing and synthesis with very low latency + and power consumption. The microcontroller has a rich set of audio and communication + peripherals, capable of performing complex real-time DSP tasks with double precision + floating point accuracy. This demonstration will showcase specific features of + the Stride language, which facilitates the design of new interactive musical instruments. + The Stride IDE will be used to compose Stride code and generate code for the Saturn + M7 board. The various hardware capabilities of the Saturn M7 board will also + be presented.' + address: 'Copenhagen, Denmark' + author: Joseph Tilbian and Andres Cabrera and Steffen Martin and Lukasz Olczyk + bibtex: "@inproceedings{jtilbian2017a,\n abstract = {This demonstration introduces\ + \ the Stride programming language, the Stride IDE, and the Saturn M7 embedded\ + \ audio development board. Stride is a declarative and reactive domain specific\ + \ programming language for real-time sound synthesis, processing, and interaction\ + \ design. The Stride IDE is a cross-platform integrated development environment\ + \ for Stride. Saturn M7 is an embedded audio development board by Okra Engineering,\ + \ designed around an ARM Cortex-M7 processor based microcontroller. It targets\ + \ high-end multi-channel audio processing and synthesis with very low latency\ + \ and power consumption. The microcontroller has a rich set of audio and communication\ + \ peripherals, capable of performing complex real-time DSP tasks with double precision\ + \ floating point accuracy. This demonstration will showcase specific features\ + \ of the Stride language, which facilitates the design of new interactive musical\ + \ instruments. The Stride IDE will be used to compose Stride code and generate\ + \ code for the Saturn M7 board. 
The various hardware capabilities of the Saturn\ + \ M7 board will also be presented.},\n address = {Copenhagen, Denmark},\n author\ + \ = {Joseph Tilbian and Andres Cabrera and Steffen Martin and Lukasz Olczyk},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1181416},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {369--373},\n\ - \ publisher = {Louisiana State University},\n title = {Representation-Plurality\ - \ in Multi-Touch Mobile Visual Programming for Music},\n url = {http://www.nime.org/proceedings/2015/nime2015_177.pdf},\n\ - \ year = {2015}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1176342},\n issn = {2220-4806},\n\ + \ pages = {503--504},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Stride on Saturn M7 for Interactive Musical Instrument Design},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0100.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1181416 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176342 issn: 2220-4806 - month: May - pages: 369--373 - publisher: Louisiana State University - title: Representation-Plurality in Multi-Touch Mobile Visual Programming for Music - url: http://www.nime.org/proceedings/2015/nime2015_177.pdf - year: 2015 + pages: 503--504 + publisher: Aalborg University Copenhagen + title: Stride on Saturn M7 for Interactive Musical Instrument Design + url: http://www.nime.org/proceedings/2017/nime2017_paper0100.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: ajensenius2015 - abstract: 'This paper presents the scientific-artistic project Sverm, which has - focused on the use of micromotion and microsound in artistic practice. 
Starting - from standing still in silence, the artists involved have developed conceptual - and experiential knowledge of microactions, microsounds and the possibilities - of microinteracting with light and sound.' - address: 'Baton Rouge, Louisiana, USA' - author: 'Jensenius, Alexander Refsum' - bibtex: "@inproceedings{ajensenius2015,\n abstract = {This paper presents the scientific-artistic\ - \ project Sverm, which has focused on the use of micromotion and microsound in\ - \ artistic practice. Starting from standing still in silence, the artists involved\ - \ have developed conceptual and experiential knowledge of microactions, microsounds\ - \ and the possibilities of microinteracting with light and sound.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Jensenius, Alexander Refsum},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179100},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {16--19},\n\ - \ publisher = {Louisiana State University},\n title = {Microinteraction in Music/Dance\ - \ Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_178.pdf},\n\ - \ year = {2015}\n}\n" + ID: tkitahara2017 + abstract: 'In this paper, we present JamSketch, a real-time improvisation support + system which automatically generates melodies according to melodic outlines drawn + by the users. The system generates the improvised melodies based on (1) an outline + sketched by the user using a mouse or a touch screen, (2) a genetic algorithm + based on a dataset of existing music pieces as well as musical knowledge, and + (3) an expressive performance model for timing and dynamic transformations. The + aim of the system is to allow people with no prior musical knowledge to be able + to enjoy playing music by improvising melodies in real time.' 
+ address: 'Copenhagen, Denmark' + author: Tetsuro Kitahara and Sergio Giraldo and Rafael Ramírez + bibtex: "@inproceedings{tkitahara2017,\n abstract = {In this paper, we present JamSketch,\ + \ a real-time improvisation support system which automatically generates melodies\ + \ according to melodic outlines drawn by the users. The system generates the improvised\ + \ melodies based on (1) an outline sketched by the user using a mouse or a touch\ + \ screen, (2) a genetic algorithm based on a dataset of existing music pieces\ + \ as well as musical knowledge, and (3) an expressive performance model for timing\ + \ and dynamic transformations. The aim of the system is to allow people with no\ + \ prior musical knowledge to be able to enjoy playing music by improvising melodies\ + \ in real time.},\n address = {Copenhagen, Denmark},\n author = {Tetsuro Kitahara\ + \ and Sergio Giraldo and Rafael Ramírez},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176344},\n\ + \ issn = {2220-4806},\n pages = {505--506},\n publisher = {Aalborg University\ + \ Copenhagen},\n title = {JamSketch: A Drawing-based Real-time Evolutionary Improvisation\ + \ Support System},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0101.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179100 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176344 issn: 2220-4806 - month: May - pages: 16--19 - publisher: Louisiana State University - title: Microinteraction in Music/Dance Performance - url: http://www.nime.org/proceedings/2015/nime2015_178.pdf - year: 2015 + pages: 505--506 + publisher: Aalborg University Copenhagen + title: 'JamSketch: A Drawing-based Real-time Evolutionary Improvisation Support + System' + url: http://www.nime.org/proceedings/2017/nime2017_paper0101.pdf + year: 2017 - 
ENTRYTYPE: inproceedings - ID: ajenseniusb2015 - abstract: 'The MYO armband from Thalmic Labs is a complete and wireless motion and - muscle sensing platform. This paper evaluates the armband''s sensors and its potential - for NIME applications. This is followed by a presentation of the prototype instrument - MuMYO. We conclude that, despite some shortcomings, the armband has potential - of becoming a new ``standard'''' controller in the NIME community.' - address: 'Baton Rouge, Louisiana, USA' - author: 'Nymoen, Kristian and Haugen, Mari Romarheim and Jensenius, Alexander Refsum' - bibtex: "@inproceedings{ajenseniusb2015,\n abstract = {The MYO armband from Thalmic\ - \ Labs is a complete and wireless motion and muscle sensing platform. This paper\ - \ evaluates the armband's sensors and its potential for NIME applications. This\ - \ is followed by a presentation of the prototype instrument MuMYO. We conclude\ - \ that, despite some shortcomings, the armband has potential of becoming a new\ - \ ``standard'' controller in the NIME community.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Nymoen, Kristian and Haugen, Mari Romarheim and Jensenius,\ - \ Alexander Refsum},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179150},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {215--218},\n publisher = {Louisiana State University},\n title\ - \ = {MuMYO --- Evaluating and Exploring the MYO Armband for Musical Interaction},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_179.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/179/0179-file1.mov},\n year = {2015}\n\ - }\n" + ID: jharrison2017 + abstract: 'We present an attachment for the bass guitar which allows MIDI-controlled + actuated fretting. 
This adapted instrument is presented as a potential method + of augmenting the bass guitar for those with upper-limb disabilities. We conducted + an online survey of 48 bassists in order to highlight the most important aspects + of bass playing. We found that timbral and dynamic features related to the plucking + hand were most important to the survey respondents. We designed an actuated fretting + mechanism to replace the role of the fretting hand in order to preserve plucking + hand techniques. We then conducted a performance study in which experienced bassists + prepared and performed an accompaniment to a backing track with the adapted bass. + The performances highlighted ways in which adapting a fretted string instrument + in this way impacts plucking hand technique. ' + address: 'Copenhagen, Denmark' + author: Jacob Harrison and Andrew McPherson + bibtex: "@inproceedings{jharrison2017,\n abstract = {We present an attachment for\ + \ the bass guitar which allows MIDI-controlled actuated fretting. This adapted\ + \ instrument is presented as a potential method of augmenting the bass guitar\ + \ for those with upper-limb disabilities. We conducted an online survey of 48\ + \ bassists in order to highlight the most important aspects of bass playing. We\ + \ found that timbral and dynamic features related to the plucking hand were most\ + \ important to the survey respondents. We designed an actuated fretting mechanism\ + \ to replace the role of the fretting hand in order to preserve plucking hand\ + \ techniques. We then conducted a performance study in which experienced bassists\ + \ prepared and performed an accompaniment to a backing track with the adapted\ + \ bass. The performances highlighted ways in which adapting a fretted string instrument\ + \ in this way impacts plucking hand technique. 
},\n address = {Copenhagen, Denmark},\n\ + \ author = {Jacob Harrison and Andrew McPherson},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176346},\n issn = {2220-4806},\n pages = {507--508},\n publisher\ + \ = {Aalborg University Copenhagen},\n title = {An Adapted Bass Guitar for One-Handed\ + \ Playing},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0102.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179150 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176346 issn: 2220-4806 - month: May - pages: 215--218 - publisher: Louisiana State University - title: MuMYO --- Evaluating and Exploring the MYO Armband for Musical Interaction - url: http://www.nime.org/proceedings/2015/nime2015_179.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/179/0179-file1.mov - year: 2015 + pages: 507--508 + publisher: Aalborg University Copenhagen + title: An Adapted Bass Guitar for One-Handed Playing + url: http://www.nime.org/proceedings/2017/nime2017_paper0102.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: esheffieldb2015 - abstract: 'This paper describes a system for interactive mechanically actuated percussion. - Design principles regarding seamless control and retention of natural acoustic - properties are established. Performance patterns on a preliminary version are - examined, including the potential for cooperative and distributed performance.' - address: 'Baton Rouge, Louisiana, USA' - author: Eric Sheffield and Michael Gurevich - bibtex: "@inproceedings{esheffieldb2015,\n abstract = {This paper describes a system\ - \ for interactive mechanically actuated percussion. 
Design principles regarding\ - \ seamless control and retention of natural acoustic properties are established.\ - \ Performance patterns on a preliminary version are examined, including the potential\ - \ for cooperative and distributed performance.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Eric Sheffield and Michael Gurevich},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179176},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {11--15},\n publisher = {Louisiana\ - \ State University},\n title = {Distributed Mechanical Actuation of Percussion\ - \ Instruments},\n url = {http://www.nime.org/proceedings/2015/nime2015_183.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/183/DistributedActuationDemo.mp4},\n\ - \ year = {2015}\n}\n" + ID: kcybulski2017 + abstract: "Feedboxes are interactive sound objects that generate rhythmic and harmonic\ + \ patterns. Their purpose is to create intuitive tools for live improvisation,\ + \ without the need for using computer with midi controller or fixed playback.\ + \ Their only means of communication is sound --- they \"listen\" with the microphone\ + \ and \"speak\" with the speaker, thus interaction with Feedboxes is very similar\ + \ to playing with real musicians. The boxes could be used together with any instrument,\ + \ or on their own – in this case they create a feedback loop by listening\ + \ and responding to each other, creating ever-changing rhythmic structures. Feedboxes\ + \ react to incoming sounds in simple, predefined manner. Yet, when used together,\ + \ their behaviour may become quite complex. Each of two boxes has its own sound\ + \ and set of simple rules." 
+ address: 'Copenhagen, Denmark' + author: Krzysztof Cybulski + bibtex: "@inproceedings{kcybulski2017,\n abstract = {Feedboxes are interactive sound\ + \ objects that generate rhythmic and harmonic patterns. Their purpose is to create\ + \ intuitive tools for live improvisation, without the need for using computer\ + \ with midi controller or fixed playback. Their only means of communication is\ + \ sound --- they \"listen\" with the microphone and \"speak\" with the speaker,\ + \ thus interaction with Feedboxes is very similar to playing with real musicians.\ + \ The boxes could be used together with any instrument, or on their own –\ + \ in this case they create a feedback loop by listening and responding to each\ + \ other, creating ever-changing rhythmic structures. Feedboxes react to incoming\ + \ sounds in simple, predefined manner. Yet, when used together, their behaviour\ + \ may become quite complex. Each of two boxes has its own sound and set of simple\ + \ rules.},\n address = {Copenhagen, Denmark},\n author = {Krzysztof Cybulski},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176348},\n issn = {2220-4806},\n\ + \ pages = {509--510},\n publisher = {Aalborg University Copenhagen},\n title =\ + \ {Feedboxes},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0103.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179176 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176348 issn: 2220-4806 - month: May - pages: 11--15 - publisher: Louisiana State University - title: Distributed Mechanical Actuation of Percussion Instruments - url: http://www.nime.org/proceedings/2015/nime2015_183.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/183/DistributedActuationDemo.mp4 - year: 2015 + pages: 509--510 + publisher: Aalborg University Copenhagen + 
title: Feedboxes + url: http://www.nime.org/proceedings/2017/nime2017_paper0103.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: jhe2015 - abstract: 'Motion-based musical interfaces are ubiquitous. With the plethora of - sensing solutions and the possibility of developing custom designs, it is important - that the new musical interface has the capability to perform any number of tasks. - This paper presents the theoretical framework for defining, designing, and evaluation - process of a physical gesture acquisition for Guqin performance. The framework - is based on an iterative design process, and draws upon the knowledge in Guqin - performance to develop a system to determine the interaction between a Guqin player - and the computer. This paper emphasizes the definition, conception, and evaluation - of the acquisition system.' - address: 'Baton Rouge, Louisiana, USA' - author: Jingyin He and Ajay Kapur and Dale Carnegie - bibtex: "@inproceedings{jhe2015,\n abstract = {Motion-based musical interfaces are\ - \ ubiquitous. With the plethora of sensing solutions and the possibility of developing\ - \ custom designs, it is important that the new musical interface has the capability\ - \ to perform any number of tasks. This paper presents the theoretical framework\ - \ for defining, designing, and evaluation process of a physical gesture acquisition\ - \ for Guqin performance. The framework is based on an iterative design process,\ - \ and draws upon the knowledge in Guqin performance to develop a system to determine\ - \ the interaction between a Guqin player and the computer. 
This paper emphasizes\ - \ the definition, conception, and evaluation of the acquisition system.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Jingyin He and Ajay Kapur and Dale\ - \ Carnegie},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179088},\n editor\ - \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ - \ pages = {187--190},\n publisher = {Louisiana State University},\n title = {Developing\ - \ A Physical Gesture Acquisition System for Guqin Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_184.pdf},\n\ - \ year = {2015}\n}\n" + ID: sglickman2017 + abstract: "This paper describes the design and implementation of an augmented reality\ + \ (AR) piano learning tool that uses a Microsoft HoloLens and a MIDI-over-Bluetooth-enabled\ + \ electric piano. The tool presents a unique visual interface—a “mirror\ + \ key overlay” approach—fitted for the AR environment, and opens up\ + \ the possibility of on-instrument learning experiences. The curriculum focuses\ + \ on teaching improvisation in blues, rock, jazz and classical genres. Users at\ + \ the piano engage with interactive lessons, watch virtual hand demonstrations,\ + \ see and hear example improvisations, and play their own solos and accompaniment\ + \ along with AR-projected virtual musicians. The tool aims to be entertaining\ + \ yet also effective in teaching core musical concepts." + address: 'Copenhagen, Denmark' + author: Seth Glickman and Byunghwan Lee and Fu Yen Hsiao and Shantanu Das + bibtex: "@inproceedings{sglickman2017,\n abstract = {This paper describes the design\ + \ and implementation of an augmented reality (AR) piano learning tool that uses\ + \ a Microsoft HoloLens and a MIDI-over-Bluetooth-enabled electric piano. 
The tool\ + \ presents a unique visual interface—a “mirror key overlay”\ + \ approach—fitted for the AR environment, and opens up the possibility of\ + \ on-instrument learning experiences. The curriculum focuses on teaching improvisation\ + \ in blues, rock, jazz and classical genres. Users at the piano engage with interactive\ + \ lessons, watch virtual hand demonstrations, see and hear example improvisations,\ + \ and play their own solos and accompaniment along with AR-projected virtual musicians.\ + \ The tool aims to be entertaining yet also effective in teaching core musical\ + \ concepts.},\n address = {Copenhagen, Denmark},\n author = {Seth Glickman and\ + \ Byunghwan Lee and Fu Yen Hsiao and Shantanu Das},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176350},\n issn = {2220-4806},\n pages = {511--512},\n\ + \ publisher = {Aalborg University Copenhagen},\n title = {Music Everywhere ---\ + \ Augmented Reality Piano Improvisation Learning System},\n url = {http://www.nime.org/proceedings/2017/nime2017_paper0104.pdf},\n\ + \ year = {2017}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179088 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176350 issn: 2220-4806 - month: May - pages: 187--190 - publisher: Louisiana State University - title: Developing A Physical Gesture Acquisition System for Guqin Performance - url: http://www.nime.org/proceedings/2015/nime2015_184.pdf - year: 2015 + pages: 511--512 + publisher: Aalborg University Copenhagen + title: Music Everywhere --- Augmented Reality Piano Improvisation Learning System + url: http://www.nime.org/proceedings/2017/nime2017_paper0104.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: rgrahamb2015 - abstract: 'Multichannel (or divided) audio pickups are becoming increasingly ubiquitous - in electric guitar and computer music 
communities. These systems allow performers - to access signals for each string of their instrument independently and concurrently - in real-time creative practice. This paper presents an open-source audio breakout - circuit that provides independent audio outputs per string of any chordophone - (stringed instrument) that is fitted with a multichannel audio pickup system. - The following sections include a brief historical contextualization and discussion - on the significance of multichannel audio technology in instrumental guitar music, - an overview of our proposed impedance matching circuit for piezoelectric-based - audio pickups, and a presentation of a new open-source PCB design (SEPTAR V2) - that includes a mountable 13-pin DIN connection to improve compatibility with - commercial multichannel pickup systems. This paper will also include a short summary - of the potential creative applications and perceptual implications of this multichannel - technology when used in creative practice.' - address: 'Baton Rouge, Louisiana, USA' - author: Richard Graham and John Harding - bibtex: "@inproceedings{rgrahamb2015,\n abstract = {Multichannel (or divided) audio\ - \ pickups are becoming increasingly ubiquitous in electric guitar and computer\ - \ music communities. These systems allow performers to access signals for each\ - \ string of their instrument independently and concurrently in real-time creative\ - \ practice. This paper presents an open-source audio breakout circuit that provides\ - \ independent audio outputs per string of any chordophone (stringed instrument)\ - \ that is fitted with a multichannel audio pickup system. 
The following sections\ - \ include a brief historical contextualization and discussion on the significance\ - \ of multichannel audio technology in instrumental guitar music, an overview of\ - \ our proposed impedance matching circuit for piezoelectric-based audio pickups,\ - \ and a presentation of a new open-source PCB design (SEPTAR V2) that includes\ - \ a mountable 13-pin DIN connection to improve compatibility with commercial multichannel\ - \ pickup systems. This paper will also include a short summary of the potential\ - \ creative applications and perceptual implications of this multichannel technology\ - \ when used in creative practice.},\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Richard Graham and John Harding},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179070},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ - \ = {2220-4806},\n month = {May},\n pages = {241--244},\n publisher = {Louisiana\ - \ State University},\n title = {SEPTAR: Audio Breakout Design for Multichannel\ - \ Guitar},\n url = {http://www.nime.org/proceedings/2015/nime2015_187.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/187/0187-file1.wav},\n urlsuppl2\ - \ = {http://www.nime.org/proceedings/2015/187/0187-file2.wav},\n year = {2015}\n\ + ID: jbender2017 + abstract: 'Song Kernel is a chord-and-note harmonizing musical input interface applicable + to electronic instruments in both hardware and software format. It enables to + play chords and melodies while visualizing harmonic functions of chords within + a scale of western music in one single static pattern. It provides amateur musicians, + as well as people with no experience in playing music, a graphic and intuitive + way to play songs, manage harmonic structures and identify composition patterns. 
' + address: 'Copenhagen, Denmark' + author: Juan Bender and Gabriel Lecup and Sergio Fernandez + bibtex: "@inproceedings{jbender2017,\n abstract = {Song Kernel is a chord-and-note\ + \ harmonizing musical input interface applicable to electronic instruments in\ + \ both hardware and software format. It enables to play chords and melodies while\ + \ visualizing harmonic functions of chords within a scale of western music in\ + \ one single static pattern. It provides amateur musicians, as well as people\ + \ with no experience in playing music, a graphic and intuitive way to play songs,\ + \ manage harmonic structures and identify composition patterns. },\n address\ + \ = {Copenhagen, Denmark},\n author = {Juan Bender and Gabriel Lecup and Sergio\ + \ Fernandez},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176352},\n issn\ + \ = {2220-4806},\n pages = {513--514},\n publisher = {Aalborg University Copenhagen},\n\ + \ title = {Song Kernel --- Explorations in Intuitive Use of Harmony},\n url =\ + \ {http://www.nime.org/proceedings/2017/nime2017_paper0105.pdf},\n year = {2017}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179070 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1176352 issn: 2220-4806 - month: May - pages: 241--244 - publisher: Louisiana State University - title: 'SEPTAR: Audio Breakout Design for Multichannel Guitar' - url: http://www.nime.org/proceedings/2015/nime2015_187.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/187/0187-file1.wav - urlsuppl2: http://www.nime.org/proceedings/2015/187/0187-file2.wav - year: 2015 + pages: 513--514 + publisher: Aalborg University Copenhagen + title: Song Kernel --- Explorations in Intuitive Use of Harmony + url: http://www.nime.org/proceedings/2017/nime2017_paper0105.pdf + year: 2017 - ENTRYTYPE: inproceedings - ID: 
fberthaut2015 - abstract: 'We present Reflets, a mixed-reality environment for musical performances - that allows for freely displaying virtual content on stage, such as 3D virtual - musical interfaces or visual augmentations of instruments and performers. It relies - on spectators and performers revealing virtual objects by slicing through them - with body parts or objects, and on planar slightly reflective transparent panels - that combine the stage and audience spaces. In this paper, we describe the approach - and implementation challenges of Reflets. We then demonstrate that it matches - the requirements of musical performances. It allows for placing virtual content - anywhere on large stages, even overlapping with physical elements and provides - a consistent rendering of this content for large numbers of spectators. It also - preserves non-verbal communication between the audience and the performers, and - is inherently engaging for the spectators. We finally show that Reflets opens - musical performance opportunities such as augmented interaction between musicians - and novel techniques for 3D sound shapes manipulation.' - address: 'Baton Rouge, Louisiana, USA' - author: Florent Berthaut and Diego Martinez and Martin Hachet and Sriram Subramanian - bibtex: "@inproceedings{fberthaut2015,\n abstract = {We present Reflets, a mixed-reality\ - \ environment for musical performances that allows for freely displaying virtual\ - \ content on stage, such as 3D virtual musical interfaces or visual augmentations\ - \ of instruments and performers. 
It relies on spectators and performers revealing\ - \ virtual objects by slicing through them with body parts or objects, and on planar\ - \ slightly reflective transparent panels that combine the stage and audience spaces.\ - \ In this paper, we describe the approach and implementation challenges of Reflets.\ - \ We then demonstrate that it matches the requirements of musical performances.\ - \ It allows for placing virtual content anywhere on large stages, even overlapping\ - \ with physical elements and provides a consistent rendering of this content for\ - \ large numbers of spectators. It also preserves non-verbal communication between\ - \ the audience and the performers, and is inherently engaging for the spectators.\ - \ We finally show that Reflets opens musical performance opportunities such as\ - \ augmented interaction between musicians and novel techniques for 3D sound shapes\ - \ manipulation.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Florent\ - \ Berthaut and Diego Martinez and Martin Hachet and Sriram Subramanian},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179028},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {116--120},\n\ - \ publisher = {Louisiana State University},\n title = {Reflets: Combining and\ - \ Revealing Spaces for Musical Performances},\n url = {http://www.nime.org/proceedings/2015/nime2015_190.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/190/0190-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Vallis2010 + abstract: 'The aim of this paper is to define the process of iterative interface + design as it pertains to musical performance. Embodying this design approach, + the Monome OSC/MIDI USB controller represents a minimalist, open-source hardware + device. 
The open-source nature of the device has allowed for a small group of + Monome users to modify the hardware, firmware, and software associated with the + interface. These user driven modifications have allowed the re-imagining of the + interface for new and novel purposes, beyond even that of the device''s original + intentions. With development being driven by a community of users, a device can + become several related but unique generations of musical controllers, each one + focused on a specific set of needs. ' + address: 'Sydney, Australia' + author: 'Vallis, Owen and Hochenbaum, Jordan and Kapur, Ajay' + bibtex: "@inproceedings{Vallis2010,\n abstract = {The aim of this paper is to define\ + \ the process of iterative interface design as it pertains to musical performance.\ + \ Embodying this design approach, the Monome OSC/MIDI USB controller represents\ + \ a minimalist, open-source hardware device. The open-source nature of the device\ + \ has allowed for a small group of Monome users to modify the hardware, firmware,\ + \ and software associated with the interface. These user driven modifications\ + \ have allowed the re-imagining of the interface for new and novel purposes, beyond\ + \ even that of the device's original intentions. With development being driven\ + \ by a community of users, a device can become several related but unique generations\ + \ of musical controllers, each one focused on a specific set of needs. 
},\n address\ + \ = {Sydney, Australia},\n author = {Vallis, Owen and Hochenbaum, Jordan and Kapur,\ + \ Ajay},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177919},\n issn = {2220-4806},\n\ + \ keywords = {Iterative Design, Monome, Arduinome, Arduino.},\n pages = {1--6},\n\ + \ title = {A Shift Towards Iterative and Open-Source Design for Musical Interfaces},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_001.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179028 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177919 issn: 2220-4806 - month: May - pages: 116--120 - publisher: Louisiana State University - title: 'Reflets: Combining and Revealing Spaces for Musical Performances' - url: http://www.nime.org/proceedings/2015/nime2015_190.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/190/0190-file1.mp4 - year: 2015 + keywords: 'Iterative Design, Monome, Arduinome, Arduino.' + pages: 1--6 + title: A Shift Towards Iterative and Open-Source Design for Musical Interfaces + url: http://www.nime.org/proceedings/2010/nime2010_001.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: slui2015 - abstract: 'The multi-touch music table is a novel tabletop tangible interface for - expressive musical performance. User touches the picture projected on the table - glass surface to perform music. User can click, drag or use various multi-touch - gestures with fingers to perform music expressively. The picture color, luminosity, - size, finger gesture and pressure determine the music output. The table detects - up to 10 finger touches with their touch pressure. We use a glass, a wood stand, - a mini projector, a web camera and a computer to construct this music table. Hence - this table is highly customizable. 
The table generates music via a re-interpretation - of the artistic components of pictures. It is a cross modal inspiration of music - from visual art on a tangible interface. ' - address: 'Baton Rouge, Louisiana, USA' - author: Simon Lui - bibtex: "@inproceedings{slui2015,\n abstract = {The multi-touch music table is a\ - \ novel tabletop tangible interface for expressive musical performance. User touches\ - \ the picture projected on the table glass surface to perform music. User can\ - \ click, drag or use various multi-touch gestures with fingers to perform music\ - \ expressively. The picture color, luminosity, size, finger gesture and pressure\ - \ determine the music output. The table detects up to 10 finger touches with their\ - \ touch pressure. We use a glass, a wood stand, a mini projector, a web camera\ - \ and a computer to construct this music table. Hence this table is highly customizable.\ - \ The table generates music via a re-interpretation of the artistic components\ - \ of pictures. It is a cross modal inspiration of music from visual art on a tangible\ - \ interface. },\n address = {Baton Rouge, Louisiana, USA},\n author = {Simon Lui},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179122},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {374--377},\n\ - \ publisher = {Louisiana State University},\n title = {Generate expressive music\ - \ from picture with a handmade multi-touch music table},\n url = {http://www.nime.org/proceedings/2015/nime2015_191.pdf},\n\ - \ year = {2015}\n}\n" + ID: Maruyama2010 + abstract: 'Musical instruments have a long history, and many types of musical instruments + have been created to attain ideal sound production. At the same time, various + types of electronic musical instruments have been developed. 
Since the main purpose + of conventional electronic instruments is to duplicate the shape of acoustic instruments + with no change in their hardware configuration, the diapason and the performance + style of each instrument is inflexible. Therefore, the goal of our study is to + construct the UnitInstrument that consists of various types of musical units. + A unit is constructed by simulating functional elements of conventional musical + instruments, such as output timing of sound and pitch decision. Each unit has + connectors for connecting other units to create various types of musical instruments. + Additionally, we propose a language for easily and flexibly describing the settings + of units. We evaluated the effectiveness of our proposed system by using it in + actual performances.' + address: 'Sydney, Australia' + author: 'Maruyama, Yutaro and Takegawa, Yoshinari and Terada, Tsutomu and Tsukamoto, + Masahiko' + bibtex: "@inproceedings{Maruyama2010,\n abstract = {Musical instruments have a long\ + \ history, and many types of musical instruments have been created to attain ideal\ + \ sound production. At the same time, various types of electronic musical instruments\ + \ have been developed. Since the main purpose of conventional electronic instruments\ + \ is to duplicate the shape of acoustic instruments with no change in their hardware\ + \ configuration, the diapason and the performance style of each instrument is\ + \ inflexible. Therefore, the goal of our study is to construct the UnitInstrument\ + \ that consists of various types of musical units. A unit is constructed by simulating\ + \ functional elements of conventional musical instruments, such as output timing\ + \ of sound and pitch decision. Each unit has connectors for connecting other units\ + \ to create various types of musical instruments. Additionally, we propose a language\ + \ for easily and flexibly describing the settings of units. 
We evaluated the effectiveness\ + \ of our proposed system by using it in actual performances.},\n address = {Sydney,\ + \ Australia},\n author = {Maruyama, Yutaro and Takegawa, Yoshinari and Terada,\ + \ Tsutomu and Tsukamoto, Masahiko},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177845},\n\ + \ issn = {2220-4806},\n keywords = {Musical instruments, Script language},\n pages\ + \ = {7--12},\n title = {UnitInstrument : Easy Configurable Musical Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_007.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179122 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177845 issn: 2220-4806 - month: May - pages: 374--377 - publisher: Louisiana State University - title: Generate expressive music from picture with a handmade multi-touch music - table - url: http://www.nime.org/proceedings/2015/nime2015_191.pdf - year: 2015 + keywords: 'Musical instruments, Script language' + pages: 7--12 + title: 'UnitInstrument : Easy Configurable Musical Instruments' + url: http://www.nime.org/proceedings/2010/nime2010_007.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: swaite2015 - abstract: 'This paper discusses the use of typed text as a real-time input for interactive - performance systems. A brief review of the literature discusses text-based generative - systems, links between typing and playing percussion instruments and the use of - typing gestures in contemporary performance practice. The paper then documents - the author''s audio-visual system that is driven by the typing of text/lyrics - in real-time. It is argued that the system promotes the sensation of liveness - through clear, perceptible links between the performer''s gestures, the system''s - audio outputs and the its visual outputs. 
The system also provides a novel approach - to the use of generative techniques in the composition and live performance of - songs. Future developments would include the use of dynamic text effects linked - to sound generation and greater interaction between human performer and the visuals. ' - address: 'Baton Rouge, Louisiana, USA' - author: Si Waite - bibtex: "@inproceedings{swaite2015,\n abstract = {This paper discusses the use of\ - \ typed text as a real-time input for interactive performance systems. A brief\ - \ review of the literature discusses text-based generative systems, links between\ - \ typing and playing percussion instruments and the use of typing gestures in\ - \ contemporary performance practice. The paper then documents the author's audio-visual\ - \ system that is driven by the typing of text/lyrics in real-time. It is argued\ - \ that the system promotes the sensation of liveness through clear, perceptible\ - \ links between the performer's gestures, the system's audio outputs and the its\ - \ visual outputs. The system also provides a novel approach to the use of generative\ - \ techniques in the composition and live performance of songs. Future developments\ - \ would include the use of dynamic text effects linked to sound generation and\ - \ greater interaction between human performer and the visuals. 
},\n address =\ - \ {Baton Rouge, Louisiana, USA},\n author = {Si Waite},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179192},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {168--169},\n publisher = {Louisiana\ - \ State University},\n title = {Reimagining the Computer Keyboard as a Musical\ - \ Interface},\n url = {http://www.nime.org/proceedings/2015/nime2015_193.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/193/0193-file1.mov},\n urlsuppl2\ - \ = {http://www.nime.org/proceedings/2015/193/0193-file2.mp4},\n year = {2015}\n\ - }\n" + ID: Mulder2010 + abstract: 'With the author’s own experiences in mind, this paper argues that, when + used to amplify musical instruments or to play back other sonic material to an + audience, loudspeakers and the technology that drives them, can be considered + as a musical instrument. Particularly in situations with acoustic instruments + this perspective can provide insight into the often cumbersome relation between + the –technology orientated– sound engineer and the –music orientated– performer. + Playing a musical instrument (whether acoustic, electric or electronic) involves + navigating often complicated but very precise interfaces. The interface for sound + amplification technology in a certain environment is not limited to the control + surface of a mixing desk but includes the interaction with other stakeholder, + i.e. the performers and the choice of loudspeakers and microphones and their positions. + As such this interface can be as accurate and intimate but also as complicated + as the interfaces of ''normal'' musical instruments. 
By zooming in on differences + between acoustic and electronic sources a step is taken towards inclusion in this + discussion of the perception of amplified music and the possible influence of + that amplification on performance practise.' + address: 'Sydney, Australia' + author: 'Mulder, Jos' + bibtex: "@inproceedings{Mulder2010,\n abstract = {With the author’s own experiences\ + \ in mind, this paper argues that, when used to amplify musical instruments or\ + \ to play back other sonic material to an audience, loudspeakers and the technology\ + \ that drives them, can be considered as a musical instrument. Particularly in\ + \ situations with acoustic instruments this perspective can provide insight into\ + \ the often cumbersome relation between the –technology orientated– sound engineer\ + \ and the –music orientated– performer. Playing a musical instrument (whether\ + \ acoustic, electric or electronic) involves navigating often complicated but\ + \ very precise interfaces. The interface for sound amplification technology in\ + \ a certain environment is not limited to the control surface of a mixing desk\ + \ but includes the interaction with other stakeholder, i.e. the performers and\ + \ the choice of loudspeakers and microphones and their positions. As such this\ + \ interface can be as accurate and intimate but also as complicated as the interfaces\ + \ of 'normal' musical instruments. 
By zooming in on differences between acoustic\ + \ and electronic sources a step is taken towards inclusion in this discussion\ + \ of the perception of amplified music and the possible influence of that amplification\ + \ on performance practise.},\n address = {Sydney, Australia},\n author = {Mulder,\ + \ Jos},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177861},\n issn = {2220-4806},\n\ + \ keywords = {Sound technology (amplification), musical instruments, multi modal\ + \ perception, performance practice.},\n pages = {13--18},\n title = {The Loudspeaker\ + \ as Musical Instrument},\n url = {http://www.nime.org/proceedings/2010/nime2010_013.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179192 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177861 issn: 2220-4806 - month: May - pages: 168--169 - publisher: Louisiana State University - title: Reimagining the Computer Keyboard as a Musical Interface - url: http://www.nime.org/proceedings/2015/nime2015_193.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/193/0193-file1.mov - urlsuppl2: http://www.nime.org/proceedings/2015/193/0193-file2.mp4 - year: 2015 + keywords: 'Sound technology (amplification), musical instruments, multi modal perception, + performance practice.' + pages: 13--18 + title: The Loudspeaker as Musical Instrument + url: http://www.nime.org/proceedings/2010/nime2010_013.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: mhirabayashi2015 - abstract: 'We performed the musical work ``Sense of Space'''' which uses the sound - ID with high frequency sound DTMF. The IDs are embedded into the music, audiences'' - smartphones and tablets at the venue reacted to the IDs and then they play music - pieces. 
We considered the possibility for novel music experiences brought about - through the participation of audiences and spreading sound at the music venue.' - address: 'Baton Rouge, Louisiana, USA' - author: Masami Hirabayashi and Kazuomi Eshima - bibtex: "@inproceedings{mhirabayashi2015,\n abstract = {We performed the musical\ - \ work ``Sense of Space'' which uses the sound ID with high frequency sound DTMF.\ - \ The IDs are embedded into the music, audiences' smartphones and tablets at the\ - \ venue reacted to the IDs and then they play music pieces. We considered the\ - \ possibility for novel music experiences brought about through the participation\ - \ of audiences and spreading sound at the music venue.},\n address = {Baton Rouge,\ - \ Louisiana, USA},\n author = {Masami Hirabayashi and Kazuomi Eshima},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179092},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {58--60},\n\ - \ publisher = {Louisiana State University},\n title = {Sense of Space: The Audience\ - \ Participation Music Performance with High-Frequency Sound ID},\n url = {http://www.nime.org/proceedings/2015/nime2015_195.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/195/0195-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Ciglar2010 + abstract: 'This paper, describes the second phase of an ongoing research project + dealing with the implementation of an interactive interface. It is a "hands free" + instrument, utilizing a non-contact tactile feedback method based on airborne + ultrasound. The three main elements/components of the interface that will be discussed + in this paper are: 1. Generation of audible sound by self-demodulation of an ultrasound + signal during its propagation through air; 2. 
The condensation of the ultrasound + energy in one spatial point generating a precise tactile reproduction of the audible + sound; and 3. The feed-forward method enabling a real-time intervention of the + musician, by shaping the tactile (ultra)sound directly with his hands.' + address: 'Sydney, Australia' + author: 'Ciglar, Miha' + bibtex: "@inproceedings{Ciglar2010,\n abstract = {This paper, describes the second\ + \ phase of an ongoing research project dealing with the implementation of an interactive\ + \ interface. It is a \"hands free\" instrument, utilizing a non-contact tactile\ + \ feedback method based on airborne ultrasound. The three main elements/components\ + \ of the interface that will be discussed in this paper are: 1. Generation of\ + \ audible sound by self-demodulation of an ultrasound signal during its propagation\ + \ through air; 2. The condensation of the ultrasound energy in one spatial point\ + \ generating a precise tactile reproduction of the audible sound; and 3. 
The feed-forward\ + \ method enabling a real-time intervention of the musician, by shaping the tactile\ + \ (ultra)sound directly with his hands.},\n address = {Sydney, Australia},\n author\ + \ = {Ciglar, Miha},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177745},\n\ + \ issn = {2220-4806},\n keywords = {haptics, vibro-tactility, feedback, ultrasound,\ + \ hands-free interface, nonlinear acoustics, parametric array.},\n pages = {19--22},\n\ + \ title = {An Ultrasound Based Instrument Generating Audible and Tactile Sound},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_019.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179092 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177745 issn: 2220-4806 - month: May - pages: 58--60 - publisher: Louisiana State University - title: 'Sense of Space: The Audience Participation Music Performance with High-Frequency - Sound ID' - url: http://www.nime.org/proceedings/2015/nime2015_195.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/195/0195-file1.mp4 - year: 2015 + keywords: 'haptics, vibro-tactility, feedback, ultrasound, hands-free interface, + nonlinear acoustics, parametric array.' + pages: 19--22 + title: An Ultrasound Based Instrument Generating Audible and Tactile Sound + url: http://www.nime.org/proceedings/2010/nime2010_019.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: tshaw2015 - abstract: 'In this paper we present Fields, a sound diffusion performance implemented - with web technologies that run on the mobile devices of audience members. Both - a technical system and bespoke composition, Fields allows for a range of sonic - diffusions to occur, and therefore has the potential to open up new paradigms - for spatialised music and media performances. 
The project introduces how handheld - technology used as a collective array of speakers controlled live by a centralized - performer can create alternative types of participation within musical performance. - Fields not only offers a new technological approach to sound diffusion, it also - provides an alternative way for audiences to participate in live events, and opens - up unique forms of engagement within sonic media contexts. ' - address: 'Baton Rouge, Louisiana, USA' - author: Tim Shaw and Sébastien Piquemal and John Bowers - bibtex: "@inproceedings{tshaw2015,\n abstract = {In this paper we present Fields,\ - \ a sound diffusion performance implemented with web technologies that run on\ - \ the mobile devices of audience members. Both a technical system and bespoke\ - \ composition, Fields allows for a range of sonic diffusions to occur, and therefore\ - \ has the potential to open up new paradigms for spatialised music and media performances.\ - \ The project introduces how handheld technology used as a collective array of\ - \ speakers controlled live by a centralized performer can create alternative types\ - \ of participation within musical performance. Fields not only offers a new technological\ - \ approach to sound diffusion, it also provides an alternative way for audiences\ - \ to participate in live events, and opens up unique forms of engagement within\ - \ sonic media contexts. 
},\n address = {Baton Rouge, Louisiana, USA},\n author\ - \ = {Tim Shaw and S\\'ebastien Piquemal and John Bowers},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179174},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {281--284},\n publisher = {Louisiana\ - \ State University},\n title = {Fields: An Exploration into the use of Mobile\ - \ Devices as a Medium for Sound Diffusion},\n url = {http://www.nime.org/proceedings/2015/nime2015_196.pdf},\n\ - \ year = {2015}\n}\n" + ID: Hayes2010 + abstract: 'The Neurohedron is a multi-modal interface for a nonlinear sequencer + software model, embodied physically in a dodecahedron. The faces of the dodecahedron + are both inputs and outputs, allowing the device to visualize the activity of + the software model as well as convey input to it. The software model maps MIDI + notes to the faces of the device, and defines and controls the behavior of the + sequencer''s progression around its surface, resulting in a unique instrument + for computer-based performance and composition. ' + address: 'Sydney, Australia' + author: 'Hayes, Ted' + bibtex: "@inproceedings{Hayes2010,\n abstract = {The Neurohedron is a multi-modal\ + \ interface for a nonlinear sequencer software model, embodied physically in a\ + \ dodecahedron. The faces of the dodecahedron are both inputs and outputs, allowing\ + \ the device to visualize the activity of the software model as well as convey\ + \ input to it. The software model maps MIDI notes to the faces of the device,\ + \ and defines and controls the behavior of the sequencer's progression around\ + \ its surface, resulting in a unique instrument for computer-based performance\ + \ and composition. 
},\n address = {Sydney, Australia},\n author = {Hayes, Ted},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177799},\n issn = {2220-4806},\n\ + \ keywords = {controller, human computer interaction, interface, live performance,\ + \ neural network, sequencer},\n pages = {23--25},\n title = {Neurohedron : A Nonlinear\ + \ Sequencer Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_023.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179174 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177799 issn: 2220-4806 - month: May - pages: 281--284 - publisher: Louisiana State University - title: 'Fields: An Exploration into the use of Mobile Devices as a Medium for Sound - Diffusion' - url: http://www.nime.org/proceedings/2015/nime2015_196.pdf - year: 2015 + keywords: 'controller, human computer interaction, interface, live performance, + neural network, sequencer' + pages: 23--25 + title: 'Neurohedron : A Nonlinear Sequencer Interface' + url: http://www.nime.org/proceedings/2010/nime2010_023.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: rdannenberg2015 - abstract: "Optical music recognition (OMR) is the task of recognizing images of\ - \ musical scores. In this paper, improved algorithms for the fi\nrst steps of\ - \ optical music recognition were developed, which facilitated bulk annotation\ - \ of scanned scores for use in an interactive score display system. Creating an\ - \ initial annotation by OMR and verifying by hand substantially reduced the manual\ - \ eff\nort required to process scanned scores to be used in a live performance\ - \ setting." 
- address: 'Baton Rouge, Louisiana, USA' - author: Dan Ringwalt and Roger Dannenberg and Andrew Russell - bibtex: "@inproceedings{rdannenberg2015,\n abstract = {Optical music recognition\ - \ (OMR) is the task of recognizing images of musical scores. In this paper, improved\ - \ algorithms for the fi\nrst steps of optical music recognition were developed,\ - \ which facilitated bulk annotation of scanned scores for use in an interactive\ - \ score display system. Creating an initial annotation by OMR and verifying by\ - \ hand substantially reduced the manual eff\nort required to process scanned scores\ - \ to be used in a live performance setting.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Dan Ringwalt and Roger Dannenberg and Andrew Russell},\n booktitle\ + ID: Umetani2010 + abstract: 'We introduce an interactive interface for the custom designof metallophones. + The shape of each plate must be determined in the design process so that the metallophone + willproduce the proper tone when struck with a mallet. Unfortunately, the relationship + between plate shape and tone iscomplex, which makes it difficult to design plates + with arbitrary shapes. Our system addresses this problem by runninga concurrent + numerical eigenanalysis during interactive geometry editing. It continuously presents + a predicted tone tothe user with both visual and audio feedback, thus makingit + possible to design a plate with any desired shape and tone.We developed this system + to demonstrate the effectivenessof integrating real-time finite element method + analysis intogeometric editing to facilitate the design of custom-mademusical + instruments. An informal study demonstrated theability of technically unsophisticated + user to apply the system to complex metallophone design.' 
+ address: 'Sydney, Australia' + author: 'Umetani, Nobuyuki and Mitani, Jun and Igarashi, Takeo' + bibtex: "@inproceedings{Umetani2010,\n abstract = {We introduce an interactive interface\ + \ for the custom designof metallophones. The shape of each plate must be determined\ + \ in the design process so that the metallophone willproduce the proper tone when\ + \ struck with a mallet. Unfortunately, the relationship between plate shape and\ + \ tone iscomplex, which makes it difficult to design plates with arbitrary shapes.\ + \ Our system addresses this problem by runninga concurrent numerical eigenanalysis\ + \ during interactive geometry editing. It continuously presents a predicted tone\ + \ tothe user with both visual and audio feedback, thus makingit possible to design\ + \ a plate with any desired shape and tone.We developed this system to demonstrate\ + \ the effectivenessof integrating real-time finite element method analysis intogeometric\ + \ editing to facilitate the design of custom-mademusical instruments. 
An informal\ + \ study demonstrated theability of technically unsophisticated user to apply the\ + \ system to complex metallophone design.},\n address = {Sydney, Australia},\n\ + \ author = {Umetani, Nobuyuki and Mitani, Jun and Igarashi, Takeo},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179162},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {95--98},\n\ - \ publisher = {Louisiana State University},\n title = {Optical Music Recognition\ - \ for Interactive Score Display},\n url = {http://www.nime.org/proceedings/2015/nime2015_198.pdf},\n\ - \ year = {2015}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1177917},\n issn = {2220-4806},\n keywords\ + \ = {Modeling Interfaces, Geometric Modeling, CAD, Education, Real-time FEM},\n\ + \ pages = {26--30},\n title = {Designing Custom-made Metallophone with Concurrent\ + \ Eigenanalysis},\n url = {http://www.nime.org/proceedings/2010/nime2010_026.pdf},\n\ + \ year = {2010}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177917 + issn: 2220-4806 + keywords: 'Modeling Interfaces, Geometric Modeling, CAD, Education, Real-time FEM' + pages: 26--30 + title: Designing Custom-made Metallophone with Concurrent Eigenanalysis + url: http://www.nime.org/proceedings/2010/nime2010_026.pdf + year: 2010 + + +- ENTRYTYPE: inproceedings + ID: Chun2010 + abstract: 'The field of mixed-reality interface design is relatively young and in + regards to music, has not been explored in great depth. Using computer vision + and collision detection techniques, Freepad further explores the development of + mixed-reality interfaces for music. 
The result is an accessible user-definable + MIDI interface for anyone with a webcam, pen and paper, which outputs MIDI notes + with velocity values based on the speed of the strikes on drawn pads. ' + address: 'Sydney, Australia' + author: 'Chun, Sungkuk and Hawryshkewich, Andrew and Jung, Keechul and Pasquier, + Philippe' + bibtex: "@inproceedings{Chun2010,\n abstract = {The field of mixed-reality interface\ + \ design is relatively young and in regards to music, has not been explored in\ + \ great depth. Using computer vision and collision detection techniques, Freepad\ + \ further explores the development of mixed-reality interfaces for music. The\ + \ result is an accessible user-definable MIDI interface for anyone with a webcam,\ + \ pen and paper, which outputs MIDI notes with velocity values based on the speed\ + \ of the strikes on drawn pads. },\n address = {Sydney, Australia},\n author =\ + \ {Chun, Sungkuk and Hawryshkewich, Andrew and Jung, Keechul and Pasquier, Philippe},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177743},\n issn = {2220-4806},\n\ + \ keywords = {Computer vision, form recognition, collision detection, mixed- reality,\ + \ custom interface, MIDI},\n pages = {31--36},\n title = {Freepad : A Custom Paper-based\ + \ MIDI Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_031.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179162 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177743 issn: 2220-4806 - month: May - pages: 95--98 - publisher: Louisiana State University - title: Optical Music Recognition for Interactive Score Display - url: http://www.nime.org/proceedings/2015/nime2015_198.pdf - year: 2015 + keywords: 'Computer vision, form recognition, collision detection, mixed- reality, + custom interface, MIDI' 
+ pages: 31--36 + title: 'Freepad : A Custom Paper-based MIDI Interface' + url: http://www.nime.org/proceedings/2010/nime2010_031.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: amomeni2015 - abstract: 'This paper documents the development of Caress, an electroacoustic percussive - instrument that blends drumming and audio synthesis in a small and portable form - factor. Caress is an octophonic miniature drum-set for the fingertips that employs - eight acoustically isolated piezo-microphones, coupled with eight independent - signal chains that excite a unique resonance model with audio from the piezos. - The hardware is designed to be robust and quickly reproducible (parametric design - and machine fabrication), while the software aims to be light-weight (low-CPU - requirements) and portable (multiple platforms, multiple computing architectures). - Above all, the instrument aims for the level of control intimacy and tactile expressivity - achieved by traditional acoustic percussive instruments, while leveraging real-time - software synthesis and control to expand the sonic palette. This instrument as - well as this document are dedicated to the memory of the late David Wessel, pioneering - composer, performer, researcher, mentor and all-around Yoda of electroacoustic - music. ' - address: 'Baton Rouge, Louisiana, USA' - author: Ali Momeni - bibtex: "@inproceedings{amomeni2015,\n abstract = {This paper documents the development\ - \ of Caress, an electroacoustic percussive instrument that blends drumming and\ - \ audio synthesis in a small and portable form factor. Caress is an octophonic\ - \ miniature drum-set for the fingertips that employs eight acoustically isolated\ - \ piezo-microphones, coupled with eight independent signal chains that excite\ - \ a unique resonance model with audio from the piezos. 
The hardware is designed\ - \ to be robust and quickly reproducible (parametric design and machine fabrication),\ - \ while the software aims to be light-weight (low-CPU requirements) and portable\ - \ (multiple platforms, multiple computing architectures). Above all, the instrument\ - \ aims for the level of control intimacy and tactile expressivity achieved by\ - \ traditional acoustic percussive instruments, while leveraging real-time software\ - \ synthesis and control to expand the sonic palette. This instrument as well as\ - \ this document are dedicated to the memory of the late David Wessel, pioneering\ - \ composer, performer, researcher, mentor and all-around Yoda of electroacoustic\ - \ music. },\n address = {Baton Rouge, Louisiana, USA},\n author = {Ali Momeni},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179142},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {245--250},\n\ - \ publisher = {Louisiana State University},\n title = {Caress: An Electro-acoustic\ - \ Percussive Instrument for Caressing Sounds},\n url = {http://www.nime.org/proceedings/2015/nime2015_199.pdf},\n\ - \ year = {2015}\n}\n" + ID: Mills2010 + abstract: 'Our team realized that a need existed for a music programming interface + in the Minim audio library of the Processingprogramming environment. The audience + for this new interface would be the novice programmer interested in usingmusic + as part of the learning experience, though the interface should also be complex + enough to benefit experiencedartist-programmers. We collected many ideas from + currently available music programming languages and librariesto design and create + the new capabilities in Minim. Thebasic mechanisms include chained unit generators, + instruments, and notes. 
In general, one "patches" unit generators(for example, + oscillators, delays, and envelopes) together inorder to create synthesis algorithms. + These algorithms canthen either create continuous sound, or be used in instruments + to play notes with specific start time and duration.We have written a base set + of unit generators to enablea wide variety of synthesis options, and the capabilities + ofthe unit generators, instruments, and Processing allow fora wide range of composition + techniques.' + address: 'Sydney, Australia' + author: 'Mills, John A. and Di Fede, Damien and Brix, Nicolas' + bibtex: "@inproceedings{Mills2010,\n abstract = {Our team realized that a need existed\ + \ for a music programming interface in the Minim audio library of the Processingprogramming\ + \ environment. The audience for this new interface would be the novice programmer\ + \ interested in usingmusic as part of the learning experience, though the interface\ + \ should also be complex enough to benefit experiencedartist-programmers. We collected\ + \ many ideas from currently available music programming languages and librariesto\ + \ design and create the new capabilities in Minim. Thebasic mechanisms include\ + \ chained unit generators, instruments, and notes. In general, one \"patches\"\ + \ unit generators(for example, oscillators, delays, and envelopes) together inorder\ + \ to create synthesis algorithms. These algorithms canthen either create continuous\ + \ sound, or be used in instruments to play notes with specific start time and\ + \ duration.We have written a base set of unit generators to enablea wide variety\ + \ of synthesis options, and the capabilities ofthe unit generators, instruments,\ + \ and Processing allow fora wide range of composition techniques.},\n address\ + \ = {Sydney, Australia},\n author = {Mills, John A. 
and Di Fede, Damien and Brix,\ + \ Nicolas},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177855},\n issn\ + \ = {2220-4806},\n keywords = {Minim, music programming, audio library, Processing,\ + \ mu- sic software},\n pages = {37--42},\n title = {Music Programming in Minim},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_037.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179142 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177855 issn: 2220-4806 - month: May - pages: 245--250 - publisher: Louisiana State University - title: 'Caress: An Electro-acoustic Percussive Instrument for Caressing Sounds' - url: http://www.nime.org/proceedings/2015/nime2015_199.pdf - year: 2015 + keywords: 'Minim, music programming, audio library, Processing, mu- sic software' + pages: 37--42 + title: Music Programming in Minim + url: http://www.nime.org/proceedings/2010/nime2010_037.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: rdannenbergb2015 - abstract: 'Human-Computer Music Performance for popular music -- where musical structure - is important, but where musicians often decide on the spur of the moment exactly - what the musical form will be -- presents many challenges to make computer systems - that are flexible and adaptable to human musicians. One particular challenge is - that humans easily follow scores and chord charts, adapt these to new performance - plans, and understand media locations in musical terms (beats and measures), while - computer music systems often use rigid and even numerical representations that - are difficult to work with. 
We present new formalisms and representations, and - a corresponding implementation, where musical material in various media is synchronized, - where musicians can quickly alter the performance order by specifying (re-)arrangements - of the material, and where interfaces are supported in a natural way by music - notation.' - address: 'Baton Rouge, Louisiana, USA' - author: Roger Dannenberg and Andrew Russell - bibtex: "@inproceedings{rdannenbergb2015,\n abstract = {Human-Computer Music Performance\ - \ for popular music -- where musical structure is important, but where musicians\ - \ often decide on the spur of the moment exactly what the musical form will be\ - \ -- presents many challenges to make computer systems that are flexible and adaptable\ - \ to human musicians. One particular challenge is that humans easily follow scores\ - \ and chord charts, adapt these to new performance plans, and understand media\ - \ locations in musical terms (beats and measures), while computer music systems\ - \ often use rigid and even numerical representations that are difficult to work\ - \ with. 
We present new formalisms and representations, and a corresponding implementation,\ - \ where musical material in various media is synchronized, where musicians can\ - \ quickly alter the performance order by specifying (re-)arrangements of the material,\ - \ and where interfaces are supported in a natural way by music notation.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Roger Dannenberg and Andrew Russell},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179050},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {315--316},\n\ - \ publisher = {Louisiana State University},\n title = {Arrangements: Flexibly\ - \ Adapting Music Data for Live Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_200.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/200/0200-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Magnusson2010 + abstract: 'The analysis of digital music systems has traditionally been characterized + by an approach that can be defined as phenomenological. The focus has been on + the body and its relationship to the machine, often neglecting the system''s conceptual + design. This paper brings into focus the epistemic features of digital systems, + which implies emphasizing the cognitive, conceptual and music theoretical side + of our musical instruments. An epistemic dimension space for the analysis of musical + devices is proposed. ' + address: 'Sydney, Australia' + author: 'Magnusson, Thor' + bibtex: "@inproceedings{Magnusson2010,\n abstract = {The analysis of digital music\ + \ systems has traditionally been characterized by an approach that can be defined\ + \ as phenomenological. The focus has been on the body and its relationship to\ + \ the machine, often neglecting the system's conceptual design. 
This paper brings\ + \ into focus the epistemic features of digital systems, which implies emphasizing\ + \ the cognitive, conceptual and music theoretical side of our musical instruments.\ + \ An epistemic dimension space for the analysis of musical devices is proposed.\ + \ },\n address = {Sydney, Australia},\n author = {Magnusson, Thor},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177837},\n issn = {2220-4806},\n keywords\ + \ = {Epistemic tools, music theory, dimension space, analysis.},\n pages = {43--46},\n\ + \ title = {An Epistemic Dimension Space for Musical Devices},\n url = {http://www.nime.org/proceedings/2010/nime2010_043.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179050 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177837 issn: 2220-4806 - month: May - pages: 315--316 - publisher: Louisiana State University - title: 'Arrangements: Flexibly Adapting Music Data for Live Performance' - url: http://www.nime.org/proceedings/2015/nime2015_200.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/200/0200-file1.mp4 - year: 2015 + keywords: 'Epistemic tools, music theory, dimension space, analysis.' + pages: 43--46 + title: An Epistemic Dimension Space for Musical Devices + url: http://www.nime.org/proceedings/2010/nime2010_043.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: amomenib2015 - abstract: 'This paper documents the development of ml.lib: a set of open-source - tools designed for employing a wide range of machine learning techniques within - two popular real-time programming environments, namely Max and Pure Data. ml.lib - is a cross-platform, lightweight wrapper around Nick Gillian''s Gesture Recognition - Toolkit, a C++ library that includes a wide range of data processing and machine - learning techniques. 
ml.lib adapts these techniques for real-time use within popular - data-flow IDEs, allowing instrument designers and performers to integrate robust - learning, classification and mapping approaches within their existing workflows. - ml.lib has been carefully de-signed to allow users to experiment with and incorporate - ma-chine learning techniques within an interactive arts context with minimal prior - knowledge. A simple, logical and consistent, scalable interface has been provided - across over sixteen exter-nals in order to maximize learnability and discoverability. - A focus on portability and maintainability has enabled ml.lib to support a range - of computing architectures---including ARM---and operating systems such as Mac - OS, GNU/Linux and Win-dows, making it the most comprehensive machine learning - implementation available for Max and Pure Data.' - address: 'Baton Rouge, Louisiana, USA' - author: Jamie Bullock and Ali Momeni - bibtex: "@inproceedings{amomenib2015,\n abstract = {This paper documents the development\ - \ of ml.lib: a set of open-source tools designed for employing a wide range of\ - \ machine learning techniques within two popular real-time programming environments,\ - \ namely Max and Pure Data. ml.lib is a cross-platform, lightweight wrapper around\ - \ Nick Gillian's Gesture Recognition Toolkit, a C++ library that includes a wide\ - \ range of data processing and machine learning techniques. ml.lib adapts these\ - \ techniques for real-time use within popular data-flow IDEs, allowing instrument\ - \ designers and performers to integrate robust learning, classification and mapping\ - \ approaches within their existing workflows. ml.lib has been carefully de-signed\ - \ to allow users to experiment with and incorporate ma-chine learning techniques\ - \ within an interactive arts context with minimal prior knowledge. 
A simple, logical\ - \ and consistent, scalable interface has been provided across over sixteen exter-nals\ - \ in order to maximize learnability and discoverability. A focus on portability\ - \ and maintainability has enabled ml.lib to support a range of computing architectures---including\ - \ ARM---and operating systems such as Mac OS, GNU/Linux and Win-dows, making it\ - \ the most comprehensive machine learning implementation available for Max and\ - \ Pure Data.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Jamie Bullock\ - \ and Ali Momeni},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179038},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {265--270},\n publisher = {Louisiana State University},\n title\ - \ = {ml.lib: Robust, Cross-platform, Open-source Machine Learning for Max and\ - \ Pure Data},\n url = {http://www.nime.org/proceedings/2015/nime2015_201.pdf},\n\ - \ year = {2015}\n}\n" + ID: Kocaballi2010 + abstract: 'Human agency, our capacity for action, has been at the hub of discussions + centring upon philosophical enquiry for a long period of time. Sensory supplementation + devices can provide us with unique opportunities to investigate the different + aspects of our agency by enabling new modes of perception and facilitating the + emergence of novel interactions, all of which is impossible without the aforesaid + devices. Our preliminary study investigates the non-verbal strategies employed + for negotiation of our capacity for action with other bodies and the surrounding + space through body-to-body and body-to-space couplings enabled by sensory supplementation + devices. We employed a lowfi rapid prototyping approach to build this device, + enabling distal perception by sonic and haptic feedback. 
Further, we conducted + a workshop in which participants equipped with this device engaged in game-like + activities. ' + address: 'Sydney, Australia' + author: 'Kocaballi, A. Baki and Gemeinboeck, Petra and Saunders, Rob' + bibtex: "@inproceedings{Kocaballi2010,\n abstract = {Human agency, our capacity\ + \ for action, has been at the hub of discussions centring upon philosophical enquiry\ + \ for a long period of time. Sensory supplementation devices can provide us with\ + \ unique opportunities to investigate the different aspects of our agency by enabling\ + \ new modes of perception and facilitating the emergence of novel interactions,\ + \ all of which is impossible without the aforesaid devices. Our preliminary study\ + \ investigates the non-verbal strategies employed for negotiation of our capacity\ + \ for action with other bodies and the surrounding space through body-to-body\ + \ and body-to-space couplings enabled by sensory supplementation devices. We employed\ + \ a lowfi rapid prototyping approach to build this device, enabling distal perception\ + \ by sonic and haptic feedback. Further, we conducted a workshop in which participants\ + \ equipped with this device engaged in game-like activities. },\n address = {Sydney,\ + \ Australia},\n author = {Kocaballi, A. 
Baki and Gemeinboeck, Petra and Saunders,\ + \ Rob},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177829},\n issn = {2220-4806},\n\ + \ keywords = {Human agency, sensory supplementation, distal perception, sonic\ + \ feedback, tactile feedback, enactive interfaces},\n pages = {47--50},\n title\ + \ = {Investigating the Potential for Shared Agency using Enactive Interfaces},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_047.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179038 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177829 issn: 2220-4806 - month: May - pages: 265--270 - publisher: Louisiana State University - title: 'ml.lib: Robust, Cross-platform, Open-source Machine Learning for Max and - Pure Data' - url: http://www.nime.org/proceedings/2015/nime2015_201.pdf - year: 2015 + keywords: 'Human agency, sensory supplementation, distal perception, sonic feedback, + tactile feedback, enactive interfaces' + pages: 47--50 + title: Investigating the Potential for Shared Agency using Enactive Interfaces + url: http://www.nime.org/proceedings/2010/nime2010_047.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: rdannenbergc2015 - abstract: 'Computer music systems can interact with humans at different levels, - including scores, phrases, notes, beats, and gestures. However, most current systems - lack basic musicianship skills. As a consequence, the results of human-computer - interaction are often far less musical than the interaction between human musicians. - In this paper, we explore the possibility of learning some basic music performance - skills from rehearsal data. In particular, we consider the piano duet scenario - where two musicians expressively interact with each other. Our work extends previous - automatic accompaniment systems. 
We have built an artificial pianist that can - automatically improve its ability to sense and coordinate with a human pianist, - learning from rehearsal experience. We describe different machine learning algorithms - to learn musicianship for duet interaction, explore the properties of the learned - models, such as dominant features, limits of validity, and minimal training size, - and claim that a more human-like interaction is achieved.' - address: 'Baton Rouge, Louisiana, USA' - author: Guangyu Xia and Roger Dannenberg - bibtex: "@inproceedings{rdannenbergc2015,\n abstract = {Computer music systems can\ - \ interact with humans at different levels, including scores, phrases, notes,\ - \ beats, and gestures. However, most current systems lack basic musicianship skills.\ - \ As a consequence, the results of human-computer interaction are often far less\ - \ musical than the interaction between human musicians. In this paper, we explore\ - \ the possibility of learning some basic music performance skills from rehearsal\ - \ data. In particular, we consider the piano duet scenario where two musicians\ - \ expressively interact with each other. Our work extends previous automatic accompaniment\ - \ systems. We have built an artificial pianist that can automatically improve\ - \ its ability to sense and coordinate with a human pianist, learning from rehearsal\ - \ experience. 
We describe different machine learning algorithms to learn musicianship\ - \ for duet interaction, explore the properties of the learned models, such as\ - \ dominant features, limits of validity, and minimal training size, and claim\ - \ that a more human-like interaction is achieved.},\n address = {Baton Rouge,\ - \ Louisiana, USA},\n author = {Guangyu Xia and Roger Dannenberg},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179198},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {259--264},\n\ - \ publisher = {Louisiana State University},\n title = {Duet Interaction: Learning\ - \ Musicianship for Automatic Accompaniment},\n url = {http://www.nime.org/proceedings/2015/nime2015_202.pdf},\n\ - \ year = {2015}\n}\n" + ID: Liebman2010 + abstract: 'We present Cuebert, a mixing board concept for musical theatre. Using + a user-centered design process, our goal was to reconceptualize the mixer using + modern technology and interaction techniques, questioning over fifty years of + interface design in audio technology. Our research resulted in a design that retains + the physical controls — faders and knobs — demanded by sound engineers while taking + advantage of multitouch display technology to allow for flexible display of dynamic + and context-sensitive content.' + address: 'Sydney, Australia' + author: 'Liebman, Noah and Nagara, Michael and Spiewla, Jacek and Zolkosky, Erin' + bibtex: "@inproceedings{Liebman2010,\n abstract = {We present Cuebert, a mixing\ + \ board concept for musical theatre. 
Using a user-centered design process, our\ + \ goal was to reconceptualize the mixer using modern technology and interaction\ + \ techniques, questioning over fifty years of interface design in audio technology.\ + \ Our research resulted in a design that retains the physical controls — faders\ + \ and knobs — demanded by sound engineers while taking advantage of multitouch\ + \ display technology to allow for flexible display of dynamic and context-sensitive\ + \ content.},\n address = {Sydney, Australia},\n author = {Liebman, Noah and Nagara,\ + \ Michael and Spiewla, Jacek and Zolkosky, Erin},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177833},\n issn = {2220-4806},\n keywords = {audio, control\ + \ surfaces, mixing board, multitouch, sound, theatre, touch-screen, user-centered\ + \ design},\n pages = {51--56},\n title = {Cuebert : A New Mixing Board Concept\ + \ for Musical Theatre},\n url = {http://www.nime.org/proceedings/2010/nime2010_051.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179198 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177833 issn: 2220-4806 - month: May - pages: 259--264 - publisher: Louisiana State University - title: 'Duet Interaction: Learning Musicianship for Automatic Accompaniment' - url: http://www.nime.org/proceedings/2015/nime2015_202.pdf - year: 2015 + keywords: 'audio, control surfaces, mixing board, multitouch, sound, theatre, touch-screen, + user-centered design' + pages: 51--56 + title: 'Cuebert : A New Mixing Board Concept for Musical Theatre' + url: http://www.nime.org/proceedings/2010/nime2010_051.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: jleonard2015 - abstract: 'This paper discusses how haptic devices and physical modelling can be - employed to design and simulate multisensory virtual musical 
instruments, providing - the musician with joint audio, visual and haptic feedback. After briefly reviewing - some of the main use-cases of haptics in Computer Music, we present GENESIS-RT, - a software and hardware platform dedicated to the design and real-time haptic - playing of virtual musical instruments using mass-interaction physical modelling. - We discuss our approach and report on advancements in modelling various instrument - categories instruments, including physical models of percussion, plucked and bowed - instruments. Finally, we comment on the constraints, challenges and new possibilities - opened by modelling haptic virtual instruments with our platform, and discuss - common points and differences in regards to classical Digital Musical Instruments. ' - address: 'Baton Rouge, Louisiana, USA' - author: James Leonard and Claude Cadoz - bibtex: "@inproceedings{jleonard2015,\n abstract = {This paper discusses how haptic\ - \ devices and physical modelling can be employed to design and simulate multisensory\ - \ virtual musical instruments, providing the musician with joint audio, visual\ - \ and haptic feedback. After briefly reviewing some of the main use-cases of haptics\ - \ in Computer Music, we present GENESIS-RT, a software and hardware platform dedicated\ - \ to the design and real-time haptic playing of virtual musical instruments using\ - \ mass-interaction physical modelling. We discuss our approach and report on advancements\ - \ in modelling various instrument categories instruments, including physical models\ - \ of percussion, plucked and bowed instruments. Finally, we comment on the constraints,\ - \ challenges and new possibilities opened by modelling haptic virtual instruments\ - \ with our platform, and discuss common points and differences in regards to classical\ - \ Digital Musical Instruments. 
},\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {James Leonard and Claude Cadoz},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179116},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ - \ = {2220-4806},\n month = {May},\n pages = {150--155},\n publisher = {Louisiana\ - \ State University},\n title = {Physical Modelling Concepts for a Collection of\ - \ Multisensory Virtual Musical Instruments},\n url = {http://www.nime.org/proceedings/2015/nime2015_203.pdf},\n\ - \ year = {2015}\n}\n" + ID: Roberts2010 + abstract: 'We present the Device Server, a framework and application driving interaction + in the AlloSphere virtual reality environment. The motivation and development + of the Device Server stems from the practical concerns of managing multi-user + interactivity with a variety of physical devices for disparate performance and + virtual reality environments housed in the same physical location. The interface + of the Device Server allows users to see how devices are assigned to application + functionalities, alter these assignments and save them into configuration files + for later use. Configurations defining how applications use devices can be changed + on the fly without recompiling or relaunching applications. Multiple applications + can be connected to the Device Server concurrently. The Device Server provides + several conveniences for performance environments. It can process control data + efficiently using Just-In-Time compiled Lua expressions; in doing so it frees + processing cycles on audio and video rendering computers. All control signals + entering the Device Server can be recorded, saved, and played back allowing performances + based on control data to be recreated in their entirety. 
The Device Server attempts + to homogenize the appearance of different control signals to applications so that + users can assign any interface element they choose to application functionalities + and easily experiment with different control configurations.' + address: 'Sydney, Australia' + author: 'Roberts, Charles and Wright, Matthew and Kuchera-Morin, JoAnn and Putnam, + Lance' + bibtex: "@inproceedings{Roberts2010,\n abstract = {We present the Device Server,\ + \ a framework and application driving interaction in the AlloSphere virtual reality\ + \ environment. The motivation and development of the Device Server stems from\ + \ the practical concerns of managing multi-user interactivity with a variety of\ + \ physical devices for disparate performance and virtual reality environments\ + \ housed in the same physical location. The interface of the Device Server allows\ + \ users to see how devices are assigned to application functionalities, alter\ + \ these assignments and save them into configuration files for later use. Configurations\ + \ defining how applications use devices can be changed on the fly without recompiling\ + \ or relaunching applications. Multiple applications can be connected to the Device\ + \ Server concurrently. The Device Server provides several conveniences for performance\ + \ environments. It can process control data efficiently using Just-In-Time compiled\ + \ Lua expressions; in doing so it frees processing cycles on audio and video rendering\ + \ computers. All control signals entering the Device Server can be recorded, saved,\ + \ and played back allowing performances based on control data to be recreated\ + \ in their entirety. 
The Device Server attempts to homogenize the appearance of\ + \ different control signals to applications so that users can assign any interface\ + \ element they choose to application functionalities and easily experiment with\ + \ different control configurations.},\n address = {Sydney, Australia},\n author\ + \ = {Roberts, Charles and Wright, Matthew and Kuchera-Morin, JoAnn and Putnam,\ + \ Lance},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177883},\n issn = {2220-4806},\n\ + \ keywords = {AlloSphere, mapping, performance, HCI, interactivity, Virtual Reality,\ + \ OSC, multi-user, network},\n pages = {57--62},\n title = {Dynamic Interactivity\ + \ Inside the AlloSphere},\n url = {http://www.nime.org/proceedings/2010/nime2010_057.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179116 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177883 issn: 2220-4806 - month: May - pages: 150--155 - publisher: Louisiana State University - title: Physical Modelling Concepts for a Collection of Multisensory Virtual Musical - Instruments - url: http://www.nime.org/proceedings/2015/nime2015_203.pdf - year: 2015 + keywords: 'AlloSphere, mapping, performance, HCI, interactivity, Virtual Reality, + OSC, multi-user, network' + pages: 57--62 + title: Dynamic Interactivity Inside the AlloSphere + url: http://www.nime.org/proceedings/2010/nime2010_057.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: jvilleneuve2015 - abstract: 'The motivation of this paper is to highlight the importance of visual - representations for artists when modeling and simulating mass-interaction physical - networks in the context of sound synthesis and musical composition. GENESIS is - a musician-oriented software environment for sound synthesis and musical composition. 
- However, despite this orientation, a substantial amount of effort has been put - into building a rich variety of tools based on static or dynamic visual representations - of models and of abstractions of their properties. After a quick survey of these - tools, we will illustrate the significant role they play in the creative process - involved when going from a musical idea and exploration to the production of a - complete musical piece. To that aim, our analysis will rely on the work and practice - of several artists having used GENESIS in various ways.' - address: 'Baton Rouge, Louisiana, USA' - author: Jérôme Villeneuve and Claude Cadoz and Nicolas Castagné - bibtex: "@inproceedings{jvilleneuve2015,\n abstract = {The motivation of this paper\ - \ is to highlight the importance of visual representations for artists when modeling\ - \ and simulating mass-interaction physical networks in the context of sound synthesis\ - \ and musical composition. GENESIS is a musician-oriented software environment\ - \ for sound synthesis and musical composition. However, despite this orientation,\ - \ a substantial amount of effort has been put into building a rich variety of\ - \ tools based on static or dynamic visual representations of models and of abstractions\ - \ of their properties. 
After a quick survey of these tools, we will illustrate\ - \ the significant role they play in the creative process involved when going from\ - \ a musical idea and exploration to the production of a complete musical piece.\ - \ To that aim, our analysis will rely on the work and practice of several artists\ - \ having used GENESIS in various ways.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {J\\'er\\^ome Villeneuve and Claude Cadoz and Nicolas Castagn\\\ - 'e},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179190},\n editor = {Edgar\ - \ Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages =\ - \ {195--200},\n publisher = {Louisiana State University},\n title = {Visual Representation\ - \ in GENESIS as a tool for Physical Modeling, Sound Synthesis and Musical Composition},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_204.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/204/0204-file1.mov},\n year = {2015}\n\ + ID: Alt2010 + abstract: 'Writing text messages (e.g. email, SMS, instant messaging) is a popular + form of synchronous and asynchronous communication. However, when it comes to + notifying users about new messages, current audio-based approaches, such as notification + tones, are very limited in conveying information. In this paper we show how entire + text messages can be encoded into a meaningful and euphonic melody in such a way + that users can guess a message’s intention without actually seeing the content. + First, as a proof of concept, we report on the findings of an initial on-line + survey among 37 musicians and 32 non-musicians evaluating the feasibility and + validity of our approach. We show that our representation is understandable and + that there are no significant differences between musicians and non-musicians. 
+ Second, we evaluated the approach in a real world scenario based on a Skype plug-in. + In a field study with 14 participants we showed that sonified text messages strongly + impact on the users’ message checking behavior by significantly reducing the time + between receiving and reading an incoming message.' + address: 'Sydney, Australia' + author: 'Alt, Florian and Shirazi, Alireza S. and Legien, Stefan and Schmidt, Albrecht + and Mennenöh, Julian' + bibtex: "@inproceedings{Alt2010,\n abstract = {Writing text messages (e.g. email,\ + \ SMS, instant messaging) is a popular form of synchronous and asynchronous communication.\ + \ However, when it comes to notifying users about new messages, current audio-based\ + \ approaches, such as notification tones, are very limited in conveying information.\ + \ In this paper we show how entire text messages can be encoded into a meaningful\ + \ and euphonic melody in such a way that users can guess a message’s intention\ + \ without actually seeing the content. First, as a proof of concept, we report\ + \ on the findings of an initial on-line survey among 37 musicians and 32 non-musicians\ + \ evaluating the feasibility and validity of our approach. We show that our representation\ + \ is understandable and that there are no significant differences between musicians\ + \ and non-musicians. Second, we evaluated the approach in a real world scenario\ + \ based on a Skype plug-in. In a field study with 14 participants we showed that\ + \ sonified text messages strongly impact on the users’ message checking behavior\ + \ by significantly reducing the time between receiving and reading an incoming\ + \ message.},\n address = {Sydney, Australia},\n author = {Alt, Florian and Shirazi,\ + \ Alireza S. 
and Legien, Stefan and Schmidt, Albrecht and Mennen\\''{o}h, Julian},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177713},\n issn = {2220-4806},\n\ + \ keywords = {instant messaging, sms, sonority, text sonification},\n pages =\ + \ {63--68},\n title = {Creating Meaningful Melodies from Text Messages},\n url\ + \ = {http://www.nime.org/proceedings/2010/nime2010_063.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179190 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177713 issn: 2220-4806 - month: May - pages: 195--200 - publisher: Louisiana State University - title: 'Visual Representation in GENESIS as a tool for Physical Modeling, Sound - Synthesis and Musical Composition' - url: http://www.nime.org/proceedings/2015/nime2015_204.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/204/0204-file1.mov - year: 2015 + keywords: 'instant messaging, sms, sonority, text sonification' + pages: 63--68 + title: Creating Meaningful Melodies from Text Messages + url: http://www.nime.org/proceedings/2010/nime2010_063.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: fcalegario2015 - abstract: 'Regarding the design of new DMIs, it is possible to fit the majority - of projects into two main cases: those developed by the academic research centers, - which focus on North American and European contemporary classical and experimental - music; and the DIY projects, in which the luthier also plays the roles of performer - and/or composer. In both cases, the design process is not focused on creating - DMIs for a community with a particular culture --- with established instruments, - repertoire and playing styles --- outside European and North American traditions. - This challenge motivated our research. 
In this paper, we discuss lessons learned - during an one-year project called Batebit. Our approach was based on Design Thinking - methodology, comprising cycles of inspiration, ideation and implementation. It - resulted in two new DMIs developed collaboratively with musicians from the Brazilian - Northeast.' - address: 'Baton Rouge, Louisiana, USA' - author: Jerônimo Barbosa and Filipe Calegario and João Tragtenberg and Giordano - Cabral and Geber Ramalho and Marcelo M. Wanderley - bibtex: "@inproceedings{fcalegario2015,\n abstract = {Regarding the design of new\ - \ DMIs, it is possible to fit the majority of projects into two main cases: those\ - \ developed by the academic research centers, which focus on North American and\ - \ European contemporary classical and experimental music; and the DIY projects,\ - \ in which the luthier also plays the roles of performer and/or composer. In both\ - \ cases, the design process is not focused on creating DMIs for a community with\ - \ a particular culture --- with established instruments, repertoire and playing\ - \ styles --- outside European and North American traditions. This challenge motivated\ - \ our research. In this paper, we discuss lessons learned during an one-year project\ - \ called Batebit. Our approach was based on Design Thinking methodology, comprising\ - \ cycles of inspiration, ideation and implementation. 
It resulted in two new DMIs\ - \ developed collaboratively with musicians from the Brazilian Northeast.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Jer\\^onimo Barbosa and Filipe\ - \ Calegario and Jo\\~ao Tragtenberg and Giordano Cabral and Geber Ramalho and\ - \ {Marcelo M.} Wanderley},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179008},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {277--280},\n publisher = {Louisiana State University},\n title\ - \ = {Designing {DMI}s for Popular Music in the {Brazil}ian Northeast: Lessons\ - \ Learned},\n url = {http://www.nime.org/proceedings/2015/nime2015_207.pdf},\n\ - \ year = {2015}\n}\n" + ID: Humphrey2010 + abstract: 'This paper articulates an interest in a kind of interactive musical instrument + and artwork that defines the mechanisms for instrumental interactivity from the + iconic morphologies of ready-mades, casting historical utilitarian objects as + the basis for performed musical experiences by spectators. The interactive repertoires + are therefore partially pre-determined through enculturated behaviors that are + associated with particular objects, but more importantly, inextricably linked + to the thematic and meaningful assemblage of the work itself. Our new work epi-thet + gathers data from individual interactions with common microscopes placed on platforms + within a large space. This data is correlated with public domain genetic datasets + obtained from micro-array analysis. A sonification algorithm generates unique + compositions associated with the spectator "as measured" through their individual + specification in performing an iconic measurement action. The apparatus is a receptacle + for unique compositions in sound, and invites a participatory choreography of + stillness that is available for reception as a live musical performance. 
' + address: 'Sydney, Australia' + author: 'Humphrey, Tim and Flynn, Madeleine and Stevens, Jesse' + bibtex: "@inproceedings{Humphrey2010,\n abstract = {This paper articulates an interest\ + \ in a kind of interactive musical instrument and artwork that defines the mechanisms\ + \ for instrumental interactivity from the iconic morphologies of {ready-mades},\ + \ casting historical utilitarian objects as the basis for performed musical experiences\ + \ by spectators. The interactive repertoires are therefore partially pre-determined\ + \ through enculturated behaviors that are associated with particular objects,\ + \ but more importantly, inextricably linked to the thematic and meaningful assemblage\ + \ of the work itself. Our new work epi-thet gathers data from individual interactions\ + \ with common microscopes placed on platforms within a large space. This data\ + \ is correlated with public domain genetic datasets obtained from micro-array\ + \ analysis. A sonification algorithm generates unique compositions associated\ + \ with the spectator \"as measured\" through their individual specification in\ + \ performing an iconic measurement action. The apparatus is a receptacle for unique\ + \ compositions in sound, and invites a participatory choreography of stillness\ + \ that is available for reception as a live musical performance. 
},\n address\ + \ = {Sydney, Australia},\n author = {Humphrey, Tim and Flynn, Madeleine and Stevens,\ + \ Jesse},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177811},\n issn = {2220-4806},\n\ + \ keywords = {Sonification installation spectator-choreography micro-array ready-mades\ + \ morphology stillness},\n pages = {69--71},\n title = {Epi-thet : A Musical Performance\ + \ Installation and a Choreography of Stillness},\n url = {http://www.nime.org/proceedings/2010/nime2010_069.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179008 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177811 issn: 2220-4806 - month: May - pages: 277--280 - publisher: Louisiana State University - title: 'Designing DMIs for Popular Music in the Brazilian Northeast: Lessons Learned' - url: http://www.nime.org/proceedings/2015/nime2015_207.pdf - year: 2015 + keywords: Sonification installation spectator-choreography micro-array ready-mades + morphology stillness + pages: 69--71 + title: 'Epi-thet : A Musical Performance Installation and a Choreography of Stillness' + url: http://www.nime.org/proceedings/2010/nime2010_069.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: dmenzies2015 - abstract: 'This work uses a custom-built digital bagpipe chanter interface to assist - in the process of learning the Great Highland Bagpipe (GHB). In this paper, a - new algorithm is presented for the automatic recognition and evaluation of the - various ornamentation techniques that are a central aspect of traditional Highland - bagpipe music. The algorithm is evaluated alongside a previously published approach, - and is shown to provide a significant improvement in performance. 
The ornament - detection facility forms part of a complete hardware and software system for use - in both tuition and solo practice situations, allowing details of ornamentation - errors made by the player to be provided as visual and textual feedback. The system - also incorporates new functionality for the identification and description of - GHB fingering errors.' - address: 'Baton Rouge, Louisiana, USA' - author: Duncan Menzies and Andrew McPherson - bibtex: "@inproceedings{dmenzies2015,\n abstract = {This work uses a custom-built\ - \ digital bagpipe chanter interface to assist in the process of learning the Great\ - \ Highland Bagpipe (GHB). In this paper, a new algorithm is presented for the\ - \ automatic recognition and evaluation of the various ornamentation techniques\ - \ that are a central aspect of traditional Highland bagpipe music. The algorithm\ - \ is evaluated alongside a previously published approach, and is shown to provide\ - \ a significant improvement in performance. The ornament detection facility forms\ - \ part of a complete hardware and software system for use in both tuition and\ - \ solo practice situations, allowing details of ornamentation errors made by the\ - \ player to be provided as visual and textual feedback. 
The system also incorporates\ - \ new functionality for the identification and description of GHB fingering errors.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Duncan Menzies and Andrew\ - \ McPherson},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179136},\n editor\ - \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ - \ pages = {50--53},\n publisher = {Louisiana State University},\n title = {Highland\ - \ Piping Ornament Recognition Using Dynamic Time Warping},\n url = {http://www.nime.org/proceedings/2015/nime2015_208.pdf},\n\ - \ year = {2015}\n}\n" + ID: Hahnel2010 + abstract: 'The propriety of articulation, especially of notes that lackannotations, + is influenced by the origin of the particularmusic. This paper presents a rule + system for articulationderived from late Baroque and early Classic treatises on + performance. Expressive articulation, in this respect, is understood as a combination + of alterable tone features like duration, loudness, and timbre. The model differentiates + globalcharacteristics and local particularities, provides a generalframework for + human-like music performances, and, therefore, serves as a basis for further and + more complex rulesystems.' + address: 'Sydney, Australia' + author: 'Hähnel, Tilo' + bibtex: "@inproceedings{Hahnel2010,\n abstract = {The propriety of articulation,\ + \ especially of notes that lackannotations, is influenced by the origin of the\ + \ particularmusic. This paper presents a rule system for articulationderived from\ + \ late Baroque and early Classic treatises on performance. Expressive articulation,\ + \ in this respect, is understood as a combination of alterable tone features like\ + \ duration, loudness, and timbre. 
The model differentiates globalcharacteristics\ + \ and local particularities, provides a generalframework for human-like music\ + \ performances, and, therefore, serves as a basis for further and more complex\ + \ rulesystems.},\n address = {Sydney, Australia},\n author = {H\\''{a}hnel, Tilo},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177787},\n issn = {2220-4806},\n\ + \ keywords = {Articulation, Historically Informed Performance, Expres- sive Performance,\ + \ Synthetic Performance},\n pages = {72--75},\n title = {From Mozart to {MIDI}\ + \ : A Rule System for Expressive Articulation},\n url = {http://www.nime.org/proceedings/2010/nime2010_072.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179136 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177787 issn: 2220-4806 - month: May - pages: 50--53 - publisher: Louisiana State University - title: Highland Piping Ornament Recognition Using Dynamic Time Warping - url: http://www.nime.org/proceedings/2015/nime2015_208.pdf - year: 2015 + keywords: 'Articulation, Historically Informed Performance, Expres- sive Performance, + Synthetic Performance' + pages: 72--75 + title: 'From Mozart to MIDI : A Rule System for Expressive Articulation' + url: http://www.nime.org/proceedings/2010/nime2010_072.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: aflo2015 - abstract: 'This paper presents the sound art installation Doppelgänger. In Doppelgänger, - we combine an artistic concept on a large scale with a high degree of control - over timbre and dynamics. This puts great demands on the technical aspects of - the work. The installation consists of seven 3.5 meters-tall objects weighing - a total of 1500 kilos. 
Doppelgänger transfers one soundscape into another using - audio analysis, mapping, and computer-controlled acoustic sound objects. The technical - realization is based on hammer mechanisms actuated by powerful solenoids, driven - by a network of Arduino boards with high power PWM outputs, and a Max-patch running - audio analysis and mapping. We look into the special requirements in mechanics - for large-scale projects. Great care has been taken in the technical design to - ensure that the resulting work is scalable both in numbers of elements and in - physical dimensions. This makes our findings easily applicable to other projects - of a similar nature.' - address: 'Baton Rouge, Louisiana, USA' - author: Asbjørn Blokkum Flø and Hans Wilmers - bibtex: "@inproceedings{aflo2015,\n abstract = {This paper presents the sound art\ - \ installation Doppelg{\\''a}nger. In Doppelg{\\''a}nger, we combine an artistic\ - \ concept on a large scale with a high degree of control over timbre and dynamics.\ - \ This puts great demands on the technical aspects of the work. The installation\ - \ consists of seven 3.5 meters-tall objects weighing a total of 1500 kilos. Doppelg{\\\ - ''a}nger transfers one soundscape into another using audio analysis, mapping,\ - \ and computer-controlled acoustic sound objects. The technical realization is\ - \ based on hammer mechanisms actuated by powerful solenoids, driven by a network\ - \ of Arduino boards with high power PWM outputs, and a Max-patch running audio\ - \ analysis and mapping. We look into the special requirements in mechanics for\ - \ large-scale projects. Great care has been taken in the technical design to ensure\ - \ that the resulting work is scalable both in numbers of elements and in physical\ - \ dimensions. 
This makes our findings easily applicable to other projects of a\ - \ similar nature.},\n address = {Baton Rouge, Louisiana, USA},\n author = {{Asbj{\\\ - o}rn Blokkum} Fl\\o and Hans Wilmers},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179060},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {61--64},\n publisher = {Louisiana State University},\n title\ - \ = {Doppelg{\\''a}nger: A solenoid-based large scale sound installation.},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_212.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/212/0212-file1.mp4},\n year = {2015}\n\ + ID: Essl2010 + abstract: 'We discuss how the environment urMus was designed to allow creation of + mobile musical instruments on multi-touch smartphones. The design of a mobile + musical instrument consists of connecting sensory capabilities to output modalities + through various means of processing. We describe how the default mapping interface + was designed which allows to set up such a pipeline and how visual and interactive + multi-touch UIs for musical instruments can be designed within the system. ' + address: 'Sydney, Australia' + author: 'Essl, Georg and Müller, Alexander' + bibtex: "@inproceedings{Essl2010,\n abstract = {We discuss how the environment urMus\ + \ was designed to allow creation of mobile musical instruments on multi-touch\ + \ smartphones. The design of a mobile musical instrument consists of connecting\ + \ sensory capabilities to output modalities through various means of processing.\ + \ We describe how the default mapping interface was designed which allows to set\ + \ up such a pipeline and how visual and interactive multi-touch UIs for musical\ + \ instruments can be designed within the system. 
},\n address = {Sydney, Australia},\n\ + \ author = {Essl, Georg and M\\''{u}ller, Alexander},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177759},\n issn = {2220-4806},\n keywords = {Mobile music\ + \ making, meta-environment, design, mapping, user interface},\n pages = {76--81},\n\ + \ title = {Designing Mobile Musical Instruments and Environments with urMus},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_076.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179060 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177759 issn: 2220-4806 - month: May - pages: 61--64 - publisher: Louisiana State University - title: 'Doppelgänger: A solenoid-based large scale sound installation.' - url: http://www.nime.org/proceedings/2015/nime2015_212.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/212/0212-file1.mp4 - year: 2015 + keywords: 'Mobile music making, meta-environment, design, mapping, user interface' + pages: 76--81 + title: Designing Mobile Musical Instruments and Environments with urMus + url: http://www.nime.org/proceedings/2010/nime2010_076.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: ahazzard2015 - abstract: 'Locative music experiences are often non-linear and as such they are - co-created, as the final arrangement of the music heard is guided by the movements - of the user. We note an absence of principles and guidelines regarding how composers - should approach the structuring of such locative soundtracks. For instance, how - does one compose for a non-linear, indeterminate experience using linear pre-composed - placed sounds, where fixed musical time is situated into the indeterminate time - of the user''s experience? 
Furthermore, how does one create a soundtrack that - is suitable for the location, but also functions as a coherent musical structure? - We explore these questions by analyzing an existing `placed sound'' work from - a traditional music theory perspective and in doing so reveal some structural - principals from `fixed'' musical forms can also support the composition of contemporary - locative music experiences.' - address: 'Baton Rouge, Louisiana, USA' - author: Adrian Hazzard and Steve Benford and Alan Chamberlain and Chris Greenhalgh - bibtex: "@inproceedings{ahazzard2015,\n abstract = {Locative music experiences are\ - \ often non-linear and as such they are co-created, as the final arrangement of\ - \ the music heard is guided by the movements of the user. We note an absence of\ - \ principles and guidelines regarding how composers should approach the structuring\ - \ of such locative soundtracks. For instance, how does one compose for a non-linear,\ - \ indeterminate experience using linear pre-composed placed sounds, where fixed\ - \ musical time is situated into the indeterminate time of the user's experience?\ - \ Furthermore, how does one create a soundtrack that is suitable for the location,\ - \ but also functions as a coherent musical structure? We explore these questions\ - \ by analyzing an existing `placed sound' work from a traditional music theory\ - \ perspective and in doing so reveal some structural principals from `fixed' musical\ - \ forms can also support the composition of contemporary locative music experiences.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Adrian Hazzard and Steve\ - \ Benford and Alan Chamberlain and Chris Greenhalgh},\n booktitle = {Proceedings\ + ID: Oh2010 + abstract: 'In this paper, we describe the development of the Stanford Mobile Phone + Orchestra (MoPhO) since its inceptionin 2007. 
As a newly structured ensemble of + musicians withiPhones and wearable speakers, MoPhO takes advantageof the ubiquity + and mobility of smartphones as well asthe unique interaction techniques offered + by such devices.MoPhO offers a new platform for research, instrument design, composition, + and performance that can be juxtaposedto that of a laptop orchestra. We trace + the origins of MoPhO,describe the motivations behind the current hardware andsoftware + design in relation to the backdrop of current trendsin mobile music making, detail + key interaction conceptsaround new repertoire, and conclude with an analysis onthe + development of MoPhO thus far.' + address: 'Sydney, Australia' + author: 'Oh, Jieun and Herrera, Jorge and Bryan, Nicholas J. and Dahl, Luke and + Wang, Ge' + bibtex: "@inproceedings{Oh2010,\n abstract = {In this paper, we describe the development\ + \ of the Stanford Mobile Phone Orchestra (MoPhO) since its inceptionin 2007. As\ + \ a newly structured ensemble of musicians withiPhones and wearable speakers,\ + \ MoPhO takes advantageof the ubiquity and mobility of smartphones as well asthe\ + \ unique interaction techniques offered by such devices.MoPhO offers a new platform\ + \ for research, instrument design, composition, and performance that can be juxtaposedto\ + \ that of a laptop orchestra. We trace the origins of MoPhO,describe the motivations\ + \ behind the current hardware andsoftware design in relation to the backdrop of\ + \ current trendsin mobile music making, detail key interaction conceptsaround\ + \ new repertoire, and conclude with an analysis onthe development of MoPhO thus\ + \ far.},\n address = {Sydney, Australia},\n author = {Oh, Jieun and Herrera, Jorge\ + \ and Bryan, Nicholas J. 
and Dahl, Luke and Wang, Ge},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179086},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {378--381},\n publisher = {Louisiana\ - \ State University},\n title = {Considering musical structure in location-based\ - \ experiences},\n url = {http://www.nime.org/proceedings/2015/nime2015_214.pdf},\n\ - \ year = {2015}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1179086 - editor: Edgar Berdahl and Jesse Allison - issn: 2220-4806 - month: May - pages: 378--381 - publisher: Louisiana State University - title: Considering musical structure in location-based experiences - url: http://www.nime.org/proceedings/2015/nime2015_214.pdf - year: 2015 - - -- ENTRYTYPE: inproceedings - ID: btome2015 - abstract: 'Twitter has provided a social platform for everyone to enter the previously - exclusive world of the internet, enriching this online social tapestry with cultural - diversity and enabling revolutions. We believe this same tool can be used to also - change the world of music creation. Thus we present MMODM, an online drum machine - based on the Twitter streaming API, using tweets from around the world to create - and perform musical sequences together in real time. Users anywhere can express - 16-beat note sequences across 26 different instruments using plain text tweets - on their favorite device, in real-time. Meanwhile, users on the site itself can - use the graphical interface to locally DJ the rhythm, filters, and sequence blending. - By harnessing this duo of website and Twitter network, MMODM enables a whole new - scale of synchronous musical collaboration between users locally, remotely, across - a wide variety of computing devices, and across a variety of cultures.' 
- address: 'Baton Rouge, Louisiana, USA' - author: Basheer Tome and Donald Derek Haddad and Tod Machover and Joseph Paradiso - bibtex: "@inproceedings{btome2015,\n abstract = {Twitter has provided a social platform\ - \ for everyone to enter the previously exclusive world of the internet, enriching\ - \ this online social tapestry with cultural diversity and enabling revolutions.\ - \ We believe this same tool can be used to also change the world of music creation.\ - \ Thus we present MMODM, an online drum machine based on the Twitter streaming\ - \ API, using tweets from around the world to create and perform musical sequences\ - \ together in real time. Users anywhere can express 16-beat note sequences across\ - \ 26 different instruments using plain text tweets on their favorite device, in\ - \ real-time. Meanwhile, users on the site itself can use the graphical interface\ - \ to locally DJ the rhythm, filters, and sequence blending. By harnessing this\ - \ duo of website and Twitter network, MMODM enables a whole new scale of synchronous\ - \ musical collaboration between users locally, remotely, across a wide variety\ - \ of computing devices, and across a variety of cultures.},\n address = {Baton\ - \ Rouge, Louisiana, USA},\n author = {Basheer Tome and {Donald Derek} Haddad and\ - \ Tod Machover and Joseph Paradiso},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179184},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {285--288},\n publisher = {Louisiana State University},\n title\ - \ = {MMODM: Massively Multipler Online Drum Machine},\n url = {http://www.nime.org/proceedings/2015/nime2015_215.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/215/0215-file1.mp4},\n year\ - \ = {2015}\n}\n" + \ doi = {10.5281/zenodo.1177871},\n issn = {2220-4806},\n keywords = {mobile phone\ + \ orchestra, live 
performance, iPhone, mobile music},\n pages = {82--87},\n title\ + \ = {Evolving The Mobile Phone Orchestra},\n url = {http://www.nime.org/proceedings/2010/nime2010_082.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179184 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177871 issn: 2220-4806 - month: May - pages: 285--288 - publisher: Louisiana State University - title: 'MMODM: Massively Multipler Online Drum Machine' - url: http://www.nime.org/proceedings/2015/nime2015_215.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/215/0215-file1.mp4 - year: 2015 + keywords: 'mobile phone orchestra, live performance, iPhone, mobile music' + pages: 82--87 + title: Evolving The Mobile Phone Orchestra + url: http://www.nime.org/proceedings/2010/nime2010_082.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: nbarrett2015 - abstract: 'Electroacoustic music has a longstanding relationship with gesture and - space. This paper marks the start of a project investigating acousmatic spatial - imagery, real gestural behaviour and ultimately the formation of tangible acousmatic - images. These concepts are explored experimentally using motion tracking in a - source-sound recording context, interactive parameter-mapping sonification in - three-dimensional high-order ambisonics, composition and performance. The spatio-musical - role of physical actions in relation to instrument excitation is used as a point - of departure for embodying physical spatial gestures in the creative process. - The work draws on how imagery for music is closely linked with imagery for music-related - actions.' - address: 'Baton Rouge, Louisiana, USA' - author: Natasha Barrett - bibtex: "@inproceedings{nbarrett2015,\n abstract = {Electroacoustic music has a\ - \ longstanding relationship with gesture and space. 
This paper marks the start\ - \ of a project investigating acousmatic spatial imagery, real gestural behaviour\ - \ and ultimately the formation of tangible acousmatic images. These concepts are\ - \ explored experimentally using motion tracking in a source-sound recording context,\ - \ interactive parameter-mapping sonification in three-dimensional high-order ambisonics,\ - \ composition and performance. The spatio-musical role of physical actions in\ - \ relation to instrument excitation is used as a point of departure for embodying\ - \ physical spatial gestures in the creative process. The work draws on how imagery\ - \ for music is closely linked with imagery for music-related actions.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Natasha Barrett},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179014},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {191--194},\n publisher = {Louisiana\ - \ State University},\n title = {Creating tangible spatial-musical images from\ - \ physical performance gestures},\n url = {http://www.nime.org/proceedings/2015/nime2015_216.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/216/0216-file1.zip},\n year\ - \ = {2015}\n}\n" + ID: Tanaka2010 + abstract: This paper reviews and extends questions of the scope of an interactive + musical instrument and mapping strategies for expressive performance. We apply + notions of embodiment and affordance to characterize gestural instruments. We + note that the democratization of sensor technology in consumer devices has extended + the cultural contexts for interaction. We revisit questions of mapping drawing + upon the theory of affordances to consider mapping and instrument together. 
This + is applied to recent work by the author and his collaborators in the development + of instruments based on mobile devices designed for specific performance situations. + address: 'Sydney, Australia' + author: 'Tanaka, Atau' + bibtex: "@inproceedings{Tanaka2010,\n abstract = {This paper reviews and extends\ + \ questions of the scope of an interactive musical instrument and mapping strategies\ + \ for expressive performance. We apply notions of embodiment and affordance to\ + \ characterize gestural instruments. We note that the democratization of sensor\ + \ technology in consumer devices has extended the cultural contexts for interaction.\ + \ We revisit questions of mapping drawing upon the theory of affordances to consider\ + \ mapping and instrument together. This is applied to recent work by the author\ + \ and his collaborators in the development of instruments based on mobile devices\ + \ designed for specific performance situations.},\n address = {Sydney, Australia},\n\ + \ author = {Tanaka, Atau},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177903},\n\ + \ issn = {2220-4806},\n keywords = {Musical affordance, NIME, mapping, instrument\ + \ definition, mobile, multimodal interaction.},\n pages = {88--93},\n title =\ + \ {Mapping Out Instruments, Affordances, and Mobiles},\n url = {http://www.nime.org/proceedings/2010/nime2010_088.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179014 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177903 issn: 2220-4806 - month: May - pages: 191--194 - publisher: Louisiana State University - title: Creating tangible spatial-musical images from physical performance gestures - url: http://www.nime.org/proceedings/2015/nime2015_216.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/216/0216-file1.zip - year: 
2015 + keywords: 'Musical affordance, NIME, mapping, instrument definition, mobile, multimodal + interaction.' + pages: 88--93 + title: 'Mapping Out Instruments, Affordances, and Mobiles' + url: http://www.nime.org/proceedings/2010/nime2010_088.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: jharrimanc2015 - abstract: 'Designing and building Digital Music Instruments (DMIs) is a promising - context to engage children in technology design with parallels to hands on and - project based learning educational approaches. Looking at tools and approaches - used in STEM education we find much in common with the tools and approaches used - in the creation of DMIs as well as opportunities for future development, in particular - the use of scaffolded software and hardware toolkits. Current approaches to teaching - and designing DMIs within the community suggest fruitful ideas for engaging novices - in authentic design activities. Hardware toolkits and programming approaches are - considered to identify productive approaches to teach technology design through - building DMIs.' - address: 'Baton Rouge, Louisiana, USA' - author: Jiffer Harriman - bibtex: "@inproceedings{jharrimanc2015,\n abstract = {Designing and building Digital\ - \ Music Instruments (DMIs) is a promising context to engage children in technology\ - \ design with parallels to hands on and project based learning educational approaches.\ - \ Looking at tools and approaches used in STEM education we find much in common\ - \ with the tools and approaches used in the creation of DMIs as well as opportunities\ - \ for future development, in particular the use of scaffolded software and hardware\ - \ toolkits. 
Current approaches to teaching and designing DMIs within the community\ - \ suggest fruitful ideas for engaging novices in authentic design activities.\ - \ Hardware toolkits and programming approaches are considered to identify productive\ - \ approaches to teach technology design through building DMIs.},\n address = {Baton\ - \ Rouge, Louisiana, USA},\n author = {Jiffer Harriman},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179078},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {70--73},\n publisher = {Louisiana\ - \ State University},\n title = {Start 'em Young: Digital Music Instrument for\ - \ Education},\n url = {http://www.nime.org/proceedings/2015/nime2015_218.pdf},\n\ - \ year = {2015}\n}\n" + ID: Havryliv2010 + abstract: 'This paper describes a novel method for composing andimprovisation with + real-time chaotic oscillators. Recentlydiscovered algebraically simple nonlinear + third-order differential equations are solved and acoustical descriptors relating + to their frequency spectrums are determined accordingto the MPEG-7 specification. + A second nonlinearity is thenadded to these equations: a real-time audio signal. + Descriptive properties of the complex behaviour of these equationsare then determined + as a function of difference tones derived from a Just Intonation scale and the + amplitude ofthe audio signal. By using only the real-time audio signalfrom live + performer/s as an input the causal relationshipbetween acoustic performance gestures + and computer output, including any visual or performer-instruction output,is deterministic + even if the chaotic behaviours are not.' + address: 'Sydney, Australia' + author: 'Havryliv, Mark' + bibtex: "@inproceedings{Havryliv2010,\n abstract = {This paper describes a novel\ + \ method for composing andimprovisation with real-time chaotic oscillators. 
Recentlydiscovered\ + \ algebraically simple nonlinear third-order differential equations are solved\ + \ and acoustical descriptors relating to their frequency spectrums are determined\ + \ accordingto the MPEG-7 specification. A second nonlinearity is thenadded to\ + \ these equations: a real-time audio signal. Descriptive properties of the complex\ + \ behaviour of these equationsare then determined as a function of difference\ + \ tones derived from a Just Intonation scale and the amplitude ofthe audio signal.\ + \ By using only the real-time audio signalfrom live performer/s as an input the\ + \ causal relationshipbetween acoustic performance gestures and computer output,\ + \ including any visual or performer-instruction output,is deterministic even if\ + \ the chaotic behaviours are not.},\n address = {Sydney, Australia},\n author\ + \ = {Havryliv, Mark},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177795},\n\ + \ issn = {2220-4806},\n keywords = {chaos and music, chaotic dynamics and oscillators,\ + \ differential equations and music, mathematica, audio descriptors and mpeg-7},\n\ + \ pages = {94--99},\n title = {Composing For Improvisation with Chaotic Oscillators},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_094.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179078 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177795 issn: 2220-4806 - month: May - pages: 70--73 - publisher: Louisiana State University - title: 'Start ''em Young: Digital Music Instrument for Education' - url: http://www.nime.org/proceedings/2015/nime2015_218.pdf - year: 2015 + keywords: 'chaos and music, chaotic dynamics and oscillators, differential equations + and music, mathematica, audio descriptors and mpeg-7' + pages: 94--99 + title: Composing For Improvisation 
with Chaotic Oscillators + url: http://www.nime.org/proceedings/2010/nime2010_094.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: dcazzani2015 - abstract: 'The following paper documents the creation of a prototype of shoe-soles - designed to detect various postures of standing musicians using non-intrusive - pressure sensors. In order to do so, flexible algorithms were designed with the - capacity of working even with an imprecise placement of the sensors. This makes - it easy and accessible for all potential users. At least 4 sensors are required: - 2 for the front and 2 for the back; this prototype uses 6. The sensors are rather - inexpensive, widening the economic availability. For each individual musician, - the algorithms are capable of ``personalising'''' postures in order to create - consistent evaluations; the results of which may be, but are not limited to: new - musical interfaces, educational analysis of technique, or music controllers. In - building a prototype for the algorithms, data was acquired by wiring the sensors - to a data-logger. The algorithms and tests were implemented using MATLAB. After - designing the algorithms, various tests were run in order to prove their reliability. - These determined that indeed the algorithms work to a sufficient degree of certainty, - allowing for a reliable classification of a musician''s posture or position.' - address: 'Baton Rouge, Louisiana, USA' - author: Dario Cazzani - bibtex: "@inproceedings{dcazzani2015,\n abstract = {The following paper documents\ - \ the creation of a prototype of shoe-soles designed to detect various postures\ - \ of standing musicians using non-intrusive pressure sensors. In order to do so,\ - \ flexible algorithms were designed with the capacity of working even with an\ - \ imprecise placement of the sensors. This makes it easy and accessible for all\ - \ potential users. At least 4 sensors are required: 2 for the front and 2 for\ - \ the back; this prototype uses 6. 
The sensors are rather inexpensive, widening\ - \ the economic availability. For each individual musician, the algorithms are\ - \ capable of ``personalising'' postures in order to create consistent evaluations;\ - \ the results of which may be, but are not limited to: new musical interfaces,\ - \ educational analysis of technique, or music controllers. In building a prototype\ - \ for the algorithms, data was acquired by wiring the sensors to a data-logger.\ - \ The algorithms and tests were implemented using MATLAB. After designing the\ - \ algorithms, various tests were run in order to prove their reliability. These\ - \ determined that indeed the algorithms work to a sufficient degree of certainty,\ - \ allowing for a reliable classification of a musician's posture or position.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Dario Cazzani},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179042},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {54--57},\n\ - \ publisher = {Louisiana State University},\n title = {Posture Identification\ - \ of Musicians Using Non-Intrusive Low-Cost Resistive Pressure Sensors},\n url\ - \ = {http://www.nime.org/proceedings/2015/nime2015_220.pdf},\n year = {2015}\n\ + ID: Hawryshkewich2010 + abstract: 'Traditional drum machines and digital drum-kits offer users the ability + to practice or perform with a supporting ensemble – such as a bass, guitar and + piano – but rarely provide support in the form of an accompanying percussion part. + Beatback is a system which develops upon this missing interaction through offering + a MIDI enabled drum system which learns and plays in the user''s style. In the + contexts of rhythmic practise and exploration, Beatback looks at call-response + and accompaniment models of interaction to enable new possibilities for rhythmic + creativity.' 
+ address: 'Sydney, Australia' + author: 'Hawryshkewich, Andrew and Pasquier, Philippe and Eigenfeldt, Arne' + bibtex: "@inproceedings{Hawryshkewich2010,\n abstract = {Traditional drum machines\ + \ and digital drum-kits offer users the ability to practice or perform with a\ + \ supporting ensemble – such as a bass, guitar and piano – but rarely provide\ + \ support in the form of an accompanying percussion part. Beatback is a system\ + \ which develops upon this missing interaction through offering a MIDI enabled\ + \ drum system which learns and plays in the user's style. In the contexts of rhythmic\ + \ practise and exploration, Beatback looks at call-response and accompaniment\ + \ models of interaction to enable new possibilities for rhythmic creativity.},\n\ + \ address = {Sydney, Australia},\n author = {Hawryshkewich, Andrew and Pasquier,\ + \ Philippe and Eigenfeldt, Arne},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177797},\n\ + \ issn = {2220-4806},\n keywords = {Interactive music interface, real-time, percussion,\ + \ machine learning, Markov models, MIDI.},\n pages = {100--105},\n title = {Beatback\ + \ : A Real-time Interactive Percussion System for Rhythmic Practise and Exploration},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_100.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179042 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177797 issn: 2220-4806 - month: May - pages: 54--57 - publisher: Louisiana State University - title: Posture Identification of Musicians Using Non-Intrusive Low-Cost Resistive - Pressure Sensors - url: http://www.nime.org/proceedings/2015/nime2015_220.pdf - year: 2015 + keywords: 'Interactive music interface, real-time, percussion, machine learning, + Markov models, MIDI.' 
+ pages: 100--105 + title: 'Beatback : A Real-time Interactive Percussion System for Rhythmic Practise + and Exploration' + url: http://www.nime.org/proceedings/2010/nime2010_100.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: roda2015 - abstract: 'The Internet holds a lot of potential as a music listening, collaboration, - and performance space. It has become commonplace to stream music and video of - musical performance over the web. However, the goal of playing rhythmically synchronized - music over long distances has remained elusive due to the latency inherent in - networked communication. The farther apart two artists are from one another, the - greater the delay. Furthermore, latency times can change abruptly with no warning. - In this paper, we demonstrate that it is possible to create a distributed, synchronized - musical instrument that allows performers to play together over long distances, - despite latency. We describe one such instrument, MalLo, which combats latency - by predicting a musician''s action before it is completed. MalLo sends information - about a predicted musical note over the Internet before it is played, and synthesizes - this note at a collaborator''s location at nearly the same moment it is played - by the performer. MalLo also protects against latency spikes by sending the prediction - data across multiple network paths, with the intention of routing around latency. ' - address: 'Baton Rouge, Louisiana, USA' - author: Zeyu Jin and Reid Oda and Adam Finkelstein and Rebecca Fiebrink - bibtex: "@inproceedings{roda2015,\n abstract = {The Internet holds a lot of potential\ - \ as a music listening, collaboration, and performance space. It has become commonplace\ - \ to stream music and video of musical performance over the web. However, the\ - \ goal of playing rhythmically synchronized music over long distances has remained\ - \ elusive due to the latency inherent in networked communication. 
The farther\ - \ apart two artists are from one another, the greater the delay. Furthermore,\ - \ latency times can change abruptly with no warning. In this paper, we demonstrate\ - \ that it is possible to create a distributed, synchronized musical instrument\ - \ that allows performers to play together over long distances, despite latency.\ - \ We describe one such instrument, MalLo, which combats latency by predicting\ - \ a musician's action before it is completed. MalLo sends information about a\ - \ predicted musical note over the Internet before it is played, and synthesizes\ - \ this note at a collaborator's location at nearly the same moment it is played\ - \ by the performer. MalLo also protects against latency spikes by sending the\ - \ prediction data across multiple network paths, with the intention of routing\ - \ around latency. },\n address = {Baton Rouge, Louisiana, USA},\n author = {Zeyu\ - \ Jin and Reid Oda and Adam Finkelstein and Rebecca Fiebrink},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179102},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {293--298},\n publisher = {Louisiana\ - \ State University},\n title = {MalLo: A Distributed Synchronized Musical Instrument\ - \ Designed For Internet Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_223.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/223/0223-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Gurevich2010 + abstract: 'A qualitative study to investigate the development of stylein performance + with a highly constrained musical instrument is described. A new one-button instrument + was designed, with which several musicians were each asked topractice and develop + a solo performance. 
Observations oftrends in attributes of these performances + are detailed in relation to participants'' statements in structured interviews.Participants + were observed to develop stylistic variationsboth within the domain of activities + suggested by the constraint, and by discovering non-obvious techniques througha + variety of strategies. Data suggest that stylistic variationsoccurred in spite + of perceived constraint, but also becauseof perceived constraint. Furthermore, + participants tendedto draw on unique experiences, approaches and perspectivesthat + shaped individual performances.' + address: 'Sydney, Australia' + author: 'Gurevich, Michael and Stapleton, Paul and Marquez-Borbon, Adnan' + bibtex: "@inproceedings{Gurevich2010,\n abstract = {A qualitative study to investigate\ + \ the development of stylein performance with a highly constrained musical instrument\ + \ is described. A new one-button instrument was designed, with which several musicians\ + \ were each asked topractice and develop a solo performance. Observations oftrends\ + \ in attributes of these performances are detailed in relation to participants'\ + \ statements in structured interviews.Participants were observed to develop stylistic\ + \ variationsboth within the domain of activities suggested by the constraint,\ + \ and by discovering non-obvious techniques througha variety of strategies. Data\ + \ suggest that stylistic variationsoccurred in spite of perceived constraint,\ + \ but also becauseof perceived constraint. 
Furthermore, participants tendedto\ + \ draw on unique experiences, approaches and perspectivesthat shaped individual\ + \ performances.},\n address = {Sydney, Australia},\n author = {Gurevich, Michael\ + \ and Stapleton, Paul and Marquez-Borbon, Adnan},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177785},\n issn = {2220-4806},\n keywords = {design, interaction,\ + \ performance, persuasive technology},\n pages = {106--111},\n title = {Style\ + \ and Constraint in Electronic Musical Instruments},\n url = {http://www.nime.org/proceedings/2010/nime2010_106.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179102 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177785 issn: 2220-4806 - month: May - pages: 293--298 - publisher: Louisiana State University - title: 'MalLo: A Distributed Synchronized Musical Instrument Designed For Internet - Performance' - url: http://www.nime.org/proceedings/2015/nime2015_223.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/223/0223-file1.mp4 - year: 2015 + keywords: 'design, interaction, performance, persuasive technology' + pages: 106--111 + title: Style and Constraint in Electronic Musical Instruments + url: http://www.nime.org/proceedings/2010/nime2010_106.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: lhayes2015 - abstract: 'Live music making can be understood as an enactive process, whereby musical - experiences are created through human action. This suggests that musical worlds - coevolve with their agents through repeated sensorimotor interactions with the - environment (where the music is being created), and at the same time cannot be - separated from their sociocultural contexts. 
This paper investigates this claim - by exploring ways in which technology, physiology, and context are bound up within - two different musical scenarios: live electronic musical performance; and person-centred - arts applications of NIMEs. In this paper I outline an ethnographic and phenomenological - enquiry into my experiences as both a performer of live electronic and electro-instrumental - music, as well as my extensive background in working with new technologies in - various therapeutic and person-centred artistic situations. This is in order to - explore the sociocultural and technological contexts in which these activities - take place. I propose that by understanding creative musical participation as - a highly contextualised practice, we may discover that the greatest impact of - rapidly developing technological resources is their ability to afford richly diverse, - personalised, and embodied forms of music making. I argue that this is applicable - over a wide range of musical communities.' - address: 'Baton Rouge, Louisiana, USA' - author: Lauren Hayes - bibtex: "@inproceedings{lhayes2015,\n abstract = {Live music making can be understood\ - \ as an enactive process, whereby musical experiences are created through human\ - \ action. This suggests that musical worlds coevolve with their agents through\ - \ repeated sensorimotor interactions with the environment (where the music is\ - \ being created), and at the same time cannot be separated from their sociocultural\ - \ contexts. This paper investigates this claim by exploring ways in which technology,\ - \ physiology, and context are bound up within two different musical scenarios:\ - \ live electronic musical performance; and person-centred arts applications of\ - \ NIMEs. 
In this paper I outline an ethnographic and phenomenological enquiry\ - \ into my experiences as both a performer of live electronic and electro-instrumental\ - \ music, as well as my extensive background in working with new technologies in\ - \ various therapeutic and person-centred artistic situations. This is in order\ - \ to explore the sociocultural and technological contexts in which these activities\ - \ take place. I propose that by understanding creative musical participation as\ - \ a highly contextualised practice, we may discover that the greatest impact of\ - \ rapidly developing technological resources is their ability to afford richly\ - \ diverse, personalised, and embodied forms of music making. I argue that this\ - \ is applicable over a wide range of musical communities.},\n address = {Baton\ - \ Rouge, Louisiana, USA},\n author = {Lauren Hayes},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179082},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {299--302},\n publisher = {Louisiana\ - \ State University},\n title = {Enacting Musical Worlds: Common Approaches to\ - \ using NIMEs within both Performance and Person-Centred Arts Practices},\n url\ - \ = {http://www.nime.org/proceedings/2015/nime2015_227.pdf},\n year = {2015}\n\ - }\n" + ID: Choi2010 + abstract: 'We propose an environment that allows users to create music by leveraging + playful visualization and organic interaction. Our attempt to improve ideas drawn + from traditional sequencer paradigm has been made in terms of extemporizing music + and associating with visualization in real-time. In order to offer different user + experience and musical possibility, this system incorporates many techniques, + including; flocking simulation, nondeterministic finite automata (NFA), score + file analysis, vector calculation, OpenGL animation, and networking. 
We transform + a sequencer into an audiovisual platform for composition and performance, which + is furnished with artistry and ease of use. Thus we believe that it is suitable + for not only artists such as algorithmic composers or audiovisual performers, + but also anyone who wants to play music and imagery in a different way. ' + address: 'Sydney, Australia' + author: 'Choi, Hongchan and Wang, Ge' + bibtex: "@inproceedings{Choi2010,\n abstract = {We propose an environment that allows\ + \ users to create music by leveraging playful visualization and organic interaction.\ + \ Our attempt to improve ideas drawn from traditional sequencer paradigm has been\ + \ made in terms of extemporizing music and associating with visualization in real-time.\ + \ In order to offer different user experience and musical possibility, this system\ + \ incorporates many techniques, including; flocking simulation, nondeterministic\ + \ finite automata (NFA), score file analysis, vector calculation, OpenGL animation,\ + \ and networking. We transform a sequencer into an audiovisual platform for composition\ + \ and performance, which is furnished with artistry and ease of use. Thus we believe\ + \ that it is suitable for not only artists such as algorithmic composers or audiovisual\ + \ performers, but also anyone who wants to play music and imagery in a different\ + \ way. 
},\n address = {Sydney, Australia},\n author = {Choi, Hongchan and Wang,\ + \ Ge},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177741},\n issn = {2220-4806},\n\ + \ keywords = {algorithmic composition,audiovisual,automata,behavior simulation,music,music\ + \ sequencer,musical interface,nime10,visualization},\n pages = {112--115},\n title\ + \ = {LUSH : An Organic Eco + Music System},\n url = {http://www.nime.org/proceedings/2010/nime2010_112.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179082 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177741 issn: 2220-4806 - month: May - pages: 299--302 - publisher: Louisiana State University - title: 'Enacting Musical Worlds: Common Approaches to using NIMEs within both Performance - and Person-Centred Arts Practices' - url: http://www.nime.org/proceedings/2015/nime2015_227.pdf - year: 2015 + keywords: 'algorithmic composition,audiovisual,automata,behavior simulation,music,music + sequencer,musical interface,nime10,visualization' + pages: 112--115 + title: 'LUSH : An Organic Eco + Music System' + url: http://www.nime.org/proceedings/2010/nime2010_112.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: ncorreia2015 - abstract: 'We present a user-centered approach for prototyping tools for performance - with procedural sound and graphics, based on a hackathon. We also present the - resulting prototypes. These prototypes respond to a challenge originating from - earlier stages of the research: to combine ease-of-use with expressiveness and - visibility of interaction in tools for audiovisual performance. We aimed to convert - sketches, resulting from an earlier brainstorming session, into functional prototypes - in a short period of time. The outcomes include open-source software base released - online. 
The conclusions reflect on the methodology adopted and the effectiveness - of the prototypes.' - address: 'Baton Rouge, Louisiana, USA' - author: Nuno N. Correia and Atau Tanaka - bibtex: "@inproceedings{ncorreia2015,\n abstract = {We present a user-centered approach\ - \ for prototyping tools for performance with procedural sound and graphics, based\ - \ on a hackathon. We also present the resulting prototypes. These prototypes respond\ - \ to a challenge originating from earlier stages of the research: to combine ease-of-use\ - \ with expressiveness and visibility of interaction in tools for audiovisual performance.\ - \ We aimed to convert sketches, resulting from an earlier brainstorming session,\ - \ into functional prototypes in a short period of time. The outcomes include open-source\ - \ software base released online. The conclusions reflect on the methodology adopted\ - \ and the effectiveness of the prototypes.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {{Nuno N.} Correia and Atau Tanaka},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179044},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {319--321},\n publisher = {Louisiana\ - \ State University},\n title = {Prototyping Audiovisual Performance Tools: A Hackathon\ - \ Approach},\n url = {http://www.nime.org/proceedings/2015/nime2015_230.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/230/0230-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Yamaguchi2010 + abstract: 'In this paper, we introduce a wireless musical interface driven by grasping + forces and human motion. The sounds generated by the traditional digital musical + instruments are dependent on the physical shape of the musical instruments. The + freedom of the musical performance is restricted by its structure. 
Therefore, + the sounds cannot be generated with the body expression like the dance. We developed + a ball-shaped interface, TwinkleBall, to achieve the free-style performance. A + photo sensor is embedded in the translucent rubber ball to detect the grasping + force of the performer. The grasping force is translated into the luminance intensity + for processing. Moreover, an accelerometer is also embedded in the interface for + motion sensing. By using these sensors, a performer can control the note and volume + by varying grasping force and motion respectively. The features of the proposed + interface are ball-shaped, wireless, and handheld size. As a result, the proposed + interface is able to generate the sound from the body expression such as dance. ' + address: 'Sydney, Australia' + author: 'Yamaguchi, Tomoyuki and Kobayashi, Tsukasa and Ariga, Anna and Hashimoto, + Shuji' + bibtex: "@inproceedings{Yamaguchi2010,\n abstract = {In this paper, we introduce\ + \ a wireless musical interface driven by grasping forces and human motion. The\ + \ sounds generated by the traditional digital musical instruments are dependent\ + \ on the physical shape of the musical instruments. The freedom of the musical\ + \ performance is restricted by its structure. Therefore, the sounds cannot be\ + \ generated with the body expression like the dance. We developed a ball-shaped\ + \ interface, TwinkleBall, to achieve the free-style performance. A photo sensor\ + \ is embedded in the translucent rubber ball to detect the grasping force of the\ + \ performer. The grasping force is translated into the luminance intensity for\ + \ processing. Moreover, an accelerometer is also embedded in the interface for\ + \ motion sensing. By using these sensors, a performer can control the note and\ + \ volume by varying grasping force and motion respectively. The features of the\ + \ proposed interface are ball-shaped, wireless, and handheld size. 
As a result,\ + \ the proposed interface is able to generate the sound from the body expression\ + \ such as dance. },\n address = {Sydney, Australia},\n author = {Yamaguchi, Tomoyuki\ + \ and Kobayashi, Tsukasa and Ariga, Anna and Hashimoto, Shuji},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177927},\n issn = {2220-4806},\n keywords = {Musical\ + \ Interface, Embodied Sound Media, Dance Performance.},\n pages = {116--119},\n\ + \ title = {TwinkleBall : A Wireless Musical Interface for Embodied Sound Media},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_116.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179044 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177927 issn: 2220-4806 - month: May - pages: 319--321 - publisher: Louisiana State University - title: 'Prototyping Audiovisual Performance Tools: A Hackathon Approach' - url: http://www.nime.org/proceedings/2015/nime2015_230.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/230/0230-file1.mp4 - year: 2015 + keywords: 'Musical Interface, Embodied Sound Media, Dance Performance.' + pages: 116--119 + title: 'TwinkleBall : A Wireless Musical Interface for Embodied Sound Media' + url: http://www.nime.org/proceedings/2010/nime2010_116.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: pbennett2015 - abstract: 'Resonant Bits proposes giving digital information resonant dynamic properties, - requiring skill and concerted effort for interaction. This paper applies resonant - interaction to musical control, exploring musical instruments that are controlled - through both purposeful and subconscious resonance. 
We detail three exploratory - prototypes, the first two illustrating the use of resonant gestures and the third - focusing on the detection and use of the ideomotor (subconscious micro-movement) - effect.' - address: 'Baton Rouge, Louisiana, USA' - author: Peter Bennett and Jarrod Knibbe and Florent Berthaut and Kirsten Cater - bibtex: "@inproceedings{pbennett2015,\n abstract = {Resonant Bits proposes giving\ - \ digital information resonant dynamic properties, requiring skill and concerted\ - \ effort for interaction. This paper applies resonant interaction to musical control,\ - \ exploring musical instruments that are controlled through both purposeful and\ - \ subconscious resonance. We detail three exploratory prototypes, the first two\ - \ illustrating the use of resonant gestures and the third focusing on the detection\ - \ and use of the ideomotor (subconscious micro-movement) effect.},\n address =\ - \ {Baton Rouge, Louisiana, USA},\n author = {Peter Bennett and Jarrod Knibbe and\ - \ Florent Berthaut and Kirsten Cater},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179020},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {176--177},\n publisher = {Louisiana State University},\n title\ - \ = {Resonant Bits: Controlling Digital Musical Instruments with Resonance and\ - \ the Ideomotor Effect},\n url = {http://www.nime.org/proceedings/2015/nime2015_235.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/235/0235-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Cannon2010 + abstract: This paper presents research undertaken by the Bent Leather Band investigating + the application of live Ambisonics to large digital-instrument ensemble improvisation. 
+ Their playable approach to live ambisonic projection is inspired by the work of + Trevor Wishart and presents a systematic investigation of the potential for live + spatial motion improvisation. + address: 'Sydney, Australia' + author: 'Cannon, Joanne and Favilla, Stuart' + bibtex: "@inproceedings{Cannon2010,\n abstract = {This paper presents research undertaken\ + \ by the Bent Leather Band investigating the application of live Ambisonics to\ + \ large digital-instrument ensemble improvisation. Their playable approach to\ + \ live ambisonic projection is inspired by the work of Trevor Wishart and presents\ + \ a systematic investigation of the potential for live spatial motion improvisation.},\n\ + \ address = {Sydney, Australia},\n author = {Cannon, Joanne and Favilla, Stuart},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177735},\n issn = {2220-4806},\n\ + \ keywords = {ambisonics, augmented instruments, expressive spatial motion, playable\ + \ instruments},\n pages = {120--124},\n title = {Expression and Spatial Motion\ + \ : Playable Ambisonics},\n url = {http://www.nime.org/proceedings/2010/nime2010_120.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179020 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177735 issn: 2220-4806 - month: May - pages: 176--177 - publisher: Louisiana State University - title: 'Resonant Bits: Controlling Digital Musical Instruments with Resonance and - the Ideomotor Effect' - url: http://www.nime.org/proceedings/2015/nime2015_235.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/235/0235-file1.mp4 - year: 2015 + keywords: 'ambisonics, augmented instruments, expressive spatial motion, playable + instruments' + pages: 120--124 + title: 'Expression and Spatial Motion : Playable Ambisonics' + url: 
http://www.nime.org/proceedings/2010/nime2010_120.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: adecarvalhojunior2015 - abstract: 'The position of a participant during an installation is a valuable data. - One may want to start some sample when someone cross a line or stop the music - automatically whenever there is nobody inside the main area. GPS is a good solution - for localization, but it usually loses its capabilities inside buildings. This - paper discuss the use of Wi-Fi signal strength during an installation as an alternative - to GPS.' - address: 'Baton Rouge, Louisiana, USA' - author: 'de Carvalho Junior, Antonio Deusany' - bibtex: "@inproceedings{adecarvalhojunior2015,\n abstract = {The position of a participant\ - \ during an installation is a valuable data. One may want to start some sample\ - \ when someone cross a line or stop the music automatically whenever there is\ - \ nobody inside the main area. GPS is a good solution for localization, but it\ - \ usually loses its capabilities inside buildings. This paper discuss the use\ - \ of Wi-Fi signal strength during an installation as an alternative to GPS.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {de Carvalho Junior, Antonio\ - \ Deusany},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179052},\n editor\ - \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n pages = {40--41},\n\ - \ publisher = {Louisiana State University},\n title = {Indoor localization during\ - \ installations using {WiFi}},\n url = {http://www.nime.org/proceedings/2015/nime2015_237.pdf},\n\ - \ year = {2015}\n}\n" + ID: Collins2010 + abstract: 'The hypothesis of this interaction research project is that it can be + stimulating for experimental musicians to confront a system which ‘opposes’ their + musical style. 
The ‘contrary motion’ of the title is the name of a MIDI-based + realtime musical software agent which uses machine listening to establish the + musical context, and thereby chooses its own responses to differentiate its position + from that of its human interlocutant. To do this requires a deep consideration + of the space of musical actions, so as to explicate what opposition should constitute, + and machine listening technology (most prominently represented by new online beat + and stream tracking algorithms) which gives an accurate measurement of player + position so as to consistently avoid it. An initial pilot evaluation was undertaken, + feeding back critical data to the developing design.' + address: 'Sydney, Australia' + author: 'Collins, Nick' + bibtex: "@inproceedings{Collins2010,\n abstract = {The hypothesis of this interaction\ + \ research project is that it can be stimulating for experimental musicians to\ + \ confront a system which ‘opposes’ their musical style. The ‘contrary motion’\ + \ of the title is the name of a MIDI-based realtime musical software agent which\ + \ uses machine listening to establish the musical context, and thereby chooses\ + \ its own responses to differentiate its position from that of its human interlocutant.\ + \ To do this requires a deep consideration of the space of musical actions, so\ + \ as to explicate what opposition should constitute, and machine listening technology\ + \ (most prominently represented by new online beat and stream tracking algorithms)\ + \ which gives an accurate measurement of player position so as to consistently\ + \ avoid it. 
An initial pilot evaluation was undertaken, feeding back critical\ + \ data to the developing design.},\n address = {Sydney, Australia},\n author =\ + \ {Collins, Nick},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177747},\n\ + \ issn = {2220-4806},\n keywords = {contrary, beat tracking, stream analysis,\ + \ musical agent},\n pages = {125--129},\n title = {Contrary Motion : An Oppositional\ + \ Interactive Music System},\n url = {http://www.nime.org/proceedings/2010/nime2010_125.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1179052 - editor: Edgar Berdahl and Jesse Allison - issn: 2220-4806 - pages: 40--41 - publisher: Louisiana State University - title: Indoor localization during installations using WiFi - url: http://www.nime.org/proceedings/2015/nime2015_237.pdf - year: 2015 + Expression + doi: 10.5281/zenodo.1177747 + issn: 2220-4806 + keywords: 'contrary, beat tracking, stream analysis, musical agent' + pages: 125--129 + title: 'Contrary Motion : An Oppositional Interactive Music System' + url: http://www.nime.org/proceedings/2010/nime2010_125.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: cmartin2015 - abstract: We present and evaluate a novel interface for tracking ensemble performances - on touch-screens. The system uses a Random Forest classifier to extract touch-screen - gestures and transition matrix statistics. It analyses the resulting gesture-state - sequences across an ensemble of performers. A series of specially designed iPad - apps respond to this real-time analysis of free-form gestural performances with - calculated modifications to their musical interfaces. We describe our system and - evaluate it through cross-validation and profiling as well as concert experience. 
- address: 'Baton Rouge, Louisiana, USA' - author: Charles Martin and Henry Gardner and Ben Swift - bibtex: "@inproceedings{cmartin2015,\n abstract = {We present and evaluate a novel\ - \ interface for tracking ensemble performances on touch-screens. The system uses\ - \ a Random Forest classifier to extract touch-screen gestures and transition matrix\ - \ statistics. It analyses the resulting gesture-state sequences across an ensemble\ - \ of performers. A series of specially designed iPad apps respond to this real-time\ - \ analysis of free-form gestural performances with calculated modifications to\ - \ their musical interfaces. We describe our system and evaluate it through cross-validation\ - \ and profiling as well as concert experience.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Charles Martin and Henry Gardner and Ben Swift},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179130},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {359--364},\n\ - \ publisher = {Louisiana State University},\n title = {Tracking Ensemble Performance\ - \ on Touch-Screens with Gesture Classification and Transition Matrices},\n url\ - \ = {http://www.nime.org/proceedings/2015/nime2015_242.pdf},\n year = {2015}\n\ + ID: Deleflie2010 + abstract: 'The tools for spatial composition typically model just a small subset + of the spatial audio cues known to researchers. As composers explore this medium + it has become evident that the nature of spatial sound perception is complex. + Yet interfaces for spatial composition are often simplistic and the end results + can be disappointing. This paper presents an interface that is designed to liberate + the composer from thinking of spatialised sound as points in space. Instead, visual + images are used to define sound in terms of shape, size and location. 
Images can + be sequenced into video, thereby creating rich and complex temporal soundscapes. + The interface offers both the ability to craft soundscapes and also compose their + evolution in time. ' + address: 'Sydney, Australia' + author: 'Deleflie, Etienne and Schiemer, Greg' + bibtex: "@inproceedings{Deleflie2010,\n abstract = {The tools for spatial composition\ + \ typically model just a small subset of the spatial audio cues known to researchers.\ + \ As composers explore this medium it has become evident that the nature of spatial\ + \ sound perception is complex. Yet interfaces for spatial composition are often\ + \ simplistic and the end results can be disappointing. This paper presents an\ + \ interface that is designed to liberate the composer from thinking of spatialised\ + \ sound as points in space. Instead, visual images are used to define sound in\ + \ terms of shape, size and location. Images can be sequenced into video, thereby\ + \ creating rich and complex temporal soundscapes. The interface offers both the\ + \ ability to craft soundscapes and also compose their evolution in time. 
},\n\ + \ address = {Sydney, Australia},\n author = {Deleflie, Etienne and Schiemer, Greg},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177753},\n issn = {2220-4806},\n\ + \ keywords = {Spatial audio, surround sound, ambisonics, granular synthesis, decorrelation,\ + \ diffusion.},\n pages = {130--135},\n title = {Images as Spatial Sound Maps},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_130.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179130 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177753 issn: 2220-4806 - month: May - pages: 359--364 - publisher: Louisiana State University - title: Tracking Ensemble Performance on Touch-Screens with Gesture Classification - and Transition Matrices - url: http://www.nime.org/proceedings/2015/nime2015_242.pdf - year: 2015 + keywords: 'Spatial audio, surround sound, ambisonics, granular synthesis, decorrelation, + diffusion.' + pages: 130--135 + title: Images as Spatial Sound Maps + url: http://www.nime.org/proceedings/2010/nime2010_130.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: byuksel2015 - abstract: 'We present a novel brain-computer interface (BCI) integrated with a musical - instrument that adapts implicitly (with no extra effort from user) to users'' - changing cognitive state during musical improvisation. Most previous musical BCI - systems use either a mapping of brainwaves to create audio signals or use explicit - brain signals to control some aspect of the music. Such systems do not take advantage - of higher level semantically meaningful brain data which could be used in adaptive - systems or without detracting from the attention of the user. 
We present a new - type of real-time BCI that assists users in musical improvisation by adapting - to users'' measured cognitive workload implicitly. Our system advances the state - of the art in this area in three ways: 1) We demonstrate that cognitive workload - can be classified in real-time while users play the piano using functional near-infrared - spectroscopy. 2) We build a real-time, implicit system using this brain signal - that musically adapts to what users are playing. 3) We demonstrate that users - prefer this novel musical instrument over other conditions and report that they - feel more creative.' - address: 'Baton Rouge, Louisiana, USA' - author: Beste Filiz Yuksel and Daniel Afergan and Evan Peck and Garth Griffin and - Lane Harrison and Nick Chen and Remco Chang and Robert Jacob - bibtex: "@inproceedings{byuksel2015,\n abstract = {We present a novel brain-computer\ - \ interface (BCI) integrated with a musical instrument that adapts implicitly\ - \ (with no extra effort from user) to users' changing cognitive state during musical\ - \ improvisation. Most previous musical BCI systems use either a mapping of brainwaves\ - \ to create audio signals or use explicit brain signals to control some aspect\ - \ of the music. Such systems do not take advantage of higher level semantically\ - \ meaningful brain data which could be used in adaptive systems or without detracting\ - \ from the attention of the user. We present a new type of real-time BCI that\ - \ assists users in musical improvisation by adapting to users' measured cognitive\ - \ workload implicitly. Our system advances the state of the art in this area in\ - \ three ways: 1) We demonstrate that cognitive workload can be classified in real-time\ - \ while users play the piano using functional near-infrared spectroscopy. 2) We\ - \ build a real-time, implicit system using this brain signal that musically adapts\ - \ to what users are playing. 
3) We demonstrate that users prefer this novel musical\ - \ instrument over other conditions and report that they feel more creative.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {{Beste Filiz} Yuksel and\ - \ Daniel Afergan and Evan Peck and Garth Griffin and Lane Harrison and Nick Chen\ - \ and Remco Chang and Robert Jacob},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1181418},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {136--139},\n publisher = {Louisiana State University},\n title\ - \ = {BRAAHMS: A Novel Adaptive Musical Interface Based on Users' Cognitive State},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_243.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/243/0243-file1.mp4},\n year = {2015}\n\ - }\n" + ID: Schlei2010 + abstract: 'Multi-point devices are rapidly becoming a practical interface choice + for electronic musicians. Interfaces that generate multiple simultaneous streams + of point data present a unique mapping challenge. This paper describes an analysis + system for point relationships that acts as a bridge between raw streams of multi-point + data and the instruments they control, using a multipoint trackpad to test various + configurations. The aim is to provide a practical approach for instrument programmers + working with multi-point tools, while highlighting the difference between mapping + systems based on point coordinate streams, grid evaluations, or object interaction + and mapping systems based on multi-point data relationships. ' + address: 'Sydney, Australia' + author: 'Schlei, Kevin' + bibtex: "@inproceedings{Schlei2010,\n abstract = {Multi-point devices are rapidly\ + \ becoming a practical interface choice for electronic musicians. 
Interfaces that\ + \ generate multiple simultaneous streams of point data present a unique mapping\ + \ challenge. This paper describes an analysis system for point relationships that\ + \ acts as a bridge between raw streams of multi-point data and the instruments\ + \ they control, using a multipoint trackpad to test various configurations. The\ + \ aim is to provide a practical approach for instrument programmers working with\ + \ multi-point tools, while highlighting the difference between mapping systems\ + \ based on point coordinate streams, grid evaluations, or object interaction and\ + \ mapping systems based on multi-point data relationships. },\n address = {Sydney,\ + \ Australia},\n author = {Schlei, Kevin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177891},\n\ + \ issn = {2220-4806},\n keywords = {Multi-point, multi-touch interface, instrument\ + \ mapping, multi-point data analysis, trackpad instrument},\n pages = {136--139},\n\ + \ title = {Relationship-Based Instrument Mapping of Multi-Point Data Streams Using\ + \ a Trackpad Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_136.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1181418 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177891 issn: 2220-4806 - month: May + keywords: 'Multi-point, multi-touch interface, instrument mapping, multi-point data + analysis, trackpad instrument' pages: 136--139 - publisher: Louisiana State University - title: 'BRAAHMS: A Novel Adaptive Musical Interface Based on Users'' Cognitive State' - url: http://www.nime.org/proceedings/2015/nime2015_243.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/243/0243-file1.mp4 - year: 2015 + title: Relationship-Based Instrument Mapping of Multi-Point Data Streams Using a + Trackpad Interface + url: 
http://www.nime.org/proceedings/2010/nime2010_136.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: ahindle2015 - abstract: ' Cloud computing potentially ushers in a new era of computer music performance - with exceptionally large computer music instruments consisting of 10s to 100s - of virtual machines which we propose to call a `cloud-orchestra''. Cloud computing - allows for the rapid provisioning of resources, but to deploy such a complicated - and interconnected network of software synthesizers in the cloud requires a lot - of manual work, system administration knowledge, and developer/operator skills. - This is a barrier to computer musicians whose goal is to produce and perform music, - and not to administer 100s of computers. This work discusses the issues facing - cloud-orchestra deployment and offers an abstract solution and a concrete implementation. - The abstract solution is to generate cloud-orchestra deployment plans by allowing - computer musicians to model their network of synthesizers and to describe their - resources. A model optimizer will compute near-optimal deployment plans to synchronize, - deploy, and orchestrate the start-up of a complex network of synthesizers deployed - to many computers. This model driven development approach frees computer musicians - from much of the hassle of deployment and allocation. Computer musicians can focus - on the configuration of musical components and leave the resource allocation up - to the modelling software to optimize.' - address: 'Baton Rouge, Louisiana, USA' - author: Abram Hindle - bibtex: "@inproceedings{ahindle2015,\n abstract = { Cloud computing potentially\ - \ ushers in a new era of computer music performance with exceptionally large computer\ - \ music instruments consisting of 10s to 100s of virtual machines which we propose\ - \ to call a `cloud-orchestra'. 
Cloud computing allows for the rapid provisioning\ - \ of resources, but to deploy such a complicated and interconnected network of\ - \ software synthesizers in the cloud requires a lot of manual work, system administration\ - \ knowledge, and developer/operator skills. This is a barrier to computer musicians\ - \ whose goal is to produce and perform music, and not to administer 100s of computers.\ - \ This work discusses the issues facing cloud-orchestra deployment and offers\ - \ an abstract solution and a concrete implementation. The abstract solution is\ - \ to generate cloud-orchestra deployment plans by allowing computer musicians\ - \ to model their network of synthesizers and to describe their resources. A model\ - \ optimizer will compute near-optimal deployment plans to synchronize, deploy,\ - \ and orchestrate the start-up of a complex network of synthesizers deployed to\ - \ many computers. This model driven development approach frees computer musicians\ - \ from much of the hassle of deployment and allocation. Computer musicians can\ - \ focus on the configuration of musical components and leave the resource allocation\ - \ up to the modelling software to optimize.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Abram Hindle},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179090},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {121--125},\n publisher = {Louisiana State University},\n title\ - \ = {Orchestrating Your Cloud Orchestra},\n url = {http://www.nime.org/proceedings/2015/nime2015_244.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/244/0244-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Wyse2010 + abstract: 'An important part of building interactive sound models is designing the + interface and control strategy. 
The multidimensional structure of the gestures + natural for a musical or physical interface may have little obvious relationship + to the parameters that a sound synthesis algorithm exposes for control. A common + situation arises when there is a nonlinear synthesis technique for which a traditional + instrumental interface with quasi-independent control of pitch and expression + is desired. This paper presents a semi-automatic meta-modeling tool called the + Instrumentalizer for embedding arbitrary synthesis algorithms in a control structure + that exposes traditional instrument controls for pitch and expression. ' + address: 'Sydney, Australia' + author: 'Wyse, Lonce and Duy, Nguyen D.' + bibtex: "@inproceedings{Wyse2010,\n abstract = {An important part of building interactive\ + \ sound models is designing the interface and control strategy. The multidimensional\ + \ structure of the gestures natural for a musical or physical interface may have\ + \ little obvious relationship to the parameters that a sound synthesis algorithm\ + \ exposes for control. A common situation arises when there is a nonlinear synthesis\ + \ technique for which a traditional instrumental interface with quasi-independent\ + \ control of pitch and expression is desired. This paper presents a semi-automatic\ + \ meta-modeling tool called the Instrumentalizer for embedding arbitrary synthesis\ + \ algorithms in a control structure that exposes traditional instrument controls\ + \ for pitch and expression. 
},\n address = {Sydney, Australia},\n author = {Wyse,\ + \ Lonce and Duy, Nguyen D.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177925},\n\ + \ issn = {2220-4806},\n keywords = {Musical interface, parameter mapping, expressive\ + \ control.},\n pages = {140--143},\n title = {Instrumentalizing Synthesis Models},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_140.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179090 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177925 issn: 2220-4806 - month: May - pages: 121--125 - publisher: Louisiana State University - title: Orchestrating Your Cloud Orchestra - url: http://www.nime.org/proceedings/2015/nime2015_244.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/244/0244-file1.mp4 - year: 2015 + keywords: 'Musical interface, parameter mapping, expressive control.' + pages: 140--143 + title: Instrumentalizing Synthesis Models + url: http://www.nime.org/proceedings/2010/nime2010_140.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: apiepenbrink2015 - abstract: 'We present the Bistable Resonator Cymbal, a type of actuated acoustic - instrument which augments a conventional cymbal with feedback-induced resonance. - The system largely employs standard, commercially-available sound reinforcement - and signal processing hardware and software, and no permanent modifications to - the cymbal are needed. Several types of cymbals may be used, each capable of producing - a number of physical audio effects. Cymbal acoustics, implementation, stability - issues, interaction behavior, and sonic results are discussed.' 
- address: 'Baton Rouge, Louisiana, USA' - author: Andrew Piepenbrink and Matthew Wright - bibtex: "@inproceedings{apiepenbrink2015,\n abstract = {We present the Bistable\ - \ Resonator Cymbal, a type of actuated acoustic instrument which augments a conventional\ - \ cymbal with feedback-induced resonance. The system largely employs standard,\ - \ commercially-available sound reinforcement and signal processing hardware and\ - \ software, and no permanent modifications to the cymbal are needed. Several types\ - \ of cymbals may be used, each capable of producing a number of physical audio\ - \ effects. Cymbal acoustics, implementation, stability issues, interaction behavior,\ - \ and sonic results are discussed.},\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Andrew Piepenbrink and Matthew Wright},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179154},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {227--230},\n publisher = {Louisiana\ - \ State University},\n title = {The Bistable Resonator Cymbal: An Actuated Acoustic\ - \ Instrument Displaying Physical Audio Effects},\n url = {http://www.nime.org/proceedings/2015/nime2015_245.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/245/0245-file1.mov},\n urlsuppl2\ - \ = {http://www.nime.org/proceedings/2015/245/0245-file2.zip},\n year = {2015}\n\ - }\n" + ID: Cassinelli2010 + abstract: 'scoreLight is a playful musical instrument capable of generating sound + from the lines of drawings as well as from theedges of three-dimensional objects + nearby (including everyday objects, sculptures and architectural details, but + alsothe performer''s hands or even the moving silhouettes ofdancers). 
There is + no camera nor projector: a laser spotexplores shapes as a pick-up head would search + for soundover the surface of a vinyl record --- with the significant difference + that the groove is generated by the contours of thedrawing itself.' + address: 'Sydney, Australia' + author: 'Cassinelli, Alavaro and Kuribara, Yusaku and Zerroug, Alexis and Ishikawa, + Masatoshi and Manabe, Daito' + bibtex: "@inproceedings{Cassinelli2010,\n abstract = {scoreLight is a playful musical\ + \ instrument capable of generating sound from the lines of drawings as well as\ + \ from theedges of three-dimensional objects nearby (including everyday objects,\ + \ sculptures and architectural details, but alsothe performer's hands or even\ + \ the moving silhouettes ofdancers). There is no camera nor projector: a laser\ + \ spotexplores shapes as a pick-up head would search for soundover the surface\ + \ of a vinyl record --- with the significant difference that the groove is generated\ + \ by the contours of thedrawing itself.},\n address = {Sydney, Australia},\n author\ + \ = {Cassinelli, Alavaro and Kuribara, Yusaku and Zerroug, Alexis and Ishikawa,\ + \ Masatoshi and Manabe, Daito},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177739},\n\ + \ issn = {2220-4806},\n keywords = {H5.2 [User Interfaces] interaction styles\ + \ / H.5.5 [Sound and Music Computing] Methodologies and techniques / J.5 [Arts\ + \ and Humanities] performing arts},\n pages = {144--149},\n title = {scoreLight\ + \ : Playing with a Human-Sized Laser Pick-Up},\n url = {http://www.nime.org/proceedings/2010/nime2010_144.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179154 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177739 issn: 2220-4806 - month: May - pages: 227--230 - publisher: Louisiana State 
University - title: 'The Bistable Resonator Cymbal: An Actuated Acoustic Instrument Displaying - Physical Audio Effects' - url: http://www.nime.org/proceedings/2015/nime2015_245.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/245/0245-file1.mov - urlsuppl2: http://www.nime.org/proceedings/2015/245/0245-file2.zip - year: 2015 + keywords: 'H5.2 [User Interfaces] interaction styles / H.5.5 [Sound and Music Computing] + Methodologies and techniques / J.5 [Arts and Humanities] performing arts' + pages: 144--149 + title: 'scoreLight : Playing with a Human-Sized Laser Pick-Up' + url: http://www.nime.org/proceedings/2010/nime2010_144.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: abergsland2015 - abstract: 'The authors have developed a new hardware/software device for persons - with disabilities (the MotionComposer), and in the process created a number of - interactive dance pieces for non-disabled professional dancers. The paper briefly - describes the hardware and motion tracking software of the device before going - into more detail concerning the mapping strategies and sound design applied to - three interactive dance pieces. The paper concludes by discussing a particular - philosophy championing transparency and intuitiveness (clear causality) in the - interactive relationship, which the authors apply to both the device and to the - pieces that came from it.' - address: 'Baton Rouge, Louisiana, USA' - author: Andreas Bergsland and Robert Wechsler - bibtex: "@inproceedings{abergsland2015,\n abstract = {The authors have developed\ - \ a new hardware/software device for persons with disabilities (the MotionComposer),\ - \ and in the process created a number of interactive dance pieces for non-disabled\ - \ professional dancers. The paper briefly describes the hardware and motion tracking\ - \ software of the device before going into more detail concerning the mapping\ - \ strategies and sound design applied to three interactive dance pieces. 
The paper\ - \ concludes by discussing a particular philosophy championing transparency and\ - \ intuitiveness (clear causality) in the interactive relationship, which the authors\ - \ apply to both the device and to the pieces that came from it.},\n address =\ - \ {Baton Rouge, Louisiana, USA},\n author = {Andreas Bergsland and Robert Wechsler},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179024},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {20--23},\n\ - \ publisher = {Louisiana State University},\n title = {Composing Interactive Dance\ - \ Pieces for the MotionComposer, a device for Persons with Disabilities},\n url\ - \ = {http://www.nime.org/proceedings/2015/nime2015_246.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/246/0246-file2.mp4},\n\ - \ urlsuppl2 = {http://www.nime.org/proceedings/2015/246/La_Danse_II.mp4},\n urlsuppl3\ - \ = {http://www.nime.org/proceedings/2015/246/SongShanMountain-SD.mp4},\n year\ - \ = {2015}\n}\n" + ID: Yerkes2010 + abstract: Disky is a computer hard drive re-purposed into a do-it-yourself USB turntable + controller that offers high resolution and low latency for controlling parameters + of multimedia performance software. Disky is a response to the challenge “re-purpose + something that is often discarded and share it with the do-it-yourself community + to promote reuse!” + address: 'Sydney, Australia' + author: 'Yerkes, Karl and Shear, Greg and Wright, Matthew' + bibtex: "@inproceedings{Yerkes2010,\n abstract = {Disky is a computer hard drive\ + \ re-purposed into a do-it-yourself USB turntable controller that offers high\ + \ resolution and low latency for controlling parameters of multimedia performance\ + \ software. 
Disky is a response to the challenge “re-purpose something that is\ + \ often discarded and share it with the do-it-yourself community to promote reuse!”},\n\ + \ address = {Sydney, Australia},\n author = {Yerkes, Karl and Shear, Greg and\ + \ Wright, Matthew},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177929},\n\ + \ issn = {2220-4806},\n keywords = {turntable, dial, encoder, re-purposed, hard\ + \ drive, scratch-ing, inherent dynamics, DIY},\n pages = {150--155},\n title =\ + \ {Disky : a DIY Rotational Interface with Inherent Dynamics},\n url = {http://www.nime.org/proceedings/2010/nime2010_150.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179024 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177929 issn: 2220-4806 - month: May - pages: 20--23 - publisher: Louisiana State University - title: 'Composing Interactive Dance Pieces for the MotionComposer, a device for - Persons with Disabilities' - url: http://www.nime.org/proceedings/2015/nime2015_246.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/246/0246-file2.mp4 - urlsuppl2: http://www.nime.org/proceedings/2015/246/La_Danse_II.mp4 - urlsuppl3: http://www.nime.org/proceedings/2015/246/SongShanMountain-SD.mp4 - year: 2015 + keywords: 'turntable, dial, encoder, re-purposed, hard drive, scratch-ing, inherent + dynamics, DIY' + pages: 150--155 + title: 'Disky : a DIY Rotational Interface with Inherent Dynamics' + url: http://www.nime.org/proceedings/2010/nime2010_150.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: bmccloskey2015 - abstract: 'Inclusive music activities for people with physical disabilities commonly - emphasise facilitated processes, based both on constrained gestural capabilities, - and on the simplicity of the available interfaces. 
Inclusive music processes employ - consumer controllers, computer access tools and/or specialized digital musical - instruments (DMIs). The first category reveals a design ethos identified by the - authors as artefact multiplication -- many sliders, buttons, dials and menu layers; - the latter types offer ergonomic accessibility through artefact magnification. - We present a prototype DMI that eschews artefact multiplication in pursuit of - enhanced real time creative independence. We reconceptualise the universal click-drag - interaction model via a single sensor type, which affords both binary and continuous - performance control. Accessibility is optimized via a familiar interaction model - and through customized ergonomics, but it is the mapping strategy that emphasizes - transparency and sophistication in the hierarchical correspondences between the - available gesture dimensions and expressive musical cues. Through a participatory - and progressive methodology we identify an ostensibly simple targeting gesture - rich in dynamic and reliable features: (1) contact location; (2) contact duration; - (3) momentary force; (4) continuous force, and; (5) dyad orientation. These features - are mapped onto dynamic musical cues, most notably via new mappings for vibrato - and arpeggio execution. ' - address: 'Baton Rouge, Louisiana, USA' - author: Brendan McCloskey and Brian Bridges and Frank Lyons - bibtex: "@inproceedings{bmccloskey2015,\n abstract = {Inclusive music activities\ - \ for people with physical disabilities commonly emphasise facilitated processes,\ - \ based both on constrained gestural capabilities, and on the simplicity of the\ - \ available interfaces. 
Inclusive music processes employ consumer controllers,\ - \ computer access tools and/or specialized digital musical instruments (DMIs).\ - \ The first category reveals a design ethos identified by the authors as artefact\ - \ multiplication -- many sliders, buttons, dials and menu layers; the latter types\ - \ offer ergonomic accessibility through artefact magnification. We present a prototype\ - \ DMI that eschews artefact multiplication in pursuit of enhanced real time creative\ - \ independence. We reconceptualise the universal click-drag interaction model\ - \ via a single sensor type, which affords both binary and continuous performance\ - \ control. Accessibility is optimized via a familiar interaction model and through\ - \ customized ergonomics, but it is the mapping strategy that emphasizes transparency\ - \ and sophistication in the hierarchical correspondences between the available\ - \ gesture dimensions and expressive musical cues. Through a participatory and\ - \ progressive methodology we identify an ostensibly simple targeting gesture rich\ - \ in dynamic and reliable features: (1) contact location; (2) contact duration;\ - \ (3) momentary force; (4) continuous force, and; (5) dyad orientation. These\ - \ features are mapped onto dynamic musical cues, most notably via new mappings\ - \ for vibrato and arpeggio execution. 
},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Brendan McCloskey and Brian Bridges and Frank Lyons},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179132},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {24--27},\n\ - \ publisher = {Louisiana State University},\n title = {Accessibility and dimensionalty:\ - \ enhanced real-time creative independence for digital musicians with quadriplegic\ - \ cerebral palsy},\n url = {http://www.nime.org/proceedings/2015/nime2015_250.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/250/0250-file1.zip},\n year\ - \ = {2015}\n}\n" + ID: Solis2010 + abstract: 'Since 2007, our research is related to the development of an anthropomorphic + saxophonist robot, which it has been designed to imitate the saxophonist playing + by mechanically reproducing the organs involved for playing a saxophone. Our research + aims in understanding the motor control from an engineering point of view and + enabling the communication. In this paper, the Waseda Saxophone Robot No. 2 (WAS-2) + which is composed by 22-DOFs is detailed. The lip mechanism of WAS-2 has been + designed with 3-DOFs to control the motion of the lower, upper and sideway lips. + In addition, a human-like hand (16 DOF-s) has been designed to enable to play + all the keys of the instrument. Regarding the improvement of the control system, + a feed-forward control system with dead-time compensation has been implemented + to assure the accurate control of the air pressure. In addition, the implementation + of an auditory feedback control system has been proposed and implemented in order + to adjust the positioning of the physical parameters of the components of the + robot by providing a pitch feedback and defining a recovery position (off-line). 
+ A set of experiments were carried out to verify the mechanical design improvements + and the dynamic response of the air pressure. As a result, the range of sound + pressure has been increased and the proposed control system improved the dynamic + response of the air pressure control. ' + address: 'Sydney, Australia' + author: 'Solis, Jorge and Petersen, Klaus and Yamamoto, Tetsuro and Takeuchi, Masaki + and Ishikawa, Shimpei and Takanishi, Atsuo and Hashimoto, Kunimatsu' + bibtex: "@inproceedings{Solis2010,\n abstract = {Since 2007, our research is related\ + \ to the development of an anthropomorphic saxophonist robot, which it has been\ + \ designed to imitate the saxophonist playing by mechanically reproducing the\ + \ organs involved for playing a saxophone. Our research aims in understanding\ + \ the motor control from an engineering point of view and enabling the communication.\ + \ In this paper, the Waseda Saxophone Robot No. 2 (WAS-2) which is composed by\ + \ 22-DOFs is detailed. The lip mechanism of WAS-2 has been designed with 3-DOFs\ + \ to control the motion of the lower, upper and sideway lips. In addition, a human-like\ + \ hand (16 DOF-s) has been designed to enable to play all the keys of the instrument.\ + \ Regarding the improvement of the control system, a feed-forward control system\ + \ with dead-time compensation has been implemented to assure the accurate control\ + \ of the air pressure. In addition, the implementation of an auditory feedback\ + \ control system has been proposed and implemented in order to adjust the positioning\ + \ of the physical parameters of the components of the robot by providing a pitch\ + \ feedback and defining a recovery position (off-line). A set of experiments were\ + \ carried out to verify the mechanical design improvements and the dynamic response\ + \ of the air pressure. 
As a result, the range of sound pressure has been increased\ + \ and the proposed control system improved the dynamic response of the air pressure\ + \ control. },\n address = {Sydney, Australia},\n author = {Solis, Jorge and Petersen,\ + \ Klaus and Yamamoto, Tetsuro and Takeuchi, Masaki and Ishikawa, Shimpei and Takanishi,\ + \ Atsuo and Hashimoto, Kunimatsu},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177897},\n\ + \ issn = {2220-4806},\n keywords = {Humanoid Robot, Auditory Feedback, Music,\ + \ Saxophone.},\n pages = {156--161},\n title = {Development of the Waseda Saxophonist\ + \ Robot and Implementation of an Auditory Feedback Control},\n url = {http://www.nime.org/proceedings/2010/nime2010_156.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179132 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177897 issn: 2220-4806 - month: May - pages: 24--27 - publisher: Louisiana State University - title: 'Accessibility and dimensionalty: enhanced real-time creative independence - for digital musicians with quadriplegic cerebral palsy' - url: http://www.nime.org/proceedings/2015/nime2015_250.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/250/0250-file1.zip - year: 2015 + keywords: 'Humanoid Robot, Auditory Feedback, Music, Saxophone.' + pages: 156--161 + title: Development of the Waseda Saxophonist Robot and Implementation of an Auditory + Feedback Control + url: http://www.nime.org/proceedings/2010/nime2010_156.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: anath2015 - abstract: 'In this paper the authors describe the VESBALL, which is a ball-shaped - musical interface designed for group music therapy. 
Therapy sessions take the - form of ``musical ensembles'''' comprised of individuals with Autism Spectrum - Disorder (ASD), typically led by one or more certified music therapists. VESBALL - had been developed in close consultation with therapists, clients, and other stakeholders, - and had undergone several phases of trials at a music therapy facility over a - period of 6 months. VESBALL has an advantage over other related work in terms - of its robustness, ease of operation and setup (for clients and therapists), sound - source integration, and low cost of production. The authors hope VESBALL would - positively impact the conditions of individuals with ASD, and pave way for new - research in custom-designed NIME for communities with specific therapeutic needs.' - address: 'Baton Rouge, Louisiana, USA' - author: Ajit Nath and Samson Young - bibtex: "@inproceedings{anath2015,\n abstract = {In this paper the authors describe\ - \ the VESBALL, which is a ball-shaped musical interface designed for group music\ - \ therapy. Therapy sessions take the form of ``musical ensembles'' comprised of\ - \ individuals with Autism Spectrum Disorder (ASD), typically led by one or more\ - \ certified music therapists. VESBALL had been developed in close consultation\ - \ with therapists, clients, and other stakeholders, and had undergone several\ - \ phases of trials at a music therapy facility over a period of 6 months. VESBALL\ - \ has an advantage over other related work in terms of its robustness, ease of\ - \ operation and setup (for clients and therapists), sound source integration,\ - \ and low cost of production. 
The authors hope VESBALL would positively impact\ - \ the conditions of individuals with ASD, and pave way for new research in custom-designed\ - \ NIME for communities with specific therapeutic needs.},\n address = {Baton Rouge,\ - \ Louisiana, USA},\n author = {Ajit Nath and Samson Young},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179146},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {387--391},\n publisher = {Louisiana\ - \ State University},\n title = {VESBALL: A ball-shaped instrument for music therapy},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_252.pdf},\n year = {2015}\n\ - }\n" + ID: Kapur2010 + abstract: 'This paper describes the making of a class to teach the history and art + of musical robotics. The details of the curriculum are described as well as designs + for our custom schematics for robotic solenoid driven percussion. This paper also + introduces four new robotic instruments that were built during the term of this + course. This paper also introduces the Machine Orchestra, a laptop orchestra with + ten human performers and our five robotic instruments. ' + address: 'Sydney, Australia' + author: 'Kapur, Ajay and Darling, Michael' + bibtex: "@inproceedings{Kapur2010,\n abstract = {This paper describes the making\ + \ of a class to teach the history and art of musical robotics. The details of\ + \ the curriculum are described as well as designs for our custom schematics for\ + \ robotic solenoid driven percussion. This paper also introduces four new robotic\ + \ instruments that were built during the term of this course. This paper also\ + \ introduces the Machine Orchestra, a laptop orchestra with ten human performers\ + \ and our five robotic instruments. 
},\n address = {Sydney, Australia},\n author\ + \ = {Kapur, Ajay and Darling, Michael},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177821},\n\ + \ issn = {2220-4806},\n keywords = {dartron,digital classroom,laptop orchestra,machine\ + \ orchestra,musical robotics,nime pedagogy,nime10,solenoid},\n pages = {162--165},\n\ + \ title = {A Pedagogical Paradigm for Musical Robotics},\n url = {http://www.nime.org/proceedings/2010/nime2010_162.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179146 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177821 issn: 2220-4806 - month: May - pages: 387--391 - publisher: Louisiana State University - title: 'VESBALL: A ball-shaped instrument for music therapy' - url: http://www.nime.org/proceedings/2015/nime2015_252.pdf - year: 2015 + keywords: 'dartron,digital classroom,laptop orchestra,machine orchestra,musical + robotics,nime pedagogy,nime10,solenoid' + pages: 162--165 + title: A Pedagogical Paradigm for Musical Robotics + url: http://www.nime.org/proceedings/2010/nime2010_162.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: swaloschek2015 - abstract: 'With the rapid evolution of technology, sensor aided performances and - installations have gained popularity. We identified a number of important criteria - for stage usage and artistic experimentation. These are partially met by existing - approaches, oftentimes trading off programmability for ease of use. We propose - our new sensor interface SPINE-2 that presents a comprehensive solution to these - stage requirements without that trade-off.' 
- address: 'Baton Rouge, Louisiana, USA' - author: Simon Waloschek and Aristotelis Hadjakos - bibtex: "@inproceedings{swaloschek2015,\n abstract = {With the rapid evolution of\ - \ technology, sensor aided performances and installations have gained popularity.\ - \ We identified a number of important criteria for stage usage and artistic experimentation.\ - \ These are partially met by existing approaches, oftentimes trading off programmability\ - \ for ease of use. We propose our new sensor interface SPINE-2 that presents a\ - \ comprehensive solution to these stage requirements without that trade-off.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Simon Waloschek and Aristotelis\ - \ Hadjakos},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179194},\n editor\ - \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ - \ pages = {351--354},\n publisher = {Louisiana State University},\n title = {Sensors\ - \ on Stage: Conquering the Requirements of Artistic Experiments and Live Performances},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_254.pdf},\n year = {2015}\n\ - }\n" + ID: Pan2010 + abstract: 'This paper proposes a novel method to realize an initiativeexchange for + robot. A humanoid robot plays vibraphone exchanging initiative with a human performer + by perceivingmultimodal cues in real time. It understands the initiative exchange + cues through vision and audio information.In order to achieve the natural initiative + exchange betweena human and a robot in musical performance, we built thesystem + and the software architecture and carried out the experiments for fundamental + algorithms which are necessaryto the initiative exchange.' 
+ address: 'Sydney, Australia' + author: 'Pan, Ye and Kim, Min-Gyu and Suzuki, Kenji' + bibtex: "@inproceedings{Pan2010,\n abstract = {This paper proposes a novel method\ + \ to realize an initiativeexchange for robot. A humanoid robot plays vibraphone\ + \ exchanging initiative with a human performer by perceivingmultimodal cues in\ + \ real time. It understands the initiative exchange cues through vision and audio\ + \ information.In order to achieve the natural initiative exchange betweena human\ + \ and a robot in musical performance, we built thesystem and the software architecture\ + \ and carried out the experiments for fundamental algorithms which are necessaryto\ + \ the initiative exchange.},\n address = {Sydney, Australia},\n author = {Pan,\ + \ Ye and Kim, Min-Gyu and Suzuki, Kenji},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177875},\n\ + \ issn = {2220-4806},\n keywords = {Human-robot interaction, initiative exchange,\ + \ prediction},\n pages = {166--169},\n title = {A Robot Musician Interacting with\ + \ a Human Partner through Initiative Exchange},\n url = {http://www.nime.org/proceedings/2010/nime2010_166.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179194 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177875 issn: 2220-4806 - month: May - pages: 351--354 - publisher: Louisiana State University - title: 'Sensors on Stage: Conquering the Requirements of Artistic Experiments and - Live Performances' - url: http://www.nime.org/proceedings/2015/nime2015_254.pdf - year: 2015 + keywords: 'Human-robot interaction, initiative exchange, prediction' + pages: 166--169 + title: A Robot Musician Interacting with a Human Partner through Initiative Exchange + url: http://www.nime.org/proceedings/2010/nime2010_166.pdf + year: 2010 - ENTRYTYPE: 
inproceedings - ID: amcpherson2015 - abstract: 'The implementation of digital musical instruments is often opaque to - the performer. Even when the relationship between action and sound is readily - understandable, the internal hardware or software operations that create that - relationship may be inaccessible to scrutiny or modification. This paper presents - a new approach to digital instrument design which lets the performer alter and - subvert the instrument''s internal operation through circuit-bending techniques. - The approach uses low-latency feedback loops between software and analog hardware - to expose the internal working of the instrument. Compared to the standard control - voltage approach used on analog synths, alterations to the feedback loops produce - distinctive and less predictable changes in behaviour with original artistic applications. - This paper discusses the technical foundations of the approach, its roots in hacking - and circuit bending, and case studies of its use in live performance with the - D-Box hackable instrument.' - address: 'Baton Rouge, Louisiana, USA' - author: Andrew McPherson and Victor Zappi - bibtex: "@inproceedings{amcpherson2015,\n abstract = {The implementation of digital\ - \ musical instruments is often opaque to the performer. Even when the relationship\ - \ between action and sound is readily understandable, the internal hardware or\ - \ software operations that create that relationship may be inaccessible to scrutiny\ - \ or modification. This paper presents a new approach to digital instrument design\ - \ which lets the performer alter and subvert the instrument's internal operation\ - \ through circuit-bending techniques. 
The approach uses low-latency feedback loops\ - \ between software and analog hardware to expose the internal working of the instrument.\ - \ Compared to the standard control voltage approach used on analog synths, alterations\ - \ to the feedback loops produce distinctive and less predictable changes in behaviour\ - \ with original artistic applications. This paper discusses the technical foundations\ - \ of the approach, its roots in hacking and circuit bending, and case studies\ - \ of its use in live performance with the D-Box hackable instrument.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Andrew McPherson and Victor Zappi},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179134},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {162--167},\n\ - \ publisher = {Louisiana State University},\n title = {Exposing the Scaffolding\ - \ of Digital Instruments with Hardware-Software Feedback Loops},\n url = {http://www.nime.org/proceedings/2015/nime2015_258.pdf},\n\ - \ year = {2015}\n}\n" + ID: Bukvic2010 + abstract: 'Virginia Tech Department of Music’s Digital Interactive Sound & Intermedia + Studio in collaboration with the College of Engineering and School of Visual Arts + presents the latest addition to the *Ork family, the Linux Laptop Orchestra. Apart + from maintaining compatibility with its precursors and sources of inspiration, + Princeton’s PLOrk, and Stanford’s SLOrk, L2Ork’s particular focus is on delivering + unprecedented affordability without sacrificing quality, as well as flexibility + necessary to encourage a more widespread adoption and standardization of the laptop + orchestra ensemble. 
The newfound strengths of L2Ork’s design have resulted in + opportunities in K-12 education with a particular focus on cross-pollinating STEM + and Arts, as well as research of an innovative content delivery system that can + seamlessly engage students regardless of their educational background. In this + document we discuss key components of the L2Ork initiative, their benefits, and + offer resources necessary for the creation of other Linux-based *Orks' + address: 'Sydney, Australia' + author: 'Bukvic, Ivika and Martin, Thomas and Standley, Eric and Matthews, Michael' + bibtex: "@inproceedings{Bukvic2010,\n abstract = {Virginia Tech Department of Music’s\ + \ Digital Interactive Sound & Intermedia Studio in collaboration with the College\ + \ of Engineering and School of Visual Arts presents the latest addition to the\ + \ *Ork family, the Linux Laptop Orchestra. Apart from maintaining compatibility\ + \ with its precursors and sources of inspiration, Princeton’s PLOrk, and Stanford’s\ + \ SLOrk, L2Ork’s particular focus is on delivering unprecedented affordability\ + \ without sacrificing quality, as well as flexibility necessary to encourage a\ + \ more widespread adoption and standardization of the laptop orchestra ensemble.\ + \ The newfound strengths of L2Ork’s design have resulted in opportunities in K-12\ + \ education with a particular focus on cross-pollinating STEM and Arts, as well\ + \ as research of an innovative content delivery system that can seamlessly engage\ + \ students regardless of their educational background. 
In this document we discuss\ + \ key components of the L2Ork initiative, their benefits, and offer resources\ + \ necessary for the creation of other Linux-based *Orks},\n address = {Sydney,\ + \ Australia},\n author = {Bukvic, Ivika and Martin, Thomas and Standley, Eric\ + \ and Matthews, Michael},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177731},\n\ + \ issn = {2220-4806},\n keywords = {l2ork,laptop orchestra,linux,nime10},\n pages\ + \ = {170--173},\n title = {Introducing L2Ork : Linux Laptop Orchestra},\n url\ + \ = {http://www.nime.org/proceedings/2010/nime2010_170.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179134 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177731 issn: 2220-4806 - month: May - pages: 162--167 - publisher: Louisiana State University - title: Exposing the Scaffolding of Digital Instruments with Hardware-Software Feedback - Loops - url: http://www.nime.org/proceedings/2015/nime2015_258.pdf - year: 2015 + keywords: 'l2ork,laptop orchestra,linux,nime10' + pages: 170--173 + title: 'Introducing L2Ork : Linux Laptop Orchestra' + url: http://www.nime.org/proceedings/2010/nime2010_170.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: arau2015 - abstract: 'We introduce the Peripipe, a tangible remote control for a music player - that comes in the shape of a wooden tobacco pipe. The design is based on breath - control, using sips and puffs as control commands. An atmospheric pressure sensor - in the Peripipe senses changes in the air pressure. Based on these changes, the - pipe determines when the user performs a puff, double-puff, sip, double-sip or - a long puff or long sip action, and wirelessly sends commands to a smartphone - running the music player. 
Additionally, the Peripipe provides fumeovisual feedback, - using color-illuminated smoke to display the system status. With the form factor, - the materials used, the interaction through breath, and the ephemeral feedback - we aim to emphasize the emotional component of listening to music that, in our - eyes, is not very well reflected in traditional remote controls.' - address: 'Baton Rouge, Louisiana, USA' - author: Tommy Feldt and Sarah Freilich and Shaun Mendonsa and Daniel Molin and Andreas - Rau - bibtex: "@inproceedings{arau2015,\n abstract = {We introduce the Peripipe, a tangible\ - \ remote control for a music player that comes in the shape of a wooden tobacco\ - \ pipe. The design is based on breath control, using sips and puffs as control\ - \ commands. An atmospheric pressure sensor in the Peripipe senses changes in the\ - \ air pressure. Based on these changes, the pipe determines when the user performs\ - \ a puff, double-puff, sip, double-sip or a long puff or long sip action, and\ - \ wirelessly sends commands to a smartphone running the music player. Additionally,\ - \ the Peripipe provides fumeovisual feedback, using color-illuminated smoke to\ - \ display the system status. 
With the form factor, the materials used, the interaction\ - \ through breath, and the ephemeral feedback we aim to emphasize the emotional\ - \ component of listening to music that, in our eyes, is not very well reflected\ - \ in traditional remote controls.},\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Tommy Feldt and Sarah Freilich and Shaun Mendonsa and Daniel Molin\ - \ and Andreas Rau},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179058},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {34--35},\n publisher = {Louisiana State University},\n title\ - \ = {Puff, Puff, Play: A Sip-And-Puff Remote Control for Music Playback},\n url\ - \ = {http://www.nime.org/proceedings/2015/nime2015_260.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/260/0260-file1.mp4},\n\ - \ year = {2015}\n}\n" + ID: Bryan2010 + abstract: 'The Mobile Music (MoMu) toolkit is a new open-sourcesoftware development + toolkit focusing on musical interaction design for mobile phones. The toolkit, + currently implemented for iPhone OS, emphasizes usability and rapidprototyping + with the end goal of aiding developers in creating real-time interactive audio + applications. Simple andunified access to onboard sensors along with utilities + forcommon tasks found in mobile music development are provided. The toolkit has + been deployed and evaluated in theStanford Mobile Phone Orchestra (MoPhO) and + serves asthe primary software platform in a new course exploringmobile music.' + address: 'Sydney, Australia' + author: 'Bryan, Nicholas J. and Herrera, Jorge and Oh, Jieun and Wang, Ge' + bibtex: "@inproceedings{Bryan2010,\n abstract = {The Mobile Music (MoMu) toolkit\ + \ is a new open-sourcesoftware development toolkit focusing on musical interaction\ + \ design for mobile phones. 
The toolkit, currently implemented for iPhone OS,\ + \ emphasizes usability and rapidprototyping with the end goal of aiding developers\ + \ in creating real-time interactive audio applications. Simple andunified access\ + \ to onboard sensors along with utilities forcommon tasks found in mobile music\ + \ development are provided. The toolkit has been deployed and evaluated in theStanford\ + \ Mobile Phone Orchestra (MoPhO) and serves asthe primary software platform in\ + \ a new course exploringmobile music.},\n address = {Sydney, Australia},\n author\ + \ = {Bryan, Nicholas J. and Herrera, Jorge and Oh, Jieun and Wang, Ge},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177725},\n issn = {2220-4806},\n keywords\ + \ = {instrument design, iPhone, mobile music, software develop- ment, toolkit},\n\ + \ pages = {174--177},\n title = {MoMu : A Mobile Music Toolkit},\n url = {http://www.nime.org/proceedings/2010/nime2010_174.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179058 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177725 issn: 2220-4806 - month: May - pages: 34--35 - publisher: Louisiana State University - title: 'Puff, Puff, Play: A Sip-And-Puff Remote Control for Music Playback' - url: http://www.nime.org/proceedings/2015/nime2015_260.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/260/0260-file1.mp4 - year: 2015 + keywords: 'instrument design, iPhone, mobile music, software develop- ment, toolkit' + pages: 174--177 + title: 'MoMu : A Mobile Music Toolkit' + url: http://www.nime.org/proceedings/2010/nime2010_174.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: ndalessandro2015 - abstract: 'In this paper, we describe the prototyping of two musical interfaces - that use the LeapMotion camera in conjunction with two different touch surfaces: - 
a Wacom tablet and a transparent PVC sheet. In the Wacom use case, the camera - is between the hand and the surface. In the PVC use case, the camera is under - the transparent sheet and tracks the hand through it. The aim of this research - is to explore hovering motion surrounding the touch interaction on the surface - and include properties of such motion in the musical interaction. We present our - unifying software, called AirPiano, that discretises the 3D space into ''keys'' - and proposes several mapping strategies with the available dimensions. These control - dimensions are mapped onto a modified HandSketch sound engine that achieves multitimbral - pitch-synchronous point cloud granulation.' - address: 'Baton Rouge, Louisiana, USA' - author: Nicolas d'Alessandro and Joëlle Tilmanne and Ambroise Moreau and Antonin - Puleo - bibtex: "@inproceedings{ndalessandro2015,\n abstract = {In this paper, we describe\ - \ the prototyping of two musical interfaces that use the LeapMotion camera in\ - \ conjunction with two different touch surfaces: a Wacom tablet and a transparent\ - \ PVC sheet. In the Wacom use case, the camera is between the hand and the surface.\ - \ In the PVC use case, the camera is under the transparent sheet and tracks the\ - \ hand through it. The aim of this research is to explore hovering motion surrounding\ - \ the touch interaction on the surface and include properties of such motion in\ - \ the musical interaction. We present our unifying software, called AirPiano,\ - \ that discretises the 3D space into 'keys' and proposes several mapping strategies\ - \ with the available dimensions. 
These control dimensions are mapped onto a modified\ - \ HandSketch sound engine that achieves multitimbral pitch-synchronous point cloud\ - \ granulation.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Nicolas\ - \ d'Alessandro and Jo\\''elle Tilmanne and Ambroise Moreau and Antonin Puleo},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1181434},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {255--258},\n\ - \ publisher = {Louisiana State University},\n title = {AirPiano: A Multi-Touch\ - \ Keyboard with Hovering Control},\n url = {http://www.nime.org/proceedings/2015/nime2015_261.pdf},\n\ - \ year = {2015}\n}\n" + ID: Dahl2010 + abstract: 'The use of metaphor has a prominent role in HCI, both as a device to + help users understand unfamiliar technologies, and as a tool to guide the design + process. Creators of new computerbased instruments face similar design challenges + as those in HCI. In the course of creating a new piece for Mobile Phone Orchestra + we propose the metaphor of a sound as a ball and explore the interactions and + sound mappings it suggests. These lead to the design of a gesture-controlled instrument + that allows players to "bounce" sounds, "throw" them to other players, and compete + in a game to "knock out" others'' sounds. We composed the piece SoundBounce based + on these interactions, and note that audiences seem to find performances of the + piece accessible and engaging, perhaps due to the visibility of the metaphor. ' + address: 'Sydney, Australia' + author: 'Dahl, Luke and Wang, Ge' + bibtex: "@inproceedings{Dahl2010,\n abstract = {The use of metaphor has a prominent\ + \ role in HCI, both as a device to help users understand unfamiliar technologies,\ + \ and as a tool to guide the design process. 
Creators of new computerbased instruments\ + \ face similar design challenges as those in HCI. In the course of creating a\ + \ new piece for Mobile Phone Orchestra we propose the metaphor of a sound as a\ + \ ball and explore the interactions and sound mappings it suggests. These lead\ + \ to the design of a gesture-controlled instrument that allows players to \"bounce\"\ + \ sounds, \"throw\" them to other players, and compete in a game to \"knock out\"\ + \ others' sounds. We composed the piece SoundBounce based on these interactions,\ + \ and note that audiences seem to find performances of the piece accessible and\ + \ engaging, perhaps due to the visibility of the metaphor. },\n address = {Sydney,\ + \ Australia},\n author = {Dahl, Luke and Wang, Ge},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177751},\n issn = {2220-4806},\n keywords = {Mobile music,\ + \ design, metaphor, performance, gameplay.},\n pages = {178--181},\n title = {Sound\ + \ Bounce : Physical Metaphors in Designing Mobile Music Performance},\n url =\ + \ {http://www.nime.org/proceedings/2010/nime2010_178.pdf},\n year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1181434 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177751 issn: 2220-4806 - month: May - pages: 255--258 - publisher: Louisiana State University - title: 'AirPiano: A Multi-Touch Keyboard with Hovering Control' - url: http://www.nime.org/proceedings/2015/nime2015_261.pdf - year: 2015 + keywords: 'Mobile music, design, metaphor, performance, gameplay.' 
+ pages: 178--181 + title: 'Sound Bounce : Physical Metaphors in Designing Mobile Music Performance' + url: http://www.nime.org/proceedings/2010/nime2010_178.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: ahazzardb2015 - abstract: 'We explore how to digitally augment musical instruments by connecting - them to their social histories. We describe the use of Internet of Things technologies - to connect an acoustic guitar to its digital footprint -- a record of how it was - designed, built and played. We introduce the approach of crafting interactive - decorative inlay into the body of an instrument that can then be scanned using - mobile devices to reveal its digital footprint. We describe the design and construction - of an augmented acoustic guitar called Carolan alongside activities to build its - digital footprint through documented encounters with twenty-seven players in a - variety of settings. We reveal the design challenge of mapping the different surfaces - of the instrument to various facets of its footprint so as to afford appropriate - experiences to players, audiences and technicians. We articulate an agenda for - further research on the topic of connecting instruments to their social histories, - including capturing and performing digital footprints and creating personalized - and legacy experiences.' - address: 'Baton Rouge, Louisiana, USA' - author: Steve Benford and Adrian Hazzard and Alan Chamberlain and Liming Xu - bibtex: "@inproceedings{ahazzardb2015,\n abstract = {We explore how to digitally\ - \ augment musical instruments by connecting them to their social histories. We\ - \ describe the use of Internet of Things technologies to connect an acoustic guitar\ - \ to its digital footprint -- a record of how it was designed, built and played.\ - \ We introduce the approach of crafting interactive decorative inlay into the\ - \ body of an instrument that can then be scanned using mobile devices to reveal\ - \ its digital footprint. 
We describe the design and construction of an augmented\ - \ acoustic guitar called Carolan alongside activities to build its digital footprint\ - \ through documented encounters with twenty-seven players in a variety of settings.\ - \ We reveal the design challenge of mapping the different surfaces of the instrument\ - \ to various facets of its footprint so as to afford appropriate experiences to\ - \ players, audiences and technicians. We articulate an agenda for further research\ - \ on the topic of connecting instruments to their social histories, including\ - \ capturing and performing digital footprints and creating personalized and legacy\ - \ experiences.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Steve\ - \ Benford and Adrian Hazzard and Alan Chamberlain and Liming Xu},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179016},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {303--306},\n\ - \ publisher = {Louisiana State University},\n title = {Augmenting a Guitar with\ - \ its Digital Footprint},\n url = {http://www.nime.org/proceedings/2015/nime2015_264.pdf},\n\ - \ year = {2015}\n}\n" + ID: Essl2010a + abstract: 'Impact force is an important dimension for percussive musical instruments + such as the piano. We explore three possible mechanisms how to get impact forces + on mobile multi-touch devices: using built-in accelerometers, the pressure sensing + capability of Android phones, and external force sensing resistors. We find that + accelerometers are difficult to control for this purpose. Android''s pressure + sensing shows some promise, especially when combined with augmented playing technique. + Force sensing resistors can offer good dynamic resolution but this technology + is not currently offered in commodity devices and proper coupling of the sensor + with the applied impact is difficult. 
' + address: 'Sydney, Australia' + author: 'Essl, Georg and Rohs, Michael and Kratz, Sven' + bibtex: "@inproceedings{Essl2010a,\n abstract = {Impact force is an important dimension\ + \ for percussive musical instruments such as the piano. We explore three possible\ + \ mechanisms how to get impact forces on mobile multi-touch devices: using built-in\ + \ accelerometers, the pressure sensing capability of Android phones, and external\ + \ force sensing resistors. We find that accelerometers are difficult to control\ + \ for this purpose. Android's pressure sensing shows some promise, especially\ + \ when combined with augmented playing technique. Force sensing resistors can\ + \ offer good dynamic resolution but this technology is not currently offered in\ + \ commodity devices and proper coupling of the sensor with the applied impact\ + \ is difficult. },\n address = {Sydney, Australia},\n author = {Essl, Georg and\ + \ Rohs, Michael and Kratz, Sven},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177761},\n\ + \ issn = {2220-4806},\n keywords = {Force, impact, pressure, multi-touch, mobile\ + \ phone, mobile music making.},\n pages = {182--185},\n title = {Use the Force\ + \ (or something) --- Pressure and Pressure --- Like Input for Mobile Music Performance},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_182.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179016 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177761 issn: 2220-4806 - month: May - pages: 303--306 - publisher: Louisiana State University - title: Augmenting a Guitar with its Digital Footprint - url: http://www.nime.org/proceedings/2015/nime2015_264.pdf - year: 2015 + keywords: 'Force, impact, pressure, multi-touch, mobile phone, mobile music making.' 
+ pages: 182--185 + title: Use the Force (or something) --- Pressure and Pressure --- Like Input for + Mobile Music Performance + url: http://www.nime.org/proceedings/2010/nime2010_182.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: skestelli2015 - abstract: 'There have been more interest and research towards multisensory aspects - of sound as well as vision and movement, especially in the last two decades. An - emerging research field related with multi-sensory research is ''motor imagery'', - which could be defined as the mental representation of a movement without actual - production of muscle activity necessary for its execution. Emphasizing its close - relationship and potential future use in new digital musical instruments (DMI) - practice and reviewing literature, this paper will introduce fundamental concepts - about motor imagery (MI), various methods of measuring MI in different configurations - and summarize some important findings about MI in various studies. Following, - it will discuss how this research area is related to DMI practice and propose - potential uses of MI in this field. ' - address: 'Baton Rouge, Louisiana, USA' - author: Sair Sinan Kestelli - bibtex: "@inproceedings{skestelli2015,\n abstract = {There have been more interest\ - \ and research towards multisensory aspects of sound as well as vision and movement,\ - \ especially in the last two decades. An emerging research field related with\ - \ multi-sensory research is 'motor imagery', which could be defined as the mental\ - \ representation of a movement without actual production of muscle activity necessary\ - \ for its execution. Emphasizing its close relationship and potential future use\ - \ in new digital musical instruments (DMI) practice and reviewing literature,\ - \ this paper will introduce fundamental concepts about motor imagery (MI), various\ - \ methods of measuring MI in different configurations and summarize some important\ - \ findings about MI in various studies. 
Following, it will discuss how this research\ - \ area is related to DMI practice and propose potential uses of MI in this field.\ - \ },\n address = {Baton Rouge, Louisiana, USA},\n author = {{Sair Sinan} Kestelli},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179104},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {107--110},\n\ - \ publisher = {Louisiana State University},\n title = {Motor Imagery: What Does\ - \ It Offer for New Digital Musical Instruments?},\n url = {http://www.nime.org/proceedings/2015/nime2015_265.pdf},\n\ - \ year = {2015}\n}\n" + ID: Mills2010a + abstract: 'The evolution of networked audio technologies has created unprecedented + opportunities for musicians to improvise with instrumentalists from a diverse + range of cultures and disciplines. As network speeds increase and latency is consigned + to history, tele-musical collaboration, and in particular improvisation will be + shaped by new methodologies that respond to this potential. While networked technologies + eliminate distance in physical space, for the remote improviser, this creates + a liminality of experience through which their performance is mediated. As a first + step in understanding the conditions arising from collaboration in networked audio + platforms, this paper will examine selected case studies of improvisation in a + variety of networked interfaces. The author will examine how platform characteristics + and network conditions influence the process of collective improvisation and the + methodologies musicians are employing to negotiate their networked experiences.' 
+ address: 'Sydney, Australia' + author: 'Mills, Roger' + bibtex: "@inproceedings{Mills2010a,\n abstract = {The evolution of networked audio\ + \ technologies has created unprecedented opportunities for musicians to improvise\ + \ with instrumentalists from a diverse range of cultures and disciplines. As network\ + \ speeds increase and latency is consigned to history, tele-musical collaboration,\ + \ and in particular improvisation will be shaped by new methodologies that respond\ + \ to this potential. While networked technologies eliminate distance in physical\ + \ space, for the remote improviser, this creates a liminality of experience through\ + \ which their performance is mediated. As a first step in understanding the conditions\ + \ arising from collaboration in networked audio platforms, this paper will examine\ + \ selected case studies of improvisation in a variety of networked interfaces.\ + \ The author will examine how platform characteristics and network conditions\ + \ influence the process of collective improvisation and the methodologies musicians\ + \ are employing to negotiate their networked experiences.},\n address = {Sydney,\ + \ Australia},\n author = {Mills, Roger},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177857},\n\ + \ issn = {2220-4806},\n keywords = {improvisation, internet audio, networked collaboration,\ + \ sound art},\n pages = {186--191},\n title = {Dislocated Sound : A Survey of\ + \ Improvisation in Networked Audio Platforms},\n url = {http://www.nime.org/proceedings/2010/nime2010_186.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179104 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177857 issn: 2220-4806 - month: May - pages: 107--110 - publisher: Louisiana State University - title: 'Motor Imagery: What Does It 
Offer for New Digital Musical Instruments?' - url: http://www.nime.org/proceedings/2015/nime2015_265.pdf - year: 2015 + keywords: 'improvisation, internet audio, networked collaboration, sound art' + pages: 186--191 + title: 'Dislocated Sound : A Survey of Improvisation in Networked Audio Platforms' + url: http://www.nime.org/proceedings/2010/nime2010_186.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: hpurwins2015 - abstract: 'An interactive music instrument museum experience for children of 10-12 - years is presented. Equipped with tablet devices, the children are sent on a treasure - hunt. Based on given sound samples, the participants have to identify the right - musical instrument (harpsichord, double bass, viola) out of an instrument collection. - As the right instrument is located, a challenge of playing an application on the - tablet is initiated. This application is an interactive digital representation - of the found instrument, mimicking some of its key playing techniques, using a - simplified scrolling on screen musical notation. The musical performance of the - participant is graded on a point scale. After completion of the challenge, the - participants'' performances of the three instruments are played back simultaneously, - constituting a composition. A qualitative evaluation of the application in a focus - group interview with school children revealed that the children were more engaged - when playing with the interactive application than when only watching a music - video.' - address: 'Baton Rouge, Louisiana, USA' - author: Mikkel Jörgensen and Aske Knudsen and Thomas Wilmot and Kasper Lund and - Stefania Serafin and Hendrik Purwins - bibtex: "@inproceedings{hpurwins2015,\n abstract = {An interactive music instrument\ - \ museum experience for children of 10-12 years is presented. Equipped with tablet\ - \ devices, the children are sent on a treasure hunt. 
Based on given sound samples,\ - \ the participants have to identify the right musical instrument (harpsichord,\ - \ double bass, viola) out of an instrument collection. As the right instrument\ - \ is located, a challenge of playing an application on the tablet is initiated.\ - \ This application is an interactive digital representation of the found instrument,\ - \ mimicking some of its key playing techniques, using a simplified scrolling on\ - \ screen musical notation. The musical performance of the participant is graded\ - \ on a point scale. After completion of the challenge, the participants' performances\ - \ of the three instruments are played back simultaneously, constituting a composition.\ - \ A qualitative evaluation of the application in a focus group interview with\ - \ school children revealed that the children were more engaged when playing with\ - \ the interactive application than when only watching a music video.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Mikkel J\\''{o}rgensen and Aske\ - \ Knudsen and Thomas Wilmot and Kasper Lund and Stefania Serafin and Hendrik Purwins},\n\ + ID: Berthaut2010 + abstract: 'We present Drile, a multiprocess immersive instrument built uponthe hierarchical + live-looping technique and aimed at musical performance. This technique consists + in creating musical trees whosenodes are composed of sound effects applied to + a musical content.In the leaves, this content is a one-shot sound, whereas in + higherlevel nodes this content is composed of live-recorded sequencesof parameters + of the children nodes. Drile allows musicians tointeract efficiently with these + trees in an immersive environment.Nodes are represented as worms, which are 3D + audiovisual objects. Worms can be manipulated using 3D interaction techniques,and + several operations can be applied to the live-looping trees. Theenvironment is + composed of several virtual rooms, i.e. 
group oftrees, corresponding to specific + sounds and effects. Learning Drileis progressive since the musical control complexity + varies according to the levels in live-looping trees. Thus beginners may havelimited + control over only root worms while still obtaining musically interesting results. + Advanced users may modify the trees andmanipulate each of the worms.' + address: 'Sydney, Australia' + author: 'Berthaut, Florent and Desainte-Catherine, Myriam and Hachet, Martin' + bibtex: "@inproceedings{Berthaut2010,\n abstract = {We present Drile, a multiprocess\ + \ immersive instrument built uponthe hierarchical live-looping technique and aimed\ + \ at musical performance. This technique consists in creating musical trees whosenodes\ + \ are composed of sound effects applied to a musical content.In the leaves, this\ + \ content is a one-shot sound, whereas in higherlevel nodes this content is composed\ + \ of live-recorded sequencesof parameters of the children nodes. Drile allows\ + \ musicians tointeract efficiently with these trees in an immersive environment.Nodes\ + \ are represented as worms, which are 3D audiovisual objects. Worms can be manipulated\ + \ using 3D interaction techniques,and several operations can be applied to the\ + \ live-looping trees. Theenvironment is composed of several virtual rooms, i.e.\ + \ group oftrees, corresponding to specific sounds and effects. Learning Drileis\ + \ progressive since the musical control complexity varies according to the levels\ + \ in live-looping trees. Thus beginners may havelimited control over only root\ + \ worms while still obtaining musically interesting results. 
Advanced users may\ + \ modify the trees andmanipulate each of the worms.},\n address = {Sydney, Australia},\n\ + \ author = {Berthaut, Florent and Desainte-Catherine, Myriam and Hachet, Martin},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178997},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {36--37},\n\ - \ publisher = {Louisiana State University},\n title = {A Mobile Music Museum Experience\ - \ for Children},\n url = {http://www.nime.org/proceedings/2015/nime2015_267.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/267/0267-file1.mov},\n year\ - \ = {2015}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1177721},\n issn = {2220-4806},\n\ + \ keywords = {Drile, immersive instrument, hierarchical live-looping, 3D interac-\ + \ tion},\n pages = {192--197},\n title = {DRILE : An Immersive Environment for\ + \ Hierarchical Live-Looping},\n url = {http://www.nime.org/proceedings/2010/nime2010_192.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178997 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177721 issn: 2220-4806 - month: May - pages: 36--37 - publisher: Louisiana State University - title: A Mobile Music Museum Experience for Children - url: http://www.nime.org/proceedings/2015/nime2015_267.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/267/0267-file1.mov - year: 2015 + keywords: 'Drile, immersive instrument, hierarchical live-looping, 3D interac- tion' + pages: 192--197 + title: 'DRILE : An Immersive Environment for Hierarchical Live-Looping' + url: http://www.nime.org/proceedings/2010/nime2010_192.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: tresch2015 - abstract: 'Audio guides and (interactive) sound walks have existed for decades. 
- Even smartphone games taking place in the real world are no longer a novelty. - But due to the lack of a sufficient middleware which fulfills the requirements - for creating this software genre, artists, game developers and institutions such - as museums are forced to implement rather similar functionality over and over - again. This paper describes the basic principles of Real World Audio (RWA), an - extendable audio game engine for targeting smartphone operating systems, which - rolls out all functionality for the generation of the above-mentioned software - genres. It combines the ability for building location-based audio walks and -guides - with the components necessary for game development. Using either the smartphone - sensors or an external sensor board for head tracking and gesture recognition, - RWA allows developers to create audio walks, audio adventures and audio role playing - games (RPG) outside in the real world.' - address: 'Baton Rouge, Louisiana, USA' - author: Thomas Resch - bibtex: "@inproceedings{tresch2015,\n abstract = {Audio guides and (interactive)\ - \ sound walks have existed for decades. Even smartphone games taking place in\ - \ the real world are no longer a novelty. But due to the lack of a sufficient\ - \ middleware which fulfills the requirements for creating this software genre,\ - \ artists, game developers and institutions such as museums are forced to implement\ - \ rather similar functionality over and over again. This paper describes the basic\ - \ principles of Real World Audio (RWA), an extendable audio game engine for targeting\ - \ smartphone operating systems, which rolls out all functionality for the generation\ - \ of the above-mentioned software genres. It combines the ability for building\ - \ location-based audio walks and -guides with the components necessary for game\ - \ development. 
Using either the smartphone sensors or an external sensor board\ - \ for head tracking and gesture recognition, RWA allows developers to create audio\ - \ walks, audio adventures and audio role playing games (RPG) outside in the real\ - \ world.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Thomas Resch},\n\ + ID: Fencott2010 + abstract: 'This research is concerned with issues of privacy, awareness and the + emergence of roles in the process of digitallymediated collaborative music making. + Specifically we areinterested in how providing collaborators with varying degrees + of privacy and awareness of one another influencesthe group interaction. A study + is presented whereby ninegroups of co-located musicians compose music together + using three different interface designs. We use qualitative andquantitative data + to study and characterise the musician''sinteraction with each other and the software. + We show thatwhen made available to them, participants make extensiveuse of a private + working area to develop musical contributions before they are introduced to the + group. We also arguethat our awareness mechanisms change the perceived quality + of the musical interaction, but have no impact on theway musicians interact with + the software. We then reflecton implications for the design of new collaborative + musicmaking tools which exploit the potential of digital technologies, while at + the same time support creative musicalinteraction.' + address: 'Sydney, Australia' + author: 'Fencott, Robin and Bryan-Kinns, Nick' + bibtex: "@inproceedings{Fencott2010,\n abstract = {This research is concerned with\ + \ issues of privacy, awareness and the emergence of roles in the process of digitallymediated\ + \ collaborative music making. Specifically we areinterested in how providing collaborators\ + \ with varying degrees of privacy and awareness of one another influencesthe group\ + \ interaction. 
A study is presented whereby ninegroups of co-located musicians\ + \ compose music together using three different interface designs. We use qualitative\ + \ andquantitative data to study and characterise the musician'sinteraction with\ + \ each other and the software. We show thatwhen made available to them, participants\ + \ make extensiveuse of a private working area to develop musical contributions\ + \ before they are introduced to the group. We also arguethat our awareness mechanisms\ + \ change the perceived quality of the musical interaction, but have no impact\ + \ on theway musicians interact with the software. We then reflecton implications\ + \ for the design of new collaborative musicmaking tools which exploit the potential\ + \ of digital technologies, while at the same time support creative musicalinteraction.},\n\ + \ address = {Sydney, Australia},\n author = {Fencott, Robin and Bryan-Kinns, Nick},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1179160},\n editor = {Edgar Berdahl\ - \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {392--395},\n\ - \ publisher = {Louisiana State University},\n title = {RWA -- A Game Engine for\ - \ Real World Audio Games},\n url = {http://www.nime.org/proceedings/2015/nime2015_269.pdf},\n\ - \ year = {2015}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1177763},\n issn = {2220-4806},\n\ + \ keywords = {Awareness, Privacy, Collaboration, Music, Interaction, En- gagement,\ + \ Group Music Making, Design, Evaluation.},\n pages = {198--203},\n title = {Hey\ + \ Man, You're Invading my Personal Space ! 
Privacy and Awareness in Collaborative\ + \ Music},\n url = {http://www.nime.org/proceedings/2010/nime2010_198.pdf},\n year\ + \ = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179160 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177763 issn: 2220-4806 - month: May - pages: 392--395 - publisher: Louisiana State University - title: RWA -- A Game Engine for Real World Audio Games - url: http://www.nime.org/proceedings/2015/nime2015_269.pdf - year: 2015 + keywords: 'Awareness, Privacy, Collaboration, Music, Interaction, En- gagement, + Group Music Making, Design, Evaluation.' + pages: 198--203 + title: 'Hey Man, You''re Invading my Personal Space ! Privacy and Awareness in Collaborative + Music' + url: http://www.nime.org/proceedings/2010/nime2010_198.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: ajense2015 - abstract: 'This paper looks at the design process of the WamBam; a self-contained - electronic hand-drum meant for music therapy sessions with severely intellectually - disabled clients. Using co-reflection with four musical therapists and literature - research, design guidelines related to this specific user-group and context are - formed. This leads to a concept of which the most relevant aspects are discussed, - before describing the user studies. Finally, the plan for the redesign is discussed. - The WamBam has unique possibilities to deal with the low motor skills and cognitive - abilities of severely intellectually disabled users while music therapists benefit - from the greater versatility and portability of this design compared to other - musical instruments. A prototype was tested with twenty users. Participants proved - to be triggered positively by the WamBam, but three limiting usability issues - were found. These issues were used as the fundamentals for a second prototype. - Music therapists confirm the value of the WamBam for their practice. 
' - address: 'Baton Rouge, Louisiana, USA' - author: Arvid Jense and Hans Leeuw - bibtex: "@inproceedings{ajense2015,\n abstract = {This paper looks at the design\ - \ process of the WamBam; a self-contained electronic hand-drum meant for music\ - \ therapy sessions with severely intellectually disabled clients. Using co-reflection\ - \ with four musical therapists and literature research, design guidelines related\ - \ to this specific user-group and context are formed. This leads to a concept\ - \ of which the most relevant aspects are discussed, before describing the user\ - \ studies. Finally, the plan for the redesign is discussed. The WamBam has unique\ - \ possibilities to deal with the low motor skills and cognitive abilities of severely\ - \ intellectually disabled users while music therapists benefit from the greater\ - \ versatility and portability of this design compared to other musical instruments.\ - \ A prototype was tested with twenty users. Participants proved to be triggered\ - \ positively by the WamBam, but three limiting usability issues were found. These\ - \ issues were used as the fundamentals for a second prototype. Music therapists\ - \ confirm the value of the WamBam for their practice. },\n address = {Baton Rouge,\ - \ Louisiana, USA},\n author = {Arvid Jense and Hans Leeuw},\n booktitle = {Proceedings\ + ID: Martin2010a + abstract: 'In 2009 the cross artform group, Last Man to Die, presenteda series of + performances using new interfaces and networkedperformance to integrate the three + artforms of its members(actor, Hanna Cormick, visual artist, Benjamin Forster + andpercussionist, Charles Martin). This paper explains ourartistic motivations + and design for a computer vision surfaceand networked heartbeat sensor as well + as the experience ofmounting our first major work, Vital LMTD.' 
+ address: 'Sydney, Australia' + author: 'Martin, Charles and Forster, Benjamin and Cormick, Hanna' + bibtex: "@inproceedings{Martin2010a,\n abstract = {In 2009 the cross artform group,\ + \ Last Man to Die, presenteda series of performances using new interfaces and\ + \ networkedperformance to integrate the three artforms of its members(actor, Hanna\ + \ Cormick, visual artist, Benjamin Forster andpercussionist, Charles Martin).\ + \ This paper explains ourartistic motivations and design for a computer vision\ + \ surfaceand networked heartbeat sensor as well as the experience ofmounting our\ + \ first major work, Vital LMTD.},\n address = {Sydney, Australia},\n author =\ + \ {Martin, Charles and Forster, Benjamin and Cormick, Hanna},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179098},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {74--77},\n publisher = {Louisiana\ - \ State University},\n title = {WamBam: A case study in design for an electronic\ - \ musical instrument for severely intellectually disabled users},\n url = {http://www.nime.org/proceedings/2015/nime2015_270.pdf},\n\ - \ year = {2015}\n}\n" + \ doi = {10.5281/zenodo.1177843},\n issn = {2220-4806},\n keywords = {cross-artform\ + \ performance, networked performance, physi- cal computing},\n pages = {204--207},\n\ + \ title = {Cross-Artform Performance Using Networked Interfaces : Last Man to\ + \ Die's Vital LMTD},\n url = {http://www.nime.org/proceedings/2010/nime2010_204.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179098 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177843 issn: 2220-4806 - month: May - pages: 74--77 - publisher: Louisiana State University - title: 'WamBam: A case study in design for an electronic musical 
instrument for - severely intellectually disabled users' - url: http://www.nime.org/proceedings/2015/nime2015_270.pdf - year: 2015 + keywords: 'cross-artform performance, networked performance, physi- cal computing' + pages: 204--207 + title: 'Cross-Artform Performance Using Networked Interfaces : Last Man to Die''s + Vital LMTD' + url: http://www.nime.org/proceedings/2010/nime2010_204.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: hlimerick2015 - abstract: 'Liveness is a well-known problem with Digital Musical Instruments (DMIs). - When used in performances, DMIs provide less visual information than acoustic - instruments, preventing the audience from understanding how the musicians influence - the music. In this paper, we look at this issue through the lens of causality. - More specifically, we investigate the attribution of causality by an external - observer to a performer, relying on the theory of apparent mental causation. We - suggest that the perceived causality between a performer''s gestures and the musical - result is central to liveness. We present a framework for assessing attributed - causality and agency to a performer, based on a psychological theory which suggests - three criteria for inferred causality. These criteria then provide the basis of - an experimental study investigating the effect of visual augmentations on audience''s - inferred causality. The results provide insights on how the visual component of - performances with DMIs impacts the audience''s causal inferences about the performer. - In particular we show that visual augmentations help highlight the influence of - the musician when parts of the music are automated, and help clarify complex mappings - between gestures and sounds. Finally we discuss the potential wider implications - for assessing liveness in the design of new musical interfaces.' 
- address: 'Baton Rouge, Louisiana, USA' - author: Florent Berthaut and David Coyle and James Moore and Hannah Limerick - bibtex: "@inproceedings{hlimerick2015,\n abstract = {Liveness is a well-known problem\ - \ with Digital Musical Instruments (DMIs). When used in performances, DMIs provide\ - \ less visual information than acoustic instruments, preventing the audience from\ - \ understanding how the musicians influence the music. In this paper, we look\ - \ at this issue through the lens of causality. More specifically, we investigate\ - \ the attribution of causality by an external observer to a performer, relying\ - \ on the theory of apparent mental causation. We suggest that the perceived causality\ - \ between a performer's gestures and the musical result is central to liveness.\ - \ We present a framework for assessing attributed causality and agency to a performer,\ - \ based on a psychological theory which suggests three criteria for inferred causality.\ - \ These criteria then provide the basis of an experimental study investigating\ - \ the effect of visual augmentations on audience's inferred causality. The results\ - \ provide insights on how the visual component of performances with DMIs impacts\ - \ the audience's causal inferences about the performer. In particular we show\ - \ that visual augmentations help highlight the influence of the musician when\ - \ parts of the music are automated, and help clarify complex mappings between\ - \ gestures and sounds. 
Finally we discuss the potential wider implications for\ - \ assessing liveness in the design of new musical interfaces.},\n address = {Baton\ - \ Rouge, Louisiana, USA},\n author = {Florent Berthaut and David Coyle and James\ - \ Moore and Hannah Limerick},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179026},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {382--386},\n publisher = {Louisiana State University},\n title\ - \ = {Liveness Through the Lens of Agency and Causality},\n url = {http://www.nime.org/proceedings/2015/nime2015_272.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/272/0272-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Jensenius2010 + abstract: 'We report on a study of perceptual and acoustic featuresrelated to the + placement of microphones around a custommade glass instrument. Different microphone + setups weretested: above, inside and outside the instrument and at different distances. + The sounds were evaluated by an expertperformer, and further qualitative and quantitative + analyses have been carried out. Preference was given to therecordings from microphones + placed close to the rim of theinstrument, either from the inside or the outside.' + address: 'Sydney, Australia' + author: 'Jensenius, Alexander R. and Innervik, Kjell Tore and Frounberg, Ivar' + bibtex: "@inproceedings{Jensenius2010,\n abstract = {We report on a study of perceptual\ + \ and acoustic featuresrelated to the placement of microphones around a custommade\ + \ glass instrument. Different microphone setups weretested: above, inside and\ + \ outside the instrument and at different distances. The sounds were evaluated\ + \ by an expertperformer, and further qualitative and quantitative analyses have\ + \ been carried out. 
Preference was given to therecordings from microphones placed\ + \ close to the rim of theinstrument, either from the inside or the outside.},\n\ + \ address = {Sydney, Australia},\n author = {Jensenius, Alexander R. and Innervik,\ + \ Kjell Tore and Frounberg, Ivar},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177817},\n\ + \ issn = {2220-4806},\n keywords = {glass instruments, microphone placement, sound\ + \ analysis},\n pages = {208--211},\n title = {Evaluating the Subjective Effects\ + \ of Microphone Placement on Glass Instruments},\n url = {http://www.nime.org/proceedings/2010/nime2010_208.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179026 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177817 issn: 2220-4806 - month: May - pages: 382--386 - publisher: Louisiana State University - title: Liveness Through the Lens of Agency and Causality - url: http://www.nime.org/proceedings/2015/nime2015_272.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/272/0272-file1.mp4 - year: 2015 + keywords: 'glass instruments, microphone placement, sound analysis' + pages: 208--211 + title: Evaluating the Subjective Effects of Microphone Placement on Glass Instruments + url: http://www.nime.org/proceedings/2010/nime2010_208.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: dverdonk2015 - abstract: 'In electronic music performance, a good relationship between what is - visible and what is audible can contribute to a more succesful way of conveying - thought or feeling. This connection can be enhanced by putting visible energy - into an electronic interface or instrument. 
This paper discusses the advantages - and implementations of visible excitation methods, and how these could reinforce - the bridge between the performance of acoustic and electronic instruments concerning - expressiveness.' - address: 'Baton Rouge, Louisiana, USA' - author: Dianne Verdonk - bibtex: "@inproceedings{dverdonk2015,\n abstract = {In electronic music performance,\ - \ a good relationship between what is visible and what is audible can contribute\ - \ to a more succesful way of conveying thought or feeling. This connection can\ - \ be enhanced by putting visible energy into an electronic interface or instrument.\ - \ This paper discusses the advantages and implementations of visible excitation\ - \ methods, and how these could reinforce the bridge between the performance of\ - \ acoustic and electronic instruments concerning expressiveness.},\n address =\ - \ {Baton Rouge, Louisiana, USA},\n author = {Dianne Verdonk},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179188},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {42--43},\n publisher = {Louisiana\ - \ State University},\n title = {Visible Excitation Methods: Energy and Expressiveness\ - \ in Electronic Music Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_273.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/273/0273-file1.m4v},\n urlsuppl2\ - \ = {http://www.nime.org/proceedings/2015/273/0273-file2.m4v},\n year = {2015}\n\ + ID: Quintas2010 + abstract: 'Glitch DeLighter is a HyperInstrument conceived for Glitch music, based + on the idea of using fire expressiveness to digitally distort sound, pushing the + body and primitive ritualism into a computer mediated sound performance. Glitch + DeLighter uses ordinary lighters as physical controllers that can be played by + creating a flame and moving it in the air. 
Droned sounds are played by sustaining + the flame and beats by generating sparks and fast flames. The pitch of every sound + can be changed moving the flame vertically in the air. This is achieved by using + a custom computer vision system as an interface which maps the real-time the data + extracted from the flame and transmits those parameters to the sound generator. + As a result, the flame visual dynamics are deeply connected to the aural perception + of the sound - ‘the sound seems to be burning’. This process establishes a metaphor + dramaturgically engaging for an audience. This paper contextualizes the glitch + music aesthetics, prior research, the design and development of the instrument + and reports on Burning The Sound– the first music composition created and performed + with the instrument (by the author).' + address: 'Sydney, Australia' + author: 'Quintas, Rudolfo' + bibtex: "@inproceedings{Quintas2010,\n abstract = {Glitch DeLighter is a HyperInstrument\ + \ conceived for Glitch music, based on the idea of using fire expressiveness to\ + \ digitally distort sound, pushing the body and primitive ritualism into a computer\ + \ mediated sound performance. Glitch DeLighter uses ordinary lighters as physical\ + \ controllers that can be played by creating a flame and moving it in the air.\ + \ Droned sounds are played by sustaining the flame and beats by generating sparks\ + \ and fast flames. The pitch of every sound can be changed moving the flame vertically\ + \ in the air. This is achieved by using a custom computer vision system as an\ + \ interface which maps the real-time the data extracted from the flame and transmits\ + \ those parameters to the sound generator. As a result, the flame visual dynamics\ + \ are deeply connected to the aural perception of the sound - ‘the sound seems\ + \ to be burning’. This process establishes a metaphor dramaturgically engaging\ + \ for an audience. 
This paper contextualizes the glitch music aesthetics, prior\ + \ research, the design and development of the instrument and reports on Burning\ + \ The Sound– the first music composition created and performed with the instrument\ + \ (by the author).},\n address = {Sydney, Australia},\n author = {Quintas, Rudolfo},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177879},\n issn = {2220-4806},\n\ + \ keywords = {Hyper-Instruments, Glitch Music, Interactive Systems, Electronic\ + \ Music Performance.},\n pages = {212--216},\n title = {Glitch Delighter : Lighter's\ + \ Flame Base Hyper-Instrument for Glitch Music in Burning The Sound Performance},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_212.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179188 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177879 issn: 2220-4806 - month: May - pages: 42--43 - publisher: Louisiana State University - title: 'Visible Excitation Methods: Energy and Expressiveness in Electronic Music - Performance' - url: http://www.nime.org/proceedings/2015/nime2015_273.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/273/0273-file1.m4v - urlsuppl2: http://www.nime.org/proceedings/2015/273/0273-file2.m4v - year: 2015 + keywords: 'Hyper-Instruments, Glitch Music, Interactive Systems, Electronic Music + Performance.' + pages: 212--216 + title: 'Glitch Delighter : Lighter''s Flame Base Hyper-Instrument for Glitch Music + in Burning The Sound Performance' + url: http://www.nime.org/proceedings/2010/nime2010_212.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: snyder2015 - abstract: 'This paper describes a project undertaken in the Spring of 2014 that - sought to create an audio-visual performance using an industrial robotic arm. 
- Some relevant examples of previous robotic art are discussed, and the design challenges - posed by the unusual situation are explored. The resulting design solutions for - the sound, robotic motion, and video projection mapping involved in the piece - are explained, as well as the artistic reasoning behind those solutions. Where - applicable, links to open source code developed for the project are provided.' - address: 'Baton Rouge, Louisiana, USA' - author: Jeff Snyder and Ryan Johns and Charles Avis and Gene Kogan and Axel Kilian - bibtex: "@inproceedings{snyder2015,\n abstract = {This paper describes a project\ - \ undertaken in the Spring of 2014 that sought to create an audio-visual performance\ - \ using an industrial robotic arm. Some relevant examples of previous robotic\ - \ art are discussed, and the design challenges posed by the unusual situation\ - \ are explored. The resulting design solutions for the sound, robotic motion,\ - \ and video projection mapping involved in the piece are explained, as well as\ - \ the artistic reasoning behind those solutions. 
Where applicable, links to open\ - \ source code developed for the project are provided.},\n address = {Baton Rouge,\ - \ Louisiana, USA},\n author = {Jeff Snyder and Ryan Johns and Charles Avis and\ - \ Gene Kogan and Axel Kilian},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179180},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {184--186},\n publisher = {Louisiana State University},\n title\ - \ = {Machine Yearning: An Industrial Robotic Arm as a Performance Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_275.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/275/0275-file1.mp3},\n urlsuppl2 = {http://www.nime.org/proceedings/2015/275/0275-file2.mp4},\n\ - \ year = {2015}\n}\n" + ID: McPherson2010 + abstract: 'This paper presents the magnetic resonator piano, an augmented instrument + enhancing the capabilities of the acoustic grand piano. Electromagnetic actuators + induce the stringsto vibration, allowing each note to be continuously controlled + in amplitude, frequency, and timbre without external loudspeakers. Feedback from + a single pickup on thepiano soundboard allows the actuator waveforms to remainlocked + in phase with the natural motion of each string. Wealso present an augmented piano + keyboard which reportsthe continuous position of every key. Time and spatial resolution + are sufficient to capture detailed data about keypress, release, pretouch, aftertouch, + and other extended gestures. The system, which is designed with cost and setupconstraints + in mind, seeks to give pianists continuous control over the musical sound of their + instrument. The instrument has been used in concert performances, with theelectronically-actuated + sounds blending with acoustic instruments naturally and without amplification.' 
+ address: 'Sydney, Australia' + author: 'McPherson, Andrew and Kim, Youngmoo' + bibtex: "@inproceedings{McPherson2010,\n abstract = {This paper presents the magnetic\ + \ resonator piano, an augmented instrument enhancing the capabilities of the acoustic\ + \ grand piano. Electromagnetic actuators induce the stringsto vibration, allowing\ + \ each note to be continuously controlled in amplitude, frequency, and timbre\ + \ without external loudspeakers. Feedback from a single pickup on thepiano soundboard\ + \ allows the actuator waveforms to remainlocked in phase with the natural motion\ + \ of each string. Wealso present an augmented piano keyboard which reportsthe\ + \ continuous position of every key. Time and spatial resolution are sufficient\ + \ to capture detailed data about keypress, release, pretouch, aftertouch, and\ + \ other extended gestures. The system, which is designed with cost and setupconstraints\ + \ in mind, seeks to give pianists continuous control over the musical sound of\ + \ their instrument. 
The instrument has been used in concert performances, with\ + \ theelectronically-actuated sounds blending with acoustic instruments naturally\ + \ and without amplification.},\n address = {Sydney, Australia},\n author = {McPherson,\ + \ Andrew and Kim, Youngmoo},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177849},\n\ + \ issn = {2220-4806},\n keywords = {Augmented instruments, piano, interfaces,\ + \ electromagnetic actuation, gesture measurement},\n pages = {217--222},\n title\ + \ = {Augmenting the Acoustic Piano with Electromagnetic String Actuation and Continuous\ + \ Key Position Sensing},\n url = {http://www.nime.org/proceedings/2010/nime2010_217.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179180 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177849 issn: 2220-4806 - month: May - pages: 184--186 - publisher: Louisiana State University - title: 'Machine Yearning: An Industrial Robotic Arm as a Performance Instrument' - url: http://www.nime.org/proceedings/2015/nime2015_275.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/275/0275-file1.mp3 - urlsuppl2: http://www.nime.org/proceedings/2015/275/0275-file2.mp4 - year: 2015 + keywords: 'Augmented instruments, piano, interfaces, electromagnetic actuation, + gesture measurement' + pages: 217--222 + title: Augmenting the Acoustic Piano with Electromagnetic String Actuation and Continuous + Key Position Sensing + url: http://www.nime.org/proceedings/2010/nime2010_217.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: eberdahl2015 - abstract: The haptic hand is a greatly simplified robotic hand that is designed - to mirror the human hand and provide haptic force feedback for applications in - music. The fingers of the haptic hand device are laid out to align with four of - the fingers of the human hand. 
A key is placed on each of the fingers so that - a human hand can perform music by interacting with the keys. The haptic hand is - distinguished from other haptic keyboards in the sense that each finger is meant - to stay with a particular key. The haptic hand promotes unencumbered interaction - with the keys. The user can easily position a finger over a key and press downward - to activate it---the user does not need to insert his or her fingers into an unwieldy - exoskeleton or set of thimbles. An example video demonstrates some musical ideas - afforded by this open-source software and hardware project. - address: 'Baton Rouge, Louisiana, USA' - author: Edgar Berdahl and Denis Huber - bibtex: "@inproceedings{eberdahl2015,\n abstract = {The haptic hand is a greatly\ - \ simplified robotic hand that is designed to mirror the human hand and provide\ - \ haptic force feedback for applications in music. The fingers of the haptic hand\ - \ device are laid out to align with four of the fingers of the human hand. A key\ - \ is placed on each of the fingers so that a human hand can perform music by interacting\ - \ with the keys. The haptic hand is distinguished from other haptic keyboards\ - \ in the sense that each finger is meant to stay with a particular key. The haptic\ - \ hand promotes unencumbered interaction with the keys. 
The user can easily position\ - \ a finger over a key and press downward to activate it---the user does not need\ - \ to insert his or her fingers into an unwieldy exoskeleton or set of thimbles.\ - \ An example video demonstrates some musical ideas afforded by this open-source\ - \ software and hardware project.},\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Edgar Berdahl and Denis Huber},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1179022},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ - \ = {2220-4806},\n month = {May},\n pages = {303--306},\n publisher = {Louisiana\ - \ State University},\n title = {The Haptic Hand},\n url = {http://www.nime.org/proceedings/2015/nime2015_281.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/281/0281-file1.mov},\n urlsuppl2\ - \ = {http://www.nime.org/proceedings/2015/281/0281-file2.mov},\n year = {2015}\n\ - }\n" + ID: Grossmann2010 + abstract: 'In this paper I describe aspects that have been involved in my experience + of developing a hybrid instrument. The process of transformation and extension + of the instrument is informed by ideas concerning the intrinsic communication + aspects of musical activities. Decisions taken for designing the instrument and + performing with it take into account the hypothesis that there are ontological + levels of human reception in music that are related to the intercorporeal. Arguing + that it is necessary to encounter resistances for achieving expression, it is + suggested that new instrumental development ought to reflect on the concern for + keeping the natural connections of live performances. ' + address: 'Sydney, Australia' + author: 'Grossmann, Cesar M.' 
+ bibtex: "@inproceedings{Grossmann2010,\n abstract = {In this paper I describe aspects\ + \ that have been involved in my experience of developing a hybrid instrument.\ + \ The process of transformation and extension of the instrument is informed by\ + \ ideas concerning the intrinsic communication aspects of musical activities.\ + \ Decisions taken for designing the instrument and performing with it take into\ + \ account the hypothesis that there are ontological levels of human reception\ + \ in music that are related to the intercorporeal. Arguing that it is necessary\ + \ to encounter resistances for achieving expression, it is suggested that new\ + \ instrumental development ought to reflect on the concern for keeping the natural\ + \ connections of live performances. },\n address = {Sydney, Australia},\n author\ + \ = {Grossmann, Cesar M.},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177781},\n\ + \ issn = {2220-4806},\n keywords = {live processing,new instruments,nime10,recorder},\n\ + \ pages = {223--228},\n title = {Developing a Hybrid Contrabass Recorder Resistances,\ + \ Expression, Gestures and Rhetoric},\n url = {http://www.nime.org/proceedings/2010/nime2010_223.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179022 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177781 issn: 2220-4806 - month: May - pages: 303--306 - publisher: Louisiana State University - title: The Haptic Hand - url: http://www.nime.org/proceedings/2015/nime2015_281.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/281/0281-file1.mov - urlsuppl2: http://www.nime.org/proceedings/2015/281/0281-file2.mov - year: 2015 + keywords: 'live processing,new instruments,nime10,recorder' + pages: 223--228 + title: 'Developing a Hybrid Contrabass Recorder Resistances, Expression, 
Gestures + and Rhetoric' + url: http://www.nime.org/proceedings/2010/nime2010_223.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: slee2015 - abstract: 'This paper introduces programmable text rendering that enables temporal - typography in web browsers. Typing is seen not only as a dynamic but interactive - process facilitating both scripted and live musical expression in various contexts - such as audio-visual performance using keyboards and live coding visualization. - With the programmable text animation , we turn plain text into a highly audiovisual - medium and a musical interface which is visually expressive. We describe a concrete - technical realization of the concept using Web Audio API, WebGL and GLSL shaders. - We further show a number of examples that illustrate instances of the concept - in various scenarios ranging from simple textual visualization to live coding - environments. Lastly, we present an audiovisual music piece that involves live - writing augmented by the visualization technique.' - address: 'Baton Rouge, Louisiana, USA' - author: 'Lee, Sang Won and Georg Essl' - bibtex: "@inproceedings{slee2015,\n abstract = {This paper introduces programmable\ - \ text rendering that enables temporal typography in web browsers. Typing is seen\ - \ not only as a dynamic but interactive process facilitating both scripted and\ - \ live musical expression in various contexts such as audio-visual performance\ - \ using keyboards and live coding visualization. With the programmable text animation\ - \ , we turn plain text into a highly audiovisual medium and a musical interface\ - \ which is visually expressive. We describe a concrete technical realization of\ - \ the concept using Web Audio API, WebGL and GLSL shaders. We further show a number\ - \ of examples that illustrate instances of the concept in various scenarios ranging\ - \ from simple textual visualization to live coding environments. 
Lastly, we present\ - \ an audiovisual music piece that involves live writing augmented by the visualization\ - \ technique.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Lee, Sang\ - \ Won and Georg Essl},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179114},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {65--69},\n publisher = {Louisiana State University},\n title\ - \ = {Web-Based Temporal Typography for Musical Expression and Performance},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_283.pdf},\n year = {2015}\n\ - }\n" + ID: Carrillo2010 + abstract: 'This paper presents a virtual violin for real-time performances consisting + of two modules: a violin spectral model and a control interface. The interface + is composed by a sensing bow and a tube with drawn strings in substitution of + a real violin. The spectral model is driven by the bowing controls captured with + the control interface and it is able to predict spectral envelopes of the sound + corresponding to those controls. The envelopes are filled with harmonic andnoisy + content and given to an additive synthesizer in order to produce violin sounds. + The sensing system is based on two motion trackers with 6 degrees of freedom. + One tracker is attached to the bow and the other to the tube. Bowing controls + are computed after a calibration process where the position of virtual strings + and the hair-ribbon of the bowis obtained. A real time implementation was developed + asa MAX/MSP patch with external objects for each of the modules.' + address: 'Sydney, Australia' + author: 'Carrillo, Alfonso P. and Bonada, Jordi' + bibtex: "@inproceedings{Carrillo2010,\n abstract = {This paper presents a virtual\ + \ violin for real-time performances consisting of two modules: a violin spectral\ + \ model and a control interface. 
The interface is composed by a sensing bow and\ + \ a tube with drawn strings in substitution of a real violin. The spectral model\ + \ is driven by the bowing controls captured with the control interface and it\ + \ is able to predict spectral envelopes of the sound corresponding to those controls.\ + \ The envelopes are filled with harmonic andnoisy content and given to an additive\ + \ synthesizer in order to produce violin sounds. The sensing system is based on\ + \ two motion trackers with 6 degrees of freedom. One tracker is attached to the\ + \ bow and the other to the tube. Bowing controls are computed after a calibration\ + \ process where the position of virtual strings and the hair-ribbon of the bowis\ + \ obtained. A real time implementation was developed asa MAX/MSP patch with external\ + \ objects for each of the modules.},\n address = {Sydney, Australia},\n author\ + \ = {Carrillo, Alfonso P. and Bonada, Jordi},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177737},\n issn = {2220-4806},\n keywords = {violin, synthesis,\ + \ control, spectral, virtual},\n pages = {229--232},\n title = {The Bowed Tube\ + \ : a Virtual Violin},\n url = {http://www.nime.org/proceedings/2010/nime2010_229.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179114 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177737 issn: 2220-4806 - month: May - pages: 65--69 - publisher: Louisiana State University - title: Web-Based Temporal Typography for Musical Expression and Performance - url: http://www.nime.org/proceedings/2015/nime2015_283.pdf - year: 2015 + keywords: 'violin, synthesis, control, spectral, virtual' + pages: 229--232 + title: 'The Bowed Tube : a Virtual Violin' + url: http://www.nime.org/proceedings/2010/nime2010_229.pdf + year: 2010 - ENTRYTYPE: 
inproceedings - ID: esheffield2015 - abstract: 'The Pneumatic Practice Pad is a commercially available 10'''' practice - pad that has been modified to allow for tension changes in a matter of seconds - using a small electric air pump. In this paper, we examine the rebound characteristics - of the Pneumatic Practice Pad at various pressure presets and compare them to - a sample of acoustic drums. We also review subjective feedback from participants - in a playing test.' - address: 'Baton Rouge, Louisiana, USA' - author: Eric Sheffield and Sile O'Modhrain and Michael Gould and Brent Gillespie - bibtex: "@inproceedings{esheffield2015,\n abstract = {The Pneumatic Practice Pad\ - \ is a commercially available 10'' practice pad that has been modified to allow\ - \ for tension changes in a matter of seconds using a small electric air pump.\ - \ In this paper, we examine the rebound characteristics of the Pneumatic Practice\ - \ Pad at various pressure presets and compare them to a sample of acoustic drums.\ - \ We also review subjective feedback from participants in a playing test.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Eric Sheffield and Sile\ - \ O'Modhrain and Michael Gould and Brent Gillespie},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179178},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {231--234},\n publisher = {Louisiana\ - \ State University},\n title = {The Pneumatic Practice Pad},\n url = {http://www.nime.org/proceedings/2015/nime2015_286.pdf},\n\ - \ year = {2015}\n}\n" + ID: Hochenbaum2010 + abstract: 'This research is an initial effort in showing how a multimodal approach + can improve systems for gaining insight into a musician''s practice and technique. 
+ Embedding a variety of sensors inside musical instruments and synchronously recording + the sensors'' data along with audio, we gather a database of gestural information + from multiple performers, then use machine-learning techniques to recognize which + musician is performing. Our multimodal approach (using both audio and sensor data) + yields promising performer classification results, which we see as a first step + in a larger effort to gain insight into musicians'' practice and technique. ' + address: 'Sydney, Australia' + author: 'Hochenbaum, Jordan and Kapur, Ajay and Wright, Matthew' + bibtex: "@inproceedings{Hochenbaum2010,\n abstract = {This research is an initial\ + \ effort in showing how a multimodal approach can improve systems for gaining\ + \ insight into a musician's practice and technique. Embedding a variety of sensors\ + \ inside musical instruments and synchronously recording the sensors' data along\ + \ with audio, we gather a database of gestural information from multiple performers,\ + \ then use machine-learning techniques to recognize which musician is performing.\ + \ Our multimodal approach (using both audio and sensor data) yields promising\ + \ performer classification results, which we see as a first step in a larger effort\ + \ to gain insight into musicians' practice and technique. 
},\n address = {Sydney,\ + \ Australia},\n author = {Hochenbaum, Jordan and Kapur, Ajay and Wright, Matthew},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177805},\n issn = {2220-4806},\n\ + \ keywords = {Performer Recognition, Multimodal, HCI, Machine Learning, Hyperinstrument,\ + \ eSitar},\n pages = {233--237},\n title = {Multimodal Musician Recognition},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_233.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179178 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177805 issn: 2220-4806 - month: May - pages: 231--234 - publisher: Louisiana State University - title: The Pneumatic Practice Pad - url: http://www.nime.org/proceedings/2015/nime2015_286.pdf - year: 2015 + keywords: 'Performer Recognition, Multimodal, HCI, Machine Learning, Hyperinstrument, + eSitar' + pages: 233--237 + title: Multimodal Musician Recognition + url: http://www.nime.org/proceedings/2010/nime2010_233.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: wmarley2015 - abstract: 'This paper describes a software extension to the Reactable entitled Gestroviser - that was developed to explore musician machine collaboration at the control signal - level. The system functions by sampling a performers input, processing or reshaping - this sampled input, and then repeatedly replaying it. The degree to which the - sampled control signal is processed during replay is adjustable in real-time by - the manipulation of a continuous finger slider function. The reshaping algorithm - uses stochastic methods commonly used for MIDI note generation from a provided - dataset. The reshaped signal therefore varies in an unpredictable manner. In this - way the Gestroviser is a device to capture, reshape and replay an instrumental - gesture. 
We describe the result of initial user testing of the system and discuss - possible further development.' - address: 'Baton Rouge, Louisiana, USA' - author: William Marley and Nicholas Ward - bibtex: "@inproceedings{wmarley2015,\n abstract = {This paper describes a software\ - \ extension to the Reactable entitled Gestroviser that was developed to explore\ - \ musician machine collaboration at the control signal level. The system functions\ - \ by sampling a performers input, processing or reshaping this sampled input,\ - \ and then repeatedly replaying it. The degree to which the sampled control signal\ - \ is processed during replay is adjustable in real-time by the manipulation of\ - \ a continuous finger slider function. The reshaping algorithm uses stochastic\ - \ methods commonly used for MIDI note generation from a provided dataset. The\ - \ reshaped signal therefore varies in an unpredictable manner. In this way the\ - \ Gestroviser is a device to capture, reshape and replay an instrumental gesture.\ - \ We describe the result of initial user testing of the system and discuss possible\ - \ further development.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ - \ {William Marley and Nicholas Ward},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179124},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {140--143},\n publisher = {Louisiana State University},\n title\ - \ = {Gestroviser: Toward Collaborative Agency in Digital Musical Instruments.},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_287.pdf},\n urlsuppl1 =\ - \ {http://www.nime.org/proceedings/2015/287/0287-file1.mp4},\n year = {2015}\n\ - }\n" + ID: Guaus2010 + abstract: 'In this paper, we present our research on the acquisitionof gesture information + for the study of the expressivenessin guitar performances. 
For that purpose, we + design a sensor system which is able to gather the movements from lefthand fingers. + Our effort is focused on a design that is (1)non-intrusive to the performer and + (2) able to detect fromstrong movements of the left hand to subtle movements ofthe + fingers. The proposed system is based on capacitive sensors mounted on the fingerboard + of the guitar. We presentthe setup of the sensor system and analyze its response + toseveral finger movements.' + address: 'Sydney, Australia' + author: 'Guaus, Enric and Ozaslan, Tan and Palacios, Eric and Arcos, Josep L.' + bibtex: "@inproceedings{Guaus2010,\n abstract = {In this paper, we present our research\ + \ on the acquisitionof gesture information for the study of the expressivenessin\ + \ guitar performances. For that purpose, we design a sensor system which is able\ + \ to gather the movements from lefthand fingers. Our effort is focused on a design\ + \ that is (1)non-intrusive to the performer and (2) able to detect fromstrong\ + \ movements of the left hand to subtle movements ofthe fingers. The proposed system\ + \ is based on capacitive sensors mounted on the fingerboard of the guitar. 
We\ + \ presentthe setup of the sensor system and analyze its response toseveral finger\ + \ movements.},\n address = {Sydney, Australia},\n author = {Guaus, Enric and Ozaslan,\ + \ Tan and Palacios, Eric and Arcos, Josep L.},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177783},\n issn = {2220-4806},\n keywords = {Guitar; Gesture\ + \ acquisition; Capacitive sensors},\n pages = {238--243},\n title = {A Left Hand\ + \ Gesture Caption System for Guitar Based on Capacitive Sensors},\n url = {http://www.nime.org/proceedings/2010/nime2010_238.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179124 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177783 issn: 2220-4806 - month: May - pages: 140--143 - publisher: Louisiana State University - title: 'Gestroviser: Toward Collaborative Agency in Digital Musical Instruments.' - url: http://www.nime.org/proceedings/2015/nime2015_287.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/287/0287-file1.mp4 - year: 2015 + keywords: Guitar; Gesture acquisition; Capacitive sensors + pages: 238--243 + title: A Left Hand Gesture Caption System for Guitar Based on Capacitive Sensors + url: http://www.nime.org/proceedings/2010/nime2010_238.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: kschlei2015 - abstract: 'This paper explores the creation and testing of a new system for notating - physical actions on a surface. This notation is conceptualized through the medium - of, and initially tested on, multi-touch interfaces. Existing methods of notating - movement are reviewed, followed by a detailed explanation of our notation. User - trials were carried out in order to test how effective this notation was, the - results of which be explained. 
An analysis of the collected data follows, as well - as criticisms of the notation and testing process.' - address: 'Baton Rouge, Louisiana, USA' - author: Warren Enström and Josh Dennis and Brian Lynch and Kevin Schlei - bibtex: "@inproceedings{kschlei2015,\n abstract = {This paper explores the creation\ - \ and testing of a new system for notating physical actions on a surface. This\ - \ notation is conceptualized through the medium of, and initially tested on, multi-touch\ - \ interfaces. Existing methods of notating movement are reviewed, followed by\ - \ a detailed explanation of our notation. User trials were carried out in order\ - \ to test how effective this notation was, the results of which be explained.\ - \ An analysis of the collected data follows, as well as criticisms of the notation\ - \ and testing process.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ - \ {Warren Enstr\\''om and Josh Dennis and Brian Lynch and Kevin Schlei},\n booktitle\ + ID: Schmeder2010 + abstract: 'The design of an unusually simple fabric-based touchlocation and pressure + sensor is introduced. An analysisof the raw sensor data is shown to have significant + nonlinearities and non-uniform noise. Using support vectormachine learning and + a state-dependent adaptive filter itis demonstrated that these problems can be + overcome.The method is evaluated quantitatively using a statisticalestimate of + the instantaneous rate of information transfer.The SVM regression alone is shown + to improve the gesturesignal information rate by up to 20% with zero addedlatency, + and in combination with filtering by 40% subjectto a constant latency bound of + 10 milliseconds.' + address: 'Sydney, Australia' + author: 'Schmeder, Andrew and Freed, Adrian' + bibtex: "@inproceedings{Schmeder2010,\n abstract = {The design of an unusually simple\ + \ fabric-based touchlocation and pressure sensor is introduced. 
An analysisof\ + \ the raw sensor data is shown to have significant nonlinearities and non-uniform\ + \ noise. Using support vectormachine learning and a state-dependent adaptive filter\ + \ itis demonstrated that these problems can be overcome.The method is evaluated\ + \ quantitatively using a statisticalestimate of the instantaneous rate of information\ + \ transfer.The SVM regression alone is shown to improve the gesturesignal information\ + \ rate by up to 20% with zero addedlatency, and in combination with filtering\ + \ by 40% subjectto a constant latency bound of 10 milliseconds.},\n address =\ + \ {Sydney, Australia},\n author = {Schmeder, Andrew and Freed, Adrian},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179056},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {83--86},\n\ - \ publisher = {Louisiana State University},\n title = {Musical Notation for Multi-Touch\ - \ Interfaces},\n url = {http://www.nime.org/proceedings/2015/nime2015_289.pdf},\n\ - \ year = {2015}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1177893},\n issn = {2220-4806},\n keywords\ + \ = {gesture signal processing, support vector machine, touch sensor},\n pages\ + \ = {244--249},\n title = {Support Vector Machine Learning for Gesture Signal\ + \ Estimation with a Piezo-Resistive Fabric Touch Surface},\n url = {http://www.nime.org/proceedings/2010/nime2010_244.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179056 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177893 issn: 2220-4806 - month: May - pages: 83--86 - publisher: Louisiana State University - title: Musical Notation for Multi-Touch Interfaces - url: http://www.nime.org/proceedings/2015/nime2015_289.pdf - year: 2015 + keywords: 'gesture signal processing, support 
vector machine, touch sensor' + pages: 244--249 + title: Support Vector Machine Learning for Gesture Signal Estimation with a Piezo-Resistive + Fabric Touch Surface + url: http://www.nime.org/proceedings/2010/nime2010_244.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: bbortz2015 - abstract: 'Our experiment, Emotion in Motion, has amassed the world''s largest database - of human physiology associated with emotion in response to the presentation of - various selections of musical works. What began as a doctoral research study has - grown to include the emotional responses to musical experience from over ten thousand - participants across the world, from installations in Dublin, New York City, Norway, - Singapore, the Philippines, and Taiwan. The most recent iteration of is currently - underway in Taipei City, Taiwan. Preparation for this installation provided an - opportunity to reimagine the architecture of , allowing for a wider range of potential - applications than were originally possible with the original tools that drove - the experiment. Now more than an experiment, is a framework for developing myriad - emotional/musical/biomusical interactions with multiple co-located or remote participants. - This paper describes the development of this open-source framework and includes - discussion of its various components: hardware agnostic sensor inputs, refined - physiological signal processing tools, and a public database of data collected - during various instantiations of applications built on the framework. We also - discuss our ongoing work with this tool, and provide the reader with other potential - applications that they might realize in using .' - address: 'Baton Rouge, Louisiana, USA' - author: Brennon Bortz and Javier Jaimovich and R. 
Benjamin Knapp - bibtex: "@inproceedings{bbortz2015,\n abstract = {Our experiment, Emotion in Motion,\ - \ has amassed the world's largest database of human physiology associated with\ - \ emotion in response to the presentation of various selections of musical works.\ - \ What began as a doctoral research study has grown to include the emotional responses\ - \ to musical experience from over ten thousand participants across the world,\ - \ from installations in Dublin, New York City, Norway, Singapore, the Philippines,\ - \ and Taiwan. The most recent iteration of is currently underway in Taipei City,\ - \ Taiwan. Preparation for this installation provided an opportunity to reimagine\ - \ the architecture of , allowing for a wider range of potential applications than\ - \ were originally possible with the original tools that drove the experiment.\ - \ Now more than an experiment, is a framework for developing myriad emotional/musical/biomusical\ - \ interactions with multiple co-located or remote participants. This paper describes\ - \ the development of this open-source framework and includes discussion of its\ - \ various components: hardware agnostic sensor inputs, refined physiological signal\ - \ processing tools, and a public database of data collected during various instantiations\ - \ of applications built on the framework. We also discuss our ongoing work with\ - \ this tool, and provide the reader with other potential applications that they\ - \ might realize in using .},\n address = {Baton Rouge, Louisiana, USA},\n author\ - \ = {Brennon Bortz and Javier Jaimovich and {R. 
Benjamin} Knapp},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179034},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {44--49},\n\ - \ publisher = {Louisiana State University},\n title = {Emotion in Motion: A Reimagined\ - \ Framework for Biomusical/Emotional Interaction},\n url = {http://www.nime.org/proceedings/2015/nime2015_291.pdf},\n\ - \ year = {2015}\n}\n" + ID: Schacher2010 + abstract: 'Mapping in interactive dance performance poses a number of questions + related to the perception and expression of gestures in contrast to pure motion-detection + and analysis. A specific interactive dance project is discussed, in which two + complementary sensing modes are integrated to obtain higherlevel expressive gestures. + These are applied to a modular nonlinear composition, in which the exploratory + dance performance assumes the role of instrumentalist and conductor. The development + strategies and methods for each of the involved artists are discussed and the + software tools and wearable devices that were developed for this project are presented. ' + address: 'Sydney, Australia' + author: 'Schacher, Jan C.' + bibtex: "@inproceedings{Schacher2010,\n abstract = {Mapping in interactive dance\ + \ performance poses a number of questions related to the perception and expression\ + \ of gestures in contrast to pure motion-detection and analysis. A specific interactive\ + \ dance project is discussed, in which two complementary sensing modes are integrated\ + \ to obtain higherlevel expressive gestures. These are applied to a modular nonlinear\ + \ composition, in which the exploratory dance performance assumes the role of\ + \ instrumentalist and conductor. 
The development strategies and methods for each\ + \ of the involved artists are discussed and the software tools and wearable devices\ + \ that were developed for this project are presented. },\n address = {Sydney,\ + \ Australia},\n author = {Schacher, Jan C.},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177889},\n issn = {2220-4806},\n keywords = {Mapping, motion\ + \ sensing, computer vision, artistic strategies, wearable sensors, mapping tools,\ + \ splines, delaunay tessellation.},\n pages = {250--254},\n title = {Motion To\ + \ Gesture To Sound : Mapping For Interactive Dance},\n url = {http://www.nime.org/proceedings/2010/nime2010_250.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179034 - editor: Edgar Berdahl and Jesse Allison - issn: 2220-4806 - month: May - pages: 44--49 - publisher: Louisiana State University - title: 'Emotion in Motion: A Reimagined Framework for Biomusical/Emotional Interaction' - url: http://www.nime.org/proceedings/2015/nime2015_291.pdf - year: 2015 - - -- ENTRYTYPE: inproceedings - ID: hlin2015 - abstract: 'Pedagogical research demonstrates theories and practices of perception - or production of melodic or harmonic ``intonation'''', i.e. the realization of - pitch accuracy. There are software and hardware to help students improve intonation. - Those tools have various functions. Nevertheless, they still miss something which - could benefit users very much. Even worse, they are not easy to be revised. Most - importantly, there should be more amusing and engaging interaction between a tuning - trainer and a user which is able to exchange roles of tuner and player. In this - research, we implement an open-source program named ``Harmonic Intonation Trainer'''' - in Pure Data. It includes most of essential elements of a smart tuner. 
A user - can tune his pitch while optionally hearing (through earphones) the target pitch - and other harmonic intervals in respective octaves. Moreover, in its interactive - accompanist mode, a user''s input pitch serves as the reference frequency; the - program follows his intonation to generate corresponding harmonic intervals. Additionally, - user can straightforwardly edit all parameters and patches by Pure Data. Any adoption - or revision is absolutely welcome. Finally, we will initiate another research - to test and to inspect experimental results from student orchestras so that its - future version is expected to be more sophisticated.' - address: 'Baton Rouge, Louisiana, USA' - author: Hsin-Ming Lin and Chin-Ming Lin - bibtex: "@inproceedings{hlin2015,\n abstract = {Pedagogical research demonstrates\ - \ theories and practices of perception or production of melodic or harmonic ``intonation'',\ - \ i.e. the realization of pitch accuracy. There are software and hardware to help\ - \ students improve intonation. Those tools have various functions. Nevertheless,\ - \ they still miss something which could benefit users very much. Even worse, they\ - \ are not easy to be revised. Most importantly, there should be more amusing and\ - \ engaging interaction between a tuning trainer and a user which is able to exchange\ - \ roles of tuner and player. In this research, we implement an open-source program\ - \ named ``Harmonic Intonation Trainer'' in Pure Data. It includes most of essential\ - \ elements of a smart tuner. A user can tune his pitch while optionally hearing\ - \ (through earphones) the target pitch and other harmonic intervals in respective\ - \ octaves. Moreover, in its interactive accompanist mode, a user's input pitch\ - \ serves as the reference frequency; the program follows his intonation to generate\ - \ corresponding harmonic intervals. Additionally, user can straightforwardly edit\ - \ all parameters and patches by Pure Data. 
Any adoption or revision is absolutely\ - \ welcome. Finally, we will initiate another research to test and to inspect experimental\ - \ results from student orchestras so that its future version is expected to be\ - \ more sophisticated.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ - \ {Hsin-Ming Lin and Chin-Ming Lin},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179118},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {38--39},\n publisher = {Louisiana State University},\n title\ - \ = {Harmonic Intonation Trainer: An Open Implementation in Pure Data},\n url\ - \ = {http://www.nime.org/proceedings/2015/nime2015_300.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/300/0300-file1.mp4},\n\ - \ year = {2015}\n}\n" + doi: 10.5281/zenodo.1177889 + issn: 2220-4806 + keywords: 'Mapping, motion sensing, computer vision, artistic strategies, wearable + sensors, mapping tools, splines, delaunay tessellation.' + pages: 250--254 + title: 'Motion To Gesture To Sound : Mapping For Interactive Dance' + url: http://www.nime.org/proceedings/2010/nime2010_250.pdf + year: 2010 + + +- ENTRYTYPE: inproceedings + ID: Whalley2010 + abstract: 'GIIMP addresses the criticism that in many interactive music systems + the machine simply reacts. Interaction is addressed by extending Winkler''s [18] + model toward adapting Paine''s [10] conversational model of interaction. Realized + using commercial tools, GIIMP implements a machine/human generative improvisation + system using human gesture input, machine gesture capture, and a gesture mutation + module in conjunction with a flocking patch, mapped through microtonal/spectral + techniques to sound. The intention is to meld some established and current practices, + and combine aspects of symbolic and sub-symbolic approaches, toward musical outcomes. 
' + address: 'Sydney, Australia' + author: 'Whalley, Ian' + bibtex: "@inproceedings{Whalley2010,\n abstract = {GIIMP addresses the criticism\ + \ that in many interactive music systems the machine simply reacts. Interaction\ + \ is addressed by extending Winkler's [18] model toward adapting Paine's [10]\ + \ conversational model of interaction. Realized using commercial tools, GIIMP\ + \ implements a machine/human generative improvisation system using human gesture\ + \ input, machine gesture capture, and a gesture mutation module in conjunction\ + \ with a flocking patch, mapped through microtonal/spectral techniques to sound.\ + \ The intention is to meld some established and current practices, and combine\ + \ aspects of symbolic and sub-symbolic approaches, toward musical outcomes. },\n\ + \ address = {Sydney, Australia},\n author = {Whalley, Ian},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177923},\n issn = {2220-4806},\n keywords = {Interaction,\ + \ gesture, genetic algorithm, flocking, improvisation.},\n pages = {255--258},\n\ + \ title = {Generative Improv . \\& Interactive Music Project (GIIMP)},\n url =\ + \ {http://www.nime.org/proceedings/2010/nime2010_255.pdf},\n year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179118 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177923 issn: 2220-4806 - month: May - pages: 38--39 - publisher: Louisiana State University - title: 'Harmonic Intonation Trainer: An Open Implementation in Pure Data' - url: http://www.nime.org/proceedings/2015/nime2015_300.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/300/0300-file1.mp4 - year: 2015 + keywords: 'Interaction, gesture, genetic algorithm, flocking, improvisation.' + pages: 255--258 + title: Generative Improv . 
& Interactive Music Project (GIIMP) + url: http://www.nime.org/proceedings/2010/nime2010_255.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: jbarbosa2015 - abstract: 'Evaluation has been suggested to be one of the main trends in current - NIME research. However, the meaning of the term for the community may not be as - clear as it seems. In order to explore this issue, we have analyzed all papers - and posters published in the proceedings of the NIME conference from 2012 to 2014. - For each publication that explicitly mentioned the term evaluation, we looked - for: a) What targets and stakeholders were considered? b) What goals were set? - c) What criteria were used? d) What methods were used? e) How long did the evaluation - last? Results show different understandings of evaluation, with little consistency - regarding the usage of the word. Surprisingly in some cases, not even basic information - such as goal, criteria and methods were provided. In this paper, we attempt to - provide an idea of what evaluation means for the NIME community, pushing the discussion - towards how could we make a better use of evaluation on NIME design and what criteria - should be used regarding each goal.' - address: 'Baton Rouge, Louisiana, USA' - author: Jeronimo Barbosa and Joseph Malloch and Marcelo Wanderley and Stéphane Huot - bibtex: "@inproceedings{jbarbosa2015,\n abstract = {Evaluation has been suggested\ - \ to be one of the main trends in current NIME research. However, the meaning\ - \ of the term for the community may not be as clear as it seems. In order to explore\ - \ this issue, we have analyzed all papers and posters published in the proceedings\ - \ of the NIME conference from 2012 to 2014. For each publication that explicitly\ - \ mentioned the term evaluation, we looked for: a) What targets and stakeholders\ - \ were considered? b) What goals were set? c) What criteria were used? d) What\ - \ methods were used? e) How long did the evaluation last? 
Results show different\ - \ understandings of evaluation, with little consistency regarding the usage of\ - \ the word. Surprisingly in some cases, not even basic information such as goal,\ - \ criteria and methods were provided. In this paper, we attempt to provide an\ - \ idea of what evaluation means for the NIME community, pushing the discussion\ - \ towards how could we make a better use of evaluation on NIME design and what\ - \ criteria should be used regarding each goal.},\n address = {Baton Rouge, Louisiana,\ - \ USA},\n author = {Jeronimo Barbosa and Joseph Malloch and Marcelo Wanderley\ - \ and St\\'ephane Huot},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179010},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {156--161},\n publisher = {Louisiana State University},\n title\ - \ = {What does 'Evaluation' mean for the NIME community?},\n url = {http://www.nime.org/proceedings/2015/nime2015_301.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/301/0301-file1.xlsx},\n year\ - \ = {2015}\n}\n" + ID: Nymoen2010 + abstract: 'In this paper we present a method for studying relationships between + features of sound and features of movement. The method has been tested by carrying + out an experiment with people moving an object in space along with short sounds. + 3D position data of the object was recorded and several features were calculated + from each of the recordings. These features were provided as input to a classifier + which was able to classify the recorded actions satisfactorily, particularly when + taking into account that the only link between the actions performed by the different + subjects was the sound they heard while making the action.' + address: 'Sydney, Australia' + author: 'Nymoen, Kristian and Glette, Kyrre and Skogstad, Ståle A. 
and Torresen, + Jim and Jensenius, Alexander Refsum' + bibtex: "@inproceedings{Nymoen2010,\n abstract = {In this paper we present a method\ + \ for studying relationships between features of sound and features of movement.\ + \ The method has been tested by carrying out an experiment with people moving\ + \ an object in space along with short sounds. 3D position data of the object was\ + \ recorded and several features were calculated from each of the recordings. These\ + \ features were provided as input to a classifier which was able to classify the\ + \ recorded actions satisfactorily, particularly when taking into account that\ + \ the only link between the actions performed by the different subjects was the\ + \ sound they heard while making the action.},\n address = {Sydney, Australia},\n\ + \ author = {Nymoen, Kristian and Glette, Kyrre and Skogstad, Ståle A. and Torresen,\ + \ Jim and Jensenius, Alexander Refsum},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177869},\n\ + \ issn = {2220-4806},\n keywords = {nime10},\n pages = {259--262},\n title = {Searching\ + \ for Cross-Individual Relationships between Sound and Movement Features using\ + \ an {SVM} Classifier},\n url = {http://www.nime.org/proceedings/2010/nime2010_259.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179010 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177869 issn: 2220-4806 - month: May - pages: 156--161 - publisher: Louisiana State University - title: 'What does ''Evaluation'' mean for the NIME community?' 
- url: http://www.nime.org/proceedings/2015/nime2015_301.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/301/0301-file1.xlsx - year: 2015 + keywords: nime10 + pages: 259--262 + title: Searching for Cross-Individual Relationships between Sound and Movement Features + using an SVM Classifier + url: http://www.nime.org/proceedings/2010/nime2010_259.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: ihattwick2015 - abstract: 'The Pearl is a multi-modal computer interface initially conceived as - an interactive prop for a multi-artistic theatrical performance. It is a spherical - hand-held wireless controller embedded with various sensor technologies and interactive - lighting. The lighting was a key conceptual component in the instrument''s creation - both as a theatrical prop and also as an interface for musical performance as - it helps to address conceptual challenges and opportunities posed by the instrument''s - spherical form. This paper begins by providing a brief description of the Pearl - and its use as a spherical instrument. We then discuss mapping the Pearl both - to generate sound and control its interactive lighting, and identify different - strategies for its use. Strategies we identify include feedback regarding performer - gesture, information about the state of the instrument, and use as an aesthetic - performance component. ' - address: 'Baton Rouge, Louisiana, USA' - author: Ian Hattwick and Marcelo Wanderley - bibtex: "@inproceedings{ihattwick2015,\n abstract = {The Pearl is a multi-modal\ - \ computer interface initially conceived as an interactive prop for a multi-artistic\ - \ theatrical performance. It is a spherical hand-held wireless controller embedded\ - \ with various sensor technologies and interactive lighting. 
The lighting was\ - \ a key conceptual component in the instrument's creation both as a theatrical\ - \ prop and also as an interface for musical performance as it helps to address\ - \ conceptual challenges and opportunities posed by the instrument's spherical\ - \ form. This paper begins by providing a brief description of the Pearl and its\ - \ use as a spherical instrument. We then discuss mapping the Pearl both to generate\ - \ sound and control its interactive lighting, and identify different strategies\ - \ for its use. Strategies we identify include feedback regarding performer gesture,\ - \ information about the state of the instrument, and use as an aesthetic performance\ - \ component. },\n address = {Baton Rouge, Louisiana, USA},\n author = {Ian Hattwick\ - \ and Marcelo Wanderley},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179080},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {201--204},\n publisher = {Louisiana State University},\n title\ - \ = {Interactive Lighting in the Pearl: Considerations and Implementation},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_302.pdf},\n year = {2015}\n\ - }\n" + ID: Baba2010 + abstract: '''''VirtualPhilharmony'''' (V.P.) is a conducting interface that enables + users to perform expressive music with conducting action. Several previously developed + conducting interfaces do not satisfy users who have conducting experience because + the feedback from the conducting action does not always correspond with a natural + performance. The tempo scheduler, which is the main engine of a conducting system, + must be improved. V.P. solves this problem by introducing heuristics of conducting + an orchestra in detecting beats, applying rules regarding the tempo expression + in a bar, etc. 
We confirmed with users that the system realized a high "following" + performance and had musical persuasiveness. ' + address: 'Sydney, Australia' + author: 'Baba, Takashi and Hashida, Mitsuyo and Katayose, Haruhiro' + bibtex: "@inproceedings{Baba2010,\n abstract = {''VirtualPhilharmony'' (V.P.) is\ + \ a conducting interface that enables users to perform expressive music with conducting\ + \ action. Several previously developed conducting interfaces do not satisfy users\ + \ who have conducting experience because the feedback from the conducting action\ + \ does not always correspond with a natural performance. The tempo scheduler,\ + \ which is the main engine of a conducting system, must be improved. V.P. solves\ + \ this problem by introducing heuristics of conducting an orchestra in detecting\ + \ beats, applying rules regarding the tempo expression in a bar, etc. We confirmed\ + \ with users that the system realized a high \"following\" performance and had\ + \ musical persuasiveness. 
},\n address = {Sydney, Australia},\n author = {Baba,\ + \ Takashi and Hashida, Mitsuyo and Katayose, Haruhiro},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177715},\n issn = {2220-4806},\n keywords = {Conducting\ + \ system, heuristics, sensor, template.},\n pages = {263--270},\n title = {''VirtualPhilharmony'':\ + \ A Conducting System with Heuristics of Conducting an Orchestra},\n url = {http://www.nime.org/proceedings/2010/nime2010_263.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179080 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177715 issn: 2220-4806 - month: May - pages: 201--204 - publisher: Louisiana State University - title: 'Interactive Lighting in the Pearl: Considerations and Implementation' - url: http://www.nime.org/proceedings/2015/nime2015_302.pdf - year: 2015 + keywords: 'Conducting system, heuristics, sensor, template.' + pages: 263--270 + title: '''''VirtualPhilharmony'''': A Conducting System with Heuristics of Conducting + an Orchestra' + url: http://www.nime.org/proceedings/2010/nime2010_263.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: rgraham2015 - abstract: 'This paper presents the ideas and mapping strategies behind a performance - system that uses a combination of motion tracking and feature extraction tools - to manage complex multichannel audio materials for real-time music composition. - The use of embodied metaphors within these mappings is seen as a means of managing - the complexity of a musical performance across multiple modalities. In particular, - we will investigate how these mapping strategies may facilitate the creation of - performance systems whose accessibility and richness are enhanced by common integrating - bases. 
A key focus for this work is the investigation of the embodied image schema - theories of Lakoff and Johnson alongside similarly embodied metaphorical models - within Smalley''s influential theory of electroacoustic music (spectromorphology). - These metaphors will be investigated for their use as grounding structural components - and dynamics for creative practices and musical interaction design. We argue that - pairing metaphorical models of forces with environmental forms may have particular - significance for the design of complex mappings for digital music performance.' - address: 'Baton Rouge, Louisiana, USA' - author: Richard Graham and Brian Bridges - bibtex: "@inproceedings{rgraham2015,\n abstract = {This paper presents the ideas\ - \ and mapping strategies behind a performance system that uses a combination of\ - \ motion tracking and feature extraction tools to manage complex multichannel\ - \ audio materials for real-time music composition. The use of embodied metaphors\ - \ within these mappings is seen as a means of managing the complexity of a musical\ - \ performance across multiple modalities. In particular, we will investigate how\ - \ these mapping strategies may facilitate the creation of performance systems\ - \ whose accessibility and richness are enhanced by common integrating bases. A\ - \ key focus for this work is the investigation of the embodied image schema theories\ - \ of Lakoff and Johnson alongside similarly embodied metaphorical models within\ - \ Smalley's influential theory of electroacoustic music (spectromorphology). These\ - \ metaphors will be investigated for their use as grounding structural components\ - \ and dynamics for creative practices and musical interaction design. 
We argue\ - \ that pairing metaphorical models of forces with environmental forms may have\ - \ particular significance for the design of complex mappings for digital music\ - \ performance.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Richard\ - \ Graham and Brian Bridges},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179066},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {103--106},\n publisher = {Louisiana State University},\n title\ - \ = {Managing Musical Complexity with Embodied Metaphors},\n url = {http://www.nime.org/proceedings/2015/nime2015_303.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/303/0303-file1.mov},\n urlsuppl2\ - \ = {http://www.nime.org/proceedings/2015/303/0303-file2.wav},\n year = {2015}\n\ - }\n" + ID: Grosshauser2010 + abstract: 'Pressure, motion, and gesture are important parameters inmusical instrument + playing. Pressure sensing allows to interpret complex hidden forces, which appear + during playinga musical instrument. The combination of our new sensorsetup with + pattern recognition techniques like the lately developed ordered means models + allows fast and precise recognition of highly skilled playing techniques. This + includes leftand right hand analysis as well as a combination of both. Inthis + paper we show bow position recognition for string instruments by means of support + vector regression machineson the right hand finger pressure, as well as bowing + recognition and inaccurate playing detection with ordered meansmodels. We also + introduce a new left hand and chin pressuresensing method for coordination and + position change analysis. Our methods in combination with our audio, video,and + gesture recording software can be used for teachingand exercising. 
Especially + studies of complex movementsand finger force distribution changes can benefit + from suchan approach. Practical applications include the recognitionof inaccuracy, + cramping, or malposition, and, last but notleast, the development of augmented + instruments and newplaying techniques.' + address: 'Sydney, Australia' + author: 'Großhauser, Tobias and Großekathöfer, Ulf and Hermann, Thomas' + bibtex: "@inproceedings{Grosshauser2010,\n abstract = {Pressure, motion, and gesture\ + \ are important parameters inmusical instrument playing. Pressure sensing allows\ + \ to interpret complex hidden forces, which appear during playinga musical instrument.\ + \ The combination of our new sensorsetup with pattern recognition techniques like\ + \ the lately developed ordered means models allows fast and precise recognition\ + \ of highly skilled playing techniques. This includes leftand right hand analysis\ + \ as well as a combination of both. Inthis paper we show bow position recognition\ + \ for string instruments by means of support vector regression machineson the\ + \ right hand finger pressure, as well as bowing recognition and inaccurate playing\ + \ detection with ordered meansmodels. We also introduce a new left hand and chin\ + \ pressuresensing method for coordination and position change analysis. Our methods\ + \ in combination with our audio, video,and gesture recording software can be used\ + \ for teachingand exercising. Especially studies of complex movementsand finger\ + \ force distribution changes can benefit from suchan approach. 
Practical applications\ + \ include the recognitionof inaccuracy, cramping, or malposition, and, last but\ + \ notleast, the development of augmented instruments and newplaying techniques.},\n\ + \ address = {Sydney, Australia},\n author = {Gro{\\ss}hauser, Tobias and Gro{\\\ + ss}ekath\\\"{o}fer, Ulf and Hermann, Thomas},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177779},\n issn = {2220-4806},\n keywords = {left hand,nime10,ordered\ + \ means models,pressure,sensor,strings},\n pages = {271--276},\n title = {New\ + \ Sensors and Pattern Recognition Techniques for String Instruments},\n url =\ + \ {http://www.nime.org/proceedings/2010/nime2010_271.pdf},\n year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179066 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177779 issn: 2220-4806 - month: May - pages: 103--106 - publisher: Louisiana State University - title: Managing Musical Complexity with Embodied Metaphors - url: http://www.nime.org/proceedings/2015/nime2015_303.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/303/0303-file1.mov - urlsuppl2: http://www.nime.org/proceedings/2015/303/0303-file2.wav - year: 2015 + keywords: 'left hand,nime10,ordered means models,pressure,sensor,strings' + pages: 271--276 + title: New Sensors and Pattern Recognition Techniques for String Instruments + url: http://www.nime.org/proceedings/2010/nime2010_271.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: apon2015 - abstract: 'This paper describes the motivation and process of developing a musical - instrument for an unborn child. Well established research shows a fetus in the - womb can respond to and benefit from stimuli from the outside world. A musical - instrument designed for this unique context can leverage the power of this interaction. 
- Two prototypes were constructed and tested during separate pregnancies and the - experiences are presented, and the limitation of the sensor technology identified. - We discuss our discoveries about design considerations and challenges for such - an instrument, and project thought-provoking questions that arise from its potential - applications.' - address: 'Baton Rouge, Louisiana, USA' - author: Aura Pon and Johnty Wang and Laurie Radford and Sheelagh Carpendale - bibtex: "@inproceedings{apon2015,\n abstract = {This paper describes the motivation\ - \ and process of developing a musical instrument for an unborn child. Well established\ - \ research shows a fetus in the womb can respond to and benefit from stimuli from\ - \ the outside world. A musical instrument designed for this unique context can\ - \ leverage the power of this interaction. Two prototypes were constructed and\ - \ tested during separate pregnancies and the experiences are presented, and the\ - \ limitation of the sensor technology identified. 
We discuss our discoveries about\ - \ design considerations and challenges for such an instrument, and project thought-provoking\ - \ questions that arise from its potential applications.},\n address = {Baton Rouge,\ - \ Louisiana, USA},\n author = {Aura Pon and Johnty Wang and Laurie Radford and\ - \ Sheelagh Carpendale},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179156},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {87--90},\n publisher = {Louisiana State University},\n title\ - \ = {Womba: A Musical Instrument for an Unborn Child},\n url = {http://www.nime.org/proceedings/2015/nime2015_304.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/304/0304-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Hahnel2010b + abstract: 'As one of the main expressive feature in music, articulationaffects a + wide range of tone attributes. Based on experimental recordings we analyzed human + articulation in the lateBaroque style. The results are useful for both the understanding + of historically informed performance practices andfurther progress in synthetic + performance generation. Thispaper reports of our findings and the implementation + in aperformance system. Because of its flexibility and universality the system + allows more than Baroque articulation.' + address: 'Sydney, Australia' + author: 'Hähnel, Tilo and Berndt, Axel' + bibtex: "@inproceedings{Hahnel2010b,\n abstract = {As one of the main expressive\ + \ feature in music, articulationaffects a wide range of tone attributes. Based\ + \ on experimental recordings we analyzed human articulation in the lateBaroque\ + \ style. 
The results are useful for both the understanding of historically informed\ + \ performance practices andfurther progress in synthetic performance generation.\ + \ Thispaper reports of our findings and the implementation in aperformance system.\ + \ Because of its flexibility and universality the system allows more than Baroque\ + \ articulation.},\n address = {Sydney, Australia},\n author = {H\\''{a}hnel, Tilo\ + \ and Berndt, Axel},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177789},\n\ + \ issn = {2220-4806},\n keywords = {Expressive Performance, Articulation, Historically\ + \ Informed Performance},\n pages = {277--282},\n title = {Expressive Articulation\ + \ for Synthetic Music Performances},\n url = {http://www.nime.org/proceedings/2010/nime2010_277.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179156 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177789 issn: 2220-4806 - month: May - pages: 87--90 - publisher: Louisiana State University - title: 'Womba: A Musical Instrument for an Unborn Child' - url: http://www.nime.org/proceedings/2015/nime2015_304.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/304/0304-file1.mp4 - year: 2015 + keywords: 'Expressive Performance, Articulation, Historically Informed Performance' + pages: 277--282 + title: Expressive Articulation for Synthetic Music Performances + url: http://www.nime.org/proceedings/2010/nime2010_277.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: amarquezborbon2015 - abstract: 'This paper examines the notion of community as commonly employed within - NIME discourses. Our aim is to clarify and define the term through the community - of practice framework. 
We argue that through its formal use and application, the - notion of community becomes a significant space for the examination of emergent - musical practices that could otherwise be overlooked. This paper defines community - of practice, as originally developed in the social sciences by Lave and Wegener, - and applies it within the NIME context through the examination of existing communities - of practice such as the laptop performance community, laptop orchestras, as well - as the Satellite CCRMA and Patchblocks communities. ' - address: 'Baton Rouge, Louisiana, USA' - author: Adnan Marquez-Borbon and Paul Stapleton - bibtex: "@inproceedings{amarquezborbon2015,\n abstract = {This paper examines the\ - \ notion of community as commonly employed within NIME discourses. Our aim is\ - \ to clarify and define the term through the community of practice framework.\ - \ We argue that through its formal use and application, the notion of community\ - \ becomes a significant space for the examination of emergent musical practices\ - \ that could otherwise be overlooked. This paper defines community of practice,\ - \ as originally developed in the social sciences by Lave and Wegener, and applies\ - \ it within the NIME context through the examination of existing communities of\ - \ practice such as the laptop performance community, laptop orchestras, as well\ - \ as the Satellite CCRMA and Patchblocks communities. 
},\n address = {Baton Rouge,\ - \ Louisiana, USA},\n author = {Adnan Marquez-Borbon and Paul Stapleton},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179128},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {307--312},\n\ - \ publisher = {Louisiana State University},\n title = {Fourteen Years of NIME:\ - \ The Value and Meaning of `Community' in Interactive Music Research},\n url =\ - \ {http://www.nime.org/proceedings/2015/nime2015_308.pdf},\n year = {2015}\n}\n" + ID: Brown2010 + abstract: 'Generative music systems can be played by musicians who manipulate the + values of algorithmic parameters, and their datacentric nature provides an opportunity + for coordinated interaction amongst a group of systems linked over IP networks; + a practice we call Network Jamming. This paper outlines the characteristics of + this networked performance practice and discusses the types of mediated musical + relationships and ensemble configurations that can arise. We have developed and + tested the jam2jam network jamming software over recent years. We describe this + system, draw from our experiences with it, and use it to illustrate some characteristics + of Network Jamming.' + address: 'Sydney, Australia' + author: 'Brown, Andrew R.' + bibtex: "@inproceedings{Brown2010,\n abstract = {Generative music systems can be\ + \ played by musicians who manipulate the values of algorithmic parameters, and\ + \ their datacentric nature provides an opportunity for coordinated interaction\ + \ amongst a group of systems linked over IP networks; a practice we call Network\ + \ Jamming. This paper outlines the characteristics of this networked performance\ + \ practice and discusses the types of mediated musical relationships and ensemble\ + \ configurations that can arise. 
We have developed and tested the jam2jam network\ + \ jamming software over recent years. We describe this system, draw from our experiences\ + \ with it, and use it to illustrate some characteristics of Network Jamming.},\n\ + \ address = {Sydney, Australia},\n author = {Brown, Andrew R.},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177723},\n issn = {2220-4806},\n keywords = {collaborative,ensemble,generative,interaction,network,nime10},\n\ + \ pages = {283--286},\n title = {Network Jamming : Distributed Performance using\ + \ Generative Music},\n url = {http://www.nime.org/proceedings/2010/nime2010_283.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179128 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177723 issn: 2220-4806 - month: May - pages: 307--312 - publisher: Louisiana State University - title: 'Fourteen Years of NIME: The Value and Meaning of `Community'' in Interactive - Music Research' - url: http://www.nime.org/proceedings/2015/nime2015_308.pdf - year: 2015 + keywords: collaborative,ensemble,generative,interaction,network,nime10 + pages: 283--286 + title: 'Network Jamming : Distributed Performance using Generative Music' + url: http://www.nime.org/proceedings/2010/nime2010_283.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: croberts2015 - abstract: 'We describe research extending the interactive affordances of textual - code fragments in creative coding environments. In particular we examine the potential - of source code both to display the state of running processes and also to alter - state using means other than traditional text editing. 
In contrast to previous - research that has focused on the inclusion of additional interactive widgets inside - or alongside text editors, our research adds a parsing stage to the runtime evaluation - of code fragments and imparts additional interactive capabilities on the source - code itself. After implementing various techniques in the creative coding environment - Gibber, we evaluate our research through a survey on the various methods of visual - feedback provided by our research. In addition to results quantifying preferences - for certain techniques over others, we found near unanimous support among survey - respondents for including similar techniques in other live coding environments.' - address: 'Baton Rouge, Louisiana, USA' - author: Charles Roberts and Matthew Wright and JoAnn Kuchera-Morin - bibtex: "@inproceedings{croberts2015,\n abstract = {We describe research extending\ - \ the interactive affordances of textual code fragments in creative coding environments.\ - \ In particular we examine the potential of source code both to display the state\ - \ of running processes and also to alter state using means other than traditional\ - \ text editing. In contrast to previous research that has focused on the inclusion\ - \ of additional interactive widgets inside or alongside text editors, our research\ - \ adds a parsing stage to the runtime evaluation of code fragments and imparts\ - \ additional interactive capabilities on the source code itself. After implementing\ - \ various techniques in the creative coding environment Gibber, we evaluate our\ - \ research through a survey on the various methods of visual feedback provided\ - \ by our research. 
In addition to results quantifying preferences for certain\ - \ techniques over others, we found near unanimous support among survey respondents\ - \ for including similar techniques in other live coding environments.},\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Charles Roberts and Matthew Wright\ - \ and JoAnn Kuchera-Morin},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179164},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {126--131},\n publisher = {Louisiana State University},\n title\ - \ = {Beyond Editing: Extended Interaction with Textual Code Fragments},\n url\ - \ = {http://www.nime.org/proceedings/2015/nime2015_310.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/310/0310-file1.mov},\n\ - \ year = {2015}\n}\n" + ID: Frounberg2010 + abstract: 'The paper reports on the development of prototypes of glassinstruments. + The focus has been on developing acousticinstruments specifically designed for + electronic treatment,and where timbral qualities have had priority over pitch.The + paper starts with a brief historical overview of glassinstruments and their artistic + use. Then follows an overviewof the glass blowing process. Finally the musical + use of theinstruments is discussed.' + address: 'Sydney, Australia' + author: 'Frounberg, Ivar and Innervik, Kjell Tore and Jensenius, Alexander R.' + bibtex: "@inproceedings{Frounberg2010,\n abstract = {The paper reports on the development\ + \ of prototypes of glassinstruments. The focus has been on developing acousticinstruments\ + \ specifically designed for electronic treatment,and where timbral qualities have\ + \ had priority over pitch.The paper starts with a brief historical overview of\ + \ glassinstruments and their artistic use. Then follows an overviewof the glass\ + \ blowing process. 
Finally the musical use of theinstruments is discussed.},\n\ + \ address = {Sydney, Australia},\n author = {Frounberg, Ivar and Innervik, Kjell\ + \ Tore and Jensenius, Alexander R.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177773},\n\ + \ issn = {2220-4806},\n keywords = {glass instruments,nime,nime10,performance\ + \ practice},\n pages = {287--290},\n title = {Glass Instruments -- From Pitch\ + \ to Timbre},\n url = {http://www.nime.org/proceedings/2010/nime2010_287.pdf},\n\ + \ year = {2010}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177773 + issn: 2220-4806 + keywords: 'glass instruments,nime,nime10,performance practice' + pages: 287--290 + title: Glass Instruments -- From Pitch to Timbre + url: http://www.nime.org/proceedings/2010/nime2010_287.pdf + year: 2010 + + +- ENTRYTYPE: inproceedings + ID: Kiefer2010 + abstract: 'Input devices for controlling music software can benefit fromexploiting + the use of perceptual-motor skill in interaction.The project described here is + a new musical controller, designed with the aim of enabling intuitive and nuanced + interaction through direct physical manipulation of malleablematerial.The controller + is made from conductive foam. This foamchanges electrical resistance when deformed; + the controllerworks by measuring resistance at multiple points in a single piece + of foam in order to track its shape. These measurements are complex and interdependent + so an echo statenetwork, a form of recurrent neural network, is employed totranslate + the sensor readings into usable control data.A cube shaped controller was built + and evaluated in thecontext of the haptic exploration of sound synthesis parameter + spaces. Eight participants experimented with the controller and were interviewed + about their experiences. 
Thecontroller achieves its aim of enabling intuitive + interaction,but in terms of nuanced interaction, accuracy and repeatability were + issues for some participants. It''s not clear fromthe short evaluation study whether + these issues would improve with practice, a longitudinal study that gives musicians + time to practice and find the creative limitations ofthe controller would help + to evaluate this fully.The evaluation highlighted interesting issues concerningthe + high level nature of malleable control and different approaches to sonic exploration.' + address: 'Sydney, Australia' + author: 'Kiefer, Chris' + bibtex: "@inproceedings{Kiefer2010,\n abstract = {Input devices for controlling\ + \ music software can benefit fromexploiting the use of perceptual-motor skill\ + \ in interaction.The project described here is a new musical controller, designed\ + \ with the aim of enabling intuitive and nuanced interaction through direct physical\ + \ manipulation of malleablematerial.The controller is made from conductive foam.\ + \ This foamchanges electrical resistance when deformed; the controllerworks by\ + \ measuring resistance at multiple points in a single piece of foam in order to\ + \ track its shape. These measurements are complex and interdependent so an echo\ + \ statenetwork, a form of recurrent neural network, is employed totranslate the\ + \ sensor readings into usable control data.A cube shaped controller was built\ + \ and evaluated in thecontext of the haptic exploration of sound synthesis parameter\ + \ spaces. Eight participants experimented with the controller and were interviewed\ + \ about their experiences. Thecontroller achieves its aim of enabling intuitive\ + \ interaction,but in terms of nuanced interaction, accuracy and repeatability\ + \ were issues for some participants. 
It's not clear fromthe short evaluation study\ + \ whether these issues would improve with practice, a longitudinal study that\ + \ gives musicians time to practice and find the creative limitations ofthe controller\ + \ would help to evaluate this fully.The evaluation highlighted interesting issues\ + \ concerningthe high level nature of malleable control and different approaches\ + \ to sonic exploration.},\n address = {Sydney, Australia},\n author = {Kiefer,\ + \ Chris},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177823},\n issn = {2220-4806},\n\ + \ keywords = {Musical Controller, Reservoir Computing, Human Computer Interaction,\ + \ Tangible User Interface, Evaluation},\n pages = {291--296},\n title = {A Malleable\ + \ Interface for Sonic Exploration},\n url = {http://www.nime.org/proceedings/2010/nime2010_291.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179164 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177823 issn: 2220-4806 - month: May - pages: 126--131 - publisher: Louisiana State University - title: 'Beyond Editing: Extended Interaction with Textual Code Fragments' - url: http://www.nime.org/proceedings/2015/nime2015_310.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/310/0310-file1.mov - year: 2015 + keywords: 'Musical Controller, Reservoir Computing, Human Computer Interaction, + Tangible User Interface, Evaluation' + pages: 291--296 + title: A Malleable Interface for Sonic Exploration + url: http://www.nime.org/proceedings/2010/nime2010_291.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: anovello2015 - abstract: 'We present the prototype of a hybrid instrument, which uses two contact - microphones to sonify the gestures of a player on a generic surface, while a gesture - localization algorithm controls the pitch of the sonified 
output depending on - the position of the gestures. To achieve the gesture localization we use a novel - approach combining attack parametrization and template matching across the two - microphone channels. With this method we can correctly localize 80 ± 9 % of the - percussive gestures. The user can assign determined pitches to specific positions - and change the pitch palette in real time. The tactile feedback characteristic - of every surface opens a set of new playing strategies and possibilities specific - to any chosen object. The advantages of such a system are the affordable production, - flexibility of concert location, object-specific musical instruments, portability, - and easy setup.' - address: 'Baton Rouge, Louisiana, USA' - author: Alberto Novello and Antoni Rayzhekov - bibtex: "@inproceedings{anovello2015,\n abstract = {We present the prototype of\ - \ a hybrid instrument, which uses two contact microphones to sonify the gestures\ - \ of a player on a generic surface, while a gesture localization algorithm controls\ - \ the pitch of the sonified output depending on the position of the gestures.\ - \ To achieve the gesture localization we use a novel approach combining attack\ - \ parametrization and template matching across the two microphone channels. With\ - \ this method we can correctly localize 80 $\\pm$ 9 % of the percussive gestures.\ - \ The user can assign determined pitches to specific positions and change the\ - \ pitch palette in real time. The tactile feedback characteristic of every surface\ - \ opens a set of new playing strategies and possibilities specific to any chosen\ - \ object. 
The advantages of such a system are the affordable production, flexibility\ - \ of concert location, object-specific musical instruments, portability, and easy\ - \ setup.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Alberto Novello\ - \ and Antoni Rayzhekov},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179148},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {170--173},\n publisher = {Louisiana State University},\n title\ - \ = {A prototype for pitched gestural sonification of surfaces using two contact\ - \ microphones},\n url = {http://www.nime.org/proceedings/2015/nime2015_311.pdf},\n\ - \ year = {2015}\n}\n" + ID: Zappi2010 + abstract: 'The number of artists who express themselves through music in an unconventional + way is constantly growing. Thistrend strongly depends on the high diffusion of + laptops,which proved to be powerful and flexible musical devices.However laptops + still lack in flexible interface, specificallydesigned for music creation in live + and studio performances.To resolve this issue many controllers have been developed,taking + into account not only the performer''s needs andhabits during music creation, + but also the audience desire tovisually understand how performer''s gestures are + linked tothe way music is made. According to the common need ofadaptable visual + interface to manipulate music, in this paper we present a custom tridimensional + controller, based onOpen Sound Control protocol and completely designed towork + inside Virtual Reality: simple geometrical shapes canbe created to directly control + loop triggering and parametermodification, just using free hand interaction.' 
+ address: 'Sydney, Australia' + author: 'Zappi, Victor and Brogni, Andrea and Caldwell, Darwin' + bibtex: "@inproceedings{Zappi2010,\n abstract = {The number of artists who express\ + \ themselves through music in an unconventional way is constantly growing. Thistrend\ + \ strongly depends on the high diffusion of laptops,which proved to be powerful\ + \ and flexible musical devices.However laptops still lack in flexible interface,\ + \ specificallydesigned for music creation in live and studio performances.To resolve\ + \ this issue many controllers have been developed,taking into account not only\ + \ the performer's needs andhabits during music creation, but also the audience\ + \ desire tovisually understand how performer's gestures are linked tothe way music\ + \ is made. According to the common need ofadaptable visual interface to manipulate\ + \ music, in this paper we present a custom tridimensional controller, based onOpen\ + \ Sound Control protocol and completely designed towork inside Virtual Reality:\ + \ simple geometrical shapes canbe created to directly control loop triggering\ + \ and parametermodification, just using free hand interaction.},\n address = {Sydney,\ + \ Australia},\n author = {Zappi, Victor and Brogni, Andrea and Caldwell, Darwin},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177931},\n issn = {2220-4806},\n\ + \ keywords = {Glove device, Music controller, Virtual Reality, OSC, con- trol\ + \ mapping},\n pages = {297--302},\n title = {OSC Virtual Controller},\n url =\ + \ {http://www.nime.org/proceedings/2010/nime2010_297.pdf},\n year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179148 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177931 issn: 2220-4806 - month: May - pages: 170--173 - publisher: Louisiana State University - title: A 
prototype for pitched gestural sonification of surfaces using two contact - microphones - url: http://www.nime.org/proceedings/2015/nime2015_311.pdf - year: 2015 + keywords: 'Glove device, Music controller, Virtual Reality, OSC, con- trol mapping' + pages: 297--302 + title: OSC Virtual Controller + url: http://www.nime.org/proceedings/2010/nime2010_297.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: oizmirli2015 - abstract: 'This paper presents a framework for the analysis and exploration of performance - space. It enables the user to visualize performances in relation to other performances - of the same piece based on a set of features extracted from audio. A performance - space is formed from a set of performances through spectral analysis, alignment, - dimensionality reduction and visualization. Operation of the system is demonstrated - initially with synthetic MIDI performances and then with a case study of recorded - piano performances.' - address: 'Baton Rouge, Louisiana, USA' - author: Ozgur Izmirli - bibtex: "@inproceedings{oizmirli2015,\n abstract = {This paper presents a framework\ - \ for the analysis and exploration of performance space. It enables the user to\ - \ visualize performances in relation to other performances of the same piece based\ - \ on a set of features extracted from audio. A performance space is formed from\ - \ a set of performances through spectral analysis, alignment, dimensionality reduction\ - \ and visualization. 
Operation of the system is demonstrated initially with synthetic\ - \ MIDI performances and then with a case study of recorded piano performances.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Ozgur Izmirli},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179094},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {99--102},\n\ - \ publisher = {Louisiana State University},\n title = {Framework for Exploration\ - \ of Performance Space},\n url = {http://www.nime.org/proceedings/2015/nime2015_312.pdf},\n\ - \ year = {2015}\n}\n" + ID: Dimitrov2010 + abstract: 'The sound card anno 2010, is an ubiquitous part of almostany personal + computing system; what was once considereda high-end, CD-quality audio fidelity, + is today found in mostcommon sound cards. The increased presence of multichannel + devices, along with the high sampling frequency, makesthe sound card desirable + as a generic interface for acquisition of analog signals in prototyping of sensor-based + musicinterfaces. However, due to the need for coupling capacitorsat a sound card''s + inputs and outputs, the use as a genericsignal interface of a sound card is limited + to signals not carrying information in a constant DC component. Through arevisit + of a card design for the (now defunct) ISA bus, thispaper proposes use of analog + gates for bypassing the DCfiltering input sections, controllable from software + --- therebyallowing for arbitrary choice by the user, if a soundcardinput channel + is to be used as a generic analog-to-digitalsensor interface. Issues regarding + use of obsolete technology and educational aspects are discussed as well.' 
+ address: 'Sydney, Australia' + author: 'Dimitrov, Smilen' + bibtex: "@inproceedings{Dimitrov2010,\n abstract = {The sound card anno 2010, is\ + \ an ubiquitous part of almostany personal computing system; what was once considereda\ + \ high-end, CD-quality audio fidelity, is today found in mostcommon sound cards.\ + \ The increased presence of multichannel devices, along with the high sampling\ + \ frequency, makesthe sound card desirable as a generic interface for acquisition\ + \ of analog signals in prototyping of sensor-based musicinterfaces. However, due\ + \ to the need for coupling capacitorsat a sound card's inputs and outputs, the\ + \ use as a genericsignal interface of a sound card is limited to signals not carrying\ + \ information in a constant DC component. Through arevisit of a card design for\ + \ the (now defunct) ISA bus, thispaper proposes use of analog gates for bypassing\ + \ the DCfiltering input sections, controllable from software --- therebyallowing\ + \ for arbitrary choice by the user, if a soundcardinput channel is to be used\ + \ as a generic analog-to-digitalsensor interface. 
Issues regarding use of obsolete\ + \ technology and educational aspects are discussed as well.},\n address = {Sydney,\ + \ Australia},\n author = {Dimitrov, Smilen},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177755},\n issn = {2220-4806},\n keywords = {dc,isa,nime10,sensors,soundcard},\n\ + \ pages = {303--308},\n title = {Extending the Soundcard for Use with Generic\ + \ {DC} Sensors Demonstrated by Revisiting a Vintage ISA Design},\n url = {http://www.nime.org/proceedings/2010/nime2010_303.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179094 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177755 issn: 2220-4806 - month: May - pages: 99--102 - publisher: Louisiana State University - title: Framework for Exploration of Performance Space - url: http://www.nime.org/proceedings/2015/nime2015_312.pdf - year: 2015 + keywords: dc,isa,nime10,sensors,soundcard + pages: 303--308 + title: Extending the Soundcard for Use with Generic DC Sensors Demonstrated by Revisiting + a Vintage ISA Design + url: http://www.nime.org/proceedings/2010/nime2010_303.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: tbarraclough2015 - abstract: 'This paper presents the iterative design process based upon multiple - rounds of user studies that guided the the design of a novel social music application, - Pyxis Minor. The application was designed based on the concept of democratising - electronic music creation and performance. This required the development to be - based upon user studies to inform and drive the development process in order to - create a novel musical interface that can be enjoyed by users of any prior musicianship - training.' - address: 'Baton Rouge, Louisiana, USA' - author: Timothy J. Barraclough and Dale A. 
Carnegie and Ajay Kapur - bibtex: "@inproceedings{tbarraclough2015,\n abstract = {This paper presents the\ - \ iterative design process based upon multiple rounds of user studies that guided\ - \ the the design of a novel social music application, Pyxis Minor. The application\ - \ was designed based on the concept of democratising electronic music creation\ - \ and performance. This required the development to be based upon user studies\ - \ to inform and drive the development process in order to create a novel musical\ - \ interface that can be enjoyed by users of any prior musicianship training.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {{Timothy J.} Barraclough\ - \ and {Dale A.} Carnegie and Ajay Kapur},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179012},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {289--292},\n publisher = {Louisiana State University},\n title\ - \ = {Musical Instrument Design Process for Mobile Technology},\n url = {http://www.nime.org/proceedings/2015/nime2015_313.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/313/0313-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: LeGroux2010 + abstract: 'Most new digital musical interfaces have evolved upon theintuitive idea + that there is a causality between sonic outputand physical actions. Nevertheless, + the advent of braincomputer interfaces (BCI) now allows us to directly accesssubjective + mental states and express these in the physicalworld without bodily actions. 
In + the context of an interactive and collaborative live performance, we propose to + exploit novel brain-computer technologies to achieve unmediated brain control + over music generation and expression.We introduce a general framework for the + generation, synchronization and modulation of musical material from brainsignal + and describe its use in the realization of Xmotion, amultimodal performance for + a "brain quartet".' + address: 'Sydney, Australia' + author: 'Le Groux, Sylvain and Manzolli, Jonatas and Verschure, Paul F. and Marti + Sanchez and Andre Luvizotto and Anna Mura and Aleksander Valjamae and Christoph + Guger and Robert Prueckl and Ulysses Bernardet' + bibtex: "@inproceedings{LeGroux2010,\n abstract = {Most new digital musical interfaces\ + \ have evolved upon theintuitive idea that there is a causality between sonic\ + \ outputand physical actions. Nevertheless, the advent of braincomputer interfaces\ + \ (BCI) now allows us to directly accesssubjective mental states and express these\ + \ in the physicalworld without bodily actions. In the context of an interactive\ + \ and collaborative live performance, we propose to exploit novel brain-computer\ + \ technologies to achieve unmediated brain control over music generation and expression.We\ + \ introduce a general framework for the generation, synchronization and modulation\ + \ of musical material from brainsignal and describe its use in the realization\ + \ of Xmotion, amultimodal performance for a \"brain quartet\".},\n address = {Sydney,\ + \ Australia},\n author = {Le Groux, Sylvain and Manzolli, Jonatas and Verschure,\ + \ Paul F. 
and Marti Sanchez and Andre Luvizotto and Anna Mura and Aleksander Valjamae\ + \ and Christoph Guger and Robert Prueckl and Ulysses Bernardet},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177831},\n issn = {2220-4806},\n keywords = {Brain-computer\ + \ Interface, Biosignals, Interactive Music System, Collaborative Musical Performance},\n\ + \ pages = {309--314},\n title = {Disembodied and Collaborative Musical Interaction\ + \ in the Multimodal Brain Orchestra},\n url = {http://www.nime.org/proceedings/2010/nime2010_309.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179012 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177831 issn: 2220-4806 - month: May - pages: 289--292 - publisher: Louisiana State University - title: Musical Instrument Design Process for Mobile Technology - url: http://www.nime.org/proceedings/2015/nime2015_313.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/313/0313-file1.mp4 - year: 2015 + keywords: 'Brain-computer Interface, Biosignals, Interactive Music System, Collaborative + Musical Performance' + pages: 309--314 + title: Disembodied and Collaborative Musical Interaction in the Multimodal Brain + Orchestra + url: http://www.nime.org/proceedings/2010/nime2010_309.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: rduindam2015 - abstract: 'Tingle is a new digital music controller that attempts to recapture the - acoustic touch and feel, and also gives new opportunities for expressive play. - Tingle resembles a pin-art toy which has been made interactive through a new sensing - technology, with added haptic feedback and motion control. It pushes back, vibrates, - and warps the sound through the musicians nuanced input. In this article Tingle - will be discussed in combination with CataRT. 
' - address: 'Baton Rouge, Louisiana, USA' - author: Rhys Duindam and Diemo Schwarz and Hans Leeuw - bibtex: "@inproceedings{rduindam2015,\n abstract = {Tingle is a new digital music\ - \ controller that attempts to recapture the acoustic touch and feel, and also\ - \ gives new opportunities for expressive play. Tingle resembles a pin-art toy\ - \ which has been made interactive through a new sensing technology, with added\ - \ haptic feedback and motion control. It pushes back, vibrates, and warps the\ - \ sound through the musicians nuanced input. In this article Tingle will be discussed\ - \ in combination with CataRT. },\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Rhys Duindam and Diemo Schwarz and Hans Leeuw},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179054},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {219--222},\n publisher = {Louisiana\ - \ State University},\n title = {Tingle: A Digital Music Controller Re-Capturing\ - \ the Acoustic Instrument Experience},\n url = {http://www.nime.org/proceedings/2015/nime2015_319.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/319/0319-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Hochenbaum2010a + abstract: 'This paper explores the evolution of collaborative, multi-user, musical + interfaces developed for the Bricktable interactive surface. Two key types of + applications are addressed: user interfaces for artistic installation and interfaces + for musical performance. In describing our software, we provide insight on the + methodologies and practicalities of designing interactive musical systems for + tabletop surfaces. Additionally, subtleties of working with custom-designed tabletop + hardware are addressed. 
' + address: 'Sydney, Australia' + author: 'Hochenbaum, Jordan and Vallis, Owen and Diakopoulos, Dimitri and Murphy, + Jim and Kapur, Ajay' + bibtex: "@inproceedings{Hochenbaum2010a,\n abstract = {This paper explores the evolution\ + \ of collaborative, multi-user, musical interfaces developed for the Bricktable\ + \ interactive surface. Two key types of applications are addressed: user interfaces\ + \ for artistic installation and interfaces for musical performance. In describing\ + \ our software, we provide insight on the methodologies and practicalities of\ + \ designing interactive musical systems for tabletop surfaces. Additionally, subtleties\ + \ of working with custom-designed tabletop hardware are addressed. },\n address\ + \ = {Sydney, Australia},\n author = {Hochenbaum, Jordan and Vallis, Owen and Diakopoulos,\ + \ Dimitri and Murphy, Jim and Kapur, Ajay},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1177807},\n issn = {2220-4806},\n keywords = {Bricktable, Multi-touch\ + \ Interface, Tangible Interface, Generative Music, Music Information Retrieval},\n\ + \ pages = {315--318},\n title = {Designing Expressive Musical Interfaces for Tabletop\ + \ Surfaces},\n url = {http://www.nime.org/proceedings/2010/nime2010_315.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179054 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177807 issn: 2220-4806 - month: May - pages: 219--222 - publisher: Louisiana State University - title: 'Tingle: A Digital Music Controller Re-Capturing the Acoustic Instrument - Experience' - url: http://www.nime.org/proceedings/2015/nime2015_319.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/319/0319-file1.mp4 - year: 2015 + keywords: 'Bricktable, Multi-touch Interface, Tangible Interface, Generative Music, + Music Information 
Retrieval' + pages: 315--318 + title: Designing Expressive Musical Interfaces for Tabletop Surfaces + url: http://www.nime.org/proceedings/2010/nime2010_315.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: sgelineck2015 - abstract: 'This study compares the stage metaphor and the channel strip metaphor - in terms of performance. Traditionally, music mixing consoles employ a channels - strip control metaphor for adjusting parameters such as volume and panning of - each track. An alternative control metaphor, the so-called stage metaphor lets - the user adjust volume and panning by positioning tracks relative to a virtual - listening position. In this study test participants are given the task to adjust - volume and panning of one channel (in mixes consisting of three channels) in order - to replicate a series of simple pre-rendered mixes. They do this using (1) a small - physical mixing controller and (2) using an iPad app, which implements a simple - stage metaphor interface. We measure how accurately they are able to replicate - mixes in terms of volume and panning and how fast they are at doing so. Results - reveal that performance is surprisingly similar and thus we are not able to detect - any significant difference in performance between the two interfaces. Qualitative - data however, suggests that the stage metaphor is largely favoured for its intuitive - interaction --- confirming earlier studies. ' - address: 'Baton Rouge, Louisiana, USA' - author: Steven Gelineck and Dannie Korsgaard and Morten Büchert - bibtex: "@inproceedings{sgelineck2015,\n abstract = {This study compares the stage\ - \ metaphor and the channel strip metaphor in terms of performance. Traditionally,\ - \ music mixing consoles employ a channels strip control metaphor for adjusting\ - \ parameters such as volume and panning of each track. 
An alternative control\ - \ metaphor, the so-called stage metaphor lets the user adjust volume and panning\ - \ by positioning tracks relative to a virtual listening position. In this study\ - \ test participants are given the task to adjust volume and panning of one channel\ - \ (in mixes consisting of three channels) in order to replicate a series of simple\ - \ pre-rendered mixes. They do this using (1) a small physical mixing controller\ - \ and (2) using an iPad app, which implements a simple stage metaphor interface.\ - \ We measure how accurately they are able to replicate mixes in terms of volume\ - \ and panning and how fast they are at doing so. Results reveal that performance\ - \ is surprisingly similar and thus we are not able to detect any significant difference\ - \ in performance between the two interfaces. Qualitative data however, suggests\ - \ that the stage metaphor is largely favoured for its intuitive interaction ---\ - \ confirming earlier studies. },\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Steven Gelineck and Dannie Korsgaard and Morten B\\''uchert},\n booktitle\ + ID: Suiter2010 + abstract: 'This paper introduces the concept of composing expressive music using + the principles of Fuzzy Logic. The paper provides a conceptual model of a musical + work which follows compositional decision making processes. Significant features + of this Fuzzy Logic framework are its inclusiveness through the consideration + of all the many and varied musical details, while also incorporating the imprecision + that characterises musical terminology and discourse. A significant attribute + of my Fuzzy Logic method is that it traces the trajectory of all musical details, + since it is both the individual elements and their combination over time which + is significant to the effectiveness of a musical work in achieving its goals. 
+ The goal of this work is to find a set of elements and rules, which will ultimately + enable the construction of a genralised algorithmic compositional system which + can produce expressive music if so desired. ' + address: 'Sydney, Australia' + author: 'Suiter, Wendy' + bibtex: "@inproceedings{Suiter2010,\n abstract = {This paper introduces the concept\ + \ of composing expressive music using the principles of Fuzzy Logic. The paper\ + \ provides a conceptual model of a musical work which follows compositional decision\ + \ making processes. Significant features of this Fuzzy Logic framework are its\ + \ inclusiveness through the consideration of all the many and varied musical details,\ + \ while also incorporating the imprecision that characterises musical terminology\ + \ and discourse. A significant attribute of my Fuzzy Logic method is that it traces\ + \ the trajectory of all musical details, since it is both the individual elements\ + \ and their combination over time which is significant to the effectiveness of\ + \ a musical work in achieving its goals. The goal of this work is to find a set\ + \ of elements and rules, which will ultimately enable the construction of a genralised\ + \ algorithmic compositional system which can produce expressive music if so desired.\ + \ },\n address = {Sydney, Australia},\n author = {Suiter, Wendy},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179064},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {343--346},\n\ - \ publisher = {Louisiana State University},\n title = {Stage- vs. 
Channel-strip\ - \ Metaphor --- Comparing Performance when Adjusting Volume and Panning of a Single\ - \ Channel in a Stereo Mix},\n url = {http://www.nime.org/proceedings/2015/nime2015_320.pdf},\n\ - \ year = {2015}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1177901},\n issn = {2220-4806},\n keywords\ + \ = {fuzzy logic,music composition,musical expression,nime10},\n pages = {319--322},\n\ + \ title = {Toward Algorithmic Composition of Expression in Music Using Fuzzy Logic},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_319.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179064 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177901 issn: 2220-4806 - month: May - pages: 343--346 - publisher: Louisiana State University - title: Stage- vs. Channel-strip Metaphor --- Comparing Performance when Adjusting - Volume and Panning of a Single Channel in a Stereo Mix - url: http://www.nime.org/proceedings/2015/nime2015_320.pdf - year: 2015 + keywords: 'fuzzy logic,music composition,musical expression,nime10' + pages: 319--322 + title: Toward Algorithmic Composition of Expression in Music Using Fuzzy Logic + url: http://www.nime.org/proceedings/2010/nime2010_319.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: jwu2015 - abstract: 'This paper presents the Tibetan Singing Prayer Wheel, a hand-held, wireless, - sensor-based musical instrument with a human-computer interface that simultaneously - processes vocals and synthesizes sound based on the performer''s hand gestures - with a one-to-many mapping strategy. A physical model simulates the singing bowl, - while a modal reverberator and a delay-and-window effect process the performer''s - vocals. This system is designed for an electroacoustic vocalist interested in - using a solo instrument to achieve performance goals that would normally require - multiple instruments and activities.' 
- address: 'Baton Rouge, Louisiana, USA' - author: J. Cecilia Wu and Yoo Hsiu Yeh and Romain Michon and Nathan Weitzner and - Jonathan Abel and Matthew Wright - bibtex: "@inproceedings{jwu2015,\n abstract = {This paper presents the Tibetan Singing\ - \ Prayer Wheel, a hand-held, wireless, sensor-based musical instrument with a\ - \ human-computer interface that simultaneously processes vocals and synthesizes\ - \ sound based on the performer's hand gestures with a one-to-many mapping strategy.\ - \ A physical model simulates the singing bowl, while a modal reverberator and\ - \ a delay-and-window effect process the performer's vocals. This system is designed\ - \ for an electroacoustic vocalist interested in using a solo instrument to achieve\ - \ performance goals that would normally require multiple instruments and activities.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {{J. Cecilia} Wu and {Yoo\ - \ Hsiu} Yeh and Romain Michon and Nathan Weitzner and Jonathan Abel and Matthew\ - \ Wright},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1179196},\n editor = {Edgar\ - \ Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages =\ - \ {91--94},\n publisher = {Louisiana State University},\n title = {Tibetan Singing\ - \ Prayer Wheel: A Hybrid Musical- Spiritual Instrument Using Gestural Control},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_322.pdf},\n year = {2015}\n\ - }\n" + ID: Beilharz2010 + abstract: 'In this paper we examine a wearable sonification and visualisation display + that uses physical analogue visualisation and digital sonification to convey feedback + about the wearer''s activity and environment. 
Intended to bridge a gap between + art aesthetics, fashionable technologies and informative physical computing, the + user experience evaluation reveals the wearers'' responses and understanding of + a novel medium for wearable expression. The study reveals useful insights for + wearable device design in general and future iterations of this sonification and + visualisation display. ' + address: 'Sydney, Australia' + author: 'Beilharz, Kirsty and Vande Moere, Andrew and Stiel, Barbara and Calo, Claudia + and Tomitsch, Martin and Lombard, Adrian' + bibtex: "@inproceedings{Beilharz2010,\n abstract = {In this paper we examine a wearable\ + \ sonification and visualisation display that uses physical analogue visualisation\ + \ and digital sonification to convey feedback about the wearer's activity and\ + \ environment. Intended to bridge a gap between art aesthetics, fashionable technologies\ + \ and informative physical computing, the user experience evaluation reveals the\ + \ wearers' responses and understanding of a novel medium for wearable expression.\ + \ The study reveals useful insights for wearable device design in general and\ + \ future iterations of this sonification and visualisation display. 
},\n address\ + \ = {Sydney, Australia},\n author = {Beilharz, Kirsty and Vande Moere, Andrew\ + \ and Stiel, Barbara and Calo, Claudia and Tomitsch, Martin and Lombard, Adrian},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177717},\n issn = {2220-4806},\n\ + \ keywords = {Wearable display, sonification, visualisation, design aesthetics,\ + \ physical computing, multimodal expression, bimodal display},\n pages = {323--326},\n\ + \ title = {Expressive Wearable Sonification and Visualisation : Design and Evaluation\ + \ of a Flexible Display},\n url = {http://www.nime.org/proceedings/2010/nime2010_323.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179196 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177717 issn: 2220-4806 - month: May - pages: 91--94 - publisher: Louisiana State University - title: 'Tibetan Singing Prayer Wheel: A Hybrid Musical- Spiritual Instrument Using - Gestural Control' - url: http://www.nime.org/proceedings/2015/nime2015_322.pdf - year: 2015 + keywords: 'Wearable display, sonification, visualisation, design aesthetics, physical + computing, multimodal expression, bimodal display' + pages: 323--326 + title: 'Expressive Wearable Sonification and Visualisation : Design and Evaluation + of a Flexible Display' + url: http://www.nime.org/proceedings/2010/nime2010_323.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: ifranco2015 - abstract: 'The proliferation and easy access to a new breed of ARM-based single-board - computers has promoted an increased usage of these platforms in the creation of - self-contained Digital Music Instruments. These directly incorporate all of the - necessary processing power for tasks such as sensor signal acquisition, control - data processing and audio synthesis. 
They can also run full Linux operating systems, - through which domain-specific languages for audio computing facilitate a low entry - barrier for the community. In computer music the adoption of these computing platforms - will naturally depend on their ability to withstand the demanding computing tasks - associated to high-quality audio synthesis. In the context of computer music practice - there are few reports about this quantification for practical purposes. This paper - aims at presenting the results of performance tests of SuperCollider running on - the BeagleBone Black, a popular mid-tier single-board computer, while performing - commonly used audio synthesis techniques.' - address: 'Baton Rouge, Louisiana, USA' - author: Ivan Franco and Marcelo Wanderley - bibtex: "@inproceedings{ifranco2015,\n abstract = {The proliferation and easy access\ - \ to a new breed of ARM-based single-board computers has promoted an increased\ - \ usage of these platforms in the creation of self-contained Digital Music Instruments.\ - \ These directly incorporate all of the necessary processing power for tasks such\ - \ as sensor signal acquisition, control data processing and audio synthesis. They\ - \ can also run full Linux operating systems, through which domain-specific languages\ - \ for audio computing facilitate a low entry barrier for the community. In computer\ - \ music the adoption of these computing platforms will naturally depend on their\ - \ ability to withstand the demanding computing tasks associated to high-quality\ - \ audio synthesis. In the context of computer music practice there are few reports\ - \ about this quantification for practical purposes. 
This paper aims at presenting\ - \ the results of performance tests of SuperCollider running on the BeagleBone\ - \ Black, a popular mid-tier single-board computer, while performing commonly used\ - \ audio synthesis techniques.},\n address = {Baton Rouge, Louisiana, USA},\n author\ - \ = {Ivan Franco and Marcelo Wanderley},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179062},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {223--226},\n publisher = {Louisiana State University},\n title\ - \ = {Pratical Evaluation of Synthesis Performance on the Beaglebone Black},\n\ - \ url = {http://www.nime.org/proceedings/2015/nime2015_323.pdf},\n year = {2015}\n\ + ID: Nugroho2010 + abstract: 'In this paper, we describe the shaping factors, which simplify and help + us understand the multi-dimensional aspects of designing Wearable Expressions. + These descriptive shaping factors contribute to both the design and user-experience + evaluation of Wearable Expressions. ' + address: 'Sydney, Australia' + author: 'Nugroho, Jeremiah and Beilharz, Kirsty' + bibtex: "@inproceedings{Nugroho2010,\n abstract = {In this paper, we describe the\ + \ shaping factors, which simplify and help us understand the multi-dimensional\ + \ aspects of designing Wearable Expressions. 
These descriptive shaping factors\ + \ contribute to both the design and user-experience evaluation of Wearable Expressions.\ + \ },\n address = {Sydney, Australia},\n author = {Nugroho, Jeremiah and Beilharz,\ + \ Kirsty},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177867},\n issn = {2220-4806},\n\ + \ keywords = {Wearable expressions, body, user-centered design.},\n pages = {327--330},\n\ + \ title = {Understanding and Evaluating User Centred Design in Wearable Expressions},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_327.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179062 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177867 issn: 2220-4806 - month: May - pages: 223--226 - publisher: Louisiana State University - title: Pratical Evaluation of Synthesis Performance on the Beaglebone Black - url: http://www.nime.org/proceedings/2015/nime2015_323.pdf - year: 2015 + keywords: 'Wearable expressions, body, user-centered design.' + pages: 327--330 + title: Understanding and Evaluating User Centred Design in Wearable Expressions + url: http://www.nime.org/proceedings/2010/nime2010_327.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: cbrown2015 - abstract: 'Lambeosaurine hadrosaurs are duck-billed dinosaurs known for their large - head crests, which researchers hypothesize were resonators for vocal calls. This - paper describes the motivation and process of iteratively designing a musical - instrument and interactive sound installation based on imagining the sounds of - this extinct dinosaur. We used scientific research as a starting point to create - a means of sound production and resonator, using a 3D model obtained from Computed - Topology (CT) scans of a Corythosaurus skull and an endocast of its crest and - nasal passages. 
Users give voice to the dinosaur by blowing into a mouthpiece, - exciting a larynx mechanism and resonating the sound through the hadrosaur''s - full-scale nasal cavities and skull. This action allows an embodied glimpse into - an ancient past. Users know the dinosaur through the controlled exhalation of - their breath, how the compression of the lungs leads to a whisper or a roar.' - address: 'Baton Rouge, Louisiana, USA' - author: Courtney Brown and Sharif Razzaque and Garth Paine - bibtex: "@inproceedings{cbrown2015,\n abstract = {Lambeosaurine hadrosaurs are duck-billed\ - \ dinosaurs known for their large head crests, which researchers hypothesize were\ - \ resonators for vocal calls. This paper describes the motivation and process\ - \ of iteratively designing a musical instrument and interactive sound installation\ - \ based on imagining the sounds of this extinct dinosaur. We used scientific research\ - \ as a starting point to create a means of sound production and resonator, using\ - \ a 3D model obtained from Computed Topology (CT) scans of a Corythosaurus skull\ - \ and an endocast of its crest and nasal passages. Users give voice to the dinosaur\ - \ by blowing into a mouthpiece, exciting a larynx mechanism and resonating the\ - \ sound through the hadrosaur's full-scale nasal cavities and skull. This action\ - \ allows an embodied glimpse into an ancient past. Users know the dinosaur through\ - \ the controlled exhalation of their breath, how the compression of the lungs\ - \ leads to a whisper or a roar.},\n address = {Baton Rouge, Louisiana, USA},\n\ - \ author = {Courtney Brown and Sharif Razzaque and Garth Paine},\n booktitle =\ + ID: Park2010 + abstract: 'In this paper, we discuss the musical potential of COMPath --- an online + map based music-making tool --- as a noveland unique interface for interactive + music composition andperformance. COMPath provides an intuitive environmentfor + creative music making by sonification of georeferenceddata. 
Users can generate + musical events with simple andfamiliar actions on an online map interface; a set + of local information is collected along the user-drawn route andthen interpreted + as sounds of various musical instruments.We discuss the musical interpretation + of routes on a map,review the design and implementation of COMPath, andpresent + selected sonification results with focus on mappingstrategies for map-based composition.' + address: 'Sydney, Australia' + author: 'Park, Sihwa and Kim, Seunghun and Lee, Samuel and Yeo, Woon Seung' + bibtex: "@inproceedings{Park2010,\n abstract = {In this paper, we discuss the musical\ + \ potential of COMPath --- an online map based music-making tool --- as a noveland\ + \ unique interface for interactive music composition andperformance. COMPath provides\ + \ an intuitive environmentfor creative music making by sonification of georeferenceddata.\ + \ Users can generate musical events with simple andfamiliar actions on an online\ + \ map interface; a set of local information is collected along the user-drawn\ + \ route andthen interpreted as sounds of various musical instruments.We discuss\ + \ the musical interpretation of routes on a map,review the design and implementation\ + \ of COMPath, andpresent selected sonification results with focus on mappingstrategies\ + \ for map-based composition.},\n address = {Sydney, Australia},\n author = {Park,\ + \ Sihwa and Kim, Seunghun and Lee, Samuel and Yeo, Woon Seung},\n booktitle =\ \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179036},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {5--10},\n publisher = {Louisiana\ - \ State University},\n title = {Rawr! 
A Study in Sonic Skulls: Embodied Natural\ - \ History},\n url = {http://www.nime.org/proceedings/2015/nime2015_325.pdf},\n\ - \ year = {2015}\n}\n" + \ doi = {10.5281/zenodo.1177877},\n issn = {2220-4806},\n keywords = {Musical\ + \ sonification, map interface, online map service, geo- referenced data, composition,\ + \ mashup},\n pages = {331--334},\n title = {Online Map Interface for Creative\ + \ and Interactive},\n url = {http://www.nime.org/proceedings/2010/nime2010_331.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179036 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177877 issn: 2220-4806 - month: May - pages: 5--10 - publisher: Louisiana State University - title: 'Rawr! A Study in Sonic Skulls: Embodied Natural History' - url: http://www.nime.org/proceedings/2015/nime2015_325.pdf - year: 2015 + keywords: 'Musical sonification, map interface, online map service, geo- referenced + data, composition, mashup' + pages: 331--334 + title: Online Map Interface for Creative and Interactive + url: http://www.nime.org/proceedings/2010/nime2010_331.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: kyerkes2015 - abstract: 'We document results from exploring ensemble feedback in loosely-structured - electroacoustic improvisations. A conceptual justification for the explorations - is provided, in addition to discussion of tools and methodologies. Physical configurations - of intra-ensemble feedback networks are documented, along with qualitative analysis - of their effectiveness.' - address: 'Baton Rouge, Louisiana, USA' - author: Muhammad Hafiz Wan Rosli and Karl Yerkes and Matthew Wright and Timothy - Wood and Hannah Wolfe and Charlie Roberts and Anis Haron and Fernando Rincon Estrada - bibtex: "@inproceedings{kyerkes2015,\n abstract = {We document results from exploring\ - \ ensemble feedback in loosely-structured electroacoustic improvisations. 
A conceptual\ - \ justification for the explorations is provided, in addition to discussion of\ - \ tools and methodologies. Physical configurations of intra-ensemble feedback\ - \ networks are documented, along with qualitative analysis of their effectiveness.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {{Muhammad Hafiz Wan} Rosli\ - \ and Karl Yerkes and Matthew Wright and Timothy Wood and Hannah Wolfe and Charlie\ - \ Roberts and Anis Haron and {Fernando Rincon} Estrada},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1179170},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {144--149},\n publisher = {Louisiana\ - \ State University},\n title = {Ensemble Feedback Instruments},\n url = {http://www.nime.org/proceedings/2015/nime2015_329.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/329/0329-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Hadjakos2010 + abstract: Awareness of playing movements can help a piano student to improve technique. + We are developing a piano pedagogy application that uses sensor data of hand and + arm movement and generates feedback to increase movement awareness. This paper + reports on a method for analysis of piano playing movements. The method allows + to judge whether an active movement in a joint has occurred during a given time + interval. This time interval may include one or more touches. The problem is complicated + by the fact that the mechanical interaction between the arm and piano action generates + additional movements that are not under direct control of the player. The analysis + method is able to ignore these movements and can therefore be used to provide + useful feedback. 
+ address: 'Sydney, Australia' + author: 'Hadjakos, Aristotelis and Mühlhäuser, Max' + bibtex: "@inproceedings{Hadjakos2010,\n abstract = {Awareness of playing movements\ + \ can help a piano student to improve technique. We are developing a piano pedagogy\ + \ application that uses sensor data of hand and arm movement and generates feedback\ + \ to increase movement awareness. This paper reports on a method for analysis\ + \ of piano playing movements. The method allows to judge whether an active movement\ + \ in a joint has occurred during a given time interval. This time interval may\ + \ include one or more touches. The problem is complicated by the fact that the\ + \ mechanical interaction between the arm and piano action generates additional\ + \ movements that are not under direct control of the player. The analysis method\ + \ is able to ignore these movements and can therefore be used to provide useful\ + \ feedback.},\n address = {Sydney, Australia},\n author = {Hadjakos, Aristotelis\ + \ and M\\''{u}hlh\\''{a}user, Max},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177791},\n\ + \ issn = {2220-4806},\n keywords = {nime10},\n pages = {335--338},\n title = {Analysis\ + \ of Piano Playing Movements Spanning Multiple Touches},\n url = {http://www.nime.org/proceedings/2010/nime2010_335.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179170 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177791 issn: 2220-4806 - month: May - pages: 144--149 - publisher: Louisiana State University - title: Ensemble Feedback Instruments - url: http://www.nime.org/proceedings/2015/nime2015_329.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/329/0329-file1.mp4 - year: 2015 + keywords: nime10 + pages: 335--338 + title: Analysis of Piano Playing Movements Spanning Multiple 
Touches + url: http://www.nime.org/proceedings/2010/nime2010_335.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: jgregorio2015 - abstract: 'Quality assessment of jazz improvisation is a multi-faceted, high-level - cognitive task routinely performed by educators in university jazz programs and - other discriminating music listeners. In this pilot study, we present a novel - dataset of 88 MIDI jazz piano improvisations with ratings of creativity, technical - proficiency, and aesthetic appeal provided by four jazz experts, and we detail - the design of a feature set that can represent some of the rhythmic, melodic, - harmonic, and other expressive attributes humans recognize as salient in assessment - of performance quality. Inherent subjectivity in these assessments is inevitable, - yet the recognition of performance attributes by which humans perceive quality - has wide applicability to related tasks in the music information retrieval (MIR) - community and jazz pedagogy. Preliminary results indicate that several musiciologically-informed - features of relatively low computational complexity perform reasonably well in - predicting performance quality labels via ordinary least squares regression.' - address: 'Baton Rouge, Louisiana, USA' - author: Jeff Gregorio and David Rosen and Michael Caro and Youngmoo E. 
Kim - bibtex: "@inproceedings{jgregorio2015,\n abstract = {Quality assessment of jazz\ - \ improvisation is a multi-faceted, high-level cognitive task routinely performed\ - \ by educators in university jazz programs and other discriminating music listeners.\ - \ In this pilot study, we present a novel dataset of 88 MIDI jazz piano improvisations\ - \ with ratings of creativity, technical proficiency, and aesthetic appeal provided\ - \ by four jazz experts, and we detail the design of a feature set that can represent\ - \ some of the rhythmic, melodic, harmonic, and other expressive attributes humans\ - \ recognize as salient in assessment of performance quality. Inherent subjectivity\ - \ in these assessments is inevitable, yet the recognition of performance attributes\ - \ by which humans perceive quality has wide applicability to related tasks in\ - \ the music information retrieval (MIR) community and jazz pedagogy. Preliminary\ - \ results indicate that several musiciologically-informed features of relatively\ - \ low computational complexity perform reasonably well in predicting performance\ - \ quality labels via ordinary least squares regression.},\n address = {Baton Rouge,\ - \ Louisiana, USA},\n author = {Jeff Gregorio and David Rosen and Michael Caro\ - \ and {Youngmoo E.} Kim},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179072},\n\ - \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ - \ {May},\n pages = {327--328},\n publisher = {Louisiana State University},\n title\ - \ = {Descriptors for Perception of Quality in Jazz Piano Improvisation},\n url\ - \ = {http://www.nime.org/proceedings/2015/nime2015_331.pdf},\n year = {2015}\n\ - }\n" + ID: Heinz2010 + abstract: 'This paper proposes a design concept for a tangible interface forcollaborative + performances that incorporates two social factorspresent during performance, the + individual 
creation andadaptation of technology and the sharing of it within acommunity. + These factors are identified using the example of alaptop ensemble and then applied + to three existing collaborativeperformance paradigms. Finally relevant technology, + challengesand the current state of our implementation are discussed.' + address: 'Sydney, Australia' + author: 'Heinz, Sebastian and O''Modhrain, Sile' + bibtex: "@inproceedings{Heinz2010,\n abstract = {This paper proposes a design concept\ + \ for a tangible interface forcollaborative performances that incorporates two\ + \ social factorspresent during performance, the individual creation andadaptation\ + \ of technology and the sharing of it within acommunity. These factors are identified\ + \ using the example of alaptop ensemble and then applied to three existing collaborativeperformance\ + \ paradigms. Finally relevant technology, challengesand the current state of our\ + \ implementation are discussed.},\n address = {Sydney, Australia},\n author =\ + \ {Heinz, Sebastian and O'Modhrain, Sile},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177803},\n\ + \ issn = {2220-4806},\n keywords = {Tangible User Interfaces, collaborative performances,\ + \ social factors},\n pages = {339--342},\n title = {Designing a Shareable Musical\ + \ TUI},\n url = {http://www.nime.org/proceedings/2010/nime2010_339.pdf},\n year\ + \ = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179072 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177803 issn: 2220-4806 - month: May - pages: 327--328 - publisher: Louisiana State University - title: Descriptors for Perception of Quality in Jazz Piano Improvisation - url: http://www.nime.org/proceedings/2015/nime2015_331.pdf - year: 2015 + keywords: 'Tangible User Interfaces, collaborative performances, social 
factors' + pages: 339--342 + title: Designing a Shareable Musical TUI + url: http://www.nime.org/proceedings/2010/nime2010_339.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: amarquezborbonb2015 - abstract: 'This paper discusses the particular aesthetic and contextual considerations - emergent from the design process of a site-specific sound art installation, the - Wave Duet. The main point of this paper proposes that beyond the initial motivation - produced by new technologies and their artistic potential, there are many profound - artistic considerations that drive the development and design of a work in unique - ways. Thus, in the case of the Wave Duet, the produced buoys were prompted by - investigating the relationship between sonic objects and natural phenomena. As - a result, the mappings, physical and sound designs directly reflect these issues. - Finally, it is also suggested that during the course of development, unintended - issues may emerge and further inform how the work is perceived in a broader sense. ' - address: 'Baton Rouge, Louisiana, USA' - author: Adnan Marquez-Borbon - bibtex: "@inproceedings{amarquezborbonb2015,\n abstract = {This paper discusses\ - \ the particular aesthetic and contextual considerations emergent from the design\ - \ process of a site-specific sound art installation, the Wave Duet. The main point\ - \ of this paper proposes that beyond the initial motivation produced by new technologies\ - \ and their artistic potential, there are many profound artistic considerations\ - \ that drive the development and design of a work in unique ways. Thus, in the\ - \ case of the Wave Duet, the produced buoys were prompted by investigating the\ - \ relationship between sonic objects and natural phenomena. As a result, the mappings,\ - \ physical and sound designs directly reflect these issues. 
Finally, it is also\ - \ suggested that during the course of development, unintended issues may emerge\ - \ and further inform how the work is perceived in a broader sense. },\n address\ - \ = {Baton Rouge, Louisiana, USA},\n author = {Adnan Marquez-Borbon},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1179126},\n editor = {Edgar Berdahl and\ - \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {335--338},\n\ - \ publisher = {Louisiana State University},\n title = {But Does it Float? Reflections\ - \ on a Sound Art Ecological Intervention},\n url = {http://www.nime.org/proceedings/2015/nime2015_333.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/333/0333-file1.mp4},\n year\ - \ = {2015}\n}\n" + ID: Freed2010 + abstract: We present two complementary approaches for the visualization and interaction + of dimensionally reduced data setsusing hybridization interfaces. Our implementations + privilege syncretic systems allowing one to explore combinations(hybrids) of disparate + elements of a data set through theirplacement in a 2-D space. The first approach + allows for theplacement of data points anywhere on the plane accordingto an anticipated + performance strategy. The contribution(weight) of each data point varies according + to a power function of the distance from the control cursor. The secondapproach + uses constrained vertex colored triangulations ofmanifolds with labels placed + at the vertices of triangulartiles. Weights are computed by barycentric projection + ofthe control cursor position. + address: 'Sydney, Australia' + author: 'Freed, Adrian' + bibtex: "@inproceedings{Freed2010,\n abstract = {We present two complementary approaches\ + \ for the visualization and interaction of dimensionally reduced data setsusing\ + \ hybridization interfaces. 
Our implementations privilege syncretic systems allowing\ + \ one to explore combinations(hybrids) of disparate elements of a data set through\ + \ theirplacement in a 2-D space. The first approach allows for theplacement of\ + \ data points anywhere on the plane accordingto an anticipated performance strategy.\ + \ The contribution(weight) of each data point varies according to a power function\ + \ of the distance from the control cursor. The secondapproach uses constrained\ + \ vertex colored triangulations ofmanifolds with labels placed at the vertices\ + \ of triangulartiles. Weights are computed by barycentric projection ofthe control\ + \ cursor position.},\n address = {Sydney, Australia},\n author = {Freed, Adrian},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177769},\n issn = {2220-4806},\n\ + \ keywords = {Interpolation, dimension reduction, radial basis functions, triangular\ + \ mesh},\n pages = {343--347},\n title = {Visualizations and Interaction Strategies\ + \ for Hybridization Interfaces},\n url = {http://www.nime.org/proceedings/2010/nime2010_343.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179126 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177769 issn: 2220-4806 - month: May - pages: 335--338 - publisher: Louisiana State University - title: 'But Does it Float? 
Reflections on a Sound Art Ecological Intervention' - url: http://www.nime.org/proceedings/2015/nime2015_333.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/333/0333-file1.mp4 - year: 2015 + keywords: 'Interpolation, dimension reduction, radial basis functions, triangular + mesh' + pages: 343--347 + title: Visualizations and Interaction Strategies for Hybridization Interfaces + url: http://www.nime.org/proceedings/2010/nime2010_343.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: mblessing2015 - abstract: 'A LapBox derivative, the Textural Crossfader is a keyboard-based embedded - acoustic instrument, which sits comfortably across the performer''s lap and radiates - sound out of integrated stereo speakers. The performer controls the sound by manipulating - the keys on a pair of mini-keyboard interfaces. A unique one-to-one mapping enables - the performer to precisely crossfade among a set of looped audio wave files, creating - a conveniently portable system for navigating through a complex timbre space. - The axes of the timbre space can be reconfigured by replacing the wave files stored - in the flash memory.' - address: 'Baton Rouge, Louisiana, USA' - author: Matthew Blessing and Edgar Berdahl - bibtex: "@inproceedings{mblessing2015,\n abstract = {A LapBox derivative, the Textural\ - \ Crossfader is a keyboard-based embedded acoustic instrument, which sits comfortably\ - \ across the performer's lap and radiates sound out of integrated stereo speakers.\ - \ The performer controls the sound by manipulating the keys on a pair of mini-keyboard\ - \ interfaces. A unique one-to-one mapping enables the performer to precisely crossfade\ - \ among a set of looped audio wave files, creating a conveniently portable system\ - \ for navigating through a complex timbre space. 
The axes of the timbre space\ - \ can be reconfigured by replacing the wave files stored in the flash memory.},\n\ - \ address = {Baton Rouge, Louisiana, USA},\n author = {Matthew Blessing and Edgar\ - \ Berdahl},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179032},\n editor\ - \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ - \ pages = {180--181},\n publisher = {Louisiana State University},\n title = {Textural\ - \ Crossfader},\n url = {http://www.nime.org/proceedings/2015/nime2015_337.pdf},\n\ - \ urlsuppl1 = {http://www.nime.org/proceedings/2015/337/0337-file1.mp4},\n urlsuppl2\ - \ = {http://www.nime.org/proceedings/2015/337/0337-file2.mov},\n year = {2015}\n\ - }\n" + ID: Woldecke2010 + abstract: In this paper we describe work in progress on generative music generation + on multi-touch devices. Our goal is to create a musical application framework + for multiple casual users that use state of the art multitouch devices. We choose + the metaphor of ants moving on a hexagonal grid to interact with a pitch pattern. + The set of devices used includes a custom built multitouch table and a number + of iPhones to jointly create musical expressions. + address: 'Sydney, Australia' + author: 'Wöldecke, Björn and Geiger, Christian and Reckter, Holger and Schulz, Florian' + bibtex: "@inproceedings{Woldecke2010,\n abstract = {In this paper we describe work\ + \ in progress on generative music generation on multi-touch devices. Our goal\ + \ is to create a musical application framework for multiple casual users that\ + \ use state of the art multitouch devices. We choose the metaphor of ants moving\ + \ on a hexagonal grid to interact with a pitch pattern. 
The set of devices used\ + \ includes a custom built multitouch table and a number of iPhones to jointly\ + \ create musical expressions.},\n address = {Sydney, Australia},\n author = {W\\\ + ''{o}ldecke, Bj\\''{o}rn and Geiger, Christian and Reckter, Holger and Schulz,\ + \ Florian},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177921},\n issn\ + \ = {2220-4806},\n keywords = {Generative music, mobile interfaces, multitouch\ + \ interaction},\n pages = {348--351},\n title = {ANTracks 2.0 --- Generative Music\ + \ on Multiple Multitouch Devices Categories and Subject Descriptors},\n url =\ + \ {http://www.nime.org/proceedings/2010/nime2010_348.pdf},\n year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179032 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177921 issn: 2220-4806 - month: May - pages: 180--181 - publisher: Louisiana State University - title: Textural Crossfader - url: http://www.nime.org/proceedings/2015/nime2015_337.pdf - urlsuppl1: http://www.nime.org/proceedings/2015/337/0337-file1.mp4 - urlsuppl2: http://www.nime.org/proceedings/2015/337/0337-file2.mov - year: 2015 + keywords: 'Generative music, mobile interfaces, multitouch interaction' + pages: 348--351 + title: ANTracks 2.0 --- Generative Music on Multiple Multitouch Devices Categories + and Subject Descriptors + url: http://www.nime.org/proceedings/2010/nime2010_348.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: acabrera2015 - abstract: 'This paper presents the concept and implementation of a decentralized, - server-less and peer-to-peer network for the interchange of musical control interfaces - and data using the OSC protocol. 
Graphical control elements that form the control - interface can be freely edited and exchanged to and from any device in the network, - doing away with the need for a separate server or editing application. All graphical - elements representing the same parameter will have their value synchronized through - the network mechanisms. Some practical considerations surrounding the implementation - of this idea like automatic layout of control, editing interfaces on mobile touch-screen - devices and auto-discovery of network nodes are discussed. Finally, GoOSC, a mobile - application implementing these ideas is presented.' - address: 'Baton Rouge, Louisiana, USA' - author: Andres Cabrera - bibtex: "@inproceedings{acabrera2015,\n abstract = {This paper presents the concept\ - \ and implementation of a decentralized, server-less and peer-to-peer network\ - \ for the interchange of musical control interfaces and data using the OSC protocol.\ - \ Graphical control elements that form the control interface can be freely edited\ - \ and exchanged to and from any device in the network, doing away with the need\ - \ for a separate server or editing application. All graphical elements representing\ - \ the same parameter will have their value synchronized through the network mechanisms.\ - \ Some practical considerations surrounding the implementation of this idea like\ - \ automatic layout of control, editing interfaces on mobile touch-screen devices\ - \ and auto-discovery of network nodes are discussed. 
Finally, GoOSC, a mobile\ - \ application implementing these ideas is presented.},\n address = {Baton Rouge,\ - \ Louisiana, USA},\n author = {Andres Cabrera},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1179040},\n editor = {Edgar Berdahl and Jesse Allison},\n\ - \ issn = {2220-4806},\n month = {May},\n pages = {355--358},\n publisher = {Louisiana\ - \ State University},\n title = {Serverless and Peer-to-peer distributed interfaces\ - \ for musical control},\n url = {http://www.nime.org/proceedings/2015/nime2015_351.pdf},\n\ - \ year = {2015}\n}\n" + ID: Kang2010 + abstract: 'The project Hé(和, harmony) is a sound installation that enables a user + to play music by writing calligraphy. We developed a system where calligraphic + symbols can be detected and converted to a sound composed of pitch, pitch length, + and volume though MIDI and serial communication. The Hé sound installation involves + a micro-controller, photocells, and multiplexers. A DC motor controls the speed + of a spooled paper roll that is capable of setting the music tempo. This paper + presents the design concept and implementation of Hé. We discuss the major research + issues such as using photocells for detecting components of calligraphy like thickness + and location. Hardware and software details are also discussed. Finally, we explore + the potential for further extending musical and visual experience through this + project’s applications and outcomes.' + address: 'Sydney, Australia' + author: 'Kang, Laewoo and Chien, Hsin-Yi' + bibtex: "@inproceedings{Kang2010,\n abstract = {The project H\\'{e}(和, harmony)\ + \ is a sound installation that enables a user to play music by writing calligraphy.\ + \ We developed a system where calligraphic symbols can be detected and converted\ + \ to a sound composed of pitch, pitch length, and volume though MIDI and serial\ + \ communication. 
The H\\'{e} sound installation involves a micro-controller, photocells,\ + \ and multiplexers. A DC motor controls the speed of a spooled paper roll that\ + \ is capable of setting the music tempo. This paper presents the design concept\ + \ and implementation of H\\'{e}. We discuss the major research issues such as\ + \ using photocells for detecting components of calligraphy like thickness and\ + \ location. Hardware and software details are also discussed. Finally, we explore\ + \ the potential for further extending musical and visual experience through this\ + \ project’s applications and outcomes.},\n address = {Sydney, Australia},\n author\ + \ = {Kang, Laewoo and Chien, Hsin-Yi},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177819},\n\ + \ issn = {2220-4806},\n keywords = {Interactive music interface, calligraphy,\ + \ graphical music composing, sonification},\n pages = {352--355},\n title = {H\\\ + '{e} : Calligraphy as a Musical Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_352.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1179040 - editor: Edgar Berdahl and Jesse Allison + doi: 10.5281/zenodo.1177819 issn: 2220-4806 - month: May - pages: 355--358 - publisher: Louisiana State University - title: Serverless and Peer-to-peer distributed interfaces for musical control - url: http://www.nime.org/proceedings/2015/nime2015_351.pdf - year: 2015 + keywords: 'Interactive music interface, calligraphy, graphical music composing, + sonification' + pages: 352--355 + title: 'Hé : Calligraphy as a Musical Interface' + url: http://www.nime.org/proceedings/2010/nime2010_352.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Murphy2012 - abstract: 'A problem with many contemporary musical robotic percussion systems lies - in the fact that solenoids fail to respond 
lin-early to linear increases in input - velocity. This nonlinearity forces performers to individually tailor their compositions - to specific robotic drummers. To address this problem, we introduce a method of - pre-performance calibration using metaheuristic search techniques. A variety of - such techniques are introduced and evaluated and the results of the optimized - solenoid-based percussion systems are presented and compared with output from - non-calibrated systems.' - address: 'Ann Arbor, Michigan' - author: Jim Murphy and Ajay Kapur and Dale Carnegie - bibtex: "@inproceedings{Murphy2012,\n abstract = {A problem with many contemporary\ - \ musical robotic percussion systems lies in the fact that solenoids fail to respond\ - \ lin-early to linear increases in input velocity. This nonlinearity forces performers\ - \ to individually tailor their compositions to specific robotic drummers. To address\ - \ this problem, we introduce a method of pre-performance calibration using metaheuristic\ - \ search techniques. A variety of such techniques are introduced and evaluated\ - \ and the results of the optimized solenoid-based percussion systems are presented\ - \ and compared with output from non-calibrated systems.},\n address = {Ann Arbor,\ - \ Michigan},\n author = {Jim Murphy and Ajay Kapur and Dale Carnegie},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1180545},\n issn = {2220-4806},\n keywords\ - \ = {musical robotics, human-robot interaction},\n publisher = {University of\ - \ Michigan},\n title = {Better Drumming Through Calibration: Techniques for Pre-Performance\ - \ Robotic Percussion Optimization},\n url = {http://www.nime.org/proceedings/2012/nime2012_100.pdf},\n\ - \ year = {2012}\n}\n" + ID: Marier2010 + abstract: 'The sponge is an interface that allows a clear link to beestablished + between gesture and sound in electroacousticmusic. 
The goals in developing the + sponge were to reintroduce the pleasure of playing and to improve the interaction + between the composer/performer and the audience. Ithas been argued that expenditure + of effort or energy is required to obtain expressive interfaces. The sponge favors + anenergy-sound relationship in two ways : 1) it senses acceleration, which is + closely related to energy; and 2) it is madeout of a flexible material (foam) + that requires effort to besqueezed or twisted. Some of the mapping strategies + usedin a performance context with the sponge are discussed.' + address: 'Sydney, Australia' + author: 'Marier, Martin' + bibtex: "@inproceedings{Marier2010,\n abstract = {The sponge is an interface that\ + \ allows a clear link to beestablished between gesture and sound in electroacousticmusic.\ + \ The goals in developing the sponge were to reintroduce the pleasure of playing\ + \ and to improve the interaction between the composer/performer and the audience.\ + \ Ithas been argued that expenditure of effort or energy is required to obtain\ + \ expressive interfaces. The sponge favors anenergy-sound relationship in two\ + \ ways : 1) it senses acceleration, which is closely related to energy; and 2)\ + \ it is madeout of a flexible material (foam) that requires effort to besqueezed\ + \ or twisted. 
Some of the mapping strategies usedin a performance context with\ + \ the sponge are discussed.},\n address = {Sydney, Australia},\n author = {Marier,\ + \ Martin},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177839},\n issn = {2220-4806},\n\ + \ keywords = {Interface, electroacoustic music, performance, expressivity, mapping},\n\ + \ pages = {356--359},\n title = {The Sponge A Flexible Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_356.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180545 + doi: 10.5281/zenodo.1177839 issn: 2220-4806 - keywords: 'musical robotics, human-robot interaction' - publisher: University of Michigan - title: 'Better Drumming Through Calibration: Techniques for Pre-Performance Robotic - Percussion Optimization' - url: http://www.nime.org/proceedings/2012/nime2012_100.pdf - year: 2012 + keywords: 'Interface, electroacoustic music, performance, expressivity, mapping' + pages: 356--359 + title: The Sponge A Flexible Interface + url: http://www.nime.org/proceedings/2010/nime2010_356.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Britt2012 - abstract: 'The EMvibe is an augmented vibraphone that allows for continuous control - over the amplitude and spectrum of in-dividual notes. The system uses electromagnetic - actuators to induce vibrations in the vibraphone''s aluminum tone bars. The tone - bars and the electromagnetic actuators are coupled via neodymium magnets affixed - to each bar. The acoustic properties of the vibraphone allowed us to develop a - very simple, low-cost and powerful amplification solution that requires no heat - sinking. The physical design is meant to be portable and robust, and the system - can be easily installed on any vibraphone without interfering with normal performance - techniques. 
The system supports multiple in-terfacing solutions, affording the - performer and composer the ability to interact with the EMvibe in different ways - depending on the musical context.' - address: 'Ann Arbor, Michigan' - author: N. Cameron Britt and Jeff Snyder and Andrew McPherson - bibtex: "@inproceedings{Britt2012,\n abstract = {The EMvibe is an augmented vibraphone\ - \ that allows for continuous control over the amplitude and spectrum of in-dividual\ - \ notes. The system uses electromagnetic actuators to induce vibrations in the\ - \ vibraphone's aluminum tone bars. The tone bars and the electromagnetic actuators\ - \ are coupled via neodymium magnets affixed to each bar. The acoustic properties\ - \ of the vibraphone allowed us to develop a very simple, low-cost and powerful\ - \ amplification solution that requires no heat sinking. The physical design is\ - \ meant to be portable and robust, and the system can be easily installed on any\ - \ vibraphone without interfering with normal performance techniques. The system\ - \ supports multiple in-terfacing solutions, affording the performer and composer\ - \ the ability to interact with the EMvibe in different ways depending on the musical\ - \ context.},\n address = {Ann Arbor, Michigan},\n author = {N. Cameron Britt and\ - \ Jeff Snyder and Andrew McPherson},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178221},\n\ - \ issn = {2220-4806},\n keywords = {Vibraphone, augmented instrument, electromagnetic\ - \ actuation},\n publisher = {University of Michigan},\n title = {The EMvibe: An\ - \ Electromagnetically Actuated Vibraphone},\n url = {http://www.nime.org/proceedings/2012/nime2012_101.pdf},\n\ - \ year = {2012}\n}\n" + ID: Fyfe2010 + abstract: 'In this paper we discuss SurfaceMusic, a tabletop music system in which + touch gestures are mapped to physical modelsof instruments. 
With physical models, + parametric controlover the sound allows for a more natural interaction between + gesture and sound. We discuss the design and implementation of a simple gestural + interface for interactingwith virtual instruments and a messaging system that + conveys gesture data to the audio system.' + address: 'Sydney, Australia' + author: 'Fyfe, Lawrence and Lynch, Sean and Hull, Carmen and Carpendale, Sheelagh' + bibtex: "@inproceedings{Fyfe2010,\n abstract = {In this paper we discuss SurfaceMusic,\ + \ a tabletop music system in which touch gestures are mapped to physical modelsof\ + \ instruments. With physical models, parametric controlover the sound allows for\ + \ a more natural interaction between gesture and sound. We discuss the design\ + \ and implementation of a simple gestural interface for interactingwith virtual\ + \ instruments and a messaging system that conveys gesture data to the audio system.},\n\ + \ address = {Sydney, Australia},\n author = {Fyfe, Lawrence and Lynch, Sean and\ + \ Hull, Carmen and Carpendale, Sheelagh},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177777},\n\ + \ issn = {2220-4806},\n keywords = {Tabletop, multi-touch, gesture, physical model,\ + \ Open Sound Control.},\n pages = {360--363},\n title = {SurfaceMusic : Mapping\ + \ Virtual Touch-based Instruments to Physical Models},\n url = {http://www.nime.org/proceedings/2010/nime2010_360.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178221 + doi: 10.5281/zenodo.1177777 issn: 2220-4806 - keywords: 'Vibraphone, augmented instrument, electromagnetic actuation' - publisher: University of Michigan - title: 'The EMvibe: An Electromagnetically Actuated Vibraphone' - url: http://www.nime.org/proceedings/2012/nime2012_101.pdf - year: 2012 + keywords: 'Tabletop, multi-touch, gesture, 
physical model, Open Sound Control.' + pages: 360--363 + title: 'SurfaceMusic : Mapping Virtual Touch-based Instruments to Physical Models' + url: http://www.nime.org/proceedings/2010/nime2010_360.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Brent2012 - abstract: 'This paper introduces the Gesturally Extended Piano---an augmented instrument - controller that relies on information drawn from performer motion tracking in - order to control real-time audiovisual processing and synthesis. Specifically, - the positions, heights, velocities, and relative distances and angles of points - on the hands and forearms are followed. Technical details and installation of - the tracking system are covered, as well as strategies for interpreting and mapping - the resulting data in relation to synthesis parameters. Design factors surrounding - mapping choices and the interrelation between mapped parameters are also considered.' - address: 'Ann Arbor, Michigan' - author: William Brent - bibtex: "@inproceedings{Brent2012,\n abstract = {This paper introduces the Gesturally\ - \ Extended Piano---an augmented instrument controller that relies on information\ - \ drawn from performer motion tracking in order to control real-time audiovisual\ - \ processing and synthesis. Specifically, the positions, heights, velocities,\ - \ and relative distances and angles of points on the hands and forearms are followed.\ - \ Technical details and installation of the tracking system are covered, as well\ - \ as strategies for interpreting and mapping the resulting data in relation to\ - \ synthesis parameters. 
Design factors surrounding mapping choices and the interrelation\ - \ between mapped parameters are also considered.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {William Brent},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178219},\n\ - \ issn = {2220-4806},\n keywords = {Augmented instruments, controllers, motion\ - \ tracking, mapping},\n publisher = {University of Michigan},\n title = {The Gesturally\ - \ Extended Piano},\n url = {http://www.nime.org/proceedings/2012/nime2012_102.pdf},\n\ - \ year = {2012}\n}\n" + ID: Martin2010 + abstract: 'Many musical instruments have interfaces which emphasisethe pitch of + the sound produced over other perceptual characteristics, such as its timbre. + This is at odds with the musical developments of the last century. In this paper, + weintroduce a method for replacing the interface of musicalinstruments (both conventional + and unconventional) witha more flexible interface which can present the intrument''savailable + sounds according to variety of different perceptualcharacteristics, such as their + brightness or roughness. Weapply this method to an instrument of our own design + whichcomprises an electro-mechanically controlled electric guitarand amplifier + configured to produce feedback tones.' + address: 'Sydney, Australia' + author: 'Martin, Aengus and Ferguson, Sam and Beilharz, Kirsty' + bibtex: "@inproceedings{Martin2010,\n abstract = {Many musical instruments have\ + \ interfaces which emphasisethe pitch of the sound produced over other perceptual\ + \ characteristics, such as its timbre. This is at odds with the musical developments\ + \ of the last century. 
In this paper, weintroduce a method for replacing the interface\ + \ of musicalinstruments (both conventional and unconventional) witha more flexible\ + \ interface which can present the intrument'savailable sounds according to variety\ + \ of different perceptualcharacteristics, such as their brightness or roughness.\ + \ Weapply this method to an instrument of our own design whichcomprises an electro-mechanically\ + \ controlled electric guitarand amplifier configured to produce feedback tones.},\n\ + \ address = {Sydney, Australia},\n author = {Martin, Aengus and Ferguson, Sam\ + \ and Beilharz, Kirsty},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177841},\n\ + \ issn = {2220-4806},\n keywords = {Concatenative Synthesis, Feedback, Guitar},\n\ + \ pages = {364--367},\n title = {Mechanisms for Controlling Complex Sound Sources\ + \ : Applications to Guitar Feedback Control},\n url = {http://www.nime.org/proceedings/2010/nime2010_364.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178219 + doi: 10.5281/zenodo.1177841 issn: 2220-4806 - keywords: 'Augmented instruments, controllers, motion tracking, mapping' - publisher: University of Michigan - title: The Gesturally Extended Piano - url: http://www.nime.org/proceedings/2012/nime2012_102.pdf - year: 2012 + keywords: 'Concatenative Synthesis, Feedback, Guitar' + pages: 364--367 + title: 'Mechanisms for Controlling Complex Sound Sources : Applications to Guitar + Feedback Control' + url: http://www.nime.org/proceedings/2010/nime2010_364.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Wyse2012 - abstract: 'The upper limit of frequency sensitivity for vibrotactile stimulation - of the fingers and hand is commonly accepted as 1 kHz. 
However, during the course - of our research to develop a full-hand vibrotactile musical communication device - for the hearing-impaired, we repeatedly found evidence suggesting sensitivity - to higher frequencies. Most of the studies on which vibrotactile sensitivity are - based have been conducted using sine tones delivered by point-contact actuators. - The current study was designed to investigate vibrotactile sensitivity using complex - signals and full, open-hand contact with a flat vibrating surface representing - more natural environmental conditions. Sensitivity to frequencies considerably - higher than previously reported was demonstrated for all the signal types tested. - Furthermore, complex signals seem to be more easily detected than sine tones, - especially at low frequencies. Our findings are applicable to a general understanding - of sensory physiology, and to the development of new vibrotactile display devices - for music and other applications.' - address: 'Ann Arbor, Michigan' - author: Lonce Wyse and Suranga Nanayakkara and Paul Seekings and Sim Heng Ong and - Elizabeth Taylor - bibtex: "@inproceedings{Wyse2012,\n abstract = {The upper limit of frequency sensitivity\ - \ for vibrotactile stimulation of the fingers and hand is commonly accepted as\ - \ 1 kHz. However, during the course of our research to develop a full-hand vibrotactile\ - \ musical communication device for the hearing-impaired, we repeatedly found evidence\ - \ suggesting sensitivity to higher frequencies. Most of the studies on which vibrotactile\ - \ sensitivity are based have been conducted using sine tones delivered by point-contact\ - \ actuators. The current study was designed to investigate vibrotactile sensitivity\ - \ using complex signals and full, open-hand contact with a flat vibrating surface\ - \ representing more natural environmental conditions. 
Sensitivity to frequencies\ - \ considerably higher than previously reported was demonstrated for all the signal\ - \ types tested. Furthermore, complex signals seem to be more easily detected than\ - \ sine tones, especially at low frequencies. Our findings are applicable to a\ - \ general understanding of sensory physiology, and to the development of new vibrotactile\ - \ display devices for music and other applications.},\n address = {Ann Arbor,\ - \ Michigan},\n author = {Lonce Wyse and Suranga Nanayakkara and Paul Seekings\ - \ and Sim Heng Ong and Elizabeth Taylor},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178453},\n\ - \ issn = {2220-4806},\n keywords = {Haptic Sensitivity, Hearing-impaired, Vibrotactile\ - \ Threshold},\n publisher = {University of Michigan},\n title = {Palm-area sensitivity\ - \ to vibrotactile stimuli above 1~{kHz}},\n url = {http://www.nime.org/proceedings/2012/nime2012_105.pdf},\n\ - \ year = {2012}\n}\n" + ID: Torresen2010 + abstract: 'This paper presents a comparison of different configurationsof a wireless + sensor system for capturing human motion.The systems consist of sensor elements + which wirelesslytransfers motion data to a receiver element. The sensorelements + consist of a microcontroller, accelerometer(s) anda radio transceiver. The receiver + element consists of a radioreceiver connected through a microcontroller to a computerfor + real time sound synthesis. The wireless transmission between the sensor elements + and the receiver element is basedon the low rate IEEE 802.15.4/ZigBee standard.A + configuration with several accelerometers connected bywire to a wireless sensor + element is compared to using multiple wireless sensor elements with only one accelerometer + ineach. 
The study shows that it would be feasable to connect5-6 accelerometers + in the given setups.Sensor data processing can be done in either the receiverelement + or in the sensor element. For various reasons it canbe reasonable to implement + some sensor data processing inthe sensor element. The paper also looks at how + much timethat typically would be needed for a simple pre-processingtask.' + address: 'Sydney, Australia' + author: 'Torresen, Jim and Renton, Eirik and Jensenius, Alexander R.' + bibtex: "@inproceedings{Torresen2010,\n abstract = {This paper presents a comparison\ + \ of different configurationsof a wireless sensor system for capturing human motion.The\ + \ systems consist of sensor elements which wirelesslytransfers motion data to\ + \ a receiver element. The sensorelements consist of a microcontroller, accelerometer(s)\ + \ anda radio transceiver. The receiver element consists of a radioreceiver connected\ + \ through a microcontroller to a computerfor real time sound synthesis. The wireless\ + \ transmission between the sensor elements and the receiver element is basedon\ + \ the low rate IEEE 802.15.4/ZigBee standard.A configuration with several accelerometers\ + \ connected bywire to a wireless sensor element is compared to using multiple\ + \ wireless sensor elements with only one accelerometer ineach. The study shows\ + \ that it would be feasable to connect5-6 accelerometers in the given setups.Sensor\ + \ data processing can be done in either the receiverelement or in the sensor element.\ + \ For various reasons it canbe reasonable to implement some sensor data processing\ + \ inthe sensor element. 
The paper also looks at how much timethat typically would\ + \ be needed for a simple pre-processingtask.},\n address = {Sydney, Australia},\n\ + \ author = {Torresen, Jim and Renton, Eirik and Jensenius, Alexander R.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177911},\n issn = {2220-4806},\n keywords\ + \ = {wireless communication, ZigBee, microcontroller},\n pages = {368--371},\n\ + \ title = {Wireless Sensor Data Collection based on {ZigBee} Communication},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_368.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178453 + doi: 10.5281/zenodo.1177911 issn: 2220-4806 - keywords: 'Haptic Sensitivity, Hearing-impaired, Vibrotactile Threshold' - publisher: University of Michigan - title: Palm-area sensitivity to vibrotactile stimuli above 1~kHz - url: http://www.nime.org/proceedings/2012/nime2012_105.pdf - year: 2012 + keywords: 'wireless communication, ZigBee, microcontroller' + pages: 368--371 + title: Wireless Sensor Data Collection based on ZigBee Communication + url: http://www.nime.org/proceedings/2010/nime2010_368.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Pugliese2012 - abstract: 'In this paper strategies for augmenting the social dimension of collaborative - music making, in particular in the form of bodily and situated interaction are - presented. Mobile instruments are extended by means of relational descriptors - democratically controlled by the group and mapped to sound parameters. A qualitative - evaluation approach is described and a user test with participants playing in - groups of three conducted. 
The results of the analysis show core-categories such - as familiarity with instrument and situation, shift of focus in activity, family - of interactions and different categories of the experience emerging from the interviews. - Our evaluation shows the suitability of our approach but also the need for iterating - on our design on the basis of the perspectives brought forth by the users. This - latter observation confirms the importance of conducting a thorough interview - session followed by data analysis on the line of grounded theory.' - address: 'Ann Arbor, Michigan' - author: Roberto Pugliese and Koray Tahiroglu and Callum Goddard and James Nesfield - bibtex: "@inproceedings{Pugliese2012,\n abstract = {In this paper strategies for\ - \ augmenting the social dimension of collaborative music making, in particular\ - \ in the form of bodily and situated interaction are presented. Mobile instruments\ - \ are extended by means of relational descriptors democratically controlled by\ - \ the group and mapped to sound parameters. A qualitative evaluation approach\ - \ is described and a user test with participants playing in groups of three conducted.\ - \ The results of the analysis show core-categories such as familiarity with instrument\ - \ and situation, shift of focus in activity, family of interactions and different\ - \ categories of the experience emerging from the interviews. Our evaluation shows\ - \ the suitability of our approach but also the need for iterating on our design\ - \ on the basis of the perspectives brought forth by the users. 
This latter observation\ - \ confirms the importance of conducting a thorough interview session followed\ - \ by data analysis on the line of grounded theory.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Roberto Pugliese and Koray Tahiroglu and Callum Goddard and James\ - \ Nesfield},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180573},\n issn\ - \ = {2220-4806},\n keywords = {Collaborative music making, evaluation methods,\ - \ mobile music, human-human interaction.},\n publisher = {University of Michigan},\n\ - \ title = {Augmenting human-human interaction in mobile group improvisation},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_108.pdf},\n year = {2012}\n\ - }\n" + ID: Jaimovich2010a + abstract: 'The past decade has seen an increase of low-cost technology for sensor + data acquisition, which has been utilized for the expanding field of research + in gesture measurement for music performance. Unfortunately, these devices are + still far from being compatible with the audiovisual recording platforms which + have been used to record synchronized streams of data. In this paper, we describe + a practical solution for simultaneous recording of heterogeneous multimodal signals. + The recording system presented uses MIDI Time Code to time-stamp sensor data and + to synchronize with standard video and audio recording systems. We also present + a set of tools for recording sensor data, as well as a set of analysis tools to + evaluate in realtime the sample rate of different signals, and the overall synchronization + status of the recording system. 
' + address: 'Sydney, Australia' + author: 'Jaimovich, Javier and Knapp, Benjamin' + bibtex: "@inproceedings{Jaimovich2010a,\n abstract = {The past decade has seen an\ + \ increase of low-cost technology for sensor data acquisition, which has been\ + \ utilized for the expanding field of research in gesture measurement for music\ + \ performance. Unfortunately, these devices are still far from being compatible\ + \ with the audiovisual recording platforms which have been used to record synchronized\ + \ streams of data. In this paper, we describe a practical solution for simultaneous\ + \ recording of heterogeneous multimodal signals. The recording system presented\ + \ uses MIDI Time Code to time-stamp sensor data and to synchronize with standard\ + \ video and audio recording systems. We also present a set of tools for recording\ + \ sensor data, as well as a set of analysis tools to evaluate in realtime the\ + \ sample rate of different signals, and the overall synchronization status of\ + \ the recording system. },\n address = {Sydney, Australia},\n author = {Jaimovich,\ + \ Javier and Knapp, Benjamin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177815},\n\ + \ issn = {2220-4806},\n keywords = {Synchronization, Multimodal Signals, Sensor\ + \ Data Acquisition, Signal Recording.},\n pages = {372--374},\n title = {Synchronization\ + \ of Multimodal Recordings for Musical Performance Research},\n url = {http://www.nime.org/proceedings/2010/nime2010_372.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180573 + doi: 10.5281/zenodo.1177815 issn: 2220-4806 - keywords: 'Collaborative music making, evaluation methods, mobile music, human-human - interaction.' 
- publisher: University of Michigan - title: Augmenting human-human interaction in mobile group improvisation - url: http://www.nime.org/proceedings/2012/nime2012_108.pdf - year: 2012 + keywords: 'Synchronization, Multimodal Signals, Sensor Data Acquisition, Signal + Recording.' + pages: 372--374 + title: Synchronization of Multimodal Recordings for Musical Performance Research + url: http://www.nime.org/proceedings/2010/nime2010_372.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Oliver2012 - abstract: 'There is some evidence that structured training can benefit cochlear - implant (CI) users'' appraisal of music as well as their music perception abilities. - There are currently very limited music training resources available for CI users - to explore. This demonstration will introduce delegates to the `Interactive Music - Awareness Program'' (IMAP) for cochlear implant users, which was developed in - response to the need for a client-centered, structured, interactive, creative, - open-ended, educational and challenging music (re)habilitation resource.' - address: 'Ann Arbor, Michigan' - author: Benjamin R. Oliver and Rachel M. van Besouw and David R. Nicholls - bibtex: "@inproceedings{Oliver2012,\n abstract = {There is some evidence that structured\ - \ training can benefit cochlear implant (CI) users' appraisal of music as well\ - \ as their music perception abilities. There are currently very limited music\ - \ training resources available for CI users to explore. This demonstration will\ - \ introduce delegates to the `Interactive Music Awareness Program' (IMAP) for\ - \ cochlear implant users, which was developed in response to the need for a client-centered,\ - \ structured, interactive, creative, open-ended, educational and challenging music\ - \ (re)habilitation resource.},\n address = {Ann Arbor, Michigan},\n author = {Benjamin\ - \ R. Oliver and Rachel M. van Besouw and David R. 
Nicholls},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1180557},\n issn = {2220-4806},\n keywords = {music, cochlear\ - \ implants, perception, rehabilitation, auditory training, interactive learning,\ - \ client-centred software},\n publisher = {University of Michigan},\n title =\ - \ {The `Interactive Music Awareness Program' (IMAP) for Cochlear Implant Users},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_109.pdf},\n year = {2012}\n\ - }\n" + ID: Torre2010 + abstract: 'This paper describes the development of an interactive 3Daudio/visual + and network installation entitled POLLEN.Specifically designed for large computer + Laboratories, theartwork explores the regeneration of those spaces throughthe + creation of a fully immersive multimedia art experience.The paper describes the + technical, aesthetic and educational development of the piece.' + address: 'Sydney, Australia' + author: 'Torre, Giuseppe and O''Leary, Mark and Tuohy, Brian' + bibtex: "@inproceedings{Torre2010,\n abstract = {This paper describes the development\ + \ of an interactive 3Daudio/visual and network installation entitled POLLEN.Specifically\ + \ designed for large computer Laboratories, theartwork explores the regeneration\ + \ of those spaces throughthe creation of a fully immersive multimedia art experience.The\ + \ paper describes the technical, aesthetic and educational development of the\ + \ piece.},\n address = {Sydney, Australia},\n author = {Torre, Giuseppe and O'Leary,\ + \ Mark and Tuohy, Brian},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177909},\n\ + \ issn = {2220-4806},\n keywords = {Interactive, Installation, Network, 3D Physics\ + \ Emulator, Educational Tools, Public Spaces, Computer Labs, Sound Design, Site-Specific\ + \ Art},\n pages = {375--376},\n title = {POLLEN A Multimedia 
Interactive Network\ + \ Installation},\n url = {http://www.nime.org/proceedings/2010/nime2010_375.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180557 + doi: 10.5281/zenodo.1177909 issn: 2220-4806 - keywords: 'music, cochlear implants, perception, rehabilitation, auditory training, - interactive learning, client-centred software' - publisher: University of Michigan - title: The `Interactive Music Awareness Program' (IMAP) for Cochlear Implant Users - url: http://www.nime.org/proceedings/2012/nime2012_109.pdf - year: 2012 + keywords: 'Interactive, Installation, Network, 3D Physics Emulator, Educational + Tools, Public Spaces, Computer Labs, Sound Design, Site-Specific Art' + pages: 375--376 + title: POLLEN A Multimedia Interactive Network Installation + url: http://www.nime.org/proceedings/2010/nime2010_375.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Pineyro2012 - abstract: The Electric Slide Organistrum (Figure 1) is an acoustic stringed instrument - played through a video capture system. The vibration of the instrument string - is generated electro-magnetically and the pitch variation is achieved by movements - carried out by the player in front of a video camera. This instrument results - from integrating an ancient technique for the production of sounds as it is the - vibration of a string on a soundbox and actual human-computer interaction technology - such as motion detection. - address: 'Ann Arbor, Michigan' - author: Martin Piñeyro - bibtex: "@inproceedings{Pineyro2012,\n abstract = {The Electric Slide Organistrum\ - \ (Figure 1) is an acoustic stringed instrument played through a video capture\ - \ system. The vibration of the instrument string is generated electro-magnetically\ - \ and the pitch variation is achieved by movements carried out by the player in\ - \ front of a video camera. 
This instrument results from integrating an ancient\ - \ technique for the production of sounds as it is the vibration of a string on\ - \ a soundbox and actual human-computer interaction technology such as motion detection.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Martin Pi{\\~n}eyro},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1180571},\n issn = {2220-4806},\n keywords\ - \ = {Gestural Interface, eBow, Pickup, Bowed string, Electromagnetic actuation},\n\ - \ publisher = {University of Michigan},\n title = {Electric Slide Organistrum},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_114.pdf},\n year = {2012}\n\ - }\n" + ID: Feng2010 + abstract: ' Irregular Incurve is a MIDI controllable robotic string instrument. + The twelve independent string-units compose the complete musical scale of 12 units. + Each string can be plucked by a motor control guitar pick. A MIDI keyboard is + attached to the instrument and serves as an interface for real-time interactions + between the instrument and the audience. Irregular Incurve can also play preprogrammed + music by itself. This paper presents the design concept and the technical solutions + to realizing the functionality of Irregular Incurve. The future features are also + discussed. ' + address: 'Sydney, Australia' + author: 'Feng, Xiaoyang' + bibtex: "@inproceedings{Feng2010,\n abstract = { Irregular Incurve is a MIDI controllable\ + \ robotic string instrument. The twelve independent string-units compose the complete\ + \ musical scale of 12 units. Each string can be plucked by a motor control guitar\ + \ pick. A MIDI keyboard is attached to the instrument and serves as an interface\ + \ for real-time interactions between the instrument and the audience. Irregular\ + \ Incurve can also play preprogrammed music by itself. 
This paper presents the\ + \ design concept and the technical solutions to realizing the functionality of\ + \ Irregular Incurve. The future features are also discussed. },\n address = {Sydney,\ + \ Australia},\n author = {Feng, Xiaoyang},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177765},\n\ + \ issn = {2220-4806},\n keywords = {NIME, Robotics, Acoustic, Interactive, MIDI,\ + \ Real time Performance, String Instrument, Arduino, Servo, Motor Control},\n\ + \ pages = {377--379},\n title = {Irregular Incurve},\n url = {http://www.nime.org/proceedings/2010/nime2010_377.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180571 + doi: 10.5281/zenodo.1177765 issn: 2220-4806 - keywords: 'Gestural Interface, eBow, Pickup, Bowed string, Electromagnetic actuation' - publisher: University of Michigan - title: Electric Slide Organistrum - url: http://www.nime.org/proceedings/2012/nime2012_114.pdf - year: 2012 + keywords: 'NIME, Robotics, Acoustic, Interactive, MIDI, Real time Performance, String + Instrument, Arduino, Servo, Motor Control' + pages: 377--379 + title: Irregular Incurve + url: http://www.nime.org/proceedings/2010/nime2010_377.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: McPherson2012a - abstract: 'There is growing interest in the field of augmented musical instruments, - which extend traditional acoustic instruments using new sensors and actuators. - Several designs use electromagnetic actuation to induce vibrations in the acoustic - mechanism, manipulating the traditional sound of the in-strument without external - speakers. This paper presents techniques and guidelines for the use of electromagnetic - actuation in augmented instruments, including actuator design and selection, interfacing - with the instrument, and cir-cuits for driving the actuators. 
The material in - this pa-per forms the basis of the magnetic resonator piano, an electromagnetically-augmented - acoustic grand piano now in its second design iteration. In addition to discussing - applications to the piano, this paper aims to provide a toolbox to accelerate - the design of new hybrid acoustic-electronic instruments.' - address: 'Ann Arbor, Michigan' - author: Andrew McPherson - bibtex: "@inproceedings{McPherson2012a,\n abstract = {There is growing interest\ - \ in the field of augmented musical instruments, which extend traditional acoustic\ - \ instruments using new sensors and actuators. Several designs use electromagnetic\ - \ actuation to induce vibrations in the acoustic mechanism, manipulating the traditional\ - \ sound of the in-strument without external speakers. This paper presents techniques\ - \ and guidelines for the use of electromagnetic actuation in augmented instruments,\ - \ including actuator design and selection, interfacing with the instrument, and\ - \ cir-cuits for driving the actuators. The material in this pa-per forms the basis\ - \ of the magnetic resonator piano, an electromagnetically-augmented acoustic grand\ - \ piano now in its second design iteration. 
In addition to discussing applications\ - \ to the piano, this paper aims to provide a toolbox to accelerate the design\ - \ of new hybrid acoustic-electronic instruments.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Andrew McPherson},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180533},\n\ - \ issn = {2220-4806},\n keywords = {augmented instruments, electromagnetic actuation,\ - \ circuit design, hardware},\n publisher = {University of Michigan},\n title =\ - \ {Techniques and Circuits for Electromagnetic Instrument Actuation},\n url =\ - \ {http://www.nime.org/proceedings/2012/nime2012_117.pdf},\n year = {2012}\n}\n" + ID: Miyama2010 + abstract: 'Peacock is a newly designed interface for improvisational performances. + The interface is equipped with thirty-five proximity sensors arranged in five + rows and seven columns. The sensors detect the movements of a performer''s hands + and arms in a three-dimensional space above them. The interface digitizes the + output of the sensors into sets of high precision digital packets, and sends them + to a patch running in Pdextended with a sufficiently high bandwidth for performances + with almost no computational resource consumption in Pd. The precision, speed, + and efficiency of the system enable the sonification of hand gestures in realtime + without the need to attach any physical devices to the performer''s body. This + paper traces the interface''s evolution, discussing relevant technologies, hardware + construction, system design, and input monitoring. ' + address: 'Sydney, Australia' + author: 'Miyama, Chikashi' + bibtex: "@inproceedings{Miyama2010,\n abstract = {Peacock is a newly designed interface\ + \ for improvisational performances. The interface is equipped with thirty-five\ + \ proximity sensors arranged in five rows and seven columns. 
The sensors detect\ + \ the movements of a performer's hands and arms in a three-dimensional space above\ + \ them. The interface digitizes the output of the sensors into sets of high precision\ + \ digital packets, and sends them to a patch running in Pdextended with a sufficiently\ + \ high bandwidth for performances with almost no computational resource consumption\ + \ in Pd. The precision, speed, and efficiency of the system enable the sonification\ + \ of hand gestures in realtime without the need to attach any physical devices\ + \ to the performer's body. This paper traces the interface's evolution, discussing\ + \ relevant technologies, hardware construction, system design, and input monitoring.\ + \ },\n address = {Sydney, Australia},\n author = {Miyama, Chikashi},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177859},\n issn = {2220-4806},\n keywords\ + \ = {Musical interface, Sensor technologies, Computer music, Hardware and software\ + \ design},\n pages = {380--382},\n title = {Peacock : A Non-Haptic {3D} Performance\ + \ Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_380.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180533 + doi: 10.5281/zenodo.1177859 issn: 2220-4806 - keywords: 'augmented instruments, electromagnetic actuation, circuit design, hardware' - publisher: University of Michigan - title: Techniques and Circuits for Electromagnetic Instrument Actuation - url: http://www.nime.org/proceedings/2012/nime2012_117.pdf - year: 2012 + keywords: 'Musical interface, Sensor technologies, Computer music, Hardware and + software design' + pages: 380--382 + title: 'Peacock : A Non-Haptic 3D Performance Interface' + url: http://www.nime.org/proceedings/2010/nime2010_380.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Subramanian2012 - 
abstract: 'This paper describes a recent addition to LOLC, a text-based environment - for collaborative improvisation for laptop ensembles, incorporating a machine - musician that plays along with human performers. The machine musician LOLbot analyses - the patterns created by human performers and the composite music they create as - they are layered in performance. Based on user specified settings, LOLbot chooses - appropriate patterns to play with the ensemble, either to add contrast to the - existing performance or to be coherent with the rhythmic structure of the performance. - The paper describes the background and motivations of the project, outlines the - design of the original LOLC environment and describes the architecture and implementation - of LOLbot.' - address: 'Ann Arbor, Michigan' - author: Sidharth Subramanian and Jason Freeman and Scott McCoid - bibtex: "@inproceedings{Subramanian2012,\n abstract = {This paper describes a recent\ - \ addition to LOLC, a text-based environment for collaborative improvisation for\ - \ laptop ensembles, incorporating a machine musician that plays along with human\ - \ performers. The machine musician LOLbot analyses the patterns created by human\ - \ performers and the composite music they create as they are layered in performance.\ - \ Based on user specified settings, LOLbot chooses appropriate patterns to play\ - \ with the ensemble, either to add contrast to the existing performance or to\ - \ be coherent with the rhythmic structure of the performance. 
The paper describes\ - \ the background and motivations of the project, outlines the design of the original\ - \ LOLC environment and describes the architecture and implementation of LOLbot.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Sidharth Subramanian and Jason\ - \ Freeman and Scott McCoid},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178425},\n\ - \ issn = {2220-4806},\n keywords = {Machine Musicianship, Live Coding, Laptop\ - \ Orchestra},\n publisher = {University of Michigan},\n title = {LOLbot: Machine\ - \ Musicianship in Laptop Ensembles},\n url = {http://www.nime.org/proceedings/2012/nime2012_119.pdf},\n\ - \ year = {2012}\n}\n" + ID: Holm2010 + abstract: 'Music recommendation systems can observe user''s personal preferences + and suggest new tracks from a large online catalog. In the case of context-aware + recommenders, user''s current emotional state plays an important role. One simple + way to visualize emotions and moods is graphical emoticons. In this study, we + researched a high-level mapping between genres, as descriptions of music, and + emoticons, as descriptions of emotions and moods. An online questionnaire with + 87 participants was arranged. Based on the results, we present a list of genres + that could be used as a starting point for making recommendations fitting the + current mood of the user. ' + address: 'Sydney, Australia' + author: 'Holm, Jukka and Holm, Harri and Seppänen, Jarno' + bibtex: "@inproceedings{Holm2010,\n abstract = {Music recommendation systems can\ + \ observe user's personal preferences and suggest new tracks from a large online\ + \ catalog. In the case of context-aware recommenders, user's current emotional\ + \ state plays an important role. One simple way to visualize emotions and moods\ + \ is graphical emoticons. 
In this study, we researched a high-level mapping between\ + \ genres, as descriptions of music, and emoticons, as descriptions of emotions\ + \ and moods. An online questionnaire with 87 participants was arranged. Based\ + \ on the results, we present a list of genres that could be used as a starting\ + \ point for making recommendations fitting the current mood of the user. },\n\ + \ address = {Sydney, Australia},\n author = {Holm, Jukka and Holm, Harri and Sepp\\\ + ''{a}nen, Jarno},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177809},\n\ + \ issn = {2220-4806},\n keywords = {Music, music recommendation, context, facial\ + \ expression, mood, emotion, emoticon, and musical genre.},\n pages = {383--386},\n\ + \ title = {Associating Emoticons with Musical Genres},\n url = {http://www.nime.org/proceedings/2010/nime2010_383.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178425 + doi: 10.5281/zenodo.1177809 issn: 2220-4806 - keywords: 'Machine Musicianship, Live Coding, Laptop Orchestra' - publisher: University of Michigan - title: 'LOLbot: Machine Musicianship in Laptop Ensembles' - url: http://www.nime.org/proceedings/2012/nime2012_119.pdf - year: 2012 + keywords: 'Music, music recommendation, context, facial expression, mood, emotion, + emoticon, and musical genre.' + pages: 383--386 + title: Associating Emoticons with Musical Genres + url: http://www.nime.org/proceedings/2010/nime2010_383.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Schwarz2012 - abstract: 'Corpus-based concatenative synthesis is a fairly recent sound synthesis - method, based on descriptor analysis of any number of existing or live-recorded - sounds, and synthesis by selection of sound segments from the database matching - given sound characteristics. 
It is well described in the literature, but has been - rarely examined for its capacity as a new interface for musical expression. The - interesting outcome of such an examination is that the actual instrument is the - space of sound characteristics, through which the performer navigates with gestures - captured by various input devices. We will take a look at different types of interaction - modes and controllers (positional, inertial, audio analysis) and the gestures - they afford, and provide a critical assessment of their musical and expressive - capabilities, based on several years of musical experience, performing with the - CataRT system for real-time CBCS.' - address: 'Ann Arbor, Michigan' - author: Diemo Schwarz - bibtex: "@inproceedings{Schwarz2012,\n abstract = {Corpus-based concatenative synthesis\ - \ is a fairly recent sound synthesis method, based on descriptor analysis of any\ - \ number of existing or live-recorded sounds, and synthesis by selection of sound\ - \ segments from the database matching given sound characteristics. It is well\ - \ described in the literature, but has been rarely examined for its capacity as\ - \ a new interface for musical expression. 
The interesting outcome of such an examination\ - \ is that the actual instrument is the space of sound characteristics, through\ - \ which the performer navigates with gestures captured by various input devices.\ - \ We will take a look at different types of interaction modes and controllers\ - \ (positional, inertial, audio analysis) and the gestures they afford, and provide\ - \ a critical assessment of their musical and expressive capabilities, based on\ - \ several years of musical experience, performing with the CataRT system for real-time\ - \ CBCS.},\n address = {Ann Arbor, Michigan},\n author = {Diemo Schwarz},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1180593},\n issn = {2220-4806},\n keywords\ - \ = {CataRT, corpus-based concatenative synthesis, gesture},\n publisher = {University\ - \ of Michigan},\n title = {The Sound Space as Musical Instrument: Playing Corpus-Based\ - \ Concatenative Synthesis},\n url = {http://www.nime.org/proceedings/2012/nime2012_120.pdf},\n\ - \ year = {2012}\n}\n" + ID: Nagashima2010 + abstract: This paper is a report on the development of a new musical instrument + in which the main concept is "Untouchable". The key concept of this instrument + is "sound generation by body gesture (both hands)" and "sound generation by kneading + with hands". The new composition project had completed as the premiere of a new + work "controllable untouchableness" with this new instrument in December 2009. + address: 'Sydney, Australia' + author: 'Nagashima, Yoichi' + bibtex: "@inproceedings{Nagashima2010,\n abstract = {This paper is a report on the\ + \ development of a new musical instrument in which the main concept is \"Untouchable\"\ + . The key concept of this instrument is \"sound generation by body gesture (both\ + \ hands)\" and \"sound generation by kneading with hands\". 
The new composition\ + \ project had completed as the premiere of a new work \"controllable untouchableness\"\ + \ with this new instrument in December 2009.},\n address = {Sydney, Australia},\n\ + \ author = {Nagashima, Yoichi},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177865},\n\ + \ issn = {2220-4806},\n keywords = {Theremin, untouchable, distance sensor, Propeller\ + \ processor},\n pages = {387--390},\n title = {Untouchable Instrument \"Peller-Min\"\ + },\n url = {http://www.nime.org/proceedings/2010/nime2010_387.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180593 + doi: 10.5281/zenodo.1177865 issn: 2220-4806 - keywords: 'CataRT, corpus-based concatenative synthesis, gesture' - publisher: University of Michigan - title: 'The Sound Space as Musical Instrument: Playing Corpus-Based Concatenative - Synthesis' - url: http://www.nime.org/proceedings/2012/nime2012_120.pdf - year: 2012 + keywords: 'Theremin, untouchable, distance sensor, Propeller processor' + pages: 387--390 + title: Untouchable Instrument "Peller-Min" + url: http://www.nime.org/proceedings/2010/nime2010_387.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Hansen2012 - abstract: 'This paper presents the results of user interaction with two explorative - music environments (sound system A and B) that were inspired from the Banda Linda - music tradition in two different ways. The sound systems adapted to how a team - of two players improvised and made a melody together in an interleaved fashion: - Systems A and B used a fuzzy logic algorithm and pattern recognition to respond - with modifications of a background rhythms. In an experiment with a pen tablet - interface as the music instrument, users aged 10-13 were to tap tones and continue - each other''s melody. 
The sound systems rewarded users sonically, if they managed - to add tones to their mutual melody in a rapid turn taking manner with rhythmical - patterns. Videos of experiment sessions show that user teams contributed to a - melody in ways that resemble conversation. Interaction data show that each sound - system made player teams play in different ways, but players in general had a - hard time adjusting to a non-Western music tradition. The paper concludes with - a comparison and evaluation of the two sound systems. Finally it proposes a new - approach to the design of collaborative and shared music environments that is - based on ''''listening applications''''.' - address: 'Ann Arbor, Michigan' - author: Anne-Marie Skriver Hansen and Hans Jørgen Andersen and Pirkko Raudaskoski - bibtex: "@inproceedings{Hansen2012,\n abstract = {This paper presents the results\ - \ of user interaction with two explorative music environments (sound system A\ - \ and B) that were inspired from the Banda Linda music tradition in two different\ - \ ways. The sound systems adapted to how a team of two players improvised and\ - \ made a melody together in an interleaved fashion: Systems A and B used a fuzzy\ - \ logic algorithm and pattern recognition to respond with modifications of a background\ - \ rhythms. In an experiment with a pen tablet interface as the music instrument,\ - \ users aged 10-13 were to tap tones and continue each other's melody. The sound\ - \ systems rewarded users sonically, if they managed to add tones to their mutual\ - \ melody in a rapid turn taking manner with rhythmical patterns. Videos of experiment\ - \ sessions show that user teams contributed to a melody in ways that resemble\ - \ conversation. Interaction data show that each sound system made player teams\ - \ play in different ways, but players in general had a hard time adjusting to\ - \ a non-Western music tradition. 
The paper concludes with a comparison and evaluation\ - \ of the two sound systems. Finally it proposes a new approach to the design of\ - \ collaborative and shared music environments that is based on ''listening applications''.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Anne-Marie Skriver Hansen and Hans\ - \ J{\\o}rgen Andersen and Pirkko Raudaskoski},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178275},\n issn = {2220-4806},\n keywords = {Music improvisation,\ - \ novices, social learning, interaction studies, interaction design.},\n publisher\ - \ = {University of Michigan},\n title = {Two Shared Rapid Turn Taking Sound Interfaces\ - \ for Novices},\n url = {http://www.nime.org/proceedings/2012/nime2012_123.pdf},\n\ - \ year = {2012}\n}\n" + ID: Jaimovich2010 + abstract: 'This paper describes the design, implementation and outcome of Ground + Me!, an interactive sound installation set up in the Sonic Lab of the Sonic Arts + Research Centre. The site-specific interactive installation consists of multiple + copper poles hanging from the Sonic Lab''s ceiling panels, which trigger samples + of electricity sounds when grounded through the visitor''s'' body to the space''s + metallic floor. ' + address: 'Sydney, Australia' + author: 'Jaimovich, Javier' + bibtex: "@inproceedings{Jaimovich2010,\n abstract = {This paper describes the design,\ + \ implementation and outcome of Ground Me!, an interactive sound installation\ + \ set up in the Sonic Lab of the Sonic Arts Research Centre. The site-specific\ + \ interactive installation consists of multiple copper poles hanging from the\ + \ Sonic Lab's ceiling panels, which trigger samples of electricity sounds when\ + \ grounded through the visitor's' body to the space's metallic floor. 
},\n address\ + \ = {Sydney, Australia},\n author = {Jaimovich, Javier},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177813},\n issn = {2220-4806},\n keywords = {Interactive\ + \ sound installation, body impedance, skin conductivity, site-specific sound installation,\ + \ human network, Sonic Lab, Arduino.},\n pages = {391--394},\n title = {Ground\ + \ Me ! An Interactive Sound Art Installation},\n url = {http://www.nime.org/proceedings/2010/nime2010_391.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178275 + doi: 10.5281/zenodo.1177813 issn: 2220-4806 - keywords: 'Music improvisation, novices, social learning, interaction studies, interaction - design.' - publisher: University of Michigan - title: Two Shared Rapid Turn Taking Sound Interfaces for Novices - url: http://www.nime.org/proceedings/2012/nime2012_123.pdf - year: 2012 + keywords: 'Interactive sound installation, body impedance, skin conductivity, site-specific + sound installation, human network, Sonic Lab, Arduino.' + pages: 391--394 + title: Ground Me ! An Interactive Sound Art Installation + url: http://www.nime.org/proceedings/2010/nime2010_391.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Shahar2012 - abstract: "SoundStrand is a tangible music composition tool. It demonstrates a paradigm\ - \ developed to enable music composition through the use of tangible interfaces.\ - \ This paradigm attempts to overcome the contrast between the relatively small\ - \ of amount degrees of freedom usually demonstrated by tangible interfaces and\ - \ the vast number of possibilities that musical composition presents.\nSoundStrand\ - \ is comprised of a set of physical objects called cells, each representing a\ - \ musical phrase. Cells can be sequentially connected to each other to create\ - \ a musical theme. 
Cells can also be physically manipulated to access a wide range\ - \ of melodic, rhythmic and harmonic variations. The SoundStrand software assures\ - \ that as the cells are manipulated, the melodic flow, harmonic transitions and\ - \ rhythmic patterns of the theme remain musically plausible while preserving the\ - \ user's intentions." - address: 'Ann Arbor, Michigan' - author: Eyal Shahar - bibtex: "@inproceedings{Shahar2012,\n abstract = {SoundStrand is a tangible music\ - \ composition tool. It demonstrates a paradigm developed to enable music composition\ - \ through the use of tangible interfaces. This paradigm attempts to overcome the\ - \ contrast between the relatively small of amount degrees of freedom usually demonstrated\ - \ by tangible interfaces and the vast number of possibilities that musical composition\ - \ presents.\nSoundStrand is comprised of a set of physical objects called cells,\ - \ each representing a musical phrase. Cells can be sequentially connected to each\ - \ other to create a musical theme. Cells can also be physically manipulated to\ - \ access a wide range of melodic, rhythmic and harmonic variations. 
The SoundStrand\ - \ software assures that as the cells are manipulated, the melodic flow, harmonic\ - \ transitions and rhythmic patterns of the theme remain musically plausible while\ - \ preserving the user's intentions.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Eyal Shahar},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180595},\n\ - \ issn = {2220-4806},\n keywords = {Tangible, algorithmic, composition, computer\ - \ assisted},\n publisher = {University of Michigan},\n title = {SoundStrand: a\ - \ Tangible Interface for Composing Music with Limited Degrees of Freedom},\n url\ - \ = {http://www.nime.org/proceedings/2012/nime2012_125.pdf},\n year = {2012}\n\ - }\n" + ID: Savage2010 + abstract: 'This paper presents Mmmmm; a Multimodal Mobile Music Mixer that provides + DJs a new interface for mixing musicon the Nokia N900 phones. Mmmmm presents a + novel way for DJ to become more interactive with their audience andvise versa. + The software developed for the N900 mobilephone utilizes the phones built-in accelerometer + sensor andBluetooth audio streaming capabilities to mix and apply effects to music + using hand gestures and have the mixed audiostream to Bluetooth speakers, which + allows the DJ to moveabout the environment and get familiarized with their audience, + turning the experience of DJing into an interactiveand audience engaging process. + Mmmmm is designed so that the DJ can utilize handgestures and haptic feedback + to help them perform the various tasks involved in DJing (mixing, applying effects, + andetc). This allows the DJ to focus on the crowd, thus providing the DJ a better + intuition of what kind of music ormusical mixing style the audience is more likely + to enjoyand engage with. 
Additionally, Mmmmm has an Ambient Tempo Detection mode + in which the phones camera is utilized to detect the amount of movement in the + environment and suggest to the DJ the tempo of music that should be played. This + mode utilizes frame differencing and pixelchange overtime to get a sense of how + fast the environmentis changing, loosely correlating to how fast the audience + isdancing or the lights are flashing in the scene. By determining the ambient + tempo of the environment the DJ canget a better sense for the type of music that + would fit bestfor their venue.Mmmmm helps novice DJs achieve a better music repertoire + by allowing them to interact with their audience andreceive direct feedback on + their performance. The DJ canchoose to utilize these modes of interaction and + performance or utilize traditional DJ controls using MmmmmsN900 touch screen based + graphics user interface.' + address: 'Sydney, Australia' + author: 'Savage, Norma S. and Ali, Syed R. and Chavez, Norma E.' + bibtex: "@inproceedings{Savage2010,\n abstract = {This paper presents Mmmmm; a Multimodal\ + \ Mobile Music Mixer that provides DJs a new interface for mixing musicon the\ + \ Nokia N900 phones. Mmmmm presents a novel way for DJ to become more interactive\ + \ with their audience andvise versa. The software developed for the N900 mobilephone\ + \ utilizes the phones built-in accelerometer sensor andBluetooth audio streaming\ + \ capabilities to mix and apply effects to music using hand gestures and have\ + \ the mixed audiostream to Bluetooth speakers, which allows the DJ to moveabout\ + \ the environment and get familiarized with their audience, turning the experience\ + \ of DJing into an interactiveand audience engaging process. Mmmmm is designed\ + \ so that the DJ can utilize handgestures and haptic feedback to help them perform\ + \ the various tasks involved in DJing (mixing, applying effects, andetc). 
This\ + \ allows the DJ to focus on the crowd, thus providing the DJ a better intuition\ + \ of what kind of music ormusical mixing style the audience is more likely to\ + \ enjoyand engage with. Additionally, Mmmmm has an Ambient Tempo Detection mode\ + \ in which the phones camera is utilized to detect the amount of movement in the\ + \ environment and suggest to the DJ the tempo of music that should be played.\ + \ This mode utilizes frame differencing and pixelchange overtime to get a sense\ + \ of how fast the environmentis changing, loosely correlating to how fast the\ + \ audience isdancing or the lights are flashing in the scene. By determining the\ + \ ambient tempo of the environment the DJ canget a better sense for the type of\ + \ music that would fit bestfor their venue.Mmmmm helps novice DJs achieve a better\ + \ music repertoire by allowing them to interact with their audience andreceive\ + \ direct feedback on their performance. The DJ canchoose to utilize these modes\ + \ of interaction and performance or utilize traditional DJ controls using MmmmmsN900\ + \ touch screen based graphics user interface.},\n address = {Sydney, Australia},\n\ + \ author = {Savage, Norma S. and Ali, Syed R. 
and Chavez, Norma E.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1177887},\n issn = {2220-4806},\n keywords\ + \ = {Multi-modal, interaction, music, mixer, mobile, interactive, DJ, smart phones,\ + \ Nokia, n900, touch screen, accelerometer, phone, audience},\n pages = {395--398},\n\ + \ title = {Mmmmm: A Multi-modal Mobile Music Mixer},\n url = {http://www.nime.org/proceedings/2010/nime2010_395.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180595 + doi: 10.5281/zenodo.1177887 issn: 2220-4806 - keywords: 'Tangible, algorithmic, composition, computer assisted' - publisher: University of Michigan - title: 'SoundStrand: a Tangible Interface for Composing Music with Limited Degrees - of Freedom' - url: http://www.nime.org/proceedings/2012/nime2012_125.pdf - year: 2012 + keywords: 'Multi-modal, interaction, music, mixer, mobile, interactive, DJ, smart + phones, Nokia, n900, touch screen, accelerometer, phone, audience' + pages: 395--398 + title: 'Mmmmm: A Multi-modal Mobile Music Mixer' + url: http://www.nime.org/proceedings/2010/nime2010_395.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Weitzner2012 - abstract: 'massMobile is a client-server system for mass audience participation - in live performances using smartphones. It was designed to flexibly adapt to a - variety of participatory performance needs and to a variety of performance venues. - It allows for real time bi-directional communication between performers and audiences - utilizing existing wireless 3G, 4G, or WiFi networks. In this paper, we discuss - the goals, design, and implementation of the framework, and we describe several - projects realized with massMobile.' 
- address: 'Ann Arbor, Michigan' - author: Nathan Weitzner and Jason Freeman and Stephen Garrett and Yan-Ling Chen - bibtex: "@inproceedings{Weitzner2012,\n abstract = {massMobile is a client-server\ - \ system for mass audience participation in live performances using smartphones.\ - \ It was designed to flexibly adapt to a variety of participatory performance\ - \ needs and to a variety of performance venues. It allows for real time bi-directional\ - \ communication between performers and audiences utilizing existing wireless 3G,\ - \ 4G, or WiFi networks. In this paper, we discuss the goals, design, and implementation\ - \ of the framework, and we describe several projects realized with massMobile.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Nathan Weitzner and Jason Freeman\ - \ and Stephen Garrett and Yan-Ling Chen},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178449},\n\ - \ issn = {2220-4806},\n keywords = {audience participation, network music, smartphone,\ - \ performance, mobile},\n publisher = {University of Michigan},\n title = {massMobile\ - \ -an Audience Participation Framework},\n url = {http://www.nime.org/proceedings/2012/nime2012_128.pdf},\n\ - \ year = {2012}\n}\n" + ID: Tsai2010 + abstract: 'With the decreasing audience of classical music performance, this research + aims to develop a performance-enhancement system, called AIDA, to help classical + performers better communicating with their audiences. With three procedures Input-Processing-Output, + AIDA system can sense and analyze the body information of performers and further + reflect it onto the responsive skin. Thus abstract and intangible emotional expressions + of performers are transformed into tangible and concrete visual elements, which + clearly facilitating the audiences'' threshold for music appreciation. 
' + address: 'Sydney, Australia' + author: 'Tsai, Chih-Chieh and Liu, Cha-Lin and Chang, Teng-Wen' + bibtex: "@inproceedings{Tsai2010,\n abstract = {With the decreasing audience of\ + \ classical music performance, this research aims to develop a performance-enhancement\ + \ system, called AIDA, to help classical performers better communicating with\ + \ their audiences. With three procedures Input-Processing-Output, AIDA system\ + \ can sense and analyze the body information of performers and further reflect\ + \ it onto the responsive skin. Thus abstract and intangible emotional expressions\ + \ of performers are transformed into tangible and concrete visual elements, which\ + \ clearly facilitating the audiences' threshold for music appreciation. },\n address\ + \ = {Sydney, Australia},\n author = {Tsai, Chih-Chieh and Liu, Cha-Lin and Chang,\ + \ Teng-Wen},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177915},\n issn\ + \ = {2220-4806},\n keywords = {Interactive Performance, Ambient Environment, Responsive\ + \ Skin, Music performance.},\n pages = {399--402},\n title = {An Interactive Responsive\ + \ Skin for Music},\n url = {http://www.nime.org/proceedings/2010/nime2010_399.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178449 + doi: 10.5281/zenodo.1177915 issn: 2220-4806 - keywords: 'audience participation, network music, smartphone, performance, mobile' - publisher: University of Michigan - title: massMobile -an Audience Participation Framework - url: http://www.nime.org/proceedings/2012/nime2012_128.pdf - year: 2012 + keywords: 'Interactive Performance, Ambient Environment, Responsive Skin, Music + performance.' 
+ pages: 399--402 + title: An Interactive Responsive Skin for Music + url: http://www.nime.org/proceedings/2010/nime2010_399.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Henson2012 - abstract: 'This paper introduces the concept of Kugelschwung, a digital musical - instrument centrally based around the use of pendulums and lasers to create unique - and highly interactive electronic ambient soundscapes. Here, we explore the underlying - design and physical construction of the instrument, as well as its implementation - and feasibility as an instrument in the real world. To conclude, we outline potential - expansions to the instrument, describing how its range of applications can be - extended to accommodate a variety of musical styles.' - address: 'Ann Arbor, Michigan' - author: Jamie Henson and Benjamin Collins and Alexander Giles and Kathryn Webb and - Matthew Livingston and Thomas Mortensson - bibtex: "@inproceedings{Henson2012,\n abstract = {This paper introduces the concept\ - \ of Kugelschwung, a digital musical instrument centrally based around the use\ - \ of pendulums and lasers to create unique and highly interactive electronic ambient\ - \ soundscapes. Here, we explore the underlying design and physical construction\ - \ of the instrument, as well as its implementation and feasibility as an instrument\ - \ in the real world. 
To conclude, we outline potential expansions to the instrument,\ - \ describing how its range of applications can be extended to accommodate a variety\ - \ of musical styles.},\n address = {Ann Arbor, Michigan},\n author = {Jamie Henson\ - \ and Benjamin Collins and Alexander Giles and Kathryn Webb and Matthew Livingston\ - \ and Thomas Mortensson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178285},\n\ - \ issn = {2220-4806},\n keywords = {laser, pendulums, instrument design, electronic,\ - \ sampler, soundscape, expressive performance},\n publisher = {University of Michigan},\n\ - \ title = {Kugelschwung -a Pendulum-based Musical Instrument},\n url = {http://www.nime.org/proceedings/2012/nime2012_131.pdf},\n\ - \ year = {2012}\n}\n" + ID: BryanKinns2010 + abstract: 'In this paper we outline the emerging field of Interactional Sound and + Music which concerns itself with multi-person technologically mediated interactions + primarily using audio. We present several examples of interactive systems in our + group, and reflect on how they were designed and evaluated. Evaluation techniques + for collective, performative, and task oriented activities are outlined and compared. + We emphasise the importance of designing for awareness in these systems, and provide + examples of different awareness mechanisms. ' + address: 'Sydney, Australia' + author: 'Bryan-Kinns, Nick and Fencott, Robin and Metatla, Oussama and Nabavian, + Shahin and Sheridan, Jennifer G.' + bibtex: "@inproceedings{BryanKinns2010,\n abstract = {In this paper we outline the\ + \ emerging field of Interactional Sound and Music which concerns itself with multi-person\ + \ technologically mediated interactions primarily using audio. We present several\ + \ examples of interactive systems in our group, and reflect on how they were designed\ + \ and evaluated. 
Evaluation techniques for collective, performative, and task\ + \ oriented activities are outlined and compared. We emphasise the importance of\ + \ designing for awareness in these systems, and provide examples of different\ + \ awareness mechanisms. },\n address = {Sydney, Australia},\n author = {Bryan-Kinns,\ + \ Nick and Fencott, Robin and Metatla, Oussama and Nabavian, Shahin and Sheridan,\ + \ Jennifer G.},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177727},\n\ + \ issn = {2220-4806},\n keywords = {Interactional, sound, music, mutual engagement,\ + \ improvisation, composition, collaboration, awareness.},\n pages = {403--406},\n\ + \ title = {Interactional Sound and Music : Listening to CSCW, Sonification, and\ + \ Sound Art},\n url = {http://www.nime.org/proceedings/2010/nime2010_403.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178285 + doi: 10.5281/zenodo.1177727 issn: 2220-4806 - keywords: 'laser, pendulums, instrument design, electronic, sampler, soundscape, - expressive performance' - publisher: University of Michigan - title: Kugelschwung -a Pendulum-based Musical Instrument - url: http://www.nime.org/proceedings/2012/nime2012_131.pdf - year: 2012 + keywords: 'Interactional, sound, music, mutual engagement, improvisation, composition, + collaboration, awareness.' + pages: 403--406 + title: 'Interactional Sound and Music : Listening to CSCW, Sonification, and Sound + Art' + url: http://www.nime.org/proceedings/2010/nime2010_403.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: McGlynn2012 - abstract: 'This paper contends that the development of expressive performance interfaces - using multi-touch technology has been hindered by an over-reliance upon GUI paradigms. 
- Despite offering rich and robust data output and multiple ways to interpret it, - approaches towards using multi-touch technology in digit al musical inst rument - design have been markedly conservative, showing a strong tendency towards modeling - existing hardware. This not only negates many of the benefits of multi-touch technology - but also creates specific difficulties in the context of live music performance. - A case study of two other interface types that have seen considerable musical - use --the XY pad and button grid --illustrates the manner in which the implicit - characteristics of a device determine the conditions under which it will favorably - perform. Accordingly, this paper proposes an alternative approach to multi-touch - which emp hasizes the imp licit strengths of the technology and establishes a - philosophy of design around them. Finally, we introduce two toolkits currently - being used to assess the validity of this approach.' - address: 'Ann Arbor, Michigan' - author: Patrick McGlynn and Victor Lazzarini and Gordon Delap and Xiaoyu Chen - bibtex: "@inproceedings{McGlynn2012,\n abstract = {This paper contends that the\ - \ development of expressive performance interfaces using multi-touch technology\ - \ has been hindered by an over-reliance upon GUI paradigms. Despite offering rich\ - \ and robust data output and multiple ways to interpret it, approaches towards\ - \ using multi-touch technology in digit al musical inst rument design have been\ - \ markedly conservative, showing a strong tendency towards modeling existing hardware.\ - \ This not only negates many of the benefits of multi-touch technology but also\ - \ creates specific difficulties in the context of live music performance. 
A case\ - \ study of two other interface types that have seen considerable musical use --the\ - \ XY pad and button grid --illustrates the manner in which the implicit characteristics\ - \ of a device determine the conditions under which it will favorably perform.\ - \ Accordingly, this paper proposes an alternative approach to multi-touch which\ - \ emp hasizes the imp licit strengths of the technology and establishes a philosophy\ - \ of design around them. Finally, we introduce two toolkits currently being used\ - \ to assess the validity of this approach.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Patrick McGlynn and Victor Lazzarini and Gordon Delap and Xiaoyu Chen},\n\ + ID: Skogstad2010 + abstract: 'The paper presents a conceptual overview of how optical infrared marker + based motion capture systems (IrMoCap) can be used in musical interaction. First + we present a review of related work of using IrMoCap for musical control. This + is followed by a discussion of possible features which can be exploited. Finally, + the question of mapping movement features to sound features is presented and discussed.' + address: 'Sydney, Australia' + author: 'Skogstad, Ståle A. and Jensenius, Alexander Refsum and Nymoen, Kristian' + bibtex: "@inproceedings{Skogstad2010,\n abstract = {The paper presents a conceptual\ + \ overview of how optical infrared marker based motion capture systems (IrMoCap)\ + \ can be used in musical interaction. First we present a review of related work\ + \ of using IrMoCap for musical control. This is followed by a discussion of possible\ + \ features which can be exploited. Finally, the question of mapping movement features\ + \ to sound features is presented and discussed.},\n address = {Sydney, Australia},\n\ + \ author = {Skogstad, Ståle A. 
and Jensenius, Alexander Refsum and Nymoen, Kristian},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178349},\n issn = {2220-4806},\n\ - \ keywords = {Multi-touch, controllers, mapping, gesture, GUIs, physical interfaces,\ - \ perceptual & cognitive issues},\n publisher = {University of Michigan},\n title\ - \ = {Recontextualizing the Multi-touch Surface},\n url = {http://www.nime.org/proceedings/2012/nime2012_132.pdf},\n\ - \ year = {2012}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178349 - issn: 2220-4806 - keywords: 'Multi-touch, controllers, mapping, gesture, GUIs, physical interfaces, - perceptual & cognitive issues' - publisher: University of Michigan - title: Recontextualizing the Multi-touch Surface - url: http://www.nime.org/proceedings/2012/nime2012_132.pdf - year: 2012 - - -- ENTRYTYPE: inproceedings - ID: Donnarumma2012 - abstract: "Performing music with a computer and loudspeakers represents always a\ - \ challenge. The lack of a traditional instrument requires the performer to study\ - \ idiomatic strategies by which musicianship becomes apparent. On the other hand,\ - \ the audience needs to decode those strategies, so to achieve an understanding\ - \ and appreciation of the music being played. The issue is particularly relevant\ - \ to the performance of music that results from the mediation between biological\ - \ signals of the human body and physical performance.\nThe present article tackles\ - \ this concern by demonstrating a new model of musical performance; what I define\ - \ biophysical music. This is music generated and played in real time by amplifying\ - \ and processing the acoustic sound of a performer's muscle contractions. The\ - \ model relies on an original and open source technology made of custom biosensors\ - \ and a related software framework. 
The succesfull application of these tools\ - \ is discussed in the practical context of a solo piece for sensors, laptop and\ - \ loudspeakers. Eventually, the compositional strategies that characterize the\ - \ piece are discussed along with a systematic description of the relevant mapping\ - \ techniques and their sonic outcome." - address: 'Ann Arbor, Michigan' - author: Marco Donnarumma - bibtex: "@inproceedings{Donnarumma2012,\n abstract = {Performing music with a computer\ - \ and loudspeakers represents always a challenge. The lack of a traditional instrument\ - \ requires the performer to study idiomatic strategies by which musicianship becomes\ - \ apparent. On the other hand, the audience needs to decode those strategies,\ - \ so to achieve an understanding and appreciation of the music being played. The\ - \ issue is particularly relevant to the performance of music that results from\ - \ the mediation between biological signals of the human body and physical performance.\n\ - The present article tackles this concern by demonstrating a new model of musical\ - \ performance; what I define biophysical music. This is music generated and played\ - \ in real time by amplifying and processing the acoustic sound of a performer's\ - \ muscle contractions. The model relies on an original and open source technology\ - \ made of custom biosensors and a related software framework. The succesfull application\ - \ of these tools is discussed in the practical context of a solo piece for sensors,\ - \ laptop and loudspeakers. 
Eventually, the compositional strategies that characterize\ - \ the piece are discussed along with a systematic description of the relevant\ - \ mapping techniques and their sonic outcome.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Marco Donnarumma},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178245},\n\ - \ issn = {2220-4806},\n keywords = {Muscle sounds, biophysical music, augmented\ - \ body, realtime performance, human-computer interaction, embodiment.},\n publisher\ - \ = {University of Michigan},\n title = {Music for Flesh II: informing interactive\ - \ music performance with the viscerality of the body system},\n url = {http://www.nime.org/proceedings/2012/nime2012_133.pdf},\n\ - \ year = {2012}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1177895},\n issn = {2220-4806},\n\ + \ keywords = {nime10},\n pages = {407--410},\n title = {Using {IR} Optical Marker\ + \ Based Motion Capture for Exploring Musical Interaction},\n url = {http://www.nime.org/proceedings/2010/nime2010_407.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178245 + doi: 10.5281/zenodo.1177895 issn: 2220-4806 - keywords: 'Muscle sounds, biophysical music, augmented body, realtime performance, - human-computer interaction, embodiment.' 
- publisher: University of Michigan - title: 'Music for Flesh II: informing interactive music performance with the viscerality - of the body system' - url: http://www.nime.org/proceedings/2012/nime2012_133.pdf - year: 2012 + keywords: nime10 + pages: 407--410 + title: Using IR Optical Marker Based Motion Capture for Exploring Musical Interaction + url: http://www.nime.org/proceedings/2010/nime2010_407.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Booth2012 - abstract: 'In this paper, we argue that the design of New Interfaces for Musical - Expression has much to gain from the study of interaction in ensemble laptop performance - contexts using ethnographic techniques. Inspired by recent third-stream research - in the field of human computer interaction, we describe a recent ethnomethodologically-informed - study of the Birmingham Laptop Ensemble (BiLE), and detail our approach to thick - description of the group''s working practices. Initial formal analysis of this - material sheds light on the fluidity of composer, performer and designer roles - within the ensemble and shows how confluences of these roles constitute member''s - differing viewpoints. We go on to draw out a number of strands of interaction - that highlight the essentially complex, socially constructed and value driven - nature of the group''s practice and conclude by reviewing the implications of - these factors on the design of software tools for laptop ensembles.' 
- address: 'Ann Arbor, Michigan' - author: Graham Booth and Michael Gurevich - bibtex: "@inproceedings{Booth2012,\n abstract = {In this paper, we argue that the\ - \ design of New Interfaces for Musical Expression has much to gain from the study\ - \ of interaction in ensemble laptop performance contexts using ethnographic techniques.\ - \ Inspired by recent third-stream research in the field of human computer interaction,\ - \ we describe a recent ethnomethodologically-informed study of the Birmingham\ - \ Laptop Ensemble (BiLE), and detail our approach to thick description of the\ - \ group's working practices. Initial formal analysis of this material sheds light\ - \ on the fluidity of composer, performer and designer roles within the ensemble\ - \ and shows how confluences of these roles constitute member's differing viewpoints.\ - \ We go on to draw out a number of strands of interaction that highlight the essentially\ - \ complex, socially constructed and value driven nature of the group's practice\ - \ and conclude by reviewing the implications of these factors on the design of\ - \ software tools for laptop ensembles.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Graham Booth and Michael Gurevich},\n booktitle = {Proceedings of\ + ID: Buch2010 + abstract: 'In this study artistic human-robot interaction design is introduced as + a means for scientific research and artistic investigations. It serves as a methodology + for situated cognitionintegrating empirical methodology and computational modeling, + and is exemplified by the installation playing robot.Its artistic purpose is to + aid to create and explore robots as anew medium for art and entertainment. We + discuss the useof finite state machines to organize robots'' behavioral reactions + to sensor data, and give a brief outlook on structuredobservation as a potential + method for data collection.' 
+ address: 'Sydney, Australia' + author: 'Buch, Benjamin and Coussement, Pieter and Schmidt, Lüder' + bibtex: "@inproceedings{Buch2010,\n abstract = {In this study artistic human-robot\ + \ interaction design is introduced as a means for scientific research and artistic\ + \ investigations. It serves as a methodology for situated cognitionintegrating\ + \ empirical methodology and computational modeling, and is exemplified by the\ + \ installation playing robot.Its artistic purpose is to aid to create and explore\ + \ robots as anew medium for art and entertainment. We discuss the useof finite\ + \ state machines to organize robots' behavioral reactions to sensor data, and\ + \ give a brief outlook on structuredobservation as a potential method for data\ + \ collection.},\n address = {Sydney, Australia},\n author = {Buch, Benjamin and\ + \ Coussement, Pieter and Schmidt, L\\''{u}der},\n booktitle = {Proceedings of\ \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178215},\n issn = {2220-4806},\n keywords = {Laptop Performance,\ - \ Ethnography, Ethnomethodology, Human Computer Interaction.},\n publisher = {University\ - \ of Michigan},\n title = {Collaborative composition and socially constituted\ - \ instruments: Ensemble laptop performance through the lens of ethnography},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_136.pdf},\n year = {2012}\n\ - }\n" + \ = {10.5281/zenodo.1177729},\n issn = {2220-4806},\n keywords = {dynamic mapping,embodiment,finite\ + \ state au-,human-robot interaction,new media art,nime10,structured,tomata},\n\ + \ pages = {411--414},\n title = {''playing robot'' : An Interactive Sound Installation\ + \ in Human-Robot Interaction Design for New Media Art},\n url = {http://www.nime.org/proceedings/2010/nime2010_411.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178215 + 
doi: 10.5281/zenodo.1177729 issn: 2220-4806 - keywords: 'Laptop Performance, Ethnography, Ethnomethodology, Human Computer Interaction.' - publisher: University of Michigan - title: 'Collaborative composition and socially constituted instruments: Ensemble - laptop performance through the lens of ethnography' - url: http://www.nime.org/proceedings/2012/nime2012_136.pdf - year: 2012 + keywords: 'dynamic mapping,embodiment,finite state au-,human-robot interaction,new + media art,nime10,structured,tomata' + pages: 411--414 + title: '''''playing robot'''' : An Interactive Sound Installation in Human-Robot + Interaction Design for New Media Art' + url: http://www.nime.org/proceedings/2010/nime2010_411.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Manousakis2012 - abstract: 'This paper presents the system and technology developed for the distributed, - micro-telematic, interactive sound art installation, The Network Is A Blind Space. - The piece uses sound to explore the physical yet invisible electromagnetic spaces - created by Wireless Local Area Networks (WLANs). To this end, the author created - a framework for indoor WiFi localization, providing a variety of control data - for various types of `musical echolocation''. This data, generated mostly by visitors - exploring the installation while holding WiFi-enabled devices, is used to convey - the hidden properties of wireless networks as dynamic spaces through an artistic - experience.' - address: 'Ann Arbor, Michigan' - author: Stelios Manousakis - bibtex: "@inproceedings{Manousakis2012,\n abstract = {This paper presents the system\ - \ and technology developed for the distributed, micro-telematic, interactive sound\ - \ art installation, The Network Is A Blind Space. The piece uses sound to explore\ - \ the physical yet invisible electromagnetic spaces created by Wireless Local\ - \ Area Networks (WLANs). 
To this end, the author created a framework for indoor\ - \ WiFi localization, providing a variety of control data for various types of\ - \ `musical echolocation'. This data, generated mostly by visitors exploring the\ - \ installation while holding WiFi-enabled devices, is used to convey the hidden\ - \ properties of wireless networks as dynamic spaces through an artistic experience.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Stelios Manousakis},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178341},\n issn = {2220-4806},\n keywords\ - \ = {Network music, mobile music, distributed music, interactivity, sound art\ - \ installation, collaborative instrument, site-specific, electromagnetic signals,\ - \ WiFi, trilateration, traceroute, echolocation, SuperCollider, Pure Data, RjDj,\ - \ mapping},\n publisher = {University of Michigan},\n title = {Network spaces\ - \ as collaborative instruments: {WLAN} trilateration for musical echolocation\ - \ in sound art},\n url = {http://www.nime.org/proceedings/2012/nime2012_142.pdf},\n\ - \ year = {2012}\n}\n" + ID: Reboursiere2010 + abstract: 'This project aims at studying how recent interactive and interactions + technologies would help extend how we play theguitar, thus defining the "multimodal + guitar". Our contributions target three main axes: audio analysis, gestural control + and audio synthesis. For this purpose, we designed anddeveloped a freely-available + toolbox for augmented guitarperformances, compliant with the PureData and Max/MSPenvironments, + gathering tools for: polyphonic pitch estimation, fretboard visualization and + grouping, pressure sensing,modal synthesis, infinite sustain, rearranging looping + and "smart" harmonizing.' + address: 'Sydney, Australia' + author: 'Reboursière, Loïc and Frisson, Christian and Lähdeoja, Otso and Mills, + John A. 
and Picard-Limpens, Cécile and Todoroff, Todor' + bibtex: "@inproceedings{Reboursiere2010,\n abstract = {This project aims at studying\ + \ how recent interactive and interactions technologies would help extend how we\ + \ play theguitar, thus defining the \"multimodal guitar\". Our contributions target\ + \ three main axes: audio analysis, gestural control and audio synthesis. For this\ + \ purpose, we designed anddeveloped a freely-available toolbox for augmented guitarperformances,\ + \ compliant with the PureData and Max/MSPenvironments, gathering tools for: polyphonic\ + \ pitch estimation, fretboard visualization and grouping, pressure sensing,modal\ + \ synthesis, infinite sustain, rearranging looping and \"smart\" harmonizing.},\n\ + \ address = {Sydney, Australia},\n author = {Reboursi\\`{e}re, Lo\\\"{i}c and\ + \ Frisson, Christian and L\\\"{a}hdeoja, Otso and Mills, John A. and Picard-Limpens,\ + \ C\\'{e}cile and Todoroff, Todor},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177881},\n\ + \ issn = {2220-4806},\n keywords = {Augmented guitar, audio synthesis, digital\ + \ audio effects, multimodal interaction, gestural sensing, polyphonic tran- scription,\ + \ hexaphonic guitar},\n pages = {415--418},\n title = {Multimodal Guitar : A Toolbox\ + \ For Augmented Guitar Performances},\n url = {http://www.nime.org/proceedings/2010/nime2010_415.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178341 + doi: 10.5281/zenodo.1177881 issn: 2220-4806 - keywords: 'Network music, mobile music, distributed music, interactivity, sound - art installation, collaborative instrument, site-specific, electromagnetic signals, - WiFi, trilateration, traceroute, echolocation, SuperCollider, Pure Data, RjDj, - mapping' - publisher: University of Michigan - title: 'Network spaces as 
collaborative instruments: WLAN trilateration for musical - echolocation in sound art' - url: http://www.nime.org/proceedings/2012/nime2012_142.pdf - year: 2012 + keywords: 'Augmented guitar, audio synthesis, digital audio effects, multimodal + interaction, gestural sensing, polyphonic tran- scription, hexaphonic guitar' + pages: 415--418 + title: 'Multimodal Guitar : A Toolbox For Augmented Guitar Performances' + url: http://www.nime.org/proceedings/2010/nime2010_415.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: McGee2012 - abstract: 'SenSynth is an open-source mobile application that allows for arbitrary, - dynamic mapping between several sensors and sound synthesis parameters. In addition - to synthesis techniques commonly found on mobile devices, SenSynth includes a - scanned synthesis source for the audification of sensor data. Using SenSynth, - we present a novel instrument based on the audification of accelerometer data - and introduce a new means of mobile synthesis control via a wearable magnetic - ring. SenSynth also employs a global pitch quantizer so one may adjust the level - of virtuosity required to play any instruments created via mapping.' - address: 'Ann Arbor, Michigan' - author: Ryan McGee and Daniel Ashbrook and Sean White - bibtex: "@inproceedings{McGee2012,\n abstract = {SenSynth is an open-source mobile\ - \ application that allows for arbitrary, dynamic mapping between several sensors\ - \ and sound synthesis parameters. In addition to synthesis techniques commonly\ - \ found on mobile devices, SenSynth includes a scanned synthesis source for the\ - \ audification of sensor data. Using SenSynth, we present a novel instrument based\ - \ on the audification of accelerometer data and introduce a new means of mobile\ - \ synthesis control via a wearable magnetic ring. 
SenSynth also employs a global\ - \ pitch quantizer so one may adjust the level of virtuosity required to play any\ - \ instruments created via mapping.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Ryan McGee and Daniel Ashbrook and Sean White},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178347},\n issn = {2220-4806},\n keywords = {mobile music,\ - \ sonification, audification, mobile sensors},\n publisher = {University of Michigan},\n\ - \ title = {SenSynth: a Mobile Application for Dynamic Sensor to Sound Mapping},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_149.pdf},\n year = {2012}\n\ + ID: Berger2010 + abstract: 'This paper introduces my research in physical interactive design with + my "GRIP MAESTRO" electroacoustic performance interface. It then discusses the + considerations involved in creating intuitive software mappings of emotive performative + gestures such that they are idiomatic not only of the sounds they create but also + of the physical nature of the interface itself. ' + address: 'Sydney, Australia' + author: 'Berger, Michael' + bibtex: "@inproceedings{Berger2010,\n abstract = {This paper introduces my research\ + \ in physical interactive design with my \"GRIP MAESTRO\" electroacoustic performance\ + \ interface. It then discusses the considerations involved in creating intuitive\ + \ software mappings of emotive performative gestures such that they are idiomatic\ + \ not only of the sounds they create but also of the physical nature of the interface\ + \ itself. 
},\n address = {Sydney, Australia},\n author = {Berger, Michael},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177719},\n issn = {2220-4806},\n\ + \ keywords = {emotive gesture and music,hall effect,human-controller interaction,musical\ + \ mapping strategies,nime10,novel musical instrument,passive haptic feedback,sensor-augmented\ + \ hand-exerciser},\n pages = {419--422},\n title = {The GRIP MAESTRO : Idiomatic\ + \ Mappings of Emotive Gestures for Control of Live Electroacoustic Music},\n url\ + \ = {http://www.nime.org/proceedings/2010/nime2010_419.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178347 + doi: 10.5281/zenodo.1177719 issn: 2220-4806 - keywords: 'mobile music, sonification, audification, mobile sensors' - publisher: University of Michigan - title: 'SenSynth: a Mobile Application for Dynamic Sensor to Sound Mapping' - url: http://www.nime.org/proceedings/2012/nime2012_149.pdf - year: 2012 + keywords: 'emotive gesture and music,hall effect,human-controller interaction,musical + mapping strategies,nime10,novel musical instrument,passive haptic feedback,sensor-augmented + hand-exerciser' + pages: 419--422 + title: 'The GRIP MAESTRO : Idiomatic Mappings of Emotive Gestures for Control of + Live Electroacoustic Music' + url: http://www.nime.org/proceedings/2010/nime2010_419.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Hattwick2012 - abstract: 'The configurability and networking abilities of digital musical instruments - increases the possibilities for collaboration in musical performances. Computer - music ensembles such as laptop orchestras are becoming increasingly common and - provide laboratories for the exploration of these possibilities. 
However, much - of the literature regarding the creation of DMIs has been focused on individual - expressivity, and their potential for collaborative performance has been under-utilized. - This paper makes the case for the benefits of an approach to digital musical instrument - design that begins with their collaborative potential, examines several frameworks - and sets of principles for the creation of digital musical instruments, and proposes - a dimension space representation of collaborative approaches which can be used - to evaluate and guide future DMI creation. Several examples of DMIs and compositions - are then evaluated and discussed in the context of this dimension space.' - address: 'Ann Arbor, Michigan' - author: Ian Hattwick and Marcelo Wanderley - bibtex: "@inproceedings{Hattwick2012,\n abstract = {The configurability and networking\ - \ abilities of digital musical instruments increases the possibilities for collaboration\ - \ in musical performances. Computer music ensembles such as laptop orchestras\ - \ are becoming increasingly common and provide laboratories for the exploration\ - \ of these possibilities. However, much of the literature regarding the creation\ - \ of DMIs has been focused on individual expressivity, and their potential for\ - \ collaborative performance has been under-utilized. This paper makes the case\ - \ for the benefits of an approach to digital musical instrument design that begins\ - \ with their collaborative potential, examines several frameworks and sets of\ - \ principles for the creation of digital musical instruments, and proposes a dimension\ - \ space representation of collaborative approaches which can be used to evaluate\ - \ and guide future DMI creation. 
Several examples of DMIs and compositions are\ - \ then evaluated and discussed in the context of this dimension space.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Ian Hattwick and Marcelo Wanderley},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178281},\n issn = {2220-4806},\n\ - \ keywords = {dimension space, collaborative, digital musical instrument, dmi,\ - \ digital music ensemble, dme},\n publisher = {University of Michigan},\n title\ - \ = {A Dimension Space for Evaluating Collaborative Musical Performance Systems},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_150.pdf},\n year = {2012}\n\ - }\n" + ID: Headlee2010 + abstract: 'In this paper, we present an interactive system that uses the body as + a generative tool for creating music. We explore innovative ways to make music, + create self-awareness, and provide the opportunity for unique, interactive social + experiences. The system uses a multi-player game paradigm, where players work + together to add layers to a soundscape of three distinct environments. Various + sensors and hardware are attached to the body and transmit signals to a workstation, + where they are processed using Max/MSP. The game is divided into three levels, + each of a different soundscape. The underlying purpose of our system is to move + the player''s focus away from complexities of the modern urban world toward a + more internalized meditative state. The system is currently viewed as an interactive + installation piece, but future iterations have potential applications in music + therapy, bio games, extended performance art, and as a prototype for new interfaces + for musical expression. 
' + address: 'Sydney, Australia' + author: 'Headlee, Kimberlee and Koziupa, Tatyana and Siwiak, Diana' + bibtex: "@inproceedings{Headlee2010,\n abstract = {In this paper, we present an\ + \ interactive system that uses the body as a generative tool for creating music.\ + \ We explore innovative ways to make music, create self-awareness, and provide\ + \ the opportunity for unique, interactive social experiences. The system uses\ + \ a multi-player game paradigm, where players work together to add layers to a\ + \ soundscape of three distinct environments. Various sensors and hardware are\ + \ attached to the body and transmit signals to a workstation, where they are processed\ + \ using Max/MSP. The game is divided into three levels, each of a different soundscape.\ + \ The underlying purpose of our system is to move the player's focus away from\ + \ complexities of the modern urban world toward a more internalized meditative\ + \ state. The system is currently viewed as an interactive installation piece,\ + \ but future iterations have potential applications in music therapy, bio games,\ + \ extended performance art, and as a prototype for new interfaces for musical\ + \ expression. 
},\n address = {Sydney, Australia},\n author = {Headlee, Kimberlee\ + \ and Koziupa, Tatyana and Siwiak, Diana},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177801},\n\ + \ issn = {2220-4806},\n keywords = {biomusic, collaborative, expressive, hci,\ + \ interactive, interactivity design, interface for musical expression, multimodal,\ + \ musical mapping strategies,nime10,performance,sonification},\n pages = {423--426},\n\ + \ title = {Sonic Virtual Reality Game : How Does Your Body Sound ?},\n url = {http://www.nime.org/proceedings/2010/nime2010_423.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178281 + doi: 10.5281/zenodo.1177801 issn: 2220-4806 - keywords: 'dimension space, collaborative, digital musical instrument, dmi, digital - music ensemble, dme' - publisher: University of Michigan - title: A Dimension Space for Evaluating Collaborative Musical Performance Systems - url: http://www.nime.org/proceedings/2012/nime2012_150.pdf - year: 2012 + keywords: 'biomusic, collaborative, expressive, hci, interactive, interactivity + design, interface for musical expression, multimodal, musical mapping strategies,nime10,performance,sonification' + pages: 423--426 + title: 'Sonic Virtual Reality Game : How Does Your Body Sound ?' + url: http://www.nime.org/proceedings/2010/nime2010_423.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Carlson2012 - abstract: 'Borderlands is a new interface for composing and performing with granular - synthesis. The software enables flexible, realtime improvisation and is designed - to allow users to engage with sonic material on a fundamental level, breaking - free of traditional paradigms for interaction with this technique. The user is - envisioned as an organizer of sound, simultaneously assuming the roles of curator, - performer, and listener. 
This paper places the software within the context of - painterly interfaces and describes the user interaction design and synthesis methodology.' - address: 'Ann Arbor, Michigan' - author: Chris Carlson and Ge Wang - bibtex: "@inproceedings{Carlson2012,\n abstract = {Borderlands is a new interface\ - \ for composing and performing with granular synthesis. The software enables flexible,\ - \ realtime improvisation and is designed to allow users to engage with sonic material\ - \ on a fundamental level, breaking free of traditional paradigms for interaction\ - \ with this technique. The user is envisioned as an organizer of sound, simultaneously\ - \ assuming the roles of curator, performer, and listener. This paper places the\ - \ software within the context of painterly interfaces and describes the user interaction\ - \ design and synthesis methodology.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Chris Carlson and Ge Wang},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178229},\n\ - \ issn = {2220-4806},\n keywords = {Granular synthesis, painterly interfaces,\ - \ improvisation, organized sound, NIME, CCRMA},\n publisher = {University of Michigan},\n\ - \ title = {Borderlands -An Audiovisual Interface for Granular Synthesis},\n url\ - \ = {http://www.nime.org/proceedings/2012/nime2012_152.pdf},\n year = {2012}\n\ + ID: Stahl2010 + abstract: 'Maintaining a sense of personal connection between increasingly synthetic + performers and increasingly diffuse audiences is vital to storytelling and entertainment. + Sonic intimacy is important, because voice is one of the highestbandwidth channels + for expressing our real and imagined selves.New tools for highly focused spatialization + could help improve acoustical clarity, encourage audience engagement, reduce noise + pollution and inspire creative expression. 
We have a particular interest in embodied, + embedded systems for vocal performance enhancement and transformation. This short + paper describes work in progress on a toolkit for high-quality wearable sound + suits. Design goals include tailored directionality and resonance, full bandwidth, + and sensible ergonomics. Engineering details to accompany a demonstration of recent + prototypes are presented, highlighting a novel magnetostrictive flextensional + transducer. Based on initial observations we suggest that vocal acoustic output + from the torso, and spatial perception of situated low frequency sources, are + two areas deserving greater attention and further study.' + address: 'Sydney, Australia' + author: 'Stahl, Alex and Clemens, Patricia' + bibtex: "@inproceedings{Stahl2010,\n abstract = {Maintaining a sense of personal\ + \ connection between increasingly synthetic performers and increasingly diffuse\ + \ audiences is vital to storytelling and entertainment. Sonic intimacy is important,\ + \ because voice is one of the highestbandwidth channels for expressing our real\ + \ and imagined selves.New tools for highly focused spatialization could help improve\ + \ acoustical clarity, encourage audience engagement, reduce noise pollution and\ + \ inspire creative expression. We have a particular interest in embodied, embedded\ + \ systems for vocal performance enhancement and transformation. This short paper\ + \ describes work in progress on a toolkit for high-quality wearable sound suits.\ + \ Design goals include tailored directionality and resonance, full bandwidth,\ + \ and sensible ergonomics. Engineering details to accompany a demonstration of\ + \ recent prototypes are presented, highlighting a novel magnetostrictive flextensional\ + \ transducer. 
Based on initial observations we suggest that vocal acoustic output\ + \ from the torso, and spatial perception of situated low frequency sources, are\ + \ two areas deserving greater attention and further study.},\n address = {Sydney,\ + \ Australia},\n author = {Stahl, Alex and Clemens, Patricia},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177899},\n issn = {2220-4806},\n keywords = {magnetostrictive\ + \ flextensional transducer,nime10,paralinguistics,sound reinforcement,spatialization,speech\ + \ enhancement,transformation,voice,wearable systems},\n pages = {427--430},\n\ + \ title = {Auditory Masquing : Wearable Sound Systems for Diegetic Character Voices},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_427.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178229 + doi: 10.5281/zenodo.1177899 issn: 2220-4806 - keywords: 'Granular synthesis, painterly interfaces, improvisation, organized sound, - NIME, CCRMA' - publisher: University of Michigan - title: Borderlands -An Audiovisual Interface for Granular Synthesis - url: http://www.nime.org/proceedings/2012/nime2012_152.pdf - year: 2012 + keywords: 'magnetostrictive flextensional transducer,nime10,paralinguistics,sound + reinforcement,spatialization,speech enhancement,transformation,voice,wearable + systems' + pages: 427--430 + title: 'Auditory Masquing : Wearable Sound Systems for Diegetic Character Voices' + url: http://www.nime.org/proceedings/2010/nime2010_427.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Hattwick2012a - abstract: "The Physical Computing Ensemble was created in order to determine the\ - \ viability of an approach to musical performance which focuses on the relationships\ - \ and interactions of the performers. 
Three performance systems utilizing gestural\ - \ controllers were designed and implemented, each with a different strategy for\ - \ performer interaction.\nThese strategies took advantage of the opportunities\ - \ for collaborative performance inherent in digital musical instruments due to\ - \ their networking abilities and reconfigurable software. These characteristics\ - \ allow for the easy implementation of varying approaches to collaborative performance.\ - \ Ensembles who utilize digital musical instruments provide a fertile environment\ - \ for the design, testing, and utilization of collaborative performance systems.\n\ - The three strategies discussed in this paper are the parameterization of musical\ - \ elements, turn-based collaborative control of sound, and the interaction of\ - \ musical systems created by multiple performers. Design principles, implementation,\ - \ and a performance using these strategies are discussed, and the conclusion is\ - \ drawn that performer interaction and collaboration as a primary focus for system\ - \ design, composition, and performance is viable." - address: 'Ann Arbor, Michigan' - author: Ian Hattwick and Kojiro Umezaki - bibtex: "@inproceedings{Hattwick2012a,\n abstract = {The Physical Computing Ensemble\ - \ was created in order to determine the viability of an approach to musical performance\ - \ which focuses on the relationships and interactions of the performers. Three\ - \ performance systems utilizing gestural controllers were designed and implemented,\ - \ each with a different strategy for performer interaction.\nThese strategies\ - \ took advantage of the opportunities for collaborative performance inherent in\ - \ digital musical instruments due to their networking abilities and reconfigurable\ - \ software. These characteristics allow for the easy implementation of varying\ - \ approaches to collaborative performance. 
Ensembles who utilize digital musical\ - \ instruments provide a fertile environment for the design, testing, and utilization\ - \ of collaborative performance systems.\nThe three strategies discussed in this\ - \ paper are the parameterization of musical elements, turn-based collaborative\ - \ control of sound, and the interaction of musical systems created by multiple\ - \ performers. Design principles, implementation, and a performance using these\ - \ strategies are discussed, and the conclusion is drawn that performer interaction\ - \ and collaboration as a primary focus for system design, composition, and performance\ - \ is viable.},\n address = {Ann Arbor, Michigan},\n author = {Ian Hattwick and\ - \ Kojiro Umezaki},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178279},\n\ - \ issn = {2220-4806},\n keywords = {Collaborative performance, interaction, digital\ - \ musical instruments, gestural controller, digital music ensemble, Wii},\n publisher\ - \ = {University of Michigan},\n title = {Approaches to Interaction in a Digital\ - \ Music Ensemble},\n url = {http://www.nime.org/proceedings/2012/nime2012_153.pdf},\n\ - \ year = {2012}\n}\n" + ID: Rothman2010 + abstract: 'The Ghost has been developed to create a merger between the standard + MIDI keyboard controller, MIDI/digital guitars and alternative desktop controllers. + Using a custom software editor, The Ghost''s controls can be mapped to suit the + users performative needs. The interface takes its interaction and gestural cues + from the guitar but it is not a MIDI guitar. The Ghost''s hardware, firmware and + software will be open sourced with the hopes of creating a community of users + that are invested in creating music with controller.' 
+ address: 'Sydney, Australia' + author: 'Rothman, Paul' + bibtex: "@inproceedings{Rothman2010,\n abstract = {The Ghost has been developed\ + \ to create a merger between the standard MIDI keyboard controller, MIDI/digital\ + \ guitars and alternative desktop controllers. Using a custom software editor,\ + \ The Ghost's controls can be mapped to suit the users performative needs. The\ + \ interface takes its interaction and gestural cues from the guitar but it is\ + \ not a MIDI guitar. The Ghost's hardware, firmware and software will be open\ + \ sourced with the hopes of creating a community of users that are invested in\ + \ creating music with controller.},\n address = {Sydney, Australia},\n author\ + \ = {Rothman, Paul},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177885},\n\ + \ issn = {2220-4806},\n keywords = {Controller, MIDI, Live Performance, Programmable,\ + \ Open-Source},\n pages = {431--435},\n title = {The Ghost : An Open-Source, User\ + \ Programmable {MIDI} Performance Controller},\n url = {http://www.nime.org/proceedings/2010/nime2010_431.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178279 + doi: 10.5281/zenodo.1177885 issn: 2220-4806 - keywords: 'Collaborative performance, interaction, digital musical instruments, - gestural controller, digital music ensemble, Wii' - publisher: University of Michigan - title: Approaches to Interaction in a Digital Music Ensemble - url: http://www.nime.org/proceedings/2012/nime2012_153.pdf - year: 2012 + keywords: 'Controller, MIDI, Live Performance, Programmable, Open-Source' + pages: 431--435 + title: 'The Ghost : An Open-Source, User Programmable MIDI Performance Controller' + url: http://www.nime.org/proceedings/2010/nime2010_431.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Vigliensoni2012 - abstract: 'This paper 
presents a comparison of three-dimensional (3D) position tracking - systems in terms of some of their performance parameters such as static accuracy - and precision, update rate, and shape of the space they sense. The underlying - concepts and characteristics of position tracking tech-nologies are reviewed, - and four position tracking systems (Vicon, Polhemus, Kinect, and Gametrak), based - on dif-ferent technologies, are empirically compared according to their performance - parameters and technical specifications. Our results show that, overall, the Vicon - was the position tracker with the best performance.' - address: 'Ann Arbor, Michigan' - author: Gabriel Vigliensoni and Marcelo M. Wanderley - bibtex: "@inproceedings{Vigliensoni2012,\n abstract = {This paper presents a comparison\ - \ of three-dimensional (3D) position tracking systems in terms of some of their\ - \ performance parameters such as static accuracy and precision, update rate, and\ - \ shape of the space they sense. The underlying concepts and characteristics of\ - \ position tracking tech-nologies are reviewed, and four position tracking systems\ - \ (Vicon, Polhemus, Kinect, and Gametrak), based on dif-ferent technologies, are\ - \ empirically compared according to their performance parameters and technical\ - \ specifications. Our results show that, overall, the Vicon was the position tracker\ - \ with the best performance.},\n address = {Ann Arbor, Michigan},\n author = {Gabriel\ - \ Vigliensoni and Marcelo M. 
Wanderley},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178445},\n\ - \ issn = {2220-4806},\n keywords = {Position tracker, comparison, touch-less,\ - \ gestural control},\n publisher = {University of Michigan},\n title = {A Quantitative\ - \ Comparison of Position Trackers for the Development of a Touch-less Musical\ - \ Interface},\n url = {http://www.nime.org/proceedings/2012/nime2012_155.pdf},\n\ - \ year = {2012}\n}\n" + ID: Paine2010 + abstract: 'This paper presents a discussion regarding organology classification + and taxonomies for digital musical instruments (DMI), arising from the TIEM (Taxonomy + of Interfaces for Electronic Music performance) survey (http://tiem.emf.org/), + conducted as part of an Australian Research Council Linkage project titled "Performance + Practice in New Interfaces for Realtime Electronic Music Performance". This research + is being carried out at the VIPRe Lab at, the University of Western Sydney in + partnership with the Electronic Music Foundation (EMF), Infusion Systems1 and + The Input Devices and Music Interaction Laboratory (IDMIL) at McGill University. + The project seeks to develop a schema of new interfaces for realtime electronic + music performance. ' + address: 'Sydney, Australia' + author: 'Paine, Garth' + bibtex: "@inproceedings{Paine2010,\n abstract = {This paper presents a discussion\ + \ regarding organology classification and taxonomies for digital musical instruments\ + \ (DMI), arising from the TIEM (Taxonomy of Interfaces for Electronic Music performance)\ + \ survey (http://tiem.emf.org/), conducted as part of an Australian Research Council\ + \ Linkage project titled \"Performance Practice in New Interfaces for Realtime\ + \ Electronic Music Performance\". 
This research is being carried out at the VIPRe\ + \ Lab at, the University of Western Sydney in partnership with the Electronic\ + \ Music Foundation (EMF), Infusion Systems1 and The Input Devices and Music Interaction\ + \ Laboratory (IDMIL) at McGill University. The project seeks to develop a schema\ + \ of new interfaces for realtime electronic music performance. },\n address =\ + \ {Sydney, Australia},\n author = {Paine, Garth},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177873},\n issn = {2220-4806},\n keywords = {Instrument,\ + \ Interface, Organology, Taxonomy.},\n pages = {436--439},\n title = {Towards\ + \ a Taxonomy of Realtime Interfaces for Electronic Music Performance},\n url =\ + \ {http://www.nime.org/proceedings/2010/nime2010_436.pdf},\n year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178445 + doi: 10.5281/zenodo.1177873 issn: 2220-4806 - keywords: 'Position tracker, comparison, touch-less, gestural control' - publisher: University of Michigan - title: A Quantitative Comparison of Position Trackers for the Development of a Touch-less - Musical Interface - url: http://www.nime.org/proceedings/2012/nime2012_155.pdf - year: 2012 + keywords: 'Instrument, Interface, Organology, Taxonomy.' + pages: 436--439 + title: Towards a Taxonomy of Realtime Interfaces for Electronic Music Performance + url: http://www.nime.org/proceedings/2010/nime2010_436.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Marier2012 - abstract: 'A new method for interpolating between presets is described. The interpolation - algorithm called Intersecting N-Spheres Interpolation is simple to compute and - its generalization to higher dimensions is straightforward. 
The current imple-mentation - in the SuperCollider environment is presented as a tool that eases the design - of many-to-many mappings for musical interfaces. Examples of its uses, including - such mappings in conjunction with a musical interface called the sponge, are given - and discussed.' - address: 'Ann Arbor, Michigan' - author: Martin Marier - bibtex: "@inproceedings{Marier2012,\n abstract = {A new method for interpolating\ - \ between presets is described. The interpolation algorithm called Intersecting\ - \ N-Spheres Interpolation is simple to compute and its generalization to higher\ - \ dimensions is straightforward. The current imple-mentation in the SuperCollider\ - \ environment is presented as a tool that eases the design of many-to-many mappings\ - \ for musical interfaces. Examples of its uses, including such mappings in conjunction\ - \ with a musical interface called the sponge, are given and discussed.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Martin Marier},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178343},\n issn = {2220-4806},\n keywords = {Mapping,\ - \ Preset, Interpolation, Sponge, SuperCollider},\n publisher = {University of\ - \ Michigan},\n title = {Designing Mappings for Musical Interfaces Using Preset\ - \ Interpolation},\n url = {http://www.nime.org/proceedings/2012/nime2012_159.pdf},\n\ - \ year = {2012}\n}\n" + ID: Taylor2010 + abstract: 'humanaquarium is a self-contained, transportable performance environment + that is used to stage technology-mediated interactive performances in public spaces. + Drawing upon the creative practices of busking and street performance, humanaquarium + incorporates live musicians, real-time audiovisual content generation, and frustrated + total internal reflection (FTIR) technology to facilitate participatory interaction + by members of the public. 
' + address: 'Sydney, Australia' + author: 'Taylor, Robyn and Schofield, Guy and Shearer, John and Boulanger, Pierre + and Wallace, Jayne and Olivier, Patrick' + bibtex: "@inproceedings{Taylor2010,\n abstract = {humanaquarium is a self-contained,\ + \ transportable performance environment that is used to stage technology-mediated\ + \ interactive performances in public spaces. Drawing upon the creative practices\ + \ of busking and street performance, humanaquarium incorporates live musicians,\ + \ real-time audiovisual content generation, and frustrated total internal reflection\ + \ (FTIR) technology to facilitate participatory interaction by members of the\ + \ public. },\n address = {Sydney, Australia},\n author = {Taylor, Robyn and Schofield,\ + \ Guy and Shearer, John and Boulanger, Pierre and Wallace, Jayne and Olivier,\ + \ Patrick},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177905},\n issn\ + \ = {2220-4806},\n keywords = {busking, collaborative interface, creative practice,\ + \ experience centered design, frustrated total internal reflection (FTIR), multi-touch\ + \ screen, multimedia, participatory performance},\n pages = {88--93},\n title\ + \ = {humanaquarium : A Participatory Performance System},\n url = {http://www.nime.org/proceedings/2010/nime2010_440.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178343 + doi: 10.5281/zenodo.1177905 issn: 2220-4806 - keywords: 'Mapping, Preset, Interpolation, Sponge, SuperCollider' - publisher: University of Michigan - title: Designing Mappings for Musical Interfaces Using Preset Interpolation - url: http://www.nime.org/proceedings/2012/nime2012_159.pdf - year: 2012 + keywords: 'busking, collaborative interface, creative practice, experience centered + design, frustrated total internal reflection (FTIR), multi-touch 
screen, multimedia, + participatory performance' + pages: 88--93 + title: 'humanaquarium : A Participatory Performance System' + url: http://www.nime.org/proceedings/2010/nime2010_440.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Jensenius2012 - abstract: 'We report on the Music Ball Project, a longterm, exploratory project - focused on creating novel instruments/controllers with a spherical shape as the - common denominator. Besides a simple and attractive geometrical shape, balls afford - many different types of use, including play. This has made our music balls popular - among widely different groups of people, from toddlers to seniors, including those - that would not otherwise engage with a musical instrument. The paper summarises - our experience of designing, constructing and using a number of music balls of - various sizes and with different types of sound-producing elements.' - address: 'Ann Arbor, Michigan' - author: 'Jensenius, Alexander Refsum and Voldsund, Arve' - bibtex: "@inproceedings{Jensenius2012,\n abstract = {We report on the Music Ball\ - \ Project, a longterm, exploratory project focused on creating novel instruments/controllers\ - \ with a spherical shape as the common denominator. Besides a simple and attractive\ - \ geometrical shape, balls afford many different types of use, including play.\ - \ This has made our music balls popular among widely different groups of people,\ - \ from toddlers to seniors, including those that would not otherwise engage with\ - \ a musical instrument. 
The paper summarises our experience of designing, constructing\ - \ and using a number of music balls of various sizes and with different types\ - \ of sound-producing elements.},\n address = {Ann Arbor, Michigan},\n author =\ - \ {Jensenius, Alexander Refsum and Voldsund, Arve},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1180579},\n issn = {2220-4806},\n keywords = {music balls,\ - \ instruments, controllers, inexpensive},\n publisher = {University of Michigan},\n\ - \ title = {The Music Ball Project: Concept, Design, Development, Performance},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_161.pdf},\n year = {2012}\n\ + ID: Kim2010 + abstract: 'In this paper, we present and demonstrate Samsung’s new concept music + creation engine and music composer application for mobile devices such as touch + phones or MP3 players, ‘Interactive Music Studio : the soloist’.' + address: 'Sydney, Australia' + author: 'Kim, Hyun-Soo and Yoon, Je-Han and Jung, Moon-Sik' + bibtex: "@inproceedings{Kim2010,\n abstract = {In this paper, we present and demonstrate\ + \ Samsung’s new concept music creation engine and music composer application for\ + \ mobile devices such as touch phones or MP3 players, ‘Interactive Music Studio\ + \ : the soloist’.},\n address = {Sydney, Australia},\n author = {Kim, Hyun-Soo\ + \ and Yoon, Je-Han and Jung, Moon-Sik},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177825},\n\ + \ issn = {2220-4806},\n keywords = {Mobile device, music composer, pattern composing,\ + \ MIDI},\n pages = {444--446},\n title = {Interactive Music Studio : The Soloist},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_444.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180579 + 
doi: 10.5281/zenodo.1177825 issn: 2220-4806 - keywords: 'music balls, instruments, controllers, inexpensive' - publisher: University of Michigan - title: 'The Music Ball Project: Concept, Design, Development, Performance' - url: http://www.nime.org/proceedings/2012/nime2012_161.pdf - year: 2012 + keywords: 'Mobile device, music composer, pattern composing, MIDI' + pages: 444--446 + title: 'Interactive Music Studio : The Soloist' + url: http://www.nime.org/proceedings/2010/nime2010_444.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Nesfield2012 - abstract: 'A general strategy for encouraging embodied engagement within musical - interface design is introduced. A pair of ex-ample implementations of this strategy - are described, one tangible and one graphical. As part of a potentially larger - set within our general approach, two separate relationships are described termed - `decay and contribution'' and `instability and adjustment'', which are heavily - dependent on the action requirements and timeliness of the interaction. By suggesting - this process occurs on a timescale of less than one second it is hoped attentiveness - and engagement can be en-couraged to the possible benefit of future developments - in digital musical instrument design.' - address: 'Ann Arbor, Michigan' - author: James Nesfield - bibtex: "@inproceedings{Nesfield2012,\n abstract = {A general strategy for encouraging\ - \ embodied engagement within musical interface design is introduced. A pair of\ - \ ex-ample implementations of this strategy are described, one tangible and one\ - \ graphical. As part of a potentially larger set within our general approach,\ - \ two separate relationships are described termed `decay and contribution' and\ - \ `instability and adjustment', which are heavily dependent on the action requirements\ - \ and timeliness of the interaction. 
By suggesting this process occurs on a timescale\ - \ of less than one second it is hoped attentiveness and engagement can be en-couraged\ - \ to the possible benefit of future developments in digital musical instrument\ - \ design.},\n address = {Ann Arbor, Michigan},\n author = {James Nesfield},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1180549},\n issn = {2220-4806},\n\ - \ keywords = {engagement, embodiment, flow, decay, instability, design, NIME},\n\ - \ publisher = {University of Michigan},\n title = {Strategies for Engagement in\ - \ Computer-Mediated Musical Performance},\n url = {http://www.nime.org/proceedings/2012/nime2012_162.pdf},\n\ - \ year = {2012}\n}\n" + ID: Tremblay2010 + abstract: "In this paper, the authors describe how they use an electric bass as\ + \ a subtle, expressive and intuitive interface to browse the rich sample bank\ + \ available to most laptop owners. This is achieved by audio mosaicing of the\ + \ live bass performance audio, through corpus-based concatenative synthesis (CBCS)\ + \ techniques, allowing a mapping of the multi-dimensional expressivity of the\ + \ performance onto foreign audio material, thus recycling the virtuosity acquired\ + \ on the electric instrument with a trivial learning curve. This design hypothesis\ + \ is contextualised and assessed within the Sandbox#n series of bass+laptop meta-instruments,\ + \ and the authors describe technical means of the implementation through the use\ + \ of the open-source CataRT CBCS system adapted for live mosaicing. They also\ + \ discuss their encouraging early results and provide a list of further explorations\ + \ to be made with that rich new interface." 
+ address: 'Sydney, Australia' + author: 'Tremblay, Pierre Alexandre and Schwarz, Diemo' + bibtex: "@inproceedings{Tremblay2010,\n abstract = {In this paper, the authors describe\ + \ how they use an electric bass as a subtle, expressive and intuitive interface\ + \ to browse the rich sample bank available to most laptop owners. This is achieved\ + \ by audio mosaicing of the live bass performance audio, through corpus-based\ + \ concatenative synthesis (CBCS) techniques, allowing a mapping of the multi-dimensional\ + \ expressivity of the performance onto foreign audio material, thus recycling\ + \ the virtuosity acquired on the electric instrument with a trivial learning curve.\ + \ This design hypothesis is contextualised and assessed within the Sandbox#n series\ + \ of bass+laptop meta-instruments, and the authors describe technical means of\ + \ the implementation through the use of the open-source CataRT CBCS system adapted\ + \ for live mosaicing. They also discuss their encouraging early results and provide\ + \ a list of further explorations to be made with that rich new interface.},\n\ + \ address = {Sydney, Australia},\n author = {Tremblay, Pierre Alexandre and Schwarz,\ + \ Diemo},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177913},\n issn = {2220-4806},\n\ + \ keywords = {laptop improvisation, corpus-based concatenative synthesis, haptic\ + \ interface, multi-dimensional mapping, audio mosaic},\n pages = {447--450},\n\ + \ title = {Surfing the Waves : Live Audio Mosaicing of an Electric Bass Performance\ + \ as a Corpus Browsing Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_447.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180549 + doi: 10.5281/zenodo.1177913 issn: 2220-4806 - keywords: 'engagement, embodiment, flow, decay, instability, 
design, NIME' - publisher: University of Michigan - title: Strategies for Engagement in Computer-Mediated Musical Performance - url: http://www.nime.org/proceedings/2012/nime2012_162.pdf - year: 2012 + keywords: 'laptop improvisation, corpus-based concatenative synthesis, haptic interface, + multi-dimensional mapping, audio mosaic' + pages: 447--450 + title: 'Surfing the Waves : Live Audio Mosaicing of an Electric Bass Performance + as a Corpus Browsing Interface' + url: http://www.nime.org/proceedings/2010/nime2010_447.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Astrinaki2012 - abstract: 'In this paper, we describe our pioneering work in developing speech synthesis - beyond the Text-To-Speech paradigm. We introduce tangible speech synthesis as - an alternate way of envisioning how artificial speech content can be produced. - Tangible speech synthesis refers to the ability, for a given system, to provide - some physicality and interactivity to important speech production parameters. - We present MAGE, our new software platform for high-quality reactive speech synthesis, - based on statistical parametric modeling and more particularly hidden Markov models. - We also introduce a new HandSketch-based musical instrument. This instrument brings - pen and posture based interaction on the top of MAGE, and demonstrates a first - proof of concept.' - address: 'Ann Arbor, Michigan' - author: Maria Astrinaki and Nicolas d'Alessandro and Thierry Dutoit - bibtex: "@inproceedings{Astrinaki2012,\n abstract = {In this paper, we describe\ - \ our pioneering work in developing speech synthesis beyond the Text-To-Speech\ - \ paradigm. We introduce tangible speech synthesis as an alternate way of envisioning\ - \ how artificial speech content can be produced. Tangible speech synthesis refers\ - \ to the ability, for a given system, to provide some physicality and interactivity\ - \ to important speech production parameters. 
We present MAGE, our new software\ - \ platform for high-quality reactive speech synthesis, based on statistical parametric\ - \ modeling and more particularly hidden Markov models. We also introduce a new\ - \ HandSketch-based musical instrument. This instrument brings pen and posture\ - \ based interaction on the top of MAGE, and demonstrates a first proof of concept.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Maria Astrinaki and Nicolas d'Alessandro\ - \ and Thierry Dutoit},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178207},\n\ - \ issn = {2220-4806},\n keywords = {speech synthesis, Hidden Markov Models, tangible\ - \ interaction, software library, MAGE, HTS, performative},\n publisher = {University\ - \ of Michigan},\n title = {MAGE --A Platform for Tangible Speech Synthesis},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_164.pdf},\n year = {2012}\n\ - }\n" + ID: Fyans2010 + abstract: 'Drawing on a model of spectator understanding of error inperformance + in the literature, we document a qualitativeexperiment that explores the relationships + between domainknowledge, mental models, intention and error recognitionby spectators + of performances with electronic instruments.Participants saw two performances + with contrasting instruments, with controls on their mental model and understanding + of intention. Based on data from a subsequent structured interview, we identify + themes in participants'' judgements and understanding of performance and explanationsof + their spectator experience. These reveal both elementsof similarity and difference + between the two performances,instruments and between domain knowledge groups. + Fromthese, we suggest and discuss implications for the design ofnovel performative + interactions with technology.' + address: 'Sydney, Australia' + author: 'Fyans, A. 
Cavan and Gurevich, Michael and Stapleton, Paul' + bibtex: "@inproceedings{Fyans2010,\n abstract = {Drawing on a model of spectator\ + \ understanding of error inperformance in the literature, we document a qualitativeexperiment\ + \ that explores the relationships between domainknowledge, mental models, intention\ + \ and error recognitionby spectators of performances with electronic instruments.Participants\ + \ saw two performances with contrasting instruments, with controls on their mental\ + \ model and understanding of intention. Based on data from a subsequent structured\ + \ interview, we identify themes in participants' judgements and understanding\ + \ of performance and explanationsof their spectator experience. These reveal both\ + \ elementsof similarity and difference between the two performances,instruments\ + \ and between domain knowledge groups. Fromthese, we suggest and discuss implications\ + \ for the design ofnovel performative interactions with technology.},\n address\ + \ = {Sydney, Australia},\n author = {Fyans, A. 
Cavan and Gurevich, Michael and\ + \ Stapleton, Paul},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177775},\n\ + \ issn = {2220-4806},\n keywords = {error,intention,mental model,nime10,qualitative,spectator},\n\ + \ pages = {451--454},\n title = {Examining the Spectator Experience},\n url =\ + \ {http://www.nime.org/proceedings/2010/nime2010_451.pdf},\n year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178207 + doi: 10.5281/zenodo.1177775 issn: 2220-4806 - keywords: 'speech synthesis, Hidden Markov Models, tangible interaction, software - library, MAGE, HTS, performative' - publisher: University of Michigan - title: MAGE --A Platform for Tangible Speech Synthesis - url: http://www.nime.org/proceedings/2012/nime2012_164.pdf - year: 2012 + keywords: 'error,intention,mental model,nime10,qualitative,spectator' + pages: 451--454 + title: Examining the Spectator Experience + url: http://www.nime.org/proceedings/2010/nime2010_451.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: tKlooster2012 - abstract: 'This paper describes the development of the Emotion Light, an interactive - biofeedback artwork where the user listens to a piece of electronic music whilst - holding a semi-transparent sculpture that tracks his/her bodily responses and - translates these into changing light patterns that emerge from the sculpture. - The context of this work is briefly described and the questions it poses are derived - from interviews held with audience members.' 
- address: 'Ann Arbor, Michigan' - author: Adinda Rosa van 't Klooster - bibtex: "@inproceedings{tKlooster2012,\n abstract = {This paper describes the development\ - \ of the Emotion Light, an interactive biofeedback artwork where the user listens\ - \ to a piece of electronic music whilst holding a semi-transparent sculpture that\ - \ tracks his/her bodily responses and translates these into changing light patterns\ - \ that emerge from the sculpture. The context of this work is briefly described\ - \ and the questions it poses are derived from interviews held with audience members.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Adinda Rosa van 't Klooster},\n\ + ID: Collins2010a + abstract: 'Gaining access to a prototype motion capture suit designedby the Animazoo + company, the Interactive Systems groupat the University of Sussex have been investigating + application areas. This paper describes our initial experimentsin mapping the + suit control data to sonic attributes for musical purposes. Given the lab conditions + under which weworked, an agile design cycle methodology was employed,with live + coding of audio software incorporating fast feedback, and more reflective preparations + between sessions, exploiting both individual and pair programming. As the suitprovides + up to 66 channels of information, we confront achallenging mapping problem, and + techniques are describedfor automatic calibration, and the use of echo state networksfor + dimensionality reduction.' + address: 'Sydney, Australia' + author: 'Collins, Nick and Kiefer, Chris and Patoli, Zeeshan and White, Martin' + bibtex: "@inproceedings{Collins2010a,\n abstract = {Gaining access to a prototype\ + \ motion capture suit designedby the Animazoo company, the Interactive Systems\ + \ groupat the University of Sussex have been investigating application areas.\ + \ This paper describes our initial experimentsin mapping the suit control data\ + \ to sonic attributes for musical purposes. 
Given the lab conditions under which\ + \ weworked, an agile design cycle methodology was employed,with live coding of\ + \ audio software incorporating fast feedback, and more reflective preparations\ + \ between sessions, exploiting both individual and pair programming. As the suitprovides\ + \ up to 66 channels of information, we confront achallenging mapping problem,\ + \ and techniques are describedfor automatic calibration, and the use of echo state\ + \ networksfor dimensionality reduction.},\n address = {Sydney, Australia},\n author\ + \ = {Collins, Nick and Kiefer, Chris and Patoli, Zeeshan and White, Martin},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178307},\n issn = {2220-4806},\n\ - \ keywords = {Interactive biofeedback artwork, music and emotion, novel interfaces,\ - \ practice based research, bodily response, heart rate, biosignals, affective\ - \ computing, aesthetic interaction, mediating body, biology inspired system},\n\ - \ publisher = {University of Michigan},\n title = {The body as mediator of music\ - \ in the Emotion Light},\n url = {http://www.nime.org/proceedings/2012/nime2012_167.pdf},\n\ - \ year = {2012}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1177749},\n issn = {2220-4806},\n\ + \ keywords = {Motion Capture, Musical Controller, Mapping, Agile Design},\n pages\ + \ = {455--458},\n title = {Musical Exoskeletons : Experiments with a Motion Capture\ + \ Suit},\n url = {http://www.nime.org/proceedings/2010/nime2010_455.pdf},\n year\ + \ = {2010}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1177749 + issn: 2220-4806 + keywords: 'Motion Capture, Musical Controller, Mapping, Agile Design' + pages: 455--458 + title: 'Musical Exoskeletons : Experiments with a Motion Capture Suit' + url: http://www.nime.org/proceedings/2010/nime2010_455.pdf + year: 2010 + + 
+- ENTRYTYPE: inproceedings + ID: Murphy2010 + abstract: 'This paper describes a study of membrane potentiometers and long force + sensing resistors as tools to enable greater interaction between performers and + audiences. This is accomplished through the building of a new interface called + the Helio. In preparation for the Helio''s construction, a variety of brands of + membrane potentiometers and long force sensing resistors were analyzed for their + suitability for use in a performance interface. Analog and digital circuit design + considerations are discussed. We discuss in detail the design process and performance + scenarios explored with the Helio. ' + address: 'Sydney, Australia' + author: 'Murphy, Jim and Kapur, Ajay and Burgin, Carl' + bibtex: "@inproceedings{Murphy2010,\n abstract = {This paper describes a study of\ + \ membrane potentiometers and long force sensing resistors as tools to enable\ + \ greater interaction between performers and audiences. This is accomplished through\ + \ the building of a new interface called the Helio. In preparation for the Helio's\ + \ construction, a variety of brands of membrane potentiometers and long force\ + \ sensing resistors were analyzed for their suitability for use in a performance\ + \ interface. Analog and digital circuit design considerations are discussed. We\ + \ discuss in detail the design process and performance scenarios explored with\ + \ the Helio. 
},\n address = {Sydney, Australia},\n author = {Murphy, Jim and Kapur,\ + \ Ajay and Burgin, Carl},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177863},\n\ + \ issn = {2220-4806},\n keywords = {Force Sensing Resistors, Membrane Potentiometers,\ + \ Force Sensing Resistors, Haptic Feedback, Helio},\n pages = {459--462},\n title\ + \ = {The Helio : A Study of Membrane Potentiometers and Long Force Sensing Resistors\ + \ for Musical Interfaces},\n url = {http://www.nime.org/proceedings/2010/nime2010_459.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178307 + doi: 10.5281/zenodo.1177863 issn: 2220-4806 - keywords: 'Interactive biofeedback artwork, music and emotion, novel interfaces, - practice based research, bodily response, heart rate, biosignals, affective computing, - aesthetic interaction, mediating body, biology inspired system' - publisher: University of Michigan - title: The body as mediator of music in the Emotion Light - url: http://www.nime.org/proceedings/2012/nime2012_167.pdf - year: 2012 + keywords: 'Force Sensing Resistors, Membrane Potentiometers, Force Sensing Resistors, + Haptic Feedback, Helio' + pages: 459--462 + title: 'The Helio : A Study of Membrane Potentiometers and Long Force Sensing Resistors + for Musical Interfaces' + url: http://www.nime.org/proceedings/2010/nime2010_459.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Bergsland2012 - abstract: "As a part of the research project Voice Meetings, a solo live-electronic\ - \ vocal performance was presented for 63 students. 
Through a mixed method approach\ - \ applying both written and oral response, feedback from one blindfolded and one\ - \ seeing audience group was collected and analyzed.\nThere were marked differences\ - \ between the groups regarding focus, in that the participants in blindfolded\ - \ group tended to focus on fewer aspects, have a heightened focus and be less\ - \ distracted than the seeing group. The seeing group, on its part, focused more\ - \ on the technological instruments applied in the performance, the performer herself\ - \ and her actions. This study also shows that there were only minor differences\ - \ between the groups regarding the experience of skill and control, and argues\ - \ that this observation can be explained by earlier research on skill in NIMEs." - address: 'Ann Arbor, Michigan' - author: Andreas Bergsland and Tone Åse - bibtex: "@inproceedings{Bergsland2012,\n abstract = {As a part of the research project\ - \ Voice Meetings, a solo live-electronic vocal performance was presented for 63\ - \ students. Through a mixed method approach applying both written and oral response,\ - \ feedback from one blindfolded and one seeing audience group was collected and\ - \ analyzed.\nThere were marked differences between the groups regarding focus,\ - \ in that the participants in blindfolded group tended to focus on fewer aspects,\ - \ have a heightened focus and be less distracted than the seeing group. The seeing\ - \ group, on its part, focused more on the technological instruments applied in\ - \ the performance, the performer herself and her actions. 
This study also shows\ - \ that there were only minor differences between the groups regarding the experience\ - \ of skill and control, and argues that this observation can be explained by earlier\ - \ research on skill in NIMEs.},\n address = {Ann Arbor, Michigan},\n author =\ - \ {Andreas Bergsland and Tone {\\AA}se},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178213},\n\ - \ issn = {2220-4806},\n keywords = {Performance, audience reception, acousmatic\ - \ listening, live-electronics, voice, qualitative research},\n publisher = {University\ - \ of Michigan},\n title = {Using a seeing/blindfolded paradigm to study audience\ - \ experiences of live-electronic performances with voice},\n url = {http://www.nime.org/proceedings/2012/nime2012_168.pdf},\n\ - \ year = {2012}\n}\n" + ID: Taylor2010a + abstract: 'We present a novel user interface device based around ferromagnetic sensing. + The physical form of the interface can easily be reconfigured by simply adding + and removing a variety of ferromagnetic objects to the device''s sensing surface. + This allows the user to change the physical form of the interface resulting in + a variety of different interaction modes. When used in a musical context, the + performer can leverage the physical reconfiguration of the device to affect the + method of playing and ultimately the sound produced. We describe the implementation + of the sensing system, along with a range of mapping techniques used to transform + the sensor data into musical output, including both the direct synthesis of sound + and also the generation of MIDI data for use with Ableton Live. We conclude with + a discussion of future directions for the device. ' + address: 'Sydney, Australia' + author: 'Taylor, Stuart and Hook, Jonathan' + bibtex: "@inproceedings{Taylor2010a,\n abstract = {We present a novel user interface\ + \ device based around ferromagnetic sensing. 
The physical form of the interface\ + \ can easily be reconfigured by simply adding and removing a variety of ferromagnetic\ + \ objects to the device's sensing surface. This allows the user to change the\ + \ physical form of the interface resulting in a variety of different interaction\ + \ modes. When used in a musical context, the performer can leverage the physical\ + \ reconfiguration of the device to affect the method of playing and ultimately\ + \ the sound produced. We describe the implementation of the sensing system, along\ + \ with a range of mapping techniques used to transform the sensor data into musical\ + \ output, including both the direct synthesis of sound and also the generation\ + \ of MIDI data for use with Ableton Live. We conclude with a discussion of future\ + \ directions for the device. },\n address = {Sydney, Australia},\n author = {Taylor,\ + \ Stuart and Hook, Jonathan},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177907},\n\ + \ issn = {2220-4806},\n keywords = {Ferromagnetic sensing, ferrofluid, reconfigurable\ + \ user interface, wave terrain synthesis, MIDI controller.},\n pages = {463--466},\n\ + \ title = {FerroSynth : A Ferromagnetic Music Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_463.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178213 + doi: 10.5281/zenodo.1177907 issn: 2220-4806 - keywords: 'Performance, audience reception, acousmatic listening, live-electronics, - voice, qualitative research' - publisher: University of Michigan - title: Using a seeing/blindfolded paradigm to study audience experiences of live-electronic - performances with voice - url: http://www.nime.org/proceedings/2012/nime2012_168.pdf - year: 2012 + keywords: 'Ferromagnetic sensing, ferrofluid, reconfigurable user interface, wave + 
terrain synthesis, MIDI controller.' + pages: 463--466 + title: 'FerroSynth : A Ferromagnetic Music Interface' + url: http://www.nime.org/proceedings/2010/nime2010_463.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Lympouridis2012 - abstract: 'Through a series of collaborative research projects usingOrient, a wireless, - inertial sensor-based motion capture system,I have studied the requirements of - musicians, dancers,performers and choreographers and identified various design - strategies for the realization of Whole Body Interactive (WBI)performance systems. - The acquired experience and knowledge led to the design and development of EnActor, - prototypeWhole Body Interaction Design software. The software has been realized - as a collection of modules that were proved valuable for the design of interactive - performance systems that are directly controlled by the body.This paper presents - EnActor''s layout as a blueprint for the design and development of more sophisticated - descendants.Complete video archive of my research projects in WBI performance - systems at: http://www.inter-axions.com' - address: 'Ann Arbor, Michigan' - author: Vangelis Lympouridis - bibtex: "@inproceedings{Lympouridis2012,\n abstract = {Through a series of collaborative\ - \ research projects usingOrient, a wireless, inertial sensor-based motion capture\ - \ system,I have studied the requirements of musicians, dancers,performers and\ - \ choreographers and identified various design strategies for the realization\ - \ of Whole Body Interactive (WBI)performance systems. The acquired experience\ - \ and knowledge led to the design and development of EnActor, prototypeWhole Body\ - \ Interaction Design software. 
The software has been realized as a collection\ - \ of modules that were proved valuable for the design of interactive performance\ - \ systems that are directly controlled by the body.This paper presents EnActor's\ - \ layout as a blueprint for the design and development of more sophisticated descendants.Complete\ - \ video archive of my research projects in WBI performance systems at: http://www.inter-axions.com},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Vangelis Lympouridis},\n booktitle\ + ID: Dubrau2010 + abstract: 'P[a]ra[pra]xis is an ongoing collaborative project incorporating a two-piece + software package which explores human relations to language through dynamic sound + and text production. Incorporating an exploration of the potential functions and + limitations of the ‘sign’ and the intrusions of the Unconscious into the linguistic + utterance via parapraxes, or ‘Freudian slips’, our software utilises realtime + subject response to automatically- generated changes in a narrative of their own + writing to create music. This paper considers the relative paucity of truly interactive + realtime text and audio works and provides an account of current and future potential + for the simultaneous production of realtime poetry and electronic music through + the P[a]ra[pra]xis software. It also provides the basis for a demonstration session + in which we hope to show users how the program works, discuss possibilities for + different applications of the software, and collect data for future collaborative + work.' + address: 'Sydney, Australia' + author: 'Dubrau, Josh M. and Havryliv, Mark' + bibtex: "@inproceedings{Dubrau2010,\n abstract = {P[a]ra[pra]xis is an ongoing collaborative\ + \ project incorporating a two-piece software package which explores human relations\ + \ to language through dynamic sound and text production. 
Incorporating an exploration\ + \ of the potential functions and limitations of the ‘sign’ and the intrusions\ + \ of the Unconscious into the linguistic utterance via parapraxes, or ‘Freudian\ + \ slips’, our software utilises realtime subject response to automatically- generated\ + \ changes in a narrative of their own writing to create music. This paper considers\ + \ the relative paucity of truly interactive realtime text and audio works and\ + \ provides an account of current and future potential for the simultaneous production\ + \ of realtime poetry and electronic music through the P[a]ra[pra]xis software.\ + \ It also provides the basis for a demonstration session in which we hope to show\ + \ users how the program works, discuss possibilities for different applications\ + \ of the software, and collect data for future collaborative work.},\n address\ + \ = {Sydney, Australia},\n author = {Dubrau, Josh M. and Havryliv, Mark},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178333},\n issn = {2220-4806},\n keywords\ - \ = {Whole Body Interaction, Motion Capture, Interactive Performance Systems,\ - \ Interaction Design, Software Prototype},\n publisher = {University of Michigan},\n\ - \ title = {EnActor: A Blueprint for a Whole Body Interaction Design Software Platform},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_169.pdf},\n year = {2012}\n\ - }\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178333 - issn: 2220-4806 - keywords: 'Whole Body Interaction, Motion Capture, Interactive Performance Systems, - Interaction Design, Software Prototype' - publisher: University of Michigan - title: 'EnActor: A Blueprint for a Whole Body Interaction Design Software Platform' - url: http://www.nime.org/proceedings/2012/nime2012_169.pdf - year: 2012 - - -- ENTRYTYPE: inproceedings - ID: Kim2012 
- abstract: 'In this paper we introduce an interactive mobile music performance system - using the digital compass of mobile phones. Compass-based interface can detect - the aiming orientation of performers on stage, allowing us to obtain information - on interactions between performers and use it for both musical mappings and visualizations - on screen for the audience. We document and discuss the result of a compass-based - mobile music performance, Where Are You Standing, and present an algorithm for - a new app to track down the performers'' positions in real-time.' - address: 'Ann Arbor, Michigan' - author: Bongjun Kim and Woon Seung Yeo - bibtex: "@inproceedings{Kim2012,\n abstract = {In this paper we introduce an interactive\ - \ mobile music performance system using the digital compass of mobile phones.\ - \ Compass-based interface can detect the aiming orientation of performers on stage,\ - \ allowing us to obtain information on interactions between performers and use\ - \ it for both musical mappings and visualizations on screen for the audience.\ - \ We document and discuss the result of a compass-based mobile music performance,\ - \ Where Are You Standing, and present an algorithm for a new app to track down\ - \ the performers' positions in real-time.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Bongjun Kim and Woon Seung Yeo},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178303},\n issn = {2220-4806},\n keywords = {Mobile music,\ - \ mobile phone, smartphone, compass, magnetometer, aiming gesture, musical mapping,\ - \ musical sonification},\n publisher = {University of Michigan},\n title = {Interactive\ - \ Mobile Music Performance with Digital Compass},\n url = {http://www.nime.org/proceedings/2012/nime2012_170.pdf},\n\ - \ year = {2012}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.117777},\n issn = {2220-4806},\n keywords\ + \ = {language 
sonification, new media poetry, realtime, Lacan, semiotics, collaborative\ + \ environment, psychoanalysis, Freud},\n pages = {467--468},\n title = {P[a]ra[pra]xis\ + \ : Towards Genuine Realtime 'Audiopoetry'},\n url = {http://www.nime.org/proceedings/2010/nime2010_467.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178303 + doi: 10.5281/zenodo.117777 issn: 2220-4806 - keywords: 'Mobile music, mobile phone, smartphone, compass, magnetometer, aiming - gesture, musical mapping, musical sonification' - publisher: University of Michigan - title: Interactive Mobile Music Performance with Digital Compass - url: http://www.nime.org/proceedings/2012/nime2012_170.pdf - year: 2012 + keywords: 'language sonification, new media poetry, realtime, Lacan, semiotics, + collaborative environment, psychoanalysis, Freud' + pages: 467--468 + title: 'P[a]ra[pra]xis : Towards Genuine Realtime ''Audiopoetry''' + url: http://www.nime.org/proceedings/2010/nime2010_467.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Rotondo2012 - abstract: 'In this paper we explore the concept of instruments which are played - by more than one person, and present two case studies. We designed, built and - performed with Feedbørk, a two-player instrument comprising two iPads which form - a video feedback loop, and Barrel, a nine-player instrument made up of eight Gametrak - controllers fastened to a steel industrial barrel. By splitting the control of - these instruments into distinct but interdependent roles, we allow each individual - to easily play a part while retaining a rich complexity of output for the whole - system. We found that the relationships between those roles had a significant - effect on how the players communicated with each other, and on how the performance - was perceived by the audience.' 
- address: 'Ann Arbor, Michigan' - author: Michael Rotondo and Nick Kruge and Ge Wang - bibtex: "@inproceedings{Rotondo2012,\n abstract = {In this paper we explore the\ - \ concept of instruments which are played by more than one person, and present\ - \ two case studies. We designed, built and performed with Feedb{\\o}rk, a two-player\ - \ instrument comprising two iPads which form a video feedback loop, and Barrel,\ - \ a nine-player instrument made up of eight Gametrak controllers fastened to a\ - \ steel industrial barrel. By splitting the control of these instruments into\ - \ distinct but interdependent roles, we allow each individual to easily play a\ - \ part while retaining a rich complexity of output for the whole system. We found\ - \ that the relationships between those roles had a significant effect on how the\ - \ players communicated with each other, and on how the performance was perceived\ - \ by the audience.},\n address = {Ann Arbor, Michigan},\n author = {Michael Rotondo\ - \ and Nick Kruge and Ge Wang},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180583},\n\ - \ issn = {2220-4806},\n keywords = {Many person musical instruments, cooperative\ - \ music, asymmetric interfaces, transmodal feedback},\n publisher = {University\ - \ of Michigan},\n title = {Many-Person Instruments for Computer Music Performance},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_171.pdf},\n year = {2012}\n\ - }\n" + ID: Kitani2010 + abstract: 'We propose an online generative algorithm to enhance musical expression + via intelligent improvisation accompaniment.Our framework called the ImprovGenerator, + takes a livestream of percussion patterns and generates an improvisedaccompaniment + track in real-time to stimulate new expressions in the improvisation. 
We use a + mixture model togenerate an accompaniment pattern, that takes into account both + the hierarchical temporal structure of the liveinput patterns and the current + musical context of the performance. The hierarchical structure is represented + as astochastic context-free grammar, which is used to generateaccompaniment patterns + based on the history of temporalpatterns. We use a transition probability model + to augmentthe grammar generated pattern to take into account thecurrent context + of the performance. In our experiments weshow how basic beat patterns performed + by a percussioniston a cajon can be used to automatically generate on-the-flyimprovisation + accompaniment for live performance.' + address: 'Sydney, Australia' + author: 'Kitani, Kris M. and Koike, Hideki' + bibtex: "@inproceedings{Kitani2010,\n abstract = {We propose an online generative\ + \ algorithm to enhance musical expression via intelligent improvisation accompaniment.Our\ + \ framework called the ImprovGenerator, takes a livestream of percussion patterns\ + \ and generates an improvisedaccompaniment track in real-time to stimulate new\ + \ expressions in the improvisation. We use a mixture model togenerate an accompaniment\ + \ pattern, that takes into account both the hierarchical temporal structure of\ + \ the liveinput patterns and the current musical context of the performance. The\ + \ hierarchical structure is represented as astochastic context-free grammar, which\ + \ is used to generateaccompaniment patterns based on the history of temporalpatterns.\ + \ We use a transition probability model to augmentthe grammar generated pattern\ + \ to take into account thecurrent context of the performance. 
In our experiments\ + \ weshow how basic beat patterns performed by a percussioniston a cajon can be\ + \ used to automatically generate on-the-flyimprovisation accompaniment for live\ + \ performance.},\n address = {Sydney, Australia},\n author = {Kitani, Kris M.\ + \ and Koike, Hideki},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177827},\n\ + \ issn = {2220-4806},\n keywords = {Machine Improvisation, Grammatical Induction,\ + \ Stochastic Context-Free Grammars, Algorithmic Composition},\n pages = {469--472},\n\ + \ title = {ImprovGenerator : Online Grammatical Induction for On-the-Fly Improvisation\ + \ Accompaniment},\n url = {http://www.nime.org/proceedings/2010/nime2010_469.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180583 + doi: 10.5281/zenodo.1177827 issn: 2220-4806 - keywords: 'Many person musical instruments, cooperative music, asymmetric interfaces, - transmodal feedback' - publisher: University of Michigan - title: Many-Person Instruments for Computer Music Performance - url: http://www.nime.org/proceedings/2012/nime2012_171.pdf - year: 2012 + keywords: 'Machine Improvisation, Grammatical Induction, Stochastic Context-Free + Grammars, Algorithmic Composition' + pages: 469--472 + title: 'ImprovGenerator : Online Grammatical Induction for On-the-Fly Improvisation + Accompaniment' + url: http://www.nime.org/proceedings/2010/nime2010_469.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Barbosa2012 - abstract: 'The authors propose the development of a more complete Digital Music - Instrument (DMI) evaluation methodology, which provides structured tools for the - incremental development of prototypes based on user feedback. This paper emphasizes - an important but often ignored stakeholder present in the context of musical performance: - the audience. 
We demonstrate the practical application of an audience focused - methodology through a case study (`Illusio''), discuss the obtained results and - possible improvements for future works.' - address: 'Ann Arbor, Michigan' - author: Jerônimo Barbosa and Filipe Calegario and Verônica Teichrieb and Geber Ramalho - and Patrick McGlynn - bibtex: "@inproceedings{Barbosa2012,\n abstract = {The authors propose the development\ - \ of a more complete Digital Music Instrument (DMI) evaluation methodology, which\ - \ provides structured tools for the incremental development of prototypes based\ - \ on user feedback. This paper emphasizes an important but often ignored stakeholder\ - \ present in the context of musical performance: the audience. We demonstrate\ - \ the practical application of an audience focused methodology through a case\ - \ study (`Illusio'), discuss the obtained results and possible improvements for\ - \ future works.},\n address = {Ann Arbor, Michigan},\n author = {Jer{\\^o}nimo\ - \ Barbosa and Filipe Calegario and Ver{\\^o}nica Teichrieb and Geber Ramalho and\ - \ Patrick McGlynn},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178209},\n\ - \ issn = {2220-4806},\n keywords = {Empirical methods, quantitative, usability\ - \ testing and evaluation, digital musical instruments, evaluation methodology,\ - \ Illusio},\n publisher = {University of Michigan},\n title = {Considering Audience's\ - \ View Towards an Evaluation Methodology for Digital Musical Instruments},\n url\ - \ = {http://www.nime.org/proceedings/2012/nime2012_174.pdf},\n year = {2012}\n\ + ID: Frisson2010 + abstract: 'This paper presents the development of rapid and reusablegestural interface + prototypes for navigation by similarity inan audio database and for sound manipulation, + using theAudioCycle application. 
For this purpose, we propose andfollow guidelines + for rapid prototyping that we apply usingthe PureData visual programming environment. + We havemainly developed three prototypes of manual control: onecombining a 3D + mouse and a jog wheel, a second featuring a force-feedback 3D mouse, and a third + taking advantage of the multitouch trackpad. We discuss benefits andshortcomings + we experienced while prototyping using thisapproach.' + address: 'Sydney, Australia' + author: 'Frisson, Christian and Macq, Benoît and Dupont, Stéphane and Siebert, Xavier + and Tardieu, Damien and Dutoit, Thierry' + bibtex: "@inproceedings{Frisson2010,\n abstract = {This paper presents the development\ + \ of rapid and reusablegestural interface prototypes for navigation by similarity\ + \ inan audio database and for sound manipulation, using theAudioCycle application.\ + \ For this purpose, we propose andfollow guidelines for rapid prototyping that\ + \ we apply usingthe PureData visual programming environment. We havemainly developed\ + \ three prototypes of manual control: onecombining a 3D mouse and a jog wheel,\ + \ a second featuring a force-feedback 3D mouse, and a third taking advantage of\ + \ the multitouch trackpad. 
We discuss benefits andshortcomings we experienced\ + \ while prototyping using thisapproach.},\n address = {Sydney, Australia},\n author\ + \ = {Frisson, Christian and Macq, Beno{\\^i}t and Dupont, St\\'{e}phane and Siebert,\ + \ Xavier and Tardieu, Damien and Dutoit, Thierry},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1177771},\n issn = {2220-4806},\n keywords = {Human-computer\ + \ interaction, gestural interfaces, rapid prototyping, browsing by similarity,\ + \ audio database},\n pages = {473--476},\n title = {DeviceCycle : Rapid and Reusable\ + \ Prototyping of Gestural Interfaces, Applied to Audio Browsing by Similarity},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_473.pdf},\n year = {2010}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178209 + doi: 10.5281/zenodo.1177771 issn: 2220-4806 - keywords: 'Empirical methods, quantitative, usability testing and evaluation, digital - musical instruments, evaluation methodology, Illusio' - publisher: University of Michigan - title: Considering Audience's View Towards an Evaluation Methodology for Digital - Musical Instruments - url: http://www.nime.org/proceedings/2012/nime2012_174.pdf - year: 2012 + keywords: 'Human-computer interaction, gestural interfaces, rapid prototyping, browsing + by similarity, audio database' + pages: 473--476 + title: 'DeviceCycle : Rapid and Reusable Prototyping of Gestural Interfaces, Applied + to Audio Browsing by Similarity' + url: http://www.nime.org/proceedings/2010/nime2010_473.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Beilharz2012 - abstract: 'In site-specific installation or situated media, a significant part of - the "I" in NIME is the environment, the site and the implicit features of site - such as humans, weather, materials, natural acoustics, etc. 
These could be viewed - as design constraints, or features, even agency determining the outcome of responsive - sound installation works. This paper discusses the notion of interface in public - (especially outdoor) installation, starting with the authors'' Sculpture by the - Sea Windtraces work using this recent experience as the launch-pad, with reference - to ways in which others have approached it (focusing on sensor, weather-activated - outdoor installations in a brief traverse of related cases, e.g. works by Garth - Paine, James Bulley and Daniel Jones, and David Bowen). This is a dialogical paper - on the topic of interface and `site'' as the aetiology of interaction/interface/instrument - and its type of response (e.g. to environment and audience). While the focus here - is on outdoor factors (particularly the climatic environment), indoor site-specific - installation also experiences the effects of ambient noise, acoustic context, - and audience as integral agents in the interface and perception of the work, its - musical expression. The way in which features of the situation are integrated - has relevance for others in the NIME community in the design of responsive spaces, - art installation, and large-scale or installed instruments in which users, participants, - acoustics play a significant role.' - address: 'Ann Arbor, Michigan' - author: Kirsty Beilharz and Aengus Martin - bibtex: "@inproceedings{Beilharz2012,\n abstract = {In site-specific installation\ - \ or situated media, a significant part of the \"I\" in NIME is the environment,\ - \ the site and the implicit features of site such as humans, weather, materials,\ - \ natural acoustics, etc. 
These could be viewed as design constraints, or features,\ - \ even agency determining the outcome of responsive sound installation works.\ - \ This paper discusses the notion of interface in public (especially outdoor)\ - \ installation, starting with the authors' Sculpture by the Sea Windtraces work\ - \ using this recent experience as the launch-pad, with reference to ways in which\ - \ others have approached it (focusing on sensor, weather-activated outdoor installations\ - \ in a brief traverse of related cases, e.g. works by Garth Paine, James Bulley\ - \ and Daniel Jones, and David Bowen). This is a dialogical paper on the topic\ - \ of interface and `site' as the aetiology of interaction/interface/instrument\ - \ and its type of response (e.g. to environment and audience). While the focus\ - \ here is on outdoor factors (particularly the climatic environment), indoor site-specific\ - \ installation also experiences the effects of ambient noise, acoustic context,\ - \ and audience as integral agents in the interface and perception of the work,\ - \ its musical expression. 
The way in which features of the situation are integrated\ - \ has relevance for others in the NIME community in the design of responsive spaces,\ - \ art installation, and large-scale or installed instruments in which users, participants,\ - \ acoustics play a significant role.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Kirsty Beilharz and Aengus Martin},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178211},\n\ - \ issn = {2220-4806},\n keywords = {NIME, site-specific installation, outdoor\ - \ sound installation},\n publisher = {University of Michigan},\n title = {The\ - \ `Interface' in Site-Specific Sound Installation},\n url = {http://www.nime.org/proceedings/2012/nime2012_175.pdf},\n\ - \ year = {2012}\n}\n" + ID: Muller2010 + abstract: 'In this paper we present a novel system for tactile actuation in stylus-based + musical interactions. The proposed controller aims to support rhythmical musical + performance. The system builds on resistive force feedback, which is achieved + through a brakeaugmented ball pen stylus on a sticky touch-sensitive surface. + Along the device itself, we present musical interaction principles that are enabled + through the aforementioned tactile response. Further variations of the device + and perspectives of the friction-based feedback are outlined. ' + address: 'Sydney, Australia' + author: 'Müller, Alexander and Hemmert, Fabian and Wintergerst, Götz and Jagodzinski, + Ron' + bibtex: "@inproceedings{Muller2010,\n abstract = {In this paper we present a novel\ + \ system for tactile actuation in stylus-based musical interactions. The proposed\ + \ controller aims to support rhythmical musical performance. The system builds\ + \ on resistive force feedback, which is achieved through a brakeaugmented ball\ + \ pen stylus on a sticky touch-sensitive surface. 
Along the device itself, we\ + \ present musical interaction principles that are enabled through the aforementioned\ + \ tactile response. Further variations of the device and perspectives of the friction-based\ + \ feedback are outlined. },\n address = {Sydney, Australia},\n author = {M\\\"\ + {u}ller, Alexander and Hemmert, Fabian and Wintergerst, G\\\"{o}tz and Jagodzinski,\ + \ Ron},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177835},\n issn = {2220-4806},\n\ + \ keywords = {force feedback, haptic feedback, interactive, pen controller},\n\ + \ pages = {477--478},\n title = {Reflective Haptics : Resistive Force Feedback\ + \ for Musical Performances with Stylus-Controlled Instruments},\n url = {http://www.nime.org/proceedings/2010/nime2010_477.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178211 + doi: 10.5281/zenodo.1177835 issn: 2220-4806 - keywords: 'NIME, site-specific installation, outdoor sound installation' - publisher: University of Michigan - title: The `Interface' in Site-Specific Sound Installation - url: http://www.nime.org/proceedings/2012/nime2012_175.pdf - year: 2012 + keywords: 'force feedback, haptic feedback, interactive, pen controller' + pages: 477--478 + title: 'Reflective Haptics : Resistive Force Feedback for Musical Performances with + Stylus-Controlled Instruments' + url: http://www.nime.org/proceedings/2010/nime2010_477.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Muller2012 - abstract: 'This paper discusses the utilization of human skin as a tangible interface - for musical expression and collaborative performance. We present an overview of - existing different instrument designs that include the skin as the main input. 
- As a further development of a previous exploration [16] we outline the setup and - interaction methods of `Skintimacy'', an instrument that appropriates the skin - for low voltage power transmission in multi-player interaction. Observations deriving - from proof-of-concept exploration and performances using the instrument are brought - into the reflection and discussion concerning the capabilities and limitations - of skin as an input surface.' - address: 'Ann Arbor, Michigan' - author: Alexander Müller-Rakow and Jochen Fuchs - bibtex: "@inproceedings{Muller2012,\n abstract = {This paper discusses the utilization\ - \ of human skin as a tangible interface for musical expression and collaborative\ - \ performance. We present an overview of existing different instrument designs\ - \ that include the skin as the main input. As a further development of a previous\ - \ exploration [16] we outline the setup and interaction methods of `Skintimacy',\ - \ an instrument that appropriates the skin for low voltage power transmission\ - \ in multi-player interaction. 
Observations deriving from proof-of-concept exploration\ - \ and performances using the instrument are brought into the reflection and discussion\ - \ concerning the capabilities and limitations of skin as an input surface.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Alexander M{\\''u}ller-Rakow and\ - \ Jochen Fuchs},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178335},\n\ - \ issn = {2220-4806},\n keywords = {Skin-based instruments, skin conductivity,\ - \ collaborative interfaces, embodiment, intimacy, multi-player performance},\n\ - \ publisher = {University of Michigan},\n title = {The Human Skin as an Interface\ - \ for Musical Expression},\n url = {http://www.nime.org/proceedings/2012/nime2012_177.pdf},\n\ - \ year = {2012}\n}\n" + ID: Mattek2010 + abstract: 'The American experimental tradition in music emphasizes a process-oriented + – rather than goal-oriented – composition style. According to this tradition, + the composition process is considered an experiment beginning with a problem resolved + by the composer. The noted experimental composer John Cage believed that the artist’s + role in composition should be one of coexistence, as opposed to the traditional + view of directly controlling the process. Consequently, Cage devel- oped methods + of composing that upheld this philosophy by utilizing musical charts and the I + Ching, also known as the Chinese Book of Changes. This project investigates these + methods and models them via an interactive computer system to explore the use + of modern interfaces in experimental composition.' + address: 'Sydney, Australia' + author: 'Mattek, Alison and Freeman, Mark and Humphrey, Eric' + bibtex: "@inproceedings{Mattek2010,\n abstract = {The American experimental tradition\ + \ in music emphasizes a process-oriented – rather than goal-oriented – composition\ + \ style. 
According to this tradition, the composition process is considered an\ + \ experiment beginning with a problem resolved by the composer. The noted experimental\ + \ composer John Cage believed that the artist’s role in composition should be\ + \ one of coexistence, as opposed to the traditional view of directly controlling\ + \ the process. Consequently, Cage devel- oped methods of composing that upheld\ + \ this philosophy by utilizing musical charts and the I Ching, also known as the\ + \ Chinese Book of Changes. This project investigates these methods and models\ + \ them via an interactive computer system to explore the use of modern interfaces\ + \ in experimental composition.},\n address = {Sydney, Australia},\n author = {Mattek,\ + \ Alison and Freeman, Mark and Humphrey, Eric},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1177847},\n issn = {2220-4806},\n keywords = {Multi-touch\ + \ Interfaces, Computer-Assisted Composition},\n pages = {479--480},\n title =\ + \ {Revisiting Cagean Composition Methodology with a Modern Computational Implementation},\n\ + \ url = {http://www.nime.org/proceedings/2010/nime2010_479.pdf},\n year = {2010}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178335 + doi: 10.5281/zenodo.1177847 issn: 2220-4806 - keywords: 'Skin-based instruments, skin conductivity, collaborative interfaces, - embodiment, intimacy, multi-player performance' - publisher: University of Michigan - title: The Human Skin as an Interface for Musical Expression - url: http://www.nime.org/proceedings/2012/nime2012_177.pdf - year: 2012 + keywords: 'Multi-touch Interfaces, Computer-Assisted Composition' + pages: 479--480 + title: Revisiting Cagean Composition Methodology with a Modern Computational Implementation + url: http://www.nime.org/proceedings/2010/nime2010_479.pdf + year: 2010 - 
ENTRYTYPE: inproceedings - ID: Lee2012 - abstract: 'Empatheater is a video playing system that is controlled by multimodal - interaction. As the video is played, the user must interact and emulate predefined - ``events'''' for the video to continue on. The user is given the illusion of playing - an active role in the unraveling video content and can empathize with the performer. - In this paper, we report about user experiences with Empatheater when applied - to musical video contents.' - address: 'Ann Arbor, Michigan' - author: Myunghee Lee and Youngsun Kim and Gerard Kim - bibtex: "@inproceedings{Lee2012,\n abstract = {Empatheater is a video playing system\ - \ that is controlled by multimodal interaction. As the video is played, the user\ - \ must interact and emulate predefined ``events'' for the video to continue on.\ - \ The user is given the illusion of playing an active role in the unraveling video\ - \ content and can empathize with the performer. In this paper, we report about\ - \ user experiences with Empatheater when applied to musical video contents.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Myunghee Lee and Youngsun Kim and\ - \ Gerard Kim},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178313},\n issn\ - \ = {2220-4806},\n keywords = {Music video, Empathy, Interactive video, Musical\ - \ event, Multimodal interaction.},\n publisher = {University of Michigan},\n title\ - \ = {Empathetic Interactive Music Video Experience},\n url = {http://www.nime.org/proceedings/2012/nime2012_179.pdf},\n\ - \ year = {2012}\n}\n" + ID: Ferguson2010 + abstract: 'In this paper, we describe a comparison between parameters drawn from + 3-dimensional measurement of a dance performance, and continuous emotional response + data recorded from an audience present during this performance. 
A continuous time + series representing the mean movement as the dance unfolds is extracted from the + 3-dimensional data. The audiences'' continuous emotional response data are also + represented as a time series, and the series are compared. We concluded that movement + in the dance performance directly influences the emotional arousal response of + the audience. ' + address: 'Sydney, Australia' + author: 'Ferguson, Sam and Schubert, Emery and Stevens, Catherine' + bibtex: "@inproceedings{Ferguson2010,\n abstract = {In this paper, we describe a\ + \ comparison between parameters drawn from 3-dimensional measurement of a dance\ + \ performance, and continuous emotional response data recorded from an audience\ + \ present during this performance. A continuous time series representing the mean\ + \ movement as the dance unfolds is extracted from the 3-dimensional data. The\ + \ audiences' continuous emotional response data are also represented as a time\ + \ series, and the series are compared. We concluded that movement in the dance\ + \ performance directly influences the emotional arousal response of the audience.\ + \ },\n address = {Sydney, Australia},\n author = {Ferguson, Sam and Schubert,\ + \ Emery and Stevens, Catherine},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177767},\n\ + \ issn = {2220-4806},\n keywords = {Dance, Emotion, Motion Capture, Continuous\ + \ Response.},\n pages = {481--484},\n title = {Movement in a Contemporary Dance\ + \ Work and its Relation to Continuous Emotional Response},\n url = {http://www.nime.org/proceedings/2010/nime2010_481.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178313 + doi: 10.5281/zenodo.1177767 issn: 2220-4806 - keywords: 'Music video, Empathy, Interactive video, Musical event, Multimodal interaction.' 
- publisher: University of Michigan - title: Empathetic Interactive Music Video Experience - url: http://www.nime.org/proceedings/2012/nime2012_179.pdf - year: 2012 + keywords: 'Dance, Emotion, Motion Capture, Continuous Response.' + pages: 481--484 + title: Movement in a Contemporary Dance Work and its Relation to Continuous Emotional + Response + url: http://www.nime.org/proceedings/2010/nime2010_481.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Clay2012 - abstract: 'The augmented ballet project aims at gathering research from several - fields and directing them towards a same application case: adding virtual elements - (visual and acoustic) to a dance live performance, and allowing the dancer to - interact with them. In this paper, we describe a novel interaction that we used - in the frame of this project: using the dancer''s movements to recognize the emotions - he expresses, and use these emotions to generate musical audio flows evolving - in real-time. The originality of this interaction is threefold. First, it covers - the whole interaction cycle from the input (the dancer''s movements) to the output - (the generated music). Second, this interaction isn''t direct but goes through - a high level of abstraction: dancer''s emotional expression is recognized and - is the source of music generation. Third, this interaction has been designed and - validated through constant collaboration with a choreographer, culminating in - an augmented ballet performance in front of a live audience.' 
- address: 'Ann Arbor, Michigan' - author: Alexis Clay and Nadine Couture and Myriam Desainte-Catherine and Pierre-Henri - Vulliard and Joseph Larralde and Elodie Decarsin - bibtex: "@inproceedings{Clay2012,\n abstract = {The augmented ballet project aims\ - \ at gathering research from several fields and directing them towards a same\ - \ application case: adding virtual elements (visual and acoustic) to a dance live\ - \ performance, and allowing the dancer to interact with them. In this paper, we\ - \ describe a novel interaction that we used in the frame of this project: using\ - \ the dancer's movements to recognize the emotions he expresses, and use these\ - \ emotions to generate musical audio flows evolving in real-time. The originality\ - \ of this interaction is threefold. First, it covers the whole interaction cycle\ - \ from the input (the dancer's movements) to the output (the generated music).\ - \ Second, this interaction isn't direct but goes through a high level of abstraction:\ - \ dancer's emotional expression is recognized and is the source of music generation.\ - \ Third, this interaction has been designed and validated through constant collaboration\ - \ with a choreographer, culminating in an augmented ballet performance in front\ - \ of a live audience.},\n address = {Ann Arbor, Michigan},\n author = {Alexis\ - \ Clay and Nadine Couture and Myriam Desainte-Catherine and Pierre-Henri Vulliard\ - \ and Joseph Larralde and Elodie Decarsin},\n booktitle = {Proceedings of the\ + ID: Ahmaniemi2010 + abstract: 'This paper investigates whether a dynamic vibrotactile feedback improves + the playability of a gesture controlled virtual instrument. The instrument described + in this study is based on a virtual control surface that player strikes with a + hand held sensor-actuator device. We designed two tactile cues to augment the + stroke across the control surface: a static and dynamic cue. 
The static cue was + a simple burst of vibration triggered when crossing the control surface. The dynamic + cue was continuous vibration increasing in amplitude when approaching the surface. + We arranged an experiment to study the influence of the tactile cues in performance. + In a tempo follow task, the dynamic cue yielded significantly the best temporal + and periodic accuracy and control of movement velocity and amplitude. The static + cue did not significantly improve the rhythmic accuracy but assisted the control + of movement velocity compared to the condition without tactile feedback at all. + The findings of the study indicate that careful design of dynamic vibrotactile + feedback can improve the controllability of gesture based virtual instrument. ' + address: 'Sydney, Australia' + author: 'Ahmaniemi, Teemu' + bibtex: "@inproceedings{Ahmaniemi2010,\n abstract = {This paper investigates whether\ + \ a dynamic vibrotactile feedback improves the playability of a gesture controlled\ + \ virtual instrument. The instrument described in this study is based on a virtual\ + \ control surface that player strikes with a hand held sensor-actuator device.\ + \ We designed two tactile cues to augment the stroke across the control surface:\ + \ a static and dynamic cue. The static cue was a simple burst of vibration triggered\ + \ when crossing the control surface. The dynamic cue was continuous vibration\ + \ increasing in amplitude when approaching the surface. We arranged an experiment\ + \ to study the influence of the tactile cues in performance. In a tempo follow\ + \ task, the dynamic cue yielded significantly the best temporal and periodic accuracy\ + \ and control of movement velocity and amplitude. The static cue did not significantly\ + \ improve the rhythmic accuracy but assisted the control of movement velocity\ + \ compared to the condition without tactile feedback at all. 
The findings of the\ + \ study indicate that careful design of dynamic vibrotactile feedback can improve\ + \ the controllability of gesture based virtual instrument. },\n address = {Sydney,\ + \ Australia},\n author = {Ahmaniemi, Teemu},\n booktitle = {Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178237},\n issn = {2220-4806},\n keywords = {Interactive sonification,\ - \ motion, gesture and music, interaction, live performance, musical human-computer\ - \ interaction},\n publisher = {University of Michigan},\n title = {Movement to\ - \ emotions to music: using whole body emotional expression as an interaction for\ - \ electronic music generation},\n url = {http://www.nime.org/proceedings/2012/nime2012_180.pdf},\n\ - \ year = {2012}\n}\n" + \ {10.5281/zenodo.1177711},\n issn = {2220-4806},\n keywords = {Virtual instrument,\ + \ Gesture, Tactile feedback, Motor control},\n pages = {485--488},\n title = {Gesture\ + \ Controlled Virtual Instrument with Dynamic Vibrotactile Feedback},\n url = {http://www.nime.org/proceedings/2010/nime2010_485.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178237 + doi: 10.5281/zenodo.1177711 issn: 2220-4806 - keywords: 'Interactive sonification, motion, gesture and music, interaction, live - performance, musical human-computer interaction' - publisher: University of Michigan - title: 'Movement to emotions to music: using whole body emotional expression as - an interaction for electronic music generation' - url: http://www.nime.org/proceedings/2012/nime2012_180.pdf - year: 2012 + keywords: 'Virtual instrument, Gesture, Tactile feedback, Motor control' + pages: 485--488 + title: Gesture Controlled Virtual Instrument with Dynamic Vibrotactile Feedback + url: http://www.nime.org/proceedings/2010/nime2010_485.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: 
Trappe2012 - abstract: 'In this paper we present our project to make sound synthesis and music - controller construction accessible to children in a technology design workshop. - We present the work we have carried out to develop a graphical user interface, - and give account of the workshop we conducted in collaboration with a local primary - school. Our results indicate that the production of audio events by means of digital - synthesis and algorithmic composition provides a rich and interesting field to - be discovered for pedagogical workshops taking a Constructionist approach.' - address: 'Ann Arbor, Michigan' - author: Christoph Trappe - bibtex: "@inproceedings{Trappe2012,\n abstract = {In this paper we present our project\ - \ to make sound synthesis and music controller construction accessible to children\ - \ in a technology design workshop. We present the work we have carried out to\ - \ develop a graphical user interface, and give account of the workshop we conducted\ - \ in collaboration with a local primary school. 
Our results indicate that the\ - \ production of audio events by means of digital synthesis and algorithmic composition\ - \ provides a rich and interesting field to be discovered for pedagogical workshops\ - \ taking a Constructionist approach.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Christoph Trappe},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178439},\n\ - \ issn = {2220-4806},\n keywords = {Child Computer Interaction, Constructionism,\ - \ Sound and Music Computing, Human-Computer Interface Design, Mu-sic Composition\ - \ and Generation, Interactive Audio Sys-tems, Technology Design Activities.},\n\ - \ publisher = {University of Michigan},\n title = {Making Sound Synthesis Accessible\ - \ for Children},\n url = {http://www.nime.org/proceedings/2012/nime2012_181.pdf},\n\ - \ year = {2012}\n}\n" + ID: Hass2010 + abstract: 'In his demonstration, the author discusses the sequential progress of + his technical and aesthetic decisions as composer and videographer for four large-scale + works for dance through annotated video examples of live performances and PowerPoint + slides. In addition, he discusses his current real-time dance work with wireless + sensor interfaces using sewable LilyPad Arduino modules and Xbee radio hardware.' + address: 'Sydney, Australia' + author: 'Hass, Jeffrey' + bibtex: "@inproceedings{Hass2010,\n abstract = {In his demonstration, the author\ + \ discusses the sequential progress of his technical and aesthetic decisions as\ + \ composer and videographer for four large-scale works for dance through annotated\ + \ video examples of live performances and PowerPoint slides. 
In addition, he discusses\ + \ his current real-time dance work with wireless sensor interfaces using sewable\ + \ LilyPad Arduino modules and Xbee radio hardware.},\n address = {Sydney, Australia},\n\ + \ author = {Hass, Jeffrey},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177793},\n\ + \ issn = {2220-4806},\n keywords = {dance, video processing, video tracking, LilyPad\ + \ Arduino.},\n pages = {489--492},\n title = {Creating Integrated Music and Video\ + \ for Dance : Lessons Learned and Lessons Ignored},\n url = {http://www.nime.org/proceedings/2010/nime2010_489.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178439 + doi: 10.5281/zenodo.1177793 issn: 2220-4806 - keywords: 'Child Computer Interaction, Constructionism, Sound and Music Computing, - Human-Computer Interface Design, Mu-sic Composition and Generation, Interactive - Audio Sys-tems, Technology Design Activities.' - publisher: University of Michigan - title: Making Sound Synthesis Accessible for Children - url: http://www.nime.org/proceedings/2012/nime2012_181.pdf - year: 2012 + keywords: 'dance, video processing, video tracking, LilyPad Arduino.' + pages: 489--492 + title: 'Creating Integrated Music and Video for Dance : Lessons Learned and Lessons + Ignored' + url: http://www.nime.org/proceedings/2010/nime2010_489.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Skogstad2012 - abstract: 'In this paper we present the Dance Jockey System, a system developed - for using a full body inertial motion capture suit (Xsens MVN) in music/dance - performances. We present different strategies for extracting relevant postures - and actions from the continuous data, and how these postures and actions can be - used to control sonic and musical features. 
The system has been used in several - public performances, and we believe it has great potential for further exploration. - However, to overcome the current practical and technical challenges when working - with the system, it is important to further refine tools and software in order - to facilitate making of new performance pieces.' - address: 'Ann Arbor, Michigan' - author: 'Skogstad, Ståle A. and Kristian Nymoen and de Quay, Yago and Jensenius, - Alexander Refsum' - bibtex: "@inproceedings{Skogstad2012,\n abstract = {In this paper we present the\ - \ Dance Jockey System, a system developed for using a full body inertial motion\ - \ capture suit (Xsens MVN) in music/dance performances. We present different strategies\ - \ for extracting relevant postures and actions from the continuous data, and how\ - \ these postures and actions can be used to control sonic and musical features.\ - \ The system has been used in several public performances, and we believe it has\ - \ great potential for further exploration. However, to overcome the current practical\ - \ and technical challenges when working with the system, it is important to further\ - \ refine tools and software in order to facilitate making of new performance pieces.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Skogstad, St{\\aa}le A. and Kristian\ - \ Nymoen and de Quay, Yago and Jensenius, Alexander Refsum},\n booktitle = {Proceedings\ + ID: Burt2010 + abstract: 'This paper describes a series of mathematical functions implemented by + the author in the commercial algorithmic software language ArtWonk, written by + John Dunn, which are offered with that language as resources for composers. It + gives a history of the development of the functions, with an emphasis on how I + developed them for use in my compositions.' 
+ address: 'Sydney, Australia' + author: 'Burt, Warren' + bibtex: "@inproceedings{Burt2010,\n abstract = {This paper describes a series of\ + \ mathematical functions implemented by the author in the commercial algorithmic\ + \ software language ArtWonk, written by John Dunn, which are offered with that\ + \ language as resources for composers. It gives a history of the development of\ + \ the functions, with an emphasis on how I developed them for use in my compositions.},\n\ + \ address = {Sydney, Australia},\n author = {Burt, Warren},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1180601},\n issn = {2220-4806},\n publisher = {University\ - \ of Michigan},\n title = {Developing the Dance Jockey System for Musical Interaction\ - \ with the Xsens {MV}N Suit},\n url = {http://www.nime.org/proceedings/2012/nime2012_182.pdf},\n\ - \ year = {2012}\n}\n" + \ doi = {10.5281/zenodo.1177733},\n issn = {2220-4806},\n keywords = {Algorithmic\ + \ composition, mathematical composition, probability distributions, fractals,\ + \ additive sequences},\n pages = {493--496},\n title = {Packages for ArtWonk :\ + \ New Mathematical Tools for Composers},\n url = {http://www.nime.org/proceedings/2010/nime2010_493.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180601 + doi: 10.5281/zenodo.1177733 issn: 2220-4806 - publisher: University of Michigan - title: Developing the Dance Jockey System for Musical Interaction with the Xsens - MVN Suit - url: http://www.nime.org/proceedings/2012/nime2012_182.pdf - year: 2012 + keywords: 'Algorithmic composition, mathematical composition, probability distributions, + fractals, additive sequences' + pages: 493--496 + title: 'Packages for ArtWonk : New Mathematical Tools for Composers' + url: http://www.nime.org/proceedings/2010/nime2010_493.pdf + year: 2010 - 
ENTRYTYPE: inproceedings - ID: Senturk2012 - abstract: 'Meaning crossword of sound, Crossole is a musical meta-instrument where - the music is visualized as a set of virtual blocks that resemble a crossword puzzle. - In Crossole, the chord progressions are visually presented as a set of virtual - blocks. With the aid of the Kinect sensing technology, a performer controls music - by manipulating the crossword blocks using hand movements. The performer can build - chords in the high level, traverse over the blocks, step into the low level to - control the chord arpeggiations note by note, loop a chord progression or map - gestures to various processing algorithms to enhance the timbral scenery.' - address: 'Ann Arbor, Michigan' - author: Sertan Şentürk and Sang Won Lee and Avinash Sastry and Anosh Daruwalla and - Gil Weinberg - bibtex: "@inproceedings{Senturk2012,\n abstract = {Meaning crossword of sound, Crossole\ - \ is a musical meta-instrument where the music is visualized as a set of virtual\ - \ blocks that resemble a crossword puzzle. In Crossole, the chord progressions\ - \ are visually presented as a set of virtual blocks. With the aid of the Kinect\ - \ sensing technology, a performer controls music by manipulating the crossword\ - \ blocks using hand movements. 
The performer can build chords in the high level,\ - \ traverse over the blocks, step into the low level to control the chord arpeggiations\ - \ note by note, loop a chord progression or map gestures to various processing\ - \ algorithms to enhance the timbral scenery.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Sertan \\c{S}ent\\\"{u}rk and Sang Won Lee and Avinash Sastry and\ - \ Anosh Daruwalla and Gil Weinberg},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178201},\n\ - \ issn = {2220-4806},\n keywords = {Kinect, meta-instrument, chord progression,\ - \ body gesture},\n publisher = {University of Michigan},\n title = {Crossole:\ - \ A Gestural Interface for Composition, Improvisation and Performance using Kinect},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_185.pdf},\n year = {2012}\n\ - }\n" + ID: Miller2010 + abstract: 'The console gaming industry is experiencing a revolution in terms of + user control, and a large part to Nintendo''s introduction of the Wii remote. + The online open source development community has embraced the Wii remote, integrating + the inexpensive technology into numerous applications. Some of the more interesting + applications demonstrate how the remote hardware can be leveraged for nonstandard + uses. In this paper we describe a new way of interacting with the Wii remote and + sensor bar to produce music. The Wiiolin is a virtual instrument which can mimic + a violin or cello. Sensor bar motion relative to the Wii remote and button presses + are analyzed in real-time to generate notes. Our design is novel in that it involves + the remote''s infrared camera and sensor bar as an integral part of music production, + allowing users to change notes by simply altering the angle of their wrist, and + henceforth, bow. 
The Wiiolin introduces a more realistic way of instrument interaction + than other attempts that rely on button presses and accelerometer data alone. ' + address: 'Sydney, Australia' + author: 'Miller, Jace and Hammond, Tracy' + bibtex: "@inproceedings{Miller2010,\n abstract = {The console gaming industry is\ + \ experiencing a revolution in terms of user control, and a large part to Nintendo's\ + \ introduction of the Wii remote. The online open source development community\ + \ has embraced the Wii remote, integrating the inexpensive technology into numerous\ + \ applications. Some of the more interesting applications demonstrate how the\ + \ remote hardware can be leveraged for nonstandard uses. In this paper we describe\ + \ a new way of interacting with the Wii remote and sensor bar to produce music.\ + \ The Wiiolin is a virtual instrument which can mimic a violin or cello. Sensor\ + \ bar motion relative to the Wii remote and button presses are analyzed in real-time\ + \ to generate notes. Our design is novel in that it involves the remote's infrared\ + \ camera and sensor bar as an integral part of music production, allowing users\ + \ to change notes by simply altering the angle of their wrist, and henceforth,\ + \ bow. The Wiiolin introduces a more realistic way of instrument interaction than\ + \ other attempts that rely on button presses and accelerometer data alone. 
},\n\ + \ address = {Sydney, Australia},\n author = {Miller, Jace and Hammond, Tracy},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1177853},\n issn = {2220-4806},\n\ + \ keywords = {Wii remote, virtual instrument, violin, cello, motion recognition,\ + \ human computer interaction, gesture recognition.},\n pages = {497--500},\n title\ + \ = {Wiiolin : a Virtual Instrument Using the Wii Remote},\n url = {http://www.nime.org/proceedings/2010/nime2010_497.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178201 + doi: 10.5281/zenodo.1177853 issn: 2220-4806 - keywords: 'Kinect, meta-instrument, chord progression, body gesture' - publisher: University of Michigan - title: 'Crossole: A Gestural Interface for Composition, Improvisation and Performance - using Kinect' - url: http://www.nime.org/proceedings/2012/nime2012_185.pdf - year: 2012 + keywords: 'Wii remote, virtual instrument, violin, cello, motion recognition, human + computer interaction, gesture recognition.' + pages: 497--500 + title: 'Wiiolin : a Virtual Instrument Using the Wii Remote' + url: http://www.nime.org/proceedings/2010/nime2010_497.pdf + year: 2010 - ENTRYTYPE: inproceedings - ID: Snyder2012 - abstract: 'This paper presents the JD-1, a digital controller for analog modular - synthesizers. The JD-1 features a capacitive touch-sensing keyboard that responds - to continuous variations in finger contact, high-accuracy polyphonic control-voltage - outputs, a built-in sequencer, and digital interfaces for connection to MIDI and - OSC devices. Design goals include interoperability with a wide range of synthesizers, - very high-resolution pitch control, and intuitive control of the sequencer from - the keyboard.' 
- address: 'Ann Arbor, Michigan' - author: Jeff Snyder and Andrew McPherson - bibtex: "@inproceedings{Snyder2012,\n abstract = {This paper presents the JD-1,\ - \ a digital controller for analog modular synthesizers. The JD-1 features a capacitive\ - \ touch-sensing keyboard that responds to continuous variations in finger contact,\ - \ high-accuracy polyphonic control-voltage outputs, a built-in sequencer, and\ - \ digital interfaces for connection to MIDI and OSC devices. Design goals include\ - \ interoperability with a wide range of synthesizers, very high-resolution pitch\ - \ control, and intuitive control of the sequencer from the keyboard.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Jeff Snyder and Andrew McPherson},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178421},\n issn = {2220-4806},\n keywords\ - \ = {keyboard, sequencer, analog synthesizer, capacitive touch sensing},\n publisher\ - \ = {University of Michigan},\n title = {The JD-1: an Implementation of a Hybrid\ - \ Keyboard/Sequencer Controller for Analog Synthesizers},\n url = {http://www.nime.org/proceedings/2012/nime2012_187.pdf},\n\ - \ year = {2012}\n}\n" + ID: Meier2010 + abstract: '‘The Planets’ combines a novel approach for algorithmic composition with + new human-computer interaction paradigms and realistic painting techniques. The + main inspiration for it was the composition ‘The Planets’ from Gustav Holst who + portrayed each planet in our solar system with music. Our application allows to + interactively compose music in real-time by arranging planet constellations on + an interactive table. The music generation is controlled by painted miniatures + of the planets and the sun which are detected by the table and supplemented with + an additional graphical visualization, creating a unique audio-visual experience. + A video of the application can be found in [1].' 
+ address: 'Sydney, Australia' + author: 'Meier, Max and Schranner, Max' + bibtex: "@inproceedings{Meier2010,\n abstract = {‘The Planets’ combines a novel\ + \ approach for algorithmic composition with new human-computer interaction paradigms\ + \ and realistic painting techniques. The main inspiration for it was the composition\ + \ ‘The Planets’ from Gustav Holst who portrayed each planet in our solar system\ + \ with music. Our application allows to interactively compose music in real-time\ + \ by arranging planet constellations on an interactive table. The music generation\ + \ is controlled by painted miniatures of the planets and the sun which are detected\ + \ by the table and supplemented with an additional graphical visualization, creating\ + \ a unique audio-visual experience. A video of the application can be found in\ + \ [1].},\n address = {Sydney, Australia},\n author = {Meier, Max and Schranner,\ + \ Max},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1177851},\n issn = {2220-4806},\n\ + \ keywords = {algorithmic composition, soft constraints, tangible interaction},\n\ + \ pages = {501--504},\n title = {The Planets},\n url = {http://www.nime.org/proceedings/2010/nime2010_501.pdf},\n\ + \ year = {2010}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178421 + doi: 10.5281/zenodo.1177851 issn: 2220-4806 - keywords: 'keyboard, sequencer, analog synthesizer, capacitive touch sensing' - publisher: University of Michigan - title: 'The JD-1: an Implementation of a Hybrid Keyboard/Sequencer Controller for - Analog Synthesizers' - url: http://www.nime.org/proceedings/2012/nime2012_187.pdf - year: 2012 + keywords: 'algorithmic composition, soft constraints, tangible interaction' + pages: 501--504 + title: The Planets + url: http://www.nime.org/proceedings/2010/nime2010_501.pdf + year: 2010 - ENTRYTYPE: 
inproceedings - ID: OSullivan2012 - abstract: 'Development of new musical interfaces often requires experimentation - with the mapping of available controller inputs to output parameters. Useful mappings - for a particular application may be complex in nature, with one or more inputs - being linked to one or more outputs. Existing development environments are commonly - used to program such mappings, while code libraries provide powerful data-stream - manipulation. However, room exists for a standalone application with a simpler - graphical user interface for dynamically patching between inputs and outputs. - This paper presents an early prototype version of a software tool that allows - the user to route control signals in real time, using various messaging formats. - It is cross-platform and runs as a standalone application in desktop and Android - OS versions. The latter allows the users of mobile devices to experiment with - mapping signals to and from physical computing components using the inbuilt multi-touch - screen. Potential uses therefore include real-time mapping during performance - in a more expressive manner than facilitated by existing tools.' - address: 'Ann Arbor, Michigan' - author: Liam O'Sullivan and Dermot Furlong and Frank Boland - bibtex: "@inproceedings{OSullivan2012,\n abstract = {Development of new musical\ - \ interfaces often requires experimentation with the mapping of available controller\ - \ inputs to output parameters. Useful mappings for a particular application may\ - \ be complex in nature, with one or more inputs being linked to one or more outputs.\ - \ Existing development environments are commonly used to program such mappings,\ - \ while code libraries provide powerful data-stream manipulation. However, room\ - \ exists for a standalone application with a simpler graphical user interface\ - \ for dynamically patching between inputs and outputs. 
This paper presents an\ - \ early prototype version of a software tool that allows the user to route control\ - \ signals in real time, using various messaging formats. It is cross-platform\ - \ and runs as a standalone application in desktop and Android OS versions. The\ - \ latter allows the users of mobile devices to experiment with mapping signals\ - \ to and from physical computing components using the inbuilt multi-touch screen.\ - \ Potential uses therefore include real-time mapping during performance in a more\ - \ expressive manner than facilitated by existing tools.},\n address = {Ann Arbor,\ - \ Michigan},\n author = {Liam O'Sullivan and Dermot Furlong and Frank Boland},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1180555},\n issn = {2220-4806},\n\ - \ keywords = {Mapping, Software Tools, Android.},\n publisher = {University of\ - \ Michigan},\n title = {Introducing CrossMapper: Another Tool for Mapping Musical\ - \ Control Parameters},\n url = {http://www.nime.org/proceedings/2012/nime2012_189.pdf},\n\ - \ year = {2012}\n}\n" + ID: ckorda2015 + abstract: 'Improvising to non-modal chord progressions such as those found in jazz + necessitates switching between the different scales implied by each chord. This + work attempted to simplify improvisation by delegating the process of switching + scales to a computer. An open-source software MIDI remapper called ChordEase was + developed that dynamically alters the pitch of notes, in order to fit them to + the chord scales of a predetermined song. ChordEase modifies the behavior of ordinary + MIDI instruments, giving them new interfaces that permit non-modal music to be + approached as if it were modal. Multiple instruments can be remapped simultaneously, + using a variety of mapping functions, each optimized for a particular musical + role. Harmonization and orchestration can also be automated. 
By facilitating the + selection of scale tones, ChordEase enables performers to focus on other aspects + of improvisation, and thus creates new possibilities for musical expression.' + address: 'Baton Rouge, Louisiana, USA' + author: Chris Korda + bibtex: "@inproceedings{ckorda2015,\n abstract = {Improvising to non-modal chord\ + \ progressions such as those found in jazz necessitates switching between the\ + \ different scales implied by each chord. This work attempted to simplify improvisation\ + \ by delegating the process of switching scales to a computer. An open-source\ + \ software MIDI remapper called ChordEase was developed that dynamically alters\ + \ the pitch of notes, in order to fit them to the chord scales of a predetermined\ + \ song. ChordEase modifies the behavior of ordinary MIDI instruments, giving them\ + \ new interfaces that permit non-modal music to be approached as if it were modal.\ + \ Multiple instruments can be remapped simultaneously, using a variety of mapping\ + \ functions, each optimized for a particular musical role. Harmonization and orchestration\ + \ can also be automated. 
By facilitating the selection of scale tones, ChordEase\ + \ enables performers to focus on other aspects of improvisation, and thus creates\ + \ new possibilities for musical expression.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Chris Korda},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179110},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {322--324},\n publisher = {Louisiana State University},\n title\ + \ = {ChordEase: A {MIDI} remapper for intuitive performance of non-modal music},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_103.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/103/0103-file1.avi},\n urlsuppl2 = {http://www.nime.org/proceedings/2015/103/0103-file2.avi},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180555 + doi: 10.5281/zenodo.1179110 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Mapping, Software Tools, Android.' - publisher: University of Michigan - title: 'Introducing CrossMapper: Another Tool for Mapping Musical Control Parameters' - url: http://www.nime.org/proceedings/2012/nime2012_189.pdf - year: 2012 + month: May + pages: 322--324 + publisher: Louisiana State University + title: 'ChordEase: A MIDI remapper for intuitive performance of non-modal music' + url: http://www.nime.org/proceedings/2015/nime2015_103.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/103/0103-file1.avi + urlsuppl2: http://www.nime.org/proceedings/2015/103/0103-file2.avi + year: 2015 - ENTRYTYPE: inproceedings - ID: Schiesser2012 - abstract: "An augmented bass clarinet is developed in order to extend the performance\ - \ and composition potential of the instru-ment. 
Four groups of sensors are added:\ - \ key positions, inertial movement, mouth pressure and trigger switches. The instrument\ - \ communicates wirelessly with a receiver setup which produces an OSC data stream,\ - \ usable by any appli-cation on a host computer.\nThe SABRe projects intention\ - \ is to be neither tied to its inventors nor to one single player but to offer\ - \ a reference design for a larger community of bass clarinet players and composers.\ - \ For this purpose, several instruments are made available and a number of composer\ - \ residencies, workshops, presentations and concerts are organized. These serve\ - \ for evaluation and improvement purposes in order to build a robust and user\ - \ friendly extended musical instrument, that opens new playing modalities." - address: 'Ann Arbor, Michigan' - author: Sébastien Schiesser and Jan C. Schacher - bibtex: "@inproceedings{Schiesser2012,\n abstract = {An augmented bass clarinet\ - \ is developed in order to extend the performance and composition potential of\ - \ the instru-ment. Four groups of sensors are added: key positions, inertial movement,\ - \ mouth pressure and trigger switches. The instrument communicates wirelessly\ - \ with a receiver setup which produces an OSC data stream, usable by any appli-cation\ - \ on a host computer.\nThe SABRe projects intention is to be neither tied to its\ - \ inventors nor to one single player but to offer a reference design for a larger\ - \ community of bass clarinet players and composers. For this purpose, several\ - \ instruments are made available and a number of composer residencies, workshops,\ - \ presentations and concerts are organized. These serve for evaluation and improvement\ - \ purposes in order to build a robust and user friendly extended musical instrument,\ - \ that opens new playing modalities.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {S{\\'e}bastien Schiesser and Jan C. 
Schacher},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1180587},\n issn = {2220-4806},\n keywords = {augmented\ - \ instrument, bass clarinet, sensors, air pressure, gesture, OSC},\n publisher\ - \ = {University of Michigan},\n title = {SABRe: The Augmented Bass Clarinet},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_193.pdf},\n year = {2012}\n\ - }\n" + ID: makbari2015 + abstract: 'One important problem in Musical Information Retrieval is Automatic Music + Transcription, which is an automated conversion process from played music to a + symbolic notation such as sheet music. Since the accuracy of previous audio-based + transcription systems is not satisfactory, we propose an innovative visual-based + automatic music transcription system named claVision to perform piano music transcription. + Instead of processing the music audio, the system performs the transcription only + from the video performance captured by a camera mounted over the piano keyboard. + claVision can be used as a transcription tool, but it also has other applications + such as music education. The claVision software has a very high accuracy (over + 95%) and a very low latency in real-time music transcription, even under different + illumination conditions.' + address: 'Baton Rouge, Louisiana, USA' + author: Mohammad Akbari and Howard Cheng + bibtex: "@inproceedings{makbari2015,\n abstract = {One important problem in Musical\ + \ Information Retrieval is Automatic Music Transcription, which is an automated\ + \ conversion process from played music to a symbolic notation such as sheet music.\ + \ Since the accuracy of previous audio-based transcription systems is not satisfactory,\ + \ we propose an innovative visual-based automatic music transcription system named\ + \ claVision to perform piano music transcription. 
Instead of processing the music\ + \ audio, the system performs the transcription only from the video performance\ + \ captured by a camera mounted over the piano keyboard. claVision can be used\ + \ as a transcription tool, but it also has other applications such as music education.\ + \ The claVision software has a very high accuracy (over 95%) and a very low latency\ + \ in real-time music transcription, even under different illumination conditions.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Mohammad Akbari and Howard\ + \ Cheng},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179002},\n editor = {Edgar\ + \ Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages =\ + \ {313--314},\n publisher = {Louisiana State University},\n title = {claVision:\ + \ Visual Automatic Piano Music Transcription},\n url = {http://www.nime.org/proceedings/2015/nime2015_105.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/105/0105-file1.avi},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180587 + doi: 10.5281/zenodo.1179002 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'augmented instrument, bass clarinet, sensors, air pressure, gesture, - OSC' - publisher: University of Michigan - title: 'SABRe: The Augmented Bass Clarinet' - url: http://www.nime.org/proceedings/2012/nime2012_193.pdf - year: 2012 + month: May + pages: 313--314 + publisher: Louisiana State University + title: 'claVision: Visual Automatic Piano Music Transcription' + url: http://www.nime.org/proceedings/2015/nime2015_105.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/105/0105-file1.avi + year: 2015 - ENTRYTYPE: inproceedings - ID: Overholt2012 - abstract: 'The Create USB Interface is an open source microcontroller board that - can be programmed in C, 
BASIC, or Arduino languages. The latest version is called - the CUI32Stem, and it is designed to work `hand-in-hand'' with the GROVE prototyping - system that includes a wide range of sensors and actuators. It utilizes a high-performance - Microchip® PIC32 microcontroller unit to allow programmable user interfaces. Its - development and typical uses are described, focusing on musical interaction design - scenarios. Several options for wireless connectivity are described as well, enabling - the CUI32Stem to pair with a smartphone and/or a normal computer. Finally, SeeedStudio''s - GROVE system is explained, which provides a prototyping system comprised of various - elements that incorporate simple plugs, allowing the CUI32Stem to easily connect - to the growing collection of open source GROVE transducers.' - address: 'Ann Arbor, Michigan' - author: Dan Overholt - bibtex: "@inproceedings{Overholt2012,\n abstract = {The Create USB Interface is\ - \ an open source microcontroller board that can be programmed in C, BASIC, or\ - \ Arduino languages. The latest version is called the CUI32Stem, and it is designed\ - \ to work `hand-in-hand' with the GROVE prototyping system that includes a wide\ - \ range of sensors and actuators. It utilizes a high-performance Microchip{\\\ - textregistered} PIC32 microcontroller unit to allow programmable user interfaces.\ - \ Its development and typical uses are described, focusing on musical interaction\ - \ design scenarios. 
Several options for wireless connectivity are described as\ - \ well, enabling the CUI32Stem to pair with a smartphone and/or a normal computer.\ - \ Finally, SeeedStudio's GROVE system is explained, which provides a prototyping\ - \ system comprised of various elements that incorporate simple plugs, allowing\ - \ the CUI32Stem to easily connect to the growing collection of open source GROVE\ - \ transducers.},\n address = {Ann Arbor, Michigan},\n author = {Dan Overholt},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1180561},\n issn = {2220-4806},\n\ - \ keywords = {Musical Interaction Design, NIME education, Microcontroller, Arduino\ - \ language, StickOS BASIC, Open Sound Control, Microchip PIC32, Wireless, Zigflea,\ - \ Wifi, 802.11g, Bluetooth, CUI32, CUI32Stem},\n publisher = {University of Michigan},\n\ - \ title = {Musical Interaction Design with the CUI32{S}tem: Wireless Options and\ - \ the GROVE system for prototyping new interfaces},\n url = {http://www.nime.org/proceedings/2012/nime2012_194.pdf},\n\ - \ year = {2012}\n}\n" + ID: jschacher2015 + abstract: 'When performing with gestural devices in combination with machine learning + techniques, a mode of high-level interaction can be achieved. The methods of machine + learning and pattern recognition can be re-appropriated to serve as a generative + principle. The goal is not classification but reaction from the system in an interactive + and autonomous manner. This investigation looks at how machine learning algorithms + fit generative purposes and what independent behaviours they enable. To this end + we describe artistic and technical developments made to leverage existing machine + learning algorithms as generative devices and discuss their relevance to the field + of gestural interaction.' + address: 'Baton Rouge, Louisiana, USA' + author: Jan C. 
Schacher and Chikashi Miyama and Daniel Bisig + bibtex: "@inproceedings{jschacher2015,\n abstract = {When performing with gestural\ + \ devices in combination with machine learning techniques, a mode of high-level\ + \ interaction can be achieved. The methods of machine learning and pattern recognition\ + \ can be re-appropriated to serve as a generative principle. The goal is not classification\ + \ but reaction from the system in an interactive and autonomous manner. This investigation\ + \ looks at how machine learning algorithms fit generative purposes and what independent\ + \ behaviours they enable. To this end we describe artistic and technical developments\ + \ made to leverage existing machine learning algorithms as generative devices\ + \ and discuss their relevance to the field of gestural interaction.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {{Jan C.} Schacher and Chikashi\ + \ Miyama and Daniel Bisig},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179172},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {347--350},\n publisher = {Louisiana State University},\n title\ + \ = {Gestural Electronic Music using Machine Learning as Generative Device},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_117.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180561 + doi: 10.5281/zenodo.1179172 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Musical Interaction Design, NIME education, Microcontroller, Arduino - language, StickOS BASIC, Open Sound Control, Microchip PIC32, Wireless, Zigflea, - Wifi, 802.11g, Bluetooth, CUI32, CUI32Stem' - publisher: University of Michigan - title: 'Musical Interaction Design with the CUI32Stem: Wireless Options and the - GROVE system for 
prototyping new interfaces' - url: http://www.nime.org/proceedings/2012/nime2012_194.pdf - year: 2012 + month: May + pages: 347--350 + publisher: Louisiana State University + title: Gestural Electronic Music using Machine Learning as Generative Device + url: http://www.nime.org/proceedings/2015/nime2015_117.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: McPherson2012 - abstract: 'Capacitive touch sensing is increasingly used in musical con-trollers, - particularly those based on multi-touch screen interfaces. However, in contrast - to the venerable piano-style keyboard, touch screen controllers lack the tactile - feedback many performers find crucial. This paper presents an augmentation system - for acoustic and electronic keyboards in which multi-touch capacitive sensors - are added to the surface of each key. Each key records the position of fingers - on the surface, and by combining this data with MIDI note onsets and aftertouch - from the host keyboard, the system functions as a multidimensional polyphonic - controller for a wide variety of synthesis software. The paper will discuss general - capacitive touch sensor design, keyboard-specific implementation strategies, and - the development of a flexible mapping engine using OSC and MIDI.' - address: 'Ann Arbor, Michigan' - author: Andrew McPherson - bibtex: "@inproceedings{McPherson2012,\n abstract = {Capacitive touch sensing is\ - \ increasingly used in musical con-trollers, particularly those based on multi-touch\ - \ screen interfaces. 
However, in contrast to the venerable piano-style keyboard,\ - \ touch screen controllers lack the tactile feedback many performers find crucial.\ - \ This paper presents an augmentation system for acoustic and electronic keyboards\ - \ in which multi-touch capacitive sensors are added to the surface of each key.\ - \ Each key records the position of fingers on the surface, and by combining this\ - \ data with MIDI note onsets and aftertouch from the host keyboard, the system\ - \ functions as a multidimensional polyphonic controller for a wide variety of\ - \ synthesis software. The paper will discuss general capacitive touch sensor design,\ - \ keyboard-specific implementation strategies, and the development of a flexible\ - \ mapping engine using OSC and MIDI.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Andrew McPherson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180531},\n\ - \ issn = {2220-4806},\n keywords = {augmented instruments, keyboard, capacitive\ - \ sensing, multitouch},\n publisher = {University of Michigan},\n title = {TouchKeys:\ - \ Capacitive Multi-Touch Sensing on a Physical Keyboard},\n url = {http://www.nime.org/proceedings/2012/nime2012_195.pdf},\n\ - \ year = {2012}\n}\n" + ID: spapetti2015 + abstract: 'This paper describes the design of a hardware/software system for rendering + multi-point, localized vibrotactile feedback in a multi-touch musical interface. + A prototype was developed, based on the Madrona Labs Soundplane, which was chosen + for it provides easy access to multi-touch data, including force, and its easily + expandable layered construction. The proposed solution makes use of several piezo + actuator discs, densely arranged in a honeycomb pattern on a thin PCB layer. Based + on off-the-shelf components, custom amplifying and routing electronics were designed + to drive each piezo element with standard audio signals. 
Features, as well as + electronic and mechanical issues of the current prototype are discussed.' + address: 'Baton Rouge, Louisiana, USA' + author: Stefano Papetti and Sébastien Schiesser and Martin Fröhlich + bibtex: "@inproceedings{spapetti2015,\n abstract = {This paper describes the design\ + \ of a hardware/software system for rendering multi-point, localized vibrotactile\ + \ feedback in a multi-touch musical interface. A prototype was developed, based\ + \ on the Madrona Labs Soundplane, which was chosen for it provides easy access\ + \ to multi-touch data, including force, and its easily expandable layered construction.\ + \ The proposed solution makes use of several piezo actuator discs, densely arranged\ + \ in a honeycomb pattern on a thin PCB layer. Based on off-the-shelf components,\ + \ custom amplifying and routing electronics were designed to drive each piezo\ + \ element with standard audio signals. Features, as well as electronic and mechanical\ + \ issues of the current prototype are discussed.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Stefano Papetti and S\\'ebastien Schiesser and Martin Fr\\\ + ''ohlich},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179152},\n editor = {Edgar\ + \ Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages =\ + \ {235--240},\n publisher = {Louisiana State University},\n title = {Multi-point\ + \ vibrotactile feedback for an expressive musical interface},\n url = {http://www.nime.org/proceedings/2015/nime2015_118.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180531 + doi: 10.5281/zenodo.1179152 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'augmented instruments, keyboard, capacitive sensing, multitouch' - publisher: University of Michigan - title: 'TouchKeys: 
Capacitive Multi-Touch Sensing on a Physical Keyboard' - url: http://www.nime.org/proceedings/2012/nime2012_195.pdf - year: 2012 + month: May + pages: 235--240 + publisher: Louisiana State University + title: Multi-point vibrotactile feedback for an expressive musical interface + url: http://www.nime.org/proceedings/2015/nime2015_118.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Lai2012 - abstract: 'This paper addresses the issue of engaging the audience with new musical - instruments in live performance context. We introduce design concerns that we - consider influential to enhance the communication flow between the audience and - the performer. We also propose and put in practice a design approach that considers - the use of performance space as a way to engage with the audience. A collaborative - project, Sound Gloves, presented here exemplifies such a concept by dissolving - the space between performers and audience. Our approach resulted in a continuous - interaction between audience and performers, in which the social dynamics was - changed in a positive way in a live performance context of NIMEs. Such an approach, - we argue, may be considered as one way to further engage and interact with the - audience.' - address: 'Ann Arbor, Michigan' - author: Chi-Hsia Lai and Koray Tahiroglu - bibtex: "@inproceedings{Lai2012,\n abstract = {This paper addresses the issue of\ - \ engaging the audience with new musical instruments in live performance context.\ - \ We introduce design concerns that we consider influential to enhance the communication\ - \ flow between the audience and the performer. We also propose and put in practice\ - \ a design approach that considers the use of performance space as a way to engage\ - \ with the audience. A collaborative project, Sound Gloves, presented here exemplifies\ - \ such a concept by dissolving the space between performers and audience. 
Our\ - \ approach resulted in a continuous interaction between audience and performers,\ - \ in which the social dynamics was changed in a positive way in a live performance\ - \ context of NIMEs. Such an approach, we argue, may be considered as one way to\ - \ further engage and interact with the audience.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Chi-Hsia Lai and Koray Tahiroglu},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178309},\n issn = {2220-4806},\n keywords = {NIME, wearable\ - \ electronics, performance, design approach},\n publisher = {University of Michigan},\n\ - \ title = {A Design Approach to Engage with Audience with Wearable Musical Instruments:\ - \ Sound Gloves},\n url = {http://www.nime.org/proceedings/2012/nime2012_197.pdf},\n\ - \ year = {2012}\n}\n" + ID: dramsay2015 + abstract: 'GroupLoop is a browser-based, collaborative audio feedback control system + for musical performance. GroupLoop users send their microphone stream to other + participants while simultaneously controlling the mix of other users'' streams + played through their speakers. Collaborations among users can yield complex feedback + loops where feedback paths overlap and interact. Users are able to shape the feedback + sounds in real-time by adjusting delay, EQ, and gain, as well as manipulating + the acoustics of their portion of the audio feedback path. This paper outlines + the basic principles underlying Grouploop, describes its design and feature-set, + and discusses observations of GroupLoop in performances. It concludes with a look + at future research and refinement. ' + address: 'Baton Rouge, Louisiana, USA' + author: David Ramsay and Joseph Paradiso + bibtex: "@inproceedings{dramsay2015,\n abstract = {GroupLoop is a browser-based,\ + \ collaborative audio feedback control system for musical performance. 
GroupLoop\ + \ users send their microphone stream to other participants while simultaneously\ + \ controlling the mix of other users' streams played through their speakers. Collaborations\ + \ among users can yield complex feedback loops where feedback paths overlap and\ + \ interact. Users are able to shape the feedback sounds in real-time by adjusting\ + \ delay, EQ, and gain, as well as manipulating the acoustics of their portion\ + \ of the audio feedback path. This paper outlines the basic principles underlying\ + \ Grouploop, describes its design and feature-set, and discusses observations\ + \ of GroupLoop in performances. It concludes with a look at future research and\ + \ refinement. },\n address = {Baton Rouge, Louisiana, USA},\n author = {David\ + \ Ramsay and Joseph Paradiso},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179158},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {251--254},\n publisher = {Louisiana State University},\n title\ + \ = {GroupLoop: A Collaborative, Network-Enabled Audio Feedback Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_119.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178309 + doi: 10.5281/zenodo.1179158 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, wearable electronics, performance, design approach' - publisher: University of Michigan - title: 'A Design Approach to Engage with Audience with Wearable Musical Instruments: - Sound Gloves' - url: http://www.nime.org/proceedings/2012/nime2012_197.pdf - year: 2012 + month: May + pages: 251--254 + publisher: Louisiana State University + title: 'GroupLoop: A Collaborative, Network-Enabled Audio Feedback Instrument' + url: http://www.nime.org/proceedings/2015/nime2015_119.pdf + 
year: 2015 - ENTRYTYPE: inproceedings - ID: Nymoen2012 - abstract: 'The paper presents an analysis of the quality of motion data from an - iPod Touch (4th gen.). Acceleration and orientation data derived from internal - sensors of an iPod is com-pared to data from a high end optical infrared marker-based - motion capture system (Qualisys) in terms of latency, jitter, accuracy and precision. - We identify some rotational drift in the iPod, and some time lag between the two - systems. Still, the iPod motion data is quite reliable, especially for describing - relative motion over a short period of time.' - address: 'Ann Arbor, Michigan' - author: 'Kristian Nymoen and Arve Voldsund and Skogstad, Ståle A. and Jensenius, - Alexander Refsum and Jim Torresen' - bibtex: "@inproceedings{Nymoen2012,\n abstract = {The paper presents an analysis\ - \ of the quality of motion data from an iPod Touch (4th gen.). Acceleration and\ - \ orientation data derived from internal sensors of an iPod is com-pared to data\ - \ from a high end optical infrared marker-based motion capture system (Qualisys)\ - \ in terms of latency, jitter, accuracy and precision. We identify some rotational\ - \ drift in the iPod, and some time lag between the two systems. Still, the iPod\ - \ motion data is quite reliable, especially for describing relative motion over\ - \ a short period of time.},\n address = {Ann Arbor, Michigan},\n author = {Kristian\ - \ Nymoen and Arve Voldsund and Skogstad, St{\\aa}le A. 
and Jensenius, Alexander\ - \ Refsum and Jim Torresen},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180553},\n\ - \ issn = {2220-4806},\n publisher = {University of Michigan},\n title = {Comparing\ - \ Motion Data from an iPod Touch to a High-End Optical Infrared Marker-Based Motion\ - \ Capture System},\n url = {http://www.nime.org/proceedings/2012/nime2012_198.pdf},\n\ - \ year = {2012}\n}\n" + ID: kyamamoto2015 + abstract: 'We propose a novel user interface that enables control of a singing voice + synthesizer at a live improvisational performance. The user first registers the + lyrics of a song with the system before performance, and the system builds a probabilistic + model that models the possible jumps within the lyrics. During performance, the + user simultaneously inputs the lyrics of a song with the left hand using a vowel + keyboard and the melodies with the right hand using a standard musical keyboard. + Our system searches for a portion of the registered lyrics whose vowel sequence + matches the current user input using the probabilistic model, and sends the matched + lyrics to the singing voice synthesizer. The vowel input keys are mapped onto + a standard musical keyboard, enabling experienced keyboard players to learn the + system from a standard musical score. We examine the feasibility of the system + through a series of evaluations and user studies. ' + address: 'Baton Rouge, Louisiana, USA' + author: Kazuhiko Yamamoto and Takeo Igarashi + bibtex: "@inproceedings{kyamamoto2015,\n abstract = {We propose a novel user interface\ + \ that enables control of a singing voice synthesizer at a live improvisational\ + \ performance. The user first registers the lyrics of a song with the system before\ + \ performance, and the system builds a probabilistic model that models the possible\ + \ jumps within the lyrics. 
During performance, the user simultaneously inputs\ + \ the lyrics of a song with the left hand using a vowel keyboard and the melodies\ + \ with the right hand using a standard musical keyboard. Our system searches for\ + \ a portion of the registered lyrics whose vowel sequence matches the current\ + \ user input using the probabilistic model, and sends the matched lyrics to the\ + \ singing voice synthesizer. The vowel input keys are mapped onto a standard musical\ + \ keyboard, enabling experienced keyboard players to learn the system from a standard\ + \ musical score. We examine the feasibility of the system through a series of\ + \ evaluations and user studies. },\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Kazuhiko Yamamoto and Takeo Igarashi},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1181414},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {205--208},\n publisher = {Louisiana\ + \ State University},\n title = {LiVo: Sing a Song with a Vowel Keyboard},\n url\ + \ = {http://www.nime.org/proceedings/2015/nime2015_120.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/120/0120-file1.mp4},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180553 + doi: 10.5281/zenodo.1181414 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - publisher: University of Michigan - title: Comparing Motion Data from an iPod Touch to a High-End Optical Infrared Marker-Based - Motion Capture System - url: http://www.nime.org/proceedings/2012/nime2012_198.pdf - year: 2012 + month: May + pages: 205--208 + publisher: Louisiana State University + title: 'LiVo: Sing a Song with a Vowel Keyboard' + url: http://www.nime.org/proceedings/2015/nime2015_120.pdf + urlsuppl1: 
http://www.nime.org/proceedings/2015/120/0120-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Park2012 - abstract: 'This paper describes an interactive gestural microphone for vocal performance - named Voicon. Voicon is a non-invasive and gesture-sensitive microphone which - allows vocal performers to use natural gestures to create vocal augmentations - and modifications by using embedded sensors in a microphone. Through vocal augmentation - and modulation, the performers can easily generate desired amount of the vibrato - and achieve wider vocal range. These vocal en-hancements will deliberately enrich - the vocal performance both in its expressiveness and the dynamics. Using Voicon, - singers can generate additional vibrato, control the pitch and activate customizable - vocal effect by simple and intuitive gestures in live and recording context.' - address: 'Ann Arbor, Michigan' - author: Yongki Park and Hoon Heo and Kyogu Lee - bibtex: "@inproceedings{Park2012,\n abstract = {This paper describes an interactive\ - \ gestural microphone for vocal performance named Voicon. Voicon is a non-invasive\ - \ and gesture-sensitive microphone which allows vocal performers to use natural\ - \ gestures to create vocal augmentations and modifications by using embedded sensors\ - \ in a microphone. Through vocal augmentation and modulation, the performers can\ - \ easily generate desired amount of the vibrato and achieve wider vocal range.\ - \ These vocal en-hancements will deliberately enrich the vocal performance both\ - \ in its expressiveness and the dynamics. 
Using Voicon, singers can generate additional\ - \ vibrato, control the pitch and activate customizable vocal effect by simple\ - \ and intuitive gestures in live and recording context.},\n address = {Ann Arbor,\ - \ Michigan},\n author = {Yongki Park and Hoon Heo and Kyogu Lee},\n booktitle\ + ID: ktahiroglu2015 + abstract: 'This paper presents our current research in which we study the notion + of performer engagement within the variance and diversities of the intentional + activities of the performer in musical interaction. We introduce a user-test study + with the aim to evaluate our system''s engagement prediction capability and to + understand in detail the system''s response behaviour. The quantitative results + indicate that our system recognises and monitors performer''s engagement successfully, + although we found that the system''s response to maintain and deepen the performer''s + engagement is perceived differently among participants. The results reported in + this paper can be used to inform the design of interactive systems that enhance + the quality of performer''s engagement in musical interaction with new interfaces.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Koray Tahiroglu and Thomas Svedström and Valtteri Wikström + bibtex: "@inproceedings{ktahiroglu2015,\n abstract = {This paper presents our current\ + \ research in which we study the notion of performer engagement within the variance\ + \ and diversities of the intentional activities of the performer in musical interaction.\ + \ We introduce a user-test study with the aim to evaluate our system's engagement\ + \ prediction capability and to understand in detail the system's response behaviour.\ + \ The quantitative results indicate that our system recognises and monitors performer's\ + \ engagement successfully, although we found that the system's response to maintain\ + \ and deepen the performer's engagement is perceived differently among participants.\ + \ The results reported in this paper can be used to inform the design of interactive\ + \ systems that enhance the quality of performer's engagement in musical interaction\ + \ with new interfaces.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ + \ {Koray Tahiroglu and Thomas Svedstr\\''om and Valtteri Wikstr\\''om},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1180565},\n issn = {2220-4806},\n keywords\ - \ = {Gesture, Microphone, Vocal Performance, Performance In-terface},\n publisher\ - \ = {University of Michigan},\n title = {Voicon: An Interactive Gestural Microphone\ - \ For Vocal Performance},\n url = {http://www.nime.org/proceedings/2012/nime2012_199.pdf},\n\ - \ year = {2012}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1180565 - issn: 2220-4806 - keywords: 'Gesture, Microphone, Vocal Performance, Performance In-terface' - publisher: University of Michigan - title: 'Voicon: An Interactive Gestural Microphone For Vocal Performance' - url: 
http://www.nime.org/proceedings/2012/nime2012_199.pdf - year: 2012 - - -- ENTRYTYPE: inproceedings - ID: Henriques2012 - abstract: 'The Sonik Spring is a portable and wireless digital instrument, created - for real-time synthesis and control of sound. It brings together different types - of sensory input, linking gestural motion and kinesthetic feedback to the production - of sound. The interface consists of a 15-inch spring with unique flexibility, - which allows multiple degrees of variation in its shape and length. The design - of the instrument is described and its features discussed. Three performance modes - are detailed highlighting the instrument''s expressive potential and wide range - of functionality. ' - address: 'Ann Arbor, Michigan' - author: Tomas Henriques - bibtex: "@inproceedings{Henriques2012,\n abstract = {The Sonik Spring is a portable\ - \ and wireless digital instrument, created for real-time synthesis and control\ - \ of sound. It brings together different types of sensory input, linking gestural\ - \ motion and kinesthetic feedback to the production of sound. The interface consists\ - \ of a 15-inch spring with unique flexibility, which allows multiple degrees of\ - \ variation in its shape and length. The design of the instrument is described\ - \ and its features discussed. Three performance modes are detailed highlighting\ - \ the instrument's expressive potential and wide range of functionality. 
},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Tomas Henriques},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178283},\n issn = {2220-4806},\n keywords = {Interface\ - \ for sound and music, Gestural control of sound, Kinesthetic and visual feedback},\n\ - \ publisher = {University of Michigan},\n title = {SONIK SPRING},\n url = {http://www.nime.org/proceedings/2012/nime2012_20.pdf},\n\ - \ year = {2012}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1179182},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {132--135},\n\ + \ publisher = {Louisiana State University},\n title = {Musical Engagement that\ + \ is Predicated on Intentional Activity of the Performer with NOISA Instruments},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_121.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178283 + doi: 10.5281/zenodo.1179182 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Interface for sound and music, Gestural control of sound, Kinesthetic - and visual feedback' - publisher: University of Michigan - title: SONIK SPRING - url: http://www.nime.org/proceedings/2012/nime2012_20.pdf - year: 2012 + month: May + pages: 132--135 + publisher: Louisiana State University + title: Musical Engagement that is Predicated on Intentional Activity of the Performer + with NOISA Instruments + url: http://www.nime.org/proceedings/2015/nime2015_121.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Menzies2012 - abstract: "The Highland piping tradition requires the performer to learn and accurately\ - \ reproduce a diverse array of ornaments, which can be a daunting prospect to\ - \ the novice piper. 
This paper presents a system which analyses a player's technique\ - \ using sensor data obtained from an electronic bagpipe chanter interface. Automatic\ - \ recognition of a broad range of piping embellishments allows real-time visual\ - \ feedback to be generated, enabling the learner to ensure that they are practicing\ - \ each movement correctly.\nThe electronic chanter employs a robust and responsive\ - \ infrared (IR) sensing strategy, and uses audio samples from acoustic recordings\ - \ to produce a high quality bagpipe sound. Moreover, the continuous nature of\ - \ the IR sensors offers the controller a considerable degree of flexibility, indicating\ - \ sig-nificant potential for the inclusion of extended and novel techniques for\ - \ musical expression in the future." - address: 'Ann Arbor, Michigan' - author: Duncan Menzies and Andrew McPherson - bibtex: "@inproceedings{Menzies2012,\n abstract = {The Highland piping tradition\ - \ requires the performer to learn and accurately reproduce a diverse array of\ - \ ornaments, which can be a daunting prospect to the novice piper. This paper\ - \ presents a system which analyses a player's technique using sensor data obtained\ - \ from an electronic bagpipe chanter interface. Automatic recognition of a broad\ - \ range of piping embellishments allows real-time visual feedback to be generated,\ - \ enabling the learner to ensure that they are practicing each movement correctly.\n\ - The electronic chanter employs a robust and responsive infrared (IR) sensing strategy,\ - \ and uses audio samples from acoustic recordings to produce a high quality bagpipe\ - \ sound. 
Moreover, the continuous nature of the IR sensors offers the controller\ - \ a considerable degree of flexibility, indicating sig-nificant potential for\ - \ the inclusion of extended and novel techniques for musical expression in the\ - \ future.},\n address = {Ann Arbor, Michigan},\n author = {Duncan Menzies and\ - \ Andrew McPherson},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180537},\n\ - \ issn = {2220-4806},\n keywords = {Great Highland Bagpipe, continuous infrared\ - \ sensors, ornament recognition, practice tool, SuperCollider, OSC.},\n publisher\ - \ = {University of Michigan},\n title = {An Electronic Bagpipe Chanter for Automatic\ - \ Recognition of Highland Piping Ornamentation},\n url = {http://www.nime.org/proceedings/2012/nime2012_200.pdf},\n\ - \ year = {2012}\n}\n" + ID: jlong2015 + abstract: 'This paper presents a methodology for evaluating the performance of several + types of striking mechanism commonly utilized in musical robotic percussion systems. + The goal is to take steps towards standardizing methods of comparing the attributes + of a range of devices to inform their design and application in various musical + situations. A system for testing the latency, consistency, loudness and striking + speed of these mechanisms is described and the methods are demonstrated by subjecting + several new robotic percussion mechanisms to these tests. An analysis of the results + of the evaluation is also presented and the advantages and disadvantages of each + of the types of mechanism in various musical contexts is discussed.' + address: 'Baton Rouge, Louisiana, USA' + author: Jason Long and Jim Murphy and Ajay Kapur and Dale Carnegie + bibtex: "@inproceedings{jlong2015,\n abstract = {This paper presents a methodology\ + \ for evaluating the performance of several types of striking mechanism commonly\ + \ utilized in musical robotic percussion systems. 
The goal is to take steps towards\ + \ standardizing methods of comparing the attributes of a range of devices to inform\ + \ their design and application in various musical situations. A system for testing\ + \ the latency, consistency, loudness and striking speed of these mechanisms is\ + \ described and the methods are demonstrated by subjecting several new robotic\ + \ percussion mechanisms to these tests. An analysis of the results of the evaluation\ + \ is also presented and the advantages and disadvantages of each of the types\ + \ of mechanism in various musical contexts is discussed.},\n address = {Baton\ + \ Rouge, Louisiana, USA},\n author = {Jason Long and Jim Murphy and Ajay Kapur\ + \ and Dale Carnegie},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179120},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {404--407},\n publisher = {Louisiana State University},\n title\ + \ = {A Methodology for Evaluating Robotic Striking Mechanisms for Musical Contexts},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_130.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180537 + doi: 10.5281/zenodo.1179120 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Great Highland Bagpipe, continuous infrared sensors, ornament recognition, - practice tool, SuperCollider, OSC.' 
- publisher: University of Michigan - title: An Electronic Bagpipe Chanter for Automatic Recognition of Highland Piping - Ornamentation - url: http://www.nime.org/proceedings/2012/nime2012_200.pdf - year: 2012 + month: May + pages: 404--407 + publisher: Louisiana State University + title: A Methodology for Evaluating Robotic Striking Mechanisms for Musical Contexts + url: http://www.nime.org/proceedings/2015/nime2015_130.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Gong2012 - abstract: 'This paper describes a novel music control sensate surface, which enables - integration between any musical instruments with a v ersatile, customizable, and - essentially cost-effective user interface. This sensate surface is based on c - onductive inkjet printing technology which allows capacitive sensor electrodes - and connections between electronics components to be printed onto a large roll - of flexible substrate that is unrestricted in length. The high dynamic range capacitive - sensing electrodes can not only infer touch, but near-range, non-contact gestural - nuance in a music performance. With this sensate surface, users can ``cut'''' - out their desired shapes, ``paste'''' the number of inputs, and customize their - controller interface, which can then send signals wirelessly to effects or software - synthesizers. We seek to find a solution for integrating the form factor of traditional - music controllers seamlessly on top of one''s music instrument and meanwhile adding - expressiveness to the music performance by sensing and incorporating movements - and gestures to manipulate the musical output. We present an example of implementation - on an electric ukulele and provide several design examples to demonstrate the - versatile capabilities of this system.' 
- address: 'Ann Arbor, Michigan' - author: Nan-Wei Gong and Nan Zhao and Joseph Paradiso - bibtex: "@inproceedings{Gong2012,\n abstract = {This paper describes a novel music\ - \ control sensate surface, which enables integration between any musical instruments\ - \ with a v ersatile, customizable, and essentially cost-effective user interface.\ - \ This sensate surface is based on c onductive inkjet printing technology which\ - \ allows capacitive sensor electrodes and connections between electronics components\ - \ to be printed onto a large roll of flexible substrate that is unrestricted in\ - \ length. The high dynamic range capacitive sensing electrodes can not only infer\ - \ touch, but near-range, non-contact gestural nuance in a music performance. With\ - \ this sensate surface, users can ``cut'' out their desired shapes, ``paste''\ - \ the number of inputs, and customize their controller interface, which can then\ - \ send signals wirelessly to effects or software synthesizers. We seek to find\ - \ a solution for integrating the form factor of traditional music controllers\ - \ seamlessly on top of one's music instrument and meanwhile adding expressiveness\ - \ to the music performance by sensing and incorporating movements and gestures\ - \ to manipulate the musical output. 
We present an example of implementation on\ - \ an electric ukulele and provide several design examples to demonstrate the versatile\ - \ capabilities of this system.},\n address = {Ann Arbor, Michigan},\n author =\ - \ {Nan-Wei Gong and Nan Zhao and Joseph Paradiso},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178267},\n issn = {2220-4806},\n keywords = {Sensate\ - \ surface, music controller skin, customizable controller surface, flexible electronics},\n\ - \ publisher = {University of Michigan},\n title = {A Customizable Sensate Surface\ - \ for Music Control},\n url = {http://www.nime.org/proceedings/2012/nime2012_201.pdf},\n\ - \ year = {2012}\n}\n" + ID: skemper2015 + abstract: 'The Modular Electro-Acoustic Robotic Instrument System (MEARIS) represents + a new type of hybrid electroacoustic-electromechanical instrument model. Monochord-Aerophone + Robotic Instrument Ensemble (MARIE), the first realization of a MEARIS, is a set + of interconnected monochord and cylindrical aerophone robotic musical instruments + created by Expressive Machines Musical Instruments (EMMI). MARIE comprises one + or more matched pairs of Automatic Monochord Instruments (AMI) and Cylindrical + Aerophone Robotic Instruments (CARI). Each AMI and CARI is a self-contained, independently + operable robotic instrument with an acoustic element, a control system that enables + automated manipulation of this element, and an audio system that includes input + and output transducers coupled to the acoustic element. Each AMI-CARI pair can + also operate as an interconnected hybrid instrument, allowing for effects that + have heretofore been the domain of physical modeling technologies, such as a plucked + air column or blown string. 
Since its creation, MARIE has toured widely, performed + with dozens of human instrumentalists, and has been utilized by nine composers + in the realization of more than twenty new musical works.' + address: 'Baton Rouge, Louisiana, USA' + author: Troy Rogers and Steven Kemper and Scott Barton + bibtex: "@inproceedings{skemper2015,\n abstract = {The Modular Electro-Acoustic\ + \ Robotic Instrument System (MEARIS) represents a new type of hybrid electroacoustic-electromechanical\ + \ instrument model. Monochord-Aerophone Robotic Instrument Ensemble (MARIE), the\ + \ first realization of a MEARIS, is a set of interconnected monochord and cylindrical\ + \ aerophone robotic musical instruments created by Expressive Machines Musical\ + \ Instruments (EMMI). MARIE comprises one or more matched pairs of Automatic Monochord\ + \ Instruments (AMI) and Cylindrical Aerophone Robotic Instruments (CARI). Each\ + \ AMI and CARI is a self-contained, independently operable robotic instrument\ + \ with an acoustic element, a control system that enables automated manipulation\ + \ of this element, and an audio system that includes input and output transducers\ + \ coupled to the acoustic element. 
Each AMI-CARI pair can also operate as an interconnected\ + \ hybrid instrument, allowing for effects that have heretofore been the domain\ + \ of physical modeling technologies, such as a plucked air column or blown string.\ + \ Since its creation, MARIE has toured widely, performed with dozens of human\ + \ instrumentalists, and has been utilized by nine composers in the realization\ + \ of more than twenty new musical works.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Troy Rogers and Steven Kemper and Scott Barton},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179166},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {408--411},\n\ + \ publisher = {Louisiana State University},\n title = {MARIE: Monochord-Aerophone\ + \ Robotic Instrument Ensemble},\n url = {http://www.nime.org/proceedings/2015/nime2015_141.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/141/0141-file1.mov},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178267 + doi: 10.5281/zenodo.1179166 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Sensate surface, music controller skin, customizable controller surface, - flexible electronics' - publisher: University of Michigan - title: A Customizable Sensate Surface for Music Control - url: http://www.nime.org/proceedings/2012/nime2012_201.pdf - year: 2012 + month: May + pages: 408--411 + publisher: Louisiana State University + title: 'MARIE: Monochord-Aerophone Robotic Instrument Ensemble' + url: http://www.nime.org/proceedings/2015/nime2015_141.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/141/0141-file1.mov + year: 2015 - ENTRYTYPE: inproceedings - ID: Schlessinger2012 - abstract: 'We have developed a prototype wireless microphone that provides 
vocalists - with control over their vocal effects directly from the body of the microphone. - A wireless microphone has been augmented with six momentary switches, one fader, - and three axes of motion and position sensors, all of which provide MIDI output - from the wireless receiver. The MIDI data is used to control external vocal effects - units such as live loopers, reverbs, distortion pedals, etc. The goal was to to - provide dramatically increased expressive control to vocal performances, and address - some of the shortcomings of pedal-controlled effects. The addition of gestural - controls from the motion sensors opens up new performance possibilities such as - panning the voice simply by pointing the microphone in one direction or another. - The result is a hybrid microphone-musical instrument which has recieved extremely - positive results from vocalists in numerous infor-mal workshops.' - address: 'Ann Arbor, Michigan' - author: Dan Moses Schlessinger - bibtex: "@inproceedings{Schlessinger2012,\n abstract = {We have developed a prototype\ - \ wireless microphone that provides vocalists with control over their vocal effects\ - \ directly from the body of the microphone. A wireless microphone has been augmented\ - \ with six momentary switches, one fader, and three axes of motion and position\ - \ sensors, all of which provide MIDI output from the wireless receiver. The MIDI\ - \ data is used to control external vocal effects units such as live loopers, reverbs,\ - \ distortion pedals, etc. The goal was to to provide dramatically increased expressive\ - \ control to vocal performances, and address some of the shortcomings of pedal-controlled\ - \ effects. The addition of gestural controls from the motion sensors opens up\ - \ new performance possibilities such as panning the voice simply by pointing the\ - \ microphone in one direction or another. 
The result is a hybrid microphone-musical\ - \ instrument which has recieved extremely positive results from vocalists in numerous\ - \ infor-mal workshops.},\n address = {Ann Arbor, Michigan},\n author = {Dan Moses\ - \ Schlessinger},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180591},\n\ - \ issn = {2220-4806},\n keywords = {NIME, Sennheiser, Concept Tahoe, MIDI, control,\ - \ microphone},\n publisher = {University of Michigan},\n title = {Concept Tahoe:\ - \ Microphone Midi Control},\n url = {http://www.nime.org/proceedings/2012/nime2012_202.pdf},\n\ - \ year = {2012}\n}\n" + ID: jharriman2015 + abstract: 'Music offers an intriguing context to engage children in electronics, + programming and more. Over the last year we been developing a hardware and software + toolkit for music called modular-muse. Here we describe the design and goals for + these tools and how they have been used in different settings to introduce children + to concepts of interaction design for music and sound design. Two exploratory + studies which used modular-muse are described here with different approaches; + a two day build your own instrument workshop where participants learned how to + use both hardware and software concurrently to control synthesized sounds and + trigger solenoids, and a middle school music classroom where the focus was only + on programming for sound synthesis using the modular-muse Pd library. During the + second study, a project called Pd Poems, a teaching progression emerged we call + Build-Play-Share-Focus which is also described. ' + address: 'Baton Rouge, Louisiana, USA' + author: Jiffer Harriman + bibtex: "@inproceedings{jharriman2015,\n abstract = {Music offers an intriguing\ + \ context to engage children in electronics, programming and more. 
Over the last\ + \ year we been developing a hardware and software toolkit for music called modular-muse.\ + \ Here we describe the design and goals for these tools and how they have been\ + \ used in different settings to introduce children to concepts of interaction\ + \ design for music and sound design. Two exploratory studies which used modular-muse\ + \ are described here with different approaches; a two day build your own instrument\ + \ workshop where participants learned how to use both hardware and software concurrently\ + \ to control synthesized sounds and trigger solenoids, and a middle school music\ + \ classroom where the focus was only on programming for sound synthesis using\ + \ the modular-muse Pd library. During the second study, a project called Pd Poems,\ + \ a teaching progression emerged we call Build-Play-Share-Focus which is also\ + \ described. },\n address = {Baton Rouge, Louisiana, USA},\n author = {Jiffer\ + \ Harriman},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179074},\n editor\ + \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ + \ pages = {331--334},\n publisher = {Louisiana State University},\n title = {Pd\ + \ Poems and Teaching Tools},\n url = {http://www.nime.org/proceedings/2015/nime2015_145.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180591 + doi: 10.5281/zenodo.1179074 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, Sennheiser, Concept Tahoe, MIDI, control, microphone' - publisher: University of Michigan - title: 'Concept Tahoe: Microphone Midi Control' - url: http://www.nime.org/proceedings/2012/nime2012_202.pdf - year: 2012 + month: May + pages: 331--334 + publisher: Louisiana State University + title: Pd Poems and Teaching Tools + url: 
http://www.nime.org/proceedings/2015/nime2015_145.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Yang2012 - abstract: 'We augment the piano keyboard with a 3D gesture space using Microsoft - Kinect for sensing and top-down projection for visual feedback. This interface - provides multi-axial gesture controls to enable continuous adjustments to multiple - acoustic parameters such as those on the typical digital synthesizers. We believe - that using gesture control is more visceral and aesthetically pleasing, especially - during concert performance where the visibility of the performer''s action is - important. Our system can also be used for other types of gesture interaction - as well as for pedagogical applications.' - address: 'Ann Arbor, Michigan' - author: Qi Yang and Georg Essl - bibtex: "@inproceedings{Yang2012,\n abstract = {We augment the piano keyboard with\ - \ a 3D gesture space using Microsoft Kinect for sensing and top-down projection\ - \ for visual feedback. This interface provides multi-axial gesture controls to\ - \ enable continuous adjustments to multiple acoustic parameters such as those\ - \ on the typical digital synthesizers. We believe that using gesture control is\ - \ more visceral and aesthetically pleasing, especially during concert performance\ - \ where the visibility of the performer's action is important. 
Our system can\ - \ also be used for other types of gesture interaction as well as for pedagogical\ - \ applications.},\n address = {Ann Arbor, Michigan},\n author = {Qi Yang and Georg\ - \ Essl},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178455},\n issn = {2220-4806},\n\ - \ keywords = {NIME, piano, depth camera, musical instrument, gesture, tabletop\ - \ projection},\n publisher = {University of Michigan},\n title = {Augmented Piano\ - \ Performance using a Depth Camera},\n url = {http://www.nime.org/proceedings/2012/nime2012_203.pdf},\n\ - \ year = {2012}\n}\n" + ID: rhayward2015 + abstract: 'The Hayward Tuning Vine is a software interface for exploring the system + of microtonal tuning known as Just Intonation. Based ultimately on prime number + relationships, harmonic space in Just Intonation is inherently multidimensional, + with each prime number tracing a unique path in space. Taking this multidimensionality + as its point of departure, the Tuning Vine interface assigns a unique angle and + colour to each prime number, along with aligning melodic pitch height to vertical + height on the computer screen. These features allow direct and intuitive interaction + with Just Intonation. The inclusion of a transposition function along each prime + number axis also enables potentially unlimited exploration of harmonic space within + prime limit 23. Currently available as desktop software, a prototype for a hardware + version has also been constructed, and future tablet app and hardware versions + of the Tuning Vine are planned that will allow tangible as well as audiovisual + interaction with microtonal harmonic space.' + address: 'Baton Rouge, Louisiana, USA' + author: Robin Hayward + bibtex: "@inproceedings{rhayward2015,\n abstract = {The Hayward Tuning Vine is a\ + \ software interface for exploring the system of microtonal tuning known as Just\ + \ Intonation. 
Based ultimately on prime number relationships, harmonic space in\ + \ Just Intonation is inherently multidimensional, with each prime number tracing\ + \ a unique path in space. Taking this multidimensionality as its point of departure,\ + \ the Tuning Vine interface assigns a unique angle and colour to each prime number,\ + \ along with aligning melodic pitch height to vertical height on the computer\ + \ screen. These features allow direct and intuitive interaction with Just Intonation.\ + \ The inclusion of a transposition function along each prime number axis also\ + \ enables potentially unlimited exploration of harmonic space within prime limit\ + \ 23. Currently available as desktop software, a prototype for a hardware version\ + \ has also been constructed, and future tablet app and hardware versions of the\ + \ Tuning Vine are planned that will allow tangible as well as audiovisual interaction\ + \ with microtonal harmonic space.},\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Robin Hayward},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179084},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {209--214},\n publisher = {Louisiana State University},\n title\ + \ = {The Hayward Tuning Vine: an interface for Just Intonation},\n url = {http://www.nime.org/proceedings/2015/nime2015_146.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/146/0146-file1.mov},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178455 + doi: 10.5281/zenodo.1179084 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, piano, depth camera, musical instrument, gesture, tabletop projection' - publisher: University of Michigan - title: Augmented Piano Performance using a Depth Camera - url: 
http://www.nime.org/proceedings/2012/nime2012_203.pdf - year: 2012 + month: May + pages: 209--214 + publisher: Louisiana State University + title: 'The Hayward Tuning Vine: an interface for Just Intonation' + url: http://www.nime.org/proceedings/2015/nime2015_146.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/146/0146-file1.mov + year: 2015 - ENTRYTYPE: inproceedings - ID: Torresen2012 - abstract: 'We present a new wireless transceiver board for the CUI32 sensor interface, - aimed at creating a solution that is flexible, reliable, and with little power - consumption. Communica-tion with the board is based on the ZigFlea protocol and - it has been evaluated on a CUI32 using the StickOS oper-ating system. Experiments - show that the total sensor data collection time is linearly increasing with the - number of sensor samples used. A data rate of 0.8 kbit/s is achieved for wirelessly - transmitting three axes of a 3D accelerometer. Although this data rate is low - compared to other systems, our solution benefits from ease-of-use and stability, - and is useful for applications that are not time-critical.' - address: 'Ann Arbor, Michigan' - author: 'Jim Torresen and Hauback, Øyvind N. and Dan Overholt and Jensenius, Alexander - Refsum' - bibtex: "@inproceedings{Torresen2012,\n abstract = {We present a new wireless transceiver\ - \ board for the CUI32 sensor interface, aimed at creating a solution that is flexible,\ - \ reliable, and with little power consumption. Communica-tion with the board is\ - \ based on the ZigFlea protocol and it has been evaluated on a CUI32 using the\ - \ StickOS oper-ating system. Experiments show that the total sensor data collection\ - \ time is linearly increasing with the number of sensor samples used. 
A data rate\ - \ of 0.8 kbit/s is achieved for wirelessly transmitting three axes of a 3D accelerometer.\ - \ Although this data rate is low compared to other systems, our solution benefits\ - \ from ease-of-use and stability, and is useful for applications that are not\ - \ time-critical.},\n address = {Ann Arbor, Michigan},\n author = {Jim Torresen\ - \ and Hauback, Øyvind N. and Dan Overholt and Jensenius, Alexander Refsum},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178433},\n issn = {2220-4806},\n\ - \ keywords = {wireless sensing, CUI32, StickOS, ZigBee, ZigFlea},\n publisher\ - \ = {University of Michigan},\n title = {Development and Evaluation of a ZigFlea-based\ - \ Wireless Transceiver Board for CUI32},\n url = {http://www.nime.org/proceedings/2012/nime2012_205.pdf},\n\ - \ year = {2012}\n}\n" + ID: mkrzyzaniak2015 + abstract: 'Herein is presented a method of classifying hand-drum strokes in real-time + by analyzing 50 milliseconds of audio signal as recorded by a contact-mic affixed + to the body of the instrument. The classifier performs with an average accuracy + of about 95% across several experiments on archetypical strokes, and 89% on uncontrived + playing. A complete ANSI C implementation for OSX and Linux is available on the + author''s website.' + address: 'Baton Rouge, Louisiana, USA' + author: Michael Krzyzaniak and Garth Paine + bibtex: "@inproceedings{mkrzyzaniak2015,\n abstract = {Herein is presented a method\ + \ of classifying hand-drum strokes in real-time by analyzing 50 milliseconds of\ + \ audio signal as recorded by a contact-mic affixed to the body of the instrument.\ + \ The classifier performs with an average accuracy of about 95% across several\ + \ experiments on archetypical strokes, and 89% on uncontrived playing. 
A complete\ + \ ANSI C implementation for OSX and Linux is available on the author's website.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Michael Krzyzaniak and\ + \ Garth Paine},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179112},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {400--403},\n publisher = {Louisiana State University},\n title\ + \ = {Realtime Classification of Hand-Drum Strokes},\n url = {http://www.nime.org/proceedings/2015/nime2015_147.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178433 + doi: 10.5281/zenodo.1179112 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'wireless sensing, CUI32, StickOS, ZigBee, ZigFlea' - publisher: University of Michigan - title: Development and Evaluation of a ZigFlea-based Wireless Transceiver Board - for CUI32 - url: http://www.nime.org/proceedings/2012/nime2012_205.pdf - year: 2012 + month: May + pages: 400--403 + publisher: Louisiana State University + title: Realtime Classification of Hand-Drum Strokes + url: http://www.nime.org/proceedings/2015/nime2015_147.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Makelberge2012 - abstract: '''''Perfect Take'''' is a public installation out of networked acoustic - instruments that let composers from all over the world exhibit their MIDI-works - by means of the Internet. The primary aim of this system is to offer composers - a way to have works exhibited and recorded in venues and with technologies not - accessible to him/her under normal circumstances. 
The Secondary aim of this research - is to highlight experience design as a complement to interaction design, and a - shift of focus from functionality of a specific gestural controller, towards the - environments, events and processes that they are part of.' - address: 'Ann Arbor, Michigan' - author: Nicolas Makelberge and Álvaro Barbosa and André Perrotta and Luís Sarmento - Ferreira - bibtex: "@inproceedings{Makelberge2012,\n abstract = {''Perfect Take'' is a public\ - \ installation out of networked acoustic instruments that let composers from all\ - \ over the world exhibit their MIDI-works by means of the Internet. The primary\ - \ aim of this system is to offer composers a way to have works exhibited and recorded\ - \ in venues and with technologies not accessible to him/her under normal circumstances.\ - \ The Secondary aim of this research is to highlight experience design as a complement\ - \ to interaction design, and a shift of focus from functionality of a specific\ - \ gestural controller, towards the environments, events and processes that they\ - \ are part of.},\n address = {Ann Arbor, Michigan},\n author = {Nicolas Makelberge\ - \ and {\\'A}lvaro Barbosa and Andr{\\'e} Perrotta and Lu{\\'i}s Sarmento Ferreira},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178339},\n issn = {2220-4806},\n\ - \ keywords = {NIME, Networked Music, MIDI, Disklavier, music collaboration, creativity},\n\ - \ publisher = {University of Michigan},\n title = {Perfect Take: Experience design\ - \ and new interfaces for musical expression},\n url = {http://www.nime.org/proceedings/2012/nime2012_208.pdf},\n\ - \ year = {2012}\n}\n" + ID: rvanrooyen2015 + abstract: 'Comparative studies require a baseline reference and a documented process + to capture new subject data. 
This paper combined with its principal reference + [1] presents a definitive dataset in the context of snare drum performances along + with a procedure for data acquisition, and a methodology for quantitative analysis. + The dataset contains video, audio, and discrete two dimensional motion data for + forty standardized percussive rudiments.' + address: 'Baton Rouge, Louisiana, USA' + author: Robert Van Rooyen and Andrew Schloss and George Tzanetakis + bibtex: "@inproceedings{rvanrooyen2015,\n abstract = {Comparative studies require\ + \ a baseline reference and a documented process to capture new subject data. This\ + \ paper combined with its principal reference [1] presents a definitive dataset\ + \ in the context of snare drum performances along with a procedure for data acquisition,\ + \ and a methodology for quantitative analysis. The dataset contains video, audio,\ + \ and discrete two dimensional motion data for forty standardized percussive rudiments.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Robert Van Rooyen and Andrew\ + \ Schloss and George Tzanetakis},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179168},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {329--330},\n publisher = {Louisiana State University},\n title\ + \ = {Snare Drum Motion Capture Dataset},\n url = {http://www.nime.org/proceedings/2015/nime2015_148.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/148/0148-file1.mp4},\n year\ + \ = {2015}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1179168 + editor: Edgar Berdahl and Jesse Allison + issn: 2220-4806 + month: May + pages: 329--330 + publisher: Louisiana State University + title: Snare Drum Motion Capture Dataset + url: http://www.nime.org/proceedings/2015/nime2015_148.pdf + 
urlsuppl1: http://www.nime.org/proceedings/2015/148/0148-file1.mp4 + year: 2015 + + +- ENTRYTYPE: inproceedings + ID: rbhandari2015 + abstract: 'Biofeedback tools generally use visualizations to display physiological + information to the user. As such, these tools are incompatible with visually demanding + tasks such as driving. While auditory or haptic biofeedback may be used in these + cases, the additional sensory channels can increase workload or act as a nuisance + to the user. A number of studies, however, have shown that music can improve mood + and concentration, while also reduce aggression and boredom. Here, we propose + an intervention that combines the benefits of biofeedback and music to help users + regulate their stress response while performing a visual task (driving a car simulator). + Our approach encourages slow breathing by adjusting the quality of the music in + response to the user''s breathing rate. We evaluate the intervention on a 2×2 + design with music and auditory biofeedback as independent variables. Our results + indicate that our music-biofeedback intervention leads to lower arousal (reduced + electrodermal activity and increased heart rate variability) than music alone, + auditory biofeedback alone and a control condition. ' + address: 'Baton Rouge, Louisiana, USA' + author: Rhushabh Bhandari and Avinash Parnandi and Eva Shipp and Beena Ahmed and + Ricardo Gutierrez-Osuna + bibtex: "@inproceedings{rbhandari2015,\n abstract = {Biofeedback tools generally\ + \ use visualizations to display physiological information to the user. As such,\ + \ these tools are incompatible with visually demanding tasks such as driving.\ + \ While auditory or haptic biofeedback may be used in these cases, the additional\ + \ sensory channels can increase workload or act as a nuisance to the user. A number\ + \ of studies, however, have shown that music can improve mood and concentration,\ + \ while also reduce aggression and boredom. 
Here, we propose an intervention that\ + \ combines the benefits of biofeedback and music to help users regulate their\ + \ stress response while performing a visual task (driving a car simulator). Our\ + \ approach encourages slow breathing by adjusting the quality of the music in\ + \ response to the user's breathing rate. We evaluate the intervention on a 2$\\\ + times$2 design with music and auditory biofeedback as independent variables. Our\ + \ results indicate that our music-biofeedback intervention leads to lower arousal\ + \ (reduced electrodermal activity and increased heart rate variability) than music\ + \ alone, auditory biofeedback alone and a control condition. },\n address = {Baton\ + \ Rouge, Louisiana, USA},\n author = {Rhushabh Bhandari and Avinash Parnandi and\ + \ Eva Shipp and Beena Ahmed and Ricardo Gutierrez-Osuna},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179030},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {78--82},\n publisher = {Louisiana\ + \ State University},\n title = {Music-based respiratory biofeedback in visually-demanding\ + \ tasks},\n url = {http://www.nime.org/proceedings/2015/nime2015_149.pdf},\n urlsuppl1\ + \ = {http://www.nime.org/proceedings/2015/149/0149-file1.mp4},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178339 + doi: 10.5281/zenodo.1179030 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, Networked Music, MIDI, Disklavier, music collaboration, creativity' - publisher: University of Michigan - title: 'Perfect Take: Experience design and new interfaces for musical expression' - url: http://www.nime.org/proceedings/2012/nime2012_208.pdf - year: 2012 + month: May + pages: 78--82 + publisher: Louisiana State University + title: Music-based 
respiratory biofeedback in visually-demanding tasks + url: http://www.nime.org/proceedings/2015/nime2015_149.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/149/0149-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Han2012a - abstract: 'FutureGrab is a new wearable musical instrument for live performance - that is highly intuitive while still generating an interesting sound by subtractive - synthesis. Its sound effects resemble the human vowel pronunciation, which were - mapped to hand gestures that are similar to the mouth shape of human to pronounce - corresponding vowel. FutureGrab also provides all necessary features for a lead - musical instrument such as pitch control, trigger, glissando and key adjustment. - In addition, pitch indicator was added to give visual feedback to the performer, - which can reduce the mistakes during live performances. This paper describes the - motivation, system design, mapping strategy and implementation of FutureGrab, - and evaluates the overall experience.' - address: 'Ann Arbor, Michigan' - author: Yoonchang Han and Jinsoo Na and Kyogu Lee - bibtex: "@inproceedings{Han2012a,\n abstract = {FutureGrab is a new wearable musical\ - \ instrument for live performance that is highly intuitive while still generating\ - \ an interesting sound by subtractive synthesis. Its sound effects resemble the\ - \ human vowel pronunciation, which were mapped to hand gestures that are similar\ - \ to the mouth shape of human to pronounce corresponding vowel. FutureGrab also\ - \ provides all necessary features for a lead musical instrument such as pitch\ - \ control, trigger, glissando and key adjustment. In addition, pitch indicator\ - \ was added to give visual feedback to the performer, which can reduce the mistakes\ - \ during live performances. 
This paper describes the motivation, system design,\ - \ mapping strategy and implementation of FutureGrab, and evaluates the overall\ - \ experience.},\n address = {Ann Arbor, Michigan},\n author = {Yoonchang Han and\ - \ Jinsoo Na and Kyogu Lee},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178271},\n\ - \ issn = {2220-4806},\n keywords = {Wearable musical instrument, Pure Data, gestural\ - \ synthesis, formant synthesis, data-glove, visual feedback, subtractive synthesis},\n\ - \ publisher = {University of Michigan},\n title = {FutureGrab: A wearable subtractive\ - \ synthesizer using hand gesture},\n url = {http://www.nime.org/proceedings/2012/nime2012_209.pdf},\n\ - \ year = {2012}\n}\n" + ID: mmyllykoski2015 + abstract: 'This paper discusses perspectives for conceptualizing and developing + hand-based wearable musical interface. Previous implementations of such interfaces + have not been targeted for music pedagogical use. We propose principles for pedagogically + oriented `musical hand'' and outline its development through the process of prototyping, + which involves a variety of methods. The current functional prototype, a touch-based + musical glove, is presented. ' + address: 'Baton Rouge, Louisiana, USA' + author: Mikko Myllykoski and Kai Tuuri and Esa Viirret and Jukka Louhivuori and + Antti Peltomaa and Janne Kekäläinen + bibtex: "@inproceedings{mmyllykoski2015,\n abstract = {This paper discusses perspectives\ + \ for conceptualizing and developing hand-based wearable musical interface. Previous\ + \ implementations of such interfaces have not been targeted for music pedagogical\ + \ use. We propose principles for pedagogically oriented `musical hand' and outline\ + \ its development through the process of prototyping, which involves a variety\ + \ of methods. The current functional prototype, a touch-based musical glove, is\ + \ presented. 
},\n address = {Baton Rouge, Louisiana, USA},\n author = {Mikko Myllykoski\ + \ and Kai Tuuri and Esa Viirret and Jukka Louhivuori and Antti Peltomaa and Janne\ + \ Kek\\''al\\''ainen},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179144},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {182--183},\n publisher = {Louisiana State University},\n title\ + \ = {Prototyping hand-based wearable music education technology},\n url = {http://www.nime.org/proceedings/2015/nime2015_151.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/151/0151-file1.m4v},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178271 + doi: 10.5281/zenodo.1179144 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Wearable musical instrument, Pure Data, gestural synthesis, formant synthesis, - data-glove, visual feedback, subtractive synthesis' - publisher: University of Michigan - title: 'FutureGrab: A wearable subtractive synthesizer using hand gesture' - url: http://www.nime.org/proceedings/2012/nime2012_209.pdf - year: 2012 + month: May + pages: 182--183 + publisher: Louisiana State University + title: Prototyping hand-based wearable music education technology + url: http://www.nime.org/proceedings/2015/nime2015_151.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/151/0151-file1.m4v + year: 2015 - ENTRYTYPE: inproceedings - ID: Wierenga2012 - abstract: 'In an attempt to utilize the expert pianist''s technique and spare bandwidth, - a new keyboard-based instrument augmented by sensors suggested by the examination - of existing acoustic instruments is introduced. 
The complete instrument includes - a keyboard, various pedals and knee levers, several bowing controllers, and breath - and embouchure sensors connected to an Arduino microcontroller that sends sensor - data to a laptop running Max/MSP, where custom software maps the data to synthesis - algorithms. The audio is output to a digital amplifier powering a transducer mounted - on a resonator box to which several of the sensors are attached. Careful sensor - selection and mapping help to facilitate performance mode.' - address: 'Ann Arbor, Michigan' - author: Red Wierenga - bibtex: "@inproceedings{Wierenga2012,\n abstract = {In an attempt to utilize the\ - \ expert pianist's technique and spare bandwidth, a new keyboard-based instrument\ - \ augmented by sensors suggested by the examination of existing acoustic instruments\ - \ is introduced. The complete instrument includes a keyboard, various pedals and\ - \ knee levers, several bowing controllers, and breath and embouchure sensors connected\ - \ to an Arduino microcontroller that sends sensor data to a laptop running Max/MSP,\ - \ where custom software maps the data to synthesis algorithms. The audio is output\ - \ to a digital amplifier powering a transducer mounted on a resonator box to which\ - \ several of the sensors are attached. 
Careful sensor selection and mapping help\ - \ to facilitate performance mode.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Red Wierenga},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178451},\n\ - \ issn = {2220-4806},\n keywords = {Gesture, controllers, Digital Musical Instrument,\ - \ keyboard},\n publisher = {University of Michigan},\n title = {A New Keyboard-Based,\ - \ Sensor-Augmented Instrument For Live Performance},\n url = {http://www.nime.org/proceedings/2012/nime2012_211.pdf},\n\ - \ year = {2012}\n}\n" + ID: jharrimanb2015 + abstract: 'The Feedback Lap Steel is an actuated instrument which makes use of mechanical + vibration of the instruments bridge to excite the strings. A custom bridge mounted + directly to a tactile transducer enables the strings to be driven with any audio + signal from a standard audio amplifier. The instrument can be played as a traditional + lap steel guitar without any changes to playing technique as well as be used to + create new sounds which blur the line between acoustic and electronic through + a combination of acoustic and computer generated and controlled sounds. This introduces + a new approach to string actuation using commonly available parts. This demonstration + paper details the construction, uses and lessons learned in the making of the + Feedback Lap Steel guitar.' + address: 'Baton Rouge, Louisiana, USA' + author: Jiffer Harriman + bibtex: "@inproceedings{jharrimanb2015,\n abstract = {The Feedback Lap Steel is\ + \ an actuated instrument which makes use of mechanical vibration of the instruments\ + \ bridge to excite the strings. A custom bridge mounted directly to a tactile\ + \ transducer enables the strings to be driven with any audio signal from a standard\ + \ audio amplifier. 
The instrument can be played as a traditional lap steel guitar\ + \ without any changes to playing technique as well as be used to create new sounds\ + \ which blur the line between acoustic and electronic through a combination of\ + \ acoustic and computer generated and controlled sounds. This introduces a new\ + \ approach to string actuation using commonly available parts. This demonstration\ + \ paper details the construction, uses and lessons learned in the making of the\ + \ Feedback Lap Steel guitar.},\n address = {Baton Rouge, Louisiana, USA},\n author\ + \ = {Jiffer Harriman},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179076},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {178--179},\n publisher = {Louisiana State University},\n title\ + \ = {Feedback Lapsteel : Exploring Tactile Transducers As String Actuators},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_152.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/152/0152-file1.mp4},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178451 + doi: 10.5281/zenodo.1179076 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Gesture, controllers, Digital Musical Instrument, keyboard' - publisher: University of Michigan - title: 'A New Keyboard-Based, Sensor-Augmented Instrument For Live Performance' - url: http://www.nime.org/proceedings/2012/nime2012_211.pdf - year: 2012 + month: May + pages: 178--179 + publisher: Louisiana State University + title: 'Feedback Lapsteel : Exploring Tactile Transducers As String Actuators' + url: http://www.nime.org/proceedings/2015/nime2015_152.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/152/0152-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Savary2012 - abstract: 'Dirty 
Tangible Interfaces (DIRTI) are a new concept in interface design - that forgoes the dogma of repeatability in favor of a richer and more complex - experience, constantly evolving, never reversible, and infinitely modifiable. - We built a prototype based on granular or liquid interaction material placed in - a glass dish, that is analyzed by video tracking for its 3D relief. This relief, - and the dynamic changes applied to it by the user, are interpreted as activation - profiles to drive corpus-based concatenative sound synthesis, allowing one or - more players to mold sonic landscapes and to plow through them in an inherently - collaborative, expressive, and dynamic experience.' - address: 'Ann Arbor, Michigan' - author: Matthieu Savary and Diemo Schwarz and Denis Pellerin - bibtex: "@inproceedings{Savary2012,\n abstract = {Dirty Tangible Interfaces (DIRTI)\ - \ are a new concept in interface design that forgoes the dogma of repeatability\ - \ in favor of a richer and more complex experience, constantly evolving, never\ - \ reversible, and infinitely modifiable. We built a prototype based on granular\ - \ or liquid interaction material placed in a glass dish, that is analyzed by video\ - \ tracking for its 3D relief. 
This relief, and the dynamic changes applied to\ - \ it by the user, are interpreted as activation profiles to drive corpus-based\ - \ concatenative sound synthesis, allowing one or more players to mold sonic landscapes\ - \ and to plow through them in an inherently collaborative, expressive, and dynamic\ - \ experience.},\n address = {Ann Arbor, Michigan},\n author = {Matthieu Savary\ - \ and Diemo Schwarz and Denis Pellerin},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180585},\n\ - \ issn = {2220-4806},\n keywords = {Tangible interface, Corpus-based concatenative\ - \ synthesis, Non-standard interaction},\n publisher = {University of Michigan},\n\ - \ title = {DIRTI ---Dirty Tangible Interfaces},\n url = {http://www.nime.org/proceedings/2012/nime2012_212.pdf},\n\ - \ year = {2012}\n}\n" + ID: rmichon2015 + abstract: 'This work presents a series of tools to turn Faust code into various + elements ranging from fully functional applications to multi-platform libraries + for real time audio signal processing on iOS and Android. Technical details about + their use and function are provided along with audio latency and performance comparisons, + and examples of applications.' + address: 'Baton Rouge, Louisiana, USA' + author: Romain Michon and Julius Orion Smith III and Yann Orlarey + bibtex: "@inproceedings{rmichon2015,\n abstract = {This work presents a series of\ + \ tools to turn Faust code into various elements ranging from fully functional\ + \ applications to multi-platform libraries for real time audio signal processing\ + \ on iOS and Android. 
Technical details about their use and function are provided\ + \ along with audio latency and performance comparisons, and examples of applications.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Romain Michon and {Julius\ + \ Orion} {Smith III} and Yann Orlarey},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179140},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {396--399},\n publisher = {Louisiana State University},\n title\ + \ = {MobileFaust: a Set of Tools to Make Musical Mobile Applications with the\ + \ Faust Programming Language},\n url = {http://www.nime.org/proceedings/2015/nime2015_153.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180585 + doi: 10.5281/zenodo.1179140 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Tangible interface, Corpus-based concatenative synthesis, Non-standard - interaction' - publisher: University of Michigan - title: DIRTI ---Dirty Tangible Interfaces - url: http://www.nime.org/proceedings/2012/nime2012_212.pdf - year: 2012 + month: May + pages: 396--399 + publisher: Louisiana State University + title: 'MobileFaust: a Set of Tools to Make Musical Mobile Applications with the + Faust Programming Language' + url: http://www.nime.org/proceedings/2015/nime2015_153.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Reboursiere2012 - abstract: 'In this paper we present a series of algorithms developed to detect the - following guitar playing techniques : bend, hammer-on, pull-off, slide, palm muting - and harmonic. Detection of playing techniques can be used to control exter-nal - content (i.e audio loops and effects, videos, light events, etc.), as well as - to write real-time score or to assist guitar novices in their learning process. 
- The guitar used is a Godin Multiac with an under-saddle RMC hexaphonic piezo pickup - (one pickup per string, i.e six mono signals).' - address: 'Ann Arbor, Michigan' - author: Loïc Reboursière and Otso Lähdeoja and Thomas Drugman and Stéphane Dupont - and Cécile Picard-Limpens and Nicolas Riche - bibtex: "@inproceedings{Reboursiere2012,\n abstract = {In this paper we present\ - \ a series of algorithms developed to detect the following guitar playing techniques\ - \ : bend, hammer-on, pull-off, slide, palm muting and harmonic. Detection of playing\ - \ techniques can be used to control exter-nal content (i.e audio loops and effects,\ - \ videos, light events, etc.), as well as to write real-time score or to assist\ - \ guitar novices in their learning process. The guitar used is a Godin Multiac\ - \ with an under-saddle RMC hexaphonic piezo pickup (one pickup per string, i.e\ - \ six mono signals).},\n address = {Ann Arbor, Michigan},\n author = {Lo{\\\"\ - i}c Reboursi{\\`e}re and Otso L{\\\"a}hdeoja and Thomas Drugman and St{\\'e}phane\ - \ Dupont and C{\\'e}cile Picard-Limpens and Nicolas Riche},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1180575},\n issn = {2220-4806},\n keywords = {Guitar audio\ - \ analysis, playing techniques, hexaphonic pickup, controller, augmented guitar},\n\ - \ publisher = {University of Michigan},\n title = {Left and right-hand guitar\ - \ playing techniques detection},\n url = {http://www.nime.org/proceedings/2012/nime2012_213.pdf},\n\ - \ year = {2012}\n}\n" + ID: amercertaylor2015 + abstract: 'On a traditional keyboard, the actions required to play a consonant chord + progression must be quite precise; accidentally strike a neighboring key, and + a pleasant sonority is likely to become a jarring one. Inspired by the Tonnetz + (a tonal diagram), we present a new layout of pitches defined using low-level + harmonic notions. 
We demonstrate the potential of our system by mapping the random + movements of fish in an aquarium to this layout. Qualitatively, we find that this + captures the intuition behind mapping motion to sound. Similarly moving fish produce + consonant chords, while fish moving in non-unison produce dissonant chords. We + introduce an open source MATLAB library implementing these techniques, which can + be used for sonifying multimodal streaming data. ' + address: 'Baton Rouge, Louisiana, USA' + author: Andrew Mercer-Taylor and Jaan Altosaar + bibtex: "@inproceedings{amercertaylor2015,\n abstract = {On a traditional keyboard,\ + \ the actions required to play a consonant chord progression must be quite precise;\ + \ accidentally strike a neighboring key, and a pleasant sonority is likely to\ + \ become a jarring one. Inspired by the Tonnetz (a tonal diagram), we present\ + \ a new layout of pitches defined using low-level harmonic notions. We demonstrate\ + \ the potential of our system by mapping the random movements of fish in an aquarium\ + \ to this layout. Qualitatively, we find that this captures the intuition behind\ + \ mapping motion to sound. Similarly moving fish produce consonant chords, while\ + \ fish moving in non-unison produce dissonant chords. We introduce an open source\ + \ MATLAB library implementing these techniques, which can be used for sonifying\ + \ multimodal streaming data. 
},\n address = {Baton Rouge, Louisiana, USA},\n author\ + \ = {Andrew Mercer-Taylor and Jaan Altosaar},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179138},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ + \ = {2220-4806},\n month = {May},\n pages = {28--29},\n publisher = {Louisiana\ + \ State University},\n title = {Sonification of Fish Movement Using Pitch Mesh\ + \ Pairs},\n url = {http://www.nime.org/proceedings/2015/nime2015_155.pdf},\n urlsuppl1\ + \ = {http://www.nime.org/proceedings/2015/155/0155-file1.mp4},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180575 + doi: 10.5281/zenodo.1179138 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Guitar audio analysis, playing techniques, hexaphonic pickup, controller, - augmented guitar' - publisher: University of Michigan - title: Left and right-hand guitar playing techniques detection - url: http://www.nime.org/proceedings/2012/nime2012_213.pdf - year: 2012 + month: May + pages: 28--29 + publisher: Louisiana State University + title: Sonification of Fish Movement Using Pitch Mesh Pairs + url: http://www.nime.org/proceedings/2015/nime2015_155.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/155/0155-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Choi2012 - abstract: 'The Deckle Group1 is an ensemble that designs, builds and performs on - electroacoustic drawing boards. These draw-ing surfaces are augmented with Satellite - CCRMA Beagle-Boards and Arduinos2.[1] Piezo microphones are used in conjunction - with other sensors to produce sounds that are coupled tightly to mark-making gestures. - Position tracking is achieved with infra-red object tracking, conductive fabric - and a magnetometer.' 
- address: 'Ann Arbor, Michigan' - author: Hongchan Choi and John Granzow and Joel Sadler - bibtex: "@inproceedings{Choi2012,\n abstract = {The Deckle Group1 is an ensemble\ - \ that designs, builds and performs on electroacoustic drawing boards. These draw-ing\ - \ surfaces are augmented with Satellite CCRMA Beagle-Boards and Arduinos2.[1]\ - \ Piezo microphones are used in conjunction with other sensors to produce sounds\ - \ that are coupled tightly to mark-making gestures. Position tracking is achieved\ - \ with infra-red object tracking, conductive fabric and a magnetometer.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Hongchan Choi and John Granzow and Joel\ - \ Sadler},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178235},\n issn = {2220-4806},\n\ - \ keywords = {Deckle, BeagleBoard, Drawing, Sonification, Performance, Audiovisual,\ - \ Gestural Interface},\n publisher = {University of Michigan},\n title = {The\ - \ Deckle Project : A Sketch of Three Sensors},\n url = {http://www.nime.org/proceedings/2012/nime2012_214.pdf},\n\ - \ year = {2012}\n}\n" + ID: klin2015 + abstract: 'Many new melodic instruments use a touch sensitive surface with notes + arranged in a two-dimensional grid. Most of these arrange notes in chromatic half-steps + along the horizontal axis and in intervals of fourths along the vertical axis. + Although many alternatives exist, this arrangement, which resembles that of a + bass guitar, is quickly becoming the de facto standard. In this study we present + experimental evidence that grid based instruments are significantly easier to + play when we tune adjacent rows in Major thirds rather than fourths. We have developed + a grid-based instrument as an iPad app that has sold 8,000 units since 2012. 
To + test our proposed alternative tuning, we taught a group twenty new users to play + basic chords on our app, using both the standard tuning and our proposed alternative. + Our results show that the Major thirds tuning is much easier to learn, even for + users that have previous experience playing guitar.' + address: 'Baton Rouge, Louisiana, USA' + author: Hans Anderson and Kin Wah Edward Lin and Natalie Agus and Simon Lui + bibtex: "@inproceedings{klin2015,\n abstract = {Many new melodic instruments use\ + \ a touch sensitive surface with notes arranged in a two-dimensional grid. Most\ + \ of these arrange notes in chromatic half-steps along the horizontal axis and\ + \ in intervals of fourths along the vertical axis. Although many alternatives\ + \ exist, this arrangement, which resembles that of a bass guitar, is quickly becoming\ + \ the de facto standard. In this study we present experimental evidence that grid\ + \ based instruments are significantly easier to play when we tune adjacent rows\ + \ in Major thirds rather than fourths. We have developed a grid-based instrument\ + \ as an iPad app that has sold 8,000 units since 2012. To test our proposed alternative\ + \ tuning, we taught a group twenty new users to play basic chords on our app,\ + \ using both the standard tuning and our proposed alternative. 
Our results show\ + \ that the Major thirds tuning is much easier to learn, even for users that have\ + \ previous experience playing guitar.},\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Hans Anderson and Kin Wah Edward Lin and Natalie Agus and Simon Lui},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179006},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {365--368},\n\ + \ publisher = {Louisiana State University},\n title = {Major Thirds: A Better\ + \ Way to Tune Your iPad},\n url = {http://www.nime.org/proceedings/2015/nime2015_157.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178235 + doi: 10.5281/zenodo.1179006 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Deckle, BeagleBoard, Drawing, Sonification, Performance, Audiovisual, - Gestural Interface' - publisher: University of Michigan - title: 'The Deckle Project : A Sketch of Three Sensors' - url: http://www.nime.org/proceedings/2012/nime2012_214.pdf - year: 2012 + month: May + pages: 365--368 + publisher: Louisiana State University + title: 'Major Thirds: A Better Way to Tune Your iPad' + url: http://www.nime.org/proceedings/2015/nime2015_157.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Vamvakousis2012 - abstract: 'In this paper we describe the EyeHarp, a new gaze-controlled musical - instrument, and the new features we recently added to its design. In particular, - we report on the EyeHarp new controls, the arpeggiator, the new remote eye-tracking - device, and the EyeHarp capacity to act as a MIDI controller for any VST plugin - virtual instrument. 
We conducted an evaluation of the EyeHarp Temporal accuracy - by monitor-ing 10 users while performing a melody task, and comparing their gaze - control accuracy with their accuracy using a com-puter keyboard. We report on - the results of the evaluation.' - address: 'Ann Arbor, Michigan' - author: Zacharias Vamvakousis and Rafael Ramirez - bibtex: "@inproceedings{Vamvakousis2012,\n abstract = {In this paper we describe\ - \ the EyeHarp, a new gaze-controlled musical instrument, and the new features\ - \ we recently added to its design. In particular, we report on the EyeHarp new\ - \ controls, the arpeggiator, the new remote eye-tracking device, and the EyeHarp\ - \ capacity to act as a MIDI controller for any VST plugin virtual instrument.\ - \ We conducted an evaluation of the EyeHarp Temporal accuracy by monitor-ing 10\ - \ users while performing a melody task, and comparing their gaze control accuracy\ - \ with their accuracy using a com-puter keyboard. We report on the results of\ - \ the evaluation.},\n address = {Ann Arbor, Michigan},\n author = {Zacharias Vamvakousis\ - \ and Rafael Ramirez},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178443},\n\ - \ issn = {2220-4806},\n keywords = {Eye-tracking systems, music interfaces, gaze\ - \ interaction},\n publisher = {University of Michigan},\n title = {Temporal Control\ - \ In the EyeHarp Gaze-Controlled Musical Interface},\n url = {http://www.nime.org/proceedings/2012/nime2012_215.pdf},\n\ - \ year = {2012}\n}\n" + ID: jaltosaar2015 + abstract: 'Much of the challenge and appeal in remixing music comes from manipulating + samples. Typically, identifying distinct samples of a song requires expertise + in music production software. Additionally, it is difficult to visualize similarities + and differences between all samples of a song simultaneously and use this to select + samples. 
MusicMapper is a web application that allows nonexpert users to find + and visualize distinctive samples from a song without any manual intervention, + and enables remixing by having users play back clusterings of such samples. This + is accomplished by splitting audio from the Soundcloud API into appropriately-sized + spectrograms, and applying the t-SNE algorithm to visualize these spectrograms + in two dimensions. Next, we apply k-means to guide the user''s eye toward related + clusters and set $k=26$ to enable playback of the clusters by pressing keys on + an ordinary keyboard. We present the source code (https://github.com/fatsmcgee/MusicMappr) + and a demo video (http://youtu.be/mvD6e1uiO8k) of the MusicMapper web application + that can be run in most modern browsers.' + address: 'Baton Rouge, Louisiana, USA' + author: Ethan Benjamin and Jaan Altosaar + bibtex: "@inproceedings{jaltosaar2015,\n abstract = {Much of the challenge and appeal\ + \ in remixing music comes from manipulating samples. Typically, identifying distinct\ + \ samples of a song requires expertise in music production software. Additionally,\ + \ it is difficult to visualize similarities and differences between all samples\ + \ of a song simultaneously and use this to select samples. MusicMapper is a web\ + \ application that allows nonexpert users to find and visualize distinctive samples\ + \ from a song without any manual intervention, and enables remixing by having\ + \ users play back clusterings of such samples. This is accomplished by splitting\ + \ audio from the Soundcloud API into appropriately-sized spectrograms, and applying\ + \ the t-SNE algorithm to visualize these spectrograms in two dimensions. 
Next,\ + \ we apply k-means to guide the user's eye toward related clusters and set $k=26$\ + \ to enable playback of the clusters by pressing keys on an ordinary keyboard.\ + \ We present the source code (https://github.com/fatsmcgee/MusicMappr) and a demo\ + \ video (http://youtu.be/mvD6e1uiO8k) of the MusicMapper web application that\ + \ can be run in most modern browsers.},\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Ethan Benjamin and Jaan Altosaar},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179018},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ + \ = {2220-4806},\n month = {May},\n pages = {325--326},\n publisher = {Louisiana\ + \ State University},\n title = {MusicMapper: Interactive {2D} representations\ + \ of music samples for in-browser remixing and exploration},\n url = {http://www.nime.org/proceedings/2015/nime2015_161.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/161/0161-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178443 + doi: 10.5281/zenodo.1179018 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Eye-tracking systems, music interfaces, gaze interaction' - publisher: University of Michigan - title: Temporal Control In the EyeHarp Gaze-Controlled Musical Interface - url: http://www.nime.org/proceedings/2012/nime2012_215.pdf - year: 2012 + month: May + pages: 325--326 + publisher: Louisiana State University + title: 'MusicMapper: Interactive 2D representations of music samples for in-browser + remixing and exploration' + url: http://www.nime.org/proceedings/2015/nime2015_161.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/161/0161-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Han2012 - abstract: 'Virtual Pottery is an interactive audiovisual piece that uses hand 
gesture - to create 3D pottery objects and sound shape. Using the OptiTrack motion capture - (Rigid Body) system at TransLab in UCSB, performers can take a glove with attached - trackers, move the hand in x, y, and z axis and create their own sound pieces. - Performers can also manipulate their pottery pieces in real time and change arrangement - on the musical score interface in order to create a continuous musical composition. - In this paper we address the relationship between body, sound and 3D shapes. We - also describe the origin of Virtual Pottery, its design process, discuss its aesthetic - value and musical sound synthesis system, and evaluate the overall experience.' - address: 'Ann Arbor, Michigan' - author: Yoon Chung Han and Byeong-jun Han - bibtex: "@inproceedings{Han2012,\n abstract = {Virtual Pottery is an interactive\ - \ audiovisual piece that uses hand gesture to create 3D pottery objects and sound\ - \ shape. Using the OptiTrack motion capture (Rigid Body) system at TransLab in\ - \ UCSB, performers can take a glove with attached trackers, move the hand in x,\ - \ y, and z axis and create their own sound pieces. Performers can also manipulate\ - \ their pottery pieces in real time and change arrangement on the musical score\ - \ interface in order to create a continuous musical composition. In this paper\ - \ we address the relationship between body, sound and 3D shapes. 
We also describe\ - \ the origin of Virtual Pottery, its design process, discuss its aesthetic value\ - \ and musical sound synthesis system, and evaluate the overall experience.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Yoon Chung Han and Byeong-jun Han},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178273},\n issn = {2220-4806},\n\ - \ keywords = {Virtual Pottery, virtual musical instrument, sound synthesis, motion\ - \ and gesture, pottery, motion perception, interactive sound installation.},\n\ - \ publisher = {University of Michigan},\n title = {Virtual Pottery: An Interactive\ - \ Audio-Visual Installation},\n url = {http://www.nime.org/proceedings/2012/nime2012_216.pdf},\n\ - \ year = {2012}\n}\n" + ID: jjaimovich2015 + abstract: 'Previously the design of algorithms and parameter calibration for biosignal + music performances has been based on testing with a small number of individuals + --- in fact usually the performer themselves. This paper uses the data collected + from over 4000 people to begin to create a truly robust set of algorithms for + heart rate and electrodermal activity measures, as well as the understanding of + how the calibration of these vary by individual.' + address: 'Baton Rouge, Louisiana, USA' + author: Javier Jaimovich and R. Benjamin Knapp + bibtex: "@inproceedings{jjaimovich2015,\n abstract = {Previously the design of algorithms\ + \ and parameter calibration for biosignal music performances has been based on\ + \ testing with a small number of individuals --- in fact usually the performer\ + \ themselves. 
This paper uses the data collected from over 4000 people to begin\ + \ to create a truly robust set of algorithms for heart rate and electrodermal\ + \ activity measures, as well as the understanding of how the calibration of these\ + \ vary by individual.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ + \ {Javier Jaimovich and {R. Benjamin} Knapp},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179096},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ + \ = {2220-4806},\n month = {May},\n pages = {1--4},\n publisher = {Louisiana State\ + \ University},\n title = {Creating Biosignal Algorithms for Musical Applications\ + \ from an Extensive Physiological Database},\n url = {http://www.nime.org/proceedings/2015/nime2015_163.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178273 + doi: 10.5281/zenodo.1179096 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Virtual Pottery, virtual musical instrument, sound synthesis, motion - and gesture, pottery, motion perception, interactive sound installation.' - publisher: University of Michigan - title: 'Virtual Pottery: An Interactive Audio-Visual Installation' - url: http://www.nime.org/proceedings/2012/nime2012_216.pdf - year: 2012 + month: May + pages: 1--4 + publisher: Louisiana State University + title: Creating Biosignal Algorithms for Musical Applications from an Extensive + Physiological Database + url: http://www.nime.org/proceedings/2015/nime2015_163.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Nash2012 - abstract: 'This paper presents concepts, models, and empirical findings relating - to liveness and flow in the user experience of systems mediated by notation. 
Results - from an extensive two-year field study of over 1,000 sequencer and tracker users, - combining interaction logging, user surveys, and a video study, are used to illustrate - the properties of notations and interfaces that facilitate greater immersion in - musical activities and domains, borrowing concepts from programming to illustrate - the role of visual and musical feedback, from the notation and domain respectively. - The Cognitive Dimensions of Notations framework and Csikszentmihalyi''s flow theory - are combined to demonstrate how non-realtime, notation-mediated interaction can - support focused, immersive, energetic, and intrinsically-rewarding musical experiences, - and to what extent they are supported in the interfaces of music production software. - Users are shown to maintain liveness through a rapid, iterative edit-audition - cycle that integrates audio and visual feedback.' - address: 'Ann Arbor, Michigan' - author: Chris Nash and Alan Blackwell - bibtex: "@inproceedings{Nash2012,\n abstract = {This paper presents concepts, models,\ - \ and empirical findings relating to liveness and flow in the user experience\ - \ of systems mediated by notation. Results from an extensive two-year field study\ - \ of over 1,000 sequencer and tracker users, combining interaction logging, user\ - \ surveys, and a video study, are used to illustrate the properties of notations\ - \ and interfaces that facilitate greater immersion in musical activities and domains,\ - \ borrowing concepts from programming to illustrate the role of visual and musical\ - \ feedback, from the notation and domain respectively. The Cognitive Dimensions\ - \ of Notations framework and Csikszentmihalyi's flow theory are combined to demonstrate\ - \ how non-realtime, notation-mediated interaction can support focused, immersive,\ - \ energetic, and intrinsically-rewarding musical experiences, and to what extent\ - \ they are supported in the interfaces of music production software. 
Users are\ - \ shown to maintain liveness through a rapid, iterative edit-audition cycle that\ - \ integrates audio and visual feedback.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Chris Nash and Alan Blackwell},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1180547},\n issn = {2220-4806},\n keywords = {notation, composition,\ - \ liveness, flow, feedback, sequencers, DAWs, soundtracking, performance, user\ - \ studies, programming},\n publisher = {University of Michigan},\n title = {Liveness\ - \ and Flow in Notation Use},\n url = {http://www.nime.org/proceedings/2012/nime2012_217.pdf},\n\ - \ year = {2012}\n}\n" + ID: bknichel2015 + abstract: 'Resonate was a musical installation created with focus on interactivity + and collaboration. In this paper we will focus on the design-process and the different + steps involved. We describe and discuss the methods to create, synchronize and + combine the aspects of space, object, music and interaction for the development + of resonate. The realized space-filling tangible installation allowed visitors + to interact with different interaction objects and change therefore the musical + expression as well as the visual response and aesthetic. After a non-formal quality + evaluation of this installation we changed some aspects which resulted in a more + refined version which we will also discuss here. ' + address: 'Baton Rouge, Louisiana, USA' + author: Benjamin Knichel and Holger Reckter and Peter Kiefer + bibtex: "@inproceedings{bknichel2015,\n abstract = {Resonate was a musical installation\ + \ created with focus on interactivity and collaboration. In this paper we will\ + \ focus on the design-process and the different steps involved. We describe and\ + \ discuss the methods to create, synchronize and combine the aspects of space,\ + \ object, music and interaction for the development of resonate. 
The realized\ + \ space-filling tangible installation allowed visitors to interact with different\ + \ interaction objects and change therefore the musical expression as well as the\ + \ visual response and aesthetic. After a non-formal quality evaluation of this\ + \ installation we changed some aspects which resulted in a more refined version\ + \ which we will also discuss here. },\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Benjamin Knichel and Holger Reckter and Peter Kiefer},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179108},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {111--115},\n\ + \ publisher = {Louisiana State University},\n title = {resonate -- a social musical\ + \ installation which integrates tangible multiuser interaction},\n url = {http://www.nime.org/proceedings/2015/nime2015_164.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/164/0164-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180547 + doi: 10.5281/zenodo.1179108 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'notation, composition, liveness, flow, feedback, sequencers, DAWs, soundtracking, - performance, user studies, programming' - publisher: University of Michigan - title: Liveness and Flow in Notation Use - url: http://www.nime.org/proceedings/2012/nime2012_217.pdf - year: 2012 + month: May + pages: 111--115 + publisher: Louisiana State University + title: resonate -- a social musical installation which integrates tangible multiuser + interaction + url: http://www.nime.org/proceedings/2015/nime2015_164.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/164/0164-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Trail2012 - abstract: 'The Gyil is a pentatonic African 
wooden xylophone with 14-15 keys. The - work described in this paper has been motivated by three applications: computer - analysis of Gyil performance, live improvised electro-acoustic music incorporating - the Gyil, and hybrid sampling and physical mod-eling. In all three of these cases, - detailed information about what is played on the Gyil needs to be digitally captured - in real-time. We describe a direct sensing apparatus that can be used to achieve - this. It is based on contact microphones and is informed by the specific characteristics - of the Gyil. An alternative approach based on indirect acquisition is to apply - polyphonic transcription on the signal acquired by a microphone without requiring - the instrument to be modified. The direct sensing apparatus we have developed - can be used to acquire ground truth for evaluating different approaches to polyphonic - transcription and help create a ``surrogate'''' sensor. Some initial results comparing - different strategies to polyphonic transcription are presented.' - address: 'Ann Arbor, Michigan' - author: Shawn Trail and Tiago Fernandes Tavares and Dan Godlovitch and George Tzanetakis - bibtex: "@inproceedings{Trail2012,\n abstract = {The Gyil is a pentatonic African\ - \ wooden xylophone with 14-15 keys. The work described in this paper has been\ - \ motivated by three applications: computer analysis of Gyil performance, live\ - \ improvised electro-acoustic music incorporating the Gyil, and hybrid sampling\ - \ and physical mod-eling. In all three of these cases, detailed information about\ - \ what is played on the Gyil needs to be digitally captured in real-time. We describe\ - \ a direct sensing apparatus that can be used to achieve this. 
It is based on\ - \ contact microphones and is informed by the specific characteristics of the Gyil.\ - \ An alternative approach based on indirect acquisition is to apply polyphonic\ - \ transcription on the signal acquired by a microphone without requiring the instrument\ - \ to be modified. The direct sensing apparatus we have developed can be used to\ - \ acquire ground truth for evaluating different approaches to polyphonic transcription\ - \ and help create a ``surrogate'' sensor. Some initial results comparing different\ - \ strategies to polyphonic transcription are presented.},\n address = {Ann Arbor,\ - \ Michigan},\n author = {Shawn Trail and Tiago Fernandes Tavares and Dan Godlovitch\ - \ and George Tzanetakis},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178437},\n\ - \ issn = {2220-4806},\n keywords = {hyperinstruments, indirect acquisition, surrogate\ - \ sensors, computational ethnomusicology, physical modeling, perfor-mance analysis},\n\ - \ publisher = {University of Michigan},\n title = {Direct and surrogate sensing\ - \ for the Gyil african xylophone},\n url = {http://www.nime.org/proceedings/2012/nime2012_222.pdf},\n\ - \ year = {2012}\n}\n" + ID: gdublon2015 + abstract: 'The Electronic Fox Ears helmet is a listening device that changes its + wearer''s experience of hearing. A pair of head-mounted, independently articulated + parabolic microphones and built-in bone conduction transducers allow the wearer + to sharply direct their attention to faraway sound sources. Joysticks in each + hand control the orientations of the microphones, which are mounted on servo gimbals + for precise targeting. Paired with a mobile device, the helmet can function as + a specialized, wearable field recording platform. 
Field recording and ambient + sound have long been a part of electronic music; our device extends these practices + by drawing on a tradition of wearable technologies and prosthetic art that blur + the boundaries of human perception. ' + address: 'Baton Rouge, Louisiana, USA' + author: Rebecca Kleinberger and Gershon Dublon and Joseph A. Paradiso and Tod Machover + bibtex: "@inproceedings{gdublon2015,\n abstract = {The Electronic Fox Ears helmet\ + \ is a listening device that changes its wearer's experience of hearing. A pair\ + \ of head-mounted, independently articulated parabolic microphones and built-in\ + \ bone conduction transducers allow the wearer to sharply direct their attention\ + \ to faraway sound sources. Joysticks in each hand control the orientations of\ + \ the microphones, which are mounted on servo gimbals for precise targeting. Paired\ + \ with a mobile device, the helmet can function as a specialized, wearable field\ + \ recording platform. Field recording and ambient sound have long been a part\ + \ of electronic music; our device extends these practices by drawing on a tradition\ + \ of wearable technologies and prosthetic art that blur the boundaries of human\ + \ perception. 
},\n address = {Baton Rouge, Louisiana, USA},\n author = {Rebecca\ + \ Kleinberger and Gershon Dublon and {Joseph A.} Paradiso and Tod Machover},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179106},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {30--31},\n\ + \ publisher = {Louisiana State University},\n title = {PHOX Ears: A Parabolic,\ + \ Head-mounted, Orientable, eXtrasensory Listening Device},\n url = {http://www.nime.org/proceedings/2015/nime2015_165.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/165/0165-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178437 + doi: 10.5281/zenodo.1179106 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'hyperinstruments, indirect acquisition, surrogate sensors, computational - ethnomusicology, physical modeling, perfor-mance analysis' - publisher: University of Michigan - title: Direct and surrogate sensing for the Gyil african xylophone - url: http://www.nime.org/proceedings/2012/nime2012_222.pdf - year: 2012 + month: May + pages: 30--31 + publisher: Louisiana State University + title: 'PHOX Ears: A Parabolic, Head-mounted, Orientable, eXtrasensory Listening + Device' + url: http://www.nime.org/proceedings/2015/nime2015_165.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/165/0165-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Gerhard2012 - abstract: 'The Instant Instrument Anywhere (IIA) is a small device which can be - attached to any metal object to create an electronic instrument. The device uses - capacitive sensing to detect proximity of the player''s body to the metal object, - and sound is generated through a surface transducer which can be attached to any - flat surface. 
Because the capacitive sensor can be any shape or size, absolute - capacitive thresholding is not possible since the baseline capacitance will change. - Instead, we use a differential-based moving sum threshold which can rapidly adjust - to changes in the environment or be re-calibrated to a new metal object. We show - that this dynamic threshold is effective in rejecting environmental noise and - rapidly adapting to new objects. We also present details for constructing Instant - Instruments Anywhere, including using smartphone as the synthesis engine and power - supply.' - address: 'Ann Arbor, Michigan' - author: David Gerhard and Brett Park - bibtex: "@inproceedings{Gerhard2012,\n abstract = {The Instant Instrument Anywhere\ - \ (IIA) is a small device which can be attached to any metal object to create\ - \ an electronic instrument. The device uses capacitive sensing to detect proximity\ - \ of the player's body to the metal object, and sound is generated through a surface\ - \ transducer which can be attached to any flat surface. Because the capacitive\ - \ sensor can be any shape or size, absolute capacitive thresholding is not possible\ - \ since the baseline capacitance will change. Instead, we use a differential-based\ - \ moving sum threshold which can rapidly adjust to changes in the environment\ - \ or be re-calibrated to a new metal object. 
We show that this dynamic threshold\ - \ is effective in rejecting environmental noise and rapidly adapting to new objects.\ - \ We also present details for constructing Instant Instruments Anywhere, including\ - \ using smartphone as the synthesis engine and power supply.},\n address = {Ann\ - \ Arbor, Michigan},\n author = {David Gerhard and Brett Park},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178261},\n issn = {2220-4806},\n keywords = {Capacitive\ - \ Sensing, Arduino},\n publisher = {University of Michigan},\n title = {Instant\ - \ Instrument Anywhere: A Self-Contained Capacitive Synthesizer},\n url = {http://www.nime.org/proceedings/2012/nime2012_223.pdf},\n\ - \ year = {2012}\n}\n" + ID: pdahlstedt2015 + abstract: 'Based on a combination of novel mapping techniques and carefully designed + sound engines, I present an augmented hybrid piano specifically designed for improvisation. + The mapping technique, originally developed for other control interfaces but here + adapted to the piano keyboard, is based on a dynamic vectorization of control + parameters, allowing both wild sonic exploration and minute intimate expression. + The original piano sound is used as the sole sound source, subjected to processing + techniques such as virtual resonance strings, dynamic buffer shuffling, and acoustic + and virtual feedback. Thanks to speaker and microphone placement, the acoustic + and processed sounds interact in both directions and blend into one new instrument. + This also allows for unorthodox playing (knocking, plucking, shouting). Processing + parameters are controlled from the keyboard playing alone, allowing intuitive + control of complex processing by ear, integrating expressive musical playing with + sonic exploration. The instrument is not random, but somewhat unpredictable. 
This + feeds into the improvisation, defining a particular idiomatics of the instruments. + Hence, the instrument itself is an essential part of the musical work. Performances + include concerts in UK, Japan, Singapore, Australia and Sweden, in solos and ensembles, + performed by several pianists. Variations of this hybrid instrument for digital + keyboards are also presented.' + address: 'Baton Rouge, Louisiana, USA' + author: Palle Dahlstedt + bibtex: "@inproceedings{pdahlstedt2015,\n abstract = {Based on a combination of\ + \ novel mapping techniques and carefully designed sound engines, I present an\ + \ augmented hybrid piano specifically designed for improvisation. The mapping\ + \ technique, originally developed for other control interfaces but here adapted\ + \ to the piano keyboard, is based on a dynamic vectorization of control parameters,\ + \ allowing both wild sonic exploration and minute intimate expression. The original\ + \ piano sound is used as the sole sound source, subjected to processing techniques\ + \ such as virtual resonance strings, dynamic buffer shuffling, and acoustic and\ + \ virtual feedback. Thanks to speaker and microphone placement, the acoustic and\ + \ processed sounds interact in both directions and blend into one new instrument.\ + \ This also allows for unorthodox playing (knocking, plucking, shouting). Processing\ + \ parameters are controlled from the keyboard playing alone, allowing intuitive\ + \ control of complex processing by ear, integrating expressive musical playing\ + \ with sonic exploration. The instrument is not random, but somewhat unpredictable.\ + \ This feeds into the improvisation, defining a particular idiomatics of the instruments.\ + \ Hence, the instrument itself is an essential part of the musical work. Performances\ + \ include concerts in UK, Japan, Singapore, Australia and Sweden, in solos and\ + \ ensembles, performed by several pianists. 
Variations of this hybrid instrument\ + \ for digital keyboards are also presented.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Palle Dahlstedt},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179046},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {271--276},\n publisher = {Louisiana State University},\n title\ + \ = {Mapping Strategies and Sound Engine Design for an Augmented Hybrid Piano},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_170.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/170/0170-file1.zip},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178261 + doi: 10.5281/zenodo.1179046 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Capacitive Sensing, Arduino' - publisher: University of Michigan - title: 'Instant Instrument Anywhere: A Self-Contained Capacitive Synthesizer' - url: http://www.nime.org/proceedings/2012/nime2012_223.pdf - year: 2012 + month: May + pages: 271--276 + publisher: Louisiana State University + title: Mapping Strategies and Sound Engine Design for an Augmented Hybrid Piano + url: http://www.nime.org/proceedings/2015/nime2015_170.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/170/0170-file1.zip + year: 2015 - ENTRYTYPE: inproceedings - ID: Luhtala2012 - abstract: "This paper introduces `The Aesthetic Experience Prism', a framework for\ - \ studying how components of aesthetic experience materialize in the model's of\ - \ interaction of novel musical interfaces as well as how the role of aesthetics\ - \ could be made more explicit in the processes of designing interaction for musical\ - \ technologies. 
The Aesthetic Experience Prism makes use of Arthur Danto's framework\ - \ of aesthetic experience that consists of three conceptual entities: (1) metaphor;\ - \ (2) expression; and (3) style. In this paper we present key questions driving\ - \ the research, theoretical background, artistic research approach and user research\ - \ activities.\nIn the DIYSE project a proof-of-concept music creation system prototype\ - \ was developed in a collaborative design setting. The prototype provides means\ - \ to the performer to create music with minimum effort while allowing for versatile\ - \ interaction. We argue that by using an artistic research approach specifically\ - \ targeting designing for aesthetic experience we were able to transform the knowledge\ - \ from early design ideas to resulting technology products in which model's of\ - \ interaction metaphors, expression and style are in an apparent role." - address: 'Ann Arbor, Michigan' - author: Matti Luhtala and Ilkka Niemeläinen and Johan Plomp and Markku Turunen and - Julius Tuomisto - bibtex: "@inproceedings{Luhtala2012,\n abstract = {This paper introduces `The Aesthetic\ - \ Experience Prism', a framework for studying how components of aesthetic experience\ - \ materialize in the model's of interaction of novel musical interfaces as well\ - \ as how the role of aesthetics could be made more explicit in the processes of\ - \ designing interaction for musical technologies. The Aesthetic Experience Prism\ - \ makes use of Arthur Danto's framework of aesthetic experience that consists\ - \ of three conceptual entities: (1) metaphor; (2) expression; and (3) style. In\ - \ this paper we present key questions driving the research, theoretical background,\ - \ artistic research approach and user research activities.\nIn the DIYSE project\ - \ a proof-of-concept music creation system prototype was developed in a collaborative\ - \ design setting. 
The prototype provides means to the performer to create music\ - \ with minimum effort while allowing for versatile interaction. We argue that\ - \ by using an artistic research approach specifically targeting designing for\ - \ aesthetic experience we were able to transform the knowledge from early design\ - \ ideas to resulting technology products in which model's of interaction metaphors,\ - \ expression and style are in an apparent role.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Matti Luhtala and Ilkka Niemel{\\''a}inen and Johan Plomp and Markku\ - \ Turunen and Julius Tuomisto},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178331},\n\ - \ issn = {2220-4806},\n keywords = {Aesthetics, Interaction Design, Artistic Research,\ - \ Exploration},\n publisher = {University of Michigan},\n title = {Studying Aesthetics\ - \ in a Musical Interface Design Process Through `Aesthetic Experience Prism'},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_226.pdf},\n year = {2012}\n\ + ID: pdahlstedtb2015 + abstract: 'The Bucket System is a new system for computer-mediated ensemble improvisation, + designed by improvisers for improvisers. Coming from a tradition of structured + free ensemble improvisation practices (comprovisation), influenced by post-WW2 + experimental music practices, it is a signaling system implemented with a set + of McMillen QuNeo controllers as input and output interfaces, powered by custom + software. It allows for a new kind of on-stage compositional/improvisation interaction.' + address: 'Baton Rouge, Louisiana, USA' + author: Palle Dahlstedt and Per Anders Nilsson and Gino Robair + bibtex: "@inproceedings{pdahlstedtb2015,\n abstract = {The Bucket System is a new\ + \ system for computer-mediated ensemble improvisation, designed by improvisers\ + \ for improvisers. 
Coming from a tradition of structured free ensemble improvisation\ + \ practices (comprovisation), influenced by post-WW2 experimental music practices,\ + \ it is a signaling system implemented with a set of McMillen QuNeo controllers\ + \ as input and output interfaces, powered by custom software. It allows for a\ + \ new kind of on-stage compositional/improvisation interaction.},\n address =\ + \ {Baton Rouge, Louisiana, USA},\n author = {Palle Dahlstedt and Per Anders Nilsson\ + \ and Gino Robair},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179048},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {317--318},\n publisher = {Louisiana State University},\n title\ + \ = {The Bucket System --- A computer mediated signalling system for group improvisation},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_171.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/171/0171-file1.mp4},\n year = {2015}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178331 + doi: 10.5281/zenodo.1179048 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Aesthetics, Interaction Design, Artistic Research, Exploration' - publisher: University of Michigan - title: Studying Aesthetics in a Musical Interface Design Process Through `Aesthetic - Experience Prism' - url: http://www.nime.org/proceedings/2012/nime2012_226.pdf - year: 2012 + month: May + pages: 317--318 + publisher: Louisiana State University + title: The Bucket System --- A computer mediated signalling system for group improvisation + url: http://www.nime.org/proceedings/2015/nime2015_171.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/171/0171-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Hollinger2012 - abstract: 'A modular and reconfigurable hardware 
platform for analog optoelectronic - signal acquisition is presented. Its intended application is for fiber optic sensing - in electronic musical interfaces, however the flexible design enables its use - with a wide range of analog and digital sensors. Multiple gain and multiplexing - stages as well as programmable analog and digital hardware blocks allow for the - acquisition, processing, and communication of single-ended and differential signals. - Along with a hub board, multiple acquisition boards can be connected to modularly - extend the system''s capabilities to suit the needs of the application. Fiber - optic sensors and their application in DMIs are briefly discussed, as well as - the use of the hardware platform with specific musical interfaces.' - address: 'Ann Arbor, Michigan' - author: Avrum Hollinger and Marcelo M. Wanderley - bibtex: "@inproceedings{Hollinger2012,\n abstract = {A modular and reconfigurable\ - \ hardware platform for analog optoelectronic signal acquisition is presented.\ - \ Its intended application is for fiber optic sensing in electronic musical interfaces,\ - \ however the flexible design enables its use with a wide range of analog and\ - \ digital sensors. Multiple gain and multiplexing stages as well as programmable\ - \ analog and digital hardware blocks allow for the acquisition, processing, and\ - \ communication of single-ended and differential signals. Along with a hub board,\ - \ multiple acquisition boards can be connected to modularly extend the system's\ - \ capabilities to suit the needs of the application. Fiber optic sensors and their\ - \ application in DMIs are briefly discussed, as well as the use of the hardware\ - \ platform with specific musical interfaces.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Avrum Hollinger and Marcelo M. 
Wanderley},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178289},\n issn = {2220-4806},\n keywords = {fiber optic\ - \ sensing, analog signal acquisition, musical interface, MRI-compatible},\n publisher\ - \ = {University of Michigan},\n title = {Optoelectronic Acquisition and Control\ - \ Board for Musical Applications},\n url = {http://www.nime.org/proceedings/2012/nime2012_228.pdf},\n\ - \ year = {2012}\n}\n" + ID: salexanderadams2015 + abstract: 'This paper outlines the development of a versatile platform for the performance + and composition of tangible graphic scores, providing technical details of the + hardware and software design. The system is conceived as a touch surface facilitating + modular textured plates, coupled with corresponding visual feedback.' + address: 'Baton Rouge, Louisiana, USA' + author: Simon Alexander-Adams and Michael Gurevich + bibtex: "@inproceedings{salexanderadams2015,\n abstract = {This paper outlines the\ + \ development of a versatile platform for the performance and composition of tangible\ + \ graphic scores, providing technical details of the hardware and software design.\ + \ The system is conceived as a touch surface facilitating modular textured plates,\ + \ coupled with corresponding visual feedback.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Simon Alexander-Adams and Michael Gurevich},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179004},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {174--175},\n publisher = {Louisiana\ + \ State University},\n title = {A Flexible Platform for Tangible Graphic Scores},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_172.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/172/0172-file1.mov},\n year = {2015}\n\ 
+ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178289 + doi: 10.5281/zenodo.1179004 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'fiber optic sensing, analog signal acquisition, musical interface, MRI-compatible' - publisher: University of Michigan - title: Optoelectronic Acquisition and Control Board for Musical Applications - url: http://www.nime.org/proceedings/2012/nime2012_228.pdf - year: 2012 + month: May + pages: 174--175 + publisher: Louisiana State University + title: A Flexible Platform for Tangible Graphic Scores + url: http://www.nime.org/proceedings/2015/nime2015_172.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/172/0172-file1.mov + year: 2015 - ENTRYTYPE: inproceedings - ID: Ouzounian2012 - abstract: 'Music for Sleeping & Waking Minds (2011-2012) is a new, overnight work - in which four performers fall asleep while wearing custom designed EEG sensors - which monitor their brainwave activity. The data gathered from the EEG sensors - is applied in real time to different audio and image signal processing functions, - resulting in continuously evolving multi-channel sound environment and visual - projection. This material serves as an audiovisual description of the individual - and collective neurophysiological state of the ensemble. Audiences are invited - to experience the work in different states of attention: while alert and asleep, - resting and awakening.' - address: 'Ann Arbor, Michigan' - author: Gascia Ouzounian and R. 
Benjamin Knapp and Eric Lyon and Luke DuBois - bibtex: "@inproceedings{Ouzounian2012,\n abstract = {Music for Sleeping & Waking\ - \ Minds (2011-2012) is a new, overnight work in which four performers fall asleep\ - \ while wearing custom designed EEG sensors which monitor their brainwave activity.\ - \ The data gathered from the EEG sensors is applied in real time to different\ - \ audio and image signal processing functions, resulting in continuously evolving\ - \ multi-channel sound environment and visual projection. This material serves\ - \ as an audiovisual description of the individual and collective neurophysiological\ - \ state of the ensemble. Audiences are invited to experience the work in different\ - \ states of attention: while alert and asleep, resting and awakening.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Gascia Ouzounian and R. Benjamin Knapp\ - \ and Eric Lyon and Luke DuBois},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180559},\n\ - \ issn = {2220-4806},\n keywords = {EEG, sleep, dream, biosignals, bio art, consciousness,\ - \ BCI},\n publisher = {University of Michigan},\n title = {Music for Sleeping\ - \ \\& Waking Minds},\n url = {http://www.nime.org/proceedings/2012/nime2012_229.pdf},\n\ - \ year = {2012}\n}\n" + ID: rvanrooyenb2015 + abstract: 'The ability to acquire and analyze a percussion performance in an efficient, + affordable, and non-invasive manner has been made possible by a unique composite + of off-the-shelf products. Through various methods of calibration and analysis, + human motion as imparted on a striking implement can be tracked and correlated + with traditional audio data in order to compare performances. Ultimately, conclusions + can be drawn that drive pedagogical studies as well as advances in musical robots.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Robert Van Rooyen and George Tzanetakis + bibtex: "@inproceedings{rvanrooyenb2015,\n abstract = {The ability to acquire and\ + \ analyze a percussion performance in an efficient, affordable, and non-invasive\ + \ manner has been made possible by a unique composite of off-the-shelf products.\ + \ Through various methods of calibration and analysis, human motion as imparted\ + \ on a striking implement can be tracked and correlated with traditional audio\ + \ data in order to compare performances. Ultimately, conclusions can be drawn\ + \ that drive pedagogical studies as well as advances in musical robots.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Robert {Van Rooyen} and George\ + \ Tzanetakis},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1181400},\n editor\ + \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ + \ pages = {339--342},\n publisher = {Louisiana State University},\n title = {Pragmatic\ + \ Drum Motion Capture System},\n url = {http://www.nime.org/proceedings/2015/nime2015_173.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/173/0173-file1.mp4},\n year\ + \ = {2015}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1181400 + editor: Edgar Berdahl and Jesse Allison + issn: 2220-4806 + month: May + pages: 339--342 + publisher: Louisiana State University + title: Pragmatic Drum Motion Capture System + url: http://www.nime.org/proceedings/2015/nime2015_173.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/173/0173-file1.mp4 + year: 2015 + + +- ENTRYTYPE: inproceedings + ID: qyang2015 + abstract: 'Multi-touch mobile devices provide a fresh paradigm for interactions, + as well as a platform for building rich musical applications. 
This paper presents + a multi-touch mobile programming environment that supports the exploration of + different representations in visual programming for music and audio interfaces. + Using a common flow-based visual programming vocabulary, we implemented a system + based on the urMus platform that explores three types of touch-based interaction + representations: a text-based menu representation, a graphical icon-based representation, + and a novel multi-touch gesture-based representation. We illustrated their use + on interface design for musical controllers.' + address: 'Baton Rouge, Louisiana, USA' + author: Qi Yang and Georg Essl + bibtex: "@inproceedings{qyang2015,\n abstract = {Multi-touch mobile devices provide\ + \ a fresh paradigm for interactions, as well as a platform for building rich musical\ + \ applications. This paper presents a multi-touch mobile programming environment\ + \ that supports the exploration of different representations in visual programming\ + \ for music and audio interfaces. Using a common flow-based visual programming\ + \ vocabulary, we implemented a system based on the urMus platform that explores\ + \ three types of touch-based interaction representations: a text-based menu representation,\ + \ a graphical icon-based representation, and a novel multi-touch gesture-based\ + \ representation. 
We illustrated their use on interface design for musical controllers.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Qi Yang and Georg Essl},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1181416},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {369--373},\n\ + \ publisher = {Louisiana State University},\n title = {Representation-Plurality\ + \ in Multi-Touch Mobile Visual Programming for Music},\n url = {http://www.nime.org/proceedings/2015/nime2015_177.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180559 + doi: 10.5281/zenodo.1181416 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'EEG, sleep, dream, biosignals, bio art, consciousness, BCI' - publisher: University of Michigan - title: Music for Sleeping & Waking Minds - url: http://www.nime.org/proceedings/2012/nime2012_229.pdf - year: 2012 + month: May + pages: 369--373 + publisher: Louisiana State University + title: Representation-Plurality in Multi-Touch Mobile Visual Programming for Music + url: http://www.nime.org/proceedings/2015/nime2015_177.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Schlei2012 - abstract: 'This paper describes the design and realization of TC-11, a software - instrument based on programmable multi-point controllers. TC-11 is a modular synthesizer - for the iPad that uses multi-touch and device motion sensors for control. It has - a robust patch programming interface that centers around multi-point controllers, - providing powerful flexibility. This paper details the origin, design principles, - programming implementation, and performance result of TC-11.' 
- address: 'Ann Arbor, Michigan' - author: Kevin Schlei - bibtex: "@inproceedings{Schlei2012,\n abstract = {This paper describes the design\ - \ and realization of TC-11, a software instrument based on programmable multi-point\ - \ controllers. TC-11 is a modular synthesizer for the iPad that uses multi-touch\ - \ and device motion sensors for control. It has a robust patch programming interface\ - \ that centers around multi-point controllers, providing powerful flexibility.\ - \ This paper details the origin, design principles, programming implementation,\ - \ and performance result of TC-11.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Kevin Schlei},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180589},\n\ - \ issn = {2220-4806},\n keywords = {TC-11, iPad, multi-touch, multi-point, controller\ - \ mapping, synthesis programming},\n publisher = {University of Michigan},\n title\ - \ = {TC-11: A Programmable Multi-Touch Synthesizer for the iPad},\n url = {http://www.nime.org/proceedings/2012/nime2012_230.pdf},\n\ - \ year = {2012}\n}\n" + ID: ajensenius2015 + abstract: 'This paper presents the scientific-artistic project Sverm, which has + focused on the use of micromotion and microsound in artistic practice. Starting + from standing still in silence, the artists involved have developed conceptual + and experiential knowledge of microactions, microsounds and the possibilities + of microinteracting with light and sound.' + address: 'Baton Rouge, Louisiana, USA' + author: 'Jensenius, Alexander Refsum' + bibtex: "@inproceedings{ajensenius2015,\n abstract = {This paper presents the scientific-artistic\ + \ project Sverm, which has focused on the use of micromotion and microsound in\ + \ artistic practice. 
Starting from standing still in silence, the artists involved\ + \ have developed conceptual and experiential knowledge of microactions, microsounds\ + \ and the possibilities of microinteracting with light and sound.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Jensenius, Alexander Refsum},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179100},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {16--19},\n\ + \ publisher = {Louisiana State University},\n title = {Microinteraction in Music/Dance\ + \ Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_178.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180589 + doi: 10.5281/zenodo.1179100 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'TC-11, iPad, multi-touch, multi-point, controller mapping, synthesis - programming' - publisher: University of Michigan - title: 'TC-11: A Programmable Multi-Touch Synthesizer for the iPad' - url: http://www.nime.org/proceedings/2012/nime2012_230.pdf - year: 2012 + month: May + pages: 16--19 + publisher: Louisiana State University + title: Microinteraction in Music/Dance Performance + url: http://www.nime.org/proceedings/2015/nime2015_178.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Kikukawa2012 - abstract: 'We developed original solenoid actuator units with several built-in sensors, - and produced a box-shaped musical inter-face ``PocoPoco'''' using 16 units of - them as a universal input/output device. We applied up-and-down movement of the - solenoid-units and user''s intuitive input to musical interface. Using transformation - of the physical interface, we can apply movement of the units to new interaction - design. 
At the same time we intend to suggest a new interface whose movement itself - can attract the user.' - address: 'Ann Arbor, Michigan' - author: Yuya Kikukawa and Takaharu Kanai and Tatsuhiko Suzuki and Toshiki Yoshiike - and Tetsuaki Baba and Kumiko Kushiyama - bibtex: "@inproceedings{Kikukawa2012,\n abstract = {We developed original solenoid\ - \ actuator units with several built-in sensors, and produced a box-shaped musical\ - \ inter-face ``PocoPoco'' using 16 units of them as a universal input/output device.\ - \ We applied up-and-down movement of the solenoid-units and user's intuitive input\ - \ to musical interface. Using transformation of the physical interface, we can\ - \ apply movement of the units to new interaction design. At the same time we intend\ - \ to suggest a new interface whose movement itself can attract the user.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Yuya Kikukawa and Takaharu Kanai and Tatsuhiko\ - \ Suzuki and Toshiki Yoshiike and Tetsuaki Baba and Kumiko Kushiyama},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178301},\n issn = {2220-4806},\n keywords\ - \ = {musical interface, interaction design, tactile, moving, kinetic},\n publisher\ - \ = {University of Michigan},\n title = {PocoPoco: A Kinetic Musical Interface\ - \ With Electro-Magnetic Levitation Units},\n url = {http://www.nime.org/proceedings/2012/nime2012_232.pdf},\n\ - \ year = {2012}\n}\n" + ID: ajenseniusb2015 + abstract: 'The MYO armband from Thalmic Labs is a complete and wireless motion and + muscle sensing platform. This paper evaluates the armband''s sensors and its potential + for NIME applications. This is followed by a presentation of the prototype instrument + MuMYO. We conclude that, despite some shortcomings, the armband has potential + of becoming a new ``standard'''' controller in the NIME community.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: 'Nymoen, Kristian and Haugen, Mari Romarheim and Jensenius, Alexander Refsum' + bibtex: "@inproceedings{ajenseniusb2015,\n abstract = {The MYO armband from Thalmic\ + \ Labs is a complete and wireless motion and muscle sensing platform. This paper\ + \ evaluates the armband's sensors and its potential for NIME applications. This\ + \ is followed by a presentation of the prototype instrument MuMYO. We conclude\ + \ that, despite some shortcomings, the armband has potential of becoming a new\ + \ ``standard'' controller in the NIME community.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Nymoen, Kristian and Haugen, Mari Romarheim and Jensenius,\ + \ Alexander Refsum},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179150},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {215--218},\n publisher = {Louisiana State University},\n title\ + \ = {MuMYO --- Evaluating and Exploring the MYO Armband for Musical Interaction},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_179.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/179/0179-file1.mov},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178301 + doi: 10.5281/zenodo.1179150 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'musical interface, interaction design, tactile, moving, kinetic' - publisher: University of Michigan - title: 'PocoPoco: A Kinetic Musical Interface With Electro-Magnetic Levitation Units' - url: http://www.nime.org/proceedings/2012/nime2012_232.pdf - year: 2012 + month: May + pages: 215--218 + publisher: Louisiana State University + title: MuMYO --- Evaluating and Exploring the MYO Armband for Musical Interaction + url: 
http://www.nime.org/proceedings/2015/nime2015_179.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/179/0179-file1.mov + year: 2015 - ENTRYTYPE: inproceedings - ID: Nort2012 - abstract: 'In this paper we discuss aspects of our work in develop-ing performance - systems that are geared towards human-machine co-performance with a particular - emphasis on improvisation. We present one particular system, FILTER, which was - created in the context of a larger project related to artificial intelligence - and performance, and has been tested in the context of our electro-acoustic performance - trio. We discuss how this timbrally rich and highly non-idiomatic musical context - has challenged the design of the system, with particular emphasis on the mapping - of machine listening parameters to higher-level behaviors of the system in such - a way that spontaneity and creativity are encouraged while maintaining a sense - of novel dialogue.' - address: 'Ann Arbor, Michigan' - author: Doug Van Nort and Jonas Braasch and Pauline Oliveros - bibtex: "@inproceedings{Nort2012,\n abstract = {In this paper we discuss aspects\ - \ of our work in develop-ing performance systems that are geared towards human-machine\ - \ co-performance with a particular emphasis on improvisation. We present one particular\ - \ system, FILTER, which was created in the context of a larger project related\ - \ to artificial intelligence and performance, and has been tested in the context\ - \ of our electro-acoustic performance trio. 
We discuss how this timbrally rich\ - \ and highly non-idiomatic musical context has challenged the design of the system,\ - \ with particular emphasis on the mapping of machine listening parameters to higher-level\ - \ behaviors of the system in such a way that spontaneity and creativity are encouraged\ - \ while maintaining a sense of novel dialogue.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Doug Van Nort and Jonas Braasch and Pauline Oliveros},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1180551},\n issn = {2220-4806},\n keywords\ - \ = {Electroacoustic Improvisation, Machine Learning, Mapping, Sonic Gestures,\ - \ Spatialization},\n publisher = {University of Michigan},\n title = {Mapping\ - \ to musical actions in the FILTER system},\n url = {http://www.nime.org/proceedings/2012/nime2012_235.pdf},\n\ - \ year = {2012}\n}\n" + ID: esheffieldb2015 + abstract: 'This paper describes a system for interactive mechanically actuated percussion. + Design principles regarding seamless control and retention of natural acoustic + properties are established. Performance patterns on a preliminary version are + examined, including the potential for cooperative and distributed performance.' + address: 'Baton Rouge, Louisiana, USA' + author: Eric Sheffield and Michael Gurevich + bibtex: "@inproceedings{esheffieldb2015,\n abstract = {This paper describes a system\ + \ for interactive mechanically actuated percussion. 
Design principles regarding\ + \ seamless control and retention of natural acoustic properties are established.\ + \ Performance patterns on a preliminary version are examined, including the potential\ + \ for cooperative and distributed performance.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Eric Sheffield and Michael Gurevich},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179176},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {11--15},\n publisher = {Louisiana\ + \ State University},\n title = {Distributed Mechanical Actuation of Percussion\ + \ Instruments},\n url = {http://www.nime.org/proceedings/2015/nime2015_183.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/183/DistributedActuationDemo.mp4},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180551 + doi: 10.5281/zenodo.1179176 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Electroacoustic Improvisation, Machine Learning, Mapping, Sonic Gestures, - Spatialization' - publisher: University of Michigan - title: Mapping to musical actions in the FILTER system - url: http://www.nime.org/proceedings/2012/nime2012_235.pdf - year: 2012 + month: May + pages: 11--15 + publisher: Louisiana State University + title: Distributed Mechanical Actuation of Percussion Instruments + url: http://www.nime.org/proceedings/2015/nime2015_183.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/183/DistributedActuationDemo.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Magnus2012 - abstract: "The purpose of the Musician Assistance and Score Distribution (MASD)\ - \ system is to assist novice musicians with playing in an orchestra, concert band,\ - \ choir or other musical ensemble. 
MASD helps novice musicians in three ways.\ - \ It removes the confusion that results from page turns, aides a musician's return\ - \ to the proper location in the music score after the looking at the conductor\ - \ and notifies musicians of conductor instructions. MASD is currently verified\ - \ by evaluating the time between sending beats or conductor information and this\ - \ information being rendered for the musician. Future work includes user testing\ - \ of this system.\nThere are three major components to the MASD system. These\ - \ components are Score Distribution, Score Rendering and Information Distribution.\ - \ Score Distribution passes score information to clients and is facilitated by\ - \ the Internet Communication Engine (ICE). Score Rendering uses the GUIDO Library\ - \ to display the musical score. Information Distribution uses ICE and the IceStorm\ - \ service to pass beat and instruction information to musicians." - address: 'Ann Arbor, Michigan' - author: Nathan Magnus and David Gerhard - bibtex: "@inproceedings{Magnus2012,\n abstract = {The purpose of the Musician Assistance\ - \ and Score Distribution (MASD) system is to assist novice musicians with playing\ - \ in an orchestra, concert band, choir or other musical ensemble. MASD helps novice\ - \ musicians in three ways. It removes the confusion that results from page turns,\ - \ aides a musician's return to the proper location in the music score after the\ - \ looking at the conductor and notifies musicians of conductor instructions. MASD\ - \ is currently verified by evaluating the time between sending beats or conductor\ - \ information and this information being rendered for the musician. Future work\ - \ includes user testing of this system.\nThere are three major components to the\ - \ MASD system. These components are Score Distribution, Score Rendering and Information\ - \ Distribution. 
Score Distribution passes score information to clients and is\ - \ facilitated by the Internet Communication Engine (ICE). Score Rendering uses\ - \ the GUIDO Library to display the musical score. Information Distribution uses\ - \ ICE and the IceStorm service to pass beat and instruction information to musicians.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Nathan Magnus and David Gerhard},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178337},\n issn = {2220-4806},\n\ - \ keywords = {score distribution, score-following, score rendering, musician assistance},\n\ - \ publisher = {University of Michigan},\n title = {Musician Assistance and Score\ - \ Distribution (MASD)},\n url = {http://www.nime.org/proceedings/2012/nime2012_237.pdf},\n\ - \ year = {2012}\n}\n" + ID: jhe2015 + abstract: 'Motion-based musical interfaces are ubiquitous. With the plethora of + sensing solutions and the possibility of developing custom designs, it is important + that the new musical interface has the capability to perform any number of tasks. + This paper presents the theoretical framework for defining, designing, and evaluation + process of a physical gesture acquisition for Guqin performance. The framework + is based on an iterative design process, and draws upon the knowledge in Guqin + performance to develop a system to determine the interaction between a Guqin player + and the computer. This paper emphasizes the definition, conception, and evaluation + of the acquisition system.' + address: 'Baton Rouge, Louisiana, USA' + author: Jingyin He and Ajay Kapur and Dale Carnegie + bibtex: "@inproceedings{jhe2015,\n abstract = {Motion-based musical interfaces are\ + \ ubiquitous. With the plethora of sensing solutions and the possibility of developing\ + \ custom designs, it is important that the new musical interface has the capability\ + \ to perform any number of tasks. 
This paper presents the theoretical framework\ + \ for defining, designing, and evaluation process of a physical gesture acquisition\ + \ for Guqin performance. The framework is based on an iterative design process,\ + \ and draws upon the knowledge in Guqin performance to develop a system to determine\ + \ the interaction between a Guqin player and the computer. This paper emphasizes\ + \ the definition, conception, and evaluation of the acquisition system.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Jingyin He and Ajay Kapur and Dale\ + \ Carnegie},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179088},\n editor\ + \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ + \ pages = {187--190},\n publisher = {Louisiana State University},\n title = {Developing\ + \ A Physical Gesture Acquisition System for Guqin Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_184.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178337 + doi: 10.5281/zenodo.1179088 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'score distribution, score-following, score rendering, musician assistance' - publisher: University of Michigan - title: Musician Assistance and Score Distribution (MASD) - url: http://www.nime.org/proceedings/2012/nime2012_237.pdf - year: 2012 + month: May + pages: 187--190 + publisher: Louisiana State University + title: Developing A Physical Gesture Acquisition System for Guqin Performance + url: http://www.nime.org/proceedings/2015/nime2015_184.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Tanaka2012 - abstract: 'Mobile devices represent a growing research field within NIME, and a - growing area for commercial music software. 
They present unique design challenges - and opportunities, which are yet to be fully explored and exploited. In this paper, - we propose using a survey method combined with qualitative analysis to investigate - the way in which people use mobiles musically. We subsequently present as an area - of future research our own PDplayer, which provides a completely self contained - end application in the mobile device, potentially making the mobile a more viable - and expressive tool for musicians.' - address: 'Ann Arbor, Michigan' - author: Atau Tanaka and Adam Parkinson and Zack Settel and Koray Tahiroglu - bibtex: "@inproceedings{Tanaka2012,\n abstract = {Mobile devices represent a growing\ - \ research field within NIME, and a growing area for commercial music software.\ - \ They present unique design challenges and opportunities, which are yet to be\ - \ fully explored and exploited. In this paper, we propose using a survey method\ - \ combined with qualitative analysis to investigate the way in which people use\ - \ mobiles musically. 
We subsequently present as an area of future research our\ - \ own PDplayer, which provides a completely self contained end application in\ - \ the mobile device, potentially making the mobile a more viable and expressive\ - \ tool for musicians.},\n address = {Ann Arbor, Michigan},\n author = {Atau Tanaka\ - \ and Adam Parkinson and Zack Settel and Koray Tahiroglu},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178431},\n issn = {2220-4806},\n keywords = {NIME, Mobile\ - \ Music, Pure Data},\n publisher = {University of Michigan},\n title = {A Survey\ - \ and Thematic Analysis Approach as Input to the Design of Mobile Music GUIs},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_240.pdf},\n year = {2012}\n\ + ID: rgrahamb2015 + abstract: 'Multichannel (or divided) audio pickups are becoming increasingly ubiquitous + in electric guitar and computer music communities. These systems allow performers + to access signals for each string of their instrument independently and concurrently + in real-time creative practice. This paper presents an open-source audio breakout + circuit that provides independent audio outputs per string of any chordophone + (stringed instrument) that is fitted with a multichannel audio pickup system. + The following sections include a brief historical contextualization and discussion + on the significance of multichannel audio technology in instrumental guitar music, + an overview of our proposed impedance matching circuit for piezoelectric-based + audio pickups, and a presentation of a new open-source PCB design (SEPTAR V2) + that includes a mountable 13-pin DIN connection to improve compatibility with + commercial multichannel pickup systems. This paper will also include a short summary + of the potential creative applications and perceptual implications of this multichannel + technology when used in creative practice.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Richard Graham and John Harding + bibtex: "@inproceedings{rgrahamb2015,\n abstract = {Multichannel (or divided) audio\ + \ pickups are becoming increasingly ubiquitous in electric guitar and computer\ + \ music communities. These systems allow performers to access signals for each\ + \ string of their instrument independently and concurrently in real-time creative\ + \ practice. This paper presents an open-source audio breakout circuit that provides\ + \ independent audio outputs per string of any chordophone (stringed instrument)\ + \ that is fitted with a multichannel audio pickup system. The following sections\ + \ include a brief historical contextualization and discussion on the significance\ + \ of multichannel audio technology in instrumental guitar music, an overview of\ + \ our proposed impedance matching circuit for piezoelectric-based audio pickups,\ + \ and a presentation of a new open-source PCB design (SEPTAR V2) that includes\ + \ a mountable 13-pin DIN connection to improve compatibility with commercial multichannel\ + \ pickup systems. 
This paper will also include a short summary of the potential\ + \ creative applications and perceptual implications of this multichannel technology\ + \ when used in creative practice.},\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Richard Graham and John Harding},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179070},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ + \ = {2220-4806},\n month = {May},\n pages = {241--244},\n publisher = {Louisiana\ + \ State University},\n title = {SEPTAR: Audio Breakout Design for Multichannel\ + \ Guitar},\n url = {http://www.nime.org/proceedings/2015/nime2015_187.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/187/0187-file1.wav},\n urlsuppl2\ + \ = {http://www.nime.org/proceedings/2015/187/0187-file2.wav},\n year = {2015}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178431 + doi: 10.5281/zenodo.1179070 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, Mobile Music, Pure Data' - publisher: University of Michigan - title: A Survey and Thematic Analysis Approach as Input to the Design of Mobile - Music GUIs - url: http://www.nime.org/proceedings/2012/nime2012_240.pdf - year: 2012 + month: May + pages: 241--244 + publisher: Louisiana State University + title: 'SEPTAR: Audio Breakout Design for Multichannel Guitar' + url: http://www.nime.org/proceedings/2015/nime2015_187.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/187/0187-file1.wav + urlsuppl2: http://www.nime.org/proceedings/2015/187/0187-file2.wav + year: 2015 - ENTRYTYPE: inproceedings - ID: Derbinsky2012 - abstract: 'This paper presents a system for mobile percussive collaboration. 
We - show that reinforcement learning can incrementally learn percussive beat patterns - played by humans and supports realtime collaborative performance in the absence - of one or more performers. This work leverages an existing integration between - urMus and Soar and addresses multiple challenges involved in the deployment of - machine-learning algorithms for mobile music expression, including tradeoffs between - learning speed & quality; interface design for human collaborators; and real-time - performance and improvisation.' - address: 'Ann Arbor, Michigan' - author: Nate Derbinsky and Georg Essl - bibtex: "@inproceedings{Derbinsky2012,\n abstract = {This paper presents a system\ - \ for mobile percussive collaboration. We show that reinforcement learning can\ - \ incrementally learn percussive beat patterns played by humans and supports realtime\ - \ collaborative performance in the absence of one or more performers. This work\ - \ leverages an existing integration between urMus and Soar and addresses multiple\ - \ challenges involved in the deployment of machine-learning algorithms for mobile\ - \ music expression, including tradeoffs between learning speed & quality; interface\ - \ design for human collaborators; and real-time performance and improvisation.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Nate Derbinsky and Georg Essl},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178243},\n issn = {2220-4806},\n\ - \ keywords = {Mobile music, machine learning, cognitive architecture},\n publisher\ - \ = {University of Michigan},\n title = {Exploring Reinforcement Learning for\ - \ Mobile Percussive Collaboration},\n url = {http://www.nime.org/proceedings/2012/nime2012_241.pdf},\n\ - \ year = {2012}\n}\n" + ID: fberthaut2015 + abstract: 'We present Reflets, a mixed-reality environment for musical performances + that allows for freely displaying virtual content 
on stage, such as 3D virtual + musical interfaces or visual augmentations of instruments and performers. It relies + on spectators and performers revealing virtual objects by slicing through them + with body parts or objects, and on planar slightly reflective transparent panels + that combine the stage and audience spaces. In this paper, we describe the approach + and implementation challenges of Reflets. We then demonstrate that it matches + the requirements of musical performances. It allows for placing virtual content + anywhere on large stages, even overlapping with physical elements and provides + a consistent rendering of this content for large numbers of spectators. It also + preserves non-verbal communication between the audience and the performers, and + is inherently engaging for the spectators. We finally show that Reflets opens + musical performance opportunities such as augmented interaction between musicians + and novel techniques for 3D sound shapes manipulation.' + address: 'Baton Rouge, Louisiana, USA' + author: Florent Berthaut and Diego Martinez and Martin Hachet and Sriram Subramanian + bibtex: "@inproceedings{fberthaut2015,\n abstract = {We present Reflets, a mixed-reality\ + \ environment for musical performances that allows for freely displaying virtual\ + \ content on stage, such as 3D virtual musical interfaces or visual augmentations\ + \ of instruments and performers. 
It relies on spectators and performers revealing\ + \ virtual objects by slicing through them with body parts or objects, and on planar\ + \ slightly reflective transparent panels that combine the stage and audience spaces.\ + \ In this paper, we describe the approach and implementation challenges of Reflets.\ + \ We then demonstrate that it matches the requirements of musical performances.\ + \ It allows for placing virtual content anywhere on large stages, even overlapping\ + \ with physical elements and provides a consistent rendering of this content for\ + \ large numbers of spectators. It also preserves non-verbal communication between\ + \ the audience and the performers, and is inherently engaging for the spectators.\ + \ We finally show that Reflets opens musical performance opportunities such as\ + \ augmented interaction between musicians and novel techniques for 3D sound shapes\ + \ manipulation.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Florent\ + \ Berthaut and Diego Martinez and Martin Hachet and Sriram Subramanian},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179028},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {116--120},\n\ + \ publisher = {Louisiana State University},\n title = {Reflets: Combining and\ + \ Revealing Spaces for Musical Performances},\n url = {http://www.nime.org/proceedings/2015/nime2015_190.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/190/0190-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178243 + doi: 10.5281/zenodo.1179028 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Mobile music, machine learning, cognitive architecture' - publisher: University of Michigan - title: Exploring Reinforcement Learning for Mobile 
Percussive Collaboration - url: http://www.nime.org/proceedings/2012/nime2012_241.pdf - year: 2012 - - -- ENTRYTYPE: inproceedings - ID: Tache2012 - abstract: 'Force-feedback and physical modeling technologies now allow to achieve - the same kind of relation with virtual instruments as with acoustic instruments, - but the design of such elaborate models needs guidelines based on the study of - the human sensory-motor system and behaviour. This article presents a qualitative - study of a simulated instrumental interaction in the case of the virtual bowed - string, using both waveguide and mass-interaction models. Subjects were invited - to explore the possibilities of the simulations and to express themselves verbally - at the same time, allowing us to identify key qualities of the proposed systems - that determine the construction of an intimate and rich relationship with the - users.' - address: 'Ann Arbor, Michigan' - author: Olivier Tache and Stephen Sinclair and Jean-Loup Florens and Marcelo Wanderley - bibtex: "@inproceedings{Tache2012,\n abstract = {Force-feedback and physical modeling\ - \ technologies now allow to achieve the same kind of relation with virtual instruments\ - \ as with acoustic instruments, but the design of such elaborate models needs\ - \ guidelines based on the study of the human sensory-motor system and behaviour.\ - \ This article presents a qualitative study of a simulated instrumental interaction\ - \ in the case of the virtual bowed string, using both waveguide and mass-interaction\ - \ models. 
Subjects were invited to explore the possibilities of the simulations\ - \ and to express themselves verbally at the same time, allowing us to identify\ - \ key qualities of the proposed systems that determine the construction of an\ - \ intimate and rich relationship with the users.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Olivier Tache and Stephen Sinclair and Jean-Loup Florens and Marcelo\ - \ Wanderley},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178429},\n issn\ - \ = {2220-4806},\n keywords = {Instrumental interaction, presence, force-feedback,\ - \ physical modeling, simulation, haptics, bowed string.},\n publisher = {University\ - \ of Michigan},\n title = {Exploring audio and tactile qualities of instrumentality\ - \ with bowed string simulations},\n url = {http://www.nime.org/proceedings/2012/nime2012_243.pdf},\n\ - \ year = {2012}\n}\n" + month: May + pages: 116--120 + publisher: Louisiana State University + title: 'Reflets: Combining and Revealing Spaces for Musical Performances' + url: http://www.nime.org/proceedings/2015/nime2015_190.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/190/0190-file1.mp4 + year: 2015 + + +- ENTRYTYPE: inproceedings + ID: slui2015 + abstract: 'The multi-touch music table is a novel tabletop tangible interface for + expressive musical performance. User touches the picture projected on the table + glass surface to perform music. User can click, drag or use various multi-touch + gestures with fingers to perform music expressively. The picture color, luminosity, + size, finger gesture and pressure determine the music output. The table detects + up to 10 finger touches with their touch pressure. We use a glass, a wood stand, + a mini projector, a web camera and a computer to construct this music table. Hence + this table is highly customizable. 
The table generates music via a re-interpretation + of the artistic components of pictures. It is a cross modal inspiration of music + from visual art on a tangible interface. ' + address: 'Baton Rouge, Louisiana, USA' + author: Simon Lui + bibtex: "@inproceedings{slui2015,\n abstract = {The multi-touch music table is a\ + \ novel tabletop tangible interface for expressive musical performance. User touches\ + \ the picture projected on the table glass surface to perform music. User can\ + \ click, drag or use various multi-touch gestures with fingers to perform music\ + \ expressively. The picture color, luminosity, size, finger gesture and pressure\ + \ determine the music output. The table detects up to 10 finger touches with their\ + \ touch pressure. We use a glass, a wood stand, a mini projector, a web camera\ + \ and a computer to construct this music table. Hence this table is highly customizable.\ + \ The table generates music via a re-interpretation of the artistic components\ + \ of pictures. It is a cross modal inspiration of music from visual art on a tangible\ + \ interface. 
},\n address = {Baton Rouge, Louisiana, USA},\n author = {Simon Lui},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179122},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {374--377},\n\ + \ publisher = {Louisiana State University},\n title = {Generate expressive music\ + \ from picture with a handmade multi-touch music table},\n url = {http://www.nime.org/proceedings/2015/nime2015_191.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178429 + doi: 10.5281/zenodo.1179122 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Instrumental interaction, presence, force-feedback, physical modeling, - simulation, haptics, bowed string.' - publisher: University of Michigan - title: Exploring audio and tactile qualities of instrumentality with bowed string - simulations - url: http://www.nime.org/proceedings/2012/nime2012_243.pdf - year: 2012 + month: May + pages: 374--377 + publisher: Louisiana State University + title: Generate expressive music from picture with a handmade multi-touch music + table + url: http://www.nime.org/proceedings/2015/nime2015_191.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Leeuw2012a - abstract: "This position paper likes to stress the role and importance of performance\ - \ based education in NIME like subjects. It describes the `klankontwerp' learning\ - \ line at the `school of the arts Utrecht' in its department Music Technology.\n\ - Our educational system also reflects the way that we could treat performance in\ - \ the NIME community as a whole. The importance of performing with our instruments\ - \ other then in the form of a mere demonstration should get more emphasis." 
- address: 'Ann Arbor, Michigan' - author: Hans Leeuw and Jorrit Tamminga - bibtex: "@inproceedings{Leeuw2012a,\n abstract = {This position paper likes to stress\ - \ the role and importance of performance based education in NIME like subjects.\ - \ It describes the `klankontwerp' learning line at the `school of the arts Utrecht'\ - \ in its department Music Technology.\nOur educational system also reflects the\ - \ way that we could treat performance in the NIME community as a whole. The importance\ - \ of performing with our instruments other then in the form of a mere demonstration\ - \ should get more emphasis.},\n address = {Ann Arbor, Michigan},\n author = {Hans\ - \ Leeuw and Jorrit Tamminga},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178321},\n\ - \ issn = {2220-4806},\n keywords = {NIME, education, position paper, live electronics,\ - \ performance},\n publisher = {University of Michigan},\n title = {{NIME} Education\ - \ at the {HKU}, Emphasizing performance},\n url = {http://www.nime.org/proceedings/2012/nime2012_247.pdf},\n\ - \ year = {2012}\n}\n" + ID: swaite2015 + abstract: 'This paper discusses the use of typed text as a real-time input for interactive + performance systems. A brief review of the literature discusses text-based generative + systems, links between typing and playing percussion instruments and the use of + typing gestures in contemporary performance practice. The paper then documents + the author''s audio-visual system that is driven by the typing of text/lyrics + in real-time. It is argued that the system promotes the sensation of liveness + through clear, perceptible links between the performer''s gestures, the system''s + audio outputs and the its visual outputs. The system also provides a novel approach + to the use of generative techniques in the composition and live performance of + songs. 
Future developments would include the use of dynamic text effects linked + to sound generation and greater interaction between human performer and the visuals. ' + address: 'Baton Rouge, Louisiana, USA' + author: Si Waite + bibtex: "@inproceedings{swaite2015,\n abstract = {This paper discusses the use of\ + \ typed text as a real-time input for interactive performance systems. A brief\ + \ review of the literature discusses text-based generative systems, links between\ + \ typing and playing percussion instruments and the use of typing gestures in\ + \ contemporary performance practice. The paper then documents the author's audio-visual\ + \ system that is driven by the typing of text/lyrics in real-time. It is argued\ + \ that the system promotes the sensation of liveness through clear, perceptible\ + \ links between the performer's gestures, the system's audio outputs and the its\ + \ visual outputs. The system also provides a novel approach to the use of generative\ + \ techniques in the composition and live performance of songs. Future developments\ + \ would include the use of dynamic text effects linked to sound generation and\ + \ greater interaction between human performer and the visuals. 
},\n address =\ + \ {Baton Rouge, Louisiana, USA},\n author = {Si Waite},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179192},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {168--169},\n publisher = {Louisiana\ + \ State University},\n title = {Reimagining the Computer Keyboard as a Musical\ + \ Interface},\n url = {http://www.nime.org/proceedings/2015/nime2015_193.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/193/0193-file1.mov},\n urlsuppl2\ + \ = {http://www.nime.org/proceedings/2015/193/0193-file2.mp4},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178321 + doi: 10.5281/zenodo.1179192 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, education, position paper, live electronics, performance' - publisher: University of Michigan - title: 'NIME Education at the HKU, Emphasizing performance' - url: http://www.nime.org/proceedings/2012/nime2012_247.pdf - year: 2012 + month: May + pages: 168--169 + publisher: Louisiana State University + title: Reimagining the Computer Keyboard as a Musical Interface + url: http://www.nime.org/proceedings/2015/nime2015_193.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/193/0193-file1.mov + urlsuppl2: http://www.nime.org/proceedings/2015/193/0193-file2.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Gillian2012 - abstract: 'This paper presents Digito, a gesturally controlled virtual musical instrument. - Digito is controlled through a number of intricate hand gestures, providing both - discrete and continuous control of Digito''s sound engine; with the fine-grain - hand gestures captured by a 3D depth sensor and recognized using computer vision - and machine learning algorithms. 
We describe the design and initial iterative - development of Digito, the hand and finger tracking algorithms and gesture recognition - algorithms that drive the system, and report the insights gained during the initial - development cycles and user testing of this gesturally controlled virtual musical - instrument.' - address: 'Ann Arbor, Michigan' - author: Nicholas Gillian and Joseph A. Paradiso - bibtex: "@inproceedings{Gillian2012,\n abstract = {This paper presents Digito, a\ - \ gesturally controlled virtual musical instrument. Digito is controlled through\ - \ a number of intricate hand gestures, providing both discrete and continuous\ - \ control of Digito's sound engine; with the fine-grain hand gestures captured\ - \ by a 3D depth sensor and recognized using computer vision and machine learning\ - \ algorithms. We describe the design and initial iterative development of Digito,\ - \ the hand and finger tracking algorithms and gesture recognition algorithms that\ - \ drive the system, and report the insights gained during the initial development\ - \ cycles and user testing of this gesturally controlled virtual musical instrument.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Nicholas Gillian and Joseph A.\ - \ Paradiso},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178263},\n issn\ - \ = {2220-4806},\n keywords = {Gesture Recognition, Virtual Musical Instrument},\n\ - \ publisher = {University of Michigan},\n title = {Digito: A Fine-Grain Gesturally\ - \ Controlled Virtual Musical Instrument},\n url = {http://www.nime.org/proceedings/2012/nime2012_248.pdf},\n\ - \ year = {2012}\n}\n" + ID: mhirabayashi2015 + abstract: 'We performed the musical work ``Sense of Space'''' which uses the sound + ID with high frequency sound DTMF. 
The IDs are embedded into the music, audiences'' + smartphones and tablets at the venue reacted to the IDs and then they play music + pieces. We considered the possibility for novel music experiences brought about + through the participation of audiences and spreading sound at the music venue.' + address: 'Baton Rouge, Louisiana, USA' + author: Masami Hirabayashi and Kazuomi Eshima + bibtex: "@inproceedings{mhirabayashi2015,\n abstract = {We performed the musical\ + \ work ``Sense of Space'' which uses the sound ID with high frequency sound DTMF.\ + \ The IDs are embedded into the music, audiences' smartphones and tablets at the\ + \ venue reacted to the IDs and then they play music pieces. We considered the\ + \ possibility for novel music experiences brought about through the participation\ + \ of audiences and spreading sound at the music venue.},\n address = {Baton Rouge,\ + \ Louisiana, USA},\n author = {Masami Hirabayashi and Kazuomi Eshima},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179092},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {58--60},\n\ + \ publisher = {Louisiana State University},\n title = {Sense of Space: The Audience\ + \ Participation Music Performance with High-Frequency Sound ID},\n url = {http://www.nime.org/proceedings/2015/nime2015_195.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/195/0195-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178263 + doi: 10.5281/zenodo.1179092 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Gesture Recognition, Virtual Musical Instrument' - publisher: University of Michigan - title: 'Digito: A Fine-Grain Gesturally Controlled Virtual Musical Instrument' - url: http://www.nime.org/proceedings/2012/nime2012_248.pdf 
- year: 2012 + month: May + pages: 58--60 + publisher: Louisiana State University + title: 'Sense of Space: The Audience Participation Music Performance with High-Frequency + Sound ID' + url: http://www.nime.org/proceedings/2015/nime2015_195.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/195/0195-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Lehrman2012 - abstract: "George Antheil's notorious Ballet mécanique (1924-1925) was originally\ - \ scored for percussion ensemble, sound effects, and 16 pianolas. He was never\ - \ able to perform the piece with those forces, however, due to his inability to\ - \ synchronize multiple pianolas. Thus all performances of the piece in his lifetime,\ - \ and for decades after, were done with a single pianola or player piano.*\nThe\ - \ author traces the origin of the concept of synchronizing multiple pianolas,\ - \ and explains the attendant technological issues. He examines attempts to synchronize\ - \ mechanical pianos and other time-based devices at the time of Ballet mécanique's\ - \ composition, and suggests that Antheil's vision for his piece was not as farfetched\ - \ as has long been thought." - address: 'Ann Arbor, Michigan' - author: Paul Lehrman - bibtex: "@inproceedings{Lehrman2012,\n abstract = {George Antheil's notorious Ballet\ - \ m{\\'e}canique (1924-1925) was originally scored for percussion ensemble, sound\ - \ effects, and 16 pianolas. He was never able to perform the piece with those\ - \ forces, however, due to his inability to synchronize multiple pianolas. Thus\ - \ all performances of the piece in his lifetime, and for decades after, were done\ - \ with a single pianola or player piano.*\nThe author traces the origin of the\ - \ concept of synchronizing multiple pianolas, and explains the attendant technological\ - \ issues. 
He examines attempts to synchronize mechanical pianos and other time-based\ - \ devices at the time of Ballet m{\\'e}canique's composition, and suggests that\ - \ Antheil's vision for his piece was not as farfetched as has long been thought.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Paul Lehrman},\n booktitle = {Proceedings\ + ID: tshaw2015 + abstract: 'In this paper we present Fields, a sound diffusion performance implemented + with web technologies that run on the mobile devices of audience members. Both + a technical system and bespoke composition, Fields allows for a range of sonic + diffusions to occur, and therefore has the potential to open up new paradigms + for spatialised music and media performances. The project introduces how handheld + technology used as a collective array of speakers controlled live by a centralized + performer can create alternative types of participation within musical performance. + Fields not only offers a new technological approach to sound diffusion, it also + provides an alternative way for audiences to participate in live events, and opens + up unique forms of engagement within sonic media contexts. ' + address: 'Baton Rouge, Louisiana, USA' + author: Tim Shaw and Sébastien Piquemal and John Bowers + bibtex: "@inproceedings{tshaw2015,\n abstract = {In this paper we present Fields,\ + \ a sound diffusion performance implemented with web technologies that run on\ + \ the mobile devices of audience members. Both a technical system and bespoke\ + \ composition, Fields allows for a range of sonic diffusions to occur, and therefore\ + \ has the potential to open up new paradigms for spatialised music and media performances.\ + \ The project introduces how handheld technology used as a collective array of\ + \ speakers controlled live by a centralized performer can create alternative types\ + \ of participation within musical performance. 
Fields not only offers a new technological\ + \ approach to sound diffusion, it also provides an alternative way for audiences\ + \ to participate in live events, and opens up unique forms of engagement within\ + \ sonic media contexts. },\n address = {Baton Rouge, Louisiana, USA},\n author\ + \ = {Tim Shaw and S\\'ebastien Piquemal and John Bowers},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178323},\n issn = {2220-4806},\n keywords = {Antheil,\ - \ Stravinsky, player piano, pianola, mechanical instruments, synchronization},\n\ - \ publisher = {University of Michigan},\n title = {Multiple Pianolas in Antheil's\ - \ Ballet m{\\'e}canique},\n url = {http://www.nime.org/proceedings/2012/nime2012_25.pdf},\n\ - \ year = {2012}\n}\n" + \ doi = {10.5281/zenodo.1179174},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {281--284},\n publisher = {Louisiana\ + \ State University},\n title = {Fields: An Exploration into the use of Mobile\ + \ Devices as a Medium for Sound Diffusion},\n url = {http://www.nime.org/proceedings/2015/nime2015_196.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178323 + doi: 10.5281/zenodo.1179174 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Antheil, Stravinsky, player piano, pianola, mechanical instruments, synchronization' - publisher: University of Michigan - title: Multiple Pianolas in Antheil's Ballet mécanique - url: http://www.nime.org/proceedings/2012/nime2012_25.pdf - year: 2012 + month: May + pages: 281--284 + publisher: Louisiana State University + title: 'Fields: An Exploration into the use of Mobile Devices as a Medium for Sound + Diffusion' + url: http://www.nime.org/proceedings/2015/nime2015_196.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Fyans2012 - 
abstract: 'A study is presented examining the participatory design of digital musical - interactions. The study takes into consideration the entire ecology of digital - musical interactions including the designer, performer and spectator. A new instrument - is developed through iterative participatory design involving a group of performers. - Across the study the evolution of creative practice and skill development in an - emerging community of practice is examined and a spectator study addresses the - cognition of performance and the perception of skill with the instrument. Observations - are presented regarding the cognition of a novel interaction and evolving notions - of skill. The design process of digital musical interactions is reflected on focusing - on involvement of the spectator in design contexts.' - address: 'Ann Arbor, Michigan' - author: A. Cavan Fyans and Adnan Marquez-Borbon and Paul Stapleton and Michael Gurevich - bibtex: "@inproceedings{Fyans2012,\n abstract = {A study is presented examining\ - \ the participatory design of digital musical interactions. The study takes into\ - \ consideration the entire ecology of digital musical interactions including the\ - \ designer, performer and spectator. A new instrument is developed through iterative\ - \ participatory design involving a group of performers. Across the study the evolution\ - \ of creative practice and skill development in an emerging community of practice\ - \ is examined and a spectator study addresses the cognition of performance and\ - \ the perception of skill with the instrument. Observations are presented regarding\ - \ the cognition of a novel interaction and evolving notions of skill. The design\ - \ process of digital musical interactions is reflected on focusing on involvement\ - \ of the spectator in design contexts.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {A. 
Cavan Fyans and Adnan Marquez-Borbon and Paul Stapleton and Michael\ - \ Gurevich},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178257},\n issn\ - \ = {2220-4806},\n keywords = {participatory design, DMIs, skill, cognition, spectator},\n\ - \ publisher = {University of Michigan},\n title = {Ecological considerations for\ - \ participatory design of DMIs},\n url = {http://www.nime.org/proceedings/2012/nime2012_253.pdf},\n\ - \ year = {2012}\n}\n" + ID: rdannenberg2015 + abstract: "Optical music recognition (OMR) is the task of recognizing images of\ + \ musical scores. In this paper, improved algorithms for the fi\nrst steps of\ + \ optical music recognition were developed, which facilitated bulk annotation\ + \ of scanned scores for use in an interactive score display system. Creating an\ + \ initial annotation by OMR and verifying by hand substantially reduced the manual\ + \ eff\nort required to process scanned scores to be used in a live performance\ + \ setting." + address: 'Baton Rouge, Louisiana, USA' + author: Dan Ringwalt and Roger Dannenberg and Andrew Russell + bibtex: "@inproceedings{rdannenberg2015,\n abstract = {Optical music recognition\ + \ (OMR) is the task of recognizing images of musical scores. In this paper, improved\ + \ algorithms for the fi\nrst steps of optical music recognition were developed,\ + \ which facilitated bulk annotation of scanned scores for use in an interactive\ + \ score display system. 
Creating an initial annotation by OMR and verifying by\ + \ hand substantially reduced the manual eff\nort required to process scanned scores\ + \ to be used in a live performance setting.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Dan Ringwalt and Roger Dannenberg and Andrew Russell},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179162},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {95--98},\n\ + \ publisher = {Louisiana State University},\n title = {Optical Music Recognition\ + \ for Interactive Score Display},\n url = {http://www.nime.org/proceedings/2015/nime2015_198.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178257 + doi: 10.5281/zenodo.1179162 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'participatory design, DMIs, skill, cognition, spectator' - publisher: University of Michigan - title: Ecological considerations for participatory design of DMIs - url: http://www.nime.org/proceedings/2012/nime2012_253.pdf - year: 2012 + month: May + pages: 95--98 + publisher: Louisiana State University + title: Optical Music Recognition for Interactive Score Display + url: http://www.nime.org/proceedings/2015/nime2015_198.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Jaimovich2012 - abstract: 'In order to further understand our emotional reaction to music, a museum-based - installation was designed to collect physiological and self-report data from people - listening to music. This demo will describe the technical implementation of this - installation as a tool for collecting large samples of data in public spaces. 
- The Emotion in Motion terminal is built upon a standard desktop computer running - Max/MSP and using sensors that measure physiological indicators of emotion that - are connected to an Arduino. The terminal has been installed in museums and galleries - in Europe and the USA, helping create the largest database of physiology and self-report - data while listening to music.' - address: 'Ann Arbor, Michigan' - author: Javier Jaimovich and Miguel Ortiz and Niall Coghlan and R. Benjamin Knapp - bibtex: "@inproceedings{Jaimovich2012,\n abstract = {In order to further understand\ - \ our emotional reaction to music, a museum-based installation was designed to\ - \ collect physiological and self-report data from people listening to music. This\ - \ demo will describe the technical implementation of this installation as a tool\ - \ for collecting large samples of data in public spaces. The Emotion in Motion\ - \ terminal is built upon a standard desktop computer running Max/MSP and using\ - \ sensors that measure physiological indicators of emotion that are connected\ - \ to an Arduino. The terminal has been installed in museums and galleries in Europe\ - \ and the USA, helping create the largest database of physiology and self-report\ - \ data while listening to music.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Javier Jaimovich and Miguel Ortiz and Niall Coghlan and R. Benjamin Knapp},\n\ + ID: amomeni2015 + abstract: 'This paper documents the development of Caress, an electroacoustic percussive + instrument that blends drumming and audio synthesis in a small and portable form + factor. Caress is an octophonic miniature drum-set for the fingertips that employs + eight acoustically isolated piezo-microphones, coupled with eight independent + signal chains that excite a unique resonance model with audio from the piezos. 
+ The hardware is designed to be robust and quickly reproducible (parametric design + and machine fabrication), while the software aims to be light-weight (low-CPU + requirements) and portable (multiple platforms, multiple computing architectures). + Above all, the instrument aims for the level of control intimacy and tactile expressivity + achieved by traditional acoustic percussive instruments, while leveraging real-time + software synthesis and control to expand the sonic palette. This instrument as + well as this document are dedicated to the memory of the late David Wessel, pioneering + composer, performer, researcher, mentor and all-around Yoda of electroacoustic + music. ' + address: 'Baton Rouge, Louisiana, USA' + author: Ali Momeni + bibtex: "@inproceedings{amomeni2015,\n abstract = {This paper documents the development\ + \ of Caress, an electroacoustic percussive instrument that blends drumming and\ + \ audio synthesis in a small and portable form factor. Caress is an octophonic\ + \ miniature drum-set for the fingertips that employs eight acoustically isolated\ + \ piezo-microphones, coupled with eight independent signal chains that excite\ + \ a unique resonance model with audio from the piezos. The hardware is designed\ + \ to be robust and quickly reproducible (parametric design and machine fabrication),\ + \ while the software aims to be light-weight (low-CPU requirements) and portable\ + \ (multiple platforms, multiple computing architectures). Above all, the instrument\ + \ aims for the level of control intimacy and tactile expressivity achieved by\ + \ traditional acoustic percussive instruments, while leveraging real-time software\ + \ synthesis and control to expand the sonic palette. This instrument as well as\ + \ this document are dedicated to the memory of the late David Wessel, pioneering\ + \ composer, performer, researcher, mentor and all-around Yoda of electroacoustic\ + \ music. 
},\n address = {Baton Rouge, Louisiana, USA},\n author = {Ali Momeni},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178295},\n issn = {2220-4806},\n\ - \ keywords = {Biosignals, EDA, SC, GSR, HR, POX, Self-Report, Database, Physiological\ - \ Signals, Max/MSP, FTM, SAM, GEMS},\n publisher = {University of Michigan},\n\ - \ title = {The Emotion in Motion Experiment: Using an Interactive Installation\ - \ as a Means for Understanding Emotional Response to Music},\n url = {http://www.nime.org/proceedings/2012/nime2012_254.pdf},\n\ - \ year = {2012}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1179142},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {245--250},\n\ + \ publisher = {Louisiana State University},\n title = {Caress: An Electro-acoustic\ + \ Percussive Instrument for Caressing Sounds},\n url = {http://www.nime.org/proceedings/2015/nime2015_199.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178295 + doi: 10.5281/zenodo.1179142 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Biosignals, EDA, SC, GSR, HR, POX, Self-Report, Database, Physiological - Signals, Max/MSP, FTM, SAM, GEMS' - publisher: University of Michigan - title: 'The Emotion in Motion Experiment: Using an Interactive Installation as a - Means for Understanding Emotional Response to Music' - url: http://www.nime.org/proceedings/2012/nime2012_254.pdf - year: 2012 + month: May + pages: 245--250 + publisher: Louisiana State University + title: 'Caress: An Electro-acoustic Percussive Instrument for Caressing Sounds' + url: http://www.nime.org/proceedings/2015/nime2015_199.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Grosshauser2012 - abstract: 'From a technical point of view, instrumental music mak-ing involves audible, - 
visible and hidden playing parameters. Hidden parameters like force, pressure - and fast movements, happening within milliseconds are particularly difficult to - capture. Here, we present data focusing on movement coordination parameters of - the left hand fingers with the bow hand in violinists and between two violinists - in group playing. Data was recorded with different position sensors, a micro camcorder - fixed on a violin and an acceleration sensor placed on the bow. Sensor measurements - were obtained at a high sampling rate, gathering the data with a small mi-crocontroller - unit, connected with a laptop computer. To capture bow''s position, rotation and - angle directly on the bow to string contact point, the micro camcorder was fixed - near the bridge. Main focuses of interest were the changes of the left hand finger, - the temporal synchronization between left hand fingers with the right hand, the - close up view to the bow to string contact point and the contact of the left hand - finger and/or string to the fingerboard. Seven violinists, from beginners to master - class students played scales in different rhythms, speeds and bowings and music - excerpts of free choice while being recorded. One measure-ment with 2 violinists - was made to see the time differences between two musicians while playing together. - For simple integration of a conventional violin into electronic music environments, - left hand sensor data were exemplary converted to MIDI and OSC.' - address: 'Ann Arbor, Michigan' - author: Tobias Grosshauser and Victor Candia and Horst Hildebrand and Gerhard Tröster - bibtex: "@inproceedings{Grosshauser2012,\n abstract = {From a technical point of\ - \ view, instrumental music mak-ing involves audible, visible and hidden playing\ - \ parameters. Hidden parameters like force, pressure and fast movements, happening\ - \ within milliseconds are particularly difficult to capture. 
Here, we present\ - \ data focusing on movement coordination parameters of the left hand fingers with\ - \ the bow hand in violinists and between two violinists in group playing. Data\ - \ was recorded with different position sensors, a micro camcorder fixed on a violin\ - \ and an acceleration sensor placed on the bow. Sensor measurements were obtained\ - \ at a high sampling rate, gathering the data with a small mi-crocontroller unit,\ - \ connected with a laptop computer. To capture bow's position, rotation and angle\ - \ directly on the bow to string contact point, the micro camcorder was fixed near\ - \ the bridge. Main focuses of interest were the changes of the left hand finger,\ - \ the temporal synchronization between left hand fingers with the right hand,\ - \ the close up view to the bow to string contact point and the contact of the\ - \ left hand finger and/or string to the fingerboard. Seven violinists, from beginners\ - \ to master class students played scales in different rhythms, speeds and bowings\ - \ and music excerpts of free choice while being recorded. One measure-ment with\ - \ 2 violinists was made to see the time differences between two musicians while\ - \ playing together. 
For simple integration of a conventional violin into electronic\ - \ music environments, left hand sensor data were exemplary converted to MIDI and\ - \ OSC.},\n address = {Ann Arbor, Michigan},\n author = {Tobias Grosshauser and\ - \ Victor Candia and Horst Hildebrand and Gerhard Tr{\\''o}ster},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178269},\n issn = {2220-4806},\n keywords = {Strings,\ - \ violin, coordination, left, finger, right, hand},\n publisher = {University\ - \ of Michigan},\n title = {Sensor Based Measurements of Musicians' Synchronization\ - \ Issues},\n url = {http://www.nime.org/proceedings/2012/nime2012_256.pdf},\n\ - \ year = {2012}\n}\n" + ID: rdannenbergb2015 + abstract: 'Human-Computer Music Performance for popular music -- where musical structure + is important, but where musicians often decide on the spur of the moment exactly + what the musical form will be -- presents many challenges to make computer systems + that are flexible and adaptable to human musicians. One particular challenge is + that humans easily follow scores and chord charts, adapt these to new performance + plans, and understand media locations in musical terms (beats and measures), while + computer music systems often use rigid and even numerical representations that + are difficult to work with. We present new formalisms and representations, and + a corresponding implementation, where musical material in various media is synchronized, + where musicians can quickly alter the performance order by specifying (re-)arrangements + of the material, and where interfaces are supported in a natural way by music + notation.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Roger Dannenberg and Andrew Russell + bibtex: "@inproceedings{rdannenbergb2015,\n abstract = {Human-Computer Music Performance\ + \ for popular music -- where musical structure is important, but where musicians\ + \ often decide on the spur of the moment exactly what the musical form will be\ + \ -- presents many challenges to make computer systems that are flexible and adaptable\ + \ to human musicians. One particular challenge is that humans easily follow scores\ + \ and chord charts, adapt these to new performance plans, and understand media\ + \ locations in musical terms (beats and measures), while computer music systems\ + \ often use rigid and even numerical representations that are difficult to work\ + \ with. We present new formalisms and representations, and a corresponding implementation,\ + \ where musical material in various media is synchronized, where musicians can\ + \ quickly alter the performance order by specifying (re-)arrangements of the material,\ + \ and where interfaces are supported in a natural way by music notation.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Roger Dannenberg and Andrew Russell},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179050},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {315--316},\n\ + \ publisher = {Louisiana State University},\n title = {Arrangements: Flexibly\ + \ Adapting Music Data for Live Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_200.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/200/0200-file1.mp4},\n year\ + \ = {2015}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1179050 + editor: Edgar Berdahl and Jesse Allison + issn: 2220-4806 + month: May + pages: 315--316 + 
publisher: Louisiana State University + title: 'Arrangements: Flexibly Adapting Music Data for Live Performance' + url: http://www.nime.org/proceedings/2015/nime2015_200.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/200/0200-file1.mp4 + year: 2015 + + +- ENTRYTYPE: inproceedings + ID: amomenib2015 + abstract: 'This paper documents the development of ml.lib: a set of open-source + tools designed for employing a wide range of machine learning techniques within + two popular real-time programming environments, namely Max and Pure Data. ml.lib + is a cross-platform, lightweight wrapper around Nick Gillian''s Gesture Recognition + Toolkit, a C++ library that includes a wide range of data processing and machine + learning techniques. ml.lib adapts these techniques for real-time use within popular + data-flow IDEs, allowing instrument designers and performers to integrate robust + learning, classification and mapping approaches within their existing workflows. + ml.lib has been carefully de-signed to allow users to experiment with and incorporate + ma-chine learning techniques within an interactive arts context with minimal prior + knowledge. A simple, logical and consistent, scalable interface has been provided + across over sixteen exter-nals in order to maximize learnability and discoverability. + A focus on portability and maintainability has enabled ml.lib to support a range + of computing architectures---including ARM---and operating systems such as Mac + OS, GNU/Linux and Win-dows, making it the most comprehensive machine learning + implementation available for Max and Pure Data.' + address: 'Baton Rouge, Louisiana, USA' + author: Jamie Bullock and Ali Momeni + bibtex: "@inproceedings{amomenib2015,\n abstract = {This paper documents the development\ + \ of ml.lib: a set of open-source tools designed for employing a wide range of\ + \ machine learning techniques within two popular real-time programming environments,\ + \ namely Max and Pure Data. 
ml.lib is a cross-platform, lightweight wrapper around\ + \ Nick Gillian's Gesture Recognition Toolkit, a C++ library that includes a wide\ + \ range of data processing and machine learning techniques. ml.lib adapts these\ + \ techniques for real-time use within popular data-flow IDEs, allowing instrument\ + \ designers and performers to integrate robust learning, classification and mapping\ + \ approaches within their existing workflows. ml.lib has been carefully de-signed\ + \ to allow users to experiment with and incorporate ma-chine learning techniques\ + \ within an interactive arts context with minimal prior knowledge. A simple, logical\ + \ and consistent, scalable interface has been provided across over sixteen exter-nals\ + \ in order to maximize learnability and discoverability. A focus on portability\ + \ and maintainability has enabled ml.lib to support a range of computing architectures---including\ + \ ARM---and operating systems such as Mac OS, GNU/Linux and Win-dows, making it\ + \ the most comprehensive machine learning implementation available for Max and\ + \ Pure Data.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Jamie Bullock\ + \ and Ali Momeni},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179038},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {265--270},\n publisher = {Louisiana State University},\n title\ + \ = {ml.lib: Robust, Cross-platform, Open-source Machine Learning for Max and\ + \ Pure Data},\n url = {http://www.nime.org/proceedings/2015/nime2015_201.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178269 + doi: 10.5281/zenodo.1179038 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Strings, violin, coordination, left, finger, right, hand' - publisher: 
University of Michigan - title: Sensor Based Measurements of Musicians' Synchronization Issues - url: http://www.nime.org/proceedings/2012/nime2012_256.pdf - year: 2012 + month: May + pages: 265--270 + publisher: Louisiana State University + title: 'ml.lib: Robust, Cross-platform, Open-source Machine Learning for Max and + Pure Data' + url: http://www.nime.org/proceedings/2015/nime2015_201.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Bosi2012 - abstract: 'Tangible tabletop musical interfaces allowing for a collabo-rative real-time - interaction in live music performances are one of the promising fields in NIMEs. - At present, this kind of interfaces present at least some of the following charac-teristics - that limit their musical use: latency in the inter-action, and partial or complete - lack of responsiveness to gestures such as tapping, scrubbing or pressing force. - Our current research is exploring ways of improving the quality of interaction - with this kind of interfaces, and in particular with the tangible tabletop instrument - Reactable . In this paper we present a system based on a circular array of me-chanically - intercoupled force sensing resistors used to obtain a low-latency, affordable, - and easily embeddable hardware system able to detect surface impacts and pressures - on the tabletop perimeter. We also consider the option of com-pleting this detected - gestural information with the sound information coming from a contact microphone - attached to the mechanical coupling layer, to control physical modelling synthesis - of percussion instruments.' - address: 'Ann Arbor, Michigan' - author: Mathieu Bosi and Sergi Jordà - bibtex: "@inproceedings{Bosi2012,\n abstract = {Tangible tabletop musical interfaces\ - \ allowing for a collabo-rative real-time interaction in live music performances\ - \ are one of the promising fields in NIMEs. 
At present, this kind of interfaces\ - \ present at least some of the following charac-teristics that limit their musical\ - \ use: latency in the inter-action, and partial or complete lack of responsiveness\ - \ to gestures such as tapping, scrubbing or pressing force. Our current research\ - \ is exploring ways of improving the quality of interaction with this kind of\ - \ interfaces, and in particular with the tangible tabletop instrument Reactable\ - \ . In this paper we present a system based on a circular array of me-chanically\ - \ intercoupled force sensing resistors used to obtain a low-latency, affordable,\ - \ and easily embeddable hardware system able to detect surface impacts and pressures\ - \ on the tabletop perimeter. We also consider the option of com-pleting this detected\ - \ gestural information with the sound information coming from a contact microphone\ - \ attached to the mechanical coupling layer, to control physical modelling synthesis\ - \ of percussion instruments.},\n address = {Ann Arbor, Michigan},\n author = {Mathieu\ - \ Bosi and Sergi Jord{\\`a}},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178217},\n\ - \ issn = {2220-4806},\n keywords = {tangible tabletop interfaces, force sensing\ - \ resistor, mechanical coupling, fast low-noise analog to digital conversion,\ - \ low-latency sensing, micro controller, multimodal systems, complementary sensing.},\n\ - \ publisher = {University of Michigan},\n title = {Towards fast multi-point force\ - \ and hit detection in tabletops using mechanically intercoupled force sensing\ - \ resisors},\n url = {http://www.nime.org/proceedings/2012/nime2012_257.pdf},\n\ - \ year = {2012}\n}\n" + ID: rdannenbergc2015 + abstract: 'Computer music systems can interact with humans at different levels, + including scores, phrases, notes, beats, and gestures. However, most current systems + lack basic musicianship skills. 
As a consequence, the results of human-computer + interaction are often far less musical than the interaction between human musicians. + In this paper, we explore the possibility of learning some basic music performance + skills from rehearsal data. In particular, we consider the piano duet scenario + where two musicians expressively interact with each other. Our work extends previous + automatic accompaniment systems. We have built an artificial pianist that can + automatically improve its ability to sense and coordinate with a human pianist, + learning from rehearsal experience. We describe different machine learning algorithms + to learn musicianship for duet interaction, explore the properties of the learned + models, such as dominant features, limits of validity, and minimal training size, + and claim that a more human-like interaction is achieved.' + address: 'Baton Rouge, Louisiana, USA' + author: Guangyu Xia and Roger Dannenberg + bibtex: "@inproceedings{rdannenbergc2015,\n abstract = {Computer music systems can\ + \ interact with humans at different levels, including scores, phrases, notes,\ + \ beats, and gestures. However, most current systems lack basic musicianship skills.\ + \ As a consequence, the results of human-computer interaction are often far less\ + \ musical than the interaction between human musicians. In this paper, we explore\ + \ the possibility of learning some basic music performance skills from rehearsal\ + \ data. In particular, we consider the piano duet scenario where two musicians\ + \ expressively interact with each other. Our work extends previous automatic accompaniment\ + \ systems. We have built an artificial pianist that can automatically improve\ + \ its ability to sense and coordinate with a human pianist, learning from rehearsal\ + \ experience. 
We describe different machine learning algorithms to learn musicianship\ + \ for duet interaction, explore the properties of the learned models, such as\ + \ dominant features, limits of validity, and minimal training size, and claim\ + \ that a more human-like interaction is achieved.},\n address = {Baton Rouge,\ + \ Louisiana, USA},\n author = {Guangyu Xia and Roger Dannenberg},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179198},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {259--264},\n\ + \ publisher = {Louisiana State University},\n title = {Duet Interaction: Learning\ + \ Musicianship for Automatic Accompaniment},\n url = {http://www.nime.org/proceedings/2015/nime2015_202.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178217 + doi: 10.5281/zenodo.1179198 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'tangible tabletop interfaces, force sensing resistor, mechanical coupling, - fast low-noise analog to digital conversion, low-latency sensing, micro controller, - multimodal systems, complementary sensing.' - publisher: University of Michigan - title: Towards fast multi-point force and hit detection in tabletops using mechanically - intercoupled force sensing resisors - url: http://www.nime.org/proceedings/2012/nime2012_257.pdf - year: 2012 + month: May + pages: 259--264 + publisher: Louisiana State University + title: 'Duet Interaction: Learning Musicianship for Automatic Accompaniment' + url: http://www.nime.org/proceedings/2015/nime2015_202.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Zamorano2012 - abstract: "This paper introduces Simpletones, an interactive sound system that enables\ - \ a sense of musical collaboration for non-musicians. 
Participants can easily\ - \ create simple sound compositions in real time by collaboratively operating physical\ - \ artifacts as sound controllers. The physical configuration of the artifacts\ - \ requires coordinated actions between participants to control sound (thus requiring,\ - \ and emphasizing collaboration).\nSimpletones encourages playful human-to-human\ - \ interaction by introducing a simple interface and a set of basic rules [1].\ - \ This enables novices to focus on the collaborative aspects of making music as\ - \ a group (such as synchronization and taking collective decisions through non-verbal\ - \ communication) to ultimately engage a state of group flow[2].\nThis project\ - \ is relevant to a contemporary discourse on musical expression because it allows\ - \ novices to experience the social aspects of group music making, something that\ - \ is usually reserved only for trained performers [3]." - address: 'Ann Arbor, Michigan' - author: Francisco Zamorano - bibtex: "@inproceedings{Zamorano2012,\n abstract = {This paper introduces Simpletones,\ - \ an interactive sound system that enables a sense of musical collaboration for\ - \ non-musicians. Participants can easily create simple sound compositions in real\ - \ time by collaboratively operating physical artifacts as sound controllers. The\ - \ physical configuration of the artifacts requires coordinated actions between\ - \ participants to control sound (thus requiring, and emphasizing collaboration).\n\ - Simpletones encourages playful human-to-human interaction by introducing a simple\ - \ interface and a set of basic rules [1]. 
This enables novices to focus on the\ - \ collaborative aspects of making music as a group (such as synchronization and\ - \ taking collective decisions through non-verbal communication) to ultimately\ - \ engage a state of group flow[2].\nThis project is relevant to a contemporary\ - \ discourse on musical expression because it allows novices to experience the\ - \ social aspects of group music making, something that is usually reserved only\ - \ for trained performers [3].},\n address = {Ann Arbor, Michigan},\n author =\ - \ {Francisco Zamorano},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178459},\n\ - \ issn = {2220-4806},\n keywords = {Collaboration, Artifacts, Computer Vision,\ - \ Color Tracking, State of Flow.},\n publisher = {University of Michigan},\n title\ - \ = {Simpletones: A System of Collaborative Physical Controllers for Novices},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_258.pdf},\n year = {2012}\n\ - }\n" + ID: jleonard2015 + abstract: 'This paper discusses how haptic devices and physical modelling can be + employed to design and simulate multisensory virtual musical instruments, providing + the musician with joint audio, visual and haptic feedback. After briefly reviewing + some of the main use-cases of haptics in Computer Music, we present GENESIS-RT, + a software and hardware platform dedicated to the design and real-time haptic + playing of virtual musical instruments using mass-interaction physical modelling. + We discuss our approach and report on advancements in modelling various instrument + categories instruments, including physical models of percussion, plucked and bowed + instruments. Finally, we comment on the constraints, challenges and new possibilities + opened by modelling haptic virtual instruments with our platform, and discuss + common points and differences in regards to classical Digital Musical Instruments. 
' + address: 'Baton Rouge, Louisiana, USA' + author: James Leonard and Claude Cadoz + bibtex: "@inproceedings{jleonard2015,\n abstract = {This paper discusses how haptic\ + \ devices and physical modelling can be employed to design and simulate multisensory\ + \ virtual musical instruments, providing the musician with joint audio, visual\ + \ and haptic feedback. After briefly reviewing some of the main use-cases of haptics\ + \ in Computer Music, we present GENESIS-RT, a software and hardware platform dedicated\ + \ to the design and real-time haptic playing of virtual musical instruments using\ + \ mass-interaction physical modelling. We discuss our approach and report on advancements\ + \ in modelling various instrument categories instruments, including physical models\ + \ of percussion, plucked and bowed instruments. Finally, we comment on the constraints,\ + \ challenges and new possibilities opened by modelling haptic virtual instruments\ + \ with our platform, and discuss common points and differences in regards to classical\ + \ Digital Musical Instruments. },\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {James Leonard and Claude Cadoz},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1179116},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ + \ = {2220-4806},\n month = {May},\n pages = {150--155},\n publisher = {Louisiana\ + \ State University},\n title = {Physical Modelling Concepts for a Collection of\ + \ Multisensory Virtual Musical Instruments},\n url = {http://www.nime.org/proceedings/2015/nime2015_203.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178459 + doi: 10.5281/zenodo.1179116 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Collaboration, Artifacts, Computer Vision, Color Tracking, State of Flow.' 
- publisher: University of Michigan - title: 'Simpletones: A System of Collaborative Physical Controllers for Novices' - url: http://www.nime.org/proceedings/2012/nime2012_258.pdf - year: 2012 + month: May + pages: 150--155 + publisher: Louisiana State University + title: Physical Modelling Concepts for a Collection of Multisensory Virtual Musical + Instruments + url: http://www.nime.org/proceedings/2015/nime2015_203.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Dahl2012 - abstract: 'Composing music for ensembles of computer-based instruments, such as - laptop orchestra or mobile phone orchestra, is a multi-faceted and challenging - endeavor whose parameters and criteria for success are ill-defined. In the design - community, tasks with these qualities are known as wicked problems. This paper - frames composing for computer-based ensemble as a design task, shows how Buchanan''s - four domains of design are present in the task, and discusses its wicked properties. - The themes of visibility, risk, and embodiment, as formulated by Klemmer, are - shown to be implicitly present in this design task. Composers are encouraged to - address them explicitly and to take advantage of the practices of prototyping - and iteration.' - address: 'Ann Arbor, Michigan' - author: Luke Dahl - bibtex: "@inproceedings{Dahl2012,\n abstract = {Composing music for ensembles of\ - \ computer-based instruments, such as laptop orchestra or mobile phone orchestra,\ - \ is a multi-faceted and challenging endeavor whose parameters and criteria for\ - \ success are ill-defined. In the design community, tasks with these qualities\ - \ are known as wicked problems. This paper frames composing for computer-based\ - \ ensemble as a design task, shows how Buchanan's four domains of design are present\ - \ in the task, and discusses its wicked properties. The themes of visibility,\ - \ risk, and embodiment, as formulated by Klemmer, are shown to be implicitly present\ - \ in this design task. 
Composers are encouraged to address them explicitly and\ - \ to take advantage of the practices of prototyping and iteration.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Luke Dahl},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178239},\n issn = {2220-4806},\n keywords = {Design,\ - \ laptop orchestra, mobile phone orchestra, instrument design, interaction design,\ - \ composition},\n publisher = {University of Michigan},\n title = {Wicked Problems\ - \ and Design Considerations in Composing for Laptop Orchestra},\n url = {http://www.nime.org/proceedings/2012/nime2012_259.pdf},\n\ - \ year = {2012}\n}\n" + ID: jvilleneuve2015 + abstract: 'The motivation of this paper is to highlight the importance of visual + representations for artists when modeling and simulating mass-interaction physical + networks in the context of sound synthesis and musical composition. GENESIS is + a musician-oriented software environment for sound synthesis and musical composition. + However, despite this orientation, a substantial amount of effort has been put + into building a rich variety of tools based on static or dynamic visual representations + of models and of abstractions of their properties. After a quick survey of these + tools, we will illustrate the significant role they play in the creative process + involved when going from a musical idea and exploration to the production of a + complete musical piece. To that aim, our analysis will rely on the work and practice + of several artists having used GENESIS in various ways.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Jérôme Villeneuve and Claude Cadoz and Nicolas Castagné + bibtex: "@inproceedings{jvilleneuve2015,\n abstract = {The motivation of this paper\ + \ is to highlight the importance of visual representations for artists when modeling\ + \ and simulating mass-interaction physical networks in the context of sound synthesis\ + \ and musical composition. GENESIS is a musician-oriented software environment\ + \ for sound synthesis and musical composition. However, despite this orientation,\ + \ a substantial amount of effort has been put into building a rich variety of\ + \ tools based on static or dynamic visual representations of models and of abstractions\ + \ of their properties. After a quick survey of these tools, we will illustrate\ + \ the significant role they play in the creative process involved when going from\ + \ a musical idea and exploration to the production of a complete musical piece.\ + \ To that aim, our analysis will rely on the work and practice of several artists\ + \ having used GENESIS in various ways.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {J\\'er\\^ome Villeneuve and Claude Cadoz and Nicolas Castagn\\\ + 'e},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179190},\n editor = {Edgar\ + \ Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages =\ + \ {195--200},\n publisher = {Louisiana State University},\n title = {Visual Representation\ + \ in GENESIS as a tool for Physical Modeling, Sound Synthesis and Musical Composition},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_204.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/204/0204-file1.mov},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178239 + doi: 10.5281/zenodo.1179190 + editor: Edgar Berdahl 
and Jesse Allison issn: 2220-4806 - keywords: 'Design, laptop orchestra, mobile phone orchestra, instrument design, - interaction design, composition' - publisher: University of Michigan - title: Wicked Problems and Design Considerations in Composing for Laptop Orchestra - url: http://www.nime.org/proceedings/2012/nime2012_259.pdf - year: 2012 + month: May + pages: 195--200 + publisher: Louisiana State University + title: 'Visual Representation in GENESIS as a tool for Physical Modeling, Sound + Synthesis and Musical Composition' + url: http://www.nime.org/proceedings/2015/nime2015_204.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/204/0204-file1.mov + year: 2015 - ENTRYTYPE: inproceedings - ID: Frisson2012 - abstract: 'This paper presents the LoopJam installation which allows participants - to interact with a sound map using a 3D com-puter vision tracking system. The - sound map results from similarity-based clustering of sounds. The playback of - these sounds is controlled by the positions or gestures of partic-ipants tracked - with a Kinect depth-sensing camera. The beat-inclined bodily movements of participants - in the in-stallation are mapped to the tempo of played sounds, while the playback - speed is synchronized by default among all sounds. We presented and tested an - early version of the in-stallation to three exhibitions in Belgium, Italy and - France. The reactions among participants ranged between curiosity and amusement.' - address: 'Ann Arbor, Michigan' - author: Christian Frisson and Stéphane Dupont and Julien Leroy and Alexis Moinet - and Thierry Ravet and Xavier Siebert and Thierry Dutoit - bibtex: "@inproceedings{Frisson2012,\n abstract = {This paper presents the LoopJam\ - \ installation which allows participants to interact with a sound map using a\ - \ 3D com-puter vision tracking system. The sound map results from similarity-based\ - \ clustering of sounds. 
The playback of these sounds is controlled by the positions\ - \ or gestures of partic-ipants tracked with a Kinect depth-sensing camera. The\ - \ beat-inclined bodily movements of participants in the in-stallation are mapped\ - \ to the tempo of played sounds, while the playback speed is synchronized by default\ - \ among all sounds. We presented and tested an early version of the in-stallation\ - \ to three exhibitions in Belgium, Italy and France. The reactions among participants\ - \ ranged between curiosity and amusement.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Christian Frisson and St{\\'e}phane Dupont and Julien Leroy and Alexis\ - \ Moinet and Thierry Ravet and Xavier Siebert and Thierry Dutoit},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178255},\n issn = {2220-4806},\n keywords\ - \ = {Interactive music systems and retrieval, user interaction and interfaces,\ - \ audio similarity, depth sensors},\n publisher = {University of Michigan},\n\ - \ title = {LoopJam: turning the dance floor into a collaborative instrumental\ - \ map},\n url = {http://www.nime.org/proceedings/2012/nime2012_260.pdf},\n year\ - \ = {2012}\n}\n" + ID: fcalegario2015 + abstract: 'Regarding the design of new DMIs, it is possible to fit the majority + of projects into two main cases: those developed by the academic research centers, + which focus on North American and European contemporary classical and experimental + music; and the DIY projects, in which the luthier also plays the roles of performer + and/or composer. In both cases, the design process is not focused on creating + DMIs for a community with a particular culture --- with established instruments, + repertoire and playing styles --- outside European and North American traditions. + This challenge motivated our research. In this paper, we discuss lessons learned + during an one-year project called Batebit. 
Our approach was based on Design Thinking + methodology, comprising cycles of inspiration, ideation and implementation. It + resulted in two new DMIs developed collaboratively with musicians from the Brazilian + Northeast.' + address: 'Baton Rouge, Louisiana, USA' + author: Jerônimo Barbosa and Filipe Calegario and João Tragtenberg and Giordano + Cabral and Geber Ramalho and Marcelo M. Wanderley + bibtex: "@inproceedings{fcalegario2015,\n abstract = {Regarding the design of new\ + \ DMIs, it is possible to fit the majority of projects into two main cases: those\ + \ developed by the academic research centers, which focus on North American and\ + \ European contemporary classical and experimental music; and the DIY projects,\ + \ in which the luthier also plays the roles of performer and/or composer. In both\ + \ cases, the design process is not focused on creating DMIs for a community with\ + \ a particular culture --- with established instruments, repertoire and playing\ + \ styles --- outside European and North American traditions. This challenge motivated\ + \ our research. In this paper, we discuss lessons learned during an one-year project\ + \ called Batebit. Our approach was based on Design Thinking methodology, comprising\ + \ cycles of inspiration, ideation and implementation. 
It resulted in two new DMIs\ + \ developed collaboratively with musicians from the Brazilian Northeast.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Jer\\^onimo Barbosa and Filipe\ + \ Calegario and Jo\\~ao Tragtenberg and Giordano Cabral and Geber Ramalho and\ + \ {Marcelo M.} Wanderley},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179008},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {277--280},\n publisher = {Louisiana State University},\n title\ + \ = {Designing {DMI}s for Popular Music in the {Brazil}ian Northeast: Lessons\ + \ Learned},\n url = {http://www.nime.org/proceedings/2015/nime2015_207.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178255 + doi: 10.5281/zenodo.1179008 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Interactive music systems and retrieval, user interaction and interfaces, - audio similarity, depth sensors' - publisher: University of Michigan - title: 'LoopJam: turning the dance floor into a collaborative instrumental map' - url: http://www.nime.org/proceedings/2012/nime2012_260.pdf - year: 2012 + month: May + pages: 277--280 + publisher: Louisiana State University + title: 'Designing DMIs for Popular Music in the Brazilian Northeast: Lessons Learned' + url: http://www.nime.org/proceedings/2015/nime2015_207.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Melo2012 - abstract: 'This paper describes the conceptualization and development of an open - source tool for controlling the sound of a saxophone via the gestures of its performer. - The motivation behind this work is the need for easy access tools to explore, - compose and perform electroacoustic music in Colombian music schools and conservatories. 
- This work led to the adaptation of common hardware to be used as a sensor attached - to an acoustic instrument and the development of software applications to record, - visualize and map performers gesture data into signal processing parameters. The - scope of this work suggested that focus was to be made on a specific instrument - so the saxophone was chosen. Gestures were selected in an iterative process with - the performer, although a more ambitious strategy to figure out main gestures - of an instruments performance was first defined. Detailed gesture-to-sound processing - mappings are exposed in the text. An electroacoustic musical piece was successfully - rehearsed and recorded using the Gest-O system.' - address: 'Ann Arbor, Michigan' - author: Jonh Melo and Daniel Gómez and Miguel Vargas - bibtex: "@inproceedings{Melo2012,\n abstract = {This paper describes the conceptualization\ - \ and development of an open source tool for controlling the sound of a saxophone\ - \ via the gestures of its performer. The motivation behind this work is the need\ - \ for easy access tools to explore, compose and perform electroacoustic music\ - \ in Colombian music schools and conservatories. This work led to the adaptation\ - \ of common hardware to be used as a sensor attached to an acoustic instrument\ - \ and the development of software applications to record, visualize and map performers\ - \ gesture data into signal processing parameters. The scope of this work suggested\ - \ that focus was to be made on a specific instrument so the saxophone was chosen.\ - \ Gestures were selected in an iterative process with the performer, although\ - \ a more ambitious strategy to figure out main gestures of an instruments performance\ - \ was first defined. Detailed gesture-to-sound processing mappings are exposed\ - \ in the text. 
An electroacoustic musical piece was successfully rehearsed and\ - \ recorded using the Gest-O system.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Jonh Melo and Daniel G{\\'o}mez and Miguel Vargas},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1180535},\n issn = {2220-4806},\n keywords = {Electroacoustic\ - \ music, saxophone, expanded instrument, gesture.},\n publisher = {University\ - \ of Michigan},\n title = {Gest-O: Performer gestures used to expand the sounds\ - \ of the saxophone},\n url = {http://www.nime.org/proceedings/2012/nime2012_262.pdf},\n\ - \ year = {2012}\n}\n" + ID: dmenzies2015 + abstract: 'This work uses a custom-built digital bagpipe chanter interface to assist + in the process of learning the Great Highland Bagpipe (GHB). In this paper, a + new algorithm is presented for the automatic recognition and evaluation of the + various ornamentation techniques that are a central aspect of traditional Highland + bagpipe music. The algorithm is evaluated alongside a previously published approach, + and is shown to provide a significant improvement in performance. The ornament + detection facility forms part of a complete hardware and software system for use + in both tuition and solo practice situations, allowing details of ornamentation + errors made by the player to be provided as visual and textual feedback. The system + also incorporates new functionality for the identification and description of + GHB fingering errors.' + address: 'Baton Rouge, Louisiana, USA' + author: Duncan Menzies and Andrew McPherson + bibtex: "@inproceedings{dmenzies2015,\n abstract = {This work uses a custom-built\ + \ digital bagpipe chanter interface to assist in the process of learning the Great\ + \ Highland Bagpipe (GHB). 
In this paper, a new algorithm is presented for the\ + \ automatic recognition and evaluation of the various ornamentation techniques\ + \ that are a central aspect of traditional Highland bagpipe music. The algorithm\ + \ is evaluated alongside a previously published approach, and is shown to provide\ + \ a significant improvement in performance. The ornament detection facility forms\ + \ part of a complete hardware and software system for use in both tuition and\ + \ solo practice situations, allowing details of ornamentation errors made by the\ + \ player to be provided as visual and textual feedback. The system also incorporates\ + \ new functionality for the identification and description of GHB fingering errors.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Duncan Menzies and Andrew\ + \ McPherson},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179136},\n editor\ + \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ + \ pages = {50--53},\n publisher = {Louisiana State University},\n title = {Highland\ + \ Piping Ornament Recognition Using Dynamic Time Warping},\n url = {http://www.nime.org/proceedings/2015/nime2015_208.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180535 + doi: 10.5281/zenodo.1179136 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Electroacoustic music, saxophone, expanded instrument, gesture.' 
- publisher: University of Michigan - title: 'Gest-O: Performer gestures used to expand the sounds of the saxophone' - url: http://www.nime.org/proceedings/2012/nime2012_262.pdf - year: 2012 + month: May + pages: 50--53 + publisher: Louisiana State University + title: Highland Piping Ornament Recognition Using Dynamic Time Warping + url: http://www.nime.org/proceedings/2015/nime2015_208.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Freed2012 - abstract: 'The Fingerphone, a reworking of the Stylophone in conductive paper, is - presented as an example of new design approaches for sustainability and playability - of electronic musical instruments.' - address: 'Ann Arbor, Michigan' - author: Adrian Freed - bibtex: "@inproceedings{Freed2012,\n abstract = {The Fingerphone, a reworking of\ - \ the Stylophone in conductive paper, is presented as an example of new design\ - \ approaches for sustainability and playability of electronic musical instruments.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Adrian Freed},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178253},\n issn = {2220-4806},\n keywords = {Stylophone,\ - \ Conductive Paper, Pressure Sensing, Touch Sensing, Capacitive Sensing, Plurifunctionality,\ - \ Fingerphone, Sustainable Design},\n publisher = {University of Michigan},\n\ - \ title = {The Fingerphone: a Case Study of Sustainable Instrument Redesign},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_264.pdf},\n year = {2012}\n\ + ID: aflo2015 + abstract: 'This paper presents the sound art installation Doppelgänger. In Doppelgänger, + we combine an artistic concept on a large scale with a high degree of control + over timbre and dynamics. This puts great demands on the technical aspects of + the work. The installation consists of seven 3.5 meters-tall objects weighing + a total of 1500 kilos. 
Doppelgänger transfers one soundscape into another using + audio analysis, mapping, and computer-controlled acoustic sound objects. The technical + realization is based on hammer mechanisms actuated by powerful solenoids, driven + by a network of Arduino boards with high power PWM outputs, and a Max-patch running + audio analysis and mapping. We look into the special requirements in mechanics + for large-scale projects. Great care has been taken in the technical design to + ensure that the resulting work is scalable both in numbers of elements and in + physical dimensions. This makes our findings easily applicable to other projects + of a similar nature.' + address: 'Baton Rouge, Louisiana, USA' + author: Asbjørn Blokkum Flø and Hans Wilmers + bibtex: "@inproceedings{aflo2015,\n abstract = {This paper presents the sound art\ + \ installation Doppelg{\\''a}nger. In Doppelg{\\''a}nger, we combine an artistic\ + \ concept on a large scale with a high degree of control over timbre and dynamics.\ + \ This puts great demands on the technical aspects of the work. The installation\ + \ consists of seven 3.5 meters-tall objects weighing a total of 1500 kilos. Doppelg{\\\ + ''a}nger transfers one soundscape into another using audio analysis, mapping,\ + \ and computer-controlled acoustic sound objects. The technical realization is\ + \ based on hammer mechanisms actuated by powerful solenoids, driven by a network\ + \ of Arduino boards with high power PWM outputs, and a Max-patch running audio\ + \ analysis and mapping. We look into the special requirements in mechanics for\ + \ large-scale projects. Great care has been taken in the technical design to ensure\ + \ that the resulting work is scalable both in numbers of elements and in physical\ + \ dimensions. 
This makes our findings easily applicable to other projects of a\ + \ similar nature.},\n address = {Baton Rouge, Louisiana, USA},\n author = {{Asbj{\\\ + o}rn Blokkum} Fl\\o and Hans Wilmers},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179060},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {61--64},\n publisher = {Louisiana State University},\n title\ + \ = {Doppelg{\\''a}nger: A solenoid-based large scale sound installation.},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_212.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/212/0212-file1.mp4},\n year = {2015}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178253 + doi: 10.5281/zenodo.1179060 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Stylophone, Conductive Paper, Pressure Sensing, Touch Sensing, Capacitive - Sensing, Plurifunctionality, Fingerphone, Sustainable Design' - publisher: University of Michigan - title: 'The Fingerphone: a Case Study of Sustainable Instrument Redesign' - url: http://www.nime.org/proceedings/2012/nime2012_264.pdf - year: 2012 + month: May + pages: 61--64 + publisher: Louisiana State University + title: 'Doppelgänger: A solenoid-based large scale sound installation.' + url: http://www.nime.org/proceedings/2015/nime2015_212.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/212/0212-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Leeuw2012 - abstract: "This short paper follows an earlier NIME paper [1] describing the invention\ - \ and construction of the Electrumpet. Revisions and playing experience are both\ - \ part of the current paper.\nThe Electrumpet can be heard in the performance\ - \ given by Hans Leeuw and Diemo Schwarz at this NIME conference." 
- address: 'Ann Arbor, Michigan' - author: Hans Leeuw - bibtex: "@inproceedings{Leeuw2012,\n abstract = {This short paper follows an earlier\ - \ NIME paper [1] describing the invention and construction of the Electrumpet.\ - \ Revisions and playing experience are both part of the current paper.\nThe Electrumpet\ - \ can be heard in the performance given by Hans Leeuw and Diemo Schwarz at this\ - \ NIME conference.},\n address = {Ann Arbor, Michigan},\n author = {Hans Leeuw},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178319},\n issn = {2220-4806},\n\ - \ keywords = {NIME, Electrumpet, live-electronics, hybrid instruments.},\n publisher\ - \ = {University of Michigan},\n title = {The electrumpet, additions and revisions},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_271.pdf},\n year = {2012}\n\ - }\n" + ID: ahazzard2015 + abstract: 'Locative music experiences are often non-linear and as such they are + co-created, as the final arrangement of the music heard is guided by the movements + of the user. We note an absence of principles and guidelines regarding how composers + should approach the structuring of such locative soundtracks. For instance, how + does one compose for a non-linear, indeterminate experience using linear pre-composed + placed sounds, where fixed musical time is situated into the indeterminate time + of the user''s experience? Furthermore, how does one create a soundtrack that + is suitable for the location, but also functions as a coherent musical structure? + We explore these questions by analyzing an existing `placed sound'' work from + a traditional music theory perspective and in doing so reveal some structural + principals from `fixed'' musical forms can also support the composition of contemporary + locative music experiences.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Adrian Hazzard and Steve Benford and Alan Chamberlain and Chris Greenhalgh + bibtex: "@inproceedings{ahazzard2015,\n abstract = {Locative music experiences are\ + \ often non-linear and as such they are co-created, as the final arrangement of\ + \ the music heard is guided by the movements of the user. We note an absence of\ + \ principles and guidelines regarding how composers should approach the structuring\ + \ of such locative soundtracks. For instance, how does one compose for a non-linear,\ + \ indeterminate experience using linear pre-composed placed sounds, where fixed\ + \ musical time is situated into the indeterminate time of the user's experience?\ + \ Furthermore, how does one create a soundtrack that is suitable for the location,\ + \ but also functions as a coherent musical structure? We explore these questions\ + \ by analyzing an existing `placed sound' work from a traditional music theory\ + \ perspective and in doing so reveal some structural principals from `fixed' musical\ + \ forms can also support the composition of contemporary locative music experiences.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Adrian Hazzard and Steve\ + \ Benford and Alan Chamberlain and Chris Greenhalgh},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179086},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {378--381},\n publisher = {Louisiana\ + \ State University},\n title = {Considering musical structure in location-based\ + \ experiences},\n url = {http://www.nime.org/proceedings/2015/nime2015_214.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178319 + doi: 10.5281/zenodo.1179086 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 
'NIME, Electrumpet, live-electronics, hybrid instruments.' - publisher: University of Michigan - title: 'The electrumpet, additions and revisions' - url: http://www.nime.org/proceedings/2012/nime2012_271.pdf - year: 2012 + month: May + pages: 378--381 + publisher: Louisiana State University + title: Considering musical structure in location-based experiences + url: http://www.nime.org/proceedings/2015/nime2015_214.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Mitchell2012 - abstract: 'This paper presents a toolbox of gestural control mechanisms which are - available when the input sensing apparatus is a pair of data gloves fitted with - orientation sensors. The toolbox was developed in advance of a live music performance - in which the mapping from gestural input to audio output was to be developed rapidly - in collaboration with the performer. The paper begins with an introduction to - the associated literature before introducing a range of continuous, discrete and - combined control mechanisms, enabling a flexible range of mappings to be explored - and modified easily. An application of the toolbox within a live music performance - is then described with an evaluation of the system with ideas for future developments.' - address: 'Ann Arbor, Michigan' - author: Thomas Mitchell and Sebastian Madgwick and Imogen Heap - bibtex: "@inproceedings{Mitchell2012,\n abstract = {This paper presents a toolbox\ - \ of gestural control mechanisms which are available when the input sensing apparatus\ - \ is a pair of data gloves fitted with orientation sensors. 
The toolbox was developed\ - \ in advance of a live music performance in which the mapping from gestural input\ - \ to audio output was to be developed rapidly in collaboration with the performer.\ - \ The paper begins with an introduction to the associated literature before introducing\ - \ a range of continuous, discrete and combined control mechanisms, enabling a\ - \ flexible range of mappings to be explored and modified easily. An application\ - \ of the toolbox within a live music performance is then described with an evaluation\ - \ of the system with ideas for future developments.},\n address = {Ann Arbor,\ - \ Michigan},\n author = {Thomas Mitchell and Sebastian Madgwick and Imogen Heap},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1180543},\n issn = {2220-4806},\n\ - \ keywords = {Computer Music, Gestural Control, Data Gloves},\n publisher = {University\ - \ of Michigan},\n title = {Musical Interaction with Hand Posture and Orientation:\ - \ A Toolbox of Gestural Control Mechanisms},\n url = {http://www.nime.org/proceedings/2012/nime2012_272.pdf},\n\ - \ year = {2012}\n}\n" + ID: btome2015 + abstract: 'Twitter has provided a social platform for everyone to enter the previously + exclusive world of the internet, enriching this online social tapestry with cultural + diversity and enabling revolutions. We believe this same tool can be used to also + change the world of music creation. Thus we present MMODM, an online drum machine + based on the Twitter streaming API, using tweets from around the world to create + and perform musical sequences together in real time. Users anywhere can express + 16-beat note sequences across 26 different instruments using plain text tweets + on their favorite device, in real-time. Meanwhile, users on the site itself can + use the graphical interface to locally DJ the rhythm, filters, and sequence blending. 
+ By harnessing this duo of website and Twitter network, MMODM enables a whole new + scale of synchronous musical collaboration between users locally, remotely, across + a wide variety of computing devices, and across a variety of cultures.' + address: 'Baton Rouge, Louisiana, USA' + author: Basheer Tome and Donald Derek Haddad and Tod Machover and Joseph Paradiso + bibtex: "@inproceedings{btome2015,\n abstract = {Twitter has provided a social platform\ + \ for everyone to enter the previously exclusive world of the internet, enriching\ + \ this online social tapestry with cultural diversity and enabling revolutions.\ + \ We believe this same tool can be used to also change the world of music creation.\ + \ Thus we present MMODM, an online drum machine based on the Twitter streaming\ + \ API, using tweets from around the world to create and perform musical sequences\ + \ together in real time. Users anywhere can express 16-beat note sequences across\ + \ 26 different instruments using plain text tweets on their favorite device, in\ + \ real-time. Meanwhile, users on the site itself can use the graphical interface\ + \ to locally DJ the rhythm, filters, and sequence blending. 
By harnessing this\ + \ duo of website and Twitter network, MMODM enables a whole new scale of synchronous\ + \ musical collaboration between users locally, remotely, across a wide variety\ + \ of computing devices, and across a variety of cultures.},\n address = {Baton\ + \ Rouge, Louisiana, USA},\n author = {Basheer Tome and {Donald Derek} Haddad and\ + \ Tod Machover and Joseph Paradiso},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179184},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {285--288},\n publisher = {Louisiana State University},\n title\ + \ = {MMODM: Massively Multipler Online Drum Machine},\n url = {http://www.nime.org/proceedings/2015/nime2015_215.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/215/0215-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180543 + doi: 10.5281/zenodo.1179184 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Computer Music, Gestural Control, Data Gloves' - publisher: University of Michigan - title: 'Musical Interaction with Hand Posture and Orientation: A Toolbox of Gestural - Control Mechanisms' - url: http://www.nime.org/proceedings/2012/nime2012_272.pdf - year: 2012 + month: May + pages: 285--288 + publisher: Louisiana State University + title: 'MMODM: Massively Multipler Online Drum Machine' + url: http://www.nime.org/proceedings/2015/nime2015_215.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/215/0215-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Dahlstedt2012 - abstract: 'I present a novel low-tech multidimensional gestural con-troller, based - on the resistive properties of a 2D field of pencil markings on paper. 
A set of - movable electrodes (+, -, ground) made from soldered stacks of coins create a - dynamic voltage potential field in the carbon layer, and an-other set of movable - electrodes tap voltages from this field. These voltages are used to control complex - sound engines in an analogue modular synthesizer. Both the voltage field and the - tap electrodes can be moved freely. The design was inspired by previous research - in complex mappings for advanced digital instruments, and provides a similarly - dynamic playing environment for analogue synthesis. The interface is cheap to - build, and provides flexible control over a large set of parameters. It is musically - satisfying to play, and allows for a wide range of playing techniques, from wild - exploration to subtle expressions. I also present an inven-tory of the available - playing techniques, motivated by the interface design, musically, conceptually - and theatrically. The performance aspects of the interface are also discussed. - The interface has been used in a number of performances in Sweden and Japan in - 2011, and is also used by other musicians.' - address: 'Ann Arbor, Michigan' - author: Palle Dahlstedt - bibtex: "@inproceedings{Dahlstedt2012,\n abstract = {I present a novel low-tech\ - \ multidimensional gestural con-troller, based on the resistive properties of\ - \ a 2D field of pencil markings on paper. A set of movable electrodes (+, -, ground)\ - \ made from soldered stacks of coins create a dynamic voltage potential field\ - \ in the carbon layer, and an-other set of movable electrodes tap voltages from\ - \ this field. These voltages are used to control complex sound engines in an analogue\ - \ modular synthesizer. Both the voltage field and the tap electrodes can be moved\ - \ freely. The design was inspired by previous research in complex mappings for\ - \ advanced digital instruments, and provides a similarly dynamic playing environment\ - \ for analogue synthesis. 
The interface is cheap to build, and provides flexible\ - \ control over a large set of parameters. It is musically satisfying to play,\ - \ and allows for a wide range of playing techniques, from wild exploration to\ - \ subtle expressions. I also present an inven-tory of the available playing techniques,\ - \ motivated by the interface design, musically, conceptually and theatrically.\ - \ The performance aspects of the interface are also discussed. The interface has\ - \ been used in a number of performances in Sweden and Japan in 2011, and is also\ - \ used by other musicians.},\n address = {Ann Arbor, Michigan},\n author = {Palle\ - \ Dahlstedt},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178241},\n issn\ - \ = {2220-4806},\n keywords = {gestural interface, 2d, analog synthesis, performance,\ - \ improvisation},\n publisher = {University of Michigan},\n title = {Pencil Fields:\ - \ An Expressive Low-Tech Performance Interface for Analog Synthesis},\n url =\ - \ {http://www.nime.org/proceedings/2012/nime2012_275.pdf},\n year = {2012}\n}\n" + ID: nbarrett2015 + abstract: 'Electroacoustic music has a longstanding relationship with gesture and + space. This paper marks the start of a project investigating acousmatic spatial + imagery, real gestural behaviour and ultimately the formation of tangible acousmatic + images. These concepts are explored experimentally using motion tracking in a + source-sound recording context, interactive parameter-mapping sonification in + three-dimensional high-order ambisonics, composition and performance. The spatio-musical + role of physical actions in relation to instrument excitation is used as a point + of departure for embodying physical spatial gestures in the creative process. + The work draws on how imagery for music is closely linked with imagery for music-related + actions.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Natasha Barrett + bibtex: "@inproceedings{nbarrett2015,\n abstract = {Electroacoustic music has a\ + \ longstanding relationship with gesture and space. This paper marks the start\ + \ of a project investigating acousmatic spatial imagery, real gestural behaviour\ + \ and ultimately the formation of tangible acousmatic images. These concepts are\ + \ explored experimentally using motion tracking in a source-sound recording context,\ + \ interactive parameter-mapping sonification in three-dimensional high-order ambisonics,\ + \ composition and performance. The spatio-musical role of physical actions in\ + \ relation to instrument excitation is used as a point of departure for embodying\ + \ physical spatial gestures in the creative process. The work draws on how imagery\ + \ for music is closely linked with imagery for music-related actions.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Natasha Barrett},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179014},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {191--194},\n publisher = {Louisiana\ + \ State University},\n title = {Creating tangible spatial-musical images from\ + \ physical performance gestures},\n url = {http://www.nime.org/proceedings/2015/nime2015_216.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/216/0216-file1.zip},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178241 + doi: 10.5281/zenodo.1179014 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'gestural interface, 2d, analog synthesis, performance, improvisation' - publisher: University of Michigan - title: 'Pencil Fields: An Expressive Low-Tech Performance Interface for Analog Synthesis' - url: 
http://www.nime.org/proceedings/2012/nime2012_275.pdf - year: 2012 + month: May + pages: 191--194 + publisher: Louisiana State University + title: Creating tangible spatial-musical images from physical performance gestures + url: http://www.nime.org/proceedings/2015/nime2015_216.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/216/0216-file1.zip + year: 2015 - ENTRYTYPE: inproceedings - ID: Kimura2012 - abstract: 'As a 2010 Artist in Residence in Musical Research at IRCAM, Mari Kimura - used the Augmented Violin to develop new compositional approaches, and new ways - of creating interactive performances [1]. She contributed her empirical and historical - knowledge of violin bowing technique, working with the Real Time Musical Interactions - Team at IRCAM. Thanks to this residency, her ongoing long-distance collaboration - with the team since 2007 dramatically accelerated, and led to solving several - compositional and calibration issues of the Gesture Follower (GF) [2]. Kimura - was also the first artist to develop projects between the two teams at IRCAM, - using OMAX (Musical Representation Team) with GF. In the past year, the performance - with Augmented Violin has been expanded in larger scale interactive audio/visual - projects as well. In this paper, we report on the various techniques developed - for the Augmented Violin and compositions by Kimura using them, offering specific - examples and scores.' - address: 'Ann Arbor, Michigan' - author: Mari Kimura and Nicolas Rasamimanana and Frédéric Bevilacqua and Norbert - Schnell and Bruno Zamborlin and Emmanuel Fléty - bibtex: "@inproceedings{Kimura2012,\n abstract = {As a 2010 Artist in Residence\ - \ in Musical Research at IRCAM, Mari Kimura used the Augmented Violin to develop\ - \ new compositional approaches, and new ways of creating interactive performances\ - \ [1]. 
She contributed her empirical and historical knowledge of violin bowing\ - \ technique, working with the Real Time Musical Interactions Team at IRCAM. Thanks\ - \ to this residency, her ongoing long-distance collaboration with the team since\ - \ 2007 dramatically accelerated, and led to solving several compositional and\ - \ calibration issues of the Gesture Follower (GF) [2]. Kimura was also the first\ - \ artist to develop projects between the two teams at IRCAM, using OMAX (Musical\ - \ Representation Team) with GF. In the past year, the performance with Augmented\ - \ Violin has been expanded in larger scale interactive audio/visual projects as\ - \ well. In this paper, we report on the various techniques developed for the Augmented\ - \ Violin and compositions by Kimura using them, offering specific examples and\ - \ scores.},\n address = {Ann Arbor, Michigan},\n author = {Mari Kimura and Nicolas\ - \ Rasamimanana and Fr{\\'e}d{\\'e}ric Bevilacqua and Norbert Schnell and Bruno\ - \ Zamborlin and Emmanuel Fl{\\'e}ty},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178305},\n\ - \ issn = {2220-4806},\n keywords = {Augmented Violin, Gesture Follower, Interactive\ - \ Performance},\n publisher = {University of Michigan},\n title = {Extracting\ - \ Human Expression For Interactive Composition with the Augmented Violin},\n url\ - \ = {http://www.nime.org/proceedings/2012/nime2012_279.pdf},\n year = {2012}\n\ - }\n" + ID: jharrimanc2015 + abstract: 'Designing and building Digital Music Instruments (DMIs) is a promising + context to engage children in technology design with parallels to hands on and + project based learning educational approaches. 
Looking at tools and approaches + used in STEM education we find much in common with the tools and approaches used + in the creation of DMIs as well as opportunities for future development, in particular + the use of scaffolded software and hardware toolkits. Current approaches to teaching + and designing DMIs within the community suggest fruitful ideas for engaging novices + in authentic design activities. Hardware toolkits and programming approaches are + considered to identify productive approaches to teach technology design through + building DMIs.' + address: 'Baton Rouge, Louisiana, USA' + author: Jiffer Harriman + bibtex: "@inproceedings{jharrimanc2015,\n abstract = {Designing and building Digital\ + \ Music Instruments (DMIs) is a promising context to engage children in technology\ + \ design with parallels to hands on and project based learning educational approaches.\ + \ Looking at tools and approaches used in STEM education we find much in common\ + \ with the tools and approaches used in the creation of DMIs as well as opportunities\ + \ for future development, in particular the use of scaffolded software and hardware\ + \ toolkits. 
Current approaches to teaching and designing DMIs within the community\ + \ suggest fruitful ideas for engaging novices in authentic design activities.\ + \ Hardware toolkits and programming approaches are considered to identify productive\ + \ approaches to teach technology design through building DMIs.},\n address = {Baton\ + \ Rouge, Louisiana, USA},\n author = {Jiffer Harriman},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179078},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {70--73},\n publisher = {Louisiana\ + \ State University},\n title = {Start 'em Young: Digital Music Instrument for\ + \ Education},\n url = {http://www.nime.org/proceedings/2015/nime2015_218.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178305 + doi: 10.5281/zenodo.1179078 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Augmented Violin, Gesture Follower, Interactive Performance' - publisher: University of Michigan - title: Extracting Human Expression For Interactive Composition with the Augmented - Violin - url: http://www.nime.org/proceedings/2012/nime2012_279.pdf - year: 2012 + month: May + pages: 70--73 + publisher: Louisiana State University + title: 'Start ''em Young: Digital Music Instrument for Education' + url: http://www.nime.org/proceedings/2015/nime2015_218.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Shear2012 - abstract: "The Electromagnetically Sustained Rhodes Piano is an orig-inal Rhodes\ - \ Piano modified to provide control over the amplitude envelope of individual\ - \ notes through aftertouch pressure. 
Although there are many opportunities to\ - \ shape the amplitude envelope before loudspeaker amplification, they are all\ - \ governed by the ever-decaying physical vibra-tions of the tone generating mechanism.\ - \ A single-note proof of concept for electromagnetic control over this vibrating\ - \ mechanism was presented at NIME 2011.\nIn the past year, virtually every aspect\ - \ of the system has been improved. We use a different vibration sensor that is\ - \ immune to electromagnetic interference, thus eliminat-ing troublesome feedback.\ - \ For control, we both reduce cost and gain continuous position sensing throughout\ - \ the entire range of key motion in addition to aftertouch pressure. Finally,\ - \ the entire system now fits within the space constraints presented by the original\ - \ piano, allowing it to be installed on adjacent notes." - address: 'Ann Arbor, Michigan' - author: Greg Shear and Matthew Wright - bibtex: "@inproceedings{Shear2012,\n abstract = {The Electromagnetically Sustained\ - \ Rhodes Piano is an orig-inal Rhodes Piano modified to provide control over the\ - \ amplitude envelope of individual notes through aftertouch pressure. Although\ - \ there are many opportunities to shape the amplitude envelope before loudspeaker\ - \ amplification, they are all governed by the ever-decaying physical vibra-tions\ - \ of the tone generating mechanism. A single-note proof of concept for electromagnetic\ - \ control over this vibrating mechanism was presented at NIME 2011.\nIn the past\ - \ year, virtually every aspect of the system has been improved. We use a different\ - \ vibration sensor that is immune to electromagnetic interference, thus eliminat-ing\ - \ troublesome feedback. For control, we both reduce cost and gain continuous position\ - \ sensing throughout the entire range of key motion in addition to aftertouch\ - \ pressure. 
Finally, the entire system now fits within the space constraints presented\ - \ by the original piano, allowing it to be installed on adjacent notes.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Greg Shear and Matthew Wright},\n booktitle\ + ID: dcazzani2015 + abstract: 'The following paper documents the creation of a prototype of shoe-soles + designed to detect various postures of standing musicians using non-intrusive + pressure sensors. In order to do so, flexible algorithms were designed with the + capacity of working even with an imprecise placement of the sensors. This makes + it easy and accessible for all potential users. At least 4 sensors are required: + 2 for the front and 2 for the back; this prototype uses 6. The sensors are rather + inexpensive, widening the economic availability. For each individual musician, + the algorithms are capable of ``personalising'''' postures in order to create + consistent evaluations; the results of which may be, but are not limited to: new + musical interfaces, educational analysis of technique, or music controllers. In + building a prototype for the algorithms, data was acquired by wiring the sensors + to a data-logger. The algorithms and tests were implemented using MATLAB. After + designing the algorithms, various tests were run in order to prove their reliability. + These determined that indeed the algorithms work to a sufficient degree of certainty, + allowing for a reliable classification of a musician''s posture or position.' + address: 'Baton Rouge, Louisiana, USA' + author: Dario Cazzani + bibtex: "@inproceedings{dcazzani2015,\n abstract = {The following paper documents\ + \ the creation of a prototype of shoe-soles designed to detect various postures\ + \ of standing musicians using non-intrusive pressure sensors. In order to do so,\ + \ flexible algorithms were designed with the capacity of working even with an\ + \ imprecise placement of the sensors. 
This makes it easy and accessible for all\ + \ potential users. At least 4 sensors are required: 2 for the front and 2 for\ + \ the back; this prototype uses 6. The sensors are rather inexpensive, widening\ + \ the economic availability. For each individual musician, the algorithms are\ + \ capable of ``personalising'' postures in order to create consistent evaluations;\ + \ the results of which may be, but are not limited to: new musical interfaces,\ + \ educational analysis of technique, or music controllers. In building a prototype\ + \ for the algorithms, data was acquired by wiring the sensors to a data-logger.\ + \ The algorithms and tests were implemented using MATLAB. After designing the\ + \ algorithms, various tests were run in order to prove their reliability. These\ + \ determined that indeed the algorithms work to a sufficient degree of certainty,\ + \ allowing for a reliable classification of a musician's posture or position.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Dario Cazzani},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1180599},\n issn = {2220-4806},\n keywords\ - \ = {Rhodes, piano, mechanical synthesizer, electromagnetic, sustain, feedback},\n\ - \ publisher = {University of Michigan},\n title = {Further Developments in the\ - \ Electromagnetically Sustained Rhodes Piano},\n url = {http://www.nime.org/proceedings/2012/nime2012_284.pdf},\n\ - \ year = {2012}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1179042},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {54--57},\n\ + \ publisher = {Louisiana State University},\n title = {Posture Identification\ + \ of Musicians Using Non-Intrusive Low-Cost Resistive Pressure Sensors},\n url\ + \ = {http://www.nime.org/proceedings/2015/nime2015_220.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New 
Interfaces for Musical Expression - doi: 10.5281/zenodo.1180599 + doi: 10.5281/zenodo.1179042 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Rhodes, piano, mechanical synthesizer, electromagnetic, sustain, feedback' - publisher: University of Michigan - title: Further Developments in the Electromagnetically Sustained Rhodes Piano - url: http://www.nime.org/proceedings/2012/nime2012_284.pdf - year: 2012 + month: May + pages: 54--57 + publisher: Louisiana State University + title: Posture Identification of Musicians Using Non-Intrusive Low-Cost Resistive + Pressure Sensors + url: http://www.nime.org/proceedings/2015/nime2015_220.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Wang2012 - abstract: 'We have added a dynamic bio-mechanical mapping layer that contains a - model of the human vocal tract with tongue muscle activations as input and tract - geometry as output to a real time gesture controlled voice synthesizer system - used for musical performance and speech research. Using this mapping layer, we - conducted user studies comparing controlling the model muscle activations using - a 2D set of force sensors with a position controlled kinematic input space that - maps directly to the sound. Preliminary user evaluation suggests that it was more - difficult to using force input but the resultant output sound was more intelligible - and natural compared to the kinematic controller. This result shows that force - input is a potentially feasible for browsing through a vowel space for an articulatory - voice synthesis system, although further evaluation is required.' 
- address: 'Ann Arbor, Michigan' - author: Johnty Wang and Nicolas d'Alessandro and Sidney Fels and Robert Pritchard - bibtex: "@inproceedings{Wang2012,\n abstract = {We have added a dynamic bio-mechanical\ - \ mapping layer that contains a model of the human vocal tract with tongue muscle\ - \ activations as input and tract geometry as output to a real time gesture controlled\ - \ voice synthesizer system used for musical performance and speech research. Using\ - \ this mapping layer, we conducted user studies comparing controlling the model\ - \ muscle activations using a 2D set of force sensors with a position controlled\ - \ kinematic input space that maps directly to the sound. Preliminary user evaluation\ - \ suggests that it was more difficult to using force input but the resultant output\ - \ sound was more intelligible and natural compared to the kinematic controller.\ - \ This result shows that force input is a potentially feasible for browsing through\ - \ a vowel space for an articulatory voice synthesis system, although further evaluation\ - \ is required.},\n address = {Ann Arbor, Michigan},\n author = {Johnty Wang and\ - \ Nicolas d'Alessandro and Sidney Fels and Robert Pritchard},\n booktitle = {Proceedings\ + ID: roda2015 + abstract: 'The Internet holds a lot of potential as a music listening, collaboration, + and performance space. It has become commonplace to stream music and video of + musical performance over the web. However, the goal of playing rhythmically synchronized + music over long distances has remained elusive due to the latency inherent in + networked communication. The farther apart two artists are from one another, the + greater the delay. Furthermore, latency times can change abruptly with no warning. + In this paper, we demonstrate that it is possible to create a distributed, synchronized + musical instrument that allows performers to play together over long distances, + despite latency. 
We describe one such instrument, MalLo, which combats latency + by predicting a musician''s action before it is completed. MalLo sends information + about a predicted musical note over the Internet before it is played, and synthesizes + this note at a collaborator''s location at nearly the same moment it is played + by the performer. MalLo also protects against latency spikes by sending the prediction + data across multiple network paths, with the intention of routing around latency. ' + address: 'Baton Rouge, Louisiana, USA' + author: Zeyu Jin and Reid Oda and Adam Finkelstein and Rebecca Fiebrink + bibtex: "@inproceedings{roda2015,\n abstract = {The Internet holds a lot of potential\ + \ as a music listening, collaboration, and performance space. It has become commonplace\ + \ to stream music and video of musical performance over the web. However, the\ + \ goal of playing rhythmically synchronized music over long distances has remained\ + \ elusive due to the latency inherent in networked communication. The farther\ + \ apart two artists are from one another, the greater the delay. Furthermore,\ + \ latency times can change abruptly with no warning. In this paper, we demonstrate\ + \ that it is possible to create a distributed, synchronized musical instrument\ + \ that allows performers to play together over long distances, despite latency.\ + \ We describe one such instrument, MalLo, which combats latency by predicting\ + \ a musician's action before it is completed. MalLo sends information about a\ + \ predicted musical note over the Internet before it is played, and synthesizes\ + \ this note at a collaborator's location at nearly the same moment it is played\ + \ by the performer. MalLo also protects against latency spikes by sending the\ + \ prediction data across multiple network paths, with the intention of routing\ + \ around latency. 
},\n address = {Baton Rouge, Louisiana, USA},\n author = {Zeyu\ + \ Jin and Reid Oda and Adam Finkelstein and Rebecca Fiebrink},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178447},\n issn = {2220-4806},\n keywords = {Gesture,\ - \ Mapping, Articulatory, Speech, Singing, Synthesis},\n publisher = {University\ - \ of Michigan},\n title = {Investigation of Gesture Controlled Articulatory Vocal\ - \ Synthesizer using a Bio-Mechanical Mapping Layer},\n url = {http://www.nime.org/proceedings/2012/nime2012_291.pdf},\n\ - \ year = {2012}\n}\n" + \ doi = {10.5281/zenodo.1179102},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {293--298},\n publisher = {Louisiana\ + \ State University},\n title = {MalLo: A Distributed Synchronized Musical Instrument\ + \ Designed For Internet Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_223.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/223/0223-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178447 + doi: 10.5281/zenodo.1179102 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Gesture, Mapping, Articulatory, Speech, Singing, Synthesis' - publisher: University of Michigan - title: Investigation of Gesture Controlled Articulatory Vocal Synthesizer using - a Bio-Mechanical Mapping Layer - url: http://www.nime.org/proceedings/2012/nime2012_291.pdf - year: 2012 + month: May + pages: 293--298 + publisher: Louisiana State University + title: 'MalLo: A Distributed Synchronized Musical Instrument Designed For Internet + Performance' + url: http://www.nime.org/proceedings/2015/nime2015_223.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/223/0223-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Carey2012 - abstract: 'This paper 
presents the author''s derivations system, an interactive - performance system for solo improvising instrumentalist. The system makes use - of a combination of real-time audio analysis, live sampling and spectral re-synthesis - to build a vocabulary of possible performative responses to live instrumental - input throughout an improvisatory performance. A form of timbral matching is employed - to form a link between the live performer and an expanding database of musical - materials. In addition, the system takes into account the unique nature of the - rehearsal/practice space in musical performance through the implementation of - performer-configurable cumulative rehearsal databases into the final design. This - paper discusses the system in detail with reference to related work in the field, - making specific reference to the system''s interactive potential both inside and - outside of a real-time performance context.' - address: 'Ann Arbor, Michigan' - author: Benjamin Carey - bibtex: "@inproceedings{Carey2012,\n abstract = {This paper presents the author's\ - \ derivations system, an interactive performance system for solo improvising instrumentalist.\ - \ The system makes use of a combination of real-time audio analysis, live sampling\ - \ and spectral re-synthesis to build a vocabulary of possible performative responses\ - \ to live instrumental input throughout an improvisatory performance. A form of\ - \ timbral matching is employed to form a link between the live performer and an\ - \ expanding database of musical materials. In addition, the system takes into\ - \ account the unique nature of the rehearsal/practice space in musical performance\ - \ through the implementation of performer-configurable cumulative rehearsal databases\ - \ into the final design. 
This paper discusses the system in detail with reference\ - \ to related work in the field, making specific reference to the system's interactive\ - \ potential both inside and outside of a real-time performance context.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Benjamin Carey},\n booktitle = {Proceedings\ + ID: lhayes2015 + abstract: 'Live music making can be understood as an enactive process, whereby musical + experiences are created through human action. This suggests that musical worlds + coevolve with their agents through repeated sensorimotor interactions with the + environment (where the music is being created), and at the same time cannot be + separated from their sociocultural contexts. This paper investigates this claim + by exploring ways in which technology, physiology, and context are bound up within + two different musical scenarios: live electronic musical performance; and person-centred + arts applications of NIMEs. In this paper I outline an ethnographic and phenomenological + enquiry into my experiences as both a performer of live electronic and electro-instrumental + music, as well as my extensive background in working with new technologies in + various therapeutic and person-centred artistic situations. This is in order to + explore the sociocultural and technological contexts in which these activities + take place. I propose that by understanding creative musical participation as + a highly contextualised practice, we may discover that the greatest impact of + rapidly developing technological resources is their ability to afford richly diverse, + personalised, and embodied forms of music making. I argue that this is applicable + over a wide range of musical communities.' + address: 'Baton Rouge, Louisiana, USA' + author: Lauren Hayes + bibtex: "@inproceedings{lhayes2015,\n abstract = {Live music making can be understood\ + \ as an enactive process, whereby musical experiences are created through human\ + \ action. 
This suggests that musical worlds coevolve with their agents through\ + \ repeated sensorimotor interactions with the environment (where the music is\ + \ being created), and at the same time cannot be separated from their sociocultural\ + \ contexts. This paper investigates this claim by exploring ways in which technology,\ + \ physiology, and context are bound up within two different musical scenarios:\ + \ live electronic musical performance; and person-centred arts applications of\ + \ NIMEs. In this paper I outline an ethnographic and phenomenological enquiry\ + \ into my experiences as both a performer of live electronic and electro-instrumental\ + \ music, as well as my extensive background in working with new technologies in\ + \ various therapeutic and person-centred artistic situations. This is in order\ + \ to explore the sociocultural and technological contexts in which these activities\ + \ take place. I propose that by understanding creative musical participation as\ + \ a highly contextualised practice, we may discover that the greatest impact of\ + \ rapidly developing technological resources is their ability to afford richly\ + \ diverse, personalised, and embodied forms of music making. 
I argue that this\ + \ is applicable over a wide range of musical communities.},\n address = {Baton\ + \ Rouge, Louisiana, USA},\n author = {Lauren Hayes},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178227},\n issn = {2220-4806},\n keywords = {Interactivity,\ - \ performance systems, improvisation},\n publisher = {University of Michigan},\n\ - \ title = {Designing for Cumulative Interactivity: The {\\_}derivations System},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_292.pdf},\n year = {2012}\n\ + \ doi = {10.5281/zenodo.1179082},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {299--302},\n publisher = {Louisiana\ + \ State University},\n title = {Enacting Musical Worlds: Common Approaches to\ + \ using NIMEs within both Performance and Person-Centred Arts Practices},\n url\ + \ = {http://www.nime.org/proceedings/2015/nime2015_227.pdf},\n year = {2015}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178227 + doi: 10.5281/zenodo.1179082 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Interactivity, performance systems, improvisation' - publisher: University of Michigan - title: 'Designing for Cumulative Interactivity: The _derivations System' - url: http://www.nime.org/proceedings/2012/nime2012_292.pdf - year: 2012 + month: May + pages: 299--302 + publisher: Louisiana State University + title: 'Enacting Musical Worlds: Common Approaches to using NIMEs within both Performance + and Person-Centred Arts Practices' + url: http://www.nime.org/proceedings/2015/nime2015_227.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Mayton2012 - abstract: 'We present Patchwerk, a networked synthesizer module with tightly coupled - web browser and tangible interfaces. 
Patchwerk connects to a pre-existing modular - synthesizer using the emerging cross-platform HTML5 WebSocket standard to enable - low-latency, high-bandwidth, concurrent control of analog signals by multiple - users. Online users control physical outputs on a custom-designed cabinet that - reflects their activity through a combination of motorized knobs and LEDs, and - streams the resultant audio. In a typical installation, a composer creates a complex - physical patch on the modular synth that exposes a set of analog and digital parameters - (knobs, buttons, toggles, and triggers) to the web-enabled cabinet. Both physically - present and online audiences can control those parameters, simultane-ously seeing - and hearing the results of each other''s actions. By enabling collaborative interaction - with a massive analog synthesizer, Patchwerk brings a broad audience closer to - a rare and historically important instrument. Patchwerk is available online at - http://synth.media.mit.edu.' - address: 'Ann Arbor, Michigan' - author: Brian Mayton and Gershon Dublon and Nicholas Joliat and Joseph A. Paradiso - bibtex: "@inproceedings{Mayton2012,\n abstract = {We present Patchwerk, a networked\ - \ synthesizer module with tightly coupled web browser and tangible interfaces.\ - \ Patchwerk connects to a pre-existing modular synthesizer using the emerging\ - \ cross-platform HTML5 WebSocket standard to enable low-latency, high-bandwidth,\ - \ concurrent control of analog signals by multiple users. Online users control\ - \ physical outputs on a custom-designed cabinet that reflects their activity through\ - \ a combination of motorized knobs and LEDs, and streams the resultant audio.\ - \ In a typical installation, a composer creates a complex physical patch on the\ - \ modular synth that exposes a set of analog and digital parameters (knobs, buttons,\ - \ toggles, and triggers) to the web-enabled cabinet. 
Both physically present and\ - \ online audiences can control those parameters, simultane-ously seeing and hearing\ - \ the results of each other's actions. By enabling collaborative interaction with\ - \ a massive analog synthesizer, Patchwerk brings a broad audience closer to a\ - \ rare and historically important instrument. Patchwerk is available online at\ - \ http://synth.media.mit.edu.},\n address = {Ann Arbor, Michigan},\n author =\ - \ {Brian Mayton and Gershon Dublon and Nicholas Joliat and Joseph A. Paradiso},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178345},\n issn = {2220-4806},\n\ - \ keywords = {Modular synthesizer, HTML5, tangible interface, collaborative musical\ - \ instrument},\n publisher = {University of Michigan},\n title = {Patchwork: Multi-User\ - \ Network Control of a Massive Modular Synthesizer},\n url = {http://www.nime.org/proceedings/2012/nime2012_293.pdf},\n\ - \ year = {2012}\n}\n" + ID: ncorreia2015 + abstract: 'We present a user-centered approach for prototyping tools for performance + with procedural sound and graphics, based on a hackathon. We also present the + resulting prototypes. These prototypes respond to a challenge originating from + earlier stages of the research: to combine ease-of-use with expressiveness and + visibility of interaction in tools for audiovisual performance. We aimed to convert + sketches, resulting from an earlier brainstorming session, into functional prototypes + in a short period of time. The outcomes include open-source software base released + online. The conclusions reflect on the methodology adopted and the effectiveness + of the prototypes.' + address: 'Baton Rouge, Louisiana, USA' + author: Nuno N. 
Correia and Atau Tanaka + bibtex: "@inproceedings{ncorreia2015,\n abstract = {We present a user-centered approach\ + \ for prototyping tools for performance with procedural sound and graphics, based\ + \ on a hackathon. We also present the resulting prototypes. These prototypes respond\ + \ to a challenge originating from earlier stages of the research: to combine ease-of-use\ + \ with expressiveness and visibility of interaction in tools for audiovisual performance.\ + \ We aimed to convert sketches, resulting from an earlier brainstorming session,\ + \ into functional prototypes in a short period of time. The outcomes include open-source\ + \ software base released online. The conclusions reflect on the methodology adopted\ + \ and the effectiveness of the prototypes.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {{Nuno N.} Correia and Atau Tanaka},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179044},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {319--321},\n publisher = {Louisiana\ + \ State University},\n title = {Prototyping Audiovisual Performance Tools: A Hackathon\ + \ Approach},\n url = {http://www.nime.org/proceedings/2015/nime2015_230.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/230/0230-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178345 + doi: 10.5281/zenodo.1179044 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Modular synthesizer, HTML5, tangible interface, collaborative musical - instrument' - publisher: University of Michigan - title: 'Patchwork: Multi-User Network Control of a Massive Modular Synthesizer' - url: http://www.nime.org/proceedings/2012/nime2012_293.pdf - year: 2012 + month: May + pages: 319--321 + publisher: Louisiana 
State University + title: 'Prototyping Audiovisual Performance Tools: A Hackathon Approach' + url: http://www.nime.org/proceedings/2015/nime2015_230.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/230/0230-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Trail2012a - abstract: 'Hyper-instruments extend traditional acoustic instruments with sensing - technologies that capture digitally subtle and sophisticated aspects of human - performance. They leverage the long training and skills of performers while simultaneously - providing rich possibilities for digital control. Many existing hyper-instruments - suffer from being one of a kind instruments that require invasive modifications - to the underlying acoustic instrument. In this paper we focus on the pitched percussion - family and describe a non-invasive sensing approach for extending them to hyper-instruments. - Our primary concern is to retain the technical integrity of the acoustic instrument - and sound production methods while being able to intuitively interface the computer. - This is accomplished by utilizing the Kinect sensor to track the position of the - mallets without any modification to the instrument which enables easy and cheap - replication of the pro-posed hyper-instrument extensions. In addition we describe - two approaches to higher-level gesture control that remove the need for additional - control devices such as foot pedals and fader boxes that are frequently used in - electro-acoustic performance. This gesture control integrates more organically - with the natural flow of playing the instrument providing user selectable control - over filter parameters, synthesis, sampling, sequencing, and improvisation using - a commer-cially available low-cost sensing apparatus.' - address: 'Ann Arbor, Michigan' - author: Shawn Trail and Michael Dean and Gabrielle Odowichuk and Tiago Fernandes - Tavares and Peter Driessen and W. 
Andrew Schloss and George Tzanetakis - bibtex: "@inproceedings{Trail2012a,\n abstract = {Hyper-instruments extend traditional\ - \ acoustic instruments with sensing technologies that capture digitally subtle\ - \ and sophisticated aspects of human performance. They leverage the long training\ - \ and skills of performers while simultaneously providing rich possibilities for\ - \ digital control. Many existing hyper-instruments suffer from being one of a\ - \ kind instruments that require invasive modifications to the underlying acoustic\ - \ instrument. In this paper we focus on the pitched percussion family and describe\ - \ a non-invasive sensing approach for extending them to hyper-instruments. Our\ - \ primary concern is to retain the technical integrity of the acoustic instrument\ - \ and sound production methods while being able to intuitively interface the computer.\ - \ This is accomplished by utilizing the Kinect sensor to track the position of\ - \ the mallets without any modification to the instrument which enables easy and\ - \ cheap replication of the pro-posed hyper-instrument extensions. In addition\ - \ we describe two approaches to higher-level gesture control that remove the need\ - \ for additional control devices such as foot pedals and fader boxes that are\ - \ frequently used in electro-acoustic performance. This gesture control integrates\ - \ more organically with the natural flow of playing the instrument providing user\ - \ selectable control over filter parameters, synthesis, sampling, sequencing,\ - \ and improvisation using a commer-cially available low-cost sensing apparatus.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Shawn Trail and Michael Dean and\ - \ Gabrielle Odowichuk and Tiago Fernandes Tavares and Peter Driessen and W. 
Andrew\ - \ Schloss and George Tzanetakis},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178435},\n\ - \ issn = {2220-4806},\n publisher = {University of Michigan},\n title = {Non-invasive\ - \ sensing and gesture control for pitched percussion hyper-instruments using the\ - \ Kinect},\n url = {http://www.nime.org/proceedings/2012/nime2012_297.pdf},\n\ - \ year = {2012}\n}\n" + ID: pbennett2015 + abstract: 'Resonant Bits proposes giving digital information resonant dynamic properties, + requiring skill and concerted effort for interaction. This paper applies resonant + interaction to musical control, exploring musical instruments that are controlled + through both purposeful and subconscious resonance. We detail three exploratory + prototypes, the first two illustrating the use of resonant gestures and the third + focusing on the detection and use of the ideomotor (subconscious micro-movement) + effect.' + address: 'Baton Rouge, Louisiana, USA' + author: Peter Bennett and Jarrod Knibbe and Florent Berthaut and Kirsten Cater + bibtex: "@inproceedings{pbennett2015,\n abstract = {Resonant Bits proposes giving\ + \ digital information resonant dynamic properties, requiring skill and concerted\ + \ effort for interaction. This paper applies resonant interaction to musical control,\ + \ exploring musical instruments that are controlled through both purposeful and\ + \ subconscious resonance. 
We detail three exploratory prototypes, the first two\ + \ illustrating the use of resonant gestures and the third focusing on the detection\ + \ and use of the ideomotor (subconscious micro-movement) effect.},\n address =\ + \ {Baton Rouge, Louisiana, USA},\n author = {Peter Bennett and Jarrod Knibbe and\ + \ Florent Berthaut and Kirsten Cater},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179020},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {176--177},\n publisher = {Louisiana State University},\n title\ + \ = {Resonant Bits: Controlling Digital Musical Instruments with Resonance and\ + \ the Ideomotor Effect},\n url = {http://www.nime.org/proceedings/2015/nime2015_235.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/235/0235-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178435 + doi: 10.5281/zenodo.1179020 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - publisher: University of Michigan - title: Non-invasive sensing and gesture control for pitched percussion hyper-instruments - using the Kinect - url: http://www.nime.org/proceedings/2012/nime2012_297.pdf - year: 2012 + month: May + pages: 176--177 + publisher: Louisiana State University + title: 'Resonant Bits: Controlling Digital Musical Instruments with Resonance and + the Ideomotor Effect' + url: http://www.nime.org/proceedings/2015/nime2015_235.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/235/0235-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Fyfe2012 - abstract: 'Message mapping between control interfaces and sound engines is an important - task that could benefit from tools that streamline development. 
A new Open Sound - Control (OSC) namespace called Nexus Data Exchange Format (NDEF) streamlines message - mapping by offering developers the ability to manage sound engines as network - nodes and to query those nodes for the messages in their OSC address spaces. By - using NDEF, developers will have an eas-ier time managing nodes and their messages, - especially for scenarios in which a single application or interface controls multiple - sound engines. NDEF is currently implemented in the JunctionBox interaction toolkit - but could easily be implemented in other toolkits.' - address: 'Ann Arbor, Michigan' - author: Lawrence Fyfe and Adam Tindale and Sheelagh Carpendale - bibtex: "@inproceedings{Fyfe2012,\n abstract = {Message mapping between control\ - \ interfaces and sound engines is an important task that could benefit from tools\ - \ that streamline development. A new Open Sound Control (OSC) namespace called\ - \ Nexus Data Exchange Format (NDEF) streamlines message mapping by offering developers\ - \ the ability to manage sound engines as network nodes and to query those nodes\ - \ for the messages in their OSC address spaces. 
By using NDEF, developers will\ - \ have an eas-ier time managing nodes and their messages, especially for scenarios\ - \ in which a single application or interface controls multiple sound engines.\ - \ NDEF is currently implemented in the JunctionBox interaction toolkit but could\ - \ easily be implemented in other toolkits.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Lawrence Fyfe and Adam Tindale and Sheelagh Carpendale},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178259},\n issn = {2220-4806},\n keywords\ - \ = {OSC, namespace, interaction, node},\n publisher = {University of Michigan},\n\ - \ title = {Node and Message Management with the JunctionBox Interaction Toolkit},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_299.pdf},\n year = {2012}\n\ - }\n" + ID: adecarvalhojunior2015 + abstract: 'The position of a participant during an installation is a valuable data. + One may want to start some sample when someone cross a line or stop the music + automatically whenever there is nobody inside the main area. GPS is a good solution + for localization, but it usually loses its capabilities inside buildings. This + paper discuss the use of Wi-Fi signal strength during an installation as an alternative + to GPS.' + address: 'Baton Rouge, Louisiana, USA' + author: 'de Carvalho Junior, Antonio Deusany' + bibtex: "@inproceedings{adecarvalhojunior2015,\n abstract = {The position of a participant\ + \ during an installation is a valuable data. One may want to start some sample\ + \ when someone cross a line or stop the music automatically whenever there is\ + \ nobody inside the main area. GPS is a good solution for localization, but it\ + \ usually loses its capabilities inside buildings. 
This paper discuss the use\ + \ of Wi-Fi signal strength during an installation as an alternative to GPS.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {de Carvalho Junior, Antonio\ + \ Deusany},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179052},\n editor\ + \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n pages = {40--41},\n\ + \ publisher = {Louisiana State University},\n title = {Indoor localization during\ + \ installations using {WiFi}},\n url = {http://www.nime.org/proceedings/2015/nime2015_237.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178259 + doi: 10.5281/zenodo.1179052 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'OSC, namespace, interaction, node' - publisher: University of Michigan - title: Node and Message Management with the JunctionBox Interaction Toolkit - url: http://www.nime.org/proceedings/2012/nime2012_299.pdf - year: 2012 + pages: 40--41 + publisher: Louisiana State University + title: Indoor localization during installations using WiFi + url: http://www.nime.org/proceedings/2015/nime2015_237.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Castet2012 - abstract: 'This paper presents ongoing work on methods dedicated torelations between - composers and performers in the contextof experimental music. The computer music - community hasover the last decade paid a strong interest on various kindsof gestural - interfaces to control sound synthesis processes.The mapping between gesture and - sound parameters hasspecially been investigated in order to design the most relevant - schemes of sonic interaction. In fact, this relevanceresults in an aesthetic choice - that encroaches on the process of composition. 
This work proposes to examine therelations - between composers and performers in the contextof the new interfaces for musical - expression. It aims to define a theoretical and methodological framework clarifyingthese - relations. In this project, this paper is the first experimental study about the - use of physical models as gesturalmaps for the production of textural sounds.' - address: 'Ann Arbor, Michigan' - author: Julien Castet - bibtex: "@inproceedings{Castet2012,\n abstract = {This paper presents ongoing work\ - \ on methods dedicated torelations between composers and performers in the contextof\ - \ experimental music. The computer music community hasover the last decade paid\ - \ a strong interest on various kindsof gestural interfaces to control sound synthesis\ - \ processes.The mapping between gesture and sound parameters hasspecially been\ - \ investigated in order to design the most relevant schemes of sonic interaction.\ - \ In fact, this relevanceresults in an aesthetic choice that encroaches on the\ - \ process of composition. This work proposes to examine therelations between composers\ - \ and performers in the contextof the new interfaces for musical expression. 
It\ - \ aims to define a theoretical and methodological framework clarifyingthese relations.\ - \ In this project, this paper is the first experimental study about the use of\ - \ physical models as gesturalmaps for the production of textural sounds.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Julien Castet},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178231},\n issn = {2220-4806},\n keywords = {Simulation,\ - \ Interaction, Sonic textures},\n publisher = {University of Michigan},\n title\ - \ = {Performing experimental music by physical simulation},\n url = {http://www.nime.org/proceedings/2012/nime2012_30.pdf},\n\ - \ year = {2012}\n}\n" + ID: cmartin2015 + abstract: We present and evaluate a novel interface for tracking ensemble performances + on touch-screens. The system uses a Random Forest classifier to extract touch-screen + gestures and transition matrix statistics. It analyses the resulting gesture-state + sequences across an ensemble of performers. A series of specially designed iPad + apps respond to this real-time analysis of free-form gestural performances with + calculated modifications to their musical interfaces. We describe our system and + evaluate it through cross-validation and profiling as well as concert experience. + address: 'Baton Rouge, Louisiana, USA' + author: Charles Martin and Henry Gardner and Ben Swift + bibtex: "@inproceedings{cmartin2015,\n abstract = {We present and evaluate a novel\ + \ interface for tracking ensemble performances on touch-screens. The system uses\ + \ a Random Forest classifier to extract touch-screen gestures and transition matrix\ + \ statistics. It analyses the resulting gesture-state sequences across an ensemble\ + \ of performers. 
A series of specially designed iPad apps respond to this real-time\ + \ analysis of free-form gestural performances with calculated modifications to\ + \ their musical interfaces. We describe our system and evaluate it through cross-validation\ + \ and profiling as well as concert experience.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Charles Martin and Henry Gardner and Ben Swift},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179130},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {359--364},\n\ + \ publisher = {Louisiana State University},\n title = {Tracking Ensemble Performance\ + \ on Touch-Screens with Gesture Classification and Transition Matrices},\n url\ + \ = {http://www.nime.org/proceedings/2015/nime2015_242.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178231 + doi: 10.5281/zenodo.1179130 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Simulation, Interaction, Sonic textures' - publisher: University of Michigan - title: Performing experimental music by physical simulation - url: http://www.nime.org/proceedings/2012/nime2012_30.pdf - year: 2012 + month: May + pages: 359--364 + publisher: Louisiana State University + title: Tracking Ensemble Performance on Touch-Screens with Gesture Classification + and Transition Matrices + url: http://www.nime.org/proceedings/2015/nime2015_242.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Allison2012 - abstract: "Aural -of or relateing to the ear or hearing\nAura -an invisible breath,\ - \ emanation, or radiation AR -Augmented Reality\nAuRal is an environmental audio\ - \ system in which individual participants form ad hoc ensembles based on geolocation\ - \ and affect the overall sound of the music associated with the location 
that\ - \ they are in.\nThe AuRal environment binds physical location and the choices\ - \ of multiple, simultaneous performers to act as the generative force of music\ - \ tied to the region. Through a mobile device interface, musical participants,\ - \ or agents, have a degree of input into the generated music essentially defining\ - \ the sound of a given region. The audio landscape is superimposed onto the physical\ - \ one. The resultant musical experience is not tied simply to the passage of time,\ - \ but through the incorporation of participants over time and spatial proximity,\ - \ it becomes an aural location as much as a piece of music. As a result, walking\ - \ through the same location at different times results in unique collaborative\ - \ listening experiences." - address: 'Ann Arbor, Michigan' - author: Jesse Allison and Christian Dell - bibtex: "@inproceedings{Allison2012,\n abstract = {Aural -of or relateing to the\ - \ ear or hearing\nAura -an invisible breath, emanation, or radiation AR -Augmented\ - \ Reality\nAuRal is an environmental audio system in which individual participants\ - \ form ad hoc ensembles based on geolocation and affect the overall sound of the\ - \ music associated with the location that they are in.\nThe AuRal environment\ - \ binds physical location and the choices of multiple, simultaneous performers\ - \ to act as the generative force of music tied to the region. Through a mobile\ - \ device interface, musical participants, or agents, have a degree of input into\ - \ the generated music essentially defining the sound of a given region. The audio\ - \ landscape is superimposed onto the physical one. The resultant musical experience\ - \ is not tied simply to the passage of time, but through the incorporation of\ - \ participants over time and spatial proximity, it becomes an aural location as\ - \ much as a piece of music. 
As a result, walking through the same location at\ - \ different times results in unique collaborative listening experiences.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Jesse Allison and Christian Dell},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178203},\n issn = {2220-4806},\n keywords\ - \ = {AuRal, sonic environment, distributed performance system, mobile music, android,\ - \ ruby on rails, supercollider},\n publisher = {University of Michigan},\n title\ - \ = {AuRal: A Mobile Interactive System for Geo-Locative Audio Synthesis},\n url\ - \ = {http://www.nime.org/proceedings/2012/nime2012_301.pdf},\n year = {2012}\n\ + ID: byuksel2015 + abstract: 'We present a novel brain-computer interface (BCI) integrated with a musical + instrument that adapts implicitly (with no extra effort from user) to users'' + changing cognitive state during musical improvisation. Most previous musical BCI + systems use either a mapping of brainwaves to create audio signals or use explicit + brain signals to control some aspect of the music. Such systems do not take advantage + of higher level semantically meaningful brain data which could be used in adaptive + systems or without detracting from the attention of the user. We present a new + type of real-time BCI that assists users in musical improvisation by adapting + to users'' measured cognitive workload implicitly. Our system advances the state + of the art in this area in three ways: 1) We demonstrate that cognitive workload + can be classified in real-time while users play the piano using functional near-infrared + spectroscopy. 2) We build a real-time, implicit system using this brain signal + that musically adapts to what users are playing. 3) We demonstrate that users + prefer this novel musical instrument over other conditions and report that they + feel more creative.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Beste Filiz Yuksel and Daniel Afergan and Evan Peck and Garth Griffin and + Lane Harrison and Nick Chen and Remco Chang and Robert Jacob + bibtex: "@inproceedings{byuksel2015,\n abstract = {We present a novel brain-computer\ + \ interface (BCI) integrated with a musical instrument that adapts implicitly\ + \ (with no extra effort from user) to users' changing cognitive state during musical\ + \ improvisation. Most previous musical BCI systems use either a mapping of brainwaves\ + \ to create audio signals or use explicit brain signals to control some aspect\ + \ of the music. Such systems do not take advantage of higher level semantically\ + \ meaningful brain data which could be used in adaptive systems or without detracting\ + \ from the attention of the user. We present a new type of real-time BCI that\ + \ assists users in musical improvisation by adapting to users' measured cognitive\ + \ workload implicitly. Our system advances the state of the art in this area in\ + \ three ways: 1) We demonstrate that cognitive workload can be classified in real-time\ + \ while users play the piano using functional near-infrared spectroscopy. 2) We\ + \ build a real-time, implicit system using this brain signal that musically adapts\ + \ to what users are playing. 
3) We demonstrate that users prefer this novel musical\ + \ instrument over other conditions and report that they feel more creative.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {{Beste Filiz} Yuksel and\ + \ Daniel Afergan and Evan Peck and Garth Griffin and Lane Harrison and Nick Chen\ + \ and Remco Chang and Robert Jacob},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1181418},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {136--139},\n publisher = {Louisiana State University},\n title\ + \ = {BRAAHMS: A Novel Adaptive Musical Interface Based on Users' Cognitive State},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_243.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/243/0243-file1.mp4},\n year = {2015}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178203 + doi: 10.5281/zenodo.1181418 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'AuRal, sonic environment, distributed performance system, mobile music, - android, ruby on rails, supercollider' - publisher: University of Michigan - title: 'AuRal: A Mobile Interactive System for Geo-Locative Audio Synthesis' - url: http://www.nime.org/proceedings/2012/nime2012_301.pdf - year: 2012 + month: May + pages: 136--139 + publisher: Louisiana State University + title: 'BRAAHMS: A Novel Adaptive Musical Interface Based on Users'' Cognitive State' + url: http://www.nime.org/proceedings/2015/nime2015_243.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/243/0243-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Roberts2012 - abstract: 'Designing mobile interfaces for computer-based musical performance is - generally a time-consuming task that can be exasperating for performers. 
Instead - of being able to experiment freely with physical interfaces'' affordances, performers - must spend time and attention on non-musical tasks including network configuration, - development environments for the mobile devices, defining OSC address spaces, - and handling the receipt of OSC in the environment that will control and produce - sound. Our research seeks to overcome such obstacles by minimizing the code needed - to both generate and read the output of interfaces on mobile devices. For iOS - and Android devices, our implementation extends the application Control to use - a simple set of OSC messages to define interfaces and automatically route output. - On the desktop, our implementations in Max/MSP/Jitter, LuaAV, and Su-perCollider - allow users to create mobile widgets mapped to sonic parameters with a single - line of code. We believe the fluidity of our approach will encourage users to - incorporate mobile devices into their everyday performance practice.' - address: 'Ann Arbor, Michigan' - author: Charles Roberts and Graham Wakefield and Matt Wright - bibtex: "@inproceedings{Roberts2012,\n abstract = {Designing mobile interfaces for\ - \ computer-based musical performance is generally a time-consuming task that can\ - \ be exasperating for performers. Instead of being able to experiment freely with\ - \ physical interfaces' affordances, performers must spend time and attention on\ - \ non-musical tasks including network configuration, development environments\ - \ for the mobile devices, defining OSC address spaces, and handling the receipt\ - \ of OSC in the environment that will control and produce sound. Our research\ - \ seeks to overcome such obstacles by minimizing the code needed to both generate\ - \ and read the output of interfaces on mobile devices. For iOS and Android devices,\ - \ our implementation extends the application Control to use a simple set of OSC\ - \ messages to define interfaces and automatically route output. 
On the desktop,\ - \ our implementations in Max/MSP/Jitter, LuaAV, and Su-perCollider allow users\ - \ to create mobile widgets mapped to sonic parameters with a single line of code.\ - \ We believe the fluidity of our approach will encourage users to incorporate\ - \ mobile devices into their everyday performance practice.},\n address = {Ann\ - \ Arbor, Michigan},\n author = {Charles Roberts and Graham Wakefield and Matt\ - \ Wright},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1180581},\n issn = {2220-4806},\n\ - \ keywords = {NIME, OSC, Zeroconf, iOS, Android, Max/MSP/Jitter, LuaAV, SuperCollider,\ - \ Mobile},\n publisher = {University of Michigan},\n title = {Mobile Controls\ - \ On-The-Fly: An Abstraction for Distributed {NIME}s},\n url = {http://www.nime.org/proceedings/2012/nime2012_303.pdf},\n\ - \ year = {2012}\n}\n" + ID: ahindle2015 + abstract: ' Cloud computing potentially ushers in a new era of computer music performance + with exceptionally large computer music instruments consisting of 10s to 100s + of virtual machines which we propose to call a `cloud-orchestra''. Cloud computing + allows for the rapid provisioning of resources, but to deploy such a complicated + and interconnected network of software synthesizers in the cloud requires a lot + of manual work, system administration knowledge, and developer/operator skills. + This is a barrier to computer musicians whose goal is to produce and perform music, + and not to administer 100s of computers. This work discusses the issues facing + cloud-orchestra deployment and offers an abstract solution and a concrete implementation. + The abstract solution is to generate cloud-orchestra deployment plans by allowing + computer musicians to model their network of synthesizers and to describe their + resources. 
A model optimizer will compute near-optimal deployment plans to synchronize, + deploy, and orchestrate the start-up of a complex network of synthesizers deployed + to many computers. This model driven development approach frees computer musicians + from much of the hassle of deployment and allocation. Computer musicians can focus + on the configuration of musical components and leave the resource allocation up + to the modelling software to optimize.' + address: 'Baton Rouge, Louisiana, USA' + author: Abram Hindle + bibtex: "@inproceedings{ahindle2015,\n abstract = { Cloud computing potentially\ + \ ushers in a new era of computer music performance with exceptionally large computer\ + \ music instruments consisting of 10s to 100s of virtual machines which we propose\ + \ to call a `cloud-orchestra'. Cloud computing allows for the rapid provisioning\ + \ of resources, but to deploy such a complicated and interconnected network of\ + \ software synthesizers in the cloud requires a lot of manual work, system administration\ + \ knowledge, and developer/operator skills. This is a barrier to computer musicians\ + \ whose goal is to produce and perform music, and not to administer 100s of computers.\ + \ This work discusses the issues facing cloud-orchestra deployment and offers\ + \ an abstract solution and a concrete implementation. The abstract solution is\ + \ to generate cloud-orchestra deployment plans by allowing computer musicians\ + \ to model their network of synthesizers and to describe their resources. A model\ + \ optimizer will compute near-optimal deployment plans to synchronize, deploy,\ + \ and orchestrate the start-up of a complex network of synthesizers deployed to\ + \ many computers. This model driven development approach frees computer musicians\ + \ from much of the hassle of deployment and allocation. 
Computer musicians can\ + \ focus on the configuration of musical components and leave the resource allocation\ + \ up to the modelling software to optimize.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Abram Hindle},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179090},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {121--125},\n publisher = {Louisiana State University},\n title\ + \ = {Orchestrating Your Cloud Orchestra},\n url = {http://www.nime.org/proceedings/2015/nime2015_244.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/244/0244-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180581 + doi: 10.5281/zenodo.1179090 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, OSC, Zeroconf, iOS, Android, Max/MSP/Jitter, LuaAV, SuperCollider, - Mobile' - publisher: University of Michigan - title: 'Mobile Controls On-The-Fly: An Abstraction for Distributed NIMEs' - url: http://www.nime.org/proceedings/2012/nime2012_303.pdf - year: 2012 + month: May + pages: 121--125 + publisher: Louisiana State University + title: Orchestrating Your Cloud Orchestra + url: http://www.nime.org/proceedings/2015/nime2015_244.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/244/0244-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Harriman2012 - abstract: 'This paper provides an overview of a new method for approaching beat - sequencing. As we have come to know them drum machines provide means to loop rhythmic - patterns over a certain interval. Usually with the option to specify different - beat divisions. What I developed and propose for consideration is a rethinking - of the traditional drum machine confines. 
The Sinkapater is an untethered beat - sequencer in that the beat division, and the loop length can be arbitrarily modified - for each track. The result is the capability to create complex syncopated patterns - which evolve over time as different tracks follow their own loop rate. To keep - cohesion all channels can be locked to a master channel forcing a loop to be an - integer number of "Master Beats". Further a visualization mode enables exploring - the patterns in another new way. Using synchronized OpenGL a 3-Dimensional environment - visualizes the beats as droplets falling from faucets of varying heights determined - by the loop length. Waves form in the bottom as beats splash into the virtual - "sink". By combining compelling visuals and a new approach to sequencing a new - way of exploring beats and experiencing music has been created.' - address: 'Ann Arbor, Michigan' - author: Jiffer Harriman - bibtex: "@inproceedings{Harriman2012,\n abstract = {This paper provides an overview\ - \ of a new method for approaching beat sequencing. As we have come to know them\ - \ drum machines provide means to loop rhythmic patterns over a certain interval.\ - \ Usually with the option to specify different beat divisions. What I developed\ - \ and propose for consideration is a rethinking of the traditional drum machine\ - \ confines. The Sinkapater is an untethered beat sequencer in that the beat division,\ - \ and the loop length can be arbitrarily modified for each track. The result is\ - \ the capability to create complex syncopated patterns which evolve over time\ - \ as different tracks follow their own loop rate. To keep cohesion all channels\ - \ can be locked to a master channel forcing a loop to be an integer number of\ - \ \"Master Beats\". Further a visualization mode enables exploring the patterns\ - \ in another new way. 
Using synchronized OpenGL a 3-Dimensional environment visualizes\ - \ the beats as droplets falling from faucets of varying heights determined by\ - \ the loop length. Waves form in the bottom as beats splash into the virtual \"\ - sink\". By combining compelling visuals and a new approach to sequencing a new\ - \ way of exploring beats and experiencing music has been created.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Jiffer Harriman},\n booktitle = {Proceedings\ + ID: apiepenbrink2015 + abstract: 'We present the Bistable Resonator Cymbal, a type of actuated acoustic + instrument which augments a conventional cymbal with feedback-induced resonance. + The system largely employs standard, commercially-available sound reinforcement + and signal processing hardware and software, and no permanent modifications to + the cymbal are needed. Several types of cymbals may be used, each capable of producing + a number of physical audio effects. Cymbal acoustics, implementation, stability + issues, interaction behavior, and sonic results are discussed.' + address: 'Baton Rouge, Louisiana, USA' + author: Andrew Piepenbrink and Matthew Wright + bibtex: "@inproceedings{apiepenbrink2015,\n abstract = {We present the Bistable\ + \ Resonator Cymbal, a type of actuated acoustic instrument which augments a conventional\ + \ cymbal with feedback-induced resonance. The system largely employs standard,\ + \ commercially-available sound reinforcement and signal processing hardware and\ + \ software, and no permanent modifications to the cymbal are needed. Several types\ + \ of cymbals may be used, each capable of producing a number of physical audio\ + \ effects. 
Cymbal acoustics, implementation, stability issues, interaction behavior,\ + \ and sonic results are discussed.},\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Andrew Piepenbrink and Matthew Wright},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178277},\n issn = {2220-4806},\n keywords = {NIME, proceedings,\ - \ drum machine, sequencer, visualization},\n publisher = {University of Michigan},\n\ - \ title = {Sinkapater -An Untethered Beat Sequencer},\n url = {http://www.nime.org/proceedings/2012/nime2012_308.pdf},\n\ - \ year = {2012}\n}\n" + \ doi = {10.5281/zenodo.1179154},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {227--230},\n publisher = {Louisiana\ + \ State University},\n title = {The Bistable Resonator Cymbal: An Actuated Acoustic\ + \ Instrument Displaying Physical Audio Effects},\n url = {http://www.nime.org/proceedings/2015/nime2015_245.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/245/0245-file1.mov},\n urlsuppl2\ + \ = {http://www.nime.org/proceedings/2015/245/0245-file2.zip},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178277 + doi: 10.5281/zenodo.1179154 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, proceedings, drum machine, sequencer, visualization' - publisher: University of Michigan - title: Sinkapater -An Untethered Beat Sequencer - url: http://www.nime.org/proceedings/2012/nime2012_308.pdf - year: 2012 + month: May + pages: 227--230 + publisher: Louisiana State University + title: 'The Bistable Resonator Cymbal: An Actuated Acoustic Instrument Displaying + Physical Audio Effects' + url: http://www.nime.org/proceedings/2015/nime2015_245.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/245/0245-file1.mov + urlsuppl2: 
http://www.nime.org/proceedings/2015/245/0245-file2.zip + year: 2015 - ENTRYTYPE: inproceedings - ID: Lee2012c - abstract: "This research aims to improve the correspondence between music and dance,\ - \ and explores the use of human respiration pattern for musical applications with\ - \ focus on the motional aspect of breathing. While respiration is frequently considered\ - \ as an indicator of the metabolic state of human body that contains meaningful\ - \ information for medicine or psychology, motional aspect of respiration has been\ - \ relatively unnoticed in spite of its strong correlation with muscles and the\ - \ brain.\nThis paper introduces an interactive system to control music playback\ - \ for dance performances based on the respiration pattern of the dancer. A wireless\ - \ wearable sensor device detects the dancer's respiration, which is then utilized\ - \ to modify the dynamic of music. Two different respiration-dynamic mappings were\ - \ designed and evaluated through public performances and private tests by professional\ - \ choreographers. Results from this research suggest a new conceptual approach\ - \ to musical applications of respiration based on the technical characteristics\ - \ of music and dance." 
- address: 'Ann Arbor, Michigan' - author: Jeong-seob Lee and Woon Seung Yeo - bibtex: "@inproceedings{Lee2012c,\n abstract = {This research aims to improve the\ - \ correspondence between music and dance, and explores the use of human respiration\ - \ pattern for musical applications with focus on the motional aspect of breathing.\ - \ While respiration is frequently considered as an indicator of the metabolic\ - \ state of human body that contains meaningful information for medicine or psychology,\ - \ motional aspect of respiration has been relatively unnoticed in spite of its\ - \ strong correlation with muscles and the brain.\nThis paper introduces an interactive\ - \ system to control music playback for dance performances based on the respiration\ - \ pattern of the dancer. A wireless wearable sensor device detects the dancer's\ - \ respiration, which is then utilized to modify the dynamic of music. Two different\ - \ respiration-dynamic mappings were designed and evaluated through public performances\ - \ and private tests by professional choreographers. 
Results from this research\ - \ suggest a new conceptual approach to musical applications of respiration based\ - \ on the technical characteristics of music and dance.},\n address = {Ann Arbor,\ - \ Michigan},\n author = {Jeong-seob Lee and Woon Seung Yeo},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178311},\n issn = {2220-4806},\n keywords = {Music, dance,\ - \ respiration, correspondence, wireless interface, interactive performance},\n\ - \ publisher = {University of Michigan},\n title = {Real-time Modification of Music\ - \ with Dancer's Respiration Pattern},\n url = {http://www.nime.org/proceedings/2012/nime2012_309.pdf},\n\ - \ year = {2012}\n}\n" + ID: abergsland2015 + abstract: 'The authors have developed a new hardware/software device for persons + with disabilities (the MotionComposer), and in the process created a number of + interactive dance pieces for non-disabled professional dancers. The paper briefly + describes the hardware and motion tracking software of the device before going + into more detail concerning the mapping strategies and sound design applied to + three interactive dance pieces. The paper concludes by discussing a particular + philosophy championing transparency and intuitiveness (clear causality) in the + interactive relationship, which the authors apply to both the device and to the + pieces that came from it.' + address: 'Baton Rouge, Louisiana, USA' + author: Andreas Bergsland and Robert Wechsler + bibtex: "@inproceedings{abergsland2015,\n abstract = {The authors have developed\ + \ a new hardware/software device for persons with disabilities (the MotionComposer),\ + \ and in the process created a number of interactive dance pieces for non-disabled\ + \ professional dancers. 
The paper briefly describes the hardware and motion tracking\ + \ software of the device before going into more detail concerning the mapping\ + \ strategies and sound design applied to three interactive dance pieces. The paper\ + \ concludes by discussing a particular philosophy championing transparency and\ + \ intuitiveness (clear causality) in the interactive relationship, which the authors\ + \ apply to both the device and to the pieces that came from it.},\n address =\ + \ {Baton Rouge, Louisiana, USA},\n author = {Andreas Bergsland and Robert Wechsler},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179024},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {20--23},\n\ + \ publisher = {Louisiana State University},\n title = {Composing Interactive Dance\ + \ Pieces for the MotionComposer, a device for Persons with Disabilities},\n url\ + \ = {http://www.nime.org/proceedings/2015/nime2015_246.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/246/0246-file2.mp4},\n\ + \ urlsuppl2 = {http://www.nime.org/proceedings/2015/246/La_Danse_II.mp4},\n urlsuppl3\ + \ = {http://www.nime.org/proceedings/2015/246/SongShanMountain-SD.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178311 + doi: 10.5281/zenodo.1179024 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Music, dance, respiration, correspondence, wireless interface, interactive - performance' - publisher: University of Michigan - title: Real-time Modification of Music with Dancer's Respiration Pattern - url: http://www.nime.org/proceedings/2012/nime2012_309.pdf - year: 2012 + month: May + pages: 20--23 + publisher: Louisiana State University + title: 'Composing Interactive Dance Pieces for the MotionComposer, a device for + Persons with 
Disabilities' + url: http://www.nime.org/proceedings/2015/nime2015_246.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/246/0246-file2.mp4 + urlsuppl2: http://www.nime.org/proceedings/2015/246/La_Danse_II.mp4 + urlsuppl3: http://www.nime.org/proceedings/2015/246/SongShanMountain-SD.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: dAlessandro2012 - abstract: 'We present the integration of two musical interfaces into a new music-making - system that seeks to capture the expe-rience of a choir and bring it into the - mobile space. This system relies on three pervasive technologies that each support - a different part of the musical experience. First, the mobile device application - for performing with an artificial voice, called ChoirMob. Then, a central composing - and conducting application running on a local interactive display, called Vuzik. - Finally, a network protocol to synchronize the two. ChoirMob musicians can perform - music together at any location where they can connect to a Vuzik central conducting - device displaying a composed piece of music. We explored this system by creating - a chamber choir of ChoirMob performers, consisting of both experienced musicians - and novices, that performed in rehearsals and live concert scenarios with music - composed using the Vuzik interface.' - address: 'Ann Arbor, Michigan' - author: Nicolas d'Alessandro and Aura Pon and Johnty Wang and David Eagle and Ehud - Sharlin and Sidney Fels - bibtex: "@inproceedings{dAlessandro2012,\n abstract = {We present the integration\ - \ of two musical interfaces into a new music-making system that seeks to capture\ - \ the expe-rience of a choir and bring it into the mobile space. This system relies\ - \ on three pervasive technologies that each support a different part of the musical\ - \ experience. First, the mobile device application for performing with an artificial\ - \ voice, called ChoirMob. 
Then, a central composing and conducting application\ - \ running on a local interactive display, called Vuzik. Finally, a network protocol\ - \ to synchronize the two. ChoirMob musicians can perform music together at any\ - \ location where they can connect to a Vuzik central conducting device displaying\ - \ a composed piece of music. We explored this system by creating a chamber choir\ - \ of ChoirMob performers, consisting of both experienced musicians and novices,\ - \ that performed in rehearsals and live concert scenarios with music composed\ - \ using the Vuzik interface.},\n address = {Ann Arbor, Michigan},\n author = {Nicolas\ - \ d'Alessandro and Aura Pon and Johnty Wang and David Eagle and Ehud Sharlin and\ - \ Sidney Fels},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178199},\n\ - \ issn = {2220-4806},\n keywords = {singing synthesis, mobile music, interactive\ - \ display, interface design, OSC, ChoirMob, Vuzik, social music, choir},\n publisher\ - \ = {University of Michigan},\n title = {A Digital Mobile Choir: Joining Two Interfaces\ - \ towards Composing and Performing Collaborative Mobile Music},\n url = {http://www.nime.org/proceedings/2012/nime2012_310.pdf},\n\ - \ year = {2012}\n}\n" + ID: bmccloskey2015 + abstract: 'Inclusive music activities for people with physical disabilities commonly + emphasise facilitated processes, based both on constrained gestural capabilities, + and on the simplicity of the available interfaces. Inclusive music processes employ + consumer controllers, computer access tools and/or specialized digital musical + instruments (DMIs). The first category reveals a design ethos identified by the + authors as artefact multiplication -- many sliders, buttons, dials and menu layers; + the latter types offer ergonomic accessibility through artefact magnification. 
+ We present a prototype DMI that eschews artefact multiplication in pursuit of + enhanced real time creative independence. We reconceptualise the universal click-drag + interaction model via a single sensor type, which affords both binary and continuous + performance control. Accessibility is optimized via a familiar interaction model + and through customized ergonomics, but it is the mapping strategy that emphasizes + transparency and sophistication in the hierarchical correspondences between the + available gesture dimensions and expressive musical cues. Through a participatory + and progressive methodology we identify an ostensibly simple targeting gesture + rich in dynamic and reliable features: (1) contact location; (2) contact duration; + (3) momentary force; (4) continuous force, and; (5) dyad orientation. These features + are mapped onto dynamic musical cues, most notably via new mappings for vibrato + and arpeggio execution. ' + address: 'Baton Rouge, Louisiana, USA' + author: Brendan McCloskey and Brian Bridges and Frank Lyons + bibtex: "@inproceedings{bmccloskey2015,\n abstract = {Inclusive music activities\ + \ for people with physical disabilities commonly emphasise facilitated processes,\ + \ based both on constrained gestural capabilities, and on the simplicity of the\ + \ available interfaces. Inclusive music processes employ consumer controllers,\ + \ computer access tools and/or specialized digital musical instruments (DMIs).\ + \ The first category reveals a design ethos identified by the authors as artefact\ + \ multiplication -- many sliders, buttons, dials and menu layers; the latter types\ + \ offer ergonomic accessibility through artefact magnification. We present a prototype\ + \ DMI that eschews artefact multiplication in pursuit of enhanced real time creative\ + \ independence. We reconceptualise the universal click-drag interaction model\ + \ via a single sensor type, which affords both binary and continuous performance\ + \ control. 
Accessibility is optimized via a familiar interaction model and through\ + \ customized ergonomics, but it is the mapping strategy that emphasizes transparency\ + \ and sophistication in the hierarchical correspondences between the available\ + \ gesture dimensions and expressive musical cues. Through a participatory and\ + \ progressive methodology we identify an ostensibly simple targeting gesture rich\ + \ in dynamic and reliable features: (1) contact location; (2) contact duration;\ + \ (3) momentary force; (4) continuous force, and; (5) dyad orientation. These\ + \ features are mapped onto dynamic musical cues, most notably via new mappings\ + \ for vibrato and arpeggio execution. },\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Brendan McCloskey and Brian Bridges and Frank Lyons},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179132},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {24--27},\n\ + \ publisher = {Louisiana State University},\n title = {Accessibility and dimensionalty:\ + \ enhanced real-time creative independence for digital musicians with quadriplegic\ + \ cerebral palsy},\n url = {http://www.nime.org/proceedings/2015/nime2015_250.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/250/0250-file1.zip},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178199 + doi: 10.5281/zenodo.1179132 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'singing synthesis, mobile music, interactive display, interface design, - OSC, ChoirMob, Vuzik, social music, choir' - publisher: University of Michigan - title: 'A Digital Mobile Choir: Joining Two Interfaces towards Composing and Performing - Collaborative Mobile Music' - url: http://www.nime.org/proceedings/2012/nime2012_310.pdf 
- year: 2012 + month: May + pages: 24--27 + publisher: Louisiana State University + title: 'Accessibility and dimensionalty: enhanced real-time creative independence + for digital musicians with quadriplegic cerebral palsy' + url: http://www.nime.org/proceedings/2015/nime2015_250.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/250/0250-file1.zip + year: 2015 - ENTRYTYPE: inproceedings - ID: Bukvic2012 - abstract: 'In the following paper we propose a new tiered granularity approach to - developing modules or abstractions in the Pd-L2Ork visual multimedia programming - environment with the specific goal of devising creative environments that scale - their educational scope and difficulty to encompass several stages within the - context of primary and secondary (K-12) education. As part of a preliminary study, - the team designed modules targeting 4th and 5th grade students, the primary focus - being exploration of creativity and collaborative learning. The resulting environment - infrastructure -coupled with the Boys & Girls Club of Southwest Virginia Satellite - Linux Laptop Orchestra -offers opportunities for students to design and build - original instruments, master them through a series of rehearsals, and ultimately - utilize them as part of an ensemble in a performance of a predetermined piece - whose parameters are coordinated by instructor through an embedded networked module. - The ensuing model will serve for the assessment and development of a stronger - connection with content-area standards and the development of creative thinking - and collaboration skills.' 
- address: 'Ann Arbor, Michigan' - author: Ivica Bukvic and Liesl Baum and Bennett Layman and Kendall Woodard - bibtex: "@inproceedings{Bukvic2012,\n abstract = {In the following paper we propose\ - \ a new tiered granularity approach to developing modules or abstractions in the\ - \ Pd-L2Ork visual multimedia programming environment with the specific goal of\ - \ devising creative environments that scale their educational scope and difficulty\ - \ to encompass several stages within the context of primary and secondary (K-12)\ - \ education. As part of a preliminary study, the team designed modules targeting\ - \ 4th and 5th grade students, the primary focus being exploration of creativity\ - \ and collaborative learning. The resulting environment infrastructure -coupled\ - \ with the Boys & Girls Club of Southwest Virginia Satellite Linux Laptop Orchestra\ - \ -offers opportunities for students to design and build original instruments,\ - \ master them through a series of rehearsals, and ultimately utilize them as part\ - \ of an ensemble in a performance of a predetermined piece whose parameters are\ - \ coordinated by instructor through an embedded networked module. 
The ensuing\ - \ model will serve for the assessment and development of a stronger connection\ - \ with content-area standards and the development of creative thinking and collaboration\ - \ skills.},\n address = {Ann Arbor, Michigan},\n author = {Ivica Bukvic and Liesl\ - \ Baum and Bennett Layman and Kendall Woodard},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178223},\n issn = {2220-4806},\n keywords = {Granular, Learning\ - \ Objects, K-12, Education, L2Ork, PdL2Ork},\n publisher = {University of Michigan},\n\ - \ title = {Granular Learning Objects for Instrument Design and Collaborative Performance\ - \ in K-12 Education},\n url = {http://www.nime.org/proceedings/2012/nime2012_315.pdf},\n\ - \ year = {2012}\n}\n" + ID: anath2015 + abstract: 'In this paper the authors describe the VESBALL, which is a ball-shaped + musical interface designed for group music therapy. Therapy sessions take the + form of ``musical ensembles'''' comprised of individuals with Autism Spectrum + Disorder (ASD), typically led by one or more certified music therapists. VESBALL + had been developed in close consultation with therapists, clients, and other stakeholders, + and had undergone several phases of trials at a music therapy facility over a + period of 6 months. VESBALL has an advantage over other related work in terms + of its robustness, ease of operation and setup (for clients and therapists), sound + source integration, and low cost of production. The authors hope VESBALL would + positively impact the conditions of individuals with ASD, and pave way for new + research in custom-designed NIME for communities with specific therapeutic needs.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Ajit Nath and Samson Young + bibtex: "@inproceedings{anath2015,\n abstract = {In this paper the authors describe\ + \ the VESBALL, which is a ball-shaped musical interface designed for group music\ + \ therapy. Therapy sessions take the form of ``musical ensembles'' comprised of\ + \ individuals with Autism Spectrum Disorder (ASD), typically led by one or more\ + \ certified music therapists. VESBALL had been developed in close consultation\ + \ with therapists, clients, and other stakeholders, and had undergone several\ + \ phases of trials at a music therapy facility over a period of 6 months. VESBALL\ + \ has an advantage over other related work in terms of its robustness, ease of\ + \ operation and setup (for clients and therapists), sound source integration,\ + \ and low cost of production. The authors hope VESBALL would positively impact\ + \ the conditions of individuals with ASD, and pave way for new research in custom-designed\ + \ NIME for communities with specific therapeutic needs.},\n address = {Baton Rouge,\ + \ Louisiana, USA},\n author = {Ajit Nath and Samson Young},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179146},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {387--391},\n publisher = {Louisiana\ + \ State University},\n title = {VESBALL: A ball-shaped instrument for music therapy},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_252.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178223 + doi: 10.5281/zenodo.1179146 + editor: Edgar Berdahl and Jesse Allison + issn: 2220-4806 + month: May + pages: 387--391 + publisher: Louisiana State University + title: 'VESBALL: A ball-shaped instrument for music therapy' + url: 
http://www.nime.org/proceedings/2015/nime2015_252.pdf + year: 2015 + + +- ENTRYTYPE: inproceedings + ID: swaloschek2015 + abstract: 'With the rapid evolution of technology, sensor aided performances and + installations have gained popularity. We identified a number of important criteria + for stage usage and artistic experimentation. These are partially met by existing + approaches, oftentimes trading off programmability for ease of use. We propose + our new sensor interface SPINE-2 that presents a comprehensive solution to these + stage requirements without that trade-off.' + address: 'Baton Rouge, Louisiana, USA' + author: Simon Waloschek and Aristotelis Hadjakos + bibtex: "@inproceedings{swaloschek2015,\n abstract = {With the rapid evolution of\ + \ technology, sensor aided performances and installations have gained popularity.\ + \ We identified a number of important criteria for stage usage and artistic experimentation.\ + \ These are partially met by existing approaches, oftentimes trading off programmability\ + \ for ease of use. 
We propose our new sensor interface SPINE-2 that presents a\ + \ comprehensive solution to these stage requirements without that trade-off.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Simon Waloschek and Aristotelis\ + \ Hadjakos},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179194},\n editor\ + \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ + \ pages = {351--354},\n publisher = {Louisiana State University},\n title = {Sensors\ + \ on Stage: Conquering the Requirements of Artistic Experiments and Live Performances},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_254.pdf},\n year = {2015}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1179194 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Granular, Learning Objects, K-12, Education, L2Ork, PdL2Ork' - publisher: University of Michigan - title: Granular Learning Objects for Instrument Design and Collaborative Performance - in K-12 Education - url: http://www.nime.org/proceedings/2012/nime2012_315.pdf - year: 2012 + month: May + pages: 351--354 + publisher: Louisiana State University + title: 'Sensors on Stage: Conquering the Requirements of Artistic Experiments and + Live Performances' + url: http://www.nime.org/proceedings/2015/nime2015_254.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Buschert2012 - abstract: 'Musician Maker is a system to allow novice players the opportunity to - create expressive improvisational music. While the system plays an accompaniment - background chord progression, each participant plays some kind of controller to - make music through the system. The program takes the signals from the controllers - and adjusts the pitches somewhat so that the players are limited to notes which - fit the chord progression. 
The various controllers are designed to be very easy - and intuitive so anyone can pick one up and quickly be able to play it. Since - the computer is making sure that wrong notes are avoided, even inexperienced players - can immediately make music and enjoy focusing on some of the more expressive elements - and thus become musicians.' - address: 'Ann Arbor, Michigan' - author: John Buschert - bibtex: "@inproceedings{Buschert2012,\n abstract = {Musician Maker is a system to\ - \ allow novice players the opportunity to create expressive improvisational music.\ - \ While the system plays an accompaniment background chord progression, each participant\ - \ plays some kind of controller to make music through the system. The program\ - \ takes the signals from the controllers and adjusts the pitches somewhat so that\ - \ the players are limited to notes which fit the chord progression. The various\ - \ controllers are designed to be very easy and intuitive so anyone can pick one\ - \ up and quickly be able to play it. Since the computer is making sure that wrong\ - \ notes are avoided, even inexperienced players can immediately make music and\ - \ enjoy focusing on some of the more expressive elements and thus become musicians.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {John Buschert},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178225},\n issn = {2220-4806},\n keywords = {Musical\ - \ Instrument, Electronic, Computer Music, Novice, Controller},\n publisher = {University\ - \ of Michigan},\n title = {Musician Maker: Play expressive music without practice},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_36.pdf},\n year = {2012}\n\ - }\n" + ID: amcpherson2015 + abstract: 'The implementation of digital musical instruments is often opaque to + the performer. 
Even when the relationship between action and sound is readily + understandable, the internal hardware or software operations that create that + relationship may be inaccessible to scrutiny or modification. This paper presents + a new approach to digital instrument design which lets the performer alter and + subvert the instrument''s internal operation through circuit-bending techniques. + The approach uses low-latency feedback loops between software and analog hardware + to expose the internal working of the instrument. Compared to the standard control + voltage approach used on analog synths, alterations to the feedback loops produce + distinctive and less predictable changes in behaviour with original artistic applications. + This paper discusses the technical foundations of the approach, its roots in hacking + and circuit bending, and case studies of its use in live performance with the + D-Box hackable instrument.' + address: 'Baton Rouge, Louisiana, USA' + author: Andrew McPherson and Victor Zappi + bibtex: "@inproceedings{amcpherson2015,\n abstract = {The implementation of digital\ + \ musical instruments is often opaque to the performer. Even when the relationship\ + \ between action and sound is readily understandable, the internal hardware or\ + \ software operations that create that relationship may be inaccessible to scrutiny\ + \ or modification. This paper presents a new approach to digital instrument design\ + \ which lets the performer alter and subvert the instrument's internal operation\ + \ through circuit-bending techniques. The approach uses low-latency feedback loops\ + \ between software and analog hardware to expose the internal working of the instrument.\ + \ Compared to the standard control voltage approach used on analog synths, alterations\ + \ to the feedback loops produce distinctive and less predictable changes in behaviour\ + \ with original artistic applications. 
This paper discusses the technical foundations\ + \ of the approach, its roots in hacking and circuit bending, and case studies\ + \ of its use in live performance with the D-Box hackable instrument.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Andrew McPherson and Victor Zappi},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179134},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {162--167},\n\ + \ publisher = {Louisiana State University},\n title = {Exposing the Scaffolding\ + \ of Digital Instruments with Hardware-Software Feedback Loops},\n url = {http://www.nime.org/proceedings/2015/nime2015_258.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178225 + doi: 10.5281/zenodo.1179134 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Musical Instrument, Electronic, Computer Music, Novice, Controller' - publisher: University of Michigan - title: 'Musician Maker: Play expressive music without practice' - url: http://www.nime.org/proceedings/2012/nime2012_36.pdf - year: 2012 + month: May + pages: 162--167 + publisher: Louisiana State University + title: Exposing the Scaffolding of Digital Instruments with Hardware-Software Feedback + Loops + url: http://www.nime.org/proceedings/2015/nime2015_258.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Giordano2012 - abstract: 'Force-feedback devices can provide haptic feedback duringinteraction - with physical models for sound synthesis. However, low-end devices may not always - provide high-fidelitydisplay of the acoustic characteristics of the model. This - article describes an enhanced handle for the Phantom Omnicontaining a vibration - actuator intended to display the highfrequency portion of the synthesized forces. 
- Measurementsare provided to show that this approach achieves a morefaithful representation - of the acoustic signal, overcominglimitations in the device control and dynamics.' - address: 'Ann Arbor, Michigan' - author: Marcello Giordano and Stephen Sinclair and Marcelo M. Wanderley - bibtex: "@inproceedings{Giordano2012,\n abstract = {Force-feedback devices can provide\ - \ haptic feedback duringinteraction with physical models for sound synthesis.\ - \ However, low-end devices may not always provide high-fidelitydisplay of the\ - \ acoustic characteristics of the model. This article describes an enhanced handle\ - \ for the Phantom Omnicontaining a vibration actuator intended to display the\ - \ highfrequency portion of the synthesized forces. Measurementsare provided to\ - \ show that this approach achieves a morefaithful representation of the acoustic\ - \ signal, overcominglimitations in the device control and dynamics.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Marcello Giordano and Stephen Sinclair\ - \ and Marcelo M. Wanderley},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178265},\n\ - \ issn = {2220-4806},\n keywords = {Haptics, force feedback, bowing, audio, interaction},\n\ - \ publisher = {University of Michigan},\n title = {Bowing a vibration-enhanced\ - \ force feedback device},\n url = {http://www.nime.org/proceedings/2012/nime2012_37.pdf},\n\ - \ year = {2012}\n}\n" + ID: arau2015 + abstract: 'We introduce the Peripipe, a tangible remote control for a music player + that comes in the shape of a wooden tobacco pipe. The design is based on breath + control, using sips and puffs as control commands. An atmospheric pressure sensor + in the Peripipe senses changes in the air pressure. 
Based on these changes, the + pipe determines when the user performs a puff, double-puff, sip, double-sip or + a long puff or long sip action, and wirelessly sends commands to a smartphone + running the music player. Additionally, the Peripipe provides fumeovisual feedback, + using color-illuminated smoke to display the system status. With the form factor, + the materials used, the interaction through breath, and the ephemeral feedback + we aim to emphasize the emotional component of listening to music that, in our + eyes, is not very well reflected in traditional remote controls.' + address: 'Baton Rouge, Louisiana, USA' + author: Tommy Feldt and Sarah Freilich and Shaun Mendonsa and Daniel Molin and Andreas + Rau + bibtex: "@inproceedings{arau2015,\n abstract = {We introduce the Peripipe, a tangible\ + \ remote control for a music player that comes in the shape of a wooden tobacco\ + \ pipe. The design is based on breath control, using sips and puffs as control\ + \ commands. An atmospheric pressure sensor in the Peripipe senses changes in the\ + \ air pressure. Based on these changes, the pipe determines when the user performs\ + \ a puff, double-puff, sip, double-sip or a long puff or long sip action, and\ + \ wirelessly sends commands to a smartphone running the music player. Additionally,\ + \ the Peripipe provides fumeovisual feedback, using color-illuminated smoke to\ + \ display the system status. 
With the form factor, the materials used, the interaction\ + \ through breath, and the ephemeral feedback we aim to emphasize the emotional\ + \ component of listening to music that, in our eyes, is not very well reflected\ + \ in traditional remote controls.},\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Tommy Feldt and Sarah Freilich and Shaun Mendonsa and Daniel Molin\ + \ and Andreas Rau},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179058},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {34--35},\n publisher = {Louisiana State University},\n title\ + \ = {Puff, Puff, Play: A Sip-And-Puff Remote Control for Music Playback},\n url\ + \ = {http://www.nime.org/proceedings/2015/nime2015_260.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/260/0260-file1.mp4},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178265 + doi: 10.5281/zenodo.1179058 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Haptics, force feedback, bowing, audio, interaction' - publisher: University of Michigan - title: Bowing a vibration-enhanced force feedback device - url: http://www.nime.org/proceedings/2012/nime2012_37.pdf - year: 2012 + month: May + pages: 34--35 + publisher: Louisiana State University + title: 'Puff, Puff, Play: A Sip-And-Puff Remote Control for Music Playback' + url: http://www.nime.org/proceedings/2015/nime2015_260.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/260/0260-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Parson2012 - abstract: 'With the advent of high resolution digital video projection and high - quality spatial sound systems in modern planetariums, the planetarium can become - the basis for a unique set of virtual musical instrument capabilities that go - 
well beyond packaged multimedia shows. The dome, circular speaker and circular - seating arrangements provide means for skilled composers and performers to create - a virtual reality in which attendees are immersed in the composite instrument. - This initial foray into designing an audio-visual computerbased instrument for - improvisational performance in a planetarium builds on prior, successful work - in mapping the rules and state of two-dimensional computer board games to improvised - computer music. The unique visual and audio geometries of the planetarium present - challenges and opportunities. The game tessellates the dome in mobile, colored - hexagons that emulate both atoms and musical scale intervals in an expanding universe. - Spatial activity in the game maps to spatial locale and instrument voices in the - speakers, in essence creating a virtual orchestra with a string section, percussion - section, etc. on the dome. Future work includes distribution of game play via - mobile devices to permit attendees to participate in a performance. This environment - is open-ended, with great educational and aesthetic potential.' - address: 'Ann Arbor, Michigan' - author: Dale Parson and Phillip Reed - bibtex: "@inproceedings{Parson2012,\n abstract = {With the advent of high resolution\ - \ digital video projection and high quality spatial sound systems in modern planetariums,\ - \ the planetarium can become the basis for a unique set of virtual musical instrument\ - \ capabilities that go well beyond packaged multimedia shows. The dome, circular\ - \ speaker and circular seating arrangements provide means for skilled composers\ - \ and performers to create a virtual reality in which attendees are immersed in\ - \ the composite instrument. 
This initial foray into designing an audio-visual\ - \ computerbased instrument for improvisational performance in a planetarium builds\ - \ on prior, successful work in mapping the rules and state of two-dimensional\ - \ computer board games to improvised computer music. The unique visual and audio\ - \ geometries of the planetarium present challenges and opportunities. The game\ - \ tessellates the dome in mobile, colored hexagons that emulate both atoms and\ - \ musical scale intervals in an expanding universe. Spatial activity in the game\ - \ maps to spatial locale and instrument voices in the speakers, in essence creating\ - \ a virtual orchestra with a string section, percussion section, etc. on the dome.\ - \ Future work includes distribution of game play via mobile devices to permit\ - \ attendees to participate in a performance. This environment is open-ended, with\ - \ great educational and aesthetic potential.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Dale Parson and Phillip Reed},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180567},\n\ - \ issn = {2220-4806},\n keywords = {aleatory music, algorithmic improvisation,\ - \ computer game, planetarium},\n publisher = {University of Michigan},\n title\ - \ = {The Planetarium as a Musical Instrument},\n url = {http://www.nime.org/proceedings/2012/nime2012_47.pdf},\n\ - \ year = {2012}\n}\n" + ID: ndalessandro2015 + abstract: 'In this paper, we describe the prototyping of two musical interfaces + that use the LeapMotion camera in conjunction with two different touch surfaces: + a Wacom tablet and a transparent PVC sheet. In the Wacom use case, the camera + is between the hand and the surface. In the PVC use case, the camera is under + the transparent sheet and tracks the hand through it. 
The aim of this research + is to explore hovering motion surrounding the touch interaction on the surface + and include properties of such motion in the musical interaction. We present our + unifying software, called AirPiano, that discretises the 3D space into ''keys'' + and proposes several mapping strategies with the available dimensions. These control + dimensions are mapped onto a modified HandSketch sound engine that achieves multitimbral + pitch-synchronous point cloud granulation.' + address: 'Baton Rouge, Louisiana, USA' + author: Nicolas d'Alessandro and Joëlle Tilmanne and Ambroise Moreau and Antonin + Puleo + bibtex: "@inproceedings{ndalessandro2015,\n abstract = {In this paper, we describe\ + \ the prototyping of two musical interfaces that use the LeapMotion camera in\ + \ conjunction with two different touch surfaces: a Wacom tablet and a transparent\ + \ PVC sheet. In the Wacom use case, the camera is between the hand and the surface.\ + \ In the PVC use case, the camera is under the transparent sheet and tracks the\ + \ hand through it. The aim of this research is to explore hovering motion surrounding\ + \ the touch interaction on the surface and include properties of such motion in\ + \ the musical interaction. We present our unifying software, called AirPiano,\ + \ that discretises the 3D space into 'keys' and proposes several mapping strategies\ + \ with the available dimensions. 
These control dimensions are mapped onto a modified\ + \ HandSketch sound engine that achieves multitimbral pitch-synchronous point cloud\ + \ granulation.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Nicolas\ + \ d'Alessandro and Jo\\''elle Tilmanne and Ambroise Moreau and Antonin Puleo},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1181434},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {255--258},\n\ + \ publisher = {Louisiana State University},\n title = {AirPiano: A Multi-Touch\ + \ Keyboard with Hovering Control},\n url = {http://www.nime.org/proceedings/2015/nime2015_261.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180567 + doi: 10.5281/zenodo.1181434 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'aleatory music, algorithmic improvisation, computer game, planetarium' - publisher: University of Michigan - title: The Planetarium as a Musical Instrument - url: http://www.nime.org/proceedings/2012/nime2012_47.pdf - year: 2012 + month: May + pages: 255--258 + publisher: Louisiana State University + title: 'AirPiano: A Multi-Touch Keyboard with Hovering Control' + url: http://www.nime.org/proceedings/2015/nime2015_261.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Chacin2012 - abstract: 'This paper is an in depth exploration of the fashion object and device, - the Play-A-Grill. It details inspirations, socio-cultural implications, technical - function and operation, and potential applications for the Play-A-Grill system.' - address: 'Ann Arbor, Michigan' - author: Aisen Caro Chacin - bibtex: "@inproceedings{Chacin2012,\n abstract = {This paper is an in depth exploration\ - \ of the fashion object and device, the Play-A-Grill. 
It details inspirations,\ - \ socio-cultural implications, technical function and operation, and potential\ - \ applications for the Play-A-Grill system.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Aisen Caro Chacin},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178233},\n\ - \ issn = {2220-4806},\n keywords = {Digital Music Players, Hip Hop, Rap, Music\ - \ Fashion, Grills, Mouth Jewelry, Mouth Controllers, and Bone Conduction Hearing.},\n\ - \ publisher = {University of Michigan},\n title = {Play-A-Grill: Music To Your\ - \ Teeth},\n url = {http://www.nime.org/proceedings/2012/nime2012_48.pdf},\n year\ - \ = {2012}\n}\n" + ID: ahazzardb2015 + abstract: 'We explore how to digitally augment musical instruments by connecting + them to their social histories. We describe the use of Internet of Things technologies + to connect an acoustic guitar to its digital footprint -- a record of how it was + designed, built and played. We introduce the approach of crafting interactive + decorative inlay into the body of an instrument that can then be scanned using + mobile devices to reveal its digital footprint. We describe the design and construction + of an augmented acoustic guitar called Carolan alongside activities to build its + digital footprint through documented encounters with twenty-seven players in a + variety of settings. We reveal the design challenge of mapping the different surfaces + of the instrument to various facets of its footprint so as to afford appropriate + experiences to players, audiences and technicians. We articulate an agenda for + further research on the topic of connecting instruments to their social histories, + including capturing and performing digital footprints and creating personalized + and legacy experiences.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Steve Benford and Adrian Hazzard and Alan Chamberlain and Liming Xu + bibtex: "@inproceedings{ahazzardb2015,\n abstract = {We explore how to digitally\ + \ augment musical instruments by connecting them to their social histories. We\ + \ describe the use of Internet of Things technologies to connect an acoustic guitar\ + \ to its digital footprint -- a record of how it was designed, built and played.\ + \ We introduce the approach of crafting interactive decorative inlay into the\ + \ body of an instrument that can then be scanned using mobile devices to reveal\ + \ its digital footprint. We describe the design and construction of an augmented\ + \ acoustic guitar called Carolan alongside activities to build its digital footprint\ + \ through documented encounters with twenty-seven players in a variety of settings.\ + \ We reveal the design challenge of mapping the different surfaces of the instrument\ + \ to various facets of its footprint so as to afford appropriate experiences to\ + \ players, audiences and technicians. 
We articulate an agenda for further research\ + \ on the topic of connecting instruments to their social histories, including\ + \ capturing and performing digital footprints and creating personalized and legacy\ + \ experiences.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Steve\ + \ Benford and Adrian Hazzard and Alan Chamberlain and Liming Xu},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179016},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {303--306},\n\ + \ publisher = {Louisiana State University},\n title = {Augmenting a Guitar with\ + \ its Digital Footprint},\n url = {http://www.nime.org/proceedings/2015/nime2015_264.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178233 + doi: 10.5281/zenodo.1179016 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Digital Music Players, Hip Hop, Rap, Music Fashion, Grills, Mouth Jewelry, - Mouth Controllers, and Bone Conduction Hearing.' - publisher: University of Michigan - title: 'Play-A-Grill: Music To Your Teeth' - url: http://www.nime.org/proceedings/2012/nime2012_48.pdf - year: 2012 + month: May + pages: 303--306 + publisher: Louisiana State University + title: Augmenting a Guitar with its Digital Footprint + url: http://www.nime.org/proceedings/2015/nime2015_264.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: FASCIANI2012 - abstract: 'Sound generators and synthesis engines expose a large set of parameters, - allowing run-time timbre morphing and exploration of sonic space. However, control - over these high-dimensional interfaces is constrained by the physical limitations - of performers. In this paper we propose the exploitation of vocal gesture as an - extension or alternative to traditional physical controllers. 
The approach uses - dynamic aspects of vocal sound to control variations in the timbre of the synthesized - sound. The mapping from vocal to synthesis parameters is automatically adapted - to information extracted from vocal examples as well as to the relationship between - parameters and timbre within the synthesizer. The mapping strategy aims to maximize - the breadth of the explorable perceptual sonic space over a set of the synthesizer''s - real-valued parameters, indirectly driven by the voice-controlled interface.' - address: 'Ann Arbor, Michigan' - author: STEFANO FASCIANI and LONCE WYSE - bibtex: "@inproceedings{FASCIANI2012,\n abstract = {Sound generators and synthesis\ - \ engines expose a large set of parameters, allowing run-time timbre morphing\ - \ and exploration of sonic space. However, control over these high-dimensional\ - \ interfaces is constrained by the physical limitations of performers. In this\ - \ paper we propose the exploitation of vocal gesture as an extension or alternative\ - \ to traditional physical controllers. The approach uses dynamic aspects of vocal\ - \ sound to control variations in the timbre of the synthesized sound. The mapping\ - \ from vocal to synthesis parameters is automatically adapted to information extracted\ - \ from vocal examples as well as to the relationship between parameters and timbre\ - \ within the synthesizer. 
The mapping strategy aims to maximize the breadth of\ - \ the explorable perceptual sonic space over a set of the synthesizer's real-valued\ - \ parameters, indirectly driven by the voice-controlled interface.},\n address\ - \ = {Ann Arbor, Michigan},\n author = {STEFANO FASCIANI and LONCE WYSE},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178251},\n issn = {2220-4806},\n keywords\ - \ = {Voice Control, Adaptive Interface, Automatic Mapping, Timbre Morphing, Sonic\ - \ Space Exploration},\n publisher = {University of Michigan},\n title = {A Voice\ - \ Interface for Sound Generators: adaptive and automatic mapping of gestures to\ - \ sound},\n url = {http://www.nime.org/proceedings/2012/nime2012_57.pdf},\n year\ - \ = {2012}\n}\n" + ID: skestelli2015 + abstract: 'There have been more interest and research towards multisensory aspects + of sound as well as vision and movement, especially in the last two decades. An + emerging research field related with multi-sensory research is ''motor imagery'', + which could be defined as the mental representation of a movement without actual + production of muscle activity necessary for its execution. Emphasizing its close + relationship and potential future use in new digital musical instruments (DMI) + practice and reviewing literature, this paper will introduce fundamental concepts + about motor imagery (MI), various methods of measuring MI in different configurations + and summarize some important findings about MI in various studies. Following, + it will discuss how this research area is related to DMI practice and propose + potential uses of MI in this field. 
' + address: 'Baton Rouge, Louisiana, USA' + author: Sair Sinan Kestelli + bibtex: "@inproceedings{skestelli2015,\n abstract = {There have been more interest\ + \ and research towards multisensory aspects of sound as well as vision and movement,\ + \ especially in the last two decades. An emerging research field related with\ + \ multi-sensory research is 'motor imagery', which could be defined as the mental\ + \ representation of a movement without actual production of muscle activity necessary\ + \ for its execution. Emphasizing its close relationship and potential future use\ + \ in new digital musical instruments (DMI) practice and reviewing literature,\ + \ this paper will introduce fundamental concepts about motor imagery (MI), various\ + \ methods of measuring MI in different configurations and summarize some important\ + \ findings about MI in various studies. Following, it will discuss how this research\ + \ area is related to DMI practice and propose potential uses of MI in this field.\ + \ },\n address = {Baton Rouge, Louisiana, USA},\n author = {{Sair Sinan} Kestelli},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179104},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {107--110},\n\ + \ publisher = {Louisiana State University},\n title = {Motor Imagery: What Does\ + \ It Offer for New Digital Musical Instruments?},\n url = {http://www.nime.org/proceedings/2015/nime2015_265.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178251 + doi: 10.5281/zenodo.1179104 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Voice Control, Adaptive Interface, Automatic Mapping, Timbre Morphing, - Sonic Space Exploration' - publisher: University of Michigan - title: 'A Voice Interface for Sound Generators: 
adaptive and automatic mapping of - gestures to sound' - url: http://www.nime.org/proceedings/2012/nime2012_57.pdf - year: 2012 + month: May + pages: 107--110 + publisher: Louisiana State University + title: 'Motor Imagery: What Does It Offer for New Digital Musical Instruments?' + url: http://www.nime.org/proceedings/2015/nime2015_265.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Yuksel2012 - abstract: 'In this work, a comprehensive study is performed on the relationship - between audio, visual and emotion by applying the principles of cognitive emotion - theory into digital creation. The study is driven by an audiovisual emotion library - project that is named AVIEM, which provides an interactive interface for experimentation - and evaluation of the perception and creation processes of audiovisuals. AVIEM - primarily consists of separate audio and visual libraries and grows with user - contribution as users explore different combinations between them. The library - provides a wide range of experimentation possibilities by allowing users to create - audiovisual relations and logging their emotional responses through its interface. - Besides being a resourceful tool of experimentation, AVIEM aims to become a source - of inspiration, where digitally created abstract virtual environments and soundscapes - can elicit target emotions at a preconscious level, by building genuine audiovisual - relations that would engage the viewer on a strong emotional stage. Lastly, various - schemes are proposed to visualize information extracted through AVIEM, to improve - the navigation and designate the trends and dependencies among audiovisual relations.' 
- address: 'Ann Arbor, Michigan' - author: Kamer Ali Yuksel and Sinan Buyukbas and Elif Ayiter - bibtex: "@inproceedings{Yuksel2012,\n abstract = {In this work, a comprehensive\ - \ study is performed on the relationship between audio, visual and emotion by\ - \ applying the principles of cognitive emotion theory into digital creation. The\ - \ study is driven by an audiovisual emotion library project that is named AVIEM,\ - \ which provides an interactive interface for experimentation and evaluation of\ - \ the perception and creation processes of audiovisuals. AVIEM primarily consists\ - \ of separate audio and visual libraries and grows with user contribution as users\ - \ explore different combinations between them. The library provides a wide range\ - \ of experimentation possibilities by allowing users to create audiovisual relations\ - \ and logging their emotional responses through its interface. Besides being a\ - \ resourceful tool of experimentation, AVIEM aims to become a source of inspiration,\ - \ where digitally created abstract virtual environments and soundscapes can elicit\ - \ target emotions at a preconscious level, by building genuine audiovisual relations\ - \ that would engage the viewer on a strong emotional stage. 
Lastly, various schemes\ - \ are proposed to visualize information extracted through AVIEM, to improve the\ - \ navigation and designate the trends and dependencies among audiovisual relations.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Kamer Ali Yuksel and Sinan Buyukbas\ - \ and Elif Ayiter},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178457},\n\ - \ issn = {2220-4806},\n keywords = {Designing emotive audiovisuals, cognitive\ - \ emotion theory, audiovisual perception and interaction, synaesthesia},\n publisher\ - \ = {University of Michigan},\n title = {An Interface for Emotional Expression\ - \ in Audio-Visuals},\n url = {http://www.nime.org/proceedings/2012/nime2012_60.pdf},\n\ - \ year = {2012}\n}\n" + ID: hpurwins2015 + abstract: 'An interactive music instrument museum experience for children of 10-12 + years is presented. Equipped with tablet devices, the children are sent on a treasure + hunt. Based on given sound samples, the participants have to identify the right + musical instrument (harpsichord, double bass, viola) out of an instrument collection. + As the right instrument is located, a challenge of playing an application on the + tablet is initiated. This application is an interactive digital representation + of the found instrument, mimicking some of its key playing techniques, using a + simplified scrolling on screen musical notation. The musical performance of the + participant is graded on a point scale. After completion of the challenge, the + participants'' performances of the three instruments are played back simultaneously, + constituting a composition. A qualitative evaluation of the application in a focus + group interview with school children revealed that the children were more engaged + when playing with the interactive application than when only watching a music + video.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Mikkel Jörgensen and Aske Knudsen and Thomas Wilmot and Kasper Lund and + Stefania Serafin and Hendrik Purwins + bibtex: "@inproceedings{hpurwins2015,\n abstract = {An interactive music instrument\ + \ museum experience for children of 10-12 years is presented. Equipped with tablet\ + \ devices, the children are sent on a treasure hunt. Based on given sound samples,\ + \ the participants have to identify the right musical instrument (harpsichord,\ + \ double bass, viola) out of an instrument collection. As the right instrument\ + \ is located, a challenge of playing an application on the tablet is initiated.\ + \ This application is an interactive digital representation of the found instrument,\ + \ mimicking some of its key playing techniques, using a simplified scrolling on\ + \ screen musical notation. The musical performance of the participant is graded\ + \ on a point scale. After completion of the challenge, the participants' performances\ + \ of the three instruments are played back simultaneously, constituting a composition.\ + \ A qualitative evaluation of the application in a focus group interview with\ + \ school children revealed that the children were more engaged when playing with\ + \ the interactive application than when only watching a music video.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Mikkel J\\''{o}rgensen and Aske\ + \ Knudsen and Thomas Wilmot and Kasper Lund and Stefania Serafin and Hendrik Purwins},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178997},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {36--37},\n\ + \ publisher = {Louisiana State University},\n title = {A Mobile Music Museum Experience\ + \ for Children},\n url = {http://www.nime.org/proceedings/2015/nime2015_267.pdf},\n\ + \ urlsuppl1 = 
{http://www.nime.org/proceedings/2015/267/0267-file1.mov},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178457 + doi: 10.5281/zenodo.1178997 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Designing emotive audiovisuals, cognitive emotion theory, audiovisual - perception and interaction, synaesthesia' - publisher: University of Michigan - title: An Interface for Emotional Expression in Audio-Visuals - url: http://www.nime.org/proceedings/2012/nime2012_60.pdf - year: 2012 + month: May + pages: 36--37 + publisher: Louisiana State University + title: A Mobile Music Museum Experience for Children + url: http://www.nime.org/proceedings/2015/nime2015_267.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/267/0267-file1.mov + year: 2015 - ENTRYTYPE: inproceedings - ID: Lee2012b - abstract: 'Tok! is a collaborative acoustic instrument application for iOS devices - aimed at real time percussive music making in a colocated setup. It utilizes the - mobility of hand-held devices and transforms them into drumsticks to tap on flat - surfaces and produce acoustic music. Tok! is also networked and consists of a - shared interactive music score to which the players tap their phones, creating - a percussion ensemble. Through their social interaction and real-time modifications - to the music score, and through their creative selection of tapping surfaces, - the players can collaborate and dynamically create interesting rhythmic music - with a variety of timbres.' - address: 'Ann Arbor, Michigan' - author: Sang Won Lee and Ajay Srinivasamurthy and Gregoire Tronel and Weibin Shen - and Jason Freeman - bibtex: "@inproceedings{Lee2012b,\n abstract = {Tok! is a collaborative acoustic\ - \ instrument application for iOS devices aimed at real time percussive music making\ - \ in a colocated setup. 
It utilizes the mobility of hand-held devices and transforms\ - \ them into drumsticks to tap on flat surfaces and produce acoustic music. Tok!\ - \ is also networked and consists of a shared interactive music score to which\ - \ the players tap their phones, creating a percussion ensemble. Through their\ - \ social interaction and real-time modifications to the music score, and through\ - \ their creative selection of tapping surfaces, the players can collaborate and\ - \ dynamically create interesting rhythmic music with a variety of timbres.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Sang Won Lee and Ajay Srinivasamurthy\ - \ and Gregoire Tronel and Weibin Shen and Jason Freeman},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178317},\n issn = {2220-4806},\n keywords = {Mobile Phones,\ - \ Collaboration, Social Interaction, Acoustic Musical Instrument},\n publisher\ - \ = {University of Michigan},\n title = {Tok! : A Collaborative Acoustic Instrument\ - \ using Mobile Phones},\n url = {http://www.nime.org/proceedings/2012/nime2012_61.pdf},\n\ - \ year = {2012}\n}\n" + ID: tresch2015 + abstract: 'Audio guides and (interactive) sound walks have existed for decades. + Even smartphone games taking place in the real world are no longer a novelty. + But due to the lack of a sufficient middleware which fulfills the requirements + for creating this software genre, artists, game developers and institutions such + as museums are forced to implement rather similar functionality over and over + again. This paper describes the basic principles of Real World Audio (RWA), an + extendable audio game engine for targeting smartphone operating systems, which + rolls out all functionality for the generation of the above-mentioned software + genres. It combines the ability for building location-based audio walks and -guides + with the components necessary for game development. 
Using either the smartphone + sensors or an external sensor board for head tracking and gesture recognition, + RWA allows developers to create audio walks, audio adventures and audio role playing + games (RPG) outside in the real world.' + address: 'Baton Rouge, Louisiana, USA' + author: Thomas Resch + bibtex: "@inproceedings{tresch2015,\n abstract = {Audio guides and (interactive)\ + \ sound walks have existed for decades. Even smartphone games taking place in\ + \ the real world are no longer a novelty. But due to the lack of a sufficient\ + \ middleware which fulfills the requirements for creating this software genre,\ + \ artists, game developers and institutions such as museums are forced to implement\ + \ rather similar functionality over and over again. This paper describes the basic\ + \ principles of Real World Audio (RWA), an extendable audio game engine for targeting\ + \ smartphone operating systems, which rolls out all functionality for the generation\ + \ of the above-mentioned software genres. It combines the ability for building\ + \ location-based audio walks and -guides with the components necessary for game\ + \ development. 
Using either the smartphone sensors or an external sensor board\ + \ for head tracking and gesture recognition, RWA allows developers to create audio\ + \ walks, audio adventures and audio role playing games (RPG) outside in the real\ + \ world.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Thomas Resch},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1179160},\n editor = {Edgar Berdahl\ + \ and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {392--395},\n\ + \ publisher = {Louisiana State University},\n title = {RWA -- A Game Engine for\ + \ Real World Audio Games},\n url = {http://www.nime.org/proceedings/2015/nime2015_269.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178317 + doi: 10.5281/zenodo.1179160 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Mobile Phones, Collaboration, Social Interaction, Acoustic Musical Instrument' - publisher: University of Michigan - title: 'Tok! : A Collaborative Acoustic Instrument using Mobile Phones' - url: http://www.nime.org/proceedings/2012/nime2012_61.pdf - year: 2012 + month: May + pages: 392--395 + publisher: Louisiana State University + title: RWA -- A Game Engine for Real World Audio Games + url: http://www.nime.org/proceedings/2015/nime2015_269.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Lee2012a - abstract: 'This paper describes recent extensions to LOLC, a text-based environment - for collaborative improvisation for laptop ensembles, which integrate acoustic - instrumental musicians into the environment. Laptop musicians author short commands - to create, transform, and share pre-composed musical fragments, and the resulting - notation is digitally displayed, in real time, to instrumental musicians to sight-read - in performance. 
The paper describes the background and motivations of the project, - outlines the design of the original LOLC environment and describes its new real-time - notation components in detail, and explains the use of these new components in - a musical composition, SGLC, by one of the authors.' - address: 'Ann Arbor, Michigan' - author: Sang Won Lee and Jason Freeman and Andrew Collela - bibtex: "@inproceedings{Lee2012a,\n abstract = {This paper describes recent extensions\ - \ to LOLC, a text-based environment for collaborative improvisation for laptop\ - \ ensembles, which integrate acoustic instrumental musicians into the environment.\ - \ Laptop musicians author short commands to create, transform, and share pre-composed\ - \ musical fragments, and the resulting notation is digitally displayed, in real\ - \ time, to instrumental musicians to sight-read in performance. The paper describes\ - \ the background and motivations of the project, outlines the design of the original\ - \ LOLC environment and describes its new real-time notation components in detail,\ - \ and explains the use of these new components in a musical composition, SGLC,\ - \ by one of the authors.},\n address = {Ann Arbor, Michigan},\n author = {Sang\ - \ Won Lee and Jason Freeman and Andrew Collela},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178315},\n issn = {2220-4806},\n keywords = {Real-time Music\ - \ Notation, Live Coding, Laptop Orchestra},\n publisher = {University of Michigan},\n\ - \ title = {Real-Time Music Notation, Collaborative Improvisation, and Laptop Ensembles},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_62.pdf},\n year = {2012}\n\ - }\n" + ID: ajense2015 + abstract: 'This paper looks at the design process of the WamBam; a self-contained + electronic hand-drum meant for music therapy sessions with severely intellectually + disabled clients. 
Using co-reflection with four musical therapists and literature + research, design guidelines related to this specific user-group and context are + formed. This leads to a concept of which the most relevant aspects are discussed, + before describing the user studies. Finally, the plan for the redesign is discussed. + The WamBam has unique possibilities to deal with the low motor skills and cognitive + abilities of severely intellectually disabled users while music therapists benefit + from the greater versatility and portability of this design compared to other + musical instruments. A prototype was tested with twenty users. Participants proved + to be triggered positively by the WamBam, but three limiting usability issues + were found. These issues were used as the fundamentals for a second prototype. + Music therapists confirm the value of the WamBam for their practice. ' + address: 'Baton Rouge, Louisiana, USA' + author: Arvid Jense and Hans Leeuw + bibtex: "@inproceedings{ajense2015,\n abstract = {This paper looks at the design\ + \ process of the WamBam; a self-contained electronic hand-drum meant for music\ + \ therapy sessions with severely intellectually disabled clients. Using co-reflection\ + \ with four musical therapists and literature research, design guidelines related\ + \ to this specific user-group and context are formed. This leads to a concept\ + \ of which the most relevant aspects are discussed, before describing the user\ + \ studies. Finally, the plan for the redesign is discussed. The WamBam has unique\ + \ possibilities to deal with the low motor skills and cognitive abilities of severely\ + \ intellectually disabled users while music therapists benefit from the greater\ + \ versatility and portability of this design compared to other musical instruments.\ + \ A prototype was tested with twenty users. Participants proved to be triggered\ + \ positively by the WamBam, but three limiting usability issues were found. 
These\ + \ issues were used as the fundamentals for a second prototype. Music therapists\ + \ confirm the value of the WamBam for their practice. },\n address = {Baton Rouge,\ + \ Louisiana, USA},\n author = {Arvid Jense and Hans Leeuw},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179098},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {74--77},\n publisher = {Louisiana\ + \ State University},\n title = {WamBam: A case study in design for an electronic\ + \ musical instrument for severely intellectually disabled users},\n url = {http://www.nime.org/proceedings/2015/nime2015_270.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178315 + doi: 10.5281/zenodo.1179098 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Real-time Music Notation, Live Coding, Laptop Orchestra' - publisher: University of Michigan - title: 'Real-Time Music Notation, Collaborative Improvisation, and Laptop Ensembles' - url: http://www.nime.org/proceedings/2012/nime2012_62.pdf - year: 2012 + month: May + pages: 74--77 + publisher: Louisiana State University + title: 'WamBam: A case study in design for an electronic musical instrument for + severely intellectually disabled users' + url: http://www.nime.org/proceedings/2015/nime2015_270.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Migneco2012 - abstract: 'Platforms for mobile computing and gesture recognitionprovide enticing - interfaces for creative expression on virtualmusical instruments. However, sound - synthesis on thesesystems is often limited to sample-based synthesizers, whichlimits - their expressive capabilities. 
Source-filter models areadept for such interfaces - since they provide flexible, algorithmic sound synthesis, especially in the case - of the guitar.In this paper, we present a data-driven approach for modeling guitar - excitation signals using principal componentsderived from a corpus of excitation - signals. Using thesecomponents as features, we apply nonlinear principal components - analysis to derive a feature space that describesthe expressive attributes characteristic - to our corpus. Finally, we propose using the reduced dimensionality space asa - control interface for an expressive guitar synthesizer.' - address: 'Ann Arbor, Michigan' - author: Raymond Migneco and Youngmoo Kim - bibtex: "@inproceedings{Migneco2012,\n abstract = {Platforms for mobile computing\ - \ and gesture recognitionprovide enticing interfaces for creative expression on\ - \ virtualmusical instruments. However, sound synthesis on thesesystems is often\ - \ limited to sample-based synthesizers, whichlimits their expressive capabilities.\ - \ Source-filter models areadept for such interfaces since they provide flexible,\ - \ algorithmic sound synthesis, especially in the case of the guitar.In this paper,\ - \ we present a data-driven approach for modeling guitar excitation signals using\ - \ principal componentsderived from a corpus of excitation signals. 
Using thesecomponents\ - \ as features, we apply nonlinear principal components analysis to derive a feature\ - \ space that describesthe expressive attributes characteristic to our corpus.\ - \ Finally, we propose using the reduced dimensionality space asa control interface\ - \ for an expressive guitar synthesizer.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Raymond Migneco and Youngmoo Kim},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1180541},\n issn = {2220-4806},\n keywords = {Source-filter\ - \ models, musical instrument synthesis, PCA, touch musical interfaces},\n publisher\ - \ = {University of Michigan},\n title = {A Component-Based Approach for Modeling\ - \ Plucked-Guitar Excitation Signals},\n url = {http://www.nime.org/proceedings/2012/nime2012_63.pdf},\n\ - \ year = {2012}\n}\n" + ID: hlimerick2015 + abstract: 'Liveness is a well-known problem with Digital Musical Instruments (DMIs). + When used in performances, DMIs provide less visual information than acoustic + instruments, preventing the audience from understanding how the musicians influence + the music. In this paper, we look at this issue through the lens of causality. + More specifically, we investigate the attribution of causality by an external + observer to a performer, relying on the theory of apparent mental causation. We + suggest that the perceived causality between a performer''s gestures and the musical + result is central to liveness. We present a framework for assessing attributed + causality and agency to a performer, based on a psychological theory which suggests + three criteria for inferred causality. These criteria then provide the basis of + an experimental study investigating the effect of visual augmentations on audience''s + inferred causality. 
The results provide insights on how the visual component of + performances with DMIs impacts the audience''s causal inferences about the performer. + In particular we show that visual augmentations help highlight the influence of + the musician when parts of the music are automated, and help clarify complex mappings + between gestures and sounds. Finally we discuss the potential wider implications + for assessing liveness in the design of new musical interfaces.' + address: 'Baton Rouge, Louisiana, USA' + author: Florent Berthaut and David Coyle and James Moore and Hannah Limerick + bibtex: "@inproceedings{hlimerick2015,\n abstract = {Liveness is a well-known problem\ + \ with Digital Musical Instruments (DMIs). When used in performances, DMIs provide\ + \ less visual information than acoustic instruments, preventing the audience from\ + \ understanding how the musicians influence the music. In this paper, we look\ + \ at this issue through the lens of causality. More specifically, we investigate\ + \ the attribution of causality by an external observer to a performer, relying\ + \ on the theory of apparent mental causation. We suggest that the perceived causality\ + \ between a performer's gestures and the musical result is central to liveness.\ + \ We present a framework for assessing attributed causality and agency to a performer,\ + \ based on a psychological theory which suggests three criteria for inferred causality.\ + \ These criteria then provide the basis of an experimental study investigating\ + \ the effect of visual augmentations on audience's inferred causality. The results\ + \ provide insights on how the visual component of performances with DMIs impacts\ + \ the audience's causal inferences about the performer. In particular we show\ + \ that visual augmentations help highlight the influence of the musician when\ + \ parts of the music are automated, and help clarify complex mappings between\ + \ gestures and sounds. 
Finally we discuss the potential wider implications for\ + \ assessing liveness in the design of new musical interfaces.},\n address = {Baton\ + \ Rouge, Louisiana, USA},\n author = {Florent Berthaut and David Coyle and James\ + \ Moore and Hannah Limerick},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179026},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {382--386},\n publisher = {Louisiana State University},\n title\ + \ = {Liveness Through the Lens of Agency and Causality},\n url = {http://www.nime.org/proceedings/2015/nime2015_272.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/272/0272-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180541 + doi: 10.5281/zenodo.1179026 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Source-filter models, musical instrument synthesis, PCA, touch musical - interfaces' - publisher: University of Michigan - title: A Component-Based Approach for Modeling Plucked-Guitar Excitation Signals - url: http://www.nime.org/proceedings/2012/nime2012_63.pdf - year: 2012 + month: May + pages: 382--386 + publisher: Louisiana State University + title: Liveness Through the Lens of Agency and Causality + url: http://www.nime.org/proceedings/2015/nime2015_272.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/272/0272-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Patricio2012 - abstract: 'This article proposes a wireless handheld multimedia digital instrument, - which allows one to compose and perform digital music for films in real-time. - Not only does it allow the performer and the audience to follow the film images - in question, but also the relationship between the gestures performed and the - sound generated. 
Furthermore, it allows one to have an effective control over - the sound, and consequently achieve great musical expression. In addition, a method - for calibrating the multimedia digital instrument, devised to overcome the lack - of a reliable reference point of the accelerometer and a process to obtain a video - score are presented. This instrument has been used in a number of concerts (Portugal - and Brazil) so as to test its robustness.' - address: 'Ann Arbor, Michigan' - author: Pedro Patrício - bibtex: "@inproceedings{Patricio2012,\n abstract = {This article proposes a wireless\ - \ handheld multimedia digital instrument, which allows one to compose and perform\ - \ digital music for films in real-time. Not only does it allow the performer and\ - \ the audience to follow the film images in question, but also the relationship\ - \ between the gestures performed and the sound generated. Furthermore, it allows\ - \ one to have an effective control over the sound, and consequently achieve great\ - \ musical expression. In addition, a method for calibrating the multimedia digital\ - \ instrument, devised to overcome the lack of a reliable reference point of the\ - \ accelerometer and a process to obtain a video score are presented. 
This instrument\ - \ has been used in a number of concerts (Portugal and Brazil) so as to test its\ - \ robustness.},\n address = {Ann Arbor, Michigan},\n author = {Pedro Patr{\\'i}cio},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1180569},\n issn = {2220-4806},\n\ - \ keywords = {Digital musical instrument, mobile music performance, real-time\ - \ musical composition, digital sound synthesis.},\n publisher = {University of\ - \ Michigan},\n title = {MuDI - Multimedia Digital Instrument for Composing and\ - \ Performing Digital Music for Films in Real-time},\n url = {http://www.nime.org/proceedings/2012/nime2012_64.pdf},\n\ - \ year = {2012}\n}\n" + ID: dverdonk2015 + abstract: 'In electronic music performance, a good relationship between what is + visible and what is audible can contribute to a more succesful way of conveying + thought or feeling. This connection can be enhanced by putting visible energy + into an electronic interface or instrument. This paper discusses the advantages + and implementations of visible excitation methods, and how these could reinforce + the bridge between the performance of acoustic and electronic instruments concerning + expressiveness.' + address: 'Baton Rouge, Louisiana, USA' + author: Dianne Verdonk + bibtex: "@inproceedings{dverdonk2015,\n abstract = {In electronic music performance,\ + \ a good relationship between what is visible and what is audible can contribute\ + \ to a more succesful way of conveying thought or feeling. 
This connection can\ + \ be enhanced by putting visible energy into an electronic interface or instrument.\ + \ This paper discusses the advantages and implementations of visible excitation\ + \ methods, and how these could reinforce the bridge between the performance of\ + \ acoustic and electronic instruments concerning expressiveness.},\n address =\ + \ {Baton Rouge, Louisiana, USA},\n author = {Dianne Verdonk},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179188},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {42--43},\n publisher = {Louisiana\ + \ State University},\n title = {Visible Excitation Methods: Energy and Expressiveness\ + \ in Electronic Music Performance},\n url = {http://www.nime.org/proceedings/2015/nime2015_273.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/273/0273-file1.m4v},\n urlsuppl2\ + \ = {http://www.nime.org/proceedings/2015/273/0273-file2.m4v},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180569 + doi: 10.5281/zenodo.1179188 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Digital musical instrument, mobile music performance, real-time musical - composition, digital sound synthesis.' 
- publisher: University of Michigan - title: MuDI - Multimedia Digital Instrument for Composing and Performing Digital - Music for Films in Real-time - url: http://www.nime.org/proceedings/2012/nime2012_64.pdf - year: 2012 + month: May + pages: 42--43 + publisher: Louisiana State University + title: 'Visible Excitation Methods: Energy and Expressiveness in Electronic Music + Performance' + url: http://www.nime.org/proceedings/2015/nime2015_273.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/273/0273-file1.m4v + urlsuppl2: http://www.nime.org/proceedings/2015/273/0273-file2.m4v + year: 2015 - ENTRYTYPE: inproceedings - ID: Endo2012 - abstract: 'Tweet Harp is a musical instrument using Twitter and a laser harp. This - instrument features the use of the human voice speaking tweets in Twitter as sounds - for music. It is played by touching the six harp strings of laser beams. Tweet - Harp gets the latest tweets from Twitter in real-time, and it creates music like - a song with unexpected words. It also creates animation displaying the texts at - the same time. The audience can visually enjoy this performance by sounds synchronized - with animation. If the audience has a Twitter account, they can participate in - the performance by tweeting.' - address: 'Ann Arbor, Michigan' - author: Ayaka Endo and Takuma Moriyama and Yasuo Kuhara - bibtex: "@inproceedings{Endo2012,\n abstract = {Tweet Harp is a musical instrument\ - \ using Twitter and a laser harp. This instrument features the use of the human\ - \ voice speaking tweets in Twitter as sounds for music. It is played by touching\ - \ the six harp strings of laser beams. Tweet Harp gets the latest tweets from\ - \ Twitter in real-time, and it creates music like a song with unexpected words.\ - \ It also creates animation displaying the texts at the same time. The audience\ - \ can visually enjoy this performance by sounds synchronized with animation. 
If\ - \ the audience has a Twitter account, they can participate in the performance\ - \ by tweeting.},\n address = {Ann Arbor, Michigan},\n author = {Ayaka Endo and\ - \ Takuma Moriyama and Yasuo Kuhara},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178249},\n\ - \ issn = {2220-4806},\n keywords = {Twitter, laser harp, text, speech, voice,\ - \ AppleScript, Quartz Composer, Max/MSP, TTS, Arduino},\n publisher = {University\ - \ of Michigan},\n title = {Tweet Harp: Laser Harp Generating Voice and Text of\ - \ Real-time Tweets in Twitter},\n url = {http://www.nime.org/proceedings/2012/nime2012_66.pdf},\n\ - \ year = {2012}\n}\n" + ID: snyder2015 + abstract: 'This paper describes a project undertaken in the Spring of 2014 that + sought to create an audio-visual performance using an industrial robotic arm. + Some relevant examples of previous robotic art are discussed, and the design challenges + posed by the unusual situation are explored. The resulting design solutions for + the sound, robotic motion, and video projection mapping involved in the piece + are explained, as well as the artistic reasoning behind those solutions. Where + applicable, links to open source code developed for the project are provided.' + address: 'Baton Rouge, Louisiana, USA' + author: Jeff Snyder and Ryan Johns and Charles Avis and Gene Kogan and Axel Kilian + bibtex: "@inproceedings{snyder2015,\n abstract = {This paper describes a project\ + \ undertaken in the Spring of 2014 that sought to create an audio-visual performance\ + \ using an industrial robotic arm. Some relevant examples of previous robotic\ + \ art are discussed, and the design challenges posed by the unusual situation\ + \ are explored. The resulting design solutions for the sound, robotic motion,\ + \ and video projection mapping involved in the piece are explained, as well as\ + \ the artistic reasoning behind those solutions. 
Where applicable, links to open\ + \ source code developed for the project are provided.},\n address = {Baton Rouge,\ + \ Louisiana, USA},\n author = {Jeff Snyder and Ryan Johns and Charles Avis and\ + \ Gene Kogan and Axel Kilian},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179180},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {184--186},\n publisher = {Louisiana State University},\n title\ + \ = {Machine Yearning: An Industrial Robotic Arm as a Performance Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_275.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/275/0275-file1.mp3},\n urlsuppl2 = {http://www.nime.org/proceedings/2015/275/0275-file2.mp4},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178249 + doi: 10.5281/zenodo.1179180 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Twitter, laser harp, text, speech, voice, AppleScript, Quartz Composer, - Max/MSP, TTS, Arduino' - publisher: University of Michigan - title: 'Tweet Harp: Laser Harp Generating Voice and Text of Real-time Tweets in - Twitter' - url: http://www.nime.org/proceedings/2012/nime2012_66.pdf - year: 2012 + month: May + pages: 184--186 + publisher: Louisiana State University + title: 'Machine Yearning: An Industrial Robotic Arm as a Performance Instrument' + url: http://www.nime.org/proceedings/2015/nime2015_275.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/275/0275-file1.mp3 + urlsuppl2: http://www.nime.org/proceedings/2015/275/0275-file2.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Smith2012 - abstract: 'Machine learning models are useful and attractive tools forthe interactive - computer musician, enabling a breadth of interfaces and instruments. 
With current - consumer hardwareit becomes possible to run advanced machine learning algorithms - in demanding performance situations, yet expertiseremains a prominent entry barrier - for most would-be users.Currently available implementations predominantly employsupervised - machine learning techniques, while the adaptive,self-organizing capabilities of - unsupervised models are notgenerally available. We present a free, new toolbox - of unsupervised machine learning algorithms implemented in Max5 to support real-time - interactive music and video, aimedat the non-expert computer artist.' - address: 'Ann Arbor, Michigan' - author: Benjamin D. Smith and Guy E. Garnett - bibtex: "@inproceedings{Smith2012,\n abstract = {Machine learning models are useful\ - \ and attractive tools forthe interactive computer musician, enabling a breadth\ - \ of interfaces and instruments. With current consumer hardwareit becomes possible\ - \ to run advanced machine learning algorithms in demanding performance situations,\ - \ yet expertiseremains a prominent entry barrier for most would-be users.Currently\ - \ available implementations predominantly employsupervised machine learning techniques,\ - \ while the adaptive,self-organizing capabilities of unsupervised models are notgenerally\ - \ available. We present a free, new toolbox of unsupervised machine learning algorithms\ - \ implemented in Max5 to support real-time interactive music and video, aimedat\ - \ the non-expert computer artist.},\n address = {Ann Arbor, Michigan},\n author\ - \ = {Benjamin D. Smith and Guy E. Garnett},\n booktitle = {Proceedings of the\ + ID: eberdahl2015 + abstract: The haptic hand is a greatly simplified robotic hand that is designed + to mirror the human hand and provide haptic force feedback for applications in + music. The fingers of the haptic hand device are laid out to align with four of + the fingers of the human hand. 
A key is placed on each of the fingers so that + a human hand can perform music by interacting with the keys. The haptic hand is + distinguished from other haptic keyboards in the sense that each finger is meant + to stay with a particular key. The haptic hand promotes unencumbered interaction + with the keys. The user can easily position a finger over a key and press downward + to activate it---the user does not need to insert his or her fingers into an unwieldy + exoskeleton or set of thimbles. An example video demonstrates some musical ideas + afforded by this open-source software and hardware project. + address: 'Baton Rouge, Louisiana, USA' + author: Edgar Berdahl and Denis Huber + bibtex: "@inproceedings{eberdahl2015,\n abstract = {The haptic hand is a greatly\ + \ simplified robotic hand that is designed to mirror the human hand and provide\ + \ haptic force feedback for applications in music. The fingers of the haptic hand\ + \ device are laid out to align with four of the fingers of the human hand. A key\ + \ is placed on each of the fingers so that a human hand can perform music by interacting\ + \ with the keys. The haptic hand is distinguished from other haptic keyboards\ + \ in the sense that each finger is meant to stay with a particular key. The haptic\ + \ hand promotes unencumbered interaction with the keys. 
The user can easily position\ + \ a finger over a key and press downward to activate it---the user does not need\ + \ to insert his or her fingers into an unwieldy exoskeleton or set of thimbles.\ + \ An example video demonstrates some musical ideas afforded by this open-source\ + \ software and hardware project.},\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Edgar Berdahl and Denis Huber},\n booktitle = {Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178419},\n issn = {2220-4806},\n keywords = {NIME, unsupervised\ - \ machine learning, adaptive resonance theory, self-organizing maps, Max 5},\n\ - \ publisher = {University of Michigan},\n title = {Unsupervised Play: Machine\ - \ Learning Toolkit for Max},\n url = {http://www.nime.org/proceedings/2012/nime2012_68.pdf},\n\ - \ year = {2012}\n}\n" + \ {10.5281/zenodo.1179022},\n editor = {Edgar Berdahl and Jesse Allison},\n issn\ + \ = {2220-4806},\n month = {May},\n pages = {303--306},\n publisher = {Louisiana\ + \ State University},\n title = {The Haptic Hand},\n url = {http://www.nime.org/proceedings/2015/nime2015_281.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/281/0281-file1.mov},\n urlsuppl2\ + \ = {http://www.nime.org/proceedings/2015/281/0281-file2.mov},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178419 + doi: 10.5281/zenodo.1179022 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, unsupervised machine learning, adaptive resonance theory, self-organizing - maps, Max 5' - publisher: University of Michigan - title: 'Unsupervised Play: Machine Learning Toolkit for Max' - url: http://www.nime.org/proceedings/2012/nime2012_68.pdf - year: 2012 - - -- ENTRYTYPE: inproceedings - ID: Smith2012a - abstract: 'Machine learning models are useful and attractive tools for the interactive - 
computer musician, enabling a breadth of interfaces and instruments. With current - consumer hardware it becomes possible to run advanced machine learning algorithms - in demanding performance situations, yet expertise remains a prominent entry barrier - for most would-be users. Currently available implementations predominantly employ - supervised machine learning techniques, while the adaptive, self-organizing capabilities - of unsupervised models are not generally available. We present a free, new toolbox - of unsupervised machine learning algorithms implemented in Max 5 to support real-time - interactive music and video, aimed at the non-expert computer artist.' - address: 'Ann Arbor, Michigan' - author: Benjamin D. Smith and Guy E. Garnett - bibtex: "@inproceedings{Smith2012a,\n abstract = {Machine learning models are useful\ - \ and attractive tools for the interactive computer musician, enabling a breadth\ - \ of interfaces and instruments. With current consumer hardware it becomes possible\ - \ to run advanced machine learning algorithms in demanding performance situations,\ - \ yet expertise remains a prominent entry barrier for most would-be users. Currently\ - \ available implementations predominantly employ supervised machine learning techniques,\ - \ while the adaptive, self-organizing capabilities of unsupervised models are\ - \ not generally available. We present a free, new toolbox of unsupervised machine\ - \ learning algorithms implemented in Max 5 to support real-time interactive music\ - \ and video, aimed at the non-expert computer artist.},\n address = {Ann Arbor,\ - \ Michigan},\n author = {Benjamin D. Smith and Guy E. 
Garnett},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178419},\n issn = {2220-4806},\n keywords = {NIME, unsupervised\ - \ machine learning, adaptive resonance theory, self-organizing maps, Max 5},\n\ - \ publisher = {University of Michigan},\n title = {Unsupervised Play: Machine\ - \ Learning Toolkit for Max},\n url = {http://www.nime.org/proceedings/2012/nime2012_68.pdf},\n\ - \ year = {2012}\n}\n" + month: May + pages: 303--306 + publisher: Louisiana State University + title: The Haptic Hand + url: http://www.nime.org/proceedings/2015/nime2015_281.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/281/0281-file1.mov + urlsuppl2: http://www.nime.org/proceedings/2015/281/0281-file2.mov + year: 2015 + + +- ENTRYTYPE: inproceedings + ID: slee2015 + abstract: 'This paper introduces programmable text rendering that enables temporal + typography in web browsers. Typing is seen not only as a dynamic but interactive + process facilitating both scripted and live musical expression in various contexts + such as audio-visual performance using keyboards and live coding visualization. + With the programmable text animation , we turn plain text into a highly audiovisual + medium and a musical interface which is visually expressive. We describe a concrete + technical realization of the concept using Web Audio API, WebGL and GLSL shaders. + We further show a number of examples that illustrate instances of the concept + in various scenarios ranging from simple textual visualization to live coding + environments. Lastly, we present an audiovisual music piece that involves live + writing augmented by the visualization technique.' + address: 'Baton Rouge, Louisiana, USA' + author: 'Lee, Sang Won and Georg Essl' + bibtex: "@inproceedings{slee2015,\n abstract = {This paper introduces programmable\ + \ text rendering that enables temporal typography in web browsers. 
Typing is seen\ + \ not only as a dynamic but interactive process facilitating both scripted and\ + \ live musical expression in various contexts such as audio-visual performance\ + \ using keyboards and live coding visualization. With the programmable text animation\ + \ , we turn plain text into a highly audiovisual medium and a musical interface\ + \ which is visually expressive. We describe a concrete technical realization of\ + \ the concept using Web Audio API, WebGL and GLSL shaders. We further show a number\ + \ of examples that illustrate instances of the concept in various scenarios ranging\ + \ from simple textual visualization to live coding environments. Lastly, we present\ + \ an audiovisual music piece that involves live writing augmented by the visualization\ + \ technique.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Lee, Sang\ + \ Won and Georg Essl},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179114},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {65--69},\n publisher = {Louisiana State University},\n title\ + \ = {Web-Based Temporal Typography for Musical Expression and Performance},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_283.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178419 + doi: 10.5281/zenodo.1179114 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'NIME, unsupervised machine learning, adaptive resonance theory, self-organizing - maps, Max 5' - publisher: University of Michigan - title: 'Unsupervised Play: Machine Learning Toolkit for Max' - url: http://www.nime.org/proceedings/2012/nime2012_68.pdf - year: 2012 + month: May + pages: 65--69 + publisher: Louisiana State University + title: Web-Based Temporal Typography for Musical 
Expression and Performance + url: http://www.nime.org/proceedings/2015/nime2015_283.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Troyer2012 - abstract: 'We introduce a prototype of a new tangible step sequencerthat transforms - everyday objects into percussive musicalinstruments. DrumTop adapts our everyday - task-orientedhand gestures with everyday objects as the basis of musicalinteraction, - resulting in an easily graspable musical interfacefor musical novices. The sound, - tactile, and visual feedbackcomes directly from everyday objects as the players - programdrum patterns and rearrange the objects on the tabletopinterface. DrumTop - encourages the players to explore themusical potentiality of their surroundings - and be musicallycreative through rhythmic interactions with everyday objects. - The interface consists of transducers that trigger ahit, causing the objects themselves - to produce sound whenthey are in close contact with the transducers. We discusshow - we designed and implemented our current DrumTopprototype and describe how players - interact with the interface. We then highlight the players'' experience with Drumtop - and our plans for future work in the fields of musiceducation and performance.' - address: 'Ann Arbor, Michigan' - author: Akito van Troyer - bibtex: "@inproceedings{Troyer2012,\n abstract = {We introduce a prototype of a\ - \ new tangible step sequencerthat transforms everyday objects into percussive\ - \ musicalinstruments. DrumTop adapts our everyday task-orientedhand gestures with\ - \ everyday objects as the basis of musicalinteraction, resulting in an easily\ - \ graspable musical interfacefor musical novices. The sound, tactile, and visual\ - \ feedbackcomes directly from everyday objects as the players programdrum patterns\ - \ and rearrange the objects on the tabletopinterface. 
DrumTop encourages the players\ - \ to explore themusical potentiality of their surroundings and be musicallycreative\ - \ through rhythmic interactions with everyday objects. The interface consists\ - \ of transducers that trigger ahit, causing the objects themselves to produce\ - \ sound whenthey are in close contact with the transducers. We discusshow we designed\ - \ and implemented our current DrumTopprototype and describe how players interact\ - \ with the interface. We then highlight the players' experience with Drumtop and\ - \ our plans for future work in the fields of musiceducation and performance.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Akito van Troyer},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178441},\n issn = {2220-4806},\n keywords\ - \ = {Tangible User Interfaces, Playful Experience, Percussion, Step Sequencer,\ - \ Transducers, Everyday Objects},\n publisher = {University of Michigan},\n title\ - \ = {DrumTop: Playing with Everyday Objects},\n url = {http://www.nime.org/proceedings/2012/nime2012_70.pdf},\n\ - \ year = {2012}\n}\n" + ID: esheffield2015 + abstract: 'The Pneumatic Practice Pad is a commercially available 10'''' practice + pad that has been modified to allow for tension changes in a matter of seconds + using a small electric air pump. In this paper, we examine the rebound characteristics + of the Pneumatic Practice Pad at various pressure presets and compare them to + a sample of acoustic drums. We also review subjective feedback from participants + in a playing test.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Eric Sheffield and Sile O'Modhrain and Michael Gould and Brent Gillespie + bibtex: "@inproceedings{esheffield2015,\n abstract = {The Pneumatic Practice Pad\ + \ is a commercially available 10'' practice pad that has been modified to allow\ + \ for tension changes in a matter of seconds using a small electric air pump.\ + \ In this paper, we examine the rebound characteristics of the Pneumatic Practice\ + \ Pad at various pressure presets and compare them to a sample of acoustic drums.\ + \ We also review subjective feedback from participants in a playing test.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Eric Sheffield and Sile\ + \ O'Modhrain and Michael Gould and Brent Gillespie},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179178},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {231--234},\n publisher = {Louisiana\ + \ State University},\n title = {The Pneumatic Practice Pad},\n url = {http://www.nime.org/proceedings/2015/nime2015_286.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178441 + doi: 10.5281/zenodo.1179178 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Tangible User Interfaces, Playful Experience, Percussion, Step Sequencer, - Transducers, Everyday Objects' - publisher: University of Michigan - title: 'DrumTop: Playing with Everyday Objects' - url: http://www.nime.org/proceedings/2012/nime2012_70.pdf - year: 2012 + month: May + pages: 231--234 + publisher: Louisiana State University + title: The Pneumatic Practice Pad + url: http://www.nime.org/proceedings/2015/nime2015_286.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Ariza2012 - abstract: 'This paper demonstrates the practical benefits and performance 
opportunities - of using the dual-analog gamepad as a controller for real-time live electronics. - Numerous diverse instruments and interfaces, as well as detailed control mappings, - are described. Approaches to instrument and preset switching are also presented. - While all of the instrument implementations presented are made available through - the Martingale Pd library, resources for other synthesis languages are also described.' - address: 'Ann Arbor, Michigan' - author: Christopher Ariza - bibtex: "@inproceedings{Ariza2012,\n abstract = {This paper demonstrates the practical\ - \ benefits and performance opportunities of using the dual-analog gamepad as a\ - \ controller for real-time live electronics. Numerous diverse instruments and\ - \ interfaces, as well as detailed control mappings, are described. Approaches\ - \ to instrument and preset switching are also presented. While all of the instrument\ - \ implementations presented are made available through the Martingale Pd library,\ - \ resources for other synthesis languages are also described.},\n address = {Ann\ - \ Arbor, Michigan},\n author = {Christopher Ariza},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178205},\n issn = {2220-4806},\n keywords = {Controllers,\ - \ live electronics, dual-analog, gamepad, joystick, computer music, instrument,\ - \ interface},\n publisher = {University of Michigan},\n title = {The Dual-Analog\ - \ Gamepad as a Practical Platform for Live Electronics Instrument and Interface\ - \ Design},\n url = {http://www.nime.org/proceedings/2012/nime2012_73.pdf},\n year\ - \ = {2012}\n}\n" + ID: wmarley2015 + abstract: 'This paper describes a software extension to the Reactable entitled Gestroviser + that was developed to explore musician machine collaboration at the control signal + level. 
The system functions by sampling a performers input, processing or reshaping + this sampled input, and then repeatedly replaying it. The degree to which the + sampled control signal is processed during replay is adjustable in real-time by + the manipulation of a continuous finger slider function. The reshaping algorithm + uses stochastic methods commonly used for MIDI note generation from a provided + dataset. The reshaped signal therefore varies in an unpredictable manner. In this + way the Gestroviser is a device to capture, reshape and replay an instrumental + gesture. We describe the result of initial user testing of the system and discuss + possible further development.' + address: 'Baton Rouge, Louisiana, USA' + author: William Marley and Nicholas Ward + bibtex: "@inproceedings{wmarley2015,\n abstract = {This paper describes a software\ + \ extension to the Reactable entitled Gestroviser that was developed to explore\ + \ musician machine collaboration at the control signal level. The system functions\ + \ by sampling a performers input, processing or reshaping this sampled input,\ + \ and then repeatedly replaying it. The degree to which the sampled control signal\ + \ is processed during replay is adjustable in real-time by the manipulation of\ + \ a continuous finger slider function. The reshaping algorithm uses stochastic\ + \ methods commonly used for MIDI note generation from a provided dataset. The\ + \ reshaped signal therefore varies in an unpredictable manner. 
In this way the\ + \ Gestroviser is a device to capture, reshape and replay an instrumental gesture.\ + \ We describe the result of initial user testing of the system and discuss possible\ + \ further development.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ + \ {William Marley and Nicholas Ward},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179124},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {140--143},\n publisher = {Louisiana State University},\n title\ + \ = {Gestroviser: Toward Collaborative Agency in Digital Musical Instruments.},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_287.pdf},\n urlsuppl1 =\ + \ {http://www.nime.org/proceedings/2015/287/0287-file1.mp4},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178205 + doi: 10.5281/zenodo.1179124 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Controllers, live electronics, dual-analog, gamepad, joystick, computer - music, instrument, interface' - publisher: University of Michigan - title: The Dual-Analog Gamepad as a Practical Platform for Live Electronics Instrument - and Interface Design - url: http://www.nime.org/proceedings/2012/nime2012_73.pdf - year: 2012 + month: May + pages: 140--143 + publisher: Louisiana State University + title: 'Gestroviser: Toward Collaborative Agency in Digital Musical Instruments.' + url: http://www.nime.org/proceedings/2015/nime2015_287.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/287/0287-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Pardo2012 - abstract: 'Potential users of audio production software, such as parametric audio - equalizers, may be discouraged by the complexity of the interface. 
A new approach - creates a personalized on-screen slider that lets the user manipulate the audio - in terms of a descriptive term (e.g. "warm"), without the user needing to learn - or use the interface of an equalizer. This system learns mappings by presenting - a sequence of sounds to the user and correlating the gain in each frequency band - with the user''s preference rating. The system speeds learning through transfer - learning. Results on a study of 35 participants show how an effective, personalized - audio manipulation tool can be automatically built after only three ratings from - the user.' - address: 'Ann Arbor, Michigan' - author: Bryan Pardo and David Little and Darren Gergle - bibtex: "@inproceedings{Pardo2012,\n abstract = {Potential users of audio production\ - \ software, such as parametric audio equalizers, may be discouraged by the complexity\ - \ of the interface. A new approach creates a personalized on-screen slider that\ - \ lets the user manipulate the audio in terms of a descriptive term (e.g. \"warm\"\ - ), without the user needing to learn or use the interface of an equalizer. This\ - \ system learns mappings by presenting a sequence of sounds to the user and correlating\ - \ the gain in each frequency band with the user's preference rating. The system\ - \ speeds learning through transfer learning. 
Results on a study of 35 participants\ - \ show how an effective, personalized audio manipulation tool can be automatically\ - \ built after only three ratings from the user.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Bryan Pardo and David Little and Darren Gergle},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1180563},\n issn = {2220-4806},\n keywords = {Human computer\ - \ interaction, music, multimedia production, transfer learning},\n publisher =\ - \ {University of Michigan},\n title = {Towards Speeding Audio EQ Interface Building\ - \ with Transfer Learning},\n url = {http://www.nime.org/proceedings/2012/nime2012_74.pdf},\n\ - \ year = {2012}\n}\n" + ID: kschlei2015 + abstract: 'This paper explores the creation and testing of a new system for notating + physical actions on a surface. This notation is conceptualized through the medium + of, and initially tested on, multi-touch interfaces. Existing methods of notating + movement are reviewed, followed by a detailed explanation of our notation. User + trials were carried out in order to test how effective this notation was, the + results of which be explained. An analysis of the collected data follows, as well + as criticisms of the notation and testing process.' + address: 'Baton Rouge, Louisiana, USA' + author: Warren Enström and Josh Dennis and Brian Lynch and Kevin Schlei + bibtex: "@inproceedings{kschlei2015,\n abstract = {This paper explores the creation\ + \ and testing of a new system for notating physical actions on a surface. This\ + \ notation is conceptualized through the medium of, and initially tested on, multi-touch\ + \ interfaces. Existing methods of notating movement are reviewed, followed by\ + \ a detailed explanation of our notation. 
User trials were carried out in order\ + \ to test how effective this notation was, the results of which be explained.\ + \ An analysis of the collected data follows, as well as criticisms of the notation\ + \ and testing process.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ + \ {Warren Enstr\\''om and Josh Dennis and Brian Lynch and Kevin Schlei},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179056},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {83--86},\n\ + \ publisher = {Louisiana State University},\n title = {Musical Notation for Multi-Touch\ + \ Interfaces},\n url = {http://www.nime.org/proceedings/2015/nime2015_289.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180563 + doi: 10.5281/zenodo.1179056 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Human computer interaction, music, multimedia production, transfer learning' - publisher: University of Michigan - title: Towards Speeding Audio EQ Interface Building with Transfer Learning - url: http://www.nime.org/proceedings/2012/nime2012_74.pdf - year: 2012 + month: May + pages: 83--86 + publisher: Louisiana State University + title: Musical Notation for Multi-Touch Interfaces + url: http://www.nime.org/proceedings/2015/nime2015_289.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Stead2012 - abstract: 'We describe a system that allows non-programmers to specify the grammar - for a novel graphic score notation of their own design, defining performance notations - suitable for drawing in live situations on a surface such as a whiteboard. Thescore - can be interpreted via the camera of a smartphone,interactively scanned over the - whiteboard to control the parameters of synthesisers implemented in Overtone. 
- The visual grammar of the score, and its correspondence to the sound parameters, - can be defined by the user with a simple visual condition-action language. This - language can be edited on the touchscreen of an Android phone, allowing the grammar - to be modified live in performance situations.Interactive scanning of the score - is visible to the audience asa performance interface, with a colour classifier - and visual feature recogniser causing the grammar-specified events to be sent - using OSC messages via Wi-Fi from the hand-held smartphone to an audio workstation.' - address: 'Ann Arbor, Michigan' - author: Alistair G. Stead and Alan F. Blackwell and Samual Aaron - bibtex: "@inproceedings{Stead2012,\n abstract = {We describe a system that allows\ - \ non-programmers to specify the grammar for a novel graphic score notation of\ - \ their own design, defining performance notations suitable for drawing in live\ - \ situations on a surface such as a whiteboard. Thescore can be interpreted via\ - \ the camera of a smartphone,interactively scanned over the whiteboard to control\ - \ the parameters of synthesisers implemented in Overtone. The visual grammar of\ - \ the score, and its correspondence to the sound parameters, can be defined by\ - \ the user with a simple visual condition-action language. This language can be\ - \ edited on the touchscreen of an Android phone, allowing the grammar to be modified\ - \ live in performance situations.Interactive scanning of the score is visible\ - \ to the audience asa performance interface, with a colour classifier and visual\ - \ feature recogniser causing the grammar-specified events to be sent using OSC\ - \ messages via Wi-Fi from the hand-held smartphone to an audio workstation.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Alistair G. Stead and Alan F. 
Blackwell\ - \ and Samual Aaron},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178423},\n\ - \ issn = {2220-4806},\n keywords = {Graphic Notation, Disposable Notation, Live\ - \ Coding, Com-puter Vision, Mobile Music},\n publisher = {University of Michigan},\n\ - \ title = {Graphic Score Grammars for End-Users},\n url = {http://www.nime.org/proceedings/2012/nime2012_77.pdf},\n\ - \ year = {2012}\n}\n" + ID: bbortz2015 + abstract: 'Our experiment, Emotion in Motion, has amassed the world''s largest database + of human physiology associated with emotion in response to the presentation of + various selections of musical works. What began as a doctoral research study has + grown to include the emotional responses to musical experience from over ten thousand + participants across the world, from installations in Dublin, New York City, Norway, + Singapore, the Philippines, and Taiwan. The most recent iteration of is currently + underway in Taipei City, Taiwan. Preparation for this installation provided an + opportunity to reimagine the architecture of , allowing for a wider range of potential + applications than were originally possible with the original tools that drove + the experiment. Now more than an experiment, is a framework for developing myriad + emotional/musical/biomusical interactions with multiple co-located or remote participants. + This paper describes the development of this open-source framework and includes + discussion of its various components: hardware agnostic sensor inputs, refined + physiological signal processing tools, and a public database of data collected + during various instantiations of applications built on the framework. We also + discuss our ongoing work with this tool, and provide the reader with other potential + applications that they might realize in using .' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Brennon Bortz and Javier Jaimovich and R. Benjamin Knapp + bibtex: "@inproceedings{bbortz2015,\n abstract = {Our experiment, Emotion in Motion,\ + \ has amassed the world's largest database of human physiology associated with\ + \ emotion in response to the presentation of various selections of musical works.\ + \ What began as a doctoral research study has grown to include the emotional responses\ + \ to musical experience from over ten thousand participants across the world,\ + \ from installations in Dublin, New York City, Norway, Singapore, the Philippines,\ + \ and Taiwan. The most recent iteration of is currently underway in Taipei City,\ + \ Taiwan. Preparation for this installation provided an opportunity to reimagine\ + \ the architecture of , allowing for a wider range of potential applications than\ + \ were originally possible with the original tools that drove the experiment.\ + \ Now more than an experiment, is a framework for developing myriad emotional/musical/biomusical\ + \ interactions with multiple co-located or remote participants. This paper describes\ + \ the development of this open-source framework and includes discussion of its\ + \ various components: hardware agnostic sensor inputs, refined physiological signal\ + \ processing tools, and a public database of data collected during various instantiations\ + \ of applications built on the framework. We also discuss our ongoing work with\ + \ this tool, and provide the reader with other potential applications that they\ + \ might realize in using .},\n address = {Baton Rouge, Louisiana, USA},\n author\ + \ = {Brennon Bortz and Javier Jaimovich and {R. 
Benjamin} Knapp},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179034},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {44--49},\n\ + \ publisher = {Louisiana State University},\n title = {Emotion in Motion: A Reimagined\ + \ Framework for Biomusical/Emotional Interaction},\n url = {http://www.nime.org/proceedings/2015/nime2015_291.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178423 + doi: 10.5281/zenodo.1179034 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Graphic Notation, Disposable Notation, Live Coding, Com-puter Vision, - Mobile Music' - publisher: University of Michigan - title: Graphic Score Grammars for End-Users - url: http://www.nime.org/proceedings/2012/nime2012_77.pdf - year: 2012 + month: May + pages: 44--49 + publisher: Louisiana State University + title: 'Emotion in Motion: A Reimagined Framework for Biomusical/Emotional Interaction' + url: http://www.nime.org/proceedings/2015/nime2015_291.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Jackson2012 - abstract: 'This paper describes the bubble drum set, along with several polyrhythm - games and interactive music activities that have been developed to show its potential - for use as an input controller. The bubble drum set combines various sizes of - colorful exercise balls, held in place or suspended with conventional drum hardware - and thus creating a trap kit configuration in which the spherical surfaces can - be struck and stroked from varying angles using sticks, brushes, or even by hands - alone. The acoustic properties of these fitness balls are surprisingly rich, capable - of producing subtle differences in timbre while being responsive over a wide dynamic - range. 
The entire set has been purposefully designed to provide a player with - the means to achieve a rigorous and healthy physical workout, in addition to the - achieving beneficial cognitive and sensory stimulation that comes from playing - music with a sensitive and expressive instrument.' - address: 'Ann Arbor, Michigan' - author: Jay Alan Jackson - bibtex: "@inproceedings{Jackson2012,\n abstract = {This paper describes the bubble\ - \ drum set, along with several polyrhythm games and interactive music activities\ - \ that have been developed to show its potential for use as an input controller.\ - \ The bubble drum set combines various sizes of colorful exercise balls, held\ - \ in place or suspended with conventional drum hardware and thus creating a trap\ - \ kit configuration in which the spherical surfaces can be struck and stroked\ - \ from varying angles using sticks, brushes, or even by hands alone. The acoustic\ - \ properties of these fitness balls are surprisingly rich, capable of producing\ - \ subtle differences in timbre while being responsive over a wide dynamic range.\ - \ The entire set has been purposefully designed to provide a player with the means\ - \ to achieve a rigorous and healthy physical workout, in addition to the achieving\ - \ beneficial cognitive and sensory stimulation that comes from playing music with\ - \ a sensitive and expressive instrument.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Jay Alan Jackson},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178293},\n\ - \ issn = {2220-4806},\n keywords = {Bubble Drums, WaveMachine Lab’s Drumagog,\ - \ Polyrhythms.},\n publisher = {University of Michigan},\n title = {Bubble Drum-agog-ing:\ - \ Polyrhythm Games \\& Other Inter Activities},\n url = {http://www.nime.org/proceedings/2012/nime2012_8.pdf},\n\ - \ year = {2012}\n}\n" + ID: hlin2015 + abstract: 'Pedagogical research demonstrates 
theories and practices of perception + or production of melodic or harmonic ``intonation'''', i.e. the realization of + pitch accuracy. There are software and hardware to help students improve intonation. + Those tools have various functions. Nevertheless, they still miss something which + could benefit users very much. Even worse, they are not easy to be revised. Most + importantly, there should be more amusing and engaging interaction between a tuning + trainer and a user which is able to exchange roles of tuner and player. In this + research, we implement an open-source program named ``Harmonic Intonation Trainer'''' + in Pure Data. It includes most of essential elements of a smart tuner. A user + can tune his pitch while optionally hearing (through earphones) the target pitch + and other harmonic intervals in respective octaves. Moreover, in its interactive + accompanist mode, a user''s input pitch serves as the reference frequency; the + program follows his intonation to generate corresponding harmonic intervals. Additionally, + user can straightforwardly edit all parameters and patches by Pure Data. Any adoption + or revision is absolutely welcome. Finally, we will initiate another research + to test and to inspect experimental results from student orchestras so that its + future version is expected to be more sophisticated.' + address: 'Baton Rouge, Louisiana, USA' + author: Hsin-Ming Lin and Chin-Ming Lin + bibtex: "@inproceedings{hlin2015,\n abstract = {Pedagogical research demonstrates\ + \ theories and practices of perception or production of melodic or harmonic ``intonation'',\ + \ i.e. the realization of pitch accuracy. There are software and hardware to help\ + \ students improve intonation. Those tools have various functions. Nevertheless,\ + \ they still miss something which could benefit users very much. Even worse, they\ + \ are not easy to be revised. 
Most importantly, there should be more amusing and\ + \ engaging interaction between a tuning trainer and a user which is able to exchange\ + \ roles of tuner and player. In this research, we implement an open-source program\ + \ named ``Harmonic Intonation Trainer'' in Pure Data. It includes most of essential\ + \ elements of a smart tuner. A user can tune his pitch while optionally hearing\ + \ (through earphones) the target pitch and other harmonic intervals in respective\ + \ octaves. Moreover, in its interactive accompanist mode, a user's input pitch\ + \ serves as the reference frequency; the program follows his intonation to generate\ + \ corresponding harmonic intervals. Additionally, user can straightforwardly edit\ + \ all parameters and patches by Pure Data. Any adoption or revision is absolutely\ + \ welcome. Finally, we will initiate another research to test and to inspect experimental\ + \ results from student orchestras so that its future version is expected to be\ + \ more sophisticated.},\n address = {Baton Rouge, Louisiana, USA},\n author =\ + \ {Hsin-Ming Lin and Chin-Ming Lin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179118},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {38--39},\n publisher = {Louisiana State University},\n title\ + \ = {Harmonic Intonation Trainer: An Open Implementation in Pure Data},\n url\ + \ = {http://www.nime.org/proceedings/2015/nime2015_300.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/300/0300-file1.mp4},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178293 + doi: 10.5281/zenodo.1179118 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Bubble Drums, WaveMachine Lab’s Drumagog, Polyrhythms.' 
- publisher: University of Michigan - title: 'Bubble Drum-agog-ing: Polyrhythm Games & Other Inter Activities' - url: http://www.nime.org/proceedings/2012/nime2012_8.pdf - year: 2012 + month: May + pages: 38--39 + publisher: Louisiana State University + title: 'Harmonic Intonation Trainer: An Open Implementation in Pure Data' + url: http://www.nime.org/proceedings/2015/nime2015_300.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/300/0300-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Hochenbaum2012 - abstract: 'In this paper we present a multimodal system for analyzing drum performance. - In the first example we perform automatic drum hand recognition utilizing a technique - for automatic labeling of training data using direct sensors, and only indirect - sensors (e.g. a microphone) for testing. Left/Right drum hand recognition is achieved - with an average accuracy of 84.95% for two performers. Secondly we provide a study - investigating multimodality dependent performance metrics analysis.' - address: 'Ann Arbor, Michigan' - author: Jordan Hochenbaum and Ajay Kapur - bibtex: "@inproceedings{Hochenbaum2012,\n abstract = {In this paper we present a\ - \ multimodal system for analyzing drum performance. In the first example we perform\ - \ automatic drum hand recognition utilizing a technique for automatic labeling\ - \ of training data using direct sensors, and only indirect sensors (e.g. a microphone)\ - \ for testing. Left/Right drum hand recognition is achieved with an average accuracy\ - \ of 84.95% for two performers. 
Secondly we provide a study investigating multimodality\ - \ dependent performance metrics analysis.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Jordan Hochenbaum and Ajay Kapur},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178287},\n issn = {2220-4806},\n keywords = {Multimodality,\ - \ Drum stroke identification, surrogate sensors, surrogate data training, machine\ - \ learning, music information retrieval, performance metrics},\n publisher = {University\ - \ of Michigan},\n title = {Drum Stroke Computing: Multimodal Signal Processing\ - \ for Drum Stroke Identification and Performance Metrics},\n url = {http://www.nime.org/proceedings/2012/nime2012_82.pdf},\n\ - \ year = {2012}\n}\n" + ID: jbarbosa2015 + abstract: 'Evaluation has been suggested to be one of the main trends in current + NIME research. However, the meaning of the term for the community may not be as + clear as it seems. In order to explore this issue, we have analyzed all papers + and posters published in the proceedings of the NIME conference from 2012 to 2014. + For each publication that explicitly mentioned the term evaluation, we looked + for: a) What targets and stakeholders were considered? b) What goals were set? + c) What criteria were used? d) What methods were used? e) How long did the evaluation + last? Results show different understandings of evaluation, with little consistency + regarding the usage of the word. Surprisingly in some cases, not even basic information + such as goal, criteria and methods were provided. In this paper, we attempt to + provide an idea of what evaluation means for the NIME community, pushing the discussion + towards how could we make a better use of evaluation on NIME design and what criteria + should be used regarding each goal.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Jeronimo Barbosa and Joseph Malloch and Marcelo Wanderley and Stéphane Huot + bibtex: "@inproceedings{jbarbosa2015,\n abstract = {Evaluation has been suggested\ + \ to be one of the main trends in current NIME research. However, the meaning\ + \ of the term for the community may not be as clear as it seems. In order to explore\ + \ this issue, we have analyzed all papers and posters published in the proceedings\ + \ of the NIME conference from 2012 to 2014. For each publication that explicitly\ + \ mentioned the term evaluation, we looked for: a) What targets and stakeholders\ + \ were considered? b) What goals were set? c) What criteria were used? d) What\ + \ methods were used? e) How long did the evaluation last? Results show different\ + \ understandings of evaluation, with little consistency regarding the usage of\ + \ the word. Surprisingly in some cases, not even basic information such as goal,\ + \ criteria and methods were provided. 
In this paper, we attempt to provide an\ + \ idea of what evaluation means for the NIME community, pushing the discussion\ + \ towards how could we make a better use of evaluation on NIME design and what\ + \ criteria should be used regarding each goal.},\n address = {Baton Rouge, Louisiana,\ + \ USA},\n author = {Jeronimo Barbosa and Joseph Malloch and Marcelo Wanderley\ + \ and St\\'ephane Huot},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179010},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {156--161},\n publisher = {Louisiana State University},\n title\ + \ = {What does 'Evaluation' mean for the NIME community?},\n url = {http://www.nime.org/proceedings/2015/nime2015_301.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/301/0301-file1.xlsx},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178287 + doi: 10.5281/zenodo.1179010 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Multimodality, Drum stroke identification, surrogate sensors, surrogate - data training, machine learning, music information retrieval, performance metrics' - publisher: University of Michigan - title: 'Drum Stroke Computing: Multimodal Signal Processing for Drum Stroke Identification - and Performance Metrics' - url: http://www.nime.org/proceedings/2012/nime2012_82.pdf - year: 2012 + month: May + pages: 156--161 + publisher: Louisiana State University + title: 'What does ''Evaluation'' mean for the NIME community?' 
+ url: http://www.nime.org/proceedings/2015/nime2015_301.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/301/0301-file1.xlsx + year: 2015 - ENTRYTYPE: inproceedings - ID: Levy2012 - abstract: 'OMax is an improvisation software based on a graph representation encoding - the pattern repetitions and structures of a sequence, built incrementally and - in real-time from a live Midi or Audio source. We present in this paper a totally - rewritten version of the software. The new design leads to refine the spectral - listening of OMax and to consider different methods to build the symbolic alphabet - labeling our symbolic units. The very modular and versatile architecture makes - possible new musical configurations and we tried the software with different styles - and musical situations. A novel visualization is proposed, which displays the - current state of the learnt knowledge and allows to notice, both on the fly and - a posteriori, points of musical interest and higher level structures.' - address: 'Ann Arbor, Michigan' - author: Benjamin Levy and Georges Bloch and Gerard Assayag - bibtex: "@inproceedings{Levy2012,\n abstract = {OMax is an improvisation software\ - \ based on a graph representation encoding the pattern repetitions and structures\ - \ of a sequence, built incrementally and in real-time from a live Midi or Audio\ - \ source. We present in this paper a totally rewritten version of the software.\ - \ The new design leads to refine the spectral listening of OMax and to consider\ - \ different methods to build the symbolic alphabet labeling our symbolic units.\ - \ The very modular and versatile architecture makes possible new musical configurations\ - \ and we tried the software with different styles and musical situations. 
A novel\ - \ visualization is proposed, which displays the current state of the learnt knowledge\ - \ and allows to notice, both on the fly and a posteriori, points of musical interest\ - \ and higher level structures.},\n address = {Ann Arbor, Michigan},\n author =\ - \ {Benjamin Levy and Georges Bloch and Gerard Assayag},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178327},\n issn = {2220-4806},\n keywords = {OMax, Improvisation,\ - \ Machine Learning, Machine Listen-ing, Visualization, Sequence Model, Software\ - \ Architecture},\n publisher = {University of Michigan},\n title = {OMaxist Dialectics:\ - \ Capturing, Visualizing and Expanding Improvisations},\n url = {http://www.nime.org/proceedings/2012/nime2012_87.pdf},\n\ - \ year = {2012}\n}\n" + ID: ihattwick2015 + abstract: 'The Pearl is a multi-modal computer interface initially conceived as + an interactive prop for a multi-artistic theatrical performance. It is a spherical + hand-held wireless controller embedded with various sensor technologies and interactive + lighting. The lighting was a key conceptual component in the instrument''s creation + both as a theatrical prop and also as an interface for musical performance as + it helps to address conceptual challenges and opportunities posed by the instrument''s + spherical form. This paper begins by providing a brief description of the Pearl + and its use as a spherical instrument. We then discuss mapping the Pearl both + to generate sound and control its interactive lighting, and identify different + strategies for its use. Strategies we identify include feedback regarding performer + gesture, information about the state of the instrument, and use as an aesthetic + performance component. 
' + address: 'Baton Rouge, Louisiana, USA' + author: Ian Hattwick and Marcelo Wanderley + bibtex: "@inproceedings{ihattwick2015,\n abstract = {The Pearl is a multi-modal\ + \ computer interface initially conceived as an interactive prop for a multi-artistic\ + \ theatrical performance. It is a spherical hand-held wireless controller embedded\ + \ with various sensor technologies and interactive lighting. The lighting was\ + \ a key conceptual component in the instrument's creation both as a theatrical\ + \ prop and also as an interface for musical performance as it helps to address\ + \ conceptual challenges and opportunities posed by the instrument's spherical\ + \ form. This paper begins by providing a brief description of the Pearl and its\ + \ use as a spherical instrument. We then discuss mapping the Pearl both to generate\ + \ sound and control its interactive lighting, and identify different strategies\ + \ for its use. Strategies we identify include feedback regarding performer gesture,\ + \ information about the state of the instrument, and use as an aesthetic performance\ + \ component. 
},\n address = {Baton Rouge, Louisiana, USA},\n author = {Ian Hattwick\ + \ and Marcelo Wanderley},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179080},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {201--204},\n publisher = {Louisiana State University},\n title\ + \ = {Interactive Lighting in the Pearl: Considerations and Implementation},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_302.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178327 + doi: 10.5281/zenodo.1179080 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'OMax, Improvisation, Machine Learning, Machine Listen-ing, Visualization, - Sequence Model, Software Architecture' - publisher: University of Michigan - title: 'OMaxist Dialectics: Capturing, Visualizing and Expanding Improvisations' - url: http://www.nime.org/proceedings/2012/nime2012_87.pdf - year: 2012 + month: May + pages: 201--204 + publisher: Louisiana State University + title: 'Interactive Lighting in the Pearl: Considerations and Implementation' + url: http://www.nime.org/proceedings/2015/nime2015_302.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: ElShimy2012 - abstract: 'In this paper, we discuss the design and testing of a reactive environment - for musical performance. Driven by the interpersonal interactions amongst musicians, - our system gives users, i.e., several musicians playing together in a band, real-time - control over certain aspects of their performance, enabling them to change volume - levels dynamically simply by moving around. 
It differs most notably from the majority - of ventures into the design of novel musical interfaces and installations in its - multidisciplinary approach, drawing on techniques from Human-Computer Interaction, - social sciences and ludology. Our User-Centered Design methodology was central - to producing an interactive environment that enhances traditional performance - with novel functionalities. During a formal experiment, musicians reported finding - our system exciting and enjoyable. We also introduce some additional interactions - that can further enhance the interactivity of our reactive environment. In describing - the particular challenges of working with such a unique and creative user as the - musician, we hope that our approach can be of guidance to interface developers - working on applications of a creative nature.' - address: 'Ann Arbor, Michigan' - author: Dalia El-Shimy and Thomas Hermann and Jeremy Cooperstock - bibtex: "@inproceedings{ElShimy2012,\n abstract = {In this paper, we discuss the\ - \ design and testing of a reactive environment for musical performance. Driven\ - \ by the interpersonal interactions amongst musicians, our system gives users,\ - \ i.e., several musicians playing together in a band, real-time control over certain\ - \ aspects of their performance, enabling them to change volume levels dynamically\ - \ simply by moving around. It differs most notably from the majority of ventures\ - \ into the design of novel musical interfaces and installations in its multidisciplinary\ - \ approach, drawing on techniques from Human-Computer Interaction, social sciences\ - \ and ludology. Our User-Centered Design methodology was central to producing\ - \ an interactive environment that enhances traditional performance with novel\ - \ functionalities. During a formal experiment, musicians reported finding our\ - \ system exciting and enjoyable. 
We also introduce some additional interactions\ - \ that can further enhance the interactivity of our reactive environment. In describing\ - \ the particular challenges of working with such a unique and creative user as\ - \ the musician, we hope that our approach can be of guidance to interface developers\ - \ working on applications of a creative nature.},\n address = {Ann Arbor, Michigan},\n\ - \ author = {Dalia El-Shimy and Thomas Hermann and Jeremy Cooperstock},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178247},\n issn = {2220-4806},\n publisher\ - \ = {University of Michigan},\n title = {A Reactive Environment for Dynamic Volume\ - \ Control},\n url = {http://www.nime.org/proceedings/2012/nime2012_88.pdf},\n\ - \ year = {2012}\n}\n" + ID: rgraham2015 + abstract: 'This paper presents the ideas and mapping strategies behind a performance + system that uses a combination of motion tracking and feature extraction tools + to manage complex multichannel audio materials for real-time music composition. + The use of embodied metaphors within these mappings is seen as a means of managing + the complexity of a musical performance across multiple modalities. In particular, + we will investigate how these mapping strategies may facilitate the creation of + performance systems whose accessibility and richness are enhanced by common integrating + bases. A key focus for this work is the investigation of the embodied image schema + theories of Lakoff and Johnson alongside similarly embodied metaphorical models + within Smalley''s influential theory of electroacoustic music (spectromorphology). + These metaphors will be investigated for their use as grounding structural components + and dynamics for creative practices and musical interaction design. 
We argue that + pairing metaphorical models of forces with environmental forms may have particular + significance for the design of complex mappings for digital music performance.' + address: 'Baton Rouge, Louisiana, USA' + author: Richard Graham and Brian Bridges + bibtex: "@inproceedings{rgraham2015,\n abstract = {This paper presents the ideas\ + \ and mapping strategies behind a performance system that uses a combination of\ + \ motion tracking and feature extraction tools to manage complex multichannel\ + \ audio materials for real-time music composition. The use of embodied metaphors\ + \ within these mappings is seen as a means of managing the complexity of a musical\ + \ performance across multiple modalities. In particular, we will investigate how\ + \ these mapping strategies may facilitate the creation of performance systems\ + \ whose accessibility and richness are enhanced by common integrating bases. A\ + \ key focus for this work is the investigation of the embodied image schema theories\ + \ of Lakoff and Johnson alongside similarly embodied metaphorical models within\ + \ Smalley's influential theory of electroacoustic music (spectromorphology). These\ + \ metaphors will be investigated for their use as grounding structural components\ + \ and dynamics for creative practices and musical interaction design. 
We argue\ + \ that pairing metaphorical models of forces with environmental forms may have\ + \ particular significance for the design of complex mappings for digital music\ + \ performance.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Richard\ + \ Graham and Brian Bridges},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179066},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {103--106},\n publisher = {Louisiana State University},\n title\ + \ = {Managing Musical Complexity with Embodied Metaphors},\n url = {http://www.nime.org/proceedings/2015/nime2015_303.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/303/0303-file1.mov},\n urlsuppl2\ + \ = {http://www.nime.org/proceedings/2015/303/0303-file2.wav},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178247 + doi: 10.5281/zenodo.1179066 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - publisher: University of Michigan - title: A Reactive Environment for Dynamic Volume Control - url: http://www.nime.org/proceedings/2012/nime2012_88.pdf - year: 2012 + month: May + pages: 103--106 + publisher: Louisiana State University + title: Managing Musical Complexity with Embodied Metaphors + url: http://www.nime.org/proceedings/2015/nime2015_303.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/303/0303-file1.mov + urlsuppl2: http://www.nime.org/proceedings/2015/303/0303-file2.wav + year: 2015 - ENTRYTYPE: inproceedings - ID: Surges2012 - abstract: 'This paper describes three hardware devices for integrating modular synthesizers - with computers, each with a different approach to the relationship between hardware - and software. 
The devices discussed are the USB-Octomod, an 8-channel OSC-compatible - computer-controlled control-voltage generator, the tabulaRasa, a hardware table-lookup - oscillator synthesis module with corresponding waveform design software, and the - pucktronix.snake.corral, a dual 8x8 computer-controlled analog signal routing - matrix. The devices make use of open-source hardware and software, and are designed - around affordable micro-controllers and integrated circuits. ' - address: 'Ann Arbor, Michigan' - author: Greg Surges - bibtex: "@inproceedings{Surges2012,\n abstract = {This paper describes three hardware\ - \ devices for integrating modular synthesizers with computers, each with a different\ - \ approach to the relationship between hardware and software. The devices discussed\ - \ are the USB-Octomod, an 8-channel OSC-compatible computer-controlled control-voltage\ - \ generator, the tabulaRasa, a hardware table-lookup oscillator synthesis module\ - \ with corresponding waveform design software, and the pucktronix.snake.corral,\ - \ a dual 8x8 computer-controlled analog signal routing matrix. The devices make\ - \ use of open-source hardware and software, and are designed around affordable\ - \ micro-controllers and integrated circuits. },\n address = {Ann Arbor, Michigan},\n\ - \ author = {Greg Surges},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178427},\n\ - \ issn = {2220-4806},\n keywords = {modular synthesis, interface, diy, open-source},\n\ - \ publisher = {University of Michigan},\n title = {DIY Hybrid Analog/Digital Modular\ - \ Synthesis},\n url = {http://www.nime.org/proceedings/2012/nime2012_9.pdf},\n\ - \ year = {2012}\n}\n" + ID: apon2015 + abstract: 'This paper describes the motivation and process of developing a musical + instrument for an unborn child. 
Well established research shows a fetus in the + womb can respond to and benefit from stimuli from the outside world. A musical + instrument designed for this unique context can leverage the power of this interaction. + Two prototypes were constructed and tested during separate pregnancies and the + experiences are presented, and the limitation of the sensor technology identified. + We discuss our discoveries about design considerations and challenges for such + an instrument, and project thought-provoking questions that arise from its potential + applications.' + address: 'Baton Rouge, Louisiana, USA' + author: Aura Pon and Johnty Wang and Laurie Radford and Sheelagh Carpendale + bibtex: "@inproceedings{apon2015,\n abstract = {This paper describes the motivation\ + \ and process of developing a musical instrument for an unborn child. Well established\ + \ research shows a fetus in the womb can respond to and benefit from stimuli from\ + \ the outside world. A musical instrument designed for this unique context can\ + \ leverage the power of this interaction. Two prototypes were constructed and\ + \ tested during separate pregnancies and the experiences are presented, and the\ + \ limitation of the sensor technology identified. 
We discuss our discoveries about\ + \ design considerations and challenges for such an instrument, and project thought-provoking\ + \ questions that arise from its potential applications.},\n address = {Baton Rouge,\ + \ Louisiana, USA},\n author = {Aura Pon and Johnty Wang and Laurie Radford and\ + \ Sheelagh Carpendale},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179156},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {87--90},\n publisher = {Louisiana State University},\n title\ + \ = {Womba: A Musical Instrument for an Unborn Child},\n url = {http://www.nime.org/proceedings/2015/nime2015_304.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/304/0304-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178427 + doi: 10.5281/zenodo.1179156 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'modular synthesis, interface, diy, open-source' - publisher: University of Michigan - title: DIY Hybrid Analog/Digital Modular Synthesis - url: http://www.nime.org/proceedings/2012/nime2012_9.pdf - year: 2012 + month: May + pages: 87--90 + publisher: Louisiana State University + title: 'Womba: A Musical Instrument for an Unborn Child' + url: http://www.nime.org/proceedings/2015/nime2015_304.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/304/0304-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Johnston2012 - abstract: 'Mapping between musical interfaces, and sound engines, is integral to - the nature of an interface [3]. Traditionally, musical applications for touch - surfaces have directly mapped touch coordinates to control parameters. However, - recent work [9] is looking at new methods of control that use relational multi-point - analysis. 
Instead of directly using touch coordinates, which are related to a - global screen space, an initial touch is used as an `anchor'' to create a local - coordinate space in which subsequent touches can be located and compared. This - local coordinate space frees touches from being locked to one single relationship, - and allows for more complex interaction between touch events. So far, this method - has only been implemented on Apple computer''s small capacitive touch pads. Additionally, - there has yet to be a user study that directly compares [9] against mappings of - touch events within global coordinate spaces. With this in mind, we have developed - and evaluated two interfaces with the aim of determining and quantifying some - of these differences within the context of our custom large multi-touch surfaces - [1].' - address: 'Ann Arbor, Michigan' - author: Blake Johnston and Owen Vallis and Ajay Kapur - bibtex: "@inproceedings{Johnston2012,\n abstract = {Mapping between musical interfaces,\ - \ and sound engines, is integral to the nature of an interface [3]. Traditionally,\ - \ musical applications for touch surfaces have directly mapped touch coordinates\ - \ to control parameters. However, recent work [9] is looking at new methods of\ - \ control that use relational multi-point analysis. Instead of directly using\ - \ touch coordinates, which are related to a global screen space, an initial touch\ - \ is used as an `anchor' to create a local coordinate space in which subsequent\ - \ touches can be located and compared. This local coordinate space frees touches\ - \ from being locked to one single relationship, and allows for more complex interaction\ - \ between touch events. So far, this method has only been implemented on Apple\ - \ computer's small capacitive touch pads. Additionally, there has yet to be a\ - \ user study that directly compares [9] against mappings of touch events within\ - \ global coordinate spaces. 
With this in mind, we have developed and evaluated\ - \ two interfaces with the aim of determining and quantifying some of these differences\ - \ within the context of our custom large multi-touch surfaces [1].},\n address\ - \ = {Ann Arbor, Michigan},\n author = {Blake Johnston and Owen Vallis and Ajay\ - \ Kapur},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178297},\n issn = {2220-4806},\n\ - \ keywords = {Multi-Touch, User Study, Relational-point interface},\n publisher\ - \ = {University of Michigan},\n title = {A Comparative User Study of Two Methods\ - \ of Control on a Multi-Touch Surface for Musical Expression},\n url = {http://www.nime.org/proceedings/2012/nime2012_94.pdf},\n\ - \ year = {2012}\n}\n" + ID: amarquezborbon2015 + abstract: 'This paper examines the notion of community as commonly employed within + NIME discourses. Our aim is to clarify and define the term through the community + of practice framework. We argue that through its formal use and application, the + notion of community becomes a significant space for the examination of emergent + musical practices that could otherwise be overlooked. This paper defines community + of practice, as originally developed in the social sciences by Lave and Wegener, + and applies it within the NIME context through the examination of existing communities + of practice such as the laptop performance community, laptop orchestras, as well + as the Satellite CCRMA and Patchblocks communities. ' + address: 'Baton Rouge, Louisiana, USA' + author: Adnan Marquez-Borbon and Paul Stapleton + bibtex: "@inproceedings{amarquezborbon2015,\n abstract = {This paper examines the\ + \ notion of community as commonly employed within NIME discourses. 
Our aim is\ + \ to clarify and define the term through the community of practice framework.\ + \ We argue that through its formal use and application, the notion of community\ + \ becomes a significant space for the examination of emergent musical practices\ + \ that could otherwise be overlooked. This paper defines community of practice,\ + \ as originally developed in the social sciences by Lave and Wegener, and applies\ + \ it within the NIME context through the examination of existing communities of\ + \ practice such as the laptop performance community, laptop orchestras, as well\ + \ as the Satellite CCRMA and Patchblocks communities. },\n address = {Baton Rouge,\ + \ Louisiana, USA},\n author = {Adnan Marquez-Borbon and Paul Stapleton},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179128},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {307--312},\n\ + \ publisher = {Louisiana State University},\n title = {Fourteen Years of NIME:\ + \ The Value and Meaning of `Community' in Interactive Music Research},\n url =\ + \ {http://www.nime.org/proceedings/2015/nime2015_308.pdf},\n year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178297 + doi: 10.5281/zenodo.1179128 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Multi-Touch, User Study, Relational-point interface' - publisher: University of Michigan - title: A Comparative User Study of Two Methods of Control on a Multi-Touch Surface - for Musical Expression - url: http://www.nime.org/proceedings/2012/nime2012_94.pdf - year: 2012 + month: May + pages: 307--312 + publisher: Louisiana State University + title: 'Fourteen Years of NIME: The Value and Meaning of `Community'' in Interactive + Music Research' + url: http://www.nime.org/proceedings/2015/nime2015_308.pdf + 
year: 2015 - ENTRYTYPE: inproceedings - ID: Levinson2012 - abstract: 'TedStick is a new wireless musical instrument that processes acoustic - sounds resonating within its wooden body and ma-nipulates them via gestural movements. - The sounds are transduced by a piezoelectric sensor inside the wooden body, so - any tactile contact with TedStick is transmitted as audio and further processed - by a computer. The main method for performing with TedStick focuses on extracting - diverse sounds from within the resonant properties of TedStick it-self. This is - done by holding TedStick in one hand and a standard drumstick in the opposite - hand while tapping, rubbing, or scraping the two against each other. Gestural - movements of TedStick are then mapped to parameters for several sound effects - including pitch shift, delay, reverb and low/high pass filters. Using this technique - the hand holding the drumstick can control the acoustic sounds/interaction between - the sticks while the hand holding TedStick can fo-cus purely on controlling the - sound manipulation and effects parameters.' - address: 'Ann Arbor, Michigan' - author: Cory Levinson - bibtex: "@inproceedings{Levinson2012,\n abstract = {TedStick is a new wireless musical\ - \ instrument that processes acoustic sounds resonating within its wooden body\ - \ and ma-nipulates them via gestural movements. The sounds are transduced by a\ - \ piezoelectric sensor inside the wooden body, so any tactile contact with TedStick\ - \ is transmitted as audio and further processed by a computer. The main method\ - \ for performing with TedStick focuses on extracting diverse sounds from within\ - \ the resonant properties of TedStick it-self. This is done by holding TedStick\ - \ in one hand and a standard drumstick in the opposite hand while tapping, rubbing,\ - \ or scraping the two against each other. 
Gestural movements of TedStick are then\ - \ mapped to parameters for several sound effects including pitch shift, delay,\ - \ reverb and low/high pass filters. Using this technique the hand holding the\ - \ drumstick can control the acoustic sounds/interaction between the sticks while\ - \ the hand holding TedStick can fo-cus purely on controlling the sound manipulation\ - \ and effects parameters.},\n address = {Ann Arbor, Michigan},\n author = {Cory\ - \ Levinson},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178325},\n issn\ - \ = {2220-4806},\n keywords = {tangible user interface, piezoelectric sensors,\ - \ gestural per-formance, digital sound manipulation},\n publisher = {University\ - \ of Michigan},\n title = {TedStick: A Tangible Electrophonic Drumstick},\n url\ - \ = {http://www.nime.org/proceedings/2012/nime2012_96.pdf},\n year = {2012}\n\ - }\n" + ID: croberts2015 + abstract: 'We describe research extending the interactive affordances of textual + code fragments in creative coding environments. In particular we examine the potential + of source code both to display the state of running processes and also to alter + state using means other than traditional text editing. In contrast to previous + research that has focused on the inclusion of additional interactive widgets inside + or alongside text editors, our research adds a parsing stage to the runtime evaluation + of code fragments and imparts additional interactive capabilities on the source + code itself. After implementing various techniques in the creative coding environment + Gibber, we evaluate our research through a survey on the various methods of visual + feedback provided by our research. In addition to results quantifying preferences + for certain techniques over others, we found near unanimous support among survey + respondents for including similar techniques in other live coding environments.' 
+ address: 'Baton Rouge, Louisiana, USA' + author: Charles Roberts and Matthew Wright and JoAnn Kuchera-Morin + bibtex: "@inproceedings{croberts2015,\n abstract = {We describe research extending\ + \ the interactive affordances of textual code fragments in creative coding environments.\ + \ In particular we examine the potential of source code both to display the state\ + \ of running processes and also to alter state using means other than traditional\ + \ text editing. In contrast to previous research that has focused on the inclusion\ + \ of additional interactive widgets inside or alongside text editors, our research\ + \ adds a parsing stage to the runtime evaluation of code fragments and imparts\ + \ additional interactive capabilities on the source code itself. After implementing\ + \ various techniques in the creative coding environment Gibber, we evaluate our\ + \ research through a survey on the various methods of visual feedback provided\ + \ by our research. In addition to results quantifying preferences for certain\ + \ techniques over others, we found near unanimous support among survey respondents\ + \ for including similar techniques in other live coding environments.},\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Charles Roberts and Matthew Wright\ + \ and JoAnn Kuchera-Morin},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179164},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {126--131},\n publisher = {Louisiana State University},\n title\ + \ = {Beyond Editing: Extended Interaction with Textual Code Fragments},\n url\ + \ = {http://www.nime.org/proceedings/2015/nime2015_310.pdf},\n urlsuppl1 = {http://www.nime.org/proceedings/2015/310/0310-file1.mov},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 
10.5281/zenodo.1178325 + doi: 10.5281/zenodo.1179164 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'tangible user interface, piezoelectric sensors, gestural per-formance, - digital sound manipulation' - publisher: University of Michigan - title: 'TedStick: A Tangible Electrophonic Drumstick' - url: http://www.nime.org/proceedings/2012/nime2012_96.pdf - year: 2012 + month: May + pages: 126--131 + publisher: Louisiana State University + title: 'Beyond Editing: Extended Interaction with Textual Code Fragments' + url: http://www.nime.org/proceedings/2015/nime2015_310.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/310/0310-file1.mov + year: 2015 - ENTRYTYPE: inproceedings - ID: Lu2012 - abstract: 'WIS platform is a wireless interactive sensor platform de-signed to support - dynamic and interactive applications. The platform consists of a capture system - which includes multi-ple on-body Zigbee compatible motion sensors, a processing - unit and an audio-visual display control unit. It has a com-plete open architecture - and provides interfaces to interact with other user-designed applications. Therefore, - WIS plat-form is highly extensible. Through gesture recognitions by on-body sensor - nodes and data processing, WIS platform can offer real-time audio and visual experiences - to the users. Based on this platform, we set up a multimedia installation that - presents a new interaction model between the partic-ipants and the audio-visual - environment. Furthermore, we are also trying to apply WIS platform to other installations - and performances.' - address: 'Ann Arbor, Michigan' - author: Jia-Liang Lu and Da-Lei Fang and Yi Qin and Jiu-Qiang Tang - bibtex: "@inproceedings{Lu2012,\n abstract = {WIS platform is a wireless interactive\ - \ sensor platform de-signed to support dynamic and interactive applications. 
The\ - \ platform consists of a capture system which includes multi-ple on-body Zigbee\ - \ compatible motion sensors, a processing unit and an audio-visual display control\ - \ unit. It has a com-plete open architecture and provides interfaces to interact\ - \ with other user-designed applications. Therefore, WIS plat-form is highly extensible.\ - \ Through gesture recognitions by on-body sensor nodes and data processing, WIS\ - \ platform can offer real-time audio and visual experiences to the users. Based\ - \ on this platform, we set up a multimedia installation that presents a new interaction\ - \ model between the partic-ipants and the audio-visual environment. Furthermore,\ - \ we are also trying to apply WIS platform to other installations and performances.},\n\ - \ address = {Ann Arbor, Michigan},\n author = {Jia-Liang Lu and Da-Lei Fang and\ - \ Yi Qin and Jiu-Qiang Tang},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178329},\n\ - \ issn = {2220-4806},\n keywords = {Interactive, Audio-visual experience},\n publisher\ - \ = {University of Michigan},\n title = {Wireless Interactive Sensor Platform\ - \ for Real-Time Audio-Visual Experience},\n url = {http://www.nime.org/proceedings/2012/nime2012_98.pdf},\n\ - \ year = {2012}\n}\n" + ID: anovello2015 + abstract: 'We present the prototype of a hybrid instrument, which uses two contact + microphones to sonify the gestures of a player on a generic surface, while a gesture + localization algorithm controls the pitch of the sonified output depending on + the position of the gestures. To achieve the gesture localization we use a novel + approach combining attack parametrization and template matching across the two + microphone channels. With this method we can correctly localize 80 ± 9 % of the + percussive gestures. The user can assign determined pitches to specific positions + and change the pitch palette in real time. 
The tactile feedback characteristic + of every surface opens a set of new playing strategies and possibilities specific + to any chosen object. The advantages of such a system are the affordable production, + flexibility of concert location, object-specific musical instruments, portability, + and easy setup.' + address: 'Baton Rouge, Louisiana, USA' + author: Alberto Novello and Antoni Rayzhekov + bibtex: "@inproceedings{anovello2015,\n abstract = {We present the prototype of\ + \ a hybrid instrument, which uses two contact microphones to sonify the gestures\ + \ of a player on a generic surface, while a gesture localization algorithm controls\ + \ the pitch of the sonified output depending on the position of the gestures.\ + \ To achieve the gesture localization we use a novel approach combining attack\ + \ parametrization and template matching across the two microphone channels. With\ + \ this method we can correctly localize 80 $\\pm$ 9 % of the percussive gestures.\ + \ The user can assign determined pitches to specific positions and change the\ + \ pitch palette in real time. The tactile feedback characteristic of every surface\ + \ opens a set of new playing strategies and possibilities specific to any chosen\ + \ object. 
The advantages of such a system are the affordable production, flexibility\ + \ of concert location, object-specific musical instruments, portability, and easy\ + \ setup.},\n address = {Baton Rouge, Louisiana, USA},\n author = {Alberto Novello\ + \ and Antoni Rayzhekov},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179148},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {170--173},\n publisher = {Louisiana State University},\n title\ + \ = {A prototype for pitched gestural sonification of surfaces using two contact\ + \ microphones},\n url = {http://www.nime.org/proceedings/2015/nime2015_311.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178329 + doi: 10.5281/zenodo.1179148 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Interactive, Audio-visual experience' - publisher: University of Michigan - title: Wireless Interactive Sensor Platform for Real-Time Audio-Visual Experience - url: http://www.nime.org/proceedings/2012/nime2012_98.pdf - year: 2012 + month: May + pages: 170--173 + publisher: Louisiana State University + title: A prototype for pitched gestural sonification of surfaces using two contact + microphones + url: http://www.nime.org/proceedings/2015/nime2015_311.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Kapur2012 - abstract: 'In this paper, we introduce Kritaanjli, a robotic harmo-nium. Details - concerning the design, construction, and use of Kritaanjli are discussed. After - an examination of related work, quantitative research concerning the hardware - chosen in the construction of the instrument is shown, as is a thor-ough exposition - of the design process and use of CAD/CAM techniques in the design lifecycle of - the instrument. 
Addi-tionally, avenues for future work and compositional prac-tices - are focused upon, with particular emphasis placed on human/robot interaction, - pedagogical techniques afforded by the robotic instrument, and compositional avenues - made accessible through the use of Kritaanjli.' - address: 'Ann Arbor, Michigan' - author: Ajay Kapur and Jim Murphy and Dale Carnegie - bibtex: "@inproceedings{Kapur2012,\n abstract = {In this paper, we introduce Kritaanjli,\ - \ a robotic harmo-nium. Details concerning the design, construction, and use of\ - \ Kritaanjli are discussed. After an examination of related work, quantitative\ - \ research concerning the hardware chosen in the construction of the instrument\ - \ is shown, as is a thor-ough exposition of the design process and use of CAD/CAM\ - \ techniques in the design lifecycle of the instrument. Addi-tionally, avenues\ - \ for future work and compositional prac-tices are focused upon, with particular\ - \ emphasis placed on human/robot interaction, pedagogical techniques afforded\ - \ by the robotic instrument, and compositional avenues made accessible through\ - \ the use of Kritaanjli.},\n address = {Ann Arbor, Michigan},\n author = {Ajay\ - \ Kapur and Jim Murphy and Dale Carnegie},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178299},\n\ - \ issn = {2220-4806},\n keywords = {Musical Robotics, pedagogy, North Indian Classical\ - \ Music, augmented instruments},\n publisher = {University of Michigan},\n title\ - \ = {Kritaanjali: A Robotic Harmonium for Performance, Pedogogy and Research},\n\ - \ url = {http://www.nime.org/proceedings/2012/nime2012_99.pdf},\n year = {2012}\n\ - }\n" + ID: oizmirli2015 + abstract: 'This paper presents a framework for the analysis and exploration of performance + space. 
It enables the user to visualize performances in relation to other performances + of the same piece based on a set of features extracted from audio. A performance + space is formed from a set of performances through spectral analysis, alignment, + dimensionality reduction and visualization. Operation of the system is demonstrated + initially with synthetic MIDI performances and then with a case study of recorded + piano performances.' + address: 'Baton Rouge, Louisiana, USA' + author: Ozgur Izmirli + bibtex: "@inproceedings{oizmirli2015,\n abstract = {This paper presents a framework\ + \ for the analysis and exploration of performance space. It enables the user to\ + \ visualize performances in relation to other performances of the same piece based\ + \ on a set of features extracted from audio. A performance space is formed from\ + \ a set of performances through spectral analysis, alignment, dimensionality reduction\ + \ and visualization. Operation of the system is demonstrated initially with synthetic\ + \ MIDI performances and then with a case study of recorded piano performances.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Ozgur Izmirli},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179094},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {99--102},\n\ + \ publisher = {Louisiana State University},\n title = {Framework for Exploration\ + \ of Performance Space},\n url = {http://www.nime.org/proceedings/2015/nime2015_312.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178299 + doi: 10.5281/zenodo.1179094 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Musical Robotics, pedagogy, North Indian Classical Music, augmented instruments' - publisher: University of Michigan - title: 
'Kritaanjali: A Robotic Harmonium for Performance, Pedogogy and Research' - url: http://www.nime.org/proceedings/2012/nime2012_99.pdf - year: 2012 + month: May + pages: 99--102 + publisher: Louisiana State University + title: Framework for Exploration of Performance Space + url: http://www.nime.org/proceedings/2015/nime2015_312.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Cook2001 - abstract: 'This paper will present observations on the design, artistic, and human - factors of creating digital music controllers. Specific projects will be presented, - and a set of design principles will be supported from those examples. ' - address: 'Seattle, WA' - author: 'Cook, Perry R.' - bibtex: "@inproceedings{Cook2001,\n abstract = {This paper will present observations\ - \ on the design, artistic, and human factors of creating digital music controllers.\ - \ Specific projects will be presented, and a set of design principles will be\ - \ supported from those examples. },\n address = {Seattle, WA},\n author = {Cook,\ - \ Perry R.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176358},\n\ - \ issn = {2220-4806},\n keywords = {Musical control, artistic interfaces.},\n\ - \ pages = {3--6},\n title = {Principles for Designing Computer Music Controllers},\n\ - \ url = {http://www.nime.org/proceedings/2001/nime2001_003.pdf},\n year = {2001}\n\ - }\n" + ID: tbarraclough2015 + abstract: 'This paper presents the iterative design process based upon multiple + rounds of user studies that guided the the design of a novel social music application, + Pyxis Minor. The application was designed based on the concept of democratising + electronic music creation and performance. 
This required the development to be + based upon user studies to inform and drive the development process in order to + create a novel musical interface that can be enjoyed by users of any prior musicianship + training.' + address: 'Baton Rouge, Louisiana, USA' + author: Timothy J. Barraclough and Dale A. Carnegie and Ajay Kapur + bibtex: "@inproceedings{tbarraclough2015,\n abstract = {This paper presents the\ + \ iterative design process based upon multiple rounds of user studies that guided\ + \ the the design of a novel social music application, Pyxis Minor. The application\ + \ was designed based on the concept of democratising electronic music creation\ + \ and performance. This required the development to be based upon user studies\ + \ to inform and drive the development process in order to create a novel musical\ + \ interface that can be enjoyed by users of any prior musicianship training.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {{Timothy J.} Barraclough\ + \ and {Dale A.} Carnegie and Ajay Kapur},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179012},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {289--292},\n publisher = {Louisiana State University},\n title\ + \ = {Musical Instrument Design Process for Mobile Technology},\n url = {http://www.nime.org/proceedings/2015/nime2015_313.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/313/0313-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176358 + doi: 10.5281/zenodo.1179012 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Musical control, artistic interfaces.' 
- pages: 3--6 - title: Principles for Designing Computer Music Controllers - url: http://www.nime.org/proceedings/2001/nime2001_003.pdf - year: 2001 + month: May + pages: 289--292 + publisher: Louisiana State University + title: Musical Instrument Design Process for Mobile Technology + url: http://www.nime.org/proceedings/2015/nime2015_313.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/313/0313-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Verplank2001 - abstract: 'Over the last four years, we have developed a series of lectures, labs - and project assignments aimed at introducing enough technology so that students - from a mix of disciplines can design and build innovative interface devices.' - address: 'Seattle, WA' - author: 'Verplank, Bill and Sapp, Craig and Mathews, Max' - bibtex: "@inproceedings{Verplank2001,\n abstract = {Over the last four years, we\ - \ have developed a series of lectures, labs and project assignments aimed at introducing\ - \ enough technology so that students from a mix of disciplines can design and\ - \ build innovative interface devices.},\n address = {Seattle, WA},\n author =\ - \ {Verplank, Bill and Sapp, Craig and Mathews, Max},\n booktitle = {Proceedings\ + ID: rduindam2015 + abstract: 'Tingle is a new digital music controller that attempts to recapture the + acoustic touch and feel, and also gives new opportunities for expressive play. + Tingle resembles a pin-art toy which has been made interactive through a new sensing + technology, with added haptic feedback and motion control. It pushes back, vibrates, + and warps the sound through the musicians nuanced input. In this article Tingle + will be discussed in combination with CataRT. 
' + address: 'Baton Rouge, Louisiana, USA' + author: Rhys Duindam and Diemo Schwarz and Hans Leeuw + bibtex: "@inproceedings{rduindam2015,\n abstract = {Tingle is a new digital music\ + \ controller that attempts to recapture the acoustic touch and feel, and also\ + \ gives new opportunities for expressive play. Tingle resembles a pin-art toy\ + \ which has been made interactive through a new sensing technology, with added\ + \ haptic feedback and motion control. It pushes back, vibrates, and warps the\ + \ sound through the musicians nuanced input. In this article Tingle will be discussed\ + \ in combination with CataRT. },\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Rhys Duindam and Diemo Schwarz and Hans Leeuw},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176380},\n issn = {2220-4806},\n\ - \ keywords = {Input devices, music controllers, CHI technology, courses.},\n pages\ - \ = {7--10},\n title = {A Course on Controllers},\n url = {http://www.nime.org/proceedings/2001/nime2001_007.pdf},\n\ - \ year = {2001}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176380 - issn: 2220-4806 - keywords: 'Input devices, music controllers, CHI technology, courses.' - pages: 7--10 - title: A Course on Controllers - url: http://www.nime.org/proceedings/2001/nime2001_007.pdf - year: 2001 - - -- ENTRYTYPE: inproceedings - ID: Wessel2001 - abstract: 'In this paper we describe our efforts towards the development of live - performance computer-based musical instrumentation. Our design criteria include - initial ease of use coupled with a long term potential for virtuosity,minimal - and low variance latency, and clear and simple strategies for programming the - relationship between gesture and musical result. 
We present custom controllers - and unique adaptations of standard gestural interfaces, a programmable connectivity - processor, a communications protocol called Open Sound Control(OSC), and a variety - of metaphors for musical control. We further describe applications of our technology - to a variety of real musical performances and directions for future research.' - address: 'Seattle, WA' - author: 'Wessel, David and Wright, Matthew' - bibtex: "@inproceedings{Wessel2001,\n abstract = {In this paper we describe our\ - \ efforts towards the development of live performance computer-based musical instrumentation.\ - \ Our design criteria include initial ease of use coupled with a long term potential\ - \ for virtuosity,minimal and low variance latency, and clear and simple strategies\ - \ for programming the relationship between gesture and musical result. We present\ - \ custom controllers and unique adaptations of standard gestural interfaces, a\ - \ programmable connectivity processor, a communications protocol called Open Sound\ - \ Control(OSC), and a variety of metaphors for musical control. 
We further describe\ - \ applications of our technology to a variety of real musical performances and\ - \ directions for future research.},\n address = {Seattle, WA},\n author = {Wessel,\ - \ David and Wright, Matthew},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n\ - \ doi = {10.5281/zenodo.1176382},\n issn = {2220-4806},\n keywords = {communications\ - \ protocols,gestural controllers,latency,musical,reactive computing,signal processing},\n\ - \ pages = {11--14},\n title = {Problems and Prospects for Intimate Musical Control\ - \ of Computers},\n url = {http://www.nime.org/proceedings/2001/nime2001_011.pdf},\n\ - \ year = {2001}\n}\n" + \ doi = {10.5281/zenodo.1179054},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {219--222},\n publisher = {Louisiana\ + \ State University},\n title = {Tingle: A Digital Music Controller Re-Capturing\ + \ the Acoustic Instrument Experience},\n url = {http://www.nime.org/proceedings/2015/nime2015_319.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/319/0319-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176382 + doi: 10.5281/zenodo.1179054 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'communications protocols,gestural controllers,latency,musical,reactive - computing,signal processing' - pages: 11--14 - title: Problems and Prospects for Intimate Musical Control of Computers - url: http://www.nime.org/proceedings/2001/nime2001_011.pdf - year: 2001 + month: May + pages: 219--222 + publisher: Louisiana State University + title: 'Tingle: A Digital Music Controller Re-Capturing the Acoustic Instrument + Experience' + url: http://www.nime.org/proceedings/2015/nime2015_319.pdf + urlsuppl1: 
http://www.nime.org/proceedings/2015/319/0319-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: Orio2001 - abstract: 'This paper reviews the existing literature on input device evaluation - and design in human-computer interaction (HCI)and discusses possible applications - of this knowledge to the design and evaluation of new interfaces for musical expression. - Specifically, a set of musical tasks is suggested to allow the evaluation of different - existing controllers. ' - address: 'Seattle, WA' - author: 'Orio, Nicola and Schnell, Norbert and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Orio2001,\n abstract = {This paper reviews the existing\ - \ literature on input device evaluation and design in human-computer interaction\ - \ (HCI)and discusses possible applications of this knowledge to the design and\ - \ evaluation of new interfaces for musical expression. Specifically, a set of\ - \ musical tasks is suggested to allow the evaluation of different existing controllers.\ - \ },\n address = {Seattle, WA},\n author = {Orio, Nicola and Schnell, Norbert\ - \ and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n\ - \ doi = {10.5281/zenodo.1176370},\n issn = {2220-4806},\n keywords = {Input device\ - \ design, gestural control, interactive systems},\n pages = {15--18},\n title\ - \ = {Input Devices for Musical Expression : Borrowing Tools from HCI},\n url =\ - \ {http://www.nime.org/proceedings/2001/nime2001_015.pdf},\n year = {2001}\n}\n" + ID: sgelineck2015 + abstract: 'This study compares the stage metaphor and the channel strip metaphor + in terms of performance. Traditionally, music mixing consoles employ a channels + strip control metaphor for adjusting parameters such as volume and panning of + each track. 
An alternative control metaphor, the so-called stage metaphor lets + the user adjust volume and panning by positioning tracks relative to a virtual + listening position. In this study test participants are given the task to adjust + volume and panning of one channel (in mixes consisting of three channels) in order + to replicate a series of simple pre-rendered mixes. They do this using (1) a small + physical mixing controller and (2) using an iPad app, which implements a simple + stage metaphor interface. We measure how accurately they are able to replicate + mixes in terms of volume and panning and how fast they are at doing so. Results + reveal that performance is surprisingly similar and thus we are not able to detect + any significant difference in performance between the two interfaces. Qualitative + data however, suggests that the stage metaphor is largely favoured for its intuitive + interaction --- confirming earlier studies. ' + address: 'Baton Rouge, Louisiana, USA' + author: Steven Gelineck and Dannie Korsgaard and Morten Büchert + bibtex: "@inproceedings{sgelineck2015,\n abstract = {This study compares the stage\ + \ metaphor and the channel strip metaphor in terms of performance. Traditionally,\ + \ music mixing consoles employ a channels strip control metaphor for adjusting\ + \ parameters such as volume and panning of each track. An alternative control\ + \ metaphor, the so-called stage metaphor lets the user adjust volume and panning\ + \ by positioning tracks relative to a virtual listening position. In this study\ + \ test participants are given the task to adjust volume and panning of one channel\ + \ (in mixes consisting of three channels) in order to replicate a series of simple\ + \ pre-rendered mixes. 
They do this using (1) a small physical mixing controller\ + \ and (2) using an iPad app, which implements a simple stage metaphor interface.\ + \ We measure how accurately they are able to replicate mixes in terms of volume\ + \ and panning and how fast they are at doing so. Results reveal that performance\ + \ is surprisingly similar and thus we are not able to detect any significant difference\ + \ in performance between the two interfaces. Qualitative data however, suggests\ + \ that the stage metaphor is largely favoured for its intuitive interaction ---\ + \ confirming earlier studies. },\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Steven Gelineck and Dannie Korsgaard and Morten B\\''uchert},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179064},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {343--346},\n\ + \ publisher = {Louisiana State University},\n title = {Stage- vs. Channel-strip\ + \ Metaphor --- Comparing Performance when Adjusting Volume and Panning of a Single\ + \ Channel in a Stereo Mix},\n url = {http://www.nime.org/proceedings/2015/nime2015_320.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176370 + doi: 10.5281/zenodo.1179064 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Input device design, gestural control, interactive systems' - pages: 15--18 - title: 'Input Devices for Musical Expression : Borrowing Tools from HCI' - url: http://www.nime.org/proceedings/2001/nime2001_015.pdf - year: 2001 + month: May + pages: 343--346 + publisher: Louisiana State University + title: Stage- vs. 
Channel-strip Metaphor --- Comparing Performance when Adjusting + Volume and Panning of a Single Channel in a Stereo Mix + url: http://www.nime.org/proceedings/2015/nime2015_320.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Bahn2001 - abstract: 'This paper presents the interface developments and music of the duo "interface," - formed by Curtis Bahn and Dan Trueman. We describe gestural instrument design, - interactive performance interfaces for improvisational music, spherical speakers - (multi-channel, outward-radiating geodesic speaker arrays) and Sensor-Speaker-Arrays - (SenSAs: combinations of various sensor devices with spherical speaker arrays). - We discuss the concept, design and construction of these systems, and, give examples - from several new published CDs of work by Bahn and Trueman.' - address: 'Seattle, WA' - author: 'Bahn, Curtis and Trueman, Dan' - bibtex: "@inproceedings{Bahn2001,\n abstract = {This paper presents the interface\ - \ developments and music of the duo \"interface,\" formed by Curtis Bahn and Dan\ - \ Trueman. We describe gestural instrument design, interactive performance interfaces\ - \ for improvisational music, spherical speakers (multi-channel, outward-radiating\ - \ geodesic speaker arrays) and Sensor-Speaker-Arrays (SenSAs: combinations of\ - \ various sensor devices with spherical speaker arrays). 
We discuss the concept,\ - \ design and construction of these systems, and, give examples from several new\ - \ published CDs of work by Bahn and Trueman.},\n address = {Seattle, WA},\n author\ - \ = {Bahn, Curtis and Trueman, Dan},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n\ - \ doi = {10.5281/zenodo.1176356},\n issn = {2220-4806},\n pages = {19--23},\n\ - \ title = {interface : Electronic Chamber Ensemble},\n url = {http://www.nime.org/proceedings/2001/nime2001_019.pdf},\n\ - \ year = {2001}\n}\n" + ID: jwu2015 + abstract: 'This paper presents the Tibetan Singing Prayer Wheel, a hand-held, wireless, + sensor-based musical instrument with a human-computer interface that simultaneously + processes vocals and synthesizes sound based on the performer''s hand gestures + with a one-to-many mapping strategy. A physical model simulates the singing bowl, + while a modal reverberator and a delay-and-window effect process the performer''s + vocals. This system is designed for an electroacoustic vocalist interested in + using a solo instrument to achieve performance goals that would normally require + multiple instruments and activities.' + address: 'Baton Rouge, Louisiana, USA' + author: J. Cecilia Wu and Yoo Hsiu Yeh and Romain Michon and Nathan Weitzner and + Jonathan Abel and Matthew Wright + bibtex: "@inproceedings{jwu2015,\n abstract = {This paper presents the Tibetan Singing\ + \ Prayer Wheel, a hand-held, wireless, sensor-based musical instrument with a\ + \ human-computer interface that simultaneously processes vocals and synthesizes\ + \ sound based on the performer's hand gestures with a one-to-many mapping strategy.\ + \ A physical model simulates the singing bowl, while a modal reverberator and\ + \ a delay-and-window effect process the performer's vocals. 
This system is designed\ + \ for an electroacoustic vocalist interested in using a solo instrument to achieve\ + \ performance goals that would normally require multiple instruments and activities.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {{J. Cecilia} Wu and {Yoo\ + \ Hsiu} Yeh and Romain Michon and Nathan Weitzner and Jonathan Abel and Matthew\ + \ Wright},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1179196},\n editor = {Edgar\ + \ Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages =\ + \ {91--94},\n publisher = {Louisiana State University},\n title = {Tibetan Singing\ + \ Prayer Wheel: A Hybrid Musical- Spiritual Instrument Using Gestural Control},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_322.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176356 + doi: 10.5281/zenodo.1179196 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - pages: 19--23 - title: 'interface : Electronic Chamber Ensemble' - url: http://www.nime.org/proceedings/2001/nime2001_019.pdf - year: 2001 + month: May + pages: 91--94 + publisher: Louisiana State University + title: 'Tibetan Singing Prayer Wheel: A Hybrid Musical- Spiritual Instrument Using + Gestural Control' + url: http://www.nime.org/proceedings/2015/nime2015_322.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Goudeseune2001 - abstract: 'We present an acoustic musical instrument played through a resonance - model of another sound. The resonance model is controlled in real time as part - of the composite instrument. Our implementation uses an electric violin, whose - spatial position modifies filter parameters of the resonance model. Simplicial - interpolation defines the mapping from spatial position to filter parameters. 
- With some effort, pitch tracking can also control the filter parameters. The individual - technologies -- motion tracking, pitch tracking, resonance models -- are easily - adapted to other instruments.' - address: 'Seattle, WA' - author: 'Goudeseune, Camille and Garnett, Guy and Johnson, Timothy' - bibtex: "@inproceedings{Goudeseune2001,\n abstract = {We present an acoustic musical\ - \ instrument played through a resonance model of another sound. The resonance\ - \ model is controlled in real time as part of the composite instrument. Our implementation\ - \ uses an electric violin, whose spatial position modifies filter parameters of\ - \ the resonance model. Simplicial interpolation defines the mapping from spatial\ - \ position to filter parameters. With some effort, pitch tracking can also control\ - \ the filter parameters. The individual technologies -- motion tracking, pitch\ - \ tracking, resonance models -- are easily adapted to other instruments.},\n address\ - \ = {Seattle, WA},\n author = {Goudeseune, Camille and Garnett, Guy and Johnson,\ - \ Timothy},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176362},\n\ - \ issn = {2220-4806},\n keywords = {multidimensionality, control, resonance, pitch\ - \ tracking},\n pages = {24--26},\n title = {Resonant Processing of Instrumental\ - \ Sound Controlled by Spatial Position},\n url = {http://www.nime.org/proceedings/2001/nime2001_024.pdf},\n\ - \ year = {2001}\n}\n" + ID: ifranco2015 + abstract: 'The proliferation and easy access to a new breed of ARM-based single-board + computers has promoted an increased usage of these platforms in the creation of + self-contained Digital Music Instruments. These directly incorporate all of the + necessary processing power for tasks such as sensor signal acquisition, control + data processing and audio synthesis. 
They can also run full Linux operating systems, + through which domain-specific languages for audio computing facilitate a low entry + barrier for the community. In computer music the adoption of these computing platforms + will naturally depend on their ability to withstand the demanding computing tasks + associated to high-quality audio synthesis. In the context of computer music practice + there are few reports about this quantification for practical purposes. This paper + aims at presenting the results of performance tests of SuperCollider running on + the BeagleBone Black, a popular mid-tier single-board computer, while performing + commonly used audio synthesis techniques.' + address: 'Baton Rouge, Louisiana, USA' + author: Ivan Franco and Marcelo Wanderley + bibtex: "@inproceedings{ifranco2015,\n abstract = {The proliferation and easy access\ + \ to a new breed of ARM-based single-board computers has promoted an increased\ + \ usage of these platforms in the creation of self-contained Digital Music Instruments.\ + \ These directly incorporate all of the necessary processing power for tasks such\ + \ as sensor signal acquisition, control data processing and audio synthesis. They\ + \ can also run full Linux operating systems, through which domain-specific languages\ + \ for audio computing facilitate a low entry barrier for the community. In computer\ + \ music the adoption of these computing platforms will naturally depend on their\ + \ ability to withstand the demanding computing tasks associated to high-quality\ + \ audio synthesis. In the context of computer music practice there are few reports\ + \ about this quantification for practical purposes. 
This paper aims at presenting\ + \ the results of performance tests of SuperCollider running on the BeagleBone\ + \ Black, a popular mid-tier single-board computer, while performing commonly used\ + \ audio synthesis techniques.},\n address = {Baton Rouge, Louisiana, USA},\n author\ + \ = {Ivan Franco and Marcelo Wanderley},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179062},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {223--226},\n publisher = {Louisiana State University},\n title\ + \ = {Pratical Evaluation of Synthesis Performance on the Beaglebone Black},\n\ + \ url = {http://www.nime.org/proceedings/2015/nime2015_323.pdf},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176362 + doi: 10.5281/zenodo.1179062 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'multidimensionality, control, resonance, pitch tracking' - pages: 24--26 - title: Resonant Processing of Instrumental Sound Controlled by Spatial Position - url: http://www.nime.org/proceedings/2001/nime2001_024.pdf - year: 2001 + month: May + pages: 223--226 + publisher: Louisiana State University + title: Pratical Evaluation of Synthesis Performance on the Beaglebone Black + url: http://www.nime.org/proceedings/2015/nime2015_323.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Gurevich2001 - abstract: 'The Accordiatron is a new MIDI controller for real-time performance based - on the paradigm of a conventional squeeze box or concertina. It translates the - gestures of a performer to the standard communication protocol ofMIDI, allowing - for flexible mappings of performance data to sonic parameters. 
When used in conjunction - with a realtime signal processing environment, the Accordiatron becomes an expressive, - versatile musical instrument. A combination of sensory outputs providing both - discrete and continuous data gives the subtle expressiveness and control necessary - for interactive music.' - address: 'Seattle, WA' - author: 'Gurevich, Michael and von Muehlen, Stephan' - bibtex: "@inproceedings{Gurevich2001,\n abstract = {The Accordiatron is a new MIDI\ - \ controller for real-time performance based on the paradigm of a conventional\ - \ squeeze box or concertina. It translates the gestures of a performer to the\ - \ standard communication protocol ofMIDI, allowing for flexible mappings of performance\ - \ data to sonic parameters. When used in conjunction with a realtime signal processing\ - \ environment, the Accordiatron becomes an expressive, versatile musical instrument.\ - \ A combination of sensory outputs providing both discrete and continuous data\ - \ gives the subtle expressiveness and control necessary for interactive music.},\n\ - \ address = {Seattle, WA},\n author = {Gurevich, Michael and von Muehlen, Stephan},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176364},\n\ - \ issn = {2220-4806},\n keywords = {MIDI controllers, computer music, interactive\ - \ music, electronic musical instruments, musical instrument design, human computer\ - \ interface},\n pages = {27--29},\n title = {The Accordiatron : A {MIDI} Controller\ - \ For Interactive Music},\n url = {http://www.nime.org/proceedings/2001/nime2001_027.pdf},\n\ - \ year = {2001}\n}\n" + ID: cbrown2015 + abstract: 'Lambeosaurine hadrosaurs are duck-billed dinosaurs known for their large + head crests, which researchers hypothesize were resonators for vocal calls. 
This + paper describes the motivation and process of iteratively designing a musical + instrument and interactive sound installation based on imagining the sounds of + this extinct dinosaur. We used scientific research as a starting point to create + a means of sound production and resonator, using a 3D model obtained from Computed + Topology (CT) scans of a Corythosaurus skull and an endocast of its crest and + nasal passages. Users give voice to the dinosaur by blowing into a mouthpiece, + exciting a larynx mechanism and resonating the sound through the hadrosaur''s + full-scale nasal cavities and skull. This action allows an embodied glimpse into + an ancient past. Users know the dinosaur through the controlled exhalation of + their breath, how the compression of the lungs leads to a whisper or a roar.' + address: 'Baton Rouge, Louisiana, USA' + author: Courtney Brown and Sharif Razzaque and Garth Paine + bibtex: "@inproceedings{cbrown2015,\n abstract = {Lambeosaurine hadrosaurs are duck-billed\ + \ dinosaurs known for their large head crests, which researchers hypothesize were\ + \ resonators for vocal calls. This paper describes the motivation and process\ + \ of iteratively designing a musical instrument and interactive sound installation\ + \ based on imagining the sounds of this extinct dinosaur. We used scientific research\ + \ as a starting point to create a means of sound production and resonator, using\ + \ a 3D model obtained from Computed Topology (CT) scans of a Corythosaurus skull\ + \ and an endocast of its crest and nasal passages. Users give voice to the dinosaur\ + \ by blowing into a mouthpiece, exciting a larynx mechanism and resonating the\ + \ sound through the hadrosaur's full-scale nasal cavities and skull. This action\ + \ allows an embodied glimpse into an ancient past. 
Users know the dinosaur through\ + \ the controlled exhalation of their breath, how the compression of the lungs\ + \ leads to a whisper or a roar.},\n address = {Baton Rouge, Louisiana, USA},\n\ + \ author = {Courtney Brown and Sharif Razzaque and Garth Paine},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1179036},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {5--10},\n publisher = {Louisiana\ + \ State University},\n title = {Rawr! A Study in Sonic Skulls: Embodied Natural\ + \ History},\n url = {http://www.nime.org/proceedings/2015/nime2015_325.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176364 + doi: 10.5281/zenodo.1179036 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'MIDI controllers, computer music, interactive music, electronic musical - instruments, musical instrument design, human computer interface' - pages: 27--29 - title: 'The Accordiatron : A MIDI Controller For Interactive Music' - url: http://www.nime.org/proceedings/2001/nime2001_027.pdf - year: 2001 + month: May + pages: 5--10 + publisher: Louisiana State University + title: 'Rawr! A Study in Sonic Skulls: Embodied Natural History' + url: http://www.nime.org/proceedings/2015/nime2015_325.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Paradiso2001 - abstract: 'The technologies behind passive resonant magnetically coupled tags are - introduced and their application as a musical controller is illustrated for solo - or group performances, interactive installations, and music toys. ' - address: 'Seattle, WA' - author: 'Paradiso, Joseph A. 
and Hsiao, Kai-yuh and Benbasat, Ari' - bibtex: "@inproceedings{Paradiso2001,\n abstract = {The technologies behind passive\ - \ resonant magnetically coupled tags are introduced and their application as a\ - \ musical controller is illustrated for solo or group performances, interactive\ - \ installations, and music toys. },\n address = {Seattle, WA},\n author = {Paradiso,\ - \ Joseph A. and Hsiao, Kai-yuh and Benbasat, Ari},\n booktitle = {Proceedings\ + ID: kyerkes2015 + abstract: 'We document results from exploring ensemble feedback in loosely-structured + electroacoustic improvisations. A conceptual justification for the explorations + is provided, in addition to discussion of tools and methodologies. Physical configurations + of intra-ensemble feedback networks are documented, along with qualitative analysis + of their effectiveness.' + address: 'Baton Rouge, Louisiana, USA' + author: Muhammad Hafiz Wan Rosli and Karl Yerkes and Matthew Wright and Timothy + Wood and Hannah Wolfe and Charlie Roberts and Anis Haron and Fernando Rincon Estrada + bibtex: "@inproceedings{kyerkes2015,\n abstract = {We document results from exploring\ + \ ensemble feedback in loosely-structured electroacoustic improvisations. A conceptual\ + \ justification for the explorations is provided, in addition to discussion of\ + \ tools and methodologies. 
Physical configurations of intra-ensemble feedback\ + \ networks are documented, along with qualitative analysis of their effectiveness.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {{Muhammad Hafiz Wan} Rosli\ + \ and Karl Yerkes and Matthew Wright and Timothy Wood and Hannah Wolfe and Charlie\ + \ Roberts and Anis Haron and {Fernando Rincon} Estrada},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176374},\n issn = {2220-4806},\n\ - \ keywords = {RFID, resonant tags, EAS tags, musical controller, tangible interface},\n\ - \ pages = {30--33},\n title = {Tangible Music Interfaces Using Passive Magnetic\ - \ Tags},\n url = {http://www.nime.org/proceedings/2001/nime2001_030.pdf},\n year\ - \ = {2001}\n}\n" + \ doi = {10.5281/zenodo.1179170},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {144--149},\n publisher = {Louisiana\ + \ State University},\n title = {Ensemble Feedback Instruments},\n url = {http://www.nime.org/proceedings/2015/nime2015_329.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/329/0329-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176374 + doi: 10.5281/zenodo.1179170 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'RFID, resonant tags, EAS tags, musical controller, tangible interface' - pages: 30--33 - title: Tangible Music Interfaces Using Passive Magnetic Tags - url: http://www.nime.org/proceedings/2001/nime2001_030.pdf - year: 2001 + month: May + pages: 144--149 + publisher: Louisiana State University + title: Ensemble Feedback Instruments + url: http://www.nime.org/proceedings/2015/nime2015_329.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/329/0329-file1.mp4 + year: 2015 - ENTRYTYPE: 
inproceedings - ID: Mase2001 - abstract: 'In this paper, we introduce our research challenges for creating new - musical instruments using everyday-life media with intimate interfaces, such as - the self-body, clothes, water and stuffed toys. Various sensor technologies including - image processing and general touch sensitive devices are employed to exploit these - interaction media. The focus of our effort is to provide user-friendly and enjoyable - experiences for new music and sound performances. Multimodality of musical instruments - is explored in each attempt. The degree of controllability in the performance - and the richness of expressions are also discussed for each installation. ' - address: 'Seattle, WA' - author: 'Mase, Kenji and Yonezawa, Tomoko' - bibtex: "@inproceedings{Mase2001,\n abstract = {In this paper, we introduce our\ - \ research challenges for creating new musical instruments using everyday-life\ - \ media with intimate interfaces, such as the self-body, clothes, water and stuffed\ - \ toys. Various sensor technologies including image processing and general touch\ - \ sensitive devices are employed to exploit these interaction media. The focus\ - \ of our effort is to provide user-friendly and enjoyable experiences for new\ - \ music and sound performances. Multimodality of musical instruments is explored\ - \ in each attempt. The degree of controllability in the performance and the richness\ - \ of expressions are also discussed for each installation. 
},\n address = {Seattle,\ - \ WA},\n author = {Mase, Kenji and Yonezawa, Tomoko},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176368},\n issn = {2220-4806},\n\ - \ keywords = {New interface, music controller, dance, image processing, water\ - \ interface, stuffed toy},\n pages = {34--37},\n title = {Body , Clothes , Water\ - \ and Toys : Media Towards Natural Music Expressions with Digital Sounds},\n url\ - \ = {http://www.nime.org/proceedings/2001/nime2001_034.pdf},\n year = {2001}\n\ + ID: jgregorio2015 + abstract: 'Quality assessment of jazz improvisation is a multi-faceted, high-level + cognitive task routinely performed by educators in university jazz programs and + other discriminating music listeners. In this pilot study, we present a novel + dataset of 88 MIDI jazz piano improvisations with ratings of creativity, technical + proficiency, and aesthetic appeal provided by four jazz experts, and we detail + the design of a feature set that can represent some of the rhythmic, melodic, + harmonic, and other expressive attributes humans recognize as salient in assessment + of performance quality. Inherent subjectivity in these assessments is inevitable, + yet the recognition of performance attributes by which humans perceive quality + has wide applicability to related tasks in the music information retrieval (MIR) + community and jazz pedagogy. Preliminary results indicate that several musiciologically-informed + features of relatively low computational complexity perform reasonably well in + predicting performance quality labels via ordinary least squares regression.' + address: 'Baton Rouge, Louisiana, USA' + author: Jeff Gregorio and David Rosen and Michael Caro and Youngmoo E. 
Kim + bibtex: "@inproceedings{jgregorio2015,\n abstract = {Quality assessment of jazz\ + \ improvisation is a multi-faceted, high-level cognitive task routinely performed\ + \ by educators in university jazz programs and other discriminating music listeners.\ + \ In this pilot study, we present a novel dataset of 88 MIDI jazz piano improvisations\ + \ with ratings of creativity, technical proficiency, and aesthetic appeal provided\ + \ by four jazz experts, and we detail the design of a feature set that can represent\ + \ some of the rhythmic, melodic, harmonic, and other expressive attributes humans\ + \ recognize as salient in assessment of performance quality. Inherent subjectivity\ + \ in these assessments is inevitable, yet the recognition of performance attributes\ + \ by which humans perceive quality has wide applicability to related tasks in\ + \ the music information retrieval (MIR) community and jazz pedagogy. Preliminary\ + \ results indicate that several musiciologically-informed features of relatively\ + \ low computational complexity perform reasonably well in predicting performance\ + \ quality labels via ordinary least squares regression.},\n address = {Baton Rouge,\ + \ Louisiana, USA},\n author = {Jeff Gregorio and David Rosen and Michael Caro\ + \ and {Youngmoo E.} Kim},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179072},\n\ + \ editor = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month =\ + \ {May},\n pages = {327--328},\n publisher = {Louisiana State University},\n title\ + \ = {Descriptors for Perception of Quality in Jazz Piano Improvisation},\n url\ + \ = {http://www.nime.org/proceedings/2015/nime2015_331.pdf},\n year = {2015}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176368 + doi: 10.5281/zenodo.1179072 + editor: Edgar Berdahl 
and Jesse Allison issn: 2220-4806 - keywords: 'New interface, music controller, dance, image processing, water interface, - stuffed toy' - pages: 34--37 - title: 'Body , Clothes , Water and Toys : Media Towards Natural Music Expressions - with Digital Sounds' - url: http://www.nime.org/proceedings/2001/nime2001_034.pdf - year: 2001 + month: May + pages: 327--328 + publisher: Louisiana State University + title: Descriptors for Perception of Quality in Jazz Piano Improvisation + url: http://www.nime.org/proceedings/2015/nime2015_331.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Overholt2001 - abstract: 'The MATRIX (Multipurpose Array of Tactile Rods for Interactive eXpression) - is a new musical interface for amateurs and professionals alike. It gives users - a 3dimensional tangible interface to control music using their hands, and can - be used in conjunction with a traditional musical instrument and a microphone, - or as a stand-alone gestural input device. The surface of the MATRIX acts as areal-time - interface that can manipulate the parameters of a synthesis engine or effect algorithm - in response to a performer''s expressive gestures. One example is to have the - rods of the MATRIX control the individual grains of a granular synthesizer, thereby - "sonically sculpting" the microstructure of a sound. In this way, the MATRIX provides - an intuitive method of manipulating sound with avery high level of real-time control.' - address: 'Seattle, WA' - author: 'Overholt, Dan' - bibtex: "@inproceedings{Overholt2001,\n abstract = {The MATRIX (Multipurpose Array\ - \ of Tactile Rods for Interactive eXpression) is a new musical interface for amateurs\ - \ and professionals alike. 
It gives users a 3dimensional tangible interface to\ - \ control music using their hands, and can be used in conjunction with a traditional\ - \ musical instrument and a microphone, or as a stand-alone gestural input device.\ - \ The surface of the MATRIX acts as areal-time interface that can manipulate the\ - \ parameters of a synthesis engine or effect algorithm in response to a performer's\ - \ expressive gestures. One example is to have the rods of the MATRIX control the\ - \ individual grains of a granular synthesizer, thereby \"sonically sculpting\"\ - \ the microstructure of a sound. In this way, the MATRIX provides an intuitive\ - \ method of manipulating sound with avery high level of real-time control.},\n\ - \ address = {Seattle, WA},\n author = {Overholt, Dan},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176372},\n issn = {2220-4806},\n\ - \ keywords = {Musical controller, tangible interface, real-time expression, audio\ - \ synthesis, effects algorithms, signal processing, 3-D interface, sculptable\ - \ surface},\n pages = {38--41},\n title = {The MATRIX : A Novel Controller for\ - \ Musical Expression},\n url = {http://www.nime.org/proceedings/2001/nime2001_038.pdf},\n\ - \ year = {2001}\n}\n" + ID: amarquezborbonb2015 + abstract: 'This paper discusses the particular aesthetic and contextual considerations + emergent from the design process of a site-specific sound art installation, the + Wave Duet. The main point of this paper proposes that beyond the initial motivation + produced by new technologies and their artistic potential, there are many profound + artistic considerations that drive the development and design of a work in unique + ways. Thus, in the case of the Wave Duet, the produced buoys were prompted by + investigating the relationship between sonic objects and natural phenomena. 
As + a result, the mappings, physical and sound designs directly reflect these issues. + Finally, it is also suggested that during the course of development, unintended + issues may emerge and further inform how the work is perceived in a broader sense. ' + address: 'Baton Rouge, Louisiana, USA' + author: Adnan Marquez-Borbon + bibtex: "@inproceedings{amarquezborbonb2015,\n abstract = {This paper discusses\ + \ the particular aesthetic and contextual considerations emergent from the design\ + \ process of a site-specific sound art installation, the Wave Duet. The main point\ + \ of this paper proposes that beyond the initial motivation produced by new technologies\ + \ and their artistic potential, there are many profound artistic considerations\ + \ that drive the development and design of a work in unique ways. Thus, in the\ + \ case of the Wave Duet, the produced buoys were prompted by investigating the\ + \ relationship between sonic objects and natural phenomena. As a result, the mappings,\ + \ physical and sound designs directly reflect these issues. Finally, it is also\ + \ suggested that during the course of development, unintended issues may emerge\ + \ and further inform how the work is perceived in a broader sense. },\n address\ + \ = {Baton Rouge, Louisiana, USA},\n author = {Adnan Marquez-Borbon},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1179126},\n editor = {Edgar Berdahl and\ + \ Jesse Allison},\n issn = {2220-4806},\n month = {May},\n pages = {335--338},\n\ + \ publisher = {Louisiana State University},\n title = {But Does it Float? 
Reflections\ + \ on a Sound Art Ecological Intervention},\n url = {http://www.nime.org/proceedings/2015/nime2015_333.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/333/0333-file1.mp4},\n year\ + \ = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176372 + doi: 10.5281/zenodo.1179126 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - keywords: 'Musical controller, tangible interface, real-time expression, audio synthesis, - effects algorithms, signal processing, 3-D interface, sculptable surface' - pages: 38--41 - title: 'The MATRIX : A Novel Controller for Musical Expression' - url: http://www.nime.org/proceedings/2001/nime2001_038.pdf - year: 2001 + month: May + pages: 335--338 + publisher: Louisiana State University + title: 'But Does it Float? Reflections on a Sound Art Ecological Intervention' + url: http://www.nime.org/proceedings/2015/nime2015_333.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/333/0333-file1.mp4 + year: 2015 - ENTRYTYPE: inproceedings - ID: DArcangelo2001 - abstract: 'This paper describes a series of projects that explore the possibilities - of musical expression through the combination of pre-composed, interlocking, modular - components. In particular, this paper presents a modular soundtrack recently - composed by the author for “Currentsof Creativity,” a permanent interactive video - wall installation at the Pope John Paul II Cultural Center which is slated to - open Easter 2001 in Washington, DC.' - address: 'Seattle, WA' - author: 'D''Arcangelo, Gideon' - bibtex: "@inproceedings{DArcangelo2001,\n abstract = {This paper describes a series\ - \ of projects that explore the possibilities of musical expression through the\ - \ combination of pre-composed, interlocking, modular components. 
In particular,\ - \ this paper presents a modular soundtrack recently composed by the author for\ - \ “Currentsof Creativity,” a permanent interactive video wall installation at\ - \ the Pope John Paul II Cultural Center which is slated to open Easter 2001 in\ - \ Washington, DC.},\n address = {Seattle, WA},\n author = {D'Arcangelo, Gideon},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176360},\n\ - \ issn = {2220-4806},\n pages = {42--45},\n title = {Creating Contexts of Creativity\ - \ : Musical Composition with Modular Components},\n url = {http://www.nime.org/proceedings/2001/nime2001_042.pdf},\n\ - \ year = {2001}\n}\n" + ID: mblessing2015 + abstract: 'A LapBox derivative, the Textural Crossfader is a keyboard-based embedded + acoustic instrument, which sits comfortably across the performer''s lap and radiates + sound out of integrated stereo speakers. The performer controls the sound by manipulating + the keys on a pair of mini-keyboard interfaces. A unique one-to-one mapping enables + the performer to precisely crossfade among a set of looped audio wave files, creating + a conveniently portable system for navigating through a complex timbre space. + The axes of the timbre space can be reconfigured by replacing the wave files stored + in the flash memory.' + address: 'Baton Rouge, Louisiana, USA' + author: Matthew Blessing and Edgar Berdahl + bibtex: "@inproceedings{mblessing2015,\n abstract = {A LapBox derivative, the Textural\ + \ Crossfader is a keyboard-based embedded acoustic instrument, which sits comfortably\ + \ across the performer's lap and radiates sound out of integrated stereo speakers.\ + \ The performer controls the sound by manipulating the keys on a pair of mini-keyboard\ + \ interfaces. 
A unique one-to-one mapping enables the performer to precisely crossfade\ + \ among a set of looped audio wave files, creating a conveniently portable system\ + \ for navigating through a complex timbre space. The axes of the timbre space\ + \ can be reconfigured by replacing the wave files stored in the flash memory.},\n\ + \ address = {Baton Rouge, Louisiana, USA},\n author = {Matthew Blessing and Edgar\ + \ Berdahl},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1179032},\n editor\ + \ = {Edgar Berdahl and Jesse Allison},\n issn = {2220-4806},\n month = {May},\n\ + \ pages = {180--181},\n publisher = {Louisiana State University},\n title = {Textural\ + \ Crossfader},\n url = {http://www.nime.org/proceedings/2015/nime2015_337.pdf},\n\ + \ urlsuppl1 = {http://www.nime.org/proceedings/2015/337/0337-file1.mp4},\n urlsuppl2\ + \ = {http://www.nime.org/proceedings/2015/337/0337-file2.mov},\n year = {2015}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176360 + doi: 10.5281/zenodo.1179032 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - pages: 42--45 - title: 'Creating Contexts of Creativity : Musical Composition with Modular Components' - url: http://www.nime.org/proceedings/2001/nime2001_042.pdf - year: 2001 + month: May + pages: 180--181 + publisher: Louisiana State University + title: Textural Crossfader + url: http://www.nime.org/proceedings/2015/nime2015_337.pdf + urlsuppl1: http://www.nime.org/proceedings/2015/337/0337-file1.mp4 + urlsuppl2: http://www.nime.org/proceedings/2015/337/0337-file2.mov + year: 2015 - ENTRYTYPE: inproceedings - ID: Jorda2001 - abstract: 'The conception and design of new musical interfaces is a multidisciplinary - area that tightly relates technology and artistic creation. 
In this paper, the - author first exposes some of the questions he has posed himself during more than - a decade experience as a performer, composer, interface and software designer, - and educator. Finally, he illustrates these topics with some examples of his work.' - address: 'Seattle, WA' - author: 'Jordà, Sergi' - bibtex: "@inproceedings{Jorda2001,\n abstract = {The conception and design of new\ - \ musical interfaces is a multidisciplinary area that tightly relates technology\ - \ and artistic creation. In this paper, the author first exposes some of the questions\ - \ he has posed himself during more than a decade experience as a performer, composer,\ - \ interface and software designer, and educator. Finally, he illustrates these\ - \ topics with some examples of his work.},\n address = {Seattle, WA},\n author\ - \ = {Jord\\`{a}, Sergi},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n doi\ - \ = {10.5281/zenodo.1176366},\n issn = {2220-4806},\n pages = {46--50},\n title\ - \ = {New Musical Interfaces and New Music-making Paradigms},\n url = {http://www.nime.org/proceedings/2001/nime2001_046.pdf},\n\ - \ year = {2001}\n}\n" + ID: acabrera2015 + abstract: 'This paper presents the concept and implementation of a decentralized, + server-less and peer-to-peer network for the interchange of musical control interfaces + and data using the OSC protocol. Graphical control elements that form the control + interface can be freely edited and exchanged to and from any device in the network, + doing away with the need for a separate server or editing application. All graphical + elements representing the same parameter will have their value synchronized through + the network mechanisms. 
Some practical considerations surrounding the implementation + of this idea like automatic layout of control, editing interfaces on mobile touch-screen + devices and auto-discovery of network nodes are discussed. Finally, GoOSC, a mobile + application implementing these ideas is presented.' + address: 'Baton Rouge, Louisiana, USA' + author: Andres Cabrera + bibtex: "@inproceedings{acabrera2015,\n abstract = {This paper presents the concept\ + \ and implementation of a decentralized, server-less and peer-to-peer network\ + \ for the interchange of musical control interfaces and data using the OSC protocol.\ + \ Graphical control elements that form the control interface can be freely edited\ + \ and exchanged to and from any device in the network, doing away with the need\ + \ for a separate server or editing application. All graphical elements representing\ + \ the same parameter will have their value synchronized through the network mechanisms.\ + \ Some practical considerations surrounding the implementation of this idea like\ + \ automatic layout of control, editing interfaces on mobile touch-screen devices\ + \ and auto-discovery of network nodes are discussed. 
Finally, GoOSC, a mobile\ + \ application implementing these ideas is presented.},\n address = {Baton Rouge,\ + \ Louisiana, USA},\n author = {Andres Cabrera},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1179040},\n editor = {Edgar Berdahl and Jesse Allison},\n\ + \ issn = {2220-4806},\n month = {May},\n pages = {355--358},\n publisher = {Louisiana\ + \ State University},\n title = {Serverless and Peer-to-peer distributed interfaces\ + \ for musical control},\n url = {http://www.nime.org/proceedings/2015/nime2015_351.pdf},\n\ + \ year = {2015}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176366 + doi: 10.5281/zenodo.1179040 + editor: Edgar Berdahl and Jesse Allison issn: 2220-4806 - pages: 46--50 - title: New Musical Interfaces and New Music-making Paradigms - url: http://www.nime.org/proceedings/2001/nime2001_046.pdf - year: 2001 + month: May + pages: 355--358 + publisher: Louisiana State University + title: Serverless and Peer-to-peer distributed interfaces for musical control + url: http://www.nime.org/proceedings/2015/nime2015_351.pdf + year: 2015 - ENTRYTYPE: inproceedings - ID: Robson2001 - abstract: 'This paper reviews a number of projects that explore building electronic - musical things, interfaces and objects designed to be used and enjoyed by anybody - but in particular those who do not see themselves as naturally musical. On reflecting - on the strengths of these projects, interesting directions for similar work in - the future are considered.' 
- address: 'Seattle, WA' - author: 'Robson, Dominic' - bibtex: "@inproceedings{Robson2001,\n abstract = {This paper reviews a number of\ - \ projects that explore building electronic musical things, interfaces and objects\ - \ designed to be used and enjoyed by anybody but in particular those who do not\ - \ see themselves as naturally musical. On reflecting on the strengths of these\ - \ projects, interesting directions for similar work in the future are considered.},\n\ - \ address = {Seattle, WA},\n author = {Robson, Dominic},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {1-2 April, 2001},\n doi = {10.5281/zenodo.1176376},\n issn = {2220-4806},\n\ - \ pages = {51--53},\n title = {PLAY! : Sound Toys For the Non Musical},\n url\ - \ = {http://www.nime.org/proceedings/2001/nime2001_051.pdf},\n year = {2001}\n\ - }\n" + ID: nime2023_1 + abstract: 'Subtlety and detail are fundamental to what makes musical instruments + special, but accounts of their development in digital lutherie have been constrained + to ethnographies, in-the-wild studies, and personal reflections. Though insightful, + these accounts are imprecise, incomparable, and inefficient for understanding + how fluency with the subtle details of digital musical instruments (DMIs) develops. + We have been designing DMI design probes and activities for closed and constrained + observation of subtle and detailed DMI design, but in two previous studies these + failed to motivate subtle and detailed responses. In this paper we report on our + third attempt, where we designed a tuned percussion DMI and a hybrid handcraft + tool for sculpting its sound using clay, and a one hour activity. Among 26 study + participants were digital luthiers, violin luthiers and musicians, who all engaged + with what we define as micro scale DMI design. 
We observed technical desires and + needs for experiencing and comparing subtle details systematically, and also widely + varying, subjective emotional and artistic relationships with detail in participants'' + own practices. We reflect on the contexts that motivate subtle and detailed digital + lutherie, and discuss the implications for DMI design researchers and technologists + for studying and supporting this aspect of DMI design and craft practice in future.' + address: 'Mexico City, Mexico' + articleno: 1 + author: Jack Armitage and Thor Magnusson and Andrew McPherson + bibtex: "@inproceedings{nime2023_1,\n abstract = {Subtlety and detail are fundamental\ + \ to what makes musical instruments special, but accounts of their development\ + \ in digital lutherie have been constrained to ethnographies, in-the-wild studies,\ + \ and personal reflections. Though insightful, these accounts are imprecise, incomparable,\ + \ and inefficient for understanding how fluency with the subtle details of digital\ + \ musical instruments (DMIs) develops. We have been designing DMI design probes\ + \ and activities for closed and constrained observation of subtle and detailed\ + \ DMI design, but in two previous studies these failed to motivate subtle and\ + \ detailed responses. In this paper we report on our third attempt, where we designed\ + \ a tuned percussion DMI and a hybrid handcraft tool for sculpting its sound using\ + \ clay, and a one hour activity. Among 26 study participants were digital luthiers,\ + \ violin luthiers and musicians, who all engaged with what we define as micro\ + \ scale DMI design. 
We observed technical desires and needs for experiencing and\ + \ comparing subtle details systematically, and also widely varying, subjective\ + \ emotional and artistic relationships with detail in participants' own practices.\ + \ We reflect on the contexts that motivate subtle and detailed digital lutherie,\ + \ and discuss the implications for DMI design researchers and technologists for\ + \ studying and supporting this aspect of DMI design and craft practice in future.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {1},\n author = {Jack Armitage\ + \ and Thor Magnusson and Andrew McPherson},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {9},\n pages = {1--9},\n title = {Studying Subtle and Detailed Digital\ + \ Lutherie: Motivational Contexts and Technical Needs},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_1.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176376 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - pages: 51--53 - title: 'PLAY! : Sound Toys For the Non Musical' - url: http://www.nime.org/proceedings/2001/nime2001_051.pdf - year: 2001 + month: May + numpages: 9 + pages: 1--9 + title: 'Studying Subtle and Detailed Digital Lutherie: Motivational Contexts and + Technical Needs' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_1.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Ulyate2001 - abstract: 'In 1998 we designed enabling technology and a venue concept that allowed - several participants to influence a shared musical and visual experience. Our - primary goal was to deliver musically coherent and visually satisfying results - from several participants'' input. 
The result, the Interactive Dance Club, ran - for four nights at the ACM SIGGRAPH 98 convention in Orlando, Florida.In this - paper we will briefly describe the Interactive Dance Club, our "10 Commandments - of Interactivity", and what we learned from it''s premiere at SIGGRAPH 98.' - address: 'Seattle, WA' - author: 'Ulyate, Ryan and Bianciardi, David' - bibtex: "@inproceedings{Ulyate2001,\n abstract = {In 1998 we designed enabling technology\ - \ and a venue concept that allowed several participants to influence a shared\ - \ musical and visual experience. Our primary goal was to deliver musically coherent\ - \ and visually satisfying results from several participants' input. The result,\ - \ the Interactive Dance Club, ran for four nights at the ACM SIGGRAPH 98 convention\ - \ in Orlando, Florida.In this paper we will briefly describe the Interactive Dance\ - \ Club, our \"10 Commandments of Interactivity\", and what we learned from it's\ - \ premiere at SIGGRAPH 98.},\n address = {Seattle, WA},\n author = {Ulyate, Ryan\ - \ and Bianciardi, David},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {1-2 April, 2001},\n doi\ - \ = {10.5281/zenodo.1176378},\n issn = {2220-4806},\n pages = {54--56},\n title\ - \ = {The Interactive Dance Club : Avoiding Chaos In A Multi Participant Environment},\n\ - \ url = {http://www.nime.org/proceedings/2001/nime2001_054.pdf},\n year = {2001}\n\ - }\n" + ID: nime2023_2 + abstract: 'This paper proposes a human-machine interactive music system for live + performances based on autonomous agents, implemented through immersive extended + reality. The interaction between humans and agents is grounded in concepts related + to Swarm Intelligence and Multi-Agent systems, which are reflected in a technological + platform that involves a 3D physical-virtual solution. 
This approach requires + visual, auditory, haptic, and proprioceptive modalities, making it necessary to + integrate technologies capable of providing such a multimodal environment. The + prototype of the proposed system is implemented by combining Motion Capture, Spatial + Audio, and Mixed Reality technologies. The system is evaluated in terms of objective + measurements and tested with users through music improvisation sessions. The results + demonstrate that the system is used as intended with respect to multimodal interaction + for musical agents. Furthermore, the results validate the novel design and integration + of the required technologies presented in this paper.' + address: 'Mexico City, Mexico' + articleno: 2 + author: Pedro P Lucas and Stefano Fasciani + bibtex: "@inproceedings{nime2023_2,\n abstract = {This paper proposes a human-machine\ + \ interactive music system for live performances based on autonomous agents, implemented\ + \ through immersive extended reality. The interaction between humans and agents\ + \ is grounded in concepts related to Swarm Intelligence and Multi-Agent systems,\ + \ which are reflected in a technological platform that involves a 3D physical-virtual\ + \ solution. This approach requires visual, auditory, haptic, and proprioceptive\ + \ modalities, making it necessary to integrate technologies capable of providing\ + \ such a multimodal environment. The prototype of the proposed system is implemented\ + \ by combining Motion Capture, Spatial Audio, and Mixed Reality technologies.\ + \ The system is evaluated in terms of objective measurements and tested with users\ + \ through music improvisation sessions. 
The results demonstrate that the system\ + \ is used as intended with respect to multimodal interaction for musical agents.\ + \ Furthermore, the results validate the novel design and integration of the required\ + \ technologies presented in this paper.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {2},\n author = {Pedro P Lucas and Stefano Fasciani},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {11},\n pages = {10--20},\n title = {A Human-Agents\ + \ Music Performance System in an Extended Reality Environment},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_2.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '1-2 April, 2001' - doi: 10.5281/zenodo.1176378 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - pages: 54--56 - title: 'The Interactive Dance Club : Avoiding Chaos In A Multi Participant Environment' - url: http://www.nime.org/proceedings/2001/nime2001_054.pdf - year: 2001 + month: May + numpages: 11 + pages: 10--20 + title: A Human-Agents Music Performance System in an Extended Reality Environment + track: Papers + url: http://nime.org/proceedings/2023/nime2023_2.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Collicutt2009 - abstract: 'This paper presents an evaluation and comparison of four input devices - for percussion tasks: a standard tom drum, Roland V-Drum, and two established - examples of gestural controllers: the Buchla Lightning II, and the Radio Baton. - The primary goal of this study was to determine how players'' actions changed - when moving from an acoustic instrument like the tom drum, to a gestural controller - like the Buchla Lightning, which bears little resemblance to an acoustic percussion - instrument. 
Motion capture data was analyzed by comparing a subject''s hand height - variability and timing accuracy across the four instruments as they performed - simple musical tasks. Results suggest that certain gestures such as hand height - amplitude can be adapted to these gestural controllers with little change and - that in general subjects'' timing variability is significantly affected when playing - on the Lightning and Radio Baton when compared to the more familiar tom drum and - VDrum. Possible explanations and other observations are also presented. ' - address: 'Pittsburgh, PA, United States' - author: 'Collicutt, Mike and Casciato, Carmine and Wanderley, Marcelo M.' - bibtex: "@inproceedings{Collicutt2009,\n abstract = {This paper presents an evaluation\ - \ and comparison of four input devices for percussion tasks: a standard tom drum,\ - \ Roland V-Drum, and two established examples of gestural controllers: the Buchla\ - \ Lightning II, and the Radio Baton. The primary goal of this study was to determine\ - \ how players' actions changed when moving from an acoustic instrument like the\ - \ tom drum, to a gestural controller like the Buchla Lightning, which bears little\ - \ resemblance to an acoustic percussion instrument. Motion capture data was analyzed\ - \ by comparing a subject's hand height variability and timing accuracy across\ - \ the four instruments as they performed simple musical tasks. Results suggest\ - \ that certain gestures such as hand height amplitude can be adapted to these\ - \ gestural controllers with little change and that in general subjects' timing\ - \ variability is significantly affected when playing on the Lightning and Radio\ - \ Baton when compared to the more familiar tom drum and VDrum. Possible explanations\ - \ and other observations are also presented. 
},\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Collicutt, Mike and Casciato, Carmine and Wanderley, Marcelo\ - \ M.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177491},\n issn = {2220-4806},\n\ - \ keywords = {Evaluation of Input Devices, Motion Capture, Buchla Lightning II,\ - \ Radio Baton. },\n pages = {1--6},\n title = {From Real to Virtual : A Comparison\ - \ of Input Devices for Percussion Tasks},\n url = {http://www.nime.org/proceedings/2009/nime2009_001.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_3 + abstract: "The Explorator genus is a set of hardware and firmware systems, artistic\ + \ motivations, and physical construction methods designed to support the creation\ + \ of transportable environmentally-responsive mechatronic sound objects for exhibition\ + \ outdoors. In order to enable the realization of installation scenarios with\ + \ varied cochlear needs, we developed a generalized hardware and firmware system\ + \ that can be reused between projects and which supports the development of purpose-built\ + \ feedback mechanisms.\nWe introduce five distinct hardware instances that serve\ + \ as test cases for the Explorator genus. The hardware instances are introduced\ + \ as Explorator “species”. Each species shares core hardware and firmware systems\ + \ but uses distinct soundscape augmentation feedback mechanisms to support unique\ + \ installation scenarios. Initial subjective and objective observations, findings,\ + \ and data are provided from fieldwork conducted in four American states. These\ + \ initial test installations highlight the Explorator genus as a modular, transportable,\ + \ environmentally reactive, environmentally protected, self-powered system for\ + \ creating novel mechatronic sound objects for outdoor sonic installation art." 
+ address: 'Mexico City, Mexico' + articleno: 3 + author: Nathan D Villicana-Shaw and Dale Carnegie and Jim Murphy and Mo Zareei + bibtex: "@inproceedings{nime2023_3,\n abstract = {The Explorator genus is a set\ + \ of hardware and firmware systems, artistic motivations, and physical construction\ + \ methods designed to support the creation of transportable environmentally-responsive\ + \ mechatronic sound objects for exhibition outdoors. In order to enable the realization\ + \ of installation scenarios with varied cochlear needs, we developed a generalized\ + \ hardware and firmware system that can be reused between projects and which supports\ + \ the development of purpose-built feedback mechanisms.\nWe introduce five distinct\ + \ hardware instances that serve as test cases for the Explorator genus. The hardware\ + \ instances are introduced as Explorator “species”. Each species shares core hardware\ + \ and firmware systems but uses distinct soundscape augmentation feedback mechanisms\ + \ to support unique installation scenarios. 
Initial subjective and objective observations,\ + \ findings, and data are provided from fieldwork conducted in four American states.\ + \ These initial test installations highlight the Explorator genus as a modular,\ + \ transportable, environmentally reactive, environmentally protected, self-powered\ + \ system for creating novel mechatronic sound objects for outdoor sonic installation\ + \ art.},\n address = {Mexico City, Mexico},\n articleno = {3},\n author = {Nathan\ + \ D Villicana-Shaw and Dale Carnegie and Jim Murphy and Mo Zareei},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {9},\n pages = {21--29},\n title = {Explorator Genus:\ + \ Designing Transportable Mechatronic Sound Objects for Outdoor Installation Art},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_3.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177491 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Evaluation of Input Devices, Motion Capture, Buchla Lightning II, Radio - Baton. ' - pages: 1--6 - title: 'From Real to Virtual : A Comparison of Input Devices for Percussion Tasks' - url: http://www.nime.org/proceedings/2009/nime2009_001.pdf - year: 2009 + month: May + numpages: 9 + pages: 21--29 + title: 'Explorator Genus: Designing Transportable Mechatronic Sound Objects for + Outdoor Installation Art' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_3.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Hadjakos2009 - abstract: 'Measurement of pianists'' arm movement provides a signal,which is composed - of controlled movements and noise. 
Thenoise is composed of uncontrolled movement - generated bythe interaction of the arm with the piano action and measurement error. - We propose a probabilistic model for armtouch movements, which allows to estimate - the amount ofnoise in a joint. This estimation helps to interpret the movement - signal, which is of interest for augmented piano andpiano pedagogy applications.' - address: 'Pittsburgh, PA, United States' - author: 'Hadjakos, Aristotelis and Aitenbichler, Erwin and Mühlhäuser, Max' - bibtex: "@inproceedings{Hadjakos2009,\n abstract = {Measurement of pianists' arm\ - \ movement provides a signal,which is composed of controlled movements and noise.\ - \ Thenoise is composed of uncontrolled movement generated bythe interaction of\ - \ the arm with the piano action and measurement error. We propose a probabilistic\ - \ model for armtouch movements, which allows to estimate the amount ofnoise in\ - \ a joint. This estimation helps to interpret the movement signal, which is of\ - \ interest for augmented piano andpiano pedagogy applications.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Hadjakos, Aristotelis and Aitenbichler, Erwin\ - \ and M\\''{u}hlh\\''{a}user, Max},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177567},\n\ - \ issn = {2220-4806},\n keywords = {Piano, arm movement, gesture, classification,\ - \ augmented instrument, inertial sensing. },\n pages = {7--12},\n title = {Probabilistic\ - \ Model of Pianists' Arm Touch Movements},\n url = {http://www.nime.org/proceedings/2009/nime2009_007.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_4 + abstract: "LoopBoxes is an accessible digital musical instrument designed to create\ + \ an intuitive access to loop based music making for children with special educational\ + \ needs (SEN). 
This paper describes the evaluation of the instrument in the form\ + \ of a pilot study during a music festival in Berlin, Germany, as well as a case\ + \ study with children and music teachers in a SEN school setting. We created a\ + \ modular system composed of three modules that afford single user as well as\ + \ collaborative music making. The pilot study was evaluated using informal observation\ + \ and questionnaires (n = 39), and indicated that the instrument affords music\ + \ making for people with and without prior musical knowledge across all age groups\ + \ and fosters collaborative musical processes. The case study was based on observation\ + \ and a qualitative interview. It confirmed that the instrument meets the needs\ + \ of the school settings and indicated how future versions could expand access\ + \ to all students.\nespecially those experiencing complex disabilities. In addition,\ + \ out-of-the-box functionality seems to be crucial for the long-term implementation\ + \ of the instrument in a school setting." + address: 'Mexico City, Mexico' + articleno: 4 + author: Andreas Förster and Alarith Uhde and Mathias Komesker and Christina Komesker + and Irina Schmidt + bibtex: "@inproceedings{nime2023_4,\n abstract = {LoopBoxes is an accessible digital\ + \ musical instrument designed to create an intuitive access to loop based music\ + \ making for children with special educational needs (SEN). This paper describes\ + \ the evaluation of the instrument in the form of a pilot study during a music\ + \ festival in Berlin, Germany, as well as a case study with children and music\ + \ teachers in a SEN school setting. We created a modular system composed of three\ + \ modules that afford single user as well as collaborative music making. 
The pilot\ + \ study was evaluated using informal observation and questionnaires (n = 39),\ + \ and indicated that the instrument affords music making for people with and without\ + \ prior musical knowledge across all age groups and fosters collaborative musical\ + \ processes. The case study was based on observation and a qualitative interview.\ + \ It confirmed that the instrument meets the needs of the school settings and\ + \ indicated how future versions could expand access to all students.\nespecially\ + \ those experiencing complex disabilities. In addition, out-of-the-box functionality\ + \ seems to be crucial for the long-term implementation of the instrument in a\ + \ school setting.},\n address = {Mexico City, Mexico},\n articleno = {4},\n author\ + \ = {Andreas Förster and Alarith Uhde and Mathias Komesker and Christina Komesker\ + \ and Irina Schmidt},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {10},\n\ + \ pages = {30--39},\n title = {LoopBoxes - Evaluation of a Collaborative Accessible\ + \ Digital Musical Instrument},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_4.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177567 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Piano, arm movement, gesture, classification, augmented instrument, inertial - sensing. 
' - pages: 7--12 - title: Probabilistic Model of Pianists' Arm Touch Movements - url: http://www.nime.org/proceedings/2009/nime2009_007.pdf - year: 2009 + month: May + numpages: 10 + pages: 30--39 + title: LoopBoxes - Evaluation of a Collaborative Accessible Digital Musical Instrument + track: Papers + url: http://nime.org/proceedings/2023/nime2023_4.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Gelineck2009 - abstract: 'This paper presents a HCI inspired evaluation of simple physical interfaces - used to control physical models. Specifically knobs and sliders are compared in - a creative and exploratory framework, which simulates the natural environment - in which an electronic musician would normally explore a new instrument. No significant - difference was measured between using knobs and sliders for controlling parameters - of a physical modeling electronic instrument. Thereported difference between the - tested instruments were mostlydue to the sound synthesis models.' - address: 'Pittsburgh, PA, United States' - author: 'Gelineck, Steven and Serafin, Stefania' - bibtex: "@inproceedings{Gelineck2009,\n abstract = {This paper presents a HCI inspired\ - \ evaluation of simple physical interfaces used to control physical models. Specifically\ - \ knobs and sliders are compared in a creative and exploratory framework, which\ - \ simulates the natural environment in which an electronic musician would normally\ - \ explore a new instrument. No significant difference was measured between using\ - \ knobs and sliders for controlling parameters of a physical modeling electronic\ - \ instrument. 
Thereported difference between the tested instruments were mostlydue\ - \ to the sound synthesis models.},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Gelineck, Steven and Serafin, Stefania},\n booktitle = {Proceedings\ + ID: nime2023_5 + abstract: "Legatus is a three-legged audio and environmentally-reactive soundscape\ + \ augmentation artifact created for outdoor exhibitions in locations without access\ + \ to mains electricity. Legatus has an approximate ingress protection rating of\ + \ IP54, is self-powered, and is easy to transport weighing approximately a kilogram\ + \ while fitting within a 185 mm tall by 110 mm diameter cylinder. With LED-based\ + \ visual feedback and a cochlear loudspeaker-based vocalization system, Legatus\ + \ seeks to capture attention and redirect it to the in-situ sonic environment.\n\ + Informed by related historical and contemporary outdoor sonic installation artworks,\ + \ we conceptualized and tested four installation scenarios in 2021. Installations\ + \ were presented following a soundscape-specific pop-up exhibition strategy, where\ + \ the exhibition venue and artifact placement are determined by in-situ sonic\ + \ conditions. Legatus artifacts use high-level audio features and real-time environmental\ + \ conditions including ambient temperature, humidity, and brightness levels to\ + \ influence the timing and parameters of sample playback routines, audio synthesis,\ + \ and audio recording.\nHaving developed and tested for nine months, Legatus has\ + \ emerged as a portable, rugged, affordable, adaptable, lightweight, and simple\ + \ tool for augmenting natural sonic environments that can provide last-mile distributions\ + \ of sonic installation art experiences to places and communities where these\ + \ works are rarely exhibited." 
+ address: 'Mexico City, Mexico' + articleno: 5 + author: Nathan D Villicana-Shaw and Dale Carnegie and Jim Murphy and Mo Zareei + bibtex: "@inproceedings{nime2023_5,\n abstract = {Legatus is a three-legged audio\ + \ and environmentally-reactive soundscape augmentation artifact created for outdoor\ + \ exhibitions in locations without access to mains electricity. Legatus has an\ + \ approximate ingress protection rating of IP54, is self-powered, and is easy\ + \ to transport weighing approximately a kilogram while fitting within a 185 mm\ + \ tall by 110 mm diameter cylinder. With LED-based visual feedback and a cochlear\ + \ loudspeaker-based vocalization system, Legatus seeks to capture attention and\ + \ redirect it to the in-situ sonic environment.\nInformed by related historical\ + \ and contemporary outdoor sonic installation artworks, we conceptualized and\ + \ tested four installation scenarios in 2021. Installations were presented following\ + \ a soundscape-specific pop-up exhibition strategy, where the exhibition venue\ + \ and artifact placement are determined by in-situ sonic conditions. 
Legatus artifacts\ + \ use high-level audio features and real-time environmental conditions including\ + \ ambient temperature, humidity, and brightness levels to influence the timing\ + \ and parameters of sample playback routines, audio synthesis, and audio recording.\n\ + Having developed and tested for nine months, Legatus has emerged as a portable,\ + \ rugged, affordable, adaptable, lightweight, and simple tool for augmenting natural\ + \ sonic environments that can provide last-mile distributions of sonic installation\ + \ art experiences to places and communities where these works are rarely exhibited.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {5},\n author = {Nathan D Villicana-Shaw\ + \ and Dale Carnegie and Jim Murphy and Mo Zareei},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177549},\n issn = {2220-4806},\n keywords = {Evaluation,\ - \ Interfaces, Sliders, Knobs, Physi- cal Modeling, Electronic Musicians, Exploration,\ - \ Creativ- ity, Affordances. 
},\n pages = {13--18},\n title = {A Quantitative\ - \ Evaluation of the Differences between Knobs and Sliders},\n url = {http://www.nime.org/proceedings/2009/nime2009_013.pdf},\n\ - \ year = {2009}\n}\n" + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {8},\n pages = {40--47},\n title = {Legatus: Design and\ + \ Exhibition of Loudspeaker-Based, Environmentally-Reactive, Soundscape Augmentation\ + \ Artifacts in Outdoor Natural Environments},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_5.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177549 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Evaluation, Interfaces, Sliders, Knobs, Physi- cal Modeling, Electronic - Musicians, Exploration, Creativ- ity, Affordances. ' - pages: 13--18 - title: A Quantitative Evaluation of the Differences between Knobs and Sliders - url: http://www.nime.org/proceedings/2009/nime2009_013.pdf - year: 2009 + month: May + numpages: 8 + pages: 40--47 + title: 'Legatus: Design and Exhibition of Loudspeaker-Based, Environmentally-Reactive, + Soundscape Augmentation Artifacts in Outdoor Natural Environments' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_5.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Pedrosa2009 - abstract: 'Haptic feedback is an important element that needs to be carefully designed - in computer music interfaces. This paper presents an evaluation of several force - renderings for target acquisition in space when used to support a music related - task. The study presented here addresses only one musical aspect: the need to - repeat elements accurately in time and in content. Several force scenarios will - be rendered over a simple 3D target acquisition task and users'' performance will - be quantitatively and qualitatively evaluated. 
The results show how the users'' - subjective preference for a particular kind of force support does not always correlate - to a quantitative measurement of performance enhancement. We describe a way in - which a control mapping for a musical interface could be achieved without contradicting - the users'' preferences as obtained from the study. ' - address: 'Pittsburgh, PA, United States' - author: 'Pedrosa, Ricardo and Maclean, Karon E.' - bibtex: "@inproceedings{Pedrosa2009,\n abstract = {Haptic feedback is an important\ - \ element that needs to be carefully designed in computer music interfaces. This\ - \ paper presents an evaluation of several force renderings for target acquisition\ - \ in space when used to support a music related task. The study presented here\ - \ addresses only one musical aspect: the need to repeat elements accurately in\ - \ time and in content. Several force scenarios will be rendered over a simple\ - \ 3D target acquisition task and users' performance will be quantitatively and\ - \ qualitatively evaluated. The results show how the users' subjective preference\ - \ for a particular kind of force support does not always correlate to a quantitative\ - \ measurement of performance enhancement. We describe a way in which a control\ - \ mapping for a musical interface could be achieved without contradicting the\ - \ users' preferences as obtained from the study. },\n address = {Pittsburgh, PA,\ - \ United States},\n author = {Pedrosa, Ricardo and Maclean, Karon E.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177657},\n issn = {2220-4806},\n keywords\ - \ = {music interfaces, force feedback, tempo, comfort, target acquisition. 
},\n\ - \ pages = {19--24},\n title = {Evaluation of {3D} Haptic Target Rendering to Support\ - \ Timing in Music Tasks},\n url = {http://www.nime.org/proceedings/2009/nime2009_019.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_6 + abstract: 'Since 1966 Star Trek has been exploring imaginary and futuristic worlds + in which humanity comes in contact with alien cultures. Music has always been + a method through which alien cultures are made relatable to humans, and musical + instruments become props through which we learn about an alien culture that is + totally different to that of humans. These musical instruments were not designed + with musical use in mind; rather they are designed as storytelling devices, and + never intended to work or make sound. After discovering one of these instruments + I realised that recreating it in the way it was imagined and making it functional + would require consideration of the instrument''s storytelling function above all + else, including the technology. In this paper I describe the process of re-creating + an instrument from Star Trek as a functional DMI, a process in which design decisions + were guided by what the storytelling intentions were for this imagined instrument, + and what I found out by having to make technical choices that supported them (not + the other way around). As well as reporting the design and implementation process + I summarise the important design lesson that came from having to emphasise the + intended mood and presence of an instrument, instead of the design being steered + by technical affordances.' + address: 'Mexico City, Mexico' + articleno: 6 + author: S. M. Astrid Bin + bibtex: "@inproceedings{nime2023_6,\n abstract = {Since 1966 Star Trek has been\ + \ exploring imaginary and futuristic worlds in which humanity comes in contact\ + \ with alien cultures. 
Music has always been a method through which alien cultures\ + \ are made relatable to humans, and musical instruments become props through which\ + \ we learn about an alien culture that is totally different to that of humans.\ + \ These musical instruments were not designed with musical use in mind; rather\ + \ they are designed as storytelling devices, and never intended to work or make\ + \ sound. After discovering one of these instruments I realised that recreating\ + \ it in the way it was imagined and making it functional would require consideration\ + \ of the instrument's storytelling function above all else, including the technology.\ + \ In this paper I describe the process of re-creating an instrument from Star\ + \ Trek as a functional DMI, a process in which design decisions were guided by\ + \ what the storytelling intentions were for this imagined instrument, and what\ + \ I found out by having to make technical choices that supported them (not the\ + \ other way around). As well as reporting the design and implementation process\ + \ I summarise the important design lesson that came from having to emphasise the\ + \ intended mood and presence of an instrument, instead of the design being steered\ + \ by technical affordances.},\n address = {Mexico City, Mexico},\n articleno =\ + \ {6},\n author = {S. M. 
Astrid Bin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {6},\n pages = {48--53},\n title = {Where Few NIMEs Have Gone Before: Lessons\ + \ in instrument design from Star Trek},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_6.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177657 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'music interfaces, force feedback, tempo, comfort, target acquisition. ' - pages: 19--24 - title: Evaluation of 3D Haptic Target Rendering to Support Timing in Music Tasks - url: http://www.nime.org/proceedings/2009/nime2009_019.pdf - year: 2009 + month: May + numpages: 6 + pages: 48--53 + title: 'Where Few NIMEs Have Gone Before: Lessons in instrument design from Star + Trek' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_6.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Hsu2009 - abstract: 'In this paper, we discuss a number of issues related to the design of - evaluation tests for comparing interactive music systems for improvisation. Our - testing procedure covers rehearsal and performance environments, and captures - the experiences of a musician/participant as well as an audience member/observer. - We attempt to isolate salient components of system behavior, and test whether - the musician or audience are able to discern between systems with significantly - different behavioral components. We report on our experiences with our testing - methodology, in comparative studies of our London and ARHS improvisation systems - [1]. 
' - address: 'Pittsburgh, PA, United States' - author: 'Hsu, William and Sosnick, Marc' - bibtex: "@inproceedings{Hsu2009,\n abstract = {In this paper, we discuss a number\ - \ of issues related to the design of evaluation tests for comparing interactive\ - \ music systems for improvisation. Our testing procedure covers rehearsal and\ - \ performance environments, and captures the experiences of a musician/participant\ - \ as well as an audience member/observer. We attempt to isolate salient components\ - \ of system behavior, and test whether the musician or audience are able to discern\ - \ between systems with significantly different behavioral components. We report\ - \ on our experiences with our testing methodology, in comparative studies of our\ - \ London and ARHS improvisation systems [1]. },\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Hsu, William and Sosnick, Marc},\n booktitle = {Proceedings\ + ID: nime2023_7 + abstract: 'Spatial audio systems are expensive, mostly because they usually imply + the use of a wide range of speakers and hence audio outputs. Some techniques such + as Wave Field Synthesis (WFS) are especially demanding in that regard making them + out of reach to many individuals or even institutions. In this paper, we propose + to leverage recent progress made using Field-Programmable Gate Arrays (FPGA) in + the context of real-time audio signal processing to implement frugal spatial audio + systems. We focus on the case of WFS and we demonstrate how to build a 32 speakers + system that can manage multiple sources in parallel for less than 800 USD (including + speakers). We believe that this approach contributes to making advanced spatial + audio techniques more accessible.' 
+ address: 'Mexico City, Mexico' + articleno: 7 + author: Romain Michon and Joseph Bizien and Maxime Popoff and Tanguy Risset + bibtex: "@inproceedings{nime2023_7,\n abstract = {Spatial audio systems are expensive,\ + \ mostly because they usually imply the use of a wide range of speakers and hence\ + \ audio outputs. Some techniques such as Wave Field Synthesis (WFS) are especially\ + \ demanding in that regard making them out of reach to many individuals or even\ + \ institutions. In this paper, we propose to leverage recent progress made using\ + \ Field-Programmable Gate Arrays (FPGA) in the context of real-time audio signal\ + \ processing to implement frugal spatial audio systems. We focus on the case of\ + \ WFS and we demonstrate how to build a 32 speakers system that can manage multiple\ + \ sources in parallel for less than 800 USD (including speakers). We believe that\ + \ this approach contributes to making advanced spatial audio techniques more accessible.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {7},\n author = {Romain Michon\ + \ and Joseph Bizien and Maxime Popoff and Tanguy Risset},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177579},\n issn = {2220-4806},\n keywords = {Interactive\ - \ music systems, human computer interaction, evaluation tests. 
},\n pages = {25--28},\n\ - \ title = {Evaluating Interactive Music Systems : An HCI Approach},\n url = {http://www.nime.org/proceedings/2009/nime2009_025.pdf},\n\ - \ year = {2009}\n}\n" + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {6},\n pages = {54--59},\n title = {Making Frugal Spatial\ + \ Audio Systems Using Field-Programmable Gate Arrays},\n track = {Papers},\n url\ + \ = {http://nime.org/proceedings/2023/nime2023_7.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177579 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Interactive music systems, human computer interaction, evaluation tests. ' - pages: 25--28 - title: 'Evaluating Interactive Music Systems : An HCI Approach' - url: http://www.nime.org/proceedings/2009/nime2009_025.pdf - year: 2009 + month: May + numpages: 6 + pages: 54--59 + title: Making Frugal Spatial Audio Systems Using Field-Programmable Gate Arrays + track: Papers + url: http://nime.org/proceedings/2023/nime2023_7.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Spowage2009 - abstract: 'Due to the accelerating development of ‘rapidly to become redundant’ - technologies, there is a growing mountain of perfectly serviceable discarded electronic - devices hiding quietly at the bottom of almost every domestic rubbish pile or - at the back of nearly every second hand shop. If you add in to this scenario the - accelerating nature of our society where people don’t have time or the motivation - in their lives to sell or auction their redundant electronics, one can discover - a plethora of discarded materials available for salvage. Using this as a starting - point, I have produced a portable noise instrument from recycled materials, that - is primarily an artistic led venture, built specifically for live performance.' 
- address: 'Pittsburgh, PA, United States' - author: 'Spowage, Neal' - bibtex: "@inproceedings{Spowage2009,\n abstract = {Due to the accelerating development\ - \ of ‘rapidly to become redundant’ technologies, there is a growing mountain of\ - \ perfectly serviceable discarded electronic devices hiding quietly at the bottom\ - \ of almost every domestic rubbish pile or at the back of nearly every second\ - \ hand shop. If you add in to this scenario the accelerating nature of our society\ - \ where people don’t have time or the motivation in their lives to sell or auction\ - \ their redundant electronics, one can discover a plethora of discarded materials\ - \ available for salvage. Using this as a starting point, I have produced a portable\ - \ noise instrument from recycled materials, that is primarily an artistic led\ - \ venture, built specifically for live performance.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Spowage, Neal},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177683},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ - \ = {29--30},\n title = {The Ghetto Bastard : A Portable Noise Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2009/nime2009_029.pdf},\n year = {2009}\n\ - }\n" + ID: nime2023_8 + abstract: 'The history of the New Interfaces for Musical Expression (NIME) conference + starts with the first workshop on NIME during the ACM Conference on Human Factors + in Computing Systems in 2001. But research on musical interfaces has a rich ”prehistoric” + phase with a substantial amount of relevant research material published before + 2001. This paper highlights the variety and importance of musical interface-related + research between the mid-1970s and 2000 published in two major computer music + research venues: the International Computer Music Conference and the Computer + Music Journal. 
It discusses some early examples of research on musical interfaces + published in these venues, then reviews five other sources of related literature + that pre-date the original NIME CHI workshop. It then presents a series of implications + of this research and introduces a collaborative website that compiles many of + these references in one place. This work is meant as a step into a more inclusive + approach to interface design by facilitating the integration of as many relevant + references as possible into future NIME research.' + address: 'Mexico City, Mexico' + articleno: 8 + author: Marcelo Wanderley + bibtex: "@inproceedings{nime2023_8,\n abstract = {The history of the New Interfaces\ + \ for Musical Expression (NIME) conference starts with the first workshop on NIME\ + \ during the ACM Conference on Human Factors in Computing Systems in 2001. But\ + \ research on musical interfaces has a rich ”prehistoric” phase with a substantial\ + \ amount of relevant research material published before 2001. This paper highlights\ + \ the variety and importance of musical interface-related research between the\ + \ mid-1970s and 2000 published in two major computer music research venues: the\ + \ International Computer Music Conference and the Computer Music Journal. It discusses\ + \ some early examples of research on musical interfaces published in these venues,\ + \ then reviews five other sources of related literature that pre-date the original\ + \ NIME CHI workshop. It then presents a series of implications of this research\ + \ and introduces a collaborative website that compiles many of these references\ + \ in one place. 
This work is meant as a step into a more inclusive approach to\ + \ interface design by facilitating the integration of as many relevant references\ + \ as possible into future NIME research.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {8},\n author = {Marcelo Wanderley},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {10},\n pages = {60--69},\n title = {Prehistoric NIME:\ + \ Revisiting Research on New Musical Interfaces in the Computer Music Community\ + \ before NIME},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_8.pdf},\n\ + \ year = {2023}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + editor: Miguel Ortiz and Adnan Marquez-Borbon + issn: 2220-4806 + month: May + numpages: 10 + pages: 60--69 + title: 'Prehistoric NIME: Revisiting Research on New Musical Interfaces in the Computer + Music Community before NIME' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_8.pdf + year: 2023 + + +- ENTRYTYPE: inproceedings + ID: nime2023_9 + abstract: "Within the music improvisation and jazz scenes, playing a wrong note\ + \ may be seen as a source of creativity and novelty, where an initially undesired\ + \ factor (the mistaken note) invites the musician to leverage their skills to\ + \ transform it into new musical material. How does this idea, however, translate\ + \ into more experimental scenes like NIME, where control and virtuosity are not\ + \ necessarily the performance's aim? \nMoreover, within NIME communities the addition\ + \ of randomness or constraints to musical instruments is often an intended aesthetic\ + \ decision rather than a source of mistakes. 
To explore this contrast, we invited\ + \ four NIME practitioners to participate in the Self-Sabotage Workshop, where\ + \ each practitioner had to build their own sabotaging elements for their musical\ + \ instruments and to give a short demonstration with them. We gathered participants'\ + \ impressions of self-sabotating in a focus group, inquiring about control and\ + \ musicality, and also the strategies they developed for coping with the self-sabotaged\ + \ instruments. We discuss the emergent ideas of planned and unplanned sabotaging,\ + \ and we propose a starting point towards the idea of self-sabotaging as a continuous\ + \ design and musical process where designers/musicians try to overcome barriers\ + \ that they impose upon themselves." + address: 'Mexico City, Mexico' + articleno: 9 + author: Teodoro Dannemann and Nick Bryan-Kinns and Andrew McPherson + bibtex: "@inproceedings{nime2023_9,\n abstract = {Within the music improvisation\ + \ and jazz scenes, playing a wrong note may be seen as a source of creativity\ + \ and novelty, where an initially undesired factor (the mistaken note) invites\ + \ the musician to leverage their skills to transform it into new musical material.\ + \ How does this idea, however, translate into more experimental scenes like NIME,\ + \ where control and virtuosity are not necessarily the performance's aim? \nMoreover,\ + \ within NIME communities the addition of randomness or constraints to musical\ + \ instruments is often an intended aesthetic decision rather than a source of\ + \ mistakes. To explore this contrast, we invited four NIME practitioners to participate\ + \ in the Self-Sabotage Workshop, where each practitioner had to build their own\ + \ sabotaging elements for their musical instruments and to give a short demonstration\ + \ with them. 
We gathered participants' impressions of self-sabotating in a focus\ + \ group, inquiring about control and musicality, and also the strategies they\ + \ developed for coping with the self-sabotaged instruments. We discuss the emergent\ + \ ideas of planned and unplanned sabotaging, and we propose a starting point towards\ + \ the idea of self-sabotaging as a continuous design and musical process where\ + \ designers/musicians try to overcome barriers that they impose upon themselves.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {9},\n author = {Teodoro Dannemann\ + \ and Nick Bryan-Kinns and Andrew McPherson},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {9},\n pages = {70--78},\n title = {Self-Sabotage Workshop: a starting\ + \ point to unravel sabotaging of instruments as a design practice},\n track =\ + \ {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_9.pdf},\n year\ + \ = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177683 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 29--30 - title: 'The Ghetto Bastard : A Portable Noise Instrument' - url: http://www.nime.org/proceedings/2009/nime2009_029.pdf - year: 2009 + month: May + numpages: 9 + pages: 70--78 + title: 'Self-Sabotage Workshop: a starting point to unravel sabotaging of instruments + as a design practice' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_9.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Humphrey2009 - abstract: 'Motivated by previous work aimed at developing mathematical models to - describe expressive timing in music, and specifically the final ritardandi, using - measured kinematic data, we further investigate the linkage of locomotion and - 
timing in music. The natural running behavior of four subjects is measured with - a wearable sensor prototype and analyzed to create normalized tempo curves. The - resulting curves are then used to modulate the final ritard of MIDI scores, which - are also performed by an expert musician. A Turing-inspired listening test is - conducted to observe a human listener''s ability to determine the nature of the - performer. ' - address: 'Pittsburgh, PA, United States' - author: 'Humphrey, Eric and Leider, Colby' - bibtex: "@inproceedings{Humphrey2009,\n abstract = {Motivated by previous work aimed\ - \ at developing mathematical models to describe expressive timing in music, and\ - \ specifically the final ritardandi, using measured kinematic data, we further\ - \ investigate the linkage of locomotion and timing in music. The natural running\ - \ behavior of four subjects is measured with a wearable sensor prototype and analyzed\ - \ to create normalized tempo curves. The resulting curves are then used to modulate\ - \ the final ritard of MIDI scores, which are also performed by an expert musician.\ - \ A Turing-inspired listening test is conducted to observe a human listener's\ - \ ability to determine the nature of the performer. },\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Humphrey, Eric and Leider, Colby},\n booktitle\ + ID: nime2023_10 + abstract: 'We propose a method that uses music jamming as a tool for the design + of musical instruments. Both designers and musicians collaborate in the music + making process for the subsequent development of individual “music performer’s + profiles” which account for four dimensions: (i) movements and embodiment, (ii) + musical preferences, (iii) difficulties, and (iv) capabilities. These profiles + converge into proposed prototypes that transform into final designs after experts + and performers'' examination and feedback. 
We ground this method in the context + of physically disabled musicians, and we show that the method provides a decolonial + view to disability, as its purpose moves from the classical view of technology + as an aid for allowing disabled communities to access well-established instruments, + towards a new paradigm where technologies are used for the augmentation of expressive + capabilities, the strengthening of social engagement, and the empowerment of music + makers.' + address: 'Mexico City, Mexico' + articleno: 10 + author: Teodoro Dannemann + bibtex: "@inproceedings{nime2023_10,\n abstract = {We propose a method that uses\ + \ music jamming as a tool for the design of musical instruments. Both designers\ + \ and musicians collaborate in the music making process for the subsequent development\ + \ of individual “music performer’s profiles” which account for four dimensions:\ + \ (i) movements and embodiment, (ii) musical preferences, (iii) difficulties,\ + \ and (iv) capabilities. These profiles converge into proposed prototypes that\ + \ transform into final designs after experts and performers' examination and feedback.\ + \ We ground this method in the context of physically disabled musicians, and we\ + \ show that the method provides a decolonial view to disability, as its purpose\ + \ moves from the classical view of technology as an aid for allowing disabled\ + \ communities to access well-established instruments, towards a new paradigm where\ + \ technologies are used for the augmentation of expressive capabilities, the strengthening\ + \ of social engagement, and the empowerment of music makers.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {10},\n author = {Teodoro Dannemann},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177581},\n issn = {2220-4806},\n keywords\ - \ = {Musical kinematics, expressive tempo, machine music. 
},\n pages = {31--32},\n\ - \ title = {The Navi Activity Monitor : Toward Using Kinematic Data to Humanize\ - \ Computer Music},\n url = {http://www.nime.org/proceedings/2009/nime2009_031.pdf},\n\ - \ year = {2009}\n}\n" + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {7},\n pages = {79--85},\n title = {Music jamming\ + \ as a participatory design method. A case study with disabled musicians},\n track\ + \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_10.pdf},\n year\ + \ = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177581 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Musical kinematics, expressive tempo, machine music. ' - pages: 31--32 - title: 'The Navi Activity Monitor : Toward Using Kinematic Data to Humanize Computer - Music' - url: http://www.nime.org/proceedings/2009/nime2009_031.pdf - year: 2009 + month: May + numpages: 7 + pages: 79--85 + title: Music jamming as a participatory design method. A case study with disabled + musicians + track: Papers + url: http://nime.org/proceedings/2023/nime2023_10.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Muller2009 - abstract: 'Vibetone is a musical input device which was build to explore tactile - feedback in gesture based interaction. It is a prototype aimed to allow the performer - to play both continuously and discrete pitched sounds in the same space. Our primary - focus is on tactile feedback to guide the artist''s movements during his performance. - Thus, also untrained users are enabled to musical expression through bodily actions - and precisely arm movements, guided through tactile feedback signals. 
' - address: 'Pittsburgh, PA, United States' - author: 'Müller, Alexander and Essl, Georg' - bibtex: "@inproceedings{Muller2009,\n abstract = {Vibetone is a musical input device\ - \ which was build to explore tactile feedback in gesture based interaction. It\ - \ is a prototype aimed to allow the performer to play both continuously and discrete\ - \ pitched sounds in the same space. Our primary focus is on tactile feedback to\ - \ guide the artist's movements during his performance. Thus, also untrained users\ - \ are enabled to musical expression through bodily actions and precisely arm movements,\ - \ guided through tactile feedback signals. },\n address = {Pittsburgh, PA, United\ - \ States},\n author = {M\\''{u}ller, Alexander and Essl, Georg},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177623},\n issn = {2220-4806},\n keywords = {tactile\ - \ feedback, intuitive interaction, gestural interaction, MIDI controller },\n\ - \ pages = {33--34},\n title = {Utilizing Tactile Feedback to Guide Movements Between\ - \ Sounds},\n url = {http://www.nime.org/proceedings/2009/nime2009_033.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_11 + abstract: 'This paper presents Puara, a framework created to tackle problems commonly + associated with instrument design, immersive environments, and prototyping. We + discuss how exploring Digital Musical Instruments (DMIs) in a collaborative environment + led to generalizing procedures that constitute a starting point to solve technical + challenges when building, maintaining, and performing with instruments. These + challenges guided the framework organization and focus on maintainability, integrability, + and modularity. 
Puara was employed in self-contained systems using 3 % hard-to-implement DMI + building blocks (network manager, gestural descriptors, Media Processing Unit) + and supporting 3 established DMIs (GuitarAMI, T-Stick, Probatio) and one new instrument + (AMIWrist). We validated Puara with two use cases where parts of the framework + were used. Finally, we accessed the influence of frameworks when exploring predefined + NIMEs without concern about the inner workings, or shifting composition paradigms + between event-based and gesture-based approaches.' + address: 'Mexico City, Mexico' + articleno: 11 + author: Eduardo A. L. Meneses and Thomas Piquet and Jason Noble and Marcelo Wanderley + bibtex: "@inproceedings{nime2023_11,\n abstract = {This paper presents Puara, a\ + \ framework created to tackle problems commonly associated with instrument design,\ + \ immersive environments, and prototyping. We discuss how exploring Digital Musical\ + \ Instruments (DMIs) in a collaborative environment led to generalizing procedures\ + \ that constitute a starting point to solve technical challenges when building,\ + \ maintaining, and performing with instruments. These challenges guided the framework\ + \ organization and focus on maintainability, integrability, and modularity. Puara\ + \ was employed in self-contained systems using 3 % hard-to-implement DMI building\ + \ blocks (network manager, gestural descriptors, Media Processing Unit) and supporting\ + \ 3 established DMIs (GuitarAMI, T-Stick, Probatio) and one new instrument (AMIWrist).\ + \ We validated Puara with two use cases where parts of the framework were used.\ + \ Finally, we accessed the influence of frameworks when exploring predefined NIMEs\ + \ without concern about the inner workings, or shifting composition paradigms\ + \ between event-based and gesture-based approaches.},\n address = {Mexico City,\ + \ Mexico},\n articleno = {11},\n author = {Eduardo A. L. 
Meneses and Thomas Piquet\ + \ and Jason Noble and Marcelo Wanderley},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {8},\n pages = {86--93},\n title = {The Puara Framework: Hiding complexity\ + \ and modularity for reproducibility and usability in NIMEs},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_11.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177623 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'tactile feedback, intuitive interaction, gestural interaction, MIDI controller ' - pages: 33--34 - title: Utilizing Tactile Feedback to Guide Movements Between Sounds - url: http://www.nime.org/proceedings/2009/nime2009_033.pdf - year: 2009 + month: May + numpages: 8 + pages: 86--93 + title: 'The Puara Framework: Hiding complexity and modularity for reproducibility + and usability in NIMEs' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_11.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Ferguson2009 - abstract: 'Sonification is generally considered in a statistical data analysis context. - This research discusses the development of an interface for live control of sonification - – for controlling and altering sonifications over the course of their playback. - This is designed primarily with real-time sources in mind, rather than with static - datasets, and is intended as a performative, live data-art creative activity. - The interface enables the performer to use the interface as an instrument for - iterative interpretations and variations of sonifications of multiple datastreams. 
- Using the interface, the performer can alter the scale, granularity, timbre, hierarchy - of elements, spatialisation, spectral filtering, key/modality, rhythmic distribution - and register ‘on-the-fly’ to both perform data-generated music, and investigate - data in a live exploratory, interactive manner.' - address: 'Pittsburgh, PA, United States' - author: 'Ferguson, Sam and Beilharz, Kirsty' - bibtex: "@inproceedings{Ferguson2009,\n abstract = {Sonification is generally considered\ - \ in a statistical data analysis context. This research discusses the development\ - \ of an interface for live control of sonification – for controlling and altering\ - \ sonifications over the course of their playback. This is designed primarily\ - \ with real-time sources in mind, rather than with static datasets, and is intended\ - \ as a performative, live data-art creative activity. The interface enables the\ - \ performer to use the interface as an instrument for iterative interpretations\ - \ and variations of sonifications of multiple datastreams. Using the interface,\ - \ the performer can alter the scale, granularity, timbre, hierarchy of elements,\ - \ spatialisation, spectral filtering, key/modality, rhythmic distribution and\ - \ register ‘on-the-fly’ to both perform data-generated music, and investigate\ - \ data in a live exploratory, interactive manner.},\n address = {Pittsburgh, PA,\ - \ United States},\n author = {Ferguson, Sam and Beilharz, Kirsty},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177511},\n issn = {2220-4806},\n keywords\ - \ = {Sonification, Interactive Sonification, Auditory Display. 
},\n pages = {35--36},\n\ - \ title = {An Interface for Live Interactive Sonification},\n url = {http://www.nime.org/proceedings/2009/nime2009_035.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_12 + abstract: 'Joakinator is a wearable interactive interface that allows users to activate + different media materials, such as sound, music, and video, through body gestures. + The device, designed in the context of music and performing arts, integrates surface + electromyogram, force sensors, and machine learning algorithms with tailored-made + software for sonifying muscle-tone and force. This allows the body to reflect + expressively the content of the media and the architecture of the device. Recently, + we have started to investigate the potential of Joakinator to alter body perception + in the context of Joakinator, a European Research Council Project focused on the + transformations of body perception through the use of interactive sound/haptics + technology. At NIME-2023, we will showcase Joakinator and invite visitors to experience + the device firsthand. Visitors will have the opportunity to try on the device, + observe others using it, and reflect on its capabilities to transform body movement + and perception through the sonification of muscle-tone and force. Overall, Joakinator + is a technology that pushes the boundaries of body-computer interaction and opens + new possibilities for human-computer interaction and expression.' + address: 'Mexico City, Mexico' + articleno: 12 + author: Joaquín R. Díaz Durán and Laia Turmo Vidal and Ana Tajadura-Jiménez + bibtex: "@inproceedings{nime2023_12,\n abstract = {Joakinator is a wearable interactive\ + \ interface that allows users to activate different media materials, such as sound,\ + \ music, and video, through body gestures. 
The device, designed in the context\ + \ of music and performing arts, integrates surface electromyogram, force sensors,\ + \ and machine learning algorithms with tailored-made software for sonifying muscle-tone\ + \ and force. This allows the body to reflect expressively the content of the media\ + \ and the architecture of the device. Recently, we have started to investigate\ + \ the potential of Joakinator to alter body perception in the context of Joakinator,\ + \ a European Research Council Project focused on the transformations of body perception\ + \ through the use of interactive sound/haptics technology. At NIME-2023, we will\ + \ showcase Joakinator and invite visitors to experience the device firsthand.\ + \ Visitors will have the opportunity to try on the device, observe others using\ + \ it, and reflect on its capabilities to transform body movement and perception\ + \ through the sonification of muscle-tone and force. Overall, Joakinator is a\ + \ technology that pushes the boundaries of body-computer interaction and opens\ + \ new possibilities for human-computer interaction and expression.},\n address\ + \ = {Mexico City, Mexico},\n articleno = {12},\n author = {Joaquín R. 
Díaz Durán\ + \ and Laia Turmo Vidal and Ana Tajadura-Jiménez},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {4},\n pages = {94--97},\n title = {Joakinator: An Interface for\ + \ Transforming Body Movement and Perception through Machine Learning and Sonification\ + \ of Muscle-Tone and Force.},\n track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_12.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177511 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Sonification, Interactive Sonification, Auditory Display. ' - pages: 35--36 - title: An Interface for Live Interactive Sonification - url: http://www.nime.org/proceedings/2009/nime2009_035.pdf - year: 2009 + month: May + numpages: 4 + pages: 94--97 + title: 'Joakinator: An Interface for Transforming Body Movement and Perception through + Machine Learning and Sonification of Muscle-Tone and Force.' + track: Demos + url: http://nime.org/proceedings/2023/nime2023_12.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Reben2009 - abstract: 'In this project we have developed reactive instruments for performance. - Reactive instruments provide feedback for the performer thereby providing a more - dynamic experience. This is achieved through the use of haptics and robotics. - Haptics provide a feedback system to the control surface. Robotics provides a - way to actuate the instruments and their control surfaces. This allows a highly - coordinated "dance" between performer and the instrument. An application for this - idea is presented as a linear slide interface. Reactive interfaces represent a - dynamic way for music to be portrayed in performance. 
' - address: 'Pittsburgh, PA, United States' - author: 'Reben, Alexander and Laibowitz, Mat and Paradiso, Joseph A.' - bibtex: "@inproceedings{Reben2009,\n abstract = {In this project we have developed\ - \ reactive instruments for performance. Reactive instruments provide feedback\ - \ for the performer thereby providing a more dynamic experience. This is achieved\ - \ through the use of haptics and robotics. Haptics provide a feedback system to\ - \ the control surface. Robotics provides a way to actuate the instruments and\ - \ their control surfaces. This allows a highly coordinated \"dance\" between performer\ - \ and the instrument. An application for this idea is presented as a linear slide\ - \ interface. Reactive interfaces represent a dynamic way for music to be portrayed\ - \ in performance. },\n address = {Pittsburgh, PA, United States},\n author = {Reben,\ - \ Alexander and Laibowitz, Mat and Paradiso, Joseph A.},\n booktitle = {Proceedings\ + ID: nime2023_13 + abstract: 'The novelty and usefulness of the distributed signal mapping framework + libmapper has been demonstrated in many projects and publications, yet its technical + entry and operation requirements are often too high to be feasible as a mapping + option for less-technical users. This paper focuses on completing key development + tasks to overcome these barriers including improvements to software distribution + and mapping session management. The impact of these changes was evaluated by asking + several artists to design an interactive audiovisual installation using libmapper. + Observations and feedback from the artists throughout their projects let us assess + the impact of the developments on the usability of the framework, suggesting key + development principles for related tools created in research contexts.' + address: 'Mexico City, Mexico' + articleno: 13 + author: Brady Boettcher and Eduardo A. L. 
Meneses and Christian Frisson and Marcelo + Wanderley and Joseph Malloch + bibtex: "@inproceedings{nime2023_13,\n abstract = {The novelty and usefulness of\ + \ the distributed signal mapping framework libmapper has been demonstrated in\ + \ many projects and publications, yet its technical entry and operation requirements\ + \ are often too high to be feasible as a mapping option for less-technical users.\ + \ This paper focuses on completing key development tasks to overcome these barriers\ + \ including improvements to software distribution and mapping session management.\ + \ The impact of these changes was evaluated by asking several artists to design\ + \ an interactive audiovisual installation using libmapper. Observations and feedback\ + \ from the artists throughout their projects let us assess the impact of the developments\ + \ on the usability of the framework, suggesting key development principles for\ + \ related tools created in research contexts.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {13},\n author = {Brady Boettcher and Eduardo A. L. 
Meneses and\ + \ Christian Frisson and Marcelo Wanderley and Joseph Malloch},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177663},\n issn = {2220-4806},\n keywords = {haptics,\ - \ robotics, dynamic interfaces },\n pages = {37--38},\n title = {Responsive Music\ - \ Interfaces for Performance},\n url = {http://www.nime.org/proceedings/2009/nime2009_037.pdf},\n\ - \ year = {2009}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1177663 - issn: 2220-4806 - keywords: 'haptics, robotics, dynamic interfaces ' - pages: 37--38 - title: Responsive Music Interfaces for Performance - url: http://www.nime.org/proceedings/2009/nime2009_037.pdf - year: 2009 - - -- ENTRYTYPE: inproceedings - ID: Lai2009 - abstract: 'Hands On Stage, designed from a percussionist''s perspective, is a new - performance interface designed for audiovisual improvisation. It comprises a custom-built - table interface and a performance system programmed in two environments, SuperCollider - 3 and Isadora. This paper traces the interface''s evolution over matters of relevant - technology, concept, construction, system design, and its creative outcomes. ' - address: 'Pittsburgh, PA, United States' - author: 'Lai, Chi-Hsia' - bibtex: "@inproceedings{Lai2009,\n abstract = {Hands On Stage, designed from a percussionist's\ - \ perspective, is a new performance interface designed for audiovisual improvisation.\ - \ It comprises a custom-built table interface and a performance system programmed\ - \ in two environments, SuperCollider 3 and Isadora. This paper traces the interface's\ - \ evolution over matters of relevant technology, concept, construction, system\ - \ design, and its creative outcomes. 
},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Lai, Chi-Hsia},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177609},\n\ - \ issn = {2220-4806},\n keywords = {audiovisual, interface design, performance.\ - \ },\n pages = {39--40},\n title = {Hands On Stage : A Sound and Image Performance\ - \ Interface},\n url = {http://www.nime.org/proceedings/2009/nime2009_039.pdf},\n\ - \ year = {2009}\n}\n" + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {8},\n pages = {98--105},\n title = {Addressing Barriers\ + \ for Entry and Operation of a Distributed Signal Mapping Framework},\n track\ + \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_13.pdf},\n year\ + \ = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177609 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'audiovisual, interface design, performance. ' - pages: 39--40 - title: 'Hands On Stage : A Sound and Image Performance Interface' - url: http://www.nime.org/proceedings/2009/nime2009_039.pdf - year: 2009 + month: May + numpages: 8 + pages: 98--105 + title: Addressing Barriers for Entry and Operation of a Distributed Signal Mapping + Framework + track: Papers + url: http://nime.org/proceedings/2023/nime2023_13.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: McDonald2009 - abstract: 'The Vibrobyte is a wireless haptic interface specialized forco-located - musical performance. The hardware is designedaround the open source Arduino platform, - with haptic control data encapsulated in OSC messages, and OSC/hardwarecommunications - handled by Processing. 
The Vibrobyte wasfeatured at the International Computer - Music Conference2008 (ICMC) in a telematic performance between ensembles in Belfast, - Palo Alto (California, USA), and Troy (NewYork, USA).' - address: 'Pittsburgh, PA, United States' - author: 'McDonald, Kyle and Kouttron, Dane and Bahn, Curtis and Braasch, Jonas and - Oliveros, Pauline' - bibtex: "@inproceedings{McDonald2009,\n abstract = {The Vibrobyte is a wireless\ - \ haptic interface specialized forco-located musical performance. The hardware\ - \ is designedaround the open source Arduino platform, with haptic control data\ - \ encapsulated in OSC messages, and OSC/hardwarecommunications handled by Processing.\ - \ The Vibrobyte wasfeatured at the International Computer Music Conference2008\ - \ (ICMC) in a telematic performance between ensembles in Belfast, Palo Alto (California,\ - \ USA), and Troy (NewYork, USA).},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {McDonald, Kyle and Kouttron, Dane and Bahn, Curtis and Braasch, Jonas\ - \ and Oliveros, Pauline},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177627},\n\ - \ issn = {2220-4806},\n keywords = {haptics,interface,nime09,performance,telematic},\n\ - \ pages = {41--42},\n title = {The Vibrobyte : A Haptic Interface for Co-Located\ - \ Performance},\n url = {http://www.nime.org/proceedings/2009/nime2009_041.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_14 + abstract: 'In this paper, we reflect on the focus of “newness” in NIME research + and practice and argue that there is a missing O (for “Old”) in framing our academic + discourse. A systematic review of the last year’s conference proceedings reveals + that most papers do, indeed, present new instruments, interfaces, or pieces of + technology. Comparably few papers focus on the prolongation of existing NIMEs. 
+ Our meta-analysis identifies four main categories from these papers: (1) reuse, + (2) update, (3) complement, and (4) long-term engagement. We discuss how focusing + more on these four types of NIME development and engagement can be seen as an + approach to increase sustainability.' + address: 'Mexico City, Mexico' + articleno: 14 + author: Raul Masu and Fabio Morreale and Alexander Refsum Jensenius + bibtex: "@inproceedings{nime2023_14,\n abstract = {In this paper, we reflect on\ + \ the focus of “newness” in NIME research and practice and argue that there is\ + \ a missing O (for “Old”) in framing our academic discourse. A systematic review\ + \ of the last year’s conference proceedings reveals that most papers do, indeed,\ + \ present new instruments, interfaces, or pieces of technology. Comparably few\ + \ papers focus on the prolongation of existing NIMEs. Our meta-analysis identifies\ + \ four main categories from these papers: (1) reuse, (2) update, (3) complement,\ + \ and (4) long-term engagement. 
We discuss how focusing more on these four types\ + \ of NIME development and engagement can be seen as an approach to increase sustainability.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {14},\n author = {Raul Masu and\ + \ Fabio Morreale and Alexander Refsum Jensenius},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {10},\n pages = {106--115},\n title = {The O in NIME: Reflecting\ + \ on the Importance of Reusing and Repurposing Old Musical Instruments},\n track\ + \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_14.pdf},\n year\ + \ = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177627 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: haptics,interface,nime09,performance,telematic - pages: 41--42 - title: 'The Vibrobyte : A Haptic Interface for Co-Located Performance' - url: http://www.nime.org/proceedings/2009/nime2009_041.pdf - year: 2009 + month: May + numpages: 10 + pages: 106--115 + title: 'The O in NIME: Reflecting on the Importance of Reusing and Repurposing Old + Musical Instruments' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_14.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Wiley2009 - abstract: 'This paper describes a cost-effective, modular, open source framework - for a laser interface design that is open to community development, interaction - and user modification. The following paper highlights ways in which we are implementing - the multi-laser gestural interface in musical, visual, and robotic contexts. 
' - address: 'Pittsburgh, PA, United States' - author: 'Wiley, Meason and Kapur, Ajay' - bibtex: "@inproceedings{Wiley2009,\n abstract = {This paper describes a cost-effective,\ - \ modular, open source framework for a laser interface design that is open to\ - \ community development, interaction and user modification. The following paper\ - \ highlights ways in which we are implementing the multi-laser gestural interface\ - \ in musical, visual, and robotic contexts. },\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Wiley, Meason and Kapur, Ajay},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177709},\n issn = {2220-4806},\n keywords = {Lasers,\ - \ photocell sensor, UltraSound, Open Source controller design, digital gamelan,\ - \ digital tanpura },\n pages = {43--44},\n title = {Multi-Laser Gestural Interface\ - \ --- Solutions for Cost-Effective and Open Source Controllers},\n url = {http://www.nime.org/proceedings/2009/nime2009_043.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_15 + abstract: 'This paper introduces Stringesthesia, an interactive and improvised performance + paradigm. Stringesthesia was designed to explore the connection between performer + and audience by using real-time neuroimaging technology that gave the audience + direct access to the performer''s internal mental state and determined the extent + of how the audience could participate with the performer throughout the performance. + Functional near-infrared spectroscopy (fNIRS) technology was used to assess metabolic + activity in a network of brain areas collectively associated with a metric we + call “trust”. 
The real-time measurement of the performer’s level of trust was + visualized behind the performer and used to dynamically restrict or promote audience + participation: e.g., as the performer’s trust in the audience grew, more participatory + stations for playing drums and selecting the performer’s chords were activated. + Throughout the paper we discuss prior work that heavily influenced our design, + conceptual and methodological issues with using fNIRS technology, and our system + architecture. We then describe an employment of this paradigm with a solo guitar + player.' + address: 'Mexico City, Mexico' + articleno: 15 + author: Torin Hopkins and Emily Doherty and Netta Ofer and Suibi Che-Chuan Weng + and Peter Gyory and Chad Tobin and Leanne Hirshfield and Ellen Yi-Luen Do + bibtex: "@inproceedings{nime2023_15,\n abstract = {This paper introduces Stringesthesia,\ + \ an interactive and improvised performance paradigm. Stringesthesia was designed\ + \ to explore the connection between performer and audience by using real-time\ + \ neuroimaging technology that gave the audience direct access to the performer's\ + \ internal mental state and determined the extent of how the audience could participate\ + \ with the performer throughout the performance. Functional near-infrared spectroscopy\ + \ (fNIRS) technology was used to assess metabolic activity in a network of brain\ + \ areas collectively associated with a metric we call “trust”. The real-time measurement\ + \ of the performer’s level of trust was visualized behind the performer and used\ + \ to dynamically restrict or promote audience participation: e.g., as the performer’s\ + \ trust in the audience grew, more participatory stations for playing drums and\ + \ selecting the performer’s chords were activated. Throughout the paper we discuss\ + \ prior work that heavily influenced our design, conceptual and methodological\ + \ issues with using fNIRS technology, and our system architecture. 
We then describe\ + \ an employment of this paradigm with a solo guitar player.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {15},\n author = {Torin Hopkins and Emily Doherty\ + \ and Netta Ofer and Suibi Che-Chuan Weng and Peter Gyory and Chad Tobin and Leanne\ + \ Hirshfield and Ellen Yi-Luen Do},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {7},\n pages = {116--122},\n title = {Stringesthesia: Dynamically Shifting\ + \ Musical Agency Between Audience and Performer Based on Trust in an Interactive\ + \ and Improvised Performance},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_15.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177709 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Lasers, photocell sensor, UltraSound, Open Source controller design, - digital gamelan, digital tanpura ' - pages: 43--44 - title: Multi-Laser Gestural Interface --- Solutions for Cost-Effective and Open - Source Controllers - url: http://www.nime.org/proceedings/2009/nime2009_043.pdf - year: 2009 - + month: May + numpages: 7 + pages: 116--122 + title: 'Stringesthesia: Dynamically Shifting Musical Agency Between Audience and + Performer Based on Trust in an Interactive and Improvised Performance' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_15.pdf + year: 2023 -- ENTRYTYPE: inproceedings - ID: Kanda2009 - abstract: 'We introduce Mims, which is an interactive-multimedia live-performance - system, where pieces rendered by a performer’s voice are translated into floating - objects called voice objects. 
The voice objects are generated from the performer’s - current position on the screen, and absorbed by another flying object called Mims. - Voice sounds are modulated by the behavior of Mims. Performers can control these - objects and sound effects by using their own gestures. Mims provides performers - and their audiences with expressive visual feedback in terms of sound manipulations - and results.' - address: 'Pittsburgh, PA, United States' - author: 'Kanda, Ryo and Hashida, Mitsuyo and Katayose, Haruhiro' - bibtex: "@inproceedings{Kanda2009,\n abstract = {We introduce Mims, which is an\ - \ interactive-multimedia live-performance system, where pieces rendered by a performer’s\ - \ voice are translated into floating objects called voice objects. The voice objects\ - \ are generated from the performer’s current position on the screen, and absorbed\ - \ by another flying object called Mims. Voice sounds are modulated by the behavior\ - \ of Mims. Performers can control these objects and sound effects by using their\ - \ own gestures. Mims provides performers and their audiences with expressive visual\ - \ feedback in terms of sound manipulations and results.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Kanda, Ryo and Hashida, Mitsuyo and Katayose,\ - \ Haruhiro},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177595},\n issn\ - \ = {2220-4806},\n keywords = {Interaction, audience, performer, visualize, sensor,\ - \ physical, gesture. },\n pages = {45--47},\n title = {Mims : Interactive Multimedia\ - \ Live Performance System},\n url = {http://www.nime.org/proceedings/2009/nime2009_045.pdf},\n\ - \ year = {2009}\n}\n" + +- ENTRYTYPE: inproceedings + ID: nime2023_16 + abstract: 'The Serge Modular Archive Instrument (SMAI) is a sample-based computer + emulation of selected patches on the vintage Serge Modular instrument that is + housed at (redacted). 
Hours of recorded audio created by specified parameter combinations + have been analyzed using audio descriptors and machine learning algorithms in + the FluCoMa toolkit. Sound is controlled via (1) a machine learning dimensionality + reduction plot showing all the recorded samples and/or (2) a skeuomorphic graphical + user interface of the patches used to record the sounds. Flexible MIDI and OSC + control of the software enables custom modulation and performance of this archive + from outside the software. Differing from many software synthesis-based emulations, + the SMAI aims to capture and archive the idiosyncrasies of vintage hardware as + digital audio samples; compare and contrast skeuomorphic and machine learning + enabled modes of exploring vintage sounds; and create a flexible instrument for + creatively performing this archive.' + address: 'Mexico City, Mexico' + articleno: 16 + author: Ted Moore and Jean Brazeau + bibtex: "@inproceedings{nime2023_16,\n abstract = {The Serge Modular Archive Instrument\ + \ (SMAI) is a sample-based computer emulation of selected patches on the vintage\ + \ Serge Modular instrument that is housed at (redacted). Hours of recorded audio\ + \ created by specified parameter combinations have been analyzed using audio descriptors\ + \ and machine learning algorithms in the FluCoMa toolkit. Sound is controlled\ + \ via (1) a machine learning dimensionality reduction plot showing all the recorded\ + \ samples and/or (2) a skeuomorphic graphical user interface of the patches used\ + \ to record the sounds. 
Flexible MIDI and OSC control of the software enables\ + \ custom modulation and performance of this archive from outside the software.\ + \ Differing from many software synthesis-based emulations, the SMAI aims to capture\ + \ and archive the idiosyncrasies of vintage hardware as digital audio samples;\ + \ compare and contrast skeuomorphic and machine learning enabled modes of exploring\ + \ vintage sounds; and create a flexible instrument for creatively performing this\ + \ archive.},\n address = {Mexico City, Mexico},\n articleno = {16},\n author =\ + \ {Ted Moore and Jean Brazeau},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {5},\n pages = {123--127},\n title = {Serge Modular Archive Instrument (SMAI):\ + \ Bridging Skeuomorphic & Machine Learning Enabled Interfaces},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_16.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177595 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Interaction, audience, performer, visualize, sensor, physical, gesture. ' - pages: 45--47 - title: 'Mims : Interactive Multimedia Live Performance System' - url: http://www.nime.org/proceedings/2009/nime2009_045.pdf - year: 2009 + month: May + numpages: 5 + pages: 123--127 + title: 'Serge Modular Archive Instrument (SMAI): Bridging Skeuomorphic & Machine + Learning Enabled Interfaces' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_16.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Goto2009a - abstract: 'This is intended to introduce the system, which combines BodySuit, especially - Powered Suit, and Second Life, as well as its possibilities and its uses in a - musical performance application. 
The system which we propose contains both a gesture - controller and robots at the same time. In this system, the Data Suit, BodySuit - controls the avatar in Second Life and Second Life controls the exoskeleton, Powered - Suit in real time. These are related with each other in conjunction with Second - Life in Internet. BodySuit doesn''t contain a hand-held controller. A performer, - for example a dancer, wears a suit. Gestures are transformed into electronic signals - by sensors. Powered Suit is another suit that a dancer wears, but gestures are - generated by motors. This is a sort of wearable robot. Second Life is software - that is developed by Linden Lab. It allows creating a virtual world and a virtual - human (avatar) in Internet. Working together with BodySuit, Powered Suit, and - Second Life the idea behind the system is that a human body is augmented by electronic - signals and is reflected in a virtual world in order to be able to perform interactively. ' - address: 'Pittsburgh, PA, United States' - author: 'Goto, Suguru and Powell, Rob' - bibtex: "@inproceedings{Goto2009a,\n abstract = {This is intended to introduce the\ - \ system, which combines BodySuit, especially Powered Suit, and Second Life, as\ - \ well as its possibilities and its uses in a musical performance application.\ - \ The system which we propose contains both a gesture controller and robots at\ - \ the same time. In this system, the Data Suit, BodySuit controls the avatar in\ - \ Second Life and Second Life controls the exoskeleton, Powered Suit in real time.\ - \ These are related with each other in conjunction with Second Life in Internet.\ - \ BodySuit doesn't contain a hand-held controller. A performer, for example a\ - \ dancer, wears a suit. Gestures are transformed into electronic signals by sensors.\ - \ Powered Suit is another suit that a dancer wears, but gestures are generated\ - \ by motors. This is a sort of wearable robot. 
Second Life is software that is\ - \ developed by Linden Lab. It allows creating a virtual world and a virtual human\ - \ (avatar) in Internet. Working together with BodySuit, Powered Suit, and Second\ - \ Life the idea behind the system is that a human body is augmented by electronic\ - \ signals and is reflected in a virtual world in order to be able to perform interactively.\ - \ },\n address = {Pittsburgh, PA, United States},\n author = {Goto, Suguru and\ - \ Powell, Rob},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177559},\n\ - \ issn = {2220-4806},\n keywords = {artificial intelligence,gesture controller,humanoid\ - \ robot,interaction,internet,nime09,robot},\n pages = {48--49},\n title = {netBody\ - \ --- \"Augmented Body and Virtual Body II\" with the System, BodySuit, Powered\ - \ Suit and Second Life --- Its Introduction of an Application of the System},\n\ - \ url = {http://www.nime.org/proceedings/2009/nime2009_048.pdf},\n year = {2009}\n\ - }\n" + ID: nime2023_17 + abstract: 'We present a study of a freehand musical system to investigate musicians'' + experiences related to performance in augmented reality (AR). Head-mounted mixed + reality computers present opportunities for natural gestural control in three + dimensions, particularly when using hand-tracking in a creative interface. Existing + musical interfaces with head-mounted displays use dedicated input devices that + are not designed specifically for musical gestures and may not support appropriate + interactions. We are yet to see widespread adoption of head-mounted AR musical + instruments. We conducted an empirical study to evaluate musicians'' (N=20) experience + of performing with a freehand musical interface. 
The results suggest that the + design of freehand musical interaction in the AR space is highly learnable and + explorable, and that such systems can leverage unique aspects of mobility, space + and sound to deliver an engaging and playful musical experience. The mobile musical + experience with a spatial interface design allowed performers to be more bodily + engaged and facilitated gestural exploration for musical creativity. This work + contributes to a more developed understanding of potentials and challenges in + AR-based interface design for musical creativity.' + address: 'Mexico City, Mexico' + articleno: 17 + author: Yichen Wang and Mingze Xi and Matt Adcock and Charles Patrick Martin + bibtex: "@inproceedings{nime2023_17,\n abstract = {We present a study of a freehand\ + \ musical system to investigate musicians' experiences related to performance\ + \ in augmented reality (AR). Head-mounted mixed reality computers present opportunities\ + \ for natural gestural control in three dimensions, particularly when using hand-tracking\ + \ in a creative interface. Existing musical interfaces with head-mounted displays\ + \ use dedicated input devices that are not designed specifically for musical gestures\ + \ and may not support appropriate interactions. We are yet to see widespread adoption\ + \ of head-mounted AR musical instruments. We conducted an empirical study to evaluate\ + \ musicians' (N=20) experience of performing with a freehand musical interface.\ + \ The results suggest that the design of freehand musical interaction in the AR\ + \ space is highly learnable and explorable, and that such systems can leverage\ + \ unique aspects of mobility, space and sound to deliver an engaging and playful\ + \ musical experience. The mobile musical experience with a spatial interface design\ + \ allowed performers to be more bodily engaged and facilitated gestural exploration\ + \ for musical creativity. 
This work contributes to a more developed understanding\ + \ of potentials and challenges in AR-based interface design for musical creativity.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {17},\n author = {Yichen Wang\ + \ and Mingze Xi and Matt Adcock and Charles Patrick Martin},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {6},\n pages = {128--133},\n title = {Mobility, Space\ + \ and Sound Activate Expressive Musical Experience in Augmented Reality},\n track\ + \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_17.pdf},\n year\ + \ = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177559 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'artificial intelligence,gesture controller,humanoid robot,interaction,internet,nime09,robot' - pages: 48--49 - title: 'netBody --- "Augmented Body and Virtual Body II" with the System, BodySuit, - Powered Suit and Second Life --- Its Introduction of an Application of the System' - url: http://www.nime.org/proceedings/2009/nime2009_048.pdf - year: 2009 + month: May + numpages: 6 + pages: 128--133 + title: 'Mobility, Space and Sound Activate Expressive Musical Experience in Augmented + Reality' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_17.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Ogawa2009 - abstract: 'We developed a system called Life Game Orchestra that generates music - by translating cellular patterns of Conway''s Game of Life into musical scales. - A performer can compose music by controlling varying cell patterns and sounds - with visual and auditory fun. A performer assigns the elements of tone to two-dimensional - cell patterns in the matrix of the Game of Life. 
Our system searches defined cell - patterns in the varying matrix dynamically. If the patterns are matched, corresponding - tones are generated. A performer can make cells in the matrix by moving in front - of a camera and interactively influencing the generation of music. The progress - of the Game of Life is controlled with a clock defined by the performer to configure - the groove of the music. By running multiple matrices with different pattern mapping, - clock timing, and instruments, we can perform an ensemble. The Life Game Orchestra - is a fusion system of the design of a performer and the emergence of cellular - automata as a complex system. ' - address: 'Pittsburgh, PA, United States' - author: 'Ogawa, Keisuke and Kuhara, Yasuo' - bibtex: "@inproceedings{Ogawa2009,\n abstract = {We developed a system called Life\ - \ Game Orchestra that generates music by translating cellular patterns of Conway's\ - \ Game of Life into musical scales. A performer can compose music by controlling\ - \ varying cell patterns and sounds with visual and auditory fun. A performer assigns\ - \ the elements of tone to two-dimensional cell patterns in the matrix of the Game\ - \ of Life. Our system searches defined cell patterns in the varying matrix dynamically.\ - \ If the patterns are matched, corresponding tones are generated. A performer\ - \ can make cells in the matrix by moving in front of a camera and interactively\ - \ influencing the generation of music. The progress of the Game of Life is controlled\ - \ with a clock defined by the performer to configure the groove of the music.\ - \ By running multiple matrices with different pattern mapping, clock timing, and\ - \ instruments, we can perform an ensemble. The Life Game Orchestra is a fusion\ - \ system of the design of a performer and the emergence of cellular automata as\ - \ a complex system. 
},\n address = {Pittsburgh, PA, United States},\n author =\ - \ {Ogawa, Keisuke and Kuhara, Yasuo},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177647},\n\ - \ issn = {2220-4806},\n keywords = {Conway's Game of Life, Cellular automata,\ - \ Cell pattern, scale, Interactive composition, performance. },\n pages = {50--51},\n\ - \ title = {Life Game Orchestra as an Interactive Music Composition System Translating\ - \ Cellular Patterns of Automata into Musical Scales},\n url = {http://www.nime.org/proceedings/2009/nime2009_050.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_18 + abstract: "Engineering communities that feed the current proliferation of artificial\ + \ intelligence (AI) have historically been slow to recognise the spectrum of societal\ + \ impacts of their work. Frequent controversies around AI applications in creative\ + \ domains demonstrate insufficient consideration of ethical predicaments, but\ + \ the abstract principles of current AI and data ethics documents provide little\ + \ practical guidance.\nPragmatic methods are urgently needed to support developers\ + \ in ethical reflection of their work on creative-AI tools. \n\nIn the wider context\ + \ of value sensitive, people-oriented design, we present an analytical method\ + \ that implements an ethically informed and power-sensitive stakeholder identification\ + \ and mapping: Ethically Aligned Stakeholder Elicitation (EASE). As a case study,\ + \ we test our method in workshops with six research groups that develop AI in\ + \ musical contexts. Our results demonstrate that EASE supports\ncritical self-reflection\ + \ of the research and outreach practices among developers, discloses power relations\ + \ and value tensions in the development processes, and foregrounds opportunities\ + \ for stakeholder engagement. 
This can guide developers and the wider NIME community\ + \ towards ethically aligned research and development of creative-AI." + address: 'Mexico City, Mexico' + articleno: 18 + author: Anna-Kaisa Kaila and Petra Jääskeläinen and Andre Holzapfel + bibtex: "@inproceedings{nime2023_18,\n abstract = {Engineering communities that\ + \ feed the current proliferation of artificial intelligence (AI) have historically\ + \ been slow to recognise the spectrum of societal impacts of their work. Frequent\ + \ controversies around AI applications in creative domains demonstrate insufficient\ + \ consideration of ethical predicaments, but the abstract principles of current\ + \ AI and data ethics documents provide little practical guidance.\nPragmatic methods\ + \ are urgently needed to support developers in ethical reflection of their work\ + \ on creative-AI tools. \n\nIn the wider context of value sensitive, people-oriented\ + \ design, we present an analytical method that implements an ethically informed\ + \ and power-sensitive stakeholder identification and mapping: Ethically Aligned\ + \ Stakeholder Elicitation (EASE). As a case study, we test our method in workshops\ + \ with six research groups that develop AI in musical contexts. Our results demonstrate\ + \ that EASE supports\ncritical self-reflection of the research and outreach practices\ + \ among developers, discloses power relations and value tensions in the development\ + \ processes, and foregrounds opportunities for stakeholder engagement. 
This can\ + \ guide developers and the wider NIME community towards ethically aligned research\ + \ and development of creative-AI.},\n address = {Mexico City, Mexico},\n articleno\ + \ = {18},\n author = {Anna-Kaisa Kaila and Petra Jääskeläinen and Andre Holzapfel},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {8},\n pages = {134--141},\n title\ + \ = {Ethically Aligned Stakeholder Elicitation (EASE): Case Study in Music-AI},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_18.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177647 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Conway''s Game of Life, Cellular automata, Cell pattern, scale, Interactive - composition, performance. ' - pages: 50--51 - title: Life Game Orchestra as an Interactive Music Composition System Translating - Cellular Patterns of Automata into Musical Scales - url: http://www.nime.org/proceedings/2009/nime2009_050.pdf - year: 2009 + month: May + numpages: 8 + pages: 134--141 + title: 'Ethically Aligned Stakeholder Elicitation (EASE): Case Study in Music-AI' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_18.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Toenjes2009 - abstract: 'This article describes the implications of design and materials of computer - controllers used in the context of interactive dance performance. Size, shape, - and layout all influence audience perception of the performer, and materials imply - context for further interpretation of the interactive performance work. 
It describes - the construction of the "Control/Recorder" and the "VideoLyre", two custom computer - control surfaces made for Leonardo''s Chimes, a work by Toenjes, Marchant and - Smith, and how these controllers contribute to theatrical aesthetic intent. ' - address: 'Pittsburgh, PA, United States' - author: 'Toenjes, John' - bibtex: "@inproceedings{Toenjes2009,\n abstract = {This article describes the implications\ - \ of design and materials of computer controllers used in the context of interactive\ - \ dance performance. Size, shape, and layout all influence audience perception\ - \ of the performer, and materials imply context for further interpretation of\ - \ the interactive performance work. It describes the construction of the \"Control/Recorder\"\ - \ and the \"VideoLyre\", two custom computer control surfaces made for Leonardo's\ - \ Chimes, a work by Toenjes, Marchant and Smith, and how these controllers contribute\ - \ to theatrical aesthetic intent. },\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Toenjes, John},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177693},\n\ - \ issn = {2220-4806},\n keywords = {control surface, interface, tactile, natural,\ - \ organic, interactive dance. },\n pages = {52--53},\n title = {Natural Materials\ - \ on Stage : Custom Controllers for Aesthetic Effect},\n url = {http://www.nime.org/proceedings/2009/nime2009_052.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_19 + abstract: 'The state-of-the-art recognition of continuous gestures for control of + musical sound by means of machine learning has two notable constraints. The first + is that the system needs to be trained with individual example gestures, the starting + and ending points of which need to be well defined. 
The second constraint is time + required for the system to recognise that a gesture has occurred, which may prevent + the quick action that musical performance typically requires. This article describes + how a method for unsupervised segmentation of gestures, may be used for delayed + gestural control of a musical system. The system allows a user to perform without + explicitly indicating the starting and ending of gestures in order to train the + machine learning algorithm. To demonstrate the feasibility of the system, an apparatus + for control of musical sound was devised incorporating the time required by the + process into the interaction paradigm. The unsupervised automatic segmentation + method and the concept of delayed control are further proposed to be exploited + in the design and implementation of systems that facilitate seamless human-machine + musical interaction without the need for quick response time, for example when + using broad motion of the human body.' + address: 'Mexico City, Mexico' + articleno: 19 + author: Juan Ignacio Mendoza Garay + bibtex: "@inproceedings{nime2023_19,\n abstract = {The state-of-the-art recognition\ + \ of continuous gestures for control of musical sound by means of machine learning\ + \ has two notable constraints. The first is that the system needs to be trained\ + \ with individual example gestures, the starting and ending points of which need\ + \ to be well defined. The second constraint is time required for the system to\ + \ recognise that a gesture has occurred, which may prevent the quick action that\ + \ musical performance typically requires. This article describes how a method\ + \ for unsupervised segmentation of gestures, may be used for delayed gestural\ + \ control of a musical system. The system allows a user to perform without explicitly\ + \ indicating the starting and ending of gestures in order to train the machine\ + \ learning algorithm. 
To demonstrate the feasibility of the system, an apparatus\ + \ for control of musical sound was devised incorporating the time required by\ + \ the process into the interaction paradigm. The unsupervised automatic segmentation\ + \ method and the concept of delayed control are further proposed to be exploited\ + \ in the design and implementation of systems that facilitate seamless human-machine\ + \ musical interaction without the need for quick response time, for example when\ + \ using broad motion of the human body.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {19},\n author = {Juan Ignacio Mendoza Garay},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {5},\n pages = {142--146},\n title = {The Rearranger Ball:\ + \ Delayed Gestural Control of Musical Sound using Online Unsupervised Temporal\ + \ Segmentation},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_19.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177693 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'control surface, interface, tactile, natural, organic, interactive dance. 
' - pages: 52--53 - title: 'Natural Materials on Stage : Custom Controllers for Aesthetic Effect' - url: http://www.nime.org/proceedings/2009/nime2009_052.pdf - year: 2009 + month: May + numpages: 5 + pages: 142--146 + title: 'The Rearranger Ball: Delayed Gestural Control of Musical Sound using Online + Unsupervised Temporal Segmentation' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_19.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Keith2009 - abstract: 'Deviate generates multiple streams of melodic and rhythmic output in - real-time, according to user-specified control parameters. This performance system - has been implemented using Max 5 [1] within the genre of popular contemporary - electronic music, incorporating techno, IDM, and related forms. The aim of this - project is not musical style synthesis, but to construct an environment in which - a range of creative and musical goals may be achieved. A key aspect is control - over generative processes, as well as consistent yet varied output. An approach - is described which frees the user from determining note-level output while allowing - control to be maintained over larger structural details, focusing specifically - on the melodic aspect of this system. Audio examples are located online at http://www.cetenbaath.com/cb/about-deviate/. ' - address: 'Pittsburgh, PA, United States' - author: 'Keith, Sarah' - bibtex: "@inproceedings{Keith2009,\n abstract = {Deviate generates multiple streams\ - \ of melodic and rhythmic output in real-time, according to user-specified control\ - \ parameters. This performance system has been implemented using Max 5 [1] within\ - \ the genre of popular contemporary electronic music, incorporating techno, IDM,\ - \ and related forms. The aim of this project is not musical style synthesis, but\ - \ to construct an environment in which a range of creative and musical goals may\ - \ be achieved. 
A key aspect is control over generative processes, as well as consistent\ - \ yet varied output. An approach is described which frees the user from determining\ - \ note-level output while allowing control to be maintained over larger structural\ - \ details, focusing specifically on the melodic aspect of this system. Audio examples\ - \ are located online at http://www.cetenbaath.com/cb/about-deviate/. },\n address\ - \ = {Pittsburgh, PA, United States},\n author = {Keith, Sarah},\n booktitle =\ + ID: nime2023_20 + abstract: 'This paper introduces a performative ecosystem created with the aim of + promoting a joint expression between a live coder and an instrumentalist. The + live coding environment is based on TidalCycles, controlling a sample machine + implemented in SuperCollider. The instrumentalist can record short samples of + his/her playing in different buffers, which the live coder can then process. The + ecosystem was intensively used by the first and the second author of this paper + (respectively live coder and violinist) to develop a performance. At the end of + this paper, we provide a number of reflections on the entanglement of the different + roles and agencies that emerged during the rehearsals.' + address: 'Mexico City, Mexico' + articleno: 20 + author: Francesco Dal Rì and Francesca Zanghellini and Raul Masu + bibtex: "@inproceedings{nime2023_20,\n abstract = {This paper introduces a performative\ + \ ecosystem created with the aim of promoting a joint expression between a live\ + \ coder and an instrumentalist. The live coding environment is based on TidalCycles,\ + \ controlling a sample machine implemented in SuperCollider. The instrumentalist\ + \ can record short samples of his/her playing in different buffers, which the\ + \ live coder can then process. The ecosystem was intensively used by the first\ + \ and the second author of this paper (respectively live coder and violinist)\ + \ to develop a performance. 
At the end of this paper, we provide a number of reflections\ + \ on the entanglement of the different roles and agencies that emerged during\ + \ the rehearsals.},\n address = {Mexico City, Mexico},\n articleno = {20},\n author\ + \ = {Francesco Dal Rì and Francesca Zanghellini and Raul Masu},\n booktitle =\ \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177599},\n issn = {2220-4806},\n keywords = {generative,\ - \ performance, laptop, popular music },\n pages = {54--55},\n title = {Controlling\ - \ Live Generative Electronic Music with Deviate},\n url = {http://www.nime.org/proceedings/2009/nime2009_054.pdf},\n\ - \ year = {2009}\n}\n" + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {8},\n pages = {147--154},\n title = {Sharing the Same\ + \ Sound: Reflecting on Interactions between a Live Coder and a Violinist},\n track\ + \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_20.pdf},\n year\ + \ = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177599 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'generative, performance, laptop, popular music ' - pages: 54--55 - title: Controlling Live Generative Electronic Music with Deviate - url: http://www.nime.org/proceedings/2009/nime2009_054.pdf - year: 2009 + month: May + numpages: 8 + pages: 147--154 + title: 'Sharing the Same Sound: Reflecting on Interactions between a Live Coder + and a Violinist' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_20.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Dolphin2009 - abstract: 'SpiralSet is a sound toy incorporating game enginesoftware used in conjunction - with a spectral synthesissound engine constructed in Max/MSP/Jitter. 
SpiralSetwas - presented as an interactive installation piece at theSonic Arts Expo 2008, in - Brighton, UK. A custom madesensor-based interface is used for control of the system.The - user interactions are designed to be quickly accessiblein an installation context, - yet allowing the potential forsonic depth and variation.' - address: 'Pittsburgh, PA, United States' - author: 'Dolphin, Andy' - bibtex: "@inproceedings{Dolphin2009,\n abstract = {SpiralSet is a sound toy incorporating\ - \ game enginesoftware used in conjunction with a spectral synthesissound engine\ - \ constructed in Max/MSP/Jitter. SpiralSetwas presented as an interactive installation\ - \ piece at theSonic Arts Expo 2008, in Brighton, UK. A custom madesensor-based\ - \ interface is used for control of the system.The user interactions are designed\ - \ to be quickly accessiblein an installation context, yet allowing the potential\ - \ forsonic depth and variation.},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Dolphin, Andy},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177497},\n\ - \ issn = {2220-4806},\n keywords = {Sound Toys, Game Engines, Animated Interfaces,\ - \ Spectral Synthesis, Open Work, Max/MSP. },\n pages = {56--57},\n title = {SpiralSet\ - \ : A Sound Toy Utilizing Game Engine Technologies},\n url = {http://www.nime.org/proceedings/2009/nime2009_056.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_21 + abstract: 'In this paper, the authors describe working with and on the T-Tree, a + device that integrates multiple instances of a gestural controller known as the + T-Stick. The T-Tree is used in two public performance contexts; the results of + those performances are summarized, potential improvements to the design of the + hardware and software are introduced, and issues are identified. Improvements + in the T-Tree from the first version are also discussed. 
Finally, the authors + present future design improvements for the T-Tree 2.0.' + address: 'Mexico City, Mexico' + articleno: 21 + author: Paul Buser and Kasey LV Pocius and Linnea Kirby and Marcelo Wanderley + bibtex: "@inproceedings{nime2023_21,\n abstract = {In this paper, the authors describe\ + \ working with and on the T-Tree, a device that integrates multiple instances\ + \ of a gestural controller known as the T-Stick. The T-Tree is used in two public\ + \ performance contexts; the results of those performances are summarized, potential\ + \ improvements to the design of the hardware and software are introduced, and\ + \ issues are identified. Improvements in the T-Tree from the first version are\ + \ also discussed. Finally, the authors present future design improvements for\ + \ the T-Tree 2.0.},\n address = {Mexico City, Mexico},\n articleno = {21},\n author\ + \ = {Paul Buser and Kasey LV Pocius and Linnea Kirby and Marcelo Wanderley},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {5},\n pages = {155--159},\n title\ + \ = {Towards the T-Tree 2.0: Lessons Learned From Performance With a Novel DMI\ + \ and Instrument Hub},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_21.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177497 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Sound Toys, Game Engines, Animated Interfaces, Spectral Synthesis, Open - Work, Max/MSP. 
' - pages: 56--57 - title: 'SpiralSet : A Sound Toy Utilizing Game Engine Technologies' - url: http://www.nime.org/proceedings/2009/nime2009_056.pdf - year: 2009 + month: May + numpages: 5 + pages: 155--159 + title: 'Towards the T-Tree 2.0: Lessons Learned From Performance With a Novel DMI + and Instrument Hub' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_21.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Gao2009 - abstract: 'This paper explores a rapidly developed, new musical interface involving - a touch-screen, 32 pressure sensitive button pads, infrared sensor, 8 knobs and - cross-fader. We provide a versatile platform for computer-based music performance - and production using a human computer interface that has strong visual and tactile - feedback as well as robust software that exploits the strengths of each individual - system component. ' - address: 'Pittsburgh, PA, United States' - author: 'Gao, Mingfei and Hanson, Craig' - bibtex: "@inproceedings{Gao2009,\n abstract = {This paper explores a rapidly developed,\ - \ new musical interface involving a touch-screen, 32 pressure sensitive button\ - \ pads, infrared sensor, 8 knobs and cross-fader. We provide a versatile platform\ - \ for computer-based music performance and production using a human computer interface\ - \ that has strong visual and tactile feedback as well as robust software that\ - \ exploits the strengths of each individual system component. 
},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Gao, Mingfei and Hanson, Craig},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177547},\n issn = {2220-4806},\n keywords\ - \ = {live performance interface,lumi,nime09,pressure},\n pages = {58--59},\n title\ - \ = {LUMI : Live Performance Paradigms Utilizing Software Integrated Touch Screen\ - \ and Pressure Sensitive Button Matrix},\n url = {http://www.nime.org/proceedings/2009/nime2009_058.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_22 + abstract: "Deploying deep learning models on embedded devices is an arduous task:\ + \ oftentimes, there exist no platform-specific instructions, and compilation times\ + \ can be considerably large due to the limited computational resources available\ + \ on-device. Moreover, many music-making applications demand real-time inference.\ + \ Embedded hardware platforms for audio, such as Bela, offer an entry point for\ + \ beginners into physical audio computing; however, the need for cross-compilation\ + \ environments and low-level software development tools for deploying embedded\ + \ deep learning models imposes high entry barriers on non-expert users.\n\nWe\ + \ present a pipeline for deploying neural networks in the Bela embedded hardware\ + \ platform. In our pipeline, we include a tool to record a multichannel dataset\ + \ of sensor signals. Additionally, we provide a dockerised cross-compilation environment\ + \ for faster compilation. With this pipeline, we aim to provide a template for\ + \ programmers and makers to prototype and experiment with neural networks for\ + \ real-time embedded musical applications." + address: 'Mexico City, Mexico' + articleno: 22 + author: Teresa Pelinski and Rodrigo Diaz and Adan L. 
Benito Temprano and Andrew + McPherson + bibtex: "@inproceedings{nime2023_22,\n abstract = {Deploying deep learning models\ + \ on embedded devices is an arduous task: oftentimes, there exist no platform-specific\ + \ instructions, and compilation times can be considerably large due to the limited\ + \ computational resources available on-device. Moreover, many music-making applications\ + \ demand real-time inference. Embedded hardware platforms for audio, such as Bela,\ + \ offer an entry point for beginners into physical audio computing; however, the\ + \ need for cross-compilation environments and low-level software development tools\ + \ for deploying embedded deep learning models imposes high entry barriers on non-expert\ + \ users.\n\nWe present a pipeline for deploying neural networks in the Bela embedded\ + \ hardware platform. In our pipeline, we include a tool to record a multichannel\ + \ dataset of sensor signals. Additionally, we provide a dockerised cross-compilation\ + \ environment for faster compilation. With this pipeline, we aim to provide a\ + \ template for programmers and makers to prototype and experiment with neural\ + \ networks for real-time embedded musical applications.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {22},\n author = {Teresa Pelinski and Rodrigo Diaz\ + \ and Adan L. 
Benito Temprano and Andrew McPherson},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {7},\n pages = {160--166},\n title = {Pipeline for recording\ + \ datasets and running neural networks on the Bela embedded hardware platform},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_22.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177547 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'live performance interface,lumi,nime09,pressure' - pages: 58--59 - title: 'LUMI : Live Performance Paradigms Utilizing Software Integrated Touch Screen - and Pressure Sensitive Button Matrix' - url: http://www.nime.org/proceedings/2009/nime2009_058.pdf - year: 2009 + month: May + numpages: 7 + pages: 160--166 + title: Pipeline for recording datasets and running neural networks on the Bela embedded + hardware platform + track: Papers + url: http://nime.org/proceedings/2023/nime2023_22.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Gillian2009 - abstract: 'This paper presents the SARC EyesWeb Catalog (SEC), agroup of blocks - designed for real-time gesture recognitionthat have been developed for the open - source program EyesWeb. We describe how the recognition of real-time bodymovements - can be used for musician-computer-interaction.' - address: 'Pittsburgh, PA, United States' - author: 'Gillian, Nicholas and Knapp, Benjamin and O''Modhrain, Sile' - bibtex: "@inproceedings{Gillian2009,\n abstract = {This paper presents the SARC\ - \ EyesWeb Catalog (SEC), agroup of blocks designed for real-time gesture recognitionthat\ - \ have been developed for the open source program EyesWeb. 
We describe how the\ - \ recognition of real-time bodymovements can be used for musician-computer-interaction.},\n\ - \ address = {Pittsburgh, PA, United States},\n author = {Gillian, Nicholas and\ - \ Knapp, Benjamin and O'Modhrain, Sile},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177551},\n\ - \ issn = {2220-4806},\n keywords = {SARC EyesWeb Catalog, gesture recognition\ - \ },\n pages = {60--61},\n title = {The {SAR}C EyesWeb Catalog : A Pattern Recognition\ - \ Toolbox for Musician-Computer Interaction},\n url = {http://www.nime.org/proceedings/2009/nime2009_060.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_23 + abstract: 'The bandoneon is a free-reed instrument of great cultural value that + is currently struggling to ensure its conservation as heritage, mainly due to + its complex constitution, the lack of sufficient manufacturers to satisfy the + demand, and the high sales prices that this entails. Our research group has been + working on the task of revitalizing the instrument from a modern perspective, + carrying out musical and scientific research for the creation of an accessible + electronic bandoneon. As the next step in this endeavor, we present a method for + synthesizing the bandoneon sound using multiple wavetable interpolation, and parameter + mappings based on acoustic measurements. We discuss a method for capturing and + selecting the wavetables, the implementation on an embedded platform (Bela Mini), + and the trade-offs between realistic sound and computational efficiency. The synthesizer + runs in real-time and has a polyphony of approximately 12 voices, allowing for + an autonomously sounding electronic instrument.' 
+ address: 'Mexico City, Mexico' + articleno: 23 + author: Juan M Ramos and Pablo Riera and Esteban Calcagno + bibtex: "@inproceedings{nime2023_23,\n abstract = {The bandoneon is a free-reed\ + \ instrument of great cultural value that is currently struggling to ensure its\ + \ conservation as heritage, mainly due to its complex constitution, the lack of\ + \ sufficient manufacturers to satisfy the demand, and the high sales prices that\ + \ this entails. Our research group has been working on the task of revitalizing\ + \ the instrument from a modern perspective, carrying out musical and scientific\ + \ research for the creation of an accessible electronic bandoneon. As the next\ + \ step in this endeavor, we present a method for synthesizing the bandoneon sound\ + \ using multiple wavetable interpolation, and parameter mappings based on acoustic\ + \ measurements. We discuss a method for capturing and selecting the wavetables,\ + \ the implementation on an embedded platform (Bela Mini), and the trade-offs between\ + \ realistic sound and computational efficiency. 
The synthesizer runs in real-time\ + \ and has a polyphony of approximately 12 voices, allowing for an autonomously\ + \ sounding electronic instrument.},\n address = {Mexico City, Mexico},\n articleno\ + \ = {23},\n author = {Juan M Ramos and Pablo Riera and Esteban Calcagno},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {7},\n pages = {167--173},\n title = {An embedded\ + \ wavetable synthesizer for the electronic bandoneon with parameter mappings based\ + \ on acoustical measurements},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_23.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177551 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'SARC EyesWeb Catalog, gesture recognition ' - pages: 60--61 - title: 'The SARC EyesWeb Catalog : A Pattern Recognition Toolbox for Musician-Computer - Interaction' - url: http://www.nime.org/proceedings/2009/nime2009_060.pdf - year: 2009 + month: May + numpages: 7 + pages: 167--173 + title: An embedded wavetable synthesizer for the electronic bandoneon with parameter + mappings based on acoustical measurements + track: Papers + url: http://nime.org/proceedings/2023/nime2023_23.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Nishino2009 - abstract: 'We describe a new method for 2D fiducial tracking. We use region adjacency - information together with angles between regions to encode IDs inside fiducials, - whereas previous research by Kaltenbrunner and Bencina utilize region adjacency - tree. Our method supports a wide ID range and is fast enough to accommodate real-time - video. It is also very robust against false positive detection. 
' - address: 'Pittsburgh, PA, United States' - author: 'Nishino, Hiroki' - bibtex: "@inproceedings{Nishino2009,\n abstract = {We describe a new method for\ - \ 2D fiducial tracking. We use region adjacency information together with angles\ - \ between regions to encode IDs inside fiducials, whereas previous research by\ - \ Kaltenbrunner and Bencina utilize region adjacency tree. Our method supports\ - \ a wide ID range and is fast enough to accommodate real-time video. It is also\ - \ very robust against false positive detection. },\n address = {Pittsburgh, PA,\ - \ United States},\n author = {Nishino, Hiroki},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177643},\n issn = {2220-4806},\n keywords = {fiducial tracking,\ - \ computer vision, tangible user interface, interaction techniques. },\n pages\ - \ = {62--63},\n title = {A {2D} Fiducial Tracking Method based on Topological\ - \ Region Adjacency and Angle Information},\n url = {http://www.nime.org/proceedings/2009/nime2009_062.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_24 + abstract: 'This paper explores the concept of co-creativity in a performance for + feedback-augmented bass clarinet. The bass clarinet is augmented using a loudspeaker + placed on the bell and a supercardiod microphone placed inside the instrument''s + body, allowing for the generation of feedback that is subsequently processed by + a computational system to create new sound material. This feedback loop creates + a symbiotic relationship between the performer and the electronics, resulting + in the co-creation of the final piece, with the performer and the electronics + influencing each other. The result is a unique and ever-evolving musical experience + that poses interesting challenges to the traditional instrument--electronics and + composer--opera relationship. 
This paper reports on both the hardware and software + augmentation of the bass clarinet, and presents "WYPYM - Were you a part of your + mother?", a piece written especially for this augmented instrument and its feedback + system.' + address: 'Mexico City, Mexico' + articleno: 24 + author: Claudio Panariello and Chiara Percivati + bibtex: "@inproceedings{nime2023_24,\n abstract = {This paper explores the concept\ + \ of co-creativity in a performance for feedback-augmented bass clarinet. The\ + \ bass clarinet is augmented using a loudspeaker placed on the bell and a supercardiod\ + \ microphone placed inside the instrument's body, allowing for the generation\ + \ of feedback that is subsequently processed by a computational system to create\ + \ new sound material. This feedback loop creates a symbiotic relationship between\ + \ the performer and the electronics, resulting in the co-creation of the final\ + \ piece, with the performer and the electronics influencing each other. The result\ + \ is a unique and ever-evolving musical experience that poses interesting challenges\ + \ to the traditional instrument--electronics and composer--opera relationship.\ + \ This paper reports on both the hardware and software augmentation of the bass\ + \ clarinet, and presents \"WYPYM - Were you a part of your mother?\", a piece\ + \ written especially for this augmented instrument and its feedback system.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {24},\n author = {Claudio Panariello\ + \ and Chiara Percivati},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {6},\n\ + \ pages = {174--179},\n title = {“WYPYM”: A Study for Feedback-Augmented Bass\ + \ Clarinet},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_24.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the 
International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177643 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'fiducial tracking, computer vision, tangible user interface, interaction - techniques. ' - pages: 62--63 - title: A 2D Fiducial Tracking Method based on Topological Region Adjacency and Angle - Information - url: http://www.nime.org/proceedings/2009/nime2009_062.pdf - year: 2009 + month: May + numpages: 6 + pages: 174--179 + title: '“WYPYM”: A Study for Feedback-Augmented Bass Clarinet' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_24.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Solis2009 - abstract: 'During several decades, the research at Waseda University has been focused - on developing anthropomorphic robots capable performing musical instruments. As - a result of our research efforts, the Waseda Flutist Robot WF-4RIV and the Waseda - Saxophonist Robot WAS-1 have been designed to reproduce the human player performance. - As a long-term goal, we are proposing to enable the interaction between musical - performance robots as well as with human players. In general the communication - of humans within a band is a special case of conventional human social behavior. - Rhythm, harmony and timbre of the music played represent the emotional states - of the musicians. So the development of an artificial entity that participates - in such an interaction may contribute to the better understanding of some of the - mechanisms that enable the communication of humans in musical terms. Therefore, - we are not considering a musical performance robot (MPR) just as a mere sophisticated - MIDI instrument. Instead, its human-like design and the integration of perceptual - capabilities may enable to act on its own autonomous initiative based on models - which consider its own physical constrains. 
In this paper, we present an overview - of our research approaches towards enabling the interaction between musical performance - robots as well as with musicians. ' - address: 'Pittsburgh, PA, United States' - author: 'Solis, Jorge and Ninomiya, Takeshi and Petersen, Klaus and Takeuchi, Masaki - and Takanishi, Atsuo' - bibtex: "@inproceedings{Solis2009,\n abstract = {During several decades, the research\ - \ at Waseda University has been focused on developing anthropomorphic robots capable\ - \ performing musical instruments. As a result of our research efforts, the Waseda\ - \ Flutist Robot WF-4RIV and the Waseda Saxophonist Robot WAS-1 have been designed\ - \ to reproduce the human player performance. As a long-term goal, we are proposing\ - \ to enable the interaction between musical performance robots as well as with\ - \ human players. In general the communication of humans within a band is a special\ - \ case of conventional human social behavior. Rhythm, harmony and timbre of the\ - \ music played represent the emotional states of the musicians. So the development\ - \ of an artificial entity that participates in such an interaction may contribute\ - \ to the better understanding of some of the mechanisms that enable the communication\ - \ of humans in musical terms. Therefore, we are not considering a musical performance\ - \ robot (MPR) just as a mere sophisticated MIDI instrument. 
Instead, its human-like\ - \ design and the integration of perceptual capabilities may enable to act on its\ - \ own autonomous initiative based on models which consider its own physical constrains.\ - \ In this paper, we present an overview of our research approaches towards enabling\ - \ the interaction between musical performance robots as well as with musicians.\ - \ },\n address = {Pittsburgh, PA, United States},\n author = {Solis, Jorge and\ - \ Ninomiya, Takeshi and Petersen, Klaus and Takeuchi, Masaki and Takanishi, Atsuo},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177681},\n issn = {2220-4806},\n\ - \ keywords = {nime09},\n pages = {64--69},\n title = {Anthropomorphic Musical\ - \ Performance Robots at Waseda University : Increasing Understanding of the Nature\ - \ of Human Musical Interaction Abstract},\n url = {http://www.nime.org/proceedings/2009/nime2009_064.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_25 + abstract: 'This paper presents the design of the Brushing Interface, which aims + to transform brushing gestures into a genuine and expressive musical/sonic performance. + To achieve this, a hardware system consisting of a grid of 216 self-made force + sensitive resistor(FSR) sensors and 8 piezo microphones was implemented, which + enables high-fidelity gesture tracking and sound production closely tied with + brushing gestures. The hardware system, including the sensor itself, was made + in a DIY approach, which provides an economical and high-quality design strategy + for implementing a multi-touch interface. Moreover, it is combined with a unique + gesture mapping strategy that integrates multi-dimensional parameter mapping and + continuous gesture tracking, enabling an expressive performance that is highly + flexible to configure in various settings.' 
+ address: 'Mexico City, Mexico' + articleno: 25 + author: Jaehoon Choi + bibtex: "@inproceedings{nime2023_25,\n abstract = {This paper presents the design\ + \ of the Brushing Interface, which aims to transform brushing gestures into a\ + \ genuine and expressive musical/sonic performance. To achieve this, a hardware\ + \ system consisting of a grid of 216 self-made force sensitive resistor(FSR) sensors\ + \ and 8 piezo microphones was implemented, which enables high-fidelity gesture\ + \ tracking and sound production closely tied with brushing gestures. The hardware\ + \ system, including the sensor itself, was made in a DIY approach, which provides\ + \ an economical and high-quality design strategy for implementing a multi-touch\ + \ interface. Moreover, it is combined with a unique gesture mapping strategy that\ + \ integrates multi-dimensional parameter mapping and continuous gesture tracking,\ + \ enabling an expressive performance that is highly flexible to configure in various\ + \ settings.},\n address = {Mexico City, Mexico},\n articleno = {25},\n author\ + \ = {Jaehoon Choi},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {6},\n\ + \ pages = {180--185},\n title = {Brushing Interface - DIY multi-touch interface\ + \ for expressive gestural performance},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_25.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177681 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 64--69 - title: 'Anthropomorphic Musical Performance Robots at Waseda University : Increasing - Understanding of the Nature of Human Musical Interaction Abstract' - url: 
http://www.nime.org/proceedings/2009/nime2009_064.pdf - year: 2009 + month: May + numpages: 6 + pages: 180--185 + title: Brushing Interface - DIY multi-touch interface for expressive gestural performance + track: Papers + url: http://nime.org/proceedings/2023/nime2023_25.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Weinberg2009a - abstract: 'This paper presents an interactive and improvisational jam session, including - human players and two robotic musicians. The project was developed in an effort - to create novel and inspiring music through human-robot collaboration. The jam - session incorporates Shimon, a newly-developed socially-interactive robotic marimba - player, and Haile, a perceptual robotic percussionist developed in previous work. - The paper gives an overview of the musical perception modules, adaptive improvisation - modes and human-robot musical interaction models that were developed for the session. - The paper also addresses the musical output that can be created from increased - interconnections in an expanded multiple-robot multiplehuman ensemble, and suggests - directions for future work. ' - address: 'Pittsburgh, PA, United States' - author: 'Weinberg, Gil and Blosser, Brian and Mallikarjuna, Trishul and Raman, Aparna' - bibtex: "@inproceedings{Weinberg2009a,\n abstract = {This paper presents an interactive\ - \ and improvisational jam session, including human players and two robotic musicians.\ - \ The project was developed in an effort to create novel and inspiring music through\ - \ human-robot collaboration. The jam session incorporates Shimon, a newly-developed\ - \ socially-interactive robotic marimba player, and Haile, a perceptual robotic\ - \ percussionist developed in previous work. The paper gives an overview of the\ - \ musical perception modules, adaptive improvisation modes and human-robot musical\ - \ interaction models that were developed for the session. 
The paper also addresses\ - \ the musical output that can be created from increased interconnections in an\ - \ expanded multiple-robot multiplehuman ensemble, and suggests directions for\ - \ future work. },\n address = {Pittsburgh, PA, United States},\n author = {Weinberg,\ - \ Gil and Blosser, Brian and Mallikarjuna, Trishul and Raman, Aparna},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177705},\n issn = {2220-4806},\n keywords\ - \ = {Robotic musicianship, Shimon, Haile. },\n pages = {70--73},\n title = {The\ - \ Creation of a Multi-Human, Multi-Robot Interactive Jam Session},\n url = {http://www.nime.org/proceedings/2009/nime2009_070.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_26 + abstract: "This paper presents the Digitl, a digital processing system using a reduced\ + \ electric guitar as control input. The physical design is based on three fundamental\ + \ elements: String, Body and Electromagnetic Pickup. The main characteristic of\ + \ the instrument lies is the linear matrix x-y configuration of the strings and\ + \ frets. The purpose of the instrument is the application of individual signal\ + \ processing at each X-Y position.\nIt is described the technical aspects of the\ + \ Digitl, including the design of the matrix configuration and the digital signal\ + \ processing algorithms. Specifically, a set of Max/MSP patches that routes the\ + \ signals from the strings to the processing engine. \nThe experimental results\ + \ confirm the importance of the design and configuration of musical instruments\ + \ in the context of expressive performance." 
+ address: 'Mexico City, Mexico' + articleno: 26 + author: Suso Romaris + bibtex: "@inproceedings{nime2023_26,\n abstract = {This paper presents the Digitl,\ + \ a digital processing system using a reduced electric guitar as control input.\ + \ The physical design is based on three fundamental elements: String, Body and\ + \ Electromagnetic Pickup. The main characteristic of the instrument lies is the\ + \ linear matrix x-y configuration of the strings and frets. The purpose of the\ + \ instrument is the application of individual signal processing at each X-Y position.\n\ + It is described the technical aspects of the Digitl, including the design of the\ + \ matrix configuration and the digital signal processing algorithms. Specifically,\ + \ a set of Max/MSP patches that routes the signals from the strings to the processing\ + \ engine. \nThe experimental results confirm the importance of the design and\ + \ configuration of musical instruments in the context of expressive performance.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {26},\n author = {Suso Romaris},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {5},\n pages = {186--190},\n title\ + \ = {DIGITL A Reduction of Guitar},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_26.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177705 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Robotic musicianship, Shimon, Haile. 
' - pages: 70--73 - title: 'The Creation of a Multi-Human, Multi-Robot Interactive Jam Session' - url: http://www.nime.org/proceedings/2009/nime2009_070.pdf - year: 2009 + month: May + numpages: 5 + pages: 186--190 + title: DIGITL A Reduction of Guitar + track: Papers + url: http://nime.org/proceedings/2023/nime2023_26.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Gong2009 - abstract: 'In this project, we have developed a real-time writing instrument for - music control. The controller, MusicGrip, can capture the subtle dynamics of the - user''s grip while writing or drawing and map this to musical control signals - and sonic outputs. This paper discusses this conversion of the common motor motion - of handwriting into an innovative form of music expression. The presented example - instrument can be used to integrate the composing aspect of music with painting - and writing, creating a new art form from the resultant aural and visual representation - of the collaborative performing process. ' - address: 'Pittsburgh, PA, United States' - author: 'Gong, Nan-Wei and Laibowitz, Mat and Paradiso, Joseph A.' - bibtex: "@inproceedings{Gong2009,\n abstract = {In this project, we have developed\ - \ a real-time writing instrument for music control. The controller, MusicGrip,\ - \ can capture the subtle dynamics of the user's grip while writing or drawing\ - \ and map this to musical control signals and sonic outputs. This paper discusses\ - \ this conversion of the common motor motion of handwriting into an innovative\ - \ form of music expression. The presented example instrument can be used to integrate\ - \ the composing aspect of music with painting and writing, creating a new art\ - \ form from the resultant aural and visual representation of the collaborative\ - \ performing process. 
},\n address = {Pittsburgh, PA, United States},\n author\ - \ = {Gong, Nan-Wei and Laibowitz, Mat and Paradiso, Joseph A.},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177555},\n issn = {2220-4806},\n keywords = {Interactive\ - \ music control, writing instrument, pen controller, MIDI, group performing activity.\ - \ },\n pages = {74--77},\n title = {MusicGrip : A Writing Instrument for Music\ - \ Control},\n url = {http://www.nime.org/proceedings/2009/nime2009_074.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_27 + abstract: 'The sustainability of Digital Musical Instruments (DMIs) is a crucial + concern within the NIME community, not only in the design of the instruments but + also in terms of sustaining the instrument over a prolonged period, promoting + longevity, and minimizing obsolescence. The risk of designing advanced instruments + becoming debris quickly is real if longevity is not actively considered. In this + paper, we present the process of redesigning a crafted DMI to fit a small-scale + production process while considering strategies that render the final design more + sustainable and maximize the object''s lifespan. We present the results of a critical + analysis of this process through a sustainability lens. From this analysis, we + distilled a number of reflections that could help similar design processes or + NIME crafting activities. The most innovative reflections are related to inscribing + sustainability into the practice of using the instruments. From this perspective, + we suggest considering the future user as a designer capable of fixing, adjusting, + redesigning, or hacking the DMI and actively provide possible solutions that can + significantly extend the lifespan of a DMI and, consequently, its sustainability.' 
+ address: 'Mexico City, Mexico' + articleno: 27 + author: Nicolo Merendino and Giacomo Lepri and Antonio Rodà and Raul Masu + bibtex: "@inproceedings{nime2023_27,\n abstract = {The sustainability of Digital\ + \ Musical Instruments (DMIs) is a crucial concern within the NIME community, not\ + \ only in the design of the instruments but also in terms of sustaining the instrument\ + \ over a prolonged period, promoting longevity, and minimizing obsolescence. The\ + \ risk of designing advanced instruments becoming debris quickly is real if longevity\ + \ is not actively considered. In this paper, we present the process of redesigning\ + \ a crafted DMI to fit a small-scale production process while considering strategies\ + \ that render the final design more sustainable and maximize the object's lifespan.\ + \ We present the results of a critical analysis of this process through a sustainability\ + \ lens. From this analysis, we distilled a number of reflections that could help\ + \ similar design processes or NIME crafting activities. 
The most innovative reflections\ + \ are related to inscribing sustainability into the practice of using the instruments.\ + \ From this perspective, we suggest considering the future user as a designer\ + \ capable of fixing, adjusting, redesigning, or hacking the DMI and actively provide\ + \ possible solutions that can significantly extend the lifespan of a DMI and,\ + \ consequently, its sustainability.},\n address = {Mexico City, Mexico},\n articleno\ + \ = {27},\n author = {Nicolo Merendino and Giacomo Lepri and Antonio Rodà and\ + \ Raul Masu},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ + \ issn = {2220-4806},\n month = {May},\n numpages = {9},\n pages = {191--199},\n\ + \ title = {Redesigning the Chowndolo: a Reflection-on-action Analysis to Identify\ + \ Sustainable Strategies for NIMEs Design},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_27.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177555 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Interactive music control, writing instrument, pen controller, MIDI, - group performing activity. ' - pages: 74--77 - title: 'MusicGrip : A Writing Instrument for Music Control' - url: http://www.nime.org/proceedings/2009/nime2009_074.pdf - year: 2009 + month: May + numpages: 9 + pages: 191--199 + title: 'Redesigning the Chowndolo: a Reflection-on-action Analysis to Identify Sustainable + Strategies for NIMEs Design' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_27.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Partridge2009 - abstract: 'Tabletops—and by extension, tabletop computers— naturally facilitate - group work. 
In particular, they provide a fascinating platform for exploring the - possibilities of collaborative audio improvisation. Existing tabletop instruments - (and digital instruments in general) tend to impose either a steep learning curve - on novice players or a frustrating ceiling of expressivity upon experts. We introduce - WallBalls, an intuitive tabletop instrument designed to support both novice and - expert performance. At first glance, WallBalls resembles a toy, game or whimsical - sketchpad, but it quickly reveals itself as a deeply expressive and highly adaptable - sample-based instrument capable of facilitating a startling variety of collaborative - sound art.' - address: 'Pittsburgh, PA, United States' - author: 'Partridge, Grant and Irani, Pourang and Fitzell, Gordon' - bibtex: "@inproceedings{Partridge2009,\n abstract = {Tabletops—and by extension,\ - \ tabletop computers— naturally facilitate group work. In particular, they provide\ - \ a fascinating platform for exploring the possibilities of collaborative audio\ - \ improvisation. Existing tabletop instruments (and digital instruments in general)\ - \ tend to impose either a steep learning curve on novice players or a frustrating\ - \ ceiling of expressivity upon experts. We introduce WallBalls, an intuitive tabletop\ - \ instrument designed to support both novice and expert performance. 
At first\ - \ glance, WallBalls resembles a toy, game or whimsical sketchpad, but it quickly\ - \ reveals itself as a deeply expressive and highly adaptable sample-based instrument\ - \ capable of facilitating a startling variety of collaborative sound art.},\n\ - \ address = {Pittsburgh, PA, United States},\n author = {Partridge, Grant and\ - \ Irani, Pourang and Fitzell, Gordon},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177655},\n\ - \ issn = {2220-4806},\n keywords = {Tabletop computers, collaborative instruments,\ - \ collaborative composition, group improvisation, spatial audio interfaces, customizable\ - \ instruments. },\n pages = {78--81},\n title = {Let Loose with WallBalls, a Collaborative\ - \ Tabletop Instrument for Tomorrow},\n url = {http://www.nime.org/proceedings/2009/nime2009_078.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_28 + abstract: 'WebChucK is ChucK—a strongly-timed computer music programming language—running + on the web. Recent advancements in browser technology (including WebAssembly and + the Web Audio API’s AudioWorklet interface) have enabled languages written in + C/C++ (like ChucK) to run in web browsers with nearly native-code performance. + Early adopters have explored the many practical and creative possibilities that + WebChucK enables, ranging from a WebChucK integrated development environment to + interactive browser-based audiovisual experiences. WebChucK has also been adopted + as the programming platform in an introductory computer music course at Stanford + University. Importantly, by running in any browser, WebChucK broadens and simplifies + access to computer music programming, opening the door for new users and creative + workflows. 
In this paper, we discuss WebChucK and its applications to date, explain + how the tool was designed and implemented, and evaluate the unique affordances + of combining computer music programming with a web development workflow.' + address: 'Mexico City, Mexico' + articleno: 28 + author: Michael Mulshine and Ge Wang and Chris Chafe and Jack Atherton and terry + feng and Celeste Betancur + bibtex: "@inproceedings{nime2023_28,\n abstract = {WebChucK is ChucK—a strongly-timed\ + \ computer music programming language—running on the web. Recent advancements\ + \ in browser technology (including WebAssembly and the Web Audio API’s AudioWorklet\ + \ interface) have enabled languages written in C/C++ (like ChucK) to run in web\ + \ browsers with nearly native-code performance. Early adopters have explored the\ + \ many practical and creative possibilities that WebChucK enables, ranging from\ + \ a WebChucK integrated development environment to interactive browser-based audiovisual\ + \ experiences. WebChucK has also been adopted as the programming platform in an\ + \ introductory computer music course at Stanford University. Importantly, by running\ + \ in any browser, WebChucK broadens and simplifies access to computer music programming,\ + \ opening the door for new users and creative workflows. 
In this paper, we discuss\ + \ WebChucK and its applications to date, explain how the tool was designed and\ + \ implemented, and evaluate the unique affordances of combining computer music\ + \ programming with a web development workflow.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {28},\n author = {Michael Mulshine and Ge Wang and Chris Chafe and\ + \ Jack Atherton and terry feng and Celeste Betancur},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {6},\n pages = {200--205},\n title = {WebChucK: Computer\ + \ Music Programming on the Web},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_28.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177655 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Tabletop computers, collaborative instruments, collaborative composition, - group improvisation, spatial audio interfaces, customizable instruments. ' - pages: 78--81 - title: 'Let Loose with WallBalls, a Collaborative Tabletop Instrument for Tomorrow' - url: http://www.nime.org/proceedings/2009/nime2009_078.pdf - year: 2009 + month: May + numpages: 6 + pages: 200--205 + title: 'WebChucK: Computer Music Programming on the Web' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_28.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Min2009 - abstract: 'It is surely not difficult for anyone with experience in thesubject known - as Music Theory to realize that there is avery definite and precise relationship - between music andmathematics. This paper describes the SoriSu, a newelectronic - musical instrument based on Sudoku puzzles,which probe the expressive possibilities - of mathematicalconcepts in music. 
The concept proposes a new way ofmapping numbers - to sound. This interface was designed toprovide easy and pleasing access to music - for users whoare unfamiliar or uncomfortable with current musicaldevices. The - motivation behind the project is presented, aswell as hardware and software design.' - address: 'Pittsburgh, PA, United States' - author: 'Min, Hye Ki' - bibtex: "@inproceedings{Min2009,\n abstract = {It is surely not difficult for anyone\ - \ with experience in thesubject known as Music Theory to realize that there is\ - \ avery definite and precise relationship between music andmathematics. This paper\ - \ describes the SoriSu, a newelectronic musical instrument based on Sudoku puzzles,which\ - \ probe the expressive possibilities of mathematicalconcepts in music. The concept\ - \ proposes a new way ofmapping numbers to sound. This interface was designed toprovide\ - \ easy and pleasing access to music for users whoare unfamiliar or uncomfortable\ - \ with current musicaldevices. The motivation behind the project is presented,\ - \ aswell as hardware and software design.},\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Min, Hye Ki},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177631},\n\ - \ issn = {2220-4806},\n keywords = {Numbers, Game Interfaces, Mathematics and\ - \ Sound, Mathematics in Music, Puzzles, Tangible User Interfaces. },\n pages =\ - \ {82--85},\n title = {SORISU : Sound with Numbers},\n url = {http://www.nime.org/proceedings/2009/nime2009_082.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_29 + abstract: 'We present an affordable, lightweight, and highly portable multichannel + audio solution for surround sound applications and installations. The system was + developed for the “Sound in Space” course, taught by on of the authors at CCRMA + in the winter quarter of 2021, when education was fully remote. 
Students in the + course were able to listen to and create surround sound compositions from their + homes or dorm rooms. Beyond the course, artists have demonstrated the versatility + and creative affordances of this cheap, lightweight, and highly portable setup + in sound installations and other custom speaker arrays. Such an affordable and + versatile system has the potential to provide more students and artists access + to spatialized sound production and multichannel audio in their work, enabling + deeper technical education and creative applications ranging from Ambisonics to + sound installations. Importantly, the transportability and ease of assembling + this system enables multichannel audio work to be developed outside of the physical + confines of academic institutions, including in spaces like apartments, garages, + the outdoors, and more. This paper steps through the process of creating such + a system, detailing the challenges faced and reflecting on the affordances in + educational and creative usage.' + address: 'Mexico City, Mexico' + articleno: 29 + author: Fernando Lopez-Lezcano and Michael Mulshine + bibtex: "@inproceedings{nime2023_29,\n abstract = {We present an affordable, lightweight,\ + \ and highly portable multichannel audio solution for surround sound applications\ + \ and installations. The system was developed for the “Sound in Space” course,\ + \ taught by on of the authors at CCRMA in the winter quarter of 2021, when education\ + \ was fully remote. Students in the course were able to listen to and create surround\ + \ sound compositions from their homes or dorm rooms. 
Beyond the course, artists\ + \ have demonstrated the versatility and creative affordances of this cheap, lightweight,\ + \ and highly portable setup in sound installations and other custom speaker arrays.\ + \ Such an affordable and versatile system has the potential to provide more students\ + \ and artists access to spatialized sound production and multichannel audio in\ + \ their work, enabling deeper technical education and creative applications ranging\ + \ from Ambisonics to sound installations. Importantly, the transportability and\ + \ ease of assembling this system enables multichannel audio work to be developed\ + \ outside of the physical confines of academic institutions, including in spaces\ + \ like apartments, garages, the outdoors, and more. This paper steps through the\ + \ process of creating such a system, detailing the challenges faced and reflecting\ + \ on the affordances in educational and creative usage.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {29},\n author = {Fernando Lopez-Lezcano and Michael\ + \ Mulshine},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ + \ issn = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {206--211},\n\ + \ title = {Affordable Speaker Arrays for Education and Artists},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_29.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177631 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Numbers, Game Interfaces, Mathematics and Sound, Mathematics in Music, - Puzzles, Tangible User Interfaces. 
' - pages: 82--85 - title: 'SORISU : Sound with Numbers' - url: http://www.nime.org/proceedings/2009/nime2009_082.pdf - year: 2009 + month: May + numpages: 6 + pages: 206--211 + title: Affordable Speaker Arrays for Education and Artists + track: Papers + url: http://nime.org/proceedings/2023/nime2023_29.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Mann2009 - abstract: 'This paper describes the inspiration and implementation of a tactile, - tabletop synthesizer/step sequencer. The Tactus is an expandable and inexpensive - musical interface for the creation of loop-based music inspired by the Bubblegum - Sequencer [2]. An optical camera, coupled with a computer running Max/MSP/Jitter - can turn almost any matrix-like object into a step sequencer. The empty cells - in the gridded object are filled with a fitting, colored object; the placement - of which is analogous to adding an instrument or switching on a box in a step - sequencer grid. The color and column position of every element in the matrix are - used as parameters for a synthesizer while the row position of that element corresponds - to the moment within the loop that entry is sounded. The two dimensional array - can be positioned anywhere within the camera''s visibility. Both the translation - and rotation of the physical matrix are assigned to global parameters that affect - the music while preserving the color and order of the cells. A rotation of 180 - degrees, for example, will not reverse the sequence, but instead change an assigned - global parameter.' - address: 'Pittsburgh, PA, United States' - author: 'Mann, Yotam and Lubow, Jeff and Freed, Adrian' - bibtex: "@inproceedings{Mann2009,\n abstract = {This paper describes the inspiration\ - \ and implementation of a tactile, tabletop synthesizer/step sequencer. The Tactus\ - \ is an expandable and inexpensive musical interface for the creation of loop-based\ - \ music inspired by the Bubblegum Sequencer [2]. 
An optical camera, coupled with\ - \ a computer running Max/MSP/Jitter can turn almost any matrix-like object into\ - \ a step sequencer. The empty cells in the gridded object are filled with a fitting,\ - \ colored object; the placement of which is analogous to adding an instrument\ - \ or switching on a box in a step sequencer grid. The color and column position\ - \ of every element in the matrix are used as parameters for a synthesizer while\ - \ the row position of that element corresponds to the moment within the loop that\ - \ entry is sounded. The two dimensional array can be positioned anywhere within\ - \ the camera's visibility. Both the translation and rotation of the physical matrix\ - \ are assigned to global parameters that affect the music while preserving the\ - \ color and order of the cells. A rotation of 180 degrees, for example, will not\ - \ reverse the sequence, but instead change an assigned global parameter.},\n address\ - \ = {Pittsburgh, PA, United States},\n author = {Mann, Yotam and Lubow, Jeff and\ - \ Freed, Adrian},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177625},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {86--89},\n title = {The\ - \ Tactus : a Tangible , Rhythmic Grid Interface Using Found-Objects},\n url =\ - \ {http://www.nime.org/proceedings/2009/nime2009_086.pdf},\n year = {2009}\n}\n" + ID: nime2023_30 + abstract: 'There are multiple barriers to the long term use of digital musical instruments. + Among several issues related to instrument accessibility, many DMIs remain as + prototypes in research labs never becoming a robust and stable instrument. Technical + support is an important part of the long term use of a DMI. Though all musical + instruments can eventually break, managing how they are going to be fixed and + built within a research organisation can help with the continued usage of the + instrument. 
We apply reliability analysis techniques to estimate the reliability, + availability and maintainability characteristics of the T-Stick. Using these characteristics + we estimate the amount of spare parts needed to maintain a 99% availability target + for the T-Stick. This analysis provides insights on expected maintenance time, + costs, and personnel needed when supporting and building DMIs.' + address: 'Mexico City, Mexico' + articleno: 30 + author: Albert-Ngabo Niyonsenga and Marcelo Wanderley + bibtex: "@inproceedings{nime2023_30,\n abstract = {There are multiple barriers to\ + \ the long term use of digital musical instruments. Among several issues related\ + \ to instrument accessibility, many DMIs remain as prototypes in research labs\ + \ never becoming a robust and stable instrument. Technical support is an important\ + \ part of the long term use of a DMI. Though all musical instruments can eventually\ + \ break, managing how they are going to be fixed and built within a research organisation\ + \ can help with the continued usage of the instrument. We apply reliability analysis\ + \ techniques to estimate the reliability, availability and maintainability characteristics\ + \ of the T-Stick. Using these characteristics we estimate the amount of spare\ + \ parts needed to maintain a 99% availability target for the T-Stick. 
This analysis\ + \ provides insights on expected maintenance time, costs, and personnel needed\ + \ when supporting and building DMIs.},\n address = {Mexico City, Mexico},\n articleno\ + \ = {30},\n author = {Albert-Ngabo Niyonsenga and Marcelo Wanderley},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {7},\n pages = {212--218},\n title = {Tools and\ + \ Techniques for the Maintenance and Support of Digital Musical Instruments},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_30.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177625 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 86--89 - title: 'The Tactus : a Tangible , Rhythmic Grid Interface Using Found-Objects' - url: http://www.nime.org/proceedings/2009/nime2009_086.pdf - year: 2009 + month: May + numpages: 7 + pages: 212--218 + title: Tools and Techniques for the Maintenance and Support of Digital Musical Instruments + track: Papers + url: http://nime.org/proceedings/2023/nime2023_30.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Hockman2009 - abstract: 'This paper presents a method for using a runner''s pacefor real-time - control of the time-scaling facility of a phasevocoder, resulting in the automated - synchronization of anaudio track tempo to the generated control signal. The increase - in usage of portable music players during exercisehas given rise to the development - of new personal exerciseaids, most notably the Nike+iPod system, which relies - onembedded sensor technologies to provide kinematic workout statistics. There - are also systems that select songs basedon the measured step frequency of a runner. 
- The proposedsystem also uses the pace of a runner, but this information isused - to change the tempo of the music.' - address: 'Pittsburgh, PA, United States' - author: 'Hockman, Jason A. and Wanderley, Marcelo M. and Fujinaga, Ichiro' - bibtex: "@inproceedings{Hockman2009,\n abstract = {This paper presents a method\ - \ for using a runner's pacefor real-time control of the time-scaling facility\ - \ of a phasevocoder, resulting in the automated synchronization of anaudio track\ - \ tempo to the generated control signal. The increase in usage of portable music\ - \ players during exercisehas given rise to the development of new personal exerciseaids,\ - \ most notably the Nike+iPod system, which relies onembedded sensor technologies\ - \ to provide kinematic workout statistics. There are also systems that select\ - \ songs basedon the measured step frequency of a runner. The proposedsystem also\ - \ uses the pace of a runner, but this information isused to change the tempo of\ - \ the music.},\n address = {Pittsburgh, PA, United States},\n author = {Hockman,\ - \ Jason A. and Wanderley, Marcelo M. and Fujinaga, Ichiro},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177575},\n issn = {2220-4806},\n keywords = {NIME, synchronization,\ - \ exercise, time-scaling. },\n pages = {90--93},\n title = {Real-Time Phase Vocoder\ - \ Manipulation by Runner's Pace},\n url = {http://www.nime.org/proceedings/2009/nime2009_090.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_31 + abstract: 'While digital music technologies are rapidly growing, music communities + using traditional acoustic instruments are sometimes unable to take full advantage + of all of the digital processing techniques available to electronic musicians. + One way to include them in the latest developments is to develop interfaces connecting + non-electronic instruments to the digital world. 
This paper presents HarMIDI, + a sensor system to convert keystrokes on an Indian harmonium to MIDI. The paper + presents the sensor assembly, calibration methods, and the algorithm to output + MIDI. The calibration methods calibrate the notes and temporally synchronize the + MIDI stream with audio. The system has been evaluated for time synchronization + of onsets and offsets. The sensor setup is affordable, portable and can be used + with any existing harmonium.' + address: 'Mexico City, Mexico' + articleno: 31 + author: Suraj Jaiswal and Vipul Arora + bibtex: "@inproceedings{nime2023_31,\n abstract = {While digital music technologies\ + \ are rapidly growing, music communities using traditional acoustic instruments\ + \ are sometimes unable to take full advantage of all of the digital processing\ + \ techniques available to electronic musicians. One way to include them in the\ + \ latest developments is to develop interfaces connecting non-electronic instruments\ + \ to the digital world. This paper presents HarMIDI, a sensor system to convert\ + \ keystrokes on an Indian harmonium to MIDI. The paper presents the sensor assembly,\ + \ calibration methods, and the algorithm to output MIDI. The calibration methods\ + \ calibrate the notes and temporally synchronize the MIDI stream with audio. The\ + \ system has been evaluated for time synchronization of onsets and offsets. 
The\ + \ sensor setup is affordable, portable and can be used with any existing harmonium.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {31},\n author = {Suraj Jaiswal\ + \ and Vipul Arora},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {5},\n\ + \ pages = {219--223},\n title = {HarMIDI: Sensor System To Read MIDI from Indian\ + \ Harmoniums},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_31.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177575 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'NIME, synchronization, exercise, time-scaling. ' - pages: 90--93 - title: Real-Time Phase Vocoder Manipulation by Runner's Pace - url: http://www.nime.org/proceedings/2009/nime2009_090.pdf - year: 2009 + month: May + numpages: 5 + pages: 219--223 + title: 'HarMIDI: Sensor System To Read MIDI from Indian Harmoniums' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_31.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Nymoen2009 - abstract: 'The paper presents Nymophone2, an acoustic instrument with a complex - relationship between performance actions and emergent sound. A method for describing - the multidimensional control actions needed to play the instrument is presented - and discussed.' - address: 'Pittsburgh, PA, United States' - author: 'Nymoen, Kristian and Jensenius, Alexander R.' - bibtex: "@inproceedings{Nymoen2009,\n abstract = {The paper presents Nymophone2,\ - \ an acoustic instrument with a complex relationship between performance actions\ - \ and emergent sound. 
A method for describing the multidimensional control actions\ - \ needed to play the instrument is presented and discussed.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Nymoen, Kristian and Jensenius, Alexander R.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177645},\n issn = {2220-4806},\n\ - \ keywords = {nime09},\n pages = {94--97},\n title = {A Discussion of Multidimensional\ - \ Mapping in Nymophone2},\n url = {http://www.nime.org/proceedings/2009/nime2009_094.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_32 + abstract: 'We describe the Living Looper, a real-time software system for prediction + and continuation of audio signals in the format of a looping pedal. Each of several + channels is activated by a footswitch and repeats or continues incoming audio + using neural synthesis. The live looping pedal format is familiar to electric + guitarists and electronic musicians, which helps the instrument to serve as a + boundary object for musicians and technologists of different backgrounds. Each + Living Loop channel learns in the context of what the other channels are doing, + including those which are momentarily controlled by human players. This leads + to shifting networks of agency and control between players and Living Loops. In + this paper we present the ongoing design of the Living Looper as well as preliminary + encounters with musicians in a workshop and concert setting.' + address: 'Mexico City, Mexico' + articleno: 32 + author: Victor Shepardson and Thor Magnusson + bibtex: "@inproceedings{nime2023_32,\n abstract = {We describe the Living Looper,\ + \ a real-time software system for prediction and continuation of audio signals\ + \ in the format of a looping pedal. Each of several channels is activated by a\ + \ footswitch and repeats or continues incoming audio using neural synthesis. 
The\ + \ live looping pedal format is familiar to electric guitarists and electronic\ + \ musicians, which helps the instrument to serve as a boundary object for musicians\ + \ and technologists of different backgrounds. Each Living Loop channel learns\ + \ in the context of what the other channels are doing, including those which are\ + \ momentarily controlled by human players. This leads to shifting networks of\ + \ agency and control between players and Living Loops. In this paper we present\ + \ the ongoing design of the Living Looper as well as preliminary encounters with\ + \ musicians in a workshop and concert setting.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {32},\n author = {Victor Shepardson and Thor Magnusson},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {8},\n pages = {224--231},\n title = {The Living\ + \ Looper: Rethinking the Musical Loop as a Machine Action-Perception Loop},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_32.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177645 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 94--97 - title: A Discussion of Multidimensional Mapping in Nymophone2 - url: http://www.nime.org/proceedings/2009/nime2009_094.pdf - year: 2009 + month: May + numpages: 8 + pages: 224--231 + title: 'The Living Looper: Rethinking the Musical Loop as a Machine Action-Perception + Loop' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_32.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Schlessinger2009 - abstract: ' We present the Kalichord: a small, handheld electro/acoustic instrument - in which the player''s right hand plucks virtual strings while 
his left hand uses - buttons to play independent bass lines. The Kalichord uses the analog signal from - plucked acoustic tines to excite a physical string model, allowing a nuanced and - intuitive plucking experience. First, we catalog instruments related to the Kalichord. - Then we examine the use of analog signals to excite a physical string model and - discuss the expressiveness and form factors that this technique affords. We then - describe the overall construction of the Kalichord and possible playing styles, - and finally we consider ways we hope to improve upon the current prototype. ' - address: 'Pittsburgh, PA, United States' - author: 'Schlessinger, Daniel and Smith, Julius O.' - bibtex: "@inproceedings{Schlessinger2009,\n abstract = { We present the Kalichord:\ - \ a small, handheld electro/acoustic instrument in which the player's right hand\ - \ plucks virtual strings while his left hand uses buttons to play independent\ - \ bass lines. The Kalichord uses the analog signal from plucked acoustic tines\ - \ to excite a physical string model, allowing a nuanced and intuitive plucking\ - \ experience. First, we catalog instruments related to the Kalichord. Then we\ - \ examine the use of analog signals to excite a physical string model and discuss\ - \ the expressiveness and form factors that this technique affords. We then describe\ - \ the overall construction of the Kalichord and possible playing styles, and finally\ - \ we consider ways we hope to improve upon the current prototype. 
},\n address\ - \ = {Pittsburgh, PA, United States},\n author = {Schlessinger, Daniel and Smith,\ - \ Julius O.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177671},\n issn\ - \ = {2220-4806},\n keywords = {Kalichord, physical model, tine, piezo, plucked\ - \ string, electro-acoustic instruments, kalimba, accordion },\n pages = {98--101},\n\ - \ title = {The Kalichord : A Physically Modeled Electro-Acoustic Plucked String\ - \ Instrument},\n url = {http://www.nime.org/proceedings/2009/nime2009_098.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_33 + abstract: 'This paper relates an early art-research collaboration between two practitioners + in machine learning and virtual worlds toward new embodied musical experiences + of Artificial Intelligence (AI). Instead of a digital music instrument or a music-generating + agent, we propose to craft a soundwalk experience where a human person moves through + a three-dimensional virtual world to explore a latent sound space generated by + deep learning. We report on the diffractive prototyping and iterative crafting + of three such soundwalks through/out deep latent spaces, using nn~ and New Atlantis + as computational platforms for AI audio processing and virtual world experimentation. + We share critical perspectives emerging from our latent soundwalking practice, + with the hope that they contribute to ongoing community-wide reflections toward + new AI for musical expression.' + address: 'Mexico City, Mexico' + articleno: 33 + author: Hugo Scurto and Ludmila Postel + bibtex: "@inproceedings{nime2023_33,\n abstract = {This paper relates an early art-research\ + \ collaboration between two practitioners in machine learning and virtual worlds\ + \ toward new embodied musical experiences of Artificial Intelligence (AI). 
Instead\ + \ of a digital music instrument or a music-generating agent, we propose to craft\ + \ a soundwalk experience where a human person moves through a three-dimensional\ + \ virtual world to explore a latent sound space generated by deep learning. We\ + \ report on the diffractive prototyping and iterative crafting of three such soundwalks\ + \ through/out deep latent spaces, using nn~ and New Atlantis as computational\ + \ platforms for AI audio processing and virtual world experimentation. We share\ + \ critical perspectives emerging from our latent soundwalking practice, with the\ + \ hope that they contribute to ongoing community-wide reflections toward new AI\ + \ for musical expression.},\n address = {Mexico City, Mexico},\n articleno = {33},\n\ + \ author = {Hugo Scurto and Ludmila Postel},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {4},\n pages = {232--235},\n title = {Soundwalking Deep Latent Spaces},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_33.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177671 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Kalichord, physical model, tine, piezo, plucked string, electro-acoustic - instruments, kalimba, accordion ' - pages: 98--101 - title: 'The Kalichord : A Physically Modeled Electro-Acoustic Plucked String Instrument' - url: http://www.nime.org/proceedings/2009/nime2009_098.pdf - year: 2009 + month: May + numpages: 4 + pages: 232--235 + title: Soundwalking Deep Latent Spaces + track: Papers + url: http://nime.org/proceedings/2023/nime2023_33.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Lahdeoja2009 - abstract: 'In this paper we describe an approach for introducing 
newelectronic percussive - sound possibilities for stringinstruments by "listening" to the sounds of the - instrument''sbody and extracting audio and data from the wood''sacoustic vibrations. - A method for capturing, localizing andanalyzing the percussive hits on the instrument''s - body ispresented, in connection with an audio-driven electronicpercussive sound - module. The system introduces a newgesture-sound relationship in the electric - string instrumentplaying environment, namely the use of percussivetechniques on - the instrument''s body which are null inregular circumstances due to selective - and exclusivemicrophone use for the strings. Instrument bodypercussions are widely - used in the acoustic instrumentalpraxis. They yield a strong potential for providing - anextended soundscape via instrument augmentation, directlycontrolled by the musician - through haptic manipulation ofthe instrument itself. The research work was carried - out onthe electric guitar, but the method used can apply to anystring instrument - with a resonating body.' - address: 'Pittsburgh, PA, United States' - author: 'Lähdeoja, Otso' - bibtex: "@inproceedings{Lahdeoja2009,\n abstract = {In this paper we describe an\ - \ approach for introducing newelectronic percussive sound possibilities for stringinstruments\ - \ by \"listening\" to the sounds of the instrument'sbody and extracting audio\ - \ and data from the wood'sacoustic vibrations. A method for capturing, localizing\ - \ andanalyzing the percussive hits on the instrument's body ispresented, in connection\ - \ with an audio-driven electronicpercussive sound module. The system introduces\ - \ a newgesture-sound relationship in the electric string instrumentplaying environment,\ - \ namely the use of percussivetechniques on the instrument's body which are null\ - \ inregular circumstances due to selective and exclusivemicrophone use for the\ - \ strings. 
Instrument bodypercussions are widely used in the acoustic instrumentalpraxis.\ - \ They yield a strong potential for providing anextended soundscape via instrument\ - \ augmentation, directlycontrolled by the musician through haptic manipulation\ - \ ofthe instrument itself. The research work was carried out onthe electric guitar,\ - \ but the method used can apply to anystring instrument with a resonating body.},\n\ - \ address = {Pittsburgh, PA, United States},\n author = {L\\''{a}hdeoja, Otso},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177607},\n issn = {2220-4806},\n\ - \ keywords = {augmented instrument,chordophone,contact microphone systems,electric,electronic\ - \ percussion,even with,guitar,leaving the instrument body,nime09,there is always\ - \ a,trade-off,virtually mute},\n pages = {102--105},\n title = {Augmenting Chordophones\ - \ with Hybrid Percussive Sound Possibilities},\n url = {http://www.nime.org/proceedings/2009/nime2009_102.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_34 + abstract: 'Sampled drums can be used as an affordable way of creating human-like + drum tracks, or perhaps more interestingly, can be used as a mean of experimentation + with rhythm and groove. Similarly, AI-based drum generation tools can focus on + creating human-like drum patterns, or alternatively, focus on providing producers/musicians + with means of experimentation with rhythm. In this work, we aimed to explore the + latter approach. To this end, we present a suite of Transformer-based models aimed + at completing audio drum loops with stylistically consistent symbolic drum events. + Our proposed models rely on a reduced spectral representation of the drum loop, + striking a balance between a raw audio recording and an exact symbolic transcription. 
+ Using a number of objective evaluations, we explore the validity of our approach + and identify several challenges that need to be further studied in future iterations + of this work. Lastly, we provide a real-time VST plugin that allows musicians/producers + to utilize the models in real-time production settings.' + address: 'Mexico City, Mexico' + articleno: 34 + author: Behzad Haki and Teresa Pelinski and Marina Nieto Giménez and Sergi Jordà + bibtex: "@inproceedings{nime2023_34,\n abstract = {Sampled drums can be used as\ + \ an affordable way of creating human-like drum tracks, or perhaps more interestingly,\ + \ can be used as a mean of experimentation with rhythm and groove. Similarly,\ + \ AI-based drum generation tools can focus on creating human-like drum patterns,\ + \ or alternatively, focus on providing producers/musicians with means of experimentation\ + \ with rhythm. In this work, we aimed to explore the latter approach. To this\ + \ end, we present a suite of Transformer-based models aimed at completing audio\ + \ drum loops with stylistically consistent symbolic drum events. Our proposed\ + \ models rely on a reduced spectral representation of the drum loop, striking\ + \ a balance between a raw audio recording and an exact symbolic transcription.\ + \ Using a number of objective evaluations, we explore the validity of our approach\ + \ and identify several challenges that need to be further studied in future iterations\ + \ of this work. 
Lastly, we provide a real-time VST plugin that allows musicians/producers\ + \ to utilize the models in real-time production settings.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {34},\n author = {Behzad Haki and Teresa Pelinski\ + \ and Marina Nieto Giménez and Sergi Jordà},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {8},\n pages = {236--243},\n title = {Completing Audio Drum Loops\ + \ with Symbolic Drum Suggestions},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_34.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177607 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'augmented instrument,chordophone,contact microphone systems,electric,electronic - percussion,even with,guitar,leaving the instrument body,nime09,there is always - a,trade-off,virtually mute' - pages: 102--105 - title: Augmenting Chordophones with Hybrid Percussive Sound Possibilities - url: http://www.nime.org/proceedings/2009/nime2009_102.pdf - year: 2009 + month: May + numpages: 8 + pages: 236--243 + title: Completing Audio Drum Loops with Symbolic Drum Suggestions + track: Papers + url: http://nime.org/proceedings/2023/nime2023_34.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Kahrs2009 - abstract: 'Large vibrating plates are used as thunder sheets in orchestras. We have - extended the use of flat plates by cementing aflat panel electroacoustic transducer - on a large brass sheet.Because of the thickness of the panel, the output is subject - tononlinear distortion. When combined with a real-time inputand signal processing - algorithm, the active brass plate canbecome an effective musical instrument for - performance ofnew music.' 
- address: 'Pittsburgh, PA, United States' - author: 'Kahrs, Mark and Skulina, David and Bilbao, Stefan and Campbell, Murray' - bibtex: "@inproceedings{Kahrs2009,\n abstract = {Large vibrating plates are used\ - \ as thunder sheets in orchestras. We have extended the use of flat plates by\ - \ cementing aflat panel electroacoustic transducer on a large brass sheet.Because\ - \ of the thickness of the panel, the output is subject tononlinear distortion.\ - \ When combined with a real-time inputand signal processing algorithm, the active\ - \ brass plate canbecome an effective musical instrument for performance ofnew\ - \ music.},\n address = {Pittsburgh, PA, United States},\n author = {Kahrs, Mark\ - \ and Skulina, David and Bilbao, Stefan and Campbell, Murray},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177593},\n issn = {2220-4806},\n keywords = {Electroacoustics,\ - \ flat panel },\n pages = {106--109},\n title = {An Electroacoustically Controlled\ - \ Vibrating Plate},\n url = {http://www.nime.org/proceedings/2009/nime2009_106.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_35 + abstract: "People have always used new technology to experiment with new forms of\ + \ music creation. However, the latest devel- opments in artificial intelligence\ + \ (AI) suggest that machines are on the verge of becoming more than mere tools—they\ + \ can also be co-creators. In this article, we follow four mu- sicians in the\ + \ project Co-Creative Spaces through a six- month long collaborative process,\ + \ where they created music through improvising with each other and with computer-\ + \ based imitations of themselves. These musical agents were trained through machine\ + \ learning to generate output in the style of the musicians. What happens to musical\ + \ co-creation when AI is included in the creative cycle? 
The musicians are from\ + \ Norway and Kenya—two countries with fundamen- tally different musical traditions.\ + \ How is the collaboration affected by cultural biases inherent in the technology,\ + \ and in the musicians themselves?\nThese questions were examined through focus\ + \ groups as part of two five-day workshops. An analysis shows how the musicians\ + \ moved between an understanding of machine as tool and machine as co-creator,\ + \ and between the idea of music as object and music as process. These different\ + \ interpretative repertoires were used interchangeably and paint a complex picture\ + \ of what it is like being in the intersection between different musical and cultural\ + \ paradigms." + address: 'Mexico City, Mexico' + articleno: 35 + author: Notto J. W. Thelle and Bernt Isak Wærstad + bibtex: "@inproceedings{nime2023_35,\n abstract = {People have always used new technology\ + \ to experiment with new forms of music creation. However, the latest devel- opments\ + \ in artificial intelligence (AI) suggest that machines are on the verge of becoming\ + \ more than mere tools—they can also be co-creators. In this article, we follow\ + \ four mu- sicians in the project Co-Creative Spaces through a six- month long\ + \ collaborative process, where they created music through improvising with each\ + \ other and with computer- based imitations of themselves. These musical agents\ + \ were trained through machine learning to generate output in the style of the\ + \ musicians. What happens to musical co-creation when AI is included in the creative\ + \ cycle? The musicians are from Norway and Kenya—two countries with fundamen-\ + \ tally different musical traditions. How is the collaboration affected by cultural\ + \ biases inherent in the technology, and in the musicians themselves?\nThese questions\ + \ were examined through focus groups as part of two five-day workshops. 
An analysis\ + \ shows how the musicians moved between an understanding of machine as tool and\ + \ machine as co-creator, and between the idea of music as object and music as\ + \ process. These different interpretative repertoires were used interchangeably\ + \ and paint a complex picture of what it is like being in the intersection between\ + \ different musical and cultural paradigms.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {35},\n author = {Notto J. W. Thelle and Bernt Isak Wærstad},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {7},\n pages = {244--250},\n title\ + \ = {Co-Creatives Spaces: The machine as a collaborator},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_35.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177593 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Electroacoustics, flat panel ' - pages: 106--109 - title: An Electroacoustically Controlled Vibrating Plate - url: http://www.nime.org/proceedings/2009/nime2009_106.pdf - year: 2009 + month: May + numpages: 7 + pages: 244--250 + title: 'Co-Creatives Spaces: The machine as a collaborator' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_35.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Smallwood2009a - abstract: 'This paper gives a historical overview of the development of alternative - sonic display systems at Princeton University; in particular, the design, construction, - and use in live performance of a series of spherical and hemispherical speaker - systems. We also provide a DIY guide to constructing the latest series of loudspeakers - that we are currently using in our research and music making. 
' - address: 'Pittsburgh, PA, United States' - author: 'Smallwood, Scott and Cook, Perry R. and Trueman, Dan and McIntyre, Lawrence' - bibtex: "@inproceedings{Smallwood2009a,\n abstract = {This paper gives a historical\ - \ overview of the development of alternative sonic display systems at Princeton\ - \ University; in particular, the design, construction, and use in live performance\ - \ of a series of spherical and hemispherical speaker systems. We also provide\ - \ a DIY guide to constructing the latest series of loudspeakers that we are currently\ - \ using in our research and music making. },\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Smallwood, Scott and Cook, Perry R. and Trueman, Dan and\ - \ McIntyre, Lawrence},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177679},\n\ - \ issn = {2220-4806},\n keywords = {loudspeakers, hemispherical speakers, sonic\ - \ display systems, laptop orchestras. },\n pages = {110--115},\n title = {Don't\ - \ Forget the Loudspeaker --- A History of Hemispherical Speakers at Princeton\ - \ , Plus a DIY Guide},\n url = {http://www.nime.org/proceedings/2009/nime2009_110.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_36 + abstract: 'Most musical instrument augmentations aim to only fit one specific instrument + and depend on an external sound system to work as intended. In a more acoustic + concert setting this often alienates the electronic sound component. The FLAPIBox + is an integrated solution that fits most acoustic instruments and use its own + resonance for playing electronic sound in a more organic way—through the instrument + itself. Reviewing related works and exploring different hardware and software + components, a modular prototype has been built. The results of this preliminary + study make the body of planning and building the first integrated breadboard prototype. 
+ Because of its flexible design, the FLAPIBox can use several different microphone, + and loudspeaker technologies. Using inexpensive components and developing open-source + software, the FLAPIBox is both affordable and accessible. The development of the + FLAPIBox aim to result in a stable and predictable platform, yet open and versatile + enough for further development.' + address: 'Mexico City, Mexico' + articleno: 36 + author: Erik Stifjell + bibtex: "@inproceedings{nime2023_36,\n abstract = {Most musical instrument augmentations\ + \ aim to only fit one specific instrument and depend on an external sound system\ + \ to work as intended. In a more acoustic concert setting this often alienates\ + \ the electronic sound component. The FLAPIBox is an integrated solution that\ + \ fits most acoustic instruments and use its own resonance for playing electronic\ + \ sound in a more organic way—through the instrument itself. Reviewing related\ + \ works and exploring different hardware and software components, a modular prototype\ + \ has been built. The results of this preliminary study make the body of planning\ + \ and building the first integrated breadboard prototype. Because of its flexible\ + \ design, the FLAPIBox can use several different microphone, and loudspeaker technologies.\ + \ Using inexpensive components and developing open-source software, the FLAPIBox\ + \ is both affordable and accessible. 
The development of the FLAPIBox aim to result\ + \ in a stable and predictable platform, yet open and versatile enough for further\ + \ development.},\n address = {Mexico City, Mexico},\n articleno = {36},\n author\ + \ = {Erik Stifjell},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {5},\n\ + \ pages = {251--255},\n title = {A FLexible musical instrument Augmentation that\ + \ is Programmable, Integrated in a Box (FLAPIBox)},\n track = {Papers},\n url\ + \ = {http://nime.org/proceedings/2023/nime2023_36.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177679 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'loudspeakers, hemispherical speakers, sonic display systems, laptop orchestras. ' - pages: 110--115 - title: 'Don''t Forget the Loudspeaker --- A History of Hemispherical Speakers at - Princeton , Plus a DIY Guide' - url: http://www.nime.org/proceedings/2009/nime2009_110.pdf - year: 2009 + month: May + numpages: 5 + pages: 251--255 + title: 'A FLexible musical instrument Augmentation that is Programmable, Integrated + in a Box (FLAPIBox)' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_36.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Freed2009a - abstract: 'The history and future of Open Sound Control (OSC) is discussed and the - next iteration of the OSC specification is introduced with discussion of new features - to support NIME community activities. The roadmap to a major revision of OSC is - developed. 
' - address: 'Pittsburgh, PA, United States' - author: 'Freed, Adrian and Schmeder, Andrew' - bibtex: "@inproceedings{Freed2009a,\n abstract = {The history and future of Open\ - \ Sound Control (OSC) is discussed and the next iteration of the OSC specification\ - \ is introduced with discussion of new features to support NIME community activities.\ - \ The roadmap to a major revision of OSC is developed. },\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Freed, Adrian and Schmeder, Andrew},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177517},\n issn = {2220-4806},\n keywords\ - \ = {Open Sound Control, Time Tag, OSC, Reservation Protocols. },\n pages = {116--120},\n\ - \ title = {Features and Future of Open Sound Control version 1.1 for NIME},\n\ - \ url = {http://www.nime.org/proceedings/2009/nime2009_116.pdf},\n year = {2009}\n\ - }\n" + ID: nime2023_37 + abstract: "LiveLily is an open-source system for live sequencing and live scoring\ + \ through live coding in a subset of the Lilypond language. It is written in openFrameworks\ + \ and consists of four distinct parts, the text editor, the language parser, the\ + \ sequencer, and the music score. It supports the MIDI and OSC protocols to communicate\ + \ the sequencer with other software or hardware, as LiveLily does not produce\ + \ any sound. It can be combined with audio synthesis software that supports OSC,\ + \ like Pure Data, SuperCollider, and others, or hardware synthesizers that support\ + \ MIDI. This way, the users can create their sounds in another, audio-complete\ + \ framework or device, and use LiveLily to control their music.\nLiveLily can\ + \ also be used as a live scoring system to write music scores for acoustic instruments\ + \ live. This feature can be combined with its live sequencing capabilities, so\ + \ acoustic instruments can be combined with live electronics. 
Both live scoring\ + \ and live sequencing in LiveLily provide expressiveness to a great extent, as\ + \ many musical gestures can be included either in the score or the sequencer.\ + \ Such gestures include dynamics, articulation, and arbitrary text that can be\ + \ interpreted in any desired way, much like the way Western-music notation scores\ + \ are written." + address: 'Mexico City, Mexico' + articleno: 37 + author: Alexandros Drymonitis + bibtex: "@inproceedings{nime2023_37,\n abstract = {LiveLily is an open-source system\ + \ for live sequencing and live scoring through live coding in a subset of the\ + \ Lilypond language. It is written in openFrameworks and consists of four distinct\ + \ parts, the text editor, the language parser, the sequencer, and the music score.\ + \ It supports the MIDI and OSC protocols to communicate the sequencer with other\ + \ software or hardware, as LiveLily does not produce any sound. It can be combined\ + \ with audio synthesis software that supports OSC, like Pure Data, SuperCollider,\ + \ and others, or hardware synthesizers that support MIDI. This way, the users\ + \ can create their sounds in another, audio-complete framework or device, and\ + \ use LiveLily to control their music.\nLiveLily can also be used as a live scoring\ + \ system to write music scores for acoustic instruments live. This feature can\ + \ be combined with its live sequencing capabilities, so acoustic instruments can\ + \ be combined with live electronics. Both live scoring and live sequencing in\ + \ LiveLily provide expressiveness to a great extent, as many musical gestures\ + \ can be included either in the score or the sequencer. 
Such gestures include\ + \ dynamics, articulation, and arbitrary text that can be interpreted in any desired\ + \ way, much like the way Western-music notation scores are written.},\n address\ + \ = {Mexico City, Mexico},\n articleno = {37},\n author = {Alexandros Drymonitis},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {256--261},\n title\ + \ = {LiveLily: An Expressive Live Sequencing and Live Scoring System Through Live\ + \ Coding With the Lilypond Language},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_37.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177517 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Open Sound Control, Time Tag, OSC, Reservation Protocols. ' - pages: 116--120 - title: Features and Future of Open Sound Control version 1.1 for NIME - url: http://www.nime.org/proceedings/2009/nime2009_116.pdf - year: 2009 + month: May + numpages: 6 + pages: 256--261 + title: 'LiveLily: An Expressive Live Sequencing and Live Scoring System Through + Live Coding With the Lilypond Language' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_37.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Schmeder2009 - abstract: 'An on-the-fly reconfigurable low-level embedded servicearchitecture is - presented as a means to improve scalability, improve conceptual comprehensibility, - reduce humanerror and reduce development time when designing newsensor-based electronic - musical instruments with real-timeresponsiveness. The implementation of the concept - ina project called micro-OSC is described. Other sensorinterfacing products are - evaluated in the context of DIYprototyping of musical instruments. 
The capabilities - ofthe micro-OSC platform are demonstrated through a set ofexamples including resistive - sensing, mixed digital-analogsystems, many-channel sensor interfaces and time-basedmeasurement - methods.' - address: 'Pittsburgh, PA, United States' - author: 'Schmeder, Andrew and Freed, Adrian' - bibtex: "@inproceedings{Schmeder2009,\n abstract = {An on-the-fly reconfigurable\ - \ low-level embedded servicearchitecture is presented as a means to improve scalability,\ - \ improve conceptual comprehensibility, reduce humanerror and reduce development\ - \ time when designing newsensor-based electronic musical instruments with real-timeresponsiveness.\ - \ The implementation of the concept ina project called micro-OSC is described.\ - \ Other sensorinterfacing products are evaluated in the context of DIYprototyping\ - \ of musical instruments. The capabilities ofthe micro-OSC platform are demonstrated\ - \ through a set ofexamples including resistive sensing, mixed digital-analogsystems,\ - \ many-channel sensor interfaces and time-basedmeasurement methods.},\n address\ - \ = {Pittsburgh, PA, United States},\n author = {Schmeder, Andrew and Freed, Adrian},\n\ + ID: nime2023_38 + abstract: 'ZRob is a robotic system designed for playing a snare drum. The robot + is constructed with a passive flexible spring-based joint inspired by the human + hand. This paper describes a study exploring rhythmic patterns by exploiting the + chaotic dynamics of two ZRobs. In the experiment, we explored the control configurations + of each arm by trying to create unpredictable patterns. Over 200 samples have + been recorded and analyzed. We show how the chaotic dynamics of ZRob can be used + for creating new drumming patterns.' 
+ address: 'Mexico City, Mexico' + articleno: 38 + author: Seyed Mojtaba Karbasi and Alexander Refsum Jensenius and Rolf Inge Godøy + and Jim Torresen + bibtex: "@inproceedings{nime2023_38,\n abstract = {ZRob is a robotic system designed\ + \ for playing a snare drum. The robot is constructed with a passive flexible spring-based\ + \ joint inspired by the human hand. This paper describes a study exploring rhythmic\ + \ patterns by exploiting the chaotic dynamics of two ZRobs. In the experiment,\ + \ we explored the control configurations of each arm by trying to create unpredictable\ + \ patterns. Over 200 samples have been recorded and analyzed. We show how the\ + \ chaotic dynamics of ZRob can be used for creating new drumming patterns.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {38},\n author = {Seyed Mojtaba\ + \ Karbasi and Alexander Refsum Jensenius and Rolf Inge Godøy and Jim Torresen},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177673},\n issn = {2220-4806},\n\ - \ keywords = {real-time musical interface, DIY design, em- bedded web services,\ - \ rapid prototyping, reconfigurable firmware },\n pages = {121--124},\n title\ - \ = {A Low-level Embedded Service Architecture for Rapid DIY Design of Real-time\ - \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_121.pdf},\n\ - \ year = {2009}\n}\n" + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {262--267},\n title\ + \ = {Exploring Emerging Drumming Patterns in a Chaotic Dynamical System using\ + \ ZRob},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_38.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177673 + editor: Miguel Ortiz and Adnan Marquez-Borbon 
issn: 2220-4806 - keywords: 'real-time musical interface, DIY design, em- bedded web services, rapid - prototyping, reconfigurable firmware ' - pages: 121--124 - title: A Low-level Embedded Service Architecture for Rapid DIY Design of Real-time - Musical Instruments - url: http://www.nime.org/proceedings/2009/nime2009_121.pdf - year: 2009 + month: May + numpages: 6 + pages: 262--267 + title: Exploring Emerging Drumming Patterns in a Chaotic Dynamical System using + ZRob + track: Papers + url: http://nime.org/proceedings/2023/nime2023_38.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Steiner2009 - abstract: 'Firmata is a generic protocol for communicating with microcontrollers - from software on a host computer. The central goal is to make the microcontroller - an extension of theprogramming environment on the host computer in a manner that - feels natural in that programming environment. Itwas designed to be open and flexible - so that any programming environment can support it, and simple to implementboth - on the microcontroller and the host computer to ensurea wide range of implementations. - The current reference implementation is a library for Arduino/Wiring and is includedwith - Arduino software package since version 0012. Thereare matching software modules - for a number of languages,like Pd, OpenFrameworks, Max/MSP, and Processing.' - address: 'Pittsburgh, PA, United States' - author: 'Steiner, Hans-Christoph' - bibtex: "@inproceedings{Steiner2009,\n abstract = {Firmata is a generic protocol\ - \ for communicating with microcontrollers from software on a host computer. 
The\ - \ central goal is to make the microcontroller an extension of theprogramming environment\ - \ on the host computer in a manner that feels natural in that programming environment.\ - \ Itwas designed to be open and flexible so that any programming environment can\ - \ support it, and simple to implementboth on the microcontroller and the host\ - \ computer to ensurea wide range of implementations. The current reference implementation\ - \ is a library for Arduino/Wiring and is includedwith Arduino software package\ - \ since version 0012. Thereare matching software modules for a number of languages,like\ - \ Pd, OpenFrameworks, Max/MSP, and Processing.},\n address = {Pittsburgh, PA,\ - \ United States},\n author = {Steiner, Hans-Christoph},\n booktitle = {Proceedings\ + ID: nime2023_39 + abstract: 'The eTud,be framework adapts existing improvising musical agents (MA) + for performance with an augmented instrument called the eTube. This instrument + has been developed with deliberate musical and technological limitations including + a simple two-button controller and restricted pitch capacity. We will present + case studies which outline our research-creation framework for mapping the eTube + controller, developing corpora for the MAs, and testing interactive and machine + listening settings which will also be demonstrated by performance examples. A + general summary of the MAs will be followed by specific descriptions of the features + we have utilised in our work, and finally a comparison of the MAs based on these + features. Few papers discuss the process for learning to work with and adapt existing + MAs and we will finish by describing challenges experienced as other users with + these technologies.' 
+ address: 'Mexico City, Mexico' + articleno: 39 + author: Tommy Davis and Kasey LV Pocius and Vincent Cusson and Marcelo Wanderley + and Philippe Pasquier + bibtex: "@inproceedings{nime2023_39,\n abstract = {The eTu{d,b}e framework adapts\ + \ existing improvising musical agents (MA) for performance with an augmented instrument\ + \ called the eTube. This instrument has been developed with deliberate musical\ + \ and technological limitations including a simple two-button controller and restricted\ + \ pitch capacity. We will present case studies which outline our research-creation\ + \ framework for mapping the eTube controller, developing corpora for the MAs,\ + \ and testing interactive and machine listening settings which will also be demonstrated\ + \ by performance examples. A general summary of the MAs will be followed by specific\ + \ descriptions of the features we have utilised in our work, and finally a comparison\ + \ of the MAs based on these features. Few papers discuss the process for learning\ + \ to work with and adapt existing MAs and we will finish by describing challenges\ + \ experienced as other users with these technologies.},\n address = {Mexico City,\ + \ Mexico},\n articleno = {39},\n author = {Tommy Davis and Kasey LV Pocius and\ + \ Vincent Cusson and Marcelo Wanderley and Philippe Pasquier},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177689},\n issn = {2220-4806},\n keywords = {arduino,microcontroller,nime09,processing,pure\ - \ data},\n pages = {125--130},\n title = {Firmata : Towards Making Microcontrollers\ - \ Act Like Extensions of the Computer},\n url = {http://www.nime.org/proceedings/2009/nime2009_125.pdf},\n\ - \ year = {2009}\n}\n" + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {9},\n pages = {268--276},\n title = {eTu{d,b}e: case\ + \ studies in playing with musical 
agents},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_39.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177689 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'arduino,microcontroller,nime09,processing,pure data' - pages: 125--130 - title: 'Firmata : Towards Making Microcontrollers Act Like Extensions of the Computer' - url: http://www.nime.org/proceedings/2009/nime2009_125.pdf - year: 2009 + month: May + numpages: 9 + pages: 268--276 + title: 'eTud,be: case studies in playing with musical agents' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_39.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Baalman2009a - abstract: 'The SenseWorld DataNetwork framework addresses the is- sue of sharing - and manipulating multiple data streams among different media systems in a heterogenous - interactive per- formance environment. It is intended to facilitate the cre- ation, - rehearsal process and performance practice of collab- orative interactive media - art works, by making the sharing of data (from sensors or internal processes) - between collab- orators easier, faster and more flexible.' - address: 'Pittsburgh, PA, United States' - author: 'Baalman, Marije A. and Smoak, Harry C. and Salter, Christopher L. and Malloch, - Joseph and Wanderley, Marcelo M.' 
- bibtex: "@inproceedings{Baalman2009a,\n abstract = {The SenseWorld DataNetwork framework\ - \ addresses the is- sue of sharing and manipulating multiple data streams among\ - \ different media systems in a heterogenous interactive per- formance environment.\ - \ It is intended to facilitate the cre- ation, rehearsal process and performance\ - \ practice of collab- orative interactive media art works, by making the sharing\ - \ of data (from sensors or internal processes) between collab- orators easier,\ - \ faster and more flexible.},\n address = {Pittsburgh, PA, United States},\n author\ - \ = {Baalman, Marije A. and Smoak, Harry C. and Salter, Christopher L. and Malloch,\ - \ Joseph and Wanderley, Marcelo M.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177471},\n\ - \ issn = {2220-4806},\n keywords = {Data exchange, collaborative performance,\ - \ interactive performance, interactive art works, sensor data, OpenSoundControl,\ - \ SuperCollider, Max/MSP},\n pages = {131--134},\n title = {Sharing Data in Collaborative,\ - \ Interactive Performances : the SenseWorld DataNetwork},\n url = {http://www.nime.org/proceedings/2009/nime2009_131.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_40 + abstract: "This paper presents an affordable and accessible wearable technology\ + \ for wind musicians which provides real-time biofeedback on their breathing.\ + \ We developed the abdominal thoracic expansion measurement prototype wearable\ + \ technology (ATEM-P), to measure a wind musician’s breathing-induced expansion\ + \ and contraction while they are playing.\nOur first study validates the ATEM-P\ + \ with the gold standard of medical grade respiratory exertion measurement devices,\ + \ the respiratory plethysmography inductance system (RIP). 
The results show that\ + \ the ATEM-P has a strong correlation to the RIP system.\nOur second study provides\ + \ quantitative and qualitative data about the correlation between a musician’s\ + \ breathing technique and the quality of their performance. We expected the results\ + \ to show a correlation between the ATEM-P peak amplitudes and the quality of\ + \ performance, i.e. better breathing-induced expansion leads to better quality\ + \ of performance, however this was not the case. The results did show that there\ + \ is a correlation between a musician’s quality of performance and breath period.\n\ + Results from the studies show that the ATEM-P has potential as an affordable and\ + \ accessible wearable technology for wind musicians: a performance enhancement\ + \ tool and an educational tool." + address: 'Mexico City, Mexico' + articleno: 40 + author: Lucie F Jones and Jeffrey Boyd and Jeremy Brown and Hua Shen + bibtex: "@inproceedings{nime2023_40,\n abstract = {This paper presents an affordable\ + \ and accessible wearable technology for wind musicians which provides real-time\ + \ biofeedback on their breathing. We developed the abdominal thoracic expansion\ + \ measurement prototype wearable technology (ATEM-P), to measure a wind musician’s\ + \ breathing-induced expansion and contraction while they are playing.\nOur first\ + \ study validates the ATEM-P with the gold standard of medical grade respiratory\ + \ exertion measurement devices, the respiratory plethysmography inductance system\ + \ (RIP). The results show that the ATEM-P has a strong correlation to the RIP\ + \ system.\nOur second study provides quantitative and qualitative data about the\ + \ correlation between a musician’s breathing technique and the quality of their\ + \ performance. We expected the results to show a correlation between the ATEM-P\ + \ peak amplitudes and the quality of performance, i.e. 
better breathing-induced\ + \ expansion leads to better quality of performance, however this was not the case.\ + \ The results did show that there is a correlation between a musician’s quality\ + \ of performance and breath period.\nResults from the studies show that the ATEM-P\ + \ has potential as an affordable and accessible wearable technology for wind musicians:\ + \ a performance enhancement tool and an educational tool.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {40},\n author = {Lucie F Jones and Jeffrey Boyd\ + \ and Jeremy Brown and Hua Shen},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {11},\n pages = {277--287},\n title = {A Wearable Technology For Wind Musicians:\ + \ Does It Matter How You Breathe?},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_40.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177471 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Data exchange, collaborative performance, interactive performance, interactive - art works, sensor data, OpenSoundControl, SuperCollider, Max/MSP' - pages: 131--134 - title: 'Sharing Data in Collaborative, Interactive Performances : the SenseWorld - DataNetwork' - url: http://www.nime.org/proceedings/2009/nime2009_131.pdf - year: 2009 + month: May + numpages: 11 + pages: 277--287 + title: 'A Wearable Technology For Wind Musicians: Does It Matter How You Breathe?' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_40.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Bouillot2009 - abstract: 'Low-latency streaming of high-quality audio has the potential to dramatically - transform the world of interactive musical applications. 
We provide methods for - accurately measuring the end-to-end latency and audio quality of a delivered audio - stream and apply these methods to an empirical evaluation of several streaming - engines. In anticipationof future demands for emerging applications involving - audio interaction, we also review key features of streamingengines and discuss - potential challenges that remain to beovercome.' - address: 'Pittsburgh, PA, United States' - author: 'Bouillot, Nicolas and Cooperstock, Jeremy R.' - bibtex: "@inproceedings{Bouillot2009,\n abstract = {Low-latency streaming of high-quality\ - \ audio has the potential to dramatically transform the world of interactive musical\ - \ applications. We provide methods for accurately measuring the end-to-end latency\ - \ and audio quality of a delivered audio stream and apply these methods to an\ - \ empirical evaluation of several streaming engines. In anticipationof future\ - \ demands for emerging applications involving audio interaction, we also review\ - \ key features of streamingengines and discuss potential challenges that remain\ - \ to beovercome.},\n address = {Pittsburgh, PA, United States},\n author = {Bouillot,\ - \ Nicolas and Cooperstock, Jeremy R.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177485},\n\ - \ issn = {2220-4806},\n keywords = {Networked Musical Performance, high-fidelity\ - \ audio streaming, glitch detection, latency measurement },\n pages = {135--140},\n\ - \ title = {Challenges and Performance of High-Fidelity Audio Streaming for Interactive\ - \ Performances},\n url = {http://www.nime.org/proceedings/2009/nime2009_135.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_41 + abstract: 'In this work we introduce LDSP, a novel technology capable of turning + any Android phone into a high-performance embedded platform for digital musical + instrument (DMI) design. 
Embedded platforms are powerful technologies that changed + the way we design and even think of DMIs. Their widespread adoption has popularized + low-level audio programming, enabling engineers and artists alike to create highly + responsive, self-contained digital musical instruments that have direct access + to hardware resources. However, if we shift our focus away from the wealthy countries + of the `Global North'', embedded platforms become a commodity that only a few + can afford. DMI researchers, artists and students from Latin America have discussed + at great lengths the effects that the lack of access to these otherwise common + resources have on their practices. And while some solutions have been proposed, + a large gap can still be perceived. By means of appropriating possibly the most + widespread and accessible technology in the world (Android) and turn it into an + embedded platform, LDSP creates an effective opportunity to close this gap. Throughout + the paper, we provide technical details of the full LDSP environment, along with + insights on the surprising performances of the first DMIs that have been designed + with it.' + address: 'Mexico City, Mexico' + articleno: 41 + author: Carla Tapparo and Brooke Chalmers and Victor Zappi + bibtex: "@inproceedings{nime2023_41,\n abstract = {In this work we introduce LDSP,\ + \ a novel technology capable of turning any Android phone into a high-performance\ + \ embedded platform for digital musical instrument (DMI) design. Embedded platforms\ + \ are powerful technologies that changed the way we design and even think of DMIs.\ + \ Their widespread adoption has popularized low-level audio programming, enabling\ + \ engineers and artists alike to create highly responsive, self-contained digital\ + \ musical instruments that have direct access to hardware resources. 
However,\ + \ if we shift our focus away from the wealthy countries of the `Global North',\ + \ embedded platforms become a commodity that only a few can afford. DMI researchers,\ + \ artists and students from Latin America have discussed at great lengths the\ + \ effects that the lack of access to these otherwise common resources have on\ + \ their practices. And while some solutions have been proposed, a large gap can\ + \ still be perceived. By means of appropriating possibly the most widespread and\ + \ accessible technology in the world (Android) and turn it into an embedded platform,\ + \ LDSP creates an effective opportunity to close this gap. Throughout the paper,\ + \ we provide technical details of the full LDSP environment, along with insights\ + \ on the surprising performances of the first DMIs that have been designed with\ + \ it.},\n address = {Mexico City, Mexico},\n articleno = {41},\n author = {Carla\ + \ Tapparo and Brooke Chalmers and Victor Zappi},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {7},\n pages = {288--294},\n title = {Leveraging Android Phones to\ + \ Democratize Low-level Audio Programming},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_41.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177485 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Networked Musical Performance, high-fidelity audio streaming, glitch - detection, latency measurement ' - pages: 135--140 - title: Challenges and Performance of High-Fidelity Audio Streaming for Interactive - Performances - url: http://www.nime.org/proceedings/2009/nime2009_135.pdf - year: 2009 + month: May + numpages: 7 + pages: 288--294 + title: Leveraging Android 
Phones to Democratize Low-level Audio Programming + track: Papers + url: http://nime.org/proceedings/2023/nime2023_41.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Todoroff2009 - abstract: "''Extension du corps sonore'' is long-term project initiatedby Musiques\ - \ Nouvelles [4], a contemporary music ensemble in Mons. It aims at giving instrumental\ - \ music performers an extended control over the sound of their instrument byextending\ - \ the understanding of the sound body from the instrument only to the combination\ - \ of the instrument and thewhole body of the performer. The development started\ - \ atARTeM and got the benefit of a three month numediartresearch project [1] that\ - \ focused on three axes of research:pre-processing of sensor data, gesture recognition\ - \ and mapping through interpolation. The objectives were the development of computing\ - \ methods and flexible Max/MSP externals to be later integrated in the ARTeM software\ - \ framework for the concerts with viola player Dominica Eyckmans. They could be\ - \ used in a variety of other artistic worksand will be made available on the numediart\ - \ website [1],where more detailed information can be found in the Quarterly Progress\ - \ Scientific Report #4." - address: 'Pittsburgh, PA, United States' - author: 'Todoroff, Todor and Bettens, Frédéric and Reboursière, Loïc and Chu, Wen-Yang' - bibtex: "@inproceedings{Todoroff2009,\n abstract = {''Extension du corps sonore''\ - \ is long-term project initiatedby Musiques Nouvelles [4], a contemporary music\ - \ ensemble in Mons. It aims at giving instrumental music performers an extended\ - \ control over the sound of their instrument byextending the understanding of\ - \ the sound body from the instrument only to the combination of the instrument\ - \ and thewhole body of the performer. 
The development started atARTeM and got\ - \ the benefit of a three month numediartresearch project [1] that focused on three\ - \ axes of research:pre-processing of sensor data, gesture recognition and mapping\ - \ through interpolation. The objectives were the development of computing methods\ - \ and flexible Max/MSP externals to be later integrated in the ARTeM software\ - \ framework for the concerts with viola player Dominica Eyckmans. They could be\ - \ used in a variety of other artistic worksand will be made available on the numediart\ - \ website [1],where more detailed information can be found in the Quarterly Progress\ - \ Scientific Report \\#4.},\n address = {Pittsburgh, PA, United States},\n author\ - \ = {Todoroff, Todor and Bettens, Fr\\'{e}d\\'{e}ric and Reboursi\\`{e}re, Lo\\\ - \"{i}c and Chu, Wen-Yang},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177691},\n\ - \ issn = {2220-4806},\n keywords = {Sensor data pre-processing, gesture recognition,\ - \ mapping, interpolation, extension du corps sonore },\n pages = {141--146},\n\ - \ title = {''Extension du Corps Sonore'' --- Dancing Viola},\n url = {http://www.nime.org/proceedings/2009/nime2009_141.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_42 + abstract: '“Reembodied sound” refers to the electroacoustic practice of projecting + sound into resonating objects, thereby turning these objects into a kind of speaker. + The practice, which typically uses an audio transducer attached to the surface + of the object being resonated, lies in a middle-ground between loudspeaker-based + music and augmented/actuated instruments, allowing practitioners to draw upon + and fuse multiple paradigms of new and emerging technologies. This article examines + Refraction Interlude, an interactive environment for solo performer and transducer-actuated + metal percussion instruments. 
Building on a decade of reembodied sound research, + the work combines augmented and actuated instruments, physical modeling, pre-recorded + performer input, interactivity, and sound spatialization in a manner that facilitates + adaptability to performer creativity and to the acoustic properties of the actuated + instruments. The computational processes were minimized, designed to forefront + the interaction and integration between these multiple domains.' + address: 'Mexico City, Mexico' + articleno: 42 + author: Matthew Goodheart + bibtex: "@inproceedings{nime2023_42,\n abstract = {“Reembodied sound” refers to\ + \ the electroacoustic practice of projecting sound into resonating objects, thereby\ + \ turning these objects into a kind of speaker. The practice, which typically\ + \ uses an audio transducer attached to the surface of the object being resonated,\ + \ lies in a middle-ground between loudspeaker-based music and augmented/actuated\ + \ instruments, allowing practitioners to draw upon and fuse multiple paradigms\ + \ of new and emerging technologies. This article examines Refraction Interlude,\ + \ an interactive environment for solo performer and transducer-actuated metal\ + \ percussion instruments. Building on a decade of reembodied sound research, the\ + \ work combines augmented and actuated instruments, physical modeling, pre-recorded\ + \ performer input, interactivity, and sound spatialization in a manner that facilitates\ + \ adaptability to performer creativity and to the acoustic properties of the actuated\ + \ instruments. 
The computational processes were minimized, designed to forefront\ + \ the interaction and integration between these multiple domains.},\n address\ + \ = {Mexico City, Mexico},\n articleno = {42},\n author = {Matthew Goodheart},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {295--300},\n title\ + \ = {Reembodied Sound and Transducer-actuated Instruments in Refraction Interlude},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_42.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177691 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Sensor data pre-processing, gesture recognition, mapping, interpolation, - extension du corps sonore ' - pages: 141--146 - title: '''''Extension du Corps Sonore'''' --- Dancing Viola' - url: http://www.nime.org/proceedings/2009/nime2009_141.pdf - year: 2009 + month: May + numpages: 6 + pages: 295--300 + title: Reembodied Sound and Transducer-actuated Instruments in Refraction Interlude + track: Papers + url: http://nime.org/proceedings/2023/nime2023_42.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Leider2009a - abstract: 'We describe initial prototypes and a design strategy for new, user-customized - audio-manipulation and editing tools. These tools are designed to enable intuitive - control of audio-processing tasks while anthropomorphically matching the target - user. ' - address: 'Pittsburgh, PA, United States' - author: 'Leider, Colby and Mann, Doug and Plazas, Daniel and Battaglia, Michael - and Draper, Reid' - bibtex: "@inproceedings{Leider2009a,\n abstract = {We describe initial prototypes\ - \ and a design strategy for new, user-customized audio-manipulation and editing\ - \ tools. 
These tools are designed to enable intuitive control of audio-processing\ - \ tasks while anthropomorphically matching the target user. },\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Leider, Colby and Mann, Doug and Plazas, Daniel\ - \ and Battaglia, Michael and Draper, Reid},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177617},\n issn = {2220-4806},\n keywords = {user modeling,\ - \ user customization },\n pages = {147--148},\n title = {The elBo and footPad\ - \ : Toward Personalized Hardware for Audio Manipulation},\n url = {http://www.nime.org/proceedings/2009/nime2009_147.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_43 + abstract: 'In recent decades, with the innovation in sensor technology, the trend + towards smaller digital controllers for instruments has expanded. New generations + of performance styles are growing that rely on compact instruments that can travel + easily and are thus versatile. This article cites two interactive performance + practices to illustrate how larger instruments change the nature of interaction + and sonic outcomes of performance. Pressure-sensitive Floors, a wooden set of + platforms for performing electronic music, are compared with a practice on the + Renaissance violone with electronics. Large instruments offer unique additions + to performance and music making that are not accessible in small instruments. + They have their own specific affordances and limitations that affect the musical + decisions of the performer and therefore contribute unique ways of conceptualising + performance. The instruments in this paper have been chosen as the authors have + a ''sustained relationship’ with them and these practices merely act as examples + of the embodied knowledge gained through staying committed to a particular large + instrument. 
We demonstrate how with such a practice, the performance is recentered + around human presence. This offers a deeper communication between performer and + audience. It creates new avenues for the performance of contemporary music where + the entire body is engaged in movement and sounding. We argue that overlooking + large instruments in favour of their smaller counterparts would result in the + loss of a unique aesthetic as well as conceptual and performance approaches.' + address: 'Mexico City, Mexico' + articleno: 43 + author: iran sanadzadeh and Chloë Sobek + bibtex: "@inproceedings{nime2023_43,\n abstract = {In recent decades, with the innovation\ + \ in sensor technology, the trend towards smaller digital controllers for instruments\ + \ has expanded. New generations of performance styles are growing that rely on\ + \ compact instruments that can travel easily and are thus versatile. This article\ + \ cites two interactive performance practices to illustrate how larger instruments\ + \ change the nature of interaction and sonic outcomes of performance. Pressure-sensitive\ + \ Floors, a wooden set of platforms for performing electronic music, are compared\ + \ with a practice on the Renaissance violone with electronics. Large instruments\ + \ offer unique additions to performance and music making that are not accessible\ + \ in small instruments. They have their own specific affordances and limitations\ + \ that affect the musical decisions of the performer and therefore contribute\ + \ unique ways of conceptualising performance. The instruments in this paper have\ + \ been chosen as the authors have a 'sustained relationship’ with them and these\ + \ practices merely act as examples of the embodied knowledge gained through staying\ + \ committed to a particular large instrument. We demonstrate how with such a practice,\ + \ the performance is recentered around human presence. This offers a deeper communication\ + \ between performer and audience. 
It creates new avenues for the performance of\ + \ contemporary music where the entire body is engaged in movement and sounding.\ + \ We argue that overlooking large instruments in favour of their smaller counterparts\ + \ would result in the loss of a unique aesthetic as well as conceptual and performance\ + \ approaches.},\n address = {Mexico City, Mexico},\n articleno = {43},\n author\ + \ = {iran sanadzadeh and Chloë Sobek},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {6},\n pages = {301--306},\n title = {A sustained relationship with large\ + \ instruments - a case against the convenient interface},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_43.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177617 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'user modeling, user customization ' - pages: 147--148 - title: 'The elBo and footPad : Toward Personalized Hardware for Audio Manipulation' - url: http://www.nime.org/proceedings/2009/nime2009_147.pdf - year: 2009 + month: May + numpages: 6 + pages: 301--306 + title: A sustained relationship with large instruments - a case against the convenient + interface + track: Papers + url: http://nime.org/proceedings/2023/nime2023_43.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Crawford2009 - abstract: The MIDI-Airguitar is a hand held musical controller based on Force Sensing - Resister (FSR) and Accelerometer technology. The hardware and software implementation - of the MIDI-Airguitars are described below. Current practices of the authors in - performance are discussed. - address: 'Pittsburgh, PA, United States' - author: 'Crawford, Langdon and Fastenow, William D.' 
- bibtex: "@inproceedings{Crawford2009,\n abstract = {The MIDI-Airguitar is a hand\ - \ held musical controller based on Force Sensing Resister (FSR) and Accelerometer\ - \ technology. The hardware and software implementation of the MIDI-Airguitars\ - \ are described below. Current practices of the authors in performance are discussed.},\n\ - \ address = {Pittsburgh, PA, United States},\n author = {Crawford, Langdon and\ - \ Fastenow, William D.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177495},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {149--150},\n title = {The\ - \ Midi-AirGuitar , A serious Musical Controller with a Funny Name Music Technology\ - \ Program},\n url = {http://www.nime.org/proceedings/2009/nime2009_149.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_44 + abstract: 'The audiovisual installation Oscillations, turns irons and ironing boards + into electronic instruments, in an attempt to deconstruct stereotypical ideas + of gender and its assigned roles. The project aims to investigate the relationships + we have with domestic objects, and ponder their structures and significance through + the design and performance of an interactive ecosystem. The project uses a sonic + cyberfeminisms lens to critically explore aesthetic and relational hierarchies + at the intersection of sound, gender and technology. Three irons and ironing boards + have been hacked and retrofitted with embedded electronic instruments that together + create a complex feedback network. While the audience is invited to physically + interact with the irons instruments and manipulate samples, the sonic state of + the installation also changes based on the audio information detected in the environment.' 
+ address: 'Mexico City, Mexico' + articleno: 44 + author: Patty J Preece and Melania Jack and Giacomo Lepri + bibtex: "@inproceedings{nime2023_44,\n abstract = {The audiovisual installation\ + \ Oscillations, turns irons and ironing boards into electronic instruments, in\ + \ an attempt to deconstruct stereotypical ideas of gender and its assigned roles.\ + \ The project aims to investigate the relationships we have with domestic objects,\ + \ and ponder their structures and significance through the design and performance\ + \ of an interactive ecosystem. The project uses a sonic cyberfeminisms lens to\ + \ critically explore aesthetic and relational hierarchies at the intersection\ + \ of sound, gender and technology. Three irons and ironing boards have been hacked\ + \ and retrofitted with embedded electronic instruments that together create a\ + \ complex feedback network. While the audience is invited to physically interact\ + \ with the irons instruments and manipulate samples, the sonic state of the installation\ + \ also changes based on the audio information detected in the environment.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {44},\n author = {Patty J Preece\ + \ and Melania Jack and Giacomo Lepri},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {7},\n pages = {307--313},\n title = {Oscillations: Composing a Performance\ + \ Ecosystem through a Sonic Cyberfeminist Lens },\n track = {Papers},\n url =\ + \ {http://nime.org/proceedings/2023/nime2023_44.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177495 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 149--150 - title: 'The Midi-AirGuitar , A serious Musical Controller with a 
Funny Name Music - Technology Program' - url: http://www.nime.org/proceedings/2009/nime2009_149.pdf - year: 2009 + month: May + numpages: 7 + pages: 307--313 + title: 'Oscillations: Composing a Performance Ecosystem through a Sonic Cyberfeminist + Lens ' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_44.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Bottcher2009 - abstract: 'In this poster we present the early prototype of the augmented Psychophone - --- a saxophone with various applied sensors, allowing the saxophone player to - attach effects like pitch shifting, wah-wah and ring modulation to the saxophone, - simply by moving the saxophone as one would do when really being enthusiastic - and involved in the performance. The possibility of scratching on the previously - recorded sound is also possible directly on the saxophone. ' - address: 'Pittsburgh, PA, United States' - author: 'Böttcher, Niels and Dimitrov, Smilen' - bibtex: "@inproceedings{Bottcher2009,\n abstract = {In this poster we present the\ - \ early prototype of the augmented Psychophone --- a saxophone with various applied\ - \ sensors, allowing the saxophone player to attach effects like pitch shifting,\ - \ wah-wah and ring modulation to the saxophone, simply by moving the saxophone\ - \ as one would do when really being enthusiastic and involved in the performance.\ - \ The possibility of scratching on the previously recorded sound is also possible\ - \ directly on the saxophone. },\n address = {Pittsburgh, PA, United States},\n\ - \ author = {B\\''{o}ttcher, Niels and Dimitrov, Smilen},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177467},\n issn = {2220-4806},\n keywords = {Augmented\ - \ saxophone, Physical computing, hyper instruments, mapping. 
},\n pages = {151--152},\n\ - \ title = {An Early Prototype of the Augmented PsychoPhone},\n url = {http://www.nime.org/proceedings/2009/nime2009_151.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_45 + abstract: 'In this article, we explore practical and artistic considerations of + instrument design and the creation of an instrument ensemble control system for + The Furies: A LaptOpera, an opera for laptop orchestra and live vocalists based + on the Greek tragedy Electra. We outline the artistic principles that guided + the creation of the rope instrument and, specifically, our use of instrument design + to forge direct and visceral connections between the music, the narrative, and + the relationship between characters. This discussion is followed by an overview + of the practical considerations that inspired the creation of an instrument ensemble + control system for the opera and the principles that guided this system''s design. + Through a detailed description of the development of the rope instrument, the + growth of this instrument through the course of the opera, and the design of the + instrument ensemble control system, this paper offers tools and reflections on + the potential of instrument design to invigorate an embodied connection to opera + and useful design strategies to support rehearsal and performance of evening-length + multimedia works.' + address: 'Mexico City, Mexico' + articleno: 45 + author: Anne K Hege and Curtis Ullerich + bibtex: "@inproceedings{nime2023_45,\n abstract = {In this article, we explore practical\ + \ and artistic considerations of instrument design and the creation of an instrument\ + \ ensemble control system for The Furies: A LaptOpera, an opera for laptop orchestra\ + \ and live vocalists based on the Greek tragedy Electra. 
We outline the artistic\ + \ principles that guided the creation of the rope instrument and, specifically,\ + \ our use of instrument design to forge direct and visceral connections between\ + \ the music, the narrative, and the relationship between characters. This discussion\ + \ is followed by an overview of the practical considerations that inspired the\ + \ creation of an instrument ensemble control system for the opera and the principles\ + \ that guided this system's design. Through a detailed description of the development\ + \ of the rope instrument, the growth of this instrument through the course of\ + \ the opera, and the design of the instrument ensemble control system, this paper\ + \ offers tools and reflections on the potential of instrument design to invigorate\ + \ an embodied connection to opera and useful design strategies to support rehearsal\ + \ and performance of evening-length multimedia works.},\n address = {Mexico City,\ + \ Mexico},\n articleno = {45},\n author = {Anne K Hege and Curtis Ullerich},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {5},\n pages = {314--318},\n title\ + \ = {Principles of Instrument and System Design for LaptOperas},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_45.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177467 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Augmented saxophone, Physical computing, hyper instruments, mapping. 
' - pages: 151--152 - title: An Early Prototype of the Augmented PsychoPhone - url: http://www.nime.org/proceedings/2009/nime2009_151.pdf - year: 2009 + month: May + numpages: 5 + pages: 314--318 + title: Principles of Instrument and System Design for LaptOperas + track: Papers + url: http://nime.org/proceedings/2023/nime2023_45.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Siwiak2009 - abstract: 'Catch Your Breath is an interactive audiovisual bio-feedbacksystem adapted - from a project designed to reduce respiratory irregularity in patients undergoing - 4D CT scans for oncological diagnosis. The system is currently implementedand - assessed as a potential means to reduce motion-induceddistortion in CT images.A - museum installation based on the same principle wascreated in which an inexpensive - wall-mounted web camera tracks an IR sensor embedded into a pendant worn bythe - user. The motion of the subjects breathing is trackedand interpreted as a real-time - variable tempo adjustment toa stored musical file. The subject can then adjust - his/herbreathing to synchronize with a separate accompanimentline. When the breathing - is regular and is at the desiredtempo, the audible result sounds synchronous and - harmonious. The accompaniment''s tempo progresses and gradually decrease which - causes the breathing to synchronize andslow down, thus increasing relaxation.' - address: 'Pittsburgh, PA, United States' - author: 'Siwiak, Diana and Berger, Jonathan and Yang, Yao' - bibtex: "@inproceedings{Siwiak2009,\n abstract = {Catch Your Breath is an interactive\ - \ audiovisual bio-feedbacksystem adapted from a project designed to reduce respiratory\ - \ irregularity in patients undergoing 4D CT scans for oncological diagnosis. 
The\ - \ system is currently implementedand assessed as a potential means to reduce motion-induceddistortion\ - \ in CT images.A museum installation based on the same principle wascreated in\ - \ which an inexpensive wall-mounted web camera tracks an IR sensor embedded into\ - \ a pendant worn bythe user. The motion of the subjects breathing is trackedand\ - \ interpreted as a real-time variable tempo adjustment toa stored musical file.\ - \ The subject can then adjust his/herbreathing to synchronize with a separate\ - \ accompanimentline. When the breathing is regular and is at the desiredtempo,\ - \ the audible result sounds synchronous and harmonious. The accompaniment's tempo\ - \ progresses and gradually decrease which causes the breathing to synchronize\ - \ andslow down, thus increasing relaxation.},\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Siwiak, Diana and Berger, Jonathan and Yang, Yao},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177675},\n issn = {2220-4806},\n keywords\ - \ = {sensor, music, auditory display. },\n pages = {153--154},\n title = {Catch\ - \ Your Breath --- Musical Biofeedback for Breathing Regulation},\n url = {http://www.nime.org/proceedings/2009/nime2009_153.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_46 + abstract: 'For several decades NIME community has always been appropriating machine + learning (ML) to apply for various tasks such as gesture-sound mapping or sound + synthesis for digital musical instruments. Recently, the use of ML methods seems + to have increased and the objectives have diversified. Despite its increasing + use, few contributions have studied what constitutes the culture of learning technologies + for this specific practice. This paper presents an analysis of 69 contributions + selected from a systematic review of the NIME conference over the last 10 years. 
+ This paper aims at analysing the practices involving ML in terms of the techniques + and the task used and the ways to interact this technology. It thus contributes + to a deeper understanding of the specific goals and motivation in using ML for + musical expression. This study allows us to propose new perspectives in the practice + of these techniques.' + address: 'Mexico City, Mexico' + articleno: 46 + author: Théo Jourdan and Baptiste Caramiaux + bibtex: "@inproceedings{nime2023_46,\n abstract = {For several decades NIME community\ + \ has always been appropriating machine learning (ML) to apply for various tasks\ + \ such as gesture-sound mapping or sound synthesis for digital musical instruments.\ + \ Recently, the use of ML methods seems to have increased and the objectives have\ + \ diversified. Despite its increasing use, few contributions have studied what\ + \ constitutes the culture of learning technologies for this specific practice.\ + \ This paper presents an analysis of 69 contributions selected from a systematic\ + \ review of the NIME conference over the last 10 years. This paper aims at analysing\ + \ the practices involving ML in terms of the techniques and the task used and\ + \ the ways to interact this technology. It thus contributes to a deeper understanding\ + \ of the specific goals and motivation in using ML for musical expression. 
This\ + \ study allows us to propose new perspectives in the practice of these techniques.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {46},\n author = {Théo Jourdan\ + \ and Baptiste Caramiaux},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {13},\n\ + \ pages = {319--331},\n title = {Machine Learning for Musical Expression: A Systematic\ + \ Literature Review},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_46.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177675 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'sensor, music, auditory display. ' - pages: 153--154 - title: Catch Your Breath --- Musical Biofeedback for Breathing Regulation - url: http://www.nime.org/proceedings/2009/nime2009_153.pdf - year: 2009 + month: May + numpages: 13 + pages: 319--331 + title: 'Machine Learning for Musical Expression: A Systematic Literature Review' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_46.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Peng2009 - abstract: 'With the increase of sales of Wii game consoles, it is becoming commonplace - for the Wii remote to be used as analternative input device for other computer - systems. In thispaper, we present a system which makes use of the infraredcamera - within the Wii remote to capture the gestures of aconductor using a baton with - an infrared LED and battery.Our system then performs data analysis with gesture - classification and following, and finally displays the gestures using visual baton - trajectories and audio feedback. Gesturetrajectories are displayed in real time - and can be comparedto the corresponding diagram shown in a textbook. 
In addition, - since a conductor normally does not look at a screenwhile conducting, tones are - played to represent a certainbeat in a conducting gesture. Further, the system - can be controlled entirely with the baton, removing the need to switchfrom baton - to mouse. The interface is intended to be usedfor pedagogy purposes.' - address: 'Pittsburgh, PA, United States' - author: 'Peng, Lijuan and Gerhard, David' - bibtex: "@inproceedings{Peng2009,\n abstract = {With the increase of sales of Wii\ - \ game consoles, it is becoming commonplace for the Wii remote to be used as analternative\ - \ input device for other computer systems. In thispaper, we present a system which\ - \ makes use of the infraredcamera within the Wii remote to capture the gestures\ - \ of aconductor using a baton with an infrared LED and battery.Our system then\ - \ performs data analysis with gesture classification and following, and finally\ - \ displays the gestures using visual baton trajectories and audio feedback. Gesturetrajectories\ - \ are displayed in real time and can be comparedto the corresponding diagram shown\ - \ in a textbook. In addition, since a conductor normally does not look at a screenwhile\ - \ conducting, tones are played to represent a certainbeat in a conducting gesture.\ - \ Further, the system can be controlled entirely with the baton, removing the\ - \ need to switchfrom baton to mouse. 
The interface is intended to be usedfor pedagogy\ - \ purposes.},\n address = {Pittsburgh, PA, United States},\n author = {Peng, Lijuan\ - \ and Gerhard, David},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177659},\n\ - \ issn = {2220-4806},\n keywords = {Conducting, Gesture, Infrared, Learning, Wii.\ - \ },\n pages = {155--156},\n title = {A Wii-Based Gestural Interface for Computer\ - \ Conducting Systems},\n url = {http://www.nime.org/proceedings/2009/nime2009_155.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_47 + abstract: 'For several years, the various practices around ML techniques have been + increasingly present and diversified. However, the literature associated with + these techniques rarely reveals the cultural and political sides of these practices. + In order to explore how practitioners in the NIME community engage with ML techniques, + we conducted interviews with seven researchers in the NIME community and analysed + them through a thematic analysis. Firstly, we propose findings at the level of + the individual, resisting technological determinism and redefining sense making + in interactive ML. Secondly, we propose findings at the level of the community, + revealing mitigated adoption with respect to ML. This paper aims to provide the + community with some reflections on the use of ML in order to initiate a discussion + about cultural, political and ethical issues surrounding these techniques as their + use grows within the community.' + address: 'Mexico City, Mexico' + articleno: 47 + author: Théo Jourdan and Baptiste Caramiaux + bibtex: "@inproceedings{nime2023_47,\n abstract = {For several years, the various\ + \ practices around ML techniques have been increasingly present and diversified.\ + \ However, the literature associated with these techniques rarely reveals the\ + \ cultural and political sides of these practices. 
In order to explore how practitioners\ + \ in the NIME community engage with ML techniques, we conducted interviews with\ + \ seven researchers in the NIME community and analysed them through a thematic\ + \ analysis. Firstly, we propose findings at the level of the individual, resisting\ + \ technological determinism and redefining sense making in interactive ML. Secondly,\ + \ we propose findings at the level of the community, revealing mitigated adoption\ + \ with respect to ML. This paper aims to provide the community with some reflections\ + \ on the use of ML in order to initiate a discussion about cultural, political\ + \ and ethical issues surrounding these techniques as their use grows within the\ + \ community.},\n address = {Mexico City, Mexico},\n articleno = {47},\n author\ + \ = {Théo Jourdan and Baptiste Caramiaux},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {7},\n pages = {332--338},\n title = {Culture and Politics of Machine Learning\ + \ in NIME: A Preliminary Qualitative Inquiry},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_47.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177659 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Conducting, Gesture, Infrared, Learning, Wii. 
' - pages: 155--156 - title: A Wii-Based Gestural Interface for Computer Conducting Systems - url: http://www.nime.org/proceedings/2009/nime2009_155.pdf - year: 2009 + month: May + numpages: 7 + pages: 332--338 + title: 'Culture and Politics of Machine Learning in NIME: A Preliminary Qualitative + Inquiry' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_47.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Parson2009 - abstract: '''''Music for 32 Chess Pieces'''' is a software system that supports - composing, performing and improvising music by playing a chess game. A game server - stores a representation of the state of a game, validates proposed moves by players, - updates game state, and extracts a graph of piece-to-piece relationships. It also - loads a plugin code module that acts as a composition. A plugin maps pieces and - relationships on the board, such as support or attack relationships, to a timed - sequence of notes and accents. The server transmits notes in a sequence to an - audio renderer process via network datagrams. Two players can perform a composition - by playing chess, and a player can improvise by adjusting a plugin''s music mapping - parameters via a graphical user interface. A composer can create a new composition - by writing a new plugin that uses a distinct algorithm for mapping game rules - and states to music. A composer can also write a new note-to-sound mapping program - in the audio renderer language. This software is available at http://faculty.kutztown.edu/parson/music/ParsonMusic.html. ' - address: 'Pittsburgh, PA, United States' - author: 'Parson, Dale E.' - bibtex: "@inproceedings{Parson2009,\n abstract = {''Music for 32 Chess Pieces''\ - \ is a software system that supports composing, performing and improvising music\ - \ by playing a chess game. 
A game server stores a representation of the state\ - \ of a game, validates proposed moves by players, updates game state, and extracts\ - \ a graph of piece-to-piece relationships. It also loads a plugin code module\ - \ that acts as a composition. A plugin maps pieces and relationships on the board,\ - \ such as support or attack relationships, to a timed sequence of notes and accents.\ - \ The server transmits notes in a sequence to an audio renderer process via network\ - \ datagrams. Two players can perform a composition by playing chess, and a player\ - \ can improvise by adjusting a plugin's music mapping parameters via a graphical\ - \ user interface. A composer can create a new composition by writing a new plugin\ - \ that uses a distinct algorithm for mapping game rules and states to music. A\ - \ composer can also write a new note-to-sound mapping program in the audio renderer\ - \ language. This software is available at http://faculty.kutztown.edu/parson/music/ParsonMusic.html.\ - \ },\n address = {Pittsburgh, PA, United States},\n author = {Parson, Dale E.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177653},\n issn = {2220-4806},\n\ - \ keywords = {algorithmic composition, chess, ChucK, improvisation, Max/MSP, SuperCollider.\ - \ },\n pages = {157--158},\n title = {Chess-Based Composition and Improvisation\ - \ for Non-Musicians},\n url = {http://www.nime.org/proceedings/2009/nime2009_157.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_48 + abstract: 'Subtlety and detail are fundamental to what makes musical instruments + special, and worth dedicating a life''s practice to, for designer, maker, player + and listener alike. 
However, research into digital musical instrument (DMI) design + tools and processes have so far mainly focused on high-level conceptual concerns + and low-level technical abstractions, leaving subtlety and detail underexplored + and undervalued. These nuances, and the processes they result from, cannot be + fully articulated in words alone, yet they largely define an instrument''s quality, + and it is therefore important to understand how they come to be. We introduce + a scale-based ontology that divides design details into three levels - macro, + meso and micro - and we present a literature review of DMI design from the perspective + of this ontology. Finally we extrapolate the ontology to consider its utility + in broader contexts, and consider future directions.' + address: 'Mexico City, Mexico' + articleno: 48 + author: Jack Armitage and Thor Magnusson and Andrew McPherson + bibtex: "@inproceedings{nime2023_48,\n abstract = {Subtlety and detail are fundamental\ + \ to what makes musical instruments special, and worth dedicating a life's practice\ + \ to, for designer, maker, player and listener alike. However, research into digital\ + \ musical instrument (DMI) design tools and processes have so far mainly focused\ + \ on high-level conceptual concerns and low-level technical abstractions, leaving\ + \ subtlety and detail underexplored and undervalued. These nuances, and the processes\ + \ they result from, cannot be fully articulated in words alone, yet they largely\ + \ define an instrument's quality, and it is therefore important to understand\ + \ how they come to be. We introduce a scale-based ontology that divides design\ + \ details into three levels - macro, meso and micro - and we present a literature\ + \ review of DMI design from the perspective of this ontology. 
Finally we extrapolate\ + \ the ontology to consider its utility in broader contexts, and consider future\ + \ directions.},\n address = {Mexico City, Mexico},\n articleno = {48},\n author\ + \ = {Jack Armitage and Thor Magnusson and Andrew McPherson},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {11},\n pages = {339--349},\n title = {A Scale-Based Ontology\ + \ of Musical Instrument Design},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_48.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177653 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'algorithmic composition, chess, ChucK, improvisation, Max/MSP, SuperCollider. ' - pages: 157--158 - title: Chess-Based Composition and Improvisation for Non-Musicians - url: http://www.nime.org/proceedings/2009/nime2009_157.pdf - year: 2009 + month: May + numpages: 11 + pages: 339--349 + title: A Scale-Based Ontology of Musical Instrument Design + track: Papers + url: http://nime.org/proceedings/2023/nime2023_48.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Dolphin2009a - abstract: 'This paper reports on work in progress on the creativeproject MagNular, - part of a wider practical study of thepotential collaborative compositional applications - of gameengine technologies. MagNular is a sound toy utilizingcomputer game and - physics engine technologies to createan animated interface used in conjunction - with an externalsound engine developed within Max/MSP. The playercontrols virtual - magnets that attract or repel numerousparticle objects, moving them freely around - the virtualspace. 
Particle object collision data is mapped to controlsound onsets - and synthesis/DSP (Digital SignalProcessing) parameters. The user "composes" bycontrolling - and influencing the simulated physicalbehaviors of the particle objects within - the animatedinterface.' - address: 'Pittsburgh, PA, United States' - author: 'Dolphin, Andy' - bibtex: "@inproceedings{Dolphin2009a,\n abstract = {This paper reports on work in\ - \ progress on the creativeproject MagNular, part of a wider practical study of\ - \ thepotential collaborative compositional applications of gameengine technologies.\ - \ MagNular is a sound toy utilizingcomputer game and physics engine technologies\ - \ to createan animated interface used in conjunction with an externalsound engine\ - \ developed within Max/MSP. The playercontrols virtual magnets that attract or\ - \ repel numerousparticle objects, moving them freely around the virtualspace.\ - \ Particle object collision data is mapped to controlsound onsets and synthesis/DSP\ - \ (Digital SignalProcessing) parameters. The user \"composes\" bycontrolling and\ - \ influencing the simulated physicalbehaviors of the particle objects within the\ - \ animatedinterface.},\n address = {Pittsburgh, PA, United States},\n author =\ - \ {Dolphin, Andy},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177499},\n\ - \ issn = {2220-4806},\n keywords = {Sound Toys, Open Work, Game Engines, Animated\ - \ Interfaces, Max/MSP. 
},\n pages = {159--160},\n title = {MagNular : Symbolic\ - \ Control of an External Sound Engine Using an Animated Interface},\n url = {http://www.nime.org/proceedings/2009/nime2009_159.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_49 + abstract: 'As the field around computer-mediated musical interaction drives attention + to its sociotechnical, political and epistemological exigencies, it becomes important + to be guided by disability studies, and for researchers and designers of accessible + digital musical instruments (ADMIs) to foreground the lived experience of disabled + musicians. This resonates with the movement to promote disability justice in HCI. + In this paper, we introduce a case study of the design of a string-less guitar, + which was developed in collaboration with a guitarist who lost his ability to + play due to impairment. We present this work as an exploration of the Rashomon + effect, a term that refers to the phenomenon of multiple witnesses describing + the same event from their own perspective. We argue that the Rashomon effect is + a useful way to explore how digital musical instrument (DMI) designers respond + to NIME''s interdisciplinarity, and to reflect on how we produce and transmit + knowledge within our field.' + address: 'Mexico City, Mexico' + articleno: 49 + author: Eevee Zayas-Garin and Charlotte Nordmoen and Andrew McPherson + bibtex: "@inproceedings{nime2023_49,\n abstract = {As the field around computer-mediated\ + \ musical interaction drives attention to its sociotechnical, political and epistemological\ + \ exigencies, it becomes important to be guided by disability studies, and for\ + \ researchers and designers of accessible digital musical instruments (ADMIs)\ + \ to foreground the lived experience of disabled musicians. This resonates with\ + \ the movement to promote disability justice in HCI. 
In this paper, we introduce\ + \ a case study of the design of a string-less guitar, which was developed in collaboration\ + \ with a guitarist who lost his ability to play due to impairment. We present\ + \ this work as an exploration of the Rashomon effect, a term that refers to the\ + \ phenomenon of multiple witnesses describing the same event from their own perspective.\ + \ We argue that the Rashomon effect is a useful way to explore how digital musical\ + \ instrument (DMI) designers respond to NIME's interdisciplinarity, and to reflect\ + \ on how we produce and transmit knowledge within our field.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {49},\n author = {Eevee Zayas-Garin and Charlotte\ + \ Nordmoen and Andrew McPherson},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {8},\n pages = {350--357},\n title = {Transmitting Digital Lutherie Knowledge:\ + \ The Rashomon Effect for DMI Designers},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_49.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177499 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Sound Toys, Open Work, Game Engines, Animated Interfaces, Max/MSP. 
' - pages: 159--160 - title: 'MagNular : Symbolic Control of an External Sound Engine Using an Animated - Interface' - url: http://www.nime.org/proceedings/2009/nime2009_159.pdf - year: 2009 + month: May + numpages: 8 + pages: 350--357 + title: 'Transmitting Digital Lutherie Knowledge: The Rashomon Effect for DMI Designers' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_49.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Feehan2009 - abstract: 'AUDIO ORIENTEERING is a collaborative performance environment in which - physical tokens are used to navigate an invisible sonic landscape. In this paper, - I describe the hardware and software used to implement a prototype audio terrain - with multiple interaction modes and sonic behaviors mapped onto three-dimensional - space. ' - address: 'Pittsburgh, PA, United States' - author: 'Feehan, Noah' - bibtex: "@inproceedings{Feehan2009,\n abstract = {AUDIO ORIENTEERING is a collaborative\ - \ performance environment in which physical tokens are used to navigate an invisible\ - \ sonic landscape. In this paper, I describe the hardware and software used to\ - \ implement a prototype audio terrain with multiple interaction modes and sonic\ - \ behaviors mapped onto three-dimensional space. },\n address = {Pittsburgh, PA,\ - \ United States},\n author = {Feehan, Noah},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177505},\n issn = {2220-4806},\n keywords = {wii, 3-d positioning,\ - \ audio terrain, collaborative performance. },\n pages = {161--162},\n title =\ - \ {Audio Orienteering -- Navigating an Invisible Terrain},\n url = {http://www.nime.org/proceedings/2009/nime2009_161.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_50 + abstract: 'Learning to play a digital musical instrument (DMI) may be affected by + the acoustic behaviour of that instrument, in addition to its physical characteristics + and form. 
However, how the timbral properties of an instrument affect learning + has received little systematic empirical research. In an exploratory study, we + assessed whether timbral feedback from a physical model based percussive DMI influences + beginner players’ performance in a musical learning task. We contrasted the timbral + richness of a metallic plate physical model with an amplitude modulated pink-noise + signal that was comparable in response to input controls but with relatively reduced + timbral features. Two groups of participants practiced three sets of simple beats + using their respective version of the instrument (physical model or pink noise), + over the course of an hour. Their performance was recorded throughout and assessed + in the form of rhythmic timing accuracy. Results showed that participants’ performance + in both sound groups significantly improved throughout the task. Timing accuracy + was significantly better in the physical model group for one out of three sets + of beats. We argue that the timbral feedback of a musical instrument may influence + beginner’s playing experience, encouraging further research into how this could + benefit DMI design.' + address: 'Mexico City, Mexico' + articleno: 50 + author: Olivia B Smith and Matthew Rodger and Maarten van Walstijn and Miguel Ortiz + bibtex: "@inproceedings{nime2023_50,\n abstract = {Learning to play a digital musical\ + \ instrument (DMI) may be affected by the acoustic behaviour of that instrument,\ + \ in addition to its physical characteristics and form. However, how the timbral\ + \ properties of an instrument affect learning has received little systematic empirical\ + \ research. In an exploratory study, we assessed whether timbral feedback from\ + \ a physical model based percussive DMI influences beginner players’ performance\ + \ in a musical learning task. 
We contrasted the timbral richness of a metallic\ + \ plate physical model with an amplitude modulated pink-noise signal that was\ + \ comparable in response to input controls but with relatively reduced timbral\ + \ features. Two groups of participants practiced three sets of simple beats using\ + \ their respective version of the instrument (physical model or pink noise), over\ + \ the course of an hour. Their performance was recorded throughout and assessed\ + \ in the form of rhythmic timing accuracy. Results showed that participants’ performance\ + \ in both sound groups significantly improved throughout the task. Timing accuracy\ + \ was significantly better in the physical model group for one out of three sets\ + \ of beats. We argue that the timbral feedback of a musical instrument may influence\ + \ beginner’s playing experience, encouraging further research into how this could\ + \ benefit DMI design.},\n address = {Mexico City, Mexico},\n articleno = {50},\n\ + \ author = {Olivia B Smith and Matthew Rodger and Maarten van Walstijn and Miguel\ + \ Ortiz},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ + \ issn = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {358--363},\n\ + \ title = {Sound guiding action: the effect of timbre on learning a new percussive\ + \ DMI for beginner musicians},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_50.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177505 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'wii, 3-d positioning, audio terrain, collaborative performance. 
' - pages: 161--162 - title: Audio Orienteering -- Navigating an Invisible Terrain - url: http://www.nime.org/proceedings/2009/nime2009_161.pdf - year: 2009 + month: May + numpages: 6 + pages: 358--363 + title: 'Sound guiding action: the effect of timbre on learning a new percussive + DMI for beginner musicians' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_50.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: DeJong2009 - abstract: 'This paper presents developments in the technology underlying the cyclotactor, - a finger-based tactile I/O device for musical interaction. These include significant - improvements both in the basic characteristics of tactile interaction and in the - related (vibro)tactile sample rates, latencies, and timing precision. After presenting - the new prototype''s tactile output force landscape, some of the new possibilities - for interaction are discussed, especially those for musical interaction with zero - audio/tactile latency.' - address: 'Pittsburgh, PA, United States' - author: 'de Jong, Staas' - bibtex: "@inproceedings{DeJong2009,\n abstract = {This paper presents developments\ - \ in the technology underlying the cyclotactor, a finger-based tactile I/O device\ - \ for musical interaction. These include significant improvements both in the\ - \ basic characteristics of tactile interaction and in the related (vibro)tactile\ - \ sample rates, latencies, and timing precision. After presenting the new prototype's\ - \ tactile output force landscape, some of the new possibilities for interaction\ - \ are discussed, especially those for musical interaction with zero audio/tactile\ - \ latency.},\n address = {Pittsburgh, PA, United States},\n author = {de Jong,\ - \ Staas},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177591},\n issn = {2220-4806},\n\ - \ keywords = {Musical controller, tactile interface. 
},\n pages = {163--164},\n\ - \ title = {Developing the Cyclotactor},\n url = {http://www.nime.org/proceedings/2009/nime2009_163.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_51 + abstract: 'This paper describes the latest iteration of signal path routing and + mixing control for the halldorophone, an experimental electro-acoustic string + instrument intended for music making with string feedback and describes the design + thinking behind the work which is informed by long term contact with dedicated + users. Specifically, here we discuss the intended “feel” or ergodynamic design + of how the affordances of the instrument are presented and the delicate task of + reducing cognitive load for early use while not limiting options for expert users.' + address: 'Mexico City, Mexico' + articleno: 51 + author: Halldor Ulfarsson + bibtex: "@inproceedings{nime2023_51,\n abstract = {This paper describes the latest\ + \ iteration of signal path routing and mixing control for the halldorophone, an\ + \ experimental electro-acoustic string instrument intended for music making with\ + \ string feedback and describes the design thinking behind the work which is informed\ + \ by long term contact with dedicated users. 
Specifically, here we discuss the\ + \ intended “feel” or ergodynamic design of how the affordances of the instrument\ + \ are presented and the delicate task of reducing cognitive load for early use\ + \ while not limiting options for expert users.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {51},\n author = {Halldor Ulfarsson},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {7},\n pages = {364--370},\n title = {Ergodynamics of\ + \ String Feedback},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_51.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177591 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Musical controller, tactile interface. ' - pages: 163--164 - title: Developing the Cyclotactor - url: http://www.nime.org/proceedings/2009/nime2009_163.pdf - year: 2009 + month: May + numpages: 7 + pages: 364--370 + title: Ergodynamics of String Feedback + track: Papers + url: http://nime.org/proceedings/2023/nime2023_51.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Schiesser2009 - abstract: 'A MIDI-to-OSC converter is implemented on a commercially available embedded - linux system, tighly integratedwith a microcontroller. A layered method is developed - whichpermits the conversion of serial data such as MIDI to OSCformatted network - packets with an overall system latencybelow 5 milliseconds for common MIDI messages.The - Gumstix embedded computer provide an interesting and modular platform for the - development of such anembedded applications. 
The project shows great potentialto - evolve into a generic sensors-to-OSC ethernet converterwhich should be very useful - for artistic purposes and couldbe used as a fast prototyping interface for gesture - acquisitiondevices.' - address: 'Pittsburgh, PA, United States' - author: 'Schiesser, Sébastien' - bibtex: "@inproceedings{Schiesser2009,\n abstract = {A MIDI-to-OSC converter is\ - \ implemented on a commercially available embedded linux system, tighly integratedwith\ - \ a microcontroller. A layered method is developed whichpermits the conversion\ - \ of serial data such as MIDI to OSCformatted network packets with an overall\ - \ system latencybelow 5 milliseconds for common MIDI messages.The Gumstix embedded\ - \ computer provide an interesting and modular platform for the development of\ - \ such anembedded applications. The project shows great potentialto evolve into\ - \ a generic sensors-to-OSC ethernet converterwhich should be very useful for artistic\ - \ purposes and couldbe used as a fast prototyping interface for gesture acquisitiondevices.},\n\ - \ address = {Pittsburgh, PA, United States},\n author = {Schiesser, S\\'{e}bastien},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177669},\n issn = {2220-4806},\n\ - \ keywords = {MIDI, Open Sound Control, converter, gumstix },\n pages = {165--168},\n\ - \ title = {midOSC : a Gumstix-Based {MIDI-to-OSC} Converter},\n url = {http://www.nime.org/proceedings/2009/nime2009_165.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_52 + abstract: 'This paper presents a new interface – Laser Phase Synthesis — designed + for audiovisual performance expression. The instrument is informed by the historical + Audio/Video/Laser system developed by Lowell Cross and Carson Jeffries for use + by David Tudor and Experiments in Arts and Technology (E.A.T.) at the 1970 Japan + World Exposition in Osaka, Japan. 
The current work employs digital audio synthesis, + modern laser display technology, and close collaboration be- tween sound and image + composition to illustrate the har- monic progression of a musical work. The authors + present a micro-history of audiovisual laser displays, a brief introduction to + the process of drawing visual figures with sound, a description of the Pure Data + software and laser display hardware systems used for the Laser Phase Synthesis + instrument, and a discussion of how this instrument shaped the composition process + of one audiovisual performance of electroacoustic music. The paper concludes with + speculations on how the system can be further developed with other kinds of live + performers, specifically vocalists.' + address: 'Mexico City, Mexico' + articleno: 52 + author: Derek Holzer and Luka Aron and Andre Holzapfel + bibtex: "@inproceedings{nime2023_52,\n abstract = {This paper presents a new interface\ + \ – Laser Phase Synthesis — designed for audiovisual performance expression. The\ + \ instrument is informed by the historical Audio/Video/Laser system developed\ + \ by Lowell Cross and Carson Jeffries for use by David Tudor and Experiments in\ + \ Arts and Technology (E.A.T.) at the 1970 Japan World Exposition in Osaka, Japan.\ + \ The current work employs digital audio synthesis, modern laser display technology,\ + \ and close collaboration be- tween sound and image composition to illustrate\ + \ the har- monic progression of a musical work. The authors present a micro-history\ + \ of audiovisual laser displays, a brief introduction to the process of drawing\ + \ visual figures with sound, a description of the Pure Data software and laser\ + \ display hardware systems used for the Laser Phase Synthesis instrument, and\ + \ a discussion of how this instrument shaped the composition process of one audiovisual\ + \ performance of electroacoustic music. 
The paper concludes with speculations\ + \ on how the system can be further developed with other kinds of live performers,\ + \ specifically vocalists.},\n address = {Mexico City, Mexico},\n articleno = {52},\n\ + \ author = {Derek Holzer and Luka Aron and Andre Holzapfel},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {8},\n pages = {371--378},\n title = {Laser Phase Synthesis},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_52.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177669 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'MIDI, Open Sound Control, converter, gumstix ' - pages: 165--168 - title: 'midOSC : a Gumstix-Based MIDI-to-OSC Converter' - url: http://www.nime.org/proceedings/2009/nime2009_165.pdf - year: 2009 + month: May + numpages: 8 + pages: 371--378 + title: Laser Phase Synthesis + track: Papers + url: http://nime.org/proceedings/2023/nime2023_52.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Nagashima2009 - abstract: 'This is a technical and experimental report of parallel processing, using - the "Propeller" chip. Its eight 32 bits processors (cogs) can operate simultaneously, - either independently or cooperatively, sharing common resources through a central - hub. I introduce this unique processor and discuss about the possibility to develop - interactive systems and smart interfaces in media arts, because we need many kinds - of tasks at a same time with NIMErelated systems and installations. 
I will report - about (1) Propeller chip and its powerful IDE, (2) external interfaces for analog/digital - inputs/outputs, (3) VGA/NTSC/PAL video generation, (4) audio signal processing, - and (5) originally-developed MIDI input/output method. I also introduce three - experimental prototype systems.' - address: 'Pittsburgh, PA, United States' - author: 'Nagashima, Yoichi' - bibtex: "@inproceedings{Nagashima2009,\n abstract = {This is a technical and experimental\ - \ report of parallel processing, using the \"Propeller\" chip. Its eight 32 bits\ - \ processors (cogs) can operate simultaneously, either independently or cooperatively,\ - \ sharing common resources through a central hub. I introduce this unique processor\ - \ and discuss about the possibility to develop interactive systems and smart interfaces\ - \ in media arts, because we need many kinds of tasks at a same time with NIMErelated\ - \ systems and installations. I will report about (1) Propeller chip and its powerful\ - \ IDE, (2) external interfaces for analog/digital inputs/outputs, (3) VGA/NTSC/PAL\ - \ video generation, (4) audio signal processing, and (5) originally-developed\ - \ MIDI input/output method. I also introduce three experimental prototype systems.},\n\ - \ address = {Pittsburgh, PA, United States},\n author = {Nagashima, Yoichi},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177635},\n issn = {2220-4806},\n\ - \ keywords = {Propeller, parallel processing, MIDI, sensor, interfaces. 
},\n pages\ - \ = {169--170},\n title = {Parallel Processing System Design with \"Propeller\"\ - \ Processor},\n url = {http://www.nime.org/proceedings/2009/nime2009_169.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_53 + abstract: 'This article presents Jacdac-for-Max: a cross-platform, open-source set + of node.js scripts and custom Cycling ’74 Max objects which enable the use of + Jacdac, an open, modular plug-and-play hardware prototyping platform, with Max + visual programming language frequently used for audio-visual applications. We + discuss the design and implementation of Jacdac-for-Max, and explore a number + of example applications. Through this we show how Jacdac-for-Max can be used to + rapidly prototype digital musical interfaces based on a range of input devices. + Additionally, we discuss these qualities within the context of established principles + for designing musical hardware, and the emerging concepts of long-tail hardware + and frugal innovation. We believe that through Jacdac-for-Max, Jacdac provides + a compelling approach to prototyping musical interfaces while supporting the evolution + beyond a prototype with more robust and scalable solutions.' + address: 'Mexico City, Mexico' + articleno: 53 + author: Kobi Hartley and Steve Hodges and Joe Finney + bibtex: "@inproceedings{nime2023_53,\n abstract = {This article presents Jacdac-for-Max:\ + \ a cross-platform, open-source set of node.js scripts and custom Cycling ’74\ + \ Max objects which enable the use of Jacdac, an open, modular plug-and-play hardware\ + \ prototyping platform, with Max visual programming language frequently used for\ + \ audio-visual applications. We discuss the design and implementation of Jacdac-for-Max,\ + \ and explore a number of example applications. Through this we show how Jacdac-for-Max\ + \ can be used to rapidly prototype digital musical interfaces based on a range\ + \ of input devices. 
Additionally, we discuss these qualities within the context\ + \ of established principles for designing musical hardware, and the emerging concepts\ + \ of long-tail hardware and frugal innovation. We believe that through Jacdac-for-Max,\ + \ Jacdac provides a compelling approach to prototyping musical interfaces while\ + \ supporting the evolution beyond a prototype with more robust and scalable solutions.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {53},\n author = {Kobi Hartley\ + \ and Steve Hodges and Joe Finney},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {8},\n pages = {379--386},\n title = {Jacdac-for-Max: Plug-and-Play Physical\ + \ Prototyping of Musical Interfaces},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_53.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177635 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Propeller, parallel processing, MIDI, sensor, interfaces. ' - pages: 169--170 - title: Parallel Processing System Design with "Propeller" Processor - url: http://www.nime.org/proceedings/2009/nime2009_169.pdf - year: 2009 + month: May + numpages: 8 + pages: 379--386 + title: 'Jacdac-for-Max: Plug-and-Play Physical Prototyping of Musical Interfaces' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_53.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Fyans2009 - abstract: 'The development of new interfaces for musical expressionhas created a - need to study how spectators comprehend newperformance technologies and practices. 
- As part of a largerproject examining how interactions with technology can becommunicated - with the spectator, we relate our model ofspectator understanding of error to - the NIME discourse surrounding transparency, mapping, skill and success.' - address: 'Pittsburgh, PA, United States' - author: 'Fyans, A. Cavan and Gurevich, Michael and Stapleton, Paul' - bibtex: "@inproceedings{Fyans2009,\n abstract = {The development of new interfaces\ - \ for musical expressionhas created a need to study how spectators comprehend\ - \ newperformance technologies and practices. As part of a largerproject examining\ - \ how interactions with technology can becommunicated with the spectator, we relate\ - \ our model ofspectator understanding of error to the NIME discourse surrounding\ - \ transparency, mapping, skill and success.},\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Fyans, A. Cavan and Gurevich, Michael and Stapleton, Paul},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177519},\n issn = {2220-4806},\n\ - \ keywords = {performance, skill, transparency, design, HCI },\n pages = {171--172},\n\ - \ title = {Where Did It All Go Wrong ? A Model of Error From the Spectator's Perspective},\n\ - \ url = {http://www.nime.org/proceedings/2009/nime2009_171.pdf},\n year = {2009}\n\ - }\n" + ID: nime2023_54 + abstract: 'Thales is a composed instrument consisting of two hand-held magnetic + controllers whose interactions with each other and with other magnets produce + the somatosensory manifestation of a tangible interface that the musician generates + and shapes in the act of performing. In this paper we provide a background for + the development of Thales by describing the application of permanent magnets in + HCI and musical interfaces. 
We also introduce the instrument’s sound generation + based on a neural synthesis model and contextualise the system in relation with + the concept of magnetic scores. We report on our preliminary user study and discuss + the somatosensory response that characterise Thales, observing the interaction + between the opposing magnetic field of the controllers as a tangible magnetic + interface. Finally, we investigate its nature from the perspective of performative + posthumanist ontologies.' + address: 'Mexico City, Mexico' + articleno: 54 + author: Nicola Privato and Thor Magnusson and Einar Torfi Einarsson + bibtex: "@inproceedings{nime2023_54,\n abstract = {Thales is a composed instrument\ + \ consisting of two hand-held magnetic controllers whose interactions with each\ + \ other and with other magnets produce the somatosensory manifestation of a tangible\ + \ interface that the musician generates and shapes in the act of performing. In\ + \ this paper we provide a background for the development of Thales by describing\ + \ the application of permanent magnets in HCI and musical interfaces. We also\ + \ introduce the instrument’s sound generation based on a neural synthesis model\ + \ and contextualise the system in relation with the concept of magnetic scores.\ + \ We report on our preliminary user study and discuss the somatosensory response\ + \ that characterise Thales, observing the interaction between the opposing magnetic\ + \ field of the controllers as a tangible magnetic interface. 
Finally, we investigate\ + \ its nature from the perspective of performative posthumanist ontologies.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {54},\n author = {Nicola Privato\ + \ and Thor Magnusson and Einar Torfi Einarsson},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {7},\n pages = {387--393},\n title = {Magnetic Interactions as a\ + \ Somatosensory Interface},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_54.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177519 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'performance, skill, transparency, design, HCI ' - pages: 171--172 - title: 'Where Did It All Go Wrong ? A Model of Error From the Spectator''s Perspective' - url: http://www.nime.org/proceedings/2009/nime2009_171.pdf - year: 2009 + month: May + numpages: 7 + pages: 387--393 + title: Magnetic Interactions as a Somatosensory Interface + track: Papers + url: http://nime.org/proceedings/2023/nime2023_54.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: dAlessandro2009 - abstract: 'In this paper we present new issues and challenges relatedto the vertical - tablet playing. The approach is based on apreviously presented instrument, the - HANDSKETCH. Thisinstrument has now been played regularly for more than twoyears - by several performers. Therefore this is an opportunityto propose a better understanding - of the performing strategy.We present the behavior of the whole body as an underlyingaspect - in the manipulation of the instrument.' 
- address: 'Pittsburgh, PA, United States' - author: 'd''Alessandro, Nicolas and Dutoit, Thierry' - bibtex: "@inproceedings{dAlessandro2009,\n abstract = {In this paper we present\ - \ new issues and challenges relatedto the vertical tablet playing. The approach\ - \ is based on apreviously presented instrument, the HANDSKETCH. Thisinstrument\ - \ has now been played regularly for more than twoyears by several performers.\ - \ Therefore this is an opportunityto propose a better understanding of the performing\ - \ strategy.We present the behavior of the whole body as an underlyingaspect in\ - \ the manipulation of the instrument.},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {d'Alessandro, Nicolas and Dutoit, Thierry},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177465},\n issn = {2220-4806},\n keywords = {graphic\ - \ tablet, playing position, techniques },\n pages = {173--174},\n title = {Advanced\ - \ Techniques for Vertical Tablet Playing A Overview of Two Years of Practicing\ - \ the HandSketch 1.x},\n url = {http://www.nime.org/proceedings/2009/nime2009_173.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_55 + abstract: "Audio–visual corpus-based synthesis extends the principle of concatenative\ + \ sound synthesis to the visual domain, where, in addition to the sound corpus\ + \ (i.e. a collection of segments of recorded sound with a perceptual description\ + \ of their sound character), the artist uses a corpus of still images with visual\ + \ perceptual description (colour, texture, brightness, entropy), in order to create\ + \ an audio–visual musical performance by navigating in real-time through these\ + \ descriptor spaces, i.e. through the collection of sound grains in a space of\ + \ perceptual audio descriptors, and at the same time through the visual descriptor\ + \ space, i.e. 
selecting images from the visual corpus for rendering, and thus\ + \ navigate in parallel through both corpora interactively with gestural control\ + \ via movement sensors.\nThe artistic–scientific question that is explored here\ + \ is how to control at the same time the navigation through the audio and the\ + \ image descriptor spaces with gesture sensors, in other words, how to link the\ + \ gesture sensing to both the image descriptors and the sound descriptors in order\ + \ to create a symbiotic multi-modal embodied audio–visual experience." + address: 'Mexico City, Mexico' + articleno: 55 + author: Diemo Schwarz + bibtex: "@inproceedings{nime2023_55,\n abstract = {Audio–visual corpus-based synthesis\ + \ extends the principle of concatenative sound synthesis to the visual domain,\ + \ where, in addition to the sound corpus (i.e. a collection of segments of recorded\ + \ sound with a perceptual description of their sound character), the artist uses\ + \ a corpus of still images with visual perceptual description (colour, texture,\ + \ brightness, entropy), in order to create an audio–visual musical performance\ + \ by navigating in real-time through these descriptor spaces, i.e. through the\ + \ collection of sound grains in a space of perceptual audio descriptors, and at\ + \ the same time through the visual descriptor space, i.e. 
selecting images from\ + \ the visual corpus for rendering, and thus navigate in parallel through both\ + \ corpora interactively with gestural control via movement sensors.\nThe artistic–scientific\ + \ question that is explored here is how to control at the same time the navigation\ + \ through the audio and the image descriptor spaces with gesture sensors, in other\ + \ words, how to link the gesture sensing to both the image descriptors and the\ + \ sound descriptors in order to create a symbiotic multi-modal embodied audio–visual\ + \ experience.},\n address = {Mexico City, Mexico},\n articleno = {55},\n author\ + \ = {Diemo Schwarz},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {8},\n\ + \ pages = {394--401},\n title = {Touch Interaction for Corpus-based Audio–Visual\ + \ Synthesis},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_55.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177465 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'graphic tablet, playing position, techniques ' - pages: 173--174 - title: Advanced Techniques for Vertical Tablet Playing A Overview of Two Years of - Practicing the HandSketch 1.x - url: http://www.nime.org/proceedings/2009/nime2009_173.pdf - year: 2009 + month: May + numpages: 8 + pages: 394--401 + title: Touch Interaction for Corpus-based Audio–Visual Synthesis + track: Papers + url: http://nime.org/proceedings/2023/nime2023_55.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Hoofer2009 - abstract: This paper describes a method for classification of different beat gestures - within traditional beat patterns based on gyroscope data and machine learning - techniques and provides a quantitative evaluation. 
- address: 'Pittsburgh, PA, United States' - author: 'Höofer, Andreas and Hadjakos, Aristotelis and Mühlhäuser, Max' - bibtex: "@inproceedings{Hoofer2009,\n abstract = {This paper describes a method\ - \ for classification of different beat gestures within traditional beat patterns\ - \ based on gyroscope data and machine learning techniques and provides a quantitative\ - \ evaluation.},\n address = {Pittsburgh, PA, United States},\n author = {H\\''{o}ofer,\ - \ Andreas and Hadjakos, Aristotelis and M\\''{u}hlh\\''{a}user, Max},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177565},\n issn = {2220-4806},\n keywords\ - \ = {nime09},\n pages = {175--176},\n title = {Gyroscope-Based Conducting Gesture\ - \ Recognition},\n url = {http://www.nime.org/proceedings/2009/nime2009_175.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_56 + abstract: "This paper examines the use of the no-input mixing desk—or feedback mixer—across\ + \ a range of musical practices. The research draws on twenty two artist interviews\ + \ conducted by the authors, and on magazine and forum archives. We focus particularly\ + \ on how the properties of the no-input mixer connect with the musical, aesthetic\ + \ and practical concerns of these practices. The affordability, accessibility,\ + \ and non-hierarchical nature of the instrument are examined as factors that help\ + \ the idea spread, and that can be important political dimensions for artists.\n\ + The material, social and cultural aspects are brought together to provide a detailed\ + \ picture of the instrument that goes beyond technical description. 
This provides\ + \ a useful case study for NIME in thinking through these intercon- nections, particularly\ + \ in looking outwards to how musical instruments and associated musical ideas\ + \ travel, and how they can effect change and be changed themselves in their encounters\ + \ with real-world musical contexts." + address: 'Mexico City, Mexico' + articleno: 56 + author: Tom Mudd and Akira Brown + bibtex: "@inproceedings{nime2023_56,\n abstract = {This paper examines the use of\ + \ the no-input mixing desk—or feedback mixer—across a range of musical practices.\ + \ The research draws on twenty two artist interviews conducted by the authors,\ + \ and on magazine and forum archives. We focus particularly on how the properties\ + \ of the no-input mixer connect with the musical, aesthetic and practical concerns\ + \ of these practices. The affordability, accessibility, and non-hierarchical nature\ + \ of the instrument are examined as factors that help the idea spread, and that\ + \ can be important political dimensions for artists.\nThe material, social and\ + \ cultural aspects are brought together to provide a detailed picture of the instrument\ + \ that goes beyond technical description. 
This provides a useful case study for\ + \ NIME in thinking through these intercon- nections, particularly in looking outwards\ + \ to how musical instruments and associated musical ideas travel, and how they\ + \ can effect change and be changed themselves in their encounters with real-world\ + \ musical contexts.},\n address = {Mexico City, Mexico},\n articleno = {56},\n\ + \ author = {Tom Mudd and Akira Brown},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {7},\n pages = {402--408},\n title = {Contrasting approaches to the no-input\ + \ mixer},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_56.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177565 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 175--176 - title: Gyroscope-Based Conducting Gesture Recognition - url: http://www.nime.org/proceedings/2009/nime2009_175.pdf - year: 2009 + month: May + numpages: 7 + pages: 402--408 + title: Contrasting approaches to the no-input mixer + track: Papers + url: http://nime.org/proceedings/2023/nime2023_56.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Berdahl2009b - abstract: 'Haptic technology, providing force cues and creating a programmable physical - instrument interface, can assist musicians in making gestures. 
The finite reaction - time of thehuman motor control system implies that the execution of abrief musical - gesture does not rely on immediate feedbackfrom the senses, rather it is preprogrammed - to some degree.Consequently, we suggest designing relatively simple anddeterministic - interfaces for providing haptic assistance.In this paper, we consider the specific - problem of assisting a musician in selecting pitches from a continuous range.We - build on a prior study by O''Modhrain of the accuracyof pitches selected by musicians - on a Theremin-like hapticinterface. To improve the assistance, we augment the - interface with programmed detents so that the musician can feelthe locations of - equal tempered pitches. Nevertheless, themusician can still perform arbitrary - pitch inflections such asglissandi, falls, and scoops. We investigate various - formsof haptic detents, including fixed detent levels and forcesensitive detent - levels. Preliminary results from a subjecttest confirm improved accuracy in pitch - selection broughtabout by detents.' - address: 'Pittsburgh, PA, United States' - author: 'Berdahl, Edgar and Niemeyer, Günter and Smith, Julius O.' - bibtex: "@inproceedings{Berdahl2009b,\n abstract = {Haptic technology, providing\ - \ force cues and creating a programmable physical instrument interface, can assist\ - \ musicians in making gestures. The finite reaction time of thehuman motor control\ - \ system implies that the execution of abrief musical gesture does not rely on\ - \ immediate feedbackfrom the senses, rather it is preprogrammed to some degree.Consequently,\ - \ we suggest designing relatively simple anddeterministic interfaces for providing\ - \ haptic assistance.In this paper, we consider the specific problem of assisting\ - \ a musician in selecting pitches from a continuous range.We build on a prior\ - \ study by O'Modhrain of the accuracyof pitches selected by musicians on a Theremin-like\ - \ hapticinterface. 
To improve the assistance, we augment the interface with programmed\ - \ detents so that the musician can feelthe locations of equal tempered pitches.\ - \ Nevertheless, themusician can still perform arbitrary pitch inflections such\ - \ asglissandi, falls, and scoops. We investigate various formsof haptic detents,\ - \ including fixed detent levels and forcesensitive detent levels. Preliminary\ - \ results from a subjecttest confirm improved accuracy in pitch selection broughtabout\ - \ by detents.},\n address = {Pittsburgh, PA, United States},\n author = {Berdahl,\ - \ Edgar and Niemeyer, G\\''{u}nter and Smith, Julius O.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177481},\n issn = {2220-4806},\n keywords = {Haptic,\ - \ detent, pitch selection, human motor system, feedback control, response time,\ - \ gravity well },\n pages = {177--182},\n title = {Using Haptics to Assist Performers\ - \ in Making Gestures to a Musical Instrument},\n url = {http://www.nime.org/proceedings/2009/nime2009_177.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_57 + abstract: 'The Electrosteel is a new electronic instrument inspired by the user + interface of the pedal steel guitar (PSG). The Electrosteel uses the interface + concepts of the PSG (a bar in the left hand, plucked strings for the right hand, + foot pedals, knee levers, etc) as a control paradigm for digital synthesis. The + instrument allows performers with skill on the PSG to expand their sonic range, + and creates a powerful new multi-dimensional way to control synthesis. This paper + describes the development of the instrument and its custom embedded synthesis + engine, with a focus on the design challenges posed by mapping an existing performer + interface to a new instrument.' 
+ address: 'Mexico City, Mexico' + articleno: 57 + author: Jeffrey Snyder and Davis Polito and Matthew Wang + bibtex: "@inproceedings{nime2023_57,\n abstract = {The Electrosteel is a new electronic\ + \ instrument inspired by the user interface of the pedal steel guitar (PSG). The\ + \ Electrosteel uses the interface concepts of the PSG (a bar in the left hand,\ + \ plucked strings for the right hand, foot pedals, knee levers, etc) as a control\ + \ paradigm for digital synthesis. The instrument allows performers with skill\ + \ on the PSG to expand their sonic range, and creates a powerful new multi-dimensional\ + \ way to control synthesis. This paper describes the development of the instrument\ + \ and its custom embedded synthesis engine, with a focus on the design challenges\ + \ posed by mapping an existing performer interface to a new instrument.},\n address\ + \ = {Mexico City, Mexico},\n articleno = {57},\n author = {Jeffrey Snyder and\ + \ Davis Polito and Matthew Wang},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {8},\n pages = {409--416},\n title = {The Electrosteel: An Electronic Instrument\ + \ Inspired by the Pedal Steel Guitar},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_57.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177481 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Haptic, detent, pitch selection, human motor system, feedback control, - response time, gravity well ' - pages: 177--182 - title: Using Haptics to Assist Performers in Making Gestures to a Musical Instrument - url: http://www.nime.org/proceedings/2009/nime2009_177.pdf - year: 2009 + month: May + numpages: 8 + pages: 409--416 + title: 'The 
Electrosteel: An Electronic Instrument Inspired by the Pedal Steel Guitar' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_57.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Berdahl2009a - abstract: 'A haptic musical instrument is an electronic musical instrument that - provides the musician not only with audio feedback but also with force feedback. - By programming feedback controllers to emulate the laws of physics, many haptic - musical instruments have been previously designed thatmimic real acoustic musical - instruments. The controllerprograms have been implemented using finite difference - and(approximate) hybrid digital waveguide models. We presenta novel method for - constructing haptic musical instrumentsin which a haptic device is directly interfaced - with a conventional digital waveguide model by way of a junction element, improving - the quality of the musician''s interactionwith the virtual instrument. We introduce - both the explicitdigital waveguide control junction and the implicit digitalwaveguide - control junction.' - address: 'Pittsburgh, PA, United States' - author: 'Berdahl, Edgar and Niemeyer, Günter and Smith, Julius O.' - bibtex: "@inproceedings{Berdahl2009a,\n abstract = {A haptic musical instrument\ - \ is an electronic musical instrument that provides the musician not only with\ - \ audio feedback but also with force feedback. By programming feedback controllers\ - \ to emulate the laws of physics, many haptic musical instruments have been previously\ - \ designed thatmimic real acoustic musical instruments. The controllerprograms\ - \ have been implemented using finite difference and(approximate) hybrid digital\ - \ waveguide models. We presenta novel method for constructing haptic musical instrumentsin\ - \ which a haptic device is directly interfaced with a conventional digital waveguide\ - \ model by way of a junction element, improving the quality of the musician's\ - \ interactionwith the virtual instrument. 
We introduce both the explicitdigital\ - \ waveguide control junction and the implicit digitalwaveguide control junction.},\n\ - \ address = {Pittsburgh, PA, United States},\n author = {Berdahl, Edgar and Niemeyer,\ - \ G\\''{u}nter and Smith, Julius O.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177479},\n\ - \ issn = {2220-4806},\n keywords = {haptic musical instrument, digital waveguide,\ - \ control junction, explicit, implicit, teleoperation },\n pages = {183--186},\n\ - \ title = {Using Haptic Devices to Interface Directly with Digital Waveguide-Based\ - \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_183.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_58 + abstract: 'This paper describes an extended intercontinental collaboration between + multiple artists, institutions, and their publics, to develop an integrated musical + practice which combines experimental making, performance, and pedagogy. We build + on contributions to NIME which work with art and design-led methods to explore + alternatives to, for example, more engineering-oriented approaches, without loss + of practical utility and theoretical potential. We describe two week-long workshop-residencies + and three performance-installations done under the provocative title Raw Data, + Rough Mix which was intended to encourage exploration of basic processes in physical, + mechanical, electrical, electronic and computational domains to develop musical + artefacts that were frugal in their resource-demands but enabled the interrogation + of human/non-human relationships, performativity, musical ecologies, aesthetics, + and other matters. We close by elaborating our contribution to NIME as offering + an integrated practice combining making, playing and learning, which is critically + informed and practically productive.' 
+ address: 'Mexico City, Mexico' + articleno: 58 + author: John M Bowers and John Richards and Tim Shaw and Robin Foster and AKIHIRO + KUBOTA + bibtex: "@inproceedings{nime2023_58,\n abstract = {This paper describes an extended\ + \ intercontinental collaboration between multiple artists, institutions, and their\ + \ publics, to develop an integrated musical practice which combines experimental\ + \ making, performance, and pedagogy. We build on contributions to NIME which work\ + \ with art and design-led methods to explore alternatives to, for example, more\ + \ engineering-oriented approaches, without loss of practical utility and theoretical\ + \ potential. We describe two week-long workshop-residencies and three performance-installations\ + \ done under the provocative title Raw Data, Rough Mix which was intended to encourage\ + \ exploration of basic processes in physical, mechanical, electrical, electronic\ + \ and computational domains to develop musical artefacts that were frugal in their\ + \ resource-demands but enabled the interrogation of human/non-human relationships,\ + \ performativity, musical ecologies, aesthetics, and other matters. 
We close by\ + \ elaborating our contribution to NIME as offering an integrated practice combining\ + \ making, playing and learning, which is critically informed and practically productive.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {58},\n author = {John M Bowers\ + \ and John Richards and Tim Shaw and Robin Foster and AKIHIRO KUBOTA},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {11},\n pages = {417--427},\n title = {Raw Data,\ + \ Rough Mix: Towards an Integrated Practice of Making, Performance and Pedagogy},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_58.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177479 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'haptic musical instrument, digital waveguide, control junction, explicit, - implicit, teleoperation ' - pages: 183--186 - title: Using Haptic Devices to Interface Directly with Digital Waveguide-Based Musical - Instruments - url: http://www.nime.org/proceedings/2009/nime2009_183.pdf - year: 2009 + month: May + numpages: 11 + pages: 417--427 + title: 'Raw Data, Rough Mix: Towards an Integrated Practice of Making, Performance + and Pedagogy' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_58.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Havryliv2009 - abstract: 'The carillon is one of the few instruments that elicit sophisticated - haptic interaction from amateur and professional players alike. Like the piano - keyboard, the velocity of a player''s impact on each carillon key, or baton, affects - the quality of the resultant tone; unlike the piano, each carillon baton returns - a different forcefeedback. 
Force-feedback varies widely from one baton to the - next across the entire range of the instrument and with further idiosyncratic - variation from one instrument to another. This makes the carillon an ideal candidate - for haptic simulation. The application of synthesized forcefeedback based on an - analysis of forces operating in a typical carillon mechanism offers a blueprint - for the design of an electronic practice clavier and with it the solution to a - problem that has vexed carillonists for centuries, namely the inability to rehearse - repertoire in private. This paper will focus on design and implementation of a - haptic carillon clavier derived from an analysis of the Australian National Carillon - in Canberra. ' - address: 'Pittsburgh, PA, United States' - author: 'Havryliv, Mark and Naghdy, Fazel and Schiemer, Greg and Hurd, Timothy' - bibtex: "@inproceedings{Havryliv2009,\n abstract = {The carillon is one of the few\ - \ instruments that elicit sophisticated haptic interaction from amateur and professional\ - \ players alike. Like the piano keyboard, the velocity of a player's impact on\ - \ each carillon key, or baton, affects the quality of the resultant tone; unlike\ - \ the piano, each carillon baton returns a different forcefeedback. Force-feedback\ - \ varies widely from one baton to the next across the entire range of the instrument\ - \ and with further idiosyncratic variation from one instrument to another. This\ - \ makes the carillon an ideal candidate for haptic simulation. The application\ - \ of synthesized forcefeedback based on an analysis of forces operating in a typical\ - \ carillon mechanism offers a blueprint for the design of an electronic practice\ - \ clavier and with it the solution to a problem that has vexed carillonists for\ - \ centuries, namely the inability to rehearse repertoire in private. 
This paper\ - \ will focus on design and implementation of a haptic carillon clavier derived\ - \ from an analysis of the Australian National Carillon in Canberra. },\n address\ - \ = {Pittsburgh, PA, United States},\n author = {Havryliv, Mark and Naghdy, Fazel\ - \ and Schiemer, Greg and Hurd, Timothy},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177569},\n\ - \ issn = {2220-4806},\n keywords = {Haptics, force-feedback, mechanical analysis.\ - \ },\n pages = {187--192},\n title = {Haptic Carillon -- Analysis \\& Design of\ - \ the Carillon Mechanism},\n url = {http://www.nime.org/proceedings/2009/nime2009_187.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_59 + abstract: 'The steelpan is a pitched percussion instrument that although generally + known by listeners is typically not included in music instrument audio datasets. This + means that it is usually underrepresented in existing data-driven deep learning + models for fundamental frequency estimation. Furthermore, the steelpan has complex + acoustic properties that make fundamental frequency estimation challenging when + using deep learning models for general fundamental frequency estimation for any + music instrument. Fundamental frequency estimation or pitch detection is a fundamental + task in music information retrieval and it is interesting to explore methods that + are tailored to specific instruments and whether they can outperform general + methods. To address this, we present SASS, the Steelpan Analysis Sample Set that + can be used to train steel-pan specific pitch detection algorithms as well as + propose a custom-trained deep learning model for steelpan fundamental frequency + estimation. This model outperforms general state-of-the-art methods such as pYin + and CREPE on steelpan audio - even while having significantly fewer parameters + and operating on a shorter analysis window. 
This reduces minimum system latency, + allowing for deployment to a real-time system that can be used in live music contexts.' + address: 'Mexico City, Mexico' + articleno: 59 + author: Colin Malloy and George Tzanetakis + bibtex: "@inproceedings{nime2023_59,\n abstract = {The steelpan is a pitched percussion\ + \ instrument that although generally known by listeners is typically not included\ + \ in music instrument audio datasets. This means that it is usually underrepresented\ + \ in existing data-driven deep learning models for fundamental frequency estimation.\ + \ Furthermore, the steelpan has complex acoustic properties that make fundamental\ + \ frequency estimation challenging when using deep learning models for general\ + \ fundamental frequency estimation for any music instrument. Fundamental frequency\ + \ estimation or pitch detection is a fundamental task in music information retrieval\ + \ and it is interesting to explore methods that are tailored to specific instruments\ + \ and whether they can outperform general methods. To address this, we present\ + \ SASS, the Steelpan Analysis Sample Set that can be used to train steel-pan specific\ + \ pitch detection algorithms as well as propose a custom-trained deep learning\ + \ model for steelpan fundamental frequency estimation. This model outperforms\ + \ general state-of-the-art methods such as pYin and CREPE on steelpan audio -\ + \ even while having significantly fewer parameters and operating on a shorter\ + \ analysis window. 
This reduces minimum system latency, allowing for deployment\ + \ to a real-time system that can be used in live music contexts.},\n address =\ + \ {Mexico City, Mexico},\n articleno = {59},\n author = {Colin Malloy and George\ + \ Tzanetakis},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ + \ issn = {2220-4806},\n month = {May},\n numpages = {8},\n pages = {428--435},\n\ + \ title = {Steelpan-specific pitch detection: a dataset and deep learning model},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_59.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177569 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Haptics, force-feedback, mechanical analysis. ' - pages: 187--192 - title: Haptic Carillon -- Analysis & Design of the Carillon Mechanism - url: http://www.nime.org/proceedings/2009/nime2009_187.pdf - year: 2009 + month: May + numpages: 8 + pages: 428--435 + title: 'Steelpan-specific pitch detection: a dataset and deep learning model' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_59.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Leeuw2009 - abstract: 'The Electrumpet is an enhancement of a normal trumpet with a variety - of electronic sensors and buttons. It is a new hybrid instrument that facilitates - simultaneous acoustic and electronic playing. The normal playing skills of a trumpet - player apply to the new instrument. The placing of the buttons and sensors is - not a hindrance to acoustic use of the instrument and they are conveniently located. - The device can be easily attached to and detached from a normal Bb-trumpet. The - device has a wireless connection with the computer through Bluetooth-serial (Arduino). 
- Audio and data processing in the computer is effected by three separate instances - of MAX/MSP connected through OSC (controller data) and Soundflower (sound data). - The current prototype consists of 7 analogue sensors (4 valve-like potentiometers, - 2 pressure sensors, 1 "Ribbon" controller) and 9 digital switches. An LCD screen - that is controlled by a separate Arduino (mini) is attached to the trumpet and - displays the current controller settings that are sent through a serial connection. ' - address: 'Pittsburgh, PA, United States' - author: 'Leeuw, Hans' - bibtex: "@inproceedings{Leeuw2009,\n abstract = {The Electrumpet is an enhancement\ - \ of a normal trumpet with a variety of electronic sensors and buttons. It is\ - \ a new hybrid instrument that facilitates simultaneous acoustic and electronic\ - \ playing. The normal playing skills of a trumpet player apply to the new instrument.\ - \ The placing of the buttons and sensors is not a hindrance to acoustic use of\ - \ the instrument and they are conveniently located. The device can be easily attached\ - \ to and detached from a normal Bb-trumpet. The device has a wireless connection\ - \ with the computer through Bluetooth-serial (Arduino). Audio and data processing\ - \ in the computer is effected by three separate instances of MAX/MSP connected\ - \ through OSC (controller data) and Soundflower (sound data). The current prototype\ - \ consists of 7 analogue sensors (4 valve-like potentiometers, 2 pressure sensors,\ - \ 1 \"Ribbon\" controller) and 9 digital switches. An LCD screen that is controlled\ - \ by a separate Arduino (mini) is attached to the trumpet and displays the current\ - \ controller settings that are sent through a serial connection. 
},\n address\ - \ = {Pittsburgh, PA, United States},\n author = {Leeuw, Hans},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177613},\n issn = {2220-4806},\n keywords = {Trumpet,\ - \ multiple Arduinos, Bluetooth, LCD, low latency, OSC, MAX/MSP. },\n pages = {193--198},\n\ - \ title = {The Electrumpet , a Hybrid Electro-Acoustic Instrument},\n url = {http://www.nime.org/proceedings/2009/nime2009_193.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_60 + abstract: 'This paper describes AbletonOSC, an Open Sound Control API whose objective + is to expose the complete Ableton Live Object Model via OSC. Embedded within Live + by harnessing its internal Python scripting interface, AbletonOSC allows external + processes to exert real-time control over any element of a Live set, ranging from + generating new melodic sequences to modulating deeply-nested synthesis parameters. + We describe the motivations and historical precedents behind AbletonOSC, provide + an overview of its OSC namespace and the classes of functionality that are exposed + by the API, and look at a series of applied case studies that demonstrate the + new types of musical interface that AbletonOSC enables.' + address: 'Mexico City, Mexico' + articleno: 60 + author: Daniel Jones + bibtex: "@inproceedings{nime2023_60,\n abstract = {This paper describes AbletonOSC,\ + \ an Open Sound Control API whose objective is to expose the complete Ableton\ + \ Live Object Model via OSC. Embedded within Live by harnessing its internal Python\ + \ scripting interface, AbletonOSC allows external processes to exert real-time\ + \ control over any element of a Live set, ranging from generating new melodic\ + \ sequences to modulating deeply-nested synthesis parameters. 
We describe the\ + \ motivations and historical precedents behind AbletonOSC, provide an overview\ + \ of its OSC namespace and the classes of functionality that are exposed by the\ + \ API, and look at a series of applied case studies that demonstrate the new types\ + \ of musical interface that AbletonOSC enables.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {60},\n author = {Daniel Jones},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {5},\n pages = {436--440},\n title = {AbletonOSC: A unified control\ + \ API for Ableton Live},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_60.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177613 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Trumpet, multiple Arduinos, Bluetooth, LCD, low latency, OSC, MAX/MSP. ' - pages: 193--198 - title: 'The Electrumpet , a Hybrid Electro-Acoustic Instrument' - url: http://www.nime.org/proceedings/2009/nime2009_193.pdf - year: 2009 + month: May + numpages: 5 + pages: 436--440 + title: 'AbletonOSC: A unified control API for Ableton Live' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_60.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Gallin2009 - abstract: 'Starting from a parallelism between the effervescence of the 1920s in - the exploration of new ways of controlling music and the actual revolution in - the design of new control possibilities, this paper aims to explore the possibilities - of rethinking instruments from the past towards instruments of the future. 
Through - three examples (the experience of the Persephone, the design of the Persephone2 - and the 4 strings ribbon cello project), I will explore the contemporary notion - of “instruments of the future” vs. controls that people expect from such instruments - nowadays.' - address: 'Pittsburgh, PA, United States' - author: 'Gallin, Emmanuelle and Sirguy, Marc' - bibtex: "@inproceedings{Gallin2009,\n abstract = {Starting from a parallelism between\ - \ the effervescence of the 1920s in the exploration of new ways of controlling\ - \ music and the actual revolution in the design of new control possibilities,\ - \ this paper aims to explore the possibilities of rethinking instruments from\ - \ the past towards instruments of the future. Through three examples (the experience\ - \ of the Persephone, the design of the Persephone2 and the 4 strings ribbon cello\ - \ project), I will explore the contemporary notion of “instruments of the future”\ - \ vs. controls that people expect from such instruments nowadays.},\n address\ - \ = {Pittsburgh, PA, United States},\n author = {Gallin, Emmanuelle and Sirguy,\ - \ Marc},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177521},\n issn = {2220-4806},\n\ - \ keywords = {Controller, Sensor, MIDI, USB, Computer Music, ribbon controllers,\ - \ ribbon cello. },\n pages = {199--202},\n title = {Sensor Technology and the\ - \ Remaking of Instruments from the Past},\n url = {http://www.nime.org/proceedings/2009/nime2009_199.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_61 + abstract: "Some of the performer’s gestures, despite corresponding to different\ + \ physical interactions, might produce a similar sonic output. This is the case\ + \ of upward and downward string bends on the guitar where stretching the string\ + \ shifts the pitch upwards. 
Bending represents \nan expressive resource that\ + \ extends across many different styles of guitar playing. \nIn this study, we\ + \ presented performers with an augmented electric guitar on which the gesture-to-sound\ + \ relationship of downward bending gestures is changed depending on how the instrument\ + \ is configured. Participants were asked to explore and perform a short improvisation\ + \ under three different conditions, two augmentations that correspond to different\ + \ auditory imagery and a constrained scenario. The different sessions of the experiment\ + \ were recorded to conduct thematic analysis as an examination of how gestural\ + \ disambiguation can be exploited in the design of augmentations that focus on\ + \ reusing performer's expertise and how the gesture-to-sound entanglement of the\ + \ different modalities supports or encumbers the performer's embodied relationship\ + \ with the instrument." + address: 'Mexico City, Mexico' + articleno: 61 + author: Adan L. Benito Temprano and Teodoro Dannemann and Andrew McPherson + bibtex: "@inproceedings{nime2023_61,\n abstract = {Some of the performer’s gestures,\ + \ despite corresponding to different physical interactions, might produce a similar\ + \ sonic output. This is the case of upward and downward string bends on the guitar\ + \ where stretching the string shifts the pitch upwards. Bending represents \n\ + an expressive resource that extends across many different styles of guitar playing.\ + \ \nIn this study, we presented performers with an augmented electric guitar on\ + \ which the gesture-to-sound relationship of downward bending gestures is changed\ + \ depending on how the instrument is configured. Participants were asked to explore\ + \ and perform a short improvisation under three different conditions, two augmentations\ + \ that correspond to different auditory imagery and a constrained scenario. 
The\ + \ different sessions of the experiment were recorded to conduct thematic analysis\ + \ as an examination of how gestural disambiguation can be exploited in the design\ + \ of augmentations that focus on reusing performer's expertise and how the gesture-to-sound\ + \ entanglement of the different modalities supports or encumbers the performer's\ + \ embodied relationship with the instrument.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {61},\n author = {Adan L. Benito Temprano and Teodoro Dannemann\ + \ and Andrew McPherson},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {10},\n\ + \ pages = {441--450},\n title = {Exploring the (un)ambiguous guitar: A Qualitative\ + \ Study on the use of Gesture Disambiguation in Augmented Instrument Design},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_61.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177521 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Controller, Sensor, MIDI, USB, Computer Music, ribbon controllers, ribbon - cello. 
' - pages: 199--202 - title: Sensor Technology and the Remaking of Instruments from the Past - url: http://www.nime.org/proceedings/2009/nime2009_199.pdf - year: 2009 + month: May + numpages: 10 + pages: 441--450 + title: 'Exploring the (un)ambiguous guitar: A Qualitative Study on the use of Gesture + Disambiguation in Augmented Instrument Design' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_61.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Nicolls2009 - abstract: '“The reinvigoration of the role of the human body” - as John Richards - recently described trends in using homemade electronics to move away from laptop - performance [1] - is mirrored in an ambition of instrumentalists to interact more - closely with the electronic sounds they are helping to create. For these players, - there has often been a one-way street of the ‘instrument feeds MAX patch’ paradigm - and arguments are made here for more complete performance feedback systems. Instrumentalists - come to the question of interactivity with a whole array of gestures, sounds and - associations already in place, so must choose carefully the means by which the - instrumental performance is augmented. Frances-Marie Uitti [2] is a pioneer in - the field, creating techniques to amplify the cellist’s innate performative gestures - and in parallel developing the instrument. This paper intends to give an overview - of the author’s work in developing interactivity in piano performance, mechanical - augmentation of the piano and possible structural developments of the instrument - to bring it into the twenty-first century.' 
- address: 'Pittsburgh, PA, United States' - author: 'Nicolls, Sarah' - bibtex: "@inproceedings{Nicolls2009,\n abstract = {“The reinvigoration of the role\ - \ of the human body” - as John Richards recently described trends in using homemade\ - \ electronics to move away from laptop performance [1] - is mirrored in an ambition\ - \ of instrumentalists to interact more closely with the electronic sounds they\ - \ are helping to create. For these players, there has often been a one-way street\ - \ of the ‘instrument feeds MAX patch’ paradigm and arguments are made here for\ - \ more complete performance feedback systems. Instrumentalists come to the question\ - \ of interactivity with a whole array of gestures, sounds and associations already\ - \ in place, so must choose carefully the means by which the instrumental performance\ - \ is augmented. Frances-Marie Uitti [2] is a pioneer in the field, creating techniques\ - \ to amplify the cellist’s innate performative gestures and in parallel developing\ - \ the instrument. 
This paper intends to give an overview of the author’s work\ - \ in developing interactivity in piano performance, mechanical augmentation of\ - \ the piano and possible structural developments of the instrument to bring it\ - \ into the twenty-first century.},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Nicolls, Sarah},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177641},\n\ - \ issn = {2220-4806},\n keywords = {sensor, gestural, technology, performance,\ - \ piano, motors, interactive },\n pages = {203--206},\n title = {Twenty-First\ - \ Century Piano},\n url = {http://www.nime.org/proceedings/2009/nime2009_203.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_62 + abstract: "The concept of sound sculpture can embrace a rich variety of artistic\ + \ manifestations and disciplines since it contains music, plastic arts, and performance,\ + \ to say the least. Even the conceptual and space design or the skills and crafts\ + \ necessary to transform physical materials demonstrates its interdisciplinary\ + \ potential.\nSensattice is an emerging sound sculpture proposal, which takes\ + \ advantage of organic raw materials considered waste to convert them into biopolymers\ + \ and explores their acoustic and haptic potential taking \"skin and bone\" as\ + \ conceptual premises to synthesize two fundamental materials. Such materials\ + \ were obtained by applying biomaterial engineering and 3D modeling and printing\ + \ as parallel processes.\nSensattice seems to be an emerging system since it is\ + \ not reduced to mere materials but involves people and situated epistemic approaches\ + \ that literally shape a sculptural lattice through the sensory and symbolic perception\ + \ of skin and bones that can be sounded before, during and after the sculptural\ + \ construction." 
+ address: 'Mexico City, Mexico' + articleno: 62 + author: Jonathan Diaz + bibtex: "@inproceedings{nime2023_62,\n abstract = {The concept of sound sculpture\ + \ can embrace a rich variety of artistic manifestations and disciplines since\ + \ it contains music, plastic arts, and performance, to say the least. Even the\ + \ conceptual and space design or the skills and crafts necessary to transform\ + \ physical materials demonstrates its interdisciplinary potential.\nSensattice\ + \ is an emerging sound sculpture proposal, which takes advantage of organic raw\ + \ materials considered waste to convert them into biopolymers and explores their\ + \ acoustic and haptic potential taking \"skin and bone\" as conceptual premises\ + \ to synthesize two fundamental materials. Such materials were obtained by applying\ + \ biomaterial engineering and 3D modeling and printing as parallel processes.\n\ + Sensattice seems to be an emerging system since it is not reduced to mere materials\ + \ but involves people and situated epistemic approaches that literally shape a\ + \ sculptural lattice through the sensory and symbolic perception of skin and bones\ + \ that can be sounded before, during and after the sculptural construction.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {62},\n author = {Jonathan Diaz},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {451--456},\n title\ + \ = {Sensattice: An emerging collaborative and modular sound sculpture},\n track\ + \ = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_62.pdf},\n year\ + \ = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177641 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'sensor, gestural, 
technology, performance, piano, motors, interactive ' - pages: 203--206 - title: Twenty-First Century Piano - url: http://www.nime.org/proceedings/2009/nime2009_203.pdf - year: 2009 + month: May + numpages: 6 + pages: 451--456 + title: 'Sensattice: An emerging collaborative and modular sound sculpture' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_62.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Johnston2009 - abstract: 'In this paper we describe an interaction framework which classifies musicians'' - interactions with virtual musical instruments into three modes: instrumental, - ornamental and conversational. We argue that conversational interactions are the - most difficult to design for, but also the most interesting. To illustrate our - approach to designing for conversational interactions we describe the performance - work Partial Reflections 3 for two clarinets and interactive software. This software - uses simulated physical models to create a virtual sound sculpture which both - responds to and produces sounds and visuals.' - address: 'Pittsburgh, PA, United States' - author: 'Johnston, Andrew and Candy, Linda and Edmonds, Ernest' - bibtex: "@inproceedings{Johnston2009,\n abstract = {In this paper we describe an\ - \ interaction framework which classifies musicians' interactions with virtual\ - \ musical instruments into three modes: instrumental, ornamental and conversational.\ - \ We argue that conversational interactions are the most difficult to design for,\ - \ but also the most interesting. To illustrate our approach to designing for conversational\ - \ interactions we describe the performance work Partial Reflections 3 for two\ - \ clarinets and interactive software. 
This software uses simulated physical models\ - \ to create a virtual sound sculpture which both responds to and produces sounds\ - \ and visuals.},\n address = {Pittsburgh, PA, United States},\n author = {Johnston,\ - \ Andrew and Candy, Linda and Edmonds, Ernest},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177585},\n issn = {2220-4806},\n keywords = {Music, instruments,\ - \ interaction. },\n pages = {207--212},\n title = {Designing for Conversational\ - \ Interaction},\n url = {http://www.nime.org/proceedings/2009/nime2009_207.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_63 + abstract: "In this paper, the NIME “shard-speakers” is situated within the cultural\ + \ context of the typical uses of crystal singing bowls, specifically acknowledging\ + \ the origins of crystal bowls as re-purposed by-products of the silicon chip\ + \ manufaturing process, and their subsequent adoption into the toolkits of New\ + \ Age sound healing practitioners. Following this discussion is a first-person\ + \ anecdotal account of the author/composer’s sonic explorations using crystal\ + \ singing bowls in combination with the shards of broken bowls and custom electronics\ + \ to create a body of recorded, acoustic, and electroacoustic musical works named\ + \ Crushed Matrices #1-7. The last section of this paper explains how the extended\ + \ musical techniques unearthed through the Crushed Matrices investigations informed\ + \ the creation of the shard-speakers, and the electronically-generated musical\ + \ content that was composed for them in the form of a sound artwork, Ode on Crushed\ + \ Matrices. This recording was fed into the shard-speakers via tactile transducers\ + \ on resonating bodies for the 2022 inaugural installation of the work, which\ + \ at the time of writing is the only installation of the work to date. 
The paper’s\ + \ conclusion addresses the relationship of this body of work to the NIME 2023\ + \ conference’s theme of “Frugal Music Innovation,” correlating or otherwise characterizing\ + \ its relationship to several of the core competencies set forth by the Frugal\ + \ Innovation Hub: adaptability, lightness of weight, mobile design, affordability,\ + \ local material sourcing, and ruggedness." + address: 'Mexico City, Mexico' + articleno: 63 + author: Anastasia Clarke and Anastasia Clarke + bibtex: "@inproceedings{nime2023_63,\n abstract = {In this paper, the NIME “shard-speakers”\ + \ is situated within the cultural context of the typical uses of crystal singing\ + \ bowls, specifically acknowledging the origins of crystal bowls as re-purposed\ + \ by-products of the silicon chip manufaturing process, and their subsequent adoption\ + \ into the toolkits of New Age sound healing practitioners. Following this discussion\ + \ is a first-person anecdotal account of the author/composer’s sonic explorations\ + \ using crystal singing bowls in combination with the shards of broken bowls and\ + \ custom electronics to create a body of recorded, acoustic, and electroacoustic\ + \ musical works named Crushed Matrices #1-7. The last section of this paper explains\ + \ how the extended musical techniques unearthed through the Crushed Matrices investigations\ + \ informed the creation of the shard-speakers, and the electronically-generated\ + \ musical content that was composed for them in the form of a sound artwork, Ode\ + \ on Crushed Matrices. 
This recording was fed into the shard-speakers via tactile\ + \ transducers on resonating bodies for the 2022 inaugural installation of the\ + \ work, which at the time of writing is the only installation of the work to date.\ + \ The paper’s conclusion addresses the relationship of this body of work to the\ + \ NIME 2023 conference’s theme of “Frugal Music Innovation,” correlating or otherwise\ + \ characterizing its relationship to several of the core competencies set forth\ + \ by the Frugal Innovation Hub: adaptability, lightness of weight, mobile design,\ + \ affordability, local material sourcing, and ruggedness.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {63},\n author = {Anastasia Clarke and Anastasia\ + \ Clarke},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ + \ issn = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {457--462},\n\ + \ title = {Shard-Speakers: An Inquiry into the History, Sonic Properties, and\ + \ Musical Possibilities of Quartz Crystal Singing Bowls},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_63.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177585 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Music, instruments, interaction. 
' - pages: 207--212 - title: Designing for Conversational Interaction - url: http://www.nime.org/proceedings/2009/nime2009_207.pdf - year: 2009 + month: May + numpages: 6 + pages: 457--462 + title: 'Shard-Speakers: An Inquiry into the History, Sonic Properties, and Musical + Possibilities of Quartz Crystal Singing Bowls' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_63.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Gurevich2009 - abstract: 'In this paper we discuss the concept of style, focusing in particular - on methods of designing new instruments that facilitate the cultivation and recognition - of style. We distinguishbetween style and structure of an interaction and discuss - thesignificance of this formulation within the context of NIME.Two workshops that - were conducted to explore style in interaction design are described, from which - we identify elements of style that can inform and influence the design process. - From these, we suggest steps toward designing forstyle in new musical interactions.' - address: 'Pittsburgh, PA, United States' - author: 'Gurevich, Michael and Stapleton, Paul and Bennett, Peter' - bibtex: "@inproceedings{Gurevich2009,\n abstract = {In this paper we discuss the\ - \ concept of style, focusing in particular on methods of designing new instruments\ - \ that facilitate the cultivation and recognition of style. We distinguishbetween\ - \ style and structure of an interaction and discuss thesignificance of this formulation\ - \ within the context of NIME.Two workshops that were conducted to explore style\ - \ in interaction design are described, from which we identify elements of style\ - \ that can inform and influence the design process. 
From these, we suggest steps\ - \ toward designing forstyle in new musical interactions.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Gurevich, Michael and Stapleton, Paul and Bennett,\ - \ Peter},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177563},\n issn = {2220-4806},\n\ - \ keywords = {expression, style, structure, skill, virtuosity },\n pages = {213--217},\n\ - \ title = {Designing for Style in New Musical Interactions},\n url = {http://www.nime.org/proceedings/2009/nime2009_213.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_64 + abstract: 'In music and computer science classrooms, Blind and Visually Impaired + (BVI) learners are often not given alternatives to visual technologies and materials. + FiLOrk, an ensemble at the Filomen M. D''Agostino Greenberg Music School, is made + up of five BVI high school learners who studied and performed computer music using + the live coding language Tidal Cycles over the course of a semester. To make FiLOrk + approachable and accessible we wrote a new curriculum featuring audio/tactile + learning materials, and we designed a collaborative web editor for use with learners'' + assistive technologies, including screen readers and braille displays. In this + article, we describe findings from classroom observations and interviews. We highlight + how learners wrestled with persistent accessibility challenges, connected pre-existing + music knowledge with Tidal Cycles concepts, created a culture of respect and support, + and made suggestions for improving FiLOrk. We conclude by discussing opportunities + to make live coding ensembles accessible to both BVI people and high school learners.' 
+ address: 'Mexico City, Mexico' + articleno: 64 + author: William C Payne and Matthew Kaney and Yuhua Cao and Eric Xu and Xinran Shen + and Katrina Lee and Amy Hurst + bibtex: "@inproceedings{nime2023_64,\n abstract = {In music and computer science\ + \ classrooms, Blind and Visually Impaired (BVI) learners are often not given alternatives\ + \ to visual technologies and materials. FiLOrk, an ensemble at the Filomen M.\ + \ D'Agostino Greenberg Music School, is made up of five BVI high school learners\ + \ who studied and performed computer music using the live coding language Tidal\ + \ Cycles over the course of a semester. To make FiLOrk approachable and accessible\ + \ we wrote a new curriculum featuring audio/tactile learning materials, and we\ + \ designed a collaborative web editor for use with learners' assistive technologies,\ + \ including screen readers and braille displays. In this article, we describe\ + \ findings from classroom observations and interviews. We highlight how learners\ + \ wrestled with persistent accessibility challenges, connected pre-existing music\ + \ knowledge with Tidal Cycles concepts, created a culture of respect and support,\ + \ and made suggestions for improving FiLOrk. 
We conclude by discussing opportunities\ + \ to make live coding ensembles accessible to both BVI people and high school\ + \ learners.},\n address = {Mexico City, Mexico},\n articleno = {64},\n author\ + \ = {William C Payne and Matthew Kaney and Yuhua Cao and Eric Xu and Xinran Shen\ + \ and Katrina Lee and Amy Hurst},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {9},\n pages = {463--471},\n title = {Live Coding Ensemble as Accessible Classroom},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_64.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177563 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'expression, style, structure, skill, virtuosity ' - pages: 213--217 - title: Designing for Style in New Musical Interactions - url: http://www.nime.org/proceedings/2009/nime2009_213.pdf - year: 2009 + month: May + numpages: 9 + pages: 463--471 + title: Live Coding Ensemble as Accessible Classroom + track: Papers + url: http://nime.org/proceedings/2023/nime2023_64.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Cook2009 - abstract: 'This paper revisits/extends “Principles for Designing Computer Music - Controllers” (NIME 2001), subsequently updated in a NIME 2007 keynote address. - A redesign of SqueezeVox Maggie (a reoccurring NIME character) is used as an example - of which principles have held fast over the years, and which have changed due - to advances in technology. A few new principles are also added to the list.' - address: 'Pittsburgh, PA, United States' - author: 'Cook, Perry R.' 
- bibtex: "@inproceedings{Cook2009,\n abstract = {This paper revisits/extends “Principles\ - \ for Designing Computer Music Controllers” (NIME 2001), subsequently updated\ - \ in a NIME 2007 keynote address. A redesign of SqueezeVox Maggie (a reoccurring\ - \ NIME character) is used as an example of which principles have held fast over\ - \ the years, and which have changed due to advances in technology. A few new principles\ - \ are also added to the list.},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Cook, Perry R.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177493},\n\ - \ issn = {2220-4806},\n keywords = {HCI, Composed Instruments, Voice Synthesis,\ - \ Wireless, Batteries, Laptop Orchestras, SenSAs.},\n pages = {218--221},\n title\ - \ = {Re-Designing Principles for Computer Music Controllers : a Case Study of\ - \ SqueezeVox Maggie},\n url = {http://www.nime.org/proceedings/2009/nime2009_218.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_65 + abstract: 'In this article, we describe the challenges of an artistic residency + that included: a distributed improvisation in VR, performances using Digital Musical + Instruments (DMIs), and Open Source software as much as possible. For this residency, + we were constrained to using Mozilla’s Hubs as the Metaverse platform. We describe + the shortcomings of the platform as a performance space in light of our experience, + musical cultures, and the social aspects of a musical performance. We also address + select technical issues pertaining to the context of a hybrid musical performance + (simultaneously in Virtual Reality (VR) and in-real-life (IRL)) using this particular + technology stack. Furthermore, we describe the challenges and surprises that occurred + with Faust (Function Audio Stream), which was our choice of synthesis engine for + the project. 
We conclude this paper by identifying some possible avenues for future + research, exploration, and performances of a similar nature. We wish to clarify + that although we will be talking a lot about Hubs, which was the Virtual Reality + (VR) platform used for the residency, we were not endorsed by Mozilla.' + address: 'Mexico City, Mexico' + articleno: 65 + author: Michał Seta and Dirk J Stromberg and D STEWART + bibtex: "@inproceedings{nime2023_65,\n abstract = {In this article, we describe\ + \ the challenges of an artistic residency that included: a distributed improvisation\ + \ in VR, performances using Digital Musical Instruments (DMIs), and Open Source\ + \ software as much as possible. For this residency, we were constrained to using\ + \ Mozilla’s Hubs as the Metaverse platform. We describe the shortcomings of the\ + \ platform as a performance space in light of our experience, musical cultures,\ + \ and the social aspects of a musical performance. We also address select technical\ + \ issues pertaining to the context of a hybrid musical performance (simultaneously\ + \ in Virtual Reality (VR) and in-real-life (IRL)) using this particular technology\ + \ stack. Furthermore, we describe the challenges and surprises that occurred with\ + \ Faust (Function Audio Stream), which was our choice of synthesis engine for\ + \ the project. We conclude this paper by identifying some possible avenues for\ + \ future research, exploration, and performances of a similar nature. 
We wish\ + \ to clarify that although we will be talking a lot about Hubs, which was the\ + \ Virtual Reality (VR) platform used for the residency, we were not endorsed by\ + \ Mozilla.},\n address = {Mexico City, Mexico},\n articleno = {65},\n author =\ + \ {Michał Seta and Dirk J Stromberg and D STEWART},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {7},\n pages = {472--478},\n title = {Building hybrid\ + \ performances with DMIs, Hubs and Faust},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_65.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177493 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'HCI, Composed Instruments, Voice Synthesis, Wireless, Batteries, Laptop - Orchestras, SenSAs.' - pages: 218--221 - title: 'Re-Designing Principles for Computer Music Controllers : a Case Study of - SqueezeVox Maggie' - url: http://www.nime.org/proceedings/2009/nime2009_218.pdf - year: 2009 + month: May + numpages: 7 + pages: 472--478 + title: 'Building hybrid performances with DMIs, Hubs and Faust' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_65.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Kapuscinski2009 - abstract: 'This paper reports on initial stages of research leading to the development - of an intermedia performance Counterlines --- a duet for Disklavier and Wacom - Cintiq, in which both performers generate audiovisual gestures that relate to - each other contrapuntally. The pianist generates graphic elements while playing - music and the graphic performer generates piano notes by drawing lines. The paper - focuses on interfacing sounds and images performed by the pianist. 
It provides - rationale for the choice of materials of great simplicity and describes our approach - to mapping. ' - address: 'Pittsburgh, PA, United States' - author: 'Kapuscinski, Jaroslaw and Sanchez, Javier' - bibtex: "@inproceedings{Kapuscinski2009,\n abstract = {This paper reports on initial\ - \ stages of research leading to the development of an intermedia performance Counterlines\ - \ --- a duet for Disklavier and Wacom Cintiq, in which both performers generate\ - \ audiovisual gestures that relate to each other contrapuntally. The pianist generates\ - \ graphic elements while playing music and the graphic performer generates piano\ - \ notes by drawing lines. The paper focuses on interfacing sounds and images performed\ - \ by the pianist. It provides rationale for the choice of materials of great simplicity\ - \ and describes our approach to mapping. },\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Kapuscinski, Jaroslaw and Sanchez, Javier},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177597},\n issn = {2220-4806},\n keywords\ - \ = {intermedia, Disklavier, piano, Wacom Cintiq, mapping, visual music },\n pages\ - \ = {222--225},\n title = {Interfacing Graphic and Musical Elements in Counterlines},\n\ - \ url = {http://www.nime.org/proceedings/2009/nime2009_222.pdf},\n year = {2009}\n\ - }\n" + ID: nime2023_66 + abstract: 'This paper presents the BioSynth, an affective biofeedback device for + generating electronic music developed over a decade as part of my research-creation + practice. 
The BioSynth has facilitated the creation of work involving performers + from a variety of ages and professional experiences, contributing to knowledge + regarding emotional performance, exposing the differences between perceived and + felt emotion within biofeedback art, extending emotional quantification techniques + to notions of emotional performance technique, emotional labor, and what feminist + Alva Gotby calls emotional reproduction. The design of the BioSynth privileges + relational and real-world interactions as well as feminist thought regarding gendered + hierarchies between body, mind, musical notation, social context, emotion and + reason, and the division between performers and composers. This feminist inquiry + has led to the development of alternatives to traditional frameworks for biofeedback + music that rely on metaphors of musical instrumentation. After an introduction + presenting two lived scenarios, this article is divided into three sections: hardware, + software, and wetware. The hardware section describes the BioSynth through its + design, which privileges ease-of-use for non-expert users. The software section + describes mapping considerations based on feminist principles of measuring the + emotional subject only against itself. Finally, in the wetware section I describe + a feminist-inspired approach to emotional performance that embraces artificiality, + irony, play, pleasure, and performance in biofeedback art, implying novel models + for composer-instrument-performer relations.' + address: 'Mexico City, Mexico' + articleno: 66 + author: Erin M Gee + bibtex: "@inproceedings{nime2023_66,\n abstract = {This paper presents the BioSynth,\ + \ an affective biofeedback device for generating electronic music developed over\ + \ a decade as part of my research-creation practice. 
The BioSynth has facilitated\ + \ the creation of work involving performers from a variety of ages and professional\ + \ experiences, contributing to knowledge regarding emotional performance, exposing\ + \ the differences between perceived and felt emotion within biofeedback art, extending\ + \ emotional quantification techniques to notions of emotional performance technique,\ + \ emotional labor, and what feminist Alva Gotby calls emotional reproduction.\ + \ The design of the BioSynth privileges relational and real-world interactions\ + \ as well as feminist thought regarding gendered hierarchies between body, mind,\ + \ musical notation, social context, emotion and reason, and the division between\ + \ performers and composers. This feminist inquiry has led to the development of\ + \ alternatives to traditional frameworks for biofeedback music that rely on metaphors\ + \ of musical instrumentation. After an introduction presenting two lived scenarios,\ + \ this article is divided into three sections: hardware, software, and wetware.\ + \ The hardware section describes the BioSynth through its design, which privileges\ + \ ease-of-use for non-expert users. The software section describes mapping considerations\ + \ based on feminist principles of measuring the emotional subject only against\ + \ itself. 
Finally, in the wetware section I describe a feminist-inspired approach\ + \ to emotional performance that embraces artificiality, irony, play, pleasure,\ + \ and performance in biofeedback art, implying novel models for composer-instrument-performer\ + \ relations.},\n address = {Mexico City, Mexico},\n articleno = {66},\n author\ + \ = {Erin M Gee},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ + \ issn = {2220-4806},\n month = {May},\n numpages = {7},\n pages = {479--485},\n\ + \ title = {The BioSynth—an affective biofeedback device grounded in feminist thought},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_66.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177597 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'intermedia, Disklavier, piano, Wacom Cintiq, mapping, visual music ' - pages: 222--225 - title: Interfacing Graphic and Musical Elements in Counterlines - url: http://www.nime.org/proceedings/2009/nime2009_222.pdf - year: 2009 + month: May + numpages: 7 + pages: 479--485 + title: The BioSynth—an affective biofeedback device grounded in feminist thought + track: Papers + url: http://nime.org/proceedings/2023/nime2023_66.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Polfreman2009 - abstract: 'Music composition on computer is a challenging task, involving a range - of data types to be managed within a single software tool. A composition typically - comprises a complex arrangement of material, with many internal relationships - between data in different locations repetition, inversion, retrograde, reversal - and more sophisticated transformations. 
The creation of such complex artefacts - is labour intensive, and current systems typically place a significant cognitive - burden on the composer in terms of maintaining a work as a coherent whole. FrameWorks - 3D is an attempt to improve support for composition tasks within a Digital Audio - Workstation (DAW) style environment via a novel three-dimensional (3D) user-interface. - In addition to the standard paradigm of tracks, regions and tape recording analogy, - FrameWorks displays hierarchical and transformational information in a single, - fully navigable workspace. The implementation combines Java with Max/MSP to create - a cross-platform, user-extensible package and will be used to assess the viability - of such a tool and to develop the ideas further. ' - address: 'Pittsburgh, PA, United States' - author: 'Polfreman, Richard' - bibtex: "@inproceedings{Polfreman2009,\n abstract = {Music composition on computer\ - \ is a challenging task, involving a range of data types to be managed within\ - \ a single software tool. A composition typically comprises a complex arrangement\ - \ of material, with many internal relationships between data in different locations\ - \ repetition, inversion, retrograde, reversal and more sophisticated transformations.\ - \ The creation of such complex artefacts is labour intensive, and current systems\ - \ typically place a significant cognitive burden on the composer in terms of maintaining\ - \ a work as a coherent whole. FrameWorks 3D is an attempt to improve support for\ - \ composition tasks within a Digital Audio Workstation (DAW) style environment\ - \ via a novel three-dimensional (3D) user-interface. 
In addition to the standard\ - \ paradigm of tracks, regions and tape recording analogy, FrameWorks displays\ - \ hierarchical and transformational information in a single, fully navigable workspace.\ - \ The implementation combines Java with Max/MSP to create a cross-platform, user-extensible\ - \ package and will be used to assess the viability of such a tool and to develop\ - \ the ideas further. },\n address = {Pittsburgh, PA, United States},\n author\ - \ = {Polfreman, Richard},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177661},\n\ - \ issn = {2220-4806},\n keywords = {Digital Audio Workstation, graphical user-interfaces,\ - \ 3D graphics, Max/MSP, Java. },\n pages = {226--229},\n title = {FrameWorks {3D}\ - \ : Composition in the Third Dimension},\n url = {http://www.nime.org/proceedings/2009/nime2009_226.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_67 + abstract: "This article presents new tools developed in FAUST language to create\ + \ musical interactions using electrophysiological signals as input. The developed\ + \ tools are centered around signal processing and simulation of electrophysiological\ + \ signals. These techniques are used to clean and process the electrophysiological\ + \ signals and subsequently provide real-time interactions to feed the control\ + \ of sound processes. 
The system provides modules that are highly musically expressive\ + \ especially in the domain of spatial sound.\nThese tools also allow to set up\ + \ a testing environment by replacing the need of electrophysiological capturing\ + \ devices.\nThe findings of this exploration provide a better understanding of\ + \ how the FAUST language can be used in conjunction with electrophysiological\ + \ signals and exposes interesting opportunities to explore further possibilities\ + \ in music creation in an open source environment with the possibility of multitarget\ + \ compilation, allowing our modules to be used either in such softwares as Max\ + \ or embedded on microcontrollers." + address: 'Mexico City, Mexico' + articleno: 67 + author: David Fierro and Alain Bonardi and Atau Tanaka + bibtex: "@inproceedings{nime2023_67,\n abstract = {This article presents new tools\ + \ developed in FAUST language to create musical interactions using electrophysiological\ + \ signals as input. The developed tools are centered around signal processing\ + \ and simulation of electrophysiological signals. These techniques are used to\ + \ clean and process the electrophysiological signals and subsequently provide\ + \ real-time interactions to feed the control of sound processes. 
The system provides\ + \ modules that are highly musically expressive especially in the domain of spatial\ + \ sound.\nThese tools also allow to set up a testing environment by replacing\ + \ the need of electrophysiological capturing devices.\nThe findings of this exploration\ + \ provide a better understanding of how the FAUST language can be used in conjunction\ + \ with electrophysiological signals and exposes interesting opportunities to explore\ + \ further possibilities in music creation in an open source environment with the\ + \ possibility of multitarget compilation, allowing our modules to be used either\ + \ in such softwares as Max or embedded on microcontrollers.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {67},\n author = {David Fierro and Alain Bonardi\ + \ and Atau Tanaka},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {8},\n\ + \ pages = {486--493},\n title = {FAUST Multiplatform toolbox for Body Brain Digital\ + \ Musical Instruments},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_67.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177661 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Digital Audio Workstation, graphical user-interfaces, 3D graphics, Max/MSP, - Java. 
' - pages: 226--229 - title: 'FrameWorks 3D : Composition in the Third Dimension' - url: http://www.nime.org/proceedings/2009/nime2009_226.pdf - year: 2009 + month: May + numpages: 8 + pages: 486--493 + title: FAUST Multiplatform toolbox for Body Brain Digital Musical Instruments + track: Papers + url: http://nime.org/proceedings/2023/nime2023_67.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Freed2009 - abstract: 'A compendium of foundational circuits for interfacing resistive pressure - and position sensors is presented with example applications for music controllers - and tangible interfaces. ' - address: 'Pittsburgh, PA, United States' - author: 'Freed, Adrian' - bibtex: "@inproceedings{Freed2009,\n abstract = {A compendium of foundational circuits\ - \ for interfacing resistive pressure and position sensors is presented with example\ - \ applications for music controllers and tangible interfaces. },\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Freed, Adrian},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177515},\n issn = {2220-4806},\n keywords = {Piezoresistive\ - \ Touch Sensor Pressure Sensing Current Steering Multitouch. },\n pages = {230--235},\n\ - \ title = {Novel and Forgotten Current-steering Techniques for Resistive Multitouch,\ - \ Duotouch, and Polytouch Position Sensing with Pressure},\n url = {http://www.nime.org/proceedings/2009/nime2009_230.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_68 + abstract: 'This paper presents the Feedback Mop Cello, a feedback instrument integrating + acoustic feedback loops generated through a microphone and loudspeaker combination + with a control interface inspired by the cello. Current paradigms of interaction + with feedback instruments are based around ideas of negotiation with autonomous + systems rather than control. 
We explore the possibility of integration of negotiated + and controlled elements through a design focused on isolating the acoustic feedback + loop signal path from the signal path to which sound processing is applied. We + focus on three musical parameters of timbre, pitch, and dynamics. We present timbre + as a parameter to mainly be negotiated within the feedback loop, while pitch and + dynamics are parameters that can be explicitly controlled through the interface. + An approach is taken to minimize components within the feedback loop in order + to foreground the choice of loudspeaker as an integral part of the instrument’s + sound. A preliminary user study is carried out involving five semi-professional + musicians, focusing on their reflection regarding their interaction with the acoustic + feedback loop.' + address: 'Mexico City, Mexico' + articleno: 68 + author: Hugh A von Arnim and Stefano Fasciani and Çağrı Erdem + bibtex: "@inproceedings{nime2023_68,\n abstract = {This paper presents the Feedback\ + \ Mop Cello, a feedback instrument integrating acoustic feedback loops generated\ + \ through a microphone and loudspeaker combination with a control interface inspired\ + \ by the cello. Current paradigms of interaction with feedback instruments are\ + \ based around ideas of negotiation with autonomous systems rather than control.\ + \ We explore the possibility of integration of negotiated and controlled elements\ + \ through a design focused on isolating the acoustic feedback loop signal path\ + \ from the signal path to which sound processing is applied. We focus on three\ + \ musical parameters of timbre, pitch, and dynamics. We present timbre as a parameter\ + \ to mainly be negotiated within the feedback loop, while pitch and dynamics are\ + \ parameters that can be explicitly controlled through the interface. 
An approach\ + \ is taken to minimize components within the feedback loop in order to foreground\ + \ the choice of loudspeaker as an integral part of the instrument’s sound. A preliminary\ + \ user study is carried out involving five semi-professional musicians, focusing\ + \ on their reflection regarding their interaction with the acoustic feedback loop.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {68},\n author = {Hugh A von\ + \ Arnim and Stefano Fasciani and Çağrı Erdem},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {6},\n pages = {494--499},\n title = {The Feedback Mop Cello: An\ + \ Instrument for Interacting with Acoustic Feedback Loops},\n track = {Papers},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_68.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177515 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Piezoresistive Touch Sensor Pressure Sensing Current Steering Multitouch. ' - pages: 230--235 - title: 'Novel and Forgotten Current-steering Techniques for Resistive Multitouch, - Duotouch, and Polytouch Position Sensing with Pressure' - url: http://www.nime.org/proceedings/2009/nime2009_230.pdf - year: 2009 + month: May + numpages: 6 + pages: 494--499 + title: 'The Feedback Mop Cello: An Instrument for Interacting with Acoustic Feedback + Loops' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_68.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Jones2009a - abstract: 'This paper presents a new force-sensitive surface designedfor playing - music. 
A prototype system has been implemented using a passive capacitive sensor, - a commodity multichannel audio interface, and decoding software running ona laptop - computer. This setup has been a successful, lowcost route to a number of experiments - in intimate musicalcontrol.' - address: 'Pittsburgh, PA, United States' - author: 'Jones, Randy and Driessen, Peter and Schloss, Andrew and Tzanetakis, George' - bibtex: "@inproceedings{Jones2009a,\n abstract = {This paper presents a new force-sensitive\ - \ surface designedfor playing music. A prototype system has been implemented using\ - \ a passive capacitive sensor, a commodity multichannel audio interface, and decoding\ - \ software running ona laptop computer. This setup has been a successful, lowcost\ - \ route to a number of experiments in intimate musicalcontrol.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Jones, Randy and Driessen, Peter and Schloss,\ - \ Andrew and Tzanetakis, George},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177589},\n\ - \ issn = {2220-4806},\n keywords = {Multitouch, sensors, tactile, capacitive,\ - \ percussion controllers. },\n pages = {236--241},\n title = {A Force-Sensitive\ - \ Surface for Intimate Control},\n url = {http://www.nime.org/proceedings/2009/nime2009_236.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_69 + abstract: 'Mixboard is a web / iOS application that allows music lovers to create + and share personalized musical mashups. The app allows users to choose and organize + up to four songs within four different lanes. The system automatically separates + the songs'' sources into corresponding stems, calculates an appropriate tempo + and key for the mashup, and chooses song segments according to users'' visual + creation. 
Unlike other professional applications used for mashups, Mixboard does + not require experience with Digital Audio Workstations (DAWs) or waveform editing + and supports unlimited library of usable songs. In a co-creative fashion, users + can explore their creativity while the system contributes its own creative input + utilizing Music Information Retrieval (MIR), Digital Signal Processing (DSP), + and compositional templates. User studies were conducted to evaluate Mixboard''s + success in achieving an effective balance between system automation and user control. + Results indicate strong metrics for user creative expression, engagement, and + ownership, as well as high satisfaction with the final musical outcome. Results + also suggest a number of modifications to the balance between user control and + system automation, which will be addressed in future work.' + address: 'Mexico City, Mexico' + articleno: 69 + author: Raghavasimhan Sankaranarayanan and Nitin Hugar and Qinying Lei and Thomas + Ottolin and Hardik Goel and Gil Weinberg + bibtex: "@inproceedings{nime2023_69,\n abstract = {Mixboard is a web / iOS application\ + \ that allows music lovers to create and share personalized musical mashups. The app\ + \ allows users to choose and organize up to four songs within four different lanes.\ + \ The system automatically separates the songs' sources into corresponding stems,\ + \ calculates an appropriate tempo and key for the mashup, and chooses song segments\ + \ according to users' visual creation. Unlike other professional applications\ + \ used for mashups, Mixboard does not require experience with Digital Audio Workstations\ + \ (DAWs) or waveform editing and supports unlimited library of usable songs. In\ + \ a co-creative fashion, users can explore their creativity while the system contributes\ + \ its own creative input utilizing Music Information Retrieval (MIR), Digital\ + \ Signal Processing (DSP), and compositional templates. 
User studies were conducted\ + \ to evaluate Mixboard's success in achieving an effective balance between system\ + \ automation and user control. Results indicate strong metrics for user creative\ + \ expression, engagement, and ownership, as well as high satisfaction with the\ + \ final musical outcome. Results also suggest a number of modifications to the\ + \ balance between user control and system automation, which will be addressed\ + \ in future work.},\n address = {Mexico City, Mexico},\n articleno = {69},\n author\ + \ = {Raghavasimhan Sankaranarayanan and Nitin Hugar and Qinying Lei and Thomas\ + \ Ottolin and Hardik Goel and Gil Weinberg},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {6},\n pages = {500--505},\n title = {Mixboard - A Co-Creative Mashup\ + \ Application for Novices},\n track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_69.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177589 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Multitouch, sensors, tactile, capacitive, percussion controllers. ' - pages: 236--241 - title: A Force-Sensitive Surface for Intimate Control - url: http://www.nime.org/proceedings/2009/nime2009_236.pdf - year: 2009 + month: May + numpages: 6 + pages: 500--505 + title: Mixboard - A Co-Creative Mashup Application for Novices + track: Papers + url: http://nime.org/proceedings/2023/nime2023_69.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Kellum2009 - abstract: 'This paper introduces a flexible mapping editor, which transforms multi-touch - devices into musical instruments. 
The editor enables users to create interfaces - by dragging and dropping components onto the interface and attaching actions to - them, which will be executed when certain userdefined conditions obtain. The editor - receives touch information via the non-proprietary communication protocol, TUIO - [9], and can, therefore, be used together with a variety of different multi-touch - input devices. ' - address: 'Pittsburgh, PA, United States' - author: 'Kellum, Greg and Crevoisier, Alain' - bibtex: "@inproceedings{Kellum2009,\n abstract = {This paper introduces a flexible\ - \ mapping editor, which transforms multi-touch devices into musical instruments.\ - \ The editor enables users to create interfaces by dragging and dropping components\ - \ onto the interface and attaching actions to them, which will be executed when\ - \ certain userdefined conditions obtain. The editor receives touch information\ - \ via the non-proprietary communication protocol, TUIO [9], and can, therefore,\ - \ be used together with a variety of different multi-touch input devices. },\n\ - \ address = {Pittsburgh, PA, United States},\n author = {Kellum, Greg and Crevoisier,\ - \ Alain},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177601},\n issn = {2220-4806},\n\ - \ keywords = {NIME, multi-touch, multi-modal interface, sonic interaction design.\ - \ },\n pages = {242--245},\n title = {A Flexible Mapping Editor for Multi-touch\ - \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_242.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_70 + abstract: 'This paper presents complex mapping strategies that offer flexibility + for improvising with elaborate digital environments by allowing for more human + control with less physical input. The intention is not to reduce human physicality, + but instead actions are further extended and altered through complex envelopes. 
+ This software was originally designed for the augmented guitar, to address the + issue of a lack of spare bandwidth (Cook, 2001) that is inherent to guitar playing. + This makes it challenging to simultaneously control digital interfaces without + compromising guitar technique. The Slider MultiMap software discussed in this + paper helps to overcome this dilemma by enabling a guitarist to control multiple + audio effects with a single gesture while individually customising how each parameter + is controlled prior to the performance. At the same time, it explores the delegation + of tasks to the computer in situations where indirect control is more desirable.' + address: 'Mexico City, Mexico' + articleno: 70 + author: Nicholas Canny + bibtex: "@inproceedings{nime2023_70,\n abstract = {This paper presents complex mapping\ + \ strategies that offer flexibility for improvising with elaborate digital environments\ + \ by allowing for more human control with less physical input. The intention is\ + \ not to reduce human physicality, but instead actions are further extended and\ + \ altered through complex envelopes. This software was originally designed for\ + \ the augmented guitar, to address the issue of a lack of spare bandwidth (Cook,\ + \ 2001) that is inherent to guitar playing. This makes it challenging to simultaneously\ + \ control digital interfaces without compromising guitar technique. The Slider\ + \ MultiMap software discussed in this paper helps to overcome this dilemma by\ + \ enabling a guitarist to control multiple audio effects with a single gesture\ + \ while individually customising how each parameter is controlled prior to the\ + \ performance. 
At the same time, it explores the delegation of tasks to the computer\ + \ in situations where indirect control is more desirable.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {70},\n author = {Nicholas Canny},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {5},\n pages = {506--510},\n title = {The implementation\ + \ of envelope based complex mapping strategies to extend and augment human control},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_70.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177601 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'NIME, multi-touch, multi-modal interface, sonic interaction design. ' - pages: 242--245 - title: A Flexible Mapping Editor for Multi-touch Musical Instruments - url: http://www.nime.org/proceedings/2009/nime2009_242.pdf - year: 2009 + month: May + numpages: 5 + pages: 506--510 + title: The implementation of envelope based complex mapping strategies to extend + and augment human control + track: Papers + url: http://nime.org/proceedings/2023/nime2023_70.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Kiefer2009 - abstract: 'Phalanger is a system which facilitates the control of music software - with hand and finger motion, with the aim of creating a fluid style of interaction - that promotes musicality. The system is purely video based, requires no wearables - or accessories and uses affordable and accessible technology. It employs a neural - network for background segmentation, a combination of imaging techniques for frame - analysis, and a support vector machine (SVM) for recognition of hand positions. 
- System evaluation showed the SVM to reliably differentiate between eight different - classes. An initial formative user evaluation with ten musicians was carried out - to help build a picture of how users responded to the system; this highlighted - areas that need improvement and lent some insight into useful features for the - next version.' - address: 'Pittsburgh, PA, United States' - author: 'Kiefer, Chris and Collins, Nick and Fitzpatrick, Geraldine' - bibtex: "@inproceedings{Kiefer2009,\n abstract = {Phalanger is a system which facilitates\ - \ the control of music software with hand and finger motion, with the aim of creating\ - \ a fluid style of interaction that promotes musicality. The system is purely\ - \ video based, requires no wearables or accessories and uses affordable and accessible\ - \ technology. It employs a neural network for background segmentation, a combination\ - \ of imaging techniques for frame analysis, and a support vector machine (SVM)\ - \ for recognition of hand positions. System evaluation showed the SVM to reliably\ - \ differentiate between eight different classes. 
An initial formative user evaluation\ - \ with ten musicians was carried out to help build a picture of how users responded\ - \ to the system; this highlighted areas that need improvement and lent some insight\ - \ into useful features for the next version.},\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Kiefer, Chris and Collins, Nick and Fitzpatrick, Geraldine},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177603},\n issn = {2220-4806},\n\ - \ keywords = {nime09},\n pages = {246--249},\n title = {Phalanger : Controlling\ - \ Music Software With Hand Movement Using A Computer Vision and Machine Learning\ - \ Approach},\n url = {http://www.nime.org/proceedings/2009/nime2009_246.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_71 + abstract: 'This paper presents a room feedback system which the author has been + developing and performing with for nearly three years. The design emerged from + an artistic research process which emphasises multiple explorations coexisting + around a research topic while having a sensitivity to the practicalities of a + customary gig (short set-up time, unpredictable acoustics). Typically enabled + by a stereo room-mic and a pair of speakers, many algorithms have been explored + in the loop with some being tributes to historical feedback works. An overall + design is offered where all feedback pathways are simultaneously available and + mutually interfere via the room. Each algorithm is designed to have one significant + performable parameter but how this is mapped to sensors or widgets is itself performable + with various behaviours available, including some explorations of self-programming + and ‘intra-active’ ideas. 
Concert experience in solo and small ensemble formats + is discussed and a number of contributions are identified in how the work: extends + room feedback research to explore multiple parallel processes of varied spectro-morphological + character, offers connections to historical work in a pedagogically interesting + fashion, demonstrates several novel algorithms, while exemplifying a characteristic + artistic research method. The paper closes with a speculative ‘feedback aesthetics’ + to help configure future work.' + address: 'Mexico City, Mexico' + articleno: 71 + author: John M Bowers + bibtex: "@inproceedings{nime2023_71,\n abstract = {This paper presents a room feedback\ + \ system which the author has been developing and performing with for nearly three\ + \ years. The design emerged from an artistic research process which emphasises\ + \ multiple explorations coexisting around a research topic while having a sensitivity\ + \ to the practicalities of a customary gig (short set-up time, unpredictable acoustics).\ + \ Typically enabled by a stereo room-mic and a pair of speakers, many algorithms\ + \ have been explored in the loop with some being tributes to historical feedback\ + \ works. An overall design is offered where all feedback pathways are simultaneously\ + \ available and mutually interfere via the room. Each algorithm is designed to\ + \ have one significant performable parameter but how this is mapped to sensors\ + \ or widgets is itself performable with various behaviours available, including\ + \ some explorations of self-programming and ‘intra-active’ ideas. 
Concert experience\ + \ in solo and small ensemble formats is discussed and a number of contributions\ + \ are identified in how the work: extends room feedback research to explore multiple\ + \ parallel processes of varied spectro-morphological character, offers connections\ + \ to historical work in a pedagogically interesting fashion, demonstrates several\ + \ novel algorithms, while exemplifying a characteristic artistic research method.\ + \ The paper closes with a speculative ‘feedback aesthetics’ to help configure\ + \ future work.},\n address = {Mexico City, Mexico},\n articleno = {71},\n author\ + \ = {John M Bowers},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {10},\n\ + \ pages = {511--520},\n title = {A Hapless But Entertaining Roar’: Developing\ + \ a Room Feedback System through Artistic Research and Aesthetic Reflection},\n\ + \ track = {Papers},\n url = {http://nime.org/proceedings/2023/nime2023_71.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177603 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 246--249 - title: 'Phalanger : Controlling Music Software With Hand Movement Using A Computer - Vision and Machine Learning Approach' - url: http://www.nime.org/proceedings/2009/nime2009_246.pdf - year: 2009 + month: May + numpages: 10 + pages: 511--520 + title: 'A Hapless But Entertaining Roar’: Developing a Room Feedback System through + Artistic Research and Aesthetic Reflection' + track: Papers + url: http://nime.org/proceedings/2023/nime2023_71.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Nakra2009 - abstract: 'The UBS Virtual Maestro is an interactive conducting system designed - by Immersion Music to simulate the experience of 
orchestral conducting for the - general public attending a classical music concert. The system utilizes the Wii - Remote, which users hold and move like a conducting baton to affect the tempo - and dynamics of an orchestral video/audio recording. The accelerometer data from - the Wii Remote is used to control playback speed and volume in real-time. The - system is housed in a UBSbranded kiosk that has toured classical performing arts - venues throughout the United States and Europe in 2007 and 2008. In this paper - we share our experiences in designing this standalone system for thousands of - users, and lessons that we learned from the project. ' - address: 'Pittsburgh, PA, United States' - author: 'Nakra, Teresa M. and Ivanov, Yuri and Smaragdis, Paris and Ault, Chris' - bibtex: "@inproceedings{Nakra2009,\n abstract = {The UBS Virtual Maestro is an interactive\ - \ conducting system designed by Immersion Music to simulate the experience of\ - \ orchestral conducting for the general public attending a classical music concert.\ - \ The system utilizes the Wii Remote, which users hold and move like a conducting\ - \ baton to affect the tempo and dynamics of an orchestral video/audio recording.\ - \ The accelerometer data from the Wii Remote is used to control playback speed\ - \ and volume in real-time. The system is housed in a UBSbranded kiosk that has\ - \ toured classical performing arts venues throughout the United States and Europe\ - \ in 2007 and 2008. 
In this paper we share our experiences in designing this standalone\ - \ system for thousands of users, and lessons that we learned from the project.\ - \ },\n address = {Pittsburgh, PA, United States},\n author = {Nakra, Teresa M.\ - \ and Ivanov, Yuri and Smaragdis, Paris and Ault, Chris},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177637},\n issn = {2220-4806},\n keywords = {conducting,\ - \ gesture, interactive installations, Wii Remote },\n pages = {250--255},\n title\ - \ = {The UBS Virtual Maestro : an Interactive Conducting System},\n url = {http://www.nime.org/proceedings/2009/nime2009_250.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_72 + abstract: 'How do live coders simultaneously develop new creations and master previous + ones? Using conclusions drawn from previous studies about exploratory programming + and our experience practicing live coding, we identified a need to support creation + and mastery in the live coding space—specifically in the realm of live coding + pertaining to musical creations. We developed a tool, SHARP, which attempted to + empower live coders in both their exploration and performances. SHARP is a code + editor extension that visualizes the history of each instrument that the live + coder creates; the visualization can then be used to revisit the previous states + of the instrument and create new ones. We believe that this extension will support + live coders’ exploration in practice as well as enable novel musical aesthetics + in performance contexts. We did an initial evaluation of SHARP using an autoethnographic + approach where one researcher used the tool over multiple sessions to compose + a piece. From the autoethnography, we saw that SHARP supported composition by + making it easier to explore different musical ideas and to revisit past states. 
+ Our analysis also hints at new possible features, such as being able to combine + multiple previous states together using SHARP.' + address: 'Mexico City, Mexico' + articleno: 72 + author: Douglas A Bowman Jr and Daniel Manesh and Sang Won Lee + bibtex: "@inproceedings{nime2023_72,\n abstract = {How do live coders simultaneously\ + \ develop new creations and master previous ones? Using conclusions drawn from\ + \ previous studies about exploratory programming and our experience practicing\ + \ live coding, we identified a need to support creation and mastery in the live\ + \ coding space—specifically in the realm of live coding pertaining to musical\ + \ creations. We developed a tool, SHARP, which attempted to empower live coders\ + \ in both their exploration and performances. SHARP is a code editor extension\ + \ that visualizes the history of each instrument that the live coder creates;\ + \ the visualization can then be used to revisit the previous states of the instrument\ + \ and create new ones. We believe that this extension will support live coders’\ + \ exploration in practice as well as enable novel musical aesthetics in performance\ + \ contexts. We did an initial evaluation of SHARP using an autoethnographic approach\ + \ where one researcher used the tool over multiple sessions to compose a piece.\ + \ From the autoethnography, we saw that SHARP supported composition by making\ + \ it easier to explore different musical ideas and to revisit past states. 
Our\ + \ analysis also hints at new possible features, such as being able to combine\ + \ multiple previous states together using SHARP.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {72},\n author = {Douglas A Bowman Jr and Daniel Manesh and Sang\ + \ Won Lee},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ + \ issn = {2220-4806},\n month = {May},\n numpages = {4},\n pages = {521--524},\n\ + \ title = {SHARP: Supporting Exploration and Rapid State Navigation in Live Coding\ + \ Music},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_72.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177637 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'conducting, gesture, interactive installations, Wii Remote ' - pages: 250--255 - title: 'The UBS Virtual Maestro : an Interactive Conducting System' - url: http://www.nime.org/proceedings/2009/nime2009_250.pdf - year: 2009 + month: May + numpages: 4 + pages: 521--524 + title: 'SHARP: Supporting Exploration and Rapid State Navigation in Live Coding + Music' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_72.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Jessop2009 - abstract: 'This paper describes The Vocal Augmentation and Manipulation Prosthesis - (VAMP) a gesture-based wearable controller for live-time vocal performance. This - controller allows a singer to capture and manipulate single notes that he or she - sings, using a gestural vocabulary developed from that of choral conducting. By - drawing from a familiar gestural vocabulary, this controller and the associated - mappings can be more intuitive and expressive for both performer and audience. 
' - address: 'Pittsburgh, PA, United States' - author: 'Jessop, Elena' - bibtex: "@inproceedings{Jessop2009,\n abstract = {This paper describes The Vocal\ - \ Augmentation and Manipulation Prosthesis (VAMP) a gesture-based wearable controller\ - \ for live-time vocal performance. This controller allows a singer to capture\ - \ and manipulate single notes that he or she sings, using a gestural vocabulary\ - \ developed from that of choral conducting. By drawing from a familiar gestural\ - \ vocabulary, this controller and the associated mappings can be more intuitive\ - \ and expressive for both performer and audience. },\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Jessop, Elena},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177583},\n issn = {2220-4806},\n keywords = {musical expressivity,\ - \ vocal performance, gestural control, conducting. },\n pages = {256--259},\n\ - \ title = {The Vocal Augmentation and Manipulation Prosthesis (VAMP): A Conducting-Based\ - \ Gestural Controller for Vocal Performance},\n url = {http://www.nime.org/proceedings/2009/nime2009_256.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_73 + abstract: 'Lambeosaurine hadrosaurs are duck-billed dinosaurs. Scientists hypothesize + that their large, bony crests which encapsulate complicated, hollow nasal passages + function as resonators for vocal calls. This paper discusses the work-in-process, + Dinosaur Choir, which recreates these vocal capabilities as musical skull instruments. + The skull and nasal passages are fabricated based on Computed Topology (CT) scans + of hadrosaur skulls, and larynx design is informed by scientific research. Musicians + and participants voice the instruments by blowing into a mouthpiece or microphone, + and a larynx mechanism creates the sound in response, which is then resonated + through the nasal passages. 
The instruments are intended both for interactive + exhibition and for on-going musical performance practice. Dinosaur Choir aims + to give life to the voices of dinosaurs, allowing an embodied experience with + extinct animals long lost to the past. This paper focuses on the development of + the first musical instrument in the series, based on an adult Corythosaurus skull. + We consider how scientific research as well as musical and practical concerns + impact the design process and what trade-offs must be contemplated and made in + order to achieve our aims of dinosaurian embodied sound.' + address: 'Mexico City, Mexico' + articleno: 73 + author: Courtney D Brown and Thomas Dudgeon and Cezary Gajewski + bibtex: "@inproceedings{nime2023_73,\n abstract = {Lambeosaurine hadrosaurs are\ + \ duck-billed dinosaurs. Scientists hypothesize that their large, bony crests\ + \ which encapsulate complicated, hollow nasal passages function as resonators\ + \ for vocal calls. This paper discusses the work-in-process, Dinosaur Choir, which\ + \ recreates these vocal capabilities as musical skull instruments. The skull and\ + \ nasal passages are fabricated based on Computed Topology (CT) scans of hadrosaur\ + \ skulls, and larynx design is informed by scientific research. Musicians and\ + \ participants voice the instruments by blowing into a mouthpiece or microphone,\ + \ and a larynx mechanism creates the sound in response, which is then resonated\ + \ through the nasal passages. The instruments are intended both for interactive\ + \ exhibition and for on-going musical performance practice. Dinosaur Choir aims\ + \ to give life to the voices of dinosaurs, allowing an embodied experience with\ + \ extinct animals long lost to the past. This paper focuses on the development\ + \ of the first musical instrument in the series, based on an adult Corythosaurus\ + \ skull. 
We consider how scientific research as well as musical and practical\ + \ concerns impact the design process and what trade-offs must be contemplated\ + \ and made in order to achieve our aims of dinosaurian embodied sound.},\n address\ + \ = {Mexico City, Mexico},\n articleno = {73},\n author = {Courtney D Brown and\ + \ Thomas Dudgeon and Cezary Gajewski},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {6},\n pages = {525--530},\n title = {Dinosaur Choir: Designing for Scientific\ + \ Exploration, Outreach, and Experimental Music},\n track = {Work in Progress},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_73.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177583 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'musical expressivity, vocal performance, gestural control, conducting. ' - pages: 256--259 - title: 'The Vocal Augmentation and Manipulation Prosthesis (VAMP): A Conducting-Based - Gestural Controller for Vocal Performance' - url: http://www.nime.org/proceedings/2009/nime2009_256.pdf - year: 2009 + month: May + numpages: 6 + pages: 525--530 + title: 'Dinosaur Choir: Designing for Scientific Exploration, Outreach, and Experimental + Music' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_73.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Henriques2009 - abstract: 'The Double Slide Controller is a new electronic music instrument that - departs from the slide trombone as a model for its design. 
Going much beyond a - mere simulation of its acoustic counterpart it introduces truly innovative features: - two powerful and versatile sets of gesture driven interfaces actuated by the hands - of the performer, as well as featuring two independent slides, one for each hand/arm - of the musician. The combination of these features make this instrument a great - tool to explore new venues in musical expression, given the many degrees of technical - and musical complexity that can be achieved during its performance.' - address: 'Pittsburgh, PA, United States' - author: 'Henriques, Tomás' - bibtex: "@inproceedings{Henriques2009,\n abstract = {The Double Slide Controller\ - \ is a new electronic music instrument that departs from the slide trombone as\ - \ a model for its design. Going much beyond a mere simulation of its acoustic\ - \ counterpart it introduces truly innovative features: two powerful and versatile\ - \ sets of gesture driven interfaces actuated by the hands of the performer, as\ - \ well as featuring two independent slides, one for each hand/arm of the musician.\ - \ The combination of these features make this instrument a great tool to explore\ - \ new venues in musical expression, given the many degrees of technical and musical\ - \ complexity that can be achieved during its performance.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Henriques, Tom\\'{a}s},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177571},\n issn = {2220-4806},\n keywords = {Musical\ - \ Instrument, Sensor technologies, Computer Music, Hardware and Software Design.},\n\ - \ pages = {260--261},\n title = {Double Slide Controller},\n url = {http://www.nime.org/proceedings/2009/nime2009_260.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_74 + abstract: 'This paper introduces the T-Patch, a software application that streamlines + the use of T-Stick Digital Musical Instruments 
(DMIs). It offers a user-friendly + interface for gesture extraction, mapping, signal conditioning, sound synthesis, + and sequencing with cues, enabling composers to create music without programming. + Our main contribution is two-fold: (1) providing a versatile software solution + to address the current lack of music-making support for T-Stick DMIs, and (2) + highlighting the importance of demonstration content, such as a video, to showcase + the instrument’s capabilities and inspire new users. The T-Patch reduces the barrier + to entry for using the T-Stick DMI and offers a shared software solution for various + music-making scenarios.' + address: 'Mexico City, Mexico' + articleno: 74 + author: Takuto Fukuda and Marcelo Wanderley + bibtex: "@inproceedings{nime2023_74,\n abstract = {This paper introduces the T-Patch,\ + \ a software application that streamlines the use of T-Stick Digital Musical Instruments\ + \ (DMIs). It offers a user-friendly interface for gesture extraction, mapping,\ + \ signal conditioning, sound synthesis, and sequencing with cues, enabling composers\ + \ to create music without programming. Our main contribution is two-fold: (1)\ + \ providing a versatile software solution to address the current lack of music-making\ + \ support for T-Stick DMIs, and (2) highlighting the importance of demonstration\ + \ content, such as a video, to showcase the instrument’s capabilities and inspire\ + \ new users. 
The T-Patch reduces the barrier to entry for using the T-Stick DMI\ + \ and offers a shared software solution for various music-making scenarios.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {74},\n author = {Takuto Fukuda\ + \ and Marcelo Wanderley},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {5},\n\ + \ pages = {531--535},\n title = {T-Patch: a software application for T-Stick Digital\ + \ Musical Instruments},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_74.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177571 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Musical Instrument, Sensor technologies, Computer Music, Hardware and - Software Design.' - pages: 260--261 - title: Double Slide Controller - url: http://www.nime.org/proceedings/2009/nime2009_260.pdf - year: 2009 + month: May + numpages: 5 + pages: 531--535 + title: 'T-Patch: a software application for T-Stick Digital Musical Instruments' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_74.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Berdahl2009 - abstract: 'When we asked a colleague of ours why people do not make more haptic - musical instruments, he replied that he thought they were “too hard to program - and too expensive.” We decided to solve these perceived problems by introducing - HSP, a simple platform for implementing haptic musical instruments. HSP obviates - the need for employing low-level embedded control software because the haptic - device is controlled directly from within the Pure Data (Pd) software running - on a general purpose computer. 
Positions can be read from the haptic device, and - forces can be written to the device using messages in Pd. Various additional objects - have been created to facilitate rapid prototyping of useful haptic musical instruments - in Pd. HSP operates under Linux, OS X, and Windows and supports the mass-produced - Falcon haptic device from NovInt, which can currently be obtained for as little - as US$150. All of the above make HSP an especially excellent choice for pedagogical - environments where multiple workstations are required and example programs should - be complete yet simple.' - address: 'Pittsburgh, PA, United States' - author: 'Berdahl, Edgar and Niemeyer, Günter and Smith, Julius O.' - bibtex: "@inproceedings{Berdahl2009,\n abstract = {When we asked a colleague of\ - \ ours why people do not make more haptic musical instruments, he replied that\ - \ he thought they were “too hard to program and too expensive.” We decided to\ - \ solve these perceived problems by introducing HSP, a simple platform for implementing\ - \ haptic musical instruments. HSP obviates the need for employing low-level embedded\ - \ control software because the haptic device is controlled directly from within\ - \ the Pure Data (Pd) software running on a general purpose computer. Positions\ - \ can be read from the haptic device, and forces can be written to the device\ - \ using messages in Pd. Various additional objects have been created to facilitate\ - \ rapid prototyping of useful haptic musical instruments in Pd. HSP operates under\ - \ Linux, OS X, and Windows and supports the mass-produced Falcon haptic device\ - \ from NovInt, which can currently be obtained for as little as US\\$150. 
All\ - \ of the above make HSP an especially excellent choice for pedagogical environments\ - \ where multiple workstations are required and example programs should be complete\ - \ yet simple.},\n address = {Pittsburgh, PA, United States},\n author = {Berdahl,\ - \ Edgar and Niemeyer, G\\''{u}nter and Smith, Julius O.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177477},\n issn = {2220-4806},\n keywords = { haptic\ - \ musical instrument, HSP, haptics, computer music, physical modeling, Pure Data\ - \ (Pd), NovInt},\n pages = {262--263},\n title = {HSP : A Simple and Effective\ - \ Open-Source Platform for Implementing Haptic Musical Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_262.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_75 + abstract: 'Currently, most of the digital musical instruments cannot leave the use + of dedicated hardware devices, making them limited in terms of user popularity + and resource conservation. In this paper, we propose a new computer vision-based + interactive multi-functional musical instrument, called MuGeVI, which requires + no additional hardware circuits or sensors, and allows users to create or play + music through different hand gestures and positions. It firstly uses deep neural + network models for hand key point detection to obtain gesture information, secondly + maps it to pitch, chord or other information based on the current mode, then passes + it to Max/MSP via the OSC protocol, and finally implements the generation and + processing of MIDI or audio. MuGeVI is now available in four modes: performance + mode, accompaniment mode, control mode, and audio effects mode, and can be conveniently + used with just a personal computer with a camera. Designed to be human-centric, + MuGeVI is feature-rich, simple to use, affordable, scalable and programmable, + and is certainly a frugal musical innovation. 
All the material about this work + can be found in https://yewlife.github.io/MuGeVI/.' + address: 'Mexico City, Mexico' + articleno: 75 + author: Yue Yang and Zhaowen Wang and ZIJIN LI + bibtex: "@inproceedings{nime2023_75,\n abstract = {Currently, most of the digital\ + \ musical instruments cannot leave the use of dedicated hardware devices, making\ + \ them limited in terms of user popularity and resource conservation. In this\ + \ paper, we propose a new computer vision-based interactive multi-functional musical\ + \ instrument, called MuGeVI, which requires no additional hardware circuits or\ + \ sensors, and allows users to create or play music through different hand gestures\ + \ and positions. It firstly uses deep neural network models for hand key point\ + \ detection to obtain gesture information, secondly maps it to pitch, chord or\ + \ other information based on the current mode, then passes it to Max/MSP via the\ + \ OSC protocol, and finally implements the generation and processing of MIDI or\ + \ audio. MuGeVI is now available in four modes: performance mode, accompaniment\ + \ mode, control mode, and audio effects mode, and can be conveniently used with\ + \ just a personal computer with a camera. Designed to be human-centric, MuGeVI\ + \ is feature-rich, simple to use, affordable, scalable and programmable, and is\ + \ certainly a frugal musical innovation. 
All the material about this work can\ + \ be found in https://yewlife.github.io/MuGeVI/.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {75},\n author = {Yue Yang and Zhaowen Wang and ZIJIN LI},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {6},\n pages = {536--541},\n title = {MuGeVI: A\ + \ Multi-Functional Gesture-Controlled Virtual Instrument},\n track = {Work in\ + \ Progress},\n url = {http://nime.org/proceedings/2023/nime2023_75.pdf},\n year\ + \ = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177477 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: ' haptic musical instrument, HSP, haptics, computer music, physical modeling, - Pure Data (Pd), NovInt' - pages: 262--263 - title: 'HSP : A Simple and Effective Open-Source Platform for Implementing Haptic - Musical Instruments' - url: http://www.nime.org/proceedings/2009/nime2009_262.pdf - year: 2009 + month: May + numpages: 6 + pages: 536--541 + title: 'MuGeVI: A Multi-Functional Gesture-Controlled Virtual Instrument' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_75.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Barri2009 - abstract: 'This paper introduces the new audiovisual sequencing system "Versum" - that allows users to compose in three dimensions. In the present paper the conceptual - soil from which this system has sprung is discussed first. Secondly, the basic - concepts with which Versum operates are explained, providing a general idea of - what is meant by sequencing in three dimensions and explaining what compositions - made in Versum can look and sound like. 
Thirdly, the practical ways in which a - composer can use Versum to make his own audiovisual compositions are presented - by means of a more detailed description of the different graphical user interface - elements. Fourthly, a short description is given of the modular structure of the - software underlying Versum. Finally, several foresights regarding the directions - in which Versum will continue to develop in the near future are presented. ' - address: 'Pittsburgh, PA, United States' - author: 'Barri, Tarik' - bibtex: "@inproceedings{Barri2009,\n abstract = {This paper introduces the new audiovisual\ - \ sequencing system \"Versum\" that allows users to compose in three dimensions.\ - \ In the present paper the conceptual soil from which this system has sprung is\ - \ discussed first. Secondly, the basic concepts with which Versum operates are\ - \ explained, providing a general idea of what is meant by sequencing in three\ - \ dimensions and explaining what compositions made in Versum can look and sound\ - \ like. Thirdly, the practical ways in which a composer can use Versum to make\ - \ his own audiovisual compositions are presented by means of a more detailed description\ - \ of the different graphical user interface elements. Fourthly, a short description\ - \ is given of the modular structure of the software underlying Versum. Finally,\ - \ several foresights regarding the directions in which Versum will continue to\ - \ develop in the near future are presented. },\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Barri, Tarik},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177473},\n\ - \ issn = {2220-4806},\n keywords = {audiovisual, sequencing, collaboration. 
},\n\ - \ pages = {264--265},\n title = {Versum : Audiovisual Composing in 3d},\n url\ - \ = {http://www.nime.org/proceedings/2009/nime2009_264.pdf},\n year = {2009}\n\ - }\n" + ID: nime2023_76 + abstract: 'This paper is an exploration and creative inquiry into the equilibrium + of audio-visual feedback. Following a Research-Through Design approach, we actualized + this inquiry by designing an ad-hoc audio-visual instrument: TAILSPIN. In this + instrument, a closed audio-visual and physical loop is created between a microphone + and its speaker, and a camera and its display, which are controlled by a performer. + The tenets of feedback are then understood through the contextual research of + cycles and loops in our natural environment. In this paper, we present the technical + details of the instrument and offer novel insights into the audio-visual equilibrium + within the context and intricacies of our own natural environment and organic + feedback systems.' + address: 'Mexico City, Mexico' + articleno: 76 + author: Costa K Colachis Glass and Fabio Morreale + bibtex: "@inproceedings{nime2023_76,\n abstract = {This paper is an exploration\ + \ and creative inquiry into the equilibrium of audio-visual feedback. Following\ + \ a Research-Through Design approach, we actualized this inquiry by designing\ + \ an ad-hoc audio-visual instrument: TAILSPIN. In this instrument, a closed audio-visual\ + \ and physical loop is created between a microphone and its speaker, and a camera\ + \ and its display, which are controlled by a performer. The tenets of feedback\ + \ are then understood through the contextual research of cycles and loops in our\ + \ natural environment. 
In this paper, we present the technical details of the\ + \ instrument and offer novel insights into the audio-visual equilibrium within\ + \ the context and intricacies of our own natural environment and organic feedback\ + \ systems.},\n address = {Mexico City, Mexico},\n articleno = {76},\n author =\ + \ {Costa K Colachis Glass and Fabio Morreale},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {7},\n pages = {542--548},\n title = {TAILSPIN: AN INQUIRY INTO THE\ + \ EQUILIBRIUM OF AUDIO-VISUAL FEEDBACK},\n track = {Work in Progress},\n url =\ + \ {http://nime.org/proceedings/2023/nime2023_76.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177473 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'audiovisual, sequencing, collaboration. ' - pages: 264--265 - title: 'Versum : Audiovisual Composing in 3d' - url: http://www.nime.org/proceedings/2009/nime2009_264.pdf - year: 2009 + month: May + numpages: 7 + pages: 542--548 + title: 'TAILSPIN: AN INQUIRY INTO THE EQUILIBRIUM OF AUDIO-VISUAL FEEDBACK' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_76.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Bullock2009 - abstract: In this paper we describe findings related to user interfacerequirements - for live electronic music arising from researchconducted as part of the first - three-year phase of the EUfunded Integra project. A number of graphical user interface(GUI) - prototypes developed during the Integra project initial phase are described and - conclusions drawn about theirdesign and implementation. 
- address: 'Pittsburgh, PA, United States' - author: 'Bullock, Jamie and Coccioli, Lamberto' - bibtex: "@inproceedings{Bullock2009,\n abstract = {In this paper we describe findings\ - \ related to user interfacerequirements for live electronic music arising from\ - \ researchconducted as part of the first three-year phase of the EUfunded Integra\ - \ project. A number of graphical user interface(GUI) prototypes developed during\ - \ the Integra project initial phase are described and conclusions drawn about\ - \ theirdesign and implementation.},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Bullock, Jamie and Coccioli, Lamberto},\n booktitle = {Proceedings\ + ID: nime2023_77 + abstract: "In this paper, we present the Hapstrument: a bimanual haptic interface\ + \ for musical expression. This DMI uses two low-cost 2-DoF haptic force-feedback\ + \ devices, one for each hand. The left device controls pitch selection, while\ + \ the right device controls excitation by simulating the feeling of bowing or\ + \ plucking a string. A user study was run to evaluate the effectiveness of the\ + \ Hapstrument. This evaluation\nreceived a wide range of reviews, from excellent\ + \ to poor. Ultimately, the musical backgrounds of the participants greatly impacted\ + \ their experiences with the Hapstrument. For participants whose expectations\ + \ aligned with what the instrument could provide, it was an effective DMI that\ + \ uses force feedback to enhance musical expression." + address: 'Mexico City, Mexico' + articleno: 77 + author: Jonathan Lane-Smith and Derrek Chow and Sahand Ajami and Jeremy Cooperstock + bibtex: "@inproceedings{nime2023_77,\n abstract = {In this paper, we present the\ + \ Hapstrument: a bimanual haptic interface for musical expression. This DMI uses\ + \ two low-cost 2-DoF haptic force-feedback devices, one for each hand. 
The left\ + \ device controls pitch selection, while the right device controls excitation\ + \ by simulating the feeling of bowing or plucking a string. A user study was run\ + \ to evaluate the effectiveness of the Hapstrument. This evaluation\nreceived\ + \ a wide range of reviews, from excellent to poor. Ultimately, the musical backgrounds\ + \ of the participants greatly impacted their experiences with the Hapstrument.\ + \ For participants whose expectations aligned with what the instrument could provide,\ + \ it was an effective DMI that uses force feedback to enhance musical expression.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {77},\n author = {Jonathan Lane-Smith\ + \ and Derrek Chow and Sahand Ajami and Jeremy Cooperstock},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177489},\n issn = {2220-4806},\n keywords = {Integra,\ - \ User Interface, Usability, Design, Live Electronics, Music Technology },\n pages\ - \ = {266--267},\n title = {Towards a Humane Graphical User Interface for Live\ - \ Electronic Music},\n url = {http://www.nime.org/proceedings/2009/nime2009_266.pdf},\n\ - \ year = {2009}\n}\n" + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {5},\n pages = {549--553},\n title = {The Hapstrument:\ + \ A Bimanual Haptic Interface for Musical Expression},\n track = {Work in Progress},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_77.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177489 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Integra, User Interface, Usability, Design, Live Electronics, Music Technology ' - pages: 266--267 - title: Towards a Humane Graphical User Interface for Live Electronic Music - url: 
http://www.nime.org/proceedings/2009/nime2009_266.pdf - year: 2009 + month: May + numpages: 5 + pages: 549--553 + title: 'The Hapstrument: A Bimanual Haptic Interface for Musical Expression' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_77.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Laurenzo2009 - abstract: 'In this paper, we present YARMI, a collaborative, networked, tangible, - musical instrument. YARMI operates on augmented-reality space (shared between - the performers and the public), presenting a multiple tabletop interface where - several musical sequencers and real–time effects machines can be operated.' - address: 'Pittsburgh, PA, United States' - author: 'Laurenzo, Tomas and Rodríguez, Ernesto and Castro, Juan Fabrizio' - bibtex: "@inproceedings{Laurenzo2009,\n abstract = {In this paper, we present YARMI,\ - \ a collaborative, networked, tangible, musical instrument. YARMI operates on\ - \ augmented-reality space (shared between the performers and the public), presenting\ - \ a multiple tabletop interface where several musical sequencers and real–time\ - \ effects machines can be operated.},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Laurenzo, Tomas and Rodr\\'{\\i}guez, Ernesto and Castro, Juan Fabrizio},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177611},\n issn = {2220-4806},\n\ - \ keywords = {Interactive music instruments, visual interfaces, visual feedback,\ - \ tangible interfaces, augmented reality, collaborative music, networked musical\ - \ instruments, real-time musical systems, musical sequencer. 
},\n pages = {268--269},\n\ - \ title = {YARMI : an Augmented Reality Musical Instrument},\n url = {http://www.nime.org/proceedings/2009/nime2009_268.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_78 + abstract: "The recent advancements in digital fabrication has led to a wider access\ + \ to prototyping in all sorts of fields. Beginning of the last decade was marked\ + \ by the word “revolution” in relation to “maker’s culture” at least in some publications.\ + \ This has influenced the sphere of physical computing in arts and NIME sphere\ + \ as well. As currently there are more and more possibilities to create new instruments,\ + \ we think that it can be useful to think of approaches to conceptualize these\ + \ creations. This paper is an attempt to suggest methodology for NIME prototyping,\ + \ based on evolutionary metaphor.\nFirst we observe the application of evolutionary\ + \ concepts to the field of music technology, briefly discussing its appearance\ + \ in related publications. We then assemble our own operational concept, which\ + \ can be used for the direct prototyping of interfaces. Mainly by introducing\ + \ metaphorical “DNA”, inside which the “gene” of “interactive kinematic concept”\ + \ is of a particular interest, and also by applying the now obsolete but useful\ + \ “Meckel–Serres recapitulation hypothesis” (embryological parallelism) as a model\ + \ for rapid prototyping. \nUnderstanding the speculative nature of such an approach\ + \ we do not offer it as a scientific basis for classification, research or prediction,\ + \ but as a workable concept for development, which can lead to valuable results.\ + \ \nIn the end we describe two case studies of NIMEs, which were prototyped in\ + \ the discussed fashion, showing illustrations and reflecting on the practicalities." 
+ address: 'Mexico City, Mexico' + articleno: 78 + author: sergey k kasich + bibtex: "@inproceedings{nime2023_78,\n abstract = {The recent advancements in digital\ + \ fabrication has led to a wider access to prototyping in all sorts of fields.\ + \ Beginning of the last decade was marked by the word “revolution” in relation\ + \ to “maker’s culture” at least in some publications. This has influenced the\ + \ sphere of physical computing in arts and NIME sphere as well. As currently there\ + \ are more and more possibilities to create new instruments, we think that it\ + \ can be useful to think of approaches to conceptualize these creations. This\ + \ paper is an attempt to suggest methodology for NIME prototyping, based on evolutionary\ + \ metaphor.\nFirst we observe the application of evolutionary concepts to the\ + \ field of music technology, briefly discussing its appearance in related publications.\ + \ We then assemble our own operational concept, which can be used for the direct\ + \ prototyping of interfaces. Mainly by introducing metaphorical “DNA”, inside\ + \ which the “gene” of “interactive kinematic concept” is of a particular interest,\ + \ and also by applying the now obsolete but useful “Meckel–Serres recapitulation\ + \ hypothesis” (embryological parallelism) as a model for rapid prototyping. \n\ + Understanding the speculative nature of such an approach we do not offer it as\ + \ a scientific basis for classification, research or prediction, but as a workable\ + \ concept for development, which can lead to valuable results. 
\nIn the end we\ + \ describe two case studies of NIMEs, which were prototyped in the discussed fashion,\ + \ showing illustrations and reflecting on the practicalities.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {78},\n author = {sergey k kasich},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {6},\n pages = {554--559},\n title = {Morphological\ + \ evolution of musical interface: design case studies},\n track = {Work in Progress},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_78.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177611 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Interactive music instruments, visual interfaces, visual feedback, tangible - interfaces, augmented reality, collaborative music, networked musical instruments, - real-time musical systems, musical sequencer. ' - pages: 268--269 - title: 'YARMI : an Augmented Reality Musical Instrument' - url: http://www.nime.org/proceedings/2009/nime2009_268.pdf - year: 2009 + month: May + numpages: 6 + pages: 554--559 + title: 'Morphological evolution of musical interface: design case studies' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_78.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Essl2009 - abstract: When creating new musical instruments on a mobile phone platform one has - to map sensory input to synthesis algorithms. We propose that the very task of - this mapping belongs in the creative process and to this end we develop a way - to rapidly and on-the-fly edit the mapping of mobile phone instruments. The result - is that the meaning of the instruments can continuously be changed during a live - performance. 
- address: 'Pittsburgh, PA, United States' - author: 'Essl, Georg' - bibtex: "@inproceedings{Essl2009,\n abstract = {When creating new musical instruments\ - \ on a mobile phone platform one has to map sensory input to synthesis algorithms.\ - \ We propose that the very task of this mapping belongs in the creative process\ - \ and to this end we develop a way to rapidly and on-the-fly edit the mapping\ - \ of mobile phone instruments. The result is that the meaning of the instruments\ - \ can continuously be changed during a live performance.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Essl, Georg},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177503},\n issn = {2220-4806},\n keywords = {mobile phone\ - \ instruments,nime,nime09,on-the-fly},\n pages = {270--273},\n title = {SpeedDial\ - \ : Rapid and On-The-Fly Mapping of Mobile Phone Instruments},\n url = {http://www.nime.org/proceedings/2009/nime2009_270.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_79 + abstract: "Just as the way a performer is moved differs even among audiences who\ + \ have the same impression of the performance, the sensations and experiences\ + \ felt by the performers themselves and the audiences' experiences also differ.\ + \ The purpose of this research is to create a new listening experience by analyzing\ + \ and extracting the performer's introspection of rests, groove, and rhythm, and\ + \ physically presenting it to the audience. Although these elements are important\ + \ in shaping music, they are not always directly expressed as auditory sounds.\n\ + Our hypothesis is that this introspection, such as a sense of rhythm and groove,\ + \ is latent and observable in physiological states such as breathing and heartbeat.\ + \ By sensing and presenting them to the audience, music appreciation that includes\ + \ introspection could become possible. 
In other words, by sensing and presenting\ + \ introspection to the audience, the music listening experience itself can be\ + \ redesigned to include a physicality that is closer to the performer's experience\ + \ of the music, rather than being passive in an auditory sense. In this study,\ + \ preliminary experiments were conducted on the extraction of the performer's\ + \ introspection, and a device was designed to present it to the audience." + address: 'Mexico City, Mexico' + articleno: 79 + author: Aoi Uyama and Danny Hynds and Dingding Zheng and George Chernyshov and Tatsuya + Saito and Kai Kunze and Kouta Minamizawa + bibtex: "@inproceedings{nime2023_79,\n abstract = {Just as the way a performer is\ + \ moved differs even among audiences who have the same impression of the performance,\ + \ the sensations and experiences felt by the performers themselves and the audiences'\ + \ experiences also differ. The purpose of this research is to create a new listening\ + \ experience by analyzing and extracting the performer's introspection of rests,\ + \ groove, and rhythm, and physically presenting it to the audience. Although these\ + \ elements are important in shaping music, they are not always directly expressed\ + \ as auditory sounds.\nOur hypothesis is that this introspection, such as a sense\ + \ of rhythm and groove, is latent and observable in physiological states such\ + \ as breathing and heartbeat. By sensing and presenting them to the audience,\ + \ music appreciation that includes introspection could become possible. In other\ + \ words, by sensing and presenting introspection to the audience, the music listening\ + \ experience itself can be redesigned to include a physicality that is closer\ + \ to the performer's experience of the music, rather than being passive in an\ + \ auditory sense. 
In this study, preliminary experiments were conducted on the\ + \ extraction of the performer's introspection, and a device was designed to present\ + \ it to the audience.},\n address = {Mexico City, Mexico},\n articleno = {79},\n\ + \ author = {Aoi Uyama and Danny Hynds and Dingding Zheng and George Chernyshov\ + \ and Tatsuya Saito and Kai Kunze and Kouta Minamizawa},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {6},\n pages = {560--565},\n title = {Feel What You Don't\ + \ Hear: A New Framework for Non-aural Music Experiences},\n track = {Work in Progress},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_79.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177503 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'mobile phone instruments,nime,nime09,on-the-fly' - pages: 270--273 - title: 'SpeedDial : Rapid and On-The-Fly Mapping of Mobile Phone Instruments' - url: http://www.nime.org/proceedings/2009/nime2009_270.pdf - year: 2009 + month: May + numpages: 6 + pages: 560--565 + title: 'Feel What You Don''t Hear: A New Framework for Non-aural Music Experiences' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_79.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Fels2009 - abstract: 'We have constructed an easy-to-use portable, wearable gesture-to-speech - system based on the Glove-TalkII [1] and GRASSP [2] Digital Ventriloquized Actors - (DIVAs). Our new portable system, called a ForTouch, is a specific model of a - DIVA and refines the use of a formant speech synthesizer. Using ForTouch, a user - can speak using hand gestures mapped to synthetic sound using a mapping function - that preserves gesture trajectories. 
By making ForTouch portable and self-contained, - speakers can communicate with others in the community and perform in new music/theatre - stage productions. Figure 1 shows one performer using the ForTouch. ForTouch performers - also allow us to study the relation between gestures and speech/song production.' - address: 'Pittsburgh, PA, United States' - author: 'Fels, Sidney S. and Pritchard, Bob and Lenters, Allison' - bibtex: "@inproceedings{Fels2009,\n abstract = {We have constructed an easy-to-use\ - \ portable, wearable gesture-to-speech system based on the Glove-TalkII [1] and\ - \ GRASSP [2] Digital Ventriloquized Actors (DIVAs). Our new portable system, called\ - \ a ForTouch, is a specific model of a DIVA and refines the use of a formant speech\ - \ synthesizer. Using ForTouch, a user can speak using hand gestures mapped to\ - \ synthetic sound using a mapping function that preserves gesture trajectories.\ - \ By making ForTouch portable and self-contained, speakers can communicate with\ - \ others in the community and perform in new music/theatre stage productions.\ - \ Figure 1 shows one performer using the ForTouch. ForTouch performers also allow\ - \ us to study the relation between gestures and speech/song production.},\n address\ - \ = {Pittsburgh, PA, United States},\n author = {Fels, Sidney S. 
and Pritchard,\ - \ Bob and Lenters, Allison},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177509},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {274--275},\n title = {ForTouch\ - \ : A Wearable Digital Ventriloquized Actor},\n url = {http://www.nime.org/proceedings/2009/nime2009_274.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_80 + abstract: "We present an update on the EAVI physiological interface, a wireless,\ + \ microcontroller based hardware design for the acquisition of bioelectrical signals.\ + \ The system has been updated to process electroencephalogram brain signals in\ + \ addition to muscle electromyogram. The hardware/firmware system interfaces with\ + \ host software carrying out feature extraction and signal processing.\nRecent\ + \ advances in electronics have made physiological computing applications practical\ + \ and feasible. However, there is a gap between high end biomedical equipment\ + \ and consumer DIY solutions. The hardware design we present here bridges this\ + \ gap, and combines a specialized biosignal acquisition chip mated with a general-purpose\ + \ microcontroller. It is based on the Texas Instruments ADS129x family a single\ + \ chip integrated solution for high quality biosignal amplification and digitization.\ + \ It serves as analogue front end via programmable gain amplifiers to a 24bit\ + \ delta-sigma analog-digital converter. The microcontroller is the STMicroelectronics\ + \ STM32F427, a Cortex-M4 family microcontroller with floating point unit . In\ + \ addition to EMG acquisition, the board includes a Kionix KX122 three-axis accelerometer\ + \ . The TI and Kionix sensing chipts communicate with the ST microcontroller over\ + \ an I2C digital serial bus. The board communicates with the host computer or\ + \ rest of the music system wirelessly over Bluetooth LE 4.2 using an ST SPBTLE-1S\ + \ transceiver. 
The board can also communicate over USB where it registers with\ + \ the host as a class compliant audio and MIDI device. Audio and physiological\ + \ signals are treated in the same signal processing chain using the OWL framework.\n\ + The demo will show multichannel EMG, and single channel EEG. We call this hybridization\ + \ “ExG”. We will present documentation of the EAVI board used in the lab and on\ + \ stage, in user studies with neuro-diverse musicians and trained instrumentalists,\ + \ as well as in performance with the experimental all-female band, Chicks on Speed." + address: 'Mexico City, Mexico' + articleno: 80 + author: Atau Tanaka + bibtex: "@inproceedings{nime2023_80,\n abstract = {We present an update on the EAVI\ + \ physiological interface, a wireless, microcontroller based hardware design for\ + \ the acquisition of bioelectrical signals. The system has been updated to process\ + \ electroencephalogram brain signals in addition to muscle electromyogram. The\ + \ hardware/firmware system interfaces with host software carrying out feature\ + \ extraction and signal processing.\nRecent advances in electronics have made\ + \ physiological computing applications practical and feasible. However, there\ + \ is a gap between high end biomedical equipment and consumer DIY solutions. The\ + \ hardware design we present here bridges this gap, and combines a specialized\ + \ biosignal acquisition chip mated with a general-purpose microcontroller. It\ + \ is based on the Texas Instruments ADS129x family a single chip integrated solution\ + \ for high quality biosignal amplification and digitization. It serves as analogue\ + \ front end via programmable gain amplifiers to a 24bit delta-sigma analog-digital\ + \ converter. The microcontroller is the STMicroelectronics STM32F427, a Cortex-M4\ + \ family microcontroller with floating point unit . In addition to EMG acquisition,\ + \ the board includes a Kionix KX122 three-axis accelerometer . 
The TI and Kionix\ + \ sensing chipts communicate with the ST microcontroller over an I2C digital serial\ + \ bus. The board communicates with the host computer or rest of the music system\ + \ wirelessly over Bluetooth LE 4.2 using an ST SPBTLE-1S transceiver. The board\ + \ can also communicate over USB where it registers with the host as a class compliant\ + \ audio and MIDI device. Audio and physiological signals are treated in the same\ + \ signal processing chain using the OWL framework.\nThe demo will show multichannel\ + \ EMG, and single channel EEG. We call this hybridization “ExG”. We will present\ + \ documentation of the EAVI board used in the lab and on stage, in user studies\ + \ with neuro-diverse musicians and trained instrumentalists, as well as in performance\ + \ with the experimental all-female band, Chicks on Speed.},\n address = {Mexico\ + \ City, Mexico},\n articleno = {80},\n author = {Atau Tanaka},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {3},\n pages = {566--568},\n title = {The EAVI EMG/EEG\ + \ Board: Hybrid physiological sensing},\n track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_80.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177509 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 274--275 - title: 'ForTouch : A Wearable Digital Ventriloquized Actor' - url: http://www.nime.org/proceedings/2009/nime2009_274.pdf - year: 2009 + month: May + numpages: 3 + pages: 566--568 + title: 'The EAVI EMG/EEG Board: Hybrid physiological sensing' + track: Demos + url: http://nime.org/proceedings/2023/nime2023_80.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Mclean2009 - abstract: 'Phonetic symbols 
describe movements of the vocal tract,tongue and lips, - and are combined into complex movementsforming the words of language. In music, - vocables are wordsthat describe musical sounds, by relating vocal movementsto - articulations of a musical instrument. We posit that vocable words allow the composers - and listeners to engageclosely with dimensions of timbre, and that vocables couldsee - greater use in electronic music interfaces. A preliminarysystem for controlling - percussive physical modelling synthesis with textual words is introduced, with - particular application in expressive specification of timbre during computer music - performances.' - address: 'Pittsburgh, PA, United States' - author: 'Mclean, Alex and Wiggins, Geraint' - bibtex: "@inproceedings{Mclean2009,\n abstract = {Phonetic symbols describe movements\ - \ of the vocal tract,tongue and lips, and are combined into complex movementsforming\ - \ the words of language. In music, vocables are wordsthat describe musical sounds,\ - \ by relating vocal movementsto articulations of a musical instrument. 
We posit\ - \ that vocable words allow the composers and listeners to engageclosely with dimensions\ - \ of timbre, and that vocables couldsee greater use in electronic music interfaces.\ - \ A preliminarysystem for controlling percussive physical modelling synthesis\ - \ with textual words is introduced, with particular application in expressive\ - \ specification of timbre during computer music performances.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Mclean, Alex and Wiggins, Geraint},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177629},\n issn = {2220-4806},\n keywords\ - \ = {nime09,timbre,vocable synthesis},\n pages = {276--279},\n title = {Words\ - \ , Movement and Timbre},\n url = {http://www.nime.org/proceedings/2009/nime2009_276.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_81 + abstract: 'In this work, we propose a method for the controllable synthesis of real-time + contact sounds using neural resonators. Previous works have used physically inspired + statistical methods and physical modelling for object materials and excitation + signals. Our method incorporates differentiable second-order resonators and estimates + their coefficients using a neural network that is conditioned on physical parameters. + This allows for interactive dynamic control and the generation of novel sounds + in an intuitive manner. We demonstrate the practical implementation of our method + and explore its potential creative applications.' + address: 'Mexico City, Mexico' + articleno: 81 + author: Rodrigo Diaz and Charalampos Saitis and Mark B Sandler + bibtex: "@inproceedings{nime2023_81,\n abstract = {In this work, we propose a method\ + \ for the controllable synthesis of real-time contact sounds using neural resonators.\ + \ Previous works have used physically inspired statistical methods and physical\ + \ modelling for object materials and excitation signals. 
Our method incorporates\ + \ differentiable second-order resonators and estimates their coefficients using\ + \ a neural network that is conditioned on physical parameters. This allows for\ + \ interactive dynamic control and the generation of novel sounds in an intuitive\ + \ manner. We demonstrate the practical implementation of our method and explore\ + \ its potential creative applications.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {81},\n author = {Rodrigo Diaz and Charalampos Saitis and Mark B\ + \ Sandler},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ + \ issn = {2220-4806},\n month = {May},\n numpages = {5},\n pages = {569--573},\n\ + \ title = {Interactive Neural Resonators},\n track = {Work in Progress},\n url\ + \ = {http://nime.org/proceedings/2023/nime2023_81.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177629 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'nime09,timbre,vocable synthesis' - pages: 276--279 - title: 'Words , Movement and Timbre' - url: http://www.nime.org/proceedings/2009/nime2009_276.pdf - year: 2009 + month: May + numpages: 5 + pages: 569--573 + title: Interactive Neural Resonators + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_81.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Fiebrink2009 - abstract: 'Supervised learning methods have long been used to allow musical interface - designers to generate new mappings by example. We propose a method for harnessing - machine learning algorithms within a radically interactive paradigm, in which - the designer may repeatedly generate examples, train a learner, evaluate outcomes, - and modify parameters in real-time within a single software environment. 
We describe - our meta-instrument, the Wekinator, which allows a user to engage in on-the-fly - learning using arbitrary control modalities and sound synthesis environments. - We provide details regarding the system implementation and discuss our experiences - using the Wekinator for experimentation and performance. ' - address: 'Pittsburgh, PA, United States' - author: 'Fiebrink, Rebecca and Trueman, Dan and Cook, Perry R.' - bibtex: "@inproceedings{Fiebrink2009,\n abstract = {Supervised learning methods\ - \ have long been used to allow musical interface designers to generate new mappings\ - \ by example. We propose a method for harnessing machine learning algorithms within\ - \ a radically interactive paradigm, in which the designer may repeatedly generate\ - \ examples, train a learner, evaluate outcomes, and modify parameters in real-time\ - \ within a single software environment. We describe our meta-instrument, the Wekinator,\ - \ which allows a user to engage in on-the-fly learning using arbitrary control\ - \ modalities and sound synthesis environments. We provide details regarding the\ - \ system implementation and discuss our experiences using the Wekinator for experimentation\ - \ and performance. },\n address = {Pittsburgh, PA, United States},\n author =\ - \ {Fiebrink, Rebecca and Trueman, Dan and Cook, Perry R.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177513},\n issn = {2220-4806},\n keywords = {Machine\ - \ learning, mapping, tools. },\n pages = {280--285},\n title = {A Meta-Instrument\ - \ for Interactive, On-the-Fly Machine Learning},\n url = {http://www.nime.org/proceedings/2009/nime2009_280.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_82 + abstract: "In this paper we present the Sabotaging Piano, a prepared electronic\ + \ piano that alters key-to-pitch correspondence by reassigning adjacent pitches\ + \ (i.e. 
one semi-tone higher or lower) to each key. Performers can control how\ + \ many keys to remap through an expression pedal. If the pedal is not pressed\ + \ the Sabotaging Piano works as a normal piano. When fully pressed, each key is\ + \ remapped one semi-tone up or down with equal probability. Each new performance\ + \ (i.e. when the piano is turned on) triggers a new and unknown remapping pattern,\ + \ but the specific pattern remains fixed throughout the whole performance. This\ + \ aims to provide a balance of uncertain but still explorable and learnable behaviour.\ + \ \nWe invited three professional piano improvisers to rehearse with our piano\ + \ in order to prepare a final improvisation concert. We aimed to explore how much\ + \ can be rehearsed or prepared with a piano that will behave somewhat differently\ + \ for each new performance. We asked pianists to document their rehearsal processes\ + \ to witness the appearing of strategies or techniques with the Sabotaging Piano.\ + \ \nThrough analysis of the rehearsals reports and the MIDI data collected in\ + \ the final concert, here we show that the three pianists not only developed different\ + \ techniques with the Sabotaging Piano, but they also leveraged the particularities\ + \ of it to use them as creative resources." + address: 'Mexico City, Mexico' + articleno: 82 + author: Teodoro Dannemann and Nick Bryan-Kinns + bibtex: "@inproceedings{nime2023_82,\n abstract = {In this paper we present the\ + \ Sabotaging Piano, a prepared electronic piano that alters key-to-pitch correspondence\ + \ by reassigning adjacent pitches (i.e. one semi-tone higher or lower) to each\ + \ key. Performers can control how many keys to remap through an expression pedal.\ + \ If the pedal is not pressed the Sabotaging Piano works as a normal piano. When\ + \ fully pressed, each key is remapped one semi-tone up or down with equal probability.\ + \ Each new performance (i.e. 
when the piano is turned on) triggers a new and unknown\ + \ remapping pattern, but the specific pattern remains fixed throughout the whole\ + \ performance. This aims to provide a balance of uncertain but still explorable\ + \ and learnable behaviour. \nWe invited three professional piano improvisers to\ + \ rehearse with our piano in order to prepare a final improvisation concert. We\ + \ aimed to explore how much can be rehearsed or prepared with a piano that will\ + \ behave somewhat differently for each new performance. We asked pianists to document\ + \ their rehearsal processes to witness the appearing of strategies or techniques\ + \ with the Sabotaging Piano. \nThrough analysis of the rehearsals reports and\ + \ the MIDI data collected in the final concert, here we show that the three pianists\ + \ not only developed different techniques with the Sabotaging Piano, but they\ + \ also leveraged the particularities of it to use them as creative resources.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {82},\n author = {Teodoro Dannemann\ + \ and Nick Bryan-Kinns},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {5},\n\ + \ pages = {574--578},\n title = {The Sabotaging Piano: key-to-pitch remapping\ + \ as a source of new techniques in piano improvisation},\n track = {Work in Progress},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_82.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177513 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Machine learning, mapping, tools. 
' - pages: 280--285 - title: 'A Meta-Instrument for Interactive, On-the-Fly Machine Learning' - url: http://www.nime.org/proceedings/2009/nime2009_280.pdf - year: 2009 + month: May + numpages: 5 + pages: 574--578 + title: 'The Sabotaging Piano: key-to-pitch remapping as a source of new techniques + in piano improvisation' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_82.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Schacher2009 - abstract: 'In this paper mappings and adaptation in the context of interactive sound - installations are discussed. Starting from an ecological perspective on non-expert - audience interaction a brief overview and discussion of mapping strategies with - a special focus on adaptive systems using machine learning algorithms is given. - An audio-visual interactive installation is analyzed and its implementation used - to illustrate the issues of audience engagement and to discuss the efficiency - of adaptive mappings. ' - address: 'Pittsburgh, PA, United States' - author: 'Schacher, Jan C.' - bibtex: "@inproceedings{Schacher2009,\n abstract = {In this paper mappings and adaptation\ - \ in the context of interactive sound installations are discussed. Starting from\ - \ an ecological perspective on non-expert audience interaction a brief overview\ - \ and discussion of mapping strategies with a special focus on adaptive systems\ - \ using machine learning algorithms is given. An audio-visual interactive installation\ - \ is analyzed and its implementation used to illustrate the issues of audience\ - \ engagement and to discuss the efficiency of adaptive mappings. 
},\n address\ - \ = {Pittsburgh, PA, United States},\n author = {Schacher, Jan C.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177667},\n issn = {2220-4806},\n keywords\ - \ = {Interaction, adaptive mapping, machine learning, audience engagement },\n\ - \ pages = {286--289},\n title = {Action and Perception in Interactive Sound Installations\ - \ : An Ecological Approach},\n url = {http://www.nime.org/proceedings/2009/nime2009_286.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_83 + abstract: 'This paper explores the potential of AI text-to-image diffusion models + (e.g. DALLE-2 and Midjourney) to support the early phase design of new digital + musical instruments in collaboration with Disabled musicians. The paper presents + initial findings from two speculative design workshops attended by Disabled participants + who are affiliated with the London-based inclusive arts organisation Joy of Sound. + The workshops included activities enabling participants to co-create speculative + images of new instruments, drawing on their contributions. These included the + overall appearance of the instrument, constituent materials and other design characteristics. + The paper discusses the generated images and examines how diffusion models can + be a useful tool to support the conceptual co-design phase of bespoke accessible + instruments. The project findings indicate that diffusion models can be useful + as a facilitatory tool for idea generation in the initial stages of bespoke instrument + design.' + address: 'Mexico City, Mexico' + articleno: 83 + author: Hugh Aynsley )* and Tom Mitchell and Dave Meckin ) + bibtex: "@inproceedings{nime2023_83,\n abstract = {This paper explores the potential\ + \ of AI text-to-image diffusion models (e.g. DALLE-2 and Midjourney) to support\ + \ the early phase design of new digital musical instruments in collaboration with\ + \ Disabled musicians. 
The paper presents initial findings from two speculative\ + \ design workshops attended by Disabled participants who are affiliated with the\ + \ London-based inclusive arts organisation Joy of Sound. The workshops included\ + \ activities enabling participants to co-create speculative images of new instruments,\ + \ drawing on their contributions. These included the overall appearance of the\ + \ instrument, constituent materials and other design characteristics. The paper\ + \ discusses the generated images and examines how diffusion models can be a useful\ + \ tool to support the conceptual co-design phase of bespoke accessible instruments.\ + \ The project findings indicate that diffusion models can be useful as a facilitatory\ + \ tool for idea generation in the initial stages of bespoke instrument design.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {83},\n author = {Hugh Aynsley\ + \ )* and Tom Mitchell and Dave Meckin )},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {5},\n pages = {579--583},\n title = {Participatory Conceptual Design of Accessible\ + \ Digital Musical Instruments using Generative AI},\n track = {Work in Progress},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_83.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177667 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Interaction, adaptive mapping, machine learning, audience engagement ' - pages: 286--289 - title: 'Action and Perception in Interactive Sound Installations : An Ecological - Approach' - url: http://www.nime.org/proceedings/2009/nime2009_286.pdf - year: 2009 + month: May + numpages: 5 + pages: 579--583 + title: Participatory Conceptual Design of Accessible 
Digital Musical Instruments + using Generative AI + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_83.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Kirk2009 - abstract: 'In this paper we describe and analyze The Argus Project, a sound installation - involving the real-time processing and spatialized projection of sound sources - from beneath a pond’s surface. The primary aim of The Argus Project is to project - the natural sound sources from below the pond’s surface while tracking the changes - in the environmental factors above the surface so as to map this data onto the - real-time audio processing. The project takes as its conceptual model that of - a feedback network, or, a process in which the factors that produce a result are - themselves modified and reinforced by that result. Examples are given of the compositional - process, the execution, and processing techniques.' - address: 'Pittsburgh, PA, United States' - author: 'Kirk, Jonathon and Weisert, Lee' - bibtex: "@inproceedings{Kirk2009,\n abstract = {In this paper we describe and analyze\ - \ The Argus Project, a sound installation involving the real-time processing and\ - \ spatialized projection of sound sources from beneath a pond’s surface. The primary\ - \ aim of The Argus Project is to project the natural sound sources from below\ - \ the pond’s surface while tracking the changes in the environmental factors above\ - \ the surface so as to map this data onto the real-time audio processing. The\ - \ project takes as its conceptual model that of a feedback network, or, a process\ - \ in which the factors that produce a result are themselves modified and reinforced\ - \ by that result. 
Examples are given of the compositional process, the execution,\ - \ and processing techniques.},\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Kirk, Jonathon and Weisert, Lee},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177605},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ - \ = {290--292},\n title = {The Argus Project : Underwater Soundscape Composition\ - \ with Laser- Controlled Modulation},\n url = {http://www.nime.org/proceedings/2009/nime2009_290.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_84 + abstract: "The development of bespoke musical tools such as many accessible digital\ + \ musical instruments (ADMI) can necessitate specific design constraints. Within\ + \ a field which often promotes out of the box thinking and new interactions with\ + \ experimental technologies, how do we design for user groups where these notions\ + \ of interaction will be less familiar, and/or increasingly challenging due to\ + \ the progression of cognitive decline?\nThe relationship between age and the\ + \ use of technology is understood within the wider context of human computer interaction\ + \ (HCI), however, how this applies specifically to musical interaction or contributes\ + \ to a ‘dementia-friendly’ approach to digital musical instrument (DMI) design\ + \ is drastically underrepresented within the NIME community. Following a scoping\ + \ review of technology for arts activities designed for older adults with cognitive\ + \ decline, we ran a series of involvement activities with a range of stakeholders\ + \ living with, or caring for those living with dementia. Consolidating the knowledge\ + \ and experience shared at these events, we propose five considerations for designing\ + \ dementia-friendly digital musical instruments. 
We illustrate our approach with\ + \ a range of new instruments co-designed to enable increased interaction with\ + \ music for people living with dementia." + address: 'Mexico City, Mexico' + articleno: 84 + author: Jonathan M Pigrem and Jennifer MacRitchie and Andrew McPherson + bibtex: "@inproceedings{nime2023_84,\n abstract = {The development of bespoke musical\ + \ tools such as many accessible digital musical instruments (ADMI) can necessitate\ + \ specific design constraints. Within a field which often promotes out of the\ + \ box thinking and new interactions with experimental technologies, how do we\ + \ design for user groups where these notions of interaction will be less familiar,\ + \ and/or increasingly challenging due to the progression of cognitive decline?\n\ + The relationship between age and the use of technology is understood within the\ + \ wider context of human computer interaction (HCI), however, how this applies\ + \ specifically to musical interaction or contributes to a ‘dementia-friendly’\ + \ approach to digital musical instrument (DMI) design is drastically underrepresented\ + \ within the NIME community. Following a scoping review of technology for arts\ + \ activities designed for older adults with cognitive decline, we ran a series\ + \ of involvement activities with a range of stakeholders living with, or caring\ + \ for those living with dementia. Consolidating the knowledge and experience shared\ + \ at these events, we propose five considerations for designing dementia-friendly\ + \ digital musical instruments. 
We illustrate our approach with a range of new\ + \ instruments co-designed to enable increased interaction with music for people\ + \ living with dementia.},\n address = {Mexico City, Mexico},\n articleno = {84},\n\ + \ author = {Jonathan M Pigrem and Jennifer MacRitchie and Andrew McPherson},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {6},\n pages = {584--589},\n title\ + \ = {Instructions Not Included: Dementia-Friendly approaches to DMI Design},\n\ + \ track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_84.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177605 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 290--292 - title: 'The Argus Project : Underwater Soundscape Composition with Laser- Controlled - Modulation' - url: http://www.nime.org/proceedings/2009/nime2009_290.pdf - year: 2009 + month: May + numpages: 6 + pages: 584--589 + title: 'Instructions Not Included: Dementia-Friendly approaches to DMI Design' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_84.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: StClair2009 - abstract: 'We describe a novel transformation of a playground - merry-go-round, - teeter-totter (also referred to as a see-saw), swings, and climbing structure - – from its traditional purpose to a collaborative and interactive musical performance - system by equipping key structures with sensors that communicate with a computer. - A set of Max/ MSP patches translate the physical gestures of playground play into - a variety of performer-selected musical mappings. 
In addition to the electro-acoustic - interactivity, the climbing structure incorporates acoustic musical instruments.' - address: 'Pittsburgh, PA, United States' - author: 'St. Clair, Michael and Leitman, Sasha' - bibtex: "@inproceedings{StClair2009,\n abstract = {We describe a novel transformation\ - \ of a playground - merry-go-round, teeter-totter (also referred to as a see-saw),\ - \ swings, and climbing structure – from its traditional purpose to a collaborative\ - \ and interactive musical performance system by equipping key structures with\ - \ sensors that communicate with a computer. A set of Max/ MSP patches translate\ - \ the physical gestures of playground play into a variety of performer-selected\ - \ musical mappings. In addition to the electro-acoustic interactivity, the climbing\ - \ structure incorporates acoustic musical instruments.},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {St. Clair, Michael and Leitman, Sasha},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177685},\n issn = {2220-4806},\n keywords\ - \ = {Real-time, Music, Playground, Interactive, Installation, Radical Collaboration,\ - \ Play.},\n pages = {293--296},\n title = {PlaySoundGround : An Interactive Musical\ - \ Playground},\n url = {http://www.nime.org/proceedings/2009/nime2009_293.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_85 + abstract: "The utility of gestural technologies in broadening analytical- and expressive-interface\ + \ possibilities has been documented extensively; both within the sphere of NIME\ + \ and beyond. \n\nWearable gestural sensors have proved integral components of\ + \ many past NIMEs. Previous implementations have typically made use of specialist,\ + \ IMU and EMG based gestural technologies. Few have proved, singularly, as popular\ + \ as the Myo armband. 
An informal review of the NIME archives found that the Myo\ + \ has featured in 21 NIME publications, since an initial declaration of the Myo’s\ + \ promise as “a new standard controller in the NIME community” by Nyomen et al.\ + \ in 2015. Ten of those found were published after the Myo’s discontinuation in\ + \ 2018, including three as recently as 2022.\n\nThis paper details an assessment\ + \ of smartwatch-based IMU and audio logging as a ubiquitous, accessible alternative\ + \ to the IMU capabilities of the Myo armband. Six violinists were recorded performing\ + \ a number of exercises using VioLogger; a purpose-built application for the Apple\ + \ Watch. Participants were simultaneously recorded using a Myo armband and a freestanding\ + \ microphone. Initial testing upon this pilot dataset indicated promising results\ + \ for the purposes of audio-gestural analysis; both implementations demonstrated\ + \ similar efficacy for the purposes of MLP-based bow-stroke classification." + address: 'Mexico City, Mexico' + articleno: 85 + author: William Francis Wilson and Niccolo Granieri and Islah Ali-Maclachlan + bibtex: "@inproceedings{nime2023_85,\n abstract = {The utility of gestural technologies\ + \ in broadening analytical- and expressive-interface possibilities has been documented\ + \ extensively; both within the sphere of NIME and beyond. \n\nWearable gestural\ + \ sensors have proved integral components of many past NIMEs. Previous implementations\ + \ have typically made use of specialist, IMU and EMG based gestural technologies.\ + \ Few have proved, singularly, as popular as the Myo armband. An informal review\ + \ of the NIME archives found that the Myo has featured in 21 NIME publications,\ + \ since an initial declaration of the Myo’s promise as “a new standard controller\ + \ in the NIME community” by Nyomen et al. in 2015. 
Ten of those found were published\ + \ after the Myo’s discontinuation in 2018, including three as recently as 2022.\n\ + \nThis paper details an assessment of smartwatch-based IMU and audio logging as\ + \ a ubiquitous, accessible alternative to the IMU capabilities of the Myo armband.\ + \ Six violinists were recorded performing a number of exercises using VioLogger;\ + \ a purpose-built application for the Apple Watch. Participants were simultaneously\ + \ recorded using a Myo armband and a freestanding microphone. Initial testing\ + \ upon this pilot dataset indicated promising results for the purposes of audio-gestural\ + \ analysis; both implementations demonstrated similar efficacy for the purposes\ + \ of MLP-based bow-stroke classification.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {85},\n author = {William Francis Wilson and Niccolo Granieri and\ + \ Islah Ali-Maclachlan},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {4},\n\ + \ pages = {590--593},\n title = {Time's up for the Myo? The smartwatch as a ubiquitous\ + \ alternative for audio-gestural analyses.},\n track = {Work in Progress},\n url\ + \ = {http://nime.org/proceedings/2023/nime2023_85.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177685 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Real-time, Music, Playground, Interactive, Installation, Radical Collaboration, - Play.' - pages: 293--296 - title: 'PlaySoundGround : An Interactive Musical Playground' - url: http://www.nime.org/proceedings/2009/nime2009_293.pdf - year: 2009 + month: May + numpages: 4 + pages: 590--593 + title: 'Time''s up for the Myo? The smartwatch as a ubiquitous alternative for audio-gestural + analyses.' 
+ track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_85.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Jones2009 - abstract: 'The Fragmented Orchestra is a distributed musical instrument which combines - live audio streams from geographically disparate sites, and granulates each according - to thespike timings of an artificial spiking neural network. Thispaper introduces - the work, outlining its historical context,technical architecture, neuronal model - and network infrastructure, making specific reference to modes of interactionwith - the public.' - address: 'Pittsburgh, PA, United States' - author: 'Jones, Daniel and Hodgson, Tim and Grant, Jane and Matthias, John and Outram, - Nicholas and Ryan, Nick' - bibtex: "@inproceedings{Jones2009,\n abstract = {The Fragmented Orchestra is a distributed\ - \ musical instrument which combines live audio streams from geographically disparate\ - \ sites, and granulates each according to thespike timings of an artificial spiking\ - \ neural network. 
Thispaper introduces the work, outlining its historical context,technical\ - \ architecture, neuronal model and network infrastructure, making specific reference\ - \ to modes of interactionwith the public.},\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Jones, Daniel and Hodgson, Tim and Grant, Jane and Matthias,\ - \ John and Outram, Nicholas and Ryan, Nick},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177587},\n issn = {2220-4806},\n keywords = {distributed,emergent,environmental,installation,neural\ - \ network,nime09,sound,streaming audio},\n pages = {297--302},\n title = {The\ - \ Fragmented Orchestra},\n url = {http://www.nime.org/proceedings/2009/nime2009_297.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_86 + abstract: 'This paper presents the Hummellaphone, a highly-reconfigurable, open-source, + electromagnetically actuated instrument being developed for research in engineering + learning, haptics, and human-computer interaction (HCI). The reconfigurable performance + interface promotes experimentation with gestural control and mapping. Haptic feedback + reintroduces the tangible bilateral communication between performer and instrument + that is present in many acoustic and electro-acoustic instruments but missing + in most digital musical instruments. The overall aim of the project is to create + an open-source, accessible toolkit for facilitating the development of and research + with electromagnetically actuated musical instruments. This paper describes the + hardware and design of the musical instrument and control interface as well as + example research applications.' 
+ address: 'Mexico City, Mexico' + articleno: 86 + author: Adam G Schmidt and Michael Gurevich + bibtex: "@inproceedings{nime2023_86,\n abstract = {This paper presents the Hummellaphone,\ + \ a highly-reconfigurable, open-source, electromagnetically actuated instrument\ + \ being developed for research in engineering learning, haptics, and human-computer\ + \ interaction (HCI). The reconfigurable performance interface promotes experimentation\ + \ with gestural control and mapping. Haptic feedback reintroduces the tangible\ + \ bilateral communication between performer and instrument that is present in\ + \ many acoustic and electro-acoustic instruments but missing in most digital musical\ + \ instruments. The overall aim of the project is to create an open-source, accessible\ + \ toolkit for facilitating the development of and research with electromagnetically\ + \ actuated musical instruments. This paper describes the hardware and design of\ + \ the musical instrument and control interface as well as example research applications.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {86},\n author = {Adam G Schmidt\ + \ and Michael Gurevich},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan\ + \ Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages = {6},\n\ + \ pages = {594--599},\n title = {The Hummellaphone: An Electromagnetically Actuated\ + \ Instrument and Open-Source Toolkit},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_86.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177587 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'distributed,emergent,environmental,installation,neural network,nime09,sound,streaming - audio' - pages: 297--302 - title: The Fragmented Orchestra - url: 
http://www.nime.org/proceedings/2009/nime2009_297.pdf - year: 2009 + month: May + numpages: 6 + pages: 594--599 + title: 'The Hummellaphone: An Electromagnetically Actuated Instrument and Open-Source + Toolkit' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_86.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Wang2009 - abstract: 'The Smule Ocarina is a wind instrument designed for the iPhone, fully - leveraging its wide array of technologies: microphone input (for breath input), - multitouch (for fingering), accelerometer, real-time sound synthesis, highperformance - graphics, GPS/location, and persistent data connection. In this mobile musical - artifact, the interactions of the ancient flute-like instrument are both preserved - and transformed via breath-control and multitouch finger-holes, while the onboard - global positioning and persistent data connection provide the opportunity to create - a new social experience, allowing the users of Ocarina to listen to one another. - In this way, Ocarina is also a type of social instrument that enables a different, - perhaps even magical, sense of global connectivity. ' - address: 'Pittsburgh, PA, United States' - author: 'Wang, Ge' - bibtex: "@inproceedings{Wang2009,\n abstract = {The Smule Ocarina is a wind instrument\ - \ designed for the iPhone, fully leveraging its wide array of technologies: microphone\ - \ input (for breath input), multitouch (for fingering), accelerometer, real-time\ - \ sound synthesis, highperformance graphics, GPS/location, and persistent data\ - \ connection. In this mobile musical artifact, the interactions of the ancient\ - \ flute-like instrument are both preserved and transformed via breath-control\ - \ and multitouch finger-holes, while the onboard global positioning and persistent\ - \ data connection provide the opportunity to create a new social experience, allowing\ - \ the users of Ocarina to listen to one another. 
In this way, Ocarina is also\ - \ a type of social instrument that enables a different, perhaps even magical,\ - \ sense of global connectivity. },\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Wang, Ge},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177697},\n\ - \ issn = {2220-4806},\n keywords = {chuck,design,in,in real-time,interface,iphone,mobile\ - \ music,multitouch,nime09,ocarina,pulsing waves,social,sonically and onscreen\ - \ and,sound synthesis takes place,the breath is visualized},\n pages = {303--307},\n\ - \ title = {Designing Smule's Ocarina : The iPhone's Magic Flute},\n url = {http://www.nime.org/proceedings/2009/nime2009_303.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_87 + abstract: 'This paper presents the development of a multichannel sound installation + about atmospheric processes. This instrument is an example of taking inspiration + from ancient cultures for NIME design, and of sensing weather to extend the perception + of the performer, who also then becomes a listener of atmospheric processes. The + interface channels dynamics found in the atmosphere: wind''s force and direction, + air quality, atmospheric pressure, and electromagnetism. These sources are translated + into sound by mapping sensor data into a multichannel sonification composition. + The paper outlines the artistic context and expands on its interaction overview.' + address: 'Mexico City, Mexico' + articleno: 87 + author: Juan C Duarte Regino + bibtex: "@inproceedings{nime2023_87,\n abstract = {This paper presents the development\ + \ of a multichannel sound installation about atmospheric processes. This instrument\ + \ is an example of taking inspiration from ancient cultures for NIME design, and\ + \ of sensing weather to extend the perception of the performer, who also then\ + \ becomes a listener of atmospheric processes. 
The interface channels dynamics\ + \ found in the atmosphere: wind's force and direction, air quality, atmospheric\ + \ pressure, and electromagnetism. These sources are translated into sound by mapping\ + \ sensor data into a multichannel sonification composition. The paper outlines\ + \ the artistic context and expands on its interaction overview.},\n address =\ + \ {Mexico City, Mexico},\n articleno = {87},\n author = {Juan C Duarte Regino},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn\ + \ = {2220-4806},\n month = {May},\n numpages = {4},\n pages = {600--603},\n title\ + \ = {AUGURY : an interface for generating soundscapes inspired by ancient divination},\n\ + \ track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_87.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177697 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'chuck,design,in,in real-time,interface,iphone,mobile music,multitouch,nime09,ocarina,pulsing - waves,social,sonically and onscreen and,sound synthesis takes place,the breath - is visualized' - pages: 303--307 - title: 'Designing Smule''s Ocarina : The iPhone''s Magic Flute' - url: http://www.nime.org/proceedings/2009/nime2009_303.pdf - year: 2009 + month: May + numpages: 4 + pages: 600--603 + title: 'AUGURY : an interface for generating soundscapes inspired by ancient divination' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_87.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Gillian2009a - abstract: 'This paper presents Scratch-Off, a new musical multiplayer DJ game that - has been designed for a mobile phone. We describe how the game is used as a test - platform for experimenting with various types of multimodal feedback. 
The game - uses movement gestures made by the players to scratch a record and control crossfades - between tracks, with the objective of the game to make the correct scratch at - the correct time in relation to the music. Gestures are detected using the devices - built-in tri-axis accelerometer and multi-touch screen display. The players receive - visual, audio and various types of vibrotactile feedback to help them make the - correct scratch on the beat of the music track. We also discuss the results of - a pilot study using this interface. ' - address: 'Pittsburgh, PA, United States' - author: 'Gillian, Nicholas and O''Modhrain, Sile and Essl, Georg' - bibtex: "@inproceedings{Gillian2009a,\n abstract = {This paper presents {Scratch-Off},\ - \ a new musical multiplayer DJ game that has been designed for a mobile phone.\ - \ We describe how the game is used as a test platform for experimenting with various\ - \ types of multimodal feedback. The game uses movement gestures made by the players\ - \ to scratch a record and control crossfades between tracks, with the objective\ - \ of the game to make the correct scratch at the correct time in relation to the\ - \ music. Gestures are detected using the devices built-in tri-axis accelerometer\ - \ and multi-touch screen display. The players receive visual, audio and various\ - \ types of vibrotactile feedback to help them make the correct scratch on the\ - \ beat of the music track. We also discuss the results of a pilot study using\ - \ this interface. },\n address = {Pittsburgh, PA, United States},\n author = {Gillian,\ - \ Nicholas and O'Modhrain, Sile and Essl, Georg},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177553},\n issn = {2220-4806},\n keywords = {Mobile devices,\ - \ gesture, audio games. 
},\n pages = {308--311},\n title = {Scratch-Off : A Gesture\ - \ Based Mobile Music Game with Tactile Feedback},\n url = {http://www.nime.org/proceedings/2009/nime2009_308.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_88 + abstract: 'Interactive Machine Learning (IML) is an approach previously explored + in music discipline. However, its adaptation in sound synthesis as an algorithmic + method of creation has not been examined. This article presents the prototype + ASCIML, an Assistant for Sound Creation with Interactive Machine Learning, that + allows musicians to use IML to create personalized datasets and generate new sounds. + Additionally, a preliminary study is presented which aims to evaluate the potential + of ASCIML as a tool for sound synthesis and to gather feedback and suggestions + for future improvements. The prototype can be used in Google Colaboratory and + is divided into four main stages: Data Design, Training, Evaluation and Audio + Creation. Results from the study, which involved 27 musicians with no prior knowledge + of Machine Learning (ML), showed that most participants preferred using microphone + recording and synthesis to design their dataset and that the Envelopegram visualization + was found to be particularly meaningful to understand sound datasets. It was also + found that the majority of participants preferred to implement a pre-trained model + on their data and relied on hearing the audio reconstruction provided by the interface + to evaluate the model performance. Overall, the study demonstrates the potential + of ASCIML as a tool for hands-on neural audio sound synthesis and provides valuable + insights for future developments in the field.' + address: 'Mexico City, Mexico' + articleno: 88 + author: Gerardo Meza + bibtex: "@inproceedings{nime2023_88,\n abstract = {Interactive Machine Learning\ + \ (IML) is an approach previously explored in music discipline. 
However, its adaptation\ + \ in sound synthesis as an algorithmic method of creation has not been examined.\ + \ This article presents the prototype ASCIML, an Assistant for Sound Creation\ + \ with Interactive Machine Learning, that allows musicians to use IML to create\ + \ personalized datasets and generate new sounds. Additionally, a preliminary study\ + \ is presented which aims to evaluate the potential of ASCIML as a tool for sound\ + \ synthesis and to gather feedback and suggestions for future improvements. The\ + \ prototype can be used in Google Colaboratory and is divided into four main stages:\ + \ Data Design, Training, Evaluation and Audio Creation. Results from the study,\ + \ which involved 27 musicians with no prior knowledge of Machine Learning (ML),\ + \ showed that most participants preferred using microphone recording and synthesis\ + \ to design their dataset and that the Envelopegram visualization was found to\ + \ be particularly meaningful to understand sound datasets. It was also found that\ + \ the majority of participants preferred to implement a pre-trained model on their\ + \ data and relied on hearing the audio reconstruction provided by the interface\ + \ to evaluate the model performance. 
Overall, the study demonstrates the potential\ + \ of ASCIML as a tool for hands-on neural audio sound synthesis and provides valuable\ + \ insights for future developments in the field.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {88},\n author = {Gerardo Meza},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {4},\n pages = {604--607},\n title = {Exploring the potential of\ + \ interactive Machine Learning for Sound Generation: A preliminary study with\ + \ sound artists},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_88.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177553 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'Mobile devices, gesture, audio games. ' - pages: 308--311 - title: 'Scratch-Off : A Gesture Based Mobile Music Game with Tactile Feedback' - url: http://www.nime.org/proceedings/2009/nime2009_308.pdf - year: 2009 + month: May + numpages: 4 + pages: 604--607 + title: 'Exploring the potential of interactive Machine Learning for Sound Generation: + A preliminary study with sound artists' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_88.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Weinberg2009 - abstract: 'ZooZBeat is a gesture-based mobile music studio. It is designed to provide - users with expressive and creative access to music making on the go. ZooZBeat - users shake the phone or tap the screen to enter notes. The result is quantized, - mapped onto a musical scale, and looped. Users can then use tilt and shake movements - to manipulate and share their creation in a group. 
Emphasis is placed on finding - intuitive metaphors for mobile music creation and maintaining a balance between - control and ease-of-use that allows non-musicians to begin creating music with - the application immediately. ' - address: 'Pittsburgh, PA, United States' - author: 'Weinberg, Gil and Beck, Andrew and Godfrey, Mark' - bibtex: "@inproceedings{Weinberg2009,\n abstract = {ZooZBeat is a gesture-based\ - \ mobile music studio. It is designed to provide users with expressive and creative\ - \ access to music making on the go. ZooZBeat users shake the phone or tap the\ - \ screen to enter notes. The result is quantized, mapped onto a musical scale,\ - \ and looped. Users can then use tilt and shake movements to manipulate and share\ - \ their creation in a group. Emphasis is placed on finding intuitive metaphors\ - \ for mobile music creation and maintaining a balance between control and ease-of-use\ - \ that allows non-musicians to begin creating music with the application immediately.\ - \ },\n address = {Pittsburgh, PA, United States},\n author = {Weinberg, Gil and\ - \ Beck, Andrew and Godfrey, Mark},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177703},\n\ - \ issn = {2220-4806},\n keywords = {mobile music, gestural control },\n pages\ - \ = {312--315},\n title = {ZooZBeat : a Gesture-based Mobile Music Studio},\n\ - \ url = {http://www.nime.org/proceedings/2009/nime2009_312.pdf},\n year = {2009}\n\ + ID: nime2023_89 + abstract: "CALM is a performance piece from a collection of works that explore trauma\ + \ through trauma-informed therapeutic models, such as bi-lateral coordination\ + \ drawing, yoga, and tapping, and existing movement practices, such as yoga, Pilates,\ + \ dance, and conducting, to control and manipulate sound in performance. 
This\ + \ work draws from yoga practice to control the volumes and audio effects on pre-composed\ + \ audio layers through use of datagloves (MiMu with their proprietary software\ + \ Glover (MI.MU Gloves Ltd, 2010), though this is not specific to the constraints\ + \ of the MiMu/Glover system) and Max/MSP (Cycling ’74, 2018). \n\nYoga is a movement\ + \ practice often recommended to manage symptoms of trauma and anxiety due to the\ + \ focus on one’s body and generally meditative nature or the practice. However,\ + \ in cases of sexual trauma, yoga may yield the opposite of the desired results\ + \ when not used in a trauma-sensitive context (Khoudari, 2021; Levine et al.,\ + \ 2010). This is because the individual tries to focus on the body in which they\ + \ do not feel safe and encounter unresolved trauma. Thus, instead of a grounding\ + \ effect, the individual hears the mental and physical pain that they have endured\ + \ repeating itself in the present. To reflect this, “stillness” audio material\ + \ is routed to scream-like and abrasive sounds, while “movement” audio quiets\ + \ the listener’s internal landscape. Movements used in the live piece were chosen\ + \ based on providing extramusical benefit to the composer-performer (and areas\ + \ that are typically carrying tension as a result of the trauma) without contributing\ + \ to any negative effects, for example, the pose “Happy Baby/Ananda Balasana”\ + \ was excluded and Malasana (a deep squat pose) was used in its place as it puts\ + \ the performer in a less vulnerable position by being on one’s feet. 
" + address: 'Mexico City, Mexico' + articleno: 89 + author: Sophie Rose + bibtex: "@inproceedings{nime2023_89,\n abstract = {CALM is a performance piece from\ + \ a collection of works that explore trauma through trauma-informed therapeutic\ + \ models, such as bi-lateral coordination drawing, yoga, and tapping, and existing\ + \ movement practices, such as yoga, Pilates, dance, and conducting, to control\ + \ and manipulate sound in performance. This work draws from yoga practice to control\ + \ the volumes and audio effects on pre-composed audio layers through use of datagloves\ + \ (MiMu with their proprietary software Glover (MI.MU Gloves Ltd, 2010), though\ + \ this is not specific to the constraints of the MiMu/Glover system) and Max/MSP\ + \ (Cycling ’74, 2018). \n\nYoga is a movement practice often recommended to manage\ + \ symptoms of trauma and anxiety due to the focus on one’s body and generally\ + \ meditative nature or the practice. However, in cases of sexual trauma, yoga\ + \ may yield the opposite of the desired results when not used in a trauma-sensitive\ + \ context (Khoudari, 2021; Levine et al., 2010). This is because the individual\ + \ tries to focus on the body in which they do not feel safe and encounter unresolved\ + \ trauma. Thus, instead of a grounding effect, the individual hears the mental\ + \ and physical pain that they have endured repeating itself in the present. To\ + \ reflect this, “stillness” audio material is routed to scream-like and abrasive\ + \ sounds, while “movement” audio quiets the listener’s internal landscape. 
Movements\ + \ used in the live piece were chosen based on providing extramusical benefit to\ + \ the composer-performer (and areas that are typically carrying tension as a result\ + \ of the trauma) without contributing to any negative effects, for example, the\ + \ pose “Happy Baby/Ananda Balasana” was excluded and Malasana (a deep squat pose)\ + \ was used in its place as it puts the performer in a less vulnerable position\ + \ by being on one’s feet. },\n address = {Mexico City, Mexico},\n articleno =\ + \ {89},\n author = {Sophie Rose},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {4},\n pages = {608--611},\n title = {CALM: Mapping yoga practice for gestural\ + \ control to externalise traumatic experiences},\n track = {Work in Progress},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_89.pdf},\n year = {2023}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177703 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'mobile music, gestural control ' - pages: 312--315 - title: 'ZooZBeat : a Gesture-based Mobile Music Studio' - url: http://www.nime.org/proceedings/2009/nime2009_312.pdf - year: 2009 + month: May + numpages: 4 + pages: 608--611 + title: 'CALM: Mapping yoga practice for gestural control to externalise traumatic + experiences' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_89.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Bianchi2009 - abstract: 'It has been shown that collaborative musical interfaces encourage novice - users to explore the sound space and promote their participation as music performers. 
- Nevertheless, such interfaces are generally physically situated and can limit - the possibility of movements on the stage, a critical factor in live music performance. - In this paper we introduce the Drummer, a networked digital musical interface - that allows multiple performers to design and play drum kits simultaneously while, - at the same time, keeping their ability to freely move on the stage. The system - consists of multiple Nintendo DS clients with an intuitive, user-configurable - interface and a server computer which plays drum sounds. The Drummer Machine, - a small piece of hardware to augment the performance of the Drummer, is also introduced. ' - address: 'Pittsburgh, PA, United States' - author: 'Bianchi, Andrea and Yeo, Woon Seung' - bibtex: "@inproceedings{Bianchi2009,\n abstract = {It has been shown that collaborative\ - \ musical interfaces encourage novice users to explore the sound space and promote\ - \ their participation as music performers. Nevertheless, such interfaces are generally\ - \ physically situated and can limit the possibility of movements on the stage,\ - \ a critical factor in live music performance. In this paper we introduce the\ - \ Drummer, a networked digital musical interface that allows multiple performers\ - \ to design and play drum kits simultaneously while, at the same time, keeping\ - \ their ability to freely move on the stage. The system consists of multiple Nintendo\ - \ DS clients with an intuitive, user-configurable interface and a server computer\ - \ which plays drum sounds. The Drummer Machine, a small piece of hardware to augment\ - \ the performance of the Drummer, is also introduced. 
},\n address = {Pittsburgh,\ - \ PA, United States},\n author = {Bianchi, Andrea and Yeo, Woon Seung},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177483},\n issn = {2220-4806},\n keywords\ - \ = {collaborative interface, multiplayer, musical expression, musical control,\ - \ game control, Nintendo DS.},\n pages = {316--319},\n title = {The Drummer :\ - \ a Collaborative Musical Interface with Mobility},\n url = {http://www.nime.org/proceedings/2009/nime2009_316.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_90 + abstract: 'I present "SnakeSynth," a web-based lightweight audio synthesizer that + combines audio generated by a deep generative model and real-time continuous two-dimensional + (2D) input to create and control variable-length generative sounds through 2D + interaction gestures. Interaction gestures are touch and mobile-compatible and + made with analogies to strummed, bowed, brushed, and plucked musical instrument + controls. Point-and-click and drag-and-drop gestures directly control audio playback + length and intensity. I show that I can modulate sound length and intensity by + interacting with a programmable 2D grid and leveraging the speed and ubiquity + of web browser-based audio and hardware acceleration to generate time-varying + high-fidelity sounds with real-time interactivity. SnakeSynth adaptively reproduces + and interpolates between sounds encountered during model training, notably without + long training times, and I briefly discuss possible futures for deep generative + models as an interactive paradigm for musical expression.' 
+ address: 'Mexico City, Mexico' + articleno: 90 + author: Eric Easthope + bibtex: "@inproceedings{nime2023_90,\n abstract = {I present \"SnakeSynth,\" a web-based\ + \ lightweight audio synthesizer that combines audio generated by a deep generative\ + \ model and real-time continuous two-dimensional (2D) input to create and control\ + \ variable-length generative sounds through 2D interaction gestures. Interaction\ + \ gestures are touch and mobile-compatible and made with analogies to strummed,\ + \ bowed, brushed, and plucked musical instrument controls. Point-and-click and\ + \ drag-and-drop gestures directly control audio playback length and intensity.\ + \ I show that I can modulate sound length and intensity by interacting with a\ + \ programmable 2D grid and leveraging the speed and ubiquity of web browser-based\ + \ audio and hardware acceleration to generate time-varying high-fidelity sounds\ + \ with real-time interactivity. SnakeSynth adaptively reproduces and interpolates\ + \ between sounds encountered during model training, notably without long training\ + \ times, and I briefly discuss possible futures for deep generative models as\ + \ an interactive paradigm for musical expression.},\n address = {Mexico City,\ + \ Mexico},\n articleno = {90},\n author = {Eric Easthope},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {8},\n pages = {612--619},\n title = {SnakeSynth: New\ + \ Interactions for Generative Audio Synthesis},\n track = {Work in Progress},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_90.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177483 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: 'collaborative interface, multiplayer, 
musical expression, musical control, - game control, Nintendo DS.' - pages: 316--319 - title: 'The Drummer : a Collaborative Musical Interface with Mobility' - url: http://www.nime.org/proceedings/2009/nime2009_316.pdf - year: 2009 + month: May + numpages: 8 + pages: 612--619 + title: 'SnakeSynth: New Interactions for Generative Audio Synthesis' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_90.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Wechsler2009 - address: 'Pittsburgh, PA, United States' - author: 'Wechsler, Robert' - bibtex: "@inproceedings{Wechsler2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Wechsler, Robert},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177701},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {320--320},\n title = {The\ - \ Oklo Phenomenon},\n url = {http://www.nime.org/proceedings/2009/nime2009_320.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_91 + abstract: 'We present a system for interactive co-creation of expressive performances + of notated music using speech and gestures. The system provides real-time or near-real-time + dialog-based control of performance rendering and interaction in multiple modalities. + It is accessible to people regardless of their musical background via smartphones. + The system is trained using sheet music and associated performances, in particular + using notated performance directions and user-system interaction data to ground + performance directions in performances. Users can listen to an autonomously generated + performance or actively engage in the performance process. A speech- and gesture-based + feedback loop and online learning from past user interactions improve the accuracy + of the performance rendering control. 
There are two important assumptions behind + our approach: a) that many people can express nuanced aspects of expressive performance + using natural human expressive faculties, such as speech, voice, and gesture, + and b) that by doing so and hearing the music follow their direction with low + latency, they can enjoy playing the music that would otherwise be inaccessible + to them. The ultimate goal of this work is to enable fulfilling and accessible + music making experiences for a large number of people who are not currently musically + active.' + address: 'Mexico City, Mexico' + articleno: 91 + author: Ilya Borovik and Vladimir Viro + bibtex: "@inproceedings{nime2023_91,\n abstract = {We present a system for interactive\ + \ co-creation of expressive performances of notated music using speech and gestures.\ + \ The system provides real-time or near-real-time dialog-based control of performance\ + \ rendering and interaction in multiple modalities. It is accessible to people\ + \ regardless of their musical background via smartphones. The system is trained\ + \ using sheet music and associated performances, in particular using notated performance\ + \ directions and user-system interaction data to ground performance directions\ + \ in performances. Users can listen to an autonomously generated performance or\ + \ actively engage in the performance process. A speech- and gesture-based feedback\ + \ loop and online learning from past user interactions improve the accuracy of\ + \ the performance rendering control. There are two important assumptions behind\ + \ our approach: a) that many people can express nuanced aspects of expressive\ + \ performance using natural human expressive faculties, such as speech, voice,\ + \ and gesture, and b) that by doing so and hearing the music follow their direction\ + \ with low latency, they can enjoy playing the music that would otherwise be inaccessible\ + \ to them. 
The ultimate goal of this work is to enable fulfilling and accessible\ + \ music making experiences for a large number of people who are not currently\ + \ musically active.},\n address = {Mexico City, Mexico},\n articleno = {91},\n\ + \ author = {Ilya Borovik and Vladimir Viro},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {6},\n pages = {620--625},\n title = {Real-Time Co-Creation of Expressive\ + \ Music Performances Using Speech and Gestures},\n track = {Work in Progress},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_91.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177701 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 320--320 - title: The Oklo Phenomenon - url: http://www.nime.org/proceedings/2009/nime2009_320.pdf - year: 2009 + month: May + numpages: 6 + pages: 620--625 + title: Real-Time Co-Creation of Expressive Music Performances Using Speech and Gestures + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_91.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Lieberman2009 - address: 'Pittsburgh, PA, United States' - author: 'Lieberman, David' - bibtex: "@inproceedings{Lieberman2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Lieberman, David},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177619},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {321--321},\n title = {Anigraphical\ - \ Etude 9},\n url = {http://www.nime.org/proceedings/2009/nime2009_321.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_92 + abstract: 'This paper provides an entry into a decolonial approach to AI driven + 
music and sound arts by describing an ongoing artistic research project Dhvāni. + The project is a series of responsive, self-regulating, and autonomous installations + driven by Artificial Intelligence and Machine Learning and incorporating ritual + and sacred sounds from South Asia. Such mélange re-emphasizes and advocates for + the values of interconnectivity, codependence, network, and community with a decolonial + approach. By giving the AI an autonomous agency, the project aims to reimagine + the future of AI with an inter-subjective reciprocity in human-machine assemblages + transcending the technologically deterministic approach to AI-driven live art, + media arts and music. Through unpacking the project, this paper underscores the + necessity to dehegemonize the AI-driven music field towards a transcultural exchange, + thereby transcend the field’s Eurocentric bias.' + address: 'Mexico City, Mexico' + articleno: 92 + author: Budhaditya Chattopadhyay + bibtex: "@inproceedings{nime2023_92,\n abstract = {This paper provides an entry\ + \ into a decolonial approach to AI driven music and sound arts by describing an\ + \ ongoing artistic research project Dhvāni. The project is a series of responsive,\ + \ self-regulating, and autonomous installations driven by Artificial Intelligence\ + \ and Machine Learning and incorporating ritual and sacred sounds from South Asia.\ + \ Such mélange re-emphasizes and advocates for the values of interconnectivity,\ + \ codependence, network, and community with a decolonial approach. By giving the\ + \ AI an autonomous agency, the project aims to reimagine the future of AI with\ + \ an inter-subjective reciprocity in human-machine assemblages transcending the\ + \ technologically deterministic approach to AI-driven live art, media arts and\ + \ music. 
Through unpacking the project, this paper underscores the necessity to\ + \ dehegemonize the AI-driven music field towards a transcultural exchange, thereby\ + \ transcend the field’s Eurocentric bias.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {92},\n author = {Budhaditya Chattopadhyay},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month\ + \ = {May},\n numpages = {3},\n pages = {626--628},\n title = {Dhvāni: Sacred Sounds\ + \ and Decolonial Machines},\n track = {Work in Progress},\n url = {http://nime.org/proceedings/2023/nime2023_92.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177619 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 321--321 - title: Anigraphical Etude 9 - url: http://www.nime.org/proceedings/2009/nime2009_321.pdf - year: 2009 + month: May + numpages: 3 + pages: 626--628 + title: 'Dhvāni: Sacred Sounds and Decolonial Machines' + track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_92.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Hong2009 - address: 'Pittsburgh, PA, United States' - author: 'Hong, Min Eui' - bibtex: "@inproceedings{Hong2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Hong, Min Eui},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177577},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {322--322},\n title = {Cosmic\ - \ Strings II},\n url = {http://www.nime.org/proceedings/2009/nime2009_322.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_93 + abstract: 'Spinal cord injury is one of the most serious causes of disability that + can affect people''s lives. 
In tetraplegia, the loss of mobility of the upper + limb has a devastating effect on the quality of life and independence of these + patients, so their rehabilitation is considered a crucial objective. We present + a tool for functional motor rehabilitation of the upper limb in patients with + spinal cord injury, based on the use of bio-sensors and the sonification of EMG + activity during the repetitive execution of a specific gesture. During the hospital + stay, the patient has a wide range of therapies available to improve motor function + or compensate for loss of mobility, including execution of different maneuvers. + The repetitive and continuous performance of these tasks is a key element in motor + recovery. However, in many cases, these tasks do not include sufficient feedback + mechanisms to help the patient or to motivate him/her during execution. Through + the sonification of movement and the design of adapted interaction strategies, + our research aims to offer a new therapeutic tool that musically transforms the + gesture and expands the patient''s mechanisms of expression, proprioception and + cognition, in order to optimize, correct and motivate movement.' + address: 'Mexico City, Mexico' + articleno: 93 + author: Jose M Corredera + bibtex: "@inproceedings{nime2023_93,\n abstract = {Spinal cord injury is one of\ + \ the most serious causes of disability that can affect people's lives. In tetraplegia,\ + \ the loss of mobility of the upper limb has a devastating effect on the quality\ + \ of life and independence of these patients, so their rehabilitation is considered\ + \ a crucial objective. We present a tool for functional motor rehabilitation of\ + \ the upper limb in patients with spinal cord injury, based on the use of bio-sensors\ + \ and the sonification of EMG activity during the repetitive execution of a specific\ + \ gesture. 
During the hospital stay, the patient has a wide range of therapies\ + \ available to improve motor function or compensate for loss of mobility, including\ + \ execution of different maneuvers. The repetitive and continuous performance\ + \ of these tasks is a key element in motor recovery. However, in many cases, these\ + \ tasks do not include sufficient feedback mechanisms to help the patient or to\ + \ motivate him/her during execution. Through the sonification of movement and\ + \ the design of adapted interaction strategies, our research aims to offer a new\ + \ therapeutic tool that musically transforms the gesture and expands the patient's\ + \ mechanisms of expression, proprioception and cognition, in order to optimize,\ + \ correct and motivate movement.},\n address = {Mexico City, Mexico},\n articleno\ + \ = {93},\n author = {Jose M Corredera},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {4},\n pages = {629--632},\n title = {EMG Sonification as a Tool for Functional\ + \ Rehabilitation of Spinal-Cord Injury.},\n track = {Work in Progress},\n url\ + \ = {http://nime.org/proceedings/2023/nime2023_93.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177577 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 322--322 - title: Cosmic Strings II - url: http://www.nime.org/proceedings/2009/nime2009_322.pdf - year: 2009 + month: May + numpages: 4 + pages: 629--632 + title: EMG Sonification as a Tool for Functional Rehabilitation of Spinal-Cord Injury. 
+ track: Work in Progress + url: http://nime.org/proceedings/2023/nime2023_93.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Rogers2009 - address: 'Pittsburgh, PA, United States' - author: 'Rogers, Troy and Kemper, Steven and Barton, Scott' - bibtex: "@inproceedings{Rogers2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Rogers, Troy and Kemper, Steven and Barton, Scott},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177665},\n issn = {2220-4806},\n keywords = {nime09},\n\ - \ pages = {323--323},\n title = {Study no. 1 for {PAM} and MADI},\n url = {http://www.nime.org/proceedings/2009/nime2009_323.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_94 + abstract: 'This paper describes Improvise+=Chain, an audio-visual installation artwork + of autonomous musical performance using artificial intelligence technology. The + work is designed to provide the audience with an experience exploring the differences + between human and AI-based virtual musicians. Using a transformer decoder, we + developed a four-track (melody, bass, chords and accompaniment, and drums) symbolic + music generation model. The model generates each track in real time to create + an endless chain of phrases, and 3D visuals and LED lights represent the attention + information between four tracks, i.e., four virtual musicians, calculated within + the model. This work aims to highlight the differences for viewers to consider + between humans and artificial intelligence in music jams by visualizing the only + information virtual musicians can communicate with while humans interact in multiple + modals during the performance.' 
+ address: 'Mexico City, Mexico' + articleno: 94 + author: Atsuya Kobayashi and Ryo Nishikado and Nao Tokui + bibtex: "@inproceedings{nime2023_94,\n abstract = {This paper describes Improvise+=Chain,\ + \ an audio-visual installation artwork of autonomous musical performance using\ + \ artificial intelligence technology. The work is designed to provide the audience\ + \ with an experience exploring the differences between human and AI-based virtual\ + \ musicians. Using a transformer decoder, we developed a four-track (melody, bass,\ + \ chords and accompaniment, and drums) symbolic music generation model. The model\ + \ generates each track in real time to create an endless chain of phrases, and\ + \ 3D visuals and LED lights represent the attention information between four tracks,\ + \ i.e., four virtual musicians, calculated within the model. This work aims to\ + \ highlight the differences for viewers to consider between humans and artificial\ + \ intelligence in music jams by visualizing the only information virtual musicians\ + \ can communicate with while humans interact in multiple modals during the performance.},\n\ + \ address = {Mexico City, Mexico},\n articleno = {94},\n author = {Atsuya Kobayashi\ + \ and Ryo Nishikado and Nao Tokui},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {4},\n pages = {633--636},\n title = {Improvise+=Chain: Listening to the Ensemble\ + \ Improvisation of an Autoregressive Generative Model},\n track = {Demos},\n url\ + \ = {http://nime.org/proceedings/2023/nime2023_94.pdf},\n year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177665 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 323--323 - title: 'Study no. 
1 for {PAM} and MADI' - url: http://www.nime.org/proceedings/2009/nime2009_323.pdf - year: 2009 + month: May + numpages: 4 + pages: 633--636 + title: 'Improvise+=Chain: Listening to the Ensemble Improvisation of an Autoregressive + Generative Model' + track: Demos + url: http://nime.org/proceedings/2023/nime2023_94.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Paine2009 - address: 'Pittsburgh, PA, United States' - author: 'Paine, Garth and Atherton, Michael' - bibtex: "@inproceedings{Paine2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Paine, Garth and Atherton, Michael},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177651},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ - \ = {324--324},\n title = {Fue Sho -- Electrofusion},\n url = {http://www.nime.org/proceedings/2009/nime2009_324.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_95 + abstract: 'SketchSynth is an interface that allows users to create mappings between + synthesised sound and a graphical sketch input based on human cross-modal perception. + The project is rooted in the authors'' research which collected 2692 sound-sketches + from 178 participants representing their associations with various sounds. The + interface extracts sketch features in real-time that were shown to correlate with + sound characteristics and can be mapped to synthesis and audio effect parameters + via Open Sound Control (OSC). This modular approach allows for an easy integration + into an existing workflow and can be tailored to individual preferences. The interface + can be accessed online through a web-browser on a computer, laptop, smartphone + or tablet and does not require specialised hard- or software. We demonstrate SketchSynth + with an iPad for sketch input to control synthesis and audio effect parameters + in the Ableton Live digital audio workstation (DAW). 
A MIDI controller is used + to play notes and trigger pre-recorded accompaniment. This work serves as an example + of how perceptual research can help create strong, meaningful gesture-to-sound + mappings.' + address: 'Mexico City, Mexico' + articleno: 95 + author: Sebastian Lobbers and George Fazekas + bibtex: "@inproceedings{nime2023_95,\n abstract = {SketchSynth is an interface that\ + \ allows users to create mappings between synthesised sound and a graphical sketch\ + \ input based on human cross-modal perception. The project is rooted in the authors'\ + \ research which collected 2692 sound-sketches from 178 participants representing\ + \ their associations with various sounds. The interface extracts sketch features\ + \ in real-time that were shown to correlate with sound characteristics and can\ + \ be mapped to synthesis and audio effect parameters via Open Sound Control (OSC).\ + \ This modular approach allows for an easy integration into an existing workflow\ + \ and can be tailored to individual preferences. The interface can be accessed\ + \ online through a web-browser on a computer, laptop, smartphone or tablet and\ + \ does not require specialised hard- or software. We demonstrate SketchSynth with\ + \ an iPad for sketch input to control synthesis and audio effect parameters in\ + \ the Ableton Live digital audio workstation (DAW). A MIDI controller is used\ + \ to play notes and trigger pre-recorded accompaniment. 
This work serves as an\ + \ example of how perceptual research can help create strong, meaningful gesture-to-sound\ + \ mappings.},\n address = {Mexico City, Mexico},\n articleno = {95},\n author\ + \ = {Sebastian Lobbers and George Fazekas},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n editor\ + \ = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n\ + \ numpages = {5},\n pages = {637--641},\n title = {SketchSynth: a browser-based\ + \ sketching interface for sound control},\n track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_95.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177651 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 324--324 - title: Fue Sho -- Electrofusion - url: http://www.nime.org/proceedings/2009/nime2009_324.pdf - year: 2009 + month: May + numpages: 5 + pages: 637--641 + title: 'SketchSynth: a browser-based sketching interface for sound control' + track: Demos + url: http://nime.org/proceedings/2023/nime2023_95.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Barri2009a - address: 'Pittsburgh, PA, United States' - author: 'Barri, Tarik' - bibtex: "@inproceedings{Barri2009a,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Barri, Tarik},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177475},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {325--325},\n title = {Versum\ - \ -- Fluor},\n url = {http://www.nime.org/proceedings/2009/nime2009_325.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_96 + abstract: 'This paper describes the Harvester, a DIY sampler and synthesizer. 
The + Harvester provides users with a low-cost, accessible platform for making music + with everyday sounds via open-source hardware and software tools that anyone can + use or modify. This paper goes over the motivation, methodology, features, and + use cases of the Harvester instrument, with the intention of the instrument being + demonstrated for people to play with and use at NIME 2023.' + address: 'Mexico City, Mexico' + articleno: 96 + author: Johann Diedrick + bibtex: "@inproceedings{nime2023_96,\n abstract = {This paper describes the Harvester,\ + \ a DIY sampler and synthesizer. The Harvester provides users with a low-cost,\ + \ accessible platform for making music with everyday sounds via open-source hardware\ + \ and software tools that anyone can use or modify. This paper goes over the motivation,\ + \ methodology, features, and use cases of the Harvester instrument, with the intention\ + \ of the instrument being demonstrated for people to play with and use at NIME\ + \ 2023.},\n address = {Mexico City, Mexico},\n articleno = {96},\n author = {Johann\ + \ Diedrick},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n\ + \ issn = {2220-4806},\n month = {May},\n numpages = {2},\n pages = {642--643},\n\ + \ title = {The Harvester: A DIY Sampler and Synthesizer - Demo},\n track = {Demos},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_96.pdf},\n year = {2023}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177475 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 325--325 - title: Versum -- Fluor - url: http://www.nime.org/proceedings/2009/nime2009_325.pdf - year: 2009 + month: May + numpages: 2 + pages: 642--643 + title: 'The Harvester: A DIY Sampler and Synthesizer - Demo' + track: Demos + url: 
http://nime.org/proceedings/2023/nime2023_96.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Miyama2009 - address: 'Pittsburgh, PA, United States' - author: 'Miyama, Chikashi' - bibtex: "@inproceedings{Miyama2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Miyama, Chikashi},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177633},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {326--326},\n title = {Angry\ - \ Sparrow},\n url = {http://www.nime.org/proceedings/2009/nime2009_326.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_97 + abstract: 'The Kraakavera (a portmanteau of Kraakdoos—aka the Crackle box, and “calavera”—i.e., + skull in Spanish) is an instrument that honours Michel Waisvisz’s memory by tributing + one of his classic instruments—an exemplary of circuit bending that originated + from STEIM in the 1960s. Inspired by the original design which used six metal + contacts as inputs, I have used conductive paint to paint six pads on a ceramic + skull which interact with the Kraakdoos circuit (using a uA709 IC). The skull + depicts a sugar skull which is a traditional Mexican sweet that is often seen + in altars to honour diseased relatives and loved ones during the Day of the Dead, + but that is also consumed as a treat by children during these festivities. In + this case, I have constructed an altar for Waisvisz, which doubles as an instrument, + where the sugar skull—the centrepiece of the altar (below a picture of Waisvisz) + serves both as traditional decoration but also the main point of contact with + the instrument. Hence, the altar invites the musician to pay their respects by + playing the instrument through the sugar skull. 
The Kraakavera also features a + second mode which can be accessed by patching the skull’s inputs to another circuit + which features a Trill Craft capacitive sensing board and a Bela board, which + processes a secondary sound output consisting of a sample of a ceramic whistle + running through a granular synthesizer patched in Pure Data (corresponding to + the six pads on the skull). Lastly, the Kraakavera presents a syncretism of Mexican + folklore and circuit bending traditions and a juxtaposition of classic and upcoming + DMIs.' + address: 'Mexico City, Mexico' + articleno: 97 + author: Juan P Martinez Avila + bibtex: "@inproceedings{nime2023_97,\n abstract = {The Kraakavera (a portmanteau\ + \ of Kraakdoos—aka the Crackle box, and “calavera”—i.e., skull in Spanish) is\ + \ an instrument that honours Michel Waisvisz’s memory by tributing one of his\ + \ classic instruments—an exemplary of circuit bending that originated from STEIM\ + \ in the 1960s. Inspired by the original design which used six metal contacts\ + \ as inputs, I have used conductive paint to paint six pads on a ceramic skull\ + \ which interact with the Kraakdoos circuit (using a uA709 IC). The skull depicts\ + \ a sugar skull which is a traditional Mexican sweet that is often seen in altars\ + \ to honour diseased relatives and loved ones during the Day of the Dead, but\ + \ that is also consumed as a treat by children during these festivities. In this\ + \ case, I have constructed an altar for Waisvisz, which doubles as an instrument,\ + \ where the sugar skull—the centrepiece of the altar (below a picture of Waisvisz)\ + \ serves both as traditional decoration but also the main point of contact with\ + \ the instrument. Hence, the altar invites the musician to pay their respects\ + \ by playing the instrument through the sugar skull. 
The Kraakavera also features\ + \ a second mode which can be accessed by patching the skull’s inputs to another\ + \ circuit which features a Trill Craft capacitive sensing board and a Bela board,\ + \ which processes a secondary sound output consisting of a sample of a ceramic\ + \ whistle running through a granular synthesizer patched in Pure Data (corresponding\ + \ to the six pads on the skull). Lastly, the Kraakavera presents a syncretism\ + \ of Mexican folklore and circuit bending traditions and a juxtaposition of classic\ + \ and upcoming DMIs.},\n address = {Mexico City, Mexico},\n articleno = {97},\n\ + \ author = {Juan P Martinez Avila},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {2},\n pages = {644--645},\n title = {Kraakavera: A Tribute to Michel Waisvisz},\n\ + \ track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_97.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177633 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 326--326 - title: Angry Sparrow - url: http://www.nime.org/proceedings/2009/nime2009_326.pdf - year: 2009 + month: May + numpages: 2 + pages: 644--645 + title: 'Kraakavera: A Tribute to Michel Waisvisz' + track: Demos + url: http://nime.org/proceedings/2023/nime2023_97.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Lyon2009 - address: 'Pittsburgh, PA, United States' - author: 'Lyon, Eric and Knapp, Benjamin and Ouzounian, Gascia' - bibtex: "@inproceedings{Lyon2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Lyon, Eric and Knapp, Benjamin and Ouzounian, Gascia},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = 
{10.5281/zenodo.1177621},\n issn = {2220-4806},\n keywords\ - \ = {nime09},\n pages = {327--327},\n title = {Biomuse Trio},\n url = {http://www.nime.org/proceedings/2009/nime2009_327.pdf},\n\ - \ year = {2009}\n}\n" + ID: nime2023_98 + abstract: 'MaxMSP is a visual programming language for creating interactive audiovisual + media that has found great success as a flexible and accessible option for computer + music. However, the visual interface requires manual object placement and connection, + which can be inefficient. Automated patch editing is possible either by visual + programming with the [thispatcher] object or text-based programming with the [js] + object. However, these objects cannot automatically create and save new patches, + and they operate at run-time only, requiring live input to trigger patch construction. There + is no solution for automated creation of multiple patches at \textitcompile-time, + such that the constructed patches do not contain their own constructors. To this + end, we present MaxPy, an open-source Python package for programmatic construction + and manipulation of MaxMSP patches. MaxPy replaces the manual actions of placing + objects, connecting patchcords, and saving patch files with text-based Python + functions, thus enabling dynamic, procedural, high-volume patch generation at + compile-time. MaxPy also includes the ability to import existing patches, allowing + users to move freely between text-based Python programming and visual programming + with the Max GUI. MaxPy enables composers, programmers, and creators to explore + expanded possibilities for complex, dynamic, and algorithmic patch construction + through text-based Python programming of MaxMSP.' 
+ address: 'Mexico City, Mexico' + articleno: 98 + author: Ranger Y Liu and Satchel Peterson and Richard T Lee and Mark Santolucito + bibtex: "@inproceedings{nime2023_98,\n abstract = {MaxMSP is a visual programming\ + \ language for creating interactive audiovisual media that has found great success\ + \ as a flexible and accessible option for computer music. However, the visual\ + \ interface requires manual object placement and connection, which can be inefficient.\ + \ Automated patch editing is possible either by visual programming with the [thispatcher]\ + \ object or text-based programming with the [js] object. However, these objects\ + \ cannot automatically create and save new patches, and they operate at run-time\ + \ only, requiring live input to trigger patch construction. There is no solution\ + \ for automated creation of multiple patches at \\textit{compile-time}, such that\ + \ the constructed patches do not contain their own constructors. To this end,\ + \ we present MaxPy, an open-source Python package for programmatic construction\ + \ and manipulation of MaxMSP patches. MaxPy replaces the manual actions of placing\ + \ objects, connecting patchcords, and saving patch files with text-based Python\ + \ functions, thus enabling dynamic, procedural, high-volume patch generation at\ + \ compile-time. MaxPy also includes the ability to import existing patches, allowing\ + \ users to move freely between text-based Python programming and visual programming\ + \ with the Max GUI. 
MaxPy enables composers, programmers, and creators to explore\ + \ expanded possibilities for complex, dynamic, and algorithmic patch construction\ + \ through text-based Python programming of MaxMSP.},\n address = {Mexico City,\ + \ Mexico},\n articleno = {98},\n author = {Ranger Y Liu and Satchel Peterson and\ + \ Richard T Lee and Mark Santolucito},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n editor = {Miguel Ortiz\ + \ and Adnan Marquez-Borbon},\n issn = {2220-4806},\n month = {May},\n numpages\ + \ = {4},\n pages = {646--649},\n title = {MaxPy: An open-source Python package\ + \ for text-based generation of MaxMSP patches},\n track = {Demos},\n url = {http://nime.org/proceedings/2023/nime2023_98.pdf},\n\ + \ year = {2023}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177621 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 327--327 - title: Biomuse Trio - url: http://www.nime.org/proceedings/2009/nime2009_327.pdf - year: 2009 + month: May + numpages: 4 + pages: 646--649 + title: 'MaxPy: An open-source Python package for text-based generation of MaxMSP + patches' + track: Demos + url: http://nime.org/proceedings/2023/nime2023_98.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Goto2009 - address: 'Pittsburgh, PA, United States' - author: 'Goto, Suguru' - bibtex: "@inproceedings{Goto2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Goto, Suguru},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177557},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {328--328},\n title = {BodyJack},\n\ - \ url = {http://www.nime.org/proceedings/2009/nime2009_328.pdf},\n year = {2009}\n\ + ID: nime2023_99 + abstract: 'In this demo we present an interactive object called Barahúnda 
Boba, + that was developed through the exploration of Quito’s identity (a city placed + in the mountains in Latin América). The product is an audio container system that + reproduces environmental sounds of the city to preserve the memory of Quito. It + was built after studying the Baroque’s concepts, as a period, extrapolated to + the Baroque’s culture. The program that plays and stores the audio is written + originally in JavaScript under the p5.js’s library. The object is a decorative + product, handcrafted in pine wood. The components are assembled in an Arduino + controller and they are embedded in the product. Although the object has a user + interface, the product (just like the noise of the city) can not be fully controlled.' + address: 'Mexico City, Mexico' + articleno: 99 + author: Xavier Barriga-Abril and Andres Basantes + bibtex: "@inproceedings{nime2023_99,\n abstract = {In this demo we present an interactive\ + \ object called Barahúnda Boba, that was developed through the exploration of\ + \ Quito’s identity (a city placed in the mountains in Latin América). The product\ + \ is an audio container system that reproduces environmental sounds of the city\ + \ to preserve the memory of Quito. It was built after studying the Baroque’s concepts,\ + \ as a period, extrapolated to the Baroque’s culture. The program that plays and\ + \ stores the audio is written originally in JavaScript under the p5.js’s library.\ + \ The object is a decorative product, handcrafted in pine wood. 
The components\ + \ are assembled in an Arduino controller and they are embedded in the product.\ + \ Although the object has a user interface, the product (just like the noise of\ + \ the city) can not be fully controlled.},\n address = {Mexico City, Mexico},\n\ + \ articleno = {99},\n author = {Xavier Barriga-Abril and Andres Basantes},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n editor = {Miguel Ortiz and Adnan Marquez-Borbon},\n issn = {2220-4806},\n\ + \ month = {May},\n numpages = {3},\n pages = {650--652},\n title = {Missing the\ + \ hubbub: Memory and Identity in the Interactive Audio},\n track = {Demos},\n\ + \ url = {http://nime.org/proceedings/2023/nime2023_99.pdf},\n year = {2023}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177557 + editor: Miguel Ortiz and Adnan Marquez-Borbon issn: 2220-4806 - keywords: nime09 - pages: 328--328 - title: BodyJack - url: http://www.nime.org/proceedings/2009/nime2009_328.pdf - year: 2009 + month: May + numpages: 3 + pages: 650--652 + title: 'Missing the hubbub: Memory and Identity in the Interactive Audio' + track: Demos + url: http://nime.org/proceedings/2023/nime2023_99.pdf + year: 2023 - ENTRYTYPE: inproceedings - ID: Baalman2009 - address: 'Pittsburgh, PA, United States' - author: 'Baalman, Marije A.' 
- bibtex: "@inproceedings{Baalman2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Baalman, Marije A.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177469},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {329--329},\n title = {Code\ - \ LiveCode Live, or livecode Embodied},\n url = {http://www.nime.org/proceedings/2009/nime2009_329.pdf},\n\ - \ year = {2009}\n}\n" + ID: Fels2004 + abstract: 'The Tooka was created as an exploration of two personinstruments. We + have worked with two Tooka performers toenhance the original experimental device + to make a musicalinstrument played and enjoyed by them. The main additions tothe + device include: an additional button that behaves as amusic capture button, a + bend sensor, an additional thumbactuated pressure sensor for vibrato, additional + musicalmapping strategies, and new interfacing hardware. Thesedevelopments a rose + through exper iences andrecommendations from the musicians playing it. In addition + tothe changes to the Tooka, this paper describes the learningprocess and experiences + of the musicians performing with theTooka.' + address: 'Hamamatsu, Japan' + author: 'Fels, Sidney S. and Kaastra, Linda and Takahashi, Sachiyo and Mccaig, Graeme' + bibtex: "@inproceedings{Fels2004,\n abstract = {The Tooka was created as an exploration\ + \ of two personinstruments. We have worked with two Tooka performers toenhance\ + \ the original experimental device to make a musicalinstrument played and enjoyed\ + \ by them. The main additions tothe device include: an additional button that\ + \ behaves as amusic capture button, a bend sensor, an additional thumbactuated\ + \ pressure sensor for vibrato, additional musicalmapping strategies, and new interfacing\ + \ hardware. Thesedevelopments a rose through exper iences andrecommendations from\ + \ the musicians playing it. 
In addition tothe changes to the Tooka, this paper\ + \ describes the learningprocess and experiences of the musicians performing with\ + \ theTooka.},\n address = {Hamamatsu, Japan},\n author = {Fels, Sidney S. and\ + \ Kaastra, Linda and Takahashi, Sachiyo and Mccaig, Graeme},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176595},\n issn = {2220-4806},\n keywords = {Musician-centred\ + \ design, two-person musical instrument.},\n pages = {1--6},\n title = {Evolving\ + \ Tooka: from Experiment to Instrument},\n url = {http://www.nime.org/proceedings/2004/nime2004_001.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177469 + doi: 10.5281/zenodo.1176595 issn: 2220-4806 - keywords: nime09 - pages: 329--329 - title: 'Code LiveCode Live, or livecode Embodied' - url: http://www.nime.org/proceedings/2009/nime2009_329.pdf - year: 2009 + keywords: 'Musician-centred design, two-person musical instrument.' 
+ pages: 1--6 + title: 'Evolving Tooka: from Experiment to Instrument' + url: http://www.nime.org/proceedings/2004/nime2004_001.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Torre2009 - address: 'Pittsburgh, PA, United States' - author: 'Torre, Giuseppe and Sazdov, Robert and Konczewska, Dorota' - bibtex: "@inproceedings{Torre2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Torre, Giuseppe and Sazdov, Robert and Konczewska, Dorota},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177695},\n issn = {2220-4806},\n keywords\ - \ = {nime09},\n pages = {330--330},\n title = {MOLITVA --- Composition for Voice,\ - \ Live Electronics, Pointing-At Glove Device and {3-D} Setup of Speakers},\n url\ - \ = {http://www.nime.org/proceedings/2009/nime2009_330.pdf},\n year = {2009}\n\ - }\n" + ID: Kapur2004 + abstract: 'This paper describes the design of an Electronic Sitar controller, adigitally + modified version of Saraswati''s (the Hindu Goddess ofMusic) 19-stringed, pumpkin + shelled, traditional North Indianinstrument. The ESitar uses sensor technology + to extract gesturalinformation from a performer, deducing music information suchas + pitch, pluck timing, thumb pressure, and 3-axes of head tilt totrigger real-time + sounds and graphics. It allows for a variety oftraditional sitar technique as + well as new performance methods.Graphical feedback allows for artistic display + and pedagogicalfeedback. The ESitar uses a programmable Atmel microprocessorwhich + outputs control messages via a standard MIDI jack.' + address: 'Hamamatsu, Japan' + author: 'Kapur, Ajay and Lazier, Ariel J. and Davidson, Philip L. and Wilson, Scott + and Cook, Perry R.' 
+ bibtex: "@inproceedings{Kapur2004,\n abstract = {This paper describes the design\ + \ of an Electronic Sitar controller, adigitally modified version of Saraswati's\ + \ (the Hindu Goddess ofMusic) 19-stringed, pumpkin shelled, traditional North\ + \ Indianinstrument. The ESitar uses sensor technology to extract gesturalinformation\ + \ from a performer, deducing music information suchas pitch, pluck timing, thumb\ + \ pressure, and 3-axes of head tilt totrigger real-time sounds and graphics. It\ + \ allows for a variety oftraditional sitar technique as well as new performance\ + \ methods.Graphical feedback allows for artistic display and pedagogicalfeedback.\ + \ The ESitar uses a programmable Atmel microprocessorwhich outputs control messages\ + \ via a standard MIDI jack.},\n address = {Hamamatsu, Japan},\n author = {Kapur,\ + \ Ajay and Lazier, Ariel J. and Davidson, Philip L. and Wilson, Scott and Cook,\ + \ Perry R.},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176623},\n issn\ + \ = {2220-4806},\n keywords = {atmel microcontroller,controller,electronic sitar,esitar,human\ + \ computer interface,indian string controller,instrument graphical feedback,midi,veldt},\n\ + \ pages = {7--12},\n title = {The Electronic Sitar Controller},\n url = {http://www.nime.org/proceedings/2004/nime2004_007.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177695 + doi: 10.5281/zenodo.1176623 issn: 2220-4806 - keywords: nime09 - pages: 330--330 - title: 'MOLITVA --- Composition for Voice, Live Electronics, Pointing-At Glove Device - and {3-D} Setup of Speakers' - url: http://www.nime.org/proceedings/2009/nime2009_330.pdf - year: 2009 + keywords: 'atmel microcontroller,controller,electronic sitar,esitar,human computer + interface,indian string controller,instrument graphical 
feedback,midi,veldt' + pages: 7--12 + title: The Electronic Sitar Controller + url: http://www.nime.org/proceedings/2004/nime2004_007.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Neill2009 - address: 'Pittsburgh, PA, United States' - author: 'Neill, Ben and Singer, Eric' - bibtex: "@inproceedings{Neill2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Neill, Ben and Singer, Eric},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177639},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {331--331},\n title = {Ben\ - \ Neill and LEMUR},\n url = {http://www.nime.org/proceedings/2009/nime2009_331.pdf},\n\ - \ year = {2009}\n}\n" + ID: Takahata2004 + abstract: 'We have developed new sound feedback for powerful karate training with + pleasure, which enables to extract player''s movement, understand player''s activities, + and generate them to sounds. We have designed a karate training environment which + consists of a multimodal room with cameras, microphones, video displays and loud + speakers, and wearable devices with a sensor and a sound generator. Experiments + have been conducted on ten Karate beginnners for ten months to examine the effectiveness + to learn appropriate body action and sharpness in basic punch called TSUKI. The + experimental results suggest the proposed sound feedback and the training environments + enable beginners to achieve enjoyable Karate.' 
+ address: 'Hamamatsu, Japan' + author: 'Takahata, Masami and Shiraki, Kensuke and Sakane, Yutaka and Takebayashi, + Yoichi' + bibtex: "@inproceedings{Takahata2004,\n abstract = {We have developed new sound\ + \ feedback for powerful karate training with pleasure, which enables to extract\ + \ player's movement, understand player's activities, and generate them to sounds.\ + \ We have designed a karate training environment which consists of a multimodal\ + \ room with cameras, microphones, video displays and loud speakers, and wearable\ + \ devices with a sensor and a sound generator. Experiments have been conducted\ + \ on ten Karate beginnners for ten months to examine the effectiveness to learn\ + \ appropriate body action and sharpness in basic punch called TSUKI. The experimental\ + \ results suggest the proposed sound feedback and the training environments enable\ + \ beginners to achieve enjoyable Karate.},\n address = {Hamamatsu, Japan},\n author\ + \ = {Takahata, Masami and Shiraki, Kensuke and Sakane, Yutaka and Takebayashi,\ + \ Yoichi},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176673},\n issn = {2220-4806},\n\ + \ keywords = {Sound feedback, Karate, Learning environment, Wearable device},\n\ + \ pages = {13--18},\n title = {Sound Feedback for Powerful Karate Training},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_013.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177639 + doi: 10.5281/zenodo.1176673 issn: 2220-4806 - keywords: nime09 - pages: 331--331 - title: Ben Neill and LEMUR - url: http://www.nime.org/proceedings/2009/nime2009_331.pdf - year: 2009 + keywords: 'Sound feedback, Karate, Learning environment, Wearable device' + pages: 13--18 + title: Sound Feedback for Powerful Karate Training + url: 
http://www.nime.org/proceedings/2004/nime2004_013.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Hindman2009 - address: 'Pittsburgh, PA, United States' - author: 'Hindman, David and Drummond, Evan' - bibtex: "@inproceedings{Hindman2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Hindman, David and Drummond, Evan},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177573},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ - \ = {332--332},\n title = {Performance: Modal Kombat Plays {PON}G},\n url = {http://www.nime.org/proceedings/2009/nime2009_332.pdf},\n\ - \ year = {2009}\n}\n" + ID: Kaltenbrunner2004 + abstract: 'This article reflects the current state of the reacTable* project,an + electronic music instrument with a tangible table-basedinterface, which is currently + under development at theAudiovisual Institute at the Universitat Pompeu Fabra. + In thispaper we are focussing on the issue of Dynamic Patching,which is a particular + and unique aspect of the sound synthesisand control paradigms of the reacTable*. + Unlike commonvisual programming languages for sound synthesis, whichconceptually + separate the patch building process from theactual musical performance, the reacTable* + combines theconstruction and playing of the instrument in a unique way.The tangible + interface allows direct manipulation control overany of the used building blocks, + which physically representthe whole synthesizer function.' + address: 'Hamamatsu, Japan' + author: 'Kaltenbrunner, Martin and Geiger, Günter and Jordà, Sergi' + bibtex: "@inproceedings{Kaltenbrunner2004,\n abstract = {This article reflects the\ + \ current state of the reacTable* project,an electronic music instrument with\ + \ a tangible table-basedinterface, which is currently under development at theAudiovisual\ + \ Institute at the Universitat Pompeu Fabra. 
In thispaper we are focussing on\ + \ the issue of Dynamic Patching,which is a particular and unique aspect of the\ + \ sound synthesisand control paradigms of the reacTable*. Unlike commonvisual\ + \ programming languages for sound synthesis, whichconceptually separate the patch\ + \ building process from theactual musical performance, the reacTable* combines\ + \ theconstruction and playing of the instrument in a unique way.The tangible interface\ + \ allows direct manipulation control overany of the used building blocks, which\ + \ physically representthe whole synthesizer function.},\n address = {Hamamatsu,\ + \ Japan},\n author = {Kaltenbrunner, Martin and Geiger, G\\''{u}nter and Jord\\\ + `{a}, Sergi},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176621},\n issn\ + \ = {2220-4806},\n keywords = {dynamic patching,musical instrument,sound synthesis,tangible\ + \ interfaces,visual programming},\n pages = {19--22},\n title = {Dynamic Patches\ + \ for Live Musical Performance},\n url = {http://www.nime.org/proceedings/2004/nime2004_019.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177573 + doi: 10.5281/zenodo.1176621 issn: 2220-4806 - keywords: nime09 - pages: 332--332 - title: 'Performance: Modal Kombat Plays {PON}G' - url: http://www.nime.org/proceedings/2009/nime2009_332.pdf - year: 2009 + keywords: 'dynamic patching,musical instrument,sound synthesis,tangible interfaces,visual + programming' + pages: 19--22 + title: Dynamic Patches for Live Musical Performance + url: http://www.nime.org/proceedings/2004/nime2004_019.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Leider2009 - address: 'Pittsburgh, PA, United States' - author: 'Leider, Colby' - bibtex: "@inproceedings{Leider2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Leider, Colby},\n booktitle 
= {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177615},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {333--333},\n title = {Afflux/Reflux},\n\ - \ url = {http://www.nime.org/proceedings/2009/nime2009_333.pdf},\n year = {2009}\n\ + ID: Young2004 + abstract: 'We present a prototype of a new musical interface for Japanese drumming + techniques and styles. Our design used in the Aobachi drumming sticks provides + 5 gesture parameters (3 axes of acceleration, and 2 axes of angular velocity) + for each of the two sticks and transmits this data wirelessly using Bluetooth® + technology. This system utilizes minimal hardware embedded in the two drumming + sticks, allowing for gesture tracking of drum strokes by an interface of traditional + form, appearance, and feel. Aobachi is portable, versatile, and robust, and may + be used for a variety of musical applications, as well as analytical studies.' + address: 'Hamamatsu, Japan' + author: 'Young, Diana and Fujinaga, Ichiro' + bibtex: "@inproceedings{Young2004,\n abstract = {We present a prototype of a new\ + \ musical interface for Japanese drumming techniques and styles. Our design used\ + \ in the Aobachi drumming sticks provides 5 gesture parameters (3 axes of acceleration,\ + \ and 2 axes of angular velocity) for each of the two sticks and transmits this\ + \ data wirelessly using Bluetooth® technology. This system utilizes minimal hardware\ + \ embedded in the two drumming sticks, allowing for gesture tracking of drum strokes\ + \ by an interface of traditional form, appearance, and feel. 
Aobachi is portable,\ + \ versatile, and robust, and may be used for a variety of musical applications,\ + \ as well as analytical studies.},\n address = {Hamamatsu, Japan},\n author =\ + \ {Young, Diana and Fujinaga, Ichiro},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176687},\n\ + \ issn = {2220-4806},\n keywords = {bluetooth,drum stick,japanese drum,taiko,wireless},\n\ + \ pages = {23--26},\n title = {AoBachi: A New Interface for {Japan}ese Drumming},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_023.pdf},\n year = {2004}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177615 + doi: 10.5281/zenodo.1176687 issn: 2220-4806 - keywords: nime09 - pages: 333--333 - title: Afflux/Reflux - url: http://www.nime.org/proceedings/2009/nime2009_333.pdf - year: 2009 + keywords: 'bluetooth,drum stick,japanese drum,taiko,wireless' + pages: 23--26 + title: 'AoBachi: A New Interface for Japanese Drumming' + url: http://www.nime.org/proceedings/2004/nime2004_023.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Wang2009a - address: 'Pittsburgh, PA, United States' - author: 'Wang, Ge and Fiebrink, Rebecca' - bibtex: "@inproceedings{Wang2009a,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Wang, Ge and Fiebrink, Rebecca},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177699},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ - \ = {334--334},\n title = {PLOrk Beat Science 2.0},\n url = {http://www.nime.org/proceedings/2009/nime2009_334.pdf},\n\ - \ year = {2009}\n}\n" + ID: BryanKinns2004 + abstract: We have seen many new and exciting developments in new interfaces for + musical expression. 
In this paper we present the design of an interface for remote + group music improvisation and composition - Daisyphone. The approach relies on + players creating and editing short shared loops of music which are semi-synchronously + updated. The interface emphasizes the looping nature of the music and is designed + to be engaging and deployable on a wide range of interaction devices. Observations + of the use of the tool with different levels of persistence of contribution are + reported and discussed. Future developments centre around ways to string loops + together into larger pieces (composition) and investigating suitable rates of + decay to encourage more group improvisation. + address: 'Hamamatsu, Japan' + author: 'Bryan-Kinns, Nick and Healey, Patrick G.' + bibtex: "@inproceedings{BryanKinns2004,\n abstract = {We have seen many new and\ + \ exciting developments in new interfaces for musical expression. In this paper\ + \ we present the design of an interface for remote group music improvisation and\ + \ composition - Daisyphone. The approach relies on players creating and editing\ + \ short shared loops of music which are semi-synchronously updated. The interface\ + \ emphasizes the looping nature of the music and is designed to be engaging and\ + \ deployable on a wide range of interaction devices. Observations of the use of\ + \ the tool with different levels of persistence of contribution are reported and\ + \ discussed. 
Future developments centre around ways to string loops together into\ + \ larger pieces (composition) and investigating suitable rates of decay to encourage\ + \ more group improvisation.},\n address = {Hamamatsu, Japan},\n author = {Bryan-Kinns,\ + \ Nick and Healey, Patrick G.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176583},\n\ + \ issn = {2220-4806},\n keywords = {collaboration,composition,improvisation,music},\n\ + \ pages = {27--30},\n title = {Daisyphone: Support for Remote Music Collaboration},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_027.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177699 + doi: 10.5281/zenodo.1176583 issn: 2220-4806 - keywords: nime09 - pages: 334--334 - title: PLOrk Beat Science 2.0 - url: http://www.nime.org/proceedings/2009/nime2009_334.pdf - year: 2009 + keywords: collaboration,composition,improvisation,music + pages: 27--30 + title: 'Daisyphone: Support for Remote Music Collaboration' + url: http://www.nime.org/proceedings/2004/nime2004_027.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Wessel2009 - address: 'Pittsburgh, PA, United States' - author: 'Wessel, David' - bibtex: "@inproceedings{Wessel2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Wessel, David},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177707},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {335--335},\n title = {Hands\ - \ On --- A New Work from SLABS Controller and Generative Algorithms},\n url =\ - \ {http://www.nime.org/proceedings/2009/nime2009_335.pdf},\n year = {2009}\n}\n" + ID: Havel2004 + abstract: 'This paper presents a project involving a percussionist playing on a + virtual percussion. 
Both artistic and technical aspects of the project are developed. + Especially, a method forstrike recognition using the Flock of Birds is presented, + aswell as its use for artistic purpose.' + address: 'Hamamatsu, Japan' + author: 'Havel, Christophe and Desainte-Catherine, Myriam' + bibtex: "@inproceedings{Havel2004,\n abstract = {This paper presents a project involving\ + \ a percussionist playing on a virtual percussion. Both artistic and technical\ + \ aspects of the project are developed. Especially, a method forstrike recognition\ + \ using the Flock of Birds is presented, aswell as its use for artistic purpose.},\n\ + \ address = {Hamamatsu, Japan},\n author = {Havel, Christophe and Desainte-Catherine,\ + \ Myriam},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176609},\n issn = {2220-4806},\n\ + \ keywords = {Gesture analysis, virtual percussion, strike recognition.},\n pages\ + \ = {31--34},\n title = {Modeling an Air Percussion for Composition and Performance},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_031.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177707 + doi: 10.5281/zenodo.1176609 issn: 2220-4806 - keywords: nime09 - pages: 335--335 - title: Hands On --- A New Work from SLABS Controller and Generative Algorithms - url: http://www.nime.org/proceedings/2009/nime2009_335.pdf - year: 2009 + keywords: 'Gesture analysis, virtual percussion, strike recognition.' + pages: 31--34 + title: Modeling an Air Percussion for Composition and Performance + url: http://www.nime.org/proceedings/2004/nime2004_031.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Dubois2009 - address: 'Pittsburgh, PA, United States' - author: 'Dubois, R. 
Luke and Flanigan, Lesley' - bibtex: "@inproceedings{Dubois2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Dubois, R. Luke and Flanigan, Lesley},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177501},\n issn = {2220-4806},\n keywords = {nime09},\n\ - \ pages = {336--336},\n title = {Bioluminescence},\n url = {http://www.nime.org/proceedings/2009/nime2009_336.pdf},\n\ - \ year = {2009}\n}\n" + ID: Nelson2004 + abstract: 'Although MIDI is often used for computer-based interactive music applications, + its real-time performance is rarely quantified, despite concerns about whether + it is capable of adequate performance in realistic settings. We extend existing + proposals for MIDI performance benchmarking so they are useful in realistic interactive + scenarios, including those with heavy MIDI traffic and CPU load. We have produced + a cross-platform freely-available testing suite that is easy to use, and have + used it to survey the interactive performance of several commonly-used computer/MIDI + setups. We describe the suite, summarize the results of our performance survey, + and detail the benefits of this testing methodology.' + address: 'Hamamatsu, Japan' + author: 'Nelson, Mark and Thom, Belinda' + bibtex: "@inproceedings{Nelson2004,\n abstract = {Although MIDI is often used for\ + \ computer-based interactive music applications, its real-time performance is\ + \ rarely quantified, despite concerns about whether it is capable of adequate\ + \ performance in realistic settings. We extend existing proposals for MIDI performance\ + \ benchmarking so they are useful in realistic interactive scenarios, including\ + \ those with heavy MIDI traffic and CPU load. We have produced a cross-platform\ + \ freely-available testing suite that is easy to use, and have used it to survey\ + \ the interactive performance of several commonly-used computer/MIDI setups. 
We\ + \ describe the suite, summarize the results of our performance survey, and detail\ + \ the benefits of this testing methodology.},\n address = {Hamamatsu, Japan},\n\ + \ author = {Nelson, Mark and Thom, Belinda},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176643},\n issn = {2220-4806},\n pages = {35--38},\n title\ + \ = {A Survey of Real-Time {MIDI} Performance},\n url = {http://www.nime.org/proceedings/2004/nime2004_035.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177501 + doi: 10.5281/zenodo.1176643 issn: 2220-4806 - keywords: nime09 - pages: 336--336 - title: Bioluminescence - url: http://www.nime.org/proceedings/2009/nime2009_336.pdf - year: 2009 + pages: 35--38 + title: A Survey of Real-Time MIDI Performance + url: http://www.nime.org/proceedings/2004/nime2004_035.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Bukvic2009 - address: 'Pittsburgh, PA, United States' - author: 'Bukvic, Ivika and Standley, Eric' - bibtex: "@inproceedings{Bukvic2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Bukvic, Ivika and Standley, Eric},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177487},\n issn = {2220-4806},\n keywords = {nime09},\n pages\ - \ = {337--337},\n title = {Elemental \\& Cyrene Reefs},\n url = {http://www.nime.org/proceedings/2009/nime2009_337.pdf},\n\ - \ year = {2009}\n}\n" + ID: Cont2004 + abstract: 'In this paper, we describe an adaptive approach to gesture mapping for + musical applications which serves as a mapping system for music instrument design. + A neural network approach is chosen for this goal and all the required interfaces + and abstractions are developed and demonstrated in the Pure Data environment. 
+ In this paper, we will focus on neural network representation and implementation + in a real-time musical environment. This adaptive mapping is evaluated in different + static and dynamic situations by a network of sensors sampled at a rate of 200Hz + in real-time. Finally, some remarks are given on the network design and future + works. ' + address: 'Hamamatsu, Japan' + author: 'Cont, Arshia and Coduys, Thierry and Henry, Cyrille' + bibtex: "@inproceedings{Cont2004,\n abstract = {In this paper, we describe an adaptive\ + \ approach to gesture mapping for musical applications which serves as a mapping\ + \ system for music instrument design. A neural network approach is chosen for\ + \ this goal and all the required interfaces and abstractions are developed and\ + \ demonstrated in the Pure Data environment. In this paper, we will focus on neural\ + \ network representation and implementation in a real-time musical environment.\ + \ This adaptive mapping is evaluated in different static and dynamic situations\ + \ by a network of sensors sampled at a rate of 200Hz in real-time. Finally, some\ + \ remarks are given on the network design and future works. 
},\n address = {Hamamatsu,\ + \ Japan},\n author = {Cont, Arshia and Coduys, Thierry and Henry, Cyrille},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176589},\n issn = {2220-4806},\n\ + \ keywords = {Real-time gesture control, adaptive interfaces, Sensor and actuator\ + \ technologies for musical applications, Musical mapping algorithms and intelligent\ + \ controllers, Pure Data.},\n pages = {39--42},\n title = {Real-time Gesture Mapping\ + \ in Pd Environment using Neural Networks},\n url = {http://www.nime.org/proceedings/2004/nime2004_039.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177487 + doi: 10.5281/zenodo.1176589 issn: 2220-4806 - keywords: nime09 - pages: 337--337 - title: Elemental \& Cyrene Reefs - url: http://www.nime.org/proceedings/2009/nime2009_337.pdf - year: 2009 + keywords: 'Real-time gesture control, adaptive interfaces, Sensor and actuator technologies + for musical applications, Musical mapping algorithms and intelligent controllers, + Pure Data.' 
+ pages: 39--42 + title: Real-time Gesture Mapping in Pd Environment using Neural Networks + url: http://www.nime.org/proceedings/2004/nime2004_039.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: GreshamLancaster2009 - address: 'Pittsburgh, PA, United States' - author: 'Gresham-Lancaster, Scot and Bull, Steve' - bibtex: "@inproceedings{GreshamLancaster2009,\n address = {Pittsburgh, PA, United\ - \ States},\n author = {Gresham-Lancaster, Scot and Bull, Steve},\n booktitle =\ + ID: Talmudi2004 + abstract: 'This paper presents computer experiments concerning the decentralized + pianola, a hypothetical mechanical music instrument, whose large-scale musical + behavior is the result of local physical interactions between simple elements.Traditional + mechanical music instruments like the pianola and the music box rely for their + operation on the separation between a sequential memory unit and an execution + unit. In a decentralized mechanical instrument, musical memory is an emergent + global property of the system, undistinguishable from the execution process. Such + a machine is botha score andan instrument. The paper starts by discussing the + difference between sequential memory systems and systems exhibiting emergent decentralized + musical behavior. Next, the use of particle system simulation for exploring virtual + decentralized instruments is demonstrated, and the architecture for a simple decentralized + instrument is outlined. The paper continues by describing the use of a genetic + algorithm for evolving decentralized instruments that reproduce a given musical + behavior.' + address: 'Hamamatsu, Japan' + author: 'Talmudi, Assaf K.' 
+ bibtex: "@inproceedings{Talmudi2004,\n abstract = {This paper presents computer\ + \ experiments concerning the decentralized pianola, a hypothetical mechanical\ + \ music instrument, whose large-scale musical behavior is the result of local\ + \ physical interactions between simple elements.Traditional mechanical music instruments\ + \ like the pianola and the music box rely for their operation on the separation\ + \ between a sequential memory unit and an execution unit. In a decentralized mechanical\ + \ instrument, musical memory is an emergent global property of the system, undistinguishable\ + \ from the execution process. Such a machine is botha score andan instrument.\ + \ The paper starts by discussing the difference between sequential memory systems\ + \ and systems exhibiting emergent decentralized musical behavior. Next, the use\ + \ of particle system simulation for exploring virtual decentralized instruments\ + \ is demonstrated, and the architecture for a simple decentralized instrument\ + \ is outlined. 
The paper continues by describing the use of a genetic algorithm\ + \ for evolving decentralized instruments that reproduce a given musical behavior.},\n\ + \ address = {Hamamatsu, Japan},\n author = {Talmudi, Assaf K.},\n booktitle =\ \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177561},\n issn = {2220-4806},\n keywords = {nime09},\n\ - \ pages = {338--338},\n title = {Cellphonia: 4'33},\n url = {http://www.nime.org/proceedings/2009/nime2009_338.pdf},\n\ - \ year = {2009}\n}\n" + \ doi = {10.5281/zenodo.1176675},\n issn = {2220-4806},\n pages = {43--46},\n\ + \ title = {The Decentralized Pianola: Evolving Mechanical Music Instruments using\ + \ a Genetic Algorithm},\n url = {http://www.nime.org/proceedings/2004/nime2004_043.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1177561 - issn: 2220-4806 - keywords: nime09 - pages: 338--338 - title: 'Cellphonia: 4''33' - url: http://www.nime.org/proceedings/2009/nime2009_338.pdf - year: 2009 + Expression + doi: 10.5281/zenodo.1176675 + issn: 2220-4806 + pages: 43--46 + title: 'The Decentralized Pianola: Evolving Mechanical Music Instruments using a + Genetic Algorithm' + url: http://www.nime.org/proceedings/2004/nime2004_043.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Overholt2009 - address: 'Pittsburgh, PA, United States' - author: 'Overholt, Dan and Lahey, Byron and Skriver Hansen, Anne-Marie and Burleson, - Winslow and Norrgaard Jensen, Camilla' - bibtex: "@inproceedings{Overholt2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Overholt, Dan and Lahey, Byron and Skriver Hansen, Anne-Marie and\ - \ Burleson, Winslow and Norrgaard Jensen, Camilla},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177649},\n issn = {2220-4806},\n 
keywords = {nime09},\n\ - \ pages = {339--339},\n title = {Pendaphonics},\n url = {http://www.nime.org/proceedings/2009/nime2009_339.pdf},\n\ - \ year = {2009}\n}\n" + ID: Mandelis2004 + abstract: 'This paper describes the use of evolutionary and artificial life techniques + in sound design and the development of performance mapping to facilitate the real-time + manipulation of such sounds through some input device controlled by the performer. + A concrete example of such a system is described which allows musicians without + detailed knowledge and experience of sound synthesis techniques to interactively + develop new sounds and performance manipulation mappings according to their own + aesthetic judgements. Experiences with the system are discussed. ' + address: 'Hamamatsu, Japan' + author: 'Mandelis, James and Husbands, Phil' + bibtex: "@inproceedings{Mandelis2004,\n abstract = {This paper describes the use\ + \ of evolutionary and artificial life techniques in sound design and the development\ + \ of performance mapping to facilitate the real-time manipulation of such sounds\ + \ through some input device controlled by the performer. A concrete example of\ + \ such a system is described which allows musicians without detailed knowledge\ + \ and experience of sound synthesis techniques to interactively develop new sounds\ + \ and performance manipulation mappings according to their own aesthetic judgements.\ + \ Experiences with the system are discussed. },\n address = {Hamamatsu, Japan},\n\ + \ author = {Mandelis, James and Husbands, Phil},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176635},\n issn = {2220-4806},\n keywords = {musical interaction,performance\ + \ mapping,sound synthesis},\n pages = {47--50},\n title = {Don't Just Play it,\ + \ Grow it! 
: Breeding Sound Synthesis and Performance Mappings},\n url = {http://www.nime.org/proceedings/2004/nime2004_047.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177649 + doi: 10.5281/zenodo.1176635 issn: 2220-4806 - keywords: nime09 - pages: 339--339 - title: Pendaphonics - url: http://www.nime.org/proceedings/2009/nime2009_339.pdf - year: 2009 + keywords: 'musical interaction,performance mapping,sound synthesis' + pages: 47--50 + title: 'Don''t Just Play it, Grow it! : Breeding Sound Synthesis and Performance + Mappings' + url: http://www.nime.org/proceedings/2004/nime2004_047.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Smallwood2009 - address: 'Pittsburgh, PA, United States' - author: 'Smallwood, Scott' - bibtex: "@inproceedings{Smallwood2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Smallwood, Scott},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177677},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {340--340},\n title = {Sound\ - \ Lanterns},\n url = {http://www.nime.org/proceedings/2009/nime2009_340.pdf},\n\ - \ year = {2009}\n}\n" + ID: Shatin2004 + abstract: 'In this report, we discuss Tree Music, an interactive computer music + installation created using GAIA (Graphical Audio Interface Application), a new + open-source interface for controlling the RTcmix synthesis and effects processing + engine. Tree Music, commissioned by the University of Virginia Art Museum, used + a wireless camera with a wide-angle lens to capture motion and occlusion data + from exhibit visitors. We show how GAIA was used to structure and navigate the + compositional space, and how this program supports both graphical and text-based + programming in the same application. GAIA provides a GUI which combines two open-source + applications: RTcmix and Perl.' 
+ address: 'Hamamatsu, Japan' + author: 'Shatin, Judith and Topper, David' + bibtex: "@inproceedings{Shatin2004,\n abstract = {In this report, we discuss Tree\ + \ Music, an interactive computer music installation created using GAIA (Graphical\ + \ Audio Interface Application), a new open-source interface for controlling the\ + \ RTcmix synthesis and effects processing engine. Tree Music, commissioned by\ + \ the University of Virginia Art Museum, used a wireless camera with a wide-angle\ + \ lens to capture motion and occlusion data from exhibit visitors. We show how\ + \ GAIA was used to structure and navigate the compositional space, and how this\ + \ program supports both graphical and text-based programming in the same application.\ + \ GAIA provides a GUI which combines two open-source applications: RTcmix and\ + \ Perl.},\n address = {Hamamatsu, Japan},\n author = {Shatin, Judith and Topper,\ + \ David},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176663},\n issn = {2220-4806},\n\ + \ keywords = {Composition, new interfaces, interactive systems, open source, Real\ + \ time audio, GUI controllers, video tracking},\n pages = {51--54},\n title =\ + \ {Tree Music: Composing with GAIA},\n url = {http://www.nime.org/proceedings/2004/nime2004_051.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177677 + doi: 10.5281/zenodo.1176663 issn: 2220-4806 - keywords: nime09 - pages: 340--340 - title: Sound Lanterns - url: http://www.nime.org/proceedings/2009/nime2009_340.pdf - year: 2009 + keywords: 'Composition, new interfaces, interactive systems, open source, Real time + audio, GUI controllers, video tracking' + pages: 51--54 + title: 'Tree Music: Composing with GAIA' + url: http://www.nime.org/proceedings/2004/nime2004_051.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: 
Stearns2009 - address: 'Pittsburgh, PA, United States' - author: 'Stearns, Phillip' - bibtex: "@inproceedings{Stearns2009,\n address = {Pittsburgh, PA, United States},\n\ - \ author = {Stearns, Phillip},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177687},\n\ - \ issn = {2220-4806},\n keywords = {nime09},\n pages = {341--341},\n title = {AANN:\ - \ Artificial Analog Neural Network},\n url = {http://www.nime.org/proceedings/2009/nime2009_341.pdf},\n\ - \ year = {2009}\n}\n" + ID: DArcangelo2004 + abstract: 'This essay outlines a framework for understanding newmusical compositions + and performances that utilizepre-existing sound recordings. In attempting toarticulate + why musicians are increasingly using soundrecordings in their creative work, the + author calls fornew performance tools that enable the dynamic use ofpre-recorded + music. ' + address: 'Hamamatsu, Japan' + author: 'D''Arcangelo, Gideon' + bibtex: "@inproceedings{DArcangelo2004,\n abstract = {This essay outlines a framework\ + \ for understanding newmusical compositions and performances that utilizepre-existing\ + \ sound recordings. In attempting toarticulate why musicians are increasingly\ + \ using soundrecordings in their creative work, the author calls fornew performance\ + \ tools that enable the dynamic use ofpre-recorded music. 
},\n address = {Hamamatsu,\ + \ Japan},\n author = {D'Arcangelo, Gideon},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176591},\n issn = {2220-4806},\n keywords = {Call and response,\ + \ turntablism, DJ tools, oral culture},\n pages = {55--58},\n title = {Recycling\ + \ Music, Answering Back: Toward an Oral Tradition of Electronic Music},\n url\ + \ = {http://www.nime.org/proceedings/2004/nime2004_055.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177687 + doi: 10.5281/zenodo.1176591 issn: 2220-4806 - keywords: nime09 - pages: 341--341 - title: 'AANN: Artificial Analog Neural Network' - url: http://www.nime.org/proceedings/2009/nime2009_341.pdf - year: 2009 + keywords: 'Call and response, turntablism, DJ tools, oral culture' + pages: 55--58 + title: 'Recycling Music, Answering Back: Toward an Oral Tradition of Electronic + Music' + url: http://www.nime.org/proceedings/2004/nime2004_055.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Overholt2011 - abstract: 'The Overtone Fiddle is a new violin-family instrument that incorporates - electronic sensors, integrated DSP, and physical actuation of the acoustic body. - An embedded tactile sound transducer creates extra vibrations in the body of the - Overtone Fiddle, allowing performer control and sensation via both traditional - violin techniques, as well as extended playing techniques that incorporate shared - man/machine control of the resulting sound. A magnetic pickup system is mounted - to the end of the fiddle''s fingerboard in order to detect the signals from the - vibrating strings, deliberately not capturing vibrations from the full body of - the instrument. 
This focused sensing approach allows less restrained use of DSP-generated - feedback signals, as there is very little direct leakage from the actuator embedded - in the body of the instrument back to the pickup. ' - address: 'Oslo, Norway' - author: 'Overholt, Dan' - bibtex: "@inproceedings{Overholt2011,\n abstract = {The Overtone Fiddle is a new\ - \ violin-family instrument that incorporates electronic sensors, integrated DSP,\ - \ and physical actuation of the acoustic body. An embedded tactile sound transducer\ - \ creates extra vibrations in the body of the Overtone Fiddle, allowing performer\ - \ control and sensation via both traditional violin techniques, as well as extended\ - \ playing techniques that incorporate shared man/machine control of the resulting\ - \ sound. A magnetic pickup system is mounted to the end of the fiddle's fingerboard\ - \ in order to detect the signals from the vibrating strings, deliberately not\ - \ capturing vibrations from the full body of the instrument. This focused sensing\ - \ approach allows less restrained use of DSP-generated feedback signals, as there\ - \ is very little direct leakage from the actuator embedded in the body of the\ - \ instrument back to the pickup. 
},\n address = {Oslo, Norway},\n author = {Overholt,\ - \ Dan},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178127},\n issn = {2220-4806},\n\ - \ keywords = {Actuated Musical Instruments, Hybrid Instruments, Active Acoustics,\ - \ Electronic Violin },\n pages = {30--33},\n presentation-video = {https://vimeo.com/26795157/},\n\ - \ title = {The Overtone Fiddle: an Actuated Acoustic Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_004.pdf},\n\ - \ year = {2011}\n}\n" + ID: Jorda2004 + abstract: 'When envisaging new digital instruments, designers do not have to limit + themselves to their sonic capabilities (which can be absolutely any), not even + to their algorithmic power; they must be also especially careful about the instruments'' + conceptual capabilities, to the ways instruments impose or suggest to their players + new ways of thinking, new ways of establishing relations, new ways of interacting, + new ways of organizing time and textures; new ways, in short, of playing new musics. + This article explores the dynamic relation that builds between the player and + the instrument, introducing concepts such as efficiency, apprenticeship and learning + curve It aims at constructing a framework in which the possibilities and the diversity + of music instruments as well as the possibilities and the expressive freedom of + human music performers could start being evaluated. 
' + address: 'Hamamatsu, Japan' + author: 'Jordà, Sergi' + bibtex: "@inproceedings{Jorda2004,\n abstract = {When envisaging new digital instruments,\ + \ designers do not have to limit themselves to their sonic capabilities (which\ + \ can be absolutely any), not even to their algorithmic power; they must be also\ + \ especially careful about the instruments' conceptual capabilities, to the ways\ + \ instruments impose or suggest to their players new ways of thinking, new ways\ + \ of establishing relations, new ways of interacting, new ways of organizing time\ + \ and textures; new ways, in short, of playing new musics. This article explores\ + \ the dynamic relation that builds between the player and the instrument, introducing\ + \ concepts such as efficiency, apprenticeship and learning curve It aims at constructing\ + \ a framework in which the possibilities and the diversity of music instruments\ + \ as well as the possibilities and the expressive freedom of human music performers\ + \ could start being evaluated. 
},\n address = {Hamamatsu, Japan},\n author = {Jord\\\ + `{a}, Sergi},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176619},\n issn\ + \ = {2220-4806},\n keywords = {Musical instruments design, learning curve, apprenticeship,\ + \ musical efficiency.},\n pages = {59--63},\n title = {Digital Instruments and\ + \ Players: Part I -- Efficiency and Apprenticeship},\n url = {http://www.nime.org/proceedings/2004/nime2004_059.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178127 + doi: 10.5281/zenodo.1176619 issn: 2220-4806 - keywords: 'Actuated Musical Instruments, Hybrid Instruments, Active Acoustics, Electronic - Violin ' - pages: 30--33 - presentation-video: https://vimeo.com/26795157/ - title: 'The Overtone Fiddle: an Actuated Acoustic Instrument' - url: http://www.nime.org/proceedings/2011/nime2011_004.pdf - year: 2011 + keywords: 'Musical instruments design, learning curve, apprenticeship, musical efficiency.' + pages: 59--63 + title: 'Digital Instruments and Players: Part I -- Efficiency and Apprenticeship' + url: http://www.nime.org/proceedings/2004/nime2004_059.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Montag2011 - abstract: 'During the past decade, multi-touch surfaces have emerged as valuable - tools for collaboration, display, interaction, and musical expression. Unfortunately, - they tend to be costly and often suffer from two drawbacks for music performance:(1) - relatively high latency owing to their sensing mechanism, and (2) lack of haptic - feedback. We analyze the latency present in several current multi-touch platforms, - and we describe a new custom system that reduces latency to an average of 30 ms - while providing programmable haptic feed-back to the user. The paper concludes - with a description of ongoing and future work.' 
- address: 'Oslo, Norway' - author: 'Montag, Matthew and Sullivan, Stefan and Dickey, Scott and Leider, Colby' - bibtex: "@inproceedings{Montag2011,\n abstract = {During the past decade, multi-touch\ - \ surfaces have emerged as valuable tools for collaboration, display, interaction,\ - \ and musical expression. Unfortunately, they tend to be costly and often suffer\ - \ from two drawbacks for music performance:(1) relatively high latency owing to\ - \ their sensing mechanism, and (2) lack of haptic feedback. We analyze the latency\ - \ present in several current multi-touch platforms, and we describe a new custom\ - \ system that reduces latency to an average of 30 ms while providing programmable\ - \ haptic feed-back to the user. The paper concludes with a description of ongoing\ - \ and future work.},\n address = {Oslo, Norway},\n author = {Montag, Matthew and\ - \ Sullivan, Stefan and Dickey, Scott and Leider, Colby},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178115},\n issn = {2220-4806},\n keywords = {multi-touch,\ - \ haptics, frustrated total internal reflection, music performance, music composition,\ - \ latency, DIY },\n pages = {8--13},\n presentation-video = {https://vimeo.com/26799018/},\n\ - \ title = {A Low-Cost, Low-Latency Multi-Touch Table with Haptic Feedback for\ - \ Musical Applications},\n url = {http://www.nime.org/proceedings/2011/nime2011_008.pdf},\n\ - \ year = {2011}\n}\n" + ID: Pashenkov2004 + abstract: 'This report presents a novel interface for musical performance which + utilizes a record-player turntable augmented with a computation engine and a high-density + optical sensing array. The turntable functions as a standalone step sequencer + for MIDI events transmitted to a computer or another device and it is programmed + in real-time using visual disks. 
The program instructions are represented on printed + paper disks directly as characters of English alphabet that could be read by human + as effectively as they are picked up by the machine''s optical cartridge. The + result is a tangible interface that allows the user to manipulate pre-arranged + musical material by hand, by adding together instrumental tracks to form a dynamic + mix. A functional implementation of this interface is discussed in view of historical + background and other examples of electronic instruments for music creation and + performance incorporating optical turntable as a central element.' + address: 'Hamamatsu, Japan' + author: 'Pashenkov, Nikita' + bibtex: "@inproceedings{Pashenkov2004,\n abstract = {This report presents a novel\ + \ interface for musical performance which utilizes a record-player turntable augmented\ + \ with a computation engine and a high-density optical sensing array. The turntable\ + \ functions as a standalone step sequencer for MIDI events transmitted to a computer\ + \ or another device and it is programmed in real-time using visual disks. The\ + \ program instructions are represented on printed paper disks directly as characters\ + \ of English alphabet that could be read by human as effectively as they are picked\ + \ up by the machine's optical cartridge. The result is a tangible interface that\ + \ allows the user to manipulate pre-arranged musical material by hand, by adding\ + \ together instrumental tracks to form a dynamic mix. 
A functional implementation\ + \ of this interface is discussed in view of historical background and other examples\ + \ of electronic instruments for music creation and performance incorporating optical\ + \ turntable as a central element.},\n address = {Hamamatsu, Japan},\n author =\ + \ {Pashenkov, Nikita},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176651},\n\ + \ issn = {2220-4806},\n keywords = {Interaction, visualization, tangible interface,\ + \ controllers, optical turntable, performance.},\n pages = {64--67},\n title =\ + \ {A New Mix of Forgotten Technology: Sound Generation, Sequencing and Performance\ + \ Using an Optical Turntable},\n url = {http://www.nime.org/proceedings/2004/nime2004_064.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178115 + doi: 10.5281/zenodo.1176651 issn: 2220-4806 - keywords: 'multi-touch, haptics, frustrated total internal reflection, music performance, - music composition, latency, DIY ' - pages: 8--13 - presentation-video: https://vimeo.com/26799018/ - title: 'A Low-Cost, Low-Latency Multi-Touch Table with Haptic Feedback for Musical - Applications' - url: http://www.nime.org/proceedings/2011/nime2011_008.pdf - year: 2011 + keywords: 'Interaction, visualization, tangible interface, controllers, optical + turntable, performance.' + pages: 64--67 + title: 'A New Mix of Forgotten Technology: Sound Generation, Sequencing and Performance + Using an Optical Turntable' + url: http://www.nime.org/proceedings/2004/nime2004_064.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Shear2011 - abstract: 'The Electromagnetically Sustained Rhodes Piano is an augmentation of - the original instrument with additional control over the amplitude envelope of - individual notes. 
Thisincludes slow attacks and infinite sustain while preservingthe - familiar spectral qualities of this classic electromechanical piano. These additional - parameters are controlled withaftertouch on the existing keyboard, extending standardpiano - technique. Two sustain methods were investigated,driving the actuator first with - a pure sine wave, and secondwith the output signal of the sensor. A special isolationmethod - effectively decouples the sensors from the actuatorsand tames unruly feedback - in the high-gain signal path.' - address: 'Oslo, Norway' - author: 'Shear, Greg and Wright, Matthew' - bibtex: "@inproceedings{Shear2011,\n abstract = {The Electromagnetically Sustained\ - \ Rhodes Piano is an augmentation of the original instrument with additional control\ - \ over the amplitude envelope of individual notes. Thisincludes slow attacks and\ - \ infinite sustain while preservingthe familiar spectral qualities of this classic\ - \ electromechanical piano. These additional parameters are controlled withaftertouch\ - \ on the existing keyboard, extending standardpiano technique. Two sustain methods\ - \ were investigated,driving the actuator first with a pure sine wave, and secondwith\ - \ the output signal of the sensor. 
A special isolationmethod effectively decouples\ - \ the sensors from the actuatorsand tames unruly feedback in the high-gain signal\ - \ path.},\n address = {Oslo, Norway},\n author = {Shear, Greg and Wright, Matthew},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178161},\n issn = {2220-4806},\n\ - \ keywords = {Rhodes, keyboard, electromagnetic, sustain, augmented instrument,\ - \ feedback, aftertouch },\n pages = {14--17},\n presentation-video = {https://vimeo.com/26802504/},\n\ - \ title = {The Electromagnetically Sustained Rhodes Piano},\n url = {http://www.nime.org/proceedings/2011/nime2011_014.pdf},\n\ - \ year = {2011}\n}\n" + ID: Lee2004 + abstract: 'This paper describes the first system designed to allow children to conduct + an audio and video recording of an orchestra. No prior music experience is required + to control the orchestra, and the system uses an advanced algorithm to time stretch + the audio in real-time at high quality and without altering the pitch. We will + discuss the requirements and challenges of designing an interface to target our + particular user group (children), followed by some system implementation details. + An overview of the algorithm used for audio time stretching will also be presented. + We are currently using this technology to study and compare professional and non-professional + conducting behavior, and its implications when designing new interfaces for multimedia. + You''re the Conductor is currently a successful exhibit at the Children''s Museum + in Boston, USA.' + address: 'Hamamatsu, Japan' + author: 'Lee, Eric and Nakra, Teresa M. 
and Borchers, Jan' + bibtex: "@inproceedings{Lee2004,\n abstract = {This paper describes the first system\ + \ designed to allow children to conduct an audio and video recording of an orchestra.\ + \ No prior music experience is required to control the orchestra, and the system\ + \ uses an advanced algorithm to time stretch the audio in real-time at high quality\ + \ and without altering the pitch. We will discuss the requirements and challenges\ + \ of designing an interface to target our particular user group (children), followed\ + \ by some system implementation details. An overview of the algorithm used for\ + \ audio time stretching will also be presented. We are currently using this technology\ + \ to study and compare professional and non-professional conducting behavior,\ + \ and its implications when designing new interfaces for multimedia. You're the\ + \ Conductor is currently a successful exhibit at the Children's Museum in Boston,\ + \ USA.},\n address = {Hamamatsu, Japan},\n author = {Lee, Eric and Nakra, Teresa\ + \ M. 
and Borchers, Jan},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176629},\n\ + \ issn = {2220-4806},\n keywords = {conducting systems,design patterns,gesture\ + \ recogni-,interactive exhibits,real-time audio stretching,tion},\n pages = {68--73},\n\ + \ title = {You're The Conductor: A Realistic Interactive Conducting System for\ + \ Children},\n url = {http://www.nime.org/proceedings/2004/nime2004_068.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178161 + doi: 10.5281/zenodo.1176629 issn: 2220-4806 - keywords: 'Rhodes, keyboard, electromagnetic, sustain, augmented instrument, feedback, - aftertouch ' - pages: 14--17 - presentation-video: https://vimeo.com/26802504/ - title: The Electromagnetically Sustained Rhodes Piano - url: http://www.nime.org/proceedings/2011/nime2011_014.pdf - year: 2011 + keywords: 'conducting systems,design patterns,gesture recogni-,interactive exhibits,real-time + audio stretching,tion' + pages: 68--73 + title: 'You''re The Conductor: A Realistic Interactive Conducting System for Children' + url: http://www.nime.org/proceedings/2004/nime2004_068.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Pardue2011 - abstract: 'This paper describes the motivation and construction of Gamelan Elektrika, - a new electronic gamelan modeled after a Balinese Gong Kebyar. The first of its - kind, Elektrika consists of seven instruments acting as MIDI controllers accompanied - by traditional percussion and played by 11 or more performers following Balinese - performance practice. Three main percussive instrument designs were executed using - a combination of force sensitive resistors, piezos, and capacitive sensing. While - the instrument interfaces are designedto play interchangeably with the original, - the sound andt ravel possiblilities they enable are tremendous. 
MIDI enables a - massive new sound palette with new scales beyond the quirky traditional tuning - and non-traditional sounds. It also allows simplified transcription for an aurally - taught tradition. Significantly, it reduces the transportation challenges of a - previously large and heavy ensemble, creating opportunities for wider audiences - to experience Gong Kebyar''s enchanting sound. True to the spirit of oneness in - Balinese music, as one of the first large all-MIDI ensembles, ElekTrika challenges - performers to trust silent instruments and develop an understanding of highly - intricate and interlocking music not through the sound of the individual, but - through the sound of the whole.' - address: 'Oslo, Norway' - author: 'Pardue, Laurel S. and Boch, Andrew and Boch, Matt and Southworth, Christine - and Rigopulos, Alex' - bibtex: "@inproceedings{Pardue2011,\n abstract = {This paper describes the motivation\ - \ and construction of Gamelan Elektrika, a new electronic gamelan modeled after\ - \ a Balinese Gong Kebyar. The first of its kind, Elektrika consists of seven instruments\ - \ acting as MIDI controllers accompanied by traditional percussion and played\ - \ by 11 or more performers following Balinese performance practice. Three main\ - \ percussive instrument designs were executed using a combination of force sensitive\ - \ resistors, piezos, and capacitive sensing. While the instrument interfaces are\ - \ designedto play interchangeably with the original, the sound andt ravel possiblilities\ - \ they enable are tremendous. MIDI enables a massive new sound palette with new\ - \ scales beyond the quirky traditional tuning and non-traditional sounds. It also\ - \ allows simplified transcription for an aurally taught tradition. Significantly,\ - \ it reduces the transportation challenges of a previously large and heavy ensemble,\ - \ creating opportunities for wider audiences to experience Gong Kebyar's enchanting\ - \ sound. 
True to the spirit of oneness in Balinese music, as one of the first\ - \ large all-MIDI ensembles, ElekTrika challenges performers to trust silent instruments\ - \ and develop an understanding of highly intricate and interlocking music not\ - \ through the sound of the individual, but through the sound of the whole.},\n\ - \ address = {Oslo, Norway},\n author = {Pardue, Laurel S. and Boch, Andrew and\ - \ Boch, Matt and Southworth, Christine and Rigopulos, Alex},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178131},\n issn = {2220-4806},\n keywords = {bali, gamelan,\ - \ musical instrument design, MIDI ensemble },\n pages = {18--23},\n presentation-video\ - \ = {https://vimeo.com/26803278/},\n title = {Gamelan Elektrika: An Electronic\ - \ Balinese Gamelan},\n url = {http://www.nime.org/proceedings/2011/nime2011_018.pdf},\n\ - \ year = {2011}\n}\n" + ID: OModhrain2004 + abstract: 'The PebbleBox and the CrumbleBag are examples of a granular interaction + paradigm, in which the manipulation ofphysical grains of arbitrary material becomes + the basis forinteracting with granular sound synthesis models. The soundsmade + by the grains as they are manipulated are analysed,and parameters such as grain + rate, grain amplitude andgrain density are extracted. These parameters are then + usedto control the granulation of arbitrary sound samples in realtime. In this + way, a direct link is made between the haptic sensation of interacting with grains + and the control ofgranular sounds.' + address: 'Hamamatsu, Japan' + author: 'O''Modhrain, Sile and Essl, Georg' + bibtex: "@inproceedings{OModhrain2004,\n abstract = {The PebbleBox and the CrumbleBag\ + \ are examples of a granular interaction paradigm, in which the manipulation ofphysical\ + \ grains of arbitrary material becomes the basis forinteracting with granular\ + \ sound synthesis models. 
The soundsmade by the grains as they are manipulated\ + \ are analysed,and parameters such as grain rate, grain amplitude andgrain density\ + \ are extracted. These parameters are then usedto control the granulation of arbitrary\ + \ sound samples in realtime. In this way, a direct link is made between the haptic\ + \ sensation of interacting with grains and the control ofgranular sounds.},\n\ + \ address = {Hamamatsu, Japan},\n author = {O'Modhrain, Sile and Essl, Georg},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176647},\n issn = {2220-4806},\n\ + \ keywords = {Musical instrument, granular synthesis, haptic},\n pages = {74--79},\n\ + \ title = {PebbleBox and CrumbleBag: Tactile Interfaces for Granular Synthesis},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_074.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178131 + doi: 10.5281/zenodo.1176647 issn: 2220-4806 - keywords: 'bali, gamelan, musical instrument design, MIDI ensemble ' - pages: 18--23 - presentation-video: https://vimeo.com/26803278/ - title: 'Gamelan Elektrika: An Electronic Balinese Gamelan' - url: http://www.nime.org/proceedings/2011/nime2011_018.pdf - year: 2011 + keywords: 'Musical instrument, granular synthesis, haptic' + pages: 74--79 + title: 'PebbleBox and CrumbleBag: Tactile Interfaces for Granular Synthesis' + url: http://www.nime.org/proceedings/2004/nime2004_074.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Lee2011 - abstract: 'This paper introduces Sonicstrument, a sound-based interface that traces - the user''s hand motions. 
Sonicstrument utilizes stereotypical acoustic transducers - (i.e., a pair of earphones and a microphone) for transmission and reception of - acoustic signals whose frequencies are within the highest area of human hearing - range that can rarely be perceived by most people. Being simpler in structure - and easier to implement than typical ultrasonic motion detectors with special - transducers, this system is robust and offers precise results without introducing - any undesired sonic disturbance to users. We describe the design and implementation - of Sonicstrument, evaluate its performance, and present two practical applications - of the system in music and interactive performance.' - address: 'Oslo, Norway' - author: 'Lee, Jeong-seob and Yeo, Woon Seung' - bibtex: "@inproceedings{Lee2011,\n abstract = {This paper introduces Sonicstrument,\ - \ a sound-based interface that traces the user's hand motions. Sonicstrument utilizes\ - \ stereotypical acoustic transducers (i.e., a pair of earphones and a microphone)\ - \ for transmission and reception of acoustic signals whose frequencies are within\ - \ the highest area of human hearing range that can rarely be perceived by most\ - \ people. 
Being simpler in structure and easier to implement than typical ultrasonic\ - \ motion detectors with special transducers, this system is robust and offers\ - \ precise results without introducing any undesired sonic disturbance to users.\ - \ We describe the design and implementation of Sonicstrument, evaluate its performance,\ - \ and present two practical applications of the system in music and interactive\ - \ performance.},\n address = {Oslo, Norway},\n author = {Lee, Jeong-seob and Yeo,\ - \ Woon Seung},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180259},\n issn\ - \ = {2220-4806},\n keywords = {Stereotypical transducers, audible sound, Doppler\ - \ effect, handfree interface, musical instrument, interactive performance },\n\ - \ pages = {24--27},\n presentation-video = {https://vimeo.com/26804455/},\n title\ - \ = {Sonicstrument : A Musical Interface with Stereotypical Acoustic Transducers},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_024.pdf},\n year = {2011}\n\ + ID: Paine2004 + abstract: 'New Interfaces for Musical Expression must speak to the nature of ''instrument'', + that is, it must always be understood that the interface binds to a complex musical + phenomenon. This paper explores the nature of engagement, the point of performance + that occurs when a human being engages with a computer based instrument. It asks + questions about the nature of the instrument in computer music and offers some + conceptual models for the mapping of gesture to sonic outcomes.' + address: 'Hamamatsu, Japan' + author: 'Paine, Garth' + bibtex: "@inproceedings{Paine2004,\n abstract = {New Interfaces for Musical Expression\ + \ must speak to the nature of 'instrument', that is, it must always be understood\ + \ that the interface binds to a complex musical phenomenon. 
This paper explores\ + \ the nature of engagement, the point of performance that occurs when a human\ + \ being engages with a computer based instrument. It asks questions about the\ + \ nature of the instrument in computer music and offers some conceptual models\ + \ for the mapping of gesture to sonic outcomes.},\n address = {Hamamatsu, Japan},\n\ + \ author = {Paine, Garth},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176649},\n\ + \ issn = {2220-4806},\n keywords = {dynamic,dynamic morphology,gesture,interaction,mapping,mind,music,orchestration,spectral\ + \ morphology},\n pages = {80--86},\n title = {Gesture and Musical Interaction\ + \ : Interactive Engagement Through Dynamic Morphology},\n url = {http://www.nime.org/proceedings/2004/nime2004_080.pdf},\n\ + \ year = {2004}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1176649 + issn: 2220-4806 + keywords: 'dynamic,dynamic morphology,gesture,interaction,mapping,mind,music,orchestration,spectral + morphology' + pages: 80--86 + title: 'Gesture and Musical Interaction : Interactive Engagement Through Dynamic + Morphology' + url: http://www.nime.org/proceedings/2004/nime2004_080.pdf + year: 2004 + + +- ENTRYTYPE: inproceedings + ID: VanNort2004 + abstract: 'The choice of mapping strategies to effectively map controller variables + to sound synthesis algorithms is examined.Specifically, we look at continuous + mappings that have ageometric representation. Drawing from underlying mathematical + theory, this paper presents a way to compare mapping strategies, with the goal + of achieving an appropriatematch between mapping and musical performance context.This + method of comparison is applied to existing techniques,while a suggestion is offered + on how to integrate and extendthis work through a new implementation.' 
+ address: 'Hamamatsu, Japan' + author: 'Van Nort, Doug and Wanderley, Marcelo M. and Depalle, Philippe' + bibtex: "@inproceedings{VanNort2004,\n abstract = {The choice of mapping strategies\ + \ to effectively map controller variables to sound synthesis algorithms is examined.Specifically,\ + \ we look at continuous mappings that have ageometric representation. Drawing\ + \ from underlying mathematical theory, this paper presents a way to compare mapping\ + \ strategies, with the goal of achieving an appropriatematch between mapping and\ + \ musical performance context.This method of comparison is applied to existing\ + \ techniques,while a suggestion is offered on how to integrate and extendthis\ + \ work through a new implementation.},\n address = {Hamamatsu, Japan},\n author\ + \ = {Van Nort, Doug and Wanderley, Marcelo M. and Depalle, Philippe},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176681},\n issn = {2220-4806},\n keywords\ + \ = {Mapping, Interface Design, Interpolation, Computational Geometry},\n pages\ + \ = {87--91},\n title = {On the Choice of Mappings Based on Geometric Properties},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_087.pdf},\n year = {2004}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1180259 + doi: 10.5281/zenodo.1176681 issn: 2220-4806 - keywords: 'Stereotypical transducers, audible sound, Doppler effect, handfree interface, - musical instrument, interactive performance ' - pages: 24--27 - presentation-video: https://vimeo.com/26804455/ - title: 'Sonicstrument : A Musical Interface with Stereotypical Acoustic Transducers' - url: http://www.nime.org/proceedings/2011/nime2011_024.pdf - year: 2011 + keywords: 'Mapping, Interface Design, Interpolation, Computational Geometry' + pages: 87--91 + title: On the Choice of Mappings Based on Geometric 
Properties + url: http://www.nime.org/proceedings/2004/nime2004_087.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Smallwood2011 - abstract: "This paper describes recent developments in the creation of sound-making\ - \ instruments and devices powered by photovoltaic (PV) technologies. With the\ - \ rise of more efficient PV products in diverse packages, the possibilities for\ - \ creating solar-powered musical instruments, sound installations, and loudspeakers\ - \ are becoming increasingly realizable. This paper surveys past and recent developments\ - \ in this area, including several projects by the ,\n,\nauthor, and demonstrates\ - \ how the use of PV technologies can influence the creative process in unique\ - \ ways. In addition, this paper discusses how solar sound arts can enhance the\ - \ aesthetic direction taken by recent work in soundscape studies and acoustic\ - \ ecology. Finally, this paper will point towards future directions and possibilities\ - \ as PV technologies continue to evolve and improve in terms of performance, and\ - \ become more affordable. " - address: 'Oslo, Norway' - author: 'Smallwood, Scott' - bibtex: "@inproceedings{Smallwood2011,\n abstract = {This paper describes recent\ - \ developments in the creation of sound-making instruments and devices powered\ - \ by photovoltaic (PV) technologies. With the rise of more efficient PV products\ - \ in diverse packages, the possibilities for creating solar-powered musical instruments,\ - \ sound installations, and loudspeakers are becoming increasingly realizable.\ - \ This paper surveys past and recent developments in this area, including several\ - \ projects by the ,\n,\nauthor, and demonstrates how the use of PV technologies\ - \ can influence the creative process in unique ways. In addition, this paper discusses\ - \ how solar sound arts can enhance the aesthetic direction taken by recent work\ - \ in soundscape studies and acoustic ecology. 
Finally, this paper will point towards\ - \ future directions and possibilities as PV technologies continue to evolve and\ - \ improve in terms of performance, and become more affordable. },\n address =\ - \ {Oslo, Norway},\n author = {Smallwood, Scott},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178167},\n issn = {2220-4806},\n keywords = {Solar Sound\ - \ Arts, Circuit Bending, Hardware Hacking, Human-Computer Interface Design, Acoustic\ - \ Ecology, Sound Art, Electroacoustics, Laptop Orchestra, PV Technology },\n pages\ - \ = {28--31},\n title = {Solar Sound Arts: Creating Instruments and Devices Powered\ - \ by Photovoltaic Technologies},\n url = {http://www.nime.org/proceedings/2011/nime2011_028.pdf},\n\ - \ year = {2011}\n}\n" + ID: Sheehan2004 + abstract: 'This paper discusses some of the issues pertaining to the design of digital + musical instruments that are to effectively fill the role of traditional instruments + (i.e. those based on physical sound production mechanisms). The design and implementation + of a musical instrument that addresses some of these issues, using scanned synthesis + coupled to a "smart" physical system, is described.' + address: 'Hamamatsu, Japan' + author: 'Sheehan, Brian' + bibtex: "@inproceedings{Sheehan2004,\n abstract = {This paper discusses some of\ + \ the issues pertaining to the design of digital musical instruments that are\ + \ to effectively fill the role of traditional instruments (i.e. those based on\ + \ physical sound production mechanisms). 
The design and implementation of a musical\ + \ instrument that addresses some of these issues, using scanned synthesis coupled\ + \ to a \"smart\" physical system, is described.},\n address = {Hamamatsu, Japan},\n\ + \ author = {Sheehan, Brian},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176665},\n\ + \ issn = {2220-4806},\n keywords = {Digital musical instruments, real-time performance,\ + \ scanned synthesis, pd, tactile interfaces, sensors, Shapetape, mapping.},\n\ + \ pages = {92--95},\n title = {The Squiggle: A Digital Musical Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_092.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178167 + doi: 10.5281/zenodo.1176665 issn: 2220-4806 - keywords: 'Solar Sound Arts, Circuit Bending, Hardware Hacking, Human-Computer Interface - Design, Acoustic Ecology, Sound Art, Electroacoustics, Laptop Orchestra, PV Technology ' - pages: 28--31 - title: 'Solar Sound Arts: Creating Instruments and Devices Powered by Photovoltaic - Technologies' - url: http://www.nime.org/proceedings/2011/nime2011_028.pdf - year: 2011 + keywords: 'Digital musical instruments, real-time performance, scanned synthesis, + pd, tactile interfaces, sensors, Shapetape, mapping.' + pages: 92--95 + title: 'The Squiggle: A Digital Musical Instrument' + url: http://www.nime.org/proceedings/2004/nime2004_092.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Klugel2011 - abstract: 'This paper provides a discussion of how the electronic, solely ITbased - composition and performance of electronic music can besupported in realtime with - a collaborative application on a tabletopinterface, mediating between single-user - style music compositiontools and co-located collaborative music improvisation. 
- After having elaborated on the theoretical backgrounds of prerequisites ofco-located - collaborative tabletop applications as well as the common paradigms in music composition/notation, - we will review related work on novel IT approaches to music composition and improvisation. - Subsequently, we will present our prototypical implementation and the results.' - address: 'Oslo, Norway' - author: 'Klügel, Niklas and Frieß, Marc R. and Groh, Georg and Echtler, Florian' - bibtex: "@inproceedings{Klugel2011,\n abstract = {This paper provides a discussion\ - \ of how the electronic, solely ITbased composition and performance of electronic\ - \ music can besupported in realtime with a collaborative application on a tabletopinterface,\ - \ mediating between single-user style music compositiontools and co-located collaborative\ - \ music improvisation. After having elaborated on the theoretical backgrounds\ - \ of prerequisites ofco-located collaborative tabletop applications as well as\ - \ the common paradigms in music composition/notation, we will review related work\ - \ on novel IT approaches to music composition and improvisation. Subsequently,\ - \ we will present our prototypical implementation and the results.},\n address\ - \ = {Oslo, Norway},\n author = {Kl\\''{u}gel, Niklas and Frie\\ss, Marc R. 
and\ - \ Groh, Georg and Echtler, Florian},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178071},\n\ - \ issn = {2220-4806},\n keywords = {Tabletop Interface, Collaborative Music Composition,\ - \ Creativity Support },\n pages = {32--35},\n title = {An Approach to Collaborative\ - \ Music Composition},\n url = {http://www.nime.org/proceedings/2011/nime2011_032.pdf},\n\ - \ year = {2011}\n}\n" + ID: Gerhard2004 + abstract: 'This paper describes an approach to match visual and acoustic parameters + to produce an animated musical expression.Music may be generated to correspond + to animation, asdescribed here; imagery may be created to correspond tomusic; + or both may be developed simultaneously. This approach is intended to provide + new tools to facilitate bothcollaboration between visual artists and musicians + and examination of perceptual issues between visual and acousticmedia. As a proof-of-concept, + a complete example is developed with linear fractals as a basis for the animation, + andarranged rhythmic loops for the music. Since both visualand acoustic elements + in the example are generated fromconcise specifications, the potential of this + approach to create new works through parameter space exploration is accentuated, + however, there are opportunities for applicationto a wide variety of source material. + These additional applications are also discussed, along with issues encounteredin + development of the example.' + address: 'Hamamatsu, Japan' + author: 'Gerhard, David and Hepting, Daryl and Mckague, Matthew' + bibtex: "@inproceedings{Gerhard2004,\n abstract = {This paper describes an approach\ + \ to match visual and acoustic parameters to produce an animated musical expression.Music\ + \ may be generated to correspond to animation, asdescribed here; imagery may be\ + \ created to correspond tomusic; or both may be developed simultaneously. 
This\ + \ approach is intended to provide new tools to facilitate bothcollaboration between\ + \ visual artists and musicians and examination of perceptual issues between visual\ + \ and acousticmedia. As a proof-of-concept, a complete example is developed with\ + \ linear fractals as a basis for the animation, andarranged rhythmic loops for\ + \ the music. Since both visualand acoustic elements in the example are generated\ + \ fromconcise specifications, the potential of this approach to create new works\ + \ through parameter space exploration is accentuated, however, there are opportunities\ + \ for applicationto a wide variety of source material. These additional applications\ + \ are also discussed, along with issues encounteredin development of the example.},\n\ + \ address = {Hamamatsu, Japan},\n author = {Gerhard, David and Hepting, Daryl\ + \ and Mckague, Matthew},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176603},\n\ + \ issn = {2220-4806},\n keywords = {Multimedia creation and interaction, parameter\ + \ space, visualization, sonification.},\n pages = {96--99},\n title = {Exploration\ + \ of the Correspondence between Visual and Acoustic Parameter Spaces},\n url =\ + \ {http://www.nime.org/proceedings/2004/nime2004_096.pdf},\n year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178071 + doi: 10.5281/zenodo.1176603 issn: 2220-4806 - keywords: 'Tabletop Interface, Collaborative Music Composition, Creativity Support ' - pages: 32--35 - title: An Approach to Collaborative Music Composition - url: http://www.nime.org/proceedings/2011/nime2011_032.pdf - year: 2011 + keywords: 'Multimedia creation and interaction, parameter space, visualization, + sonification.' 
+ pages: 96--99 + title: Exploration of the Correspondence between Visual and Acoustic Parameter Spaces + url: http://www.nime.org/proceedings/2004/nime2004_096.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Gold2011 - abstract: 'Popular music (characterized by improvised instrumental parts, beat and - measure-level organization, and steady tempo) poses challenges for human-computer - music performance (HCMP). Pieces of music are typically rearrangeable on-the-fly - and involve a high degree of variation from ensemble to ensemble, and even between - rehearsal and performance. Computer systems aiming to participate in such ensembles - must therefore cope with a dynamic high-level structure in addition to the more - traditional problems of beat-tracking, score-following, and machine improvisation. - There are many approaches to integrating the components required to implement - dynamic human-computer music performance systems. This paper presents a reference - architecture designed to allow the typical sub-components (e.g. beat-tracking, - tempo prediction, improvisation) to be integrated in a consistent way, allowing - them to be combined and/or compared systematically. In addition, the paper presents - a dynamic score representation particularly suited to the demands of popular music - performance by computer. ' - address: 'Oslo, Norway' - author: 'Gold, Nicolas E. and Dannenberg, Roger B.' - bibtex: "@inproceedings{Gold2011,\n abstract = {Popular music (characterized by\ - \ improvised instrumental parts, beat and measure-level organization, and steady\ - \ tempo) poses challenges for human-computer music performance (HCMP). 
Pieces\ - \ of music are typically rearrangeable on-the-fly and involve a high degree of\ - \ variation from ensemble to ensemble, and even between rehearsal and performance.\ - \ Computer systems aiming to participate in such ensembles must therefore cope\ - \ with a dynamic high-level structure in addition to the more traditional problems\ - \ of beat-tracking, score-following, and machine improvisation. There are many\ - \ approaches to integrating the components required to implement dynamic human-computer\ - \ music performance systems. This paper presents a reference architecture designed\ - \ to allow the typical sub-components (e.g. beat-tracking, tempo prediction, improvisation)\ - \ to be integrated in a consistent way, allowing them to be combined and/or compared\ - \ systematically. In addition, the paper presents a dynamic score representation\ - \ particularly suited to the demands of popular music performance by computer.\ - \ },\n address = {Oslo, Norway},\n author = {Gold, Nicolas E. and Dannenberg,\ - \ Roger B.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178033},\n issn\ - \ = {2220-4806},\n keywords = {live performance,popular music,software design},\n\ - \ pages = {36--39},\n title = {A Reference Architecture and Score Representation\ - \ for Popular Music Human-Computer Music Performance Systems},\n url = {http://www.nime.org/proceedings/2011/nime2011_036.pdf},\n\ - \ year = {2011}\n}\n" + ID: Ramakrishnan2004 + abstract: 'Auracle is a "group instrument," controlled by the voice, for real-time, + interactive, distributed music making over the Internet. It is implemented in + the Java™ programming language using a combination of publicly available libraries + (JSyn and TransJam) and custom-built components. 
This paper describes how the + various pieces --- the voice analysis, network communication, and sound synthesis + --- are individually built and how they are combined to form Auracle.' + address: 'Hamamatsu, Japan' + author: 'Ramakrishnan, Chandrasekhar and Freeman, Jason and Varnik, Kristjan' + bibtex: "@inproceedings{Ramakrishnan2004,\n abstract = {Auracle is a \"group instrument,\"\ + \ controlled by the voice, for real-time, interactive, distributed music making\ + \ over the Internet. It is implemented in the Java™ programming language using\ + \ a combination of publicly available libraries (JSyn and TransJam) and custom-built\ + \ components. This paper describes how the various pieces --- the voice analysis,\ + \ network communication, and sound synthesis --- are individually built and how\ + \ they are combined to form Auracle.},\n address = {Hamamatsu, Japan},\n author\ + \ = {Ramakrishnan, Chandrasekhar and Freeman, Jason and Varnik, Kristjan},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176657},\n issn = {2220-4806},\n keywords\ + \ = {Interactive Music Systems, Networking and Control, Voice and Speech Analysis,\ + \ Auracle, JSyn, TransJam, Linear Prediction, Neural Networks, Voice Interface,\ + \ Open Sound Control},\n pages = {100--103},\n title = {The Architecture of Auracle:\ + \ a Real-Time, Distributed, Collaborative Instrument},\n url = {http://www.nime.org/proceedings/2004/nime2004_100.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178033 + doi: 10.5281/zenodo.1176657 issn: 2220-4806 - keywords: 'live performance,popular music,software design' - pages: 36--39 - title: A Reference Architecture and Score Representation for Popular Music Human-Computer - Music Performance Systems - url: http://www.nime.org/proceedings/2011/nime2011_036.pdf - year: 2011 
+ keywords: 'Interactive Music Systems, Networking and Control, Voice and Speech Analysis, + Auracle, JSyn, TransJam, Linear Prediction, Neural Networks, Voice Interface, + Open Sound Control' + pages: 100--103 + title: 'The Architecture of Auracle: a Real-Time, Distributed, Collaborative Instrument' + url: http://www.nime.org/proceedings/2004/nime2004_100.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Bokowiec2011 - abstract: 'V''OCT(Ritual) is a work for solo vocalist/performer and Bodycoder System, - composed in residency at Dartington College of Arts (UK) Easter 2010. This paper - looks at the technical and compositional methodologies used in the realization - of the work, in particular, the choices made with regard to the mapping of sensor - elements to various spatialization functions. Kinaesonics will be discussed in - relation to the coding of real-time one-to-one mapping of sound to gesture and - its expression in terms of hardware and software design. Four forms of expressivity - arising out of interactive work with the Bodycoder system will be identified. - How sonic (electro-acoustic), programmed, gestural (kinaesonic) and in terms of - the V''Oct(Ritual) vocal expressivities are constructed as pragmatic and tangible - elements within the compositional practice will be discussed and the subsequent - importance of collaboration with a performer will be exposed. ' - address: 'Oslo, Norway' - author: 'Bokowiec, Mark A.' - bibtex: "@inproceedings{Bokowiec2011,\n abstract = {V'OCT(Ritual) is a work for\ - \ solo vocalist/performer and Bodycoder System, composed in residency at Dartington\ - \ College of Arts (UK) Easter 2010. This paper looks at the technical and compositional\ - \ methodologies used in the realization of the work, in particular, the choices\ - \ made with regard to the mapping of sensor elements to various spatialization\ - \ functions. 
Kinaesonics will be discussed in relation to the coding of real-time\ - \ one-to-one mapping of sound to gesture and its expression in terms of hardware\ - \ and software design. Four forms of expressivity arising out of interactive work\ - \ with the Bodycoder system will be identified. How sonic (electro-acoustic),\ - \ programmed, gestural (kinaesonic) and in terms of the V'Oct(Ritual) vocal expressivities\ - \ are constructed as pragmatic and tangible elements within the compositional\ - \ practice will be discussed and the subsequent importance of collaboration with\ - \ a performer will be exposed. },\n address = {Oslo, Norway},\n author = {Bokowiec,\ - \ Mark A.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177967},\n issn\ - \ = {2220-4806},\n keywords = {Bodycoder, Kinaesonics, Expressivity, Gestural\ - \ Control, Interactive Performance Mechanisms, Collaboration. },\n pages = {40--43},\n\ - \ title = {V'OCT (Ritual): An Interactive Vocal Work for Bodycoder System and\ - \ 8~{C}hannel Spatialization},\n url = {http://www.nime.org/proceedings/2011/nime2011_040.pdf},\n\ - \ year = {2011}\n}\n" + ID: Miyashita2004 + abstract: 'In this paper, we propose Thermoscore, a musical score form-that dynamically + alters the temperature of the instrument/player interface. We developed the first + version of theThermoscore display by lining Peltier devices on piano keys.The + system is controlled by MIDI notes-on messages from anMIDI sequencer, so that + a composer can design songs that aresequences of temperature for each piano key. + We also discussmethodologies for composing with this system, and suggesttwo approaches. + The first is to make desirable keys (or otherkeys) hot. The second one uses chroma-profile, + that is, a radarchart representation of the frequency of pitch notations in the-piece. 
+ By making keys of the same chroma hot in reverse proportion to the value of the + chroma-profile, it is possible to-constrain the performer''s improvisation and + to bring the tonality space close to a certain piece.' + address: 'Hamamatsu, Japan' + author: 'Miyashita, Homei and Nishimoto, Kazushi' + bibtex: "@inproceedings{Miyashita2004,\n abstract = {In this paper, we propose Thermoscore,\ + \ a musical score form-that dynamically alters the temperature of the instrument/player\ + \ interface. We developed the first version of theThermoscore display by lining\ + \ Peltier devices on piano keys.The system is controlled by MIDI notes-on messages\ + \ from anMIDI sequencer, so that a composer can design songs that aresequences\ + \ of temperature for each piano key. We also discussmethodologies for composing\ + \ with this system, and suggesttwo approaches. The first is to make desirable\ + \ keys (or otherkeys) hot. The second one uses chroma-profile, that is, a radarchart\ + \ representation of the frequency of pitch notations in the-piece. 
By making keys\ + \ of the same chroma hot in reverse proportion to the value of the chroma-profile,\ + \ it is possible to-constrain the performer's improvisation and to bring the tonality\ + \ space close to a certain piece.},\n address = {Hamamatsu, Japan},\n author =\ + \ {Miyashita, Homei and Nishimoto, Kazushi},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176637},\n issn = {2220-4806},\n keywords = {musical score,\ + \ improvisation, peltier device, chroma profile},\n pages = {104--107},\n title\ + \ = {Thermoscore: A New-type Musical Score with Temperature Sensation},\n url\ + \ = {http://www.nime.org/proceedings/2004/nime2004_104.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177967 + doi: 10.5281/zenodo.1176637 issn: 2220-4806 - keywords: 'Bodycoder, Kinaesonics, Expressivity, Gestural Control, Interactive Performance - Mechanisms, Collaboration. ' - pages: 40--43 - title: 'V''OCT (Ritual): An Interactive Vocal Work for Bodycoder System and 8~Channel - Spatialization' - url: http://www.nime.org/proceedings/2011/nime2011_040.pdf - year: 2011 + keywords: 'musical score, improvisation, peltier device, chroma profile' + pages: 104--107 + title: 'Thermoscore: A New-type Musical Score with Temperature Sensation' + url: http://www.nime.org/proceedings/2004/nime2004_104.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Berthaut2011 - abstract: 'First Person Shooters are among the most played computer videogames. - They combine navigation, interaction and collaboration in3D virtual environments - using simple input devices, i.e. mouseand keyboard. In this paper, we study the - possibilities broughtby these games for musical interaction. 
We present the Couacs, - acollaborative multiprocess instrument which relies on interactiontechniques used - in FPS together with new techniques adding theexpressiveness required for musical - interaction. In particular, theFaders For All game mode allows musicians to perform - patternbased electronic compositions.' - address: 'Oslo, Norway' - author: 'Berthaut, Florent and Katayose, Haruhiro and Wakama, Hironori and Totani, - Naoyuki and Sato, Yuichi' - bibtex: "@inproceedings{Berthaut2011,\n abstract = {First Person Shooters are among\ - \ the most played computer videogames. They combine navigation, interaction and\ - \ collaboration in3D virtual environments using simple input devices, i.e. mouseand\ - \ keyboard. In this paper, we study the possibilities broughtby these games for\ - \ musical interaction. We present the Couacs, acollaborative multiprocess instrument\ - \ which relies on interactiontechniques used in FPS together with new techniques\ - \ adding theexpressiveness required for musical interaction. In particular, theFaders\ - \ For All game mode allows musicians to perform patternbased electronic compositions.},\n\ - \ address = {Oslo, Norway},\n author = {Berthaut, Florent and Katayose, Haruhiro\ - \ and Wakama, Hironori and Totani, Naoyuki and Sato, Yuichi},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177961},\n issn = {2220-4806},\n keywords = {the couacs,\ - \ fps, first person shooters, collaborative, 3D interaction, multiprocess instrument\ - \ },\n pages = {44--47},\n title = {First Person Shooters as Collaborative Multiprocess\ - \ Instruments},\n url = {http://www.nime.org/proceedings/2011/nime2011_044.pdf},\n\ - \ year = {2011}\n}\n" + ID: Serafin2004 + abstract: 'We present case studies of unusual instruments that share the same excitation + mechanism as that of the bowed string. 
The musical saw, Tibetan singing bow, glass + harmonica, and bowed cymbal all produce sound by rubbing a hard object on the + surface of the instrument. For each, we discuss the design of its physical model + and present a means for expressively controlling it. Finally, we propose a new + kind of generalized friction controller to be used in all these examples.' + address: 'Hamamatsu, Japan' + author: 'Serafin, Stefania and Young, Diana' + bibtex: "@inproceedings{Serafin2004,\n abstract = {We present case studies of unusual\ + \ instruments that share the same excitation mechanism as that of the bowed string.\ + \ The musical saw, Tibetan singing bow, glass harmonica, and bowed cymbal all\ + \ produce sound by rubbing a hard object on the surface of the instrument. For\ + \ each, we discuss the design of its physical model and present a means for expressively\ + \ controlling it. Finally, we propose a new kind of generalized friction controller\ + \ to be used in all these examples.},\n address = {Hamamatsu, Japan},\n author\ + \ = {Serafin, Stefania and Young, Diana},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176659},\n\ + \ issn = {2220-4806},\n pages = {108--111},\n title = {Toward a Generalized Friction\ + \ Controller: from the Bowed String to Unusual Musical Instruments},\n url = {http://www.nime.org/proceedings/2004/nime2004_108.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177961 + doi: 10.5281/zenodo.1176659 issn: 2220-4806 - keywords: 'the couacs, fps, first person shooters, collaborative, 3D interaction, - multiprocess instrument ' - pages: 44--47 - title: First Person Shooters as Collaborative Multiprocess Instruments - url: http://www.nime.org/proceedings/2011/nime2011_044.pdf - year: 2011 + pages: 108--111 + title: 'Toward a Generalized Friction Controller: 
from the Bowed String to Unusual + Musical Instruments' + url: http://www.nime.org/proceedings/2004/nime2004_108.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Hahnel2011 - address: 'Oslo, Norway' - author: 'H\''''{a}hnel, Tilo and Berndt, Axel' - bibtex: "@inproceedings{Hahnel2011,\n address = {Oslo, Norway},\n author = {H\\\ - ''{a}hnel, Tilo and Berndt, Axel},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178037},\n\ - \ issn = {2220-4806},\n keywords = {articula-,duration,dynamics,egales,loudness,notes\ - \ in,synthetic performance,timing,tion},\n pages = {48--51},\n title = {Studying\ - \ Interdependencies in Music Performance : An Interactive Tool},\n url = {http://www.nime.org/proceedings/2011/nime2011_048.pdf},\n\ - \ year = {2011}\n}\n" + ID: Zaborowski2004 + abstract: 'This paper describes ThumbTEC, a novel general purpose input device for + the thumb or finger that is useful in a wide variety of applications from music + to text entry. The device is made up of three switches in a row and one miniature + joystick on top of the middle switch. The combination of joystick direction and + switch(es) controls what note or alphanumeric character is selected by the finger. + Several applications are detailed.' + address: 'Hamamatsu, Japan' + author: 'Zaborowski, Philippe S.' + bibtex: "@inproceedings{Zaborowski2004,\n abstract = {This paper describes ThumbTEC,\ + \ a novel general purpose input device for the thumb or finger that is useful\ + \ in a wide variety of applications from music to text entry. The device is made\ + \ up of three switches in a row and one miniature joystick on top of the middle\ + \ switch. The combination of joystick direction and switch(es) controls what note\ + \ or alphanumeric character is selected by the finger. 
Several applications are\ + \ detailed.},\n address = {Hamamatsu, Japan},\n author = {Zaborowski, Philippe\ + \ S.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176689},\n issn = {2220-4806},\n\ + \ keywords = {One-Thumb Input Device, HCI, Isometric Joystick, Mobile Computing,\ + \ Handheld Devices, Musical Instrument.},\n pages = {112--115},\n title = {ThumbTec:\ + \ A New Handheld Input Device},\n url = {http://www.nime.org/proceedings/2004/nime2004_112.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178037 + doi: 10.5281/zenodo.1176689 issn: 2220-4806 - keywords: 'articula-,duration,dynamics,egales,loudness,notes in,synthetic performance,timing,tion' - pages: 48--51 - title: 'Studying Interdependencies in Music Performance : An Interactive Tool' - url: http://www.nime.org/proceedings/2011/nime2011_048.pdf - year: 2011 + keywords: 'One-Thumb Input Device, HCI, Isometric Joystick, Mobile Computing, Handheld + Devices, Musical Instrument.' 
+ pages: 112--115 + title: 'ThumbTec: A New Handheld Input Device' + url: http://www.nime.org/proceedings/2004/nime2004_112.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Bokesoy2011 - address: 'Oslo, Norway' - author: 'B\''''{o}kesoy, Sinan and Adler, Patrick' - bibtex: "@inproceedings{Bokesoy2011,\n address = {Oslo, Norway},\n author = {B\\\ - ''{o}kesoy, Sinan and Adler, Patrick},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177945},\n\ - \ issn = {2220-4806},\n keywords = {Sound installation, robotic music, interactive\ - \ systems },\n pages = {52--55},\n title = {1city1001vibrations : Development\ - \ of a Interactive Sound Installation with Robotic Instrument Performance},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_052.pdf},\n year = {2011}\n\ - }\n" + ID: Torchia2004 + abstract: 'The authors have developed several methods for spatially distributing + spectral material in real-time using frequency-domain processing. Applying spectral + spatialization techniques to more than two channels introduces a few obstacles, + particularly with controllers, visualization and the manipulation of large amounts + of control data. Various interfaces are presented which address these issues. + We also discuss 3D “cube” controllers and visualizations, which go a long way + in aiding usability. A range of implementations were realized, each with its own + interface, automation, and output characteristics. We also explore a number of + novel techniques. For example, a sound’s spectral components can be mapped in + space based on its own components’ energy, or the energy of another signal’s components + (a kind of spatial cross-synthesis). Finally, we address aesthetic concerns, such + as perceptual and sonic coherency, which arise when sounds have been spectrally + dissected and scattered across a multi-channel spatial field in 64, 128 or more + spectral bands.' 
+ address: 'Hamamatsu, Japan' + author: 'Torchia, Ryan H. and Lippe, Cort' + bibtex: "@inproceedings{Torchia2004,\n abstract = {The authors have developed several\ + \ methods for spatially distributing spectral material in real-time using frequency-domain\ + \ processing. Applying spectral spatialization techniques to more than two channels\ + \ introduces a few obstacles, particularly with controllers, visualization and\ + \ the manipulation of large amounts of control data. Various interfaces are presented\ + \ which address these issues. We also discuss 3D “cube” controllers and visualizations,\ + \ which go a long way in aiding usability. A range of implementations were realized,\ + \ each with its own interface, automation, and output characteristics. We also\ + \ explore a number of novel techniques. For example, a sound’s spectral components\ + \ can be mapped in space based on its own components’ energy, or the energy of\ + \ another signal’s components (a kind of spatial cross-synthesis). Finally, we\ + \ address aesthetic concerns, such as perceptual and sonic coherency, which arise\ + \ when sounds have been spectrally dissected and scattered across a multi-channel\ + \ spatial field in 64, 128 or more spectral bands.},\n address = {Hamamatsu, Japan},\n\ + \ author = {Torchia, Ryan H. 
and Lippe, Cort},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176679},\n issn = {2220-4806},\n pages = {116--119},\n title\ + \ = {Techniques for Multi-Channel Real-Time Spatial Distribution Using Frequency-Domain\ + \ Processing},\n url = {http://www.nime.org/proceedings/2004/nime2004_116.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177945 + doi: 10.5281/zenodo.1176679 issn: 2220-4806 - keywords: 'Sound installation, robotic music, interactive systems ' - pages: 52--55 - title: '1city1001vibrations : Development of a Interactive Sound Installation with - Robotic Instrument Performance' - url: http://www.nime.org/proceedings/2011/nime2011_052.pdf - year: 2011 + pages: 116--119 + title: Techniques for Multi-Channel Real-Time Spatial Distribution Using Frequency-Domain + Processing + url: http://www.nime.org/proceedings/2004/nime2004_116.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: MurrayBrowne2011 - abstract: "Many performers of novel musical instruments find it difficult to engage\ - \ audiences beyond those in the field. Previousresearch points to a failure to\ - \ balance complexity with usability, and a loss of transparency due to the detachmentof\ - \ the controller and sound generator. 
The issue is oftenexacerbated by an audience's\ - \ lack of prior exposure to theinstrument and its workings.However, we argue that\ - \ there is a conflict underlyingmany novel musical instruments in that they are\ - \ intendedto be both a tool for creative expression and a creative workof art\ - \ in themselves, resulting in incompatible requirements.By considering the instrument,\ - \ the composition and theperformance together as a whole with careful considerationof\ - \ the rate of learning demanded of the audience, we propose that a lack of transparency\ - \ can become an asset ratherthan a hindrance. Our approach calls for not only\ - \ controllerand sound generator to be designed in sympathy with eachother, but\ - \ composition, performance and physical form too.Identifying three design principles,\ - \ we illustrate this approach with the Serendiptichord, a wearable instrument\ - \ fordancers created by the ,\n,\nauthors." - address: 'Oslo, Norway' - author: 'Murray-Browne, Tim and Mainstone, Di and Bryan-Kinns, Nick and Plumbley, - Mark D.' - bibtex: "@inproceedings{MurrayBrowne2011,\n abstract = {Many performers of novel\ - \ musical instruments find it difficult to engage audiences beyond those in the\ - \ field. Previousresearch points to a failure to balance complexity with usability,\ - \ and a loss of transparency due to the detachmentof the controller and sound\ - \ generator. 
The issue is oftenexacerbated by an audience's lack of prior exposure\ - \ to theinstrument and its workings.However, we argue that there is a conflict\ - \ underlyingmany novel musical instruments in that they are intendedto be both\ - \ a tool for creative expression and a creative workof art in themselves, resulting\ - \ in incompatible requirements.By considering the instrument, the composition\ - \ and theperformance together as a whole with careful considerationof the rate\ - \ of learning demanded of the audience, we propose that a lack of transparency\ - \ can become an asset ratherthan a hindrance. Our approach calls for not only\ - \ controllerand sound generator to be designed in sympathy with eachother, but\ - \ composition, performance and physical form too.Identifying three design principles,\ - \ we illustrate this approach with the Serendiptichord, a wearable instrument\ - \ fordancers created by the ,\n,\nauthors.},\n address = {Oslo, Norway},\n author\ - \ = {Murray-Browne, Tim and Mainstone, Di and Bryan-Kinns, Nick and Plumbley,\ - \ Mark D.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178119},\n issn\ - \ = {2220-4806},\n keywords = {Performance, composed instrument, transparency,\ - \ constraint. },\n pages = {56--59},\n title = {The Medium is the Message: Composing\ - \ Instruments and Performing Mappings},\n url = {http://www.nime.org/proceedings/2011/nime2011_056.pdf},\n\ - \ year = {2011}\n}\n" + ID: Hiraga2004 + abstract: 'Rencon is an annual international event that started in 2002. It has + roles of (1) pursuing evaluation methods for systems whose output includes subjective + issues, and (2) providing a forum for researches of several fields related to + musical expression. In the past. 
Rencon was held as a workshop associated with + a musical contest that provided a forum for presenting and discussing the latest + research in automatic performance rendering. This year we introduce new evaluation + methods of performance expression to Rencon: a Turing Test and a Gnirut Test, + which is a reverse Turing Test, for performance expression. We have opened a section + of the contests to any instruments and genre of music, including synthesized human + voices.' + address: 'Hamamatsu, Japan' + author: 'Hiraga, Rumi and Bresin, Roberto and Hirata, Keiji and Katayose, Haruhiro' + bibtex: "@inproceedings{Hiraga2004,\n abstract = {Rencon is an annual international\ + \ event that started in 2002. It has roles of (1) pursuing evaluation methods\ + \ for systems whose output includes subjective issues, and (2) providing a forum\ + \ for researches of several fields related to musical expression. In the past.\ + \ Rencon was held as a workshop associated with a musical contest that provided\ + \ a forum for presenting and discussing the latest research in automatic performance\ + \ rendering. This year we introduce new evaluation methods of performance expression\ + \ to Rencon: a Turing Test and a Gnirut Test, which is a reverse Turing Test,\ + \ for performance expression. 
We have opened a section of the contests to any\ + \ instruments and genre of music, including synthesized human voices.},\n address\ + \ = {Hamamatsu, Japan},\n author = {Hiraga, Rumi and Bresin, Roberto and Hirata,\ + \ Keiji and Katayose, Haruhiro},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176611},\n\ + \ issn = {2220-4806},\n keywords = {Rencon, Turing Test, Musical Expression, Performance\ + \ Rendering},\n pages = {120--123},\n title = {Rencon 2004: Turing Test for Musical\ + \ Expression},\n url = {http://www.nime.org/proceedings/2004/nime2004_120.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178119 + doi: 10.5281/zenodo.1176611 issn: 2220-4806 - keywords: 'Performance, composed instrument, transparency, constraint. ' - pages: 56--59 - title: 'The Medium is the Message: Composing Instruments and Performing Mappings' - url: http://www.nime.org/proceedings/2011/nime2011_056.pdf - year: 2011 + keywords: 'Rencon, Turing Test, Musical Expression, Performance Rendering' + pages: 120--123 + title: 'Rencon 2004: Turing Test for Musical Expression' + url: http://www.nime.org/proceedings/2004/nime2004_120.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Kim2011 - abstract: 'In this paper, we discuss the use of the clothesline as ametaphor for - designing a musical interface called Airer Choir. This interactive installation - is based on the function ofan ordinary object that is not a traditional instrument, - andhanging articles of clothing is literally the gesture to use theinterface. - Based on this metaphor, a musical interface withhigh transparency was designed. - Using the metaphor, weexplored the possibilities for recognizing of input gesturesand - creating sonic events by mapping data to sound. Thus,four different types of Airer - Choir were developed. 
By classifying the interfaces, we concluded that various - musicalexpressions are possible by using the same metaphor.' - address: 'Oslo, Norway' - author: 'Kim, Seunghun and Kim, Luke K. and Jeong, Songhee and Yeo, Woon Seung' - bibtex: "@inproceedings{Kim2011,\n abstract = {In this paper, we discuss the use\ - \ of the clothesline as ametaphor for designing a musical interface called Airer\ - \ Choir. This interactive installation is based on the function ofan ordinary\ - \ object that is not a traditional instrument, andhanging articles of clothing\ - \ is literally the gesture to use theinterface. Based on this metaphor, a musical\ - \ interface withhigh transparency was designed. Using the metaphor, weexplored\ - \ the possibilities for recognizing of input gesturesand creating sonic events\ - \ by mapping data to sound. Thus,four different types of Airer Choir were developed.\ - \ By classifying the interfaces, we concluded that various musicalexpressions\ - \ are possible by using the same metaphor.},\n address = {Oslo, Norway},\n author\ - \ = {Kim, Seunghun and Kim, Luke K. and Jeong, Songhee and Yeo, Woon Seung},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178065},\n issn = {2220-4806},\n\ - \ keywords = {musical interface, metaphor, clothesline installation },\n pages\ - \ = {60--63},\n title = {Clothesline as a Metaphor for a Musical Interface},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_060.pdf},\n year = {2011}\n\ - }\n" + ID: Katayose2004 + abstract: 'This paper describes an approach for playing expressivemusic, as it refers + to a pianist''s expressiveness, with atapping-style interface. MIDI-formatted + expressiveperformances played by pianists were first analyzed andtransformed into + performance templates, in which thedeviations from a canonical description was + separatelydescribed for each event. 
Using one of the templates as askill complement, + a player can play music expressivelyover and under the beat level. This paper + presents ascheduler that allows a player to mix her/his own intensionand the expressiveness + in the performance template. Theresults of a forty-subject user study suggest + that using theexpression template contributes the subject''s joy of playingmusic + with the tapping-style performance interface. Thisresult is also supported by + a brain activation study that wasdone using a near-infrared spectroscopy (NIRS).Categories + and Subject DescriptorsH.5.5 [Information Interfaces and Presentation]: Sound + andMusic Computing methodologies and techniques.' + address: 'Hamamatsu, Japan' + author: 'Katayose, Haruhiro and Okudaira, Keita' + bibtex: "@inproceedings{Katayose2004,\n abstract = {This paper describes an approach\ + \ for playing expressivemusic, as it refers to a pianist's expressiveness, with\ + \ atapping-style interface. MIDI-formatted expressiveperformances played by pianists\ + \ were first analyzed andtransformed into performance templates, in which thedeviations\ + \ from a canonical description was separatelydescribed for each event. Using one\ + \ of the templates as askill complement, a player can play music expressivelyover\ + \ and under the beat level. This paper presents ascheduler that allows a player\ + \ to mix her/his own intensionand the expressiveness in the performance template.\ + \ Theresults of a forty-subject user study suggest that using theexpression template\ + \ contributes the subject's joy of playingmusic with the tapping-style performance\ + \ interface. 
Thisresult is also supported by a brain activation study that wasdone\ + \ using a near-infrared spectroscopy (NIRS).Categories and Subject DescriptorsH.5.5\ + \ [Information Interfaces and Presentation]: Sound andMusic Computing methodologies\ + \ and techniques.},\n address = {Hamamatsu, Japan},\n author = {Katayose, Haruhiro\ + \ and Okudaira, Keita},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176625},\n\ + \ issn = {2220-4806},\n keywords = {Rencon, interfaces for musical expression,\ + \ visualization},\n pages = {124--129},\n title = {Using an Expressive Performance\ + \ Template in a Music Conducting Interface},\n url = {http://www.nime.org/proceedings/2004/nime2004_124.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178065 + doi: 10.5281/zenodo.1176625 issn: 2220-4806 - keywords: 'musical interface, metaphor, clothesline installation ' - pages: 60--63 - title: Clothesline as a Metaphor for a Musical Interface - url: http://www.nime.org/proceedings/2011/nime2011_060.pdf - year: 2011 + keywords: 'Rencon, interfaces for musical expression, visualization' + pages: 124--129 + title: Using an Expressive Performance Template in a Music Conducting Interface + url: http://www.nime.org/proceedings/2004/nime2004_124.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Polotti2011 - abstract: 'In this paper, we discuss the results obtained by means of the EGGS (Elementary - Gestalts for Gesture Sonification) system in terms of artistic realizations. EGGS - was introduced in a previous edition of this conference. The works presented include - interactive installations in the form of public art and interactive onstage performances. 
- In all of the works, the EGGS principles of simplicity based on the correspondence - between elementary sonic and movement units, and of organicity between sound and - gesture are applied. Indeed, we study both sound as a means for gesture representation - and gesture as embodiment of sound. These principles constitute our guidelines - for the investigation of the bidirectional relationship between sound and body - expression with various strategies involving both educated and non-educated executors. ' - address: 'Oslo, Norway' - author: 'Polotti, Pietro and Goina, Maurizio' - bibtex: "@inproceedings{Polotti2011,\n abstract = {In this paper, we discuss the\ - \ results obtained by means of the EGGS (Elementary Gestalts for Gesture Sonification)\ - \ system in terms of artistic realizations. EGGS was introduced in a previous\ - \ edition of this conference. The works presented include interactive installations\ - \ in the form of public art and interactive onstage performances. In all of the\ - \ works, the EGGS principles of simplicity based on the correspondence between\ - \ elementary sonic and movement units, and of organicity between sound and gesture\ - \ are applied. Indeed, we study both sound as a means for gesture representation\ - \ and gesture as embodiment of sound. These principles constitute our guidelines\ - \ for the investigation of the bidirectional relationship between sound and body\ - \ expression with various strategies involving both educated and non-educated\ - \ executors. },\n address = {Oslo, Norway},\n author = {Polotti, Pietro and Goina,\ - \ Maurizio},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178137},\n issn\ - \ = {2220-4806},\n keywords = {Gesture sonification, Interactive performance,\ - \ Public art. 
},\n pages = {64--67},\n title = {EGGS in Action},\n url = {http://www.nime.org/proceedings/2011/nime2011_064.pdf},\n\ - \ year = {2011}\n}\n" + ID: Kawahara2004 + abstract: 'A series of demonstrations of synthesized acappella songsbased on an + auditory morphing using STRAIGHT [5] willbe presented. Singing voice data for + morphing were extracted from the RWCmusic database of musical instrument sound. + Discussions on a new extension of the morphing procedure to deal with vibrato + will be introduced basedon the statistical analysis of the database and its effect + onsynthesized acappella will also be demonstrated.' + address: 'Hamamatsu, Japan' + author: 'Kawahara, Hideki and Banno, Hideki and Morise, Masanori' + bibtex: "@inproceedings{Kawahara2004,\n abstract = {A series of demonstrations of\ + \ synthesized acappella songsbased on an auditory morphing using STRAIGHT [5]\ + \ willbe presented. Singing voice data for morphing were extracted from the RWCmusic\ + \ database of musical instrument sound. 
Discussions on a new extension of the\ + \ morphing procedure to deal with vibrato will be introduced basedon the statistical\ + \ analysis of the database and its effect onsynthesized acappella will also be\ + \ demonstrated.},\n address = {Hamamatsu, Japan},\n author = {Kawahara, Hideki\ + \ and Banno, Hideki and Morise, Masanori},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176627},\n\ + \ issn = {2220-4806},\n keywords = {Rencon, Acappella, RWCdatabase, STRAIGHT,\ + \ morphing},\n pages = {130--131},\n title = {Acappella Synthesis Demonstrations\ + \ using RWC Music Database},\n url = {http://www.nime.org/proceedings/2004/nime2004_130.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178137 + doi: 10.5281/zenodo.1176627 issn: 2220-4806 - keywords: 'Gesture sonification, Interactive performance, Public art. ' - pages: 64--67 - title: EGGS in Action - url: http://www.nime.org/proceedings/2011/nime2011_064.pdf - year: 2011 + keywords: 'Rencon, Acappella, RWCdatabase, STRAIGHT, morphing' + pages: 130--131 + title: Acappella Synthesis Demonstrations using RWC Music Database + url: http://www.nime.org/proceedings/2004/nime2004_130.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Janssen2011 - abstract: The present article describes a reverberation instrumentwhich is based - on cognitive categorization of reverberating spaces. Different techniques for - artificial reverberationwill be covered. A multidimensional scaling experimentwas - conducted on impulse responses in order to determinehow humans acoustically perceive - spatiality. This researchseems to indicate that the perceptual dimensions are - related to early energy decay and timbral qualities. Theseresults are applied - to a reverberation instrument based ondelay lines. 
It can be contended that such - an instrumentcan be controlled more intuitively than other delay line reverberation - tools which often provide a confusing range ofparameters which have a physical - rather than perceptualmeaning. - address: 'Oslo, Norway' - author: 'Janssen, Berit' - bibtex: "@inproceedings{Janssen2011,\n abstract = {The present article describes\ - \ a reverberation instrumentwhich is based on cognitive categorization of reverberating\ - \ spaces. Different techniques for artificial reverberationwill be covered. A\ - \ multidimensional scaling experimentwas conducted on impulse responses in order\ - \ to determinehow humans acoustically perceive spatiality. This researchseems\ - \ to indicate that the perceptual dimensions are related to early energy decay\ - \ and timbral qualities. Theseresults are applied to a reverberation instrument\ - \ based ondelay lines. It can be contended that such an instrumentcan be controlled\ - \ more intuitively than other delay line reverberation tools which often provide\ - \ a confusing range ofparameters which have a physical rather than perceptualmeaning.},\n\ - \ address = {Oslo, Norway},\n author = {Janssen, Berit},\n booktitle = {Proceedings\ + ID: Dannenberg2004 + abstract: 'Real-time interactive software can be difficult to construct and debug. + Aura is a software platform to facilitate highly interactive systems that combine + audio signal processing, sophisticated control, sensors, computer animation, video + processing, and graphical user interfaces. Moreover, Aura is open-ended, allowing + diverse software components to be interconnected in a real-time framework. A recent + assessment of Aura has motivated a redesign of the communication system to support + remote procedure call. In addition, the audio signal processing framework has + been altered to reduce programming errors. 
The motivation behind these changes + is discussed, and measurements of run-time performance offer some general insights + for system designers.' + address: 'Hamamatsu, Japan' + author: 'Dannenberg, Roger B.' + bibtex: "@inproceedings{Dannenberg2004,\n abstract = {Real-time interactive software\ + \ can be difficult to construct and debug. Aura is a software platform to facilitate\ + \ highly interactive systems that combine audio signal processing, sophisticated\ + \ control, sensors, computer animation, video processing, and graphical user interfaces.\ + \ Moreover, Aura is open-ended, allowing diverse software components to be interconnected\ + \ in a real-time framework. A recent assessment of Aura has motivated a redesign\ + \ of the communication system to support remote procedure call. In addition, the\ + \ audio signal processing framework has been altered to reduce programming errors.\ + \ The motivation behind these changes is discussed, and measurements of run-time\ + \ performance offer some general insights for system designers.},\n address =\ + \ {Hamamatsu, Japan},\n author = {Dannenberg, Roger B.},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178049},\n issn = {2220-4806},\n keywords = {Reverberation,\ - \ perception, multidimensional scaling, mapping },\n pages = {68--71},\n title\ - \ = {A Reverberation Instrument Based on Perceptual Mapping},\n url = {http://www.nime.org/proceedings/2011/nime2011_068.pdf},\n\ - \ year = {2011}\n}\n" + \ doi = {10.5281/zenodo.1176593},\n issn = {2220-4806},\n pages = {132--137},\n\ + \ title = {Aura II: Making Real-Time Systems Safe for Music},\n url = {http://www.nime.org/proceedings/2004/nime2004_132.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178049 + doi: 10.5281/zenodo.1176593 issn: 2220-4806 - keywords: 
'Reverberation, perception, multidimensional scaling, mapping ' - pages: 68--71 - title: A Reverberation Instrument Based on Perceptual Mapping - url: http://www.nime.org/proceedings/2011/nime2011_068.pdf - year: 2011 + pages: 132--137 + title: 'Aura II: Making Real-Time Systems Safe for Music' + url: http://www.nime.org/proceedings/2004/nime2004_132.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Hayes2011 - address: 'Oslo, Norway' - author: 'Hayes, Lauren' - bibtex: "@inproceedings{Hayes2011,\n address = {Oslo, Norway},\n author = {Hayes,\ - \ Lauren},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178043},\n issn = {2220-4806},\n\ - \ keywords = {Vibrotactile feedback, human-computer interfaces, digital composition,\ - \ real-time performance, augmented instruments. },\n pages = {72--75},\n title\ - \ = {Vibrotactile Feedback-Assisted Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_072.pdf},\n\ - \ year = {2011}\n}\n" + ID: Wang2004 + abstract: 'On-the-fly programming is a style of programming in which the programmer/performer/composer + augments and modifies the program while it is running, without stopping or restarting, + in order to assert expressive, programmable control at runtime. Because of the + fundamental powers of programming languages, we believe the technical and aesthetic + aspects of on-the-fly programming are worth exploring. In this paper, we present + a formalized framework for on-the-fly programming, based on the ChucK synthesis + language, which supports a truly concurrent audio programming model with sample-synchronous + timing, and a highly on-the-fly style of programming. We first provide a well-defined + notion of on-thefly programming. We then address four fundamental issues that + confront the on-the-fly programmer: timing, modularity, conciseness, and flexibility. 
+ Using the features and properties of ChucK, we show how it solves many of these + issues. In this new model, we show that (1) concurrency provides natural modularity + for on-the-fly programming, (2) the timing mechanism in ChucK guarantees on-the-fly + precision and consistency, (3) the Chuck syntax improves conciseness, and (4) + the overall system is a useful framework for exploring on-the-fly programming. + Finally, we discuss the aesthetics of on-the-fly performance. ' + address: 'Hamamatsu, Japan' + author: 'Wang, Ge and Cook, Perry R.' + bibtex: "@inproceedings{Wang2004,\n abstract = {On-the-fly programming is a style\ + \ of programming in which the programmer/performer/composer augments and modifies\ + \ the program while it is running, without stopping or restarting, in order to\ + \ assert expressive, programmable control at runtime. Because of the fundamental\ + \ powers of programming languages, we believe the technical and aesthetic aspects\ + \ of on-the-fly programming are worth exploring. In this paper, we present a formalized\ + \ framework for on-the-fly programming, based on the ChucK synthesis language,\ + \ which supports a truly concurrent audio programming model with sample-synchronous\ + \ timing, and a highly on-the-fly style of programming. We first provide a well-defined\ + \ notion of on-thefly programming. We then address four fundamental issues that\ + \ confront the on-the-fly programmer: timing, modularity, conciseness, and flexibility.\ + \ Using the features and properties of ChucK, we show how it solves many of these\ + \ issues. In this new model, we show that (1) concurrency provides natural modularity\ + \ for on-the-fly programming, (2) the timing mechanism in ChucK guarantees on-the-fly\ + \ precision and consistency, (3) the Chuck syntax improves conciseness, and (4)\ + \ the overall system is a useful framework for exploring on-the-fly programming.\ + \ Finally, we discuss the aesthetics of on-the-fly performance. 
},\n address =\ + \ {Hamamatsu, Japan},\n author = {Wang, Ge and Cook, Perry R.},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176683},\n issn = {2220-4806},\n keywords = {code as\ + \ interface,compiler,concurrency,concurrent audio programming,on-the-fly programming,real-,synchronization,synthesis,time,timing,virtual\ + \ machine},\n pages = {138--143},\n title = {On-the-fly Programming: Using Code\ + \ as an Expressive Musical Instrument},\n url = {http://www.nime.org/proceedings/2004/nime2004_138.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178043 + doi: 10.5281/zenodo.1176683 issn: 2220-4806 - keywords: 'Vibrotactile feedback, human-computer interfaces, digital composition, - real-time performance, augmented instruments. ' - pages: 72--75 - title: Vibrotactile Feedback-Assisted Performance - url: http://www.nime.org/proceedings/2011/nime2011_072.pdf - year: 2011 + keywords: 'code as interface,compiler,concurrency,concurrent audio programming,on-the-fly + programming,real-,synchronization,synthesis,time,timing,virtual machine' + pages: 138--143 + title: 'On-the-fly Programming: Using Code as an Expressive Musical Instrument' + url: http://www.nime.org/proceedings/2004/nime2004_138.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Ando2011 - abstract: 'The use of Interactive Evolutionary Computation (IEC) is suitable to - the development of art-creation aid system for beginners. This is because of important - features of IEC, like the ability of optimizing with ambiguous evaluation measures, - and not requiring special knowledge about art-creation. With the popularity of - Consumer Generated Media, many beginners in term of art-creation are interested - in creating their own original art works. 
Thus developing of useful IEC system - for musical creation is an urgent task. However, user-assist functions for IEC - proposed in pastworks decrease the possibility of getting good unexpected results, - which is an important feature of art-creation with IEC. In this paper, The author - proposes a new IEC evaluation process named "Shopping Basket" procedure IEC. In - the procedure, an user-assist function called Similarity-Based Reasoning allows - for natural evaluation by the user. The function reduces user''s burden without - reducing the possibility of unexpected results. The author performs an experiment - where subjects use the new interface to validate it. As a result of the experiment, - the author concludes that the new interface is better to motivate users to compose - with IEC system than the old interface.' - address: 'Oslo, Norway' - author: 'Ando, Daichi' - bibtex: "@inproceedings{Ando2011,\n abstract = {The use of Interactive Evolutionary\ - \ Computation (IEC) is suitable to the development of art-creation aid system\ - \ for beginners. This is because of important features of IEC, like the ability\ - \ of optimizing with ambiguous evaluation measures, and not requiring special\ - \ knowledge about art-creation. With the popularity of Consumer Generated Media,\ - \ many beginners in term of art-creation are interested in creating their own\ - \ original art works. Thus developing of useful IEC system for musical creation\ - \ is an urgent task. However, user-assist functions for IEC proposed in pastworks\ - \ decrease the possibility of getting good unexpected results, which is an important\ - \ feature of art-creation with IEC. In this paper, The author proposes a new IEC\ - \ evaluation process named \"Shopping Basket\" procedure IEC. In the procedure,\ - \ an user-assist function called Similarity-Based Reasoning allows for natural\ - \ evaluation by the user. 
The function reduces user's burden without reducing\ - \ the possibility of unexpected results. The author performs an experiment where\ - \ subjects use the new interface to validate it. As a result of the experiment,\ - \ the author concludes that the new interface is better to motivate users to compose\ - \ with IEC system than the old interface.},\n address = {Oslo, Norway},\n author\ - \ = {Ando, Daichi},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177941},\n\ - \ issn = {2220-4806},\n keywords = {Interactive Evolutionary Computation, User-Interface,\ - \ Composition Aid },\n pages = {76--79},\n title = {Improving User-Interface of\ - \ Interactive EC for Composition-Aid by means of Shopping Basket Procedure},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_076.pdf},\n year = {2011}\n\ - }\n" + ID: Lew2004 + abstract: 'This paper describes the design of an expressive tangible interface for + cinema editing as a live performance. A short survey of live video practices is + provided. The Live Cinema instrument is a cross between a musical instrument and + a film editing tool, tailored for improvisational control as well as performance + presence. Design specifications for the instrument evolved based on several types + of observations including: our own performances in which we used a prototype based + on available tools; an analysis of performative aspects of contemporary DJ equipment; + and an evaluation of organizational aspects of several generations of film editing + tools. Our instrument presents the performer with a large canvas where projected + images can be grabbed and moved around with both hands simultaneously; the performer + also has access to two video drums featuring haptic display to manipulate the + shots and cut between streams. 
The paper ends with a discussion of issues related + to the tensions between narrative structure and hands-on control, live and recorded + arts and the scoring of improvised films. ' + address: 'Hamamatsu, Japan' + author: 'Lew, Michael' + bibtex: "@inproceedings{Lew2004,\n abstract = {This paper describes the design of\ + \ an expressive tangible interface for cinema editing as a live performance. A\ + \ short survey of live video practices is provided. The Live Cinema instrument\ + \ is a cross between a musical instrument and a film editing tool, tailored for\ + \ improvisational control as well as performance presence. Design specifications\ + \ for the instrument evolved based on several types of observations including:\ + \ our own performances in which we used a prototype based on available tools;\ + \ an analysis of performative aspects of contemporary DJ equipment; and an evaluation\ + \ of organizational aspects of several generations of film editing tools. Our\ + \ instrument presents the performer with a large canvas where projected images\ + \ can be grabbed and moved around with both hands simultaneously; the performer\ + \ also has access to two video drums featuring haptic display to manipulate the\ + \ shots and cut between streams. The paper ends with a discussion of issues related\ + \ to the tensions between narrative structure and hands-on control, live and recorded\ + \ arts and the scoring of improvised films. 
},\n address = {Hamamatsu, Japan},\n\ + \ author = {Lew, Michael},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176631},\n\ + \ issn = {2220-4806},\n keywords = {live cinema, video controller, visual music,\ + \ DJ, VJ, film editing, tactile interface, two-hand interaction, improvisation,\ + \ performance, narrative structure.},\n pages = {144--149},\n title = {Live Cinema:\ + \ Designing an Instrument for Cinema Editing as a Live Performance},\n url = {http://www.nime.org/proceedings/2004/nime2004_144.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177941 + doi: 10.5281/zenodo.1176631 issn: 2220-4806 - keywords: 'Interactive Evolutionary Computation, User-Interface, Composition Aid ' - pages: 76--79 - title: Improving User-Interface of Interactive EC for Composition-Aid by means of - Shopping Basket Procedure - url: http://www.nime.org/proceedings/2011/nime2011_076.pdf - year: 2011 + keywords: 'live cinema, video controller, visual music, DJ, VJ, film editing, tactile + interface, two-hand interaction, improvisation, performance, narrative structure.' + pages: 144--149 + title: 'Live Cinema: Designing an Instrument for Cinema Editing as a Live Performance' + url: http://www.nime.org/proceedings/2004/nime2004_144.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Mcgee2011 - abstract: BioRhythm is an interactive bio-feedback installation controlled by the - cardiovascular system. Data from a photoplethysmograph (PPG) sensor controls sonification - and visualization parameters in real-time. Biological signals areobtained using - the techniques of Resonance Theory in Hemodynamics and mapped to audiovisual cues - via the Five Element Philosophy. The result is a new media interface utilizing - sound synthesis and spatialization with advanced graphics rendering. 
BioRhythm - serves as an artistic explorationof the harmonic spectra of pulse waves. - address: 'Oslo, Norway' - author: 'Mcgee, Ryan and Fan, Yuan-Yi and Ali, Reza' - bibtex: "@inproceedings{Mcgee2011,\n abstract = {BioRhythm is an interactive bio-feedback\ - \ installation controlled by the cardiovascular system. Data from a photoplethysmograph\ - \ (PPG) sensor controls sonification and visualization parameters in real-time.\ - \ Biological signals areobtained using the techniques of Resonance Theory in Hemodynamics\ - \ and mapped to audiovisual cues via the Five Element Philosophy. The result is\ - \ a new media interface utilizing sound synthesis and spatialization with advanced\ - \ graphics rendering. BioRhythm serves as an artistic explorationof the harmonic\ - \ spectra of pulse waves.},\n address = {Oslo, Norway},\n author = {Mcgee, Ryan\ - \ and Fan, Yuan-Yi and Ali, Reza},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178105},\n\ - \ issn = {2220-4806},\n keywords = {bio-feedback,bio-sensing,fm synthesis,open\ - \ sound control,parallel computing,sonification,spa-,spatial audio,tialization,tion,visualiza-},\n\ - \ pages = {80--83},\n title = {BioRhythm : a Biologically-inspired Audio-Visual\ - \ Installation},\n url = {http://www.nime.org/proceedings/2011/nime2011_080.pdf},\n\ - \ year = {2011}\n}\n" + ID: Poepel2004 + abstract: 'A system is introduced that allows a string player to control a synthesis + engine with the gestural skills he is used to. The implemented system is based + on an electric viola and a synthesis engine that is directly controlled by the + unanalysed audio signal of the instrument and indirectly by control parameters + mapped to the synthesis engine. This method offers a highly string-specific playability, + as it is sensitive to the kinds of musical articulation produced by traditional + playing techniques. 
Nuances of sound variation applied by the player will be present + in the output signal even if those nuances are beyond traditionally measurable + parameters like pitch, amplitude or brightness. The relatively minimal hardware + requirements make the instrument accessible with little expenditure.' + address: 'Hamamatsu, Japan' + author: 'Poepel, Cornelius' + bibtex: "@inproceedings{Poepel2004,\n abstract = {A system is introduced that allows\ + \ a string player to control a synthesis engine with the gestural skills he is\ + \ used to. The implemented system is based on an electric viola and a synthesis\ + \ engine that is directly controlled by the unanalysed audio signal of the instrument\ + \ and indirectly by control parameters mapped to the synthesis engine. This method\ + \ offers a highly string-specific playability, as it is sensitive to the kinds\ + \ of musical articulation produced by traditional playing techniques. Nuances\ + \ of sound variation applied by the player will be present in the output signal\ + \ even if those nuances are beyond traditionally measurable parameters like pitch,\ + \ amplitude or brightness. 
The relatively minimal hardware requirements make the\ + \ instrument accessible with little expenditure.},\n address = {Hamamatsu, Japan},\n\ + \ author = {Poepel, Cornelius},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176655},\n\ + \ issn = {2220-4806},\n keywords = {Electronic bowed string instrument, playability,\ + \ musical instrument design, human computer interface, oscillation controlled\ + \ sound synthesis},\n pages = {150--153},\n title = {Synthesized Strings for String\ + \ Players},\n url = {http://www.nime.org/proceedings/2004/nime2004_150.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178105 + doi: 10.5281/zenodo.1176655 issn: 2220-4806 - keywords: 'bio-feedback,bio-sensing,fm synthesis,open sound control,parallel computing,sonification,spa-,spatial - audio,tialization,tion,visualiza-' - pages: 80--83 - title: 'BioRhythm : a Biologically-inspired Audio-Visual Installation' - url: http://www.nime.org/proceedings/2011/nime2011_080.pdf - year: 2011 + keywords: 'Electronic bowed string instrument, playability, musical instrument design, + human computer interface, oscillation controlled sound synthesis' + pages: 150--153 + title: Synthesized Strings for String Players + url: http://www.nime.org/proceedings/2004/nime2004_150.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Pigott2011 - address: 'Oslo, Norway' - author: 'Pigott, Jon' - bibtex: "@inproceedings{Pigott2011,\n address = {Oslo, Norway},\n author = {Pigott,\ - \ Jon},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178133},\n issn = {2220-4806},\n\ - \ keywords = {Electromechanical sonic art, kinetic sound art, prepared speakers,\ - \ Infinite Spring. 
},\n pages = {84--87},\n title = {Vibration , Volts and Sonic\ - \ Art: A Practice and Theory of Electromechanical Sound},\n url = {http://www.nime.org/proceedings/2011/nime2011_084.pdf},\n\ - \ year = {2011}\n}\n" + ID: Tanaka2004 + abstract: 'We present a system for collaborative musical creation onmobile wireless + networks. The work extends on simple peerto-peer file sharing systems towards + ad-hoc mobility andstreaming. It extends upon music listening from a passiveact + to a proactive, participative activity. The system consistsof a network based + interactive music engine and a portablerendering player. It serves as a platform + for experiments onstudying the sense of agency in collaborative creativeprocess, + and requirements for fostering musical satisfactionin remote collaboration. ' + address: 'Hamamatsu, Japan' + author: 'Tanaka, Atau' + bibtex: "@inproceedings{Tanaka2004,\n abstract = {We present a system for collaborative\ + \ musical creation onmobile wireless networks. The work extends on simple peerto-peer\ + \ file sharing systems towards ad-hoc mobility andstreaming. It extends upon music\ + \ listening from a passiveact to a proactive, participative activity. The system\ + \ consistsof a network based interactive music engine and a portablerendering\ + \ player. It serves as a platform for experiments onstudying the sense of agency\ + \ in collaborative creativeprocess, and requirements for fostering musical satisfactionin\ + \ remote collaboration. 
},\n address = {Hamamatsu, Japan},\n author = {Tanaka,\ + \ Atau},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1176677},\n issn = {2220-4806},\n\ + \ keywords = {mobile music,peer-to-peer,wireless ad-hoc networks},\n pages = {154--156},\n\ + \ title = {Mobile Music Making},\n url = {http://www.nime.org/proceedings/2004/nime2004_154.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178133 + doi: 10.5281/zenodo.1176677 issn: 2220-4806 - keywords: 'Electromechanical sonic art, kinetic sound art, prepared speakers, Infinite - Spring. ' - pages: 84--87 - title: 'Vibration , Volts and Sonic Art: A Practice and Theory of Electromechanical - Sound' - url: http://www.nime.org/proceedings/2011/nime2011_084.pdf - year: 2011 + keywords: 'mobile music,peer-to-peer,wireless ad-hoc networks' + pages: 154--156 + title: Mobile Music Making + url: http://www.nime.org/proceedings/2004/nime2004_154.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Sioros2011 - abstract: 'We introduce a novel algorithm for automatically generating rhythms in - real time in a certain meter. The generated rhythms are "generic" in the sense - that they are characteristic of each time signature without belonging to a specific - musical style. The algorithm is based on a stochastic model in which various aspects - and qualities of the generated rhythm can be controlled intuitively and in real - time. Such qualities are the density of the generated events per bar, the amount - of variation in generation, the amount of syncopation, the metrical strength, - and of course the meter itself. The kin.rhythmicator software application was - developed to implement this algorithm. 
During a performance with the kin.rhythmicator - the user can control all aspects of the performance through descriptive and intuitive - graphic controls. ' - address: 'Oslo, Norway' - author: 'Sioros, George and Guedes, Carlos' - bibtex: "@inproceedings{Sioros2011,\n abstract = {We introduce a novel algorithm\ - \ for automatically generating rhythms in real time in a certain meter. The generated\ - \ rhythms are \"generic\" in the sense that they are characteristic of each time\ - \ signature without belonging to a specific musical style. The algorithm is based\ - \ on a stochastic model in which various aspects and qualities of the generated\ - \ rhythm can be controlled intuitively and in real time. Such qualities are the\ - \ density of the generated events per bar, the amount of variation in generation,\ - \ the amount of syncopation, the metrical strength, and of course the meter itself.\ - \ The kin.rhythmicator software application was developed to implement this algorithm.\ - \ During a performance with the kin.rhythmicator the user can control all aspects\ - \ of the performance through descriptive and intuitive graphic controls. },\n\ - \ address = {Oslo, Norway},\n author = {Sioros, George and Guedes, Carlos},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178163},\n issn = {2220-4806},\n\ - \ keywords = {automatic music generation, generative, stochastic, metric indispensability,\ - \ syncopation, Max/MSP, Max4Live },\n pages = {88--91},\n title = {Automatic Rhythmic\ - \ Performance in Max/MSP: the kin.rhythmicator},\n url = {http://www.nime.org/proceedings/2011/nime2011_088.pdf},\n\ - \ year = {2011}\n}\n" + ID: Flety2004 + abstract: 'This paper reports our recent developments on sensor acquisition systems, + taking advantage of computer network technology. 
We present a versatile hardware + system which can be connected to wireless modules, Analog to Digital Converters, + and enables Ethernet communication. We are planning to make freely available the + design of this architecture. We describe also several approaches we tested for + wireless communication. Such technology developments are currently used in our + newly formed Performance Arts Technology Group.' + address: 'Hamamatsu, Japan' + author: 'Fléty, Emmanuel and Leroy, Nicolas and Ravarini, Jean-Christophe and Bevilacqua, + Frédéric' + bibtex: "@inproceedings{Flety2004,\n abstract = {This paper reports our recent developments\ + \ on sensor acquisition systems, taking advantage of computer network technology.\ + \ We present a versatile hardware system which can be connected to wireless modules,\ + \ Analog to Digital Converters, and enables Ethernet communication. We are planning\ + \ to make freely available the design of this architecture. We describe also several\ + \ approaches we tested for wireless communication. 
Such technology developments\ + \ are currently used in our newly formed Performance Arts Technology Group.},\n\ + \ address = {Hamamatsu, Japan},\n author = {Fl\\'{e}ty, Emmanuel and Leroy, Nicolas\ + \ and Ravarini, Jean-Christophe and Bevilacqua, Fr\\'{e}d\\'{e}ric},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176597},\n issn = {2220-4806},\n keywords\ + \ = {Gesture, Sensors, Ethernet, 802.11, Computer Music.},\n pages = {157--160},\n\ + \ title = {Versatile Sensor Acquisition System Utilizing Network Technology},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_157.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178163 + doi: 10.5281/zenodo.1176597 issn: 2220-4806 - keywords: 'automatic music generation, generative, stochastic, metric indispensability, - syncopation, Max/MSP, Max4Live ' - pages: 88--91 - title: 'Automatic Rhythmic Performance in Max/MSP: the kin.rhythmicator' - url: http://www.nime.org/proceedings/2011/nime2011_088.pdf - year: 2011 + keywords: 'Gesture, Sensors, Ethernet, 802.11, Computer Music.' + pages: 157--160 + title: Versatile Sensor Acquisition System Utilizing Network Technology + url: http://www.nime.org/proceedings/2004/nime2004_157.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Goncalves2011 - abstract: The importance of embedded devices as new devices to thefield of Voltage-Controlled - Synthesizers is realized. Emphasis is directed towards understanding the importance - of suchdevices in Voltage-Controlled Synthesizers. Introducing theVoltage-Controlled - Computer as a new paradigm. Specifications for hardware interfacing and programming - techniquesare described based on real prototypes. Implementationsand successful - results are reported. 
- address: 'Oslo, Norway' - author: 'Goncalves, André' - bibtex: "@inproceedings{Goncalves2011,\n abstract = {The importance of embedded\ - \ devices as new devices to thefield of Voltage-Controlled Synthesizers is realized.\ - \ Emphasis is directed towards understanding the importance of suchdevices in\ - \ Voltage-Controlled Synthesizers. Introducing theVoltage-Controlled Computer\ - \ as a new paradigm. Specifications for hardware interfacing and programming techniquesare\ - \ described based on real prototypes. Implementationsand successful results are\ - \ reported.},\n address = {Oslo, Norway},\n author = {Goncalves, Andr{\\'{e}}},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178035},\n issn = {2220-4806},\n\ - \ keywords = {Voltage-controlled synthesizer, embedded systems, voltage-controlled\ - \ computer, computer driven control voltage generation },\n pages = {92--95},\n\ - \ title = {Towards a Voltage-Controlled Computer Control and Interaction Beyond\ - \ an Embedded System},\n url = {http://www.nime.org/proceedings/2011/nime2011_092.pdf},\n\ - \ year = {2011}\n}\n" + ID: Gaye2004 + abstract: 'Sonic City is a wearable system enabling the use of the urban environment + as an interface for real-time electronic music making, when walking through and + interacting with a city. The device senses everyday interactions and surrounding + contexts, and maps this information in real time to the sound processing of urban + sounds. We conducted a short-term study with various participants using our prototype + in everyday settings. This paper describes the course of the study and preliminary + results in terms of how the participants used and experienced the system. 
These + results showed that the city was perceived as the main performer but that the + user improvised different tactics and ad hoc interventions to actively influence + and participate in how the music was created. ' + address: 'Hamamatsu, Japan' + author: 'Gaye, Lalya and Holmquist, Lars E.' + bibtex: "@inproceedings{Gaye2004,\n abstract = {Sonic City is a wearable system\ + \ enabling the use of the urban environment as an interface for real-time electronic\ + \ music making, when walking through and interacting with a city. The device senses\ + \ everyday interactions and surrounding contexts, and maps this information in\ + \ real time to the sound processing of urban sounds. We conducted a short-term\ + \ study with various participants using our prototype in everyday settings. This\ + \ paper describes the course of the study and preliminary results in terms of\ + \ how the participants used and experienced the system. These results showed that\ + \ the city was perceived as the main performer but that the user improvised different\ + \ tactics and ad hoc interventions to actively influence and participate in how\ + \ the music was created. 
},\n address = {Hamamatsu, Japan},\n author = {Gaye,\ + \ Lalya and Holmquist, Lars E.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176601},\n\ + \ issn = {2220-4806},\n keywords = {User study, new interface for musical expression,\ + \ interactive music, wearable computing, mobility, context-awareness.},\n pages\ + \ = {161--164},\n title = {In Duet with Everyday Urban Settings: A User Study\ + \ of Sonic City},\n url = {http://www.nime.org/proceedings/2004/nime2004_161.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178035 + doi: 10.5281/zenodo.1176601 issn: 2220-4806 - keywords: 'Voltage-controlled synthesizer, embedded systems, voltage-controlled - computer, computer driven control voltage generation ' - pages: 92--95 - title: Towards a Voltage-Controlled Computer Control and Interaction Beyond an Embedded - System - url: http://www.nime.org/proceedings/2011/nime2011_092.pdf - year: 2011 + keywords: 'User study, new interface for musical expression, interactive music, + wearable computing, mobility, context-awareness.' + pages: 161--164 + title: 'In Duet with Everyday Urban Settings: A User Study of Sonic City' + url: http://www.nime.org/proceedings/2004/nime2004_161.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Kim2011b - abstract: 'We developed an automatic piano performance system calledPolyhymnia that - is able to generate expressive polyphonicpiano performances with music scores - so that it can be usedas a computer-based tool for an expressive performance.The - system automatically renders expressive piano musicby means of automatic musical - symbol interpretation andstatistical models of structure-expression relations - regarding polyphonic features of piano performance. 
Experimental results indicate - that the generated performances of various piano pieces with diverse trained models - had polyphonicexpression and sounded expressively. In addition, the models trained - with different performance styles reflected thestyles observed in the training - performances, and they werewell distinguishable by human listeners. Polyhymnia - wonthe first prize in the autonomous section of the PerformanceRendering Contest - for Computer Systems (Rencon) 2010.' - address: 'Oslo, Norway' - author: 'Kim, Tae Hun and Fukayama, Satoru and Nishimoto, Takuya and Sagayama, Shigeki' - bibtex: "@inproceedings{Kim2011b,\n abstract = {We developed an automatic piano\ - \ performance system calledPolyhymnia that is able to generate expressive polyphonicpiano\ - \ performances with music scores so that it can be usedas a computer-based tool\ - \ for an expressive performance.The system automatically renders expressive piano\ - \ musicby means of automatic musical symbol interpretation andstatistical models\ - \ of structure-expression relations regarding polyphonic features of piano performance.\ - \ Experimental results indicate that the generated performances of various piano\ - \ pieces with diverse trained models had polyphonicexpression and sounded expressively.\ - \ In addition, the models trained with different performance styles reflected\ - \ thestyles observed in the training performances, and they werewell distinguishable\ - \ by human listeners. 
Polyhymnia wonthe first prize in the autonomous section\ - \ of the PerformanceRendering Contest for Computer Systems (Rencon) 2010.},\n\ - \ address = {Oslo, Norway},\n author = {Kim, Tae Hun and Fukayama, Satoru and\ - \ Nishimoto, Takuya and Sagayama, Shigeki},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178069},\n issn = {2220-4806},\n keywords = {performance rendering,\ - \ polyphonic expression, statistical modeling, conditional random fields },\n\ - \ pages = {96--99},\n title = {Polyhymnia : An Automatic Piano Performance System\ - \ with Statistical Modeling of Polyphonic Expression and Musical Symbol Interpretation},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_096.pdf},\n year = {2011}\n\ + ID: Franco2004 + abstract: 'This paper begins by evaluating various systems in terms of factors for + building interactive audiovisual environments. The main issues for flexibility + and expressiveness in the generation of dynamic sounds and images are then isolated. + The design and development of an audiovisual system prototype is described at + the end. ' + address: 'Hamamatsu, Japan' + author: 'Franco, Enrique and Griffith, Niall J. and Fernström, Mikael' + bibtex: "@inproceedings{Franco2004,\n abstract = {This paper begins by evaluating\ + \ various systems in terms of factors for building interactive audiovisual environments.\ + \ The main issues for flexibility and expressiveness in the generation of dynamic\ + \ sounds and images are then isolated. The design and development of an audiovisual\ + \ system prototype is described at the end. },\n address = {Hamamatsu, Japan},\n\ + \ author = {Franco, Enrique and Griffith, Niall J. 
and Fernstr\\''{o}m, Mikael},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176599},\n issn = {2220-4806},\n\ + \ keywords = {Audiovisual, composition, performance, gesture, image, representation,\ + \ mapping, expressiveness.},\n pages = {165--168},\n title = {Issues for Designing\ + \ a Flexible Expressive Audiovisual System for Real-time Performance \\& Composition},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_165.pdf},\n year = {2004}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178069 + doi: 10.5281/zenodo.1176599 issn: 2220-4806 - keywords: 'performance rendering, polyphonic expression, statistical modeling, conditional - random fields ' - pages: 96--99 - title: 'Polyhymnia : An Automatic Piano Performance System with Statistical Modeling - of Polyphonic Expression and Musical Symbol Interpretation' - url: http://www.nime.org/proceedings/2011/nime2011_096.pdf - year: 2011 + keywords: 'Audiovisual, composition, performance, gesture, image, representation, + mapping, expressiveness.' + pages: 165--168 + title: Issues for Designing a Flexible Expressive Audiovisual System for Real-time + Performance & Composition + url: http://www.nime.org/proceedings/2004/nime2004_165.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Carrascal2011 - abstract: 'Audio mixing is the adjustment of relative volumes, panning and other - parameters corresponding to different soundsources, in order to create a technically - and aesthetically adequate sound sum. To do this, audio engineers employ "panpots" - and faders, the standard controls in audio mixers. The design of such devices - has remained practically unchanged for decades since their introduction. 
At the - time,no usability studies seem to have been conducted on suchdevices, so one could - question if they are really optimizedfor the task they are meant for.This paper - proposes a new set of controls that might beused to simplify and/or improve the - performance of audiomixing tasks, taking into account the spatial characteristicsof - modern mixing technologies such as surround and 3Daudio and making use of multitouch - interface technologies.A preliminary usability test has shown promising results.' - address: 'Oslo, Norway' - author: 'Carrascal, Juan P. and Jordà, Sergi' - bibtex: "@inproceedings{Carrascal2011,\n abstract = {Audio mixing is the adjustment\ - \ of relative volumes, panning and other parameters corresponding to different\ - \ soundsources, in order to create a technically and aesthetically adequate sound\ - \ sum. To do this, audio engineers employ \"panpots\" and faders, the standard\ - \ controls in audio mixers. The design of such devices has remained practically\ - \ unchanged for decades since their introduction. At the time,no usability studies\ - \ seem to have been conducted on suchdevices, so one could question if they are\ - \ really optimizedfor the task they are meant for.This paper proposes a new set\ - \ of controls that might beused to simplify and/or improve the performance of\ - \ audiomixing tasks, taking into account the spatial characteristicsof modern\ - \ mixing technologies such as surround and 3Daudio and making use of multitouch\ - \ interface technologies.A preliminary usability test has shown promising results.},\n\ - \ address = {Oslo, Norway},\n author = {Carrascal, Juan P. 
and Jord\\`{a}, Sergi},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177983},\n issn = {2220-4806},\n\ - \ keywords = {audio mixing,control surface,multitouch,touchscreen},\n pages =\ - \ {100--103},\n title = {Multitouch Interface for Audio Mixing},\n url = {http://www.nime.org/proceedings/2011/nime2011_100.pdf},\n\ - \ year = {2011}\n}\n" + ID: Silva2004 + abstract: 'We describe a simple, computationally light, real-time system for tracking + the lower face and extracting informationabout the shape of the open mouth from + a video sequence.The system allows unencumbered control of audio synthesismodules + by action of the mouth. We report work in progressto use the mouth controller + to interact with a physical modelof sound production by the avian syrinx.' + address: 'Hamamatsu, Japan' + author: 'de Silva, Gamhewage C. and Smyth, Tamara and Lyons, Michael J.' + bibtex: "@inproceedings{Silva2004,\n abstract = {We describe a simple, computationally\ + \ light, real-time system for tracking the lower face and extracting informationabout\ + \ the shape of the open mouth from a video sequence.The system allows unencumbered\ + \ control of audio synthesismodules by action of the mouth. We report work in\ + \ progressto use the mouth controller to interact with a physical modelof sound\ + \ production by the avian syrinx.},\n address = {Hamamatsu, Japan},\n author =\ + \ {de Silva, Gamhewage C. 
and Smyth, Tamara and Lyons, Michael J.},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176667},\n issn = {2220-4806},\n keywords\ + \ = {Mouth Controller, Face Tracking, Bioacoustics},\n pages = {169--172},\n title\ + \ = {A Novel Face-tracking Mouth Controller and its Application to Interacting\ + \ with Bioacoustic Models},\n url = {http://www.nime.org/proceedings/2004/nime2004_169.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177983 + doi: 10.5281/zenodo.1176667 issn: 2220-4806 - keywords: 'audio mixing,control surface,multitouch,touchscreen' - pages: 100--103 - title: Multitouch Interface for Audio Mixing - url: http://www.nime.org/proceedings/2011/nime2011_100.pdf - year: 2011 + keywords: 'Mouth Controller, Face Tracking, Bioacoustics' + pages: 169--172 + title: A Novel Face-tracking Mouth Controller and its Application to Interacting + with Bioacoustic Models + url: http://www.nime.org/proceedings/2004/nime2004_169.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Derbinsky2011 - abstract: 'This paper explores how a general cognitive architecture canpragmatically - facilitate the development and exploration ofinteractive music interfaces on a - mobile platform. To thisend we integrated the Soar cognitive architecture into - themobile music meta-environment urMus. We develop anddemonstrate four artificial - agents which use diverse learningmechanisms within two mobile music interfaces. - We alsoinclude details of the computational performance of theseagents, evincing - that the architecture can support real-timeinteractivity on modern commodity hardware.' 
- address: 'Oslo, Norway' - author: 'Derbinsky, Nate and Essl, Georg' - bibtex: "@inproceedings{Derbinsky2011,\n abstract = {This paper explores how a general\ - \ cognitive architecture canpragmatically facilitate the development and exploration\ - \ ofinteractive music interfaces on a mobile platform. To thisend we integrated\ - \ the Soar cognitive architecture into themobile music meta-environment urMus.\ - \ We develop anddemonstrate four artificial agents which use diverse learningmechanisms\ - \ within two mobile music interfaces. We alsoinclude details of the computational\ - \ performance of theseagents, evincing that the architecture can support real-timeinteractivity\ - \ on modern commodity hardware.},\n address = {Oslo, Norway},\n author = {Derbinsky,\ - \ Nate and Essl, Georg},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177993},\n\ - \ issn = {2220-4806},\n keywords = {cognitive architecture,machine learning,mobile\ - \ music},\n pages = {104--107},\n title = {Cognitive Architecture in Mobile Music\ - \ Interactions},\n url = {http://www.nime.org/proceedings/2011/nime2011_104.pdf},\n\ - \ year = {2011}\n}\n" + ID: Nagashima2004 + abstract: 'In this paper, I would like to introduce my experimental study of multimedia + psychology. My initial focus of investigation is the interaction between perceptions + of auditory and visual beats. When the musical and graphical beats are completely + synchronized with each other, as in a music video for promotional purposes, the + audience feels that they are natural and comforting. My initial experiment has + proved that the actual tempos of music and images are a little different. If a + slight timelag exists between the musical and pictorial beats, the audience tries + to keep them in synchronization by unconsciously changing the interpretation of + the time-based beat points. 
As the lag increases over time, the audience seems + to perceive that the beat synchronization has changed from being more downbeat + to more upbeat, and continues enjoying it. I have developed an experiment system + that can generateand control out-of-phase visual and auditory beats in real time, + and have tested many subjects with it. This paper describes the measurement of + time lags generated in the experiment system, as part of my psychological experiment.' + address: 'Hamamatsu, Japan' + author: 'Nagashima, Yoichi' + bibtex: "@inproceedings{Nagashima2004,\n abstract = {In this paper, I would like\ + \ to introduce my experimental study of multimedia psychology. My initial focus\ + \ of investigation is the interaction between perceptions of auditory and visual\ + \ beats. When the musical and graphical beats are completely synchronized with\ + \ each other, as in a music video for promotional purposes, the audience feels\ + \ that they are natural and comforting. My initial experiment has proved that\ + \ the actual tempos of music and images are a little different. If a slight timelag\ + \ exists between the musical and pictorial beats, the audience tries to keep them\ + \ in synchronization by unconsciously changing the interpretation of the time-based\ + \ beat points. As the lag increases over time, the audience seems to perceive\ + \ that the beat synchronization has changed from being more downbeat to more upbeat,\ + \ and continues enjoying it. I have developed an experiment system that can generateand\ + \ control out-of-phase visual and auditory beats in real time, and have tested\ + \ many subjects with it. 
This paper describes the measurement of time lags generated\ + \ in the experiment system, as part of my psychological experiment.},\n address\ + \ = {Hamamatsu, Japan},\n author = {Nagashima, Yoichi},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176641},\n issn = {2220-4806},\n pages = {173--176},\n\ + \ title = {Measurement of Latency in Interactive Multimedia Art},\n url = {http://www.nime.org/proceedings/2004/nime2004_173.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177993 + doi: 10.5281/zenodo.1176641 issn: 2220-4806 - keywords: 'cognitive architecture,machine learning,mobile music' - pages: 104--107 - title: Cognitive Architecture in Mobile Music Interactions - url: http://www.nime.org/proceedings/2011/nime2011_104.pdf - year: 2011 + pages: 173--176 + title: Measurement of Latency in Interactive Multimedia Art + url: http://www.nime.org/proceedings/2004/nime2004_173.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Smith2011 - abstract: 'Supervised machine learning enables complex many-to-manymappings and - control schemes needed in interactive performance systems. One of the persistent - problems in theseapplications is generating, identifying and choosing inputoutput - pairings for training. This poses problems of scope(limiting the realm of potential - control inputs), effort (requiring significant pre-performance training time), - and cognitive load (forcing the performer to learn and remember thecontrol areas). - We discuss the creation and implementationof an automatic "supervisor", using - unsupervised machinelearning algorithms to train a supervised neural networkon - the fly. 
This hierarchical arrangement enables networktraining in real time based - on the musical or gestural control inputs employed in a performance, aiming at - freeing theperformer to operate in a creative, intuitive realm, makingthe machine - control transparent and automatic. Three implementations of this self supervised - model driven by iPod,iPad, and acoustic violin are described.' - address: 'Oslo, Norway' - author: 'Smith, Benjamin D. and Garnett, Guy E.' - bibtex: "@inproceedings{Smith2011,\n abstract = {Supervised machine learning enables\ - \ complex many-to-manymappings and control schemes needed in interactive performance\ - \ systems. One of the persistent problems in theseapplications is generating,\ - \ identifying and choosing inputoutput pairings for training. This poses problems\ - \ of scope(limiting the realm of potential control inputs), effort (requiring\ - \ significant pre-performance training time), and cognitive load (forcing the\ - \ performer to learn and remember thecontrol areas). We discuss the creation and\ - \ implementationof an automatic \"supervisor\", using unsupervised machinelearning\ - \ algorithms to train a supervised neural networkon the fly. This hierarchical\ - \ arrangement enables networktraining in real time based on the musical or gestural\ - \ control inputs employed in a performance, aiming at freeing theperformer to\ - \ operate in a creative, intuitive realm, makingthe machine control transparent\ - \ and automatic. Three implementations of this self supervised model driven by\ - \ iPod,iPad, and acoustic violin are described.},\n address = {Oslo, Norway},\n\ - \ author = {Smith, Benjamin D. 
and Garnett, Guy E.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178169},\n issn = {2220-4806},\n keywords = {NIME, machine\ - \ learning, interactive computer music, machine listening, improvisation, adaptive\ - \ resonance theory },\n pages = {108--111},\n title = {The Self-Supervising Machine},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_108.pdf},\n year = {2011}\n\ + ID: Ishida2004 + abstract: 'In this paper, we describe a novel improvisation supporting system based + on correcting musically unnatural melodies. Since improvisation is the musical + performance style that involves creating melodies while playing, it is not easy + even for the people who can play musical instruments. However, previous studies + have not dealt with improvisation support for the people who can play musical + instruments but cannot improvise. In this study, to support such players'' improvisation, + we propose a novel improvisation supporting system called ism, which corrects + musically unnatural melodies automatically. The main issue in realizing this system + is how to detect notes to be corrected (i.e., musically unnatural or inappropriate). + We propose a method for detecting notes to be corrected based on the N-gram model. + This method first calculates N-gram probabilities of played notes, and then judges + notes with low N-gram probabilities to be corrected. Experimental results show + that the N-gram-based melody correction and the proposed system are useful for + supporting improvisation.' 
+ address: 'Hamamatsu, Japan' + author: 'Ishida, Katsuhisa and Kitahara, Tetsuro and Takeda, Masayuki' + bibtex: "@inproceedings{Ishida2004,\n abstract = {In this paper, we describe a novel\ + \ improvisation supporting system based on correcting musically unnatural melodies.\ + \ Since improvisation is the musical performance style that involves creating\ + \ melodies while playing, it is not easy even for the people who can play musical\ + \ instruments. However, previous studies have not dealt with improvisation support\ + \ for the people who can play musical instruments but cannot improvise. In this\ + \ study, to support such players' improvisation, we propose a novel improvisation\ + \ supporting system called ism, which corrects musically unnatural melodies automatically.\ + \ The main issue in realizing this system is how to detect notes to be corrected\ + \ (i.e., musically unnatural or inappropriate). We propose a method for detecting\ + \ notes to be corrected based on the N-gram model. This method first calculates\ + \ N-gram probabilities of played notes, and then judges notes with low N-gram\ + \ probabilities to be corrected. 
Experimental results show that the N-gram-based\ + \ melody correction and the proposed system are useful for supporting improvisation.},\n\ + \ address = {Hamamatsu, Japan},\n author = {Ishida, Katsuhisa and Kitahara, Tetsuro\ + \ and Takeda, Masayuki},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176617},\n\ + \ issn = {2220-4806},\n keywords = {Improvisation support, jam session, melody\ + \ correction, N-gram model, melody modeling, musical instrument},\n pages = {177--180},\n\ + \ title = {ism: Improvisation Supporting System based on Melody Correction},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_177.pdf},\n year = {2004}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178169 + doi: 10.5281/zenodo.1176617 issn: 2220-4806 - keywords: 'NIME, machine learning, interactive computer music, machine listening, - improvisation, adaptive resonance theory ' - pages: 108--111 - title: The Self-Supervising Machine - url: http://www.nime.org/proceedings/2011/nime2011_108.pdf - year: 2011 + keywords: 'Improvisation support, jam session, melody correction, N-gram model, + melody modeling, musical instrument' + pages: 177--180 + title: 'ism: Improvisation Supporting System based on Melody Correction' + url: http://www.nime.org/proceedings/2004/nime2004_177.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Albin2011 - abstract: 'A mixed media tool was created that promotes ensemblevirtuosity through - tight coordination and interdepence inmusical performance. Two different types - of performers interact with a virtual space using Wii remote and tangibleinterfaces - using the reacTIVision toolkit [11]. One group ofperformers uses a tangible tabletop - interface to place andmove sound objects in a virtual environment. 
The soundobjects - are represented by visual avatars and have audiosamples associated with them. - A second set of performersmake use of Wii remotes to create triggering waves thatcan - collide with those sound objects. Sound is only produced upon collision of the - waves with the sound objects.What results is a performance in which users must - negotiate through a physical and virtual space and are positionedto work together - to create musical pieces.' - address: 'Oslo, Norway' - author: 'Albin, Aaron and Şentürk, Sertan and Van Troyer, Akito and Blosser, Brian - and Jan, Oliver and Weinberg, Gil' - bibtex: "@inproceedings{Albin2011,\n abstract = {A mixed media tool was created\ - \ that promotes ensemblevirtuosity through tight coordination and interdepence\ - \ inmusical performance. Two different types of performers interact with a virtual\ - \ space using Wii remote and tangibleinterfaces using the reacTIVision toolkit\ - \ [11]. One group ofperformers uses a tangible tabletop interface to place andmove\ - \ sound objects in a virtual environment. The soundobjects are represented by\ - \ visual avatars and have audiosamples associated with them. A second set of performersmake\ - \ use of Wii remotes to create triggering waves thatcan collide with those sound\ - \ objects. 
Sound is only produced upon collision of the waves with the sound objects.What\ - \ results is a performance in which users must negotiate through a physical and\ - \ virtual space and are positionedto work together to create musical pieces.},\n\ - \ address = {Oslo, Norway},\n author = {Albin, Aaron and \\c{S}ent\\''{u}rk, Sertan\ - \ and Van Troyer, Akito and Blosser, Brian and Jan, Oliver and Weinberg, Gil},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177939},\n issn = {2220-4806},\n\ - \ keywords = {reacTIVision, processing, ensemble, mixed media, virtualization,\ - \ tangible, sample },\n pages = {112--115},\n title = {Beatscape , a Mixed Virtual-Physical\ - \ Environment for Musical Ensembles},\n url = {http://www.nime.org/proceedings/2011/nime2011_112.pdf},\n\ - \ year = {2011}\n}\n" + ID: Singer2004 + abstract: 'This paper describes new work and creations of LEMUR, agroup of artists + and technologists creating robotic musicalinstruments.' 
+ address: 'Hamamatsu, Japan' + author: 'Singer, Eric and Feddersen, Jeff and Redmon, Chad and Bowen, Bil' + bibtex: "@inproceedings{Singer2004,\n abstract = {This paper describes new work\ + \ and creations of LEMUR, agroup of artists and technologists creating robotic\ + \ musicalinstruments.},\n address = {Hamamatsu, Japan},\n author = {Singer, Eric\ + \ and Feddersen, Jeff and Redmon, Chad and Bowen, Bil},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1176669},\n issn = {2220-4806},\n keywords = {additional\ + \ computer or special,commands allows,familiar tools with no,improvisations,the\ + \ musician or composer,to control the instrument,use of standard midi,using},\n\ + \ pages = {181--184},\n title = {LEMUR's Musical Robots},\n url = {http://www.nime.org/proceedings/2004/nime2004_181.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177939 + doi: 10.5281/zenodo.1176669 issn: 2220-4806 - keywords: 'reacTIVision, processing, ensemble, mixed media, virtualization, tangible, - sample ' - pages: 112--115 - title: 'Beatscape , a Mixed Virtual-Physical Environment for Musical Ensembles' - url: http://www.nime.org/proceedings/2011/nime2011_112.pdf - year: 2011 + keywords: 'additional computer or special,commands allows,familiar tools with no,improvisations,the + musician or composer,to control the instrument,use of standard midi,using' + pages: 181--184 + title: LEMUR's Musical Robots + url: http://www.nime.org/proceedings/2004/nime2004_181.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Fabiani2011 - abstract: 'This paper presents MoodifierLive, a mobile phone application for interactive - control of rule-based automatic musicperformance. 
Five different interaction modes - are available,of which one allows for collaborative performances with upto four - participants, and two let the user control the expressive performance using expressive - hand gestures. Evaluations indicate that the application is interesting, fun touse, - and that the gesture modes, especially the one based ondata from free expressive - gestures, allow for performanceswhose emotional content matches that of the gesture - thatproduced them.' - address: 'Oslo, Norway' - author: 'Fabiani, Marco and Dubus, Gaël and Bresin, Roberto' - bibtex: "@inproceedings{Fabiani2011,\n abstract = {This paper presents MoodifierLive,\ - \ a mobile phone application for interactive control of rule-based automatic musicperformance.\ - \ Five different interaction modes are available,of which one allows for collaborative\ - \ performances with upto four participants, and two let the user control the expressive\ - \ performance using expressive hand gestures. Evaluations indicate that the application\ - \ is interesting, fun touse, and that the gesture modes, especially the one based\ - \ ondata from free expressive gestures, allow for performanceswhose emotional\ - \ content matches that of the gesture thatproduced them.},\n address = {Oslo,\ - \ Norway},\n author = {Fabiani, Marco and Dubus, Ga\\''{e}l and Bresin, Roberto},\n\ + ID: Hornof2004 + abstract: 'Though musical performers routinely use eye movements to communicate + with each other during musical performances, very few performers or composers + have used eye tracking devices to direct musical compositions and performances. + EyeMusic is a system that uses eye movements as an input to electronic music compositions. + The eye movements can directly control the music, or the music can respond to + the eyes moving around a visual scene. 
EyeMusic is implemented so that any composer + using established composition software can incorporate prerecorded eye movement + data into their musical compositions.' + address: 'Hamamatsu, Japan' + author: 'Hornof, Anthony J. and Sato, Linda' + bibtex: "@inproceedings{Hornof2004,\n abstract = {Though musical performers routinely\ + \ use eye movements to communicate with each other during musical performances,\ + \ very few performers or composers have used eye tracking devices to direct musical\ + \ compositions and performances. EyeMusic is a system that uses eye movements\ + \ as an input to electronic music compositions. The eye movements can directly\ + \ control the music, or the music can respond to the eyes moving around a visual\ + \ scene. EyeMusic is implemented so that any composer using established composition\ + \ software can incorporate prerecorded eye movement data into their musical compositions.},\n\ + \ address = {Hamamatsu, Japan},\n author = {Hornof, Anthony J. and Sato, Linda},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178005},\n issn = {2220-4806},\n\ - \ keywords = {Expressive performance, gesture, collaborative performance, mobile\ - \ phone },\n pages = {116--119},\n title = {MoodifierLive : Interactive and Collaborative\ - \ Expressive Music Performance on Mobile Devices},\n url = {http://www.nime.org/proceedings/2011/nime2011_116.pdf},\n\ - \ year = {2011}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1176613},\n issn = {2220-4806},\n\ + \ keywords = {Electronic music composition, eye movements, eye tracking, human-computer\ + \ interaction, Max/MSP.},\n pages = {185--188},\n title = {EyeMusic: Making Music\ + \ with the Eyes},\n url = {http://www.nime.org/proceedings/2004/nime2004_185.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 
10.5281/zenodo.1178005 + doi: 10.5281/zenodo.1176613 issn: 2220-4806 - keywords: 'Expressive performance, gesture, collaborative performance, mobile phone ' - pages: 116--119 - title: 'MoodifierLive : Interactive and Collaborative Expressive Music Performance - on Mobile Devices' - url: http://www.nime.org/proceedings/2011/nime2011_116.pdf - year: 2011 + keywords: 'Electronic music composition, eye movements, eye tracking, human-computer + interaction, Max/MSP.' + pages: 185--188 + title: 'EyeMusic: Making Music with the Eyes' + url: http://www.nime.org/proceedings/2004/nime2004_185.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Schroeder2011 - address: 'Oslo, Norway' - author: 'Schroeder, Benjamin and Ainger, Marc and Parent, Richard' - bibtex: "@inproceedings{Schroeder2011,\n address = {Oslo, Norway},\n author = {Schroeder,\ - \ Benjamin and Ainger, Marc and Parent, Richard},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178157},\n issn = {2220-4806},\n keywords = {a human performer,agents,agents\ - \ smoothly changing the,behavioral animation,figure 1,length of,physically based\ - \ sound,pro-,strings being played by},\n pages = {120--123},\n title = {A Physically\ - \ Based Sound Space for Procedural Agents},\n url = {http://www.nime.org/proceedings/2011/nime2011_120.pdf},\n\ - \ year = {2011}\n}\n" + ID: Argo2004 + abstract: 'When working with sample-based media, a performer is managing timelines, + loop points, sample parameters and effects parameters. The Slidepipe is a performance + controller that gives the artist a visually simple way to work with their material. + Its design is modular and lightweight, so it can be easily transported and quickly + assembled. Also, its large stature magnifies the gestures associated with its + play, providing a more convincing performance. 
In this paper, I will describe + what the controller is, how this new controller interface has affected my live + performance, and how it can be used in different performance scenarios. ' + address: 'Hamamatsu, Japan' + author: 'Argo, Mark' + bibtex: "@inproceedings{Argo2004,\n abstract = {When working with sample-based media,\ + \ a performer is managing timelines, loop points, sample parameters and effects\ + \ parameters. The Slidepipe is a performance controller that gives the artist\ + \ a visually simple way to work with their material. Its design is modular and\ + \ lightweight, so it can be easily transported and quickly assembled. Also, its\ + \ large stature magnifies the gestures associated with its play, providing a more\ + \ convincing performance. In this paper, I will describe what the controller is,\ + \ how this new controller interface has affected my live performance, and how\ + \ it can be used in different performance scenarios. },\n address = {Hamamatsu,\ + \ Japan},\n author = {Argo, Mark},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176581},\n\ + \ issn = {2220-4806},\n keywords = {Controller, Sample Manipulation, Live Performance,\ + \ Open Sound Control, Human Computer Interaction},\n pages = {189--192},\n title\ + \ = {The Slidepipe: A Timeline-Based Controller for Real-Time Sample Manipulation},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_189.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178157 + doi: 10.5281/zenodo.1176581 issn: 2220-4806 - keywords: 'a human performer,agents,agents smoothly changing the,behavioral animation,figure - 1,length of,physically based sound,pro-,strings being played by' - pages: 120--123 - title: A Physically Based Sound Space for Procedural Agents - url: 
http://www.nime.org/proceedings/2011/nime2011_120.pdf - year: 2011 + keywords: 'Controller, Sample Manipulation, Live Performance, Open Sound Control, + Human Computer Interaction' + pages: 189--192 + title: 'The Slidepipe: A Timeline-Based Controller for Real-Time Sample Manipulation' + url: http://www.nime.org/proceedings/2004/nime2004_189.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Garcia2011 - abstract: 'This paper presents a study of blowing pressure profilesacquired from - recorder playing. Blowing pressure signalsare captured from real performance by - means of a a lowintrusiveness acquisition system constructed around commercial - pressure sensors based on piezoelectric transducers.An alto recorder was mechanically - modified by a luthierto allow the measurement and connection of sensors whilerespecting - playability and intrusiveness. A multi-modaldatabase including aligned blowing - pressure and sound signals is constructed from real practice, covering the performance - space by considering different fundamental frequencies, dynamics, articulations - and note durations. Once signals were pre-processed and segmented, a set of temporalenvelope - features were defined as a basis for studying andconstructing a simplified model - of blowing pressure profilesin different performance contexts.' - address: 'Oslo, Norway' - author: 'García, Francisco and Vinceslas, Leny and Tubau, Josep and Maestre, Esteban' - bibtex: "@inproceedings{Garcia2011,\n abstract = {This paper presents a study of\ - \ blowing pressure profilesacquired from recorder playing. 
Blowing pressure signalsare\ - \ captured from real performance by means of a a lowintrusiveness acquisition\ - \ system constructed around commercial pressure sensors based on piezoelectric\ - \ transducers.An alto recorder was mechanically modified by a luthierto allow\ - \ the measurement and connection of sensors whilerespecting playability and intrusiveness.\ - \ A multi-modaldatabase including aligned blowing pressure and sound signals is\ - \ constructed from real practice, covering the performance space by considering\ - \ different fundamental frequencies, dynamics, articulations and note durations.\ - \ Once signals were pre-processed and segmented, a set of temporalenvelope features\ - \ were defined as a basis for studying andconstructing a simplified model of blowing\ - \ pressure profilesin different performance contexts.},\n address = {Oslo, Norway},\n\ - \ author = {Garc\\'{\\i}a, Francisco and Vinceslas, Leny and Tubau, Josep and\ - \ Maestre, Esteban},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178025},\n\ - \ issn = {2220-4806},\n keywords = {blowing,instrumental gesture,multi-modal data,pressure,recorder,wind\ - \ instrument},\n pages = {124--127},\n title = {Acquisition and Study of Blowing\ - \ Pressure Profiles in Recorder Playing},\n url = {http://www.nime.org/proceedings/2011/nime2011_124.pdf},\n\ - \ year = {2011}\n}\n" + ID: Burtner2004 + abstract: This paper describes a theory for modulated objects based on observations + of recent musical interface design trends. The theory implies extensions to an + object-based approach to controller design. Combining NIME research with ethnographic + study of shamanic traditions. The author discusses the creation of new controllers + based on the shamanic use of ritual objects. 
+ address: 'Hamamatsu, Japan' + author: 'Burtner, Matthew' + bibtex: "@inproceedings{Burtner2004,\n abstract = {This paper describes a theory\ + \ for modulated objects based on observations of recent musical interface design\ + \ trends. The theory implies extensions to an object-based approach to controller\ + \ design. Combining NIME research with ethnographic study of shamanic traditions.\ + \ The author discusses the creation of new controllers based on the shamanic use\ + \ of ritual objects.},\n address = {Hamamatsu, Japan},\n author = {Burtner, Matthew},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1176585},\n issn = {2220-4806},\n\ + \ keywords = {Music and Video Controllers, New Interface Design, Music Composition,\ + \ Multimedia, Mythology, Shamanism, Ecoacoustics},\n pages = {193--196},\n title\ + \ = {A Theory of Modulated Objects for New Shamanic Controller Design},\n url\ + \ = {http://www.nime.org/proceedings/2004/nime2004_193.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178025 + doi: 10.5281/zenodo.1176585 issn: 2220-4806 - keywords: 'blowing,instrumental gesture,multi-modal data,pressure,recorder,wind - instrument' - pages: 124--127 - title: Acquisition and Study of Blowing Pressure Profiles in Recorder Playing - url: http://www.nime.org/proceedings/2011/nime2011_124.pdf - year: 2011 + keywords: 'Music and Video Controllers, New Interface Design, Music Composition, + Multimedia, Mythology, Shamanism, Ecoacoustics' + pages: 193--196 + title: A Theory of Modulated Objects for New Shamanic Controller Design + url: http://www.nime.org/proceedings/2004/nime2004_193.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Friberg2011 - abstract: 'This is an overview of the three installations Hoppsa Universum, CLOSE - and Flying Carpet. 
They were all designed as choreographed sound and music installations - controlled by the visitors movements. The perspective is from an artistic goal/vision - intention in combination with the technical challenges and possibilities. All - three installations were realized with video cameras in the ceiling registering - the users'' position or movement. The video analysis was then controlling different - types of interactive software audio players. Different aspects like narrativity, - user control, and technical limitations are discussed. ' - address: 'Oslo, Norway' - author: 'Friberg, Anders and Källblad, Anna' - bibtex: "@inproceedings{Friberg2011,\n abstract = {This is an overview of the three\ - \ installations Hoppsa Universum, CLOSE and Flying Carpet. They were all designed\ - \ as choreographed sound and music installations controlled by the visitors movements.\ - \ The perspective is from an artistic goal/vision intention in combination with\ - \ the technical challenges and possibilities. All three installations were realized\ - \ with video cameras in the ceiling registering the users' position or movement.\ - \ The video analysis was then controlling different types of interactive software\ - \ audio players. Different aspects like narrativity, user control, and technical\ - \ limitations are discussed. },\n address = {Oslo, Norway},\n author = {Friberg,\ - \ Anders and K\\''{a}llblad, Anna},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178017},\n\ - \ issn = {2220-4806},\n keywords = {Gestures, dance, choreography, music installation,\ - \ interactive music. 
},\n pages = {128--131},\n title = {Experiences from Video-Controlled\ - \ Sound Installations},\n url = {http://www.nime.org/proceedings/2011/nime2011_128.pdf},\n\ - \ year = {2011}\n}\n" + ID: Pelletier2004 + abstract: 'In this paper, I will describe a computer vision-based musical performance + system that uses morphological assessments to provide control data. Using shape + analysis allows the system to provide qualitative descriptors of the scene being + captured while ensuring its use in a wide variety of different settings. This + system was implemented under Max/MSP/Jitter, augmented with a number of external + objects. (1)' + address: 'Hamamatsu, Japan' + author: 'Pelletier, Jean-Marc' + bibtex: "@inproceedings{Pelletier2004,\n abstract = {In this paper, I will describe\ + \ a computer vision-based musical performance system that uses morphological assessments\ + \ to provide control data. Using shape analysis allows the system to provide qualitative\ + \ descriptors of the scene being captured while ensuring its use in a wide variety\ + \ of different settings. This system was implemented under Max/MSP/Jitter, augmented\ + \ with a number of external objects. (1)},\n address = {Hamamatsu, Japan},\n author\ + \ = {Pelletier, Jean-Marc},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176653},\n\ + \ issn = {2220-4806},\n keywords = {computer vision,image analysis,maxmsp,morphology,musical},\n\ + \ pages = {197--198},\n title = {A Shape-Based Approach to Computer Vision Musical\ + \ Performance Systems},\n url = {http://www.nime.org/proceedings/2004/nime2004_197.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178017 + doi: 10.5281/zenodo.1176653 issn: 2220-4806 - keywords: 'Gestures, dance, choreography, music installation, interactive music. 
' - pages: 128--131 - title: Experiences from Video-Controlled Sound Installations - url: http://www.nime.org/proceedings/2011/nime2011_128.pdf - year: 2011 + keywords: 'computer vision,image analysis,maxmsp,morphology,musical' + pages: 197--198 + title: A Shape-Based Approach to Computer Vision Musical Performance Systems + url: http://www.nime.org/proceedings/2004/nime2004_197.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: dAlessandro2011 - address: 'Oslo, Norway' - author: 'd''Alessandro, Nicolas and Calderon, Roberto and M\''''{u}ller, Stefanie' - bibtex: "@inproceedings{dAlessandro2011,\n address = {Oslo, Norway},\n author =\ - \ {d'Alessandro, Nicolas and Calderon, Roberto and M\\''{u}ller, Stefanie},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177933},\n issn = {2220-4806},\n\ - \ keywords = {agent,architecture,collaboration,figure 1,installation,instrument,interactive\ - \ fabric,light,mo-,movements in the installation,space and,tion,voice synthesis},\n\ - \ pages = {132--135},\n title = {ROOM #81---Agent-Based Instrument for Experiencing\ - \ Architectural and Vocal Cues},\n url = {http://www.nime.org/proceedings/2011/nime2011_132.pdf},\n\ - \ year = {2011}\n}\n" + ID: Hughes2004 + abstract: 'The Epipe is a novel electronic woodwind controller with continuous tonehole + coverage sensing, an initial design for which was introduced at NIME ''03. Since + then, we have successfully completed two fully operational prototypes. This short + paper describes some of the issues encountered during the design and construction + of this controller. It also details our own early experiences and impressions + of the interface as well as its technical specifications. 
' + address: 'Hamamatsu, Japan' + author: 'Hughes, Stephen and Cannon, Cormac and O''Modhrain, Sile' + bibtex: "@inproceedings{Hughes2004,\n abstract = {The Epipe is a novel electronic\ + \ woodwind controller with continuous tonehole coverage sensing, an initial design\ + \ for which was introduced at NIME '03. Since then, we have successfully completed\ + \ two fully operational prototypes. This short paper describes some of the issues\ + \ encountered during the design and construction of this controller. It also details\ + \ our own early experiences and impressions of the interface as well as its technical\ + \ specifications. },\n address = {Hamamatsu, Japan},\n author = {Hughes, Stephen\ + \ and Cannon, Cormac and O'Modhrain, Sile},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1176615},\n issn = {2220-4806},\n keywords = {woodwind controller,\ + \ variable tonehole control, MIDI, capacitive sensing},\n pages = {199--200},\n\ + \ title = {Epipe : A Novel Electronic Woodwind Controller},\n url = {http://www.nime.org/proceedings/2004/nime2004_199.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177933 + doi: 10.5281/zenodo.1176615 issn: 2220-4806 - keywords: 'agent,architecture,collaboration,figure 1,installation,instrument,interactive - fabric,light,mo-,movements in the installation,space and,tion,voice synthesis' - pages: 132--135 - title: "ROOM #81---Agent-Based Instrument for Experiencing Architectural and Vocal\ - \ Cues" - url: http://www.nime.org/proceedings/2011/nime2011_132.pdf - year: 2011 + keywords: 'woodwind controller, variable tonehole control, MIDI, capacitive sensing' + pages: 199--200 + title: 'Epipe : A Novel Electronic Woodwind Controller' + url: http://www.nime.org/proceedings/2004/nime2004_199.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: 
Kuhara2011 - abstract: 'We developed a kinetic particles synthesizer for mobile devices having - a multi-touch screen such as a tablet PC and a smart phone. This synthesizer generates - music based on the kinetics of particles under a two-dimensional physics engine. - The particles move in the screen to synthesize sounds according to their own physical - properties, which are shape, size, mass, linear and angular velocity, friction, - restitution, etc. If a particle collides with others, a percussive sound is generated. - A player can play music by the simple operation of touching or dragging on the - screen of the device. Using a three-axis acceleration sensor, a player can perform - music by shuffling or tilting the device. Each particle sounds just a simple tone. - However, a large amount of various particles play attractive music by aggregating - their sounds. This concept has been inspired by natural sounds made from an assembly - of simple components, for example, rustling leaves or falling rain. For a novice - who has no experience of playing a musical instrument, it is easy to learn how - to play instantly and enjoy performing music with intuitive operation. Our system - is used for musical instruments for interactive music entertainment. ' - address: 'Oslo, Norway' - author: 'Kuhara, Yasuo and Kobayashi, Daiki' - bibtex: "@inproceedings{Kuhara2011,\n abstract = {We developed a kinetic particles\ - \ synthesizer for mobile devices having a multi-touch screen such as a tablet\ - \ PC and a smart phone. This synthesizer generates music based on the kinetics\ - \ of particles under a two-dimensional physics engine. The particles move in the\ - \ screen to synthesize sounds according to their own physical properties, which\ - \ are shape, size, mass, linear and angular velocity, friction, restitution, etc.\ - \ If a particle collides with others, a percussive sound is generated. 
A player\ - \ can play music by the simple operation of touching or dragging on the screen\ - \ of the device. Using a three-axis acceleration sensor, a player can perform\ - \ music by shuffling or tilting the device. Each particle sounds just a simple\ - \ tone. However, a large amount of various particles play attractive music by\ - \ aggregating their sounds. This concept has been inspired by natural sounds made\ - \ from an assembly of simple components, for example, rustling leaves or falling\ - \ rain. For a novice who has no experience of playing a musical instrument, it\ - \ is easy to learn how to play instantly and enjoy performing music with intuitive\ - \ operation. Our system is used for musical instruments for interactive music\ - \ entertainment. },\n address = {Oslo, Norway},\n author = {Kuhara, Yasuo and\ - \ Kobayashi, Daiki},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178079},\n\ - \ issn = {2220-4806},\n keywords = {Particle, Tablet PC, iPhone, iPod touch, iPad,\ - \ Smart phone, Kinetics, Touch screen, Physics engine. },\n pages = {136--137},\n\ - \ title = {Kinetic Particles Synthesizer Using Multi-Touch Screen Interface of\ - \ Mobile Devices},\n url = {http://www.nime.org/proceedings/2011/nime2011_136.pdf},\n\ - \ year = {2011}\n}\n" + ID: Morris2004 + abstract: 'This paper describes the SillyTone Squish Factory, a haptically engaging + musical interface. It contains the motivation behind the device''s development, + a description of the interface, various mappings of the interface to musical applications, + details of its construction, and the requirements to demo the interface. ' + address: 'Hamamatsu, Japan' + author: 'Morris, Geoffrey C. and Leitman, Sasha and Kassianidou, Marina' + bibtex: "@inproceedings{Morris2004,\n abstract = {This paper describes the SillyTone\ + \ Squish Factory, a haptically engaging musical interface. 
It contains the motivation\ + \ behind the device's development, a description of the interface, various mappings\ + \ of the interface to musical applications, details of its construction, and the\ + \ requirements to demo the interface. },\n address = {Hamamatsu, Japan},\n author\ + \ = {Morris, Geoffrey C. and Leitman, Sasha and Kassianidou, Marina},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1176639},\n issn = {2220-4806},\n pages\ + \ = {201--202},\n title = {SillyTone Squish Factory},\n url = {http://www.nime.org/proceedings/2004/nime2004_201.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178079 + doi: 10.5281/zenodo.1176639 issn: 2220-4806 - keywords: 'Particle, Tablet PC, iPhone, iPod touch, iPad, Smart phone, Kinetics, - Touch screen, Physics engine. ' - pages: 136--137 - title: Kinetic Particles Synthesizer Using Multi-Touch Screen Interface of Mobile - Devices - url: http://www.nime.org/proceedings/2011/nime2011_136.pdf - year: 2011 + pages: 201--202 + title: SillyTone Squish Factory + url: http://www.nime.org/proceedings/2004/nime2004_201.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Carlson2011 - address: 'Oslo, Norway' - author: 'Carlson, Chris and Marschner, Eli and Mccurry, Hunter' - bibtex: "@inproceedings{Carlson2011,\n address = {Oslo, Norway},\n author = {Carlson,\ - \ Chris and Marschner, Eli and Mccurry, Hunter},\n booktitle = {Proceedings of\ + ID: Steiner2004 + abstract: 'StickMusic is an instrument comprised of two haptic devices, a joystick + and a mouse, which control a phase vocoder in real time. The purpose is to experiment + with ideas of how to apply haptic feedback when controlling synthesis algorithms + that have no direct analogy to methods of generating sound in the physical world. 
' + address: 'Hamamatsu, Japan' + author: 'Steiner, Hans-Christoph' + bibtex: "@inproceedings{Steiner2004,\n abstract = {StickMusic is an instrument comprised\ + \ of two haptic devices, a joystick and a mouse, which control a phase vocoder\ + \ in real time. The purpose is to experiment with ideas of how to apply haptic\ + \ feedback when controlling synthesis algorithms that have no direct analogy to\ + \ methods of generating sound in the physical world. },\n address = {Hamamatsu,\ + \ Japan},\n author = {Steiner, Hans-Christoph},\n booktitle = {Proceedings of\ \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177981},\n issn = {2220-4806},\n keywords = {arduino,beagleboard,ccrma,force\ - \ feedback,haptics,jack,linux audio,multi-channel audio,nime,pd,pure data,satellite\ - \ ccrma,sound spatialization},\n pages = {138--139},\n title = {The Sound Flinger\ - \ : A Haptic Spatializer},\n url = {http://www.nime.org/proceedings/2011/nime2011_138.pdf},\n\ - \ year = {2011}\n}\n" + \ = {10.5281/zenodo.1176671},\n issn = {2220-4806},\n keywords = {haptic feedback,\ + \ gestural control, performance, joystick, mouse},\n pages = {203--204},\n title\ + \ = {StickMusic: Using Haptic Feedback with a Phase Vocoder},\n url = {http://www.nime.org/proceedings/2004/nime2004_203.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177981 + doi: 10.5281/zenodo.1176671 issn: 2220-4806 - keywords: 'arduino,beagleboard,ccrma,force feedback,haptics,jack,linux audio,multi-channel - audio,nime,pd,pure data,satellite ccrma,sound spatialization' - pages: 138--139 - title: 'The Sound Flinger : A Haptic Spatializer' - url: http://www.nime.org/proceedings/2011/nime2011_138.pdf - year: 2011 + keywords: 'haptic feedback, gestural control, performance, joystick, mouse' + pages: 203--204 + title: 'StickMusic: Using Haptic Feedback with a Phase 
Vocoder' + url: http://www.nime.org/proceedings/2004/nime2004_203.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Kondapalli2011 - abstract: 'Daft Datum is an autonomous new media artefact that takes input from - movement of the feet (i.e. tapping/stomping/stamping) on a wooden surface, underneath - which is a sensor sheet. The sensors in the sheet are mapped to various sound - samples and synthesized sounds. Attributes of the synthesized sound, such as pitch - and octave, can be controlled using the Nintendo Wii Remote. It also facilitates - switching between modes of sound and recording/playing back a segment of audio. - The result is music generated by dancing on the device that is further modulated - by a hand-held controller. ' - address: 'Oslo, Norway' - author: 'Kondapalli, Ravi and Sung, Ben-Zhen' - bibtex: "@inproceedings{Kondapalli2011,\n abstract = {Daft Datum is an autonomous\ - \ new media artefact that takes input from movement of the feet (i.e. tapping/stomping/stamping)\ - \ on a wooden surface, underneath which is a sensor sheet. The sensors in the\ - \ sheet are mapped to various sound samples and synthesized sounds. Attributes\ - \ of the synthesized sound, such as pitch and octave, can be controlled using\ - \ the Nintendo Wii Remote. It also facilitates switching between modes of sound\ - \ and recording/playing back a segment of audio. 
The result is music generated\ - \ by dancing on the device that is further modulated by a hand-held controller.\ - \ },\n address = {Oslo, Norway},\n author = {Kondapalli, Ravi and Sung, Ben-Zhen},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178075},\n issn = {2220-4806},\n\ - \ keywords = {Daft Datum, Wii, Dance Pad, Feet, Controller, Bluetooth, Musical\ - \ Interface, Dance, Sensor Sheet },\n pages = {140--141},\n title = {Daft Datum\ - \ -- An Interface for Producing Music Through Foot-based Interaction},\n url =\ - \ {http://www.nime.org/proceedings/2011/nime2011_140.pdf},\n year = {2011}\n}\n" + ID: Coduys2004 + abstract: 'High capacity of transmission lines (Ethernet in particular) is much + higher than what imposed by MIDI today. So it is possible to use capturing interfaces + with high-speed and high-resolution, thanks to the OSC protocol, for musical synthesis + (either in realtime or non real-time). These new interfaces offer many advantages, + not only in the area of musical composition with use of sensors but also in live + and interactive performances. In this manner, the processes of calibration and + signal processing are delocalized on a personal computer and augments possibilities + of processing. In this demo, we present two hardware interfaces developed in La + kitchen with corresponding processing to achieve a high-resolution, high-speed + sensor processing for musical applications. ' + address: 'Hamamatsu, Japan' + author: 'Coduys, Thierry and Henry, Cyrille and Cont, Arshia' + bibtex: "@inproceedings{Coduys2004,\n abstract = {High capacity of transmission\ + \ lines (Ethernet in particular) is much higher than what imposed by MIDI today.\ + \ So it is possible to use capturing interfaces with high-speed and high-resolution,\ + \ thanks to the OSC protocol, for musical synthesis (either in realtime or non\ + \ real-time). 
These new interfaces offer many advantages, not only in the area\ + \ of musical composition with use of sensors but also in live and interactive\ + \ performances. In this manner, the processes of calibration and signal processing\ + \ are delocalized on a personal computer and augments possibilities of processing.\ + \ In this demo, we present two hardware interfaces developed in La kitchen with\ + \ corresponding processing to achieve a high-resolution, high-speed sensor processing\ + \ for musical applications. },\n address = {Hamamatsu, Japan},\n author = {Coduys,\ + \ Thierry and Henry, Cyrille and Cont, Arshia},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1176587},\n issn = {2220-4806},\n keywords = {Interface, Sensors,\ + \ Calibration, Precision, OSC, Pure Data, Max/MSP.},\n pages = {205--206},\n title\ + \ = {TOASTER and KROONDE: High-Resolution and High- Speed Real-time Sensor Interfaces},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_205.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178075 + doi: 10.5281/zenodo.1176587 issn: 2220-4806 - keywords: 'Daft Datum, Wii, Dance Pad, Feet, Controller, Bluetooth, Musical Interface, - Dance, Sensor Sheet ' - pages: 140--141 - title: Daft Datum -- An Interface for Producing Music Through Foot-based Interaction - url: http://www.nime.org/proceedings/2011/nime2011_140.pdf - year: 2011 + keywords: 'Interface, Sensors, Calibration, Precision, OSC, Pure Data, Max/MSP.' 
+ pages: 205--206 + title: 'TOASTER and KROONDE: High-Resolution and High- Speed Real-time Sensor Interfaces' + url: http://www.nime.org/proceedings/2004/nime2004_205.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Martin2011 - abstract: 'This paper describes Strike on Stage, an interface and corresponding audio-visual performance work developed and - performed in 2010 by percussionists and media artists Chi-Hsia Lai and Charles Martin. The concept of - Strike on Stage is to integrate computer visuals and sound into animprovised - percussion performance. A large projection surface is positioned directly behind the performers, while acomputer - vision system tracks their movements. The setup allows computer visualisation - and sonification to be directly responsive and unified with the performers'' gestures.' - address: 'Oslo, Norway' - author: 'Martin, Charles and Lai, Chi-Hsia' - bibtex: "@inproceedings{Martin2011,\n abstract = {This paper describes Strike on\ - \ Stage, an interface and corresponding audio-visual performance work developed\ - \ and performed in 2010 by percussionists and media artists Chi-Hsia Lai and\ - \ Charles Martin. The concept of Strike on Stage is to integrate computer\ - \ visuals and sound into animprovised percussion performance. A large projection\ - \ surface is positioned directly behind the performers, while acomputer\ - \ vision system tracks their movements. 
The setup allows computer visualisation\ - \ and sonification to be directly responsive and unified with the performers'\ - \ gestures.},\n address = {Oslo, Norway},\n author = {Martin, Charles and Lai,\ - \ Chi-Hsia},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178103},\n issn\ - \ = {2220-4806},\n keywords = {computer vision, media performance, percussion},\n\ - \ pages = {142--143},\n title = {Strike on Stage: a Percussion and Media Performance},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_142.pdf},\n year = {2011}\n\ - }\n" + ID: Goto2004 + abstract: 'We will discuss the case study of application of the Virtual Musical + Instrument and Sound Synthesis. Doing this application, the main subject is advanced + Mapping Interface in order to connect these. For this experiment, our discussion + also refers to Neural Network, as well as a brief introduction of the Virtual + Musical Instrument "Le SuperPolm" and Gesture Controller "BodySuit".' + address: 'Hamamatsu, Japan' + author: 'Goto, Suguru and Suzuki, Takahiko' + bibtex: "@inproceedings{Goto2004,\n abstract = {We will discuss the case study of\ + \ application of the Virtual Musical Instrument and Sound Synthesis. Doing this\ + \ application, the main subject is advanced Mapping Interface in order to connect\ + \ these. 
For this experiment, our discussion also refers to Neural Network, as\ + \ well as a brief introduction of the Virtual Musical Instrument \"Le SuperPolm\"\ + \ and Gesture Controller \"BodySuit\".},\n address = {Hamamatsu, Japan},\n author\ + \ = {Goto, Suguru and Suzuki, Takahiko},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176605},\n\ + \ issn = {2220-4806},\n keywords = {Virtual Musical Instrument, Gesture Controller,\ + \ Mapping Interface},\n pages = {207--208},\n title = {The Case Study of Application\ + \ of Advanced Gesture Interface and Mapping Interface, Virtual Musical Instrument\ + \ \"Le SuperPolm\" and Gesture Controller \"BodySuit\"},\n url = {http://www.nime.org/proceedings/2004/nime2004_207.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178103 + doi: 10.5281/zenodo.1176605 issn: 2220-4806 - keywords: 'computer vision, media performance, percussion' - pages: 142--143 - title: 'Strike on Stage: a Percussion and Media Performance' - url: http://www.nime.org/proceedings/2011/nime2011_142.pdf - year: 2011 + keywords: 'Virtual Musical Instrument, Gesture Controller, Mapping Interface' + pages: 207--208 + title: 'The Case Study of Application of Advanced Gesture Interface and Mapping + Interface, Virtual Musical Instrument "Le SuperPolm" and Gesture Controller "BodySuit"' + url: http://www.nime.org/proceedings/2004/nime2004_207.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Caramiaux2011a - abstract: 'In this paper we present an experimental study concerninggestural embodiment - of environmental sounds in a listeningcontext. The presented work is part of a - project aiming atmodeling movement-sound relationships, with the end goalof proposing - novel approaches for designing musical instruments and sounding objects. 
The experiment - is based onsound stimuli corresponding to "causal" and "non-causal" sounds. It - is divided into a performance phase and an interview. The experiment is designed - to investigate possiblecorrelation between the perception of the "causality" of - environmental sounds and different gesture strategies for thesound embodiment. - In analogy with the perception of thesounds'' causality, we propose to distinguish - gestures that "mimic" a sound''s cause and gestures that "trace" a sound''smorphology - following temporal sound characteristics. Results from the interviews show that, - first, our causal soundsdatabase lead to consistent descriptions of the action - at theorigin of the sound and participants mimic this action. Second, non-causal - sounds lead to inconsistent metaphoric descriptions of the sound and participants - make gestures following sound "contours". Quantitatively, the results showthat - gesture variability is higher for causal sounds that noncausal sounds.' - address: 'Oslo, Norway' - author: 'Caramiaux, Baptiste and Susini, Patrick and Bianco, Tommaso and Bevilacqua, - Frédéric and Houix, Olivier and Schnell, Norbert and Misdariis, Nicolas' - bibtex: "@inproceedings{Caramiaux2011a,\n abstract = {In this paper we present an\ - \ experimental study concerninggestural embodiment of environmental sounds in\ - \ a listeningcontext. The presented work is part of a project aiming atmodeling\ - \ movement-sound relationships, with the end goalof proposing novel approaches\ - \ for designing musical instruments and sounding objects. The experiment is based\ - \ onsound stimuli corresponding to \"causal\" and \"non-causal\" sounds. It is\ - \ divided into a performance phase and an interview. 
The experiment is designed\ - \ to investigate possiblecorrelation between the perception of the \"causality\"\ - \ of environmental sounds and different gesture strategies for thesound embodiment.\ - \ In analogy with the perception of thesounds' causality, we propose to distinguish\ - \ gestures that \"mimic\" a sound's cause and gestures that \"trace\" a sound'smorphology\ - \ following temporal sound characteristics. Results from the interviews show that,\ - \ first, our causal soundsdatabase lead to consistent descriptions of the action\ - \ at theorigin of the sound and participants mimic this action. Second, non-causal\ - \ sounds lead to inconsistent metaphoric descriptions of the sound and participants\ - \ make gestures following sound \"contours\". Quantitatively, the results showthat\ - \ gesture variability is higher for causal sounds that noncausal sounds.},\n address\ - \ = {Oslo, Norway},\n author = {Caramiaux, Baptiste and Susini, Patrick and Bianco,\ - \ Tommaso and Bevilacqua, Fr\\'{e}d\\'{e}ric and Houix, Olivier and Schnell, Norbert\ - \ and Misdariis, Nicolas},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177979},\n\ - \ issn = {2220-4806},\n keywords = {Embodiment, Environmental Sound Perception,\ - \ Listening, Gesture Sound Interaction },\n pages = {144--148},\n presentation-video\ - \ = {https://vimeo.com/26805553/},\n title = {Gestural Embodiment of Environmental\ - \ Sounds: an Experimental Study},\n url = {http://www.nime.org/proceedings/2011/nime2011_144.pdf},\n\ - \ year = {2011}\n}\n" + ID: Won2004 + abstract: 'In this paper, we describe a new MIDI controller, the Light Pipes. The + Light Pipes are a series of pipes that respond to incident light. The paper will + discuss the design of the instrument, and the prototype we built. A piece was + composed for the instrument using algorithms designed in Pure Data.' 
+ address: 'Hamamatsu, Japan' + author: 'Won, Sook Y. and Chan, Humane and Liu, Jeremy' + bibtex: "@inproceedings{Won2004,\n abstract = {In this paper, we describe a new\ + \ MIDI controller, the Light Pipes. The Light Pipes are a series of pipes that\ + \ respond to incident light. The paper will discuss the design of the instrument,\ + \ and the prototype we built. A piece was composed for the instrument using algorithms\ + \ designed in Pure Data.},\n address = {Hamamatsu, Japan},\n author = {Won, Sook\ + \ Y. and Chan, Humane and Liu, Jeremy},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176685},\n\ + \ issn = {2220-4806},\n keywords = {Controllers, MIDI, light sensors, Pure Data.},\n\ + \ pages = {209--210},\n title = {Light Pipes: A Light Controlled {MIDI} Instrument},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_209.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177979 + doi: 10.5281/zenodo.1176685 issn: 2220-4806 - keywords: 'Embodiment, Environmental Sound Perception, Listening, Gesture Sound - Interaction ' - pages: 144--148 - presentation-video: https://vimeo.com/26805553/ - title: 'Gestural Embodiment of Environmental Sounds: an Experimental Study' - url: http://www.nime.org/proceedings/2011/nime2011_144.pdf - year: 2011 + keywords: 'Controllers, MIDI, light sensors, Pure Data.' + pages: 209--210 + title: 'Light Pipes: A Light Controlled MIDI Instrument' + url: http://www.nime.org/proceedings/2004/nime2004_209.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Mealla2011 - abstract: 'The use of physiological signals in Human Computer Interaction (HCI) - is becoming popular and widespread, mostly due to sensors miniaturization and - advances in real-time processing. 
However, most of the studies that use physiology - based interaction focus on single-user paradigms, and its usage in collaborative - scenarios is still in its beginning. In this paper we explore how interactive - sonification of brain and heart signals, and its representation through physical - objects (physiopucks) in a tabletop interface may enhance motivational and controlling - aspects of music collaboration. A multimodal system is presented, based on an - electrophysiology sensor system and the Reactable, a musical tabletop interface. - Performance and motivation variables were assessed in an experiment involving - a test "Physio" group(N=22) and a control "Placebo" group (N=10). Pairs of participants - used two methods for sound creation: implicit interaction through physiological - signals, and explicit interaction by means of gestural manipulation. The results - showed that pairs in the Physio Group declared less difficulty, higher confidence - and more symmetric control than the Placebo Group, where no real-time sonification - was provided as subjects were using pre-recorded physiological signal being unaware - of it. These results support the feasibility of introducing physiology-based interaction - in multimodal interfaces for collaborative music generation.' - address: 'Oslo, Norway' - author: 'Mealla, Sebastián and Väaljamäae, Aleksander and Bosi, Mathieu and Jordà, - Sergi' - bibtex: "@inproceedings{Mealla2011,\n abstract = {The use of physiological signals\ - \ in Human Computer Interaction (HCI) is becoming popular and widespread, mostly\ - \ due to sensors miniaturization and advances in real-time processing. 
However,\ - \ most of the studies that use physiology based interaction focus on single-user\ - \ paradigms, and its usage in collaborative scenarios is still in its beginning.\ - \ In this paper we explore how interactive sonification of brain and heart signals,\ - \ and its representation through physical objects (physiopucks) in a tabletop\ - \ interface may enhance motivational and controlling aspects of music collaboration.\ - \ A multimodal system is presented, based on an electrophysiology sensor system\ - \ and the Reactable, a musical tabletop interface. Performance and motivation\ - \ variables were assessed in an experiment involving a test \"Physio\" group(N=22)\ - \ and a control \"Placebo\" group (N=10). Pairs of participants used two methods\ - \ for sound creation: implicit interaction through physiological signals, and\ - \ explicit interaction by means of gestural manipulation. The results showed that\ - \ pairs in the Physio Group declared less difficulty, higher confidence and more\ - \ symmetric control than the Placebo Group, where no real-time sonification was\ - \ provided as subjects were using pre-recorded physiological signal being unaware\ - \ of it. 
These results support the feasibility of introducing physiology-based\ - \ interaction in multimodal interfaces for collaborative music generation.},\n\ - \ address = {Oslo, Norway},\n author = {Mealla, Sebasti\\'{a}n and V\\''{a}aljam\\\ - ''{a}ae, Aleksander and Bosi, Mathieu and Jord\\`{a}, Sergi},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178107},\n issn = {2220-4806},\n keywords = {bci, collaboration,\ - \ cscw, hci, multimodal interfaces, music, physiological computing, physiopucks,\ - \ tabletops, universitat pompeu fabra},\n pages = {149--154},\n presentation-video\ - \ = {https://vimeo.com/26806576/},\n title = {Listening to Your Brain: Implicit\ - \ Interaction in Collaborative Music Performances},\n url = {http://www.nime.org/proceedings/2011/nime2011_149.pdf},\n\ - \ year = {2011}\n}\n" + ID: Lippit2004 + abstract: 'In this paper, I describe a realtime sampling system for theturntablist, + and the hardware and software design of the secondprototype, 16padjoystickcontroller.' + address: 'Hamamatsu, Japan' + author: 'Lippit, Takuro M.' 
+ bibtex: "@inproceedings{Lippit2004,\n abstract = {In this paper, I describe a realtime\ + \ sampling system for theturntablist, and the hardware and software design of\ + \ the secondprototype, 16padjoystickcontroller.},\n address = {Hamamatsu, Japan},\n\ + \ author = {Lippit, Takuro M.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176633},\n\ + \ issn = {2220-4806},\n keywords = {DJ, Turntablism, Realtime Sampling, MAX/MSP,\ + \ Microchip PIC microcontroller, MIDI},\n pages = {211--212},\n title = {Realtime\ + \ Sampling System for the Turntablist, Version 2: 16padjoystickcontroller},\n\ + \ url = {http://www.nime.org/proceedings/2004/nime2004_211.pdf},\n year = {2004}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178107 + doi: 10.5281/zenodo.1176633 issn: 2220-4806 - keywords: 'bci, collaboration, cscw, hci, multimodal interfaces, music, physiological - computing, physiopucks, tabletops, universitat pompeu fabra' - pages: 149--154 - presentation-video: https://vimeo.com/26806576/ - title: 'Listening to Your Brain: Implicit Interaction in Collaborative Music Performances' - url: http://www.nime.org/proceedings/2011/nime2011_149.pdf - year: 2011 + keywords: 'DJ, Turntablism, Realtime Sampling, MAX/MSP, Microchip PIC microcontroller, + MIDI' + pages: 211--212 + title: 'Realtime Sampling System for the Turntablist, Version 2: 16padjoystickcontroller' + url: http://www.nime.org/proceedings/2004/nime2004_211.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Newton2011 - abstract: 'This paper examines the creation of augmented musicalinstruments by a - number of musicians. Equipped with asystem called the Augmentalist, 10 musicians - created newaugmented instruments based on their traditional acousticor electric - instruments. 
This paper discusses the ways inwhich the musicians augmented their - instruments, examines the similarities and differences between the resultinginstruments - and presents a number of interesting findingsresulting from this process.' - address: 'Oslo, Norway' - author: 'Newton, Dan and Marshall, Mark T.' - bibtex: "@inproceedings{Newton2011,\n abstract = {This paper examines the creation\ - \ of augmented musicalinstruments by a number of musicians. Equipped with asystem\ - \ called the Augmentalist, 10 musicians created newaugmented instruments based\ - \ on their traditional acousticor electric instruments. This paper discusses the\ - \ ways inwhich the musicians augmented their instruments, examines the similarities\ - \ and differences between the resultinginstruments and presents a number of interesting\ - \ findingsresulting from this process.},\n address = {Oslo, Norway},\n author\ - \ = {Newton, Dan and Marshall, Mark T.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178121},\n\ - \ issn = {2220-4806},\n keywords = {Augmented Instruments, Instrument Design,\ - \ Digital Musical Instruments, Performance },\n pages = {155--160},\n presentation-video\ - \ = {https://vimeo.com/26807158/},\n title = {Examining How Musicians Create Augmented\ - \ Musical Instruments},\n url = {http://www.nime.org/proceedings/2011/nime2011_155.pdf},\n\ - \ year = {2011}\n}\n" + ID: Sharon2004 + abstract: This paper describes the design and on-going development of an expressive + gestural MIDI interface and how this couldenhance live performance of electronic + music. + address: 'Hamamatsu, Japan' + author: 'Sharon, Michael E.' 
+ bibtex: "@inproceedings{Sharon2004,\n abstract = {This paper describes the design\ + \ and on-going development of an expressive gestural MIDI interface and how this\ + \ couldenhance live performance of electronic music.},\n address = {Hamamatsu,\ + \ Japan},\n author = {Sharon, Michael E.},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176661},\n\ + \ issn = {2220-4806},\n keywords = {gestural control, mapping, Pure Data (pd),\ + \ accelerometers, MIDI, microcontrollers, synthesis, musical instruments},\n pages\ + \ = {213--214},\n title = {The Stranglophone: Enhancing Expressiveness In Live\ + \ Electronic Music},\n url = {http://www.nime.org/proceedings/2004/nime2004_213.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178121 + doi: 10.5281/zenodo.1176661 issn: 2220-4806 - keywords: 'Augmented Instruments, Instrument Design, Digital Musical Instruments, - Performance ' - pages: 155--160 - presentation-video: https://vimeo.com/26807158/ - title: Examining How Musicians Create Augmented Musical Instruments - url: http://www.nime.org/proceedings/2011/nime2011_155.pdf - year: 2011 + keywords: 'gestural control, mapping, Pure Data (pd), accelerometers, MIDI, microcontrollers, + synthesis, musical instruments' + pages: 213--214 + title: 'The Stranglophone: Enhancing Expressiveness In Live Electronic Music' + url: http://www.nime.org/proceedings/2004/nime2004_213.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Seldess2011 - abstract: 'We present Tahakum, an open source, extensible collection of software - tools designed to enhance workflow on multichannel audio systems within complex - multi-functional research and development environments. 
Tahakum aims to provide - critical functionality required across a broad spectrum of audio systems usage - scenarios, while at the same time remaining sufficiently open as to easily support - modifications and extensions via 3rd party hardware and software. Features provided - in the framework include software for custom mixing/routing and audio system preset - automation, software for network message routing/redirection and protocol conversion, - and software for dynamic audio asset management and control. ' - address: 'Oslo, Norway' - author: 'Seldess, Zachary and Yamada, Toshiro' - bibtex: "@inproceedings{Seldess2011,\n abstract = {We present {Tahakum}, an open\ - \ source, extensible collection of software tools designed to enhance workflow\ - \ on multichannel audio systems within complex multi-functional research and development\ - \ environments. Tahakum aims to provide critical functionality required across\ - \ a broad spectrum of audio systems usage scenarios, while at the same time remaining\ - \ sufficiently open as to easily support modifications and extensions via 3rd\ - \ party hardware and software. Features provided in the framework include software\ - \ for custom mixing/routing and audio system preset automation, software for network\ - \ message routing/redirection and protocol conversion, and software for dynamic\ - \ audio asset management and control. 
},\n address = {Oslo, Norway},\n author\ - \ = {Seldess, Zachary and Yamada, Toshiro},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178159},\n issn = {2220-4806},\n keywords = {Audio Control\ - \ Systems, Audio for VR, Max/MSP, Spatial Audio },\n pages = {161--166},\n presentation-video\ - \ = {https://vimeo.com/26809966/},\n title = {Tahakum: A Multi-Purpose Audio Control\ - \ Framework},\n url = {http://www.nime.org/proceedings/2011/nime2011_161.pdf},\n\ - \ year = {2011}\n}\n" + ID: Hashida2004 + abstract: 'This paper proposes an interface for improvisational ensemble plays which + synthesizes musical sounds and graphical images on the floor from people''s act + of "walking". The aim of this paper is to develop such a system that enables nonprofessional + people in our public spaces to play good contrapuntal music without any knowledge + of music theory. The people are just walking. This system is based on the i-trace + system [1] which can capture the people''s behavior and give some visual feedback. ' + address: 'Hamamatsu, Japan' + author: 'Hashida, Tomoko and Kakehi, Yasuaki and Naemura, Takeshi' + bibtex: "@inproceedings{Hashida2004,\n abstract = {This paper proposes an interface\ + \ for improvisational ensemble plays which synthesizes musical sounds and graphical\ + \ images on the floor from people's act of \"walking\". The aim of this paper\ + \ is to develop such a system that enables nonprofessional people in our public\ + \ spaces to play good contrapuntal music without any knowledge of music theory.\ + \ The people are just walking. This system is based on the i-trace system [1]\ + \ which can capture the people's behavior and give some visual feedback. 
},\n\ + \ address = {Hamamatsu, Japan},\n author = {Hashida, Tomoko and Kakehi, Yasuaki\ + \ and Naemura, Takeshi},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1176607},\n\ + \ issn = {2220-4806},\n keywords = {Improvisational Ensemble Play, Contrapuntal\ + \ Music, Human Tracking, Traces, Spatially Augmented Reality},\n pages = {215--216},\n\ + \ title = {Ensemble System with i-trace},\n url = {http://www.nime.org/proceedings/2004/nime2004_215.pdf},\n\ + \ year = {2004}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178159 + doi: 10.5281/zenodo.1176607 issn: 2220-4806 - keywords: 'Audio Control Systems, Audio for VR, Max/MSP, Spatial Audio ' - pages: 161--166 - presentation-video: https://vimeo.com/26809966/ - title: 'Tahakum: A Multi-Purpose Audio Control Framework' - url: http://www.nime.org/proceedings/2011/nime2011_161.pdf - year: 2011 + keywords: 'Improvisational Ensemble Play, Contrapuntal Music, Human Tracking, Traces, + Spatially Augmented Reality' + pages: 215--216 + title: Ensemble System with i-trace + url: http://www.nime.org/proceedings/2004/nime2004_215.pdf + year: 2004 - ENTRYTYPE: inproceedings - ID: Liang2011 - abstract: 'Computer music systems that coordinate or interact with human musicians - exist in many forms. Often, coordination is at the level of gestures and phrases - without synchronization at the beat level (or perhaps the notion of "beat" does - not even exist). In music with beats, fine-grain synchronization can be achieved - by having humans adapt to the computer (e.g. following a click track), or by computer - accompaniment in which the computer follows a predetermined score. We consider - an alternative scenario in which improvisation prevents traditional score following, - but where synchronization is achieved at the level of beats, measures, and cues. 
- To explore this new type of human-computer interaction, we have created new software - abstractions for synchronization and coordination of music and interfaces in different - modalities. We describe these new software structures, present examples, and introduce - the idea of music notation as an interactive musical interface rather than a static - document. ' - address: 'Oslo, Norway' - author: 'Liang, Dawen and Xia, Guangyu and Dannenberg, Roger B.' - bibtex: "@inproceedings{Liang2011,\n abstract = {Computer music systems that coordinate\ - \ or interact with human musicians exist in many forms. Often, coordination is\ - \ at the level of gestures and phrases without synchronization at the beat level\ - \ (or perhaps the notion of \"beat\" does not even exist). In music with beats,\ - \ fine-grain synchronization can be achieved by having humans adapt to the computer\ - \ (e.g. following a click track), or by computer accompaniment in which the computer\ - \ follows a predetermined score. We consider an alternative scenario in which\ - \ improvisation prevents traditional score following, but where synchronization\ - \ is achieved at the level of beats, measures, and cues. To explore this new type\ - \ of human-computer interaction, we have created new software abstractions for\ - \ synchronization and coordination of music and interfaces in different modalities.\ - \ We describe these new software structures, present examples, and introduce the\ - \ idea of music notation as an interactive musical interface rather than a static\ - \ document. 
},\n address = {Oslo, Norway},\n author = {Liang, Dawen and Xia, Guangyu\ - \ and Dannenberg, Roger B.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178091},\n\ - \ issn = {2220-4806},\n keywords = {automatic accompaniment,interactive,music\ - \ display,popular music,real-time,synchronization},\n pages = {167--172},\n presentation-video\ - \ = {https://vimeo.com/26832515/},\n title = {A Framework for Coordination and\ - \ Synchronization of Media},\n url = {http://www.nime.org/proceedings/2011/nime2011_167.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_0 + abstract: 'We introduce faust2smartphone, a tool to generate an edit-ready project + for musical mobile application, which connects Faust programming language and + mobile application’s development. It is an extended implementation of faust2api. + Faust DSP objects can be easily embedded as a high level API so that the developers + can access various functions and elements across different mobile platforms. This + paper provides several modes and technical details on the structures and implementation + of this system as well as some applications and future directions for this tool.' + address: 'Birmingham, UK' + author: 'Weng, Ruolun' + bibtex: "@inproceedings{NIME20_0,\n abstract = {We introduce faust2smartphone, a\ + \ tool to generate an edit-ready project for musical mobile application, which\ + \ connects Faust programming language and mobile application’s development. It\ + \ is an extended implementation of faust2api. Faust DSP objects can be easily\ + \ embedded as a high level API so that the developers can access various functions\ + \ and elements across different mobile platforms. 
This paper provides several\ + \ modes and technical details on the structures and implementation of this system\ + \ as well as some applications and future directions for this tool.},\n address\ + \ = {Birmingham, UK},\n author = {Weng, Ruolun},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.4813164},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {1--4},\n publisher = {Birmingham\ + \ City University},\n title = {Interactive Mobile Musical Application using faust2smartphone},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper0.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178091 + doi: 10.5281/zenodo.4813164 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'automatic accompaniment,interactive,music display,popular music,real-time,synchronization' - pages: 167--172 - presentation-video: https://vimeo.com/26832515/ - title: A Framework for Coordination and Synchronization of Media - url: http://www.nime.org/proceedings/2011/nime2011_167.pdf - year: 2011 + month: July + pages: 1--4 + publisher: Birmingham City University + title: Interactive Mobile Musical Application using faust2smartphone + url: https://www.nime.org/proceedings/2020/nime2020_paper0.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Berdahl2011a - abstract: 'This paper describes a new Beagle Board-based platform forteaching and - practicing interaction design for musical applications. 
The migration from desktop - and laptop computerbased sound synthesis to a compact and integrated control, - computation and sound generation platform has enormous potential to widen the - range of computer music instruments and installations that can be designed, and - improvesthe portability, autonomy, extensibility and longevity of designed systems. - We describe the technical features of theSatellite CCRMA platform and contrast - it with personalcomputer-based systems used in the past as well as emergingsmart - phone-based platforms. The advantages and tradeoffs of the new platform are considered, - and some projectwork is described.' - address: 'Oslo, Norway' - author: 'Berdahl, Edgar and Ju, Wendy' - bibtex: "@inproceedings{Berdahl2011a,\n abstract = {This paper describes a new Beagle\ - \ Board-based platform forteaching and practicing interaction design for musical\ - \ applications. The migration from desktop and laptop computerbased sound synthesis\ - \ to a compact and integrated control, computation and sound generation platform\ - \ has enormous potential to widen the range of computer music instruments and\ - \ installations that can be designed, and improvesthe portability, autonomy, extensibility\ - \ and longevity of designed systems. We describe the technical features of theSatellite\ - \ CCRMA platform and contrast it with personalcomputer-based systems used in the\ - \ past as well as emergingsmart phone-based platforms. The advantages and tradeoffs\ - \ of the new platform are considered, and some projectwork is described.},\n address\ - \ = {Oslo, Norway},\n author = {Berdahl, Edgar and Ju, Wendy},\n booktitle = {Proceedings\ + ID: NIME20_1 + abstract: 'This paper reports on the user-driven redesign of an embedded digital + musical instrument that has yielded a trio of new instruments, informed by early + user feedback and co-design workshops organized with active musicians. 
Collectively, + they share a stand-alone design, digitally fabricated enclosures, and a common + sensor acquisition and sound synthesis architecture, yet each is unique in its + playing technique and sonic output. We focus on the technical design of the instruments + and provide examples of key design specifications that were derived from user + input, while reflecting on the challenges to, and opportunities for, creating + instruments that support active practices of performing musicians.' + address: 'Birmingham, UK' + author: 'Sullivan, John and Vanasse, Julian and Guastavino, Catherine and Wanderley, + Marcelo' + bibtex: "@inproceedings{NIME20_1,\n abstract = {This paper reports on the user-driven\ + \ redesign of an embedded digital musical instrument that has yielded a trio of\ + \ new instruments, informed by early user feedback and co-design workshops organized\ + \ with active musicians. Collectively, they share a stand-alone design, digitally\ + \ fabricated enclosures, and a common sensor acquisition and sound synthesis architecture,\ + \ yet each is unique in its playing technique and sonic output. 
We focus on the\ + \ technical design of the instruments and provide examples of key design specifications\ + \ that were derived from user input, while reflecting on the challenges to, and\ + \ opportunities for, creating instruments that support active practices of performing\ + \ musicians.},\n address = {Birmingham, UK},\n author = {Sullivan, John and Vanasse,\ + \ Julian and Guastavino, Catherine and Wanderley, Marcelo},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177957},\n issn = {2220-4806},\n keywords = {arduino,beagle\ - \ board,instruments omap,linux,microcontrollers,music controllers,nime,pd,pedagogy,texas},\n\ - \ pages = {173--178},\n presentation-video = {https://vimeo.com/26833829/},\n\ - \ title = {Satellite CCRMA: A Musical Interaction and Sound Synthesis Platform},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_173.pdf},\n year = {2011}\n\ - }\n" + \ doi = {10.5281/zenodo.4813166},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {5--10},\n presentation-video\ + \ = {https://youtu.be/DUMXJw-CTVo},\n publisher = {Birmingham City University},\n\ + \ title = {Reinventing the Noisebox: Designing Embedded Instruments for Active\ + \ Musicians},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper1.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177957 + doi: 10.5281/zenodo.4813166 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'arduino,beagle board,instruments omap,linux,microcontrollers,music controllers,nime,pd,pedagogy,texas' - pages: 173--178 - presentation-video: https://vimeo.com/26833829/ - title: 'Satellite CCRMA: A Musical Interaction and Sound Synthesis Platform' - url: http://www.nime.org/proceedings/2011/nime2011_173.pdf - year: 2011 + 
month: July + pages: 5--10 + presentation-video: https://youtu.be/DUMXJw-CTVo + publisher: Birmingham City University + title: 'Reinventing the Noisebox: Designing Embedded Instruments for Active Musicians' + url: https://www.nime.org/proceedings/2020/nime2020_paper1.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Bryan2011 - abstract: 'A novel method of digital scratching is presented as an alternative to - currently available digital hardware interfaces and time-coded vinyl (TCV). Similar - to TCV, the proposed method leverages existing analog turntables as a physical - interface to manipulate the playback of digital audio. To doso, however, an accelerometer/gyroscope–equipped - smartphone is firmly attached to a modified record, placed on a turntable, and - used to sense a performers movement, resulting in a wireless sensing-based scratching - method. The accelerometer and gyroscope data is wirelessly transmitted to a computer - to manipulate the digital audio playback in real-time. The method provides the - benefit of digital audio and storage, requires minimal additional hardware, accommodates - familiar proprioceptive feedback, and allows a single interface to control both - digital and analog audio. In addition, the proposed method provides numerous additional - benefits including real-time graphical display,multi-touch interaction, and untethered - performance (e.g“air-scratching”). Such a method turns a vinyl record into an - interactive surface and enhances traditional scratching performance by affording - new and creative musical interactions. Informal testing show this approach to - be viable,responsive, and robust.' - address: 'Oslo, Norway' - author: 'Bryan, Nicholas J. and Wang, Ge' - bibtex: "@inproceedings{Bryan2011,\n abstract = {A novel method of digital scratching\ - \ is presented as an alternative to currently available digital hardware interfaces\ - \ and time-coded vinyl (TCV). 
Similar to TCV, the proposed method leverages existing\ - \ analog turntables as a physical interface to manipulate the playback of digital\ - \ audio. To doso, however, an accelerometer/gyroscope–equipped smartphone is firmly\ - \ attached to a modified record, placed on a turntable, and used to sense a performers\ - \ movement, resulting in a wireless sensing-based scratching method. The accelerometer\ - \ and gyroscope data is wirelessly transmitted to a computer to manipulate the\ - \ digital audio playback in real-time. The method provides the benefit of digital\ - \ audio and storage, requires minimal additional hardware, accommodates familiar\ - \ proprioceptive feedback, and allows a single interface to control both digital\ - \ and analog audio. In addition, the proposed method provides numerous additional\ - \ benefits including real-time graphical display,multi-touch interaction, and\ - \ untethered performance (e.g“air-scratching”). Such a method turns a vinyl record\ - \ into an interactive surface and enhances traditional scratching performance\ - \ by affording new and creative musical interactions. Informal testing show this\ - \ approach to be viable,responsive, and robust.},\n address = {Oslo, Norway},\n\ - \ author = {Bryan, Nicholas J. 
and Wang, Ge},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177971},\n issn = {2220-4806},\n keywords = {Digital scratching,\ - \ mobile music, digital DJ, smartphone, turntable, turntablism, record player,\ - \ accelerometer, gyroscope, vinyl emulation software },\n pages = {179--184},\n\ - \ presentation-video = {https://vimeo.com/26835277/},\n title = {Two Turntables\ - \ and a Mobile Phone},\n url = {http://www.nime.org/proceedings/2011/nime2011_179.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_10 + abstract: 'This paper presents a new visualization paradigm for graphical interpolation + systems, known as Star Interpolation, that has been specifically created for sound + design applications. Through the presented investigation of previous visualizations, + it becomes apparent that the existing visuals in this class of system, generally + relate to the interpolation model that determines the weightings of the presets + and not the sonic output. The Star Interpolator looks to resolve this deficiency + by providing visual cues that relate to the parameter space. Through comparative + exploration it has been found this visualization provides a number of benefits + over the previous systems. It is also shown that hybrid visualization can be generated + that combined benefits of the new visualization with the existing interpolation + models. These can then be accessed by using an Interactive Visualization (IV) + approach. The results from our exploration of these visualizations are encouraging + and they appear to be advantageous when using the interpolators for sound designs + tasks. Therefore, it is proposed that formal usability testing is undertaken to + measure the potential value of this form of visualization.' 
+ address: 'Birmingham, UK' + author: 'Gibson, Darrell J and Polfreman, Richard' + bibtex: "@inproceedings{NIME20_10,\n abstract = {This paper presents a new visualization\ + \ paradigm for graphical interpolation systems, known as Star Interpolation, that\ + \ has been specifically created for sound design applications. Through the presented\ + \ investigation of previous visualizations, it becomes apparent that the existing\ + \ visuals in this class of system, generally relate to the interpolation model\ + \ that determines the weightings of the presets and not the sonic output. The\ + \ Star Interpolator looks to resolve this deficiency by providing visual cues\ + \ that relate to the parameter space. Through comparative exploration it has been\ + \ found this visualization provides a number of benefits over the previous systems.\ + \ It is also shown that hybrid visualization can be generated that combined benefits\ + \ of the new visualization with the existing interpolation models. These can then\ + \ be accessed by using an Interactive Visualization (IV) approach. The results\ + \ from our exploration of these visualizations are encouraging and they appear\ + \ to be advantageous when using the interpolators for sound designs tasks. 
Therefore,\ + \ it is proposed that formal usability testing is undertaken to measure the potential\ + \ value of this form of visualization.},\n address = {Birmingham, UK},\n author\ + \ = {Gibson, Darrell J and Polfreman, Richard},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.4813168},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {49--54},\n presentation-video\ + \ = {https://youtu.be/3ImRZdSsP-M},\n publisher = {Birmingham City University},\n\ + \ title = {Star Interpolator – A Novel Visualization Paradigm for Graphical Interpolators},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper10.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177971 + doi: 10.5281/zenodo.4813168 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Digital scratching, mobile music, digital DJ, smartphone, turntable, - turntablism, record player, accelerometer, gyroscope, vinyl emulation software ' - pages: 179--184 - presentation-video: https://vimeo.com/26835277/ - title: Two Turntables and a Mobile Phone - url: http://www.nime.org/proceedings/2011/nime2011_179.pdf - year: 2011 + month: July + pages: 49--54 + presentation-video: https://youtu.be/3ImRZdSsP-M + publisher: Birmingham City University + title: Star Interpolator – A Novel Visualization Paradigm for Graphical Interpolators + url: https://www.nime.org/proceedings/2020/nime2020_paper10.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Kruge2011 - abstract: 'MadPad is a networked audiovisual sample station for mobile devices. - Twelve short video clips are loaded onto thescreen in a grid and playback is triggered - by tapping anywhere on the clip. 
This is similar to tapping the pads of anaudio - sample station, but extends that interaction to addvisual sampling. Clips can - be shot on-the-fly with a cameraenabled mobile device and loaded into the player - instantly,giving the performer an ability to quickly transform his orher surroundings - into a sample-based, audiovisual instrument. Samples can also be sourced from - an online community in which users can post or download content. The recent ubiquity - of multitouch mobile devices and advances inpervasive computing have made this - system possible, providing for a vast amount of content only limited by theimagination - of the performer and the community. This paper presents the core features of MadPad - and the designexplorations that inspired them.' - address: 'Oslo, Norway' - author: 'Kruge, Nick and Wang, Ge' - bibtex: "@inproceedings{Kruge2011,\n abstract = {MadPad is a networked audiovisual\ - \ sample station for mobile devices. Twelve short video clips are loaded onto\ - \ thescreen in a grid and playback is triggered by tapping anywhere on the clip.\ - \ This is similar to tapping the pads of anaudio sample station, but extends that\ - \ interaction to addvisual sampling. Clips can be shot on-the-fly with a cameraenabled\ - \ mobile device and loaded into the player instantly,giving the performer an ability\ - \ to quickly transform his orher surroundings into a sample-based, audiovisual\ - \ instrument. Samples can also be sourced from an online community in which users\ - \ can post or download content. The recent ubiquity of multitouch mobile devices\ - \ and advances inpervasive computing have made this system possible, providing\ - \ for a vast amount of content only limited by theimagination of the performer\ - \ and the community. 
This paper presents the core features of MadPad and the designexplorations\ - \ that inspired them.},\n address = {Oslo, Norway},\n author = {Kruge, Nick and\ - \ Wang, Ge},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178077},\n issn\ - \ = {2220-4806},\n keywords = {mobile music, networked music, social music, audiovisual,\ - \ sampling, user-generated content, crowdsourcing, sample station, iPad, iPhone\ - \ },\n pages = {185--190},\n presentation-video = {https://vimeo.com/26855684/},\n\ - \ title = {MadPad: A Crowdsourcing System for Audiovisual Sampling},\n url = {http://www.nime.org/proceedings/2011/nime2011_185.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_100 + abstract: 'This paper reports on the process of development of a virtual-acoustic + proto-instrument, Vodhrán, based on a physical model of a plate, within a musical + performance-driven ecosystemic environment. Performers explore the plate model + via tactile interaction through a Sensel Morph interface, chosen to allow damping + and localised striking consistent with playing hand percussion. Through an iteration + of prototypes, we have designed an embedded proto-instrument that allows a bodily + interaction between the performer and the virtual-acoustic plate in a way that + redirects from the perception of the Sensel as a touchpad and reframes it as a + percussive surface. Due to the computational effort required to run such a rich + physical model and the necessity to provide a natural interaction, the audio processing + is implemented on a high powered single board computer. We describe the design + challenges and report on the technological solutions we have found in the implementation + of Vodhrán which we believe are valuable to the wider NIME community.' 
+ address: 'Birmingham, UK' + author: 'Pardue, Laurel S and Ortiz, Miguel and van Walstijn, Maarten and Stapleton, + Paul and Rodger, Matthew' + bibtex: "@inproceedings{NIME20_100,\n abstract = {This paper reports on the process\ + \ of development of a virtual-acoustic proto-instrument, Vodhrán, based on a physical\ + \ model of a plate, within a musical performance-driven ecosystemic environment.\ + \ Performers explore the plate model via tactile interaction through a Sensel\ + \ Morph interface, chosen to allow damping and localised striking consistent with\ + \ playing hand percussion. Through an iteration of prototypes, we have designed\ + \ an embedded proto-instrument that allows a bodily interaction between the performer\ + \ and the virtual-acoustic plate in a way that redirects from the perception of\ + \ the Sensel as a touchpad and reframes it as a percussive surface. Due to the\ + \ computational effort required to run such a rich physical model and the necessity\ + \ to provide a natural interaction, the audio processing is implemented on a high\ + \ powered single board computer. 
We describe the design challenges and report\ + \ on the technological solutions we have found in the implementation of Vodhrán\ + \ which we believe are valuable to the wider NIME community.},\n address = {Birmingham,\ + \ UK},\n author = {Pardue, Laurel S and Ortiz, Miguel and van Walstijn, Maarten\ + \ and Stapleton, Paul and Rodger, Matthew},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.4813170},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {523--524},\n publisher = {Birmingham\ + \ City University},\n title = {Vodhrán: collaborative design for evolving a physical\ + \ model and interface into a proto-instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper100.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178077 + doi: 10.5281/zenodo.4813170 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'mobile music, networked music, social music, audiovisual, sampling, user-generated - content, crowdsourcing, sample station, iPad, iPhone ' - pages: 185--190 - presentation-video: https://vimeo.com/26855684/ - title: 'MadPad: A Crowdsourcing System for Audiovisual Sampling' - url: http://www.nime.org/proceedings/2011/nime2011_185.pdf - year: 2011 + month: July + pages: 523--524 + publisher: Birmingham City University + title: 'Vodhrán: collaborative design for evolving a physical model and interface + into a proto-instrument' + url: https://www.nime.org/proceedings/2020/nime2020_paper100.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Keefe2011 - abstract: 'Visual information integration in mobile music performanceis an area - that has not been thoroughly explored and currentapplications are often individually - designed. 
From camerainput to flexible output rendering, we discuss visual performance - support in the context of urMus, a meta-environmentfor mobile interaction and - performance development. Theuse of cameras, a set of image primitives, interactive - visualcontent, projectors, and camera flashes can lead to visuallyintriguing performance - possibilities.' - address: 'Oslo, Norway' - author: 'Keefe, Patrick O. and Essl, Georg' - bibtex: "@inproceedings{Keefe2011,\n abstract = {Visual information integration\ - \ in mobile music performanceis an area that has not been thoroughly explored\ - \ and currentapplications are often individually designed. From camerainput to\ - \ flexible output rendering, we discuss visual performance support in the context\ - \ of urMus, a meta-environmentfor mobile interaction and performance development.\ - \ Theuse of cameras, a set of image primitives, interactive visualcontent, projectors,\ - \ and camera flashes can lead to visuallyintriguing performance possibilities.},\n\ - \ address = {Oslo, Norway},\n author = {Keefe, Patrick O. and Essl, Georg},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178061},\n issn = {2220-4806},\n\ - \ keywords = {Mobile performance, visual interaction, camera phone, mobile collaboration\ - \ },\n pages = {191--196},\n presentation-video = {https://vimeo.com/26836592/},\n\ - \ title = {The Visual in Mobile Music Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_191.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_101 + abstract: 'Brain-computer interfaces (BCIs) are beneficial for patients who are + suffering from motor disabilities because it offers them a way of creative expression, + which improves mental well-being. BCIs aim to establish a direct communication + medium between the brain and the computer. Therefore, unlike conventional musical + interfaces, it does not require muscular power. 
This paper explores the potential + of building sound synthesisers with BCIs that are based on steady-state visually + evoked potential (SSVEP). It investigates novel ways to enable patients with motor + disabilities to express themselves. It presents a new concept called sonic expression, + that is to express oneself purely by the synthesis of sound. It introduces new + layouts and designs for BCI-based sound synthesisers and the limitations of these + interfaces are discussed. An evaluation of different sound synthesis techniques + is conducted to find an appropriate one for such systems. Synthesis techniques + are evaluated and compared based on a framework governed by sonic expression.' + address: 'Birmingham, UK' + author: 'Venkatesh, Satvik and Braund, Edward and Miranda, Eduardo' + bibtex: "@inproceedings{NIME20_101,\n abstract = {Brain-computer interfaces (BCIs)\ + \ are beneficial for patients who are suffering from motor disabilities because\ + \ it offers them a way of creative expression, which improves mental well-being.\ + \ BCIs aim to establish a direct communication medium between the brain and the\ + \ computer. Therefore, unlike conventional musical interfaces, it does not require\ + \ muscular power. This paper explores the potential of building sound synthesisers\ + \ with BCIs that are based on steady-state visually evoked potential (SSVEP).\ + \ It investigates novel ways to enable patients with motor disabilities to express\ + \ themselves. It presents a new concept called sonic expression, that is to express\ + \ oneself purely by the synthesis of sound. It introduces new layouts and designs\ + \ for BCI-based sound synthesisers and the limitations of these interfaces are\ + \ discussed. An evaluation of different sound synthesis techniques is conducted\ + \ to find an appropriate one for such systems. 
Synthesis techniques are evaluated\ + \ and compared based on a framework governed by sonic expression.},\n address\ + \ = {Birmingham, UK},\n author = {Venkatesh, Satvik and Braund, Edward and Miranda,\ + \ Eduardo},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813172},\n editor\ + \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ + \ pages = {525--530},\n publisher = {Birmingham City University},\n title = {Designing\ + \ Brain-computer Interfaces for Sonic Expression},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper101.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178061 + doi: 10.5281/zenodo.4813172 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Mobile performance, visual interaction, camera phone, mobile collaboration ' - pages: 191--196 - presentation-video: https://vimeo.com/26836592/ - title: The Visual in Mobile Music Performance - url: http://www.nime.org/proceedings/2011/nime2011_191.pdf - year: 2011 + month: July + pages: 525--530 + publisher: Birmingham City University + title: Designing Brain-computer Interfaces for Sonic Expression + url: https://www.nime.org/proceedings/2020/nime2020_paper101.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Wang2011 - abstract: 'This paper describes the origin, design, and implementation of Smule''s - Magic Fiddle, an expressive musical instrument for the iPad. Magic Fiddle takes - advantage of the physical aspects of the device to integrate game-like and pedagogical - elements. We describe the origin of Magic Fiddle, chronicle its design process, - discuss its integrated music education system, and evaluate the overall experience. 
' - address: 'Oslo, Norway' - author: 'Wang, Ge and Oh, Jieun and Lieber, Tom' - bibtex: "@inproceedings{Wang2011,\n abstract = {This paper describes the origin,\ - \ design, and implementation of Smule's Magic Fiddle, an expressive musical instrument\ - \ for the iPad. Magic Fiddle takes advantage of the physical aspects of the device\ - \ to integrate game-like and pedagogical elements. We describe the origin of Magic\ - \ Fiddle, chronicle its design process, discuss its integrated music education\ - \ system, and evaluate the overall experience. },\n address = {Oslo, Norway},\n\ - \ author = {Wang, Ge and Oh, Jieun and Lieber, Tom},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178187},\n issn = {2220-4806},\n keywords = {Magic Fiddle,\ - \ iPad, physical interaction design, experiential design, music education. },\n\ - \ pages = {197--202},\n presentation-video = {https://vimeo.com/26857032/},\n\ - \ title = {Designing for the iPad: Magic Fiddle},\n url = {http://www.nime.org/proceedings/2011/nime2011_197.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_102 + abstract: 'Music has previously been shown to be beneficial in improving runners + performance in treadmill based experiments. This paper evaluates a generative + music system, HEARTBEATS, designed to create biosignal synchronous music in real-time + according to an individual athlete’s heart-rate or cadence (steps per minute). + The tempo, melody, and timbral features of the generated music are modulated according + to biosensor input from each runner using a wearable Bluetooth sensor. We compare + the relative performance of athletes listening to heart-rate and cadence synchronous + music, across a randomized trial (N=57) on a trail course with 76ft of elevation. + Participants were instructed to continue until perceived effort went beyond an + 18 using the Borg rating of perceived exertion scale. 
We found that cadence-synchronous + music improved performance and decreased perceived effort in male runners, and + improved performance but not perceived effort in female runners, in comparison + to heart-rate synchronous music. This work has implications for the future design + and implementation of novel portable music systems and in music-assisted coaching.' + address: 'Birmingham, UK' + author: 'Williams, Duncan A.H. and Fazenda, Bruno and Williamson, Victoria J. and + Fazekas, Gyorgy' + bibtex: "@inproceedings{NIME20_102,\n abstract = {Music has previously been shown\ + \ to be beneficial in improving runners performance in treadmill based experiments.\ + \ This paper evaluates a generative music system, HEARTBEATS, designed to create\ + \ biosignal synchronous music in real-time according to an individual athlete’s\ + \ heart-rate or cadence (steps per minute). The tempo, melody, and timbral features\ + \ of the generated music are modulated according to biosensor input from each\ + \ runner using a wearable Bluetooth sensor. We compare the relative performance\ + \ of athletes listening to heart-rate and cadence synchronous music, across a\ + \ randomized trial (N=57) on a trail course with 76ft of elevation. Participants\ + \ were instructed to continue until perceived effort went beyond an 18 using the\ + \ Borg rating of perceived exertion scale. We found that cadence-synchronous music\ + \ improved performance and decreased perceived effort in male runners, and improved\ + \ performance but not perceived effort in female runners, in comparison to heart-rate\ + \ synchronous music. This work has implications for the future design and implementation\ + \ of novel portable music systems and in music-assisted coaching.},\n address\ + \ = {Birmingham, UK},\n author = {Williams, Duncan A.H. and Fazenda, Bruno and\ + \ Williamson, Victoria J. 
and Fazekas, Gyorgy},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.4813174},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {531--536},\n publisher = {Birmingham\ + \ City University},\n title = {Biophysiologically synchronous computer generated\ + \ music improves performance and reduces perceived effort in trail runners},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper102.pdf},\n year\ + \ = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178187 + doi: 10.5281/zenodo.4813174 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Magic Fiddle, iPad, physical interaction design, experiential design, - music education. ' - pages: 197--202 - presentation-video: https://vimeo.com/26857032/ - title: 'Designing for the iPad: Magic Fiddle' - url: http://www.nime.org/proceedings/2011/nime2011_197.pdf - year: 2011 + month: July + pages: 531--536 + publisher: Birmingham City University + title: Biophysiologically synchronous computer generated music improves performance + and reduces perceived effort in trail runners + url: https://www.nime.org/proceedings/2020/nime2020_paper102.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Knapp2011 - abstract: 'This paper describes a new interface for mobile music creation, the MobileMuse, - that introduces the capability of using physiological indicators of emotion - as a new mode of interaction. Combining both kinematic and physiological measurement - in a mobile environment creates the possibility of integral music control—the - use of both gesture and emotion to control sound creation—where it has never been - possible before. 
This paper will review the concept of integral music control - and describe the motivation for creating the MobileMuse, its design and future - possibilities.' - address: 'Oslo, Norway' - author: 'Knapp, Benjamin and Bortz, Brennon' - bibtex: "@inproceedings{Knapp2011,\n abstract = {This paper describes a new interface\ - \ for mobile music creation, the MobileMuse, that introduces the capability of\ - \ using physiological indicators of emotion as a new mode of interaction. Combining\ - \ both kinematic and physiological measurement in a mobile environment creates\ - \ the possibility of integral music control—the use of both gesture and emotion\ - \ to control sound creation—where it has never been possible before. This paper\ - \ will review the concept of integral music control and describe the motivation\ - \ for creating the MobileMuse, its design and future possibilities.},\n address\ - \ = {Oslo, Norway},\n author = {Knapp, Benjamin and Bortz, Brennon},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178073},\n issn = {2220-4806},\n keywords\ - \ = {affective computing,bile music performance,mo-,physiological signal measurement},\n\ - \ pages = {203--206},\n presentation-video = {https://vimeo.com/26858339/},\n\ - \ title = {MobileMuse: Integral Music Control Goes Mobile},\n url = {http://www.nime.org/proceedings/2011/nime2011_203.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_103 + abstract: 'Audio content-based processing has become a pervasive methodology for + techno-fluent musicians. System architectures typically create thumbnail audio + descriptions, based on signal processing methods, to visualize, retrieve and transform + musical audio efficiently. 
Towards enhanced usability of these descriptor-based + frameworks for the music community, the paper advances a minimal content-based + audio description scheme, rooted on primary musical notation attributes at the + threefold sound object, meso and macro hierarchies. Multiple perceptually-guided + viewpoints from rhythmic, harmonic, timbral and dynamic attributes define a discrete + and finite alphabet with minimal formal and subjective assumptions using unsupervised + and user-guided methods. The Factor Oracle automaton is then adopted to model + and visualize temporal morphology. The generative musical applications enabled + by the descriptor-based framework at multiple structural hierarchies are discussed.' + address: 'Birmingham, UK' + author: 'Bernardes, Gilberto and Bernardes, Gilberto' + bibtex: "@inproceedings{NIME20_103,\n abstract = {Audio content-based processing\ + \ has become a pervasive methodology for techno-fluent musicians. System architectures\ + \ typically create thumbnail audio descriptions, based on signal processing methods,\ + \ to visualize, retrieve and transform musical audio efficiently. Towards enhanced\ + \ usability of these descriptor-based frameworks for the music community, the\ + \ paper advances a minimal content-based audio description scheme, rooted on primary\ + \ musical notation attributes at the threefold sound object, meso and macro hierarchies.\ + \ Multiple perceptually-guided viewpoints from rhythmic, harmonic, timbral and\ + \ dynamic attributes define a discrete and finite alphabet with minimal formal\ + \ and subjective assumptions using unsupervised and user-guided methods. 
The Factor\ + \ Oracle automaton is then adopted to model and visualize temporal morphology.\ + \ The generative musical applications enabled by the descriptor-based framework\ + \ at multiple structural hierarchies are discussed.},\n address = {Birmingham,\ + \ UK},\n author = {Bernardes, Gilberto and Bernardes, Gilberto},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813176},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {537--542},\n presentation-video\ + \ = {https://youtu.be/zEg9Cpir8zA},\n publisher = {Birmingham City University},\n\ + \ title = {Interfacing Sounds: Hierarchical Audio-Content Morphologies for Creative\ + \ Re-purposing in earGram 2.0},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper103.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178073 + doi: 10.5281/zenodo.4813176 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'affective computing,bile music performance,mo-,physiological signal measurement' - pages: 203--206 - presentation-video: https://vimeo.com/26858339/ - title: 'MobileMuse: Integral Music Control Goes Mobile' - url: http://www.nime.org/proceedings/2011/nime2011_203.pdf - year: 2011 + month: July + pages: 537--542 + presentation-video: https://youtu.be/zEg9Cpir8zA + publisher: Birmingham City University + title: 'Interfacing Sounds: Hierarchical Audio-Content Morphologies for Creative + Re-purposing in earGram 2.0' + url: https://www.nime.org/proceedings/2020/nime2020_paper103.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Beck2011 - abstract: 'Laptop Orchestras (LOs) have recently become a very popular mode of musical - expression. 
They engage groups ofperformers to use ordinary laptop computers as - instrumentsand sound sources in the performance of specially createdmusic software. - Perhaps the biggest challenge for LOs isthe distribution, management and control - of software acrossheterogeneous collections of networked computers. Software must - be stored and distributed from a central repository, but launched on individual - laptops immediately beforeperformance. The GRENDL project leverages proven gridcomputing - frameworks and approaches the Laptop Orchestra as a distributed computing platform - for interactive computer music. This allows us to readily distribute softwareto - each laptop in the orchestra depending on the laptop''sinternal configuration, - its role in the composition, and theplayer assigned to that computer. Using the - SAGA framework, GRENDL is able to distribute software and managesystem and application - environments for each composition.Our latest version includes tangible control - of the GRENDLenvironment for a more natural and familiar user experience.' - address: 'Oslo, Norway' - author: 'Beck, Stephen D. and Branton, Chris and Maddineni, Sharath' - bibtex: "@inproceedings{Beck2011,\n abstract = {Laptop Orchestras (LOs) have recently\ - \ become a very popular mode of musical expression. They engage groups ofperformers\ - \ to use ordinary laptop computers as instrumentsand sound sources in the performance\ - \ of specially createdmusic software. Perhaps the biggest challenge for LOs isthe\ - \ distribution, management and control of software acrossheterogeneous collections\ - \ of networked computers. Software must be stored and distributed from a central\ - \ repository, but launched on individual laptops immediately beforeperformance.\ - \ The GRENDL project leverages proven gridcomputing frameworks and approaches\ - \ the Laptop Orchestra as a distributed computing platform for interactive computer\ - \ music. 
This allows us to readily distribute softwareto each laptop in the orchestra\ - \ depending on the laptop'sinternal configuration, its role in the composition,\ - \ and theplayer assigned to that computer. Using the SAGA framework, GRENDL is\ - \ able to distribute software and managesystem and application environments for\ - \ each composition.Our latest version includes tangible control of the GRENDLenvironment\ - \ for a more natural and familiar user experience.},\n address = {Oslo, Norway},\n\ - \ author = {Beck, Stephen D. and Branton, Chris and Maddineni, Sharath},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177951},\n issn = {2220-4806},\n keywords\ - \ = {laptop orchestra, tangible interaction, grid computing },\n pages = {207--210},\n\ - \ presentation-video = {https://vimeo.com/26860960/},\n title = {Tangible Performance\ - \ Management of Grid-based Laptop Orchestras},\n url = {http://www.nime.org/proceedings/2011/nime2011_207.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_104 + abstract: 'For a long time, magnetic tape has been commonly utilized as one of physical + media for recording and playing music. In this research, we propose a novel interactive + musical instrument called ParaSampling that utilizes the technology of magnetic + sound recording, and a improvisational sound playing method based on the instrument. + While a conventional cassette tape player has a single tapehead, which rigidly + placed, our instrument utilizes multiple handheld tapehead modules as an interface. + Players can hold the interfaces and press them against the rotating magnetic tape + at an any point to record or reproduce sounds The player can also easily erase + and rewrite the sound recorded on the tape. With this instrument, they can achieve + improvised and unique musical expressions through tangible and spatial interactions. 
+ In this paper, we describe the system design of ParaSampling, the implementation + of the prototype system, and discuss music expressions enabled by the system.' + address: 'Birmingham, UK' + author: 'Han, Joung Min and Kakehi, Yasuaki' + bibtex: "@inproceedings{NIME20_104,\n abstract = {For a long time, magnetic tape\ + \ has been commonly utilized as one of physical media for recording and playing\ + \ music. In this research, we propose a novel interactive musical instrument called\ + \ ParaSampling that utilizes the technology of magnetic sound recording, and a\ + \ improvisational sound playing method based on the instrument. While a conventional\ + \ cassette tape player has a single tapehead, which rigidly placed, our instrument\ + \ utilizes multiple handheld tapehead modules as an interface. Players can hold\ + \ the interfaces and press them against the rotating magnetic tape at an any point\ + \ to record or reproduce sounds The player can also easily erase and rewrite the\ + \ sound recorded on the tape. With this instrument, they can achieve improvised\ + \ and unique musical expressions through tangible and spatial interactions. 
In\ + \ this paper, we describe the system design of ParaSampling, the implementation\ + \ of the prototype system, and discuss music expressions enabled by the system.},\n\ + \ address = {Birmingham, UK},\n author = {Han, Joung Min and Kakehi, Yasuaki},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813178},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {543--544},\n publisher = {Birmingham City University},\n title = {ParaSampling:\ + \ A Musical Instrument with Handheld Tapehead Interfaces for Impromptu Recording\ + \ and Playing on a Magnetic Tape},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper104.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177951 + doi: 10.5281/zenodo.4813178 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'laptop orchestra, tangible interaction, grid computing ' - pages: 207--210 - presentation-video: https://vimeo.com/26860960/ - title: Tangible Performance Management of Grid-based Laptop Orchestras - url: http://www.nime.org/proceedings/2011/nime2011_207.pdf - year: 2011 + month: July + pages: 543--544 + publisher: Birmingham City University + title: 'ParaSampling: A Musical Instrument with Handheld Tapehead Interfaces for + Impromptu Recording and Playing on a Magnetic Tape' + url: https://www.nime.org/proceedings/2020/nime2020_paper104.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Dimitrov2011 - abstract: 'A contemporary PC user, typically expects a sound cardto be a piece of - hardware, that: can be manipulated by''audio'' software (most typically exemplified - by ''media players''); and allows interfacing of the PC to audio reproduction - and/or recording equipment. 
As such, a ''sound card''can be considered to be a - system, that encompasses designdecisions on both hardware and software levels - -- that also demand a certain understanding of the architecture of thetarget PC - operating system.This project outlines how an Arduino Duemillanoveboard (containing - a USB interface chip, manufactured byFuture Technology Devices International Ltd - [FTDI]company) can be demonstrated to behave as a full-duplex,mono, 8-bit 44.1 - kHz soundcard, through an implementation of: a PC audio driver for ALSA (Advanced - LinuxSound Architecture); a matching program for theArduino''sATmega microcontroller - -- and nothing more than headphones (and a couple of capacitors). The main contributionof - this paper is to bring a holistic aspect to the discussionon the topic of implementation - of soundcards -- also by referring to open-source driver, microcontroller code - and testmethods; and outline a complete implementation of an open -- yet functional - -- soundcard system.' - address: 'Oslo, Norway' - author: 'Dimitrov, Smilen and Serafin, Stefania' - bibtex: "@inproceedings{Dimitrov2011,\n abstract = {A contemporary PC user, typically\ - \ expects a sound cardto be a piece of hardware, that: can be manipulated by'audio'\ - \ software (most typically exemplified by 'media players'); and allows interfacing\ - \ of the PC to audio reproduction and/or recording equipment. 
As such, a 'sound\ - \ card'can be considered to be a system, that encompasses designdecisions on both\ - \ hardware and software levels -- that also demand a certain understanding of\ - \ the architecture of thetarget PC operating system.This project outlines how\ - \ an Arduino Duemillanoveboard (containing a USB interface chip, manufactured\ - \ byFuture Technology Devices International Ltd [FTDI]company) can be demonstrated\ - \ to behave as a full-duplex,mono, 8-bit 44.1 kHz soundcard, through an implementation\ - \ of: a PC audio driver for ALSA (Advanced LinuxSound Architecture); a matching\ - \ program for theArduino'sATmega microcontroller -- and nothing more than headphones\ - \ (and a couple of capacitors). The main contributionof this paper is to bring\ - \ a holistic aspect to the discussionon the topic of implementation of soundcards\ - \ -- also by referring to open-source driver, microcontroller code and testmethods;\ - \ and outline a complete implementation of an open -- yet functional -- soundcard\ - \ system.},\n address = {Oslo, Norway},\n author = {Dimitrov, Smilen and Serafin,\ - \ Stefania},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177997},\n issn\ - \ = {2220-4806},\n keywords = {alsa,arduino,audio,driver,linux,sound card},\n\ - \ pages = {211--216},\n title = {Audio Arduino -- an ALSA (Advanced Linux Sound\ - \ Architecture) Audio Driver for FTDI-based Arduinos},\n url = {http://www.nime.org/proceedings/2011/nime2011_211.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_105 + abstract: 'A variety of controllers with multifarious sensors and functions have + maximized the real time performers control capabilities. 
The idea behind this + project was to create an interface which enables the interaction between the performers + and the effect processor measuring their brain waves amplitudes, e.g., alpha, + beta, theta, delta and gamma, not necessarily with the user’s awareness. We achieved + this by using an electroencephalography (EEG) sensor for detecting performer’s + different emotional states and, based on these, sending midi messages for digital + processing units automation. The aim is to create a new generation of digital + processor units that could be automatically configured in real-time given the + emotions or thoughts of the performer or the audience. By introducing emotional + state information in the real time control of several aspects of artistic expression, + we highlight the impact of surprise and uniqueness in the artistic performance.' + address: 'Birmingham, UK' + author: 'Filandrianos, Giorgos and Kotsani, Natalia and Dervakos, Edmund G and Stamou, + Giorgos and Amprazis, Vaios and Kiourtzoglou, Panagiotis' + bibtex: "@inproceedings{NIME20_105,\n abstract = {A variety of controllers with\ + \ multifarious sensors and functions have maximized the real time performers control\ + \ capabilities. The idea behind this project was to create an interface which\ + \ enables the interaction between the performers and the effect processor measuring\ + \ their brain waves amplitudes, e.g., alpha, beta, theta, delta and gamma, not\ + \ necessarily with the user’s awareness. We achieved this by using an electroencephalography\ + \ (EEG) sensor for detecting performer’s different emotional states and, based\ + \ on these, sending midi messages for digital processing units automation. The\ + \ aim is to create a new generation of digital processor units that could be automatically\ + \ configured in real-time given the emotions or thoughts of the performer or the\ + \ audience. 
By introducing emotional state information in the real time control\ + \ of several aspects of artistic expression, we highlight the impact of surprise\ + \ and uniqueness in the artistic performance.},\n address = {Birmingham, UK},\n\ + \ author = {Filandrianos, Giorgos and Kotsani, Natalia and Dervakos, Edmund G\ + \ and Stamou, Giorgos and Amprazis, Vaios and Kiourtzoglou, Panagiotis},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813180},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {545--546},\n\ + \ publisher = {Birmingham City University},\n title = {Brainwaves-driven Effects\ + \ Automation in Musical Performance},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper105.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177997 + doi: 10.5281/zenodo.4813180 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'alsa,arduino,audio,driver,linux,sound card' - pages: 211--216 - title: Audio Arduino -- an ALSA (Advanced Linux Sound Architecture) Audio Driver - for FTDI-based Arduinos - url: http://www.nime.org/proceedings/2011/nime2011_211.pdf - year: 2011 + month: July + pages: 545--546 + publisher: Birmingham City University + title: Brainwaves-driven Effects Automation in Musical Performance + url: https://www.nime.org/proceedings/2020/nime2020_paper105.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Kim2011a - abstract: 'In this paper, we introduce a pipe interface that recognizestouch on - tone holes by the resonances in the pipe instead ofa touch sensor. 
This work was - based on the acoustic principles of woodwind instruments without complex sensors - andelectronic circuits to develop a simple and durable interface.The measured - signals were analyzed to show that differentfingerings generate various sounds. - The audible resonancesignal in the pipe interface can be used as a sonic event - formusical expression by itself and also as an input parameterfor mapping different - sounds.' - address: 'Oslo, Norway' - author: 'Kim, Seunghun and Yeo, Woon Seung' - bibtex: "@inproceedings{Kim2011a,\n abstract = {In this paper, we introduce a pipe\ - \ interface that recognizestouch on tone holes by the resonances in the pipe instead\ - \ ofa touch sensor. This work was based on the acoustic principles of woodwind\ - \ instruments without complex sensors andelectronic circuits to develop a simple\ - \ and durable interface.The measured signals were analyzed to show that differentfingerings\ - \ generate various sounds. The audible resonancesignal in the pipe interface can\ - \ be used as a sonic event formusical expression by itself and also as an input\ - \ parameterfor mapping different sounds.},\n address = {Oslo, Norway},\n author\ - \ = {Kim, Seunghun and Yeo, Woon Seung},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178067},\n\ - \ issn = {2220-4806},\n keywords = {resonance, mapping, pipe },\n pages = {217--219},\n\ - \ title = {Musical Control of a Pipe Based on Acoustic Resonance},\n url = {http://www.nime.org/proceedings/2011/nime2011_217.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_106 + abstract: 'This article focuses on the rich potential of hybrid domain translation + of modular synthesis (MS) into virtual reality (VR). It asks: to what extent can + what is valued in studio-based MS practice find a natural home or rich new interpretations + in the immersive capacities of VR? 
The article attends particularly to the relative + affordances and constraints of each as they inform the design and development + of a new system ("Mischmasch") supporting collaborative and performative patching + of Max gen~ patches and operators within a shared room-scale VR space.' + address: 'Birmingham, UK' + author: 'Wakefield, Graham and Palumbo, Michael and Zonta, Alexander' + bibtex: "@inproceedings{NIME20_106,\n abstract = {This article focuses on the rich\ + \ potential of hybrid domain translation of modular synthesis (MS) into virtual\ + \ reality (VR). It asks: to what extent can what is valued in studio-based MS\ + \ practice find a natural home or rich new interpretations in the immersive capacities\ + \ of VR? The article attends particularly to the relative affordances and constraints\ + \ of each as they inform the design and development of a new system (\"Mischmasch\"\ + ) supporting collaborative and performative patching of Max gen~ patches and operators\ + \ within a shared room-scale VR space.},\n address = {Birmingham, UK},\n author\ + \ = {Wakefield, Graham and Palumbo, Michael and Zonta, Alexander},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813182},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {547--550},\n\ + \ publisher = {Birmingham City University},\n title = {Affordances and Constraints\ + \ of Modular Synthesis in Virtual Reality},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper106.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178067 + doi: 10.5281/zenodo.4813182 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'resonance, mapping, pipe ' - pages: 217--219 - title: Musical Control of a Pipe Based on Acoustic Resonance - url: 
http://www.nime.org/proceedings/2011/nime2011_217.pdf - year: 2011 + month: July + pages: 547--550 + publisher: Birmingham City University + title: Affordances and Constraints of Modular Synthesis in Virtual Reality + url: https://www.nime.org/proceedings/2020/nime2020_paper106.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Hansen2011 - abstract: 'In this paper a collaborative music game for two pen tablets is studied - in order to see how two people with no professional music background negotiated - musical improvisation. In an initial study of what it is that constitutes play - fluency in improvisation, a music game has been designed and evaluated through - video analysis: A qualitative view of mutual action describes the social context - of music improvisation: how two people with speech, laughter, gestures, postures - and pauses negotiate individual and joint action. The objective behind the design - of the game application was to support players in some aspects of their mutual - play. Results show that even though players activated additional sound feedback - as a result of their mutual play, players also engaged in forms of mutual play - that the game engine did not account for. These ways of mutual play are descibed - further along with some suggestions for how to direct future designs of collaborative - music improvisation games towards ways of mutual play. ' - address: 'Oslo, Norway' - author: 'Hansen, Anne-Marie S. and Anderson, Hans J. and Raudaskoski, Pirkko' - bibtex: "@inproceedings{Hansen2011,\n abstract = {In this paper a collaborative\ - \ music game for two pen tablets is studied in order to see how two people with\ - \ no professional music background negotiated musical improvisation. 
In an initial\ - \ study of what it is that constitutes play fluency in improvisation, a music\ - \ game has been designed and evaluated through video analysis: A qualitative view\ - \ of mutual action describes the social context of music improvisation: how two\ - \ people with speech, laughter, gestures, postures and pauses negotiate individual\ - \ and joint action. The objective behind the design of the game application was\ - \ to support players in some aspects of their mutual play. Results show that even\ - \ though players activated additional sound feedback as a result of their mutual\ - \ play, players also engaged in forms of mutual play that the game engine did\ - \ not account for. These ways of mutual play are descibed further along with some\ - \ suggestions for how to direct future designs of collaborative music improvisation\ - \ games towards ways of mutual play. },\n address = {Oslo, Norway},\n author =\ - \ {Hansen, Anne-Marie S. and Anderson, Hans J. and Raudaskoski, Pirkko},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178039},\n issn = {2220-4806},\n keywords\ - \ = {Collaborative interfaces, improvisation, interactive music games, social\ - \ interaction, play, novice. },\n pages = {220--223},\n title = {Play Fluency\ - \ in Music Improvisation Games for Novices},\n url = {http://www.nime.org/proceedings/2011/nime2011_220.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_107 + abstract: 'Focusing on interactive performance works borne out of dancer-musician + collaborations, this paper investigates the relationship between the mediums of + sound and movement through a conceptual interpretation of the biological phenomenon + of symbiosis. 
Describing the close and persistent interactions between organisms + of different species, symbioses manifest across a spectrum of relationship types, + each identified according to the health effect experienced by the engaged organisms. + This biological taxonomy is appropriated within a framework which identifies specific + modes of interaction between sound and movement according to the collaborating + practitioners’ intended outcome, and required provisions, cognition of affect, + and system operation. Using the symbiotic framework as an analytical tool, six + dancer-musician collaborations from the field of NIME are examined in respect + to the employed modes of interaction within each of the four examined areas. The + findings reveal the emergence of multiple modes in each work, as well as examples + of mutation between different modes over the course of a performance. Furthermore, + the symbiotic concept provides a novel understanding of the ways gesture recognition + technologies (GRTs) have redefined the relationship dynamics between dancers and + musicians, and suggests a more efficient and inclusive approach in communicating + the potential and limitations presented by Human-Computer Interaction tools.' + address: 'Birmingham, UK' + author: 'moraitis, emmanouil' + bibtex: "@inproceedings{NIME20_107,\n abstract = {Focusing on interactive performance\ + \ works borne out of dancer-musician collaborations, this paper investigates the\ + \ relationship between the mediums of sound and movement through a conceptual\ + \ interpretation of the biological phenomenon of symbiosis. Describing the close\ + \ and persistent interactions between organisms of different species, symbioses\ + \ manifest across a spectrum of relationship types, each identified according\ + \ to the health effect experienced by the engaged organisms. 
This biological taxonomy\ + \ is appropriated within a framework which identifies specific modes of interaction\ + \ between sound and movement according to the collaborating practitioners’ intended\ + \ outcome, and required provisions, cognition of affect, and system operation.\ + \ Using the symbiotic framework as an analytical tool, six dancer-musician collaborations\ + \ from the field of NIME are examined in respect to the employed modes of interaction\ + \ within each of the four examined areas. The findings reveal the emergence of\ + \ multiple modes in each work, as well as examples of mutation between different\ + \ modes over the course of a performance. Furthermore, the symbiotic concept provides\ + \ a novel understanding of the ways gesture recognition technologies (GRTs) have\ + \ redefined the relationship dynamics between dancers and musicians, and suggests\ + \ a more efficient and inclusive approach in communicating the potential and limitations\ + \ presented by Human-Computer Interaction tools.},\n address = {Birmingham, UK},\n\ + \ author = {moraitis, emmanouil},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813184},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {551--556},\n presentation-video = {https://youtu.be/5X6F_nL8SOg},\n\ + \ publisher = {Birmingham City University},\n title = {Symbiosis: a biological\ + \ taxonomy for modes of interaction in dance-music collaborations},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper107.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178039 + doi: 10.5281/zenodo.4813184 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Collaborative interfaces, improvisation, interactive music games, social - interaction, 
play, novice. ' - pages: 220--223 - title: Play Fluency in Music Improvisation Games for Novices - url: http://www.nime.org/proceedings/2011/nime2011_220.pdf - year: 2011 + month: July + pages: 551--556 + presentation-video: https://youtu.be/5X6F_nL8SOg + publisher: Birmingham City University + title: 'Symbiosis: a biological taxonomy for modes of interaction in dance-music + collaborations' + url: https://www.nime.org/proceedings/2020/nime2020_paper107.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Ramkissoon2011 - abstract: 'The Bass Sleeve uses an Arduino board with a combination of buttons, - switches, flex sensors, force sensing resistors, and an accelerometer to map the - ancillary movements of a performer to sampling, real-time audio and video processing - including pitch shifting, delay, low pass filtering, and onscreen video movement. - The device was created to augment the existing functions of the electric bass - and explore the use of ancillary gestures to control the laptop in a live performance. - In this research it was found that incorporating ancillary gestures into a live - performance could be useful when controlling the parameters of audio processing, - sound synthesis and video manipulation. These ancillary motions can be a practical - solution to gestural multitasking allowing independent control of computer music - parameters while performing with the electric bass. The process of performing - with the Bass Sleeve resulted in a greater amount of laptop control, an increase - in the amount of expressiveness using the electric bass in combination with the - laptop, and an improvement in the interactivity on both the electric bass and - laptop during a live performance. The design uses various gesture-to-sound mapping - strategies to accomplish a compositional task during an electro acoustic multimedia - musical performance piece. 
' - address: 'Oslo, Norway' - author: 'Ramkissoon, Izzi' - bibtex: "@inproceedings{Ramkissoon2011,\n abstract = {The Bass Sleeve uses an Arduino\ - \ board with a combination of buttons, switches, flex sensors, force sensing resistors,\ - \ and an accelerometer to map the ancillary movements of a performer to sampling,\ - \ real-time audio and video processing including pitch shifting, delay, low pass\ - \ filtering, and onscreen video movement. The device was created to augment the\ - \ existing functions of the electric bass and explore the use of ancillary gestures\ - \ to control the laptop in a live performance. In this research it was found that\ - \ incorporating ancillary gestures into a live performance could be useful when\ - \ controlling the parameters of audio processing, sound synthesis and video manipulation.\ - \ These ancillary motions can be a practical solution to gestural multitasking\ - \ allowing independent control of computer music parameters while performing with\ - \ the electric bass. The process of performing with the Bass Sleeve resulted in\ - \ a greater amount of laptop control, an increase in the amount of expressiveness\ - \ using the electric bass in combination with the laptop, and an improvement in\ - \ the interactivity on both the electric bass and laptop during a live performance.\ - \ The design uses various gesture-to-sound mapping strategies to accomplish a\ - \ compositional task during an electro acoustic multimedia musical performance\ - \ piece. 
},\n address = {Oslo, Norway},\n author = {Ramkissoon, Izzi},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178141},\n issn = {2220-4806},\n keywords\ - \ = {Interactive Music, Interactive Performance Systems, Gesture Controllers,\ - \ Augmented Instruments, Electric Bass, Video Tracking },\n pages = {224--227},\n\ - \ title = {The Bass Sleeve: A Real-time Multimedia Gestural Controller for Augmented\ - \ Electric Bass Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_224.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_108 + abstract: 'We present Olly, a musical textile tangible user interface (TUI) designed + around the observations of a group of five children with autism who like music. + The intention is to support scaffolding social interactions and sensory regulation + during a semi-structured and open-ended playful activity. Olly was tested in the + dance studio of a special education needs (SEN) school in North-East London, UK, + for a period of 5 weeks, every Thursday afternoon for 30 minutes. Olly uses one + Bare touch board in midi mode and four stretch analog sensors embedded inside + four elastic ribbons. These ribbons top the main body of the installation which + is made by using an inflatable gym ball wrapped in felt. Each of the ribbons plays + a different instrument and triggers different harmonic chords. Olly allows to + play pleasant melodies if interacting with it in solo mode and more complex harmonies + when playing together with others. Results show great potentials for carefully + designed musical TUI implementation aimed at scaffolding social play while affording + self-regulation in SEN contexts. We present a brief introduction on the background + and motivations, design considerations and results.' 
+ address: 'Birmingham, UK' + author: 'Nonnis, Antonella and Bryan-Kinns, Nick' + bibtex: "@inproceedings{NIME20_108,\n abstract = {We present Olly, a musical textile\ + \ tangible user interface (TUI) designed around the observations of a group of\ + \ five children with autism who like music. The intention is to support scaffolding\ + \ social interactions and sensory regulation during a semi-structured and open-ended\ + \ playful activity. Olly was tested in the dance studio of a special education\ + \ needs (SEN) school in North-East London, UK, for a period of 5 weeks, every\ + \ Thursday afternoon for 30 minutes. Olly uses one Bare touch board in midi mode\ + \ and four stretch analog sensors embedded inside four elastic ribbons. These\ + \ ribbons top the main body of the installation which is made by using an inflatable\ + \ gym ball wrapped in felt. Each of the ribbons plays a different instrument and\ + \ triggers different harmonic chords. Olly allows to play pleasant melodies if\ + \ interacting with it in solo mode and more complex harmonies when playing together\ + \ with others. Results show great potentials for carefully designed musical TUI\ + \ implementation aimed at scaffolding social play while affording self-regulation\ + \ in SEN contexts. 
We present a brief introduction on the background and motivations,\ + \ design considerations and results.},\n address = {Birmingham, UK},\n author\ + \ = {Nonnis, Antonella and Bryan-Kinns, Nick},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.4813186},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {557--558},\n publisher = {Birmingham\ + \ City University},\n title = {Όλοι: music making to scaffold social playful activities\ + \ and self-regulation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper108.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178141 + doi: 10.5281/zenodo.4813186 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Interactive Music, Interactive Performance Systems, Gesture Controllers, - Augmented Instruments, Electric Bass, Video Tracking ' - pages: 224--227 - title: 'The Bass Sleeve: A Real-time Multimedia Gestural Controller for Augmented - Electric Bass Performance' - url: http://www.nime.org/proceedings/2011/nime2011_224.pdf - year: 2011 + month: July + pages: 557--558 + publisher: Birmingham City University + title: 'Όλοι: music making to scaffold social playful activities and self-regulation' + url: https://www.nime.org/proceedings/2020/nime2020_paper108.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Kapur2011 - abstract: "This paper describes the KarmetiK NotomotoN, a new musical robotic system\ - \ for performance and education. A long time goal of the ,\n,\nauthors has been\ - \ to provide users with plug-andplay, highly expressive musical robot system with\ - \ a high degree of portability. 
This paper describes the technical details of\ - \ the NotomotoN, and discusses its use in performance and educational scenarios.\ - \ Detailed tests performed to optimize technical aspects of the NotomotoN are\ - \ described to highlight usability and performance specifications for electronic\ - \ musicians and educators. " - address: 'Oslo, Norway' - author: 'Kapur, Ajay and Darling, Michael and Murphy, Jim and Hochenbaum, Jordan - and Diakopoulos, Dimitri and Trimpin, Trimpin' - bibtex: "@inproceedings{Kapur2011,\n abstract = {This paper describes the KarmetiK\ - \ NotomotoN, a new musical robotic system for performance and education. A long\ - \ time goal of the ,\n,\nauthors has been to provide users with plug-andplay,\ - \ highly expressive musical robot system with a high degree of portability. This\ - \ paper describes the technical details of the NotomotoN, and discusses its use\ - \ in performance and educational scenarios. Detailed tests performed to optimize\ - \ technical aspects of the NotomotoN are described to highlight usability and\ - \ performance specifications for electronic musicians and educators. 
},\n address\ - \ = {Oslo, Norway},\n author = {Kapur, Ajay and Darling, Michael and Murphy, Jim\ - \ and Hochenbaum, Jordan and Diakopoulos, Dimitri and Trimpin, Trimpin},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178059},\n issn = {2220-4806},\n keywords\ - \ = {music technology,musical robotics,robotic performance},\n pages = {228--231},\n\ - \ title = {The KarmetiK NotomotoN : A New Breed of Musical Robot for Teaching\ - \ and Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_228.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_109 + abstract: 'Identity is inextricably linked to culture and sustained through creation + and performance of music and dance, yet discussion of agency and cultural tools + informing design and performance application of gestural controllers is not widely + discussed. The purpose of this paper is to discuss the cultural body, its consideration + in existing gestural controller design, and how cultural design methods have the + potential to extend musical/social identities and/or traditions within a technological + context. In an effort to connect and reconnect with the author’s personal Nikkei + heritage, this paper will discuss the design of Nami – a custom built gestural + controller and its applicability to extend the author’s cultural body through + a community-centric case study performance.' + address: 'Birmingham, UK' + author: 'Sithi-Amnuai, Sara' + bibtex: "@inproceedings{NIME20_109,\n abstract = {Identity is inextricably linked\ + \ to culture and sustained through creation and performance of music and dance,\ + \ yet discussion of agency and cultural tools informing design and performance\ + \ application of gestural controllers is not widely discussed. 
The purpose of\ + \ this paper is to discuss the cultural body, its consideration in existing gestural\ + \ controller design, and how cultural design methods have the potential to extend\ + \ musical/social identities and/or traditions within a technological context.\ + \ In an effort to connect and reconnect with the author’s personal Nikkei heritage,\ + \ this paper will discuss the design of Nami – a custom built gestural controller\ + \ and its applicability to extend the author’s cultural body through a community-centric\ + \ case study performance.},\n address = {Birmingham, UK},\n author = {Sithi-Amnuai,\ + \ Sara},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.4813188},\n editor = {Romain\ + \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ + \ = {559--563},\n presentation-video = {https://youtu.be/QCUGtE_z1LE},\n publisher\ + \ = {Birmingham City University},\n title = {Exploring Identity Through Design:\ + \ A Focus on the Cultural Body Via Nami},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper109.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178059 + doi: 10.5281/zenodo.4813188 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'music technology,musical robotics,robotic performance' - pages: 228--231 - title: 'The KarmetiK NotomotoN : A New Breed of Musical Robot for Teaching and Performance' - url: http://www.nime.org/proceedings/2011/nime2011_228.pdf - year: 2011 + month: July + pages: 559--563 + presentation-video: https://youtu.be/QCUGtE_z1LE + publisher: Birmingham City University + title: 'Exploring Identity Through Design: A Focus on the Cultural Body Via Nami' + url: https://www.nime.org/proceedings/2020/nime2020_paper109.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Barenca2011 - 
abstract: 'The Manipuller is a novel Gestural Controller based on strings manipulation - and multi-dimensional force sensing technology. This paper describes its motivation, - design and operational principles along with some of its musical applications. - Finally the results of a preliminary usability test are presented and discussed. ' - address: 'Oslo, Norway' - author: 'Barenca, Adrián and Torre, Giuseppe' - bibtex: "@inproceedings{Barenca2011,\n abstract = {The Manipuller is a novel Gestural\ - \ Controller based on strings manipulation and multi-dimensional force sensing\ - \ technology. This paper describes its motivation, design and operational principles\ - \ along with some of its musical applications. Finally the results of a preliminary\ - \ usability test are presented and discussed. },\n address = {Oslo, Norway},\n\ - \ author = {Barenca, Adri\\'{a}n and Torre, Giuseppe},\n booktitle = {Proceedings\ + ID: NIME20_11 + abstract: 'With the development of web audio standards, it has quickly become technically + easy to develop and deploy software for inviting audiences to participate in musical + performances using their mobile phones. Thus, a new audience-centric musical genre + has emerged, which aligns with artistic manifestations where there is an explicit + inclusion of the public (e.g. participatory art, cinema or theatre). Previous + research has focused on analysing this new genre from historical, social organisation + and technical perspectives. This follow-up paper contributes with reflections + on technical and aesthetic aspects of composing within this audience-centric approach. + We propose a set of 13 composition dimensions that deal with the role of the performer, + the role of the audience, the location of sound and the type of feedback, among + others. From a reflective approach, four participatory pieces developed by the + authors are analysed using the proposed dimensions. 
Finally, we discuss a set + of recommendations and challenges for the composers-developers of this new and + promising musical genre. This paper concludes discussing the implications of this + research for the NIME community.' + address: 'Birmingham, UK' + author: 'Xambó, Anna and Roma, Gerard' + bibtex: "@inproceedings{NIME20_11,\n abstract = {With the development of web audio\ + \ standards, it has quickly become technically easy to develop and deploy software\ + \ for inviting audiences to participate in musical performances using their mobile\ + \ phones. Thus, a new audience-centric musical genre has emerged, which aligns\ + \ with artistic manifestations where there is an explicit inclusion of the public\ + \ (e.g. participatory art, cinema or theatre). Previous research has focused on\ + \ analysing this new genre from historical, social organisation and technical\ + \ perspectives. This follow-up paper contributes with reflections on technical\ + \ and aesthetic aspects of composing within this audience-centric approach. We\ + \ propose a set of 13 composition dimensions that deal with the role of the performer,\ + \ the role of the audience, the location of sound and the type of feedback, among\ + \ others. From a reflective approach, four participatory pieces developed by the\ + \ authors are analysed using the proposed dimensions. Finally, we discuss a set\ + \ of recommendations and challenges for the composers-developers of this new and\ + \ promising musical genre. 
This paper concludes discussing the implications of\ + \ this research for the NIME community.},\n address = {Birmingham, UK},\n author\ + \ = {Xambó, Anna and Roma, Gerard},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813192},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {55--60},\n publisher = {Birmingham City University},\n\ + \ title = {Performing Audiences: Composition Strategies for Network Music using\ + \ Mobile Phones},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper11.pdf},\n\ + \ year = {2020}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.4813192 + editor: Romain Michon and Franziska Schroeder + issn: 2220-4806 + month: July + pages: 55--60 + publisher: Birmingham City University + title: 'Performing Audiences: Composition Strategies for Network Music using Mobile + Phones' + url: https://www.nime.org/proceedings/2020/nime2020_paper11.pdf + year: 2020 + + +- ENTRYTYPE: inproceedings + ID: NIME20_110 + abstract: 'This paper reflects on players'' first responses to a constrained Accessible + Digital Musical Instrument (ADMI) in open, child-led sessions with seven children + at a special school. Each player''s gestures with the instrument were sketched, + categorised and compared with those of others among the group. Additionally, sensor + data from the instruments was recorded and analysed to give a secondary indication + of playing style, based on note and silence durations. In accord with previous + studies, the high degree of constraints led to a diverse range of playing styles, + allowing each player to appropriate and explore the instruments within a short + inaugural session. 
The open, undirected sessions also provided insights which + could potentially direct future work based on each person''s responses to the + instruments. The paper closes with a short discussion of these diverse styles, + and the potential role constrained ADMIs could serve as ''ice-breakers'' in musical + projects that seek to co-produce or co-design with neurodiverse children and young + people.' + address: 'Birmingham, UK' + author: 'Wright, Joe' + bibtex: "@inproceedings{NIME20_110,\n abstract = {This paper reflects on players'\ + \ first responses to a constrained Accessible Digital Musical Instrument (ADMI)\ + \ in open, child-led sessions with seven children at a special school. Each player's\ + \ gestures with the instrument were sketched, categorised and compared with those\ + \ of others among the group. Additionally, sensor data from the instruments was\ + \ recorded and analysed to give a secondary indication of playing style, based\ + \ on note and silence durations. In accord with previous studies, the high degree\ + \ of constraints led to a diverse range of playing styles, allowing each player\ + \ to appropriate and explore the instruments within a short inaugural session.\ + \ The open, undirected sessions also provided insights which could potentially\ + \ direct future work based on each person's responses to the instruments. 
The\ + \ paper closes with a short discussion of these diverse styles, and the potential\ + \ role constrained ADMIs could serve as 'ice-breakers' in musical projects that\ + \ seek to co-produce or co-design with neurodiverse children and young people.},\n\ + \ address = {Birmingham, UK},\n author = {Wright, Joe},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177949},\n issn = {2220-4806},\n keywords = {1,and force\ - \ sensors within,force sensing,gestural,gestural controller,manipulation,strings,strings\ - \ and force sensing,the integration of strings},\n pages = {232--235},\n title\ - \ = {The Manipuller : Strings Manipulation and Multi-Dimensional Force Sensing},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_232.pdf},\n year = {2011}\n\ - }\n" + \ doi = {10.5281/zenodo.4813194},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {564--569},\n presentation-video\ + \ = {https://youtu.be/RhaIzCXQ3uo},\n publisher = {Birmingham City University},\n\ + \ title = {The Appropriation and Utility of Constrained ADMIs},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper110.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177949 + doi: 10.5281/zenodo.4813194 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: '1,and force sensors within,force sensing,gestural,gestural controller,manipulation,strings,strings - and force sensing,the integration of strings' - pages: 232--235 - title: 'The Manipuller : Strings Manipulation and Multi-Dimensional Force Sensing' - url: http://www.nime.org/proceedings/2011/nime2011_232.pdf - year: 2011 + month: July + pages: 564--569 + presentation-video: https://youtu.be/RhaIzCXQ3uo + publisher: Birmingham City University + title: The Appropriation and 
Utility of Constrained ADMIs + url: https://www.nime.org/proceedings/2020/nime2020_paper110.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Crevoisier2011 - abstract: 'The Surface Editor is a software tool for creating control interfaces - and mapping input actions to OSC or MIDI actions very easily and intuitively. - Originally conceived to be used with a tactile interface, the Surface Editor has - been extended to support the creation of graspable interfaces as well. This paper - presents a new framework for the generic mapping of user actions with graspable - objects on a surface. We also present a system for detecting touch on thin objects, - allowing for extended interactive possibilities. The Surface Editor is not limited - to a particular tracking system though, and the generic mapping approach for objects - can have a broader use with various input interfaces supporting touch and/or objects. ' - address: 'Oslo, Norway' - author: 'Crevoisier, Alain and Picard-Limpens, Cécile' - bibtex: "@inproceedings{Crevoisier2011,\n abstract = {The Surface Editor is a software\ - \ tool for creating control interfaces and mapping input actions to OSC or MIDI\ - \ actions very easily and intuitively. Originally conceived to be used with a\ - \ tactile interface, the Surface Editor has been extended to support the creation\ - \ of graspable interfaces as well. This paper presents a new framework for the\ - \ generic mapping of user actions with graspable objects on a surface. We also\ - \ present a system for detecting touch on thin objects, allowing for extended\ - \ interactive possibilities. 
The Surface Editor is not limited to a particular\ - \ tracking system though, and the generic mapping approach for objects can have\ - \ a broader use with various input interfaces supporting touch and/or objects.\ - \ },\n address = {Oslo, Norway},\n author = {Crevoisier, Alain and Picard-Limpens,\ - \ C\\'{e}cile},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177989},\n\ - \ issn = {2220-4806},\n keywords = {NIME, mapping, interaction, user-defined interfaces,\ - \ tangibles, graspable interfaces. },\n pages = {236--239},\n title = {Mapping\ - \ Objects with the Surface Editor},\n url = {http://www.nime.org/proceedings/2011/nime2011_236.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_111 + abstract: 'When performing with new instruments, musicians often develop new performative + gestures and playing techniques. Music performance studies on new instruments + often consider interfaces that feature a spectrum of gestures similar to already + existing sound production techniques. This paper considers the choices performers + make when creating an idiomatic gestural language for an entirely unfamiliar instrument. + We designed a musical interface with a unique large-scale layout to encourage + new performers to create fully original instrument-body interactions. We conducted + a study where trained musicians were invited to perform one of two versions of + the same instrument, each physically identical but with a different tone mapping. + The study results reveal insights into how musicians develop novel performance + gestures when encountering a new instrument characterised by an unfamiliar shape + and size. Our discussion highlights the impact of an instrument’s scale and layout + on the emergence of new gestural vocabularies and on the qualities of the music + performed.' 
+ address: 'Birmingham, UK' + author: 'Mice, Lia and McPherson, Andrew' + bibtex: "@inproceedings{NIME20_111,\n abstract = {When performing with new instruments,\ + \ musicians often develop new performative gestures and playing techniques. Music\ + \ performance studies on new instruments often consider interfaces that feature\ + \ a spectrum of gestures similar to already existing sound production techniques.\ + \ This paper considers the choices performers make when creating an idiomatic\ + \ gestural language for an entirely unfamiliar instrument. We designed a musical\ + \ interface with a unique large-scale layout to encourage new performers to create\ + \ fully original instrument-body interactions. We conducted a study where trained\ + \ musicians were invited to perform one of two versions of the same instrument,\ + \ each physically identical but with a different tone mapping. The study results\ + \ reveal insights into how musicians develop novel performance gestures when encountering\ + \ a new instrument characterised by an unfamiliar shape and size. 
Our discussion\ + \ highlights the impact of an instrument’s scale and layout on the emergence of\ + \ new gestural vocabularies and on the qualities of the music performed.},\n address\ + \ = {Birmingham, UK},\n author = {Mice, Lia and McPherson, Andrew},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813200},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {570--575},\n\ + \ presentation-video = {https://youtu.be/mnJN8ELneUU},\n publisher = {Birmingham\ + \ City University},\n title = {From miming to NIMEing: the development of idiomatic\ + \ gestural language on large scale DMIs},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper111.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177989 + doi: 10.5281/zenodo.4813200 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'NIME, mapping, interaction, user-defined interfaces, tangibles, graspable - interfaces. ' - pages: 236--239 - title: Mapping Objects with the Surface Editor - url: http://www.nime.org/proceedings/2011/nime2011_236.pdf - year: 2011 + month: July + pages: 570--575 + presentation-video: https://youtu.be/mnJN8ELneUU + publisher: Birmingham City University + title: 'From miming to NIMEing: the development of idiomatic gestural language on + large scale DMIs' + url: https://www.nime.org/proceedings/2020/nime2020_paper111.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Hochenbaum2011 - abstract: 'This paper presents the SmartFiducial, a wireless tangible object that - facilitates additional modes of expressivity for vision-based tabletop surfaces. - Using infrared proximity sensing and resistive based force-sensors, the SmartFiducial - affords users unique, and highly gestural inputs. 
Furthermore, the SmartFiducial - incorporates additional customizable pushbutton switches. Using XBee radio frequency - (RF) wireless transmission, the SmartFiducial establishes bipolar communication - with a host computer. This paper describes the design and implementation of the - SmartFiducial, as well as an exploratory use in a musical context. ' - address: 'Oslo, Norway' - author: 'Hochenbaum, Jordan and Kapur, Ajay' - bibtex: "@inproceedings{Hochenbaum2011,\n abstract = {This paper presents the SmartFiducial,\ - \ a wireless tangible object that facilitates additional modes of expressivity\ - \ for vision-based tabletop surfaces. Using infrared proximity sensing and resistive\ - \ based force-sensors, the SmartFiducial affords users unique, and highly gestural\ - \ inputs. Furthermore, the SmartFiducial incorporates additional customizable\ - \ pushbutton switches. Using XBee radio frequency (RF) wireless transmission,\ - \ the SmartFiducial establishes bipolar communication with a host computer. This\ - \ paper describes the design and implementation of the SmartFiducial, as well\ - \ as an exploratory use in a musical context. },\n address = {Oslo, Norway},\n\ - \ author = {Hochenbaum, Jordan and Kapur, Ajay},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178045},\n issn = {2220-4806},\n keywords = {Fiducial, Tangible\ - \ Interface, Multi-touch, Sensors, Gesture, Haptics, Bricktable, Proximity Sensing\ - \ },\n pages = {240--243},\n title = {Adding Z-Depth and Pressure Expressivity\ - \ to Tangible Tabletop Surfaces},\n url = {http://www.nime.org/proceedings/2011/nime2011_240.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_112 + abstract: 'The Cyclops is an eye-gaze controlled instrument designed for live performance + and improvisation. 
It is primarily mo- tivated by a need for expressive musical + instruments that are more easily accessible to people who rely on eye track- ers + for computer access, such as those with amyotrophic lateral sclerosis (ALS). At + its current implementation, the Cyclops contains a synthesizer and sequencer, + and provides the ability to easily create and automate musical parameters and + effects through recording eye-gaze gestures on a two- dimensional canvas. In this + paper, we frame our prototype in the context of previous eye-controlled instruments, + and we discuss we designed the Cyclops to make gaze-controlled music making as + fun, accessible, and seamless as possible despite notable interaction challenges + like latency, inaccu- racy, and “Midas Touch.”' + address: 'Birmingham, UK' + author: 'Payne, William C and Paradiso, Ann and Kane, Shaun' + bibtex: "@inproceedings{NIME20_112,\n abstract = {The Cyclops is an eye-gaze controlled\ + \ instrument designed for live performance and improvisation. It is primarily\ + \ mo- tivated by a need for expressive musical instruments that are more easily\ + \ accessible to people who rely on eye track- ers for computer access, such as\ + \ those with amyotrophic lateral sclerosis (ALS). At its current implementation,\ + \ the Cyclops contains a synthesizer and sequencer, and provides the ability to\ + \ easily create and automate musical parameters and effects through recording\ + \ eye-gaze gestures on a two- dimensional canvas. 
In this paper, we frame our\ + \ prototype in the context of previous eye-controlled instruments, and we discuss\ + \ we designed the Cyclops to make gaze-controlled music making as fun, accessible,\ + \ and seamless as possible despite notable interaction challenges like latency,\ + \ inaccu- racy, and “Midas Touch.”},\n address = {Birmingham, UK},\n author =\ + \ {Payne, William C and Paradiso, Ann and Kane, Shaun},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813204},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {576--580},\n presentation-video\ + \ = {https://youtu.be/G6dxngoCx60},\n publisher = {Birmingham City University},\n\ + \ title = {Cyclops: Designing an eye-controlled instrument for accessibility and\ + \ flexible use},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper112.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178045 + doi: 10.5281/zenodo.4813204 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Fiducial, Tangible Interface, Multi-touch, Sensors, Gesture, Haptics, - Bricktable, Proximity Sensing ' - pages: 240--243 - title: Adding Z-Depth and Pressure Expressivity to Tangible Tabletop Surfaces - url: http://www.nime.org/proceedings/2011/nime2011_240.pdf - year: 2011 + month: July + pages: 576--580 + presentation-video: https://youtu.be/G6dxngoCx60 + publisher: Birmingham City University + title: 'Cyclops: Designing an eye-controlled instrument for accessibility and flexible + use' + url: https://www.nime.org/proceedings/2020/nime2020_paper112.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Milne2011 - address: 'Oslo, Norway' - author: 'Milne, Andrew J. and Xamb\''{o}, Anna and Laney, Robin and Sharp, David - B. 
and Prechtl, Anthony and Holland, Simon' - bibtex: "@inproceedings{Milne2011,\n address = {Oslo, Norway},\n author = {Milne,\ - \ Andrew J. and Xamb\\'{o}, Anna and Laney, Robin and Sharp, David B. and Prechtl,\ - \ Anthony and Holland, Simon},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178109},\n\ - \ issn = {2220-4806},\n keywords = {generalized keyboard, isomorphic layout, multi-touch\ - \ surface, tablet, musical interface design, iPad, microtonality },\n pages =\ - \ {244--247},\n title = {Hex Player --- A Virtual Musical Controller},\n url =\ - \ {http://www.nime.org/proceedings/2011/nime2011_244.pdf},\n year = {2011}\n}\n" + ID: NIME20_113 + abstract: 'This paper presents the results of an observational study focusing on + the collaborative learning processes of a group of performers with an interactive + musical system. The main goal of this study was to implement methods for learning + and developing practice with these technological objects in order to generate + future pedagogical methods. During the research period of six months, four participants + regularly engaged in workshop-type scenarios where learning objectives were proposed + and guided by themselves.The principal researcher, working as participant-observer, + did not impose or prescribed learning objectives to the other members of the group. + Rather, all participants had equal say in what was to be done and how it was to + be accomplished. Results show that the group learning environment is rich in opportunities + for learning, mutual teaching, and for establishing a comunal practice for a given + interactive musical system.Key findings suggest that learning by demonstration, + observation and modelling are significant for learning in this context. 
Additionally, + it was observed that a dialogue and a continuous flow of information between the + members of the community is needed in order to motivate and further their learning.' + address: 'Birmingham, UK' + author: 'Marquez-Borbon, Adnan' + bibtex: "@inproceedings{NIME20_113,\n abstract = {This paper presents the results\ + \ of an observational study focusing on the collaborative learning processes of\ + \ a group of performers with an interactive musical system. The main goal of this\ + \ study was to implement methods for learning and developing practice with these\ + \ technological objects in order to generate future pedagogical methods. During\ + \ the research period of six months, four participants regularly engaged in workshop-type\ + \ scenarios where learning objectives were proposed and guided by themselves.The\ + \ principal researcher, working as participant-observer, did not impose or prescribed\ + \ learning objectives to the other members of the group. Rather, all participants\ + \ had equal say in what was to be done and how it was to be accomplished. Results\ + \ show that the group learning environment is rich in opportunities for learning,\ + \ mutual teaching, and for establishing a comunal practice for a given interactive\ + \ musical system.Key findings suggest that learning by demonstration, observation\ + \ and modelling are significant for learning in this context. 
Additionally, it\ + \ was observed that a dialogue and a continuous flow of information between the\ + \ members of the community is needed in order to motivate and further their learning.},\n\ + \ address = {Birmingham, UK},\n author = {Marquez-Borbon, Adnan},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813206},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {581--586},\n\ + \ presentation-video = {https://youtu.be/1G0bOVlWwyI},\n publisher = {Birmingham\ + \ City University},\n title = {Collaborative Learning with Interactive Music Systems},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper113.pdf},\n year\ + \ = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178109 + doi: 10.5281/zenodo.4813206 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'generalized keyboard, isomorphic layout, multi-touch surface, tablet, - musical interface design, iPad, microtonality ' - pages: 244--247 - title: Hex Player --- A Virtual Musical Controller - url: http://www.nime.org/proceedings/2011/nime2011_244.pdf - year: 2011 + month: July + pages: 581--586 + presentation-video: https://youtu.be/1G0bOVlWwyI + publisher: Birmingham City University + title: Collaborative Learning with Interactive Music Systems + url: https://www.nime.org/proceedings/2020/nime2020_paper113.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Waadeland2011 - address: 'Oslo, Norway' - author: 'Waadeland, Carl H.' 
- bibtex: "@inproceedings{Waadeland2011,\n address = {Oslo, Norway},\n author = {Waadeland,\ - \ Carl H.},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178185},\n issn\ - \ = {2220-4806},\n keywords = {gesture,movement,rhythm performance,spectral analysis},\n\ - \ pages = {248--251},\n title = {Rhythm Performance from a Spectral Point of View},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_248.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_114 + abstract: 'This paper presents WELLE, a web-based music environment for blind people, + and describes its development, design, notation syntax and first experiences. + WELLE is intended to serve as a collaborative, performative and educational tool + to quickly create and record musical ideas. It is pattern-oriented, based on textual + notation and focuses on accessibility, playful interaction and ease of use. WELLE + was developed as part of the research project Tangible Signals and will also serve + as a platform for the integration of upcoming new interfaces.' + address: 'Birmingham, UK' + author: 'Vetter, Jens' + bibtex: "@inproceedings{NIME20_114,\n abstract = {This paper presents WELLE, a web-based\ + \ music environment for blind people, and describes its development, design, notation\ + \ syntax and first experiences. WELLE is intended to serve as a collaborative,\ + \ performative and educational tool to quickly create and record musical ideas.\ + \ It is pattern-oriented, based on textual notation and focuses on accessibility,\ + \ playful interaction and ease of use. 
WELLE was developed as part of the research\ + \ project Tangible Signals and will also serve as a platform for the integration\ + \ of upcoming new interfaces.},\n address = {Birmingham, UK},\n author = {Vetter,\ + \ Jens},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.4813208},\n editor = {Romain\ + \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ + \ = {587--590},\n publisher = {Birmingham City University},\n title = {WELLE -\ + \ a web-based music environment for the blind},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper114.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178185 + doi: 10.5281/zenodo.4813208 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'gesture,movement,rhythm performance,spectral analysis' - pages: 248--251 - title: Rhythm Performance from a Spectral Point of View - url: http://www.nime.org/proceedings/2011/nime2011_248.pdf - year: 2011 + month: July + pages: 587--590 + publisher: Birmingham City University + title: WELLE - a web-based music environment for the blind + url: https://www.nime.org/proceedings/2020/nime2020_paper114.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Comajuncosas2011 - abstract: 'This research presents a 3D gestural interface for collaborative concatenative - sound synthesis and audio mosaicing.Our goal is to improve the communication between - the audience and performers by means of an enhanced correlationbetween gestures - and musical outcome. Nuvolet consists ofa 3D motion controller coupled to a concatenative - synthesis engine. 
The interface detects and tracks the performers hands in four - dimensions (x,y,z,t) and allows them toconcurrently explore two or three-dimensional - sound cloudrepresentations of the units from the sound corpus, as wellas to perform - collaborative target-based audio mosaicing.Nuvolet is included in the Esmuc Laptop - Orchestra catalogfor forthcoming performances.' - address: 'Oslo, Norway' - author: 'Comajuncosas, Josep M. and Barrachina, Alex and O''Connell, John and Guaus, - Enric' - bibtex: "@inproceedings{Comajuncosas2011,\n abstract = {This research presents a\ - \ 3D gestural interface for collaborative concatenative sound synthesis and audio\ - \ mosaicing.Our goal is to improve the communication between the audience and\ - \ performers by means of an enhanced correlationbetween gestures and musical outcome.\ - \ Nuvolet consists ofa 3D motion controller coupled to a concatenative synthesis\ - \ engine. The interface detects and tracks the performers hands in four dimensions\ - \ (x,y,z,t) and allows them toconcurrently explore two or three-dimensional sound\ - \ cloudrepresentations of the units from the sound corpus, as wellas to perform\ - \ collaborative target-based audio mosaicing.Nuvolet is included in the Esmuc\ - \ Laptop Orchestra catalogfor forthcoming performances.},\n address = {Oslo, Norway},\n\ - \ author = {Comajuncosas, Josep M. 
and Barrachina, Alex and O'Connell, John and\ - \ Guaus, Enric},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177987},\n\ - \ issn = {2220-4806},\n keywords = {concatenative synthesis, audio mosaicing,\ - \ open-air interface, gestural controller, musical instrument, 3D },\n pages =\ - \ {252--255},\n title = {Nuvolet: {3D} Gesture-driven Collaborative Audio Mosaicing},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_252.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_115 + abstract: 'This paper presents an overview of the design principles behind Digital + Music Instruments (DMIs) for education across all editions of the International + Conference on New Interfaces for Music Expression (NIME). We compiled a comprehensive + catalogue of over hundred DMIs with varying degrees of applicability in the educational + practice. Each catalogue entry is annotated according to a proposed taxonomy for + DMIs for education, rooted in the mechanics of control, mapping and feedback of + an interactive music system, along with the required expertise of target user + groups and the instrument learning curve. Global statistics unpack underlying + trends and design goals across the chronological period of the NIME conference. + In recent years, we note a growing number of DMIs targeting non-experts and with + reduced requirements in terms of expertise. Stemming from the identified trends, + we discuss future challenges in the design of DMIs for education towards enhanced + degrees of variation and unpredictability.' 
+ address: 'Birmingham, UK' + author: 'Pessoa, Margarida and Parauta, Cláudio and Luís, Pedro and Corintha, Isabela + and Bernardes, Gilberto' + bibtex: "@inproceedings{NIME20_115,\n abstract = {This paper presents an overview\ + \ of the design principles behind Digital Music Instruments (DMIs) for education\ + \ across all editions of the International Conference on New Interfaces for Music\ + \ Expression (NIME). We compiled a comprehensive catalogue of over hundred DMIs\ + \ with varying degrees of applicability in the educational practice. Each catalogue\ + \ entry is annotated according to a proposed taxonomy for DMIs for education,\ + \ rooted in the mechanics of control, mapping and feedback of an interactive music\ + \ system, along with the required expertise of target user groups and the instrument\ + \ learning curve. Global statistics unpack underlying trends and design goals\ + \ across the chronological period of the NIME conference. In recent years, we\ + \ note a growing number of DMIs targeting non-experts and with reduced requirements\ + \ in terms of expertise. 
Stemming from the identified trends, we discuss future\ + \ challenges in the design of DMIs for education towards enhanced degrees of variation\ + \ and unpredictability.},\n address = {Birmingham, UK},\n author = {Pessoa, Margarida\ + \ and Parauta, Cláudio and Luís, Pedro and Corintha, Isabela and Bernardes, Gilberto},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813210},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {591--595},\n publisher = {Birmingham City University},\n title = {Examining\ + \ Temporal Trends and Design Goals of Digital Music Instruments for Education\ + \ in NIME: A Proposed Taxonomy},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper115.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177987 + doi: 10.5281/zenodo.4813210 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'concatenative synthesis, audio mosaicing, open-air interface, gestural - controller, musical instrument, 3D ' - pages: 252--255 - title: 'Nuvolet: 3D Gesture-driven Collaborative Audio Mosaicing' - url: http://www.nime.org/proceedings/2011/nime2011_252.pdf - year: 2011 + month: July + pages: 591--595 + publisher: Birmingham City University + title: 'Examining Temporal Trends and Design Goals of Digital Music Instruments + for Education in NIME: A Proposed Taxonomy' + url: https://www.nime.org/proceedings/2020/nime2020_paper115.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Schoonderwaldt2011 - abstract: 'We report on a performance study of a French-Canadian fiddler. The fiddling - tradition forms an interesting contrast toclassical violin performance in several - ways. 
Distinguishingfeatures include special elements in the bowing techniqueand - the presence of an accompanying foot clogging pattern.These two characteristics - are described, visualized and analyzed using video and motion capture recordings - as sourcematerial.' - address: 'Oslo, Norway' - author: 'Schoonderwaldt, Erwin and Jensenius, Alexander Refsum' - bibtex: "@inproceedings{Schoonderwaldt2011,\n abstract = {We report on a performance\ - \ study of a French-Canadian fiddler. The fiddling tradition forms an interesting\ - \ contrast toclassical violin performance in several ways. Distinguishingfeatures\ - \ include special elements in the bowing techniqueand the presence of an accompanying\ - \ foot clogging pattern.These two characteristics are described, visualized and\ - \ analyzed using video and motion capture recordings as sourcematerial.},\n address\ - \ = {Oslo, Norway},\n author = {Schoonderwaldt, Erwin and Jensenius, Alexander\ - \ Refsum},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178155},\n issn = {2220-4806},\n\ - \ keywords = {fiddler, violin, French-Canadian, bowing, feet, clogging, motion\ - \ capture, video, motiongram, kinematics, sonification },\n pages = {256--259},\n\ - \ title = {Effective and Expressive Movements in a French-Canadian fiddler's Performance},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_256.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_116 + abstract: 'The tabla is a traditional pitched two-piece Indian drum set, popular + not only within South East Asian music, but whose sounds also regularly feature + in western music. Yet tabla remains an aural tradition, taught largely through + a guru system heavy in custom and mystique. Tablas can also pose problems for + school and professional performance environments as they are physically bulky, + fragile, and reactive to environmental factors such as damp and heat. 
As part + of a broader project to demystify tabla, we present an electronic tabla that plays + nearly identically to an acoustic tabla and was created in order to make the tabla + acces- sible and practical for a wider audience of students, pro- fessional musicians + and composers. Along with develop- ment of standardised tabla notation and instructional + educational aides, the electronic tabla is designed to be compact, robust, easily + tuned, and the electronic nature allows for scoring tabla through playing. Further, + used as an interface, it allows the use of learned tabla technique to control + other percussive sounds. We also discuss the technological approaches used to + accurately capture the localized multi-touch rapid-fire strikes and damping that + combine to make tabla such a captivating and virtuosic instrument.' + address: 'Birmingham, UK' + author: 'Pardue, Laurel S and Bhamra, Kuljit and England, Graham and Eddershaw, + Phil and Menzies, Duncan ' + bibtex: "@inproceedings{NIME20_116,\n abstract = {The tabla is a traditional pitched\ + \ two-piece Indian drum set, popular not only within South East Asian music, but\ + \ whose sounds also regularly feature in western music. Yet tabla remains an aural\ + \ tradition, taught largely through a guru system heavy in custom and mystique.\ + \ Tablas can also pose problems for school and professional performance environments\ + \ as they are physically bulky, fragile, and reactive to environmental factors\ + \ such as damp and heat. As part of a broader project to demystify tabla, we present\ + \ an electronic tabla that plays nearly identically to an acoustic tabla and was\ + \ created in order to make the tabla acces- sible and practical for a wider audience\ + \ of students, pro- fessional musicians and composers. 
Along with develop- ment\ + \ of standardised tabla notation and instructional educational aides, the electronic\ + \ tabla is designed to be compact, robust, easily tuned, and the electronic nature\ + \ allows for scoring tabla through playing. Further, used as an interface, it\ + \ allows the use of learned tabla technique to control other percussive sounds.\ + \ We also discuss the technological approaches used to accurately capture the\ + \ localized multi-touch rapid-fire strikes and damping that combine to make tabla\ + \ such a captivating and virtuosic instrument.},\n address = {Birmingham, UK},\n\ + \ author = {Pardue, Laurel S and Bhamra, Kuljit and England, Graham and Eddershaw,\ + \ Phil and Menzies, Duncan },\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813212},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {596--599},\n presentation-video = {https://youtu.be/PPaHq8fQjB0},\n\ + \ publisher = {Birmingham City University},\n title = {Demystifying tabla through\ + \ the development of an electronic drum},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper116.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178155 + doi: 10.5281/zenodo.4813212 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'fiddler, violin, French-Canadian, bowing, feet, clogging, motion capture, - video, motiongram, kinematics, sonification ' - pages: 256--259 - title: Effective and Expressive Movements in a French-Canadian fiddler's Performance - url: http://www.nime.org/proceedings/2011/nime2011_256.pdf - year: 2011 + month: July + pages: 596--599 + presentation-video: https://youtu.be/PPaHq8fQjB0 + publisher: Birmingham City University + title: Demystifying tabla through the development of an 
electronic drum + url: https://www.nime.org/proceedings/2020/nime2020_paper116.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Bisig2011 - abstract: 'In this paper an audio-visual installation is discussed, which combines - interactive, immersive and generative elements. After introducing some of the - challenges in the field of Generative Art and placing the work within its research - context, conceptual reflections are made about the spatial, behavioural, perceptual - and social issues that are raised within the entire installation. A discussion - about the artistic content follows, focussing on the scenography and on working - with flocking algorithms in general, before addressing three specific pieces realised - for the exhibition. Next the technical implementation for both hardand software - are detailed before the idea of a hybrid ecosystem gets discussed and further - developments outlined.' - address: 'Oslo, Norway' - author: 'Bisig, Daniel and Schacher, Jan C. and Neukom, Martin' - bibtex: "@inproceedings{Bisig2011,\n abstract = {In this paper an audio-visual installation\ - \ is discussed, which combines interactive, immersive and generative elements.\ - \ After introducing some of the challenges in the field of Generative Art and\ - \ placing the work within its research context, conceptual reflections are made\ - \ about the spatial, behavioural, perceptual and social issues that are raised\ - \ within the entire installation. A discussion about the artistic content follows,\ - \ focussing on the scenography and on working with flocking algorithms in general,\ - \ before addressing three specific pieces realised for the exhibition. Next the\ - \ technical implementation for both hardand software are detailed before the idea\ - \ of a hybrid ecosystem gets discussed and further developments outlined.},\n\ - \ address = {Oslo, Norway},\n author = {Bisig, Daniel and Schacher, Jan C. 
and\ - \ Neukom, Martin},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177965},\n\ - \ issn = {2220-4806},\n keywords = {Generative Art, Interactive Environment, Immersive\ - \ Installation, Swarm Simulation, Hybrid Ecosystem },\n pages = {260--263},\n\ - \ title = {Flowspace -- A Hybrid Ecosystem},\n url = {http://www.nime.org/proceedings/2011/nime2011_260.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_117 + abstract: 'SpeakerDrum is an instrument composed of multiple Dual Voice Coil speakers + (DVC) where two coils are used to drive the same membrane. However, in this case, + one of them is used as a microphone which is then used by the performer as an + input interface of percussive gestures. Of course, this leads to poten- tial feedback, + but with enough control, a compelling exploration of resonance haptic feedback + and sound embodiment is possible.' + address: 'Birmingham, UK' + author: 'Sierra, Juan D' + bibtex: "@inproceedings{NIME20_117,\n abstract = {SpeakerDrum is an instrument composed\ + \ of multiple Dual Voice Coil speakers (DVC) where two coils are used to drive\ + \ the same membrane. 
However, in this case, one of them is used as a microphone\ + \ which is then used by the performer as an input interface of percussive gestures.\ + \ Of course, this leads to poten- tial feedback, but with enough control, a compelling\ + \ exploration of resonance haptic feedback and sound embodiment is possible.},\n\ + \ address = {Birmingham, UK},\n author = {Sierra, Juan D},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813216},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {600--604},\n publisher = {Birmingham\ + \ City University},\n title = {SpeakerDrum},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper117.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177965 + doi: 10.5281/zenodo.4813216 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Generative Art, Interactive Environment, Immersive Installation, Swarm - Simulation, Hybrid Ecosystem ' - pages: 260--263 - title: Flowspace -- A Hybrid Ecosystem - url: http://www.nime.org/proceedings/2011/nime2011_260.pdf - year: 2011 + month: July + pages: 600--604 + publisher: Birmingham City University + title: SpeakerDrum + url: https://www.nime.org/proceedings/2020/nime2020_paper117.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Sosnick2011 - abstract: 'In this paper, we describe an implementation of a real-time sound synthesizer - using Finite Difference-based simulation of a two-dimensional membrane. Finite - Difference (FD) methods can be the basis for physics-based music instrument models - that generate realistic audio output. However, such methods are compute-intensive; - large simulations cannot run in real time on current CPUs. 
Many current systems - now include powerful Graphics Processing Units (GPUs), which are a good fit for - FD methods. We demonstrate that it is possible to use this method to create a - usable real-time audio synthesizer. ' - address: 'Oslo, Norway' - author: 'Sosnick, Marc and Hsu, William' - bibtex: "@inproceedings{Sosnick2011,\n abstract = {In this paper, we describe an\ - \ implementation of a real-time sound synthesizer using Finite Difference-based\ - \ simulation of a two-dimensional membrane. Finite Difference (FD) methods can\ - \ be the basis for physics-based music instrument models that generate realistic\ - \ audio output. However, such methods are compute-intensive; large simulations\ - \ cannot run in real time on current CPUs. Many current systems now include powerful\ - \ Graphics Processing Units (GPUs), which are a good fit for FD methods. We demonstrate\ - \ that it is possible to use this method to create a usable real-time audio synthesizer.\ - \ },\n address = {Oslo, Norway},\n author = {Sosnick, Marc and Hsu, William},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178173},\n issn = {2220-4806},\n\ - \ keywords = {Finite Difference, GPU, CUDA, Synthesis },\n pages = {264--267},\n\ - \ title = {Implementing a Finite Difference-Based Real-time Sound Synthesizer\ - \ using {GPU}s},\n url = {http://www.nime.org/proceedings/2011/nime2011_264.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_118 + abstract: 'This paper presents the KeyWI, an electronic wind instrument design based + on the melodica that both improves upon limitations in current systems and is + general and powerful enough to support a variety of applications. Four opportunities + for growth are identified in current electronic wind instrument systems, which + then are used as focuses in the development and evaluation of the instrument. 
+ The instrument features a breath pressure sensor with a large dynamic range, a + keyboard that allows for polyphonic pitch selection, and a completely integrated + construction. Sound synthesis is performed with Faust code compiled to the Bela + Mini, which offers low-latency audio and a simple yet powerful development workflow. + In order to be as accessible and versatile as possible, the hardware and software + is entirely open-source, and fabrication requires only common maker tools.' + address: 'Birmingham, UK' + author: 'Caren, Matthew and Michon, Romain and Wright, Matthew' + bibtex: "@inproceedings{NIME20_118,\n abstract = {This paper presents the KeyWI,\ + \ an electronic wind instrument design based on the melodica that both improves\ + \ upon limitations in current systems and is general and powerful enough to support\ + \ a variety of applications. Four opportunities for growth are identified in current\ + \ electronic wind instrument systems, which then are used as focuses in the development\ + \ and evaluation of the instrument. The instrument features a breath pressure\ + \ sensor with a large dynamic range, a keyboard that allows for polyphonic pitch\ + \ selection, and a completely integrated construction. Sound synthesis is performed\ + \ with Faust code compiled to the Bela Mini, which offers low-latency audio and\ + \ a simple yet powerful development workflow. 
In order to be as accessible and\ + \ versatile as possible, the hardware and software is entirely open-source, and\ + \ fabrication requires only common maker tools.},\n address = {Birmingham, UK},\n\ + \ author = {Caren, Matthew and Michon, Romain and Wright, Matthew},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813218},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {605--608},\n\ + \ publisher = {Birmingham City University},\n title = {The KeyWI: An Expressive\ + \ and Accessible Electronic Wind Instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper118.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178173 + doi: 10.5281/zenodo.4813218 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Finite Difference, GPU, CUDA, Synthesis ' - pages: 264--267 - title: Implementing a Finite Difference-Based Real-time Sound Synthesizer using - GPUs - url: http://www.nime.org/proceedings/2011/nime2011_264.pdf - year: 2011 + month: July + pages: 605--608 + publisher: Birmingham City University + title: 'The KeyWI: An Expressive and Accessible Electronic Wind Instrument' + url: https://www.nime.org/proceedings/2020/nime2020_paper118.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Tidemann2011 - abstract: 'Interacting with musical avatars have been increasingly popular over - the years, with the introduction of games likeGuitar Hero and Rock Band. These - games provide MIDIequipped controllers that look like their real-world counterparts - (e.g. MIDI guitar, MIDI drumkit) that the users playto control their designated - avatar in the game. The performance of the user is measured against a score that - needs tobe followed. 
However, the avatar does not move in responseto how the user - plays, it follows some predefined movementpattern. If the user plays badly, the - game ends with theavatar ending the performance (i.e. throwing the guitar onthe - floor). The gaming experience would increase if theavatar would move in accordance - with user input. This paper presents an architecture that couples musical input - withbody movement. Using imitation learning, a simulated human robot learns to - play the drums like human drummersdo, both visually and auditory. Learning data - is recordedusing MIDI and motion tracking. The system uses an artificial intelligence - approach to implement imitation learning,employing artificial neural networks.' - address: 'Oslo, Norway' - author: 'Tidemann, Axel' - bibtex: "@inproceedings{Tidemann2011,\n abstract = {Interacting with musical avatars\ - \ have been increasingly popular over the years, with the introduction of games\ - \ likeGuitar Hero and Rock Band. These games provide MIDIequipped controllers\ - \ that look like their real-world counterparts (e.g. MIDI guitar, MIDI drumkit)\ - \ that the users playto control their designated avatar in the game. The performance\ - \ of the user is measured against a score that needs tobe followed. However, the\ - \ avatar does not move in responseto how the user plays, it follows some predefined\ - \ movementpattern. If the user plays badly, the game ends with theavatar ending\ - \ the performance (i.e. throwing the guitar onthe floor). The gaming experience\ - \ would increase if theavatar would move in accordance with user input. This paper\ - \ presents an architecture that couples musical input withbody movement. Using\ - \ imitation learning, a simulated human robot learns to play the drums like human\ - \ drummersdo, both visually and auditory. Learning data is recordedusing MIDI\ - \ and motion tracking. 
The system uses an artificial intelligence approach to\ - \ implement imitation learning,employing artificial neural networks.},\n address\ - \ = {Oslo, Norway},\n author = {Tidemann, Axel},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178175},\n issn = {2220-4806},\n keywords = {artificial intelli-,drumming,modeling\ - \ human behaviour},\n pages = {268--271},\n title = {An Artificial Intelligence\ - \ Architecture for Musical Expressiveness that Learns by Imitation},\n url = {http://www.nime.org/proceedings/2011/nime2011_268.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_119 + abstract: 'In this paper we provide a detailed description of the development of + a new interface for musical expression, the da ̈ıs, with focus on an iterative + development process, control of physical models for sounds synthesis, and haptic + feedback. The development process, consisting of three iterations, is covered + along with a discussion of the tools and methods used. The sound synthesis algorithm + for the da ̈ıs, a physical model of a bowed string, is covered and the mapping + from the interface parameters to those of the synthesis algorithms is described + in detail. Using a qualitative test the affordances, advantages, and disadvantages + of the chosen design, synthesis algorithm, and parameter mapping is highlighted. + Lastly, the possibilities for future work is discussed with special focus on alternate + sounds and mappings.' + address: 'Birmingham, UK' + author: 'Christensen, Pelle Juul and Overholt, Dan and Serafin, Stefania' + bibtex: "@inproceedings{NIME20_119,\n abstract = {In this paper we provide a detailed\ + \ description of the development of a new interface for musical expression, the\ + \ da ̈ıs, with focus on an iterative development process, control of physical\ + \ models for sounds synthesis, and haptic feedback. 
The development process, consisting\ + \ of three iterations, is covered along with a discussion of the tools and methods\ + \ used. The sound synthesis algorithm for the da ̈ıs, a physical model of a bowed\ + \ string, is covered and the mapping from the interface parameters to those of\ + \ the synthesis algorithms is described in detail. Using a qualitative test the\ + \ affordances, advantages, and disadvantages of the chosen design, synthesis algorithm,\ + \ and parameter mapping is highlighted. Lastly, the possibilities for future work\ + \ is discussed with special focus on alternate sounds and mappings.},\n address\ + \ = {Birmingham, UK},\n author = {Christensen, Pelle Juul and Overholt, Dan and\ + \ Serafin, Stefania},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813220},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {609--612},\n presentation-video = {https://youtu.be/XOvnc_AKKX8},\n\ + \ publisher = {Birmingham City University},\n title = {The Da ̈ıs: A Haptically\ + \ Enabled New Interface for Musical Expression for Controlling Physical Models\ + \ for Sound Synthesis},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper119.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178175 + doi: 10.5281/zenodo.4813220 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'artificial intelli-,drumming,modeling human behaviour' - pages: 268--271 - title: An Artificial Intelligence Architecture for Musical Expressiveness that Learns - by Imitation - url: http://www.nime.org/proceedings/2011/nime2011_268.pdf - year: 2011 + month: July + pages: 609--612 + presentation-video: https://youtu.be/XOvnc_AKKX8 + publisher: Birmingham City University + title: 'The Da ̈ıs: A Haptically Enabled 
New Interface for Musical Expression for + Controlling Physical Models for Sound Synthesis' + url: https://www.nime.org/proceedings/2020/nime2020_paper119.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Dahl2011 - abstract: 'TweetDreams is an instrument and musical compositionwhich creates real-time - sonification and visualization oftweets. Tweet data containing specified search - terms is retrieved from Twitter and used to build networks of associated tweets. - These networks govern the creation of melodiesassociated with each tweet and are - displayed graphically.Audience members participate in the piece by tweeting,and - their tweets are given special musical and visual prominence.' - address: 'Oslo, Norway' - author: 'Dahl, Luke and Herrera, Jorge and Wilkerson, Carr' - bibtex: "@inproceedings{Dahl2011,\n abstract = {TweetDreams is an instrument and\ - \ musical compositionwhich creates real-time sonification and visualization oftweets.\ - \ Tweet data containing specified search terms is retrieved from Twitter and used\ - \ to build networks of associated tweets. These networks govern the creation of\ - \ melodiesassociated with each tweet and are displayed graphically.Audience members\ - \ participate in the piece by tweeting,and their tweets are given special musical\ - \ and visual prominence.},\n address = {Oslo, Norway},\n author = {Dahl, Luke\ - \ and Herrera, Jorge and Wilkerson, Carr},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177991},\n\ - \ issn = {2220-4806},\n keywords = {Twitter, audience participation, sonification,\ - \ data visualization, text processing, interaction, multi-user instrument. 
},\n\ - \ pages = {272--275},\n title = {TweetDreams : Making Music with the Audience\ - \ and the World using Real-time Twitter Data},\n url = {http://www.nime.org/proceedings/2011/nime2011_272.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_12 + abstract: 'Computer composed music remains a novel and challenging problem to solve. + Despite an abundance of techniques and systems little research has explored how + these might be useful for end-users looking to compose with generative and algorithmic + music techniques. User interfaces for generative music systems are often inaccessible + to non-programmers and neglect established composition workflow and design paradigms + that are familiar to computer-based music composers. We have developed a system + called the Interactive Generative Music Environment (IGME) that attempts to bridge + the gap between generative music and music sequencing software, through an easy + to use score editing interface. This paper discusses a series of user studies + in which users explore generative music composition with IGME. A questionnaire + evaluates the user’s perception of interacting with generative music and from + this provide recommendations for future generative music systems and interfaces.' + address: 'Birmingham, UK' + author: 'Hunt, Samuel J and Mitchell, Tom and Nash, Chris' + bibtex: "@inproceedings{NIME20_12,\n abstract = {Computer composed music remains\ + \ a novel and challenging problem to solve. Despite an abundance of techniques\ + \ and systems little research has explored how these might be useful for end-users\ + \ looking to compose with generative and algorithmic music techniques. User interfaces\ + \ for generative music systems are often inaccessible to non-programmers and neglect\ + \ established composition workflow and design paradigms that are familiar to computer-based\ + \ music composers. 
We have developed a system called the Interactive Generative\ + \ Music Environment (IGME) that attempts to bridge the gap between generative\ + \ music and music sequencing software, through an easy to use score editing interface.\ + \ This paper discusses a series of user studies in which users explore generative\ + \ music composition with IGME. A questionnaire evaluates the user’s perception\ + \ of interacting with generative music and from this provide recommendations for\ + \ future generative music systems and interfaces.},\n address = {Birmingham, UK},\n\ + \ author = {Hunt, Samuel J and Mitchell, Tom and Nash, Chris},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813222},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {61--66},\n publisher = {Birmingham\ + \ City University},\n title = {Composing computer generated music, an observational\ + \ study using IGME: the Interactive Generative Music Environment},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper12.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177991 + doi: 10.5281/zenodo.4813222 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Twitter, audience participation, sonification, data visualization, text - processing, interaction, multi-user instrument. 
' - pages: 272--275 - title: 'TweetDreams : Making Music with the Audience and the World using Real-time - Twitter Data' - url: http://www.nime.org/proceedings/2011/nime2011_272.pdf - year: 2011 + month: July + pages: 61--66 + publisher: Birmingham City University + title: 'Composing computer generated music, an observational study using IGME: the + Interactive Generative Music Environment' + url: https://www.nime.org/proceedings/2020/nime2020_paper12.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Fyfe2011 - abstract: 'JunctionBox is a new software toolkit for creating multitouch interfaces - for controlling sound and music. Morespecifically, the toolkit has special features - which make iteasy to create TUIO-based touch interfaces for controllingsound engines - via Open Sound Control. Programmers using the toolkit have a great deal of freedom - to create highlycustomized interfaces that work on a variety of hardware.' - address: 'Oslo, Norway' - author: 'Fyfe, Lawrence and Tindale, Adam and Carpendale, Sheelagh' - bibtex: "@inproceedings{Fyfe2011,\n abstract = {JunctionBox is a new software toolkit\ - \ for creating multitouch interfaces for controlling sound and music. Morespecifically,\ - \ the toolkit has special features which make iteasy to create TUIO-based touch\ - \ interfaces for controllingsound engines via Open Sound Control. 
Programmers\ - \ using the toolkit have a great deal of freedom to create highlycustomized interfaces\ - \ that work on a variety of hardware.},\n address = {Oslo, Norway},\n author =\ - \ {Fyfe, Lawrence and Tindale, Adam and Carpendale, Sheelagh},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178021},\n issn = {2220-4806},\n keywords = {Multi-touch,\ - \ Open Sound Control, Toolkit, TUIO },\n pages = {276--279},\n title = {JunctionBox\ - \ : A Toolkit for Creating Multi-touch Sound Control Interfaces},\n url = {http://www.nime.org/proceedings/2011/nime2011_276.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_120 + abstract: 'Many opportunities and challenges in both the control and performative + aspects of today’s modular synthesizers exist. The user interface prevailing in + the world of synthesizers and music controllers has always been revolving around + knobs, faders, switches, dials, buttons, or capacitive touchpads, to name a few. + This paper presents a novel way of interaction with a modular synthesizer by exploring + the affordances of cord-base UIs. A special patch cable was developed us- ing + commercially available piezo-resistive rubber cords, and was adapted to fit to + the 3.5 mm mono audio jack, making it compatible with the Eurorack modular-synth + standard. Moreover, a module was developed to condition this stretch- able sensor/cable, + to allow multiple Patch-cordes to be used in a given patch simultaneously. This + paper also presents a vocabulary of interactions, labeled through various physical + actions, turning the patch cable into an expressive controller that complements + traditional patching techniques.' 
+ address: 'Birmingham, UK' + author: 'Wilbert, Joao and Haddad, Don D and Ishii, Hiroshi and Paradiso, Joseph' + bibtex: "@inproceedings{NIME20_120,\n abstract = {Many opportunities and challenges\ + \ in both the control and performative aspects of today’s modular synthesizers\ + \ exist. The user interface prevailing in the world of synthesizers and music\ + \ controllers has always been revolving around knobs, faders, switches, dials,\ + \ buttons, or capacitive touchpads, to name a few. This paper presents a novel\ + \ way of interaction with a modular synthesizer by exploring the affordances of\ + \ cord-base UIs. A special patch cable was developed us- ing commercially available\ + \ piezo-resistive rubber cords, and was adapted to fit to the 3.5 mm mono audio\ + \ jack, making it compatible with the Eurorack modular-synth standard. Moreover,\ + \ a module was developed to condition this stretch- able sensor/cable, to allow\ + \ multiple Patch-cordes to be used in a given patch simultaneously. 
This paper\ + \ also presents a vocabulary of interactions, labeled through various physical\ + \ actions, turning the patch cable into an expressive controller that complements\ + \ traditional patching techniques.},\n address = {Birmingham, UK},\n author =\ + \ {Wilbert, Joao and Haddad, Don D and Ishii, Hiroshi and Paradiso, Joseph},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813224},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {613--616},\n presentation-video = {https://youtu.be/7gklx8ek8U8},\n publisher\ + \ = {Birmingham City University},\n title = {Patch-corde: an expressive patch-cable\ + \ for the modular synthesizer.},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper120.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178021 + doi: 10.5281/zenodo.4813224 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Multi-touch, Open Sound Control, Toolkit, TUIO ' - pages: 276--279 - title: 'JunctionBox : A Toolkit for Creating Multi-touch Sound Control Interfaces' - url: http://www.nime.org/proceedings/2011/nime2011_276.pdf - year: 2011 + month: July + pages: 613--616 + presentation-video: https://youtu.be/7gklx8ek8U8 + publisher: Birmingham City University + title: 'Patch-corde: an expressive patch-cable for the modular synthesizer.' + url: https://www.nime.org/proceedings/2020/nime2020_paper120.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Johnston2011 - abstract: 'This paper presents an approach to practice-based researchin new musical - instrument design. 
At a high level, the process involves drawing on relevant theories - and aesthetic approaches to design new instruments, attempting to identify relevant - applied design criteria, and then examiningthe experiences of performers who use - the instruments withparticular reference to these criteria. Outcomes of this process - include new instruments, theories relating to musicianinstrument interaction and - a set of design criteria informedby practice and research.' - address: 'Oslo, Norway' - author: 'Johnston, Andrew' - bibtex: "@inproceedings{Johnston2011,\n abstract = {This paper presents an approach\ - \ to practice-based researchin new musical instrument design. At a high level,\ - \ the process involves drawing on relevant theories and aesthetic approaches to\ - \ design new instruments, attempting to identify relevant applied design criteria,\ - \ and then examiningthe experiences of performers who use the instruments withparticular\ - \ reference to these criteria. Outcomes of this process include new instruments,\ - \ theories relating to musicianinstrument interaction and a set of design criteria\ - \ informedby practice and research.},\n address = {Oslo, Norway},\n author = {Johnston,\ - \ Andrew},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178053},\n issn = {2220-4806},\n\ - \ keywords = {practice-based research, evaluation, Human-Computer Interaction,\ - \ research methods, user studies },\n pages = {280--283},\n title = {Beyond Evaluation\ - \ : Linking Practice and Theory in New Musical Interface Design},\n url = {http://www.nime.org/proceedings/2011/nime2011_280.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_121 + abstract: 'The artistic sonification offers a creative method for putting direct + semantic layers to the abstract sounds. 
This paper is dedicated to the sound installation + “Soil choir v.1.3” that sonifies soil moisture in different depths and transforms + this non-musical phenomenon into organized sound structures. The sonification + of natural soil moisture processes tests the limits of our attention, patience + and willingness to still perceive ultra-slow reactions and examines the mechanisms + of our sense adaptation. Although the musical time of the installation is set + to almost non-human – environmental time scale (changes happen within hours, days, + weeks or even months…) this system can be explored and even played also as an + instrument by putting sensors to different soil areas or pouring liquid into the + soil and waiting for changes... The crucial aspect of the work was to design the + sonification architecture that deals with extreme slow changes of input data – + measured values from moisture sensors. The result is the sound installation consisting + of three objects – each with different types of soil. Every object is compact, + independent unit consisting of three low-cost capacitive soil moisture sensors, + 1m long perspex tube filled with soil, full range loudspeaker and Bela platform + with custom Supercollider code. I developed this installation during the year + 2019 and this paper gives insight into the aspects and issues connected with creating + this installation.' + address: 'Birmingham, UK' + author: 'Suchánek, Jiří' + bibtex: "@inproceedings{NIME20_121,\n abstract = {The artistic sonification offers\ + \ a creative method for putting direct semantic layers to the abstract sounds.\ + \ This paper is dedicated to the sound installation “Soil choir v.1.3” that sonifies\ + \ soil moisture in different depths and transforms this non-musical phenomenon\ + \ into organized sound structures. 
The sonification of natural soil moisture processes\ + \ tests the limits of our attention, patience and willingness to still perceive\ + \ ultra-slow reactions and examines the mechanisms of our sense adaptation. Although\ + \ the musical time of the installation is set to almost non-human – environmental\ + \ time scale (changes happen within hours, days, weeks or even months…) this system\ + \ can be explored and even played also as an instrument by putting sensors to\ + \ different soil areas or pouring liquid into the soil and waiting for changes...\ + \ The crucial aspect of the work was to design the sonification architecture that\ + \ deals with extreme slow changes of input data – measured values from moisture\ + \ sensors. The result is the sound installation consisting of three objects –\ + \ each with different types of soil. Every object is compact, independent unit\ + \ consisting of three low-cost capacitive soil moisture sensors, 1m long perspex\ + \ tube filled with soil, full range loudspeaker and Bela platform with custom\ + \ Supercollider code. 
I developed this installation during the year 2019 and this\ + \ paper gives insight into the aspects and issues connected with creating this\ + \ installation.},\n address = {Birmingham, UK},\n author = {Suchánek, Jiří},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813226},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {617--618},\n publisher = {Birmingham City University},\n title = {SOIL CHOIR\ + \ v.1.3 - soil moisture sonification installation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper121.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178053 + doi: 10.5281/zenodo.4813226 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'practice-based research, evaluation, Human-Computer Interaction, research - methods, user studies ' - pages: 280--283 - title: 'Beyond Evaluation : Linking Practice and Theory in New Musical Interface - Design' - url: http://www.nime.org/proceedings/2011/nime2011_280.pdf - year: 2011 + month: July + pages: 617--618 + publisher: Birmingham City University + title: SOIL CHOIR v.1.3 - soil moisture sonification installation + url: https://www.nime.org/proceedings/2020/nime2020_paper121.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Popp2011 - address: 'Oslo, Norway' - author: 'Popp, Phillip and Wright, Matthew' - bibtex: "@inproceedings{Popp2011,\n address = {Oslo, Norway},\n author = {Popp,\ - \ Phillip and Wright, Matthew},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178139},\n\ - \ issn = {2220-4806},\n keywords = {Spectral Model Synthesis, Gesture Recognition,\ - \ Synthesis Control, Wacom Tablet, Machine Learning },\n pages = {284--287},\n\ - \ 
title = {Intuitive Real-Time Control of Spectral Model Synthesis},\n url = {http://www.nime.org/proceedings/2011/nime2011_284.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_122 + abstract: 'Three DIY electronic instruments that the author has used in real-life + multimedia performance contexts are scrutinised herein. The instruments are made + intentionally rough-hewn, non-optimal and user-unfriendly in several respects, + and are shown to draw upon experimental traits in electronics de- sign and interfaces + for music expression. The various different ways in which such design traits affects + their performance are outlined, as are their overall consequence to the artistic + outcome and to individual experiences of it. It is shown that, to a varying extent, + they all embody, mediate, and aid actualise the specifics their parent projects + revolve around. It is eventually suggested that in the context of an exploratory + and hybrid artistic practice, bespoke instruments of sorts, their improvised performance, + the material traits or processes they implement or pivot on, and the ideas/narratives + that perturb thereof, may all intertwine and fuse into one another so that a clear + distinction between one another is not always possible, or meaningful. In such + a vein, this paper aims at being an account of such a practice upon which prospective + researchers/artists may further build upon.' + address: 'Birmingham, UK' + author: 'Koutsomichalis, Marinos' + bibtex: "@inproceedings{NIME20_122,\n abstract = {Three DIY electronic instruments\ + \ that the author has used in real-life multimedia performance contexts are scrutinised\ + \ herein. The instruments are made intentionally rough-hewn, non-optimal and user-unfriendly\ + \ in several respects, and are shown to draw upon experimental traits in electronics\ + \ de- sign and interfaces for music expression. 
The various different ways in\ + \ which such design traits affects their performance are outlined, as are their\ + \ overall consequence to the artistic outcome and to individual experiences of\ + \ it. It is shown that, to a varying extent, they all embody, mediate, and aid\ + \ actualise the specifics their parent projects revolve around. It is eventually\ + \ suggested that in the context of an exploratory and hybrid artistic practice,\ + \ bespoke instruments of sorts, their improvised performance, the material traits\ + \ or processes they implement or pivot on, and the ideas/narratives that perturb\ + \ thereof, may all intertwine and fuse into one another so that a clear distinction\ + \ between one another is not always possible, or meaningful. In such a vein, this\ + \ paper aims at being an account of such a practice upon which prospective researchers/artists\ + \ may further build upon.},\n address = {Birmingham, UK},\n author = {Koutsomichalis,\ + \ Marinos},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813228},\n editor\ + \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ + \ pages = {619--624},\n presentation-video = {https://youtu.be/DWecR7exl8k},\n\ + \ publisher = {Birmingham City University},\n title = {Rough-hewn Hertzian Multimedia\ + \ Instruments},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper122.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178139 + doi: 10.5281/zenodo.4813228 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Spectral Model Synthesis, Gesture Recognition, Synthesis Control, Wacom - Tablet, Machine Learning ' - pages: 284--287 - title: Intuitive Real-Time Control of Spectral Model Synthesis - url: http://www.nime.org/proceedings/2011/nime2011_284.pdf - 
year: 2011 + month: July + pages: 619--624 + presentation-video: https://youtu.be/DWecR7exl8k + publisher: Birmingham City University + title: Rough-hewn Hertzian Multimedia Instruments + url: https://www.nime.org/proceedings/2020/nime2020_paper122.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Molina2011 - abstract: 'We present BeatJockey, a prototype interface which makesuse of Audio - Mosaicing (AM), beat-tracking and machinelearning techniques, for supporting Diskjockeys - (DJs) byproposing them new ways of interaction with the songs onthe DJ''s playlist. - This prototype introduces a new paradigmto DJing in which the user has the capability - to mix songsinteracting with beat-units that accompany the DJ''s mix.For this - type of interaction, the system suggests song slicestaken from songs selected - from a playlist, which could gowell with the beats of whatever master song is - being played.In addition the system allows the synchronization of multiple songs, - thus permitting flexible, coherent and rapid progressions in the DJ''s mix. BeatJockey - uses the Reactable,a musical tangible user interface (TUI), and it has beendesigned - to be used by all DJs regardless of their level ofexpertise, as the system helps - the novice while bringing newcreative opportunities to the expert.' - address: 'Oslo, Norway' - author: 'Molina, Pablo and Haro, Martín and Jordà, Sergi' - bibtex: "@inproceedings{Molina2011,\n abstract = {We present BeatJockey, a prototype\ - \ interface which makesuse of Audio Mosaicing (AM), beat-tracking and machinelearning\ - \ techniques, for supporting Diskjockeys (DJs) byproposing them new ways of interaction\ - \ with the songs onthe DJ's playlist. 
This prototype introduces a new paradigmto\ - \ DJing in which the user has the capability to mix songsinteracting with beat-units\ - \ that accompany the DJ's mix.For this type of interaction, the system suggests\ - \ song slicestaken from songs selected from a playlist, which could gowell with\ - \ the beats of whatever master song is being played.In addition the system allows\ - \ the synchronization of multiple songs, thus permitting flexible, coherent and\ - \ rapid progressions in the DJ's mix. BeatJockey uses the Reactable,a musical\ - \ tangible user interface (TUI), and it has beendesigned to be used by all DJs\ - \ regardless of their level ofexpertise, as the system helps the novice while\ - \ bringing newcreative opportunities to the expert.},\n address = {Oslo, Norway},\n\ - \ author = {Molina, Pablo and Haro, Mart\\'{\\i}n and Jord\\`{a}, Sergi},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178113},\n issn = {2220-4806},\n keywords\ - \ = {DJ, music information retrieval, audio mosaicing, percussion, turntable,\ - \ beat-mash, interactive music interfaces, realtime, tabletop interaction, reactable.\ - \ },\n pages = {288--291},\n title = {BeatJockey : A New Tool for Enhancing DJ\ - \ Skills},\n url = {http://www.nime.org/proceedings/2011/nime2011_288.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_123 + abstract: 'The visual-audioizer is a patch created in Max in which the concept of + fluid-time animation techniques, in tandem with basic computer vision tracking + methods, can be used as a tool to allow the visual time-based media artist to + create music. Visual aspects relating to the animator’s knowledge of motion, animated + loops, and auditory synchronization derived from computer vision tracking methods, + allow an immediate connection between the generated audio derived from visuals—becoming + a new way to experience and create audio-visual media. 
A conceptual overview, + comparisons of past/current audio-visual contributors, and a summary of the Max + patch will be discussed. The novelty of practice-based animation methods in the + field of musical expression, considerations of utilizing the visual-audioizer, + and the future of fluid-time animation techniques as a tool of musical creativity + will also be addressed. ' + address: 'Birmingham, UK' + author: 'Olsen, Taylor J' + bibtex: "@inproceedings{NIME20_123,\n abstract = {The visual-audioizer is a patch\ + \ created in Max in which the concept of fluid-time animation techniques, in tandem\ + \ with basic computer vision tracking methods, can be used as a tool to allow\ + \ the visual time-based media artist to create music. Visual aspects relating\ + \ to the animator’s knowledge of motion, animated loops, and auditory synchronization\ + \ derived from computer vision tracking methods, allow an immediate connection\ + \ between the generated audio derived from visuals—becoming a new way to experience\ + \ and create audio-visual media. 
A conceptual overview, comparisons of past/current\ + \ audio-visual contributors, and a summary of the Max patch will be discussed.\ + \ The novelty of practice-based animation methods in the field of musical expression,\ + \ considerations of utilizing the visual-audioizer, and the future of fluid-time\ + \ animation techniques as a tool of musical creativity will also be addressed.\ + \ },\n address = {Birmingham, UK},\n author = {Olsen, Taylor J},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813230},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {625--630},\n publisher = {Birmingham\ + \ City University},\n title = {Animation, Sonification, and Fluid-Time: A Visual-Audioizer\ + \ Prototype},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper123.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178113 + doi: 10.5281/zenodo.4813230 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'DJ, music information retrieval, audio mosaicing, percussion, turntable, - beat-mash, interactive music interfaces, realtime, tabletop interaction, reactable. ' - pages: 288--291 - title: 'BeatJockey : A New Tool for Enhancing DJ Skills' - url: http://www.nime.org/proceedings/2011/nime2011_288.pdf - year: 2011 + month: July + pages: 625--630 + publisher: Birmingham City University + title: 'Animation, Sonification, and Fluid-Time: A Visual-Audioizer Prototype' + url: https://www.nime.org/proceedings/2020/nime2020_paper123.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Schacher2011 - abstract: 'In this paper the relationship between body, motion and sound is addressed. - The comparison with traditional instruments and dance is shown with regards to - basic types of motion. 
The difference between gesture and movement is outlined - and some of the models used in dance for structuring motion sequences are described. - In order to identify expressive aspects of motion sequences a test scenario is - devised. After the description of the methods and tools used in a series of measurements, - two types of data-display are shown and the applied in the interpretation. One - salient feature is recognized and put into perspective with regards to movement - and gestalt perception. Finally the merits of the technical means that were applied - are compared and a model-based approach to motion-sound mapping is proposed. ' - address: 'Oslo, Norway' - author: 'Schacher, Jan C. and Stoecklin, Angela' - bibtex: "@inproceedings{Schacher2011,\n abstract = {In this paper the relationship\ - \ between body, motion and sound is addressed. The comparison with traditional\ - \ instruments and dance is shown with regards to basic types of motion. The difference\ - \ between gesture and movement is outlined and some of the models used in dance\ - \ for structuring motion sequences are described. In order to identify expressive\ - \ aspects of motion sequences a test scenario is devised. After the description\ - \ of the methods and tools used in a series of measurements, two types of data-display\ - \ are shown and the applied in the interpretation. One salient feature is recognized\ - \ and put into perspective with regards to movement and gestalt perception. Finally\ - \ the merits of the technical means that were applied are compared and a model-based\ - \ approach to motion-sound mapping is proposed. },\n address = {Oslo, Norway},\n\ - \ author = {Schacher, Jan C. 
and Stoecklin, Angela},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178149},\n issn = {2220-4806},\n keywords = {Interactive\ - \ Dance, Motion and Gesture, Sonification, Motion Perception, Mapping },\n pages\ - \ = {292--295},\n title = {Traces -- Body, Motion and Sound},\n url = {http://www.nime.org/proceedings/2011/nime2011_292.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_124 + abstract: This paper describes a system for automating the generation of mapping + schemes between human interaction with extramusical objects and electronic dance + music. These mappings are determined through the comparison of sensor input to + a synthesized matrix of sequenced audio. The goal of the system is to facilitate + live performances that feature quotidian objects in the place of traditional musical + instruments. The practical and artistic applications of musical control with quotidian + objects is discussed. The associated object-manipulating gesture vocabularies + are mapped to musical output so that the objects themselves may be perceived as + DMIs. This strategy is used in a performance to explore the liveness qualities + of the system. + address: 'Birmingham, UK' + author: 'de las Pozas, Virginia' + bibtex: "@inproceedings{NIME20_124,\n abstract = {This paper describes a system\ + \ for automating the generation of mapping schemes between human interaction with\ + \ extramusical objects and electronic dance music. These mappings are determined\ + \ through the comparison of sensor input to a synthesized matrix of sequenced\ + \ audio. The goal of the system is to facilitate live performances that feature\ + \ quotidian objects in the place of traditional musical instruments. 
The practical\ + \ and artistic applications of musical control with quotidian objects is discussed.\ + \ The associated object-manipulating gesture vocabularies are mapped to musical\ + \ output so that the objects themselves may be perceived as DMIs. This strategy\ + \ is used in a performance to explore the liveness qualities of the system.},\n\ + \ address = {Birmingham, UK},\n author = {de las Pozas, Virginia},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813232},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {631--634},\n\ + \ publisher = {Birmingham City University},\n title = {Semi-Automated Mappings\ + \ for Object-Manipulating Gestural Control of Electronic Music},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper124.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178149 + doi: 10.5281/zenodo.4813232 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Interactive Dance, Motion and Gesture, Sonification, Motion Perception, - Mapping ' - pages: 292--295 - title: 'Traces -- Body, Motion and Sound' - url: http://www.nime.org/proceedings/2011/nime2011_292.pdf - year: 2011 + month: July + pages: 631--634 + publisher: Birmingham City University + title: Semi-Automated Mappings for Object-Manipulating Gestural Control of Electronic + Music + url: https://www.nime.org/proceedings/2020/nime2020_paper124.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Leslie2011 - abstract: MoodMixer is an interactive installation in which participants collaboratively - navigate a two-dimensional music spaceby manipulating their cognitive state and - conveying thisstate via wearable Electroencephalography (EEG) technology. 
The - participants can choose to actively manipulateor passively convey their cognitive - state depending on theirdesired approach and experience level. A four-channel - electronic music mixture continuously conveys the participants'expressed cognitive - states while a colored visualization oftheir locations on a two-dimensional projection - of cognitive state attributes aids their navigation through the space.MoodMixer - is a collaborative experience that incorporatesaspects of both passive and active - EEG sonification andperformance art. We discuss the technical design of the installation - and place its collaborative sonification aestheticdesign within the context of - existing EEG-based music andart. - address: 'Oslo, Norway' - author: 'Leslie, Grace and Mullen, Tim' - bibtex: "@inproceedings{Leslie2011,\n abstract = {MoodMixer is an interactive installation\ - \ in which participants collaboratively navigate a two-dimensional music spaceby\ - \ manipulating their cognitive state and conveying thisstate via wearable Electroencephalography\ - \ (EEG) technology. The participants can choose to actively manipulateor passively\ - \ convey their cognitive state depending on theirdesired approach and experience\ - \ level. A four-channel electronic music mixture continuously conveys the participants'expressed\ - \ cognitive states while a colored visualization oftheir locations on a two-dimensional\ - \ projection of cognitive state attributes aids their navigation through the space.MoodMixer\ - \ is a collaborative experience that incorporatesaspects of both passive and active\ - \ EEG sonification andperformance art. 
We discuss the technical design of the\ - \ installation and place its collaborative sonification aestheticdesign within\ - \ the context of existing EEG-based music andart.},\n address = {Oslo, Norway},\n\ - \ author = {Leslie, Grace and Mullen, Tim},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178089},\n issn = {2220-4806},\n keywords = {EEG, BCMI, collaboration,\ - \ sonification, visualization },\n pages = {296--299},\n title = {MoodMixer :\ - \ {EEG}-based Collaborative Sonification},\n url = {http://www.nime.org/proceedings/2011/nime2011_296.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_125 + abstract: 'During theBaroque period, improvisation was a key element of music performance + and education. Great musicians, such as J.S. Bach, were better known as improvisers + than composers. Today, however, there is a lack of improvisation culture + in classical music performance and education; classical musicians either are not + trained to improvise, or cannot find other people to improvise with. Motivated + by this observation, we develop BachDuet, a system that enables real-time + counterpoint improvisation between a human anda machine. This system uses a recurrent + neural network toprocess the human musician’s monophonic performance ona MIDI + keyboard and generates the machine’s monophonic performance in real time. We develop + a GUI to visualize the generated music content and to facilitate this interaction. + We conduct user studies with 13 musically trained users and show the feasibility of two-party duet counterpoint + improvisation and the effectiveness of BachDuet for this purpose. We also conduct + listening tests with 48 participants and show that they cannot tell the difference + between duets generated by human-machine improvisation using BachDuet and those + generated by human-human improvisation. 
Objective evaluation is also conducted + to assess the degree to which these improvisations adhere to common rules of counterpoint, + showing promising results.' + address: 'Birmingham, UK' + author: 'Benetatos, Christodoulos and VanderStel, Joseph and Duan, Zhiyao' + bibtex: "@inproceedings{NIME20_125,\n abstract = {During theBaroque period, improvisation\ + \ was a key element of music performance and education. Great musicians, such\ + \ as J.S. Bach, were better known as improvisers than composers. Today, however,\ + \ there is a lack of improvisation culture in classical music performance\ + \ and education; classical musicians either are not trained to improvise, or cannot\ + \ find other people to improvise with. Motivated by this observation, we develop\ + \ BachDuet, a system that enables real-time counterpoint improvisation between\ + \ a human anda machine. This system uses a recurrent neural network toprocess\ + \ the human musician’s monophonic performance ona MIDI keyboard and generates\ + \ the machine’s monophonic performance in real time. We develop a GUI to visualize\ + \ the generated music content and to facilitate this interaction. We conduct\ + \ user studies with 13 musically trained users and show the feasibility\ + \ of two-party duet counterpoint improvisation and the effectiveness of BachDuet\ + \ for this purpose. 
We also conduct listening tests with 48 participants and\ + \ show that they cannot tell the difference between duets generated by human-machine\ + \ improvisation using BachDuet and those generated by human-human improvisation.\ + \ Objective evaluation is also conducted to assess the degree to which these\ + \ improvisations adhere to common rules of counterpoint, showing promising results.},\n\ + \ address = {Birmingham, UK},\n author = {Benetatos, Christodoulos and VanderStel,\ + \ Joseph and Duan, Zhiyao},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813234},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {635--640},\n presentation-video = {https://youtu.be/wFGW0QzuPPk},\n\ + \ publisher = {Birmingham City University},\n title = {BachDuet: A Deep Learning\ + \ System for Human-Machine Counterpoint Improvisation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper125.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178089 + doi: 10.5281/zenodo.4813234 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'EEG, BCMI, collaboration, sonification, visualization ' - pages: 296--299 - title: 'MoodMixer : EEG-based Collaborative Sonification' - url: http://www.nime.org/proceedings/2011/nime2011_296.pdf - year: 2011 + month: July + pages: 635--640 + presentation-video: https://youtu.be/wFGW0QzuPPk + publisher: Birmingham City University + title: 'BachDuet: A Deep Learning System for Human-Machine Counterpoint Improvisation' + url: https://www.nime.org/proceedings/2020/nime2020_paper125.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Skogstad2011 - abstract: 'The paper presents research about implementing a full body inertial motion - capture system, the Xsens MVN suit, for musical 
interaction. Three different approaches - for stream-ing real time and prerecorded motion capture data with Open Sound Control - have been implemented. Furthermore, we present technical performance details and - our experience with the motion capture system in realistic practice.' - address: 'Oslo, Norway' - author: 'Skogstad, Ståle A. and de Quay, Yago and Jensenius, Alexander Refsum' - bibtex: "@inproceedings{Skogstad2011,\n abstract = {The paper presents research\ - \ about implementing a full body inertial motion capture system, the Xsens MVN\ - \ suit, for musical interaction. Three different approaches for stream-ing real\ - \ time and prerecorded motion capture data with Open Sound Control have been implemented.\ - \ Furthermore, we present technical performance details and our experience with\ - \ the motion capture system in realistic practice.},\n address = {Oslo, Norway},\n\ - \ author = {Skogstad, Ståle A. and de Quay, Yago and Jensenius, Alexander Refsum},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178165},\n issn = {2220-4806},\n\ - \ pages = {300--303},\n title = {OSC Implementation and Evaluation of the Xsens\ - \ MVN Suit},\n url = {http://www.nime.org/proceedings/2011/nime2011_300.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_13 + abstract: 'Because they break the physical link between gestures and sound, Digital + Musical Instruments offer countless opportunities for musical expression. For + the same reason however, they may hinder the audience experience, making the musician + contribution and expressiveness difficult to perceive. In order to cope with this + issue without altering the instruments, researchers and artists alike have designed + techniques to augment their performances with additional information, through + audio, haptic or visual modalities. 
These techniques have however only been designed + to offer a fixed level of information, without taking into account the variety + of spectators expertise and preferences. In this paper, we investigate the design, + implementation and effect on audience experience of visual augmentations with + controllable level of detail (LOD). We conduct a controlled experiment with 18 + participants, including novices and experts. Our results show contrasts in the + impact of LOD on experience and comprehension for experts and novices, and highlight + the diversity of usage of visual augmentations by spectators.' + address: 'Birmingham, UK' + author: 'Capra, Olivier and Berthaut, Florent and Grisoni, Laurent' + bibtex: "@inproceedings{NIME20_13,\n abstract = {Because they break the physical\ + \ link between gestures and sound, Digital Musical Instruments offer countless\ + \ opportunities for musical expression. For the same reason however, they may\ + \ hinder the audience experience, making the musician contribution and expressiveness\ + \ difficult to perceive. In order to cope with this issue without altering the\ + \ instruments, researchers and artists alike have designed techniques to augment\ + \ their performances with additional information, through audio, haptic or visual\ + \ modalities. These techniques have however only been designed to offer a fixed\ + \ level of information, without taking into account the variety of spectators\ + \ expertise and preferences. In this paper, we investigate the design, implementation\ + \ and effect on audience experience of visual augmentations with controllable\ + \ level of detail (LOD). We conduct a controlled experiment with 18 participants,\ + \ including novices and experts. 
Our results show contrasts in the impact of LOD\ + \ on experience and comprehension for experts and novices, and highlight the diversity\ + \ of usage of visual augmentations by spectators.},\n address = {Birmingham, UK},\n\ + \ author = {Capra, Olivier and Berthaut, Florent and Grisoni, Laurent},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813236},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {67--72},\n\ + \ presentation-video = {https://youtu.be/3hIGu9QDn4o},\n publisher = {Birmingham\ + \ City University},\n title = {All You Need Is LOD : Levels of Detail in Visual\ + \ Augmentations for the Audience},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper13.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178165 + doi: 10.5281/zenodo.4813236 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - pages: 300--303 - title: OSC Implementation and Evaluation of the Xsens MVN Suit - url: http://www.nime.org/proceedings/2011/nime2011_300.pdf - year: 2011 + month: July + pages: 67--72 + presentation-video: https://youtu.be/3hIGu9QDn4o + publisher: Birmingham City University + title: 'All You Need Is LOD : Levels of Detail in Visual Augmentations for the Audience' + url: https://www.nime.org/proceedings/2020/nime2020_paper13.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Wyse2011 - abstract: 'The goal of our research is to find ways of supporting and encouraging - musical behavior by non-musicians in shared public performance environments. Previous - studies indicated simultaneous music listening and performance is difficult for - non-musicians, and that visual support for the task might be helpful. 
This paper - presents results from a preliminary user study conducted to evaluate the effect - of visual feedback on a musical tracking task. Participants generated a musical - signal by manipulating a hand-held device with two dimensions of control over - two parameters, pitch and density of note events, and were given the task of following - a target pattern as closely as possible. The target pattern was a machine-generated - musical signal comprising of variation over the same two parameters. Visual feedback - provided participants with information about the control parameters of the musical - signal generated by the machine. We measured the task performance under different - visual feedback strategies. Results show that single parameter visualizations - tend to improve the tracking performance with respect to the visualized parameter, - but not the non-visualized parameter. Visualizing two independent parameters simultaneously - decreases performance in both dimensions. ' - address: 'Oslo, Norway' - author: 'Wyse, Lonce and Mitani, Norikazu and Nanayakkara, Suranga' - bibtex: "@inproceedings{Wyse2011,\n abstract = {The goal of our research is to find\ - \ ways of supporting and encouraging musical behavior by non-musicians in shared\ - \ public performance environments. Previous studies indicated simultaneous music\ - \ listening and performance is difficult for non-musicians, and that visual support\ - \ for the task might be helpful. This paper presents results from a preliminary\ - \ user study conducted to evaluate the effect of visual feedback on a musical\ - \ tracking task. Participants generated a musical signal by manipulating a hand-held\ - \ device with two dimensions of control over two parameters, pitch and density\ - \ of note events, and were given the task of following a target pattern as closely\ - \ as possible. The target pattern was a machine-generated musical signal comprising\ - \ of variation over the same two parameters. 
Visual feedback provided participants\ - \ with information about the control parameters of the musical signal generated\ - \ by the machine. We measured the task performance under different visual feedback\ - \ strategies. Results show that single parameter visualizations tend to improve\ - \ the tracking performance with respect to the visualized parameter, but not the\ - \ non-visualized parameter. Visualizing two independent parameters simultaneously\ - \ decreases performance in both dimensions. },\n address = {Oslo, Norway},\n author\ - \ = {Wyse, Lonce and Mitani, Norikazu and Nanayakkara, Suranga},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178191},\n issn = {2220-4806},\n keywords = {Mobile phone,\ - \ Interactive music performance, Listening, Group music play, Visual support },\n\ - \ pages = {304--307},\n title = {The Effect of Visualizing Audio Targets in a\ - \ Musical Listening and Performance Task},\n url = {http://www.nime.org/proceedings/2011/nime2011_304.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_14 + abstract: 'In this work we test the performance of multiple ESP32microcontrollers + used as WiFi sensor interfaces in the context of real-time interactive systems. + The number of devices from 1 to 13, and individual sending rates from 50 to 2300 + messages per second are tested to provide examples of various network load situations + that may resemble a performance configuration. The overall end-to-end latency + and bandwidth are measured as the basic performance metrics of interest. The results + show that a maximum message rate of 2300 Hz is possible on a 2.4 GHz network for + a single embedded device and decreases as the number of devices are added. During + testing it was possible to have up to 7 devices transmitting at 100 Hz while attaining + less than 10 ms latency, but performance degrades with increasing sending rates + and number of devices. 
Performance can also vary significantly from day to day + depending on network usage in a crowded environment.' + address: 'Birmingham, UK' + author: 'Wang, Johnty and Meneses, Eduardo and Wanderley, Marcelo' + bibtex: "@inproceedings{NIME20_14,\n abstract = {In this work we test the performance\ + \ of multiple ESP32microcontrollers used as WiFi sensor interfaces in the context\ + \ of real-time interactive systems. The number of devices from 1 to 13, and individual\ + \ sending rates from 50 to 2300 messages per second are tested to provide examples\ + \ of various network load situations that may resemble a performance configuration.\ + \ The overall end-to-end latency and bandwidth are measured as the basic performance\ + \ metrics of interest. The results show that a maximum message rate of 2300 Hz\ + \ is possible on a 2.4 GHz network for a single embedded device and decreases\ + \ as the number of devices are added. During testing it was possible to have up\ + \ to 7 devices transmitting at 100 Hz while attaining less than 10 ms latency,\ + \ but performance degrades with increasing sending rates and number of devices.\ + \ Performance can also vary significantly from day to day depending on network\ + \ usage in a crowded environment.},\n address = {Birmingham, UK},\n author = {Wang,\ + \ Johnty and Meneses, Eduardo and Wanderley, Marcelo},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813239},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {73--76},\n publisher = {Birmingham\ + \ City University},\n title = {The Scalability of WiFi for Mobile Embedded Sensor\ + \ Interfaces},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper14.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178191 + doi: 
10.5281/zenodo.4813239 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Mobile phone, Interactive music performance, Listening, Group music play, - Visual support ' - pages: 304--307 - title: The Effect of Visualizing Audio Targets in a Musical Listening and Performance - Task - url: http://www.nime.org/proceedings/2011/nime2011_304.pdf - year: 2011 + month: July + pages: 73--76 + publisher: Birmingham City University + title: The Scalability of WiFi for Mobile Embedded Sensor Interfaces + url: https://www.nime.org/proceedings/2020/nime2020_paper14.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Freed2011 - abstract: 'An effective programming style for gesture signal processing is described - using a new library that brings efficient run-time polymorphism, functional and - instance-based object-oriented programming to Max/MSP. By introducing better support - for generic programming and composability Max/MSP becomes a more productive environment - for managing the growing scale and complexity of gesture sensing systems for musical - instruments and interactive installations. ' - address: 'Oslo, Norway' - author: 'Freed, Adrian and MacCallum, John and Schmeder, Andrew' - bibtex: "@inproceedings{Freed2011,\n abstract = {An effective programming style\ - \ for gesture signal processing is described using a new library that brings efficient\ - \ run-time polymorphism, functional and instance-based object-oriented programming\ - \ to Max/MSP. By introducing better support for generic programming and composability\ - \ Max/MSP becomes a more productive environment for managing the growing scale\ - \ and complexity of gesture sensing systems for musical instruments and interactive\ - \ installations. 
},\n address = {Oslo, Norway},\n author = {Freed, Adrian and\ - \ MacCallum, John and Schmeder, Andrew},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178015},\n\ - \ issn = {2220-4806},\n keywords = {composability,delegation,functional programming,gesture\ - \ signal,max,msp,object,object-,open sound control,oriented programming,processing},\n\ - \ pages = {308--311},\n title = {Composability for Musical Gesture Signal Processing\ - \ using new OSC-based Object and Functional Programming Extensions to Max/MSP},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_308.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_15 + abstract: 'Advanced musical cooperation, such as concurrent control of musical parameters + or sharing data between instruments,has previously been investigated using multi-user instruments or orchestras of identical instruments. In the case + of heterogeneous digital orchestras, where the instruments, interfaces, and control + gestures can be very different, a number of issues may impede such collaboration + opportunities. These include the lack of a standard method for sharing + data or control, the incompatibility of parameter types, and limited awareness of other musicians’ activity and instrument structure. As a result, most collaborations remain + limited to synchronising tempo or applying effects to audio outputs. In this paper + we present two interfaces for real-time group collaboration amongst musicians + with heterogeneous instruments. We conducted a qualitative study to investigate + how these interfaces impact musicians’ experience and their musical output, we + performed a thematic analysis of inter-views, and we analysed logs of interactions. 
From these + results we derive principles and guidelines for the design of advanced + collaboration systems for heterogeneous digital orchestras, namely Adapting (to) the System, + Support Development, Default to Openness, and Minimise Friction to Support + Expressivity.' + address: 'Birmingham, UK' + author: 'Berthaut, Florent and Dahl, Luke' + bibtex: "@inproceedings{NIME20_15,\n abstract = {Advanced musical cooperation, such\ + \ as concurrent control of musical parameters or sharing data between instruments,has\ + \ previously been investigated using multi-user instruments or orchestras\ + \ of identical instruments. In the case of heterogeneous digital orchestras,\ + \ where the instruments, interfaces, and control gestures can be very different,\ + \ a number of issues may impede such collaboration opportunities. These include\ + \ the lack of a standard method for sharing data or control, the incompatibility\ + \ of parameter types, and limited awareness of other musicians’ activity\ + \ and instrument structure. As a result, most collaborations remain\ + \ limited to synchronising tempo or applying effects to audio outputs. In this\ + \ paper we present two interfaces for real-time group collaboration amongst musicians\ + \ with heterogeneous instruments. We conducted a qualitative study to \ + \ investigate how these interfaces impact musicians’ experience and their musical\ + \ output, we performed a thematic analysis of inter-views, and we analysed\ + \ logs of interactions. 
From these results we derive principles and\ + \ guidelines for the design of advanced collaboration systems for heterogeneous\ + \ digital orchestras, namely Adapting (to) the System, Support Development,\ + \ Default to Openness, and Minimise Friction to Support Expressivity.},\n address\ + \ = {Birmingham, UK},\n author = {Berthaut, Florent and Dahl, Luke},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813241},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {77--82},\n\ + \ presentation-video = {https://youtu.be/jGpKkbWq_TY},\n publisher = {Birmingham\ + \ City University},\n title = {Adapting & Openness: Dynamics of Collaboration\ + \ Interfaces for Heterogeneous Digital Orchestras},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper15.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178015 + doi: 10.5281/zenodo.4813241 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'composability,delegation,functional programming,gesture signal,max,msp,object,object-,open - sound control,oriented programming,processing' - pages: 308--311 - title: Composability for Musical Gesture Signal Processing using new OSC-based Object - and Functional Programming Extensions to Max/MSP - url: http://www.nime.org/proceedings/2011/nime2011_308.pdf - year: 2011 + month: July + pages: 77--82 + presentation-video: https://youtu.be/jGpKkbWq_TY + publisher: Birmingham City University + title: 'Adapting & Openness: Dynamics of Collaboration Interfaces for Heterogeneous + Digital Orchestras' + url: https://www.nime.org/proceedings/2020/nime2020_paper15.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Nymoen2011 - abstract: 'The paper presents the SoundSaber-a musical instrument based on motion - capture 
technology. We present technical details of the instrument and discuss - the design development process. The SoundSaber may be used as an example of how - high-fidelity motion capture equipment can be used for prototyping musical instruments, - and we illustrate this with an example of a low-cost implementation of our motion - capture instrument.' - address: 'Oslo, Norway' - author: 'Nymoen, Kristian and Skogstad, Ståle A. and Jensenius, Alexander Refsum' - bibtex: "@inproceedings{Nymoen2011,\n abstract = {The paper presents the SoundSaber-a\ - \ musical instrument based on motion capture technology. We present technical\ - \ details of the instrument and discuss the design development process. The SoundSaber\ - \ may be used as an example of how high-fidelity motion capture equipment can\ - \ be used for prototyping musical instruments, and we illustrate this with an\ - \ example of a low-cost implementation of our motion capture instrument.},\n address\ - \ = {Oslo, Norway},\n author = {Nymoen, Kristian and Skogstad, Ståle A. and Jensenius,\ - \ Alexander Refsum},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178125},\n\ - \ issn = {2220-4806},\n pages = {312--315},\n title = {SoundSaber -- A Motion\ - \ Capture Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_312.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_16 + abstract: 'Music technology can provide persons who experience physical and/or intellectual + barriers using traditional musical instruments with a unique access to active + music making. This applies particularly but not exclusively to the so-called group + of people with physical and/or mental disabilities. This paper presents two Accessible + Digital Musical Instruments (ADMIs) that were specifically designed for the students + of a Special Educational Needs (SEN) school with a focus on intellectual disabilities. 
+ With SnoeSky, we present an ADMI in the form of an interactive starry sky that + integrates into the Snoezel-Room. Here, users can ''play'' with ''melodic constellations'' + using a flashlight. SonicDive is an interactive installation that enables users + to explore a complex water soundscape through their movement inside a ball pool. + The underlying goal of both ADMIs was the promotion of self-efficacy experiences + while stimulating the users'' relaxation and activation. This paper reports on + the design process involving the users and their environment. In addition, it + describes some details of the technical implementaion of the ADMIs as well as + first indices for their effectiveness.' + address: 'Birmingham, UK' + author: 'Förster, Andreas and Komesker, Christina and Schnell, Norbert' + bibtex: "@inproceedings{NIME20_16,\n abstract = {Music technology can provide persons\ + \ who experience physical and/or intellectual barriers using traditional musical\ + \ instruments with a unique access to active music making. This applies particularly\ + \ but not exclusively to the so-called group of people with physical and/or mental\ + \ disabilities. This paper presents two Accessible Digital Musical Instruments\ + \ (ADMIs) that were specifically designed for the students of a Special Educational\ + \ Needs (SEN) school with a focus on intellectual disabilities. With SnoeSky,\ + \ we present an ADMI in the form of an interactive starry sky that integrates\ + \ into the Snoezel-Room. Here, users can 'play' with 'melodic constellations'\ + \ using a flashlight. SonicDive is an interactive installation that enables users\ + \ to explore a complex water soundscape through their movement inside a ball pool.\ + \ The underlying goal of both ADMIs was the promotion of self-efficacy experiences\ + \ while stimulating the users' relaxation and activation. This paper reports on\ + \ the design process involving the users and their environment. 
In addition, it\ + \ describes some details of the technical implementaion of the ADMIs as well as\ + \ first indices for their effectiveness.},\n address = {Birmingham, UK},\n author\ + \ = {Förster, Andreas and Komesker, Christina and Schnell, Norbert},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813243},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {83--88},\n\ + \ publisher = {Birmingham City University},\n title = {SnoeSky and SonicDive -\ + \ Design and Evaluation of Two Accessible Digital Musical Instruments for a SEN\ + \ School},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper16.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178125 + doi: 10.5281/zenodo.4813243 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - pages: 312--315 - title: SoundSaber -- A Motion Capture Instrument - url: http://www.nime.org/proceedings/2011/nime2011_312.pdf - year: 2011 + month: July + pages: 83--88 + publisher: Birmingham City University + title: SnoeSky and SonicDive - Design and Evaluation of Two Accessible Digital Musical + Instruments for a SEN School + url: https://www.nime.org/proceedings/2020/nime2020_paper16.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Brandtsegg2011 - abstract: 'The article describes a flexible mapping technique realized as a many-to-many - dynamic mapping matrix. Digital sound generation is typically controlled by a - large number of parameters and efficient and flexible mapping is necessary to - provide expressive control over the instrument. The proposed modulation matrix - technique may be seen as a generic and selfmodifying mapping mechanism integrated - in a dynamic interpolation scheme. 
It is implemented efficiently by taking advantage - of its inherent sparse matrix structure. The modulation matrix is used within - the Hadron Particle Synthesizer, a complex granular module with 200 synthesis - parameters and a simplified performance control structure with 4 expression parameters. ' - address: 'Oslo, Norway' - author: 'Brandtsegg, Öyvind and Saue, Sigurd and Johansen, Thom' - bibtex: "@inproceedings{Brandtsegg2011,\n abstract = {The article describes a flexible\ - \ mapping technique realized as a many-to-many dynamic mapping matrix. Digital\ - \ sound generation is typically controlled by a large number of parameters and\ - \ efficient and flexible mapping is necessary to provide expressive control over\ - \ the instrument. The proposed modulation matrix technique may be seen as a generic\ - \ and selfmodifying mapping mechanism integrated in a dynamic interpolation scheme.\ - \ It is implemented efficiently by taking advantage of its inherent sparse matrix\ - \ structure. The modulation matrix is used within the Hadron Particle Synthesizer,\ - \ a complex granular module with 200 synthesis parameters and a simplified performance\ - \ control structure with 4 expression parameters. },\n address = {Oslo, Norway},\n\ - \ author = {Brandtsegg, \\''{O}yvind and Saue, Sigurd and Johansen, Thom},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177969},\n issn = {2220-4806},\n keywords\ - \ = {Mapping, granular synthesis, modulation, live performance },\n pages = {316--319},\n\ - \ title = {A Modulation Matrix for Complex Parameter Sets},\n url = {http://www.nime.org/proceedings/2011/nime2011_316.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_17 + abstract: 'The NuiTrack IDE supports writing code for an active infrared camera + to track up to six bodies, with up to 25 target points on each person. 
The system + automatically assigns IDs to performers/users as they enter the tracking area, + but when occlusion of a performer occurs, or when a user exits and then re-enters + the tracking area, upon rediscovery of the user the system generates a new tracking + ID. Because of this any assigned and registered target tracking points for specific + users are lost, as are the linked abilities of that performer to control media + based on their movements. We describe a single camera system for overcoming this + problem by assigning IDs based on the colours worn by the performers, and then + using the colour tracking for updating and confirming identification when the + performer reappears after occlusion or upon re-entry. A video link is supplied + showing the system used for an interactive dance work with four dancers controlling + individual audio tracks. ' + address: 'Birmingham, UK' + author: 'Pritchard, Robert and Lavery, Ian' + bibtex: "@inproceedings{NIME20_17,\n abstract = {The NuiTrack IDE supports writing\ + \ code for an active infrared camera to track up to six bodies, with up to 25\ + \ target points on each person. The system automatically assigns IDs to performers/users\ + \ as they enter the tracking area, but when occlusion of a performer occurs, or\ + \ when a user exits and then re-enters the tracking area, upon rediscovery of\ + \ the user the system generates a new tracking ID. Because of this any assigned\ + \ and registered target tracking points for specific users are lost, as are the\ + \ linked abilities of that performer to control media based on their movements.\ + \ We describe a single camera system for overcoming this problem by assigning\ + \ IDs based on the colours worn by the performers, and then using the colour tracking\ + \ for updating and confirming identification when the performer reappears after\ + \ occlusion or upon re-entry. 
A video link is supplied showing the system used\ + \ for an interactive dance work with four dancers controlling individual audio\ + \ tracks. },\n address = {Birmingham, UK},\n author = {Pritchard, Robert and Lavery,\ + \ Ian},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.4813245},\n editor = {Romain\ + \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ + \ = {89--92},\n publisher = {Birmingham City University},\n title = {Inexpensive\ + \ Colour Tracking to Overcome Performer ID Loss },\n url = {https://www.nime.org/proceedings/2020/nime2020_paper17.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177969 + doi: 10.5281/zenodo.4813245 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Mapping, granular synthesis, modulation, live performance ' - pages: 316--319 - title: A Modulation Matrix for Complex Parameter Sets - url: http://www.nime.org/proceedings/2011/nime2011_316.pdf - year: 2011 + month: July + pages: 89--92 + publisher: Birmingham City University + title: 'Inexpensive Colour Tracking to Overcome Performer ID Loss ' + url: https://www.nime.org/proceedings/2020/nime2020_paper17.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Tseng2011 - address: 'Oslo, Norway' - author: 'Tseng, Yu-Chung and Liu, Che-Wei and Chi, Tzu-Heng and Wang, Hui-Yu' - bibtex: "@inproceedings{Tseng2011,\n address = {Oslo, Norway},\n author = {Tseng,\ - \ Yu-Chung and Liu, Che-Wei and Chi, Tzu-Heng and Wang, Hui-Yu},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178179},\n issn = {2220-4806},\n pages = {320--321},\n\ - \ title = {Sound Low Fun},\n url = {http://www.nime.org/proceedings/2011/nime2011_320.pdf},\n\ - \ year = {2011}\n}\n" + ID: 
NIME20_18 + abstract: 'In this study, an analog synthesizer module using Aloe vera was proposed + as a biomemristor. The recent revival of analog modular synthesizers explores + novel possibilities of sounds based on unconventional technologies such as integrating + biological forms and structures into traditional circuits. A biosignal has been + used in experimental music as the material for composition. However, the recent + development of a biocomputor using a slime mold biomemristor expands the use of + biomemristors in music. Based on prior research, characteristics of Aloe vera + as a biomemristor were electrically measured, and two types of analog synthesizer + modules were developed, current to voltage converter and current spike to voltage + converter. For this application, a live performance was conducted with the CVC + module and the possibilities as a new interface for musical expression were examined.' + address: 'Birmingham, UK' + author: 'Nishida, Kiyu and jo, kazuhiro' + bibtex: "@inproceedings{NIME20_18,\n abstract = {In this study, an analog synthesizer\ + \ module using Aloe vera was proposed as a biomemristor. The recent revival of\ + \ analog modular synthesizers explores novel possibilities of sounds based on\ + \ unconventional technologies such as integrating biological forms and structures\ + \ into traditional circuits. A biosignal has been used in experimental music as\ + \ the material for composition. However, the recent development of a biocomputor\ + \ using a slime mold biomemristor expands the use of biomemristors in music. Based\ + \ on prior research, characteristics of Aloe vera as a biomemristor were electrically\ + \ measured, and two types of analog synthesizer modules were developed, current\ + \ to voltage converter and current spike to voltage converter. 
For this application,\ + \ a live performance was conducted with the CVC module and the possibilities as\ + \ a new interface for musical expression were examined.},\n address = {Birmingham,\ + \ UK},\n author = {Nishida, Kiyu and jo, kazuhiro},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813249},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {93--96},\n presentation-video\ + \ = {https://youtu.be/bZaCd6igKEA},\n publisher = {Birmingham City University},\n\ + \ title = {Modules for analog synthesizers using Aloe vera biomemristor},\n url\ + \ = {https://www.nime.org/proceedings/2020/nime2020_paper18.pdf},\n year = {2020}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178179 + doi: 10.5281/zenodo.4813249 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - pages: 320--321 - title: Sound Low Fun - url: http://www.nime.org/proceedings/2011/nime2011_320.pdf - year: 2011 + month: July + pages: 93--96 + presentation-video: https://youtu.be/bZaCd6igKEA + publisher: Birmingham City University + title: Modules for analog synthesizers using Aloe vera biomemristor + url: https://www.nime.org/proceedings/2020/nime2020_paper18.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Berdahl2011 - abstract: 'The purpose of this brief paper is to revisit the question oflongevity - in present experimental practice and coin the termautonomous new media artefacts - (AutoNMA), which arecomplete and independent of external computer systems,so they - can be operable for a longer period of time andcan be demonstrated at a moment''s - notice. We argue thatplatforms for prototyping should promote the creation ofAutoNMA - to make extant the devices which will be a partof the future history of new media.' 
- address: 'Oslo, Norway' - author: 'Berdahl, Edgar and Chafe, Chris' - bibtex: "@inproceedings{Berdahl2011,\n abstract = {The purpose of this brief paper\ - \ is to revisit the question oflongevity in present experimental practice and\ - \ coin the termautonomous new media artefacts (AutoNMA), which arecomplete and\ - \ independent of external computer systems,so they can be operable for a longer\ - \ period of time andcan be demonstrated at a moment's notice. We argue thatplatforms\ - \ for prototyping should promote the creation ofAutoNMA to make extant the devices\ - \ which will be a partof the future history of new media.},\n address = {Oslo,\ - \ Norway},\n author = {Berdahl, Edgar and Chafe, Chris},\n booktitle = {Proceedings\ + ID: NIME20_19 + abstract: 'On several acoustic and electromechanical keyboard instruments, the produced + sound is not always strictly dependent exclusively on a discrete key velocity + parameter, and minute gesture details can affect the final sonic result. By contrast, + subtle variations in articulation have a relatively limited effect on the sound + generation when the keyboard controller uses the MIDI standard, used in the vast + majority of digital keyboards. In this paper we present an embedded platform that + can generate sound in response to a controller capable of sensing the continuous + position of keys on a keyboard. This platform enables the creation of keyboard-based + DMIs which allow for a richer set of interaction gestures than would be possible + through a MIDI keyboard, which we demonstrate through two example instruments. + First, in a Hammond organ emulator, the sensing device allows to recreate the + nuances of the interaction with the original instrument in a way a velocity-based + MIDI controller could not. Second, a nonlinear waveguide flute synthesizer is + shown as an example of the expressive capabilities that a continuous-keyboard + controller opens up in the creation of new keyboard-based DMIs.' 
+ address: 'Birmingham, UK' + author: 'Moro, Giulio and McPherson, Andrew' + bibtex: "@inproceedings{NIME20_19,\n abstract = {On several acoustic and electromechanical\ + \ keyboard instruments, the produced sound is not always strictly dependent exclusively\ + \ on a discrete key velocity parameter, and minute gesture details can affect\ + \ the final sonic result. By contrast, subtle variations in articulation have\ + \ a relatively limited effect on the sound generation when the keyboard controller\ + \ uses the MIDI standard, used in the vast majority of digital keyboards. In this\ + \ paper we present an embedded platform that can generate sound in response to\ + \ a controller capable of sensing the continuous position of keys on a keyboard.\ + \ This platform enables the creation of keyboard-based DMIs which allow for a\ + \ richer set of interaction gestures than would be possible through a MIDI keyboard,\ + \ which we demonstrate through two example instruments. First, in a Hammond organ\ + \ emulator, the sensing device allows to recreate the nuances of the interaction\ + \ with the original instrument in a way a velocity-based MIDI controller could\ + \ not. 
Second, a nonlinear waveguide flute synthesizer is shown as an example\ + \ of the expressive capabilities that a continuous-keyboard controller opens up\ + \ in the creation of new keyboard-based DMIs.},\n address = {Birmingham, UK},\n\ + \ author = {Moro, Giulio and McPherson, Andrew},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.4813253},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {97--102},\n presentation-video\ + \ = {https://youtu.be/Y137M9UoKKg},\n publisher = {Birmingham City University},\n\ + \ title = {A platform for low-latency continuous keyboard sensing and sound generation},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper19.pdf},\n year =\ + \ {2020}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.4813253 + editor: Romain Michon and Franziska Schroeder + issn: 2220-4806 + month: July + pages: 97--102 + presentation-video: https://youtu.be/Y137M9UoKKg + publisher: Birmingham City University + title: A platform for low-latency continuous keyboard sensing and sound generation + url: https://www.nime.org/proceedings/2020/nime2020_paper19.pdf + year: 2020 + + +- ENTRYTYPE: inproceedings + ID: NIME20_2 + abstract: 'Excello is a spreadsheet-based music composition and programming environment. + We co-developed Excello with feedback from 21 musicians at varying levels of musical + and computing experience. We asked: can the spreadsheet interface be used for + programmatic music creation? Our design process encountered questions such as + how time should be represented, whether amplitude and octave should be encoded + as properties of individual notes or entire phrases, and how best to leverage + standard spreadsheet features, such as formulae and copy-paste. 
We present the + user-centric rationale for our current design, and report a user study suggesting + that Excello''s notation retains similar cognitive dimensions to conventional + music composition tools, while allowing the user to write substantially complex + programmatic music.' + address: 'Birmingham, UK' + author: 'Sarkar, Advait and Mattinson, Henry' + bibtex: "@inproceedings{NIME20_2,\n abstract = {Excello is a spreadsheet-based music\ + \ composition and programming environment. We co-developed Excello with feedback\ + \ from 21 musicians at varying levels of musical and computing experience. We\ + \ asked: can the spreadsheet interface be used for programmatic music creation?\ + \ Our design process encountered questions such as how time should be represented,\ + \ whether amplitude and octave should be encoded as properties of individual notes\ + \ or entire phrases, and how best to leverage standard spreadsheet features, such\ + \ as formulae and copy-paste. We present the user-centric rationale for our current\ + \ design, and report a user study suggesting that Excello's notation retains similar\ + \ cognitive dimensions to conventional music composition tools, while allowing\ + \ the user to write substantially complex programmatic music.},\n address = {Birmingham,\ + \ UK},\n author = {Sarkar, Advait and Mattinson, Henry},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177953},\n issn = {2220-4806},\n keywords = {autonomous,\ - \ standalone, Satellite CCRMA, Arduino },\n pages = {322--323},\n title = {Autonomous\ - \ New Media Artefacts ( AutoNMA )},\n url = {http://www.nime.org/proceedings/2011/nime2011_322.pdf},\n\ - \ year = {2011}\n}\n" + \ doi = {10.5281/zenodo.4813256},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {11--16},\n publisher = {Birmingham\ + \ City University},\n title = {Excello: 
exploring spreadsheets for music composition},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper2.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177953 + doi: 10.5281/zenodo.4813256 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'autonomous, standalone, Satellite CCRMA, Arduino ' - pages: 322--323 - title: Autonomous New Media Artefacts ( AutoNMA ) - url: http://www.nime.org/proceedings/2011/nime2011_322.pdf - year: 2011 + month: July + pages: 11--16 + publisher: Birmingham City University + title: 'Excello: exploring spreadsheets for music composition' + url: https://www.nime.org/proceedings/2020/nime2020_paper2.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Yoo2011 - abstract: 'Recently, Microsoft introduced a game interface called Kinect for the - Xbox 360 video game platform. This interface enables users to control and interact - with the game console without the need to touch a controller. It largely increases - the users'' degree of freedom to express their emotion. In this paper, we first - describe the system we developed to use this interface for sound generation and - controlling musical expression. The skeleton data are extracted from users'' motions - and the data are translated to pre-defined MIDI data. We then use the MIDI data - to control several applications. To allow the translation between the data, we - implemented a simple Kinect-to-MIDI data convertor, which is introduced in this - paper. We describe two applications to make music with Kinect: we first generate - sound with Max/MSP, and then control the adlib with our own adlib generating system - by the body movements of the users. 
' - address: 'Oslo, Norway' - author: 'Yoo, Min-Joon and Beak, Jin-Wook and Lee, In-Kwon' - bibtex: "@inproceedings{Yoo2011,\n abstract = {Recently, Microsoft introduced a\ - \ game interface called Kinect for the Xbox 360 video game platform. This interface\ - \ enables users to control and interact with the game console without the need\ - \ to touch a controller. It largely increases the users' degree of freedom to\ - \ express their emotion. In this paper, we first describe the system we developed\ - \ to use this interface for sound generation and controlling musical expression.\ - \ The skeleton data are extracted from users' motions and the data are translated\ - \ to pre-defined MIDI data. We then use the MIDI data to control several applications.\ - \ To allow the translation between the data, we implemented a simple Kinect-to-MIDI\ - \ data convertor, which is introduced in this paper. We describe two applications\ - \ to make music with Kinect: we first generate sound with Max/MSP, and then control\ - \ the adlib with our own adlib generating system by the body movements of the\ - \ users. },\n address = {Oslo, Norway},\n author = {Yoo, Min-Joon and Beak, Jin-Wook\ - \ and Lee, In-Kwon},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178193},\n\ - \ issn = {2220-4806},\n keywords = {Kinect, gaming interface, sound generation,\ - \ adlib generation },\n pages = {324--325},\n title = {Creating Musical Expression\ - \ using Kinect},\n url = {http://www.nime.org/proceedings/2011/nime2011_324.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_20 + abstract: 'In NIME design, thorough attention has been devoted to feedback modalities, + including auditory, visual and haptic feedback. How the performer executes the + gestures to achieve a sound on an instrument, by contrast, appears to be less + examined. 
Previous research showed that auditory imagery, or the ability to hear + or recreate sounds in the mind even when no audible sound is present, is essential + to the sensorimotor control involved in playing an instrument. In this paper, + we enquire whether auditory imagery can also help to support skill transfer between + musical instruments resulting in possible implications for new instrument design. + To answer this question, we performed two experimental studies on pitch accuracy + and fluency where professional violinists were asked to play a modified violin. + Results showed altered or even possibly irrelevant auditory feedback on a modified + violin does not appear to be a significant impediment to performance. However, + performers need to have coherent imagery of what they want to do, and the sonic + outcome needs to be coupled to the motor program to achieve it. This finding shows + that the design lens should be shifted from a direct feedback model of instrumental + playing toward a model where imagery guides the playing process. This result is + in agreement with recent research on skilled sensorimotor control that highlights + the value of feedforward anticipation in embodied musical performance. It is also + of primary importance for the design of new instruments: new sounds that cannot + easily be imagined and that are not coupled to a motor program are not likely + to be easily performed on the instrument.' + address: 'Birmingham, UK' + author: 'Guidi, Andrea and Morreale, Fabio and McPherson, Andrew' + bibtex: "@inproceedings{NIME20_20,\n abstract = {In NIME design, thorough attention\ + \ has been devoted to feedback modalities, including auditory, visual and haptic\ + \ feedback. How the performer executes the gestures to achieve a sound on an instrument,\ + \ by contrast, appears to be less examined. 
Previous research showed that auditory\ + \ imagery, or the ability to hear or recreate sounds in the mind even when no\ + \ audible sound is present, is essential to the sensorimotor control involved\ + \ in playing an instrument. In this paper, we enquire whether auditory imagery\ + \ can also help to support skill transfer between musical instruments resulting\ + \ in possible implications for new instrument design. To answer this question,\ + \ we performed two experimental studies on pitch accuracy and fluency where professional\ + \ violinists were asked to play a modified violin. Results showed altered or even\ + \ possibly irrelevant auditory feedback on a modified violin does not appear to\ + \ be a significant impediment to performance. However, performers need to have\ + \ coherent imagery of what they want to do, and the sonic outcome needs to be\ + \ coupled to the motor program to achieve it. This finding shows that the design\ + \ lens should be shifted from a direct feedback model of instrumental playing\ + \ toward a model where imagery guides the playing process. This result is in agreement\ + \ with recent research on skilled sensorimotor control that highlights the value\ + \ of feedforward anticipation in embodied musical performance. 
It is also of primary\ + \ importance for the design of new instruments: new sounds that cannot easily\ + \ be imagined and that are not coupled to a motor program are not likely to be\ + \ easily performed on the instrument.},\n address = {Birmingham, UK},\n author\ + \ = {Guidi, Andrea and Morreale, Fabio and McPherson, Andrew},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813260},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {103--108},\n presentation-video\ + \ = {https://youtu.be/yK7Tg1kW2No},\n publisher = {Birmingham City University},\n\ + \ title = {Design for auditory imagery: altering instruments to explore performer\ + \ fluency},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper20.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178193 + doi: 10.5281/zenodo.4813260 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Kinect, gaming interface, sound generation, adlib generation ' - pages: 324--325 - title: Creating Musical Expression using Kinect - url: http://www.nime.org/proceedings/2011/nime2011_324.pdf - year: 2011 + month: July + pages: 103--108 + presentation-video: https://youtu.be/yK7Tg1kW2No + publisher: Birmingham City University + title: 'Design for auditory imagery: altering instruments to explore performer fluency' + url: https://www.nime.org/proceedings/2020/nime2020_paper20.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: DeJong2011 - abstract: 'This paper proposes a new research direction for the large family of - instrumental musical interfaces where sound is generated using digital granular - synthesis, and where interaction and control involve the (fine) operation of stiff, - flat contact surfaces. 
First, within a historical context, a general absence of, - and clear need for, tangible output that is dynamically instantiated by the grain-generating - process itself is identified. Second, to fill this gap, a concrete general approach - is proposed based on the careful construction of non-vibratory and vibratory force - pulses, in a one-to-one relationship with sonic grains.An informal pilot psychophysics - experiment initiating the approach was conducted, which took into account the - two main cases for applying forces to the human skin: perpendicular, and lateral. - Initial results indicate that the force pulse approach can enable perceivably - multidimensional, tangible display of the ongoing grain-generating process. Moreover, - it was found that this can be made to meaningfully happen (in real time) in the - same timescale of basic sonic grain generation. This is not a trivial property, - and provides an important and positive fundament for further developing this type - of enhanced display. It also leads to the exciting prospect of making arbitrary - sonic grains actual physical manipulanda. ' - address: 'Oslo, Norway' - author: 'de Jong, Staas' - bibtex: "@inproceedings{DeJong2011,\n abstract = {This paper proposes a new research\ - \ direction for the large family of instrumental musical interfaces where sound\ - \ is generated using digital granular synthesis, and where interaction and control\ - \ involve the (fine) operation of stiff, flat contact surfaces. 
First, within\ - \ a historical context, a general absence of, and clear need for, tangible output\ - \ that is dynamically instantiated by the grain-generating process itself is identified.\ - \ Second, to fill this gap, a concrete general approach is proposed based on the\ - \ careful construction of non-vibratory and vibratory force pulses, in a one-to-one\ - \ relationship with sonic grains.An informal pilot psychophysics experiment initiating\ - \ the approach was conducted, which took into account the two main cases for applying\ - \ forces to the human skin: perpendicular, and lateral. Initial results indicate\ - \ that the force pulse approach can enable perceivably multidimensional, tangible\ - \ display of the ongoing grain-generating process. Moreover, it was found that\ - \ this can be made to meaningfully happen (in real time) in the same timescale\ - \ of basic sonic grain generation. This is not a trivial property, and provides\ - \ an important and positive fundament for further developing this type of enhanced\ - \ display. It also leads to the exciting prospect of making arbitrary sonic grains\ - \ actual physical manipulanda. },\n address = {Oslo, Norway},\n author = {de Jong,\ - \ Staas},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178055},\n issn = {2220-4806},\n\ - \ keywords = {and others,and today granular,barry truax,curtis roads,granular\ - \ sound synthesis,instrumental control,tangible display,tangible manipulation},\n\ - \ pages = {326--328},\n title = {Making Grains Tangible: Microtouch for Microsound},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_326.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_21 + abstract: 'In this paper, we introduce the concept of VR Open Scores: aleatoric + score-based virtual scenarios where an aleatoric score is embedded in a virtual + environment. 
This idea builds upon the notion of graphic scores and composed instrument, + and apply them in a new context. Our proposal also explores possible parallels + between open meaning in interaction design, and aleatoric score, conceptualized + as Open Work by the Italian philosopher Umberto Eco. Our approach has two aims. + The first aim is to create an environment where users can immerse themselves in + the visual elements of a score while listening to the corresponding music. The + second aim is to facilitate users to develop a personal relationship with both + the system and the score. To achieve those aims, as a practical implementation + of our proposed concept, we developed two immersive scenarios: a 360º video and + an interactive space. We conclude presenting how our design aims were accomplished + in the two scenarios, and describing positive and negative elements of our implementations.' + address: 'Birmingham, UK' + author: 'Masu, Raul and Bala, Paulo and Ahmad, Muhammad and Correia, Nuno N. and + Nisi, Valentina and Nunes, Nuno and Romão, Teresa' + bibtex: "@inproceedings{NIME20_21,\n abstract = {In this paper, we introduce the\ + \ concept of VR Open Scores: aleatoric score-based virtual scenarios where an\ + \ aleatoric score is embedded in a virtual environment. This idea builds upon\ + \ the notion of graphic scores and composed instrument, and apply them in a new\ + \ context. Our proposal also explores possible parallels between open meaning\ + \ in interaction design, and aleatoric score, conceptualized as Open Work by the\ + \ Italian philosopher Umberto Eco. Our approach has two aims. The first aim is\ + \ to create an environment where users can immerse themselves in the visual elements\ + \ of a score while listening to the corresponding music. The second aim is to\ + \ facilitate users to develop a personal relationship with both the system and\ + \ the score. 
To achieve those aims, as a practical implementation of our proposed\ + \ concept, we developed two immersive scenarios: a 360º video and an interactive\ + \ space. We conclude presenting how our design aims were accomplished in the two\ + \ scenarios, and describing positive and negative elements of our implementations.},\n\ + \ address = {Birmingham, UK},\n author = {Masu, Raul and Bala, Paulo and Ahmad,\ + \ Muhammad and Correia, Nuno N. and Nisi, Valentina and Nunes, Nuno and Romão,\ + \ Teresa},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.4813262},\n editor = {Romain\ + \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ + \ = {109--114},\n presentation-video = {https://youtu.be/JSM6Rydz7iE},\n publisher\ + \ = {Birmingham City University},\n title = {VR Open Scores: Scores as Inspiration\ + \ for VR Scenarios},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper21.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178055 + doi: 10.5281/zenodo.4813262 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'and others,and today granular,barry truax,curtis roads,granular sound - synthesis,instrumental control,tangible display,tangible manipulation' - pages: 326--328 - title: 'Making Grains Tangible: Microtouch for Microsound' - url: http://www.nime.org/proceedings/2011/nime2011_326.pdf - year: 2011 + month: July + pages: 109--114 + presentation-video: https://youtu.be/JSM6Rydz7iE + publisher: Birmingham City University + title: 'VR Open Scores: Scores as Inspiration for VR Scenarios' + url: https://www.nime.org/proceedings/2020/nime2020_paper21.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Caramiaux2011 - abstract: 'This paper presents a prototypical tool for sound selection driven by - users'' gestures. 
Sound selection by gesturesis a particular case of "query by - content" in multimedia databases. Gesture-to-Sound matching is based on computing - the similarity between both gesture and sound parameters'' temporal evolution. - The tool presents three algorithms for matching gesture query to sound target. - Thesystem leads to several applications in sound design, virtualinstrument design - and interactive installation.' - address: 'Oslo, Norway' - author: 'Caramiaux, Baptiste and Bevilacqua, Frédéric and Schnell, Norbert' - bibtex: "@inproceedings{Caramiaux2011,\n abstract = {This paper presents a prototypical\ - \ tool for sound selection driven by users' gestures. Sound selection by gesturesis\ - \ a particular case of \"query by content\" in multimedia databases. Gesture-to-Sound\ - \ matching is based on computing the similarity between both gesture and sound\ - \ parameters' temporal evolution. The tool presents three algorithms for matching\ - \ gesture query to sound target. Thesystem leads to several applications in sound\ - \ design, virtualinstrument design and interactive installation.},\n address =\ - \ {Oslo, Norway},\n author = {Caramiaux, Baptiste and Bevilacqua, Fr\\'{e}d\\\ - '{e}ric and Schnell, Norbert},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177977},\n\ - \ issn = {2220-4806},\n keywords = {Query by Gesture, Time Series Analysis, Sonic\ - \ Interaction },\n pages = {329--330},\n title = {Sound Selection by Gestures},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_329.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_22 + abstract: 'The project takes a Universal Design approach to exploring the possibility + of creating a software platform to facilitate a Networked Ensemble for Disabled + musicians. 
In accordance with the Nothing About Us Without Us (Charlton, 1998) + principle I worked with a group of 15 professional musicians who are also disabled. + The group gave interviews as to their perspectives and needs around networked + music practices and this data was then analysed to look at how software design + could be developed to make it more accessible. We also identified key messages + for the wider design of digital musical instrument makers, performers and event + organisers to improve practice around working with and for disabled musicians. ' + address: 'Birmingham, UK' + author: 'Skuse, Amble H C and Knotts, Shelly' + bibtex: "@inproceedings{NIME20_22,\n abstract = {The project takes a Universal Design\ + \ approach to exploring the possibility of creating a software platform to facilitate\ + \ a Networked Ensemble for Disabled musicians. In accordance with the Nothing\ + \ About Us Without Us (Charlton, 1998) principle I worked with a group of 15 professional\ + \ musicians who are also disabled. The group gave interviews as to their perspectives\ + \ and needs around networked music practices and this data was then analysed to\ + \ look at how software design could be developed to make it more accessible. We\ + \ also identified key messages for the wider design of digital musical instrument\ + \ makers, performers and event organisers to improve practice around working with\ + \ and for disabled musicians. 
},\n address = {Birmingham, UK},\n author = {Skuse,\ + \ Amble H C and Knotts, Shelly},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813266},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {115--120},\n presentation-video = {https://youtu.be/m4D4FBuHpnE},\n\ + \ publisher = {Birmingham City University},\n title = {Creating an Online Ensemble\ + \ for Home Based Disabled Musicians: Disabled Access and Universal Design - why\ + \ disabled people must be at the heart of developing technology.},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper22.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177977 + doi: 10.5281/zenodo.4813266 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Query by Gesture, Time Series Analysis, Sonic Interaction ' - pages: 329--330 - title: Sound Selection by Gestures - url: http://www.nime.org/proceedings/2011/nime2011_329.pdf - year: 2011 + month: July + pages: 115--120 + presentation-video: https://youtu.be/m4D4FBuHpnE + publisher: Birmingham City University + title: 'Creating an Online Ensemble for Home Based Disabled Musicians: Disabled + Access and Universal Design - why disabled people must be at the heart of developing + technology.' + url: https://www.nime.org/proceedings/2020/nime2020_paper22.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Kerllenevich2011 - abstract: 'We propose and discuss an open source real-time interface that focuses - in the vast potential for interactive soundart creation emerging from biological - neural networks, asparadigmatic complex systems for musical exploration. 
Inparticular, - we focus on networks that are responsible for thegeneration of rhythmic patterns.The - interface relies uponthe idea of relating metaphorically neural behaviors to electronic - and acoustic instruments notes, by means of flexiblemapping strategies. The user - can intuitively design network configurations by dynamically creating neurons - andconfiguring their inter-connectivity. The core of the systemis based in events - emerging from his network design, whichfunctions in a similar way to what happens - in real smallneural networks. Having multiple signal and data inputsand outputs, - as well as standard communications protocolssuch as MIDI, OSC and TCP/IP, it becomes - and uniquetool for composers and performers, suitable for different performance - scenarios, like live electronics, sound installationsand telematic concerts.' - address: 'Oslo, Norway' - author: 'Kerlleñevich, Hernán and Eguía, Manuel C. and Riera, Pablo E.' - bibtex: "@inproceedings{Kerllenevich2011,\n abstract = {We propose and discuss an\ - \ open source real-time interface that focuses in the vast potential for interactive\ - \ soundart creation emerging from biological neural networks, asparadigmatic complex\ - \ systems for musical exploration. Inparticular, we focus on networks that are\ - \ responsible for thegeneration of rhythmic patterns.The interface relies uponthe\ - \ idea of relating metaphorically neural behaviors to electronic and acoustic\ - \ instruments notes, by means of flexiblemapping strategies. The user can intuitively\ - \ design network configurations by dynamically creating neurons andconfiguring\ - \ their inter-connectivity. The core of the systemis based in events emerging\ - \ from his network design, whichfunctions in a similar way to what happens in\ - \ real smallneural networks. 
Having multiple signal and data inputsand outputs,\ - \ as well as standard communications protocolssuch as MIDI, OSC and TCP/IP, it\ - \ becomes and uniquetool for composers and performers, suitable for different\ - \ performance scenarios, like live electronics, sound installationsand telematic\ - \ concerts.},\n address = {Oslo, Norway},\n author = {Kerlle\\~{n}evich, Hern\\\ - '{a}n and Egu\\'{\\i}a, Manuel C. and Riera, Pablo E.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178063},\n issn = {2220-4806},\n keywords = {rhythm generation,\ - \ biological neural networks, complex patterns, musical interface, network performance\ - \ },\n pages = {331--336},\n presentation-video = {https://vimeo.com/26874396/},\n\ - \ title = {An Open Source Interface based on Biological Neural Networks for Interactive\ - \ Music Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_331.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_23 + abstract: 'As virtual reality (VR) continues to gain prominence as a medium for + artistic expression, a growing number of projects explore the use of VR for musical + interaction design. In this paper, we discuss the concept of VIMEs (Virtual Interfaces + for Musical Expression) through four case studies that explore different aspects + of musical interactions in virtual environments. We then describe a user study + designed to evaluate these VIMEs in terms of various usability considerations, + such as immersion, perception of control, learnability and physical effort. We + offer the results of the study, articulating the relationship between the design + of a VIME and the various performance behaviors observed among its users. Finally, + we discuss how these results, combined with recent developments in VR technology, + can inform the design of new VIMEs.' 
+ address: 'Birmingham, UK' + author: 'Çamcı, Anıl and Vilaplana, Matias and Wang, Ruth' + bibtex: "@inproceedings{NIME20_23,\n abstract = {As virtual reality (VR) continues\ + \ to gain prominence as a medium for artistic expression, a growing number of\ + \ projects explore the use of VR for musical interaction design. In this paper,\ + \ we discuss the concept of VIMEs (Virtual Interfaces for Musical Expression)\ + \ through four case studies that explore different aspects of musical interactions\ + \ in virtual environments. We then describe a user study designed to evaluate\ + \ these VIMEs in terms of various usability considerations, such as immersion,\ + \ perception of control, learnability and physical effort. We offer the results\ + \ of the study, articulating the relationship between the design of a VIME and\ + \ the various performance behaviors observed among its users. Finally, we discuss\ + \ how these results, combined with recent developments in VR technology, can inform\ + \ the design of new VIMEs.},\n address = {Birmingham, UK},\n author = {Çamcı,\ + \ Anıl and Vilaplana, Matias and Wang, Ruth},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.4813268},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {121--126},\n publisher = {Birmingham\ + \ City University},\n title = {Exploring the Affordances of VR for Musical Interaction\ + \ Design with VIMEs},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper23.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178063 + doi: 10.5281/zenodo.4813268 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'rhythm generation, biological neural networks, complex patterns, musical - interface, network performance ' - pages: 331--336 - 
presentation-video: https://vimeo.com/26874396/ - title: An Open Source Interface based on Biological Neural Networks for Interactive - Music Performance - url: http://www.nime.org/proceedings/2011/nime2011_331.pdf - year: 2011 + month: July + pages: 121--126 + publisher: Birmingham City University + title: Exploring the Affordances of VR for Musical Interaction Design with VIMEs + url: https://www.nime.org/proceedings/2020/nime2020_paper23.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Gillian2011 - abstract: 'This paper presents a novel algorithm that has been specifically designed - for the recognition of multivariate temporal musical gestures. The algorithm is - based on DynamicTime Warping and has been extended to classify any N dimensional - signal, automatically compute a classificationthreshold to reject any data that - is not a valid gesture andbe quickly trained with a low number of training examples.The - algorithm is evaluated using a database of 10 temporalgestures performed by 10 - participants achieving an averagecross-validation result of 99%.' - address: 'Oslo, Norway' - author: 'Gillian, Nicholas and Knapp, Benjamin and O''Modhrain, Sile' - bibtex: "@inproceedings{Gillian2011,\n abstract = {This paper presents a novel algorithm\ - \ that has been specifically designed for the recognition of multivariate temporal\ - \ musical gestures. 
The algorithm is based on DynamicTime Warping and has been\ - \ extended to classify any N dimensional signal, automatically compute a classificationthreshold\ - \ to reject any data that is not a valid gesture andbe quickly trained with a\ - \ low number of training examples.The algorithm is evaluated using a database\ - \ of 10 temporalgestures performed by 10 participants achieving an averagecross-validation\ - \ result of 99%.},\n address = {Oslo, Norway},\n author = {Gillian, Nicholas and\ - \ Knapp, Benjamin and O'Modhrain, Sile},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178029},\n\ - \ issn = {2220-4806},\n keywords = {Dynamic Time Warping, Gesture Recognition,\ - \ Musician-Computer Interaction, Multivariate Temporal Gestures },\n pages = {337--342},\n\ - \ presentation-video = {https://vimeo.com/26874428/},\n title = {Recognition Of\ - \ Multivariate Temporal Musical Gestures Using N-Dimensional Dynamic Time Warping},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_337.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_24 + abstract: 'The continued growth of modern VR (virtual reality) platforms into mass + adoption is fundamentally driven by the work of content creators who offer engaging + experiences. It is therefore essential to design accessible creativity support + tools that can facilitate the work of a broad range of practitioners in this domain. + In this paper, we focus on one facet of VR content creation, namely immersive + audio design. We discuss a suite of design tools that enable both novice and expert + users to rapidly prototype immersive sonic environments across desktop, virtual + reality and augmented reality platforms. We discuss the design considerations + adopted for each implementation, and how the individual systems informed one another + in terms of interaction design. 
We then offer a preliminary evaluation of these + systems with reports from first-time users. Finally, we discuss our road-map for + improving individual and collaborative creative experiences across platforms and + realities in the context of immersive audio.' + address: 'Birmingham, UK' + author: 'Çamcı, Anıl and Willette, Aaron and Gargi, Nachiketa and Kim, Eugene and + Xu, Julia and Lai, Tanya' + bibtex: "@inproceedings{NIME20_24,\n abstract = {The continued growth of modern\ + \ VR (virtual reality) platforms into mass adoption is fundamentally driven by\ + \ the work of content creators who offer engaging experiences. It is therefore\ + \ essential to design accessible creativity support tools that can facilitate\ + \ the work of a broad range of practitioners in this domain. In this paper, we\ + \ focus on one facet of VR content creation, namely immersive audio design. We\ + \ discuss a suite of design tools that enable both novice and expert users to\ + \ rapidly prototype immersive sonic environments across desktop, virtual reality\ + \ and augmented reality platforms. We discuss the design considerations adopted\ + \ for each implementation, and how the individual systems informed one another\ + \ in terms of interaction design. We then offer a preliminary evaluation of these\ + \ systems with reports from first-time users. 
Finally, we discuss our road-map\ + \ for improving individual and collaborative creative experiences across platforms\ + \ and realities in the context of immersive audio.},\n address = {Birmingham,\ + \ UK},\n author = {Çamcı, Anıl and Willette, Aaron and Gargi, Nachiketa and Kim,\ + \ Eugene and Xu, Julia and Lai, Tanya},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813270},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {127--130},\n publisher = {Birmingham City University},\n\ + \ title = {Cross-platform and Cross-reality Design of Immersive Sonic Environments},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper24.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178029 + doi: 10.5281/zenodo.4813270 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Dynamic Time Warping, Gesture Recognition, Musician-Computer Interaction, - Multivariate Temporal Gestures ' - pages: 337--342 - presentation-video: https://vimeo.com/26874428/ - title: Recognition Of Multivariate Temporal Musical Gestures Using N-Dimensional - Dynamic Time Warping - url: http://www.nime.org/proceedings/2011/nime2011_337.pdf - year: 2011 + month: July + pages: 127--130 + publisher: Birmingham City University + title: Cross-platform and Cross-reality Design of Immersive Sonic Environments + url: https://www.nime.org/proceedings/2020/nime2020_paper24.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Gillian2011a - abstract: 'This paper presents the SARC EyesWeb Catalog, (SEC),a machine learning - toolbox that has been specifically developed for musician-computer interaction. 
- The SEC features a large number of machine learning algorithms that can be used - in real-time to recognise static postures, perform regression and classify multivariate - temporal gestures. The algorithms within the toolbox have been designed to work - with any N -dimensional signal and can be quickly trained with a small number - of training examples. We also provide the motivation for the algorithms used for - the recognition of musical gestures to achieve a low intra-personal generalisation - error, as opposed to the inter-personal generalisation error that is more common - in other areas of human-computer interaction.' - address: 'Oslo, Norway' - author: 'Gillian, Nicholas and Knapp, Benjamin and O''Modhrain, Sile' - bibtex: "@inproceedings{Gillian2011a,\n abstract = {This paper presents the SARC\ - \ EyesWeb Catalog, (SEC),a machine learning toolbox that has been specifically\ - \ developed for musician-computer interaction. The SEC features a large number\ - \ of machine learning algorithms that can be used in real-time to recognise static\ - \ postures, perform regression and classify multivariate temporal gestures. 
The\ - \ algorithms within the toolbox have been designed to work with any N -dimensional\ - \ signal and can be quickly trained with a small number of training examples.\ - \ We also provide the motivation for the algorithms used for the recognition of\ - \ musical gestures to achieve a low intra-personal generalisation error, as opposed\ - \ to the inter-personal generalisation error that is more common in other areas\ - \ of human-computer interaction.},\n address = {Oslo, Norway},\n author = {Gillian,\ - \ Nicholas and Knapp, Benjamin and O'Modhrain, Sile},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178031},\n issn = {2220-4806},\n keywords = {Machine\ - \ learning, gesture recognition, musician-computer interaction, SEC },\n pages\ - \ = {343--348},\n presentation-video = {https://vimeo.com/26872843/},\n title\ - \ = {A Machine Learning Toolbox For Musician Computer Interaction},\n url = {http://www.nime.org/proceedings/2011/nime2011_343.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_25 + abstract: 'Silver is an artwork that deals with the emotional feeling of contact + by exaggerating it acoustically. It originates from an interactive room installation, + where several textile sculptures merge with sounds. Silver is made from a wire + mesh and its surface is reactive to closeness and touch. This material property + forms a hybrid of artwork and parametric controller for the real-time sound generation. + The textile quality of the fine steel wire-mesh evokes a haptic familiarity inherent + to textile materials. This makes it easy for the audience to overcome the initial + threshold barrier to get in touch with the artwork in an exhibition situation. + Additionally, the interaction is not dependent on visuals. The characteristics + of the surface sensor allows a user to play the instrument without actually touching + it.' 
+ address: 'Birmingham, UK' + author: 'Schebella, Marius and Fischbacher, Gertrud and Mosher, Matthew' + bibtex: "@inproceedings{NIME20_25,\n abstract = {Silver is an artwork that deals\ + \ with the emotional feeling of contact by exaggerating it acoustically. It originates\ + \ from an interactive room installation, where several textile sculptures merge\ + \ with sounds. Silver is made from a wire mesh and its surface is reactive to\ + \ closeness and touch. This material property forms a hybrid of artwork and parametric\ + \ controller for the real-time sound generation. The textile quality of the fine\ + \ steel wire-mesh evokes a haptic familiarity inherent to textile materials. \ + \ This makes it easy for the audience to overcome the initial threshold barrier\ + \ to get in touch with the artwork in an exhibition situation. Additionally, the\ + \ interaction is not dependent on visuals. The characteristics of the surface\ + \ sensor allows a user to play the instrument without actually touching it.},\n\ + \ address = {Birmingham, UK},\n author = {Schebella, Marius and Fischbacher, Gertrud\ + \ and Mosher, Matthew},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813272},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {131--132},\n publisher = {Birmingham City University},\n\ + \ title = {Silver: A Textile Wireframe Interface for the Interactive Sound Installation\ + \ Idiosynkrasia},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper25.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178031 + doi: 10.5281/zenodo.4813272 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Machine learning, gesture recognition, musician-computer interaction, - SEC ' - pages: 343--348 - 
presentation-video: https://vimeo.com/26872843/ - title: A Machine Learning Toolbox For Musician Computer Interaction - url: http://www.nime.org/proceedings/2011/nime2011_343.pdf - year: 2011 + month: July + pages: 131--132 + publisher: Birmingham City University + title: 'Silver: A Textile Wireframe Interface for the Interactive Sound Installation + Idiosynkrasia' + url: https://www.nime.org/proceedings/2020/nime2020_paper25.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Jessop2011 - abstract: "In composer Tod Machover's new opera Death and the Powers, the main character\ - \ uploads his consciousness into anelaborate computer system to preserve his essence\ - \ and agencyafter his corporeal death. Consequently, for much of theopera, the\ - \ stage and the environment itself come alive asthe main character. This creative\ - \ need brings with it a hostof technical challenges and opportunities. In order\ - \ to satisfythe needs of this storyline, Machover's Opera of the Futuregroup at\ - \ the MIT Media Lab has developed a suite of newperformance technologies, including\ - \ robot characters, interactive performance capture systems, mapping systems for,\n\ - ,\nauthoring interactive multimedia performances, new musical instruments, unique\ - \ spatialized sound controls, anda unified control system for all these technological\ - \ components. While developed for a particular theatrical production, many of\ - \ the concepts and design procedures remain relevant to broader contexts including\ - \ performance,robotics, and interaction design." - address: 'Oslo, Norway' - author: 'Jessop, Elena and Torpey, Peter A. and Bloomberg, Benjamin' - bibtex: "@inproceedings{Jessop2011,\n abstract = {In composer Tod Machover's new\ - \ opera Death and the Powers, the main character uploads his consciousness into\ - \ anelaborate computer system to preserve his essence and agencyafter his corporeal\ - \ death. 
Consequently, for much of theopera, the stage and the environment itself\ - \ come alive asthe main character. This creative need brings with it a hostof\ - \ technical challenges and opportunities. In order to satisfythe needs of this\ - \ storyline, Machover's Opera of the Futuregroup at the MIT Media Lab has developed\ - \ a suite of newperformance technologies, including robot characters, interactive\ - \ performance capture systems, mapping systems for,\n,\nauthoring interactive\ - \ multimedia performances, new musical instruments, unique spatialized sound controls,\ - \ anda unified control system for all these technological components. While developed\ - \ for a particular theatrical production, many of the concepts and design procedures\ - \ remain relevant to broader contexts including performance,robotics, and interaction\ - \ design.},\n address = {Oslo, Norway},\n author = {Jessop, Elena and Torpey,\ - \ Peter A. and Bloomberg, Benjamin},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178051},\n\ - \ issn = {2220-4806},\n keywords = {opera, Death and the Powers, Tod Machover,\ - \ gestural interfaces, Disembodied Performance, ambisonics },\n pages = {349--354},\n\ - \ presentation-video = {https://vimeo.com/26878423/},\n title = {Music and Technology\ - \ in Death and the Powers},\n url = {http://www.nime.org/proceedings/2011/nime2011_349.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_26 + abstract: 'Musical expressivity is an important aspect of musical performance for + humans as well as robotic musicians. We present a novel mechatronics-driven implementation + of Brushless Direct Current (BLDC) motors in a robotic marimba player, named ANON, + designed to improve speed, dynamic range (loudness), and ultimately perceived + musical expressivity in comparison to state-of-the-art robotic percussionist actuators. 
+ In an objective test of dynamic range, we find that our implementation provides + wider and more consistent dynamic range response in comparison with solenoid-based + robotic percussionists. Our implementation also outperforms both solenoid and + human marimba players in striking speed. In a subjective listening test measuring + musical expressivity, our system performs significantly better than a solenoid-based + system and is statistically indistinguishable from human performers.' + address: 'Birmingham, UK' + author: 'Yang, Ning and Savery, Richard and Sankaranarayanan, Raghavasimhan and + Zahray, Lisa and Weinberg, Gil' + bibtex: "@inproceedings{NIME20_26,\n abstract = {Musical expressivity is an important\ + \ aspect of musical performance for humans as well as robotic musicians. We present\ + \ a novel mechatronics-driven implementation of Brushless Direct Current (BLDC)\ + \ motors in a robotic marimba player, named ANON, designed to improve speed, dynamic\ + \ range (loudness), and ultimately perceived musical expressivity in comparison\ + \ to state-of-the-art robotic percussionist actuators. In an objective test of\ + \ dynamic range, we find that our implementation provides wider and more consistent\ + \ dynamic range response in comparison with solenoid-based robotic percussionists.\ + \ Our implementation also outperforms both solenoid and human marimba players\ + \ in striking speed. 
In a subjective listening test measuring musical expressivity,\ + \ our system performs significantly better than a solenoid-based system and is\ + \ statistically indistinguishable from human performers.},\n address = {Birmingham,\ + \ UK},\n author = {Yang, Ning and Savery, Richard and Sankaranarayanan, Raghavasimhan\ + \ and Zahray, Lisa and Weinberg, Gil},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813274},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {133--138},\n presentation-video = {https://youtu.be/KsQNlArUv2k},\n\ + \ publisher = {Birmingham City University},\n title = {Mechatronics-Driven Musical\ + \ Expressivity for Robotic Percussionists},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper26.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178051 + doi: 10.5281/zenodo.4813274 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'opera, Death and the Powers, Tod Machover, gestural interfaces, Disembodied - Performance, ambisonics ' - pages: 349--354 - presentation-video: https://vimeo.com/26878423/ - title: Music and Technology in Death and the Powers - url: http://www.nime.org/proceedings/2011/nime2011_349.pdf - year: 2011 + month: July + pages: 133--138 + presentation-video: https://youtu.be/KsQNlArUv2k + publisher: Birmingham City University + title: Mechatronics-Driven Musical Expressivity for Robotic Percussionists + url: https://www.nime.org/proceedings/2020/nime2020_paper26.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Zappi2011 - abstract: 'In this paper we introduce a multimodal platform for Hybrid Reality live - performances: by means of non-invasiveVirtual Reality technology, we developed - a system to presentartists and interactive virtual objects 
in audio/visual choreographies - on the same real stage. These choreographiescould include spectators too, providing - them with the possibility to directly modify the scene and its audio/visual features. - We also introduce the first interactive performancestaged with this technology, - in which an electronic musician played live five tracks manipulating the 3D projectedvisuals. - As questionnaires have been distributed after theshow, in the last part of this - work we discuss the analysisof collected data, underlining positive and negative - aspectsof the proposed experience.This paper belongs together with a performance - proposalcalled Dissonance, in which two performers exploit the platform to create - a progressive soundtrack along with the exploration of an interactive virtual - environment.' - address: 'Oslo, Norway' - author: 'Zappi, Victor and Mazzanti, Dario and Brogni, Andrea and Caldwell, Darwin' - bibtex: "@inproceedings{Zappi2011,\n abstract = {In this paper we introduce a multimodal\ - \ platform for Hybrid Reality live performances: by means of non-invasiveVirtual\ - \ Reality technology, we developed a system to presentartists and interactive\ - \ virtual objects in audio/visual choreographies on the same real stage. These\ - \ choreographiescould include spectators too, providing them with the possibility\ - \ to directly modify the scene and its audio/visual features. We also introduce\ - \ the first interactive performancestaged with this technology, in which an electronic\ - \ musician played live five tracks manipulating the 3D projectedvisuals. 
As questionnaires\ - \ have been distributed after theshow, in the last part of this work we discuss\ - \ the analysisof collected data, underlining positive and negative aspectsof the\ - \ proposed experience.This paper belongs together with a performance proposalcalled\ - \ Dissonance, in which two performers exploit the platform to create a progressive\ - \ soundtrack along with the exploration of an interactive virtual environment.},\n\ - \ address = {Oslo, Norway},\n author = {Zappi, Victor and Mazzanti, Dario and\ - \ Brogni, Andrea and Caldwell, Darwin},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178197},\n\ - \ issn = {2220-4806},\n keywords = {Interactive Performance, Hybrid Choreographies,\ - \ Virtual Reality, Music Control },\n pages = {355--360},\n presentation-video\ - \ = {https://vimeo.com/26880256/},\n title = {Design and Evaluation of a Hybrid\ - \ Reality Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_355.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_27 + abstract: 'Discovering outmoded or obsolete technologies and appropriating them + in creative practice can uncover new relationships between those technologies. + Using a media archaeological research approach, this paper presents the electromechanical + relay and a book of random numbers as related forms of obsolete media. Situated + within the context of electromechanical sound art, the work uses a non-deterministic + approach to explore the non-linear and unpredictable agency and materiality of + the objects in the work. Developed by the first author, Click::RAND is an object-based + sound installation. The work has been developed as an audio-visual representation + of a genealogy of connections between these two forms of media in the history + of computing.' 
+ address: 'Birmingham, UK' + author: 'Dunham, Paul' + bibtex: "@inproceedings{NIME20_27,\n abstract = {Discovering outmoded or obsolete\ + \ technologies and appropriating them in creative practice can uncover new relationships\ + \ between those technologies. Using a media archaeological research approach,\ + \ this paper presents the electromechanical relay and a book of random numbers\ + \ as related forms of obsolete media. Situated within the context of electromechanical\ + \ sound art, the work uses a non-deterministic approach to explore the non-linear\ + \ and unpredictable agency and materiality of the objects in the work. Developed\ + \ by the first author, Click::RAND is an object-based sound installation. The\ + \ work has been developed as an audio-visual representation of a genealogy of\ + \ connections between these two forms of media in the history of computing.},\n\ + \ address = {Birmingham, UK},\n author = {Dunham, Paul},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813276},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {139--142},\n presentation-video\ + \ = {https://youtu.be/vWKw8H0F9cI},\n publisher = {Birmingham City University},\n\ + \ title = {Click::RAND. 
A Minimalist Sound Sculpture.},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper27.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178197 + doi: 10.5281/zenodo.4813276 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Interactive Performance, Hybrid Choreographies, Virtual Reality, Music - Control ' - pages: 355--360 - presentation-video: https://vimeo.com/26880256/ - title: Design and Evaluation of a Hybrid Reality Performance - url: http://www.nime.org/proceedings/2011/nime2011_355.pdf - year: 2011 + month: July + pages: 139--142 + presentation-video: https://youtu.be/vWKw8H0F9cI + publisher: Birmingham City University + title: 'Click::RAND. A Minimalist Sound Sculpture.' + url: https://www.nime.org/proceedings/2020/nime2020_paper27.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Garcia2011a - abstract: 'We conducted three studies with contemporary music composers at IRCAM. - We found that even highly computer-literate composers use an iterative process - that begins with expressing musical ideas on paper, followed by active parallel - exploration on paper and in software, prior to final execution of their ideas - as an original score. We conducted a participatory design study that focused on - the creative exploration phase, to design tools that help composers better integrate - their paper-based and electronic activities. We then developed InkSplorer as a - technology probe that connects users'' hand-written gestures on paper to Max/MSP - and OpenMusic. Composers appropriated InkSplorer according to their preferred - composition styles, emphasizing its ability to help them quickly explore musical - ideas on paper as they interact with the computer. 
We conclude with recommendations - for designing interactive paper tools that support the creative process, letting - users explore musical ideas both on paper and electronically. ' - address: 'Oslo, Norway' - author: 'Garcia, Jérémie and Tsandilas, Theophanis and Agon, Carlos and Mackay, - Wendy E.' - bibtex: "@inproceedings{Garcia2011a,\n abstract = {We conducted three studies with\ - \ contemporary music composers at IRCAM. We found that even highly computer-literate\ - \ composers use an iterative process that begins with expressing musical ideas\ - \ on paper, followed by active parallel exploration on paper and in software,\ - \ prior to final execution of their ideas as an original score. We conducted a\ - \ participatory design study that focused on the creative exploration phase, to\ - \ design tools that help composers better integrate their paper-based and electronic\ - \ activities. We then developed InkSplorer as a technology probe that connects\ - \ users' hand-written gestures on paper to Max/MSP and OpenMusic. Composers appropriated\ - \ InkSplorer according to their preferred composition styles, emphasizing its\ - \ ability to help them quickly explore musical ideas on paper as they interact\ - \ with the computer. We conclude with recommendations for designing interactive\ - \ paper tools that support the creative process, letting users explore musical\ - \ ideas both on paper and electronically. },\n address = {Oslo, Norway},\n author\ - \ = {Garcia, J\\'{e}r\\'{e}mie and Tsandilas, Theophanis and Agon, Carlos and\ - \ Mackay, Wendy E.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178027},\n\ - \ issn = {2220-4806},\n keywords = {Composer, Creativity, Design Exploration,\ - \ InkSplorer, Interactive Paper, OpenMusic, Technology Probes. 
},\n pages = {361--366},\n\ - \ presentation-video = {https://vimeo.com/26881368/},\n title = {InkSplorer :\ - \ Exploring Musical Ideas on Paper and Computer},\n url = {http://www.nime.org/proceedings/2011/nime2011_361.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_28 + abstract: 'This paper reports on the experience gained after five years of teaching + a NIME master course designed specifically for artists. A playful pedagogical + approach based on practice-based methods is presented and evaluated. My goal was + introducing the art of NIME design and performance giving less emphasis to technology. + Instead of letting technology determine how we teach and think during the class, + I propose fostering at first the student''s active construction and understanding + of the field experimenting with physical materials,sound production and bodily + movements. For this intention I developed a few classroom exercises which my students + had to study and practice. During this period of five years, 95 students attended + the course. At the end of the semester course, each student designed, built and + performed a new interface for musical expression in front of an audience. Thus, + in this paper I describe and discuss the benefits of applying playfulness and + practice-based methods for teaching NIME in art universities. I introduce the + methods and classroom exercises developed and finally I present some lessons learned + from this pedagogical experience.' + address: 'Birmingham, UK' + author: 'Tomás, Enrique' + bibtex: "@inproceedings{NIME20_28,\n abstract = {This paper reports on the experience\ + \ gained after five years of teaching a NIME master course designed specifically\ + \ for artists. A playful pedagogical approach based on practice-based methods\ + \ is presented and evaluated. My goal was introducing the art of NIME design and\ + \ performance giving less emphasis to technology. 
Instead of letting technology\ + \ determine how we teach and think during the class, I propose fostering at first\ + \ the student's active construction and understanding of the field experimenting\ + \ with physical materials,sound production and bodily movements. For this intention\ + \ I developed a few classroom exercises which my students had to study and practice.\ + \ During this period of five years, 95 students attended the course. At the end\ + \ of the semester course, each student designed, built and performed a new interface\ + \ for musical expression in front of an audience. Thus, in this paper I describe\ + \ and discuss the benefits of applying playfulness and practice-based methods\ + \ for teaching NIME in art universities. I introduce the methods and classroom\ + \ exercises developed and finally I present some lessons learned from this pedagogical\ + \ experience.},\n address = {Birmingham, UK},\n author = {Tomás, Enrique},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813280},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {143--148},\n\ + \ presentation-video = {https://youtu.be/94o3J3ozhMs},\n publisher = {Birmingham\ + \ City University},\n title = {A Playful Approach to Teaching NIME: Pedagogical\ + \ Methods from a Practice-Based Perspective},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper28.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178027 + doi: 10.5281/zenodo.4813280 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Composer, Creativity, Design Exploration, InkSplorer, Interactive Paper, - OpenMusic, Technology Probes. 
' - pages: 361--366 - presentation-video: https://vimeo.com/26881368/ - title: 'InkSplorer : Exploring Musical Ideas on Paper and Computer' - url: http://www.nime.org/proceedings/2011/nime2011_361.pdf - year: 2011 + month: July + pages: 143--148 + presentation-video: https://youtu.be/94o3J3ozhMs + publisher: Birmingham City University + title: 'A Playful Approach to Teaching NIME: Pedagogical Methods from a Practice-Based + Perspective' + url: https://www.nime.org/proceedings/2020/nime2020_paper28.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Lopez2011 - abstract: 'The DJ culture uses a gesture lexicon strongly rooted in thetraditional - setup of turntables and a mixer. As novel toolsare introduced in the DJ community, - this lexicon is adaptedto the features they provide. In particular, multitouch - technologies can offer a new syntax while still supporting the oldlexicon, which - is desired by DJs.We present a classification of DJ tools, from an interaction - point of view, that divides the previous work into Traditional, Virtual and Hybrid - setups. Moreover, we presenta multitouch tabletop application, developed with - a groupof DJ consultants to ensure an adequate implementation ofthe traditional - gesture lexicon.To conclude, we conduct an expert evaluation, with tenDJ users - in which we compare the three DJ setups with ourprototype. The study revealed - that our proposal suits expectations of Club/Radio-DJs, but fails against the - mentalmodel of Scratch-DJs, due to the lack of haptic feedback torepresent the - record''s physical rotation. Furthermore, testsshow that our multitouch DJ setup, - reduces task durationwhen compared with Virtual setups.' - address: 'Oslo, Norway' - author: 'Lopez, Pedro and Ferreira, Alfredo and Pereira, J. A. Madeiras' - bibtex: "@inproceedings{Lopez2011,\n abstract = {The DJ culture uses a gesture lexicon\ - \ strongly rooted in thetraditional setup of turntables and a mixer. 
As novel\ - \ toolsare introduced in the DJ community, this lexicon is adaptedto the features\ - \ they provide. In particular, multitouch technologies can offer a new syntax\ - \ while still supporting the oldlexicon, which is desired by DJs.We present a\ - \ classification of DJ tools, from an interaction point of view, that divides\ - \ the previous work into Traditional, Virtual and Hybrid setups. Moreover, we\ - \ presenta multitouch tabletop application, developed with a groupof DJ consultants\ - \ to ensure an adequate implementation ofthe traditional gesture lexicon.To conclude,\ - \ we conduct an expert evaluation, with tenDJ users in which we compare the three\ - \ DJ setups with ourprototype. The study revealed that our proposal suits expectations\ - \ of Club/Radio-DJs, but fails against the mentalmodel of Scratch-DJs, due to\ - \ the lack of haptic feedback torepresent the record's physical rotation. Furthermore,\ - \ testsshow that our multitouch DJ setup, reduces task durationwhen compared with\ - \ Virtual setups.},\n address = {Oslo, Norway},\n author = {Lopez, Pedro and Ferreira,\ - \ Alfredo and Pereira, J. A. 
Madeiras},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178093},\n\ - \ issn = {2220-4806},\n keywords = {DJing, Multitouch Interaction, Expert User\ - \ evaluation, HCI },\n pages = {367--372},\n presentation-video = {https://vimeo.com/26881380/},\n\ - \ title = {Battle of the DJs: an HCI Perspective of Traditional, Virtual, Hybrid\ - \ and Multitouch DJing},\n url = {http://www.nime.org/proceedings/2011/nime2011_367.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_29 + abstract: 'Using open-source and creative coding frameworks, a team of artist-engineers + from Portland Community College working with artists that experience Intellectual/Developmental + disabilities prototyped an ensemble of adapted instruments and synthesizers that + facilitate real-time in-key collaboration. The instruments employ a variety of + sensors, sending the resulting musical controls to software sound generators via + MIDI. Careful consideration was given to the balance between freedom of expression, + and curating the possible sonic outcomes as adaptation. Evaluation of adapted + instrument design may differ greatly from frameworks for evaluating traditional + instruments or products intended for mass-market, though the results of such focused + and individualised design have a variety of possible applications.' + address: 'Birmingham, UK' + author: 'Jarvis Holland, Quinn D and Quartez, Crystal and Botello, Francisco and + Gammill, Nathan' + bibtex: "@inproceedings{NIME20_29,\n abstract = {Using open-source and creative\ + \ coding frameworks, a team of artist-engineers from Portland Community College\ + \ working with artists that experience Intellectual/Developmental disabilities\ + \ prototyped an ensemble of adapted instruments and synthesizers that facilitate\ + \ real-time in-key collaboration. 
The instruments employ a variety of sensors,\ + \ sending the resulting musical controls to software sound generators via MIDI.\ + \ Careful consideration was given to the balance between freedom of expression,\ + \ and curating the possible sonic outcomes as adaptation. Evaluation of adapted\ + \ instrument design may differ greatly from frameworks for evaluating traditional\ + \ instruments or products intended for mass-market, though the results of such\ + \ focused and individualised design have a variety of possible applications.},\n\ + \ address = {Birmingham, UK},\n author = {Jarvis Holland, Quinn D and Quartez,\ + \ Crystal and Botello, Francisco and Gammill, Nathan},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813286},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {149--153},\n publisher = {Birmingham\ + \ City University},\n title = {EXPANDING ACCESS TO MUSIC TECHNOLOGY- Rapid Prototyping\ + \ Accessible Instrument Solutions For Musicians With Intellectual Disabilities},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper29.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178093 + doi: 10.5281/zenodo.4813286 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'DJing, Multitouch Interaction, Expert User evaluation, HCI ' - pages: 367--372 - presentation-video: https://vimeo.com/26881380/ - title: 'Battle of the DJs: an HCI Perspective of Traditional, Virtual, Hybrid and - Multitouch DJing' - url: http://www.nime.org/proceedings/2011/nime2011_367.pdf - year: 2011 + month: July + pages: 149--153 + publisher: Birmingham City University + title: EXPANDING ACCESS TO MUSIC TECHNOLOGY- Rapid Prototyping Accessible Instrument + Solutions For Musicians With Intellectual 
Disabilities + url: https://www.nime.org/proceedings/2020/nime2020_paper29.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: MarquezBorbon2011 - abstract: 'As NIME''s focus has expanded beyond the design reportswhich were pervasive - in the early days to include studies andexperiments involving music control devices, - we report on aparticular area of activity that has been overlooked: designsof - music devices in experimental contexts. We demonstratethis is distinct from designing - for artistic performances, witha unique set of novel challenges. A survey of methodologicalapproaches - to experiments in NIME reveals a tendency torely on existing instruments or evaluations - of new devicesdesigned for broader creative application. We present twoexamples - from our own studies that reveal the merits ofdesigning purpose-built devices - for experimental contexts.' - address: 'Oslo, Norway' - author: 'Marquez-Borbon, Adnan and Gurevich, Michael and Fyans, A. Cavan and Stapleton, - Paul' - bibtex: "@inproceedings{MarquezBorbon2011,\n abstract = {As NIME's focus has expanded\ - \ beyond the design reportswhich were pervasive in the early days to include studies\ - \ andexperiments involving music control devices, we report on aparticular area\ - \ of activity that has been overlooked: designsof music devices in experimental\ - \ contexts. We demonstratethis is distinct from designing for artistic performances,\ - \ witha unique set of novel challenges. A survey of methodologicalapproaches to\ - \ experiments in NIME reveals a tendency torely on existing instruments or evaluations\ - \ of new devicesdesigned for broader creative application. We present twoexamples\ - \ from our own studies that reveal the merits ofdesigning purpose-built devices\ - \ for experimental contexts.},\n address = {Oslo, Norway},\n author = {Marquez-Borbon,\ - \ Adnan and Gurevich, Michael and Fyans, A. 
Cavan and Stapleton, Paul},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178099},\n issn = {2220-4806},\n keywords\ - \ = {Experiment, Methodology, Instrument Design, DMIs },\n pages = {373--376},\n\ - \ presentation-video = {https://vimeo.com/26882375/},\n title = {Designing Digital\ - \ Musical Interactions in Experimental Contexts},\n url = {http://www.nime.org/proceedings/2011/nime2011_373.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_3 + abstract: 'Non-rigid interfaces allow for exploring new interactive paradigms that + rely on deformable input and shape change, and whose possible applications span + several branches of human-computer interaction (HCI). While extensively explored + as deformable game controllers, bendable smartphones, and shape-changing displays, + non-rigid interfaces are rarely framed in a musical context, and their use for + composition and performance is rather sparse and unsystematic. With this work, + we start a systematic exploration of this relatively uncharted research area, + by means of (1) briefly reviewing existing musical interfaces that capitalize + on deformable input,and (2) surveying 11 among experts and pioneers in the field + about their experience with and vision on non-rigid musical interfaces.Based on + experts’ input, we suggest possible next steps of musical appropriation with deformable + and shape-changing technologies.We conclude by discussing how cross-overs between + NIME and HCI research will benefit non-rigid interfaces.' + address: 'Birmingham, UK' + author: 'Boem, Alberto and Troiano, Giovanni M and and Lepri, Giacomo and Zappi, + Victor' + bibtex: "@inproceedings{NIME20_3,\n abstract = {Non-rigid interfaces allow for exploring\ + \ new interactive paradigms that rely on deformable input and shape change, and\ + \ whose possible applications span several branches of human-computer interaction\ + \ (HCI). 
While extensively explored as deformable game controllers, bendable smartphones,\ + \ and shape-changing displays, non-rigid interfaces are rarely framed in a musical\ + \ context, and their use for composition and performance is rather sparse and\ + \ unsystematic. With this work, we start a systematic exploration of this relatively\ + \ uncharted research area, by means of (1) briefly reviewing existing musical\ + \ interfaces that capitalize on deformable input,and (2) surveying 11 among experts\ + \ and pioneers in the field about their experience with and vision on non-rigid\ + \ musical interfaces.Based on experts’ input, we suggest possible next steps of\ + \ musical appropriation with deformable and shape-changing technologies.We conclude\ + \ by discussing how cross-overs between NIME and HCI research will benefit non-rigid\ + \ interfaces.},\n address = {Birmingham, UK},\n author = {Boem, Alberto and Troiano,\ + \ Giovanni M and and Lepri, Giacomo and Zappi, Victor},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813288},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {17--22},\n presentation-video\ + \ = {https://youtu.be/o4CuAglHvf4},\n publisher = {Birmingham City University},\n\ + \ title = {Non-Rigid Musical Interfaces: Exploring Practices, Takes, and Future\ + \ Perspective},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper3.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178099 + doi: 10.5281/zenodo.4813288 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Experiment, Methodology, Instrument Design, DMIs ' - pages: 373--376 - presentation-video: https://vimeo.com/26882375/ - title: Designing Digital Musical Interactions in Experimental Contexts - url: 
http://www.nime.org/proceedings/2011/nime2011_373.pdf - year: 2011 + month: July + pages: 17--22 + presentation-video: https://youtu.be/o4CuAglHvf4 + publisher: Birmingham City University + title: 'Non-Rigid Musical Interfaces: Exploring Practices, Takes, and Future Perspective' + url: https://www.nime.org/proceedings/2020/nime2020_paper3.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Reus2011 - abstract: 'This paper describes the design of Crackle, a interactivesound and touch - experience inspired by the CrackleBox.We begin by describing a ruleset for Crackle''s - interactionderived from the salient interactive qualities of the CrackleBox. An - implementation strategy is then described forrealizing the ruleset as an application - for the iPhone. Thepaper goes on to consider the potential of using Crackleas - an encapsulated interaction paradigm for exploring arbitrary sound spaces, and - concludes with lessons learned ondesigning for multitouch surfaces as expressive - input sensors.' - address: 'Oslo, Norway' - author: 'Reus, Jonathan' - bibtex: "@inproceedings{Reus2011,\n abstract = {This paper describes the design\ - \ of Crackle, a interactivesound and touch experience inspired by the CrackleBox.We\ - \ begin by describing a ruleset for Crackle's interactionderived from the salient\ - \ interactive qualities of the CrackleBox. An implementation strategy is then\ - \ described forrealizing the ruleset as an application for the iPhone. 
Thepaper\ - \ goes on to consider the potential of using Crackleas an encapsulated interaction\ - \ paradigm for exploring arbitrary sound spaces, and concludes with lessons learned\ - \ ondesigning for multitouch surfaces as expressive input sensors.},\n address\ - \ = {Oslo, Norway},\n author = {Reus, Jonathan},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178143},\n issn = {2220-4806},\n keywords = {touchscreen,\ - \ interface topology, mobile music, interaction paradigm, dynamic mapping, CrackleBox,\ - \ iPhone },\n pages = {377--380},\n presentation-video = {https://vimeo.com/26882621/},\n\ - \ title = {Crackle: A Dynamic Mobile Multitouch Topology for Exploratory Sound\ - \ Interaction},\n url = {http://www.nime.org/proceedings/2011/nime2011_377.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_30 + abstract: 'Despite a history spanning nearly 30 years, best practices for the use + of virtual reality (VR) in computer music performance remain exploratory. Here, + we present a case study of a laptop orchestra performance entitled Resilience, + involving one VR performer and an ensemble of instrumental performers, in order + to explore values and design principles for incorporating this emerging technology + into computer music performance. We present a brief history at the intersection + of VR and the laptop orchestra. We then present the design of the piece and distill + it into a set of design principles. Broadly, these design principles address the + interplay between the different conflicting perspectives at play: those of the + VR performer, the ensemble, and the audience. For example, one principle suggests + that the perceptual link between the physical and virtual world maybe enhanced + for the audience by improving the performers'' sense of embodiment. 
We argue that + these design principles are a form of generalized knowledge about how we might + design laptop orchestra pieces involving virtual reality.' + address: 'Birmingham, UK' + author: 'Atherton, Jack and Wang, Ge' + bibtex: "@inproceedings{NIME20_30,\n abstract = {Despite a history spanning nearly\ + \ 30 years, best practices for the use of virtual reality (VR) in computer music\ + \ performance remain exploratory. Here, we present a case study of a laptop orchestra\ + \ performance entitled Resilience, involving one VR performer and an ensemble\ + \ of instrumental performers, in order to explore values and design principles\ + \ for incorporating this emerging technology into computer music performance.\ + \ We present a brief history at the intersection of VR and the laptop orchestra.\ + \ We then present the design of the piece and distill it into a set of design\ + \ principles. Broadly, these design principles address the interplay between the\ + \ different conflicting perspectives at play: those of the VR performer, the ensemble,\ + \ and the audience. For example, one principle suggests that the perceptual link\ + \ between the physical and virtual world maybe enhanced for the audience by improving\ + \ the performers' sense of embodiment. 
We argue that these design principles are\ + \ a form of generalized knowledge about how we might design laptop orchestra pieces\ + \ involving virtual reality.},\n address = {Birmingham, UK},\n author = {Atherton,\ + \ Jack and Wang, Ge},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813290},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {154--159},\n presentation-video = {https://youtu.be/tmeDO5hg56Y},\n\ + \ publisher = {Birmingham City University},\n title = {Curating Perspectives:\ + \ Incorporating Virtual Reality into Laptop Orchestra Performance},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper30.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178143 + doi: 10.5281/zenodo.4813290 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'touchscreen, interface topology, mobile music, interaction paradigm, - dynamic mapping, CrackleBox, iPhone ' - pages: 377--380 - presentation-video: https://vimeo.com/26882621/ - title: 'Crackle: A Dynamic Mobile Multitouch Topology for Exploratory Sound Interaction' - url: http://www.nime.org/proceedings/2011/nime2011_377.pdf - year: 2011 + month: July + pages: 154--159 + presentation-video: https://youtu.be/tmeDO5hg56Y + publisher: Birmingham City University + title: 'Curating Perspectives: Incorporating Virtual Reality into Laptop Orchestra + Performance' + url: https://www.nime.org/proceedings/2020/nime2020_paper30.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Aaron2011 - abstract: 'This paper introduces Improcess, a novel cross-disciplinarycollaborative - project focussed on the design and development of tools to structure the communication - between performer and musical process. 
We describe a 3-tiered architecture centering - around the notion of a Common MusicRuntime, a shared platform on top of which - inter-operatingclient interfaces may be combined to form new musical instruments. - This approach allows hardware devices such asthe monome to act as an extended - hardware interface withthe same power to initiate and control musical processesas - a bespoke programming language. Finally, we reflect onthe structure of the collaborative - project itself, which offers an opportunity to discuss general research strategy - forconducting highly sophisticated technical research within aperforming arts - environment such as the development of apersonal regime of preparation for performance.' - address: 'Oslo, Norway' - author: 'Aaron, Samuel and Blackwell, Alan and Hoadley, Richard and Regan, Tim' - bibtex: "@inproceedings{Aaron2011,\n abstract = {This paper introduces Improcess,\ - \ a novel cross-disciplinarycollaborative project focussed on the design and development\ - \ of tools to structure the communication between performer and musical process.\ - \ We describe a 3-tiered architecture centering around the notion of a Common\ - \ MusicRuntime, a shared platform on top of which inter-operatingclient interfaces\ - \ may be combined to form new musical instruments. 
This approach allows hardware\ - \ devices such asthe monome to act as an extended hardware interface withthe same\ - \ power to initiate and control musical processesas a bespoke programming language.\ - \ Finally, we reflect onthe structure of the collaborative project itself, which\ - \ offers an opportunity to discuss general research strategy forconducting highly\ - \ sophisticated technical research within aperforming arts environment such as\ - \ the development of apersonal regime of preparation for performance.},\n address\ - \ = {Oslo, Norway},\n author = {Aaron, Samuel and Blackwell, Alan and Hoadley,\ - \ Richard and Regan, Tim},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177935},\n\ - \ issn = {2220-4806},\n keywords = {Improvisation, live coding, controllers, monome,\ - \ collaboration, concurrency, abstractions },\n pages = {381--386},\n presentation-video\ - \ = {https://vimeo.com/26905683/},\n title = {A Principled Approach to Developing\ - \ New Languages for Live Coding},\n url = {http://www.nime.org/proceedings/2011/nime2011_381.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_31 + abstract: 'So far, NIME research has been mostly inward-looking, dedicated to divulging + and studying our own work and having limited engagement with trends outside our + community. Though musical instruments as cultural artefacts are inherently political, + we have so far not sufficiently engaged with confronting these themes in our own + research. In this paper we argue that we should consider how our work is also + political, and begin to develop a clear political agenda that includes social, + ethical, and cultural considerations through which to consider not only our own + musical instruments, but also those not created by us. Failing to do so would + result in an unintentional but tacit acceptance and support of such ideologies. 
+ We explore one item to be included in this political agenda: the recent trend + in music technology of ``democratising music'''', which carries implicit political + ideologies grounded in techno-solutionism. We conclude with a number of recommendations + for stimulating community-wide discussion on these themes in the hope that this + leads to the development of an outward-facing perspective that fully engages with + political topics.' + address: 'Birmingham, UK' + author: 'Morreale, Fabio and Bin, S. M. Astrid and McPherson, Andrew and Stapleton, + Paul and Wanderley, Marcelo' + bibtex: "@inproceedings{NIME20_31,\n abstract = {So far, NIME research has been\ + \ mostly inward-looking, dedicated to divulging and studying our own work and\ + \ having limited engagement with trends outside our community. Though musical\ + \ instruments as cultural artefacts are inherently political, we have so far not\ + \ sufficiently engaged with confronting these themes in our own research. In this\ + \ paper we argue that we should consider how our work is also political, and begin\ + \ to develop a clear political agenda that includes social, ethical, and cultural\ + \ considerations through which to consider not only our own musical instruments,\ + \ but also those not created by us. Failing to do so would result in an unintentional\ + \ but tacit acceptance and support of such ideologies. We explore one item to\ + \ be included in this political agenda: the recent trend in music technology of\ + \ ``democratising music'', which carries implicit political ideologies grounded\ + \ in techno-solutionism. We conclude with a number of recommendations for stimulating\ + \ community-wide discussion on these themes in the hope that this leads to the\ + \ development of an outward-facing perspective that fully engages with political\ + \ topics.},\n address = {Birmingham, UK},\n author = {Morreale, Fabio and Bin,\ + \ S. M. 
Astrid and McPherson, Andrew and Stapleton, Paul and Wanderley, Marcelo},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813294},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {160--165},\n presentation-video = {https://youtu.be/y2iDN24ZLTg},\n publisher\ + \ = {Birmingham City University},\n title = {A NIME Of The Times: Developing an\ + \ Outward-Looking Political Agenda For This Community},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper31.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177935 + doi: 10.5281/zenodo.4813294 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Improvisation, live coding, controllers, monome, collaboration, concurrency, - abstractions ' - pages: 381--386 - presentation-video: https://vimeo.com/26905683/ - title: A Principled Approach to Developing New Languages for Live Coding - url: http://www.nime.org/proceedings/2011/nime2011_381.pdf - year: 2011 + month: July + pages: 160--165 + presentation-video: https://youtu.be/y2iDN24ZLTg + publisher: Birmingham City University + title: 'A NIME Of The Times: Developing an Outward-Looking Political Agenda For + This Community' + url: https://www.nime.org/proceedings/2020/nime2020_paper31.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Bullock2011 - address: 'Oslo, Norway' - author: 'Bullock, Jamie and Beattie, Daniel and Turner, Jerome' - bibtex: "@inproceedings{Bullock2011,\n address = {Oslo, Norway},\n author = {Bullock,\ - \ Jamie and Beattie, Daniel and Turner, Jerome},\n booktitle = {Proceedings of\ + ID: NIME20_32 + abstract: 'We present TRAVIS II, an augmented acoustic violin with touch sensors + integrated into its 3D printed fingerboard that track left-hand finger gestures + in real 
time. The fingerboard has four strips of conductive PLA filament which + produce an electric signal when fingers press down on each string. While these + sensors are physically robust, they are mechanically assembled and thus easy to + replace if damaged. The performer can also trigger presets via four FSRs attached + to the body of the violin. The instrument is completely wireless, giving the performer + the freedom to move throughout the performance space. While the sensing fingerboard + is installed in place of the traditional fingerboard, all other electronics can + be removed from the augmented instrument, maintaining the aesthetics of a traditional + violin. Our design allows violinists to naturally create music for interactive + performance and improvisation without requiring new instrumental techniques. In + this paper, we describe the design of the instrument, experiments leading to the + sensing fingerboard, and performative applications of the instrument.' + address: 'Birmingham, UK' + author: 'Ko, Chantelle L and Oehlberg, Lora' + bibtex: "@inproceedings{NIME20_32,\n abstract = {We present TRAVIS II, an augmented\ + \ acoustic violin with touch sensors integrated into its 3D printed fingerboard\ + \ that track left-hand finger gestures in real time. The fingerboard has four\ + \ strips of conductive PLA filament which produce an electric signal when fingers\ + \ press down on each string. While these sensors are physically robust, they are\ + \ mechanically assembled and thus easy to replace if damaged. The performer can\ + \ also trigger presets via four FSRs attached to the body of the violin. The instrument\ + \ is completely wireless, giving the performer the freedom to move throughout\ + \ the performance space. While the sensing fingerboard is installed in place of\ + \ the traditional fingerboard, all other electronics can be removed from the augmented\ + \ instrument, maintaining the aesthetics of a traditional violin. 
Our design allows\ + \ violinists to naturally create music for interactive performance and improvisation\ + \ without requiring new instrumental techniques. In this paper, we describe the\ + \ design of the instrument, experiments leading to the sensing fingerboard, and\ + \ performative applications of the instrument.},\n address = {Birmingham, UK},\n\ + \ author = {Ko, Chantelle L and Oehlberg, Lora},\n booktitle = {Proceedings of\ \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177973},\n issn = {2220-4806},\n keywords = {live electronics,software,usability,user\ - \ experience},\n pages = {387--392},\n presentation-video = {https://vimeo.com/26906574/},\n\ - \ title = {Integra Live : a New Graphical User Interface for Live Electronic Music},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_387.pdf},\n year = {2011}\n\ - }\n" + \ = {10.5281/zenodo.4813300},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {166--171},\n presentation-video\ + \ = {https://youtu.be/XIAd_dr9PHE},\n publisher = {Birmingham City University},\n\ + \ title = {Touch Responsive Augmented Violin Interface System II: Integrating\ + \ Sensors into a 3D Printed Fingerboard},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper32.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177973 + doi: 10.5281/zenodo.4813300 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'live electronics,software,usability,user experience' - pages: 387--392 - presentation-video: https://vimeo.com/26906574/ - title: 'Integra Live : a New Graphical User Interface for Live Electronic Music' - url: http://www.nime.org/proceedings/2011/nime2011_387.pdf - year: 2011 + month: July + pages: 166--171 + presentation-video: https://youtu.be/XIAd_dr9PHE + publisher: 
Birmingham City University + title: 'Touch Responsive Augmented Violin Interface System II: Integrating Sensors + into a 3D Printed Fingerboard' + url: https://www.nime.org/proceedings/2020/nime2020_paper32.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Roh2011 - abstract: 'The design space of fabric multitouch surface interaction is explored - with emphasis on novel materials and construction techniques aimed towards reliable, - repairable pressure sensing surfaces for musical applications. ' - address: 'Oslo, Norway' - author: 'Roh, Jung-Sim and Mann, Yotam and Freed, Adrian and Wessel, David' - bibtex: "@inproceedings{Roh2011,\n abstract = {The design space of fabric multitouch\ - \ surface interaction is explored with emphasis on novel materials and construction\ - \ techniques aimed towards reliable, repairable pressure sensing surfaces for\ - \ musical applications. },\n address = {Oslo, Norway},\n author = {Roh, Jung-Sim\ - \ and Mann, Yotam and Freed, Adrian and Wessel, David},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178145},\n issn = {2220-4806},\n keywords = {Multitouch,\ - \ surface interaction, piezoresistive, fabric sensor, e-textiles, tangible computing,\ - \ drum controller },\n pages = {393--398},\n presentation-video = {https://vimeo.com/26906580/},\n\ - \ title = {Robust and Reliable Fabric, Piezoresistive Multitouch Sensing Surfaces\ - \ for Musical Controllers},\n url = {http://www.nime.org/proceedings/2011/nime2011_393.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_33 + abstract: 'The expressive control of sound and music through body movements is well-studied. For + some people, body movement is demanding, and although they would prefer to express + themselves freely using gestural control, they are unable to use such interfaces + without difficulty. 
In this paper, we present the P(l)aying Attention framework + for manipulating recorded music to support these people, and to help the therapists + that work with them. The aim is to facilitate body awareness, exploration, and + expressivity by allowing the manipulation of a pre-recorded ‘ensemble’ through + an interpretation of body movement, provided by a machine-learning system trained + on physiotherapist assessments and movement data from people with chronic pain. The + system considers the nature of a person’s movement (e.g. protective) and offers + an interpretation in terms of the joint-groups that are playing a major role in + the determination at that point in the movement, and to which attention should + perhaps be given (or the opposite at the user’s discretion). Using music to convey + the interpretation offers informational (through movement sonification) and creative + (through manipulating the ensemble by movement) possibilities. The approach offers + the opportunity to explore movement and music at multiple timescales and under + varying musical aesthetics.' + address: 'Birmingham, UK' + author: 'Gold, Nicolas E and Wang, Chongyang and Olugbade, Temitayo and Berthouze, + Nadia and Williams, Amanda' + bibtex: "@inproceedings{NIME20_33,\n abstract = {The expressive control of sound\ + \ and music through body movements is well-studied. For some people, body movement\ + \ is demanding, and although they would prefer to express themselves freely using\ + \ gestural control, they are unable to use such interfaces without difficulty.\ + \ In this paper, we present the P(l)aying Attention framework for manipulating\ + \ recorded music to support these people, and to help the therapists that work\ + \ with them. 
The aim is to facilitate body awareness, exploration, and expressivity\ + \ by allowing the manipulation of a pre-recorded ‘ensemble’ through an interpretation\ + \ of body movement, provided by a machine-learning system trained on physiotherapist\ + \ assessments and movement data from people with chronic pain. The system considers\ + \ the nature of a person’s movement (e.g. protective) and offers an interpretation\ + \ in terms of the joint-groups that are playing a major role in the determination\ + \ at that point in the movement, and to which attention should perhaps be given\ + \ (or the opposite at the user’s discretion). Using music to convey the interpretation\ + \ offers informational (through movement sonification) and creative (through manipulating\ + \ the ensemble by movement) possibilities. The approach offers the opportunity\ + \ to explore movement and music at multiple timescales and under varying musical\ + \ aesthetics.},\n address = {Birmingham, UK},\n author = {Gold, Nicolas E and\ + \ Wang, Chongyang and Olugbade, Temitayo and Berthouze, Nadia and Williams, Amanda},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813303},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {172--175},\n publisher = {Birmingham City University},\n title = {P(l)aying\ + \ Attention: Multi-modal, multi-temporal music control},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper33.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178145 + doi: 10.5281/zenodo.4813303 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Multitouch, surface interaction, piezoresistive, fabric sensor, e-textiles, - tangible computing, drum controller ' - pages: 393--398 - presentation-video: 
https://vimeo.com/26906580/ - title: 'Robust and Reliable Fabric, Piezoresistive Multitouch Sensing Surfaces for - Musical Controllers' - url: http://www.nime.org/proceedings/2011/nime2011_393.pdf - year: 2011 + month: July + pages: 172--175 + publisher: Birmingham City University + title: 'P(l)aying Attention: Multi-modal, multi-temporal music control' + url: https://www.nime.org/proceedings/2020/nime2020_paper33.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Marshall2011 - abstract: 'This paper deals with the effects of integrated vibrotactile feedback - on the "feel" of a digital musical instrument(DMI). Building on previous work - developing a DMI withintegrated vibrotactile feedback actuators, we discuss howto - produce instrument-like vibrations, compare these simulated vibrations with those - produced by an acoustic instrument and examine how the integration of this feedbackeffects - performer ratings of the instrument. We found thatintegrated vibrotactile feedback - resulted in an increase inperformer engagement with the instrument, but resulted - ina reduction in the perceived control of the instrument. Wediscuss these results - and their implications for the design ofnew digital musical instruments.' - address: 'Oslo, Norway' - author: 'Marshall, Mark T. and Wanderley, Marcelo M.' 
- bibtex: "@inproceedings{Marshall2011,\n abstract = {This paper deals with the effects\ - \ of integrated vibrotactile feedback on the \"feel\" of a digital musical instrument(DMI).\ - \ Building on previous work developing a DMI withintegrated vibrotactile feedback\ - \ actuators, we discuss howto produce instrument-like vibrations, compare these\ - \ simulated vibrations with those produced by an acoustic instrument and examine\ - \ how the integration of this feedbackeffects performer ratings of the instrument.\ - \ We found thatintegrated vibrotactile feedback resulted in an increase inperformer\ - \ engagement with the instrument, but resulted ina reduction in the perceived\ - \ control of the instrument. Wediscuss these results and their implications for\ - \ the design ofnew digital musical instruments.},\n address = {Oslo, Norway},\n\ - \ author = {Marshall, Mark T. and Wanderley, Marcelo M.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178101},\n issn = {2220-4806},\n keywords = {Vibrotactile\ - \ Feedback, Digital Musical Instruments, Feel, Loudspeakers },\n pages = {399--404},\n\ - \ title = {Examining the Effects of Embedded Vibrotactile Feedback on the Feel\ - \ of a Digital Musical Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_399.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_34 + abstract: 'We present a musical interface specifically designed for inclusive performance + that offers a shared experience for both individuals who are deaf and hard of + hearing as well as those who are not. This interface borrows gestures (with or + without overt meaning) from American Sign Language (ASL), rendered using low-frequency + sounds that can be felt by everyone in the performance. The Deaf and Hard of Hearing + cannot experience the sound in the same way. 
Instead, they are able to physically + experience the vibrations, nuances, contours, as well as its correspondence with + the hand gestures. Those who are not hard of hearing can experience the sound, + but also feel it just the same, with the knowledge that the same physical vibrations + are shared by everyone. The employment of sign language adds another aesthetic + dimension to the instrument --a nuanced borrowing of a functional communication + medium for an artistic end. ' + address: 'Birmingham, UK' + author: 'Cavdir, Doga and Wang, Ge' + bibtex: "@inproceedings{NIME20_34,\n abstract = {We present a musical interface\ + \ specifically designed for inclusive performance that offers a shared experience\ + \ for both individuals who are deaf and hard of hearing as well as those who are\ + \ not. This interface borrows gestures (with or without overt meaning) from American\ + \ Sign Language (ASL), rendered using low-frequency sounds that can be felt by\ + \ everyone in the performance. The Deaf and Hard of Hearing cannot experience\ + \ the sound in the same way. Instead, they are able to physically experience the\ + \ vibrations, nuances, contours, as well as its correspondence with the hand gestures.\ + \ Those who are not hard of hearing can experience the sound, but also feel it\ + \ just the same, with the knowledge that the same physical vibrations are shared\ + \ by everyone. The employment of sign language adds another aesthetic dimension\ + \ to the instrument --a nuanced borrowing of a functional communication medium\ + \ for an artistic end. 
},\n address = {Birmingham, UK},\n author = {Cavdir, Doga\ + \ and Wang, Ge},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813305},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {176--181},\n presentation-video = {https://youtu.be/JCvlHu4UaZ0},\n\ + \ publisher = {Birmingham City University},\n title = {Felt Sound: A Shared Musical\ + \ Experience for the Deaf and Hard of Hearing},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper34.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178101 + doi: 10.5281/zenodo.4813305 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Vibrotactile Feedback, Digital Musical Instruments, Feel, Loudspeakers ' - pages: 399--404 - title: Examining the Effects of Embedded Vibrotactile Feedback on the Feel of a - Digital Musical Instrument - url: http://www.nime.org/proceedings/2011/nime2011_399.pdf - year: 2011 + month: July + pages: 176--181 + presentation-video: https://youtu.be/JCvlHu4UaZ0 + publisher: Birmingham City University + title: 'Felt Sound: A Shared Musical Experience for the Deaf and Hard of Hearing' + url: https://www.nime.org/proceedings/2020/nime2020_paper34.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Diakopoulos2011 - abstract: 'This paper presents a series of open-source firmwares for the latest - iteration of the popular Arduino microcontroller platform. A portmanteau of Human - Interface Device and Arduino, the HIDUINO project tackles a major problem in designing - NIMEs: easily and reliably communicating with a host computer using standard MIDI - over USB. 
HIDUINO was developed in conjunction with a class at the California - Institute of the Arts intended to teach introductory-level human-computer and - human-robot interaction within the context of musical controllers. We describe - our frustration with existing microcontroller platforms and our experiences using - the new firmware to facilitate the development and prototyping of new music controllers. ' - address: 'Oslo, Norway' - author: 'Diakopoulos, Dimitri and Kapur, Ajay' - bibtex: "@inproceedings{Diakopoulos2011,\n abstract = {This paper presents a series\ - \ of open-source firmwares for the latest iteration of the popular Arduino microcontroller\ - \ platform. A portmanteau of Human Interface Device and Arduino, the HIDUINO project\ - \ tackles a major problem in designing NIMEs: easily and reliably communicating\ - \ with a host computer using standard MIDI over USB. HIDUINO was developed in\ - \ conjunction with a class at the California Institute of the Arts intended to\ - \ teach introductory-level human-computer and human-robot interaction within the\ - \ context of musical controllers. We describe our frustration with existing microcontroller\ - \ platforms and our experiences using the new firmware to facilitate the development\ - \ and prototyping of new music controllers. 
},\n address = {Oslo, Norway},\n author\ - \ = {Diakopoulos, Dimitri and Kapur, Ajay},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177995},\n issn = {2220-4806},\n keywords = {Arduino, USB,\ - \ HID, MIDI, HCI, controllers, microcontrollers },\n pages = {405--408},\n presentation-video\ - \ = {https://vimeo.com/26908264/},\n title = {HIDUINO : A firmware for building\ - \ driverless {USB}-MIDI devices using the Arduino microcontroller},\n url = {http://www.nime.org/proceedings/2011/nime2011_405.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_35 + abstract: 'This paper examines the use of Sound Sensors and audio as input material + for New Interfaces for Musical Expression (NIMEs), exploring the unique affordances + and character of the interactions and instruments that leverage it. Examples of + previous work in the literature that use audio as sensor input data are examined + for insights into how the use of Sound Sensors provides unique opportunities within + the NIME context. We present the results of a user study comparing sound-based + sensors to other sensing modalities within the context of controlling parameters. The + study suggests that the use of Sound Sensors can enhance gestural flexibility + and nuance but that they also present challenges in accuracy and repeatability.' + address: 'Birmingham, UK' + author: 'Leitman, Sasha' + bibtex: "@inproceedings{NIME20_35,\n abstract = {This paper examines the use of\ + \ Sound Sensors and audio as input material for New Interfaces for Musical Expression\ + \ (NIMEs), exploring the unique affordances and character of the interactions\ + \ and instruments that leverage it. Examples of previous work in the literature\ + \ that use audio as sensor input data are examined for insights into how the use\ + \ of Sound Sensors provides unique opportunities within the NIME context. 
We\ + \ present the results of a user study comparing sound-based sensors to other sensing\ + \ modalities within the context of controlling parameters. The study suggests\ + \ that the use of Sound Sensors can enhance gestural flexibility and nuance but\ + \ that they also present challenges in accuracy and repeatability.},\n address\ + \ = {Birmingham, UK},\n author = {Leitman, Sasha},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813309},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {182--187},\n publisher = {Birmingham\ + \ City University},\n title = {Sound Based Sensors for NIMEs},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper35.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177995 + doi: 10.5281/zenodo.4813309 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Arduino, USB, HID, MIDI, HCI, controllers, microcontrollers ' - pages: 405--408 - presentation-video: https://vimeo.com/26908264/ - title: 'HIDUINO : A firmware for building driverless USB-MIDI devices using the - Arduino microcontroller' - url: http://www.nime.org/proceedings/2011/nime2011_405.pdf - year: 2011 + month: July + pages: 182--187 + publisher: Birmingham City University + title: Sound Based Sensors for NIMEs + url: https://www.nime.org/proceedings/2020/nime2020_paper35.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Flety2011 - abstract: 'We present a strategy for the improvement of wireless sensor data transmission - latency, implemented in two current projects involving gesture/control sound interaction. - Our platform was designed to be capable of accepting accessories using a digital - bus. 
The receiver features a IEEE 802.15.4 microcontroller associated to a TCP/IP - stack integrated circuit that transmits the received wireless data to a host computer - using the Open Sound Control protocol. This paper details how we improved the - latency and sample rate of the said technology while keeping the device small - and scalable. ' - address: 'Oslo, Norway' - author: 'Fléty, Emmanuel and Maestracci, Côme' - bibtex: "@inproceedings{Flety2011,\n abstract = {We present a strategy for the improvement\ - \ of wireless sensor data transmission latency, implemented in two current projects\ - \ involving gesture/control sound interaction. Our platform was designed to be\ - \ capable of accepting accessories using a digital bus. The receiver features\ - \ a IEEE 802.15.4 microcontroller associated to a TCP/IP stack integrated circuit\ - \ that transmits the received wireless data to a host computer using the Open\ - \ Sound Control protocol. This paper details how we improved the latency and sample\ - \ rate of the said technology while keeping the device small and scalable. },\n\ - \ address = {Oslo, Norway},\n author = {Fl\\'{e}ty, Emmanuel and Maestracci, C{\\\ - ^o}me},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178009},\n issn = {2220-4806},\n\ - \ keywords = {Embedded sensors, gesture recognition, wireless, sound and music\ - \ computing, interaction, 802.15.4, Zigbee. },\n pages = {409--412},\n presentation-video\ - \ = {https://vimeo.com/26908266/},\n title = {Latency Improvement in Sensor Wireless\ - \ Transmission Using {IEEE} 802.15.4},\n url = {http://www.nime.org/proceedings/2011/nime2011_409.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_36 + abstract: 'This paper presents a novel interactive system for creating audio-visual + expressions on tabletop display by dynamically manipulating solids of revolution + called spheroids. 
The four types of basic spinning and rolling movements of spheroids + are recognized from the physical conditions such as the contact area, the location + of the centroid, the (angular) velocity, and the curvature of the locus all obtained + from sensor data on the display. They are then used for interactively generating + audio-visual effects that match each of the movements. We developed a digital + content that integrated these functionalities and enabled composition and live + performance through manipulation of spheroids.' + address: 'Birmingham, UK' + author: 'Ikawa, Yuma and Matsuura, Akihiro' + bibtex: "@inproceedings{NIME20_36,\n abstract = {This paper presents a novel interactive\ + \ system for creating audio-visual expressions on tabletop display by dynamically\ + \ manipulating solids of revolution called spheroids. The four types of basic\ + \ spinning and rolling movements of spheroids are recognized from the physical\ + \ conditions such as the contact area, the location of the centroid, the (angular)\ + \ velocity, and the curvature of the locus all obtained from sensor data on the\ + \ display. They are then used for interactively generating audio-visual effects\ + \ that match each of the movements. 
We developed a digital content that integrated\ + \ these functionalities and enabled composition and live performance through manipulation\ + \ of spheroids.},\n address = {Birmingham, UK},\n author = {Ikawa, Yuma and Matsuura,\ + \ Akihiro},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813311},\n editor\ + \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ + \ pages = {188--189},\n publisher = {Birmingham City University},\n title = {Playful\ + \ Audio-Visual Interaction with Spheroids },\n url = {https://www.nime.org/proceedings/2020/nime2020_paper36.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178009 + doi: 10.5281/zenodo.4813311 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Embedded sensors, gesture recognition, wireless, sound and music computing, - interaction, 802.15.4, Zigbee. ' - pages: 409--412 - presentation-video: https://vimeo.com/26908266/ - title: Latency Improvement in Sensor Wireless Transmission Using IEEE 802.15.4 - url: http://www.nime.org/proceedings/2011/nime2011_409.pdf - year: 2011 + month: July + pages: 188--189 + publisher: Birmingham City University + title: 'Playful Audio-Visual Interaction with Spheroids ' + url: https://www.nime.org/proceedings/2020/nime2020_paper36.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Snyder2011 - abstract: 'The Snyderphonics Manta controller is a USB touch controller for music - and video. It features 48 capacitive touch sensors, arranged in a hexagonal grid, - with bi-color LEDs that are programmable from the computer. The sensors send continuous - data proportional to surface area touched, and a velocitydetection algorithm has - been implemented to estimate attack velocity based on this touch data. 
In addition - to these hexagonal sensors, the Manta has two high-dimension touch sliders (giving - 12-bit values), and four assignable function buttons. In this paper, I outline - the features of the controller, the available methods for communicating between - the device and a computer, and some current uses for the controller. ' - address: 'Oslo, Norway' - author: 'Snyder, Jeff' - bibtex: "@inproceedings{Snyder2011,\n abstract = {The Snyderphonics Manta controller\ - \ is a USB touch controller for music and video. It features 48 capacitive touch\ - \ sensors, arranged in a hexagonal grid, with bi-color LEDs that are programmable\ - \ from the computer. The sensors send continuous data proportional to surface\ - \ area touched, and a velocitydetection algorithm has been implemented to estimate\ - \ attack velocity based on this touch data. In addition to these hexagonal sensors,\ - \ the Manta has two high-dimension touch sliders (giving 12-bit values), and four\ - \ assignable function buttons. In this paper, I outline the features of the controller,\ - \ the available methods for communicating between the device and a computer, and\ - \ some current uses for the controller. 
},\n address = {Oslo, Norway},\n author\ - \ = {Snyder, Jeff},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178171},\n\ - \ issn = {2220-4806},\n keywords = {Snyderphonics, Manta, controller, USB, capacitive,\ - \ touch, sensor, decoupled LED, hexagon, grid, touch slider, HID, portable, wood,\ - \ live music, live video },\n pages = {413--416},\n presentation-video = {https://vimeo.com/26908273/},\n\ - \ title = {Snyderphonics Manta Controller, a Novel {USB} Touch-Controller},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_413.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_37 + abstract: 'This paper presents ARLooper, an augmented reality mobile interface that + allows multiple users to record sound and perform together in a shared AR space. + ARLooper is an attempt to explore the potential of collaborative mobile AR instruments + in supporting non-verbal communication for musical performances. With ARLooper, + the user can record, manipulate, and play sounds being visualized as 3D waveforms + in an AR space. ARLooper provides a shared AR environment wherein multiple users + can observe each other''s activities in real time, supporting increasing the understanding + of collaborative contexts. This paper provides the background of the research + and the design and technical implementation of ARLooper, followed by a user study.' + address: 'Birmingham, UK' + author: 'Park, Sihwa' + bibtex: "@inproceedings{NIME20_37,\n abstract = {This paper presents ARLooper, an\ + \ augmented reality mobile interface that allows multiple users to record sound\ + \ and perform together in a shared AR space. ARLooper is an attempt to explore\ + \ the potential of collaborative mobile AR instruments in supporting non-verbal\ + \ communication for musical performances. 
With ARLooper, the user can record,\ + \ manipulate, and play sounds being visualized as 3D waveforms in an AR space.\ + \ ARLooper provides a shared AR environment wherein multiple users can observe\ + \ each other's activities in real time, supporting increasing the understanding\ + \ of collaborative contexts. This paper provides the background of the research\ + \ and the design and technical implementation of ARLooper, followed by a user\ + \ study.},\n address = {Birmingham, UK},\n author = {Park, Sihwa},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813313},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {190--195},\n\ + \ presentation-video = {https://youtu.be/Trw4epKeUbM},\n publisher = {Birmingham\ + \ City University},\n title = {Collaborative Mobile Instruments in a Shared AR\ + \ Space: a Case of ARLooper},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper37.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178171 + doi: 10.5281/zenodo.4813313 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Snyderphonics, Manta, controller, USB, capacitive, touch, sensor, decoupled - LED, hexagon, grid, touch slider, HID, portable, wood, live music, live video ' - pages: 413--416 - presentation-video: https://vimeo.com/26908273/ - title: 'Snyderphonics Manta Controller, a Novel USB Touch-Controller' - url: http://www.nime.org/proceedings/2011/nime2011_413.pdf - year: 2011 + month: July + pages: 190--195 + presentation-video: https://youtu.be/Trw4epKeUbM + publisher: Birmingham City University + title: 'Collaborative Mobile Instruments in a Shared AR Space: a Case of ARLooper' + url: https://www.nime.org/proceedings/2020/nime2020_paper37.pdf + year: 2020 - ENTRYTYPE: inproceedings 
- ID: Hsu2011 - address: 'Oslo, Norway' - author: 'Hsu, William' - bibtex: "@inproceedings{Hsu2011,\n address = {Oslo, Norway},\n author = {Hsu, William},\n\ + ID: NIME20_38 + abstract: 'Expressive 2D multi-touch interfaces have in recent years moved from + research prototypes to industrial products, from repurposed generic computer input + devices to controllers specially designed for musical expression. A host of practicioners + use this type of devices in many different ways, with different gestures and sound + synthesis or transformation methods. In order to get an overview of existing and + desired usages, we launched an on-line survey that collected 37 answers from practicioners + in and outside of academic and design communities. In the survey we inquired about + the participants'' devices, their strengths and weaknesses, the layout of control + dimensions, the used gestures and mappings, the synthesis software or hardware + and the use of audio descriptors and machine learning. The results can inform + the design of future interfaces, gesture analysis and mapping, and give directions + for the need and use of machine learning for user adaptation.' + address: 'Birmingham, UK' + author: 'Schwarz, Diemo and Liu, Abby Wanyu and Bevilacqua, Frederic' + bibtex: "@inproceedings{NIME20_38,\n abstract = {Expressive 2D multi-touch interfaces\ + \ have in recent years moved from research prototypes to industrial products,\ + \ from repurposed generic computer input devices to controllers specially designed\ + \ for musical expression. A host of practicioners use this type of devices in\ + \ many different ways, with different gestures and sound synthesis or transformation\ + \ methods. In order to get an overview of existing and desired usages, we launched\ + \ an on-line survey that collected 37 answers from practicioners in and outside\ + \ of academic and design communities. 
In the survey we inquired about the participants'\ + \ devices, their strengths and weaknesses, the layout of control dimensions, the\ + \ used gestures and mappings, the synthesis software or hardware and the use of\ + \ audio descriptors and machine learning. The results can inform the design of\ + \ future interfaces, gesture analysis and mapping, and give directions for the\ + \ need and use of machine learning for user adaptation.},\n address = {Birmingham,\ + \ UK},\n author = {Schwarz, Diemo and Liu, Abby Wanyu and Bevilacqua, Frederic},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178047},\n issn = {2220-4806},\n\ - \ keywords = {animation,audio-visual,generative,improvisation,interactive},\n\ - \ pages = {417--420},\n title = {On Movement , Structure and Abstraction in Generative\ - \ Audiovisual Improvisation},\n url = {http://www.nime.org/proceedings/2011/nime2011_417.pdf},\n\ - \ year = {2011}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1178047 - issn: 2220-4806 - keywords: animation,audio-visual,generative,improvisation,interactive - pages: 417--420 - title: 'On Movement , Structure and Abstraction in Generative Audiovisual Improvisation' - url: http://www.nime.org/proceedings/2011/nime2011_417.pdf - year: 2011 - - -- ENTRYTYPE: inproceedings - ID: Angel2011 - abstract: "This paper deals with the usage of bio-data from performers to create\ - \ interactive multimedia performances or installations. 
It presents this type\ - \ of research in some art works produced in the last fifty years (such as Lucier's\ - \ Music for a Solo Performance, from 1965), including two interactive performances\ - \ of my ,\n,\nauthorship, which use two different types of bio-interfaces: on\ - \ the one hand, an EMG (Electromyography) and on the other hand, an EEG (electroencephalography).\ - \ The paper explores the interaction between the human body and real-time media\ - \ (audio and visual) by the usage of bio-interfaces. This research is based on\ - \ biofeedback investigations pursued by the psychologist Neal E. Miller in the\ - \ 1960s, mainly based on finding new methods to reduce stress. However, this article\ - \ explains and shows examples in which biofeedback research is used for artistic\ - \ purposes only. " - address: 'Oslo, Norway' - author: 'Angel, Claudia R.' - bibtex: "@inproceedings{Angel2011,\n abstract = {This paper deals with the usage\ - \ of bio-data from performers to create interactive multimedia performances or\ - \ installations. It presents this type of research in some art works produced\ - \ in the last fifty years (such as Lucier's Music for a Solo Performance, from\ - \ 1965), including two interactive performances of my ,\n,\nauthorship, which\ - \ use two different types of bio-interfaces: on the one hand, an EMG (Electromyography)\ - \ and on the other hand, an EEG (electroencephalography). The paper explores the\ - \ interaction between the human body and real-time media (audio and visual) by\ - \ the usage of bio-interfaces. This research is based on biofeedback investigations\ - \ pursued by the psychologist Neal E. Miller in the 1960s, mainly based on finding\ - \ new methods to reduce stress. However, this article explains and shows examples\ - \ in which biofeedback research is used for artistic purposes only. 
},\n address\ - \ = {Oslo, Norway},\n author = {Angel, Claudia R.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177943},\n issn = {2220-4806},\n keywords = {Live electronics,\ - \ Butoh, performance, biofeedback, interactive sound and video. },\n pages = {421--424},\n\ - \ title = {Creating Interactive Multimedia Works with Bio-data},\n url = {http://www.nime.org/proceedings/2011/nime2011_421.pdf},\n\ - \ year = {2011}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.4813318},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {196--201},\n presentation-video = {https://youtu.be/eE8I3mecaB8},\n publisher\ + \ = {Birmingham City University},\n title = {A Survey on the Use of 2D Touch Interfaces\ + \ for Musical Expression},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper38.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177943 + doi: 10.5281/zenodo.4813318 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Live electronics, Butoh, performance, biofeedback, interactive sound - and video. ' - pages: 421--424 - title: Creating Interactive Multimedia Works with Bio-data - url: http://www.nime.org/proceedings/2011/nime2011_421.pdf - year: 2011 + month: July + pages: 196--201 + presentation-video: https://youtu.be/eE8I3mecaB8 + publisher: Birmingham City University + title: A Survey on the Use of 2D Touch Interfaces for Musical Expression + url: https://www.nime.org/proceedings/2020/nime2020_paper38.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Ustarroz2011 - abstract: 'TresnaNet explores the potential of Telematics as a generator ofmusical - expressions. 
I pretend to sound the silent flow ofinformation from the network.This - is realized through the fabrication of a prototypefollowing the intention of giving - substance to the intangibleparameters of our communication. The result may haveeducational, - commercial and artistic applications because it is aphysical and perceptible representation - of the transfer ofinformation over the network. This paper describes the design,implementation - and conclusions about TresnaNet.' - address: 'Oslo, Norway' - author: 'Ustarroz, Paula' - bibtex: "@inproceedings{Ustarroz2011,\n abstract = {TresnaNet explores the potential\ - \ of Telematics as a generator ofmusical expressions. I pretend to sound the silent\ - \ flow ofinformation from the network.This is realized through the fabrication\ - \ of a prototypefollowing the intention of giving substance to the intangibleparameters\ - \ of our communication. The result may haveeducational, commercial and artistic\ - \ applications because it is aphysical and perceptible representation of the transfer\ - \ ofinformation over the network. This paper describes the design,implementation\ - \ and conclusions about TresnaNet.},\n address = {Oslo, Norway},\n author = {Ustarroz,\ - \ Paula},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178181},\n issn = {2220-4806},\n\ - \ keywords = {Interface, musical generation, telematics, network, musical instrument,\ - \ network sniffer. },\n pages = {425--428},\n title = {TresnaNet Musical Generation\ - \ based on Network Protocols},\n url = {http://www.nime.org/proceedings/2011/nime2011_425.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_39 + abstract: 'General-Purpose GPU computing is becoming an increasingly viable option + for acceleration, including in the audio domain. 
Although it can improve performance, + the intrinsic nature of a device like the GPU involves data transfers and execution + commands which requires time to complete. Therefore, there is an understandable + caution concerning the overhead involved with using the GPU for audio computation. + This paper aims to clarify the limitations by presenting a performance benchmarking + suite. The benchmarks utilize OpenCL and CUDA across various tests to highlight + the considerations and limitations of processing audio in the GPU environment. + The benchmarking suite has been used to gather a collection of results across + various hardware. Salient results have been reviewed in order to highlight the + benefits and limitations of the GPU for digital audio. The results in this work + show that the minimal GPU overhead fits into the real-time audio requirements + provided the buffer size is selected carefully. The baseline overhead is shown + to be roughly 0.1ms, depending on the GPU. This means buffer sizes 8 and above + are completed within the allocated time frame. Results from more demanding tests, + involving physical modelling synthesis, demonstrated a balance was needed between + meeting the sample rate and keeping within limits for latency and jitter. Buffer + sizes from 1 to 16 failed to sustain the sample rate whilst buffer sizes 512 to + 32768 exceeded either latency or jitter limits. Buffer sizes in between these + ranges, such as 256, satisfied the sample rate, latency and jitter requirements + chosen for this paper.' + address: 'Birmingham, UK' + author: 'Renney, Harri L and Mitchell, Tom and Gaster, Benedict' + bibtex: "@inproceedings{NIME20_39,\n abstract = {General-Purpose GPU computing is\ + \ becoming an increasingly viable option for acceleration, including in the audio\ + \ domain. Although it can improve performance, the intrinsic nature of a device\ + \ like the GPU involves data transfers and execution commands which requires time\ + \ to complete. 
Therefore, there is an understandable caution concerning the overhead\ + \ involved with using the GPU for audio computation. This paper aims to clarify\ + \ the limitations by presenting a performance benchmarking suite. The benchmarks\ + \ utilize OpenCL and CUDA across various tests to highlight the considerations\ + \ and limitations of processing audio in the GPU environment. The benchmarking\ + \ suite has been used to gather a collection of results across various hardware.\ + \ Salient results have been reviewed in order to highlight the benefits and limitations\ + \ of the GPU for digital audio. The results in this work show that the minimal\ + \ GPU overhead fits into the real-time audio requirements provided the buffer\ + \ size is selected carefully. The baseline overhead is shown to be roughly 0.1ms,\ + \ depending on the GPU. This means buffer sizes 8 and above are completed within\ + \ the allocated time frame. Results from more demanding tests, involving physical\ + \ modelling synthesis, demonstrated a balance was needed between meeting the sample\ + \ rate and keeping within limits for latency and jitter. Buffer sizes from 1 to\ + \ 16 failed to sustain the sample rate whilst buffer sizes 512 to 32768 exceeded\ + \ either latency or jitter limits. 
Buffer sizes in between these ranges, such\ + \ as 256, satisfied the sample rate, latency and jitter requirements chosen for\ + \ this paper.},\n address = {Birmingham, UK},\n author = {Renney, Harri L and\ + \ Mitchell, Tom and Gaster, Benedict},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813320},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {202--207},\n presentation-video = {https://youtu.be/xAVEHJZRIx0},\n\ + \ publisher = {Birmingham City University},\n title = {There and Back Again: The\ + \ Practicality of GPU Accelerated Digital Audio},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper39.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178181 + doi: 10.5281/zenodo.4813320 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Interface, musical generation, telematics, network, musical instrument, - network sniffer. 
' - pages: 425--428 - title: TresnaNet Musical Generation based on Network Protocols - url: http://www.nime.org/proceedings/2011/nime2011_425.pdf - year: 2011 + month: July + pages: 202--207 + presentation-video: https://youtu.be/xAVEHJZRIx0 + publisher: Birmingham City University + title: 'There and Back Again: The Practicality of GPU Accelerated Digital Audio' + url: https://www.nime.org/proceedings/2020/nime2020_paper39.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Luhtala2011 - address: 'Oslo, Norway' - author: 'Luhtala, Matti and Kym\''''{a}l\''''{a}inen, Tiina and Plomp, Johan' - bibtex: "@inproceedings{Luhtala2011,\n address = {Oslo, Norway},\n author = {Luhtala,\ - \ Matti and Kym\\''{a}l\\''{a}inen, Tiina and Plomp, Johan},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178095},\n issn = {2220-4806},\n keywords = {Music interfaces,\ - \ music therapy, modifiable interfaces, design tools, Human-Technology Interaction\ - \ (HTI), User-Centred Design (UCD), design for all (DfA), prototyping, performance.\ - \ },\n pages = {429--432},\n title = {Designing a Music Performance Space for\ - \ Persons with Intellectual Learning Disabilities},\n url = {http://www.nime.org/proceedings/2011/nime2011_429.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_4 + abstract: 'Ambulation is a sound walk that uses field recording techniques and listening + technologies to create a walking performance using environmental sound. Ambulation + engages with the act of recording as an improvised performance in response to + the soundscapes it is presented within. In this paper we describe the work and + place it in relationship to other artists engaged with field recording and extended + sound walking practices. 
We will give technical details of the Ambulation system + we developed as part of the creation of the piece, and conclude with a collection + of observations that emerged from the project. The research around the development + and presentation of Ambulation contributes to the idea of field recording as a + live, procedural practice, moving away from the ideas of the movement of documentary + material from one place to another. We will show how having an open, improvisational + approach to technologically supported sound walking enables rich and unexpected + results to occur and how this way of working can contribute to NIME design and + thinking.' + address: 'Birmingham, UK' + author: 'Shaw, Tim and Bowers, John' + bibtex: "@inproceedings{NIME20_4,\n abstract = {Ambulation is a sound walk that\ + \ uses field recording techniques and listening technologies to create a walking\ + \ performance using environmental sound. Ambulation engages with the act of recording\ + \ as an improvised performance in response to the soundscapes it is presented\ + \ within. In this paper we describe the work and place it in relationship to other\ + \ artists engaged with field recording and extended sound walking practices. We\ + \ will give technical details of the Ambulation system we developed as part of\ + \ the creation of the piece, and conclude with a collection of observations that\ + \ emerged from the project. The research around the development and presentation\ + \ of Ambulation contributes to the idea of field recording as a live, procedural\ + \ practice, moving away from the ideas of the movement of documentary material\ + \ from one place to another. 
We will show how having an open, improvisational\ + \ approach to technologically supported sound walking enables rich and unexpected\ + \ results to occur and how this way of working can contribute to NIME design and\ + \ thinking.},\n address = {Birmingham, UK},\n author = {Shaw, Tim and Bowers,\ + \ John},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.4813322},\n editor = {Romain\ + \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ + \ = {23--28},\n presentation-video = {https://youtu.be/dDXkNnQXdN4},\n publisher\ + \ = {Birmingham City University},\n title = {Ambulation: Exploring Listening Technologies\ + \ for an Extended Sound Walking Practice},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper4.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178095 + doi: 10.5281/zenodo.4813322 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Music interfaces, music therapy, modifiable interfaces, design tools, - Human-Technology Interaction (HTI), User-Centred Design (UCD), design for all - (DfA), prototyping, performance. ' - pages: 429--432 - title: Designing a Music Performance Space for Persons with Intellectual Learning - Disabilities - url: http://www.nime.org/proceedings/2011/nime2011_429.pdf - year: 2011 + month: July + pages: 23--28 + presentation-video: https://youtu.be/dDXkNnQXdN4 + publisher: Birmingham City University + title: 'Ambulation: Exploring Listening Technologies for an Extended Sound Walking + Practice' + url: https://www.nime.org/proceedings/2020/nime2020_paper4.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Ahola2011 - abstract: 'Motion-based interactive systems have long been utilizedin contemporary - dance performances. 
These performancesbring new insight to sound-action experiences - in multidisciplinary art forms. This paper discusses the related technology within - the framework of the dance piece, Raja. The performance set up of Raja gives a - possibility to use two complementary tracking systems and two alternative choices - formotion sensors in real-time audio-visual synthesis.' - address: 'Oslo, Norway' - author: 'Ahola, Tom and Tahiroglu, Koray and Ahmaniemi, Teemu and Belloni, Fabio - and Ranki, Ville' - bibtex: "@inproceedings{Ahola2011,\n abstract = {Motion-based interactive systems\ - \ have long been utilizedin contemporary dance performances. These performancesbring\ - \ new insight to sound-action experiences in multidisciplinary art forms. This\ - \ paper discusses the related technology within the framework of the dance piece,\ - \ Raja. The performance set up of Raja gives a possibility to use two complementary\ - \ tracking systems and two alternative choices formotion sensors in real-time\ - \ audio-visual synthesis.},\n address = {Oslo, Norway},\n author = {Ahola, Tom\ - \ and Tahiroglu, Koray and Ahmaniemi, Teemu and Belloni, Fabio and Ranki, Ville},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177937},\n issn = {2220-4806},\n\ - \ keywords = {raja, performance, dance, motion sensor, accelerometer, gyro, positioning,\ - \ sonification, pure data, visualization, Qt},\n pages = {433--436},\n title =\ - \ {Raja -- A Multidisciplinary Artistic Performance},\n url = {http://www.nime.org/proceedings/2011/nime2011_433.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_40 + abstract: 'Learning to play an instrument is intrinsically multimodal, and we have + seen a trend of applying visual and haptic feedback in music games and computer-aided + music tutoring systems. 
However, most current systems are still designed to master + individual pieces of music; it is unclear how well the learned skills can be generalized + to new pieces. We aim to explore this question. In this study, we contribute Interactive + Rainbow Score, an interactive visual system to boost the learning of sight-playing, + the general musical skill to read music and map the visual representations to + performance motions. The key design of Interactive Rainbow Score is to associate + pitches (and the corresponding motions) with colored notation and further strengthen + such association via real-time interactions. Quantitative results show that the + interactive feature on average increases the learning efficiency by 31.1%. Further + analysis indicates that it is critical to apply the interaction in the early period + of learning.' + address: 'Birmingham, UK' + author: 'Xia, Gus and Chin, Daniel and Zhang, Yian and Zhang, Tianyu and Zhao, Junbo' + bibtex: "@inproceedings{NIME20_40,\n abstract = {Learning to play an instrument\ + \ is intrinsically multimodal, and we have seen a trend of applying visual and\ + \ haptic feedback in music games and computer-aided music tutoring systems. However,\ + \ most current systems are still designed to master individual pieces of music;\ + \ it is unclear how well the learned skills can be generalized to new pieces.\ + \ We aim to explore this question. In this study, we contribute Interactive Rainbow\ + \ Score, an interactive visual system to boost the learning of sight-playing,\ + \ the general musical skill to read music and map the visual representations to\ + \ performance motions. The key design of Interactive Rainbow Score is to associate\ + \ pitches (and the corresponding motions) with colored notation and further strengthen\ + \ such association via real-time interactions. 
Quantitative results show that\ + \ the interactive feature on average increases the learning efficiency by 31.1%.\ + \ Further analysis indicates that it is critical to apply the interaction in the\ + \ early period of learning.},\n address = {Birmingham, UK},\n author = {Xia, Gus\ + \ and Chin, Daniel and Zhang, Yian and Zhang, Tianyu and Zhao, Junbo},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813324},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {208--213},\n\ + \ publisher = {Birmingham City University},\n title = {Interactive Rainbow Score:\ + \ A Visual-centered Multimodal Flute Tutoring System},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper40.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177937 + doi: 10.5281/zenodo.4813324 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'raja, performance, dance, motion sensor, accelerometer, gyro, positioning, - sonification, pure data, visualization, Qt' - pages: 433--436 - title: Raja -- A Multidisciplinary Artistic Performance - url: http://www.nime.org/proceedings/2011/nime2011_433.pdf - year: 2011 + month: July + pages: 208--213 + publisher: Birmingham City University + title: 'Interactive Rainbow Score: A Visual-centered Multimodal Flute Tutoring + System' + url: https://www.nime.org/proceedings/2020/nime2020_paper40.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Gallin2011 - address: 'Oslo, Norway' - author: 'Gallin, Emmanuelle and Sirguy, Marc' - bibtex: "@inproceedings{Gallin2011,\n address = {Oslo, Norway},\n author = {Gallin,\ - \ Emmanuelle and Sirguy, Marc},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178023},\n\ - \ 
issn = {2220-4806},\n keywords = {Controller, Sensor, MIDI, USB, Computer Music,\ - \ USB, OSC, CV, MIDI, DMX, A/D Converter, Interface. },\n pages = {437--440},\n\ - \ title = {Eobody3: a Ready-to-use Pre-mapped \\& Multi-protocol Sensor Interface},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_437.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_41 + abstract: 'Research on Accessible Digital Musical Instruments (ADMIs) has received + growing attention over the past decades, carving out an increasingly large space + in the literature. Despite the recent publication of state-of-the-art review works, + there are still few systematic studies on ADMIs design analysis. In this paper + we propose a formal tool to explore the main design aspects of ADMIs based on + Dimension Space Analysis, a well established methodology in the NIME literature + which allows to generate an effective visual representation of the design space. + We therefore propose a set of relevant dimensions, which are based both on categories + proposed in recent works in the research context, and on original contributions. + We then proceed to demonstrate its applicability by selecting a set of relevant + case studies, and analyzing a sample set of ADMIs found in the literature.' + address: 'Birmingham, UK' + author: 'Davanzo, Nicola and Avanzini, Federico' + bibtex: "@inproceedings{NIME20_41,\n abstract = {Research on Accessible Digital\ + \ Musical Instruments (ADMIs) has received growing attention over the past decades,\ + \ carving out an increasingly large space in the literature. Despite the recent\ + \ publication of state-of-the-art review works, there are still few systematic\ + \ studies on ADMIs design analysis. In this paper we propose a formal tool to\ + \ explore the main design aspects of ADMIs based on Dimension Space Analysis,\ + \ a well established methodology in the NIME literature which allows to generate\ + \ an effective visual representation of the design space. 
We therefore propose\ + \ a set of relevant dimensions, which are based both on categories proposed in\ + \ recent works in the research context, and on original contributions. We then\ + \ proceed to demonstrate its applicability by selecting a set of relevant case\ + \ studies, and analyzing a sample set of ADMIs found in the literature.},\n address\ + \ = {Birmingham, UK},\n author = {Davanzo, Nicola and Avanzini, Federico},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813326},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {214--220},\n\ + \ presentation-video = {https://youtu.be/pJlB5k8TV9M},\n publisher = {Birmingham\ + \ City University},\n title = {A Dimension Space for the Evaluation of Accessible\ + \ Digital Musical Instruments},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper41.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178023 + doi: 10.5281/zenodo.4813326 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Controller, Sensor, MIDI, USB, Computer Music, USB, OSC, CV, MIDI, DMX, - A/D Converter, Interface. 
' - pages: 437--440 - title: 'Eobody3: a Ready-to-use Pre-mapped \& Multi-protocol Sensor Interface' - url: http://www.nime.org/proceedings/2011/nime2011_437.pdf - year: 2011 + month: July + pages: 214--220 + presentation-video: https://youtu.be/pJlB5k8TV9M + publisher: Birmingham City University + title: A Dimension Space for the Evaluation of Accessible Digital Musical Instruments + url: https://www.nime.org/proceedings/2020/nime2020_paper41.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Baath2011 - abstract: 'The aim of this study was to investigate how well subjectsbeat out a - rhythm using eye movements and to establishthe most accurate method of doing this. - Eighteen subjectsparticipated in an experiment were five different methodswere - evaluated. A fixation based method was found to bethe most accurate. All subjects - were able to synchronizetheir eye movements with a given beat but the accuracywas - much lower than usually found in finger tapping studies. Many parts of the body - are used to make music but sofar, with a few exceptions, the eyes have been silent. - The research presented here provides guidelines for implementingeye controlled - musical interfaces. Such interfaces would enable performers and artists to use - eye movement for musicalexpression and would open up new, exiting possibilities.' - address: 'Oslo, Norway' - author: 'Bååth, Rasmus and Strandberg, Thomas and Balkenius, Christian' - bibtex: "@inproceedings{Baath2011,\n abstract = {The aim of this study was to investigate\ - \ how well subjectsbeat out a rhythm using eye movements and to establishthe most\ - \ accurate method of doing this. Eighteen subjectsparticipated in an experiment\ - \ were five different methodswere evaluated. A fixation based method was found\ - \ to bethe most accurate. All subjects were able to synchronizetheir eye movements\ - \ with a given beat but the accuracywas much lower than usually found in finger\ - \ tapping studies. 
Many parts of the body are used to make music but sofar, with\ - \ a few exceptions, the eyes have been silent. The research presented here provides\ - \ guidelines for implementingeye controlled musical interfaces. Such interfaces\ - \ would enable performers and artists to use eye movement for musicalexpression\ - \ and would open up new, exiting possibilities.},\n address = {Oslo, Norway},\n\ - \ author = {B\\aa\\aath, Rasmus and Strandberg, Thomas and Balkenius, Christian},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177947},\n issn = {2220-4806},\n\ - \ keywords = {Rhythm, Eye tracking, Sensorimotor synchronization, Eye tapping\ - \ },\n pages = {441--444},\n title = {Eye Tapping : How to Beat Out an Accurate\ - \ Rhythm using Eye Movements},\n url = {http://www.nime.org/proceedings/2011/nime2011_441.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_42 + abstract: "This paper describes physical and digital design strategies for the Feedback-Actuated\ + \ Augmented Bass - a self-contained feedback double bass with embedded DSP capabilities.\ + \ A primary goal of the research project is to create an instrument that responds\ + \ well to the use of extended playing techniques and can manifest complex harmonic\ + \ spectra while retaining the feel and sonic \nfingerprint of an acoustic double\ + \ bass. While the physical con\nfiguration of the instrument builds on similar\ + \ feedback string instruments being developed in recent years, this project focuses\ + \ on modifying the feedback behaviour through low-level audio feature extractions\ + \ coupled to computationally lightweight \nfiltering and amplitude management\ + \ algorithms. We discuss these adaptive and time-variant processing strategies\ + \ and how we apply them in sculpting the system's dynamic and complex behaviour\ + \ to our liking." 
+ address: 'Birmingham, UK' + author: 'Melbye, Adam Pultz and Ulfarsson, Halldor A' + bibtex: "@inproceedings{NIME20_42,\n abstract = {This paper describes physical and\ + \ digital design strategies for the Feedback-Actuated Augmented Bass - a self-contained\ + \ feedback double bass with embedded DSP capabilities. A primary goal of the research\ + \ project is to create an instrument that responds well to the use of extended\ + \ playing techniques and can manifest complex harmonic spectra while retaining\ + \ the feel and sonic \nfingerprint of an acoustic double bass. While the physical\ + \ con\nfiguration of the instrument builds on similar feedback string instruments\ + \ being developed in recent years, this project focuses on modifying the feedback\ + \ behaviour through low-level audio feature extractions coupled to computationally\ + \ lightweight \nfiltering and amplitude management algorithms. We discuss these\ + \ adaptive and time-variant processing strategies and how we apply them in sculpting\ + \ the system's dynamic and complex behaviour to our liking.},\n address = {Birmingham,\ + \ UK},\n author = {Melbye, Adam Pultz and Ulfarsson, Halldor A},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813328},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {221--226},\n presentation-video\ + \ = {https://youtu.be/jXePge1MS8A},\n publisher = {Birmingham City University},\n\ + \ title = {Sculpting the behaviour of the Feedback-Actuated Augmented Bass: Design\ + \ strategies for subtle manipulations of string feedback using simple adaptive\ + \ algorithms},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper42.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177947 + doi: 10.5281/zenodo.4813328 + 
editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Rhythm, Eye tracking, Sensorimotor synchronization, Eye tapping ' - pages: 441--444 - title: 'Eye Tapping : How to Beat Out an Accurate Rhythm using Eye Movements' - url: http://www.nime.org/proceedings/2011/nime2011_441.pdf - year: 2011 + month: July + pages: 221--226 + presentation-video: https://youtu.be/jXePge1MS8A + publisher: Birmingham City University + title: 'Sculpting the behaviour of the Feedback-Actuated Augmented Bass: Design + strategies for subtle manipulations of string feedback using simple adaptive algorithms' + url: https://www.nime.org/proceedings/2020/nime2020_paper42.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Rosenbaum2011 - abstract: 'I present MelodyMorph, a reconfigurable musical instrument designed with - a focus on melodic improvisation. It is designed for a touch-screen interface, - and allows the user to create "bells" which can be tapped to play a note, and - dragged around on a pannable and zoomable canvas. Colors, textures and shapes - of the bells represent pitch and timbre properties. "Recorder bells" can store - and play back performances. Users can construct instruments that are modifiable - as they play, and build up complex melodies hierarchically from simple parts. ' - address: 'Oslo, Norway' - author: 'Rosenbaum, Eric' - bibtex: "@inproceedings{Rosenbaum2011,\n abstract = {I present MelodyMorph, a reconfigurable\ - \ musical instrument designed with a focus on melodic improvisation. It is designed\ - \ for a touch-screen interface, and allows the user to create \"bells\" which\ - \ can be tapped to play a note, and dragged around on a pannable and zoomable\ - \ canvas. Colors, textures and shapes of the bells represent pitch and timbre\ - \ properties. \"Recorder bells\" can store and play back performances. Users can\ - \ construct instruments that are modifiable as they play, and build up complex\ - \ melodies hierarchically from simple parts. 
},\n address = {Oslo, Norway},\n\ - \ author = {Rosenbaum, Eric},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178147},\n\ - \ issn = {2220-4806},\n keywords = {Melody, improvisation, representation, multi-touch,\ - \ iPad },\n pages = {445--447},\n title = {MelodyMorph: A Reconfigurable Musical\ - \ Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_445.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_43 + abstract: 'The comparative study presented in this paper focuses on two approaches + for the search of sound presets using a specific geometric touch app. The first + approach is based on independent sliders on screen and is called analytic. The + second is based on interpolation between presets represented by polygons on screen + and is called holistic. Participants had to listen to, memorize, and search for + sound presets characterized by four parameters. Ten different configurations of + sound synthesis and processing were presented to each participant, once for each + approach. The performance scores of 28 participants (not including early testers) + were computed using two measured values: the search duration, and the parametric + distance between the reference and answered presets. Compared to the analytic + sliders-based interface, the holistic interpolation-based interface demonstrated + a significant performance improvement for 60% of sound synthesizers. The other + 40% led to equivalent results for the analytic and holistic interfaces. Using + sliders, expert users performed nearly as well as they did with interpolation. + Beginners and intermediate users struggled more with sliders, while the interpolation + allowed them to get quite close to experts’ results.' 
+ address: 'Birmingham, UK' + author: 'Le Vaillant, Gwendal and Dutoit, Thierry and Giot, Rudi' + bibtex: "@inproceedings{NIME20_43,\n abstract = {The comparative study presented\ + \ in this paper focuses on two approaches for the search of sound presets using\ + \ a specific geometric touch app. The first approach is based on independent sliders\ + \ on screen and is called analytic. The second is based on interpolation between\ + \ presets represented by polygons on screen and is called holistic. Participants\ + \ had to listen to, memorize, and search for sound presets characterized by four\ + \ parameters. Ten different configurations of sound synthesis and processing were\ + \ presented to each participant, once for each approach. The performance scores\ + \ of 28 participants (not including early testers) were computed using two measured\ + \ values: the search duration, and the parametric distance between the reference\ + \ and answered presets. Compared to the analytic sliders-based interface, the\ + \ holistic interpolation-based interface demonstrated a significant performance\ + \ improvement for 60% of sound synthesizers. The other 40% led to equivalent results\ + \ for the analytic and holistic interfaces. Using sliders, expert users performed\ + \ nearly as well as they did with interpolation. Beginners and intermediate users\ + \ struggled more with sliders, while the interpolation allowed them to get quite\ + \ close to experts’ results.},\n address = {Birmingham, UK},\n author = {Le Vaillant,\ + \ Gwendal and Dutoit, Thierry and Giot, Rudi},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.4813330},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {227--232},\n presentation-video\ + \ = {https://youtu.be/Korw3J_QvQE},\n publisher = {Birmingham City University},\n\ + \ title = {Analytic vs. 
holistic approaches for the live search of sound presets\ + \ using graphical interpolation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper43.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178147 + doi: 10.5281/zenodo.4813330 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Melody, improvisation, representation, multi-touch, iPad ' - pages: 445--447 - title: 'MelodyMorph: A Reconfigurable Musical Instrument' - url: http://www.nime.org/proceedings/2011/nime2011_445.pdf - year: 2011 + month: July + pages: 227--232 + presentation-video: https://youtu.be/Korw3J_QvQE + publisher: Birmingham City University + title: Analytic vs. holistic approaches for the live search of sound presets using + graphical interpolation + url: https://www.nime.org/proceedings/2020/nime2020_paper43.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Franinovic2011 - address: 'Oslo, Norway' - author: 'Franinovic, Karmen' - bibtex: "@inproceedings{Franinovic2011,\n address = {Oslo, Norway},\n author = {Franinovic,\ - \ Karmen},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178013},\n issn = {2220-4806},\n\ - \ keywords = {exploration,gesture,habit,sonic interaction design},\n pages = {448--452},\n\ - \ title = {The Flo)(ps : Negotiating Between Habitual and Explorative Gestures},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_448.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_44 + abstract: 'The purpose of this project is to develop an interface for writing and + performing music using sequencers in virtual reality (VR). The VR sequencer deals + with chance-based operations to select audio clips for playback and spatial orientation-based + rhythm and melody generation, while incorporating three-dimensional (3-D) objects + as omnidirectional playheads. 
Spheres which grow from a variable minimum size + to a variable maximum size at a variable speed, constantly looping, represent + the passage of time in this VR sequencer. The 3-D assets which represent samples + are actually sample containers that come in six common dice shapes. As the dice + come into contact with a sphere, their samples are triggered to play. This behavior + mimics digital audio workstation (DAW) playheads reading MIDI left-to-right in + popular professional and consumer software sequencers. To incorporate height into + VR music making, the VR sequencer is capable of generating terrain at the press + of a button. Each terrain will gradually change, creating the possibility for + the dice to roll on their own. Audio effects are built in to each scene and mapped + to terrain parameters, creating another opportunity for chance operations in the + music making process. The chance-based sample selection, spatial orientation-defined + rhythms, and variable terrain mapped to audio effects lead to indeterminacy in + performance and replication of a single piece of music. This project aims to give + the gaming community access to experimental music making by means of consumer + virtual reality hardware.' + address: 'Birmingham, UK' + author: 'Mitchusson, Chase' + bibtex: "@inproceedings{NIME20_44,\n abstract = {The purpose of this project is\ + \ to develop an interface for writing and performing music using sequencers in\ + \ virtual reality (VR). The VR sequencer deals with chance-based operations to\ + \ select audio clips for playback and spatial orientation-based rhythm and melody\ + \ generation, while incorporating three-dimensional (3-D) objects as omnidirectional\ + \ playheads. Spheres which grow from a variable minimum size to a variable maximum\ + \ size at a variable speed, constantly looping, represent the passage of time\ + \ in this VR sequencer. 
The 3-D assets which represent samples are actually sample\ + \ containers that come in six common dice shapes. As the dice come into contact\ + \ with a sphere, their samples are triggered to play. This behavior mimics digital\ + \ audio workstation (DAW) playheads reading MIDI left-to-right in popular professional\ + \ and consumer software sequencers. To incorporate height into VR music making,\ + \ the VR sequencer is capable of generating terrain at the press of a button.\ + \ Each terrain will gradually change, creating the possibility for the dice to\ + \ roll on their own. Audio effects are built in to each scene and mapped to terrain\ + \ parameters, creating another opportunity for chance operations in the music\ + \ making process. The chance-based sample selection, spatial orientation-defined\ + \ rhythms, and variable terrain mapped to audio effects lead to indeterminacy\ + \ in performance and replication of a single piece of music. This project aims\ + \ to give the gaming community access to experimental music making by means of\ + \ consumer virtual reality hardware.},\n address = {Birmingham, UK},\n author\ + \ = {Mitchusson, Chase},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813332},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {233--236},\n publisher = {Birmingham City University},\n\ + \ title = {Indeterminate Sample Sequencing in Virtual Reality},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper44.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178013 + doi: 10.5281/zenodo.4813332 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'exploration,gesture,habit,sonic interaction design' - pages: 448--452 - title: 'The Flo)(ps : Negotiating Between 
Habitual and Explorative Gestures' - url: http://www.nime.org/proceedings/2011/nime2011_448.pdf - year: 2011 + month: July + pages: 233--236 + publisher: Birmingham City University + title: Indeterminate Sample Sequencing in Virtual Reality + url: https://www.nime.org/proceedings/2020/nime2020_paper44.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Schedel2011 - abstract: 'In this paper we discuss how the band 000000Swan uses machine learning - to parse complex sensor data and create intricate artistic systems for live performance. - Using the Wekinator software for interactive machine learning, we have created - discrete and continuous models for controlling audio and visual environments using - human gestures sensed by a commercially-available sensor bow and the Microsoft - Kinect. In particular, we have employed machine learning to quickly and easily - prototype complex relationships between performer gesture and performative outcome. ' - address: 'Oslo, Norway' - author: 'Schedel, Margaret and Perry, Phoenix and Fiebrink, Rebecca' - bibtex: "@inproceedings{Schedel2011,\n abstract = {In this paper we discuss how\ - \ the band 000000Swan uses machine learning to parse complex sensor data and create\ - \ intricate artistic systems for live performance. Using the Wekinator software\ - \ for interactive machine learning, we have created discrete and continuous models\ - \ for controlling audio and visual environments using human gestures sensed by\ - \ a commercially-available sensor bow and the Microsoft Kinect. In particular,\ - \ we have employed machine learning to quickly and easily prototype complex relationships\ - \ between performer gesture and performative outcome. 
},\n address = {Oslo, Norway},\n\ - \ author = {Schedel, Margaret and Perry, Phoenix and Fiebrink, Rebecca},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178151},\n issn = {2220-4806},\n keywords\ - \ = {Wekinator, K-Bow, Machine Learning, Interactive, Multimedia, Kinect, Motion-Tracking,\ - \ Bow Articulation, Animation },\n pages = {453--456},\n title = {Wekinating 000000{S}wan\ - \ : Using Machine Learning to Create and Control Complex Artistic Systems},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_453.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_45 + abstract: 'Machine learning (ML) has been used to create mappings for digital musical + instruments for over twenty-five years, and numerous ML toolkits have been developed + for the NIME community. However, little published work has studied how ML has + been used in sustained instrument building and performance practices. This paper + examines the experiences of instrument builder and performer Laetitia Sonami, + who has been using ML to build and refine her Spring Spyre instrument since 2012. + Using Sonami’s current practice as a case study, this paper explores the utility, + opportunities, and challenges involved in using ML in practice over many years. + This paper also reports the perspective of Rebecca Fiebrink, the creator of the + Wekinator ML tool used by Sonami, revealing how her work with Sonami has led to + changes to the software and to her teaching. This paper thus contributes a deeper + understanding of the value of ML for NIME practitioners, and it can inform design + considerations for future ML toolkits as well as NIME pedagogy. Further, it provides + new perspectives on familiar NIME conversations about mapping strategies, expressivity, + and control, informed by a dedicated practice over many years.' 
+ address: 'Birmingham, UK' + author: 'Fiebrink, Rebecca and Sonami, Laetitia' + bibtex: "@inproceedings{NIME20_45,\n abstract = {Machine learning (ML) has been\ + \ used to create mappings for digital musical instruments for over twenty-five\ + \ years, and numerous ML toolkits have been developed for the NIME community.\ + \ However, little published work has studied how ML has been used in sustained\ + \ instrument building and performance practices. This paper examines the experiences\ + \ of instrument builder and performer Laetitia Sonami, who has been using ML to\ + \ build and refine her Spring Spyre instrument since 2012. Using Sonami’s current\ + \ practice as a case study, this paper explores the utility, opportunities, and\ + \ challenges involved in using ML in practice over many years. This paper also\ + \ reports the perspective of Rebecca Fiebrink, the creator of the Wekinator ML\ + \ tool used by Sonami, revealing how her work with Sonami has led to changes to\ + \ the software and to her teaching. This paper thus contributes a deeper understanding\ + \ of the value of ML for NIME practitioners, and it can inform design considerations\ + \ for future ML toolkits as well as NIME pedagogy. 
Further, it provides new perspectives\ + \ on familiar NIME conversations about mapping strategies, expressivity, and control,\ + \ informed by a dedicated practice over many years.},\n address = {Birmingham,\ + \ UK},\n author = {Fiebrink, Rebecca and Sonami, Laetitia},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813334},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {237--242},\n presentation-video\ + \ = {https://youtu.be/EvXZ9NayZhA},\n publisher = {Birmingham City University},\n\ + \ title = {Reflections on Eight Years of Instrument Creation with Machine Learning},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper45.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178151 + doi: 10.5281/zenodo.4813334 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Wekinator, K-Bow, Machine Learning, Interactive, Multimedia, Kinect, - Motion-Tracking, Bow Articulation, Animation ' - pages: 453--456 - title: 'Wekinating 000000Swan : Using Machine Learning to Create and Control Complex - Artistic Systems' - url: http://www.nime.org/proceedings/2011/nime2011_453.pdf - year: 2011 + month: July + pages: 237--242 + presentation-video: https://youtu.be/EvXZ9NayZhA + publisher: Birmingham City University + title: Reflections on Eight Years of Instrument Creation with Machine Learning + url: https://www.nime.org/proceedings/2020/nime2020_paper45.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Julia2011 - abstract: 'In the past decade we have seen a growing presence of tabletop systems - applied to music, lately with even some products becoming commercially available - and being used byprofessional musicians in concerts. 
The development of thistype - of applications requires several demanding technicalexpertises such as input processing, - graphical design, realtime sound generation or interaction design, and because - ofthis complexity they are usually developed by a multidisciplinary group.In this - paper we present the Musical Tabletop CodingFramework (MTCF) a framework for designing - and codingmusical tabletop applications by using the graphical programming language - for digital sound processing Pure Data(Pd). With this framework we try to simplify - the creationprocess of such type of interfaces, by removing the need ofany programming - skills other than those of Pd.' - address: 'Oslo, Norway' - author: 'Julià, Carles F. and Gallardo, Daniel and Jordà, Sergi' - bibtex: "@inproceedings{Julia2011,\n abstract = {In the past decade we have seen\ - \ a growing presence of tabletop systems applied to music, lately with even some\ - \ products becoming commercially available and being used byprofessional musicians\ - \ in concerts. The development of thistype of applications requires several demanding\ - \ technicalexpertises such as input processing, graphical design, realtime sound\ - \ generation or interaction design, and because ofthis complexity they are usually\ - \ developed by a multidisciplinary group.In this paper we present the Musical\ - \ Tabletop CodingFramework (MTCF) a framework for designing and codingmusical\ - \ tabletop applications by using the graphical programming language for digital\ - \ sound processing Pure Data(Pd). With this framework we try to simplify the creationprocess\ - \ of such type of interfaces, by removing the need ofany programming skills other\ - \ than those of Pd.},\n address = {Oslo, Norway},\n author = {Juli\\`{a}, Carles\ - \ F. 
and Gallardo, Daniel and Jord\\`{a}, Sergi},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178057},\n issn = {2220-4806},\n keywords = {Pure Data, tabletop,\ - \ tangible, framework },\n pages = {457--460},\n title = {MTCF : A Framework for\ - \ Designing and Coding Musical Tabletop Applications Directly in Pure Data},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_457.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_46 + abstract: 'Based on the experience garnered through a longitudinal ethnographic + study, the authors reflect on the practice of designing and fabricating bespoke, + accessible music tech- nologies. Of particular focus are the social, technical + and environmental factors at play which make the provision of such technology + a reality. The authors make suggestions of ways to achieve long-term, sustained + use. Seemingly those involved in its design, fabrication and use could benefit + from a concerted effort to share resources, knowledge and skill as a mobilised + community of practitioners.' + address: 'Birmingham, UK' + author: 'Lucas, Alex and Ortiz, Miguel and Schroeder, Franziska' + bibtex: "@inproceedings{NIME20_46,\n abstract = {Based on the experience garnered\ + \ through a longitudinal ethnographic study, the authors reflect on the practice\ + \ of designing and fabricating bespoke, accessible music tech- nologies. Of particular\ + \ focus are the social, technical and environmental factors at play which make\ + \ the provision of such technology a reality. The authors make suggestions of\ + \ ways to achieve long-term, sustained use. 
Seemingly those involved in its design,\ + \ fabrication and use could benefit from a concerted effort to share resources,\ + \ knowledge and skill as a mobilised community of practitioners.},\n address =\ + \ {Birmingham, UK},\n author = {Lucas, Alex and Ortiz, Miguel and Schroeder, Franziska},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813338},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {243--248},\n presentation-video = {https://youtu.be/cLguyuZ9weI},\n publisher\ + \ = {Birmingham City University},\n title = {The Longevity of Bespoke, Accessible\ + \ Music Technology: A Case for Community},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper46.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178057 + doi: 10.5281/zenodo.4813338 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Pure Data, tabletop, tangible, framework ' - pages: 457--460 - title: 'MTCF : A Framework for Designing and Coding Musical Tabletop Applications - Directly in Pure Data' - url: http://www.nime.org/proceedings/2011/nime2011_457.pdf - year: 2011 + month: July + pages: 243--248 + presentation-video: https://youtu.be/cLguyuZ9weI + publisher: Birmingham City University + title: 'The Longevity of Bespoke, Accessible Music Technology: A Case for Community' + url: https://www.nime.org/proceedings/2020/nime2020_paper46.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Pirro2011 - address: 'Oslo, Norway' - author: 'Pirr\`{o}, David and Eckel, Gerhard' - bibtex: "@inproceedings{Pirro2011,\n address = {Oslo, Norway},\n author = {Pirr\\\ - `{o}, David and Eckel, Gerhard},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = 
{10.5281/zenodo.1178135},\n\ - \ issn = {2220-4806},\n keywords = {embod-,enactive interfaces,has been ap-,iment,interaction,motion\ - \ tracking,of sound and music,physical modelling,to movement and gesture},\n pages\ - \ = {461--464},\n title = {Physical Modelling Enabling Enaction: an Example},\n\ - \ url = {http://www.nime.org/proceedings/2011/nime2011_461.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_47 + abstract: 'With the proliferation of venues equipped with the high density loudspeaker + arrays there is a growing interest in developing new interfaces for spatial musical + expression (NISME). Of particular interest are interfaces that focus on the emancipation + of the spatial domain as the primary dimension for musical expression. Here we + present Monet NISME that leverages multitouch pressure-sensitive surface and the + D4 library''s spatial mask and thereby allows for a unique approach to interactive + spatialization. Further, we present a study with 22 participants designed to assess + its usefulness and compare it to the Locus, a NISME introduced in 2019 as part + of a localization study which is built on the same design principles of using + natural gestural interaction with the spatial content. Lastly, we briefly discuss + the utilization of both NISMEs in two artistic performances and propose a set + of guidelines for further exploration in the NISME domain.' + address: 'Birmingham, UK' + author: 'Bukvic, Ivica I and Sardana, Disha and Joo, Woohun' + bibtex: "@inproceedings{NIME20_47,\n abstract = {With the proliferation of venues\ + \ equipped with the high density loudspeaker arrays there is a growing interest\ + \ in developing new interfaces for spatial musical expression (NISME). Of particular\ + \ interest are interfaces that focus on the emancipation of the spatial domain\ + \ as the primary dimension for musical expression. 
Here we present Monet NISME\ + \ that leverages multitouch pressure-sensitive surface and the D4 library's spatial\ + \ mask and thereby allows for a unique approach to interactive spatialization.\ + \ Further, we present a study with 22 participants designed to assess its usefulness\ + \ and compare it to the Locus, a NISME introduced in 2019 as part of a localization\ + \ study which is built on the same design principles of using natural gestural\ + \ interaction with the spatial content. Lastly, we briefly discuss the utilization\ + \ of both NISMEs in two artistic performances and propose a set of guidelines\ + \ for further exploration in the NISME domain.},\n address = {Birmingham, UK},\n\ + \ author = {Bukvic, Ivica I and Sardana, Disha and Joo, Woohun},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813342},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {249--254},\n presentation-video\ + \ = {https://youtu.be/GQ0552Lc1rw},\n publisher = {Birmingham City University},\n\ + \ title = {New Interfaces for Spatial Musical Expression},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper47.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178135 + doi: 10.5281/zenodo.4813342 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'embod-,enactive interfaces,has been ap-,iment,interaction,motion tracking,of - sound and music,physical modelling,to movement and gesture' - pages: 461--464 - title: 'Physical Modelling Enabling Enaction: an Example' - url: http://www.nime.org/proceedings/2011/nime2011_461.pdf - year: 2011 + month: July + pages: 249--254 + presentation-video: https://youtu.be/GQ0552Lc1rw + publisher: Birmingham City University + title: New Interfaces for Spatial Musical 
Expression + url: https://www.nime.org/proceedings/2020/nime2020_paper47.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Mitchell2011 - abstract: 'This paper documents the first developmental phase of aninterface that - enables the performance of live music usinggestures and body movements. The work - included focuseson the first step of this project: the composition and performance - of live music using hand gestures captured using asingle data glove. The paper - provides a background to thefield, the aim of the project and a technical description - ofthe work completed so far. This includes the developmentof a robust posture - vocabulary, an artificial neural networkbased posture identification process and - a state-based system to map identified postures onto a set of performanceprocesses. - The paper is closed with qualitative usage observations and a projection of future - plans.' - address: 'Oslo, Norway' - author: 'Mitchell, Thomas and Heap, Imogen' - bibtex: "@inproceedings{Mitchell2011,\n abstract = {This paper documents the first\ - \ developmental phase of aninterface that enables the performance of live music\ - \ usinggestures and body movements. The work included focuseson the first step\ - \ of this project: the composition and performance of live music using hand gestures\ - \ captured using asingle data glove. The paper provides a background to thefield,\ - \ the aim of the project and a technical description ofthe work completed so far.\ - \ This includes the developmentof a robust posture vocabulary, an artificial neural\ - \ networkbased posture identification process and a state-based system to map\ - \ identified postures onto a set of performanceprocesses. 
The paper is closed\ - \ with qualitative usage observations and a projection of future plans.},\n address\ - \ = {Oslo, Norway},\n author = {Mitchell, Thomas and Heap, Imogen},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178111},\n issn = {2220-4806},\n keywords\ - \ = {Music Controller, Gestural Music, Data Glove, Neural Network, Live Music\ - \ Composition, Looping, Imogen Heap },\n pages = {465--468},\n title = {SoundGrasp\ - \ : A Gestural Interface for the Performance of Live Music},\n url = {http://www.nime.org/proceedings/2011/nime2011_465.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_48 + abstract: 'This study presents an ecosystemic approach to music interaction, through + the practice-based development of a mixed reality installation artwork. It fuses + a generative, immersive audio composition with augmented reality visualisation, + within an architectural space as part of a blended experience. Participants are + encouraged to explore and interact with this combination of elements through physical + engagement, to then develop an understanding of how the blending of real and virtual + space occurs as the installation unfolds. The sonic layer forms a link between + the two, as a three-dimensional sound composition. Connections in the system allow + for multiple streams of data to run between the layers, which are used for the + real-time modulation of parameters. These feedback mechanisms form a complete + loop between the participant in real space, soundscape, and mixed reality visualisation, + providing a participant mediated experience that exists somewhere between creator + and observer.' + address: 'Birmingham, UK' + author: 'Durham, Mark' + bibtex: "@inproceedings{NIME20_48,\n abstract = {This study presents an ecosystemic\ + \ approach to music interaction, through the practice-based development of a mixed\ + \ reality installation artwork. 
It fuses a generative, immersive audio composition\ + \ with augmented reality visualisation, within an architectural space as part\ + \ of a blended experience. Participants are encouraged to explore and interact\ + \ with this combination of elements through physical engagement, to then develop\ + \ an understanding of how the blending of real and virtual space occurs as the\ + \ installation unfolds. The sonic layer forms a link between the two, as a three-dimensional\ + \ sound composition. Connections in the system allow for multiple streams of data\ + \ to run between the layers, which are used for the real-time modulation of parameters.\ + \ These feedback mechanisms form a complete loop between the participant in real\ + \ space, soundscape, and mixed reality visualisation, providing a participant\ + \ mediated experience that exists somewhere between creator and observer.},\n\ + \ address = {Birmingham, UK},\n author = {Durham, Mark},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813344},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {255--258},\n publisher = {Birmingham\ + \ City University},\n title = {Inhabiting the Instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper48.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178111 + doi: 10.5281/zenodo.4813344 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Music Controller, Gestural Music, Data Glove, Neural Network, Live Music - Composition, Looping, Imogen Heap ' - pages: 465--468 - title: 'SoundGrasp : A Gestural Interface for the Performance of Live Music' - url: http://www.nime.org/proceedings/2011/nime2011_465.pdf - year: 2011 + month: July + pages: 255--258 + publisher: Birmingham City University + 
title: Inhabiting the Instrument + url: https://www.nime.org/proceedings/2020/nime2020_paper48.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Mullen2011 - abstract: 'The use of non-invasive electroencephalography (EEG) in the experimental - arts is not a novel concept. Since 1965, EEG has been used in a large number of, - sometimes highly sophisticated, systems for musical and artistic expression. However, - since the advent of the synthesizer, most such systems have utilized digital and/or - synthesized media in sonifying the EEG signals. There have been relatively few - attempts to create interfaces for musical expression that allow one to mechanically - manipulate acoustic instruments by modulating one''s mental state. Secondly, few - such systems afford a distributed performance medium, with data transfer and audience - participation occurring over the Internet. The use of acoustic instruments and - Internet-enabled communication expands the realm of possibilities for musical - expression in Brain-Computer Music Interfaces (BCMI), while also introducing additional - challenges. In this paper we report and examine a first demonstration (Music for - Online Performer) of a novel system for Internet-enabled manipulation of robotic - acoustic instruments, with feedback, using a non-invasive EEG-based BCI and low-cost, - commercially available robotics hardware. ' - address: 'Oslo, Norway' - author: 'Mullen, Tim and Warp, Richard and Jansch, Adam' - bibtex: "@inproceedings{Mullen2011,\n abstract = {The use of non-invasive electroencephalography\ - \ (EEG) in the experimental arts is not a novel concept. Since 1965, EEG has been\ - \ used in a large number of, sometimes highly sophisticated, systems for musical\ - \ and artistic expression. However, since the advent of the synthesizer, most\ - \ such systems have utilized digital and/or synthesized media in sonifying the\ - \ EEG signals. 
There have been relatively few attempts to create interfaces for\ - \ musical expression that allow one to mechanically manipulate acoustic instruments\ - \ by modulating one's mental state. Secondly, few such systems afford a distributed\ - \ performance medium, with data transfer and audience participation occurring\ - \ over the Internet. The use of acoustic instruments and Internet-enabled communication\ - \ expands the realm of possibilities for musical expression in Brain-Computer\ - \ Music Interfaces (BCMI), while also introducing additional challenges. In this\ - \ paper we report and examine a first demonstration (Music for Online Performer)\ - \ of a novel system for Internet-enabled manipulation of robotic acoustic instruments,\ - \ with feedback, using a non-invasive EEG-based BCI and low-cost, commercially\ - \ available robotics hardware. },\n address = {Oslo, Norway},\n author = {Mullen,\ - \ Tim and Warp, Richard and Jansch, Adam},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178117},\n\ - \ issn = {2220-4806},\n keywords = {EEG, Brain-Computer Music Interface, Internet,\ - \ Arduino. },\n pages = {469--472},\n title = {Minding the (Transatlantic) Gap:\ - \ An Internet-Enabled Acoustic Brain-Computer Music Interface},\n url = {http://www.nime.org/proceedings/2011/nime2011_469.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_49 + abstract: 'This paper details technologies and artistic approaches to crowd-driven + music, discussed in the context of a live public installation in which activity + in a public space (a busy railway platform) is used to drive the automated composition + and performance of music. 
The approach presented uses realtime machine vision + applied to a live video feed of a scene, from which detected objects and people + are fed into Manhattan (Nash, 2014), a digital music notation that integrates + sequencing and programming to support the live creation of complex musical works + that combine static, algorithmic, and interactive elements. The paper discusses + the technical details of the system and artistic development of specific musical + works, introducing novel techniques for mapping chaotic systems to musical expression + and exploring issues of agency, aesthetic, accessibility and adaptability relating + to composing interactive music for crowds and public spaces. In particular, performances + as part of an installation for BBC Music Day 2018 are described. The paper subsequently + details a practical workshop, delivered digitally, exploring the development of + interactive performances in which the audience or general public actively or passively + control live generation of a musical piece. Exercises support discussions on technical, + aesthetic, and ontological issues arising from the identification and mapping + of structure, order, and meaning in non-musical domains to analogous concepts + in musical expression. Materials for the workshop are available freely with the + Manhattan software.' 
+ address: 'Birmingham, UK' + author: 'Nash, Chris' + bibtex: "@inproceedings{NIME20_49,\n abstract = {This paper details technologies\ + \ and artistic approaches to crowd-driven music, discussed in the context of a\ + \ live public installation in which activity in a public space (a busy railway\ + \ platform) is used to drive the automated composition and performance of music.\ + \ The approach presented uses realtime machine vision applied to a live video\ + \ feed of a scene, from which detected objects and people are fed into Manhattan\ + \ (Nash, 2014), a digital music notation that integrates sequencing and programming\ + \ to support the live creation of complex musical works that combine static, algorithmic,\ + \ and interactive elements. The paper discusses the technical details of the system\ + \ and artistic development of specific musical works, introducing novel techniques\ + \ for mapping chaotic systems to musical expression and exploring issues of agency,\ + \ aesthetic, accessibility and adaptability relating to composing interactive\ + \ music for crowds and public spaces. In particular, performances as part of an\ + \ installation for BBC Music Day 2018 are described. The paper subsequently details\ + \ a practical workshop, delivered digitally, exploring the development of interactive\ + \ performances in which the audience or general public actively or passively control\ + \ live generation of a musical piece. Exercises support discussions on technical,\ + \ aesthetic, and ontological issues arising from the identification and mapping\ + \ of structure, order, and meaning in non-musical domains to analogous concepts\ + \ in musical expression. 
Materials for the workshop are available freely with\ + \ the Manhattan software.},\n address = {Birmingham, UK},\n author = {Nash, Chris},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813346},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {259--264},\n presentation-video = {https://youtu.be/DHIowP2lOsA},\n publisher\ + \ = {Birmingham City University},\n title = {Crowd-driven Music: Interactive and\ + \ Generative Approaches using Machine Vision and Manhattan},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper49.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178117 + doi: 10.5281/zenodo.4813346 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'EEG, Brain-Computer Music Interface, Internet, Arduino. ' - pages: 469--472 - title: 'Minding the (Transatlantic) Gap: An Internet-Enabled Acoustic Brain-Computer - Music Interface' - url: http://www.nime.org/proceedings/2011/nime2011_469.pdf - year: 2011 + month: July + pages: 259--264 + presentation-video: https://youtu.be/DHIowP2lOsA + publisher: Birmingham City University + title: 'Crowd-driven Music: Interactive and Generative Approaches using Machine + Vision and Manhattan' + url: https://www.nime.org/proceedings/2020/nime2020_paper49.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Papetti2011 - abstract: 'A shoe-based interface is presented, which enables users toplay percussive - virtual instruments by tapping their feet.The wearable interface consists of a - pair of sandals equippedwith four force sensors and four actuators affording audiotactile - feedback. The sensors provide data via wireless transmission to a host computer, - where they are processed andmapped to a physics-based sound synthesis engine. 
- Sincethe system provides OSC and MIDI compatibility, alternative electronic instruments - can be used as well. The audiosignals are then sent back wirelessly to audio-tactile - excitersembedded in the sandals'' sole, and optionally to headphonesand external - loudspeakers. The round-trip wireless communication only introduces very small - latency, thus guaranteeing coherence and unity in the multimodal percept andallowing - tight timing while playing.' - address: 'Oslo, Norway' - author: 'Papetti, Stefano and Civolani, Marco and Fontana, Federico' - bibtex: "@inproceedings{Papetti2011,\n abstract = {A shoe-based interface is presented,\ - \ which enables users toplay percussive virtual instruments by tapping their feet.The\ - \ wearable interface consists of a pair of sandals equippedwith four force sensors\ - \ and four actuators affording audiotactile feedback. The sensors provide data\ - \ via wireless transmission to a host computer, where they are processed andmapped\ - \ to a physics-based sound synthesis engine. 
Sincethe system provides OSC and\ - \ MIDI compatibility, alternative electronic instruments can be used as well.\ - \ The audiosignals are then sent back wirelessly to audio-tactile excitersembedded\ - \ in the sandals' sole, and optionally to headphonesand external loudspeakers.\ - \ The round-trip wireless communication only introduces very small latency, thus\ - \ guaranteeing coherence and unity in the multimodal percept andallowing tight\ - \ timing while playing.},\n address = {Oslo, Norway},\n author = {Papetti, Stefano\ - \ and Civolani, Marco and Fontana, Federico},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1178129},\n issn = {2220-4806},\n keywords = {interface, audio,\ - \ tactile, foot tapping, embodiment, footwear, wireless, wearable, mobile },\n\ - \ pages = {473--476},\n title = {Rhythm'n'Shoes: a Wearable Foot Tapping Interface\ - \ with Audio-Tactile Feedback},\n url = {http://www.nime.org/proceedings/2011/nime2011_473.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_5 + abstract: 'This paper discusses the design of a musical synthesizer that takes words + as input, and attempts to generate music that somehow underscores those words. + This is considered as a tool for sound designers who could, for example, enter + dialogue from a film script and generate appropriate back- ground music. The synthesizer + uses emotional valence and arousal as a common representation between words and + mu- sic. It draws on previous studies that relate words and mu- sical features + to valence and arousal. The synthesizer was evaluated with a user study. Participants + listened to music generated by the synthesizer, and described the music with words. + The arousal of the words they entered was highly correlated with the intended + arousal of the music. The same was, surprisingly, not true for valence. The synthesizer + is online, at [redacted URL].' 
+ address: 'Birmingham, UK' + author: 'Krzyzaniak, Michael J' + bibtex: "@inproceedings{NIME20_5,\n abstract = {This paper discusses the design\ + \ of a musical synthesizer that takes words as input, and attempts to generate\ + \ music that somehow underscores those words. This is considered as a tool for\ + \ sound designers who could, for example, enter dialogue from a film script and\ + \ generate appropriate back- ground music. The synthesizer uses emotional valence\ + \ and arousal as a common representation between words and mu- sic. It draws on\ + \ previous studies that relate words and mu- sical features to valence and arousal.\ + \ The synthesizer was evaluated with a user study. Participants listened to music\ + \ generated by the synthesizer, and described the music with words. The arousal\ + \ of the words they entered was highly correlated with the intended arousal of\ + \ the music. The same was, surprisingly, not true for valence. The synthesizer\ + \ is online, at [redacted URL].},\n address = {Birmingham, UK},\n author = {Krzyzaniak,\ + \ Michael J},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813350},\n editor\ + \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ + \ pages = {29--34},\n publisher = {Birmingham City University},\n title = {Words\ + \ to Music Synthesis},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper5.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178129 + doi: 10.5281/zenodo.4813350 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'interface, audio, tactile, foot tapping, embodiment, footwear, wireless, - wearable, mobile ' - pages: 473--476 - title: 'Rhythm''n''Shoes: a Wearable Foot Tapping Interface with Audio-Tactile Feedback' - url: 
http://www.nime.org/proceedings/2011/nime2011_473.pdf - year: 2011 + month: July + pages: 29--34 + publisher: Birmingham City University + title: Words to Music Synthesis + url: https://www.nime.org/proceedings/2020/nime2020_paper5.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Erkut2011 - abstract: 'We present a generic, structured model for design and evaluation of musical - interfaces. This model is developmentoriented, and it is based on the fundamental - function of themusical interfaces, i.e., to coordinate the human action andperception - for musical expression, subject to human capabilities and skills. To illustrate - the particulars of this modeland present it in operation, we consider the previous - designand evaluation phase of iPalmas, our testbed for exploringrhythmic interaction. - Our findings inform the current design phase of iPalmas visual and auditory displays, - wherewe build on what has resonated with the test users, and explore further possibilities - based on the evaluation results.' - address: 'Oslo, Norway' - author: 'Erkut, Cumhur and Jylhä, Antti and Discioglu, Reha' - bibtex: "@inproceedings{Erkut2011,\n abstract = {We present a generic, structured\ - \ model for design and evaluation of musical interfaces. This model is developmentoriented,\ - \ and it is based on the fundamental function of themusical interfaces, i.e.,\ - \ to coordinate the human action andperception for musical expression, subject\ - \ to human capabilities and skills. To illustrate the particulars of this modeland\ - \ present it in operation, we consider the previous designand evaluation phase\ - \ of iPalmas, our testbed for exploringrhythmic interaction. 
Our findings inform\ - \ the current design phase of iPalmas visual and auditory displays, wherewe build\ - \ on what has resonated with the test users, and explore further possibilities\ - \ based on the evaluation results.},\n address = {Oslo, Norway},\n author = {Erkut,\ - \ Cumhur and Jylh\\''{a}, Antti and Discioglu, Reha},\n booktitle = {Proceedings\ + ID: NIME20_50 + abstract: 'This paper brings together two main perspectives on algorithmic pattern. + First, the writing of musical patterns in live coding performance, and second, + the weaving of patterns in textiles. In both cases, algorithmic pattern is an + interface between the human and the outcome, where small changes have far-reaching + impact on the results. By bringing contemporary live coding and ancient textile + approaches together, we reach a common view of pattern as algorithmic movement + (e.g. looping, shifting, reflecting, interfering) in the making of things. This + works beyond the usual definition of pattern used in musical interfaces, of mere + repeating sequences. We conclude by considering the place of algorithmic pattern + in a wider activity of making.' + address: 'Birmingham, UK' + author: 'Mclean, Alex' + bibtex: "@inproceedings{NIME20_50,\n abstract = {This paper brings together two\ + \ main perspectives on algorithmic pattern. First, the writing of musical patterns\ + \ in live coding performance, and second, the weaving of patterns in textiles.\ + \ In both cases, algorithmic pattern is an interface between the human and the\ + \ outcome, where small changes have far-reaching impact on the results. By bringing\ + \ contemporary live coding and ancient textile approaches together, we reach a\ + \ common view of pattern as algorithmic movement (e.g. looping, shifting, reflecting,\ + \ interfering) in the making of things. This works beyond the usual definition\ + \ of pattern used in musical interfaces, of mere repeating sequences. 
We conclude\ + \ by considering the place of algorithmic pattern in a wider activity of making.},\n\ + \ address = {Birmingham, UK},\n author = {Mclean, Alex},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178003},\n issn = {2220-4806},\n keywords = {multimodal\ - \ displays,rhythmic interaction,sonification,uml},\n pages = {477--480},\n title\ - \ = {A Structured Design and Evaluation Model with Application to Rhythmic Interaction\ - \ Displays},\n url = {http://www.nime.org/proceedings/2011/nime2011_477.pdf},\n\ - \ year = {2011}\n}\n" + \ doi = {10.5281/zenodo.4813352},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {265--270},\n presentation-video\ + \ = {https://youtu.be/X9AkOAEDV08},\n publisher = {Birmingham City University},\n\ + \ title = {Algorithmic Pattern},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper50.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178003 + doi: 10.5281/zenodo.4813352 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'multimodal displays,rhythmic interaction,sonification,uml' - pages: 477--480 - title: A Structured Design and Evaluation Model with Application to Rhythmic Interaction - Displays - url: http://www.nime.org/proceedings/2011/nime2011_477.pdf - year: 2011 + month: July + pages: 265--270 + presentation-video: https://youtu.be/X9AkOAEDV08 + publisher: Birmingham City University + title: Algorithmic Pattern + url: https://www.nime.org/proceedings/2020/nime2020_paper50.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Marchini2011 - abstract: 'This paper introduces and evaluates a novel methodologyfor the estimation - of bow pressing force in violin performance, aiming at a reduced intrusiveness - while maintaininghigh accuracy. 
The technique is based on using a simplifiedphysical - model of the hair ribbon deflection, and feeding thismodel solely with position - and orientation measurements ofthe bow and violin spatial coordinates. The physical - modelis both calibrated and evaluated using real force data acquired by means - of a load cell.' - address: 'Oslo, Norway' - author: 'Marchini, Marco and Papiotis, Panos and Pérez, Alfonso and Maestre, Esteban' - bibtex: "@inproceedings{Marchini2011,\n abstract = {This paper introduces and evaluates\ - \ a novel methodologyfor the estimation of bow pressing force in violin performance,\ - \ aiming at a reduced intrusiveness while maintaininghigh accuracy. The technique\ - \ is based on using a simplifiedphysical model of the hair ribbon deflection,\ - \ and feeding thismodel solely with position and orientation measurements ofthe\ - \ bow and violin spatial coordinates. The physical modelis both calibrated and\ - \ evaluated using real force data acquired by means of a load cell.},\n address\ - \ = {Oslo, Norway},\n author = {Marchini, Marco and Papiotis, Panos and P\\'{e}rez,\ - \ Alfonso and Maestre, Esteban},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178097},\n\ - \ issn = {2220-4806},\n keywords = {bow pressing force, bow force, pressing force,\ - \ force, violin playing, bow simplified physical model, 6DOF, hair ribbon ends,\ - \ string ends },\n pages = {481--486},\n title = {A Hair Ribbon Deflection Model\ - \ for Low-intrusiveness Measurement of Bow Force in Violin Performance},\n url\ - \ = {http://www.nime.org/proceedings/2011/nime2011_481.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_51 + abstract: 'Interactive machine learning (IML) is an approach to building interactive + systems, including DMIs, focusing on iterative end-user data provision and direct + evaluation. 
This paper describes the implementation of a Javascript library, encapsulating + many of the boilerplate needs of building IML systems for creative tasks with + minimal code inclusion and low barrier to entry. Further, we present a set of + complimentary Audio Worklet-backed instruments to allow for in-browser creation + of new musical systems able to run concurrently with various computationally expensive + feature extractor and lightweight machine learning models without the interference + often seen in interactive Web Audio applications.' + address: 'Birmingham, UK' + author: 'McCallum, Louis and Grierson, Mick S' + bibtex: "@inproceedings{NIME20_51,\n abstract = {Interactive machine learning (IML)\ + \ is an approach to building interactive systems, including DMIs, focusing on\ + \ iterative end-user data provision and direct evaluation. This paper describes\ + \ the implementation of a Javascript library, encapsulating many of the boilerplate\ + \ needs of building IML systems for creative tasks with minimal code inclusion\ + \ and low barrier to entry. 
Further, we present a set of complimentary Audio Worklet-backed\ + \ instruments to allow for in-browser creation of new musical systems able to\ + \ run concurrently with various computationally expensive feature extractor and\ + \ lightweight machine learning models without the interference often seen in interactive\ + \ Web Audio applications.},\n address = {Birmingham, UK},\n author = {McCallum,\ + \ Louis and Grierson, Mick S},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813357},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {271--272},\n publisher = {Birmingham City University},\n\ + \ title = {Supporting Interactive Machine Learning Approaches to Building Musical\ + \ Instruments in the Browser},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper51.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178097 + doi: 10.5281/zenodo.4813357 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'bow pressing force, bow force, pressing force, force, violin playing, - bow simplified physical model, 6DOF, hair ribbon ends, string ends ' - pages: 481--486 - title: A Hair Ribbon Deflection Model for Low-intrusiveness Measurement of Bow Force - in Violin Performance - url: http://www.nime.org/proceedings/2011/nime2011_481.pdf - year: 2011 + month: July + pages: 271--272 + publisher: Birmingham City University + title: Supporting Interactive Machine Learning Approaches to Building Musical Instruments + in the Browser + url: https://www.nime.org/proceedings/2020/nime2020_paper51.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Forsyth2011 - abstract: 'Remixing audio samples is a common technique for the creation of electronic - music, and there are a wide variety oftools available to edit, 
process, and recombine - pre-recordedaudio into new compositions. However, all of these toolsconceive of - the timeline of the pre-recorded audio and theplayback timeline as identical. - In this paper, we introducea dual time axis representation in which these two - timelines are described explicitly. We also discuss the randomaccess remix application - for the iPad, an audio sample editor based on this representation. We describe - an initialuser study with 15 high school students that indicates thatthe random - access remix application has the potential todevelop into a useful and interesting - tool for composers andperformers of electronic music.' - address: 'Oslo, Norway' - author: 'Forsyth, Jon and Glennon, Aron and Bello, Juan P.' - bibtex: "@inproceedings{Forsyth2011,\n abstract = {Remixing audio samples is a common\ - \ technique for the creation of electronic music, and there are a wide variety\ - \ oftools available to edit, process, and recombine pre-recordedaudio into new\ - \ compositions. However, all of these toolsconceive of the timeline of the pre-recorded\ - \ audio and theplayback timeline as identical. In this paper, we introducea dual\ - \ time axis representation in which these two timelines are described explicitly.\ - \ We also discuss the randomaccess remix application for the iPad, an audio sample\ - \ editor based on this representation. 
We describe an initialuser study with 15\ - \ high school students that indicates thatthe random access remix application\ - \ has the potential todevelop into a useful and interesting tool for composers\ - \ andperformers of electronic music.},\n address = {Oslo, Norway},\n author =\ - \ {Forsyth, Jon and Glennon, Aron and Bello, Juan P.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1178011},\n issn = {2220-4806},\n keywords = {interactive\ - \ systems, sample editor, remix, iPad, multi-touch },\n pages = {487--490},\n\ - \ title = {Random Access Remixing on the iPad},\n url = {http://www.nime.org/proceedings/2011/nime2011_487.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_52 + abstract: 'TorqueTuner is an embedded module that allows Digital Musical Instrument + (DMI) designers to map sensors to parameters of haptic effects and dynamically + modify rotary force feedback in real-time. We embedded inside TorqueTuner a collection + of haptic effects (Wall, Magnet, Detents, Spring, Friction, Spin, Free) and a + bi-directional interface through libmapper, a software library for making connections + between data signals on a shared network. To increase affordability and portability + of force-feedback implementations in DMI design, we designed our platform to be + wireless, self-contained and built from commercially available components. To + provide examples of modularity and portability, we integrated TorqueTuner into + a standalone haptic knob and into an existing DMI, the T-Stick. We implemented + 3 musical applications (Pitch wheel, Turntable and Exciter), by mapping sensors + to sound synthesis in audio programming environment SuperCollider. 
While the original + goal was to simulate the haptic feedback associated with turning a knob, we found + that the platform allows for further expanding interaction possibilities in application + scenarios where rotary control is familiar.' + address: 'Birmingham, UK' + author: 'Kirkegaard, Mathias S and Bredholt, Mathias and Frisson, Christian and + Wanderley, Marcelo' + bibtex: "@inproceedings{NIME20_52,\n abstract = {TorqueTuner is an embedded module\ + \ that allows Digital Musical Instrument (DMI) designers to map sensors to parameters\ + \ of haptic effects and dynamically modify rotary force feedback in real-time.\ + \ We embedded inside TorqueTuner a collection of haptic effects (Wall, Magnet,\ + \ Detents, Spring, Friction, Spin, Free) and a bi-directional interface through\ + \ libmapper, a software library for making connections between data signals on\ + \ a shared network. To increase affordability and portability of force-feedback\ + \ implementations in DMI design, we designed our platform to be wireless, self-contained\ + \ and built from commercially available components. To provide examples of modularity\ + \ and portability, we integrated TorqueTuner into a standalone haptic knob and\ + \ into an existing DMI, the T-Stick. We implemented 3 musical applications (Pitch\ + \ wheel, Turntable and Exciter), by mapping sensors to sound synthesis in audio\ + \ programming environment SuperCollider. 
While the original goal was to simulate\ + \ the haptic feedback associated with turning a knob, we found that the platform\ + \ allows for further expanding interaction possibilities in application scenarios\ + \ where rotary control is familiar.},\n address = {Birmingham, UK},\n author =\ + \ {Kirkegaard, Mathias S and Bredholt, Mathias and Frisson, Christian and Wanderley,\ + \ Marcelo},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813359},\n editor\ + \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ + \ pages = {273--278},\n presentation-video = {https://youtu.be/V8WDMbuX9QA},\n\ + \ publisher = {Birmingham City University},\n title = {TorqueTuner: A self contained\ + \ module for designing rotary haptic force feedback for digital musical instruments},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper52.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178011 + doi: 10.5281/zenodo.4813359 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'interactive systems, sample editor, remix, iPad, multi-touch ' - pages: 487--490 - title: Random Access Remixing on the iPad - url: http://www.nime.org/proceedings/2011/nime2011_487.pdf - year: 2011 + month: July + pages: 273--278 + presentation-video: https://youtu.be/V8WDMbuX9QA + publisher: Birmingham City University + title: 'TorqueTuner: A self contained module for designing rotary haptic force feedback + for digital musical instruments' + url: https://www.nime.org/proceedings/2020/nime2020_paper52.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Donald2011 - abstract: 'This paper outlines the formation of the Expanded Performance (EP) trio, - a chamber ensemble comprised of electriccello with sensor bow, augmented digital - percussion, anddigital 
turntable with mixer. Decisions relating to physical set-ups - and control capabilities, sonic identities, andmappings of each instrument, as - well as their roles withinthe ensemble, are explored. The contributions of these - factors to the design of a coherent, expressive ensemble andits emerging performance - practice are considered. The trioproposes solutions to creation, rehearsal and - performanceissues in ensemble live electronics.' - address: 'Oslo, Norway' - author: 'Donald, Erika and Duinker, Ben and Britton, Eliot' - bibtex: "@inproceedings{Donald2011,\n abstract = {This paper outlines the formation\ - \ of the Expanded Performance (EP) trio, a chamber ensemble comprised of electriccello\ - \ with sensor bow, augmented digital percussion, anddigital turntable with mixer.\ - \ Decisions relating to physical set-ups and control capabilities, sonic identities,\ - \ andmappings of each instrument, as well as their roles withinthe ensemble, are\ - \ explored. The contributions of these factors to the design of a coherent, expressive\ - \ ensemble andits emerging performance practice are considered. 
The trioproposes\ - \ solutions to creation, rehearsal and performanceissues in ensemble live electronics.},\n\ - \ address = {Oslo, Norway},\n author = {Donald, Erika and Duinker, Ben and Britton,\ - \ Eliot},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177999},\n issn = {2220-4806},\n\ - \ keywords = {Live electronics, digital performance, mapping, chamber music, ensemble,\ - \ instrument identity },\n pages = {491--494},\n title = {Designing the EP Trio:\ - \ Instrument Identities, Control and Performance Practice in an Electronic Chamber\ - \ Music Ensemble},\n url = {http://www.nime.org/proceedings/2011/nime2011_491.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_53 + abstract: 'Iterative design methods involving children and educators are difficult + to conduct, given both the ethical implications and time commitments understandably + required. The qualitative design process presented here recruits introductory + teacher training students, towards discovering useful design insights relevant + to music education technologies “by proxy”. Therefore, some of the barriers present + in child-computer interaction research are avoided. As an example, the method + is applied to the creation of a block-based music notation system, named Codetta. + Building upon successful educational technologies that intersect both music and + computer programming, Codetta seeks to enable child composition, whilst aiding + generalist educator’s confidence in teaching music.' + address: 'Birmingham, UK' + author: 'Ford, Corey J and Nash, Chris' + bibtex: "@inproceedings{NIME20_53,\n abstract = {Iterative design methods involving\ + \ children and educators are difficult to conduct, given both the ethical implications\ + \ and time commitments understandably required. 
The qualitative design process\ + \ presented here recruits introductory teacher training students, towards discovering\ + \ useful design insights relevant to music education technologies “by proxy”.\ + \ Therefore, some of the barriers present in child-computer interaction research\ + \ are avoided. As an example, the method is applied to the creation of a block-based\ + \ music notation system, named Codetta. Building upon successful educational technologies\ + \ that intersect both music and computer programming, Codetta seeks to enable\ + \ child composition, whilst aiding generalist educator’s confidence in teaching\ + \ music.},\n address = {Birmingham, UK},\n author = {Ford, Corey J and Nash, Chris},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813361},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {279--284},\n presentation-video = {https://youtu.be/fPbZMQ5LEmk},\n publisher\ + \ = {Birmingham City University},\n title = {An Iterative Design ‘by proxy’ Method\ + \ for Developing Educational Music Interfaces},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper53.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177999 + doi: 10.5281/zenodo.4813361 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Live electronics, digital performance, mapping, chamber music, ensemble, - instrument identity ' - pages: 491--494 - title: 'Designing the EP Trio: Instrument Identities, Control and Performance Practice - in an Electronic Chamber Music Ensemble' - url: http://www.nime.org/proceedings/2011/nime2011_491.pdf - year: 2011 + month: July + pages: 279--284 + presentation-video: https://youtu.be/fPbZMQ5LEmk + publisher: Birmingham City University + title: An Iterative Design ‘by 
proxy’ Method for Developing Educational Music Interfaces + url: https://www.nime.org/proceedings/2020/nime2020_paper53.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Fyans2011 - abstract: 'We present observations from two separate studies of spectators'' perceptions - of musical performances, one involvingtwo acoustic instruments, the other two - electronic instruments. Both studies followed the same qualitative method,using - structured interviews to ascertain and compare spectators'' experiences. In this - paper, we focus on outcomespertaining to perceptions of the performers'' skill, - relatingto concepts of embodiment and communities of practice.' - address: 'Oslo, Norway' - author: 'Fyans, A. Cavan and Gurevich, Michael' - bibtex: "@inproceedings{Fyans2011,\n abstract = {We present observations from two\ - \ separate studies of spectators' perceptions of musical performances, one involvingtwo\ - \ acoustic instruments, the other two electronic instruments. Both studies followed\ - \ the same qualitative method,using structured interviews to ascertain and compare\ - \ spectators' experiences. In this paper, we focus on outcomespertaining to perceptions\ - \ of the performers' skill, relatingto concepts of embodiment and communities\ - \ of practice.},\n address = {Oslo, Norway},\n author = {Fyans, A. Cavan and Gurevich,\ - \ Michael},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178019},\n issn\ - \ = {2220-4806},\n keywords = {skill, embodiment, perception, effort, control,\ - \ spectator },\n pages = {495--498},\n title = {Perceptions of Skill in Performances\ - \ with Acoustic and Electronic Instruments},\n url = {http://www.nime.org/proceedings/2011/nime2011_495.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_54 + abstract: 'Probatio is an open-source toolkit for prototyping new digital musical + instruments created in 2016. 
Based on a morphological chart of postures and controls + of musical instruments, it comprises a set of blocks, bases, hubs, and supports + that, when combined, allows designers, artists, and musicians to experiment with + different input devices for musical interaction in different positions and postures. + Several musicians have used the system, and based on these past experiences, we + assembled a list of improvements to implement version 1.0 of the toolkit through + a unique international partnership between two laboratories in Brazil and Canada. + In this paper, we present the original toolkit and its use so far, summarize the + main lessons learned from musicians using it, and present the requirements behind, + and the final design of, v1.0 of the project. We also detail the work developed + in digital fabrication using two different techniques: laser cutting and 3D printing, + comparing their pros and cons. We finally discuss the opportunities and challenges + of fully sharing the project online and replicating its parts in both countries.' + address: 'Birmingham, UK' + author: 'Calegario, Filipe and Wanderley, Marcelo and Tragtenberg, João and Meneses, + Eduardo and Wang, Johnty and Sullivan, John and Franco, Ivan and Kirkegaard, Mathias + S and Bredholt, Mathias and Rohs, Josh' + bibtex: "@inproceedings{NIME20_54,\n abstract = {Probatio is an open-source toolkit\ + \ for prototyping new digital musical instruments created in 2016. Based on a\ + \ morphological chart of postures and controls of musical instruments, it comprises\ + \ a set of blocks, bases, hubs, and supports that, when combined, allows designers,\ + \ artists, and musicians to experiment with different input devices for musical\ + \ interaction in different positions and postures. 
Several musicians have used\ + \ the system, and based on these past experiences, we assembled a list of improvements\ + \ to implement version 1.0 of the toolkit through a unique international partnership\ + \ between two laboratories in Brazil and Canada. In this paper, we present the\ + \ original toolkit and its use so far, summarize the main lessons learned from\ + \ musicians using it, and present the requirements behind, and the final design\ + \ of, v1.0 of the project. We also detail the work developed in digital fabrication\ + \ using two different techniques: laser cutting and 3D printing, comparing their\ + \ pros and cons. We finally discuss the opportunities and challenges of fully\ + \ sharing the project online and replicating its parts in both countries.},\n\ + \ address = {Birmingham, UK},\n author = {Calegario, Filipe and Wanderley, Marcelo\ + \ and Tragtenberg, João and Meneses, Eduardo and Wang, Johnty and Sullivan, John\ + \ and Franco, Ivan and Kirkegaard, Mathias S and Bredholt, Mathias and Rohs, Josh},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813363},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {285--290},\n presentation-video = {https://youtu.be/jkFnZZUA3xs},\n publisher\ + \ = {Birmingham City University},\n title = {Probatio 1.0: collaborative development\ + \ of a toolkit for functional DMI prototypes},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper54.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178019 + doi: 10.5281/zenodo.4813363 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'skill, embodiment, perception, effort, control, spectator ' - pages: 495--498 - title: Perceptions of Skill in Performances with Acoustic and 
Electronic Instruments - url: http://www.nime.org/proceedings/2011/nime2011_495.pdf - year: 2011 + month: July + pages: 285--290 + presentation-video: https://youtu.be/jkFnZZUA3xs + publisher: Birmingham City University + title: 'Probatio 1.0: collaborative development of a toolkit for functional DMI + prototypes' + url: https://www.nime.org/proceedings/2020/nime2020_paper54.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Nishino2011 - address: 'Oslo, Norway' - author: 'Nishino, Hiroki' - bibtex: "@inproceedings{Nishino2011,\n address = {Oslo, Norway},\n author = {Nishino,\ - \ Hiroki},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178123},\n issn = {2220-4806},\n\ - \ keywords = {Computer music, programming language, the psychology of programming,\ - \ usability },\n pages = {499--502},\n title = {Cognitive Issues in Computer Music\ - \ Programming},\n url = {http://www.nime.org/proceedings/2011/nime2011_499.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_55 + abstract: 'We conducted a study which examines mappings from a relatively unexplored + perspective: how they are made. Twelve skilled NIME users designed a mapping from + a T-Stick to a subtractive synthesizer, and were interviewed about their approach + to mapping design. We present a thematic analysis of the interviews, with reference + to data recordings captured while the designers worked. Our results suggest that + the mapping design process is an iterative process that alternates between two + working modes: diffuse exploration and directed experimentation. ' + address: 'Birmingham, UK' + author: 'West, Travis J and Wanderley, Marcelo and Caramiaux, Baptiste' + bibtex: "@inproceedings{NIME20_55,\n abstract = {We conducted a study which examines\ + \ mappings from a relatively unexplored perspective: how they are made. 
Twelve\ + \ skilled NIME users designed a mapping from a T-Stick to a subtractive synthesizer,\ + \ and were interviewed about their approach to mapping design. We present a thematic\ + \ analysis of the interviews, with reference to data recordings captured while\ + \ the designers worked. Our results suggest that the mapping design process is\ + \ an iterative process that alternates between two working modes: diffuse exploration\ + \ and directed experimentation. },\n address = {Birmingham, UK},\n author = {West,\ + \ Travis J and Wanderley, Marcelo and Caramiaux, Baptiste},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813365},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {291--296},\n presentation-video\ + \ = {https://youtu.be/aaoResYjqmE},\n publisher = {Birmingham City University},\n\ + \ title = {Making Mappings: Examining the Design Process},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper55.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178123 + doi: 10.5281/zenodo.4813365 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Computer music, programming language, the psychology of programming, - usability ' - pages: 499--502 - title: Cognitive Issues in Computer Music Programming - url: http://www.nime.org/proceedings/2011/nime2011_499.pdf - year: 2011 + month: July + pages: 291--296 + presentation-video: https://youtu.be/aaoResYjqmE + publisher: Birmingham City University + title: 'Making Mappings: Examining the Design Process' + url: https://www.nime.org/proceedings/2020/nime2020_paper55.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Lamb2011 - abstract: 'This paper introduces the Seaboard, a new tangible musicalinstrument - which aims to provide musicians 
with significantcapability to manipulate sound - in real-time in a musicallyintuitive way. It introduces the core design features - whichmake the Seaboard unique, and describes the motivationand rationale behind - the design. The fundamental approachto dealing with problems associated with discrete - and continuous inputs is summarized.' - address: 'Oslo, Norway' - author: 'Lamb, Roland and Robertson, Andrew' - bibtex: "@inproceedings{Lamb2011,\n abstract = {This paper introduces the Seaboard,\ - \ a new tangible musicalinstrument which aims to provide musicians with significantcapability\ - \ to manipulate sound in real-time in a musicallyintuitive way. It introduces\ - \ the core design features whichmake the Seaboard unique, and describes the motivationand\ - \ rationale behind the design. The fundamental approachto dealing with problems\ - \ associated with discrete and continuous inputs is summarized.},\n address =\ - \ {Oslo, Norway},\n author = {Lamb, Roland and Robertson, Andrew},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1178081},\n issn = {2220-4806},\n keywords\ - \ = {Piano keyboard-related interface, continuous and discrete control, haptic\ - \ feedback, Human-Computer Interaction (HCI) },\n pages = {503--506},\n title\ - \ = {Seaboard : a New Piano Keyboard-related Interface Combining Discrete and\ - \ Continuous Control},\n url = {http://www.nime.org/proceedings/2011/nime2011_503.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_56 + abstract: 'Parthenope is a robotic musical siren developed to produce unique timbres + and sonic gestures. Parthenope uses perforated spinning disks through which air + is directed to produce sound. Computer-control of disk speed and air flow in conjunction + with a variety of nozzles allow pitches to be precisely produced at different + volumes. 
The instrument is controlled via Open Sound Control (OSC) messages sent + over an ethernet connection and can interface with common DAWs and physical controllers. + Parthenope is capable of microtonal tuning, portamenti, rapid and precise articulation + (and thus complex rhythms) and distinct timbres that result from its aerophonic + character. It occupies a unique place among robotic musical instruments.' + address: 'Birmingham, UK' + author: 'Sidler, Michael and Bisson, Matthew C and Grotz, Jordan and Barton, Scott' + bibtex: "@inproceedings{NIME20_56,\n abstract = {Parthenope is a robotic musical\ + \ siren developed to produce unique timbres and sonic gestures. Parthenope uses\ + \ perforated spinning disks through which air is directed to produce sound. Computer-control\ + \ of disk speed and air flow in conjunction with a variety of nozzles allow pitches\ + \ to be precisely produced at different volumes. The instrument is controlled\ + \ via Open Sound Control (OSC) messages sent over an ethernet connection and can\ + \ interface with common DAWs and physical controllers. Parthenope is capable of\ + \ microtonal tuning, portamenti, rapid and precise articulation (and thus complex\ + \ rhythms) and distinct timbres that result from its aerophonic character. 
It\ + \ occupies a unique place among robotic musical instruments.},\n address = {Birmingham,\ + \ UK},\n author = {Sidler, Michael and Bisson, Matthew C and Grotz, Jordan and\ + \ Barton, Scott},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813367},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {297--300},\n presentation-video = {https://youtu.be/HQuR0aBJ70Y},\n\ + \ publisher = {Birmingham City University},\n title = {Parthenope: A Robotic Musical\ + \ Siren},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper56.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178081 + doi: 10.5281/zenodo.4813367 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Piano keyboard-related interface, continuous and discrete control, haptic - feedback, Human-Computer Interaction (HCI) ' - pages: 503--506 - title: 'Seaboard : a New Piano Keyboard-related Interface Combining Discrete and - Continuous Control' - url: http://www.nime.org/proceedings/2011/nime2011_503.pdf - year: 2011 + month: July + pages: 297--300 + presentation-video: https://youtu.be/HQuR0aBJ70Y + publisher: Birmingham City University + title: 'Parthenope: A Robotic Musical Siren' + url: https://www.nime.org/proceedings/2020/nime2020_paper56.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Beyer2011 - address: 'Oslo, Norway' - author: 'Beyer, Gilbert and Meier, Max' - bibtex: "@inproceedings{Beyer2011,\n address = {Oslo, Norway},\n author = {Beyer,\ - \ Gilbert and Meier, Max},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177963},\n\ - \ issn = {2220-4806},\n keywords = {Interactive music, public displays, user experience,\ - \ out-of-home 
media, algorithmic composition, soft constraints },\n pages = {507--510},\n\ - \ title = {Music Interfaces for Novice Users : Composing Music on a Public Display\ - \ with Hand Gestures},\n url = {http://www.nime.org/proceedings/2011/nime2011_507.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_57 + abstract: 'The Tremolo-Harp is a twelve-stringed robotic instrument, where each + string is actuated with a DC vibration motor to produce a mechatronic “tremolo” + effect. It was inspired by instruments and musical styles that employ tremolo + as a primary performance technique, including the hammered dulcimer, pipa, banjo, + flamenco guitar, and surf rock guitar. Additionally, the Tremolo-Harp is designed + to produce long, sustained textures and continuous dynamic variation. These capabilities + represent a different approach from the majority of existing robotic string instruments, + which tend to focus on actuation speed and rhythmic precision. The composition + Tremolo-Harp Study 1 (2019) presents an initial exploration of the Tremolo-Harp’s + unique timbre and capability for continuous dynamic variation. ' + address: 'Birmingham, UK' + author: 'Kemper, Steven' + bibtex: "@inproceedings{NIME20_57,\n abstract = {The Tremolo-Harp is a twelve-stringed\ + \ robotic instrument, where each string is actuated with a DC vibration motor\ + \ to produce a mechatronic “tremolo” effect. It was inspired by instruments and\ + \ musical styles that employ tremolo as a primary performance technique, including\ + \ the hammered dulcimer, pipa, banjo, flamenco guitar, and surf rock guitar. Additionally,\ + \ the Tremolo-Harp is designed to produce long, sustained textures and continuous\ + \ dynamic variation. These capabilities represent a different approach from the\ + \ majority of existing robotic string instruments, which tend to focus on actuation\ + \ speed and rhythmic precision. 
The composition Tremolo-Harp Study 1 (2019) presents\ + \ an initial exploration of the Tremolo-Harp’s unique timbre and capability for\ + \ continuous dynamic variation. },\n address = {Birmingham, UK},\n author = {Kemper,\ + \ Steven},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.4813369},\n editor = {Romain\ + \ Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages\ + \ = {301--304},\n publisher = {Birmingham City University},\n title = {Tremolo-Harp:\ + \ A Vibration-Motor Actuated Robotic String Instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper57.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177963 + doi: 10.5281/zenodo.4813369 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Interactive music, public displays, user experience, out-of-home media, - algorithmic composition, soft constraints ' - pages: 507--510 - title: 'Music Interfaces for Novice Users : Composing Music on a Public Display - with Hand Gestures' - url: http://www.nime.org/proceedings/2011/nime2011_507.pdf - year: 2011 + month: July + pages: 301--304 + publisher: Birmingham City University + title: 'Tremolo-Harp: A Vibration-Motor Actuated Robotic String Instrument' + url: https://www.nime.org/proceedings/2020/nime2020_paper57.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Cappelen2011 - abstract: 'The traditional role of the musical instrument is to be the working tool - of the professional musician. On the instrument the musician performs music for - the audience to listen to. In this paper we present an interactive installation, - where we expand the role of the instrument to motivate musicking and cocreation - between diverse users. 
We have made an open installation, where users can perform - a variety of actions in several situations. By using the abilities of the computer, - we have made an installation, which can be interpreted to have many roles. It - can both be an instrument, a co-musician, a communication partner, a toy, a meeting - place and an ambient musical landscape. The users can dynamically shift between - roles, based on their abilities, knowledge and motivation. ' - address: 'Oslo, Norway' - author: 'Cappelen, Birgitta and Anderson, Anders-Petter' - bibtex: "@inproceedings{Cappelen2011,\n abstract = {The traditional role of the\ - \ musical instrument is to be the working tool of the professional musician. On\ - \ the instrument the musician performs music for the audience to listen to. In\ - \ this paper we present an interactive installation, where we expand the role\ - \ of the instrument to motivate musicking and cocreation between diverse users.\ - \ We have made an open installation, where users can perform a variety of actions\ - \ in several situations. By using the abilities of the computer, we have made\ - \ an installation, which can be interpreted to have many roles. It can both be\ - \ an instrument, a co-musician, a communication partner, a toy, a meeting place\ - \ and an ambient musical landscape. The users can dynamically shift between roles,\ - \ based on their abilities, knowledge and motivation. },\n address = {Oslo, Norway},\n\ - \ author = {Cappelen, Birgitta and Anderson, Anders-Petter},\n booktitle = {Proceedings\ + ID: NIME20_58 + abstract: 'We propose ExSampling: an integrated system of recording application + and Deep Learning environment for a real-time music performance of environmental + sounds sampled by field recording. Automated sound mapping to Ableton Live tracks + by Deep Learning enables field recording to be applied to real-time performance, + and create interactions among sound recorder, composers and performers.' 
+ address: 'Birmingham, UK' + author: 'Kobayashi, Atsuya and Anzai, Reo and Tokui, Nao' + bibtex: "@inproceedings{NIME20_58,\n abstract = {We propose ExSampling: an integrated\ + \ system of recording application and Deep Learning environment for a real-time\ + \ music performance of environmental sounds sampled by field recording. Automated\ + \ sound mapping to Ableton Live tracks by Deep Learning enables field recording\ + \ to be applied to real-time performance, and create interactions among sound\ + \ recorder, composers and performers.},\n address = {Birmingham, UK},\n author\ + \ = {Kobayashi, Atsuya and Anzai, Reo and Tokui, Nao},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177975},\n issn = {2220-4806},\n keywords = {design,genre,interaction,interactive\ - \ installation,music instrument,musicking,narrative,open,role,sound art},\n pages\ - \ = {511--514},\n title = {Expanding the Role of the Instrument},\n url = {http://www.nime.org/proceedings/2011/nime2011_511.pdf},\n\ - \ year = {2011}\n}\n" + \ doi = {10.5281/zenodo.4813371},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {305--308},\n publisher = {Birmingham\ + \ City University},\n title = {ExSampling: a system for the real-time ensemble\ + \ performance of field-recorded environmental sounds},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper58.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177975 + doi: 10.5281/zenodo.4813371 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'design,genre,interaction,interactive installation,music instrument,musicking,narrative,open,role,sound - art' - pages: 511--514 - title: Expanding the Role of the Instrument - url: http://www.nime.org/proceedings/2011/nime2011_511.pdf - 
year: 2011 + month: July + pages: 305--308 + publisher: Birmingham City University + title: 'ExSampling: a system for the real-time ensemble performance of field-recorded + environmental sounds' + url: https://www.nime.org/proceedings/2020/nime2020_paper58.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Todoroff2011 - abstract: 'We developed very small and light sensors, each equippedwith 3-axes accelerometers, - magnetometers and gyroscopes.Those MARG (Magnetic, Angular Rate, and Gravity) - sensors allow for a drift-free attitude computation which in turnleads to the - possibility of recovering the skeleton of bodyparts that are of interest for the - performance, improvingthe results of gesture recognition and allowing to get relative - position between the extremities of the limbs and thetorso of the performer. This - opens new possibilities in termsof mapping. We kept our previous approach developed - atARTeM [2]: wireless from the body to the host computer,but wired through a 4-wire - digital bus on the body. Byrelieving the need for a transmitter on each sensing - node,we could built very light and flat sensor nodes that can bemade invisible - under the clothes. Smaller sensors, coupledwith flexible wires on the body, give - more freedom of movement to dancers despite the need for cables on the body.And - as the weight of each sensor node, box included, isonly 5 grams (Figure 1), they - can also be put on the upper and lower arm and hand of a violin or viola player, - toretrieve the skeleton from the torso to the hand, withoutadding any weight that - would disturb the performer. Weused those sensors in several performances with - a dancingviola player and in one where she was simultaneously controlling gas - flames interactively. We are currently applyingthem to other types of musical - performances.' 
- address: 'Oslo, Norway' - author: 'Todoroff, Todor' - bibtex: "@inproceedings{Todoroff2011,\n abstract = {We developed very small and\ - \ light sensors, each equippedwith 3-axes accelerometers, magnetometers and gyroscopes.Those\ - \ MARG (Magnetic, Angular Rate, and Gravity) sensors allow for a drift-free attitude\ - \ computation which in turnleads to the possibility of recovering the skeleton\ - \ of bodyparts that are of interest for the performance, improvingthe results\ - \ of gesture recognition and allowing to get relative position between the extremities\ - \ of the limbs and thetorso of the performer. This opens new possibilities in\ - \ termsof mapping. We kept our previous approach developed atARTeM [2]: wireless\ - \ from the body to the host computer,but wired through a 4-wire digital bus on\ - \ the body. Byrelieving the need for a transmitter on each sensing node,we could\ - \ built very light and flat sensor nodes that can bemade invisible under the clothes.\ - \ Smaller sensors, coupledwith flexible wires on the body, give more freedom of\ - \ movement to dancers despite the need for cables on the body.And as the weight\ - \ of each sensor node, box included, isonly 5 grams (Figure 1), they can also\ - \ be put on the upper and lower arm and hand of a violin or viola player, toretrieve\ - \ the skeleton from the torso to the hand, withoutadding any weight that would\ - \ disturb the performer. 
Weused those sensors in several performances with a dancingviola\ - \ player and in one where she was simultaneously controlling gas flames interactively.\ - \ We are currently applyingthem to other types of musical performances.},\n address\ - \ = {Oslo, Norway},\n author = {Todoroff, Todor},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1178177},\n issn = {2220-4806},\n keywords = {wireless MARG\ - \ sensors },\n pages = {515--518},\n title = {Wireless Digital/Analog Sensors\ - \ for Music and Dance Performances},\n url = {http://www.nime.org/proceedings/2011/nime2011_515.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_59 + abstract: 'The exploration of musical robots has been an area of interest due to + the timbral and mechanical advantages they offer for music generation and performance. + However, one of the greatest challenges in mechatronic music is to enable these + robots to deliver a nuanced and expressive performance. This depends on their + capability to integrate dynamics, articulation, and a variety of ornamental techniques + while playing a given musical passage. In this paper we introduce a robot arm + pitch shifter for a mechatronic monochord prototype. This is a fast, precise, + and mechanically quiet system that enables sliding techniques during musical performance. + We discuss the design and construction process, as well as the system''s advantages + and restrictions. We also review the quantitative evaluation process used to assess + if the instrument meets the design requirements. This process reveals how the + pitch shifter outperforms existing configurations, and potential areas of improvement + for future work.' 
+ address: 'Birmingham, UK' + author: 'Yepez Placencia, Juan Pablo and Murphy, Jim and Carnegie, Dale' + bibtex: "@inproceedings{NIME20_59,\n abstract = {The exploration of musical robots\ + \ has been an area of interest due to the timbral and mechanical advantages they\ + \ offer for music generation and performance. However, one of the greatest challenges\ + \ in mechatronic music is to enable these robots to deliver a nuanced and expressive\ + \ performance. This depends on their capability to integrate dynamics, articulation,\ + \ and a variety of ornamental techniques while playing a given musical passage.\ + \ In this paper we introduce a robot arm pitch shifter for a mechatronic monochord\ + \ prototype. This is a fast, precise, and mechanically quiet system that enables\ + \ sliding techniques during musical performance. We discuss the design and construction\ + \ process, as well as the system's advantages and restrictions. We also review\ + \ the quantitative evaluation process used to assess if the instrument meets the\ + \ design requirements. 
This process reveals how the pitch shifter outperforms\ + \ existing configurations, and potential areas of improvement for future work.},\n\ + \ address = {Birmingham, UK},\n author = {Yepez Placencia, Juan Pablo and Murphy,\ + \ Jim and Carnegie, Dale},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813375},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {309--314},\n presentation-video = {https://youtu.be/rpX8LTZd-Zs},\n\ + \ publisher = {Birmingham City University},\n title = {Designing an Expressive\ + \ Pitch Shifting Mechanism for Mechatronic Chordophones},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper59.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178177 + doi: 10.5281/zenodo.4813375 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'wireless MARG sensors ' - pages: 515--518 - title: Wireless Digital/Analog Sensors for Music and Dance Performances - url: http://www.nime.org/proceedings/2011/nime2011_515.pdf - year: 2011 + month: July + pages: 309--314 + presentation-video: https://youtu.be/rpX8LTZd-Zs + publisher: Birmingham City University + title: Designing an Expressive Pitch Shifting Mechanism for Mechatronic Chordophones + url: https://www.nime.org/proceedings/2020/nime2020_paper59.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Engum2011 - abstract: 'This paper covers and also describes an ongoing research project focusing - on new artistic possibilities by exchanging music technological methods and techniques - between two distinct musical genres. Through my background as a guitarist and - composer in an experimental metal band I have experienced a vast development in - music technology during the last 20 years. 
This development has made a great impact - in changing the procedures for composing and producing music within my genre without - necessarily changing the strategies of how the technology is used. The transition - from analogue to digital sound technology not only opened up new ways of manipulating - and manoeuvring sound, it also opened up challenges in how to integrate and control - the digital sound technology as a seamless part of my musical genre. By using - techniques and methods known from electro-acoustic/computer music, and adapting - them for use within my tradition, this research aims to find new strategies for - composing and producing music within my genre. ' - address: 'Oslo, Norway' - author: 'Engum, Trond' - bibtex: "@inproceedings{Engum2011,\n abstract = {This paper covers and also describes\ - \ an ongoing research project focusing on new artistic possibilities by exchanging\ - \ music technological methods and techniques between two distinct musical genres.\ - \ Through my background as a guitarist and composer in an experimental metal band\ - \ I have experienced a vast development in music technology during the last 20\ - \ years. This development has made a great impact in changing the procedures for\ - \ composing and producing music within my genre without necessarily changing the\ - \ strategies of how the technology is used. The transition from analogue to digital\ - \ sound technology not only opened up new ways of manipulating and manoeuvring\ - \ sound, it also opened up challenges in how to integrate and control the digital\ - \ sound technology as a seamless part of my musical genre. By using techniques\ - \ and methods known from electro-acoustic/computer music, and adapting them for\ - \ use within my tradition, this research aims to find new strategies for composing\ - \ and producing music within my genre. 
},\n address = {Oslo, Norway},\n author\ - \ = {Engum, Trond},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178001},\n\ - \ issn = {2220-4806},\n keywords = {Artistic research, strategies for composition\ - \ and production, convolution, environmental sounds, real time control },\n pages\ - \ = {519--522},\n title = {Real-time Control and Creative Convolution},\n url\ - \ = {http://www.nime.org/proceedings/2011/nime2011_519.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_6 + abstract: 'Flexible strings with piezoelectric properties have been developed but + until date not evaluated for the use as part of a musical instrument. This paper + is assessing the properties of these new fibers, looking at their possibilities + for NIME applications.' + address: 'Birmingham, UK' + author: 'Ehrhardt, Marcel and Neupert, Max and Wegener, Clemens' + bibtex: "@inproceedings{NIME20_6,\n abstract = {Flexible strings with piezoelectric\ + \ properties have been developed but until date not evaluated for the use as part\ + \ of a musical instrument. 
This paper is assessing the properties of these new\ + \ fibers, looking at their possibilities for NIME applications.},\n address =\ + \ {Birmingham, UK},\n author = {Ehrhardt, Marcel and Neupert, Max and Wegener,\ + \ Clemens},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813377},\n editor\ + \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ + \ pages = {35--36},\n publisher = {Birmingham City University},\n title = {Piezoelectric\ + \ strings as a musical interface},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper6.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178001 + doi: 10.5281/zenodo.4813377 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Artistic research, strategies for composition and production, convolution, - environmental sounds, real time control ' - pages: 519--522 - title: Real-time Control and Creative Convolution - url: http://www.nime.org/proceedings/2011/nime2011_519.pdf - year: 2011 + month: July + pages: 35--36 + publisher: Birmingham City University + title: Piezoelectric strings as a musical interface + url: https://www.nime.org/proceedings/2020/nime2020_paper6.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Bergsland2011 - address: 'Oslo, Norway' - author: 'Bergsland, Andreas' - bibtex: "@inproceedings{Bergsland2011,\n address = {Oslo, Norway},\n author = {Bergsland,\ - \ Andreas},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177959},\n issn\ - \ = {2220-4806},\n keywords = {LPC, software instrument, analysis, modeling, csound\ - \ },\n pages = {523--526},\n title = {Phrases from {P}aul {L}ansky's {S}ix {F}antasies},\n\ - \ url = 
{http://www.nime.org/proceedings/2011/nime2011_523.pdf},\n year = {2011}\n\ - }\n" + ID: NIME20_60 + abstract: 'This paper outlines the development process of an audio-visual gestural + instrument—the AirSticks—and elaborates on the role ‘miming’ has played in the + formation of new mappings for the instrument. The AirSticks, although fully-functioning, + were used as props in live performances in order to evaluate potential mapping + strategies that were later implemented for real. This use of mime when designing + Digital Musical Instruments (DMIs) can help overcome choice paralysis, break from + established habits, and liberate creators to realise more meaningful parameter + mappings. Bringing this process into an interactive performance environment acknowledges + the audience as stakeholders in the design of these instruments, and also leads + us to reflect upon the beliefs and assumptions made by an audience when engaging + with the performance of such ‘magical’ devices. This paper establishes two opposing + strategies to parameter mapping, ‘movement-first’ mapping, and the less conventional + ‘sound-first’ mapping that incorporates mime. We discuss the performance ‘One + Five Nine’, its transformation from a partial mime into a fully interactive presentation, + and the influence this process has had on the outcome of the performance and the + AirSticks as a whole.' + address: 'Birmingham, UK' + author: 'Ilsar, Alon A and Hughes, Matthew and Johnston, Andrew' + bibtex: "@inproceedings{NIME20_60,\n abstract = {This paper outlines the development\ + \ process of an audio-visual gestural instrument—the AirSticks—and elaborates\ + \ on the role ‘miming’ has played in the formation of new mappings for the instrument.\ + \ The AirSticks, although fully-functioning, were used as props in live performances\ + \ in order to evaluate potential mapping strategies that were later implemented\ + \ for real. 
This use of mime when designing Digital Musical Instruments (DMIs)\ + \ can help overcome choice paralysis, break from established habits, and liberate\ + \ creators to realise more meaningful parameter mappings. Bringing this process\ + \ into an interactive performance environment acknowledges the audience as stakeholders\ + \ in the design of these instruments, and also leads us to reflect upon the beliefs\ + \ and assumptions made by an audience when engaging with the performance of such\ + \ ‘magical’ devices. This paper establishes two opposing strategies to parameter\ + \ mapping, ‘movement-first’ mapping, and the less conventional ‘sound-first’ mapping\ + \ that incorporates mime. We discuss the performance ‘One Five Nine’, its transformation\ + \ from a partial mime into a fully interactive presentation, and the influence\ + \ this process has had on the outcome of the performance and the AirSticks as\ + \ a whole.},\n address = {Birmingham, UK},\n author = {Ilsar, Alon A and Hughes,\ + \ Matthew and Johnston, Andrew},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813383},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {315--320},\n presentation-video = {https://youtu.be/ZFQKKI3dFhE},\n\ + \ publisher = {Birmingham City University},\n title = {NIME or Mime: A Sound-First\ + \ Approach to Developing an Audio-Visual Gestural Instrument},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper60.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177959 + doi: 10.5281/zenodo.4813383 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'LPC, software instrument, analysis, modeling, csound ' - pages: 523--526 - title: 'Phrases from {P}aul {L}ansky''s {S}ix {F}antasies' - url: 
http://www.nime.org/proceedings/2011/nime2011_523.pdf - year: 2011 + month: July + pages: 315--320 + presentation-video: https://youtu.be/ZFQKKI3dFhE + publisher: Birmingham City University + title: 'NIME or Mime: A Sound-First Approach to Developing an Audio-Visual Gestural + Instrument' + url: https://www.nime.org/proceedings/2020/nime2020_paper60.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: VonFalkenstein2011 - abstract: 'Gliss is an application for iOS that lets the user sequence five separate - instruments and play them back in various ways. Sequences can be created by drawing - onto the screen while the sequencer is running. The playhead of the sequencer - can be set to randomly deviate from the drawings or can be controlled via the - accelerometer of the device. This makes Gliss a hybrid of a sequencer, an instrument - and a generative music system. ' - address: 'Oslo, Norway' - author: 'von Falkenstein, Jan T.' - bibtex: "@inproceedings{VonFalkenstein2011,\n abstract = {Gliss is an application\ - \ for iOS that lets the user sequence five separate instruments and play them\ - \ back in various ways. Sequences can be created by drawing onto the screen while\ - \ the sequencer is running. The playhead of the sequencer can be set to randomly\ - \ deviate from the drawings or can be controlled via the accelerometer of the\ - \ device. This makes Gliss a hybrid of a sequencer, an instrument and a generative\ - \ music system. 
},\n address = {Oslo, Norway},\n author = {von Falkenstein, Jan\ - \ T.},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178007},\n issn = {2220-4806},\n\ - \ keywords = {Gliss, iOS, iPhone, iPad, interface, UPIC, music, sequencer, accelerometer,\ - \ drawing },\n pages = {527--528},\n title = {Gliss : An Intuitive Sequencer for\ - \ the iPhone and iPad},\n url = {http://www.nime.org/proceedings/2011/nime2011_527.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_61 + abstract: 'This demonstration presents URack, a custom-built audio-visual composition + and performance environment that combines the Unity video-game engine with the + VCV Rack software modular synthesiser. In alternative cross-modal solutions, a + compromise is likely made in either the sonic or visual output, or the consistency + and intuitiveness of the composition environment. By integrating control mechanisms + for graphics inside VCV Rack, the music-making metaphors used to build a patch + are extended into the visual domain. Users familiar with modular synthesizers + are immediately able to start building high-fidelity graphics using the same control + voltages regularly used to compose sound. Without needing to interact with two + separate development environments, languages or metaphorical domains, users are + encouraged to freely, creatively and enjoyably construct their own highly-integrated + audio-visual instruments. This demonstration will showcase the construction of + an audio-visual patch using URack, focusing on the integration of flexible GPU + particle systems present in Unity with the vast library of creative audio composition + modules inside VCV.' 
+ address: 'Birmingham, UK' + author: 'Hughes, Matthew and Johnston, Andrew' + bibtex: "@inproceedings{NIME20_61,\n abstract = {This demonstration presents URack,\ + \ a custom-built audio-visual composition and performance environment that combines\ + \ the Unity video-game engine with the VCV Rack software modular synthesiser.\ + \ In alternative cross-modal solutions, a compromise is likely made in either\ + \ the sonic or visual output, or the consistency and intuitiveness of the composition\ + \ environment. By integrating control mechanisms for graphics inside VCV Rack,\ + \ the music-making metaphors used to build a patch are extended into the visual\ + \ domain. Users familiar with modular synthesizers are immediately able to start\ + \ building high-fidelity graphics using the same control voltages regularly used\ + \ to compose sound. Without needing to interact with two separate development\ + \ environments, languages or metaphorical domains, users are encouraged to freely,\ + \ creatively and enjoyably construct their own highly-integrated audio-visual\ + \ instruments. 
This demonstration will showcase the construction of an audio-visual\ + \ patch using URack, focusing on the integration of flexible GPU particle systems\ + \ present in Unity with the vast library of creative audio composition modules\ + \ inside VCV.},\n address = {Birmingham, UK},\n author = {Hughes, Matthew and\ + \ Johnston, Andrew},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813389},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {321--322},\n publisher = {Birmingham City University},\n\ + \ title = {URack: Audio-visual Composition and Performance using Unity and VCV\ + \ Rack},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper61.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178007 + doi: 10.5281/zenodo.4813389 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Gliss, iOS, iPhone, iPad, interface, UPIC, music, sequencer, accelerometer, - drawing ' - pages: 527--528 - title: 'Gliss : An Intuitive Sequencer for the iPhone and iPad' - url: http://www.nime.org/proceedings/2011/nime2011_527.pdf - year: 2011 + month: July + pages: 321--322 + publisher: Birmingham City University + title: 'URack: Audio-visual Composition and Performance using Unity and VCV Rack' + url: https://www.nime.org/proceedings/2020/nime2020_paper61.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Harriman2011 - abstract: 'This paper describes a new musical instrument inspired by the pedal-steel - guitar, along with its motivations and other considerations. Creating a multi-dimensional, - expressive instrument was the primary driving force. For these criteria the pedal - steel guitar proved an apt model as it allows control over several instrument - parameters simultaneously and continuously. 
The parameters we wanted control over - were volume, timbre, release time and pitch.The Quadrofeelia is played with two - hands on a horizontal surface. Single notes and melodies are easily played as - well as chordal accompaniment with a variety of timbres and release times enabling - a range of legato and staccato notes in an intuitive manner with a new yet familiar - interface.' - address: 'Oslo, Norway' - author: 'Harriman, Jiffer and Casey, Locky and Melvin, Linden' - bibtex: "@inproceedings{Harriman2011,\n abstract = {This paper describes a new musical\ - \ instrument inspired by the pedal-steel guitar, along with its motivations and\ - \ other considerations. Creating a multi-dimensional, expressive instrument was\ - \ the primary driving force. For these criteria the pedal steel guitar proved\ - \ an apt model as it allows control over several instrument parameters simultaneously\ - \ and continuously. The parameters we wanted control over were volume, timbre,\ - \ release time and pitch.The Quadrofeelia is played with two hands on a horizontal\ - \ surface. 
Single notes and melodies are easily played as well as chordal accompaniment\ - \ with a variety of timbres and release times enabling a range of legato and staccato\ - \ notes in an intuitive manner with a new yet familiar interface.},\n address\ - \ = {Oslo, Norway},\n author = {Harriman, Jiffer and Casey, Locky and Melvin,\ - \ Linden},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1178041},\n issn = {2220-4806},\n\ - \ keywords = {NIME, pedal-steel, electronic, slide, demonstration, membrane, continuous,\ - \ ribbon, instrument, polyphony, lead },\n pages = {529--530},\n title = {Quadrofeelia\ - \ -- A New Instrument for Sliding into Notes},\n url = {http://www.nime.org/proceedings/2011/nime2011_529.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_62 + abstract: 'In this work, we have developed a textile-based interactive surface fabricated + through digital knitting technology. Our prototype explores intarsia, interlock + patterning, and a collection of functional and non-functional fibers to create + a piano-pattern textile for expressive and virtuosic sonic interaction. We combined + conductive, thermochromic, and composite yarns with high-flex polyester yarns + to develop KnittedKeyboard with its soft physical properties and responsive sensing + and display capabilities. The individual and combination of each key could simultaneously + sense discrete touch, as well as continuous proximity and pressure. The KnittedKeyboard + enables performers to experience fabric-based multimodal interaction as they explore + the seamless texture and materiality of the electronic textile.' 
+ address: 'Birmingham, UK' + author: 'Wicaksono, Irmandy and Paradiso, Joseph' + bibtex: "@inproceedings{NIME20_62,\n abstract = {In this work, we have developed\ + \ a textile-based interactive surface fabricated through digital knitting technology.\ + \ Our prototype explores intarsia, interlock patterning, and a collection of functional\ + \ and non-functional fibers to create a piano-pattern textile for expressive and\ + \ virtuosic sonic interaction. We combined conductive, thermochromic, and composite\ + \ yarns with high-flex polyester yarns to develop KnittedKeyboard with its soft\ + \ physical properties and responsive sensing and display capabilities. The individual\ + \ and combination of each key could simultaneously sense discrete touch, as well\ + \ as continuous proximity and pressure. The KnittedKeyboard enables performers\ + \ to experience fabric-based multimodal interaction as they explore the seamless\ + \ texture and materiality of the electronic textile.},\n address = {Birmingham,\ + \ UK},\n author = {Wicaksono, Irmandy and Paradiso, Joseph},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813391},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {323--326},\n publisher = {Birmingham\ + \ City University},\n title = {KnittedKeyboard: Digital Knitting of Electronic\ + \ Textile Musical Controllers},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper62.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178041 + doi: 10.5281/zenodo.4813391 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'NIME, pedal-steel, electronic, slide, demonstration, membrane, continuous, - ribbon, instrument, polyphony, lead ' - pages: 529--530 - title: Quadrofeelia -- A New Instrument for 
Sliding into Notes - url: http://www.nime.org/proceedings/2011/nime2011_529.pdf - year: 2011 + month: July + pages: 323--326 + publisher: Birmingham City University + title: 'KnittedKeyboard: Digital Knitting of Electronic Textile Musical Controllers' + url: https://www.nime.org/proceedings/2020/nime2020_paper62.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Wang2011a - address: 'Oslo, Norway' - author: 'Wang, Johnty and d''Alessandro, Nicolas and Fels, Sidney S. and Pritchard, - Bob' - bibtex: "@inproceedings{Wang2011a,\n address = {Oslo, Norway},\n author = {Wang,\ - \ Johnty and d'Alessandro, Nicolas and Fels, Sidney S. and Pritchard, Bob},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178189},\n issn = {2220-4806},\n\ - \ pages = {531--532},\n title = {SQUEEZY : Extending a Multi-touch Screen with\ - \ Force Sensing Objects for Controlling Articulatory Synthesis},\n url = {http://www.nime.org/proceedings/2011/nime2011_531.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_63 + abstract: 'In the context of artistic performances, the complexity and diversity + of digital interfaces may impair the spectator experience, in particular hiding + the engagement and virtuosity of the performers. Artists and researchers have + made attempts at solving this by augmenting performances with additional information + provided through visual, haptic or sonic modalities. However, the proposed techniques + have not yet been formalized and we believe a clarification of their many aspects + is necessary for future research. In this paper, we propose a taxonomy for what + we define as Spectator Experience Augmentation Techniques (SEATs). We use it to + analyse existing techniques and we demonstrate how it can serve as a basis for + the exploration of novel ones.' 
+ address: 'Birmingham, UK' + author: 'Capra, Olivier and Berthaut, Florent and Grisoni, Laurent' + bibtex: "@inproceedings{NIME20_63,\n abstract = {In the context of artistic performances,\ + \ the complexity and diversity of digital interfaces may impair the spectator\ + \ experience, in particular hiding the engagement and virtuosity of the performers.\ + \ Artists and researchers have made attempts at solving this by augmenting performances\ + \ with additional information provided through visual, haptic or sonic modalities.\ + \ However, the proposed techniques have not yet been formalized and we believe\ + \ a clarification of their many aspects is necessary for future research. In this\ + \ paper, we propose a taxonomy for what we define as Spectator Experience Augmentation\ + \ Techniques (SEATs). We use it to analyse existing techniques and we demonstrate\ + \ how it can serve as a basis for the exploration of novel ones.},\n address =\ + \ {Birmingham, UK},\n author = {Capra, Olivier and Berthaut, Florent and Grisoni,\ + \ Laurent},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813396},\n editor\ + \ = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n\ + \ pages = {327--330},\n publisher = {Birmingham City University},\n title = {A\ + \ Taxonomy of Spectator Experience Augmentation Techniques},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper63.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178189 + doi: 10.5281/zenodo.4813396 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - pages: 531--532 - title: 'SQUEEZY : Extending a Multi-touch Screen with Force Sensing Objects for - Controlling Articulatory Synthesis' - url: http://www.nime.org/proceedings/2011/nime2011_531.pdf - year: 2011 + month: July + pages: 
327--330 + publisher: Birmingham City University + title: A Taxonomy of Spectator Experience Augmentation Techniques + url: https://www.nime.org/proceedings/2020/nime2020_paper63.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Choe2011 - abstract: 'In this paper, we suggest a conceptual model of a Web application framework - for the composition and documentation of soundscape and introduce corresponding - prototype projects, SeoulSoundMap and SoundScape Composer. We also survey the - current Web-based sound projects in terms of soundscape documentation. ' - address: 'Oslo, Norway' - author: 'Choe, Souhwan and Lee, Kyogu' - bibtex: "@inproceedings{Choe2011,\n abstract = {In this paper, we suggest a conceptual\ - \ model of a Web application framework for the composition and documentation of\ - \ soundscape and introduce corresponding prototype projects, SeoulSoundMap and\ - \ SoundScape Composer. We also survey the current Web-based sound projects in\ - \ terms of soundscape documentation. },\n address = {Oslo, Norway},\n author =\ - \ {Choe, Souhwan and Lee, Kyogu},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177985},\n\ - \ issn = {2220-4806},\n keywords = {soundscape, web application framework, sound\ - \ archive, sound map, soundscape composition, soundscape documentation. },\n pages\ - \ = {533--534},\n title = {{SW}AF: Towards a Web Application Framework for Composition\ - \ and Documentation of Soundscape},\n url = {http://www.nime.org/proceedings/2011/nime2011_533.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_64 + abstract: 'Existing applications of mobile music tools are often concerned with + the simulation of acoustic or digital musical instruments, extended with graphical + representations of keys, pads, etc. 
Following an intensive review of existing + tools and approaches to mobile music making, we implemented a digital drawing + tool, employing a time-based graphical/gestural interface for music composition + and performance. In this paper, we introduce our Sounding Brush project, through + which we explore music making in various forms with the natural gestures of drawing + and mark making on a tablet device. Subsequently, we present the design and development + of the Sounding Brush application. Utilising this project idea, we discuss the + act of drawing as an activity that is not separated from the act of playing musical + instrument. Drawing is essentially the act of playing music by means of a continuous + process of observation, individualisation and exploring time and space in a unique + way.' + address: 'Birmingham, UK' + author: 'Sen, Sourya and Tahiroğlu, Koray and Lohmann, Julia' + bibtex: "@inproceedings{NIME20_64,\n abstract = {Existing applications of mobile\ + \ music tools are often concerned with the simulation of acoustic or digital\ + \ musical instruments, extended with graphical representations of keys, pads,\ + \ etc. Following an intensive review of existing tools and approaches to mobile\ + \ music making, we implemented a digital drawing tool, employing a time-based\ + \ graphical/gestural interface for music composition and performance. In this\ + \ paper, we introduce our Sounding Brush project, through which we explore music\ + \ making in various forms with the natural gestures of drawing and mark making\ + \ on a tablet device. Subsequently, we present the design and development of the\ + \ Sounding Brush application. Utilising this project idea, we discuss the act\ + \ of drawing as an activity that is not separated from the act of playing musical\ + \ instrument. 
Drawing is essentially the act of playing music by means of a continuous\ + \ process of observation, individualisation and exploring time and space in a\ + \ unique way.},\n address = {Birmingham, UK},\n author = {Sen, Sourya and Tahiroğlu,\ + \ Koray and Lohmann, Julia},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813398},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {331--336},\n presentation-video = {https://youtu.be/7RkGbyGM-Ho},\n\ + \ publisher = {Birmingham City University},\n title = {Sounding Brush: A Tablet\ + \ based Musical Instrument for Drawing and Mark Making},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper64.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177985 + doi: 10.5281/zenodo.4813398 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'soundscape, web application framework, sound archive, sound map, soundscape - composition, soundscape documentation. 
' - pages: 533--534 - title: 'SWAF: Towards a Web Application Framework for Composition and Documentation - of Soundscape' - url: http://www.nime.org/proceedings/2011/nime2011_533.pdf - year: 2011 + month: July + pages: 331--336 + presentation-video: https://youtu.be/7RkGbyGM-Ho + publisher: Birmingham City University + title: 'Sounding Brush: A Tablet based Musical Instrument for Drawing and Mark Making' + url: https://www.nime.org/proceedings/2020/nime2020_paper64.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Schnell2011 - abstract: 'We are presenting a set of applications that have been realized with - the MO modular wireless motion capture deviceand a set of software components - integrated into Max/MSP.These applications, created in the context of artistic - projects,music pedagogy, and research, allow for the gestural reembodiment of - recorded sound and music. They demonstrate a large variety of different "playing - techniques" inmusical performance using wireless motion sensor modulesin conjunction - with gesture analysis and real-time audioprocessing components.' - address: 'Oslo, Norway' - author: 'Schnell, Norbert and Bevilacqua, Frédéric and Rasamimanana, Nicolas and - Blois, Julien and Guédy, Fabrice and Fléty, Emmanuel' - bibtex: "@inproceedings{Schnell2011,\n abstract = {We are presenting a set of applications\ - \ that have been realized with the MO modular wireless motion capture deviceand\ - \ a set of software components integrated into Max/MSP.These applications, created\ - \ in the context of artistic projects,music pedagogy, and research, allow for\ - \ the gestural reembodiment of recorded sound and music. 
They demonstrate a large\ - \ variety of different \"playing techniques\" inmusical performance using wireless\ - \ motion sensor modulesin conjunction with gesture analysis and real-time audioprocessing\ - \ components.},\n address = {Oslo, Norway},\n author = {Schnell, Norbert and Bevilacqua,\ - \ Fr\\'{e}d\\'{e}ric and Rasamimanana, Nicolas and Blois, Julien and Gu\\'{e}dy,\ - \ Fabrice and Fl\\'{e}ty, Emmanuel},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178153},\n\ - \ issn = {2220-4806},\n keywords = {Music, Gesture, Interface, Wireless Sensors,\ - \ Gesture Recognition, Audio Processing, Design, Interaction },\n pages = {535--536},\n\ - \ title = {Playing the \"MO\" -- Gestural Control and Re-Embodiment of Recorded\ - \ Sound and Music},\n url = {http://www.nime.org/proceedings/2011/nime2011_535.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_65 + abstract: 'A deformable musical instrument can take numerous distinct shapes with + its non-rigid features. Building audio synthesis module for such an interface + behaviour can be challenging. In this paper, we present the Al-terity, a non-rigid + musical instrument that comprises a deep learning model with generative adversarial + network architecture and use it for generating audio samples for real-time audio + synthesis. The particular deep learning model we use for this instrument was trained + with existing data set as input for purposes of further experimentation. The main + benefits of the model used are the ability to produce the realistic range of timbre + of the trained data set and the ability to generate new audio samples in real-time, + in the moment of playing, with the characteristics of sounds that the performer + ever heard before. 
We argue that these advanced intelligence features on the + audio synthesis level could allow us to explore performing music with particular + response features that define the instrument''s digital idiomaticity and allow + us reinvent the instrument in the act of music performance.' + address: 'Birmingham, UK' + author: 'Tahiroğlu, Koray and Kastemaa, Miranda and Koli, Oskar' + bibtex: "@inproceedings{NIME20_65,\n abstract = {A deformable musical instrument\ + \ can take numerous distinct shapes with its non-rigid features. Building audio\ + \ synthesis module for such an interface behaviour can be challenging. In this\ + \ paper, we present the Al-terity, a non-rigid musical instrument that comprises\ + \ a deep learning model with generative adversarial network architecture and\ + \ use it for generating audio samples for real-time audio synthesis. The particular\ + \ deep learning model we use for this instrument was trained with existing data\ + \ set as input for purposes of further experimentation. The main benefits of the\ + \ model used are the ability to produce the realistic range of timbre of the trained\ + \ data set and the ability to generate new audio samples in real-time, in the\ + \ moment of playing, with the characteristics of sounds that the performer ever\ + \ heard before. 
We argue that these advanced intelligence features on the audio\ + \ synthesis level could allow us to explore performing music with particular response\ + \ features that define the instrument's digital idiomaticity and allow us reinvent\ + \ the instrument in the act of music performance.},\n address = {Birmingham, UK},\n\ + \ author = {Tahiroğlu, Koray and Kastemaa, Miranda and Koli, Oskar},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813402},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {337--342},\n\ + \ presentation-video = {https://youtu.be/giYxFovZAvQ},\n publisher = {Birmingham\ + \ City University},\n title = {Al-terity: Non-Rigid Musical Instrument with Artificial\ + \ Intelligence Applied to Real-Time Audio Synthesis},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper65.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178153 + doi: 10.5281/zenodo.4813402 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Music, Gesture, Interface, Wireless Sensors, Gesture Recognition, Audio - Processing, Design, Interaction ' - pages: 535--536 - title: Playing the "MO" -- Gestural Control and Re-Embodiment of Recorded Sound - and Music - url: http://www.nime.org/proceedings/2011/nime2011_535.pdf - year: 2011 + month: July + pages: 337--342 + presentation-video: https://youtu.be/giYxFovZAvQ + publisher: Birmingham City University + title: 'Al-terity: Non-Rigid Musical Instrument with Artificial Intelligence Applied + to Real-Time Audio Synthesis' + url: https://www.nime.org/proceedings/2020/nime2020_paper65.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Zamborlin2011 - abstract: '(land)moves is an interactive installation: the user''s gestures control - the multimedia 
processing with a total synergybetween audio and video synthesis - and treatment.' - address: 'Oslo, Norway' - author: 'Zamborlin, Bruno and Partesana, Giorgio and Liuni, Marco' - bibtex: "@inproceedings{Zamborlin2011,\n abstract = {(land)moves is an interactive\ - \ installation: the user's gestures control the multimedia processing with a total\ - \ synergybetween audio and video synthesis and treatment.},\n address = {Oslo,\ - \ Norway},\n author = {Zamborlin, Bruno and Partesana, Giorgio and Liuni, Marco},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1178195},\n issn = {2220-4806},\n\ - \ keywords = {mapping gesture-audio-video, gesture recognition, landscape, soundscape\ - \ },\n pages = {537--538},\n title = {({LAN}D)MOVES},\n url = {http://www.nime.org/proceedings/2011/nime2011_537.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_66 + abstract: 'Feedback instruments offer radical new ways of engaging with instrument + design and musicianship. They are defined by recurrent circulation of signals + through the instrument, which give the instrument ‘a life of its own’ and a ''stimulating + uncontrollability''. Arguably, the most interesting musical behaviour in these + instruments happens when their dynamic complexity is maximised, without falling + into saturating feedback. It is often challenging to keep the instrument in this + zone; this research looks at algorithmic ways to manage the behaviour of feedback + loops in order to make feedback instruments more playable and musical; to expand + and maintain the `sweet spot''. We propose a solution that manages gain dynamics + based on measurement of complexity, using a realtime implementation of the Effort + to Compress algorithm. The system was evaluated with four musicians, each of whom + have different variations of string-based feedback instruments, following an autobiographical + design approach. 
Qualitative feedback was gathered, showing that the system was + successful in modifying the behaviour of these instruments to allow easier access + to edge transition zones, sometimes at the expense of losing some of the more + compelling dynamics of the instruments. The basic efficacy of the system is evidenced + by descriptive audio analysis. This paper is accompanied by a dataset of sounds + collected during the study, and the open source software that was written to support + the research.' + address: 'Birmingham, UK' + author: 'Kiefer, Chris and Overholt, Dan and Eldridge, Alice' + bibtex: "@inproceedings{NIME20_66,\n abstract = {Feedback instruments offer radical\ + \ new ways of engaging with instrument design and musicianship. They are defined\ + \ by recurrent circulation of signals through the instrument, which give the instrument\ + \ ‘a life of its own’ and a 'stimulating uncontrollability'. Arguably, the most\ + \ interesting musical behaviour in these instruments happens when their dynamic\ + \ complexity is maximised, without falling into saturating feedback. It is often\ + \ challenging to keep the instrument in this zone; this research looks at algorithmic\ + \ ways to manage the behaviour of feedback loops in order to make feedback instruments\ + \ more playable and musical; to expand and maintain the `sweet spot'. We propose\ + \ a solution that manages gain dynamics based on measurement of complexity, using\ + \ a realtime implementation of the Effort to Compress algorithm. The system was\ + \ evaluated with four musicians, each of whom have different variations of string-based\ + \ feedback instruments, following an autobiographical design approach. Qualitative\ + \ feedback was gathered, showing that the system was successful in modifying the\ + \ behaviour of these instruments to allow easier access to edge transition zones,\ + \ sometimes at the expense of losing some of the more compelling dynamics of the\ + \ instruments. 
The basic efficacy of the system is evidenced by descriptive audio\ + \ analysis. This paper is accompanied by a dataset of sounds collected during\ + \ the study, and the open source software that was written to support the research.},\n\ + \ address = {Birmingham, UK},\n author = {Kiefer, Chris and Overholt, Dan and\ + \ Eldridge, Alice},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813406},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {343--348},\n presentation-video = {https://youtu.be/sf6FwsUX-84},\n\ + \ publisher = {Birmingham City University},\n title = {Shaping the behaviour of\ + \ feedback instruments with complexity-controlled gain dynamics},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper66.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178195 + doi: 10.5281/zenodo.4813406 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'mapping gesture-audio-video, gesture recognition, landscape, soundscape ' - pages: 537--538 - title: (LAND)MOVES - url: http://www.nime.org/proceedings/2011/nime2011_537.pdf - year: 2011 + month: July + pages: 343--348 + presentation-video: https://youtu.be/sf6FwsUX-84 + publisher: Birmingham City University + title: Shaping the behaviour of feedback instruments with complexity-controlled + gain dynamics + url: https://www.nime.org/proceedings/2020/nime2020_paper66.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Verplank2011 - abstract: 'Haptic interfaces using active force-feedback have mostly been used for - emulating existing instruments and making conventional music. With the right speed, - force, precision and software they can also be used to make new sounds and perhaps - new music. 
The requirements are local microprocessors (for low-latency and high - update rates), strategic sensors (for force as well as position), and non-linear - dynamics (that make for rich overtones and chaotic music).' - address: 'Oslo, Norway' - author: 'Verplank, Bill and Georg, Francesco' - bibtex: "@inproceedings{Verplank2011,\n abstract = {Haptic interfaces using active\ - \ force-feedback have mostly been used for emulating existing instruments and\ - \ making conventional music. With the right speed, force, precision and software\ - \ they can also be used to make new sounds and perhaps new music. The requirements\ - \ are local microprocessors (for low-latency and high update rates), strategic\ - \ sensors (for force as well as position), and non-linear dynamics (that make\ - \ for rich overtones and chaotic music).},\n address = {Oslo, Norway},\n author\ - \ = {Verplank, Bill and Georg, Francesco},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178183},\n\ - \ issn = {2220-4806},\n keywords = {NIME, Haptics, Music Controllers, Microprocessors.\ - \ },\n pages = {539--540},\n title = {Can Haptics Make New Music ? -- Fader and\ - \ Plank Demos},\n url = {http://www.nime.org/proceedings/2011/nime2011_539.pdf},\n\ - \ year = {2011}\n}\n" + ID: NIME20_67 + abstract: 'Brain-computer interfacing (BCI) offers novel methods to facilitate participation + in audio engineering, providing access for individuals who might otherwise be + unable to take part (either due to lack of training, or physical disability). This + paper describes the development of a BCI system for conscious, or ‘active’, control + of parameters on an audio mixer by generation of synchronous MIDI Machine Control + messages. 
The mapping between neurophysiological cues and audio parameter must + be intuitive for a neophyte audience (i.e., one without prior training or the + physical skills developed by professional audio engineers when working with tactile + interfaces). The prototype is dubbed MINDMIX (a portmanteau of ‘mind’ and ‘mixer’), + combining discrete and many-to-many mappings of audio mixer parameters and BCI + control signals measured via Electronecephalograph (EEG). In future, specific + evaluation of discrete mappings would be useful for iterative system design.' + address: 'Birmingham, UK' + author: 'Williams, Duncan A.H.' + bibtex: "@inproceedings{NIME20_67,\n abstract = {Brain-computer interfacing (BCI)\ + \ offers novel methods to facilitate participation in audio engineering, providing\ + \ access for individuals who might otherwise be unable to take part (either due\ + \ to lack of training, or physical disability). This paper describes the development\ + \ of a BCI system for conscious, or ‘active’, control of parameters on an audio\ + \ mixer by generation of synchronous MIDI Machine Control messages. The mapping\ + \ between neurophysiological cues and audio parameter must be intuitive for a\ + \ neophyte audience (i.e., one without prior training or the physical skills developed\ + \ by professional audio engineers when working with tactile interfaces). The prototype\ + \ is dubbed MINDMIX (a portmanteau of ‘mind’ and ‘mixer’), combining discrete\ + \ and many-to-many mappings of audio mixer parameters and BCI control signals\ + \ measured via Electronecephalograph (EEG). 
In future, specific evaluation of\ + \ discrete mappings would be useful for iterative system design.},\n address =\ + \ {Birmingham, UK},\n author = {Williams, Duncan A.H.},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813408},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {349--352},\n publisher = {Birmingham\ + \ City University},\n title = {MINDMIX: Mapping of brain activity to congruent\ + \ audio mixing features},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper67.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1178183 + doi: 10.5281/zenodo.4813408 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'NIME, Haptics, Music Controllers, Microprocessors. ' - pages: 539--540 - title: 'Can Haptics Make New Music ? -- Fader and Plank Demos' - url: http://www.nime.org/proceedings/2011/nime2011_539.pdf - year: 2011 + month: July + pages: 349--352 + publisher: Birmingham City University + title: 'MINDMIX: Mapping of brain activity to congruent audio mixing features' + url: https://www.nime.org/proceedings/2020/nime2020_paper67.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Vallis2010 - abstract: 'The aim of this paper is to define the process of iterative interface - design as it pertains to musical performance. Embodying this design approach, - the Monome OSC/MIDI USB controller represents a minimalist, open-source hardware - device. The open-source nature of the device has allowed for a small group of - Monome users to modify the hardware, firmware, and software associated with the - interface. These user driven modifications have allowed the re-imagining of the - interface for new and novel purposes, beyond even that of the device''s original - intentions. 
With development being driven by a community of users, a device can - become several related but unique generations of musical controllers, each one - focused on a specific set of needs. ' - address: 'Sydney, Australia' - author: 'Vallis, Owen and Hochenbaum, Jordan and Kapur, Ajay' - bibtex: "@inproceedings{Vallis2010,\n abstract = {The aim of this paper is to define\ - \ the process of iterative interface design as it pertains to musical performance.\ - \ Embodying this design approach, the Monome OSC/MIDI USB controller represents\ - \ a minimalist, open-source hardware device. The open-source nature of the device\ - \ has allowed for a small group of Monome users to modify the hardware, firmware,\ - \ and software associated with the interface. These user driven modifications\ - \ have allowed the re-imagining of the interface for new and novel purposes, beyond\ - \ even that of the device's original intentions. With development being driven\ - \ by a community of users, a device can become several related but unique generations\ - \ of musical controllers, each one focused on a specific set of needs. },\n address\ - \ = {Sydney, Australia},\n author = {Vallis, Owen and Hochenbaum, Jordan and Kapur,\ - \ Ajay},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177919},\n issn = {2220-4806},\n\ - \ keywords = {Iterative Design, Monome, Arduinome, Arduino.},\n pages = {1--6},\n\ - \ title = {A Shift Towards Iterative and Open-Source Design for Musical Interfaces},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_001.pdf},\n year = {2010}\n\ - }\n" + ID: NIME20_68 + abstract: 'We present SQUISHBOI, a continuous touch controller for interacting with + complex musical systems. An elastic rubber membrane forms the playing surface + of the instrument, while machine learning is used for dimensionality reduction + and gesture recognition. 
The membrane is stretched over a hollow shell which permits + considerable depth excursion, with an array of distance sensors tracking the surface + displacement from underneath. The inherent dynamics of the membrane lead to cross-coupling + between nearby sensors, however we do not see this as a flaw or limitation. Instead + we find this coupling gives structure to the playing techniques and mapping schemes + chosen by the user. The instrument is best utilized as a tool for actively designing + abstraction and forming a relative control structure within a given system, one + which allows for intuitive gestural control beyond what can be accomplished with + conventional musical controllers.' + address: 'Birmingham, UK' + author: 'DeSmith, Marcel O and Piepenbrink, Andrew and Kapur, Ajay' + bibtex: "@inproceedings{NIME20_68,\n abstract = {We present SQUISHBOI, a continuous\ + \ touch controller for interacting with complex musical systems. An elastic rubber\ + \ membrane forms the playing surface of the instrument, while machine learning\ + \ is used for dimensionality reduction and gesture recognition. The membrane is\ + \ stretched over a hollow shell which permits considerable depth excursion, with\ + \ an array of distance sensors tracking the surface displacement from underneath.\ + \ The inherent dynamics of the membrane lead to cross-coupling between nearby\ + \ sensors, however we do not see this as a flaw or limitation. Instead we find\ + \ this coupling gives structure to the playing techniques and mapping schemes\ + \ chosen by the user. 
The instrument is best utilized as a tool for actively designing\ + \ abstraction and forming a relative control structure within a given system,\ + \ one which allows for intuitive gestural control beyond what can be accomplished\ + \ with conventional musical controllers.},\n address = {Birmingham, UK},\n author\ + \ = {DeSmith, Marcel O and Piepenbrink, Andrew and Kapur, Ajay},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813412},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {353--356},\n publisher = {Birmingham\ + \ City University},\n title = {SQUISHBOI: A Multidimensional Controller for Complex\ + \ Musical Interactions using Machine Learning},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper68.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177919 + doi: 10.5281/zenodo.4813412 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Iterative Design, Monome, Arduinome, Arduino.' - pages: 1--6 - title: A Shift Towards Iterative and Open-Source Design for Musical Interfaces - url: http://www.nime.org/proceedings/2010/nime2010_001.pdf - year: 2010 + month: July + pages: 353--356 + publisher: Birmingham City University + title: 'SQUISHBOI: A Multidimensional Controller for Complex Musical Interactions + using Machine Learning' + url: https://www.nime.org/proceedings/2020/nime2020_paper68.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Maruyama2010 - abstract: 'Musical instruments have a long history, and many types of musical instruments - have been created to attain ideal sound production. At the same time, various - types of electronic musical instruments have been developed. 
Since the main purpose - of conventional electronic instruments is to duplicate the shape of acoustic instruments - with no change in their hardware configuration, the diapason and the performance - style of each instrument is inflexible. Therefore, the goal of our study is to - construct the UnitInstrument that consists of various types of musical units. - A unit is constructed by simulating functional elements of conventional musical - instruments, such as output timing of sound and pitch decision. Each unit has - connectors for connecting other units to create various types of musical instruments. - Additionally, we propose a language for easily and flexibly describing the settings - of units. We evaluated the effectiveness of our proposed system by using it in - actual performances.' - address: 'Sydney, Australia' - author: 'Maruyama, Yutaro and Takegawa, Yoshinari and Terada, Tsutomu and Tsukamoto, - Masahiko' - bibtex: "@inproceedings{Maruyama2010,\n abstract = {Musical instruments have a long\ - \ history, and many types of musical instruments have been created to attain ideal\ - \ sound production. At the same time, various types of electronic musical instruments\ - \ have been developed. Since the main purpose of conventional electronic instruments\ - \ is to duplicate the shape of acoustic instruments with no change in their hardware\ - \ configuration, the diapason and the performance style of each instrument is\ - \ inflexible. Therefore, the goal of our study is to construct the UnitInstrument\ - \ that consists of various types of musical units. A unit is constructed by simulating\ - \ functional elements of conventional musical instruments, such as output timing\ - \ of sound and pitch decision. Each unit has connectors for connecting other units\ - \ to create various types of musical instruments. Additionally, we propose a language\ - \ for easily and flexibly describing the settings of units. 
We evaluated the effectiveness\ - \ of our proposed system by using it in actual performances.},\n address = {Sydney,\ - \ Australia},\n author = {Maruyama, Yutaro and Takegawa, Yoshinari and Terada,\ - \ Tsutomu and Tsukamoto, Masahiko},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177845},\n\ - \ issn = {2220-4806},\n keywords = {Musical instruments, Script language},\n pages\ - \ = {7--12},\n title = {UnitInstrument : Easy Configurable Musical Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_007.pdf},\n year = {2010}\n\ + ID: NIME20_69 + abstract: 'Digital technologies play a fundamental role in New Interfaces for Musical + Expression as well as music making and consumption more widely. This paper reports + on two workshops with music professionals and researchers who undertook an initial + exploration of the differences between digital platforms (software and online + services) for music in the UK and China. Differences were found in primary target + user groups of digital platforms in the UK and China as well as the stages of + the culture creation cycle they were developed for. Reasons for the divergence + of digital platforms include differences in culture, regulation, and infrastructure, + as well as the inherent Western bias of software for music making such as Digital + Audio Workstations. Using AI to bridge between Western and Chinese music traditions + is suggested as an opportunity to address aspects of the divergent landscape of + digital platforms for music inside and outside China.' + address: 'Birmingham, UK' + author: 'Bryan-Kinns, Nick and ZIJIN, LI and Sun, Xiaohua' + bibtex: "@inproceedings{NIME20_69,\n abstract = {Digital technologies play a fundamental\ + \ role in New Interfaces for Musical Expression as well as music making and consumption\ + \ more widely. 
This paper reports on two workshops with music professionals\ + \ and researchers who undertook an initial exploration of the differences between\ + \ digital platforms (software and online services) for music in the UK and China.\ + \ Differences were found in primary target user groups of digital platforms in\ + \ the UK and China as well as the stages of the culture creation cycle they were\ + \ developed for. Reasons for the divergence of digital platforms include differences\ + \ in culture, regulation, and infrastructure, as well as the inherent Western\ + \ bias of software for music making such as Digital Audio Workstations. Using\ + \ AI to bridge between Western and Chinese music traditions is suggested as an\ + \ opportunity to address aspects of the divergent landscape of digital platforms\ + \ for music inside and outside China.},\n address = {Birmingham, UK},\n author\ + \ = {Bryan-Kinns, Nick and ZIJIN, LI and Sun, Xiaohua},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813414},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {357--360},\n presentation-video\ + \ = {https://youtu.be/c7nkCBBTnDA},\n publisher = {Birmingham City University},\n\ + \ title = {On Digital Platforms and AI for Music in the UK and China},\n url =\ + \ {https://www.nime.org/proceedings/2020/nime2020_paper69.pdf},\n year = {2020}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177845 + doi: 10.5281/zenodo.4813414 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Musical instruments, Script language' - pages: 7--12 - title: 'UnitInstrument : Easy Configurable Musical Instruments' - url: http://www.nime.org/proceedings/2010/nime2010_007.pdf - year: 2010 + month: July + pages: 357--360 + presentation-video: 
https://youtu.be/c7nkCBBTnDA + publisher: Birmingham City University + title: On Digital Platforms and AI for Music in the UK and China + url: https://www.nime.org/proceedings/2020/nime2020_paper69.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Mulder2010 - abstract: 'With the author’s own experiences in mind, this paper argues that, when - used to amplify musical instruments or to play back other sonic material to an - audience, loudspeakers and the technology that drives them, can be considered - as a musical instrument. Particularly in situations with acoustic instruments - this perspective can provide insight into the often cumbersome relation between - the –technology orientated– sound engineer and the –music orientated– performer. - Playing a musical instrument (whether acoustic, electric or electronic) involves - navigating often complicated but very precise interfaces. The interface for sound - amplification technology in a certain environment is not limited to the control - surface of a mixing desk but includes the interaction with other stakeholder, - i.e. the performers and the choice of loudspeakers and microphones and their positions. - As such this interface can be as accurate and intimate but also as complicated - as the interfaces of ''normal'' musical instruments. By zooming in on differences - between acoustic and electronic sources a step is taken towards inclusion in this - discussion of the perception of amplified music and the possible influence of - that amplification on performance practise.' - address: 'Sydney, Australia' - author: 'Mulder, Jos' - bibtex: "@inproceedings{Mulder2010,\n abstract = {With the author’s own experiences\ - \ in mind, this paper argues that, when used to amplify musical instruments or\ - \ to play back other sonic material to an audience, loudspeakers and the technology\ - \ that drives them, can be considered as a musical instrument. 
Particularly in\ - \ situations with acoustic instruments this perspective can provide insight into\ - \ the often cumbersome relation between the –technology orientated– sound engineer\ - \ and the –music orientated– performer. Playing a musical instrument (whether\ - \ acoustic, electric or electronic) involves navigating often complicated but\ - \ very precise interfaces. The interface for sound amplification technology in\ - \ a certain environment is not limited to the control surface of a mixing desk\ - \ but includes the interaction with other stakeholder, i.e. the performers and\ - \ the choice of loudspeakers and microphones and their positions. As such this\ - \ interface can be as accurate and intimate but also as complicated as the interfaces\ - \ of 'normal' musical instruments. By zooming in on differences between acoustic\ - \ and electronic sources a step is taken towards inclusion in this discussion\ - \ of the perception of amplified music and the possible influence of that amplification\ - \ on performance practise.},\n address = {Sydney, Australia},\n author = {Mulder,\ - \ Jos},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177861},\n issn = {2220-4806},\n\ - \ keywords = {Sound technology (amplification), musical instruments, multi modal\ - \ perception, performance practice.},\n pages = {13--18},\n title = {The Loudspeaker\ - \ as Musical Instrument},\n url = {http://www.nime.org/proceedings/2010/nime2010_013.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_7 + abstract: 'Digitally integrating the materiality, form, and tactility in everyday + objects (e.g., pottery) provides inspiration for new ways of musical expression + and performance. In this project we reinterpret the creative process and aesthetic + philosophy of pottery as algorithmic music to help users rediscover the latent + story behind pottery through a synesthetic experience. 
Projects Mobius I and Mobius + II illustrate two potential directions toward a musical interface, one focusing + on the circular form, and the other, on graphical ornaments of pottery. Six conductive + graphics on the pottery function as capacitive sensors while retaining their resemblance + to traditional ornamental patterns in pottery. Offering pottery as a musical interface, + we invite users to orchestrate algorithmic music by physically touching the different + graphics.' + address: 'Birmingham, UK' + author: 'Chu, Jean and Choi, Jaewon' + bibtex: "@inproceedings{NIME20_7,\n abstract = {Digitally integrating the materiality,\ + \ form, and tactility in everyday objects (e.g., pottery) provides inspiration\ + \ for new ways of musical expression and performance. In this project we reinterpret\ + \ the creative process and aesthetic philosophy of pottery as algorithmic music\ + \ to help users rediscover the latent story behind pottery through a synesthetic\ + \ experience. Projects Mobius I and Mobius II illustrate two potential directions\ + \ toward a musical interface, one focusing on the circular form, and the other,\ + \ on graphical ornaments of pottery. Six conductive graphics on the pottery function\ + \ as capacitive sensors while retaining their resemblance to traditional ornamental\ + \ patterns in pottery. 
Offering pottery as a musical interface, we invite users\ + \ to orchestrate algorithmic music by physically touching the different graphics.},\n\ + \ address = {Birmingham, UK},\n author = {Chu, Jean and Choi, Jaewon},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813416},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {37--38},\n\ + \ publisher = {Birmingham City University},\n title = {Reinterpretation of Pottery\ + \ as a Musical Interface},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper7.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177861 + doi: 10.5281/zenodo.4813416 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Sound technology (amplification), musical instruments, multi modal perception, - performance practice.' - pages: 13--18 - title: The Loudspeaker as Musical Instrument - url: http://www.nime.org/proceedings/2010/nime2010_013.pdf - year: 2010 + month: July + pages: 37--38 + publisher: Birmingham City University + title: Reinterpretation of Pottery as a Musical Interface + url: https://www.nime.org/proceedings/2020/nime2020_paper7.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Ciglar2010 - abstract: 'This paper, describes the second phase of an ongoing research project - dealing with the implementation of an interactive interface. It is a "hands free" - instrument, utilizing a non-contact tactile feedback method based on airborne - ultrasound. The three main elements/components of the interface that will be discussed - in this paper are: 1. Generation of audible sound by self-demodulation of an ultrasound - signal during its propagation through air; 2. 
The condensation of the ultrasound - energy in one spatial point generating a precise tactile reproduction of the audible - sound; and 3. The feed-forward method enabling a real-time intervention of the - musician, by shaping the tactile (ultra)sound directly with his hands.' - address: 'Sydney, Australia' - author: 'Ciglar, Miha' - bibtex: "@inproceedings{Ciglar2010,\n abstract = {This paper, describes the second\ - \ phase of an ongoing research project dealing with the implementation of an interactive\ - \ interface. It is a \"hands free\" instrument, utilizing a non-contact tactile\ - \ feedback method based on airborne ultrasound. The three main elements/components\ - \ of the interface that will be discussed in this paper are: 1. Generation of\ - \ audible sound by self-demodulation of an ultrasound signal during its propagation\ - \ through air; 2. The condensation of the ultrasound energy in one spatial point\ - \ generating a precise tactile reproduction of the audible sound; and 3. The feed-forward\ - \ method enabling a real-time intervention of the musician, by shaping the tactile\ - \ (ultra)sound directly with his hands.},\n address = {Sydney, Australia},\n author\ - \ = {Ciglar, Miha},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177745},\n\ - \ issn = {2220-4806},\n keywords = {haptics, vibro-tactility, feedback, ultrasound,\ - \ hands-free interface, nonlinear acoustics, parametric array.},\n pages = {19--22},\n\ - \ title = {An Ultrasound Based Instrument Generating Audible and Tactile Sound},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_019.pdf},\n year = {2010}\n\ - }\n" + ID: NIME20_70 + abstract: 'In this paper we adopt the theory of force dynamics in human cognition + as a fundamental design principle for the development of mid-air musical interfaces. 
+ We argue that this principle can provide more intuitive user experiences when + the interface does not provide direct haptic feedback – such as interfaces made + with various gesture-tracking technologies. Grounded in five concepts from the + theoretical literature on force dynamics in musical cognition, the paper presents + a set of principles for interaction design focused on five force schemas: Path + restraint, Containment restraint, Counter-force, Attraction, and Compulsion. We + describe an initial set of examples that implement these principles using a Leap + Motion sensor for gesture tracking and SuperCollider for interactive audio design. + Finally, the paper presents a pilot experiment that provides initial ratings of + intuitiveness in the user experience.' + address: 'Birmingham, UK' + author: 'Eskildsen, Anders and Walther-Hansen, Mads' + bibtex: "@inproceedings{NIME20_70,\n abstract = {In this paper we adopt the theory\ + \ of force dynamics in human cognition as a fundamental design principle for the\ + \ development of mid-air musical interfaces. We argue that this principle can\ + \ provide more intuitive user experiences when the interface does not provide\ + \ direct haptic feedback – such as interfaces made with various gesture-tracking\ + \ technologies. Grounded in five concepts from the theoretical literature on force\ + \ dynamics in musical cognition, the paper presents a set of principles for interaction\ + \ design focused on five force schemas: Path restraint, Containment restraint,\ + \ Counter-force, Attraction, and Compulsion. We describe an initial set of examples\ + \ that implement these principles using a Leap Motion sensor for gesture tracking\ + \ and SuperCollider for interactive audio design. 
Finally, the paper presents\ + \ a pilot experiment that provides initial ratings of intuitiveness in the user\ + \ experience.},\n address = {Birmingham, UK},\n author = {Eskildsen, Anders and\ + \ Walther-Hansen, Mads},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813418},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {361--366},\n presentation-video = {https://youtu.be/REe967aGVN4},\n\ + \ publisher = {Birmingham City University},\n title = {Force dynamics as a design\ + \ framework for mid-air musical interfaces},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper70.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177745 + doi: 10.5281/zenodo.4813418 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'haptics, vibro-tactility, feedback, ultrasound, hands-free interface, - nonlinear acoustics, parametric array.' - pages: 19--22 - title: An Ultrasound Based Instrument Generating Audible and Tactile Sound - url: http://www.nime.org/proceedings/2010/nime2010_019.pdf - year: 2010 + month: July + pages: 361--366 + presentation-video: https://youtu.be/REe967aGVN4 + publisher: Birmingham City University + title: Force dynamics as a design framework for mid-air musical interfaces + url: https://www.nime.org/proceedings/2020/nime2020_paper70.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Hayes2010 - abstract: 'The Neurohedron is a multi-modal interface for a nonlinear sequencer - software model, embodied physically in a dodecahedron. The faces of the dodecahedron - are both inputs and outputs, allowing the device to visualize the activity of - the software model as well as convey input to it. 
The software model maps MIDI - notes to the faces of the device, and defines and controls the behavior of the - sequencer''s progression around its surface, resulting in a unique instrument - for computer-based performance and composition. ' - address: 'Sydney, Australia' - author: 'Hayes, Ted' - bibtex: "@inproceedings{Hayes2010,\n abstract = {The Neurohedron is a multi-modal\ - \ interface for a nonlinear sequencer software model, embodied physically in a\ - \ dodecahedron. The faces of the dodecahedron are both inputs and outputs, allowing\ - \ the device to visualize the activity of the software model as well as convey\ - \ input to it. The software model maps MIDI notes to the faces of the device,\ - \ and defines and controls the behavior of the sequencer's progression around\ - \ its surface, resulting in a unique instrument for computer-based performance\ - \ and composition. },\n address = {Sydney, Australia},\n author = {Hayes, Ted},\n\ + ID: NIME20_71 + abstract: 'Continuous MIDI controllers commonly output their position only, with + no influence of the performative energy with which they were set. In this paper, + creative uses of time as a parameter in continuous controller mapping are demonstrated: + the speed of movement affects the position mapping and control output. A set of + SuperCollider classes are presented, developed in the author’s practice in computer + music, where they have been used together with commercial MIDI controllers. The + creative applications employ various approaches and metaphors for scaling time, + but also machine learning for recognising patterns. In the techniques, performer, + controller and synthesis ‘intra-act’, to use Karen Barad’s term: because position + and velocity are derived from the same data, sound output cannot be predicted + without the temporal context of performance.' 
+ address: 'Birmingham, UK' + author: 'Nyström, Erik' + bibtex: "@inproceedings{NIME20_71,\n abstract = {Continuous MIDI controllers commonly\ + \ output their position only, with no influence of the performative energy with\ + \ which they were set. In this paper, creative uses of time as a parameter in\ + \ continuous controller mapping are demonstrated: the speed of movement affects\ + \ the position mapping and control output. A set of SuperCollider classes are\ + \ presented, developed in the author’s practice in computer music, where they\ + \ have been used together with commercial MIDI controllers. The creative applications\ + \ employ various approaches and metaphors for scaling time, but also machine learning\ + \ for recognising patterns. In the techniques, performer, controller and synthesis\ + \ ‘intra-act’, to use Karen Barad’s term: because position and velocity are derived\ + \ from the same data, sound output cannot be predicted without the temporal context\ + \ of performance.},\n address = {Birmingham, UK},\n author = {Nyström, Erik},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177799},\n issn = {2220-4806},\n\ - \ keywords = {controller, human computer interaction, interface, live performance,\ - \ neural network, sequencer},\n pages = {23--25},\n title = {Neurohedron : A Nonlinear\ - \ Sequencer Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_023.pdf},\n\ - \ year = {2010}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.4813420},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {367--368},\n publisher = {Birmingham City University},\n title = {Intra-Actions:\ + \ Experiments with Velocity and Position in Continuous Controllers},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper71.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International 
Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177799 + doi: 10.5281/zenodo.4813420 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'controller, human computer interaction, interface, live performance, - neural network, sequencer' - pages: 23--25 - title: 'Neurohedron : A Nonlinear Sequencer Interface' - url: http://www.nime.org/proceedings/2010/nime2010_023.pdf - year: 2010 + month: July + pages: 367--368 + publisher: Birmingham City University + title: 'Intra-Actions: Experiments with Velocity and Position in Continuous Controllers' + url: https://www.nime.org/proceedings/2020/nime2020_paper71.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Umetani2010 - abstract: 'We introduce an interactive interface for the custom designof metallophones. - The shape of each plate must be determined in the design process so that the metallophone - willproduce the proper tone when struck with a mallet. Unfortunately, the relationship - between plate shape and tone iscomplex, which makes it difficult to design plates - with arbitrary shapes. Our system addresses this problem by runninga concurrent - numerical eigenanalysis during interactive geometry editing. It continuously presents - a predicted tone tothe user with both visual and audio feedback, thus makingit - possible to design a plate with any desired shape and tone.We developed this system - to demonstrate the effectivenessof integrating real-time finite element method - analysis intogeometric editing to facilitate the design of custom-mademusical - instruments. An informal study demonstrated theability of technically unsophisticated - user to apply the system to complex metallophone design.' - address: 'Sydney, Australia' - author: 'Umetani, Nobuyuki and Mitani, Jun and Igarashi, Takeo' - bibtex: "@inproceedings{Umetani2010,\n abstract = {We introduce an interactive interface\ - \ for the custom designof metallophones. 
The shape of each plate must be determined\ - \ in the design process so that the metallophone willproduce the proper tone when\ - \ struck with a mallet. Unfortunately, the relationship between plate shape and\ - \ tone iscomplex, which makes it difficult to design plates with arbitrary shapes.\ - \ Our system addresses this problem by runninga concurrent numerical eigenanalysis\ - \ during interactive geometry editing. It continuously presents a predicted tone\ - \ tothe user with both visual and audio feedback, thus makingit possible to design\ - \ a plate with any desired shape and tone.We developed this system to demonstrate\ - \ the effectivenessof integrating real-time finite element method analysis intogeometric\ - \ editing to facilitate the design of custom-mademusical instruments. An informal\ - \ study demonstrated theability of technically unsophisticated user to apply the\ - \ system to complex metallophone design.},\n address = {Sydney, Australia},\n\ - \ author = {Umetani, Nobuyuki and Mitani, Jun and Igarashi, Takeo},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177917},\n issn = {2220-4806},\n keywords\ - \ = {Modeling Interfaces, Geometric Modeling, CAD, Education, Real-time FEM},\n\ - \ pages = {26--30},\n title = {Designing Custom-made Metallophone with Concurrent\ - \ Eigenanalysis},\n url = {http://www.nime.org/proceedings/2010/nime2010_026.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_72 + abstract: 'This paper presents an ongoing research on hand gesture interactive sonification + in dance performances. For this purpose, a conceptual framework and a multilayered + mapping model issued from an experimental case study will be proposed. The goal + of this research is twofold. On the one hand, we aim to determine action-based + perceptual invariants that allow us to establish pertinent relations between gesture + qualities and sound features. 
On the other hand, we are interested in analysing + how an interactive model-based sonification can provide useful and effective feedback + for dance practitioners. From this point of view, our research explicitly addresses + the convergence between the scientific understandings provided by the field of + movement sonification and the traditional know-how developed over the years within + the digital instrument and interaction design communities. A key component of + our study is the combination between physically-based sound synthesis and motion + features analysis. This approach has proven effective in providing interesting + insights for devising novel sonification models for artistic and scientific purposes, + and for developing a collaborative platform involving the designer, the musician + and the performer.' + address: 'Birmingham, UK' + author: 'Leonard, James and Giomi, Andrea' + bibtex: "@inproceedings{NIME20_72,\n abstract = {This paper presents an ongoing\ + \ research on hand gesture interactive sonification in dance performances. For\ + \ this purpose, a conceptual framework and a multilayered mapping model issued\ + \ from an experimental case study will be proposed. The goal of this research\ + \ is twofold. On the one hand, we aim to determine action-based perceptual invariants\ + \ that allow us to establish pertinent relations between gesture qualities and\ + \ sound features. On the other hand, we are interested in analysing how an interactive\ + \ model-based sonification can provide useful and effective feedback for dance\ + \ practitioners. From this point of view, our research explicitly addresses the\ + \ convergence between the scientific understandings provided by the field of movement\ + \ sonification and the traditional know-how developed over the years within the\ + \ digital instrument and interaction design communities. 
A key component of our\ + \ study is the combination between physically-based sound synthesis and motion\ + \ features analysis. This approach has proven effective in providing interesting\ + \ insights for devising novel sonification models for artistic and scientific\ + \ purposes, and for developing a collaborative platform involving the designer,\ + \ the musician and the performer.},\n address = {Birmingham, UK},\n author = {Leonard,\ + \ James and Giomi, Andrea},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813422},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {369--374},\n presentation-video = {https://youtu.be/HQqIjL-Z8dA},\n\ + \ publisher = {Birmingham City University},\n title = {Towards an Interactive\ + \ Model-Based Sonification of Hand Gesture for Dance Performance},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper72.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177917 + doi: 10.5281/zenodo.4813422 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Modeling Interfaces, Geometric Modeling, CAD, Education, Real-time FEM' - pages: 26--30 - title: Designing Custom-made Metallophone with Concurrent Eigenanalysis - url: http://www.nime.org/proceedings/2010/nime2010_026.pdf - year: 2010 + month: July + pages: 369--374 + presentation-video: https://youtu.be/HQqIjL-Z8dA + publisher: Birmingham City University + title: Towards an Interactive Model-Based Sonification of Hand Gesture for Dance + Performance + url: https://www.nime.org/proceedings/2020/nime2020_paper72.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Chun2010 - abstract: 'The field of mixed-reality interface design is relatively young and in - regards to music, has not been explored in great depth. 
Using computer vision - and collision detection techniques, Freepad further explores the development of - mixed-reality interfaces for music. The result is an accessible user-definable - MIDI interface for anyone with a webcam, pen and paper, which outputs MIDI notes - with velocity values based on the speed of the strikes on drawn pads. ' - address: 'Sydney, Australia' - author: 'Chun, Sungkuk and Hawryshkewich, Andrew and Jung, Keechul and Pasquier, - Philippe' - bibtex: "@inproceedings{Chun2010,\n abstract = {The field of mixed-reality interface\ - \ design is relatively young and in regards to music, has not been explored in\ - \ great depth. Using computer vision and collision detection techniques, Freepad\ - \ further explores the development of mixed-reality interfaces for music. The\ - \ result is an accessible user-definable MIDI interface for anyone with a webcam,\ - \ pen and paper, which outputs MIDI notes with velocity values based on the speed\ - \ of the strikes on drawn pads. },\n address = {Sydney, Australia},\n author =\ - \ {Chun, Sungkuk and Hawryshkewich, Andrew and Jung, Keechul and Pasquier, Philippe},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177743},\n issn = {2220-4806},\n\ - \ keywords = {Computer vision, form recognition, collision detection, mixed- reality,\ - \ custom interface, MIDI},\n pages = {31--36},\n title = {Freepad : A Custom Paper-based\ - \ MIDI Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_031.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_73 + abstract: 'Lack of access to technological devices is a common exponent of a new + form of social exclusion. Coupled with this, there are also the risk of increasing + inequality between developed and underdeveloped countries when concerning technology + access. 
Regarding Internet access, the percentage of young Africans who do not + have access to this technology is around 60%, while in Europe the figure is 4%. + This limitation also expands for musical instruments, whether electronic or not. + In light of this worldwide problem, this paper aims to showcase a method for building + a MIDI Controller, a prominent instrument for musical production and live performance, + in an economically viable form that can be accessible to the poorest populations. + It is also desirable that the equipment is suitable for teaching various subjects + such as Music, Computer Science and Engineering. The outcome of this research + is not an amazing controller or a brandy new cool interface but the experience + of building a controller concerning all the bad conditions of doing it.' + address: 'Birmingham, UK' + author: 'Vieira, Romulo A and Schiavoni, Flávio Luiz' + bibtex: "@inproceedings{NIME20_73,\n abstract = {Lack of access to technological\ + \ devices is a common exponent of a new form of social exclusion. Coupled with\ + \ this, there are also the risk of increasing inequality between developed and\ + \ underdeveloped countries when concerning technology access. Regarding Internet\ + \ access, the percentage of young Africans who do not have access to this technology\ + \ is around 60%, while in Europe the figure is 4%. This limitation also expands\ + \ for musical instruments, whether electronic or not. In light of this worldwide\ + \ problem, this paper aims to showcase a method for building a MIDI Controller,\ + \ a prominent instrument for musical production and live performance, in an economically\ + \ viable form that can be accessible to the poorest populations. It is also desirable\ + \ that the equipment is suitable for teaching various subjects such as Music,\ + \ Computer Science and Engineering. 
The outcome of this research is not an amazing\ + \ controller or a brandy new cool interface but the experience of building a controller\ + \ concerning all the bad conditions of doing it.},\n address = {Birmingham, UK},\n\ + \ author = {Vieira, Romulo A and Schiavoni, Flávio Luiz},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813424},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {375--379},\n presentation-video\ + \ = {https://youtu.be/X1GE5jk2cgc},\n publisher = {Birmingham City University},\n\ + \ title = {Fliperama: An affordable Arduino based MIDI Controller},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper73.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177743 + doi: 10.5281/zenodo.4813424 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Computer vision, form recognition, collision detection, mixed- reality, - custom interface, MIDI' - pages: 31--36 - title: 'Freepad : A Custom Paper-based MIDI Interface' - url: http://www.nime.org/proceedings/2010/nime2010_031.pdf - year: 2010 + month: July + pages: 375--379 + presentation-video: https://youtu.be/X1GE5jk2cgc + publisher: Birmingham City University + title: 'Fliperama: An affordable Arduino based MIDI Controller' + url: https://www.nime.org/proceedings/2020/nime2020_paper73.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Mills2010 - abstract: 'Our team realized that a need existed for a music programming interface - in the Minim audio library of the Processingprogramming environment. The audience - for this new interface would be the novice programmer interested in usingmusic - as part of the learning experience, though the interface should also be complex - enough to benefit experiencedartist-programmers. 
We collected many ideas from - currently available music programming languages and librariesto design and create - the new capabilities in Minim. Thebasic mechanisms include chained unit generators, - instruments, and notes. In general, one "patches" unit generators(for example, - oscillators, delays, and envelopes) together inorder to create synthesis algorithms. - These algorithms canthen either create continuous sound, or be used in instruments - to play notes with specific start time and duration.We have written a base set - of unit generators to enablea wide variety of synthesis options, and the capabilities - ofthe unit generators, instruments, and Processing allow fora wide range of composition - techniques.' - address: 'Sydney, Australia' - author: 'Mills, John A. and Di Fede, Damien and Brix, Nicolas' - bibtex: "@inproceedings{Mills2010,\n abstract = {Our team realized that a need existed\ - \ for a music programming interface in the Minim audio library of the Processingprogramming\ - \ environment. The audience for this new interface would be the novice programmer\ - \ interested in usingmusic as part of the learning experience, though the interface\ - \ should also be complex enough to benefit experiencedartist-programmers. We collected\ - \ many ideas from currently available music programming languages and librariesto\ - \ design and create the new capabilities in Minim. Thebasic mechanisms include\ - \ chained unit generators, instruments, and notes. In general, one \"patches\"\ - \ unit generators(for example, oscillators, delays, and envelopes) together inorder\ - \ to create synthesis algorithms. 
These algorithms canthen either create continuous\ - \ sound, or be used in instruments to play notes with specific start time and\ - \ duration.We have written a base set of unit generators to enablea wide variety\ - \ of synthesis options, and the capabilities ofthe unit generators, instruments,\ - \ and Processing allow fora wide range of composition techniques.},\n address\ - \ = {Sydney, Australia},\n author = {Mills, John A. and Di Fede, Damien and Brix,\ - \ Nicolas},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177855},\n issn\ - \ = {2220-4806},\n keywords = {Minim, music programming, audio library, Processing,\ - \ mu- sic software},\n pages = {37--42},\n title = {Music Programming in Minim},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_037.pdf},\n year = {2010}\n\ + ID: NIME20_74 + abstract: 'This paper reports on a project that aimed to break apart the isolation + of VR and share an experience between both the wearer of a headset and a room + of observers. It presented the user with an acoustically playable virtual environment + in which their interactions with objects spawned audio events from the room’s + 80 loudspeakers and animations on the room’s 3 display walls. This required the + use of several Unity engines running on separate machines and SuperCollider running + as the audio engine. The perspectives into what the wearer of the headset was + doing allowed the audience to connect their movements to the sounds and images + being experienced, effectively allowing them all to participate in the installation + simultaneously.' + address: 'Birmingham, UK' + author: 'MacLean, Alex' + bibtex: "@inproceedings{NIME20_74,\n abstract = {This paper reports on a project\ + \ that aimed to break apart the isolation of VR and share an experience between\ + \ both the wearer of a headset and a room of observers. 
It presented the user\ + \ with an acoustically playable virtual environment in which their interactions\ + \ with objects spawned audio events from the room’s 80 loudspeakers and animations\ + \ on the room’s 3 display walls. This required the use of several Unity engines\ + \ running on separate machines and SuperCollider running as the audio engine.\ + \ The perspectives into what the wearer of the headset was doing allowed the audience\ + \ to connect their movements to the sounds and images being experienced, effectively\ + \ allowing them all to participate in the installation simultaneously.},\n address\ + \ = {Birmingham, UK},\n author = {MacLean, Alex},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.4813426},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {380--381},\n publisher = {Birmingham\ + \ City University},\n title = {Immersive Dreams: A Shared VR Experience},\n url\ + \ = {https://www.nime.org/proceedings/2020/nime2020_paper74.pdf},\n year = {2020}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177855 + doi: 10.5281/zenodo.4813426 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Minim, music programming, audio library, Processing, mu- sic software' - pages: 37--42 - title: Music Programming in Minim - url: http://www.nime.org/proceedings/2010/nime2010_037.pdf - year: 2010 + month: July + pages: 380--381 + publisher: Birmingham City University + title: 'Immersive Dreams: A Shared VR Experience' + url: https://www.nime.org/proceedings/2020/nime2020_paper74.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Magnusson2010 - abstract: 'The analysis of digital music systems has traditionally been characterized - by an approach that can be defined as phenomenological. 
The focus has been on - the body and its relationship to the machine, often neglecting the system''s conceptual - design. This paper brings into focus the epistemic features of digital systems, - which implies emphasizing the cognitive, conceptual and music theoretical side - of our musical instruments. An epistemic dimension space for the analysis of musical - devices is proposed. ' - address: 'Sydney, Australia' - author: 'Magnusson, Thor' - bibtex: "@inproceedings{Magnusson2010,\n abstract = {The analysis of digital music\ - \ systems has traditionally been characterized by an approach that can be defined\ - \ as phenomenological. The focus has been on the body and its relationship to\ - \ the machine, often neglecting the system's conceptual design. This paper brings\ - \ into focus the epistemic features of digital systems, which implies emphasizing\ - \ the cognitive, conceptual and music theoretical side of our musical instruments.\ - \ An epistemic dimension space for the analysis of musical devices is proposed.\ - \ },\n address = {Sydney, Australia},\n author = {Magnusson, Thor},\n booktitle\ + ID: NIME20_75 + abstract: 'There are many studies of Digital Musical Instrument (DMI) design, but + there is little research on the cross-cultural co-creation of DMIs drawing on + traditional musical instruments. We present a study of cross-cultural co-creation + inspired by the Duxianqin - a traditional Chinese Jing ethnic minority single + stringed musical instrument. We report on how we structured the co-creation with + European and Chinese participants ranging from DMI designers to composers and + performers. We discuss how we identified the `essence'' of the Duxianqin and used + this to drive co-creation of three Duxianqin reimagined through digital technologies. + Music was specially composed for these reimagined Duxianqin and performed in public + as the culmination of the design process. 
We reflect on our co-creation process + and how others could use such an approach to identify the essence of traditional + instruments and reimagine them in the digital age.' + address: 'Birmingham, UK' + author: 'Bryan-Kinns, Nick and ZIJIN, LI' + bibtex: "@inproceedings{NIME20_75,\n abstract = {There are many studies of Digital\ + \ Musical Instrument (DMI) design, but there is little research on the cross-cultural\ + \ co-creation of DMIs drawing on traditional musical instruments. We present a\ + \ study of cross-cultural co-creation inspired by the Duxianqin - a traditional\ + \ Chinese Jing ethnic minority single stringed musical instrument. We report on\ + \ how we structured the co-creation with European and Chinese participants ranging\ + \ from DMI designers to composers and performers. We discuss how we identified\ + \ the `essence' of the Duxianqin and used this to drive co-creation of three Duxianqin\ + \ reimagined through digital technologies. Music was specially composed for these\ + \ reimagined Duxianqin and performed in public as the culmination of the design\ + \ process. 
We reflect on our co-creation process and how others could use such\ + \ an approach to identify the essence of traditional instruments and reimagine\ + \ them in the digital age.},\n address = {Birmingham, UK},\n author = {Bryan-Kinns,\ + \ Nick and ZIJIN, LI},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813428},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {382--387},\n presentation-video = {https://youtu.be/NvHcUQea82I},\n\ + \ publisher = {Birmingham City University},\n title = {ReImagining: Cross-cultural\ + \ Co-Creation of a Chinese Traditional Musical Instrument with Digital Technologies},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper75.pdf},\n year =\ + \ {2020}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.4813428 + editor: Romain Michon and Franziska Schroeder + issn: 2220-4806 + month: July + pages: 382--387 + presentation-video: https://youtu.be/NvHcUQea82I + publisher: Birmingham City University + title: 'ReImagining: Cross-cultural Co-Creation of a Chinese Traditional Musical + Instrument with Digital Technologies' + url: https://www.nime.org/proceedings/2020/nime2020_paper75.pdf + year: 2020 + + +- ENTRYTYPE: inproceedings + ID: NIME20_76 + abstract: 'This paper presents a discussion of Dark Matter, a sonification project + using live coding and just-in-time programming techniques. The project uses data + from proton-proton collisions produced by the Large Hadron Collider (LHC) at CERN, + Switzerland, and then detected and reconstructed by the Compact Muon Solenoid + (CMS) experiment, and was developed with the support of the art@CMS project. 
Work + for the Dark Matter project included the development of a custom-made environment + in the SuperCollider (SC) programming language that lets the performers of the + group engage in collective improvisations using dynamic interventions and networked + music systems. This paper will also provide information about a spin-off project + entitled the Interactive Physics Sonification System (IPSOS), an interactive and + standalone online application developed in the JavaScript programming language. + It provides a web-based interface that allows users to map particle data to sound + on commonly used web browsers, mobile devices, such as smartphones, tablets etc. + The project was developed as an educational outreach tool to engage young students + and the general public with data derived from LHC collisions.' + address: 'Birmingham, UK' + author: 'Vasilakos, Konstantinos n/a and Wilson, Scott and McCauley, Thomas and + Yeung, Tsun Winston and Margetson, Emma and Khosravi Mardakheh, Milad' + bibtex: "@inproceedings{NIME20_76,\n abstract = {This paper presents a discussion\ + \ of Dark Matter, a sonification project using live coding and just-in-time programming\ + \ techniques. The project uses data from proton-proton collisions produced by\ + \ the Large Hadron Collider (LHC) at CERN, Switzerland, and then detected and\ + \ reconstructed by the Compact Muon Solenoid (CMS) experiment, and was developed\ + \ with the support of the art@CMS project. Work for the Dark Matter project included\ + \ the development of a custom-made environment in the SuperCollider (SC) programming\ + \ language that lets the performers of the group engage in collective improvisations\ + \ using dynamic interventions and networked music systems. This paper will also\ + \ provide information about a spin-off project entitled the Interactive Physics\ + \ Sonification System (IPSOS), an interactive and standalone online application\ + \ developed in the JavaScript programming language. 
It provides a web-based interface\ + \ that allows users to map particle data to sound on commonly used web browsers,\ + \ mobile devices, such as smartphones, tablets etc. The project was developed\ + \ as an educational outreach tool to engage young students and the general public\ + \ with data derived from LHC collisions.},\n address = {Birmingham, UK},\n author\ + \ = {Vasilakos, Konstantinos n/a and Wilson, Scott and McCauley, Thomas and Yeung,\ + \ Tsun Winston and Margetson, Emma and Khosravi Mardakheh, Milad},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177837},\n issn = {2220-4806},\n keywords\ - \ = {Epistemic tools, music theory, dimension space, analysis.},\n pages = {43--46},\n\ - \ title = {An Epistemic Dimension Space for Musical Devices},\n url = {http://www.nime.org/proceedings/2010/nime2010_043.pdf},\n\ - \ year = {2010}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.4813430},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {388--393},\n\ + \ presentation-video = {https://youtu.be/1vS_tFUyz7g},\n publisher = {Birmingham\ + \ City University},\n title = {Sonification of High Energy Physics Data Using\ + \ Live Coding and Web Based Interfaces.},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper76.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177837 + doi: 10.5281/zenodo.4813430 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Epistemic tools, music theory, dimension space, analysis.' 
- pages: 43--46 - title: An Epistemic Dimension Space for Musical Devices - url: http://www.nime.org/proceedings/2010/nime2010_043.pdf - year: 2010 + month: July + pages: 388--393 + presentation-video: https://youtu.be/1vS_tFUyz7g + publisher: Birmingham City University + title: Sonification of High Energy Physics Data Using Live Coding and Web Based + Interfaces. + url: https://www.nime.org/proceedings/2020/nime2020_paper76.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Kocaballi2010 - abstract: 'Human agency, our capacity for action, has been at the hub of discussions - centring upon philosophical enquiry for a long period of time. Sensory supplementation - devices can provide us with unique opportunities to investigate the different - aspects of our agency by enabling new modes of perception and facilitating the - emergence of novel interactions, all of which is impossible without the aforesaid - devices. Our preliminary study investigates the non-verbal strategies employed - for negotiation of our capacity for action with other bodies and the surrounding - space through body-to-body and body-to-space couplings enabled by sensory supplementation - devices. We employed a lowfi rapid prototyping approach to build this device, - enabling distal perception by sonic and haptic feedback. Further, we conducted - a workshop in which participants equipped with this device engaged in game-like - activities. ' - address: 'Sydney, Australia' - author: 'Kocaballi, A. Baki and Gemeinboeck, Petra and Saunders, Rob' - bibtex: "@inproceedings{Kocaballi2010,\n abstract = {Human agency, our capacity\ - \ for action, has been at the hub of discussions centring upon philosophical enquiry\ - \ for a long period of time. 
Sensory supplementation devices can provide us with\ - \ unique opportunities to investigate the different aspects of our agency by enabling\ - \ new modes of perception and facilitating the emergence of novel interactions,\ - \ all of which is impossible without the aforesaid devices. Our preliminary study\ - \ investigates the non-verbal strategies employed for negotiation of our capacity\ - \ for action with other bodies and the surrounding space through body-to-body\ - \ and body-to-space couplings enabled by sensory supplementation devices. We employed\ - \ a lowfi rapid prototyping approach to build this device, enabling distal perception\ - \ by sonic and haptic feedback. Further, we conducted a workshop in which participants\ - \ equipped with this device engaged in game-like activities. },\n address = {Sydney,\ - \ Australia},\n author = {Kocaballi, A. Baki and Gemeinboeck, Petra and Saunders,\ - \ Rob},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177829},\n issn = {2220-4806},\n\ - \ keywords = {Human agency, sensory supplementation, distal perception, sonic\ - \ feedback, tactile feedback, enactive interfaces},\n pages = {47--50},\n title\ - \ = {Investigating the Potential for Shared Agency using Enactive Interfaces},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_047.pdf},\n year = {2010}\n\ - }\n" + ID: NIME20_77 + abstract: 'Our goal is to develop an improvisational ensemble support system for + music beginners who do not have knowledge of chord progressions and do not have + enough experience of playing an instrument. We hypothesized that a music beginner + cannot determine tonal pitches of melody over a particular chord but can use body + movements to specify the pitch contour (i.e., melodic outline) and the attack + timings (i.e., rhythm). 
We aim to realize a performance interface for supporting + expressing intuitive pitch contour and attack timings using body motion and outputting + harmonious pitches over the chord progression of the background music. Since the + intended users of this system are not limited to people with music experience, + we plan to develop a system that uses Android smartphones, which many people have. + Our system consists of three modules: a module for specifying attack timing using + smartphone sensors, module for estimating the vertical movement of the smartphone + using smartphone sensors, and module for estimating the sound height using smartphone + vertical movement and background chord progression. Each estimation module is + developed using long short-term memory (LSTM), which is often used to estimate + time series data. We conduct evaluation experiments for each module. As a result, + the attack timing estimation had zero misjudgments, and the mean error time of + the estimated attack timing was smaller than the sensor-acquisition interval. + The accuracy of the vertical motion estimation was 64%, and that of the pitch + estimation was 7.6%. The results indicate that the attack timing is accurate enough, + but the vertical motion estimation and the pitch estimation need to be improved + for actual use.' + address: 'Birmingham, UK' + author: 'Takase, Haruya and Shiramatsu, Shun' + bibtex: "@inproceedings{NIME20_77,\n abstract = {Our goal is to develop an improvisational\ + \ ensemble support system for music beginners who do not have knowledge of chord\ + \ progressions and do not have enough experience of playing an instrument. We\ + \ hypothesized that a music beginner cannot determine tonal pitches of melody\ + \ over a particular chord but can use body movements to specify the pitch contour\ + \ (i.e., melodic outline) and the attack timings (i.e., rhythm). 
We aim to realize\ + \ a performance interface for supporting expressing intuitive pitch contour and\ + \ attack timings using body motion and outputting harmonious pitches over the\ + \ chord progression of the background music. Since the intended users of this\ + \ system are not limited to people with music experience, we plan to develop a\ + \ system that uses Android smartphones, which many people have. Our system consists\ + \ of three modules: a module for specifying attack timing using smartphone sensors,\ + \ module for estimating the vertical movement of the smartphone using smartphone\ + \ sensors, and module for estimating the sound height using smartphone vertical\ + \ movement and background chord progression. Each estimation module is developed\ + \ using long short-term memory (LSTM), which is often used to estimate time series\ + \ data. We conduct evaluation experiments for each module. As a result, the attack\ + \ timing estimation had zero misjudgments, and the mean error time of the estimated\ + \ attack timing was smaller than the sensor-acquisition interval. The accuracy\ + \ of the vertical motion estimation was 64%, and that of the pitch estimation\ + \ was 7.6%. 
The results indicate that the attack timing is accurate enough, but\ + \ the vertical motion estimation and the pitch estimation need to be improved\ + \ for actual use.},\n address = {Birmingham, UK},\n author = {Takase, Haruya and\ + \ Shiramatsu, Shun},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813434},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {394--398},\n presentation-video = {https://youtu.be/WhrGhas9Cvc},\n\ + \ publisher = {Birmingham City University},\n title = {Support System for Improvisational\ + \ Ensemble Based on Long Short-Term Memory Using Smartphone Sensor},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper77.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177829 + doi: 10.5281/zenodo.4813434 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Human agency, sensory supplementation, distal perception, sonic feedback, - tactile feedback, enactive interfaces' - pages: 47--50 - title: Investigating the Potential for Shared Agency using Enactive Interfaces - url: http://www.nime.org/proceedings/2010/nime2010_047.pdf - year: 2010 + month: July + pages: 394--398 + presentation-video: https://youtu.be/WhrGhas9Cvc + publisher: Birmingham City University + title: Support System for Improvisational Ensemble Based on Long Short-Term Memory + Using Smartphone Sensor + url: https://www.nime.org/proceedings/2020/nime2020_paper77.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Liebman2010 - abstract: 'We present Cuebert, a mixing board concept for musical theatre. 
Using - a user-centered design process, our goal was to reconceptualize the mixer using - modern technology and interaction techniques, questioning over fifty years of - interface design in audio technology. Our research resulted in a design that retains - the physical controls — faders and knobs — demanded by sound engineers while taking - advantage of multitouch display technology to allow for flexible display of dynamic - and context-sensitive content.' - address: 'Sydney, Australia' - author: 'Liebman, Noah and Nagara, Michael and Spiewla, Jacek and Zolkosky, Erin' - bibtex: "@inproceedings{Liebman2010,\n abstract = {We present Cuebert, a mixing\ - \ board concept for musical theatre. Using a user-centered design process, our\ - \ goal was to reconceptualize the mixer using modern technology and interaction\ - \ techniques, questioning over fifty years of interface design in audio technology.\ - \ Our research resulted in a design that retains the physical controls — faders\ - \ and knobs — demanded by sound engineers while taking advantage of multitouch\ - \ display technology to allow for flexible display of dynamic and context-sensitive\ - \ content.},\n address = {Sydney, Australia},\n author = {Liebman, Noah and Nagara,\ - \ Michael and Spiewla, Jacek and Zolkosky, Erin},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177833},\n issn = {2220-4806},\n keywords = {audio, control\ - \ surfaces, mixing board, multitouch, sound, theatre, touch-screen, user-centered\ - \ design},\n pages = {51--56},\n title = {Cuebert : A New Mixing Board Concept\ - \ for Musical Theatre},\n url = {http://www.nime.org/proceedings/2010/nime2010_051.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_78 + abstract: 'In this paper, we contribute to the discussion on how to best design + human-centric MIR tools for live audio mixing by bridging the gap between research + on complex systems, the 
psychology of automation and the design of tools that + support creativity in music production. We present the design of the Channel-AI, + an embedded AI system which performs instrument recognition and generates parameter + settings suggestions for gain levels, gating, compression and equalization which + are specific to the input signal and the instrument type. We discuss what we believe + to be the key design principles and perspectives on the making of intelligent + tools for creativity and for experts in the loop. We demonstrate how these principles + have been applied to inform the design of the interaction between expert live + audio mixing engineers with the Channel-AI (i.e. a corpus of AI features embedded + in the Midas HD Console. We report the findings from a preliminary evaluation + we conducted with three professional mixing engineers and reflect on mixing engineers’ + comments about the Channel-AI on social media.' + address: 'Birmingham, UK' + author: 'Tsiros, Augoustinos and Palladini, Alessandro' + bibtex: "@inproceedings{NIME20_78,\n abstract = {In this paper, we contribute to\ + \ the discussion on how to best design human-centric MIR tools for live audio\ + \ mixing by bridging the gap between research on complex systems, the psychology\ + \ of automation and the design of tools that support creativity in music production.\ + \ We present the design of the Channel-AI, an embedded AI system which performs\ + \ instrument recognition and generates parameter settings suggestions for gain\ + \ levels, gating, compression and equalization which are specific to the input\ + \ signal and the instrument type. We discuss what we believe to be the key design\ + \ principles and perspectives on the making of intelligent tools for creativity\ + \ and for experts in the loop. We demonstrate how these principles have been applied\ + \ to inform the design of the interaction between expert live audio mixing engineers\ + \ with the Channel-AI (i.e. 
a corpus of AI features embedded in the Midas HD Console.\ + \ We report the findings from a preliminary evaluation we conducted with three\ + \ professional mixing engineers and reflect on mixing engineers’ comments about\ + \ the Channel-AI on social media.},\n address = {Birmingham, UK},\n author = {Tsiros,\ + \ Augoustinos and Palladini, Alessandro},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813436},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {399--404},\n publisher = {Birmingham City University},\n\ + \ title = {Towards a Human-Centric Design Framework for AI Assisted Music Production},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper78.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177833 + doi: 10.5281/zenodo.4813436 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'audio, control surfaces, mixing board, multitouch, sound, theatre, touch-screen, - user-centered design' - pages: 51--56 - title: 'Cuebert : A New Mixing Board Concept for Musical Theatre' - url: http://www.nime.org/proceedings/2010/nime2010_051.pdf - year: 2010 + month: July + pages: 399--404 + publisher: Birmingham City University + title: Towards a Human-Centric Design Framework for AI Assisted Music Production + url: https://www.nime.org/proceedings/2020/nime2020_paper78.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Roberts2010 - abstract: 'We present the Device Server, a framework and application driving interaction - in the AlloSphere virtual reality environment. 
The motivation and development - of the Device Server stems from the practical concerns of managing multi-user - interactivity with a variety of physical devices for disparate performance and - virtual reality environments housed in the same physical location. The interface - of the Device Server allows users to see how devices are assigned to application - functionalities, alter these assignments and save them into configuration files - for later use. Configurations defining how applications use devices can be changed - on the fly without recompiling or relaunching applications. Multiple applications - can be connected to the Device Server concurrently. The Device Server provides - several conveniences for performance environments. It can process control data - efficiently using Just-In-Time compiled Lua expressions; in doing so it frees - processing cycles on audio and video rendering computers. All control signals - entering the Device Server can be recorded, saved, and played back allowing performances - based on control data to be recreated in their entirety. The Device Server attempts - to homogenize the appearance of different control signals to applications so that - users can assign any interface element they choose to application functionalities - and easily experiment with different control configurations.' - address: 'Sydney, Australia' - author: 'Roberts, Charles and Wright, Matthew and Kuchera-Morin, JoAnn and Putnam, - Lance' - bibtex: "@inproceedings{Roberts2010,\n abstract = {We present the Device Server,\ - \ a framework and application driving interaction in the AlloSphere virtual reality\ - \ environment. The motivation and development of the Device Server stems from\ - \ the practical concerns of managing multi-user interactivity with a variety of\ - \ physical devices for disparate performance and virtual reality environments\ - \ housed in the same physical location. 
The interface of the Device Server allows\ - \ users to see how devices are assigned to application functionalities, alter\ - \ these assignments and save them into configuration files for later use. Configurations\ - \ defining how applications use devices can be changed on the fly without recompiling\ - \ or relaunching applications. Multiple applications can be connected to the Device\ - \ Server concurrently. The Device Server provides several conveniences for performance\ - \ environments. It can process control data efficiently using Just-In-Time compiled\ - \ Lua expressions; in doing so it frees processing cycles on audio and video rendering\ - \ computers. All control signals entering the Device Server can be recorded, saved,\ - \ and played back allowing performances based on control data to be recreated\ - \ in their entirety. The Device Server attempts to homogenize the appearance of\ - \ different control signals to applications so that users can assign any interface\ - \ element they choose to application functionalities and easily experiment with\ - \ different control configurations.},\n address = {Sydney, Australia},\n author\ - \ = {Roberts, Charles and Wright, Matthew and Kuchera-Morin, JoAnn and Putnam,\ - \ Lance},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177883},\n issn = {2220-4806},\n\ - \ keywords = {AlloSphere, mapping, performance, HCI, interactivity, Virtual Reality,\ - \ OSC, multi-user, network},\n pages = {57--62},\n title = {Dynamic Interactivity\ - \ Inside the AlloSphere},\n url = {http://www.nime.org/proceedings/2010/nime2010_057.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_79 + abstract: 'Understanding the question of what makes a good musical instrument raises + several conceptual challenges. 
Researchers have regularly adopted tools from traditional + HCI as a framework to address this issue, in which instrumental musical activities + are taken to comprise a device and a user, and should be evaluated as such. We + argue that this approach is not equipped to fully address the conceptual issues + raised by this question. It is worth reflecting on what exactly an instrument + is, and how instruments contribute toward meaningful musical experiences. Based + on a theoretical framework that incorporates ideas from ecological psychology, + enactivism, and phenomenology, we propose an alternative approach to studying + musical instruments. According to this approach, instruments are better understood + in terms of processes rather than as devices, while musicians are not users, but + rather agents in musical ecologies. A consequence of this reframing is that any + evaluations of instruments, if warranted, should align with the specificities + of the relevant processes and ecologies concerned. We present an outline of this + argument and conclude with a description of a current research project to illustrate + how our approach can shape the design and performance of a musical instrument + in-progress.' + address: 'Birmingham, UK' + author: 'Rodger, Matthew and Stapleton, Paul and van Walstijn, Maarten and Ortiz, + Miguel and Pardue, Laurel S' + bibtex: "@inproceedings{NIME20_79,\n abstract = {Understanding the question of what\ + \ makes a good musical instrument raises several conceptual challenges. Researchers\ + \ have regularly adopted tools from traditional HCI as a framework to address\ + \ this issue, in which instrumental musical activities are taken to comprise a\ + \ device and a user, and should be evaluated as such. 
We argue that this approach\ + \ is not equipped to fully address the conceptual issues raised by this question.\ + \ It is worth reflecting on what exactly an instrument is, and how instruments\ + \ contribute toward meaningful musical experiences. Based on a theoretical framework\ + \ that incorporates ideas from ecological psychology, enactivism, and phenomenology,\ + \ we propose an alternative approach to studying musical instruments. According\ + \ to this approach, instruments are better understood in terms of processes rather\ + \ than as devices, while musicians are not users, but rather agents in musical\ + \ ecologies. A consequence of this reframing is that any evaluations of instruments,\ + \ if warranted, should align with the specificities of the relevant processes\ + \ and ecologies concerned. We present an outline of this argument and conclude\ + \ with a description of a current research project to illustrate how our approach\ + \ can shape the design and performance of a musical instrument in-progress.},\n\ + \ address = {Birmingham, UK},\n author = {Rodger, Matthew and Stapleton, Paul\ + \ and van Walstijn, Maarten and Ortiz, Miguel and Pardue, Laurel S},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813438},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {405--410},\n\ + \ presentation-video = {https://youtu.be/ADLo-QdSwBc},\n publisher = {Birmingham\ + \ City University},\n title = {What Makes a Good Musical Instrument? 
A Matter\ + \ of Processes, Ecologies and Specificities },\n url = {https://www.nime.org/proceedings/2020/nime2020_paper79.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177883 + doi: 10.5281/zenodo.4813438 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'AlloSphere, mapping, performance, HCI, interactivity, Virtual Reality, - OSC, multi-user, network' - pages: 57--62 - title: Dynamic Interactivity Inside the AlloSphere - url: http://www.nime.org/proceedings/2010/nime2010_057.pdf - year: 2010 + month: July + pages: 405--410 + presentation-video: https://youtu.be/ADLo-QdSwBc + publisher: Birmingham City University + title: 'What Makes a Good Musical Instrument? A Matter of Processes, Ecologies and + Specificities ' + url: https://www.nime.org/proceedings/2020/nime2020_paper79.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Alt2010 - abstract: 'Writing text messages (e.g. email, SMS, instant messaging) is a popular - form of synchronous and asynchronous communication. However, when it comes to - notifying users about new messages, current audio-based approaches, such as notification - tones, are very limited in conveying information. In this paper we show how entire - text messages can be encoded into a meaningful and euphonic melody in such a way - that users can guess a message’s intention without actually seeing the content. - First, as a proof of concept, we report on the findings of an initial on-line - survey among 37 musicians and 32 non-musicians evaluating the feasibility and - validity of our approach. We show that our representation is understandable and - that there are no significant differences between musicians and non-musicians. - Second, we evaluated the approach in a real world scenario based on a Skype plug-in. 
- In a field study with 14 participants we showed that sonified text messages strongly - impact on the users’ message checking behavior by significantly reducing the time - between receiving and reading an incoming message.' - address: 'Sydney, Australia' - author: 'Alt, Florian and Shirazi, Alireza S. and Legien, Stefan and Schmidt, Albrecht - and Mennenöh, Julian' - bibtex: "@inproceedings{Alt2010,\n abstract = {Writing text messages (e.g. email,\ - \ SMS, instant messaging) is a popular form of synchronous and asynchronous communication.\ - \ However, when it comes to notifying users about new messages, current audio-based\ - \ approaches, such as notification tones, are very limited in conveying information.\ - \ In this paper we show how entire text messages can be encoded into a meaningful\ - \ and euphonic melody in such a way that users can guess a message’s intention\ - \ without actually seeing the content. First, as a proof of concept, we report\ - \ on the findings of an initial on-line survey among 37 musicians and 32 non-musicians\ - \ evaluating the feasibility and validity of our approach. We show that our representation\ - \ is understandable and that there are no significant differences between musicians\ - \ and non-musicians. Second, we evaluated the approach in a real world scenario\ - \ based on a Skype plug-in. In a field study with 14 participants we showed that\ - \ sonified text messages strongly impact on the users’ message checking behavior\ - \ by significantly reducing the time between receiving and reading an incoming\ - \ message.},\n address = {Sydney, Australia},\n author = {Alt, Florian and Shirazi,\ - \ Alireza S. 
and Legien, Stefan and Schmidt, Albrecht and Mennen\\''{o}h, Julian},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177713},\n issn = {2220-4806},\n\ - \ keywords = {instant messaging, sms, sonority, text sonification},\n pages =\ - \ {63--68},\n title = {Creating Meaningful Melodies from Text Messages},\n url\ - \ = {http://www.nime.org/proceedings/2010/nime2010_063.pdf},\n year = {2010}\n\ - }\n" + ID: NIME20_8 + abstract: 'We describe a sonic artwork, "Listening To Listening", that has been + designed to accompany a real-world sculpture with two prototype interaction schemes. + Our artwork is created for the HoloLens platform so that users can have an individual + experience in a mixed reality context. Personal AR systems have recently become + available and practical for integration into public art projects, however research + into sonic sculpture works has yet to account for the affordances of current portable + and mainstream AR systems. In this work, we take advantage of the HoloLens'' spatial + awareness to build sonic spaces that have a precise spatial relationship to a + given sculpture and where the sculpture itself is modelled in the augmented scene + as an "invisible hologram". We describe the artistic rationale for our artwork, + the design of the two interaction schemes, and the technical and usability feedback + that we have obtained from demonstrations during iterative development. This work + appears to be the first time that head-mounted AR has been used to build an interactive + sonic landscape to engage with a public sculpture.' 
+ address: 'Birmingham, UK' + author: 'Martin, Charles Patrick and Liu, Zeruo and Wang, Yichen and He, Wennan + and Gardner, Henry' + bibtex: "@inproceedings{NIME20_8,\n abstract = {We describe a sonic artwork, \"\ + Listening To Listening\", that has been designed to accompany a real-world sculpture\ + \ with two prototype interaction schemes. Our artwork is created for the HoloLens\ + \ platform so that users can have an individual experience in a mixed reality\ + \ context. Personal AR systems have recently become available and practical for\ + \ integration into public art projects, however research into sonic sculpture\ + \ works has yet to account for the affordances of current portable and mainstream\ + \ AR systems. In this work, we take advantage of the HoloLens' spatial awareness\ + \ to build sonic spaces that have a precise spatial relationship to a given sculpture\ + \ and where the sculpture itself is modelled in the augmented scene as an \"invisible\ + \ hologram\". We describe the artistic rationale for our artwork, the design of\ + \ the two interaction schemes, and the technical and usability feedback that we\ + \ have obtained from demonstrations during iterative development. 
This work appears\ + \ to be the first time that head-mounted AR has been used to build an interactive\ + \ sonic landscape to engage with a public sculpture.},\n address = {Birmingham,\ + \ UK},\n author = {Martin, Charles Patrick and Liu, Zeruo and Wang, Yichen and\ + \ He, Wennan and Gardner, Henry},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813445},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {39--42},\n presentation-video = {https://youtu.be/RlTWXnFOLN8},\n\ + \ publisher = {Birmingham City University},\n title = {Sonic Sculpture: Activating\ + \ Engagement with Head-Mounted Augmented Reality},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper8.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177713 + doi: 10.5281/zenodo.4813445 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'instant messaging, sms, sonority, text sonification' - pages: 63--68 - title: Creating Meaningful Melodies from Text Messages - url: http://www.nime.org/proceedings/2010/nime2010_063.pdf - year: 2010 + month: July + pages: 39--42 + presentation-video: https://youtu.be/RlTWXnFOLN8 + publisher: Birmingham City University + title: 'Sonic Sculpture: Activating Engagement with Head-Mounted Augmented Reality' + url: https://www.nime.org/proceedings/2020/nime2020_paper8.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Humphrey2010 - abstract: 'This paper articulates an interest in a kind of interactive musical instrument - and artwork that defines the mechanisms for instrumental interactivity from the - iconic morphologies of ready-mades, casting historical utilitarian objects as - the basis for performed musical experiences by spectators. 
The interactive repertoires - are therefore partially pre-determined through enculturated behaviors that are - associated with particular objects, but more importantly, inextricably linked - to the thematic and meaningful assemblage of the work itself. Our new work epi-thet - gathers data from individual interactions with common microscopes placed on platforms - within a large space. This data is correlated with public domain genetic datasets - obtained from micro-array analysis. A sonification algorithm generates unique - compositions associated with the spectator "as measured" through their individual - specification in performing an iconic measurement action. The apparatus is a receptacle - for unique compositions in sound, and invites a participatory choreography of - stillness that is available for reception as a live musical performance. ' - address: 'Sydney, Australia' - author: 'Humphrey, Tim and Flynn, Madeleine and Stevens, Jesse' - bibtex: "@inproceedings{Humphrey2010,\n abstract = {This paper articulates an interest\ - \ in a kind of interactive musical instrument and artwork that defines the mechanisms\ - \ for instrumental interactivity from the iconic morphologies of {ready-mades},\ - \ casting historical utilitarian objects as the basis for performed musical experiences\ - \ by spectators. The interactive repertoires are therefore partially pre-determined\ - \ through enculturated behaviors that are associated with particular objects,\ - \ but more importantly, inextricably linked to the thematic and meaningful assemblage\ - \ of the work itself. Our new work epi-thet gathers data from individual interactions\ - \ with common microscopes placed on platforms within a large space. This data\ - \ is correlated with public domain genetic datasets obtained from micro-array\ - \ analysis. 
A sonification algorithm generates unique compositions associated\ - \ with the spectator \"as measured\" through their individual specification in\ - \ performing an iconic measurement action. The apparatus is a receptacle for unique\ - \ compositions in sound, and invites a participatory choreography of stillness\ - \ that is available for reception as a live musical performance. },\n address\ - \ = {Sydney, Australia},\n author = {Humphrey, Tim and Flynn, Madeleine and Stevens,\ - \ Jesse},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177811},\n issn = {2220-4806},\n\ - \ keywords = {Sonification installation spectator-choreography micro-array ready-mades\ - \ morphology stillness},\n pages = {69--71},\n title = {Epi-thet : A Musical Performance\ - \ Installation and a Choreography of Stillness},\n url = {http://www.nime.org/proceedings/2010/nime2010_069.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_80 + abstract: 'Augmented instruments have been a widely explored research topic since + the late 80s. The possibility to use sensors for providing an input for sound + processing/synthesis units let composers and sound artist open up new ways for + experimentation. Augmented Reality, by rendering virtual objects in the real world + and by making those objects interactive (via some sensor-generated input), provides + a new frame for this research field. In fact, the 3D visual feedback, delivering + a precise indication of the spatial configuration/function of each virtual interface, + can make the instrumental augmentation process more intuitive for the interpreter + and more resourceful for a composer/creator: interfaces can change their behavior + over time, can be reshaped, activated or deactivated. Each of these modifications + can be made obvious to the performer by using strategies of visual feedback. 
In + addition, it is possible to accurately sample space and to map it with differentiated + functions. Augmenting interfaces can also be considered a visual expressive tool + for the audience and designed accordingly: the performer’s point of view (or another + point of view provided by an external camera) can be mirrored to a projector. + This article will show some example of different designs of AR piano augmentation + from the composition Studi sulla realtà nuova.' + address: 'Birmingham, UK' + author: 'Santini, Giovanni ' + bibtex: "@inproceedings{NIME20_80,\n abstract = {Augmented instruments have been\ + \ a widely explored research topic since the late 80s. The possibility to use\ + \ sensors for providing an input for sound processing/synthesis units let composers\ + \ and sound artist open up new ways for experimentation. Augmented Reality, by\ + \ rendering virtual objects in the real world and by making those objects interactive\ + \ (via some sensor-generated input), provides a new frame for this research field.\ + \ In fact, the 3D visual feedback, delivering a precise indication of the spatial\ + \ configuration/function of each virtual interface, can make the instrumental\ + \ augmentation process more intuitive for the interpreter and more resourceful\ + \ for a composer/creator: interfaces can change their behavior over time, can\ + \ be reshaped, activated or deactivated. Each of these modifications can be made\ + \ obvious to the performer by using strategies of visual feedback. In addition,\ + \ it is possible to accurately sample space and to map it with differentiated\ + \ functions. Augmenting interfaces can also be considered a visual expressive\ + \ tool for the audience and designed accordingly: the performer’s point of view\ + \ (or another point of view provided by an external camera) can be mirrored to\ + \ a projector. 
This article will show some example of different designs of AR\ + \ piano augmentation from the composition Studi sulla realtà nuova.},\n address\ + \ = {Birmingham, UK},\n author = {Santini, Giovanni },\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813449},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {411--415},\n presentation-video\ + \ = {https://youtu.be/3HBWvKj2cqc},\n publisher = {Birmingham City University},\n\ + \ title = {Augmented Piano in Augmented Reality},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper80.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177811 + doi: 10.5281/zenodo.4813449 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: Sonification installation spectator-choreography micro-array ready-mades - morphology stillness - pages: 69--71 - title: 'Epi-thet : A Musical Performance Installation and a Choreography of Stillness' - url: http://www.nime.org/proceedings/2010/nime2010_069.pdf - year: 2010 + month: July + pages: 411--415 + presentation-video: https://youtu.be/3HBWvKj2cqc + publisher: Birmingham City University + title: Augmented Piano in Augmented Reality + url: https://www.nime.org/proceedings/2020/nime2020_paper80.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Hahnel2010 - abstract: 'The propriety of articulation, especially of notes that lackannotations, - is influenced by the origin of the particularmusic. This paper presents a rule - system for articulationderived from late Baroque and early Classic treatises on - performance. Expressive articulation, in this respect, is understood as a combination - of alterable tone features like duration, loudness, and timbre. 
The model differentiates - globalcharacteristics and local particularities, provides a generalframework for - human-like music performances, and, therefore, serves as a basis for further and - more complex rulesystems.' - address: 'Sydney, Australia' - author: 'Hähnel, Tilo' - bibtex: "@inproceedings{Hahnel2010,\n abstract = {The propriety of articulation,\ - \ especially of notes that lackannotations, is influenced by the origin of the\ - \ particularmusic. This paper presents a rule system for articulationderived from\ - \ late Baroque and early Classic treatises on performance. Expressive articulation,\ - \ in this respect, is understood as a combination of alterable tone features like\ - \ duration, loudness, and timbre. The model differentiates globalcharacteristics\ - \ and local particularities, provides a generalframework for human-like music\ - \ performances, and, therefore, serves as a basis for further and more complex\ - \ rulesystems.},\n address = {Sydney, Australia},\n author = {H\\''{a}hnel, Tilo},\n\ + ID: NIME20_81 + abstract: 'Whilst there is a large body of NIME papers that concentrate on the presentation + of new technologies there are fewer papers that have focused on a longitudinal + understanding of NIMEs in practice. This paper embodies the more recent acknowledgement + of the importance of practice-based methods of evaluation [1,2,3,4] concerning + the use of NIMEs within performance and the recognition that it is only within + the situation of practice that the context is available to actually interpret + and evaluate the instrument [2]. Within this context this paper revisits the Feral + Cello performance system that was first presented at NIME 2017 [5]. This paper + explores what has been learned through the artistic practice of performing and + workshopping in this context by drawing heavily on the experiences of the performer/composer + who has become an integral part of this project and co-author of this paper. 
The + original philosophical context is also revisited and reflections are made on the + tensions between this position and the need to ‘get something to work’. The authors + feel the presentation of the semi-structured interview within the paper is the + best method of staying truthful to Hayes understanding of musical improvisation + as an enactive framework ‘in its ability to demonstrate the importance of participatory, + relational, emergent, and embodied musical activities and processes’ [4].' + address: 'Birmingham, UK' + author: 'Davis, Tom and Reid, Laura' + bibtex: "@inproceedings{NIME20_81,\n abstract = {Whilst there is a large body of\ + \ NIME papers that concentrate on the presentation of new technologies there are\ + \ fewer papers that have focused on a longitudinal understanding of NIMEs in practice.\ + \ This paper embodies the more recent acknowledgement of the importance of practice-based\ + \ methods of evaluation [1,2,3,4] concerning the use of NIMEs within performance\ + \ and the recognition that it is only within the situation of practice that the\ + \ context is available to actually interpret and evaluate the instrument [2].\ + \ Within this context this paper revisits the Feral Cello performance system that\ + \ was first presented at NIME 2017 [5]. This paper explores what has been learned\ + \ through the artistic practice of performing and workshopping in this context\ + \ by drawing heavily on the experiences of the performer/composer who has become\ + \ an integral part of this project and co-author of this paper. The original philosophical\ + \ context is also revisited and reflections are made on the tensions between this\ + \ position and the need to ‘get something to work’. 
The authors feel the presentation\ + \ of the semi-structured interview within the paper is the best method of staying\ + \ truthful to Hayes understanding of musical improvisation as an enactive framework\ + \ ‘in its ability to demonstrate the importance of participatory, relational,\ + \ emergent, and embodied musical activities and processes’ [4].},\n address =\ + \ {Birmingham, UK},\n author = {Davis, Tom and Reid, Laura},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813453},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {416--421},\n presentation-video\ + \ = {https://youtu.be/9npR0T6YGiA},\n publisher = {Birmingham City University},\n\ + \ title = {Taking Back Control: Taming the Feral Cello},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper81.pdf},\n\ + \ year = {2020}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.4813453 + editor: Romain Michon and Franziska Schroeder + issn: 2220-4806 + month: July + pages: 416--421 + presentation-video: https://youtu.be/9npR0T6YGiA + publisher: Birmingham City University + title: 'Taking Back Control: Taming the Feral Cello' + url: https://www.nime.org/proceedings/2020/nime2020_paper81.pdf + year: 2020 + + +- ENTRYTYPE: inproceedings + ID: NIME20_82 + abstract: 'Becoming a practical musician traditionally requires an extensive amount + of preparatory work to master the technical and theoretical challenges of the + particular instrument and musical style before being able to devote oneself to + musical expression. In particular, in jazz improvisation, one of the major barriers + is the mastery and appropriate selection of scales from a wide range, according + to harmonic context and style. 
In this paper, we present AutoScale, an interactive + software for making jazz improvisation more accessible by lifting the burden of + scale selection from the musician while still allowing full controllability if + desired. This is realized by implementing a MIDI effect that dynamically maps + the desired scales onto a standardized layout. Scale selection can be pre-programmed, + automated based on algorithmic lead sheet analysis, or interactively adapted. + We discuss the music-theoretical foundations underlying our approach, the design + choices taken for building an intuitive user interface, and provide implementations + as VST plugin and web applications for use with a Launchpad or traditional MIDI + keyboard.' + address: 'Birmingham, UK' + author: 'Jaccard, Thibault and Lieck, Robert and Rohrmeier, Martin' + bibtex: "@inproceedings{NIME20_82,\n abstract = {Becoming a practical musician traditionally\ + \ requires an extensive amount of preparatory work to master the technical and\ + \ theoretical challenges of the particular instrument and musical style before\ + \ being able to devote oneself to musical expression. In particular, in jazz improvisation,\ + \ one of the major barriers is the mastery and appropriate selection of scales\ + \ from a wide range, according to harmonic context and style. In this paper, we\ + \ present AutoScale, an interactive software for making jazz improvisation more\ + \ accessible by lifting the burden of scale selection from the musician while\ + \ still allowing full controllability if desired. This is realized by implementing\ + \ a MIDI effect that dynamically maps the desired scales onto a standardized layout.\ + \ Scale selection can be pre-programmed, automated based on algorithmic lead sheet\ + \ analysis, or interactively adapted. 
We discuss the music-theoretical foundations\ + \ underlying our approach, the design choices taken for building an intuitive\ + \ user interface, and provide implementations as VST plugin and web applications\ + \ for use with a Launchpad or traditional MIDI keyboard.},\n address = {Birmingham,\ + \ UK},\n author = {Jaccard, Thibault and Lieck, Robert and Rohrmeier, Martin},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177787},\n issn = {2220-4806},\n\ - \ keywords = {Articulation, Historically Informed Performance, Expres- sive Performance,\ - \ Synthetic Performance},\n pages = {72--75},\n title = {From Mozart to {MIDI}\ - \ : A Rule System for Expressive Articulation},\n url = {http://www.nime.org/proceedings/2010/nime2010_072.pdf},\n\ - \ year = {2010}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.4813457},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {422--427},\n presentation-video = {https://youtu.be/KqGpTTQ9ZrE},\n publisher\ + \ = {Birmingham City University},\n title = {AutoScale: Automatic and Dynamic\ + \ Scale Selection for Live Jazz Improvisation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper82.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177787 + doi: 10.5281/zenodo.4813457 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Articulation, Historically Informed Performance, Expres- sive Performance, - Synthetic Performance' - pages: 72--75 - title: 'From Mozart to MIDI : A Rule System for Expressive Articulation' - url: http://www.nime.org/proceedings/2010/nime2010_072.pdf - year: 2010 + month: July + pages: 422--427 + presentation-video: https://youtu.be/KqGpTTQ9ZrE + publisher: Birmingham City University + title: 'AutoScale: Automatic and 
Dynamic Scale Selection for Live Jazz Improvisation' + url: https://www.nime.org/proceedings/2020/nime2020_paper82.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Essl2010 - abstract: 'We discuss how the environment urMus was designed to allow creation of - mobile musical instruments on multi-touch smartphones. The design of a mobile - musical instrument consists of connecting sensory capabilities to output modalities - through various means of processing. We describe how the default mapping interface - was designed which allows to set up such a pipeline and how visual and interactive - multi-touch UIs for musical instruments can be designed within the system. ' - address: 'Sydney, Australia' - author: 'Essl, Georg and Müller, Alexander' - bibtex: "@inproceedings{Essl2010,\n abstract = {We discuss how the environment urMus\ - \ was designed to allow creation of mobile musical instruments on multi-touch\ - \ smartphones. The design of a mobile musical instrument consists of connecting\ - \ sensory capabilities to output modalities through various means of processing.\ - \ We describe how the default mapping interface was designed which allows to set\ - \ up such a pipeline and how visual and interactive multi-touch UIs for musical\ - \ instruments can be designed within the system. },\n address = {Sydney, Australia},\n\ - \ author = {Essl, Georg and M\\''{u}ller, Alexander},\n booktitle = {Proceedings\ + ID: NIME20_83 + abstract: 'Nearly two decades after its inception as a workshop at the ACM Conference + on Human Factors in Computing Systems, NIME exists as an established international + conference significantly distinct from its precursor. While this origin story + is often noted, the implications of NIME''s history as emerging from a field predominantly + dealing with human-computer interaction have rarely been discussed. 
In this paper + we highlight many of the recent—and some not so recent—challenges that have been + brought upon the NIME community as it attempts to maintain and expand its identity + as a platform for multidisciplinary research into HCI, interface design, and electronic + and computer music. We discuss the relationship between the market demands of + the neoliberal university—which have underpinned academia''s drive for innovation—and + the quantification and economisation of research performance which have facilitated + certain disciplinary and social frictions to emerge within NIME-related research + and practice. Drawing on work that engages with feminist theory and cultural studies, + we suggest that critical reflection and moreover mediation is necessary in order + to address burgeoning concerns which have been raised within the NIME discourse + in relation to methodological approaches,''diversity and inclusion'', ''accessibility'', + and the fostering of rigorous interdisciplinary research.' + address: 'Birmingham, UK' + author: 'Hayes, Lauren and Marquez-Borbon, Adnan' + bibtex: "@inproceedings{NIME20_83,\n abstract = {Nearly two decades after its inception\ + \ as a workshop at the ACM Conference on Human Factors in Computing Systems, NIME\ + \ exists as an established international conference significantly distinct from\ + \ its precursor. While this origin story is often noted, the implications of NIME's\ + \ history as emerging from a field predominantly dealing with human-computer interaction\ + \ have rarely been discussed. In this paper we highlight many of the recent—and\ + \ some not so recent—challenges that have been brought upon the NIME community\ + \ as it attempts to maintain and expand its identity as a platform for multidisciplinary\ + \ research into HCI, interface design, and electronic and computer music. 
We discuss\ + \ the relationship between the market demands of the neoliberal university—which\ + \ have underpinned academia's drive for innovation—and the quantification and\ + \ economisation of research performance which have facilitated certain disciplinary\ + \ and social frictions to emerge within NIME-related research and practice. Drawing\ + \ on work that engages with feminist theory and cultural studies, we suggest that\ + \ critical reflection and moreover mediation is necessary in order to address\ + \ burgeoning concerns which have been raised within the NIME discourse in relation\ + \ to methodological approaches,'diversity and inclusion', 'accessibility', and\ + \ the fostering of rigorous interdisciplinary research.},\n address = {Birmingham,\ + \ UK},\n author = {Hayes, Lauren and Marquez-Borbon, Adnan},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177759},\n issn = {2220-4806},\n keywords = {Mobile music\ - \ making, meta-environment, design, mapping, user interface},\n pages = {76--81},\n\ - \ title = {Designing Mobile Musical Instruments and Environments with urMus},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_076.pdf},\n year = {2010}\n\ - }\n" + \ doi = {10.5281/zenodo.4813459},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {428--433},\n presentation-video\ + \ = {https://youtu.be/4UERHlFUQzo},\n publisher = {Birmingham City University},\n\ + \ title = {Nuanced and Interrelated Mediations and Exigencies (NIME): Addressing\ + \ the Prevailing Political and Epistemological Crises},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper83.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177759 + doi: 10.5281/zenodo.4813459 + editor: Romain Michon and Franziska Schroeder 
issn: 2220-4806 - keywords: 'Mobile music making, meta-environment, design, mapping, user interface' - pages: 76--81 - title: Designing Mobile Musical Instruments and Environments with urMus - url: http://www.nime.org/proceedings/2010/nime2010_076.pdf - year: 2010 + month: July + pages: 428--433 + presentation-video: https://youtu.be/4UERHlFUQzo + publisher: Birmingham City University + title: 'Nuanced and Interrelated Mediations and Exigencies (NIME): Addressing the + Prevailing Political and Epistemological Crises' + url: https://www.nime.org/proceedings/2020/nime2020_paper83.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Oh2010 - abstract: 'In this paper, we describe the development of the Stanford Mobile Phone - Orchestra (MoPhO) since its inceptionin 2007. As a newly structured ensemble of - musicians withiPhones and wearable speakers, MoPhO takes advantageof the ubiquity - and mobility of smartphones as well asthe unique interaction techniques offered - by such devices.MoPhO offers a new platform for research, instrument design, composition, - and performance that can be juxtaposedto that of a laptop orchestra. We trace - the origins of MoPhO,describe the motivations behind the current hardware andsoftware - design in relation to the backdrop of current trendsin mobile music making, detail - key interaction conceptsaround new repertoire, and conclude with an analysis onthe - development of MoPhO thus far.' - address: 'Sydney, Australia' - author: 'Oh, Jieun and Herrera, Jorge and Bryan, Nicholas J. and Dahl, Luke and - Wang, Ge' - bibtex: "@inproceedings{Oh2010,\n abstract = {In this paper, we describe the development\ - \ of the Stanford Mobile Phone Orchestra (MoPhO) since its inceptionin 2007. 
As\ - \ a newly structured ensemble of musicians withiPhones and wearable speakers,\ - \ MoPhO takes advantageof the ubiquity and mobility of smartphones as well asthe\ - \ unique interaction techniques offered by such devices.MoPhO offers a new platform\ - \ for research, instrument design, composition, and performance that can be juxtaposedto\ - \ that of a laptop orchestra. We trace the origins of MoPhO,describe the motivations\ - \ behind the current hardware andsoftware design in relation to the backdrop of\ - \ current trendsin mobile music making, detail key interaction conceptsaround\ - \ new repertoire, and conclude with an analysis onthe development of MoPhO thus\ - \ far.},\n address = {Sydney, Australia},\n author = {Oh, Jieun and Herrera, Jorge\ - \ and Bryan, Nicholas J. and Dahl, Luke and Wang, Ge},\n booktitle = {Proceedings\ + ID: NIME20_84 + abstract: 'Digital musical instrument design is often presented as an open-ended + creative process in which technology is adopted and adapted to serve the musical + will of the designer. The real-time music programming languages powering many + new instruments often provide access to audio manipulation at a low level, theoretically + allowing the creation of any sonic structure from primitive operations. As a result, + designers may assume that these seemingly omnipotent tools are pliable vehicles + for the expression of musical ideas. We present the outcomes of a compositional + game in which sound designers were invited to create simple instruments using + common sensors and the Pure Data programming language. We report on the patterns + and structures that often emerged during the exercise, arguing that designers + respond strongly to suggestions offered by the tools they use. We discuss the + idea that current music programming languages may be as culturally loaded as the + communities of practice that produce and use them. 
Instrument making is then best + viewed as a protracted negotiation between designer and tools.' + address: 'Birmingham, UK' + author: 'McPherson, Andrew and Lepri, Giacomo' + bibtex: "@inproceedings{NIME20_84,\n abstract = {Digital musical instrument design\ + \ is often presented as an open-ended creative process in which technology is\ + \ adopted and adapted to serve the musical will of the designer. The real-time\ + \ music programming languages powering many new instruments often provide access\ + \ to audio manipulation at a low level, theoretically allowing the creation of\ + \ any sonic structure from primitive operations. As a result, designers may assume\ + \ that these seemingly omnipotent tools are pliable vehicles for the expression\ + \ of musical ideas. We present the outcomes of a compositional game in which sound\ + \ designers were invited to create simple instruments using common sensors and\ + \ the Pure Data programming language. We report on the patterns and structures\ + \ that often emerged during the exercise, arguing that designers respond strongly\ + \ to suggestions offered by the tools they use. We discuss the idea that current\ + \ music programming languages may be as culturally loaded as the communities of\ + \ practice that produce and use them. 
Instrument making is then best viewed as\ + \ a protracted negotiation between designer and tools.},\n address = {Birmingham,\ + \ UK},\n author = {McPherson, Andrew and Lepri, Giacomo},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177871},\n issn = {2220-4806},\n keywords = {mobile phone\ - \ orchestra, live performance, iPhone, mobile music},\n pages = {82--87},\n title\ - \ = {Evolving The Mobile Phone Orchestra},\n url = {http://www.nime.org/proceedings/2010/nime2010_082.pdf},\n\ - \ year = {2010}\n}\n" + \ doi = {10.5281/zenodo.4813461},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {434--439},\n presentation-video\ + \ = {https://youtu.be/-nRtaucPKx4},\n publisher = {Birmingham City University},\n\ + \ title = {Beholden to our tools: negotiating with technology while sketching\ + \ digital instruments},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper84.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177871 + doi: 10.5281/zenodo.4813461 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'mobile phone orchestra, live performance, iPhone, mobile music' - pages: 82--87 - title: Evolving The Mobile Phone Orchestra - url: http://www.nime.org/proceedings/2010/nime2010_082.pdf - year: 2010 + month: July + pages: 434--439 + presentation-video: https://youtu.be/-nRtaucPKx4 + publisher: Birmingham City University + title: 'Beholden to our tools: negotiating with technology while sketching digital + instruments' + url: https://www.nime.org/proceedings/2020/nime2020_paper84.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Tanaka2010 - abstract: This paper reviews and extends questions of the scope of an interactive - musical instrument and mapping strategies for expressive 
performance. We apply - notions of embodiment and affordance to characterize gestural instruments. We - note that the democratization of sensor technology in consumer devices has extended - the cultural contexts for interaction. We revisit questions of mapping drawing - upon the theory of affordances to consider mapping and instrument together. This - is applied to recent work by the author and his collaborators in the development - of instruments based on mobile devices designed for specific performance situations. - address: 'Sydney, Australia' - author: 'Tanaka, Atau' - bibtex: "@inproceedings{Tanaka2010,\n abstract = {This paper reviews and extends\ - \ questions of the scope of an interactive musical instrument and mapping strategies\ - \ for expressive performance. We apply notions of embodiment and affordance to\ - \ characterize gestural instruments. We note that the democratization of sensor\ - \ technology in consumer devices has extended the cultural contexts for interaction.\ - \ We revisit questions of mapping drawing upon the theory of affordances to consider\ - \ mapping and instrument together. 
This is applied to recent work by the author\ - \ and his collaborators in the development of instruments based on mobile devices\ - \ designed for specific performance situations.},\n address = {Sydney, Australia},\n\ - \ author = {Tanaka, Atau},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177903},\n\ - \ issn = {2220-4806},\n keywords = {Musical affordance, NIME, mapping, instrument\ - \ definition, mobile, multimodal interaction.},\n pages = {88--93},\n title =\ - \ {Mapping Out Instruments, Affordances, and Mobiles},\n url = {http://www.nime.org/proceedings/2010/nime2010_088.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_85 + abstract: 'Percussive fingerstyle is a playing technique adopted by many contemporary + acoustic guitarists, and it has grown substantially in popularity over the last + decade. Its foundations lie in the use of the guitar''s body for percussive lines, + and in the extended range given by the novel use of altered tunings. There are + very few formal accounts of percussive fingerstyle, therefore, we devised an interview + study to investigate its approach to composition, performance and musical experimentation. + Our aim was to gain insight into the technique from a gesture-based point of view, + observe whether modern fingerstyle shares similarities to the approaches in NIME + practice and investigate possible avenues for guitar augmentations inspired by + the percussive technique. We conducted an inductive thematic analysis on the transcribed + interviews: our findings highlight the participants'' material-based approach + to musical interaction and we present a three-zone model of the most common percussive + gestures on the guitar''s body. 
Furthermore, we examine current trends in Digital + Musical Instruments, especially in guitar augmentation, and we discuss possible + future directions in augmented guitars in light of the interviewees'' perspectives.' + address: 'Birmingham, UK' + author: 'Martelloni, Andrea and McPherson, Andrew and Barthet, Mathieu' + bibtex: "@inproceedings{NIME20_85,\n abstract = {Percussive fingerstyle is a playing\ + \ technique adopted by many contemporary acoustic guitarists, and it has grown\ + \ substantially in popularity over the last decade. Its foundations lie in the\ + \ use of the guitar's body for percussive lines, and in the extended range given\ + \ by the novel use of altered tunings. There are very few formal accounts of percussive\ + \ fingerstyle, therefore, we devised an interview study to investigate its approach\ + \ to composition, performance and musical experimentation. Our aim was to gain\ + \ insight into the technique from a gesture-based point of view, observe whether\ + \ modern fingerstyle shares similarities to the approaches in NIME practice and\ + \ investigate possible avenues for guitar augmentations inspired by the percussive\ + \ technique. We conducted an inductive thematic analysis on the transcribed interviews:\ + \ our findings highlight the participants' material-based approach to musical\ + \ interaction and we present a three-zone model of the most common percussive\ + \ gestures on the guitar's body. 
Furthermore, we examine current trends in Digital\ + \ Musical Instruments, especially in guitar augmentation, and we discuss possible\ + \ future directions in augmented guitars in light of the interviewees' perspectives.},\n\ + \ address = {Birmingham, UK},\n author = {Martelloni, Andrea and McPherson, Andrew\ + \ and Barthet, Mathieu},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813463},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {440--445},\n presentation-video = {https://youtu.be/ON8ckEBcQ98},\n\ + \ publisher = {Birmingham City University},\n title = {Percussive Fingerstyle\ + \ Guitar through the Lens of NIME: an Interview Study},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper85.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177903 + doi: 10.5281/zenodo.4813463 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Musical affordance, NIME, mapping, instrument definition, mobile, multimodal - interaction.' - pages: 88--93 - title: 'Mapping Out Instruments, Affordances, and Mobiles' - url: http://www.nime.org/proceedings/2010/nime2010_088.pdf - year: 2010 + month: July + pages: 440--445 + presentation-video: https://youtu.be/ON8ckEBcQ98 + publisher: Birmingham City University + title: 'Percussive Fingerstyle Guitar through the Lens of NIME: an Interview Study' + url: https://www.nime.org/proceedings/2020/nime2020_paper85.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Havryliv2010 - abstract: 'This paper describes a novel method for composing andimprovisation with - real-time chaotic oscillators. 
Recentlydiscovered algebraically simple nonlinear - third-order differential equations are solved and acoustical descriptors relating - to their frequency spectrums are determined accordingto the MPEG-7 specification. - A second nonlinearity is thenadded to these equations: a real-time audio signal. - Descriptive properties of the complex behaviour of these equationsare then determined - as a function of difference tones derived from a Just Intonation scale and the - amplitude ofthe audio signal. By using only the real-time audio signalfrom live - performer/s as an input the causal relationshipbetween acoustic performance gestures - and computer output, including any visual or performer-instruction output,is deterministic - even if the chaotic behaviours are not.' - address: 'Sydney, Australia' - author: 'Havryliv, Mark' - bibtex: "@inproceedings{Havryliv2010,\n abstract = {This paper describes a novel\ - \ method for composing andimprovisation with real-time chaotic oscillators. Recentlydiscovered\ - \ algebraically simple nonlinear third-order differential equations are solved\ - \ and acoustical descriptors relating to their frequency spectrums are determined\ - \ accordingto the MPEG-7 specification. A second nonlinearity is thenadded to\ - \ these equations: a real-time audio signal. 
Descriptive properties of the complex\ - \ behaviour of these equationsare then determined as a function of difference\ - \ tones derived from a Just Intonation scale and the amplitude ofthe audio signal.\ - \ By using only the real-time audio signalfrom live performer/s as an input the\ - \ causal relationshipbetween acoustic performance gestures and computer output,\ - \ including any visual or performer-instruction output,is deterministic even if\ - \ the chaotic behaviours are not.},\n address = {Sydney, Australia},\n author\ - \ = {Havryliv, Mark},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177795},\n\ - \ issn = {2220-4806},\n keywords = {chaos and music, chaotic dynamics and oscillators,\ - \ differential equations and music, mathematica, audio descriptors and mpeg-7},\n\ - \ pages = {94--99},\n title = {Composing For Improvisation with Chaotic Oscillators},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_094.pdf},\n year = {2010}\n\ - }\n" + ID: NIME20_86 + abstract: 'In the field of human computer interaction (HCI) the limitations of prototypes + as the primary artefact used in research are being realised. Prototypes often + remain open in their design, are partially-finished, and have a focus on a specific + aspect of interaction. Previous authors have proposed `research products'' as + a specific category of artefact distinct from both research prototypes and commercial + products. The characteristics of research products are their holistic completeness + as a design artefact, their situatedness in a specific cultural context, and the + fact that they are evaluated for what they are, not what they will become. This + paper discusses the ways in which many instruments created within the context + of New Interfaces for Musical Expression (NIME), including those that are used + in performances, often fall into the category of prototype. 
We shall discuss why + research products might be a useful framing for NIME research. Research products + shall be weighed up against some of the main themes of NIME research: technological + innovation; musical expression; instrumentality. We conclude this paper with a + case study of Strummi, a digital musical instrument which we frame as research + product.' + address: 'Birmingham, UK' + author: 'Jack, Robert and Harrison, Jacob and McPherson, Andrew' + bibtex: "@inproceedings{NIME20_86,\n abstract = {In the field of human computer\ + \ interaction (HCI) the limitations of prototypes as the primary artefact used\ + \ in research are being realised. Prototypes often remain open in their design,\ + \ are partially-finished, and have a focus on a specific aspect of interaction.\ + \ Previous authors have proposed `research products' as a specific category of\ + \ artefact distinct from both research prototypes and commercial products. The\ + \ characteristics of research products are their holistic completeness as a design\ + \ artefact, their situatedness in a specific cultural context, and the fact that\ + \ they are evaluated for what they are, not what they will become. This paper\ + \ discusses the ways in which many instruments created within the context of New\ + \ Interfaces for Musical Expression (NIME), including those that are used in performances,\ + \ often fall into the category of prototype. We shall discuss why research products\ + \ might be a useful framing for NIME research. Research products shall be weighed\ + \ up against some of the main themes of NIME research: technological innovation;\ + \ musical expression; instrumentality. 
We conclude this paper with a case study\ + \ of Strummi, a digital musical instrument which we frame as research product.},\n\ + \ address = {Birmingham, UK},\n author = {Jack, Robert and Harrison, Jacob and\ + \ McPherson, Andrew},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813465},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {446--451},\n presentation-video = {https://youtu.be/luJwlZBeBqY},\n\ + \ publisher = {Birmingham City University},\n title = {Digital Musical Instruments\ + \ as Research Products},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper86.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177795 + doi: 10.5281/zenodo.4813465 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'chaos and music, chaotic dynamics and oscillators, differential equations - and music, mathematica, audio descriptors and mpeg-7' - pages: 94--99 - title: Composing For Improvisation with Chaotic Oscillators - url: http://www.nime.org/proceedings/2010/nime2010_094.pdf - year: 2010 + month: July + pages: 446--451 + presentation-video: https://youtu.be/luJwlZBeBqY + publisher: Birmingham City University + title: Digital Musical Instruments as Research Products + url: https://www.nime.org/proceedings/2020/nime2020_paper86.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Hawryshkewich2010 - abstract: 'Traditional drum machines and digital drum-kits offer users the ability - to practice or perform with a supporting ensemble – such as a bass, guitar and - piano – but rarely provide support in the form of an accompanying percussion part. 
- Beatback is a system which develops upon this missing interaction through offering - a MIDI enabled drum system which learns and plays in the user''s style. In the - contexts of rhythmic practise and exploration, Beatback looks at call-response - and accompaniment models of interaction to enable new possibilities for rhythmic - creativity.' - address: 'Sydney, Australia' - author: 'Hawryshkewich, Andrew and Pasquier, Philippe and Eigenfeldt, Arne' - bibtex: "@inproceedings{Hawryshkewich2010,\n abstract = {Traditional drum machines\ - \ and digital drum-kits offer users the ability to practice or perform with a\ - \ supporting ensemble – such as a bass, guitar and piano – but rarely provide\ - \ support in the form of an accompanying percussion part. Beatback is a system\ - \ which develops upon this missing interaction through offering a MIDI enabled\ - \ drum system which learns and plays in the user's style. In the contexts of rhythmic\ - \ practise and exploration, Beatback looks at call-response and accompaniment\ - \ models of interaction to enable new possibilities for rhythmic creativity.},\n\ - \ address = {Sydney, Australia},\n author = {Hawryshkewich, Andrew and Pasquier,\ - \ Philippe and Eigenfeldt, Arne},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177797},\n\ - \ issn = {2220-4806},\n keywords = {Interactive music interface, real-time, percussion,\ - \ machine learning, Markov models, MIDI.},\n pages = {100--105},\n title = {Beatback\ - \ : A Real-time Interactive Percussion System for Rhythmic Practise and Exploration},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_100.pdf},\n year = {2010}\n\ + ID: NIME20_87 + abstract: 'This paper presents a micro-residency in a pop-up shop and collaborative + making amongst a group of researchers and practitioners. 
The making extends to + sound(-making) objects, instruments, workshop, sound installation, performance + and discourse on DIY electronic music. Our research builds on creative workshopping + and speculative design and is informed by ideas of collective making. The ad hoc + and temporary pop-up space is seen as formative in shaping the outcomes of the + work. Through the lens of curated research, working together with a provocative + brief, we explored handmade objects, craft, non-craft, human error, and the spirit + of DIY, DIYness. We used the Studio Bench - a method that brings making, recording + and performance together in one space - and viewed workshopping and performance + as a holistic event. A range of methodologies were investigated in relation to + NIME. These included the Hardware Mash-up, Speculative Sound Circuits and Reverse + Design, from product to prototype, resulting in the instrument the Radical Nails. + Finally, our work drew on the notion of design as performance and making in public + and further developed our understanding of workshop-installation and performance-installation.' + address: 'Birmingham, UK' + author: 'Patel, Amit D and Richards, John ' + bibtex: "@inproceedings{NIME20_87,\n abstract = {This paper presents a micro-residency\ + \ in a pop-up shop and collaborative making amongst a group of researchers and\ + \ practitioners. The making extends to sound(-making) objects, instruments, workshop,\ + \ sound installation, performance and discourse on DIY electronic music. Our research\ + \ builds on creative workshopping and speculative design and is informed by ideas\ + \ of collective making. The ad hoc and temporary pop-up space is seen as formative\ + \ in shaping the outcomes of the work. Through the lens of curated research, working\ + \ together with a provocative brief, we explored handmade objects, craft, non-craft,\ + \ human error, and the spirit of DIY, DIYness. 
We used the Studio Bench - a method\ + \ that brings making, recording and performance together in one space - and viewed\ + \ workshopping and performance as a holistic event. A range of methodologies were\ + \ investigated in relation to NIME. These included the Hardware Mash-up, Speculative\ + \ Sound Circuits and Reverse Design, from product to prototype, resulting in the\ + \ instrument the Radical Nails. Finally, our work drew on the notion of design\ + \ as performance and making in public and further developed our understanding\ + \ of workshop-installation and performance-installation.},\n address = {Birmingham,\ + \ UK},\n author = {Patel, Amit D and Richards, John },\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813473},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {452--457},\n publisher = {Birmingham\ + \ City University},\n title = {Pop-up for Collaborative Music-making},\n url =\ + \ {https://www.nime.org/proceedings/2020/nime2020_paper87.pdf},\n year = {2020}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177797 + doi: 10.5281/zenodo.4813473 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Interactive music interface, real-time, percussion, machine learning, - Markov models, MIDI.' 
- pages: 100--105 - title: 'Beatback : A Real-time Interactive Percussion System for Rhythmic Practise - and Exploration' - url: http://www.nime.org/proceedings/2010/nime2010_100.pdf - year: 2010 + month: July + pages: 452--457 + publisher: Birmingham City University + title: Pop-up for Collaborative Music-making + url: https://www.nime.org/proceedings/2020/nime2020_paper87.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Gurevich2010 - abstract: 'A qualitative study to investigate the development of stylein performance - with a highly constrained musical instrument is described. A new one-button instrument - was designed, with which several musicians were each asked topractice and develop - a solo performance. Observations oftrends in attributes of these performances - are detailed in relation to participants'' statements in structured interviews.Participants - were observed to develop stylistic variationsboth within the domain of activities - suggested by the constraint, and by discovering non-obvious techniques througha - variety of strategies. Data suggest that stylistic variationsoccurred in spite - of perceived constraint, but also becauseof perceived constraint. Furthermore, - participants tendedto draw on unique experiences, approaches and perspectivesthat - shaped individual performances.' - address: 'Sydney, Australia' - author: 'Gurevich, Michael and Stapleton, Paul and Marquez-Borbon, Adnan' - bibtex: "@inproceedings{Gurevich2010,\n abstract = {A qualitative study to investigate\ - \ the development of stylein performance with a highly constrained musical instrument\ - \ is described. A new one-button instrument was designed, with which several musicians\ - \ were each asked topractice and develop a solo performance. 
Observations oftrends\ - \ in attributes of these performances are detailed in relation to participants'\ - \ statements in structured interviews.Participants were observed to develop stylistic\ - \ variationsboth within the domain of activities suggested by the constraint,\ - \ and by discovering non-obvious techniques througha variety of strategies. Data\ - \ suggest that stylistic variationsoccurred in spite of perceived constraint,\ - \ but also becauseof perceived constraint. Furthermore, participants tendedto\ - \ draw on unique experiences, approaches and perspectivesthat shaped individual\ - \ performances.},\n address = {Sydney, Australia},\n author = {Gurevich, Michael\ - \ and Stapleton, Paul and Marquez-Borbon, Adnan},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177785},\n issn = {2220-4806},\n keywords = {design, interaction,\ - \ performance, persuasive technology},\n pages = {106--111},\n title = {Style\ - \ and Constraint in Electronic Musical Instruments},\n url = {http://www.nime.org/proceedings/2010/nime2010_106.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_88 + abstract: 'This paper introduces a new method for direct control using the voice + via measurement of vocal muscular activation with surface electromyography (sEMG). + Digital musical interfaces based on the voice have typically used indirect control, + in which features extracted from audio signals control the parameters of sound + generation, for example in audio to MIDI controllers. By contrast, focusing on + the musculature of the singing voice allows direct muscular control, or alternatively, + combined direct and indirect control in an augmented vocal instrument. In this + way we aim to both preserve the intimate relationship a vocalist has with their + instrument and key timbral and stylistic characteristics of the voice while expanding + its sonic capabilities. 
This paper discusses other digital instruments which effectively + utilise a combination of indirect and direct control as well as a history of controllers + involving the voice. Subsequently, a new method of direct control from physiological + aspects of singing through sEMG and its capabilities are discussed. Future developments + of the system are further outlined along with usage in performance studies, interactive + live vocal performance, and educational and practice tools.' + address: 'Birmingham, UK' + author: 'Reed, Courtney and McPherson, Andrew' + bibtex: "@inproceedings{NIME20_88,\n abstract = {This paper introduces a new method\ + \ for direct control using the voice via measurement of vocal muscular activation\ + \ with surface electromyography (sEMG). Digital musical interfaces based on the\ + \ voice have typically used indirect control, in which features extracted from\ + \ audio signals control the parameters of sound generation, for example in audio\ + \ to MIDI controllers. By contrast, focusing on the musculature of the singing\ + \ voice allows direct muscular control, or alternatively, combined direct and\ + \ indirect control in an augmented vocal instrument. In this way we aim to both\ + \ preserve the intimate relationship a vocalist has with their instrument and\ + \ key timbral and stylistic characteristics of the voice while expanding its sonic\ + \ capabilities. This paper discusses other digital instruments which effectively\ + \ utilise a combination of indirect and direct control as well as a history of\ + \ controllers involving the voice. Subsequently, a new method of direct control\ + \ from physiological aspects of singing through sEMG and its capabilities are\ + \ discussed. 
Future developments of the system are further outlined along with\ + \ usage in performance studies, interactive live vocal performance, and educational\ + \ and practice tools.},\n address = {Birmingham, UK},\n author = {Reed, Courtney\ + \ and McPherson, Andrew},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813475},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {458--463},\n presentation-video = {https://youtu.be/1nWLgQGNh0g},\n\ + \ publisher = {Birmingham City University},\n title = {Surface Electromyography\ + \ for Direct Vocal Control},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper88.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177785 + doi: 10.5281/zenodo.4813475 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'design, interaction, performance, persuasive technology' - pages: 106--111 - title: Style and Constraint in Electronic Musical Instruments - url: http://www.nime.org/proceedings/2010/nime2010_106.pdf - year: 2010 + month: July + pages: 458--463 + presentation-video: https://youtu.be/1nWLgQGNh0g + publisher: Birmingham City University + title: Surface Electromyography for Direct Vocal Control + url: https://www.nime.org/proceedings/2020/nime2020_paper88.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Choi2010 - abstract: 'We propose an environment that allows users to create music by leveraging - playful visualization and organic interaction. Our attempt to improve ideas drawn - from traditional sequencer paradigm has been made in terms of extemporizing music - and associating with visualization in real-time. 
In order to offer different user - experience and musical possibility, this system incorporates many techniques, - including; flocking simulation, nondeterministic finite automata (NFA), score - file analysis, vector calculation, OpenGL animation, and networking. We transform - a sequencer into an audiovisual platform for composition and performance, which - is furnished with artistry and ease of use. Thus we believe that it is suitable - for not only artists such as algorithmic composers or audiovisual performers, - but also anyone who wants to play music and imagery in a different way. ' - address: 'Sydney, Australia' - author: 'Choi, Hongchan and Wang, Ge' - bibtex: "@inproceedings{Choi2010,\n abstract = {We propose an environment that allows\ - \ users to create music by leveraging playful visualization and organic interaction.\ - \ Our attempt to improve ideas drawn from traditional sequencer paradigm has been\ - \ made in terms of extemporizing music and associating with visualization in real-time.\ - \ In order to offer different user experience and musical possibility, this system\ - \ incorporates many techniques, including; flocking simulation, nondeterministic\ - \ finite automata (NFA), score file analysis, vector calculation, OpenGL animation,\ - \ and networking. We transform a sequencer into an audiovisual platform for composition\ - \ and performance, which is furnished with artistry and ease of use. Thus we believe\ - \ that it is suitable for not only artists such as algorithmic composers or audiovisual\ - \ performers, but also anyone who wants to play music and imagery in a different\ - \ way. 
},\n address = {Sydney, Australia},\n author = {Choi, Hongchan and Wang,\ - \ Ge},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177741},\n issn = {2220-4806},\n\ - \ keywords = {algorithmic composition,audiovisual,automata,behavior simulation,music,music\ - \ sequencer,musical interface,nime10,visualization},\n pages = {112--115},\n title\ - \ = {LUSH : An Organic Eco + Music System},\n url = {http://www.nime.org/proceedings/2010/nime2010_112.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_89 + abstract: 'The presented sound synthesis system allows the individual spatialization + of spectral components in real-time, using a sinusoidal modeling approach within + 3-dimensional sound reproduction systems. A co-developed, dedicated haptic interface + is used to jointly control spectral and spatial attributes of the sound. Within + a user study, participants were asked to create an individual mapping between + control parameters of the interface and rendering parameters of sound synthesis + and spatialization, using a visual programming environment. Resulting mappings + of all participants are evaluated, indicating the preference of single control + parameters for specific tasks. In comparison with mappings intended by the development + team, the results validate certain design decisions and indicate new directions.' + address: 'Birmingham, UK' + author: 'von Coler, Henrik and Lepa, Steffen and Weinzierl, Stefan' + bibtex: "@inproceedings{NIME20_89,\n abstract = {The presented sound synthesis system\ + \ allows the individual spatialization of spectral components in real-time, using\ + \ a sinusoidal modeling approach within 3-dimensional sound reproduction systems.\ + \ A co-developed, dedicated haptic interface is used to jointly control spectral\ + \ and spatial attributes of the sound. 
Within a user study, participants were\ + \ asked to create an individual mapping between control parameters of the interface\ + \ and rendering parameters of sound synthesis and spatialization, using a visual\ + \ programming environment. Resulting mappings of all participants are evaluated,\ + \ indicating the preference of single control parameters for specific tasks. In\ + \ comparison with mappings intended by the development team, the results validate\ + \ certain design decisions and indicate new directions.},\n address = {Birmingham,\ + \ UK},\n author = {von Coler, Henrik and Lepa, Steffen and Weinzierl, Stefan},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813477},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {464--469},\n publisher = {Birmingham City University},\n title = {User-Defined\ + \ Mappings for Spatial Sound Synthesis},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper89.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177741 + doi: 10.5281/zenodo.4813477 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'algorithmic composition,audiovisual,automata,behavior simulation,music,music - sequencer,musical interface,nime10,visualization' - pages: 112--115 - title: 'LUSH : An Organic Eco + Music System' - url: http://www.nime.org/proceedings/2010/nime2010_112.pdf - year: 2010 + month: July + pages: 464--469 + publisher: Birmingham City University + title: User-Defined Mappings for Spatial Sound Synthesis + url: https://www.nime.org/proceedings/2020/nime2020_paper89.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Yamaguchi2010 - abstract: 'In this paper, we introduce a wireless musical interface driven by grasping - forces and human motion. 
The sounds generated by the traditional digital musical - instruments are dependent on the physical shape of the musical instruments. The - freedom of the musical performance is restricted by its structure. Therefore, - the sounds cannot be generated with the body expression like the dance. We developed - a ball-shaped interface, TwinkleBall, to achieve the free-style performance. A - photo sensor is embedded in the translucent rubber ball to detect the grasping - force of the performer. The grasping force is translated into the luminance intensity - for processing. Moreover, an accelerometer is also embedded in the interface for - motion sensing. By using these sensors, a performer can control the note and volume - by varying grasping force and motion respectively. The features of the proposed - interface are ball-shaped, wireless, and handheld size. As a result, the proposed - interface is able to generate the sound from the body expression such as dance. ' - address: 'Sydney, Australia' - author: 'Yamaguchi, Tomoyuki and Kobayashi, Tsukasa and Ariga, Anna and Hashimoto, - Shuji' - bibtex: "@inproceedings{Yamaguchi2010,\n abstract = {In this paper, we introduce\ - \ a wireless musical interface driven by grasping forces and human motion. The\ - \ sounds generated by the traditional digital musical instruments are dependent\ - \ on the physical shape of the musical instruments. The freedom of the musical\ - \ performance is restricted by its structure. Therefore, the sounds cannot be\ - \ generated with the body expression like the dance. We developed a ball-shaped\ - \ interface, TwinkleBall, to achieve the free-style performance. A photo sensor\ - \ is embedded in the translucent rubber ball to detect the grasping force of the\ - \ performer. The grasping force is translated into the luminance intensity for\ - \ processing. Moreover, an accelerometer is also embedded in the interface for\ - \ motion sensing. 
By using these sensors, a performer can control the note and\ - \ volume by varying grasping force and motion respectively. The features of the\ - \ proposed interface are ball-shaped, wireless, and handheld size. As a result,\ - \ the proposed interface is able to generate the sound from the body expression\ - \ such as dance. },\n address = {Sydney, Australia},\n author = {Yamaguchi, Tomoyuki\ - \ and Kobayashi, Tsukasa and Ariga, Anna and Hashimoto, Shuji},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177927},\n issn = {2220-4806},\n keywords = {Musical\ - \ Interface, Embodied Sound Media, Dance Performance.},\n pages = {116--119},\n\ - \ title = {TwinkleBall : A Wireless Musical Interface for Embodied Sound Media},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_116.pdf},\n year = {2010}\n\ - }\n" + ID: NIME20_9 + abstract: 'The popularity of applying machine learning techniques in musical domains + has created an inherent availability of freely accessible pre-trained neural network + (NN) models ready for use in creative applications. This work outlines the implementation + of one such application in the form of an assistance tool designed for live improvisational + performances by laptop ensembles. The primary intention was to leverage off-the-shelf + pre-trained NN models as a basis for assisting individual performers either as + musical novices looking to engage with more experienced performers or as a tool + to expand musical possibilities through new forms of creative expression. The + system expands upon a variety of ideas found in different research areas including + new interfaces for musical expression, generative music and group performance + to produce a networked performance solution served via a web-browser interface. 
+ The final implementation of the system offers performers a mixture of high and + low-level controls to influence the shape of sequences of notes output by locally + run NN models in real time, also allowing performers to define their level of + engagement with the assisting generative models. Two test performances were played, + with the system shown to feasibly support four performers over a four minute piece + while producing musically cohesive and engaging music. Iterations on the design + of the system exposed technical constraints on the use of a JavaScript environment + for generative models in a live music context, largely derived from inescapable + processing overheads.' + address: 'Birmingham, UK' + author: 'Proctor, Rohan and Martin, Charles Patrick' + bibtex: "@inproceedings{NIME20_9,\n abstract = {The popularity of applying machine\ + \ learning techniques in musical domains has created an inherent availability\ + \ of freely accessible pre-trained neural network (NN) models ready for use in\ + \ creative applications. This work outlines the implementation of one such application\ + \ in the form of an assistance tool designed for live improvisational performances\ + \ by laptop ensembles. The primary intention was to leverage off-the-shelf pre-trained\ + \ NN models as a basis for assisting individual performers either as musical novices\ + \ looking to engage with more experienced performers or as a tool to expand musical\ + \ possibilities through new forms of creative expression. The system expands upon\ + \ a variety of ideas found in different research areas including new interfaces\ + \ for musical expression, generative music and group performance to produce a\ + \ networked performance solution served via a web-browser interface. 
The final\ + \ implementation of the system offers performers a mixture of high and low-level\ + \ controls to influence the shape of sequences of notes output by locally run\ + \ NN models in real time, also allowing performers to define their level of engagement\ + \ with the assisting generative models. Two test performances were played, with\ + \ the system shown to feasibly support four performers over a four minute piece\ + \ while producing musically cohesive and engaging music. Iterations on the design\ + \ of the system exposed technical constraints on the use of a JavaScript environment\ + \ for generative models in a live music context, largely derived from inescapable\ + \ processing overheads.},\n address = {Birmingham, UK},\n author = {Proctor, Rohan\ + \ and Martin, Charles Patrick},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813481},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {43--48},\n publisher = {Birmingham City University},\n\ + \ title = {A Laptop Ensemble Performance System using Recurrent Neural Networks},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper9.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177927 + doi: 10.5281/zenodo.4813481 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Musical Interface, Embodied Sound Media, Dance Performance.' 
- pages: 116--119 - title: 'TwinkleBall : A Wireless Musical Interface for Embodied Sound Media' - url: http://www.nime.org/proceedings/2010/nime2010_116.pdf - year: 2010 + month: July + pages: 43--48 + publisher: Birmingham City University + title: A Laptop Ensemble Performance System using Recurrent Neural Networks + url: https://www.nime.org/proceedings/2020/nime2020_paper9.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Cannon2010 - abstract: This paper presents research undertaken by the Bent Leather Band investigating - the application of live Ambisonics to large digital-instrument ensemble improvisation. - Their playable approach to live ambisonic projection is inspired by the work of - Trevor Wishart and presents a systematic investigation of the potential for live - spatial motion improvisation. - address: 'Sydney, Australia' - author: 'Cannon, Joanne and Favilla, Stuart' - bibtex: "@inproceedings{Cannon2010,\n abstract = {This paper presents research undertaken\ - \ by the Bent Leather Band investigating the application of live Ambisonics to\ - \ large digital-instrument ensemble improvisation. 
Their playable approach to\ - \ live ambisonic projection is inspired by the work of Trevor Wishart and presents\ - \ a systematic investigation of the potential for live spatial motion improvisation.},\n\ - \ address = {Sydney, Australia},\n author = {Cannon, Joanne and Favilla, Stuart},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177735},\n issn = {2220-4806},\n\ - \ keywords = {ambisonics, augmented instruments, expressive spatial motion, playable\ - \ instruments},\n pages = {120--124},\n title = {Expression and Spatial Motion\ - \ : Playable Ambisonics},\n url = {http://www.nime.org/proceedings/2010/nime2010_120.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_90 + abstract: 'In this paper, we present and evaluate Elemental, a NIME (New Interface + for Musical Expression) based on audio synthesis of sounds of meteorological phenomena, + namely rain, wind and thunder, intended for application in contemporary music/sound + art, performing arts and entertainment. We first describe the system, controlled + by the performer’s arms through Inertial Measuring Units and Electromyography + sensors. The produced data is analyzed and used through mapping strategies as + input of the sound synthesis engine. We conducted user studies to refine the sound + synthesis engine, the choice of gestures and the mappings between them, and to + finally evaluate this proof of concept. Indeed, the users approached the system + with their own awareness ranging from the manipulation of abstract sound to the + direct simulation of atmospheric phenomena - in the latter case, it could even + be to revive memories or to create novel situations. This suggests that the approach + of instrumentalization of sounds of known source may be a fruitful strategy for + constructing expressive interactive sonic systems.' 
+ address: 'Birmingham, UK' + author: 'Brizolara, Tiago and Gibet, Sylvie and Larboulette, Caroline ' + bibtex: "@inproceedings{NIME20_90,\n abstract = {In this paper, we present and evaluate\ + \ Elemental, a NIME (New Interface for Musical Expression) based on audio synthesis\ + \ of sounds of meteorological phenomena, namely rain, wind and thunder, intended\ + \ for application in contemporary music/sound art, performing arts and entertainment.\ + \ We first describe the system, controlled by the performer’s arms through Inertial\ + \ Measuring Units and Electromyography sensors. The produced data is analyzed\ + \ and used through mapping strategies as input of the sound synthesis engine.\ + \ We conducted user studies to refine the sound synthesis engine, the choice of\ + \ gestures and the mappings between them, and to finally evaluate this proof of\ + \ concept. Indeed, the users approached the system with their own awareness ranging\ + \ from the manipulation of abstract sound to the direct simulation of atmospheric\ + \ phenomena - in the latter case, it could even be to revive memories or to create\ + \ novel situations. 
This suggests that the approach of instrumentalization of\ + \ sounds of known source may be a fruitful strategy for constructing expressive\ + \ interactive sonic systems.},\n address = {Birmingham, UK},\n author = {Brizolara,\ + \ Tiago and Gibet, Sylvie and Larboulette, Caroline },\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813483},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {470--476},\n publisher = {Birmingham\ + \ City University},\n title = {Elemental: a Gesturally Controlled System to Perform\ + \ Meteorological Sounds},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper90.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177735 + doi: 10.5281/zenodo.4813483 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'ambisonics, augmented instruments, expressive spatial motion, playable - instruments' - pages: 120--124 - title: 'Expression and Spatial Motion : Playable Ambisonics' - url: http://www.nime.org/proceedings/2010/nime2010_120.pdf - year: 2010 + month: July + pages: 470--476 + publisher: Birmingham City University + title: 'Elemental: a Gesturally Controlled System to Perform Meteorological Sounds' + url: https://www.nime.org/proceedings/2020/nime2020_paper90.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Collins2010 - abstract: 'The hypothesis of this interaction research project is that it can be - stimulating for experimental musicians to confront a system which ‘opposes’ their - musical style. The ‘contrary motion’ of the title is the name of a MIDI-based - realtime musical software agent which uses machine listening to establish the - musical context, and thereby chooses its own responses to differentiate its position - from that of its human interlocutant. 
To do this requires a deep consideration - of the space of musical actions, so as to explicate what opposition should constitute, - and machine listening technology (most prominently represented by new online beat - and stream tracking algorithms) which gives an accurate measurement of player - position so as to consistently avoid it. An initial pilot evaluation was undertaken, - feeding back critical data to the developing design.' - address: 'Sydney, Australia' - author: 'Collins, Nick' - bibtex: "@inproceedings{Collins2010,\n abstract = {The hypothesis of this interaction\ - \ research project is that it can be stimulating for experimental musicians to\ - \ confront a system which ‘opposes’ their musical style. The ‘contrary motion’\ - \ of the title is the name of a MIDI-based realtime musical software agent which\ - \ uses machine listening to establish the musical context, and thereby chooses\ - \ its own responses to differentiate its position from that of its human interlocutant.\ - \ To do this requires a deep consideration of the space of musical actions, so\ - \ as to explicate what opposition should constitute, and machine listening technology\ - \ (most prominently represented by new online beat and stream tracking algorithms)\ - \ which gives an accurate measurement of player position so as to consistently\ - \ avoid it. 
An initial pilot evaluation was undertaken, feeding back critical\ - \ data to the developing design.},\n address = {Sydney, Australia},\n author =\ - \ {Collins, Nick},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177747},\n\ - \ issn = {2220-4806},\n keywords = {contrary, beat tracking, stream analysis,\ - \ musical agent},\n pages = {125--129},\n title = {Contrary Motion : An Oppositional\ - \ Interactive Music System},\n url = {http://www.nime.org/proceedings/2010/nime2010_125.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_91 + abstract: 'This paper describes the ongoing process of developing RAW, a collaborative + body–machine instrument that relies on ''sculpting'' the sonification of raw EMG + signals. The instrument is built around two Myo armbands located on the forearms + of the performer. These are used to investigate muscle contraction, which is again + used as the basis for the sonic interaction design. Using a practice-based approach, + the aim is to explore the musical aesthetics of naturally occurring bioelectric + signals. We are particularly interested in exploring the differences between processing + at audio rate versus control rate, and how the level of detail in the signal–and + the complexity of the mappings–influence the experience of control in the instrument. + This is exemplified through reflections on four concerts in which RAW has been + used in different types of collective improvisation.' + address: 'Birmingham, UK' + author: 'Erdem, Çağrı and Jensenius, Alexander Refsum' + bibtex: "@inproceedings{NIME20_91,\n abstract = {This paper describes the ongoing\ + \ process of developing RAW, a collaborative body–machine instrument that relies\ + \ on 'sculpting' the sonification of raw EMG signals. The instrument is built\ + \ around two Myo armbands located on the forearms of the performer. 
These are\ + \ used to investigate muscle contraction, which is again used as the basis for\ + \ the sonic interaction design. Using a practice-based approach, the aim is to\ + \ explore the musical aesthetics of naturally occurring bioelectric signals. We\ + \ are particularly interested in exploring the differences between processing\ + \ at audio rate versus control rate, and how the level of detail in the signal–and\ + \ the complexity of the mappings–influence the experience of control in the instrument.\ + \ This is exemplified through reflections on four concerts in which RAW has been\ + \ used in different types of collective improvisation.},\n address = {Birmingham,\ + \ UK},\n author = {Erdem, Çağrı and Jensenius, Alexander Refsum},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.4813485},\n editor = {Romain Michon and\ + \ Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages = {477--482},\n\ + \ presentation-video = {https://youtu.be/gX-X1iw7uWE},\n publisher = {Birmingham\ + \ City University},\n title = {RAW: Exploring Control Structures for Muscle-based\ + \ Interaction in Collective Improvisation},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper91.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177747 + doi: 10.5281/zenodo.4813485 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'contrary, beat tracking, stream analysis, musical agent' - pages: 125--129 - title: 'Contrary Motion : An Oppositional Interactive Music System' - url: http://www.nime.org/proceedings/2010/nime2010_125.pdf - year: 2010 + month: July + pages: 477--482 + presentation-video: https://youtu.be/gX-X1iw7uWE + publisher: Birmingham City University + title: 'RAW: Exploring Control Structures for Muscle-based Interaction in Collective + 
Improvisation' + url: https://www.nime.org/proceedings/2020/nime2020_paper91.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Deleflie2010 - abstract: 'The tools for spatial composition typically model just a small subset - of the spatial audio cues known to researchers. As composers explore this medium - it has become evident that the nature of spatial sound perception is complex. - Yet interfaces for spatial composition are often simplistic and the end results - can be disappointing. This paper presents an interface that is designed to liberate - the composer from thinking of spatialised sound as points in space. Instead, visual - images are used to define sound in terms of shape, size and location. Images can - be sequenced into video, thereby creating rich and complex temporal soundscapes. - The interface offers both the ability to craft soundscapes and also compose their - evolution in time. ' - address: 'Sydney, Australia' - author: 'Deleflie, Etienne and Schiemer, Greg' - bibtex: "@inproceedings{Deleflie2010,\n abstract = {The tools for spatial composition\ - \ typically model just a small subset of the spatial audio cues known to researchers.\ - \ As composers explore this medium it has become evident that the nature of spatial\ - \ sound perception is complex. Yet interfaces for spatial composition are often\ - \ simplistic and the end results can be disappointing. This paper presents an\ - \ interface that is designed to liberate the composer from thinking of spatialised\ - \ sound as points in space. Instead, visual images are used to define sound in\ - \ terms of shape, size and location. Images can be sequenced into video, thereby\ - \ creating rich and complex temporal soundscapes. The interface offers both the\ - \ ability to craft soundscapes and also compose their evolution in time. 
},\n\ - \ address = {Sydney, Australia},\n author = {Deleflie, Etienne and Schiemer, Greg},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177753},\n issn = {2220-4806},\n\ - \ keywords = {Spatial audio, surround sound, ambisonics, granular synthesis, decorrelation,\ - \ diffusion.},\n pages = {130--135},\n title = {Images as Spatial Sound Maps},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_130.pdf},\n year = {2010}\n\ - }\n" + ID: NIME20_92 + abstract: 'Mobile devices provide musicians with the convenience of musical accompaniment + wherever they are, granting them new methods for developing their craft. We developed + the application SmartDrone to give users the freedom to practice in different + harmonic settings with the assistance of their smartphone. This application further + explores the area of dynamic accompaniment by implementing functionality so that + chords are generated based on the key in which the user is playing. Since this + app was designed to be a tool for scale practice, drone-like accompaniment was + chosen so that musicians could experiment with combinations of melody and harmony. + The details of the application development process are discussed in this paper, + with the main focus on scale analysis and harmonic transposition. By using these + two components, the application is able to dynamically alter key to reflect the + user''s playing. As well as the design and implementation details, this paper + reports and examines feedback from a small user study of undergraduate music students + who used the app. ' + address: 'Birmingham, UK' + author: 'MacDonald, Travis C and Hughes, James and MacKenzie, Barry' + bibtex: "@inproceedings{NIME20_92,\n abstract = {Mobile devices provide musicians\ + \ with the convenience of musical accompaniment wherever they are, granting them\ + \ new methods for developing their craft. 
We developed the application SmartDrone\ + \ to give users the freedom to practice in different harmonic settings with the\ + \ assistance of their smartphone. This application further explores the area of\ + \ dynamic accompaniment by implementing functionality so that chords are generated\ + \ based on the key in which the user is playing. Since this app was designed to\ + \ be a tool for scale practice, drone-like accompaniment was chosen so that musicians\ + \ could experiment with combinations of melody and harmony. The details of the\ + \ application development process are discussed in this paper, with the main focus\ + \ on scale analysis and harmonic transposition. By using these two components,\ + \ the application is able to dynamically alter key to reflect the user's playing.\ + \ As well as the design and implementation details, this paper reports and examines\ + \ feedback from a small user study of undergraduate music students who used the\ + \ app. },\n address = {Birmingham, UK},\n author = {MacDonald, Travis C and Hughes,\ + \ James and MacKenzie, Barry},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813488},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {483--488},\n publisher = {Birmingham City University},\n\ + \ title = {SmartDrone: An Aurally Interactive Harmonic Drone},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper92.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177753 + doi: 10.5281/zenodo.4813488 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Spatial audio, surround sound, ambisonics, granular synthesis, decorrelation, - diffusion.' 
- pages: 130--135 - title: Images as Spatial Sound Maps - url: http://www.nime.org/proceedings/2010/nime2010_130.pdf - year: 2010 + month: July + pages: 483--488 + publisher: Birmingham City University + title: 'SmartDrone: An Aurally Interactive Harmonic Drone' + url: https://www.nime.org/proceedings/2020/nime2020_paper92.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Schlei2010 - abstract: 'Multi-point devices are rapidly becoming a practical interface choice - for electronic musicians. Interfaces that generate multiple simultaneous streams - of point data present a unique mapping challenge. This paper describes an analysis - system for point relationships that acts as a bridge between raw streams of multi-point - data and the instruments they control, using a multipoint trackpad to test various - configurations. The aim is to provide a practical approach for instrument programmers - working with multi-point tools, while highlighting the difference between mapping - systems based on point coordinate streams, grid evaluations, or object interaction - and mapping systems based on multi-point data relationships. ' - address: 'Sydney, Australia' - author: 'Schlei, Kevin' - bibtex: "@inproceedings{Schlei2010,\n abstract = {Multi-point devices are rapidly\ - \ becoming a practical interface choice for electronic musicians. Interfaces that\ - \ generate multiple simultaneous streams of point data present a unique mapping\ - \ challenge. This paper describes an analysis system for point relationships that\ - \ acts as a bridge between raw streams of multi-point data and the instruments\ - \ they control, using a multipoint trackpad to test various configurations. 
The\ - \ aim is to provide a practical approach for instrument programmers working with\ - \ multi-point tools, while highlighting the difference between mapping systems\ - \ based on point coordinate streams, grid evaluations, or object interaction and\ - \ mapping systems based on multi-point data relationships. },\n address = {Sydney,\ - \ Australia},\n author = {Schlei, Kevin},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177891},\n\ - \ issn = {2220-4806},\n keywords = {Multi-point, multi-touch interface, instrument\ - \ mapping, multi-point data analysis, trackpad instrument},\n pages = {136--139},\n\ - \ title = {Relationship-Based Instrument Mapping of Multi-Point Data Streams Using\ - \ a Trackpad Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_136.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_93 + abstract: 'Previous research on musical embodiment has reported that expert performers + often regard their instruments as an extension of their body. Not every digital + musical instrument seeks to create a close relationship between body and instrument, + but even for the many that do, the design process often focuses heavily on technical + and sonic factors, with relatively less attention to the bodily experience of + the performer. In this paper we propose Somaesthetic design as an alternative + to explore this space. The Soma method aims to attune the sensibilities of designers, + as well as their experience of their body, and make use of these notions as a + resource for creative design. We then report on a series of workshops exploring + the relationship between the body and the guitar with a Soma design approach. + The workshops resulted in a series of guitar-related artefacts and NIMEs that + emerged from the somatic exploration of balance and tension during guitar performance. 
+ Lastly we present lessons learned from our research that could inform future Soma-based + musical instrument design, and how NIME research may also inform Soma design.' + address: 'Birmingham, UK' + author: 'Martinez Avila, Juan P and Tsaknaki, Vasiliki and Karpashevich, Pavel + and Windlin, Charles and Valenti, Niklas and Höök, Kristina and McPherson, Andrew + and Benford, Steve' + bibtex: "@inproceedings{NIME20_93,\n abstract = {Previous research on musical embodiment\ + \ has reported that expert performers often regard their instruments as an extension\ + \ of their body. Not every digital musical instrument seeks to create a close\ + \ relationship between body and instrument, but even for the many that do, the\ + \ design process often focuses heavily on technical and sonic factors, with relatively\ + \ less attention to the bodily experience of the performer. In this paper we propose\ + \ Somaesthetic design as an alternative to explore this space. The Soma method\ + \ aims to attune the sensibilities of designers, as well as their experience of\ + \ their body, and make use of these notions as a resource for creative design.\ + \ We then report on a series of workshops exploring the relationship between the\ + \ body and the guitar with a Soma design approach. The workshops resulted in a\ + \ series of guitar-related artefacts and NIMEs that emerged from the somatic exploration\ + \ of balance and tension during guitar performance. 
Lastly we present lessons\ + \ learned from our research that could inform future Soma-based musical instrument\ + \ design, and how NIME research may also inform Soma design.},\n address = {Birmingham,\ + \ UK},\n author = {Martinez Avila, Juan P and Tsaknaki, Vasiliki and Karpashevich,\ + \ Pavel and Windlin, Charles and Valenti, Niklas and Höök, Kristina and McPherson,\ + \ Andrew and Benford, Steve},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813491},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {489--494},\n presentation-video = {https://youtu.be/i4UN_23A_SE},\n\ + \ publisher = {Birmingham City University},\n title = {Soma Design for NIME},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper93.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177891 + doi: 10.5281/zenodo.4813491 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Multi-point, multi-touch interface, instrument mapping, multi-point data - analysis, trackpad instrument' - pages: 136--139 - title: Relationship-Based Instrument Mapping of Multi-Point Data Streams Using a - Trackpad Interface - url: http://www.nime.org/proceedings/2010/nime2010_136.pdf - year: 2010 + month: July + pages: 489--494 + presentation-video: https://youtu.be/i4UN_23A_SE + publisher: Birmingham City University + title: Soma Design for NIME + url: https://www.nime.org/proceedings/2020/nime2020_paper93.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Wyse2010 - abstract: 'An important part of building interactive sound models is designing the - interface and control strategy. 
The multidimensional structure of the gestures - natural for a musical or physical interface may have little obvious relationship - to the parameters that a sound synthesis algorithm exposes for control. A common - situation arises when there is a nonlinear synthesis technique for which a traditional - instrumental interface with quasi-independent control of pitch and expression - is desired. This paper presents a semi-automatic meta-modeling tool called the - Instrumentalizer for embedding arbitrary synthesis algorithms in a control structure - that exposes traditional instrument controls for pitch and expression. ' - address: 'Sydney, Australia' - author: 'Wyse, Lonce and Duy, Nguyen D.' - bibtex: "@inproceedings{Wyse2010,\n abstract = {An important part of building interactive\ - \ sound models is designing the interface and control strategy. The multidimensional\ - \ structure of the gestures natural for a musical or physical interface may have\ - \ little obvious relationship to the parameters that a sound synthesis algorithm\ - \ exposes for control. A common situation arises when there is a nonlinear synthesis\ - \ technique for which a traditional instrumental interface with quasi-independent\ - \ control of pitch and expression is desired. This paper presents a semi-automatic\ - \ meta-modeling tool called the Instrumentalizer for embedding arbitrary synthesis\ - \ algorithms in a control structure that exposes traditional instrument controls\ - \ for pitch and expression. 
},\n address = {Sydney, Australia},\n author = {Wyse,\ - \ Lonce and Duy, Nguyen D.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177925},\n\ - \ issn = {2220-4806},\n keywords = {Musical interface, parameter mapping, expressive\ - \ control.},\n pages = {140--143},\n title = {Instrumentalizing Synthesis Models},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_140.pdf},\n year = {2010}\n\ - }\n" + ID: NIME20_94 + abstract: 'The khipu is an information processing and transmission device used mainly + by the Inca empire and previous Andean societies. This mnemotechnic interface + is one of the first textile computers known, consisting of a central wool or cotton + cord to which other strings are attached with knots of different shapes, colors, + and sizes encrypting different kinds of values and information. The system was + widely used until the Spanish colonization that banned their use and destroyed + a large number of these devices. This paper introduces the creation process of + a NIME based in a khipu converted into an electronic instrument for the interaction + and generation of live experimental sound by weaving knots with conductive rubber + cords, and its implementation in the performance Knotting the memory//Encoding + the Khipu_ that aim to pay homage to this system, from a decolonial perspective + continuing the interrupted legacy of this ancestral practice in a different experience + of tangible live coding and computer music, as well as weaving the past with the + present of the indigenous and people resistance of the Andean territory with their + sounds.' 
+ address: 'Birmingham, UK' + author: 'Cadavid, Laddy P' + bibtex: "@inproceedings{NIME20_94,\n abstract = {The khipu is an information processing\ + \ and transmission device used mainly by the Inca empire and previous Andean societies.\ + \ This mnemotechnic interface is one of the first textile computers known, consisting\ + \ of a central wool or cotton cord to which other strings are attached with knots\ + \ of different shapes, colors, and sizes encrypting different kinds of values\ + \ and information. The system was widely used until the Spanish colonization that\ + \ banned their use and destroyed a large number of these devices. This paper introduces\ + \ the creation process of a NIME based in a khipu converted into an electronic\ + \ instrument for the interaction and generation of live experimental sound by\ + \ weaving knots with conductive rubber cords, and its implementation in the performance\ + \ Knotting the memory//Encoding the Khipu_ that aim to pay homage to this system,\ + \ from a decolonial perspective continuing the interrupted legacy of this ancestral\ + \ practice in a different experience of tangible live coding and computer music,\ + \ as well as weaving the past with the present of the indigenous and people resistance\ + \ of the Andean territory with their sounds.},\n address = {Birmingham, UK},\n\ + \ author = {Cadavid, Laddy P},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813495},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {495--498},\n presentation-video = {https://youtu.be/nw5rbc15pT8},\n\ + \ publisher = {Birmingham City University},\n title = {Knotting the memory//Encoding\ + \ the Khipu_: Reuse of an ancient Andean device as a NIME },\n url = {https://www.nime.org/proceedings/2020/nime2020_paper94.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International 
Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177925 + doi: 10.5281/zenodo.4813495 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Musical interface, parameter mapping, expressive control.' - pages: 140--143 - title: Instrumentalizing Synthesis Models - url: http://www.nime.org/proceedings/2010/nime2010_140.pdf - year: 2010 + month: July + pages: 495--498 + presentation-video: https://youtu.be/nw5rbc15pT8 + publisher: Birmingham City University + title: 'Knotting the memory//Encoding the Khipu_: Reuse of an ancient Andean device + as a NIME ' + url: https://www.nime.org/proceedings/2020/nime2020_paper94.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Cassinelli2010 - abstract: 'scoreLight is a playful musical instrument capable of generating sound - from the lines of drawings as well as from theedges of three-dimensional objects - nearby (including everyday objects, sculptures and architectural details, but - alsothe performer''s hands or even the moving silhouettes ofdancers). There is - no camera nor projector: a laser spotexplores shapes as a pick-up head would search - for soundover the surface of a vinyl record --- with the significant difference - that the groove is generated by the contours of thedrawing itself.' - address: 'Sydney, Australia' - author: 'Cassinelli, Alavaro and Kuribara, Yusaku and Zerroug, Alexis and Ishikawa, - Masatoshi and Manabe, Daito' - bibtex: "@inproceedings{Cassinelli2010,\n abstract = {scoreLight is a playful musical\ - \ instrument capable of generating sound from the lines of drawings as well as\ - \ from theedges of three-dimensional objects nearby (including everyday objects,\ - \ sculptures and architectural details, but alsothe performer's hands or even\ - \ the moving silhouettes ofdancers). 
There is no camera nor projector: a laser\ - \ spotexplores shapes as a pick-up head would search for soundover the surface\ - \ of a vinyl record --- with the significant difference that the groove is generated\ - \ by the contours of thedrawing itself.},\n address = {Sydney, Australia},\n author\ - \ = {Cassinelli, Alavaro and Kuribara, Yusaku and Zerroug, Alexis and Ishikawa,\ - \ Masatoshi and Manabe, Daito},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177739},\n\ - \ issn = {2220-4806},\n keywords = {H5.2 [User Interfaces] interaction styles\ - \ / H.5.5 [Sound and Music Computing] Methodologies and techniques / J.5 [Arts\ - \ and Humanities] performing arts},\n pages = {144--149},\n title = {scoreLight\ - \ : Playing with a Human-Sized Laser Pick-Up},\n url = {http://www.nime.org/proceedings/2010/nime2010_144.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_95 + abstract: 'The recent proliferation of commercial software claiming ground in the + field of music AI has provided opportunity to engage with AI in music making without + the need to use libraries aimed at those with programming skills. Pre-packaged + music AI software has the potential to broaden access to machine learning tools + but it is unclear how widely these softwares are used by music technologists or + how engagement affects attitudes towards AI in music making. To interrogate these + questions we undertook a survey in October 2019, gaining 117 responses. The survey + collected statistical information on the use of pre-packaged and self-written + music AI software. Respondents reported a range of musical outputs including producing + recordings, live performance and generative work across many genres of music making. + The survey also gauged general attitudes towards AI in music and provided an open + field for general comments. 
The responses to the survey suggested a forward-looking + attitude to music AI with participants often pointing to the future potential + of AI tools, rather than present utility. Optimism was partially related to programming + skill with those with more experience showing higher skepticism towards the current + state and future potential of AI.' + address: 'Birmingham, UK' + author: 'Knotts, Shelly and Collins, Nick' + bibtex: "@inproceedings{NIME20_95,\n abstract = {The recent proliferation of commercial\ + \ software claiming ground in the field of music AI has provided opportunity to\ + \ engage with AI in music making without the need to use libraries aimed at those\ + \ with programming skills. Pre-packaged music AI software has the potential to\ + \ broaden access to machine learning tools but it is unclear how widely these\ + \ softwares are used by music technologists or how engagement affects attitudes\ + \ towards AI in music making. To interrogate these questions we undertook a survey\ + \ in October 2019, gaining 117 responses. The survey collected statistical information\ + \ on the use of pre-packaged and self-written music AI software. Respondents reported\ + \ a range of musical outputs including producing recordings, live performance\ + \ and generative work across many genres of music making. The survey also gauged\ + \ general attitudes towards AI in music and provided an open field for general\ + \ comments. The responses to the survey suggested a forward-looking attitude to\ + \ music AI with participants often pointing to the future potential of AI tools,\ + \ rather than present utility. 
Optimism was partially related to programming skill\ + \ with those with more experience showing higher skepticism towards the current\ + \ state and future potential of AI.},\n address = {Birmingham, UK},\n author =\ + \ {Knotts, Shelly and Collins, Nick},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.4813499},\n\ + \ editor = {Romain Michon and Franziska Schroeder},\n issn = {2220-4806},\n month\ + \ = {July},\n pages = {499--504},\n presentation-video = {https://youtu.be/v6hT3ED3N60},\n\ + \ publisher = {Birmingham City University},\n title = {A survey on the uptake\ + \ of Music AI Software},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper95.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177739 + doi: 10.5281/zenodo.4813499 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'H5.2 [User Interfaces] interaction styles / H.5.5 [Sound and Music Computing] - Methodologies and techniques / J.5 [Arts and Humanities] performing arts' - pages: 144--149 - title: 'scoreLight : Playing with a Human-Sized Laser Pick-Up' - url: http://www.nime.org/proceedings/2010/nime2010_144.pdf - year: 2010 + month: July + pages: 499--504 + presentation-video: https://youtu.be/v6hT3ED3N60 + publisher: Birmingham City University + title: A survey on the uptake of Music AI Software + url: https://www.nime.org/proceedings/2020/nime2020_paper95.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Yerkes2010 - abstract: Disky is a computer hard drive re-purposed into a do-it-yourself USB turntable - controller that offers high resolution and low latency for controlling parameters - of multimedia performance software. 
Disky is a response to the challenge “re-purpose - something that is often discarded and share it with the do-it-yourself community - to promote reuse!” - address: 'Sydney, Australia' - author: 'Yerkes, Karl and Shear, Greg and Wright, Matthew' - bibtex: "@inproceedings{Yerkes2010,\n abstract = {Disky is a computer hard drive\ - \ re-purposed into a do-it-yourself USB turntable controller that offers high\ - \ resolution and low latency for controlling parameters of multimedia performance\ - \ software. Disky is a response to the challenge “re-purpose something that is\ - \ often discarded and share it with the do-it-yourself community to promote reuse!”},\n\ - \ address = {Sydney, Australia},\n author = {Yerkes, Karl and Shear, Greg and\ - \ Wright, Matthew},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177929},\n\ - \ issn = {2220-4806},\n keywords = {turntable, dial, encoder, re-purposed, hard\ - \ drive, scratch-ing, inherent dynamics, DIY},\n pages = {150--155},\n title =\ - \ {Disky : a DIY Rotational Interface with Inherent Dynamics},\n url = {http://www.nime.org/proceedings/2010/nime2010_150.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_96 + abstract: 'Cycle is a software tool for musical composition and improvisation that + represents events along a circular timeline. In doing so, it breaks from the linear + representational conventions of European Art music and modern Digital Audio Workstations. + A user specifies time points on different layers, each of which corresponds to + a particular sound. The layers are superimposed on a single circle, which allows + a unique visual perspective on the relationships between musical voices given + their geometric positions. Positions in-between quantizations are possible, which + encourages experimentation with expressive timing and machine rhythms. 
User-selected + transformations affect groups of notes, layers, and the pattern as a whole. Past + and future states are also represented, synthesizing linear and cyclical notions + of time. This paper will contemplate philosophical questions raised by circular + rhythmic notation and will reflect on the ways in which the representational novelties + and editing functions of Cycle have inspired creativity in musical composition.' + address: 'Birmingham, UK' + author: 'Barton, Scott' + bibtex: "@inproceedings{NIME20_96,\n abstract = {Cycle is a software tool for musical\ + \ composition and improvisation that represents events along a circular timeline.\ + \ In doing so, it breaks from the linear representational conventions of European\ + \ Art music and modern Digital Audio Workstations. A user specifies time points\ + \ on different layers, each of which corresponds to a particular sound. The layers\ + \ are superimposed on a single circle, which allows a unique visual perspective\ + \ on the relationships between musical voices given their geometric positions.\ + \ Positions in-between quantizations are possible, which encourages experimentation\ + \ with expressive timing and machine rhythms. User-selected transformations affect\ + \ groups of notes, layers, and the pattern as a whole. Past and future states\ + \ are also represented, synthesizing linear and cyclical notions of time. 
This\ + \ paper will contemplate philosophical questions raised by circular rhythmic notation\ + \ and will reflect on the ways in which the representational novelties and editing\ + \ functions of Cycle have inspired creativity in musical composition.},\n address\ + \ = {Birmingham, UK},\n author = {Barton, Scott},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.4813501},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {505--508},\n presentation-video\ + \ = {https://youtu.be/0CEKbyJUSw4},\n publisher = {Birmingham City University},\n\ + \ title = {Circularity in Rhythmic Representation and Composition},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper96.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177929 - issn: 2220-4806 - keywords: 'turntable, dial, encoder, re-purposed, hard drive, scratch-ing, inherent - dynamics, DIY' - pages: 150--155 - title: 'Disky : a DIY Rotational Interface with Inherent Dynamics' - url: http://www.nime.org/proceedings/2010/nime2010_150.pdf - year: 2010 + doi: 10.5281/zenodo.4813501 + editor: Romain Michon and Franziska Schroeder + issn: 2220-4806 + month: July + pages: 505--508 + presentation-video: https://youtu.be/0CEKbyJUSw4 + publisher: Birmingham City University + title: Circularity in Rhythmic Representation and Composition + url: https://www.nime.org/proceedings/2020/nime2020_paper96.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Solis2010 - abstract: 'Since 2007, our research is related to the development of an anthropomorphic - saxophonist robot, which it has been designed to imitate the saxophonist playing - by mechanically reproducing the organs involved for playing a saxophone. 
Our research - aims in understanding the motor control from an engineering point of view and - enabling the communication. In this paper, the Waseda Saxophone Robot No. 2 (WAS-2) - which is composed by 22-DOFs is detailed. The lip mechanism of WAS-2 has been - designed with 3-DOFs to control the motion of the lower, upper and sideway lips. - In addition, a human-like hand (16 DOF-s) has been designed to enable to play - all the keys of the instrument. Regarding the improvement of the control system, - a feed-forward control system with dead-time compensation has been implemented - to assure the accurate control of the air pressure. In addition, the implementation - of an auditory feedback control system has been proposed and implemented in order - to adjust the positioning of the physical parameters of the components of the - robot by providing a pitch feedback and defining a recovery position (off-line). - A set of experiments were carried out to verify the mechanical design improvements - and the dynamic response of the air pressure. As a result, the range of sound - pressure has been increased and the proposed control system improved the dynamic - response of the air pressure control. ' - address: 'Sydney, Australia' - author: 'Solis, Jorge and Petersen, Klaus and Yamamoto, Tetsuro and Takeuchi, Masaki - and Ishikawa, Shimpei and Takanishi, Atsuo and Hashimoto, Kunimatsu' - bibtex: "@inproceedings{Solis2010,\n abstract = {Since 2007, our research is related\ - \ to the development of an anthropomorphic saxophonist robot, which it has been\ - \ designed to imitate the saxophonist playing by mechanically reproducing the\ - \ organs involved for playing a saxophone. Our research aims in understanding\ - \ the motor control from an engineering point of view and enabling the communication.\ - \ In this paper, the Waseda Saxophone Robot No. 2 (WAS-2) which is composed by\ - \ 22-DOFs is detailed. 
The lip mechanism of WAS-2 has been designed with 3-DOFs\ - \ to control the motion of the lower, upper and sideway lips. In addition, a human-like\ - \ hand (16 DOF-s) has been designed to enable to play all the keys of the instrument.\ - \ Regarding the improvement of the control system, a feed-forward control system\ - \ with dead-time compensation has been implemented to assure the accurate control\ - \ of the air pressure. In addition, the implementation of an auditory feedback\ - \ control system has been proposed and implemented in order to adjust the positioning\ - \ of the physical parameters of the components of the robot by providing a pitch\ - \ feedback and defining a recovery position (off-line). A set of experiments were\ - \ carried out to verify the mechanical design improvements and the dynamic response\ - \ of the air pressure. As a result, the range of sound pressure has been increased\ - \ and the proposed control system improved the dynamic response of the air pressure\ - \ control. },\n address = {Sydney, Australia},\n author = {Solis, Jorge and Petersen,\ - \ Klaus and Yamamoto, Tetsuro and Takeuchi, Masaki and Ishikawa, Shimpei and Takanishi,\ - \ Atsuo and Hashimoto, Kunimatsu},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177897},\n\ - \ issn = {2220-4806},\n keywords = {Humanoid Robot, Auditory Feedback, Music,\ - \ Saxophone.},\n pages = {156--161},\n title = {Development of the Waseda Saxophonist\ - \ Robot and Implementation of an Auditory Feedback Control},\n url = {http://www.nime.org/proceedings/2010/nime2010_156.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_97 + abstract: 'This lab report discusses recent projects and activities of the Experimental + Music Technologies Lab at the University of Sussex. The lab was founded in 2014 + and has contributed to the development of the field of new musical technologies. 
+ The report introduces the lab’s agenda, gives examples of its activities through + common themes and gives short description of lab members’ work. The lab environment, + funding income and future vision are also presented.' + address: 'Birmingham, UK' + author: 'Magnusson, Thor' + bibtex: "@inproceedings{NIME20_97,\n abstract = {This lab report discusses recent\ + \ projects and activities of the Experimental Music Technologies Lab at the University\ + \ of Sussex. The lab was founded in 2014 and has contributed to the development\ + \ of the field of new musical technologies. The report introduces the lab’s agenda,\ + \ gives examples of its activities through common themes and gives short description\ + \ of lab members’ work. The lab environment, funding income and future vision\ + \ are also presented.},\n address = {Birmingham, UK},\n author = {Magnusson, Thor},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813503},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {509--513},\n publisher = {Birmingham City University},\n title = {Instrumental\ + \ Investigations at Emute Lab},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper97.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177897 + doi: 10.5281/zenodo.4813503 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Humanoid Robot, Auditory Feedback, Music, Saxophone.' 
- pages: 156--161 - title: Development of the Waseda Saxophonist Robot and Implementation of an Auditory - Feedback Control - url: http://www.nime.org/proceedings/2010/nime2010_156.pdf - year: 2010 + month: July + pages: 509--513 + publisher: Birmingham City University + title: Instrumental Investigations at Emute Lab + url: https://www.nime.org/proceedings/2020/nime2020_paper97.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Kapur2010 - abstract: 'This paper describes the making of a class to teach the history and art - of musical robotics. The details of the curriculum are described as well as designs - for our custom schematics for robotic solenoid driven percussion. This paper also - introduces four new robotic instruments that were built during the term of this - course. This paper also introduces the Machine Orchestra, a laptop orchestra with - ten human performers and our five robotic instruments. ' - address: 'Sydney, Australia' - author: 'Kapur, Ajay and Darling, Michael' - bibtex: "@inproceedings{Kapur2010,\n abstract = {This paper describes the making\ - \ of a class to teach the history and art of musical robotics. The details of\ - \ the curriculum are described as well as designs for our custom schematics for\ - \ robotic solenoid driven percussion. This paper also introduces four new robotic\ - \ instruments that were built during the term of this course. This paper also\ - \ introduces the Machine Orchestra, a laptop orchestra with ten human performers\ - \ and our five robotic instruments. 
},\n address = {Sydney, Australia},\n author\ - \ = {Kapur, Ajay and Darling, Michael},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177821},\n\ - \ issn = {2220-4806},\n keywords = {dartron,digital classroom,laptop orchestra,machine\ - \ orchestra,musical robotics,nime pedagogy,nime10,solenoid},\n pages = {162--165},\n\ - \ title = {A Pedagogical Paradigm for Musical Robotics},\n url = {http://www.nime.org/proceedings/2010/nime2010_162.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_98 + abstract: 'Creative systems such as algorithmic composers often use Artificial Intelligence + models like Markov chains, Artificial Neural Networks, and Genetic Algorithms + in order to model stochastic processes. Unconventional Computing (UC) technologies + explore non-digital ways of data storage, processing, input, and output. UC paradigms + such as Quantum Computing and Biocomputing delve into domains beyond the binary + bit to handle complex non-linear functions. In this paper, we harness Physarum + polycephalum as memristors to process and generate creative data for popular music. + While there has been research conducted in this area, the literature lacks examples + of popular music and how the organism''s non-linear behaviour can be controlled + while composing music. This is important because non-linear forms of representation + are not as obvious as conventional digital means. This study aims at disseminating + this technology to non-experts and musicians so that they can incorporate it in + their creative processes. Furthermore, it combines resistors and memristors to + have more flexibility while generating music and optimises parameters for faster + processing and performance. 
' + address: 'Birmingham, UK' + author: 'Venkatesh, Satvik and Braund, Edward and Miranda, Eduardo' + bibtex: "@inproceedings{NIME20_98,\n abstract = {Creative systems such as algorithmic\ + \ composers often use Artificial Intelligence models like Markov chains, Artificial\ + \ Neural Networks, and Genetic Algorithms in order to model stochastic processes.\ + \ Unconventional Computing (UC) technologies explore non-digital ways of data\ + \ storage, processing, input, and output. UC paradigms such as Quantum Computing\ + \ and Biocomputing delve into domains beyond the binary bit to handle complex\ + \ non-linear functions. In this paper, we harness Physarum polycephalum as memristors\ + \ to process and generate creative data for popular music. While there has been\ + \ research conducted in this area, the literature lacks examples of popular music\ + \ and how the organism's non-linear behaviour can be controlled while composing\ + \ music. This is important because non-linear forms of representation are not\ + \ as obvious as conventional digital means. This study aims at disseminating this\ + \ technology to non-experts and musicians so that they can incorporate it in their\ + \ creative processes. Furthermore, it combines resistors and memristors to have\ + \ more flexibility while generating music and optimises parameters for faster\ + \ processing and performance. 
},\n address = {Birmingham, UK},\n author = {Venkatesh,\ + \ Satvik and Braund, Edward and Miranda, Eduardo},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.4813507},\n editor = {Romain Michon and Franziska Schroeder},\n\ + \ issn = {2220-4806},\n month = {July},\n pages = {514--519},\n presentation-video\ + \ = {https://youtu.be/NBLa-KoMUh8},\n publisher = {Birmingham City University},\n\ + \ title = {Composing Popular Music with Physarum polycephalum-based Memristors},\n\ + \ url = {https://www.nime.org/proceedings/2020/nime2020_paper98.pdf},\n year =\ + \ {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177821 + doi: 10.5281/zenodo.4813507 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'dartron,digital classroom,laptop orchestra,machine orchestra,musical - robotics,nime pedagogy,nime10,solenoid' - pages: 162--165 - title: A Pedagogical Paradigm for Musical Robotics - url: http://www.nime.org/proceedings/2010/nime2010_162.pdf - year: 2010 + month: July + pages: 514--519 + presentation-video: https://youtu.be/NBLa-KoMUh8 + publisher: Birmingham City University + title: Composing Popular Music with Physarum polycephalum-based Memristors + url: https://www.nime.org/proceedings/2020/nime2020_paper98.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Pan2010 - abstract: 'This paper proposes a novel method to realize an initiativeexchange for - robot. A humanoid robot plays vibraphone exchanging initiative with a human performer - by perceivingmultimodal cues in real time. 
It understands the initiative exchange - cues through vision and audio information.In order to achieve the natural initiative - exchange betweena human and a robot in musical performance, we built thesystem - and the software architecture and carried out the experiments for fundamental - algorithms which are necessaryto the initiative exchange.' - address: 'Sydney, Australia' - author: 'Pan, Ye and Kim, Min-Gyu and Suzuki, Kenji' - bibtex: "@inproceedings{Pan2010,\n abstract = {This paper proposes a novel method\ - \ to realize an initiativeexchange for robot. A humanoid robot plays vibraphone\ - \ exchanging initiative with a human performer by perceivingmultimodal cues in\ - \ real time. It understands the initiative exchange cues through vision and audio\ - \ information.In order to achieve the natural initiative exchange betweena human\ - \ and a robot in musical performance, we built thesystem and the software architecture\ - \ and carried out the experiments for fundamental algorithms which are necessaryto\ - \ the initiative exchange.},\n address = {Sydney, Australia},\n author = {Pan,\ - \ Ye and Kim, Min-Gyu and Suzuki, Kenji},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177875},\n\ - \ issn = {2220-4806},\n keywords = {Human-robot interaction, initiative exchange,\ - \ prediction},\n pages = {166--169},\n title = {A Robot Musician Interacting with\ - \ a Human Partner through Initiative Exchange},\n url = {http://www.nime.org/proceedings/2010/nime2010_166.pdf},\n\ - \ year = {2010}\n}\n" + ID: NIME20_99 + abstract: 'PathoSonic is a VR experience that enables a participant to visualize + and perform a sound file based on timbre feature descriptors displayed in space. + The name comes from the different paths the participant can create through their + sonic explorations. 
The goal of this research is to leverage affordances of virtual + reality technology to visualize sound through different levels of performance-based + interactivity that immerses the participant''s body in a spatial virtual environment. + Through implementation of a multi-sensory experience, including visual aesthetics, + sound, and haptic feedback, we explore inclusive approaches to sound visualization, + making it more accessible to a wider audience including those with hearing, and + mobility impairments. The online version of the paper can be accessed here: https://fdch.github.io/pathosonic' + address: 'Birmingham, UK' + author: 'Camara Halac, Fede and Addy, Shadrick' + bibtex: "@inproceedings{NIME20_99,\n abstract = {PathoSonic is a VR experience that\ + \ enables a participant to visualize and perform a sound file based on timbre\ + \ feature descriptors displayed in space. The name comes from the different paths\ + \ the participant can create through their sonic explorations. The goal of this\ + \ research is to leverage affordances of virtual reality technology to visualize\ + \ sound through different levels of performance-based interactivity that immerses\ + \ the participant's body in a spatial virtual environment. Through implementation\ + \ of a multi-sensory experience, including visual aesthetics, sound, and haptic\ + \ feedback, we explore inclusive approaches to sound visualization, making it\ + \ more accessible to a wider audience including those with hearing, and mobility\ + \ impairments. 
The online version of the paper can be accessed here: https://fdch.github.io/pathosonic},\n\ + \ address = {Birmingham, UK},\n author = {Camara Halac, Fede and Addy, Shadrick},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.4813510},\n editor = {Romain Michon\ + \ and Franziska Schroeder},\n issn = {2220-4806},\n month = {July},\n pages =\ + \ {520--522},\n publisher = {Birmingham City University},\n title = {PathoSonic:\ + \ Performing Sound In Virtual Reality Feature Space},\n url = {https://www.nime.org/proceedings/2020/nime2020_paper99.pdf},\n\ + \ year = {2020}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177875 + doi: 10.5281/zenodo.4813510 + editor: Romain Michon and Franziska Schroeder issn: 2220-4806 - keywords: 'Human-robot interaction, initiative exchange, prediction' - pages: 166--169 - title: A Robot Musician Interacting with a Human Partner through Initiative Exchange - url: http://www.nime.org/proceedings/2010/nime2010_166.pdf - year: 2010 + month: July + pages: 520--522 + publisher: Birmingham City University + title: 'PathoSonic: Performing Sound In Virtual Reality Feature Space' + url: https://www.nime.org/proceedings/2020/nime2020_paper99.pdf + year: 2020 - ENTRYTYPE: inproceedings - ID: Bukvic2010 - abstract: 'Virginia Tech Department of Music’s Digital Interactive Sound & Intermedia - Studio in collaboration with the College of Engineering and School of Visual Arts - presents the latest addition to the *Ork family, the Linux Laptop Orchestra. 
Apart - from maintaining compatibility with its precursors and sources of inspiration, - Princeton’s PLOrk, and Stanford’s SLOrk, L2Ork’s particular focus is on delivering - unprecedented affordability without sacrificing quality, as well as flexibility - necessary to encourage a more widespread adoption and standardization of the laptop - orchestra ensemble. The newfound strengths of L2Ork’s design have resulted in - opportunities in K-12 education with a particular focus on cross-pollinating STEM - and Arts, as well as research of an innovative content delivery system that can - seamlessly engage students regardless of their educational background. In this - document we discuss key components of the L2Ork initiative, their benefits, and - offer resources necessary for the creation of other Linux-based *Orks' - address: 'Sydney, Australia' - author: 'Bukvic, Ivika and Martin, Thomas and Standley, Eric and Matthews, Michael' - bibtex: "@inproceedings{Bukvic2010,\n abstract = {Virginia Tech Department of Music’s\ - \ Digital Interactive Sound & Intermedia Studio in collaboration with the College\ - \ of Engineering and School of Visual Arts presents the latest addition to the\ - \ *Ork family, the Linux Laptop Orchestra. Apart from maintaining compatibility\ - \ with its precursors and sources of inspiration, Princeton’s PLOrk, and Stanford’s\ - \ SLOrk, L2Ork’s particular focus is on delivering unprecedented affordability\ - \ without sacrificing quality, as well as flexibility necessary to encourage a\ - \ more widespread adoption and standardization of the laptop orchestra ensemble.\ - \ The newfound strengths of L2Ork’s design have resulted in opportunities in K-12\ - \ education with a particular focus on cross-pollinating STEM and Arts, as well\ - \ as research of an innovative content delivery system that can seamlessly engage\ - \ students regardless of their educational background. 
In this document we discuss\ - \ key components of the L2Ork initiative, their benefits, and offer resources\ - \ necessary for the creation of other Linux-based *Orks},\n address = {Sydney,\ - \ Australia},\n author = {Bukvic, Ivika and Martin, Thomas and Standley, Eric\ - \ and Matthews, Michael},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177731},\n\ - \ issn = {2220-4806},\n keywords = {l2ork,laptop orchestra,linux,nime10},\n pages\ - \ = {170--173},\n title = {Introducing L2Ork : Linux Laptop Orchestra},\n url\ - \ = {http://www.nime.org/proceedings/2010/nime2010_170.pdf},\n year = {2010}\n\ - }\n" + ID: Murphy2012 + abstract: 'A problem with many contemporary musical robotic percussion systems lies + in the fact that solenoids fail to respond lin-early to linear increases in input + velocity. This nonlinearity forces performers to individually tailor their compositions + to specific robotic drummers. To address this problem, we introduce a method of + pre-performance calibration using metaheuristic search techniques. A variety of + such techniques are introduced and evaluated and the results of the optimized + solenoid-based percussion systems are presented and compared with output from + non-calibrated systems.' + address: 'Ann Arbor, Michigan' + author: Jim Murphy and Ajay Kapur and Dale Carnegie + bibtex: "@inproceedings{Murphy2012,\n abstract = {A problem with many contemporary\ + \ musical robotic percussion systems lies in the fact that solenoids fail to respond\ + \ lin-early to linear increases in input velocity. This nonlinearity forces performers\ + \ to individually tailor their compositions to specific robotic drummers. To address\ + \ this problem, we introduce a method of pre-performance calibration using metaheuristic\ + \ search techniques. 
A variety of such techniques are introduced and evaluated\ + \ and the results of the optimized solenoid-based percussion systems are presented\ + \ and compared with output from non-calibrated systems.},\n address = {Ann Arbor,\ + \ Michigan},\n author = {Jim Murphy and Ajay Kapur and Dale Carnegie},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1180545},\n issn = {2220-4806},\n keywords\ + \ = {musical robotics, human-robot interaction},\n publisher = {University of\ + \ Michigan},\n title = {Better Drumming Through Calibration: Techniques for Pre-Performance\ + \ Robotic Percussion Optimization},\n url = {http://www.nime.org/proceedings/2012/nime2012_100.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177731 + doi: 10.5281/zenodo.1180545 issn: 2220-4806 - keywords: 'l2ork,laptop orchestra,linux,nime10' - pages: 170--173 - title: 'Introducing L2Ork : Linux Laptop Orchestra' - url: http://www.nime.org/proceedings/2010/nime2010_170.pdf - year: 2010 + keywords: 'musical robotics, human-robot interaction' + publisher: University of Michigan + title: 'Better Drumming Through Calibration: Techniques for Pre-Performance Robotic + Percussion Optimization' + url: http://www.nime.org/proceedings/2012/nime2012_100.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Bryan2010 - abstract: 'The Mobile Music (MoMu) toolkit is a new open-sourcesoftware development - toolkit focusing on musical interaction design for mobile phones. The toolkit, - currently implemented for iPhone OS, emphasizes usability and rapidprototyping - with the end goal of aiding developers in creating real-time interactive audio - applications. Simple andunified access to onboard sensors along with utilities - forcommon tasks found in mobile music development are provided. 
The toolkit has - been deployed and evaluated in theStanford Mobile Phone Orchestra (MoPhO) and - serves asthe primary software platform in a new course exploringmobile music.' - address: 'Sydney, Australia' - author: 'Bryan, Nicholas J. and Herrera, Jorge and Oh, Jieun and Wang, Ge' - bibtex: "@inproceedings{Bryan2010,\n abstract = {The Mobile Music (MoMu) toolkit\ - \ is a new open-sourcesoftware development toolkit focusing on musical interaction\ - \ design for mobile phones. The toolkit, currently implemented for iPhone OS,\ - \ emphasizes usability and rapidprototyping with the end goal of aiding developers\ - \ in creating real-time interactive audio applications. Simple andunified access\ - \ to onboard sensors along with utilities forcommon tasks found in mobile music\ - \ development are provided. The toolkit has been deployed and evaluated in theStanford\ - \ Mobile Phone Orchestra (MoPhO) and serves asthe primary software platform in\ - \ a new course exploringmobile music.},\n address = {Sydney, Australia},\n author\ - \ = {Bryan, Nicholas J. and Herrera, Jorge and Oh, Jieun and Wang, Ge},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177725},\n issn = {2220-4806},\n keywords\ - \ = {instrument design, iPhone, mobile music, software develop- ment, toolkit},\n\ - \ pages = {174--177},\n title = {MoMu : A Mobile Music Toolkit},\n url = {http://www.nime.org/proceedings/2010/nime2010_174.pdf},\n\ - \ year = {2010}\n}\n" + ID: Britt2012 + abstract: 'The EMvibe is an augmented vibraphone that allows for continuous control + over the amplitude and spectrum of in-dividual notes. The system uses electromagnetic + actuators to induce vibrations in the vibraphone''s aluminum tone bars. The tone + bars and the electromagnetic actuators are coupled via neodymium magnets affixed + to each bar. 
The acoustic properties of the vibraphone allowed us to develop a + very simple, low-cost and powerful amplification solution that requires no heat + sinking. The physical design is meant to be portable and robust, and the system + can be easily installed on any vibraphone without interfering with normal performance + techniques. The system supports multiple in-terfacing solutions, affording the + performer and composer the ability to interact with the EMvibe in different ways + depending on the musical context.' + address: 'Ann Arbor, Michigan' + author: N. Cameron Britt and Jeff Snyder and Andrew McPherson + bibtex: "@inproceedings{Britt2012,\n abstract = {The EMvibe is an augmented vibraphone\ + \ that allows for continuous control over the amplitude and spectrum of in-dividual\ + \ notes. The system uses electromagnetic actuators to induce vibrations in the\ + \ vibraphone's aluminum tone bars. The tone bars and the electromagnetic actuators\ + \ are coupled via neodymium magnets affixed to each bar. The acoustic properties\ + \ of the vibraphone allowed us to develop a very simple, low-cost and powerful\ + \ amplification solution that requires no heat sinking. The physical design is\ + \ meant to be portable and robust, and the system can be easily installed on any\ + \ vibraphone without interfering with normal performance techniques. The system\ + \ supports multiple in-terfacing solutions, affording the performer and composer\ + \ the ability to interact with the EMvibe in different ways depending on the musical\ + \ context.},\n address = {Ann Arbor, Michigan},\n author = {N. 
Cameron Britt and\ + \ Jeff Snyder and Andrew McPherson},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178221},\n\ + \ issn = {2220-4806},\n keywords = {Vibraphone, augmented instrument, electromagnetic\ + \ actuation},\n publisher = {University of Michigan},\n title = {The EMvibe: An\ + \ Electromagnetically Actuated Vibraphone},\n url = {http://www.nime.org/proceedings/2012/nime2012_101.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177725 + doi: 10.5281/zenodo.1178221 issn: 2220-4806 - keywords: 'instrument design, iPhone, mobile music, software develop- ment, toolkit' - pages: 174--177 - title: 'MoMu : A Mobile Music Toolkit' - url: http://www.nime.org/proceedings/2010/nime2010_174.pdf - year: 2010 + keywords: 'Vibraphone, augmented instrument, electromagnetic actuation' + publisher: University of Michigan + title: 'The EMvibe: An Electromagnetically Actuated Vibraphone' + url: http://www.nime.org/proceedings/2012/nime2012_101.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Dahl2010 - abstract: 'The use of metaphor has a prominent role in HCI, both as a device to - help users understand unfamiliar technologies, and as a tool to guide the design - process. Creators of new computerbased instruments face similar design challenges - as those in HCI. In the course of creating a new piece for Mobile Phone Orchestra - we propose the metaphor of a sound as a ball and explore the interactions and - sound mappings it suggests. These lead to the design of a gesture-controlled instrument - that allows players to "bounce" sounds, "throw" them to other players, and compete - in a game to "knock out" others'' sounds. 
We composed the piece SoundBounce based - on these interactions, and note that audiences seem to find performances of the - piece accessible and engaging, perhaps due to the visibility of the metaphor. ' - address: 'Sydney, Australia' - author: 'Dahl, Luke and Wang, Ge' - bibtex: "@inproceedings{Dahl2010,\n abstract = {The use of metaphor has a prominent\ - \ role in HCI, both as a device to help users understand unfamiliar technologies,\ - \ and as a tool to guide the design process. Creators of new computerbased instruments\ - \ face similar design challenges as those in HCI. In the course of creating a\ - \ new piece for Mobile Phone Orchestra we propose the metaphor of a sound as a\ - \ ball and explore the interactions and sound mappings it suggests. These lead\ - \ to the design of a gesture-controlled instrument that allows players to \"bounce\"\ - \ sounds, \"throw\" them to other players, and compete in a game to \"knock out\"\ - \ others' sounds. We composed the piece SoundBounce based on these interactions,\ - \ and note that audiences seem to find performances of the piece accessible and\ - \ engaging, perhaps due to the visibility of the metaphor. },\n address = {Sydney,\ - \ Australia},\n author = {Dahl, Luke and Wang, Ge},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177751},\n issn = {2220-4806},\n keywords = {Mobile music,\ - \ design, metaphor, performance, gameplay.},\n pages = {178--181},\n title = {Sound\ - \ Bounce : Physical Metaphors in Designing Mobile Music Performance},\n url =\ - \ {http://www.nime.org/proceedings/2010/nime2010_178.pdf},\n year = {2010}\n}\n" + ID: Brent2012 + abstract: 'This paper introduces the Gesturally Extended Piano---an augmented instrument + controller that relies on information drawn from performer motion tracking in + order to control real-time audiovisual processing and synthesis. 
Specifically, + the positions, heights, velocities, and relative distances and angles of points + on the hands and forearms are followed. Technical details and installation of + the tracking system are covered, as well as strategies for interpreting and mapping + the resulting data in relation to synthesis parameters. Design factors surrounding + mapping choices and the interrelation between mapped parameters are also considered.' + address: 'Ann Arbor, Michigan' + author: William Brent + bibtex: "@inproceedings{Brent2012,\n abstract = {This paper introduces the Gesturally\ + \ Extended Piano---an augmented instrument controller that relies on information\ + \ drawn from performer motion tracking in order to control real-time audiovisual\ + \ processing and synthesis. Specifically, the positions, heights, velocities,\ + \ and relative distances and angles of points on the hands and forearms are followed.\ + \ Technical details and installation of the tracking system are covered, as well\ + \ as strategies for interpreting and mapping the resulting data in relation to\ + \ synthesis parameters. Design factors surrounding mapping choices and the interrelation\ + \ between mapped parameters are also considered.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {William Brent},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178219},\n\ + \ issn = {2220-4806},\n keywords = {Augmented instruments, controllers, motion\ + \ tracking, mapping},\n publisher = {University of Michigan},\n title = {The Gesturally\ + \ Extended Piano},\n url = {http://www.nime.org/proceedings/2012/nime2012_102.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177751 + doi: 10.5281/zenodo.1178219 issn: 2220-4806 - keywords: 'Mobile music, design, metaphor, performance, gameplay.' 
- pages: 178--181 - title: 'Sound Bounce : Physical Metaphors in Designing Mobile Music Performance' - url: http://www.nime.org/proceedings/2010/nime2010_178.pdf - year: 2010 + keywords: 'Augmented instruments, controllers, motion tracking, mapping' + publisher: University of Michigan + title: The Gesturally Extended Piano + url: http://www.nime.org/proceedings/2012/nime2012_102.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Essl2010a - abstract: 'Impact force is an important dimension for percussive musical instruments - such as the piano. We explore three possible mechanisms how to get impact forces - on mobile multi-touch devices: using built-in accelerometers, the pressure sensing - capability of Android phones, and external force sensing resistors. We find that - accelerometers are difficult to control for this purpose. Android''s pressure - sensing shows some promise, especially when combined with augmented playing technique. - Force sensing resistors can offer good dynamic resolution but this technology - is not currently offered in commodity devices and proper coupling of the sensor - with the applied impact is difficult. ' - address: 'Sydney, Australia' - author: 'Essl, Georg and Rohs, Michael and Kratz, Sven' - bibtex: "@inproceedings{Essl2010a,\n abstract = {Impact force is an important dimension\ - \ for percussive musical instruments such as the piano. We explore three possible\ - \ mechanisms how to get impact forces on mobile multi-touch devices: using built-in\ - \ accelerometers, the pressure sensing capability of Android phones, and external\ - \ force sensing resistors. We find that accelerometers are difficult to control\ - \ for this purpose. Android's pressure sensing shows some promise, especially\ - \ when combined with augmented playing technique. 
Force sensing resistors can\ - \ offer good dynamic resolution but this technology is not currently offered in\ - \ commodity devices and proper coupling of the sensor with the applied impact\ - \ is difficult. },\n address = {Sydney, Australia},\n author = {Essl, Georg and\ - \ Rohs, Michael and Kratz, Sven},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177761},\n\ - \ issn = {2220-4806},\n keywords = {Force, impact, pressure, multi-touch, mobile\ - \ phone, mobile music making.},\n pages = {182--185},\n title = {Use the Force\ - \ (or something) --- Pressure and Pressure --- Like Input for Mobile Music Performance},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_182.pdf},\n year = {2010}\n\ + ID: Wyse2012 + abstract: 'The upper limit of frequency sensitivity for vibrotactile stimulation + of the fingers and hand is commonly accepted as 1 kHz. However, during the course + of our research to develop a full-hand vibrotactile musical communication device + for the hearing-impaired, we repeatedly found evidence suggesting sensitivity + to higher frequencies. Most of the studies on which vibrotactile sensitivity are + based have been conducted using sine tones delivered by point-contact actuators. + The current study was designed to investigate vibrotactile sensitivity using complex + signals and full, open-hand contact with a flat vibrating surface representing + more natural environmental conditions. Sensitivity to frequencies considerably + higher than previously reported was demonstrated for all the signal types tested. + Furthermore, complex signals seem to be more easily detected than sine tones, + especially at low frequencies. Our findings are applicable to a general understanding + of sensory physiology, and to the development of new vibrotactile display devices + for music and other applications.' 
+ address: 'Ann Arbor, Michigan' + author: Lonce Wyse and Suranga Nanayakkara and Paul Seekings and Sim Heng Ong and + Elizabeth Taylor + bibtex: "@inproceedings{Wyse2012,\n abstract = {The upper limit of frequency sensitivity\ + \ for vibrotactile stimulation of the fingers and hand is commonly accepted as\ + \ 1 kHz. However, during the course of our research to develop a full-hand vibrotactile\ + \ musical communication device for the hearing-impaired, we repeatedly found evidence\ + \ suggesting sensitivity to higher frequencies. Most of the studies on which vibrotactile\ + \ sensitivity are based have been conducted using sine tones delivered by point-contact\ + \ actuators. The current study was designed to investigate vibrotactile sensitivity\ + \ using complex signals and full, open-hand contact with a flat vibrating surface\ + \ representing more natural environmental conditions. Sensitivity to frequencies\ + \ considerably higher than previously reported was demonstrated for all the signal\ + \ types tested. Furthermore, complex signals seem to be more easily detected than\ + \ sine tones, especially at low frequencies. 
Our findings are applicable to a\ + \ general understanding of sensory physiology, and to the development of new vibrotactile\ + \ display devices for music and other applications.},\n address = {Ann Arbor,\ + \ Michigan},\n author = {Lonce Wyse and Suranga Nanayakkara and Paul Seekings\ + \ and Sim Heng Ong and Elizabeth Taylor},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178453},\n\ + \ issn = {2220-4806},\n keywords = {Haptic Sensitivity, Hearing-impaired, Vibrotactile\ + \ Threshold},\n publisher = {University of Michigan},\n title = {Palm-area sensitivity\ + \ to vibrotactile stimuli above 1~{kHz}},\n url = {http://www.nime.org/proceedings/2012/nime2012_105.pdf},\n\ + \ year = {2012}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178453 + issn: 2220-4806 + keywords: 'Haptic Sensitivity, Hearing-impaired, Vibrotactile Threshold' + publisher: University of Michigan + title: Palm-area sensitivity to vibrotactile stimuli above 1~kHz + url: http://www.nime.org/proceedings/2012/nime2012_105.pdf + year: 2012 + + +- ENTRYTYPE: inproceedings + ID: Pugliese2012 + abstract: 'In this paper strategies for augmenting the social dimension of collaborative + music making, in particular in the form of bodily and situated interaction are + presented. Mobile instruments are extended by means of relational descriptors + democratically controlled by the group and mapped to sound parameters. A qualitative + evaluation approach is described and a user test with participants playing in + groups of three conducted. The results of the analysis show core-categories such + as familiarity with instrument and situation, shift of focus in activity, family + of interactions and different categories of the experience emerging from the interviews. 
+ Our evaluation shows the suitability of our approach but also the need for iterating + on our design on the basis of the perspectives brought forth by the users. This + latter observation confirms the importance of conducting a thorough interview + session followed by data analysis on the line of grounded theory.' + address: 'Ann Arbor, Michigan' + author: Roberto Pugliese and Koray Tahiroglu and Callum Goddard and James Nesfield + bibtex: "@inproceedings{Pugliese2012,\n abstract = {In this paper strategies for\ + \ augmenting the social dimension of collaborative music making, in particular\ + \ in the form of bodily and situated interaction are presented. Mobile instruments\ + \ are extended by means of relational descriptors democratically controlled by\ + \ the group and mapped to sound parameters. A qualitative evaluation approach\ + \ is described and a user test with participants playing in groups of three conducted.\ + \ The results of the analysis show core-categories such as familiarity with instrument\ + \ and situation, shift of focus in activity, family of interactions and different\ + \ categories of the experience emerging from the interviews. Our evaluation shows\ + \ the suitability of our approach but also the need for iterating on our design\ + \ on the basis of the perspectives brought forth by the users. 
This latter observation\ + \ confirms the importance of conducting a thorough interview session followed\ + \ by data analysis on the line of grounded theory.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Roberto Pugliese and Koray Tahiroglu and Callum Goddard and James\ + \ Nesfield},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180573},\n issn\ + \ = {2220-4806},\n keywords = {Collaborative music making, evaluation methods,\ + \ mobile music, human-human interaction.},\n publisher = {University of Michigan},\n\ + \ title = {Augmenting human-human interaction in mobile group improvisation},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_108.pdf},\n year = {2012}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177761 + doi: 10.5281/zenodo.1180573 issn: 2220-4806 - keywords: 'Force, impact, pressure, multi-touch, mobile phone, mobile music making.' - pages: 182--185 - title: Use the Force (or something) --- Pressure and Pressure --- Like Input for - Mobile Music Performance - url: http://www.nime.org/proceedings/2010/nime2010_182.pdf - year: 2010 + keywords: 'Collaborative music making, evaluation methods, mobile music, human-human + interaction.' + publisher: University of Michigan + title: Augmenting human-human interaction in mobile group improvisation + url: http://www.nime.org/proceedings/2012/nime2012_108.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Mills2010a - abstract: 'The evolution of networked audio technologies has created unprecedented - opportunities for musicians to improvise with instrumentalists from a diverse - range of cultures and disciplines. As network speeds increase and latency is consigned - to history, tele-musical collaboration, and in particular improvisation will be - shaped by new methodologies that respond to this potential. 
While networked technologies - eliminate distance in physical space, for the remote improviser, this creates - a liminality of experience through which their performance is mediated. As a first - step in understanding the conditions arising from collaboration in networked audio - platforms, this paper will examine selected case studies of improvisation in a - variety of networked interfaces. The author will examine how platform characteristics - and network conditions influence the process of collective improvisation and the - methodologies musicians are employing to negotiate their networked experiences.' - address: 'Sydney, Australia' - author: 'Mills, Roger' - bibtex: "@inproceedings{Mills2010a,\n abstract = {The evolution of networked audio\ - \ technologies has created unprecedented opportunities for musicians to improvise\ - \ with instrumentalists from a diverse range of cultures and disciplines. As network\ - \ speeds increase and latency is consigned to history, tele-musical collaboration,\ - \ and in particular improvisation will be shaped by new methodologies that respond\ - \ to this potential. While networked technologies eliminate distance in physical\ - \ space, for the remote improviser, this creates a liminality of experience through\ - \ which their performance is mediated. 
As a first step in understanding the conditions\ - \ arising from collaboration in networked audio platforms, this paper will examine\ - \ selected case studies of improvisation in a variety of networked interfaces.\ - \ The author will examine how platform characteristics and network conditions\ - \ influence the process of collective improvisation and the methodologies musicians\ - \ are employing to negotiate their networked experiences.},\n address = {Sydney,\ - \ Australia},\n author = {Mills, Roger},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177857},\n\ - \ issn = {2220-4806},\n keywords = {improvisation, internet audio, networked collaboration,\ - \ sound art},\n pages = {186--191},\n title = {Dislocated Sound : A Survey of\ - \ Improvisation in Networked Audio Platforms},\n url = {http://www.nime.org/proceedings/2010/nime2010_186.pdf},\n\ - \ year = {2010}\n}\n" + ID: Oliver2012 + abstract: 'There is some evidence that structured training can benefit cochlear + implant (CI) users'' appraisal of music as well as their music perception abilities. + There are currently very limited music training resources available for CI users + to explore. This demonstration will introduce delegates to the `Interactive Music + Awareness Program'' (IMAP) for cochlear implant users, which was developed in + response to the need for a client-centered, structured, interactive, creative, + open-ended, educational and challenging music (re)habilitation resource.' + address: 'Ann Arbor, Michigan' + author: Benjamin R. Oliver and Rachel M. van Besouw and David R. Nicholls + bibtex: "@inproceedings{Oliver2012,\n abstract = {There is some evidence that structured\ + \ training can benefit cochlear implant (CI) users' appraisal of music as well\ + \ as their music perception abilities. There are currently very limited music\ + \ training resources available for CI users to explore. 
This demonstration will\ + \ introduce delegates to the `Interactive Music Awareness Program' (IMAP) for\ + \ cochlear implant users, which was developed in response to the need for a client-centered,\ + \ structured, interactive, creative, open-ended, educational and challenging music\ + \ (re)habilitation resource.},\n address = {Ann Arbor, Michigan},\n author = {Benjamin\ + \ R. Oliver and Rachel M. van Besouw and David R. Nicholls},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1180557},\n issn = {2220-4806},\n keywords = {music, cochlear\ + \ implants, perception, rehabilitation, auditory training, interactive learning,\ + \ client-centred software},\n publisher = {University of Michigan},\n title =\ + \ {The `Interactive Music Awareness Program' (IMAP) for Cochlear Implant Users},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_109.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177857 + doi: 10.5281/zenodo.1180557 issn: 2220-4806 - keywords: 'improvisation, internet audio, networked collaboration, sound art' - pages: 186--191 - title: 'Dislocated Sound : A Survey of Improvisation in Networked Audio Platforms' - url: http://www.nime.org/proceedings/2010/nime2010_186.pdf - year: 2010 + keywords: 'music, cochlear implants, perception, rehabilitation, auditory training, + interactive learning, client-centred software' + publisher: University of Michigan + title: The `Interactive Music Awareness Program' (IMAP) for Cochlear Implant Users + url: http://www.nime.org/proceedings/2012/nime2012_109.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Berthaut2010 - abstract: 'We present Drile, a multiprocess immersive instrument built uponthe hierarchical - live-looping technique and aimed at musical performance. 
This technique consists - in creating musical trees whosenodes are composed of sound effects applied to - a musical content.In the leaves, this content is a one-shot sound, whereas in - higherlevel nodes this content is composed of live-recorded sequencesof parameters - of the children nodes. Drile allows musicians tointeract efficiently with these - trees in an immersive environment.Nodes are represented as worms, which are 3D - audiovisual objects. Worms can be manipulated using 3D interaction techniques,and - several operations can be applied to the live-looping trees. Theenvironment is - composed of several virtual rooms, i.e. group oftrees, corresponding to specific - sounds and effects. Learning Drileis progressive since the musical control complexity - varies according to the levels in live-looping trees. Thus beginners may havelimited - control over only root worms while still obtaining musically interesting results. - Advanced users may modify the trees andmanipulate each of the worms.' - address: 'Sydney, Australia' - author: 'Berthaut, Florent and Desainte-Catherine, Myriam and Hachet, Martin' - bibtex: "@inproceedings{Berthaut2010,\n abstract = {We present Drile, a multiprocess\ - \ immersive instrument built uponthe hierarchical live-looping technique and aimed\ - \ at musical performance. This technique consists in creating musical trees whosenodes\ - \ are composed of sound effects applied to a musical content.In the leaves, this\ - \ content is a one-shot sound, whereas in higherlevel nodes this content is composed\ - \ of live-recorded sequencesof parameters of the children nodes. Drile allows\ - \ musicians tointeract efficiently with these trees in an immersive environment.Nodes\ - \ are represented as worms, which are 3D audiovisual objects. Worms can be manipulated\ - \ using 3D interaction techniques,and several operations can be applied to the\ - \ live-looping trees. 
Theenvironment is composed of several virtual rooms, i.e.\ - \ group oftrees, corresponding to specific sounds and effects. Learning Drileis\ - \ progressive since the musical control complexity varies according to the levels\ - \ in live-looping trees. Thus beginners may havelimited control over only root\ - \ worms while still obtaining musically interesting results. Advanced users may\ - \ modify the trees andmanipulate each of the worms.},\n address = {Sydney, Australia},\n\ - \ author = {Berthaut, Florent and Desainte-Catherine, Myriam and Hachet, Martin},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177721},\n issn = {2220-4806},\n\ - \ keywords = {Drile, immersive instrument, hierarchical live-looping, 3D interac-\ - \ tion},\n pages = {192--197},\n title = {DRILE : An Immersive Environment for\ - \ Hierarchical Live-Looping},\n url = {http://www.nime.org/proceedings/2010/nime2010_192.pdf},\n\ - \ year = {2010}\n}\n" + ID: Pineyro2012 + abstract: The Electric Slide Organistrum (Figure 1) is an acoustic stringed instrument + played through a video capture system. The vibration of the instrument string + is generated electro-magnetically and the pitch variation is achieved by movements + carried out by the player in front of a video camera. This instrument results + from integrating an ancient technique for the production of sounds as it is the + vibration of a string on a soundbox and actual human-computer interaction technology + such as motion detection. + address: 'Ann Arbor, Michigan' + author: Martin Piñeyro + bibtex: "@inproceedings{Pineyro2012,\n abstract = {The Electric Slide Organistrum\ + \ (Figure 1) is an acoustic stringed instrument played through a video capture\ + \ system. 
The vibration of the instrument string is generated electro-magnetically\ + \ and the pitch variation is achieved by movements carried out by the player in\ + \ front of a video camera. This instrument results from integrating an ancient\ + \ technique for the production of sounds as it is the vibration of a string on\ + \ a soundbox and actual human-computer interaction technology such as motion detection.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Martin Pi{\\~n}eyro},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1180571},\n issn = {2220-4806},\n keywords\ + \ = {Gestural Interface, eBow, Pickup, Bowed string, Electromagnetic actuation},\n\ + \ publisher = {University of Michigan},\n title = {Electric Slide Organistrum},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_114.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177721 + doi: 10.5281/zenodo.1180571 issn: 2220-4806 - keywords: 'Drile, immersive instrument, hierarchical live-looping, 3D interac- tion' - pages: 192--197 - title: 'DRILE : An Immersive Environment for Hierarchical Live-Looping' - url: http://www.nime.org/proceedings/2010/nime2010_192.pdf - year: 2010 + keywords: 'Gestural Interface, eBow, Pickup, Bowed string, Electromagnetic actuation' + publisher: University of Michigan + title: Electric Slide Organistrum + url: http://www.nime.org/proceedings/2012/nime2012_114.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Fencott2010 - abstract: 'This research is concerned with issues of privacy, awareness and the - emergence of roles in the process of digitallymediated collaborative music making. - Specifically we areinterested in how providing collaborators with varying degrees - of privacy and awareness of one another influencesthe group interaction. 
A study - is presented whereby ninegroups of co-located musicians compose music together - using three different interface designs. We use qualitative andquantitative data - to study and characterise the musician''sinteraction with each other and the software. - We show thatwhen made available to them, participants make extensiveuse of a private - working area to develop musical contributions before they are introduced to the - group. We also arguethat our awareness mechanisms change the perceived quality - of the musical interaction, but have no impact on theway musicians interact with - the software. We then reflecton implications for the design of new collaborative - musicmaking tools which exploit the potential of digital technologies, while at - the same time support creative musicalinteraction.' - address: 'Sydney, Australia' - author: 'Fencott, Robin and Bryan-Kinns, Nick' - bibtex: "@inproceedings{Fencott2010,\n abstract = {This research is concerned with\ - \ issues of privacy, awareness and the emergence of roles in the process of digitallymediated\ - \ collaborative music making. Specifically we areinterested in how providing collaborators\ - \ with varying degrees of privacy and awareness of one another influencesthe group\ - \ interaction. A study is presented whereby ninegroups of co-located musicians\ - \ compose music together using three different interface designs. We use qualitative\ - \ andquantitative data to study and characterise the musician'sinteraction with\ - \ each other and the software. We show thatwhen made available to them, participants\ - \ make extensiveuse of a private working area to develop musical contributions\ - \ before they are introduced to the group. We also arguethat our awareness mechanisms\ - \ change the perceived quality of the musical interaction, but have no impact\ - \ on theway musicians interact with the software. 
We then reflecton implications\ - \ for the design of new collaborative musicmaking tools which exploit the potential\ - \ of digital technologies, while at the same time support creative musicalinteraction.},\n\ - \ address = {Sydney, Australia},\n author = {Fencott, Robin and Bryan-Kinns, Nick},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177763},\n issn = {2220-4806},\n\ - \ keywords = {Awareness, Privacy, Collaboration, Music, Interaction, En- gagement,\ - \ Group Music Making, Design, Evaluation.},\n pages = {198--203},\n title = {Hey\ - \ Man, You're Invading my Personal Space ! Privacy and Awareness in Collaborative\ - \ Music},\n url = {http://www.nime.org/proceedings/2010/nime2010_198.pdf},\n year\ - \ = {2010}\n}\n" + ID: McPherson2012a + abstract: 'There is growing interest in the field of augmented musical instruments, + which extend traditional acoustic instruments using new sensors and actuators. + Several designs use electromagnetic actuation to induce vibrations in the acoustic + mechanism, manipulating the traditional sound of the in-strument without external + speakers. This paper presents techniques and guidelines for the use of electromagnetic + actuation in augmented instruments, including actuator design and selection, interfacing + with the instrument, and cir-cuits for driving the actuators. The material in + this pa-per forms the basis of the magnetic resonator piano, an electromagnetically-augmented + acoustic grand piano now in its second design iteration. In addition to discussing + applications to the piano, this paper aims to provide a toolbox to accelerate + the design of new hybrid acoustic-electronic instruments.' 
+ address: 'Ann Arbor, Michigan' + author: Andrew McPherson + bibtex: "@inproceedings{McPherson2012a,\n abstract = {There is growing interest\ + \ in the field of augmented musical instruments, which extend traditional acoustic\ + \ instruments using new sensors and actuators. Several designs use electromagnetic\ + \ actuation to induce vibrations in the acoustic mechanism, manipulating the traditional\ + \ sound of the in-strument without external speakers. This paper presents techniques\ + \ and guidelines for the use of electromagnetic actuation in augmented instruments,\ + \ including actuator design and selection, interfacing with the instrument, and\ + \ cir-cuits for driving the actuators. The material in this pa-per forms the basis\ + \ of the magnetic resonator piano, an electromagnetically-augmented acoustic grand\ + \ piano now in its second design iteration. In addition to discussing applications\ + \ to the piano, this paper aims to provide a toolbox to accelerate the design\ + \ of new hybrid acoustic-electronic instruments.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Andrew McPherson},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180533},\n\ + \ issn = {2220-4806},\n keywords = {augmented instruments, electromagnetic actuation,\ + \ circuit design, hardware},\n publisher = {University of Michigan},\n title =\ + \ {Techniques and Circuits for Electromagnetic Instrument Actuation},\n url =\ + \ {http://www.nime.org/proceedings/2012/nime2012_117.pdf},\n year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177763 + doi: 10.5281/zenodo.1180533 issn: 2220-4806 - keywords: 'Awareness, Privacy, Collaboration, Music, Interaction, En- gagement, - Group Music Making, Design, Evaluation.' - pages: 198--203 - title: 'Hey Man, You''re Invading my Personal Space ! 
Privacy and Awareness in Collaborative - Music' - url: http://www.nime.org/proceedings/2010/nime2010_198.pdf - year: 2010 + keywords: 'augmented instruments, electromagnetic actuation, circuit design, hardware' + publisher: University of Michigan + title: Techniques and Circuits for Electromagnetic Instrument Actuation + url: http://www.nime.org/proceedings/2012/nime2012_117.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Martin2010a - abstract: 'In 2009 the cross artform group, Last Man to Die, presenteda series of - performances using new interfaces and networkedperformance to integrate the three - artforms of its members(actor, Hanna Cormick, visual artist, Benjamin Forster - andpercussionist, Charles Martin). This paper explains ourartistic motivations - and design for a computer vision surfaceand networked heartbeat sensor as well - as the experience ofmounting our first major work, Vital LMTD.' - address: 'Sydney, Australia' - author: 'Martin, Charles and Forster, Benjamin and Cormick, Hanna' - bibtex: "@inproceedings{Martin2010a,\n abstract = {In 2009 the cross artform group,\ - \ Last Man to Die, presenteda series of performances using new interfaces and\ - \ networkedperformance to integrate the three artforms of its members(actor, Hanna\ - \ Cormick, visual artist, Benjamin Forster andpercussionist, Charles Martin).\ - \ This paper explains ourartistic motivations and design for a computer vision\ - \ surfaceand networked heartbeat sensor as well as the experience ofmounting our\ - \ first major work, Vital LMTD.},\n address = {Sydney, Australia},\n author =\ - \ {Martin, Charles and Forster, Benjamin and Cormick, Hanna},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177843},\n issn = {2220-4806},\n keywords = {cross-artform\ - \ performance, networked performance, physi- cal computing},\n pages = {204--207},\n\ - \ title = {Cross-Artform Performance Using 
Networked Interfaces : Last Man to\ - \ Die's Vital LMTD},\n url = {http://www.nime.org/proceedings/2010/nime2010_204.pdf},\n\ - \ year = {2010}\n}\n" + ID: Subramanian2012 + abstract: 'This paper describes a recent addition to LOLC, a text-based environment + for collaborative improvisation for laptop ensembles, incorporating a machine + musician that plays along with human performers. The machine musician LOLbot analyses + the patterns created by human performers and the composite music they create as + they are layered in performance. Based on user specified settings, LOLbot chooses + appropriate patterns to play with the ensemble, either to add contrast to the + existing performance or to be coherent with the rhythmic structure of the performance. + The paper describes the background and motivations of the project, outlines the + design of the original LOLC environment and describes the architecture and implementation + of LOLbot.' + address: 'Ann Arbor, Michigan' + author: Sidharth Subramanian and Jason Freeman and Scott McCoid + bibtex: "@inproceedings{Subramanian2012,\n abstract = {This paper describes a recent\ + \ addition to LOLC, a text-based environment for collaborative improvisation for\ + \ laptop ensembles, incorporating a machine musician that plays along with human\ + \ performers. The machine musician LOLbot analyses the patterns created by human\ + \ performers and the composite music they create as they are layered in performance.\ + \ Based on user specified settings, LOLbot chooses appropriate patterns to play\ + \ with the ensemble, either to add contrast to the existing performance or to\ + \ be coherent with the rhythmic structure of the performance. 
The paper describes\ + \ the background and motivations of the project, outlines the design of the original\ + \ LOLC environment and describes the architecture and implementation of LOLbot.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Sidharth Subramanian and Jason\ + \ Freeman and Scott McCoid},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178425},\n\ + \ issn = {2220-4806},\n keywords = {Machine Musicianship, Live Coding, Laptop\ + \ Orchestra},\n publisher = {University of Michigan},\n title = {LOLbot: Machine\ + \ Musicianship in Laptop Ensembles},\n url = {http://www.nime.org/proceedings/2012/nime2012_119.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177843 + doi: 10.5281/zenodo.1178425 issn: 2220-4806 - keywords: 'cross-artform performance, networked performance, physi- cal computing' - pages: 204--207 - title: 'Cross-Artform Performance Using Networked Interfaces : Last Man to Die''s - Vital LMTD' - url: http://www.nime.org/proceedings/2010/nime2010_204.pdf - year: 2010 + keywords: 'Machine Musicianship, Live Coding, Laptop Orchestra' + publisher: University of Michigan + title: 'LOLbot: Machine Musicianship in Laptop Ensembles' + url: http://www.nime.org/proceedings/2012/nime2012_119.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Jensenius2010 - abstract: 'We report on a study of perceptual and acoustic featuresrelated to the - placement of microphones around a custommade glass instrument. Different microphone - setups weretested: above, inside and outside the instrument and at different distances. - The sounds were evaluated by an expertperformer, and further qualitative and quantitative - analyses have been carried out. 
Preference was given to therecordings from microphones - placed close to the rim of theinstrument, either from the inside or the outside.' - address: 'Sydney, Australia' - author: 'Jensenius, Alexander R. and Innervik, Kjell Tore and Frounberg, Ivar' - bibtex: "@inproceedings{Jensenius2010,\n abstract = {We report on a study of perceptual\ - \ and acoustic featuresrelated to the placement of microphones around a custommade\ - \ glass instrument. Different microphone setups weretested: above, inside and\ - \ outside the instrument and at different distances. The sounds were evaluated\ - \ by an expertperformer, and further qualitative and quantitative analyses have\ - \ been carried out. Preference was given to therecordings from microphones placed\ - \ close to the rim of theinstrument, either from the inside or the outside.},\n\ - \ address = {Sydney, Australia},\n author = {Jensenius, Alexander R. and Innervik,\ - \ Kjell Tore and Frounberg, Ivar},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177817},\n\ - \ issn = {2220-4806},\n keywords = {glass instruments, microphone placement, sound\ - \ analysis},\n pages = {208--211},\n title = {Evaluating the Subjective Effects\ - \ of Microphone Placement on Glass Instruments},\n url = {http://www.nime.org/proceedings/2010/nime2010_208.pdf},\n\ - \ year = {2010}\n}\n" + ID: Schwarz2012 + abstract: 'Corpus-based concatenative synthesis is a fairly recent sound synthesis + method, based on descriptor analysis of any number of existing or live-recorded + sounds, and synthesis by selection of sound segments from the database matching + given sound characteristics. It is well described in the literature, but has been + rarely examined for its capacity as a new interface for musical expression. 
The + interesting outcome of such an examination is that the actual instrument is the + space of sound characteristics, through which the performer navigates with gestures + captured by various input devices. We will take a look at different types of interaction + modes and controllers (positional, inertial, audio analysis) and the gestures + they afford, and provide a critical assessment of their musical and expressive + capabilities, based on several years of musical experience, performing with the + CataRT system for real-time CBCS.' + address: 'Ann Arbor, Michigan' + author: Diemo Schwarz + bibtex: "@inproceedings{Schwarz2012,\n abstract = {Corpus-based concatenative synthesis\ + \ is a fairly recent sound synthesis method, based on descriptor analysis of any\ + \ number of existing or live-recorded sounds, and synthesis by selection of sound\ + \ segments from the database matching given sound characteristics. It is well\ + \ described in the literature, but has been rarely examined for its capacity as\ + \ a new interface for musical expression. 
The interesting outcome of such an examination\ + \ is that the actual instrument is the space of sound characteristics, through\ + \ which the performer navigates with gestures captured by various input devices.\ + \ We will take a look at different types of interaction modes and controllers\ + \ (positional, inertial, audio analysis) and the gestures they afford, and provide\ + \ a critical assessment of their musical and expressive capabilities, based on\ + \ several years of musical experience, performing with the CataRT system for real-time\ + \ CBCS.},\n address = {Ann Arbor, Michigan},\n author = {Diemo Schwarz},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1180593},\n issn = {2220-4806},\n keywords\ + \ = {CataRT, corpus-based concatenative synthesis, gesture},\n publisher = {University\ + \ of Michigan},\n title = {The Sound Space as Musical Instrument: Playing Corpus-Based\ + \ Concatenative Synthesis},\n url = {http://www.nime.org/proceedings/2012/nime2012_120.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177817 + doi: 10.5281/zenodo.1180593 issn: 2220-4806 - keywords: 'glass instruments, microphone placement, sound analysis' - pages: 208--211 - title: Evaluating the Subjective Effects of Microphone Placement on Glass Instruments - url: http://www.nime.org/proceedings/2010/nime2010_208.pdf - year: 2010 + keywords: 'CataRT, corpus-based concatenative synthesis, gesture' + publisher: University of Michigan + title: 'The Sound Space as Musical Instrument: Playing Corpus-Based Concatenative + Synthesis' + url: http://www.nime.org/proceedings/2012/nime2012_120.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Quintas2010 - abstract: 'Glitch DeLighter is a HyperInstrument conceived for Glitch music, based - on the idea of using fire expressiveness to digitally 
distort sound, pushing the - body and primitive ritualism into a computer mediated sound performance. Glitch - DeLighter uses ordinary lighters as physical controllers that can be played by - creating a flame and moving it in the air. Droned sounds are played by sustaining - the flame and beats by generating sparks and fast flames. The pitch of every sound - can be changed moving the flame vertically in the air. This is achieved by using - a custom computer vision system as an interface which maps the real-time the data - extracted from the flame and transmits those parameters to the sound generator. - As a result, the flame visual dynamics are deeply connected to the aural perception - of the sound - ‘the sound seems to be burning’. This process establishes a metaphor - dramaturgically engaging for an audience. This paper contextualizes the glitch - music aesthetics, prior research, the design and development of the instrument - and reports on Burning The Sound– the first music composition created and performed - with the instrument (by the author).' - address: 'Sydney, Australia' - author: 'Quintas, Rudolfo' - bibtex: "@inproceedings{Quintas2010,\n abstract = {Glitch DeLighter is a HyperInstrument\ - \ conceived for Glitch music, based on the idea of using fire expressiveness to\ - \ digitally distort sound, pushing the body and primitive ritualism into a computer\ - \ mediated sound performance. Glitch DeLighter uses ordinary lighters as physical\ - \ controllers that can be played by creating a flame and moving it in the air.\ - \ Droned sounds are played by sustaining the flame and beats by generating sparks\ - \ and fast flames. The pitch of every sound can be changed moving the flame vertically\ - \ in the air. This is achieved by using a custom computer vision system as an\ - \ interface which maps the real-time the data extracted from the flame and transmits\ - \ those parameters to the sound generator. 
As a result, the flame visual dynamics\ - \ are deeply connected to the aural perception of the sound - ‘the sound seems\ - \ to be burning’. This process establishes a metaphor dramaturgically engaging\ - \ for an audience. This paper contextualizes the glitch music aesthetics, prior\ - \ research, the design and development of the instrument and reports on Burning\ - \ The Sound– the first music composition created and performed with the instrument\ - \ (by the author).},\n address = {Sydney, Australia},\n author = {Quintas, Rudolfo},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177879},\n issn = {2220-4806},\n\ - \ keywords = {Hyper-Instruments, Glitch Music, Interactive Systems, Electronic\ - \ Music Performance.},\n pages = {212--216},\n title = {Glitch Delighter : Lighter's\ - \ Flame Base Hyper-Instrument for Glitch Music in Burning The Sound Performance},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_212.pdf},\n year = {2010}\n\ - }\n" + ID: Hansen2012 + abstract: 'This paper presents the results of user interaction with two explorative + music environments (sound system A and B) that were inspired from the Banda Linda + music tradition in two different ways. The sound systems adapted to how a team + of two players improvised and made a melody together in an interleaved fashion: + Systems A and B used a fuzzy logic algorithm and pattern recognition to respond + with modifications of a background rhythms. In an experiment with a pen tablet + interface as the music instrument, users aged 10-13 were to tap tones and continue + each other''s melody. The sound systems rewarded users sonically, if they managed + to add tones to their mutual melody in a rapid turn taking manner with rhythmical + patterns. Videos of experiment sessions show that user teams contributed to a + melody in ways that resemble conversation. 
Interaction data show that each sound + system made player teams play in different ways, but players in general had a + hard time adjusting to a non-Western music tradition. The paper concludes with + a comparison and evaluation of the two sound systems. Finally it proposes a new + approach to the design of collaborative and shared music environments that is + based on ''''listening applications''''.' + address: 'Ann Arbor, Michigan' + author: Anne-Marie Skriver Hansen and Hans Jørgen Andersen and Pirkko Raudaskoski + bibtex: "@inproceedings{Hansen2012,\n abstract = {This paper presents the results\ + \ of user interaction with two explorative music environments (sound system A\ + \ and B) that were inspired from the Banda Linda music tradition in two different\ + \ ways. The sound systems adapted to how a team of two players improvised and\ + \ made a melody together in an interleaved fashion: Systems A and B used a fuzzy\ + \ logic algorithm and pattern recognition to respond with modifications of a background\ + \ rhythms. In an experiment with a pen tablet interface as the music instrument,\ + \ users aged 10-13 were to tap tones and continue each other's melody. The sound\ + \ systems rewarded users sonically, if they managed to add tones to their mutual\ + \ melody in a rapid turn taking manner with rhythmical patterns. Videos of experiment\ + \ sessions show that user teams contributed to a melody in ways that resemble\ + \ conversation. Interaction data show that each sound system made player teams\ + \ play in different ways, but players in general had a hard time adjusting to\ + \ a non-Western music tradition. The paper concludes with a comparison and evaluation\ + \ of the two sound systems. 
Finally it proposes a new approach to the design of\ + \ collaborative and shared music environments that is based on ''listening applications''.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Anne-Marie Skriver Hansen and Hans\ + \ J{\\o}rgen Andersen and Pirkko Raudaskoski},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178275},\n issn = {2220-4806},\n keywords = {Music improvisation,\ + \ novices, social learning, interaction studies, interaction design.},\n publisher\ + \ = {University of Michigan},\n title = {Two Shared Rapid Turn Taking Sound Interfaces\ + \ for Novices},\n url = {http://www.nime.org/proceedings/2012/nime2012_123.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177879 + doi: 10.5281/zenodo.1178275 issn: 2220-4806 - keywords: 'Hyper-Instruments, Glitch Music, Interactive Systems, Electronic Music - Performance.' - pages: 212--216 - title: 'Glitch Delighter : Lighter''s Flame Base Hyper-Instrument for Glitch Music - in Burning The Sound Performance' - url: http://www.nime.org/proceedings/2010/nime2010_212.pdf - year: 2010 + keywords: 'Music improvisation, novices, social learning, interaction studies, interaction + design.' + publisher: University of Michigan + title: Two Shared Rapid Turn Taking Sound Interfaces for Novices + url: http://www.nime.org/proceedings/2012/nime2012_123.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: McPherson2010 - abstract: 'This paper presents the magnetic resonator piano, an augmented instrument - enhancing the capabilities of the acoustic grand piano. Electromagnetic actuators - induce the stringsto vibration, allowing each note to be continuously controlled - in amplitude, frequency, and timbre without external loudspeakers. 
Feedback from - a single pickup on thepiano soundboard allows the actuator waveforms to remainlocked - in phase with the natural motion of each string. Wealso present an augmented piano - keyboard which reportsthe continuous position of every key. Time and spatial resolution - are sufficient to capture detailed data about keypress, release, pretouch, aftertouch, - and other extended gestures. The system, which is designed with cost and setupconstraints - in mind, seeks to give pianists continuous control over the musical sound of their - instrument. The instrument has been used in concert performances, with theelectronically-actuated - sounds blending with acoustic instruments naturally and without amplification.' - address: 'Sydney, Australia' - author: 'McPherson, Andrew and Kim, Youngmoo' - bibtex: "@inproceedings{McPherson2010,\n abstract = {This paper presents the magnetic\ - \ resonator piano, an augmented instrument enhancing the capabilities of the acoustic\ - \ grand piano. Electromagnetic actuators induce the stringsto vibration, allowing\ - \ each note to be continuously controlled in amplitude, frequency, and timbre\ - \ without external loudspeakers. Feedback from a single pickup on thepiano soundboard\ - \ allows the actuator waveforms to remainlocked in phase with the natural motion\ - \ of each string. Wealso present an augmented piano keyboard which reportsthe\ - \ continuous position of every key. Time and spatial resolution are sufficient\ - \ to capture detailed data about keypress, release, pretouch, aftertouch, and\ - \ other extended gestures. The system, which is designed with cost and setupconstraints\ - \ in mind, seeks to give pianists continuous control over the musical sound of\ - \ their instrument. 
The instrument has been used in concert performances, with\ - \ theelectronically-actuated sounds blending with acoustic instruments naturally\ - \ and without amplification.},\n address = {Sydney, Australia},\n author = {McPherson,\ - \ Andrew and Kim, Youngmoo},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177849},\n\ - \ issn = {2220-4806},\n keywords = {Augmented instruments, piano, interfaces,\ - \ electromagnetic actuation, gesture measurement},\n pages = {217--222},\n title\ - \ = {Augmenting the Acoustic Piano with Electromagnetic String Actuation and Continuous\ - \ Key Position Sensing},\n url = {http://www.nime.org/proceedings/2010/nime2010_217.pdf},\n\ - \ year = {2010}\n}\n" + ID: Shahar2012 + abstract: "SoundStrand is a tangible music composition tool. It demonstrates a paradigm\ + \ developed to enable music composition through the use of tangible interfaces.\ + \ This paradigm attempts to overcome the contrast between the relatively small\ + \ of amount degrees of freedom usually demonstrated by tangible interfaces and\ + \ the vast number of possibilities that musical composition presents.\nSoundStrand\ + \ is comprised of a set of physical objects called cells, each representing a\ + \ musical phrase. Cells can be sequentially connected to each other to create\ + \ a musical theme. Cells can also be physically manipulated to access a wide range\ + \ of melodic, rhythmic and harmonic variations. The SoundStrand software assures\ + \ that as the cells are manipulated, the melodic flow, harmonic transitions and\ + \ rhythmic patterns of the theme remain musically plausible while preserving the\ + \ user's intentions." + address: 'Ann Arbor, Michigan' + author: Eyal Shahar + bibtex: "@inproceedings{Shahar2012,\n abstract = {SoundStrand is a tangible music\ + \ composition tool. 
It demonstrates a paradigm developed to enable music composition\ + \ through the use of tangible interfaces. This paradigm attempts to overcome the\ + \ contrast between the relatively small of amount degrees of freedom usually demonstrated\ + \ by tangible interfaces and the vast number of possibilities that musical composition\ + \ presents.\nSoundStrand is comprised of a set of physical objects called cells,\ + \ each representing a musical phrase. Cells can be sequentially connected to each\ + \ other to create a musical theme. Cells can also be physically manipulated to\ + \ access a wide range of melodic, rhythmic and harmonic variations. The SoundStrand\ + \ software assures that as the cells are manipulated, the melodic flow, harmonic\ + \ transitions and rhythmic patterns of the theme remain musically plausible while\ + \ preserving the user's intentions.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Eyal Shahar},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180595},\n\ + \ issn = {2220-4806},\n keywords = {Tangible, algorithmic, composition, computer\ + \ assisted},\n publisher = {University of Michigan},\n title = {SoundStrand: a\ + \ Tangible Interface for Composing Music with Limited Degrees of Freedom},\n url\ + \ = {http://www.nime.org/proceedings/2012/nime2012_125.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177849 + doi: 10.5281/zenodo.1180595 issn: 2220-4806 - keywords: 'Augmented instruments, piano, interfaces, electromagnetic actuation, - gesture measurement' - pages: 217--222 - title: Augmenting the Acoustic Piano with Electromagnetic String Actuation and Continuous - Key Position Sensing - url: http://www.nime.org/proceedings/2010/nime2010_217.pdf - year: 2010 + keywords: 'Tangible, algorithmic, composition, computer assisted' + publisher: 
University of Michigan + title: 'SoundStrand: a Tangible Interface for Composing Music with Limited Degrees + of Freedom' + url: http://www.nime.org/proceedings/2012/nime2012_125.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Grossmann2010 - abstract: 'In this paper I describe aspects that have been involved in my experience - of developing a hybrid instrument. The process of transformation and extension - of the instrument is informed by ideas concerning the intrinsic communication - aspects of musical activities. Decisions taken for designing the instrument and - performing with it take into account the hypothesis that there are ontological - levels of human reception in music that are related to the intercorporeal. Arguing - that it is necessary to encounter resistances for achieving expression, it is - suggested that new instrumental development ought to reflect on the concern for - keeping the natural connections of live performances. ' - address: 'Sydney, Australia' - author: 'Grossmann, Cesar M.' - bibtex: "@inproceedings{Grossmann2010,\n abstract = {In this paper I describe aspects\ - \ that have been involved in my experience of developing a hybrid instrument.\ - \ The process of transformation and extension of the instrument is informed by\ - \ ideas concerning the intrinsic communication aspects of musical activities.\ - \ Decisions taken for designing the instrument and performing with it take into\ - \ account the hypothesis that there are ontological levels of human reception\ - \ in music that are related to the intercorporeal. Arguing that it is necessary\ - \ to encounter resistances for achieving expression, it is suggested that new\ - \ instrumental development ought to reflect on the concern for keeping the natural\ - \ connections of live performances. 
},\n address = {Sydney, Australia},\n author\ - \ = {Grossmann, Cesar M.},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177781},\n\ - \ issn = {2220-4806},\n keywords = {live processing,new instruments,nime10,recorder},\n\ - \ pages = {223--228},\n title = {Developing a Hybrid Contrabass Recorder Resistances,\ - \ Expression, Gestures and Rhetoric},\n url = {http://www.nime.org/proceedings/2010/nime2010_223.pdf},\n\ - \ year = {2010}\n}\n" + ID: Weitzner2012 + abstract: 'massMobile is a client-server system for mass audience participation + in live performances using smartphones. It was designed to flexibly adapt to a + variety of participatory performance needs and to a variety of performance venues. + It allows for real time bi-directional communication between performers and audiences + utilizing existing wireless 3G, 4G, or WiFi networks. In this paper, we discuss + the goals, design, and implementation of the framework, and we describe several + projects realized with massMobile.' + address: 'Ann Arbor, Michigan' + author: Nathan Weitzner and Jason Freeman and Stephen Garrett and Yan-Ling Chen + bibtex: "@inproceedings{Weitzner2012,\n abstract = {massMobile is a client-server\ + \ system for mass audience participation in live performances using smartphones.\ + \ It was designed to flexibly adapt to a variety of participatory performance\ + \ needs and to a variety of performance venues. It allows for real time bi-directional\ + \ communication between performers and audiences utilizing existing wireless 3G,\ + \ 4G, or WiFi networks. 
In this paper, we discuss the goals, design, and implementation\ + \ of the framework, and we describe several projects realized with massMobile.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Nathan Weitzner and Jason Freeman\ + \ and Stephen Garrett and Yan-Ling Chen},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178449},\n\ + \ issn = {2220-4806},\n keywords = {audience participation, network music, smartphone,\ + \ performance, mobile},\n publisher = {University of Michigan},\n title = {massMobile\ + \ -an Audience Participation Framework},\n url = {http://www.nime.org/proceedings/2012/nime2012_128.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177781 + doi: 10.5281/zenodo.1178449 issn: 2220-4806 - keywords: 'live processing,new instruments,nime10,recorder' - pages: 223--228 - title: 'Developing a Hybrid Contrabass Recorder Resistances, Expression, Gestures - and Rhetoric' - url: http://www.nime.org/proceedings/2010/nime2010_223.pdf - year: 2010 + keywords: 'audience participation, network music, smartphone, performance, mobile' + publisher: University of Michigan + title: massMobile -an Audience Participation Framework + url: http://www.nime.org/proceedings/2012/nime2012_128.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Carrillo2010 - abstract: 'This paper presents a virtual violin for real-time performances consisting - of two modules: a violin spectral model and a control interface. The interface - is composed by a sensing bow and a tube with drawn strings in substitution of - a real violin. The spectral model is driven by the bowing controls captured with - the control interface and it is able to predict spectral envelopes of the sound - corresponding to those controls. 
The envelopes are filled with harmonic andnoisy - content and given to an additive synthesizer in order to produce violin sounds. - The sensing system is based on two motion trackers with 6 degrees of freedom. - One tracker is attached to the bow and the other to the tube. Bowing controls - are computed after a calibration process where the position of virtual strings - and the hair-ribbon of the bowis obtained. A real time implementation was developed - asa MAX/MSP patch with external objects for each of the modules.' - address: 'Sydney, Australia' - author: 'Carrillo, Alfonso P. and Bonada, Jordi' - bibtex: "@inproceedings{Carrillo2010,\n abstract = {This paper presents a virtual\ - \ violin for real-time performances consisting of two modules: a violin spectral\ - \ model and a control interface. The interface is composed by a sensing bow and\ - \ a tube with drawn strings in substitution of a real violin. The spectral model\ - \ is driven by the bowing controls captured with the control interface and it\ - \ is able to predict spectral envelopes of the sound corresponding to those controls.\ - \ The envelopes are filled with harmonic andnoisy content and given to an additive\ - \ synthesizer in order to produce violin sounds. The sensing system is based on\ - \ two motion trackers with 6 degrees of freedom. One tracker is attached to the\ - \ bow and the other to the tube. Bowing controls are computed after a calibration\ - \ process where the position of virtual strings and the hair-ribbon of the bowis\ - \ obtained. A real time implementation was developed asa MAX/MSP patch with external\ - \ objects for each of the modules.},\n address = {Sydney, Australia},\n author\ - \ = {Carrillo, Alfonso P. 
and Bonada, Jordi},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177737},\n issn = {2220-4806},\n keywords = {violin, synthesis,\ - \ control, spectral, virtual},\n pages = {229--232},\n title = {The Bowed Tube\ - \ : a Virtual Violin},\n url = {http://www.nime.org/proceedings/2010/nime2010_229.pdf},\n\ - \ year = {2010}\n}\n" + ID: Henson2012 + abstract: 'This paper introduces the concept of Kugelschwung, a digital musical + instrument centrally based around the use of pendulums and lasers to create unique + and highly interactive electronic ambient soundscapes. Here, we explore the underlying + design and physical construction of the instrument, as well as its implementation + and feasibility as an instrument in the real world. To conclude, we outline potential + expansions to the instrument, describing how its range of applications can be + extended to accommodate a variety of musical styles.' + address: 'Ann Arbor, Michigan' + author: Jamie Henson and Benjamin Collins and Alexander Giles and Kathryn Webb and + Matthew Livingston and Thomas Mortensson + bibtex: "@inproceedings{Henson2012,\n abstract = {This paper introduces the concept\ + \ of Kugelschwung, a digital musical instrument centrally based around the use\ + \ of pendulums and lasers to create unique and highly interactive electronic ambient\ + \ soundscapes. Here, we explore the underlying design and physical construction\ + \ of the instrument, as well as its implementation and feasibility as an instrument\ + \ in the real world. 
To conclude, we outline potential expansions to the instrument,\ + \ describing how its range of applications can be extended to accommodate a variety\ + \ of musical styles.},\n address = {Ann Arbor, Michigan},\n author = {Jamie Henson\ + \ and Benjamin Collins and Alexander Giles and Kathryn Webb and Matthew Livingston\ + \ and Thomas Mortensson},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178285},\n\ + \ issn = {2220-4806},\n keywords = {laser, pendulums, instrument design, electronic,\ + \ sampler, soundscape, expressive performance},\n publisher = {University of Michigan},\n\ + \ title = {Kugelschwung -a Pendulum-based Musical Instrument},\n url = {http://www.nime.org/proceedings/2012/nime2012_131.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177737 + doi: 10.5281/zenodo.1178285 issn: 2220-4806 - keywords: 'violin, synthesis, control, spectral, virtual' - pages: 229--232 - title: 'The Bowed Tube : a Virtual Violin' - url: http://www.nime.org/proceedings/2010/nime2010_229.pdf - year: 2010 + keywords: 'laser, pendulums, instrument design, electronic, sampler, soundscape, + expressive performance' + publisher: University of Michigan + title: Kugelschwung -a Pendulum-based Musical Instrument + url: http://www.nime.org/proceedings/2012/nime2012_131.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Hochenbaum2010 - abstract: 'This research is an initial effort in showing how a multimodal approach - can improve systems for gaining insight into a musician''s practice and technique. - Embedding a variety of sensors inside musical instruments and synchronously recording - the sensors'' data along with audio, we gather a database of gestural information - from multiple performers, then use machine-learning techniques to recognize which - musician is performing. 
Our multimodal approach (using both audio and sensor data) - yields promising performer classification results, which we see as a first step - in a larger effort to gain insight into musicians'' practice and technique. ' - address: 'Sydney, Australia' - author: 'Hochenbaum, Jordan and Kapur, Ajay and Wright, Matthew' - bibtex: "@inproceedings{Hochenbaum2010,\n abstract = {This research is an initial\ - \ effort in showing how a multimodal approach can improve systems for gaining\ - \ insight into a musician's practice and technique. Embedding a variety of sensors\ - \ inside musical instruments and synchronously recording the sensors' data along\ - \ with audio, we gather a database of gestural information from multiple performers,\ - \ then use machine-learning techniques to recognize which musician is performing.\ - \ Our multimodal approach (using both audio and sensor data) yields promising\ - \ performer classification results, which we see as a first step in a larger effort\ - \ to gain insight into musicians' practice and technique. },\n address = {Sydney,\ - \ Australia},\n author = {Hochenbaum, Jordan and Kapur, Ajay and Wright, Matthew},\n\ + ID: McGlynn2012 + abstract: 'This paper contends that the development of expressive performance interfaces + using multi-touch technology has been hindered by an over-reliance upon GUI paradigms. + Despite offering rich and robust data output and multiple ways to interpret it, + approaches towards using multi-touch technology in digit al musical inst rument + design have been markedly conservative, showing a strong tendency towards modeling + existing hardware. This not only negates many of the benefits of multi-touch technology + but also creates specific difficulties in the context of live music performance. 
+ A case study of two other interface types that have seen considerable musical + use --the XY pad and button grid --illustrates the manner in which the implicit + characteristics of a device determine the conditions under which it will favorably + perform. Accordingly, this paper proposes an alternative approach to multi-touch + which emp hasizes the imp licit strengths of the technology and establishes a + philosophy of design around them. Finally, we introduce two toolkits currently + being used to assess the validity of this approach.' + address: 'Ann Arbor, Michigan' + author: Patrick McGlynn and Victor Lazzarini and Gordon Delap and Xiaoyu Chen + bibtex: "@inproceedings{McGlynn2012,\n abstract = {This paper contends that the\ + \ development of expressive performance interfaces using multi-touch technology\ + \ has been hindered by an over-reliance upon GUI paradigms. Despite offering rich\ + \ and robust data output and multiple ways to interpret it, approaches towards\ + \ using multi-touch technology in digit al musical inst rument design have been\ + \ markedly conservative, showing a strong tendency towards modeling existing hardware.\ + \ This not only negates many of the benefits of multi-touch technology but also\ + \ creates specific difficulties in the context of live music performance. A case\ + \ study of two other interface types that have seen considerable musical use --the\ + \ XY pad and button grid --illustrates the manner in which the implicit characteristics\ + \ of a device determine the conditions under which it will favorably perform.\ + \ Accordingly, this paper proposes an alternative approach to multi-touch which\ + \ emp hasizes the imp licit strengths of the technology and establishes a philosophy\ + \ of design around them. 
Finally, we introduce two toolkits currently being used\ + \ to assess the validity of this approach.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Patrick McGlynn and Victor Lazzarini and Gordon Delap and Xiaoyu Chen},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177805},\n issn = {2220-4806},\n\ - \ keywords = {Performer Recognition, Multimodal, HCI, Machine Learning, Hyperinstrument,\ - \ eSitar},\n pages = {233--237},\n title = {Multimodal Musician Recognition},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_233.pdf},\n year = {2010}\n\ - }\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1178349},\n issn = {2220-4806},\n\ + \ keywords = {Multi-touch, controllers, mapping, gesture, GUIs, physical interfaces,\ + \ perceptual & cognitive issues},\n publisher = {University of Michigan},\n title\ + \ = {Recontextualizing the Multi-touch Surface},\n url = {http://www.nime.org/proceedings/2012/nime2012_132.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177805 + doi: 10.5281/zenodo.1178349 issn: 2220-4806 - keywords: 'Performer Recognition, Multimodal, HCI, Machine Learning, Hyperinstrument, - eSitar' - pages: 233--237 - title: Multimodal Musician Recognition - url: http://www.nime.org/proceedings/2010/nime2010_233.pdf - year: 2010 + keywords: 'Multi-touch, controllers, mapping, gesture, GUIs, physical interfaces, + perceptual & cognitive issues' + publisher: University of Michigan + title: Recontextualizing the Multi-touch Surface + url: http://www.nime.org/proceedings/2012/nime2012_132.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Guaus2010 - abstract: 'In this paper, we present our research on the acquisitionof gesture information - for the study of the expressivenessin guitar performances. 
For that purpose, we - design a sensor system which is able to gather the movements from lefthand fingers. - Our effort is focused on a design that is (1)non-intrusive to the performer and - (2) able to detect fromstrong movements of the left hand to subtle movements ofthe - fingers. The proposed system is based on capacitive sensors mounted on the fingerboard - of the guitar. We presentthe setup of the sensor system and analyze its response - toseveral finger movements.' - address: 'Sydney, Australia' - author: 'Guaus, Enric and Ozaslan, Tan and Palacios, Eric and Arcos, Josep L.' - bibtex: "@inproceedings{Guaus2010,\n abstract = {In this paper, we present our research\ - \ on the acquisitionof gesture information for the study of the expressivenessin\ - \ guitar performances. For that purpose, we design a sensor system which is able\ - \ to gather the movements from lefthand fingers. Our effort is focused on a design\ - \ that is (1)non-intrusive to the performer and (2) able to detect fromstrong\ - \ movements of the left hand to subtle movements ofthe fingers. The proposed system\ - \ is based on capacitive sensors mounted on the fingerboard of the guitar. We\ - \ presentthe setup of the sensor system and analyze its response toseveral finger\ - \ movements.},\n address = {Sydney, Australia},\n author = {Guaus, Enric and Ozaslan,\ - \ Tan and Palacios, Eric and Arcos, Josep L.},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177783},\n issn = {2220-4806},\n keywords = {Guitar; Gesture\ - \ acquisition; Capacitive sensors},\n pages = {238--243},\n title = {A Left Hand\ - \ Gesture Caption System for Guitar Based on Capacitive Sensors},\n url = {http://www.nime.org/proceedings/2010/nime2010_238.pdf},\n\ - \ year = {2010}\n}\n" + ID: Donnarumma2012 + abstract: "Performing music with a computer and loudspeakers represents always a\ + \ challenge. 
The lack of a traditional instrument requires the performer to study\ + \ idiomatic strategies by which musicianship becomes apparent. On the other hand,\ + \ the audience needs to decode those strategies, so to achieve an understanding\ + \ and appreciation of the music being played. The issue is particularly relevant\ + \ to the performance of music that results from the mediation between biological\ + \ signals of the human body and physical performance.\nThe present article tackles\ + \ this concern by demonstrating a new model of musical performance; what I define\ + \ biophysical music. This is music generated and played in real time by amplifying\ + \ and processing the acoustic sound of a performer's muscle contractions. The\ + \ model relies on an original and open source technology made of custom biosensors\ + \ and a related software framework. The succesfull application of these tools\ + \ is discussed in the practical context of a solo piece for sensors, laptop and\ + \ loudspeakers. Eventually, the compositional strategies that characterize the\ + \ piece are discussed along with a systematic description of the relevant mapping\ + \ techniques and their sonic outcome." + address: 'Ann Arbor, Michigan' + author: Marco Donnarumma + bibtex: "@inproceedings{Donnarumma2012,\n abstract = {Performing music with a computer\ + \ and loudspeakers represents always a challenge. The lack of a traditional instrument\ + \ requires the performer to study idiomatic strategies by which musicianship becomes\ + \ apparent. On the other hand, the audience needs to decode those strategies,\ + \ so to achieve an understanding and appreciation of the music being played. 
The\ + \ issue is particularly relevant to the performance of music that results from\ + \ the mediation between biological signals of the human body and physical performance.\n\ + The present article tackles this concern by demonstrating a new model of musical\ + \ performance; what I define biophysical music. This is music generated and played\ + \ in real time by amplifying and processing the acoustic sound of a performer's\ + \ muscle contractions. The model relies on an original and open source technology\ + \ made of custom biosensors and a related software framework. The succesfull application\ + \ of these tools is discussed in the practical context of a solo piece for sensors,\ + \ laptop and loudspeakers. Eventually, the compositional strategies that characterize\ + \ the piece are discussed along with a systematic description of the relevant\ + \ mapping techniques and their sonic outcome.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Marco Donnarumma},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178245},\n\ + \ issn = {2220-4806},\n keywords = {Muscle sounds, biophysical music, augmented\ + \ body, realtime performance, human-computer interaction, embodiment.},\n publisher\ + \ = {University of Michigan},\n title = {Music for Flesh II: informing interactive\ + \ music performance with the viscerality of the body system},\n url = {http://www.nime.org/proceedings/2012/nime2012_133.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177783 + doi: 10.5281/zenodo.1178245 issn: 2220-4806 - keywords: Guitar; Gesture acquisition; Capacitive sensors - pages: 238--243 - title: A Left Hand Gesture Caption System for Guitar Based on Capacitive Sensors - url: http://www.nime.org/proceedings/2010/nime2010_238.pdf - year: 2010 + keywords: 'Muscle sounds, biophysical music, 
augmented body, realtime performance, + human-computer interaction, embodiment.' + publisher: University of Michigan + title: 'Music for Flesh II: informing interactive music performance with the viscerality + of the body system' + url: http://www.nime.org/proceedings/2012/nime2012_133.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Schmeder2010 - abstract: 'The design of an unusually simple fabric-based touchlocation and pressure - sensor is introduced. An analysisof the raw sensor data is shown to have significant - nonlinearities and non-uniform noise. Using support vectormachine learning and - a state-dependent adaptive filter itis demonstrated that these problems can be - overcome.The method is evaluated quantitatively using a statisticalestimate of - the instantaneous rate of information transfer.The SVM regression alone is shown - to improve the gesturesignal information rate by up to 20% with zero addedlatency, - and in combination with filtering by 40% subjectto a constant latency bound of - 10 milliseconds.' - address: 'Sydney, Australia' - author: 'Schmeder, Andrew and Freed, Adrian' - bibtex: "@inproceedings{Schmeder2010,\n abstract = {The design of an unusually simple\ - \ fabric-based touchlocation and pressure sensor is introduced. An analysisof\ - \ the raw sensor data is shown to have significant nonlinearities and non-uniform\ - \ noise. 
Using support vectormachine learning and a state-dependent adaptive filter\ - \ itis demonstrated that these problems can be overcome.The method is evaluated\ - \ quantitatively using a statisticalestimate of the instantaneous rate of information\ - \ transfer.The SVM regression alone is shown to improve the gesturesignal information\ - \ rate by up to 20% with zero addedlatency, and in combination with filtering\ - \ by 40% subjectto a constant latency bound of 10 milliseconds.},\n address =\ - \ {Sydney, Australia},\n author = {Schmeder, Andrew and Freed, Adrian},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177893},\n issn = {2220-4806},\n keywords\ - \ = {gesture signal processing, support vector machine, touch sensor},\n pages\ - \ = {244--249},\n title = {Support Vector Machine Learning for Gesture Signal\ - \ Estimation with a Piezo-Resistive Fabric Touch Surface},\n url = {http://www.nime.org/proceedings/2010/nime2010_244.pdf},\n\ - \ year = {2010}\n}\n" + ID: Booth2012 + abstract: 'In this paper, we argue that the design of New Interfaces for Musical + Expression has much to gain from the study of interaction in ensemble laptop performance + contexts using ethnographic techniques. Inspired by recent third-stream research + in the field of human computer interaction, we describe a recent ethnomethodologically-informed + study of the Birmingham Laptop Ensemble (BiLE), and detail our approach to thick + description of the group''s working practices. Initial formal analysis of this + material sheds light on the fluidity of composer, performer and designer roles + within the ensemble and shows how confluences of these roles constitute member''s + differing viewpoints. 
We go on to draw out a number of strands of interaction + that highlight the essentially complex, socially constructed and value driven + nature of the group''s practice and conclude by reviewing the implications of + these factors on the design of software tools for laptop ensembles.' + address: 'Ann Arbor, Michigan' + author: Graham Booth and Michael Gurevich + bibtex: "@inproceedings{Booth2012,\n abstract = {In this paper, we argue that the\ + \ design of New Interfaces for Musical Expression has much to gain from the study\ + \ of interaction in ensemble laptop performance contexts using ethnographic techniques.\ + \ Inspired by recent third-stream research in the field of human computer interaction,\ + \ we describe a recent ethnomethodologically-informed study of the Birmingham\ + \ Laptop Ensemble (BiLE), and detail our approach to thick description of the\ + \ group's working practices. Initial formal analysis of this material sheds light\ + \ on the fluidity of composer, performer and designer roles within the ensemble\ + \ and shows how confluences of these roles constitute member's differing viewpoints.\ + \ We go on to draw out a number of strands of interaction that highlight the essentially\ + \ complex, socially constructed and value driven nature of the group's practice\ + \ and conclude by reviewing the implications of these factors on the design of\ + \ software tools for laptop ensembles.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Graham Booth and Michael Gurevich},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178215},\n issn = {2220-4806},\n keywords = {Laptop Performance,\ + \ Ethnography, Ethnomethodology, Human Computer Interaction.},\n publisher = {University\ + \ of Michigan},\n title = {Collaborative composition and socially constituted\ + \ instruments: Ensemble laptop performance through the lens of ethnography},\n\ + \ url = 
{http://www.nime.org/proceedings/2012/nime2012_136.pdf},\n year = {2012}\n\ + }\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178215 + issn: 2220-4806 + keywords: 'Laptop Performance, Ethnography, Ethnomethodology, Human Computer Interaction.' + publisher: University of Michigan + title: 'Collaborative composition and socially constituted instruments: Ensemble + laptop performance through the lens of ethnography' + url: http://www.nime.org/proceedings/2012/nime2012_136.pdf + year: 2012 + + +- ENTRYTYPE: inproceedings + ID: Manousakis2012 + abstract: 'This paper presents the system and technology developed for the distributed, + micro-telematic, interactive sound art installation, The Network Is A Blind Space. + The piece uses sound to explore the physical yet invisible electromagnetic spaces + created by Wireless Local Area Networks (WLANs). To this end, the author created + a framework for indoor WiFi localization, providing a variety of control data + for various types of `musical echolocation''. This data, generated mostly by visitors + exploring the installation while holding WiFi-enabled devices, is used to convey + the hidden properties of wireless networks as dynamic spaces through an artistic + experience.' + address: 'Ann Arbor, Michigan' + author: Stelios Manousakis + bibtex: "@inproceedings{Manousakis2012,\n abstract = {This paper presents the system\ + \ and technology developed for the distributed, micro-telematic, interactive sound\ + \ art installation, The Network Is A Blind Space. The piece uses sound to explore\ + \ the physical yet invisible electromagnetic spaces created by Wireless Local\ + \ Area Networks (WLANs). To this end, the author created a framework for indoor\ + \ WiFi localization, providing a variety of control data for various types of\ + \ `musical echolocation'. 
This data, generated mostly by visitors exploring the\ + \ installation while holding WiFi-enabled devices, is used to convey the hidden\ + \ properties of wireless networks as dynamic spaces through an artistic experience.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Stelios Manousakis},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178341},\n issn = {2220-4806},\n keywords\ + \ = {Network music, mobile music, distributed music, interactivity, sound art\ + \ installation, collaborative instrument, site-specific, electromagnetic signals,\ + \ WiFi, trilateration, traceroute, echolocation, SuperCollider, Pure Data, RjDj,\ + \ mapping},\n publisher = {University of Michigan},\n title = {Network spaces\ + \ as collaborative instruments: {WLAN} trilateration for musical echolocation\ + \ in sound art},\n url = {http://www.nime.org/proceedings/2012/nime2012_142.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177893 + doi: 10.5281/zenodo.1178341 issn: 2220-4806 - keywords: 'gesture signal processing, support vector machine, touch sensor' - pages: 244--249 - title: Support Vector Machine Learning for Gesture Signal Estimation with a Piezo-Resistive - Fabric Touch Surface - url: http://www.nime.org/proceedings/2010/nime2010_244.pdf - year: 2010 + keywords: 'Network music, mobile music, distributed music, interactivity, sound + art installation, collaborative instrument, site-specific, electromagnetic signals, + WiFi, trilateration, traceroute, echolocation, SuperCollider, Pure Data, RjDj, + mapping' + publisher: University of Michigan + title: 'Network spaces as collaborative instruments: WLAN trilateration for musical + echolocation in sound art' + url: http://www.nime.org/proceedings/2012/nime2012_142.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Schacher2010 - 
abstract: 'Mapping in interactive dance performance poses a number of questions - related to the perception and expression of gestures in contrast to pure motion-detection - and analysis. A specific interactive dance project is discussed, in which two - complementary sensing modes are integrated to obtain higherlevel expressive gestures. - These are applied to a modular nonlinear composition, in which the exploratory - dance performance assumes the role of instrumentalist and conductor. The development - strategies and methods for each of the involved artists are discussed and the - software tools and wearable devices that were developed for this project are presented. ' - address: 'Sydney, Australia' - author: 'Schacher, Jan C.' - bibtex: "@inproceedings{Schacher2010,\n abstract = {Mapping in interactive dance\ - \ performance poses a number of questions related to the perception and expression\ - \ of gestures in contrast to pure motion-detection and analysis. A specific interactive\ - \ dance project is discussed, in which two complementary sensing modes are integrated\ - \ to obtain higherlevel expressive gestures. These are applied to a modular nonlinear\ - \ composition, in which the exploratory dance performance assumes the role of\ - \ instrumentalist and conductor. The development strategies and methods for each\ - \ of the involved artists are discussed and the software tools and wearable devices\ - \ that were developed for this project are presented. 
},\n address = {Sydney,\ - \ Australia},\n author = {Schacher, Jan C.},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177889},\n issn = {2220-4806},\n keywords = {Mapping, motion\ - \ sensing, computer vision, artistic strategies, wearable sensors, mapping tools,\ - \ splines, delaunay tessellation.},\n pages = {250--254},\n title = {Motion To\ - \ Gesture To Sound : Mapping For Interactive Dance},\n url = {http://www.nime.org/proceedings/2010/nime2010_250.pdf},\n\ - \ year = {2010}\n}\n" + ID: McGee2012 + abstract: 'SenSynth is an open-source mobile application that allows for arbitrary, + dynamic mapping between several sensors and sound synthesis parameters. In addition + to synthesis techniques commonly found on mobile devices, SenSynth includes a + scanned synthesis source for the audification of sensor data. Using SenSynth, + we present a novel instrument based on the audification of accelerometer data + and introduce a new means of mobile synthesis control via a wearable magnetic + ring. SenSynth also employs a global pitch quantizer so one may adjust the level + of virtuosity required to play any instruments created via mapping.' + address: 'Ann Arbor, Michigan' + author: Ryan McGee and Daniel Ashbrook and Sean White + bibtex: "@inproceedings{McGee2012,\n abstract = {SenSynth is an open-source mobile\ + \ application that allows for arbitrary, dynamic mapping between several sensors\ + \ and sound synthesis parameters. In addition to synthesis techniques commonly\ + \ found on mobile devices, SenSynth includes a scanned synthesis source for the\ + \ audification of sensor data. Using SenSynth, we present a novel instrument based\ + \ on the audification of accelerometer data and introduce a new means of mobile\ + \ synthesis control via a wearable magnetic ring. 
SenSynth also employs a global\ + \ pitch quantizer so one may adjust the level of virtuosity required to play any\ + \ instruments created via mapping.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Ryan McGee and Daniel Ashbrook and Sean White},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178347},\n issn = {2220-4806},\n keywords = {mobile music,\ + \ sonification, audification, mobile sensors},\n publisher = {University of Michigan},\n\ + \ title = {SenSynth: a Mobile Application for Dynamic Sensor to Sound Mapping},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_149.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177889 + doi: 10.5281/zenodo.1178347 issn: 2220-4806 - keywords: 'Mapping, motion sensing, computer vision, artistic strategies, wearable - sensors, mapping tools, splines, delaunay tessellation.' - pages: 250--254 - title: 'Motion To Gesture To Sound : Mapping For Interactive Dance' - url: http://www.nime.org/proceedings/2010/nime2010_250.pdf - year: 2010 + keywords: 'mobile music, sonification, audification, mobile sensors' + publisher: University of Michigan + title: 'SenSynth: a Mobile Application for Dynamic Sensor to Sound Mapping' + url: http://www.nime.org/proceedings/2012/nime2012_149.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Whalley2010 - abstract: 'GIIMP addresses the criticism that in many interactive music systems - the machine simply reacts. Interaction is addressed by extending Winkler''s [18] - model toward adapting Paine''s [10] conversational model of interaction. 
Realized - using commercial tools, GIIMP implements a machine/human generative improvisation - system using human gesture input, machine gesture capture, and a gesture mutation - module in conjunction with a flocking patch, mapped through microtonal/spectral - techniques to sound. The intention is to meld some established and current practices, - and combine aspects of symbolic and sub-symbolic approaches, toward musical outcomes. ' - address: 'Sydney, Australia' - author: 'Whalley, Ian' - bibtex: "@inproceedings{Whalley2010,\n abstract = {GIIMP addresses the criticism\ - \ that in many interactive music systems the machine simply reacts. Interaction\ - \ is addressed by extending Winkler's [18] model toward adapting Paine's [10]\ - \ conversational model of interaction. Realized using commercial tools, GIIMP\ - \ implements a machine/human generative improvisation system using human gesture\ - \ input, machine gesture capture, and a gesture mutation module in conjunction\ - \ with a flocking patch, mapped through microtonal/spectral techniques to sound.\ - \ The intention is to meld some established and current practices, and combine\ - \ aspects of symbolic and sub-symbolic approaches, toward musical outcomes. },\n\ - \ address = {Sydney, Australia},\n author = {Whalley, Ian},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177923},\n issn = {2220-4806},\n keywords = {Interaction,\ - \ gesture, genetic algorithm, flocking, improvisation.},\n pages = {255--258},\n\ - \ title = {Generative Improv . \\& Interactive Music Project (GIIMP)},\n url =\ - \ {http://www.nime.org/proceedings/2010/nime2010_255.pdf},\n year = {2010}\n}\n" + ID: Hattwick2012 + abstract: 'The configurability and networking abilities of digital musical instruments + increases the possibilities for collaboration in musical performances. 
Computer + music ensembles such as laptop orchestras are becoming increasingly common and + provide laboratories for the exploration of these possibilities. However, much + of the literature regarding the creation of DMIs has been focused on individual + expressivity, and their potential for collaborative performance has been under-utilized. + This paper makes the case for the benefits of an approach to digital musical instrument + design that begins with their collaborative potential, examines several frameworks + and sets of principles for the creation of digital musical instruments, and proposes + a dimension space representation of collaborative approaches which can be used + to evaluate and guide future DMI creation. Several examples of DMIs and compositions + are then evaluated and discussed in the context of this dimension space.' + address: 'Ann Arbor, Michigan' + author: Ian Hattwick and Marcelo Wanderley + bibtex: "@inproceedings{Hattwick2012,\n abstract = {The configurability and networking\ + \ abilities of digital musical instruments increases the possibilities for collaboration\ + \ in musical performances. Computer music ensembles such as laptop orchestras\ + \ are becoming increasingly common and provide laboratories for the exploration\ + \ of these possibilities. However, much of the literature regarding the creation\ + \ of DMIs has been focused on individual expressivity, and their potential for\ + \ collaborative performance has been under-utilized. This paper makes the case\ + \ for the benefits of an approach to digital musical instrument design that begins\ + \ with their collaborative potential, examines several frameworks and sets of\ + \ principles for the creation of digital musical instruments, and proposes a dimension\ + \ space representation of collaborative approaches which can be used to evaluate\ + \ and guide future DMI creation. 
Several examples of DMIs and compositions are\ + \ then evaluated and discussed in the context of this dimension space.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Ian Hattwick and Marcelo Wanderley},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178281},\n issn = {2220-4806},\n\ + \ keywords = {dimension space, collaborative, digital musical instrument, dmi,\ + \ digital music ensemble, dme},\n publisher = {University of Michigan},\n title\ + \ = {A Dimension Space for Evaluating Collaborative Musical Performance Systems},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_150.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177923 + doi: 10.5281/zenodo.1178281 issn: 2220-4806 - keywords: 'Interaction, gesture, genetic algorithm, flocking, improvisation.' - pages: 255--258 - title: Generative Improv . & Interactive Music Project (GIIMP) - url: http://www.nime.org/proceedings/2010/nime2010_255.pdf - year: 2010 + keywords: 'dimension space, collaborative, digital musical instrument, dmi, digital + music ensemble, dme' + publisher: University of Michigan + title: A Dimension Space for Evaluating Collaborative Musical Performance Systems + url: http://www.nime.org/proceedings/2012/nime2012_150.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Nymoen2010 - abstract: 'In this paper we present a method for studying relationships between - features of sound and features of movement. The method has been tested by carrying - out an experiment with people moving an object in space along with short sounds. - 3D position data of the object was recorded and several features were calculated - from each of the recordings. 
These features were provided as input to a classifier - which was able to classify the recorded actions satisfactorily, particularly when - taking into account that the only link between the actions performed by the different - subjects was the sound they heard while making the action.' - address: 'Sydney, Australia' - author: 'Nymoen, Kristian and Glette, Kyrre and Skogstad, Ståle A. and Torresen, - Jim and Jensenius, Alexander Refsum' - bibtex: "@inproceedings{Nymoen2010,\n abstract = {In this paper we present a method\ - \ for studying relationships between features of sound and features of movement.\ - \ The method has been tested by carrying out an experiment with people moving\ - \ an object in space along with short sounds. 3D position data of the object was\ - \ recorded and several features were calculated from each of the recordings. These\ - \ features were provided as input to a classifier which was able to classify the\ - \ recorded actions satisfactorily, particularly when taking into account that\ - \ the only link between the actions performed by the different subjects was the\ - \ sound they heard while making the action.},\n address = {Sydney, Australia},\n\ - \ author = {Nymoen, Kristian and Glette, Kyrre and Skogstad, Ståle A. and Torresen,\ - \ Jim and Jensenius, Alexander Refsum},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177869},\n\ - \ issn = {2220-4806},\n keywords = {nime10},\n pages = {259--262},\n title = {Searching\ - \ for Cross-Individual Relationships between Sound and Movement Features using\ - \ an {SVM} Classifier},\n url = {http://www.nime.org/proceedings/2010/nime2010_259.pdf},\n\ - \ year = {2010}\n}\n" + ID: Carlson2012 + abstract: 'Borderlands is a new interface for composing and performing with granular + synthesis. 
The software enables flexible, realtime improvisation and is designed + to allow users to engage with sonic material on a fundamental level, breaking + free of traditional paradigms for interaction with this technique. The user is + envisioned as an organizer of sound, simultaneously assuming the roles of curator, + performer, and listener. This paper places the software within the context of + painterly interfaces and describes the user interaction design and synthesis methodology.' + address: 'Ann Arbor, Michigan' + author: Chris Carlson and Ge Wang + bibtex: "@inproceedings{Carlson2012,\n abstract = {Borderlands is a new interface\ + \ for composing and performing with granular synthesis. The software enables flexible,\ + \ realtime improvisation and is designed to allow users to engage with sonic material\ + \ on a fundamental level, breaking free of traditional paradigms for interaction\ + \ with this technique. The user is envisioned as an organizer of sound, simultaneously\ + \ assuming the roles of curator, performer, and listener. 
This paper places the\ + \ software within the context of painterly interfaces and describes the user interaction\ + \ design and synthesis methodology.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Chris Carlson and Ge Wang},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178229},\n\ + \ issn = {2220-4806},\n keywords = {Granular synthesis, painterly interfaces,\ + \ improvisation, organized sound, NIME, CCRMA},\n publisher = {University of Michigan},\n\ + \ title = {Borderlands -An Audiovisual Interface for Granular Synthesis},\n url\ + \ = {http://www.nime.org/proceedings/2012/nime2012_152.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177869 + doi: 10.5281/zenodo.1178229 issn: 2220-4806 - keywords: nime10 - pages: 259--262 - title: Searching for Cross-Individual Relationships between Sound and Movement Features - using an SVM Classifier - url: http://www.nime.org/proceedings/2010/nime2010_259.pdf - year: 2010 + keywords: 'Granular synthesis, painterly interfaces, improvisation, organized sound, + NIME, CCRMA' + publisher: University of Michigan + title: Borderlands -An Audiovisual Interface for Granular Synthesis + url: http://www.nime.org/proceedings/2012/nime2012_152.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Baba2010 - abstract: '''''VirtualPhilharmony'''' (V.P.) is a conducting interface that enables - users to perform expressive music with conducting action. Several previously developed - conducting interfaces do not satisfy users who have conducting experience because - the feedback from the conducting action does not always correspond with a natural - performance. The tempo scheduler, which is the main engine of a conducting system, - must be improved. V.P. 
solves this problem by introducing heuristics of conducting - an orchestra in detecting beats, applying rules regarding the tempo expression - in a bar, etc. We confirmed with users that the system realized a high "following" - performance and had musical persuasiveness. ' - address: 'Sydney, Australia' - author: 'Baba, Takashi and Hashida, Mitsuyo and Katayose, Haruhiro' - bibtex: "@inproceedings{Baba2010,\n abstract = {''VirtualPhilharmony'' (V.P.) is\ - \ a conducting interface that enables users to perform expressive music with conducting\ - \ action. Several previously developed conducting interfaces do not satisfy users\ - \ who have conducting experience because the feedback from the conducting action\ - \ does not always correspond with a natural performance. The tempo scheduler,\ - \ which is the main engine of a conducting system, must be improved. V.P. solves\ - \ this problem by introducing heuristics of conducting an orchestra in detecting\ - \ beats, applying rules regarding the tempo expression in a bar, etc. We confirmed\ - \ with users that the system realized a high \"following\" performance and had\ - \ musical persuasiveness. },\n address = {Sydney, Australia},\n author = {Baba,\ - \ Takashi and Hashida, Mitsuyo and Katayose, Haruhiro},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177715},\n issn = {2220-4806},\n keywords = {Conducting\ - \ system, heuristics, sensor, template.},\n pages = {263--270},\n title = {''VirtualPhilharmony'':\ - \ A Conducting System with Heuristics of Conducting an Orchestra},\n url = {http://www.nime.org/proceedings/2010/nime2010_263.pdf},\n\ - \ year = {2010}\n}\n" + ID: Hattwick2012a + abstract: "The Physical Computing Ensemble was created in order to determine the\ + \ viability of an approach to musical performance which focuses on the relationships\ + \ and interactions of the performers. 
Three performance systems utilizing gestural\ + \ controllers were designed and implemented, each with a different strategy for\ + \ performer interaction.\nThese strategies took advantage of the opportunities\ + \ for collaborative performance inherent in digital musical instruments due to\ + \ their networking abilities and reconfigurable software. These characteristics\ + \ allow for the easy implementation of varying approaches to collaborative performance.\ + \ Ensembles who utilize digital musical instruments provide a fertile environment\ + \ for the design, testing, and utilization of collaborative performance systems.\n\ + The three strategies discussed in this paper are the parameterization of musical\ + \ elements, turn-based collaborative control of sound, and the interaction of\ + \ musical systems created by multiple performers. Design principles, implementation,\ + \ and a performance using these strategies are discussed, and the conclusion is\ + \ drawn that performer interaction and collaboration as a primary focus for system\ + \ design, composition, and performance is viable." + address: 'Ann Arbor, Michigan' + author: Ian Hattwick and Kojiro Umezaki + bibtex: "@inproceedings{Hattwick2012a,\n abstract = {The Physical Computing Ensemble\ + \ was created in order to determine the viability of an approach to musical performance\ + \ which focuses on the relationships and interactions of the performers. Three\ + \ performance systems utilizing gestural controllers were designed and implemented,\ + \ each with a different strategy for performer interaction.\nThese strategies\ + \ took advantage of the opportunities for collaborative performance inherent in\ + \ digital musical instruments due to their networking abilities and reconfigurable\ + \ software. These characteristics allow for the easy implementation of varying\ + \ approaches to collaborative performance. 
Ensembles who utilize digital musical\ + \ instruments provide a fertile environment for the design, testing, and utilization\ + \ of collaborative performance systems.\nThe three strategies discussed in this\ + \ paper are the parameterization of musical elements, turn-based collaborative\ + \ control of sound, and the interaction of musical systems created by multiple\ + \ performers. Design principles, implementation, and a performance using these\ + \ strategies are discussed, and the conclusion is drawn that performer interaction\ + \ and collaboration as a primary focus for system design, composition, and performance\ + \ is viable.},\n address = {Ann Arbor, Michigan},\n author = {Ian Hattwick and\ + \ Kojiro Umezaki},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178279},\n\ + \ issn = {2220-4806},\n keywords = {Collaborative performance, interaction, digital\ + \ musical instruments, gestural controller, digital music ensemble, Wii},\n publisher\ + \ = {University of Michigan},\n title = {Approaches to Interaction in a Digital\ + \ Music Ensemble},\n url = {http://www.nime.org/proceedings/2012/nime2012_153.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177715 + doi: 10.5281/zenodo.1178279 issn: 2220-4806 - keywords: 'Conducting system, heuristics, sensor, template.' 
- pages: 263--270 - title: '''''VirtualPhilharmony'''': A Conducting System with Heuristics of Conducting - an Orchestra' - url: http://www.nime.org/proceedings/2010/nime2010_263.pdf - year: 2010 + keywords: 'Collaborative performance, interaction, digital musical instruments, + gestural controller, digital music ensemble, Wii' + publisher: University of Michigan + title: Approaches to Interaction in a Digital Music Ensemble + url: http://www.nime.org/proceedings/2012/nime2012_153.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Grosshauser2010 - abstract: 'Pressure, motion, and gesture are important parameters inmusical instrument - playing. Pressure sensing allows to interpret complex hidden forces, which appear - during playinga musical instrument. The combination of our new sensorsetup with - pattern recognition techniques like the lately developed ordered means models - allows fast and precise recognition of highly skilled playing techniques. This - includes leftand right hand analysis as well as a combination of both. Inthis - paper we show bow position recognition for string instruments by means of support - vector regression machineson the right hand finger pressure, as well as bowing - recognition and inaccurate playing detection with ordered meansmodels. We also - introduce a new left hand and chin pressuresensing method for coordination and - position change analysis. Our methods in combination with our audio, video,and - gesture recording software can be used for teachingand exercising. Especially - studies of complex movementsand finger force distribution changes can benefit - from suchan approach. Practical applications include the recognitionof inaccuracy, - cramping, or malposition, and, last but notleast, the development of augmented - instruments and newplaying techniques.' 
- address: 'Sydney, Australia' - author: 'Großhauser, Tobias and Großekathöfer, Ulf and Hermann, Thomas' - bibtex: "@inproceedings{Grosshauser2010,\n abstract = {Pressure, motion, and gesture\ - \ are important parameters inmusical instrument playing. Pressure sensing allows\ - \ to interpret complex hidden forces, which appear during playinga musical instrument.\ - \ The combination of our new sensorsetup with pattern recognition techniques like\ - \ the lately developed ordered means models allows fast and precise recognition\ - \ of highly skilled playing techniques. This includes leftand right hand analysis\ - \ as well as a combination of both. Inthis paper we show bow position recognition\ - \ for string instruments by means of support vector regression machineson the\ - \ right hand finger pressure, as well as bowing recognition and inaccurate playing\ - \ detection with ordered meansmodels. We also introduce a new left hand and chin\ - \ pressuresensing method for coordination and position change analysis. Our methods\ - \ in combination with our audio, video,and gesture recording software can be used\ - \ for teachingand exercising. Especially studies of complex movementsand finger\ - \ force distribution changes can benefit from suchan approach. 
Practical applications\ - \ include the recognitionof inaccuracy, cramping, or malposition, and, last but\ - \ notleast, the development of augmented instruments and newplaying techniques.},\n\ - \ address = {Sydney, Australia},\n author = {Gro{\\ss}hauser, Tobias and Gro{\\\ - ss}ekath\\\"{o}fer, Ulf and Hermann, Thomas},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177779},\n issn = {2220-4806},\n keywords = {left hand,nime10,ordered\ - \ means models,pressure,sensor,strings},\n pages = {271--276},\n title = {New\ - \ Sensors and Pattern Recognition Techniques for String Instruments},\n url =\ - \ {http://www.nime.org/proceedings/2010/nime2010_271.pdf},\n year = {2010}\n}\n" + ID: Vigliensoni2012 + abstract: 'This paper presents a comparison of three-dimensional (3D) position tracking + systems in terms of some of their performance parameters such as static accuracy + and precision, update rate, and shape of the space they sense. The underlying + concepts and characteristics of position tracking tech-nologies are reviewed, + and four position tracking systems (Vicon, Polhemus, Kinect, and Gametrak), based + on dif-ferent technologies, are empirically compared according to their performance + parameters and technical specifications. Our results show that, overall, the Vicon + was the position tracker with the best performance.' + address: 'Ann Arbor, Michigan' + author: Gabriel Vigliensoni and Marcelo M. Wanderley + bibtex: "@inproceedings{Vigliensoni2012,\n abstract = {This paper presents a comparison\ + \ of three-dimensional (3D) position tracking systems in terms of some of their\ + \ performance parameters such as static accuracy and precision, update rate, and\ + \ shape of the space they sense. 
The underlying concepts and characteristics of\ + \ position tracking tech-nologies are reviewed, and four position tracking systems\ + \ (Vicon, Polhemus, Kinect, and Gametrak), based on dif-ferent technologies, are\ + \ empirically compared according to their performance parameters and technical\ + \ specifications. Our results show that, overall, the Vicon was the position tracker\ + \ with the best performance.},\n address = {Ann Arbor, Michigan},\n author = {Gabriel\ + \ Vigliensoni and Marcelo M. Wanderley},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178445},\n\ + \ issn = {2220-4806},\n keywords = {Position tracker, comparison, touch-less,\ + \ gestural control},\n publisher = {University of Michigan},\n title = {A Quantitative\ + \ Comparison of Position Trackers for the Development of a Touch-less Musical\ + \ Interface},\n url = {http://www.nime.org/proceedings/2012/nime2012_155.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177779 + doi: 10.5281/zenodo.1178445 issn: 2220-4806 - keywords: 'left hand,nime10,ordered means models,pressure,sensor,strings' - pages: 271--276 - title: New Sensors and Pattern Recognition Techniques for String Instruments - url: http://www.nime.org/proceedings/2010/nime2010_271.pdf - year: 2010 + keywords: 'Position tracker, comparison, touch-less, gestural control' + publisher: University of Michigan + title: A Quantitative Comparison of Position Trackers for the Development of a Touch-less + Musical Interface + url: http://www.nime.org/proceedings/2012/nime2012_155.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Hahnel2010b - abstract: 'As one of the main expressive feature in music, articulationaffects a - wide range of tone attributes. Based on experimental recordings we analyzed human - articulation in the lateBaroque style. 
The results are useful for both the understanding - of historically informed performance practices andfurther progress in synthetic - performance generation. Thispaper reports of our findings and the implementation - in aperformance system. Because of its flexibility and universality the system - allows more than Baroque articulation.' - address: 'Sydney, Australia' - author: 'Hähnel, Tilo and Berndt, Axel' - bibtex: "@inproceedings{Hahnel2010b,\n abstract = {As one of the main expressive\ - \ feature in music, articulationaffects a wide range of tone attributes. Based\ - \ on experimental recordings we analyzed human articulation in the lateBaroque\ - \ style. The results are useful for both the understanding of historically informed\ - \ performance practices andfurther progress in synthetic performance generation.\ - \ Thispaper reports of our findings and the implementation in aperformance system.\ - \ Because of its flexibility and universality the system allows more than Baroque\ - \ articulation.},\n address = {Sydney, Australia},\n author = {H\\''{a}hnel, Tilo\ - \ and Berndt, Axel},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177789},\n\ - \ issn = {2220-4806},\n keywords = {Expressive Performance, Articulation, Historically\ - \ Informed Performance},\n pages = {277--282},\n title = {Expressive Articulation\ - \ for Synthetic Music Performances},\n url = {http://www.nime.org/proceedings/2010/nime2010_277.pdf},\n\ - \ year = {2010}\n}\n" + ID: Marier2012 + abstract: 'A new method for interpolating between presets is described. The interpolation + algorithm called Intersecting N-Spheres Interpolation is simple to compute and + its generalization to higher dimensions is straightforward. The current imple-mentation + in the SuperCollider environment is presented as a tool that eases the design + of many-to-many mappings for musical interfaces. 
Examples of its uses, including + such mappings in conjunction with a musical interface called the sponge, are given + and discussed.' + address: 'Ann Arbor, Michigan' + author: Martin Marier + bibtex: "@inproceedings{Marier2012,\n abstract = {A new method for interpolating\ + \ between presets is described. The interpolation algorithm called Intersecting\ + \ N-Spheres Interpolation is simple to compute and its generalization to higher\ + \ dimensions is straightforward. The current imple-mentation in the SuperCollider\ + \ environment is presented as a tool that eases the design of many-to-many mappings\ + \ for musical interfaces. Examples of its uses, including such mappings in conjunction\ + \ with a musical interface called the sponge, are given and discussed.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Martin Marier},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178343},\n issn = {2220-4806},\n keywords = {Mapping,\ + \ Preset, Interpolation, Sponge, SuperCollider},\n publisher = {University of\ + \ Michigan},\n title = {Designing Mappings for Musical Interfaces Using Preset\ + \ Interpolation},\n url = {http://www.nime.org/proceedings/2012/nime2012_159.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177789 + doi: 10.5281/zenodo.1178343 issn: 2220-4806 - keywords: 'Expressive Performance, Articulation, Historically Informed Performance' - pages: 277--282 - title: Expressive Articulation for Synthetic Music Performances - url: http://www.nime.org/proceedings/2010/nime2010_277.pdf - year: 2010 + keywords: 'Mapping, Preset, Interpolation, Sponge, SuperCollider' + publisher: University of Michigan + title: Designing Mappings for Musical Interfaces Using Preset Interpolation + url: http://www.nime.org/proceedings/2012/nime2012_159.pdf + year: 2012 - ENTRYTYPE: 
inproceedings - ID: Brown2010 - abstract: 'Generative music systems can be played by musicians who manipulate the - values of algorithmic parameters, and their datacentric nature provides an opportunity - for coordinated interaction amongst a group of systems linked over IP networks; - a practice we call Network Jamming. This paper outlines the characteristics of - this networked performance practice and discusses the types of mediated musical - relationships and ensemble configurations that can arise. We have developed and - tested the jam2jam network jamming software over recent years. We describe this - system, draw from our experiences with it, and use it to illustrate some characteristics - of Network Jamming.' - address: 'Sydney, Australia' - author: 'Brown, Andrew R.' - bibtex: "@inproceedings{Brown2010,\n abstract = {Generative music systems can be\ - \ played by musicians who manipulate the values of algorithmic parameters, and\ - \ their datacentric nature provides an opportunity for coordinated interaction\ - \ amongst a group of systems linked over IP networks; a practice we call Network\ - \ Jamming. This paper outlines the characteristics of this networked performance\ - \ practice and discusses the types of mediated musical relationships and ensemble\ - \ configurations that can arise. We have developed and tested the jam2jam network\ - \ jamming software over recent years. 
We describe this system, draw from our experiences\ - \ with it, and use it to illustrate some characteristics of Network Jamming.},\n\ - \ address = {Sydney, Australia},\n author = {Brown, Andrew R.},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177723},\n issn = {2220-4806},\n keywords = {collaborative,ensemble,generative,interaction,network,nime10},\n\ - \ pages = {283--286},\n title = {Network Jamming : Distributed Performance using\ - \ Generative Music},\n url = {http://www.nime.org/proceedings/2010/nime2010_283.pdf},\n\ - \ year = {2010}\n}\n" + ID: Jensenius2012 + abstract: 'We report on the Music Ball Project, a longterm, exploratory project + focused on creating novel instruments/controllers with a spherical shape as the + common denominator. Besides a simple and attractive geometrical shape, balls afford + many different types of use, including play. This has made our music balls popular + among widely different groups of people, from toddlers to seniors, including those + that would not otherwise engage with a musical instrument. The paper summarises + our experience of designing, constructing and using a number of music balls of + various sizes and with different types of sound-producing elements.' + address: 'Ann Arbor, Michigan' + author: 'Jensenius, Alexander Refsum and Voldsund, Arve' + bibtex: "@inproceedings{Jensenius2012,\n abstract = {We report on the Music Ball\ + \ Project, a longterm, exploratory project focused on creating novel instruments/controllers\ + \ with a spherical shape as the common denominator. Besides a simple and attractive\ + \ geometrical shape, balls afford many different types of use, including play.\ + \ This has made our music balls popular among widely different groups of people,\ + \ from toddlers to seniors, including those that would not otherwise engage with\ + \ a musical instrument. 
The paper summarises our experience of designing, constructing\ + \ and using a number of music balls of various sizes and with different types\ + \ of sound-producing elements.},\n address = {Ann Arbor, Michigan},\n author =\ + \ {Jensenius, Alexander Refsum and Voldsund, Arve},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1180579},\n issn = {2220-4806},\n keywords = {music balls,\ + \ instruments, controllers, inexpensive},\n publisher = {University of Michigan},\n\ + \ title = {The Music Ball Project: Concept, Design, Development, Performance},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_161.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177723 + doi: 10.5281/zenodo.1180579 issn: 2220-4806 - keywords: collaborative,ensemble,generative,interaction,network,nime10 - pages: 283--286 - title: 'Network Jamming : Distributed Performance using Generative Music' - url: http://www.nime.org/proceedings/2010/nime2010_283.pdf - year: 2010 + keywords: 'music balls, instruments, controllers, inexpensive' + publisher: University of Michigan + title: 'The Music Ball Project: Concept, Design, Development, Performance' + url: http://www.nime.org/proceedings/2012/nime2012_161.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Frounberg2010 - abstract: 'The paper reports on the development of prototypes of glassinstruments. - The focus has been on developing acousticinstruments specifically designed for - electronic treatment,and where timbral qualities have had priority over pitch.The - paper starts with a brief historical overview of glassinstruments and their artistic - use. Then follows an overviewof the glass blowing process. Finally the musical - use of theinstruments is discussed.' 
- address: 'Sydney, Australia' - author: 'Frounberg, Ivar and Innervik, Kjell Tore and Jensenius, Alexander R.' - bibtex: "@inproceedings{Frounberg2010,\n abstract = {The paper reports on the development\ - \ of prototypes of glassinstruments. The focus has been on developing acousticinstruments\ - \ specifically designed for electronic treatment,and where timbral qualities have\ - \ had priority over pitch.The paper starts with a brief historical overview of\ - \ glassinstruments and their artistic use. Then follows an overviewof the glass\ - \ blowing process. Finally the musical use of theinstruments is discussed.},\n\ - \ address = {Sydney, Australia},\n author = {Frounberg, Ivar and Innervik, Kjell\ - \ Tore and Jensenius, Alexander R.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177773},\n\ - \ issn = {2220-4806},\n keywords = {glass instruments,nime,nime10,performance\ - \ practice},\n pages = {287--290},\n title = {Glass Instruments -- From Pitch\ - \ to Timbre},\n url = {http://www.nime.org/proceedings/2010/nime2010_287.pdf},\n\ - \ year = {2010}\n}\n" + ID: Nesfield2012 + abstract: 'A general strategy for encouraging embodied engagement within musical + interface design is introduced. A pair of ex-ample implementations of this strategy + are described, one tangible and one graphical. As part of a potentially larger + set within our general approach, two separate relationships are described termed + `decay and contribution'' and `instability and adjustment'', which are heavily + dependent on the action requirements and timeliness of the interaction. By suggesting + this process occurs on a timescale of less than one second it is hoped attentiveness + and engagement can be en-couraged to the possible benefit of future developments + in digital musical instrument design.' 
+ address: 'Ann Arbor, Michigan' + author: James Nesfield + bibtex: "@inproceedings{Nesfield2012,\n abstract = {A general strategy for encouraging\ + \ embodied engagement within musical interface design is introduced. A pair of\ + \ ex-ample implementations of this strategy are described, one tangible and one\ + \ graphical. As part of a potentially larger set within our general approach,\ + \ two separate relationships are described termed `decay and contribution' and\ + \ `instability and adjustment', which are heavily dependent on the action requirements\ + \ and timeliness of the interaction. By suggesting this process occurs on a timescale\ + \ of less than one second it is hoped attentiveness and engagement can be en-couraged\ + \ to the possible benefit of future developments in digital musical instrument\ + \ design.},\n address = {Ann Arbor, Michigan},\n author = {James Nesfield},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1180549},\n issn = {2220-4806},\n\ + \ keywords = {engagement, embodiment, flow, decay, instability, design, NIME},\n\ + \ publisher = {University of Michigan},\n title = {Strategies for Engagement in\ + \ Computer-Mediated Musical Performance},\n url = {http://www.nime.org/proceedings/2012/nime2012_162.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177773 + doi: 10.5281/zenodo.1180549 issn: 2220-4806 - keywords: 'glass instruments,nime,nime10,performance practice' - pages: 287--290 - title: Glass Instruments -- From Pitch to Timbre - url: http://www.nime.org/proceedings/2010/nime2010_287.pdf - year: 2010 + keywords: 'engagement, embodiment, flow, decay, instability, design, NIME' + publisher: University of Michigan + title: Strategies for Engagement in Computer-Mediated Musical Performance + url: 
http://www.nime.org/proceedings/2012/nime2012_162.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Kiefer2010 - abstract: 'Input devices for controlling music software can benefit fromexploiting - the use of perceptual-motor skill in interaction.The project described here is - a new musical controller, designed with the aim of enabling intuitive and nuanced - interaction through direct physical manipulation of malleablematerial.The controller - is made from conductive foam. This foamchanges electrical resistance when deformed; - the controllerworks by measuring resistance at multiple points in a single piece - of foam in order to track its shape. These measurements are complex and interdependent - so an echo statenetwork, a form of recurrent neural network, is employed totranslate - the sensor readings into usable control data.A cube shaped controller was built - and evaluated in thecontext of the haptic exploration of sound synthesis parameter - spaces. Eight participants experimented with the controller and were interviewed - about their experiences. Thecontroller achieves its aim of enabling intuitive - interaction,but in terms of nuanced interaction, accuracy and repeatability were - issues for some participants. It''s not clear fromthe short evaluation study whether - these issues would improve with practice, a longitudinal study that gives musicians - time to practice and find the creative limitations ofthe controller would help - to evaluate this fully.The evaluation highlighted interesting issues concerningthe - high level nature of malleable control and different approaches to sonic exploration.' 
- address: 'Sydney, Australia' - author: 'Kiefer, Chris' - bibtex: "@inproceedings{Kiefer2010,\n abstract = {Input devices for controlling\ - \ music software can benefit fromexploiting the use of perceptual-motor skill\ - \ in interaction.The project described here is a new musical controller, designed\ - \ with the aim of enabling intuitive and nuanced interaction through direct physical\ - \ manipulation of malleablematerial.The controller is made from conductive foam.\ - \ This foamchanges electrical resistance when deformed; the controllerworks by\ - \ measuring resistance at multiple points in a single piece of foam in order to\ - \ track its shape. These measurements are complex and interdependent so an echo\ - \ statenetwork, a form of recurrent neural network, is employed totranslate the\ - \ sensor readings into usable control data.A cube shaped controller was built\ - \ and evaluated in thecontext of the haptic exploration of sound synthesis parameter\ - \ spaces. Eight participants experimented with the controller and were interviewed\ - \ about their experiences. Thecontroller achieves its aim of enabling intuitive\ - \ interaction,but in terms of nuanced interaction, accuracy and repeatability\ - \ were issues for some participants. 
It's not clear fromthe short evaluation study\ - \ whether these issues would improve with practice, a longitudinal study that\ - \ gives musicians time to practice and find the creative limitations ofthe controller\ - \ would help to evaluate this fully.The evaluation highlighted interesting issues\ - \ concerningthe high level nature of malleable control and different approaches\ - \ to sonic exploration.},\n address = {Sydney, Australia},\n author = {Kiefer,\ - \ Chris},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177823},\n issn = {2220-4806},\n\ - \ keywords = {Musical Controller, Reservoir Computing, Human Computer Interaction,\ - \ Tangible User Interface, Evaluation},\n pages = {291--296},\n title = {A Malleable\ - \ Interface for Sonic Exploration},\n url = {http://www.nime.org/proceedings/2010/nime2010_291.pdf},\n\ - \ year = {2010}\n}\n" + ID: Astrinaki2012 + abstract: 'In this paper, we describe our pioneering work in developing speech synthesis + beyond the Text-To-Speech paradigm. We introduce tangible speech synthesis as + an alternate way of envisioning how artificial speech content can be produced. + Tangible speech synthesis refers to the ability, for a given system, to provide + some physicality and interactivity to important speech production parameters. + We present MAGE, our new software platform for high-quality reactive speech synthesis, + based on statistical parametric modeling and more particularly hidden Markov models. + We also introduce a new HandSketch-based musical instrument. This instrument brings + pen and posture based interaction on the top of MAGE, and demonstrates a first + proof of concept.' 
+ address: 'Ann Arbor, Michigan' + author: Maria Astrinaki and Nicolas d'Alessandro and Thierry Dutoit + bibtex: "@inproceedings{Astrinaki2012,\n abstract = {In this paper, we describe\ + \ our pioneering work in developing speech synthesis beyond the Text-To-Speech\ + \ paradigm. We introduce tangible speech synthesis as an alternate way of envisioning\ + \ how artificial speech content can be produced. Tangible speech synthesis refers\ + \ to the ability, for a given system, to provide some physicality and interactivity\ + \ to important speech production parameters. We present MAGE, our new software\ + \ platform for high-quality reactive speech synthesis, based on statistical parametric\ + \ modeling and more particularly hidden Markov models. We also introduce a new\ + \ HandSketch-based musical instrument. This instrument brings pen and posture\ + \ based interaction on the top of MAGE, and demonstrates a first proof of concept.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Maria Astrinaki and Nicolas d'Alessandro\ + \ and Thierry Dutoit},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178207},\n\ + \ issn = {2220-4806},\n keywords = {speech synthesis, Hidden Markov Models, tangible\ + \ interaction, software library, MAGE, HTS, performative},\n publisher = {University\ + \ of Michigan},\n title = {MAGE --A Platform for Tangible Speech Synthesis},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_164.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177823 + doi: 10.5281/zenodo.1178207 issn: 2220-4806 - keywords: 'Musical Controller, Reservoir Computing, Human Computer Interaction, - Tangible User Interface, Evaluation' - pages: 291--296 - title: A Malleable Interface for Sonic Exploration - url: http://www.nime.org/proceedings/2010/nime2010_291.pdf - year: 
2010 + keywords: 'speech synthesis, Hidden Markov Models, tangible interaction, software + library, MAGE, HTS, performative' + publisher: University of Michigan + title: MAGE --A Platform for Tangible Speech Synthesis + url: http://www.nime.org/proceedings/2012/nime2012_164.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Zappi2010 - abstract: 'The number of artists who express themselves through music in an unconventional - way is constantly growing. Thistrend strongly depends on the high diffusion of - laptops,which proved to be powerful and flexible musical devices.However laptops - still lack in flexible interface, specificallydesigned for music creation in live - and studio performances.To resolve this issue many controllers have been developed,taking - into account not only the performer''s needs andhabits during music creation, - but also the audience desire tovisually understand how performer''s gestures are - linked tothe way music is made. According to the common need ofadaptable visual - interface to manipulate music, in this paper we present a custom tridimensional - controller, based onOpen Sound Control protocol and completely designed towork - inside Virtual Reality: simple geometrical shapes canbe created to directly control - loop triggering and parametermodification, just using free hand interaction.' - address: 'Sydney, Australia' - author: 'Zappi, Victor and Brogni, Andrea and Caldwell, Darwin' - bibtex: "@inproceedings{Zappi2010,\n abstract = {The number of artists who express\ - \ themselves through music in an unconventional way is constantly growing. 
Thistrend\ - \ strongly depends on the high diffusion of laptops,which proved to be powerful\ - \ and flexible musical devices.However laptops still lack in flexible interface,\ - \ specificallydesigned for music creation in live and studio performances.To resolve\ - \ this issue many controllers have been developed,taking into account not only\ - \ the performer's needs andhabits during music creation, but also the audience\ - \ desire tovisually understand how performer's gestures are linked tothe way music\ - \ is made. According to the common need ofadaptable visual interface to manipulate\ - \ music, in this paper we present a custom tridimensional controller, based onOpen\ - \ Sound Control protocol and completely designed towork inside Virtual Reality:\ - \ simple geometrical shapes canbe created to directly control loop triggering\ - \ and parametermodification, just using free hand interaction.},\n address = {Sydney,\ - \ Australia},\n author = {Zappi, Victor and Brogni, Andrea and Caldwell, Darwin},\n\ + ID: tKlooster2012 + abstract: 'This paper describes the development of the Emotion Light, an interactive + biofeedback artwork where the user listens to a piece of electronic music whilst + holding a semi-transparent sculpture that tracks his/her bodily responses and + translates these into changing light patterns that emerge from the sculpture. + The context of this work is briefly described and the questions it poses are derived + from interviews held with audience members.' + address: 'Ann Arbor, Michigan' + author: Adinda Rosa van 't Klooster + bibtex: "@inproceedings{tKlooster2012,\n abstract = {This paper describes the development\ + \ of the Emotion Light, an interactive biofeedback artwork where the user listens\ + \ to a piece of electronic music whilst holding a semi-transparent sculpture that\ + \ tracks his/her bodily responses and translates these into changing light patterns\ + \ that emerge from the sculpture. 
The context of this work is briefly described\ + \ and the questions it poses are derived from interviews held with audience members.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Adinda Rosa van 't Klooster},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177931},\n issn = {2220-4806},\n\ - \ keywords = {Glove device, Music controller, Virtual Reality, OSC, con- trol\ - \ mapping},\n pages = {297--302},\n title = {OSC Virtual Controller},\n url =\ - \ {http://www.nime.org/proceedings/2010/nime2010_297.pdf},\n year = {2010}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1178307},\n issn = {2220-4806},\n\ + \ keywords = {Interactive biofeedback artwork, music and emotion, novel interfaces,\ + \ practice based research, bodily response, heart rate, biosignals, affective\ + \ computing, aesthetic interaction, mediating body, biology inspired system},\n\ + \ publisher = {University of Michigan},\n title = {The body as mediator of music\ + \ in the Emotion Light},\n url = {http://www.nime.org/proceedings/2012/nime2012_167.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177931 + doi: 10.5281/zenodo.1178307 issn: 2220-4806 - keywords: 'Glove device, Music controller, Virtual Reality, OSC, con- trol mapping' - pages: 297--302 - title: OSC Virtual Controller - url: http://www.nime.org/proceedings/2010/nime2010_297.pdf - year: 2010 + keywords: 'Interactive biofeedback artwork, music and emotion, novel interfaces, + practice based research, bodily response, heart rate, biosignals, affective computing, + aesthetic interaction, mediating body, biology inspired system' + publisher: University of Michigan + title: The body as mediator of music in the Emotion Light + url: http://www.nime.org/proceedings/2012/nime2012_167.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: 
Dimitrov2010 - abstract: 'The sound card anno 2010, is an ubiquitous part of almostany personal - computing system; what was once considereda high-end, CD-quality audio fidelity, - is today found in mostcommon sound cards. The increased presence of multichannel - devices, along with the high sampling frequency, makesthe sound card desirable - as a generic interface for acquisition of analog signals in prototyping of sensor-based - musicinterfaces. However, due to the need for coupling capacitorsat a sound card''s - inputs and outputs, the use as a genericsignal interface of a sound card is limited - to signals not carrying information in a constant DC component. Through arevisit - of a card design for the (now defunct) ISA bus, thispaper proposes use of analog - gates for bypassing the DCfiltering input sections, controllable from software - --- therebyallowing for arbitrary choice by the user, if a soundcardinput channel - is to be used as a generic analog-to-digitalsensor interface. Issues regarding - use of obsolete technology and educational aspects are discussed as well.' - address: 'Sydney, Australia' - author: 'Dimitrov, Smilen' - bibtex: "@inproceedings{Dimitrov2010,\n abstract = {The sound card anno 2010, is\ - \ an ubiquitous part of almostany personal computing system; what was once considereda\ - \ high-end, CD-quality audio fidelity, is today found in mostcommon sound cards.\ - \ The increased presence of multichannel devices, along with the high sampling\ - \ frequency, makesthe sound card desirable as a generic interface for acquisition\ - \ of analog signals in prototyping of sensor-based musicinterfaces. However, due\ - \ to the need for coupling capacitorsat a sound card's inputs and outputs, the\ - \ use as a genericsignal interface of a sound card is limited to signals not carrying\ - \ information in a constant DC component. 
Through arevisit of a card design for\ - \ the (now defunct) ISA bus, thispaper proposes use of analog gates for bypassing\ - \ the DCfiltering input sections, controllable from software --- therebyallowing\ - \ for arbitrary choice by the user, if a soundcardinput channel is to be used\ - \ as a generic analog-to-digitalsensor interface. Issues regarding use of obsolete\ - \ technology and educational aspects are discussed as well.},\n address = {Sydney,\ - \ Australia},\n author = {Dimitrov, Smilen},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177755},\n issn = {2220-4806},\n keywords = {dc,isa,nime10,sensors,soundcard},\n\ - \ pages = {303--308},\n title = {Extending the Soundcard for Use with Generic\ - \ {DC} Sensors Demonstrated by Revisiting a Vintage ISA Design},\n url = {http://www.nime.org/proceedings/2010/nime2010_303.pdf},\n\ - \ year = {2010}\n}\n" + ID: Bergsland2012 + abstract: "As a part of the research project Voice Meetings, a solo live-electronic\ + \ vocal performance was presented for 63 students. Through a mixed method approach\ + \ applying both written and oral response, feedback from one blindfolded and one\ + \ seeing audience group was collected and analyzed.\nThere were marked differences\ + \ between the groups regarding focus, in that the participants in blindfolded\ + \ group tended to focus on fewer aspects, have a heightened focus and be less\ + \ distracted than the seeing group. The seeing group, on its part, focused more\ + \ on the technological instruments applied in the performance, the performer herself\ + \ and her actions. This study also shows that there were only minor differences\ + \ between the groups regarding the experience of skill and control, and argues\ + \ that this observation can be explained by earlier research on skill in NIMEs." 
+ address: 'Ann Arbor, Michigan' + author: Andreas Bergsland and Tone Åse + bibtex: "@inproceedings{Bergsland2012,\n abstract = {As a part of the research project\ + \ Voice Meetings, a solo live-electronic vocal performance was presented for 63\ + \ students. Through a mixed method approach applying both written and oral response,\ + \ feedback from one blindfolded and one seeing audience group was collected and\ + \ analyzed.\nThere were marked differences between the groups regarding focus,\ + \ in that the participants in blindfolded group tended to focus on fewer aspects,\ + \ have a heightened focus and be less distracted than the seeing group. The seeing\ + \ group, on its part, focused more on the technological instruments applied in\ + \ the performance, the performer herself and her actions. This study also shows\ + \ that there were only minor differences between the groups regarding the experience\ + \ of skill and control, and argues that this observation can be explained by earlier\ + \ research on skill in NIMEs.},\n address = {Ann Arbor, Michigan},\n author =\ + \ {Andreas Bergsland and Tone {\\AA}se},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178213},\n\ + \ issn = {2220-4806},\n keywords = {Performance, audience reception, acousmatic\ + \ listening, live-electronics, voice, qualitative research},\n publisher = {University\ + \ of Michigan},\n title = {Using a seeing/blindfolded paradigm to study audience\ + \ experiences of live-electronic performances with voice},\n url = {http://www.nime.org/proceedings/2012/nime2012_168.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177755 + doi: 10.5281/zenodo.1178213 issn: 2220-4806 - keywords: dc,isa,nime10,sensors,soundcard - pages: 303--308 - title: Extending the Soundcard for Use with Generic DC Sensors 
Demonstrated by Revisiting - a Vintage ISA Design - url: http://www.nime.org/proceedings/2010/nime2010_303.pdf - year: 2010 + keywords: 'Performance, audience reception, acousmatic listening, live-electronics, + voice, qualitative research' + publisher: University of Michigan + title: Using a seeing/blindfolded paradigm to study audience experiences of live-electronic + performances with voice + url: http://www.nime.org/proceedings/2012/nime2012_168.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: LeGroux2010 - abstract: 'Most new digital musical interfaces have evolved upon theintuitive idea - that there is a causality between sonic outputand physical actions. Nevertheless, - the advent of braincomputer interfaces (BCI) now allows us to directly accesssubjective - mental states and express these in the physicalworld without bodily actions. In - the context of an interactive and collaborative live performance, we propose to - exploit novel brain-computer technologies to achieve unmediated brain control - over music generation and expression.We introduce a general framework for the - generation, synchronization and modulation of musical material from brainsignal - and describe its use in the realization of Xmotion, amultimodal performance for - a "brain quartet".' - address: 'Sydney, Australia' - author: 'Le Groux, Sylvain and Manzolli, Jonatas and Verschure, Paul F. and Marti - Sanchez and Andre Luvizotto and Anna Mura and Aleksander Valjamae and Christoph - Guger and Robert Prueckl and Ulysses Bernardet' - bibtex: "@inproceedings{LeGroux2010,\n abstract = {Most new digital musical interfaces\ - \ have evolved upon theintuitive idea that there is a causality between sonic\ - \ outputand physical actions. Nevertheless, the advent of braincomputer interfaces\ - \ (BCI) now allows us to directly accesssubjective mental states and express these\ - \ in the physicalworld without bodily actions. 
In the context of an interactive\ - \ and collaborative live performance, we propose to exploit novel brain-computer\ - \ technologies to achieve unmediated brain control over music generation and expression.We\ - \ introduce a general framework for the generation, synchronization and modulation\ - \ of musical material from brainsignal and describe its use in the realization\ - \ of Xmotion, amultimodal performance for a \"brain quartet\".},\n address = {Sydney,\ - \ Australia},\n author = {Le Groux, Sylvain and Manzolli, Jonatas and Verschure,\ - \ Paul F. and Marti Sanchez and Andre Luvizotto and Anna Mura and Aleksander Valjamae\ - \ and Christoph Guger and Robert Prueckl and Ulysses Bernardet},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177831},\n issn = {2220-4806},\n keywords = {Brain-computer\ - \ Interface, Biosignals, Interactive Music System, Collaborative Musical Performance},\n\ - \ pages = {309--314},\n title = {Disembodied and Collaborative Musical Interaction\ - \ in the Multimodal Brain Orchestra},\n url = {http://www.nime.org/proceedings/2010/nime2010_309.pdf},\n\ - \ year = {2010}\n}\n" + ID: Lympouridis2012 + abstract: 'Through a series of collaborative research projects usingOrient, a wireless, + inertial sensor-based motion capture system,I have studied the requirements of + musicians, dancers,performers and choreographers and identified various design + strategies for the realization of Whole Body Interactive (WBI)performance systems. + The acquired experience and knowledge led to the design and development of EnActor, + prototypeWhole Body Interaction Design software. 
The software has been realized + as a collection of modules that were proved valuable for the design of interactive + performance systems that are directly controlled by the body.This paper presents + EnActor''s layout as a blueprint for the design and development of more sophisticated + descendants.Complete video archive of my research projects in WBI performance + systems at: http://www.inter-axions.com' + address: 'Ann Arbor, Michigan' + author: Vangelis Lympouridis + bibtex: "@inproceedings{Lympouridis2012,\n abstract = {Through a series of collaborative\ + \ research projects usingOrient, a wireless, inertial sensor-based motion capture\ + \ system,I have studied the requirements of musicians, dancers,performers and\ + \ choreographers and identified various design strategies for the realization\ + \ of Whole Body Interactive (WBI)performance systems. The acquired experience\ + \ and knowledge led to the design and development of EnActor, prototypeWhole Body\ + \ Interaction Design software. 
The software has been realized as a collection\ + \ of modules that were proved valuable for the design of interactive performance\ + \ systems that are directly controlled by the body.This paper presents EnActor's\ + \ layout as a blueprint for the design and development of more sophisticated descendants.Complete\ + \ video archive of my research projects in WBI performance systems at: http://www.inter-axions.com},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Vangelis Lympouridis},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178333},\n issn = {2220-4806},\n keywords\ + \ = {Whole Body Interaction, Motion Capture, Interactive Performance Systems,\ + \ Interaction Design, Software Prototype},\n publisher = {University of Michigan},\n\ + \ title = {EnActor: A Blueprint for a Whole Body Interaction Design Software Platform},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_169.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177831 + doi: 10.5281/zenodo.1178333 issn: 2220-4806 - keywords: 'Brain-computer Interface, Biosignals, Interactive Music System, Collaborative - Musical Performance' - pages: 309--314 - title: Disembodied and Collaborative Musical Interaction in the Multimodal Brain - Orchestra - url: http://www.nime.org/proceedings/2010/nime2010_309.pdf - year: 2010 + keywords: 'Whole Body Interaction, Motion Capture, Interactive Performance Systems, + Interaction Design, Software Prototype' + publisher: University of Michigan + title: 'EnActor: A Blueprint for a Whole Body Interaction Design Software Platform' + url: http://www.nime.org/proceedings/2012/nime2012_169.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Hochenbaum2010a - abstract: 'This paper explores the evolution of collaborative, multi-user, musical - interfaces developed for the 
Bricktable interactive surface. Two key types of - applications are addressed: user interfaces for artistic installation and interfaces - for musical performance. In describing our software, we provide insight on the - methodologies and practicalities of designing interactive musical systems for - tabletop surfaces. Additionally, subtleties of working with custom-designed tabletop - hardware are addressed. ' - address: 'Sydney, Australia' - author: 'Hochenbaum, Jordan and Vallis, Owen and Diakopoulos, Dimitri and Murphy, - Jim and Kapur, Ajay' - bibtex: "@inproceedings{Hochenbaum2010a,\n abstract = {This paper explores the evolution\ - \ of collaborative, multi-user, musical interfaces developed for the Bricktable\ - \ interactive surface. Two key types of applications are addressed: user interfaces\ - \ for artistic installation and interfaces for musical performance. In describing\ - \ our software, we provide insight on the methodologies and practicalities of\ - \ designing interactive musical systems for tabletop surfaces. Additionally, subtleties\ - \ of working with custom-designed tabletop hardware are addressed. },\n address\ - \ = {Sydney, Australia},\n author = {Hochenbaum, Jordan and Vallis, Owen and Diakopoulos,\ - \ Dimitri and Murphy, Jim and Kapur, Ajay},\n booktitle = {Proceedings of the\ + ID: Kim2012 + abstract: 'In this paper we introduce an interactive mobile music performance system + using the digital compass of mobile phones. Compass-based interface can detect + the aiming orientation of performers on stage, allowing us to obtain information + on interactions between performers and use it for both musical mappings and visualizations + on screen for the audience. We document and discuss the result of a compass-based + mobile music performance, Where Are You Standing, and present an algorithm for + a new app to track down the performers'' positions in real-time.' 
+ address: 'Ann Arbor, Michigan' + author: Bongjun Kim and Woon Seung Yeo + bibtex: "@inproceedings{Kim2012,\n abstract = {In this paper we introduce an interactive\ + \ mobile music performance system using the digital compass of mobile phones.\ + \ Compass-based interface can detect the aiming orientation of performers on stage,\ + \ allowing us to obtain information on interactions between performers and use\ + \ it for both musical mappings and visualizations on screen for the audience.\ + \ We document and discuss the result of a compass-based mobile music performance,\ + \ Where Are You Standing, and present an algorithm for a new app to track down\ + \ the performers' positions in real-time.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Bongjun Kim and Woon Seung Yeo},\n booktitle = {Proceedings of the\ \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177807},\n issn = {2220-4806},\n keywords = {Bricktable, Multi-touch\ - \ Interface, Tangible Interface, Generative Music, Music Information Retrieval},\n\ - \ pages = {315--318},\n title = {Designing Expressive Musical Interfaces for Tabletop\ - \ Surfaces},\n url = {http://www.nime.org/proceedings/2010/nime2010_315.pdf},\n\ - \ year = {2010}\n}\n" + \ {10.5281/zenodo.1178303},\n issn = {2220-4806},\n keywords = {Mobile music,\ + \ mobile phone, smartphone, compass, magnetometer, aiming gesture, musical mapping,\ + \ musical sonification},\n publisher = {University of Michigan},\n title = {Interactive\ + \ Mobile Music Performance with Digital Compass},\n url = {http://www.nime.org/proceedings/2012/nime2012_170.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177807 + doi: 10.5281/zenodo.1178303 issn: 2220-4806 - keywords: 'Bricktable, Multi-touch Interface, Tangible Interface, Generative Music, - Music Information Retrieval' - pages: 315--318 - 
title: Designing Expressive Musical Interfaces for Tabletop Surfaces - url: http://www.nime.org/proceedings/2010/nime2010_315.pdf - year: 2010 + keywords: 'Mobile music, mobile phone, smartphone, compass, magnetometer, aiming + gesture, musical mapping, musical sonification' + publisher: University of Michigan + title: Interactive Mobile Music Performance with Digital Compass + url: http://www.nime.org/proceedings/2012/nime2012_170.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Suiter2010 - abstract: 'This paper introduces the concept of composing expressive music using - the principles of Fuzzy Logic. The paper provides a conceptual model of a musical - work which follows compositional decision making processes. Significant features - of this Fuzzy Logic framework are its inclusiveness through the consideration - of all the many and varied musical details, while also incorporating the imprecision - that characterises musical terminology and discourse. A significant attribute - of my Fuzzy Logic method is that it traces the trajectory of all musical details, - since it is both the individual elements and their combination over time which - is significant to the effectiveness of a musical work in achieving its goals. - The goal of this work is to find a set of elements and rules, which will ultimately - enable the construction of a genralised algorithmic compositional system which - can produce expressive music if so desired. ' - address: 'Sydney, Australia' - author: 'Suiter, Wendy' - bibtex: "@inproceedings{Suiter2010,\n abstract = {This paper introduces the concept\ - \ of composing expressive music using the principles of Fuzzy Logic. The paper\ - \ provides a conceptual model of a musical work which follows compositional decision\ - \ making processes. 
Significant features of this Fuzzy Logic framework are its\ - \ inclusiveness through the consideration of all the many and varied musical details,\ - \ while also incorporating the imprecision that characterises musical terminology\ - \ and discourse. A significant attribute of my Fuzzy Logic method is that it traces\ - \ the trajectory of all musical details, since it is both the individual elements\ - \ and their combination over time which is significant to the effectiveness of\ - \ a musical work in achieving its goals. The goal of this work is to find a set\ - \ of elements and rules, which will ultimately enable the construction of a genralised\ - \ algorithmic compositional system which can produce expressive music if so desired.\ - \ },\n address = {Sydney, Australia},\n author = {Suiter, Wendy},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177901},\n issn = {2220-4806},\n keywords\ - \ = {fuzzy logic,music composition,musical expression,nime10},\n pages = {319--322},\n\ - \ title = {Toward Algorithmic Composition of Expression in Music Using Fuzzy Logic},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_319.pdf},\n year = {2010}\n\ + ID: Rotondo2012 + abstract: 'In this paper we explore the concept of instruments which are played + by more than one person, and present two case studies. We designed, built and + performed with Feedbørk, a two-player instrument comprising two iPads which form + a video feedback loop, and Barrel, a nine-player instrument made up of eight Gametrak + controllers fastened to a steel industrial barrel. By splitting the control of + these instruments into distinct but interdependent roles, we allow each individual + to easily play a part while retaining a rich complexity of output for the whole + system. 
We found that the relationships between those roles had a significant + effect on how the players communicated with each other, and on how the performance + was perceived by the audience.' + address: 'Ann Arbor, Michigan' + author: Michael Rotondo and Nick Kruge and Ge Wang + bibtex: "@inproceedings{Rotondo2012,\n abstract = {In this paper we explore the\ + \ concept of instruments which are played by more than one person, and present\ + \ two case studies. We designed, built and performed with Feedb{\\o}rk, a two-player\ + \ instrument comprising two iPads which form a video feedback loop, and Barrel,\ + \ a nine-player instrument made up of eight Gametrak controllers fastened to a\ + \ steel industrial barrel. By splitting the control of these instruments into\ + \ distinct but interdependent roles, we allow each individual to easily play a\ + \ part while retaining a rich complexity of output for the whole system. We found\ + \ that the relationships between those roles had a significant effect on how the\ + \ players communicated with each other, and on how the performance was perceived\ + \ by the audience.},\n address = {Ann Arbor, Michigan},\n author = {Michael Rotondo\ + \ and Nick Kruge and Ge Wang},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180583},\n\ + \ issn = {2220-4806},\n keywords = {Many person musical instruments, cooperative\ + \ music, asymmetric interfaces, transmodal feedback},\n publisher = {University\ + \ of Michigan},\n title = {Many-Person Instruments for Computer Music Performance},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_171.pdf},\n year = {2012}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177901 + doi: 10.5281/zenodo.1180583 issn: 2220-4806 - keywords: 'fuzzy logic,music composition,musical expression,nime10' - pages: 319--322 - title: Toward 
Algorithmic Composition of Expression in Music Using Fuzzy Logic - url: http://www.nime.org/proceedings/2010/nime2010_319.pdf - year: 2010 + keywords: 'Many person musical instruments, cooperative music, asymmetric interfaces, + transmodal feedback' + publisher: University of Michigan + title: Many-Person Instruments for Computer Music Performance + url: http://www.nime.org/proceedings/2012/nime2012_171.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Beilharz2010 - abstract: 'In this paper we examine a wearable sonification and visualisation display - that uses physical analogue visualisation and digital sonification to convey feedback - about the wearer''s activity and environment. Intended to bridge a gap between - art aesthetics, fashionable technologies and informative physical computing, the - user experience evaluation reveals the wearers'' responses and understanding of - a novel medium for wearable expression. The study reveals useful insights for - wearable device design in general and future iterations of this sonification and - visualisation display. ' - address: 'Sydney, Australia' - author: 'Beilharz, Kirsty and Vande Moere, Andrew and Stiel, Barbara and Calo, Claudia - and Tomitsch, Martin and Lombard, Adrian' - bibtex: "@inproceedings{Beilharz2010,\n abstract = {In this paper we examine a wearable\ - \ sonification and visualisation display that uses physical analogue visualisation\ - \ and digital sonification to convey feedback about the wearer's activity and\ - \ environment. Intended to bridge a gap between art aesthetics, fashionable technologies\ - \ and informative physical computing, the user experience evaluation reveals the\ - \ wearers' responses and understanding of a novel medium for wearable expression.\ - \ The study reveals useful insights for wearable device design in general and\ - \ future iterations of this sonification and visualisation display. 
},\n address\ - \ = {Sydney, Australia},\n author = {Beilharz, Kirsty and Vande Moere, Andrew\ - \ and Stiel, Barbara and Calo, Claudia and Tomitsch, Martin and Lombard, Adrian},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177717},\n issn = {2220-4806},\n\ - \ keywords = {Wearable display, sonification, visualisation, design aesthetics,\ - \ physical computing, multimodal expression, bimodal display},\n pages = {323--326},\n\ - \ title = {Expressive Wearable Sonification and Visualisation : Design and Evaluation\ - \ of a Flexible Display},\n url = {http://www.nime.org/proceedings/2010/nime2010_323.pdf},\n\ - \ year = {2010}\n}\n" + ID: Barbosa2012 + abstract: 'The authors propose the development of a more complete Digital Music + Instrument (DMI) evaluation methodology, which provides structured tools for the + incremental development of prototypes based on user feedback. This paper emphasizes + an important but often ignored stakeholder present in the context of musical performance: + the audience. We demonstrate the practical application of an audience focused + methodology through a case study (`Illusio''), discuss the obtained results and + possible improvements for future works.' + address: 'Ann Arbor, Michigan' + author: Jerônimo Barbosa and Filipe Calegario and Verônica Teichrieb and Geber Ramalho + and Patrick McGlynn + bibtex: "@inproceedings{Barbosa2012,\n abstract = {The authors propose the development\ + \ of a more complete Digital Music Instrument (DMI) evaluation methodology, which\ + \ provides structured tools for the incremental development of prototypes based\ + \ on user feedback. This paper emphasizes an important but often ignored stakeholder\ + \ present in the context of musical performance: the audience. 
We demonstrate\ + \ the practical application of an audience focused methodology through a case\ + \ study (`Illusio'), discuss the obtained results and possible improvements for\ + \ future works.},\n address = {Ann Arbor, Michigan},\n author = {Jer{\\^o}nimo\ + \ Barbosa and Filipe Calegario and Ver{\\^o}nica Teichrieb and Geber Ramalho and\ + \ Patrick McGlynn},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178209},\n\ + \ issn = {2220-4806},\n keywords = {Empirical methods, quantitative, usability\ + \ testing and evaluation, digital musical instruments, evaluation methodology,\ + \ Illusio},\n publisher = {University of Michigan},\n title = {Considering Audience's\ + \ View Towards an Evaluation Methodology for Digital Musical Instruments},\n url\ + \ = {http://www.nime.org/proceedings/2012/nime2012_174.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177717 + doi: 10.5281/zenodo.1178209 issn: 2220-4806 - keywords: 'Wearable display, sonification, visualisation, design aesthetics, physical - computing, multimodal expression, bimodal display' - pages: 323--326 - title: 'Expressive Wearable Sonification and Visualisation : Design and Evaluation - of a Flexible Display' - url: http://www.nime.org/proceedings/2010/nime2010_323.pdf - year: 2010 + keywords: 'Empirical methods, quantitative, usability testing and evaluation, digital + musical instruments, evaluation methodology, Illusio' + publisher: University of Michigan + title: Considering Audience's View Towards an Evaluation Methodology for Digital + Musical Instruments + url: http://www.nime.org/proceedings/2012/nime2012_174.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Nugroho2010 - abstract: 'In this paper, we describe the shaping factors, which simplify and help - us understand the multi-dimensional aspects of 
designing Wearable Expressions. - These descriptive shaping factors contribute to both the design and user-experience - evaluation of Wearable Expressions. ' - address: 'Sydney, Australia' - author: 'Nugroho, Jeremiah and Beilharz, Kirsty' - bibtex: "@inproceedings{Nugroho2010,\n abstract = {In this paper, we describe the\ - \ shaping factors, which simplify and help us understand the multi-dimensional\ - \ aspects of designing Wearable Expressions. These descriptive shaping factors\ - \ contribute to both the design and user-experience evaluation of Wearable Expressions.\ - \ },\n address = {Sydney, Australia},\n author = {Nugroho, Jeremiah and Beilharz,\ - \ Kirsty},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177867},\n issn = {2220-4806},\n\ - \ keywords = {Wearable expressions, body, user-centered design.},\n pages = {327--330},\n\ - \ title = {Understanding and Evaluating User Centred Design in Wearable Expressions},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_327.pdf},\n year = {2010}\n\ - }\n" + ID: Beilharz2012 + abstract: 'In site-specific installation or situated media, a significant part of + the "I" in NIME is the environment, the site and the implicit features of site + such as humans, weather, materials, natural acoustics, etc. These could be viewed + as design constraints, or features, even agency determining the outcome of responsive + sound installation works. This paper discusses the notion of interface in public + (especially outdoor) installation, starting with the authors'' Sculpture by the + Sea Windtraces work using this recent experience as the launch-pad, with reference + to ways in which others have approached it (focusing on sensor, weather-activated + outdoor installations in a brief traverse of related cases, e.g. works by Garth + Paine, James Bulley and Daniel Jones, and David Bowen). 
This is a dialogical paper + on the topic of interface and `site'' as the aetiology of interaction/interface/instrument + and its type of response (e.g. to environment and audience). While the focus here + is on outdoor factors (particularly the climatic environment), indoor site-specific + installation also experiences the effects of ambient noise, acoustic context, + and audience as integral agents in the interface and perception of the work, its + musical expression. The way in which features of the situation are integrated + has relevance for others in the NIME community in the design of responsive spaces, + art installation, and large-scale or installed instruments in which users, participants, + acoustics play a significant role.' + address: 'Ann Arbor, Michigan' + author: Kirsty Beilharz and Aengus Martin + bibtex: "@inproceedings{Beilharz2012,\n abstract = {In site-specific installation\ + \ or situated media, a significant part of the \"I\" in NIME is the environment,\ + \ the site and the implicit features of site such as humans, weather, materials,\ + \ natural acoustics, etc. These could be viewed as design constraints, or features,\ + \ even agency determining the outcome of responsive sound installation works.\ + \ This paper discusses the notion of interface in public (especially outdoor)\ + \ installation, starting with the authors' Sculpture by the Sea Windtraces work\ + \ using this recent experience as the launch-pad, with reference to ways in which\ + \ others have approached it (focusing on sensor, weather-activated outdoor installations\ + \ in a brief traverse of related cases, e.g. works by Garth Paine, James Bulley\ + \ and Daniel Jones, and David Bowen). This is a dialogical paper on the topic\ + \ of interface and `site' as the aetiology of interaction/interface/instrument\ + \ and its type of response (e.g. to environment and audience). 
While the focus\ + \ here is on outdoor factors (particularly the climatic environment), indoor site-specific\ + \ installation also experiences the effects of ambient noise, acoustic context,\ + \ and audience as integral agents in the interface and perception of the work,\ + \ its musical expression. The way in which features of the situation are integrated\ + \ has relevance for others in the NIME community in the design of responsive spaces,\ + \ art installation, and large-scale or installed instruments in which users, participants,\ + \ acoustics play a significant role.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Kirsty Beilharz and Aengus Martin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178211},\n\ + \ issn = {2220-4806},\n keywords = {NIME, site-specific installation, outdoor\ + \ sound installation},\n publisher = {University of Michigan},\n title = {The\ + \ `Interface' in Site-Specific Sound Installation},\n url = {http://www.nime.org/proceedings/2012/nime2012_175.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177867 + doi: 10.5281/zenodo.1178211 issn: 2220-4806 - keywords: 'Wearable expressions, body, user-centered design.' 
- pages: 327--330 - title: Understanding and Evaluating User Centred Design in Wearable Expressions - url: http://www.nime.org/proceedings/2010/nime2010_327.pdf - year: 2010 + keywords: 'NIME, site-specific installation, outdoor sound installation' + publisher: University of Michigan + title: The `Interface' in Site-Specific Sound Installation + url: http://www.nime.org/proceedings/2012/nime2012_175.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Park2010 - abstract: 'In this paper, we discuss the musical potential of COMPath --- an online - map based music-making tool --- as a noveland unique interface for interactive - music composition andperformance. COMPath provides an intuitive environmentfor - creative music making by sonification of georeferenceddata. Users can generate - musical events with simple andfamiliar actions on an online map interface; a set - of local information is collected along the user-drawn route andthen interpreted - as sounds of various musical instruments.We discuss the musical interpretation - of routes on a map,review the design and implementation of COMPath, andpresent - selected sonification results with focus on mappingstrategies for map-based composition.' - address: 'Sydney, Australia' - author: 'Park, Sihwa and Kim, Seunghun and Lee, Samuel and Yeo, Woon Seung' - bibtex: "@inproceedings{Park2010,\n abstract = {In this paper, we discuss the musical\ - \ potential of COMPath --- an online map based music-making tool --- as a noveland\ - \ unique interface for interactive music composition andperformance. 
COMPath provides\ - \ an intuitive environmentfor creative music making by sonification of georeferenceddata.\ - \ Users can generate musical events with simple andfamiliar actions on an online\ - \ map interface; a set of local information is collected along the user-drawn\ - \ route andthen interpreted as sounds of various musical instruments.We discuss\ - \ the musical interpretation of routes on a map,review the design and implementation\ - \ of COMPath, andpresent selected sonification results with focus on mappingstrategies\ - \ for map-based composition.},\n address = {Sydney, Australia},\n author = {Park,\ - \ Sihwa and Kim, Seunghun and Lee, Samuel and Yeo, Woon Seung},\n booktitle =\ - \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177877},\n issn = {2220-4806},\n keywords = {Musical\ - \ sonification, map interface, online map service, geo- referenced data, composition,\ - \ mashup},\n pages = {331--334},\n title = {Online Map Interface for Creative\ - \ and Interactive},\n url = {http://www.nime.org/proceedings/2010/nime2010_331.pdf},\n\ - \ year = {2010}\n}\n" + ID: Muller2012 + abstract: 'This paper discusses the utilization of human skin as a tangible interface + for musical expression and collaborative performance. We present an overview of + existing different instrument designs that include the skin as the main input. + As a further development of a previous exploration [16] we outline the setup and + interaction methods of `Skintimacy'', an instrument that appropriates the skin + for low voltage power transmission in multi-player interaction. Observations deriving + from proof-of-concept exploration and performances using the instrument are brought + into the reflection and discussion concerning the capabilities and limitations + of skin as an input surface.' 
+ address: 'Ann Arbor, Michigan' + author: Alexander Müller-Rakow and Jochen Fuchs + bibtex: "@inproceedings{Muller2012,\n abstract = {This paper discusses the utilization\ + \ of human skin as a tangible interface for musical expression and collaborative\ + \ performance. We present an overview of existing different instrument designs\ + \ that include the skin as the main input. As a further development of a previous\ + \ exploration [16] we outline the setup and interaction methods of `Skintimacy',\ + \ an instrument that appropriates the skin for low voltage power transmission\ + \ in multi-player interaction. Observations deriving from proof-of-concept exploration\ + \ and performances using the instrument are brought into the reflection and discussion\ + \ concerning the capabilities and limitations of skin as an input surface.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Alexander M{\\''u}ller-Rakow and\ + \ Jochen Fuchs},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178335},\n\ + \ issn = {2220-4806},\n keywords = {Skin-based instruments, skin conductivity,\ + \ collaborative interfaces, embodiment, intimacy, multi-player performance},\n\ + \ publisher = {University of Michigan},\n title = {The Human Skin as an Interface\ + \ for Musical Expression},\n url = {http://www.nime.org/proceedings/2012/nime2012_177.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177877 + doi: 10.5281/zenodo.1178335 issn: 2220-4806 - keywords: 'Musical sonification, map interface, online map service, geo- referenced - data, composition, mashup' - pages: 331--334 - title: Online Map Interface for Creative and Interactive - url: http://www.nime.org/proceedings/2010/nime2010_331.pdf - year: 2010 + keywords: 'Skin-based instruments, skin conductivity, collaborative interfaces, + 
embodiment, intimacy, multi-player performance' + publisher: University of Michigan + title: The Human Skin as an Interface for Musical Expression + url: http://www.nime.org/proceedings/2012/nime2012_177.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Hadjakos2010 - abstract: Awareness of playing movements can help a piano student to improve technique. - We are developing a piano pedagogy application that uses sensor data of hand and - arm movement and generates feedback to increase movement awareness. This paper - reports on a method for analysis of piano playing movements. The method allows - to judge whether an active movement in a joint has occurred during a given time - interval. This time interval may include one or more touches. The problem is complicated - by the fact that the mechanical interaction between the arm and piano action generates - additional movements that are not under direct control of the player. The analysis - method is able to ignore these movements and can therefore be used to provide - useful feedback. - address: 'Sydney, Australia' - author: 'Hadjakos, Aristotelis and Mühlhäuser, Max' - bibtex: "@inproceedings{Hadjakos2010,\n abstract = {Awareness of playing movements\ - \ can help a piano student to improve technique. We are developing a piano pedagogy\ - \ application that uses sensor data of hand and arm movement and generates feedback\ - \ to increase movement awareness. This paper reports on a method for analysis\ - \ of piano playing movements. The method allows to judge whether an active movement\ - \ in a joint has occurred during a given time interval. This time interval may\ - \ include one or more touches. The problem is complicated by the fact that the\ - \ mechanical interaction between the arm and piano action generates additional\ - \ movements that are not under direct control of the player. 
The analysis method\ - \ is able to ignore these movements and can therefore be used to provide useful\ - \ feedback.},\n address = {Sydney, Australia},\n author = {Hadjakos, Aristotelis\ - \ and M\\''{u}hlh\\''{a}user, Max},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177791},\n\ - \ issn = {2220-4806},\n keywords = {nime10},\n pages = {335--338},\n title = {Analysis\ - \ of Piano Playing Movements Spanning Multiple Touches},\n url = {http://www.nime.org/proceedings/2010/nime2010_335.pdf},\n\ - \ year = {2010}\n}\n" + ID: Lee2012 + abstract: 'Empatheater is a video playing system that is controlled by multimodal + interaction. As the video is played, the user must interact and emulate predefined + ``events'''' for the video to continue on. The user is given the illusion of playing + an active role in the unraveling video content and can empathize with the performer. + In this paper, we report about user experiences with Empatheater when applied + to musical video contents.' + address: 'Ann Arbor, Michigan' + author: Myunghee Lee and Youngsun Kim and Gerard Kim + bibtex: "@inproceedings{Lee2012,\n abstract = {Empatheater is a video playing system\ + \ that is controlled by multimodal interaction. As the video is played, the user\ + \ must interact and emulate predefined ``events'' for the video to continue on.\ + \ The user is given the illusion of playing an active role in the unraveling video\ + \ content and can empathize with the performer. 
In this paper, we report about\ + \ user experiences with Empatheater when applied to musical video contents.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Myunghee Lee and Youngsun Kim and\ + \ Gerard Kim},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178313},\n issn\ + \ = {2220-4806},\n keywords = {Music video, Empathy, Interactive video, Musical\ + \ event, Multimodal interaction.},\n publisher = {University of Michigan},\n title\ + \ = {Empathetic Interactive Music Video Experience},\n url = {http://www.nime.org/proceedings/2012/nime2012_179.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177791 + doi: 10.5281/zenodo.1178313 issn: 2220-4806 - keywords: nime10 - pages: 335--338 - title: Analysis of Piano Playing Movements Spanning Multiple Touches - url: http://www.nime.org/proceedings/2010/nime2010_335.pdf - year: 2010 + keywords: 'Music video, Empathy, Interactive video, Musical event, Multimodal interaction.' + publisher: University of Michigan + title: Empathetic Interactive Music Video Experience + url: http://www.nime.org/proceedings/2012/nime2012_179.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Heinz2010 - abstract: 'This paper proposes a design concept for a tangible interface forcollaborative - performances that incorporates two social factorspresent during performance, the - individual creation andadaptation of technology and the sharing of it within acommunity. - These factors are identified using the example of alaptop ensemble and then applied - to three existing collaborativeperformance paradigms. Finally relevant technology, - challengesand the current state of our implementation are discussed.' 
- address: 'Sydney, Australia' - author: 'Heinz, Sebastian and O''Modhrain, Sile' - bibtex: "@inproceedings{Heinz2010,\n abstract = {This paper proposes a design concept\ - \ for a tangible interface forcollaborative performances that incorporates two\ - \ social factorspresent during performance, the individual creation andadaptation\ - \ of technology and the sharing of it within acommunity. These factors are identified\ - \ using the example of alaptop ensemble and then applied to three existing collaborativeperformance\ - \ paradigms. Finally relevant technology, challengesand the current state of our\ - \ implementation are discussed.},\n address = {Sydney, Australia},\n author =\ - \ {Heinz, Sebastian and O'Modhrain, Sile},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177803},\n\ - \ issn = {2220-4806},\n keywords = {Tangible User Interfaces, collaborative performances,\ - \ social factors},\n pages = {339--342},\n title = {Designing a Shareable Musical\ - \ TUI},\n url = {http://www.nime.org/proceedings/2010/nime2010_339.pdf},\n year\ - \ = {2010}\n}\n" + ID: Clay2012 + abstract: 'The augmented ballet project aims at gathering research from several + fields and directing them towards a same application case: adding virtual elements + (visual and acoustic) to a dance live performance, and allowing the dancer to + interact with them. In this paper, we describe a novel interaction that we used + in the frame of this project: using the dancer''s movements to recognize the emotions + he expresses, and use these emotions to generate musical audio flows evolving + in real-time. The originality of this interaction is threefold. First, it covers + the whole interaction cycle from the input (the dancer''s movements) to the output + (the generated music). 
Second, this interaction isn''t direct but goes through + a high level of abstraction: dancer''s emotional expression is recognized and + is the source of music generation. Third, this interaction has been designed and + validated through constant collaboration with a choreographer, culminating in + an augmented ballet performance in front of a live audience.' + address: 'Ann Arbor, Michigan' + author: Alexis Clay and Nadine Couture and Myriam Desainte-Catherine and Pierre-Henri + Vulliard and Joseph Larralde and Elodie Decarsin + bibtex: "@inproceedings{Clay2012,\n abstract = {The augmented ballet project aims\ + \ at gathering research from several fields and directing them towards a same\ + \ application case: adding virtual elements (visual and acoustic) to a dance live\ + \ performance, and allowing the dancer to interact with them. In this paper, we\ + \ describe a novel interaction that we used in the frame of this project: using\ + \ the dancer's movements to recognize the emotions he expresses, and use these\ + \ emotions to generate musical audio flows evolving in real-time. The originality\ + \ of this interaction is threefold. 
First, it covers the whole interaction cycle\ + \ from the input (the dancer's movements) to the output (the generated music).\ + \ Second, this interaction isn't direct but goes through a high level of abstraction:\ + \ dancer's emotional expression is recognized and is the source of music generation.\ + \ Third, this interaction has been designed and validated through constant collaboration\ + \ with a choreographer, culminating in an augmented ballet performance in front\ + \ of a live audience.},\n address = {Ann Arbor, Michigan},\n author = {Alexis\ + \ Clay and Nadine Couture and Myriam Desainte-Catherine and Pierre-Henri Vulliard\ + \ and Joseph Larralde and Elodie Decarsin},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178237},\n issn = {2220-4806},\n keywords = {Interactive sonification,\ + \ motion, gesture and music, interaction, live performance, musical human-computer\ + \ interaction},\n publisher = {University of Michigan},\n title = {Movement to\ + \ emotions to music: using whole body emotional expression as an interaction for\ + \ electronic music generation},\n url = {http://www.nime.org/proceedings/2012/nime2012_180.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177803 + doi: 10.5281/zenodo.1178237 issn: 2220-4806 - keywords: 'Tangible User Interfaces, collaborative performances, social factors' - pages: 339--342 - title: Designing a Shareable Musical TUI - url: http://www.nime.org/proceedings/2010/nime2010_339.pdf - year: 2010 + keywords: 'Interactive sonification, motion, gesture and music, interaction, live + performance, musical human-computer interaction' + publisher: University of Michigan + title: 'Movement to emotions to music: using whole body emotional expression as + an interaction for electronic music generation' + url: 
http://www.nime.org/proceedings/2012/nime2012_180.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Freed2010 - abstract: We present two complementary approaches for the visualization and interaction - of dimensionally reduced data setsusing hybridization interfaces. Our implementations - privilege syncretic systems allowing one to explore combinations(hybrids) of disparate - elements of a data set through theirplacement in a 2-D space. The first approach - allows for theplacement of data points anywhere on the plane accordingto an anticipated - performance strategy. The contribution(weight) of each data point varies according - to a power function of the distance from the control cursor. The secondapproach - uses constrained vertex colored triangulations ofmanifolds with labels placed - at the vertices of triangulartiles. Weights are computed by barycentric projection - ofthe control cursor position. - address: 'Sydney, Australia' - author: 'Freed, Adrian' - bibtex: "@inproceedings{Freed2010,\n abstract = {We present two complementary approaches\ - \ for the visualization and interaction of dimensionally reduced data setsusing\ - \ hybridization interfaces. Our implementations privilege syncretic systems allowing\ - \ one to explore combinations(hybrids) of disparate elements of a data set through\ - \ theirplacement in a 2-D space. The first approach allows for theplacement of\ - \ data points anywhere on the plane accordingto an anticipated performance strategy.\ - \ The contribution(weight) of each data point varies according to a power function\ - \ of the distance from the control cursor. The secondapproach uses constrained\ - \ vertex colored triangulations ofmanifolds with labels placed at the vertices\ - \ of triangulartiles. 
Weights are computed by barycentric projection ofthe control\ - \ cursor position.},\n address = {Sydney, Australia},\n author = {Freed, Adrian},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177769},\n issn = {2220-4806},\n\ - \ keywords = {Interpolation, dimension reduction, radial basis functions, triangular\ - \ mesh},\n pages = {343--347},\n title = {Visualizations and Interaction Strategies\ - \ for Hybridization Interfaces},\n url = {http://www.nime.org/proceedings/2010/nime2010_343.pdf},\n\ - \ year = {2010}\n}\n" + ID: Trappe2012 + abstract: 'In this paper we present our project to make sound synthesis and music + controller construction accessible to children in a technology design workshop. + We present the work we have carried out to develop a graphical user interface, + and give account of the workshop we conducted in collaboration with a local primary + school. Our results indicate that the production of audio events by means of digital + synthesis and algorithmic composition provides a rich and interesting field to + be discovered for pedagogical workshops taking a Constructionist approach.' + address: 'Ann Arbor, Michigan' + author: Christoph Trappe + bibtex: "@inproceedings{Trappe2012,\n abstract = {In this paper we present our project\ + \ to make sound synthesis and music controller construction accessible to children\ + \ in a technology design workshop. We present the work we have carried out to\ + \ develop a graphical user interface, and give account of the workshop we conducted\ + \ in collaboration with a local primary school. 
Our results indicate that the\ + \ production of audio events by means of digital synthesis and algorithmic composition\ + \ provides a rich and interesting field to be discovered for pedagogical workshops\ + \ taking a Constructionist approach.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Christoph Trappe},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178439},\n\ + \ issn = {2220-4806},\n keywords = {Child Computer Interaction, Constructionism,\ + \ Sound and Music Computing, Human-Computer Interface Design, Mu-sic Composition\ + \ and Generation, Interactive Audio Sys-tems, Technology Design Activities.},\n\ + \ publisher = {University of Michigan},\n title = {Making Sound Synthesis Accessible\ + \ for Children},\n url = {http://www.nime.org/proceedings/2012/nime2012_181.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177769 + doi: 10.5281/zenodo.1178439 issn: 2220-4806 - keywords: 'Interpolation, dimension reduction, radial basis functions, triangular - mesh' - pages: 343--347 - title: Visualizations and Interaction Strategies for Hybridization Interfaces - url: http://www.nime.org/proceedings/2010/nime2010_343.pdf - year: 2010 + keywords: 'Child Computer Interaction, Constructionism, Sound and Music Computing, + Human-Computer Interface Design, Mu-sic Composition and Generation, Interactive + Audio Sys-tems, Technology Design Activities.' + publisher: University of Michigan + title: Making Sound Synthesis Accessible for Children + url: http://www.nime.org/proceedings/2012/nime2012_181.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Woldecke2010 - abstract: In this paper we describe work in progress on generative music generation - on multi-touch devices. 
Our goal is to create a musical application framework - for multiple casual users that use state of the art multitouch devices. We choose - the metaphor of ants moving on a hexagonal grid to interact with a pitch pattern. - The set of devices used includes a custom built multitouch table and a number - of iPhones to jointly create musical expressions. - address: 'Sydney, Australia' - author: 'Wöldecke, Björn and Geiger, Christian and Reckter, Holger and Schulz, Florian' - bibtex: "@inproceedings{Woldecke2010,\n abstract = {In this paper we describe work\ - \ in progress on generative music generation on multi-touch devices. Our goal\ - \ is to create a musical application framework for multiple casual users that\ - \ use state of the art multitouch devices. We choose the metaphor of ants moving\ - \ on a hexagonal grid to interact with a pitch pattern. The set of devices used\ - \ includes a custom built multitouch table and a number of iPhones to jointly\ - \ create musical expressions.},\n address = {Sydney, Australia},\n author = {W\\\ - ''{o}ldecke, Bj\\''{o}rn and Geiger, Christian and Reckter, Holger and Schulz,\ - \ Florian},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177921},\n issn\ - \ = {2220-4806},\n keywords = {Generative music, mobile interfaces, multitouch\ - \ interaction},\n pages = {348--351},\n title = {ANTracks 2.0 --- Generative Music\ - \ on Multiple Multitouch Devices Categories and Subject Descriptors},\n url =\ - \ {http://www.nime.org/proceedings/2010/nime2010_348.pdf},\n year = {2010}\n}\n" + ID: Skogstad2012 + abstract: 'In this paper we present the Dance Jockey System, a system developed + for using a full body inertial motion capture suit (Xsens MVN) in music/dance + performances. 
We present different strategies for extracting relevant postures + and actions from the continuous data, and how these postures and actions can be + used to control sonic and musical features. The system has been used in several + public performances, and we believe it has great potential for further exploration. + However, to overcome the current practical and technical challenges when working + with the system, it is important to further refine tools and software in order + to facilitate making of new performance pieces.' + address: 'Ann Arbor, Michigan' + author: 'Skogstad, Ståle A. and Kristian Nymoen and de Quay, Yago and Jensenius, + Alexander Refsum' + bibtex: "@inproceedings{Skogstad2012,\n abstract = {In this paper we present the\ + \ Dance Jockey System, a system developed for using a full body inertial motion\ + \ capture suit (Xsens MVN) in music/dance performances. We present different strategies\ + \ for extracting relevant postures and actions from the continuous data, and how\ + \ these postures and actions can be used to control sonic and musical features.\ + \ The system has been used in several public performances, and we believe it has\ + \ great potential for further exploration. However, to overcome the current practical\ + \ and technical challenges when working with the system, it is important to further\ + \ refine tools and software in order to facilitate making of new performance pieces.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Skogstad, St{\\aa}le A. 
and Kristian\ + \ Nymoen and de Quay, Yago and Jensenius, Alexander Refsum},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1180601},\n issn = {2220-4806},\n publisher = {University\ + \ of Michigan},\n title = {Developing the Dance Jockey System for Musical Interaction\ + \ with the Xsens {MV}N Suit},\n url = {http://www.nime.org/proceedings/2012/nime2012_182.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177921 + doi: 10.5281/zenodo.1180601 issn: 2220-4806 - keywords: 'Generative music, mobile interfaces, multitouch interaction' - pages: 348--351 - title: ANTracks 2.0 --- Generative Music on Multiple Multitouch Devices Categories - and Subject Descriptors - url: http://www.nime.org/proceedings/2010/nime2010_348.pdf - year: 2010 + publisher: University of Michigan + title: Developing the Dance Jockey System for Musical Interaction with the Xsens + MVN Suit + url: http://www.nime.org/proceedings/2012/nime2012_182.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Kang2010 - abstract: 'The project Hé(和, harmony) is a sound installation that enables a user - to play music by writing calligraphy. We developed a system where calligraphic - symbols can be detected and converted to a sound composed of pitch, pitch length, - and volume though MIDI and serial communication. The Hé sound installation involves - a micro-controller, photocells, and multiplexers. A DC motor controls the speed - of a spooled paper roll that is capable of setting the music tempo. This paper - presents the design concept and implementation of Hé. We discuss the major research - issues such as using photocells for detecting components of calligraphy like thickness - and location. Hardware and software details are also discussed. 
Finally, we explore - the potential for further extending musical and visual experience through this - project’s applications and outcomes.' - address: 'Sydney, Australia' - author: 'Kang, Laewoo and Chien, Hsin-Yi' - bibtex: "@inproceedings{Kang2010,\n abstract = {The project H\\'{e}(和, harmony)\ - \ is a sound installation that enables a user to play music by writing calligraphy.\ - \ We developed a system where calligraphic symbols can be detected and converted\ - \ to a sound composed of pitch, pitch length, and volume though MIDI and serial\ - \ communication. The H\\'{e} sound installation involves a micro-controller, photocells,\ - \ and multiplexers. A DC motor controls the speed of a spooled paper roll that\ - \ is capable of setting the music tempo. This paper presents the design concept\ - \ and implementation of H\\'{e}. We discuss the major research issues such as\ - \ using photocells for detecting components of calligraphy like thickness and\ - \ location. Hardware and software details are also discussed. Finally, we explore\ - \ the potential for further extending musical and visual experience through this\ - \ project’s applications and outcomes.},\n address = {Sydney, Australia},\n author\ - \ = {Kang, Laewoo and Chien, Hsin-Yi},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177819},\n\ - \ issn = {2220-4806},\n keywords = {Interactive music interface, calligraphy,\ - \ graphical music composing, sonification},\n pages = {352--355},\n title = {H\\\ - '{e} : Calligraphy as a Musical Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_352.pdf},\n\ - \ year = {2010}\n}\n" + ID: Senturk2012 + abstract: 'Meaning crossword of sound, Crossole is a musical meta-instrument where + the music is visualized as a set of virtual blocks that resemble a crossword puzzle. + In Crossole, the chord progressions are visually presented as a set of virtual + blocks. 
With the aid of the Kinect sensing technology, a performer controls music + by manipulating the crossword blocks using hand movements. The performer can build + chords in the high level, traverse over the blocks, step into the low level to + control the chord arpeggiations note by note, loop a chord progression or map + gestures to various processing algorithms to enhance the timbral scenery.' + address: 'Ann Arbor, Michigan' + author: Sertan Şentürk and Sang Won Lee and Avinash Sastry and Anosh Daruwalla and + Gil Weinberg + bibtex: "@inproceedings{Senturk2012,\n abstract = {Meaning crossword of sound, Crossole\ + \ is a musical meta-instrument where the music is visualized as a set of virtual\ + \ blocks that resemble a crossword puzzle. In Crossole, the chord progressions\ + \ are visually presented as a set of virtual blocks. With the aid of the Kinect\ + \ sensing technology, a performer controls music by manipulating the crossword\ + \ blocks using hand movements. The performer can build chords in the high level,\ + \ traverse over the blocks, step into the low level to control the chord arpeggiations\ + \ note by note, loop a chord progression or map gestures to various processing\ + \ algorithms to enhance the timbral scenery.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Sertan \\c{S}ent\\\"{u}rk and Sang Won Lee and Avinash Sastry and\ + \ Anosh Daruwalla and Gil Weinberg},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178201},\n\ + \ issn = {2220-4806},\n keywords = {Kinect, meta-instrument, chord progression,\ + \ body gesture},\n publisher = {University of Michigan},\n title = {Crossole:\ + \ A Gestural Interface for Composition, Improvisation and Performance using Kinect},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_185.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical 
Expression - doi: 10.5281/zenodo.1177819 + doi: 10.5281/zenodo.1178201 issn: 2220-4806 - keywords: 'Interactive music interface, calligraphy, graphical music composing, - sonification' - pages: 352--355 - title: 'Hé : Calligraphy as a Musical Interface' - url: http://www.nime.org/proceedings/2010/nime2010_352.pdf - year: 2010 + keywords: 'Kinect, meta-instrument, chord progression, body gesture' + publisher: University of Michigan + title: 'Crossole: A Gestural Interface for Composition, Improvisation and Performance + using Kinect' + url: http://www.nime.org/proceedings/2012/nime2012_185.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Marier2010 - abstract: 'The sponge is an interface that allows a clear link to beestablished - between gesture and sound in electroacousticmusic. The goals in developing the - sponge were to reintroduce the pleasure of playing and to improve the interaction - between the composer/performer and the audience. Ithas been argued that expenditure - of effort or energy is required to obtain expressive interfaces. The sponge favors - anenergy-sound relationship in two ways : 1) it senses acceleration, which is - closely related to energy; and 2) it is madeout of a flexible material (foam) - that requires effort to besqueezed or twisted. Some of the mapping strategies - usedin a performance context with the sponge are discussed.' - address: 'Sydney, Australia' - author: 'Marier, Martin' - bibtex: "@inproceedings{Marier2010,\n abstract = {The sponge is an interface that\ - \ allows a clear link to beestablished between gesture and sound in electroacousticmusic.\ - \ The goals in developing the sponge were to reintroduce the pleasure of playing\ - \ and to improve the interaction between the composer/performer and the audience.\ - \ Ithas been argued that expenditure of effort or energy is required to obtain\ - \ expressive interfaces. 
The sponge favors anenergy-sound relationship in two\ - \ ways : 1) it senses acceleration, which is closely related to energy; and 2)\ - \ it is madeout of a flexible material (foam) that requires effort to besqueezed\ - \ or twisted. Some of the mapping strategies usedin a performance context with\ - \ the sponge are discussed.},\n address = {Sydney, Australia},\n author = {Marier,\ - \ Martin},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177839},\n issn = {2220-4806},\n\ - \ keywords = {Interface, electroacoustic music, performance, expressivity, mapping},\n\ - \ pages = {356--359},\n title = {The Sponge A Flexible Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_356.pdf},\n\ - \ year = {2010}\n}\n" + ID: Snyder2012 + abstract: 'This paper presents the JD-1, a digital controller for analog modular + synthesizers. The JD-1 features a capacitive touch-sensing keyboard that responds + to continuous variations in finger contact, high-accuracy polyphonic control-voltage + outputs, a built-in sequencer, and digital interfaces for connection to MIDI and + OSC devices. Design goals include interoperability with a wide range of synthesizers, + very high-resolution pitch control, and intuitive control of the sequencer from + the keyboard.' + address: 'Ann Arbor, Michigan' + author: Jeff Snyder and Andrew McPherson + bibtex: "@inproceedings{Snyder2012,\n abstract = {This paper presents the JD-1,\ + \ a digital controller for analog modular synthesizers. The JD-1 features a capacitive\ + \ touch-sensing keyboard that responds to continuous variations in finger contact,\ + \ high-accuracy polyphonic control-voltage outputs, a built-in sequencer, and\ + \ digital interfaces for connection to MIDI and OSC devices. 
Design goals include\ + \ interoperability with a wide range of synthesizers, very high-resolution pitch\ + \ control, and intuitive control of the sequencer from the keyboard.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Jeff Snyder and Andrew McPherson},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178421},\n issn = {2220-4806},\n keywords\ + \ = {keyboard, sequencer, analog synthesizer, capacitive touch sensing},\n publisher\ + \ = {University of Michigan},\n title = {The JD-1: an Implementation of a Hybrid\ + \ Keyboard/Sequencer Controller for Analog Synthesizers},\n url = {http://www.nime.org/proceedings/2012/nime2012_187.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177839 + doi: 10.5281/zenodo.1178421 issn: 2220-4806 - keywords: 'Interface, electroacoustic music, performance, expressivity, mapping' - pages: 356--359 - title: The Sponge A Flexible Interface - url: http://www.nime.org/proceedings/2010/nime2010_356.pdf - year: 2010 + keywords: 'keyboard, sequencer, analog synthesizer, capacitive touch sensing' + publisher: University of Michigan + title: 'The JD-1: an Implementation of a Hybrid Keyboard/Sequencer Controller for + Analog Synthesizers' + url: http://www.nime.org/proceedings/2012/nime2012_187.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Fyfe2010 - abstract: 'In this paper we discuss SurfaceMusic, a tabletop music system in which - touch gestures are mapped to physical modelsof instruments. With physical models, - parametric controlover the sound allows for a more natural interaction between - gesture and sound. We discuss the design and implementation of a simple gestural - interface for interactingwith virtual instruments and a messaging system that - conveys gesture data to the audio system.' 
- address: 'Sydney, Australia' - author: 'Fyfe, Lawrence and Lynch, Sean and Hull, Carmen and Carpendale, Sheelagh' - bibtex: "@inproceedings{Fyfe2010,\n abstract = {In this paper we discuss SurfaceMusic,\ - \ a tabletop music system in which touch gestures are mapped to physical modelsof\ - \ instruments. With physical models, parametric controlover the sound allows for\ - \ a more natural interaction between gesture and sound. We discuss the design\ - \ and implementation of a simple gestural interface for interactingwith virtual\ - \ instruments and a messaging system that conveys gesture data to the audio system.},\n\ - \ address = {Sydney, Australia},\n author = {Fyfe, Lawrence and Lynch, Sean and\ - \ Hull, Carmen and Carpendale, Sheelagh},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177777},\n\ - \ issn = {2220-4806},\n keywords = {Tabletop, multi-touch, gesture, physical model,\ - \ Open Sound Control.},\n pages = {360--363},\n title = {SurfaceMusic : Mapping\ - \ Virtual Touch-based Instruments to Physical Models},\n url = {http://www.nime.org/proceedings/2010/nime2010_360.pdf},\n\ - \ year = {2010}\n}\n" + ID: OSullivan2012 + abstract: 'Development of new musical interfaces often requires experimentation + with the mapping of available controller inputs to output parameters. Useful mappings + for a particular application may be complex in nature, with one or more inputs + being linked to one or more outputs. Existing development environments are commonly + used to program such mappings, while code libraries provide powerful data-stream + manipulation. However, room exists for a standalone application with a simpler + graphical user interface for dynamically patching between inputs and outputs. + This paper presents an early prototype version of a software tool that allows + the user to route control signals in real time, using various messaging formats. 
+ It is cross-platform and runs as a standalone application in desktop and Android + OS versions. The latter allows the users of mobile devices to experiment with + mapping signals to and from physical computing components using the inbuilt multi-touch + screen. Potential uses therefore include real-time mapping during performance + in a more expressive manner than facilitated by existing tools.' + address: 'Ann Arbor, Michigan' + author: Liam O'Sullivan and Dermot Furlong and Frank Boland + bibtex: "@inproceedings{OSullivan2012,\n abstract = {Development of new musical\ + \ interfaces often requires experimentation with the mapping of available controller\ + \ inputs to output parameters. Useful mappings for a particular application may\ + \ be complex in nature, with one or more inputs being linked to one or more outputs.\ + \ Existing development environments are commonly used to program such mappings,\ + \ while code libraries provide powerful data-stream manipulation. However, room\ + \ exists for a standalone application with a simpler graphical user interface\ + \ for dynamically patching between inputs and outputs. This paper presents an\ + \ early prototype version of a software tool that allows the user to route control\ + \ signals in real time, using various messaging formats. It is cross-platform\ + \ and runs as a standalone application in desktop and Android OS versions. 
The\ + \ latter allows the users of mobile devices to experiment with mapping signals\ + \ to and from physical computing components using the inbuilt multi-touch screen.\ + \ Potential uses therefore include real-time mapping during performance in a more\ + \ expressive manner than facilitated by existing tools.},\n address = {Ann Arbor,\ + \ Michigan},\n author = {Liam O'Sullivan and Dermot Furlong and Frank Boland},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1180555},\n issn = {2220-4806},\n\ + \ keywords = {Mapping, Software Tools, Android.},\n publisher = {University of\ + \ Michigan},\n title = {Introducing CrossMapper: Another Tool for Mapping Musical\ + \ Control Parameters},\n url = {http://www.nime.org/proceedings/2012/nime2012_189.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177777 + doi: 10.5281/zenodo.1180555 issn: 2220-4806 - keywords: 'Tabletop, multi-touch, gesture, physical model, Open Sound Control.' - pages: 360--363 - title: 'SurfaceMusic : Mapping Virtual Touch-based Instruments to Physical Models' - url: http://www.nime.org/proceedings/2010/nime2010_360.pdf - year: 2010 + keywords: 'Mapping, Software Tools, Android.' + publisher: University of Michigan + title: 'Introducing CrossMapper: Another Tool for Mapping Musical Control Parameters' + url: http://www.nime.org/proceedings/2012/nime2012_189.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Martin2010 - abstract: 'Many musical instruments have interfaces which emphasisethe pitch of - the sound produced over other perceptual characteristics, such as its timbre. - This is at odds with the musical developments of the last century. 
In this paper, - weintroduce a method for replacing the interface of musicalinstruments (both conventional - and unconventional) witha more flexible interface which can present the intrument''savailable - sounds according to variety of different perceptualcharacteristics, such as their - brightness or roughness. Weapply this method to an instrument of our own design - whichcomprises an electro-mechanically controlled electric guitarand amplifier - configured to produce feedback tones.' - address: 'Sydney, Australia' - author: 'Martin, Aengus and Ferguson, Sam and Beilharz, Kirsty' - bibtex: "@inproceedings{Martin2010,\n abstract = {Many musical instruments have\ - \ interfaces which emphasisethe pitch of the sound produced over other perceptual\ - \ characteristics, such as its timbre. This is at odds with the musical developments\ - \ of the last century. In this paper, weintroduce a method for replacing the interface\ - \ of musicalinstruments (both conventional and unconventional) witha more flexible\ - \ interface which can present the intrument'savailable sounds according to variety\ - \ of different perceptualcharacteristics, such as their brightness or roughness.\ - \ Weapply this method to an instrument of our own design whichcomprises an electro-mechanically\ - \ controlled electric guitarand amplifier configured to produce feedback tones.},\n\ - \ address = {Sydney, Australia},\n author = {Martin, Aengus and Ferguson, Sam\ - \ and Beilharz, Kirsty},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177841},\n\ - \ issn = {2220-4806},\n keywords = {Concatenative Synthesis, Feedback, Guitar},\n\ - \ pages = {364--367},\n title = {Mechanisms for Controlling Complex Sound Sources\ - \ : Applications to Guitar Feedback Control},\n url = {http://www.nime.org/proceedings/2010/nime2010_364.pdf},\n\ - \ year = {2010}\n}\n" + ID: Schiesser2012 + abstract: "An augmented bass clarinet 
is developed in order to extend the performance\ + \ and composition potential of the instru-ment. Four groups of sensors are added:\ + \ key positions, inertial movement, mouth pressure and trigger switches. The instrument\ + \ communicates wirelessly with a receiver setup which produces an OSC data stream,\ + \ usable by any appli-cation on a host computer.\nThe SABRe projects intention\ + \ is to be neither tied to its inventors nor to one single player but to offer\ + \ a reference design for a larger community of bass clarinet players and composers.\ + \ For this purpose, several instruments are made available and a number of composer\ + \ residencies, workshops, presentations and concerts are organized. These serve\ + \ for evaluation and improvement purposes in order to build a robust and user\ + \ friendly extended musical instrument, that opens new playing modalities." + address: 'Ann Arbor, Michigan' + author: Sébastien Schiesser and Jan C. Schacher + bibtex: "@inproceedings{Schiesser2012,\n abstract = {An augmented bass clarinet\ + \ is developed in order to extend the performance and composition potential of\ + \ the instru-ment. Four groups of sensors are added: key positions, inertial movement,\ + \ mouth pressure and trigger switches. The instrument communicates wirelessly\ + \ with a receiver setup which produces an OSC data stream, usable by any appli-cation\ + \ on a host computer.\nThe SABRe projects intention is to be neither tied to its\ + \ inventors nor to one single player but to offer a reference design for a larger\ + \ community of bass clarinet players and composers. For this purpose, several\ + \ instruments are made available and a number of composer residencies, workshops,\ + \ presentations and concerts are organized. 
These serve for evaluation and improvement\ + \ purposes in order to build a robust and user friendly extended musical instrument,\ + \ that opens new playing modalities.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {S{\\'e}bastien Schiesser and Jan C. Schacher},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1180587},\n issn = {2220-4806},\n keywords = {augmented\ + \ instrument, bass clarinet, sensors, air pressure, gesture, OSC},\n publisher\ + \ = {University of Michigan},\n title = {SABRe: The Augmented Bass Clarinet},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_193.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177841 + doi: 10.5281/zenodo.1180587 issn: 2220-4806 - keywords: 'Concatenative Synthesis, Feedback, Guitar' - pages: 364--367 - title: 'Mechanisms for Controlling Complex Sound Sources : Applications to Guitar - Feedback Control' - url: http://www.nime.org/proceedings/2010/nime2010_364.pdf - year: 2010 + keywords: 'augmented instrument, bass clarinet, sensors, air pressure, gesture, + OSC' + publisher: University of Michigan + title: 'SABRe: The Augmented Bass Clarinet' + url: http://www.nime.org/proceedings/2012/nime2012_193.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Torresen2010 - abstract: 'This paper presents a comparison of different configurationsof a wireless - sensor system for capturing human motion.The systems consist of sensor elements - which wirelesslytransfers motion data to a receiver element. The sensorelements - consist of a microcontroller, accelerometer(s) anda radio transceiver. The receiver - element consists of a radioreceiver connected through a microcontroller to a computerfor - real time sound synthesis. 
The wireless transmission between the sensor elements - and the receiver element is basedon the low rate IEEE 802.15.4/ZigBee standard.A - configuration with several accelerometers connected bywire to a wireless sensor - element is compared to using multiple wireless sensor elements with only one accelerometer - ineach. The study shows that it would be feasable to connect5-6 accelerometers - in the given setups.Sensor data processing can be done in either the receiverelement - or in the sensor element. For various reasons it canbe reasonable to implement - some sensor data processing inthe sensor element. The paper also looks at how - much timethat typically would be needed for a simple pre-processingtask.' - address: 'Sydney, Australia' - author: 'Torresen, Jim and Renton, Eirik and Jensenius, Alexander R.' - bibtex: "@inproceedings{Torresen2010,\n abstract = {This paper presents a comparison\ - \ of different configurationsof a wireless sensor system for capturing human motion.The\ - \ systems consist of sensor elements which wirelesslytransfers motion data to\ - \ a receiver element. The sensorelements consist of a microcontroller, accelerometer(s)\ - \ anda radio transceiver. The receiver element consists of a radioreceiver connected\ - \ through a microcontroller to a computerfor real time sound synthesis. The wireless\ - \ transmission between the sensor elements and the receiver element is basedon\ - \ the low rate IEEE 802.15.4/ZigBee standard.A configuration with several accelerometers\ - \ connected bywire to a wireless sensor element is compared to using multiple\ - \ wireless sensor elements with only one accelerometer ineach. The study shows\ - \ that it would be feasable to connect5-6 accelerometers in the given setups.Sensor\ - \ data processing can be done in either the receiverelement or in the sensor element.\ - \ For various reasons it canbe reasonable to implement some sensor data processing\ - \ inthe sensor element. 
The paper also looks at how much timethat typically would\ - \ be needed for a simple pre-processingtask.},\n address = {Sydney, Australia},\n\ - \ author = {Torresen, Jim and Renton, Eirik and Jensenius, Alexander R.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177911},\n issn = {2220-4806},\n keywords\ - \ = {wireless communication, ZigBee, microcontroller},\n pages = {368--371},\n\ - \ title = {Wireless Sensor Data Collection based on {ZigBee} Communication},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_368.pdf},\n year = {2010}\n\ - }\n" + ID: Overholt2012 + abstract: 'The Create USB Interface is an open source microcontroller board that + can be programmed in C, BASIC, or Arduino languages. The latest version is called + the CUI32Stem, and it is designed to work `hand-in-hand'' with the GROVE prototyping + system that includes a wide range of sensors and actuators. It utilizes a high-performance + Microchip® PIC32 microcontroller unit to allow programmable user interfaces. Its + development and typical uses are described, focusing on musical interaction design + scenarios. Several options for wireless connectivity are described as well, enabling + the CUI32Stem to pair with a smartphone and/or a normal computer. Finally, SeeedStudio''s + GROVE system is explained, which provides a prototyping system comprised of various + elements that incorporate simple plugs, allowing the CUI32Stem to easily connect + to the growing collection of open source GROVE transducers.' + address: 'Ann Arbor, Michigan' + author: Dan Overholt + bibtex: "@inproceedings{Overholt2012,\n abstract = {The Create USB Interface is\ + \ an open source microcontroller board that can be programmed in C, BASIC, or\ + \ Arduino languages. 
The latest version is called the CUI32Stem, and it is designed\ + \ to work `hand-in-hand' with the GROVE prototyping system that includes a wide\ + \ range of sensors and actuators. It utilizes a high-performance Microchip{\\\ + textregistered} PIC32 microcontroller unit to allow programmable user interfaces.\ + \ Its development and typical uses are described, focusing on musical interaction\ + \ design scenarios. Several options for wireless connectivity are described as\ + \ well, enabling the CUI32Stem to pair with a smartphone and/or a normal computer.\ + \ Finally, SeeedStudio's GROVE system is explained, which provides a prototyping\ + \ system comprised of various elements that incorporate simple plugs, allowing\ + \ the CUI32Stem to easily connect to the growing collection of open source GROVE\ + \ transducers.},\n address = {Ann Arbor, Michigan},\n author = {Dan Overholt},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1180561},\n issn = {2220-4806},\n\ + \ keywords = {Musical Interaction Design, NIME education, Microcontroller, Arduino\ + \ language, StickOS BASIC, Open Sound Control, Microchip PIC32, Wireless, Zigflea,\ + \ Wifi, 802.11g, Bluetooth, CUI32, CUI32Stem},\n publisher = {University of Michigan},\n\ + \ title = {Musical Interaction Design with the CUI32{S}tem: Wireless Options and\ + \ the GROVE system for prototyping new interfaces},\n url = {http://www.nime.org/proceedings/2012/nime2012_194.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177911 + doi: 10.5281/zenodo.1180561 issn: 2220-4806 - keywords: 'wireless communication, ZigBee, microcontroller' - pages: 368--371 - title: Wireless Sensor Data Collection based on ZigBee Communication - url: http://www.nime.org/proceedings/2010/nime2010_368.pdf - year: 2010 + keywords: 'Musical Interaction Design, 
NIME education, Microcontroller, Arduino + language, StickOS BASIC, Open Sound Control, Microchip PIC32, Wireless, Zigflea, + Wifi, 802.11g, Bluetooth, CUI32, CUI32Stem' + publisher: University of Michigan + title: 'Musical Interaction Design with the CUI32Stem: Wireless Options and the + GROVE system for prototyping new interfaces' + url: http://www.nime.org/proceedings/2012/nime2012_194.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Jaimovich2010a - abstract: 'The past decade has seen an increase of low-cost technology for sensor - data acquisition, which has been utilized for the expanding field of research - in gesture measurement for music performance. Unfortunately, these devices are - still far from being compatible with the audiovisual recording platforms which - have been used to record synchronized streams of data. In this paper, we describe - a practical solution for simultaneous recording of heterogeneous multimodal signals. - The recording system presented uses MIDI Time Code to time-stamp sensor data and - to synchronize with standard video and audio recording systems. We also present - a set of tools for recording sensor data, as well as a set of analysis tools to - evaluate in realtime the sample rate of different signals, and the overall synchronization - status of the recording system. ' - address: 'Sydney, Australia' - author: 'Jaimovich, Javier and Knapp, Benjamin' - bibtex: "@inproceedings{Jaimovich2010a,\n abstract = {The past decade has seen an\ - \ increase of low-cost technology for sensor data acquisition, which has been\ - \ utilized for the expanding field of research in gesture measurement for music\ - \ performance. Unfortunately, these devices are still far from being compatible\ - \ with the audiovisual recording platforms which have been used to record synchronized\ - \ streams of data. In this paper, we describe a practical solution for simultaneous\ - \ recording of heterogeneous multimodal signals. 
The recording system presented\ - \ uses MIDI Time Code to time-stamp sensor data and to synchronize with standard\ - \ video and audio recording systems. We also present a set of tools for recording\ - \ sensor data, as well as a set of analysis tools to evaluate in realtime the\ - \ sample rate of different signals, and the overall synchronization status of\ - \ the recording system. },\n address = {Sydney, Australia},\n author = {Jaimovich,\ - \ Javier and Knapp, Benjamin},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177815},\n\ - \ issn = {2220-4806},\n keywords = {Synchronization, Multimodal Signals, Sensor\ - \ Data Acquisition, Signal Recording.},\n pages = {372--374},\n title = {Synchronization\ - \ of Multimodal Recordings for Musical Performance Research},\n url = {http://www.nime.org/proceedings/2010/nime2010_372.pdf},\n\ - \ year = {2010}\n}\n" + ID: McPherson2012 + abstract: 'Capacitive touch sensing is increasingly used in musical con-trollers, + particularly those based on multi-touch screen interfaces. However, in contrast + to the venerable piano-style keyboard, touch screen controllers lack the tactile + feedback many performers find crucial. This paper presents an augmentation system + for acoustic and electronic keyboards in which multi-touch capacitive sensors + are added to the surface of each key. Each key records the position of fingers + on the surface, and by combining this data with MIDI note onsets and aftertouch + from the host keyboard, the system functions as a multidimensional polyphonic + controller for a wide variety of synthesis software. The paper will discuss general + capacitive touch sensor design, keyboard-specific implementation strategies, and + the development of a flexible mapping engine using OSC and MIDI.' 
+ address: 'Ann Arbor, Michigan' + author: Andrew McPherson + bibtex: "@inproceedings{McPherson2012,\n abstract = {Capacitive touch sensing is\ + \ increasingly used in musical con-trollers, particularly those based on multi-touch\ + \ screen interfaces. However, in contrast to the venerable piano-style keyboard,\ + \ touch screen controllers lack the tactile feedback many performers find crucial.\ + \ This paper presents an augmentation system for acoustic and electronic keyboards\ + \ in which multi-touch capacitive sensors are added to the surface of each key.\ + \ Each key records the position of fingers on the surface, and by combining this\ + \ data with MIDI note onsets and aftertouch from the host keyboard, the system\ + \ functions as a multidimensional polyphonic controller for a wide variety of\ + \ synthesis software. The paper will discuss general capacitive touch sensor design,\ + \ keyboard-specific implementation strategies, and the development of a flexible\ + \ mapping engine using OSC and MIDI.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Andrew McPherson},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180531},\n\ + \ issn = {2220-4806},\n keywords = {augmented instruments, keyboard, capacitive\ + \ sensing, multitouch},\n publisher = {University of Michigan},\n title = {TouchKeys:\ + \ Capacitive Multi-Touch Sensing on a Physical Keyboard},\n url = {http://www.nime.org/proceedings/2012/nime2012_195.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177815 + doi: 10.5281/zenodo.1180531 issn: 2220-4806 - keywords: 'Synchronization, Multimodal Signals, Sensor Data Acquisition, Signal - Recording.' 
- pages: 372--374 - title: Synchronization of Multimodal Recordings for Musical Performance Research - url: http://www.nime.org/proceedings/2010/nime2010_372.pdf - year: 2010 + keywords: 'augmented instruments, keyboard, capacitive sensing, multitouch' + publisher: University of Michigan + title: 'TouchKeys: Capacitive Multi-Touch Sensing on a Physical Keyboard' + url: http://www.nime.org/proceedings/2012/nime2012_195.pdf + year: 2012 + + +- ENTRYTYPE: inproceedings + ID: Lai2012 + abstract: 'This paper addresses the issue of engaging the audience with new musical + instruments in live performance context. We introduce design concerns that we + consider influential to enhance the communication flow between the audience and + the performer. We also propose and put in practice a design approach that considers + the use of performance space as a way to engage with the audience. A collaborative + project, Sound Gloves, presented here exemplifies such a concept by dissolving + the space between performers and audience. Our approach resulted in a continuous + interaction between audience and performers, in which the social dynamics was + changed in a positive way in a live performance context of NIMEs. Such an approach, + we argue, may be considered as one way to further engage and interact with the + audience.' + address: 'Ann Arbor, Michigan' + author: Chi-Hsia Lai and Koray Tahiroglu + bibtex: "@inproceedings{Lai2012,\n abstract = {This paper addresses the issue of\ + \ engaging the audience with new musical instruments in live performance context.\ + \ We introduce design concerns that we consider influential to enhance the communication\ + \ flow between the audience and the performer. We also propose and put in practice\ + \ a design approach that considers the use of performance space as a way to engage\ + \ with the audience. 
A collaborative project, Sound Gloves, presented here exemplifies\ + \ such a concept by dissolving the space between performers and audience. Our\ + \ approach resulted in a continuous interaction between audience and performers,\ + \ in which the social dynamics was changed in a positive way in a live performance\ + \ context of NIMEs. Such an approach, we argue, may be considered as one way to\ + \ further engage and interact with the audience.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Chi-Hsia Lai and Koray Tahiroglu},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178309},\n issn = {2220-4806},\n keywords = {NIME, wearable\ + \ electronics, performance, design approach},\n publisher = {University of Michigan},\n\ + \ title = {A Design Approach to Engage with Audience with Wearable Musical Instruments:\ + \ Sound Gloves},\n url = {http://www.nime.org/proceedings/2012/nime2012_197.pdf},\n\ + \ year = {2012}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178309 + issn: 2220-4806 + keywords: 'NIME, wearable electronics, performance, design approach' + publisher: University of Michigan + title: 'A Design Approach to Engage with Audience with Wearable Musical Instruments: + Sound Gloves' + url: http://www.nime.org/proceedings/2012/nime2012_197.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Torre2010 - abstract: 'This paper describes the development of an interactive 3Daudio/visual - and network installation entitled POLLEN.Specifically designed for large computer - Laboratories, theartwork explores the regeneration of those spaces throughthe - creation of a fully immersive multimedia art experience.The paper describes the - technical, aesthetic and educational development of the piece.' 
- address: 'Sydney, Australia' - author: 'Torre, Giuseppe and O''Leary, Mark and Tuohy, Brian' - bibtex: "@inproceedings{Torre2010,\n abstract = {This paper describes the development\ - \ of an interactive 3Daudio/visual and network installation entitled POLLEN.Specifically\ - \ designed for large computer Laboratories, theartwork explores the regeneration\ - \ of those spaces throughthe creation of a fully immersive multimedia art experience.The\ - \ paper describes the technical, aesthetic and educational development of the\ - \ piece.},\n address = {Sydney, Australia},\n author = {Torre, Giuseppe and O'Leary,\ - \ Mark and Tuohy, Brian},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177909},\n\ - \ issn = {2220-4806},\n keywords = {Interactive, Installation, Network, 3D Physics\ - \ Emulator, Educational Tools, Public Spaces, Computer Labs, Sound Design, Site-Specific\ - \ Art},\n pages = {375--376},\n title = {POLLEN A Multimedia Interactive Network\ - \ Installation},\n url = {http://www.nime.org/proceedings/2010/nime2010_375.pdf},\n\ - \ year = {2010}\n}\n" + ID: Nymoen2012 + abstract: 'The paper presents an analysis of the quality of motion data from an + iPod Touch (4th gen.). Acceleration and orientation data derived from internal + sensors of an iPod is com-pared to data from a high end optical infrared marker-based + motion capture system (Qualisys) in terms of latency, jitter, accuracy and precision. + We identify some rotational drift in the iPod, and some time lag between the two + systems. Still, the iPod motion data is quite reliable, especially for describing + relative motion over a short period of time.' + address: 'Ann Arbor, Michigan' + author: 'Kristian Nymoen and Arve Voldsund and Skogstad, Ståle A. 
and Jensenius, + Alexander Refsum and Jim Torresen' + bibtex: "@inproceedings{Nymoen2012,\n abstract = {The paper presents an analysis\ + \ of the quality of motion data from an iPod Touch (4th gen.). Acceleration and\ + \ orientation data derived from internal sensors of an iPod is com-pared to data\ + \ from a high end optical infrared marker-based motion capture system (Qualisys)\ + \ in terms of latency, jitter, accuracy and precision. We identify some rotational\ + \ drift in the iPod, and some time lag between the two systems. Still, the iPod\ + \ motion data is quite reliable, especially for describing relative motion over\ + \ a short period of time.},\n address = {Ann Arbor, Michigan},\n author = {Kristian\ + \ Nymoen and Arve Voldsund and Skogstad, St{\\aa}le A. and Jensenius, Alexander\ + \ Refsum and Jim Torresen},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180553},\n\ + \ issn = {2220-4806},\n publisher = {University of Michigan},\n title = {Comparing\ + \ Motion Data from an iPod Touch to a High-End Optical Infrared Marker-Based Motion\ + \ Capture System},\n url = {http://www.nime.org/proceedings/2012/nime2012_198.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177909 + doi: 10.5281/zenodo.1180553 issn: 2220-4806 - keywords: 'Interactive, Installation, Network, 3D Physics Emulator, Educational - Tools, Public Spaces, Computer Labs, Sound Design, Site-Specific Art' - pages: 375--376 - title: POLLEN A Multimedia Interactive Network Installation - url: http://www.nime.org/proceedings/2010/nime2010_375.pdf - year: 2010 + publisher: University of Michigan + title: Comparing Motion Data from an iPod Touch to a High-End Optical Infrared Marker-Based + Motion Capture System + url: http://www.nime.org/proceedings/2012/nime2012_198.pdf + year: 2012 - ENTRYTYPE: 
inproceedings - ID: Feng2010 - abstract: ' Irregular Incurve is a MIDI controllable robotic string instrument. - The twelve independent string-units compose the complete musical scale of 12 units. - Each string can be plucked by a motor control guitar pick. A MIDI keyboard is - attached to the instrument and serves as an interface for real-time interactions - between the instrument and the audience. Irregular Incurve can also play preprogrammed - music by itself. This paper presents the design concept and the technical solutions - to realizing the functionality of Irregular Incurve. The future features are also - discussed. ' - address: 'Sydney, Australia' - author: 'Feng, Xiaoyang' - bibtex: "@inproceedings{Feng2010,\n abstract = { Irregular Incurve is a MIDI controllable\ - \ robotic string instrument. The twelve independent string-units compose the complete\ - \ musical scale of 12 units. Each string can be plucked by a motor control guitar\ - \ pick. A MIDI keyboard is attached to the instrument and serves as an interface\ - \ for real-time interactions between the instrument and the audience. Irregular\ - \ Incurve can also play preprogrammed music by itself. This paper presents the\ - \ design concept and the technical solutions to realizing the functionality of\ - \ Irregular Incurve. The future features are also discussed. 
},\n address = {Sydney,\ - \ Australia},\n author = {Feng, Xiaoyang},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177765},\n\ - \ issn = {2220-4806},\n keywords = {NIME, Robotics, Acoustic, Interactive, MIDI,\ - \ Real time Performance, String Instrument, Arduino, Servo, Motor Control},\n\ - \ pages = {377--379},\n title = {Irregular Incurve},\n url = {http://www.nime.org/proceedings/2010/nime2010_377.pdf},\n\ - \ year = {2010}\n}\n" + ID: Park2012 + abstract: 'This paper describes an interactive gestural microphone for vocal performance + named Voicon. Voicon is a non-invasive and gesture-sensitive microphone which + allows vocal performers to use natural gestures to create vocal augmentations + and modifications by using embedded sensors in a microphone. Through vocal augmentation + and modulation, the performers can easily generate desired amount of the vibrato + and achieve wider vocal range. These vocal en-hancements will deliberately enrich + the vocal performance both in its expressiveness and the dynamics. Using Voicon, + singers can generate additional vibrato, control the pitch and activate customizable + vocal effect by simple and intuitive gestures in live and recording context.' + address: 'Ann Arbor, Michigan' + author: Yongki Park and Hoon Heo and Kyogu Lee + bibtex: "@inproceedings{Park2012,\n abstract = {This paper describes an interactive\ + \ gestural microphone for vocal performance named Voicon. Voicon is a non-invasive\ + \ and gesture-sensitive microphone which allows vocal performers to use natural\ + \ gestures to create vocal augmentations and modifications by using embedded sensors\ + \ in a microphone. 
Through vocal augmentation and modulation, the performers can\ + \ easily generate desired amount of the vibrato and achieve wider vocal range.\ + \ These vocal en-hancements will deliberately enrich the vocal performance both\ + \ in its expressiveness and the dynamics. Using Voicon, singers can generate additional\ + \ vibrato, control the pitch and activate customizable vocal effect by simple\ + \ and intuitive gestures in live and recording context.},\n address = {Ann Arbor,\ + \ Michigan},\n author = {Yongki Park and Hoon Heo and Kyogu Lee},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1180565},\n issn = {2220-4806},\n keywords\ + \ = {Gesture, Microphone, Vocal Performance, Performance In-terface},\n publisher\ + \ = {University of Michigan},\n title = {Voicon: An Interactive Gestural Microphone\ + \ For Vocal Performance},\n url = {http://www.nime.org/proceedings/2012/nime2012_199.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177765 + doi: 10.5281/zenodo.1180565 issn: 2220-4806 - keywords: 'NIME, Robotics, Acoustic, Interactive, MIDI, Real time Performance, String - Instrument, Arduino, Servo, Motor Control' - pages: 377--379 - title: Irregular Incurve - url: http://www.nime.org/proceedings/2010/nime2010_377.pdf - year: 2010 + keywords: 'Gesture, Microphone, Vocal Performance, Performance In-terface' + publisher: University of Michigan + title: 'Voicon: An Interactive Gestural Microphone For Vocal Performance' + url: http://www.nime.org/proceedings/2012/nime2012_199.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Miyama2010 - abstract: 'Peacock is a newly designed interface for improvisational performances. - The interface is equipped with thirty-five proximity sensors arranged in five - rows and seven columns. 
The sensors detect the movements of a performer''s hands - and arms in a three-dimensional space above them. The interface digitizes the - output of the sensors into sets of high precision digital packets, and sends them - to a patch running in Pdextended with a sufficiently high bandwidth for performances - with almost no computational resource consumption in Pd. The precision, speed, - and efficiency of the system enable the sonification of hand gestures in realtime - without the need to attach any physical devices to the performer''s body. This - paper traces the interface''s evolution, discussing relevant technologies, hardware - construction, system design, and input monitoring. ' - address: 'Sydney, Australia' - author: 'Miyama, Chikashi' - bibtex: "@inproceedings{Miyama2010,\n abstract = {Peacock is a newly designed interface\ - \ for improvisational performances. The interface is equipped with thirty-five\ - \ proximity sensors arranged in five rows and seven columns. The sensors detect\ - \ the movements of a performer's hands and arms in a three-dimensional space above\ - \ them. The interface digitizes the output of the sensors into sets of high precision\ - \ digital packets, and sends them to a patch running in Pdextended with a sufficiently\ - \ high bandwidth for performances with almost no computational resource consumption\ - \ in Pd. The precision, speed, and efficiency of the system enable the sonification\ - \ of hand gestures in realtime without the need to attach any physical devices\ - \ to the performer's body. 
This paper traces the interface's evolution, discussing\ - \ relevant technologies, hardware construction, system design, and input monitoring.\ - \ },\n address = {Sydney, Australia},\n author = {Miyama, Chikashi},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177859},\n issn = {2220-4806},\n keywords\ - \ = {Musical interface, Sensor technologies, Computer music, Hardware and software\ - \ design},\n pages = {380--382},\n title = {Peacock : A Non-Haptic {3D} Performance\ - \ Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_380.pdf},\n\ - \ year = {2010}\n}\n" + ID: Henriques2012 + abstract: 'The Sonik Spring is a portable and wireless digital instrument, created + for real-time synthesis and control of sound. It brings together different types + of sensory input, linking gestural motion and kinesthetic feedback to the production + of sound. The interface consists of a 15-inch spring with unique flexibility, + which allows multiple degrees of variation in its shape and length. The design + of the instrument is described and its features discussed. Three performance modes + are detailed highlighting the instrument''s expressive potential and wide range + of functionality. ' + address: 'Ann Arbor, Michigan' + author: Tomas Henriques + bibtex: "@inproceedings{Henriques2012,\n abstract = {The Sonik Spring is a portable\ + \ and wireless digital instrument, created for real-time synthesis and control\ + \ of sound. It brings together different types of sensory input, linking gestural\ + \ motion and kinesthetic feedback to the production of sound. The interface consists\ + \ of a 15-inch spring with unique flexibility, which allows multiple degrees of\ + \ variation in its shape and length. The design of the instrument is described\ + \ and its features discussed. 
Three performance modes are detailed highlighting\ + \ the instrument's expressive potential and wide range of functionality. },\n\ + \ address = {Ann Arbor, Michigan},\n author = {Tomas Henriques},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178283},\n issn = {2220-4806},\n keywords = {Interface\ + \ for sound and music, Gestural control of sound, Kinesthetic and visual feedback},\n\ + \ publisher = {University of Michigan},\n title = {SONIK SPRING},\n url = {http://www.nime.org/proceedings/2012/nime2012_20.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177859 + doi: 10.5281/zenodo.1178283 issn: 2220-4806 - keywords: 'Musical interface, Sensor technologies, Computer music, Hardware and - software design' - pages: 380--382 - title: 'Peacock : A Non-Haptic 3D Performance Interface' - url: http://www.nime.org/proceedings/2010/nime2010_380.pdf - year: 2010 + keywords: 'Interface for sound and music, Gestural control of sound, Kinesthetic + and visual feedback' + publisher: University of Michigan + title: SONIK SPRING + url: http://www.nime.org/proceedings/2012/nime2012_20.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Holm2010 - abstract: 'Music recommendation systems can observe user''s personal preferences - and suggest new tracks from a large online catalog. In the case of context-aware - recommenders, user''s current emotional state plays an important role. One simple - way to visualize emotions and moods is graphical emoticons. In this study, we - researched a high-level mapping between genres, as descriptions of music, and - emoticons, as descriptions of emotions and moods. An online questionnaire with - 87 participants was arranged. 
Based on the results, we present a list of genres - that could be used as a starting point for making recommendations fitting the - current mood of the user. ' - address: 'Sydney, Australia' - author: 'Holm, Jukka and Holm, Harri and Seppänen, Jarno' - bibtex: "@inproceedings{Holm2010,\n abstract = {Music recommendation systems can\ - \ observe user's personal preferences and suggest new tracks from a large online\ - \ catalog. In the case of context-aware recommenders, user's current emotional\ - \ state plays an important role. One simple way to visualize emotions and moods\ - \ is graphical emoticons. In this study, we researched a high-level mapping between\ - \ genres, as descriptions of music, and emoticons, as descriptions of emotions\ - \ and moods. An online questionnaire with 87 participants was arranged. Based\ - \ on the results, we present a list of genres that could be used as a starting\ - \ point for making recommendations fitting the current mood of the user. },\n\ - \ address = {Sydney, Australia},\n author = {Holm, Jukka and Holm, Harri and Sepp\\\ - ''{a}nen, Jarno},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177809},\n\ - \ issn = {2220-4806},\n keywords = {Music, music recommendation, context, facial\ - \ expression, mood, emotion, emoticon, and musical genre.},\n pages = {383--386},\n\ - \ title = {Associating Emoticons with Musical Genres},\n url = {http://www.nime.org/proceedings/2010/nime2010_383.pdf},\n\ - \ year = {2010}\n}\n" + ID: Menzies2012 + abstract: "The Highland piping tradition requires the performer to learn and accurately\ + \ reproduce a diverse array of ornaments, which can be a daunting prospect to\ + \ the novice piper. This paper presents a system which analyses a player's technique\ + \ using sensor data obtained from an electronic bagpipe chanter interface. 
Automatic\ + \ recognition of a broad range of piping embellishments allows real-time visual\ + \ feedback to be generated, enabling the learner to ensure that they are practicing\ + \ each movement correctly.\nThe electronic chanter employs a robust and responsive\ + \ infrared (IR) sensing strategy, and uses audio samples from acoustic recordings\ + \ to produce a high quality bagpipe sound. Moreover, the continuous nature of\ + \ the IR sensors offers the controller a considerable degree of flexibility, indicating\ + \ sig-nificant potential for the inclusion of extended and novel techniques for\ + \ musical expression in the future." + address: 'Ann Arbor, Michigan' + author: Duncan Menzies and Andrew McPherson + bibtex: "@inproceedings{Menzies2012,\n abstract = {The Highland piping tradition\ + \ requires the performer to learn and accurately reproduce a diverse array of\ + \ ornaments, which can be a daunting prospect to the novice piper. This paper\ + \ presents a system which analyses a player's technique using sensor data obtained\ + \ from an electronic bagpipe chanter interface. Automatic recognition of a broad\ + \ range of piping embellishments allows real-time visual feedback to be generated,\ + \ enabling the learner to ensure that they are practicing each movement correctly.\n\ + The electronic chanter employs a robust and responsive infrared (IR) sensing strategy,\ + \ and uses audio samples from acoustic recordings to produce a high quality bagpipe\ + \ sound. 
Moreover, the continuous nature of the IR sensors offers the controller\ + \ a considerable degree of flexibility, indicating sig-nificant potential for\ + \ the inclusion of extended and novel techniques for musical expression in the\ + \ future.},\n address = {Ann Arbor, Michigan},\n author = {Duncan Menzies and\ + \ Andrew McPherson},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180537},\n\ + \ issn = {2220-4806},\n keywords = {Great Highland Bagpipe, continuous infrared\ + \ sensors, ornament recognition, practice tool, SuperCollider, OSC.},\n publisher\ + \ = {University of Michigan},\n title = {An Electronic Bagpipe Chanter for Automatic\ + \ Recognition of Highland Piping Ornamentation},\n url = {http://www.nime.org/proceedings/2012/nime2012_200.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177809 + doi: 10.5281/zenodo.1180537 issn: 2220-4806 - keywords: 'Music, music recommendation, context, facial expression, mood, emotion, - emoticon, and musical genre.' - pages: 383--386 - title: Associating Emoticons with Musical Genres - url: http://www.nime.org/proceedings/2010/nime2010_383.pdf - year: 2010 + keywords: 'Great Highland Bagpipe, continuous infrared sensors, ornament recognition, + practice tool, SuperCollider, OSC.' + publisher: University of Michigan + title: An Electronic Bagpipe Chanter for Automatic Recognition of Highland Piping + Ornamentation + url: http://www.nime.org/proceedings/2012/nime2012_200.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Nagashima2010 - abstract: This paper is a report on the development of a new musical instrument - in which the main concept is "Untouchable". The key concept of this instrument - is "sound generation by body gesture (both hands)" and "sound generation by kneading - with hands". 
The new composition project had completed as the premiere of a new - work "controllable untouchableness" with this new instrument in December 2009. - address: 'Sydney, Australia' - author: 'Nagashima, Yoichi' - bibtex: "@inproceedings{Nagashima2010,\n abstract = {This paper is a report on the\ - \ development of a new musical instrument in which the main concept is \"Untouchable\"\ - . The key concept of this instrument is \"sound generation by body gesture (both\ - \ hands)\" and \"sound generation by kneading with hands\". The new composition\ - \ project had completed as the premiere of a new work \"controllable untouchableness\"\ - \ with this new instrument in December 2009.},\n address = {Sydney, Australia},\n\ - \ author = {Nagashima, Yoichi},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177865},\n\ - \ issn = {2220-4806},\n keywords = {Theremin, untouchable, distance sensor, Propeller\ - \ processor},\n pages = {387--390},\n title = {Untouchable Instrument \"Peller-Min\"\ - },\n url = {http://www.nime.org/proceedings/2010/nime2010_387.pdf},\n year = {2010}\n\ - }\n" + ID: Gong2012 + abstract: 'This paper describes a novel music control sensate surface, which enables + integration between any musical instruments with a v ersatile, customizable, and + essentially cost-effective user interface. This sensate surface is based on c + onductive inkjet printing technology which allows capacitive sensor electrodes + and connections between electronics components to be printed onto a large roll + of flexible substrate that is unrestricted in length. The high dynamic range capacitive + sensing electrodes can not only infer touch, but near-range, non-contact gestural + nuance in a music performance. 
With this sensate surface, users can ``cut'''' + out their desired shapes, ``paste'''' the number of inputs, and customize their + controller interface, which can then send signals wirelessly to effects or software + synthesizers. We seek to find a solution for integrating the form factor of traditional + music controllers seamlessly on top of one''s music instrument and meanwhile adding + expressiveness to the music performance by sensing and incorporating movements + and gestures to manipulate the musical output. We present an example of implementation + on an electric ukulele and provide several design examples to demonstrate the + versatile capabilities of this system.' + address: 'Ann Arbor, Michigan' + author: Nan-Wei Gong and Nan Zhao and Joseph Paradiso + bibtex: "@inproceedings{Gong2012,\n abstract = {This paper describes a novel music\ + \ control sensate surface, which enables integration between any musical instruments\ + \ with a v ersatile, customizable, and essentially cost-effective user interface.\ + \ This sensate surface is based on c onductive inkjet printing technology which\ + \ allows capacitive sensor electrodes and connections between electronics components\ + \ to be printed onto a large roll of flexible substrate that is unrestricted in\ + \ length. The high dynamic range capacitive sensing electrodes can not only infer\ + \ touch, but near-range, non-contact gestural nuance in a music performance. With\ + \ this sensate surface, users can ``cut'' out their desired shapes, ``paste''\ + \ the number of inputs, and customize their controller interface, which can then\ + \ send signals wirelessly to effects or software synthesizers. We seek to find\ + \ a solution for integrating the form factor of traditional music controllers\ + \ seamlessly on top of one's music instrument and meanwhile adding expressiveness\ + \ to the music performance by sensing and incorporating movements and gestures\ + \ to manipulate the musical output. 
We present an example of implementation on\ + \ an electric ukulele and provide several design examples to demonstrate the versatile\ + \ capabilities of this system.},\n address = {Ann Arbor, Michigan},\n author =\ + \ {Nan-Wei Gong and Nan Zhao and Joseph Paradiso},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178267},\n issn = {2220-4806},\n keywords = {Sensate\ + \ surface, music controller skin, customizable controller surface, flexible electronics},\n\ + \ publisher = {University of Michigan},\n title = {A Customizable Sensate Surface\ + \ for Music Control},\n url = {http://www.nime.org/proceedings/2012/nime2012_201.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177865 + doi: 10.5281/zenodo.1178267 issn: 2220-4806 - keywords: 'Theremin, untouchable, distance sensor, Propeller processor' - pages: 387--390 - title: Untouchable Instrument "Peller-Min" - url: http://www.nime.org/proceedings/2010/nime2010_387.pdf - year: 2010 + keywords: 'Sensate surface, music controller skin, customizable controller surface, + flexible electronics' + publisher: University of Michigan + title: A Customizable Sensate Surface for Music Control + url: http://www.nime.org/proceedings/2012/nime2012_201.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Jaimovich2010 - abstract: 'This paper describes the design, implementation and outcome of Ground - Me!, an interactive sound installation set up in the Sonic Lab of the Sonic Arts - Research Centre. The site-specific interactive installation consists of multiple - copper poles hanging from the Sonic Lab''s ceiling panels, which trigger samples - of electricity sounds when grounded through the visitor''s'' body to the space''s - metallic floor. 
' - address: 'Sydney, Australia' - author: 'Jaimovich, Javier' - bibtex: "@inproceedings{Jaimovich2010,\n abstract = {This paper describes the design,\ - \ implementation and outcome of Ground Me!, an interactive sound installation\ - \ set up in the Sonic Lab of the Sonic Arts Research Centre. The site-specific\ - \ interactive installation consists of multiple copper poles hanging from the\ - \ Sonic Lab's ceiling panels, which trigger samples of electricity sounds when\ - \ grounded through the visitor's' body to the space's metallic floor. },\n address\ - \ = {Sydney, Australia},\n author = {Jaimovich, Javier},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177813},\n issn = {2220-4806},\n keywords = {Interactive\ - \ sound installation, body impedance, skin conductivity, site-specific sound installation,\ - \ human network, Sonic Lab, Arduino.},\n pages = {391--394},\n title = {Ground\ - \ Me ! An Interactive Sound Art Installation},\n url = {http://www.nime.org/proceedings/2010/nime2010_391.pdf},\n\ - \ year = {2010}\n}\n" + ID: Schlessinger2012 + abstract: 'We have developed a prototype wireless microphone that provides vocalists + with control over their vocal effects directly from the body of the microphone. + A wireless microphone has been augmented with six momentary switches, one fader, + and three axes of motion and position sensors, all of which provide MIDI output + from the wireless receiver. The MIDI data is used to control external vocal effects + units such as live loopers, reverbs, distortion pedals, etc. The goal was to to + provide dramatically increased expressive control to vocal performances, and address + some of the shortcomings of pedal-controlled effects. The addition of gestural + controls from the motion sensors opens up new performance possibilities such as + panning the voice simply by pointing the microphone in one direction or another. 
+ The result is a hybrid microphone-musical instrument which has recieved extremely + positive results from vocalists in numerous infor-mal workshops.' + address: 'Ann Arbor, Michigan' + author: Dan Moses Schlessinger + bibtex: "@inproceedings{Schlessinger2012,\n abstract = {We have developed a prototype\ + \ wireless microphone that provides vocalists with control over their vocal effects\ + \ directly from the body of the microphone. A wireless microphone has been augmented\ + \ with six momentary switches, one fader, and three axes of motion and position\ + \ sensors, all of which provide MIDI output from the wireless receiver. The MIDI\ + \ data is used to control external vocal effects units such as live loopers, reverbs,\ + \ distortion pedals, etc. The goal was to to provide dramatically increased expressive\ + \ control to vocal performances, and address some of the shortcomings of pedal-controlled\ + \ effects. The addition of gestural controls from the motion sensors opens up\ + \ new performance possibilities such as panning the voice simply by pointing the\ + \ microphone in one direction or another. 
The result is a hybrid microphone-musical\ + \ instrument which has recieved extremely positive results from vocalists in numerous\ + \ infor-mal workshops.},\n address = {Ann Arbor, Michigan},\n author = {Dan Moses\ + \ Schlessinger},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180591},\n\ + \ issn = {2220-4806},\n keywords = {NIME, Sennheiser, Concept Tahoe, MIDI, control,\ + \ microphone},\n publisher = {University of Michigan},\n title = {Concept Tahoe:\ + \ Microphone Midi Control},\n url = {http://www.nime.org/proceedings/2012/nime2012_202.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177813 + doi: 10.5281/zenodo.1180591 issn: 2220-4806 - keywords: 'Interactive sound installation, body impedance, skin conductivity, site-specific - sound installation, human network, Sonic Lab, Arduino.' - pages: 391--394 - title: Ground Me ! An Interactive Sound Art Installation - url: http://www.nime.org/proceedings/2010/nime2010_391.pdf - year: 2010 + keywords: 'NIME, Sennheiser, Concept Tahoe, MIDI, control, microphone' + publisher: University of Michigan + title: 'Concept Tahoe: Microphone Midi Control' + url: http://www.nime.org/proceedings/2012/nime2012_202.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Savage2010 - abstract: 'This paper presents Mmmmm; a Multimodal Mobile Music Mixer that provides - DJs a new interface for mixing musicon the Nokia N900 phones. Mmmmm presents a - novel way for DJ to become more interactive with their audience andvise versa. 
- The software developed for the N900 mobilephone utilizes the phones built-in accelerometer - sensor andBluetooth audio streaming capabilities to mix and apply effects to music - using hand gestures and have the mixed audiostream to Bluetooth speakers, which - allows the DJ to moveabout the environment and get familiarized with their audience, - turning the experience of DJing into an interactiveand audience engaging process. - Mmmmm is designed so that the DJ can utilize handgestures and haptic feedback - to help them perform the various tasks involved in DJing (mixing, applying effects, - andetc). This allows the DJ to focus on the crowd, thus providing the DJ a better - intuition of what kind of music ormusical mixing style the audience is more likely - to enjoyand engage with. Additionally, Mmmmm has an Ambient Tempo Detection mode - in which the phones camera is utilized to detect the amount of movement in the - environment and suggest to the DJ the tempo of music that should be played. This - mode utilizes frame differencing and pixelchange overtime to get a sense of how - fast the environmentis changing, loosely correlating to how fast the audience - isdancing or the lights are flashing in the scene. By determining the ambient - tempo of the environment the DJ canget a better sense for the type of music that - would fit bestfor their venue.Mmmmm helps novice DJs achieve a better music repertoire - by allowing them to interact with their audience andreceive direct feedback on - their performance. The DJ canchoose to utilize these modes of interaction and - performance or utilize traditional DJ controls using MmmmmsN900 touch screen based - graphics user interface.' - address: 'Sydney, Australia' - author: 'Savage, Norma S. and Ali, Syed R. and Chavez, Norma E.' - bibtex: "@inproceedings{Savage2010,\n abstract = {This paper presents Mmmmm; a Multimodal\ - \ Mobile Music Mixer that provides DJs a new interface for mixing musicon the\ - \ Nokia N900 phones. 
Mmmmm presents a novel way for DJ to become more interactive\ - \ with their audience andvise versa. The software developed for the N900 mobilephone\ - \ utilizes the phones built-in accelerometer sensor andBluetooth audio streaming\ - \ capabilities to mix and apply effects to music using hand gestures and have\ - \ the mixed audiostream to Bluetooth speakers, which allows the DJ to moveabout\ - \ the environment and get familiarized with their audience, turning the experience\ - \ of DJing into an interactiveand audience engaging process. Mmmmm is designed\ - \ so that the DJ can utilize handgestures and haptic feedback to help them perform\ - \ the various tasks involved in DJing (mixing, applying effects, andetc). This\ - \ allows the DJ to focus on the crowd, thus providing the DJ a better intuition\ - \ of what kind of music ormusical mixing style the audience is more likely to\ - \ enjoyand engage with. Additionally, Mmmmm has an Ambient Tempo Detection mode\ - \ in which the phones camera is utilized to detect the amount of movement in the\ - \ environment and suggest to the DJ the tempo of music that should be played.\ - \ This mode utilizes frame differencing and pixelchange overtime to get a sense\ - \ of how fast the environmentis changing, loosely correlating to how fast the\ - \ audience isdancing or the lights are flashing in the scene. By determining the\ - \ ambient tempo of the environment the DJ canget a better sense for the type of\ - \ music that would fit bestfor their venue.Mmmmm helps novice DJs achieve a better\ - \ music repertoire by allowing them to interact with their audience andreceive\ - \ direct feedback on their performance. The DJ canchoose to utilize these modes\ - \ of interaction and performance or utilize traditional DJ controls using MmmmmsN900\ - \ touch screen based graphics user interface.},\n address = {Sydney, Australia},\n\ - \ author = {Savage, Norma S. and Ali, Syed R. 
and Chavez, Norma E.},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.1177887},\n issn = {2220-4806},\n keywords\ - \ = {Multi-modal, interaction, music, mixer, mobile, interactive, DJ, smart phones,\ - \ Nokia, n900, touch screen, accelerometer, phone, audience},\n pages = {395--398},\n\ - \ title = {Mmmmm: A Multi-modal Mobile Music Mixer},\n url = {http://www.nime.org/proceedings/2010/nime2010_395.pdf},\n\ - \ year = {2010}\n}\n" + ID: Yang2012 + abstract: 'We augment the piano keyboard with a 3D gesture space using Microsoft + Kinect for sensing and top-down projection for visual feedback. This interface + provides multi-axial gesture controls to enable continuous adjustments to multiple + acoustic parameters such as those on the typical digital synthesizers. We believe + that using gesture control is more visceral and aesthetically pleasing, especially + during concert performance where the visibility of the performer''s action is + important. Our system can also be used for other types of gesture interaction + as well as for pedagogical applications.' + address: 'Ann Arbor, Michigan' + author: Qi Yang and Georg Essl + bibtex: "@inproceedings{Yang2012,\n abstract = {We augment the piano keyboard with\ + \ a 3D gesture space using Microsoft Kinect for sensing and top-down projection\ + \ for visual feedback. This interface provides multi-axial gesture controls to\ + \ enable continuous adjustments to multiple acoustic parameters such as those\ + \ on the typical digital synthesizers. We believe that using gesture control is\ + \ more visceral and aesthetically pleasing, especially during concert performance\ + \ where the visibility of the performer's action is important. 
Our system can\ + \ also be used for other types of gesture interaction as well as for pedagogical\ + \ applications.},\n address = {Ann Arbor, Michigan},\n author = {Qi Yang and Georg\ + \ Essl},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178455},\n issn = {2220-4806},\n\ + \ keywords = {NIME, piano, depth camera, musical instrument, gesture, tabletop\ + \ projection},\n publisher = {University of Michigan},\n title = {Augmented Piano\ + \ Performance using a Depth Camera},\n url = {http://www.nime.org/proceedings/2012/nime2012_203.pdf},\n\ + \ year = {2012}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178455 + issn: 2220-4806 + keywords: 'NIME, piano, depth camera, musical instrument, gesture, tabletop projection' + publisher: University of Michigan + title: Augmented Piano Performance using a Depth Camera + url: http://www.nime.org/proceedings/2012/nime2012_203.pdf + year: 2012 + + +- ENTRYTYPE: inproceedings + ID: Torresen2012 + abstract: 'We present a new wireless transceiver board for the CUI32 sensor interface, + aimed at creating a solution that is flexible, reliable, and with little power + consumption. Communica-tion with the board is based on the ZigFlea protocol and + it has been evaluated on a CUI32 using the StickOS oper-ating system. Experiments + show that the total sensor data collection time is linearly increasing with the + number of sensor samples used. A data rate of 0.8 kbit/s is achieved for wirelessly + transmitting three axes of a 3D accelerometer. Although this data rate is low + compared to other systems, our solution benefits from ease-of-use and stability, + and is useful for applications that are not time-critical.' + address: 'Ann Arbor, Michigan' + author: 'Jim Torresen and Hauback, Øyvind N. 
and Dan Overholt and Jensenius, Alexander + Refsum' + bibtex: "@inproceedings{Torresen2012,\n abstract = {We present a new wireless transceiver\ + \ board for the CUI32 sensor interface, aimed at creating a solution that is flexible,\ + \ reliable, and with little power consumption. Communica-tion with the board is\ + \ based on the ZigFlea protocol and it has been evaluated on a CUI32 using the\ + \ StickOS oper-ating system. Experiments show that the total sensor data collection\ + \ time is linearly increasing with the number of sensor samples used. A data rate\ + \ of 0.8 kbit/s is achieved for wirelessly transmitting three axes of a 3D accelerometer.\ + \ Although this data rate is low compared to other systems, our solution benefits\ + \ from ease-of-use and stability, and is useful for applications that are not\ + \ time-critical.},\n address = {Ann Arbor, Michigan},\n author = {Jim Torresen\ + \ and Hauback, Øyvind N. and Dan Overholt and Jensenius, Alexander Refsum},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178433},\n issn = {2220-4806},\n\ + \ keywords = {wireless sensing, CUI32, StickOS, ZigBee, ZigFlea},\n publisher\ + \ = {University of Michigan},\n title = {Development and Evaluation of a ZigFlea-based\ + \ Wireless Transceiver Board for CUI32},\n url = {http://www.nime.org/proceedings/2012/nime2012_205.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177887 + doi: 10.5281/zenodo.1178433 issn: 2220-4806 - keywords: 'Multi-modal, interaction, music, mixer, mobile, interactive, DJ, smart - phones, Nokia, n900, touch screen, accelerometer, phone, audience' - pages: 395--398 - title: 'Mmmmm: A Multi-modal Mobile Music Mixer' - url: http://www.nime.org/proceedings/2010/nime2010_395.pdf - year: 2010 + keywords: 'wireless sensing, CUI32, StickOS, ZigBee, ZigFlea' 
+ publisher: University of Michigan + title: Development and Evaluation of a ZigFlea-based Wireless Transceiver Board + for CUI32 + url: http://www.nime.org/proceedings/2012/nime2012_205.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Tsai2010 - abstract: 'With the decreasing audience of classical music performance, this research - aims to develop a performance-enhancement system, called AIDA, to help classical - performers better communicating with their audiences. With three procedures Input-Processing-Output, - AIDA system can sense and analyze the body information of performers and further - reflect it onto the responsive skin. Thus abstract and intangible emotional expressions - of performers are transformed into tangible and concrete visual elements, which - clearly facilitating the audiences'' threshold for music appreciation. ' - address: 'Sydney, Australia' - author: 'Tsai, Chih-Chieh and Liu, Cha-Lin and Chang, Teng-Wen' - bibtex: "@inproceedings{Tsai2010,\n abstract = {With the decreasing audience of\ - \ classical music performance, this research aims to develop a performance-enhancement\ - \ system, called AIDA, to help classical performers better communicating with\ - \ their audiences. With three procedures Input-Processing-Output, AIDA system\ - \ can sense and analyze the body information of performers and further reflect\ - \ it onto the responsive skin. Thus abstract and intangible emotional expressions\ - \ of performers are transformed into tangible and concrete visual elements, which\ - \ clearly facilitating the audiences' threshold for music appreciation. 
},\n address\ - \ = {Sydney, Australia},\n author = {Tsai, Chih-Chieh and Liu, Cha-Lin and Chang,\ - \ Teng-Wen},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177915},\n issn\ - \ = {2220-4806},\n keywords = {Interactive Performance, Ambient Environment, Responsive\ - \ Skin, Music performance.},\n pages = {399--402},\n title = {An Interactive Responsive\ - \ Skin for Music},\n url = {http://www.nime.org/proceedings/2010/nime2010_399.pdf},\n\ - \ year = {2010}\n}\n" + ID: Makelberge2012 + abstract: '''''Perfect Take'''' is a public installation out of networked acoustic + instruments that let composers from all over the world exhibit their MIDI-works + by means of the Internet. The primary aim of this system is to offer composers + a way to have works exhibited and recorded in venues and with technologies not + accessible to him/her under normal circumstances. The Secondary aim of this research + is to highlight experience design as a complement to interaction design, and a + shift of focus from functionality of a specific gestural controller, towards the + environments, events and processes that they are part of.' + address: 'Ann Arbor, Michigan' + author: Nicolas Makelberge and Álvaro Barbosa and André Perrotta and Luís Sarmento + Ferreira + bibtex: "@inproceedings{Makelberge2012,\n abstract = {''Perfect Take'' is a public\ + \ installation out of networked acoustic instruments that let composers from all\ + \ over the world exhibit their MIDI-works by means of the Internet. 
The primary\ + \ aim of this system is to offer composers a way to have works exhibited and recorded\ + \ in venues and with technologies not accessible to him/her under normal circumstances.\ + \ The Secondary aim of this research is to highlight experience design as a complement\ + \ to interaction design, and a shift of focus from functionality of a specific\ + \ gestural controller, towards the environments, events and processes that they\ + \ are part of.},\n address = {Ann Arbor, Michigan},\n author = {Nicolas Makelberge\ + \ and {\\'A}lvaro Barbosa and Andr{\\'e} Perrotta and Lu{\\'i}s Sarmento Ferreira},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178339},\n issn = {2220-4806},\n\ + \ keywords = {NIME, Networked Music, MIDI, Disklavier, music collaboration, creativity},\n\ + \ publisher = {University of Michigan},\n title = {Perfect Take: Experience design\ + \ and new interfaces for musical expression},\n url = {http://www.nime.org/proceedings/2012/nime2012_208.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177915 + doi: 10.5281/zenodo.1178339 issn: 2220-4806 - keywords: 'Interactive Performance, Ambient Environment, Responsive Skin, Music - performance.' 
- pages: 399--402 - title: An Interactive Responsive Skin for Music - url: http://www.nime.org/proceedings/2010/nime2010_399.pdf - year: 2010 + keywords: 'NIME, Networked Music, MIDI, Disklavier, music collaboration, creativity' + publisher: University of Michigan + title: 'Perfect Take: Experience design and new interfaces for musical expression' + url: http://www.nime.org/proceedings/2012/nime2012_208.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: BryanKinns2010 - abstract: 'In this paper we outline the emerging field of Interactional Sound and - Music which concerns itself with multi-person technologically mediated interactions - primarily using audio. We present several examples of interactive systems in our - group, and reflect on how they were designed and evaluated. Evaluation techniques - for collective, performative, and task oriented activities are outlined and compared. - We emphasise the importance of designing for awareness in these systems, and provide - examples of different awareness mechanisms. ' - address: 'Sydney, Australia' - author: 'Bryan-Kinns, Nick and Fencott, Robin and Metatla, Oussama and Nabavian, - Shahin and Sheridan, Jennifer G.' - bibtex: "@inproceedings{BryanKinns2010,\n abstract = {In this paper we outline the\ - \ emerging field of Interactional Sound and Music which concerns itself with multi-person\ - \ technologically mediated interactions primarily using audio. We present several\ - \ examples of interactive systems in our group, and reflect on how they were designed\ - \ and evaluated. Evaluation techniques for collective, performative, and task\ - \ oriented activities are outlined and compared. We emphasise the importance of\ - \ designing for awareness in these systems, and provide examples of different\ - \ awareness mechanisms. 
},\n address = {Sydney, Australia},\n author = {Bryan-Kinns,\ - \ Nick and Fencott, Robin and Metatla, Oussama and Nabavian, Shahin and Sheridan,\ - \ Jennifer G.},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177727},\n\ - \ issn = {2220-4806},\n keywords = {Interactional, sound, music, mutual engagement,\ - \ improvisation, composition, collaboration, awareness.},\n pages = {403--406},\n\ - \ title = {Interactional Sound and Music : Listening to CSCW, Sonification, and\ - \ Sound Art},\n url = {http://www.nime.org/proceedings/2010/nime2010_403.pdf},\n\ - \ year = {2010}\n}\n" + ID: Han2012a + abstract: 'FutureGrab is a new wearable musical instrument for live performance + that is highly intuitive while still generating an interesting sound by subtractive + synthesis. Its sound effects resemble the human vowel pronunciation, which were + mapped to hand gestures that are similar to the mouth shape of human to pronounce + corresponding vowel. FutureGrab also provides all necessary features for a lead + musical instrument such as pitch control, trigger, glissando and key adjustment. + In addition, pitch indicator was added to give visual feedback to the performer, + which can reduce the mistakes during live performances. This paper describes the + motivation, system design, mapping strategy and implementation of FutureGrab, + and evaluates the overall experience.' + address: 'Ann Arbor, Michigan' + author: Yoonchang Han and Jinsoo Na and Kyogu Lee + bibtex: "@inproceedings{Han2012a,\n abstract = {FutureGrab is a new wearable musical\ + \ instrument for live performance that is highly intuitive while still generating\ + \ an interesting sound by subtractive synthesis. Its sound effects resemble the\ + \ human vowel pronunciation, which were mapped to hand gestures that are similar\ + \ to the mouth shape of human to pronounce corresponding vowel. 
FutureGrab also\ + \ provides all necessary features for a lead musical instrument such as pitch\ + \ control, trigger, glissando and key adjustment. In addition, pitch indicator\ + \ was added to give visual feedback to the performer, which can reduce the mistakes\ + \ during live performances. This paper describes the motivation, system design,\ + \ mapping strategy and implementation of FutureGrab, and evaluates the overall\ + \ experience.},\n address = {Ann Arbor, Michigan},\n author = {Yoonchang Han and\ + \ Jinsoo Na and Kyogu Lee},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178271},\n\ + \ issn = {2220-4806},\n keywords = {Wearable musical instrument, Pure Data, gestural\ + \ synthesis, formant synthesis, data-glove, visual feedback, subtractive synthesis},\n\ + \ publisher = {University of Michigan},\n title = {FutureGrab: A wearable subtractive\ + \ synthesizer using hand gesture},\n url = {http://www.nime.org/proceedings/2012/nime2012_209.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177727 + doi: 10.5281/zenodo.1178271 issn: 2220-4806 - keywords: 'Interactional, sound, music, mutual engagement, improvisation, composition, - collaboration, awareness.' 
- pages: 403--406 - title: 'Interactional Sound and Music : Listening to CSCW, Sonification, and Sound - Art' - url: http://www.nime.org/proceedings/2010/nime2010_403.pdf - year: 2010 + keywords: 'Wearable musical instrument, Pure Data, gestural synthesis, formant synthesis, + data-glove, visual feedback, subtractive synthesis' + publisher: University of Michigan + title: 'FutureGrab: A wearable subtractive synthesizer using hand gesture' + url: http://www.nime.org/proceedings/2012/nime2012_209.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Skogstad2010 - abstract: 'The paper presents a conceptual overview of how optical infrared marker - based motion capture systems (IrMoCap) can be used in musical interaction. First - we present a review of related work of using IrMoCap for musical control. This - is followed by a discussion of possible features which can be exploited. Finally, - the question of mapping movement features to sound features is presented and discussed.' - address: 'Sydney, Australia' - author: 'Skogstad, Ståle A. and Jensenius, Alexander Refsum and Nymoen, Kristian' - bibtex: "@inproceedings{Skogstad2010,\n abstract = {The paper presents a conceptual\ - \ overview of how optical infrared marker based motion capture systems (IrMoCap)\ - \ can be used in musical interaction. First we present a review of related work\ - \ of using IrMoCap for musical control. This is followed by a discussion of possible\ - \ features which can be exploited. Finally, the question of mapping movement features\ - \ to sound features is presented and discussed.},\n address = {Sydney, Australia},\n\ - \ author = {Skogstad, Ståle A. 
and Jensenius, Alexander Refsum and Nymoen, Kristian},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177895},\n issn = {2220-4806},\n\ - \ keywords = {nime10},\n pages = {407--410},\n title = {Using {IR} Optical Marker\ - \ Based Motion Capture for Exploring Musical Interaction},\n url = {http://www.nime.org/proceedings/2010/nime2010_407.pdf},\n\ - \ year = {2010}\n}\n" + ID: Wierenga2012 + abstract: 'In an attempt to utilize the expert pianist''s technique and spare bandwidth, + a new keyboard-based instrument augmented by sensors suggested by the examination + of existing acoustic instruments is introduced. The complete instrument includes + a keyboard, various pedals and knee levers, several bowing controllers, and breath + and embouchure sensors connected to an Arduino microcontroller that sends sensor + data to a laptop running Max/MSP, where custom software maps the data to synthesis + algorithms. The audio is output to a digital amplifier powering a transducer mounted + on a resonator box to which several of the sensors are attached. Careful sensor + selection and mapping help to facilitate performance mode.' + address: 'Ann Arbor, Michigan' + author: Red Wierenga + bibtex: "@inproceedings{Wierenga2012,\n abstract = {In an attempt to utilize the\ + \ expert pianist's technique and spare bandwidth, a new keyboard-based instrument\ + \ augmented by sensors suggested by the examination of existing acoustic instruments\ + \ is introduced. The complete instrument includes a keyboard, various pedals and\ + \ knee levers, several bowing controllers, and breath and embouchure sensors connected\ + \ to an Arduino microcontroller that sends sensor data to a laptop running Max/MSP,\ + \ where custom software maps the data to synthesis algorithms. 
The audio is output\ + \ to a digital amplifier powering a transducer mounted on a resonator box to which\ + \ several of the sensors are attached. Careful sensor selection and mapping help\ + \ to facilitate performance mode.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Red Wierenga},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178451},\n\ + \ issn = {2220-4806},\n keywords = {Gesture, controllers, Digital Musical Instrument,\ + \ keyboard},\n publisher = {University of Michigan},\n title = {A New Keyboard-Based,\ + \ Sensor-Augmented Instrument For Live Performance},\n url = {http://www.nime.org/proceedings/2012/nime2012_211.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177895 + doi: 10.5281/zenodo.1178451 issn: 2220-4806 - keywords: nime10 - pages: 407--410 - title: Using IR Optical Marker Based Motion Capture for Exploring Musical Interaction - url: http://www.nime.org/proceedings/2010/nime2010_407.pdf - year: 2010 + keywords: 'Gesture, controllers, Digital Musical Instrument, keyboard' + publisher: University of Michigan + title: 'A New Keyboard-Based, Sensor-Augmented Instrument For Live Performance' + url: http://www.nime.org/proceedings/2012/nime2012_211.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Buch2010 - abstract: 'In this study artistic human-robot interaction design is introduced as - a means for scientific research and artistic investigations. It serves as a methodology - for situated cognitionintegrating empirical methodology and computational modeling, - and is exemplified by the installation playing robot.Its artistic purpose is to - aid to create and explore robots as anew medium for art and entertainment. 
We - discuss the useof finite state machines to organize robots'' behavioral reactions - to sensor data, and give a brief outlook on structuredobservation as a potential - method for data collection.' - address: 'Sydney, Australia' - author: 'Buch, Benjamin and Coussement, Pieter and Schmidt, Lüder' - bibtex: "@inproceedings{Buch2010,\n abstract = {In this study artistic human-robot\ - \ interaction design is introduced as a means for scientific research and artistic\ - \ investigations. It serves as a methodology for situated cognitionintegrating\ - \ empirical methodology and computational modeling, and is exemplified by the\ - \ installation playing robot.Its artistic purpose is to aid to create and explore\ - \ robots as anew medium for art and entertainment. We discuss the useof finite\ - \ state machines to organize robots' behavioral reactions to sensor data, and\ - \ give a brief outlook on structuredobservation as a potential method for data\ - \ collection.},\n address = {Sydney, Australia},\n author = {Buch, Benjamin and\ - \ Coussement, Pieter and Schmidt, L\\''{u}der},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177729},\n issn = {2220-4806},\n keywords = {dynamic mapping,embodiment,finite\ - \ state au-,human-robot interaction,new media art,nime10,structured,tomata},\n\ - \ pages = {411--414},\n title = {''playing robot'' : An Interactive Sound Installation\ - \ in Human-Robot Interaction Design for New Media Art},\n url = {http://www.nime.org/proceedings/2010/nime2010_411.pdf},\n\ - \ year = {2010}\n}\n" + ID: Savary2012 + abstract: 'Dirty Tangible Interfaces (DIRTI) are a new concept in interface design + that forgoes the dogma of repeatability in favor of a richer and more complex + experience, constantly evolving, never reversible, and infinitely modifiable. 
+ We built a prototype based on granular or liquid interaction material placed in + a glass dish, that is analyzed by video tracking for its 3D relief. This relief, + and the dynamic changes applied to it by the user, are interpreted as activation + profiles to drive corpus-based concatenative sound synthesis, allowing one or + more players to mold sonic landscapes and to plow through them in an inherently + collaborative, expressive, and dynamic experience.' + address: 'Ann Arbor, Michigan' + author: Matthieu Savary and Diemo Schwarz and Denis Pellerin + bibtex: "@inproceedings{Savary2012,\n abstract = {Dirty Tangible Interfaces (DIRTI)\ + \ are a new concept in interface design that forgoes the dogma of repeatability\ + \ in favor of a richer and more complex experience, constantly evolving, never\ + \ reversible, and infinitely modifiable. We built a prototype based on granular\ + \ or liquid interaction material placed in a glass dish, that is analyzed by video\ + \ tracking for its 3D relief. 
This relief, and the dynamic changes applied to\ + \ it by the user, are interpreted as activation profiles to drive corpus-based\ + \ concatenative sound synthesis, allowing one or more players to mold sonic landscapes\ + \ and to plow through them in an inherently collaborative, expressive, and dynamic\ + \ experience.},\n address = {Ann Arbor, Michigan},\n author = {Matthieu Savary\ + \ and Diemo Schwarz and Denis Pellerin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180585},\n\ + \ issn = {2220-4806},\n keywords = {Tangible interface, Corpus-based concatenative\ + \ synthesis, Non-standard interaction},\n publisher = {University of Michigan},\n\ + \ title = {DIRTI ---Dirty Tangible Interfaces},\n url = {http://www.nime.org/proceedings/2012/nime2012_212.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177729 + doi: 10.5281/zenodo.1180585 issn: 2220-4806 - keywords: 'dynamic mapping,embodiment,finite state au-,human-robot interaction,new - media art,nime10,structured,tomata' - pages: 411--414 - title: '''''playing robot'''' : An Interactive Sound Installation in Human-Robot - Interaction Design for New Media Art' - url: http://www.nime.org/proceedings/2010/nime2010_411.pdf - year: 2010 + keywords: 'Tangible interface, Corpus-based concatenative synthesis, Non-standard + interaction' + publisher: University of Michigan + title: DIRTI ---Dirty Tangible Interfaces + url: http://www.nime.org/proceedings/2012/nime2012_212.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Reboursiere2010 - abstract: 'This project aims at studying how recent interactive and interactions - technologies would help extend how we play theguitar, thus defining the "multimodal - guitar". Our contributions target three main axes: audio analysis, gestural control - and audio synthesis. 
For this purpose, we designed anddeveloped a freely-available - toolbox for augmented guitarperformances, compliant with the PureData and Max/MSPenvironments, - gathering tools for: polyphonic pitch estimation, fretboard visualization and - grouping, pressure sensing,modal synthesis, infinite sustain, rearranging looping - and "smart" harmonizing.' - address: 'Sydney, Australia' - author: 'Reboursière, Loïc and Frisson, Christian and Lähdeoja, Otso and Mills, - John A. and Picard-Limpens, Cécile and Todoroff, Todor' - bibtex: "@inproceedings{Reboursiere2010,\n abstract = {This project aims at studying\ - \ how recent interactive and interactions technologies would help extend how we\ - \ play theguitar, thus defining the \"multimodal guitar\". Our contributions target\ - \ three main axes: audio analysis, gestural control and audio synthesis. For this\ - \ purpose, we designed anddeveloped a freely-available toolbox for augmented guitarperformances,\ - \ compliant with the PureData and Max/MSPenvironments, gathering tools for: polyphonic\ - \ pitch estimation, fretboard visualization and grouping, pressure sensing,modal\ - \ synthesis, infinite sustain, rearranging looping and \"smart\" harmonizing.},\n\ - \ address = {Sydney, Australia},\n author = {Reboursi\\`{e}re, Lo\\\"{i}c and\ - \ Frisson, Christian and L\\\"{a}hdeoja, Otso and Mills, John A. 
and Picard-Limpens,\ - \ C\\'{e}cile and Todoroff, Todor},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177881},\n\ - \ issn = {2220-4806},\n keywords = {Augmented guitar, audio synthesis, digital\ - \ audio effects, multimodal interaction, gestural sensing, polyphonic tran- scription,\ - \ hexaphonic guitar},\n pages = {415--418},\n title = {Multimodal Guitar : A Toolbox\ - \ For Augmented Guitar Performances},\n url = {http://www.nime.org/proceedings/2010/nime2010_415.pdf},\n\ - \ year = {2010}\n}\n" + ID: Reboursiere2012 + abstract: 'In this paper we present a series of algorithms developed to detect the + following guitar playing techniques : bend, hammer-on, pull-off, slide, palm muting + and harmonic. Detection of playing techniques can be used to control exter-nal + content (i.e audio loops and effects, videos, light events, etc.), as well as + to write real-time score or to assist guitar novices in their learning process. + The guitar used is a Godin Multiac with an under-saddle RMC hexaphonic piezo pickup + (one pickup per string, i.e six mono signals).' + address: 'Ann Arbor, Michigan' + author: Loïc Reboursière and Otso Lähdeoja and Thomas Drugman and Stéphane Dupont + and Cécile Picard-Limpens and Nicolas Riche + bibtex: "@inproceedings{Reboursiere2012,\n abstract = {In this paper we present\ + \ a series of algorithms developed to detect the following guitar playing techniques\ + \ : bend, hammer-on, pull-off, slide, palm muting and harmonic. Detection of playing\ + \ techniques can be used to control exter-nal content (i.e audio loops and effects,\ + \ videos, light events, etc.), as well as to write real-time score or to assist\ + \ guitar novices in their learning process. 
The guitar used is a Godin Multiac\ + \ with an under-saddle RMC hexaphonic piezo pickup (one pickup per string, i.e\ + \ six mono signals).},\n address = {Ann Arbor, Michigan},\n author = {Lo{\\\"\ + i}c Reboursi{\\`e}re and Otso L{\\\"a}hdeoja and Thomas Drugman and St{\\'e}phane\ + \ Dupont and C{\\'e}cile Picard-Limpens and Nicolas Riche},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1180575},\n issn = {2220-4806},\n keywords = {Guitar audio\ + \ analysis, playing techniques, hexaphonic pickup, controller, augmented guitar},\n\ + \ publisher = {University of Michigan},\n title = {Left and right-hand guitar\ + \ playing techniques detection},\n url = {http://www.nime.org/proceedings/2012/nime2012_213.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177881 + doi: 10.5281/zenodo.1180575 issn: 2220-4806 - keywords: 'Augmented guitar, audio synthesis, digital audio effects, multimodal - interaction, gestural sensing, polyphonic tran- scription, hexaphonic guitar' - pages: 415--418 - title: 'Multimodal Guitar : A Toolbox For Augmented Guitar Performances' - url: http://www.nime.org/proceedings/2010/nime2010_415.pdf - year: 2010 + keywords: 'Guitar audio analysis, playing techniques, hexaphonic pickup, controller, + augmented guitar' + publisher: University of Michigan + title: Left and right-hand guitar playing techniques detection + url: http://www.nime.org/proceedings/2012/nime2012_213.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Berger2010 - abstract: 'This paper introduces my research in physical interactive design with - my "GRIP MAESTRO" electroacoustic performance interface. 
It then discusses the - considerations involved in creating intuitive software mappings of emotive performative - gestures such that they are idiomatic not only of the sounds they create but also - of the physical nature of the interface itself. ' - address: 'Sydney, Australia' - author: 'Berger, Michael' - bibtex: "@inproceedings{Berger2010,\n abstract = {This paper introduces my research\ - \ in physical interactive design with my \"GRIP MAESTRO\" electroacoustic performance\ - \ interface. It then discusses the considerations involved in creating intuitive\ - \ software mappings of emotive performative gestures such that they are idiomatic\ - \ not only of the sounds they create but also of the physical nature of the interface\ - \ itself. },\n address = {Sydney, Australia},\n author = {Berger, Michael},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177719},\n issn = {2220-4806},\n\ - \ keywords = {emotive gesture and music,hall effect,human-controller interaction,musical\ - \ mapping strategies,nime10,novel musical instrument,passive haptic feedback,sensor-augmented\ - \ hand-exerciser},\n pages = {419--422},\n title = {The GRIP MAESTRO : Idiomatic\ - \ Mappings of Emotive Gestures for Control of Live Electroacoustic Music},\n url\ - \ = {http://www.nime.org/proceedings/2010/nime2010_419.pdf},\n year = {2010}\n\ - }\n" + ID: Choi2012 + abstract: 'The Deckle Group1 is an ensemble that designs, builds and performs on + electroacoustic drawing boards. These draw-ing surfaces are augmented with Satellite + CCRMA Beagle-Boards and Arduinos2.[1] Piezo microphones are used in conjunction + with other sensors to produce sounds that are coupled tightly to mark-making gestures. + Position tracking is achieved with infra-red object tracking, conductive fabric + and a magnetometer.' 
+ address: 'Ann Arbor, Michigan' + author: Hongchan Choi and John Granzow and Joel Sadler + bibtex: "@inproceedings{Choi2012,\n abstract = {The Deckle Group1 is an ensemble\ + \ that designs, builds and performs on electroacoustic drawing boards. These draw-ing\ + \ surfaces are augmented with Satellite CCRMA Beagle-Boards and Arduinos2.[1]\ + \ Piezo microphones are used in conjunction with other sensors to produce sounds\ + \ that are coupled tightly to mark-making gestures. Position tracking is achieved\ + \ with infra-red object tracking, conductive fabric and a magnetometer.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Hongchan Choi and John Granzow and Joel\ + \ Sadler},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178235},\n issn = {2220-4806},\n\ + \ keywords = {Deckle, BeagleBoard, Drawing, Sonification, Performance, Audiovisual,\ + \ Gestural Interface},\n publisher = {University of Michigan},\n title = {The\ + \ Deckle Project : A Sketch of Three Sensors},\n url = {http://www.nime.org/proceedings/2012/nime2012_214.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177719 + doi: 10.5281/zenodo.1178235 issn: 2220-4806 - keywords: 'emotive gesture and music,hall effect,human-controller interaction,musical - mapping strategies,nime10,novel musical instrument,passive haptic feedback,sensor-augmented - hand-exerciser' - pages: 419--422 - title: 'The GRIP MAESTRO : Idiomatic Mappings of Emotive Gestures for Control of - Live Electroacoustic Music' - url: http://www.nime.org/proceedings/2010/nime2010_419.pdf - year: 2010 + keywords: 'Deckle, BeagleBoard, Drawing, Sonification, Performance, Audiovisual, + Gestural Interface' + publisher: University of Michigan + title: 'The Deckle Project : A Sketch of Three Sensors' + url: 
http://www.nime.org/proceedings/2012/nime2012_214.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Headlee2010 - abstract: 'In this paper, we present an interactive system that uses the body as - a generative tool for creating music. We explore innovative ways to make music, - create self-awareness, and provide the opportunity for unique, interactive social - experiences. The system uses a multi-player game paradigm, where players work - together to add layers to a soundscape of three distinct environments. Various - sensors and hardware are attached to the body and transmit signals to a workstation, - where they are processed using Max/MSP. The game is divided into three levels, - each of a different soundscape. The underlying purpose of our system is to move - the player''s focus away from complexities of the modern urban world toward a - more internalized meditative state. The system is currently viewed as an interactive - installation piece, but future iterations have potential applications in music - therapy, bio games, extended performance art, and as a prototype for new interfaces - for musical expression. ' - address: 'Sydney, Australia' - author: 'Headlee, Kimberlee and Koziupa, Tatyana and Siwiak, Diana' - bibtex: "@inproceedings{Headlee2010,\n abstract = {In this paper, we present an\ - \ interactive system that uses the body as a generative tool for creating music.\ - \ We explore innovative ways to make music, create self-awareness, and provide\ - \ the opportunity for unique, interactive social experiences. The system uses\ - \ a multi-player game paradigm, where players work together to add layers to a\ - \ soundscape of three distinct environments. Various sensors and hardware are\ - \ attached to the body and transmit signals to a workstation, where they are processed\ - \ using Max/MSP. 
The game is divided into three levels, each of a different soundscape.\ - \ The underlying purpose of our system is to move the player's focus away from\ - \ complexities of the modern urban world toward a more internalized meditative\ - \ state. The system is currently viewed as an interactive installation piece,\ - \ but future iterations have potential applications in music therapy, bio games,\ - \ extended performance art, and as a prototype for new interfaces for musical\ - \ expression. },\n address = {Sydney, Australia},\n author = {Headlee, Kimberlee\ - \ and Koziupa, Tatyana and Siwiak, Diana},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177801},\n\ - \ issn = {2220-4806},\n keywords = {biomusic, collaborative, expressive, hci,\ - \ interactive, interactivity design, interface for musical expression, multimodal,\ - \ musical mapping strategies,nime10,performance,sonification},\n pages = {423--426},\n\ - \ title = {Sonic Virtual Reality Game : How Does Your Body Sound ?},\n url = {http://www.nime.org/proceedings/2010/nime2010_423.pdf},\n\ - \ year = {2010}\n}\n" + ID: Vamvakousis2012 + abstract: 'In this paper we describe the EyeHarp, a new gaze-controlled musical + instrument, and the new features we recently added to its design. In particular, + we report on the EyeHarp new controls, the arpeggiator, the new remote eye-tracking + device, and the EyeHarp capacity to act as a MIDI controller for any VST plugin + virtual instrument. We conducted an evaluation of the EyeHarp Temporal accuracy + by monitor-ing 10 users while performing a melody task, and comparing their gaze + control accuracy with their accuracy using a com-puter keyboard. We report on + the results of the evaluation.' 
+ address: 'Ann Arbor, Michigan' + author: Zacharias Vamvakousis and Rafael Ramirez + bibtex: "@inproceedings{Vamvakousis2012,\n abstract = {In this paper we describe\ + \ the EyeHarp, a new gaze-controlled musical instrument, and the new features\ + \ we recently added to its design. In particular, we report on the EyeHarp new\ + \ controls, the arpeggiator, the new remote eye-tracking device, and the EyeHarp\ + \ capacity to act as a MIDI controller for any VST plugin virtual instrument.\ + \ We conducted an evaluation of the EyeHarp Temporal accuracy by monitor-ing 10\ + \ users while performing a melody task, and comparing their gaze control accuracy\ + \ with their accuracy using a com-puter keyboard. We report on the results of\ + \ the evaluation.},\n address = {Ann Arbor, Michigan},\n author = {Zacharias Vamvakousis\ + \ and Rafael Ramirez},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178443},\n\ + \ issn = {2220-4806},\n keywords = {Eye-tracking systems, music interfaces, gaze\ + \ interaction},\n publisher = {University of Michigan},\n title = {Temporal Control\ + \ In the EyeHarp Gaze-Controlled Musical Interface},\n url = {http://www.nime.org/proceedings/2012/nime2012_215.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177801 + doi: 10.5281/zenodo.1178443 issn: 2220-4806 - keywords: 'biomusic, collaborative, expressive, hci, interactive, interactivity - design, interface for musical expression, multimodal, musical mapping strategies,nime10,performance,sonification' - pages: 423--426 - title: 'Sonic Virtual Reality Game : How Does Your Body Sound ?' 
- url: http://www.nime.org/proceedings/2010/nime2010_423.pdf - year: 2010 + keywords: 'Eye-tracking systems, music interfaces, gaze interaction' + publisher: University of Michigan + title: Temporal Control In the EyeHarp Gaze-Controlled Musical Interface + url: http://www.nime.org/proceedings/2012/nime2012_215.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Stahl2010 - abstract: 'Maintaining a sense of personal connection between increasingly synthetic - performers and increasingly diffuse audiences is vital to storytelling and entertainment. - Sonic intimacy is important, because voice is one of the highestbandwidth channels - for expressing our real and imagined selves.New tools for highly focused spatialization - could help improve acoustical clarity, encourage audience engagement, reduce noise - pollution and inspire creative expression. We have a particular interest in embodied, - embedded systems for vocal performance enhancement and transformation. This short - paper describes work in progress on a toolkit for high-quality wearable sound - suits. Design goals include tailored directionality and resonance, full bandwidth, - and sensible ergonomics. Engineering details to accompany a demonstration of recent - prototypes are presented, highlighting a novel magnetostrictive flextensional - transducer. Based on initial observations we suggest that vocal acoustic output - from the torso, and spatial perception of situated low frequency sources, are - two areas deserving greater attention and further study.' - address: 'Sydney, Australia' - author: 'Stahl, Alex and Clemens, Patricia' - bibtex: "@inproceedings{Stahl2010,\n abstract = {Maintaining a sense of personal\ - \ connection between increasingly synthetic performers and increasingly diffuse\ - \ audiences is vital to storytelling and entertainment. 
Sonic intimacy is important,\ - \ because voice is one of the highestbandwidth channels for expressing our real\ - \ and imagined selves.New tools for highly focused spatialization could help improve\ - \ acoustical clarity, encourage audience engagement, reduce noise pollution and\ - \ inspire creative expression. We have a particular interest in embodied, embedded\ - \ systems for vocal performance enhancement and transformation. This short paper\ - \ describes work in progress on a toolkit for high-quality wearable sound suits.\ - \ Design goals include tailored directionality and resonance, full bandwidth,\ - \ and sensible ergonomics. Engineering details to accompany a demonstration of\ - \ recent prototypes are presented, highlighting a novel magnetostrictive flextensional\ - \ transducer. Based on initial observations we suggest that vocal acoustic output\ - \ from the torso, and spatial perception of situated low frequency sources, are\ - \ two areas deserving greater attention and further study.},\n address = {Sydney,\ - \ Australia},\n author = {Stahl, Alex and Clemens, Patricia},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177899},\n issn = {2220-4806},\n keywords = {magnetostrictive\ - \ flextensional transducer,nime10,paralinguistics,sound reinforcement,spatialization,speech\ - \ enhancement,transformation,voice,wearable systems},\n pages = {427--430},\n\ - \ title = {Auditory Masquing : Wearable Sound Systems for Diegetic Character Voices},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_427.pdf},\n year = {2010}\n\ - }\n" + ID: Han2012 + abstract: 'Virtual Pottery is an interactive audiovisual piece that uses hand gesture + to create 3D pottery objects and sound shape. 
Using the OptiTrack motion capture + (Rigid Body) system at TransLab in UCSB, performers can take a glove with attached + trackers, move the hand in x, y, and z axis and create their own sound pieces. + Performers can also manipulate their pottery pieces in real time and change arrangement + on the musical score interface in order to create a continuous musical composition. + In this paper we address the relationship between body, sound and 3D shapes. We + also describe the origin of Virtual Pottery, its design process, discuss its aesthetic + value and musical sound synthesis system, and evaluate the overall experience.' + address: 'Ann Arbor, Michigan' + author: Yoon Chung Han and Byeong-jun Han + bibtex: "@inproceedings{Han2012,\n abstract = {Virtual Pottery is an interactive\ + \ audiovisual piece that uses hand gesture to create 3D pottery objects and sound\ + \ shape. Using the OptiTrack motion capture (Rigid Body) system at TransLab in\ + \ UCSB, performers can take a glove with attached trackers, move the hand in x,\ + \ y, and z axis and create their own sound pieces. Performers can also manipulate\ + \ their pottery pieces in real time and change arrangement on the musical score\ + \ interface in order to create a continuous musical composition. In this paper\ + \ we address the relationship between body, sound and 3D shapes. 
We also describe\ + \ the origin of Virtual Pottery, its design process, discuss its aesthetic value\ + \ and musical sound synthesis system, and evaluate the overall experience.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Yoon Chung Han and Byeong-jun Han},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178273},\n issn = {2220-4806},\n\ + \ keywords = {Virtual Pottery, virtual musical instrument, sound synthesis, motion\ + \ and gesture, pottery, motion perception, interactive sound installation.},\n\ + \ publisher = {University of Michigan},\n title = {Virtual Pottery: An Interactive\ + \ Audio-Visual Installation},\n url = {http://www.nime.org/proceedings/2012/nime2012_216.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177899 + doi: 10.5281/zenodo.1178273 issn: 2220-4806 - keywords: 'magnetostrictive flextensional transducer,nime10,paralinguistics,sound - reinforcement,spatialization,speech enhancement,transformation,voice,wearable - systems' - pages: 427--430 - title: 'Auditory Masquing : Wearable Sound Systems for Diegetic Character Voices' - url: http://www.nime.org/proceedings/2010/nime2010_427.pdf - year: 2010 + keywords: 'Virtual Pottery, virtual musical instrument, sound synthesis, motion + and gesture, pottery, motion perception, interactive sound installation.' + publisher: University of Michigan + title: 'Virtual Pottery: An Interactive Audio-Visual Installation' + url: http://www.nime.org/proceedings/2012/nime2012_216.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Rothman2010 - abstract: 'The Ghost has been developed to create a merger between the standard - MIDI keyboard controller, MIDI/digital guitars and alternative desktop controllers. 
- Using a custom software editor, The Ghost''s controls can be mapped to suit the - users performative needs. The interface takes its interaction and gestural cues - from the guitar but it is not a MIDI guitar. The Ghost''s hardware, firmware and - software will be open sourced with the hopes of creating a community of users - that are invested in creating music with controller.' - address: 'Sydney, Australia' - author: 'Rothman, Paul' - bibtex: "@inproceedings{Rothman2010,\n abstract = {The Ghost has been developed\ - \ to create a merger between the standard MIDI keyboard controller, MIDI/digital\ - \ guitars and alternative desktop controllers. Using a custom software editor,\ - \ The Ghost's controls can be mapped to suit the users performative needs. The\ - \ interface takes its interaction and gestural cues from the guitar but it is\ - \ not a MIDI guitar. The Ghost's hardware, firmware and software will be open\ - \ sourced with the hopes of creating a community of users that are invested in\ - \ creating music with controller.},\n address = {Sydney, Australia},\n author\ - \ = {Rothman, Paul},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177885},\n\ - \ issn = {2220-4806},\n keywords = {Controller, MIDI, Live Performance, Programmable,\ - \ Open-Source},\n pages = {431--435},\n title = {The Ghost : An Open-Source, User\ - \ Programmable {MIDI} Performance Controller},\n url = {http://www.nime.org/proceedings/2010/nime2010_431.pdf},\n\ - \ year = {2010}\n}\n" + ID: Nash2012 + abstract: 'This paper presents concepts, models, and empirical findings relating + to liveness and flow in the user experience of systems mediated by notation. 
Results + from an extensive two-year field study of over 1,000 sequencer and tracker users, + combining interaction logging, user surveys, and a video study, are used to illustrate + the properties of notations and interfaces that facilitate greater immersion in + musical activities and domains, borrowing concepts from programming to illustrate + the role of visual and musical feedback, from the notation and domain respectively. + The Cognitive Dimensions of Notations framework and Csikszentmihalyi''s flow theory + are combined to demonstrate how non-realtime, notation-mediated interaction can + support focused, immersive, energetic, and intrinsically-rewarding musical experiences, + and to what extent they are supported in the interfaces of music production software. + Users are shown to maintain liveness through a rapid, iterative edit-audition + cycle that integrates audio and visual feedback.' + address: 'Ann Arbor, Michigan' + author: Chris Nash and Alan Blackwell + bibtex: "@inproceedings{Nash2012,\n abstract = {This paper presents concepts, models,\ + \ and empirical findings relating to liveness and flow in the user experience\ + \ of systems mediated by notation. Results from an extensive two-year field study\ + \ of over 1,000 sequencer and tracker users, combining interaction logging, user\ + \ surveys, and a video study, are used to illustrate the properties of notations\ + \ and interfaces that facilitate greater immersion in musical activities and domains,\ + \ borrowing concepts from programming to illustrate the role of visual and musical\ + \ feedback, from the notation and domain respectively. The Cognitive Dimensions\ + \ of Notations framework and Csikszentmihalyi's flow theory are combined to demonstrate\ + \ how non-realtime, notation-mediated interaction can support focused, immersive,\ + \ energetic, and intrinsically-rewarding musical experiences, and to what extent\ + \ they are supported in the interfaces of music production software. 
Users are\ + \ shown to maintain liveness through a rapid, iterative edit-audition cycle that\ + \ integrates audio and visual feedback.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Chris Nash and Alan Blackwell},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1180547},\n issn = {2220-4806},\n keywords = {notation, composition,\ + \ liveness, flow, feedback, sequencers, DAWs, soundtracking, performance, user\ + \ studies, programming},\n publisher = {University of Michigan},\n title = {Liveness\ + \ and Flow in Notation Use},\n url = {http://www.nime.org/proceedings/2012/nime2012_217.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177885 + doi: 10.5281/zenodo.1180547 issn: 2220-4806 - keywords: 'Controller, MIDI, Live Performance, Programmable, Open-Source' - pages: 431--435 - title: 'The Ghost : An Open-Source, User Programmable MIDI Performance Controller' - url: http://www.nime.org/proceedings/2010/nime2010_431.pdf - year: 2010 + keywords: 'notation, composition, liveness, flow, feedback, sequencers, DAWs, soundtracking, + performance, user studies, programming' + publisher: University of Michigan + title: Liveness and Flow in Notation Use + url: http://www.nime.org/proceedings/2012/nime2012_217.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Paine2010 - abstract: 'This paper presents a discussion regarding organology classification - and taxonomies for digital musical instruments (DMI), arising from the TIEM (Taxonomy - of Interfaces for Electronic Music performance) survey (http://tiem.emf.org/), - conducted as part of an Australian Research Council Linkage project titled "Performance - Practice in New Interfaces for Realtime Electronic Music Performance". 
This research - is being carried out at the VIPRe Lab at, the University of Western Sydney in - partnership with the Electronic Music Foundation (EMF), Infusion Systems1 and - The Input Devices and Music Interaction Laboratory (IDMIL) at McGill University. - The project seeks to develop a schema of new interfaces for realtime electronic - music performance. ' - address: 'Sydney, Australia' - author: 'Paine, Garth' - bibtex: "@inproceedings{Paine2010,\n abstract = {This paper presents a discussion\ - \ regarding organology classification and taxonomies for digital musical instruments\ - \ (DMI), arising from the TIEM (Taxonomy of Interfaces for Electronic Music performance)\ - \ survey (http://tiem.emf.org/), conducted as part of an Australian Research Council\ - \ Linkage project titled \"Performance Practice in New Interfaces for Realtime\ - \ Electronic Music Performance\". This research is being carried out at the VIPRe\ - \ Lab at, the University of Western Sydney in partnership with the Electronic\ - \ Music Foundation (EMF), Infusion Systems1 and The Input Devices and Music Interaction\ - \ Laboratory (IDMIL) at McGill University. The project seeks to develop a schema\ - \ of new interfaces for realtime electronic music performance. },\n address =\ - \ {Sydney, Australia},\n author = {Paine, Garth},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177873},\n issn = {2220-4806},\n keywords = {Instrument,\ - \ Interface, Organology, Taxonomy.},\n pages = {436--439},\n title = {Towards\ - \ a Taxonomy of Realtime Interfaces for Electronic Music Performance},\n url =\ - \ {http://www.nime.org/proceedings/2010/nime2010_436.pdf},\n year = {2010}\n}\n" + ID: Trail2012 + abstract: 'The Gyil is a pentatonic African wooden xylophone with 14-15 keys. 
The + work described in this paper has been motivated by three applications: computer + analysis of Gyil performance, live improvised electro-acoustic music incorporating + the Gyil, and hybrid sampling and physical mod-eling. In all three of these cases, + detailed information about what is played on the Gyil needs to be digitally captured + in real-time. We describe a direct sensing apparatus that can be used to achieve + this. It is based on contact microphones and is informed by the specific characteristics + of the Gyil. An alternative approach based on indirect acquisition is to apply + polyphonic transcription on the signal acquired by a microphone without requiring + the instrument to be modified. The direct sensing apparatus we have developed + can be used to acquire ground truth for evaluating different approaches to polyphonic + transcription and help create a ``surrogate'''' sensor. Some initial results comparing + different strategies to polyphonic transcription are presented.' + address: 'Ann Arbor, Michigan' + author: Shawn Trail and Tiago Fernandes Tavares and Dan Godlovitch and George Tzanetakis + bibtex: "@inproceedings{Trail2012,\n abstract = {The Gyil is a pentatonic African\ + \ wooden xylophone with 14-15 keys. The work described in this paper has been\ + \ motivated by three applications: computer analysis of Gyil performance, live\ + \ improvised electro-acoustic music incorporating the Gyil, and hybrid sampling\ + \ and physical mod-eling. In all three of these cases, detailed information about\ + \ what is played on the Gyil needs to be digitally captured in real-time. We describe\ + \ a direct sensing apparatus that can be used to achieve this. It is based on\ + \ contact microphones and is informed by the specific characteristics of the Gyil.\ + \ An alternative approach based on indirect acquisition is to apply polyphonic\ + \ transcription on the signal acquired by a microphone without requiring the instrument\ + \ to be modified. 
The direct sensing apparatus we have developed can be used to\ + \ acquire ground truth for evaluating different approaches to polyphonic transcription\ + \ and help create a ``surrogate'' sensor. Some initial results comparing different\ + \ strategies to polyphonic transcription are presented.},\n address = {Ann Arbor,\ + \ Michigan},\n author = {Shawn Trail and Tiago Fernandes Tavares and Dan Godlovitch\ + \ and George Tzanetakis},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178437},\n\ + \ issn = {2220-4806},\n keywords = {hyperinstruments, indirect acquisition, surrogate\ + \ sensors, computational ethnomusicology, physical modeling, perfor-mance analysis},\n\ + \ publisher = {University of Michigan},\n title = {Direct and surrogate sensing\ + \ for the Gyil african xylophone},\n url = {http://www.nime.org/proceedings/2012/nime2012_222.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177873 + doi: 10.5281/zenodo.1178437 issn: 2220-4806 - keywords: 'Instrument, Interface, Organology, Taxonomy.' - pages: 436--439 - title: Towards a Taxonomy of Realtime Interfaces for Electronic Music Performance - url: http://www.nime.org/proceedings/2010/nime2010_436.pdf - year: 2010 + keywords: 'hyperinstruments, indirect acquisition, surrogate sensors, computational + ethnomusicology, physical modeling, perfor-mance analysis' + publisher: University of Michigan + title: Direct and surrogate sensing for the Gyil african xylophone + url: http://www.nime.org/proceedings/2012/nime2012_222.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Taylor2010 - abstract: 'humanaquarium is a self-contained, transportable performance environment - that is used to stage technology-mediated interactive performances in public spaces. 
- Drawing upon the creative practices of busking and street performance, humanaquarium - incorporates live musicians, real-time audiovisual content generation, and frustrated - total internal reflection (FTIR) technology to facilitate participatory interaction - by members of the public. ' - address: 'Sydney, Australia' - author: 'Taylor, Robyn and Schofield, Guy and Shearer, John and Boulanger, Pierre - and Wallace, Jayne and Olivier, Patrick' - bibtex: "@inproceedings{Taylor2010,\n abstract = {humanaquarium is a self-contained,\ - \ transportable performance environment that is used to stage technology-mediated\ - \ interactive performances in public spaces. Drawing upon the creative practices\ - \ of busking and street performance, humanaquarium incorporates live musicians,\ - \ real-time audiovisual content generation, and frustrated total internal reflection\ - \ (FTIR) technology to facilitate participatory interaction by members of the\ - \ public. },\n address = {Sydney, Australia},\n author = {Taylor, Robyn and Schofield,\ - \ Guy and Shearer, John and Boulanger, Pierre and Wallace, Jayne and Olivier,\ - \ Patrick},\n booktitle = {Proceedings of the International Conference on New\ - \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177905},\n issn\ - \ = {2220-4806},\n keywords = {busking, collaborative interface, creative practice,\ - \ experience centered design, frustrated total internal reflection (FTIR), multi-touch\ - \ screen, multimedia, participatory performance},\n pages = {88--93},\n title\ - \ = {humanaquarium : A Participatory Performance System},\n url = {http://www.nime.org/proceedings/2010/nime2010_440.pdf},\n\ - \ year = {2010}\n}\n" + ID: Gerhard2012 + abstract: 'The Instant Instrument Anywhere (IIA) is a small device which can be + attached to any metal object to create an electronic instrument. 
The device uses + capacitive sensing to detect proximity of the player''s body to the metal object, + and sound is generated through a surface transducer which can be attached to any + flat surface. Because the capacitive sensor can be any shape or size, absolute + capacitive thresholding is not possible since the baseline capacitance will change. + Instead, we use a differential-based moving sum threshold which can rapidly adjust + to changes in the environment or be re-calibrated to a new metal object. We show + that this dynamic threshold is effective in rejecting environmental noise and + rapidly adapting to new objects. We also present details for constructing Instant + Instruments Anywhere, including using smartphone as the synthesis engine and power + supply.' + address: 'Ann Arbor, Michigan' + author: David Gerhard and Brett Park + bibtex: "@inproceedings{Gerhard2012,\n abstract = {The Instant Instrument Anywhere\ + \ (IIA) is a small device which can be attached to any metal object to create\ + \ an electronic instrument. The device uses capacitive sensing to detect proximity\ + \ of the player's body to the metal object, and sound is generated through a surface\ + \ transducer which can be attached to any flat surface. Because the capacitive\ + \ sensor can be any shape or size, absolute capacitive thresholding is not possible\ + \ since the baseline capacitance will change. Instead, we use a differential-based\ + \ moving sum threshold which can rapidly adjust to changes in the environment\ + \ or be re-calibrated to a new metal object. 
We show that this dynamic threshold\ + \ is effective in rejecting environmental noise and rapidly adapting to new objects.\ + \ We also present details for constructing Instant Instruments Anywhere, including\ + \ using smartphone as the synthesis engine and power supply.},\n address = {Ann\ + \ Arbor, Michigan},\n author = {David Gerhard and Brett Park},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178261},\n issn = {2220-4806},\n keywords = {Capacitive\ + \ Sensing, Arduino},\n publisher = {University of Michigan},\n title = {Instant\ + \ Instrument Anywhere: A Self-Contained Capacitive Synthesizer},\n url = {http://www.nime.org/proceedings/2012/nime2012_223.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177905 + doi: 10.5281/zenodo.1178261 issn: 2220-4806 - keywords: 'busking, collaborative interface, creative practice, experience centered - design, frustrated total internal reflection (FTIR), multi-touch screen, multimedia, - participatory performance' - pages: 88--93 - title: 'humanaquarium : A Participatory Performance System' - url: http://www.nime.org/proceedings/2010/nime2010_440.pdf - year: 2010 + keywords: 'Capacitive Sensing, Arduino' + publisher: University of Michigan + title: 'Instant Instrument Anywhere: A Self-Contained Capacitive Synthesizer' + url: http://www.nime.org/proceedings/2012/nime2012_223.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Kim2010 - abstract: 'In this paper, we present and demonstrate Samsung’s new concept music - creation engine and music composer application for mobile devices such as touch - phones or MP3 players, ‘Interactive Music Studio : the soloist’.' 
- address: 'Sydney, Australia' - author: 'Kim, Hyun-Soo and Yoon, Je-Han and Jung, Moon-Sik' - bibtex: "@inproceedings{Kim2010,\n abstract = {In this paper, we present and demonstrate\ - \ Samsung’s new concept music creation engine and music composer application for\ - \ mobile devices such as touch phones or MP3 players, ‘Interactive Music Studio\ - \ : the soloist’.},\n address = {Sydney, Australia},\n author = {Kim, Hyun-Soo\ - \ and Yoon, Je-Han and Jung, Moon-Sik},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177825},\n\ - \ issn = {2220-4806},\n keywords = {Mobile device, music composer, pattern composing,\ - \ MIDI},\n pages = {444--446},\n title = {Interactive Music Studio : The Soloist},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_444.pdf},\n year = {2010}\n\ + ID: Luhtala2012 + abstract: "This paper introduces `The Aesthetic Experience Prism', a framework for\ + \ studying how components of aesthetic experience materialize in the model's of\ + \ interaction of novel musical interfaces as well as how the role of aesthetics\ + \ could be made more explicit in the processes of designing interaction for musical\ + \ technologies. The Aesthetic Experience Prism makes use of Arthur Danto's framework\ + \ of aesthetic experience that consists of three conceptual entities: (1) metaphor;\ + \ (2) expression; and (3) style. In this paper we present key questions driving\ + \ the research, theoretical background, artistic research approach and user research\ + \ activities.\nIn the DIYSE project a proof-of-concept music creation system prototype\ + \ was developed in a collaborative design setting. The prototype provides means\ + \ to the performer to create music with minimum effort while allowing for versatile\ + \ interaction. 
We argue that by using an artistic research approach specifically\ + \ targeting designing for aesthetic experience we were able to transform the knowledge\ + \ from early design ideas to resulting technology products in which model's of\ + \ interaction metaphors, expression and style are in an apparent role." + address: 'Ann Arbor, Michigan' + author: Matti Luhtala and Ilkka Niemeläinen and Johan Plomp and Markku Turunen and + Julius Tuomisto + bibtex: "@inproceedings{Luhtala2012,\n abstract = {This paper introduces `The Aesthetic\ + \ Experience Prism', a framework for studying how components of aesthetic experience\ + \ materialize in the model's of interaction of novel musical interfaces as well\ + \ as how the role of aesthetics could be made more explicit in the processes of\ + \ designing interaction for musical technologies. The Aesthetic Experience Prism\ + \ makes use of Arthur Danto's framework of aesthetic experience that consists\ + \ of three conceptual entities: (1) metaphor; (2) expression; and (3) style. In\ + \ this paper we present key questions driving the research, theoretical background,\ + \ artistic research approach and user research activities.\nIn the DIYSE project\ + \ a proof-of-concept music creation system prototype was developed in a collaborative\ + \ design setting. The prototype provides means to the performer to create music\ + \ with minimum effort while allowing for versatile interaction. 
We argue that\ + \ by using an artistic research approach specifically targeting designing for\ + \ aesthetic experience we were able to transform the knowledge from early design\ + \ ideas to resulting technology products in which model's of interaction metaphors,\ + \ expression and style are in an apparent role.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Matti Luhtala and Ilkka Niemel{\\''a}inen and Johan Plomp and Markku\ + \ Turunen and Julius Tuomisto},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178331},\n\ + \ issn = {2220-4806},\n keywords = {Aesthetics, Interaction Design, Artistic Research,\ + \ Exploration},\n publisher = {University of Michigan},\n title = {Studying Aesthetics\ + \ in a Musical Interface Design Process Through `Aesthetic Experience Prism'},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_226.pdf},\n year = {2012}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177825 - issn: 2220-4806 - keywords: 'Mobile device, music composer, pattern composing, MIDI' - pages: 444--446 - title: 'Interactive Music Studio : The Soloist' - url: http://www.nime.org/proceedings/2010/nime2010_444.pdf - year: 2010 - - -- ENTRYTYPE: inproceedings - ID: Tremblay2010 - abstract: "In this paper, the authors describe how they use an electric bass as\ - \ a subtle, expressive and intuitive interface to browse the rich sample bank\ - \ available to most laptop owners. This is achieved by audio mosaicing of the\ - \ live bass performance audio, through corpus-based concatenative synthesis (CBCS)\ - \ techniques, allowing a mapping of the multi-dimensional expressivity of the\ - \ performance onto foreign audio material, thus recycling the virtuosity acquired\ - \ on the electric instrument with a trivial learning curve. 
This design hypothesis\ - \ is contextualised and assessed within the Sandbox#n series of bass+laptop meta-instruments,\ - \ and the authors describe technical means of the implementation through the use\ - \ of the open-source CataRT CBCS system adapted for live mosaicing. They also\ - \ discuss their encouraging early results and provide a list of further explorations\ - \ to be made with that rich new interface." - address: 'Sydney, Australia' - author: 'Tremblay, Pierre Alexandre and Schwarz, Diemo' - bibtex: "@inproceedings{Tremblay2010,\n abstract = {In this paper, the authors describe\ - \ how they use an electric bass as a subtle, expressive and intuitive interface\ - \ to browse the rich sample bank available to most laptop owners. This is achieved\ - \ by audio mosaicing of the live bass performance audio, through corpus-based\ - \ concatenative synthesis (CBCS) techniques, allowing a mapping of the multi-dimensional\ - \ expressivity of the performance onto foreign audio material, thus recycling\ - \ the virtuosity acquired on the electric instrument with a trivial learning curve.\ - \ This design hypothesis is contextualised and assessed within the Sandbox#n series\ - \ of bass+laptop meta-instruments, and the authors describe technical means of\ - \ the implementation through the use of the open-source CataRT CBCS system adapted\ - \ for live mosaicing. 
They also discuss their encouraging early results and provide\ - \ a list of further explorations to be made with that rich new interface.},\n\ - \ address = {Sydney, Australia},\n author = {Tremblay, Pierre Alexandre and Schwarz,\ - \ Diemo},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177913},\n issn = {2220-4806},\n\ - \ keywords = {laptop improvisation, corpus-based concatenative synthesis, haptic\ - \ interface, multi-dimensional mapping, audio mosaic},\n pages = {447--450},\n\ - \ title = {Surfing the Waves : Live Audio Mosaicing of an Electric Bass Performance\ - \ as a Corpus Browsing Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_447.pdf},\n\ - \ year = {2010}\n}\n" - booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - doi: 10.5281/zenodo.1177913 + doi: 10.5281/zenodo.1178331 issn: 2220-4806 - keywords: 'laptop improvisation, corpus-based concatenative synthesis, haptic interface, - multi-dimensional mapping, audio mosaic' - pages: 447--450 - title: 'Surfing the Waves : Live Audio Mosaicing of an Electric Bass Performance - as a Corpus Browsing Interface' - url: http://www.nime.org/proceedings/2010/nime2010_447.pdf - year: 2010 + keywords: 'Aesthetics, Interaction Design, Artistic Research, Exploration' + publisher: University of Michigan + title: Studying Aesthetics in a Musical Interface Design Process Through `Aesthetic + Experience Prism' + url: http://www.nime.org/proceedings/2012/nime2012_226.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Fyans2010 - abstract: 'Drawing on a model of spectator understanding of error inperformance - in the literature, we document a qualitativeexperiment that explores the relationships - between domainknowledge, mental models, intention and error recognitionby spectators - of performances with electronic instruments.Participants saw two performances - with 
contrasting instruments, with controls on their mental model and understanding - of intention. Based on data from a subsequent structured interview, we identify - themes in participants'' judgements and understanding of performance and explanationsof - their spectator experience. These reveal both elementsof similarity and difference - between the two performances,instruments and between domain knowledge groups. - Fromthese, we suggest and discuss implications for the design ofnovel performative - interactions with technology.' - address: 'Sydney, Australia' - author: 'Fyans, A. Cavan and Gurevich, Michael and Stapleton, Paul' - bibtex: "@inproceedings{Fyans2010,\n abstract = {Drawing on a model of spectator\ - \ understanding of error inperformance in the literature, we document a qualitativeexperiment\ - \ that explores the relationships between domainknowledge, mental models, intention\ - \ and error recognitionby spectators of performances with electronic instruments.Participants\ - \ saw two performances with contrasting instruments, with controls on their mental\ - \ model and understanding of intention. Based on data from a subsequent structured\ - \ interview, we identify themes in participants' judgements and understanding\ - \ of performance and explanationsof their spectator experience. These reveal both\ - \ elementsof similarity and difference between the two performances,instruments\ - \ and between domain knowledge groups. Fromthese, we suggest and discuss implications\ - \ for the design ofnovel performative interactions with technology.},\n address\ - \ = {Sydney, Australia},\n author = {Fyans, A. 
Cavan and Gurevich, Michael and\ - \ Stapleton, Paul},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177775},\n\ - \ issn = {2220-4806},\n keywords = {error,intention,mental model,nime10,qualitative,spectator},\n\ - \ pages = {451--454},\n title = {Examining the Spectator Experience},\n url =\ - \ {http://www.nime.org/proceedings/2010/nime2010_451.pdf},\n year = {2010}\n}\n" + ID: Hollinger2012 + abstract: 'A modular and reconfigurable hardware platform for analog optoelectronic + signal acquisition is presented. Its intended application is for fiber optic sensing + in electronic musical interfaces, however the flexible design enables its use + with a wide range of analog and digital sensors. Multiple gain and multiplexing + stages as well as programmable analog and digital hardware blocks allow for the + acquisition, processing, and communication of single-ended and differential signals. + Along with a hub board, multiple acquisition boards can be connected to modularly + extend the system''s capabilities to suit the needs of the application. Fiber + optic sensors and their application in DMIs are briefly discussed, as well as + the use of the hardware platform with specific musical interfaces.' + address: 'Ann Arbor, Michigan' + author: Avrum Hollinger and Marcelo M. Wanderley + bibtex: "@inproceedings{Hollinger2012,\n abstract = {A modular and reconfigurable\ + \ hardware platform for analog optoelectronic signal acquisition is presented.\ + \ Its intended application is for fiber optic sensing in electronic musical interfaces,\ + \ however the flexible design enables its use with a wide range of analog and\ + \ digital sensors. Multiple gain and multiplexing stages as well as programmable\ + \ analog and digital hardware blocks allow for the acquisition, processing, and\ + \ communication of single-ended and differential signals. 
Along with a hub board,\ + \ multiple acquisition boards can be connected to modularly extend the system's\ + \ capabilities to suit the needs of the application. Fiber optic sensors and their\ + \ application in DMIs are briefly discussed, as well as the use of the hardware\ + \ platform with specific musical interfaces.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Avrum Hollinger and Marcelo M. Wanderley},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178289},\n issn = {2220-4806},\n keywords = {fiber optic\ + \ sensing, analog signal acquisition, musical interface, MRI-compatible},\n publisher\ + \ = {University of Michigan},\n title = {Optoelectronic Acquisition and Control\ + \ Board for Musical Applications},\n url = {http://www.nime.org/proceedings/2012/nime2012_228.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177775 + doi: 10.5281/zenodo.1178289 issn: 2220-4806 - keywords: 'error,intention,mental model,nime10,qualitative,spectator' - pages: 451--454 - title: Examining the Spectator Experience - url: http://www.nime.org/proceedings/2010/nime2010_451.pdf - year: 2010 + keywords: 'fiber optic sensing, analog signal acquisition, musical interface, MRI-compatible' + publisher: University of Michigan + title: Optoelectronic Acquisition and Control Board for Musical Applications + url: http://www.nime.org/proceedings/2012/nime2012_228.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Collins2010a - abstract: 'Gaining access to a prototype motion capture suit designedby the Animazoo - company, the Interactive Systems groupat the University of Sussex have been investigating - application areas. This paper describes our initial experimentsin mapping the - suit control data to sonic attributes for musical purposes. 
Given the lab conditions - under which weworked, an agile design cycle methodology was employed,with live - coding of audio software incorporating fast feedback, and more reflective preparations - between sessions, exploiting both individual and pair programming. As the suitprovides - up to 66 channels of information, we confront achallenging mapping problem, and - techniques are describedfor automatic calibration, and the use of echo state networksfor - dimensionality reduction.' - address: 'Sydney, Australia' - author: 'Collins, Nick and Kiefer, Chris and Patoli, Zeeshan and White, Martin' - bibtex: "@inproceedings{Collins2010a,\n abstract = {Gaining access to a prototype\ - \ motion capture suit designedby the Animazoo company, the Interactive Systems\ - \ groupat the University of Sussex have been investigating application areas.\ - \ This paper describes our initial experimentsin mapping the suit control data\ - \ to sonic attributes for musical purposes. Given the lab conditions under which\ - \ weworked, an agile design cycle methodology was employed,with live coding of\ - \ audio software incorporating fast feedback, and more reflective preparations\ - \ between sessions, exploiting both individual and pair programming. 
As the suitprovides\ - \ up to 66 channels of information, we confront achallenging mapping problem,\ - \ and techniques are describedfor automatic calibration, and the use of echo state\ - \ networksfor dimensionality reduction.},\n address = {Sydney, Australia},\n author\ - \ = {Collins, Nick and Kiefer, Chris and Patoli, Zeeshan and White, Martin},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177749},\n issn = {2220-4806},\n\ - \ keywords = {Motion Capture, Musical Controller, Mapping, Agile Design},\n pages\ - \ = {455--458},\n title = {Musical Exoskeletons : Experiments with a Motion Capture\ - \ Suit},\n url = {http://www.nime.org/proceedings/2010/nime2010_455.pdf},\n year\ - \ = {2010}\n}\n" + ID: Ouzounian2012 + abstract: 'Music for Sleeping & Waking Minds (2011-2012) is a new, overnight work + in which four performers fall asleep while wearing custom designed EEG sensors + which monitor their brainwave activity. The data gathered from the EEG sensors + is applied in real time to different audio and image signal processing functions, + resulting in continuously evolving multi-channel sound environment and visual + projection. This material serves as an audiovisual description of the individual + and collective neurophysiological state of the ensemble. Audiences are invited + to experience the work in different states of attention: while alert and asleep, + resting and awakening.' + address: 'Ann Arbor, Michigan' + author: Gascia Ouzounian and R. 
Benjamin Knapp and Eric Lyon and Luke DuBois + bibtex: "@inproceedings{Ouzounian2012,\n abstract = {Music for Sleeping & Waking\ + \ Minds (2011-2012) is a new, overnight work in which four performers fall asleep\ + \ while wearing custom designed EEG sensors which monitor their brainwave activity.\ + \ The data gathered from the EEG sensors is applied in real time to different\ + \ audio and image signal processing functions, resulting in continuously evolving\ + \ multi-channel sound environment and visual projection. This material serves\ + \ as an audiovisual description of the individual and collective neurophysiological\ + \ state of the ensemble. Audiences are invited to experience the work in different\ + \ states of attention: while alert and asleep, resting and awakening.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Gascia Ouzounian and R. Benjamin Knapp\ + \ and Eric Lyon and Luke DuBois},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180559},\n\ + \ issn = {2220-4806},\n keywords = {EEG, sleep, dream, biosignals, bio art, consciousness,\ + \ BCI},\n publisher = {University of Michigan},\n title = {Music for Sleeping\ + \ \\& Waking Minds},\n url = {http://www.nime.org/proceedings/2012/nime2012_229.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177749 + doi: 10.5281/zenodo.1180559 issn: 2220-4806 - keywords: 'Motion Capture, Musical Controller, Mapping, Agile Design' - pages: 455--458 - title: 'Musical Exoskeletons : Experiments with a Motion Capture Suit' - url: http://www.nime.org/proceedings/2010/nime2010_455.pdf - year: 2010 + keywords: 'EEG, sleep, dream, biosignals, bio art, consciousness, BCI' + publisher: University of Michigan + title: Music for Sleeping & Waking Minds + url: http://www.nime.org/proceedings/2012/nime2012_229.pdf + year: 2012 - 
ENTRYTYPE: inproceedings - ID: Murphy2010 - abstract: 'This paper describes a study of membrane potentiometers and long force - sensing resistors as tools to enable greater interaction between performers and - audiences. This is accomplished through the building of a new interface called - the Helio. In preparation for the Helio''s construction, a variety of brands of - membrane potentiometers and long force sensing resistors were analyzed for their - suitability for use in a performance interface. Analog and digital circuit design - considerations are discussed. We discuss in detail the design process and performance - scenarios explored with the Helio. ' - address: 'Sydney, Australia' - author: 'Murphy, Jim and Kapur, Ajay and Burgin, Carl' - bibtex: "@inproceedings{Murphy2010,\n abstract = {This paper describes a study of\ - \ membrane potentiometers and long force sensing resistors as tools to enable\ - \ greater interaction between performers and audiences. This is accomplished through\ - \ the building of a new interface called the Helio. In preparation for the Helio's\ - \ construction, a variety of brands of membrane potentiometers and long force\ - \ sensing resistors were analyzed for their suitability for use in a performance\ - \ interface. Analog and digital circuit design considerations are discussed. We\ - \ discuss in detail the design process and performance scenarios explored with\ - \ the Helio. 
},\n address = {Sydney, Australia},\n author = {Murphy, Jim and Kapur,\ - \ Ajay and Burgin, Carl},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177863},\n\ - \ issn = {2220-4806},\n keywords = {Force Sensing Resistors, Membrane Potentiometers,\ - \ Force Sensing Resistors, Haptic Feedback, Helio},\n pages = {459--462},\n title\ - \ = {The Helio : A Study of Membrane Potentiometers and Long Force Sensing Resistors\ - \ for Musical Interfaces},\n url = {http://www.nime.org/proceedings/2010/nime2010_459.pdf},\n\ - \ year = {2010}\n}\n" + ID: Schlei2012 + abstract: 'This paper describes the design and realization of TC-11, a software + instrument based on programmable multi-point controllers. TC-11 is a modular synthesizer + for the iPad that uses multi-touch and device motion sensors for control. It has + a robust patch programming interface that centers around multi-point controllers, + providing powerful flexibility. This paper details the origin, design principles, + programming implementation, and performance result of TC-11.' + address: 'Ann Arbor, Michigan' + author: Kevin Schlei + bibtex: "@inproceedings{Schlei2012,\n abstract = {This paper describes the design\ + \ and realization of TC-11, a software instrument based on programmable multi-point\ + \ controllers. TC-11 is a modular synthesizer for the iPad that uses multi-touch\ + \ and device motion sensors for control. 
It has a robust patch programming interface\ + \ that centers around multi-point controllers, providing powerful flexibility.\ + \ This paper details the origin, design principles, programming implementation,\ + \ and performance result of TC-11.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Kevin Schlei},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180589},\n\ + \ issn = {2220-4806},\n keywords = {TC-11, iPad, multi-touch, multi-point, controller\ + \ mapping, synthesis programming},\n publisher = {University of Michigan},\n title\ + \ = {TC-11: A Programmable Multi-Touch Synthesizer for the iPad},\n url = {http://www.nime.org/proceedings/2012/nime2012_230.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177863 + doi: 10.5281/zenodo.1180589 issn: 2220-4806 - keywords: 'Force Sensing Resistors, Membrane Potentiometers, Force Sensing Resistors, - Haptic Feedback, Helio' - pages: 459--462 - title: 'The Helio : A Study of Membrane Potentiometers and Long Force Sensing Resistors - for Musical Interfaces' - url: http://www.nime.org/proceedings/2010/nime2010_459.pdf - year: 2010 + keywords: 'TC-11, iPad, multi-touch, multi-point, controller mapping, synthesis + programming' + publisher: University of Michigan + title: 'TC-11: A Programmable Multi-Touch Synthesizer for the iPad' + url: http://www.nime.org/proceedings/2012/nime2012_230.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Taylor2010a - abstract: 'We present a novel user interface device based around ferromagnetic sensing. - The physical form of the interface can easily be reconfigured by simply adding - and removing a variety of ferromagnetic objects to the device''s sensing surface. - This allows the user to change the physical form of the interface resulting in - a variety of different interaction modes. 
When used in a musical context, the - performer can leverage the physical reconfiguration of the device to affect the - method of playing and ultimately the sound produced. We describe the implementation - of the sensing system, along with a range of mapping techniques used to transform - the sensor data into musical output, including both the direct synthesis of sound - and also the generation of MIDI data for use with Ableton Live. We conclude with - a discussion of future directions for the device. ' - address: 'Sydney, Australia' - author: 'Taylor, Stuart and Hook, Jonathan' - bibtex: "@inproceedings{Taylor2010a,\n abstract = {We present a novel user interface\ - \ device based around ferromagnetic sensing. The physical form of the interface\ - \ can easily be reconfigured by simply adding and removing a variety of ferromagnetic\ - \ objects to the device's sensing surface. This allows the user to change the\ - \ physical form of the interface resulting in a variety of different interaction\ - \ modes. When used in a musical context, the performer can leverage the physical\ - \ reconfiguration of the device to affect the method of playing and ultimately\ - \ the sound produced. We describe the implementation of the sensing system, along\ - \ with a range of mapping techniques used to transform the sensor data into musical\ - \ output, including both the direct synthesis of sound and also the generation\ - \ of MIDI data for use with Ableton Live. We conclude with a discussion of future\ - \ directions for the device. 
},\n address = {Sydney, Australia},\n author = {Taylor,\ - \ Stuart and Hook, Jonathan},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177907},\n\ - \ issn = {2220-4806},\n keywords = {Ferromagnetic sensing, ferrofluid, reconfigurable\ - \ user interface, wave terrain synthesis, MIDI controller.},\n pages = {463--466},\n\ - \ title = {FerroSynth : A Ferromagnetic Music Interface},\n url = {http://www.nime.org/proceedings/2010/nime2010_463.pdf},\n\ - \ year = {2010}\n}\n" + ID: Kikukawa2012 + abstract: 'We developed original solenoid actuator units with several built-in sensors, + and produced a box-shaped musical inter-face ``PocoPoco'''' using 16 units of + them as a universal input/output device. We applied up-and-down movement of the + solenoid-units and user''s intuitive input to musical interface. Using transformation + of the physical interface, we can apply movement of the units to new interaction + design. At the same time we intend to suggest a new interface whose movement itself + can attract the user.' + address: 'Ann Arbor, Michigan' + author: Yuya Kikukawa and Takaharu Kanai and Tatsuhiko Suzuki and Toshiki Yoshiike + and Tetsuaki Baba and Kumiko Kushiyama + bibtex: "@inproceedings{Kikukawa2012,\n abstract = {We developed original solenoid\ + \ actuator units with several built-in sensors, and produced a box-shaped musical\ + \ inter-face ``PocoPoco'' using 16 units of them as a universal input/output device.\ + \ We applied up-and-down movement of the solenoid-units and user's intuitive input\ + \ to musical interface. Using transformation of the physical interface, we can\ + \ apply movement of the units to new interaction design. 
At the same time we intend\ + \ to suggest a new interface whose movement itself can attract the user.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Yuya Kikukawa and Takaharu Kanai and Tatsuhiko\ + \ Suzuki and Toshiki Yoshiike and Tetsuaki Baba and Kumiko Kushiyama},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178301},\n issn = {2220-4806},\n keywords\ + \ = {musical interface, interaction design, tactile, moving, kinetic},\n publisher\ + \ = {University of Michigan},\n title = {PocoPoco: A Kinetic Musical Interface\ + \ With Electro-Magnetic Levitation Units},\n url = {http://www.nime.org/proceedings/2012/nime2012_232.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177907 + doi: 10.5281/zenodo.1178301 issn: 2220-4806 - keywords: 'Ferromagnetic sensing, ferrofluid, reconfigurable user interface, wave - terrain synthesis, MIDI controller.' - pages: 463--466 - title: 'FerroSynth : A Ferromagnetic Music Interface' - url: http://www.nime.org/proceedings/2010/nime2010_463.pdf - year: 2010 + keywords: 'musical interface, interaction design, tactile, moving, kinetic' + publisher: University of Michigan + title: 'PocoPoco: A Kinetic Musical Interface With Electro-Magnetic Levitation Units' + url: http://www.nime.org/proceedings/2012/nime2012_232.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Dubrau2010 - abstract: 'P[a]ra[pra]xis is an ongoing collaborative project incorporating a two-piece - software package which explores human relations to language through dynamic sound - and text production. 
Incorporating an exploration of the potential functions and - limitations of the ‘sign’ and the intrusions of the Unconscious into the linguistic - utterance via parapraxes, or ‘Freudian slips’, our software utilises realtime - subject response to automatically- generated changes in a narrative of their own - writing to create music. This paper considers the relative paucity of truly interactive - realtime text and audio works and provides an account of current and future potential - for the simultaneous production of realtime poetry and electronic music through - the P[a]ra[pra]xis software. It also provides the basis for a demonstration session - in which we hope to show users how the program works, discuss possibilities for - different applications of the software, and collect data for future collaborative - work.' - address: 'Sydney, Australia' - author: 'Dubrau, Josh M. and Havryliv, Mark' - bibtex: "@inproceedings{Dubrau2010,\n abstract = {P[a]ra[pra]xis is an ongoing collaborative\ - \ project incorporating a two-piece software package which explores human relations\ - \ to language through dynamic sound and text production. Incorporating an exploration\ - \ of the potential functions and limitations of the ‘sign’ and the intrusions\ - \ of the Unconscious into the linguistic utterance via parapraxes, or ‘Freudian\ - \ slips’, our software utilises realtime subject response to automatically- generated\ - \ changes in a narrative of their own writing to create music. 
This paper considers\ - \ the relative paucity of truly interactive realtime text and audio works and\ - \ provides an account of current and future potential for the simultaneous production\ - \ of realtime poetry and electronic music through the P[a]ra[pra]xis software.\ - \ It also provides the basis for a demonstration session in which we hope to show\ - \ users how the program works, discuss possibilities for different applications\ - \ of the software, and collect data for future collaborative work.},\n address\ - \ = {Sydney, Australia},\n author = {Dubrau, Josh M. and Havryliv, Mark},\n booktitle\ + ID: Nort2012 + abstract: 'In this paper we discuss aspects of our work in develop-ing performance + systems that are geared towards human-machine co-performance with a particular + emphasis on improvisation. We present one particular system, FILTER, which was + created in the context of a larger project related to artificial intelligence + and performance, and has been tested in the context of our electro-acoustic performance + trio. We discuss how this timbrally rich and highly non-idiomatic musical context + has challenged the design of the system, with particular emphasis on the mapping + of machine listening parameters to higher-level behaviors of the system in such + a way that spontaneity and creativity are encouraged while maintaining a sense + of novel dialogue.' + address: 'Ann Arbor, Michigan' + author: Doug Van Nort and Jonas Braasch and Pauline Oliveros + bibtex: "@inproceedings{Nort2012,\n abstract = {In this paper we discuss aspects\ + \ of our work in develop-ing performance systems that are geared towards human-machine\ + \ co-performance with a particular emphasis on improvisation. We present one particular\ + \ system, FILTER, which was created in the context of a larger project related\ + \ to artificial intelligence and performance, and has been tested in the context\ + \ of our electro-acoustic performance trio. 
We discuss how this timbrally rich\ + \ and highly non-idiomatic musical context has challenged the design of the system,\ + \ with particular emphasis on the mapping of machine listening parameters to higher-level\ + \ behaviors of the system in such a way that spontaneity and creativity are encouraged\ + \ while maintaining a sense of novel dialogue.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Doug Van Nort and Jonas Braasch and Pauline Oliveros},\n booktitle\ \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n doi = {10.5281/zenodo.117777},\n issn = {2220-4806},\n keywords\ - \ = {language sonification, new media poetry, realtime, Lacan, semiotics, collaborative\ - \ environment, psychoanalysis, Freud},\n pages = {467--468},\n title = {P[a]ra[pra]xis\ - \ : Towards Genuine Realtime 'Audiopoetry'},\n url = {http://www.nime.org/proceedings/2010/nime2010_467.pdf},\n\ - \ year = {2010}\n}\n" + \ Expression},\n doi = {10.5281/zenodo.1180551},\n issn = {2220-4806},\n keywords\ + \ = {Electroacoustic Improvisation, Machine Learning, Mapping, Sonic Gestures,\ + \ Spatialization},\n publisher = {University of Michigan},\n title = {Mapping\ + \ to musical actions in the FILTER system},\n url = {http://www.nime.org/proceedings/2012/nime2012_235.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.117777 + doi: 10.5281/zenodo.1180551 issn: 2220-4806 - keywords: 'language sonification, new media poetry, realtime, Lacan, semiotics, - collaborative environment, psychoanalysis, Freud' - pages: 467--468 - title: 'P[a]ra[pra]xis : Towards Genuine Realtime ''Audiopoetry''' - url: http://www.nime.org/proceedings/2010/nime2010_467.pdf - year: 2010 + keywords: 'Electroacoustic Improvisation, Machine Learning, Mapping, Sonic Gestures, + Spatialization' + publisher: University of Michigan + title: Mapping to musical actions in the 
FILTER system + url: http://www.nime.org/proceedings/2012/nime2012_235.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Kitani2010 - abstract: 'We propose an online generative algorithm to enhance musical expression - via intelligent improvisation accompaniment.Our framework called the ImprovGenerator, - takes a livestream of percussion patterns and generates an improvisedaccompaniment - track in real-time to stimulate new expressions in the improvisation. We use a - mixture model togenerate an accompaniment pattern, that takes into account both - the hierarchical temporal structure of the liveinput patterns and the current - musical context of the performance. The hierarchical structure is represented - as astochastic context-free grammar, which is used to generateaccompaniment patterns - based on the history of temporalpatterns. We use a transition probability model - to augmentthe grammar generated pattern to take into account thecurrent context - of the performance. In our experiments weshow how basic beat patterns performed - by a percussioniston a cajon can be used to automatically generate on-the-flyimprovisation - accompaniment for live performance.' - address: 'Sydney, Australia' - author: 'Kitani, Kris M. and Koike, Hideki' - bibtex: "@inproceedings{Kitani2010,\n abstract = {We propose an online generative\ - \ algorithm to enhance musical expression via intelligent improvisation accompaniment.Our\ - \ framework called the ImprovGenerator, takes a livestream of percussion patterns\ - \ and generates an improvisedaccompaniment track in real-time to stimulate new\ - \ expressions in the improvisation. We use a mixture model togenerate an accompaniment\ - \ pattern, that takes into account both the hierarchical temporal structure of\ - \ the liveinput patterns and the current musical context of the performance. 
The\ - \ hierarchical structure is represented as astochastic context-free grammar, which\ - \ is used to generateaccompaniment patterns based on the history of temporalpatterns.\ - \ We use a transition probability model to augmentthe grammar generated pattern\ - \ to take into account thecurrent context of the performance. In our experiments\ - \ weshow how basic beat patterns performed by a percussioniston a cajon can be\ - \ used to automatically generate on-the-flyimprovisation accompaniment for live\ - \ performance.},\n address = {Sydney, Australia},\n author = {Kitani, Kris M.\ - \ and Koike, Hideki},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177827},\n\ - \ issn = {2220-4806},\n keywords = {Machine Improvisation, Grammatical Induction,\ - \ Stochastic Context-Free Grammars, Algorithmic Composition},\n pages = {469--472},\n\ - \ title = {ImprovGenerator : Online Grammatical Induction for On-the-Fly Improvisation\ - \ Accompaniment},\n url = {http://www.nime.org/proceedings/2010/nime2010_469.pdf},\n\ - \ year = {2010}\n}\n" + ID: Magnus2012 + abstract: "The purpose of the Musician Assistance and Score Distribution (MASD)\ + \ system is to assist novice musicians with playing in an orchestra, concert band,\ + \ choir or other musical ensemble. MASD helps novice musicians in three ways.\ + \ It removes the confusion that results from page turns, aides a musician's return\ + \ to the proper location in the music score after the looking at the conductor\ + \ and notifies musicians of conductor instructions. MASD is currently verified\ + \ by evaluating the time between sending beats or conductor information and this\ + \ information being rendered for the musician. Future work includes user testing\ + \ of this system.\nThere are three major components to the MASD system. 
These\ + \ components are Score Distribution, Score Rendering and Information Distribution.\ + \ Score Distribution passes score information to clients and is facilitated by\ + \ the Internet Communication Engine (ICE). Score Rendering uses the GUIDO Library\ + \ to display the musical score. Information Distribution uses ICE and the IceStorm\ + \ service to pass beat and instruction information to musicians." + address: 'Ann Arbor, Michigan' + author: Nathan Magnus and David Gerhard + bibtex: "@inproceedings{Magnus2012,\n abstract = {The purpose of the Musician Assistance\ + \ and Score Distribution (MASD) system is to assist novice musicians with playing\ + \ in an orchestra, concert band, choir or other musical ensemble. MASD helps novice\ + \ musicians in three ways. It removes the confusion that results from page turns,\ + \ aides a musician's return to the proper location in the music score after the\ + \ looking at the conductor and notifies musicians of conductor instructions. MASD\ + \ is currently verified by evaluating the time between sending beats or conductor\ + \ information and this information being rendered for the musician. Future work\ + \ includes user testing of this system.\nThere are three major components to the\ + \ MASD system. These components are Score Distribution, Score Rendering and Information\ + \ Distribution. Score Distribution passes score information to clients and is\ + \ facilitated by the Internet Communication Engine (ICE). Score Rendering uses\ + \ the GUIDO Library to display the musical score. 
Information Distribution uses\ + \ ICE and the IceStorm service to pass beat and instruction information to musicians.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Nathan Magnus and David Gerhard},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178337},\n issn = {2220-4806},\n\ + \ keywords = {score distribution, score-following, score rendering, musician assistance},\n\ + \ publisher = {University of Michigan},\n title = {Musician Assistance and Score\ + \ Distribution (MASD)},\n url = {http://www.nime.org/proceedings/2012/nime2012_237.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177827 + doi: 10.5281/zenodo.1178337 issn: 2220-4806 - keywords: 'Machine Improvisation, Grammatical Induction, Stochastic Context-Free - Grammars, Algorithmic Composition' - pages: 469--472 - title: 'ImprovGenerator : Online Grammatical Induction for On-the-Fly Improvisation - Accompaniment' - url: http://www.nime.org/proceedings/2010/nime2010_469.pdf - year: 2010 + keywords: 'score distribution, score-following, score rendering, musician assistance' + publisher: University of Michigan + title: Musician Assistance and Score Distribution (MASD) + url: http://www.nime.org/proceedings/2012/nime2012_237.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Frisson2010 - abstract: 'This paper presents the development of rapid and reusablegestural interface - prototypes for navigation by similarity inan audio database and for sound manipulation, - using theAudioCycle application. For this purpose, we propose andfollow guidelines - for rapid prototyping that we apply usingthe PureData visual programming environment. 
- We havemainly developed three prototypes of manual control: onecombining a 3D - mouse and a jog wheel, a second featuring a force-feedback 3D mouse, and a third - taking advantage of the multitouch trackpad. We discuss benefits andshortcomings - we experienced while prototyping using thisapproach.' - address: 'Sydney, Australia' - author: 'Frisson, Christian and Macq, Benoît and Dupont, Stéphane and Siebert, Xavier - and Tardieu, Damien and Dutoit, Thierry' - bibtex: "@inproceedings{Frisson2010,\n abstract = {This paper presents the development\ - \ of rapid and reusablegestural interface prototypes for navigation by similarity\ - \ inan audio database and for sound manipulation, using theAudioCycle application.\ - \ For this purpose, we propose andfollow guidelines for rapid prototyping that\ - \ we apply usingthe PureData visual programming environment. We havemainly developed\ - \ three prototypes of manual control: onecombining a 3D mouse and a jog wheel,\ - \ a second featuring a force-feedback 3D mouse, and a third taking advantage of\ - \ the multitouch trackpad. We discuss benefits andshortcomings we experienced\ - \ while prototyping using thisapproach.},\n address = {Sydney, Australia},\n author\ - \ = {Frisson, Christian and Macq, Beno{\\^i}t and Dupont, St\\'{e}phane and Siebert,\ - \ Xavier and Tardieu, Damien and Dutoit, Thierry},\n booktitle = {Proceedings\ + ID: Tanaka2012 + abstract: 'Mobile devices represent a growing research field within NIME, and a + growing area for commercial music software. They present unique design challenges + and opportunities, which are yet to be fully explored and exploited. In this paper, + we propose using a survey method combined with qualitative analysis to investigate + the way in which people use mobiles musically. 
We subsequently present as an area + of future research our own PDplayer, which provides a completely self contained + end application in the mobile device, potentially making the mobile a more viable + and expressive tool for musicians.' + address: 'Ann Arbor, Michigan' + author: Atau Tanaka and Adam Parkinson and Zack Settel and Koray Tahiroglu + bibtex: "@inproceedings{Tanaka2012,\n abstract = {Mobile devices represent a growing\ + \ research field within NIME, and a growing area for commercial music software.\ + \ They present unique design challenges and opportunities, which are yet to be\ + \ fully explored and exploited. In this paper, we propose using a survey method\ + \ combined with qualitative analysis to investigate the way in which people use\ + \ mobiles musically. We subsequently present as an area of future research our\ + \ own PDplayer, which provides a completely self contained end application in\ + \ the mobile device, potentially making the mobile a more viable and expressive\ + \ tool for musicians.},\n address = {Ann Arbor, Michigan},\n author = {Atau Tanaka\ + \ and Adam Parkinson and Zack Settel and Koray Tahiroglu},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177771},\n issn = {2220-4806},\n keywords = {Human-computer\ - \ interaction, gestural interfaces, rapid prototyping, browsing by similarity,\ - \ audio database},\n pages = {473--476},\n title = {DeviceCycle : Rapid and Reusable\ - \ Prototyping of Gestural Interfaces, Applied to Audio Browsing by Similarity},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_473.pdf},\n year = {2010}\n\ + \ doi = {10.5281/zenodo.1178431},\n issn = {2220-4806},\n keywords = {NIME, Mobile\ + \ Music, Pure Data},\n publisher = {University of Michigan},\n title = {A Survey\ + \ and Thematic Analysis Approach as Input to the Design of Mobile Music GUIs},\n\ + \ url = 
{http://www.nime.org/proceedings/2012/nime2012_240.pdf},\n year = {2012}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177771 + doi: 10.5281/zenodo.1178431 issn: 2220-4806 - keywords: 'Human-computer interaction, gestural interfaces, rapid prototyping, browsing - by similarity, audio database' - pages: 473--476 - title: 'DeviceCycle : Rapid and Reusable Prototyping of Gestural Interfaces, Applied - to Audio Browsing by Similarity' - url: http://www.nime.org/proceedings/2010/nime2010_473.pdf - year: 2010 + keywords: 'NIME, Mobile Music, Pure Data' + publisher: University of Michigan + title: A Survey and Thematic Analysis Approach as Input to the Design of Mobile + Music GUIs + url: http://www.nime.org/proceedings/2012/nime2012_240.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Muller2010 - abstract: 'In this paper we present a novel system for tactile actuation in stylus-based - musical interactions. The proposed controller aims to support rhythmical musical - performance. The system builds on resistive force feedback, which is achieved - through a brakeaugmented ball pen stylus on a sticky touch-sensitive surface. - Along the device itself, we present musical interaction principles that are enabled - through the aforementioned tactile response. Further variations of the device - and perspectives of the friction-based feedback are outlined. ' - address: 'Sydney, Australia' - author: 'Müller, Alexander and Hemmert, Fabian and Wintergerst, Götz and Jagodzinski, - Ron' - bibtex: "@inproceedings{Muller2010,\n abstract = {In this paper we present a novel\ - \ system for tactile actuation in stylus-based musical interactions. The proposed\ - \ controller aims to support rhythmical musical performance. The system builds\ - \ on resistive force feedback, which is achieved through a brakeaugmented ball\ - \ pen stylus on a sticky touch-sensitive surface. 
Along the device itself, we\ - \ present musical interaction principles that are enabled through the aforementioned\ - \ tactile response. Further variations of the device and perspectives of the friction-based\ - \ feedback are outlined. },\n address = {Sydney, Australia},\n author = {M\\\"\ - {u}ller, Alexander and Hemmert, Fabian and Wintergerst, G\\\"{o}tz and Jagodzinski,\ - \ Ron},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177835},\n issn = {2220-4806},\n\ - \ keywords = {force feedback, haptic feedback, interactive, pen controller},\n\ - \ pages = {477--478},\n title = {Reflective Haptics : Resistive Force Feedback\ - \ for Musical Performances with Stylus-Controlled Instruments},\n url = {http://www.nime.org/proceedings/2010/nime2010_477.pdf},\n\ - \ year = {2010}\n}\n" + ID: Derbinsky2012 + abstract: 'This paper presents a system for mobile percussive collaboration. We + show that reinforcement learning can incrementally learn percussive beat patterns + played by humans and supports realtime collaborative performance in the absence + of one or more performers. This work leverages an existing integration between + urMus and Soar and addresses multiple challenges involved in the deployment of + machine-learning algorithms for mobile music expression, including tradeoffs between + learning speed & quality; interface design for human collaborators; and real-time + performance and improvisation.' + address: 'Ann Arbor, Michigan' + author: Nate Derbinsky and Georg Essl + bibtex: "@inproceedings{Derbinsky2012,\n abstract = {This paper presents a system\ + \ for mobile percussive collaboration. We show that reinforcement learning can\ + \ incrementally learn percussive beat patterns played by humans and supports realtime\ + \ collaborative performance in the absence of one or more performers. 
This work\ + \ leverages an existing integration between urMus and Soar and addresses multiple\ + \ challenges involved in the deployment of machine-learning algorithms for mobile\ + \ music expression, including tradeoffs between learning speed & quality; interface\ + \ design for human collaborators; and real-time performance and improvisation.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Nate Derbinsky and Georg Essl},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178243},\n issn = {2220-4806},\n\ + \ keywords = {Mobile music, machine learning, cognitive architecture},\n publisher\ + \ = {University of Michigan},\n title = {Exploring Reinforcement Learning for\ + \ Mobile Percussive Collaboration},\n url = {http://www.nime.org/proceedings/2012/nime2012_241.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177835 + doi: 10.5281/zenodo.1178243 issn: 2220-4806 - keywords: 'force feedback, haptic feedback, interactive, pen controller' - pages: 477--478 - title: 'Reflective Haptics : Resistive Force Feedback for Musical Performances with - Stylus-Controlled Instruments' - url: http://www.nime.org/proceedings/2010/nime2010_477.pdf - year: 2010 + keywords: 'Mobile music, machine learning, cognitive architecture' + publisher: University of Michigan + title: Exploring Reinforcement Learning for Mobile Percussive Collaboration + url: http://www.nime.org/proceedings/2012/nime2012_241.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Mattek2010 - abstract: 'The American experimental tradition in music emphasizes a process-oriented - – rather than goal-oriented – composition style. According to this tradition, - the composition process is considered an experiment beginning with a problem resolved - by the composer. 
The noted experimental composer John Cage believed that the artist’s - role in composition should be one of coexistence, as opposed to the traditional - view of directly controlling the process. Consequently, Cage devel- oped methods - of composing that upheld this philosophy by utilizing musical charts and the I - Ching, also known as the Chinese Book of Changes. This project investigates these - methods and models them via an interactive computer system to explore the use - of modern interfaces in experimental composition.' - address: 'Sydney, Australia' - author: 'Mattek, Alison and Freeman, Mark and Humphrey, Eric' - bibtex: "@inproceedings{Mattek2010,\n abstract = {The American experimental tradition\ - \ in music emphasizes a process-oriented – rather than goal-oriented – composition\ - \ style. According to this tradition, the composition process is considered an\ - \ experiment beginning with a problem resolved by the composer. The noted experimental\ - \ composer John Cage believed that the artist’s role in composition should be\ - \ one of coexistence, as opposed to the traditional view of directly controlling\ - \ the process. Consequently, Cage devel- oped methods of composing that upheld\ - \ this philosophy by utilizing musical charts and the I Ching, also known as the\ - \ Chinese Book of Changes. 
This project investigates these methods and models\ - \ them via an interactive computer system to explore the use of modern interfaces\ - \ in experimental composition.},\n address = {Sydney, Australia},\n author = {Mattek,\ - \ Alison and Freeman, Mark and Humphrey, Eric},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n doi\ - \ = {10.5281/zenodo.1177847},\n issn = {2220-4806},\n keywords = {Multi-touch\ - \ Interfaces, Computer-Assisted Composition},\n pages = {479--480},\n title =\ - \ {Revisiting Cagean Composition Methodology with a Modern Computational Implementation},\n\ - \ url = {http://www.nime.org/proceedings/2010/nime2010_479.pdf},\n year = {2010}\n\ - }\n" + ID: Tache2012 + abstract: 'Force-feedback and physical modeling technologies now allow to achieve + the same kind of relation with virtual instruments as with acoustic instruments, + but the design of such elaborate models needs guidelines based on the study of + the human sensory-motor system and behaviour. This article presents a qualitative + study of a simulated instrumental interaction in the case of the virtual bowed + string, using both waveguide and mass-interaction models. Subjects were invited + to explore the possibilities of the simulations and to express themselves verbally + at the same time, allowing us to identify key qualities of the proposed systems + that determine the construction of an intimate and rich relationship with the + users.' 
+ address: 'Ann Arbor, Michigan' + author: Olivier Tache and Stephen Sinclair and Jean-Loup Florens and Marcelo Wanderley + bibtex: "@inproceedings{Tache2012,\n abstract = {Force-feedback and physical modeling\ + \ technologies now allow to achieve the same kind of relation with virtual instruments\ + \ as with acoustic instruments, but the design of such elaborate models needs\ + \ guidelines based on the study of the human sensory-motor system and behaviour.\ + \ This article presents a qualitative study of a simulated instrumental interaction\ + \ in the case of the virtual bowed string, using both waveguide and mass-interaction\ + \ models. Subjects were invited to explore the possibilities of the simulations\ + \ and to express themselves verbally at the same time, allowing us to identify\ + \ key qualities of the proposed systems that determine the construction of an\ + \ intimate and rich relationship with the users.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Olivier Tache and Stephen Sinclair and Jean-Loup Florens and Marcelo\ + \ Wanderley},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178429},\n issn\ + \ = {2220-4806},\n keywords = {Instrumental interaction, presence, force-feedback,\ + \ physical modeling, simulation, haptics, bowed string.},\n publisher = {University\ + \ of Michigan},\n title = {Exploring audio and tactile qualities of instrumentality\ + \ with bowed string simulations},\n url = {http://www.nime.org/proceedings/2012/nime2012_243.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177847 + doi: 10.5281/zenodo.1178429 issn: 2220-4806 - keywords: 'Multi-touch Interfaces, Computer-Assisted Composition' - pages: 479--480 - title: Revisiting Cagean Composition Methodology with a Modern Computational Implementation - url: 
http://www.nime.org/proceedings/2010/nime2010_479.pdf - year: 2010 + keywords: 'Instrumental interaction, presence, force-feedback, physical modeling, + simulation, haptics, bowed string.' + publisher: University of Michigan + title: Exploring audio and tactile qualities of instrumentality with bowed string + simulations + url: http://www.nime.org/proceedings/2012/nime2012_243.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Ferguson2010 - abstract: 'In this paper, we describe a comparison between parameters drawn from - 3-dimensional measurement of a dance performance, and continuous emotional response - data recorded from an audience present during this performance. A continuous time - series representing the mean movement as the dance unfolds is extracted from the - 3-dimensional data. The audiences'' continuous emotional response data are also - represented as a time series, and the series are compared. We concluded that movement - in the dance performance directly influences the emotional arousal response of - the audience. ' - address: 'Sydney, Australia' - author: 'Ferguson, Sam and Schubert, Emery and Stevens, Catherine' - bibtex: "@inproceedings{Ferguson2010,\n abstract = {In this paper, we describe a\ - \ comparison between parameters drawn from 3-dimensional measurement of a dance\ - \ performance, and continuous emotional response data recorded from an audience\ - \ present during this performance. A continuous time series representing the mean\ - \ movement as the dance unfolds is extracted from the 3-dimensional data. The\ - \ audiences' continuous emotional response data are also represented as a time\ - \ series, and the series are compared. 
We concluded that movement in the dance\ - \ performance directly influences the emotional arousal response of the audience.\ - \ },\n address = {Sydney, Australia},\n author = {Ferguson, Sam and Schubert,\ - \ Emery and Stevens, Catherine},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177767},\n\ - \ issn = {2220-4806},\n keywords = {Dance, Emotion, Motion Capture, Continuous\ - \ Response.},\n pages = {481--484},\n title = {Movement in a Contemporary Dance\ - \ Work and its Relation to Continuous Emotional Response},\n url = {http://www.nime.org/proceedings/2010/nime2010_481.pdf},\n\ - \ year = {2010}\n}\n" + ID: Leeuw2012a + abstract: "This position paper likes to stress the role and importance of performance\ + \ based education in NIME like subjects. It describes the `klankontwerp' learning\ + \ line at the `school of the arts Utrecht' in its department Music Technology.\n\ + Our educational system also reflects the way that we could treat performance in\ + \ the NIME community as a whole. The importance of performing with our instruments\ + \ other then in the form of a mere demonstration should get more emphasis." + address: 'Ann Arbor, Michigan' + author: Hans Leeuw and Jorrit Tamminga + bibtex: "@inproceedings{Leeuw2012a,\n abstract = {This position paper likes to stress\ + \ the role and importance of performance based education in NIME like subjects.\ + \ It describes the `klankontwerp' learning line at the `school of the arts Utrecht'\ + \ in its department Music Technology.\nOur educational system also reflects the\ + \ way that we could treat performance in the NIME community as a whole. 
The importance\ + \ of performing with our instruments other then in the form of a mere demonstration\ + \ should get more emphasis.},\n address = {Ann Arbor, Michigan},\n author = {Hans\ + \ Leeuw and Jorrit Tamminga},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178321},\n\ + \ issn = {2220-4806},\n keywords = {NIME, education, position paper, live electronics,\ + \ performance},\n publisher = {University of Michigan},\n title = {{NIME} Education\ + \ at the {HKU}, Emphasizing performance},\n url = {http://www.nime.org/proceedings/2012/nime2012_247.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177767 + doi: 10.5281/zenodo.1178321 issn: 2220-4806 - keywords: 'Dance, Emotion, Motion Capture, Continuous Response.' - pages: 481--484 - title: Movement in a Contemporary Dance Work and its Relation to Continuous Emotional - Response - url: http://www.nime.org/proceedings/2010/nime2010_481.pdf - year: 2010 + keywords: 'NIME, education, position paper, live electronics, performance' + publisher: University of Michigan + title: 'NIME Education at the HKU, Emphasizing performance' + url: http://www.nime.org/proceedings/2012/nime2012_247.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Ahmaniemi2010 - abstract: 'This paper investigates whether a dynamic vibrotactile feedback improves - the playability of a gesture controlled virtual instrument. The instrument described - in this study is based on a virtual control surface that player strikes with a - hand held sensor-actuator device. We designed two tactile cues to augment the - stroke across the control surface: a static and dynamic cue. The static cue was - a simple burst of vibration triggered when crossing the control surface. The dynamic - cue was continuous vibration increasing in amplitude when approaching the surface. 
- We arranged an experiment to study the influence of the tactile cues in performance. - In a tempo follow task, the dynamic cue yielded significantly the best temporal - and periodic accuracy and control of movement velocity and amplitude. The static - cue did not significantly improve the rhythmic accuracy but assisted the control - of movement velocity compared to the condition without tactile feedback at all. - The findings of the study indicate that careful design of dynamic vibrotactile - feedback can improve the controllability of gesture based virtual instrument. ' - address: 'Sydney, Australia' - author: 'Ahmaniemi, Teemu' - bibtex: "@inproceedings{Ahmaniemi2010,\n abstract = {This paper investigates whether\ - \ a dynamic vibrotactile feedback improves the playability of a gesture controlled\ - \ virtual instrument. The instrument described in this study is based on a virtual\ - \ control surface that player strikes with a hand held sensor-actuator device.\ - \ We designed two tactile cues to augment the stroke across the control surface:\ - \ a static and dynamic cue. The static cue was a simple burst of vibration triggered\ - \ when crossing the control surface. The dynamic cue was continuous vibration\ - \ increasing in amplitude when approaching the surface. We arranged an experiment\ - \ to study the influence of the tactile cues in performance. In a tempo follow\ - \ task, the dynamic cue yielded significantly the best temporal and periodic accuracy\ - \ and control of movement velocity and amplitude. The static cue did not significantly\ - \ improve the rhythmic accuracy but assisted the control of movement velocity\ - \ compared to the condition without tactile feedback at all. The findings of the\ - \ study indicate that careful design of dynamic vibrotactile feedback can improve\ - \ the controllability of gesture based virtual instrument. 
},\n address = {Sydney,\ - \ Australia},\n author = {Ahmaniemi, Teemu},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n doi =\ - \ {10.5281/zenodo.1177711},\n issn = {2220-4806},\n keywords = {Virtual instrument,\ - \ Gesture, Tactile feedback, Motor control},\n pages = {485--488},\n title = {Gesture\ - \ Controlled Virtual Instrument with Dynamic Vibrotactile Feedback},\n url = {http://www.nime.org/proceedings/2010/nime2010_485.pdf},\n\ - \ year = {2010}\n}\n" + ID: Gillian2012 + abstract: 'This paper presents Digito, a gesturally controlled virtual musical instrument. + Digito is controlled through a number of intricate hand gestures, providing both + discrete and continuous control of Digito''s sound engine; with the fine-grain + hand gestures captured by a 3D depth sensor and recognized using computer vision + and machine learning algorithms. We describe the design and initial iterative + development of Digito, the hand and finger tracking algorithms and gesture recognition + algorithms that drive the system, and report the insights gained during the initial + development cycles and user testing of this gesturally controlled virtual musical + instrument.' + address: 'Ann Arbor, Michigan' + author: Nicholas Gillian and Joseph A. Paradiso + bibtex: "@inproceedings{Gillian2012,\n abstract = {This paper presents Digito, a\ + \ gesturally controlled virtual musical instrument. Digito is controlled through\ + \ a number of intricate hand gestures, providing both discrete and continuous\ + \ control of Digito's sound engine; with the fine-grain hand gestures captured\ + \ by a 3D depth sensor and recognized using computer vision and machine learning\ + \ algorithms. 
We describe the design and initial iterative development of Digito,\ + \ the hand and finger tracking algorithms and gesture recognition algorithms that\ + \ drive the system, and report the insights gained during the initial development\ + \ cycles and user testing of this gesturally controlled virtual musical instrument.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Nicholas Gillian and Joseph A.\ + \ Paradiso},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178263},\n issn\ + \ = {2220-4806},\n keywords = {Gesture Recognition, Virtual Musical Instrument},\n\ + \ publisher = {University of Michigan},\n title = {Digito: A Fine-Grain Gesturally\ + \ Controlled Virtual Musical Instrument},\n url = {http://www.nime.org/proceedings/2012/nime2012_248.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177711 + doi: 10.5281/zenodo.1178263 issn: 2220-4806 - keywords: 'Virtual instrument, Gesture, Tactile feedback, Motor control' - pages: 485--488 - title: Gesture Controlled Virtual Instrument with Dynamic Vibrotactile Feedback - url: http://www.nime.org/proceedings/2010/nime2010_485.pdf - year: 2010 + keywords: 'Gesture Recognition, Virtual Musical Instrument' + publisher: University of Michigan + title: 'Digito: A Fine-Grain Gesturally Controlled Virtual Musical Instrument' + url: http://www.nime.org/proceedings/2012/nime2012_248.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Hass2010 - abstract: 'In his demonstration, the author discusses the sequential progress of - his technical and aesthetic decisions as composer and videographer for four large-scale - works for dance through annotated video examples of live performances and PowerPoint - slides. 
In addition, he discusses his current real-time dance work with wireless - sensor interfaces using sewable LilyPad Arduino modules and Xbee radio hardware.' - address: 'Sydney, Australia' - author: 'Hass, Jeffrey' - bibtex: "@inproceedings{Hass2010,\n abstract = {In his demonstration, the author\ - \ discusses the sequential progress of his technical and aesthetic decisions as\ - \ composer and videographer for four large-scale works for dance through annotated\ - \ video examples of live performances and PowerPoint slides. In addition, he discusses\ - \ his current real-time dance work with wireless sensor interfaces using sewable\ - \ LilyPad Arduino modules and Xbee radio hardware.},\n address = {Sydney, Australia},\n\ - \ author = {Hass, Jeffrey},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1177793},\n\ - \ issn = {2220-4806},\n keywords = {dance, video processing, video tracking, LilyPad\ - \ Arduino.},\n pages = {489--492},\n title = {Creating Integrated Music and Video\ - \ for Dance : Lessons Learned and Lessons Ignored},\n url = {http://www.nime.org/proceedings/2010/nime2010_489.pdf},\n\ - \ year = {2010}\n}\n" + ID: Lehrman2012 + abstract: "George Antheil's notorious Ballet mécanique (1924-1925) was originally\ + \ scored for percussion ensemble, sound effects, and 16 pianolas. He was never\ + \ able to perform the piece with those forces, however, due to his inability to\ + \ synchronize multiple pianolas. Thus all performances of the piece in his lifetime,\ + \ and for decades after, were done with a single pianola or player piano.*\nThe\ + \ author traces the origin of the concept of synchronizing multiple pianolas,\ + \ and explains the attendant technological issues. 
He examines attempts to synchronize\ + \ mechanical pianos and other time-based devices at the time of Ballet mécanique's\ + \ composition, and suggests that Antheil's vision for his piece was not as farfetched\ + \ as has long been thought." + address: 'Ann Arbor, Michigan' + author: Paul Lehrman + bibtex: "@inproceedings{Lehrman2012,\n abstract = {George Antheil's notorious Ballet\ + \ m{\\'e}canique (1924-1925) was originally scored for percussion ensemble, sound\ + \ effects, and 16 pianolas. He was never able to perform the piece with those\ + \ forces, however, due to his inability to synchronize multiple pianolas. Thus\ + \ all performances of the piece in his lifetime, and for decades after, were done\ + \ with a single pianola or player piano.*\nThe author traces the origin of the\ + \ concept of synchronizing multiple pianolas, and explains the attendant technological\ + \ issues. He examines attempts to synchronize mechanical pianos and other time-based\ + \ devices at the time of Ballet m{\\'e}canique's composition, and suggests that\ + \ Antheil's vision for his piece was not as farfetched as has long been thought.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Paul Lehrman},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178323},\n issn = {2220-4806},\n keywords = {Antheil,\ + \ Stravinsky, player piano, pianola, mechanical instruments, synchronization},\n\ + \ publisher = {University of Michigan},\n title = {Multiple Pianolas in Antheil's\ + \ Ballet m{\\'e}canique},\n url = {http://www.nime.org/proceedings/2012/nime2012_25.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177793 + doi: 10.5281/zenodo.1178323 issn: 2220-4806 - keywords: 'dance, video processing, video tracking, LilyPad Arduino.' 
- pages: 489--492 - title: 'Creating Integrated Music and Video for Dance : Lessons Learned and Lessons - Ignored' - url: http://www.nime.org/proceedings/2010/nime2010_489.pdf - year: 2010 + keywords: 'Antheil, Stravinsky, player piano, pianola, mechanical instruments, synchronization' + publisher: University of Michigan + title: Multiple Pianolas in Antheil's Ballet mécanique + url: http://www.nime.org/proceedings/2012/nime2012_25.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Burt2010 - abstract: 'This paper describes a series of mathematical functions implemented by - the author in the commercial algorithmic software language ArtWonk, written by - John Dunn, which are offered with that language as resources for composers. It - gives a history of the development of the functions, with an emphasis on how I - developed them for use in my compositions.' - address: 'Sydney, Australia' - author: 'Burt, Warren' - bibtex: "@inproceedings{Burt2010,\n abstract = {This paper describes a series of\ - \ mathematical functions implemented by the author in the commercial algorithmic\ - \ software language ArtWonk, written by John Dunn, which are offered with that\ - \ language as resources for composers. 
It gives a history of the development of\ - \ the functions, with an emphasis on how I developed them for use in my compositions.},\n\ - \ address = {Sydney, Australia},\n author = {Burt, Warren},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ doi = {10.5281/zenodo.1177733},\n issn = {2220-4806},\n keywords = {Algorithmic\ - \ composition, mathematical composition, probability distributions, fractals,\ - \ additive sequences},\n pages = {493--496},\n title = {Packages for ArtWonk :\ - \ New Mathematical Tools for Composers},\n url = {http://www.nime.org/proceedings/2010/nime2010_493.pdf},\n\ - \ year = {2010}\n}\n" + ID: Fyans2012 + abstract: 'A study is presented examining the participatory design of digital musical + interactions. The study takes into consideration the entire ecology of digital + musical interactions including the designer, performer and spectator. A new instrument + is developed through iterative participatory design involving a group of performers. + Across the study the evolution of creative practice and skill development in an + emerging community of practice is examined and a spectator study addresses the + cognition of performance and the perception of skill with the instrument. Observations + are presented regarding the cognition of a novel interaction and evolving notions + of skill. The design process of digital musical interactions is reflected on focusing + on involvement of the spectator in design contexts.' + address: 'Ann Arbor, Michigan' + author: A. Cavan Fyans and Adnan Marquez-Borbon and Paul Stapleton and Michael Gurevich + bibtex: "@inproceedings{Fyans2012,\n abstract = {A study is presented examining\ + \ the participatory design of digital musical interactions. The study takes into\ + \ consideration the entire ecology of digital musical interactions including the\ + \ designer, performer and spectator. 
A new instrument is developed through iterative\ + \ participatory design involving a group of performers. Across the study the evolution\ + \ of creative practice and skill development in an emerging community of practice\ + \ is examined and a spectator study addresses the cognition of performance and\ + \ the perception of skill with the instrument. Observations are presented regarding\ + \ the cognition of a novel interaction and evolving notions of skill. The design\ + \ process of digital musical interactions is reflected on focusing on involvement\ + \ of the spectator in design contexts.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {A. Cavan Fyans and Adnan Marquez-Borbon and Paul Stapleton and Michael\ + \ Gurevich},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178257},\n issn\ + \ = {2220-4806},\n keywords = {participatory design, DMIs, skill, cognition, spectator},\n\ + \ publisher = {University of Michigan},\n title = {Ecological considerations for\ + \ participatory design of DMIs},\n url = {http://www.nime.org/proceedings/2012/nime2012_253.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177733 + doi: 10.5281/zenodo.1178257 issn: 2220-4806 - keywords: 'Algorithmic composition, mathematical composition, probability distributions, - fractals, additive sequences' - pages: 493--496 - title: 'Packages for ArtWonk : New Mathematical Tools for Composers' - url: http://www.nime.org/proceedings/2010/nime2010_493.pdf - year: 2010 + keywords: 'participatory design, DMIs, skill, cognition, spectator' + publisher: University of Michigan + title: Ecological considerations for participatory design of DMIs + url: http://www.nime.org/proceedings/2012/nime2012_253.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Miller2010 - abstract: 'The console gaming industry is 
experiencing a revolution in terms of - user control, and a large part to Nintendo''s introduction of the Wii remote. - The online open source development community has embraced the Wii remote, integrating - the inexpensive technology into numerous applications. Some of the more interesting - applications demonstrate how the remote hardware can be leveraged for nonstandard - uses. In this paper we describe a new way of interacting with the Wii remote and - sensor bar to produce music. The Wiiolin is a virtual instrument which can mimic - a violin or cello. Sensor bar motion relative to the Wii remote and button presses - are analyzed in real-time to generate notes. Our design is novel in that it involves - the remote''s infrared camera and sensor bar as an integral part of music production, - allowing users to change notes by simply altering the angle of their wrist, and - henceforth, bow. The Wiiolin introduces a more realistic way of instrument interaction - than other attempts that rely on button presses and accelerometer data alone. ' - address: 'Sydney, Australia' - author: 'Miller, Jace and Hammond, Tracy' - bibtex: "@inproceedings{Miller2010,\n abstract = {The console gaming industry is\ - \ experiencing a revolution in terms of user control, and a large part to Nintendo's\ - \ introduction of the Wii remote. The online open source development community\ - \ has embraced the Wii remote, integrating the inexpensive technology into numerous\ - \ applications. Some of the more interesting applications demonstrate how the\ - \ remote hardware can be leveraged for nonstandard uses. In this paper we describe\ - \ a new way of interacting with the Wii remote and sensor bar to produce music.\ - \ The Wiiolin is a virtual instrument which can mimic a violin or cello. Sensor\ - \ bar motion relative to the Wii remote and button presses are analyzed in real-time\ - \ to generate notes. 
Our design is novel in that it involves the remote's infrared\ - \ camera and sensor bar as an integral part of music production, allowing users\ - \ to change notes by simply altering the angle of their wrist, and henceforth,\ - \ bow. The Wiiolin introduces a more realistic way of instrument interaction than\ - \ other attempts that rely on button presses and accelerometer data alone. },\n\ - \ address = {Sydney, Australia},\n author = {Miller, Jace and Hammond, Tracy},\n\ + ID: Jaimovich2012 + abstract: 'In order to further understand our emotional reaction to music, a museum-based + installation was designed to collect physiological and self-report data from people + listening to music. This demo will describe the technical implementation of this + installation as a tool for collecting large samples of data in public spaces. + The Emotion in Motion terminal is built upon a standard desktop computer running + Max/MSP and using sensors that measure physiological indicators of emotion that + are connected to an Arduino. The terminal has been installed in museums and galleries + in Europe and the USA, helping create the largest database of physiology and self-report + data while listening to music.' + address: 'Ann Arbor, Michigan' + author: Javier Jaimovich and Miguel Ortiz and Niall Coghlan and R. Benjamin Knapp + bibtex: "@inproceedings{Jaimovich2012,\n abstract = {In order to further understand\ + \ our emotional reaction to music, a museum-based installation was designed to\ + \ collect physiological and self-report data from people listening to music. This\ + \ demo will describe the technical implementation of this installation as a tool\ + \ for collecting large samples of data in public spaces. The Emotion in Motion\ + \ terminal is built upon a standard desktop computer running Max/MSP and using\ + \ sensors that measure physiological indicators of emotion that are connected\ + \ to an Arduino. 
The terminal has been installed in museums and galleries in Europe\ + \ and the USA, helping create the largest database of physiology and self-report\ + \ data while listening to music.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Javier Jaimovich and Miguel Ortiz and Niall Coghlan and R. Benjamin Knapp},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n doi = {10.5281/zenodo.1177853},\n issn = {2220-4806},\n\ - \ keywords = {Wii remote, virtual instrument, violin, cello, motion recognition,\ - \ human computer interaction, gesture recognition.},\n pages = {497--500},\n title\ - \ = {Wiiolin : a Virtual Instrument Using the Wii Remote},\n url = {http://www.nime.org/proceedings/2010/nime2010_497.pdf},\n\ - \ year = {2010}\n}\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1178295},\n issn = {2220-4806},\n\ + \ keywords = {Biosignals, EDA, SC, GSR, HR, POX, Self-Report, Database, Physiological\ + \ Signals, Max/MSP, FTM, SAM, GEMS},\n publisher = {University of Michigan},\n\ + \ title = {The Emotion in Motion Experiment: Using an Interactive Installation\ + \ as a Means for Understanding Emotional Response to Music},\n url = {http://www.nime.org/proceedings/2012/nime2012_254.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177853 + doi: 10.5281/zenodo.1178295 issn: 2220-4806 - keywords: 'Wii remote, virtual instrument, violin, cello, motion recognition, human - computer interaction, gesture recognition.' 
- pages: 497--500 - title: 'Wiiolin : a Virtual Instrument Using the Wii Remote' - url: http://www.nime.org/proceedings/2010/nime2010_497.pdf - year: 2010 + keywords: 'Biosignals, EDA, SC, GSR, HR, POX, Self-Report, Database, Physiological + Signals, Max/MSP, FTM, SAM, GEMS' + publisher: University of Michigan + title: 'The Emotion in Motion Experiment: Using an Interactive Installation as a + Means for Understanding Emotional Response to Music' + url: http://www.nime.org/proceedings/2012/nime2012_254.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Meier2010 - abstract: '‘The Planets’ combines a novel approach for algorithmic composition with - new human-computer interaction paradigms and realistic painting techniques. The - main inspiration for it was the composition ‘The Planets’ from Gustav Holst who - portrayed each planet in our solar system with music. Our application allows to - interactively compose music in real-time by arranging planet constellations on - an interactive table. The music generation is controlled by painted miniatures - of the planets and the sun which are detected by the table and supplemented with - an additional graphical visualization, creating a unique audio-visual experience. - A video of the application can be found in [1].' - address: 'Sydney, Australia' - author: 'Meier, Max and Schranner, Max' - bibtex: "@inproceedings{Meier2010,\n abstract = {‘The Planets’ combines a novel\ - \ approach for algorithmic composition with new human-computer interaction paradigms\ - \ and realistic painting techniques. The main inspiration for it was the composition\ - \ ‘The Planets’ from Gustav Holst who portrayed each planet in our solar system\ - \ with music. Our application allows to interactively compose music in real-time\ - \ by arranging planet constellations on an interactive table. 
The music generation\ - \ is controlled by painted miniatures of the planets and the sun which are detected\ - \ by the table and supplemented with an additional graphical visualization, creating\ - \ a unique audio-visual experience. A video of the application can be found in\ - \ [1].},\n address = {Sydney, Australia},\n author = {Meier, Max and Schranner,\ - \ Max},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n doi = {10.5281/zenodo.1177851},\n issn = {2220-4806},\n\ - \ keywords = {algorithmic composition, soft constraints, tangible interaction},\n\ - \ pages = {501--504},\n title = {The Planets},\n url = {http://www.nime.org/proceedings/2010/nime2010_501.pdf},\n\ - \ year = {2010}\n}\n" + ID: Grosshauser2012 + abstract: 'From a technical point of view, instrumental music mak-ing involves audible, + visible and hidden playing parameters. Hidden parameters like force, pressure + and fast movements, happening within milliseconds are particularly difficult to + capture. Here, we present data focusing on movement coordination parameters of + the left hand fingers with the bow hand in violinists and between two violinists + in group playing. Data was recorded with different position sensors, a micro camcorder + fixed on a violin and an acceleration sensor placed on the bow. Sensor measurements + were obtained at a high sampling rate, gathering the data with a small mi-crocontroller + unit, connected with a laptop computer. To capture bow''s position, rotation and + angle directly on the bow to string contact point, the micro camcorder was fixed + near the bridge. Main focuses of interest were the changes of the left hand finger, + the temporal synchronization between left hand fingers with the right hand, the + close up view to the bow to string contact point and the contact of the left hand + finger and/or string to the fingerboard. 
Seven violinists, from beginners to master + class students played scales in different rhythms, speeds and bowings and music + excerpts of free choice while being recorded. One measure-ment with 2 violinists + was made to see the time differences between two musicians while playing together. + For simple integration of a conventional violin into electronic music environments, + left hand sensor data were exemplary converted to MIDI and OSC.' + address: 'Ann Arbor, Michigan' + author: Tobias Grosshauser and Victor Candia and Horst Hildebrand and Gerhard Tröster + bibtex: "@inproceedings{Grosshauser2012,\n abstract = {From a technical point of\ + \ view, instrumental music mak-ing involves audible, visible and hidden playing\ + \ parameters. Hidden parameters like force, pressure and fast movements, happening\ + \ within milliseconds are particularly difficult to capture. Here, we present\ + \ data focusing on movement coordination parameters of the left hand fingers with\ + \ the bow hand in violinists and between two violinists in group playing. Data\ + \ was recorded with different position sensors, a micro camcorder fixed on a violin\ + \ and an acceleration sensor placed on the bow. Sensor measurements were obtained\ + \ at a high sampling rate, gathering the data with a small mi-crocontroller unit,\ + \ connected with a laptop computer. To capture bow's position, rotation and angle\ + \ directly on the bow to string contact point, the micro camcorder was fixed near\ + \ the bridge. Main focuses of interest were the changes of the left hand finger,\ + \ the temporal synchronization between left hand fingers with the right hand,\ + \ the close up view to the bow to string contact point and the contact of the\ + \ left hand finger and/or string to the fingerboard. Seven violinists, from beginners\ + \ to master class students played scales in different rhythms, speeds and bowings\ + \ and music excerpts of free choice while being recorded. 
One measure-ment with\ + \ 2 violinists was made to see the time differences between two musicians while\ + \ playing together. For simple integration of a conventional violin into electronic\ + \ music environments, left hand sensor data were exemplary converted to MIDI and\ + \ OSC.},\n address = {Ann Arbor, Michigan},\n author = {Tobias Grosshauser and\ + \ Victor Candia and Horst Hildebrand and Gerhard Tr{\\''o}ster},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178269},\n issn = {2220-4806},\n keywords = {Strings,\ + \ violin, coordination, left, finger, right, hand},\n publisher = {University\ + \ of Michigan},\n title = {Sensor Based Measurements of Musicians' Synchronization\ + \ Issues},\n url = {http://www.nime.org/proceedings/2012/nime2012_256.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - doi: 10.5281/zenodo.1177851 + doi: 10.5281/zenodo.1178269 issn: 2220-4806 - keywords: 'algorithmic composition, soft constraints, tangible interaction' - pages: 501--504 - title: The Planets - url: http://www.nime.org/proceedings/2010/nime2010_501.pdf - year: 2010 + keywords: 'Strings, violin, coordination, left, finger, right, hand' + publisher: University of Michigan + title: Sensor Based Measurements of Musicians' Synchronization Issues + url: http://www.nime.org/proceedings/2012/nime2012_256.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Arfib2002 - abstract: In this paper we describe the digital emulation of a optical photosonic - instrument. First we briefly describe theoptical instrument which is the basis - of this emulation.Then we give a musical description of the instrument implementation - and its musical use and we concludewith the "duo" possibility of such an emulation. 
- address: 'Dublin, Ireland' - author: 'Arfib, Daniel and Dudon, Jacques' - bibtex: "@inproceedings{Arfib2002,\n abstract = {In this paper we describe the digital\ - \ emulation of a optical photosonic instrument. First we briefly describe theoptical\ - \ instrument which is the basis of this emulation.Then we give a musical description\ - \ of the instrument implementation and its musical use and we concludewith the\ - \ \"duo\" possibility of such an emulation.},\n address = {Dublin, Ireland},\n\ - \ author = {Arfib, Daniel and Dudon, Jacques},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n date =\ - \ {24-26 May, 2002},\n doi = {10.5281/zenodo.1176388},\n issn = {2220-4806},\n\ - \ keywords = {Photosonic synthesis, digital emulation, Max-Msp, gestural devices.},\n\ - \ pages = {1--4},\n title = {A Digital Emulator of the Photosonic Instrument},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_001.pdf},\n year = {2002}\n\ - }\n" + ID: Bosi2012 + abstract: 'Tangible tabletop musical interfaces allowing for a collabo-rative real-time + interaction in live music performances are one of the promising fields in NIMEs. + At present, this kind of interfaces present at least some of the following charac-teristics + that limit their musical use: latency in the inter-action, and partial or complete + lack of responsiveness to gestures such as tapping, scrubbing or pressing force. + Our current research is exploring ways of improving the quality of interaction + with this kind of interfaces, and in particular with the tangible tabletop instrument + Reactable . In this paper we present a system based on a circular array of me-chanically + intercoupled force sensing resistors used to obtain a low-latency, affordable, + and easily embeddable hardware system able to detect surface impacts and pressures + on the tabletop perimeter. 
We also consider the option of com-pleting this detected + gestural information with the sound information coming from a contact microphone + attached to the mechanical coupling layer, to control physical modelling synthesis + of percussion instruments.' + address: 'Ann Arbor, Michigan' + author: Mathieu Bosi and Sergi Jordà + bibtex: "@inproceedings{Bosi2012,\n abstract = {Tangible tabletop musical interfaces\ + \ allowing for a collabo-rative real-time interaction in live music performances\ + \ are one of the promising fields in NIMEs. At present, this kind of interfaces\ + \ present at least some of the following charac-teristics that limit their musical\ + \ use: latency in the inter-action, and partial or complete lack of responsiveness\ + \ to gestures such as tapping, scrubbing or pressing force. Our current research\ + \ is exploring ways of improving the quality of interaction with this kind of\ + \ interfaces, and in particular with the tangible tabletop instrument Reactable\ + \ . In this paper we present a system based on a circular array of me-chanically\ + \ intercoupled force sensing resistors used to obtain a low-latency, affordable,\ + \ and easily embeddable hardware system able to detect surface impacts and pressures\ + \ on the tabletop perimeter. 
We also consider the option of com-pleting this detected\ + \ gestural information with the sound information coming from a contact microphone\ + \ attached to the mechanical coupling layer, to control physical modelling synthesis\ + \ of percussion instruments.},\n address = {Ann Arbor, Michigan},\n author = {Mathieu\ + \ Bosi and Sergi Jord{\\`a}},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178217},\n\ + \ issn = {2220-4806},\n keywords = {tangible tabletop interfaces, force sensing\ + \ resistor, mechanical coupling, fast low-noise analog to digital conversion,\ + \ low-latency sensing, micro controller, multimodal systems, complementary sensing.},\n\ + \ publisher = {University of Michigan},\n title = {Towards fast multi-point force\ + \ and hit detection in tabletops using mechanically intercoupled force sensing\ + \ resisors},\n url = {http://www.nime.org/proceedings/2012/nime2012_257.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176388 + doi: 10.5281/zenodo.1178217 issn: 2220-4806 - keywords: 'Photosonic synthesis, digital emulation, Max-Msp, gestural devices.' - pages: 1--4 - title: A Digital Emulator of the Photosonic Instrument - url: http://www.nime.org/proceedings/2002/nime2002_001.pdf - year: 2002 + keywords: 'tangible tabletop interfaces, force sensing resistor, mechanical coupling, + fast low-noise analog to digital conversion, low-latency sensing, micro controller, + multimodal systems, complementary sensing.' 
+ publisher: University of Michigan + title: Towards fast multi-point force and hit detection in tabletops using mechanically + intercoupled force sensing resisors + url: http://www.nime.org/proceedings/2012/nime2012_257.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Baumann2002 - abstract: 'In this paper we will have a short overview of some of the systems we - have been developing as an independent company over the last years. We will focus - especially on our latest experiments in developing wireless gestural systems using - the camera as an interactive tool to generate 2D and 3D visuals and music. ' - address: 'Dublin, Ireland' - author: 'Baumann, Alain and Sánchez, Rosa' - bibtex: "@inproceedings{Baumann2002,\n abstract = {In this paper we will have a\ - \ short overview of some of the systems we have been developing as an independent\ - \ company over the last years. We will focus especially on our latest experiments\ - \ in developing wireless gestural systems using the camera as an interactive tool\ - \ to generate 2D and 3D visuals and music. },\n address = {Dublin, Ireland},\n\ - \ author = {Baumann, Alain and S\\'{a}nchez, Rosa},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176390},\n issn = {2220-4806},\n\ - \ keywords = {interdisciplinary applications of new instruments, mixed media instruments},\n\ - \ pages = {5--9},\n title = {Interdisciplinary Applications of New Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_005.pdf},\n year = {2002}\n\ + ID: Zamorano2012 + abstract: "This paper introduces Simpletones, an interactive sound system that enables\ + \ a sense of musical collaboration for non-musicians. Participants can easily\ + \ create simple sound compositions in real time by collaboratively operating physical\ + \ artifacts as sound controllers. 
The physical configuration of the artifacts\ + \ requires coordinated actions between participants to control sound (thus requiring,\ + \ and emphasizing collaboration).\nSimpletones encourages playful human-to-human\ + \ interaction by introducing a simple interface and a set of basic rules [1].\ + \ This enables novices to focus on the collaborative aspects of making music as\ + \ a group (such as synchronization and taking collective decisions through non-verbal\ + \ communication) to ultimately engage a state of group flow[2].\nThis project\ + \ is relevant to a contemporary discourse on musical expression because it allows\ + \ novices to experience the social aspects of group music making, something that\ + \ is usually reserved only for trained performers [3]." + address: 'Ann Arbor, Michigan' + author: Francisco Zamorano + bibtex: "@inproceedings{Zamorano2012,\n abstract = {This paper introduces Simpletones,\ + \ an interactive sound system that enables a sense of musical collaboration for\ + \ non-musicians. Participants can easily create simple sound compositions in real\ + \ time by collaboratively operating physical artifacts as sound controllers. The\ + \ physical configuration of the artifacts requires coordinated actions between\ + \ participants to control sound (thus requiring, and emphasizing collaboration).\n\ + Simpletones encourages playful human-to-human interaction by introducing a simple\ + \ interface and a set of basic rules [1]. 
This enables novices to focus on the\ + \ collaborative aspects of making music as a group (such as synchronization and\ + \ taking collective decisions through non-verbal communication) to ultimately\ + \ engage a state of group flow[2].\nThis project is relevant to a contemporary\ + \ discourse on musical expression because it allows novices to experience the\ + \ social aspects of group music making, something that is usually reserved only\ + \ for trained performers [3].},\n address = {Ann Arbor, Michigan},\n author =\ + \ {Francisco Zamorano},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178459},\n\ + \ issn = {2220-4806},\n keywords = {Collaboration, Artifacts, Computer Vision,\ + \ Color Tracking, State of Flow.},\n publisher = {University of Michigan},\n title\ + \ = {Simpletones: A System of Collaborative Physical Controllers for Novices},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_258.pdf},\n year = {2012}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176390 + doi: 10.5281/zenodo.1178459 issn: 2220-4806 - keywords: 'interdisciplinary applications of new instruments, mixed media instruments' - pages: 5--9 - title: Interdisciplinary Applications of New Instruments - url: http://www.nime.org/proceedings/2002/nime2002_005.pdf - year: 2002 + keywords: 'Collaboration, Artifacts, Computer Vision, Color Tracking, State of Flow.' 
+ publisher: University of Michigan + title: 'Simpletones: A System of Collaborative Physical Controllers for Novices' + url: http://www.nime.org/proceedings/2012/nime2012_258.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Bernard2002 - abstract: 'This paper describes the design and development of several musical instruments - and MIDI controllers built byDavid Bernard (as part of The Sound Surgery project:www.thesoundsurgery.co.uk) - and used in club performances around Glasgow during 1995-2002. It argues that - changing technologies and copyright are shifting ourunderstanding of music from - "live art" to "recorded medium" whilst blurring the boundaries between sound and - visual production.' - address: 'Dublin, Ireland' - author: 'Bernard, David' - bibtex: "@inproceedings{Bernard2002,\n abstract = {This paper describes the design\ - \ and development of several musical instruments and MIDI controllers built byDavid\ - \ Bernard (as part of The Sound Surgery project:www.thesoundsurgery.co.uk) and\ - \ used in club performances around Glasgow during 1995-2002. It argues that changing\ - \ technologies and copyright are shifting ourunderstanding of music from \"live\ - \ art\" to \"recorded medium\" whilst blurring the boundaries between sound and\ - \ visual production.},\n address = {Dublin, Ireland},\n author = {Bernard, David},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176392},\n\ - \ issn = {2220-4806},\n keywords = {Live electronic music, experimental instruments,\ - \ MIDI controllers, audio-visual synchronisation, copyright, SKINS digital hand\ - \ drum.},\n pages = {10--11},\n title = {Experimental Controllers for Live Electronic\ - \ Music Performance (vs. 
Copyright).},\n url = {http://www.nime.org/proceedings/2002/nime2002_010.pdf},\n\ - \ year = {2002}\n}\n" + ID: Dahl2012 + abstract: 'Composing music for ensembles of computer-based instruments, such as + laptop orchestra or mobile phone orchestra, is a multi-faceted and challenging + endeavor whose parameters and criteria for success are ill-defined. In the design + community, tasks with these qualities are known as wicked problems. This paper + frames composing for computer-based ensemble as a design task, shows how Buchanan''s + four domains of design are present in the task, and discusses its wicked properties. + The themes of visibility, risk, and embodiment, as formulated by Klemmer, are + shown to be implicitly present in this design task. Composers are encouraged to + address them explicitly and to take advantage of the practices of prototyping + and iteration.' + address: 'Ann Arbor, Michigan' + author: Luke Dahl + bibtex: "@inproceedings{Dahl2012,\n abstract = {Composing music for ensembles of\ + \ computer-based instruments, such as laptop orchestra or mobile phone orchestra,\ + \ is a multi-faceted and challenging endeavor whose parameters and criteria for\ + \ success are ill-defined. In the design community, tasks with these qualities\ + \ are known as wicked problems. This paper frames composing for computer-based\ + \ ensemble as a design task, shows how Buchanan's four domains of design are present\ + \ in the task, and discusses its wicked properties. The themes of visibility,\ + \ risk, and embodiment, as formulated by Klemmer, are shown to be implicitly present\ + \ in this design task. 
Composers are encouraged to address them explicitly and\ + \ to take advantage of the practices of prototyping and iteration.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Luke Dahl},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178239},\n issn = {2220-4806},\n keywords = {Design,\ + \ laptop orchestra, mobile phone orchestra, instrument design, interaction design,\ + \ composition},\n publisher = {University of Michigan},\n title = {Wicked Problems\ + \ and Design Considerations in Composing for Laptop Orchestra},\n url = {http://www.nime.org/proceedings/2012/nime2012_259.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176392 + doi: 10.5281/zenodo.1178239 issn: 2220-4806 - keywords: 'Live electronic music, experimental instruments, MIDI controllers, audio-visual - synchronisation, copyright, SKINS digital hand drum.' - pages: 10--11 - title: Experimental Controllers for Live Electronic Music Performance (vs. Copyright). - url: http://www.nime.org/proceedings/2002/nime2002_010.pdf - year: 2002 + keywords: 'Design, laptop orchestra, mobile phone orchestra, instrument design, + interaction design, composition' + publisher: University of Michigan + title: Wicked Problems and Design Considerations in Composing for Laptop Orchestra + url: http://www.nime.org/proceedings/2012/nime2012_259.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Blaine2002 - abstract: 'This paper discusses the Jam-O-Drum multi-player musical controller and - its adaptation into a gaming controller interface known as the Jam-O-Whirl. The - Jam-O-World project positioned these two controller devices in a dedicated projection - environment that enabled novice players to participate in immersive musical gaming - experiences. 
Players'' actions, detected via embedded sensors in an integrated - tabletop surface, control game play, real-time computer graphics and musical interaction. - Jam-O-World requires physical and social interaction as well as collaboration - among players. ' - address: 'Dublin, Ireland' - author: 'Blaine, Tina and Forlines, Clifton' - bibtex: "@inproceedings{Blaine2002,\n abstract = {This paper discusses the Jam-O-Drum\ - \ multi-player musical controller and its adaptation into a gaming controller\ - \ interface known as the Jam-O-Whirl. The Jam-O-World project positioned these\ - \ two controller devices in a dedicated projection environment that enabled novice\ - \ players to participate in immersive musical gaming experiences. Players' actions,\ - \ detected via embedded sensors in an integrated tabletop surface, control game\ - \ play, real-time computer graphics and musical interaction. Jam-O-World requires\ - \ physical and social interaction as well as collaboration among players. },\n\ - \ address = {Dublin, Ireland},\n author = {Blaine, Tina and Forlines, Clifton},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176394},\n\ - \ issn = {2220-4806},\n keywords = {Collaboration, computer graphics, embedded\ - \ sensors, gaming controller, immersive musical gaming experiences, musical controller,\ - \ multi-player, novice, social interaction.},\n pages = {12--17},\n title = {JAM-O-WORLD:\ - \ Evolution of the Jam-O-Drum Multi-player Musical Controller into the Jam-O-Whirl\ - \ Gaming Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_012.pdf},\n\ - \ year = {2002}\n}\n" + ID: Frisson2012 + abstract: 'This paper presents the LoopJam installation which allows participants + to interact with a sound map using a 3D com-puter vision tracking system. The + sound map results from similarity-based clustering of sounds. 
The playback of + these sounds is controlled by the positions or gestures of partic-ipants tracked + with a Kinect depth-sensing camera. The beat-inclined bodily movements of participants + in the in-stallation are mapped to the tempo of played sounds, while the playback + speed is synchronized by default among all sounds. We presented and tested an + early version of the in-stallation to three exhibitions in Belgium, Italy and + France. The reactions among participants ranged between curiosity and amusement.' + address: 'Ann Arbor, Michigan' + author: Christian Frisson and Stéphane Dupont and Julien Leroy and Alexis Moinet + and Thierry Ravet and Xavier Siebert and Thierry Dutoit + bibtex: "@inproceedings{Frisson2012,\n abstract = {This paper presents the LoopJam\ + \ installation which allows participants to interact with a sound map using a\ + \ 3D com-puter vision tracking system. The sound map results from similarity-based\ + \ clustering of sounds. The playback of these sounds is controlled by the positions\ + \ or gestures of partic-ipants tracked with a Kinect depth-sensing camera. The\ + \ beat-inclined bodily movements of participants in the in-stallation are mapped\ + \ to the tempo of played sounds, while the playback speed is synchronized by default\ + \ among all sounds. We presented and tested an early version of the in-stallation\ + \ to three exhibitions in Belgium, Italy and France. 
The reactions among participants\ + \ ranged between curiosity and amusement.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Christian Frisson and St{\\'e}phane Dupont and Julien Leroy and Alexis\ + \ Moinet and Thierry Ravet and Xavier Siebert and Thierry Dutoit},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178255},\n issn = {2220-4806},\n keywords\ + \ = {Interactive music systems and retrieval, user interaction and interfaces,\ + \ audio similarity, depth sensors},\n publisher = {University of Michigan},\n\ + \ title = {LoopJam: turning the dance floor into a collaborative instrumental\ + \ map},\n url = {http://www.nime.org/proceedings/2012/nime2012_260.pdf},\n year\ + \ = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176394 + doi: 10.5281/zenodo.1178255 issn: 2220-4806 - keywords: 'Collaboration, computer graphics, embedded sensors, gaming controller, - immersive musical gaming experiences, musical controller, multi-player, novice, - social interaction.' - pages: 12--17 - title: 'JAM-O-WORLD: Evolution of the Jam-O-Drum Multi-player Musical Controller - into the Jam-O-Whirl Gaming Interface' - url: http://www.nime.org/proceedings/2002/nime2002_012.pdf - year: 2002 + keywords: 'Interactive music systems and retrieval, user interaction and interfaces, + audio similarity, depth sensors' + publisher: University of Michigan + title: 'LoopJam: turning the dance floor into a collaborative instrumental map' + url: http://www.nime.org/proceedings/2012/nime2012_260.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Bongers2002 - abstract: 'The Video-Organ is an instrument for the live performance of audio-visual - material. 
To design an interface we apply a modular approach, in an attempt to - split up the complex task of finding physical interfaces and mappings to control - sound and video as generated by the computer. Generally, most modules, or instrumentlets - as they are called, consist of a human interface element mapped to a certain effect. - To describe the instrumentlets a design space is used consisting of the parameters - degrees of freedom, range and precision. This paper is addressing the notion that - traditional approaches to composition are challenged and changed in this situation, - where the material is both audio and visual, and where the design and development - of an instrument becomes involved in the process of performing and composing.' - address: 'Dublin, Ireland' - author: 'Bongers, Bert and Harris, Yolande' - bibtex: "@inproceedings{Bongers2002,\n abstract = {The Video-Organ is an instrument\ - \ for the live performance of audio-visual material. To design an interface we\ - \ apply a modular approach, in an attempt to split up the complex task of finding\ - \ physical interfaces and mappings to control sound and video as generated by\ - \ the computer. Generally, most modules, or instrumentlets as they are called,\ - \ consist of a human interface element mapped to a certain effect. To describe\ - \ the instrumentlets a design space is used consisting of the parameters degrees\ - \ of freedom, range and precision. 
This paper is addressing the notion that traditional\ - \ approaches to composition are challenged and changed in this situation, where\ - \ the material is both audio and visual, and where the design and development\ - \ of an instrument becomes involved in the process of performing and composing.},\n\ - \ address = {Dublin, Ireland},\n author = {Bongers, Bert and Harris, Yolande},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176396},\n\ - \ issn = {2220-4806},\n pages = {18--23},\n title = {A Structured Instrument Design\ - \ Approach: The Video-Organ},\n url = {http://www.nime.org/proceedings/2002/nime2002_018.pdf},\n\ - \ year = {2002}\n}\n" + ID: Melo2012 + abstract: 'This paper describes the conceptualization and development of an open + source tool for controlling the sound of a saxophone via the gestures of its performer. + The motivation behind this work is the need for easy access tools to explore, + compose and perform electroacoustic music in Colombian music schools and conservatories. + This work led to the adaptation of common hardware to be used as a sensor attached + to an acoustic instrument and the development of software applications to record, + visualize and map performers gesture data into signal processing parameters. The + scope of this work suggested that focus was to be made on a specific instrument + so the saxophone was chosen. Gestures were selected in an iterative process with + the performer, although a more ambitious strategy to figure out main gestures + of an instruments performance was first defined. Detailed gesture-to-sound processing + mappings are exposed in the text. An electroacoustic musical piece was successfully + rehearsed and recorded using the Gest-O system.' 
+ address: 'Ann Arbor, Michigan' + author: Jonh Melo and Daniel Gómez and Miguel Vargas + bibtex: "@inproceedings{Melo2012,\n abstract = {This paper describes the conceptualization\ + \ and development of an open source tool for controlling the sound of a saxophone\ + \ via the gestures of its performer. The motivation behind this work is the need\ + \ for easy access tools to explore, compose and perform electroacoustic music\ + \ in Colombian music schools and conservatories. This work led to the adaptation\ + \ of common hardware to be used as a sensor attached to an acoustic instrument\ + \ and the development of software applications to record, visualize and map performers\ + \ gesture data into signal processing parameters. The scope of this work suggested\ + \ that focus was to be made on a specific instrument so the saxophone was chosen.\ + \ Gestures were selected in an iterative process with the performer, although\ + \ a more ambitious strategy to figure out main gestures of an instruments performance\ + \ was first defined. Detailed gesture-to-sound processing mappings are exposed\ + \ in the text. 
An electroacoustic musical piece was successfully rehearsed and\ + \ recorded using the Gest-O system.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Jonh Melo and Daniel G{\\'o}mez and Miguel Vargas},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1180535},\n issn = {2220-4806},\n keywords = {Electroacoustic\ + \ music, saxophone, expanded instrument, gesture.},\n publisher = {University\ + \ of Michigan},\n title = {Gest-O: Performer gestures used to expand the sounds\ + \ of the saxophone},\n url = {http://www.nime.org/proceedings/2012/nime2012_262.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176396 + doi: 10.5281/zenodo.1180535 issn: 2220-4806 - pages: 18--23 - title: 'A Structured Instrument Design Approach: The Video-Organ' - url: http://www.nime.org/proceedings/2002/nime2002_018.pdf - year: 2002 + keywords: 'Electroacoustic music, saxophone, expanded instrument, gesture.' + publisher: University of Michigan + title: 'Gest-O: Performer gestures used to expand the sounds of the saxophone' + url: http://www.nime.org/proceedings/2012/nime2012_262.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Burtner2002 - abstract: 'Noisegate 67 was the first fully interactive composition written for - the Computer Metasaxophone, a new computer controller interface for electroacoustic - music. The Metasaxophone is an acoustic tenor saxophone retrofitted with an onboard - computer microprocessor and an array of sensors that convert performance data - into MIDI control messages. While maintaining full acoustic functionality the - Metasaxophone is a versatile MIDI controller. 
This paper discusses the compositionally - driven technical and aesthetic concerns that went into building the Metasaxophone, - and the resulting aesthetic implementations in Noisegate 67. By juxtaposing the - compositional approach to the saxophone before and after the electronic enhancements - an attempt is made to expose working paradigms of composition for metainstruments.' - address: 'Dublin, Ireland' - author: 'Burtner, Matthew' - bibtex: "@inproceedings{Burtner2002,\n abstract = {Noisegate 67 was the first fully\ - \ interactive composition written for the Computer Metasaxophone, a new computer\ - \ controller interface for electroacoustic music. The Metasaxophone is an acoustic\ - \ tenor saxophone retrofitted with an onboard computer microprocessor and an array\ - \ of sensors that convert performance data into MIDI control messages. While maintaining\ - \ full acoustic functionality the Metasaxophone is a versatile MIDI controller.\ - \ This paper discusses the compositionally driven technical and aesthetic concerns\ - \ that went into building the Metasaxophone, and the resulting aesthetic implementations\ - \ in Noisegate 67. 
By juxtaposing the compositional approach to the saxophone\ - \ before and after the electronic enhancements an attempt is made to expose working\ - \ paradigms of composition for metainstruments.},\n address = {Dublin, Ireland},\n\ - \ author = {Burtner, Matthew},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ - \ doi = {10.5281/zenodo.1176398},\n issn = {2220-4806},\n pages = {24--29},\n\ - \ title = {Noisegate 67 for Metasaxophone: Composition and Performance Considerations\ - \ of a New Computer Music Controller},\n url = {http://www.nime.org/proceedings/2002/nime2002_024.pdf},\n\ - \ year = {2002}\n}\n" + ID: Freed2012 + abstract: 'The Fingerphone, a reworking of the Stylophone in conductive paper, is + presented as an example of new design approaches for sustainability and playability + of electronic musical instruments.' + address: 'Ann Arbor, Michigan' + author: Adrian Freed + bibtex: "@inproceedings{Freed2012,\n abstract = {The Fingerphone, a reworking of\ + \ the Stylophone in conductive paper, is presented as an example of new design\ + \ approaches for sustainability and playability of electronic musical instruments.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Adrian Freed},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178253},\n issn = {2220-4806},\n keywords = {Stylophone,\ + \ Conductive Paper, Pressure Sensing, Touch Sensing, Capacitive Sensing, Plurifunctionality,\ + \ Fingerphone, Sustainable Design},\n publisher = {University of Michigan},\n\ + \ title = {The Fingerphone: a Case Study of Sustainable Instrument Redesign},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_264.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 
10.5281/zenodo.1176398 + doi: 10.5281/zenodo.1178253 issn: 2220-4806 - pages: 24--29 - title: 'Noisegate 67 for Metasaxophone: Composition and Performance Considerations - of a New Computer Music Controller' - url: http://www.nime.org/proceedings/2002/nime2002_024.pdf - year: 2002 + keywords: 'Stylophone, Conductive Paper, Pressure Sensing, Touch Sensing, Capacitive + Sensing, Plurifunctionality, Fingerphone, Sustainable Design' + publisher: University of Michigan + title: 'The Fingerphone: a Case Study of Sustainable Instrument Redesign' + url: http://www.nime.org/proceedings/2012/nime2012_264.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Camurri2002 - abstract: 'This paper presents some our recent research on computational models - and algorithms for real-time analysis of full-body human movement. The focus here - is on techniques to extract in real-time expressive cues relevant to KANSEI and - emotional content in human expressive gesture, e.g., in dance and music performances. - Expressive gesture can contribute to new perspectives for the design of interactive - systems. The EyesWeb open software platform is a main concrete result from our - research work. EyesWeb is used in interactive applications, including music and - other artistic productions, museum interactive exhibits, therapy and rehabilitation, - based on the paradigm of expressive gesture. EyesWeb is freely available from - www.eyesweb.org.' - address: 'Dublin, Ireland' - author: 'Camurri, Antonio and Trocca, Riccardo and Volpe, Gualtiero' - bibtex: "@inproceedings{Camurri2002,\n abstract = {This paper presents some our\ - \ recent research on computational models and algorithms for real-time analysis\ - \ of full-body human movement. The focus here is on techniques to extract in real-time\ - \ expressive cues relevant to KANSEI and emotional content in human expressive\ - \ gesture, e.g., in dance and music performances. 
Expressive gesture can contribute\ - \ to new perspectives for the design of interactive systems. The EyesWeb open\ - \ software platform is a main concrete result from our research work. EyesWeb\ - \ is used in interactive applications, including music and other artistic productions,\ - \ museum interactive exhibits, therapy and rehabilitation, based on the paradigm\ - \ of expressive gesture. EyesWeb is freely available from www.eyesweb.org.},\n\ - \ address = {Dublin, Ireland},\n author = {Camurri, Antonio and Trocca, Riccardo\ - \ and Volpe, Gualtiero},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ - \ = {10.5281/zenodo.1176400},\n issn = {2220-4806},\n pages = {30--37},\n title\ - \ = {Interactive Systems Design: A KANSEI-based Approach},\n url = {http://www.nime.org/proceedings/2002/nime2002_030.pdf},\n\ - \ year = {2002}\n}\n" + ID: Leeuw2012 + abstract: "This short paper follows an earlier NIME paper [1] describing the invention\ + \ and construction of the Electrumpet. Revisions and playing experience are both\ + \ part of the current paper.\nThe Electrumpet can be heard in the performance\ + \ given by Hans Leeuw and Diemo Schwarz at this NIME conference." 
+ address: 'Ann Arbor, Michigan' + author: Hans Leeuw + bibtex: "@inproceedings{Leeuw2012,\n abstract = {This short paper follows an earlier\ + \ NIME paper [1] describing the invention and construction of the Electrumpet.\ + \ Revisions and playing experience are both part of the current paper.\nThe Electrumpet\ + \ can be heard in the performance given by Hans Leeuw and Diemo Schwarz at this\ + \ NIME conference.},\n address = {Ann Arbor, Michigan},\n author = {Hans Leeuw},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178319},\n issn = {2220-4806},\n\ + \ keywords = {NIME, Electrumpet, live-electronics, hybrid instruments.},\n publisher\ + \ = {University of Michigan},\n title = {The electrumpet, additions and revisions},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_271.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176400 + doi: 10.5281/zenodo.1178319 issn: 2220-4806 - pages: 30--37 - title: 'Interactive Systems Design: A KANSEI-based Approach' - url: http://www.nime.org/proceedings/2002/nime2002_030.pdf - year: 2002 - - -- ENTRYTYPE: inproceedings - ID: Chadabe2002 - abstract: 'Mapping, which describes the way a performer''s controls are connected - to sound variables, is a useful concept when applied to the structure of electronic - instruments modelled after traditional acoustic instruments. But mapping is a - less useful concept when applied to the structure of complex and interactive instruments - in which algorithms generate control information. This paper relates the functioning - and benefits of different types of electronic instruments to the structural principles - on which they are based. Structural models of various instruments will be discussed - and musical examples played. 
' - address: 'Dublin, Ireland' - author: 'Chadabe, Joel' - bibtex: "@inproceedings{Chadabe2002,\n abstract = {Mapping, which describes the\ - \ way a performer's controls are connected to sound variables, is a useful concept\ - \ when applied to the structure of electronic instruments modelled after traditional\ - \ acoustic instruments. But mapping is a less useful concept when applied to the\ - \ structure of complex and interactive instruments in which algorithms generate\ - \ control information. This paper relates the functioning and benefits of different\ - \ types of electronic instruments to the structural principles on which they are\ - \ based. Structural models of various instruments will be discussed and musical\ - \ examples played. },\n address = {Dublin, Ireland},\n author = {Chadabe, Joel},\n\ + keywords: 'NIME, Electrumpet, live-electronics, hybrid instruments.' + publisher: University of Michigan + title: 'The electrumpet, additions and revisions' + url: http://www.nime.org/proceedings/2012/nime2012_271.pdf + year: 2012 + + +- ENTRYTYPE: inproceedings + ID: Mitchell2012 + abstract: 'This paper presents a toolbox of gestural control mechanisms which are + available when the input sensing apparatus is a pair of data gloves fitted with + orientation sensors. The toolbox was developed in advance of a live music performance + in which the mapping from gestural input to audio output was to be developed rapidly + in collaboration with the performer. The paper begins with an introduction to + the associated literature before introducing a range of continuous, discrete and + combined control mechanisms, enabling a flexible range of mappings to be explored + and modified easily. An application of the toolbox within a live music performance + is then described with an evaluation of the system with ideas for future developments.' 
+ address: 'Ann Arbor, Michigan' + author: Thomas Mitchell and Sebastian Madgwick and Imogen Heap + bibtex: "@inproceedings{Mitchell2012,\n abstract = {This paper presents a toolbox\ + \ of gestural control mechanisms which are available when the input sensing apparatus\ + \ is a pair of data gloves fitted with orientation sensors. The toolbox was developed\ + \ in advance of a live music performance in which the mapping from gestural input\ + \ to audio output was to be developed rapidly in collaboration with the performer.\ + \ The paper begins with an introduction to the associated literature before introducing\ + \ a range of continuous, discrete and combined control mechanisms, enabling a\ + \ flexible range of mappings to be explored and modified easily. An application\ + \ of the toolbox within a live music performance is then described with an evaluation\ + \ of the system with ideas for future developments.},\n address = {Ann Arbor,\ + \ Michigan},\n author = {Thomas Mitchell and Sebastian Madgwick and Imogen Heap},\n\ \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176402},\n\ - \ issn = {2220-4806},\n keywords = {mapping fly-by-wire algorithmic network interactivity\ - \ instrument deterministic indeterministic},\n pages = {38--42},\n title = {The\ - \ Limitations of Mapping as a Structural Descriptive in Electronic Instruments},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_038.pdf},\n year = {2002}\n\ - }\n" + \ Musical Expression},\n doi = {10.5281/zenodo.1180543},\n issn = {2220-4806},\n\ + \ keywords = {Computer Music, Gestural Control, Data Gloves},\n publisher = {University\ + \ of Michigan},\n title = {Musical Interaction with Hand Posture and Orientation:\ + \ A Toolbox of Gestural Control Mechanisms},\n url = {http://www.nime.org/proceedings/2012/nime2012_272.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the 
International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176402 + doi: 10.5281/zenodo.1180543 issn: 2220-4806 - keywords: mapping fly-by-wire algorithmic network interactivity instrument deterministic - indeterministic - pages: 38--42 - title: The Limitations of Mapping as a Structural Descriptive in Electronic Instruments - url: http://www.nime.org/proceedings/2002/nime2002_038.pdf - year: 2002 + keywords: 'Computer Music, Gestural Control, Data Gloves' + publisher: University of Michigan + title: 'Musical Interaction with Hand Posture and Orientation: A Toolbox of Gestural + Control Mechanisms' + url: http://www.nime.org/proceedings/2012/nime2012_272.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Couturier2002 - abstract: 'This paper describes a virtual musical instrument based on the scanned - synthesis technique and implemented in Max-Msp. The device is composed of a computer - and three gesture sensors. The timbre of the produced sound is rich and changing. - The instrument proposes an intuitive and expressive control of the sound thanks - to a complex mapping between gesture and sound. ' - address: 'Dublin, Ireland' - author: 'Couturier, Jean-Michel' - bibtex: "@inproceedings{Couturier2002,\n abstract = {This paper describes a virtual\ - \ musical instrument based on the scanned synthesis technique and implemented\ - \ in Max-Msp. The device is composed of a computer and three gesture sensors.\ - \ The timbre of the produced sound is rich and changing. The instrument proposes\ - \ an intuitive and expressive control of the sound thanks to a complex mapping\ - \ between gesture and sound. 
},\n address = {Dublin, Ireland},\n author = {Couturier,\ - \ Jean-Michel},\n booktitle = {Proceedings of the International Conference on\ - \ New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi =\ - \ {10.5281/zenodo.1176404},\n issn = {2220-4806},\n keywords = {graphics tablet,\ - \ meta-parameters, multi-touch tactile surface, scanned synthesis},\n pages =\ - \ {43--45},\n title = {A Scanned Synthesis Virtual Instrument},\n url = {http://www.nime.org/proceedings/2002/nime2002_043.pdf},\n\ - \ year = {2002}\n}\n" + ID: Dahlstedt2012 + abstract: 'I present a novel low-tech multidimensional gestural con-troller, based + on the resistive properties of a 2D field of pencil markings on paper. A set of + movable electrodes (+, -, ground) made from soldered stacks of coins create a + dynamic voltage potential field in the carbon layer, and an-other set of movable + electrodes tap voltages from this field. These voltages are used to control complex + sound engines in an analogue modular synthesizer. Both the voltage field and the + tap electrodes can be moved freely. The design was inspired by previous research + in complex mappings for advanced digital instruments, and provides a similarly + dynamic playing environment for analogue synthesis. The interface is cheap to + build, and provides flexible control over a large set of parameters. It is musically + satisfying to play, and allows for a wide range of playing techniques, from wild + exploration to subtle expressions. I also present an inven-tory of the available + playing techniques, motivated by the interface design, musically, conceptually + and theatrically. The performance aspects of the interface are also discussed. + The interface has been used in a number of performances in Sweden and Japan in + 2011, and is also used by other musicians.' 
+ address: 'Ann Arbor, Michigan' + author: Palle Dahlstedt + bibtex: "@inproceedings{Dahlstedt2012,\n abstract = {I present a novel low-tech\ + \ multidimensional gestural con-troller, based on the resistive properties of\ + \ a 2D field of pencil markings on paper. A set of movable electrodes (+, -, ground)\ + \ made from soldered stacks of coins create a dynamic voltage potential field\ + \ in the carbon layer, and an-other set of movable electrodes tap voltages from\ + \ this field. These voltages are used to control complex sound engines in an analogue\ + \ modular synthesizer. Both the voltage field and the tap electrodes can be moved\ + \ freely. The design was inspired by previous research in complex mappings for\ + \ advanced digital instruments, and provides a similarly dynamic playing environment\ + \ for analogue synthesis. The interface is cheap to build, and provides flexible\ + \ control over a large set of parameters. It is musically satisfying to play,\ + \ and allows for a wide range of playing techniques, from wild exploration to\ + \ subtle expressions. I also present an inven-tory of the available playing techniques,\ + \ motivated by the interface design, musically, conceptually and theatrically.\ + \ The performance aspects of the interface are also discussed. 
The interface has\ + \ been used in a number of performances in Sweden and Japan in 2011, and is also\ + \ used by other musicians.},\n address = {Ann Arbor, Michigan},\n author = {Palle\ + \ Dahlstedt},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178241},\n issn\ + \ = {2220-4806},\n keywords = {gestural interface, 2d, analog synthesis, performance,\ + \ improvisation},\n publisher = {University of Michigan},\n title = {Pencil Fields:\ + \ An Expressive Low-Tech Performance Interface for Analog Synthesis},\n url =\ + \ {http://www.nime.org/proceedings/2012/nime2012_275.pdf},\n year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176404 + doi: 10.5281/zenodo.1178241 issn: 2220-4806 - keywords: 'graphics tablet, meta-parameters, multi-touch tactile surface, scanned - synthesis' - pages: 43--45 - title: A Scanned Synthesis Virtual Instrument - url: http://www.nime.org/proceedings/2002/nime2002_043.pdf - year: 2002 + keywords: 'gestural interface, 2d, analog synthesis, performance, improvisation' + publisher: University of Michigan + title: 'Pencil Fields: An Expressive Low-Tech Performance Interface for Analog Synthesis' + url: http://www.nime.org/proceedings/2012/nime2012_275.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: DArcangelo2002 - abstract: 'This paper presents the approaches and expectations of a recently launched - course at New York University (NYU) in the design and development of musical controllers. - The framework for the course, which is also entitled "New Interfaces for Musical - Expression," is largely based on the proceedings of the first NIME workshop held - in Seattle, WA in April 2001.' 
- address: 'Dublin, Ireland' - author: 'D''Arcangelo, Gideon' - bibtex: "@inproceedings{DArcangelo2002,\n abstract = {This paper presents the approaches\ - \ and expectations of a recently launched course at New York University (NYU)\ - \ in the design and development of musical controllers. The framework for the\ - \ course, which is also entitled \"New Interfaces for Musical Expression,\" is\ - \ largely based on the proceedings of the first NIME workshop held in Seattle,\ - \ WA in April 2001.},\n address = {Dublin, Ireland},\n author = {D'Arcangelo,\ - \ Gideon},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176406},\n\ - \ issn = {2220-4806},\n keywords = {creative expression, input devices, musical\ - \ controllers},\n pages = {46--49},\n title = {Creating a Context for Musical\ - \ Innovation: A NIME Curriculum},\n url = {http://www.nime.org/proceedings/2002/nime2002_046.pdf},\n\ - \ year = {2002}\n}\n" + ID: Kimura2012 + abstract: 'As a 2010 Artist in Residence in Musical Research at IRCAM, Mari Kimura + used the Augmented Violin to develop new compositional approaches, and new ways + of creating interactive performances [1]. She contributed her empirical and historical + knowledge of violin bowing technique, working with the Real Time Musical Interactions + Team at IRCAM. Thanks to this residency, her ongoing long-distance collaboration + with the team since 2007 dramatically accelerated, and led to solving several + compositional and calibration issues of the Gesture Follower (GF) [2]. Kimura + was also the first artist to develop projects between the two teams at IRCAM, + using OMAX (Musical Representation Team) with GF. In the past year, the performance + with Augmented Violin has been expanded in larger scale interactive audio/visual + projects as well. 
In this paper, we report on the various techniques developed + for the Augmented Violin and compositions by Kimura using them, offering specific + examples and scores.' + address: 'Ann Arbor, Michigan' + author: Mari Kimura and Nicolas Rasamimanana and Frédéric Bevilacqua and Norbert + Schnell and Bruno Zamborlin and Emmanuel Fléty + bibtex: "@inproceedings{Kimura2012,\n abstract = {As a 2010 Artist in Residence\ + \ in Musical Research at IRCAM, Mari Kimura used the Augmented Violin to develop\ + \ new compositional approaches, and new ways of creating interactive performances\ + \ [1]. She contributed her empirical and historical knowledge of violin bowing\ + \ technique, working with the Real Time Musical Interactions Team at IRCAM. Thanks\ + \ to this residency, her ongoing long-distance collaboration with the team since\ + \ 2007 dramatically accelerated, and led to solving several compositional and\ + \ calibration issues of the Gesture Follower (GF) [2]. Kimura was also the first\ + \ artist to develop projects between the two teams at IRCAM, using OMAX (Musical\ + \ Representation Team) with GF. In the past year, the performance with Augmented\ + \ Violin has been expanded in larger scale interactive audio/visual projects as\ + \ well. 
In this paper, we report on the various techniques developed for the Augmented\ + \ Violin and compositions by Kimura using them, offering specific examples and\ + \ scores.},\n address = {Ann Arbor, Michigan},\n author = {Mari Kimura and Nicolas\ + \ Rasamimanana and Fr{\\'e}d{\\'e}ric Bevilacqua and Norbert Schnell and Bruno\ + \ Zamborlin and Emmanuel Fl{\\'e}ty},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178305},\n\ + \ issn = {2220-4806},\n keywords = {Augmented Violin, Gesture Follower, Interactive\ + \ Performance},\n publisher = {University of Michigan},\n title = {Extracting\ + \ Human Expression For Interactive Composition with the Augmented Violin},\n url\ + \ = {http://www.nime.org/proceedings/2012/nime2012_279.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176406 + doi: 10.5281/zenodo.1178305 issn: 2220-4806 - keywords: 'creative expression, input devices, musical controllers' - pages: 46--49 - title: 'Creating a Context for Musical Innovation: A NIME Curriculum' - url: http://www.nime.org/proceedings/2002/nime2002_046.pdf - year: 2002 + keywords: 'Augmented Violin, Gesture Follower, Interactive Performance' + publisher: University of Michigan + title: Extracting Human Expression For Interactive Composition with the Augmented + Violin + url: http://www.nime.org/proceedings/2012/nime2012_279.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Fels2002 - abstract: 'In this paper we describe three new music controllers, each designed - to be played by two players. As the intimacy between two people increases so does - their ability to anticipate and predict the other''s actions. We hypothesize that - this intimacy between two people can be used as a basis for new controllers for - musical expression. 
Looking at ways people communicate non-verbally, we are developing - three new instruments based on different communication channels. The Tooka is - a hollow tube with a pressure sensor and buttons for each player. Players place - opposite ends in their mouths and modulate the pressure in the tube with their - tongues and lungs, controlling sound. Coordinated button presses control the music - as well. The Pushka, yet to be built, is a semirigid rod with strain gauges and - position sensors to track the rod''s position. Each player holds opposite ends - of the rod and manipulates it together. Bend, end point position, velocity and - acceleration and torque are mapped to musical parameters. The Pullka, yet to be - built, is simply a string attached at both ends with two bridges. Tension is measured - with strain gauges. Players manipulate the string tension at each end together - to modulate sound. We are looking at different musical mappings appropriate for - two players.' - address: 'Dublin, Ireland' - author: 'Fels, Sidney S. and Vogt, Florian' - bibtex: "@inproceedings{Fels2002,\n abstract = {In this paper we describe three\ - \ new music controllers, each designed to be played by two players. As the intimacy\ - \ between two people increases so does their ability to anticipate and predict\ - \ the other's actions. We hypothesize that this intimacy between two people can\ - \ be used as a basis for new controllers for musical expression. Looking at ways\ - \ people communicate non-verbally, we are developing three new instruments based\ - \ on different communication channels. The Tooka is a hollow tube with a pressure\ - \ sensor and buttons for each player. Players place opposite ends in their mouths\ - \ and modulate the pressure in the tube with their tongues and lungs, controlling\ - \ sound. Coordinated button presses control the music as well. 
The Pushka, yet\ - \ to be built, is a semirigid rod with strain gauges and position sensors to track\ - \ the rod's position. Each player holds opposite ends of the rod and manipulates\ - \ it together. Bend, end point position, velocity and acceleration and torque\ - \ are mapped to musical parameters. The Pullka, yet to be built, is simply a string\ - \ attached at both ends with two bridges. Tension is measured with strain gauges.\ - \ Players manipulate the string tension at each end together to modulate sound.\ - \ We are looking at different musical mappings appropriate for two players.},\n\ - \ address = {Dublin, Ireland},\n author = {Fels, Sidney S. and Vogt, Florian},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176408},\n\ - \ issn = {2220-4806},\n keywords = {Two person musical instruments, intimacy,\ - \ human-human communication, cooperative music, passive haptic interface},\n pages\ - \ = {50--55},\n title = {Tooka: Explorations of Two Person Instruments},\n url\ - \ = {http://www.nime.org/proceedings/2002/nime2002_050.pdf},\n year = {2002}\n\ - }\n" + ID: Shear2012 + abstract: "The Electromagnetically Sustained Rhodes Piano is an orig-inal Rhodes\ + \ Piano modified to provide control over the amplitude envelope of individual\ + \ notes through aftertouch pressure. Although there are many opportunities to\ + \ shape the amplitude envelope before loudspeaker amplification, they are all\ + \ governed by the ever-decaying physical vibra-tions of the tone generating mechanism.\ + \ A single-note proof of concept for electromagnetic control over this vibrating\ + \ mechanism was presented at NIME 2011.\nIn the past year, virtually every aspect\ + \ of the system has been improved. 
We use a different vibration sensor that is\ + \ immune to electromagnetic interference, thus eliminat-ing troublesome feedback.\ + \ For control, we both reduce cost and gain continuous position sensing throughout\ + \ the entire range of key motion in addition to aftertouch pressure. Finally,\ + \ the entire system now fits within the space constraints presented by the original\ + \ piano, allowing it to be installed on adjacent notes." + address: 'Ann Arbor, Michigan' + author: Greg Shear and Matthew Wright + bibtex: "@inproceedings{Shear2012,\n abstract = {The Electromagnetically Sustained\ + \ Rhodes Piano is an orig-inal Rhodes Piano modified to provide control over the\ + \ amplitude envelope of individual notes through aftertouch pressure. Although\ + \ there are many opportunities to shape the amplitude envelope before loudspeaker\ + \ amplification, they are all governed by the ever-decaying physical vibra-tions\ + \ of the tone generating mechanism. A single-note proof of concept for electromagnetic\ + \ control over this vibrating mechanism was presented at NIME 2011.\nIn the past\ + \ year, virtually every aspect of the system has been improved. We use a different\ + \ vibration sensor that is immune to electromagnetic interference, thus eliminat-ing\ + \ troublesome feedback. For control, we both reduce cost and gain continuous position\ + \ sensing throughout the entire range of key motion in addition to aftertouch\ + \ pressure. 
Finally, the entire system now fits within the space constraints presented\ + \ by the original piano, allowing it to be installed on adjacent notes.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Greg Shear and Matthew Wright},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1180599},\n issn = {2220-4806},\n keywords\ + \ = {Rhodes, piano, mechanical synthesizer, electromagnetic, sustain, feedback},\n\ + \ publisher = {University of Michigan},\n title = {Further Developments in the\ + \ Electromagnetically Sustained Rhodes Piano},\n url = {http://www.nime.org/proceedings/2012/nime2012_284.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176408 + doi: 10.5281/zenodo.1180599 issn: 2220-4806 - keywords: 'Two person musical instruments, intimacy, human-human communication, - cooperative music, passive haptic interface' - pages: 50--55 - title: 'Tooka: Explorations of Two Person Instruments' - url: http://www.nime.org/proceedings/2002/nime2002_050.pdf - year: 2002 + keywords: 'Rhodes, piano, mechanical synthesizer, electromagnetic, sustain, feedback' + publisher: University of Michigan + title: Further Developments in the Electromagnetically Sustained Rhodes Piano + url: http://www.nime.org/proceedings/2012/nime2012_284.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Ferris2002 - abstract: 'The Cardboard Box Garden (CBG) originated from a dissatisfaction with - current computer technology as it is presented to children. This paper shall briefly - review the process involved in the creation of this installation, from motivation - through to design and subsequent implementation and user experience with the CBG. 
- Through the augmentation of an everyday artefact, namely the standard cardboard - box, a simple yet powerful interactive environment was created that has achieved - its goal of stirring childrens imagination judging from the experience of our - users. ' - address: 'Dublin, Ireland' - author: 'Ferris, Kieran and Bannon, Liam' - bibtex: "@inproceedings{Ferris2002,\n abstract = {The Cardboard Box Garden (CBG)\ - \ originated from a dissatisfaction with current computer technology as it is\ - \ presented to children. This paper shall briefly review the process involved\ - \ in the creation of this installation, from motivation through to design and\ - \ subsequent implementation and user experience with the CBG. Through the augmentation\ - \ of an everyday artefact, namely the standard cardboard box, a simple yet powerful\ - \ interactive environment was created that has achieved its goal of stirring childrens\ - \ imagination judging from the experience of our users. },\n address = {Dublin,\ - \ Ireland},\n author = {Ferris, Kieran and Bannon, Liam},\n booktitle = {Proceedings\ + ID: Wang2012 + abstract: 'We have added a dynamic bio-mechanical mapping layer that contains a + model of the human vocal tract with tongue muscle activations as input and tract + geometry as output to a real time gesture controlled voice synthesizer system + used for musical performance and speech research. Using this mapping layer, we + conducted user studies comparing controlling the model muscle activations using + a 2D set of force sensors with a position controlled kinematic input space that + maps directly to the sound. Preliminary user evaluation suggests that it was more + difficult to using force input but the resultant output sound was more intelligible + and natural compared to the kinematic controller. 
This result shows that force + input is a potentially feasible for browsing through a vowel space for an articulatory + voice synthesis system, although further evaluation is required.' + address: 'Ann Arbor, Michigan' + author: Johnty Wang and Nicolas d'Alessandro and Sidney Fels and Robert Pritchard + bibtex: "@inproceedings{Wang2012,\n abstract = {We have added a dynamic bio-mechanical\ + \ mapping layer that contains a model of the human vocal tract with tongue muscle\ + \ activations as input and tract geometry as output to a real time gesture controlled\ + \ voice synthesizer system used for musical performance and speech research. Using\ + \ this mapping layer, we conducted user studies comparing controlling the model\ + \ muscle activations using a 2D set of force sensors with a position controlled\ + \ kinematic input space that maps directly to the sound. Preliminary user evaluation\ + \ suggests that it was more difficult to using force input but the resultant output\ + \ sound was more intelligible and natural compared to the kinematic controller.\ + \ This result shows that force input is a potentially feasible for browsing through\ + \ a vowel space for an articulatory voice synthesis system, although further evaluation\ + \ is required.},\n address = {Ann Arbor, Michigan},\n author = {Johnty Wang and\ + \ Nicolas d'Alessandro and Sidney Fels and Robert Pritchard},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176410},\n issn = {2220-4806},\n\ - \ keywords = {Education, play, augmented reality, pervasive computing, disappearing\ - \ computer, assembly, cardboard box},\n pages = {56--58},\n title = {The Musical\ - \ Box Garden},\n url = {http://www.nime.org/proceedings/2002/nime2002_056.pdf},\n\ - \ year = {2002}\n}\n" + \ doi = {10.5281/zenodo.1178447},\n issn = {2220-4806},\n keywords = {Gesture,\ + \ Mapping, Articulatory, Speech, 
Singing, Synthesis},\n publisher = {University\ + \ of Michigan},\n title = {Investigation of Gesture Controlled Articulatory Vocal\ + \ Synthesizer using a Bio-Mechanical Mapping Layer},\n url = {http://www.nime.org/proceedings/2012/nime2012_291.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176410 + doi: 10.5281/zenodo.1178447 issn: 2220-4806 - keywords: 'Education, play, augmented reality, pervasive computing, disappearing - computer, assembly, cardboard box' - pages: 56--58 - title: The Musical Box Garden - url: http://www.nime.org/proceedings/2002/nime2002_056.pdf - year: 2002 + keywords: 'Gesture, Mapping, Articulatory, Speech, Singing, Synthesis' + publisher: University of Michigan + title: Investigation of Gesture Controlled Articulatory Vocal Synthesizer using + a Bio-Mechanical Mapping Layer + url: http://www.nime.org/proceedings/2012/nime2012_291.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Flety2002 - abstract: 'Research and musical creation with gestural-oriented interfaces have - recently seen a renewal of interest and activity at Ircam [1][2]. In the course - of several musical projects, undertaken by young composers attending the one-year - Course in Composition and Computer Music or by guests artists, Ircam Education - and Creation departments have proposed various solutions for gesture-controlled - sound synthesis and processing. In this article, we describe the technical aspects - of AtoMIC Pro, an Analog to MIDI converter proposed as a re-usable solution for - digitizing several sensors in different contexts such as interactive sound installation - or virtual instruments.The main direction of our researches, and of this one in - particular, is to create tools that can be fully integrated into an artistic project - as a real part of the composition and performance processes.' 
- address: 'Dublin, Ireland' - author: 'Fléty, Emmanuel' - bibtex: "@inproceedings{Flety2002,\n abstract = {Research and musical creation with\ - \ gestural-oriented interfaces have recently seen a renewal of interest and activity\ - \ at Ircam [1][2]. In the course of several musical projects, undertaken by young\ - \ composers attending the one-year Course in Composition and Computer Music or\ - \ by guests artists, Ircam Education and Creation departments have proposed various\ - \ solutions for gesture-controlled sound synthesis and processing. In this article,\ - \ we describe the technical aspects of AtoMIC Pro, an Analog to MIDI converter\ - \ proposed as a re-usable solution for digitizing several sensors in different\ - \ contexts such as interactive sound installation or virtual instruments.The main\ - \ direction of our researches, and of this one in particular, is to create tools\ - \ that can be fully integrated into an artistic project as a real part of the\ - \ composition and performance processes.},\n address = {Dublin, Ireland},\n author\ - \ = {Fl\\'{e}ty, Emmanuel},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ - \ = {10.5281/zenodo.1176412},\n issn = {2220-4806},\n keywords = {Gestural controller,\ - \ Sensor, MIDI, Music. Solution for Multi-sensor Acquisition},\n pages = {59--64},\n\ - \ title = {AtoMIC Pro: a Multiple Sensor Acquisition Device},\n url = {http://www.nime.org/proceedings/2002/nime2002_059.pdf},\n\ - \ year = {2002}\n}\n" + ID: Carey2012 + abstract: 'This paper presents the author''s derivations system, an interactive + performance system for solo improvising instrumentalist. The system makes use + of a combination of real-time audio analysis, live sampling and spectral re-synthesis + to build a vocabulary of possible performative responses to live instrumental + input throughout an improvisatory performance. 
A form of timbral matching is employed + to form a link between the live performer and an expanding database of musical + materials. In addition, the system takes into account the unique nature of the + rehearsal/practice space in musical performance through the implementation of + performer-configurable cumulative rehearsal databases into the final design. This + paper discusses the system in detail with reference to related work in the field, + making specific reference to the system''s interactive potential both inside and + outside of a real-time performance context.' + address: 'Ann Arbor, Michigan' + author: Benjamin Carey + bibtex: "@inproceedings{Carey2012,\n abstract = {This paper presents the author's\ + \ derivations system, an interactive performance system for solo improvising instrumentalist.\ + \ The system makes use of a combination of real-time audio analysis, live sampling\ + \ and spectral re-synthesis to build a vocabulary of possible performative responses\ + \ to live instrumental input throughout an improvisatory performance. A form of\ + \ timbral matching is employed to form a link between the live performer and an\ + \ expanding database of musical materials. In addition, the system takes into\ + \ account the unique nature of the rehearsal/practice space in musical performance\ + \ through the implementation of performer-configurable cumulative rehearsal databases\ + \ into the final design. 
This paper discusses the system in detail with reference\ + \ to related work in the field, making specific reference to the system's interactive\ + \ potential both inside and outside of a real-time performance context.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Benjamin Carey},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178227},\n issn = {2220-4806},\n keywords = {Interactivity,\ + \ performance systems, improvisation},\n publisher = {University of Michigan},\n\ + \ title = {Designing for Cumulative Interactivity: The {\\_}derivations System},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_292.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176412 + doi: 10.5281/zenodo.1178227 issn: 2220-4806 - keywords: 'Gestural controller, Sensor, MIDI, Music. Solution for Multi-sensor Acquisition' - pages: 59--64 - title: 'AtoMIC Pro: a Multiple Sensor Acquisition Device' - url: http://www.nime.org/proceedings/2002/nime2002_059.pdf - year: 2002 + keywords: 'Interactivity, performance systems, improvisation' + publisher: University of Michigan + title: 'Designing for Cumulative Interactivity: The _derivations System' + url: http://www.nime.org/proceedings/2012/nime2012_292.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Gadd2002 - abstract: 'We explore the role that metaphor plays in developing expressive devices - by examining the MetaMuse system. MetaMuse is a prop-based system that uses the - metaphor of rainfall to make the process of granular synthesis understandable. - We discuss MetaMuse within a framework we call ''''transparency'''' that can be - used as a predictor of the expressivity of musical devices. Metaphor depends on - a literature,or cultural basis, which forms the basis for making transparent device - mappings. 
In this context we evaluate the effect of metaphor in the MetaMuse system.' - address: 'Dublin, Ireland' - author: 'Gadd, Ashley and Fels, Sidney S.' - bibtex: "@inproceedings{Gadd2002,\n abstract = {We explore the role that metaphor\ - \ plays in developing expressive devices by examining the MetaMuse system. MetaMuse\ - \ is a prop-based system that uses the metaphor of rainfall to make the process\ - \ of granular synthesis understandable. We discuss MetaMuse within a framework\ - \ we call ''transparency'' that can be used as a predictor of the expressivity\ - \ of musical devices. Metaphor depends on a literature,or cultural basis, which\ - \ forms the basis for making transparent device mappings. In this context we evaluate\ - \ the effect of metaphor in the MetaMuse system.},\n address = {Dublin, Ireland},\n\ - \ author = {Gadd, Ashley and Fels, Sidney S.},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n date =\ - \ {24-26 May, 2002},\n doi = {10.5281/zenodo.1176414},\n issn = {2220-4806},\n\ - \ keywords = {Expressive interface, transparency, metaphor, prop-based controller,\ - \ granular synthesis.},\n pages = {65--70},\n title = {MetaMuse: Metaphors for\ - \ Expressive Instruments},\n url = {http://www.nime.org/proceedings/2002/nime2002_065.pdf},\n\ - \ year = {2002}\n}\n" + ID: Mayton2012 + abstract: 'We present Patchwerk, a networked synthesizer module with tightly coupled + web browser and tangible interfaces. Patchwerk connects to a pre-existing modular + synthesizer using the emerging cross-platform HTML5 WebSocket standard to enable + low-latency, high-bandwidth, concurrent control of analog signals by multiple + users. Online users control physical outputs on a custom-designed cabinet that + reflects their activity through a combination of motorized knobs and LEDs, and + streams the resultant audio. 
In a typical installation, a composer creates a complex + physical patch on the modular synth that exposes a set of analog and digital parameters + (knobs, buttons, toggles, and triggers) to the web-enabled cabinet. Both physically + present and online audiences can control those parameters, simultane-ously seeing + and hearing the results of each other''s actions. By enabling collaborative interaction + with a massive analog synthesizer, Patchwerk brings a broad audience closer to + a rare and historically important instrument. Patchwerk is available online at + http://synth.media.mit.edu.' + address: 'Ann Arbor, Michigan' + author: Brian Mayton and Gershon Dublon and Nicholas Joliat and Joseph A. Paradiso + bibtex: "@inproceedings{Mayton2012,\n abstract = {We present Patchwerk, a networked\ + \ synthesizer module with tightly coupled web browser and tangible interfaces.\ + \ Patchwerk connects to a pre-existing modular synthesizer using the emerging\ + \ cross-platform HTML5 WebSocket standard to enable low-latency, high-bandwidth,\ + \ concurrent control of analog signals by multiple users. Online users control\ + \ physical outputs on a custom-designed cabinet that reflects their activity through\ + \ a combination of motorized knobs and LEDs, and streams the resultant audio.\ + \ In a typical installation, a composer creates a complex physical patch on the\ + \ modular synth that exposes a set of analog and digital parameters (knobs, buttons,\ + \ toggles, and triggers) to the web-enabled cabinet. Both physically present and\ + \ online audiences can control those parameters, simultane-ously seeing and hearing\ + \ the results of each other's actions. By enabling collaborative interaction with\ + \ a massive analog synthesizer, Patchwerk brings a broad audience closer to a\ + \ rare and historically important instrument. 
Patchwerk is available online at\ + \ http://synth.media.mit.edu.},\n address = {Ann Arbor, Michigan},\n author =\ + \ {Brian Mayton and Gershon Dublon and Nicholas Joliat and Joseph A. Paradiso},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1178345},\n issn = {2220-4806},\n\ + \ keywords = {Modular synthesizer, HTML5, tangible interface, collaborative musical\ + \ instrument},\n publisher = {University of Michigan},\n title = {Patchwork: Multi-User\ + \ Network Control of a Massive Modular Synthesizer},\n url = {http://www.nime.org/proceedings/2012/nime2012_293.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176414 + doi: 10.5281/zenodo.1178345 issn: 2220-4806 - keywords: 'Expressive interface, transparency, metaphor, prop-based controller, - granular synthesis.' - pages: 65--70 - title: 'MetaMuse: Metaphors for Expressive Instruments' - url: http://www.nime.org/proceedings/2002/nime2002_065.pdf - year: 2002 + keywords: 'Modular synthesizer, HTML5, tangible interface, collaborative musical + instrument' + publisher: University of Michigan + title: 'Patchwork: Multi-User Network Control of a Massive Modular Synthesizer' + url: http://www.nime.org/proceedings/2012/nime2012_293.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Griffith2002 - abstract: 'The use of free gesture in making music has usually been confined to - instruments that use direct mappings between movement and sound space. Here we - demonstrate the use of categories of gesture as the basis of musical learning - and performance collaboration. These are used in a system that reinterprets the - approach to learning through performance that is found in many musical cultures - and discussed here through the example of Kpelle music. 
' - address: 'Dublin, Ireland' - author: 'Griffith, Niall J. and O''Leary, Sean and O''Shea, Donagh and Hammond, - Ed and O''Modhrain, Sile' - bibtex: "@inproceedings{Griffith2002,\n abstract = {The use of free gesture in making\ - \ music has usually been confined to instruments that use direct mappings between\ - \ movement and sound space. Here we demonstrate the use of categories of gesture\ - \ as the basis of musical learning and performance collaboration. These are used\ - \ in a system that reinterprets the approach to learning through performance that\ - \ is found in many musical cultures and discussed here through the example of\ - \ Kpelle music. },\n address = {Dublin, Ireland},\n author = {Griffith, Niall\ - \ J. and O'Leary, Sean and O'Shea, Donagh and Hammond, Ed and O'Modhrain, Sile},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176416},\n\ - \ issn = {2220-4806},\n keywords = {Collaboration, Performance, Metaphor, Gesture},\n\ - \ pages = {71--72},\n title = {Circles and Seeds: Adapting Kpelle Ideas about\ - \ Music Performance for Collaborative Digital Music performance},\n url = {http://www.nime.org/proceedings/2002/nime2002_071.pdf},\n\ - \ year = {2002}\n}\n" + ID: Trail2012a + abstract: 'Hyper-instruments extend traditional acoustic instruments with sensing + technologies that capture digitally subtle and sophisticated aspects of human + performance. They leverage the long training and skills of performers while simultaneously + providing rich possibilities for digital control. Many existing hyper-instruments + suffer from being one of a kind instruments that require invasive modifications + to the underlying acoustic instrument. In this paper we focus on the pitched percussion + family and describe a non-invasive sensing approach for extending them to hyper-instruments. 
+ Our primary concern is to retain the technical integrity of the acoustic instrument + and sound production methods while being able to intuitively interface the computer. + This is accomplished by utilizing the Kinect sensor to track the position of the + mallets without any modification to the instrument which enables easy and cheap + replication of the pro-posed hyper-instrument extensions. In addition we describe + two approaches to higher-level gesture control that remove the need for additional + control devices such as foot pedals and fader boxes that are frequently used in + electro-acoustic performance. This gesture control integrates more organically + with the natural flow of playing the instrument providing user selectable control + over filter parameters, synthesis, sampling, sequencing, and improvisation using + a commer-cially available low-cost sensing apparatus.' + address: 'Ann Arbor, Michigan' + author: Shawn Trail and Michael Dean and Gabrielle Odowichuk and Tiago Fernandes + Tavares and Peter Driessen and W. Andrew Schloss and George Tzanetakis + bibtex: "@inproceedings{Trail2012a,\n abstract = {Hyper-instruments extend traditional\ + \ acoustic instruments with sensing technologies that capture digitally subtle\ + \ and sophisticated aspects of human performance. They leverage the long training\ + \ and skills of performers while simultaneously providing rich possibilities for\ + \ digital control. Many existing hyper-instruments suffer from being one of a\ + \ kind instruments that require invasive modifications to the underlying acoustic\ + \ instrument. In this paper we focus on the pitched percussion family and describe\ + \ a non-invasive sensing approach for extending them to hyper-instruments. 
Our\ + \ primary concern is to retain the technical integrity of the acoustic instrument\ + \ and sound production methods while being able to intuitively interface the computer.\ + \ This is accomplished by utilizing the Kinect sensor to track the position of\ + \ the mallets without any modification to the instrument which enables easy and\ + \ cheap replication of the pro-posed hyper-instrument extensions. In addition\ + \ we describe two approaches to higher-level gesture control that remove the need\ + \ for additional control devices such as foot pedals and fader boxes that are\ + \ frequently used in electro-acoustic performance. This gesture control integrates\ + \ more organically with the natural flow of playing the instrument providing user\ + \ selectable control over filter parameters, synthesis, sampling, sequencing,\ + \ and improvisation using a commer-cially available low-cost sensing apparatus.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Shawn Trail and Michael Dean and\ + \ Gabrielle Odowichuk and Tiago Fernandes Tavares and Peter Driessen and W. 
Andrew\ + \ Schloss and George Tzanetakis},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178435},\n\ + \ issn = {2220-4806},\n publisher = {University of Michigan},\n title = {Non-invasive\ + \ sensing and gesture control for pitched percussion hyper-instruments using the\ + \ Kinect},\n url = {http://www.nime.org/proceedings/2012/nime2012_297.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176416 + doi: 10.5281/zenodo.1178435 issn: 2220-4806 - keywords: 'Collaboration, Performance, Metaphor, Gesture' - pages: 71--72 - title: 'Circles and Seeds: Adapting Kpelle Ideas about Music Performance for Collaborative - Digital Music performance' - url: http://www.nime.org/proceedings/2002/nime2002_071.pdf - year: 2002 + publisher: University of Michigan + title: Non-invasive sensing and gesture control for pitched percussion hyper-instruments + using the Kinect + url: http://www.nime.org/proceedings/2012/nime2012_297.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Gunther2002 - abstract: 'This paper presents a novel coupling of haptics technology and music, - introducing the notion of tactile composition or aesthetic composition for the - sense of touch. A system that facilitates the composition and perception of intricate, - musically structured spatio-temporal patterns of vibration on the surface of the - body is described. An initial test of the system in a performance context is discussed. - The fundamental building blocks of a compositional language for touch are considered. 
' - address: 'Dublin, Ireland' - author: 'Gunther, Eric and Davenport, Glorianna and O''Modhrain, Sile' - bibtex: "@inproceedings{Gunther2002,\n abstract = {This paper presents a novel coupling\ - \ of haptics technology and music, introducing the notion of tactile composition\ - \ or aesthetic composition for the sense of touch. A system that facilitates the\ - \ composition and perception of intricate, musically structured spatio-temporal\ - \ patterns of vibration on the surface of the body is described. An initial test\ - \ of the system in a performance context is discussed. The fundamental building\ - \ blocks of a compositional language for touch are considered. },\n address =\ - \ {Dublin, Ireland},\n author = {Gunther, Eric and Davenport, Glorianna and O'Modhrain,\ - \ Sile},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176418},\n\ - \ issn = {2220-4806},\n keywords = {multi-modal,music,tactile composition,vibrotactile},\n\ - \ pages = {73--79},\n title = {Cutaneous Grooves: Composing for the Sense of Touch},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_073.pdf},\n year = {2002}\n\ + ID: Fyfe2012 + abstract: 'Message mapping between control interfaces and sound engines is an important + task that could benefit from tools that streamline development. A new Open Sound + Control (OSC) namespace called Nexus Data Exchange Format (NDEF) streamlines message + mapping by offering developers the ability to manage sound engines as network + nodes and to query those nodes for the messages in their OSC address spaces. By + using NDEF, developers will have an eas-ier time managing nodes and their messages, + especially for scenarios in which a single application or interface controls multiple + sound engines. NDEF is currently implemented in the JunctionBox interaction toolkit + but could easily be implemented in other toolkits.' 
+ address: 'Ann Arbor, Michigan' + author: Lawrence Fyfe and Adam Tindale and Sheelagh Carpendale + bibtex: "@inproceedings{Fyfe2012,\n abstract = {Message mapping between control\ + \ interfaces and sound engines is an important task that could benefit from tools\ + \ that streamline development. A new Open Sound Control (OSC) namespace called\ + \ Nexus Data Exchange Format (NDEF) streamlines message mapping by offering developers\ + \ the ability to manage sound engines as network nodes and to query those nodes\ + \ for the messages in their OSC address spaces. By using NDEF, developers will\ + \ have an eas-ier time managing nodes and their messages, especially for scenarios\ + \ in which a single application or interface controls multiple sound engines.\ + \ NDEF is currently implemented in the JunctionBox interaction toolkit but could\ + \ easily be implemented in other toolkits.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Lawrence Fyfe and Adam Tindale and Sheelagh Carpendale},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178259},\n issn = {2220-4806},\n keywords\ + \ = {OSC, namespace, interaction, node},\n publisher = {University of Michigan},\n\ + \ title = {Node and Message Management with the JunctionBox Interaction Toolkit},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_299.pdf},\n year = {2012}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176418 + doi: 10.5281/zenodo.1178259 issn: 2220-4806 - keywords: 'multi-modal,music,tactile composition,vibrotactile' - pages: 73--79 - title: 'Cutaneous Grooves: Composing for the Sense of Touch' - url: http://www.nime.org/proceedings/2002/nime2002_073.pdf - year: 2002 + keywords: 'OSC, namespace, interaction, node' + publisher: University of Michigan + title: Node and Message Management with the 
JunctionBox Interaction Toolkit + url: http://www.nime.org/proceedings/2012/nime2012_299.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Hankins2002 - abstract: 'The Circular Optical Object Locator is a collaborative and cooperative - music-making device. It uses an inexpensive digital video camera to observe a - rotating platter. Opaque objects placed on the platter are detected by the camera - during rotation. The locations of the objects passing under the camera are used - to generate music. ' - address: 'Dublin, Ireland' - author: 'Hankins, Tim and Merrill, David and Robert, Jocelyn' - bibtex: "@inproceedings{Hankins2002,\n abstract = {The Circular Optical Object Locator\ - \ is a collaborative and cooperative music-making device. It uses an inexpensive\ - \ digital video camera to observe a rotating platter. Opaque objects placed on\ - \ the platter are detected by the camera during rotation. The locations of the\ - \ objects passing under the camera are used to generate music. },\n address =\ - \ {Dublin, Ireland},\n author = {Hankins, Tim and Merrill, David and Robert, Jocelyn},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176420},\n\ - \ issn = {2220-4806},\n keywords = {Input devices, music controllers, collaborative,\ - \ real-time score manipulation.},\n pages = {80--81},\n title = {Circular Optical\ - \ Object Locator},\n url = {http://www.nime.org/proceedings/2002/nime2002_080.pdf},\n\ - \ year = {2002}\n}\n" + ID: Castet2012 + abstract: 'This paper presents ongoing work on methods dedicated torelations between + composers and performers in the contextof experimental music. 
The computer music + community hasover the last decade paid a strong interest on various kindsof gestural + interfaces to control sound synthesis processes.The mapping between gesture and + sound parameters hasspecially been investigated in order to design the most relevant + schemes of sonic interaction. In fact, this relevanceresults in an aesthetic choice + that encroaches on the process of composition. This work proposes to examine therelations + between composers and performers in the contextof the new interfaces for musical + expression. It aims to define a theoretical and methodological framework clarifyingthese + relations. In this project, this paper is the first experimental study about the + use of physical models as gesturalmaps for the production of textural sounds.' + address: 'Ann Arbor, Michigan' + author: Julien Castet + bibtex: "@inproceedings{Castet2012,\n abstract = {This paper presents ongoing work\ + \ on methods dedicated torelations between composers and performers in the contextof\ + \ experimental music. The computer music community hasover the last decade paid\ + \ a strong interest on various kindsof gestural interfaces to control sound synthesis\ + \ processes.The mapping between gesture and sound parameters hasspecially been\ + \ investigated in order to design the most relevant schemes of sonic interaction.\ + \ In fact, this relevanceresults in an aesthetic choice that encroaches on the\ + \ process of composition. This work proposes to examine therelations between composers\ + \ and performers in the contextof the new interfaces for musical expression. 
It\ + \ aims to define a theoretical and methodological framework clarifyingthese relations.\ + \ In this project, this paper is the first experimental study about the use of\ + \ physical models as gesturalmaps for the production of textural sounds.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Julien Castet},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178231},\n issn = {2220-4806},\n keywords = {Simulation,\ + \ Interaction, Sonic textures},\n publisher = {University of Michigan},\n title\ + \ = {Performing experimental music by physical simulation},\n url = {http://www.nime.org/proceedings/2012/nime2012_30.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176420 + doi: 10.5281/zenodo.1178231 issn: 2220-4806 - keywords: 'Input devices, music controllers, collaborative, real-time score manipulation.' - pages: 80--81 - title: Circular Optical Object Locator - url: http://www.nime.org/proceedings/2002/nime2002_080.pdf - year: 2002 + keywords: 'Simulation, Interaction, Sonic textures' + publisher: University of Michigan + title: Performing experimental music by physical simulation + url: http://www.nime.org/proceedings/2012/nime2012_30.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Hasan2002 - abstract: 'We have created a new electronic musical instrument, referred to as the - Termenova (Russian for "daughter of Theremin") that combines a free-gesture capacitive - sensing device with an optical sensing system that detects the reflection of a - hand when it intersects a beam of an array of red lasers. 
The laser beams, which - are made visible by a thin layer of theatrical mist, provide visual feedback and - guidance to the performer to alleviate the difficulties of using a non-contact - interface as well as adding an interesting component for the audience to observe. - The system uses capacitive sensing to detect the proximity of the player''s hands; - this distance is mapped to pitch, volume, or other continuous effect. The laser - guide positions are calibrated before play with position controlled servo motors - interfaced to a main controller board; the location of each beam corresponds to - the position where the performer should move his or her hand to achieve a pre-specified - pitch and/or effect. The optical system senses the distance of the player''s hands - from the source of each laser beam, providing an additional dimension of musical - control. ' - address: 'Dublin, Ireland' - author: 'Hasan, Leila and Yu, Nicholas and Paradiso, Joseph A.' - bibtex: "@inproceedings{Hasan2002,\n abstract = {We have created a new electronic\ - \ musical instrument, referred to as the Termenova (Russian for \"daughter of\ - \ Theremin\") that combines a free-gesture capacitive sensing device with an optical\ - \ sensing system that detects the reflection of a hand when it intersects a beam\ - \ of an array of red lasers. The laser beams, which are made visible by a thin\ - \ layer of theatrical mist, provide visual feedback and guidance to the performer\ - \ to alleviate the difficulties of using a non-contact interface as well as adding\ - \ an interesting component for the audience to observe. The system uses capacitive\ - \ sensing to detect the proximity of the player's hands; this distance is mapped\ - \ to pitch, volume, or other continuous effect. 
The laser guide positions are\ - \ calibrated before play with position controlled servo motors interfaced to a\ - \ main controller board; the location of each beam corresponds to the position\ - \ where the performer should move his or her hand to achieve a pre-specified pitch\ - \ and/or effect. The optical system senses the distance of the player's hands\ - \ from the source of each laser beam, providing an additional dimension of musical\ - \ control. },\n address = {Dublin, Ireland},\n author = {Hasan, Leila and Yu,\ - \ Nicholas and Paradiso, Joseph A.},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ - \ doi = {10.5281/zenodo.1176422},\n issn = {2220-4806},\n keywords = {Theremin,\ - \ gesture interface, capacitive sensing, laser harp, optical proximity sensing,\ - \ servo control, musical controller},\n pages = {82--87},\n title = {The Termenova\ - \ : A Hybrid Free-Gesture Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_082.pdf},\n\ - \ year = {2002}\n}\n" + ID: Allison2012 + abstract: "Aural -of or relateing to the ear or hearing\nAura -an invisible breath,\ + \ emanation, or radiation AR -Augmented Reality\nAuRal is an environmental audio\ + \ system in which individual participants form ad hoc ensembles based on geolocation\ + \ and affect the overall sound of the music associated with the location that\ + \ they are in.\nThe AuRal environment binds physical location and the choices\ + \ of multiple, simultaneous performers to act as the generative force of music\ + \ tied to the region. Through a mobile device interface, musical participants,\ + \ or agents, have a degree of input into the generated music essentially defining\ + \ the sound of a given region. The audio landscape is superimposed onto the physical\ + \ one. 
The resultant musical experience is not tied simply to the passage of time,\ + \ but through the incorporation of participants over time and spatial proximity,\ + \ it becomes an aural location as much as a piece of music. As a result, walking\ + \ through the same location at different times results in unique collaborative\ + \ listening experiences." + address: 'Ann Arbor, Michigan' + author: Jesse Allison and Christian Dell + bibtex: "@inproceedings{Allison2012,\n abstract = {Aural -of or relateing to the\ + \ ear or hearing\nAura -an invisible breath, emanation, or radiation AR -Augmented\ + \ Reality\nAuRal is an environmental audio system in which individual participants\ + \ form ad hoc ensembles based on geolocation and affect the overall sound of the\ + \ music associated with the location that they are in.\nThe AuRal environment\ + \ binds physical location and the choices of multiple, simultaneous performers\ + \ to act as the generative force of music tied to the region. Through a mobile\ + \ device interface, musical participants, or agents, have a degree of input into\ + \ the generated music essentially defining the sound of a given region. The audio\ + \ landscape is superimposed onto the physical one. The resultant musical experience\ + \ is not tied simply to the passage of time, but through the incorporation of\ + \ participants over time and spatial proximity, it becomes an aural location as\ + \ much as a piece of music. 
As a result, walking through the same location at\ + \ different times results in unique collaborative listening experiences.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Jesse Allison and Christian Dell},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178203},\n issn = {2220-4806},\n keywords\ + \ = {AuRal, sonic environment, distributed performance system, mobile music, android,\ + \ ruby on rails, supercollider},\n publisher = {University of Michigan},\n title\ + \ = {AuRal: A Mobile Interactive System for Geo-Locative Audio Synthesis},\n url\ + \ = {http://www.nime.org/proceedings/2012/nime2012_301.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176422 + doi: 10.5281/zenodo.1178203 issn: 2220-4806 - keywords: 'Theremin, gesture interface, capacitive sensing, laser harp, optical - proximity sensing, servo control, musical controller' - pages: 82--87 - title: 'The Termenova : A Hybrid Free-Gesture Interface' - url: http://www.nime.org/proceedings/2002/nime2002_082.pdf - year: 2002 + keywords: 'AuRal, sonic environment, distributed performance system, mobile music, + android, ruby on rails, supercollider' + publisher: University of Michigan + title: 'AuRal: A Mobile Interactive System for Geo-Locative Audio Synthesis' + url: http://www.nime.org/proceedings/2012/nime2012_301.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Hunt2002 - abstract: 'In this paper we challenge the assumption that an electronic instrument - consists solely of an interface and a sound generator. We emphasise the importance - of the mapping between input parameters and system parameters, and claim that - this can define the very essence of an instrument.' - address: 'Dublin, Ireland' - author: 'Hunt, Andy D. and Wanderley, Marcelo M. 
and Paradis, Matthew' - bibtex: "@inproceedings{Hunt2002,\n abstract = {In this paper we challenge the assumption\ - \ that an electronic instrument consists solely of an interface and a sound generator.\ - \ We emphasise the importance of the mapping between input parameters and system\ - \ parameters, and claim that this can define the very essence of an instrument.},\n\ - \ address = {Dublin, Ireland},\n author = {Hunt, Andy D. and Wanderley, Marcelo\ - \ M. and Paradis, Matthew},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ - \ = {10.5281/zenodo.1176424},\n issn = {2220-4806},\n keywords = {electronic musical\ - \ instruments,human-computer interaction,mapping strategies},\n pages = {88--93},\n\ - \ title = {The importance of Parameter Mapping in Electronic Instrument Design},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_088.pdf},\n year = {2002}\n\ - }\n" + ID: Roberts2012 + abstract: 'Designing mobile interfaces for computer-based musical performance is + generally a time-consuming task that can be exasperating for performers. Instead + of being able to experiment freely with physical interfaces'' affordances, performers + must spend time and attention on non-musical tasks including network configuration, + development environments for the mobile devices, defining OSC address spaces, + and handling the receipt of OSC in the environment that will control and produce + sound. Our research seeks to overcome such obstacles by minimizing the code needed + to both generate and read the output of interfaces on mobile devices. For iOS + and Android devices, our implementation extends the application Control to use + a simple set of OSC messages to define interfaces and automatically route output. 
+ On the desktop, our implementations in Max/MSP/Jitter, LuaAV, and Su-perCollider + allow users to create mobile widgets mapped to sonic parameters with a single + line of code. We believe the fluidity of our approach will encourage users to + incorporate mobile devices into their everyday performance practice.' + address: 'Ann Arbor, Michigan' + author: Charles Roberts and Graham Wakefield and Matt Wright + bibtex: "@inproceedings{Roberts2012,\n abstract = {Designing mobile interfaces for\ + \ computer-based musical performance is generally a time-consuming task that can\ + \ be exasperating for performers. Instead of being able to experiment freely with\ + \ physical interfaces' affordances, performers must spend time and attention on\ + \ non-musical tasks including network configuration, development environments\ + \ for the mobile devices, defining OSC address spaces, and handling the receipt\ + \ of OSC in the environment that will control and produce sound. Our research\ + \ seeks to overcome such obstacles by minimizing the code needed to both generate\ + \ and read the output of interfaces on mobile devices. For iOS and Android devices,\ + \ our implementation extends the application Control to use a simple set of OSC\ + \ messages to define interfaces and automatically route output. 
On the desktop,\ + \ our implementations in Max/MSP/Jitter, LuaAV, and Su-perCollider allow users\ + \ to create mobile widgets mapped to sonic parameters with a single line of code.\ + \ We believe the fluidity of our approach will encourage users to incorporate\ + \ mobile devices into their everyday performance practice.},\n address = {Ann\ + \ Arbor, Michigan},\n author = {Charles Roberts and Graham Wakefield and Matt\ + \ Wright},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1180581},\n issn = {2220-4806},\n\ + \ keywords = {NIME, OSC, Zeroconf, iOS, Android, Max/MSP/Jitter, LuaAV, SuperCollider,\ + \ Mobile},\n publisher = {University of Michigan},\n title = {Mobile Controls\ + \ On-The-Fly: An Abstraction for Distributed {NIME}s},\n url = {http://www.nime.org/proceedings/2012/nime2012_303.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176424 + doi: 10.5281/zenodo.1180581 issn: 2220-4806 - keywords: 'electronic musical instruments,human-computer interaction,mapping strategies' - pages: 88--93 - title: The importance of Parameter Mapping in Electronic Instrument Design - url: http://www.nime.org/proceedings/2002/nime2002_088.pdf - year: 2002 + keywords: 'NIME, OSC, Zeroconf, iOS, Android, Max/MSP/Jitter, LuaAV, SuperCollider, + Mobile' + publisher: University of Michigan + title: 'Mobile Controls On-The-Fly: An Abstraction for Distributed NIMEs' + url: http://www.nime.org/proceedings/2012/nime2012_303.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Huott2002 - abstract: 'This paper is a design report on a prototype musical controller based - on fiberoptic sensing pads from Tactex Controls [8]. 
It will discuss elements - of form factor, technical design, and tuning/sound generation systems tested while - building the device I have dubbed ''the Ski''. The goal is the creation of a fine - musical instrument with which a skilled performer can play music from standard - repertoire as well as break sonic ground in modern forms.' - address: 'Dublin, Ireland' - author: 'Huott, Robert' - bibtex: "@inproceedings{Huott2002,\n abstract = {This paper is a design report on\ - \ a prototype musical controller based on fiberoptic sensing pads from Tactex\ - \ Controls [8]. It will discuss elements of form factor, technical design, and\ - \ tuning/sound generation systems tested while building the device I have dubbed\ - \ 'the Ski'. The goal is the creation of a fine musical instrument with which\ - \ a skilled performer can play music from standard repertoire as well as break\ - \ sonic ground in modern forms.},\n address = {Dublin, Ireland},\n author = {Huott,\ - \ Robert},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176428},\n\ - \ issn = {2220-4806},\n keywords = {musical controller, Tactex, tactile interface,\ - \ tuning systems},\n pages = {94--98},\n title = {An Interface for Precise Musical\ - \ Control},\n url = {http://www.nime.org/proceedings/2002/nime2002_094.pdf},\n\ - \ year = {2002}\n}\n" + ID: Harriman2012 + abstract: 'This paper provides an overview of a new method for approaching beat + sequencing. As we have come to know them drum machines provide means to loop rhythmic + patterns over a certain interval. Usually with the option to specify different + beat divisions. What I developed and propose for consideration is a rethinking + of the traditional drum machine confines. The Sinkapater is an untethered beat + sequencer in that the beat division, and the loop length can be arbitrarily modified + for each track. 
The result is the capability to create complex syncopated patterns + which evolve over time as different tracks follow their own loop rate. To keep + cohesion all channels can be locked to a master channel forcing a loop to be an + integer number of "Master Beats". Further a visualization mode enables exploring + the patterns in another new way. Using synchronized OpenGL a 3-Dimensional environment + visualizes the beats as droplets falling from faucets of varying heights determined + by the loop length. Waves form in the bottom as beats splash into the virtual + "sink". By combining compelling visuals and a new approach to sequencing a new + way of exploring beats and experiencing music has been created.' + address: 'Ann Arbor, Michigan' + author: Jiffer Harriman + bibtex: "@inproceedings{Harriman2012,\n abstract = {This paper provides an overview\ + \ of a new method for approaching beat sequencing. As we have come to know them\ + \ drum machines provide means to loop rhythmic patterns over a certain interval.\ + \ Usually with the option to specify different beat divisions. What I developed\ + \ and propose for consideration is a rethinking of the traditional drum machine\ + \ confines. The Sinkapater is an untethered beat sequencer in that the beat division,\ + \ and the loop length can be arbitrarily modified for each track. The result is\ + \ the capability to create complex syncopated patterns which evolve over time\ + \ as different tracks follow their own loop rate. To keep cohesion all channels\ + \ can be locked to a master channel forcing a loop to be an integer number of\ + \ \"Master Beats\". Further a visualization mode enables exploring the patterns\ + \ in another new way. Using synchronized OpenGL a 3-Dimensional environment visualizes\ + \ the beats as droplets falling from faucets of varying heights determined by\ + \ the loop length. Waves form in the bottom as beats splash into the virtual \"\ + sink\". 
By combining compelling visuals and a new approach to sequencing a new\ + \ way of exploring beats and experiencing music has been created.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Jiffer Harriman},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178277},\n issn = {2220-4806},\n keywords = {NIME, proceedings,\ + \ drum machine, sequencer, visualization},\n publisher = {University of Michigan},\n\ + \ title = {Sinkapater -An Untethered Beat Sequencer},\n url = {http://www.nime.org/proceedings/2012/nime2012_308.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176428 + doi: 10.5281/zenodo.1178277 + issn: 2220-4806 + keywords: 'NIME, proceedings, drum machine, sequencer, visualization' + publisher: University of Michigan + title: Sinkapater -An Untethered Beat Sequencer + url: http://www.nime.org/proceedings/2012/nime2012_308.pdf + year: 2012 + + +- ENTRYTYPE: inproceedings + ID: Lee2012c + abstract: "This research aims to improve the correspondence between music and dance,\ + \ and explores the use of human respiration pattern for musical applications with\ + \ focus on the motional aspect of breathing. While respiration is frequently considered\ + \ as an indicator of the metabolic state of human body that contains meaningful\ + \ information for medicine or psychology, motional aspect of respiration has been\ + \ relatively unnoticed in spite of its strong correlation with muscles and the\ + \ brain.\nThis paper introduces an interactive system to control music playback\ + \ for dance performances based on the respiration pattern of the dancer. A wireless\ + \ wearable sensor device detects the dancer's respiration, which is then utilized\ + \ to modify the dynamic of music. 
Two different respiration-dynamic mappings were\ + \ designed and evaluated through public performances and private tests by professional\ + \ choreographers. Results from this research suggest a new conceptual approach\ + \ to musical applications of respiration based on the technical characteristics\ + \ of music and dance." + address: 'Ann Arbor, Michigan' + author: Jeong-seob Lee and Woon Seung Yeo + bibtex: "@inproceedings{Lee2012c,\n abstract = {This research aims to improve the\ + \ correspondence between music and dance, and explores the use of human respiration\ + \ pattern for musical applications with focus on the motional aspect of breathing.\ + \ While respiration is frequently considered as an indicator of the metabolic\ + \ state of human body that contains meaningful information for medicine or psychology,\ + \ motional aspect of respiration has been relatively unnoticed in spite of its\ + \ strong correlation with muscles and the brain.\nThis paper introduces an interactive\ + \ system to control music playback for dance performances based on the respiration\ + \ pattern of the dancer. A wireless wearable sensor device detects the dancer's\ + \ respiration, which is then utilized to modify the dynamic of music. Two different\ + \ respiration-dynamic mappings were designed and evaluated through public performances\ + \ and private tests by professional choreographers. 
Results from this research\ + \ suggest a new conceptual approach to musical applications of respiration based\ + \ on the technical characteristics of music and dance.},\n address = {Ann Arbor,\ + \ Michigan},\n author = {Jeong-seob Lee and Woon Seung Yeo},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178311},\n issn = {2220-4806},\n keywords = {Music, dance,\ + \ respiration, correspondence, wireless interface, interactive performance},\n\ + \ publisher = {University of Michigan},\n title = {Real-time Modification of Music\ + \ with Dancer's Respiration Pattern},\n url = {http://www.nime.org/proceedings/2012/nime2012_309.pdf},\n\ + \ year = {2012}\n}\n" + booktitle: Proceedings of the International Conference on New Interfaces for Musical + Expression + doi: 10.5281/zenodo.1178311 issn: 2220-4806 - keywords: 'musical controller, Tactex, tactile interface, tuning systems' - pages: 94--98 - title: An Interface for Precise Musical Control - url: http://www.nime.org/proceedings/2002/nime2002_094.pdf - year: 2002 + keywords: 'Music, dance, respiration, correspondence, wireless interface, interactive + performance' + publisher: University of Michigan + title: Real-time Modification of Music with Dancer's Respiration Pattern + url: http://www.nime.org/proceedings/2012/nime2012_309.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Magnusson2002 - abstract: 'We are interested in exhibiting our programs at your demo section at - the conference. We believe that the subject of your conference is precisely what - we are experimenting with in our musical software. ' - address: 'Dublin, Ireland' - author: 'Magnusson, Thor' - bibtex: "@inproceedings{Magnusson2002,\n abstract = {We are interested in exhibiting\ - \ our programs at your demo section at the conference. 
We believe that the subject\ - \ of your conference is precisely what we are experimenting with in our musical\ - \ software. },\n address = {Dublin, Ireland},\n author = {Magnusson, Thor},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176384},\n\ - \ issn = {2220-4806},\n keywords = {Further info on our website http//www.ixi-software.net.},\n\ - \ pages = {101--101},\n title = {IXI software},\n url = {http://www.nime.org/proceedings/2002/nime2002_101.pdf},\n\ - \ year = {2002}\n}\n" + ID: dAlessandro2012 + abstract: 'We present the integration of two musical interfaces into a new music-making + system that seeks to capture the expe-rience of a choir and bring it into the + mobile space. This system relies on three pervasive technologies that each support + a different part of the musical experience. First, the mobile device application + for performing with an artificial voice, called ChoirMob. Then, a central composing + and conducting application running on a local interactive display, called Vuzik. + Finally, a network protocol to synchronize the two. ChoirMob musicians can perform + music together at any location where they can connect to a Vuzik central conducting + device displaying a composed piece of music. We explored this system by creating + a chamber choir of ChoirMob performers, consisting of both experienced musicians + and novices, that performed in rehearsals and live concert scenarios with music + composed using the Vuzik interface.' + address: 'Ann Arbor, Michigan' + author: Nicolas d'Alessandro and Aura Pon and Johnty Wang and David Eagle and Ehud + Sharlin and Sidney Fels + bibtex: "@inproceedings{dAlessandro2012,\n abstract = {We present the integration\ + \ of two musical interfaces into a new music-making system that seeks to capture\ + \ the expe-rience of a choir and bring it into the mobile space. 
This system relies\ + \ on three pervasive technologies that each support a different part of the musical\ + \ experience. First, the mobile device application for performing with an artificial\ + \ voice, called ChoirMob. Then, a central composing and conducting application\ + \ running on a local interactive display, called Vuzik. Finally, a network protocol\ + \ to synchronize the two. ChoirMob musicians can perform music together at any\ + \ location where they can connect to a Vuzik central conducting device displaying\ + \ a composed piece of music. We explored this system by creating a chamber choir\ + \ of ChoirMob performers, consisting of both experienced musicians and novices,\ + \ that performed in rehearsals and live concert scenarios with music composed\ + \ using the Vuzik interface.},\n address = {Ann Arbor, Michigan},\n author = {Nicolas\ + \ d'Alessandro and Aura Pon and Johnty Wang and David Eagle and Ehud Sharlin and\ + \ Sidney Fels},\n booktitle = {Proceedings of the International Conference on\ + \ New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178199},\n\ + \ issn = {2220-4806},\n keywords = {singing synthesis, mobile music, interactive\ + \ display, interface design, OSC, ChoirMob, Vuzik, social music, choir},\n publisher\ + \ = {University of Michigan},\n title = {A Digital Mobile Choir: Joining Two Interfaces\ + \ towards Composing and Performing Collaborative Mobile Music},\n url = {http://www.nime.org/proceedings/2012/nime2012_310.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176384 + doi: 10.5281/zenodo.1178199 issn: 2220-4806 - keywords: Further info on our website http//www.ixi-software.net. 
- pages: 101--101 - title: IXI software - url: http://www.nime.org/proceedings/2002/nime2002_101.pdf - year: 2002 + keywords: 'singing synthesis, mobile music, interactive display, interface design, + OSC, ChoirMob, Vuzik, social music, choir' + publisher: University of Michigan + title: 'A Digital Mobile Choir: Joining Two Interfaces towards Composing and Performing + Collaborative Mobile Music' + url: http://www.nime.org/proceedings/2012/nime2012_310.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Jorda2002 - abstract: 'In this paper we present Afasia, an interactive multimedia performance - based in Homer''s Odyssey [2]. Afasia is a one-man digital theater play in which - a lone performer fitted with a sensor-suit conducts, like Homer, the whole show - by himself, controlling 2D animations, DVD video and conducting the music mechanically - performed by a robot quartet. After contextualizing the piece, all of its technical - elements, starting with the hardware input and output components, are described. - A special emphasis is given to the interactivity strategies and the subsequent - software design. Since its first version premiered in Barcelona in 1998, Afasia - has been performed in many European and American countries and has received several - international awards. ' - address: 'Dublin, Ireland' - author: 'Jordà, Sergi' - bibtex: "@inproceedings{Jorda2002,\n abstract = {In this paper we present Afasia,\ - \ an interactive multimedia performance based in Homer's Odyssey [2]. Afasia is\ - \ a one-man digital theater play in which a lone performer fitted with a sensor-suit\ - \ conducts, like Homer, the whole show by himself, controlling 2D animations,\ - \ DVD video and conducting the music mechanically performed by a robot quartet.\ - \ After contextualizing the piece, all of its technical elements, starting with\ - \ the hardware input and output components, are described. 
A special emphasis\ - \ is given to the interactivity strategies and the subsequent software design.\ - \ Since its first version premiered in Barcelona in 1998, Afasia has been performed\ - \ in many European and American countries and has received several international\ - \ awards. },\n address = {Dublin, Ireland},\n author = {Jord\\`{a}, Sergi},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176432},\n\ - \ issn = {2220-4806},\n keywords = {Multimedia interaction, musical robots, real-time\ - \ musical systems.},\n pages = {102--107},\n title = {Afasia: the Ultimate Homeric\ - \ One-man-multimedia-band},\n url = {http://www.nime.org/proceedings/2002/nime2002_102.pdf},\n\ - \ year = {2002}\n}\n" + ID: Bukvic2012 + abstract: 'In the following paper we propose a new tiered granularity approach to + developing modules or abstractions in the Pd-L2Ork visual multimedia programming + environment with the specific goal of devising creative environments that scale + their educational scope and difficulty to encompass several stages within the + context of primary and secondary (K-12) education. As part of a preliminary study, + the team designed modules targeting 4th and 5th grade students, the primary focus + being exploration of creativity and collaborative learning. The resulting environment + infrastructure -coupled with the Boys & Girls Club of Southwest Virginia Satellite + Linux Laptop Orchestra -offers opportunities for students to design and build + original instruments, master them through a series of rehearsals, and ultimately + utilize them as part of an ensemble in a performance of a predetermined piece + whose parameters are coordinated by instructor through an embedded networked module. 
+ The ensuing model will serve for the assessment and development of a stronger + connection with content-area standards and the development of creative thinking + and collaboration skills.' + address: 'Ann Arbor, Michigan' + author: Ivica Bukvic and Liesl Baum and Bennett Layman and Kendall Woodard + bibtex: "@inproceedings{Bukvic2012,\n abstract = {In the following paper we propose\ + \ a new tiered granularity approach to developing modules or abstractions in the\ + \ Pd-L2Ork visual multimedia programming environment with the specific goal of\ + \ devising creative environments that scale their educational scope and difficulty\ + \ to encompass several stages within the context of primary and secondary (K-12)\ + \ education. As part of a preliminary study, the team designed modules targeting\ + \ 4th and 5th grade students, the primary focus being exploration of creativity\ + \ and collaborative learning. The resulting environment infrastructure -coupled\ + \ with the Boys & Girls Club of Southwest Virginia Satellite Linux Laptop Orchestra\ + \ -offers opportunities for students to design and build original instruments,\ + \ master them through a series of rehearsals, and ultimately utilize them as part\ + \ of an ensemble in a performance of a predetermined piece whose parameters are\ + \ coordinated by instructor through an embedded networked module. 
The ensuing\ + \ model will serve for the assessment and development of a stronger connection\ + \ with content-area standards and the development of creative thinking and collaboration\ + \ skills.},\n address = {Ann Arbor, Michigan},\n author = {Ivica Bukvic and Liesl\ + \ Baum and Bennett Layman and Kendall Woodard},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178223},\n issn = {2220-4806},\n keywords = {Granular, Learning\ + \ Objects, K-12, Education, L2Ork, PdL2Ork},\n publisher = {University of Michigan},\n\ + \ title = {Granular Learning Objects for Instrument Design and Collaborative Performance\ + \ in K-12 Education},\n url = {http://www.nime.org/proceedings/2012/nime2012_315.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176432 + doi: 10.5281/zenodo.1178223 issn: 2220-4806 - keywords: 'Multimedia interaction, musical robots, real-time musical systems.' - pages: 102--107 - title: 'Afasia: the Ultimate Homeric One-man-multimedia-band' - url: http://www.nime.org/proceedings/2002/nime2002_102.pdf - year: 2002 + keywords: 'Granular, Learning Objects, K-12, Education, L2Ork, PdL2Ork' + publisher: University of Michigan + title: Granular Learning Objects for Instrument Design and Collaborative Performance + in K-12 Education + url: http://www.nime.org/proceedings/2012/nime2012_315.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Kapur2002 - abstract: 'This paper describes the design of an electronic Tabla controller. The - E-Tabla controls both sound and graphics simultaneously. It allows for a variety - of traditional Tabla strokes and new performance techniques. Graphical feedback - allows for artistical display and pedagogical feedback. ' - address: 'Dublin, Ireland' - author: 'Kapur, Ajay and Essl, Georg and Davidson, Philip L. 
and Cook, Perry R.' - bibtex: "@inproceedings{Kapur2002,\n abstract = {This paper describes the design\ - \ of an electronic Tabla controller. The E-Tabla controls both sound and graphics\ - \ simultaneously. It allows for a variety of traditional Tabla strokes and new\ - \ performance techniques. Graphical feedback allows for artistical display and\ - \ pedagogical feedback. },\n address = {Dublin, Ireland},\n author = {Kapur, Ajay\ - \ and Essl, Georg and Davidson, Philip L. and Cook, Perry R.},\n booktitle = {Proceedings\ + ID: Buschert2012 + abstract: 'Musician Maker is a system to allow novice players the opportunity to + create expressive improvisational music. While the system plays an accompaniment + background chord progression, each participant plays some kind of controller to + make music through the system. The program takes the signals from the controllers + and adjusts the pitches somewhat so that the players are limited to notes which + fit the chord progression. The various controllers are designed to be very easy + and intuitive so anyone can pick one up and quickly be able to play it. Since + the computer is making sure that wrong notes are avoided, even inexperienced players + can immediately make music and enjoy focusing on some of the more expressive elements + and thus become musicians.' + address: 'Ann Arbor, Michigan' + author: John Buschert + bibtex: "@inproceedings{Buschert2012,\n abstract = {Musician Maker is a system to\ + \ allow novice players the opportunity to create expressive improvisational music.\ + \ While the system plays an accompaniment background chord progression, each participant\ + \ plays some kind of controller to make music through the system. The program\ + \ takes the signals from the controllers and adjusts the pitches somewhat so that\ + \ the players are limited to notes which fit the chord progression. 
The various\ + \ controllers are designed to be very easy and intuitive so anyone can pick one\ + \ up and quickly be able to play it. Since the computer is making sure that wrong\ + \ notes are avoided, even inexperienced players can immediately make music and\ + \ enjoy focusing on some of the more expressive elements and thus become musicians.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {John Buschert},\n booktitle = {Proceedings\ \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176434},\n issn = {2220-4806},\n\ - \ keywords = {Electronic Tabla, Indian Drum Controller, Physical Models, Graphical\ - \ Feedback},\n pages = {108--112},\n title = {The Electronic Tabla Controller},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_108.pdf},\n year = {2002}\n\ + \ doi = {10.5281/zenodo.1178225},\n issn = {2220-4806},\n keywords = {Musical\ + \ Instrument, Electronic, Computer Music, Novice, Controller},\n publisher = {University\ + \ of Michigan},\n title = {Musician Maker: Play expressive music without practice},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_36.pdf},\n year = {2012}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176434 + doi: 10.5281/zenodo.1178225 issn: 2220-4806 - keywords: 'Electronic Tabla, Indian Drum Controller, Physical Models, Graphical - Feedback' - pages: 108--112 - title: The Electronic Tabla Controller - url: http://www.nime.org/proceedings/2002/nime2002_108.pdf - year: 2002 + keywords: 'Musical Instrument, Electronic, Computer Music, Novice, Controller' + publisher: University of Michigan + title: 'Musician Maker: Play expressive music without practice' + url: http://www.nime.org/proceedings/2012/nime2012_36.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Kessous2002 - abstract: 'In this paper, we describe a 
computer-based solo musical instrument for - live performance. We have adapted a Wacom graphic tablet equipped with a stylus - transducer and a game joystick to use them as a solo expressive instrument. We - have used a formant-synthesis model that can produce a vowel-like singing voice. - This instrument allows multidimensional expressive fundamental frequency control - and vowel articulation. The fundamental frequency angular control used here allows - different mapping adjustments that correspond to different melodic styles. ' - address: 'Dublin, Ireland' - author: 'Kessous, Loïc' - bibtex: "@inproceedings{Kessous2002,\n abstract = {In this paper, we describe a\ - \ computer-based solo musical instrument for live performance. We have adapted\ - \ a Wacom graphic tablet equipped with a stylus transducer and a game joystick\ - \ to use them as a solo expressive instrument. We have used a formant-synthesis\ - \ model that can produce a vowel-like singing voice. This instrument allows multidimensional\ - \ expressive fundamental frequency control and vowel articulation. The fundamental\ - \ frequency angular control used here allows different mapping adjustments that\ - \ correspond to different melodic styles. 
},\n address = {Dublin, Ireland},\n\ - \ author = {Kessous, Lo\\\"{i}c},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ - \ doi = {10.5281/zenodo.1176436},\n issn = {2220-4806},\n keywords = {Bi-manual,\ - \ off-the-shelf input devices, fundamental frequency control, sound color navigation,\ - \ mapping.},\n pages = {113--114},\n title = {Bi-manual Mapping Experimentation,\ - \ with Angular Fundamental Frequency Control and Sound Color Navigation},\n url\ - \ = {http://www.nime.org/proceedings/2002/nime2002_113.pdf},\n year = {2002}\n\ - }\n" + ID: Giordano2012 + abstract: 'Force-feedback devices can provide haptic feedback duringinteraction + with physical models for sound synthesis. However, low-end devices may not always + provide high-fidelitydisplay of the acoustic characteristics of the model. This + article describes an enhanced handle for the Phantom Omnicontaining a vibration + actuator intended to display the highfrequency portion of the synthesized forces. + Measurementsare provided to show that this approach achieves a morefaithful representation + of the acoustic signal, overcominglimitations in the device control and dynamics.' + address: 'Ann Arbor, Michigan' + author: Marcello Giordano and Stephen Sinclair and Marcelo M. Wanderley + bibtex: "@inproceedings{Giordano2012,\n abstract = {Force-feedback devices can provide\ + \ haptic feedback duringinteraction with physical models for sound synthesis.\ + \ However, low-end devices may not always provide high-fidelitydisplay of the\ + \ acoustic characteristics of the model. This article describes an enhanced handle\ + \ for the Phantom Omnicontaining a vibration actuator intended to display the\ + \ highfrequency portion of the synthesized forces. 
Measurementsare provided to\ + \ show that this approach achieves a morefaithful representation of the acoustic\ + \ signal, overcominglimitations in the device control and dynamics.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Marcello Giordano and Stephen Sinclair\ + \ and Marcelo M. Wanderley},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178265},\n\ + \ issn = {2220-4806},\n keywords = {Haptics, force feedback, bowing, audio, interaction},\n\ + \ publisher = {University of Michigan},\n title = {Bowing a vibration-enhanced\ + \ force feedback device},\n url = {http://www.nime.org/proceedings/2012/nime2012_37.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176436 + doi: 10.5281/zenodo.1178265 issn: 2220-4806 - keywords: 'Bi-manual, off-the-shelf input devices, fundamental frequency control, - sound color navigation, mapping.' - pages: 113--114 - title: 'Bi-manual Mapping Experimentation, with Angular Fundamental Frequency Control - and Sound Color Navigation' - url: http://www.nime.org/proceedings/2002/nime2002_113.pdf - year: 2002 + keywords: 'Haptics, force feedback, bowing, audio, interaction' + publisher: University of Michigan + title: Bowing a vibration-enhanced force feedback device + url: http://www.nime.org/proceedings/2012/nime2012_37.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Machover2002 - abstract: 'It is astonishing to think that a mere twenty years ago, real-time music - production and performance was not only in a fledgling state with only primitive - (such as the IRCAM 4X machine) or limited (like the Synclavier) capabilities, - but was also the subject of very heated debate. 
At IRCAM in the early 1980''s, - for instance, some (such as Luciano Berio) questioned whether any digital technology - could ever be truly "instrumental", while others (such as Jean-Claude Risset) - doubted whether real-time activity of any sort would ever acquire the richness - and introspection of composition.' - address: 'Dublin, Ireland' - author: 'Machover, Tod' - bibtex: "@inproceedings{Machover2002,\n abstract = {It is astonishing to think that\ - \ a mere twenty years ago, real-time music production and performance was not\ - \ only in a fledgling state with only primitive (such as the IRCAM 4X machine)\ - \ or limited (like the Synclavier) capabilities, but was also the subject of very\ - \ heated debate. At IRCAM in the early 1980's, for instance, some (such as Luciano\ - \ Berio) questioned whether any digital technology could ever be truly \"instrumental\"\ - , while others (such as Jean-Claude Risset) doubted whether real-time activity\ - \ of any sort would ever acquire the richness and introspection of composition.},\n\ - \ address = {Dublin, Ireland},\n author = {Machover, Tod},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176438},\n issn = {2220-4806},\n\ - \ pages = {115--115},\n title = {Instruments, Interactivity, and Inevitability},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_115.pdf},\n year = {2002}\n\ - }\n" + ID: Parson2012 + abstract: 'With the advent of high resolution digital video projection and high + quality spatial sound systems in modern planetariums, the planetarium can become + the basis for a unique set of virtual musical instrument capabilities that go + well beyond packaged multimedia shows. The dome, circular speaker and circular + seating arrangements provide means for skilled composers and performers to create + a virtual reality in which attendees are immersed in the composite instrument. 
+ This initial foray into designing an audio-visual computerbased instrument for + improvisational performance in a planetarium builds on prior, successful work + in mapping the rules and state of two-dimensional computer board games to improvised + computer music. The unique visual and audio geometries of the planetarium present + challenges and opportunities. The game tessellates the dome in mobile, colored + hexagons that emulate both atoms and musical scale intervals in an expanding universe. + Spatial activity in the game maps to spatial locale and instrument voices in the + speakers, in essence creating a virtual orchestra with a string section, percussion + section, etc. on the dome. Future work includes distribution of game play via + mobile devices to permit attendees to participate in a performance. This environment + is open-ended, with great educational and aesthetic potential.' + address: 'Ann Arbor, Michigan' + author: Dale Parson and Phillip Reed + bibtex: "@inproceedings{Parson2012,\n abstract = {With the advent of high resolution\ + \ digital video projection and high quality spatial sound systems in modern planetariums,\ + \ the planetarium can become the basis for a unique set of virtual musical instrument\ + \ capabilities that go well beyond packaged multimedia shows. The dome, circular\ + \ speaker and circular seating arrangements provide means for skilled composers\ + \ and performers to create a virtual reality in which attendees are immersed in\ + \ the composite instrument. This initial foray into designing an audio-visual\ + \ computerbased instrument for improvisational performance in a planetarium builds\ + \ on prior, successful work in mapping the rules and state of two-dimensional\ + \ computer board games to improvised computer music. The unique visual and audio\ + \ geometries of the planetarium present challenges and opportunities. 
The game\ + \ tessellates the dome in mobile, colored hexagons that emulate both atoms and\ + \ musical scale intervals in an expanding universe. Spatial activity in the game\ + \ maps to spatial locale and instrument voices in the speakers, in essence creating\ + \ a virtual orchestra with a string section, percussion section, etc. on the dome.\ + \ Future work includes distribution of game play via mobile devices to permit\ + \ attendees to participate in a performance. This environment is open-ended, with\ + \ great educational and aesthetic potential.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Dale Parson and Phillip Reed},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1180567},\n\ + \ issn = {2220-4806},\n keywords = {aleatory music, algorithmic improvisation,\ + \ computer game, planetarium},\n publisher = {University of Michigan},\n title\ + \ = {The Planetarium as a Musical Instrument},\n url = {http://www.nime.org/proceedings/2012/nime2012_47.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176438 + doi: 10.5281/zenodo.1180567 issn: 2220-4806 - pages: 115--115 - title: 'Instruments, Interactivity, and Inevitability' - url: http://www.nime.org/proceedings/2002/nime2002_115.pdf - year: 2002 + keywords: 'aleatory music, algorithmic improvisation, computer game, planetarium' + publisher: University of Michigan + title: The Planetarium as a Musical Instrument + url: http://www.nime.org/proceedings/2012/nime2012_47.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Mandelis2002 - abstract: 'This paper describes the Genophone [2], a hyperinstrument developed for - Sound-Performance-Design using the evolutionary paradigm of selective breeding - as the driving process. 
Sound design, and control assignments (performance mappings), - on most current systems rely heavily on an intimate knowledge of the Sound Synthesis - Techniques (SSTs) employed by the sound generator (hardware or software based). - This intimate knowledge can only be achieved by investing long periods of time - playing around with sounds and experimenting with how parameters change the nature - of the sounds produced. This experience is also needed when control mappings are - defined for performance purposes, so external stimuli can effect changes in SST - parameters. Often such experience can be gained after years of interaction with - one particular SST. The system presented here attempts to aid the user in designing - performance sounds and mappings without the necessity for deep knowledge of the - SSTs involved. This is achieved by a selective breeding process on populations - of individual sounds and their mapping. The initial populations are made up of - individuals of existing hand-coded sounds and their mapping. Initial populations - never have randomly derived individuals (this is not an issue as man''s best friend - was also not selectively bred from protozoa). The user previews the population - then expresses how much individuals are liked by their relative repositioning - on the screen (fitness). Some individuals are selected as parents to create a - new population of offspring, through variable mutation and genetic recombination. - These operators use the fitness as a bias for their function, and they were also - successfully used in MutaSynth [1]. The offspring are then evaluated (as their - parents were) and selected for breeding. This cycle continues until satisfactory - sounds and their mapping are reached. Individuals can also be saved to disk for - future "strain" development. 
The aim of the system is to encourage the creation - of novel performance mappings and sounds with emphasis on exploration, rather - than designs that satisfy specific a priori criteria.' - address: 'Dublin, Ireland' - author: 'Mandelis, James' - bibtex: "@inproceedings{Mandelis2002,\n abstract = {This paper describes the Genophone\ - \ [2], a hyperinstrument developed for Sound-Performance-Design using the evolutionary\ - \ paradigm of selective breeding as the driving process. Sound design, and control\ - \ assignments (performance mappings), on most current systems rely heavily on\ - \ an intimate knowledge of the Sound Synthesis Techniques (SSTs) employed by the\ - \ sound generator (hardware or software based). This intimate knowledge can only\ - \ be achieved by investing long periods of time playing around with sounds and\ - \ experimenting with how parameters change the nature of the sounds produced.\ - \ This experience is also needed when control mappings are defined for performance\ - \ purposes, so external stimuli can effect changes in SST parameters. Often such\ - \ experience can be gained after years of interaction with one particular SST.\ - \ The system presented here attempts to aid the user in designing performance\ - \ sounds and mappings without the necessity for deep knowledge of the SSTs involved.\ - \ This is achieved by a selective breeding process on populations of individual\ - \ sounds and their mapping. The initial populations are made up of individuals\ - \ of existing hand-coded sounds and their mapping. Initial populations never have\ - \ randomly derived individuals (this is not an issue as man's best friend was\ - \ also not selectively bred from protozoa). The user previews the population then\ - \ expresses how much individuals are liked by their relative repositioning on\ - \ the screen (fitness). 
Some individuals are selected as parents to create a new\ - \ population of offspring, through variable mutation and genetic recombination.\ - \ These operators use the fitness as a bias for their function, and they were\ - \ also successfully used in MutaSynth [1]. The offspring are then evaluated (as\ - \ their parents were) and selected for breeding. This cycle continues until satisfactory\ - \ sounds and their mapping are reached. Individuals can also be saved to disk\ - \ for future \"strain\" development. The aim of the system is to encourage the\ - \ creation of novel performance mappings and sounds with emphasis on exploration,\ - \ rather than designs that satisfy specific a priori criteria.},\n address = {Dublin,\ - \ Ireland},\n author = {Mandelis, James},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ - \ doi = {10.5281/zenodo.1176440},\n issn = {2220-4806},\n keywords = {adaptive\ - \ interfaces, artificial life,expressivity, hyperinstruments, live performance,\ - \ motion-to-sound mapping, selective breeding, sound meta-synthesis},\n pages\ - \ = {116--117},\n title = {Adaptive Hyperinstruments: Applying Evolutionary Techniques\ - \ to Sound Synthesis and Performance},\n url = {http://www.nime.org/proceedings/2002/nime2002_116.pdf},\n\ - \ year = {2002}\n}\n" + ID: Chacin2012 + abstract: 'This paper is an in depth exploration of the fashion object and device, + the Play-A-Grill. It details inspirations, socio-cultural implications, technical + function and operation, and potential applications for the Play-A-Grill system.' + address: 'Ann Arbor, Michigan' + author: Aisen Caro Chacin + bibtex: "@inproceedings{Chacin2012,\n abstract = {This paper is an in depth exploration\ + \ of the fashion object and device, the Play-A-Grill. 
It details inspirations,\ + \ socio-cultural implications, technical function and operation, and potential\ + \ applications for the Play-A-Grill system.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Aisen Caro Chacin},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178233},\n\ + \ issn = {2220-4806},\n keywords = {Digital Music Players, Hip Hop, Rap, Music\ + \ Fashion, Grills, Mouth Jewelry, Mouth Controllers, and Bone Conduction Hearing.},\n\ + \ publisher = {University of Michigan},\n title = {Play-A-Grill: Music To Your\ + \ Teeth},\n url = {http://www.nime.org/proceedings/2012/nime2012_48.pdf},\n year\ + \ = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176440 + doi: 10.5281/zenodo.1178233 issn: 2220-4806 - keywords: 'adaptive interfaces, artificial life,expressivity, hyperinstruments, - live performance, motion-to-sound mapping, selective breeding, sound meta-synthesis' - pages: 116--117 - title: 'Adaptive Hyperinstruments: Applying Evolutionary Techniques to Sound Synthesis - and Performance' - url: http://www.nime.org/proceedings/2002/nime2002_116.pdf - year: 2002 + keywords: 'Digital Music Players, Hip Hop, Rap, Music Fashion, Grills, Mouth Jewelry, + Mouth Controllers, and Bone Conduction Hearing.' + publisher: University of Michigan + title: 'Play-A-Grill: Music To Your Teeth' + url: http://www.nime.org/proceedings/2012/nime2012_48.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Marshall2002 - abstract: 'This paper introduces a subtle interface, which evolved from the design - of an alternative gestural controller in the development of a performance interface. - The conceptual idea used is based on that of the traditional Bodhran instrument, - an Irish frame drum. 
The design process was user-centered and involved professional - Bodhran players and through prototyping and user testing the resulting Vodhran - emerged. ' - address: 'Dublin, Ireland' - author: 'Marshall, Mark T. and Rath, Matthias and Moynihan, Breege' - bibtex: "@inproceedings{Marshall2002,\n abstract = {This paper introduces a subtle\ - \ interface, which evolved from the design of an alternative gestural controller\ - \ in the development of a performance interface. The conceptual idea used is based\ - \ on that of the traditional Bodhran instrument, an Irish frame drum. The design\ - \ process was user-centered and involved professional Bodhran players and through\ - \ prototyping and user testing the resulting Vodhran emerged. },\n address = {Dublin,\ - \ Ireland},\n author = {Marshall, Mark T. and Rath, Matthias and Moynihan, Breege},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176442},\n\ - \ issn = {2220-4806},\n keywords = {Virtual instrument, sound modeling, gesture,\ - \ user-centered design},\n pages = {118--119},\n title = {The Virtual Bodhran\ - \ -- The Vodhran},\n url = {http://www.nime.org/proceedings/2002/nime2002_118.pdf},\n\ - \ year = {2002}\n}\n" + ID: FASCIANI2012 + abstract: 'Sound generators and synthesis engines expose a large set of parameters, + allowing run-time timbre morphing and exploration of sonic space. However, control + over these high-dimensional interfaces is constrained by the physical limitations + of performers. In this paper we propose the exploitation of vocal gesture as an + extension or alternative to traditional physical controllers. The approach uses + dynamic aspects of vocal sound to control variations in the timbre of the synthesized + sound. 
The mapping from vocal to synthesis parameters is automatically adapted + to information extracted from vocal examples as well as to the relationship between + parameters and timbre within the synthesizer. The mapping strategy aims to maximize + the breadth of the explorable perceptual sonic space over a set of the synthesizer''s + real-valued parameters, indirectly driven by the voice-controlled interface.' + address: 'Ann Arbor, Michigan' + author: STEFANO FASCIANI and LONCE WYSE + bibtex: "@inproceedings{FASCIANI2012,\n abstract = {Sound generators and synthesis\ + \ engines expose a large set of parameters, allowing run-time timbre morphing\ + \ and exploration of sonic space. However, control over these high-dimensional\ + \ interfaces is constrained by the physical limitations of performers. In this\ + \ paper we propose the exploitation of vocal gesture as an extension or alternative\ + \ to traditional physical controllers. The approach uses dynamic aspects of vocal\ + \ sound to control variations in the timbre of the synthesized sound. The mapping\ + \ from vocal to synthesis parameters is automatically adapted to information extracted\ + \ from vocal examples as well as to the relationship between parameters and timbre\ + \ within the synthesizer. 
The mapping strategy aims to maximize the breadth of\ + \ the explorable perceptual sonic space over a set of the synthesizer's real-valued\ + \ parameters, indirectly driven by the voice-controlled interface.},\n address\ + \ = {Ann Arbor, Michigan},\n author = {STEFANO FASCIANI and LONCE WYSE},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178251},\n issn = {2220-4806},\n keywords\ + \ = {Voice Control, Adaptive Interface, Automatic Mapping, Timbre Morphing, Sonic\ + \ Space Exploration},\n publisher = {University of Michigan},\n title = {A Voice\ + \ Interface for Sound Generators: adaptive and automatic mapping of gestures to\ + \ sound},\n url = {http://www.nime.org/proceedings/2012/nime2012_57.pdf},\n year\ + \ = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176442 + doi: 10.5281/zenodo.1178251 issn: 2220-4806 - keywords: 'Virtual instrument, sound modeling, gesture, user-centered design' - pages: 118--119 - title: The Virtual Bodhran -- The Vodhran - url: http://www.nime.org/proceedings/2002/nime2002_118.pdf - year: 2002 + keywords: 'Voice Control, Adaptive Interface, Automatic Mapping, Timbre Morphing, + Sonic Space Exploration' + publisher: University of Michigan + title: 'A Voice Interface for Sound Generators: adaptive and automatic mapping of + gestures to sound' + url: http://www.nime.org/proceedings/2012/nime2012_57.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Mccaig2002 - abstract: 'Here we present 2Hearts, a music system controlled bythe heartbeats of - two people. As the players speak and touch, 2Hearts extracts meaningful variables - from their heartbeat signals. These variables are mapped to musical parameters, - conveying the changing patterns of tension and relaxation in the players'' relationship. 
- We describe the motivation for creating 2Hearts, observations from the prototypes - that have been built, and principles learnt in the ongoing development process.' - address: 'Dublin, Ireland' - author: 'Mccaig, Graeme and Fels, Sidney S.' - bibtex: "@inproceedings{Mccaig2002,\n abstract = {Here we present 2Hearts, a music\ - \ system controlled bythe heartbeats of two people. As the players speak and touch,\ - \ 2Hearts extracts meaningful variables from their heartbeat signals. These variables\ - \ are mapped to musical parameters, conveying the changing patterns of tension\ - \ and relaxation in the players' relationship. We describe the motivation for\ - \ creating 2Hearts, observations from the prototypes that have been built, and\ - \ principles learnt in the ongoing development process.},\n address = {Dublin,\ - \ Ireland},\n author = {Mccaig, Graeme and Fels, Sidney S.},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176444},\n issn = {2220-4806},\n\ - \ keywords = {Heart Rate, Biosensor, Interactive Music, Non-Verbal Communication,\ - \ Affective Computing, Ambient Display},\n pages = {120--125},\n title = {Playing\ - \ on Heart-Strings: Experiences with the 2{H}earts System},\n url = {http://www.nime.org/proceedings/2002/nime2002_120.pdf},\n\ - \ year = {2002}\n}\n" + ID: Yuksel2012 + abstract: 'In this work, a comprehensive study is performed on the relationship + between audio, visual and emotion by applying the principles of cognitive emotion + theory into digital creation. The study is driven by an audiovisual emotion library + project that is named AVIEM, which provides an interactive interface for experimentation + and evaluation of the perception and creation processes of audiovisuals. 
AVIEM + primarily consists of separate audio and visual libraries and grows with user + contribution as users explore different combinations between them. The library + provides a wide range of experimentation possibilities by allowing users to create + audiovisual relations and logging their emotional responses through its interface. + Besides being a resourceful tool of experimentation, AVIEM aims to become a source + of inspiration, where digitally created abstract virtual environments and soundscapes + can elicit target emotions at a preconscious level, by building genuine audiovisual + relations that would engage the viewer on a strong emotional stage. Lastly, various + schemes are proposed to visualize information extracted through AVIEM, to improve + the navigation and designate the trends and dependencies among audiovisual relations.' + address: 'Ann Arbor, Michigan' + author: Kamer Ali Yuksel and Sinan Buyukbas and Elif Ayiter + bibtex: "@inproceedings{Yuksel2012,\n abstract = {In this work, a comprehensive\ + \ study is performed on the relationship between audio, visual and emotion by\ + \ applying the principles of cognitive emotion theory into digital creation. The\ + \ study is driven by an audiovisual emotion library project that is named AVIEM,\ + \ which provides an interactive interface for experimentation and evaluation of\ + \ the perception and creation processes of audiovisuals. AVIEM primarily consists\ + \ of separate audio and visual libraries and grows with user contribution as users\ + \ explore different combinations between them. The library provides a wide range\ + \ of experimentation possibilities by allowing users to create audiovisual relations\ + \ and logging their emotional responses through its interface. 
Besides being a\ + \ resourceful tool of experimentation, AVIEM aims to become a source of inspiration,\ + \ where digitally created abstract virtual environments and soundscapes can elicit\ + \ target emotions at a preconscious level, by building genuine audiovisual relations\ + \ that would engage the viewer on a strong emotional stage. Lastly, various schemes\ + \ are proposed to visualize information extracted through AVIEM, to improve the\ + \ navigation and designate the trends and dependencies among audiovisual relations.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Kamer Ali Yuksel and Sinan Buyukbas\ + \ and Elif Ayiter},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178457},\n\ + \ issn = {2220-4806},\n keywords = {Designing emotive audiovisuals, cognitive\ + \ emotion theory, audiovisual perception and interaction, synaesthesia},\n publisher\ + \ = {University of Michigan},\n title = {An Interface for Emotional Expression\ + \ in Audio-Visuals},\n url = {http://www.nime.org/proceedings/2012/nime2012_60.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176444 + doi: 10.5281/zenodo.1178457 issn: 2220-4806 - keywords: 'Heart Rate, Biosensor, Interactive Music, Non-Verbal Communication, Affective - Computing, Ambient Display' - pages: 120--125 - title: 'Playing on Heart-Strings: Experiences with the 2Hearts System' - url: http://www.nime.org/proceedings/2002/nime2002_120.pdf - year: 2002 + keywords: 'Designing emotive audiovisuals, cognitive emotion theory, audiovisual + perception and interaction, synaesthesia' + publisher: University of Michigan + title: An Interface for Emotional Expression in Audio-Visuals + url: http://www.nime.org/proceedings/2012/nime2012_60.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: McElligott2002 - 
abstract: 'In this paper we discuss the possibility of augmenting existing musical - performance by using a novel sensing device termed ''PegLeg''. This device interprets - the movements and motions of a musician during play by allowing the musician to - manipulate a sensor in three dimensions. A force sensitive surface allows us to - detect, interpret and interface the subtle but integral element of physical "effort" - in music playing. This device is designed to extend the musicians control over - any given instrument, granting an additional means of ''playing'' that would previously - have been impossible - granting an additional limb to extend their playing potential - - a PegLeg...' - address: 'Dublin, Ireland' - author: 'McElligott, Lisa and Dixon, Edward and Dillon, Michelle' - bibtex: "@inproceedings{McElligott2002,\n abstract = {In this paper we discuss the\ - \ possibility of augmenting existing musical performance by using a novel sensing\ - \ device termed 'PegLeg'. This device interprets the movements and motions of\ - \ a musician during play by allowing the musician to manipulate a sensor in three\ - \ dimensions. A force sensitive surface allows us to detect, interpret and interface\ - \ the subtle but integral element of physical \"effort\" in music playing. 
This\ - \ device is designed to extend the musicians control over any given instrument,\ - \ granting an additional means of 'playing' that would previously have been impossible\ - \ - granting an additional limb to extend their playing potential - a PegLeg...},\n\ - \ address = {Dublin, Ireland},\n author = {McElligott, Lisa and Dixon, Edward\ - \ and Dillon, Michelle},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ - \ = {10.5281/zenodo.1176446},\n issn = {2220-4806},\n keywords = {Gesture, weight\ - \ distribution, effort, expression, intent, movement, 3D sensing pressure, force,\ - \ sensor, resolution, control device, sound, music, input.},\n pages = {126--130},\n\ - \ title = {`PegLegs in Music' Processing the Effort Generated by Levels of Expressive\ - \ Gesturing in Music},\n url = {http://www.nime.org/proceedings/2002/nime2002_126.pdf},\n\ - \ year = {2002}\n}\n" + ID: Lee2012b + abstract: 'Tok! is a collaborative acoustic instrument application for iOS devices + aimed at real time percussive music making in a colocated setup. It utilizes the + mobility of hand-held devices and transforms them into drumsticks to tap on flat + surfaces and produce acoustic music. Tok! is also networked and consists of a + shared interactive music score to which the players tap their phones, creating + a percussion ensemble. Through their social interaction and real-time modifications + to the music score, and through their creative selection of tapping surfaces, + the players can collaborate and dynamically create interesting rhythmic music + with a variety of timbres.' + address: 'Ann Arbor, Michigan' + author: Sang Won Lee and Ajay Srinivasamurthy and Gregoire Tronel and Weibin Shen + and Jason Freeman + bibtex: "@inproceedings{Lee2012b,\n abstract = {Tok! 
is a collaborative acoustic\ + \ instrument application for iOS devices aimed at real time percussive music making\ + \ in a colocated setup. It utilizes the mobility of hand-held devices and transforms\ + \ them into drumsticks to tap on flat surfaces and produce acoustic music. Tok!\ + \ is also networked and consists of a shared interactive music score to which\ + \ the players tap their phones, creating a percussion ensemble. Through their\ + \ social interaction and real-time modifications to the music score, and through\ + \ their creative selection of tapping surfaces, the players can collaborate and\ + \ dynamically create interesting rhythmic music with a variety of timbres.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Sang Won Lee and Ajay Srinivasamurthy\ + \ and Gregoire Tronel and Weibin Shen and Jason Freeman},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178317},\n issn = {2220-4806},\n keywords = {Mobile Phones,\ + \ Collaboration, Social Interaction, Acoustic Musical Instrument},\n publisher\ + \ = {University of Michigan},\n title = {Tok! : A Collaborative Acoustic Instrument\ + \ using Mobile Phones},\n url = {http://www.nime.org/proceedings/2012/nime2012_61.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176446 + doi: 10.5281/zenodo.1178317 issn: 2220-4806 - keywords: 'Gesture, weight distribution, effort, expression, intent, movement, 3D - sensing pressure, force, sensor, resolution, control device, sound, music, input.' 
- pages: 126--130 - title: '`PegLegs in Music'' Processing the Effort Generated by Levels of Expressive - Gesturing in Music' - url: http://www.nime.org/proceedings/2002/nime2002_126.pdf - year: 2002 + keywords: 'Mobile Phones, Collaboration, Social Interaction, Acoustic Musical Instrument' + publisher: University of Michigan + title: 'Tok! : A Collaborative Acoustic Instrument using Mobile Phones' + url: http://www.nime.org/proceedings/2012/nime2012_61.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Ng2002 - abstract: 'This paper briefly describes a number of performance interfaces under - the broad theme of Interactive Gesture Music (IGM). With a short introduction, - this paper discusses the main components of a Trans-Domain Mapping (TDM) framework, - and presents various prototypes developed under this framework, to translate meaningful - activities from one creative domain onto another, to provide real-time control - of musical events with physical movements. ' - address: 'Dublin, Ireland' - author: 'Ng, Kia' - bibtex: "@inproceedings{Ng2002,\n abstract = {This paper briefly describes a number\ - \ of performance interfaces under the broad theme of Interactive Gesture Music\ - \ (IGM). 
With a short introduction, this paper discusses the main components of\ - \ a Trans-Domain Mapping (TDM) framework, and presents various prototypes developed\ - \ under this framework, to translate meaningful activities from one creative domain\ - \ onto another, to provide real-time control of musical events with physical movements.\ - \ },\n address = {Dublin, Ireland},\n author = {Ng, Kia},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176448},\n issn = {2220-4806},\n\ - \ keywords = {Gesture, Motion, Interactive, Performance, Music.},\n pages = {131--132},\n\ - \ title = {Interactive Gesture Music Performance Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_131.pdf},\n\ - \ year = {2002}\n}\n" + ID: Lee2012a + abstract: 'This paper describes recent extensions to LOLC, a text-based environment + for collaborative improvisation for laptop ensembles, which integrate acoustic + instrumental musicians into the environment. Laptop musicians author short commands + to create, transform, and share pre-composed musical fragments, and the resulting + notation is digitally displayed, in real time, to instrumental musicians to sight-read + in performance. The paper describes the background and motivations of the project, + outlines the design of the original LOLC environment and describes its new real-time + notation components in detail, and explains the use of these new components in + a musical composition, SGLC, by one of the authors.' 
+ address: 'Ann Arbor, Michigan' + author: Sang Won Lee and Jason Freeman and Andrew Collela + bibtex: "@inproceedings{Lee2012a,\n abstract = {This paper describes recent extensions\ + \ to LOLC, a text-based environment for collaborative improvisation for laptop\ + \ ensembles, which integrate acoustic instrumental musicians into the environment.\ + \ Laptop musicians author short commands to create, transform, and share pre-composed\ + \ musical fragments, and the resulting notation is digitally displayed, in real\ + \ time, to instrumental musicians to sight-read in performance. The paper describes\ + \ the background and motivations of the project, outlines the design of the original\ + \ LOLC environment and describes its new real-time notation components in detail,\ + \ and explains the use of these new components in a musical composition, SGLC,\ + \ by one of the authors.},\n address = {Ann Arbor, Michigan},\n author = {Sang\ + \ Won Lee and Jason Freeman and Andrew Collela},\n booktitle = {Proceedings of\ + \ the International Conference on New Interfaces for Musical Expression},\n doi\ + \ = {10.5281/zenodo.1178315},\n issn = {2220-4806},\n keywords = {Real-time Music\ + \ Notation, Live Coding, Laptop Orchestra},\n publisher = {University of Michigan},\n\ + \ title = {Real-Time Music Notation, Collaborative Improvisation, and Laptop Ensembles},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_62.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176448 + doi: 10.5281/zenodo.1178315 issn: 2220-4806 - keywords: 'Gesture, Motion, Interactive, Performance, Music.' 
- pages: 131--132 - title: Interactive Gesture Music Performance Interface - url: http://www.nime.org/proceedings/2002/nime2002_131.pdf - year: 2002 + keywords: 'Real-time Music Notation, Live Coding, Laptop Orchestra' + publisher: University of Michigan + title: 'Real-Time Music Notation, Collaborative Improvisation, and Laptop Ensembles' + url: http://www.nime.org/proceedings/2012/nime2012_62.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Nichols2002 - abstract: 'This paper describes the development of a virtual violin bow haptic human-computer - interface, which senses bow position with encoders, to drive bowed-string physical - model synthesis, while engaging servomotors, to simulate the haptic feedback of - a violin bow on a string. Construction of the hardware and programming of the - software are discussed, as well as the motivation for building the instrument, - and its planned uses.' - address: 'Dublin, Ireland' - author: 'Nichols, Charles' - bibtex: "@inproceedings{Nichols2002,\n abstract = {This paper describes the development\ - \ of a virtual violin bow haptic human-computer interface, which senses bow position\ - \ with encoders, to drive bowed-string physical model synthesis, while engaging\ - \ servomotors, to simulate the haptic feedback of a violin bow on a string. 
Construction\ - \ of the hardware and programming of the software are discussed, as well as the\ - \ motivation for building the instrument, and its planned uses.},\n address =\ - \ {Dublin, Ireland},\n author = {Nichols, Charles},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176450},\n issn = {2220-4806},\n\ - \ keywords = {bow, controller, haptic, hci, interface, violin},\n pages = {133--136},\n\ - \ title = {The vBow: Development of a Virtual Violin Bow Haptic Human-Computer\ - \ Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_133.pdf},\n\ - \ year = {2002}\n}\n" + ID: Migneco2012 + abstract: 'Platforms for mobile computing and gesture recognitionprovide enticing + interfaces for creative expression on virtualmusical instruments. However, sound + synthesis on thesesystems is often limited to sample-based synthesizers, whichlimits + their expressive capabilities. Source-filter models areadept for such interfaces + since they provide flexible, algorithmic sound synthesis, especially in the case + of the guitar.In this paper, we present a data-driven approach for modeling guitar + excitation signals using principal componentsderived from a corpus of excitation + signals. Using thesecomponents as features, we apply nonlinear principal components + analysis to derive a feature space that describesthe expressive attributes characteristic + to our corpus. Finally, we propose using the reduced dimensionality space asa + control interface for an expressive guitar synthesizer.' + address: 'Ann Arbor, Michigan' + author: Raymond Migneco and Youngmoo Kim + bibtex: "@inproceedings{Migneco2012,\n abstract = {Platforms for mobile computing\ + \ and gesture recognitionprovide enticing interfaces for creative expression on\ + \ virtualmusical instruments. 
However, sound synthesis on thesesystems is often\ + \ limited to sample-based synthesizers, whichlimits their expressive capabilities.\ + \ Source-filter models areadept for such interfaces since they provide flexible,\ + \ algorithmic sound synthesis, especially in the case of the guitar.In this paper,\ + \ we present a data-driven approach for modeling guitar excitation signals using\ + \ principal componentsderived from a corpus of excitation signals. Using thesecomponents\ + \ as features, we apply nonlinear principal components analysis to derive a feature\ + \ space that describesthe expressive attributes characteristic to our corpus.\ + \ Finally, we propose using the reduced dimensionality space asa control interface\ + \ for an expressive guitar synthesizer.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Raymond Migneco and Youngmoo Kim},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1180541},\n issn = {2220-4806},\n keywords = {Source-filter\ + \ models, musical instrument synthesis, PCA, touch musical interfaces},\n publisher\ + \ = {University of Michigan},\n title = {A Component-Based Approach for Modeling\ + \ Plucked-Guitar Excitation Signals},\n url = {http://www.nime.org/proceedings/2012/nime2012_63.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176450 + doi: 10.5281/zenodo.1180541 issn: 2220-4806 - keywords: 'bow, controller, haptic, hci, interface, violin' - pages: 133--136 - title: 'The vBow: Development of a Virtual Violin Bow Haptic Human-Computer Interface' - url: http://www.nime.org/proceedings/2002/nime2002_133.pdf - year: 2002 + keywords: 'Source-filter models, musical instrument synthesis, PCA, touch musical + interfaces' + publisher: University of Michigan + title: A Component-Based Approach for Modeling 
Plucked-Guitar Excitation Signals + url: http://www.nime.org/proceedings/2012/nime2012_63.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Oboe2002 - abstract: 'The design of a virtual keyboard, capable of reproducing the tactile - feedback of several musical instruments is reported. The key is driven by a direct - drive motor, which allows friction free operations. The force to be generated - by the motor is calculated in real time by a dynamic simulator, which contains - the model of mechanisms'' components and constraints. Each model is tuned on the - basis of measurements performed on the real system. So far, grand piano action, - harpsichord and Hammond organ have been implemented successfully on the system - presented here. ' - address: 'Dublin, Ireland' - author: 'Oboe, Roberto and De Poli, Giovanni' - bibtex: "@inproceedings{Oboe2002,\n abstract = {The design of a virtual keyboard,\ - \ capable of reproducing the tactile feedback of several musical instruments is\ - \ reported. The key is driven by a direct drive motor, which allows friction free\ - \ operations. The force to be generated by the motor is calculated in real time\ - \ by a dynamic simulator, which contains the model of mechanisms' components and\ - \ constraints. Each model is tuned on the basis of measurements performed on the\ - \ real system. So far, grand piano action, harpsichord and Hammond organ have\ - \ been implemented successfully on the system presented here. 
},\n address = {Dublin,\ - \ Ireland},\n author = {Oboe, Roberto and De Poli, Giovanni},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176452},\n issn = {2220-4806},\n\ - \ keywords = {Virtual mechanisms, dynamic simulation},\n pages = {137--142},\n\ - \ title = {Multi-instrument Virtual Keyboard -- The MIKEY Project},\n url = {http://www.nime.org/proceedings/2002/nime2002_137.pdf},\n\ - \ year = {2002}\n}\n" + ID: Patricio2012 + abstract: 'This article proposes a wireless handheld multimedia digital instrument, + which allows one to compose and perform digital music for films in real-time. + Not only does it allow the performer and the audience to follow the film images + in question, but also the relationship between the gestures performed and the + sound generated. Furthermore, it allows one to have an effective control over + the sound, and consequently achieve great musical expression. In addition, a method + for calibrating the multimedia digital instrument, devised to overcome the lack + of a reliable reference point of the accelerometer and a process to obtain a video + score are presented. This instrument has been used in a number of concerts (Portugal + and Brazil) so as to test its robustness.' + address: 'Ann Arbor, Michigan' + author: Pedro Patrício + bibtex: "@inproceedings{Patricio2012,\n abstract = {This article proposes a wireless\ + \ handheld multimedia digital instrument, which allows one to compose and perform\ + \ digital music for films in real-time. Not only does it allow the performer and\ + \ the audience to follow the film images in question, but also the relationship\ + \ between the gestures performed and the sound generated. Furthermore, it allows\ + \ one to have an effective control over the sound, and consequently achieve great\ + \ musical expression. 
In addition, a method for calibrating the multimedia digital\ + \ instrument, devised to overcome the lack of a reliable reference point of the\ + \ accelerometer and a process to obtain a video score are presented. This instrument\ + \ has been used in a number of concerts (Portugal and Brazil) so as to test its\ + \ robustness.},\n address = {Ann Arbor, Michigan},\n author = {Pedro Patr{\\'i}cio},\n\ + \ booktitle = {Proceedings of the International Conference on New Interfaces for\ + \ Musical Expression},\n doi = {10.5281/zenodo.1180569},\n issn = {2220-4806},\n\ + \ keywords = {Digital musical instrument, mobile music performance, real-time\ + \ musical composition, digital sound synthesis.},\n publisher = {University of\ + \ Michigan},\n title = {MuDI - Multimedia Digital Instrument for Composing and\ + \ Performing Digital Music for Films in Real-time},\n url = {http://www.nime.org/proceedings/2012/nime2012_64.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176452 + doi: 10.5281/zenodo.1180569 issn: 2220-4806 - keywords: 'Virtual mechanisms, dynamic simulation' - pages: 137--142 - title: Multi-instrument Virtual Keyboard -- The MIKEY Project - url: http://www.nime.org/proceedings/2002/nime2002_137.pdf - year: 2002 + keywords: 'Digital musical instrument, mobile music performance, real-time musical + composition, digital sound synthesis.' + publisher: University of Michigan + title: MuDI - Multimedia Digital Instrument for Composing and Performing Digital + Music for Films in Real-time + url: http://www.nime.org/proceedings/2012/nime2012_64.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Paine2002 - abstract: 'Interactivity has become a major consideration in the development of - a contemporary art practice that engages with the proliferation of computer based - technologies. 
Keywords ' - address: 'Dublin, Ireland' - author: 'Paine, Garth' - bibtex: "@inproceedings{Paine2002,\n abstract = {Interactivity has become a major\ - \ consideration in the development of a contemporary art practice that engages\ - \ with the proliferation of computer based technologies. Keywords },\n address\ - \ = {Dublin, Ireland},\n author = {Paine, Garth},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n date\ - \ = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176454},\n issn = {2220-4806},\n\ - \ keywords = {are your choice.},\n pages = {143--144},\n title = {GESTATION},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_143.pdf},\n year = {2002}\n\ - }\n" + ID: Endo2012 + abstract: 'Tweet Harp is a musical instrument using Twitter and a laser harp. This + instrument features the use of the human voice speaking tweets in Twitter as sounds + for music. It is played by touching the six harp strings of laser beams. Tweet + Harp gets the latest tweets from Twitter in real-time, and it creates music like + a song with unexpected words. It also creates animation displaying the texts at + the same time. The audience can visually enjoy this performance by sounds synchronized + with animation. If the audience has a Twitter account, they can participate in + the performance by tweeting.' + address: 'Ann Arbor, Michigan' + author: Ayaka Endo and Takuma Moriyama and Yasuo Kuhara + bibtex: "@inproceedings{Endo2012,\n abstract = {Tweet Harp is a musical instrument\ + \ using Twitter and a laser harp. This instrument features the use of the human\ + \ voice speaking tweets in Twitter as sounds for music. It is played by touching\ + \ the six harp strings of laser beams. Tweet Harp gets the latest tweets from\ + \ Twitter in real-time, and it creates music like a song with unexpected words.\ + \ It also creates animation displaying the texts at the same time. 
The audience\ + \ can visually enjoy this performance by sounds synchronized with animation. If\ + \ the audience has a Twitter account, they can participate in the performance\ + \ by tweeting.},\n address = {Ann Arbor, Michigan},\n author = {Ayaka Endo and\ + \ Takuma Moriyama and Yasuo Kuhara},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178249},\n\ + \ issn = {2220-4806},\n keywords = {Twitter, laser harp, text, speech, voice,\ + \ AppleScript, Quartz Composer, Max/MSP, TTS, Arduino},\n publisher = {University\ + \ of Michigan},\n title = {Tweet Harp: Laser Harp Generating Voice and Text of\ + \ Real-time Tweets in Twitter},\n url = {http://www.nime.org/proceedings/2012/nime2012_66.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176454 + doi: 10.5281/zenodo.1178249 issn: 2220-4806 - keywords: are your choice. - pages: 143--144 - title: GESTATION - url: http://www.nime.org/proceedings/2002/nime2002_143.pdf - year: 2002 + keywords: 'Twitter, laser harp, text, speech, voice, AppleScript, Quartz Composer, + Max/MSP, TTS, Arduino' + publisher: University of Michigan + title: 'Tweet Harp: Laser Harp Generating Voice and Text of Real-time Tweets in + Twitter' + url: http://www.nime.org/proceedings/2012/nime2012_66.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Pardue2002 - abstract: 'Passive RF Tagging can provide an attractive medium for development of - free-gesture musical interfaces. This was initially explored in our Musical Trinkets - installation, which used magnetically-coupled resonant LC circuits to identify - and track the position of multiple objects in real-time. Manipulation of these - objects in free space over a read coil triggered simple musical interactions. 
- Musical Navigatrics builds upon this success with new more sensitive and stable - sensing, multi-dimensional response, and vastly more intricate musical mappings - that enable full musical exploration of free space through the dynamic use and - control of arpeggiatiation and effects. The addition of basic sequencing abilities - also allows for the building of complex, layered musical interactions in a uniquely - easy and intuitive manner. ' - address: 'Dublin, Ireland' - author: 'Pardue, Laurel S. and Paradiso, Joseph A.' - bibtex: "@inproceedings{Pardue2002,\n abstract = {Passive RF Tagging can provide\ - \ an attractive medium for development of free-gesture musical interfaces. This\ - \ was initially explored in our Musical Trinkets installation, which used magnetically-coupled\ - \ resonant LC circuits to identify and track the position of multiple objects\ - \ in real-time. Manipulation of these objects in free space over a read coil triggered\ - \ simple musical interactions. Musical Navigatrics builds upon this success with\ - \ new more sensitive and stable sensing, multi-dimensional response, and vastly\ - \ more intricate musical mappings that enable full musical exploration of free\ - \ space through the dynamic use and control of arpeggiatiation and effects. The\ - \ addition of basic sequencing abilities also allows for the building of complex,\ - \ layered musical interactions in a uniquely easy and intuitive manner. },\n address\ - \ = {Dublin, Ireland},\n author = {Pardue, Laurel S. 
and Paradiso, Joseph A.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176456},\n\ - \ issn = {2220-4806},\n keywords = {passive tag, position tracking, music sequencer\ - \ interface},\n pages = {145--147},\n title = {Musical Navigatrics: New Musical\ - \ Interactions with Passive Magnetic Tags},\n url = {http://www.nime.org/proceedings/2002/nime2002_145.pdf},\n\ - \ year = {2002}\n}\n" + ID: Smith2012 + abstract: 'Machine learning models are useful and attractive tools forthe interactive + computer musician, enabling a breadth of interfaces and instruments. With current + consumer hardwareit becomes possible to run advanced machine learning algorithms + in demanding performance situations, yet expertiseremains a prominent entry barrier + for most would-be users.Currently available implementations predominantly employsupervised + machine learning techniques, while the adaptive,self-organizing capabilities of + unsupervised models are notgenerally available. We present a free, new toolbox + of unsupervised machine learning algorithms implemented in Max5 to support real-time + interactive music and video, aimedat the non-expert computer artist.' + address: 'Ann Arbor, Michigan' + author: Benjamin D. Smith and Guy E. Garnett + bibtex: "@inproceedings{Smith2012,\n abstract = {Machine learning models are useful\ + \ and attractive tools forthe interactive computer musician, enabling a breadth\ + \ of interfaces and instruments. With current consumer hardwareit becomes possible\ + \ to run advanced machine learning algorithms in demanding performance situations,\ + \ yet expertiseremains a prominent entry barrier for most would-be users.Currently\ + \ available implementations predominantly employsupervised machine learning techniques,\ + \ while the adaptive,self-organizing capabilities of unsupervised models are notgenerally\ + \ available. 
We present a free, new toolbox of unsupervised machine learning algorithms\ + \ implemented in Max5 to support real-time interactive music and video, aimedat\ + \ the non-expert computer artist.},\n address = {Ann Arbor, Michigan},\n author\ + \ = {Benjamin D. Smith and Guy E. Garnett},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178419},\n issn = {2220-4806},\n keywords = {NIME, unsupervised\ + \ machine learning, adaptive resonance theory, self-organizing maps, Max 5},\n\ + \ publisher = {University of Michigan},\n title = {Unsupervised Play: Machine\ + \ Learning Toolkit for Max},\n url = {http://www.nime.org/proceedings/2012/nime2012_68.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical - Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176456 - issn: 2220-4806 - keywords: 'passive tag, position tracking, music sequencer interface' - pages: 145--147 - title: 'Musical Navigatrics: New Musical Interactions with Passive Magnetic Tags' - url: http://www.nime.org/proceedings/2002/nime2002_145.pdf - year: 2002 + Expression + doi: 10.5281/zenodo.1178419 + issn: 2220-4806 + keywords: 'NIME, unsupervised machine learning, adaptive resonance theory, self-organizing + maps, Max 5' + publisher: University of Michigan + title: 'Unsupervised Play: Machine Learning Toolkit for Max' + url: http://www.nime.org/proceedings/2012/nime2012_68.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Patten2002 - abstract: 'We present Audiopad, an interface for musical performance that aims to - combine the modularity of knob based controllers with the expressive character - of multidimensional tracking interfaces. The performer''s manipulations of physical - pucks on a tabletop control a real-time synthesis process. 
The pucks are embedded - with LC tags that the system tracks in two dimensions with a series of specially - shaped antennae. The system projects graphical information on and around the pucks - to give the performer sophisticated control over the synthesis process.' - address: 'Dublin, Ireland' - author: 'Patten, James and Recht, Ben and Ishii, Hiroshi' - bibtex: "@inproceedings{Patten2002,\n abstract = {We present Audiopad, an interface\ - \ for musical performance that aims to combine the modularity of knob based controllers\ - \ with the expressive character of multidimensional tracking interfaces. The performer's\ - \ manipulations of physical pucks on a tabletop control a real-time synthesis\ - \ process. The pucks are embedded with LC tags that the system tracks in two dimensions\ - \ with a series of specially shaped antennae. The system projects graphical information\ - \ on and around the pucks to give the performer sophisticated control over the\ - \ synthesis process.},\n address = {Dublin, Ireland},\n author = {Patten, James\ - \ and Recht, Ben and Ishii, Hiroshi},\n booktitle = {Proceedings of the International\ - \ Conference on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n\ - \ doi = {10.5281/zenodo.1176458},\n issn = {2220-4806},\n keywords = {RF tagging,\ - \ MIDI, tangible interfaces, musical controllers, object tracking},\n pages =\ - \ {148--153},\n title = {Audiopad: A Tag-based Interface for Musical Performance},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_148.pdf},\n year = {2002}\n\ - }\n" + ID: Smith2012a + abstract: 'Machine learning models are useful and attractive tools for the interactive + computer musician, enabling a breadth of interfaces and instruments. With current + consumer hardware it becomes possible to run advanced machine learning algorithms + in demanding performance situations, yet expertise remains a prominent entry barrier + for most would-be users. 
Currently available implementations predominantly employ + supervised machine learning techniques, while the adaptive, self-organizing capabilities + of unsupervised models are not generally available. We present a free, new toolbox + of unsupervised machine learning algorithms implemented in Max 5 to support real-time + interactive music and video, aimed at the non-expert computer artist.' + address: 'Ann Arbor, Michigan' + author: Benjamin D. Smith and Guy E. Garnett + bibtex: "@inproceedings{Smith2012a,\n abstract = {Machine learning models are useful\ + \ and attractive tools for the interactive computer musician, enabling a breadth\ + \ of interfaces and instruments. With current consumer hardware it becomes possible\ + \ to run advanced machine learning algorithms in demanding performance situations,\ + \ yet expertise remains a prominent entry barrier for most would-be users. Currently\ + \ available implementations predominantly employ supervised machine learning techniques,\ + \ while the adaptive, self-organizing capabilities of unsupervised models are\ + \ not generally available. We present a free, new toolbox of unsupervised machine\ + \ learning algorithms implemented in Max 5 to support real-time interactive music\ + \ and video, aimed at the non-expert computer artist.},\n address = {Ann Arbor,\ + \ Michigan},\n author = {Benjamin D. Smith and Guy E. 
Garnett},\n booktitle =\ + \ {Proceedings of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178419},\n issn = {2220-4806},\n keywords = {NIME, unsupervised\ + \ machine learning, adaptive resonance theory, self-organizing maps, Max 5},\n\ + \ publisher = {University of Michigan},\n title = {Unsupervised Play: Machine\ + \ Learning Toolkit for Max},\n url = {http://www.nime.org/proceedings/2012/nime2012_68.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176458 + doi: 10.5281/zenodo.1178419 issn: 2220-4806 - keywords: 'RF tagging, MIDI, tangible interfaces, musical controllers, object tracking' - pages: 148--153 - title: 'Audiopad: A Tag-based Interface for Musical Performance' - url: http://www.nime.org/proceedings/2002/nime2002_148.pdf - year: 2002 + keywords: 'NIME, unsupervised machine learning, adaptive resonance theory, self-organizing + maps, Max 5' + publisher: University of Michigan + title: 'Unsupervised Play: Machine Learning Toolkit for Max' + url: http://www.nime.org/proceedings/2012/nime2012_68.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Wynnychuk2002 - abstract: 'The demo sutoolz 1.0 alpha is a 3D software interface for music performance. - By navigating through a 3D virtual architecture the musician uses a set of 3D - tools to interact with the virtual environment: gameplay zones, speaker volumes, - speaker volume membranes, speaker navigation volumes and 3D multi-band FFT visualization - systems.' - address: 'Dublin, Ireland' - author: 'Wynnychuk, Jordan and Porcher, Richard and Brajovic, Lucas and Brajovic, - Marko and Platas, Nacho' - bibtex: "@inproceedings{Wynnychuk2002,\n abstract = {The demo sutoolz 1.0 alpha\ - \ is a 3D software interface for music performance. 
By navigating through a 3D\ - \ virtual architecture the musician uses a set of 3D tools to interact with the\ - \ virtual environment: gameplay zones, speaker volumes, speaker volume membranes,\ - \ speaker navigation volumes and 3D multi-band FFT visualization systems.},\n\ - \ address = {Dublin, Ireland},\n author = {Wynnychuk, Jordan and Porcher, Richard\ - \ and Brajovic, Lucas and Brajovic, Marko and Platas, Nacho},\n booktitle = {Proceedings\ - \ of the International Conference on New Interfaces for Musical Expression},\n\ - \ date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176478},\n issn = {2220-4806},\n\ - \ keywords = {3D music interface, 3D sound, analogue input controllers, audio\ - \ localization, audio visualization, digital architecture, hybrid environments,\ - \ video game navigation},\n pages = {154--155},\n title = {sutoolz 1.0 alpha :\ - \ {3D} Software Music Interface},\n url = {http://www.nime.org/proceedings/2002/nime2002_154.pdf},\n\ - \ year = {2002}\n}\n" + ID: Troyer2012 + abstract: 'We introduce a prototype of a new tangible step sequencerthat transforms + everyday objects into percussive musicalinstruments. DrumTop adapts our everyday + task-orientedhand gestures with everyday objects as the basis of musicalinteraction, + resulting in an easily graspable musical interfacefor musical novices. The sound, + tactile, and visual feedbackcomes directly from everyday objects as the players + programdrum patterns and rearrange the objects on the tabletopinterface. DrumTop + encourages the players to explore themusical potentiality of their surroundings + and be musicallycreative through rhythmic interactions with everyday objects. + The interface consists of transducers that trigger ahit, causing the objects themselves + to produce sound whenthey are in close contact with the transducers. We discusshow + we designed and implemented our current DrumTopprototype and describe how players + interact with the interface. 
We then highlight the players'' experience with Drumtop + and our plans for future work in the fields of musiceducation and performance.' + address: 'Ann Arbor, Michigan' + author: Akito van Troyer + bibtex: "@inproceedings{Troyer2012,\n abstract = {We introduce a prototype of a\ + \ new tangible step sequencerthat transforms everyday objects into percussive\ + \ musicalinstruments. DrumTop adapts our everyday task-orientedhand gestures with\ + \ everyday objects as the basis of musicalinteraction, resulting in an easily\ + \ graspable musical interfacefor musical novices. The sound, tactile, and visual\ + \ feedbackcomes directly from everyday objects as the players programdrum patterns\ + \ and rearrange the objects on the tabletopinterface. DrumTop encourages the players\ + \ to explore themusical potentiality of their surroundings and be musicallycreative\ + \ through rhythmic interactions with everyday objects. The interface consists\ + \ of transducers that trigger ahit, causing the objects themselves to produce\ + \ sound whenthey are in close contact with the transducers. We discusshow we designed\ + \ and implemented our current DrumTopprototype and describe how players interact\ + \ with the interface. 
We then highlight the players' experience with Drumtop and\ + \ our plans for future work in the fields of musiceducation and performance.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Akito van Troyer},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178441},\n issn = {2220-4806},\n keywords\ + \ = {Tangible User Interfaces, Playful Experience, Percussion, Step Sequencer,\ + \ Transducers, Everyday Objects},\n publisher = {University of Michigan},\n title\ + \ = {DrumTop: Playing with Everyday Objects},\n url = {http://www.nime.org/proceedings/2012/nime2012_70.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176478 + doi: 10.5281/zenodo.1178441 issn: 2220-4806 - keywords: '3D music interface, 3D sound, analogue input controllers, audio localization, - audio visualization, digital architecture, hybrid environments, video game navigation' - pages: 154--155 - title: 'sutoolz 1.0 alpha : 3D Software Music Interface' - url: http://www.nime.org/proceedings/2002/nime2002_154.pdf - year: 2002 + keywords: 'Tangible User Interfaces, Playful Experience, Percussion, Step Sequencer, + Transducers, Everyday Objects' + publisher: University of Michigan + title: 'DrumTop: Playing with Everyday Objects' + url: http://www.nime.org/proceedings/2012/nime2012_70.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Schnell2002 - abstract: 'In this paper, we develop the concept of "composed instruments". We will - look at this idea from two perspectives: the design of computer systems in the - context of live performed music and musicological considerations. A historical - context is developed. Examples will be drawn from recent compositions. Finally - basic concepts from computer science will be examined for their relation ship - to this concept. 
' - address: 'Dublin, Ireland' - author: 'Schnell, Norbert and Battier, Marc' - bibtex: "@inproceedings{Schnell2002,\n abstract = {In this paper, we develop the\ - \ concept of \"composed instruments\". We will look at this idea from two perspectives:\ - \ the design of computer systems in the context of live performed music and musicological\ - \ considerations. A historical context is developed. Examples will be drawn from\ - \ recent compositions. Finally basic concepts from computer science will be examined\ - \ for their relation ship to this concept. },\n address = {Dublin, Ireland},\n\ - \ author = {Schnell, Norbert and Battier, Marc},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n date\ - \ = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176460},\n issn = {2220-4806},\n\ - \ keywords = {Instruments, musicology, composed instrument, Theremin, Martenot,\ - \ interaction, streams, MAX.},\n pages = {156--160},\n title = {Introducing Composed\ - \ Instruments, Technical and Musicological Implications},\n url = {http://www.nime.org/proceedings/2002/nime2002_156.pdf},\n\ - \ year = {2002}\n}\n" + ID: Ariza2012 + abstract: 'This paper demonstrates the practical benefits and performance opportunities + of using the dual-analog gamepad as a controller for real-time live electronics. + Numerous diverse instruments and interfaces, as well as detailed control mappings, + are described. Approaches to instrument and preset switching are also presented. + While all of the instrument implementations presented are made available through + the Martingale Pd library, resources for other synthesis languages are also described.' + address: 'Ann Arbor, Michigan' + author: Christopher Ariza + bibtex: "@inproceedings{Ariza2012,\n abstract = {This paper demonstrates the practical\ + \ benefits and performance opportunities of using the dual-analog gamepad as a\ + \ controller for real-time live electronics. 
Numerous diverse instruments and\ + \ interfaces, as well as detailed control mappings, are described. Approaches\ + \ to instrument and preset switching are also presented. While all of the instrument\ + \ implementations presented are made available through the Martingale Pd library,\ + \ resources for other synthesis languages are also described.},\n address = {Ann\ + \ Arbor, Michigan},\n author = {Christopher Ariza},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178205},\n issn = {2220-4806},\n keywords = {Controllers,\ + \ live electronics, dual-analog, gamepad, joystick, computer music, instrument,\ + \ interface},\n publisher = {University of Michigan},\n title = {The Dual-Analog\ + \ Gamepad as a Practical Platform for Live Electronics Instrument and Interface\ + \ Design},\n url = {http://www.nime.org/proceedings/2012/nime2012_73.pdf},\n year\ + \ = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176460 + doi: 10.5281/zenodo.1178205 issn: 2220-4806 - keywords: 'Instruments, musicology, composed instrument, Theremin, Martenot, interaction, - streams, MAX.' 
- pages: 156--160 - title: 'Introducing Composed Instruments, Technical and Musicological Implications' - url: http://www.nime.org/proceedings/2002/nime2002_156.pdf - year: 2002 + keywords: 'Controllers, live electronics, dual-analog, gamepad, joystick, computer + music, instrument, interface' + publisher: University of Michigan + title: The Dual-Analog Gamepad as a Practical Platform for Live Electronics Instrument + and Interface Design + url: http://www.nime.org/proceedings/2012/nime2012_73.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Smyth2002 - abstract: 'The cicada uses a rapid sequence of buckling ribs to initiate and sustain - vibrations in its tymbal plate (the primary mechanical resonator in the cicada''s - sound production system). The tymbalimba, a music controller based on this same - mechanism, has a row of 4 convex aluminum ribs (ason the cicada''s tymbal) arranged - much like the keys on a calimba. Each rib is spring loaded and capable of snapping - down into a V-shape (a motion referred to as buckling), under the downward force - of the user''s finger. This energy generated by the buckling motion is measured - by an accelerometer located under each rib and used as the input to a physical - model.' - address: 'Dublin, Ireland' - author: 'Smyth, Tamara and Smith, Julius O.' - bibtex: "@inproceedings{Smyth2002,\n abstract = {The cicada uses a rapid sequence\ - \ of buckling ribs to initiate and sustain vibrations in its tymbal plate (the\ - \ primary mechanical resonator in the cicada's sound production system). The tymbalimba,\ - \ a music controller based on this same mechanism, has a row of 4 convex aluminum\ - \ ribs (ason the cicada's tymbal) arranged much like the keys on a calimba. Each\ - \ rib is spring loaded and capable of snapping down into a V-shape (a motion referred\ - \ to as buckling), under the downward force of the user's finger. 
This energy\ - \ generated by the buckling motion is measured by an accelerometer located under\ - \ each rib and used as the input to a physical model.},\n address = {Dublin, Ireland},\n\ - \ author = {Smyth, Tamara and Smith, Julius O.},\n booktitle = {Proceedings of\ - \ the International Conference on New Interfaces for Musical Expression},\n date\ - \ = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176462},\n issn = {2220-4806},\n\ - \ keywords = {Bioacoustics, Physical Modeling, Controllers, Cicada, Buckling mechanism.},\n\ - \ pages = {24--27},\n title = {Creating Sustained Tones with the Cicada's Rapid\ - \ Sequential Buckling Mechanism},\n url = {http://www.nime.org/proceedings/2002/nime2002_161.pdf},\n\ - \ year = {2002}\n}\n" + ID: Pardo2012 + abstract: 'Potential users of audio production software, such as parametric audio + equalizers, may be discouraged by the complexity of the interface. A new approach + creates a personalized on-screen slider that lets the user manipulate the audio + in terms of a descriptive term (e.g. "warm"), without the user needing to learn + or use the interface of an equalizer. This system learns mappings by presenting + a sequence of sounds to the user and correlating the gain in each frequency band + with the user''s preference rating. The system speeds learning through transfer + learning. Results on a study of 35 participants show how an effective, personalized + audio manipulation tool can be automatically built after only three ratings from + the user.' + address: 'Ann Arbor, Michigan' + author: Bryan Pardo and David Little and Darren Gergle + bibtex: "@inproceedings{Pardo2012,\n abstract = {Potential users of audio production\ + \ software, such as parametric audio equalizers, may be discouraged by the complexity\ + \ of the interface. A new approach creates a personalized on-screen slider that\ + \ lets the user manipulate the audio in terms of a descriptive term (e.g. 
\"warm\"\ + ), without the user needing to learn or use the interface of an equalizer. This\ + \ system learns mappings by presenting a sequence of sounds to the user and correlating\ + \ the gain in each frequency band with the user's preference rating. The system\ + \ speeds learning through transfer learning. Results on a study of 35 participants\ + \ show how an effective, personalized audio manipulation tool can be automatically\ + \ built after only three ratings from the user.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Bryan Pardo and David Little and Darren Gergle},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1180563},\n issn = {2220-4806},\n keywords = {Human computer\ + \ interaction, music, multimedia production, transfer learning},\n publisher =\ + \ {University of Michigan},\n title = {Towards Speeding Audio EQ Interface Building\ + \ with Transfer Learning},\n url = {http://www.nime.org/proceedings/2012/nime2012_74.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176462 + doi: 10.5281/zenodo.1180563 issn: 2220-4806 - keywords: 'Bioacoustics, Physical Modeling, Controllers, Cicada, Buckling mechanism.' 
- pages: 24--27 - title: Creating Sustained Tones with the Cicada's Rapid Sequential Buckling Mechanism - url: http://www.nime.org/proceedings/2002/nime2002_161.pdf - year: 2002 + keywords: 'Human computer interaction, music, multimedia production, transfer learning' + publisher: University of Michigan + title: Towards Speeding Audio EQ Interface Building with Transfer Learning + url: http://www.nime.org/proceedings/2012/nime2012_74.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Stanza2002 - abstract: 'Amorphoscapes by Stanza are interactive, generative, audio visual, digital - paintings and drawings created specifically for the internet. This is interactive - art on the Internet, incorporating generative sounds and 3D imaging.' - address: 'Dublin, Ireland' - author: Stanza - bibtex: "@inproceedings{Stanza2002,\n abstract = {Amorphoscapes by Stanza are interactive,\ - \ generative, audio visual, digital paintings and drawings created specifically\ - \ for the internet. This is interactive art on the Internet, incorporating generative\ - \ sounds and 3D imaging.},\n address = {Dublin, Ireland},\n author = {Stanza},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176386},\n\ - \ issn = {2220-4806},\n pages = {165--166},\n title = {Amorphoscapes \\& Soundtoys},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_165.pdf},\n year = {2002}\n\ - }\n" + ID: Stead2012 + abstract: 'We describe a system that allows non-programmers to specify the grammar + for a novel graphic score notation of their own design, defining performance notations + suitable for drawing in live situations on a surface such as a whiteboard. Thescore + can be interpreted via the camera of a smartphone,interactively scanned over the + whiteboard to control the parameters of synthesisers implemented in Overtone. 
+ The visual grammar of the score, and its correspondence to the sound parameters, + can be defined by the user with a simple visual condition-action language. This + language can be edited on the touchscreen of an Android phone, allowing the grammar + to be modified live in performance situations.Interactive scanning of the score + is visible to the audience asa performance interface, with a colour classifier + and visual feature recogniser causing the grammar-specified events to be sent + using OSC messages via Wi-Fi from the hand-held smartphone to an audio workstation.' + address: 'Ann Arbor, Michigan' + author: Alistair G. Stead and Alan F. Blackwell and Samual Aaron + bibtex: "@inproceedings{Stead2012,\n abstract = {We describe a system that allows\ + \ non-programmers to specify the grammar for a novel graphic score notation of\ + \ their own design, defining performance notations suitable for drawing in live\ + \ situations on a surface such as a whiteboard. Thescore can be interpreted via\ + \ the camera of a smartphone,interactively scanned over the whiteboard to control\ + \ the parameters of synthesisers implemented in Overtone. The visual grammar of\ + \ the score, and its correspondence to the sound parameters, can be defined by\ + \ the user with a simple visual condition-action language. This language can be\ + \ edited on the touchscreen of an Android phone, allowing the grammar to be modified\ + \ live in performance situations.Interactive scanning of the score is visible\ + \ to the audience asa performance interface, with a colour classifier and visual\ + \ feature recogniser causing the grammar-specified events to be sent using OSC\ + \ messages via Wi-Fi from the hand-held smartphone to an audio workstation.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Alistair G. Stead and Alan F. 
Blackwell\ + \ and Samual Aaron},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178423},\n\ + \ issn = {2220-4806},\n keywords = {Graphic Notation, Disposable Notation, Live\ + \ Coding, Com-puter Vision, Mobile Music},\n publisher = {University of Michigan},\n\ + \ title = {Graphic Score Grammars for End-Users},\n url = {http://www.nime.org/proceedings/2012/nime2012_77.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176386 + doi: 10.5281/zenodo.1178423 issn: 2220-4806 - pages: 165--166 - title: Amorphoscapes & Soundtoys - url: http://www.nime.org/proceedings/2002/nime2002_165.pdf - year: 2002 + keywords: 'Graphic Notation, Disposable Notation, Live Coding, Com-puter Vision, + Mobile Music' + publisher: University of Michigan + title: Graphic Score Grammars for End-Users + url: http://www.nime.org/proceedings/2012/nime2012_77.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Johannes2002 - abstract: 'This paper describes the hardware and the software of a computer-based - doppler-sonar system for movement detection. The design is focused on simplicity - and lowcost do-it-yourself construction. 
' - address: 'Dublin, Ireland' - author: 'Johannes, Taelman' - bibtex: "@inproceedings{Johannes2002,\n abstract = {This paper describes the hardware\ - \ and the software of a computer-based doppler-sonar system for movement detection.\ - \ The design is focused on simplicity and lowcost do-it-yourself construction.\ - \ },\n address = {Dublin, Ireland},\n author = {Johannes, Taelman},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176430},\n\ - \ issn = {2220-4806},\n keywords = {sonar},\n pages = {167--170},\n title = {A\ - \ Low-cost Sonar for Unobtrusive Man-machine Interfacing},\n url = {http://www.nime.org/proceedings/2002/nime2002_167.pdf},\n\ - \ year = {2002}\n}\n" + ID: Jackson2012 + abstract: 'This paper describes the bubble drum set, along with several polyrhythm + games and interactive music activities that have been developed to show its potential + for use as an input controller. The bubble drum set combines various sizes of + colorful exercise balls, held in place or suspended with conventional drum hardware + and thus creating a trap kit configuration in which the spherical surfaces can + be struck and stroked from varying angles using sticks, brushes, or even by hands + alone. The acoustic properties of these fitness balls are surprisingly rich, capable + of producing subtle differences in timbre while being responsive over a wide dynamic + range. The entire set has been purposefully designed to provide a player with + the means to achieve a rigorous and healthy physical workout, in addition to the + achieving beneficial cognitive and sensory stimulation that comes from playing + music with a sensitive and expressive instrument.' 
+ address: 'Ann Arbor, Michigan' + author: Jay Alan Jackson + bibtex: "@inproceedings{Jackson2012,\n abstract = {This paper describes the bubble\ + \ drum set, along with several polyrhythm games and interactive music activities\ + \ that have been developed to show its potential for use as an input controller.\ + \ The bubble drum set combines various sizes of colorful exercise balls, held\ + \ in place or suspended with conventional drum hardware and thus creating a trap\ + \ kit configuration in which the spherical surfaces can be struck and stroked\ + \ from varying angles using sticks, brushes, or even by hands alone. The acoustic\ + \ properties of these fitness balls are surprisingly rich, capable of producing\ + \ subtle differences in timbre while being responsive over a wide dynamic range.\ + \ The entire set has been purposefully designed to provide a player with the means\ + \ to achieve a rigorous and healthy physical workout, in addition to the achieving\ + \ beneficial cognitive and sensory stimulation that comes from playing music with\ + \ a sensitive and expressive instrument.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Jay Alan Jackson},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178293},\n\ + \ issn = {2220-4806},\n keywords = {Bubble Drums, WaveMachine Lab’s Drumagog,\ + \ Polyrhythms.},\n publisher = {University of Michigan},\n title = {Bubble Drum-agog-ing:\ + \ Polyrhythm Games \\& Other Inter Activities},\n url = {http://www.nime.org/proceedings/2012/nime2012_8.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176430 + doi: 10.5281/zenodo.1178293 issn: 2220-4806 - keywords: sonar - pages: 167--170 - title: A Low-cost Sonar for Unobtrusive Man-machine Interfacing - url: 
http://www.nime.org/proceedings/2002/nime2002_167.pdf - year: 2002 + keywords: 'Bubble Drums, WaveMachine Lab’s Drumagog, Polyrhythms.' + publisher: University of Michigan + title: 'Bubble Drum-agog-ing: Polyrhythm Games & Other Inter Activities' + url: http://www.nime.org/proceedings/2012/nime2012_8.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Tanaka2002 - abstract: 'This paper describes a technique of multimodal, multichannel control - of electronic musical devices using two control methodologies, the Electromyogram - (EMG) and relative position sensing. Requirements for the application of multimodal - interaction theory in the musical domain are discussed. We introduce the concept - of bidirectional complementarity to characterize the relationship between the - component sensing technologies. Each control can be used independently, but together - they are mutually complementary. This reveals a fundamental difference from orthogonal - systems. The creation of a concert piece based on this system is given as example. ' - address: 'Dublin, Ireland' - author: 'Tanaka, Atau and Knapp, Benjamin' - bibtex: "@inproceedings{Tanaka2002,\n abstract = {This paper describes a technique\ - \ of multimodal, multichannel control of electronic musical devices using two\ - \ control methodologies, the Electromyogram (EMG) and relative position sensing.\ - \ Requirements for the application of multimodal interaction theory in the musical\ - \ domain are discussed. We introduce the concept of bidirectional complementarity\ - \ to characterize the relationship between the component sensing technologies.\ - \ Each control can be used independently, but together they are mutually complementary.\ - \ This reveals a fundamental difference from orthogonal systems. The creation\ - \ of a concert piece based on this system is given as example. 
},\n address =\ - \ {Dublin, Ireland},\n author = {Tanaka, Atau and Knapp, Benjamin},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176464},\n\ - \ issn = {2220-4806},\n keywords = {Human Computer Interaction, Musical Controllers,\ - \ Electromyogram, Position Sensing, Sensor Instruments},\n pages = {171--176},\n\ - \ title = {Multimodal Interaction in Music Using the Electromyogram and Relative\ - \ Position Sensing},\n url = {http://www.nime.org/proceedings/2002/nime2002_171.pdf},\n\ - \ year = {2002}\n}\n" + ID: Hochenbaum2012 + abstract: 'In this paper we present a multimodal system for analyzing drum performance. + In the first example we perform automatic drum hand recognition utilizing a technique + for automatic labeling of training data using direct sensors, and only indirect + sensors (e.g. a microphone) for testing. Left/Right drum hand recognition is achieved + with an average accuracy of 84.95% for two performers. Secondly we provide a study + investigating multimodality dependent performance metrics analysis.' + address: 'Ann Arbor, Michigan' + author: Jordan Hochenbaum and Ajay Kapur + bibtex: "@inproceedings{Hochenbaum2012,\n abstract = {In this paper we present a\ + \ multimodal system for analyzing drum performance. In the first example we perform\ + \ automatic drum hand recognition utilizing a technique for automatic labeling\ + \ of training data using direct sensors, and only indirect sensors (e.g. a microphone)\ + \ for testing. Left/Right drum hand recognition is achieved with an average accuracy\ + \ of 84.95% for two performers. 
Secondly we provide a study investigating multimodality\ + \ dependent performance metrics analysis.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Jordan Hochenbaum and Ajay Kapur},\n booktitle = {Proceedings of the\ + \ International Conference on New Interfaces for Musical Expression},\n doi =\ + \ {10.5281/zenodo.1178287},\n issn = {2220-4806},\n keywords = {Multimodality,\ + \ Drum stroke identification, surrogate sensors, surrogate data training, machine\ + \ learning, music information retrieval, performance metrics},\n publisher = {University\ + \ of Michigan},\n title = {Drum Stroke Computing: Multimodal Signal Processing\ + \ for Drum Stroke Identification and Performance Metrics},\n url = {http://www.nime.org/proceedings/2012/nime2012_82.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176464 + doi: 10.5281/zenodo.1178287 issn: 2220-4806 - keywords: 'Human Computer Interaction, Musical Controllers, Electromyogram, Position - Sensing, Sensor Instruments' - pages: 171--176 - title: Multimodal Interaction in Music Using the Electromyogram and Relative Position - Sensing - url: http://www.nime.org/proceedings/2002/nime2002_171.pdf - year: 2002 + keywords: 'Multimodality, Drum stroke identification, surrogate sensors, surrogate + data training, machine learning, music information retrieval, performance metrics' + publisher: University of Michigan + title: 'Drum Stroke Computing: Multimodal Signal Processing for Drum Stroke Identification + and Performance Metrics' + url: http://www.nime.org/proceedings/2012/nime2012_82.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Verplank2002 - abstract: 'Active force-feedback holds the potential for precise and rapid controls. - A high performance device can be built from a surplus disk drive and controlled - from an inexpensive microcontroller. 
Our new design,The Plank has only one axis - of force-feedback with limited range of motion. It is being used to explore methods - of feeling and directly manipulating sound waves and spectra suitable for live - performance of computer music.' - address: 'Dublin, Ireland' - author: 'Verplank, Bill and Gurevich, Michael and Mathews, Max' - bibtex: "@inproceedings{Verplank2002,\n abstract = {Active force-feedback holds\ - \ the potential for precise and rapid controls. A high performance device can\ - \ be built from a surplus disk drive and controlled from an inexpensive microcontroller.\ - \ Our new design,The Plank has only one axis of force-feedback with limited range\ - \ of motion. It is being used to explore methods of feeling and directly manipulating\ - \ sound waves and spectra suitable for live performance of computer music.},\n\ - \ address = {Dublin, Ireland},\n author = {Verplank, Bill and Gurevich, Michael\ - \ and Mathews, Max},\n booktitle = {Proceedings of the International Conference\ - \ on New Interfaces for Musical Expression},\n date = {24-26 May, 2002},\n doi\ - \ = {10.5281/zenodo.1176466},\n issn = {2220-4806},\n keywords = {Haptics, music\ - \ controllers, scanned synthesis.},\n pages = {177--180},\n title = {THE PLANK:\ - \ Designing a Simple Haptic Controller.},\n url = {http://www.nime.org/proceedings/2002/nime2002_177.pdf},\n\ - \ year = {2002}\n}\n" + ID: Levy2012 + abstract: 'OMax is an improvisation software based on a graph representation encoding + the pattern repetitions and structures of a sequence, built incrementally and + in real-time from a live Midi or Audio source. We present in this paper a totally + rewritten version of the software. The new design leads to refine the spectral + listening of OMax and to consider different methods to build the symbolic alphabet + labeling our symbolic units. 
The very modular and versatile architecture makes + possible new musical configurations and we tried the software with different styles + and musical situations. A novel visualization is proposed, which displays the + current state of the learnt knowledge and allows to notice, both on the fly and + a posteriori, points of musical interest and higher level structures.' + address: 'Ann Arbor, Michigan' + author: Benjamin Levy and Georges Bloch and Gerard Assayag + bibtex: "@inproceedings{Levy2012,\n abstract = {OMax is an improvisation software\ + \ based on a graph representation encoding the pattern repetitions and structures\ + \ of a sequence, built incrementally and in real-time from a live Midi or Audio\ + \ source. We present in this paper a totally rewritten version of the software.\ + \ The new design leads to refine the spectral listening of OMax and to consider\ + \ different methods to build the symbolic alphabet labeling our symbolic units.\ + \ The very modular and versatile architecture makes possible new musical configurations\ + \ and we tried the software with different styles and musical situations. 
A novel\ + \ visualization is proposed, which displays the current state of the learnt knowledge\ + \ and allows to notice, both on the fly and a posteriori, points of musical interest\ + \ and higher level structures.},\n address = {Ann Arbor, Michigan},\n author =\ + \ {Benjamin Levy and Georges Bloch and Gerard Assayag},\n booktitle = {Proceedings\ + \ of the International Conference on New Interfaces for Musical Expression},\n\ + \ doi = {10.5281/zenodo.1178327},\n issn = {2220-4806},\n keywords = {OMax, Improvisation,\ + \ Machine Learning, Machine Listen-ing, Visualization, Sequence Model, Software\ + \ Architecture},\n publisher = {University of Michigan},\n title = {OMaxist Dialectics:\ + \ Capturing, Visualizing and Expanding Improvisations},\n url = {http://www.nime.org/proceedings/2012/nime2012_87.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176466 + doi: 10.5281/zenodo.1178327 issn: 2220-4806 - keywords: 'Haptics, music controllers, scanned synthesis.' - pages: 177--180 - title: 'THE PLANK: Designing a Simple Haptic Controller.' - url: http://www.nime.org/proceedings/2002/nime2002_177.pdf - year: 2002 + keywords: 'OMax, Improvisation, Machine Learning, Machine Listen-ing, Visualization, + Sequence Model, Software Architecture' + publisher: University of Michigan + title: 'OMaxist Dialectics: Capturing, Visualizing and Expanding Improvisations' + url: http://www.nime.org/proceedings/2012/nime2012_87.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Vogt2002 - abstract: 'Here we propose a novel musical controller which acquires imaging data - of the tongue with a two-dimensional medical ultrasound scanner. A computer vision - algorithm extracts from the image a discrete tongue shape to control, in realtime, - a musical synthesizer and musical effects. 
We evaluate the mapping space between - tongue shape and controller parameters and its expressive characteristics.' - address: 'Dublin, Ireland' - author: 'Vogt, Florian and Mccaig, Graeme and Ali, Mir A. and Fels, Sidney S.' - bibtex: "@inproceedings{Vogt2002,\n abstract = {Here we propose a novel musical\ - \ controller which acquires imaging data of the tongue with a two-dimensional\ - \ medical ultrasound scanner. A computer vision algorithm extracts from the image\ - \ a discrete tongue shape to control, in realtime, a musical synthesizer and musical\ - \ effects. We evaluate the mapping space between tongue shape and controller parameters\ - \ and its expressive characteristics.},\n address = {Dublin, Ireland},\n author\ - \ = {Vogt, Florian and Mccaig, Graeme and Ali, Mir A. and Fels, Sidney S.},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176468},\n\ - \ issn = {2220-4806},\n keywords = {Tongue model, ultrasound, real-time, music\ - \ synthesis, speech interface},\n pages = {181--185},\n title = {Tongue `n' Groove:\ - \ An Ultrasound based Music Controller},\n url = {http://www.nime.org/proceedings/2002/nime2002_181.pdf},\n\ - \ year = {2002}\n}\n" + ID: ElShimy2012 + abstract: 'In this paper, we discuss the design and testing of a reactive environment + for musical performance. Driven by the interpersonal interactions amongst musicians, + our system gives users, i.e., several musicians playing together in a band, real-time + control over certain aspects of their performance, enabling them to change volume + levels dynamically simply by moving around. It differs most notably from the majority + of ventures into the design of novel musical interfaces and installations in its + multidisciplinary approach, drawing on techniques from Human-Computer Interaction, + social sciences and ludology. 
Our User-Centered Design methodology was central + to producing an interactive environment that enhances traditional performance + with novel functionalities. During a formal experiment, musicians reported finding + our system exciting and enjoyable. We also introduce some additional interactions + that can further enhance the interactivity of our reactive environment. In describing + the particular challenges of working with such a unique and creative user as the + musician, we hope that our approach can be of guidance to interface developers + working on applications of a creative nature.' + address: 'Ann Arbor, Michigan' + author: Dalia El-Shimy and Thomas Hermann and Jeremy Cooperstock + bibtex: "@inproceedings{ElShimy2012,\n abstract = {In this paper, we discuss the\ + \ design and testing of a reactive environment for musical performance. Driven\ + \ by the interpersonal interactions amongst musicians, our system gives users,\ + \ i.e., several musicians playing together in a band, real-time control over certain\ + \ aspects of their performance, enabling them to change volume levels dynamically\ + \ simply by moving around. It differs most notably from the majority of ventures\ + \ into the design of novel musical interfaces and installations in its multidisciplinary\ + \ approach, drawing on techniques from Human-Computer Interaction, social sciences\ + \ and ludology. Our User-Centered Design methodology was central to producing\ + \ an interactive environment that enhances traditional performance with novel\ + \ functionalities. During a formal experiment, musicians reported finding our\ + \ system exciting and enjoyable. We also introduce some additional interactions\ + \ that can further enhance the interactivity of our reactive environment. 
In describing\ + \ the particular challenges of working with such a unique and creative user as\ + \ the musician, we hope that our approach can be of guidance to interface developers\ + \ working on applications of a creative nature.},\n address = {Ann Arbor, Michigan},\n\ + \ author = {Dalia El-Shimy and Thomas Hermann and Jeremy Cooperstock},\n booktitle\ + \ = {Proceedings of the International Conference on New Interfaces for Musical\ + \ Expression},\n doi = {10.5281/zenodo.1178247},\n issn = {2220-4806},\n publisher\ + \ = {University of Michigan},\n title = {A Reactive Environment for Dynamic Volume\ + \ Control},\n url = {http://www.nime.org/proceedings/2012/nime2012_88.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176468 + doi: 10.5281/zenodo.1178247 issn: 2220-4806 - keywords: 'Tongue model, ultrasound, real-time, music synthesis, speech interface' - pages: 181--185 - title: 'Tongue `n'' Groove: An Ultrasound based Music Controller' - url: http://www.nime.org/proceedings/2002/nime2002_181.pdf - year: 2002 + publisher: University of Michigan + title: A Reactive Environment for Dynamic Volume Control + url: http://www.nime.org/proceedings/2012/nime2012_88.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Weinberg2002 - abstract: 'The Beatbugs are hand-held percussive instruments that allow the creation, - manipulation, and sharing of rhythmic motifs through a simple interface. When - multiple Beatbugs are connected in a network, players can form large-scale collaborative - compositions by interdependently sharing and developing each other''s motifs. - Each Beatbug player can enter a motif that is then sent through a stochastic computerized - "Nerve Center" to other players in the network. 
Receiving players can decide whether - to develop the motif further (by continuously manipulating pitch, timbre, and - rhythmic elements using two bend sensor antennae) or to keep it in their personal - instrument (by entering and sending their own new motifs to the group.) The tension - between the system''s stochastic routing scheme and the players'' improvised real-time - decisions leads to an interdependent, dynamic, and constantly evolving musical - experience. A musical composition entitled "Nerve" was written for the system - by author Gil Weinberg. It was premiered on February 2002 as part of Tod Machover''s - Toy Symphony [1] in a concert with the Deutsches Symphonie Orchester Berlin, conducted - by Kent Nagano. The paper concludes with a short evaluative discussion of the - concert and the weeklong workshops that led to it. ' - address: 'Dublin, Ireland' - author: 'Weinberg, Gil and Aimi, Roberto and Jennings, Kevin' - bibtex: "@inproceedings{Weinberg2002,\n abstract = {The Beatbugs are hand-held percussive\ - \ instruments that allow the creation, manipulation, and sharing of rhythmic motifs\ - \ through a simple interface. When multiple Beatbugs are connected in a network,\ - \ players can form large-scale collaborative compositions by interdependently\ - \ sharing and developing each other's motifs. Each Beatbug player can enter a\ - \ motif that is then sent through a stochastic computerized \"Nerve Center\" to\ - \ other players in the network. Receiving players can decide whether to develop\ - \ the motif further (by continuously manipulating pitch, timbre, and rhythmic\ - \ elements using two bend sensor antennae) or to keep it in their personal instrument\ - \ (by entering and sending their own new motifs to the group.) The tension between\ - \ the system's stochastic routing scheme and the players' improvised real-time\ - \ decisions leads to an interdependent, dynamic, and constantly evolving musical\ - \ experience. 
A musical composition entitled \"Nerve\" was written for the system\ - \ by author Gil Weinberg. It was premiered on February 2002 as part of Tod Machover's\ - \ Toy Symphony [1] in a concert with the Deutsches Symphonie Orchester Berlin,\ - \ conducted by Kent Nagano. The paper concludes with a short evaluative discussion\ - \ of the concert and the weeklong workshops that led to it. },\n address = {Dublin,\ - \ Ireland},\n author = {Weinberg, Gil and Aimi, Roberto and Jennings, Kevin},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176470},\n\ - \ issn = {2220-4806},\n keywords = {Interdependent Musical Networks, group playing,\ - \ percussive controllers.},\n pages = {186--191},\n title = {The Beatbug Network\ - \ -A Rhythmic System for Interdependent Group Collaboration},\n url = {http://www.nime.org/proceedings/2002/nime2002_186.pdf},\n\ - \ year = {2002}\n}\n" + ID: Surges2012 + abstract: 'This paper describes three hardware devices for integrating modular synthesizers + with computers, each with a different approach to the relationship between hardware + and software. The devices discussed are the USB-Octomod, an 8-channel OSC-compatible + computer-controlled control-voltage generator, the tabulaRasa, a hardware table-lookup + oscillator synthesis module with corresponding waveform design software, and the + pucktronix.snake.corral, a dual 8x8 computer-controlled analog signal routing + matrix. The devices make use of open-source hardware and software, and are designed + around affordable micro-controllers and integrated circuits. ' + address: 'Ann Arbor, Michigan' + author: Greg Surges + bibtex: "@inproceedings{Surges2012,\n abstract = {This paper describes three hardware\ + \ devices for integrating modular synthesizers with computers, each with a different\ + \ approach to the relationship between hardware and software. 
The devices discussed\ + \ are the USB-Octomod, an 8-channel OSC-compatible computer-controlled control-voltage\ + \ generator, the tabulaRasa, a hardware table-lookup oscillator synthesis module\ + \ with corresponding waveform design software, and the pucktronix.snake.corral,\ + \ a dual 8x8 computer-controlled analog signal routing matrix. The devices make\ + \ use of open-source hardware and software, and are designed around affordable\ + \ micro-controllers and integrated circuits. },\n address = {Ann Arbor, Michigan},\n\ + \ author = {Greg Surges},\n booktitle = {Proceedings of the International Conference\ + \ on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178427},\n\ + \ issn = {2220-4806},\n keywords = {modular synthesis, interface, diy, open-source},\n\ + \ publisher = {University of Michigan},\n title = {DIY Hybrid Analog/Digital Modular\ + \ Synthesis},\n url = {http://www.nime.org/proceedings/2012/nime2012_9.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176470 + doi: 10.5281/zenodo.1178427 issn: 2220-4806 - keywords: 'Interdependent Musical Networks, group playing, percussive controllers.' - pages: 186--191 - title: The Beatbug Network -A Rhythmic System for Interdependent Group Collaboration - url: http://www.nime.org/proceedings/2002/nime2002_186.pdf - year: 2002 + keywords: 'modular synthesis, interface, diy, open-source' + publisher: University of Michigan + title: DIY Hybrid Analog/Digital Modular Synthesis + url: http://www.nime.org/proceedings/2012/nime2012_9.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Wessel2002 - abstract: 'In this demonstration we will show a variety of computer-based musical - instruments designed for live performance. 
Our design criteria include initial - ease of use coupled with a long term potential for virtuosity, minimal and low - variance latency, and clear and simple strategies for programming the relationship - between gesture and musical result. We present custom controllers and unique adaptations - of standard gestural interfaces, a programmable connectivity processor, a communications - protocol called Open Sound Control (OSC), and a variety of metaphors for musical - control. ' - address: 'Dublin, Ireland' - author: 'Wessel, David and Wright, Matthew and Schott, John' - bibtex: "@inproceedings{Wessel2002,\n abstract = {In this demonstration we will\ - \ show a variety of computer-based musical instruments designed for live performance.\ - \ Our design criteria include initial ease of use coupled with a long term potential\ - \ for virtuosity, minimal and low variance latency, and clear and simple strategies\ - \ for programming the relationship between gesture and musical result. We present\ - \ custom controllers and unique adaptations of standard gestural interfaces, a\ - \ programmable connectivity processor, a communications protocol called Open Sound\ - \ Control (OSC), and a variety of metaphors for musical control. 
},\n address\ - \ = {Dublin, Ireland},\n author = {Wessel, David and Wright, Matthew and Schott,\ - \ John},\n booktitle = {Proceedings of the International Conference on New Interfaces\ - \ for Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176472},\n\ - \ issn = {2220-4806},\n keywords = {Expressive control, mapping gestures to acoustic\ - \ results, metaphors for musical control, Tactex, Buchla Thunder, digitizing tablets.},\n\ - \ pages = {192--194},\n title = {Intimate Musical Control of Computers with a\ - \ Variety of Controllers and Gesture Mapping Metaphors},\n url = {http://www.nime.org/proceedings/2002/nime2002_192.pdf},\n\ - \ year = {2002}\n}\n" + ID: Johnston2012 + abstract: 'Mapping between musical interfaces, and sound engines, is integral to + the nature of an interface [3]. Traditionally, musical applications for touch + surfaces have directly mapped touch coordinates to control parameters. However, + recent work [9] is looking at new methods of control that use relational multi-point + analysis. Instead of directly using touch coordinates, which are related to a + global screen space, an initial touch is used as an `anchor'' to create a local + coordinate space in which subsequent touches can be located and compared. This + local coordinate space frees touches from being locked to one single relationship, + and allows for more complex interaction between touch events. So far, this method + has only been implemented on Apple computer''s small capacitive touch pads. Additionally, + there has yet to be a user study that directly compares [9] against mappings of + touch events within global coordinate spaces. With this in mind, we have developed + and evaluated two interfaces with the aim of determining and quantifying some + of these differences within the context of our custom large multi-touch surfaces + [1].' 
+ address: 'Ann Arbor, Michigan' + author: Blake Johnston and Owen Vallis and Ajay Kapur + bibtex: "@inproceedings{Johnston2012,\n abstract = {Mapping between musical interfaces,\ + \ and sound engines, is integral to the nature of an interface [3]. Traditionally,\ + \ musical applications for touch surfaces have directly mapped touch coordinates\ + \ to control parameters. However, recent work [9] is looking at new methods of\ + \ control that use relational multi-point analysis. Instead of directly using\ + \ touch coordinates, which are related to a global screen space, an initial touch\ + \ is used as an `anchor' to create a local coordinate space in which subsequent\ + \ touches can be located and compared. This local coordinate space frees touches\ + \ from being locked to one single relationship, and allows for more complex interaction\ + \ between touch events. So far, this method has only been implemented on Apple\ + \ computer's small capacitive touch pads. Additionally, there has yet to be a\ + \ user study that directly compares [9] against mappings of touch events within\ + \ global coordinate spaces. 
With this in mind, we have developed and evaluated\ + \ two interfaces with the aim of determining and quantifying some of these differences\ + \ within the context of our custom large multi-touch surfaces [1].},\n address\ + \ = {Ann Arbor, Michigan},\n author = {Blake Johnston and Owen Vallis and Ajay\ + \ Kapur},\n booktitle = {Proceedings of the International Conference on New Interfaces\ + \ for Musical Expression},\n doi = {10.5281/zenodo.1178297},\n issn = {2220-4806},\n\ + \ keywords = {Multi-Touch, User Study, Relational-point interface},\n publisher\ + \ = {University of Michigan},\n title = {A Comparative User Study of Two Methods\ + \ of Control on a Multi-Touch Surface for Musical Expression},\n url = {http://www.nime.org/proceedings/2012/nime2012_94.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176472 + doi: 10.5281/zenodo.1178297 issn: 2220-4806 - keywords: 'Expressive control, mapping gestures to acoustic results, metaphors for - musical control, Tactex, Buchla Thunder, digitizing tablets.' - pages: 192--194 - title: Intimate Musical Control of Computers with a Variety of Controllers and Gesture - Mapping Metaphors - url: http://www.nime.org/proceedings/2002/nime2002_192.pdf - year: 2002 + keywords: 'Multi-Touch, User Study, Relational-point interface' + publisher: University of Michigan + title: A Comparative User Study of Two Methods of Control on a Multi-Touch Surface + for Musical Expression + url: http://www.nime.org/proceedings/2012/nime2012_94.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Wilkerson2002 - abstract: 'The Mutha Rubboard is a musical controller based on the rubboard, washboard - or frottoir metaphor commonly used in the Zydeco music genre of South Louisiana. - It is not onlya metamorphosis of a traditional instrument, but a modern bridge - of exploration into a rich musical heritage. 
It uses capacitive and piezo sensing - technology to output MIDI and raw audio data.This new controller reads the key - placement in two parallel planes by using radio capacitive sensing circuitry expanding - greatly on the standard corrugated metal playing surface. The percussive output - normally associated with the rubboard is captured through piezo contact sensors - mounted directly on the keys (the playing implements). Additionally,mode functionality - is controlled by discrete switching on the keys.This new instrument is meant to - be easily played by both experienced players and those new to the rubboard. It - lends itself to an expressive freedom by placing the control surface on the chest - and allowing the hands to move uninhibited about it or by playing it in the usual - way, preserving its musical heritage.' - address: 'Dublin, Ireland' - author: 'Wilkerson, Carr and Serafin, Stefania and Ng, Carmen' - bibtex: "@inproceedings{Wilkerson2002,\n abstract = {The Mutha Rubboard is a musical\ - \ controller based on the rubboard, washboard or frottoir metaphor commonly used\ - \ in the Zydeco music genre of South Louisiana. It is not onlya metamorphosis\ - \ of a traditional instrument, but a modern bridge of exploration into a rich\ - \ musical heritage. It uses capacitive and piezo sensing technology to output\ - \ MIDI and raw audio data.This new controller reads the key placement in two parallel\ - \ planes by using radio capacitive sensing circuitry expanding greatly on the\ - \ standard corrugated metal playing surface. The percussive output normally associated\ - \ with the rubboard is captured through piezo contact sensors mounted directly\ - \ on the keys (the playing implements). Additionally,mode functionality is controlled\ - \ by discrete switching on the keys.This new instrument is meant to be easily\ - \ played by both experienced players and those new to the rubboard. 
It lends itself\ - \ to an expressive freedom by placing the control surface on the chest and allowing\ - \ the hands to move uninhibited about it or by playing it in the usual way, preserving\ - \ its musical heritage.},\n address = {Dublin, Ireland},\n author = {Wilkerson,\ - \ Carr and Serafin, Stefania and Ng, Carmen},\n booktitle = {Proceedings of the\ - \ International Conference on New Interfaces for Musical Expression},\n date =\ - \ {24-26 May, 2002},\n doi = {10.5281/zenodo.1176474},\n issn = {2220-4806},\n\ - \ keywords = {MIDI controllers, computer music, Zydeco music, interactive music,\ - \ electronic musical instrument, human computer interface, Louisiana heritage,\ - \ physical modeling, bowl resonators.},\n pages = {195--198},\n title = {The Mutha\ - \ Rubboard Controller},\n url = {http://www.nime.org/proceedings/2002/nime2002_195.pdf},\n\ - \ year = {2002}\n}\n" + ID: Levinson2012 + abstract: 'TedStick is a new wireless musical instrument that processes acoustic + sounds resonating within its wooden body and ma-nipulates them via gestural movements. + The sounds are transduced by a piezoelectric sensor inside the wooden body, so + any tactile contact with TedStick is transmitted as audio and further processed + by a computer. The main method for performing with TedStick focuses on extracting + diverse sounds from within the resonant properties of TedStick it-self. This is + done by holding TedStick in one hand and a standard drumstick in the opposite + hand while tapping, rubbing, or scraping the two against each other. Gestural + movements of TedStick are then mapped to parameters for several sound effects + including pitch shift, delay, reverb and low/high pass filters. Using this technique + the hand holding the drumstick can control the acoustic sounds/interaction between + the sticks while the hand holding TedStick can fo-cus purely on controlling the + sound manipulation and effects parameters.' 
+ address: 'Ann Arbor, Michigan' + author: Cory Levinson + bibtex: "@inproceedings{Levinson2012,\n abstract = {TedStick is a new wireless musical\ + \ instrument that processes acoustic sounds resonating within its wooden body\ + \ and ma-nipulates them via gestural movements. The sounds are transduced by a\ + \ piezoelectric sensor inside the wooden body, so any tactile contact with TedStick\ + \ is transmitted as audio and further processed by a computer. The main method\ + \ for performing with TedStick focuses on extracting diverse sounds from within\ + \ the resonant properties of TedStick it-self. This is done by holding TedStick\ + \ in one hand and a standard drumstick in the opposite hand while tapping, rubbing,\ + \ or scraping the two against each other. Gestural movements of TedStick are then\ + \ mapped to parameters for several sound effects including pitch shift, delay,\ + \ reverb and low/high pass filters. Using this technique the hand holding the\ + \ drumstick can control the acoustic sounds/interaction between the sticks while\ + \ the hand holding TedStick can fo-cus purely on controlling the sound manipulation\ + \ and effects parameters.},\n address = {Ann Arbor, Michigan},\n author = {Cory\ + \ Levinson},\n booktitle = {Proceedings of the International Conference on New\ + \ Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178325},\n issn\ + \ = {2220-4806},\n keywords = {tangible user interface, piezoelectric sensors,\ + \ gestural per-formance, digital sound manipulation},\n publisher = {University\ + \ of Michigan},\n title = {TedStick: A Tangible Electrophonic Drumstick},\n url\ + \ = {http://www.nime.org/proceedings/2012/nime2012_96.pdf},\n year = {2012}\n\ + }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176474 + doi: 10.5281/zenodo.1178325 issn: 2220-4806 - keywords: 'MIDI controllers, computer music, Zydeco music, 
interactive music, electronic - musical instrument, human computer interface, Louisiana heritage, physical modeling, - bowl resonators.' - pages: 195--198 - title: The Mutha Rubboard Controller - url: http://www.nime.org/proceedings/2002/nime2002_195.pdf - year: 2002 + keywords: 'tangible user interface, piezoelectric sensors, gestural per-formance, + digital sound manipulation' + publisher: University of Michigan + title: 'TedStick: A Tangible Electrophonic Drumstick' + url: http://www.nime.org/proceedings/2012/nime2012_96.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Winkler2002 - abstract: 'Falling Up is an evening-length performance incorporating dance and theatre - with movement-controlled audio/video playback and processing. The solo show is - a collaboration between Cindy Cummings (performance) and Todd Winkler(sound, video), - first performed at the Dublin Fringe Festival,2001. Each thematic section of the - work shows a different typeof interactive relationship between movement, video - and sound. This demonstration explains the various technical configurations and - aesthetic thinking behind aspects of the work.' - address: 'Dublin, Ireland' - author: 'Winkler, Todd' - bibtex: "@inproceedings{Winkler2002,\n abstract = {Falling Up is an evening-length\ - \ performance incorporating dance and theatre with movement-controlled audio/video\ - \ playback and processing. The solo show is a collaboration between Cindy Cummings\ - \ (performance) and Todd Winkler(sound, video), first performed at the Dublin\ - \ Fringe Festival,2001. Each thematic section of the work shows a different typeof\ - \ interactive relationship between movement, video and sound. 
This demonstration\ - \ explains the various technical configurations and aesthetic thinking behind\ - \ aspects of the work.},\n address = {Dublin, Ireland},\n author = {Winkler, Todd},\n\ - \ booktitle = {Proceedings of the International Conference on New Interfaces for\ - \ Musical Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176476},\n\ - \ issn = {2220-4806},\n keywords = {Dance, Video processing, Movement sensor,\ - \ VNS, Very Nervous System},\n pages = {199--200},\n title = {Fusing Movement,\ - \ Sound, and Video in Falling Up, an Interactive Dance/Theatre Production},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_199.pdf},\n year = {2002}\n\ - }\n" + ID: Lu2012 + abstract: 'WIS platform is a wireless interactive sensor platform de-signed to support + dynamic and interactive applications. The platform consists of a capture system + which includes multi-ple on-body Zigbee compatible motion sensors, a processing + unit and an audio-visual display control unit. It has a com-plete open architecture + and provides interfaces to interact with other user-designed applications. Therefore, + WIS plat-form is highly extensible. Through gesture recognitions by on-body sensor + nodes and data processing, WIS platform can offer real-time audio and visual experiences + to the users. Based on this platform, we set up a multimedia installation that + presents a new interaction model between the partic-ipants and the audio-visual + environment. Furthermore, we are also trying to apply WIS platform to other installations + and performances.' + address: 'Ann Arbor, Michigan' + author: Jia-Liang Lu and Da-Lei Fang and Yi Qin and Jiu-Qiang Tang + bibtex: "@inproceedings{Lu2012,\n abstract = {WIS platform is a wireless interactive\ + \ sensor platform de-signed to support dynamic and interactive applications. 
The\ + \ platform consists of a capture system which includes multi-ple on-body Zigbee\ + \ compatible motion sensors, a processing unit and an audio-visual display control\ + \ unit. It has a com-plete open architecture and provides interfaces to interact\ + \ with other user-designed applications. Therefore, WIS plat-form is highly extensible.\ + \ Through gesture recognitions by on-body sensor nodes and data processing, WIS\ + \ platform can offer real-time audio and visual experiences to the users. Based\ + \ on this platform, we set up a multimedia installation that presents a new interaction\ + \ model between the partic-ipants and the audio-visual environment. Furthermore,\ + \ we are also trying to apply WIS platform to other installations and performances.},\n\ + \ address = {Ann Arbor, Michigan},\n author = {Jia-Liang Lu and Da-Lei Fang and\ + \ Yi Qin and Jiu-Qiang Tang},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178329},\n\ + \ issn = {2220-4806},\n keywords = {Interactive, Audio-visual experience},\n publisher\ + \ = {University of Michigan},\n title = {Wireless Interactive Sensor Platform\ + \ for Real-Time Audio-Visual Experience},\n url = {http://www.nime.org/proceedings/2012/nime2012_98.pdf},\n\ + \ year = {2012}\n}\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176476 + doi: 10.5281/zenodo.1178329 issn: 2220-4806 - keywords: 'Dance, Video processing, Movement sensor, VNS, Very Nervous System' - pages: 199--200 - title: 'Fusing Movement, Sound, and Video in Falling Up, an Interactive Dance/Theatre - Production' - url: http://www.nime.org/proceedings/2002/nime2002_199.pdf - year: 2002 + keywords: 'Interactive, Audio-visual experience' + publisher: University of Michigan + title: Wireless Interactive Sensor Platform for Real-Time Audio-Visual Experience + url: 
http://www.nime.org/proceedings/2012/nime2012_98.pdf + year: 2012 - ENTRYTYPE: inproceedings - ID: Young2002 - abstract: 'In this paper, the design and construction of a new violin interface, - the Hyperbow, is discussed. The motivation driving the research of this instrument - was the desire to create a violin bow capable of measuring the most intricate - aspects of violin techniquethe subtle elements of physical gesture that immediately - and directly impact the sound of the instrument while playing. In order to provide - this insight into the subtleties of bow articulation, a sensing system has been - integrated into a commercial carbon fiber bow to measure changes in position, - acceleration, and the downward and lateral strains of the bow stick. The sensors - were fashioned using an electromagnetic field sensing technique, commercial MEMS - accelerometers, and foil strain gauges. The measurement techniques used in this - work were found to be quite sensitive and yielded sensors that were easily controllable - by a player using traditional right hand bowing technique.' - address: 'Dublin, Ireland' - author: 'Young, Diana' - bibtex: "@inproceedings{Young2002,\n abstract = {In this paper, the design and construction\ - \ of a new violin interface, the Hyperbow, is discussed. The motivation driving\ - \ the research of this instrument was the desire to create a violin bow capable\ - \ of measuring the most intricate aspects of violin techniquethe subtle elements\ - \ of physical gesture that immediately and directly impact the sound of the instrument\ - \ while playing. In order to provide this insight into the subtleties of bow articulation,\ - \ a sensing system has been integrated into a commercial carbon fiber bow to measure\ - \ changes in position, acceleration, and the downward and lateral strains of the\ - \ bow stick. The sensors were fashioned using an electromagnetic field sensing\ - \ technique, commercial MEMS accelerometers, and foil strain gauges. 
The measurement\ - \ techniques used in this work were found to be quite sensitive and yielded sensors\ - \ that were easily controllable by a player using traditional right hand bowing\ - \ technique.},\n address = {Dublin, Ireland},\n author = {Young, Diana},\n booktitle\ - \ = {Proceedings of the International Conference on New Interfaces for Musical\ - \ Expression},\n date = {24-26 May, 2002},\n doi = {10.5281/zenodo.1176480},\n\ - \ issn = {2220-4806},\n keywords = {Hyperbow, Hyperviolin, Hyperinstrument, violin,\ - \ bow, position sensor, accelerometer, strain sensor},\n pages = {201--206},\n\ - \ title = {The Hyperbow Controller: Real-Time Dynamics Measurement of Violin Performance},\n\ - \ url = {http://www.nime.org/proceedings/2002/nime2002_201.pdf},\n year = {2002}\n\ + ID: Kapur2012 + abstract: 'In this paper, we introduce Kritaanjli, a robotic harmo-nium. Details + concerning the design, construction, and use of Kritaanjli are discussed. After + an examination of related work, quantitative research concerning the hardware + chosen in the construction of the instrument is shown, as is a thor-ough exposition + of the design process and use of CAD/CAM techniques in the design lifecycle of + the instrument. Addi-tionally, avenues for future work and compositional prac-tices + are focused upon, with particular emphasis placed on human/robot interaction, + pedagogical techniques afforded by the robotic instrument, and compositional avenues + made accessible through the use of Kritaanjli.' + address: 'Ann Arbor, Michigan' + author: Ajay Kapur and Jim Murphy and Dale Carnegie + bibtex: "@inproceedings{Kapur2012,\n abstract = {In this paper, we introduce Kritaanjli,\ + \ a robotic harmo-nium. Details concerning the design, construction, and use of\ + \ Kritaanjli are discussed. 
After an examination of related work, quantitative\ + \ research concerning the hardware chosen in the construction of the instrument\ + \ is shown, as is a thor-ough exposition of the design process and use of CAD/CAM\ + \ techniques in the design lifecycle of the instrument. Addi-tionally, avenues\ + \ for future work and compositional prac-tices are focused upon, with particular\ + \ emphasis placed on human/robot interaction, pedagogical techniques afforded\ + \ by the robotic instrument, and compositional avenues made accessible through\ + \ the use of Kritaanjli.},\n address = {Ann Arbor, Michigan},\n author = {Ajay\ + \ Kapur and Jim Murphy and Dale Carnegie},\n booktitle = {Proceedings of the International\ + \ Conference on New Interfaces for Musical Expression},\n doi = {10.5281/zenodo.1178299},\n\ + \ issn = {2220-4806},\n keywords = {Musical Robotics, pedagogy, North Indian Classical\ + \ Music, augmented instruments},\n publisher = {University of Michigan},\n title\ + \ = {Kritaanjali: A Robotic Harmonium for Performance, Pedogogy and Research},\n\ + \ url = {http://www.nime.org/proceedings/2012/nime2012_99.pdf},\n year = {2012}\n\ }\n" booktitle: Proceedings of the International Conference on New Interfaces for Musical Expression - date: '24-26 May, 2002' - doi: 10.5281/zenodo.1176480 + doi: 10.5281/zenodo.1178299 issn: 2220-4806 - keywords: 'Hyperbow, Hyperviolin, Hyperinstrument, violin, bow, position sensor, - accelerometer, strain sensor' - pages: 201--206 - title: 'The Hyperbow Controller: Real-Time Dynamics Measurement of Violin Performance' - url: http://www.nime.org/proceedings/2002/nime2002_201.pdf - year: 2002 + keywords: 'Musical Robotics, pedagogy, North Indian Classical Music, augmented instruments' + publisher: University of Michigan + title: 'Kritaanjali: A Robotic Harmonium for Performance, Pedogogy and Research' + url: http://www.nime.org/proceedings/2012/nime2012_99.pdf + year: 2012